index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
998,500 | e2602f4e588ae2ab5c122859952db6ad5c3b14f0 | from django.shortcuts import render
import requests
from geopy.geocoders import Nominatim
from .forms import CityForm
from .models import City
from django.views.generic import DeleteView, TemplateView
from django.urls import reverse_lazy
import datetime
import json
class IndexView(TemplateView):
    """Static landing page rendered from the ``Base.html`` template."""
    template_name = 'Base.html'
def Current(request):
    """Render the current-weather page.

    On POST, validate the submitted city against the OpenWeatherMap API
    (saving it only if the API recognizes it and it is not already stored).
    Then geocode every stored city and fetch its current conditions for the
    template.

    :param request: Django HttpRequest.
    :return: rendered ``WeatherApp/Current_Weather.html`` response.
    """
    url = "https://community-open-weather-map.p.rapidapi.com/onecall/timemachine"
    url_second = "https://community-open-weather-map.p.rapidapi.com/weather"
    # NOTE(review): API key hard-coded in source -- should move to settings
    # or an environment variable.  Headers hoisted: identical for all calls.
    headers = {
        'x-rapidapi-host': "community-open-weather-map.p.rapidapi.com",
        'x-rapidapi-key': "6446924734mshd20c29c9014fd63p155d13jsnc1cdd0345c05"
    }
    form = CityForm()
    Error_message = ''
    message = ''
    message_class = ''
    if request.method == 'POST':
        form = CityForm(request.POST)  # Handling form request
        if form.is_valid():
            New_City = form.cleaned_data['Address']
            if City.objects.filter(Address=New_City).exists():
                Error_message = 'City already exists in the database'
            else:
                # NOTE(review): these values look pre-URL-encoded
                # ("%22metric%22 or %22imperial%22") -- verify against the API.
                querystring = {"callback": "test", "id": "2172797",
                               "units": "%22metric%22 or %22imperial%22",
                               "mode": "xml%2C html", "q": New_City}
                response = requests.request("GET", url_second, headers=headers, params=querystring)
                if response.status_code == 200:
                    form.save()
                else:
                    Error_message = 'City does not exist in the world'
        if Error_message:
            message = Error_message
            message_class = 'alert alert-danger'
        else:
            message = 'City added successfully'
            message_class = 'alert alert-success'
    # One geocoder instance for all cities (was rebuilt on every iteration).
    Geolocator = Nominatim(user_agent="Lucas")
    Current_List = []
    for city in City.objects.all():
        Location = Geolocator.geocode(city.Address)
        if Location is None:
            # Geocoding failed for this address; skip it instead of crashing.
            continue
        querystring = {"lat": Location.latitude, "lon": Location.longitude, "dt": city.Dt}
        # Get all the features of this particular city, in the last 24 hours
        response = requests.request("GET", url, headers=headers, params=querystring)
        data = response.json()
        # Keep only the current conditions for the template.
        current = data['current']
        Current_List.append({
            'Id': city.Id,
            'City': city.Address,
            'Date': datetime.datetime.fromtimestamp(current['dt']).strftime('%Y-%m-%d %H:%M:%S'),
            'Temperature': current['temp'],
            'Humidity': current['humidity'],
            'Wind_Speed': current['wind_speed'],
            'Pressure': current['pressure'],
            'Icon': current['weather'][0]['icon'],
        })
    context = {'Current_List': Current_List, 'form': form, 'message': message, 'message_class': message_class}
    return render(request, 'WeatherApp/Current_Weather.html', context)
class WeatherCurrentDeleteView(DeleteView):
    """Confirm-and-delete view for a City, returning to the current-weather page."""
    model = City
    context_object_name = "Wd"
    template_name = "WeatherApp/Weather_current_confirm_delete.html"
    success_url = reverse_lazy("WeatherApp:current")
def Hourly(request):
    """Render the hourly-weather page.

    On POST, validate the submitted city against the weather API and save it
    if new.  Then, for each stored city, geocode its address, fetch the last
    24 hours of hourly data, and flatten the series into one dict per city
    whose keys carry the hour index as a suffix (e.g. ``Temperature0``).
    """
    url = "https://community-open-weather-map.p.rapidapi.com/onecall/timemachine"
    url_second = "https://community-open-weather-map.p.rapidapi.com/weather"
    form = CityForm()
    Error_message = ''
    message = ''
    message_class = ''
    if request.method == 'POST':
        form = CityForm(request.POST)  # Handling form request
        if form.is_valid():
            New_City = form.cleaned_data['Address']
            Existing_City = City.objects.filter(Address=New_City).count()
            if Existing_City == 0:
                # NOTE(review): querystring values look pre-URL-encoded -- verify.
                querystring = {"callback": "test", "id": "2172797", "units": "%22metric%22 or %22imperial%22",
                               "mode": "xml%2C html", "q": New_City}
                headers = {
                    'x-rapidapi-host': "community-open-weather-map.p.rapidapi.com",
                    # NOTE(review): API key hard-coded in source; move to settings.
                    'x-rapidapi-key': "6446924734mshd20c29c9014fd63p155d13jsnc1cdd0345c05"
                }
                response = requests.request("GET", url_second, headers=headers, params=querystring)
                if response.status_code == 200:
                    form.save()
                else:
                    Error_message = 'City does not exist in the world'
            else:
                Error_message = 'City already exists in the database'
        if Error_message:
            message = Error_message
            message_class = 'alert alert-danger'
        else:
            message = 'City added successfully'
            message_class = 'alert alert-success'
    Cities = City.objects.all()
    Hourly_List = []
    for city in Cities:
        Hourly_Dict = {}
        # Get the coordinates of address of the city
        Geolocator = Nominatim(user_agent="Lucas")
        Location = Geolocator.geocode(city.Address)
        Coordinates = []
        Latitude = Location.latitude
        Longitude = Location.longitude
        Coordinates.append(Latitude)
        Coordinates.append(Longitude)
        #############################################################################
        querystring = {"lat": Coordinates[0], "lon": Coordinates[1], "dt": city.Dt}
        headers = {
            'x-rapidapi-host': "community-open-weather-map.p.rapidapi.com",
            'x-rapidapi-key': "6446924734mshd20c29c9014fd63p155d13jsnc1cdd0345c05"
        }
        # Get all the features of this particular city, in the last 24 hours
        response = requests.request("GET", url, headers=headers, params=querystring)
        data = response.json()
        # Get only the hourlies features of this particular city and put it in a dictionary
        hourly = data['hourly']
        for i in range(0, len(hourly)):
            Hourly_Dict["Horario{}".format(i)] = datetime.datetime.fromtimestamp(hourly[i]['dt']).strftime('%Y-%m-%d %H:%M:%S')
            Hourly_Dict["Temperature{}".format(i)] = hourly[i]['temp']
            Hourly_Dict["Humidity{}".format(i)] = hourly[i]['humidity']
            Hourly_Dict["Wind_speed{}".format(i)] = hourly[i]['wind_speed']
            Hourly_Dict["Pressure{}".format(i)] = hourly[i]['pressure']
            Hourly_Dict["Icon{}".format(i)] = hourly[i]['weather'][0]['icon']
        Hourly_Dict["Address"] = city.Address
        Hourly_Dict["Id"] = city.Id
        Hourly_List.append(Hourly_Dict)
    context = {'Hourly_List': Hourly_List, 'form': form, 'message': message, 'message_class': message_class}
    return render(request, 'WeatherApp/Hourly_Weather.html', context)
class WeatherHourlyDeleteView(DeleteView):
    """Confirm-and-delete view for a City, returning to the hourly-weather page."""
    model = City
    context_object_name = "Wd"
    template_name = "WeatherApp/Weather_hourly_confirm_delete.html"
    success_url = reverse_lazy("WeatherApp:hourly")
def Average(request):
    """Render the average-weather page.

    Same POST/add-city handling as the other views; then, for each stored
    city, average the last 24 hours of temperature, humidity, wind speed
    and pressure (rounded to 2 decimals) for display.
    """
    url = "https://community-open-weather-map.p.rapidapi.com/onecall/timemachine"
    url_second = "https://community-open-weather-map.p.rapidapi.com/weather"
    form = CityForm()
    Error_message = ''
    message = ''
    message_class = ''
    if request.method == 'POST':
        form = CityForm(request.POST)  # Handling form request
        if form.is_valid():
            New_City = form.cleaned_data['Address']
            Existing_City = City.objects.filter(Address=New_City).count()
            if Existing_City == 0:
                querystring = {"callback": "test", "id": "2172797", "units": "%22metric%22 or %22imperial%22",
                               "mode": "xml%2C html", "q": New_City}
                headers = {
                    'x-rapidapi-host': "community-open-weather-map.p.rapidapi.com",
                    # NOTE(review): API key hard-coded in source; move to settings.
                    'x-rapidapi-key': "6446924734mshd20c29c9014fd63p155d13jsnc1cdd0345c05"
                }
                response = requests.request("GET", url_second, headers=headers, params=querystring)
                if response.status_code == 200:
                    form.save()
                else:
                    Error_message = 'City does not exist in the world'
            else:
                Error_message = 'City already exists in the database'
        if Error_message:
            message = Error_message
            message_class = 'alert alert-danger'
        else:
            message = 'City added successfully'
            message_class = 'alert alert-success'
    Cities = City.objects.all()
    Average_List = []
    for city in Cities:
        # Running sums over the hourly series for this city.
        Sum_Temperature = 0
        Sum_humidity = 0
        Sum_speedwind = 0
        Sum_pressure = 0
        Average_Dict = {}
        # Get the coordinates of address of the city
        Geolocator = Nominatim(user_agent="Lucas")
        Location = Geolocator.geocode(city.Address)
        Coordinates = []
        Latitude = Location.latitude
        Longitude = Location.longitude
        Coordinates.append(Latitude)
        Coordinates.append(Longitude)
        #############################################################################
        querystring = {"lat": Coordinates[0], "lon": Coordinates[1], "dt": city.Dt}
        headers = {
            'x-rapidapi-host': "community-open-weather-map.p.rapidapi.com",
            'x-rapidapi-key': "6446924734mshd20c29c9014fd63p155d13jsnc1cdd0345c05"
        }
        # Get all the features of this particular city, in the last 24 hours
        response = requests.request("GET", url, headers=headers, params=querystring)
        data = response.json()
        # Get only the hourlies features of this particular city and put it in a dictionary
        hourly = data['hourly']
        for i in range(0, len(hourly)):
            Sum_Temperature += hourly[i]['temp']
            Sum_humidity += hourly[i]['humidity']
            Sum_speedwind += hourly[i]['wind_speed']
            Sum_pressure += hourly[i]['pressure']
        Average_Dict = {
            "Temperature_average": round(Sum_Temperature/len(hourly), 2),
            "Humidity_average": round(Sum_humidity/len(hourly), 2),
            "WindSpeed_average": round(Sum_speedwind/len(hourly), 2),
            "Pressure_average": round(Sum_pressure/len(hourly), 2),
            "Id": city.Id,
            "Address": city.Address,
        }
        Average_List.append(Average_Dict)
    context = {'Average_List': Average_List, 'form': form, 'message': message, 'message_class': message_class}
    return render(request, 'WeatherApp/Average_Weather.html', context)
class WeatherAverageDeleteView(DeleteView):
    """Confirm-and-delete view for a City, returning to the average-weather page."""
    model = City
    context_object_name = "Wd"
    template_name = "WeatherApp/Weather_Average_confirm_delete.html"
    success_url = reverse_lazy("WeatherApp:average")
def Max_Min(request):
    """Render the max/min weather page.

    Same POST/add-city handling as the other views; then, for each stored
    city, compute the 24-hour maximum and minimum of temperature, humidity,
    wind speed and pressure for display.
    """
    url = "https://community-open-weather-map.p.rapidapi.com/onecall/timemachine"
    url_second = "https://community-open-weather-map.p.rapidapi.com/weather"
    form = CityForm()
    Error_message = ''
    message = ''
    message_class = ''
    if request.method == 'POST':
        form = CityForm(request.POST)  # Handling form request
        if form.is_valid():
            New_City = form.cleaned_data['Address']
            Existing_City = City.objects.filter(Address=New_City).count()
            if Existing_City == 0:
                querystring = {"callback": "test", "id": "2172797", "units": "%22metric%22 or %22imperial%22",
                               "mode": "xml%2C html", "q": New_City}
                headers = {
                    'x-rapidapi-host': "community-open-weather-map.p.rapidapi.com",
                    # NOTE(review): API key hard-coded in source; move to settings.
                    'x-rapidapi-key': "6446924734mshd20c29c9014fd63p155d13jsnc1cdd0345c05"
                }
                response = requests.request("GET", url_second, headers=headers, params=querystring)
                if response.status_code == 200:
                    form.save()
                else:
                    Error_message = 'City does not exist in the world'
            else:
                Error_message = 'City already exists in the database'
        if Error_message:
            message = Error_message
            message_class = 'alert alert-danger'
        else:
            message = 'City added successfully'
            message_class = 'alert alert-success'
    Cities = City.objects.all()
    Max_Min_List = []
    for city in Cities:
        # Per-city series collected over the hourly data.
        List_Temperature = []
        List_humidity = []
        List_speedwind = []
        List_pressure = []
        Max_Min_Dict = {}
        # Get the coordinates of address of the city
        Geolocator = Nominatim(user_agent="Lucas")
        Location = Geolocator.geocode(city.Address)
        Coordinates = []
        Latitude = Location.latitude
        Longitude = Location.longitude
        Coordinates.append(Latitude)
        Coordinates.append(Longitude)
        #############################################################################
        querystring = {"lat": Coordinates[0], "lon": Coordinates[1], "dt": city.Dt}
        headers = {
            'x-rapidapi-host': "community-open-weather-map.p.rapidapi.com",
            'x-rapidapi-key': "6446924734mshd20c29c9014fd63p155d13jsnc1cdd0345c05"
        }
        # Get all the features of this particular city, in the last 24 hours
        response = requests.request("GET", url, headers=headers, params=querystring)
        data = response.json()
        # Get only the hourlies features of this particular city and put it in a dictionary
        hourly = data['hourly']
        for i in range(0, len(hourly)):
            List_Temperature.append(hourly[i]['temp'])
            List_humidity.append(hourly[i]['humidity'])
            List_speedwind.append(hourly[i]['wind_speed'])
            List_pressure.append(hourly[i]['pressure'])
        Max_Min_Dict = {
            "Max_Temperature": max(List_Temperature),
            "Min_Temperature": min(List_Temperature),
            "Max_Humidity": max(List_humidity),
            "Min_Humidity": min(List_humidity),
            "Max_WindSpeed": max(List_speedwind),
            "Min_WindSpeed": min(List_speedwind),
            "Max_Pressure": max(List_pressure),
            "Min_Pressure": min(List_pressure),
            "Id": city.Id,
            "Address": city.Address,
        }
        Max_Min_List.append(Max_Min_Dict)
    context = {'Max_Min_List': Max_Min_List, 'form': form, 'message': message, 'message_class': message_class}
    return render(request, 'WeatherApp/Max_Min_Weather.html', context)
class Weather_Max_Min_DeleteView(DeleteView):
    """Confirm-and-delete view for a City, returning to the max/min weather page."""
    model = City
    context_object_name = "Wd"
    template_name = "WeatherApp/Weather_Max_Min_confirm_delete.html"
    success_url = reverse_lazy("WeatherApp:max_min")
def Bar_Chart(request):
    """Render per-city 24-hour charts of temperature, humidity, wind and pressure.

    POST adds a new city after validating it against the weather API,
    mirroring the sibling weather views.  Each stored city's hourly series
    are JSON-encoded so the template can hand them to the charting library.

    :param request: Django HttpRequest.
    :return: rendered ``WeatherApp/Graphics.html`` response.
    """
    url = "https://community-open-weather-map.p.rapidapi.com/onecall/timemachine"
    url_second = "https://community-open-weather-map.p.rapidapi.com/weather"
    # NOTE(review): API key hard-coded in source -- move to settings/env var.
    headers = {
        'x-rapidapi-host': "community-open-weather-map.p.rapidapi.com",
        'x-rapidapi-key': "6446924734mshd20c29c9014fd63p155d13jsnc1cdd0345c05"
    }
    form = CityForm()
    Error_message = ''
    message = ''
    message_class = ''
    if request.method == 'POST':
        form = CityForm(request.POST)  # Handling form request
        if form.is_valid():
            New_City = form.cleaned_data['Address']
            if City.objects.filter(Address=New_City).exists():
                Error_message = 'City already exists in the database'
            else:
                querystring = {"callback": "test", "id": "2172797",
                               "units": "%22metric%22 or %22imperial%22",
                               "mode": "xml%2C html", "q": New_City}
                response = requests.request("GET", url_second, headers=headers, params=querystring)
                if response.status_code == 200:
                    form.save()
                else:
                    Error_message = 'City does not exist in the world'
        # Fix: the original assigned Error_message but never surfaced it.
        # Build the same message/message_class pair the sibling views use.
        if Error_message:
            message = Error_message
            message_class = 'alert alert-danger'
        else:
            message = 'City added successfully'
            message_class = 'alert alert-success'
    # One geocoder for all cities (was implicitly rebuilt per iteration).
    Geolocator = Nominatim(user_agent="Lucas")
    List_graphic = []
    for city in City.objects.all():
        Location = Geolocator.geocode(city.Address)
        if Location is None:
            # Geocoding failed for this address; skip it instead of crashing.
            continue
        querystring = {"lat": Location.latitude, "lon": Location.longitude, "dt": city.Dt}
        # All features of this particular city over the last 24 hours.
        response = requests.request("GET", url, headers=headers, params=querystring)
        hourly = response.json()['hourly']
        List_graphic.append({
            "Temperature": json.dumps([h['temp'] for h in hourly]),
            "Humidity": json.dumps([h['humidity'] for h in hourly]),
            "WindSpeed": json.dumps([h['wind_speed'] for h in hourly]),
            "Pressure": json.dumps([h['pressure'] for h in hourly]),
            "Time": json.dumps([
                datetime.datetime.fromtimestamp(h['dt']).strftime('%Y-%m-%d %H:%M:%S')
                for h in hourly
            ]),
            "Address": city.Address,
        })
    # form/message keys added for consistency with the sibling views; extra
    # context entries are harmless if the template does not reference them.
    context = {"List_graphic": List_graphic, 'form': form,
               'message': message, 'message_class': message_class}
    return render(request, 'WeatherApp/Graphics.html', context)
|
998,501 | ef1112baa8bd2bb3f58eaaef043e3154185ead9b | # coding=utf-8
# Loads a list of (id, date, status) tuples from a csv file.
class CycloneCsvFileReader:
    """Reads (id, date, status) triples from a comma-separated text file."""

    def read(self, file):
        """Yield an ``(id, date, status)`` tuple for every line of *file*.

        Only line terminators are stripped: the original used ``rstrip()``,
        which also removed meaningful trailing spaces from the last field
        (a problem the original comment itself pointed out).
        """
        for line in file:
            parts = line.rstrip('\r\n').split(',')
            yield parts[0], parts[1], parts[2]
|
998,502 | dde7dc221cf8003029858e808364d73306510e6a | import csv
class Reader(object):
    """
    Generic Reader class for processing input. Should be subclassed instead of used directly.
    """

    def __init__(self, filename):
        """Store *filename* and immediately run :meth:`process`.

        The original wrote ``return self.process()``; ``__init__`` must
        return ``None``, so any subclass whose ``process`` returned a value
        raised ``TypeError`` at construction time.  The call is kept, the
        return is dropped.
        """
        self.filename = filename
        self.process()

    def process(self):
        """Consume ``self.filename``.  Override in subclass."""
        pass
998,503 | 9b4659399eae71e48bd2f4bea88766550f65506f | #!/usr/bin/python
"""Sum a fixed list of numbers two ways (for-loop and while-loop) and print both.

Ported from Python 2 (``print`` statements) to Python 3, and the accumulator
renamed so it no longer shadows the builtin ``sum``.
"""


def sum_with_for(numbers):
    """Return the sum of *numbers*, accumulated front-to-back with a for loop."""
    total = 0
    for number in numbers:
        total += number
    return total


def sum_with_while(numbers):
    """Return the sum of *numbers*, accumulated back-to-front with a while loop."""
    total = 0
    index = len(numbers)
    while index > 0:
        index -= 1
        total += numbers[index]
    return total


if __name__ == "__main__":
    numbers = [12, 45, 78, 20]
    # Output text matches the original Python 2 script exactly.
    print("The sum is:", sum_with_for(numbers))
    print("The sum_while is:", sum_with_while(numbers))
|
998,504 | 4716fe2cdd05028a50dd27df735109477d2206a5 | from sklearn import svm
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score
import numpy as np
import sys
from sklearn.ensemble import RandomForestClassifier
from sklearn.neural_network import MLPClassifier
def prediction(train, test):
    """Train an SVM on *train*, evaluate it on *test*, and print metrics.

    The feature-vector width is fixed by the largest feature index seen in
    either file.  Precision/recall/f1 are printed for the first class only
    (``average=None`` yields per-class arrays).
    """
    feature_num = parse_feature_num(train, 0)
    feature_num = parse_feature_num(test, feature_num)
    train_X, train_y = parse_feature_label(train, feature_num)
    classifier = get_SVM_classifier()
    classifier.fit(train_X, train_y)
    test_X, test_y = parse_feature_label(test, feature_num)
    y_true = np.array(test_y)
    y_pred = np.array(classifier.predict(test_X))
    accuracy = accuracy_score(y_true, y_pred)
    precision = precision_score(y_true, y_pred, average=None)
    recall = recall_score(y_true, y_pred, average=None)
    f1 = f1_score(y_true, y_pred, average=None)
    print("precision: " + str(precision[0]))
    print("recall: " + str(recall[0]))
    print("f1: " + str(f1[0]))
    print("accuracy: " + str(accuracy))
def get_random_forest_classifier():
    """Build the random-forest alternative (500 trees, OOB scoring, fixed seed)."""
    forest = RandomForestClassifier(n_estimators=500, oob_score=True, random_state=1)
    return forest
def get_neural_network_classifier():
    """Build the MLP alternative (four hidden layers of 50 units, fixed seed)."""
    network = MLPClassifier(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(50, 50, 50, 50), random_state=1)
    return network
def get_SVM_classifier():
    """Build the default SVM classifier used by :func:`prediction`."""
    machine = svm.SVC()
    return machine
def parse_feature_label(train, feature_num):
    """Parse a libsvm-style file into a dense feature matrix and label list.

    Each line looks like ``label idx:val idx:val ...``.  Every row becomes a
    vector of length ``feature_num + 1`` with values scaled by 100.

    :param train: path to the data file (training or test set).
    :param feature_num: highest feature index present (str or int).
    :return: ``(X, y)`` -- list of feature rows and list of string labels.
    """
    X = []
    y = []
    width = int(feature_num) + 1  # hoisted: constant for the whole file
    with open(train) as fh:
        for line in fh:
            tokens = line.split(' ')
            y.append(tokens[0])  # first token is the class label
            features = [0] * width
            for token in tokens:
                if ':' in token:
                    # Split each token once (the original split it twice).
                    parts = token.split(':')
                    features[int(parts[0])] = float(parts[1]) * 100
            X.append(features)
    return X, y
def parse_feature_num(files, feature_num):
    """Scan *files* and return the largest feature index seen.

    The running maximum is kept as the string taken from the file (matching
    the original behaviour); *feature_num* is the starting value and is
    returned unchanged when no larger index occurs.
    """
    with open(files) as handle:
        for row in handle:
            for token in row.split(' '):
                if ':' not in token:
                    continue
                candidate = token.split(':')[0]
                if int(candidate) > int(feature_num):
                    feature_num = candidate
    return feature_num
def main():
    """CLI entry point: ``python script.py <train-file> <test-file>``."""
    train_path = sys.argv[1]
    test_path = sys.argv[2]
    prediction(train_path, test_path)


if __name__ == "__main__":
    main()
|
998,505 | 5ba1b6bf8bfb9cdde177ba5df87f309605ddf7d4 | # Given two integers m and n (m <= n), write a program that prints every number from m to n inclusive.
def print_inclusive_range(m, n):
    """Print every integer from *m* to *n* inclusive, one per line.

    Assumes ``m <= n`` (per the task statement); prints nothing otherwise.
    """
    for value in range(m, n + 1):
        print(value)


if __name__ == "__main__":
    # Two integers m and n (m <= n) are read from standard input, one per line.
    # Guarded so importing this module no longer blocks on input().
    print_inclusive_range(int(input()), int(input()))
|
998,506 | de2a370a6d69ed38e449c48e8201a4c4d9185f5f | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-argument
import pytest
pytest.importorskip("ethosu.vela")
import math
import numpy as np
import tensorflow as tf
import tflite.Model
import tvm
from tvm import relay
from tvm.relay.backend.contrib.ethosu import legalize, preprocess
from tvm.relay import dataflow_pattern
from tvm.relay.op.contrib import ethosu
from tvm.relay.backend.contrib.ethosu import util, codegen
from tvm.relay.build_module import bind_params_by_name
from tvm.relay.frontend.tflite import get_pad_value
from tvm.relay.expr_functor import ExprVisitor
from . import infra
def partition_ethosu_by_table(mod, pattern_table):
    """In case only the legalization part is supported for an operator, we don't
    want to add the operator's pattern to the pattern table so that the compiler
    wouldn't attempt to offload an operator without full stack support."""
    mod = relay.transform.InferType()(mod)
    # Fixed the duplicated assignment typo (`mod = mod = ...`) in the original.
    mod = codegen.replicate_pads(mod)
    mod = relay.transform.InferType()(mod)
    mod = relay.transform.MergeComposite(pattern_table)(mod)
    mod = relay.transform.AnnotateTarget("ethos-u")(mod)
    mod = relay.transform.MergeCompilerRegions()(mod)
    mod = relay.transform.InferType()(mod)
    mod = relay.transform.PartitionGraph()(mod)
    mod = relay.transform.InferType()(mod)
    mod = preprocess.preprocess_ext_io()(mod)
    return mod
def relu_n1_to_1(x):
    """
    The specific pattern will be replaced into RELU_N1_TO_1 by tflite.
    """
    clipped_above = tf.math.minimum(x, 1.0)
    return tf.math.maximum(-1.0, clipped_above)
def test_split_indices_legalize():
    """Legalizing relay.split with explicit split indices must lower to a
    sequence of strided_slice ops, both for axis 1 and axis 2."""
    def create_graph(axis):
        # relu feeding a 4-way split (indices 5, 20, 45 of a dim of size 50).
        x = relay.var("x", shape=(1, 50, 50, 3))
        x_relu = relay.nn.relu(x)
        split_output = relay.split(x_relu, [5, 20, 45], axis).tuple_value
        return relay.Function([x], split_output)

    def expected_mod_axis1():
        # Expected IR after legalization when splitting along axis 1.
        expected_ir_string = """
        #[version = "0.0.5"]
        def @tvmgen_default_ethos_u_main_0(%x: Tensor[(1, 50, 50, 3), float32]) -> (Tensor[(1, 5, 50, 3), float32],\
        Tensor[(1, 15, 50, 3), float32],\
        Tensor[(1, 25, 50, 3), float32],\
        Tensor[(1, 5, 50, 3), float32]) {
        %0 = nn.relu(%x) /* ty=Tensor[(1, 50, 50, 3), float32] */;
        %1 = strided_slice(%0, begin=[0, 0, 0, 0], end=[1, 5, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 5, 50, 3), float32] */;
        %2 = strided_slice(%0, begin=[0, 5, 0, 0], end=[1, 20, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 15, 50, 3), float32] */;
        %3 = strided_slice(%0, begin=[0, 20, 0, 0], end=[1, 45, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 25, 50, 3), float32] */;
        %4 = strided_slice(%0, begin=[0, 45, 0, 0], end=[1, 50, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 5, 50, 3), float32] */;
        (%1, %2, %3, %4)
        }
        """
        return tvm.relay.fromtext(expected_ir_string)

    def expected_mod_axis2():
        # Expected IR after legalization when splitting along axis 2.
        expected_ir_string = """
        #[version = "0.0.5"]
        def @tvmgen_default_ethos_u_main_0(%x: Tensor[(1, 50, 50, 3), float32]) -> (Tensor[(1, 50, 5, 3), float32],\
        Tensor[(1, 50, 15, 3), float32],\
        Tensor[(1, 50, 25, 3), float32],\
        Tensor[(1, 50, 5, 3), float32]) {
        %0 = nn.relu(%x) /* ty=Tensor[(1, 50, 50, 3), float32] */;
        %1 = strided_slice(%0, begin=[0, 0, 0, 0], end=[1, 50, 5, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 50, 5, 3), float32] */;
        %2 = strided_slice(%0, begin=[0, 0, 5, 0], end=[1, 50, 20, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 50, 15, 3), float32] */;
        %3 = strided_slice(%0, begin=[0, 0, 20, 0], end=[1, 50, 45, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 50, 25, 3), float32] */;
        %4 = strided_slice(%0, begin=[0, 0, 45, 0], end=[1, 50, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 50, 5, 3), float32] */;
        (%1, %2, %3, %4)
        }
        """
        return tvm.relay.fromtext(expected_ir_string)

    rewrite_split = [legalize.PartitionedSplitRewriter(), legalize.SplitRewriter()]

    # Axis 1: rewrite and compare structurally against the expected module.
    mod_axis1 = tvm.IRModule()
    func = create_graph(1)
    for r in rewrite_split:
        func = dataflow_pattern.rewrite(r, func)
    mod_axis1["tvmgen_default_ethos_u_main_0"] = func
    expected_axis1 = expected_mod_axis1()
    tvm.ir.assert_structural_equal(mod_axis1, expected_axis1)

    # Axis 2: same check along the other spatial axis.
    mod_axis2 = tvm.IRModule()
    func = create_graph(2)
    for r in rewrite_split:
        func = dataflow_pattern.rewrite(r, func)
    mod_axis2["tvmgen_default_ethos_u_main_0"] = func
    expected_axis2 = expected_mod_axis2()
    tvm.ir.assert_structural_equal(mod_axis2, expected_axis2)
def test_split_sections_legalize():
    """Legalizing relay.split with a section count (5 equal parts) must lower
    to strided_slice ops, with the downstream tanh consumers preserved."""
    def create_graph(axis, sections):
        # abs -> N-way equal split -> tanh on every piece.
        x = relay.var("x", shape=(1, 50, 50, 3))
        x_abs = relay.abs(x)
        split_output = relay.split(x_abs, sections, axis).tuple_value
        outputs = list()
        for section_idx in range(sections):
            split_single_out = relay.TupleGetItem(split_output, section_idx)
            tanh = relay.tanh(split_single_out)
            outputs.append(tanh)
        tuple_out = relay.Tuple(outputs)
        return relay.Function([x], tuple_out)

    def expected_mod_axis1():
        # Expected IR after legalization when splitting along axis 1.
        expected_ir_string = """
        #[version = "0.0.5"]
        def @tvmgen_default_ethos_u_main_0(%x: Tensor[(1, 50, 50, 3), float32]) -> (Tensor[(1, 10, 50, 3), float32],\
        Tensor[(1, 10, 50, 3), float32],\
        Tensor[(1, 10, 50, 3), float32],\
        Tensor[(1, 10, 50, 3), float32],\
        Tensor[(1, 10, 50, 3), float32]) {
        %0 = abs(%x) /* ty=Tensor[(1, 50, 50, 3), float32] */;
        %1 = strided_slice(%0, begin=[0, 0, 0, 0], end=[1, 10, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 10, 50, 3), float32] */;
        %2 = strided_slice(%0, begin=[0, 10, 0, 0], end=[1, 20, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 10, 50, 3), float32] */;
        %3 = strided_slice(%0, begin=[0, 20, 0, 0], end=[1, 30, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 10, 50, 3), float32] */;
        %4 = strided_slice(%0, begin=[0, 30, 0, 0], end=[1, 40, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 10, 50, 3), float32] */;
        %5 = strided_slice(%0, begin=[0, 40, 0, 0], end=[1, 50, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 10, 50, 3), float32] */;
        %6 = (%1, %2, %3, %4, %5);
        %7 = %6.0;
        %8 = tanh(%7) /* ty=Tensor[(1, 10, 50, 3), float32] */;
        %9 = %6.1;
        %10 = tanh(%9) /* ty=Tensor[(1, 10, 50, 3), float32] */;
        %11 = %6.2;
        %12 = tanh(%11) /* ty=Tensor[(1, 10, 50, 3), float32] */;
        %13 = %6.3;
        %14 = tanh(%13) /* ty=Tensor[(1, 10, 50, 3), float32] */;
        %15 = %6.4;
        %16 = tanh(%15) /* ty=Tensor[(1, 10, 50, 3), float32] */;
        (%8, %10, %12, %14, %16)
        }
        """
        return tvm.relay.fromtext(expected_ir_string)

    def expected_mod_axis2():
        # Expected IR after legalization when splitting along axis 2.
        expected_ir_string = """
        #[version = "0.0.5"]
        def @tvmgen_default_ethos_u_main_0(%x: Tensor[(1, 50, 50, 3), float32]) -> (Tensor[(1, 50, 10, 3), float32],\
        Tensor[(1, 50, 10, 3), float32],\
        Tensor[(1, 50, 10, 3), float32],\
        Tensor[(1, 50, 10, 3), float32],\
        Tensor[(1, 50, 10, 3), float32]) {
        %0 = abs(%x) /* ty=Tensor[(1, 50, 50, 3), float32] */;
        %1 = strided_slice(%0, begin=[0, 0, 0, 0], end=[1, 50, 10, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 50, 10, 3), float32] */;
        %2 = strided_slice(%0, begin=[0, 0, 10, 0], end=[1, 50, 20, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 50, 10, 3), float32] */;
        %3 = strided_slice(%0, begin=[0, 0, 20, 0], end=[1, 50, 30, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 50, 10, 3), float32] */;
        %4 = strided_slice(%0, begin=[0, 0, 30, 0], end=[1, 50, 40, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 50, 10, 3), float32] */;
        %5 = strided_slice(%0, begin=[0, 0, 40, 0], end=[1, 50, 50, 3], strides=[1], axes=None)\
        /* ty=Tensor[(1, 50, 10, 3), float32] */;
        %6 = (%1, %2, %3, %4, %5);
        %7 = %6.0;
        %8 = tanh(%7) /* ty=Tensor[(1, 50, 10, 3), float32] */;
        %9 = %6.1;
        %10 = tanh(%9) /* ty=Tensor[(1, 50, 10, 3), float32] */;
        %11 = %6.2;
        %12 = tanh(%11) /* ty=Tensor[(1, 50, 10, 3), float32] */;
        %13 = %6.3;
        %14 = tanh(%13) /* ty=Tensor[(1, 50, 10, 3), float32] */;
        %15 = %6.4;
        %16 = tanh(%15) /* ty=Tensor[(1, 50, 10, 3), float32] */;
        (%8, %10, %12, %14, %16)
        }
        """
        return tvm.relay.fromtext(expected_ir_string)

    rewrite_split = [legalize.PartitionedSplitRewriter(), legalize.SplitRewriter()]

    # Axis 1: rewrite and compare structurally against the expected module.
    mod_axis1 = tvm.IRModule()
    func = create_graph(1, 5)
    for r in rewrite_split:
        func = dataflow_pattern.rewrite(r, func)
    mod_axis1["tvmgen_default_ethos_u_main_0"] = func
    expected_axis1 = expected_mod_axis1()
    tvm.ir.assert_structural_equal(mod_axis1, expected_axis1)

    # Axis 2: same check along the other spatial axis.
    mod_axis2 = tvm.IRModule()
    func = create_graph(2, 5)
    for r in rewrite_split:
        func = dataflow_pattern.rewrite(r, func)
    mod_axis2["tvmgen_default_ethos_u_main_0"] = func
    expected_axis2 = expected_mod_axis2()
    tvm.ir.assert_structural_equal(mod_axis2, expected_axis2)
# Axis permutations keyed by kernel layout.  NOTE(review): per the name,
# presumably the inverse transform relative to OHWI ordering -- confirm
# against the legalization code that consumes this map.
INVERSE_LAYOUT_TRANSFORM_OHWI_MAP = {
    "HWIO": [1, 2, 3, 0],
    "HWOI": [1, 2, 0, 3],
    "OWHI": [0, 1, 2, 3],
}
@pytest.mark.parametrize("ifm_shape", [(1, 299, 299, 3), (1, 55, 55, 3)])
@pytest.mark.parametrize("kernel_shape", [(3, 2), (1, 3)])
@pytest.mark.parametrize("padding", ["SAME", "VALID"])
@pytest.mark.parametrize("strides, dilation", [((1, 1), (2, 1)), ((3, 2), (1, 1))])
@pytest.mark.parametrize("activation", [None, "RELU"])
def test_tflite_conv2d_legalize(ifm_shape, kernel_shape, padding, strides, dilation, activation):
    """Build a quantized TFLite conv2d, run it through the ethos-u partition
    and Conv2DRewriter legalization, and verify shapes/attributes of the
    resulting operator."""
    dtype = "int8"

    def create_tflite_graph_single():
        # Single conv2d (optionally + relu) converted to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, input_shape):
                op = tf.nn.conv2d(
                    input_shape,
                    filters=tf.constant(
                        np.random.uniform(size=(kernel_shape[0], kernel_shape[1], 3, 3)),
                        dtype=tf.float32,
                    ),
                    strides=strides,
                    padding=padding,
                    data_format="NHWC",
                    dilations=dilation,
                )
                if activation:
                    op = tf.nn.relu(op)
                return op

        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        # Assert on the legalized operator's types, weights and attributes.
        op = ext_func.body
        ofm_channels = op.attrs.ofm_channels
        # check IFM
        ifm = op.args[0].checked_type
        assert list(ifm.shape) == list(ifm_shape)
        assert str(ifm.dtype) == dtype
        assert ifm.shape[3] == ofm_channels
        # check OFM
        ofm = op.checked_type
        expected_ofm_shape = infra.compute_ofm_shape(
            ifm_shape, padding, kernel_shape, strides, dilation
        )
        assert list(ofm.shape) == list(expected_ofm_shape)
        assert str(ofm.dtype) == dtype
        assert ofm.shape[3] == ofm_channels
        # check weights
        weights_ohwi = op.args[1].data.asnumpy()
        assert str(weights_ohwi.dtype) == dtype
        assert weights_ohwi.shape[0] == ofm_channels
        assert weights_ohwi.shape[1] == kernel_shape[0]
        assert weights_ohwi.shape[2] == kernel_shape[1]
        assert weights_ohwi.shape[3] == 3
        # Check that scale_bias matches weight tensor
        assert list(op.args[2].checked_type.shape)[0] == ofm_channels
        expected_padding = infra.compute_padding_shape(
            ifm_shape,
            expected_ofm_shape,
            padding,
            (kernel_shape[0], kernel_shape[1]),
            strides,
            dilation,
        )
        assert list(op.attrs.padding) == list(expected_padding)
        assert list(op.attrs.strides) == list(strides)
        assert list(op.attrs.dilation) == list(dilation)
        if activation == "RELU":
            assert str(op.attrs.activation) == "CLIP"

    conv2d_pattern_table = [
        (
            ethosu.QnnConv2DParams.composite_name,
            ethosu.qnn_conv2d_pattern(),
            lambda pat: ethosu.QnnConv2DParams(pat).is_valid(),
        )
    ]

    tflite_graph = create_tflite_graph_single()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)

    mod, conv_params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )

    mod["main"] = bind_params_by_name(mod["main"], conv_params)
    mod = partition_ethosu_by_table(mod, conv2d_pattern_table)

    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.Conv2DRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
def test_tflite_conv2d_with_separate_padding_legalize():
    """Like test_tflite_conv2d_legalize, but the padding is expressed as a
    separate explicit tf.pad preceding a VALID conv2d; the legalized operator
    must absorb that padding into its attributes."""
    dtype = "int8"
    ifm_shape = (1, 55, 34, 3)
    kernel_shape = (3, 2)
    strides = (1, 1)
    dilation = (2, 1)
    padding = (0, 0, 1, 1)

    def create_tflite_graph_single():
        # Explicit pad followed by a VALID conv2d, converted to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                tf_strides = [1, strides[0], strides[1], 1]
                op = tf.pad(
                    x,
                    [[0, 0], [padding[0], padding[2]], [padding[1], padding[3]], [0, 0]],
                    "CONSTANT",
                )
                weight_shape = [kernel_shape[0], kernel_shape[1], ifm_shape[3], 3]
                weight = tf.constant(np.random.uniform(size=weight_shape), dtype=tf.float32)
                return tf.nn.conv2d(
                    op,
                    weight,
                    strides=tf_strides,
                    padding="VALID",
                    dilations=dilation,
                )

        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        # Assert on the legalized operator's types, weights and attributes;
        # padding must match the explicit tf.pad amounts.
        op = ext_func.body
        ofm_channels = op.attrs.ofm_channels
        # check IFM
        ifm = op.args[0].checked_type
        assert list(ifm.shape) == list(ifm_shape)
        assert str(ifm.dtype) == dtype
        assert ifm.shape[3] == ofm_channels
        # check OFM
        ofm = op.checked_type
        expected_ofm_shape = infra.compute_ofm_shape(
            ifm_shape, padding, kernel_shape, strides, dilation
        )
        assert list(ofm.shape) == list(expected_ofm_shape)
        assert str(ofm.dtype) == dtype
        assert ofm.shape[3] == ofm_channels
        # check weights
        weights_ohwi = op.args[1].data.asnumpy()
        assert str(weights_ohwi.dtype) == dtype
        assert weights_ohwi.shape[0] == ofm_channels
        assert weights_ohwi.shape[1] == kernel_shape[0]
        assert weights_ohwi.shape[2] == kernel_shape[1]
        assert weights_ohwi.shape[3] == 3
        # Check that scale_bias matches weight tensor
        assert list(op.args[2].checked_type.shape)[0] == ofm_channels
        assert list(op.attrs.padding) == list(padding)
        assert list(op.attrs.strides) == list(strides)
        assert list(op.attrs.dilation) == list(dilation)

    conv2d_pattern_table = [
        (
            ethosu.QnnConv2DParams.composite_name,
            ethosu.qnn_conv2d_pattern(),
            lambda pat: ethosu.QnnConv2DParams(pat).is_valid(),
        )
    ]

    tflite_graph = create_tflite_graph_single()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)

    mod, conv_params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )

    mod["main"] = bind_params_by_name(mod["main"], conv_params)
    mod = partition_ethosu_by_table(mod, conv2d_pattern_table)

    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.Conv2DRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
def test_tflite_conv2d_with_separate_channel_padding_legalize():
    """Legalize a TFLite graph where a tf.pad pads the channel axis
    before a conv2d.  Channel padding cannot be folded into the NPU
    conv2d's padding attribute, so an `nn.pad` op must remain on the
    graph after partitioning/legalization."""
    dtype = "int8"
    ifm_shape = (1, 55, 34, 3)
    kernel_shape = (3, 2)
    strides = (1, 1)
    dilation = (2, 1)
    # (pad_before, pad_after) applied on the channel axis by tf.pad below.
    padding_ch = (1, 1)
    class ArePadOnGraph(ExprVisitor):
        """
        Visits the Graph recursively and checks if it contains 'nn.pad' op
        """
        def __init__(self):
            ExprVisitor.__init__(self)
            self.on_graph = False
        def visit_call(self, call):
            if isinstance(call.op, tvm.ir.Op):
                if str(call.op.name) == "nn.pad":
                    self.on_graph = True
            return super().visit_call(call)
        def are_pad_on_graph(self, subgraph) -> bool:
            """
            This function recursively visits the graph and checks if 'nn.pad' op is on graph
            """
            self.visit(subgraph)
            return self.on_graph
    def create_tflite_graph():
        # Build a channel-pad -> conv2d(VALID) model and quantize it to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                tf_strides = [1, strides[0], strides[1], 1]
                op = tf.pad(
                    x,
                    [[0, 0], [0, 0], [0, 0], [padding_ch[0], padding_ch[1]]],
                    "CONSTANT",
                )
                # HWIO
                weight_shape = [
                    kernel_shape[0],
                    kernel_shape[1],
                    ifm_shape[3] + padding_ch[0] + padding_ch[1],
                    3,
                ]
                weight = tf.constant(np.random.uniform(size=weight_shape), dtype=tf.float32)
                return tf.nn.conv2d(
                    op,
                    weight,
                    strides=tf_strides,
                    padding="VALID",
                    dilations=dilation,
                )
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func):
        # Channel padding is not fusable, so 'nn.pad' must survive legalization.
        assert ArePadOnGraph().are_pad_on_graph(ext_func.body)
    conv2d_pattern_table = [
        (
            ethosu.ChannelPadParams.composite_name,
            ethosu.pad_pattern(),
            lambda pat: ethosu.ChannelPadParams(pat).is_valid(),
        ),
        (
            ethosu.QnnConv2DParams.composite_name,
            ethosu.qnn_conv2d_pattern(),
            lambda pat: ethosu.QnnConv2DParams(pat).is_valid(),
        ),
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, conv_params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod["main"] = bind_params_by_name(mod["main"], conv_params)
    mod = partition_ethosu_by_table(mod, conv2d_pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.Conv2DRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("ifm_shape", [(1, 299, 299, 3), (1, 123, 17, 7)])
@pytest.mark.parametrize("kernel_shape", [(7, 3), (22, 5)])
@pytest.mark.parametrize("padding", ["SAME", "VALID"])
@pytest.mark.parametrize("strides, dilation", [((1, 1), (2, 1)), ((3, 2), (1, 1))])
@pytest.mark.parametrize("activation", ["RELU", None])
def test_tflite_depthwise_conv_2d_legalize(
    ifm_shape, kernel_shape, padding, strides, dilation, activation
):
    """Legalize a quantized TFLite depthwise_conv2d (optionally followed
    by RELU) into the NPU depthwise conv2d op and check its attributes."""
    dtype = "int8"
    def create_tflite_graph():
        # Build a depthwise_conv2d (+ optional relu) model and quantize it to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def depthwise_conv2d(self, x):
                weight_shape = [kernel_shape[0], kernel_shape[1], ifm_shape[3], 1]
                weight = tf.constant(np.random.uniform(size=weight_shape), dtype=tf.float32)
                # The input strides to the TensorFlow API needs to be of shape 1x4
                tf_strides = [1, strides[0], strides[1], 1]
                op = tf.nn.depthwise_conv2d(
                    x, weight, strides=tf_strides, padding=padding, dilations=dilation
                )
                if activation:
                    op = tf.nn.relu(op)
                return op
        model = Model()
        concrete_func = model.depthwise_conv2d.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func):
        # Inspect the legalized NPU depthwise conv2d call and its attributes.
        op = ext_func.body
        ofm_channels = op.attrs.ofm_channels
        # check IFM
        ifm = op.args[0].checked_type
        assert list(ifm.shape) == list(ifm_shape)
        assert str(ifm.dtype) == dtype
        assert ifm.shape[3] == ofm_channels
        # check OFM
        ofm = op.checked_type
        expected_ofm_shape = infra.compute_ofm_shape(
            ifm_shape, padding, kernel_shape, strides, dilation
        )
        assert list(ofm.shape) == list(expected_ofm_shape)
        assert str(ofm.dtype) == dtype
        assert ofm.shape[3] == ofm_channels
        # check weights
        weights_ohwi = op.args[1].data.asnumpy()
        assert str(weights_ohwi.dtype) == dtype
        assert weights_ohwi.shape[0] == ofm_channels
        assert weights_ohwi.shape[1] == kernel_shape[0]
        assert weights_ohwi.shape[2] == kernel_shape[1]
        assert weights_ohwi.shape[3] == 1  # only depth multiplier 1 is supported
        # Check that scale_bias matches weight tensor
        assert list(op.args[2].checked_type.shape)[0] == ofm_channels
        expected_padding = infra.compute_padding_shape(
            ifm_shape, expected_ofm_shape, padding, kernel_shape, strides, dilation
        )
        assert list(op.attrs.padding) == list(expected_padding)
        assert op.attrs.ofm_channels == ofm_channels
        assert list(op.attrs.strides) == list(strides)
        assert list(op.attrs.dilation) == list(dilation)
        if activation == "RELU":
            assert str(op.attrs.activation) == "CLIP"
    depthwise_pattern_table = [
        (
            ethosu.QnnDepthwiseConv2DParams.composite_name,
            ethosu.qnn_depthwise_conv2d_pattern(),
            lambda pat: ethosu.QnnDepthwiseConv2DParams(pat).is_valid(),
        )
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod["main"] = bind_params_by_name(mod["main"], params)
    mod = partition_ethosu_by_table(mod, depthwise_pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.DepthwiseConv2DRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
def test_tflite_depthwise_conv2d_with_separate_padding_legalize():
    """Legalize a TFLite graph where spatial padding is a separate tf.pad
    op preceding a VALID depthwise_conv2d; the pad must be folded into
    the NPU depthwise conv2d's `padding` attribute (checked in `verify`)."""
    dtype = "int8"
    ifm_shape = (1, 23, 32, 7)
    kernel_shape = (1, 2)
    strides = (3, 2)
    dilation = (1, 1)
    # (top, left, bottom, right) spatial padding applied by tf.pad below.
    padding = (0, 0, 1, 1)
    def create_tflite_graph():
        # Build a tf.pad -> depthwise_conv2d(VALID) model and quantize it to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                tf_strides = [1, strides[0], strides[1], 1]
                op = tf.pad(
                    x,
                    [[0, 0], [padding[0], padding[2]], [padding[1], padding[3]], [0, 0]],
                    "CONSTANT",
                )
                weight_shape = [kernel_shape[0], kernel_shape[1], ifm_shape[3], 1]
                weight = tf.constant(np.random.uniform(size=weight_shape), dtype=tf.float32)
                return tf.nn.depthwise_conv2d(
                    op,
                    weight,
                    strides=tf_strides,
                    padding="VALID",
                    dilations=dilation,
                )
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func):
        # Inspect the legalized NPU depthwise conv2d call and its attributes.
        op = ext_func.body
        ofm_channels = op.attrs.ofm_channels
        # check IFM
        ifm = op.args[0].checked_type
        assert list(ifm.shape) == list(ifm_shape)
        assert str(ifm.dtype) == dtype
        assert ifm.shape[3] == ofm_channels
        # check OFM
        ofm = op.checked_type
        expected_ofm_shape = infra.compute_ofm_shape(
            ifm_shape, padding, kernel_shape, strides, dilation
        )
        assert list(ofm.shape) == list(expected_ofm_shape)
        assert str(ofm.dtype) == dtype
        assert ofm.shape[3] == ofm_channels
        # check weights
        weights_ohwi = op.args[1].data.asnumpy()
        assert str(weights_ohwi.dtype) == dtype
        assert weights_ohwi.shape[0] == ofm_channels
        assert weights_ohwi.shape[1] == kernel_shape[0]
        assert weights_ohwi.shape[2] == kernel_shape[1]
        assert weights_ohwi.shape[3] == 1  # only depth multiplier 1 is supported
        # Check that scale_bias matches weight tensor
        assert list(op.args[2].checked_type.shape)[0] == ofm_channels
        # The separate tf.pad must have been folded into the padding attribute.
        assert list(op.attrs.padding) == list(padding)
        assert op.attrs.ofm_channels == ofm_channels
        assert list(op.attrs.strides) == list(strides)
        assert list(op.attrs.dilation) == list(dilation)
    depthwise_pattern_table = [
        (
            ethosu.QnnDepthwiseConv2DParams.composite_name,
            ethosu.qnn_depthwise_conv2d_pattern(),
            lambda pat: ethosu.QnnDepthwiseConv2DParams(pat).is_valid(),
        )
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod["main"] = bind_params_by_name(mod["main"], params)
    mod = partition_ethosu_by_table(mod, depthwise_pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.DepthwiseConv2DRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("ifm_shape", [(1, 55, 55, 3), (1, 23, 32, 7)])
@pytest.mark.parametrize("padding", [(0, 1, 0, 0), (1, 1, 1, 1), (1, 1, 5, 5)])
@pytest.mark.parametrize("const_value", [0, 5, 125, -5])
def test_tflite_separate_padding_legalize(ifm_shape, padding, const_value):
    """Legalize a standalone spatial tf.pad via legalize.PadRewriter; the
    verify step checks the resulting NPU op carries the padding in its
    `padding` attribute with a 1x1 kernel and unit strides/dilation."""
    dtype = "int8"
    # Expected attributes of the NPU op that replaces the pad.
    kernel_shape = (1, 1)
    strides = (1, 1)
    dilation = (1, 1)
    def create_tflite_graph():
        # Build a single-tf.pad model and quantize it to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                return tf.pad(
                    x,
                    [[0, 0], [padding[0], padding[2]], [padding[1], padding[3]], [0, 0]],
                    "CONSTANT",
                    const_value,
                )
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func):
        # Inspect the legalized NPU op that implements the pad.
        op = ext_func.body
        ofm_channels = op.attrs.ofm_channels
        # check IFM
        ifm = op.args[0].checked_type
        assert list(ifm.shape) == list(ifm_shape)
        assert str(ifm.dtype) == dtype
        assert ifm.shape[3] == ofm_channels
        # check OFM
        ofm = op.checked_type
        expected_ofm_shape = infra.compute_ofm_shape(
            ifm_shape, padding, kernel_shape, strides, dilation
        )
        assert list(ofm.shape) == list(expected_ofm_shape)
        assert str(ofm.dtype) == dtype
        assert ofm.shape[3] == ofm_channels
        # check weights
        weights_ohwi = op.args[1].data.asnumpy()
        assert str(weights_ohwi.dtype) == dtype
        assert weights_ohwi.shape[0] == ofm_channels
        assert weights_ohwi.shape[1] == kernel_shape[0]
        assert weights_ohwi.shape[2] == kernel_shape[1]
        assert weights_ohwi.shape[3] == 1  # only depth multiplier 1 is supported
        # Check that scale_bias matches weight tensor
        assert list(op.args[2].checked_type.shape)[0] == ofm_channels
        assert list(op.attrs.padding) == list(padding)
        assert op.attrs.ofm_channels == ofm_channels
        assert list(op.attrs.strides) == list(strides)
        assert list(op.attrs.dilation) == list(dilation)
    pad_pattern_table = [
        (
            ethosu.PadParams.composite_name,
            ethosu.pad_pattern(),
            lambda pat: ethosu.PadParams(pat).is_valid(),
        ),
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod["main"] = bind_params_by_name(mod["main"], params)
    mod = partition_ethosu_by_table(mod, pad_pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.PadRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("ifm_shape", [(1, 55, 55, 3), (1, 23, 32, 7)])
@pytest.mark.parametrize("channel_padding", [(0, 1), (1, 1), (5, 2)])
@pytest.mark.parametrize("const_value", [0, 5, 125, -5])
def test_tflite_separate_channel_padding_legalize(ifm_shape, channel_padding, const_value):
    """Legalize a tf.pad on the channel axis: legalization rewrites it to
    a `concatenate` of constant pad tensors around the IFM on axis 3."""
    dtype = "int8"
    # No spatial padding — only the channel axis is padded in this test.
    padding = (0, 0, 0, 0)
    class AreConcatenateOnGraph(ExprVisitor):
        """
        Visits the Graph recursively and checks if it contains 'concatenate' op
        """
        def __init__(self):
            ExprVisitor.__init__(self)
            self.on_graph = False
        def visit_call(self, call):
            if isinstance(call.op, tvm.ir.Op):
                if str(call.op.name) == "concatenate":
                    self.on_graph = True
            return super().visit_call(call)
        def are_concatenate_on_graph(self, subgraph) -> bool:
            """
            This function recursively visits the graph and checks if 'concatenate' op is on graph
            """
            self.visit(subgraph)
            return self.on_graph
    def create_tflite_graph():
        # Build a channel-axis tf.pad model and quantize it to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                return tf.pad(
                    x,
                    [
                        [0, 0],
                        [padding[0], padding[2]],
                        [padding[1], padding[3]],
                        [channel_padding[0], channel_padding[1]],
                    ],
                    "CONSTANT",
                    const_value,
                )
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func, channel_padding):
        # The legalized body concatenates [pad_before?, ifm, pad_after?] on
        # the channel axis; locate the IFM and the per-side pad widths.
        # (At least one side of channel_padding is non-zero for all
        # parametrized cases, so `ifm` is always assigned below.)
        op = ext_func.body
        pad_before = 0
        pad_after = 0
        if channel_padding[0] == 0 and channel_padding[1] > 0:
            pad_after = ext_func.body.args[0][1].args[0].checked_type.shape[3]
            ifm = ext_func.body.args[0][0].args[0].checked_type
        if channel_padding[0] > 0 and channel_padding[1] == 0:
            pad_before = ext_func.body.args[0][0].args[0].checked_type.shape[3]
            ifm = ext_func.body.args[0][1].args[0].checked_type
        if channel_padding[0] > 0 and channel_padding[1] > 0:
            pad_before = ext_func.body.args[0][0].args[0].checked_type.shape[3]
            ifm = ext_func.body.args[0][1].args[0].checked_type
            pad_after = ext_func.body.args[0][2].args[0].checked_type.shape[3]
        # check IFM
        assert list(ifm.shape) == list(ifm_shape)
        assert str(ifm.dtype) == dtype
        assert ifm.shape[3] == ifm_shape[3]
        # check OFM
        ofm = op.checked_type
        expected_ofm_shape = list(ifm_shape)
        expected_ofm_shape[3] = channel_padding[0] + ifm_shape[3] + channel_padding[1]
        assert list(ofm.shape) == expected_ofm_shape
        assert str(ofm.dtype) == dtype
        # check padding
        assert [pad_before, pad_after] == list(channel_padding)
        # check if relay contains 'concatenate' op
        assert AreConcatenateOnGraph().are_concatenate_on_graph(ext_func.body)
    pad_pattern_table = [
        (
            ethosu.ChannelPadParams.composite_name,
            ethosu.pad_pattern(),
            lambda pat: ethosu.ChannelPadParams(pat).is_valid(),
        ),
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod["main"] = bind_params_by_name(mod["main"], params)
    mod = partition_ethosu_by_table(mod, pad_pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.ChannelPadRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"], channel_padding)
@pytest.mark.parametrize("pooling_type", ["MAX", "AVG"])
@pytest.mark.parametrize("ifm_shape", [[1, 3, 4, 3], [1, 4, 5, 2]])
@pytest.mark.parametrize(
    "pool_shape, strides, activation_function, padding",
    [([1, 2], [1, 2], "NONE", "SAME"), ([2, 3], [2, 3], "RELU", "VALID")],
)
def test_tflite_pool2d_legalize(
    ifm_shape, pooling_type, strides, pool_shape, activation_function, padding
):
    """Legalize quantized TFLite max/avg pooling (optionally followed by
    RELU) into the NPU pooling op and check its attributes."""
    dtype = "int8"
    def create_tflite_graph():
        # Build a max/avg pool (+ optional relu) model and quantize it to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                if pooling_type == "MAX":
                    op = tf.nn.max_pool(x, pool_shape, strides, padding)
                elif pooling_type == "AVG":
                    op = tf.nn.avg_pool(x, pool_shape, strides, padding)
                if activation_function == "RELU":
                    op = tf.nn.relu(op)
                return op
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func):
        # Inspect the legalized NPU pooling call and its attributes.
        ofm_shape = infra.compute_ofm_shape(ifm_shape, padding, pool_shape, strides)
        op = ext_func.body
        assert list(op.args[0].checked_type.shape) == ifm_shape
        assert op.args[0].checked_type.dtype == dtype
        assert list(op.checked_type.shape) == ofm_shape
        assert op.checked_type.dtype == dtype
        assert op.attrs.pooling_type == pooling_type
        assert list(op.attrs.strides) == strides
        assert list(op.attrs.padding) == infra.compute_padding_shape(
            ifm_shape, ofm_shape, padding, pool_shape, strides
        )
        assert list(op.attrs.pool_shape) == pool_shape
        assert op.attrs.ofm_channels == ifm_shape[3]
        if activation_function == "RELU":
            assert str(op.attrs.activation) == "CLIP"
    # Select the rewriter/pattern matching the pooling type under test.
    if pooling_type == "MAX":
        rewriter = legalize.MaxPoolingRewriter()
        pattern_table = [
            (
                ethosu.MaxPool2DParams.composite_name,
                ethosu.qnn_maxpool2d_pattern(),
                lambda pat: ethosu.MaxPool2DParams(pat).is_valid(),
            ),
        ]
    elif pooling_type == "AVG":
        rewriter = legalize.AvgPoolingRewriter()
        pattern_table = [
            (
                ethosu.AvgPool2DParams.composite_name,
                ethosu.qnn_avgpool2d_pattern(),
                lambda pat: ethosu.AvgPool2DParams(pat).is_valid(),
            ),
        ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"x": ifm_shape},
        dtype_dict={"x": dtype},
    )
    mod = partition_ethosu_by_table(mod, pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("pooling_type", ["MAX", "AVG"])
@pytest.mark.parametrize(
    "ifm_shape, pool_shape, strides, activation_function, padding",
    [
        ([1, 4, 4, 3], [4, 4], [4, 4], "NONE", "SAME"),
        ([1, 4, 4, 3], [4, 4], [4, 4], "RELU", "VALID"),
        ([1, 25, 5, 64], [25, 5], [25, 5], "NONE", "VALID"),
        ([1, 25, 5, 64], [25, 5], [25, 5], "RELU", "SAME"),
    ],
)
def test_tflite_pool2d_same_ifm_and_kernel_shape_legalize(
    pooling_type, ifm_shape, pool_shape, strides, activation_function, padding
):
    """Pooling where the kernel covers the whole IFM (OFM is 1x1): the
    partitioned module is structurally compared against an expected
    Relay text, and legalization must rewrite the strides to [1, 1]."""
    dtype = "int8"
    # With a full-IFM kernel there is only one window position, so the
    # legalized op is expected to carry unit strides.
    strides_legalized = [1, 1]
    def create_tflite_graph():
        # Build a max/avg pool (+ optional relu) model and quantize it to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                if pooling_type == "MAX":
                    op = tf.nn.max_pool(x, pool_shape, strides, padding)
                elif pooling_type == "AVG":
                    op = tf.nn.avg_pool(x, pool_shape, strides, padding)
                if activation_function == "RELU":
                    op = tf.nn.relu(op)
                return op
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def expected_mod():
        # Expected partitioned (pre-legalization) module, as Relay text,
        # for each pooling/activation combination.
        expected_ir_string = ""
        if activation_function == "NONE" and pooling_type == "AVG":
            expected_ir_string = f"""
#[version = "0.0.5"]
def @main(%x: Tensor[{str(tuple(ifm_shape))}, {dtype}], output_tensor_names=\
["Identity"]) -> Tensor[(1, 1, 1, {str(ifm_shape[3])}), {dtype}] {{
@tvmgen_default_ethos_u_main_0(%x)
}}
def @tvmgen_default_ethos_u_main_0(%y: Tensor[{str(tuple(ifm_shape))}, {dtype}], \
Compiler="ethos-u", Primitive=1, Inline=1, \
global_symbol="tvmgen_default_ethos_u_main_0") -> Tensor[(1, 1, 1, \
{str(ifm_shape[3])}), {dtype}] {{
%2 = fn (%z: Tensor[{str(tuple(ifm_shape))}, {dtype}], \
PartitionedFromPattern="cast_nn.avg_pool2d_cast_", \
Composite="ethos-u.avgpool2d") -> Tensor[(1, 1, 1, {str(ifm_shape[3])}), \
{dtype}] {{
%0 = cast(%z, dtype="int32") ;
%1 = nn.avg_pool2d(%0, pool_size={str(pool_shape)}, strides={str(strides)}, \
padding=[0, 0, 0, 0], layout="NHWC") ;
cast(%1, dtype="{dtype}")
}} ;
%2(%y)
}}
"""
        if activation_function == "RELU" and pooling_type == "AVG":
            expected_ir_string = f"""
#[version = "0.0.5"]
def @main(%x: Tensor[{str(tuple(ifm_shape))}, {dtype}], output_tensor_names=\
["Identity"]) -> Tensor[(1, 1, 1, {str(ifm_shape[3])}), {dtype}] {{
@tvmgen_default_ethos_u_main_0(%x)
}}
def @tvmgen_default_ethos_u_main_0(%y: Tensor[{str(tuple(ifm_shape))}, {dtype}], \
Compiler="ethos-u", Primitive=1, Inline=1, \
global_symbol="tvmgen_default_ethos_u_main_0") -> Tensor[(1, 1, 1, \
{str(ifm_shape[3])}), {dtype}] {{
%3 = fn (%z: Tensor[{str(tuple(ifm_shape))}, {dtype}], \
PartitionedFromPattern="cast_nn.avg_pool2d_cast_clip_", \
Composite="ethos-u.avgpool2d") -> Tensor[(1, 1, 1, {str(ifm_shape[3])}), \
{dtype}] {{
%0 = cast(%z, dtype="int32") ;
%1 = nn.avg_pool2d(%0, pool_size={str(pool_shape)}, strides={str(strides)}, \
padding=[0, 0, 0, 0], layout="NHWC") ;
%2 = cast(%1, dtype="{dtype}") ;
clip(%2, a_min=-128f, a_max=127f)
}} ;
%3(%y)
}}
"""
        if activation_function == "NONE" and pooling_type == "MAX":
            expected_ir_string = f"""
#[version = "0.0.5"]
def @main(%x: Tensor[{str(tuple(ifm_shape))}, {dtype}], output_tensor_names=\
["Identity"]) -> Tensor[(1, 1, 1, {str(ifm_shape[3])}), {dtype}] {{
@tvmgen_default_ethos_u_main_0(%x)
}}
def @tvmgen_default_ethos_u_main_0(%y: Tensor[{str(tuple(ifm_shape))}, {dtype}], \
Compiler="ethos-u", Primitive=1, Inline=1, \
global_symbol="tvmgen_default_ethos_u_main_0") -> Tensor[(1, 1, 1, \
{str(ifm_shape[3])}), {dtype}] {{
%0 = fn (%z: Tensor[{str(tuple(ifm_shape))}, {dtype}], \
PartitionedFromPattern="nn.max_pool2d_", \
Composite="ethos-u.maxpool2d") -> Tensor[(1, 1, 1, {str(ifm_shape[3])}), \
{dtype}] {{
nn.max_pool2d(%z, pool_size={str(pool_shape)}, strides={str(strides)}, \
padding=[0, 0, 0, 0], layout="NHWC")
}} ;
%0(%y)
}}
"""
        if activation_function == "RELU" and pooling_type == "MAX":
            expected_ir_string = f"""
#[version = "0.0.5"]
def @main(%x: Tensor[{str(tuple(ifm_shape))}, {dtype}] , output_tensor_names=\
["Identity"]) -> Tensor[(1, 1, 1, {str(ifm_shape[3])}), {dtype}] {{
@tvmgen_default_ethos_u_main_0(%x)
}}
def @tvmgen_default_ethos_u_main_0(%y: Tensor[{str(tuple(ifm_shape))}, {dtype}] , \
Compiler="ethos-u", Primitive=1, Inline=1, \
global_symbol="tvmgen_default_ethos_u_main_0") -> Tensor[(1, 1, 1, \
{str(ifm_shape[3])}), {dtype}] {{
%1 = fn (%z: Tensor[{str(tuple(ifm_shape))}, {dtype}] , \
PartitionedFromPattern="nn.max_pool2d_clip_", \
Composite="ethos-u.maxpool2d") -> Tensor[(1, 1, 1, {str(ifm_shape[3])}), \
{dtype}] {{
%0 = nn.max_pool2d(%z, pool_size={str(pool_shape)}, strides={str(strides)}, \
padding=[0, 0, 0, 0], layout="NHWC");
clip(%0, a_min=-128f, a_max=127f)
}};
%1(%y)
}}
"""
        return tvm.relay.fromtext(expected_ir_string)
    def verify(ext_func):
        # Inspect the legalized NPU pooling call; strides must be [1, 1].
        ofm_shape = infra.compute_ofm_shape(ifm_shape, padding, pool_shape, strides)
        op = ext_func.body
        assert list(op.args[0].checked_type.shape) == ifm_shape
        assert op.args[0].checked_type.dtype == dtype
        assert list(op.checked_type.shape) == ofm_shape
        assert op.checked_type.dtype == dtype
        assert op.attrs.pooling_type == pooling_type
        assert list(op.attrs.strides) == strides_legalized
        # Padding must be equivalent for both the original and legalized strides.
        assert list(op.attrs.padding) == infra.compute_padding_shape(
            ifm_shape, ofm_shape, padding, pool_shape, strides
        )
        assert list(op.attrs.padding) == infra.compute_padding_shape(
            ifm_shape, ofm_shape, padding, pool_shape, strides_legalized
        )
        assert list(op.attrs.pool_shape) == pool_shape
        assert op.attrs.ofm_channels == ifm_shape[3]
        if activation_function == "RELU":
            assert str(op.attrs.activation) == "CLIP"
    # Select the rewriter/pattern matching the pooling type under test.
    if pooling_type == "MAX":
        rewriter = legalize.MaxPoolingRewriter()
        pattern_table = [
            (
                ethosu.MaxPool2DParams.composite_name,
                ethosu.qnn_maxpool2d_pattern(),
                lambda pat: ethosu.MaxPool2DParams(pat).is_valid(),
            ),
        ]
    if pooling_type == "AVG":
        rewriter = legalize.AvgPoolingRewriter()
        pattern_table = [
            (
                ethosu.AvgPool2DParams.composite_name,
                ethosu.qnn_avgpool2d_pattern(),
                lambda pat: ethosu.AvgPool2DParams(pat).is_valid(),
            ),
        ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"x": ifm_shape},
        dtype_dict={"x": dtype},
    )
    mod = partition_ethosu_by_table(mod, pattern_table)
    expected = expected_mod()
    tvm.ir.assert_structural_equal(mod, expected)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("operator_type", ["ADD", "SUB", "MUL", "MIN", "MAX"])
@pytest.mark.parametrize(
    "ifm_shape, ifm2_shape, reversed_operands",
    [
        ([1, 2, 3, 4], [1, 2, 3, 4], False),
        ([1, 2, 3, 4], [1, 1, 3, 1], False),
        ([1, 1, 3, 1], [1, 2, 3, 4], True),
        ([1, 4, 4], [4, 1], False),
        ([4], [4], False),
        ([4], [1, 2, 3, 4], True),
        ([1, 4, 4], [4, 1], False),
    ],
)
@pytest.mark.parametrize("activation_function", [None, tf.nn.relu])
def test_tflite_binary_elemwise_legalize(
    operator_type,
    ifm_shape,
    ifm2_shape,
    reversed_operands,
    activation_function,
):
    """Legalize quantized TFLite binary elementwise ops (ADD/SUB/MUL/MIN/MAX,
    optional activation, broadcasting and reversed operands) into the NPU
    binary elementwise op and check attributes and quantization params."""
    np.random.seed(0)
    dtype = "int8"
    def create_tflite_graph():
        # Build a two-input elementwise model and quantize it to int8 TFLite.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x, y):
                if operator_type == "ADD":
                    op = tf.math.add(x, y)
                elif operator_type == "SUB":
                    op = tf.math.subtract(x, y)
                elif operator_type == "MUL":
                    op = tf.math.multiply(x, y)
                elif operator_type == "MIN":
                    op = tf.math.minimum(x, y)
                elif operator_type == "MAX":
                    op = tf.math.maximum(x, y)
                if activation_function:
                    op = activation_function(op)
                return op
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32), tf.TensorSpec(ifm2_shape, dtype=tf.float32)
        )
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                data2 = np.random.rand(*tuple(ifm2_shape)) * 2
                yield [data.astype(np.float32), data2.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func):
        # Unwrap any trailing identity (separate requantize) and reshape,
        # then check the legalized binary elementwise op itself.
        out_shape = ifm2_shape if reversed_operands else ifm_shape
        shapes = [ifm_shape, ifm2_shape]
        ifm_index, ifm2_index = (1, 0) if reversed_operands else (0, 1)
        op = ext_func.body
        has_reshaped_output = False
        has_separate_requantize = False
        # Inputs of rank < 4 are padded up to rank 4 with leading 1s.
        shapes_padded = [[1] * (4 - len(s)) + s for s in shapes]
        out_padded = [1] * (4 - len(out_shape)) + out_shape
        if op.op.name == "contrib.ethosu.identity":
            op = op.args[0]
            has_separate_requantize = True
        if op.op.name == "reshape":
            has_reshaped_output = True
            op = op.args[0]
        assert list(op.args[0].checked_type.shape) == shapes_padded[ifm_index]
        assert list(op.args[1].checked_type.shape) == shapes_padded[ifm2_index]
        assert op.args[0].checked_type.dtype == dtype
        assert list(op.checked_type.shape) == out_padded
        assert op.checked_type.dtype == dtype
        assert op.attrs.operator_type == operator_type
        assert op.attrs.reversed_operands == reversed_operands
        if activation_function != None:
            assert str(op.attrs.activation) == "CLIP"
            if operator_type in ["MIN", "MAX"]:
                if has_separate_requantize:
                    # In case when requantize cannot be fused with MIN/MAX + CLIP due to hardware constraints
                    # there should be default quantization values since requantize is separate operation.
                    assert float(op.attrs.ifm_scale) == 1.0
                    assert int(op.attrs.ifm_zero_point) == 0
                    assert float(op.attrs.ifm2_scale) == 1.0
                    assert int(op.attrs.ifm2_zero_point) == 0
                    assert float(op.attrs.ofm_scale) == 1.0
                    assert int(op.attrs.ofm_zero_point) == 0
                else:
                    # MIN and MAX with an activation must have a requantize operation
                    # baked into the output. To check the extra requantize node was
                    # picked up by the pattern, we can make sure the quantization
                    # information is not default.
                    assert float(op.attrs.ifm_scale) != 1.0
                    assert int(op.attrs.ifm_zero_point) != 0
                    assert float(op.attrs.ifm2_scale) != 1.0
                    assert int(op.attrs.ifm2_zero_point) != 0
                    assert float(op.attrs.ofm_scale) != 1.0
                    assert int(op.attrs.ofm_zero_point) != 0
        if has_reshaped_output:
            assert list(ext_func.body.checked_type.shape) == out_shape
    # Select the rewriter(s)/pattern(s) matching the operator under test.
    if operator_type == "ADD":
        rewriter = legalize.AddRewriter()
        pattern_table = [
            (
                ethosu.AddParams.composite_name,
                ethosu.qnn_add_pattern(),
                lambda pat: ethosu.AddParams(pat).is_valid(),
            ),
        ]
    elif operator_type == "SUB":
        rewriter = legalize.SubRewriter()
        pattern_table = [
            (
                ethosu.SubParams.composite_name,
                ethosu.qnn_subtract_pattern(),
                lambda pat: ethosu.SubParams(pat).is_valid(),
            ),
        ]
    elif operator_type == "MUL":
        rewriter = legalize.MulRewriter()
        pattern_table = [
            (
                ethosu.MulParams.composite_name,
                ethosu.qnn_mul_pattern(),
                lambda pat: ethosu.MulParams(pat).is_valid(),
            ),
        ]
    elif operator_type == "MIN":
        rewriter = [legalize.MinRewriter(), legalize.RequantizeRewriter()]
        pattern_table = [
            (
                ethosu.MinParams.composite_name,
                ethosu.minimum_clip_requantize_pattern(),
                lambda pat: ethosu.MinParams(pat).is_valid(),
            ),
            (
                ethosu.MinParams.composite_name,
                ethosu.minimum_pattern(),
                lambda pat: ethosu.MinParams(pat).is_valid(),
            ),
            (
                ethosu.RequantizeParams.composite_name,
                ethosu.requantize_pattern(),
                lambda pat: ethosu.RequantizeParams(pat).is_valid(),
            ),
        ]
    elif operator_type == "MAX":
        rewriter = [legalize.MaxRewriter(), legalize.RequantizeRewriter()]
        pattern_table = [
            (
                ethosu.MaxParams.composite_name,
                ethosu.maximum_clip_requantize_pattern(),
                lambda pat: ethosu.MaxParams(pat).is_valid(),
            ),
            (
                ethosu.MaxParams.composite_name,
                ethosu.maximum_pattern(),
                lambda pat: ethosu.MaxParams(pat).is_valid(),
            ),
            (
                ethosu.RequantizeParams.composite_name,
                ethosu.requantize_pattern(),
                lambda pat: ethosu.RequantizeParams(pat).is_valid(),
            ),
        ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"x": ifm_shape, "y": ifm2_shape},
        dtype_dict={"x": dtype, "y": dtype},
    )
    mod = partition_ethosu_by_table(mod, pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
# This test is for checking the case when requantize cannot be fused with MIN/MAX + CLIP due to hardware constraints.
def test_tflite_max_relu_n1_to_1_legalize():
    """MAX with relu_n1_to_1: requantize stays a separate operation."""
    shape = [1, 4, 8, 16]
    test_tflite_binary_elemwise_legalize(
        operator_type="MAX",
        ifm_shape=shape,
        ifm2_shape=shape,
        reversed_operands=False,
        activation_function=relu_n1_to_1,
    )
def test_binary_add_from_constant_scalar():
    """Legalize a qnn.add whose second operand is a constant (1, 1, 1, 1) tensor."""
    dtype = "uint8"
    ifm_shape = (1, 4, 4, 8)

    def create_graph():
        # qnn.add of a variable input and a constant of ones, with unit
        # scales and zero offsets on all three quantization parameter sets.
        inp = relay.var("input", shape=ifm_shape, dtype=dtype)
        scalar = relay.const(np.ones((1, 1, 1, 1), dtype=dtype), dtype=dtype)
        add = relay.qnn.op.add(
            inp,
            scalar,
            relay.const(1.0, dtype="float32"),
            relay.const(0, dtype="int32"),
            relay.const(1.0, dtype="float32"),
            relay.const(0, dtype="int32"),
            relay.const(1.0, dtype="float32"),
            relay.const(0, dtype="int32"),
        )
        func = relay.Function(relay.analysis.free_vars(add), add)
        return tvm.IRModule.from_expr(func)

    def verify(ext_func):
        call = ext_func.body
        # The variable IFM keeps its shape; the constant stays (1, 1, 1, 1).
        assert list(call.args[0].checked_type.shape) == [1, 4, 4, 8]
        assert list(call.args[1].checked_type.shape) == [1, 1, 1, 1]
        assert call.args[0].checked_type.dtype == "uint8"
        # OFM shape/type and the legalized operator kind.
        assert list(call.checked_type.shape) == [1, 4, 4, 8]
        assert call.checked_type.dtype == "uint8"
        assert call.attrs.operator_type == "ADD"

    pattern_table = [
        (
            ethosu.AddParams.composite_name,
            ethosu.qnn_add_pattern(),
            lambda pat: ethosu.AddParams(pat).is_valid(),
        ),
    ]
    mod = partition_ethosu_by_table(create_graph(), pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.AddRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape, ifm2_shape, reversed_operands",
    [
        ([1, 2, 3, 4], [1, 2, 3, 4], False),
        ([1, 2, 3, 4], [1, 1, 3, 1], False),
        ([1, 1, 3, 1], [1, 2, 3, 4], True),
    ],
)
def test_ethosu_left_shift_binary_elemwise_legalize(ifm_shape, ifm2_shape, reversed_operands):
    """Legalize relay.left_shift into an NPU SHL binary elementwise op."""
    dtype = "int32"
    operator_type = "SHL"

    def create_graph():
        # Plain Relay graph: left_shift of two int32 inputs.
        input1 = relay.var("x1", shape=ifm_shape, dtype=dtype)
        input2 = relay.var("x2", shape=ifm2_shape, dtype=dtype)
        c1 = relay.left_shift(input1, input2)
        f = relay.Function([input1, input2], c1)
        mod = tvm.IRModule()
        mod["main"] = f
        return mod

    def verify(ext_func):
        # When the operands were swapped during legalization, the output
        # shape follows the second input and the argument order is flipped.
        out_shape = ifm2_shape if reversed_operands else ifm_shape
        shapes = [ifm_shape, ifm2_shape]
        ifm_index, ifm2_index = (1, 0) if reversed_operands else (0, 1)
        op = ext_func.body
        assert list(op.args[0].checked_type.shape) == shapes[ifm_index]
        assert list(op.args[1].checked_type.shape) == shapes[ifm2_index]
        assert op.args[0].checked_type.dtype == dtype
        assert list(op.checked_type.shape) == out_shape
        assert op.checked_type.dtype == dtype
        assert op.attrs.operator_type == operator_type
        assert op.attrs.reversed_operands == reversed_operands
        # SHL carries no fused activation.
        assert str(op.attrs.activation) == "NONE"

    rewriter = legalize.ShlRewriter()
    pattern_table = [
        (
            ethosu.ShlParams.composite_name,
            ethosu.shl_pattern(),
            lambda pat: ethosu.ShlParams(pat).is_valid(),
        ),
    ]
    mod = create_graph()
    mod = partition_ethosu_by_table(mod, pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape, new_shape",
    [
        ((1, 4, 1, 2), (4, 2)),
        ((1, 5, 1, 20), (100,)),
        ((12, 20), (1, 6, 4, 10)),
        ((30,), (10, 1, 3)),
    ],
)
def test_relay_reshape_legalize(ifm_shape, new_shape):
    """Reshape legalizes to a reshape followed by an NPU identity op."""
    ifm = relay.var("ifm", shape=ifm_shape, dtype="int8")
    mod = tvm.IRModule()
    mod["main"] = relay.Function([ifm], relay.op.reshape(ifm, new_shape))
    mod = relay.transform.InferType()(mod)

    reshape_pattern_table = [
        (
            ethosu.ReshapeParams.composite_name,
            ethosu.reshape_pattern(),
            lambda pat: ethosu.ReshapeParams(pat).is_valid(),
        ),
    ]
    mod = partition_ethosu_by_table(mod, reshape_pattern_table)
    # Apply both rewriters in sequence: reshape legalization, then no-op cleanup.
    for rewriter in (legalize.ReshapeRewriter(), legalize.NoOpRewriter()):
        mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
            rewriter, mod["tvmgen_default_ethos_u_main_0"]
        )
    mod = relay.transform.InferType()(mod)

    identity = mod["tvmgen_default_ethos_u_main_0"].body
    assert identity.op.name == "contrib.ethosu.identity"
    # The reshape itself must survive legalization...
    inner = identity.args[0]
    assert inner.op.name == "reshape"
    # ...and the identity op's output shape must match the requested shape.
    assert tuple(identity.checked_type.shape) == new_shape
@pytest.mark.parametrize(
    "ifm_shape, begin, size",
    [
        ([1, 10, 50, 4], [0, 5, 11, 2], [1, 5, 11, 1]),
        ([15, 17, 3], [3, 0, 1], [8, 17, 2]),
        ([7, 6043], [0, 704], [1, 2860]),
        ([5000], [123], [2151]),
    ],
)
def test_tflite_slice(ifm_shape, begin, size):
    """tf.slice legalizes to strided_slice followed by an NPU identity op."""
    dtype = "int8"

    def create_tflite_graph():
        # Build a one-op TF model and quantize it to int8 via the TFLite converter.
        class Model(tf.Module):
            @tf.function
            def slice_func(self, x):
                return tf.slice(x, begin, size)

        model = Model()

        # Save the model
        concrete_func = model.slice_func.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        identity = ext_func.body
        assert identity.op.name == "contrib.ethosu.identity"
        # check that the strided_slice is still there
        strided_slice = identity.args[0]
        assert strided_slice.op.name == "strided_slice"
        # check that identity's output shape matches strided slice's output shape
        assert list(identity.checked_type.shape) == size

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    strided_slice_pattern_table = [
        (
            ethosu.StridedSliceParams.composite_name,
            ethosu.strided_slice_pattern(),
            lambda pat: ethosu.StridedSliceParams(pat).is_valid(),
        ),
    ]
    mod = partition_ethosu_by_table(mod, strided_slice_pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.StridedSliceRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.NoOpRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod = relay.transform.InferType()(mod)
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape, begin, end",
    [([1, 1, 5, 8], [0, 0, 0, 0], [1, 1, 2, 3]), ([1, 3, 3], [0, 1, 2], [1, 2, 3])],
)
def test_tflite_strided_slice(ifm_shape, begin, end):
    """tf.strided_slice legalizes to strided_slice followed by an NPU identity op."""
    dtype = "int8"

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def strided_slice_func(self, x):
                return tf.strided_slice(x, begin, end)

        model = Model()

        # Save the model
        concrete_func = model.strided_slice_func.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        identity = ext_func.body
        assert identity.op.name == "contrib.ethosu.identity"
        # check that the strided_slice is still there
        strided_slice = identity.args[0]
        assert strided_slice.op.name == "strided_slice"
        # check that identity's output shape matches strided slice's output shape
        # (no explicit strides given, so the size per axis is end - begin)
        size = list(np.array(end) - np.array(begin))
        assert list(identity.checked_type.shape) == size

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    strided_slice_pattern_table = [
        (
            ethosu.StridedSliceParams.composite_name,
            ethosu.strided_slice_pattern(),
            lambda pat: ethosu.StridedSliceParams(pat).is_valid(),
        ),
    ]
    mod = partition_ethosu_by_table(mod, strided_slice_pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.StridedSliceRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.NoOpRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod = relay.transform.InferType()(mod)
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("operator_type", ["ABS"])
@pytest.mark.parametrize(
    "ifm_shape",
    [[1, 2, 3, 4], [1, 7, 3], [8, 3, 1], [11, 22], [300]],
)
def test_tflite_unary_elemwise_legalize(
    operator_type,
    ifm_shape,
):
    """Legalize a TFLite unary elementwise op (currently only ABS) to the NPU.

    Non-4D inputs are expected to be wrapped in reshapes so the NPU op
    itself operates on a 4D tensor; the verify branch below handles both
    the reshaped and the direct case.
    """
    dtype = "int8"

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def abs_func(self, x):
                if operator_type == "ABS":
                    op = tf.math.abs(x)
                return op

        model = Model()

        # Save the model
        concrete_func = model.abs_func.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        out_shape = ifm_shape
        func_body = ext_func.body

        # If we legalized the unary elementwise op into 4D
        if func_body.op.name == "reshape":
            reshape = func_body
            unary = func_body.args[0]
            reshape2 = unary.args[0]

            # Check the input to the reshape
            reshape2_in_shape = [i for i in reshape2.args[0].checked_type.shape]
            assert reshape2_in_shape == ifm_shape

            # Check that the unary elementwise operator is 4D after reshape
            assert len(unary.checked_type.shape) == 4
            assert unary.args[0].checked_type.dtype == dtype

            # Check that the output of the graph has the same shape as input
            reshape_out_shape = [i for i in reshape.checked_type.shape]
            assert reshape_out_shape == ifm_shape
            assert unary.attrs.operator_type == operator_type
        else:
            unary = func_body

            # Check the IFM
            assert list(unary.args[0].checked_type.shape) == ifm_shape
            assert unary.args[0].checked_type.dtype == dtype

            # Check the OFM
            assert list(unary.checked_type.shape) == out_shape
            assert unary.checked_type.dtype == dtype

            # operator type check
            assert unary.attrs.operator_type == operator_type

    if operator_type == "ABS":
        rewriter = legalize.AbsRewriter()
        pattern_table = [
            (
                ethosu.AbsParams.composite_name,
                ethosu.abs_pattern(),
                lambda pat: ethosu.AbsParams(pat).is_valid(),
            ),
        ]

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = partition_ethosu_by_table(mod, pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
def test_tflite_tanh_legalize():
    """TFLite tanh legalizes to an NPU identity op with a TANH activation."""
    dtype = "int8"
    ifm_shape = (1, 241, 132, 7)

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def tanh_func(self, x):
                op = tf.math.tanh(x)
                return op

        model = Model()
        concrete_func = model.tanh_func.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = ethosu.partition_for_ethosu(mod, params)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.TanhRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod = relay.transform.InferType()(mod)

    func_body = mod["tvmgen_default_ethos_u_main_0"].body
    assert func_body.op.name == "contrib.ethosu.identity"
    assert func_body.attrs.activation == "TANH"
    assert tuple(func_body.args[0].checked_type.shape) == (ifm_shape)
    # NOTE(review): the second argument appears to be a 256-entry activation
    # look-up table — confirm against the identity op's definition.
    assert tuple(func_body.args[1].checked_type.shape) == (256,)
@pytest.mark.parametrize("dtype", ["int8", "uint8"])
@pytest.mark.parametrize(
    "ifm_shape, axis, keep_dims, use_same_quantization",
    [
        # mean to average pool
        [(1, 8, 16, 16), (1,), True, True],
        [(1, 8, 16, 16), (2,), False, True],
        [(1, 8, 16, 16), (1, 2), False, True],
        [(3, 3, 4), (0,), True, True],
        [(3, 3, 4), (1,), False, True],
        [(8, 5), (0,), False, True],
        [(8, 5), (1,), True, True],
        # mean to depthwise
        [(1, 8, 16, 16), (1,), True, False],
        [(1, 8, 16, 16), (2,), True, False],
        [(1, 8, 16, 16), (1, 2), False, False],
        [(8, 4), (0,), False, False],
        [(1, 65, 2, 1), (1, 2), True, False],  # special case when h > 64
    ],
)
def test_mean(ifm_shape, axis, keep_dims, use_same_quantization, dtype):
    """Mean legalizes to an NPU pooling op when IFM/OFM quantization match,
    otherwise to a depthwise convolution (see the final asserts in verify)."""

    def create_tflite_graph():
        # TFLite path: quantized reduce_mean via the converter (in/out
        # quantization generally differs, driving the depthwise case).
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                op = tf.math.reduce_mean(x, axis=axis, keepdims=keep_dims)
                return op

        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model, 0)
        mod, _ = relay.frontend.from_tflite(
            tflite_model,
            shape_dict={"input": ifm_shape},
            dtype_dict={"input": dtype},
        )
        return mod

    def create_relay_graph_with_same_quantization():
        # Relay path: cast -> mean -> requantize with identical in/out
        # quantization parameters (unit scale, zero offset).
        ifm = relay.var("input", shape=ifm_shape, dtype=dtype)
        cast = relay.cast(ifm, dtype="int32")
        mean = relay.mean(cast, axis=axis, keepdims=keep_dims)
        requantize = relay.qnn.op.requantize(
            mean,
            input_scale=relay.const(1.0, dtype="float32"),
            input_zero_point=relay.const(0, dtype="int32"),
            output_scale=relay.const(1.0, dtype="float32"),
            output_zero_point=relay.const(0, dtype="int32"),
            out_dtype=dtype,
        )
        func = relay.Function(relay.analysis.free_vars(requantize), requantize)
        mod = tvm.IRModule.from_expr(func)
        return mod

    def verify(ext_func):
        out_var = ext_func.body

        # Walk backwards from the output through any surrounding reshapes
        # to find which NPU op mean was legalized into.
        next_op = out_var
        pooling_op = None
        depthwise_op = None
        if (
            isinstance(next_op, relay.expr.Call)
            and isinstance(next_op.op, tvm.ir.op.Op)
            and next_op.op.name == "reshape"
        ):
            next_op = next_op.args[0]
        if util.is_named_ethosu_op(next_op, "pooling"):
            pooling_op = next_op
            next_op = next_op.args[0]
        if util.is_named_ethosu_op(next_op, "depthwise_conv2d"):
            depthwise_op = next_op
            next_op = next_op.args[0]
        while (
            isinstance(next_op, relay.expr.Call)
            and isinstance(next_op.op, tvm.ir.op.Op)
            and next_op.op.name == "reshape"
        ):
            next_op = next_op.args[0]
        in_var = next_op

        def calculate_expected_output_shape():
            # Reduced axes become 1 when keep_dims, otherwise they are dropped.
            for i in range(len(ifm_shape)):
                if i in axis:
                    if keep_dims:
                        yield 1
                else:
                    yield ifm_shape[i]

        out_shape = tuple(calculate_expected_output_shape())

        # check IFM
        assert tuple(in_var.checked_type.shape) == ifm_shape
        if use_same_quantization:
            assert in_var.checked_type.dtype == dtype
        else:
            # in_var's dtype is equal to int8 due to TFLite's requantize
            assert in_var.checked_type.dtype == "int8"

        # check OFM
        assert tuple(out_var.checked_type.shape) == out_shape
        if use_same_quantization:
            assert out_var.checked_type.dtype == dtype
        else:
            # out_var's dtype is equal to int8 due to TFLite's requantize
            assert out_var.checked_type.dtype == "int8"

        # check expected legalization case
        if pooling_op:
            attrs = pooling_op.attrs
            assert (
                attrs.ifm_scale == attrs.ofm_scale and attrs.ifm_zero_point == attrs.ofm_zero_point
            )
        else:
            assert depthwise_op
            attrs = depthwise_op.attrs
            assert (
                attrs.ifm_scale != attrs.ofm_scale or attrs.ifm_zero_point != attrs.ofm_zero_point
            )

    rewriter = legalize.MeanRewriter()
    pattern_table = [
        (
            ethosu.MeanParams.composite_name,
            ethosu.mean_pattern(),
            lambda pat: ethosu.MeanParams(pat).is_valid(),
        ),
    ]

    mod = (
        create_relay_graph_with_same_quantization()
        if use_same_quantization
        else create_tflite_graph()
    )
    mod = partition_ethosu_by_table(mod, pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape, axis, keepdims, relu",
    [
        [(1, 4, 2, 8), 3, False, False],
        [(1, 4, 4, 1), 3, False, True],
        [(3, 5, 7), 2, False, True],
        [(1, 4, 2, 8), 3, True, False],
        [(3, 5, 7), 2, True, False],
    ],
)
def test_ethosu_sum(ifm_shape, axis, keepdims, relu):
    """reduce_sum legalizes to an NPU SUM pooling feeding a MUL binary
    elementwise op; a requested relu shows up as a fused CLIP on the pooling."""
    dtype = "int8"

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                op = tf.math.reduce_sum(x, axis=axis, keepdims=keepdims)
                return tf.nn.relu(op) if relu else op

        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model, 0)
        mod, _ = relay.frontend.from_tflite(
            tflite_model,
            shape_dict={"input": ifm_shape},
            dtype_dict={"input": dtype},
        )
        return mod

    def verify(ext_func):
        out_var = ext_func.body

        binary_elementwise_op = None
        pooling_op = None

        # Expected structure (reshapes optional):
        # reshape? -> binary_elementwise(MUL) -> pooling(SUM) -> reshape? -> input
        next_op = out_var
        if (
            isinstance(next_op, relay.expr.Call)
            and isinstance(next_op.op, tvm.ir.op.Op)
            and next_op.op.name == "reshape"
        ):
            next_op = next_op.args[0]

        binary_elementwise_op = next_op
        pooling_op = binary_elementwise_op.args[0]

        next_op = pooling_op.args[0]
        if (
            isinstance(next_op, relay.expr.Call)
            and isinstance(next_op.op, tvm.ir.op.Op)
            and next_op.op.name == "reshape"
        ):
            next_op = next_op.args[0]
        in_var = next_op

        def calculate_expected_output_shape():
            # The reduced axis becomes 1 when keepdims, otherwise it is dropped.
            for i in range(len(ifm_shape)):
                if i != axis:
                    yield ifm_shape[i]
                elif keepdims:
                    yield 1

        out_shape = tuple(calculate_expected_output_shape())

        # check IFM
        assert tuple(in_var.checked_type.shape) == ifm_shape
        assert in_var.checked_type.dtype == dtype

        # check OFM
        assert tuple(out_var.checked_type.shape) == out_shape
        assert out_var.checked_type.dtype == dtype

        # check expected legalization case
        assert pooling_op
        attrs = pooling_op.attrs
        assert attrs.pooling_type == "SUM"
        if relu:
            assert attrs.activation == "CLIP"

        assert binary_elementwise_op
        attrs = binary_elementwise_op.attrs
        assert attrs.operator_type == "MUL"
        assert attrs.ifm_channels == attrs.ifm2_channels == 1
        assert attrs.ofm_dtype == "int8"

    rewriter = legalize.SumRewriter()
    pattern_table = [
        (
            ethosu.SumParams.composite_name,
            ethosu.sum_pattern(),
            lambda pat: ethosu.SumParams(pat).is_valid(),
        ),
    ]

    mod = create_tflite_graph()
    mod = partition_ethosu_by_table(mod, pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "shapes, axis",
    [
        ([(2, 3), (4, 3)], 0),
        ([(10, 2, 1), (10, 14, 1)], 1),
        ([(10,), (13,), (14,)], 0),
        ([(1, 5, 2, 1), (1, 5, 7, 1), (1, 5, 3, 1)], 2),
    ],
)
def test_tflite_concat_legalize(shapes, axis):
    """TFLite concat legalizes to an NPU concat; the output shape is the
    inputs' common shape with the concatenation axis sizes summed."""

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def tf_function(self, shapes, axis):
                op = tf.concat(shapes, axis)
                return op

        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            [tf.TensorSpec(shape, tf.float32) for shape in shapes], axis
        )

        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                datas = [np.random.rand(*shape) for shape in shapes]
                yield [data.astype(np.float32) for data in datas]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        # Use the builtin sum: passing a generator to np.sum is deprecated
        # (NumPy only handled it by falling back to the builtin anyway).
        new_concat_axis = sum(shape[axis] for shape in shapes)
        out_shape = list(shapes[0])
        out_shape[axis] = new_concat_axis

        op = ext_func.body
        # Each input keeps its original shape; the output is widened along `axis`.
        for i, _ in enumerate(shapes):
            assert list(op.args[0][i].checked_type.shape) == list(shapes[i])
        assert list(op.checked_type.shape) == out_shape
        assert op.checked_type.dtype == "int8"

    concat_pattern_table = [
        (
            ethosu.ConcatParams.composite_name,
            ethosu.concat_pattern(),
            lambda pat: ethosu.ConcatParams(pat).is_valid(),
        )
    ]

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)

    relay_module, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={("ifm" + str(i)): shape for i, shape in enumerate(shapes)},
        dtype_dict={("ifm" + str(i)): "int8" for i, _ in enumerate(shapes)},
    )
    mod = partition_ethosu_by_table(relay_module, concat_pattern_table)

    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.ConcatRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.NoOpRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod["tvmgen_default_ethos_u_main_0"] = relay.transform.InferType()(mod)[
        "tvmgen_default_ethos_u_main_0"
    ]
    verify(mod["tvmgen_default_ethos_u_main_0"])
def test_tflite_sigmoid_legalize():
    """TFLite sigmoid legalizes to an NPU identity op with a SIGMOID activation."""
    dtype = "int8"
    ifm_shape = (1, 237, 91, 7)

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def sigmoid_func(self, x):
                op = tf.math.sigmoid(x)
                return op

        model = Model()
        concrete_func = model.sigmoid_func.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_output_type = tf.int8
        converter.inference_input_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = ethosu.partition_for_ethosu(mod, params)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.SigmoidRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod = relay.transform.InferType()(mod)

    func_body = mod["tvmgen_default_ethos_u_main_0"].body
    assert func_body.op.name == "contrib.ethosu.identity"
    assert func_body.attrs.activation == "SIGMOID"
    assert tuple(func_body.args[0].checked_type.shape) == (ifm_shape)
    # NOTE(review): the second argument appears to be a 256-entry activation
    # look-up table — confirm against the identity op's definition.
    assert tuple(func_body.args[1].checked_type.shape) == (256,)
@pytest.mark.parametrize(
    "ifm_shape, num_or_size_splits, axis",
    [
        ((1, 4, 6, 8), 3, 2),
        ((4, 6, 8), 2, 0),
        ((5, 15), 3, 1),
        ((3, 7), 1, 1),
        ((100,), 25, 0),
    ],
)
def test_tflite_split_legalize(ifm_shape, num_or_size_splits, axis):
    """TFLite split into equal chunks is rewritten to a Relay split op."""
    dtype = "int8"

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x, num_or_size_splits, axis):
                op = tf.split(x, num_or_size_splits, axis=axis)
                return op

        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, tf.float32), num_or_size_splits, axis
        )

        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        # dig out the split
        single_output_split = num_or_size_splits == 1
        split = (
            ext_func.body.tuple_value
            if single_output_split
            else ext_func.body.args[0][0].args[0].tuple_value
        )
        assert split.op.name == "split"

        # Split is specified by number of equal chunks
        assert split.attrs.indices_or_sections == num_or_size_splits

        assert split.attrs.axis == axis

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = ethosu.partition_for_ethosu(mod)

    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.PartitionedSplitRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod["tvmgen_default_ethos_u_main_0"] = relay.transform.InferType()(mod)[
        "tvmgen_default_ethos_u_main_0"
    ]
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape, num_or_size_splits, axis",
    [
        ((1, 4, 6, 8), (1, 3, 4), 3),
        ((10, 18, 4), (1, 4, 3, 2), 0),
        ((22, 7), (4, -1), 1),
        ((25,), (25,), 0),
    ],
)
def test_tflite_split_v_legalize(ifm_shape, num_or_size_splits, axis):
    """TFLite split_v (explicit section sizes) is rewritten to a Relay split op."""
    dtype = "int8"

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x, num_or_size_splits, axis):
                # TF split gets converted into TFLite's split_v
                op = tf.split(x, num_or_size_splits, axis=axis)
                return op

        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, tf.float32), num_or_size_splits, axis
        )

        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        # dig out the split
        single_output_split = len(num_or_size_splits) == 1
        split = (
            ext_func.body.tuple_value
            if single_output_split
            else ext_func.body.args[0][0].args[0].tuple_value
        )
        assert split.op.name == "split"

        # Split is specified by the size of sections, so converting num_or_size_splits
        # into the indices where the tensor is split at since this is how split is represented
        # in Relay
        split_sections = [] if single_output_split else [num_or_size_splits[0]]
        for split_size in num_or_size_splits[1:-1]:
            sec = split_sections[-1] + split_size
            split_sections.append(sec)
        assert list(split.attrs.indices_or_sections) == split_sections

        assert split.attrs.axis == axis

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = ethosu.partition_for_ethosu(mod)

    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.PartitionedSplitRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod["tvmgen_default_ethos_u_main_0"] = relay.transform.InferType()(mod)[
        "tvmgen_default_ethos_u_main_0"
    ]
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape,ifm_scale,ifm_zp,ofm_scale,ofm_zp",
    [[(1, 8, 8, 3), 1.0, 0, 1.0, 0], [(1, 20, 30, 3), 1.345, 34, 0.32, -23]],
)
def test_ethosu_requantize(ifm_shape, ifm_scale, ifm_zp, ofm_scale, ofm_zp):
    """A stand-alone qnn.requantize legalizes to an NPU op that carries the
    original quantization parameters in its attributes."""
    dtype = "int8"

    def create_model():
        # Single requantize over an int8 input.
        x = relay.var("ifm", shape=ifm_shape, dtype="int8")
        out = relay.qnn.op.requantize(
            x,
            relay.const(ifm_scale, dtype="float32"),
            relay.const(ifm_zp, dtype="int32"),
            relay.const(ofm_scale, dtype="float32"),
            relay.const(ofm_zp, dtype="int32"),
        )
        return tvm.IRModule.from_expr(relay.Function([x], out))

    def verify(ext_func):
        call = ext_func.body
        ifm_type = call.args[0].checked_type
        ofm_type = call.checked_type
        # Both IFM and OFM keep the input shape and dtype.
        assert list(ifm_type.shape) == list(ifm_shape)
        assert str(ifm_type.dtype) == dtype
        assert list(ofm_type.shape) == list(ifm_shape)
        assert str(ofm_type.dtype) == dtype
        # Quantization parameters are carried over onto the legalized op.
        assert math.isclose(call.attrs.ifm_scale, ifm_scale, abs_tol=1e-7)
        assert call.attrs.ifm_zero_point == ifm_zp
        assert math.isclose(call.attrs.ofm_scale, ofm_scale, abs_tol=1e-7)
        assert call.attrs.ofm_zero_point == ofm_zp

    pattern_table = [
        (
            ethosu.RequantizeParams.composite_name,
            ethosu.requantize_pattern(),
            lambda pat: ethosu.RequantizeParams(pat).is_valid(),
        ),
    ]
    mod = partition_ethosu_by_table(create_model(), pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.RequantizeRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
def test_multiple_requantize_offload():
    """
    Testing requantize offload in the case one requantize operation is part of
    an existing pattern (in this case Mean: cast->mean->requantize) and the
    other is a stand-alone requantize.
    """

    def create_model():
        ifm = relay.var("input", shape=(1, 3, 3, 4), dtype="int8")
        cast = relay.cast(ifm, dtype="int32")
        mean = relay.mean(cast, axis=1, keepdims=True)
        # First requantize completes the Mean pattern (cast -> mean -> requantize).
        requantize = relay.qnn.op.requantize(
            mean,
            input_scale=relay.const(1.0, dtype="float32"),
            input_zero_point=relay.const(0, dtype="int32"),
            output_scale=relay.const(1.0, dtype="float32"),
            output_zero_point=relay.const(0, dtype="int32"),
        )
        # Second requantize is stand-alone.
        requantize = relay.qnn.op.requantize(
            requantize,
            input_scale=relay.const(1.0, dtype="float32"),
            input_zero_point=relay.const(0, dtype="int32"),
            output_scale=relay.const(1.0, dtype="float32"),
            output_zero_point=relay.const(0, dtype="int32"),
        )
        return tvm.IRModule.from_expr(relay.Function([ifm], requantize))

    def verify(ext_func):
        # If mean operation and separate requantize were offloaded correctly,
        # there should only be a pooling operation followed by an identity
        # operation legalized.
        op = ext_func.body
        assert op.op.name == "contrib.ethosu.identity"
        op = op.args[0]
        # Assert on the walked-to `op` (the original asserted on
        # ext_func.body.args[0], which is the same node but read as if a
        # different expression was being checked).
        assert op.op.name == "contrib.ethosu.pooling"
        op = op.args[0]
        assert isinstance(op, relay.Var)

    mod = create_model()
    mod = ethosu.partition_for_ethosu(mod)
    mod = legalize.LegalizeEthosU()(mod)
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("ifm_shape,axis", [((2,), 0), ((1, 3, 3), 2)])
def test_tflite_expand_dims(ifm_shape, axis):
    """TFLite expand_dims legalizes to a reshape inserting a unit dimension."""
    dtype = "int8"

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                return tf.expand_dims(x, axis=axis)

        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, tf.float32)
        )

        def representative_dataset():
            # Random calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        op = ext_func.body
        # Expected output shape: input shape with a 1 inserted at `axis`.
        expected_shape = list(ifm_shape)
        expected_shape.insert(axis, 1)

        # Check IFM
        assert list(op.args[0].checked_type.shape) == list(ifm_shape)
        assert op.args[0].checked_type.dtype == dtype

        # Check OFM
        assert list(op.checked_type.shape) == expected_shape
        assert op.checked_type.dtype == dtype

        # Check op
        assert op.op.name == "reshape"

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = ethosu.partition_for_ethosu(mod)

    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.ExpandDimsRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.ReshapeRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod["tvmgen_default_ethos_u_main_0"] = relay.transform.InferType()(mod)[
        "tvmgen_default_ethos_u_main_0"
    ]
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape,axis", [((1, 1, 2, 1), 0), ((1, 3, 3, 1), 3), ((1, 1, 2, 1), None)]
)
def test_tflite_squeeze(ifm_shape, axis):
    """Legalization test: TFLite squeeze is legalized to a reshape."""
    dtype = "int8"

    def create_tflite_graph():
        # Build and int8-quantize a model containing a single squeeze.

        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                return tf.squeeze(x, axis=axis)

        concrete_func = Model().tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, tf.float32)
        )

        def representative_dataset():
            for _ in range(100):
                sample = np.random.rand(*tuple(ifm_shape))
                yield [sample.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        return converter.convert()

    def verify(ext_func):
        op = ext_func.body
        if isinstance(axis, int):
            # Squeezing a single axis drops exactly that dimension.
            expected_shape = ifm_shape[:axis] + ifm_shape[axis + 1 :]
        else:
            # Squeezing with no axis drops every unit dimension.
            expected_shape = [dim for dim in ifm_shape if dim != 1]
        # IFM checks
        assert list(op.args[0].checked_type.shape) == list(ifm_shape)
        assert op.args[0].checked_type.dtype == dtype
        # OFM checks
        assert list(op.checked_type.shape) == list(expected_shape)
        assert op.checked_type.dtype == dtype
        # squeeze should have been legalized to a reshape
        assert op.op.name == "reshape"

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = ethosu.partition_for_ethosu(mod)
    for rewriter in (legalize.SqueezeRewriter(), legalize.ReshapeRewriter()):
        mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
            rewriter, mod["tvmgen_default_ethos_u_main_0"]
        )
    mod["tvmgen_default_ethos_u_main_0"] = relay.transform.InferType()(mod)[
        "tvmgen_default_ethos_u_main_0"
    ]
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape,size,half_pixel",
    [
        [(1, 2, 2, 1), (4, 4), False],
        [(1, 2, 2, 1), (4, 4), True],
        [(1, 4, 7, 3), (8, 14), False],
        [(1, 3, 5, 3), (3, 5), False],
        [(1, 6, 6, 96), (12, 12), False],
        [(1, 6, 6, 96), (12, 12), True],
    ],
)
def test_tflite_resize2d_nearest_neighbor(ifm_shape, size, half_pixel):
    """Legalization test: TFLite resize_nearest_neighbor becomes either an
    identity (when the size is unchanged) or an upscaling AVG pooling."""
    align_corners = False
    dtype = "int8"
    def create_tflite_graph():
        # Build an int8-quantized single-op resize model and import it to Relay.
        @tf.function
        def resize_model(x):
            return tf.compat.v1.image.resize_nearest_neighbor(
                x,
                size,
                align_corners=align_corners,
                half_pixel_centers=half_pixel,
            )
        concrete_func = resize_model.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        def representative_dataset():
            # Calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model, 0)
        mod, _ = relay.frontend.from_tflite(
            tflite_model,
            shape_dict={"input": ifm_shape},
            dtype_dict={"input": dtype},
        )
        return mod
    def verify(ext_func):
        op = ext_func.body
        in_var = op.args[0]
        # check IFM
        assert tuple(in_var.checked_type.shape) == ifm_shape
        assert in_var.checked_type.dtype == dtype
        # check OFM: NHWC with the requested spatial size
        attrs = dict(op.attrs)
        out_shape = (ifm_shape[0], size[0], size[1], ifm_shape[3])
        assert tuple(op.checked_type.shape) == out_shape
        assert op.checked_type.dtype == dtype
        # Check Op attributes
        if size[0] == ifm_shape[1] and size[1] == ifm_shape[2]:
            # A no-op resize is legalized to identity.
            assert op.op.name == "contrib.ethosu.identity"
        else:
            # Otherwise an AVG pooling with hardware NEAREST upscaling is used.
            assert attrs["pooling_type"] == "AVG"
            assert attrs["upscale"] == "NEAREST"
    rewriter = legalize.Resize2dRewriter()
    pattern_table = [
        (
            ethosu.Resize2dParams.composite_name,
            ethosu.resize2d_pattern(),
            lambda pat: ethosu.Resize2dParams(pat).is_valid(),
        ),
    ]
    mod = create_tflite_graph()
    mod = partition_ethosu_by_table(mod, pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape,size,align_corners",
    [
        [(1, 2, 2, 1), (4, 4), False],
        [(1, 4, 7, 3), (8, 14), False],
        [(1, 2, 2, 1), (3, 3), True],
        [(1, 4, 7, 3), (7, 13), True],
        [(1, 3, 5, 3), (3, 5), False],
    ],
)
def test_tflite_resize2d_bilinear(ifm_shape, size, align_corners):
    """Legalization test: TFLite resize_bilinear becomes either an identity
    (when the size is unchanged) or an upscaling AVG pooling whose padding
    depends on the align_corners mode."""
    dtype = "int8"
    def create_tflite_graph():
        # Build an int8-quantized single-op resize model and import it to Relay.
        @tf.function
        def resize_model(x):
            return tf.compat.v1.image.resize_bilinear(
                x, size, align_corners=align_corners, half_pixel_centers=False
            )
        concrete_func = resize_model.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        def representative_dataset():
            # Calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model, 0)
        mod, _ = relay.frontend.from_tflite(
            tflite_model,
            shape_dict={"input": ifm_shape},
            dtype_dict={"input": dtype},
        )
        return mod
    def verify(ext_func):
        op = ext_func.body
        in_var = op.args[0]
        # check IFM
        assert tuple(in_var.checked_type.shape) == ifm_shape
        assert in_var.checked_type.dtype == dtype
        # check OFM: NHWC with the requested spatial size
        attrs = dict(op.attrs)
        out_shape = (ifm_shape[0], size[0], size[1], ifm_shape[3])
        assert tuple(op.checked_type.shape) == out_shape
        assert op.checked_type.dtype == dtype
        # Check Op attributes
        if size[0] == ifm_shape[1] and size[1] == ifm_shape[2]:
            # A no-op resize is legalized to identity.
            assert op.op.name == "contrib.ethosu.identity"
        else:
            assert attrs["pooling_type"] == "AVG"
            assert attrs["upscale"] == "NEAREST"
        # Check padding: non-align_corners bilinear needs 1 pixel of
        # bottom/right padding for the pooling window.
        if align_corners:
            assert list(attrs["padding"]) == [0, 0, 0, 0]
        else:
            assert list(attrs["padding"]) == [0, 0, 1, 1]
    rewriter = legalize.Resize2dRewriter()
    pattern_table = [
        (
            ethosu.Resize2dParams.composite_name,
            ethosu.resize2d_pattern(),
            lambda pat: ethosu.Resize2dParams(pat).is_valid(),
        ),
    ]
    mod = create_tflite_graph()
    mod = partition_ethosu_by_table(mod, pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape,ofm_shape,kernel_shape,padding",
    [
        [(1, 2, 2, 1), (1, 4, 4, 1), (3, 3), "SAME"],
        [(1, 2, 2, 1), (1, 9, 9, 1), (7, 7), "VALID"],
        [(1, 2, 4, 3), (1, 4, 8, 3), (3, 3), "SAME"],
        [(1, 10, 5, 3), (1, 21, 13, 3), (3, 5), "VALID"],
    ],
)
@pytest.mark.parametrize("has_bias", [False, True])
def test_tflite_transpose_convolution(ifm_shape, ofm_shape, kernel_shape, padding, has_bias):
    """Legalization test: TFLite conv2d_transpose is legalized to an ethosu
    conv2d (with unit strides and adjusted padding) followed by a strided
    slice that crops the result to the requested OFM shape."""
    dtype = "int8"
    dilations = (1, 1)
    strides = (2, 2)

    def create_tflite_graph():
        # Build an int8-quantized transpose-convolution model and import it
        # to Relay.
        @tf.function
        def conv2d_transpose(x):
            bias_shape = ofm_shape[3]
            bias = tf.constant(np.random.uniform(size=bias_shape), dtype=tf.float32)
            weight_shape = [kernel_shape[0], kernel_shape[1], ifm_shape[3], ofm_shape[3]]
            weight = tf.constant(np.random.uniform(size=weight_shape), dtype=tf.float32)
            tf_strides = [1, strides[0], strides[1], 1]
            op = tf.nn.conv2d_transpose(
                x,
                weight,
                output_shape=ofm_shape,
                strides=tf_strides,
                padding=padding,
                dilations=dilations,
            )
            if has_bias:
                op = tf.nn.bias_add(op, bias)
            return op

        concrete_func = conv2d_transpose.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        def representative_dataset():
            # Calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model, 0)
        mod, params = relay.frontend.from_tflite(
            tflite_model,
            shape_dict={"input": ifm_shape},
            dtype_dict={"input": dtype},
        )
        return mod, params

    def verify(ext_func):
        strided_slice = ext_func.body
        conv = strided_slice.args[0]
        ofm_channels = conv.attrs.ofm_channels
        # Check IFM
        ifm = conv.args[0].checked_type
        assert list(ifm.shape) == list(ifm_shape)
        assert str(ifm.dtype) == dtype
        assert ifm.shape[3] == ofm_channels
        # Check OFM (after the cropping strided slice)
        ofm = strided_slice.checked_type
        assert list(ofm.shape) == list(ofm_shape)
        assert str(ofm.dtype) == dtype
        assert ofm.shape[3] == ofm_channels
        # Check weights were transformed to OHWI layout
        weights_ohwi = conv.args[1].data.asnumpy()
        assert str(weights_ohwi.dtype) == dtype
        assert list(weights_ohwi.shape) == [
            ofm_channels,
            kernel_shape[0],
            kernel_shape[1],
            ifm_shape[3],
        ]
        # Check that scale_bias matches weight tensor
        assert list(conv.args[2].checked_type.shape)[0] == ofm_channels
        # Calculate expected padding for conv2d op
        if padding == "VALID":
            expected_padding = [0, 0, 0, 0]
        elif padding == "SAME":
            pad_top, pad_bottom = get_pad_value(ofm_shape[1], kernel_shape[0], strides[0])
            pad_left, pad_right = get_pad_value(ofm_shape[2], kernel_shape[1], strides[1])
            expected_padding = [pad_top, pad_left, pad_bottom, pad_right]
        # Transpose conv padding is "inverted": kernel - 1 - forward padding.
        pad_top = kernel_shape[0] - 1 - expected_padding[0]
        pad_left = kernel_shape[1] - 1 - expected_padding[1]
        pad_bottom = kernel_shape[0] - 1 - expected_padding[2]
        pad_right = kernel_shape[1] - 1 - expected_padding[3]
        # Bug fix: `strides` is the tuple (2, 2), so the original comparison
        # against the list literal [2, 2] was always False and the stride-2
        # bottom/right correction never applied.
        if strides == (2, 2):
            pad_bottom -= 1
            pad_right -= 1
        expected_padding = [pad_top, pad_left, pad_bottom, pad_right]
        assert list(conv.attrs.padding) == list(expected_padding)
        # Upscaling replaces striding, so the conv itself runs with stride 1.
        assert list(conv.attrs.strides) == [1, 1]

    rewriter = legalize.Conv2DTransposeRewriter()
    pattern_table = [
        (
            ethosu.QnnConv2DTransposeParams.composite_name,
            ethosu.qnn_conv2d_transpose_pattern(),
            lambda pat: ethosu.QnnConv2DTransposeParams(pat).is_valid(),
        ),
    ]
    mod, params = create_tflite_graph()
    mod["main"] = bind_params_by_name(mod["main"], params)
    mod = partition_ethosu_by_table(mod, pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        rewriter, mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shapes,axis",
    [
        ([(1, 2, 2), (1, 2, 2), (1, 2, 2)], 2),
        ([(5, 4), (5, 4)], 1),
        ([(1,), (1,)], 0),
        ([(3, 1), (3, 1), (3, 1), (3, 1)], 0),
    ],
)
def test_tflite_pack(ifm_shapes, axis):
    """Legalization test: TFLite pack (tf.stack) is legalized to per-input
    reshapes (from expand_dims) feeding a concatenate."""
    dtype = "int8"
    def create_tflite_graph():
        # Build an int8-quantized model stacking several inputs along `axis`.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, inputs, axis):
                return tf.stack(inputs, axis=axis)
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            [tf.TensorSpec(shape, tf.float32) for shape in ifm_shapes], axis
        )
        def representative_dataset():
            # Calibration data for post-training quantization.
            for _ in range(100):
                datas = [np.random.rand(*shape) for shape in ifm_shapes]
                yield [data.astype(np.float32) for data in datas]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func):
        new_pack_axis = len(ifm_shapes)
        ifm_shape = list(ifm_shapes[0])
        op = ext_func.body
        # Each input gains a unit dim at `axis`; the concat output has the
        # number of packed inputs at `axis`.
        after_reshape = ifm_shape[:axis] + [1] + ifm_shape[axis:]
        out_shape = ifm_shape[:axis] + [new_pack_axis] + ifm_shape[axis:]
        assert op.op.name == "concatenate"
        # Check shapes after expand_dims (legalized as reshape)
        for i in range(len(ifm_shapes)):
            assert list(op.args[0][i].checked_type.shape) == after_reshape
            assert op.args[0][i].checked_type.dtype == dtype
        # Check output
        assert list(op.checked_type.shape) == out_shape
        assert op.checked_type.dtype == dtype
    pack_pattern_table = [
        (
            ethosu.ConcatParams.composite_name,
            ethosu.concat_pattern(),
            lambda pat: ethosu.ConcatParams(pat).is_valid(),
        ),
        (
            ethosu.ExpandDimsParams.composite_name,
            ethosu.expand_dims_pattern(),
            lambda pat: ethosu.ExpandDimsParams(pat).is_valid(),
        ),
        (
            ethosu.ReshapeParams.composite_name,
            ethosu.reshape_pattern(),
            lambda pat: ethosu.ReshapeParams(pat).is_valid(),
        ),
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    relay_module, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={("ifm" + str(i)): shape for i, shape in enumerate(ifm_shapes)},
        dtype_dict={("ifm" + str(i)): dtype for i, _ in enumerate(ifm_shapes)},
    )
    mod = partition_ethosu_by_table(relay_module, pack_pattern_table)
    # Rewriter order matters: concat first, then the expand_dims/reshape
    # legalizations, then the no-op cleanup.
    seq = [
        legalize.ConcatRewriter(),
        legalize.ExpandDimsRewriter(),
        legalize.ReshapeRewriter(),
        legalize.NoOpRewriter(),
    ]
    for legalizer in seq:
        mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
            legalizer, mod["tvmgen_default_ethos_u_main_0"]
        )
    mod["tvmgen_default_ethos_u_main_0"] = relay.transform.InferType()(mod)[
        "tvmgen_default_ethos_u_main_0"
    ]
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize(
    "ifm_shape,axis",
    [[(1, 2, 3, 4), 1], [(2, 3), 1], [(5, 6, 7), 2]],
)
def test_tflite_unpack(ifm_shape, axis):
    """Legalization test: TFLite unpack (tf.unstack) is legalized to
    strided slices along `axis` followed by reshapes (from squeeze)."""
    dtype = "int8"
    def create_tflite_graph():
        # Build an int8-quantized model unstacking the input along `axis`.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x, axis):
                return tf.unstack(x, axis=axis)
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, tf.float32), axis
        )
        def representative_dataset():
            # Calibration data for post-training quantization.
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func):
        outputs = ext_func.body.args[0].fields
        shape = list(ifm_shape)
        # Final output shape drops `axis`; each slice keeps it with extent 1.
        unpacked_shape = shape[:axis] + shape[axis + 1 :]
        split_shape = shape[:axis] + [1] + shape[axis + 1 :]
        assert len(outputs) == shape[axis]
        for i, output in enumerate(outputs):
            # Walk backwards through the legalized graph for each output.
            expr = output.args[0].args[0]
            expr = expr.tuple_value[expr.index]
            expr = expr.args[0]
            # Checking expected unpacked output shape.
            # Squeeze is legalized to a reshape.
            assert expr.op.name == "reshape"
            assert list(expr.checked_type.shape) == unpacked_shape
            assert output.checked_type.dtype == dtype
            expr = expr.args[0]
            expr = expr.tuple_value[expr.index]
            expr = expr.args[0]
            # Check input is split correctly
            assert list(expr.args[0].checked_type.shape) == shape
            assert list(expr.checked_type.shape) == split_shape
            assert expr.checked_type.dtype == dtype
            # Check split attrs: the i-th slice spans [i, i+1) along `axis`.
            begin_shape = [0] * len(ifm_shape)
            begin_shape[axis] = i
            assert list(expr.attrs.begin) == begin_shape
            end_shape = shape[:axis] + [i + 1] + shape[axis + 1 :]
            assert list(expr.attrs.end) == end_shape
            assert list(expr.attrs.strides) == [1]
    pack_pattern_table = [
        (
            ethosu.SplitParams.composite_name,
            ethosu.split_pattern(),
            lambda pat: ethosu.SplitParams(pat).is_valid(),
        ),
        (
            ethosu.SqueezeParams.composite_name,
            ethosu.squeeze_pattern(),
            lambda pat: ethosu.SqueezeParams(pat).is_valid(),
        ),
        (
            ethosu.ReshapeParams.composite_name,
            ethosu.reshape_pattern(),
            lambda pat: ethosu.ReshapeParams(pat).is_valid(),
        ),
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = partition_ethosu_by_table(mod, pack_pattern_table)
    # Rewriter order matters: split first, then squeeze/reshape legalization,
    # then the no-op cleanup.
    seq = [
        legalize.PartitionedSplitRewriter(),
        legalize.SplitRewriter(),
        legalize.SqueezeRewriter(),
        legalize.ReshapeRewriter(),
        legalize.NoOpRewriter(),
    ]
    for legalizer in seq:
        mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
            legalizer, mod["tvmgen_default_ethos_u_main_0"]
        )
    mod["tvmgen_default_ethos_u_main_0"] = relay.transform.InferType()(mod)[
        "tvmgen_default_ethos_u_main_0"
    ]
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("ifm_shape", [(1, 15, 15, 3), (1, 8, 9, 1)])
@pytest.mark.parametrize("alpha", [0.2, 0.634])
def test_tflite_leaky_relu(ifm_shape, alpha):
    """Legalization test: TFLite leaky ReLU is legalized to an identity op
    that applies a 256-entry LUT."""
    dtype = "int8"

    def create_tflite_graph():
        # Build and int8-quantize a model containing a single leaky ReLU.

        class Model(tf.Module):
            @tf.function
            def leaky_relu_func(self, x):
                return tf.nn.leaky_relu(x, alpha=alpha)

        concrete_func = Model().leaky_relu_func.get_concrete_function(
            tf.TensorSpec(ifm_shape, tf.float32),
        )

        def representative_dataset():
            for _ in range(100):
                sample = np.random.rand(*tuple(ifm_shape))
                yield [sample.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        return converter.convert()

    def verify(ext_func):
        body = ext_func.body
        assert body.op.name == "contrib.ethosu.identity"
        assert body.attrs.activation == "LUT"
        # arg 0 is the IFM, arg 1 is the lookup table
        assert tuple(body.args[0].checked_type.shape) == ifm_shape
        assert tuple(body.args[1].checked_type.shape) == (256,)

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, _ = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = ethosu.partition_for_ethosu(mod)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.LeakyReLURewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod["tvmgen_default_ethos_u_main_0"] = relay.transform.InferType()(mod)[
        "tvmgen_default_ethos_u_main_0"
    ]
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("ifm_shape", [(1, 14), (1, 151)])
@pytest.mark.parametrize("ofm_channels", [32, 64])
@pytest.mark.parametrize("use_bias", [True, False])
@pytest.mark.parametrize("activation_function", ["RELU", "NONE"])
def test_tflite_fully_connected(
    ifm_shape,
    ofm_channels,
    use_bias,
    activation_function,
):
    """Legalization test: a TFLite fully connected layer is legalized to an
    ethosu conv2d with a 1x1 kernel over an input reshaped to [1, 1, N, C]."""
    dtype = "int8"

    def create_tflite_graph():
        class Model(tf.Module):
            @tf.function
            def fully_connected(self, x):
                bias_shape = ofm_channels
                bias = tf.constant(np.random.uniform(size=bias_shape), dtype=tf.float32)
                w = tf.constant(
                    np.random.uniform(size=[ifm_shape[1], ofm_channels]),
                    dtype=tf.float32,
                )
                x = tf.matmul(x, w)
                if use_bias:
                    x = tf.nn.bias_add(x, bias)
                # Bug fix: the original guard was `if activation_function:`,
                # which is always truthy ("NONE" is a non-empty string), so
                # ReLU was applied in every case and the RELU/NONE
                # parametrization was dead.
                if activation_function == "RELU":
                    x = tf.nn.relu(x)
                return x

        model = Model()
        concrete_func = model.fully_connected.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )

        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model

    def verify(ext_func):
        op = ext_func.body.args[0]
        ofm_channels = op.attrs.ofm_channels
        # check IFM: input is reshaped to NHWC with unit batch/height
        ifm = op.args[0].checked_type
        assert list(ifm.shape) == [1, 1] + list(ifm_shape)
        assert str(ifm.dtype) == dtype
        # check OFM
        ofm = op.checked_type
        assert list(ofm.shape) == [1, 1, 1, ofm_channels]
        assert str(ofm.dtype) == dtype
        # check weights were transformed to OHWI with a 1x1 kernel
        weights_ohwi = op.args[1].data.asnumpy()
        assert str(weights_ohwi.dtype) == dtype
        assert list(weights_ohwi.shape) == [ofm_channels, 1, 1, ifm_shape[1]]
        # Check that scale_bias matches weight tensor
        assert list(op.args[2].checked_type.shape)[0] == ofm_channels
        assert list(op.attrs.padding) == [0, 0, 0, 0]
        assert list(op.attrs.strides) == [1, 1]
        assert list(op.attrs.dilation) == [1, 1]
        if activation_function == "RELU":
            assert str(op.attrs.activation) == "CLIP"

    fc_pattern_table = [
        (
            ethosu.FullyConnectedParams.composite_name,
            ethosu.qnn_fc_pattern(),
            lambda pat: ethosu.FullyConnectedParams(pat).is_valid(),
        )
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, fc_params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod["main"] = bind_params_by_name(mod["main"], fc_params)
    mod = partition_ethosu_by_table(mod, fc_pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.FullyConnectedRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("ifm_shape", [(1, 5, 5, 3), (1, 12, 9, 1)])
def test_tflite_hard_swish(ifm_shape):
    """Legalization test: TFLite hard swish (x * relu6(x + 3) / 6) is
    legalized to an identity op that applies a 256-entry LUT."""
    dtype = "int8"

    def create_tflite_graph():
        # Build and int8-quantize a model computing x * relu6(x + 3) / 6.

        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                return tf.keras.layers.Lambda(
                    lambda x: x * tf.keras.activations.relu(x + 3.0, max_value=6.0) / 6.0
                )(x)

        concrete_func = Model().tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, tf.float32)
        )

        def representative_dataset():
            for _ in range(100):
                sample = np.random.rand(*tuple(ifm_shape))
                yield [sample.astype(np.float32)]

        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        return converter.convert()

    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod = ethosu.partition_for_ethosu(mod, params)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.HardSwishRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod = relay.transform.InferType()(mod)

    # arg 0 is the IFM, arg 1 is the lookup table
    func_body = mod["tvmgen_default_ethos_u_main_0"].body
    assert func_body.op.name == "contrib.ethosu.identity"
    assert func_body.attrs.activation == "LUT"
    assert tuple(func_body.args[0].checked_type.shape) == ifm_shape
    assert tuple(func_body.args[1].checked_type.shape) == (256,)
def test_tflite_softmax():
    """Legalization test: TFLite softmax is legalized to a fixed sequence of
    ethosu pooling / elementwise ops implementing the integer softmax
    algorithm (max-subtract, exp approximation, Newton-Raphson reciprocal)."""
    np.random.seed(0)
    dtype = "int8"
    ifm_shape = (1, 12)
    def create_tflite_graph():
        # Build an int8-quantized single-op softmax model.
        @tf.function
        def softmax(x):
            return tf.nn.softmax(x)
        concrete_func = softmax.get_concrete_function(tf.TensorSpec(ifm_shape, dtype=tf.float32))
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.uniform(low=-1, high=2, size=tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    def verify(ext_func):
        out_op = ext_func.body
        ops = []
        # List of expected operations, their type and activation parameters if it exists
        # Each entry: (op name, pooling/operator type or None,
        #              [ifm_scale, ifm_zp, ifm2_scale, ifm2_zp, ofm_scale, ofm_zp]).
        # The quantization values are tied to the fixed np.random.seed(0)
        # calibration above.
        expected_ops_params = [
            ("reshape", None, [None, None, None, None, None, None]),
            ("reshape", None, [None, None, None, None, None, None]),
            ("contrib.ethosu.pooling", "MAX", [0.011756093241274357, -43, None, None, 0.0, -43]),
            (
                "contrib.ethosu.binary_elementwise",
                "SUB",
                [0.011756093241274357, -43, 0.0, -43, 1.0, 127],
            ),
            ("contrib.ethosu.binary_elementwise", "SHR", [1.0, 0, 0.0, 0, 0.0, -43]),
            ("contrib.ethosu.pooling", "SUM", [0.0, 0, None, None, 0.0, -43]),
            ("contrib.ethosu.unary_elementwise", "CLZ", [0.0, 0, None, None, 0.0, -43]),
            ("contrib.ethosu.binary_elementwise", "SUB", [0.0, 0, 0.0, 0, 0.0, -43]),
            ("contrib.ethosu.binary_elementwise", "SHL", [0.0, 0, 0.0, 0, 0.0, -43]),
            ("contrib.ethosu.binary_elementwise", "SUB", [0.0, 0, 0.0, 0, 0.0, -43]),
            ("contrib.ethosu.binary_elementwise", "SHL", [0.0, 0, 0.0, 0, 0.0, -43]),
            ("contrib.ethosu.binary_elementwise", "ADD", [0.0, 0, 0.0, 0, 1.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [1.0, 0, 1.0, 0, 2.0, 0]),
            ("contrib.ethosu.binary_elementwise", "ADD", [2.0, 0, 0.0, 0, 1.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [1.0, 0, 1.0, 0, 2.0, 0]),
            ("contrib.ethosu.binary_elementwise", "SUB", [2.0, 0, 0.0, 0, 1.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [1.0, 0, 1.0, 0, 2.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [2.0, 0, 0.0, 0, 0.0, -43]),
            ("contrib.ethosu.binary_elementwise", "ADD", [1.0, 0, 0.0, 0, 1.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [1.0, 0, 1.0, 0, 2.0, 0]),
            ("contrib.ethosu.binary_elementwise", "SUB", [2.0, 0, 0.0, 0, 1.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [1.0, 0, 1.0, 0, 2.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [2.0, 0, 0.0, 0, 0.0, -43]),
            ("contrib.ethosu.binary_elementwise", "ADD", [1.0, 0, 0.0, 0, 1.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [1.0, 0, 1.0, 0, 2.0, 0]),
            ("contrib.ethosu.binary_elementwise", "SUB", [2.0, 0, 0.0, 0, 1.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [1.0, 0, 1.0, 0, 2.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [2.0, 0, 0.0, 0, 0.0, -43]),
            ("contrib.ethosu.binary_elementwise", "ADD", [1.0, 0, 0.0, 0, 1.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [1.0, 0, 0.0, 0, 1.0, 0]),
            ("contrib.ethosu.binary_elementwise", "MUL", [1.0, 0, 1.0, 0, 2.0, 0]),
            ("contrib.ethosu.binary_elementwise", "SUB", [0.0, 0, 0.0, 0, 0.0, -43]),
            ("contrib.ethosu.binary_elementwise", "SHR", [2.0, 0, 0.0, 0, 0.00390625, -128]),
            ("reshape", None, [None, None, None, None, None, None]),
        ]
        def get_attr_value(op, attr_name):
            # Return the attribute value, or None if the op lacks it.
            if hasattr(op.attrs, attr_name):
                return op.attrs[attr_name]
            else:
                return None
        def get_op_type(op):
            # Poolings carry pooling_type; elementwise ops carry operator_type.
            if hasattr(op.attrs, "pooling_type"):
                return op.attrs.pooling_type
            elif hasattr(op.attrs, "operator_type"):
                return op.attrs.operator_type
            return None
        def get_activation_params(op):
            # Collect quantization params in the same order as the table above.
            activation_params = []
            activation_params.append(get_attr_value(op, "ifm_scale"))
            activation_params.append(get_attr_value(op, "ifm_zero_point"))
            activation_params.append(get_attr_value(op, "ifm2_scale"))
            activation_params.append(get_attr_value(op, "ifm2_zero_point"))
            activation_params.append(get_attr_value(op, "ofm_scale"))
            activation_params.append(get_attr_value(op, "ofm_zero_point"))
            return activation_params
        def _visit(stmt):
            # Collect every call node in post order (matches the table order).
            if isinstance(stmt, relay.expr.Call):
                ops.append(stmt)
        relay.analysis.post_order_visit(out_op, _visit)
        # check IFM
        ifm = ops[0].args[0].checked_type
        assert list(ifm.shape) == list(ifm_shape)
        assert str(ifm.dtype) == dtype
        # check OFM
        ofm = out_op.checked_type
        assert list(ofm.shape) == list(ifm_shape)
        assert ofm.dtype == dtype
        # check operations
        for op, expected_op_params in zip(ops, expected_ops_params):
            activation_params = get_activation_params(op)
            expected_op_name, expected_op_type, expected_activation_params = expected_op_params
            assert op.op.name == expected_op_name
            assert expected_op_type == get_op_type(op)
            for activation_param, expected_activation_param in zip(
                activation_params, expected_activation_params
            ):
                # Compare scales with a tolerance; everything else exactly.
                if isinstance(activation_param, float):
                    assert math.isclose(expected_activation_param, activation_param, abs_tol=1e-7)
                else:
                    assert expected_activation_param == activation_param
    softmax_pattern_table = [
        (
            ethosu.SoftMaxParams.composite_name,
            ethosu.softmax_pattern(),
            lambda pat: ethosu.SoftMaxParams(pat).is_valid(),
        )
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod["main"] = bind_params_by_name(mod["main"], params)
    mod = partition_ethosu_by_table(mod, softmax_pattern_table)
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        legalize.SoftmaxRewriter(), mod["tvmgen_default_ethos_u_main_0"]
    )
    mod = relay.transform.InferType()(mod)
    verify(mod["tvmgen_default_ethos_u_main_0"])
@pytest.mark.parametrize("ifm_shape", [(1, 55, 55, 3)])
@pytest.mark.parametrize("kernel_shape", [(3, 3)])
@pytest.mark.parametrize("strides, dilation", [((1, 1), (1, 1))])
@pytest.mark.parametrize("op_padding", ["SAME", "VALID"])
@pytest.mark.parametrize("sep_padding", [(0, 0, 1, 1), (7, 5, 4, 5)])
@pytest.mark.parametrize(
    "op_pairs", [("conv2d", "conv2d"), ("depthwise", "depthwise"), ("conv2d", "depthwise")]
)
def test_tflite_shared_pad_legalize(
    ifm_shape,
    kernel_shape,
    strides,
    dilation,
    op_padding,
    sep_padding,
    op_pairs,
):
    """Legalization test: a tf.pad shared by two convolution-style consumers
    is partitioned into two ethosu subgraphs, each legalized independently."""
    dtype = "int8"
    def create_tflite_graph():
        # Build a model where one padded tensor feeds two conv/depthwise ops
        # whose outputs are added together, then int8-quantize it.
        class Model(tf.Module):
            @tf.function
            def tf_function(self, x):
                def make_depthwise_or_conv2d(pair_idx):
                    # Emit either a depthwise conv or a regular conv depending
                    # on the parametrized op pair.
                    if op_pairs[pair_idx] == "depthwise":
                        weight_shape = [kernel_shape[0], kernel_shape[1], ifm_shape[3], 1]
                        weight = tf.constant(np.random.uniform(size=weight_shape), dtype=tf.float32)
                        return tf.nn.depthwise_conv2d(
                            x, weight, strides=tf_strides, padding=op_padding, dilations=dilation
                        )
                    weight_shape = [kernel_shape[0], kernel_shape[1], ifm_shape[3], 3]
                    weight = tf.constant(np.random.uniform(size=weight_shape), dtype=tf.float32)
                    return tf.nn.conv2d(
                        x,
                        weight,
                        strides=tf_strides,
                        padding=op_padding,
                        dilations=dilation,
                    )
                # Separate (shared) padding: (top, left, bottom, right).
                x = tf.pad(
                    x,
                    [
                        [0, 0],
                        [sep_padding[0], sep_padding[2]],
                        [sep_padding[1], sep_padding[3]],
                        [0, 0],
                    ],
                    "CONSTANT",
                )
                # The input strides to the TensorFlow API needs to be of shape 1x4
                tf_strides = [1, strides[0], strides[1], 1]
                x1 = make_depthwise_or_conv2d(0)
                x2 = make_depthwise_or_conv2d(1)
                x3 = tf.math.add(x1, x2)
                return x3
        model = Model()
        concrete_func = model.tf_function.get_concrete_function(
            tf.TensorSpec(ifm_shape, dtype=tf.float32)
        )
        # Convert the model
        def representative_dataset():
            for _ in range(100):
                data = np.random.rand(*tuple(ifm_shape))
                yield [data.astype(np.float32)]
        converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_func])
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_dataset
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        converter.inference_input_type = tf.int8
        converter.inference_output_type = tf.int8
        tflite_model = converter.convert()
        return tflite_model
    conv2d_pattern_table = [
        (
            ethosu.QnnConv2DParams.composite_name,
            ethosu.qnn_conv2d_pattern(),
            lambda pat: ethosu.QnnConv2DParams(pat).is_valid(),
        ),
        (
            ethosu.QnnDepthwiseConv2DParams.composite_name,
            ethosu.qnn_depthwise_conv2d_pattern(),
            lambda pat: ethosu.QnnDepthwiseConv2DParams(pat).is_valid(),
        ),
    ]
    tflite_graph = create_tflite_graph()
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_graph, 0)
    mod, params = relay.frontend.from_tflite(
        tflite_model,
        shape_dict={"input": ifm_shape},
        dtype_dict={"input": dtype},
    )
    mod["main"] = bind_params_by_name(mod["main"], params)
    mod = partition_ethosu_by_table(mod, conv2d_pattern_table)
    # The shared pad causes two external subgraphs (main_0 and main_1);
    # legalize each one with both rewriters.
    mod["tvmgen_default_ethos_u_main_0"] = dataflow_pattern.rewrite(
        [legalize.Conv2DRewriter(), legalize.DepthwiseConv2DRewriter()],
        mod["tvmgen_default_ethos_u_main_0"],
    )
    mod["tvmgen_default_ethos_u_main_1"] = dataflow_pattern.rewrite(
        [legalize.Conv2DRewriter(), legalize.DepthwiseConv2DRewriter()],
        mod["tvmgen_default_ethos_u_main_1"],
    )
    # Each subgraph should now contain the expected legalized operator.
    if op_pairs[0] == "depthwise":
        assert (
            mod["tvmgen_default_ethos_u_main_0"].body.op.name == "contrib.ethosu.depthwise_conv2d"
        )
    else:
        assert mod["tvmgen_default_ethos_u_main_0"].body.op.name == "contrib.ethosu.conv2d"
    if op_pairs[1] == "depthwise":
        assert (
            mod["tvmgen_default_ethos_u_main_1"].body.op.name == "contrib.ethosu.depthwise_conv2d"
        )
    else:
        assert mod["tvmgen_default_ethos_u_main_1"].body.op.name == "contrib.ethosu.conv2d"
if __name__ == "__main__":
tvm.testing.main()
|
class Unit:
    """Named time units and their length in seconds."""

    MINUTE = "minute"
    HOUR = "hour"

    # Seconds spanned by one unit of each kind.
    SECONDS = {
        "minute": 60,
        "hour": 3600,
    }

    @classmethod
    def seconds(cls, unit):
        """Return the number of seconds in one *unit* ("minute" or "hour")."""
        return cls.SECONDS[unit]
|
998,508 | d2903b0943f54d1eaeb6e5bf35823749fd349103 | """request_tracker
Revision ID: 86d8aca36208
Revises: 40015e4aa4f5
Create Date: 2022-04-25 14:55:03.076474
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '86d8aca36208'
down_revision = '40015e4aa4f5'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``request_tracker`` table (auto generated by Alembic)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('request_tracker',
        sa.Column('id', sa.Integer(), nullable=False),
        # Enum columns create the PostgreSQL types dropped again in downgrade().
        sa.Column('request_type', sa.Enum('INFORM_CRA', 'GET_BN', name='request_tracker_type'), nullable=False),
        sa.Column('is_processed', sa.Boolean(), nullable=True),
        sa.Column('request_object', sa.Text(), nullable=True),
        sa.Column('response_object', sa.Text(), nullable=True),
        sa.Column('retry_number', sa.Integer(), nullable=False),
        sa.Column('service_name', sa.Enum('BN_HUB', name='request_tracker_servicename'), nullable=False),
        sa.Column('business_id', sa.Integer(), nullable=True),
        sa.Column('creation_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('last_modified', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['business_id'], ['businesses.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``request_tracker`` table and its enum types."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('request_tracker')
    # drop_table does not remove the enum types on PostgreSQL, so do it explicitly.
    op.execute("DROP TYPE request_tracker_type;")
    op.execute("DROP TYPE request_tracker_servicename;")
    # ### end Alembic commands ###
|
998,509 | a0a562a1ccd2fd1dd8d58d6caefc6bc0f9ff406c | # 给出集合 [1,2,3,…,n],其所有元素共有 n! 种排列。
#
# 按大小顺序列出所有排列情况,并一一标记,当 n = 3 时, 所有排列如下:
#
# "123"
# "132"
# "213"
# "231"
# "312"
# "321"
# 给定 n 和 k,返回第 k 个排列。
#
# 说明:
# 给定 n 的范围是 [1, 9]。
# 给定 k 的范围是[1, n!]。
# DEMO:
# 输入: n = 3, k = 3
# 输出: "213"
# 示例 2:
#
# 输入: n = 4, k = 9
# 输出: "2314"
import math
class Solution:
    def getPermutation(self, n, k):
        """
        Return the k-th (1-indexed) lexicographic permutation of 1..n.

        Factorial number system: the leading digit is determined by
        k // (n-1)!, then the same step repeats on the remainder with one
        fewer digit available.

        :type n: int
        :type k: int
        :rtype: str
        """
        digits = list(range(1, n + 1))      # unused digits, ascending
        remaining = k - 1                   # switch to a 0-based rank
        block = math.factorial(n - 1)       # permutations per choice of leading digit
        result = []
        for slots_left in range(n - 1, -1, -1):
            # Which of the remaining digits leads, and the rank within its block.
            index, remaining = divmod(remaining, block)
            result.append(str(digits.pop(index)))
            if slots_left:
                block //= slots_left        # (m)! -> (m-1)!
        return "".join(result)
|
998,510 | 5aace31734df412e86f59b824ea914201918d8a3 | from typing import List
import ghidra.program.model.address
import ghidra.program.model.data
import ghidra.program.model.listing
import ghidra.program.model.mem
import java.lang
# NOTE(review): auto-generated Jython (Python 2) type stub for Ghidra's
# MSDataTypeUtils. `unicode`, `long` and the bare `@overload` decorator are
# supplied by the stub-generation environment, not by CPython 3 — do not
# import this module at runtime; it exists for IDE type checking only.
class MSDataTypeUtils(object):
    """
    An abstract class containing static utility methods for creating structure data types.
    """

    # java.lang.Object protocol methods inherited through the Java bridge.
    def equals(self, __a0: object) -> bool: ...

    @staticmethod
    def getAbsoluteAddress(program: ghidra.program.model.listing.Program, address: ghidra.program.model.address.Address) -> ghidra.program.model.address.Address:
        """
        Extracts an absolute address from the bytes in memory at the indicated address in memory.
        @param program the program containing the bytes
        @param address the address in memory where the address bytes should be obtained.
        @return the absolute address or null if the address isn't in the program's memory.
        """
        ...

    @staticmethod
    def getAlignedPack4Structure(dataTypeManager: ghidra.program.model.data.DataTypeManager, categoryPath: ghidra.program.model.data.CategoryPath, structureName: unicode) -> ghidra.program.model.data.StructureDataType:
        """
        Gets an empty aligned structure with a packing value of 4 that can be use to create the
         model's data type.
        @param dataTypeManager the data type manager to associate with the structure.
        @param categoryPath the structure's category path.
        @param structureName the structure's name.
        @return the aligned pack(4) structure.
        """
        ...

    @staticmethod
    def getAlignedPack8Structure(dataTypeManager: ghidra.program.model.data.DataTypeManager, categoryPath: ghidra.program.model.data.CategoryPath, structureName: unicode) -> ghidra.program.model.data.StructureDataType:
        """
        Gets an empty aligned structure with a packing value of 8 that can be use to create the
         model's data type.
        @param dataTypeManager the data type manager to associate with the structure.
        @param categoryPath the structure's category path.
        @param structureName the structure's name.
        @return the aligned pack(8) structure.
        """
        ...

    @staticmethod
    def getBytes(memory: ghidra.program.model.mem.Memory, startAddress: ghidra.program.model.address.Address, length: int) -> List[int]:
        """
        Gets bytes from <code>memory</code> at the indicated <code>startAddress</code>.
         The <code>length</code> indicates the number of bytes that must be read
         from memory.
        @param memory the program memory for obtaining the bytes
        @param startAddress the address to begin reading bytes
        @param length the number of bytes to read
        @return the bytes
        @throws InvalidDataTypeException if the <code>length</code> number of bytes couldn't
         be read starting at the <code>startAddress</code> in <code>memory</code>.
        """
        ...

    def getClass(self) -> java.lang.Class: ...

    @staticmethod
    def getEHStateDataType(program: ghidra.program.model.listing.Program) -> ghidra.program.model.data.DataType:
        """
        Gets an exception handling state data type.
        @param program the program for the data type.
        @return the exception handling state data type.
        """
        ...

    @staticmethod
    def getMatchingDataType(program: ghidra.program.model.listing.Program, comparisonDt: ghidra.program.model.data.DataType) -> ghidra.program.model.data.DataType:
        """
        Gets the named data type from the program or the windows data type archive. If neither
         the program or data type archive has an equivalent data type then the original data type
         is returned.
        @param program the program for the data type.
        @param comparisonDt the data type it should match
        @return the matching data type
        """
        ...

    @staticmethod
    def getPMDDataType(program: ghidra.program.model.listing.Program) -> ghidra.program.model.data.Structure:
        """
        Gets a PMD displacement structure data type.
        @param program the program for the data type.
        @return the PMD data type or null.
        """
        ...

    @staticmethod
    def getPointerDisplacementDataType(program: ghidra.program.model.listing.Program) -> ghidra.program.model.data.DataType:
        """
        Gets a pointer displacement data type.
        @param program the program for the data type.
        @return the pointer displacement data type.
        """
        ...

    @staticmethod
    def getReferenceDataType(program: ghidra.program.model.listing.Program, referredToDataType: ghidra.program.model.data.DataType) -> ghidra.program.model.data.DataType:
        """
        Gets the appropriate reference data type. If program is 64 bit, then a 32-bit image
         base offset data type will be returned. Otherwise, a default pointer to the
         referredToDataType will be returned.
        @param program the program that will contain the returned data type
        @param referredToDataType the data type that is at the address being referred to by the
         pointer or image base offset. Otherwise, null.
        @return the image base offset or pointer reference data type
        """
        ...

    @staticmethod
    def getReferencedAddress(program: ghidra.program.model.listing.Program, address: ghidra.program.model.address.Address) -> ghidra.program.model.address.Address:
        """
        Gets the referred to address from the bytes in the program at the indicated address.
         If the program has 64 bit pointers, then a 32 bit image base offset value is expected to
         be found at the indicated address.
         If the program has 32 bit pointers, then a 32 bit absolute pointer value is expected at the
         indicated address.
        @param program the program whose memory is to be read.
        @param address the address to start reading the bytes for the referenced address.
        @return the referred to address or null.
        """
        ...

    def hashCode(self) -> int: ...

    @staticmethod
    def is64Bit(program: ghidra.program.model.listing.Program) -> bool:
        """
        Determines if the indicated program appears to be 64 bit (has 64 bit pointers).
        @param program the program
        @return true if 64 bit.
        """
        ...

    def notify(self) -> None: ...

    def notifyAll(self) -> None: ...

    def toString(self) -> unicode: ...

    @overload
    def wait(self) -> None: ...

    @overload
    def wait(self, __a0: long) -> None: ...

    @overload
    def wait(self, __a0: long, __a1: int) -> None: ...
|
998,511 | bf97395ead523744e6fb03ff3b5305f86e744b3f | import sys
# Filter a residue-contact list: drop rows whose two atoms belong to the same
# residue (a contact with itself is meaningless) and copy the rest verbatim.
#
# Usage: python <script> <input_file> <output_file>
fname = sys.argv[1]
outfname = sys.argv[2]

# `with` guarantees both files are closed even if a row is malformed
# (the original left them open on any exception).
with open(fname) as fin, open(outfname, 'w') as fout:
    for line in fin:
        # Example row:  A9:ALA:CB A7:VAL:CB 8.45181
        fields = line.split()
        res1 = fields[0].split(':')[0]
        res2 = fields[1].split(':')[0]
        if res1 != res2:
            # write(), not writelines(): we are emitting one string.
            fout.write(line)
|
998,512 | 123a65ee9a30318b4f101dc5c2fbd8b9e6b51b26 | from django.conf.urls import patterns, include, url
# URL routes for the home app. Uses the Django <= 1.7 ``patterns()`` helper:
# view names are strings resolved against the 'cursom.apps.home.views' prefix.
urlpatterns = patterns('cursom.apps.home.views',
    url(r'^$', 'index', name="index"),
    url(r'^agregar/', 'agregar', name="agregar"),
    # Captures everything after "actualizar/" (up to the final slash) as kwarg ``id_p``.
    url(r'^actualizar/(?P<id_p>.*)/$', 'actualizar', name="actualizar"),
)
998,513 | 766972e47671acd58e86aec8d20446060d2f1ea7 | # -*- coding: utf-8 -*-
"""
CPP文件简单封装,用于生成cpp文件.
"""
from os import path as op
from time import time, localtime, strftime
class CppFile(object):
    """Builder for a C/C++ file: accumulates includes, classes, functions,
    data, macro definitions and raw code lines, then writes the complete
    file to disk via :meth:`build`."""

    def __init__(self, fpath, author='Unknown author', ver='1.0', doc='', include_macro_prefix=None):
        self.__fpath = fpath                    # output file path
        self.__author = author                  # author shown in the generated header
        self.__ver = ver                        # version shown in the generated header
        self.__doc = doc                        # description shown in the generated header
        self.__include_macro_prefix = include_macro_prefix  # prefix of the include-guard macro
        self.__custom_filehead = None           # optional user-supplied header text
        self.__incls = []                       # header paths to #include
        self.__clses = []                       # class definitions (objects exposing .build())
        self.__funcs = []                       # function definitions (objects exposing .build())
        self.__datas = []                       # data definitions (objects exposing .build())
        self.__defs = []                        # macro definitions (strings after "#define ")
        self.__codelines = []                   # raw statements; ';' appended at build time

    # region Properties fpath, author, version, doc, include_macro_prefix
    @property
    def fpath(self):
        """Return the file path."""
        return self.__fpath

    @property
    def author(self):
        """Return the file author."""
        return self.__author

    @author.setter
    def author(self, author):
        """Set the file author."""
        self.__author = author

    @property
    def version(self):
        """Return the file version."""
        return self.__ver

    @version.setter
    def version(self, version):
        """Set the file version."""
        self.__ver = version

    @property
    def doc(self):
        """Return the file description."""
        return self.__doc

    @doc.setter
    def doc(self, doc):
        """Set the file description."""
        self.__doc = doc

    @property
    def custom_filehead(self):
        """Return the custom file-head text."""
        return self.__custom_filehead

    @custom_filehead.setter
    def custom_filehead(self, custom_filehead):
        """Set the custom file-head text (replaces the generated comment banner)."""
        self.__custom_filehead = custom_filehead

    @property
    def include_macro_prefix(self):
        """Return the include-guard macro prefix."""
        return self.__include_macro_prefix

    @include_macro_prefix.setter
    def include_macro_prefix(self, prefix):
        """Set the include-guard macro prefix."""
        self.__include_macro_prefix = prefix
    # endregion

    # region File operations: add include, class, function, data and macro definitions
    def addincl(self, incl):
        """Add a header include (backslashes are normalized to '/')."""
        self.__incls.append(incl.replace('\\', '/'))

    def addcls(self, cls):
        """Add a class definition."""
        self.__clses.append(cls)

    def addfunction(self, func):
        """Add a function definition."""
        self.__funcs.append(func)

    def adddata(self, data):
        """Add a data definition."""
        self.__datas.append(data)

    def adddef(self, d):
        """Add a macro definition."""
        self.__defs.append(d)

    def addcodeline(self, line):
        """Add a raw code line (a ';' is appended when the file is built)."""
        self.__codelines.append(line)
    # endregion

    # region Build
    def build(self, sort_incl=True):
        """Generate the file on disk.

        :param sort_incl: sort the include list alphabetically before emitting.
        """
        # Generate file head.
        now_date = strftime('%Y-%m-%d', localtime(time()))  # NOTE(review): computed but never used
        if not self.__custom_filehead:
            cnt = '/**\n'
            cnt += ' *@file\t\t{0}\n'.format(op.basename(self.__fpath))
            cnt += ' *@author\t{0}\n'.format(self.__author)
            cnt += ' *@date\t\t{0}\n'.format('<auto generate code, not tag date>')
            cnt += ' *@version\t{0}\n'.format(self.__ver)
            cnt += ' *\n'
            cnt += ' *@brief\tAuto generate by script tools, do not modify this file!!\n'
            cnt += ' *@doc\t{0}\n'.format(self.__doc)
            cnt += ' */\n\n'
        else:
            cnt = self.__custom_filehead
            cnt += '\n'

        # Include guard, emitted only for header files (.h/.hpp).
        basename = op.splitext(op.basename(self.__fpath))[0]
        parent_dirname = op.basename(op.dirname(self.__fpath)).upper()
        extension = op.splitext(op.basename(self.__fpath))[1][1:]
        if extension in ('h', 'hpp'):
            macro = '__{0}_AUTOGEN_{1}_{2}_{3}__'.format(self.include_macro_prefix or 'NOPREFIX',
                parent_dirname, basename.upper(), extension.upper())
            cnt += '#ifndef {0}\n'.format(macro)
            cnt += '#define {0}\n\n'.format(macro)
        else:
            macro = ''

        # Generate include stmts
        incls = self.__incls
        if sort_incl:
            incls = sorted(incls)
        for incl in incls:
            cnt += '#include "{0}"\n'.format(incl)
        if incls:
            cnt += '\n'

        # Generate classes, functions and datas.
        for cls in self.__clses:
            cnt += cls.build()
        for func in self.__funcs:
            cnt += func.build()
        for data in self.__datas:
            cnt += data.build()

        # Generate codelines.
        for codeline in self.__codelines:
            cnt += codeline + ';\n'

        # Generate defines.
        for d in self.__defs:
            cnt += '#define {0}\n'.format(d)
        if self.__defs:
            cnt += '\n'

        # End of file: close the include guard when one was opened.
        if macro:
            cnt += '#endif // !{0}\n\n'.format(macro)
        with open(self.__fpath, 'w+') as f:
            f.write(cnt)
    # endregion

    __Dummy = None
|
998,514 | 032bacb26f207cf152333ce99c1516fecd503cc2 | # Generated by Django 3.0 on 2019-12-22 21:14
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 3.0): make File.storage_name nullable and add a
    (parent, name) uniqueness constraint on Directory."""

    dependencies = [
        ('drive', '0006_auto_20191222_2029'),
    ]

    operations = [
        migrations.AlterField(
            model_name='file',
            name='storage_name',
            field=models.CharField(max_length=256, null=True),
        ),
        migrations.AlterUniqueTogether(
            name='directory',
            unique_together={('parent', 'name')},
        ),
    ]
|
998,515 | 12b77e6e199c11ceced48d649e13bdb4c37857d1 | import scrapy
from scrapy.loader import ItemLoader
from ..items import CmbmcItem
from itemloaders.processors import TakeFirst
class CmbmcSpider(scrapy.Spider):
    """Scrapes news posts from cmb.mc: follows every "Read more" link on the
    listing page and extracts title, body text and date from each article."""

    name = 'cmbmc'
    start_urls = ['https://www.cmb.mc/en/our-latest-news/']

    def parse(self, response):
        # One follow-up request per article; parse_post handles detail pages.
        post_links = response.xpath('//a[text()="Read more"]/@href').getall()
        yield from response.follow_all(post_links, self.parse_post)

    def parse_post(self, response):
        """Build a CmbmcItem from a single article page."""
        title = response.xpath('//h2[@class="title"]/text()').get()
        # Visible body text nodes, excluding text that sits inside links.
        description = response.xpath('//div[@class="text-grid-content"]//text()[normalize-space() and not(ancestor::a)]').getall()
        description = [p.strip() for p in description]
        description = ' '.join(description).strip()
        date = response.xpath('//div[@class="date-actualite"]/h6[@class="txt-info"]/text()').get()
        item = ItemLoader(item=CmbmcItem(), response=response)
        item.default_output_processor = TakeFirst()  # keep only first value per field
        item.add_value('title', title)
        item.add_value('description', description)
        item.add_value('date', date)
        return item.load_item()
|
998,516 | bb83b28d28a09be2cc85f6c4c0cae981d5937b23 | class Solution:
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
res = [-1,-1]
n = len(nums)
if target in nums:
res[0] = nums.index(target)
res[1] = n-nums[::-1].index(target)-1
return res
def searchRange1(self, nums, target):
if not nums:
return [-1,-1]
n = len(nums)
left,right = 0,n-1
while left <= right:
mid = (left+right)//2
if nums[mid] == target:
left = mid
right = mid
while left > 0 and nums[left-1] == target:
left -= 1
while right < n-1 and nums[right+1] == target:
right += 1
return [left,right]
elif nums[mid] > target:
right = mid - 1
else:
left = mid + 1
return [-1,-1]
a = Solution()
print(a.searchRange1([0,0,0,0,1,2,3,3,4,5,6,6,7,8,8,8,9,9,10,10,11,11],0)) |
998,517 | 0a0d551283dff0eb6019180a6568fa263c9d6c71 | from django.contrib import admin
from .models import *
# Register your models here.
# Register all HR models so they are manageable from the Django admin site.
admin.site.register(Department)
admin.site.register(Employee)
admin.site.register(DeptEmp)
admin.site.register(Title)
admin.site.register(Salary)
admin.site.register(DeptManager)
admin.site.register(logs)  # NOTE(review): lowercase class name — consider renaming to Logs
def city_country(city, country, population=0):
    """Return "<city>,<country>", with "-population <n>" appended when
    *population* is non-zero."""
    message = f"{city},{country}"
    if population:
        message += f"-population {population}"
    return message
|
998,519 | 0aa3b1be198bc0c1a9b92230fcd532f7bb880346 | import commands
import string
class sshSessions:
    """Collects host names from several CS-department netgroups and runs
    ``ps`` on each host over ssh.

    Python 2 code: relies on the removed ``commands`` module and on
    print statements.
    """

    def __init__(self):
        self.netgroups = []   # flat list of host names filled by getComputers()
        self.processes = []   # never populated here; presumably reserved for parsed ps output

    def getComputers(self):
        """Query five netgroups and return the combined host-name list."""
        temp = []
        # Each netgrouplist call returns one space/newline separated blob of hosts.
        linuxLogin = commands.getoutput("netgrouplist linux-login-sys")
        csServer = commands.getoutput("netgrouplist cs-server-sys")
        eceGeneral = commands.getoutput("netgrouplist ece-general-sys")
        catService = commands.getoutput("netgrouplist cat-service-sys")
        catServer = commands.getoutput("netgrouplist cat-server-sys")
        temp.extend(linuxLogin.split(' '))
        temp.extend(csServer.split(' '))
        temp.extend(eceGeneral.split(' '))
        temp.extend(catService.split(' '))
        temp.extend(catServer.split(' '))
        # Second pass splits entries that still contain embedded newlines.
        for i in temp:
            self.netgroups.extend(i.split('\n'))
        return self.netgroups

    def getPS(self):
        """Run ps on every collected host via ssh and print the raw output."""
        for i in self.netgroups:
            print commands.getoutput("ssh " + i + " ps -eo pid,pcpu,pmem,args,time").split(' ')
if __name__=="__main__":
s = sshSessions();
print netgroups
|
998,520 | 415801d8f46095b5850a2ec51b0dec2a8f734330 | # Generated by Django 3.2.6 on 2021-08-31 08:36
from django.db import migrations, models
import tinymce.models
class Migration(migrations.Migration):
    """Auto-generated initial migration (Django 3.2.6): creates the Banner,
    Business and Slide tables for the site's landing-page content."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Banner',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): upload_to=None here, unlike Business images — confirm intended.
                ('photo', models.ImageField(upload_to=None, verbose_name='Photo 2')),
                ('title', models.CharField(blank=True, max_length=50, null=True, verbose_name='Grand titre de la photo')),
                ('sub_title', models.CharField(blank=True, max_length=50, null=True, verbose_name='Sous titre de la photo')),
                ('url', models.URLField(max_length=250, verbose_name='Lien')),
            ],
            options={
                'verbose_name': '3. Banner',
            },
        ),
        migrations.CreateModel(
            name='Business',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name="Nom de l'entreprise")),
                ('logo', models.ImageField(upload_to='images/logos', verbose_name='Logo')),
                ('logo_negatif', models.ImageField(upload_to='images/slides', verbose_name='Logo négatif')),
                ('title', models.CharField(blank=True, max_length=50, verbose_name='Titre')),
                ('adress', models.CharField(blank=True, max_length=50, verbose_name='Adresse')),
                ('email', models.EmailField(blank=True, max_length=50, verbose_name="email de l'entreprise")),
                ('phone', models.CharField(blank=True, max_length=50, verbose_name="numéro de téléphone de l'entreprise")),
                ('about', tinymce.models.HTMLField(blank=True, null=True, verbose_name='Text a propos')),
                ('facebook', models.URLField(blank=True, max_length=300, null=True, verbose_name='Lien page Facebook')),
                ('insta', models.URLField(blank=True, max_length=300, null=True, verbose_name='Lien page Instagram')),
            ],
            options={
                'verbose_name': '1. Infomations',
            },
        ),
        migrations.CreateModel(
            name='Slide',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('photo', models.ImageField(upload_to=None, verbose_name='Photo 1')),
                ('url', models.URLField(verbose_name='lien')),
            ],
            options={
                'verbose_name': '2. Grande Photo',
            },
        ),
    ]
|
998,521 | 81e9528bfa94ceb82bc210f1d8322d67e1298b63 | ## Chapter about files and exceptions
with open('pi_digits.txt') as file_object:  # `with` closes the file once access is no longer needed
    contents = file_object.read()
    # Bug fix: rstrip is a method — the original assigned the bound method
    # object (`contents.rstrip`) instead of calling it to strip the newline.
    line = contents.rstrip()
## "with open('text_files/filename.txt') as file_object:" if in a folder

print('\n')
## Absolute Paths:
# file_path = '/home/ehmatthes/other_files/text_files/filename.txt'
# with open(file_path) as file_object:
filename = 'pi_digits.txt'
with open(filename) as file_object:
lines = file_object.readlines()
pi_string = ''
for line in lines:
pi_string += line.strip()
print(pi_string)
print(len(pi_string))
print('\n')
filename = 'pi_million_digits.txt'
with open(filename) as file_object:
lines = file_object.readlines()
pi_string = ''
for line in lines:
pi_string += line.rstrip()
## birthday = input("Enter your birthday, in the form mmddyy: ")
birthday = str(52597)
if birthday in pi_string:
print("Your birthday appears in the first million digits of pi!")
else:
print("Your birthday does not appear in the first million digits of pi.")
## WRITING TO A FILE
filename = 'programming.txt'
with open(filename, 'w') as file_object: # You can open a file in read mode ('r'), write mode ('w'),
# append mode ('a'), or a mode that allows you to read and
# write to the file ('r+')
# 'w' recreates the file everytime!
file_object.write("I love programming.\n")
file_object.write("I love creating new games.\n")
## APPENDING TO A FILE
with open(filename, 'a') as file_object:
file_object.write("I also love finding meaning in large datasets.\n")
file_object.write("I love creating apps that can run in a browser.\n")
print('\n\n')
## EXCEPTIONS
try:
print(5/0)
except ZeroDivisionError:
print("You can't divide by zero!")
print('\n')
print("Give me two numbers, and I'll divide them.")
print("Enter 'q' to quit.")
while True:
first_number = input("\nFirst number: ")
if first_number == 'q':
break
second_number = input("Second number: ")
if second_number == 'q':
break
try:
answer = int(first_number) / int(second_number)
except ZeroDivisionError:
print("You can't divide by zero!")
except ValueError:
print("Type a valid number")
else:
print(answer)
print('\n')
def count_words(filename):
    """Count the approximate number of words in a file and print the result.

    Missing files are skipped silently — deliberate, since the caller loops
    over a list of books that may not all be present.
    """
    try:
        with open(filename) as f_obj:
            text = f_obj.read()
    except FileNotFoundError:
        return
    word_total = len(text.split())
    print("The file " + filename + " has about " + str(word_total) + " words.")
filenames = ['alice.txt', 'siddhartha.txt', 'pi_digits.txt', 'programming.txt']
for filename in filenames:
count_words(filename)
print('\n\n')
## STORING DATA
import json
numbers = [2, 3, 5, 7, 11, 13]
filename = 'numbers.json'
with open(filename, 'w') as f_obj:
json.dump(numbers, f_obj)
with open(filename, 'r') as f_obj:
numbers = json.load(f_obj)
print(numbers)
print('\n')
import json
filename = 'username.json'
try:
with open(filename) as f_obj:
username = json.load(f_obj)
except FileNotFoundError:
username = input("What is your name? ")
with open(filename, 'w') as f_obj:
json.dump(username, f_obj)
print("We'll remember you when you come back, " + username + "!")
else:
print("Welcome back, " + username + "!") |
998,522 | d40f7c5a7cb29938d13522a0c5693c23f8e72721 | #2단에서 9단까지
# Times tables ("gugudan") from 2 through 9.
for y in range(2,10):
    print("%d단" % y)
    for x in range(1,10):
        print("{} X {} : {}".format(y, x, y*x))
    print("")

# Odd or even tables, chosen by the user (1 = odd tables, 2 = even tables).
num = int(input("1을 입력하면 홀수단이, 2를 입력하면 짝수단이 출력됩니다 :"))
if(num == 1):
    for y in range(2,10):
        if y%2 == 0:  # skip even tables
            continue
        for x in range(1,10):
            print("{} X {} : {}".format(y, x, y * x))
        print("")
elif(num == 2):
    for y in range(2,10):
        if y%2 == 1:  # skip odd tables
            continue
        for x in range(1,10):
            print("{} X {} : {}".format(y, x, y * x))
        print("")

# Print only the table for the entered number.
aa = int(input("입력 :"))
for x in range(1,10):
    print("{} X {} : {}".format(aa, x , x * aa))

# Print every table, but skip the column equal to the entered number.
vv = int(input("입력 :"))
for y in range(2, 10):
    for x in range(1, 10):
        if vv == x:
            continue
        print("{} X {} : {}".format(y, x, y * x))
    print("")
998,523 | d33b0926ba0b6463d6b2b6106a63020859a02484 | # Lab 6 Softmax Classifier
import tensorflow as tf
import numpy as np
from neural_network import NeuralNetwork
from nntype import NNType
from neural_network_one_hot import NeuralNetworkOneHot
class XXX (NeuralNetworkOneHot):
    """Softmax classifier for the zoo dataset: 16 input features mapped to
    7 animal classes, one-hot targets, gradient-descent training."""

    def init_network(self):
        # 16 input features; the target column holds a single class index.
        self.set_placeholder(16, 1)
        self.target_to_one_hot(7)  # expand class index to a 7-wide one-hot vector
        logits = self.create_layer(self.X, 16, 7, 'W', 'b')
        hypothesis = self.softmax(logits)
        self.set_hypothesis(hypothesis)
        # The cost op consumes raw logits (softmax applied internally) — not hypothesis.
        self.set_cost_function_with_one_hot(logits, self.get_one_hot())
        self.set_optimizer(NNType.GRADIENT_DESCENT, 0.1)
gildong = XXX()
xdata, ydata = gildong.load_file('data-04-zoo.csv')
gildong.learn(xdata, ydata, 2000, 100)
gildong.print_error()
gildong.evaluate('data-04-zoo.csv')
gildong.show_error()
'''
# Let's see if we can predict
pred = sess.run(prediction, feed_dict={X: x_data})
# y_data: (N,1) = flatten => (N, ) matches pred.shape
for p, y in zip(pred, y_data.flatten()):
print("[{}] Prediction: {} True Y: {}".format(p == int(y), p, int(y)))
'''
'''
Step: 0 Loss: 5.10635
Step: 100 Loss: 0.80030
Step: 200 Loss: 0.48635
Step: 300 Loss: 0.34942
Step: 400 Loss: 0.27165
Step: 500 Loss: 0.22188
Step: 600 Loss: 0.18692
Step: 700 Loss: 0.16078
Step: 800 Loss: 0.14046
Step: 900 Loss: 0.12429
Step: 1000 Loss: 0.11121
Step: 1100 Loss: 0.10050
Step: 1200 Loss: 0.09163
Step: 1300 Loss: 0.08418
Step: 1400 Loss: 0.07786
Step: 1500 Loss: 0.07243
Step: 1600 Loss: 0.06772
Step: 1700 Loss: 0.06361
Step: 1800 Loss: 0.05997
Step: 1900 Loss: 0.05675
Step: 2000 Loss: 0.05386
Acc: 100.00%
''' |
998,524 | ce1d0a13f4816b543570d0adde77b0dae1b209eb | with open('input.txt', 'r') as f:
moves = f.read()
sx = sy = 0
rx = ry = 0
grid = {(0, 0): 2}
directions = {'^': (0, 1),
'v': (0, -1),
'>': (1, 0),
'<': (-1, 0)}
def compute_location(x, y):
    """Record one visit to cell (x, y) in the module-level ``grid`` counter."""
    location = (x, y)
    # dict.get collapses the original membership-test-then-branch into one line.
    grid[location] = grid.get(location, 0) + 1
# Santa takes the even-indexed moves, Robo-Santa the odd-indexed ones.
for i, move in enumerate(moves):
    dx, dy = directions[move]
    if i % 2 == 0:
        sx += dx
        sy += dy
        compute_location(sx, sy)
    else:
        rx += dx
        ry += dy
        compute_location(rx, ry)

# Number of distinct visited houses: len(grid) — the original built a
# throwaway .values() view just to take its length.
print(len(grid))
|
998,525 | 2aae4bfd954fdb2c9349acb5155d78290eb76683 | from .settings_base import *
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1']
STATICFILES_DIRS =[BASE_DIR/'react/dist']
# Origins allowed by django-cors-headers during local development.
# Fix: the original listed "http://localhost:8000" twice.
CORS_ALLOWED_ORIGINS = (
    'http://localhost:3000',
    "http://127.0.0.1:8000",
    "http://localhost:8000",
    "http://0.0.0.0:8000",
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
} |
998,526 | beabefe45f4e9efdcc9c4cec2c99c6d80af04b9a | [
'acorn squash',
'alfalfa sprouts',
'anchovies',
'apple',
'apples',
'artichoke',
'arugula',
'asparagus',
'aubergine',
'avocado',
'bacon',
'baking powder',
'baking soda',
'baking sugar',
'bar sugar',
'basil',
'beans',
'bell peppers',
'blackberries',
'bok choy',
'brassicas',
'bread',
'breadfruit',
'broad beans',
'broccoflower',
'broccoli',
'broccoli rabe',
'broccolini',
'brown sugar',
'brussels sprouts',
'butter',
'butternut pumpkin',
'butternut squash',
'cabbage',
'cactus, edible',
'calabrese',
'cane sugar',
'cannabis',
'capsicum',
'caraway',
'carrot',
'caster sugar',
'castor sugar',
'catfish (farm-raised)',
'cauliflower',
'cayenne pepper',
'celeriac',
'celery',
'cereal grains',
'chard',
'cheese',
'chicory',
'chilli peppers',
'chinese leaves',
'chives',
'chocolate',
'cilantro',
'cinnamon',
'clarified butter',
'coconut',
'coconut milk',
'cod',
'coffee',
'collard greens',
"confectioners' sugar",
'coriander',
'corn',
'corn syrup',
'cottonseed oil',
'courgette',
'cream of tartar',
'cucumber',
'cumin',
'daikon',
'dairy products and dairy substitutes',
'dandelion',
'demerara sugar',
'dough',
'edible cactus',
'eggplant',
'eggs',
'endive',
'fats',
'fava beans',
'fiddlehead',
'fiddlehead fern',
'fish',
'five spice powder',
'flour',
'frisee',
'fructose',
'fruit',
'fruit sugar',
'ful',
'garam masala',
'garlic',
'gem squash',
'ghee',
'giblets',
'ginger',
'grains',
'granulated sugar',
'grape seed oil',
'green onion',
'heart of palm',
'hemp',
'herbs',
'honey',
'horse',
'icing sugar',
'isomalt',
'jackfruit',
'jaggery',
'jams',
'jellies',
'jerusalem artichoke',
'jicama',
'kale',
'kohlrabi',
'kumara',
'leavening agents',
'leek',
'legumes',
'lemongrass',
'lentils',
'lettuce',
'liver',
'maize',
'maple syrup',
'meat',
'milk',
'mortadella',
'mushroom',
'mussels',
'nanaimo bar mix',
'nori',
'nutmeg',
'nutritional yeast flakes',
'nuts',
'octopuses',
'oils',
'okra',
'olive',
'olive oil',
'onion',
'orange blossom water',
'oranges',
'oregano',
'oysters',
'panch puran',
'paprika',
'parsley',
'parsnip',
'pear',
'peas',
'pepper',
'peppers',
'pineapple',
'plantain',
'poppy seeds',
'potato',
'potatoes',
'poultry',
'powdered sugar',
'pumpkin',
'pumpkin seeds',
'radish',
'rape',
'raw sugar',
'refined sugar',
'rice',
'rice flour',
'rock sugar',
'rum',
'salmon',
'salt',
'salt cod',
'scallion',
'seafood',
'seeds',
'sesame seeds',
'shallot',
'skate',
'soda',
'soda, baking',
'soybean',
'spaghetti squash',
'spek',
'spices',
'spinach',
'spring onion',
'squash',
'stockfish',
'sugar',
'sunchoke',
'sunflower seeds',
'superfine sugar',
'sweet potato',
'sweetcorn',
'sweeteners',
'tahini',
'taro',
'teff',
'tomato',
'trout',
'tubers',
'tuna',
'turbanado sugar',
'turnip',
'unrefined sugar',
'vanilla',
'vegetables',
'watercress',
'watermelon',
'white mushroom',
'white sugar',
'xanthan gum',
'yam',
'yeast',
'zucchini'
] |
998,527 | be61320a11a459807ff1b2badd995ef6597beeb6 | # Generated by Django 2.0.2 on 2019-02-01 01:49
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: attach human-readable verbose names to Contact."""

    dependencies = [
        ('store', '0007_pub_name'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='contact',
            options={'verbose_name': 'Contact', 'verbose_name_plural': 'Contacts'},
        ),
    ]
|
998,528 | 0b0d1d2bf463f0c99621b26c47c7c5734ad03628 | import matplotlib
import matplotlib.pyplot as plt
import chess
import chess.svg
import chess.pgn
#from IPython.display import SVG
pgn = open("C:/Users/Aryan Anand/Documents/12323.pgn")
# n = 1
# print(n)
for i in range(1, 3):
act_game = chess.pgn.read_game(pgn)
print(act_game.headers["Event"] + " | " + act_game.headers["White"] +
" - " + act_game.headers["Black"] + " " + act_game.headers["Result"] +
" | " + act_game.headers["Date"])
# import chess.engine
#
# engine = chess.engine.SimpleEngine.popen_uci("C:/Users/iitda/Chess_analysis/stockfish_9_x64.exe")
import chess.uci
engine = chess.uci.popen_engine("C:/Users/Aryan Anand/Documents/stockfish_20011801_x64.exe")
engine.uci()
engine.name
board = act_game.board()
board.fen()
import numpy as np
def fentotensor(inputstr):
    """Convert the piece-placement field of a FEN string to an 8x8x6 tensor.

    Channel k (0-5, ordered P, N, B, R, Q, K) holds +1 where the white
    piece sits and -1 for the black counterpart.  Only the first
    whitespace-separated field of *inputstr* is read; rank 8 maps to
    tensor row 0 (FEN order).

    Raises ValueError for any character that is not a piece letter,
    a rank separator '/', or an empty-square digit.
    """
    pieces_str = "PNBRQK"
    # White pieces map to +1..+6, black (lowercase) to -1..-6.
    pieces_dict = {p: i + 1 for i, p in enumerate(pieces_str)}
    pieces_dict.update({p.lower(): -(i + 1) for i, p in enumerate(pieces_str)})
    valid_spaces = set(range(1, 9))
    boardtensor = np.zeros((8, 8, 6))
    placement = inputstr.split()[0]
    rownr = 0
    colnr = 0
    for i, c in enumerate(placement):
        if c in pieces_dict:
            boardtensor[rownr, colnr, abs(pieces_dict[c]) - 1] = np.sign(pieces_dict[c])
            colnr = colnr + 1
        elif c == '/':  # new row
            rownr = rownr + 1
            colnr = 0
        # BUG FIX: guard with isdigit() so that an invalid character reaches
        # the explicit ValueError below instead of crashing inside int(c)
        # with an unrelated message.
        elif c.isdigit() and int(c) in valid_spaces:
            colnr = colnr + int(c)
        else:
            raise ValueError("invalid fenstr at index: {} char: {}".format(i, c))
    return boardtensor
def countpieces(fen):
    """Total number of pieces on the board encoded by *fen*."""
    # Every occupied square contributes +/-1 to the tensor, so the sum of
    # absolute values equals the piece count.
    tensor = fentotensor(fen)
    return np.sum(np.abs(tensor))
countpieces(board.fen())
def pawnending(fen):
    """Return True when only kings and pawns remain on the board.

    Channels 1-4 of the position tensor are N, B, R, Q; a pawn ending
    has none of those pieces left.
    """
    boardtensor = fentotensor(fen)
    counts = np.sum(np.abs(boardtensor), axis=(0, 1))
    # Collapsed the if/else True/False ladder into a single boolean return.
    return not bool(np.any(counts[1:5]))
def rookending(fen):
    """Return True when rooks are the only pieces besides kings and pawns.

    Requires at least one rook (channel 3) and no knights, bishops or
    queens (channels 1, 2, 4).
    """
    boardtensor = fentotensor(fen)
    counts = np.sum(np.abs(boardtensor), axis=(0, 1))
    # Collapsed the if/else True/False ladder into a single boolean return.
    return bool(counts[3] > 0) and not bool(np.any(counts[[1, 2, 4]]))
# Register a standard info handler.
info_handler = chess.uci.InfoHandler()
engine.info_handlers.append(info_handler)
# Per-move statistics collected while replaying the game.
counts = {"movecount": [], "scores": [], "check": [], "bestdiff": [], "pawnending": [], "rookending": []}
# Iterate through all moves and play them on a board.
board = act_game.board()
for move in act_game.mainline_moves():
    board.push(move)
    cnt = len([i for i in board.legal_moves])
    counts["movecount"].append(cnt)
    counts["check"].append(board.is_check())
    counts["pawnending"].append(pawnending(board.fen()))
    counts["rookending"].append(rookending(board.fen()))
    # Start a search.
    engine.position(board)
    engine.go(movetime=100)
    # Engine centipawn scores are reported from the side to move; negate for
    # Black so the series is always from White's point of view.
    if board.turn == chess.WHITE:
        counts["scores"].append((info_handler.info["score"][1][0]) / 100)
    else:
        counts["scores"].append((-info_handler.info["score"][1][0]) / 100)
    # Briefly evaluate every legal reply to measure how "forced" the best
    # move is (gap between the two best replies).
    nextmovescores = []
    for mov in board.legal_moves:
        board.push(mov)
        engine.position(board)
        engine.go(movetime=2)
        if board.turn == chess.WHITE:
            if info_handler.info["score"][1][0] != None:
                nextmovescores.append(info_handler.info["score"][1][0])
        elif board.turn == chess.BLACK:
            if info_handler.info["score"][1][0] != None:
                nextmovescores.append(-info_handler.info["score"][1][0])
        board.pop()
    if len(nextmovescores) > 1:
        nextmovescores.sort(reverse=True)
        counts["bestdiff"].append(nextmovescores[0] - nextmovescores[1])
    else:
        counts["bestdiff"].append(0)
# NOTE(review): SVG is commented out of the imports above -- this call raises
# NameError at runtime unless IPython.display.SVG is imported; confirm intent.
SVG(chess.svg.board(board=board, size=400))
fig, ax = plt.subplots()
ax.plot(counts["scores"])
ax.grid()
plt.show()
import mplcursors
mplcursors.cursor(hover=True)
|
998,529 | ec4b04a4f129abe5ce95691cbfe0d60fd48632d2 | from __future__ import unicode_literals
def test_push(session):
    """Push a three-segment payload to a legacy RAG widget.

    Re-recording instructions (if the cassette must be regenerated):
    set GECKO_RECORD_MODE=once and GECKO_API_KEY to a valid api key,
    remove tests/casettes/test_session.test_push.json, point ``widget``
    at a newly created (legacy) RAG widget id and run the test; the RAG
    widget can then safely be deleted.
    """
    widget = '120885-142a61f0-74a2-0134-3615-22000b5980c2'
    segments = [
        {'value': 20, 'text': 'Overdue'},
        {},
        {'value': 80, 'text': 'Good'},
    ]
    session.push(widget, {'item': segments})
|
998,530 | 5473130c423bf6fd0274a2481bdd0bd0114fa3d5 | #!/usr/bin/python3
from PySide2 import QtCore, QtGui, QtWidgets
from loadconfigs import getStyle
from reimplemented import Buttons, HOVER, DEFAULT
from menu import MyMenu
class Interface(QtWidgets.QWidget):
    """Main point-of-sale window: logo, action buttons, purchase list, totals.

    Everything is laid out on one QGridLayout; construction is split
    across the helper methods invoked from startMethods().
    """

    # Stylesheet text, loaded once at class-definition time from the config.
    STYLE: str = getStyle()
    # Current x position of the scrolling marquee label (see nameMarket()).
    XX: int = 0

    def __init__(self) -> None:
        super(Interface, self).__init__()
        self.places = QtWidgets.QGridLayout(self)
        self.places.setContentsMargins(0, 0, 0, 0)
        self.startMethods()

    def startMethods(self) -> None:
        """Build every sub-widget in the order the layout expects."""
        self.startLogo()
        self.buttons()
        self.purchasingView()
        self.pricesShow()
        self.setWidgetName()
        self.pushStyle()
        self.menu()
        self.nameMarket()

    def menu(self) -> None:
        """Attach the application menu bar."""
        self.menuBar = MyMenu(self)

    def startLogo(self) -> None:
        """Place the cart logo (fixed 300x300 frame) at grid cell (0, 0)."""
        image = QtGui.QPixmap('images/cart.png')
        self.frameLogo = QtWidgets.QFrame()
        conteiner = QtWidgets.QGridLayout(self.frameLogo)
        self.logo = QtWidgets.QLabel()
        self.frameLogo.setFixedSize(300, 300)
        self.logo.setPixmap(image)
        self.logo.setScaledContents(True)
        conteiner.addWidget(self.logo)
        self.places.addWidget(self.frameLogo, 0, 0)

    def buttons(self) -> None:
        """Create the four action buttons (search/change/finish/remove), cell (1, 0)."""
        self.frameButtons = QtWidgets.QFrame()
        self.btSearch = Buttons('Buscar', HOVER, DEFAULT)
        self.btThing = Buttons('Troco', HOVER, DEFAULT)
        self.btFinish = Buttons('Finalizar', HOVER, DEFAULT)
        self.btRemove = Buttons('Remover', HOVER, DEFAULT)
        conteiner = QtWidgets.QGridLayout(self.frameButtons)
        widget = [
            self.btSearch, self.btThing,
            self.btFinish, self.btRemove
        ]
        for wid in widget:
            conteiner.addWidget(wid)
            wid.setCursor(QtCore.Qt.PointingHandCursor)
        self.places.addWidget(self.frameButtons, 1, 0)

    def purchasingView(self) -> None:
        """Product-code entry plus the purchase list tree, grid cell (0, 1)."""
        self.framePurchase = QtWidgets.QFrame()
        conteiner = QtWidgets.QGridLayout(self.framePurchase)
        label = QtWidgets.QLabel('Código do Produto: ')
        self.entryCod = QtWidgets.QLineEdit()
        self.tree = QtWidgets.QTreeWidget()
        not_scroll = QtCore.Qt.ScrollBarAlwaysOff
        self.tree.setWordWrap(False)
        self.tree.setHeaderLabels(('Produto', 'Qunt.', 'Preço', 'Total'))
        self.tree.setColumnWidth(0, 400)
        self.tree.setHorizontalScrollBarPolicy(not_scroll)
        self.tree.setVerticalScrollBarPolicy(not_scroll)
        conteiner.addWidget(label)
        conteiner.addWidget(self.entryCod)
        conteiner.addWidget(self.tree)
        self.places.addWidget(self.framePurchase, 0, 1)

    # => total and current purchase price labels...
    def pricesShow(self) -> None:
        """Labels showing the current item price and purchase total, cell (0, 2)."""
        self.framePrices = QtWidgets.QFrame()
        conteiner = QtWidgets.QGridLayout(self.framePrices)
        self.lbPriceText = QtWidgets.QLabel('Preço do Item')
        self.lbPriceCurrent = QtWidgets.QLabel('R$ 0,00')
        self.lbPriceTextTotal = QtWidgets.QLabel('Total da Compra')
        self.lbPriceTotal = QtWidgets.QLabel('R$ 0,00')
        widgets = [
            self.lbPriceText,
            self.lbPriceCurrent,
            self.lbPriceTextTotal,
            self.lbPriceTotal
        ]
        for wid in widgets:
            conteiner.addWidget(wid)
            # object names are overwritten with stylesheet ids in setWidgetName()
            wid.setObjectName(str(wid))
        self.places.addWidget(self.framePrices, 0, 2)

    def nameMarket(self, label_t: str = 'mercadinho') -> None:
        """Scrolling marquee with the market name along the bottom row."""
        def animation() -> None:
            # Move the label one pixel left per timer tick; wrap back to the
            # right edge once it has fully scrolled off the left side.
            self.label_latters.move(self.XX, 100)
            CONSTANT_WIDTH = frame.geometry().width()
            t_max = self.label_latters.geometry().getRect()[2]
            self.XX -= 1
            if self.XX <= -t_max:
                self.XX = CONSTANT_WIDTH
        frame = QtWidgets.QFrame()
        self.label_latters = QtWidgets.QLabel(label_t, frame)
        self.XX = frame.geometry().width()
        self.timer = QtCore.QTimer(self)
        self.timer.start(10)
        self.timer.timeout.connect(animation)
        self.label_latters.setStyleSheet('font-size: 100pt;')
        self.places.addWidget(frame, 1, 1, 1, 2)

    def setWidgetName(self) -> None:
        """Assign the object names referenced by the stylesheet selectors."""
        self.setObjectName('mainWindow')
        self.lbPriceText.setObjectName('showLabelPrice')
        self.lbPriceCurrent.setObjectName('priceCurrentItem')
        self.lbPriceTextTotal.setObjectName('labelTextPriceTotal')
        self.lbPriceTotal.setObjectName('labelPriceTotal')

    def pushStyle(self) -> None:
        """Apply the class-level stylesheet to the whole window."""
        self.setStyleSheet(self.STYLE)
|
998,531 | 50bd94618d2e2b359dd4ab79cf3b3539ff2c049d | #
# @lc app=leetcode id=905 lang=python3
#
# [905] Sort Array By Parity
#
# @lc code=start
class Solution:
    def sortArrayByParity(self, A: List[int]) -> List[int]:
        """Return A with every even number before every odd number.

        Relative order inside the even group and inside the odd group is
        preserved: sorted() is stable, so keying on the low bit yields
        exactly the same result as two-pass bucketing.
        """
        return sorted(A, key=lambda v: v & 1)
# @lc code=end
|
998,532 | 539e4e378a35db41b97b31b4f97650d8bb9120f6 | import time
import utils.encryption
from utils.encryption import AESCipher
class DbBeaconAPI():
    """MongoDB access layer for beacon documents.

    Mediates between three collections exposed by *mongo_db_connector*:
    templates (default configs), beacon_data (device-reported state) and
    beacon_web (state edited through the web UI).
    """

    def __init__(self, mongo_db_connector):
        self.mongo_db_connector = mongo_db_connector
        # Field names come from database configuration, not hard-coded strings.
        self.config_comparison_fields = self.get_field("config comparison")
        self.maintenance_field = self.get_field("maintenance field")
        self.timestamp_field = self.get_field("timestamp field")
        self.aes_cipher = AESCipher.keyFromVariable(utils.encryption.key)

    def get_beacon_config(self, config_type):
        """Return the template document for *config_type* without the Mongo _id."""
        response = self.mongo_db_connector.get_collection_beacon_template().find_one(
            {"type": config_type},
            {self.mongo_db_connector.mongo_db_identifier: False})
        return response

    def get_registered_beacons(self):
        """Return the distinct identifiers of all beacons in beacon_data."""
        db_cursor = self.mongo_db_connector.get_collection_beacon_data().find(
            {},
            {self.mongo_db_connector.mongo_db_identifier: False,
             self.mongo_db_connector.identifier: True}).distinct(self.mongo_db_connector.identifier)
        return list(db_cursor)

    def beacon_exists(self, identifier):
        """True when at least one beacon_data document carries *identifier*."""
        # NOTE(review): Cursor.count() is deprecated/removed in newer pymongo
        # releases; count_documents() is the replacement -- confirm driver version.
        count = self.mongo_db_connector.get_collection_beacon_data().find(
            {self.mongo_db_connector.identifier: identifier},
            {self.mongo_db_connector.mongo_db_identifier: True}).count()
        if count > 0:
            return True
        else:
            return False

    def insert_beacon(self, beacon):
        """Upsert *beacon* into both data and web collections; True if both acked."""
        self.process_maintenance(beacon)
        key = self.mongo_db_connector.identifier
        result_data = self.mongo_db_connector.get_collection_beacon_data().update_one(
            {key : beacon[key]},
            {'$set': beacon},
            upsert=True)
        result_web = self.mongo_db_connector.get_collection_beacon_web().update_one(
            {key : beacon[key]},
            {'$set': beacon},
            upsert=True)
        return all([result_data.acknowledged, result_web.acknowledged])

    def update_beacon(self, beacon):
        """Upsert *beacon* into the data collection only; True when acknowledged."""
        self.process_maintenance(beacon)
        key = self.mongo_db_connector.identifier
        result = self.mongo_db_connector.get_collection_beacon_data().update_one(
            {key : beacon[key]}, {'$set': beacon}, upsert=True)
        return result.acknowledged

    def get_beacon_configs_to_update(self):
        """Return web configs that differ from device state, excluding broken beacons."""
        beacons_to_update = []
        beacons_config = self.mongo_db_connector.get_collection_beacon_data().find()
        beacons_web_config = self.mongo_db_connector.get_collection_beacon_web().find(
            {},
            {self.mongo_db_connector.mongo_db_identifier: False})
        # NOTE(review): beacons_web_config is a cursor that get_beacon_web_config
        # re-iterates for every beacon -- a pymongo cursor is exhausted after one
        # full pass, so later lookups may come up empty; consider list(...)-ing it.
        for beacon_config in beacons_config:
            identifier = beacon_config[self.mongo_db_connector.identifier]
            beacon_web_config = self.get_beacon_web_config(beacons_web_config, identifier)
            if beacon_web_config:
                if self.is_config_updated(beacon_config, beacon_web_config) and \
                        not self.is_broken(beacon_config):
                    beacons_to_update.append(beacon_web_config)
        return beacons_to_update

    def get_beacon_web_config(self, beacons_web_config, identifier):
        """Return the first web config matching *identifier*, or None."""
        for beacon_web_config in beacons_web_config:
            if beacon_web_config[self.mongo_db_connector.identifier] == identifier:
                return beacon_web_config
        return None

    def is_config_updated(self, beacon_config, beacon_web_config):
        """True when any configured comparison field differs between the two."""
        for config_field in self.config_comparison_fields:
            # nested fields are configured as comma-separated key paths
            keys = config_field.split(",")
            if self.config_compare(beacon_config, beacon_web_config, keys):
                return True
        return False

    def config_compare(self, beacon_config, beacon_web_config, keys):
        """Recursively walk *keys* into both dicts; True when the leaf values differ."""
        key = keys[0]
        if isinstance(beacon_config[key], dict):
            del keys[0]
            return self.config_compare(beacon_config[key], beacon_web_config[key], keys)
        else:
            if key == "new password":
                # passwords are stored encrypted; compare the decrypted values
                dec_config = self.get_cipher().decrypt(beacon_config[key])
                dec_config_web = self.get_cipher().decrypt(beacon_web_config[key])
                return dec_config != dec_config_web
            else:
                return beacon_config[key] != beacon_web_config[key]

    def process_maintenance(self, beacon):
        """Move an embedded maintenance entry into the web collection's history."""
        if self.maintenance_field in beacon:
            self.push_to_maintenance(beacon)
            del beacon[self.maintenance_field]

    def push_to_maintenance(self, beacon):
        """Append the beacon's maintenance entry, timestamped, to beacon_web."""
        timestamp = self.create_timestamp()
        maintenance = beacon[self.maintenance_field]
        maintenance[self.timestamp_field] = timestamp
        field = self.maintenance_field
        key = self.mongo_db_connector.identifier
        self.mongo_db_connector.get_collection_beacon_web().update_one(
            {key: beacon[key]},
            {'$push': {field: maintenance}},
            upsert=True)

    def is_broken(self, beacon):
        """Broken means every entry of the beacon's "status" dict is falsy."""
        # If at least one status is true, then not broken, otherwise broken
        return not any(beacon["status"].values())

    def create_timestamp(self):
        """Current Unix time as an int."""
        return int(time.time())

    def get_field(self, field, entry_type="web api"):
        """Look a configured field name up through the connector."""
        return self.mongo_db_connector.get_field(field, entry_type)

    def get_cipher(self):
        """Return the AES cipher built in __init__."""
        return self.aes_cipher
|
998,533 | a4472737ed5d65eb67b89c2d4e49534ae643976d | #! /usr/bin/python
'''
This script takes the tables of miRNAs identified with miRBase
and puts them into one long list and also makes a fasta file
out of them that can be used to align/compare with. This
script collapses samples (output has no duplicates) and
numbers of reads are converted into percentages (from the
corresponding sample(s)). Percentages are calculated to mean
percentages if a sequence is present in multiple samples.
Input = miRBase_found_#.csv (where # is all the samples)
Output = miRBase_found-all.csv + miRBase_found-all.fasta
'''
from sys import argv
import os
### argvs to be used are:
# 1: folder with the files (miRBase_found_#.csv)
# 2: output .csv
# 3: output .fasta
def read_lists(read_folder):
    '''
    Read every "miRBase_found_*.csv" file in *read_folder* and merge them
    into one dictionary keyed by sequence.

    Within each file, every read count is first converted into a
    percentage of that file's total reads.  When the same sequence occurs
    in several files, the stored percentage is the running mean over
    those files and the third list element counts the samples.

    Input  = miRBase_found_#.csv (files)
    Output = dict: sequence -> [miRNA name, mean percentage, sample count]
    '''
    ## automatically detect files to read from the supplied folder;
    ## note read_folder must end with a path separator because the file
    ## names are joined by plain string concatenation.
    inputfiles = [read_folder + csv for csv in os.listdir(read_folder)
                  if csv.startswith("miRBase_found_")]
    writedata = {}
    for inputfile in inputfiles:
        writelist = []        # rows of this file: [name, count, sequence]
        total_sequences = 0   # total read count of this file
        with open(inputfile, 'r') as tables:
            for line in tables:
                line = line.strip('\n').split('\t')
                name = line[0]
                number = int(line[1])
                sequence = line[2]
                writelist.append([name, number, sequence])
                total_sequences += number
        print(total_sequences, " in ", inputfile)
        for name, number, sequence in writelist:
            percentage = number / total_sequences * 100
            if sequence not in writedata:
                # first sighting: store with a sample count of 1
                writedata[sequence] = [name, percentage, 1]
            else:
                prev_name, prev_mean, prev_n = writedata[sequence]
                # BUG FIX: the running mean must be weighted by the *previous*
                # sample count.  The old code incremented the count first and
                # then used it as the weight, over-weighting the stored mean.
                new_mean = (prev_mean * prev_n + percentage) / (prev_n + 1)
                writedata[sequence] = [prev_name, new_mean, prev_n + 1]
    return writedata
def write_output(data, outputcsv, outputfasta):
    '''
    Write the merged miRNA data to a tab-separated .csv file and to a
    fasta file.  *data* is the dictionary produced by read_lists():
    sequence -> [name, mean percentage, sample count].

    Entries are emitted in descending order of occurrence percentage;
    DNA 'T' bases are rendered as RNA 'U' in the csv column only.
    '''
    # Flatten the dict into [name, percentage, sequence] rows, highest first.
    ranked = sorted(
        ([info[0], info[1], seq] for seq, info in data.items()),
        key=lambda row: row[1],
        reverse=True,
    )
    with open(outputcsv, 'w') as csv:
        csv.write("Reference\tOccurrence (%)\tSequence\n")
        for ref, pct, seq in ranked:
            csv.write(str(ref) + '\t' + '{0:.5f}'.format(pct) + '\t' + seq.replace('T', 'U') + '\n')
    with open(outputfasta, 'w') as fasta:
        for ref, pct, seq in ranked:
            fasta.write('>' + ref + '_x' + "{0:.5f}%".format(pct) + '\n' + seq + '\n')
    print("The list and fasta file are ready!")
def main(argv):
    '''
    Command-line glue: read all per-sample tables from the folder in
    argv[0] and write the merged csv/fasta named by argv[1] / argv[2]
    (both created inside that same folder).
    '''
    folder = argv[0]
    merged = read_lists(folder)
    write_output(merged, folder + argv[1], folder + argv[2])

if __name__ == "__main__":
    main(argv[1:])
|
998,534 | 1e86f2bd3f9df376c49b0f7de469b86283080c7e | #
# Copyright (c) 2004-2006
# Andreas Kloeckner
#
# Permission to use, copy, modify, distribute and sell this software
# and its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear
# in supporting documentation. The authors make no representations
# about the suitability of this software for any purpose.
# It is provided "as is" without express or implied warranty.
#
"""
PyLinear's module for random matrices.
"""
import random
import pylinear.array as num
import pylinear.computation as comp
def write_random_vector(vec):
    """Fill *vec* in place with N(0, 10) samples (plus imaginary part for Complex64)."""
    size, = vec.shape
    for i in range(size):
        value = random.normalvariate(0,10)
        if vec.dtype == num.Complex64:
            value += 1j*random.normalvariate(0,10)
        vec[i] = value
def make_random_vector(size, dtype):
    """Return a fresh random vector of *size* entries with the given dtype."""
    vec = num.zeros((size,), dtype)
    write_random_vector(vec)
    return vec
def make_random_onb(size, dtype):
    """Return a random orthonormal basis of *size* vectors.

    BUG FIX: the original called the non-existent ``makeRandomVector``
    (a camelCase leftover); the actual helper in this module is
    ``make_random_vector``.
    """
    vectors = [make_random_vector(size, dtype) for i in range(size)]
    vectors = comp.orthonormalize(vectors)
    # Sanity check: the result must be orthonormal to machine precision.
    # NOTE(review): ``delta`` and ``sp`` are not defined in this module --
    # presumably Kronecker delta and scalar product helpers; confirm they
    # exist at module scope or this assertion raises NameError.
    for i in range(size):
        for j in range(size):
            assert abs(delta(i, j) - sp(vectors[i], vectors[j])) < 1e-12
    return vectors
def make_random_orthogonal_matrix(size, dtype):
    """Random orthogonal (unitary, for complex dtypes) matrix.

    Builds *size* random vectors, orthonormalizes them and uses each as
    one column of the result.
    """
    vectors = []
    for i in range(size):
        v = num.zeros((size,), dtype)
        write_random_vector(v)
        vectors.append(v)
    orth_vectors = comp.orthonormalize(vectors)
    mat = num.zeros((size,size), dtype)
    for i in range(size):
        # each orthonormal vector becomes one column
        mat[:,i] = orth_vectors[i]
    return mat
def make_random_skewhermitian_matrix(size, dtype):
    """Random skew-hermitian matrix: a[j,i] == -conj(a[i,j]).

    For complex dtypes the diagonal is purely imaginary; for real dtypes
    it stays zero (skew-symmetric case).
    """
    a = num.zeros((size, size), dtype)
    # fill diagonal
    if dtype is num.Complex:
        for i in range(size):
            a[i,i] = 1j*random.normalvariate(0,10)
    def _conjugate(x):
        # real scalars have no .conjugate() in this numeric tower
        try:
            return x.conjugate()
        except AttributeError:
            return x
    # fill rest: mirror each lower-triangle entry with its negated conjugate
    for i in range(size):
        for j in range(i):
            value = random.normalvariate(0,10)
            if dtype is num.Complex:
                value += 1j*random.normalvariate(0,10)
            a[i,j] = value
            a[j,i] = -_conjugate(value)
    return a
def make_random_spd_matrix(size, dtype):
    """Random symmetric positive-definite matrix via Q^H * D * Q."""
    eigenvalues = make_random_vector(size, dtype)
    eigenmat = num.zeros((size,size), dtype)
    for i in range(size):
        # absolute values guarantee strictly positive eigenvalues, hence SPD
        eigenmat[i,i] = abs(eigenvalues[i])
    orthomat = make_random_orthogonal_matrix(size, dtype)
    return orthomat.H * eigenmat *orthomat
def make_random_full_matrix(size, dtype):
    """Dense random size x size matrix with N(0, 10) entries."""
    result = num.zeros((size, size), dtype)
    for row in range(size):
        for col in range(size):
            value = random.normalvariate(0,10)
            if dtype == num.Complex64:
                value += 1j*random.normalvariate(0,10)
            result[row,col] = value
    return result
def make_random_matrix(size, dtype, flavor = num.DenseMatrix):
    """Sparse-ish random size x size matrix: about 10% of entries are filled.

    Entries are N(0, 10) samples (with an imaginary part for Complex64);
    (row, col) collisions accumulate by addition.
    """
    result = num.zeros((size, size), dtype, flavor)
    # BUG FIX: use integer floor division -- under Python 3 ``size ** 2 / 10``
    # yields a float and range() would raise TypeError.
    elements = size ** 2 // 10
    for i in range(elements):
        row = random.randrange(0, size)
        col = random.randrange(0, size)
        value = random.normalvariate(0, 10)
        if dtype == num.Complex64:
            value += 1j*random.normalvariate(0, 10)
        result[row, col] += value
    return result
|
998,535 | 73b2fc4d39095506acc1af5248359ceeee29ad23 |
# Smoke-test stub: reports whether the file was executed directly or imported.
if __name__ == '__main__':
    print('Review your code')
else:
    # BUG FIX: corrected the typo "Your are" -> "You are" in the message.
    print('You are not in the correct folder location to run this test')
|
998,536 | cf52d7f295629ea28009ae9f5ee98f8375979718 | import math
import sys
import Image
import ImageOps
import ImageChops
import matplotlib
import numpy as np
import matplotlib.pyplot as plt
import numpy.fft as fft
from numpy import cos
from pylab import imshow
from matplotlib.gridspec import GridSpec
from global_contrast import *
from histogram_equalize import *
def load_gray(path):
    """Open the image at *path* and convert it to 8-bit grayscale."""
    return ImageOps.grayscale(Image.open(path))
def show_gray(img):
    """Render *img* with a grayscale colormap."""
    return imshow(img, cmap=plt.cm.gray)
def get_hist_fft(im):
    """Return the DFT of the image's intensity histogram."""
    return fft.fft(np.array(im.histogram()))
def compare_display(im, im_cont):
    """
    Display a number of images in one figure window:
    * Original im (@im)
    * Contrast enhanced image (@im_cont)
    * @im_cont - @im (difference)
    * Histograms of @im, @im_cont
    * dft of *histogram* of @im, @im_cont
    """
    # 3x3 grid: the two images top-left, the difference spans the right
    # column, histogram and its spectrum stacked underneath.
    gs = GridSpec(3, 3)
    plt.subplot(gs[0,0])
    show_gray(im)
    plt.xlabel('original')
    plt.subplot(gs[0,1])
    show_gray(im_cont)
    plt.xlabel('contrast')
    plt.subplot(gs[:,2])
    show_gray(ImageChops.subtract(im_cont, im))
    plt.xlabel('subtracted')
    plt.subplot(gs[1,:-1])
    plt.plot(im.histogram(), label='original')
    plt.plot(im_cont.histogram(), label='contrast')
    plt.legend()
    plt.subplot(gs[2,:-1])
    # fftshift centres the DC component for a symmetric magnitude plot
    plt.plot(np.abs(fft.fftshift(fft.fft(im.histogram()))), label='original')
    plt.plot(np.abs(fft.fftshift(fft.fft(im_cont.histogram()))), label='contrast')
    plt.legend()
def main():
    """Compare auto-contrast and histogram equalization on one image.

    (Python 2 script: note the print statements.)  Uses the image path
    from argv[1], defaulting to the Lena test image.
    """
    path = sys.argv[1] if len(sys.argv) > 1 else 'img/lena.tiff'
    im = load_gray(path)
    im_cont = ImageOps.autocontrast(im)
    gcd = GlobalContrastDetector()
    print "Energy (original): %s" % gcd.fft_energy(im)
    print "Energy (contrast): %s" % gcd.fft_energy(im_cont)
    compare_display(im, im_cont)
    im_eq = ImageOps.equalize(im)
    hed = HistogramEqualizationDetector()
    print "Distance from uniform (original): %s" % hed.distance_from_uniform(im)
    print "Distance from uniform (equalized): %s" % hed.distance_from_uniform(im_eq)
    show_gray(im)
    plt.figure()
    show_gray(im_eq)
    plt.figure()
    plt.plot(im.histogram(), label="original")
    plt.plot(im_eq.histogram(), label="equalized")
    plt.plot(hed.handle_exposure(im_eq.histogram()), label="exposure handling equalized")
    plt.legend()
    plt.show()
if __name__ == '__main__':
    main()
|
998,537 | 049969593ac3dc3cc10db5ac27a50a937645183c | from pulp import *
import time
# Linear-programming model: split an advertising budget between TV and radio
# to maximise effect, subject to a fixed TV/radio ratio.  Prompts and printed
# labels are user-facing Russian strings and are left untouched.
budget = int(input("Введите Ваш бюджет на рекламу:\n"))  # e.g. 10000 -- total advertising budget
minuteTV = int(input("Сколько стоит минута рекламы на ТВ?\n"))  # e.g. 90 units per TV minute
minuteRad = int(input("Сколько стоит минута рекламы на радио?\n"))  # e.g. 5 units per radio minute
effect = int(input("Во сколько раз реклама на ТВ эффективнее радиорекламы?\n"))  # e.g. TV is 30x more effective
relation = int(input("Во сколько раз будет рекламы на ТВ больше, чем на радио?\n"))  # e.g. 3x ratio between the two
start = time.time()
# Decision variables: advertising time bought for each channel (>= 0).
budgetTVAd = pulp.LpVariable("budgetTVAd", lowBound=0)
budgetRadioAd = pulp.LpVariable("budgetRadioAd", lowBound=0)
problem = pulp.LpProblem('0',pulp.LpMaximize)
problem += effect * budgetTVAd + budgetRadioAd, "Функция цели"
problem += minuteTV * budgetTVAd + minuteRad * budgetRadioAd <= budget, "1"
# NOTE(review): the ratio is hard-coded as 3 -- the `relation` value read
# above is never used; confirm whether constraint "2" should use `relation`.
problem += budgetRadioAd == 3 * budgetTVAd, "2"
problem.solve()
print ("Результат:")
for variable in problem.variables():
    print (variable.name, "=", variable.varValue)
print ("Прибыль:")
print (value(problem.objective))
stop = time.time()
print ("Время :")
print(stop - start)
998,538 | be63e1f44a67706210ab667b70dfe5eeb7536880 | from pyspark.sql import SparkSession
from pyspark.sql.functions import col
from pyspark.sql.types import BooleanType
# create spark session (local mode, all cores)
spark = SparkSession.builder\
    .appName("Github push counter")\
    .master("local[*]")\
    .getOrCreate()
# spark context
sc = spark.sparkContext
# load json file: one GitHub Archive hour of events, one JSON object per line
filePath = "/Users/sahil/data/githubarchive/2015-01-01-0.json"
ghLogs = spark.read.json(filePath)
print(f"type(ghLogs): {type(ghLogs)}")
print(f"all events: {ghLogs.count()}")
# filter ghLogs down to push events only
pushes = ghLogs.filter("type = 'PushEvent'")
print(f"type(pushes): {type(pushes)}")
print(f"push events: {pushes.count()}")
# group by pushing user and count pushes per login
grouped = pushes.groupBy("actor.login").count()
print(f"type(grouped): {type(grouped)}")
ordered = grouped.orderBy("count", ascending=False)
print(f"type(ordered): {type(ordered)}")
ordered.show(10)
# filter only employees
# load employees in a set (one login per line)
empPath = "/Users/sahil/Projects/github.com/spark-in-action/ch03/ghEmployees.txt"
employees = {emp.strip() for emp in open(empPath).readlines()}
print(f'employee count: {len(employees)}')
bcEmployees = sc.broadcast(employees) # broadcast the `employees` var to every executor
print(f"type(bcEmployees): {type(bcEmployees)}")
# UDF testing membership against the broadcast employee set
isEmp = lambda user: user in bcEmployees.value
isEmpUdf = spark.udf.register("SetContainsUdf", isEmp, BooleanType())
print(f'type(isEmpUdf): {type(isEmpUdf)}')
filteredEmployees = ordered.filter(isEmpUdf(col("login")))
filteredEmployees.show()
998,539 | 5d9fdd13cca9a2e5119727bdeb8a4b60236f5c81 | print('Content-Type:text/html \n\n')
print("hell word") |
998,540 | cbc324755363028d8737c00b2f4a4e7374812dea | #!/usr/bin/env python3
#
# (c) Yoichi Tanibayashi
#
"""
ytbg.py
"""
__author__ = 'Yoichi Tanibayashi'
__date__ = '2020/05'
from ytBackgammonServer import ytBackgammonServer
from flask import Flask, request
from flask_socketio import SocketIO
import json
from MyLogger import get_logger
import click
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
MY_NAME = 'ytBackgammon Server'
VERSION = '0.80'
_log = get_logger(__name__, True)
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
app.config['DEBUG'] = False
app.config['JSON_AS_ASCII'] = False  # XXX workaround for garbled (mojibake) output -- does not work, TBD
socketio = SocketIO(app, cors_allowed_origins='*')
svr_id = "0"
svr = None  # set in main() before the server starts serving requests
@app.route('/')
def top():
    """Serve the game page at the site root."""
    _log.debug('')
    return svr.app_index()
@app.route('/p1')
def index_p1():
    """Serve the game page for player 1."""
    _log.debug('')
    return svr.app_index()
@app.route('/p2')
def index_p2():
    """Serve the game page for player 2."""
    _log.debug('')
    return svr.app_index()
@socketio.on('connect')
def handle_connect():
    """Forward a new socket.io connection to the server object."""
    svr.on_connect(request)
@socketio.on('disconnect')
def handle_disconnect():
    """Forward a socket.io disconnect to the server object."""
    svr.on_disconnect(request)
@socketio.on_error_default
def default_error_handler(e):
    """Forward any unhandled socket.io error to the server object."""
    svr.on_error(request, e)
@socketio.on('json')
def handle_json(msg):
    """Forward a JSON game message to the server object."""
    _log.debug('msg=%s', json.dumps(msg, ensure_ascii=False))
    svr.on_json(request, msg)
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument('server_id', type=str)
@click.option('--port', '-p', 'port', type=int, default=5001,
              help='port number')
@click.option('--image_dir', '-i', 'image_dir', type=str,
              default="images1",
              help="Images directory under '/static/'")
@click.option('--debug', '-d', 'debug', is_flag=True, default=False,
              help='debug flag')
def main(server_id, port, image_dir, debug):
    """CLI entry point: build the server object and run the socket.io app."""
    global svr_id, svr
    # NOTE(review): this rebinds a *local* _log, shadowing the module-level
    # logger -- the module logger keeps its original debug setting.
    _log = get_logger(__name__, debug)
    _log.info('server_id=%s, port=%s, image_dir=%s',
              server_id, port, image_dir)
    svr_id = server_id
    svr = ytBackgammonServer(MY_NAME, VERSION, svr_id, image_dir, debug=True)
    try:
        socketio.run(app, host='0.0.0.0', port=int(port), debug=debug)
    finally:
        _log.info('end')
if __name__ == "__main__":
    main()
|
998,541 | b8275d37ff12e34d7a64b3f7cf4fc241ccd19070 | import numpy as np
import sys
# Build an N x M zero matrix A and a random matrix B, then add B into A
# element-wise.  (Python 2 script: print statements, eval-style input().)
N,M =(map(lambda x : int(x),sys.argv[1:]))
# Re-prompt until the dimensions satisfy n >= 3 and 0 <= m <= 7.
while not(N>=3 and 0<=M<=7):
    if N<3 :
        print '\'n\' must be Greater than or equal to 3'
    if 0>M or M>7:
        print '\'m\' must be between 0-7'
    print '\ninput n and m again'
    # NOTE(review): Python 2 input() eval()s what the user types --
    # raw_input() with int() would be safer; confirm before changing.
    N=input("n = ")
    M=input("m = ")
print "\n------------------------------------------------------"
a = np.zeros((N,M),int)
print 'Matrix A :\n',a
b=np.random.randint(10,size=(N,M))
print '\nMatrix B :\n',b
# Element-wise addition (equivalent to a += b).
for i in range(N):
    for j in range(M):
        a[i][j]=a[i][j]+b[i][j]
print "\n------------------------------------------------------"
print "Matrix A after addition of Matrix B in A\n",a
print "------------------------------------------------------"
998,542 | 2669d64ab7bf0dc2b7f5a0cf14128fa98bc58ac8 | from flask import Flask, render_template, flash, redirect, Response, request, session, abort, url_for
from ast import literal_eval
from base64 import urlsafe_b64encode as encode
from base64 import urlsafe_b64decode as decode
import ldap
import sys, json, ast, crypt
import os
import re
import itertools
from flask import jsonify
import random
import getpass
import subprocess
import hashlib
import base64
import ldap.modlist
#import modlist
#from passlib.hash import pbkdf2_sha256
#-con = ldap.initialize('ldap://localhost:389')
# LDAP connection setup.  Two separate connections are kept: `con` for admin
# operations and `connect` for user binds during login.
ldap_ip="localhost"
con = ldap.initialize(str("ldap://"+ldap_ip))
connect = ldap.initialize(str("ldap://"+ldap_ip))
ldap_base2 = "dc=example,dc=com"
grp_names_list=list()  # cached group names for the current session
priviledge=False       # True once a priviledged admin user has logged in
ldap.set_option(ldap.OPT_DEBUG_LEVEL, 4095)
l = ldap.initialize("ldap://"+ldap_ip , trace_level=2)
# At this point, we're connected as an anonymous user
# If we want to be associated to an account
# you can log by binding your account details to your connection
# NOTE(review): manager credentials are hard-coded in source -- move them to
# configuration / environment variables before deploying.
con.simple_bind_s("cn=Manager,dc=example,dc=com", "cndy525//")
connect.simple_bind_s("cn=Manager,dc=example,dc=com", "cndy525//")
########## User Input Variable ####################################
app = Flask(__name__)
app.config['SECRET_KEY'] = 'F34TF$($e34D'
@app.route('/')
def home():
    """Landing page: show the login form unless a session is already active."""
    if not session.get('logged_in'):
        return render_template('login.html')
    else:
        return "Welcome to Ldap Admin Console <a href='/logout'>Logout</a>"
@app.route('/addu')
def addu():
    """Show the add-user form; only for logged-in priviledged sessions."""
    if session['logged_in'] == True and priviledge==True:
        return render_template('form_submit.html', priviledge=str(priviledge))
    else:
        return home()
@app.route('/adduser', methods=['POST'])
def adduser():
    """Create an LDAP user from the POSTed form and add it to a group.

    (Python 2 view.)  The login name is derived from the e-mail's local
    part; the entry is only created when no user with that cn exists yet.
    """
    if request.method == 'POST':
        user = request.form['username']
        empid = request.form['empid']
        mob = request.form['mobileno']
        email = request.form['email']
        pwd = request.form['password']
        group = request.form['group']
        #def adduser(user,empid,mobileno,email,password,group):
        # username is overwritten with the local part of the e-mail address
        user = re.split('@', email)[0]
        E = empid.istitle()
        if E == False:
            empid = empid.upper()
        D = len(str(mob))
        if D < 10:
            # NOTE(review): sys.exit() inside a Flask view kills the worker
            # process instead of returning an error page -- confirm intent.
            print "You Have Entered Wrong Mobile No"
            sys.exit(0)
        mail = re.match('^[_a-z0-9-]+(\.[_a-z0-9-]+)*@[a-z0-9-]+(\.[a-z0-9-]+)*(\.[a-z]{2,4})$', email)
        if mail == None:
            print('Bad email Syntax')
            raise ValueError('Bad Syntax')
        # NOTE(review): random uidNumber with no collision check against
        # existing entries -- confirm uniqueness is enforced elsewhere.
        userid = random.sample(range(2000, 15000), 1)
        uid = str(userid[0])
        salt = os.urandom(4)
        h = hashlib.sha1("" + pwd +"")
        h.update(salt)
        #password = "{SSHA}" + encode(h.digest() + salt)
        #password = "password"
        # NOTE(review): the SSHA digest above is computed but unused, and
        # `password` (base64 of the plaintext) is itself never stored --
        # the plaintext pwd goes into userPassword below; confirm intent.
        password = base64.b64encode("" + pwd +"")
        if group == 'group1':
            gid = "002"
        elif group == 'group2':
            gid = "001"
        ########## performing a simple ldap query ####################################
        query = "(cn=" + user +")"
        result = con.search_s(ldap_base2, ldap.SCOPE_SUBTREE, query)
        #if result != 0:
        #    return "result"
        dn = "cn=" + user +",ou=People,"+ldap_base2
        # NOTE(review): the "uid" key appears twice in this literal; the
        # second assignment (empid) silently wins.
        modlist = {
            "objectClass": ["inetOrgPerson", "posixAccount", "shadowAccount"],
            "uid": ["" + user +""],
            "sn": ["" + user +""],
            "givenName": ["" + user +""],
            "uidNumber": ["" + uid +""],
            "gidNumber": ["" + gid +""],
            "cn": ["" + user +""],
            "displayName": ["" + user +""],
            "mail": ["" + email +""],
            "userPassword": ["" + pwd +""],
            "mobile": ["" + mob +""],
            "uid": ["" + empid +""],
            "loginShell": ["/bin/bash"],
            "homeDirectory": ["/home/" + user +""]}
        print modlist
        cod = ast.literal_eval(json.dumps(modlist))
        #result = con.add_s(dn, ldap.modlist.addModlist(str(modlist)))
        var = checkUser("" + user +"")
        print var
        if var == None:
            # create the user entry, then add it to the requested group
            result = con.add_s(dn, ldap.modlist.addModlist(cod))
            #print result
            kn = "cn=" + group +",ou=Group,"+ldap_base2
            suser = "" + user +""
            pod = ast.literal_eval(json.dumps(suser))
            mod_attrs = [(ldap.MOD_ADD, "memberUid", pod )]
            tod = con.modify_s(kn, mod_attrs)
            #if result == "" && tod == "":
            return render_template('form_action.html', name="success", priviledge=str(priviledge))
        else:
            return render_template('form_action.html', name="Already Exists", priviledge=str(priviledge))
def checkUser(user):
    """Return the first LDAP entry whose cn equals *user*, or None if absent."""
    query = "(cn=" + user +")"
    result = con.search_s(ldap_base2, ldap.SCOPE_SUBTREE, query)
    for r in result:
        return r
@app.route('/cpw')
def cpw():
    """Show the change-password form; only for logged-in priviledged sessions."""
    if session['logged_in'] == True and priviledge==True:
        return render_template('changePassword_submit.html')
    else:
        return home()
@app.route('/changePassword', methods=['POST'])
def changePassword():
    """Replace a user's LDAP userPassword with the new form value."""
    if request.method == 'POST':
        username = request.form['username']
        oldpassword = request.form['password']
        # NOTE(review): the old password is printed in clear text.
        print oldpassword
        newpassword = request.form['npassword']
        dn =str("cn=" + username +",ou=People,"+ldap_base2)
        old_value = {"userPassword": ["" + oldpassword +""]}
        new_value = {"userPassword": ["" + newpassword +""]}
        #modlist = ldap.modlist.modifyModlist(old_value, new_value)
        modlist = ldap.modlist.modifyModlist(old_value, new_value)
        con.modify_s(dn, modlist)
        # NOTE(review): no return statement -- Flask raises because the view
        # returns None; confirm the intended response.
@app.route('/login', methods=['POST'])
#def lconnect():
def login():
    """Authenticate against LDAP by binding with the user's own DN.

    Only a fixed allow-list of usernames may log in; user1-user4 get the
    priviledged flag, user5 does not.  (Python 2 view.)
    """
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        empid = request.form['empid']
        ldap_server = "localhost"
        ldap_port = "389"
        # the following is the user_dn format provided by the ldap server
        user_dn = "cn=" + username + ",ou=People,"+ldap_base2
        print user_dn
        # adjust this to your base dn for searching
        # con = ldap.open(ldap_server)
        global grp_names_list
        global priviledge
        priviledge=False
        search_filter = "uid=" + empid
        try:
            # if authentication successful, get the full user data
            if username == "user1":
                connect.bind_s(user_dn, password)
                result = connect.search_s(ldap_base2, ldap.SCOPE_SUBTREE, search_filter)
                priviledge=True
            elif username == "user2":
                connect.bind_s(user_dn, password)
                result = connect.search_s(ldap_base2, ldap.SCOPE_SUBTREE, search_filter)
                priviledge=True
            elif username == "user3":
                connect.bind_s(user_dn, password)
                result = connect.search_s(ldap_base2, ldap.SCOPE_SUBTREE, search_filter)
                priviledge=True
            elif username == "user4":
                connect.bind_s(user_dn, password)
                result = connect.search_s(ldap_base2, ldap.SCOPE_SUBTREE, search_filter)
                priviledge=True
            elif username == "user5":
                connect.bind_s(user_dn, password)
                result = connect.search_s(ldap_base2, ldap.SCOPE_SUBTREE, search_filter)
            else:
                return home()
            # return all user data results
            del grp_names_list[ : ]
            grp_names_list=fetch_grp_list()
            session['logged_in'] = True
            return render_template('ldap_action.html', name="",priviledge=str(priviledge))
            # NOTE(review): unreachable -- the return above exits first.
            connect.unbind_s()
        except ldap.LDAPError:
            connect.unbind_s()
            print "authentication error"
            #return render_template('ldap_action.html', name="Authentication Error")
            return home()
@app.route("/logout")
def logout():
    """Clear the session's logged-in flag and return to the landing page."""
    session['logged_in'] = False
    return home()
####################################################################################### added code below
#Function to check employee id pattern
def checkEmpIDpattern(empid):
    """Return True if *empid* looks like a valid employee id.

    A valid id is 3-6 characters, starts with 'C' or 'T' (presumably
    contractor/temp prefixes -- TODO confirm), and the rest is all
    decimal digits, e.g. "C1234".
    """
    # BUG FIX: the original used empid[1:].isnumeric(), but this file
    # targets Python 2 (print statements elsewhere) where plain str has
    # no .isnumeric() -- every call raised AttributeError. .isdigit()
    # exists on both Python 2 and 3.
    if 2 < len(empid) < 7:
        return empid[0] in ('C', 'T') and empid[1:].isdigit()
    return False
def filter_priviledged(glist):
    """Return only the group names a non-privileged user may manage:
    those whose name mentions "fqa" or "svn". Order is preserved."""
    print("inside filter function")
    return [name for name in glist if "fqa" in name or "svn" in name]
#Function to get the existing group on LDAP
def fetch_grp_list():
    """Return the group names under ou=Group,<base>.

    Queries the subtree once, keeps entries whose DN has exactly four
    components (actual group entries), drops two housekeeping groups,
    and -- for non-privileged sessions -- narrows the result to the
    fqa/svn groups via filter_priviledged().
    """
    glist = list()
    ldap_base3 = str("ou=Group," + ldap_base2)
    # Single query. BUG FIX: the original re-ran this same search on
    # every loop iteration, costing one extra LDAP round-trip per entry
    # with no effect (the for-iterator was already bound).
    result = con.search_s(ldap_base3, ldap.SCOPE_SUBTREE)
    for tup in result:
        if len(ldap.dn.explode_dn(tup[0])) == 4:
            glist.append((ldap.dn.explode_dn(tup[0], [ldap.DN_PRETTY]))[0])
    # NOTE(review): raises ValueError if either housekeeping group is
    # absent from the directory -- TODO confirm they always exist.
    glist.remove("jira_users")
    glist.remove("sasuser")
    glistfil = glist
    if not priviledge:
        glistfil = filter_priviledged(glist[:])
    print("filter grp names list")
    print(glistfil)
    return glistfil
#include this function in main function
#Function to get distinguished name of employee, which is entered in svn viewvc group
def dn_viewvc(empid):
    """Return the distinguished name of the employee with uid *empid*
    (used for svn viewvc group entries, which store full DNs)."""
    people_base = str("ou=People," + ldap_base2)
    uid_filter = "(uid={0})".format(empid)
    wanted_attrs = ["dn"]
    matches = con.search_s(people_base, ldap.SCOPE_ONELEVEL, uid_filter, wanted_attrs)
    return matches[0][0]
#check if employee has an ldap account
def check_employee_in_ldap(empid):
    """Return the (possibly empty) search result for uid *empid* under
    ou=People -- truthy when the employee has an LDAP account."""
    people_base = str("ou=People," + ldap_base2)
    uid_filter = "(uid={0})".format(empid)
    return con.search_s(people_base, ldap.SCOPE_ONELEVEL, uid_filter)
#Add an employee to an existing group
def entryAdd(group_name, empid):
    """Add *empid* as a memberUid of *group_name* unless already present."""
    group_base = str("ou=Group," + ldap_base2)
    query = "(cn=" + str(group_name) + ")"
    # search kept for parity with the original (result itself is unused)
    result = con.search_s(group_base, ldap.SCOPE_ONELEVEL, query)
    if checkinGroup(empid, group_name):
        return
    group_dn = "cn=" + group_name + ",ou=Group," + ldap_base2
    # json round-trip coerces a unicode id into a plain str for python-ldap
    member = ast.literal_eval(json.dumps("" + empid + ""))
    con.modify_s(group_dn, [(ldap.MOD_ADD, "memberUid", member)])
#Delete an employee to an existing group
def entryDel(group_name, empid):
    """Remove *empid* from the memberUid list of *group_name* if present."""
    group_base = str("ou=Group," + ldap_base2)
    query = "(cn=" + str(group_name) + ")"
    # search kept for parity with the original (result itself is unused)
    result = con.search_s(group_base, ldap.SCOPE_ONELEVEL, query)
    if not checkinGroup(empid, group_name):
        return
    group_dn = "cn=" + group_name + ",ou=Group," + ldap_base2
    # json round-trip coerces a unicode id into a plain str for python-ldap
    member = ast.literal_eval(json.dumps("" + empid + ""))
    con.modify_s(group_dn, [(ldap.MOD_DELETE, "memberUid", member)])
#Replacing employee2 by employee1
def entryMod(group_name, empid1, empid2):
    """Replace *empid2* by *empid1* in *group_name*: add empid1 if absent,
    then delete empid2 if present (two independent modifications)."""
    group_base = str("ou=Group," + ldap_base2)
    query = "(cn=" + str(group_name) + ")"
    # search kept for parity with the original (result itself is unused)
    result = con.search_s(group_base, ldap.SCOPE_ONELEVEL, query)
    group_dn = "cn=" + group_name + ",ou=Group," + ldap_base2
    if not checkinGroup(empid1, group_name):
        member = ast.literal_eval(json.dumps("" + empid1 + ""))
        outcome = con.modify_s(group_dn, [(ldap.MOD_ADD, "memberUid", member)])
        print(outcome)
    if checkinGroup(empid2, group_name):
        member = ast.literal_eval(json.dumps("" + empid2 + ""))
        con.modify_s(group_dn, [(ldap.MOD_DELETE, "memberUid", member)])
def form_HTML_page(motive):
    """Render addgroup.html with a two-column checkbox grid of groups.

    Builds the table body by hand: one checkbox cell per entry of the
    global grp_names_list, two cells per <tr>, all posted back under the
    form name "box_list". *motive* ("add"/"del"/"mod") tells the
    template which action the form drives.
    """
    count=0
    rows=""
    for group in grp_names_list:
        # open a new table row before every even-indexed cell
        if count%2==0:
            rows=str(rows+"<tr>")
        # NOTE(review): name_justified is passed to format() but the
        # template below never references it.
        rows=str(rows+'''
        <td style="padding-left:50px">
        <label><input type="checkbox"; style="width: 15px; height: 15px;" name="box_list" value={name} >
        <div class="view div22" style=" display: inline-block; border-radius: 4px;
        font-family: "arial-black";font-size: 14px; color:red; padding: 8px 12px; cursor: pointer; ">{name}</div>
        </label></td>
        '''.format(name=group,name_justified=group.ljust(10)))
        # close the row after every odd-indexed cell
        if count%2==1:
            rows=str(rows+"</tr>")
        count+=1
    # close a dangling row when the group count is odd
    if count%2==1:
        rows=str(rows+"</tr>")
    return render_template('addgroup.html', table_rows=rows, motive=motive, priviledge=str(priviledge))
@app.route('/addg')
def add_g():
    """Show the group-selection form for adding an employee (login required)."""
    if session['logged_in'] == True:
        return form_HTML_page(motive="add")
    return home()
@app.route('/delg')
def del_g():
    """Show the group-selection form for removing an employee (login required)."""
    if session['logged_in'] == True:
        return form_HTML_page(motive="del")
    return home()
@app.route('/modg')
def mod_g():
    """Show the group-selection form for replacing an employee (login required)."""
    if session['logged_in'] == True:
        return form_HTML_page(motive="mod")
    return home()
def checkinGroup(empid, group_name):
    """Return True if *empid* is listed as a memberUid of *group_name*."""
    ldap_base3 = str("ou=Group," + ldap_base2)
    query = "(cn=" + str(group_name) + ")"
    attrib = ["memberUid"]
    result = con.search_s(ldap_base3, ldap.SCOPE_ONELEVEL, query, attrib)
    try:
        return empid in result[0][1]['memberUid']
    except (IndexError, KeyError):
        # KeyError: the group exists but has no members (original case).
        # IndexError: ROBUSTNESS FIX -- the search matched no group at
        # all, so result[0] used to crash the request.
        return False
return False
@app.route('/delgrp',methods = ['POST'])
def del_grp():
    """Remove an employee from every group ticked on the form.

    Appends a per-group status line to an HTML fragment; if the
    "presence" box is ticked, also lists every group the employee still
    belongs to.
    """
    if request.method == 'POST':
        print("Enter del_grp.................")
        empid = request.form["empid"]
        if not checkEmpIDpattern(empid):
            return render_template('form_action.html', name="Enter proper employee id", priviledge=str(priviledge))
        result_page = ""
        for group_name in request.form.getlist("box_list"):
            if not group_name:
                continue
            result_page = str(result_page + "<br> Deletion of " + str(empid) + " from " + str(group_name) + " : ")
            if group_name == "svn":
                # svn membership is keyed by the employee's full DN
                if not checkinGroup(dn_viewvc(empid), group_name):
                    result_page = str(result_page + " Already absent!")
                else:
                    print("Caught group name svn")
                    entryDel(group_name, dn_viewvc(empid))
                    result_page = str(result_page + "Done")
            else:
                if not checkinGroup(empid, group_name):
                    result_page = str(result_page + " Already absent!")
                else:
                    entryDel(group_name, empid)
                    result_page = str(result_page + "Done")
        if request.form.getlist("presence"):
            result_page = str(result_page + '''<div align="center"<br><br>Employee present in following groups<br>''')
            for member_group in grp_names_list:
                if checkinGroup(empid, member_group):
                    result_page = str(result_page + " | " + member_group)
            result_page = str(result_page + '''</div>''')
        return render_template('form_action.html', name=result_page, priviledge=str(priviledge))
@app.route('/addgrp',methods = ['POST'])
def add_grp():
    """Add an employee to each group ticked on the form.

    Validates the employee id, confirms the account exists in LDAP, then
    attempts the addition group by group, appending a status line per
    group. If the "presence" box is ticked, also lists every group the
    employee currently belongs to.
    """
    if request.method == 'POST':
        empid = request.form["empid"]
        if checkEmpIDpattern(empid) == False:
            return render_template('form_action.html', name="Enter proper employee id", priviledge=str(priviledge))
        if not check_employee_in_ldap(empid):
            return render_template('form_action.html', name="No user found", priviledge=str(priviledge))
        result_page = ""
        check_box_list = request.form.getlist("box_list")
        for group_name in check_box_list:
            if not checkinGroup(empid, group_name):
                if group_name:
                    try:
                        result_page = str(result_page + "<br> Addition of " + str(empid) + " to " + str(group_name) + " : ")
                        if group_name == "svn":
                            # svn membership is keyed by the employee's full DN
                            entryAdd(group_name, dn_viewvc(empid))
                        else:
                            entryAdd(group_name, empid)
                        result_page = str(result_page + "Done")
                    except ldap.TYPE_OR_VALUE_EXISTS:
                        result_page = str(result_page + "<b>Already present !</b>")
            else:
                # BUG FIX: the original returned the page here, aborting
                # the loop after the first already-present group and
                # skipping the "presence" listing -- unlike del_grp and
                # mod_grp, which process every selected group.
                result_page = str(result_page + "<b>Already present!</b>")
        if request.form.getlist("presence"):
            result_page = str(result_page + '''<div align="center"<br><br>Employee present in following groups<br>''')
            for group_name in grp_names_list:
                if checkinGroup(empid, group_name):
                    result_page = str(result_page + " | " + group_name)
            result_page = str(result_page + '''</div>''')
        return render_template('form_action.html', name=result_page, priviledge=str(priviledge))
@app.route('/modgrp',methods = ['POST'])
def mod_grp():
    """Replace employee *empid2* with *empid1* in each selected group.

    For every ticked group: refuse if empid1 is already a member or
    empid2 is not, otherwise add empid1 and remove empid2 via entryMod.
    Optionally lists all groups empid1 belongs to afterwards.
    """
    if request.method == 'POST':
        empid1 = request.form["empid1"]
        empid2 = request.form["empid2"]
        if checkEmpIDpattern(empid1) == False:
            return render_template('form_action.html', name="Enter proper employee id", priviledge=str(priviledge))
        if not check_employee_in_ldap(empid1):
            return render_template('form_action.html', name=str("No user found in ldap:" + str(empid1)), priviledge=str(priviledge))
        result_page = ""
        if empid1 == empid2:
            result_page = str(result_page + "<br>Enter different employee id's")
            return render_template('form_action.html', name=result_page, priviledge=str(priviledge))
        check_box_list = request.form.getlist("box_list")
        for group_name in check_box_list:
            if group_name:
                result_page = str(result_page + "<br> Replacing " + str(empid2) + " by " + str(empid1) + " in " + str(group_name) + " : ")
                if group_name == "svn":
                    # svn membership is keyed by the employee's full DN
                    if checkinGroup(dn_viewvc(empid1), group_name):
                        # BUG FIX: message previously read "is preset in
                        # group!" -- typo, cf. the parallel branch below.
                        result_page = str(result_page + str(empid1) + " is present in group!")
                    elif not checkinGroup(dn_viewvc(empid2), group_name):
                        result_page = str(result_page + str(empid2) + " Not found in group!")
                    else:
                        entryMod(group_name, dn_viewvc(empid1), dn_viewvc(empid2))
                        result_page = str(result_page + "Done")
                else:
                    if checkinGroup(empid1, group_name):
                        result_page = str(result_page + str(empid1) + " is present in group!")
                    elif not checkinGroup(empid2, group_name):
                        result_page = str(result_page + str(empid2) + " Not found in group!")
                    else:
                        entryMod(group_name, empid1, empid2)
                        result_page = str(result_page + "Done")
        if request.form.getlist("presence"):
            result_page = str(result_page + '''<div align="center"<br><br>Employee present in following groups<br>''')
            for group_name in grp_names_list:
                if checkinGroup(empid1, group_name):
                    result_page = str(result_page + " | " + group_name)
            result_page = str(result_page + '''</div>''')
        return render_template('form_action.html', name=result_page, priviledge=str(priviledge))
#if __name__ == '__main__':
# app.run(debug = True)
##if __name__ == '__main__':
# #app.secret_key = os.urandom(12)
## app.run(host="0.0.0.0", port=8081)
# #app.debug = True
# #app.run()
|
998,543 | 22b6f4aca38722a00e641dbce7a3630fd3b51a53 | A=input("nantwat na nan")
print(A) |
998,544 | c41990ae36297184e41d5c35c085c0d135f240f9 | __author__ = 'gustavo'
# Compare x against 2 and report both relations (same output as the
# original two if/else blocks, written as conditional expressions).
x = 2
print('Maior que 2' if x > 2 else 'Nao eh maior que 2')
print('Igual a 2' if x == 2 else 'Diferente de 2')
|
998,545 | de37d707487e37047b51683e82bb4b2c3bcc6775 | # Challenge 1: Name the variable types of the following variables. Print them out into console in the format "Variable: Variable Type" (might have to google "how to print variables in python")
# Challenge 1: print each sample variable as "value : Variable Type <type>".
integer = 3
string = "Mr. Mortensen"
character = 'f'
float = 0.4  # NOTE: shadows the builtin `float`; kept to match the assignment
for sample in (integer, string, character, float):
    print(sample, ": Variable Type", type(sample))
# Challenge 2: pass list1 into list2 with list2 holding the elements in order.
list1 = [5, 3, 4, 1, 2]
list2 = []

# Extra credit: selection-sort style transfer -- repeatedly move the
# smallest remaining element of firstList onto finalList, then print it.
# NOTE: empties firstList as a side effect (same in-place contract as before).
def orderList(firstList, finalList):
    """Move every element of *firstList* onto *finalList* in ascending
    order, emptying *firstList*, and print the result."""
    # BUG FIX: the original iterated `for i in firstList` while removing
    # from firstList, so the loop terminated early and transferred only
    # 3 of 5 elements (list2 ended up [1, 2, 3]). A while-loop drains
    # the list completely.
    while firstList:
        smallest = min(firstList)
        firstList.remove(smallest)
        finalList.append(smallest)
    print(finalList)

orderList(list1, list2)
# Challenge 3: add 3 to each element of the array, then average it.
averageList = [23, 41, 90, 55, 71, 83]
# BUG FIX: the original bumped only indices 0-4, leaving the last
# element (83) unchanged even though the challenge says "each element".
averageList = [value + 3 for value in averageList]

# extra credit: print the arithmetic mean of any list
def averageListfun(list):
    """Print the arithmetic mean of *list* (parameter name kept for
    backward compatibility; it shadows the builtin)."""
    avg = sum(list) / len(list)
    print(avg)

averageListfun(averageList)
998,546 | 6ca5d8f13b6c6a2fe49f39f9ab8db4bb99540f18 | #!/usr/bin/python
#
# $Id: reader_hicom300.py,v 1.6 2012-02-24 20:53:25 sshevtsov Exp $
#
# Este script es utilizado para levantar los datos del puerto serie
# al cual esta conectada la central hicom300.
#
# La configuracion del pueto esta definida en y explicada en el archivo
# serial_conf.py
#
# Las demas configuraciones estan en el archivo reader_hicom300_conf.py
#
from buffer_thread_safety import bufferTS
import logging
from logging.handlers import TimedRotatingFileHandler
from logging import FileHandler
import os
import re
import reader_hicom300_conf
import serial
import serial_conf
import sys
import threading
class serialPort:
    """Thin wrapper around a pyserial port configured from serial_conf.

    Opens and flushes the port on construction; read() returns one line
    (readline honours serial_conf.timeout). Any pyserial error is
    re-raised as a generic Exception with context.
    """

    def __init__(self):
        try:
            self.port = serial.Serial(serial_conf.port,
                                      serial_conf.baudrate,
                                      serial_conf.bytesize,
                                      serial_conf.parity,
                                      serial_conf.stopbits,
                                      serial_conf.timeout)
        except serial.SerialException as e:
            # FIX: was the Python-2-only "raise Exception, msg" form,
            # followed by an unreachable sys.exit(1).
            raise Exception("Error seting up serial port: %s" % e)
        self.port.open()
        self.port.flushInput()
        self.port.flushOutput()

    def __del__(self):
        # self.port may not exist if __init__ failed before assignment.
        try:
            self.port.close()
        except AttributeError:
            return

    def write(self, data):
        """Write *data* to the port; raises Exception on write timeout."""
        try:
            self.port.write(data)
        except serial.SerialTimeoutException as e:
            raise Exception("Error writing data to serial port: %s" % e)

    def read(self):
        """Return one line read from the port; raises Exception on error."""
        try:
            data = self.port.readline()
        except ValueError as e:
            # FIX: the original had an unreachable "return False" after
            # this raise.
            raise Exception("Error reading data from serial port: %s" % e)
        return data
class serialReader(threading.Thread):
    """Thread that pumps lines from the serial port into a shared buffer."""

    def __init__(self, sp, buf):
        threading.Thread.__init__(self)
        self.bufferTS = buf
        self.serialPort = sp

    def run(self):
        """Forever: read one line and enqueue it when non-empty."""
        while True:
            line = self.serialPort.read()
            if not line:
                continue
            self.bufferTS.put(line)
class serialLogger(threading.Thread):
    """Thread that validates raw PBX call records from the shared buffer
    and writes the parsed fields to a tab-separated log file."""

    def __init__(self, fileName, buf):
        threading.Thread.__init__(self)
        self.bufferTS = buf
        self.fileName = fileName
        # Plain FileHandler: time-based rotation is handled externally by
        # the parser (hence the original's commented-out rotating handler).
        handler = FileHandler(self.fileName)
        handler.setFormatter(logging.Formatter("%(levelname)s %(asctime)s|%(message)s", ""))
        self.logger = logging.getLogger('LlamadasTelefonicas')
        self.logger.addHandler(handler)
        self.logger.setLevel(logging.INFO)

    def isValidLogLine(self, line):
        """Return True when *line* matches the expected fixed-width call
        record: date, two hh:mm:ss timestamps, extension, line, "00000"."""
        pattern = re.compile("\d{2}/\d{2}/\d{2}\d{2}:\d{2}:\d{2}\d{2}:\d{2}:\d{2}\s+\d{3}\s+\d+\s+0{5}")
        found = pattern.search(line)
        return found is not None and len(found.group(0)) > 0

    def run(self):
        """Forever: take a raw line, skip invalid ones, log parsed fields."""
        while True:
            raw = self.bufferTS.get()
            if not self.isValidLogLine(raw):
                continue
            # fixed-column slices: date, time, duration, extension,
            # outgoing line, dialled number
            fields = (raw[0:8], raw[8:16], raw[16:24],
                      raw[25:28], raw[30:32], raw[32:60].strip())
            self.logger.info("%s\t%s\t%s\t%s\t%s\t%s" % fields)
"""
convertiendo a servicio.
hago el fork si soy padre salgo
me adopta el proceso de init
"""
# Daemonize: fork; the parent exits immediately so init adopts the child.
print "Forking."
if os.fork() > 0:
    os._exit(0)
# Become session leader (detach from the controlling terminal) and move
# to the configured working directory.
os.setsid()
os.chdir(reader_hicom300_conf.workDir)
# Record our pid in the pidfile so the service can be stopped later.
pidFile = open(reader_hicom300_conf.pidFileName, 'w')
pidFile.write("%d\n" % os.getpid())
pidFile.close()
# Wire the reader and logger threads through one shared thread-safe buffer.
s = serialPort()
commonBuffer = bufferTS()
hicom300Reader = serialReader(s,commonBuffer)
hicom300Logger = serialLogger(reader_hicom300_conf.logFileName,commonBuffer)
# start both worker threads
hicom300Reader.start()
hicom300Logger.start()
|
998,547 | 84d8b37758940194cf2615cf38ca6d51e3ab7169 | """
author: Zituo Yan
description: this starts the verification part
date: 3/2/2020
"""
from gedcom_app.control.child_birth import birth_before_marriage
from gedcom_app.control.US0203 import birth_b_marriage_us02, birth_b_death_us03
from gedcom_app.control.US1217 import parents_not_too_old_us12, no_marriage_to_children_us17
def verification(indi_dict, fam_dict):
    """Run every implemented GEDCOM user-story check, in fixed order,
    over the parsed individual and family records."""
    checks = (
        (birth_before_marriage, fam_dict),
        (birth_b_marriage_us02, fam_dict),
        (birth_b_death_us03, indi_dict),
        (parents_not_too_old_us12, fam_dict),
        (no_marriage_to_children_us17, fam_dict),
    )
    for check, records in checks:
        check(records)
|
998,548 | 8a6c46ba5ead05c59515879560ae500bec45639a | import requests
import json
#The URL to be used the base
baseUrl = "http://fasttrack.herokuapp.com"
#The main function to make the calls to the x amount of pages
def fetch(link):
headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
r = requests.get(link, headers=headers)
jsonResponse = (json.loads(r.content.decode("UTF-8")))
#For debugging purposes
print(link)
print (jsonResponse)
print (headers)
fetch(baseUrl + jsonResponse['next'])
fetch(baseUrl)
|
998,549 | d527323f8f2056174d4841eaa6f888663727afbc | # -*- coding: utf-8 -*-
import unittest
from ..utils.Helper_validate import Validate, RegType
class ValidateHelperTestCase(unittest.TestCase):
    """Unit tests for Validate.check with the APP pattern."""

    def test_validate_check_APP(self):
        """'app' must match the APP pattern; 'abpp' must not."""
        matching = Validate.check("app", reg_type=RegType.APP)
        self.assertTrue(matching)
        non_matching = Validate.check("abpp", reg_type=RegType.APP)
        self.assertFalse(non_matching)
|
998,550 | 192091870c9d3b228fd61bc1522f8c7838d81724 | #Solve the same problems with enumerate and numpy
import numpy as np
def gc_skew(seq):
    """Return the running G-C skew of *seq* as a list of length
    len(seq)+1, starting at 0: G adds 1, C subtracts 1, A/T leave it."""
    contribution = {'A': 0, 'T': 0, 'C': -1, 'G': 1}
    running = 0
    skew = [running]
    for base in seq:
        running += contribution[base]
        skew.append(running)
    return skew
def minimum_gc_skew(seq):
    """Return the array of ALL positions where the GC skew of *seq*
    reaches its minimum (not just the first one)."""
    skew = np.array(gc_skew(seq))
    return np.flatnonzero(skew == skew.min())
def maximum_gc_skew(seq):
    """Return the array of ALL positions where the GC skew of *seq*
    reaches its maximum."""
    skew = np.array(gc_skew(seq))
    return np.flatnonzero(skew == skew.max())
# Load the dataset (genome on the first line) and report skew extremes.
with open("data/dataset_7_6.txt") as handle:
    genome = handle.read().split('\n')

print(minimum_gc_skew(genome[0]))
print("Minimun skew for quiz answer:", minimum_gc_skew("CATTCCAGTACTTCATGATGGCGTGAAGA"))
print("Maximum skew for quiz answer:", maximum_gc_skew("CATTCCAGTACTTCATGATGGCGTGAAGA"))
998,551 | c98dca771cc6088f3cdf41a2b6cd1bb710afdc73 | # Generated by Django 2.0.3 on 2019-03-04 19:49
from django.db import migrations
class Migration(migrations.Migration):
    """Rename BannerUpdate.message to BannerUpdate.status_message."""

    # Must run after 0016 so the field exists before being renamed.
    dependencies = [
        ('banners', '0016_auto_20190304_1924'),
    ]

    operations = [
        migrations.RenameField(
            model_name='bannerupdate',
            old_name='message',
            new_name='status_message',
        ),
    ]
|
998,552 | 63bf545ab8d57ac8c71e2c204c3f5830ddb0d41c | import eliminacaoGaussPivoParcial as ep
def matriz(x, grau):
    """Build the Vandermonde-style matrix for a degree-*grau* polynomial
    fit: row j is [x[j]**0, x[j]**1, ..., x[j]**grau]."""
    return [[ponto ** expoente for expoente in range(grau + 1)] for ponto in x]
def main():
    """Solve the degree-3 polynomial interpolation system for the sample
    data points via Gaussian elimination with partial pivoting."""
    # input data
    pontos_x = [0.1, 0.2, 0.3, 0.4]
    valores_f = [5, 13, -4, -8]
    grau = 3
    # assemble and solve the system
    sistema = matriz(pontos_x, grau)
    solucao = ep.gaussPivoteamento(sistema, valores_f)
    print(solucao)
# Entry-point guard: run the demo only when executed as a script.
if __name__ == '__main__':
    main()
|
998,553 | e439fd3063738ee84f06ecf6fc6b6d16a8c416a6 | from cctpy import *
if __name__ == "__main__":
DL2 = 2.1162209
GAP3 = 0.1978111
QS3_LEN = 0.2382791
QS3_GRADIENT = -7.3733
QS3_SECOND_GRADIENT = -45.31 * 2.0
QS3_APERTURE = 60 * MM
big_r_part2 = 0.95
CCT_APERTURE = 80 * MM
small_r_gap = 15 * MM
small_r_innerest = 83 * MM
agcct_small_r_in = small_r_innerest
agcct_small_r_out = small_r_innerest + small_r_gap
dipole_cct_small_r_in = small_r_innerest + small_r_gap * 2
dipole_cct_small_r_out = small_r_innerest + small_r_gap * 3
dipole_cct_winding_num: int = 128
agcct_winding_nums: List[int] = [21, 50, 50]
dipole_cct_bending_angle = 67.5
dipole_cct_bending_rad = BaseUtils.angle_to_radian(dipole_cct_bending_angle)
agcct_bending_angles: List[float] = [8 + 3.716404, 8 + 19.93897, 8 + 19.844626]
agcct_bending_angles_rad: List[float] = BaseUtils.angle_to_radian(
agcct_bending_angles
)
dipole_cct_tilt_angles = numpy.array([30.0, 80.0, 90.0, 90.0])
agcct_tilt_angles = numpy.array([90.0, 30.0, 90.0, 90.0])
dipole_cct_current = 9664.0
agcct_current = -6000.0
disperse_number_per_winding: int = 120
trajectory_part2 = (
Trajectory.set_start_point(P2(3.703795764767297, 1.5341624380266456))
.first_line(P2(1, 1), DL2)
.add_arc_line(0.95, True, dipole_cct_bending_angle)
.add_strait_line(GAP3*2+QS3_LEN)
)
beamline = Beamline()
beamline.add(
CCT.create_cct_along(
trajectory=trajectory_part2,
s=DL2,
big_r=big_r_part2,
small_r=dipole_cct_small_r_in,
bending_angle=dipole_cct_bending_angle,
tilt_angles=dipole_cct_tilt_angles,
winding_number=dipole_cct_winding_num,
current=dipole_cct_current,
starting_point_in_ksi_phi_coordinate=P2.origin(),
end_point_in_ksi_phi_coordinate=P2(
2 * math.pi * dipole_cct_winding_num, -dipole_cct_bending_rad
),
disperse_number_per_winding=disperse_number_per_winding,
)
)
beamline.add(
CCT.create_cct_along(
trajectory=trajectory_part2,
s=DL2,
big_r=big_r_part2,
small_r=dipole_cct_small_r_out, # diff
bending_angle=dipole_cct_bending_angle,
tilt_angles=-dipole_cct_tilt_angles, # diff ⭐
winding_number=dipole_cct_winding_num,
current=dipole_cct_current,
starting_point_in_ksi_phi_coordinate=P2.origin(),
end_point_in_ksi_phi_coordinate=P2(
-2 * math.pi * dipole_cct_winding_num, -dipole_cct_bending_rad
),
disperse_number_per_winding=disperse_number_per_winding,
)
)
# agcct_index = 0
# agcct_start_in = P2.origin()
# agcct_start_out = P2.origin()
# agcct_end_in = P2(
# ((-1.0) ** agcct_index) * 2 * math.pi * agcct_winding_nums[agcct_index],
# -agcct_bending_angles_rad[agcct_index],
# )
# agcct_end_out = P2(
# ((-1.0) ** (agcct_index + 1)) * 2 * math.pi * agcct_winding_nums[agcct_index],
# -agcct_bending_angles_rad[agcct_index],
# )
# beamline.add(
# CCT.create_cct_along(
# trajectory=trajectory_part2,
# s=DL2,
# big_r=big_r_part2,
# small_r=agcct_small_r_in,
# bending_angle=agcct_bending_angles[agcct_index],
# tilt_angles=-agcct_tilt_angles,
# winding_number=agcct_winding_nums[agcct_index],
# current=agcct_current,
# starting_point_in_ksi_phi_coordinate=agcct_start_in,
# end_point_in_ksi_phi_coordinate=agcct_end_in,
# disperse_number_per_winding=disperse_number_per_winding,
# )
# )
# beamline.add(
# CCT.create_cct_along(
# trajectory=trajectory_part2,
# s=DL2,
# big_r=big_r_part2,
# small_r=agcct_small_r_out,
# bending_angle=agcct_bending_angles[agcct_index],
# tilt_angles=agcct_tilt_angles,
# winding_number=agcct_winding_nums[agcct_index],
# current=agcct_current,
# starting_point_in_ksi_phi_coordinate=agcct_start_out,
# end_point_in_ksi_phi_coordinate=agcct_end_out,
# disperse_number_per_winding=disperse_number_per_winding,
# )
# )
# for ignore in range(len(agcct_bending_angles) - 1):
# agcct_index += 1
# agcct_start_in = agcct_end_in + P2(
# 0,
# -agcct_bending_angles_rad[agcct_index - 1]
# / agcct_winding_nums[agcct_index - 1],
# )
# agcct_start_out = agcct_end_out + P2(
# 0,
# -agcct_bending_angles_rad[agcct_index - 1]
# / agcct_winding_nums[agcct_index - 1],
# )
# agcct_end_in = agcct_start_in + P2(
# ((-1) ** agcct_index) * 2 * math.pi * agcct_winding_nums[agcct_index],
# -agcct_bending_angles_rad[agcct_index],
# )
# agcct_end_out = agcct_start_out + P2(
# ((-1) ** (agcct_index + 1)) * 2 * math.pi * agcct_winding_nums[agcct_index],
# -agcct_bending_angles_rad[agcct_index],
# )
# beamline.add(
# CCT.create_cct_along(
# trajectory=trajectory_part2,
# s=DL2,
# big_r=big_r_part2,
# small_r=agcct_small_r_in,
# bending_angle=agcct_bending_angles[agcct_index],
# tilt_angles=-agcct_tilt_angles,
# winding_number=agcct_winding_nums[agcct_index],
# current=agcct_current,
# starting_point_in_ksi_phi_coordinate=agcct_start_in,
# end_point_in_ksi_phi_coordinate=agcct_end_in,
# disperse_number_per_winding=disperse_number_per_winding,
# )
# )
# beamline.add(
# CCT.create_cct_along(
# trajectory=trajectory_part2,
# s=DL2,
# big_r=big_r_part2,
# small_r=agcct_small_r_out,
# bending_angle=agcct_bending_angles[agcct_index],
# tilt_angles=agcct_tilt_angles,
# winding_number=agcct_winding_nums[agcct_index],
# current=agcct_current,
# starting_point_in_ksi_phi_coordinate=agcct_start_out,
# end_point_in_ksi_phi_coordinate=agcct_end_out,
# disperse_number_per_winding=disperse_number_per_winding,
# )
# )
# Plot3.plot_line2(trajectory_part2)
# Plot3.plot_beamline(beamline, ['r','y-']*3)
# Plot3.set_center(trajectory_part2.point_at(DL2).to_p3(), 5)
# Plot3.show()
Plot2.plot_p2s(beamline.magnetic_field_bz_along(trajectory_part2,step=10*MM),describe='r-')
x = [0.0, 0.0010003398611275502, 0.0020006797222551003, 0.0030010195833826505, 0.004001359444510201, 0.005001699305637751, 0.006002039166765301, 0.007002379027892851, 0.008002718889020401, 0.009003058750147952, 0.010003398611275503, 0.011003738472403051, 0.012004078333530602, 0.013004418194658153, 0.014004758055785702, 0.015005097916913252, 0.016005437778040803, 0.017005777639168353, 0.018006117500295904, 0.019006457361423455, 0.020006797222551005, 0.021007137083678552, 0.022007476944806103, 0.023007816805933654, 0.024008156667061204, 0.025008496528188755, 0.026008836389316305, 0.027009176250443856, 0.028009516111571403, 0.029009855972698954, 0.030010195833826504, 0.031010535694954055, 0.032010875556081606, 0.03301121541720915, 0.03401155527833671, 0.035011895139464254, 0.03601223500059181, 0.037012574861719355, 0.03801291472284691, 0.039013254583974456, 0.04001359444510201, 0.04101393430622956, 0.042014274167357105, 0.04301461402848466, 0.044014953889612206, 0.04501529375073976, 0.04601563361186731, 0.04701597347299486, 0.04801631333412241, 0.049016653195249955, 0.05001699305637751, 0.05101733291750506, 0.05201767277863261, 0.05301801263976016, 0.05401835250088771, 0.05501869236201526, 0.056019032223142806, 0.05701937208427036, 0.05801971194539791, 0.05902005180652546, 0.06002039166765301, 0.06102073152878056, 0.06202107138990811, 0.06302141125103566, 0.06402175111216321, 0.06502209097329076, 0.0660224308344183, 0.06702277069554587, 0.06802311055667341, 0.06902345041780096, 0.07002379027892851, 0.07102413014005607, 0.07202447000118362, 0.07302480986231116, 0.07402514972343871, 0.07502548958456626, 0.07602582944569382, 0.07702616930682137, 0.07802650916794891, 0.07902684902907646, 0.08002718889020402, 0.08102752875133157, 0.08202786861245911, 0.08302820847358666, 0.08402854833471421, 0.08502888819584177, 0.08602922805696932, 0.08702956791809686, 0.08802990777922441, 0.08903024764035196, 0.09003058750147952, 0.09103092736260707, 0.09203126722373461, 
0.09303160708486216, 0.09403194694598972, 0.09503228680711727, 0.09603262666824482, 0.09703296652937236, 0.09803330639049991, 0.09903364625162747, 0.10003398611275502, 0.10103432597388257, 0.10203466583501011, 0.10303500569613767, 0.10403534555726522, 0.10503568541839277, 0.10603602527952032, 0.10703636514064786, 0.10803670500177542, 0.10903704486290297, 0.11003738472403052, 0.11103772458515807, 0.11203806444628561, 0.11303840430741317, 0.11403874416854072, 0.11503908402966827, 0.11603942389079581, 0.11703976375192338, 0.11804010361305092, 0.11904044347417847, 0.12004078333530602, 0.12104112319643356, 0.12204146305756113, 0.12304180291868867, 0.12404214277981622, 0.12504248264094378, 0.12604282250207133, 0.12704316236319887, 0.12804350222432642, 0.12904384208545397, 0.13004418194658152, 0.13104452180770906, 0.1320448616688366, 0.13304520152996419, 0.13404554139109173, 0.13504588125221928, 0.13604622111334683, 0.13704656097447437, 0.13804690083560192, 0.13904724069672947, 0.14004758055785702, 0.14104792041898456, 0.14204826028011214, 0.14304860014123968, 0.14404894000236723, 0.14504927986349478, 0.14604961972462233, 0.14704995958574987, 0.14805029944687742, 0.14905063930800497, 0.15005097916913251, 0.1510513190302601, 0.15205165889138764, 0.15305199875251518, 0.15405233861364273, 0.15505267847477028, 0.15605301833589783, 0.15705335819702537, 0.15805369805815292, 0.15905403791928047, 0.16005437778040804, 0.1610547176415356, 0.16205505750266314, 0.16305539736379068, 0.16405573722491823, 0.16505607708604578, 0.16605641694717332, 0.16705675680830087, 0.16805709666942842, 0.16905743653055597, 0.17005777639168354, 0.1710581162528111, 0.17205845611393863, 0.17305879597506618, 0.17405913583619373, 0.17505947569732128, 0.17605981555844882, 0.17706015541957637, 0.17806049528070392, 0.1790608351418315, 0.18006117500295904, 0.1810615148640866, 0.18206185472521413, 0.18306219458634168, 0.18406253444746923, 0.18506287430859678, 0.18606321416972432, 0.18706355403085187, 
0.18806389389197944, 0.189064233753107, 0.19006457361423454, 0.19106491347536209, 0.19206525333648963, 0.19306559319761718, 0.19406593305874473, 0.19506627291987227, 0.19606661278099982, 0.1970669526421274, 0.19806729250325494, 0.1990676323643825, 0.20006797222551004, 0.20106831208663759, 0.20206865194776513, 0.20306899180889268, 0.20406933167002023, 0.20506967153114777, 0.20607001139227535, 0.2070703512534029, 0.20807069111453044, 0.209071030975658, 0.21007137083678554, 0.21107171069791308, 0.21207205055904063, 0.21307239042016818, 0.21407273028129573, 0.2150730701424233, 0.21607341000355085, 0.2170737498646784, 0.21807408972580594, 0.2190744295869335, 0.22007476944806104, 0.22107510930918858, 0.22207544917031613, 0.22307578903144368, 0.22407612889257122, 0.2250764687536988, 0.22607680861482635, 0.2270771484759539, 0.22807748833708144, 0.229077828198209, 0.23007816805933654, 0.23107850792046408, 0.23207884778159163, 0.23307918764271918, 0.23407952750384675, 0.2350798673649743, 0.23608020722610185, 0.2370805470872294, 0.23808088694835694, 0.2390812268094845, 0.24008156667061203, 0.24108190653173958, 0.24208224639286713, 0.2430825862539947, 0.24408292611512225, 0.2450832659762498, 0.24608360583737734, 0.2470839456985049, 0.24808428555963244, 0.24908462542076, 0.25008496528188756, 0.2510853051430151, 0.25208564500414266, 0.2530859848652702, 0.25408632472639775, 0.25508666458752527, 0.25608700444865284, 0.2570873443097804, 0.25808768417090794, 0.2590880240320355, 0.26008836389316303, 0.2610887037542906, 0.2620890436154181, 0.2630893834765457, 0.2640897233376732, 0.2650900631988008, 0.26609040305992837, 0.2670907429210559, 0.26809108278218347, 0.269091422643311, 0.27009176250443856, 0.2710921023655661, 0.27209244222669365, 0.2730927820878212, 0.27409312194894875, 0.2750934618100763, 0.27609380167120384, 0.2770941415323314, 0.27809448139345894, 0.2790948212545865, 0.28009516111571403, 0.2810955009768416, 0.2820958408379691, 0.2830961806990967, 0.2840965205602243, 
0.2850968604213518, 0.28609720028247937, 0.2870975401436069, 0.28809788000473446, 0.289098219865862, 0.29009855972698956, 0.2910988995881171, 0.29209923944924465, 0.2930995793103722, 0.29409991917149975, 0.2951002590326273, 0.29610059889375484, 0.2971009387548824, 0.29810127861600993, 0.2991016184771375, 0.30010195833826503, 0.3011022981993926, 0.3021026380605202, 0.3031029779216477, 0.3041033177827753, 0.3051036576439028, 0.30610399750503037, 0.3071043373661579, 0.30810467722728546, 0.309105017088413, 0.31010535694954056, 0.31110569681066813, 0.31210603667179565, 0.3131063765329232, 0.31410671639405074, 0.3151070562551783, 0.31610739611630584, 0.3171077359774334, 0.31810807583856093, 0.3191084156996885, 0.3201087555608161, 0.3211090954219436, 0.3221094352830712, 0.3231097751441987, 0.32411011500532627, 0.3251104548664538, 0.32611079472758137, 0.3271111345887089, 0.32811147444983646, 0.329111814310964, 0.33011215417209155, 0.33111249403321913, 0.33211283389434665, 0.3331131737554742, 0.33411351361660174, 0.3351138534777293, 0.33611419333885684, 0.3371145331999844, 0.33811487306111193, 0.3391152129222395, 0.3401155527833671, 0.3411158926444946, 0.3421162325056222, 0.3431165723667497, 0.34411691222787727, 0.3451172520890048, 0.34611759195013236, 0.3471179318112599, 0.34811827167238746, 0.34911861153351503, 0.35011895139464255, 0.3511192912557701, 0.35211963111689765, 0.3531199709780252, 0.35412031083915274, 0.3551206507002803, 0.35612099056140784, 0.3571213304225354, 0.358121670283663, 0.3591220101447905, 0.3601223500059181, 0.3611226898670456, 0.3621230297281732, 0.3631233695893007, 0.36412370945042827, 0.3651240493115558, 0.36612438917268336, 0.36712472903381094, 0.36812506889493846, 0.36912540875606603, 0.37012574861719355, 0.3711260884783211, 0.37212642833944864, 0.3731267682005762, 0.37412710806170374, 0.3751274479228313, 0.3761277877839589, 0.3771281276450864, 0.378128467506214, 0.3791288073673415, 0.3801291472284691, 0.3811294870895966, 0.38212982695072417, 
0.3831301668118517, 0.38413050667297927, 0.38513084653410684, 0.38613118639523436, 0.38713152625636194, 0.38813186611748945, 0.38913220597861703, 0.39013254583974455, 0.3911328857008721, 0.39213322556199964, 0.3931335654231272, 0.3941339052842548, 0.3951342451453823, 0.3961345850065099, 0.3971349248676374, 0.398135264728765, 0.3991356045898925, 0.4001359444510201, 0.4011362843121476, 0.40213662417327517, 0.40313696403440274, 0.40413730389553026, 0.40513764375665784, 0.40613798361778536, 0.40713832347891293, 0.40813866334004045, 0.40913900320116803, 0.41013934306229555, 0.4111396829234231, 0.4121400227845507, 0.4131403626456782, 0.4141407025068058, 0.4151410423679333, 0.4161413822290609, 0.4171417220901884, 0.418142061951316, 0.4191424018124435, 0.4201427416735711, 0.42114308153469865, 0.42214342139582617, 0.42314376125695374, 0.42414410111808126, 0.42514444097920884, 0.42614478084033636, 0.42714512070146393, 0.42814546056259145, 0.429145800423719, 0.4301461402848466, 0.4311464801459741, 0.4321468200071017, 0.4331471598682292, 0.4341474997293568, 0.4351478395904843, 0.4361481794516119, 0.4371485193127394, 0.438148859173867, 0.43914919903499455, 0.4401495388961221, 0.44114987875724965, 0.44215021861837717, 0.44315055847950474, 0.44415089834063226, 0.44515123820175984, 0.44615157806288736, 0.44715191792401493, 0.44815225778514245, 0.44915259764627, 0.4501529375073976, 0.4511532773685251, 0.4521536172296527, 0.4531539570907802, 0.4541542969519078, 0.4551546368130353, 0.4561549766741629, 0.4571553165352904, 0.458155656396418, 0.45915599625754555, 0.46015633611867307, 0.46115667597980065, 0.46215701584092816, 0.46315735570205574, 0.46415769556318326, 0.46515803542431083, 0.46615837528543835, 0.46715871514656593, 0.4681590550076935, 0.469159394868821, 0.4701597347299486, 0.4711600745910761, 0.4721604144522037, 0.4731607543133312, 0.4741610941744588, 0.4751614340355863, 0.4761617738967139, 0.47716211375784146, 0.478162453618969, 0.47916279348009655, 0.48016313334122407, 
0.48116347320235164, 0.48216381306347916, 0.48316415292460674, 0.48416449278573426, 0.48516483264686183, 0.4861651725079894, 0.4871655123691169, 0.4881658522302445, 0.489166192091372, 0.4901665319524996, 0.4911668718136271, 0.4921672116747547, 0.4931675515358822, 0.4941678913970098, 0.49516823125813736, 0.4961685711192649, 0.49716891098039245, 0.49816925084152, 0.49916959070264755, 0.5001699305637751, 0.5011702704249026, 0.5021706102860302, 0.5031709501471577, 0.5041712900082853, 0.5051716298694129, 0.5061719697305403, 0.5071723095916679, 0.5081726494527955, 0.5091729893139231, 0.5101733291750505, 0.5111736690361781, 0.5121740088973057, 0.5131743487584333, 0.5141746886195608, 0.5151750284806883, 0.5161753683418159, 0.5171757082029435, 0.518176048064071, 0.5191763879251985, 0.5201767277863261, 0.5211770676474536, 0.5221774075085812, 0.5231777473697088, 0.5241780872308363, 0.5251784270919638, 0.5261787669530914, 0.527179106814219, 0.5281794466753464, 0.529179786536474, 0.5301801263976016, 0.5311804662587292, 0.5321808061198567, 0.5331811459809842, 0.5341814858421118, 0.5351818257032394, 0.5361821655643669, 0.5371825054254944, 0.538182845286622, 0.5391831851477495, 0.5401835250088771, 0.5411838648700047, 0.5421842047311322, 0.5431845445922597, 0.5441848844533873, 0.5451852243145149, 0.5461855641756423, 0.5471859040367699, 0.5481862438978975, 0.5491865837590251, 0.5501869236201526, 0.5511872634812801, 0.5521876033424077, 0.5531879432035353, 0.5541882830646628, 0.5551886229257903, 0.5561889627869179, 0.5571893026480454, 0.558189642509173, 0.5591899823703006, 0.5601903222314281, 0.5611906620925556, 0.5621910019536832, 0.5631913418148108, 0.5641916816759383, 0.5651920215370658, 0.5661923613981934, 0.567192701259321, 0.5681930411204485, 0.569193380981576, 0.5701937208427036, 0.5711940607038312, 0.5721944005649587, 0.5731947404260862, 0.5741950802872138, 0.5751954201483414, 0.5761957600094689, 0.5771960998705965, 0.578196439731724, 0.5791967795928515, 0.5801971194539791, 
0.5811974593151067, 0.5821977991762342, 0.5831981390373617, 0.5841984788984893, 0.5851988187596169, 0.5861991586207445, 0.5871994984818719, 0.5881998383429995, 0.5892001782041271, 0.5902005180652546, 0.5912008579263821, 0.5922011977875097, 0.5932015376486373, 0.5942018775097648, 0.5952022173708924, 0.5962025572320199, 0.5972028970931474, 0.598203236954275, 0.5992035768154026, 0.6002039166765301, 0.6012042565376576, 0.6022045963987852, 0.6032049362599128, 0.6042052761210404, 0.6052056159821678, 0.6062059558432954, 0.607206295704423, 0.6082066355655505, 0.609206975426678, 0.6102073152878056, 0.6112076551489332, 0.6122079950100607, 0.6132083348711883, 0.6142086747323158, 0.6152090145934433, 0.6162093544545709, 0.6172096943156985, 0.618210034176826, 0.6192103740379535, 0.6202107138990811, 0.6212110537602087, 0.6222113936213363, 0.6232117334824637, 0.6242120733435913, 0.6252124132047189, 0.6262127530658465, 0.6272130929269739, 0.6282134327881015, 0.6292137726492291, 0.6302141125103566, 0.6312144523714842, 0.6322147922326117, 0.6332151320937393, 0.6342154719548668, 0.6352158118159944, 0.6362161516771219, 0.6372164915382494, 0.638216831399377, 0.6392171712605046, 0.6402175111216322, 0.6412178509827596, 0.6422181908438872, 0.6432185307050148, 0.6442188705661424, 0.6452192104272698, 0.6462195502883974, 0.647219890149525, 0.6482202300106525, 0.64922056987178, 0.6502209097329076, 0.6512212495940352, 0.6522215894551627, 0.6532219293162903, 0.6542222691774178, 0.6552226090385453, 0.6562229488996729, 0.6572232887608005, 0.658223628621928, 0.6592239684830555, 0.6602243083441831, 0.6612246482053107, 0.6622249880664383, 0.6632253279275657, 0.6642256677886933, 0.6652260076498209, 0.6662263475109484, 0.6672266873720759, 0.6682270272332035, 0.6692273670943311, 0.6702277069554586, 0.6712280468165862, 0.6722283866777137, 0.6732287265388412, 0.6742290663999688, 0.6752294062610964, 0.6762297461222239, 0.6772300859833514, 0.678230425844479, 0.6792307657056066, 0.6802311055667342, 
0.6812314454278616, 0.6822317852889892, 0.6832321251501168, 0.6842324650112444, 0.6852328048723718, 0.6862331447334994, 0.687233484594627, 0.6882338244557545, 0.6892341643168821, 0.6902345041780096, 0.6912348440391372, 0.6922351839002647, 0.6932355237613923, 0.6942358636225198, 0.6952362034836473, 0.6962365433447749, 0.6972368832059025, 0.6982372230670301, 0.6992375629281575, 0.7002379027892851, 0.7012382426504127, 0.7022385825115403, 0.7032389223726677, 0.7042392622337953, 0.7052396020949229, 0.7062399419560504, 0.707240281817178, 0.7082406216783055, 0.7092409615394331, 0.7102413014005606, 0.7112416412616882, 0.7122419811228157, 0.7132423209839432, 0.7142426608450708, 0.7152430007061984, 0.716243340567326, 0.7172436804284534, 0.718244020289581, 0.7192443601507086, 0.7202447000118362, 0.7212450398729636, 0.7222453797340912, 0.7232457195952188, 0.7242460594563463, 0.7252463993174739, 0.7262467391786014, 0.727247079039729, 0.7282474189008565, 0.7292477587619841, 0.7302480986231116, 0.7312484384842391, 0.7322487783453667, 0.7332491182064943, 0.7342494580676219, 0.7352497979287493, 0.7362501377898769, 0.7372504776510045, 0.7382508175121321, 0.7392511573732595, 0.7402514972343871, 0.7412518370955147, 0.7422521769566423, 0.7432525168177698, 0.7442528566788973, 0.7452531965400249, 0.7462535364011524, 0.74725387626228, 0.7482542161234075, 0.749254555984535, 0.7502548958456626, 0.7512552357067902, 0.7522555755679178, 0.7532559154290452, 0.7542562552901728, 0.7552565951513004, 0.756256935012428, 0.7572572748735554, 0.758257614734683, 0.7592579545958106, 0.7602582944569382, 0.7612586343180657, 0.7622589741791932, 0.7632593140403208, 0.7642596539014483, 0.7652599937625759, 0.7662603336237034, 0.767260673484831, 0.7682610133459585, 0.7692613532070861, 0.7702616930682137, 0.7712620329293411, 0.7722623727904687, 0.7732627126515963, 0.7742630525127239, 0.7752633923738513, 0.7762637322349789, 0.7772640720961065, 0.7782644119572341, 0.7792647518183616, 0.7802650916794891, 
0.7812654315406167, 0.7822657714017442, 0.7832661112628718, 0.7842664511239993, 0.7852667909851269, 0.7862671308462544, 0.787267470707382, 0.7882678105685096, 0.789268150429637, 0.7902684902907646, 0.7912688301518922, 0.7922691700130198, 0.7932695098741472, 0.7942698497352748, 0.7952701895964024, 0.79627052945753, 0.7972708693186575, 0.798271209179785, 0.7992715490409126, 0.8002718889020402, 0.8012722287631677, 0.8022725686242952, 0.8032729084854228, 0.8042732483465503, 0.8052735882076779, 0.8062739280688055, 0.807274267929933, 0.8082746077910605, 0.8092749476521881, 0.8102752875133157, 0.8112756273744431, 0.8122759672355707, 0.8132763070966983, 0.8142766469578259, 0.8152769868189534, 0.8162773266800809, 0.8172776665412085, 0.8182780064023361, 0.8192783462634636, 0.8202786861245911, 0.8212790259857187, 0.8222793658468462, 0.8232797057079738, 0.8242800455691014, 0.8252803854302289, 0.8262807252913564, 0.827281065152484, 0.8282814050136116, 0.829281744874739, 0.8302820847358666, 0.8312824245969942, 0.8322827644581218, 0.8332831043192493, 0.8342834441803768, 0.8352837840415044, 0.836284123902632, 0.8372844637637595, 0.838284803624887, 0.8392851434860146, 0.8402854833471421, 0.8412858232082697, 0.8422861630693973, 0.8432865029305248, 0.8442868427916523, 0.8452871826527799, 0.8462875225139075, 0.847287862375035, 0.8482882022361625, 0.8492885420972901, 0.8502888819584177, 0.8512892218195452, 0.8522895616806727, 0.8532899015418003, 0.8542902414029279, 0.8552905812640554, 0.8562909211251829, 0.8572912609863105, 0.858291600847438, 0.8592919407085656, 0.8602922805696932, 0.8612926204308207, 0.8622929602919482, 0.8632933001530758, 0.8642936400142034, 0.8652939798753309, 0.8662943197364584, 0.867294659597586, 0.8682949994587136, 0.8692953393198412, 0.8702956791809686, 0.8712960190420962, 0.8722963589032238, 0.8732966987643513, 0.8742970386254788, 0.8752973784866064, 0.876297718347734, 0.8772980582088615, 0.8782983980699891, 0.8792987379311166, 0.8802990777922441, 
0.8812994176533717, 0.8822997575144993, 0.8833000973756268, 0.8843004372367543, 0.8853007770978819, 0.8863011169590095, 0.8873014568201371, 0.8883017966812645, 0.8893021365423921, 0.8903024764035197, 0.8913028162646472, 0.8923031561257747, 0.8933034959869023, 0.8943038358480299, 0.8953041757091574, 0.8963045155702849, 0.8973048554314125, 0.89830519529254, 0.8993055351536676, 0.9003058750147952, 0.9013062148759227, 0.9023065547370502, 0.9033068945981778, 0.9043072344593054, 0.9053075743204329, 0.9063079141815604, 0.907308254042688, 0.9083085939038156, 0.9093089337649432, 0.9103092736260706, 0.9113096134871982, 0.9123099533483258, 0.9133102932094533, 0.9143106330705808, 0.9153109729317084, 0.916311312792836, 0.9173116526539635, 0.9183119925150911, 0.9193123323762186, 0.9203126722373461, 0.9213130120984737, 0.9223133519596013, 0.9233136918207288, 0.9243140316818563, 0.9253143715429839, 0.9263147114041115, 0.927315051265239, 0.9283153911263665, 0.9293157309874941, 0.9303160708486217, 0.9313164107097492, 0.9323167505708767, 0.9333170904320043, 0.9343174302931319, 0.9353177701542594, 0.936318110015387, 0.9373184498765145, 0.938318789737642, 0.9393191295987696, 0.9403194694598972, 0.9413198093210247, 0.9423201491821522, 0.9433204890432798, 0.9443208289044074, 0.945321168765535, 0.9463215086266624, 0.94732184848779, 0.9483221883489176, 0.9493225282100451, 0.9503228680711726, 0.9513232079323002, 0.9523235477934278, 0.9533238876545553, 0.9543242275156829, 0.9553245673768104, 0.956324907237938, 0.9573252470990655, 0.9583255869601931, 0.9593259268213206, 0.9603262666824481, 0.9613266065435757, 0.9623269464047033, 0.9633272862658309, 0.9643276261269583, 0.9653279659880859, 0.9663283058492135, 0.967328645710341, 0.9683289855714685, 0.9693293254325961, 0.9703296652937237, 0.9713300051548512, 0.9723303450159788, 0.9733306848771063, 0.9743310247382339, 0.9753313645993614, 0.976331704460489, 0.9773320443216165, 0.978332384182744, 0.9793327240438716, 0.9803330639049992, 
0.9813334037661268, 0.9823337436272542, 0.9833340834883818, 0.9843344233495094, 0.985334763210637, 0.9863351030717644, 0.987335442932892, 0.9883357827940196, 0.9893361226551471, 0.9903364625162747, 0.9913368023774022, 0.9923371422385298, 0.9933374820996573, 0.9943378219607849, 0.9953381618219124, 0.99633850168304, 0.9973388415441675, 0.9983391814052951, 0.9993395212664227, 1.0003398611275502, 1.0013402009886778, 1.0023405408498052, 1.0033408807109327, 1.0043412205720603, 1.005341560433188, 1.0063419002943155, 1.007342240155443, 1.0083425800165706, 1.0093429198776982, 1.0103432597388258, 1.0113435995999531, 1.0123439394610807, 1.0133442793222083, 1.0143446191833358, 1.0153449590444634, 1.016345298905591, 1.0173456387667186, 1.0183459786278461, 1.0193463184889737, 1.020346658350101, 1.0213469982112287, 1.0223473380723562, 1.0233476779334838, 1.0243480177946114, 1.025348357655739, 1.0263486975168665, 1.027349037377994, 1.0283493772391217, 1.029349717100249, 1.0303500569613766, 1.0313503968225042, 1.0323507366836318, 1.0333510765447593, 1.034351416405887, 1.0353517562670145, 1.036352096128142, 1.0373524359892696, 1.038352775850397, 1.0393531157115246, 1.0403534555726521, 1.0413537954337797, 1.0423541352949073, 1.0433544751560349, 1.0443548150171624, 1.04535515487829, 1.0463554947394176, 1.047355834600545, 1.0483561744616725, 1.0493565143228, 1.0503568541839277, 1.0513571940450552, 1.0523575339061828, 1.0533578737673104, 1.054358213628438, 1.0553585534895655, 1.0563588933506929, 1.0573592332118205, 1.058359573072948, 1.0593599129340756, 1.0603602527952032, 1.0613605926563308, 1.0623609325174583, 1.063361272378586, 1.0643616122397135, 1.0653619521008408, 1.0663622919619684, 1.067362631823096, 1.0683629716842236, 1.0693633115453511, 1.0703636514064787, 1.0713639912676063, 1.0723643311287339, 1.0733646709898614, 1.0743650108509888, 1.0753653507121164, 1.076365690573244, 1.0773660304343715, 1.078366370295499, 1.0793667101566267, 1.0803670500177542, 1.0813673898788818, 
1.0823677297400094, 1.0833680696011367, 1.0843684094622643, 1.085368749323392, 1.0863690891845195, 1.087369429045647, 1.0883697689067746, 1.0893701087679022, 1.0903704486290298, 1.0913707884901573, 1.0923711283512847, 1.0933714682124123, 1.0943718080735398, 1.0953721479346674, 1.096372487795795, 1.0973728276569226, 1.0983731675180501, 1.0993735073791777, 1.1003738472403053, 1.1013741871014326, 1.1023745269625602, 1.1033748668236878, 1.1043752066848154, 1.105375546545943, 1.1063758864070705, 1.107376226268198, 1.1083765661293257, 1.1093769059904532, 1.1103772458515806, 1.1113775857127082, 1.1123779255738357, 1.1133782654349633, 1.114378605296091, 1.1153789451572185, 1.116379285018346, 1.1173796248794736, 1.1183799647406012, 1.1193803046017285, 1.1203806444628561, 1.1213809843239837, 1.1223813241851113, 1.1233816640462388, 1.1243820039073664, 1.125382343768494, 1.1263826836296216, 1.1273830234907491, 1.1283833633518765, 1.129383703213004, 1.1303840430741317, 1.1313843829352592, 1.1323847227963868, 1.1333850626575144, 1.134385402518642, 1.1353857423797695, 1.136386082240897, 1.1373864221020245, 1.138386761963152, 1.1393871018242796, 1.1403874416854072, 1.1413877815465348, 1.1423881214076623, 1.14338846126879, 1.1443888011299175, 1.145389140991045, 1.1463894808521724, 1.1473898207133, 1.1483901605744276, 1.1493905004355551, 1.1503908402966827, 1.1513911801578103, 1.1523915200189379, 1.1533918598800654, 1.154392199741193, 1.1553925396023204, 1.156392879463448, 1.1573932193245755, 1.158393559185703, 1.1593938990468307, 1.1603942389079582, 1.1613945787690858, 1.1623949186302134, 1.163395258491341, 1.1643955983524683, 1.1653959382135959, 1.1663962780747235, 1.167396617935851, 1.1683969577969786, 1.1693972976581062, 1.1703976375192338, 1.1713979773803613, 1.172398317241489, 1.1733986571026163, 1.1743989969637438, 1.1753993368248714, 1.176399676685999, 1.1774000165471266, 1.1784003564082541, 1.1794006962693817, 1.1804010361305093, 1.1814013759916369, 1.1824017158527642, 
1.1834020557138918, 1.1844023955750194, 1.185402735436147, 1.1864030752972745, 1.187403415158402, 1.1884037550195297, 1.1894040948806572, 1.1904044347417848, 1.1914047746029122, 1.1924051144640397, 1.1934054543251673, 1.194405794186295, 1.1954061340474225, 1.19640647390855, 1.1974068137696776, 1.1984071536308052, 1.1994074934919328, 1.2004078333530601, 1.2014081732141877, 1.2024085130753153, 1.2034088529364428, 1.2044091927975704, 1.205409532658698, 1.2064098725198256, 1.2074102123809531, 1.2084105522420807, 1.209410892103208, 1.2104112319643356, 1.2114115718254632, 1.2124119116865908, 1.2134122515477184, 1.214412591408846, 1.2154129312699735, 1.216413271131101, 1.2174136109922287, 1.218413950853356, 1.2194142907144836, 1.2204146305756112, 1.2214149704367387, 1.2224153102978663, 1.223415650158994, 1.2244159900201215, 1.225416329881249, 1.2264166697423766, 1.227417009603504, 1.2284173494646315, 1.2294176893257591, 1.2304180291868867, 1.2314183690480143, 1.2324187089091418, 1.2334190487702694, 1.234419388631397, 1.2354197284925246, 1.236420068353652, 1.2374204082147795, 1.238420748075907, 1.2394210879370346, 1.2404214277981622, 1.2414217676592898, 1.2424221075204174, 1.243422447381545, 1.2444227872426725, 1.2454231271037999, 1.2464234669649275, 1.247423806826055, 1.2484241466871826, 1.2494244865483102, 1.2504248264094378, 1.2514251662705653, 1.252425506131693, 1.2534258459928205, 1.2544261858539478, 1.2554265257150754, 1.256426865576203, 1.2574272054373306, 1.2584275452984581, 1.2594278851595857, 1.2604282250207133, 1.2614285648818409, 1.2624289047429684, 1.2634292446040958, 1.2644295844652234, 1.265429924326351, 1.2664302641874785, 1.267430604048606, 1.2684309439097337, 1.2694312837708612, 1.2704316236319888, 1.2714319634931164, 1.2724323033542437, 1.2734326432153713, 1.2744329830764989, 1.2754333229376265, 1.276433662798754, 1.2774340026598816, 1.2784343425210092, 1.2794346823821368, 1.2804350222432643, 1.2814353621043917, 1.2824357019655193, 1.2834360418266468, 
1.2844363816877744, 1.285436721548902, 1.2864370614100296, 1.2874374012711571, 1.2884377411322847, 1.289438080993412, 1.2904384208545396, 1.2914387607156672, 1.2924391005767948, 1.2934394404379224, 1.29443978029905, 1.2954401201601775, 1.296440460021305, 1.2974407998824327, 1.29844113974356, 1.2994414796046876, 1.3004418194658152, 1.3014421593269427, 1.3024424991880703, 1.3034428390491979, 1.3044431789103255, 1.305443518771453, 1.3064438586325806, 1.307444198493708, 1.3084445383548355, 1.3094448782159631, 1.3104452180770907, 1.3114455579382183, 1.3124458977993458, 1.3134462376604734, 1.314446577521601, 1.3154469173827286, 1.316447257243856, 1.3174475971049835, 1.318447936966111, 1.3194482768272386, 1.3204486166883662, 1.3214489565494938, 1.3224492964106214, 1.323449636271749, 1.3244499761328765, 1.3254503159940039, 1.3264506558551314, 1.327450995716259, 1.3284513355773866, 1.3294516754385142, 1.3304520152996417, 1.3314523551607693, 1.332452695021897, 1.3334530348830245, 1.3344533747441518, 1.3354537146052794, 1.336454054466407, 1.3374543943275345, 1.3384547341886621, 1.3394550740497897, 1.3404554139109173, 1.3414557537720448, 1.3424560936331724, 1.3434564334942998, 1.3444567733554273, 1.345457113216555, 1.3464574530776825, 1.34745779293881, 1.3484581327999376, 1.3494584726610652, 1.3504588125221928, 1.3514591523833204, 1.3524594922444477, 1.3534598321055753, 1.3544601719667029, 1.3554605118278304, 1.356460851688958, 1.3574611915500856, 1.3584615314112132, 1.3594618712723407, 1.3604622111334683, 1.3614625509945957, 1.3624628908557233, 1.3634632307168508, 1.3644635705779784, 1.365463910439106, 1.3664642503002336, 1.3674645901613611, 1.3684649300224887, 1.3694652698836163, 1.3704656097447436, 1.3714659496058712, 1.3724662894669988, 1.3734666293281264, 1.374466969189254, 1.3754673090503815, 1.376467648911509, 1.3774679887726367, 1.3784683286337642, 1.3794686684948916, 1.3804690083560192, 1.3814693482171467, 1.3824696880782743, 1.3834700279394019, 1.3844703678005295, 
1.385470707661657, 1.3864710475227846, 1.3874713873839122, 1.3884717272450395, 1.389472067106167, 1.3904724069672947, 1.3914727468284223, 1.3924730866895498, 1.3934734265506774, 1.394473766411805, 1.3954741062729326, 1.3964744461340601, 1.3974747859951875, 1.398475125856315, 1.3994754657174426, 1.4004758055785702, 1.4014761454396978, 1.4024764853008254, 1.403476825161953, 1.4044771650230805, 1.405477504884208, 1.4064778447453354, 1.407478184606463, 1.4084785244675906, 1.4094788643287182, 1.4104792041898457, 1.4114795440509733, 1.4124798839121009, 1.4134802237732285, 1.414480563634356, 1.4154809034954834, 1.416481243356611, 1.4174815832177385, 1.4184819230788661, 1.4194822629399937, 1.4204826028011213, 1.4214829426622488, 1.4224832825233764, 1.423483622384504, 1.4244839622456313, 1.425484302106759, 1.4264846419678865, 1.427484981829014, 1.4284853216901416, 1.4294856615512692, 1.4304860014123968, 1.4314863412735244, 1.432486681134652, 1.4334870209957793, 1.4344873608569069, 1.4354877007180344, 1.436488040579162, 1.4374883804402896, 1.4384887203014172, 1.4394890601625447, 1.4404894000236723, 1.4414897398848, 1.4424900797459272, 1.4434904196070548, 1.4444907594681824, 1.44549109932931, 1.4464914391904375, 1.4474917790515651, 1.4484921189126927, 1.4494924587738203, 1.4504927986349478, 1.4514931384960752, 1.4524934783572028, 1.4534938182183303, 1.454494158079458, 1.4554944979405855, 1.456494837801713, 1.4574951776628406, 1.4584955175239682, 1.4594958573850958, 1.4604961972462231, 1.4614965371073507, 1.4624968769684783, 1.4634972168296059, 1.4644975566907334, 1.465497896551861, 1.4664982364129886, 1.4674985762741162, 1.4684989161352437, 1.469499255996371, 1.4704995958574987, 1.4714999357186263, 1.4725002755797538, 1.4735006154408814, 1.474500955302009, 1.4755012951631366, 1.4765016350242641, 1.4775019748853917, 1.478502314746519, 1.4795026546076466, 1.4805029944687742, 1.4815033343299018, 1.4825036741910294, 1.483504014052157, 1.4845043539132845, 1.485504693774412, 
1.4865050336355397, 1.487505373496667, 1.4885057133577946, 1.4895060532189222, 1.4905063930800497, 1.4915067329411773, 1.4925070728023049, 1.4935074126634325, 1.49450775252456, 1.4955080923856876, 1.496508432246815, 1.4975087721079425, 1.49850911196907, 1.4995094518301977, 1.5005097916913253, 1.5015101315524528, 1.5025104714135804, 1.503510811274708, 1.5045111511358356, 1.505511490996963, 1.5065118308580905, 1.507512170719218, 1.5085125105803456, 1.5095128504414732, 1.5105131903026008, 1.5115135301637284, 1.512513870024856, 1.5135142098859835, 1.5145145497471109, 1.5155148896082384, 1.516515229469366, 1.5175155693304936, 1.5185159091916212, 1.5195162490527487, 1.5205165889138763, 1.5215169287750039, 1.5225172686361315, 1.5235176084972588, 1.5245179483583864, 1.525518288219514, 1.5265186280806415, 1.5275189679417691, 1.5285193078028967, 1.5295196476640243, 1.5305199875251518, 1.5315203273862794, 1.5325206672474068, 1.5335210071085343, 1.534521346969662, 1.5355216868307895, 1.536522026691917, 1.5375223665530446, 1.5385227064141722, 1.5395230462752998, 1.5405233861364274, 1.5415237259975547, 1.5425240658586823, 1.5435244057198099, 1.5445247455809374, 1.545525085442065, 1.5465254253031926, 1.5475257651643202, 1.5485261050254477, 1.5495264448865753, 1.5505267847477027, 1.5515271246088302, 1.5525274644699578, 1.5535278043310854, 1.554528144192213, 1.5555284840533405, 1.5565288239144681, 1.5575291637755957, 1.5585295036367233, 1.5595298434978506, 1.5605301833589782, 1.5615305232201058, 1.5625308630812333, 1.563531202942361, 1.5645315428034885, 1.565531882664616, 1.5665322225257436, 1.5675325623868712, 1.5685329022479986, 1.5695332421091261, 1.5705335819702537, 1.5715339218313813, 1.5725342616925089, 1.5735346015536364, 1.574534941414764, 1.5755352812758916, 1.5765356211370192, 1.5775359609981465, 1.578536300859274, 1.5795366407204017, 1.5805369805815292, 1.5815373204426568, 1.5825376603037844, 1.583538000164912, 1.5845383400260395, 1.5855386798871671, 1.5865390197482945, 
1.587539359609422, 1.5885396994705496, 1.5895400393316772, 1.5905403791928048, 1.5915407190539324, 1.59254105891506, 1.5935413987761875, 1.594541738637315, 1.5955420784984424, 1.59654241835957, 1.5975427582206976, 1.5985430980818252, 1.5995434379429527, 1.6005437778040803, 1.6015441176652079, 1.6025444575263355, 1.603544797387463, 1.6045451372485904, 1.605545477109718, 1.6065458169708455, 1.607546156831973, 1.6085464966931007, 1.6095468365542283, 1.6105471764153558, 1.6115475162764834, 1.612547856137611, 1.6135481959987383, 1.614548535859866, 1.6155488757209935, 1.616549215582121, 1.6175495554432486, 1.6185498953043762, 1.6195502351655038, 1.6205505750266314, 1.621550914887759, 1.6225512547488863, 1.6235515946100139, 1.6245519344711414, 1.625552274332269, 1.6265526141933966, 1.6275529540545242, 1.6285532939156517, 1.6295536337767793, 1.6305539736379069, 1.6315543134990342, 1.6325546533601618, 1.6335549932212894, 1.634555333082417, 1.6355556729435445, 1.6365560128046721, 1.6375563526657997, 1.6385566925269273, 1.6395570323880548, 1.6405573722491822, 1.6415577121103098, 1.6425580519714373, 1.643558391832565, 1.6445587316936925, 1.64555907155482, 1.6465594114159476, 1.6475597512770752, 1.6485600911382028, 1.6495604309993301, 1.6505607708604577, 1.6515611107215853, 1.6525614505827129, 1.6535617904438404, 1.654562130304968, 1.6555624701660956, 1.6565628100272232, 1.6575631498883507, 1.658563489749478, 1.6595638296106057, 1.6605641694717332, 1.6615645093328608, 1.6625648491939884, 1.663565189055116, 1.6645655289162435, 1.6655658687773711, 1.6665662086384987, 1.667566548499626, 1.6685668883607536, 1.6695672282218812, 1.6705675680830088, 1.6715679079441363, 1.672568247805264, 1.6735685876663915, 1.674568927527519, 1.6755692673886466, 1.676569607249774, 1.6775699471109016, 1.6785702869720291, 1.6795706268331567, 1.6805709666942843, 1.6815713065554119, 1.6825716464165394, 1.683571986277667, 1.6845723261387946, 1.685572665999922, 1.6865730058610495, 1.687573345722177, 
1.6885736855833047, 1.6895740254444322, 1.6905743653055598, 1.6915747051666874, 1.692575045027815, 1.6935753848889425, 1.69457572475007, 1.6955760646111975, 1.696576404472325, 1.6975767443334526, 1.6985770841945802, 1.6995774240557078, 1.7005777639168353, 1.701578103777963, 1.7025784436390905, 1.7035787835002179, 1.7045791233613454, 1.705579463222473, 1.7065798030836006, 1.7075801429447282, 1.7085804828058557, 1.7095808226669833, 1.7105811625281109, 1.7115815023892385, 1.7125818422503658, 1.7135821821114934, 1.714582521972621, 1.7155828618337485, 1.716583201694876, 1.7175835415560037, 1.7185838814171313, 1.7195842212782588, 1.7205845611393864, 1.7215849010005138, 1.7225852408616413, 1.723585580722769, 1.7245859205838965, 1.725586260445024, 1.7265866003061516, 1.7275869401672792, 1.7285872800284068, 1.7295876198895344, 1.7305879597506617, 1.7315882996117893, 1.7325886394729169, 1.7335889793340444, 1.734589319195172, 1.7355896590562996, 1.7365899989174272, 1.7375903387785547, 1.7385906786396823, 1.7395910185008097, 1.7405913583619372, 1.7415916982230648, 1.7425920380841924, 1.74359237794532, 1.7445927178064475, 1.745593057667575, 1.7465933975287027, 1.7475937373898303, 1.7485940772509576, 1.7495944171120852, 1.7505947569732128, 1.7515950968343403, 1.752595436695468, 1.7535957765565955, 1.754596116417723, 1.7555964562788506, 1.7565967961399782, 1.7575971360011056, 1.7585974758622331, 1.7595978157233607, 1.7605981555844883, 1.7615984954456159, 1.7625988353067434, 1.763599175167871, 1.7645995150289986, 1.7655998548901262, 1.7666001947512535, 1.767600534612381, 1.7686008744735087, 1.7696012143346362, 1.7706015541957638, 1.7716018940568914, 1.772602233918019, 1.7736025737791465, 1.7746029136402741, 1.7756032535014015, 1.776603593362529, 1.7776039332236566, 1.7786042730847842, 1.7796046129459118, 1.7806049528070393, 1.781605292668167, 1.7826056325292945, 1.783605972390422, 1.7846063122515494, 1.785606652112677, 1.7866069919738046, 1.7876073318349321, 1.7886076716960597, 
1.7896080115571873, 1.7906083514183149, 1.7916086912794424, 1.7926090311405698, 1.7936093710016974, 1.794609710862825, 1.7956100507239525, 1.79661039058508, 1.7976107304462077, 1.7986110703073352, 1.7996114101684628, 1.8006117500295904, 1.8016120898907177, 1.8026124297518453, 1.803612769612973, 1.8046131094741005, 1.805613449335228, 1.8066137891963556, 1.8076141290574832, 1.8086144689186108, 1.8096148087797383, 1.8106151486408657, 1.8116154885019933, 1.8126158283631209, 1.8136161682242484, 1.814616508085376, 1.8156168479465036, 1.8166171878076312, 1.8176175276687587, 1.8186178675298863, 1.8196182073910137, 1.8206185472521412, 1.8216188871132688, 1.8226192269743964, 1.823619566835524, 1.8246199066966515, 1.825620246557779, 1.8266205864189067, 1.8276209262800343, 1.8286212661411616, 1.8296216060022892, 1.8306219458634168, 1.8316222857245443, 1.832622625585672, 1.8336229654467995, 1.834623305307927, 1.8356236451690546, 1.8366239850301822, 1.8376243248913096, 1.8386246647524371, 1.8396250046135647, 1.8406253444746923, 1.8416256843358199, 1.8426260241969474, 1.843626364058075, 1.8446267039192026, 1.8456270437803302, 1.8466273836414575, 1.847627723502585, 1.8486280633637127, 1.8496284032248402, 1.8506287430859678, 1.8516290829470954, 1.852629422808223, 1.8536297626693505, 1.854630102530478, 1.8556304423916055, 1.856630782252733, 1.8576311221138606, 1.8586314619749882, 1.8596318018361158, 1.8606321416972433, 1.861632481558371, 1.8626328214194985, 1.863633161280626, 1.8646335011417534, 1.865633841002881, 1.8666341808640086, 1.8676345207251361, 1.8686348605862637, 1.8696352004473913, 1.8706355403085189, 1.8716358801696464, 1.872636220030774, 1.8736365598919014, 1.874636899753029, 1.8756372396141565, 1.876637579475284, 1.8776379193364117, 1.8786382591975392, 1.8796385990586668, 1.8806389389197944, 1.881639278780922, 1.8826396186420493, 1.883639958503177, 1.8846402983643045, 1.885640638225432, 1.8866409780865596, 1.8876413179476872, 1.8886416578088148, 1.8896419976699423, 
1.89064233753107, 1.8916426773921973, 1.8926430172533248, 1.8936433571144524, 1.89464369697558, 1.8956440368367076, 1.8966443766978351, 1.8976447165589627, 1.8986450564200903, 1.8996453962812179, 1.9006457361423452, 1.9016460760034728, 1.9026464158646004, 1.903646755725728, 1.9046470955868555, 1.905647435447983, 1.9066477753091107, 1.9076481151702382, 1.9086484550313658, 1.9096487948924932, 1.9106491347536207, 1.9116494746147483, 1.912649814475876, 1.9136501543370035, 1.914650494198131, 1.9156508340592586, 1.9166511739203862, 1.9176515137815138, 1.9186518536426411, 1.9196521935037687, 1.9206525333648963, 1.9216528732260238, 1.9226532130871514, 1.923653552948279, 1.9246538928094066, 1.9256542326705341, 1.9266545725316617, 1.927654912392789, 1.9286552522539167, 1.9296555921150442, 1.9306559319761718, 1.9316562718372994, 1.932656611698427, 1.9336569515595545, 1.934657291420682, 1.9356576312818097, 1.936657971142937, 1.9376583110040646, 1.9386586508651922, 1.9396589907263198, 1.9406593305874473, 1.941659670448575, 1.9426600103097025, 1.94366035017083, 1.9446606900319576, 1.945661029893085, 1.9466613697542126, 1.9476617096153401, 1.9486620494764677, 1.9496623893375953, 1.9506627291987229, 1.9516630690598504, 1.952663408920978, 1.9536637487821056, 1.954664088643233, 1.9556644285043605, 1.956664768365488, 1.9576651082266157, 1.9586654480877432, 1.9596657879488708, 1.9606661278099984, 1.961666467671126, 1.9626668075322535, 1.9636671473933809, 1.9646674872545085, 1.965667827115636, 1.9666681669767636, 1.9676685068378912, 1.9686688466990188, 1.9696691865601463, 1.970669526421274, 1.9716698662824015, 1.9726702061435288, 1.9736705460046564, 1.974670885865784, 1.9756712257269116, 1.9766715655880391, 1.9776719054491667, 1.9786722453102943, 1.9796725851714219, 1.9806729250325494, 1.9816732648936768, 1.9826736047548044, 1.983673944615932, 1.9846742844770595, 1.985674624338187, 1.9866749641993147, 1.9876753040604422, 1.9886756439215698, 1.9896759837826974, 1.9906763236438247, 
1.9916766635049523, 1.99267700336608, 1.9936773432272075, 1.994677683088335, 1.9956780229494626, 1.9966783628105902, 1.9976787026717178, 1.9986790425328453, 1.9996793823939727, 2.0006797222551005, 2.001680062116228, 2.0026804019773556, 2.003680741838483, 2.0046810816996103, 2.005681421560738, 2.0066817614218655, 2.007682101282993, 2.0086824411441206, 2.009682781005248, 2.010683120866376, 2.0116834607275034, 2.012683800588631, 2.0136841404497585, 2.014684480310886, 2.0156848201720137, 2.0166851600331412, 2.017685499894269, 2.0186858397553964, 2.019686179616524, 2.0206865194776515, 2.021686859338779, 2.0226871991999062, 2.023687539061034, 2.0246878789221614, 2.025688218783289, 2.0266885586444165, 2.027688898505544, 2.0286892383666717, 2.0296895782277993, 2.030689918088927, 2.0316902579500544, 2.032690597811182, 2.0336909376723096, 2.034691277533437, 2.0356916173945647, 2.0366919572556923, 2.03769229711682, 2.0386926369779474, 2.039692976839075, 2.040693316700202, 2.0416936565613297, 2.0426939964224573, 2.043694336283585, 2.0446946761447125, 2.04569501600584, 2.0466953558669676, 2.047695695728095, 2.0486960355892228, 2.0496963754503503, 2.050696715311478, 2.0516970551726055, 2.052697395033733, 2.0536977348948606, 2.054698074755988, 2.0556984146171158, 2.0566987544782434, 2.0576990943393705, 2.058699434200498, 2.0596997740616256, 2.060700113922753, 2.061700453783881, 2.0627007936450084, 2.063701133506136, 2.0647014733672635, 2.065701813228391, 2.0667021530895187, 2.0677024929506462, 2.068702832811774, 2.0697031726729014, 2.070703512534029, 2.0717038523951565, 2.072704192256284, 2.0737045321174117, 2.0747048719785393, 2.0757052118396664, 2.076705551700794, 2.0777058915619215, 2.078706231423049, 2.0797065712841767, 2.0807069111453043, 2.081707251006432, 2.0827075908675594, 2.083707930728687, 2.0847082705898146, 2.085708610450942, 2.0867089503120697, 2.0877092901731973, 2.088709630034325, 2.0897099698954524, 2.09071030975658, 2.0917106496177076, 2.092710989478835, 
2.0937113293399623, 2.09471166920109, 2.0957120090622174, 2.096712348923345, 2.0977126887844726, 2.0987130286456, 2.0997133685067277, 2.1007137083678553, 2.101714048228983, 2.1027143880901105, 2.103714727951238, 2.1047150678123656, 2.105715407673493, 2.1067157475346208, 2.1077160873957483, 2.108716427256876, 2.1097167671180035, 2.110717106979131, 2.111717446840258, 2.1127177867013858, 2.1137181265625133, 2.114718466423641, 2.1157188062847685, 2.116719146145896, 2.1177194860070236, 2.118719825868151, 2.119720165729279, 2.1207205055904064, 2.121720845451534, 2.1227211853126615, 2.123721525173789, 2.1247218650349167, 2.1257222048960442, 2.126722544757172, 2.1277228846182994, 2.128723224479427, 2.129723564340554, 2.1307239042016817, 2.1317242440628092, 2.132724583923937, 2.1337249237850644, 2.134725263646192, 2.1357256035073195, 2.136725943368447, 2.1377262832295747, 2.1387266230907023, 2.13972696295183, 2.1407273028129574, 2.141727642674085, 2.1427279825352126, 2.14372832239634, 2.1447286622574677, 2.1457290021185953, 2.146729341979723, 2.14772968184085, 2.1487300217019776, 2.149730361563105, 2.1507307014242327, 2.1517310412853603, 2.152731381146488, 2.1537317210076155, 2.154732060868743, 2.1557324007298706, 2.156732740590998, 2.1577330804521258, 2.1587334203132533, 2.159733760174381, 2.1607341000355085, 2.161734439896636, 2.1627347797577636, 2.163735119618891, 2.1647354594800188, 2.165735799341146, 2.1667361392022735, 2.167736479063401, 2.1687368189245286, 2.169737158785656, 2.170737498646784, 2.1717378385079114, 2.172738178369039, 2.1737385182301665, 2.174738858091294, 2.1757391979524217, 2.1767395378135492, 2.177739877674677, 2.1787402175358044, 2.179740557396932, 2.1807408972580595, 2.181741237119187, 2.1827415769803147, 2.183741916841442, 2.1847422567025694, 2.185742596563697, 2.1867429364248245, 2.187743276285952, 2.1887436161470797, 2.1897439560082073, 2.190744295869335, 2.1917446357304624, 2.19274497559159, 2.1937453154527176, 2.194745655313845, 
2.1957459951749727, 2.1967463350361003, 2.197746674897228, 2.1987470147583554, 2.199747354619483, 2.2007476944806106, 2.2017480343417377, 2.2027483742028653, 2.203748714063993, 2.2047490539251204, 2.205749393786248, 2.2067497336473756, 2.207750073508503, 2.2087504133696307, 2.2097507532307583, 2.210751093091886, 2.2117514329530135, 2.212751772814141, 2.2137521126752686, 2.214752452536396, 2.2157527923975238, 2.2167531322586513, 2.217753472119779, 2.2187538119809065, 2.2197541518420336, 2.220754491703161, 2.2217548315642888, 2.2227551714254163, 2.223755511286544, 2.2247558511476715, 2.225756191008799, 2.2267565308699266, 2.227756870731054, 2.228757210592182, 2.2297575504533094, 2.230757890314437, 2.2317582301755645, 2.232758570036692, 2.2337589098978197, 2.2347592497589472, 2.235759589620075, 2.2367599294812024, 2.2377602693423295, 2.238760609203457, 2.2397609490645847, 2.2407612889257122, 2.24176162878684, 2.2427619686479674, 2.243762308509095, 2.2447626483702225, 2.24576298823135, 2.2467633280924777, 2.2477636679536053, 2.248764007814733, 2.2497643476758604, 2.250764687536988, 2.2517650273981156, 2.252765367259243, 2.2537657071203707, 2.2547660469814983, 2.2557663868426254, 2.256766726703753, 2.2577670665648806, 2.258767406426008, 2.2597677462871357, 2.2607680861482633, 2.261768426009391, 2.2627687658705185, 2.263769105731646, 2.2647694455927736, 2.265769785453901, 2.2667701253150287, 2.2677704651761563, 2.268770805037284, 2.2697711448984115, 2.270771484759539, 2.2717718246206666, 2.272772164481794, 2.2737725043429213, 2.274772844204049, 2.2757731840651765, 2.276773523926304, 2.2777738637874316, 2.278774203648559, 2.279774543509687, 2.2807748833708144, 2.281775223231942, 2.2827755630930695, 2.283775902954197, 2.2847762428153247, 2.2857765826764522, 2.28677692253758, 2.2877772623987074, 2.288777602259835, 2.2897779421209625, 2.29077828198209, 2.2917786218432172, 2.292778961704345, 2.2937793015654724, 2.2947796414266, 2.2957799812877275, 2.296780321148855, 
2.2977806610099827, 2.2987810008711103, 2.299781340732238, 2.3007816805933654, 2.301782020454493, 2.3027823603156206, 2.303782700176748, 2.3047830400378757, 2.3057833798990033, 2.306783719760131, 2.3077840596212584, 2.308784399482386, 2.309784739343513, 2.3107850792046407, 2.3117854190657683, 2.312785758926896, 2.3137860987880234, 2.314786438649151, 2.3157867785102786, 2.316787118371406, 2.3177874582325337, 2.3187877980936613, 2.319788137954789, 2.3207884778159165, 2.321788817677044, 2.3227891575381716, 2.323789497399299, 2.3247898372604268, 2.3257901771215543, 2.326790516982682, 2.327790856843809, 2.3287911967049366, 2.329791536566064, 2.3307918764271918, 2.3317922162883193, 2.332792556149447, 2.3337928960105745, 2.334793235871702, 2.3357935757328296, 2.336793915593957, 2.337794255455085, 2.3387945953162124, 2.33979493517734, 2.3407952750384675, 2.341795614899595, 2.3427959547607227, 2.3437962946218502, 2.344796634482978, 2.345796974344105, 2.3467973142052325, 2.34779765406636, 2.3487979939274877, 2.3497983337886152, 2.350798673649743, 2.3517990135108704, 2.352799353371998, 2.3537996932331255, 2.354800033094253, 2.3558003729553807, 2.3568007128165083, 2.357801052677636, 2.3588013925387634, 2.359801732399891, 2.3608020722610186, 2.361802412122146, 2.3628027519832737, 2.363803091844401, 2.3648034317055284, 2.365803771566656, 2.3668041114277836, 2.367804451288911, 2.3688047911500387, 2.3698051310111663, 2.370805470872294, 2.3718058107334214, 2.372806150594549, 2.3738064904556766, 2.374806830316804, 2.3758071701779317, 2.3768075100390593, 2.377807849900187, 2.3788081897613145, 2.379808529622442, 2.3808088694835696, 2.3818092093446968, 2.3828095492058243, 2.383809889066952, 2.3848102289280795, 2.385810568789207, 2.3868109086503346, 2.387811248511462, 2.38881158837259, 2.3898119282337174, 2.390812268094845, 2.3918126079559725, 2.3928129478171, 2.3938132876782277, 2.3948136275393552, 2.395813967400483, 2.3968143072616104, 2.397814647122738, 2.3988149869838655, 
2.3998153268449927, 2.4008156667061202, 2.401816006567248, 2.4028163464283754, 2.403816686289503, 2.4048170261506305, 2.405817366011758, 2.4068177058728857, 2.4078180457340133, 2.408818385595141, 2.4098187254562684, 2.410819065317396, 2.4118194051785236, 2.412819745039651, 2.4138200849007787, 2.4148204247619063, 2.415820764623034, 2.4168211044841614, 2.4178214443452886, 2.418821784206416, 2.4198221240675437, 2.4208224639286713, 2.421822803789799, 2.4228231436509264, 2.423823483512054, 2.4248238233731816, 2.425824163234309, 2.4268245030954367, 2.4278248429565643, 2.428825182817692, 2.4298255226788195, 2.430825862539947, 2.4318262024010746, 2.432826542262202, 2.4338268821233298, 2.4348272219844573, 2.4358275618455845, 2.436827901706712, 2.4378282415678396, 2.438828581428967, 2.4398289212900948, 2.4408292611512223, 2.44182960101235, 2.4428299408734775, 2.443830280734605, 2.4448306205957326, 2.44583096045686, 2.446831300317988, 2.4478316401791154, 2.448831980040243, 2.4498323199013705, 2.450832659762498, 2.4518329996236257, 2.4528333394847532, 2.4538336793458804, 2.454834019207008, 2.4558343590681355, 2.456834698929263, 2.4578350387903907, 2.4588353786515182, 2.459835718512646, 2.4608360583737734, 2.461836398234901, 2.4628367380960285, 2.463837077957156, 2.4648374178182837, 2.4658377576794113, 2.466838097540539, 2.4678384374016664, 2.468838777262794, 2.4698391171239216, 2.470839456985049, 2.4718397968461763, 2.472840136707304, 2.4738404765684314, 2.474840816429559, 2.4758411562906866, 2.476841496151814, 2.4778418360129417, 2.4788421758740693, 2.479842515735197, 2.4808428555963244, 2.481843195457452, 2.4828435353185796, 2.483843875179707, 2.4848442150408347, 2.4858445549019623, 2.48684489476309, 2.4878452346242175, 2.488845574485345, 2.489845914346472, 2.4908462542075998, 2.4918465940687273, 2.492846933929855, 2.4938472737909825, 2.49484761365211, 2.4958479535132376, 2.496848293374365, 2.4978486332354928, 2.4988489730966204, 2.499849312957748, 2.5008496528188755, 
2.501849992680003, 2.5028503325411307, 2.5038506724022582, 2.504851012263386, 2.5058513521245134, 2.506851691985641, 2.507852031846768, 2.5088523717078957, 2.5098527115690232, 2.510853051430151, 2.5118533912912784, 2.512853731152406, 2.5138540710135335, 2.514854410874661, 2.5158547507357887, 2.5168550905969163, 2.517855430458044, 2.5188557703191714, 2.519856110180299, 2.5208564500414266, 2.521856789902554, 2.5228571297636817, 2.5238574696248093, 2.524857809485937, 2.525858149347064, 2.5268584892081916, 2.527858829069319, 2.5288591689304467, 2.5298595087915743, 2.530859848652702, 2.5318601885138294, 2.532860528374957, 2.5338608682360846, 2.534861208097212, 2.5358615479583397, 2.5368618878194673, 2.537862227680595, 2.5388625675417225, 2.53986290740285, 2.5408632472639776, 2.541863587125105, 2.5428639269862328, 2.54386426684736, 2.5448646067084875, 2.545864946569615, 2.5468652864307426, 2.54786562629187, 2.5488659661529978, 2.5498663060141253, 2.550866645875253, 2.5518669857363805, 2.552867325597508, 2.5538676654586356, 2.554868005319763, 2.555868345180891, 2.5568686850420184, 2.557869024903146, 2.5588693647642735, 2.559869704625401, 2.5608700444865287, 2.561870384347656, 2.5628707242087834, 2.563871064069911, 2.5648714039310385, 2.565871743792166, 2.5668720836532937, 2.5678724235144212, 2.568872763375549, 2.5698731032366764, 2.570873443097804, 2.5718737829589315, 2.572874122820059, 2.5738744626811867, 2.5748748025423143, 2.575875142403442, 2.5768754822645694, 2.577875822125697, 2.578876161986824, 2.5798765018479517, 2.5808768417090793, 2.581877181570207, 2.5828775214313344, 2.583877861292462, 2.5848782011535896, 2.585878541014717, 2.5868788808758447, 2.5878792207369723, 2.5888795605981, 2.5898799004592274, 2.590880240320355, 2.5918805801814826, 2.59288092004261, 2.5938812599037377, 2.5948815997648653, 2.595881939625993, 2.59688227948712, 2.5978826193482476, 2.598882959209375, 2.5998832990705028, 2.6008836389316303, 2.601883978792758, 2.6028843186538855, 
2.603884658515013, 2.6048849983761406, 2.605885338237268, 2.6068856780983958, 2.6078860179595234, 2.608886357820651, 2.6098866976817785, 2.610887037542906, 2.6118873774040336, 2.6128877172651612, 2.613888057126289, 2.614888396987416, 2.6158887368485435, 2.616889076709671, 2.6178894165707987, 2.6188897564319262, 2.619890096293054, 2.6208904361541814, 2.621890776015309, 2.6228911158764365, 2.623891455737564, 2.6248917955986917, 2.6258921354598193, 2.626892475320947, 2.6278928151820744, 2.628893155043202, 2.6298934949043296, 2.630893834765457, 2.6318941746265847, 2.632894514487712, 2.6338948543488394, 2.634895194209967, 2.6358955340710946, 2.636895873932222, 2.6378962137933497, 2.6388965536544773, 2.639896893515605, 2.6408972333767324, 2.64189757323786, 2.6428979130989876, 2.643898252960115, 2.6448985928212427, 2.6458989326823703, 2.646899272543498, 2.6478996124046255, 2.648899952265753, 2.6499002921268806, 2.6509006319880077, 2.6519009718491353, 2.652901311710263, 2.6539016515713905, 2.654901991432518, 2.6559023312936456, 2.656902671154773, 2.6579030110159008, 2.6589033508770283, 2.659903690738156, 2.6609040305992835, 2.661904370460411, 2.6629047103215386, 2.663905050182666, 2.664905390043794, 2.6659057299049214, 2.666906069766049, 2.6679064096271765, 2.6689067494883036, 2.669907089349431, 2.670907429210559, 2.6719077690716864, 2.672908108932814, 2.6739084487939415, 2.674908788655069, 2.6759091285161967, 2.6769094683773242, 2.677909808238452, 2.6789101480995794, 2.679910487960707, 2.6809108278218345, 2.681911167682962, 2.6829115075440897, 2.6839118474052173, 2.684912187266345, 2.6859125271274724, 2.6869128669885995, 2.687913206849727, 2.6889135467108547, 2.6899138865719823, 2.69091422643311, 2.6919145662942374, 2.692914906155365, 2.6939152460164926, 2.69491558587762, 2.6959159257387477, 2.6969162655998753, 2.697916605461003, 2.6989169453221304, 2.699917285183258, 2.7009176250443856, 2.701917964905513, 2.7029183047666407, 2.7039186446277683, 2.7049189844888955, 
2.705919324350023, 2.7069196642111506, 2.707920004072278, 2.7089203439334057, 2.7099206837945333, 2.710921023655661, 2.7119213635167885, 2.712921703377916, 2.7139220432390436, 2.714922383100171, 2.7159227229612988, 2.7169230628224263, 2.717923402683554, 2.7189237425446815, 2.719924082405809, 2.7209244222669366, 2.7219247621280642, 2.7229251019891914, 2.723925441850319, 2.7249257817114465, 2.725926121572574, 2.7269264614337017, 2.7279268012948292, 2.728927141155957, 2.7299274810170844, 2.730927820878212, 2.7319281607393395, 2.732928500600467, 2.7339288404615947, 2.7349291803227223, 2.73592952018385, 2.7369298600449774, 2.737930199906105, 2.7389305397672326, 2.73993087962836, 2.7409312194894873, 2.741931559350615, 2.7429318992117424, 2.74393223907287, 2.7449325789339976, 2.745932918795125, 2.7469332586562527, 2.7479335985173803, 2.748933938378508, 2.7499342782396354, 2.750934618100763, 2.7519349579618906, 2.752935297823018, 2.7539356376841457, 2.7549359775452733, 2.755936317406401, 2.7569366572675285, 2.757936997128656, 2.758937336989783, 2.7599376768509107, 2.7609380167120383, 2.761938356573166, 2.7629386964342935, 2.763939036295421, 2.7649393761565486, 2.765939716017676, 2.7669400558788038, 2.7679403957399313, 2.768940735601059, 2.7699410754621865, 2.770941415323314, 2.7719417551844416, 2.772942095045569, 2.773942434906697, 2.7749427747678244, 2.775943114628952, 2.776943454490079, 2.7779437943512066, 2.778944134212334, 2.779944474073462, 2.7809448139345894, 2.781945153795717, 2.7829454936568445, 2.783945833517972, 2.7849461733790997, 2.7859465132402272, 2.786946853101355, 2.7879471929624824, 2.78894753282361, 2.7899478726847375, 2.790948212545865, 2.7919485524069927, 2.7929488922681203, 2.793949232129248, 2.794949571990375, 2.7959499118515025, 2.79695025171263, 2.7979505915737577, 2.7989509314348853, 2.799951271296013, 2.8009516111571404, 2.801951951018268, 2.8029522908793956, 2.803952630740523, 2.8049529706016507, 2.8059533104627783, 2.806953650323906, 
2.8079539901850334, 2.808954330046161, 2.8099546699072886, 2.810955009768416, 2.8119553496295437, 2.812955689490671, 2.8139560293517984, 2.814956369212926, 2.8159567090740536, 2.816957048935181, 2.8179573887963087, 2.8189577286574363, 2.819958068518564, 2.8209584083796915, 2.821958748240819, 2.8229590881019466, 2.823959427963074, 2.8249597678242018, 2.8259601076853293, 2.826960447546457, 2.8279607874075845, 2.828961127268712, 2.8299614671298396, 2.830961806990967, 2.8319621468520944, 2.832962486713222, 2.8339628265743495, 2.834963166435477, 2.8359635062966047, 2.8369638461577322, 2.83796418601886, 2.8389645258799874, 2.839964865741115, 2.8409652056022425, 2.84196554546337, 2.8429658853244977, 2.8439662251856253, 2.844966565046753, 2.8459669049078804, 2.846967244769008, 2.8479675846301356, 2.8489679244912627, 2.8499682643523903, 2.850968604213518, 2.8519689440746454, 2.852969283935773, 2.8539696237969006, 2.854969963658028, 2.8559703035191557, 2.8569706433802833, 2.857970983241411, 2.8589713231025384, 2.859971662963666, 2.8609720028247936, 2.861972342685921, 2.8629726825470487, 2.8639730224081763, 2.864973362269304, 2.8659737021304315, 2.8669740419915586, 2.867974381852686, 2.8689747217138137, 2.8699750615749413, 2.870975401436069, 2.8719757412971965, 2.872976081158324, 2.8739764210194516, 2.874976760880579, 2.8759771007417068, 2.8769774406028343, 2.877977780463962, 2.8789781203250895, 2.879978460186217, 2.8809788000473446, 2.881979139908472, 2.8829794797696, 2.8839798196307274, 2.8849801594918545, 2.885980499352982, 2.8869808392141096, 2.887981179075237, 2.888981518936365, 2.8899818587974924, 2.89098219865862, 2.8919825385197475, 2.892982878380875, 2.8939832182420027, 2.8949835581031302, 2.895983897964258, 2.8969842378253854, 2.897984577686513, 2.8989849175476405, 2.899985257408768, 2.9009855972698957, 2.9019859371310233, 2.9029862769921504, 2.903986616853278, 2.9049869567144055, 2.905987296575533, 2.9069876364366607, 2.9079879762977883, 2.908988316158916, 
2.9099886560200434, 2.910988995881171, 2.9119893357422986, 2.912989675603426, 2.9139900154645537, 2.9149903553256813, 2.915990695186809, 2.9169910350479364, 2.917991374909064, 2.9189917147701916, 2.919992054631319, 2.9209923944924463, 2.921992734353574, 2.9229930742147014, 2.923993414075829, 2.9249937539369566, 2.925994093798084, 2.9269944336592117, 2.9279947735203393, 2.928995113381467, 2.9299954532425945, 2.930995793103722, 2.9319961329648496, 2.932996472825977, 2.9339968126871048, 2.9349971525482323, 2.93599749240936, 2.9369978322704875, 2.937998172131615, 2.938998511992742, 2.9399988518538698, 2.9409991917149974, 2.941999531576125, 2.9429998714372525, 2.94400021129838, 2.9450005511595077, 2.9460008910206352, 2.947001230881763, 2.9480015707428904, 2.949001910604018, 2.9500022504651455, 2.951002590326273, 2.9520029301874007, 2.9530032700485283, 2.954003609909656, 2.9550039497707834, 2.956004289631911, 2.957004629493038, 2.9580049693541657, 2.9590053092152933, 2.960005649076421, 2.9610059889375484, 2.962006328798676, 2.9630066686598036, 2.964007008520931, 2.9650073483820587, 2.9660076882431863, 2.967008028104314, 2.9680083679654414, 2.969008707826569, 2.9700090476876966, 2.971009387548824, 2.9720097274099517, 2.9730100672710793, 2.974010407132207, 2.975010746993334, 2.9760110868544616, 2.977011426715589, 2.9780117665767167, 2.9790121064378443, 2.980012446298972, 2.9810127861600995, 2.982013126021227, 2.9830134658823546, 2.984013805743482, 2.9850141456046098, 2.9860144854657373, 2.987014825326865, 2.9880151651879925, 2.98901550504912, 2.9900158449102476, 2.991016184771375, 2.992016524632503, 2.99301686449363, 2.9940172043547575, 2.995017544215885, 2.9960178840770126, 2.99701822393814, 2.998018563799268, 2.9990189036603954, 3.000019243521523, 3.0010195833826505, 3.002019923243778, 3.0030202631049057, 3.0040206029660332, 3.005020942827161, 3.0060212826882884, 3.007021622549416, 3.0080219624105435, 3.009022302271671, 3.0100226421327987, 3.011022981993926, 
3.0120233218550534, 3.013023661716181, 3.0140240015773085, 3.015024341438436, 3.0160246812995637, 3.0170250211606913, 3.018025361021819, 3.0190257008829464, 3.020026040744074, 3.0210263806052016, 3.022026720466329, 3.0230270603274567, 3.0240274001885843, 3.025027740049712, 3.0260280799108394, 3.027028419771967, 3.0280287596330946, 3.0290290994942217, 3.0300294393553493, 3.031029779216477, 3.0320301190776044, 3.033030458938732, 3.0340307987998596, 3.035031138660987, 3.0360314785221147, 3.0370318183832423, 3.03803215824437, 3.0390324981054975, 3.040032837966625, 3.0410331778277526, 3.04203351768888, 3.0430338575500078, 3.0440341974111353, 3.045034537272263, 3.0460348771333905, 3.0470352169945176, 3.048035556855645, 3.0490358967167728, 3.0500362365779004, 3.051036576439028, 3.0520369163001555, 3.053037256161283, 3.0540375960224106, 3.0550379358835382, 3.056038275744666, 3.0570386156057934, 3.058038955466921, 3.0590392953280485, 3.060039635189176, 3.0610399750503037, 3.0620403149114312, 3.063040654772559, 3.0640409946336864, 3.0650413344948135, 3.066041674355941, 3.0670420142170687, 3.0680423540781963, 3.069042693939324, 3.0700430338004514, 3.071043373661579, 3.0720437135227066, 3.073044053383834, 3.0740443932449617, 3.0750447331060893, 3.076045072967217, 3.0770454128283444, 3.078045752689472, 3.0790460925505996, 3.080046432411727, 3.0810467722728547, 3.082047112133982, 3.0830474519951094, 3.084047791856237, 3.0850481317173646, 3.086048471578492, 3.0870488114396197, 3.0880491513007473, 3.089049491161875, 3.0900498310230025, 3.09105017088413, 3.0920505107452576, 3.093050850606385, 3.0940511904675128, 3.0950515303286403, 3.096051870189768, 3.0970522100508955, 3.098052549912023, 3.0990528897731506, 3.1000532296342778, 3.1010535694954053, 3.102053909356533, 3.1030542492176605, 3.104054589078788, 3.1050549289399156, 3.106055268801043, 3.107055608662171, 3.1080559485232984, 3.109056288384426, 3.1100566282455535, 3.111056968106681, 3.1120573079678087, 3.1130576478289362, 
3.114057987690064, 3.1150583275511914, 3.116058667412319, 3.1170590072734465, 3.1180593471345737, 3.1190596869957012, 3.120060026856829, 3.1210603667179564, 3.122060706579084, 3.1230610464402115, 3.124061386301339, 3.1250617261624667, 3.1260620660235943, 3.127062405884722, 3.1280627457458494, 3.129063085606977, 3.1300634254681046, 3.131063765329232, 3.1320641051903597, 3.1330644450514873, 3.134064784912615, 3.1350651247737424, 3.1360654646348696, 3.137065804495997, 3.1380661443571247, 3.1390664842182523, 3.14006682407938, 3.1410671639405074, 3.142067503801635, 3.1430678436627626, 3.14406818352389, 3.1450685233850177, 3.1460688632461453, 3.147069203107273, 3.1480695429684005, 3.149069882829528, 3.1500702226906556, 3.151070562551783, 3.1520709024129108, 3.1530712422740383, 3.1540715821351655, 3.155071921996293, 3.1560722618574206, 3.157072601718548, 3.1580729415796758, 3.1590732814408033, 3.160073621301931, 3.1610739611630585, 3.162074301024186, 3.1630746408853136, 3.1640749807464412, 3.165075320607569, 3.1660756604686964, 3.167076000329824, 3.1680763401909515, 3.169076680052079, 3.1700770199132067, 3.1710773597743342, 3.1720776996354614, 3.173078039496589, 3.1740783793577165, 3.175078719218844, 3.1760790590799717, 3.1770793989410993, 3.178079738802227, 3.1790800786633544, 3.180080418524482, 3.1810807583856096, 3.182081098246737, 3.1830814381078647, 3.1840817779689923, 3.18508211783012, 3.1860824576912474, 3.187082797552375, 3.1880831374135026, 3.18908347727463, 3.1900838171357573, 3.191084156996885, 3.1920844968580124, 3.19308483671914, 3.1940851765802676, 3.195085516441395, 3.1960858563025227, 3.1970861961636503, 3.198086536024778, 3.1990868758859055, 3.200087215747033, 3.2010875556081606, 3.202087895469288, 3.2030882353304158, 3.2040885751915433, 3.205088915052671, 3.2060892549137985, 3.207089594774926, 3.208089934636053, 3.2090902744971808, 3.2100906143583083, 3.211090954219436, 3.2120912940805635, 3.213091633941691, 3.2140919738028186, 3.215092313663946, 
3.216092653525074, 3.2170929933862014, 3.218093333247329, 3.2190936731084565, 3.220094012969584, 3.2210943528307117, 3.2220946926918392, 3.223095032552967, 3.2240953724140944, 3.225095712275222, 3.226096052136349, 3.2270963919974767, 3.2280967318586042, 3.229097071719732, 3.2300974115808594, 3.231097751441987, 3.2320980913031145, 3.233098431164242, 3.2340987710253697, 3.2350991108864973, 3.236099450747625, 3.2370997906087524, 3.23810013046988, 3.2391004703310076, 3.240100810192135, 3.2411011500532627, 3.2421014899143903, 3.243101829775518, 3.244102169636645, 3.2451025094977726, 3.2461028493589, 3.2471031892200277, 3.2481035290811553, 3.249103868942283, 3.2501042088034104, 3.251104548664538, 3.2521048885256656, 3.253105228386793, 3.2541055682479207, 3.2551059081090483, 3.256106247970176, 3.2571065878313035, 3.258106927692431, 3.2591072675535586, 3.260107607414686, 3.2611079472758138, 3.262108287136941, 3.2631086269980685, 3.264108966859196, 3.2651093067203236, 3.266109646581451, 3.2671099864425788, 3.2681103263037063, 3.269110666164834, 3.2701110060259615, 3.271111345887089, 3.2721116857482166, 3.2731120256093442, 3.274112365470472, 3.2751127053315994, 3.276113045192727, 3.2771133850538545, 3.278113724914982, 3.2791140647761097, 3.280114404637237, 3.2811147444983644, 3.282115084359492, 3.2831154242206195, 3.284115764081747, 3.2851161039428747, 3.2861164438040023, 3.28711678366513, 3.2881171235262574, 3.289117463387385, 3.2901178032485126, 3.29111814310964, 3.2921184829707677, 3.2931188228318953, 3.294119162693023, 3.2951195025541504, 3.296119842415278, 3.2971201822764056, 3.2981205221375327, 3.2991208619986603, 3.300121201859788, 3.3011215417209154, 3.302121881582043, 3.3031222214431706, 3.304122561304298, 3.3051229011654257, 3.3061232410265533, 3.307123580887681, 3.3081239207488085, 3.309124260609936, 3.3101246004710636, 3.311124940332191, 3.3121252801933188, 3.3131256200544463, 3.314125959915574, 3.3151262997767015, 3.3161266396378286, 3.317126979498956, 
3.3181273193600838, 3.3191276592212113, 3.320127999082339, 3.3211283389434665, 3.322128678804594, 3.3231290186657216, 3.324129358526849, 3.325129698387977, 3.3261300382491044, 3.327130378110232, 3.3281307179713595, 3.329131057832487, 3.3301313976936147, 3.3311317375547422, 3.33213207741587, 3.3331324172769974, 3.3341327571381245, 3.335133096999252, 3.3361334368603797, 3.3371337767215072, 3.338134116582635, 3.3391344564437624, 3.34013479630489, 3.3411351361660175, 3.342135476027145, 3.3431358158882727, 3.3441361557494003, 3.345136495610528, 3.3461368354716554, 3.347137175332783, 3.3481375151939106, 3.349137855055038, 3.3501381949161657, 3.3511385347772933, 3.3521388746384204, 3.353139214499548, 3.3541395543606756, 3.355139894221803, 3.3561402340829307, 3.3571405739440583, 3.358140913805186, 3.3591412536663134, 3.360141593527441, 3.3611419333885686, 3.362142273249696, 3.3631426131108237, 3.3641429529719513, 3.365143292833079, 3.3661436326942065, 3.367143972555334, 3.3681443124164616, 3.369144652277589, 3.3701449921387163, 3.371145331999844, 3.3721456718609715, 3.373146011722099, 3.3741463515832266, 3.375146691444354, 3.3761470313054818, 3.3771473711666093, 3.378147711027737, 3.3791480508888645, 3.380148390749992, 3.3811487306111196, 3.382149070472247, 3.383149410333375, 3.3841497501945024, 3.38515009005563, 3.3861504299167575, 3.387150769777885, 3.3881511096390122, 3.38915144950014, 3.3901517893612674, 3.391152129222395, 3.3921524690835225, 3.39315280894465, 3.3941531488057777, 3.3951534886669053, 3.396153828528033, 3.3971541683891604, 3.398154508250288, 3.3991548481114155, 3.400155187972543, 3.4011555278336707, 3.4021558676947983, 3.403156207555926, 3.4041565474170534, 3.405156887278181, 3.406157227139308, 3.4071575670004357, 3.4081579068615633, 3.409158246722691, 3.4101585865838184, 3.411158926444946, 3.4121592663060736, 3.413159606167201, 3.4141599460283287, 3.4151602858894563, 3.416160625750584, 3.4171609656117115, 3.418161305472839, 3.4191616453339666, 
3.420161985195094, 3.4211623250562218, 3.4221626649173493, 3.423163004778477, 3.424163344639604, 3.4251636845007316, 3.426164024361859, 3.4271643642229868, 3.4281647040841143, 3.429165043945242, 3.4301653838063695, 3.431165723667497, 3.4321660635286246, 3.433166403389752, 3.43416674325088, 3.4351670831120074, 3.436167422973135, 3.4371677628342625, 3.43816810269539, 3.4391684425565177, 3.4401687824176452, 3.441169122278773, 3.4421694621399, 3.4431698020010275, 3.444170141862155, 3.4451704817232827, 3.4461708215844102, 3.447171161445538, 3.4481715013066654, 3.449171841167793, 3.4501721810289205, 3.451172520890048, 3.4521728607511757, 3.4531732006123033, 3.454173540473431, 3.4551738803345584, 3.456174220195686, 3.4571745600568136, 3.458174899917941, 3.4591752397790687, 3.460175579640196, 3.4611759195013234, 3.462176259362451, 3.4631765992235786, 3.464176939084706, 3.4651772789458337, 3.4661776188069613, 3.467177958668089, 3.4681782985292164, 3.469178638390344, 3.4701789782514716, 3.471179318112599, 3.4721796579737267, 3.4731799978348543, 3.474180337695982, 3.4751806775571095, 3.476181017418237, 3.4771813572793646, 3.4781816971404917, 3.4791820370016193, 3.480182376862747, 3.4811827167238745, 3.482183056585002, 3.4831833964461296, 3.484183736307257, 3.4851840761683848, 3.4861844160295123, 3.48718475589064, 3.4881850957517675, 3.489185435612895, 3.4901857754740226, 3.49118611533515, 3.492186455196278, 3.4931867950574054, 3.494187134918533, 3.4951874747796605, 3.4961878146407876, 3.4971881545019152, 3.498188494363043, 3.4991888342241704, 3.500189174085298, 3.5011895139464255, 3.502189853807553, 3.5031901936686807, 3.5041905335298082, 3.505190873390936, 3.5061912132520634, 3.507191553113191, 3.5081918929743185, 3.509192232835446, 3.5101925726965737, 3.5111929125577013, 3.512193252418829, 3.5131935922799564, 3.5141939321410836, 3.515194272002211, 3.5161946118633387, 3.5171949517244663, 3.518195291585594, 3.5191956314467214, 3.520195971307849, 3.5211963111689766, 
3.522196651030104, 3.5231969908912317, 3.5241973307523593, 3.525197670613487, 3.5261980104746145, 3.527198350335742, 3.5281986901968696, 3.529199030057997, 3.5301993699191248, 3.5311997097802523, 3.5322000496413795, 3.533200389502507, 3.5342007293636346, 3.535201069224762, 3.5362014090858898, 3.5372017489470173, 3.538202088808145, 3.5392024286692725, 3.5402027685304, 3.5412031083915276, 3.542203448252655, 3.543203788113783, 3.5442041279749104, 3.545204467836038, 3.5462048076971655, 3.547205147558293, 3.5482054874194207, 3.5492058272805482, 3.5502061671416754, 3.551206507002803, 3.5522068468639305, 3.553207186725058, 3.5542075265861857, 3.5552078664473132, 3.556208206308441, 3.5572085461695684, 3.558208886030696, 3.5592092258918235, 3.560209565752951, 3.5612099056140787, 3.5622102454752063, 3.563210585336334, 3.5642109251974614, 3.565211265058589, 3.5662116049197166, 3.567211944780844, 3.5682122846419713, 3.569212624503099, 3.5702129643642264, 3.571213304225354, 3.5722136440864816, 3.573213983947609, 3.5742143238087367, 3.5752146636698643, 3.576215003530992, 3.5772153433921194, 3.578215683253247, 3.5792160231143746, 3.580216362975502, 3.5812167028366297, 3.5822170426977573, 3.583217382558885, 3.5842177224200125, 3.5852180622811396, 3.586218402142267, 3.5872187420033947, 3.5882190818645223, 3.58921942172565, 3.5902197615867775, 3.591220101447905, 3.5922204413090326, 3.59322078117016, 3.5942211210312878, 3.5952214608924153, 3.596221800753543, 3.5972221406146705, 3.598222480475798, 3.5992228203369256, 3.600223160198053, 3.601223500059181, 3.6022238399203084, 3.6032241797814355, 3.604224519642563, 3.6052248595036906, 3.6062251993648182, 3.607225539225946, 3.6082258790870734, 3.609226218948201, 3.6102265588093285, 3.611226898670456, 3.6122272385315837, 3.6132275783927112, 3.614227918253839, 3.6152282581149664, 3.616228597976094, 3.6172289378372215, 3.618229277698349, 3.6192296175594767, 3.6202299574206043, 3.6212302972817314, 3.622230637142859, 3.6232309770039866, 
3.624231316865114, 3.6252316567262417, 3.6262319965873693, 3.627232336448497, 3.6282326763096244, 3.629233016170752, 3.6302333560318796, 3.631233695893007, 3.6322340357541347, 3.6332343756152623, 3.63423471547639, 3.6352350553375175, 3.636235395198645, 3.6372357350597726, 3.6382360749209, 3.6392364147820273, 3.640236754643155, 3.6412370945042825, 3.64223743436541, 3.6432377742265376, 3.644238114087665, 3.6452384539487928, 3.6462387938099203, 3.647239133671048, 3.6482394735321755, 3.649239813393303, 3.6502401532544306, 3.651240493115558, 3.652240832976686, 3.6532411728378134, 3.654241512698941, 3.6552418525600685, 3.656242192421196, 3.657242532282323, 3.658242872143451, 3.6592432120045784, 3.660243551865706, 3.6612438917268335, 3.662244231587961, 3.6632445714490887, 3.6642449113102162, 3.665245251171344, 3.6662455910324714, 3.667245930893599, 3.6682462707547265, 3.669246610615854, 3.6702469504769817, 3.6712472903381093, 3.672247630199237, 3.6732479700603644, 3.674248309921492, 3.675248649782619, 3.6762489896437467, 3.6772493295048743, 3.678249669366002, 3.6792500092271294, 3.680250349088257, 3.6812506889493846, 3.682251028810512, 3.6832513686716397, 3.6842517085327673, 3.685252048393895, 3.6862523882550224, 3.68725272811615, 3.6882530679772776, 3.689253407838405, 3.6902537476995327, 3.6912540875606603, 3.692254427421788, 3.693254767282915, 3.6942551071440426, 3.69525544700517, 3.6962557868662977, 3.6972561267274253, 3.698256466588553, 3.6992568064496805, 3.700257146310808, 3.7012574861719356, 3.702257826033063, 3.7032581658941908, 3.7042585057553183, 3.705258845616446, 3.7062591854775735, 3.707259525338701, 3.7082598651998286, 3.709260205060956, 3.710260544922084, 3.711260884783211, 3.7122612246443385, 3.713261564505466, 3.7142619043665936, 3.7152622442277212, 3.716262584088849, 3.7172629239499764, 3.718263263811104, 3.7192636036722315, 3.720263943533359, 3.7212642833944867, 3.7222646232556142, 3.723264963116742, 3.7242653029778694, 3.725265642838997, 
3.7262659827001245, 3.727266322561252, 3.7282666624223797, 3.729267002283507, 3.7302673421446344, 3.731267682005762, 3.7322680218668896, 3.733268361728017, 3.7342687015891447, 3.7352690414502723, 3.7362693813114, 3.7372697211725274, 3.738270061033655, 3.7392704008947826, 3.74027074075591, 3.7412710806170377, 3.7422714204781653, 3.743271760339293, 3.7442721002004205, 3.745272440061548, 3.7462727799226756, 3.7472731197838027, 3.7482734596449303, 3.749273799506058, 3.7502741393671855, 3.751274479228313, 3.7522748190894406, 3.753275158950568, 3.7542754988116958, 3.7552758386728233, 3.756276178533951, 3.7572765183950785, 3.758276858256206, 3.7592771981173336, 3.760277537978461, 3.761277877839589, 3.7622782177007164, 3.763278557561844, 3.7642788974229715, 3.7652792372840986, 3.766279577145226, 3.767279917006354, 3.7682802568674814, 3.769280596728609, 3.7702809365897365, 3.771281276450864, 3.7722816163119917, 3.7732819561731192, 3.774282296034247, 3.7752826358953744, 3.776282975756502, 3.7772833156176295, 3.778283655478757, 3.7792839953398847, 3.7802843352010123, 3.78128467506214, 3.7822850149232674, 3.7832853547843945, 3.784285694645522, 3.7852860345066497, 3.7862863743677773, 3.787286714228905, 3.7882870540900324, 3.78928739395116, 3.7902877338122876, 3.791288073673415, 3.7922884135345427, 3.7932887533956703, 3.794289093256798, 3.7952894331179254, 3.796289772979053, 3.7972901128401806, 3.798290452701308, 3.7992907925624357, 3.8002911324235633, 3.8012914722846904, 3.802291812145818, 3.8032921520069456, 3.804292491868073, 3.8052928317292007, 3.8062931715903283, 3.807293511451456, 3.8082938513125835, 3.809294191173711, 3.8102945310348386, 3.811294870895966, 3.8122952107570938, 3.8132955506182213, 3.814295890479349, 3.8152962303404765, 3.816296570201604, 3.8172969100627316, 3.818297249923859, 3.8192975897849863, 3.820297929646114, 3.8212982695072415, 3.822298609368369, 3.8232989492294966, 3.824299289090624, 3.825299628951752, 3.8262999688128794, 3.827300308674007, 
3.8283006485351345, 3.829300988396262, 3.8303013282573897, 3.8313016681185172, 3.832302007979645, 3.8333023478407724, 3.8343026877019, 3.8353030275630275, 3.836303367424155, 3.8373037072852823, 3.83830404714641, 3.8393043870075374, 3.840304726868665, 3.8413050667297926, 3.84230540659092, 3.8433057464520477, 3.8443060863131753, 3.845306426174303, 3.8463067660354304, 3.847307105896558, 3.8483074457576856, 3.849307785618813, 3.8503081254799407, 3.8513084653410683, 3.852308805202196, 3.8533091450633234, 3.854309484924451, 3.855309824785578, 3.8563101646467057, 3.8573105045078333, 3.858310844368961, 3.8593111842300885, 3.860311524091216, 3.8613118639523436, 3.862312203813471, 3.8633125436745988, 3.8643128835357263, 3.865313223396854, 3.8663135632579815, 3.867313903119109, 3.8683142429802366, 3.869314582841364]
y = [-9.939227997679768E-4, -9.951845106100899E-4, -9.964483109309965E-4, -9.97714205050911E-4, -9.989821973008391E-4, -0.0010002522920225865, -0.0010015244935688034, -0.0010027988063029691, -0.0010040752345995094, -0.0010053537828437521, -0.0010066344554320004, -0.0010079172567715531, -0.0010092021912807346, -0.0010104892633889316, -0.0010117784775366238, -0.0010130698381754064, -0.0010143633497680586, -0.0010156590167885206, -0.0010169568437219687, -0.0010182568350648488, -0.001019558995324887, -0.001020863329021131, -0.0010221698406839845, -0.0010234785348552575, -0.0010247894160881558, -0.0010261024889473812, -0.0010274177580090979, -0.0010287352278610003, -0.0010300549031023684, -0.0010313767883440423, -0.0010327008882085103, -0.001034027207329914, -0.001035355750354106, -0.0010366865219386713, -0.001038019526752939, -0.001039354769478073, -0.001040692254807051, -0.001042031987444736, -0.0010433739721078948, -0.0010447182135252396, -0.0010460647164374584, -0.001047413485597247, -0.0010487645257693705, -0.0010501178417306745, -0.0010514734382701097, -0.0010528313201888, -0.001054191492300063, -0.001055553959429462, -0.0010569187264147942, -0.0010582857981061866, -0.0010596551793661067, -0.0010610268750693846, -0.0010624008901032771, -0.0010637772293674989, -0.0010651558977742453, -0.0010665369002482335, -0.0010679202417267653, -0.0010693059271597101, -0.0010706939615096286, -0.0010720843497517038, -0.0010734770968738848, -0.001074872207876828, -0.0010762696877740269, -0.0010776695415917763, -0.0010790717743692432, -0.001080476391158523, -0.001081883397024638, -0.0010832927970456084, -0.0010847045963124705, -0.001086118799929335, -0.0010875354130134162, -0.0010889544406950565, -0.0010903758881178117, -0.0010917997604384313, -0.001093226062826931, -0.0010946548004666583, -0.001096085978554275, -0.00109751960229984, -0.0010989556769268282, -0.0011003942076722036, -0.0011018351997863927, -0.0011032786585334102, -0.0011047245891908375, -0.0011061729970498865, 
-0.0011076238874154528, -0.0011090772656061336, -0.0011105331369542708, -0.0011119915068060304, -0.0011134523805213918, -0.0011149157634742185, -0.0011163816610523063, -0.001117850078657393, -0.0011193210217052495, -0.0011207944956256877, -0.0011222705058626108, -0.0011237490578740429, -0.001125230157132215, -0.0011267138091235539, -0.0011282000193487821, -0.0011296887933228933, -0.0011311801365752768, -0.0011326740546496713, -0.0011341705531042936, -0.0011356696375118362, -0.001137171313459514, -0.0011386755865491205, -0.001140182462397069, -0.0011416919466344312, -0.0011432040449070003, -0.0011447187628753099, -0.001146236106214689, -0.0011477560806153354, -0.0011492786917823203, -0.001150803945435647, -0.001152331847310313, -0.0011538624031563276, -0.0011553956187387955, -0.0011569314998379318, -0.001158470052249109, -0.0011600112817829428, -0.001161555194265293, -0.0011631017955373074, -0.0011646510914555294, -0.0011662030878918776, -0.001167757790733744, -0.0011693152058839993, -0.0011708753392610707, -0.0011724381967989765, -0.0011740037844473854, -0.0011755721081716515, -0.0011771431739528663, -0.001178716987787935, -0.0011802935556895606, -0.0011818728836863802, -0.0011834549778229233, -0.0011850398441597375, -0.001186627488773395, -0.0011882179177565622, -0.001189811137218024, -0.0011914071532827704, -0.001193005972092011, -0.0011946075998032746, -0.0011962120425904046, -0.0011978193066436208, -0.0011994293981696061, -0.0012010423233915534, -0.0012026580885491443, -0.001204276699898706, -0.0012058981637131957, -0.0012075224862822676, -0.0012091496739123275, -0.0012107797329265865, -0.0012124126696651078, -0.0012140484904848952, -0.001215687201759861, -0.0012173288098809823, -0.0012189733212562804, -0.001220620742310918, -0.001222271079487208, -0.0012239243392447173, -0.0012255805280602847, -0.0012272396524281083, -0.0012289017188597564, -0.0012305667338842535, -0.0012322347040481497, -0.0012339056359155181, -0.0012355795360680724, -0.0012372564111051909, 
-0.0012389362676439818, -0.0012406191123193171, -0.0012423049517839453, -0.0012439937927084632, -0.0012456856417814475, -0.0012473805057094858, -0.0012490783912172095, -0.0012507793050474084, -0.001252483253960997, -0.0012541902447371985, -0.0012559002841734655, -0.001257613379085649, -0.001259329536307985, -0.0012610487626932149, -0.0012627710651125583, -0.0012644964504558718, -0.0012662249256316306, -0.001267956497567033, -0.001269691173208025, -0.0012714289595194047, -0.0012731698634848353, -0.0012749138921069351, -0.001276661052407324, -0.0012784113514267114, -0.001280164796224887, -0.0012819213938808968, -0.0012836811514929931, -0.0012854440761787475, -0.001287210175075108, -0.001288979455338471, -0.001290751924144725, -0.0012925275886893112, -0.0012943064561873074, -0.0012960885338734822, -0.0012978738290023429, -0.001299662348848207, -0.0013014541007053053, -0.0013032490918877349, -0.0013050473297296981, -0.001306848821585408, -0.0013086535748292035, -0.0013104615968556508, -0.0013122728950795796, -0.0013140874769361442, -0.0013159053498808843, -0.0013177265213898234, -0.0013195509989594974, -0.001321378790107037, -0.001323209902370238, -0.0013250443433076283, -0.0013268821204985152, -0.0013287232415430943, -0.0013305677140624656, -0.0013324155456987362, -0.0013342667441150883, -0.0013361213169958209, -0.0013379792720464626, -0.00133984061699377, -0.0013417053595858861, -0.0013435735075923243, -0.0013454450688041284, -0.0013473200510338347, -0.0013491984621156387, -0.0013510803099054094, -0.0013529656022807893, -0.0013548543471412302, -0.0013567465524081104, -0.0013586422260247703, -0.0013605413759565999, -0.001362444010191123, -0.0013643501367380175, -0.0013662597636292552, -0.0013681728989191496, -0.0013700895506843992, -0.0013720097270241921, -0.0013739334360602833, -0.0013758606859370368, -0.0013777914848215609, -0.0013797258409036879, -0.0013816637623961711, -0.0013836052575346269, -0.0013855503345777203, -0.001387499001807179, -0.00138945126752789, 
-0.0013914071400679643, -0.0013933666277788402, -0.00139532973903533, -0.0013972964822357055, -0.0013992668658017794, -0.0014012408981789984, -0.0014032185878364825, -0.0014051999432671447, -0.0014071849729877278, -0.0014091736855389408, -0.0014111660894854583, -0.0014131621934160817, -0.0014151620059437862, -0.0014171655357057594, -0.0014191727913635513, -0.0014211837816031124, -0.0014231985151348942, -0.0014252170006938943, -0.0014272392470397917, -0.0014292652629570237, -0.0014312950572547855, -0.0014333286387672299, -0.001435366016353461, -0.0014374071988976689, -0.0014394521953091854, -0.001441501014522586, -0.0014435536654977736, -0.0014456101572200228, -0.0014476704987001375, -0.001449734698974473, -0.0014518027671050347, -0.0014538747121796078, -0.0014559505433117757, -0.0014580302696410483, -0.0014601139003329786, -0.0014622014445791298, -0.001464292911597314, -0.0014663883106316078, -0.0014684876509523936, -0.001470590941856571, -0.001472698192667485, -0.0014748094127351986, -0.0014769246114363893, -0.0014790437981746158, -0.0014811669823802769, -0.0014832941735107904, -0.00148542538105062, -0.001487560614511426, -0.0014896998834320622, -0.0014918431973788223, -0.001493990565945351, -0.0014961419987528884, -0.0014982975054502541, -0.0015004570957140567, -0.0015026207792486414, -0.0015047885657862923, -0.0015069604650873062, -0.001509136486940068, -0.0015113166411611558, -0.0015135009375954423, -0.0015156893861161583, -0.0015178819966250646, -0.001520078779052449, -0.001522279743357294, -0.0015244848995273636, -0.0015266942575792842, -0.0015289078275586459, -0.0015311256195401123, -0.0015333476436275217, -0.001535573909953956, -0.00153780442868186, -0.0015400392100031876, -0.0015422782641394254, -0.0015445216013417126, -0.0015467692318909885, -0.0015490211660980568, -0.0015512774143036962, -0.001553537986878728, -0.0015558028942242045, -0.001558072146771429, -0.0015603457549820926, -0.001562623729348385, -0.0015649060803930785, -0.0015671928186696603, 
-0.001569483954762435, -0.0015717794992865753, -0.0015740794628883042, -0.0015763838562449716, -0.0015786926900651574, -0.0015810059750887655, -0.0015833237220871572, -0.0015856459418632613, -0.00158797264525165, -0.001590303843118698, -0.0015926395463626418, -0.0015949797659137147, -0.0015973245127342654, -0.0015996737978188507, -0.0016020276321943726, -0.001604386026920148, -0.001606748993088057, -0.001609116541822663, -0.0016114886842812702, -0.0016138654316541147, -0.0016162467951644336, -0.0016186327860685502, -0.001621023415656064, -0.0016234186952499097, -0.0016258186362064916, -0.001628223249915814, -0.0016306325478015653, -0.001633046541321258, -0.0016354652419663543, -0.0016378886612623497, -0.0016403168107689357, -0.001642749702080099, -0.001645187346824241, -0.001647629756664273, -0.0016500769432977779, -0.0016525289184571477, -0.0016549856939096338, -0.0016574472814574956, -0.001659913692938196, -0.0016623849402244402, -0.0016648610352242707, -0.0016673419898813368, -0.0016698278161748656, -0.0016723185261198626, -0.0016748141317672622, -0.0016773146452039663, -0.001679820078553045, -0.0016823304439738556, -0.0016848457536621135, -0.0016873660198501207, -0.0016898912548067961, -0.0016924214708378526, -0.0016949566802859357, -0.0016974968955307255, -0.0017000421289890837, -0.0017025923931151968, -0.0017051477004006933, -0.0017077080633747398, -0.0017102734946042512, -0.0017128440066939863, -0.0017154196122866542, -0.0017180003240631122, -0.0017205861547424284, -0.0017231771170820654, -0.0017257732238780453, -0.0017283744879649874, -0.0017309809222163357, -0.0017335925395444715, -0.0017362093529008282, -0.0017388313752760955, -0.0017414586197002627, -0.0017440910992428385, -0.0017467288270129541, -0.0017493718161595223, -0.0017520200798713753, -0.0017546736313773983, -0.0017573324839466875, -0.0017599966508886741, -0.001762666145553306, -0.0017653409813311273, -0.001768021171653519, -0.0017707067299927509, -0.0017733976698621804, -0.0017760940048164016, 
-0.001778795748451374, -0.0017815029144045691, -0.0017842155163551314, -0.001786933568024054, -0.0017896570831742531, -0.0017923860756108257, -0.0017951205591811062, -0.0017978605477748733, -0.0018006060553244566, -0.0018033570958049657, -0.0018061136832343873, -0.0018088758316737211, -0.0018116435552272356, -0.0018144168680425021, -0.0018171957843105932, -0.0018199803182663002, -0.0018227704841882414, -0.001825566296398979, -0.0018283677692652938, -0.0018311749171982135, -0.0018339877546533078, -0.0018368062961306804, -0.0018396305561753398, -0.0018424605493771848, -0.0018452962903712437, -0.001848137793837878, -0.0018509850745028486, -0.0018538381471375582, -0.0018566970265592292, -0.0018595617276309594, -0.0018624322652620465, -0.0018653086544080215, -0.0018681909100709282, -0.0018710790472993872, -0.0018739730811888375, -0.0018768730268817401, -0.0018797788995676176, -0.001882690714483389, -0.0018856084869133742, -0.0018885322321896872, -0.0018914619656921526, -0.0018943977028487188, -0.0018973394591354465, -0.0019002872500768124, -0.0019032410912458702, -0.0019062009982643373, -0.001909166986802871, -0.001912139072581253, -0.0019151172713685037, -0.0019181015989830753, -0.0019210920712931223, -0.0019240887042165332, -0.0019270915137212926, -0.0019301005158255233, -0.0019331157265977355, -0.0019361371621569906, -0.001939164838673113, -0.0019421987723669066, -0.0019452389795102268, -0.0019482854764263122, -0.0019513382794898511, -0.0019543974051273215, -0.00195746286981701, -0.00196053469008934, -0.00196361288252701, -0.00196669746376518, -0.001969788450491707, -0.0019728858594472804, -0.0019759897074257127, -0.00197910001127405, -0.001982216787892771, -0.001985340054236106, -0.001988469827312065, -0.001991606124182782, -0.0019947489619646435, -0.001997898357828511, -0.002001054328999942, -0.0020042168927593444, -0.0020073860664422535, -0.0020105618674394745, -0.0020137443131973604, -0.002016933421217926, -0.0020201292090591605, -0.002023331694335164, 
-0.0020265408947163824, -0.0020297568279298633, -0.0020329795117593683, -0.0020362089640457025, -0.0020394452026868317, -0.002042688245638176, -0.0020459381109127887, -0.0020491948165815635, -0.002052458380773509, -0.0020557288216758707, -0.0020590061575344668, -0.0020622904066538343, -0.0020655815873974626, -0.002068879718188057, -0.0020721848175077134, -0.0020754969038981864, -0.002078815995961052, -0.0020821421123580547, -0.002085475271811175, -0.002088815493103023, -0.002092162795076962, -0.0020955171966373586, -0.00209887871674985, -0.0021022473744415444, -0.002105623188801279, -0.002109006178979846, -0.0021123963641902243, -0.0021157937637078356, -0.002119198396870754, -0.00212261028307999, -0.0021260294417996687, -0.0021294558925573524, -0.0021328896549442297, -0.0021363307486153577, -0.0021397791932899497, -0.0021432350087515514, -0.0021466982148483776, -0.0021501688314935026, -0.0021536468786651146, -0.00215713237640681, -0.0021606253448277664, -0.002164125804103112, -0.0021676337744739958, -0.0021711492762481045, -0.002174672329799646, -0.0021782029555698105, -0.002181741174066961, -0.002185287005866821, -0.002188840471612864, -0.0021924015920165127, -0.0021959703878573844, -0.0021995468799835593, -0.0022031310893119455, -0.002206723036828361, -0.0022103227435879796, -0.002213930230715505, -0.0022175455194054818, -0.0022211686309225463, -0.0022247995866017025, -0.002228438407848577, -0.0022320851161397853, -0.0022357397330230824, -0.002239402280117688, -0.00224307277911467, -0.0022467512517770805, -0.002250437719940243, -0.0022541322055121896, -0.002257834730473784, -0.002261545316879085, -0.0022652639868555855, -0.00226899076260457, -0.002272725666401361, -0.002276468720595617, -0.0022802199476116176, -0.0022839793699485364, -0.0022877470101808515, -0.0022915228909584596, -0.00229530703500714, -0.002299099465128754, -0.002302900204201579, -0.0023067092751806175, -0.0023105267010978875, -0.0023143525050627473, -0.002318186710262163, -0.002322029339961074, 
-0.002325880417502605, -0.0023297399663085323, -0.0023336080098794324, -0.0023374845717951064, -0.0023413696757148402, -0.0023452633453777414, -0.002349165604603053, -0.0023530764772904827, -0.00235699598742048, -0.0023609241590546535, -0.002364861016335977, -0.002368806583489194, -0.0023727608848211146, -0.002376723944721021, -0.002380695787660788, -0.002384676438195459, -0.0023886659209634807, -0.0023926642606869808, -0.002396671482172162, -0.002400687610309667, -0.002404712670074862, -0.002408746686528187, -0.0024127896848155127, -0.0024168416901685225, -0.0024209027279049463, -0.002424972823429023, -0.002429052002231805, -0.0024331402898915373, -0.0024372377120738913, -0.0024413442945324633, -0.00244546006310909, -0.0024495850437341545, -0.0024537192624270107, -0.0024578627452962794, -0.00246201551854025, -0.0024661776084473125, -0.002470349041396083, -0.0024745298438561183, -0.0024787200423879814, -0.0024829196636437624, -0.0024871287343674496, -0.0024913472813952496, -0.0024955753316560055, -0.0024998129121715324, -0.002504060050057034, -0.0025083167725214878, -0.002512583106867972, -0.0025168590804941467, -0.0025211447208925057, -0.0025254400556509477, -0.0025297451124529917, -0.0025340599190782024, -0.0025383845034027074, -0.002542718893399465, -0.0025470631171387063, -0.002551417202788342, -0.0025557811786143135, -0.0025601550729810792, -0.0025645389143519855, -0.0025689327312896125, -0.0025733365524562945, -0.0025777504066144285, -0.0025821743226269477, -0.0025866083294577053, -0.0025910524561719587, -0.002595506731936664, -0.0025999711860210106, -0.0026044458477967984, -0.0026089307467388693, -0.0026134259124254906, -0.0026179313745388843, -0.0026224471628655656, -0.0026269733072968315, -0.002631509837829167, -0.0026360567845646454, -0.0026406141777114795, -0.0026451820475843274, -0.002649760424604893, -0.0026543493393022108, -0.0026589488223131546, -0.0026635589043829946, -0.002668179616365633, -0.0026728109892242722, -0.0026774530540317337, 
-0.0026821058419709727, -0.002686769384335524, -0.002691443712530003, -0.0026961288580704657, -0.002700824852585039, -0.0027055317278142296, -0.0027102495156115123, -0.0027149782479437487, -0.0027197179568916353, -0.0027244686746502743, -0.0027292304335296137, -0.002734003265954832, -0.0027387872044670404, -0.002743582281723553, -0.002748388530498519, -0.0027532059836833126, -0.002758034674287168, -0.0027628746354374815, -0.002767725900380525, -0.002772588502481815, -0.0027774624752266077, -0.0027823478522205156, -0.0027872446671899304, -0.0027921529539825267, -0.0027970727465678755, -0.0028020040790378026, -0.002806946985607094, -0.0028119015006139066, -0.0028168676585202737, -0.0028218454939127197, -0.0028268350415027142, -0.002831836336127269, -0.002836849412749428, -0.002841874306458831, -0.0028469110524722277, -0.002851959686134051, -0.0028570202429169687, -0.0028620927584223814, -0.002867177268381053, -0.0028722738086536057, -0.002877382415231083, -0.0028825031242355136, -0.002887635971920578, -0.0028927809946719465, -0.0028979382290080583, -0.00290310771158062, -0.002908289479175162, -0.002913483568711652, -0.002918690017245029, -0.002923908861965795, -0.0029291401402006824, -0.002934383889413128, -0.0029396401472039746, -0.002944908951311929, -0.002950190339614313, -0.0029554843501275402, -0.002960791021007833, -0.002966110390551684, -0.0029714424971966358, -0.0029767873795217285, -0.002982145076248284, -0.002987515626240364, -0.002992899068505474, -0.002998295442195224, -0.003003704786605901, -0.003009127141179101, -0.003014562545502351, -0.0030200110393098457, -0.003025472662482932, -0.003030947455050932, -0.0030364354571915824, -0.003041936709231926, -0.003047451251648777, -0.003052979125069415, -0.0030585203702723357, -0.0030640750281878217, -0.0030696431398986737, -0.003075224746640794, -0.0030808198898039747, -0.003086428610932498, -0.003092050951725848, -0.0030976869540393845, -0.003103336659885031, -0.0031090001114319685, -0.003114677351007355, 
-0.003120368421097007, -0.0031260733643460447, -0.003131792223559743, -0.0031375250417040286, -0.003143271861906455, -0.0031490327274566994, -0.003154807681807368, -0.0031605967685747497, -0.003166400031539494, -0.00317221751464737, -0.003178049262009962, -0.003183895317905513, -0.003189755726779565, -0.003195630533245713, -0.003201519782086386, -0.0032074235182536475, -0.0032133417868698538, -0.00321927463322847, -0.003225222102794914, -0.0032311842412071526, -0.003237161094276613, -0.003243152707988926, -0.003249159128504692, -0.003255180402160297, -0.0032612165754686744, -0.003267267695120129, -0.003273333807983097, -0.0032794149611049895, -0.003285511201712951, -0.003291622577214777, -0.003297749135199588, -0.0033038909234387446, -0.0033100479898866926, -0.003316220382681611, -0.003322408150146547, -0.003328611340789959, -0.0033348300033067928, -0.0033410641865790937, -0.003347313939677108, -0.003353579311859957, -0.003359860352576555, -0.0033661571114664834, -0.00337246963836086, -0.0033787979832831843, -0.003385142196450236, -0.0033915023282729932, -0.003397878429357435, -0.003404270550505516, -0.0034106787427159824, -0.0034171030571854017, -0.003423543545308863, -0.0034300002586811233, -0.003436473249097409, -0.0034429625685542522, -0.0034494682692505704, -0.0034559904035884924, -0.0034625290241743507, -0.003469084183819658, -0.00347565593554191, -0.0034822443325656207, -0.003488849428323391, -0.0034954712764566328, -0.0035021099308167204, -0.0035087654454659355, -0.0035154378746782944, -0.003522127272940771, -0.0035288336949540764, -0.0035355571956337886, -0.003542297830111209, -0.0035490556537345628, -0.0035558307220697784, -0.0035626230909016733, -0.003569432816234951, -0.0035762599542950683, -0.0035831045615295105, -0.003589966694608617, -0.0035968464104267697, -0.0036037437661033053, -0.003610658818983697, -0.003617591626640491, -0.0036245422468744837, -0.003631510737715753, -0.003638497157424636, -0.003645501564493004, -0.003652524017645117, 
-0.003659564575838911, -0.0036666232982670477, -0.003673700244357909, -0.0036807954737768447, -0.0036879090464272215, -0.003695041022451589, -0.00370219146223272, -0.003709360426394851, -0.0037165479758047276, -0.003723754171572893, -0.0037309790750546406, -0.003738222747851391, -0.003745485251811605, -0.0037527666490322323, -0.003760067001859705, -0.0037673863728911426, -0.0037747248249756452, -0.003782082421215377, -0.0037894592249668093, -0.0037968552998419777, -0.003804270709709635, -0.0038117055186964945, -0.0038191597911885183, -0.0038266335918320463, -0.003834126985535111, -0.0038416400374687715, -0.003849172813068131, -0.003856725378033878, -0.0038642977983333517, -0.0038718901402020117, -0.0038795024701444823, -0.0038871348549360876, -0.003894787361623976, -0.003902460057528563, -0.003910153010244731, -0.003917866287643268, -0.003925599957872058, -0.003933354089357521, -0.003941128750806019, -0.0039489240112049705, -0.003956739939824508, -0.003964576606218608, -0.003972434080226626, -0.00398031243197455, -0.003988211731876519, -0.003996132050636067, -0.004004073459247747, -0.0040120360289982605, -0.004020019831468243, -0.004028024938533323, -0.004036051422365773, -0.004044099355435893, -0.004052168810513568, -0.004060259860669574, -0.0040683725792771104, -0.004076507040013304, -0.00408466331686067, -0.004092841484108697, -0.004101041616355162, -0.004109263788507822, -0.00411750807578585, -0.004125774553721399, -0.004134063298161126, -0.004142374385267654, -0.004150707891521405, -0.004159063893721813, -0.004167442468989069, -0.004175843694765737, -0.0041842676488183265, -0.004192714409238815, -0.0042011840544463105, -0.004209676663188744, -0.0042181923145443645, -0.004226731087923486, -0.004235293063069983, -0.004243878320063201, -0.00425248693931937, -0.004261119001593436, -0.004269774587980666, -0.004278453779918367, -0.004287156659187637, -0.004295883307914984, -0.004304633808574164, -0.004313408243987814, -0.004322206697329198, -0.004331029252124099, 
-0.0043398759922524096, -0.0043487470019499975, -0.004357642365810495, -0.00436656216878705, -0.004375506496194186, -0.0043844754337095644, -0.004393469067375793, -0.004402487483602393, -0.0044115307691674766, -0.004420599011219724, -0.0044296922972802055, -0.00443881071524428, -0.0044479543533834705, -0.00445712330034736, -0.004466317645165534, -0.004475537477249433, -0.0044847828863944375, -0.004494053962781611, -0.004503350796979886, -0.004512673479947769, -0.004522022103035612, -0.004531396757987339, -0.004540797536942646, -0.004550224532438894, -0.00455967783741318, -0.004569157545204433, -0.004578663749555393, -0.004588196544614589, -0.004597756024938714, -0.004607342285494361, -0.004616955421660344, -0.004626595529229699, -0.004636262704411993, -0.004645957043835122, -0.004655678644547847, -0.004665427604021687, -0.0046752040201531635, -0.004685007991266096, -0.004694839616113614, -0.0047046989938805735, -0.004714586224185606, -0.00472450140708352, -0.00473444464306737, -0.0047444160330709344, -0.004754415678470849, -0.004764443681088899, -0.004774500143194458, -0.004784585167506685, -0.004794698857196913, -0.004804841315890953, -0.0048150126476715505, -0.004825212957080662, -0.004835442349121873, -0.004845700929262949, -0.0048559888034379346, -0.004866306078049991, -0.004876652859973472, -0.004887029256556688, -0.004897435375624181, -0.004907871325479307, -0.004918337214906792, -0.004928833153175142, -0.004939359250039264, -0.0049499156157429575, -0.004960502361021605, -0.004971119597104617, -0.004981767435718187, -0.004992445989087708, -0.005003155369940633, -0.005013895691508836, -0.005024667067531691, -0.005035469612258338, -0.0050463034404506606, -0.005057168667385812, -0.005068065408859177, -0.005078993781186818, -0.005089953901208501, -0.0051009458862903475, -0.005111969854327615, -0.0051230259237476945, -0.005134114213512705, -0.005145234843122423, -0.005156387932617288, -0.005167573602581041, -0.0051787919741438865, -0.005190043168985187, 
-0.005201327309336602, -0.005212644517984829, -0.005223994918274822, -0.005235378634112532, -0.005246795789968178, -0.005258246510879009, -0.005269730922452566, -0.005281249150869702, -0.00529280132288758, -0.0053043875658428255, -0.005316008007654657, -0.0053276627768281465, -0.005339352002457174, -0.005351075814227709, -0.005362834342421033, -0.00537462771791696, -0.005386456072197067, -0.005398319537347951, -0.005410218246064412, -0.005422152331653015, -0.005434121928035221, -0.00544612716975061, -0.005458168191960764, -0.005470245130451895, -0.005482358121638881, -0.005494507302568463, -0.005506692810922581, -0.005518914785022138, -0.005531173363830216, -0.005543468686955686, -0.005555800894656867, -0.005568170127844907, -0.0055805765280874, -0.005593020237612142, -0.005605501399310544, -0.005618020156741456, -0.005630576654134749, -0.0056431710363950855, -0.005655803449105507, -0.0056684740385312925, -0.005681182951623757, -0.005693930336023854, -0.005706716340066233, -0.005719541112782961, -0.005732404803907347, -0.0057453075638778765, -0.005758249543842128, -0.00577123089566077, -0.005784251771911355, -0.005797312325892436, -0.005810412711627609, -0.005823553083869509, -0.005836733598103846, -0.005849954410553436, -0.005863215678182562, -0.0058765175587008, -0.005889860210567457, -0.005903243792995561, -0.005916668465956237, -0.005930134390182942, -0.0059436417271755875, -0.00595719063920503, -0.005970781289317399, -0.005984413841338225, -0.00599808845987709, -0.006011805310331953, -0.006025564558893675, -0.00603936637255019, -0.006053210919091402, -0.006067098367113543, -0.006081028886023645, -0.006095002646044286, -0.006109019818218118, -0.006123080574412571, -0.006137185087324548, -0.006151333530484951, -0.00616552607826374, -0.006179762905874431, -0.006194044189379023, -0.0062083701056929, -0.006222740832589401, -0.0062371565487051965, -0.006251617433544746, -0.006266123667485534, -0.00628067543178302, -0.00629527290857556, -0.006309916280889659, 
-0.006324605732644923, -0.006339341448659048, -0.006354123614653363, -0.006368952417257722, -0.0063838280440158225, -0.006398750683390443, -0.006413720524768627, -0.006428737758467534, -0.006443802575739001, -0.006458915168775626, -0.0064740757307157094, -0.006489284455649204, -0.006504541538622798, -0.006519847175645694, -0.006535201563695218, -0.006550604900722233, -0.006566057385656955, -0.006581559218414696, -0.006597110599901377, -0.006612711732019538, -0.006628362817674052, -0.0066440640607778115, -0.006659815666257906, -0.006675617840061364, -0.0066914707891611545, -0.006707374721562203, -0.0067233298463073195, -0.006739336373483536, -0.006755394514227979, -0.006771504480734214, -0.006787666486258252, -0.006803880745125031, -0.006820147472734581, -0.006836466885568207, -0.006852839201195284, -0.006869264638279256, -0.006885743416584197, -0.006902275756981482, -0.006918861881456228, -0.006935502013113762, -0.006952196376186426, -0.006968945196040244, -0.006985748699181628, -0.007002607113264005, -0.007019520667094745, -0.007036489590642198, -0.007053514115042244, -0.007070594472605621, -0.007087730896824518, -0.00710492362237998, -0.007122172885148964, -0.007139478922211118, -0.007156841971856472, -0.007174262273592398, -0.007191740068150961, -0.0072092755974961275, -0.0072268691048315205, -0.007244520834607349, -0.007262231032528419, -0.007279999945561222, -0.007297827821941907, -0.00731571491118361, -0.00733366146408434, -0.00735166773273473, -0.00736973397052575, -0.0073878604321566815, -0.00740604737364267, -0.007424295052323334, -0.007442603726870259, -0.007460973657295147, -0.007479405104958143, -0.007497898332575798, -0.007516453604229536, -0.0075350711853738065, -0.007553751342844355, -0.007572494344866798, -0.007591300461065091, -0.0076101699624698885, -0.007629103121527382, -0.007648100212107611, -0.007667161509513588, -0.007686287290489652, -0.007705477833230551, -0.0077247334173902674, -0.007744054324090871, -0.007763440835931682, 
-0.007782893236998326, -0.0078024118128716905, -0.007821996850637402, -0.007841648638894683, -0.007861367467766157, -0.0078811536289069, -0.007901007415514206, -0.007920929122336499, -0.007940919045683698, -0.007960977483436417, -0.007981104735055634, -0.008001301101592705, -0.008021566885699306, -0.008041902391636897, -0.008062307925287176, -0.008082783794162024, -0.008103330307413417, -0.008123947775843963, -0.00814463651191693, -0.008165396829766792, -0.008186229045209449, -0.00820713347575288, -0.008228110440607644, -0.00824916026069752, -0.008270283258670227, -0.008291479758908227, -0.008312750087539643, -0.008334094572448889, -0.008355513543288555, -0.008377007331489123, -0.008398576270271579, -0.00842022069465756, -0.008441940941481393, -0.008463737349401168, -0.008485610258910257, -0.008507560012349084, -0.008529586953916526, -0.008551691429681856, -0.008573873787596366, -0.008596134377505539, -0.00861847355116074, -0.008640891662231583, -0.008663389066317856, -0.0086859661209618, -0.008708623185660562, -0.008731360621878527, -0.00875417879305992, -0.00877707806464115, -0.00880005880406379, -0.008823121380787304, -0.008846266166301651, -0.008869493534140585, -0.008892803859894424, -0.008916197521223478, -0.008939674897870916, -0.008963236371676439, -0.008986882326589497, -0.009010613148682855, -0.009034429226166402, -0.009058330949400499, -0.009082318710909965, -0.009106392905397945, -0.009130553929760265, -0.009154802183099019, -0.009179138066736921, -0.009203561984231825, -0.009228074341390882, -0.009252675546285345, -0.00927736600926473, -0.00930214614297211, -0.009327016362358484, -0.009351977084697746, -0.009377028729602128, -0.009402171719036812, -0.009427406477335632, -0.009452733431215988, -0.0094781530097948, -0.009503665644603615, -0.00952927176960492, -0.009554971821207164, -0.009580766238281305, -0.009606655462176492, -0.009632639936736503, -0.009658720108315737, -0.009684896425795658, -0.009711169340601566, -0.009737539306718578, 
-0.009764006780709307, -0.00979057222172963, -0.009817236091546677, -0.009843998854555044, -0.009870860977794741, -0.009897822930968055, -0.009924885186457354, -0.00995204821934224, -0.009979312507417796, -0.010006678531212, -0.010034146774004, -0.01006171772184191, -0.010089391863561281, -0.010117169690803517, -0.010145051698034037, -0.010173038382561337, -0.010201130244555216, -0.010229327787066264, -0.010257631516044512, -0.010286041940358537, -0.010314559571815126, -0.01034318492517832, -0.010371918518189426, -0.010400760871586076, -0.010429712509122882, -0.010458773957590982, -0.010487945746838118, -0.010517228409789092, -0.010546622482466305, -0.010576128504010103, -0.010605747016699703, -0.010635478565973907, -0.01066532370045227, -0.010695282971956118, -0.010725356935530086, -0.01075554614946337, -0.010785851175311528, -0.010816272577918382, -0.010846810925437461, -0.010877466789354685, -0.01090824074451023, -0.010939133369121157, -0.010970145244804053, -0.011001276956597295, -0.01103252909298462, -0.011063902245917703, -0.011095397010839576, -0.01112701398670792, -0.011158753776018656, -0.011190616984829866, -0.011222604222785315, -0.011254716103138884, -0.011286953242778393, -0.011319316262250596, -0.011351805785784933, -0.01138442244131923, -0.011417166860523863, -0.01145003967882709, -0.01148304153544054, -0.011516173073384466, -0.011549434939513588, -0.011582827784542334, -0.0116163522630716, -0.01165000903361455, -0.011683798758622785, -0.011717722104513491, -0.011751779741695535, -0.011785972344596593, -0.011820300591690633, -0.0118547651655247, -0.011889366752746532, -0.011924106044132615, -0.011958983734615517, -0.011994000523312433, -0.0120291571135531, -0.012064454212908155, -0.012099892533218588, -0.012135472790623648, -0.012171195705590249, -0.012207062002942533, -0.012243072411890689, -0.012279227666061543, -0.012315528503527216, -0.012351975666836244, -0.01238856990304349, -0.012425311963740346, -0.012462202605085876, -0.012499242587837214, 
-0.01253643267738129, -0.012573773643765801, -0.012611266261730501, -0.012648911310739334, -0.01268670957501268, -0.012724661843558267, -0.012762768910205217, -0.01280103157363554, -0.012839450637417298, -0.012878026910037768, -0.012916761204936178, -0.012955654340538312, -0.012994707140288777, -0.01303392043268609, -0.013073295051316342, -0.01311283183488781, -0.013152531627264867, -0.013192395277504113, -0.013232423639887948, -0.013272617573961401, -0.013312977944566146, -0.013353505621877433, -0.013394201481439293, -0.013435066404201229, -0.013476101276554384, -0.013517306990368568, -0.013558684443028612, -0.013600234537472146, -0.013641958182226337, -0.013683856291446223, -0.013725929784951622, -0.013768179588266326, -0.013810606632655726, -0.013853211855165345, -0.013895996198659982, -0.013938960611862755, -0.01398210604939374, -0.014025433471810612, -0.014068943845647098, -0.014112638143454311, -0.014156517343839766, -0.014200582431509026, -0.014244834397304961, -0.014289274238250441, -0.014333902957588114, -0.01437872156482275, -0.014423731075762187, -0.014468932512560078, -0.014514326903757358, -0.014559915284325034, -0.014605698695706371, -0.014651678185860741, -0.014697854809305858, -0.014744229627161405, -0.01479080370719241, -0.014837578123853735, -0.014884553958333498, -0.01493173229859809, -0.014979114239435796, -0.015026700882502088, -0.01507449333636528, -0.01512249271655057, -0.015170700145586212, -0.01521911675304939, -0.015267743675611692, -0.015316582057085906, -0.015365633048471668, -0.015414897808002955, -0.015464377501194043, -0.015514073300887641, -0.015563986387300919, -0.015614117948074477, -0.01566446917831898, -0.015715041280663394, -0.015765835465303843, -0.015816852950050874, -0.015868094960379088, -0.01591956272947497, -0.01597125749828714, -0.016023180515574297, -0.01607533303795554, -0.01612771632995942, -0.016180331664074094, -0.01623318032079694, -0.016286263588685308, -0.016339582764406606, -0.016393139152788047, 
-0.016446934066869014, -0.016500968827950418, -0.016555244765646585, -0.01660976321793623, -0.01666452553121353, -0.016719533060340207, -0.016774787168696882, -0.016830289228234745, -0.016886040619527823, -0.01694204273182453, -0.016998296963100606, -0.017054804720110853, -0.017111567418442088, -0.017168586482564602, -0.017225863345885887, -0.01728339945080324, -0.017341196248755988, -0.017399255200279122, -0.017457577775055026, -0.01751616545196816, -0.01757501971915667, -0.01763414207406651, -0.017693534023503803, -0.017753197083688582, -0.01781313278030774, -0.017873342648568213, -0.017933828233250397, -0.017994591088760774, -0.018055632779185694, -0.018116954878344325, -0.018178558969841142, -0.01824044664711992, -0.018302619513515687, -0.018365079182308404, -0.018427827276775045, -0.01849086543024267, -0.01855419528614042, -0.018617818498052628, -0.01868173672977047, -0.018745951655343762, -0.018810464959133525, -0.018875278335863208, -0.018940393490670164, -0.01900581213915626, -0.019071536007440302, -0.019137566832206583, -0.01920390636075709, -0.019270556351060286, -0.019337518571800952, -0.019404794802431072, -0.019472386833215907, -0.019540296465284983, -0.019608525510678827, -0.01967707579239791, -0.019745949144448067, -0.01981514741188864, -0.0198846724508779, -0.01995452612871908, -0.020024710323905338, -0.020095226926163133, -0.02016607783649796, -0.02023726496723547, -0.020308790242065264, -0.020380655596081744, -0.020452862975825464, -0.02052541433932262, -0.020598311656125426, -0.020671556907349092, -0.020745152085710643, -0.020819099195564204, -0.020893400252937722, -0.02096805728556642, -0.0210430723329271, -0.021118447446269874, -0.021194184688649628, -0.02127028613495635, -0.021346753871943038, -0.02142358999825332, -0.021500796624448454, -0.021578375873031262, -0.021656329878469056, -0.02173466078721619, -0.021813370757733694, -0.021892461960508203, -0.021971936578068535, -0.022051796805000815, -0.02213204484796291, -0.022212682925693814, 
-0.0222937132690256, -0.022375138120889503, -0.022456959736321863, -0.02253918038246757, -0.022621802338581538, -0.02270482789602698, -0.02278825935827277, -0.022872099040886988, -0.022956349271528537, -0.023041012389936916, -0.023126090747917147, -0.02321158670932462, -0.02329750265004564, -0.023383840957974182, -0.023470604032987438, -0.02355779428691709, -0.02364541414351725, -0.023733466038429595, -0.023821952419144955, -0.023910875744960695, -0.024000238486935235, -0.02409004312783764, -0.024180292162094795, -0.02427098809573347, -0.024362133446318004, -0.024453730742884386, -0.024545782525869732, -0.02463829134703694, -0.024731259769394637, -0.024824690367112007, -0.024918585725429453, -0.025012948440563294, -0.025107781119605148, -0.025203086380415792, -0.02529886685151473, -0.025395125171961037, -0.025491863991231198, -0.02558908596908885, -0.025686793775448864, -0.025784990090234383, -0.02588367760322787, -0.02598285901391477, -0.02608253703131884, -0.026182714373832544, -0.026283393769037003, -0.026384577953517285, -0.026486269672665798, -0.02658847168048043, -0.0266911867393536, -0.026794417619850536, -0.02689816710048127, -0.027002437967460706, -0.02710723301446134, -0.027212555042354456, -0.02731840685894127, -0.027424791278675094, -0.027531711122369774, -0.027639169216901238, -0.0277471683948931, -0.027855711494393712, -0.027964801358539345, -0.02807444083520616, -0.02818463277664821, -0.02829538003912355, -0.028406685482505134, -0.028518551969878653, -0.02863098236712803, -0.028743979542500596, -0.028857546366163542, -0.02897168570974057, -0.029086400445833768, -0.02920169344752911, -0.02931756758788414, -0.029434025739399804, -0.029551070773472465, -0.029668705559826543, -0.029786932965933974, -0.029905755856404873, -0.030025177092366215, -0.030145199530816083, -0.030265826023955743, -0.030387059418502255, -0.030508902554978665, -0.030631358266977132, -0.030754429380403223, -0.030878118712692814, -0.031002429072003502, -0.0311273632563833, 
-0.03125292405290815, -0.03137911423679601, -0.031505936570490944, -0.0316333938027171, -0.03176148866750679, -0.03189022388319415, -0.032019602151378154, -0.032149626155855066, -0.03228029856151658, -0.032411622013212904, -0.03254359913458145, -0.03267623252684054, -0.03280952476754368, -0.032943478409297795, -0.033078095978441004, -0.033213379973680214, -0.033349332864687956, -0.03348595709065519, -0.03362325505880307, -0.033761229142845336, -0.03389988168140804, -0.03403921497640193, -0.03417923129134222, -0.03431993284962313, -0.03446132183273451, -0.03460340037843335, -0.034746170578852424, -0.03488963447856009, -0.035033794072556204, -0.035178651304214846, -0.03532420806316071, -0.035470466183085816, -0.03561742743950291, -0.03576509354743025, -0.0359134661590096, -0.036062546861054645, -0.036212337172528274, -0.0363628385419443, -0.03651405234469634, -0.03666597988030551, -0.03681862236959216, -0.036971980951761366, -0.037126056681407005, -0.03728085052542987, -0.03743636335986161, -0.03759259596660365, -0.03774954903006811, -0.03790722313372409, -0.0380656187565432, -0.038224736269343244, -0.03838457593103029, -0.03854513788472962, -0.03870642215380689, -0.038868428637778216, -0.03903115710810058, -0.03919460720384502, -0.03935877842724684, -0.039523670139126825, -0.039689281554186516, -0.0398556117361681, -0.04002265959287825, -0.04019042387107244, -0.04035890315119284, -0.04052809584196148, -0.04069800017481761, -0.04086861419820544, -0.041039935771692446, -0.041211962559931384, -0.04138469202645122, -0.04155812142727182, -0.041732247804345486, -0.04190706797881381, -0.04208257854407532, -0.042258775858665734, -0.042435656038935794, -0.04261321495153121, -0.042791448205661026, -0.0429703511451563, -0.0431499188403079, -0.04333014607948191, -0.04351102736050292, -0.04369255688180376, -0.043874728533331124, -0.04405753588720357, -0.044240972188118545, -0.04442503034349171, -0.04460970291333488, -0.04479498209985651, -0.044980859736782786, 
-0.04516732727838786, -0.04535437578823376, -0.0455419959276008, -0.045730177943613295, -0.04591891165704741, -0.046108186449808644, -0.046297991252081075, -0.0464883145291327, -0.04667914426776998, -0.04687046796243748, -0.047062272600948266, -0.047254544649837915, -0.047447270039339115, -0.04764043414795613, -0.04783402178664326, -0.04802801718256737, -0.04822240396245292, -0.04841716513549142, -0.04861228307581532, -0.04880773950451875, -0.04900351547121867, -0.04919959133514622, -0.049395946745755856, -0.049592560622845075, -0.049789411136169776, -0.04998647568455178, -0.0501837308744582, -0.050381152498050606, -0.05057871551068735, -0.05077639400787118, -0.050974161201627705, -0.05117198939630968, -0.05136984996380671, -0.051567713318159875, -0.051765548889557124, -0.05196332509771337, -0.05216100932460775, -0.05235856788658293, -0.05255596600577769, -0.05275316778089989, -0.05295013615731215, -0.0531468328964288, -0.053343218544412915, -0.053539252400154905, -0.053734892482531035, -0.05393009549692572, -0.05412481680100652, -0.054319010369748594, -0.05451262875968429, -0.054705623072388965, -0.05489794291717209, -0.05508953637297695, -0.055280349949483124, -0.05547032854738673, -0.055659415417871845, -0.05584755212125153, -0.056034678484770895, -0.056220732559581085, -0.056405650576854885, -0.056589366903063154, -0.05677181399438973, -0.056952922350290695, -0.05713262046619345, -0.05731083478532868, -0.05748748964970571, -0.05766250725021925, -0.057835807575894876, -0.05800730836227656, -0.05817692503895655, -0.05834457067625433, -0.05851015593105112, -0.058673588991785805, -0.058834775522624366, -0.05899361860680968, -0.05915001868920908, -0.059303873518067235, -0.0594550780859907, -0.059603524570167654, -0.05974910227185774, -0.0598916975551628, -0.06003119378510968, -0.06016747126506998, -0.06030040717354182, -0.06042987550033132, -0.06055574698216595, -0.0606778890377712, -0.06079616570246228, -0.060910437562277236, -0.061020561687712915, 
-0.061126391567101354, -0.06122777703968435, -0.06132456422843674, -0.06141659547270354, -0.06150370926070508, -0.06158574016198363, -0.06166251875985283, -0.061733871583931205, -0.06179962104283017, -0.06185958535708124, -0.06191357849238559, -0.061961410093276575, -0.06200288541728434, -0.06203780526970999, -0.06206596593909276, -0.062087159133500686, -0.06210117191772892, -0.062107786651543845, -0.06210678092907432, -0.062097927519484036, -0.06208099430905, -0.062055744244777125, -0.06202193527968968, -0.0619793203199385, -0.061927647173868616, -0.061866658503196933, -0.06179609177645874, -0.06171567922487059, -0.06162514780078311, -0.06152421913887937, -0.06141260952029269, -0.06129002983981956, -0.06115618557639244, -0.06101077676700781, -0.06085349798427382, -0.06068403831777802, -0.06050208135944726, -0.06030730519310508, -0.060099382388398244, -0.059877979999300585, -0.059642759567379425, -0.0593933771300198, -0.05912948323379762, -0.05885072295320208, -0.0585567359148937, -0.05824715632769481, -0.05792161301849682, -0.05757972947428064, -0.057221123890427925, -0.05684540922551097, -0.05645219326273254, -0.05604107867820171, -0.05561166311619675, -0.055163539271595494, -0.05469629497961737, -0.05420951331302919, -0.053702772686962774, -0.05317564697146429, -0.05262770561191049, -0.05205851375739539, -0.051467632397201375, -0.05085461850543044, -0.05021902519388689, -0.04956040187326809, -0.04887829442271838, -0.0481722453677841, -0.04744179406677984, -0.04668647690558894, -0.0459058275008626, -0.0450993769116117, -0.04426665385911546, -0.04340718495510912, -0.04252049493813058, -0.041606106917943725, -0.04066354262789201, -0.03969232268504194, -0.03869196685792869, -0.037661994341727414, -0.036601924040608566, -0.03551127485705098, -0.034389565987843713, -0.03323631722646889, -0.03205104927158576, -0.03083328404123722, -0.029582544992446197, -0.028298357445804237, -0.02698024891463991, -0.025627749438342787, -0.024240391919373, -0.022817712463498913, 
-0.021359250722740357, -0.019864550240517132, -0.018333158798447886, -0.01676462876425619, -0.015158517440184429, -0.013514387411360275, -0.011831806893464799, -0.01011035007911822, -0.008349597482335895, -0.006549136280414317, -0.004708560652600977, -0.002827472114896104, -9.054798503162938E-4, 0.0010577989660180004, 0.003062738847656033, 0.00510970568408471, 0.007199056435155305, 0.009331138833304395, 0.01150629109420228, 0.013724841636453908, 0.015987108810945512, 0.018293400640429978, 0.020644014569918014, 0.02303923722840962, 0.025479344202500705, 0.027964599822336275, 0.03049525696038925, 0.03307155684347945, 0.035693728878455115, 0.038361990491864326, 0.041076546983987466, 0.04383759139746368, 0.046645304400817544, 0.049499854187055096, 0.05240139638750853, 0.05535007400106193, 0.05834601733883054, 0.061389343984336445, 0.06448015876918846, 0.06761855376420246, 0.07080460828589871, 0.07403838891822276, 0.07731994954932725, 0.08064933142324021, 0.08402656320609062, 0.0874516610667046, 0.09092462877116442, 0.09444545779103167, 0.09801412742481254, 0.10163060493226367, 0.1052948456810844, 0.10900679330551044, 0.11276637987634835, 0.11657352608185902, 0.12042814141904365, 0.1243301243946773, 0.12827936273561824, 0.13227573360769887, 0.13631910384271745, 0.14040933017282528, 0.1445462594717942, 0.1487297290024792, 0.15295956666990243, 0.15723559127936507, 0.16155761279894365, 0.165925432625788, 0.17033884385564288, 0.17479763155498723, 0.17930157303523353, 0.1838504381284365, 0.18844398946391522, 0.1930819827453551, 0.19776416702775795, 0.20249028499386834, 0.20726007322946932, 0.2120732624972046, 0.21692957800843138, 0.2218287396926732, 0.2267704624643279, 0.2317544564862221, 0.23678042742964597, 0.24184807673056669, 0.24695710184174655, 0.25210719648033053, 0.25729805087086877, 0.26252935198328775, 0.2678007837657739, 0.27311202737228285, 0.2784627613844902, 0.28385266202804116, 0.28928140338298397, 0.2947486575881925, 0.3002540950397551, 0.30579738458318084, 
0.3113781936993761, 0.31699618868438284, 0.3226510348227007, 0.32834239655438546, 0.3340699376356812, 0.33983332129339183, 0.34563221037287667, 0.35146626747971516, 0.35733515511520186, 0.36323853580551313, 0.3691760722248113, 0.37514742731218337, 0.3811522643826752, 0.38719024723227774, 0.3932610402371859, 0.3993643084472754, 0.40549971767393156, 0.411666934572412, 0.4178656267187487, 0.4240954626813794, 0.43035611208762037, 0.43664724568506547, 0.44296853539810294, 0.44931965437957233, 0.45570027705786254, 0.46211007917936897, 0.4685487378466271, 0.47501593155217303, 0.4815113402082215, 0.48803464517240597, 0.49458552926961363, 0.5011636768101067, 0.5077687736040303, 0.5144005069724304, 0.5210585657550043, 0.527742640314528, 0.5344524225382353, 0.5411876058363252, 0.5479478851374607, 0.5547329568816799, 0.5615425190106851, 0.5683762709555592, 0.5752339136222517, 0.5821151493746997, 0.58901968201586, 0.5959472167667031, 0.6028974602432609, 0.609870120431875, 0.6168649066626954, 0.623881529581601, 0.6309197011205461, 0.6379791344665805, 0.6450595440293814, 0.6521606454077515, 0.6592821553547747, 0.6664237917420739, 0.6735852735229846, 0.680766320694951, 0.6879666542610356, 0.6951859961907532, 0.7024240693802921, 0.7096805976121563, 0.7169553055143301, 0.7242479185190626, 0.7315581628213571, 0.7388857653371387, 0.7462304536613572, 0.7535919560259194, 0.7609700012576224, 0.7683643187361474, 0.7757746383521802, 0.7832006904657008, 0.7906422058645421, 0.7980989157232872, 0.8055705515625782, 0.8130568452088569, 0.8205575287547062, 0.8280723345196819, 0.8356009950119134, 0.8431432428904329, 0.8506988109281786, 0.8582674319760062, 0.8658488389275504, 0.8734427646850214, 0.8810489421261558, 0.8886671040721312, 0.8962969832567484, 0.9039383122967246, 0.9115908236633521, 0.9192542496553664, 0.9269283223732315, 0.9346127736948283, 0.9423073352526559, 0.950011738412301, 0.9577257142527982, 0.9654489935482097, 0.9731813067510662, 0.9809223839772657, 0.9886719549927507, 
0.996429749201738, 1.0041954956367978, 1.0119689229505249, 1.0197497594088958, 1.0275377328865585, 1.0353325708635472, 1.043134000423922, 1.0509417482560923, 1.0587555406547684, 1.066575103524554, 1.0744001623852886, 1.0822304423789562, 1.0900656682781027, 1.097905564495913, 1.1057498550976381, 1.1135982638135935, 1.1214505140534166, 1.1293063289216938, 1.1371654312348198, 1.1450275435390704, 1.1528923881297533, 1.160759687071282, 1.1686291622183647, 1.176500535237904, 1.1843735276316953, 1.192247860759775, 1.2001232558643413, 1.2079994340941602, 1.2158761165292815, 1.2237530178368194, 1.231629820989617, 1.2395062417399592, 1.2473820023957682, 1.2552568253233929, 1.2631304329705388, 1.271002547888409, 1.2788728927535482, 1.28674119038888, 1.294607163783859, 1.3024705361138358, 1.3103310307583003, 1.3181883713179987, 1.3260422816307968, 1.333892485786261, 1.3417387081387897, 1.3495806733192084, 1.3574181062449238, 1.3652507321283036, 1.373078276483517, 1.3809004651314298, 1.3887170242030853, 1.3965276801408995, 1.4043321596985188, 1.4121301899384249, 1.4199214982279988, 1.427705812233459, 1.4354828599122058, 1.4432523695030666, 1.4510140695149552, 1.4587676887136065, 1.466512956106546, 1.4742496009264612, 1.4819773526126634, 1.4896959407912327, 1.4974050952532934, 1.5051045459318813, 1.5127940228775283, 1.520473256232127, 1.5281419762017314, 1.5357999130279993, 1.5434467969585173, 1.5510823582159938, 1.558706326966373, 1.5663184332861824, 1.5739184071288257, 1.5815059782902576, 1.589080876373806, 1.5966428307545573, 1.6041915705431111, 1.6117268245488865, 1.6192483212431017, 1.6267557887214306, 1.6342489546665222, 1.641727546310336, 1.6491912903964585, 1.6566399131424951, 1.6640731402025233, 1.6714906966297063, 1.6788923068393593, 1.6862776945720555, 1.693646582857449, 1.7009986939783368, 1.7083337494354858, 1.7156514699128744, 1.7229515752438427, 1.7302337843778004, 1.7374978153479064, 1.744743385239592, 1.751970210159993, 1.7591780052085324, 1.766366484448378, 
1.7735353608793265, 1.7806843464116469, 1.787813151841287, 1.7949214868265178, 1.802009059865806, 1.8090755782772499, 1.8161207481795563, 1.823144274474532, 1.8301458608312822, 1.8371252096720811, 1.8440820221601086, 1.8510159981889478, 1.8579268363739585, 1.86481423404582, 1.8716778872459585, 1.8785174907240685, 1.8853327379379572, 1.892123321055605, 1.8988889309594648, 1.905629257253321, 1.9123439882715174, 1.9190328110907413, 1.9256954115445977, 1.9323314742406954, 1.9389406825806503, 1.945522718783003, 1.952077263908952, 1.9586039978912826, 1.965102599566376, 1.971572746709371, 1.9780141160727607, 1.9844263834282705, 1.990809223612243, 1.9971623105746392, 2.0034853174316107, 2.0097779165219225, 2.0160397794671185, 2.0222705772356555, 2.0284699802109873, 2.0346376582638728, 2.040773280828706, 2.0468765169842174, 2.0529470355384554, 2.0589845051182296, 2.0649885942630597, 2.0709589715236554, 2.076895305565041, 2.082797265274375, 2.088664519873726, 2.094496739037244, 2.1002935930136815, 2.106054752753596, 2.1117798900413924, 2.1174686776326537, 2.1231207893962223, 2.1287359004614803, 2.1343136873705184, 2.1398538282354194, 2.145356002900571, 2.1508198931098903, 2.156245182679063, 2.1616315576727523, 2.1669787065864314, 2.172286320533281, 2.1775540934355186, 2.1827817222205854, 2.187968907021464, 2.1931153513817625, 2.1982207624645085, 2.203284851265394, 2.208307332829686, 2.2132879264727983, 2.218226356004337, 2.223122349955254, 2.227975641808271, 2.232785970230427, 2.2375530793088325, 2.2422767187876644, 2.2469566443076205, 2.2515926176464856, 2.2561844069608137, 2.2607317870284005, 2.2652345394910762, 2.2696924530973606, 2.2741053239445166, 2.2784729557200185, 2.2827951599410996, 2.2870717561929244, 2.2913025723641613, 2.295487444879683, 2.299626218930144, 2.3037187486977313, 2.307764897577192, 2.3117645383926684, 2.3157175536085663, 2.3196238355347614, 2.3234832865256005, 2.3272958191715674, 2.3310613564837848, 2.3347798320705797, 2.33845119030537, 
2.3420753864858526, 2.3456523869837183, 2.349182169384389, 2.352664722616442, 2.356100047070529, 2.3594881547067015, 2.3628290691506684, 2.3661228257776985, 2.369369471784791, 2.37256906624969, 2.3757216801776906, 2.3788273965348914, 2.3818863102684853, 2.384898528313564, 2.387864169585984, 2.390783364962009, 2.3936562572438254, 2.3964830011113323, 2.399263763060055, 2.4019987213254756, 2.4046880657933243, 2.407331997896578, 2.4099307304987416, 2.4124844877640967, 2.4149935050147207, 2.41745802857486, 2.4198783156026256, 2.4222546339097217, 2.4245872617691284, 2.42687648771149, 2.4291226103104133, 2.43132593795711, 2.43348678862501, 2.435605489624571, 2.437682377349042, 2.439717797011598, 2.4417121023741526, 2.443665655469041, 2.445578826313217, 2.447451992616462, 2.4492855394835047, 2.4510798591109744, 2.452835350479664, 2.4545524190428116, 2.456231476410749, 2.4578729400328303, 2.4594772328768846, 2.461044783106997, 2.4625760237602163, 2.464071392422418, 2.465531330904419, 2.4669562849181395, 2.468346703754185, 2.4697030399607374, 2.4710257490241787, 2.472315289052599, 2.473572120461951, 2.4747967056656144, 2.475989508767543, 2.4771509952599136, 2.478281631724875, 2.4793818855415495, 2.480452224597936, 2.481493117008474, 2.4825050308374577, 2.4834884338284366, 2.484443793139868, 2.4853715750875844, 2.4862722448936676, 2.4871462664425517, 2.4879941020440173, 2.4888162122036475, 2.489613055400339, 2.490385087871686, 2.4911327634065823, 2.4918565331457216, 2.4925568453894753, 2.493234145413968, 2.493888875294387, 2.4945214737363233, 2.495132375914812, 2.4957220133211404, 2.496290813617081, 2.496839200497174, 2.497367593557972, 2.497876408175463, 2.4983660553893516, 2.4988369417948713, 2.499289469441721, 2.499724035740179, 2.5001410333741623, 2.500540850220921, 2.5009238692777163, 2.501290468594851, 2.501641021215148, 2.5019758951197972, 2.5022954531800523, 2.502600053115079, 2.502890047455409, 2.5031657835122125, 2.5034276033516054, 2.503675843774668, 
2.5039108363021434, 2.5041329071642817, 2.504342377295351, 2.5045395623326083, 2.504724772619742, 2.504898313214472, 2.5050604839002917, 2.5052115792016307, 2.5053518884033883, 2.505481695573313, 2.505601279588144, 2.505710914162692, 2.505810867882082, 2.50590140423672, 2.5059827816600126, 2.506055253568669, 2.5061190684052943, 2.5061744696832293, 2.5062216960334807, 2.5062609812538343, 2.506292554359303, 2.506316639634715, 2.5063334566886004, 2.5063432205085174, 2.5063461415179242, 2.5063424256337443, 2.5063322743256204, 2.50631588467559, 2.5062934494388456, 2.5062651571054184, 2.50623119196215, 2.5061917341554936, 2.5061469597546653, 2.506097040815289, 2.5060421454432555, 2.505982437858666, 2.5059180784603825, 2.505849223890054, 2.5057760270966174, 2.5056986374004855, 2.5056172005578023, 2.5055318588242637, 2.50544275101904, 2.5053500125880914, 2.505253775667425, 2.505154169145661, 2.5050513187266605, 2.5049453469910974, 2.5048363734580312, 2.5047245146456767, 2.5046098841316446, 2.504492592612741, 2.5043727479639446, 2.5042504552967855, 2.504125817017258, 2.503998932882614, 2.5038699000579805, 2.503738813171794, 2.5036057643707283, 2.5034708433737713, 2.5033341375256875, 2.503195731849384, 2.503055709097948, 2.502914149805333, 2.502771132336819, 2.5026267329381424, 2.502481025784258, 2.5023340830269127, 2.502185974841751, 2.502036769474288, 2.5018865332852114, 2.5017353307950714, 2.5015832247276277, 2.50143027605305, 2.501276544029702, 2.5011220862456285, 2.500966958658892, 2.500811215637221, 2.500654909997081, 2.500498093041565, 2.500340814597913, 2.500183123054058, 2.500025065394408, 2.499866687234972, 2.4997080328577135, 2.499549145244213, 2.4993900661084187, 2.4992308359291173, 2.499071493981114, 2.4989120783663337, 2.4987526260437365, 2.498593172858903, 2.4984337535727796, 2.4982744018898915, 2.4981151504857557, 2.4979560310339384, 2.497797074232147, 2.4976383098280883, 2.4974797666443607, 2.497321472603111, 2.497163454749808, 2.497005739276741, 
2.496848351545749, 2.49669131611042, 2.4965346567379276, 2.4963783964302735, 2.4962225574448427, 2.496067161314704, 2.495912228868307, 2.4957577802487463, 2.4956038349324086, 2.4954504117473677, 2.495297528891281, 2.4951452039486335, 2.494993453907888, 2.4948422951779436, 2.494691743604279, 2.4945418144847724, 2.4943925225849206, 2.494243882152909, 2.494095906934131, 2.493948610185435, 2.4938020046889675, 2.493656102765642, 2.4935109162883338, 2.4933664566946843, 2.4932227349996, 2.4930797618074356, 2.4929375473238284, 2.4927961013672286, 2.4926554333802526, 2.492515552440544, 2.4923764672715043, 2.4922381862526723, 2.492100717429901, 2.4919640685251805, 2.4918282469462048, 2.491693259795875, 2.491559113881337, 2.491425815722767, 2.49129337156217, 2.4911617873716603, 2.4910310688617825, 2.4909012214893833, 2.4907722504654206, 2.490644160762565, 2.4905169571224377, 2.490390644062966, 2.490265225885114, 2.490140706679936, 2.4900170903350216, 2.4898943805408753, 2.4897725807974425, 2.48965169441986, 2.489531724544653, 2.489412674135381, 2.4892945459883107, 2.4891773427378276, 2.4890610668617805, 2.488945720686715, 2.4888313063927967, 2.4887178260188128, 2.4886052814668975, 2.4884936745072053, 2.488383006782338, 2.4882732798118825, 2.4881644949965263, 2.488056653622403, 2.4879497568648783, 2.4878438057927763, 2.4877388013719672, 2.4876347444692795, 2.487531635855964, 2.487429476211372, 2.487328266126199, 2.4872280061060055, 2.4871286965743566, 2.487030337875999, 2.4869329302799468, 2.486836473982402, 2.486740969109756, 2.486646415721359, 2.4865528138122057, 2.4864601633157344, 2.4863684641063446, 2.4862777160018785, 2.4861879187662197, 2.486099072111511, 2.4860111757006127, 2.4859242291492487, 2.485838232028298, 2.485753183865887, 2.4856690841494453, 2.485585932327692, 2.4855037278127705, 2.485422469981894, 2.485342158179414, 2.485262791718493, 2.4851843698829086, 2.4851068919288193, 2.485030357086235, 2.4849547645608108, 2.4848801135353042, 2.484806403171162, 
2.484733632609905, 2.4846618009746226, 2.4845909073714, 2.484520950890598, 2.4844519306082127, 2.484383845587136, 2.4843166948784057, 2.484250477522405, 2.484185192550026, 2.4841208389838374, 2.484057415839153, 2.483994922125117, 2.483933356845796, 2.4838727190011234, 2.483813007587868, 2.483754221600681, 2.4836963600329325, 2.48363942187766, 2.4835834061284525, 2.483528311780174, 2.4834741378300356, 2.4834208832781224, 2.4833685471283133, 2.4833171283890394, 2.4832666260739167, 2.4832170392026107, 2.4831683668012836, 2.4831206079035226, 2.4830737615507887, 2.4830278267931463, 2.482982802689814, 2.4829386883097824, 2.4828954827323413, 2.4828531850476603, 2.4828117943573673, 2.482771309774969, 2.48273173042636, 2.4826930554503726, 2.4826552839992253, 2.4826184152389197, 2.4825824483497514, 2.482547382526656, 2.482513216979659, 2.4824799509342546, 2.4824475836318367, 2.48241611432997, 2.482385542302824, 2.482355866841423, 2.4823270872541245, 2.482299202866729, 2.482272213023002, 2.4822461170847667, 2.4822209144323253, 2.4821966044646766, 2.482173186599719, 2.482150660274609, 2.4821290249459205, 2.4821082800898355, 2.4820884252024995, 2.4820694598000603, 2.4820513834189795, 2.482034195616161, 2.4820178959691104, 2.4820024840762445, 2.4819879595567444, 2.4819743220510757, 2.481961571220842, 2.4819497067490683, 2.4819387283401944, 2.4819286357203727, 2.4819194286373816, 2.4819111068607866, 2.4819036701820725, 2.4818971184147083, 2.481891451394117, 2.481886668977886, 2.4818827710456817, 2.481879757499434, 2.4818776282632404, 2.4818763832834305, 2.481876022528635, 2.4818765459897776, 2.4818779536800655, 2.481880245634937, 2.481883421912151, 2.4818874825916826, 2.4818924277757466, 2.4818982575886936, 2.4819049721770523, 2.4819125717094366, 2.4819210563764234, 2.48193042639058, 2.4819406819863197, 2.4819518234198696, 2.4819638509691115, 2.4819767649335187, 2.4819905656340504, 2.482005253412946, 2.4820208286337557, 2.482037291681028, 2.4820546429602715, 2.482072882897772, 
2.4820920119403977, 2.4821120305554185, 2.482132939230406, 2.4821547384729215, 2.48217742881038, 2.482201010789789, 2.4822254849775893, 2.4822508519593462, 2.482277112339519, 2.4823042667412656, 2.4823323158060786, 2.4823612601935077, 2.482391100581, 2.482421837663429, 2.482453472152886, 2.482486004778239, 2.4825194362849787, 2.482553767434669, 2.482588999004661, 2.48262513178775, 2.482662166591708, 2.48270010423894, 2.4827389455659663, 2.482778691423109, 2.482819342673939, 2.482860900194865, 2.4829033648745504, 2.4829467376136436, 2.4829910193239275, 2.483036210928077, 2.483082313358912, 2.4831293275589834, 2.4831772544798545, 2.4832260950815357, 2.4832758503318537, 2.483326521205913, 2.4833781086852156, 2.483430613757112, 2.4834840374141045, 2.4835383806530595, 2.4835936444744666, 2.483649829881637, 2.4837069378799788, 2.48376496947607, 2.4838239256769006, 2.4838838074889136, 2.483944615917144, 2.4840063519642728, 2.484069016629736, 2.4841326109085973, 2.484197135790663, 2.484262592259352, 2.4843289812906555, 2.4843963038519856, 2.48446456090113, 2.4845337533849383, 2.4846038822382273, 2.4846749483824206, 2.4847469527243984, 2.4848198961550505, 2.4848937795479724, 2.484968603758078, 2.4850443696201356, 2.485121077947307, 2.485198729529544, 2.4852773251321247, 2.4853568654939933, 2.4854373513260533, 2.4855187833095593, 2.48560116209425, 2.485684488296556, 2.4857687624978757, 2.4858539852423998, 2.485940157035423, 2.4860272783410746, 2.486115349580432, 2.486204371129232, 2.4862943433157323, 2.486385266418419, 2.4864771406636743, 2.4865699662234535, 2.4866637432126635, 2.486758471686762, 2.486854151639082, 2.4869507829981368, 2.4870483656249593, 2.4871468993101207, 2.4872463837708976, 2.4873468186482866, 2.487448203503911, 2.487550537816748, 2.4876538209800265, 2.487758052297779, 2.487863230981418, 2.487969356146185, 2.4880764268075097, 2.4881844418773014, 2.488293400160054, 2.4884033003489034, 2.4885141410215788, 2.4886259206362302, 2.4887386375270353, 
2.488852289899965, 2.488966875828063, 2.489082393246898, 2.4891988399497382, 2.4893162135825664, 2.489434511639114, 2.489553731455648, 2.489673870205513, 2.4897949248937463, 2.4899168923513963, 2.490039769229762, 2.4901635519943133, 2.4902882369187425, 2.4904138200785066, 2.4905402973444435, 2.490667664376157, 2.4907959166151024, 2.4909250492776627, 2.491055057347906, 2.4911859355701838, 2.4913176784415825, 2.491450280204011, 2.4915837348363095, 2.4917180360459357, 2.4918531772605466, 2.4919891516192463, 2.4921259519637795, 2.4922635708292966, 2.4924020004349394, 2.4925412326741987, 2.4926812591050367, 2.492822070939673, 2.4929636590340873, 2.4931060138773664, 2.4932491255806433, 2.4933929838657782, 2.493537578053812, 2.4936828970529454, 2.4938289293463893, 2.4939756629798238, 2.4941230855484133, 2.4942711841837357, 2.4944199455400726, 2.494569355780584, 2.494719400563016, 2.4948700650250295, 2.4950213337691345, 2.495173190847392, 2.495325619745465, 2.4954786033664784, 2.495632124014396, 2.495786163376895, 2.4959407025079643, 2.4960957218098665, 2.496251201014854, 2.4964071191662978, 2.4965634545993156, 2.496720184921018, 2.496877286990205, 2.4970347368966186, 2.4971925099395245, 2.497350580605999, 2.4975089225484792, 2.497667508561954, 2.497826310560396, 2.4979852995528127, 2.4981444456186104, 2.4983037178824405, 2.498463084488386, 2.4986225125735833, 2.498781968241217, 2.498941416532883, 2.499100821400302, 2.4992601456764145, 2.499419351045809, 2.49957839801439, 2.4997372458785634, 2.499895852693461, 2.500054175240787, 2.5002121689955876, 2.500369788092673, 2.500526985292045, 2.5006837119436347, 2.500839917951372, 2.500995551736513, 2.501150560200072, 2.501304888684548, 2.5014584809349776, 2.501611279059065, 2.5017632234865124, 2.5019142529276923, 2.5020643043314283, 2.5022133128418913, 2.502361211754916, 2.5025079324732253, 2.5026534044610864, 2.5027975551979496, 2.5029403101314447, 2.5030815926294894, 2.5032213239314878, 2.503359423098882, 2.503495806964824, 
2.5036303900830066, 2.5037630846758034, 2.5038938005815186, 2.504022445200854, 2.5041489234427754, 2.5042731376693657, 2.50439498764022, 2.504514370455799, 2.5046311805003905, 2.504745309384207, 2.5048566458847987, 2.5049650758879314, 2.5050704823277448, 2.505172745126398, 2.505271741133106, 2.5053673440627033, 2.505459424433644, 2.505547849505585, 2.5056324832166084, 2.5057131861199737, 2.5057898153206404, 2.505862224411411, 2.5059302634089997, 2.5059937786897906, 2.5060526129256027, 2.5061066050192657, 2.5061555900404144, 2.5061993991612193, 2.506237859592297, 2.5062707945189064, 2.5062980230374463, 2.506319360092347, 2.506334616413409, 2.506343598453787, 2.5063461083286374, 2.5063419437544723, 2.5063308979894496, 2.5063127597746178, 2.506287313276308, 2.506254338029602, 2.5062136088832805, 2.506164895946135, 2.5061079645347846, 2.5060425751234083, 2.5059684832950486, 2.505885439695179, 2.505793189987082, 2.5056914748097263, 2.505580029737978, 2.505458585245333, 2.505326866669467, 2.5051845941805366, 2.5050314827526208, 2.5048672421383107, 2.504691576846712, 2.504504186124927, 2.504304763943286, 2.504092998984487, 2.5038685746366647, 2.503631168990863, 2.5033804548427234, 2.5031160996989703, 2.5028377657884215, 2.502545110078102, 2.5022377842944055, 2.501915434949507, 2.501577703373311, 2.5012242257508848, 2.500854633165912, 2.5004685516498455, 2.500065602237355, 2.499645401027979, 2.499207559254276, 2.4987516833564354, 2.498277375063741, 2.4977842314827354, 2.4972718451925955, 2.4967398043473508, 2.496187692785516, 2.495615090146937, 2.495021571997115, 2.49440670995897, 2.493770071852248, 2.4931112218405085, 2.492429720585773, 2.4917251254109436, 2.4909969904699816, 2.490244866925732, 2.489468303135609, 2.4886668448449014, 2.4878400353878236, 2.4869874158961482, 2.4861085255154416, 2.4852029016286696, 2.4842700800872857, 2.4833095954493825, 2.482320981225082, 2.4813037701286778, 2.480257494337576, 2.4791816857577764, 2.4780758762955326, 2.476939598135126, 
2.4757723840223362, 2.4745737675533803, 2.473343283469056, 2.4720804679536066, 2.4707848589380976, 2.469455996407813, 2.468093422713471, 2.4666966828854338, 2.4652653249509866, 2.463798900253873, 2.4622969637757217, 2.4607590744589065, 2.4591847955301924, 2.4575736948249407, 2.4559253451109706, 2.45423932441175, 2.4525152163284125, 2.4507526103597814, 2.4489511022200485, 2.4471102941534824, 2.445229795245478, 2.443309221729459, 2.4413481972890434, 2.4393463533547557, 2.437303329394931, 2.4352187731999893, 2.4330923411596146, 2.430923698532413, 2.428712519707218, 2.4264584884557303, 2.4241612981758927, 2.4218206521255228, 2.419436263645718, 2.4170078563735173, 2.414535164443535, 2.41201793267801, 2.4094559167649825, 2.406848883424211, 2.4041966105605574, 2.401498887404488, 2.398755514639513, 2.395966304516344, 2.393131080953484, 2.390249679624292, 2.3873219480302788, 2.384347745560555, 2.381326943537557, 2.378259425248867, 2.375145085965224, 2.3719838329449923, 2.368775585424887, 2.3655202745973316, 2.362217843574653, 2.3588682473400544, 2.355471452686033, 2.352027438140224, 2.3485361938789753, 2.3449977216292552, 2.341412034558962, 2.3377791571561795, 2.3340991250977785, 2.33037198510777, 2.3265977948057994, 2.3227766225464856, 2.318908547249686, 2.3149936582225488, 2.3110320549736905, 2.307023847019998, 2.3029691536865604, 2.2988681039004377, 2.2947208359784232, 2.290527497409682, 2.286288244633612, 2.2820032428134125, 2.277672665605958, 2.2732966949284483, 2.2688755207222937, 2.264409340714842, 2.2598983601791476, 2.2553427916926263, 2.250742854894777, 2.2460987762443625, 2.2414107887767942, 2.2366791318617247, 2.231904050961504, 2.227085797390795, 2.222224628077691, 2.2173208053266387, 2.2123745965835626, 2.207386274203456, 2.202356115220674, 2.197284401122313, 2.1921714176247047, 2.1870174544535956, 2.1818228051278137, 2.1765877667469784, 2.1713126397831752, 2.165997727876914, 2.1606433376374232, 2.155249778447466, 2.1498173622727226, 2.144346403475924, 
2.1388372186357616, 2.133290126370616, 2.1277054471672887, 2.122083503214622, 2.1164246182421307, 2.1107291173636806, 2.1049973269261857, 2.0992295743632923, 2.093426188054151, 2.0875874971871884, 2.0817138316287487, 2.0758055217969025, 2.069862898539914, 2.0638862930197766, 2.0578760366004634, 2.0518324607409375, 2.0457558968928913, 2.0396466764030663, 2.0335051304201786, 2.0273315898063125, 2.0211263850527104, 2.014889846199886, 2.0086223027620225, 2.0023240836554868, 1.9959955171314543, 1.9896369307125377, 1.9832486511332474, 1.9768310042843709, 1.970384315161007, 1.9639089078142622, 1.9574051053065238, 1.9508732296701443, 1.9443136018695544, 1.9377265417666334, 1.9311123680892797, 1.9244713984031252, 1.9178039490862537, 1.9111103353068837, 1.9043908710038793, 1.897645868870111, 1.8908756403384825, 1.8840804955705233, 1.8772607434476738, 1.8704166915648792, 1.8635486462266748, 1.8566569124455912, 1.8497417939427254, 1.8428035931506277, 1.8358426112181245, 1.8288591480173473, 1.8218535021525781, 1.8148259709711292, 1.8077768505759542, 1.8007064358400902, 1.7936150204227657, 1.7865028967872072, 1.779370356219933, 1.7722176888516379, 1.7650451836795118, 1.7578531285909085, 1.750641810388387, 1.7434115148159792, 1.7361625265866842, 1.728895129411098, 1.7216096060270822, 1.7143062382304697, 1.7069853069067555, 1.6996470920635613, 1.6922918728640313, 1.6849199276608768, 1.6775315340311923, 1.670126968811815, 1.6627065081352645, 1.655270427466104, 1.6478190016377843, 1.6403525048897176, 1.6328712109047157, 1.6253753928464727, 1.6178653233972755, 1.6103412747956467, 1.6028035188740053, 1.5952523270961008, 1.5876879705943252, 1.5801107202066744, 1.5725208465133629, 1.5649186198729235, 1.5573043104578317, 1.549678188289463, 1.5420405232723564, 1.5343915852276646, 1.5267316439257534, 1.5190609691178507, 1.51137983056665, 1.503688498075789, 1.4959872415181787, 1.4882763308630194, 1.4805560362015644, 1.4728266277713655, 1.4650883759791566, 1.4573415514221635, 
1.4495864249078234, 1.4418232674718812, 1.434052350394801, 1.4262739452164261, 1.4184883237488908, 1.4106957580877268, 1.402896520621156, 1.395090884037538, 1.3872791213309492, 1.3794615058049113, 1.3716383110743204, 1.3638098110653898, 1.3559762800139645, 1.3481379924618617, 1.3402952232515393, 1.3324482475190105, 1.324597340685114, 1.3167427784451053, 1.308884836756767, 1.3010237918269967, 1.2931599200969852, 1.285293498226149, 1.2774248030747224, 1.2695541116853768, 1.2616817012636796, 1.2538078491577551, 1.2459328328370658, 1.2380569298706061, 1.230180417904465, 1.2223035746390045, 1.2144266900009233, 1.2065500794030222, 1.1986740243389642, 1.1907988037179318, 1.182924696375823, 1.175051981050569, 1.167180936357811, 1.1593118407671237, 1.1514449725787346, 1.1435806099009564, 1.1357190306284357, 1.1278605124211922, 1.1200053326846504, 1.1121537685506957, 1.1043060968598262, 1.0964625941445458, 1.0886235366139507, 1.080789200139677, 1.0729598602432504, 1.0651357920848277, 1.0573172704535017, 1.0495045697590986, 1.0416979640255621, 1.0338977268859906, 1.0261041315792683, 1.0183174509484227, 1.0105379574406477, 1.002765923109021, 0.9950016196159785, 0.9872453182384819, 0.979497289874907, 0.9717578050536582, 0.9640271339434681, 0.9563055463654437, 0.9485933118067202, 0.9408906994358077, 0.9331979781195461, 0.9255154164416531, 0.9178432827228679, 0.9101818450425817, 0.9025313712619696, 0.8948921290485845, 0.8872643859022917, 0.8796484091826553, 0.8720444661375506, 0.8644528239330197, 0.856873749684399, 0.8493075104885301, 0.8417543734570656, 0.8342146057508246, 0.8266884746150661, 0.8191762474157624, 0.8116781916766169, 0.8041945751169307, 0.7967256656901569, 0.7892717316231088, 0.7818330414557964, 0.7744098640817294, 0.7670024687887036, 0.7596111252999911, 0.7522361038157883, 0.7448776750550037, 0.7375361102971547, 0.7302116814243707, 0.7229046609635078, 0.7156153221281463, 0.7083439388605222, 0.7010907858732542, 0.6938561386908555, 0.6866402736908703, 
0.6794434681446091, 0.6722660002574272, 0.6651081492083806, 0.657970195189299, 0.6508524194430838, 0.6437551043012104, 0.6366785332203297, 0.6296229908178946, 0.622588762906691, 0.6155761365282488, 0.6085853999849715, 0.6016168428709038, 0.594670756101108, 0.5877474319394639, 0.5808471640248443, 0.5739702473955361, 0.5671169785118598, 0.5602876552768011, 0.5534825770546108, 0.5467020446872377, 0.5399463605084609, 0.5332158283556958, 0.5265107535792333, 0.5198314430488846, 0.5131782051578879, 0.5065513498239205, 0.4999511884871833, 0.49337803410531733, 0.48683220114510883, 0.48031400557080817, 0.4738237648289924, 0.4673617978297542, 0.46092842492416963, 0.45452396787782345, 0.44814874984039144, 0.44180309531099615, 0.43548733009929563, 0.429201781282161, 0.4229467771557691, 0.4167226471830582, 0.4105297219363169, 0.40436833303485964, 0.39823881307762743, 0.3921414955705704, 0.38607671484877326, 0.380044805993093, 0.3740461047412895, 0.36808094739347563, 0.36214967071186266, 0.3562526118146097, 0.35039010806375837, 0.3445624969471187, 0.3387701159541062, 0.33301330244535554, 0.3272923935161536, 0.32160772585357444, 0.31595963558730555, 0.3103484581341671, 0.304774528036243, 0.2992381787926802, 0.29373974268514236, 0.28827955059693583, 0.2828579318259004, 0.2774752138910339, 0.27213172233301086, 0.26682778050860545, 0.26156370937922724, 0.25633982729358684, 0.25115644976469065, 0.24601388924136397, 0.24091245487440555, 0.2358524522776464, 0.230834183284112, 0.22585794569751805, 0.2209240330394392, 0.2160327342923397, 0.21118433363886285, 0.2063791101976785, 0.20161733775622973, 0.19689928450082178, 0.19222521274436571, 0.18759537865226028, 0.183010031966843, 0.1784694157308288, 0.17397376601030792, 0.16952331161769266, 0.16511827383519992, 0.1607588661394091, 0.15644529392738066, 0.15217775424496063, 0.14795643551781418, 0.14378151728576316, 0.13965316994107232, 0.13557155447121078, 0.13153682220675486, 0.1275491145750179, 0.12360856286000076, 0.11971528796932868, 
0.1158694002087059, 0.1120709990645438, 0.10832017299533793, 0.10461699923235507, 0.10096154359025834, 0.09735386028814941, 0.09379399178160917, 0.09028196860626489, 0.08681780923333418, 0.0834015199376535, 0.08003309467861797, 0.07671251499443821, 0.07343974991013413, 0.07021475585956112, 0.06703747662182871, 0.06390784327237062, 0.06082577414890598, 0.057791174832518784, 0.0548039381439772, 0.05186394415544731, 0.04897106021767133, 0.04612514100263071, 0.04332602856173023, 0.040573552399404175, 0.03786752956207926, 0.03520776474238008, 0.032594050398355855, 0.03002616688755301, 0.027503882615658084, 0.02502695419941194, 0.022595126643490112, 0.020208133530936413, 0.017865697226781847, 0.015567529094400134, 0.013313329724119041, 0.011102789173622483, 0.008935587219585374, 0.006811393620015902, 0.004729868386727957, 0.0026906620673486942, 6.934160362760036E-4, -0.0012622372060648746, -0.0031766737262872158, -0.005050277844059514, -0.006883441811752154, -0.008676565488351808, -0.010430056008309669, -0.012144327445982539, -0.013819800476316728, -0.015456902032443612, -0.01705606496082148, -0.01861772767456559, -0.020142333805606163, -0.02163033185626685, -0.023082174850900656, -0.024498319988144807, -0.025879228294385388, -0.027225364278975088, -0.028537195591743593, -0.029815192683327926, -0.031059828468793724, -0.03227157799505164, -0.03345091811249329, -0.034598327151296224, -0.035714284602791205, -0.03679927080627511, -0.037853766641633785, -0.03887825322810434, -0.03987321162948446, -0.040839122566085344, -0.0417764661336716, -0.04268572152964839, -0.04356736678669548, -0.04442187851404794, -0.04524973164659129, -0.046051399201913874, -0.04682735204545091, -0.047578058663823086, -0.04830398494644357, -0.04900559397547489, -0.04968334582416875, -0.050337697363620815, -0.050969102077950273, -0.05157800988789409, -0.05216486698279458, -0.05273011566094227, -0.05327419417821623, -0.053797536604961255, -0.0543005726910107, -0.05478372773877589, -0.05524742248428437, 
-0.05569207298606425, -0.05611809052173733, -0.05652588149219731, -0.05691584733322369, -0.057288384434385276, -0.05764388406507055, -0.05798273230748727, -0.058305309996456514, -0.05861199266582966, -0.05890315050134874, -0.05917914829976365, -0.05944034543402954, -0.059687095824383585, -0.05991974791512117, -0.060138644656873255, -0.06034412349419323, -0.060536516358260524, -0.060716149664504414, -0.06088334431495944, -0.061038415705153226, -0.0611816737353387, -0.06131342282588226, -0.061433961936612845, -0.061543584589953126, -0.06164257889764272, -0.06173122759087657, -0.061809808053676196, -0.06187859235932214, -0.061937847309670904, -0.06198783447719504, -0.06202881024957278, -0.062061025876673906, -0.0620847275197808, -0.062100156302891354, -0.06210754836595783, -0.06210713491991507, -0.062099142303357335, -0.06208379204072928, -0.06206130090189903, -0.062031880962985494, -0.06199573966831824, -0.061953079893411415, -0.061904100008835686, -0.061848993944884315, -0.06178795125692065, -0.06172115719131059, -0.061648792751845344, -0.06157103476655691, -0.06148805595483989, -0.061400024994799754, -0.061307106590739374, -0.06120946154071577, -0.06110724680408787, -0.06100061556899168, -0.06088971731967362, -0.06077469790362535, -0.06065569959845775, -0.060532861178461184, -0.060406317980802995, -0.06027620197130856, -0.060142641809788136, -0.06000576291486091, -0.05986568752824189, -0.059722534778451866, -0.05957642074392033, -0.05942745851544542, -0.059275758257987476, -0.05912142727176667, -0.05896457005264205, -0.058805288351750036, -0.05864368123438238, -0.05847984513808618, -0.0583138739299712, -0.058145858963209326, -0.057975889132713856, -0.057804050929989775, -0.057630428497144795, -0.05745510368005468, -0.05727815608067402, -0.05709966310849394, -0.05691970003113553, -0.05673834002408401, -0.05655565421955798, -0.05637171175451443, -0.056186579817793625, -0.05600032369640098, -0.05581300682093267, -0.05562469081014834, -0.055435435514691114, 
-0.05524529905996592, -0.05505433788817853, -0.054862606799541545, -0.054670158992656925, -0.05447704610407772, -0.054283318247064366, -0.0540890240495355, -0.053894210691228975, -0.05369892394007843, -0.05350320818781639, -0.05330710648481406, -0.05311066057416605, -0.05291391092503257, -0.05271689676524791, -0.05251965611320744, -0.05232222580904307, -0.05212464154509777, -0.05192693789571081, -0.05172914834632656, -0.05153130532193364, -0.05133344021485141, -0.051135583411870314, -0.050937764320761504, -0.050740011396162785, -0.050542352164855614, -0.05034481325044496, -0.05014742039744803, -0.0499501984948109, -0.049753171598857754, -0.04955636295568666, -0.0493597950230234, -0.049163489491541125, -0.048967467305662066, -0.04877174868384701, -0.0485763531383843, -0.04838129949469181, -0.048186605910138285, -0.04799228989239657, -0.047798368317338955, -0.047604857446483115, -0.0474117729440004, -0.04721912989329398, -0.04702694281315883, -0.04683522567353107, -0.046643991910836353, -0.046453254442947824, -0.04626302568376037, -0.04607331755739165, -0.04588414151201768, -0.04569550853335175, -0.045507429157776114, -0.04531991348513216, -0.0451329711911794, -0.044946611539730466, -0.044760843394469035, -0.044575675230460236, -0.044391115145358276, -0.04420717087032147, -0.04402384978063871, -0.0438411589060788, -0.043659104940963187, -0.04347769425397602, -0.04329693289771132, -0.0431168266179698, -0.042937380862805935, -0.042758600791336816, -0.042580491282314, -0.042403056942467446, -0.04222630211462641, -0.042050230885623534, -0.04187484709398351, -0.04170015433741034, -0.0415261559800684, -0.04135285515967045, -0.04118025479437208, -0.04100835758948179, -0.040837166043988, -0.040666682456910005, -0.0404969089334761, -0.04032784739113289, -0.040159499565394316, -0.03999186701552715, -0.0398249511300833, -0.0396587531322822, -0.03949327408524293, -0.03932851489707548, -0.039164476325831324, -0.03900115898431837, -0.03883856334478264, -0.038676689743461626, 
-0.038515538385012565, -0.038355109346817184, -0.03819540258316877, -0.038036417929341995, -0.03787815510555077, -0.037720613720795916, -0.037563793276606, -0.03740769317067345, -0.037252312700390644, -0.037097651066285625, -0.03694370737536308, -0.036790480644351184, -0.03663796980285698, -0.036486173696434844, -0.036335091089566604, -0.03618472066855931, -0.036035061044360606, -0.03588611075529459, -0.0357378682697201, -0.035590331988615304, -0.03544350024808625, -0.035297371321806235, -0.035151943423385686, -0.03500721470867281, -0.03486318327799011, -0.034719847178307346, -0.03457720440534945, -0.03443525290564788, -0.0342939905785299, -0.034153415278051734, -0.03401352481487555, -0.033874316958093134, -0.033735789436995525, -0.03359793994279167, -0.03346076613027819, -0.03332426561945746, -0.033188435997112464, -0.03305327481833224, -0.03291877960799462, -0.03278494786220396, -0.03265177704968722, -0.03251926461314907, -0.03238740797058631, -0.03225620451656286, -0.032125651623448104, -0.03199574664261711, -0.031866486905615894, -0.03173786972529007, -0.031609892396881695, -0.031482552199091386, -0.03135584639510907, -0.031229772233613385, -0.031104326949739747, -0.0309795077660204, -0.03085531189329442, -0.030731736531590267, -0.030608778870980315, -0.030486436092411088, -0.030364705368504066, -0.030243583864335573, -0.03012306873818895, -0.030003157142284922, -0.02988384622348817, -0.029765133123992052, -0.029647014981980493, -0.02952948893227033, -0.029412552106930966, -0.02929620163588635, -0.02918043464749492, -0.029065248269113348, -0.02895063962763947, -0.02883660585003802, -0.028723144063850688, -0.028610251397686456, -0.02849792498169677, -0.028386161948036094, -0.028274959431304023, -0.02816431456897494, -0.02805422450181124, -0.027944686374262916, -0.027835697334853835, -0.027727254536553225, -0.02761935513713526, -0.027511996299524968, -0.0274051751921321, -0.02729888898917376, -0.02719313487098366, -0.027087910024311417, -0.026983211642610277, 
-0.02687903692631379, -0.02677538308310273, -0.026672247328161616, -0.026569626884424574, -0.026467518982813236, -0.026365920862464025, -0.026264829770946657, -0.026164242964474137, -0.026064157708104427, -0.025964571275932796, -0.025865480951278254, -0.025766884026858998, -0.0256687778049634, -0.025571159597611533, -0.025474026726710793, -0.025377376524203882, -0.02528120633221006, -0.025185513503160104, -0.025090295399925223, -0.02499554939593893, -0.024901272875313688, -0.02480746323295093, -0.024714117874647185, -0.024621234217192103, -0.024528809688463493, -0.024436841727516038, -0.02434532778466479, -0.02425426532156546, -0.024163651811287393, -0.02407348473838461, -0.02398376159896034, -0.02389447990072866, -0.023805637163071425, -0.023717230917090902, -0.02362925870565881, -0.02354171808346179, -0.02345460661704281, -0.023367921884838886, -0.023281661477215942, -0.023195822996499756, -0.023110404057003972, -0.023025402285054942, -0.02294081531901313, -0.02285664080929227, -0.022772876418375144, -0.022689519820826926, -0.02260656870330533, -0.022524020764568782, -0.02244187371548185, -0.022360125279018032, -0.02227877319026086, -0.022197815196401464, -0.022117249056735698, -0.022037072542657583, -0.021957283437651368, -0.0218778795372821, -0.02179885864918308, -0.021720218593042656, -0.021641957200588202, -0.0215640723155697, -0.021486561793740117, -0.021409423502835145, -0.021332655322551426, -0.02125625514452262, -0.02118022087229502, -0.021104550421300418, -0.021029241718829042, -0.02095429270400062, -0.02087970132773368, -0.020805465552714536, -0.02073158335336469, -0.020658052715807076, -0.020584871637831542, -0.020512038128858753, -0.02043955020990385, -0.020367405913538833, -0.020295603283853644, -0.020224140376417274, -0.020153015258236637, -0.020082226007716707, -0.020011770714617943, -0.019941647480013723, -0.019871854416247774, -0.01980238964688929, -0.01973325130668959, -0.01966443754153593, -0.019595946508406514, -0.01952777637532393, 
-0.019459925321308316, -0.019392391536330357, -0.01932517322126341, -0.019258268587835123, -0.019191675858579373, -0.019125393266786368, -0.019059419056454256, -0.018993751482238766, -0.018928388809403215, -0.01886332931376826, -0.01879857128166123, -0.018734113009864925, -0.0186699528055666, -0.018606088986306668, -0.0185425198799265, -0.018479243824517367, -0.018416259168367498, -0.018353564269910423, -0.01829115749767237]
Plot2.plot_xy(x, y,describe='y-')
Plot2.show() |
998,554 | 7a96c2a1eafcd19a1d5235860a501f0330ec49d5 | from common import *
# Square render size; meshgrid_polar (from common) yields per-pixel radius r
# and angle a centred on the image.
imgsz=(2048,)*2
r,a=meshgrid_polar(imgsz)
# Base pattern: XOR of a log-spiral-ish ring mask and a 16-fold angular
# sine mask, as float32 in {0.0, 1.0}.
im=np.float32(np.uint8(np.log(1+r)*4%2)^np.uint8(np.sin(a*16)>0))
im2=im
def draw(t=0, **kwargs):
    """Render one frame: box-blur the base pattern, then threshold it radially.

    Returns a boolean image; ``t`` and ``kwargs`` are currently unused.
    """
    # BUG FIX: the original assigned im2 inside the function while also
    # reading it on the first use, raising UnboundLocalError. Start the
    # blur from the module-level base image instead.
    im2 = im
    # fast box blur: separable sum of n shifted copies along each axis,
    # then normalize by the window area n*n.
    for n in (19,):
        for axis in range(2):
            im2 = sum(np.roll(im2, i, axis) for i in range(-n//2, (n+1)//2))
        im2 = im2 / (n * n)
    # Threshold grows with radius, fading the pattern toward the edges.
    im3 = imnormalize(im2) > (0.25 + r / imgsz[0]) * 255
    return im3
if __name__ == '__main__':
    # Render 1000 frames to numbered PNGs.
    # BUG FIX: the original referenced an undefined name `frame`; the loop
    # variable is `t`.
    for t in range(1000):
        print('rendering frame %08d...' % t)
        im3 = draw(t)
        imsave(im3, 'video8a-%08d.png' % t)
|
998,555 | afa1bfe136d84fbd9978b35f9e03c246922afff3 | # You are given a 2d matrix and int (maxSum). You need to return the size of maximum square possible in this 2d array whose sum of all elements is less than equal to maxSum.
# Eg:
# input array:
# 1 2 3 4 5
# 8 9 9 9 7
# 6 1 2 1 8
# 1 1 1 1 9
# 9 1 1 1 20
# input maxSum = 10
# output = 3.
# Explaination; following 3*3 suare matrix has sum <= 10
# 1 2 1
# 1 1 1
# 1 1 1
def maxSum(matrix, sums):
    """Return the side length of the largest square submatrix whose element
    sum is <= ``sums``.

    Assumes non-negative entries (so growing a window never shrinks its sum);
    returns 0 for an empty matrix or when no single cell satisfies the bound.

    The original recomputed each candidate square from scratch (O(n^2 * k^2));
    this version precomputes 2-D prefix sums so every square sum is an O(1)
    query, while making exactly the same sliding-window decisions.
    """
    if not matrix or not matrix[0]:
        return 0
    rows, cols = len(matrix), len(matrix[0])
    # pre[r][c] = sum of matrix[0:r][0:c] (one extra row/col of zeros).
    pre = [[0] * (cols + 1) for _ in range(rows + 1)]
    for r in range(rows):
        above, cur, vals = pre[r], pre[r + 1], matrix[r]
        for c in range(cols):
            cur[c + 1] = cur[c] + above[c + 1] - above[c] + vals[c]

    def _square_sum(i, j, k):
        # Sum of the k*k square whose top-left corner is (i, j).
        return pre[i + k][j + k] - pre[i][j + k] - pre[i + k][j] + pre[i][j]

    maxlength = 1  # candidate size currently being validated
    i = j = 0
    # Monotone sweep: the candidate size never shrinks, so each position is
    # revisited at most once per size increase.
    while i + maxlength <= rows:
        while i + maxlength <= rows and j + maxlength <= cols:
            if _square_sum(i, j, maxlength) > sums:
                j += 1
            else:
                maxlength += 1
        i += 1
        j = 0
    return maxlength - 1
998,556 | 9b6505178f363060c0beea36dd1aabe7d64c88ce |
#Code to write bash script that generates grids based off of the input files made by grid_in.py
method = 'TICA'  # clustering method
dock = 'SP'      # docking method

# Emit one Schrodinger Glide command per grid input file made by grid_in.py.
with open('gen_grids.sh', 'w') as script:
    for idx in range(10):
        script.write(
            '$SCHRODINGER/glide /scratch/jegan/GLIDE_%s_core_docking/'
            '%s_docking/grids/%s_grid_%d.in\n' % (dock, method, method, idx)
        )
|
998,557 | fd5e0d4939b7e0fe813577190b5861a02b51ec3a | from flask import Flask, render_template, redirect, request, jsonify, url_for, flash
from database import Base, User, Book
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import sessionmaker
#Imports for anti-forgery state token
from flask import session as login_session
import random
import string
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import httplib2
import json
from flask import make_response
import requests
app = Flask(__name__)
# OAuth client id for Google sign-in, read from the downloaded secrets file.
CLIENT_ID = json.loads(open('client_secrets.json', 'r').read())['web']['client_id']
APPLICATION_NAME = "Library App"
# Connect to Database and create database session
engine = create_engine('sqlite:///libraryapp.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
# NOTE(review): a single module-global session shared by every request is
# not thread-safe -- confirm single-threaded use or switch to scoped_session.
session = DBSession()
# This is the path to the upload directory
app.config['UPLOAD_FOLDER'] = 'uploads/'
# These are the extension that we are accepting to be uploaded
app.config['ALLOWED_EXTENSIONS'] = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
# For a given file, return whether it's an allowed type or not
def allowed_file(filename):
    """Return True when *filename* carries an extension listed in the
    app's ALLOWED_EXTENSIONS config."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension in app.config['ALLOWED_EXTENSIONS']
# Route that will process the file upload
@app.route('/upload', methods=['POST'])
def upload():
    """Save an uploaded file into UPLOAD_FOLDER if its extension is allowed.

    NOTE(review): neither `os` nor `secure_filename` (werkzeug.utils) is
    imported anywhere in this module, so this view raises NameError when an
    allowed file is posted -- confirm and add the imports. It also returns
    None (no HTTP response) on every path.
    """
    # Get the name of the uploaded file
    file = request.files['file']
    # Check if the file is one of the allowed types/extensions
    if file and allowed_file(file.filename):
        # Make the filename safe, remove unsupported chars
        filename = secure_filename(file.filename)
        # Move the file form the temporal folder to
        # the upload folder we setup
        file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
# Create anti-forgery state token
@app.route('/login')
def showLogin():
    """Create a random anti-forgery state token, stash it in the session
    and render the login page with it."""
    token_chars = string.ascii_uppercase + string.digits
    state = ''.join(random.choice(token_chars) for _ in range(32))
    login_session['state'] = state
    return render_template('login.html', STATE=state)
@app.route('/gconnect', methods=['POST'])
def gconnect():
    """Google OAuth sign-in callback.

    Validates the anti-forgery state token, exchanges the one-time code for
    credentials, verifies the access token, stores the user's profile in
    login_session (creating a local User on first login) and returns a small
    HTML welcome snippet.
    """
    # Validate state token
    if request.args.get('state') != login_session['state']:
        response = make_response(json.dumps('Invalid state parameter.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Obtain authorization code, now compatible with Python3
    code = request.data
    try:
        # Upgrade the authorization code into a credentials object
        oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
        oauth_flow.redirect_uri = 'postmessage'
        credentials = oauth_flow.step2_exchange(code)
    except FlowExchangeError:
        response = make_response(
            json.dumps('Failed to upgrade the authorization code.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Check that the access token is valid.
    access_token = credentials.access_token
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s' % access_token)
    # Submit request, parse response - Python3 compatible
    h = httplib2.Http()
    response = h.request(url, 'GET')[1]
    str_response = response.decode('utf-8')
    result = json.loads(str_response)
    # If there was an error in the access token info, abort.
    if result.get('error') is not None:
        response = make_response(json.dumps(result.get('error')), 500)
        response.headers['Content-Type'] = 'application/json'
        # BUG FIX: this branch previously fell through without returning,
        # continuing sign-in despite the token-info error.
        return response
    # Verify that the access token is used for the intended user.
    gplus_id = credentials.id_token['sub']
    if result['user_id'] != gplus_id:
        response = make_response(json.dumps("Token's user ID doesn't match given user ID."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Verify that the access token is valid for this app.
    if result['issued_to'] != CLIENT_ID:
        response = make_response(json.dumps("Token's client ID does not match app's."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Verify if user is already logged in
    stored_access_token = login_session.get('access_token')
    stored_gplus_id = login_session.get('gplus_id')
    if stored_access_token is not None and gplus_id == stored_gplus_id:
        response = make_response(json.dumps('Current user is already connected.'), 200)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Store the access token in the session for later use.
    login_session['access_token'] = access_token
    login_session['gplus_id'] = gplus_id
    # Get user info
    userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
    params = {'access_token': access_token, 'alt': 'json'}
    answer = requests.get(userinfo_url, params=params)
    data = answer.json()
    login_session['username'] = data['name']
    login_session['provider'] = 'google'
    login_session['picture'] = data['picture']
    login_session['email'] = data['email']
    # Create the local user row on first login.
    user_id = getUserID(login_session['email'])
    if not user_id:
        user_id = createUser(login_session)
    login_session['user_id'] = user_id
    # HTML welcome snippet shown by the login page's AJAX callback.
    output = ''
    output += '<h1>Welcome, '
    output += login_session['username']
    output += '!</h1>'
    output += '<img src="'
    output += login_session['picture']
    output += ' " style = "width: 300px; height: 300px;border-radius: 50%;-webkit-border-radius: 50%;-moz-border-radius: 50%;"> '
    flash("you are now logged in as %s" % login_session['username'])
    return output
# DISCONNECT - Revoke a current user's token and reset their login_session
@app.route('/gdisconnect')
def gdisconnect():
    """Revoke the current user's Google access token.

    NOTE(review): the login_session cleanup below is deliberately commented
    out -- disconnect() deletes the keys instead; confirm that split of
    responsibility is intentional.
    """
    # Only disconnect a connected user.
    access_token = login_session.get('access_token')
    if access_token is None:
        response = make_response(json.dumps('Current user not connected.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    url = 'https://accounts.google.com/o/oauth2/revoke?token=%s' % access_token
    h = httplib2.Http()
    result = h.request(url, 'GET')[0]
    if result['status'] == '200':
        # Reset the user's sesson.
        #del login_session['access_token']
        #del login_session['gplus_id']
        #del login_session['username']
        #del login_session['email']
        #del login_session['picture']
        response = make_response(json.dumps('Successfully disconnected.'), 200)
        response.headers['Content-Type'] = 'application/json'
        return response
        #flash('You have been Successfully logged out')
        #return redirect(url_for('books'))
    else:
        # For whatever reason, the given token was invalid.
        # BUG FIX: 400 was passed inside json.dumps() (where it landed on the
        # `skipkeys` parameter), so the failure response went out with status
        # 200; the status code belongs to make_response.
        response = make_response(json.dumps('Failed to revoke token for given user.'), 400)
        response.headers['Content-Type'] = 'application/json'
        return response
@app.route('/disconnect')
def disconnect():
    """Provider-agnostic logout: revoke the provider token, then clear all
    session keys and redirect to the book list.

    NOTE(review): Python 2 print statements; gdisconnect()/fbdisconnect()
    are called directly as functions, their HTTP responses are discarded.
    """
    if 'provider' in login_session:
        if login_session['provider'] == 'google':
            print "google"
            gdisconnect()
            del login_session['gplus_id']
            del login_session['access_token']
        if login_session['provider'] == 'facebook':
            print "facebook"
            fbdisconnect()
            del login_session['facebook_id']
        # Keys stored by both providers.
        del login_session['username']
        del login_session['email']
        del login_session['picture']
        del login_session['user_id']
        del login_session['provider']
        flash("You have successfully been logged out")
        return redirect(url_for('books'))
    else:
        flash("You were not logged in to begin with!")
        return redirect(url_for('books'))
@app.route('/fbdisconnect')
def fbdisconnect():
    """Revoke the Facebook permission grant for the current user."""
    facebook_id = login_session['facebook_id']
    # The access token must me included to successfully logout
    access_token = login_session['access_token']
    url = 'https://graph.facebook.com/%s/permissions?access_token=%s' % (facebook_id,access_token)
    h = httplib2.Http()
    result = h.request(url, 'DELETE')[1]
    # BUG FIX: a Flask view must not return None (Flask raises on it when the
    # route is hit directly); disconnect() ignores the value, so returning a
    # message is backward-compatible.
    return 'you have been logged out'
@app.route('/fbconnect', methods = ['POST'])
def fbconnect():
    """Facebook OAuth sign-in callback.

    Exchanges the short-lived client token for a long-lived one, fetches the
    user's profile and picture, stores everything in login_session and
    creates a local User row on first login.

    NOTE(review): written for Python 2 (print statement; httplib2 results
    treated as str) -- confirm before running under Python 3, where
    result.split("&") would fail on bytes.
    """
    # Reject requests whose anti-forgery state token does not match.
    if request.args.get('state') != login_session['state']:
        response = make_response(json.dumps('Invalid state parameter'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    access_token = request.data
    print "access token received %s" % access_token
    #Exchange client token for long-lived server-side token with GET /oauth/acess_token?grant_type=fb_exchange_token&client_id={app-id}&client_secret={app-secret}
    #&fb_exchange_token={short-lived-token}
    app_id = json.loads(open('fb_client_secrets.json', 'r').read())['web']['app_id']
    app_secret = json.loads(open('fb_client_secrets.json', 'r').read())['web']['app_secret']
    url = 'https://graph.facebook.com/oauth/access_token?grant_type=fb_exchange_token&client_id=%s&client_secret=%s&fb_exchange_token=%s' % (app_id, app_secret, access_token)
    h = httplib2.Http()
    result = h.request(url, 'GET')[1]
    #Use token to get user info from API
    # NOTE(review): userinfo_url is assigned but never used below.
    userinfo_url = 'https://graph.facebook.com/v2.4/me'
    #Strip expires tag from access token
    token = result.split("&")[0]
    url = 'https://graph.facebook.com/v2.4/me?%s&fields=name,id,email' % token
    h = httplib2.Http()
    result = h.request(url, 'GET')[1]
    # print "url sent for API access:%s"% url
    # print "API JSON result: %s" % result
    data = json.loads(result)
    login_session['provider'] = 'facebook'
    login_session['username'] = data["name"]
    login_session['email'] = data["email"]
    login_session['facebook_id'] = data["id"]
    # The token must be stored in the login_session in order to properly logout, let's strip out the information before the equals sign in our token
    stored_token = token.split("=")[1]
    login_session['access_token'] = stored_token
    # Get user picture
    url = 'https://graph.facebook.com/v2.4/me/picture?%s&redirect=0&height=200&width=200' % token
    h = httplib2.Http()
    result = h.request(url, 'GET')[1]
    data = json.loads(result)
    login_session['picture'] = data["data"]["url"]
    # see if user exists
    user_id = getUserID(login_session['email'])
    if not user_id:
        user_id = createUser(login_session)
    login_session['user_id'] = user_id
    # HTML welcome snippet shown by the login page's AJAX callback.
    output = ''
    output += '<h1>Welcome, '
    output += login_session['username']
    output += '!</h1>'
    output += '<img src="'
    output += login_session['picture']
    output += ' " style = "width: 300px; height: 300px;border-radius: 150px;-webkit-border-radius: 150px;-moz-border-radius: 150px;"> '
    flash("Now logged in as %s" % login_session['username'])
    return output
@app.route('/books/JSON')
def booksJSON():
    """JSON list of every book in the library."""
    serialized = [book.serialize for book in session.query(Book).all()]
    return jsonify(books = serialized)
@app.route('/books/<int:book_id>/JSON')
def bookJSON(book_id):
    """JSON representation of a single book."""
    matching_book = session.query(Book).filter_by(id = book_id).one()
    return jsonify(book = matching_book.serialize)
@app.route('/')
@app.route('/books')
def books():
    """List every book; logged-out visitors get the read-only template."""
    all_books = session.query(Book).all()
    template = 'books.html' if 'username' in login_session else 'publicbooks.html'
    return render_template(template, books = all_books)
@app.route('/<int:book_id>/checkout/', methods = ['GET', 'POST'])
def checkout(book_id):
    """Check a book out to the logged-in user.

    GET renders the confirmation page; POST records the checkout.
    """
    if 'username' not in login_session:
        return redirect('/login')
    book_to_checkout = session.query(Book).filter_by(id = book_id).one()
    if request.method == 'POST':
        book_to_checkout.user_id = login_session['user_id']
        # BUG FIX: the assignment was never committed and the view returned
        # None; persist and redirect like the other mutating views.
        session.commit()
        return redirect('/books')
    else:
        return render_template('checkout.html', book = book_to_checkout)
@app.route('/<int:book_id>/checkin/', methods = ['GET', 'POST'])
def checkin(book_id):
    """Return (check in) a book.

    GET renders the confirmation page; POST clears the book's holder.
    """
    if 'username' not in login_session:
        return redirect('/login')
    book_to_checkin = session.query(Book).filter_by(id = book_id).one()
    if request.method == 'POST':
        book_to_checkin.user_id = None
        # BUG FIX: the route only accepted GET, so this POST branch was
        # unreachable, and it neither committed nor returned a response.
        session.commit()
        return redirect('/books')
    else:
        return render_template('checkin.html', book = book_to_checkin)
@app.route('/<int:book_id>/edit/', methods = ['GET', 'POST'])
def edit(book_id):
    """Edit a book's details; only its owner may edit."""
    if 'username' not in login_session:
        return redirect('/login')
    book_to_edit = session.query(Book).filter_by(id = book_id).one()
    if book_to_edit.user_id != login_session['user_id']:
        return "<script>function myFunction() {alert('You are not authorized to edit this book.');}</script><body onload='myFunction()''>"
    if request.method != 'POST':
        return render_template('edit.html', book = book_to_edit)
    # Copy the submitted fields onto the row, then persist.
    book_to_edit.name = request.form['name']
    book_to_edit.author = request.form['author']
    book_to_edit.picture = request.form['picture']
    session.commit()
    return redirect('/books')
@app.route('/<int:book_id>/delete/', methods = ['GET', 'POST'])
def delete(book_id):
    """Delete a book; only its owner may delete."""
    if 'username' not in login_session:
        return redirect('/login')
    doomed_book = session.query(Book).filter_by(id = book_id).one()
    if doomed_book.user_id != login_session['user_id']:
        return "<script>function myFunction() {alert('You are not authorized to delete this book.');}</script><body onload='myFunction()''>"
    if request.method != 'POST':
        return render_template('delete.html', book = doomed_book)
    session.delete(doomed_book)
    session.commit()
    return redirect('/books')
# User Helper Functions
def createUser(login_session):
    """Persist a new User built from the session's name/email and return
    the freshly assigned primary key."""
    session.add(User(name=login_session['username'], email=login_session['email']))
    session.commit()
    created = session.query(User).filter_by(email=login_session['email']).one()
    return created.id
def getUserInfo(user_id):
    """Return the User row with primary key *user_id*."""
    return session.query(User).filter_by(id=user_id).one()
def getUserID(email):
    """Return the id of the User with *email*, or None if no such user.

    Replaces the original bare ``except:`` (which silently swallowed every
    error, including genuine database failures) with a ``first()`` lookup
    that returns None naturally when the row is absent.
    """
    user = session.query(User).filter_by(email=email).first()
    return user.id if user is not None else None
'''@app.route('/<int:book_id>/view/')
def view(book_id):
#return "View a book"
book_to_view = session.query(Book).filter_by(id = book_id).one()
return render_template('view.html', book = book_to_view)'''
@app.route('/new/', methods = ['GET', 'POST'])
def newbook():
    """Create a new book owned by the logged-in user.

    GET renders the form; POST saves the uploaded cover (via upload()) and
    inserts the Book row.

    NOTE(review): upload() references `os`/`secure_filename` that this
    module never imports, so the POST path raises NameError -- confirm.
    """
    if 'username' not in login_session:
        return redirect('/login')
    #return "Create new book"
    if request.method == 'POST':
        upload()
        newbook = Book(name = request.form['name'], author = request.form['author'], picture = request.form['picture'], user_id = login_session['user_id'])
        session.add(newbook)
        session.commit()
        return redirect(url_for('books'))
    else:
        return render_template('newbook.html')
if __name__ == '__main__':
    # Development-server settings only: debug mode and a hard-coded secret
    # key must not be used in production.
    app.secret_key = 'super_secret_key'
    app.debug = True
    app.run(host='0.0.0.0', port=5000)
|
998,558 | 049b1a371324e15ec5d164188abe5b632f3bfcb6 |
# Build the Flapjack recipe and register it with the recipe store.
R = Recipe()
R.name("Flapjack")
R.ingredient(200, "g", "margarine")
R.ingredient(200, "g", "brown sugar")
R.ingredient(2, "tablespoons", "golden syrup")
R.ingredient(1.5, "teaspoons", "ginger")
R.ingredient(280, "g", "oats")
R.do("Heat everything apart from the oats in a pan.")
R.do("Add oats to the melted ingredients and mix thoroughly.")
# Raw string fix: "\d" is an invalid escape sequence (DeprecationWarning,
# a future SyntaxError). The r-prefix keeps the literal text "\degc"
# byte-identical to what the original produced.
R.do(r"Cook in oven at 160\degc for 22 minutes or until golden brown but not too solid.")
add(R)
|
998,559 | 4ea4d6043677c3983bcadada11f47d026759ddc5 | # -*- coding: utf-8 -*-
"""
使用者上传的资源
"""
import image
import richtext_image
import document |
998,560 | 8a59ae2a5905386552843adb266be7fb830165fc | import os
import sys
class File_Handler(object):
    """Interactive console utility for creating, reading, writing and
    deleting files."""

    def __init__(self):
        super(File_Handler, self).__init__()

    def mainloop(self):
        """Prompt for one action and dispatch to the matching file method."""
        print("\nFile Handling application. What do you want to do?")
        print("1. Create file\n2. Read file\n3. Write to file\n4. Delete file ")
        choice = input("Type in 1, 2, 3 or 4 to pick answer, or [Quit] to quit.")
        if choice.lower() == "quit":
            sys.exit("Goodbye!")
        else:
            try:
                choice = int(choice)
            except ValueError:
                # Bug fix: return immediately — the original fell through
                # and also printed "Invalid input!", reporting the same
                # mistake twice.
                print("You didn't input a number!")
                return
            if choice == 1:
                print("Creating file..")
                filename = input("What do you want to call it?\n")
                print("Creating file ", filename)
                self.file_new(filename)
                print("File created!")
            elif choice == 2:
                filename = input("What file do you want to read?")
                print("Reading file..")
                self.file_read(filename)
                print("END OF FILE")
            elif choice == 3:
                print("Do you want to [1] write to a new file, or [2] append to an existing file?")
                new_or_append = int(input())
                if new_or_append == 1:
                    filename = input("What file do you want to create?")
                    text_to_add = input("Type in the text you want to add:\n")
                    print("Creating file and adding text..")
                    self.file_write(filename, text_to_add)
                elif new_or_append == 2:
                    filename = input("What file do you want to write to?")
                    print("Writing to file", filename)
                    text_to_add = input("Type in the text you want to add:\n")
                    print("Adding text..")
                    self.file_append(filename, text_to_add)
                    print("Text added!")
            elif choice == 4:
                filename = input("What file do you want to delete?")
                print("Deleting file ", filename)
                self.file_delete(filename)
                print(filename, " successfully deleted!")
            else:
                print("Invalid input!")

    # NEW FILE
    def file_new(self, filename):
        """Create *filename*; raises FileExistsError if it already exists."""
        # "x" creates exclusively; the context manager closes the handle
        # (the original leaked every file object it opened).
        with open(filename, "x"):
            pass

    # READ FILES
    def file_read(self, filename):
        """Print the whole contents of *filename*."""
        with open(filename, "r") as read_file:
            print(read_file.read())

    def file_read_line(self, filename):
        """Print the first line of *filename*."""
        with open(filename, "r") as read_file:
            print(read_file.readline())

    def file_read_custom(self, filename, limit):
        """Print at most *limit* characters of *filename*."""
        with open(filename, "r") as read_file:
            print(read_file.read(limit))

    def file_read_line_custom(self, filename, limit):
        """Print the first line of *filename*, capped at *limit* characters."""
        with open(filename, "r") as read_file:
            # Bug fix: the original ignored *limit* and behaved exactly
            # like file_read_line.
            print(read_file.readline(limit))

    # WRITE FILES
    def file_write(self, filename, text_to_add):
        """Create/overwrite *filename* with *text_to_add*."""
        with open(filename, "w") as write_file:
            write_file.write(text_to_add)

    def file_append(self, filename, text_to_add):
        """Append *text_to_add* to *filename* (created if missing)."""
        with open(filename, "a") as append_file:
            append_file.write(text_to_add)

    # DELETE FILES
    def file_delete(self, filename):
        """Remove *filename* if it exists, else print a notice."""
        if os.path.exists(filename):
            os.remove(filename)
        else:
            print(filename, " does not exist!")
p1 = File_Handler()
# Interactive REPL: loops forever; the user exits via the "quit" choice,
# which calls sys.exit() inside mainloop.
while True:
    p1.mainloop()
|
998,561 | 00cebf8c873c9bfe118d80b5957367e02e7a3aa6 | # -*- coding: utf-8 -*-
import math
#define PI 3.1415
# Complex-number record. NOTE: intentionally shadows the builtin ``complex``
# to stay compatible with the rest of this file.
class complex:
    """Minimal mutable complex number with ``real`` and ``image``
    (imaginary) parts."""

    def __init__(self, real=0.0, image=0.0):
        # Backward-compatible generalization: complex() still yields 0+0i,
        # but callers may now initialize both parts directly.
        self.real = real
        self.image = image
# Complex multiplication.
def mul_ee(complex0, complex1):
    """Return the product of two of this file's ``complex`` instances."""
    a, b = complex0.real, complex0.image
    c, d = complex1.real, complex1.image
    product = complex()
    # (a + bi)(c + di) = (ac - bd) + (ad + bc)i
    product.real = a * c - b * d
    product.image = a * d + b * c
    return product
# Complex addition.
def add_ee(complex0, complex1):
    """Return the component-wise sum of two ``complex`` instances."""
    result = complex()
    result.real, result.image = (complex0.real + complex1.real,
                                 complex0.image + complex1.image)
    return result
# Complex subtraction.
def sub_ee(complex0, complex1):
    """Return the component-wise difference complex0 - complex1."""
    result = complex()
    result.real, result.image = (complex0.real - complex1.real,
                                 complex0.image - complex1.image)
    return result
# Reorder the input data into bit-reversed index order (in place).
def forward_input_data(input_data, num):
    """Permute input_data (length num, a power of two) so element i ends up
    at the bit-reversed position of i — the input order required by the
    in-place decimation-in-time FFT in fft_1d."""
    j = int(num / 2)
    print(j)
    for i in range(1, num - 2):
        if(i < j):
            # Swap each pair exactly once (only when i precedes its
            # bit-reversed partner j).
            complex_tmp = input_data[i]
            input_data[i] = input_data[j]
            input_data[j] = complex_tmp
            print("forward x[%d] <==> x[%d]" % (i, j))
        # Advance j as a bit-reversed counter (Gold–Rader style update):
        # clear leading one-bits, then set the first zero-bit.
        k = int(num / 2)
        while (j >= k):
            j = j - k
            k = int(k / 2)
        j = j + k
# In-place radix-2 decimation-in-time 1D FFT.
def fft_1d(in_data, num):
    """Compute the FFT of in_data in place.

    in_data: list of this file's ``complex`` instances; num: its length,
    which must be a power of two. On return, in_data holds the spectrum.
    """
    PI = 3.1415926
    forward_input_data(in_data, num)  # bit-reverse the input order first

    # M = number of butterfly stages, where num == 2**M.
    M = 1
    tmp = num / 2;
    while (tmp != 1):
        M = M + 1
        tmp = tmp / 2
    print("FFT level:%d" % M)

    complex_ret = complex()
    for L in range(1, M + 1):
        # B = half the butterfly span at stage L (2**(L-1)); math.pow
        # returns a float, hence the int() conversion.
        B = int(math.pow(2, L -1))
        for J in range(0, B):
            # P indexes the twiddle factor W^P = exp(-2*pi*i*P/num).
            P = math.pow(2, M - L) * J
            for K in range(J, num, int(math.pow(2, L))):
                print("L:%d B:%d, J:%d, K:%d, P:%f" % (L, B, J, K, P))
                complex_ret.real = math.cos((2 * PI / num) * P)
                complex_ret.image = -math.sin((2 * PI / num) * P)
                # Butterfly: A[K], A[K+B] = A[K] + W^P*A[K+B], A[K] - W^P*A[K+B]
                complex_mul = mul_ee(complex_ret, in_data[K + B])
                complex_add = add_ee(in_data[K], complex_mul)
                complex_sub = sub_ee(in_data[K], complex_mul)
                in_data[K] = complex_add
                in_data[K + B] = complex_sub
                print("A[%d] real: %f, image: %f" % (K, in_data[K].real, in_data[K].image))
                print("A[%d] real: %f, image: %f" % (K + B, in_data[K + B].real, in_data[K + B].image))
def test_fft_1d():
    """Smoke-test fft_1d on a fixed 8-point real signal, printing the input
    samples and the transformed output."""
    in_data = [2,3,4,5,7,9,10,11]  # the eight sample points under test
    # data: list of 8 ``complex`` instances holding the input samples.
    data = [(complex()) for i in range(len(in_data))]
    # Load the test points into complex form (imaginary part zero).
    for i in range(len(in_data)):
        data[i].real = in_data[i]
        data[i].image = 0.0
    # Show the data the FFT will process.
    print("The input data:")
    for i in range(len(in_data)):
        print("x[%d] real: %f, image: %f" % (i, data[i].real, data[i].image))
    fft_1d(data, 8)
    # Show the result after the in-place FFT.
    print("The output data:")
    for i in range(len(in_data)):
        print("X[%d] real: %f, image: %f" % (i, data[i].real, data[i].image))
#test the 1d fft
test_fft_1d() |
998,562 | cfb8c5f1c0778047db026b97702d6b0702875934 | # -*- coding: utf-8 -*-
"""
Created on Wed Jan 15 21:00:08 2020
@author: erfan pakdamanian
"""
# STEP1----------------- # Importing the libraries------------
#-------------------------------------------------------------
import os
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import glob
import scipy.signal as ss
import wfdb
import csv
import sklearn
# STEP1------------------# Importing the DATASET ------------
#------------------------------------------------------------
# Load the raw iMotions export from the data directory.
# NOTE(review): os.chdir() returns None, so ``directory`` is always None;
# kept only for backward compatibility with any later reference.
directory = os.chdir("\\ML4TakeOver\\Data\\RawData")
file = "RawData"
dataframe_All = pd.read_csv("RawData.csv")
# Bug fix: every later step in this script refers to ``dataframe``, but the
# CSV was only bound to ``dataframe_All`` — the first use raised NameError.
dataframe = dataframe_All
##
# STEP2 =========================== Removing Unnecessary columns========================
#========================================================================================
# 1) Removing the columns with more than 50% NAs
half_count = len(dataframe) / 2
dataframe = dataframe.dropna(thresh=half_count, axis=1) # Drop any column with more than 50% missing values
ColumnList =list(dataframe.columns)
# These columns are not useful for our purposes (metadata, raw sensor
# channels, battery status, redundant gaze/acceleration fields).
dataframe = dataframe.drop(['StudyName','ExportDate','Age','MediaTime',
                            'Internal ADC A13 PPG RAW (no units) (GSR)',
                            'VSenseBatt RAW (no units) (GSR)', #Battery status
                            'VSenseBatt CAL (mVolts) (GSR)',
                            'ValidityLeft',
                            'ValidityRight',
                            'Wide Range Accelerometer X CAL (m/(sec^2)) (GSR)',
                            'Wide Range Accelerometer X RAW (no units) (GSR)',
                            'Wide Range Accelerometer Y CAL (m/(sec^2)) (GSR)',
                            'Wide Range Accelerometer Y RAW (no units) (GSR)',
                            'Wide Range Accelerometer Z CAL (m/(sec^2)) (GSR)',
                            'Wide Range Accelerometer Z RAW (no units) (GSR)',
                            'Trust (0.0)',
                            'Gaze3DX', 'GazeX', 'GazeY',
                            'Gaze3DY',
                            'Gaze3DZ', 'AccX',
                            'AccY','AccZ'],axis=1)
# 2) Removing columns with no distinct values (constant columns)
for col in dataframe.columns:
    if len(dataframe[col].unique()) == 1:
        dataframe.drop(col,inplace=True,axis=1)
# 3) Check which columns have the same values
# NOTE(review): the result lands in a throwaway 'check' column that is never
# dropped afterwards — confirm that is intended.
dataframe['check'] = np.where((dataframe['ManualBrake (0.0)'] == dataframe['CurrentBrake (0.0)']),
                              'TRUE', 'False')
dataframe['check'].value_counts()
# 4) Change some of the variables name
# Strip the ' (0.0)' suffix iMotions appends to simulator channels and give
# the index column a proper name.
dataframe = dataframe.rename(columns={'Unnamed: 0': 'ID',
                                      'Mode (0.0)' : 'Mode',
                                      'Alarm (0.0)' : 'Alarm',
                                      'AutoBrake (0.0)':'AutoBrake',
                                      'AutoGear (0.0)':'AutoGear',
                                      'AutoThrottle (0.0)':'AutoThrottle',
                                      'AutoWheel (0.0)': 'AutoWheel',
                                      'CurrentBrake (0.0)':'CurrentBrake',
                                      'CurrentGear (0.0)':'CurrentGear',
                                      'CurrentThrottle (0.0)':'CurrentThrottle',
                                      'CurrentWheel (0.0)':'CurrentWheel',
                                      'EventN (0.0)': 'EventN',
                                      'EventW (0.0)':'EventW',
                                      'MPH (0.0)':'MPH',
                                      'ManualBrake (0.0)': 'ManualBrake',
                                      'ManualGear (0.0)':'ManualGear',
                                      'ManualThrottle (0.0)': 'ManualThrottle',
                                      'ManualWheel (0.0)':'ManualWheel',
                                      'PassBy (0.0)':'PassBy',
                                      'RangeN (0.0)':'RangeN',
                                      'RangeW (0.0)':'RangeW',
                                      'RightLaneDist (0.0)':'RightLaneDist',
                                      'RightLaneType (0.0)':'RightLaneType',
                                      'LeftLaneDist (0.0)': 'LeftLaneDist',
                                      'LeftLaneType (0.0)': 'LeftLaneType',
                                      'Speed (0.0)':'Speed'})
##
# STEP3 ================== Making Data Sets for Each type of Data ==============================
#===============================================================================================
# Eye-tracking channels plus the shared event/mode columns.
EyeDataFrame = dataframe[['Timestamp','ID','Name','FixationDuration',
                          'FixationSeq',
                          'FixationStart', 'FixationX','FixationY',
                          'GazeDirectionLeftX','GazeDirectionLeftY', 'GazeDirectionLeftZ',
                          'GazeDirectionRightX','GazeDirectionRightY', 'GazeDirectionRightZ',
                          'GazeLeftx', 'GazeLefty','PupilLeft','PupilRight',
                          'GazeRightx', 'GazeRighty', 'GazeVelocityAngle',
                          'InterpolatedGazeX','InterpolatedGazeY',
                          'EventN', 'EventSource', 'EventW',
                          'Alarm', 'Mode']]
# Driving-simulator channels.
CarDataFrame = dataframe[['AutoBrake', 'AutoGear',
                          'AutoThrottle', 'AutoWheel', 'CurrentBrake',
                          'CurrentGear', 'CurrentThrottle', 'CurrentWheel',
                          'Distance3D',
                          'EventN', 'EventSource', 'EventW',
                          'MPH', 'ManualBrake', 'ManualGear',
                          'ManualThrottle', 'ManualWheel',
                          'PassBy','RangeN', 'RangeW',
                          'RightLaneDist', 'RightLaneType', 'LeftLaneDist',
                          'LeftLaneType',
                          'Speed']]
# GSR / PPG physiological channels.
GsrDataFrame = dataframe[['GSR CAL (kOhms) (GSR)',
                          'GSR CAL (µSiemens) (GSR)',
                          'GSR Quality (GSR)',
                          #'GSR RAW (no units) (GSR)', Not important
                          'Heart Rate PPG (Beats/min) (GSR)',
                          'IBI PPG (mSecs) (GSR)',
                          'Internal ADC A13 PPG CAL (mVolts) (GSR)',
                          'Packet reception rate RAW (no units) (GSR)',
                          'System Timestamp CAL (mSecs) (GSR)']]
# STEP4================ Creating Individual dataset for furthur analysis =================================
#=========================================================================================================
# Concatenate eye and car data column-wise and persist for later analysis.
Eye_Car_DataFrame = pd.concat([EyeDataFrame, CarDataFrame], axis=1, sort=False)
Eye_Car_DataFrame.to_csv("Eye_Car_Data" + '.csv')
|
998,563 | 43aba4225cd8441c7c0b609cc956beb1e87a4c80 | from rest_framework import serializers
from battles.models import Battle
class BattleSerializer(serializers.ModelSerializer):
    """DRF serializer exposing every field of the Battle model."""

    class Meta:
        model = Battle
        fields = '__all__'
|
998,564 | 69d308b006181ca95f42ce74d1037fc5517715e4 | from django.shortcuts import render
from .models import Employee
from .forms import EmployeeForm
def form_view(request):
    """Render the employee form; persist a valid POST submission.

    GET shows an unbound form. POST binds the submitted data, saves it if
    valid, and re-renders the same template with the (possibly invalid)
    bound form so errors are displayed.
    """
    bound = request.method == "POST"
    form = EmployeeForm(request.POST) if bound else EmployeeForm()
    if bound and form.is_valid():
        form.save()
    return render(request, 'GitApp/git.html', {'form': form})
|
998,565 | c4a994bdc335290ca85cee0798799069588e257f | import cv2
from LBP import LocalBinaryPatterns
import os
import pickle
from sklearn.model_selection import train_test_split
import numpy as np
from matplotlib import pyplot as plt
from tqdm import tqdm
from matplotlib.patches import Rectangle
def get_features(paths, dataset_faces, desc):
    """Extract LBP histograms for detected faces in grayscale images.

    paths: image file paths; dataset_faces: an OpenCV CascadeClassifier;
    desc: an LBP descriptor with a describe_regions(img, window_size)
    method. Returns a list of histograms, one per image that yielded only
    sufficiently large (>= 120x120) face detections.
    """
    data = []
    num_paths = len(paths)
    for idx_path in tqdm(range(num_paths)):
        path_image = paths[idx_path]
        img = cv2.imread(path_image, 0)  # 0 = load as grayscale
        if img is not None:
            faces_det = dataset_faces.detectMultiScale(img.astype('uint8'))
            if len(faces_det) > 0:
                img_face = img
                valid = True
                for (x, y, w, h) in faces_det:
                    if w >= 120 and h >= 120:
                        # Crop to the face region and normalize to 120x120.
                        img_face = img[y:y + h, x:x + w]
                        # dsize
                        dsize = (120, 120)
                        # resize image
                        img_face = cv2.resize(img_face, dsize)
                    else:
                        # Any too-small detection invalidates the image.
                        valid = False
                if valid:
                    hist = desc.describe_regions(img_face, window_size=[10, 10])
                    data.append(hist)
    return data
path = 'terravic_facial_infrared_dataset/'
dataset_faces = cv2.CascadeClassifier('haar_cascade/haarcascade_frontalface_default.xml') # Face dataset
classes = {}
# Subdirectories of ``path``: one directory per subject (class).
faces_paths = [x[0] for x in os.walk(path)]
faces_paths = faces_paths[1:]  # drop the root directory itself
for i, face_path in enumerate(faces_paths):
    # Collect every image path belonging to class i+1.
    for fn in next(os.walk(face_path))[2]:
        class_ = str(i+1)
        if class_ not in classes:
            classes[class_] = {
                "paths": [f'{face_path}/{fn}']
            }
        else:
            classes[class_]['paths'].append(f'{face_path}/{fn}')
    # Split 50% train, then the remaining half 80/20 into validation/test.
    train, test = train_test_split(np.array(classes[class_]['paths']), test_size=0.5)
    validation, test = train_test_split(test, test_size=0.2)
    classes[class_]['train'] = list(train)
    classes[class_]['validation'] = list(validation)
    classes[class_]['test'] = list(test)
    # Discard subjects that end up with fewer than 500 training images.
    if len(classes[class_]['train']) < 500:
        del classes[class_]
desc = LocalBinaryPatterns(16, 2)  # 16 sample points, radius 2
labels_train = []
data_train = []
labels_test = []
data_test = []
labels_val = []
data_val = []
count = 0
# Extract LBP features for at most the first 10 retained classes; labels
# are the 1-based class counter.
for label, item in classes.items():
    if count < 10:
        print(f'Class: {count+1}')
        train_class = get_features(item['train'], dataset_faces, desc)
        labels_train_class = [int(count+1) for i in range(len(train_class))]
        test_class = get_features(item['test'], dataset_faces, desc)
        labels_test_class = [int(count+1) for i in range(len(test_class))]
        val_class = get_features(item['validation'], dataset_faces, desc)
        labels_val_class = [int(count + 1) for i in range(len(val_class))]
        data_train += train_class
        labels_train += labels_train_class
        data_test += test_class
        labels_test += labels_test_class
        data_val += val_class
        labels_val += labels_val_class
    count += 1
# Persist the extracted features and labels as pickled binary streams for
# the downstream training step.
with open('terravic_lbph_features/train.data', 'wb') as filehandle:
    # store the data as binary data stream
    pickle.dump(data_train, filehandle)
with open('terravic_lbph_features/labels_train.data', 'wb') as filehandle:
    pickle.dump(labels_train, filehandle)
with open('terravic_lbph_features/test.data', 'wb') as filehandle:
    pickle.dump(data_test, filehandle)
with open('terravic_lbph_features/labels_test.data', 'wb') as filehandle:
    pickle.dump(labels_test, filehandle)
with open('terravic_lbph_features/validation.data', 'wb') as filehandle:
    pickle.dump(data_val, filehandle)
with open('terravic_lbph_features/labels_validation.data', 'wb') as filehandle:
    pickle.dump(labels_val, filehandle)
998,566 | 03c2435746b4f16a87a81fda575cbdfab95b072a | from keras.applications import ResNet50
from keras.preprocessing.image import img_to_array
from keras.applications import imagenet_utils
from PIL import Image
import numpy as np
from io import BytesIO
import os
import requests
model = ResNet50(weights="imagenet")
layers = dict([(layer.name, layer.output) for layer in model.layers])
model.summary()
# MODELDEKİ TOPLAM PARAMETRE SAYISINI EKRANA YAZDIR
model.count_params()
def prepare_image(image, target):
    """Resize a PIL image to *target* (width, height) and apply the
    ImageNet preprocessing expected by ResNet50.

    Returns a (1, h, w, 3) array ready for model.predict.
    """
    image = image.resize(target)
    image = img_to_array(image)
    image = np.expand_dims(image, axis=0)  # add the batch dimension
    image = imagenet_utils.preprocess_input(image)
    return image
"""### İnternet Kaynaklı bir görüntünün URL'sini kopyalayarak test işlemini yapabilirsiniz. Binlerce kategorinin olduğu ImageNet veri seti sayesinde bir çok sınıfı rahatlıkla kategorize edilebildiğini göreceksiniz."""
#@title Görüntünün URL'sini Yapıştırın { vertical-output: true }
ImageURL = "https://3.bp.blogspot.com/-u2EcSH2R3aM/VM69jPZvvOI/AAAAAAAAYzk/xmjSdaDD06o/s1600/mercan_resif.jpg" #@param {type:"string"}
#ImageURL = "https://i.cnnturk.com/ps/cnnturk/75/650x0/57ad7dd9a781b6264026292d.jpg"
response = requests.get(ImageURL)
image = Image.open(BytesIO(response.content))
image
"""**Eğer Dosyadan Resim Okumak isterseniz**"""
# root = 'drive/My Drive/'
# image_path = root+ 'Olips.png'
# image = Image.open(image_path)
# image = image.resize((224, 224))
# image
# Görüntüyü diziye çevir
# x = np.asarray(image, dtype='float32')
# Dizi listesine çevir
# x = np.expand_dims(x, axis=0)
# Giriş görüntüsünü eğitim setine uygun şekilde ön işlemleri yap
# x = preprocess_input(x)
#preds = model.predict(x)
#print('Predicted:', decode_predictions(preds, top=3)[0])
#print(decode_predictions(preds, top=1)[0][0][1])
"""### İnternetten Aldığınız Verinin Ön İşlemlerinin Yapılması Yeniden Boyutlandırılması ve Olası ilk 5 Tahmin ve Tahmin Oranlarının Ekrana Yazdırılması
Örnekteki görsel için %91.9 olasılıkla **mercan** olduğunu %0.17 olasılıkla **denizşakayığı** ve diğer olasılıkları hücrenin çıktısından takip edebilirsiniz.
"""
data = {"success": False}
pre_image = prepare_image(image, target=(224, 224)) # 224 x 224 boyutlu hale getir
preds = model.predict(pre_image) # Kesirim modeline ön işlemden geçmiş görüntüyü uygula
results = imagenet_utils.decode_predictions(preds) #kestirim
data["predictions"] = []
for (imagenetID, label, prob) in results[0]: # ImageNet veri kümseinden etiket, olasılık ve kestrim sonucunu al
r = {"label": label, "probability": float(prob)}
data["predictions"].append(r)
data["success"] = True
print(data)
"""### En yüksek olasılıklı sonucun ekrana yazdırılması"""
print("Sınıflandırma tahmini en yüksek olan {0} oranıyla {1}'dır.".format(data["predictions"][0]["probability"],data["predictions"][0]["label"]))
# En yüksek olasılıklı sonucu ekrana yazdır
"""## ⭐️[TensorFlow Hub Örneğini incelemeniz de çok faydalı olacaktır](https://www.tensorflow.org/tutorials/images/hub_with_keras)⭐️
### ⭐️ [Transfer learning from pre-trained models](https://towardsdatascience.com/transfer-learning-from-pre-trained-models-f2393f124751)⭐️
# 2. VERSİYON İÇİN ÖRNEK
### Kütüphanelerin kurulması ve gerekli importların yapılması adımı
⏬⏬⏬
"""
from keras.applications.vgg16 import VGG16
from keras.applications.vgg16 import preprocess_input
from keras.preprocessing.image import ImageDataGenerator, array_to_img
from keras.models import Model, Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras.callbacks import ModelCheckpoint, EarlyStopping
from keras.optimizers import SGD
from keras import backend as K
import pandas as pd
import numpy as np
"""## Görüntülerimizin Boyutlarının Ayarlanması
Ön işlemler
"""
img_width, img_height = 224, 224

train_data_dir = 'data/train'
validation_data_dir = 'data/validation'
nb_train_samples = 2000  # number of training samples
nb_validation_samples = 800  # number of validation samples
epochs = 50
batch_size = 16

# Keras stores images channels-first or channels-last depending on backend.
if K.image_data_format() == 'channels_first':
    input_shape = (3, img_width, img_height)
else:
    # Bug fix: the original line ended with a stray "-" which made the
    # assignment an incomplete expression (SyntaxError).
    input_shape = (img_width, img_height, 3)
def preprocess_input_vgg(x):
"""
Paremetreler
----------
x : numpy 3d dizi (bir tek görüntü ön işlemlendi)
"""
X = np.expand_dims(x, axis=0)
X = preprocess_input(X)
return X[0]
"""### VGG 16 Derin Öğrenme Modelinin IMAGENET Veri Kümesinde Eğitilmiş Ağırlıklarının Alınması
Keras kütüphanesinden faydalanıyoruz bu aşamada
[VGGNet](https://medium.com/deep-learning-turkiye/deri%CC%87ne-daha-deri%CC%87ne-evri%C5%9Fimli-sinir-a%C4%9Flar%C4%B1-2813a2c8b2a9) Derin Öğrenme Modeli ve [ImageNet](https://medium.com/deep-learning-turkiye/motivasyon-yapay-zeka-ve-derin-%C3%B6%C4%9Frenme-48d09355388d) Veri Kümesi hakkında bilgi için tıklayınız!
"""
vgg16 = VGG16(weights='imagenet')
x = vgg16.get_layer('fc2').output
prediction = Dense(2, activation='softmax', name='predictions')(x)
model = Model(inputs=vgg16.input, outputs=prediction)
# base_model = VGG16(weights='imagenet',include_top= False, input_shape=input_shape)
# x = base_model.output
# x = Flatten(name='flatten')(x)
# x = Dense(4096, activation='relu', name='fc1')(x)
# x = Dense(4096, activation='relu', name='fc2')(x)
# prediction = Dense(2, activation='linear', name='predictions')(x)
# # prediction = Dense(output_dim=1, activation='sigmoid', name='logit')(x)
# top_model = Sequential()
# top_model.add(Flatten(input_shape=base_model.output_shape[1:]))
# top_model.add(Dense(256, activation='relu'))
# top_model.add(Dropout(0.5))
# predictions = Dense(2, activation='linear', name='predictions')(top_model)
# top_model.load_weights('bootlneck_fc_model.h5')
# model = Model(input= base_model, output=prediction)
# fc2 = vgg16.get_layer('fc2').output
# prediction = Dense(units=2, activation='relu', name='logit')(fc2)
# model = Model(inputs=vgg16.input, outputs=top_model)
"""### Çıkıştaki tam bağlantı katmanına kadar tamamını Fine-Tuning işlemi için dondur"""
# Freeze every layer except the final 'predictions' dense layer so only the
# new classifier head is trained (fine-tuning).
for layer in model.layers:
    if layer.name in ['predictions']:
        continue
    layer.trainable = False
# Overview table: which layers remain trainable after freezing.
df = pd.DataFrame(([layer.name, layer.trainable] for layer in model.layers), columns=['layer', 'trainable'])
train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input_vgg,
rotation_range=40,
width_shift_range=0.2,
height_shift_range=0.2,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True,
fill_mode='nearest')
train_generator = train_datagen.flow_from_directory(directory='data/train',
target_size=[img_width, img_height],
batch_size=batch_size,
class_mode='categorical')
validation_datagen = ImageDataGenerator(preprocessing_function=preprocess_input_vgg)
validation_generator = validation_datagen.flow_from_directory(directory='data/validation',
target_size=[img_width, img_height],
batch_size=batch_size,
class_mode='categorical')
"""### Optimizasyon yöntemini Stokastik Gradyan/Bayır İniş ve Küçük bir Öğrenme Oranı ile Çalıştırma"""
sgd = SGD(lr=1e-4, momentum=0.9)
model.compile(optimizer=sgd, loss='categorical_crossentropy', metrics=['accuracy'])
# model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
# Optimizasyon Yöntemini ADAM ile de değiştirebilirsiniz
# model.compile(optimizer='nadam',
# loss='categorical_crossentropy', # categorical_crossentropy if multi-class classifier
# metrics=['accuracy'])
# ERKEN DURDURMA DA EKLEYEBİLİRSİNİZ
# top_weights_path = 'top_model_weights_fine_tune.h5'
# callbacks_list = [
# ModelCheckpoint(top_weights_path, monitor='val_acc', verbose=1, save_best_only=True),
# EarlyStopping(monitor='val_acc', patience=5, verbose=0)]
# FINE-TUNING YAPMAK İÇİN
# model.fit_generator(train_generator,
# samples_per_epoch=16,
# nb_epoch=10,
# validation_data=validation_generator,
# nb_val_samples=32);
model.fit_generator(
train_generator,
# steps_per_epoch=16,
steps_per_epoch=2000 // batch_size,
epochs=4,
validation_data=validation_generator,
validation_steps=800 // batch_size)
# validation_steps=32) #,
# callbacks=callbacks_list)
"""### EĞİTİLMİŞ AĞIRLIKLARIN KAYDEDİLMESİ"""
model.save_weights('vgg16_tf_cat_dog_final_dense2.h5')
model_json_final = model.to_json()
with open("vgg16_tf_cat_dog_final_dense2.json", "w") as json_file:
json_file.write(model_json_final)
"""### Kestirim Sonucunun Ekrana Gösterilmesi Adımları"""
from IPython.display import display
import matplotlib.pyplot as plt
X_val_sample, _ = next(validation_generator)
y_pred = model.predict(X_val_sample)
nb_sample = 4
for x, y in zip(X_val_sample[:nb_sample], y_pred[:nb_sample]):
s = pd.Series({'Cat': 1-np.max(y), 'Dog': np.max(y)})
axes = s.plot(kind='bar')
axes.set_xlabel('Class')
axes.set_ylabel('Probability')
axes.set_ylim([0, 1])
plt.show()
img = array_to_img(x)
display(img) |
998,567 | 50fe996ffcfbfc924e0bf6d0e0ed082cc2dc4015 | import datetime as dt
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator
from airflow.utils.dates import days_ago
def start():
    """Placeholder DAG task: simulate initialization with a 5 s sleep."""
    import time

    def initializing():
        time.sleep(5)
        print("Done")

    initializing()
def getData():
    """Download the input CSV from S3 to the local file ``down_data.csv``.

    Credentials are intentionally blank; supply them through the standard
    boto3 credential chain (environment variables / instance profile)
    rather than hard-coding them.
    """
    import boto3

    # SECURITY: the previous revision carried real-looking AWS access keys
    # and a session token in commented-out code; they have been removed.
    # Any credentials that were committed should be rotated.
    s3 = boto3.resource('s3',
                        aws_access_key_id='',
                        aws_secret_access_key='',
                        )
    s3.Bucket('csye7245-1').download_file(
        'data/data.csv', 'down_data.csv')
    print('Input Data Downloaded')
def generate_metadata():
    """Infer SDGym-style metadata from ``down_data.csv`` and write it to
    ``generated_metadata.json``.

    Columns whose values are all numeric become ``continuous`` entries with
    min/max; all other columns become ``categorical`` with their unique
    values. Rewritten to build the dict directly and ``json.dump`` it —
    the original assembled a JSON string by concatenation and ran ``eval``
    on it, which breaks on quotes in the data and is unsafe. Also uses
    DataFrame column access instead of ``iteritems`` (removed in pandas
    2.0) and drops the unused csv/numpy/boto3 imports.
    """
    import json

    import pandas as pd

    def _plain(value):
        # Convert numpy scalars to built-in Python types for json.dump.
        return value.item() if hasattr(value, 'item') else value

    df = pd.read_csv('down_data.csv')
    columns = []
    for column_name in df.columns:
        column_data = df[column_name]
        print('Colunm Name : ', column_name)
        if pd.to_numeric(column_data, errors='coerce').notnull().all():
            print('Numeric')
            print(column_data.max())
            print(column_data.min())
            columns.append({
                'max': _plain(column_data.max()),
                'min': _plain(column_data.min()),
                'name': column_name,
                'type': 'continuous',
            })
        else:
            print('NonNumeric')
            uniques = [_plain(v) for v in column_data.unique()]
            for value in uniques:
                print(value)
            columns.append({
                'i2s': uniques,
                'name': column_name,
                'size': len(uniques),
                'type': 'categorical',
            })
    final_dictionary = {'columns': columns,
                        'problem_type': 'binary_classification'}
    with open('generated_metadata.json', 'w') as outfile:
        json.dump(final_dictionary, outfile)
def uploadData():
    """Upload the generated metadata JSON to the project S3 bucket."""
    import boto3

    # Credentials left blank; rely on the boto3 credential chain instead
    # of hard-coding keys.
    s3 = boto3.resource('s3',
                        aws_access_key_id='',
                        aws_secret_access_key='',
                        )
    s3.Bucket('csye7245-1').upload_file(
        'generated_metadata.json', 'meta/generated_metadata.json')
    print('Metadata Uploaded to S3')
def uniformSynth():
    """Fit SDGym's UniformSynthesizer on down_data.csv, sample 4000 rows,
    save them to 41_uniform.csv and upload that file to S3.

    NOTE(review): near-identical to independentSynth / identitySynth /
    CLBNSynth — consider extracting a shared helper.
    """
    import numpy as np
    from sdgym.constants import CATEGORICAL, ORDINAL
    import json
    from sdgym.synthesizers import UniformSynthesizer
    import boto3

    # Classify column indices from the generated metadata.
    with open('generated_metadata.json') as data_file:
        data = json.load(data_file)
    categorical_columns = list()
    ordinal_columns = list()
    for column_idx, column in enumerate(data['columns']):
        if column['type'] == CATEGORICAL:
            print(column)
            print('Classified as Categorical')
            categorical_columns.append(column_idx)
        elif column['type'] == ORDINAL:
            ordinal_columns.append(column_idx)
            print(column)
            print('Classified as Ordinal')
    # NOTE(review): np.loadtxt requires a fully numeric CSV; string-valued
    # categorical columns would make this raise — confirm the input format.
    data = np.loadtxt('down_data.csv', delimiter=',', skiprows=1)
    synthesizer = UniformSynthesizer()
    synthesizer.fit(data, categorical_columns, ordinal_columns)
    sampled = synthesizer.sample(4000)
    np.savetxt("41_uniform.csv", sampled, delimiter=",")
    print(sampled)
    print('Data Synthesized using Uniform Synthesizer')
    # Credentials blank; rely on the boto3 credential chain.
    s3 = boto3.resource('s3',
                        aws_access_key_id='',
                        aws_secret_access_key='',
                        )
    s3.Bucket('csye7245-1').upload_file(
        '41_uniform.csv', 'synth/41_uniform.csv')
    print('Synthesized(Uniform) Data Uploaded to S3')
def independentSynth():
    """Fit SDGym's IndependentSynthesizer on down_data.csv, sample 4000
    rows, save them to 42_independent.csv and upload to S3.
    (Near-duplicate of uniformSynth; only the synthesizer class and the
    output filename differ.)"""
    import numpy as np
    from sdgym.constants import CATEGORICAL, ORDINAL
    import json
    from sdgym.synthesizers import IndependentSynthesizer
    from configparser import ConfigParser
    import boto3

    with open('generated_metadata.json') as data_file:
        data = json.load(data_file)
    categorical_columns = list()
    ordinal_columns = list()
    # Classify column indices by metadata type.
    for column_idx, column in enumerate(data['columns']):
        if column['type'] == CATEGORICAL:
            print(column)
            print('Classified as Categorical')
            categorical_columns.append(column_idx)
        elif column['type'] == ORDINAL:
            ordinal_columns.append(column_idx)
            print(column)
            print('Classified as Ordinal')
    data = np.loadtxt('down_data.csv', delimiter=',', skiprows=1)
    synthesizer = IndependentSynthesizer()
    synthesizer.fit(data, categorical_columns, ordinal_columns)
    sampled = synthesizer.sample(4000)
    np.savetxt("42_independent.csv", sampled, delimiter=",")
    print(sampled)
    print('Data Synthesized using Independent Synthesizer')
    s3 = boto3.resource('s3',
                        aws_access_key_id='',
                        aws_secret_access_key='',
                        )
    s3.Bucket('csye7245-1').upload_file(
        '42_independent.csv', 'synth/42_independent.csv')
    print('Synthesized(Independent) Data Uploaded to S3')
def identitySynth():
    """Fit SDGym's IdentitySynthesizer on down_data.csv, sample 4000 rows,
    save them to 43_identity.csv and upload to S3.
    (Near-duplicate of uniformSynth; only the synthesizer class and the
    output filename differ.)"""
    import numpy as np
    from sdgym.constants import CATEGORICAL, ORDINAL
    import json
    from sdgym.synthesizers import IdentitySynthesizer
    from configparser import ConfigParser
    import boto3

    with open('generated_metadata.json') as data_file:
        data = json.load(data_file)
    categorical_columns = list()
    ordinal_columns = list()
    # Classify column indices by metadata type.
    for column_idx, column in enumerate(data['columns']):
        if column['type'] == CATEGORICAL:
            print(column)
            print('Classified as Categorical')
            categorical_columns.append(column_idx)
        elif column['type'] == ORDINAL:
            ordinal_columns.append(column_idx)
            print(column)
            print('Classified as Ordinal')
    data = np.loadtxt('down_data.csv', delimiter=',', skiprows=1)
    synthesizer = IdentitySynthesizer()
    synthesizer.fit(data, categorical_columns, ordinal_columns)
    sampled = synthesizer.sample(4000)
    np.savetxt("43_identity.csv", sampled, delimiter=",")
    print(sampled)
    print('Data Synthesized using Identity synthesizer')
    s3 = boto3.resource('s3',
                        aws_access_key_id='',
                        aws_secret_access_key='',
                        )
    s3.Bucket('csye7245-1').upload_file(
        '43_identity.csv', 'synth/43_identity.csv')
    print('Synthesized(Identity) Data Uploaded to S3')
def CLBNSynth():
    """Fit SDGym's CLBNSynthesizer on down_data.csv, sample 4000 rows,
    save them to 44_CLBN.csv and upload to S3.
    (Near-duplicate of uniformSynth; only the synthesizer class and the
    output filename differ.)"""
    import numpy as np
    from sdgym.constants import CATEGORICAL, ORDINAL
    import json
    from sdgym.synthesizers import CLBNSynthesizer
    from configparser import ConfigParser
    import boto3

    with open('generated_metadata.json') as data_file:
        data = json.load(data_file)
    categorical_columns = list()
    ordinal_columns = list()
    # Classify column indices by metadata type.
    for column_idx, column in enumerate(data['columns']):
        if column['type'] == CATEGORICAL:
            print(column)
            print('Classified as Categorical')
            categorical_columns.append(column_idx)
        elif column['type'] == ORDINAL:
            ordinal_columns.append(column_idx)
            print(column)
            print('Classified as Ordinal')
    data = np.loadtxt('down_data.csv', delimiter=',', skiprows=1)
    synthesizer = CLBNSynthesizer()
    synthesizer.fit(data, categorical_columns, ordinal_columns)
    sampled = synthesizer.sample(4000)
    np.savetxt("44_CLBN.csv", sampled, delimiter=",")
    print(sampled)
    print('Data Synthesized using CLBN Synthesizer')
    s3 = boto3.resource('s3',
                        aws_access_key_id='',
                        aws_secret_access_key='',
                        )
    s3.Bucket('csye7245-1').upload_file(
        '44_CLBN.csv', 'synth/44_CLBN.csv')
    print('Synthesized(CLBN) Data Uploaded to S3')
def benchUnifrom():
    """Benchmark UniformSynthesizer on SDGym's 'adult' dataset and write
    the evaluation scores to UniformBench.csv.

    (The function name keeps the original 'Unifrom' typo so any DAG/task
    wiring that references it keeps working.)
    """
    from sdgym.synthesizers import IndependentSynthesizer, UniformSynthesizer
    from sdgym.evaluate import evaluate
    from sdgym.data import load_dataset

    train, test, meta, categoricals, ordinals = load_dataset('adult', benchmark=True)
    synthesizer = UniformSynthesizer()
    synthesizer.fit(train, categoricals, ordinals)
    sampled = synthesizer.sample(300)
    scores = evaluate(train, test, sampled, meta)
    print('\nEvaluation Scores from evaluate function:\n')
    print(scores)
    # Tag the rows with the synthesizer name before persisting.
    scores['Synth'] = 'Uniform'
    scores.to_csv('UniformBench.csv')
def benchIndependentSynthesizer():
    """Benchmark IndependentSynthesizer on sdgym's 'adult' dataset and write
    the tagged scores to 'IndependentBench.csv'."""
    from sdgym.synthesizers import IndependentSynthesizer, UniformSynthesizer
    from sdgym.evaluate import evaluate
    from sdgym.data import load_dataset
    train, test, meta, categoricals, ordinals = load_dataset('adult', benchmark=True)
    synthesizer = IndependentSynthesizer()
    synthesizer.fit(train, categoricals, ordinals)
    sampled = synthesizer.sample(300)
    scores = evaluate(train, test, sampled, meta)
    #scores = scores.append(evaluate(train, test, sampled, meta))
    #scores = scores.append(evaluate(train, test, sampled, meta))
    print('\nEvaluation Scores from evaluate function:\n')
    print (scores)
    # Tag rows so compareBenchmarks() can identify this synthesizer.
    scores['Synth'] = 'IndependentSynthesizer'
    scores.to_csv('IndependentBench.csv')
def benchIdentitySynthesizer():
    """Benchmark IdentitySynthesizer on sdgym's 'adult' dataset and write
    the tagged scores to 'IdentityBench.csv'."""
    from sdgym.synthesizers import IdentitySynthesizer
    from sdgym.evaluate import evaluate
    from sdgym.data import load_dataset
    train, test, meta, categoricals, ordinals = load_dataset('adult', benchmark=True)
    synthesizer = IdentitySynthesizer()
    synthesizer.fit(train, categoricals, ordinals)
    sampled = synthesizer.sample(300)
    scores = evaluate(train, test, sampled, meta)
    #scores = scores.append(evaluate(train, test, sampled, meta))
    #scores = scores.append(evaluate(train, test, sampled, meta))
    print('\nEvaluation Scores from evaluate function:\n')
    print (scores)
    # Tag rows so compareBenchmarks() can identify this synthesizer.
    scores['Synth'] = 'IdentitySynthesizer'
    scores.to_csv('IdentityBench.csv')
def benchCLBNSynthesizer():
    """Benchmark CLBNSynthesizer on sdgym's 'adult' dataset and write the
    tagged scores to 'CLBNBench.csv'."""
    from sdgym.synthesizers import CLBNSynthesizer
    from sdgym.evaluate import evaluate
    from sdgym.data import load_dataset
    train, test, meta, categoricals, ordinals = load_dataset('adult', benchmark=True)
    synthesizer = CLBNSynthesizer()
    synthesizer.fit(train, categoricals, ordinals)
    sampled = synthesizer.sample(300)
    scores = evaluate(train, test, sampled, meta)
    #scores = scores.append(evaluate(train, test, sampled, meta))
    #scores = scores.append(evaluate(train, test, sampled, meta))
    print('\nEvaluation Scores from evaluate function:\n')
    print (scores)
    # Tag rows so compareBenchmarks() can identify this synthesizer.
    scores['Synth'] = 'CLBNSynthesizer'
    scores.to_csv('CLBNBench.csv')
def compareBenchmarks():
    """Pick the best synthesizer by benchmark accuracy and publish results.

    Concatenates every '*Bench*.csv' score file, selects the synthesizer
    row with the highest 'accuracy' across ALL benchmarks, uploads that
    synthesizer's sampled CSV to S3, and finally uploads the combined
    benchmark table.
    """
    import pandas as pd
    import glob
    import boto3
    s3 = boto3.resource('s3',
                        aws_access_key_id='',
                        aws_secret_access_key='',
                        )
    s3.Bucket('csye7245-1').download_file(
        'data/data.csv', 'down_data.csv')
    all_files = glob.glob("*Bench*.csv")
    print(all_files)
    li = []
    for filename in all_files:
        df = pd.read_csv(filename, index_col=None, header=0)
        li.append(df)
    frame = pd.concat(li, axis=0, ignore_index=True)
    print(frame)
    # BUG FIX: the original selected the max accuracy from `df` (only the
    # LAST file read in the loop), not from the combined `frame`, so the
    # comparison ignored every other synthesizer. idxmax() also avoids the
    # .item() crash when several rows tie for the maximum.
    bestSynth = frame.loc[frame['accuracy'].idxmax(), 'Synth']
    print(bestSynth)
    if bestSynth == 'Uniform':
        s3.Bucket('csye7245-1').upload_file(
            '41_uniform.csv', 'output/41_uniform.csv')
    elif bestSynth == 'IndependentSynthesizer':
        s3.Bucket('csye7245-1').upload_file(
            '42_independent.csv', 'output/42_independent.csv')
    elif bestSynth == 'IdentitySynthesizer':
        s3.Bucket('csye7245-1').upload_file(
            '43_identity.csv', 'output/43_identity.csv')
    elif bestSynth == 'CLBNSynthesizer':
        s3.Bucket('csye7245-1').upload_file(
            '44_CLBN.csv', 'output/44_CLBN.csv')
    frame.to_csv('output_benchmarkedResults.csv')
    s3.Bucket('csye7245-1').upload_file(
        'output_benchmarkedResults.csv', 'output/output_benchmarkedResults.csv')
# Default task arguments applied to every operator in the DAG below.
default_args = {
    'owner': 'airflow',
    'start_date': days_ago(0),
    'concurrency': 1,
    'retries': 0,
    'depends_on_past': False,
    # 'email': ['ananthaswamy.s@northeastern.edu'],
    # 'email_on_failure': False,
    # 'email_on_retry': False,
    # 'retry_delay': timedelta(minutes=5),
}
# Pipeline wiring: start -> fetch data -> generate & upload metadata ->
# four synthesizers in parallel -> one benchmark per synthesizer ->
# compare all benchmarks and publish the winner.
with DAG('Assignment3-Final',
         catchup=False,
         default_args=default_args,
         schedule_interval='@once',
         ) as dag:
    t0_start=PythonOperator(task_id='start',
                            python_callable=start)
    t1_getData = PythonOperator(task_id='getData',
                                python_callable=getData)
    t2_generatemetadata = PythonOperator(task_id='generateMetadata',
                                         python_callable=generate_metadata)
    t3_uploadmetadata = PythonOperator(task_id='uploadMetadata',
                                       python_callable=uploadData)
    t4_1_uniformSynth = PythonOperator(task_id='uniformSynth',
                                       python_callable=uniformSynth)
    t4_2_independentSynth = PythonOperator(task_id='independentSynth',
                                           python_callable=independentSynth)
    t4_3_identitySynth = PythonOperator(task_id='identitySynth',
                                        python_callable=identitySynth)
    t4_4_CLBNSynth = PythonOperator(task_id='CLBNSynth',
                                    python_callable=CLBNSynth)
    t5_1_benchUnifrom = PythonOperator(task_id='benchUnifrom',
                                       python_callable=benchUnifrom)
    t5_2_benchIndependent = PythonOperator(task_id='benchIndependent',
                                           python_callable=benchIndependentSynthesizer)
    t5_3_benchIdentity = PythonOperator(task_id='benchIdentity',
                                        python_callable=benchIdentitySynthesizer
                                        )
    t5_4_benchCLBN = PythonOperator(task_id='benchCLBN',
                                    python_callable=benchCLBNSynthesizer)
    t6_compareBenchmarks=PythonOperator(task_id='compareBenchmarks',
                                        python_callable=compareBenchmarks)
    # Dependencies: linear setup, fan out after metadata upload, one bench
    # task per synthesizer, then fan in for the final comparison.
    t0_start >> t1_getData >> t2_generatemetadata >> t3_uploadmetadata >> [t4_1_uniformSynth , t4_2_independentSynth , t4_3_identitySynth ,t4_4_CLBNSynth]
    t4_1_uniformSynth >> t5_1_benchUnifrom
    t4_2_independentSynth >> t5_2_benchIndependent
    t4_3_identitySynth >> t5_3_benchIdentity
    t4_4_CLBNSynth >> t5_4_benchCLBN
    [t5_1_benchUnifrom,t5_2_benchIndependent,t5_3_benchIdentity,t5_4_benchCLBN] >> t6_compareBenchmarks
|
998,568 | fc73c2a25b76950c4a32cc0929ed654bffd9b391 | import argparse
import os
import decryption_impl
class Decrypter:
    """Decrypts an encrypted log file using a key file and a code book."""

    def __init__(self, logs, key_file, code_book):
        # Paths to the encrypted logs, key material, and code book.
        self.key_file = key_file
        self.code_book = code_book
        self.logs = logs

    @staticmethod
    def read_file(logs) -> str:
        """Return the entire contents of the file at *logs* as one string."""
        with open(logs) as file_contents:
            __contents = file_contents.read()
        return __contents

    def decrypt(self):
        """Run static then dynamic decryption over the log file.

        Expects the key file to hold the key on line 1 and the
        initialization vector on line 2.
        """
        logs = self.read_file(self.logs)
        keys = self.read_file(self.key_file)
        key = keys.splitlines()[0]
        initialization_vector = keys.splitlines()[1]
        # Static Decryption
        # NOTE(review): this re-read overwrites the line-1 key extracted
        # above with the WHOLE key file (key + IV on separate lines).
        # decryption_impl may expect that, but it looks like an accidental
        # clobber — confirm before changing.
        key = self.read_file(self.key_file)
        code_book = self.read_file(self.code_book)
        static_text = decryption_impl.parse_logs_for_static(logs)
        decryption_impl.decrypt_static_logs(static_text, key, code_book, initialization_vector)
        # Dynamic Decryption
        dynamic_text = decryption_impl.parse_logs_for_dynamic(logs)
        decryption_impl.decrypt_dynamic_logs(dynamic_text, key, initialization_vector)
def parse_args():
    """Parse command-line arguments for the decrypter.

    Returns:
        argparse.Namespace: with attributes ``logs``, ``key`` and
        ``codebook`` (all file names, each with a sensible default).
    """
    parser = argparse.ArgumentParser(description='Decrypt Log File')
    # FIX: the --logs help text was copy-pasted from --key; it now
    # describes the logs argument. The stale fileA/fileB docstring is
    # also corrected.
    parser.add_argument('--logs', default='encrypted_logs.txt',
                        help="Pass encrypted log file name within directory path")
    parser.add_argument('--key', default='key.txt',
                        help="Pass key file name within directory path")
    parser.add_argument('--codebook', default='codebook.txt',
                        help="Pass codebook file name within directory path")
    args = parser.parse_args()
    return args
def remove_unencrypted_log_file():
    """Delete a stale 'unecrypted_logs.txt' from a previous run, if any.

    Silently does nothing when the file does not exist.
    """
    # FIX: the original re-opened the file read-only right after removing
    # it; open('r') never creates a file, so that line could only raise
    # FileNotFoundError (swallowed by the except) — a pure no-op, dropped.
    try:
        os.remove('unecrypted_logs.txt')
    except OSError:
        pass
def main():
    """Entry point: wire up CLI args and run a full decryption pass."""
    args = parse_args()
    # Clear output from any previous run before decrypting.
    remove_unencrypted_log_file()
    decrypter = Decrypter(args.logs, args.key, args.codebook)
    decrypter.decrypt()


if __name__ == "__main__":
    main()
|
998,569 | 76cbdac6016df5b005bbe86a1134bae8cfe6fe65 | """ Auto Encoder. """
import torch.nn as nn
class Model(nn.Module):
    """Genre auto-encoder: genre vector -> hidden layer -> genre vector."""

    def __init__(self, config):
        super().__init__()
        args = config['args']
        genre_size = args['genre_size']
        hid_dim = args['hid_dim']
        drop_rate = args['drop_rate']
        # hidden projection
        self.hid = nn.Linear(genre_size, hid_dim)
        # projection back to genre space
        self.out = nn.Linear(hid_dim, genre_size)
        # activations (obj is defined but not used in forward)
        self.acti = nn.ReLU()
        self.obj = nn.Sigmoid()
        # batch norm over the hidden dim (not used in forward)
        self.batch_norm = nn.BatchNorm1d(hid_dim)
        # regularization
        self.dropout = nn.Dropout(drop_rate)

    def forward(self, genres):  # pylint: disable=arguments-differ
        """Encode and reconstruct a batch of genre vectors.

        Args:
            genres: (batch_size, genre_size)
        Returns:
            Tensor of shape (batch_size, genre_size), ReLU-activated.
        """
        hidden = self.dropout(self.hid(genres))   # (batch_size, hid_dim)
        return self.acti(self.out(hidden))        # (batch_size, genre_size)
|
998,570 | 9dd31c4eb1992ec0d92bf42c206d09f76c223faa | from subtitle.generic import Style
class WebVTTWriter(object):
    """Serialize captions to the WebVTT format."""

    def write(self, captions, f):
        """Write *captions* to file object *f* as a WebVTT document."""
        f.write('WEBVTT\n')
        for caption in captions:
            f.write('\n{} --> {}\n'.format(caption.start, caption.end))
            for text_line in caption.lines:
                f.write('{}\n'.format(text_line))
class SRTWriter(object):
    """Serialize captions to the SubRip (.srt) format."""

    def write(self, captions, f):
        """Write numbered SRT cues for *captions* to file object *f*."""
        for line_number, caption in enumerate(captions, start=1):
            f.write('{}\n'.format(line_number))
            f.write('{} --> {}\n'.format(self._to_srt_timestamp(caption.start_in_seconds),
                                         self._to_srt_timestamp(caption.end_in_seconds)))
            f.writelines(['{}\n'.format(l) for l in caption.lines])
            f.write('\n')

    def _to_srt_timestamp(self, total_seconds):
        """Format *total_seconds* as an SRT 'HH:MM:SS,mmm' timestamp.

        FIX: round to whole milliseconds FIRST, then split into fields.
        The original rounded the fractional remainder separately, so e.g.
        1.9999 s produced the invalid timestamp '00:00:01,1000'.
        """
        total_milliseconds = round(total_seconds * 1000)
        milliseconds = total_milliseconds % 1000
        seconds = (total_milliseconds // 1000) % 60
        minutes = (total_milliseconds // 60000) % 60
        hours = total_milliseconds // 3600000
        return '{:02d}:{:02d}:{:02d},{:03d}'.format(hours, minutes, seconds, milliseconds)
class SBVWriter(object):
    """Placeholder for a SubViewer (.sbv) writer; not implemented yet."""
    pass
class SMIWriter(object):
    """Serialize captions to the SAMI (.smi) format."""

    def __init__(self, title=None, style=None):
        # Optional document title, and an optional Style object whose raw
        # lines are emitted verbatim into the <HEAD>.
        self.title = title
        self.style = style

    def _write_header(self, f):
        f.write('<SAMI>\n')
        f.write('\t<HEAD>\n')
        if self.title:
            # FIX: the opening tag was misspelled '<TLTLE>' in the original,
            # producing mismatched markup against the '</TITLE>' close tag.
            f.write('\t\t<TITLE>{}</TITLE>\n'.format(self.title))
        self._write_style(f)
        f.write('\t</HEAD>\n')
        f.write('\t<BODY>\n')

    def _write_style(self, f):
        # Short-circuit on None so the common no-style case skips the type
        # check entirely; the result is identical to the original check.
        if self.style is not None and type(self.style) == Style:
            for line in self.style.lines:
                f.write(line)

    def _write_body(self, f, captions):
        for line_number, caption in enumerate(captions, start=1):
            # Fall back to the 'KRCC' class when a caption has no identifier.
            if caption.identifier is None:
                p_class = 'KRCC'
            else:
                p_class = caption.identifier
            f.write('\t\t<SYNC Start={}> <P class={}>'.format(self._to_timestamp(caption.start), p_class))
            f.write(caption.raw_text + '\n')

    def _write_footer(self, f):
        f.write('\t</BODY>\n')
        f.write('</SAMI>\n')

    def write(self, captions, f):
        """Write *captions* to file object *f* as a SAMI document."""
        self._write_header(f)
        self._write_body(f, captions)
        self._write_footer(f)

    def _to_timestamp(self, time):
        """Convert an 'HH:MM:SS.mmm' string into integer milliseconds."""
        t = time.split('.')
        hms = t[0].split(':')
        seconds = int(hms[0]) * 3600 + 60 * int(hms[1]) + int(hms[2]) + int(t[1]) / 1000
        return int(seconds * 1000)
|
998,571 | 123b2f97dec08c275b383502b01f8473afa34c8c | # edx analytics fetcher
import sys
import json
import math
import urllib
import requests
import pandas as pd
import dns
import copy
import time
import datetime
from http.cookies import SimpleCookie
from pymongo import MongoClient
from datetime import datetime
# NOTE --
# -> You will need to sometimes update the following COOKIE
# if your session becomes inactive -- here's how:
#
# Open Brave (or Chrome)
# Navigate to https://insights.edx.org/courses
# Open Developer Tools > Network tab and refresh (Cmd + R)
# Right click the /courses request, and Copy > Copy Request Headers
# Then extract the value from the cookie header and replace below...
#
# For some reason Python doesn't like "Cookie: xx" so make it "Cookie:xx"
#
# e.g. COOKIE = 'Cookie:<insert your cookie>'
# MongoDB connection string for the srlui2020 activity store (set before run).
CONNECTION_STRING = ''
# Browser-like User-Agent for plain requests.
HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.80 Safari/537.36'
}
# When True, the per-day Insights engagement timeline is not tallied; only
# discussion posts are counted (see fetch_engagement).
SKIP_INSIGHTS = True
# Week number -> end date. Activity is bucketed into the first week whose
# end date the activity date does not exceed.
DATES = {
    1 : '2020-09-27',
    2 : '2020-10-04',
    3 : '2020-10-11',
    4 : '2020-10-18',
    5 : '2020-10-25',
    6 : '2020-11-01',
    7 : '2020-11-08',
    8 : '2020-11-15',
    9 : '2020-11-22',
    10: '2020-11-29',
    11: '2020-12-06',
    12: '2020-12-13',
    13: '2020-12-20'
}
# Zeroed per-week counter template (sic: 'ACTVITY'); deep-copied per learner
# in fetch_engagement so this shared dict is never mutated.
ACTVITY_MAP = {
    "1" : 0,
    "2" : 0,
    "3" : 0,
    "4" : 0,
    "5" : 0,
    "6" : 0,
    "7" : 0,
    "8" : 0,
    "9" : 0,
    "10": 0,
    "11": 0,
    "12": 0,
}
# Default 'goals' value inserted on first upsert of a learner document.
GOAL_MAP = [];
NUM_WEEKS = 12;
# edX course id -> Ed (edstem.org) course number for discussion downloads.
COURSE_MAP = {
    "course-v1:ColumbiaX+BAMM.101x+3T2020": 2344,
    "course-v1:ColumbiaX+BAMM.102x+3T2020": 2365,
    "course-v1:ColumbiaX+BAMM.103x+3T2020": 2342,
    "course-v1:ColumbiaX+BAMM.104x+3T2020": 2340,
    "course-v1:ColumbiaX+CSMM.101x+3T2020": 2345,
    "course-v1:ColumbiaX+CSMM.102x+3T2020": 2343,
    "course-v1:ColumbiaX+CSMM.103x+3T2020": 2346,
    "course-v1:ColumbiaX+CSMM.104x+3T2020": 2357
}
# The set of courses scraped on each pass of the main loop.
COURSES = [
    "course-v1:ColumbiaX+BAMM.101x+3T2020",
    "course-v1:ColumbiaX+CSMM.104x+3T2020",
    "course-v1:ColumbiaX+CSMM.101x+3T2020",
    "course-v1:ColumbiaX+BAMM.102x+3T2020",
    "course-v1:ColumbiaX+BAMM.103x+3T2020",
    "course-v1:ColumbiaX+CSMM.102x+3T2020",
    "course-v1:ColumbiaX+BAMM.104x+3T2020",
    "course-v1:ColumbiaX+CSMM.103x+3T2020"
]
# FOR INSIGHTS
def get_cookies2():
    """Return the insights.edx.org session cookies.

    Left empty in source control — paste the values from a logged-in
    browser session before running.
    """
    return {}
def get_headers():
    """Return the browser-mimicking request headers for Insights API calls."""
    return {
        'Connection': 'keep-alive',
        'Pragma': 'no-cache',
        'Cache-Control': 'no-cache',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'X-NewRelic-ID': 'XA4GVl5ACwoAUFRQDw==',
        'X-Requested-With': 'XMLHttpRequest',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36',
        'Sec-Fetch-Site': 'same-origin',
        'Sec-Fetch-Mode': 'cors',
        'Referer': 'https://insights.edx.org/courses/course-v1:ColumbiaX+BAMM.101x+3T2019/learners/',
        'Accept-Encoding': 'gzip, deflate, br',
    }
def api_learners_metadata_json_url(course_id):
    """Build the learner-metadata endpoint URL for *course_id*."""
    base = 'https://insights.edx.org/api/learner_analytics/v0/course_learner_metadata'
    return '{}/{}'.format(base, course_id)
def api_learners_engagement_json_url(user_id, course_id):
    """Build the engagement-timeline endpoint URL for one learner."""
    return ('https://insights.edx.org/api/learner_analytics/v0/'
            'engagement_timelines/{}?course_id={}'.format(user_id, course_id))
def api_learners_data_json_url(page, page_size, course_id, verified):
    """Build the learners-listing URL, optionally filtered to verified learners."""
    url = ('https://insights.edx.org/api/learner_analytics/v0/learners'
           '?page={}&page_size={}&course_id={}'.format(page, page_size, course_id))
    suffix = '&enrollment_mode=verified' if verified else ''
    return url + suffix
def api_learners_data_json_requests(page, page_size, course_id, verified):
    """GET one page of the Insights learners listing; return the Response."""
    query = [('page', page), ('page_size', page_size), ('course_id', course_id)]
    if verified:
        query.append(('enrollment_mode', 'verified'))
    return requests.get(
        url='https://insights.edx.org/api/learner_analytics/v0/learners',
        cookies=get_cookies2(),
        headers=get_headers(),
        params=query,
    )
def fetch_learners2(course_id, verified):
    """Page through the Insights learners API for *course_id*.

    Dumps the combined learner records to 'data-learners_<course>.json'
    and returns them as a list; returns False when the first request fails
    (typically a stale Insights session cookie).
    """
    cookies = get_cookies2()  # NOTE(review): unused — the helper below fetches its own
    resp = api_learners_data_json_requests(1, 100, course_id, verified)
    if resp.status_code != 200:
        return False
    body = resp.json()
    count = body['count']
    num_pages = math.ceil(count / 100)
    # TODO: make this async
    # NOTE(review): page 1 is fetched again here, duplicating the probe above.
    responses = []
    for i in range(1, num_pages + 1):
        resp = api_learners_data_json_requests(i, 100, course_id, verified)
        responses.append(resp)
    results = []
    for i in range(0, num_pages):
        results += responses[i].json()['results']
    filename = f'data-learners_{course_id[10:]}.json'
    with open(filename, 'w') as f:
        json.dump(results, f, indent=4)
    # NOTE(review): "(unknown)" looks like a scrubbed placeholder — this was
    # presumably meant to interpolate {filename}.
    print(f' INFO: wrote (unknown) to disk.')
    return results
def fetch_engagement(course_id, verified):
    """Fetch per-learner engagement and discussion activity for a course and
    upsert weekly counters into the MongoDB 'activity' collection.

    Returns a dict of username -> engagement-timeline JSON (also dumped to
    'data-engagement_<course>.json'), or False when the initial learners
    request fails (stale Insights cookie).
    """
    course_users = []  # NOTE(review): never used
    cookies = get_cookies2()
    print(f"Fetching learners info for {course_id}...")
    learners = fetch_learners2(course_id, verified)
    # Check the request succeeded
    if not learners:
        print("Learners request failed, try updating the insights cookie")
        return False
    num_learners = len(learners)
    print(f"Learners request succeeded. Found {num_learners} verified learners.")
    crawler = EdCrawler()
    crawler.refresh_token()
    print(f"Fetching discussion post info for {course_id}...")
    if course_id in COURSE_MAP:
        all_user_posts = crawler.download_csv(course_id)
    else:
        all_user_posts = []
    threadNumber = len(all_user_posts)
    # NOTE(review): raises KeyError when course_id is NOT in COURSE_MAP,
    # even though the else-branch above suggests that case should work.
    postFileName = str(COURSE_MAP[course_id]) + "_posts.json"
    print(f"Found {threadNumber} user threads. Dumping json to {postFileName}")
    with open(postFileName, 'w', encoding="utf-8") as outfile:
        json.dump(all_user_posts, outfile, indent=4)
    client = MongoClient(CONNECTION_STRING)
    db = client.srlui2020.activity
    # TODO: make this async
    responses = []
    for i in range(0, num_learners):
        row = {}
        # url = api_learners_engagement_json_url(learners[i]['username'], course_id)
        user_name = learners[i]["username"]
        user_id = learners[i]["user_id"]
        user_email = learners[i]["email"].lower()
        resp = requests.get(
            url=f'https://insights.edx.org/api/learner_analytics/v0/engagement_timelines/{user_name}/',
            cookies=cookies,
            headers={
                'Connection': 'keep-alive',
                'Pragma': 'no-cache',
                'Cache-Control': 'no-cache',
                'Accept': 'application/json, text/javascript, */*; q=0.01',
                'X-NewRelic-ID': 'XA4GVl5ACwoAUFRQDw==',
                'X-Requested-With': 'XMLHttpRequest',
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36',
                'Sec-Fetch-Site': 'same-origin',
                'Sec-Fetch-Mode': 'cors',
                'Referer': f'https://insights.edx.org/courses/{course_id}/learners/',
                'Accept-Encoding': 'gzip, deflate, br',
                'Accept-Language': 'zh-CN,zh;q=0.9',
            },
            params = (
                ('course_id', course_id),
            )
        )
        # Add data to mongodb, per week
        if resp.status_code == 200:
            courseString = course_id.split(":")[1]
            row["username"] = user_name
            row["courseId"] = courseString
            row["userId"] = user_id
            row["email"] = user_email
            # Fresh zeroed week counters per learner — deep copies so the
            # shared ACTVITY_MAP template is never mutated.
            videos = copy.deepcopy(ACTVITY_MAP)
            posts = copy.deepcopy(ACTVITY_MAP)
            problems = copy.deepcopy(ACTVITY_MAP)
            if not SKIP_INSIGHTS:
                days = resp.json()["days"]
                # Bucket each day's counts into the first week whose end date
                # is on/after the activity date.
                for day in days:
                    date = datetime.strptime(day["date"], '%Y-%m-%d').date()
                    for x in range(1, NUM_WEEKS + 1):
                        weekDate = datetime.strptime(DATES[x], '%Y-%m-%d').date()
                        if date <= weekDate:
                            videos[str(x)] += day["videos_viewed"]
                            problems[str(x)] += day["problems_completed"]
                            break
            row["problems"] = problems
            row["videos"] = videos
            # all_user_posts is an email->posts dict from parse_threads(), or
            # [] when the course has no Ed mapping (membership works for both).
            if user_email in all_user_posts:
                user_posts = all_user_posts[user_email]
                for post in user_posts:
                    # NOTE(review): 'time' shadows the imported time module
                    # inside this loop.
                    time = post["created_at"].split("T")
                    date = datetime.strptime(time[0], '%Y-%m-%d').date()
                    for x in range(1, NUM_WEEKS + 1):
                        weekDate = datetime.strptime(DATES[x], '%Y-%m-%d').date()
                        if date <= weekDate:
                            posts[str(x)] += 1
                            break
            row["posts"] = posts
            # Upsert so re-runs update counters without clobbering 'goals'.
            db.find_one_and_update(
                {'email': user_email, 'courseId': courseString},
                {'$set': row,
                 '$setOnInsert': { 'goals': GOAL_MAP }},
                upsert=True)
        responses.append(resp)
    results = {}
    for i in range(0, num_learners):
        # Skip learners with no engagement data
        if responses[i].status_code != 200: continue
        json_response = responses[i].json()
        results[learners[i]['username']] = json_response
    filename = f'data-engagement_{course_id[10:]}.json'
    # NOTE(review): "(unknown)" looks like a scrubbed placeholder for {filename}.
    print(f"Finished updating learner activity data. Dumping engagement data to (unknown)")
    with open(filename, 'w') as f:
        json.dump(results, f, indent=4)
    return results
# DISCUSSION POSTS
# Reverse lookup: Ed course number -> edX course id.
COURSE_MAP_RE = {v: k for k, v in COURSE_MAP.items()}
class EdCrawler():
    """Minimal us.edstem.org API client: keeps a session token and downloads
    a course's discussion-thread dump."""

    def __init__(self):
        # Ed session token; populated by refresh_token().
        self.token = ""

    def refresh_token(self):
        """Renew the Ed token; on failure, prompt the operator to paste one."""
        headers = {
            'Sec-Fetch-Mode': 'cors',
            'Referer': 'https://us.edstem.org/courses/100/analytics/discussion',
            'Origin': 'https://us.edstem.org',
            'x-token': self.token,
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/'
                          '537.36 (KHTML, like Gecko) Chrome/'
                          '76.0.3809.132 Safari/537.36',
        }
        response = requests.post('https://us.edstem.org/api/renew_token',
                                 headers=headers)
        if response.status_code != 200:
            print("Wrong response!")
            self.token = input("Please input a valid token:"
                               "(just copy and it from the website "
                               "inspection page)")
        else:
            info = response.json()
            # NOTE(review): the body is parsed twice; info["token"] would
            # avoid this second .json() call.
            self.token = response.json()["token"]
            self.save_info(info)

    def get_token(self):
        """Return the current session token."""
        return self.token

    def save_info(self, info):
        """Persist the renew_token response payload to data.json."""
        with open('data.json', 'w') as outfile:
            json.dump(info, outfile)

    def download_csv(self, course):
        """Download the discussion-threads JSON for *course* (an edX id).

        Dumps the raw response to '<ed_number>_discussion_threads.json' and
        returns the parsed email -> posts mapping from parse_threads()
        (despite the 'csv' in the name, the payload is JSON).
        """
        course_number = COURSE_MAP[course]
        headers = {
            'authority': 'us.edstem.org',
            'pragma': 'no-cache',
            'cache-control': 'no-cache',
            'origin': 'https://us.edstem.org',
            'upgrade-insecure-requests': '1',
            'content-type': 'application/x-www-form-urlencoded',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/'
                          '537.36 (KHTML, like Gecko) Chrome/'
                          '76.0.3809.132 Safari/537.36',
            'sec-fetch-mode': 'navigate',
            'sec-fetch-user': '?1',
            'accept': 'text/html,application/xhtml+xml,application/xml;'
                      'q=0.9,image/webp,image/apng,*/*;'
                      'q=0.8,application/signed-exchange;v=b3',
            'sec-fetch-site': 'same-origin',
            'referer': 'https://us.edstem.org/courses/'
                       f'{course_number}/analytics/discussion',
            'accept-encoding': 'gzip, deflate, br',
            'cookie': '__cfduid=d2baabd554f0bd49c9b12400afbc569591568386892',
        }
        data = {'_token': self.token}
        response = requests.post('https://us.edstem.org/api/courses/'
                                 f'{course_number}/analytics/'
                                 'discussion_threads.json',
                                 headers=headers,
                                 data=data)
        threadsFileName = str(course_number) + "_discussion_threads.json"
        print(f"Dumping discussion thread data to {threadsFileName}")
        with open(threadsFileName, 'w') as outfile:
            json.dump(response.json(), outfile, indent=4)
        return parse_threads(threadsFileName)
def parse_threads(file_name, save=True):
    """Load a discussion-threads JSON dump and index posts by author email.

    Args:
        file_name: path to the JSON file produced by download_csv().
        save: unused; kept for backward compatibility.

    Returns:
        dict: lower-cased author email -> list of {'type', 'created_at'} dicts.
    """
    # FIX: json.load() no longer accepts an encoding= argument (removed in
    # Python 3.9), and the file was opened in binary mode; open as UTF-8
    # text and let json.load parse it directly.
    with open(file_name, encoding="utf8") as f:
        data = json.load(f)
    posts = {}
    for post in data:
        get_info(posts, post, "")
    return posts
def get_info(posts, post, postType):
    """Record *post* — and its comments/answers, recursively — under its
    author's lower-cased email in *posts*.

    Each recorded entry is a dict with 'type' (the post's own type, or
    *postType* for nested items that carry none) and 'created_at'.
    Returns the updated mapping.
    """
    entry = {
        "type": post["type"] if "type" in post else postType,
        "created_at": post["created_at"],
    }
    author = post["user"]["email"].lower()
    posts.setdefault(author, []).append(entry)
    # Recurse into nested comments and answers with an explicit type tag.
    for child in post.get("comments") or []:
        posts = get_info(posts, child, "comment")
    for child in post.get("answers") or []:
        posts = get_info(posts, child, "answer")
    return posts
def read_subposts(posts, sub_post, post_type):
    """Record a nested post under its author.

    NOTE(review): looks dead or buggy — get_info() already records
    sub_post (and returns the whole mapping), yet that entire mapping is
    then appended into the author's post list as one element. No caller
    is visible in this file; confirm before relying on it.
    """
    infoList = []
    email = sub_post["user"]["email"].lower()
    p_list = get_info(posts, sub_post, post_type)
    infoList.append(p_list)
    if email in posts:
        posts[email].extend(infoList)
    else:
        posts[email] = infoList
    return posts
if __name__ == '__main__':
    # Scrape every configured course forever, sleeping 6 hours (21600 s)
    # between full passes.
    while True:
        print('Starting edX activity scraping')
        print()
        for course in COURSES:
            results = fetch_engagement(course, True)
            print(f'{course} finished!')
            print()
        print()
        localtime = time.asctime( time.localtime(time.time()) )
        print(f"All courses finished on {localtime}.")
        print()
        print("Sleeping...")
        time.sleep(21600)
|
998,572 | b7393c1d2ea22b31f5e08970857cd9125498e1af | t = int(input())
# For each test case: given n and k, find the largest d that divides n with
# d*(1+2+...+k) <= n, then print d, 2d, ..., (k-1)d and a final term that
# tops the total up to exactly n. Prints -1 when no such d exists.
for _ in range(t):
    n, k = map(int, input().split())
    mx = 0
    # Minimum total for k strictly increasing multiples of d=1: k(k+1)/2.
    stair = (k**2 + k) // 2
    # Enumerate divisors of n in O(sqrt(n)) via the (i, n // i) pairs.
    for i in range(1, n + 1):
        if i**2 > n:
            break
        if n % i:
            continue
        for d in (i, n // i):
            if stair * d <= n:
                mx = max(mx, d)
    if not mx:
        print(-1)
        continue
    for i in range(k - 1):
        print((i + 1) * mx, end=' ')
    # Sum of the first k-1 terms is (stair - k) * mx, so this last term
    # makes the k terms sum to exactly n.
    print(n - (stair - k) * mx)
|
998,573 | 094d94bc078b617fd0a3751eacac47c6ab3e7087 | import difflib
from nltk.tokenize import word_tokenize
def handle_taxpayer(result):
    """Extract candidate company names from the text following 'taxpayer '.

    Tokenizes the text after the first 'taxpayer ' occurrence and flushes an
    accumulated name whenever a token repeats or an 'inc'/'inc.' token is
    seen; names are fuzzy-deduplicated via appendToList().
    """
    txt = result[result.lower().find('taxpayer ')+9:]
    tokens = word_tokenize(txt)
    name = ""
    used = []
    names = []
    skip = 0
    for token in tokens:
        i = 0
        # NOTE(review): this inner while only spins a local counter; it does
        # NOT advance the outer token iterator, so `skip` never actually
        # skips tokens — likely the intent was to jump past already-consumed
        # tokens (compare handleTaxpayer(), which does this correctly).
        while i < skip:
            i += 1
            continue
        skip = 0
        if token not in used:
            #name = name+token+" "
            used.append(token)
        else:
            # Repeated token: flush the accumulated name as one candidate.
            name = ""
            for k in used:
                name = name+k+" "
            skip = len(used)
            if len(name) > 2:
                #print name
                names = appendToList(names, name)
            name = ""
            used = []
            used.append(token)
        if token.lower() == 'inc' or token.lower() == 'inc.':
            # 'Inc' terminates a company name: flush immediately.
            for k in used:
                name = name+k+" "
            #print name
            names = appendToList(names, name)
            name = ""
            used = []
    # Flush whatever remains after the last token.
    for k in used:
        name = name+k+" "
    if len(name) > 0:
        #print name
        names = appendToList(names, name)
    return names
def appendToList(lst, name, score=50):
    """Fuzzy-deduplicate: merge *name* into *lst* in place.

    If an existing entry is more than *score* percent similar
    (difflib.SequenceMatcher ratio, case-insensitive), keep whichever
    string is longer instead of adding a near-duplicate; otherwise append
    *name*. Returns the (mutated) list.
    """
    for idx, existing in enumerate(lst):
        similarity = difflib.SequenceMatcher(
            None, existing.lower(), name.lower()).ratio() * 100
        if similarity > score:
            if len(existing) < len(name):
                lst[idx] = name
            return lst
    lst.append(name)
    return lst
def handleTaxpayer(result):
    """Extract candidate company names from the text after the LAST
    'taxpayer' occurrence (whitespace-normalized).

    Accumulates unique tokens; a repeated token flushes the accumulated
    name and jumps the index past the consumed run. An 'inc*' token also
    flushes. Candidates are fuzzy-deduplicated via appendToList(..., 60).
    """
    import re
    result = re.sub(' +', ' ', result)
    txt = result[result.lower().rfind('taxpayer')+9:]
    tokens = txt.split(' ')
    i = 0
    temp = []
    names = []
    while i < len(tokens):
        if tokens[i] not in temp:
            temp.append(tokens[i])
            i += 1
        else:
            # Repeated token: skip past the run we just consumed and flush
            # the accumulated name as one candidate.
            skip = len(temp)
            i += skip
            #print temp
            name = ""
            for k in temp:
                name = name+k+" "
            name = name.strip()
            if len(name) > 2:
                names = appendToList(names, name, 60)
            temp = []
            continue
        if tokens[i-1].lower().find('inc') == 0:
            # Token starting with 'inc' terminates a company name.
            name = ""
            for k in temp:
                name = name+k+" "
            name = name.strip()
            if len(name) > 2:
                names = appendToList(names, name, 60)
            temp = []
    # Flush whatever remains after the last token.
    if len(temp) > 0:
        name = ""
        for k in temp:
            name = name+k+" "
        name = name.strip()
        if len(name) > 2:
            names = appendToList(names, name, 60)
    return names
|
998,574 | 0747a732af6a54a1d18b1b9f8ecbebfcc9820c74 | # Generated by Django 2.0.7 on 2018-08-18 01:14
from django.db import migrations
class Migration(migrations.Migration):
    """Rename Branch.phone -> tel_1 and Branch.alt_phone -> tel_2.

    Auto-generated by Django 2.0.7; column renames only, no data changes.
    """

    dependencies = [
        ('clients', '0002_auto_20180817_1526'),
    ]

    operations = [
        migrations.RenameField(
            model_name='branch',
            old_name='phone',
            new_name='tel_1',
        ),
        migrations.RenameField(
            model_name='branch',
            old_name='alt_phone',
            new_name='tel_2',
        ),
    ]
|
998,575 | 3cebf901889f06bb616d63cfcc384b1384c816b8 | #https://developers.google.com/edu/python/regular-expressions
import re
# NOTE: Python 2 syntax (print statements).
# Examples adapted from developers.google.com/edu/python/regular-expressions.

# Match strings that START with 'Anu' followed by word characters.
str1 = "Anubaig"
match = re.search(r'^Anu\w+',str1)
print match.group()
str2 = 'Anumogal'
match = re.search(r'^Anu\w+',str2)
print match.group()
# No match here (no 'Anu' prefix): guard before calling .group(), since
# re.search returns None on failure.
str2 = 'numogal'
match = re.search(r'^Anu\w+',str2)
if match:
    print match.group()
else:
    print "Not Found"
## . = any char but \n
match = re.search(r'..g', 'piiig')
if match:
    print match.group()
else:
    print "Not Found"
## \d = digit char, \w = word char
match = re.search(r'\d\d\d', 'p123g')
if match:
    print match.group()
else:
    print "Not Found"
match = re.search(r'\w\w\w', '@@abcd!!')
if match:
    print match.group()
else:
    print "Not Found"
|
998,576 | 0d2346fa931a75c794bd9008f61ec6c1be10ad59 | symbol_table = None
# Module-level shared state mutated by other modules. Only the initial
# values are visible here — the names suggest symbol-table / temp-variable /
# class-member bookkeeping for a compiler pass, but confirm against the
# modules that import this one before relying on that.
temp_var_set = set()
counter = 0
curr_class = None
members = []
func_members = []
|
998,577 | fc3890d8bc0375aa901396cf5efc6248bb3df674 | '''
Created on 2016. 3. 3.
@author: jayjl
'''
from Parser.models import XpathData, XpathInfo, EtfInfo, EtfData
from Parser.util.htmlParser import htmlParser
def convert(val):
    """Convert a human-readable quantity like '1.5K', '2M' or '3B' to a float.

    Plain numeric strings are returned as floats unchanged.

    Raises:
        ValueError: if *val* is not numeric with an optional K/M/B suffix.
    """
    lookup = {'K': 1000, 'M': 1000000, 'B': 1000000000}
    unit = val[-1]
    if unit in lookup:
        try:
            number = float(val[:-1])
        except ValueError:
            # FIX: the original printed "error" and fell through with
            # `number` unbound, crashing here with a confusing NameError.
            raise ValueError("cannot convert %r to a number" % (val,))
        return lookup[unit] * number
    return float(val)
# Rebind the imported htmlParser class name to a singleton instance
# (shadows the class; everything below uses the instance).
htmlParser = htmlParser()
codeStr = 'YAHOO001'
# XPath scrape configuration for this source code, plus the ETF universe.
infoResult = XpathInfo.objects.filter(code = codeStr).values('url')
dataResult = XpathData.objects.filter(xpath_code = codeStr).filter(use = 'Y').values('xpath', 'xpath_index', 'insert_column', 'description')
etfResult = EtfInfo.objects.values('ticker', 'name')
# For each ETF x site x xpath: scrape one value and save an EtfData row.
for etfIdx in range(0, len(etfResult)) :
    tickerStr = etfResult[etfIdx]['ticker']
    print etfResult[etfIdx]['name'], tickerStr
    for siteIdx in range(0, len(infoResult)) :
        url = infoResult[siteIdx]['url']
        resultDic = {}
        # NOTE(review): hard-coded run date — presumably meant to be "today".
        resultDic['dt'] = '20160304'
        resultDic['ticker'] = tickerStr
        for dataIdx in range(0, len(dataResult)) :
            xPathStr = dataResult[dataIdx]['xpath']
            xPathIndex = dataResult[dataIdx]['xpath_index']
            columnName = str(dataResult[dataIdx]['insert_column'].encode('utf-8'))
            #result = htmlParser.xPathParse(url + ticker, xPathStr)
            result = htmlParser.getResult(url + tickerStr, xPathStr)
            # Strip thousands separators, then expand K/M/B suffixes.
            resultStr = result[xPathIndex].replace(',','')
            resultStr = convert(resultStr)
            resultDic[columnName] = resultStr
            print columnName, result[xPathIndex], resultStr
        p = EtfData(**resultDic)
        p.save()
        # etfData, created = EtfData.objects.get_or_create(**resultDic)
        #
        # if created :
        #     etfData.save()
        # else :
        #     etfData.update()
|
998,578 | 0c4917eb14ed4400818350ad4f621d7e0ee25190 | from app.community import community_blueprint
from app.community.forms import NewCommunityForm, UpdateCommunityForm
from app import db
from app.models import Community, CommunityParticipant
from flask import render_template, redirect, url_for, flash, abort
from flask_login import login_required, current_user
from app.communities.views import join_or_leave
@community_blueprint.route('/community/r/<string:name>')
def get_community(name):
    """Render a community page by name; 404 when it does not exist."""
    community = Community.query.filter_by(name=name).first()
    if community is None:
        abort(404)
    return render_template('community/get_community.html',
                           community=community, join_or_leave=join_or_leave)
@community_blueprint.route('/community/new', methods=['POST', 'GET'])
@login_required
def new_community():
    """Show the creation form; on a valid POST, create the community."""
    form = NewCommunityForm()
    if not form.validate_on_submit():
        return render_template('community/new_community.html', form=form)
    created = Community(name=form.name.data,
                        description=form.description.data,
                        user=current_user)
    db.session.add(created)
    db.session.commit()
    flash('Community created', 'success')
    return redirect(url_for('community.get_community', name=form.name.data))
@community_blueprint.route('/community/update/<string:name>', methods=['POST', 'GET'])
@login_required
def update_community(name):
    """Edit a community's description.

    FIX: a nonexistent community name now aborts with 404 — consistent
    with get/delete/join/leave — instead of crashing with AttributeError
    when `first()` returns None.
    """
    community = Community.query.filter_by(name=name).first()
    if community is None:
        abort(404)
    # NOTE(review): unlike delete_community there is no ownership check
    # here, so ANY logged-in user can edit any community — confirm whether
    # that is intended.
    form = UpdateCommunityForm()
    if form.validate_on_submit():
        community.description = form.description.data
        db.session.add(community)
        db.session.commit()
        flash('Community updated', 'success')
        return redirect(url_for('community.get_community', name=community.name))
    # Pre-fill the form with the current description on GET.
    form.description.data = community.description
    return render_template('community/update_community.html', form=form)
@community_blueprint.route('/community/delete/<string:name>')
@login_required
def delete_community(name):
    """Delete a community; only its owner may (403 otherwise, 404 if missing)."""
    community = Community.query.filter_by(name=name).first()
    if not community:
        abort(404)
    if current_user.id != community.user.id:
        abort(403)
    db.session.delete(community)
    db.session.commit()
    flash('Community deleted', 'danger')
    return redirect(url_for('main.home'))
@community_blueprint.route('/community/join/<string:name>')
@login_required
def join_community(name):
    """Add the current user as a participant of the named community."""
    community = Community.query.filter_by(name=name).first()
    if not community:
        abort(404)
    membership = CommunityParticipant.query.filter_by(
        user_id=current_user.id, community_id=community.id).first()
    if membership is None:
        membership = CommunityParticipant(community=community, user=current_user)
        db.session.add(membership)
        db.session.commit()
        flash(f'Joined r/{community.name} successfully', 'success')
    else:
        flash(f'Already a member of r/{community.name}', 'primary')
    return redirect(url_for('community.get_community', name=community.name))
@community_blueprint.route('/community/leave/<string:name>')
@login_required
def leave_community(name):
    """Remove the current user from the named community's participants."""
    community = Community.query.filter_by(name=name).first()
    if not community:
        abort(404)
    membership = CommunityParticipant.query.filter_by(
        user_id=current_user.id, community_id=community.id).first()
    if membership is None:
        flash(f'Cannot leave r/{community.name}, not a participant', 'danger')
    else:
        db.session.delete(membership)
        db.session.commit()
        flash(f'Left r/{community.name}', 'danger')
    return redirect(url_for('community.get_community', name=community.name))
|
998,579 | fef864d818e8ffdc8a0ae913e608abbfd953a27a | """
Check if a given binary tree is perfect or not
"""
class Node:
    """A binary tree node holding a value and left/right child links."""

    def __init__(self, val):
        self.val = val
        self.left = None
        self.right = None
def find_depth(node):
    """Return the number of nodes on the leftmost path from *node* down."""
    depth = 0
    current = node
    while current:
        depth += 1
        current = current.left
    return depth
def is_perfect_util(root, d, level=0):
    """Recursively check the subtree at *root* is perfect for depth *d*:
    every leaf sits exactly on the bottom level and every internal node
    has two children."""
    if not root:
        return True
    has_left = bool(root.left)
    has_right = bool(root.right)
    if not has_left and not has_right:
        # Leaf: it must lie exactly on the bottom level.
        return d == level + 1
    if not (has_left and has_right):
        # A single missing child breaks perfection immediately.
        return False
    return (is_perfect_util(root.left, d, level + 1)
            and is_perfect_util(root.right, d, level + 1))
def is_perfect(root):
    """Return True when the tree rooted at *root* is a perfect binary tree."""
    return is_perfect_util(root, find_depth(root))
# Demo: build a full two-level tree (all four leaves at depth 3) and verify
# the perfection check prints True.
root = Node(10)
root.left = Node(20)
root.right = Node(30)
root.left.left = Node(40)
root.left.right = Node(50)
root.right.left = Node(60)
root.right.right = Node(70)
print(is_perfect(root))
998,580 | aa5c8212e5a387713392fd12ed3eae71e9feefbc | from django.urls import path
from . import views
# Route table for this app.
# NOTE: '<single_slug>' is a catch-all and must stay the LAST entry, or it
# would shadow the more specific patterns declared before it.
urlpatterns = [path('', views.homepage,name="homepage"),
               path('canceled/',views.canceled, name="canceled"),
               path('place-order/',views.order_create, name="order"),
               path('<url_generator>/link/<link>', views.link, name='link'),
               path('<single_slug>', views.single_slug,name="single_slug"),
               ]
|
998,581 | abc1e3365ac30bd4c6739afabb2f47431288397d | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: chirpstack-api/as_pb/external/api/deviceProfile.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from chirpstack_api.as_pb.external.api import profiles_pb2 as chirpstack__api_dot_as__pb_dot_external_dot_api_dot_profiles__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5chirpstack-api/as_pb/external/api/deviceProfile.proto\x12\x03\x61pi\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x30\x63hirpstack-api/as_pb/external/api/profiles.proto\"H\n\x1a\x43reateDeviceProfileRequest\x12*\n\x0e\x64\x65vice_profile\x18\x01 \x01(\x0b\x32\x12.api.DeviceProfile\")\n\x1b\x43reateDeviceProfileResponse\x12\n\n\x02id\x18\x01 \x01(\t\"%\n\x17GetDeviceProfileRequest\x12\n\n\x02id\x18\x01 \x01(\t\"\xa6\x01\n\x18GetDeviceProfileResponse\x12*\n\x0e\x64\x65vice_profile\x18\x01 \x01(\x0b\x32\x12.api.DeviceProfile\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"H\n\x1aUpdateDeviceProfileRequest\x12*\n\x0e\x64\x65vice_profile\x18\x01 \x01(\x0b\x32\x12.api.DeviceProfile\"(\n\x1a\x44\x65leteDeviceProfileRequest\x12\n\n\x02id\x18\x01 \x01(\t\"\x83\x02\n\x15\x44\x65viceProfileListItem\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\'\n\x0forganization_id\x18\x03 \x01(\x03R\x0eorganizationID\x12*\n\x11network_server_id\x18\x04 \x01(\x03R\x0fnetworkServerID\x12.\n\ncreated_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1b\n\x13network_server_name\x18\x07 \x01(\t\"\x89\x01\n\x18ListDeviceProfileRequest\x12\r\n\x05limit\x18\x01 \x01(\x03\x12\x0e\n\x06offset\x18\x02 \x01(\x03\x12\'\n\x0forganization_id\x18\x03 \x01(\x03R\x0eorganizationID\x12%\n\x0e\x61pplication_id\x18\x04 \x01(\x03R\rapplicationID\"\\\n\x19ListDeviceProfileResponse\x12\x13\n\x0btotal_count\x18\x01 \x01(\x03\x12*\n\x06result\x18\x02 \x03(\x0b\x32\x1a.api.DeviceProfileListItem2\xae\x04\n\x14\x44\x65viceProfileService\x12l\n\x06\x43reate\x12\x1f.api.CreateDeviceProfileRequest\x1a 
.api.CreateDeviceProfileResponse\"\x1f\x82\xd3\xe4\x93\x02\x19\"\x14/api/device-profiles:\x01*\x12\x65\n\x03Get\x12\x1c.api.GetDeviceProfileRequest\x1a\x1d.api.GetDeviceProfileResponse\"!\x82\xd3\xe4\x93\x02\x1b\x12\x19/api/device-profiles/{id}\x12v\n\x06Update\x12\x1f.api.UpdateDeviceProfileRequest\x1a\x16.google.protobuf.Empty\"3\x82\xd3\xe4\x93\x02-\x1a(/api/device-profiles/{device_profile.id}:\x01*\x12\x64\n\x06\x44\x65lete\x12\x1f.api.DeleteDeviceProfileRequest\x1a\x16.google.protobuf.Empty\"!\x82\xd3\xe4\x93\x02\x1b*\x19/api/device-profiles/{id}\x12\x63\n\x04List\x12\x1d.api.ListDeviceProfileRequest\x1a\x1e.api.ListDeviceProfileResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/api/device-profilesBr\n!io.chirpstack.api.as.external.apiB\x12\x44\x65viceProfileProtoP\x01Z7github.com/brocaar/chirpstack-api/go/v3/as/external/apib\x06proto3')
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'chirpstack_api.as_pb.external.api.deviceProfile_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n!io.chirpstack.api.as.external.apiB\022DeviceProfileProtoP\001Z7github.com/brocaar/chirpstack-api/go/v3/as/external/api'
_DEVICEPROFILESERVICE.methods_by_name['Create']._options = None
_DEVICEPROFILESERVICE.methods_by_name['Create']._serialized_options = b'\202\323\344\223\002\031\"\024/api/device-profiles:\001*'
_DEVICEPROFILESERVICE.methods_by_name['Get']._options = None
_DEVICEPROFILESERVICE.methods_by_name['Get']._serialized_options = b'\202\323\344\223\002\033\022\031/api/device-profiles/{id}'
_DEVICEPROFILESERVICE.methods_by_name['Update']._options = None
_DEVICEPROFILESERVICE.methods_by_name['Update']._serialized_options = b'\202\323\344\223\002-\032(/api/device-profiles/{device_profile.id}:\001*'
_DEVICEPROFILESERVICE.methods_by_name['Delete']._options = None
_DEVICEPROFILESERVICE.methods_by_name['Delete']._serialized_options = b'\202\323\344\223\002\033*\031/api/device-profiles/{id}'
_DEVICEPROFILESERVICE.methods_by_name['List']._options = None
_DEVICEPROFILESERVICE.methods_by_name['List']._serialized_options = b'\202\323\344\223\002\026\022\024/api/device-profiles'
_CREATEDEVICEPROFILEREQUEST._serialized_start=204
_CREATEDEVICEPROFILEREQUEST._serialized_end=276
_CREATEDEVICEPROFILERESPONSE._serialized_start=278
_CREATEDEVICEPROFILERESPONSE._serialized_end=319
_GETDEVICEPROFILEREQUEST._serialized_start=321
_GETDEVICEPROFILEREQUEST._serialized_end=358
_GETDEVICEPROFILERESPONSE._serialized_start=361
_GETDEVICEPROFILERESPONSE._serialized_end=527
_UPDATEDEVICEPROFILEREQUEST._serialized_start=529
_UPDATEDEVICEPROFILEREQUEST._serialized_end=601
_DELETEDEVICEPROFILEREQUEST._serialized_start=603
_DELETEDEVICEPROFILEREQUEST._serialized_end=643
_DEVICEPROFILELISTITEM._serialized_start=646
_DEVICEPROFILELISTITEM._serialized_end=905
_LISTDEVICEPROFILEREQUEST._serialized_start=908
_LISTDEVICEPROFILEREQUEST._serialized_end=1045
_LISTDEVICEPROFILERESPONSE._serialized_start=1047
_LISTDEVICEPROFILERESPONSE._serialized_end=1139
_DEVICEPROFILESERVICE._serialized_start=1142
_DEVICEPROFILESERVICE._serialized_end=1700
# @@protoc_insertion_point(module_scope)
|
998,582 | c8469c324b7b65f8bfcaf02ea14948f13e77546d | import time
import cv2 as cv
import numpy as np
from main import Main
class CreateRectange():
    """Interactive OpenCV tool: drag over an image to sketch a region, crop
    it, optionally save it, and run it through the classifier from Main.

    Event flow: onclick (left press) arms mousemove; mousemove traces points
    while the mouse moves; the next left press closes the region, draws the
    bounding rectangle, crops it, and re-arms onclick.

    NOTE(review): the class name keeps its historical spelling ('Rectange')
    so existing importers keep working.
    """

    def __init__(self, imagePath, windowName, imread_method=-1, save=False):
        # imagePath: image file displayed in the window
        # windowName: OpenCV window title, also used to (re)register callbacks
        # imread_method: flag forwarded to cv.imread (-1 = load unchanged)
        # save: when True, write each crop to image/cropped<count>.jpg
        self.imagePath = imagePath
        self.windowName = windowName
        self.x_array = []    # x coords traced during the current drag
        self.y_array = []    # y coords traced during the current drag
        self.count = 0       # running index used for saved crop filenames
        self.imread_method = imread_method
        self.save = save

    def run(self):
        """Show the image and process mouse events until Esc (27) is pressed."""
        while True:
            # Load Image
            self.image = cv.imread(self.imagePath, self.imread_method)
            # Showing images
            cv.imshow(self.windowName, self.image)
            cv.setMouseCallback(self.windowName, self.onclick)
            if cv.waitKey(0) == 27:
                cv.destroyAllWindows()
                break

    def onclick(self, event, x, y, flags, param):
        """First left-click arms drawing by swapping the callback to mousemove."""
        if event == cv.EVENT_LBUTTONDOWN:
            # print('Left Button: {}, {}'.format(x, y))
            cv.setMouseCallback(self.windowName, self.mousemove, param={'x1': x, 'y1': y})

    def mousemove(self, event, x, y, flags, param):
        """Trace cursor motion; on the next left-click, crop and classify."""
        if event == cv.EVENT_MOUSEMOVE:
            # print('Mouse Moving: {}, {}'.format(x, y))
            # Draw a point
            cv.circle(self.image, (x,y), 5, (255, 255, 255), -1)
            # Show the image
            cv.imshow(self.windowName, self.image)
            # Adding each mousemove point to a array and getting the max and min x,y
            self.xmax, self.ymax, self.xmin, self.ymin = self.getMaxMinXY(x, y)
            # # Drawing the rectangle with max and min value
            # cv.rectangle(self.image, (self.xmax, self.ymax), (self.xmin, self.ymin), (0,1.0,0), 2)
            # cv.imshow(self.windowName, self.image)
        if event == cv.EVENT_LBUTTONDOWN:
            # Second click ends the drag: restore the arming callback.
            cv.setMouseCallback(self.windowName, self.onclick)
            # print('Left Button Moving: {}, {}'.format(x, y))
            # Drawing the rectangle with max and min value
            cv.rectangle(self.image, (self.xmax, self.ymax), (self.xmin, self.ymin), (0, 255, 255), 2)
            cv.imshow(self.windowName, self.image)
            # Clearning the array for the job is done
            self.x_array.clear()
            self.y_array.clear()
            # print('Cleared Array!')
            # Getting the image of the rectangle (+/-2 keeps the drawn border out)
            croped_image = self.image[self.ymin+2:self.ymax-2, self.xmin+2:self.xmax-2]
            # Converting it into GRAY
            croped_image = cv.cvtColor(croped_image, cv.COLOR_BGR2GRAY)
            if self.save:
                # Saving the cropped image
                cv.imwrite('image/cropped{}.jpg'.format(self.count), croped_image)
                print('Image Saved!')
                self.count += 1
            print('Sending to model..')
            # NOTE(review): a fresh Main() model is built and loaded per crop;
            # consider caching it on self if this proves slow.
            model = Main()
            model.load()
            preprocessed_image = model.preprocess(croped_image)
            prediction = model.predict(preprocessed_image)
            print("prediction: {}".format(prediction))
            cv.putText(self.image, str(prediction), (self.xmin -5, self.ymin-5), cv.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv.LINE_AA)
            cv.imshow(self.windowName, self.image)

    def getMaxMinXY(self, x, y):
        """Record (x, y) and return the traced bounding box padded by 20px."""
        self.x_array.append(x)
        self.y_array.append(y)
        # Getting xmax and ymax
        xmax = np.max(self.x_array) + 20
        ymax = np.max(self.y_array) + 20
        # Getting xmin and ymin
        xmin = np.min(self.x_array) - 20
        ymin = np.min(self.y_array) - 20
        return (xmax, ymax, xmin, ymin)
if __name__ == "__main__":
    # Launch the interactive drawing tool on the blank canvas image.
    tool = CreateRectange('image/black.jpg', 'Draw', save=False)
    tool.run()
998,583 | e0974752ef36086abc6149729494bec357d3e308 |
import torch
import numpy as np
from typing import List
from collections import namedtuple
from rlpyt.utils.seed import make_seed
from rlpyt.utils.quick_args import save__init__args
from reward_poisoned_drl.utils import list_to_norm, flatten_lists
ServerOptInfo = namedtuple("ServerOptInfo", ["numValidGrads", "meanGradNorm"])
AggClientOptInfo = namedtuple("AggClientOptInfo", ["attackerCost", "recallTarget0", "recallTarget1"]) # TODO make not hard-coded and non-singular (needs to be dynamic)
class FederatedServer:
    """
    Manages global model update routine using
    client gradients. 'obtain_gradients' and
    'optimize_agent' should be called
    iteratively inside the runner class.
    """

    def __init__(
            self,
            agent,
            sampler,
            clients_per_itr=1,
            global_lr=1.0,
            eval_discount=0.99
            ):
        """
        Store agent and sampler. This class subsumes the
        server algorithm componenent. We also setup helper
        structures for aggregated returns here.
        'clients_per_itr' specifies how many clients to sample
        gradients from at each global iteration.
        'global_lr' is the learning rate used for batch
        gradient descent of the global agent model.
        This is applied ~~AFTER~~ any client learning
        rate, so 1.0 is maintaining the average client
        learning rate.
        'eval_discount' is used for logging only. It's
        what the evaluation trajectory discounted return
        stats will be computed with.
        """
        save__init__args(locals())  # binds every ctor argument as a self attribute
        # for aggregating per-client gradients and logging info
        self.gradients = []
        self.client_traj_infos = []
        self.client_opt_infos = []
        self.opt_info_fields = tuple(f for f in ServerOptInfo._fields)

    def initialize(self, clients, n_itr, affinity, seed=None, rank=0, world_size=1):
        # Wire up clients and the evaluation sampler; must run before the
        # obtain_gradients/optimize_agent loop starts.
        if len(clients) < self.clients_per_itr:
            raise ValueError("'clients_per_itr' larger than number of clients")
        self.clients = clients
        self.num_clients = len(clients)
        self.n_itr = n_itr
        self.affinity = affinity
        self.seed = seed if seed is not None else make_seed()  # assumes global seed set in FederatedRunner
        self.rank = rank
        self.world_size = world_size
        self.sampler.initialize(
            agent=self.agent,  # Agent gets initialized in sampler.
            affinity=self.affinity,
            seed=self.seed + 1,
            bootstrap_value=False,  # no algo to use bootstrap
            traj_info_kwargs=self.get_traj_info_kwargs(),
            rank=rank,
            world_size=world_size
        )
        self.agent.to_device(self.affinity.get("cuda_idx", None))
        if world_size > 1:
            self.agent.data_parallel()
        self.num_updates = 0  # update counter; may not always update if client grads are None

    def obtain_gradients(self, itr):
        """
        Obtain one batch of sample gradients from clients.
        Start by randomly sampling client indices, then
        step and join to allow for parallelization
        (though compute may still happen in main
        thread depending on client class).
        """
        global_model = self._get_global_model()
        client_idxs = np.random.choice(self.num_clients, size=self.clients_per_itr, replace=False)
        # first step all models, allowing parallel execution if using parallelized clients
        for idx in client_idxs:
            self.clients[idx].step(itr, global_model)
        # gather gradients from client stepping; will block until finished if parallelized
        for idx in client_idxs:
            grad, traj_infos, opt_info = self.clients[idx].join()
            self._append_client_results(grad, traj_infos, opt_info)
        # return client results along with client_idxs
        gradients, client_traj_infos, client_opt_infos = self._get_aggregated_results()
        return gradients, client_idxs, client_traj_infos, client_opt_infos

    def optimize_agent(self, itr, gradients: List[List[torch.Tensor]], client_idxs: np.ndarray):
        """
        Optimize global server agent using list of gradient lists.
        The outer list contains one list for each sampled client.
        The inner per-client lists contain a gradient tensor for each model parameter.
        """
        self.agent.train_mode(itr)
        server_opt_info = ServerOptInfo(*([] for _ in range(len(ServerOptInfo._fields))))
        # filter non-None responses, if any
        valid_gradients = self._get_valid_gradients(gradients)
        if valid_gradients:
            # prepare mean of valid sampled model gradients
            device_gradients = self._gradients_to_device(valid_gradients)
            mean_gradients = self._get_mean_gradients(device_gradients)
            # apply gradients to server global model
            self._apply_gradient_descent(mean_gradients)
            # increment update counter
            self.num_updates += 1
        # return server-specific logging info
        server_opt_info.numValidGrads.append(len(valid_gradients))
        # mean_gradients only exists when valid_gradients is non-empty; the
        # conditional guards the reference, logging NaN otherwise.
        mean_grad_norm = list_to_norm(mean_gradients).item() if valid_gradients else float('nan')
        server_opt_info.meanGradNorm.append(mean_grad_norm)
        return server_opt_info

    def shutdown(self):
        # Only the sampler owns external resources needing teardown here.
        self.sampler.shutdown()

    def get_traj_info_kwargs(self):
        # Passed through to sampler trajectory-info construction (logging only).
        return dict(discount=self.eval_discount)

    def _get_global_model(self):
        # Returns the agent's model state_dict, unwrapping the "model" key so
        # any target network is not shipped to clients.
        agent_sd = self.agent.state_dict()
        if "model" in agent_sd.keys():
            agent_sd = agent_sd["model"]  # avoid passing target model
        return agent_sd

    def _load_global_model(self, state_dict):
        self.agent.load_state_dict(state_dict)

    def _append_client_results(self, grad, traj_infos, opt_info):
        # Buffer one client's step() results until _get_aggregated_results().
        self.gradients.append(grad)
        self.client_traj_infos.append(traj_infos)
        self.client_opt_infos.append(opt_info)

    def _get_aggregated_results(self):
        """
        Gather aggregated client results.
        Note we flatten the traj_infos since samples
        are generated with the newly loaded global model,
        and so are uniform over clients.
        """
        gradients = self.gradients
        client_traj_infos = flatten_lists(self.client_traj_infos)
        client_opt_infos = self._combine_client_opt_infos(self.client_opt_infos)
        # Reset buffers so the next iteration starts clean.
        self.gradients = []
        self.client_traj_infos = []
        self.client_opt_infos = []
        return gradients, client_traj_infos, client_opt_infos

    def _get_valid_gradients(self, gradients):
        """Sort out 'None' responses from not-ready clients."""
        valid_gradients = []
        for obj in gradients:
            if isinstance(obj, list):
                valid_gradients.append(obj)
            elif obj is not None:
                raise ValueError("Unrecognized value in gradients list;"
                    " must contain lists of tensors or None objects")
        return valid_gradients

    def _gradients_to_device(self, gradients):
        """Move list of gradient tensor lists to agent device."""
        device_gradients = []
        for grad_list in gradients:
            device_gradients.append([param.to(self.agent.device) for param in grad_list])
        return device_gradients

    def _get_mean_gradients(self, gradients):
        """
        Average gradients by reducing across clients.
        Expect all to be valid (no None values).
        """
        mean_gradients = []
        # zip(*gradients) groups the i-th parameter gradient from every client.
        for param_tup in zip(*gradients):
            param_stack = torch.stack(param_tup, dim=0)
            param_mean = torch.mean(param_stack, dim=0)
            mean_gradients.append(param_mean)
        return mean_gradients

    def _apply_gradient_descent(self, gradients):
        """
        Apply list of gradients (one tensor for each
        model param) to server's global agent model.
        """
        # Plain SGD step: theta <- theta - global_lr * grad, then reload.
        # Assumes *gradients* is ordered to match the state_dict iteration
        # order (true for the lists built in _get_mean_gradients).
        updated_sd = {}
        global_model = self._get_global_model()
        for name, param, grad in zip(global_model.keys(), global_model.values(), gradients):
            updated_sd[name] = param - self.global_lr * grad
        self._load_global_model(updated_sd)

    def _combine_client_opt_infos(self, client_opt_infos):
        """
        Converts list of client opt infos to single opt info
        with each key labelled by TODO
        """
        # TODO make not save using hard-coded keys
        # right now this extracts attacker stats from any clients which provide them
        # we can aggregate these since they're generated using the same loaded global model
        # WARNING: recall will be empty even when logging if no malicious clients were sampled that itr...
        attacker_cost_buff = []
        recall_target0_buff = []
        recall_target1_buff = []
        for opt_info in client_opt_infos:
            if getattr(opt_info, "attackerCost", None) is not None:
                attacker_cost_buff += [opt_info.attackerCost]
            if getattr(opt_info, "recallTarget0", None) is not None:
                recall_target0_buff += opt_info.recallTarget0
            if getattr(opt_info, "recallTarget1", None) is not None:
                recall_target1_buff += opt_info.recallTarget1
        return AggClientOptInfo(attacker_cost_buff, recall_target0_buff, recall_target1_buff)
class FederatedServerLogNotify(FederatedServer):
    """
    FederatedServer variant that tells each sampled client whether the
    runner is about to log, right before stepping it.  Use together with
    the matching runner and client classes.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.upcoming_log = False  # toggled by the runner through log_notify()

    def log_notify(self, flag: bool):
        """Record whether a logging iteration is imminent."""
        self.upcoming_log = flag

    def obtain_gradients(self, itr):
        """Same contract as the base method, but forwards the upcoming-log
        flag to every sampled client before its step() call."""
        model_state = self._get_global_model()
        sampled = np.random.choice(self.num_clients, size=self.clients_per_itr, replace=False)
        # Kick off all client steps first so parallelized clients overlap work.
        for client_i in sampled:
            client = self.clients[client_i]
            client.log_notify(self.upcoming_log)  # log notify injected before step
            client.step(itr, model_state)
        # Join in the same order; blocks until each client finishes.
        for client_i in sampled:
            grad, traj_infos, opt_info = self.clients[client_i].join()
            self._append_client_results(grad, traj_infos, opt_info)
        grads, traj_infos, opt_infos = self._get_aggregated_results()
        return grads, sampled, traj_infos, opt_infos
|
998,584 | 5047122f4c37a988fb1ba490eb69dd5bf28cc711 | #!/usr/bin/env python
# encoding=utf8
"""Personal BGG Ratings."""
import argparse
import re
import sys
from datetime import date
from functools import cmp_to_key
from operator import itemgetter
import requests
import requests_cache
import xmltodict
import pandas
from dateutil.relativedelta import relativedelta
sys.getdefaultencoding()
def get_args():
    """Build the CLI parser for the BGG rankings tool and return parsed args."""
    parser = argparse.ArgumentParser(description='BGG Game Rankings', add_help=False)
    parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
                        help='Show this help message and exit.')
    parser.add_argument('-v', '--version', action='version',
                        version='%(prog)s 0.0.1',
                        help="Show program's version number")
    # Remaining options share a common shape, so register them from a table.
    option_table = (
        (('-u', '--user'), dict(help='BGG username', required=True, metavar='')),
        (('-m', '--months'), dict(help='Last X months', required=True, metavar='')),
        (('-c', '--count'), dict(help='Number of results', required=False,
                                 metavar='', default=12)),
        (('-d', '--detailed'), dict(help='Detailed output', required=False,
                                    action='store_true')),
    )
    for flags, kwargs in option_table:
        parser.add_argument(*flags, **kwargs)
    return parser.parse_args()
def request_data(url):
    """Request data from boardgamegeek, returning the response body text.

    BGG queues heavy requests and answers "try again later"; this keeps
    retrying until a clean HTTP 200 arrives.
    NOTE(review): the retry loop has no sleep/backoff, so it can hammer the
    API while a request is queued -- consider adding a short delay.
    """
    requests_cache.install_cache('data_cache')  # transparent on-disk HTTP cache
    while True:
        data = requests.get(url)
        if not data.status_code == 200 or "try again later" in data.text:
            continue
        else:
            break
    return data.text
def get_pervious_date(months_in_past):
    """Return the ISO date string *months_in_past* calendar months before today.

    (Function name keeps its historical 'pervious' typo for compatibility
    with existing callers.)
    """
    past = date.today() - relativedelta(months=months_in_past)
    return str(past)
def multikeysort(items, columns):
    """Return *items* (dicts) sorted on multiple *columns*.

    A plain column name sorts descending; a leading '-' sorts ascending
    (this matches the call sites, which want the highest counts first).
    Ties on the first column fall through to the next one.
    """
    comparers = [((itemgetter(col[1:].strip()), 1) if col.startswith('-') else
                  (itemgetter(col.strip()), -1)) for col in columns]

    def comparer(left, right):
        for _fn, mult in comparers:
            result = ((_fn(left) > _fn(right)) - (_fn(left) < _fn(right)))
            if result:
                return mult * result
        # BUG FIX: this used to return None, which makes cmp_to_key raise
        # "TypeError: '<' not supported between instances of 'NoneType' and
        # 'int'" whenever two items tie on every column.  A cmp function
        # must return 0 to mean "equal".
        return 0

    return sorted(items, key=cmp_to_key(comparer))
def get_plays(username, months):
    """Return the titles *username* played in the last *months* months.

    Each logged play contributes `quantity` entries, so the returned list
    can be fed straight into a value-count to get true play counts.
    """
    min_date = get_pervious_date(int(months))
    plays = []
    baseurl = 'https://www.boardgamegeek.com/xmlapi2/'
    url = baseurl + (f"plays?username={username}&mindate={min_date}")
    data = request_data(url)
    doc = xmltodict.parse(data)
    games = doc['plays'].get('play', [])
    # xmltodict yields a bare dict (not a list) when there is exactly one
    # <play> element; normalise so the loop always sees a list.
    if isinstance(games, dict):
        games = [games]
    for game in games:
        title = game['item']['@name'].strip()
        quantity = int(game['@quantity'])
        # BUG FIX: quantity was parsed but never used, so a play logged with
        # quantity=5 only counted once in the final tally.
        plays.extend([title] * quantity)
    return plays
def display_hot_games(plays):
    """Print a play-count table (most played first) for the *plays* titles."""
    header = f"{'Game':<6}{'Plays'}"
    print(header)
    counts = pandas.Series(plays).value_counts()
    print(counts)
if __name__ == "__main__":
    # CLI entry point: parse args, fetch the user's plays, print the tally.
    ARGS = get_args()
    display_hot_games(get_plays(ARGS.user, ARGS.months))
|
998,585 | 746b496ffc541d5f7e45065d76a2c4f7fdba2a49 | #!/usr/local/bin/python
import clusters
# Load blog term-frequency data, project it to 2-D with multidimensional
# scaling, and render the labelled scatter plot to 'blogs.jpg'.
blog,words,data=clusters.readfile('blogdata.txt')
coordinates = clusters.scaledown(data)
clusters.draw2d(coordinates, blog, jpeg='blogs.jpg')
|
998,586 | dfa3f3b033f360bd2c4e5b76e28405941aee72f7 | from django.shortcuts import render
import spotipy
from spotipy.oauth2 import SpotifyClientCredentials
# Create your views here.
def index(request):
    """Spotify track search view.

    POST: searches tracks for the submitted 'uri' text (template key
    'results'); GET: shows a default 'Ed Sheeran' listing (template key
    'result' -- the template distinguishes the two).
    """
    # SECURITY NOTE(review): credentials are hard-coded here; move them to
    # settings / environment variables and rotate the leaked secret.
    # Construct the client once instead of duplicating it per branch.
    spotify = spotipy.Spotify(client_credentials_manager=SpotifyClientCredentials(
        client_id='a338296a311d420d80e7ee79b1a85ced',
        client_secret='68d45066f0064f84b8b44769b0e7db66'))
    if request.method == 'POST':
        artist_uri = request.POST.get('uri')
        results = spotify.search(q=artist_uri, limit=20)
        final_results = results['tracks']['items']
        return render(request, 'index.html', {'results': final_results})
    result = spotify.search(q='Ed Sheeran', limit=10)
    final_results2 = result['tracks']['items']
    return render(request, 'index.html', {'result': final_results2})
998,587 | 86308b443fd428fa5ea431b9f0d8c10d0051e704 | import matplotlib.pyplot as plt
import matplotlib.path as mpath
import matplotlib.patches as mpatch
import numpy as np
import string
# Read one "x y" coordinate pair per line from data.txt.
x = []
y = []
# BUG FIX: string.split()/string.atof() were removed in Python 3; the
# equivalent str methods and float() work on both Python 2 and 3.  'with'
# also guarantees the file is closed even if parsing fails.
with open('data.txt', 'r') as fh:
    for line in fh:
        data = line.split()
        if not data:
            continue  # tolerate blank lines
        x.append(float(data[0]))
        y.append(float(data[1]))

# Unit-hexagon vertex template: 6 vertices plus the closing repeat.
codes = [1, 2, 2, 2, 2, 2, 79]  # MOVETO, 5x LINETO, CLOSEPOLY

fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(111)

# Build one compound Path containing a hexagon translated to every (x, y).
tverts = []
tcodes = []
for xi, yi in zip(x, y):
    tverts += [[np.cos(theta) + xi, np.sin(theta) + yi]
               for theta in np.arange(7) * 360. / 6 / 180 * np.pi]
    tcodes += codes
thexagon = mpath.Path(tverts, tcodes)
tpathpatch = mpatch.PathPatch(thexagon, facecolor='red', edgecolor='green')
ax.add_patch(tpathpatch)
ax.set_title('A compound path')
ax.viewLim.update_from_data(np.array([-20, 20]), np.array([-20, 20]))
plt.show()
|
998,588 | 6d3bd26aae00e05edd4e9d60864b87cdeef3045b | #!/usr/bin/env python
import numpy as np
import logging
import itertools
# Metric labels searched for (verbatim) in each log file; the report prints
# one column per entry, in this exact order.
keys=[
    "Length:",
    "Trace:",
    "Norm_1",
    "Norm_Frobenius",
    "Norm_Infinity",
    "Memory:",
    "Memory_Relaxation_icntl14",
    "Nonzeros:",
    "NonzerosAfterAnalysis_20",
    "NonzerosAfterFactorization_29",
    "TotalMemoryAfterAnalysis_17",
    "TotalMemoryAfterAnalysis_19",
    "MaxMemoryAfterAnalysis_16",
    "MaxMemoryAfterAnalysis_18",
    "MatCholeskyFactorSymbolic",
    "MatCholeskyFactorNumeric",
]
def list2Str(list1):
    """Flatten *list1* into a space-separated string by stripping the list
    punctuation characters ([ ] , ' :) from Python's repr of it."""
    text = str(list1)
    for ch in ("[", "]", ",", "'", ":"):
        text = text.replace(ch, "")
    return text
def readLogDirectory():
    """Parse every 'log.*' file in the current directory; returns 0."""
    import glob
    # BUG FIX: removed dead "global filename" -- it declared a name that is
    # never defined or used anywhere, a leftover from an earlier revision.
    for myfile in glob.glob("log.*"):
        readLogFile(myfile)
    return 0
def readLogFile(logfile):
    """Parse one solver log file and print a space-separated result row:
    filename, error code, p, c, order, then one value per entry in `keys`.

    Error codes: OK (clean), ER (an Error/ERROR line seen), SL (slow --
    "Performance may be degraded" seen).
    NOTE(review): assumes file names shaped like log.<tag>.p<P>c<C>...<order>,
    e.g. the second dot-component after 'log' encodes p/c/order -- confirm
    against the job scripts that produce these logs.
    """
    errorCode="OK"
    values=["NA"]*len(keys)  # one slot per key; stays "NA" if never matched
    # Recover run parameters from the file name itself.
    a=logfile.split('.')
    order=a[2][-1]
    a=a[2].split('c')
    p=a[0].strip('p')
    c=a[1][0:2].strip('o')
    logging.debug("Reading file {0}".format(logfile))
    with open(logfile) as f:
        while True:
            line = f.readline()
            if not line:
                break
            else:
                for i in range(len(keys)):
                    if keys[i] in line:
                        # The value is the last whitespace-separated token.
                        a=line.split()
                        values[i]=a[-1]
                if "Error" in line or "ERROR" in line:
                    errorCode="ER"
                if "Performance may be degraded" in line:
                    errorCode="SL"
    # Python 2 print statement (this file predates Python 3).
    print logfile,errorCode,p,c,order,list2Str(values)
    return 0
def initializeLog(debug):
    """Configure the root logger to write to stdout.

    *debug* True selects DEBUG level, otherwise INFO.
    """
    import sys
    if debug: logLevel = logging.DEBUG
    else: logLevel = logging.INFO
    logger = logging.getLogger()
    logger.setLevel(logLevel)
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logLevel)
    # BUG FIX: the handler was created and configured but never attached to
    # the logger, so records never reached stdout through it.
    logger.addHandler(ch)
    logging.debug("Start in debug mode:")
def getArgs():
    """Parse command-line options.

    With no FILE argument, every log.* file in the current directory is read.
    """
    import argparse
    parser = argparse.ArgumentParser(description=
    """
    Without any input: reads Analyze log files, log.*.
    """
    )
    parser.add_argument('input', metavar='FILE', type=str, nargs='?',
                        help='Log file to be parsed. All log files will be read if a log file is not specified.')
    parser.add_argument('-d', '--debug', action='store_true', help='Print debug information.')
    # NOTE(review): -n is accepted but never referenced by main(); presumably
    # reserved for a plotting mode -- confirm before removing.
    parser.add_argument('-n','--nCoresPerSlice', type=int, default=0,nargs='?',
                        help='Speedup, efficiency and profile plots for specific number of cores per slice')
    return parser.parse_args()
def main():
    """Entry point: parse CLI args, then read one log file or all log.* files."""
    args=getArgs()
    initializeLog(args.debug)
    # Header row of the space-separated output table (Python 2 print).
    print "file error p c o",list2Str(keys)
    if args.input is not None:
        logFile=args.input
        readLogFile(logFile)
    else:
        readLogDirectory()

if __name__ == "__main__":
    main()
|
998,589 | f780e8ccbeb616f7a58b4386ec4141f82dbfd2c3 | # This example show how to use inline keyboards and process button presses
import telebot
import time
import emoji
import mysql.connector
import requests
from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton
TELEGRAM_TOKEN = '1607683994:AAGotYV7rp5cixLimS33rr0P1ir3-BBm6es'
db = mysql.connector.connect(host="localhost",user="root",password="you",database="smndubot")
cursor = db.cursor()
bot = telebot.TeleBot(TELEGRAM_TOKEN)
@bot.message_handler(commands=["start"])
def send_welcome(message):
    """Reply to /start with usage instructions."""
    bot.send_message(
        message.chat.id, "Welcome to SmndUnibot. \n To use this bot use the following commands : \n /newpost = create a new post. \n /editpost = edit a post.\nThis bot accepts only text and images in the post."
    )
def reply(message):
    """Persist an incoming post (text + optional photo) and return the
    follow-up prompt asking the user for reaction-button emojis.

    The photo (largest size, if any) is downloaded into img/ and the post
    row is stored in the `messages` table keyed by sender and message id.
    """
    sender_id = message.from_user.id
    username = message.chat.username
    message_id = message.message_id
    chat_id = message.chat.id
    first_name = message.chat.first_name
    text = message.text

    # BUG FIX: telebot Message objects are not subscriptable, so the old
    # `"photo" in message["message"]` raised TypeError; test the photo
    # attribute instead (a list of PhotoSize, largest last).
    if message.photo:
        attachments = 1
        attachments_id = message.photo[-1].file_unique_id
        # save photo to the img/ directory
        photo_path = "img/" + attachments_id + ".jpg"
        file_info = bot.get_file(message.photo[-1].file_id)
        # BUG FIX: the download URL needs file_info.file_path, not the File
        # object itself.
        file = requests.get('https://api.telegram.org/file/bot{0}/{1}'.format(TELEGRAM_TOKEN, file_info.file_path))
        with open(photo_path, 'wb') as f:
            f.write(file.content)
    else:
        attachments = 0
        attachments_id = ""

    # BUG FIX: the original INSERT stored the literal column names as values
    # and committed on an undefined 'mydb'.  Use a parameterized query (also
    # prevents SQL injection) and commit on the real connection 'db'.
    sql = ("INSERT INTO messages(sender_id, sender_name, sender_username, message_id, "
           "chat_id, text, attachments, attachments_id) "
           "VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
    cursor.execute(sql, (sender_id, first_name, username, message_id,
                         chat_id, text, attachments, attachments_id))
    db.commit()

    markup="Now please send us the reaction button emojis separated by (*) Eg:😍*🥳*💕 will create three buttons for you. You may add max of 6 buttons. If you want to edit the above post, type : /editpost. "
    return (markup)
@bot.message_handler(commands=["newpost"])
def send_welcome1(message):
    """Reply to /newpost with a prompt to send the post content."""
    bot.send_message(message.chat.id, "Create a new post. Your post can contain images or text.")
@bot.message_handler(commands=["editpost"])
def send_welcome2(message):
    """Reply to /editpost -- editing is not implemented yet."""
    bot.send_message(
        message.chat.id, "This will be added soon."
    )
def gen_markup(tk):
    """Build a one-row inline keyboard with one button per emoji in *tk*.

    Each button's callback_data is the emoji itself, which callback_query
    echoes back in the acknowledgement toast.
    """
    # Replaced a manual index-based while loop (and a leftover debug print)
    # with a comprehension; behavior toward Telegram is unchanged.
    buttons = [InlineKeyboardButton(t, callback_data=t) for t in tk]
    return InlineKeyboardMarkup([buttons])
@bot.message_handler(func=lambda message: True)
def message_handler(message):
    """Treat an incoming message as the emoji list for the sender's previous
    post and publish that post with reaction buttons."""
    # SECURITY/BUG FIX: the query was built by string concatenation from
    # message fields; use a parameterized query instead.
    sql = "select * from messages where sender_id=%s AND message_id=%s"
    cursor.execute(sql, (message.from_user.id, message.message_id - 1))
    records = cursor.fetchall()
    # BUG FIX: the original tested len(records != 0), i.e. len(True), which
    # raises TypeError; test the list itself.
    if records:
        attachment = records[0][6]
        photo = records[0][7]
        caption = records[0][5]
        tk = message.text.split('*')
        if attachment == 0:
            bot.send_message(message.chat.id, caption, reply_markup=gen_markup(tk))
        else:
            # BUG FIX: pyTelegramBotAPI exposes send_photo, not sendPhoto.
            bot.send_photo(message.chat.id, photo, caption=caption, reply_markup=gen_markup(tk))
    else:
        bot.send_message(message.chat.id, "The above message can't be recognised. Please follow the guidelines.")
@bot.callback_query_handler(func=lambda call: True)
def callback_query(call):
    """Acknowledge a reaction-button press with a toast naming the emoji."""
    kw = call.data
    print(kw)
    ack = "you have " + kw + " this post."
    print(ack)
    bot.answer_callback_query(call.id, ack)
"""
import telebot
import time
bot = telebot.TeleBot("1607683994:AAGotYV7rp5cixLimS33rr0P1ir3-BBm6es")
def gen_markup():
markup = InlineKeyboardMarkup()
markup.row_width = 2
markup.add(InlineKeyboardButton("Yes", callback_data="cb_yes"),
InlineKeyboardButton("No", callback_data="cb_no"))
return markup
@bot.callback_query_handler(func=lambda call: True)
def callback_query(call):
if call.data == "cb_yes":
bot.answer_callback_query(call.id, "Answer is Yes")
elif call.data == "cb_no":
bot.answer_callback_query(call.id, "Answer is No")
@bot.message_handler(func=lambda message: True)
def message_handler(message):
bot.send_message(message.chat.id, "Yes/no?", reply_markup=gen_markup())
@bot.message_handler(commands=["start"])
def send_welcome(message):
bot.send_message(
message.chat.id, "Send us the new post. Post can contain :images, text."
)
@bot.message_handler(content_types=["text", "image"])
def handle_text_image(message):
tk=message.text
tk=tk.split('*')
keyboard = telebot.types.InlineKeyboardMarkup()
for i in tk:
keyboard.add(telebot.types.InlineKeyboardButton(i),url="dshfgsjh.dsfds")
bot.send_message(message.chat.id,"post",reply_markup=keyboard)
pass
"""
# Keep the bot alive: polling() raises on network hiccups, so back off for
# 15 seconds and reconnect.
# NOTE(review): the exception is swallowed silently -- consider logging it
# before retrying so outages are diagnosable.
while True:
    try:
        bot.polling()
    except Exception:
        time.sleep(15)
|
998,590 | 5d1bafb0b905e33c86ed6e5dabbd063c6ef61c10 | import smtplib
from email.message import EmailMessage
# Body of the test message.
email_content = '''Dear Sir/Madam,
I am sending you an e-mail with Python. I hope you like it.
Kind regards,
David
'''

# Build the message (self-addressed test mail).
email = EmailMessage()
email['Subject'] = 'Test email'
email['From'] = 'dcorvaisier8@gmail.com'
email['To'] = 'dcorvaisier8@gmail.com'
email.set_content(email_content)

# SECURITY NOTE(review): the Gmail password is hard-coded below -- move it
# to an environment variable / app password and rotate the leaked credential.
smtp_connector = smtplib.SMTP(host='smtp.gmail.com', port=587)
smtp_connector.starttls()  # upgrade the connection to TLS before logging in
smtp_connector.login('dcorvaisier8@gmail.com', 'supuration')
smtp_connector.send_message(email)
smtp_connector.quit()
|
998,591 | e926e6c13731028dc48f90531477d1ffbcc9d7aa | class CanaryMiddleware:
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
environ['repoze.debug.canary'] = Canary()
return self.app(environ, start_response)
class Canary(object):
    """Marker object placed in the WSGI environ to track request lifetime."""
def make_middleware(app, global_conf):
    """Paste filter-app factory: wrap *app* in CanaryMiddleware."""
    wrapped = CanaryMiddleware(app)
    return wrapped
|
998,592 | f0fd4b1f8b21ba0abaf5875ffe62d796da8a92e4 | # code: utf-8
# python2
import mysql.connector, os, tarfile, shutil, quick_clone.util as util
def execute(user, password, host, port, database, dir, tables):
    """Export *database* (or only *tables*, a comma-separated string or None
    for all) to flat files under *dir*, pack them into <dir><database>.tar.gz
    and delete the temporary directory tree.  (Python 2 module.)"""
    # ensure path contains forward slash
    if dir[-1:] != "/":
        dir += "/"
    if not os.path.isdir(dir):
        print "ERROR: File Location does not exist"
        quit(1)
    # connect to database
    cnx = util.connect_db(user, password, host, port, database)
    # cursor
    cursor = cnx.cursor()
    # export database to flat files
    export_database(cursor, database, dir, tables)
    # close cursor
    cursor.close()
    # end transaction
    cnx.close()
    # tarball
    print "Creating tarball..."
    tar = tarfile.open(dir + database + '.tar.gz', 'w:gz')
    tar.add(dir + database, arcname=database)
    tar.close()
    # remove dirs
    print "Removing temporary dirs..."
    shutil.rmtree(dir + database, ignore_errors=True)
    print "Quick Clone export successful!"
def export_database(cursor, database, dir, tables):
print "Table export initiated..."
tables = get_tables(cursor, tables)
for table in tables:
try:
# create a directory based on table name
os.makedirs(dir + database + '/' + table)
create_schema_file(cursor, database, table, dir)
create_data_file(cursor, database, table, dir)
create_checksum_file(cursor, database, table, dir)
except mysql.connector.Error as err:
print "ERROR: Failed to execute SQL command: {}".format(err)
quit(1)
# except:
# print "ERROR: Something went really wrong"
# quit(1)
print "Table export finished..."
def get_tables(cursor, tables):
print "Preparing table list..."
if tables == None:
# build list of tables from db
tables = []
cursor.execute("SHOW TABLES")
for table in cursor.fetchall():
tables += [table[0]]
# don't proceed if no tables to clone
if tables == []:
print "No tables in database"
quit(1)
else:
print "All db tables added to table list..."
return tables
else:
# split comma separated list of tables into list
tables = tables.split(',')
print "Requested tables added to table list..."
return tables
def create_schema_file(cursor, database, table, dir):
    """Write the table's CREATE TABLE statement to dir/database/table/schema.sql.

    NOTE(review): *table* is concatenated into the SQL string; identifiers
    cannot be bound as parameters, so the name must come from a trusted
    source (here: SHOW TABLES or the CLI) — validate/quote upstream if
    this is ever exposed to untrusted input.
    """
    cursor.execute("SHOW CREATE TABLE " + table)
    # Result rows are (table_name, create_statement); take the statement.
    show_create = cursor.fetchall()[0][1]
    # 'with' guarantees the file is closed even if the write raises.
    with open(dir + database + '/' + table + '/schema.sql', 'w') as file_schema:
        file_schema.write(show_create)
def create_data_file(cursor, database, table, dir):
    """Dump the table's rows server-side via SELECT ... INTO OUTFILE.

    NOTE(review): INTO OUTFILE is executed by the MySQL *server*, so *dir*
    must be writable by the server process and the target file must not
    already exist.  The table name and path are concatenated into the SQL
    string — acceptable only because they come from a trusted source.
    """
    cursor.execute("SELECT * FROM " + table + " INTO OUTFILE '" + dir + database + '/' + table + "/data'")
# create a file containing the checksum of the table dump
def create_checksum_file(cursor, database, table, dir):
    """Write the table's CHECKSUM TABLE value to dir/database/table/checksum."""
    cursor.execute("CHECKSUM TABLE " + table)
    # Result rows are (table_name, checksum); take the checksum value.
    checksum = cursor.fetchall()[0][1]
    # 'with' guarantees the file is closed even if the write raises.
    with open(dir + database + '/' + table + '/checksum', 'w') as file_checksum:
        file_checksum.write(str(checksum))
998,593 | 22c984cd081d5f57da24aae5d9be777f4ed585d4 | import unittest
import os
from src.disjoint_set import DisjointSet
class disjoint_set_tests(unittest.TestCase):
    """Unit tests for the DisjointSet (union-find) structure.

    ``union(a, b)`` is expected to return the size of the merged set and
    ``is_connected(a, b)`` to report whether both vertices share a set.
    """

    def test_not_connected_when_set_is_empty(self):
        ds = DisjointSet()
        for a, b in ((1, 2), (2, 3)):
            self.assertFalse(ds.is_connected(a, b))

    def test_not_connected_when_no_vertex_in_set(self):
        ds = DisjointSet()
        ds.union(1, 2)
        for a, b in ((2, 3), (3, 4)):
            self.assertFalse(ds.is_connected(a, b))

    def test_can_connect_two_vertices(self):
        ds = DisjointSet()
        self.assertEqual(2, ds.union(1, 2))
        self.assertTrue(ds.is_connected(1, 2))
        self.assertTrue(ds.is_connected(2, 1))

    def test_can_connect_two_sets(self):
        ds = DisjointSet()
        self.assertEqual(2, ds.union(1, 2))
        self.assertTrue(ds.is_connected(1, 2))
        self.assertEqual(2, ds.union(3, 4))
        self.assertTrue(ds.is_connected(3, 4))
        # {1, 2} and {3, 4} are still two separate components.
        for a in (1, 2):
            for b in (3, 4):
                self.assertFalse(ds.is_connected(a, b))
        # A union across the components merges everything into one set.
        self.assertEqual(4, ds.union(2, 3))
        for a in (1, 2):
            for b in (3, 4):
                self.assertTrue(ds.is_connected(a, b))

    def test_can_connect_two_sets_in_a_loop(self):
        ds = DisjointSet()
        # Closing the cycle 1-2-3-4-1 must not grow the set beyond 4.
        pairs = ((1, 2), (2, 3), (3, 4), (4, 1))
        expected_sizes = (2, 3, 4, 4)
        for size, (a, b) in zip(expected_sizes, pairs):
            self.assertEqual(size, ds.union(a, b))

    def test_can_process_large_input_from_file(self):
        ds = DisjointSet()
        actual = []
        largest = 0
        for pair in self.__load_pairs():
            # Track the running maximum component size after each union.
            largest = max(largest, ds.union(pair[0], pair[1]))
            actual.append(largest)
        self.assertEqual(self.__load_expected(), actual)

    def __load_pairs(self):
        """Read the '01.input' fixture: a count line, then one pair per line."""
        path = os.path.join(os.path.dirname(__file__),
                            'test_resources', 'disjoint_set', '01.input')
        with open(path) as f:
            count = int(f.readline().split()[0])
            return [list(map(int, f.readline().rstrip().split()))
                    for _ in range(count)]

    def __load_expected(self):
        """Read the '01.result' fixture: one expected max-size per line."""
        path = os.path.join(os.path.dirname(__file__),
                            'test_resources', 'disjoint_set', '01.result')
        with open(path) as f:
            return [int(line) for line in f.readlines()]
# Run the suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
998,594 | 82bb8ca46eb1a08baf68ee81bf63723f1f5a355f | from onegov.onboarding.models import Assistant
def test_assistant():
    """Exercise Assistant step registration, ordering and progress tracking."""
    class FooAssistant(Assistant):
        @Assistant.step()
        def first_step(self, request):
            return {'step': 1}

        @Assistant.step()
        def second_step(self, request):
            return {'step': 2}

        @Assistant.step()
        def third_step(self, request):
            return {'step': 3}

    # The same checks ran three times with only the step number varying in
    # the original; loop instead of triplicating the block.  Comparing the
    # flags with == keeps the original's strict bool check (was '== True').
    for number in (1, 2, 3):
        foo = FooAssistant(None, current_step_number=number)
        assert foo.current_step.handle_view(None, None) == {'step': number}
        assert foo.progress == (number, 3)
        assert foo.is_first_step == (number == 1)
        assert foo.is_last_step == (number == 3)
|
998,595 | 319a2c4831c9c76e29bc153f6b15db21bf9ad64b | from django.apps import AppConfig
class TicktockConfig(AppConfig):
    """Django application configuration for the ``ticktock`` app."""
    name = 'ticktock'
|
998,596 | e88f50a3d9f2adc515f7fa75c4e8532a45948f54 | class SampleClass:
    @classmethod
    def shout(cls):
        """Print a fixed confirmation message (demonstrates @classmethod)."""
        print("I was called!")
|
998,597 | d21e188cfdf19adab2861264139c3534550cd1a0 | import tkinter as tk
import tkinter.ttk as ttk
from tkinter.scrolledtext import ScrolledText
from src.UnoServer import UnoServer
from src.ChatServer import ChatServer
from src.PingServer import PingServer
import threading
RECV_BUFFER = 1024
# Main part of the app. Handles transition from the connection window to the game window and stores the player
# dictionary as well as the socket to connect to the server.
# This code was heavily influenced by: http://stackoverflow.com/questions/7546050/switch-between-two-frames-in-tkinter
# since I am new to tkinter.
class MainApp(tk.Tk):
    """Root window of the PyUno server GUI.

    Owns the three server objects (game, chat, ping) and a registry of
    stacked frames that share one grid cell; ``show_frame`` raises the
    requested one.
    """

    def __init__(self, *args, **kwargs):
        tk.Tk.__init__(self, *args, **kwargs)
        self.uno_server = UnoServer()
        self.chat_server = ChatServer()
        self.ping_server = PingServer()
        self.wm_title("PyUno Server")

        # Main frame and config: a single expandable cell for stacked frames.
        container = ttk.Frame(self)
        container.pack(side="top", fill="both", expand=True)
        container.grid_rowconfigure(0, weight=1)
        container.grid_columnconfigure(0, weight=1)

        # Registry of frames in the app, keyed by frame class.
        self.frames = {}
        frame = ServerWindow(parent=container, controller=self)
        self.frames[ServerWindow] = frame
        frame.grid(row=0, column=0, sticky="nsew")

        # Show the connection window first.  (The original issued this call
        # twice in a row; the duplicate was redundant and has been removed.)
        self.show_frame(ServerWindow)

    def show_frame(self, cont):
        """Raise the frame registered under the class *cont* to the top."""
        frame = self.frames[cont]
        frame.tkraise()
class ServerWindow(tk.Frame):
    """Connection-setup frame: collects the three port numbers and the
    player count, then starts the Uno/chat/ping servers."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        # Controller references the main app; used to access its servers
        # and frame registry.
        self.controller = controller
        self.parent = parent
        # Entry widgets and their labels.
        port_label = ttk.Label(self, text="Port Number: ")
        self.port_entry = ttk.Entry(self)
        chatport_label = ttk.Label(self, text="Chat Port Number: ")
        self.chatport_entry = ttk.Entry(self)
        pingport_label = ttk.Label(self, text="Ping Port Number: ")
        self.pingport_entry = ttk.Entry(self)
        playerno_label = ttk.Label(self, text="Number of players: ")
        self.playerno_entry = ttk.Entry(self)
        # Lay out labels (right-aligned) and entries on a 2-column grid.
        port_label.grid(row=0, sticky='e')
        self.port_entry.grid(row=0, column=1)
        self.chatport_entry.grid(row=1, column=1)
        self.pingport_entry.grid(row=2, column=1)
        chatport_label.grid(row=1, sticky='e')
        pingport_label.grid(row=2, sticky='e')
        playerno_label.grid(row=3, sticky='e')
        self.playerno_entry.grid(row=3, column=1)
        start_button = ttk.Button(self, text="Start server", command=self.start)
        start_button.grid(row=4, column=1)
        self.insert_defaults()

    # Start accepting players into the game.
    def start(self):
        """Open the three server sockets, spawn the chat thread, block until
        every player has joined, then switch to the ping window.

        NOTE(review): accept_players runs on the Tk main thread, so the GUI
        is unresponsive until all players connect.
        """
        uno_server = self.controller.uno_server
        chat_server = self.controller.chat_server
        ping_server = self.controller.ping_server
        server_port = int(self.port_entry.get())
        uno_server.create_socket(server_port)
        ping_server.create_socket(int(self.pingport_entry.get()))
        chat_server.create_socket(int(self.chatport_entry.get()))
        # Chat accepts clients on its own thread so the main thread can
        # accept game players concurrently below.
        chat_thread = threading.Thread(target=chat_server.accept_chats,
                                       args=(int(self.playerno_entry.get()), uno_server.players))
        chat_thread.start()
        uno_server.accept_players(int(self.playerno_entry.get()))
        print(self.controller.uno_server.players)
        # Swap this frame for the ping/monitor window.
        frame = PingWindow(parent=self.parent, controller=self.controller)
        self.controller.frames[PingWindow] = frame
        frame.grid(row=0, column=0, sticky="nsew")
        self.controller.show_frame(PingWindow)

    # Insert defaults into the entry controls for quick testing.
    def insert_defaults(self):
        self.port_entry.insert('end', 2121)
        self.playerno_entry.insert('end', 1)
        self.chatport_entry.insert('end', 2122)
        self.pingport_entry.insert('end', 2123)
class PingWindow(tk.Frame):
    """Monitor frame shown once the game starts; displays incoming pings."""

    def __init__(self, parent, controller):
        # Fix: the original called ttk.Frame.__init__ although this class
        # derives from tk.Frame; initialize the declared base class instead.
        tk.Frame.__init__(self, parent)
        self.controller = controller
        uno_server = self.controller.uno_server
        self.ping_server = self.controller.ping_server
        self.grid_rowconfigure(1)
        self.grid_columnconfigure(1, weight=1)
        label = ttk.Label(self, text="Pings from players")
        self.chat_area = ScrolledText(self, height=10)
        label.grid(row=0, column=1)
        self.chat_area.grid(row=1, column=1)
        # Start the game loop and the background ping listener.
        uno_server.start_gamethread()
        ping_thread = threading.Thread(target=self.ping_thread)
        ping_thread.start()

    def ping_thread(self):
        """Receive pings forever and append them to the text area.

        NOTE(review): this worker touches a Tk widget from a non-main,
        non-daemon thread — it works with common Tk builds but is not
        thread-safe; a queue polled via ``after`` would be more robust, and
        the non-daemon thread keeps the process alive after window close.
        """
        while True:
            message = self.ping_server.accept_pings()
            self.chat_area.insert('end', message)
# Construct and run the GUI only when executed as a script, so importing
# this module (e.g. from tests or tools) does not open a window.
if __name__ == "__main__":
    server_app = MainApp()
    server_app.mainloop()
|
998,598 | cd455ac72651af481f2048aec0498f863305e7b2 | import torch
from torch.autograd import Variable
import torch.nn as nn
import math
import numpy as np
import torch.nn.functional as F
class MultiBatchNorm(nn.Module):
    """A bank of per-type batch-norm layers behind one forward interface.

    One BatchNorm1d/BatchNorm2d instance is created for every entry in
    *types*; ``forward`` applies the layer selected by ``self.t`` and then
    resets the selector to ``'base'`` for the next call.
    """

    def __init__(self, dim, types, num_features, momentum=None):
        # Preconditions: several named types, a 'base' fallback among them,
        # and a supported dimensionality.
        assert isinstance(types, list) and len(types) > 1
        assert 'base' in types
        assert dim in ('1d', '2d')
        super(MultiBatchNorm, self).__init__()
        self.types = types

        # Pick the norm class by dimensionality and forward the momentum
        # override only when one was given (otherwise keep torch's default).
        bn_class = {'1d': nn.BatchNorm1d, '2d': nn.BatchNorm2d}[dim]
        extra = {} if momentum is None else {'momentum': momentum}
        self.bns = nn.ModuleDict(
            {name: bn_class(num_features, **extra) for name in types})

        # Currently selected branch; 'base' is the default.
        self.t = 'base'

    def forward(self, x):
        """Normalize *x* with the branch named ``self.t``, then reset it."""
        assert self.t in self.types
        selected = self.bns[self.t]
        out = selected(x)
        self.t = 'base'
        return out
|
998,599 | cee2e72e424ec03385791f58376e15eba580f9a6 | from collections import defaultdict
# Python 2 competitive-programming script.  Appears to count pairs (i, j),
# i < j, whose values sum to zero, assuming every value lies in [-10, 10]
# — TODO confirm against the original problem statement.
n = input()  # element count line; consumed but otherwise unused
a = map(int, raw_input().split())
# Frequency table of the values.
cnt = defaultdict(int)
for x in a:
    cnt[x] += 1
res = 0
for i in xrange(11):
    if i == 0:
        # Zeros pair only with other zeros: C(cnt[0], 2) combinations.
        res += cnt[i] * (cnt[i] - 1) / 2
    else:
        # Each occurrence of value i pairs with each occurrence of -i.
        res += cnt[i] * cnt[-i]
print res
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.