text stringlengths 38 1.54M |
|---|
#!/usr/bin/env python
"""Strip the custom 'desc' field from every entry of a BibTeX file and write
a JSON object mapping each entry key to that entry's standalone BibTeX source.

Usage: script.py <input.bib> <output.json>
"""
import sys
import yaml
from pybtex.database import BibliographyData
from pybtex.database.input import bibtex
from io import StringIO  # BUG FIX: Python 3 location (was `from StringIO import StringIO`)
from pybtex.database.output import bibtex as bibtexo
import json

filename = sys.argv[1]
out = sys.argv[2]

parser = bibtex.Parser()
bib_data = parser.parse_file(filename)
writer = bibtexo.Writer()

entries = {}
for key, entry in bib_data.entries.items():
    # Wrap each entry in its own one-entry database so it can be serialized alone.
    single = BibliographyData()
    if 'desc' in entry.fields:
        print('removing desc field of %r' % key)
        del entry.fields['desc']
    single.entries[key] = entry
    buf = StringIO()
    writer.write_stream(single, buf)
    # BUG FIX: store under a new name; the original rebound `bibtex`, shadowing
    # the imported pybtex input module.
    entries[key] = buf.getvalue()

# BUG FIX: json.dumps returns str, so open in text mode (was 'wb').
with open(out, 'w') as f:
    f.write(json.dumps(entries))
|
from rest_framework import serializers
from ..models import University


class UniversitySerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked REST serializer exposing the basic University fields."""

    class Meta:
        model = University
        # 'url' is the hyperlinked identity; the remaining fields mirror model columns.
        fields = ["url", "id", "name", "country", "city"]
|
# -*- coding:utf-8 -*-
def main():
    """Print the sample sentence with every 's' replaced by '$'."""
    sentence = "A screaming comes across the sky."
    censored = sentence.replace("s", "$")
    print(censored)


if __name__ == '__main__':
    main()
|
# attempt
class Solution:
    # attempt: keep the current duplicate-free window in a list, remembering
    # the longest window seen whenever a repeat forces a shrink.
    # NOTE: shadowed by the second definition below; kept for reference.
    def lengthOfLongestSubstring(self, s: str) -> int:
        window = []
        best = []
        for ch in s:
            if ch in window:
                # A repeat ends this window; keep it if it is the longest so far.
                if len(window) > len(best):
                    best = window
                restart = window.index(ch)
                window = window[restart + 1:]
            window.append(ch)
        # The final window may itself be the longest.
        return max(len(best), len(window))

    # optimal: same sliding window, but only the max *length* is tracked.
    def lengthOfLongestSubstring(self, s):
        """
        :type s: str
        :rtype: int
        """
        window = []
        best_len = 0
        for ch in s:
            if ch in window:
                # Drop everything up to and including the earlier occurrence.
                window = window[window.index(ch) + 1:]
            window.append(ch)
            best_len = max(best_len, len(window))
        return best_len
# very similar, but thought it was much cleaner and easier to understand the process
|
# Print a coupling table for adjacent site pairs: for each pair (i, i+1),
# emit the pair indices (1-based), the coupling value, and one zero line per
# remaining mode — in both orderings (i,j) and (j,i).
n_site = 2
n_mode = 200
vij = 0  # coupling value; zero placeholder here
# BUG FIX: converted Python-2-only print statements to print() calls.
for i in range(n_site - 1):
    j = i + 1
    print(i + 1, j + 1)
    print(vij)
    for i_mode in range(1, n_mode):
        print(0)
    print(j + 1, i + 1)
    print(vij)
    for i_mode in range(1, n_mode):
        print(0)
|
import sys
sys.path.append("..")
import xgboost as xgb
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter, \
UniformIntegerHyperparameter, UnParametrizedHyperparameter, Constant, \
CategoricalHyperparameter
from Forecasting import Automated_ML_Forecasting
import numpy as np
class Xgboost_Forecasting(Automated_ML_Forecasting):
    """Automated time-series forecasting backed by an XGBoost regressor.

    Coerces all hyperparameters to their expected numeric types, builds the
    underlying xgb.XGBRegressor, and delegates feature extraction and
    cross-validation to the Automated_ML_Forecasting base class.
    """

    def __init__(self, timeseries, dataname,
                 # xgboost model parameters
                 learning_rate=0.1, n_estimators=100, subsample=1.0,
                 max_depth=3, colsample_bylevel=1, colsample_bytree=1, gamma=0,
                 min_child_weight=1, max_delta_step=0, reg_alpha=0, reg_lambda=1,
                 base_score=0.5, scale_pos_weight=1, nthread=1,
                 random_state=None, verbose=0,
                 # feature extraction parameters
                 Window_size=20, Difference=False,
                 time_feature=True, tsfresh_feature=True,
                 forecasting_steps=25, n_splits=5,
                 max_train_size=None, NAN_threshold=0.05):
        # Explicit casts: hyperparameter search frameworks may hand over strings.
        self.learning_rate = float(learning_rate)
        self.n_estimators = int(n_estimators)
        self.subsample = float(subsample)
        self.max_depth = int(max_depth)
        self.colsample_bylevel = float(colsample_bylevel)
        self.colsample_bytree = float(colsample_bytree)
        self.gamma = float(gamma)
        self.min_child_weight = int(min_child_weight)
        self.max_delta_step = int(max_delta_step)
        self.reg_alpha = float(reg_alpha)
        self.reg_lambda = float(reg_lambda)
        self.base_score = float(base_score)
        self.scale_pos_weight = float(scale_pos_weight)
        self.nthread = int(nthread)
        # xgboost's `silent` flag is the inverse of verbosity.
        if verbose:
            self.silent = False
        else:
            self.silent = True
        # Derive a concrete integer seed for xgboost.
        # NOTE(review): the non-None branch assumes random_state is a
        # numpy RandomState-like object (has .randint) -- confirm callers.
        if random_state is None:
            self.seed = np.random.randint(1, 10000, size=1)[0]
        else:
            self.seed = random_state.randint(1, 10000, size=1)[0]
        self.objective = 'reg:linear'
        self.estimator = xgb.XGBRegressor(
            max_depth=self.max_depth,
            learning_rate=self.learning_rate,
            n_estimators=self.n_estimators,
            silent=self.silent,
            objective=self.objective,
            nthread=self.nthread,
            gamma=self.gamma,
            scale_pos_weight=self.scale_pos_weight,
            min_child_weight=self.min_child_weight,
            max_delta_step=self.max_delta_step,
            subsample=self.subsample,
            colsample_bytree=self.colsample_bytree,
            colsample_bylevel=self.colsample_bylevel,
            reg_alpha=self.reg_alpha,
            reg_lambda=self.reg_lambda,
            base_score=self.base_score,
            seed=self.seed
        )
        super().__init__(timeseries, dataname, Window_size, time_feature, Difference, tsfresh_feature,
                         forecasting_steps, n_splits, max_train_size, NAN_threshold)

    def _direct_prediction(self):
        # Delegate single-shot forecasting to the base class with our estimator.
        super()._direct_prediction(self.estimator)

    def _cross_validation(self):
        return super()._Time_Series_forecasting_cross_validation(self.estimator)

    def _cross_validation_visualization(self):
        super()._cross_validation_visualization(self.estimator)

    def _cross_and_val(self):
        return super()._cross_and_val(self.estimator)

    @staticmethod
    def get_hyperparameter_search_space(dataset_properties=None):
        """Build the ConfigSpace describing the tunable hyperparameters.

        :param dataset_properties: unused; kept for interface compatibility.
        :return: ConfigurationSpace with window/feature and xgboost parameters.
        """
        cs = ConfigurationSpace()
        # Parameterized Hyperparameters
        Window_size = UniformIntegerHyperparameter(
            name="Window_size", lower=5, upper=50, default_value=20)
        # Booleans are encoded as the strings "True"/"False" for ConfigSpace.
        tsfresh_feature = CategoricalHyperparameter(
            name="tsfresh_feature", choices=["True", "False"], default_value="True")
        Difference = CategoricalHyperparameter(
            name="Difference", choices=["True", "False"], default_value="True")
        max_depth = UniformIntegerHyperparameter(
            name="max_depth", lower=1, upper=30, default_value=3)
        learning_rate = UniformFloatHyperparameter(
            name="learning_rate", lower=0.01, upper=1, default_value=0.1, log=False)
        n_estimators = UniformIntegerHyperparameter("n_estimators", 50, 500, default_value=100)
        subsample = UniformFloatHyperparameter(
            name="subsample", lower=0.01, upper=1.0, default_value=1.0, log=False)
        min_child_weight = UniformIntegerHyperparameter(
            name="min_child_weight", lower=1, upper=20, default_value=1, log=False)
        # Unparameterized Hyperparameters
        cs.add_hyperparameters([max_depth, learning_rate, n_estimators,
                                Difference, subsample, min_child_weight,
                                Window_size, tsfresh_feature])
        return cs
|
# Tiny interactive yes/no/what prompt.
statement = input()
if statement == "t":
    print("Yes")
    # BUG FIX: the original wrote `if input != "x"`, comparing the builtin
    # `input` *function* to a string — always True, so "Rubbish" printed
    # unconditionally. Presumably a second input line was intended;
    # confirm with the author.
    if input() != "x":
        print("Rubbish")
elif statement == "N":
    print("No")
else:
    print("What?")
|
def solution(A, B):
    """Return the smallest value common to both lists, or -1 if none exists.

    Uses a two-pointer merge over sorted copies, so the inputs are no longer
    mutated (the original sorted them in place).

    :param A: list of comparable values
    :param B: list of comparable values
    :return: smallest common element, or -1
    """
    # Iterate the longer list and advance one pointer through the shorter.
    if len(A) < len(B):
        A, B = B, A
    A = sorted(A)
    B = sorted(B)
    i = 0
    for a in A:
        # BUG FIX: the original advanced `i` at most one step per element of A,
        # so the pointer could lag and miss matches (e.g. A=[1,2,3,9],
        # B=[7,8,9] returned -1). Skip past every smaller element instead.
        while i < len(B) and B[i] < a:
            i += 1
        if i == len(B):
            break
        if B[i] == a:
            return a
    return -1
if __name__ == '__main__':
    # Quick manual check: 5 is the only value the two lists share.
    answer = solution([1, 3, 2, 5], [4, 4, 4, 4, 4, 5])
    print(answer)
# -*- coding: utf-8 -*-
"""
Created on Sun May 12 09:11:29 2019
@author: bittu
"""
import cv2
import numpy as np
import sqlite3
# we are using cascade classifier for frontal-face detection
faceDetect = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
# load the web cam
cam = cv2.VideoCapture(0);  # for a web cam the capture id is generally 0
def insertOrUpdate(Id, Name, Age):
    """Insert a person into the People table, or update the row if ID exists.

    :param Id: person identifier (primary lookup key)
    :param Name: person's name
    :param Age: person's age
    """
    # BUG FIX: use parameterized queries instead of string concatenation.
    # The old code was open to SQL injection and the UPDATE statement was
    # broken anyway (missing space before 'Where', unquoted text values).
    conn = sqlite3.connect("facebase.db")
    cursor = conn.execute("SELECT * FROM People WHERE ID=?", (Id,))
    exists = cursor.fetchone() is not None
    if exists:
        conn.execute("UPDATE People SET Name=?, Age=? WHERE ID=?", (Name, Age, Id))
    else:
        conn.execute("INSERT INTO People VALUES(?,?,?)", (Id, Name, Age))
    conn.commit()
    conn.close()
# Enroll the user in the database, then capture ~20 face samples to dataSet/.
# NOTE(review): the quoting instruction below suggests this was written for
# Python 2, where input() evaluates the typed text — confirm target version.
id = input('enter user id')
name = input('enter name')
# include "" while entering name
age = input('enter age')
insertOrUpdate(id, name, age)
sampleNum = 0
# camera code
while (True):
    ret, img = cam.read();  # return two variables: one flag and another image
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # converting the returned colored image to grayscale
    faces = faceDetect.detectMultiScale(gray, 1.3, 5)  # detect faces in image and return their coordinates
    for (x, y, w, h) in faces:
        sampleNum += 1;
        # save the cropped grayscale face as dataSet/User.<id>.<n>.jpg
        cv2.imwrite("dataSet/User." + str(id) + "." + str(sampleNum) + ".jpg", gray[y:y + h, x:x + w])
        # draw rectangle around face: (x,y),(x+w,y+h) are the two corners,
        # (255,0,0) is the colour (OpenCV uses BGR order) and 2 is the thickness
        cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
        cv2.waitKey(100);  # brief pause between captured samples
    cv2.imshow("face", img);  # open window
    cv2.waitKey(1);
    if (sampleNum > 20):  # stop after 20 samples
        break;
cam.release()
cv2.destroyAllWindows()
"""
D
/ \
A B C
\ / /
Mc
"""
class D:
def f1(self):
print('from D')
class A(D):
pass
class B(D):
def f1(self):
print('from B')
class C:
def f1(self):
print('from C')
class Mc(A,B,C):
pass
print(Mc.mro()) |
__author__ = 'tomas'
import numpy as np
# import tools
# Lesion priority levels.
PRIORITY_LOW = 0  # for lesions that are extracted autonomously
PRIORITY_HIGH = 1  # for lesions added by the user; these will not be filtered by sliders (area, density, ...)
def create_lesion_from_pt(pt, density, lbl, priority=PRIORITY_HIGH):
    """Create a single-voxel Lesion located at the given point.

    :param pt: center of lesion, [s, x, y] = [s, c, r]
    :param density: mean density assigned to the lesion
    :param lbl: label (identifier) of the new lesion
    :param priority: defaults to PRIORITY_HIGH (user-added lesions bypass slider filters)
    :return: the new Lesion instance
    """
    # NOTE(review): the docstring order [s, c, r] conflicts with the
    # assignments below (pt[1] -> row, pt[2] -> column) — confirm convention.
    les = Lesion(lbl)
    # A point lesion has unit extent in every dimension.
    les.area = 1
    les.compactness = 1
    les.center = pt
    les.priority = priority
    les.mean_density = density
    les.std_density = 0
    les.max_width = 1
    les.max_height = 1
    les.max_depth = 1
    # minimal and maximal row, column and slice — all collapse onto the point
    les.r_min = pt[1]
    les.r_max = pt[1]
    les.c_min = pt[2]
    les.c_max = pt[2]
    les.s_min = pt[0]
    les.s_max = pt[0]
    les.hist = None  # histogram of density
    les.chord = 1  # longest chord (tetiva in czech)
    return les
class Lesion(object):
    """This class represents lesions (geometry + density features)."""

    def __init__(self, label, mask=None, data=None, voxels2ml_k=1, priority=PRIORITY_LOW):
        """
        :param label: label of the lesion in segmented data; its identifier
        :param mask: optional binary mask; when given, features are computed immediately
        :param data: optional density volume aligned with the mask
        :param voxels2ml_k: voxel-count to millilitre conversion factor
        :param priority: PRIORITY_LOW for auto-extracted, PRIORITY_HIGH for user-added
        """
        self.label = label  # label of the lesion in segmented data; its identifier
        self.area = None  # area of the lesion
        self.compactness = None
        self.center = None  # center of mass
        self.priority = priority
        self.mean_density = None
        self.std_density = None
        self.max_width = None
        self.max_height = None
        self.max_depth = None
        # minimal and maximal row, column and slice
        self.r_min = None
        self.r_max = None
        self.c_min = None
        self.c_max = None
        self.s_min = None
        self.s_max = None
        self.hist = None  # histogram of density
        self.chord = None  # longest chord (tetiva in czech)
        if mask is not None:
            self.compute_features(mask, data, voxels2ml_k=voxels2ml_k)

    def compute_features(self, mask, data, voxels2ml_k=1):
        """Derive geometry and (optionally) density statistics from a binary mask.

        :param mask: binary volume (slices, rows, cols) marking the lesion voxels
        :param data: density volume of the same shape, or None to skip density stats
        :param voxels2ml_k: voxel-to-ml scale applied to the area/volume
        """
        self.area = mask.sum() * voxels2ml_k
        s, r, c = np.nonzero(mask)
        # NOTE(review): the `tools` import is commented out at module level, so
        # this call raises NameError at runtime — restore the import or guard it.
        self.compactness = tools.get_zunics_compatness(mask)
        self.center = (s.mean(), r.mean(), c.mean())
        if data is not None:
            pts = data[np.nonzero(mask)]
            self.mean_density = pts.mean()
            self.mean_density_std = pts.std()
            # BUG FIX: also fill the attribute declared in __init__, which was
            # never assigned (only mean_density_std was set).
            self.std_density = pts.std()
        self.r_min = r.min()
        self.r_max = r.max()
        self.c_min = c.min()
        self.c_max = c.max()
        self.s_min = s.min()
        self.s_max = s.max()
        self.max_width = self.c_max - self.c_min
        self.max_height = self.r_max - self.r_min
        # BUG FIX: depth spans slices, not columns (was c_max - c_min).
        self.max_depth = self.s_max - self.s_min

    def __str__(self):
        return 'label=%i, area=%i, mean_dens=%.2f, mean_dens_std=%.2f, center=[%.1f, %.1f, %.1f]' % (
            self.label, self.area, self.mean_density, self.mean_density_std, self.center[0], self.center[1], self.center[2])
def extract_lesions(labels, data=None, voxels2ml_k=1):
    """
    Build one Lesion instance per positive label found in 'labels'.

    :param labels: labeled data, lesions have label > 0 (0 = background, -1 = points outside a mask)
    :return: list of lesions
    """
    positive = (lb for lb in np.unique(labels) if lb > 0)
    return [Lesion(lb, mask=(labels == lb), data=data, voxels2ml_k=voxels2ml_k)
            for lb in positive]
if __name__ == '__main__':
    # Smoke test: five labeled regions replicated into three identical slices.
    # BUG FIX: np.int was deprecated and removed in numpy >= 1.20; use int.
    labels = np.array([[1, 1, 0, 2, 0],
                       [1, 1, 0, 2, 0],
                       [0, 0, 0, 2, 0],
                       [3, 0, 4, 0, 5],
                       [3, 0, 4, 0, 0]], dtype=int)
    labels = np.dstack((labels, labels, labels))
    lesions = extract_lesions(labels, data=labels, voxels2ml_k=1)
    for lesion in lesions:
        # BUG FIX: Python-2 print statement converted to a print() call.
        print(lesion)
    # TODO: dopocitat compactness
    # TODO: dopocitat chord
    # TODO: dopocitat hist
from django.contrib import admin
from .models import Tyre, UserInfo, Feedback, TyresGroup, Images

# Expose the application's models in the Django admin interface.
admin.site.register(TyresGroup)
admin.site.register(Tyre)
admin.site.register(Images)
admin.site.register(UserInfo)
admin.site.register(Feedback)
# Python modules
import simpleaudio as sa
import time
import random
import _thread
import sys
# Project modules
import userInput as ui
import playback as pb
import beatGenerator as bgen
import writeMidi as midi
# Terminal colors
colorErr = "\033[31m"
colorReset = "\033[0m"
# Display some nice things
ui.titleText()
# = == === ==== ===== # Initialize values # ===== ==== === == = #
# Initial settings selection.
# Settings can be changed afterwards, but values are needed to:
# - Load the samples
# - Start playback
# Highest selectable drumkit index (4 kits, zero-based).
totalDrumkits = 4 - 1
# Drumkit selection
print("Available drumkits: \n 0: Dry \n 1: Synthetic \n 2: Dub \n 3: Beatboxing \n\nChoose a drumkit:")
pb.drumkit = ui.askInput(0, totalDrumkits)
# Load all the samples of the drumkit
pb.loadSamples()
# Time signature selection
print("\nHow many triggers per measure? (4-12)")
pb.timeBeats = ui.askInput(4, 12)
print("\nHow many triggers per quarter note? (2 or 4)")
pb.timeQuarter = ui.askInputOr(2, 4)
# pb.timeQuarter = 2
# BPM selection
print("\nChoose a BPM: (50-200)")
bpm = ui.askInput(50, 200)
# Generate the actual beat
# First boolean determines if the beat is actually generated
# False will just play a predefined sequence consisting of 8 triggers
bgen.generate(pb.timeBeats, pb.timeQuarter)
# Calculates the length of triggers and display info
pb.initPlayback(bpm, True)
# = == === ==== ===== # Playback Loop # ===== ==== === == = #
# Start the playback thread
try:
    _thread.start_new_thread(pb.playbackThread, ())
except:
    print(colorErr, "Error: unable to start thread \n" + colorReset)
# NOTE(review): askForInput is never read again below — candidate for removal.
askForInput = True
prevUserInput = None
# Loop checking for user input
while True:
    # Wait for keyboard input
    userInput = input("> ")
    # Splits input into a list, allows evaluating indiviual words
    userInput = userInput.split(" ", 1)
    # Empty input or a space will repeat the last command
    if userInput[0] == "":
        if not prevUserInput == None:
            print(" Repeating:", ' '.join(prevUserInput))
            userInput = prevUserInput
    # Exit program
    if userInput[0].lower() in ["exit", "quit", "e"]:
        # if userInput[0].lower() == "exit" or userInput[0].lower() == "quit" or userInput[0].lower() == "e":
        pb.playback = False
        ui.exitProgram()
    # Start or restart playback
    elif userInput[0].lower() == "start":
        pb.initPlayback(bpm)
        pb.playback = True
    # Stop playback
    elif userInput[0].lower() == "stop":
        if pb.playback:
            pb.playback = False
        else:
            print(colorErr, "Playback has already stopped \n", colorReset)
    # Trigger the generation engine
    elif userInput[0].lower() == "gen":
        bgen.generate(pb.timeBeats, pb.timeQuarter)
    # Trigger the generation engine and print the result
    elif userInput[0].lower() == "genp":
        bgen.generate(pb.timeBeats, pb.timeQuarter)
        hhcPrint = " ".join(str(i) for i in bgen.sequences[2])
        snrPrint = " ".join(str(i) for i in bgen.sequences[1])
        kikPrint = " ".join(str(i) for i in bgen.sequences[0])
        print(" Hihats:", hhcPrint.replace("0", "-"))
        print(" Snare: ", snrPrint.replace("0", "-"))
        print(" Kick: ", kikPrint.replace("0", "-"))
    # BPM
    elif userInput[0].lower() == "bpm":
        if len(userInput) <= 1:
            print(colorErr, " ! Missing argument: \n expecting bpm + value", colorReset)
        else:
            bpm = ui.checkInput(userInput[1], bpm, 50, 200)
    # Time signature
    elif userInput[0].lower() == "time":
        if len(userInput) <= 1:
            print(colorErr, " ! Missing argument: \n expecting time + value", colorReset)
        else:
            pb.timeBeats = ui.checkInput(userInput[1], pb.timeBeats, 4, 12)
            if userInput[1].isdigit() and 12 >= int(userInput[1]) >= 4:
                pb.playback = False
                bgen.generate(pb.timeBeats, pb.timeQuarter)
                # NOTE(review): duplicate of the checkInput call above — confirm intent.
                pb.timeBeats = ui.checkInput(userInput[1], pb.timeBeats, 4, 12)
    # Quarter notes resolution
    elif userInput[0].lower() == "quarter":
        if len(userInput) <= 1:
            print(colorErr, " ! Missing argument: \n expecting quarter + value", colorReset)
        else:
            pb.timeQuarter = ui.checkInputOr(userInput[1], pb.timeQuarter, 2, 4)
    # Drumkit
    elif userInput[0].lower() == "kit":
        if len(userInput) <= 1:
            print(colorErr, " ! Missing argument: \n expecting drumkit + value", colorReset)
        else:
            pb.drumkit = ui.checkInput(userInput[1], pb.drumkit, 0, totalDrumkits)
            # If value is valid: load selected drumkit
            if userInput[1].isdigit() and totalDrumkits >= int(userInput[1]) >= 0:
                pb.loadSamples()
    # Print current sequences
    elif userInput[0].lower() == "print":
        hhcPrint = " ".join(str(i) for i in bgen.sequences[2])
        snrPrint = " ".join(str(i) for i in bgen.sequences[1])
        kikPrint = " ".join(str(i) for i in bgen.sequences[0])
        print(" Hihats:", hhcPrint.replace("0", "-"))
        print(" Snare: ", snrPrint.replace("0", "-"))
        print(" Kick: ", kikPrint.replace("0", "-"))
    # Write beat to midi file
    elif userInput[0].lower() == "midi":
        # Set filename if not specified
        if len(userInput) <= 1:
            print(colorErr, " ! Missing argument:\n filename set to irregbeat.mid", colorReset)
            userInput.append("irregbeat")
        # Write midifile
        midi.writeMidi(bgen.sequences, userInput[1], bpm, pb.timeQuarter, pb.timeBeats)
    # Show help file
    elif userInput[0].lower() == "help":
        ui.helpFile()
    # SPOOKY
    elif userInput[0].lower() == "ufo":
        ui.ufo()
    # Command not recognized
    else:
        print(colorErr, " ".join(userInput), "not recognized, type help for an overview of all commands \n", colorReset)
    # Remember the command so an empty input can repeat it.
    prevUserInput = userInput
|
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from django.contrib.auth.models import User
from rest_framework.decorators import api_view, permission_classes, authentication_classes
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from validate_ip import valid_ip
@api_view(['GET'])
@authentication_classes((TokenAuthentication, BasicAuthentication))
@permission_classes((IsAuthenticated,))
def user(request, format=None):
    """
    (POST): Users are created upon successful (federated) login, so no create user functionality is provided
    GET: Retrieve a list of users that are the administrators of the domain
    """
    # Only serve requests from whitelisted client IPs.
    ip_address = request.META['REMOTE_ADDR']
    if valid_ip(ip_address) is False:
        return Response("Not authorised client IP", status=status.HTTP_401_UNAUTHORIZED)
    try:
        service_user = User.objects.get(username=request.user)
    except User.DoesNotExist:
        # service_user = None
        # NOTE(review): request.user may not be a str here, so the string
        # concatenation below can raise TypeError — consider str(request.user).
        return Response("User is unknown :" + request.user, status=status.HTTP_400_BAD_REQUEST)
    if request.method == 'GET':
        # get academic entity from user name (stored in last_name by convention here)
        academic = service_user.last_name
        # get all users for the defined academic entity
        user_list = User.objects.all().filter(last_name=academic)
        result_list = []
        for user_item in user_list:
            result = {'username': user_item.username, 'name': user_item.first_name, 'email': user_item.email,
                      'domain': user_item.last_name, 'date_joined': user_item.date_joined}
            result_list.append(result)
        if result_list:
            return Response(result_list, status=status.HTTP_200_OK)
        else:
            return Response("user list not found", status=status.HTTP_400_BAD_REQUEST)
@api_view(['DELETE', 'GET'])
@authentication_classes((TokenAuthentication, BasicAuthentication))
@permission_classes((IsAuthenticated,))
def username_mgmt(request, username, format=None):
    """
    GET: Retrieve details for one user
    DELETE: Delete specified user ..
    """
    # Only serve requests from whitelisted client IPs.
    ip_address = request.META['REMOTE_ADDR']
    # BUG FIX: Python-2-only print statements converted to print() calls
    # (the single-argument form behaves the same on Python 2 and 3).
    print("delete/get ip_address:" + ip_address)
    if valid_ip(ip_address) is False:
        return Response("Not authorised client IP", status=status.HTTP_401_UNAUTHORIZED)
    # verify that token corresponds to a valid user
    try:
        service_user = User.objects.get(username=request.user)
    except User.DoesNotExist:
        # BUG FIX: request.user may not be a str; coerce before concatenation
        # to avoid a TypeError while building the error message.
        return Response("User token is unknown :" + str(request.user), status=status.HTTP_400_BAD_REQUEST)
    # verify that username exists
    try:
        user_name = User.objects.get(username=username)
    except User.DoesNotExist:
        return Response("Username is unknown :" + str(username), status=status.HTTP_400_BAD_REQUEST)
    # verify that service_user has the rights to manage the username
    if user_name:
        # academic domain of service_user should be the same as the managed user
        user_domain = user_name.last_name
        user_domain_ = user_domain.replace(".", "_")
        service_user_domain = service_user.last_name
        service_user_domain_ = service_user_domain.replace(".", "_")
        if user_domain_ == service_user_domain_:
            print("permission granted")
        else:
            return Response("Not sufficient rights to perform this action for: " + username,
                            status=status.HTTP_400_BAD_REQUEST)
    if request.method == 'DELETE':
        user_deleted = User.objects.get(username=username).delete()
        # NOTE(review): Model.delete() returns a (count, details) tuple on
        # modern Django, so the None check below may always report failure —
        # confirm against the Django version in use.
        if user_deleted is None:
            return Response("user:" + username + " deleted", status=status.HTTP_200_OK)
        else:
            return Response("user:" + username + " not deleted", status=status.HTTP_400_BAD_REQUEST)
    if request.method == 'GET':
        # get academic entity from user name
        # get all users for the defined academic entity
        user_item = User.objects.get(username=username)
        result = {}
        if user_item:
            result = {'username': user_item.username, 'name': user_item.first_name, 'email': user_item.email,
                      'domain': user_item.last_name, 'date_joined': user_item.date_joined}
            return Response(result, status=status.HTTP_200_OK)
        if not result:
            return Response("user:" + username + " not found", status=status.HTTP_400_BAD_REQUEST)
|
'''
Let's see how easy it is to move to semi-integer and semi-continuous decision
variables with docplex. Semi-integer means, for a quantity of buses for
example, that the value is either 0 or lies within a given range.
In our bus example, suppose we cannot rent fewer than 4 buses of any given size.
We then write:
'''
from docplex.mp.model import Model

# original model
mdl = Model(name='buses')
# Each bus count is either 0 or between 4 and 20 (semi-integer domain).
nbbus40 = mdl.semiinteger_var(4, 20, name='nbBus40')
nbbus30 = mdl.semiinteger_var(4, 20, name='nbBus30')
# Seat all 300 kids using 40-seat and 30-seat buses.
mdl.add_constraint(nbbus40 * 40 + nbbus30 * 30 >= 300, 'kids')
# Minimize the total rental cost.
mdl.minimize(nbbus40 * 500 + nbbus30 * 400)
mdl.solve()
for v in mdl.iter_semiinteger_vars():
    print(v, " = ", v.solution_value)
'''
which gives
nbBus40 = 4.0
nbBus30 = 5.0
'''
|
import os
import sys
from flask import Flask
from cms.models import Entry
from cms.models import User
# Maps the CONFIG_FILE environment value to the instance config filename.
CONFIGS = {
    'production': 'config-production.py',
    'development': 'config-development.py',
}
def create_app():
    """Application factory: build and configure the Flask app.

    Loads the instance config selected by the CONFIG_FILE environment
    variable, derives the SQLAlchemy database URI from it, then registers
    error handlers, CLI commands and blueprints.

    Exits the process when the config file is missing.
    """
    app = Flask(__name__, instance_relative_config=True)
    config_name = CONFIGS[os.getenv('CONFIG_FILE', 'production')]
    try:
        app.config.from_pyfile(config_name)
        # BUG FIX: corrected the log-message typo 'loaeded' -> 'loaded'.
        app.logger.info('config file successfully loaded.')
    except FileNotFoundError:
        app.logger.error('config file must exist.')
        sys.exit(1)
    # Build the DB connection string from the loaded config values.
    app.config.from_mapping(
        SQLALCHEMY_DATABASE_URI='mysql+pymysql://{0}:{1}@{2}:{3}/{4}?charset={5}'.format(
            app.config['DB_USER'],
            app.config['DB_PASSWORD'],
            app.config['DB_HOST'],
            app.config['DB_PORT'],
            app.config['DATABASE'],
            'utf8mb4',
        ),
        SQLALCHEMY_TRACK_MODIFICATIONS=False,
    )
    from cms import error_handler as eh
    app.register_error_handler(403, eh.forbidden)
    app.register_error_handler(404, eh.not_found)
    from cms.database import init_app
    from cms.cli import add_cli
    init_app(app)
    add_cli(app)
    from cms import auth, blog, user
    app.register_blueprint(auth.bp)
    app.register_blueprint(blog.bp)
    app.register_blueprint(user.bp)
    app.add_url_rule('/', endpoint='index')
    return app
|
"""
1) Use 2 references
Note: String are inmutable so we must pass in an array of ch if we want to do it inplace
Time: O(n) Space:O(1)
"""
def reverse_inplace(ch_arr):
    """Reverse a list of characters in place and return it.

    Strings are immutable, so callers pass a list of characters when they
    want the reversal done in place.
    Time: O(n)  Space: O(1)

    :param ch_arr: list to reverse (mutated)
    :return: the same list, reversed
    """
    beg = 0
    end = len(ch_arr) - 1
    while beg < end:
        # Tuple assignment swaps without a temporary variable.
        ch_arr[beg], ch_arr[end] = ch_arr[end], ch_arr[beg]
        beg += 1
        end -= 1
    # BUG FIX: the original returned ch_arr only when len < 2 and None
    # otherwise; the list is now returned consistently in every case.
    return ch_arr
# Demo: reverse a sample string via a mutable character list.
arr = list("hello world")
reverse_inplace(arr)
print(''.join(ch for ch in arr))
#!/usr/bin/env python3
# encoding: utf-8
from collections import defaultdict
from copy import deepcopy
from typing import Dict, List
import numpy as np
from mlagents_envs.environment import UnityEnvironment
from mlagents_envs.side_channel.engine_configuration_channel import \
EngineConfigurationChannel
from mlagents_envs.side_channel.environment_parameters_channel import \
EnvironmentParametersChannel
from rls.common.data import Data
from rls.common.specs import EnvAgentSpec, SensorSpec
from rls.common.yaml_ops import load_config
from rls.envs.unity.wrappers.core import ObservationWrapper
from rls.utils.np_utils import get_discrete_action_list
class BasicUnityEnvironment(object):
    """Wrapper around a Unity ML-Agents environment.

    Sets up the side channels, translates Unity behavior specs into the
    project's spec objects, and converts reset/step feedback into Data
    records keyed by behavior name.
    """

    def __init__(self,
                 worker_id=0,
                 file_name=None,
                 port=5005,
                 render=False,
                 seed=42,
                 timeout_wait=60,
                 env_copies=12,
                 env_name='3DBall',
                 real_done=True,
                 initialize_config={},
                 engine_config={
                     'width': 84,
                     'height': 84,
                     'quality_level': 5,
                     'time_scale': 20,
                     'target_frame_rate': -1,
                     'capture_frame_rate': 60
                 },
                 **kwargs):
        # NOTE(review): the dict defaults above are mutable and shared across
        # calls — safe only while they are never mutated in place.
        self._n_copies = env_copies
        self._real_done = real_done  # when True, max_step terminations do not count as done
        self._side_channels = self.initialize_all_side_channels(
            initialize_config, engine_config)
        env_kwargs = dict(seed=seed,
                          worker_id=worker_id,
                          timeout_wait=timeout_wait,
                          side_channels=list(self._side_channels.values()))  # register all initialized side channels
        if file_name is not None:
            # A compiled build was given: resolve the scene name from the config.
            env_dict = load_config('rls/configs/unity/env_dict.yaml')
            env_kwargs.update(file_name=file_name,
                              base_port=port,
                              no_graphics=not render,
                              additional_args=[
                                  '--scene', str(env_dict.get(env_name, 'None'))
                              ])
        self.env = UnityEnvironment(**env_kwargs)
        self.env.reset()
        self.initialize_environment()

    def initialize_environment(self):
        """Initialize the environment and gather the information needed later,
        such as observation layout and action dimensions."""
        self.behavior_names = list(self.env.behavior_specs.keys())
        self._vector_idxs = defaultdict(list)   # indices of 1-D observation specs per behavior
        self._vector_dims = defaultdict(list)   # lengths of those vector observations
        self._visual_idxs = defaultdict(list)   # indices of 3-D (image) observation specs
        self._visual_dims = defaultdict(list)   # shapes of those visual observations
        self._a_dim = defaultdict(int)          # flattened action dimension per behavior
        self._discrete_action_lists = {}        # flat index -> per-branch action lookup
        self._is_continuous = {}
        self._actiontuples = {}
        self.env.reset()
        for bn, spec in self.env.behavior_specs.items():
            for i, obs_spec in enumerate(spec.observation_specs):  # TODO: optimize
                if len(obs_spec.shape) == 1:
                    self._vector_idxs[bn].append(i)
                    self._vector_dims[bn].append(obs_spec.shape[0])
                elif len(obs_spec.shape) == 3:
                    self._visual_idxs[bn].append(i)
                    self._visual_dims[bn].append(list(obs_spec.shape))
                else:
                    raise ValueError(
                        "shape of observation cannot be understood.")
            action_spec = spec.action_spec
            if action_spec.is_continuous():
                self._a_dim[bn] = action_spec.continuous_size
                self._discrete_action_lists[bn] = None
                self._is_continuous[bn] = True
            elif action_spec.is_discrete():
                # Flatten multi-branch discrete actions into one categorical dimension.
                self._a_dim[bn] = int(np.asarray(
                    action_spec.discrete_branches).prod())
                self._discrete_action_lists[bn] = get_discrete_action_list(
                    action_spec.discrete_branches)
                self._is_continuous[bn] = False
            else:
                raise NotImplementedError(
                    "doesn't support continuous and discrete actions simultaneously for now.")
            self._actiontuples[bn] = action_spec.empty_action(
                n_agents=self._n_copies)

    def initialize_all_side_channels(self, initialize_config, engine_config):
        """Initialize all side channels (engine settings and float parameters)."""
        engine_configuration_channel = EngineConfigurationChannel()
        engine_configuration_channel.set_configuration_parameters(**engine_config)
        float_properties_channel = EnvironmentParametersChannel()
        float_properties_channel.set_float_parameter('env_copies', self._n_copies)
        for k, v in initialize_config.items():
            float_properties_channel.set_float_parameter(k, v)
        return dict(engine_configuration_channel=engine_configuration_channel,
                    float_properties_channel=float_properties_channel)

    def reset(self, reset_config):
        # Push any reset-time parameters through the side channel first.
        for k, v in reset_config.items():
            self._side_channels['float_properties_channel'].set_float_parameter(
                k, v)
        self.env.reset()
        return self.get_obs(only_obs=True)

    def step(self, actions, step_config):
        """
        params: actions, type of dict or np.ndarray, if the type of actions is
        not dict, then set those actions for the first behavior controller.
        """
        for k, v in step_config.items():
            self._side_channels['float_properties_channel'].set_float_parameter(
                k, v)
        actions = deepcopy(actions)
        # TODO: fix this
        for bn in self.behavior_names:
            if self._is_continuous[bn]:
                self._actiontuples[bn].add_continuous(actions[bn])
            else:
                # Map the flat categorical index back to per-branch values.
                self._actiontuples[bn].add_discrete(
                    self._discrete_action_lists[bn][actions[bn]].reshape(self._n_copies, -1))
            self.env.set_actions(bn, self._actiontuples[bn])
        self.env.step()
        return self.get_obs()

    @property
    def AgentSpecs(self):
        # One EnvAgentSpec per behavior, describing its sensors and actions.
        ret = {}
        for bn in self.behavior_names:
            ret[bn] = EnvAgentSpec(
                obs_spec=SensorSpec(
                    vector_dims=self._vector_dims[bn],
                    visual_dims=self._visual_dims[bn]),
                a_dim=self._a_dim[bn],
                is_continuous=self._is_continuous[bn]
            )
        return ret

    @property
    def StateSpec(self) -> SensorSpec:
        # No global state observation is exposed by this wrapper.
        return SensorSpec()

    @property
    def agent_ids(self) -> List[str]:
        return self.behavior_names

    def get_obs(self, behavior_names=None, only_obs=False):
        """Parse the environment feedback into four parts: vector observations,
        visual observations, rewards and done signals."""
        behavior_names = behavior_names or self.behavior_names
        whole_done = np.full(self._n_copies, False)
        whole_info_max_step = np.full(self._n_copies, False)
        all_obs_fa, all_obs_fs = {}, {}
        all_reward = {}
        for bn in behavior_names:
            ps = []
            # TODO: optimize
            # Step until every env copy reports a decision step for this behavior,
            # collecting any terminal steps seen along the way.
            while True:
                ds, ts = self.env.get_steps(bn)
                if len(ts):
                    ps.append(ts)
                if len(ds) == self._n_copies:
                    break
                elif len(ds) == 0:
                    self.env.step()  # some of environments done, but some of not
                else:
                    raise ValueError(
                        f'agents number error. Expected 0 or {self._n_copies}, received {len(ds)}')
            obs_fs, reward = ds.obs, ds.reward
            obs_fa = deepcopy(obs_fs)
            done = np.full(self._n_copies, False)
            begin_mask = np.full(self._n_copies, False)
            info_max_step = np.full(self._n_copies, False)
            info_real_done = np.full(self._n_copies, False)
            for ts in ps:  # TODO: optimize
                _ids = ts.agent_id
                reward[_ids] = ts.reward
                info_max_step[_ids] = ts.interrupted  # terminated because the episode hit max_step
                # Exclude dones caused by max_step; record only real success/failure dones.
                info_real_done[_ids[~ts.interrupted]] = True
                done[_ids] = True
                begin_mask[_ids] = True
                # zip: vector, visual, ...
                for _obs, _tobs in zip(obs_fa, ts.obs):
                    _obs[_ids] = _tobs
            if self._real_done:
                done = np.array(info_real_done)
            _obs_fa = Data()
            _obs_fs = Data()
            if len(self._vector_idxs[bn]) > 0:
                _obs_fa.update(vector={f'vector_{i}': obs_fa[vi] for i, vi in enumerate(self._vector_idxs[bn])})
                _obs_fs.update(vector={f'vector_{i}': obs_fs[vi] for i, vi in enumerate(self._vector_idxs[bn])})
            if len(self._visual_idxs[bn]) > 0:
                _obs_fa.update(visual={f'visual_{i}': obs_fa[vi] for i, vi in enumerate(self._visual_idxs[bn])})
                _obs_fs.update(visual={f'visual_{i}': obs_fs[vi] for i, vi in enumerate(self._visual_idxs[bn])})
            all_obs_fa[bn] = _obs_fa
            all_obs_fs[bn] = _obs_fs
            all_reward[bn] = reward
            whole_done = np.logical_or(whole_done, done)
            whole_info_max_step = np.logical_or(whole_info_max_step, info_max_step)
        if only_obs:
            all_obs_fa.update(
                {'global': Data(begin_mask=np.full((self._n_copies, 1), True))})
            return all_obs_fa
        else:
            rets = {}
            for bn in self.behavior_names:
                rets[bn] = Data(obs_fa=all_obs_fa[bn],
                                obs_fs=all_obs_fs[bn],
                                reward=all_reward[bn],
                                done=whole_done,
                                info=dict(max_step=whole_info_max_step))
            # NOTE(review): begin_mask comes from the *last* behavior processed
            # in the loop above — confirm this is intended for multi-behavior setups.
            rets.update(
                {'global': Data(begin_mask=begin_mask[:, np.newaxis])})  # [B, 1]
            return rets

    def __getattr__(self, name):
        """Forward unknown attribute access to the wrapped UnityEnvironment,
        refusing attributes that start with '_' (treated as private)."""
        if name.startswith('_'):
            raise AttributeError(
                "attempted to get missing private attribute '{}'".format(name))
        return getattr(self.env, name)
class ScaleVisualWrapper(ObservationWrapper):
    """Observation wrapper that rescales visual observations to uint8 pixels.

    Multiplies each visual tensor by 255 and casts to np.uint8, converting
    both `obs` and `obs_` of every behavior entry in place.
    """

    def observation(self, observation: Dict[str, Data]):
        def to_uint8(arr):
            # Scale assumed-[0, 1] floats up to the 0-255 pixel range.
            return np.asarray(arr * 255).astype(np.uint8)

        for entry in observation.values():
            entry.obs.visual.convert_(to_uint8)
            entry.obs_.visual.convert_(to_uint8)
        return observation
|
#COMMIT DAMN YOU
from Item import *
from Map_Object import *
import csv
class Item_Database(Map_Object):
    """Houses all items that will be accessible in the game."""

    def __init__(self):
        # Item list plus a running count of how many items are stored.
        self.items = []
        self.numberOfItems = 0

    def add_item(self, Item):
        """Add an item to the database and increment the counter."""
        self.items.append(Item)
        self.numberOfItems = self.numberOfItems + 1

    def remove_item(self, Item):
        """Remove the stored item whose number matches the given item's number."""
        find = Item.number
        for item in self.items:
            if item.get_number() == find:
                self.items.remove(item)
                # BUG FIX: only decrement when something was actually removed;
                # previously the counter dropped even when no match was found.
                self.numberOfItems = self.numberOfItems - 1
                break

    def display(self):
        """Print all of the stats for every item in the database."""
        # BUG FIX: Python-2-only print statements converted to print() calls
        # (the single-argument form behaves the same on Python 2 and 3).
        for number, item in enumerate(self.items):
            print("Item Number: %d" % number)
            print("Item Name: %s" % item.get_name())
            print("Item Health: %s" % item.get_health())
            print("Item Damage: %s" % item.get_damage())
            print("Item Type: %s" % item.get_type())

    def import_database(self):
        """Import all items from the Item_Database CSV file for easy access."""
        for line in open("Item_Database.csv"):
            # Renamed locals: `type` and `dir` shadowed builtins in the original.
            number, name, health, damage, image, item_type, buygold, sellgold = line.split(",")
            sellgold = int(sellgold.rstrip())
            image_path = 'images/' + item_type + '.png'
            newItem = Item(number, name, health, damage, image_path, item_type, buygold, sellgold)
            self.add_item(newItem)
class Inventory(Item_Database):#This class is will be used by the Player and NPC class to store all picked up items and tradeable items
    def __init__(self):#Sets a max number of items that a player/NPC can hold and keeps track of the items
        self.items=[]
        self.numberOfItems=0
        self.maxNumberOfItems=6
    # Overrides the base-class add_item to enforce the capacity limit.
    def add_item(self,Item):#This function adds an item to the current list of items
        if self.numberOfItems!=self.maxNumberOfItems:#checks to see if the inventory is full
            self.items.append(Item)
            self.numberOfItems=self.numberOfItems+1
        else:#Return an error if the inventory is full
            print "Maximum number of items reached, remove an item and try again."
class Loot(Item_Database):#This class is used by monsters and will allow the player to obtain items
    def remove_item(self, Item):
        """Remove every occurrence of *Item*, decrementing the counter.

        Bug fix: the loop now iterates over a copy of ``self.items``;
        removing from a list while iterating it directly skips the
        element that follows each removal, so adjacent duplicates were
        left behind.
        """
        for item in list(self.items):
            if item==Item:
                self.items.remove(item)
                self.numberOfItems=self.numberOfItems-1
class Equipment(Item_Database):#This class is used by the Player class to add the bonuses for the player stats
    #Boolean slot-occupancy flags.  Declared as class-level defaults;
    #assignments through `self` below create per-instance shadows.
    hasHead=False
    hasShoulder=False
    hasChest=False
    hasHands=False
    hasLegs=False
    hasFeet=False
    has1h=False
    hasShield=False
    has2h=False
    def check_slot(self,slot):
        """Return True when an item of type *slot* can be equipped,
        False when the slot (or a conflicting weapon slot) is occupied.
        """
        slot=slot.rstrip()
        # Bug fix: these checks previously ran inside a
        # `for item in self.items` loop, so with nothing equipped the
        # loop body never executed and the flags were ignored.  The
        # occupancy flags alone decide availability.
        if slot == '1h' and (self.has1h or self.has2h):
            return False
        elif slot == '2h' and (self.has1h or self.has2h or self.hasShield):
            return False
        elif slot == 'shield' and (self.hasShield or self.has2h):
            return False
        elif (slot == 'head' and self.hasHead) or (slot == 'shoulders' and self.hasShoulder) or (slot== 'chest' and self.hasChest) or (slot=='hands' and self.hasHands) or (slot=='legs' and self.hasLegs) or (slot=='feet' and self.hasFeet):
            return False
        return True
    def remove_item(self,Item):
        """Unequip *Item*: clear its slot flag and drop it from the list."""
        type=Item.type
        #Before the item is removed, update the slot that it occupied to read as empty
        if type=='head':
            self.hasHead=False
        elif type=='shoulders':  # bug fix: was 'shoulder', which never matched add_item's 'shoulders'
            self.hasShoulder=False
        elif type=='chest':
            self.hasChest=False
        elif type=='hands':
            self.hasHands=False
        elif type=='legs':
            self.hasLegs=False
        elif type=='feet':
            self.hasFeet=False
        elif type=='1h':
            self.has1h=False
        elif type=='shield':
            self.hasShield=False
        elif type=='2h':
            self.has2h=False
        #Search through the item list and remove the item from the equipment
        for item in self.items:
            if item==Item:
                self.items.remove(item)
                break
    def add_item(self,Item):
        """Equip *Item*: append it and mark its slot type as occupied."""
        slot=Item.type
        self.items.append(Item)
        if slot=='head':
            self.hasHead=True
        elif slot=='shoulders':
            self.hasShoulder=True
        elif slot=='chest':
            self.hasChest=True
        elif slot=='hands':
            self.hasHands=True
        elif slot=='legs':
            self.hasLegs=True
        elif slot=='feet':
            self.hasFeet=True
        elif slot=='1h':
            self.has1h=True
        elif slot=='shield':
            self.hasShield=True
        elif slot=='2h':
            self.has2h=True
|
# -*- coding: utf-8 -*-
# This is a simple wrapper for running application
# $ python main.py
# $ gunicorn -w 4 -b 127.0.0.1:5000 main:app
from application import app
import application.views
if __name__ == '__main__':
    # Run Flask's built-in development server; use gunicorn in
    # production as shown in the header comment above.
    app.run()
|
# ========================
# Information
# ========================
# Direct Link: https://www.hackerrank.com/challenges/s10-geometric-distribution-1/problem
# Difficulty: Easy
# Max Score: 30
# Language: Python
# ========================
# Solution
# ========================
# Geometric distribution: probability that the first success occurs on
# exactly the C-th independent trial, given per-trial success chance A/B.
numerator, denominator = map(int, input().strip().split(' '))
target_trial = int(input())
p_success = numerator / denominator
probability = (1 - p_success) ** (target_trial - 1) * p_success
print(round(probability, 3))
|
#-*- coding:utf8-*-
import sys
class test:
    """Minimal context manager demonstrating the __enter__/__exit__ hooks."""
    def __enter__(self):
        # Announce entry and return self so `with test() as a` binds the
        # manager instance (previously nothing was returned, so `a` was
        # always None).
        print("enter")
        return self
    def __exit__(self,*args):
        # Runs on scope exit; returning None lets exceptions propagate.
        print ("exit")
# `a` is bound to whatever __enter__ returns; the enter/exit prints
# bracket the body, and "yes" prints after the manager has exited.
with test() as a:
    print "in with"
print "yes"
def mountain(n):
k = n-1
for row in range(0,n):
for column in range(0,k):
print(end=" ")
k = k-1
for column in range(0,row+1):
print("* ",end = " ")
print("\n")
# Read the desired row count from stdin and draw the triangle.
n1 = int(input())
mountain(n1)
|
import engine
import genetic_components.node as n
import tensorflow as tf
import pytest
import math
@pytest.fixture
def x_tensor():
    """10x20 float32 tensor where every entry equals its row index."""
    x_size = 10
    y_size = 20
    x_size_tensor = tf.range(x_size)
    x_size_tensor = tf.reshape(x_size_tensor, [-1,1])
    x_size_tensor = tf.tile(x_size_tensor, [1, y_size])
    x_tensor = tf.cast(x_size_tensor, tf.float32)
    return x_tensor
@pytest.fixture
def y_tensor():
    """10x20 float32 tensor where every entry equals its column index."""
    x_size = 10
    y_size = 20
    y_size_tensor = tf.range(y_size)
    y_size_tensor = tf.reshape(y_size_tensor, [1,-1])
    y_size_tensor = tf.tile(y_size_tensor, [x_size, 1])
    y_tensor = tf.cast(y_size_tensor, tf.float32)
    return y_tensor
def test_abs(x_tensor, y_tensor):
    """resolve_abs_node: |-1| == |1|; output keeps shape and float32 dtype."""
    foo = n.resolve_abs_node(tf.constant(-1, shape=[10,20]))
    bar = n.resolve_abs_node(tf.constant(1, shape=[10,20]))
    ree = n.resolve_abs_node(x_tensor)
    assert (run_tensor(foo) == run_tensor(bar)).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_add(x_tensor, y_tensor):
    """resolve_add_node: -1+1 == 1+(-1) == 0+0; shape and dtype preserved."""
    foo = n.resolve_add_node(tf.constant(-1, shape=[10,20]), tf.constant(1, shape=[10,20]))
    bar = n.resolve_add_node(tf.constant(1, shape=[10,20]), tf.constant(-1, shape=[10,20]))
    pan = n.resolve_add_node(tf.constant(0, shape=[10,20]), tf.constant(0, shape=[10,20]))
    assert (run_tensor(foo) == run_tensor(bar)).all()
    assert (run_tensor(foo) == run_tensor(pan)).all()
    ree = n.resolve_add_node(x_tensor, y_tensor)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_and(x_tensor, y_tensor):
    """resolve_and_node: 1&1==1, 0&1==0, and 13&13==13 (value-passing AND)."""
    foo = n.resolve_and_node(tf.constant(1, shape=[10,20]), tf.constant(1, shape=[10,20]))
    bar = n.resolve_and_node(tf.constant(0, shape=[10,20]), tf.constant(1, shape=[10,20]))
    pan = n.resolve_and_node(tf.constant(13, shape=[10,20]), tf.constant(13, shape=[10,20]))
    ree = n.resolve_and_node(x_tensor, y_tensor)
    assert (run_tensor(foo) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(13, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_cos(x_tensor, y_tensor):
    """resolve_cos_node: cos(0)==1, cos(pi)==-1, cos(pi/2)==0 after int cast."""
    foo = n.resolve_cos_node(tf.constant(0, shape=[10,20], dtype=tf.float32))
    bar = n.resolve_cos_node(tf.constant(math.pi, shape=[10,20], dtype=tf.float32))
    # cast to int32 truncates the tiny float error of cos(pi/2) toward 0
    pan = tf.cast(n.resolve_cos_node(tf.constant(math.pi / 2, shape=[10,20], dtype=tf.float32)), tf.int32)
    ree = n.resolve_cos_node(x_tensor)
    assert (run_tensor(foo) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(-1, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_div(x_tensor, y_tensor):
    """resolve_div_node: 13/13==1, and division by zero yields 0 (protected div)."""
    foo = n.resolve_div_node(tf.constant(13, shape=[10,20]), tf.constant(13, shape=[10,20]))
    bar = n.resolve_div_node(tf.constant(1, shape=[10,20]), tf.constant(0, shape=[10,20]))
    pan = n.resolve_div_node(tf.constant(0, shape=[10,20]), tf.constant(1, shape=[10,20]))
    assert (run_tensor(foo) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    ree = n.resolve_div_node(x_tensor, y_tensor)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_exp(x_tensor, y_tensor):
    """resolve_exp_node: e**0==1, e**1==e, e**-1==1/e; shape/dtype preserved."""
    foo = n.resolve_exp_node(tf.constant(0, shape=[10,20], dtype=tf.float32))
    bar = n.resolve_exp_node(tf.constant(1, shape=[10,20], dtype=tf.float32))
    pan = n.resolve_exp_node(tf.constant(-1, shape=[10,20], dtype=tf.float32))
    ree = n.resolve_exp_node(x_tensor)
    assert (run_tensor(foo) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(math.e, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(1 / math.e, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_if(x_tensor, y_tensor):
    """resolve_if_node: selects elementwise between two branches by a mask."""
    aux1 = tf.convert_to_tensor([[1.0, 2.0], [4.0, 5.0]])
    aux2 = tf.constant(3, shape=[2,2], dtype=tf.float32)
    # mask is (aux1 > aux2): picks 1 where true, 0 where false
    foo = n.resolve_if_node(tf.constant(1,shape=[2,2], dtype=tf.float32), tf.constant(0,shape=[2,2], dtype=tf.float32), tf.cast(tf.math.greater(aux1, aux2), tf.float32), 2, 2)
    assert (run_tensor(foo) == run_tensor(tf.convert_to_tensor([[0,0], [1,1]]))).all()
    ree = n.resolve_if_node(x_tensor, y_tensor, x_tensor, 10, 20)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_log(x_tensor, y_tensor):
    """resolve_log_node: ln(1)==0 and ln(e)==1; shape/dtype preserved."""
    foo = n.resolve_log_node(tf.constant(1, shape=[10,20], dtype=tf.float32))
    bar = n.resolve_log_node(tf.constant(math.e, shape=[10,20], dtype=tf.float32))
    ree = n.resolve_log_node(x_tensor)
    assert (run_tensor(foo) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_max(x_tensor, y_tensor):
    """resolve_max_node: elementwise maximum of the two operands."""
    foo = n.resolve_max_node(tf.constant(13, shape=[10,20]), tf.constant(13, shape=[10,20]))
    bar = n.resolve_max_node(tf.constant(1, shape=[10,20]), tf.constant(0, shape=[10,20]))
    aux1 = tf.convert_to_tensor([[1.0, 2.0], [4.0, 5.0]])
    aux2 = tf.constant(3, shape=[2,2], dtype=tf.float32)
    pan = n.resolve_max_node(aux1, aux2)
    assert (run_tensor(foo) == run_tensor(tf.constant(13, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.convert_to_tensor([[3,3],[4,5]]))).all()
    ree = n.resolve_max_node(x_tensor, y_tensor)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_mdist(x_tensor, y_tensor):
    """resolve_mdist_node: (0,2) and (1,1) both map to 1 for a 10x20 grid."""
    foo = n.resolve_mdist_node(tf.constant(0, shape=[10,20]), tf.constant(2, shape=[10,20]), 10, 20)
    bar = n.resolve_mdist_node(tf.constant(1, shape=[10,20]), tf.constant(1, shape=[10,20]), 10, 20)
    assert (run_tensor(foo) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    ree = n.resolve_mdist_node(x_tensor, y_tensor, 10, 20)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_min(x_tensor, y_tensor):
    """resolve_min_node: elementwise minimum of the two operands."""
    foo = n.resolve_min_node(tf.constant(13, shape=[10,20]), tf.constant(13, shape=[10,20]))
    bar = n.resolve_min_node(tf.constant(1, shape=[10,20]), tf.constant(0, shape=[10,20]))
    aux1 = tf.convert_to_tensor([[1.0, 2.0], [4.0, 5.0]])
    aux2 = tf.constant(3, shape=[2,2], dtype=tf.float32)
    pan = n.resolve_min_node(aux1, aux2)
    assert (run_tensor(foo) == run_tensor(tf.constant(13, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.convert_to_tensor([[1,2],[3,3]]))).all()
    ree = n.resolve_min_node(x_tensor, y_tensor)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_mod(x_tensor, y_tensor):
    """resolve_mod_node: 4%2==0 and 4%3==1; shape/dtype preserved."""
    foo = n.resolve_mod_node(tf.constant(4, shape=[10,20]), tf.constant(2, shape=[10,20]))
    bar = n.resolve_mod_node(tf.constant(4, shape=[10,20]), tf.constant(3, shape=[10,20]))
    assert (run_tensor(foo) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    ree = n.resolve_mod_node(x_tensor, y_tensor)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_mult(x_tensor, y_tensor):
    """resolve_mult_node: 13*1.2==15.6, x*0==0, x*1==x; shape/dtype preserved."""
    foo = n.resolve_mult_node(tf.constant(13, shape=[10,20], dtype=tf.float32), tf.constant(1.2, shape=[10,20]))
    bar = n.resolve_mult_node(tf.constant(1, shape=[10,20]), tf.constant(0, shape=[10,20]))
    pan = n.resolve_mult_node(tf.constant(3, shape=[10,20]), tf.constant(1, shape=[10,20]))
    assert (run_tensor(foo) == run_tensor(tf.constant(15.6, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(3, shape=[10,20]))).all()
    ree = n.resolve_mult_node(x_tensor, y_tensor)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_neg(x_tensor, y_tensor):
    """resolve_neg_node: -(1)==-1 and -(-1)==1; shape/dtype preserved."""
    foo = n.resolve_neg_node(tf.constant(1, shape=[10,20], dtype=tf.float32))
    bar = n.resolve_neg_node(tf.constant(-1, shape=[10,20], dtype=tf.float32))
    ree = n.resolve_neg_node(x_tensor)
    assert (run_tensor(foo) == run_tensor(tf.constant(-1, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_or(x_tensor, y_tensor):
    """resolve_or_node: 1|1==1, 0|1==1, 0|0==0, 13|13==13 (value-passing OR)."""
    foo = n.resolve_or_node(tf.constant(1, shape=[10,20]), tf.constant(1, shape=[10,20]))
    bar = n.resolve_or_node(tf.constant(0, shape=[10,20]), tf.constant(1, shape=[10,20]))
    tur = n.resolve_or_node(tf.constant(0, shape=[10,20]), tf.constant(0, shape=[10,20]))
    pan = n.resolve_or_node(tf.constant(13, shape=[10,20]), tf.constant(13, shape=[10,20]))
    ree = n.resolve_or_node(x_tensor, y_tensor)
    assert (run_tensor(foo) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(tur) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(13, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_pow(x_tensor, y_tensor):
    """resolve_pow_node: 0**1==0, 5**0==1, 3**-1==3 (negative exponent guarded), 3**2==9."""
    foo = n.resolve_pow_node(tf.constant(0, tf.float32, shape=[10,20]), tf.constant(1, tf.float32, shape=[10,20]))
    bar = n.resolve_pow_node(tf.constant(5, tf.float32, shape=[10,20]), tf.constant(0, tf.float32, shape=[10,20]))
    pan = n.resolve_pow_node(tf.constant(3, tf.float32, shape=[10,20]), tf.constant(-1, tf.float32, shape=[10,20]))
    tun = n.resolve_pow_node(tf.constant(3, tf.float32, shape=[10,20]), tf.constant(2, tf.float32, shape=[10,20]))
    assert (run_tensor(foo) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(3, shape=[10,20]))).all()
    assert (run_tensor(tun) == run_tensor(tf.constant(9, shape=[10,20]))).all()
    ree = n.resolve_pow_node(x_tensor, y_tensor)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_sign(x_tensor, y_tensor):
    """resolve_sign_node: sign(3)==1, sign(-4.3)==-1, sign(0)==0."""
    foo = n.resolve_sign_node(tf.constant(3, shape=[10,20], dtype=tf.float32))
    bar = n.resolve_sign_node(tf.constant(-4.3, shape=[10,20], dtype=tf.float32))
    pan = n.resolve_sign_node(tf.constant(-0, shape=[10,20], dtype=tf.float32))
    ree = n.resolve_sign_node(x_tensor)
    assert (run_tensor(foo) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(-1, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_sin(x_tensor, y_tensor):
    """resolve_sin_node: sin(0)==0, sin(pi)==0 and sin(pi/2)==1 after int cast."""
    foo = n.resolve_sin_node(tf.constant(0, shape=[10,20], dtype=tf.float32))
    # int32 cast truncates the tiny float error of sin(pi) toward 0
    bar = tf.cast(n.resolve_sin_node(tf.constant(math.pi, shape=[10,20], dtype=tf.float32)), tf.int32)
    pan = tf.cast(n.resolve_sin_node(tf.constant(math.pi / 2, shape=[10,20], dtype=tf.float32)), tf.int32)
    ree = n.resolve_sin_node(x_tensor)
    assert (run_tensor(foo) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_sqrt(x_tensor, y_tensor):
    """resolve_sqrt_node: sqrt(0)==0, sqrt(-1) guarded to 0, sqrt(6.25)==2.5."""
    foo = n.resolve_sqrt_node(tf.constant(0, tf.float32, shape=[10,20]), 10, 20)
    pan = n.resolve_sqrt_node(tf.constant(-1, tf.float32, shape=[10,20]), 10, 20)
    tun = n.resolve_sqrt_node(tf.constant(6.25, tf.float32, shape=[10,20]), 10, 20)
    assert (run_tensor(foo) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(tun) == run_tensor(tf.constant(2.5, shape=[10,20]))).all()
    ree = n.resolve_sqrt_node(x_tensor, 10, 20)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_sub(x_tensor, y_tensor):
    """resolve_sub_node: 13-13==0, 1-0==1, -1-(-2)==1; shape/dtype preserved."""
    foo = n.resolve_sub_node(tf.constant(13, shape=[10,20]), tf.constant(13, shape=[10,20]))
    bar = n.resolve_sub_node(tf.constant(1, shape=[10,20]), tf.constant(0, shape=[10,20]))
    pan = n.resolve_sub_node(tf.constant(-1, shape=[10,20]), tf.constant(-2, shape=[10,20]))
    assert (run_tensor(foo) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    ree = n.resolve_sub_node(x_tensor, y_tensor)
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_tan(x_tensor, y_tensor):
    """resolve_tan_node: tan(0)==0 and tan(pi/4)==1 after int cast;
    output keeps shape and float32 dtype."""
    bar = tf.cast(n.resolve_tan_node(tf.constant(0, shape=[10,20], dtype=tf.float32)), tf.int32)
    pan = tf.cast(n.resolve_tan_node(tf.constant(math.pi / 4, shape=[10,20], dtype=tf.float32)), tf.int32)
    # Bug fix: the shape/dtype probe previously called resolve_sin_node
    # (copy-paste from test_sin), leaving resolve_tan_node's output
    # shape and dtype untested.
    ree = n.resolve_tan_node(x_tensor)
    assert (run_tensor(bar) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def test_xor(x_tensor, y_tensor):
    """resolve_xor_node: 1^1==0, 0^1==1, 0^0==0, 1^0==1; shape/dtype preserved."""
    foo = n.resolve_xor_node(tf.constant(1, shape=[10,20]), tf.constant(1, shape=[10,20]))
    bar = n.resolve_xor_node(tf.constant(0, shape=[10,20]), tf.constant(1, shape=[10,20]))
    tur = n.resolve_xor_node(tf.constant(0, shape=[10,20]), tf.constant(0, shape=[10,20]))
    pan = n.resolve_xor_node(tf.constant(1, shape=[10,20]), tf.constant(0, shape=[10,20]))
    ree = n.resolve_xor_node(x_tensor, y_tensor)
    assert (run_tensor(foo) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(bar) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert (run_tensor(tur) == run_tensor(tf.constant(0, shape=[10,20]))).all()
    assert (run_tensor(pan) == run_tensor(tf.constant(1, shape=[10,20]))).all()
    assert ree.shape == (10,20)
    assert ree.dtype == tf.float32
def run_tensor(tensor):
    """Evaluate *tensor* in a throwaway TF1 session and return its value.

    The session is used as a context manager so it is closed even when
    ``sess.run`` raises (the previous explicit ``close()`` leaked the
    session on error).
    """
    with tf.compat.v1.Session() as sess:
        return sess.run(tensor)
def test_engine():
    """Smoke test: engine.engine(...) with a tiny configuration returns truthy."""
    assert engine.engine(5, 5, 3, 0.2, 0.9, [10,10], 0)
if __name__ == '__main__':
    # Build the same coordinate tensors as the fixtures above (the
    # fixtures are only injected by pytest, not usable here) and run the
    # engine smoke test directly.
    x_size = 10
    y_size = 20
    x_size_tensor = tf.range(x_size)
    x_size_tensor = tf.reshape(x_size_tensor, [-1,1])
    x_size_tensor = tf.tile(x_size_tensor, [1, y_size])
    x_tensor = tf.cast(x_size_tensor, tf.float32)
    y_size_tensor = tf.range(y_size)
    y_size_tensor = tf.reshape(y_size_tensor, [1,-1])
    y_size_tensor = tf.tile(y_size_tensor, [x_size, 1])
    y_tensor = tf.cast(y_size_tensor, tf.float32)
    test_engine()
import argparse
import os
import cv2
import sys
import random
import time
import _pickle as cPickle
import torch
import torch.nn.parallel
import torch.optim as optim
import torch.utils.data
from pointnet.seg_dataset_fus import PoseDataset
from pointnet.model_seg import FusionInstanceSeg
import torch.nn.functional as F
from tqdm import tqdm
import numpy as np
import tensorflow as tf
from lib.utils import load_depth, get_bbox
from lib.utils import setup_logger
sys.path.append(os.getcwd())
# Command-line configuration for instance-segmentation training/evaluation.
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', type=str, default='CAMERA', help='CAMERA or CAMERA+Real')
parser.add_argument('--rotate_to_center', type=int, default=1, help='rotate points to center')
parser.add_argument('--data_dir', type=str, default='dataset', help='data directory')
parser.add_argument('--n_pts', type=int, default=4096, help='number of points')
parser.add_argument('--img_size', type=int, default=192, help='cropped image size')
parser.add_argument('--n_cat', type=int, default=6, help='number of object categories')
parser.add_argument('--batchSize', type=int, default=64, help='input batch size')
parser.add_argument('--gpu', type=str, default='0', help='GPU to use')
parser.add_argument('--workers', type=int, help='number of data loading workers', default=8)
parser.add_argument('--model', type=str, default='', help='model path')
parser.add_argument('--start_epoch', type=int, default=1, help='which epoch to start')
parser.add_argument('--nepoch', type=int, default=75, help='number of epochs to train for')
parser.add_argument('--result_dir', type=str, default='seg/Real/real', help='directory to save train results')
parser.add_argument('--val_result_dir', type=str, default='seg/Real/real', help='directory to save train results')
opt = parser.parse_args()
# opt.dataset = 'CAMERA'
# opt.start_epoch = 75
# opt.model = 'results/camerafus_ss15_sp1200_pc75_bs64/seg_model_74.pth'
# opt.result_dir = 'results/camerafus_ss15_sp1200_pc75_bs64'
# NOTE(review): this always overrides the --val_result_dir command-line
# value -- confirm the hard-coded path is intentional.
opt.val_result_dir = 'results/eval_camera'
# dataset
train_dataset = PoseDataset(opt.dataset, 'train', opt.data_dir, opt.n_pts, opt.img_size, opt.rotate_to_center)
test_dataset = PoseDataset(opt.dataset, 'test', opt.data_dir, opt.n_pts, opt.img_size, opt.rotate_to_center)
print(len(train_dataset), len(test_dataset))
# ANSI escape helper: wraps a string so terminals print it in blue.
blue = lambda x: '\033[94m' + x + '\033[0m'
def train():
    """Train FusionInstanceSeg on the configured dataset.

    Each epoch samples a fixed-size subset of the training set (paged
    through `indices`), runs NLL-loss optimization, evaluates on a
    random validation subset, logs scalars to TensorBoard, and
    checkpoints the model to opt.result_dir.
    """
    os.environ['CUDA_VISIBLE_DEVICES'] = opt.gpu
    # TensorFlow is used only for TensorBoard summaries; cap its GPU
    # memory so it does not compete with PyTorch.
    physical_devices = tf.config.experimental.list_physical_devices('GPU')
    if len(physical_devices) > 0:
        for k in range(len(physical_devices)):
            tf.config.experimental.set_memory_growth(physical_devices[k], True)
            print('memory growth:', tf.config.experimental.get_memory_growth(physical_devices[k]))
    else:
        print("Not enough GPU hardware devices available")
    # set result directory
    if not os.path.exists(opt.result_dir):
        os.makedirs(opt.result_dir)
    tb_writer = tf.summary.create_file_writer(opt.result_dir)
    logger = setup_logger('train_log', os.path.join(opt.result_dir, 'log.txt'))
    logger.propagate = 0
    for key, value in vars(opt).items():
        logger.info(key + ': ' + str(value))
    classifier = FusionInstanceSeg(n_classes=opt.n_cat)
    if opt.model != '':
        classifier.load_state_dict(torch.load(opt.model))
    # global classifier
    classifier.cuda()
    # create optimizer; when resuming, restart at the decayed LR that the
    # step schedule would have reached by opt.start_epoch
    if opt.start_epoch == 1:
        optimizer = optim.Adam(classifier.parameters(), lr=0.001, betas=(0.9, 0.999))
        scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=15, gamma=0.5, last_epoch=-1)
    else:
        optimizer = optim.Adam([{'params':classifier.parameters(), 'initial_lr': 6.25e-5 }], lr=6.25e-5, betas=(0.9, 0.999))
        scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=15, gamma=0.5, last_epoch=opt.start_epoch-1)
    # start training
    st_time = time.time()
    if opt.dataset == 'CAMERA+Real':
        train_steps = 1200
        val_size = 2000
    else:
        train_steps = 1200 #trian list:623180 val list:46671
        val_size = 2000
    global_step = train_steps * (opt.start_epoch - 1)
    train_size = train_steps * opt.batchSize
    # `indices` is a shuffled pool of dataset indices; each epoch consumes
    # one `train_size` page and the pool is refilled when it runs low.
    indices = []
    page_start = -train_size
    for epoch in range(opt.start_epoch, opt.nepoch + 1):
        logger.info('Time {0}'.format(time.strftime("%Hh %Mm %Ss", time.gmtime(time.time() - st_time)) + \
                    ', ' + 'Epoch %02d' % epoch + ', ' + 'Training started'))
        # sample train subset
        page_start += train_size
        len_last = len(indices) - page_start
        if len_last < train_size:
            indices = indices[page_start:]
            if opt.dataset == 'CAMERA+Real':
                # CAMERA : Real = 3 : 1
                camera_len = train_dataset.subset_len[0]
                real_len = train_dataset.subset_len[1]
                real_indices = list(range(camera_len, camera_len+real_len))
                camera_indices = list(range(camera_len))
                n_repeat = (train_size - len_last) // (4 * real_len) + 1
                data_list = random.sample(camera_indices, 3*n_repeat*real_len) + real_indices*n_repeat
                random.shuffle(data_list)
                indices += data_list
            else:
                data_list = list(range(train_dataset.length))
                for i in range((train_size - len_last) // train_dataset.length + 1):
                    random.shuffle(data_list)
                    indices += data_list
            page_start = 0
        train_idx = indices[page_start:(page_start+train_size)]
        train_sampler = torch.utils.data.sampler.SubsetRandomSampler(train_idx)
        traindataloader = torch.utils.data.DataLoader(train_dataset, batch_size=opt.batchSize,
                                                      sampler=train_sampler, num_workers=opt.workers, pin_memory=True)
        for i, data in enumerate(traindataloader, 1):
            batch_data, batch_img, batch_label, batch_category, batch_choose_depth = data
            batch_one_hot_vec = F.one_hot(batch_category, opt.n_cat)
            batch_data = batch_data.transpose(2,1).float().cuda()
            batch_img = batch_img.cuda()
            batch_label = batch_label.float().cuda()
            batch_one_hot_vec = batch_one_hot_vec.float().cuda()
            batch_choose_depth = batch_choose_depth.cuda()
            optimizer.zero_grad()
            classifier = classifier.train()
            logits = classifier(batch_data, batch_img, batch_one_hot_vec, batch_choose_depth)
            # 3D Instance Segmentation PointNet Loss
            logits = F.log_softmax(logits.view(-1,2),dim=1)
            batch_label = batch_label.view(-1).long()
            loss = F.nll_loss(logits, batch_label)
            loss.backward()
            optimizer.step()
            logits_choice = logits.data.max(1)[1]
            correct = logits_choice.eq(batch_label.data).cpu().sum()
            global_step += 1
            # write results to tensorboard
            with tb_writer.as_default():
                tf.summary.scalar('learning_rate', optimizer.param_groups[0]['lr'], step=global_step)
                tf.summary.scalar('train_loss', loss.item(), step=global_step)
                # tf.summary.scalar('train_acc', correct.item()/float(opt.batchSize * opt.n_pts), step=global_step)
                tb_writer.flush()
            if i % 10 == 0:
                logger.info('epoch {0:<4d} Batch {1:<4d} Loss:{2:f}'.format(epoch, i, loss.item()))
                # print('[%d: %d/%d] train loss: %f accuracy: %f' % (epoch, i, train_steps, loss.item(), correct.item()/float(opt.batchSize * opt.n_pts)))
        scheduler.step()
        logger.info('>>>>>>>>----------Epoch {:02d} train finish---------<<<<<<<<'.format(epoch))
        # evaluate one epoch
        logger.info('Time {0}'.format(time.strftime("%Hh %Mm %Ss", time.gmtime(time.time() - st_time)) +
                    ', ' + 'Epoch %02d' % epoch + ', ' + 'Testing started'))
        val_loss = 0.0
        total_count = np.zeros((opt.n_cat,), dtype=int)
        val_acc = np.zeros((opt.n_cat,), dtype=float)
        # sample validation subset
        # val_size = 200
        val_batch_size = 1
        val_idx = random.sample(list(range(test_dataset.length)), val_size)
        val_sampler = torch.utils.data.sampler.SubsetRandomSampler(val_idx)
        val_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=val_batch_size, sampler=val_sampler,
                                                     num_workers=opt.workers, pin_memory=True)
        classifier = classifier.eval()
        for i, data in enumerate(val_dataloader, 1):
            batch_data, batch_img, batch_label, batch_category, batch_choose_depth = data
            batch_one_hot_vec = F.one_hot(batch_category, opt.n_cat)
            batch_data = batch_data.transpose(2,1).float().cuda()
            batch_img = batch_img.cuda()
            batch_label = batch_label.float().cuda()
            batch_one_hot_vec = batch_one_hot_vec.float().cuda()
            batch_choose_depth = batch_choose_depth.cuda()
            logits = classifier(batch_data, batch_img, batch_one_hot_vec, batch_choose_depth)
            logits = F.log_softmax(logits.view(-1,2),dim=1)
            batch_label = batch_label.view(-1).long()
            loss = F.nll_loss(logits, batch_label)
            # use choose_depth to remove repeated points
            choose_depth = batch_choose_depth.cpu().numpy()[0]
            _, choose_depth = np.unique(choose_depth, return_index=True)
            logits_choice = logits.data.max(1)[1][choose_depth]
            correct = logits_choice.eq(batch_label.data[choose_depth]).cpu().sum()
            acc = correct / len(logits_choice)
            cat_id = batch_category.item()
            val_acc[cat_id] += acc
            total_count[cat_id] += 1
            val_loss += loss.item()
            if i % 100 == 0:
                logger.info('epoch {0:<4d} Batch {1:<4d} Loss:{2}'.format(epoch, i, loss.item()))
                # print('[%d: %d/%d] %s loss: %f accuracy: %f' % (epoch, i, train_steps, blue('test'), loss.item(), correct.item()/float(val_batch_size * opt.n_pts)))
        # compute accuracy (NOTE(review): if a category never appears in
        # the sampled subset, total_count is 0 and this divides by zero)
        val_acc = 100 * (val_acc / total_count)
        val_loss = val_loss / val_size
        for i in range(opt.n_cat):
            logger.info('{:>8s} acc: {}'.format(test_dataset.cat_names[i], val_acc[i]))
        val_acc = np.mean(val_acc)
        with tb_writer.as_default():
            tf.summary.scalar('val_loss', val_loss, step=global_step)
            tf.summary.scalar('val_acc', val_acc, step=global_step)
            tb_writer.flush()
        logger.info('Epoch {0:02d} test average loss: {1:06f}'.format(epoch, val_loss))
        logger.info('Overall acc: {}'.format(val_acc))
        logger.info('>>>>>>>>----------Epoch {:02d} test finish---------<<<<<<<<'.format(epoch))
        torch.save(classifier.state_dict(), '%s/seg_model_%d.pth' % (opt.result_dir, epoch))
def test():
    """Evaluate segmentation mIoU per category on the test set and append
    the results to eval_logs.txt in opt.val_result_dir.

    NOTE(review): `global classifier` refers to a name that is never
    defined at module scope (train() keeps its classifier local), so
    calling test() as-is raises NameError -- confirm how this was run.
    NOTE(review): the loader here unpacks 4 values per batch and calls
    classifier with 2 arguments, while train() unpacks 5 and passes 4;
    this looks like stale code for an earlier dataset/model signature.
    """
    global classifier
    classifier.cuda()
    ## benchmark mIOU
    if not os.path.exists(opt.val_result_dir):
        os.makedirs(opt.val_result_dir)
    bottle_ious, bowl_ious, camera_ious, can_ious, laptop_ious, mug_ious = \
        [], [], [], [], [], []
    bottle_num, bowl_num, camera_num, can_num, laptop_num, mug_num = 0, 0, 0, 0, 0, 0
    testdataloader = torch.utils.data.DataLoader(test_dataset, batch_size=opt.batchSize,
                                                 shuffle=True, num_workers=int(opt.workers))
    classifier = classifier.eval()
    for i,data in tqdm(enumerate(testdataloader, 1)):
        batch_data, batch_label, batch_category, batch_choose_depth = data
        batch_one_hot_vec = F.one_hot(batch_category, opt.n_cat)
        batch_data = batch_data.transpose(2,1).float().cuda()
        batch_label = batch_label.float().cuda()
        batch_one_hot_vec = batch_one_hot_vec.float().cuda()
        logits = classifier(batch_data, batch_one_hot_vec)
        logits_choice = logits.data.max(2)[1]
        choose_depth = batch_choose_depth.numpy()
        logits_np = logits_choice.cpu().data.numpy()
        batch_label_np = batch_label.cpu().data.numpy()
        batch_category_np = batch_category.data.numpy()
        for j in range(batch_data.shape[0]):
            # keep only the first occurrence of each depth index to drop
            # duplicated sampled points before scoring
            _, choose_depth_tt = np.unique(choose_depth[j], return_index=True)
            # assert opt.n_pts == choose_depth_tt.shape[0]
            # choose_depth[j] = choose_depth_tt
            logits_np_tt = logits_np[j][choose_depth_tt]
            batch_label_np_tt = batch_label_np[j][choose_depth_tt]
            I = np.sum(np.logical_and(logits_np_tt == 1, batch_label_np_tt == 1))
            U = np.sum(np.logical_or(logits_np_tt == 1, batch_label_np_tt == 1))
            if U == 0:
                iou = 1 #If the union of groundtruth and prediction points is empty, then count part IoU as 1
            else:
                iou = I / float(U)
            cat = batch_category_np[j]
            if cat == 0:
                bottle_ious.append(iou)
                bottle_num += 1
            elif cat == 1:
                bowl_ious.append(iou)
                bowl_num += 1
            elif cat == 2:
                camera_ious.append(iou)
                camera_num += 1
            elif cat == 3:
                can_ious.append(iou)
                can_num += 1
            elif cat == 4:
                laptop_ious.append(iou)
                laptop_num += 1
            elif cat == 5:
                mug_ious.append(iou)
                mug_num += 1
    #save results
    fw = open('{0}/eval_logs.txt'.format(opt.val_result_dir), 'a')
    messages = []
    messages.append("mIOU for {}bottle : {}".format(bottle_num, np.mean(bottle_ious)))
    messages.append("mIOU for {}bowl : {}".format(bowl_num, np.mean(bowl_ious)))
    messages.append("mIOU for {}camera : {}".format(camera_num, np.mean(camera_ious)))
    messages.append("mIOU for {}can : {}".format(can_num, np.mean(can_ious)))
    messages.append("mIOU for {}laptop : {}".format(laptop_num, np.mean(laptop_ious)))
    messages.append("mIOU : {}".format(np.mean([np.mean(bottle_ious),np.mean(bowl_ious),\
        np.mean(camera_ious),np.mean(can_ious),np.mean(laptop_ious),np.mean(mug_ious)])))
    messages.append("mIOU for {}mug : {}".format(mug_num, np.mean(mug_ious)))
    for msg in messages:
        print(msg)
        fw.write(msg + '\n')
    fw.close()
if __name__ == '__main__':
    # Entry point: train by default; uncomment to run evaluation instead.
    train()
    # test()
import logging
import numpy as np
import config.sr_network_conf as base_config
from core_network.Network import *
__author__ = 'ptoth'
_LOGGER = logging.getLogger(__name__)
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
# Load up the training data
_LOGGER.info('Loading training data')
input_file = '../data_prepared/bach_goldberg_aria_10'
# X_train is a tensor of size (num_train_examples, num_timesteps, num_frequency_dims)
X_train_freq = np.load(input_file + '_x.npy')
# y_train is a tensor of size (num_train_examples, num_timesteps, num_frequency_dims)
y_train_freq = np.load(input_file + '_y.npy')
# X_mean is a matrix of size (num_frequency_dims,) containing the mean for each frequency dimension
X_mean_freq = np.load(input_file + '_mean.npy')
# X_var is a matrix of size (num_frequency_dims,) containing the variance for each frequency dimension
X_var_freq = np.load(input_file + '_var.npy')
_LOGGER.info('Finished loading training data')
config = base_config.get_config()
# Tell TensorFlow that the model will be built into the default Graph.
with tf.Graph().as_default():
network = SRNetwork(config['network'])
# merge all summaries
summary_op = tf.merge_all_summaries()
# Add an op to initialize the variables.
init_op = tf.initialize_all_variables()
# launching the model
with tf.Session() as sess:
# Run the init operation.
sess.run(init_op)
summary_writer = tf.train.SummaryWriter('summary', graph_def=sess.graph_def)
# Use the model
for j in range(1):
# for idx, sample in enumerate(X_train_freq):
network.run(sess, X_train_freq, y_train_freq, summary_op, summary_writer)
|
import requests
from bs4 import BeautifulSoup
def has_usage(info):
    """Return True when every child of *info* is either a <span> carrying a
    ``title`` attribute or a bare NavigableString (plain text).

    Used to identify the paragraph on verbformen.de that holds usage
    examples. Returns False when any child does not match or when
    inspecting a child raises (e.g. an object without ``has_attr``).
    """
    try:
        # Generator (not list) lets all() short-circuit on first mismatch.
        # NavigableString is compared via its repr because it is not
        # imported at module level.
        return all(
            (span.name == "span" and span.has_attr("title"))
            or str(type(span)) == "<class 'bs4.element.NavigableString'>"
            for span in info
        )
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        return False
def getFromVerben(word):
    """Scrape dictionary information for *word* from verbformen.de.

    Returns a dict with keys ``info``, ``word``, ``declension``,
    ``meaning``, ``usage`` and ``eng`` on success, ``None`` when the site
    reports no match, and ``{}`` when scraping fails.
    NOTE(review): callers must handle both None and {} -- the two "no
    data" returns are inconsistent but preserved for compatibility.
    """
    try:
        url = f"https://www.verbformen.de/?w={word}"
        r = requests.get(url)
        soup = BeautifulSoup(r.content, "html.parser")
        info = {
            "meaning": "",
            "usage": "",
            "declension": "",
            "eng": "",
        }
        # The site renders its "no results" message inside the first <i>.
        if ("Es wurden keine deutschen Wörter mit" in soup.find_all("i")[0].text):
            print("kein Wort wie", word, "in Verben.de")
            return
        totalinfo = soup.find_all("section", {"class": "rBox rBoxWht"})[0]
        info["info"] = totalinfo.find(
            "p", {"class": "rInf"}).text.replace("\n", " ").strip()
        info["word"] = totalinfo.find(
            "p", {"class": ["vGrnd", "rCntr"]}).text.replace("\n", " ").strip()
        info["declension"] = totalinfo.find(
            "p", {"class": "vStm rCntr"}).text.replace("\n", " ").strip()
        others = totalinfo.find(
            "div", {"class": "rAufZu"}).find_all("p")
        if others:
            for info_ in others:
                if (info_.find("span", {"lang": "en"})):
                    # Paragraph carrying the English translation.
                    info["eng"] = info_.find(
                        "span", {"lang": "en"}).text.replace("\n", " ").strip()
                elif (len(info_.find_all()) == 1 and info_.find_all()[0].name == "i"):
                    # A lone <i> child holds semicolon-separated meanings.
                    info["meaning"] = info_.find("i").text.split(";")
                elif (has_usage(info_)):
                    info["usage"] = info_.text.strip()
        return info
    except Exception:
        # Narrowed from a bare `except:`; network errors and page-layout
        # changes land here as a best-effort empty result.
        print(
            f"some error occured while scraping from https://www.verbformen.de/?w={word}")
        return {}
|
from assignment_5_wang_custom_knn_class import Custom_knn
import pandas as pd
import numpy as np
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix
from sklearn.preprocessing import StandardScaler
import matplotlib.pyplot as plt
from assignment_5_wang_utils import print_confusion_matrix, transform_trading_days_to_trading_weeks, make_trade, trading_strategy
def main():
    """Assignment 5: evaluate a custom kNN classifier on WMT weekly
    return/volatility labels (2018-2019) and compare trading strategies.

    Reads '<ticker>_weekly_return_volatility.csv' and
    'WMT_Labeled_Weeks_Self.csv' from the working directory; writes plot
    PNGs and prints the answers to questions 1-6.
    """
    ticker='WMT'
    file_name = '{}_weekly_return_volatility.csv'.format(ticker)
    file_name_self_labels = 'WMT_Labeled_Weeks_Self.csv'
    # Read from that file for answering our questions
    df = pd.read_csv(file_name, encoding='ISO-8859-1')
    df_2018 = df[df['Year'] == 2018]
    df_2019 = df[df['Year'] == 2019]
    scaler = StandardScaler()
    print('\nQuestion 1')
    X_2018 = df_2018[['mean_return', 'volatility']].values
    Y_2018 = df_2018[['Classification']].values
    X_2019 = df_2019[['mean_return', 'volatility']].values
    Y_2019 = df_2019[['Classification']].values
    # Need to scale the training data
    # NOTE(review): fit_transform is applied independently to 2018 and
    # 2019, so the test year is scaled by its own statistics rather than
    # the training year's -- confirm this is intended.
    X_2018_Scaled = scaler.fit_transform(X_2018)
    X_2019_Scaled = scaler.fit_transform(X_2019)
    error_rate_custom = {}
    error_rate = {}
    # The highest accuracy from our knn classifiers was k = 5
    for p in [1, 1.5, 2]:
        X_train, X_test, Y_train, Y_test = train_test_split(X_2018_Scaled, Y_2018, test_size=0.6, random_state=3)
        # Custom Classifier
        knn_custom_classifier = Custom_knn(number_neighbors_k=5, distance_parameter_p=p)
        knn_custom_classifier.fit(X_train, Y_train.ravel())
        prediction_custom = knn_custom_classifier.predict(X_test)
        # As a percentage
        error_rate_custom[p] = np.round(np.multiply(np.mean(prediction_custom != Y_test.T), 100), 2)
        # This is to validate that we are getting the same error rate across the KNN classifier as well
        # KNN Classifier
        knn_classifier = KNeighborsClassifier(n_neighbors=5, p=p)
        knn_classifier.fit(X_train, Y_train.ravel())
        prediction = knn_classifier.predict(X_test)
        # As a percentage
        error_rate[p] = np.round(np.multiply(np.mean(prediction != Y_test.T), 100), 2)
    print("Confirm that the error rate for both the custom and scipy classifiers are the same: {}".format(str(error_rate == error_rate_custom)))
    print("The error rate of the different p's are {}".format(error_rate_custom))
    # Accuracy = 100 - error rate, plotted against the distance parameter p.
    plt.plot(np.fromiter(error_rate_custom.keys(), dtype=float), np.subtract(100, np.fromiter(error_rate_custom.values(), dtype=float)))
    plt.title('P value vs Accuracy - Training 2018, Testing 2018')
    plt.xlabel('P value')
    plt.ylabel('Accuracy (%)')
    plt.savefig(fname='KNN_Classifiers_Q1')
    plt.show()
    plt.close()
    print('The P value of 2 gives the best accuracy of {}%'.format(float(100-error_rate_custom[2])))
    print('\nQuestion 2')
    print('I am repeating this with year 2 and using year 1 data to train.')
    error_rate_custom = {}
    for p in [1, 1.5, 2]:
        # Train on 2018 data
        knn_custom_classifier = Custom_knn(number_neighbors_k=5, distance_parameter_p=p)
        knn_custom_classifier.fit(X_2018_Scaled, Y_2018.ravel())
        prediction_custom = knn_custom_classifier.predict(X_2019_Scaled)
        np.set_printoptions(threshold=np.inf)
        # As a percentage
        error_rate_custom[p] = np.round(np.multiply(np.mean(prediction_custom != Y_2019.T), 100), 2)
    print("The error rate of the different p's are {}".format(error_rate_custom))
    print('The P value of 1 and 2 give the best accuracy of {}%'.format(float(100-error_rate_custom[2])))
    plt.plot(np.fromiter(error_rate_custom.keys(), dtype=float), np.subtract(100, np.fromiter(error_rate_custom.values(), dtype=float)))
    plt.title('P value vs Accuracy - Training 2018, Testing 2019')
    plt.xlabel('P value')
    plt.ylabel('Accuracy (%)')
    plt.savefig(fname='KNN_Classifiers_Q2')
    plt.show()
    plt.close()
    print('Using 2018 data to test 2019 showed slightly higher accuracy. Changing the distance metric between Manhattan and Euclidean did ')
    print('not seem to make a difference in clustering label selection. Minkowski distance showed a slightly lower accuracy.')
    print('\nQuestion 3')
    # Train on 2018 data
    knn_custom_classifier = Custom_knn(number_neighbors_k=5, distance_parameter_p=1.5)
    knn_custom_classifier.fit(X_2018_Scaled, Y_2018.ravel())
    prediction_custom = knn_custom_classifier.predict(X_2019_Scaled)
    print('Labels for 2019')
    print(prediction_custom)
    # Pick two points with different labels in 2019
    # Week 11 is GREEN and Week 1 is RED
    print('Label for Week 11 is Green')
    print('The graph presented shows a majority of green local points')
    knn_custom_classifier.draw_decision_boundary(X_2019_Scaled[10])
    print('Label for Week 1 is Red')
    print('The graph presented shows a majority of red local points')
    knn_custom_classifier.draw_decision_boundary(X_2019_Scaled[0])
    print('\nQuestion 4 and Question 5')
    print('2019 is predicted with 2018 trained data.')
    for p in [1, 1.5, 2]:
        # Train on 2018 data
        knn_custom_classifier = Custom_knn(number_neighbors_k=5, distance_parameter_p=p)
        knn_custom_classifier.fit(X_2018_Scaled, Y_2018.ravel())
        prediction_custom = knn_custom_classifier.predict(X_2019_Scaled)
        confusion_matrix_array = confusion_matrix(Y_2019, prediction_custom)
        confusion_matrix_df = pd.DataFrame(confusion_matrix_array, columns= ['Predicted: GREEN', 'Predicted: RED'], index=['Actual: GREEN', 'Actual: RED'])
        print('Confusion matrix for p = {}'.format(p))
        print(confusion_matrix_df)
        print_confusion_matrix(Y_2019, confusion_matrix_df)
    print('For question 5, there are significant differences in true positives vs. true negatives. Predicted GREEN ')
    print('and actual GREEN values show almost no accuracy, which indicates that this method is not particularly good at predicting making trades.')
    print('It does however, show better accuracy for weeks to not trade. The different methods don\'t show significantly different accuracy, ')
    print('and the true positive rate remains low regardless of distance calculation.')
    print('\nQuestion 6')
    # Import the CSV necessary for 2019 data
    df = pd.read_csv(file_name_self_labels, encoding='ISO-8859-1')
    df_trading_weeks = transform_trading_days_to_trading_weeks(df)
    trading_weeks_2019 = df_trading_weeks[df_trading_weeks['Year'] == '2019']
    trading_weeks_2019.reset_index(inplace=True)
    # Baseline strategy: stay in the market every week.
    buy_and_hold = np.full(len(trading_weeks_2019.index), 'GREEN')
    for p in [1, 1.5, 2]:
        # Train on 2018 data
        # NOTE(review): unlike Q1-Q5 this fits on the UNscaled features --
        # confirm whether scaling was intentionally skipped here.
        knn_custom_classifier = Custom_knn(number_neighbors_k=5, distance_parameter_p=p)
        knn_custom_classifier.fit(X_2018, Y_2018.ravel())
        prediction_custom = knn_custom_classifier.predict(X_2019)
        # Add columns for each of the different clustering methods
        trading_weeks_2019.insert(len(trading_weeks_2019.columns), "Predicted Labels {}".format(p), prediction_custom, allow_duplicates=True)
    trading_weeks_2019.insert(len(trading_weeks_2019.columns), "Buy and Hold", buy_and_hold, allow_duplicates=True)
    print('Trading Strategy for 2019 for $100 starting cash:')
    print('Trading strategy was based on the one created in Assignment 3')
    print('With p = 1')
    predicted_trading_df = trading_strategy(trading_weeks_2019, 'Predicted Labels 1')
    print('${}'.format(predicted_trading_df[['Balance']].iloc[-1].values[0]))
    print('With p = 1.5')
    predicted_trading_df = trading_strategy(trading_weeks_2019, 'Predicted Labels 1.5')
    print('${}'.format(predicted_trading_df[['Balance']].iloc[-1].values[0]))
    print('With p = 2')
    predicted_trading_df = trading_strategy(trading_weeks_2019, 'Predicted Labels 2')
    print('${}'.format(predicted_trading_df[['Balance']].iloc[-1].values[0]))
    print('Buy and Hold')
    predicted_trading_buy_and_hold = trading_strategy(trading_weeks_2019, "Buy and Hold")
    print('${}'.format(predicted_trading_buy_and_hold[['Balance']].iloc[-1].values[0]))
    print('The best trading strategy is still buy and hold.')
if __name__ == "__main__":
main() |
import datetime

# Compute the number of days between two calendar dates.
day1= (2014,7,2)
day2= (2014,7,11)
(y1,m1,d1)= day1
day1=datetime.datetime(y1,m1,d1)
(y2,m2,d2)= day2
day2=datetime.datetime(y2,m2,d2)
# Bug fix: subtracting day-of-year values (strftime('%j')) breaks whenever
# the two dates fall in different years. timedelta subtraction is correct
# for any pair of dates and gives the same result (9) here.
print((day2 - day1).days)
|
'''
pipe_event
==========
This module provides a Event class which behaves just like threading.Event
but is based on two pipes created using os.pipe() functions.
Before Python 3.3, monotonic time is not introduced so adjusting system
clock may affect Event.wait() function if specific timeout is set.
Following notes can be found in PEP 0418:
"If a program uses the system time to schedule events or to implement
a timeout, it may fail to run events at the right moment or stop the
timeout too early or too late when the system time is changed manually
or adjusted automatically by NTP."
This module demonstrates an alternative Event implementation on Unix-like
systems which is not affected by the above issue.
'''
import os
import fcntl
import select
import threading
class Event:
    """A threading.Event work-alike backed by an os.pipe() pair.

    The pipe holds a byte while the event is set; wait() select()s on the
    read end, so its timeout is relative and therefore immune to system
    clock adjustments (the motivation described in the module docstring).
    Unix-only (uses fcntl).
    """

    def __init__(self):
        r_fd, w_fd = os.pipe()  # create the pipes
        # Make the read end non-blocking so clear()/is_set() never stall.
        fl = fcntl.fcntl(r_fd, fcntl.F_GETFL)
        fcntl.fcntl(r_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
        # Unbuffered binary file objects wrapping the raw descriptors.
        self.r_pipe = os.fdopen(r_fd, 'rb', 0)
        self.w_pipe = os.fdopen(w_fd, 'wb', 0)
        self.lock = threading.Lock()  # guards set()/clear() on the pipes

    def __del__(self):
        self.r_pipe.close()
        self.w_pipe.close()

    def is_set(self):
        return self.wait(0)  # just poll the pipe (zero-timeout select)

    def isSet(self):
        # Camel-case alias kept for threading.Event API compatibility.
        return self.is_set()

    def set(self):
        """Mark the event as set (write one byte into the pipe)."""
        # `with` releases the lock on success AND on exception, replacing
        # the original manual acquire/try/release/raise sequence.
        with self.lock:
            if not self.is_set():
                self.w_pipe.write(b'\n')

    def clear(self):
        """Reset the event by draining the pipe."""
        with self.lock:
            try:
                # Non-blocking read of whatever is buffered; it may raise
                # (or return None) when the pipe is already empty.
                self.r_pipe.read()
            except Exception:
                # Narrowed from a bare `except:`; an empty pipe is fine.
                pass

    def wait(self, timeout=None):
        """Return True if the event is set within *timeout* seconds
        (None blocks indefinitely)."""
        ret = select.select([self.r_pipe], [], [], timeout)[0]
        return len(ret) > 0
|
##
# this file is used to operate some command in server
#
# __author__: chuxiaokai
# data: 2016/3/28
import os
from app.models import *
"""
some operation on server
"""
class Server(object):
    """Wraps the shell/docker commands used to manage container machines
    and records allocations in the application database.
    """

    ip = "127.0.0.1"  # default ip, replaced by the detected address in __init__
    # hash_id = 0

    def __init__(self):
        """Detect and store this server's LAN IP via ifconfig."""
        # get the server ip
        return_info = (os.popen('ifconfig|(grep "net addr" & grep "255.255.255.0")')).readlines()
        if len(return_info) == 1:
            self.ip = (return_info[0].split('net addr:')[1]).split(' ')[0]
        else:
            print('Failed find the server ip')

    def init_machine(self, image_id):
        """Start a docker container from *image_id* with sshd running.

        :return: (container_id, password, container_ip); the password is
                 the image's baked-in default '123456'.
        """
        os.system("docker run -it -d=true '%s' /bin/bash" % image_id)  # create a machine
        container_id = (os.popen('docker ps -l -q')).readlines()[0].split('\n')[0]  # get the container's id
        container_ip = (os.popen('docker inspect --format="{{.NetworkSettings.IPAddress}}" %s' % container_id)).readlines()[0]
        os.system('docker exec %s service sshd start' % container_id)  # start the ssh service
        return container_id, "123456", container_ip

    def kill_machine(self, container_id):
        """Stop a docker container and delete it."""
        os.system("docker kill %s" % container_id)
        os.system("docker rm %s" % container_id)
        return True

    def exec_shell(self, shell_path, param, state):
        """Run a shell script with arguments.

        :param shell_path: path of the script to run with bash
        :param param: list of arguments when state == 'cluster',
                      a single string otherwise
        :param state: 'cluster' or anything else (single-argument mode)
        :return: True when the script exits 0, False otherwise
        """
        if state == 'cluster':
            # Join the argument list instead of string-appending in a loop.
            shell = "bash " + shell_path + ' ' + ' '.join(param)
            print(shell)
            return os.system(shell) == 0
        else:
            shell = "bash " + shell_path + ' ' + param
            return os.system(shell) == 0

    def get_machine_state(self, container_id):
        """Report CPU/disk/memory/idle load of a container.

        :return: dict of load info, or False when the report script is missing.
        """
        # Bug fix: the original tested `if os.path.exists('shell/report.sh)')`
        # -- the condition was inverted AND the path had a stray ')' plus the
        # wrong prefix, so the "missing script" guard could never trigger.
        if not os.path.exists('app/shell/report.sh'):
            print("shell/report.sh is not found")
            return False
        else:
            get_ret = (os.popen('bash app/shell/report.sh "%s"' % container_id)).readlines()
            ret_info = {'cpu info': get_ret[0], 'disk info': get_ret[1], 'memory info': get_ret[2], 'IDLE info': get_ret[3]}
            print(ret_info)
            return ret_info

    def install_software(self, user_name, shell_path, src_name, map, num_node):
        """Provision containers for *src_name* and record them in the DB.

        :param user_name: owner of the allocation
        :param shell_path: install script (currently not executed; see the
                           commented-out exec_shell calls)
        :param src_name: software name being installed
        :param map: 'cluster' for a multi-node setup, anything else single
        :param num_node: number of containers in cluster mode
        """
        if map == 'cluster':
            containers = []
            container_ips = []
            for i in range(num_node):
                container_id, passwd, container_ip = self.init_machine('666cb2f7a158')
                containers.append(container_id)
                container_ips.append(container_ip)
            # self.exec_shell(shell_path, containers, state='cluster')
            # write in the db
            # write table vm_machine
            for i in range(num_node):
                new_mc = VM_machine(mc_id=containers[i], user=user_name, apply_info=str(user_name)+'_'+str(src_name), state='ON')
                db.session.add(new_mc)
            # write table user: append this allocation to the user's source_info
            user = db.session.query(User).filter(User.user==user_name).first()
            source_info = user.source_info
            source_info = source_info + str(src_name) + ': ' + str(num_node) + 'nodes-> '+container_ips[0]+'(mgmd), ' + ', '.join(container_ips[1:]) + ';'
            db.session.query(User).filter(User.user==user_name).update({User.source_info: source_info})
            db.session.commit()
        else:
            container_id, passwd, container_ip = self.init_machine('ff416b30c157')
            # self.exec_shell(shell_path, container_id, state='single')
            string = user_name + '_' + src_name
            new_mc = VM_machine(mc_id=container_id, user=user_name, apply_info=string, state='ON')
            db.session.add(new_mc)
            user = db.session.query(User).filter(User.user==user_name).first()
            source_info = user.source_info
            source_info = source_info + str(src_name) + '-> ' + container_ip + ';'
            db.session.query(User).filter(User.user==user_name).update({User.source_info:source_info})
            db.session.commit()
|
from skmultiflow.trees import HAT, RegressionHAT
from decai.simulation.contract.classification.scikit_classifier import SciKitClassifierModule
class DecisionTreeModule(SciKitClassifierModule):
    """Module wrapping a Hoeffding Adaptive Tree model.

    Builds a RegressionHAT when ``regression`` is True, otherwise a HAT
    classifier, and hands the factory to the SciKit classifier module.
    """

    def __init__(self, regression=False):
        # Select the tree variant up front; each factory builds a fresh
        # model with default hyper-parameters on every call.
        if regression:
            def model_initializer():
                return RegressionHAT()
        else:
            def model_initializer():
                return HAT()
        super().__init__(_model_initializer=model_initializer)
|
import json
import requests

# Collect the first 1000 movies resolvable from TMDB and dump them to disk.
api_key = input("Your API key again: ")

films = []
i = 0
while len(films) < 1000:
    response = requests.get("https://api.themoviedb.org/3/movie/{id_f}?api_key={api_key}&language=en".format(api_key=api_key, id_f=i))
    # TMDB error payloads carry a "status_code" field; skip those ids.
    if "status_code" not in response.text:
        films.append(json.loads(response.text))
    i += 1

# Bug fix: use a context manager instead of manual open()/close() so the
# file is closed even if serialization raises.
with open("films.json", "w") as f:
    json.dump(films, f)
|
class Car:
    """Record describing a car.

    Equality -- and now hashing -- is based solely on the object id, so two
    Car objects with the same id compare equal regardless of other fields.
    """

    def __init__(self, objectId, licenseNumber, make, model):
        self._id = objectId
        self._license = licenseNumber
        self._make = make
        self._model = model

    @property
    def id(self):
        return self._id

    @property
    def license(self):
        return self._license

    @property
    def make(self):
        return self._make

    @property
    def model(self):
        return self._model

    def __eq__(self, z):
        # Idiomatic negative isinstance test (was `isinstance(...) == False`).
        if not isinstance(z, Car):
            return False
        return self.id == z.id

    def __hash__(self):
        # Bug fix: defining __eq__ without __hash__ made Car unhashable
        # (unusable in sets / as dict keys). Hash on the same field that
        # equality compares, keeping the eq/hash contract consistent.
        return hash(self.id)

    def __str__(self):
        return "Id: " + str(self.id) + ", License: " + self.license + ", Car type: " + self.make + ", " + self.model

    def __repr__(self):
        return str(self)
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.views.generic import CreateView, DetailView, UpdateView, DeleteView
from webapp.models import Goal, Project
from webapp.forms import GoalForm
class GoalView(LoginRequiredMixin, DetailView):
    """Display a single Goal; requires login.

    The original defined a get_context_data override that only delegated
    to super() -- dead code removed; DetailView's default is identical.
    """
    template_name = 'goal/goal_view.html'
    model = Goal
class GoalCreateView(PermissionRequiredMixin, CreateView):
    """Create a Goal inside the Project addressed by the URL pk.

    Requires the 'webapp.add_goal' permission AND membership in the
    project's user set. (The dead commented-out form_valid variant has
    been removed.)
    """
    model = Goal
    form_class = GoalForm
    template_name = 'goal/goal_create.html'
    permission_required = 'webapp.add_goal'

    def has_permission(self):
        # Re-fetches the project so this check also works when called
        # outside the dispatch() flow (e.g. from tests).
        self.project = get_object_or_404(Project, pk=self.kwargs.get('pk'))
        return super().has_permission() and self.request.user in self.project.user.all()

    def dispatch(self, request, *args, **kwargs):
        self.project = get_object_or_404(Project, pk=self.kwargs.get('pk'))
        return super().dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        kwargs['project'] = self.project
        kwargs['user'] = self.request.user
        return super().get_context_data(**kwargs)

    def form_valid(self, form):
        # Attach the parent project before CreateView saves the instance.
        form.instance.project = self.project
        return super().form_valid(form)

    def get_success_url(self):
        return reverse('webapp:project_view', kwargs={'pk': self.object.project.pk})
class GoalUpdateView(PermissionRequiredMixin, UpdateView):
    """Edit an existing Goal; restricted to members of its project."""
    model = Goal
    template_name = 'goal/goal_update.html'
    form_class = GoalForm
    permission_required = 'webapp.change_goal'

    def has_permission(self):
        # Allowed only when the Django permission holds AND the requester
        # belongs to the goal's project.
        project_members = self.get_object().project.user.all()
        return super().has_permission() and self.request.user in project_members

    def get_success_url(self):
        # Back to the parent project's detail page.
        return reverse('webapp:project_view', kwargs={'pk': self.object.project.pk})
class GoalDeleteView(PermissionRequiredMixin, DeleteView):
    """Delete a Goal; restricted to members of its project."""
    model = Goal
    template_name = 'goal/goal_delete.html'
    permission_required = 'webapp.delete_goal'

    def has_permission(self):
        # Allowed only when the Django permission holds AND the requester
        # belongs to the goal's project.
        project_members = self.get_object().project.user.all()
        return super().has_permission() and self.request.user in project_members

    def get(self, request, *args, **kwargs):
        # NOTE(review): deleting on GET skips the confirmation template and
        # is exposed to CSRF via plain links/prefetch -- confirm intended.
        return self.delete(request, *args, **kwargs)

    def get_success_url(self):
        # Back to the parent project's detail page.
        return reverse('webapp:project_view', kwargs={'pk': self.object.project.pk})
|
from django.shortcuts import render
from pmpportal.models import Registration
def mentors(request):
    """Render the mentor-cards page listing every registered mentor."""
    context = {'mentors': Registration.objects.all()}
    return render(request, 'mentorcards.html', context)
|
import numpy as np
import copy
from sklearn.preprocessing import LabelBinarizer
from sklearn.metrics import accuracy_score
class OneVsOneClassifier:
    """Multiclass wrapper around a binary probabilistic estimator.

    NOTE(review): despite the name, the strategy implemented is
    one-vs-REST (one binarized label column per class via LabelBinarizer),
    not one-vs-one pairs.
    """

    def __init__(self, estimator = None):
        self.base_estimator_ = estimator
        self.binary = True  # assume binary until init_params sees >2 classes

    def init_params(self, X, Y):
        """Inspect the labels; for >2 classes build the binarized targets."""
        if len(np.unique(Y)) > 2:
            self.binary = False
            self.lb = LabelBinarizer(sparse_output=False).fit(Y)
            self.Y_ = self.lb.transform(Y)

    def fit(self, X, Y):
        self.init_params(X, Y)
        if self.binary:
            return self.base_estimator_.fit(X, Y)
        # One binary model per class column; deepcopy snapshots the fitted
        # state because the same base estimator object is refit each time.
        self.base_model_list = [copy.deepcopy(self.base_estimator_.fit(X, Y_class)) for Y_class in self.Y_.T]
        return self

    def predict_proba(self, X):
        if self.binary:
            # Bug fix: the original unconditionally used base_model_list,
            # which is never created in the binary case (AttributeError).
            return self.base_estimator_.predict_proba(X)
        Y_pred_proba = np.array(np.hstack([model.predict_proba(X)[:, -1].reshape(-1,1) for model in self.base_model_list]))
        # Normalize each row so the per-class scores sum to 1.
        row_sum = Y_pred_proba.sum(axis = 1).reshape(-1,1)
        return Y_pred_proba/row_sum

    def predict(self, X):
        if self.binary:
            return self.base_estimator_.predict(X)
        Y_pred_proba = self.predict_proba(X)
        # Bug fix: map the argmax column index back to the actual class
        # label via LabelBinarizer.classes_ (identical result to the old
        # bare argmax when labels are exactly 0..k-1).
        return self.lb.classes_[Y_pred_proba.argmax(axis = 1)]

    def score(self, X, Y):
        """Accuracy of predict(X) against the true labels Y."""
        Y_pred = self.predict(X)
        return accuracy_score(Y, Y_pred)
from funcoes import maior
from funcoes import somaLista
from funcoes import mediaLista
from funcoes import valoresIguais
from funcoes import primeiroIgual
# Demo script exercising each helper from the funcoes package.
first_value = 5
second_value = 5
numbers = [3, 4, 6, 7]
mixed_values = [7, 12, 'dsa', 43, 6]
print(maior.maior(first_value, second_value))
print(somaLista.somaLista(numbers, 5))
print(mediaLista.mediaLista(numbers))
print(valoresIguais.valoresIguais(numbers, mixed_values))
print(primeiroIgual.primeiroIgual(numbers, mixed_values))
|
"""
Classes from the 'SwiftUI' framework.
"""
try:
    from rubicon.objc import ObjCClass
except (ValueError, ImportError):
    # Bug fix: the original caught only ValueError (raised by rubicon on
    # unsupported platforms), so a missing rubicon-objc install crashed
    # with ModuleNotFoundError. Either way, fall back to a stub that
    # resolves every class name to None.
    def ObjCClass(name):
        return None
def _Class(name):
try:
return ObjCClass(name)
except NameError:
return None
# Machine-generated bindings: each module attribute resolves its mangled
# Swift/ObjC class name through _Class at import time (None when the class
# is not available in the current runtime).
_TtC7SwiftUIP33_D03BD89F5A2D484C8BA01348D5E2C30219AllFinishedListener = _Class(
    "_TtC7SwiftUIP33_D03BD89F5A2D484C8BA01348D5E2C30219AllFinishedListener"
)
_TtC7SwiftUIP33_D03BD89F5A2D484C8BA01348D5E2C30218FunctionalListener = _Class(
    "_TtC7SwiftUIP33_D03BD89F5A2D484C8BA01348D5E2C30218FunctionalListener"
)
_TtC7SwiftUIP33_D03BD89F5A2D484C8BA01348D5E2C30212ListenerPair = _Class(
    "_TtC7SwiftUIP33_D03BD89F5A2D484C8BA01348D5E2C30212ListenerPair"
)
_TtC7SwiftUIP33_22A1D162CC670E67558243600080F90E11AnyStyleBox = _Class(
"_TtC7SwiftUIP33_22A1D162CC670E67558243600080F90E11AnyStyleBox"
)
_TtC7SwiftUIP33_E022700F2A5A9659A9FD9265A140252A13TextSizeCache = _Class(
"_TtC7SwiftUIP33_E022700F2A5A9659A9FD9265A140252A13TextSizeCache"
)
SceneBridge = _Class("SwiftUI.SceneBridge")
AnyWindowStyleStorageBase = _Class("SwiftUI.AnyWindowStyleStorageBase")
EmptyViewRendererHost = _Class("SwiftUI.EmptyViewRendererHost")
_TtC7SwiftUIP33_75C503F9FA0DAB6927D8027C1FEBACD211AnyStyleBox = _Class(
"_TtC7SwiftUIP33_75C503F9FA0DAB6927D8027C1FEBACD211AnyStyleBox"
)
_TtC7SwiftUIP33_2462DFFC835A6F4511AFEB231EB4B8C219__DictionaryDecoder = _Class(
"_TtC7SwiftUIP33_2462DFFC835A6F4511AFEB231EB4B8C219__DictionaryDecoder"
)
DictionaryDecoder = _Class("SwiftUI.DictionaryDecoder")
_TtC7SwiftUIP33_2462DFFC835A6F4511AFEB231EB4B8C219__DictionaryEncoder = _Class(
"_TtC7SwiftUIP33_2462DFFC835A6F4511AFEB231EB4B8C219__DictionaryEncoder"
)
_TtC7SwiftUIP33_2462DFFC835A6F4511AFEB231EB4B8C230__DictionaryReferencingEncoder = _Class(
"_TtC7SwiftUIP33_2462DFFC835A6F4511AFEB231EB4B8C230__DictionaryReferencingEncoder"
)
DictionaryEncoder = _Class("SwiftUI.DictionaryEncoder")
ScrollViewNode = _Class("SwiftUI.ScrollViewNode")
_TtCV7SwiftUI11DisplayListP33_1764B38507156E75394CBD4355B4CB6414ViewRasterizer = _Class(
"_TtCV7SwiftUI11DisplayListP33_1764B38507156E75394CBD4355B4CB6414ViewRasterizer"
)
AnyFallbackDelegateBox = _Class("SwiftUI.AnyFallbackDelegateBox")
SceneStorageTransformBox = _Class("SwiftUI.SceneStorageTransformBox")
_TtCC7SwiftUI18SceneStorageValuesP33_1700ED20D4EA891B02973E899ABDB4258AnyEntry = _Class(
"_TtCC7SwiftUI18SceneStorageValuesP33_1700ED20D4EA891B02973E899ABDB4258AnyEntry"
)
SceneStorageValues = _Class("SwiftUI.SceneStorageValues")
AnyWindowToolbarStyleStorageBase = _Class("SwiftUI.AnyWindowToolbarStyleStorageBase")
MainMenuItemHost = _Class("SwiftUI.MainMenuItemHost")
AnyResolvedPaint = _Class("SwiftUI.AnyResolvedPaint")
_TtC7SwiftUIP33_936A47782A7E2FBE97D58CDBAEB0277015ProgressWrapper = _Class(
"_TtC7SwiftUIP33_936A47782A7E2FBE97D58CDBAEB0277015ProgressWrapper"
)
_TtCV7SwiftUI11DisplayList16GraphicsRenderer = _Class(
"_TtCV7SwiftUI11DisplayList16GraphicsRenderer"
)
RBGraphicsContext = _Class("SwiftUI.RBGraphicsContext")
_TtC7SwiftUIP33_BE44ACA3C2CA04FDF50C9B05CC2C047625AnyOptionButtonCollection = _Class(
"_TtC7SwiftUIP33_BE44ACA3C2CA04FDF50C9B05CC2C047625AnyOptionButtonCollection"
)
MemoizedGraphicsDrawingCallback = _Class("SwiftUI.MemoizedGraphicsDrawingCallback")
_TtCC7SwiftUI17FileArchiveReaderP33_7F76DB0F2A61AB82522F124BF5C521A811UnmapBuffer = _Class(
"_TtCC7SwiftUI17FileArchiveReaderP33_7F76DB0F2A61AB82522F124BF5C521A811UnmapBuffer"
)
ArchiveReader = _Class("SwiftUI.ArchiveReader")
DataArchiveReader = _Class("SwiftUI.DataArchiveReader")
FileArchiveReader = _Class("SwiftUI.FileArchiveReader")
ArchiveWriter = _Class("SwiftUI.ArchiveWriter")
DataArchiveWriter = _Class("SwiftUI.DataArchiveWriter")
FileArchiveWriter = _Class("SwiftUI.FileArchiveWriter")
_TtC7SwiftUIP33_B619265B3CBBC7F42E2392FC185432F223MainMenuItemCoordinator = _Class(
"_TtC7SwiftUIP33_B619265B3CBBC7F42E2392FC185432F223MainMenuItemCoordinator"
)
_TtCV7SwiftUI4Path7PathBox = _Class("_TtCV7SwiftUI4Path7PathBox")
ChildIndexProjection = _Class("SwiftUI.ChildIndexProjection")
_TtC7SwiftUIP33_4B6F5E96359C1B6C6815EDE8FF79BA6514DynamicStorage = _Class(
"_TtC7SwiftUIP33_4B6F5E96359C1B6C6815EDE8FF79BA6514DynamicStorage"
)
_PreviewHost = _Class("SwiftUI._PreviewHost")
_TtC7SwiftUIP33_BA7DCAF3038F4A417E2627434298024727ScrollProxyScrollTestRunner = _Class(
"_TtC7SwiftUIP33_BA7DCAF3038F4A417E2627434298024727ScrollProxyScrollTestRunner"
)
ScrollTest = _Class("SwiftUI.ScrollTest")
WidgetBundleHost = _Class("SwiftUI.WidgetBundleHost")
UIBarItemTarget = _Class("SwiftUI.UIBarItemTarget")
RootViewDelegate = _Class("SwiftUI.RootViewDelegate")
_TtCV7SwiftUI11DisplayList20AccessibilityUpdater = _Class(
"_TtCV7SwiftUI11DisplayList20AccessibilityUpdater"
)
_TtC7SwiftUIP33_3734FCB8B87024BD212C6F4B89BF01BE9ViewCache = _Class(
"_TtC7SwiftUIP33_3734FCB8B87024BD212C6F4B89BF01BE9ViewCache"
)
_TtC7SwiftUIP33_3734FCB8B87024BD212C6F4B89BF01BE13ViewCacheItem = _Class(
"_TtC7SwiftUIP33_3734FCB8B87024BD212C6F4B89BF01BE13ViewCacheItem"
)
AnyTextModifier = _Class("SwiftUI.AnyTextModifier")
BoldTextModifier = _Class("SwiftUI.BoldTextModifier")
_TtC7SwiftUIP33_9EE948773C43B4E002A1A22214C71CBE32StylisticAlternativeTextModifier = _Class(
"_TtC7SwiftUIP33_9EE948773C43B4E002A1A22214C71CBE32StylisticAlternativeTextModifier"
)
_TtC7SwiftUIP33_9EE948773C43B4E002A1A22214C71CBE21UnderlineTextModifier = _Class(
"_TtC7SwiftUIP33_9EE948773C43B4E002A1A22214C71CBE21UnderlineTextModifier"
)
_TtC7SwiftUIP33_9EE948773C43B4E002A1A22214C71CBE25StrikethroughTextModifier = _Class(
"_TtC7SwiftUIP33_9EE948773C43B4E002A1A22214C71CBE25StrikethroughTextModifier"
)
DisplayLink = _Class("SwiftUI.DisplayLink")
_TtGC7SwiftUI13AnimatorStateGVS_14AnimatablePairGS1_V12CoreGraphics7CGFloatS3__GS1_S3_S3____ = _Class(
"_TtGC7SwiftUI13AnimatorStateGVS_14AnimatablePairGS1_V12CoreGraphics7CGFloatS3__GS1_S3_S3____"
)
_TtGC7SwiftUI15AnimatorBoxBaseGVS_14AnimatablePairGS1_V12CoreGraphics7CGFloatS3__GS1_S3_S3____ = _Class(
"_TtGC7SwiftUI15AnimatorBoxBaseGVS_14AnimatablePairGS1_V12CoreGraphics7CGFloatS3__GS1_S3_S3____"
)
_TtC7SwiftUIP33_D8F02AF14545BC8A4C2E0C65363F315316LayoutGestureBox = _Class(
"_TtC7SwiftUIP33_D8F02AF14545BC8A4C2E0C65363F315316LayoutGestureBox"
)
CGGraphicsContext = _Class("SwiftUI.CGGraphicsContext")
_TtC7SwiftUIP33_023AA827B8A8D39774F7A0C281455FEE24DynamicAnimationListener = _Class(
"_TtC7SwiftUIP33_023AA827B8A8D39774F7A0C281455FEE24DynamicAnimationListener"
)
AnimationBoxBase = _Class("SwiftUI.AnimationBoxBase")
_TtGC7SwiftUI12AnimationBoxVS_15SpringAnimation_ = _Class(
"_TtGC7SwiftUI12AnimationBoxVS_15SpringAnimation_"
)
_TtGC7SwiftUI12AnimationBoxVS_15BezierAnimation_ = _Class(
"_TtGC7SwiftUI12AnimationBoxVS_15BezierAnimation_"
)
_TtGC7SwiftUI11ObjectCacheVVS_5Color9_ResolvedaSo10CGColorRef_ = _Class(
"_TtGC7SwiftUI11ObjectCacheVVS_5Color9_ResolvedaSo10CGColorRef_"
)
_TtGC7SwiftUI20UIKitStatusBarBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
"_TtGC7SwiftUI20UIKitStatusBarBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtC7SwiftUIP33_30C09FF16BC95EC5173809B57186CAC316AsyncTransaction = _Class(
"_TtC7SwiftUIP33_30C09FF16BC95EC5173809B57186CAC316AsyncTransaction"
)
_TtC7SwiftUIP33_30C09FF16BC95EC5173809B57186CAC317GlobalTransaction = _Class(
"_TtC7SwiftUIP33_30C09FF16BC95EC5173809B57186CAC317GlobalTransaction"
)
_TtCV7SwiftUI12_ViewList_ID5Views = _Class("_TtCV7SwiftUI12_ViewList_ID5Views")
_TtCV7SwiftUI12_ViewList_ID11JoinedViews = _Class(
"_TtCV7SwiftUI12_ViewList_ID11JoinedViews"
)
_TtGCV7SwiftUI12_ViewList_ID6_Views_VS0_17ElementCollection_ = _Class(
"_TtGCV7SwiftUI12_ViewList_ID6_Views_VS0_17ElementCollection_"
)
_TtCV7SwiftUI11DisplayList11ViewUpdater = _Class(
"_TtCV7SwiftUI11DisplayList11ViewUpdater"
)
_TtGC7SwiftUI10MutableBoxVs6UInt32_ = _Class("_TtGC7SwiftUI10MutableBoxVs6UInt32_")
_ViewList_IndirectMap = _Class("SwiftUI._ViewList_IndirectMap")
_TtGC7SwiftUI20UIKitStatusBarBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
"_TtGC7SwiftUI20UIKitStatusBarBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI13ToolbarBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
"_TtGC7SwiftUI13ToolbarBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI20UIKitStatusBarBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
"_TtGC7SwiftUI20UIKitStatusBarBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI13ToolbarBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
"_TtGC7SwiftUI13ToolbarBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI20UIKitStatusBarBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__ = _Class(
"_TtGC7SwiftUI20UIKitStatusBarBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__"
)
_TtC7SwiftUIP33_C1C63C2F6F2B9F3EB30DD747F0605FBD14PreferenceNode = _Class(
"_TtC7SwiftUIP33_C1C63C2F6F2B9F3EB30DD747F0605FBD14PreferenceNode"
)
_TtC7SwiftUIP33_4FF627671B2E357BF8FD0A376089C04136AnyAccessibilityActionHandlerBoxBase = _Class(
"_TtC7SwiftUIP33_4FF627671B2E357BF8FD0A376089C04136AnyAccessibilityActionHandlerBoxBase"
)
# Machine-generated table of Objective-C runtime class bindings: each
# module-level name is bound to its class object via _Class(...) (defined
# earlier in this file).  The mangled "_Tt..." identifiers are Swift symbols
# as exposed to the Objective-C runtime.
# NOTE(review): this looks like class-dump output for SwiftUI (with Pyto app
# types mixed in) — confirm the generator; do not edit these entries by hand,
# regenerate the table instead.
_TtC7SwiftUIP33_F0D4BE429651399A5FAD2DF7DCDF699D14AnyBehaviorBox = _Class(
    "_TtC7SwiftUIP33_F0D4BE429651399A5FAD2DF7DCDF699D14AnyBehaviorBox"
)
_TtGC7SwiftUI11ObjectCacheVVS_5Color9_ResolvedCSo7UIColor_ = _Class(
    "_TtGC7SwiftUI11ObjectCacheVVS_5Color9_ResolvedCSo7UIColor_"
)
AnyImageProviderBox = _Class("SwiftUI.AnyImageProviderBox")
AnyTransitionBox = _Class("SwiftUI.AnyTransitionBox")
_TtCV7SwiftUI16DynamicContainer8ItemInfo = _Class(
    "_TtCV7SwiftUI16DynamicContainer8ItemInfo"
)
ResolvedStyledText = _Class("SwiftUI.ResolvedStyledText")
PreferenceBridge = _Class("SwiftUI.PreferenceBridge")
_TtCV7SwiftUI16_ViewListOutputs12ListModifier = _Class(
    "_TtCV7SwiftUI16_ViewListOutputs12ListModifier"
)
AnyFontBox = _Class("SwiftUI.AnyFontBox")
AnyTextStorage = _Class("SwiftUI.AnyTextStorage")
_TtC7SwiftUIP33_69EF06F9BDF62ECF39AC7E7A3D2BB90023ConcatenatedTextStorage = _Class(
    "_TtC7SwiftUIP33_69EF06F9BDF62ECF39AC7E7A3D2BB90023ConcatenatedTextStorage"
)
_TtC7SwiftUIP33_B2112F864572FAFE37EFB62AA5578C2615DateTextStorage = _Class(
    "_TtC7SwiftUIP33_B2112F864572FAFE37EFB62AA5578C2615DateTextStorage"
)
_TtC7SwiftUIP33_54048EA3D07F599FFD8EA97AC121D1F220FormatterTextStorage = _Class(
    "_TtC7SwiftUIP33_54048EA3D07F599FFD8EA97AC121D1F220FormatterTextStorage"
)
_TtC7SwiftUIP33_77FDDD0DEE03C82FE356902694BBAFDD21AttachmentTextStorage = _Class(
    "_TtC7SwiftUIP33_77FDDD0DEE03C82FE356902694BBAFDD21AttachmentTextStorage"
)
_TtCC7SwiftUI18ResolvedStyledTextP33_4EAA3873E044FE8466A2EF8771E1058D11TextStorage = _Class(
    "_TtCC7SwiftUI18ResolvedStyledTextP33_4EAA3873E044FE8466A2EF8771E1058D11TextStorage"
)
_TtC7SwiftUIP33_CE01D640DBD0DC505B3EBF59FEE0F62E20LocalizedTextStorage = _Class(
    "_TtC7SwiftUIP33_CE01D640DBD0DC505B3EBF59FEE0F62E20LocalizedTextStorage"
)
_ViewList_Subgraph = _Class("SwiftUI._ViewList_Subgraph")
_TtC7SwiftUIP33_5AC2D91303C60C06D15F8A51A12C2AF416TestableSubgraph = _Class(
    "_TtC7SwiftUIP33_5AC2D91303C60C06D15F8A51A12C2AF416TestableSubgraph"
)
_TtCV7SwiftUIP33_A96961F3546506F21D8995C6092F15B511AnyViewList4Item = _Class(
    "_TtCV7SwiftUIP33_A96961F3546506F21D8995C6092F15B511AnyViewList4Item"
)
_TtCV7SwiftUI12PropertyList7Tracker = _Class("_TtCV7SwiftUI12PropertyList7Tracker")
_TtGC7SwiftUI31AttributeInvalidatingSubscriberC7Combine25ObservableObjectPublisher_ = _Class(
    "_TtGC7SwiftUI31AttributeInvalidatingSubscriberC7Combine25ObservableObjectPublisher_"
)
_TtGC7SwiftUI20SubscriptionLifetimeC7Combine25ObservableObjectPublisher_ = _Class(
    "_TtGC7SwiftUI20SubscriptionLifetimeC7Combine25ObservableObjectPublisher_"
)
_TtC7SwiftUIP33_68550FF604D39F05971FE35A26EE75B013BoxVTableBase = _Class(
    "_TtC7SwiftUIP33_68550FF604D39F05971FE35A26EE75B013BoxVTableBase"
)
_TtGC7SwiftUI10MutableBoxGVs10DictionaryVs16ObjectIdentifierOVS_20DynamicPropertyCache6Fields__ = _Class(
    "_TtGC7SwiftUI10MutableBoxGVs10DictionaryVs16ObjectIdentifierOVS_20DynamicPropertyCache6Fields__"
)
AccessibilityRelationshipScope = _Class("SwiftUI.AccessibilityRelationshipScope")
_TtC7SwiftUIP33_A1807160ED1F4542128D3D0A34E611B620MatchedGeometryScope = _Class(
    "_TtC7SwiftUIP33_A1807160ED1F4542128D3D0A34E611B620MatchedGeometryScope"
)
AnyColorBox = _Class("SwiftUI.AnyColorBox")
_TtCV7SwiftUI12PropertyList7Element = _Class("_TtCV7SwiftUI12PropertyList7Element")
_TtCO7SwiftUI6UpdateP33_EA173074DA35FA471DC70643259B7E749TraceHost = _Class(
    "_TtCO7SwiftUI6UpdateP33_EA173074DA35FA471DC70643259B7E749TraceHost"
)
_TtGC7SwiftUI20UIKitStatusBarBridgeVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI20UIKitStatusBarBridgeVS_7AnyView_"
)
_TtGC7SwiftUI13ToolbarBridgeVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI13ToolbarBridgeVS_7AnyView_"
)
AnyViewStorageBase = _Class("SwiftUI.AnyViewStorageBase")
AnyLocationBase = _Class("SwiftUI.AnyLocationBase")
_TtGC7SwiftUI11AnyLocationGOS_19SelectionManagerBoxOs5Never__ = _Class(
    "_TtGC7SwiftUI11AnyLocationGOS_19SelectionManagerBoxOs5Never__"
)
_TtGC7SwiftUI11AnyLocationV4Pyto10SceneState_ = _Class(
    "_TtGC7SwiftUI11AnyLocationV4Pyto10SceneState_"
)
_TtGC7SwiftUI11LocationBoxGVS_24ObservableObjectLocationC4Pyto15SceneStateStoreVS2_10SceneState__ = _Class(
    "_TtGC7SwiftUI11LocationBoxGVS_24ObservableObjectLocationC4Pyto15SceneStateStoreVS2_10SceneState__"
)
_TtGC7SwiftUI11AnyLocationGSqO4Pyto15SelectedSection__ = _Class(
    "_TtGC7SwiftUI11AnyLocationGSqO4Pyto15SelectedSection__"
)
_TtGC7SwiftUI18StoredLocationBaseGSqO4Pyto15SelectedSection__ = _Class(
    "_TtGC7SwiftUI18StoredLocationBaseGSqO4Pyto15SelectedSection__"
)
_TtGC7SwiftUI14StoredLocationGSqO4Pyto15SelectedSection__ = _Class(
    "_TtGC7SwiftUI14StoredLocationGSqO4Pyto15SelectedSection__"
)
_TtGC7SwiftUI11AnyLocationSb_ = _Class("_TtGC7SwiftUI11AnyLocationSb_")
_TtGC7SwiftUI11LocationBoxGVS_18FunctionalLocationSb__ = _Class(
    "_TtGC7SwiftUI11LocationBoxGVS_18FunctionalLocationSb__"
)
_TtGC7SwiftUI18StoredLocationBaseSb_ = _Class("_TtGC7SwiftUI18StoredLocationBaseSb_")
_TtGC7SwiftUI14StoredLocationSb_ = _Class("_TtGC7SwiftUI14StoredLocationSb_")
_TtGC7SwiftUI11AnyLocationGSqV10Foundation3URL__ = _Class(
    "_TtGC7SwiftUI11AnyLocationGSqV10Foundation3URL__"
)
_TtGC7SwiftUI18StoredLocationBaseGSqV10Foundation3URL__ = _Class(
    "_TtGC7SwiftUI18StoredLocationBaseGSqV10Foundation3URL__"
)
_TtGC7SwiftUI14StoredLocationGSqV10Foundation3URL__ = _Class(
    "_TtGC7SwiftUI14StoredLocationGSqV10Foundation3URL__"
)
_TtGC7SwiftUI11AnyLocationT9isPressedSb8isActiveSb__ = _Class(
    "_TtGC7SwiftUI11AnyLocationT9isPressedSb8isActiveSb__"
)
_TtGC7SwiftUI18StoredLocationBaseT9isPressedSb8isActiveSb__ = _Class(
    "_TtGC7SwiftUI18StoredLocationBaseT9isPressedSb8isActiveSb__"
)
_TtGC7SwiftUI14StoredLocationT9isPressedSb8isActiveSb__ = _Class(
    "_TtGC7SwiftUI14StoredLocationT9isPressedSb8isActiveSb__"
)
_TtGC7SwiftUI11AnyLocationGSqSS__ = _Class("_TtGC7SwiftUI11AnyLocationGSqSS__")
_TtGC7SwiftUI18StoredLocationBaseGSqSS__ = _Class(
    "_TtGC7SwiftUI18StoredLocationBaseGSqSS__"
)
_TtGC7SwiftUI14StoredLocationGSqSS__ = _Class("_TtGC7SwiftUI14StoredLocationGSqSS__")
_TtGC7SwiftUI11AnyLocationOS_8EditMode_ = _Class(
    "_TtGC7SwiftUI11AnyLocationOS_8EditMode_"
)
_TtGC7SwiftUI18StoredLocationBaseOS_8EditMode_ = _Class(
    "_TtGC7SwiftUI18StoredLocationBaseOS_8EditMode_"
)
_TtGC7SwiftUI14StoredLocationOS_8EditMode_ = _Class(
    "_TtGC7SwiftUI14StoredLocationOS_8EditMode_"
)
_TtGC7SwiftUI11AnyLocationSS_ = _Class("_TtGC7SwiftUI11AnyLocationSS_")
_TtGC7SwiftUI11LocationBoxGVS_18FunctionalLocationSS__ = _Class(
    "_TtGC7SwiftUI11LocationBoxGVS_18FunctionalLocationSS__"
)
_TtGC7SwiftUI11LocationBoxGVS_24ObservableObjectLocationC4Pyto9PyPiIndexSS__ = _Class(
    "_TtGC7SwiftUI11LocationBoxGVS_24ObservableObjectLocationC4Pyto9PyPiIndexSS__"
)
_TtGC7SwiftUI11AnyLocationVS_16PresentationMode_ = _Class(
    "_TtGC7SwiftUI11AnyLocationVS_16PresentationMode_"
)
_TtGC7SwiftUI11LocationBoxGVS_18FunctionalLocationVS_16PresentationMode__ = _Class(
    "_TtGC7SwiftUI11LocationBoxGVS_18FunctionalLocationVS_16PresentationMode__"
)
_TtGC7SwiftUI11AnyLocationGSqCSo13UIWindowScene__ = _Class(
    "_TtGC7SwiftUI11AnyLocationGSqCSo13UIWindowScene__"
)
_TtGC7SwiftUI11LocationBoxGVS_24ObservableObjectLocationC4Pyto17SelectedItemStoreGSqCSo13UIWindowScene___ = _Class(
    "_TtGC7SwiftUI11LocationBoxGVS_24ObservableObjectLocationC4Pyto17SelectedItemStoreGSqCSo13UIWindowScene___"
)
_TtCV7SwiftUI13ViewTransformP33_CE19A3CEA6B9730579C42CE4C3071E745Chunk = _Class(
    "_TtCV7SwiftUI13ViewTransformP33_CE19A3CEA6B9730579C42CE4C3071E745Chunk"
)
EventBindingBridge = _Class("SwiftUI.EventBindingBridge")
UIKitEventBindingBridge = _Class("SwiftUI.UIKitEventBindingBridge")
EventBindingManager = _Class("SwiftUI.EventBindingManager")
_TtCV7SwiftUI14LayoutComputer8Delegate = _Class(
    "_TtCV7SwiftUI14LayoutComputer8Delegate"
)
StyledTextLayoutDelegate = _Class("SwiftUI.StyledTextLayoutDelegate")
ResolvedImageLayoutDelegate = _Class("SwiftUI.ResolvedImageLayoutDelegate")
_TtCV7SwiftUI11StackLayoutP33_68D684484B5AEF917B6B8353D57CF5907Storage = _Class(
    "_TtCV7SwiftUI11StackLayoutP33_68D684484B5AEF917B6B8353D57CF5907Storage"
)
_TtCV7SwiftUI14LayoutComputer15DefaultDelegate = _Class(
    "_TtCV7SwiftUI14LayoutComputer15DefaultDelegate"
)
ViewResponder = _Class("SwiftUI.ViewResponder")
_TtGC7SwiftUI17LeafViewResponderGVS_17ResolvedShapeViewVS_9RectangleVVS_5Color9_Resolved__ = _Class(
    "_TtGC7SwiftUI17LeafViewResponderGVS_17ResolvedShapeViewVS_9RectangleVVS_5Color9_Resolved__"
)
_TtGC7SwiftUI17LeafViewResponderGVS_12ViewLeafViewGVS_42PlatformViewControllerRepresentableAdaptorV4Pyto14ViewController___ = _Class(
    "_TtGC7SwiftUI17LeafViewResponderGVS_12ViewLeafViewGVS_42PlatformViewControllerRepresentableAdaptorV4Pyto14ViewController___"
)
_TtGC7SwiftUI17LeafViewResponderGVS_12ViewLeafViewGVS_42PlatformViewControllerRepresentableAdaptorGVS_33MulticolumnSplitViewRepresentableVVS_22_VariadicView_Children7ElementOs5NeverS5_____ = _Class(
    "_TtGC7SwiftUI17LeafViewResponderGVS_12ViewLeafViewGVS_42PlatformViewControllerRepresentableAdaptorGVS_33MulticolumnSplitViewRepresentableVVS_22_VariadicView_Children7ElementOs5NeverS5_____"
)
_TtGC7SwiftUI17LeafViewResponderVVS_5Color9_Resolved_ = _Class(
    "_TtGC7SwiftUI17LeafViewResponderVVS_5Color9_Resolved_"
)
UnaryViewResponder = _Class("SwiftUI.UnaryViewResponder")
_TtC7SwiftUIP33_B437445B20C411B83F8E47EB39F0306419AnyGestureResponder = _Class(
    "_TtC7SwiftUIP33_B437445B20C411B83F8E47EB39F0306419AnyGestureResponder"
)
UIViewResponder = _Class("SwiftUI.UIViewResponder")
HostingScrollViewResponder = _Class("SwiftUI.HostingScrollViewResponder")
_TtGC7SwiftUI17LeafViewResponderVVS_5Image8Resolved_ = _Class(
    "_TtGC7SwiftUI17LeafViewResponderVVS_5Image8Resolved_"
)
MultiViewResponder = _Class("SwiftUI.MultiViewResponder")
DefaultLayoutViewResponder = _Class("SwiftUI.DefaultLayoutViewResponder")
FocusNamespaceViewResponder = _Class("SwiftUI.FocusNamespaceViewResponder")
_TtC7SwiftUIP33_1F8B69996BE941D510140AD6558D884425DefaultFocusViewResponder = _Class(
    "_TtC7SwiftUIP33_1F8B69996BE941D510140AD6558D884425DefaultFocusViewResponder"
)
_TtC7SwiftUIP33_3F954A101507DD239D0B7D96685F95F119ScrollViewResponder = _Class(
    "_TtC7SwiftUIP33_3F954A101507DD239D0B7D96685F95F119ScrollViewResponder"
)
DropViewResponder = _Class("SwiftUI.DropViewResponder")
_TtCV7SwiftUI11DisplayList19HostedViewResponder = _Class(
    "_TtCV7SwiftUI11DisplayList19HostedViewResponder"
)
_TtC7SwiftUIP33_B6A2D4E72E5722B5103497ADB7778B5F22FocusableViewResponder = _Class(
    "_TtC7SwiftUIP33_B6A2D4E72E5722B5103497ADB7778B5F22FocusableViewResponder"
)
UIViewSnapshotResponder = _Class("SwiftUI.UIViewSnapshotResponder")
ContextMenuResponder = _Class("SwiftUI.ContextMenuResponder")
HoverResponder = _Class("SwiftUI.HoverResponder")
DragViewResponder = _Class("SwiftUI.DragViewResponder")
_TtC7SwiftUIP33_9EE920A99C667C354EEDF67A755D6AA825AllowsHitTestingResponder = _Class(
    "_TtC7SwiftUIP33_9EE920A99C667C354EEDF67A755D6AA825AllowsHitTestingResponder"
)
_TtC7SwiftUIP33_B07689AF38C6459AC9750094550967FF20OpacityViewResponder = _Class(
    "_TtC7SwiftUIP33_B07689AF38C6459AC9750094550967FF20OpacityViewResponder"
)
_TtC7SwiftUIP33_B699A935E119DD0B11A5BD0A3505C79F23HitTestBindingResponder = _Class(
    "_TtC7SwiftUIP33_B699A935E119DD0B11A5BD0A3505C79F23HitTestBindingResponder"
)
_TtGC7SwiftUI10MutableBoxVS_17CachedEnvironment_ = _Class(
    "_TtGC7SwiftUI10MutableBoxVS_17CachedEnvironment_"
)
GraphHost = _Class("SwiftUI.GraphHost")
WidgetGraph = _Class("SwiftUI.WidgetGraph")
_WidgetGraph = _Class("SwiftUI._WidgetGraph")
AppGraph = _Class("SwiftUI.AppGraph")
ViewGraph = _Class("SwiftUI.ViewGraph")
_TtGC7SwiftUI20UIKitStatusBarBridgeV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI20UIKitStatusBarBridgeV4Pyto8PyPiView_"
)
FocusBridge = _Class("SwiftUI.FocusBridge")
_TtCV7SwiftUI11DisplayList12ViewRenderer = _Class(
    "_TtCV7SwiftUI11DisplayList12ViewRenderer"
)
_TtGC7SwiftUI13ToolbarBridgeV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI13ToolbarBridgeV4Pyto8PyPiView_"
)
_TSHostingViewInvocationTarget = _Class("_TSHostingViewInvocationTarget")
_TtC7SwiftUIP33_F2BB00CEA25D2617C18DE8984EB64B5319UserDefaultObserver = _Class(
    "_TtC7SwiftUIP33_F2BB00CEA25D2617C18DE8984EB64B5319UserDefaultObserver"
)
UserActivityTrackingInfo = _Class("SwiftUI.UserActivityTrackingInfo")
_TtCV7SwiftUI15UIKitTextEditor11Coordinator = _Class(
    "_TtCV7SwiftUI15UIKitTextEditor11Coordinator"
)
_TtCV7SwiftUIP33_0B012DB3D42FBF9295A4AA29478C936C18BridgedColorPicker11Coordinator = _Class(
    "_TtCV7SwiftUIP33_0B012DB3D42FBF9295A4AA29478C936C18BridgedColorPicker11Coordinator"
)
InteropResponder = _Class("SwiftUI.InteropResponder")
_TtC7SwiftUIP33_B6A2D4E72E5722B5103497ADB7778B5F28UIFocusableViewResponderItem = _Class(
    "_TtC7SwiftUIP33_B6A2D4E72E5722B5103497ADB7778B5F28UIFocusableViewResponderItem"
)
_TtC7SwiftUIP33_C881219A53D4B960D55BEB57A34CE5C919ContextMenuIdentity = _Class(
    "_TtC7SwiftUIP33_C881219A53D4B960D55BEB57A34CE5C919ContextMenuIdentity"
)
_TtC7SwiftUIP33_32FACBD077E80DBEC9C9CF82638EFBF514CursorIdentity = _Class(
    "_TtC7SwiftUIP33_32FACBD077E80DBEC9C9CF82638EFBF514CursorIdentity"
)
KeyboardShortcutBridge = _Class("SwiftUI.KeyboardShortcutBridge")
_TtC7SwiftUIP33_9EE948773C43B4E002A1A22214C71CBE9FindClass = _Class(
    "_TtC7SwiftUIP33_9EE948773C43B4E002A1A22214C71CBE9FindClass"
)
ObjcColor = _Class("SwiftUI.ObjcColor")
AccessibilityNode = _Class("SwiftUI.AccessibilityNode")
AccessibilityReadingContentNode = _Class("SwiftUI.AccessibilityReadingContentNode")
_NoAnimationDelegate = _Class("_NoAnimationDelegate")
_SUITimeFormatData = _Class("_SUITimeFormatData")
BaseDateProvider = _Class("BaseDateProvider")
TimeProvider = _Class("TimeProvider")
RelativeDateProvider = _Class("RelativeDateProvider")
TimeIntervalProvider = _Class("TimeIntervalProvider")
DateProvider = _Class("DateProvider")
_TtGC7SwiftUI12CursorBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
    "_TtGC7SwiftUI12CursorBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtGC7SwiftUI17ContextMenuBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
    "_TtGC7SwiftUI17ContextMenuBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtGC7SwiftUI17DragAndDropBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
    "_TtGC7SwiftUI17DragAndDropBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtGC7SwiftUI18UIKitPopoverBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
    "_TtGC7SwiftUI18UIKitPopoverBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtGC7SwiftUI11SheetBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
    "_TtGC7SwiftUI11SheetBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier_VVS_11ActionSheet12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier_VVS_11ActionSheet12Presentation_"
)
_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier_VVS_5Alert12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier_VVS_5Alert12Presentation_"
)
_TtGC7SwiftUI21UIKitNavigationBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
    "_TtGC7SwiftUI21UIKitNavigationBridgeGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtGC7SwiftUI24NavigationBridge_PhoneTVGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
    "_TtGC7SwiftUI24NavigationBridge_PhoneTVGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtGC7SwiftUI29ListCoreDragAndDropControllerGVS_20ShadowListDataSourceGVS_20SystemListDataSourceOs5Never___ = _Class(
    "_TtGC7SwiftUI29ListCoreDragAndDropControllerGVS_20ShadowListDataSourceGVS_20SystemListDataSourceOs5Never___"
)
_TtC7SwiftUIP33_1C4DED7BD95AC993CC69F2CB25BC2A4016PlatformDragItem = _Class(
    "_TtC7SwiftUIP33_1C4DED7BD95AC993CC69F2CB25BC2A4016PlatformDragItem"
)
PlatformDocument = _Class("SwiftUI.PlatformDocument")
_TtGC7SwiftUI12CursorBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
    "_TtGC7SwiftUI12CursorBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI17ContextMenuBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
    "_TtGC7SwiftUI17ContextMenuBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI17DragAndDropBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
    "_TtGC7SwiftUI17DragAndDropBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI18UIKitPopoverBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
    "_TtGC7SwiftUI18UIKitPopoverBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI11SheetBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
    "_TtGC7SwiftUI11SheetBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext__VVS_11ActionSheet12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext__VVS_11ActionSheet12Presentation_"
)
_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext__VVS_5Alert12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext__VVS_5Alert12Presentation_"
)
_TtGC7SwiftUI21UIKitNavigationBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
    "_TtGC7SwiftUI21UIKitNavigationBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI24NavigationBridge_PhoneTVGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
    "_TtGC7SwiftUI24NavigationBridge_PhoneTVGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI12CursorBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
    "_TtGC7SwiftUI12CursorBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI17ContextMenuBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
    "_TtGC7SwiftUI17ContextMenuBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI17DragAndDropBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
    "_TtGC7SwiftUI17DragAndDropBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI18UIKitPopoverBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
    "_TtGC7SwiftUI18UIKitPopoverBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI11SheetBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
    "_TtGC7SwiftUI11SheetBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext__VVS_11ActionSheet12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext__VVS_11ActionSheet12Presentation_"
)
_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext__VVS_5Alert12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext__VVS_5Alert12Presentation_"
)
_TtGC7SwiftUI21UIKitNavigationBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
    "_TtGC7SwiftUI21UIKitNavigationBridgeGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI24NavigationBridge_PhoneTVGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
    "_TtGC7SwiftUI24NavigationBridge_PhoneTVGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI12CursorBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__ = _Class(
    "_TtGC7SwiftUI12CursorBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__"
)
_TtGC7SwiftUI17ContextMenuBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__ = _Class(
    "_TtGC7SwiftUI17ContextMenuBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__"
)
_TtGC7SwiftUI17DragAndDropBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__ = _Class(
    "_TtGC7SwiftUI17DragAndDropBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__"
)
_TtGC7SwiftUI18UIKitPopoverBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__ = _Class(
    "_TtGC7SwiftUI18UIKitPopoverBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__"
)
_TtGC7SwiftUI11SheetBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__ = _Class(
    "_TtGC7SwiftUI11SheetBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__"
)
_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout_VVS_11ActionSheet12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout_VVS_11ActionSheet12Presentation_"
)
_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout_VVS_5Alert12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout_VVS_5Alert12Presentation_"
)
_TtGC7SwiftUI21UIKitNavigationBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__ = _Class(
    "_TtGC7SwiftUI21UIKitNavigationBridgeGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__"
)
_TtGC7SwiftUI24NavigationBridge_PhoneTVGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__ = _Class(
    "_TtGC7SwiftUI24NavigationBridge_PhoneTVGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__"
)
SwiftUIEnvironmentWrapper = _Class("SwiftUIEnvironmentWrapper")
PlatformViewCoordinator = _Class("SwiftUI.PlatformViewCoordinator")
_TtC7SwiftUIP33_64A26C7A8406856A733B1A7B593971F711Coordinator = _Class(
    "_TtC7SwiftUIP33_64A26C7A8406856A733B1A7B593971F711Coordinator"
)
_TtC7SwiftUIP33_F1E9485F33A623EEFA647AA5EC4AE09411Coordinator = _Class(
    "_TtC7SwiftUIP33_F1E9485F33A623EEFA647AA5EC4AE09411Coordinator"
)
_TtC7SwiftUIP33_59ABB005D29F0E32A3A965407533FE0D11Coordinator = _Class(
    "_TtC7SwiftUIP33_59ABB005D29F0E32A3A965407533FE0D11Coordinator"
)
_TtCV7SwiftUIP33_796E60B90620AEB0B6972B2798FF4F4228UIKitDatePickerRepresentable11Coordinator = _Class(
    "_TtCV7SwiftUIP33_796E60B90620AEB0B6972B2798FF4F4228UIKitDatePickerRepresentable11Coordinator"
)
UIKitPopUpButtonCoordinator = _Class("SwiftUI.UIKitPopUpButtonCoordinator")
_TtC7SwiftUIP33_E007CD1636CD44CE99B3923B80F5F6AD11Coordinator = _Class(
    "_TtC7SwiftUIP33_E007CD1636CD44CE99B3923B80F5F6AD11Coordinator"
)
_TtC7SwiftUIP33_8825076C2763A50452A210CBE1FA4AF011Coordinator = _Class(
    "_TtC7SwiftUIP33_8825076C2763A50452A210CBE1FA4AF011Coordinator"
)
_TtC7SwiftUIP33_D74FE142C3C5A6C2CEA4987A69AEBD7527SegmentedControlCoordinator = _Class(
    "_TtC7SwiftUIP33_D74FE142C3C5A6C2CEA4987A69AEBD7527SegmentedControlCoordinator"
)
_TtC7SwiftUIP33_8AA246B2E0E916EFA5AD706DCC8A0FE811Coordinator = _Class(
    "_TtC7SwiftUIP33_8AA246B2E0E916EFA5AD706DCC8A0FE811Coordinator"
)
_TtC7SwiftUIP33_1246D37251EA3A918B392E2B95F8B7EF25PlatformSwitchCoordinator = _Class(
    "_TtC7SwiftUIP33_1246D37251EA3A918B392E2B95F8B7EF25PlatformSwitchCoordinator"
)
_TtGCV7SwiftUI33MulticolumnSplitViewRepresentable11CoordinatorVVS_22_VariadicView_Children7ElementOs5NeverS3___ = _Class(
    "_TtGCV7SwiftUI33MulticolumnSplitViewRepresentable11CoordinatorVVS_22_VariadicView_Children7ElementOs5NeverS3___"
)
_TtGC7SwiftUI12CursorBridgeVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI12CursorBridgeVS_7AnyView_"
)
_TtGC7SwiftUI17ContextMenuBridgeVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI17ContextMenuBridgeVS_7AnyView_"
)
_TtGC7SwiftUI17DragAndDropBridgeVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI17DragAndDropBridgeVS_7AnyView_"
)
_TtGC7SwiftUI18UIKitPopoverBridgeVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI18UIKitPopoverBridgeVS_7AnyView_"
)
_TtGC7SwiftUI11SheetBridgeVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI11SheetBridgeVS_7AnyView_"
)
_TtGC7SwiftUI11AlertBridgeVS_7AnyViewVVS_11ActionSheet12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeVS_7AnyViewVVS_11ActionSheet12Presentation_"
)
_TtGC7SwiftUI11AlertBridgeVS_7AnyViewVVS_5Alert12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeVS_7AnyViewVVS_5Alert12Presentation_"
)
_TtGC7SwiftUI21UIKitNavigationBridgeVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI21UIKitNavigationBridgeVS_7AnyView_"
)
_TtGC7SwiftUI24NavigationBridge_PhoneTVVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI24NavigationBridge_PhoneTVVS_7AnyView_"
)
_TtGC7SwiftUI12CursorBridgeV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI12CursorBridgeV4Pyto8PyPiView_"
)
_TtGC7SwiftUI17ContextMenuBridgeV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI17ContextMenuBridgeV4Pyto8PyPiView_"
)
FileImportExportBridge = _Class("SwiftUI.FileImportExportBridge")
_TtGC7SwiftUI17DragAndDropBridgeV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI17DragAndDropBridgeV4Pyto8PyPiView_"
)
_TtGC7SwiftUI18UIKitPopoverBridgeV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI18UIKitPopoverBridgeV4Pyto8PyPiView_"
)
_TtGC7SwiftUI11SheetBridgeV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI11SheetBridgeV4Pyto8PyPiView_"
)
_TtGC7SwiftUI11AlertBridgeV4Pyto8PyPiViewVVS_11ActionSheet12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeV4Pyto8PyPiViewVVS_11ActionSheet12Presentation_"
)
_TtGC7SwiftUI11AlertBridgeV4Pyto8PyPiViewVVS_5Alert12Presentation_ = _Class(
    "_TtGC7SwiftUI11AlertBridgeV4Pyto8PyPiViewVVS_5Alert12Presentation_"
)
_TtGC7SwiftUI21UIKitNavigationBridgeV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI21UIKitNavigationBridgeV4Pyto8PyPiView_"
)
_TtGC7SwiftUI24NavigationBridge_PhoneTVV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI24NavigationBridge_PhoneTVV4Pyto8PyPiView_"
)
UIKitToolbarCoordinator = _Class("SwiftUI.UIKitToolbarCoordinator")
DocumentNavigationItem = _Class("SwiftUI.DocumentNavigationItem")
SwiftUITabBarItem = _Class("SwiftUI.SwiftUITabBarItem")
UIKitGestureRecognizer = _Class("SwiftUI.UIKitGestureRecognizer")
_TtC7SwiftUIP33_F176A6CF4451B27508D54E2BEAEBFD5419ShadowGradientLayer = _Class(
    "_TtC7SwiftUIP33_F176A6CF4451B27508D54E2BEAEBFD5419ShadowGradientLayer"
)
_TtC7SwiftUIP33_F176A6CF4451B27508D54E2BEAEBFD5415PaintShapeLayer = _Class(
    "_TtC7SwiftUIP33_F176A6CF4451B27508D54E2BEAEBFD5415PaintShapeLayer"
)
GradientLayer = _Class("SwiftUI.GradientLayer")
_TtCV7SwiftUI16EmptyViewFactoryP33_4D627BB6145E5C401552B7640DB8355B12MissingLayer = _Class(
    "_TtCV7SwiftUI16EmptyViewFactoryP33_4D627BB6145E5C401552B7640DB8355B12MissingLayer"
)
ImageLayer = _Class("SwiftUI.ImageLayer")
_TtC7SwiftUIP33_F176A6CF4451B27508D54E2BEAEBFD5415ColorShapeLayer = _Class(
    "_TtC7SwiftUIP33_F176A6CF4451B27508D54E2BEAEBFD5415ColorShapeLayer"
)
MaskLayer = _Class("SwiftUI.MaskLayer")
AppSceneDelegate = _Class("SwiftUI.AppSceneDelegate")
AppDelegate = _Class("SwiftUI.AppDelegate")
TestingSceneDelegate = _Class("SwiftUI.TestingSceneDelegate")
TestingAppDelegate = _Class("SwiftUI.TestingAppDelegate")
UIKitMainMenuController = _Class("SwiftUI.UIKitMainMenuController")
_TtCC7SwiftUI17HostingScrollView22PlatformGroupContainer = _Class(
    "_TtCC7SwiftUI17HostingScrollView22PlatformGroupContainer"
)
_TtC7SwiftUIP33_7B961970B8750E2C6A3A32EFD7AB64FD15DisplayListView = _Class(
    "_TtC7SwiftUIP33_7B961970B8750E2C6A3A32EFD7AB64FD15DisplayListView"
)
_UIGraphicsView = _Class("SwiftUI._UIGraphicsView")
_TtC7SwiftUIP33_A34643117F00277B93DEBAB70EC0697122_UIShapeHitTestingView = _Class(
    "_TtC7SwiftUIP33_A34643117F00277B93DEBAB70EC0697122_UIShapeHitTestingView"
)
_TtC7SwiftUIP33_A34643117F00277B93DEBAB70EC0697116_UIInheritedView = _Class(
    "_TtC7SwiftUIP33_A34643117F00277B93DEBAB70EC0697116_UIInheritedView"
)
RenderBoxView = _Class("SwiftUI.RenderBoxView")
_TtCOCV7SwiftUI11DisplayList11ViewUpdater8Platform13RBDrawingView = _Class(
    "_TtCOCV7SwiftUI11DisplayList11ViewUpdater8Platform13RBDrawingView"
)
_TtCOCV7SwiftUI11DisplayList11ViewUpdater8Platform13CGDrawingView = _Class(
    "_TtCOCV7SwiftUI11DisplayList11ViewUpdater8Platform13CGDrawingView"
)
_TtGC7SwiftUI14_UIHostingViewGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
    "_TtGC7SwiftUI14_UIHostingViewGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtGC7SwiftUI15ListHostingViewGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__ = _Class(
    "_TtGC7SwiftUI15ListHostingViewGVS_15ModifiedContentVS_14_ViewList_ViewVVS_17CellForRowVisitor12CellModifier__"
)
_TtGC7SwiftUI16PlatformViewHostGVS_42PlatformViewControllerRepresentableAdaptorV4Pyto14ViewController__ = _Class(
    "_TtGC7SwiftUI16PlatformViewHostGVS_42PlatformViewControllerRepresentableAdaptorV4Pyto14ViewController__"
)
_TtGC7SwiftUI14_UIHostingViewGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
    "_TtGC7SwiftUI14_UIHostingViewGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI16PlatformViewHostGVS_42PlatformViewControllerRepresentableAdaptorGVS_33MulticolumnSplitViewRepresentableVVS_22_VariadicView_Children7ElementOs5NeverS4____ = _Class(
    "_TtGC7SwiftUI16PlatformViewHostGVS_42PlatformViewControllerRepresentableAdaptorGVS_33MulticolumnSplitViewRepresentableVVS_22_VariadicView_Children7ElementOs5NeverS4____"
)
_TtGC7SwiftUI14_UIHostingViewGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
    "_TtGC7SwiftUI14_UIHostingViewGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI14_UIHostingViewGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__ = _Class(
    "_TtGC7SwiftUI14_UIHostingViewGVS_15ModifiedContentGS1_VS_7AnyViewGVS_30_EnvironmentKeyWritingModifierGSqGVS_7BindingOS_8EditMode____VS_16_FixedSizeLayout__"
)
_TtGC7SwiftUI14_UIHostingViewVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI14_UIHostingViewVS_7AnyView_"
)
_TtGC7SwiftUI14_UIHostingViewV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI14_UIHostingViewV4Pyto8PyPiView_"
)
ListCoreHeaderHost = _Class("SwiftUI.ListCoreHeaderHost")
HostingScrollView = _Class("SwiftUI.HostingScrollView")
_TtC7SwiftUIP33_BFB370BA5F1BADDC9D83021565761A4925UpdateCoalescingTableView = _Class(
    "_TtC7SwiftUIP33_BFB370BA5F1BADDC9D83021565761A4925UpdateCoalescingTableView"
)
_TtC7SwiftUIP33_8825076C2763A50452A210CBE1FA4AF020PagingCollectionView = _Class(
    "_TtC7SwiftUIP33_8825076C2763A50452A210CBE1FA4AF020PagingCollectionView"
)
ListCoreCellHost = _Class("SwiftUI.ListCoreCellHost")
_TtC7SwiftUIP33_8825076C2763A50452A210CBE1FA4AF015UIKitPagingCell = _Class(
    "_TtC7SwiftUIP33_8825076C2763A50452A210CBE1FA4AF015UIKitPagingCell"
)
SwiftUIToolbar = _Class("SwiftUI.SwiftUIToolbar")
_TtCV7SwiftUIP33_D74FE142C3C5A6C2CEA4987A69AEBD7522SystemSegmentedControl18UISegmentedControl = _Class(
    "_TtCV7SwiftUIP33_D74FE142C3C5A6C2CEA4987A69AEBD7522SystemSegmentedControl18UISegmentedControl"
)
SwiftUITextField = _Class("SwiftUI.SwiftUITextField")
_TtGC7SwiftUI19UIHostingControllerGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___ = _Class(
    "_TtGC7SwiftUI19UIHostingControllerGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_19SidebarStyleContext___"
)
_TtGC7SwiftUI19UIHostingControllerGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___ = _Class(
    "_TtGC7SwiftUI19UIHostingControllerGVS_15ModifiedContentVVS_22_VariadicView_Children7ElementGVS_18StyleContextWriterVS_14NoStyleContext___"
)
_TtGC7SwiftUI19UIHostingControllerVS_7AnyView_ = _Class(
    "_TtGC7SwiftUI19UIHostingControllerVS_7AnyView_"
)
_TtGC7SwiftUI19UIHostingControllerV4Pyto8PyPiView_ = _Class(
    "_TtGC7SwiftUI19UIHostingControllerV4Pyto8PyPiView_"
)
DocumentBrowserViewController = _Class("SwiftUI.DocumentBrowserViewController")
NotificationSendingSplitViewController = _Class(
    "SwiftUI.NotificationSendingSplitViewController"
)
NotifyingMulticolumnSplitViewController = _Class(
    "SwiftUI.NotifyingMulticolumnSplitViewController"
)
SplitViewNavigationController = _Class("SwiftUI.SplitViewNavigationController")
_TtGC7SwiftUI41StyleContextSplitViewNavigationControllerVS_19SidebarStyleContext_ = _Class(
    "_TtGC7SwiftUI41StyleContextSplitViewNavigationControllerVS_19SidebarStyleContext_"
)
_TtGC7SwiftUI41StyleContextSplitViewNavigationControllerVS_14NoStyleContext_ = _Class(
    "_TtGC7SwiftUI41StyleContextSplitViewNavigationControllerVS_14NoStyleContext_"
)
PlatformAlertController = _Class("SwiftUI.PlatformAlertController")
|
import copy
import string

# Snapshot of the board after every solved-cell assignment, consumed by the
# optional pygame visualiser at the bottom of the file.
assignments = []

### Setup
board_rows = string.ascii_uppercase[0:9]  # 'A'..'I'
board_cols = string.digits[1:10]          # '1'..'9'
# Row/column triplets delimiting the nine 3x3 subboards.
subboard_rows = [board_rows[0:3], board_rows[3:6], board_rows[6:9]]
subboard_cols = [board_cols[0:3], board_cols[3:6], board_cols[6:9]]
subboards = [[r + c for r in sub_row for c in sub_col] for sub_row in subboard_rows
             for sub_col in subboard_cols]
# Both main diagonals -- this solver treats the diagonals as units too
# (diagonal-sudoku variant, see get_peer_grp).
diagonals = [[board_rows[i] + board_cols[i] for i in range(9)], [board_rows[i] +
             board_cols[8 - i] for i in range(9)]]
# Cache of box -> peer list; filled lazily by grid_values()/naked_twins().
peer_groups = {}
# Creates a list of lists, one per row ('A1'..'A9', ...).
rows = [[board_row + board_col for board_col in board_cols] for board_row in board_rows]
# Creates a list of lists, one per column ('A1'..'I1', ...).
cols = [[board_row + board_col for board_row in board_rows] for board_col in board_cols]
def strIntersection(s1, s2):
    """Return the characters of *s1* that also occur in *s2*, de-duplicated,
    preserving first-occurrence order.

    Args:
        s1(str): string whose characters (and order) drive the result.
        s2(str): string acting as the membership filter.
    Returns:
        str: ordered, duplicate-free intersection of the two strings.
    """
    # dict.fromkeys preserves insertion order (Python 3.7+), giving an
    # order-preserving dedup without the quadratic manual loop.
    return ''.join(dict.fromkeys(c for c in s1 if c in s2))
def get_all_peer_grp(values):
    """
    Generate a dictionary of peer groups.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Returns:
        a dictionary of the form {'A1': ['A2','A3',..], ...}
    """
    # Computed once and cached by the caller for fast lookups later.
    return {box: get_peer_grp(box) for box in values}
def get_peer_grp(value):
    """
    Given a position this function returns a list of positions in its peer group.

    The result accumulates, in order, every row, column, subboard and (when
    applicable) diagonal unit containing the box, so it includes the box
    itself and may contain duplicates -- exactly as callers expect.

    Args:
        value(str): a string of the form 'A1'
    Returns:
        the positions list with the peers.
    """
    peers = []
    # Scan the unit lists in the fixed order rows, cols, subboards, diagonals.
    for unit_list in (rows, cols, subboards, diagonals):
        for unit in unit_list:
            if value in unit:
                peers.extend(unit)
    return peers
### Main Logic
def assign_value(values, box, value):
    """
    Please use this function to update your values dictionary!
    Assigns a value to a given box. If it updates the board, record it.
    """
    values[box] = value
    if len(value) != 1:
        return values
    # The box just became solved -> snapshot the grid for the visualiser.
    assignments.append(values.copy())
    return values
def naked_twins(values):
    """Eliminate values using the naked twins strategy.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Returns:
        the values dictionary with the naked twins eliminated from peers.
    """
    global peer_groups
    # Lazily build the peer-group cache if grid_values() was not called first.
    if peer_groups == {}:
        peer_groups = get_all_peer_grp(values)
    for value in values:
        # Only 2-candidate boxes can form a naked-twin pair.
        if (len(values[value]) == 2):
            peer_group = peer_groups[value]
            for peer in peer_group:
                # A distinct peer with the identical 2-candidate string is a twin.
                if (peer != value) and (values[peer] == values[value]):
                    # The twins' digits can be removed from every box that is a
                    # peer of *both* twins (shared units only).
                    for cell_to_replace in list(set(peer_groups[value]).intersection(set(peer_groups[peer]))):
                        # Skip the twins themselves, boxes not containing either
                        # digit, and boxes already down to <= 2 candidates.
                        if (cell_to_replace != peer and cell_to_replace != value) and \
                                (values[peer][0] in values[cell_to_replace] or values[peer][1] in values[cell_to_replace]) and \
                                len(values[cell_to_replace]) > 2:
                            assign_value(values, cell_to_replace, values[cell_to_replace].replace(values[peer][0], ''))
                            assign_value(values, cell_to_replace, values[cell_to_replace].replace(values[peer][1], ''))
    return values
def cross(A, B):
    """Cross product of elements in A and elements in B (B-major: each
    element of B prefixes every element of A)."""
    return [prefix + suffix for prefix in B for suffix in A]
def grid_values(grid):
    """
    Convert grid into a dict of {square: char} with '123456789' for empties.
    Args:
        grid(string) - A grid in string form.
    Returns:
        A grid in dictionary form
            Keys: The boxes, e.g., 'A1'
            Values: The value in each box, e.g., '8'. If the box has no value,
            then the value will be '123456789'.
    """
    global peer_groups
    grid_dict = {}
    for position, box in enumerate(cross(board_cols, board_rows)):
        cell = grid[position]
        grid_dict[box] = '123456789' if cell == '.' else cell
    # Prime the module-level peer-group cache for the solver passes.
    peer_groups = get_all_peer_grp(grid_dict)
    return grid_dict
def display(values):
    """
    Display the values as a 2-D grid.
    Args:
        values(dict): The sudoku in dictionary form
    """
    # Column width: widest candidate string on the board, plus one for padding.
    width = 1 + max(len(values[s]) for s in cross(board_cols, board_rows))
    # Horizontal separator between the 3-row bands.
    line = '+'.join(['-' * (width * 3)] * 3)
    for r in board_rows:
        # '|' after columns 3 and 6 separates the 3x3 subboards.
        print(''.join(values[r + c].center(width) + ('|' if c in '36' else '')
                      for c in board_cols))
        if r in 'CF': print(line)
    return
def eliminate(values):
    """
    Eliminate values that are not possible.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Returns:
        the values dictionary with the eliminated values from peers.
    """
    for value in values:
        # Removes potential answers from a target box that are valid solutions for peers
        seen_buffer = ''
        value_buffer = ''
        if len(values[value]) > 1:
            # Collect digits already fixed (single-candidate) in the peers.
            for cell in peer_groups[value]:
                if len(values[cell]) == 1 and cell != value:
                    seen_buffer += values[cell]
            # Digits '1'..'9' not yet claimed by a solved peer.
            for i in board_cols:
                if i not in seen_buffer:
                    value_buffer += i
            # Keep only the target's candidates that remain unclaimed.
            assign_value(values, value, strIntersection(value_buffer, values[value]))
        # Removes this box's solved value from all of its peers.
        if len(values[value]) == 1:
            for peer in peer_groups[value]:
                if peer != value:
                    entry = values[peer].replace(values[value], '')
                    assign_value(values, peer, entry)
    return values
def only_choice(values):
    """
    Chooses values using the only choice strategy.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Returns:
        the values dictionary with the only choice chosen from among peers.
    """
    for value in values:
        seen_buffer = ''
        peer_grp = peer_groups[value]
        # Creates a set of seen values from the target box's peer group.
        for i in peer_grp:
            if i != value:
                seen_buffer += values[i]
        seen_buffer = ''.join(list(set(list(seen_buffer))))
        # Any digit no peer can take must belong to this box.
        new_value = ''
        for i in values[value]:
            if i not in seen_buffer:
                new_value += i
        # Keep the old candidates when no digit is unique to this box.
        new_value = values[value] if new_value == '' else new_value
        if new_value != values[value]:
            values = assign_value(values, value, new_value)
    return values
def reduce_puzzle(values):
    """
    Repeatedly apply the constraint-propagation strategies (eliminate,
    only_choice, naked_twins) until the board stops changing.

    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Returns:
        The reduced values dictionary, or False if any box lost all of its
        candidates (i.e. this branch of the search is contradictory).
    """
    # (Removed an unused `solved_values` list that was computed and never read.)
    stalled = False
    while not stalled:
        solved_values_before = len([box for box in values.keys() if len(values[box]) == 1])
        values = eliminate(values)
        values = only_choice(values)
        values = naked_twins(values)
        solved_values_after = len([box for box in values.keys() if len(values[box]) == 1])
        # No new solved boxes this pass -> propagation has converged.
        stalled = solved_values_before == solved_values_after
        # A box with zero candidates means the puzzle is unsolvable here.
        if len([box for box in values.keys() if len(values[box]) == 0]):
            return False
    return values
def search(values):
    """
    Use DFS to check different possibilities for the puzzles.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Returns:
        the solved values dictionary; False when reduce_puzzle found a
        contradiction; implicitly None when every candidate of the chosen
        box fails (falsy, so callers treat it as failure too).
    """
    values = reduce_puzzle(values)
    if values is False:
        return values  ## Failed earlier
    if all(len(values[s]) == 1 for s in cross(board_cols, board_rows)):
        return values  ## Solved!
    # Choose one of the unfilled squares with the fewest possibilities.
    n, s = min((len(values[s]), s) for s in cross(board_cols, board_rows) if len(values[s]) > 1)
    # Now use recurrence to solve each one of the resulting sudokus.
    for value in values[s]:
        new_sudoku = values.copy()
        new_sudoku[s] = value
        attempt = search(new_sudoku)
        if attempt:
            return attempt
def solve(grid):
    """
    Find the solution to a Sudoku grid.
    Args:
        grid(string): a string representing a sudoku grid.
            Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    Returns:
        The dictionary representation of the final sudoku grid. False if no solution exists.
    """
    # Parse the string into the box->candidates dict, then run the DFS solver.
    return search(grid_values(grid))
if __name__ == '__main__':
    # Diagonal-sudoku demo puzzle ('.' = empty box).
    diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    display(solve(diag_sudoku_grid))

    # Best-effort visualisation: pygame/visualize may be missing.
    try:
        from visualize import visualize_assignments
        visualize_assignments(assignments)
    except Exception:
        # Was a bare `except:`, which would also swallow KeyboardInterrupt
        # and SystemExit; `except Exception` keeps the best-effort intent.
        print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
|
# Generated by Django 3.0.4 on 2020-07-14 20:21
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drops the Todo model (and its table) from the todo app."""

    dependencies = [
        ('todo', '0009_nancy_scheduled'),
    ]

    operations = [
        migrations.DeleteModel(
            name='Todo',
        ),
    ]
|
# Generated by Django 2.1.2 on 2019-01-07 18:54
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: removes the `member` field from the Dialouge model."""

    dependencies = [
        ('projects', '0048_dialouge_author'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='dialouge',
            name='member',
        ),
    ]
|
# Author: Matthew Wicker
# Companion Code for paper: Analysis of 3D Deep Learning in an Adversarial Setting
# CVPR 2019
"""
This file implements the PointNet model (Qi et al. 2017) in Keras.
It is aware of weights that are saved in the Models directory of this
repository. So if you would like to modify/retrain this model, then
please ensure the weights and architecture are changed accordingly.
"""
import h5py
import numpy as np
import numpy as np
import os
import tensorflow as tf
from keras import optimizers
from keras.layers import Input
from keras.models import Model
from keras.layers import Dense, Flatten, Reshape, Dropout
from keras.layers import Convolution1D, MaxPooling1D, BatchNormalization
from keras.layers import Lambda
from keras.utils import np_utils
from keras import backend as K
import copy
def mat_mul(A, B):
    """Matrix-multiply tensors A and B via tf.matmul (wrapped as a plain
    function so it can be used inside a Keras Lambda layer below)."""
    return tf.matmul(A, B)
"""
This function declares the PointNet architecture.
@ Param classes - Integer, defining the number of classes that the
model will be predicting.
@ Param load_weights - Boolean, if classes is set for 10 or 40 we
will load pretrained weights into the model.
"""
def PointNet(classes=40, load_weights=True, num_points=2048):
    """Build and compile the PointNet classifier.

    Args:
        classes(int): number of output classes; values 40/10/2/3 select the
            pretrained weight files shipped in Models/.
        load_weights(bool): when True and `classes` matches a shipped model,
            load the corresponding pretrained weights.
        num_points(int): number of 3D points per input cloud.

    Returns:
        A compiled Keras Model mapping (num_points, 3) clouds to class scores.
    """
    # (Removed a no-op `num_points = num_points` self-assignment.)
    input_points = Input(shape=(num_points, 3))

    # Input transform net: regresses a 3x3 matrix applied to the raw points.
    x = Convolution1D(64, 1, activation='relu',
                      input_shape=(num_points, 3))(input_points)
    x = BatchNormalization()(x)
    x = Convolution1D(128, 1, activation='relu')(x)
    x = BatchNormalization()(x)
    x = Convolution1D(1024, 1, activation='relu')(x)
    x = BatchNormalization()(x)
    x = MaxPooling1D(pool_size=num_points)(x)
    x = Dense(512, activation='relu')(x)
    x = BatchNormalization()(x)
    x = Dense(256, activation='relu')(x)
    x = BatchNormalization()(x)
    # Weights initialised so the transform starts as the 3x3 identity.
    x = Dense(9, weights=[np.zeros([256, 9]), np.array([1, 0, 0, 0, 1, 0, 0, 0, 1]).astype(np.float32)])(x)
    input_T = Reshape((3, 3))(x)

    # forward net
    g = Lambda(mat_mul, arguments={'B': input_T})(input_points)
    g = Convolution1D(64, 1, input_shape=(num_points, 3), activation='relu')(g)
    g = BatchNormalization()(g)
    g = Convolution1D(64, 1, input_shape=(num_points, 3), activation='relu')(g)
    g = BatchNormalization()(g)

    # feature transform net
    f = Convolution1D(64, 1, activation='relu')(g)
    f = BatchNormalization()(f)
    f = Convolution1D(128, 1, activation='relu')(f)
    f = BatchNormalization()(f)
    f = Convolution1D(1024, 1, activation='relu')(f)
    f = BatchNormalization()(f)
    f = MaxPooling1D(pool_size=num_points)(f)
    f = Dense(512, activation='relu')(f)
    f = BatchNormalization()(f)
    f = Dense(256, activation='relu')(f)
    f = BatchNormalization()(f)
    # Weights initialised so the 64x64 feature transform starts as identity.
    f = Dense(64 * 64, weights=[np.zeros([256, 64 * 64]), np.eye(64).flatten().astype(np.float32)])(f)
    feature_T = Reshape((64, 64))(f)

    # forward net
    g = Lambda(mat_mul, arguments={'B': feature_T})(g)
    g = Convolution1D(64, 1, activation='relu')(g)
    g = BatchNormalization()(g)
    g = Convolution1D(128, 1, activation='relu')(g)
    g = BatchNormalization()(g)
    g = Convolution1D(1024, 1, activation='relu')(g)
    g = BatchNormalization()(g)

    # global_feature
    global_feature = MaxPooling1D(pool_size=num_points)(g)

    # point_net_cls
    c = Dense(512, activation='relu')(global_feature)
    c = BatchNormalization()(c)
    c = Dropout(rate=0.7)(c)
    c = Dense(256, activation='relu')(c)
    c = BatchNormalization()(c)
    c = Dropout(rate=0.7)(c)
    c = Dense(classes, activation='softmax')(c)
    prediction = Flatten()(c)

    model = Model(inputs=input_points, outputs=prediction)
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    # Pretrained weights shipped with the repository, keyed by class count.
    if (classes == 40 and load_weights):
        model.load_weights('Models/PointNet-ModelNet40.h5')
    if (classes == 10 and load_weights):
        model.load_weights('Models/PointNet-ModelNet10.h5')
    if (classes == 2 and load_weights):
        model.load_weights('Models/PointNet-KITTI.h5')
    if (classes == 3 and load_weights):
        model.load_weights('Models/PointNet-KITTI3.h5')
    # Fixed: was the Python-2-only statement `print model.summary()`,
    # a SyntaxError under Python 3.
    print(model.summary())
    return model
def predict(x_in, model):
    """Classify a single point cloud.

    Returns (confidence, class_index) where confidence is the softmax score
    of the winning class.
    """
    sample = np.squeeze(x_in)
    scores = np.squeeze(model.predict(np.asarray([sample])))
    best = np.argmax(scores)
    return scores[best], best
"""
This method returns the activations of the max pooling layer for
the specified inputs. Expects the following input
@Param - model, the Keras model that is outputted from the PointNet() function
@Param - point_cloud, the point cloud input that we want the maxpooling layer for
"""
def get_max_pool(model, point_cloud):
    """Return the activations of the 'max_pooling1d_3' layer for *point_cloud*
    (expected to already be a batch of clouds)."""
    extractor = Model(inputs=model.input,
                      outputs=model.get_layer('max_pooling1d_3').output)
    return extractor.predict(np.asarray(point_cloud))
def get_latent_activations(model, point_cloud):
    """Return the activations of the 'batch_normalization_15' layer for one
    point cloud (wrapped into a batch of one before prediction)."""
    extractor = Model(inputs=model.input,
                      outputs=model.get_layer('batch_normalization_15').output)
    batch = np.asarray([point_cloud])
    return extractor.predict(batch)
def get_critical_set(model, point_cloud):
    """White-box critical set: the indices of the points that achieve each
    latent feature's maximum (argmax over the point axis)."""
    latent = get_latent_activations(model, point_cloud)[0]
    return set(np.argmax(latent, axis=0))
def get_critical_set_bb(model, point_cloud):
    """Black-box critical-set estimate via point occlusion.

    Each point is zeroed out in turn and the model's top-class confidence is
    recorded; the function returns the indices of the first occurrence of
    each unique confidence value (np.unique's return_index), as before.

    (Removed an unused `critical_set` list and unused class-index variable.)
    """
    values = []
    pc = copy.deepcopy(point_cloud)
    for i in range(len(pc)):
        original_point = copy.deepcopy(pc[i])
        pc[i] = [0.0, 0.0, 0.0]       # occlude point i
        confidence, _ = predict(pc, model)
        values.append(confidence)
        pc[i] = original_point        # restore before the next iteration
    unique = np.unique(values, return_index=True)
    return unique[1]
|
from django.contrib import admin
from django.urls import path,include
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.auth import views as auth_views
from .views import home,all_blogs,Blogdetailview,my_profile,pay_foundation,payment,response,change_status,contact_save,pay_mains,pay_prelim
# URL namespace: reverse these routes as "core:<name>".
app_name = "core"
urlpatterns = [
    path('', home, name="home"),
    path('blogs/', all_blogs.as_view(), name="all_blogs"),
    path('blogs/<int:pk>', Blogdetailview, name='post-detail'),
    path('profile/', my_profile, name='profile'),
    # Payment endpoints (foundation/mains/prelim) -- presumably course
    # tiers; verify against the corresponding views.
    path('pay_foundation/', pay_foundation, name='pay_foundation'),
    path('pay_mains/', pay_mains, name='pay_mains'),
    path('pay_prelim/', pay_prelim, name='pay_prelim'),
    path('payment/', payment, name='payment'),
    path('response/', response, name='response'),
    path('change_status/', change_status, name='change_status'),
    path('contact_save', contact_save, name='contact_save')
]
|
from lxml import etree
import os
class Spells():
    """Feeds spell data received from the game into the interface's spell tab."""

    def __init__(self, interface):
        self.interface = interface
        self.list_spell = {}  # currently unused; kept for interface compatibility

    def update_spells(self, spells_data):
        """Rebuild the spell tab from `spells_data`.

        Each entry is a "~"-separated record: field 0 is the spell id and
        field 1 is passed straight to add_spell (presumably the spell level;
        verify against the protocol). The trailing element of `spells_data`
        is skipped, matching the original wire format.
        """
        self.interface.ongletsSorts.removes_spells()
        for entry in spells_data[:len(spells_data) - 1]:
            fields = entry.split("~")
            spell_id = fields[0]
            # Fixed: the name was previously looked up twice per spell, the
            # first result being discarded (one wasted XML parse each time).
            self.interface.ongletsSorts.add_spell(spell_id, self.get_name(spell_id), fields[1])

    def get_name(self, id_):
        """Return the spell name for `id_` from resource/spells.xml, or "None"
        (the literal string, as callers expect) when the id is unknown."""
        dir_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "resource/spells.xml")
        tree = etree.parse(dir_path)
        for spell in tree.xpath("/SPELLS/SPELL"):
            if id_ == spell.get("ID"):
                # Fixed: return on the first match instead of scanning the
                # remaining spells after the name has been found.
                return spell.find("NAME").text
        return "None"
|
from redash import models
from redash.models import db
from redash.permissions import ACCESS_TYPE_MODIFY
from redash.serializers import serialize_query
from tests import BaseTestCase
class TestQueryResourceGet(BaseTestCase):
    """GET /api/queries[/<id>]: serialization, access control and search."""

    def test_get_query(self):
        query = self.factory.create_query()
        rv = self.make_request("get", "/api/queries/{0}".format(query.id))
        self.assertEqual(rv.status_code, 200)
        expected = serialize_query(query, with_visualizations=True)
        expected["can_edit"] = True
        expected["is_favorite"] = False
        self.assertResponseEqual(expected, rv.json)

    def test_get_all_queries(self):
        [self.factory.create_query() for _ in range(10)]
        rv = self.make_request("get", "/api/queries")
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(len(rv.json["results"]), 10)

    def test_query_without_data_source_should_be_available_only_by_admin(self):
        query = self.factory.create_query()
        query.data_source = None
        db.session.add(query)
        # Regular user is rejected; admin succeeds.
        rv = self.make_request("get", "/api/queries/{}".format(query.id))
        self.assertEqual(rv.status_code, 403)
        rv = self.make_request("get", "/api/queries/{}".format(query.id), user=self.factory.create_admin())
        self.assertEqual(rv.status_code, 200)

    def test_query_only_accessible_to_users_from_its_organization(self):
        second_org = self.factory.create_org()
        second_org_admin = self.factory.create_admin(org=second_org)
        query = self.factory.create_query()
        query.data_source = None
        db.session.add(query)
        # Cross-org admin gets 404 (not 403) so the query's existence leaks nothing.
        rv = self.make_request("get", "/api/queries/{}".format(query.id), user=second_org_admin)
        self.assertEqual(rv.status_code, 404)
        rv = self.make_request("get", "/api/queries/{}".format(query.id), user=self.factory.create_admin())
        self.assertEqual(rv.status_code, 200)

    def test_query_search(self):
        names = ["Harder", "Better", "Faster", "Stronger"]
        for name in names:
            self.factory.create_query(name=name)
        rv = self.make_request("get", "/api/queries?q=better")
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(len(rv.json["results"]), 1)
        rv = self.make_request("get", "/api/queries?q=better or faster")
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(len(rv.json["results"]), 2)
        # test the old search API and that it redirects to the new one
        rv = self.make_request("get", "/api/queries/search?q=stronger")
        self.assertEqual(rv.status_code, 301)
        self.assertIn("/api/queries?q=stronger", rv.headers["Location"])
        rv = self.make_request("get", "/api/queries/search?q=stronger", follow_redirects=True)
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(len(rv.json["results"]), 1)
class TestQueryResourcePost(BaseTestCase):
    """POST /api/queries/<id>: updates, version conflicts, data-source and
    dropdown-parameter authorization, and explicit modify permissions."""

    def test_update_query(self):
        admin = self.factory.create_admin()
        query = self.factory.create_query()
        new_ds = self.factory.create_data_source()
        new_qr = self.factory.create_query_result()
        data = {
            "name": "Testing",
            "query": "select 2",
            "latest_query_data_id": new_qr.id,
            "data_source_id": new_ds.id,
        }
        rv = self.make_request("post", "/api/queries/{0}".format(query.id), data=data, user=admin)
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv.json["name"], data["name"])
        self.assertEqual(rv.json["last_modified_by"]["id"], admin.id)
        self.assertEqual(rv.json["query"], data["query"])
        self.assertEqual(rv.json["data_source_id"], data["data_source_id"])
        self.assertEqual(rv.json["latest_query_data_id"], data["latest_query_data_id"])

    def test_raises_error_in_case_of_conflict(self):
        q = self.factory.create_query()
        q.name = "Another Name"
        db.session.add(q)
        # Posting with a stale version number must yield 409 Conflict.
        rv = self.make_request(
            "post",
            "/api/queries/{0}".format(q.id),
            data={"name": "Testing", "version": q.version - 1},
            user=self.factory.user,
        )
        self.assertEqual(rv.status_code, 409)

    def test_prevents_association_with_view_only_data_sources(self):
        view_only_data_source = self.factory.create_data_source(view_only=True)
        my_data_source = self.factory.create_data_source()
        my_query = self.factory.create_query(data_source=my_data_source)
        db.session.add(my_query)
        rv = self.make_request(
            "post",
            "/api/queries/{0}".format(my_query.id),
            data={"data_source_id": view_only_data_source.id},
            user=self.factory.user,
        )
        self.assertEqual(rv.status_code, 403)

    def test_allows_association_with_authorized_dropdown_queries(self):
        data_source = self.factory.create_data_source(group=self.factory.default_group)
        other_query = self.factory.create_query(data_source=data_source)
        db.session.add(other_query)
        my_query = self.factory.create_query(data_source=data_source)
        db.session.add(my_query)
        # Query-backed dropdown parameters referencing an accessible query.
        options = {
            "parameters": [
                {"name": "foo", "type": "query", "queryId": other_query.id},
                {"name": "bar", "type": "query", "queryId": other_query.id},
            ]
        }
        rv = self.make_request(
            "post",
            "/api/queries/{0}".format(my_query.id),
            data={"options": options},
            user=self.factory.user,
        )
        self.assertEqual(rv.status_code, 200)

    def test_prevents_association_with_unauthorized_dropdown_queries(self):
        other_data_source = self.factory.create_data_source(group=self.factory.create_group())
        other_query = self.factory.create_query(data_source=other_data_source)
        db.session.add(other_query)
        my_data_source = self.factory.create_data_source(group=self.factory.create_group())
        my_query = self.factory.create_query(data_source=my_data_source)
        db.session.add(my_query)
        options = {"parameters": [{"type": "query", "queryId": other_query.id}]}
        rv = self.make_request(
            "post",
            "/api/queries/{0}".format(my_query.id),
            data={"options": options},
            user=self.factory.user,
        )
        self.assertEqual(rv.status_code, 403)

    def test_prevents_association_with_non_existing_dropdown_queries(self):
        my_data_source = self.factory.create_data_source(group=self.factory.create_group())
        my_query = self.factory.create_query(data_source=my_data_source)
        db.session.add(my_query)
        options = {"parameters": [{"type": "query", "queryId": 100000}]}
        rv = self.make_request(
            "post",
            "/api/queries/{0}".format(my_query.id),
            data={"options": options},
            user=self.factory.user,
        )
        self.assertEqual(rv.status_code, 400)

    def test_overrides_existing_if_no_version_specified(self):
        q = self.factory.create_query()
        q.name = "Another Name"
        db.session.add(q)
        rv = self.make_request(
            "post",
            "/api/queries/{0}".format(q.id),
            data={"name": "Testing"},
            user=self.factory.user,
        )
        self.assertEqual(rv.status_code, 200)

    def test_works_for_non_owner_with_permission(self):
        query = self.factory.create_query()
        user = self.factory.create_user()
        # Without an explicit grant the non-owner is rejected...
        rv = self.make_request(
            "post",
            "/api/queries/{0}".format(query.id),
            data={"name": "Testing"},
            user=user,
        )
        self.assertEqual(rv.status_code, 403)
        # ...and succeeds once MODIFY access is granted.
        models.AccessPermission.grant(obj=query, access_type=ACCESS_TYPE_MODIFY, grantee=user, grantor=query.user)
        rv = self.make_request(
            "post",
            "/api/queries/{0}".format(query.id),
            data={"name": "Testing"},
            user=user,
        )
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv.json["name"], "Testing")
        self.assertEqual(rv.json["last_modified_by"]["id"], user.id)
class TestQueryListResourceGet(BaseTestCase):
    """GET /api/queries (list): plain listing, tag filters and search terms."""

    def test_returns_queries(self):
        q1 = self.factory.create_query()
        q2 = self.factory.create_query()
        q3 = self.factory.create_query()
        rv = self.make_request("get", "/api/queries")
        assert len(rv.json["results"]) == 3
        assert set([result["id"] for result in rv.json["results"]]) == {q1.id, q2.id, q3.id}

    def test_filters_with_tags(self):
        q1 = self.factory.create_query(tags=["test"])
        self.factory.create_query()
        self.factory.create_query()
        rv = self.make_request("get", "/api/queries?tags=test")
        assert len(rv.json["results"]) == 1
        assert set([result["id"] for result in rv.json["results"]]) == {q1.id}

    def test_search_term(self):
        q1 = self.factory.create_query(name="Sales")
        q2 = self.factory.create_query(name="Q1 sales")
        self.factory.create_query(name="Ops")
        # Search is case-insensitive: both "Sales" and "Q1 sales" match.
        rv = self.make_request("get", "/api/queries?q=sales")
        assert len(rv.json["results"]) == 2
        assert set([result["id"] for result in rv.json["results"]]) == {q1.id, q2.id}
class TestQueryListResourcePost(BaseTestCase):
    """POST /api/queries (create): defaults of a new query and authorization
    of query-backed dropdown parameters at creation time."""

    def test_create_query(self):
        query_data = {
            "name": "Testing",
            "query": "SELECT 1",
            "schedule": {"interval": "3600"},
            "data_source_id": self.factory.data_source.id,
        }
        rv = self.make_request("post", "/api/queries", data=query_data)
        self.assertEqual(rv.status_code, 200)
        # The response must contain at least everything that was posted.
        self.assertLessEqual(query_data.items(), rv.json.items())
        self.assertEqual(rv.json["user"]["id"], self.factory.user.id)
        self.assertIsNotNone(rv.json["api_key"])
        self.assertIsNotNone(rv.json["query_hash"])
        query = models.Query.query.get(rv.json["id"])
        # A new query gets a default (table) visualization and starts as draft.
        self.assertEqual(len(list(query.visualizations)), 1)
        self.assertTrue(query.is_draft)

    def test_allows_association_with_authorized_dropdown_queries(self):
        data_source = self.factory.create_data_source(group=self.factory.default_group)
        other_query = self.factory.create_query(data_source=data_source)
        db.session.add(other_query)
        query_data = {
            "name": "Testing",
            "query": "SELECT 1",
            "schedule": {"interval": "3600"},
            "data_source_id": self.factory.data_source.id,
            "options": {
                "parameters": [
                    {"name": "foo", "type": "query", "queryId": other_query.id},
                    {"name": "bar", "type": "query", "queryId": other_query.id},
                ]
            },
        }
        rv = self.make_request("post", "/api/queries", data=query_data)
        self.assertEqual(rv.status_code, 200)

    def test_prevents_association_with_unauthorized_dropdown_queries(self):
        other_data_source = self.factory.create_data_source(group=self.factory.create_group())
        other_query = self.factory.create_query(data_source=other_data_source)
        db.session.add(other_query)
        my_data_source = self.factory.create_data_source(group=self.factory.create_group())
        query_data = {
            "name": "Testing",
            "query": "SELECT 1",
            "schedule": {"interval": "3600"},
            "data_source_id": my_data_source.id,
            "options": {"parameters": [{"type": "query", "queryId": other_query.id}]},
        }
        rv = self.make_request("post", "/api/queries", data=query_data)
        self.assertEqual(rv.status_code, 403)

    def test_prevents_association_with_non_existing_dropdown_queries(self):
        query_data = {
            "name": "Testing",
            "query": "SELECT 1",
            "schedule": {"interval": "3600"},
            "data_source_id": self.factory.data_source.id,
            "options": {"parameters": [{"type": "query", "queryId": 100000}]},
        }
        rv = self.make_request("post", "/api/queries", data=query_data)
        self.assertEqual(rv.status_code, 400)
class TestQueryArchiveResourceGet(BaseTestCase):
    """GET /api/queries/archive: lists only archived queries, with search."""

    def test_returns_queries(self):
        q1 = self.factory.create_query(is_archived=True)
        q2 = self.factory.create_query(is_archived=True)
        self.factory.create_query()  # non-archived: must not appear
        rv = self.make_request("get", "/api/queries/archive")
        assert len(rv.json["results"]) == 2
        assert set([result["id"] for result in rv.json["results"]]) == {q1.id, q2.id}

    def test_search_term(self):
        q1 = self.factory.create_query(name="Sales", is_archived=True)
        q2 = self.factory.create_query(name="Q1 sales", is_archived=True)
        self.factory.create_query(name="Q2 sales")  # matches term but not archived
        rv = self.make_request("get", "/api/queries/archive?q=sales")
        assert len(rv.json["results"]) == 2
        assert set([result["id"] for result in rv.json["results"]]) == {q1.id, q2.id}
class QueryRefreshTest(BaseTestCase):
    """POST /api/queries/<id>/refresh: parameter handling and auth rules."""

    def setUp(self):
        super(QueryRefreshTest, self).setUp()
        self.query = self.factory.create_query()
        self.path = "/api/queries/{}/refresh".format(self.query.id)

    def test_refresh_regular_query(self):
        response = self.make_request("post", self.path)
        self.assertEqual(200, response.status_code)

    def test_refresh_of_query_with_parameters(self):
        self.query.query_text = "SELECT {{param}}"
        db.session.add(self.query)
        # Parameter supplied via the p_<name> query-string convention.
        response = self.make_request("post", "{}?p_param=1".format(self.path))
        self.assertEqual(200, response.status_code)

    def test_refresh_of_query_with_parameters_without_parameters(self):
        self.query.query_text = "SELECT {{param}}"
        db.session.add(self.query)
        # Missing required parameter -> 400.
        response = self.make_request("post", "{}".format(self.path))
        self.assertEqual(400, response.status_code)

    def test_refresh_query_you_dont_have_access_to(self):
        group = self.factory.create_group()
        db.session.add(group)
        db.session.commit()
        user = self.factory.create_user(group_ids=[group.id])
        response = self.make_request("post", self.path, user=user)
        self.assertEqual(403, response.status_code)

    def test_refresh_forbiden_with_query_api_key(self):
        # NOTE: method name keeps its original spelling ("forbiden") since
        # renaming a discovered test method changes the suite's surface.
        # A query-scoped API key may not trigger refresh; a user API key may.
        response = self.make_request("post", "{}?api_key={}".format(self.path, self.query.api_key), user=False)
        self.assertEqual(403, response.status_code)
        response = self.make_request(
            "post",
            "{}?api_key={}".format(self.path, self.factory.user.api_key),
            user=False,
        )
        self.assertEqual(200, response.status_code)
class TestQueryRegenerateApiKey(BaseTestCase):
    """POST /api/queries/<id>/regenerate_api_key: who may rotate a query's key."""

    def test_non_admin_cannot_regenerate_api_key_of_other_user(self):
        query_creator = self.factory.create_user()
        query = self.factory.create_query(user=query_creator)
        other_user = self.factory.create_user()
        orig_api_key = query.api_key
        rv = self.make_request(
            "post",
            "/api/queries/{}/regenerate_api_key".format(query.id),
            user=other_user,
        )
        self.assertEqual(rv.status_code, 403)
        # The key must be unchanged after the rejected attempt.
        reloaded_query = models.Query.query.get(query.id)
        self.assertEqual(orig_api_key, reloaded_query.api_key)

    def test_admin_can_regenerate_api_key_of_other_user(self):
        query_creator = self.factory.create_user()
        query = self.factory.create_query(user=query_creator)
        admin_user = self.factory.create_admin()
        orig_api_key = query.api_key
        rv = self.make_request(
            "post",
            "/api/queries/{}/regenerate_api_key".format(query.id),
            user=admin_user,
        )
        self.assertEqual(rv.status_code, 200)
        reloaded_query = models.Query.query.get(query.id)
        self.assertNotEqual(orig_api_key, reloaded_query.api_key)

    def test_admin_can_regenerate_api_key_of_myself(self):
        query_creator = self.factory.create_user()
        admin_user = self.factory.create_admin()
        query = self.factory.create_query(user=query_creator)
        orig_api_key = query.api_key
        rv = self.make_request(
            "post",
            "/api/queries/{}/regenerate_api_key".format(query.id),
            user=admin_user,
        )
        self.assertEqual(rv.status_code, 200)
        updated_query = models.Query.query.get(query.id)
        self.assertNotEqual(orig_api_key, updated_query.api_key)

    def test_user_can_regenerate_api_key_of_myself(self):
        user = self.factory.create_user()
        query = self.factory.create_query(user=user)
        orig_api_key = query.api_key
        rv = self.make_request("post", "/api/queries/{}/regenerate_api_key".format(query.id), user=user)
        self.assertEqual(rv.status_code, 200)
        updated_query = models.Query.query.get(query.id)
        self.assertNotEqual(orig_api_key, updated_query.api_key)
class TestQueryForkResourcePost(BaseTestCase):
    """POST /api/queries/<id>/fork: forking requires full (non-view-only)
    access to the query's data source."""

    def test_forks_a_query(self):
        ds = self.factory.create_data_source(group=self.factory.org.default_group, view_only=False)
        query = self.factory.create_query(data_source=ds)
        rv = self.make_request("post", "/api/queries/{}/fork".format(query.id))
        self.assertEqual(rv.status_code, 200)

    def test_must_have_full_access_to_data_source(self):
        ds = self.factory.create_data_source(group=self.factory.org.default_group, view_only=True)
        query = self.factory.create_query(data_source=ds)
        rv = self.make_request("post", "/api/queries/{}/fork".format(query.id))
        self.assertEqual(rv.status_code, 403)
class TestFormatSQLQueryAPI(BaseTestCase):
    """POST /api/queries/format: server-side SQL pretty-printing."""

    def test_format_sql_query(self):
        admin = self.factory.create_admin()
        query = "select a,b,c FROM foobar Where x=1 and y=2;"
        # Expected output of the formatter: uppercased keywords, reindented
        # select list and indented AND clause.
        expected = """SELECT a,
       b,
       c
FROM foobar
WHERE x=1
  AND y=2;"""
        rv = self.make_request("post", "/api/queries/format", user=admin, data={"query": query})
        self.assertEqual(rv.json["query"], expected)
|
import script_context
import os
import h5py
import matplotlib.pyplot as plt
import numpy as np
from Stonks.Analytics import Analytics
import time as tm
import importlib
importlib.reload(Analytics)
def instrument_price(sell_price, buy_price, base_price=6, delta=.5):
    """Toy linear option-pricing model for a put.

    Starting from *base_price*, the price gains *delta* for every unit the
    underlying falls below the entry (sell_price < buy_price) and loses the
    same amount when it rises.
    """
    underlying_move = sell_price - buy_price
    return base_price - underlying_move * delta
def SMA_strat(time, sma, sma_d, candle, candle_high, candle_low, stop_loss=.8, profit=1.2):
put_thresholds = {'buy': 5, 'stop_loss': stop_loss, 'profit': profit}
put_buy_locs = []
put_buy_price = []
put_buy_option_price = []
put_sell_locs = []
put_sell_price = []
put_sell_option_price = []
open_put_position = False
put_price = 0
max_put_price = 0
for i in np.arange(sma.shape[0]):
gm_time = tm.gmtime(time[i] * 1e-3)
if (gm_time[3] - 4 > 9) and (gm_time[3] - 4 < 16):
delta = True
# if sma_d[i] < 0.0 and sma_d[i] > -0.03 and delta and not open_put_position: # open put options
if sma_d[i] < 0.0 and delta and not open_put_position: # open put options
put_buy_locs.append(i)
put_price = instrument_price(candle[i], candle[i], base_price=3, delta=.5)
# print(put_price)
max_put_price = put_price
# print(put_price)
put_buy_option_price.append(put_price)
put_buy_price.append(candle[i])
open_put_position = True
if open_put_position:
put_price = instrument_price(candle[i], put_buy_price[-1], base_price=3, delta=.5)
# print(put_price)
if put_price >= max_put_price:
max_put_price = put_price
# print(put_price)
if (put_price < put_thresholds['stop_loss'] * max_put_price
and put_price <= put_buy_option_price[-1]) \
or \
(put_price > put_thresholds['profit'] * max_put_price
and put_price > put_buy_option_price[-1]
and sma_d[i] >= 0.0): # close put options
# print('#############################################')
put_sell_locs.append(i)
put_sell_price.append(candle[i])
put_sell_option_price.append(put_price)
# print(put_price)
open_put_position = False
if (gm_time[3] - 4 == 16) and open_put_position:
put_sell_locs.append(i)
put_sell_price.append(candle[i])
put_sell_option_price.append(put_price)
open_put_position = False
return [np.array(put_buy_locs), np.array(put_buy_price), np.array(put_buy_option_price),
np.array(put_sell_locs), np.array(put_sell_price), np.array(put_sell_option_price)]
if __name__ == "__main__":
    '''File Handling'''
    # Open the HDF5 market-data file produced by the downloader scripts.
    filedirectory = '../StockData/'
    filename = 'S&P_500_2020-03-16'
    filepath = filedirectory + filename
    if os.path.exists(filepath):
        datafile = h5py.File(filepath)
    else:
        # NOTE(review): execution continues after this print and `datafile`
        # is undefined on this path -- the next access raises NameError.
        print('Data file does not exist!')

    # Pull one symbol's candles; 'datetime' is in epoch milliseconds.
    # group_choice = np.random.choice(list(datafile.keys()))
    group_choice = 'SPY'
    time = datafile[group_choice]['datetime'][...]
    data_open = datafile[group_choice]['open'][...]
    data_high = datafile[group_choice]['high'][...]
    data_low = datafile[group_choice]['low'][...]
    datafile.close()

    # Candle averages plus Bollinger bands around them.
    data = Analytics.candle_avg(open=data_open, high=data_high, low=data_low)
    candle_low_bollinger, candle_high_bollinger = Analytics.candle_bollinger_bands(open=data_open,
                                                                                   high=data_high,
                                                                                   low=data_low,
                                                                                   average=data,
                                                                                   period=30)
    # Smoothed price, its bands and first/second derivatives.
    period = 60
    sma = Analytics.moving_average(data=data, period=period)
    # sma = Analytics.exp_moving_average(data=data, alpha=.1, period=30)
    sma_low_bollinger, sma_high_bollinger = Analytics.bollinger_bands(data=data, average=sma)
    sma_d = Analytics.derivative(data, period=period)
    sma_dd = Analytics.second_derivative(data, period=period)

    # Run the put-buying backtest.
    # NOTE(review): candle_high is passed the LOW bollinger band and
    # candle_low the HIGH band (swapped).  Harmless today because SMA_strat
    # ignores both parameters, but confirm before they are ever used.
    results_list = SMA_strat(time=time,
                             sma=sma,
                             sma_d=sma_d,
                             candle=data,
                             candle_high=candle_low_bollinger,
                             candle_low=candle_high_bollinger,
                             stop_loss=.8,
                             profit=1.2)
    # Unpack the six result arrays (see SMA_strat's return value).
    put_buy_locs = results_list[0]
    put_buy_price = results_list[1]
    put_buy_option_price = results_list[2]
    put_sell_locs = results_list[3]
    put_sell_price = results_list[4]
    put_sell_option_price = results_list[5]

    '''
    plt.figure(figsize=(20, 10))
    plt.suptitle('second derivative SMA movement')
    # plt.hist((sma[:-1] - sma[1:]) / (sma_high_bollinger[1:] - sma_low_bollinger[1:]), bins=100)
    plt.hist((sma[0:-1:10][:-2] - 2 * sma[10:-1:10][:-1] + sma[20:-1:10]) / 2., bins=100)
    plt.figure(figsize=(20, 10))
    plt.suptitle('derivative SMA movement')
    plt.hist((sma[:-1] - sma[1:]) / (sma_high_bollinger[1:] - sma_low_bollinger[1:]), bins=100)
    plt.figure(figsize=(20, 10))
    plt.suptitle('Bollinger Band normalized SMA movement')
    plt.plot((sma[:-1] - sma[1:]) / (sma_high_bollinger[1:] - sma_low_bollinger[1:]))
    '''

    # Aggregate P&L statistics (profit is buy minus sell option price here;
    # a falling option price therefore shows as positive "profit").
    print('number of put purchases: {}'.format(put_buy_option_price.shape[0]))
    put_profits = (put_buy_option_price - put_sell_option_price)
    print('put_profits: {}'.format(np.sum(put_profits)))
    put_percent = (put_buy_option_price - put_sell_option_price) / put_buy_option_price
    print('put_percent: {}'.format(np.sum(put_percent) / put_percent.shape[0]))

    plt.figure(figsize=(20, 10))
    plt.hist(put_profits, bins=100)
    plt.figure(figsize=(20, 10))
    plt.plot(put_profits)

    # Plot window; the second assignment (full range) overrides the first.
    focus_top = time.shape[0] - 60 * 48
    focus_bot = time.shape[0] + 1
    focus_top = 0
    focus_bot = time.shape[0] + 1
    #################################################################################
    # Overlay profitable trades on the price/band chart.
    plt.figure(figsize=(20, 10))
    plt.suptitle('profitable trades')
    plt.plot(time[focus_top:focus_bot], data[focus_top:focus_bot], '.')
    plt.plot(time[focus_top:focus_bot], sma[focus_top:focus_bot])
    plt.plot(time[focus_top:focus_bot], sma_low_bollinger[focus_top:focus_bot])
    plt.plot(time[focus_top:focus_bot], sma_high_bollinger[focus_top:focus_bot])
    plt.plot(time[focus_top:focus_bot], candle_low_bollinger[focus_top:focus_bot])
    plt.plot(time[focus_top:focus_bot], candle_high_bollinger[focus_top:focus_bot])
    profit_put_buy_locs = put_buy_locs[put_profits > 0]
    put_cut = profit_put_buy_locs[profit_put_buy_locs > focus_top]
    plt.plot(time[put_cut], data[put_cut], '>', color='r')
    # plt.plot(put_cut - focus_top, sma[put_cut], '>', color='r')
    sma_d_buy = sma_dd[put_cut]
    profit_put_sell_locs = put_sell_locs[put_profits > 0]
    put_cut = profit_put_sell_locs[profit_put_sell_locs > focus_top]
    plt.plot(time[put_cut], data[put_cut], '<', color='g')
    # plt.plot(put_cut - focus_top, sma[put_cut], '<', color='g')
    plt.figure(figsize=(20, 10))
    plt.plot(time[put_cut], sma_d_buy, '.')
    #################################################################################
    # Same overlay for losing trades.
    plt.figure(figsize=(20, 10))
    plt.suptitle('loss trades')
    plt.plot(time[focus_top:focus_bot], data[focus_top:focus_bot], '.')
    plt.plot(time[focus_top:focus_bot], sma[focus_top:focus_bot])
    plt.plot(time[focus_top:focus_bot], sma_low_bollinger[focus_top:focus_bot])
    plt.plot(time[focus_top:focus_bot], sma_high_bollinger[focus_top:focus_bot])
    plt.plot(time[focus_top:focus_bot], candle_low_bollinger[focus_top:focus_bot])
    plt.plot(time[focus_top:focus_bot], candle_high_bollinger[focus_top:focus_bot])
    loss_put_buy_locs = put_buy_locs[put_profits < 0]
    put_cut = loss_put_buy_locs[loss_put_buy_locs > focus_top]
    plt.plot(time[put_cut], data[put_cut], '>', color='r')
    # plt.plot(put_cut - focus_top, sma[put_cut], '>', color='r')
    sma_d_buy = sma_dd[put_cut]
    loss_put_sell_locs = put_sell_locs[put_profits < 0]
    put_cut = loss_put_sell_locs[loss_put_sell_locs > focus_top]
    plt.plot(time[put_cut], data[put_cut], '<', color='g')
    # plt.plot(put_cut - focus_top, sma[put_cut], '<', color='g')
    plt.figure(figsize=(20, 10))
    plt.plot(time[put_cut], sma_d_buy, '.')

    '''
    focus_top = 3000
    focus_bot = 35000
    plt.figure(figsize=(20, 10))
    plt.suptitle(group_choice + ' ' + 'open sma')
    plt.plot(sma[focus_top:focus_bot])
    plt.plot(sma_low_bollinger[focus_top:focus_bot])
    plt.plot(sma_high_bollinger[focus_top:focus_bot])
    peak_cut = local_minimums_loc[local_minimums_loc > focus_top]
    plt.plot(peak_cut - focus_top, sma[peak_cut], '.', color='k')
    #peak_cut = local_maximums_loc[local_maximums_loc > focus_top]
    #plt.plot(peak_cut - focus_top, sma[peak_cut], '.', color='b')
    '''
|
# coding: utf-8
"""Preprocess HMDA mortgage-loan data for model training.

Reads the raw ``loan_data`` table from ``mortgage.db``, one-hot encodes the
categorical columns, imputes missing numeric values, removes a one-row
target class, splits into train/test sets and writes the four splits back
to the same SQLite database.
"""
import pandas as pd
import numpy as np
import sqlite3
from sklearn.utils import shuffle
from sklearn.model_selection import train_test_split

# Load the raw table and drop the SQL round-trip index column.
cnx = sqlite3.connect('mortgage.db')
loan_df_new = pd.read_sql_query("SELECT * FROM loan_data", cnx)
loan_df_new = loan_df_new.drop('index', axis=1)

# Drop 7 columns that hold the same value for every row (no information).
loan_df_new = loan_df_new.drop(
    ['state_name', 'state_abbr', 'state_code', 'respondent_id',
     'owner_occupancy_name', 'lien_status_name', 'agency_abbr'], axis=1)

# One-hot encode every categorical column: (source column, dummy prefix).
# This replaces 14 near-identical copy-pasted blocks with one loop.
CATEGORICAL_COLUMNS = [
    ('agency_name', 'agency'),
    ('applicant_ethnicity_name', 'ethnicity'),
    ('applicant_race_name_1', 'race'),
    ('applicant_sex_name', 'sex'),
    ('co_applicant_ethnicity_name', 'coethnicity'),
    ('co_applicant_race_name_1', 'corace'),
    ('co_applicant_sex_name', 'cosex'),
    ('county_name', 'county'),
    ('hoepa_status_name', 'hoepa'),
    ('loan_purpose_name', 'purpose'),
    ('loan_type_name', 'type'),
    ('msamd_name', 'msamd'),
    ('preapproval_name', 'preapp'),
    ('property_type_name', 'prop'),
    ('purchaser_type_name', 'purchase'),
]
for column, prefix in CATEGORICAL_COLUMNS:
    print(loan_df_new[column].value_counts())
    dummies = pd.get_dummies(loan_df_new[column], prefix=prefix)
    loan_df_new = pd.concat([loan_df_new, dummies], axis=1)

# Remove the one-row target class.
# BUG FIX: this filter was previously applied to loan_df_new AFTER
# loan_df_fin had already been derived from it, so the row silently
# survived into the train/test split.  Filter before deriving the model
# frame so the removal actually takes effect.
print(loan_df_new['action_taken_name'].value_counts())
loan_df_new = loan_df_new[
    loan_df_new['action_taken_name'] != 'Preapproval request approved but not accepted']

# Drop the original (now encoded) categorical columns.
loan_df_fin = loan_df_new.drop([column for column, _ in CATEGORICAL_COLUMNS], axis=1)

# Impute missing numeric values with the column mean, then drop any
# remaining rows with missing data.
loan_df_fin['applicant_income_000s'] = loan_df_fin['applicant_income_000s'].fillna(
    loan_df_fin.applicant_income_000s.mean())
loan_df_fin['census_tract_number'] = loan_df_fin['census_tract_number'].fillna(
    loan_df_fin.census_tract_number.mean())
loan_df_fin = loan_df_fin.dropna()

# Shuffle rows so the split draws from a uniform distribution.
loan_df_fin = shuffle(loan_df_fin)

# Separate the target variable from the features.
target_df = pd.DataFrame(loan_df_fin['action_taken_name'])
loan_df_fin = loan_df_fin.drop(['action_taken_name'], axis=1)

# 80/20 train/test split, persisted back to SQLite.
X_train, X_test, Y_train, Y_test = train_test_split(loan_df_fin, target_df, test_size=0.2)
conn = sqlite3.connect("mortgage.db")
X_train.to_sql("X_train", conn, if_exists="replace")
X_test.to_sql("X_test", conn, if_exists="replace")
Y_train.to_sql("Y_train", conn, if_exists="replace")
Y_test.to_sql("Y_test", conn, if_exists="replace")
|
import nltk
from nltk.corpus import stopwords

s = '''Good muffins cost $3.88\nin New York. Please buy me
... two of them.\n\nThanks.'''
# Tokenize, then drop English stopwords.
# PERF FIX: build the stopword set ONCE.  The original evaluated
# set(stopwords.words('english')) inside the comprehension condition,
# re-reading the corpus and rebuilding the set for every single token.
tokens = nltk.wordpunct_tokenize(s)
english_stopwords = set(stopwords.words('english'))
filtered = [w for w in tokens if w not in english_stopwords]
|
#!/usr/bin/env python
import rospy
import math
import tf
import geometry_msgs.msg
import numpy as np
import json
import sys
from scipy.spatial.transform import Rotation
from hrca_action.utilities import *
import actionlib
from hrca_action.panda_arm import PandaArm
if __name__ == '__main__':
    # BUG FIX: the node is initialized exactly once.  The original called
    # rospy.init_node a second time further down ("panda_arm_client_py"),
    # which raises a ROSException -- a process may only init one node.
    rospy.init_node('object_publisher')
    listener = tf.TransformListener()

    rot_cluster = []
    trans_cluster = []
    camera = "realsense"

    # Collect up to 10 samples of the AR-marker pose for averaging.
    for i in range(1):
        i = 29  # marker id being tracked (loop variable is overwritten)
        for _ in range(10):
            try:
                # NOTE(review): waitForTransform targets /panda_link0 but the
                # lookup reads from camera_color_optical_frame -- confirm the
                # intended source frame.
                listener.waitForTransform('/panda_link0', '/ar_marker_' + str(i), rospy.Time(), rospy.Duration(4.0))
                (trans, rot) = listener.lookupTransform('camera_color_optical_frame', 'ar_marker_' + str(i), rospy.Time(0))
                rot_cluster.append(rot)
                trans_cluster.append(trans)
            except Exception as e:
                print(e)
                continue

    # Report the collected rotation (quaternions) and translation samples.
    rot_cluster = np.array(rot_cluster)
    aldkfj = Rotation.from_quat(rot_cluster)
    print(list(aldkfj.as_quat()))
    trans_cluster = np.array(trans_cluster)
    print(trans_cluster)

    # Build the planning scene and add the object box (from
    # test_panda_arm_action_server.py).
    panda_arm = PandaArm(simulation=True)
    moh = MoveitObjectHandler()
    # obj_1_size = (0.101, 0.056, 0.083)  # spam box
    obj_1_size = (0.15, 0.055, 0.205)
    obj_1_rotation = (0, 0, 0)
    obj_1_pose = create_pose_stamped(create_pose(-0.00072, 0.34528, 0.1025, -1, 0, 0, 0), "panda_link0")  # ritz
    moh.add_box_object("obj_1", pose=obj_1_pose, size=obj_1_size, rotation=obj_1_rotation, frame="panda_link0")
import unittest
from sources.controller.controller import Controller
class ControllerTests(unittest.TestCase):
    """Unit tests for the Controller class."""

    def testControllerConstructor(self):
        """The controller can be constructed without raising."""
        # FIX: assertEquals is a deprecated alias of assertEqual, and
        # asserting 0 == 0 verified nothing -- assert something meaningful
        # about the object that was just constructed instead.
        self.controller = Controller()
        self.assertIsInstance(self.controller, Controller)
#!/usr/bin/env python
# _*_ coding: utf-8 _*_
import os, sys, traceback, json
from web3.auto import w3
if __name__ == '__main__':
    # Load the contract ABI produced by the Solidity build.
    abifile = open(os.path.join(os.path.dirname(__file__), "sol/build/abi.json"), "r")
    abi = json.load(abifile)
    abifile.close()

    # NOTE(review): a private key hard-coded in source is a security risk --
    # move it to an environment variable or a key store.
    key = "0x620b0c04de671567431e962c6d0eadc28b9f25d672d0a036044c5a259c27ad9b"
    contract_address = w3.toChecksumAddress("0x70988a12797aff8c063a72bebcaf897175c590c3")
    erc721 = w3.eth.contract(address=contract_address, abi=abi)
    # tx = erc721.functions.mintTo("0x005Ea2533D25B74BE9F774c79Fa4E0D219912B41").buildTransaction()
    # print(tx)
    ##########################################
    # Hand-built raw transaction; `data` is the ABI-encoded call to
    # mintTo(0x005Ea2...).  chainId 4 is a test network (Rinkeby-era id).
    transaction = {
        "from": "0xD9d73f325BdF1af2C76437b95CE72574D56E3232",
        "to": "0x70988a12797AFf8c063a72BebCaf897175C590C3",
        "value": 0,
        "gas": 200000,
        "gasPrice": 10 ** 9,  # 1 gwei
        "nonce": 6,  # NOTE(review): hard-coded nonce must match the account's next nonce
        "chainId": 4,
        "data": "0x755edd17000000000000000000000000005ea2533d25b74be9f774c79fa4e0d219912b41"
    }
    # Sign locally, then print the raw signed transaction and its hash.
    signed = w3.eth.account.sign_transaction(transaction, key)
    print(signed.rawTransaction.hex())
    print(signed.hash.hex())
|
import os
import random
# Input list of coarse-annotation samples and output locations for the
# generated train lists (absolute paths to a local Cityscapes checkout).
list_txtpath='/home/zhanwj/Desktop/pyTorch/Detectron.pytorch/lib/datasets/data/cityscapes/label_info_coarse/train.txt'
save_path='/home/zhanwj/Desktop/pyTorch/Detectron.pytorch/lib/datasets/data/cityscapes/annotations/'
coarse_train='coarse_train.txt'
data_path='/home/zhanwj/Desktop/pyTorch/Detectron.pytorch/lib/datasets/data/cityscapes/'
fine_train ="train.txt"
coarse_fine_mixed="coarse_fine_mixed"
# Accumulators filled by the script body below.
coarse_lines=[]
coarse_fine_mixed_list=[]
def readlines(filename):
    """Return every line of *filename* as a list, newlines included."""
    with open(filename, 'r') as handle:
        return list(handle)
def writelines(filename, list_to_wirte):
    """Write each element of *list_to_wirte* to *filename* verbatim.

    Elements are expected to already carry their trailing newlines.
    (The misspelled parameter name is kept for interface compatibility.)
    """
    with open(filename, 'w') as handle:
        for chunk in list_to_wirte:
            handle.write(chunk)
def check_file(file_path):
    """Verify that every image/label referenced in *file_path* exists.

    Each line is "left right sem disparity"; only the left image and the
    semantic label paths (relative to the module-level data_path) are
    checked.  Prints a warning per missing file and a final failure count.
    """
    samples = readlines(file_path)
    missing = 0
    for index, sample in enumerate(samples):
        left, right, sem, _ = sample.split()
        for rel in (left, sem):
            if not os.path.isfile(os.path.join(data_path, rel)):
                print("file does not exist:{}".format(rel))
                missing = missing + 1
        print("Number of samples had been processed:{}".format(index))
    print("File not exist:", missing)
# Rewrite the coarse-annotation list into the four-column format used by the
# loader, stripping the first 75 characters (a machine-specific path prefix)
# and filling the unused columns with placeholders.
with open(list_txtpath,'r') as f:
    for lines in f.readlines():
        line=lines.split()
        left=line[0][75:]
        right='right'
        sem=line[1][75:]
        disparity='disparity'
        coarse_line=left+' '+right+' '+sem+' '+disparity+'\n'
        coarse_lines.append(coarse_line)
print("Number of samples had been processed:{}".format(len(coarse_lines)))
# Persist the converted coarse list.
f2=open(os.path.join(save_path,coarse_train),'w')
f2.writelines(coarse_lines)
f2.close()
# Mix coarse and fine lists (fine set repeated 4x to balance), shuffle,
# and write the combined training list.
coarse_train_list=readlines(os.path.join(save_path,coarse_train))
fine_train_list=readlines(os.path.join(save_path,fine_train))
for i in range(4):
    coarse_train_list.extend(fine_train_list)
print("Total Number of samples:",len(coarse_train_list))
random.shuffle(coarse_train_list)
writelines(os.path.join(save_path,coarse_fine_mixed),coarse_train_list)
# Sanity check: verify all referenced files exist on disk.
check_file(os.path.join(save_path,coarse_train))
|
import math
def add(operator, x, y):
    """Return x + y.  The *operator* argument is accepted but unused."""
    total = x + y
    return total
def subtract(operator, x, y):
    """Return x - y.  The *operator* argument is accepted but unused."""
    difference = x - y
    return difference
def divide(operator, x, y):
    """Return x / y (true division; ZeroDivisionError propagates).

    The *operator* argument is accepted but unused.
    """
    quotient = x / y
    return quotient
def multiply(operator, x, y):
    """Return x * y.  The *operator* argument is accepted but unused."""
    product = x * y
    return product
def power(operator, x, y):
    """Return x raised to the power y.  *operator* is unused."""
    result = x ** y
    return result
def card(start, end):
    """Cards needed to cover the XP of levels [start, end).

    XP per level i is floor(10 * i**1.5).  Returns a tuple
    (silver, gold, platinum) of card counts at 200/250/300 XP per card,
    each rounded up.
    """
    exp = sum(math.floor(10 * (level ** 1.5)) for level in range(start, end))
    silver = math.ceil(exp / 200)
    gold = math.ceil(exp / 250)
    platinum = math.ceil(exp / 300)
    return (silver, gold, platinum)
|
class Tumor:
    """Simple value object pairing a tumor sample with its classified type."""

    def __init__(self, tumor, tumorType):
        # Stored verbatim; no validation is performed.
        self.tumor = tumor
        self.tumorType = tumorType

    def __repr__(self):
        # Added for debuggability; backward compatible with existing callers.
        return "Tumor(tumor={!r}, tumorType={!r})".format(self.tumor, self.tumorType)
def bissexto(x):
    """Return True when year *x* is a leap year (Gregorian rules)."""
    return x % 400 == 0 or (x % 4 == 0 and x % 100 != 0)
def huluculu(x):
    """Return True when *x* is a multiple of 15 (huluculu festival year)."""
    return x % 15 == 0
def buluculu(x):
    """Return True when *x* is both a multiple of 55 and a leap year."""
    return x % 55 == 0 and bissexto(x)
# Read years from stdin until EOF; classify each as ordinary / leap /
# festival, with a blank line between consecutive answers.
f = True  # True only before the first answer (suppresses the separator)
a = input()
while True:
    try:
        if not f:
            # Blank-line separator between answers.
            print('\n', end='')
        bi = bissexto(int(a))
        hu = huluculu(int(a))
        bu = buluculu(int(a))
        if not bi and not hu and not bu:
            print("This is an ordinary year.\n", end='')
        else:
            # A year can match several categories; print all that apply.
            if bi:
                print("This is leap year.\n", end='')
            if hu:
                print("This is huluculu festival year.\n", end='')
            if bu:
                print("This is bulukulu festival year.\n", end='')
        f = False
        a = input()
    except EOFError:
        # End of input: stop reading.
        break
|
# -*- coding:UTF-8 -*-
"""
xvideos视频爬虫
https://www.xvideos.com/
@author: hikaru
email: hikaru870806@hotmail.com
如有问题或建议请联系
"""
import os
import re
import time
import traceback
from pyquery import PyQuery as pq
from common import *
# Shared configuration: overwritten from config in XVideos.__init__ and read
# by get_video_page (module-level so worker threads see the same values).
COOKIE_INFO = {}
VIDEO_QUALITY = 2  # 1 = low quality, 2 = high quality
ACTION_WHEN_BLOCK_HD_QUALITY = 2  # 1 = fall back to low, 2 = wait 10 min and retry, otherwise abort
CATEGORY_WHITELIST = ""  # regex alternation built from the config list
CATEGORY_BLACKLIST = ""  # regex alternation built from the config list
# Fetch and parse a single video page.
def get_video_page(video_id):
    """Fetch the play page of *video_id* and extract its title and stream URL.

    Returns a dict with:
        is_delete   -- True when the video has been removed (404/403)
        is_skip     -- True when the video is filtered out by category lists
        video_title -- page title ("" when deleted/skipped)
        video_url   -- stream URL (None when deleted/skipped)

    Raises crawler.CrawlerException on request or parse failures.
    """
    video_play_url = "https://www.xvideos.com/video%s/" % video_id
    # Force English page content.
    video_play_response = net.http_request(video_play_url, method="GET")
    result = {
        "is_delete": False,
        "is_skip": False,
        "video_title": "",
        "video_url": None,
    }
    if video_play_response.status == 404 or video_play_response.status == 403:
        result["is_delete"] = True
        return result
    if video_play_response.status != net.HTTP_RETURN_CODE_SUCCEED:
        raise crawler.CrawlerException(crawler.request_failre(video_play_response.status))
    video_play_response_content = video_play_response.data.decode(errors="ignore")

    # Collect the video's categories (index 0 is skipped -- presumably the
    # first list entry is not a category link; TODO confirm against markup).
    category_list_selector = pq(video_play_response_content).find(".video-tags-list ul li a")
    category_list = []
    for category_index in range(1, category_list_selector.length):
        category_selector = category_list_selector.eq(category_index)
        category_list.append(category_selector.html().strip().lower())

    # Apply the category blacklist/whitelist filters.
    if CATEGORY_BLACKLIST or CATEGORY_WHITELIST:
        is_skip = True if CATEGORY_WHITELIST else False
        for category in category_list:
            if CATEGORY_BLACKLIST:
                # A blacklist match means skip.
                if len(re.findall(CATEGORY_BLACKLIST, category)) > 0:
                    is_skip = True
                    break
            if CATEGORY_WHITELIST:
                # A whitelist match means keep.
                if len(re.findall(CATEGORY_WHITELIST, category)) > 0:
                    is_skip = False
        if is_skip:
            result["is_skip"] = True
            return result

    # Extract the video title.
    video_title = tool.find_sub_string(video_play_response_content, "html5player.setVideoTitle('", "');")
    if not video_title:
        raise crawler.CrawlerException("页面截取视频标题失败\n%s" % video_play_response_content)
    result["video_title"] = video_title.strip()

    # Extract the stream URL for the configured quality.
    if VIDEO_QUALITY == 2:
        video_url = tool.find_sub_string(video_play_response_content, "html5player.setVideoUrlHigh('", "');")
        # The high-quality URL is temporarily blocked.
        if not video_url:
            if ACTION_WHEN_BLOCK_HD_QUALITY == 1:
                # BUG FIX: the source-content argument was missing here, so
                # find_sub_string received the markers shifted into the
                # wrong parameters and could never match.
                video_url = tool.find_sub_string(video_play_response_content, "html5player.setVideoUrlLow('", "');")
            elif ACTION_WHEN_BLOCK_HD_QUALITY == 2:
                log.error("高质量视频地址已被暂时屏蔽,等待10分钟")
                time.sleep(600)
                return get_video_page(video_id)
            else:
                raise crawler.CrawlerException("高质量视频地址已被暂时屏蔽")
    else:
        # BUG FIX: same missing source-content argument as above.
        video_url = tool.find_sub_string(video_play_response_content, "html5player.setVideoUrlLow('", "');")
    if not video_url:
        raise crawler.CrawlerException("页面截取视频地址失败\n%s" % video_play_response_content)
    result["video_url"] = video_url
    return result
class XVideos(crawler.Crawler):
    """Sequential crawler: walks video ids upward from the saved position."""

    def __init__(self, **kwargs):
        global COOKIE_INFO
        global VIDEO_QUALITY
        global ACTION_WHEN_BLOCK_HD_QUALITY
        global CATEGORY_WHITELIST
        global CATEGORY_BLACKLIST

        # Set the application directory.
        crawler.PROJECT_APP_PATH = os.path.abspath(os.path.dirname(__file__))

        # Initialize framework parameters.
        sys_config = {
            crawler.SYS_DOWNLOAD_VIDEO: True,
            crawler.SYS_SET_PROXY: True,
            crawler.SYS_NOT_CHECK_SAVE_DATA: True,
            crawler.SYS_GET_COOKIE: ("xvideos.com",),
            crawler.SYS_APP_CONFIG: (
                ("VIDEO_QUALITY", 2, crawler.CONFIG_ANALYSIS_MODE_INTEGER),
                ("ACTION_WHEN_BLOCK_HD_QUALITY", 2, crawler.CONFIG_ANALYSIS_MODE_INTEGER),
                ("CATEGORY_WHITELIST", "", crawler.CONFIG_ANALYSIS_MODE_RAW),
                ("CATEGORY_BLACKLIST", "", crawler.CONFIG_ANALYSIS_MODE_RAW),
            ),
        }
        crawler.Crawler.__init__(self, sys_config, **kwargs)

        # Publish configuration to module globals for worker threads.
        COOKIE_INFO = self.cookie_value
        VIDEO_QUALITY = self.app_config["VIDEO_QUALITY"]
        if VIDEO_QUALITY not in [1, 2]:
            VIDEO_QUALITY = 2
            log.error("配置文件config.ini中key为'video_quality'的值必须是1或2,使用程序默认设置")
        ACTION_WHEN_BLOCK_HD_QUALITY = self.app_config["ACTION_WHEN_BLOCK_HD_QUALITY"]
        if ACTION_WHEN_BLOCK_HD_QUALITY not in [1, 2, 3]:
            ACTION_WHEN_BLOCK_HD_QUALITY = 2
            log.error("配置文件config.ini中key为'ACTION_WHEN_BLOCK_HD_QUALITY'的值必须是1至3之间的整数,使用程序默认设置")
        # Comma-separated category lists are compiled into case-insensitive
        # regex alternations; "*" wildcards become "\w*".
        category_whitelist = self.app_config["CATEGORY_WHITELIST"]
        if category_whitelist:
            CATEGORY_WHITELIST = "|".join(category_whitelist.lower().split(",")).replace("*", "\w*")
        category_blacklist = self.app_config["CATEGORY_BLACKLIST"]
        if category_blacklist:
            CATEGORY_BLACKLIST = "|".join(category_blacklist.lower().split(",")).replace("*", "\w*")

    def main(self):
        # Resume from the last crawled video id stored in the archive file.
        video_id = 1
        if os.path.exists(self.save_data_path):
            file_save_info = file.read_file(self.save_data_path)
            if not crawler.is_integer(file_save_info):
                log.error("存档内数据格式不正确")
                tool.process_exit()
            video_id = int(file_save_info)
        try:
            while video_id:
                if not self.is_running():
                    tool.process_exit(0)
                log.step("开始解析视频%s" % video_id)

                # Fetch and parse the video page.
                try:
                    video_play_response = get_video_page(video_id)
                except crawler.CrawlerException as e:
                    log.error("视频%s解析失败,原因:%s" % (video_id, e.message))
                    raise
                # Skip deleted or category-filtered videos and advance.
                if video_play_response["is_delete"]:
                    log.step("视频%s已删除,跳过" % video_id)
                    video_id += 1
                    continue
                if video_play_response["is_skip"]:
                    log.step("视频%s已过滤,跳过" % video_id)
                    video_id += 1
                    continue

                # Download the stream to "<zero-padded id> <title>.mp4".
                log.step("开始下载视频%s《%s》 %s" % (video_id, video_play_response["video_title"], video_play_response["video_url"]))
                file_path = os.path.join(self.video_download_path, "%08d %s.mp4" % (video_id, path.filter_text(video_play_response["video_title"])))
                save_file_return = net.save_net_file(video_play_response["video_url"], file_path, head_check=True)
                if save_file_return["status"] == 1:
                    log.step("视频%s《%s》 下载成功" % (video_id, video_play_response["video_title"]))
                else:
                    log.error("视频%s《%s》 %s 下载失败,原因:%s" % (video_id, video_play_response["video_title"], video_play_response["video_url"], crawler.download_failre(save_file_return["code"])))

                # Bookkeeping: count the download and advance the resume id.
                self.total_video_count += 1
                video_id += 1
        except SystemExit as se:
            if se.code == 0:
                log.step("提前退出")
            else:
                log.error("异常退出")
        except Exception as e:
            log.error("未知异常")
            log.error(str(e) + "\n" + traceback.format_exc())

        # Persist the resume point and report totals.
        file.write_file(str(video_id), self.save_data_path, file.WRITE_FILE_TYPE_REPLACE)
        log.step("全部下载完毕,耗时%s秒,共计视频%s个" % (self.get_run_time(), self.total_video_count))
if __name__ == "__main__":
    # Script entry point: run the crawler with default arguments.
    XVideos().main()
|
# Read an integer and report whether it is negative (Hungarian prompts).
szam = int(input("Adj meg egy számot! "))
uzenet = "A megadott szám negatív!" if szam < 0 else "A megadott szám nem negatív!"
print(uzenet)
print(" Itt a vége! ")
# -*- encoding: utf-8 -*-
__author__ = "Chmouel Boudjnah <chmouel@chmouel.com>"
import httplib2
import os
import sys
import json
import pprint
import time
import datetime
import cloudlb.base
import cloudlb.consts
import cloudlb.errors
class CLBClient(httplib2.Http):
"""
Client class for accessing the CLB API.
"""
def __init__(self,
username,
api_key,
region,
auth_url=None):
super(CLBClient, self).__init__()
self.username = username
self.api_key = api_key
if not auth_url and region == 'lon':
auth_url = cloudlb.consts.UK_AUTH_SERVER
else:
auth_url = cloudlb.consts.DEFAULT_AUTH_SERVER
self._auth_url = auth_url
if region.lower() in cloudlb.consts.REGION.values():
self.region = region
elif region.lower() in cloudlb.consts.REGION.keys():
self.region = cloudlb.consts.REGION[region]
else:
raise cloudlb.errors.InvalidRegion(region)
self.auth_token = None
self.account_number = None
self.region_account_url = None
def authenticate(self):
headers = {'Content-Type': 'application/json'}
body = '{"credentials": {"username": "%s", "key": "%s"}}' \
% (self.username, self.api_key)
#DEBUGGING:
if 'PYTHON_CLOUDLB_DEBUG' in os.environ:
pp = pprint.PrettyPrinter(stream=sys.stderr, indent=2)
sys.stderr.write("URL: %s\n" % (self._auth_url))
response, body = self.request(self._auth_url, 'POST',
body=body, headers=headers)
if 'PYTHON_CLOUDLB_DEBUG' in os.environ:
sys.stderr.write("RETURNED HEADERS: %s\n" % (str(response)))
sys.stderr.write("BODY:")
pp.pprint(body)
data = json.loads(body)
# A status code of 401 indicates that the supplied credentials
# were not accepted by the authentication service.
if response.status == 401:
reason = data['unauthorized']['message']
raise cloudlb.errors.AuthenticationFailed(response.status, reason)
if response.status != 200:
raise cloudlb.errors.ResponseError(response.status,
response.reason)
auth_data = data['auth']
self.account_number = int(
auth_data['serviceCatalog']['cloudServersOpenStack'][0]['publicURL'].rsplit('/', 1)[-1])
self.auth_token = auth_data['token']['id']
self.region_account_url = "%s/%s" % (
cloudlb.consts.REGION_URL % (self.region),
self.account_number)
def _cloudlb_request(self, url, method, **kwargs):
if not self.region_account_url:
self.authenticate()
#TODO: Look over
# Perform the request once. If we get a 401 back then it
# might be because the auth token expired, so try to
# re-authenticate and try again. If it still fails, bail.
kwargs.setdefault('headers', {})['X-Auth-Token'] = self.auth_token
kwargs['headers']['User-Agent'] = cloudlb.consts.USER_AGENT
if 'body' in kwargs:
kwargs['headers']['Content-Type'] = 'application/json'
kwargs['body'] = json.dumps(kwargs['body'])
ext = ""
fullurl = "%s%s%s" % (self.region_account_url, url, ext)
#DEBUGGING:
if 'PYTHON_CLOUDLB_DEBUG' in os.environ:
pp = pprint.PrettyPrinter(stream=sys.stderr, indent=2)
sys.stderr.write("URL: %s\n" % (fullurl))
sys.stderr.write("ARGS: %s\n" % (str(kwargs)))
sys.stderr.write("METHOD: %s\n" % (str(method)))
if 'body' in kwargs:
pp.pprint(json.loads(kwargs['body']))
response, body = self.request(fullurl, method, **kwargs)
if 'PYTHON_CLOUDLB_DEBUG' in os.environ:
sys.stderr.write("RETURNED HEADERS: %s\n" % (str(response)))
# If we hit a 413 (Request Limit) response code,
# check to see how long we have to wait.
# If you have to wait more then 10 seconds,
# raise ResponseError with a more sane message then CLB provides
if response.status == 413:
if 'PYTHON_CLOUDLB_DEBUG' in os.environ:
sys.stderr.write("(413) BODY:")
pp.pprint(body)
now = datetime.datetime.strptime(response['date'],
'%a, %d %b %Y %H:%M:%S %Z')
# Absolute limits are not resolved by waiting
if not 'retry-after' in response:
data = json.loads(body)
raise cloudlb.errors.AbsoluteLimit(data['message'])
# Retry-After header now doesn't always return a timestamp,
# try parsing the timestamp, if that fails wait 5 seconds
# and try again. If it succeeds figure out how long to wait
try:
retry = datetime.datetime.strptime(response['retry-after'],
'%a, %d %b %Y %H:%M:%S %Z')
except ValueError:
if response['retry-after'] > '30':
raise cloudlb.errors.RateLimit(response['retry-after'])
else:
time.sleep(5)
response, body = self.request(fullurl, method, **kwargs)
except:
raise
else:
if (retry - now) > datetime.timedelta(seconds=10):
raise cloudlb.errors.RateLimit((retry - now))
else:
time.sleep((retry - now).seconds)
response, body = self.request(fullurl, method, **kwargs)
if body:
try:
body = json.loads(body, object_hook=lambda obj: dict((k.encode('ascii'), v) for k, v in obj.items()))
except(ValueError):
pass
if 'PYTHON_CLOUDLB_DEBUG' in os.environ:
sys.stderr.write("BODY:")
pp.pprint(body)
if (response.status >= 200) and (response.status < 300):
return response, body
if response.status == 404:
raise cloudlb.errors.NotFound(response.status, '%s not found' % url)
elif response.status == 413:
raise cloudlb.errors.RateLimit(retry)
try:
message = ', '.join(body['messages'])
except KeyError:
message = body['message']
if response.status == 400:
raise cloudlb.errors.BadRequest(response.status, message)
elif response.status == 422:
if 'unprocessable' in message:
raise cloudlb.errors.UnprocessableEntity(response.status,
message)
else:
raise cloudlb.errors.ImmutableEntity(response.status,
message)
else:
raise cloudlb.errors.ResponseError(response.status,
message)
def put(self, url, **kwargs):
    """Issue an HTTP PUT to *url* via the shared request helper."""
    return self._cloudlb_request(url, 'PUT', **kwargs)
def get(self, url, **kwargs):
    """Issue an HTTP GET to *url* via the shared request helper."""
    return self._cloudlb_request(url, 'GET', **kwargs)
def post(self, url, **kwargs):
    """Issue an HTTP POST to *url* via the shared request helper."""
    return self._cloudlb_request(url, 'POST', **kwargs)
def delete(self, url, **kwargs):
    """Issue an HTTP DELETE to *url* via the shared request helper."""
    return self._cloudlb_request(url, 'DELETE', **kwargs)
|
# The python implementation which corresponds
# https://github.com/kaelzhang/gaia/blob/master/example/hello/controller/Greeter.js
import asyncio
def SayHello(helloRequest, HelloReply):
    """Greeter handler: build a reply greeting the request's ``name``.

    *HelloReply* is a factory (e.g. a protobuf message class) called with a
    ``message`` keyword argument.
    """
    greeting = f'Hello {helloRequest.name}'
    return HelloReply(message=greeting)
async def DelayedSayHello(*args):
    """Same as SayHello, but only after a 300-second asynchronous delay.

    The sleep yields to the event loop, so other coroutines keep running
    while this handler waits.
    """
    await asyncio.sleep(300)
    return SayHello(*args)
|
class WyzeClientError(Exception):
    """Base class for all Wyze client errors."""
class WyzeRequestError(WyzeClientError):
    """Error raised when there's a problem with the request that's being submitted."""
class WyzeFeatureNotSupportedError(WyzeRequestError):
    """Error raised when the requested action on a device isn't supported."""

    def __init__(self, action: str):
        msg = f"{action} is not supported on this device"
        # BUG FIX: the original called super(WyzeRequestError, self), which
        # skips WyzeRequestError itself in the MRO. Zero-argument super()
        # resolves from this class, as intended.
        super().__init__(msg)
class WyzeApiError(WyzeClientError):
    """Error raised when Wyze does not send the expected response.
    .. note ::
        The message (str) passed into the exception is used when
        a user converts the exception to a str.
        i.e. ``str(WyzeApiError("This text will be sent as a string."))``
    """

    def __init__(self, message, response):
        #: The WyzeResponse object containing all of the data sent back from the API
        self.response = response
        combined = f"{message}\nThe server responded with: {response}"
        super().__init__(combined)
class WyzeClientNotConnectedError(WyzeClientError):
    """Error raised when attempting to send messages over the websocket when the
    connection is closed."""
class WyzeObjectFormationError(WyzeClientError):
    """Error raised when a constructed object is not valid/malformed."""
class WyzeClientConfigurationError(WyzeClientError):
    """Error raised when the client is configured incorrectly.

    NOTE(review): the original docstring was copy-pasted from
    WyzeClientNotConnectedError ("websocket ... connection is closed");
    reworded here to match the class name — confirm intended meaning.
    """
|
import cleanup
import pdb
import random
class Dictogram(dict):
    """A word-frequency histogram backed by ``dict`` (word -> count).

    NOTE(review): several methods below reference helpers that are commented
    out or called with the wrong arity; each problem is flagged inline rather
    than changed here.
    """

    def __init__(self, word_text=None):
        """Optionally seed the histogram from an iterable of words."""
        if word_text:
            self.word_text = word_text
            for word in self.word_text:
                self.add_count(word)

    # NOTE(review): stray statement-level string from the original source,
    # kept verbatim (it is a no-op, not a docstring of any method).
    '''Generates a dictogram given a piece of text'''

    def generate_histogram(self, word_text):
        '''Build a plain dict of word -> occurrence count for the first 10
        cleaned tokens of *word_text*.'''
        # O(n^2): list.count() is re-run for every word, including repeats.
        word_frequency = {}
        self.word_text = word_text
        cleaned_text = cleanup.clean_given_text(self.word_text)[:10]
        for word in cleaned_text:
            word_occurences = cleaned_text.count(word)
            word_frequency[word] = word_occurences
        return word_frequency

    def add_count(self, word, count=1):
        '''Add *count* to the tally for *word*, inserting it if absent.'''
        if word not in self:
            self[word] = count
        else:
            self[word] += count

    def generate_histogram_weights(self):
        '''Return a dict of word -> relative frequency (counts / total).'''
        weight_dictionary = {}
        sum_values = sum([val for val in self.values()])
        for key, value in self.items():
            weight_dictionary[key] = value / sum_values
        return weight_dictionary

    def generate_specific_frequency_of_word(self, user_inputted_word):
        '''Count how often a user-chosen word occurs in the stored corpus.'''
        # NOTE(review): the parameter is immediately overwritten by input(),
        # so the value passed in is ignored — confirm intent.
        specific_word_frequency = {}
        user_inputted_word = str(input())
        cleaned_text = cleanup.clean_given_text(self.word_text)
        if user_inputted_word in cleaned_text:
            specific_word_occurence = cleaned_text.count(user_inputted_word)
            specific_word_frequency[user_inputted_word] = specific_word_occurence
        else:
            return 'This word does not occur at all'
        return specific_word_frequency

    def find_rarest_word(self):
        # NOTE(review): self.generate_histogram() is called without its
        # required word_text argument (TypeError at runtime), and max()
        # actually finds the MOST frequent word, not the rarest.
        rarest_word = {}
        highest_occurence = max(self.generate_histogram().values())
        for key, value in self.generate_histogram().items():
            if value == highest_occurence:
                rarest_word[key] = value
        return rarest_word

    # def pair_text_together(self):
    #     # Pairs a given corpus into pairs of words
    #     paired_text = {}
    #     cleaned_text = cleanup.clean_given_text(self.word_text)
    #     rarest_word = max(self.generate_histogram().values())
    #     for word in range(len(cleaned_text[:10]) - 1):
    #         paired_text[cleaned_text[word]] = {cleaned_text[word + 1]: }
    #     return paired_text

    def find_word_after_entry(self, user_word_input):
        # NOTE(review): relies on self.pair_text_together(), which is
        # commented out above — calling this raises AttributeError.
        pair_text_list = list(self.pair_text_together())
        new_word = pair_text_list.index(user_word_input) + 1
        return pair_text_list[new_word]

    def generates_all_words(self):
        # Return the first 10 cleaned words of the stored corpus as a list.
        word_list = []
        cleaned_text = cleanup.clean_given_text(self.word_text)[:10]
        for word in cleaned_text:
            word_list.append(word)
        return word_list

    def develop_states_and_transitions(self):
        # Find the states and transitions when given a corpus.
        # NOTE(review): also depends on the commented-out pair_text_together(),
        # so this method cannot run as written.
        word_b_list = []
        rel_probability = {}
        chain_dictionary = {}
        paired_text_list = list(self.pair_text_together())
        count = 0
        while count != (len(paired_text_list) - 1):
            for word in self.generates_all_words():
                next_word_occurence = self.generates_all_words().count(self.find_word_after_entry(word))
                current_word_occurence = self.generates_all_words().count(word)
                rel_probability = next_word_occurence / current_word_occurence
                new_word = self.generates_all_words().index(word) + 1
                new_word_value = paired_text_list[new_word]
                chain_dictionary[word] = {self.find_word_after_entry(new_word_value): rel_probability}
            count = count + 1
        return chain_dictionary

    def generate_random_word(self):
        # Weighted random selection: walk the cumulative distribution of
        # word weights until it passes a uniform random draw.
        generated_random_word_dictionary = {}
        randomly_generated_number = random.uniform(0, 1)
        cumalitve_probability = 0.0
        for word, weighted_occurence in zip(self.items(), self.generate_histogram_weights().values()):
            # index_of_value =index_of_value
            cumalitve_probability += weighted_occurence
            if randomly_generated_number < cumalitve_probability:
                break
        return word[0]

    def generate_sentence_from_markov_chain(self, length_of_sentence):
        # NOTE(review): generate_random_word_from_chain is not defined on
        # this class — generate_random_word was probably intended.
        sentence_list = []
        x = 0
        for i in range(length_of_sentence):
            sentence_list.append(self.generate_random_word_from_chain())
        sentence = ' '.join(sentence_list)
        return sentence
# Sample corpus: the first 12 cleaned tokens from the Robert Greene text.
cleaned_text = cleanup.clean_given_text("robert_greene.txt")[:12]
"""This function essentially makes a dictionary where the keys are the current word while the value is a dictionary
of all the possible next words"""
def markov_chain(cleaned_text):
    """Build a first-order Markov chain from a token list.

    Returns a dict mapping each word to a Dictogram of the words observed
    immediately after it in *cleaned_text*.
    """
    chain = {}
    # zip() pairs every word with its immediate successor.
    for current_word, next_word in zip(cleaned_text, cleaned_text[1:]):
        if current_word not in chain:
            chain[current_word] = Dictogram()
        chain[current_word].add_count(next_word)
    return chain
# def weighted_markov(markov):
# weighted_markov_dictionary = {}
# for key ,value in markov.items():
# weighted_markov_dictionary[key] = value.generate_histogram_weights()
# return weighted_markov_dictionary
#
# def second_order_markov_chain(cleaned_text):
# pass
# print(markov_chain(cleaned_text))
#
def second_order_markov_chain(cleaned_text):
    """Build a second-order Markov chain from a token list.

    Each key is a pair of consecutive words joined by a single space; its
    value is a Dictogram counting the words that follow that pair.
    """
    chain = {}
    # Stop two short of the end so idx+2 is always a valid index.
    for idx in range(len(cleaned_text) - 2):
        pair_key = cleaned_text[idx] + ' ' + cleaned_text[idx + 1]
        follower = cleaned_text[idx + 2]
        if pair_key not in chain:
            chain[pair_key] = Dictogram()
        chain[pair_key].add_count(follower)
    return chain
print(second_order_markov_chain(cleaned_text))
|
"""A shim module for deprecated imports
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
import types
class ShimModule(types.ModuleType):
    """Module subclass that forwards attribute access to a 'mirror' module.

    Used to keep deprecated import locations working: importing an attribute
    from the shim transparently imports it from the mirror module instead.
    """

    def __init__(self, *args, **kwargs):
        # Dotted name of the mirrored module; popped before delegating so
        # ModuleType.__init__ does not see an unexpected keyword.
        self._mirror = kwargs.pop("mirror")
        super(ShimModule, self).__init__(*args, **kwargs)
        if sys.version_info >= (3,4):
            # Reuse the mirror's import spec so importlib machinery
            # (e.g. reload) treats this shim like a real module on 3.4+.
            self.__spec__ = __import__(self._mirror).__spec__

    def __getattr__(self, key):
        # Use the equivalent of import_item(name), see below
        name = "%s.%s" % (self._mirror, key)
        # NOTE: the code below was copied *verbatim* from
        # importstring.import_item. For some very strange reason that makes no
        # sense to me, if we call it *as a function*, it doesn't work. This
        # has something to do with the deep bowels of the import machinery and
        # I couldn't find a way to make the code work as a standard function
        # call. But at least since it's an unmodified copy of import_item,
        # which is used extensively and has a test suite, we can be reasonably
        # confident this is OK. If anyone finds how to call the function, all
        # the below could be replaced simply with:
        #
        # from IPython.utils.importstring import import_item
        # return import_item('MIRROR.' + key)
        parts = name.rsplit('.', 1)
        if len(parts) == 2:
            # called with 'foo.bar....'
            package, obj = parts
            module = __import__(package, fromlist=[obj])
            return getattr(module, obj)
        else:
            # called with un-dotted string
            return __import__(parts[0])
|
"""
:py:class:`GlobalUtils` contains global utilities
=================================================
This software was developed for the SIT project.
If you use all or part of it, please give an appropriate acknowledgment.
Author Mikhail Dubrovin
"""
import sys
import numpy as np
def info_ndarr(nda, name='', first=0, last=5):
    """Return a one-line summary of an array-like value.

    For ndarrays: shape, size, dtype and the flattened slice [first:last].
    Tuples and lists are converted to ndarrays and summarized recursively;
    None and non-array objects get a short textual description.
    """
    prefix = '%s ' % name if name != '' else name
    # Long previews are pushed onto their own line.
    sep = '\n' if (last - first) > 10 else ' '
    if nda is None:
        return '%s%s' % (prefix, nda)
    if isinstance(nda, tuple):
        return info_ndarr(np.array(nda), 'ndarray from tuple: %s' % name)
    if isinstance(nda, list):
        return info_ndarr(np.array(nda), 'ndarray from list: %s' % name)
    if not isinstance(nda, np.ndarray):
        return '%s%s' % (prefix, type(nda))
    preview = str(nda.ravel()[first:last]).rstrip(']')
    tail = '...]' if nda.size > last else ']'
    return '%sshape:%s size:%d dtype:%s%s%s%s' % (
        prefix, str(nda.shape), nda.size, nda.dtype, sep, preview, tail)
def print_ndarr(nda, name='', first=0, last=5):
    """Print the one-line array summary produced by info_ndarr."""
    print(info_ndarr(nda, name, first, last))
def divide_protected(num, den, vsub_zero=0):
    """Element-wise num/den with *vsub_zero* substituted wherever den == 0.

    The denominator is replaced by 1 at those positions so no division
    warning is raised.
    """
    nonzero = den != 0
    safe_num = np.where(nonzero, num, vsub_zero)
    safe_den = np.where(nonzero, den, 1)
    return safe_num / safe_den
def info_command_line_parameters(parser):
    """Return a printable summary of the command line plus each option's
    current and default value.

    Accepts either an optparse.OptionParser or an argparse.ArgumentParser.
    Note: calls parser.parse_args(), which reads sys.argv.
    """
    from optparse import OptionParser
    if isinstance(parser, OptionParser):
        parsed_opts, _positional = parser.parse_args()
        opts = vars(parsed_opts)                    # dict of options
        defs = vars(parser.get_default_values())    # dict of defaults
    else:  # argparse.ArgumentParser
        opts = vars(parser.parse_args())
        defs = vars(parser.parse_args([]))
    s = ('Command: ' + ' '.join(sys.argv)
         + '\n  Optional parameters:'
         + '\n    <key> <value> <default>\n')
    for key, value in opts.items():
        default = defs[key]
        # Permission-style options read better in octal.
        if key in ('dirmode', 'filemode'):
            value = oct(value)
            default = oct(default)
        s += ' %s %s %s\n' % (key.ljust(10), str(value).ljust(20), str(default).ljust(20))
    return s
def info_command_line():
    """Return the command line this process was started with, as one string."""
    return ' '.join(sys.argv)
def info_kwargs(fmt='%10s: %s', separator='\n', **kwargs):
    """Format keyword arguments, one per *separator*-joined entry, via *fmt*."""
    formatted = [fmt % (key, str(val)) for key, val in kwargs.items()]
    return separator.join(formatted)
def selected_record(n):
    """Return True for progressively sparser record numbers.

    Every record below 5, every 10th below 50, every 100th below 500,
    then every 1000th thereafter — a log-style sampling schedule.
    """
    if n < 5:
        return True
    if n < 50:
        return n % 10 == 0
    if n < 500:
        return n % 100 == 0
    return n % 1000 == 0
# EOF
|
import tensorflow as tf
from ceiling_segmentation.UNET.VGG16.EncoderDecoder import EncoderDecoder
from ceiling_segmentation.utils.LoadData import LoadData
import matplotlib.pyplot as plt
import datetime
import numpy as np
import pathlib
tf.config.experimental.set_memory_growth(tf.config.experimental.list_physical_devices('GPU')[0], True)
class VGG16Train:
def __init__(self):
self.batch_size = 16
self.image_size = 224
self.buffer_size = 32
self.epoch = 6
self.autotune = tf.data.experimental.AUTOTUNE
self.seed = 15
self.num_channels = 3
self.num_classes = 2
self.parameters()
self.data_set = self.load_data()
def load_data(self):
data_address = pathlib.Path(__file__).parent.absolute()
data_address.replace("UNET/VGG16", "data")
dataset = LoadData(data_address + "/training/images/*.png",
data_address + "validation/images/*.png",
self.image_size, self.batch_size, shuffle_buffer_size=self.buffer_size,
seed=123).get_dataset()
# following lines are used for debug
print(dataset['train'])
print(dataset['val'])
sample_image = None
sample_mask = None
for image, segmented_mask in dataset['train'].take(1):
sample_image, sample_mask = image, segmented_mask
self.display_sample([sample_image[0], sample_mask[0]])
return dataset
def parameters(self):
pass
def display_sample(self, display_list):
"""
Show side-by-side an input image, the ground truth and the prediction.
:param display_list: a list including [image, ground truth] or [image, ground truth, prediction]
:return:
"""
plt.figure(figsize=(18, 18))
title = ['Input Image', 'True Mask', 'Predicted Mask']
for i in range(len(display_list)):
plt.subplot(1, len(display_list), i + 1)
plt.title(title[i])
img = tf.keras.preprocessing.image.array_to_img(display_list[i])
plt.imshow(img)
plt.axis('off')
plt.show()
def create_mask(self, pred_mask: tf.Tensor) -> tf.Tensor:
"""Return a filter mask with the top 1 predictions
only.
Parameters
----------
pred_mask : tf.Tensor
A [IMG_SIZE, IMG_SIZE, N_CLASS] tensor. For each pixel we have
N_CLASS values (vector) which represents the probability of the pixel
being these classes. Example: A pixel with the vector [0.0, 0.0, 1.0]
has been predicted class 2 with a probability of 100%.
Returns
-------
tf.Tensor
A [IMG_SIZE, IMG_SIZE, 1] mask with top 1 predictions
for each pixels.
"""
# pred_mask -> [IMG_SIZE, SIZE, N_CLASS]
# 1 prediction for each class but we want the highest score only
# so we use argmax
pred_mask = tf.argmax(pred_mask, axis=-1)
# pred_mask becomes [IMG_SIZE, IMG_SIZE]
# but matplotlib needs [IMG_SIZE, IMG_SIZE, 1]
pred_mask = tf.expand_dims(pred_mask, axis=-1)
return pred_mask
def show_predictions(self, dataset, num=1):
"""Show a sample prediction.
Parameters
----------
dataset : [type], optional
[Input dataset, by default None
num : int, optional
Number of sample to show, by default 1
"""
for image, segmented_mask in dataset.take(num):
sample_image, sample_mask = image, segmented_mask
# The UNET is expecting a tensor of the size
# [BATCH_SIZE, IMG_SIZE, IMG_SIZE, 3]
# but sample_image[0] is [IMG_SIZE, IMG_SIZE, 3]
# and we want only 1 inference to be faster
# so we add an additional dimension [1, IMG_SIZE, IMG_SIZE, 3]
one_img_batch = sample_image[0][tf.newaxis, ...]
pred_mask = encoderDecoder(one_img_batch, training=False)
mask = self.create_mask(pred_mask)
self.display_sample([sample_image[0], sample_mask[0], mask[0]])
def weighted_loss_function(self, y_true, y_pred):
cross_entropy = tf.keras.backend.sparse_categorical_crossentropy(y_true, y_pred)
# calculate weight
y_true = tf.cast(y_true, dtype='float32')
y_true = tf.where(y_true == 0, np.dtype('float32').type(0.25), y_true)
weight = tf.where(y_true == 1, np.dtype('float32').type(0.75), y_true)
# multiply weight by cross entropy
weight = tf.squeeze(weight)
weighted_cross_entropy = tf.multiply(weight, cross_entropy)
return tf.reduce_mean(weighted_cross_entropy)
def build_model(self):
self.encoderDecoder = EncoderDecoder(self.num_classes, batch_norm=False)
# freeze the encoder and initialize it weights by vgg trained on imagenet
self.encoderDecoder.encoder.trainable = False
self.encoderDecoder.build((None, self.image_size, self.image_size, 3))
self.encoderDecoder.encoder.set_weights(tf.keras.applications.VGG16(include_top=False, weights='imagenet',
input_shape=(
self.image_size, self.image_size, 3)).get_weights())
self.loss_function = tf.keras.losses.SparseCategoricalCrossentropy()
self.optimizer = tf.keras.optimizers.Adam(learning_rate=0.0002, epsilon=1e-6)
# set up the metric and logs
train_loss = tf.keras.metrics.Mean(name="train_loss")
train_acc = tf.keras.metrics.SparseCategoricalAccuracy(name='train_accuracy')
test_loss = tf.keras.metrics.Mean(name='train_accuracy')
test_acc = tf.keras.metrics.SparseCategoricalAccuracy(name='test_accuracy')
current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
train_log_dir = 'logs/gradient_tape/' + current_time + '/train'
test_log_dir = 'logs/gradient_tape/' + current_time + '/test'
train_summary_writer = tf.summary.create_file_writer(train_log_dir)
test_summary_writer = tf.summary.create_file_writer(test_log_dir)
show_predictions(dataset['val'], 1)
@tf.function
def train_model(images, masks):
with tf.GradientTape() as g:
prediction = encoderDecoder(images)
loss = loss_function(masks, prediction)
trainable_variables = encoderDecoder.trainable_variables
gradients = g.gradient(loss, trainable_variables)
optimizer.apply_gradients(zip(gradients, trainable_variables))
train_loss.update_state(loss)
train_acc.update_state(masks, prediction)
@tf.function
def test_model(images, masks):
predictions = encoderDecoder(images)
loss = loss_function(masks, predictions)
test_loss.update_state(loss)
test_acc.update_state(masks, predictions)
batch_train_ctr = 0
batch_test_ctr = 0
for repeat in range(EPOCHS):
# reset the matrices at the beginning of every epoch
train_loss.reset_states()
train_acc.reset_states()
test_loss.reset_states()
test_acc.reset_states()
for (x_batch, y_batch) in dataset['train']:
train_model(x_batch, y_batch)
batch_train_ctr += 1
template = 'Epoch {}, Batch {}, Loss: {}, Accuracy: {}'
print(template.format(repeat, batch_train_ctr,
train_loss.result(),
train_acc.result() * 100))
with train_summary_writer.as_default():
tf.summary.scalar('train_loss', train_loss.result(), step=batch_train_ctr)
tf.summary.scalar('train_accuracy', train_acc.result(), step=batch_train_ctr)
for (x_batch, y_batch) in dataset['val']:
test_model(x_batch, y_batch)
batch_test_ctr += 1
template = 'Epoch {}, Batch{}, Test Loss: {}, Test Accuracy: {}'
print(template.format(repeat, batch_test_ctr,
test_loss.result(),
test_acc.result() * 100))
with test_summary_writer.as_default():
tf.summary.scalar('test_loss', test_loss.result(), step=batch_test_ctr)
tf.summary.scalar('test_accuracy', test_acc.result(), step=batch_test_ctr)
show_predictions(dataset['val'], num=5)
# encoderDecoder.save_weights(os.getcwd()+"/weights/WithoutBN/NaiveLoss"+str(repeat+1)+"/")
|
#!/usr/bin/python3
'''python script'''
import requests
def count_words(subreddit, word_list):
    """Fetch the hot posts of *subreddit*; returns None for invalid
    subreddits (non-200 responses)."""
    # BUG FIX: without allow_redirects=False an invalid subreddit makes
    # Reddit redirect to a search page that answers 200, defeating the
    # status-code check below.
    requestpost = requests.get(
        "https://www.reddit.com/r/{}/hot.json".format(subreddit),
        headers={"User-Agent": "amine"}, allow_redirects=False)
    if requestpost.status_code != 200:
        return None
    request_data = requestpost.json()
    # NOTE(review): word_list is unused and request_data is discarded — the
    # keyword-counting logic implied by the name appears unimplemented.
|
# Generated by Django 2.1.3 on 2019-01-01 19:48
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated schema for the blog app: Post and Comment."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('submit_date', models.DateTimeField(auto_now_add=True)),
                ('person_name', models.CharField(max_length=60)),
                ('comment', models.TextField()),
                ('is_public', models.BooleanField()),
            ],
            options={
                'db_table': 'comment_comment',
            },
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(unique_for_date='pub_date', verbose_name='slug')),
                ('pub_date', models.DateTimeField(db_index=True)),
                ('listed', models.BooleanField(default=False, verbose_name='Listed in public indexes?')),
                ('title', models.CharField(max_length=100)),
                ('content', models.TextField()),
                ('posted_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Comment.post is added after Post exists so the FK target resolves.
        migrations.AddField(
            model_name='comment',
            name='post',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Post'),
        ),
    ]
|
import unittest
from spikeinterface.postprocessing import check_equal_template_with_distribution_overlap, TemplateSimilarityCalculator
from spikeinterface.postprocessing.tests.common_extension_tests import WaveformExtensionCommonTestSuite
class SimilarityExtensionTest(WaveformExtensionCommonTestSuite, unittest.TestCase):
    """Runs the common waveform-extension suite for TemplateSimilarityCalculator."""
    # Extension under test and the data keys it is expected to produce.
    extension_class = TemplateSimilarityCalculator
    extension_data_names = ["similarity"]

    # extend common test
    def test_check_equal_template_with_distribution_overlap(self):
        """Exercise the overlap check for every ordered pair of distinct units."""
        we = self.we1
        for unit_id0 in we.unit_ids:
            waveforms0 = we.get_waveforms(unit_id0)
            for unit_id1 in we.unit_ids:
                if unit_id0 == unit_id1:
                    continue
                waveforms1 = we.get_waveforms(unit_id1)
                check_equal_template_with_distribution_overlap(waveforms0, waveforms1)
# Manual entry point: run the extension tests directly, without pytest.
if __name__ == "__main__":
    test = SimilarityExtensionTest()
    test.setUp()
    test.test_extension()
    test.test_check_equal_template_with_distribution_overlap()
|
from fuzzy_control import RuddRuleBase, AccRuleBase, plot_fuzzy_set, defuzzyfication
from fuzzy_inputs import *
if __name__ == "__main__":
    """
    This program is made for testing one of the
    rule bases created in the exercise. It will plot
    resultant FuzzySet and show defuzzyficated value of this
    resultant FuzzySet.
    """
    base_choice = input("Please select one of my bases (type 'acc' or 'rudd'): ")
    # defining which base we will be testing
    if base_choice == "rudd":
        my_base = RuddRuleBase()
    elif base_choice == "acc":
        my_base = AccRuleBase()
    else:
        raise ValueError("None of the above is selected!")
    # saving input variables to dictionary
    values_line = input("Enter L, D, LK, DK, V, S:\n")
    # BUG FIX: str.isdigit() rejects negative numbers ("-3"), silently
    # dropping them; strip an optional leading sign before the check.
    nums_from_input = [int(tok) for tok in values_line.split(" ")
                       if tok.lstrip("-").isdigit()]
    # Fail loudly instead of raising an opaque unpacking error below.
    if len(nums_from_input) != 6:
        raise ValueError("Exactly six integer values are required!")
    L, D, LK, DK, V, S = nums_from_input
    input_dict = dict(L=L, D=D, LK=LK, DK=DK, V=V, S=S)
    # fuzzy logic part
    my_base.instant_values = input_dict
    my_base.update_input_values_for_rules()
    fuzzy_result = my_base.calculate_rule_union()
    # BUG FIX: the docstring promises the defuzzyficated value and
    # defuzzyfication is imported, but it was never called or shown.
    print("Defuzzyficated value:", defuzzyfication(fuzzy_result))
    plot_fuzzy_set(fuzzy_result)
|
import numpy as np
import pandas as pd
import tensorflow as tf
import tensorflow.keras as keras
class MfHybridModel(object):
    """
    Class for hybrid model object (NeuMF-style: GMF branch + MLP branch).
    Args:
        num_user (int): The total number of users in the full data
        item_dim (int): The dimension of item representation. Default is 100
        comb_type (string): The type of combination layer to use: add | concat. Default is concat
        embed_dim (int): The size of embedding layers. Default is 100
        lr (float): The learning rate for the model
        user_pretrained: Optional pair of pretrained user-embedding weight
            matrices (GMF weights, MLP weights); random init is used when None.
    """

    def __init__(
        self,
        num_user,
        item_dim=100,
        comb_type="concat",
        embed_dim=100,
        lr=0.0001,
        user_pretrained=None,
    ):
        # Initialize the instance variables
        self.num_user = num_user
        self.item_dim = item_dim
        self.comb_type = comb_type
        self.embed_dim = embed_dim
        self.user_pretrained = user_pretrained
        self.lr = lr

    def get_model(self):
        """Build and compile the hybrid recommender; returns a keras Model
        taking (user_id, item_vector) and predicting a rating."""
        input_user_id = keras.layers.Input(shape=(1,), name="input_1")
        input_item_id = keras.layers.Input(
            shape=(self.item_dim,), name="input_2")
        # BUG FIX (idiom): None comparisons use `is`, not `==` (PEP 8).
        if self.user_pretrained is None:
            # Randomly initialized user embeddings (one per branch).
            embedding_user_gmf = keras.layers.Embedding(
                input_dim=self.num_user,
                output_dim=self.embed_dim,
                embeddings_initializer="he_normal",
                embeddings_regularizer=tf.keras.regularizers.l2(1e-6),
            )(input_user_id)
            embedding_user_mlp = keras.layers.Embedding(
                input_dim=self.num_user,
                output_dim=self.embed_dim,
                embeddings_initializer="he_normal",
                embeddings_regularizer=tf.keras.regularizers.l2(1e-6),
            )(input_user_id)
        else:
            # Pretrained user embeddings: [0] feeds GMF, [1] feeds MLP.
            embedding_user_gmf = keras.layers.Embedding(
                input_dim=self.num_user,
                output_dim=self.embed_dim,
                weights=[self.user_pretrained[0]],
                embeddings_regularizer=tf.keras.regularizers.l2(1e-6),
            )(input_user_id)
            embedding_user_mlp = keras.layers.Embedding(
                input_dim=self.num_user,
                output_dim=self.embed_dim,
                weights=[self.user_pretrained[1]],
                embeddings_regularizer=tf.keras.regularizers.l2(1e-6),
            )(input_user_id)
        # GMF branch: project the item vector to embed_dim, then take the
        # element-wise product with the user embedding.
        flatten_user_gmf = keras.layers.Flatten()(embedding_user_gmf)
        flatten_item_gmf = keras.layers.Flatten()(input_item_id)
        flatten_item_gmf = keras.layers.Dense(
            units=self.embed_dim,
            activation="relu",
            kernel_regularizer=tf.keras.regularizers.l2(1e-6),
        )(flatten_item_gmf)
        flatten_item_gmf = keras.layers.Dense(
            units=self.embed_dim,
            activation="relu",
            kernel_regularizer=tf.keras.regularizers.l2(1e-6),
        )(flatten_item_gmf)
        gmf_embed = keras.layers.Multiply()(
            [flatten_user_gmf, flatten_item_gmf])
        # MLP branch: same item projection, then combine with the user
        # embedding via the configured combination layer.
        flatten_user_mlp = keras.layers.Flatten()(embedding_user_mlp)
        flatten_item_mlp = keras.layers.Flatten()(input_item_id)
        flatten_item_mlp = keras.layers.Dense(
            units=self.embed_dim,
            activation="relu",
            kernel_regularizer=tf.keras.regularizers.l2(1e-6),
        )(flatten_item_mlp)
        flatten_item_mlp = keras.layers.Dense(
            units=self.embed_dim,
            activation="relu",
            kernel_regularizer=tf.keras.regularizers.l2(1e-6),
        )(flatten_item_mlp)
        if self.comb_type == "concat":
            mlp_embed = keras.layers.Concatenate()(
                [flatten_user_mlp, flatten_item_mlp])
        elif self.comb_type == "add":
            mlp_embed = keras.layers.Add()(
                [flatten_user_mlp, flatten_item_mlp])
        else:
            raise Exception(
                "Invalid comb type ==> %s | options ==> [concat, add]"
                % (self.comb_type)
            )
        # MLP tower: 512 -> 256 -> 128 with batch-norm and shrinking dropout.
        mlp_x = keras.layers.Dense(
            units=512, activation="relu", kernel_regularizer=keras.regularizers.l1(1e-6)
        )(mlp_embed)
        mlp_x = keras.layers.BatchNormalization()(mlp_x)
        mlp_x = keras.layers.Dropout(0.3)(mlp_x)
        mlp_x = keras.layers.Dense(
            units=256, activation="relu", kernel_regularizer=keras.regularizers.l1(1e-6)
        )(mlp_x)
        mlp_x = keras.layers.BatchNormalization()(mlp_x)
        mlp_x = keras.layers.Dropout(0.2)(mlp_x)
        mlp_x = keras.layers.Dense(
            units=128, activation="relu", kernel_regularizer=keras.regularizers.l1(1e-6)
        )(mlp_x)
        mlp_x = keras.layers.BatchNormalization()(mlp_x)
        mlp_x = keras.layers.Dropout(0.1)(mlp_x)
        # Final merge of both branches, then a single-unit prediction head.
        merged = keras.layers.Concatenate()([gmf_embed, mlp_x])
        x = keras.layers.Dense(
            units=1, kernel_initializer="lecun_uniform", activation="relu"
        )(merged)
        # Create and compile the model.
        model = keras.models.Model(
            inputs=[input_user_id, input_item_id], outputs=[x])
        model.compile(
            optimizer=keras.optimizers.Adam(self.lr),
            loss=keras.losses.MeanSquaredError(),
            metrics=keras.metrics.RootMeanSquaredError(),
        )
        # Return the model
        return model
|
# Generated by Django 3.1.1 on 2021-06-09 07:15
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds nt_without_gov_expense to travelrequest_tbl."""

    dependencies = [
        ('travelauth', '0039_travelrequest_tbl_pptr'),
    ]

    operations = [
        migrations.AddField(
            model_name='travelrequest_tbl',
            name='nt_without_gov_expense',
            field=models.IntegerField(default=0),
        ),
    ]
|
#!/usr/bin/python
'''
Simulation Visualiser for Embedded Cadmium
By: Ben Earle
ARSLab - Carleton University
This script will parse the I/O files and animate the pin values
for the duration of the simulation.
Note, if tkinter is not installed by default run the following
command in the terminal:
sudo apt-get install python-tk
'''
from tkinter import filedialog
from tkinter import *
import tkMessageBox
import os
debug = True

# Constants to convert units of time to microseconds:
HOURS_TO_MICRO = 1000*1000*60*60
MIN_TO_MICRO = 1000*1000*60
SEC_TO_MICRO = 1000*1000
MILI_TO_MICRO = 1000

def strTimeToMicroSeconds(time):
    """Convert a "HH:MM:SS:mmm:uuu" timestamp to microseconds.

    Missing trailing fields default to 0, so shorter forms like "HH:MM"
    are accepted.
    """
    # BUG FIX: in Python 3 map() returns a lazy iterator, so the original
    # len()/append() calls raised TypeError; materialize it as a list.
    fields = list(map(int, time.split(':')))
    while len(fields) < 5:
        fields.append(0)
    return (fields[0] * HOURS_TO_MICRO + fields[1] * MIN_TO_MICRO
            + fields[2] * SEC_TO_MICRO + fields[3] * MILI_TO_MICRO + fields[4])
def microSecondsToStrTime(usec):
    """Format a microsecond count as a "HH:MM:SS:mmm:uuu" string."""
    # BUG FIX: use floor division throughout; in Python 3 "/" is true
    # division and would feed floats like 1.0 into zfill below.
    hours = usec // HOURS_TO_MICRO
    usec = usec % HOURS_TO_MICRO
    minu = usec // MIN_TO_MICRO
    usec = usec % MIN_TO_MICRO
    sec = usec // SEC_TO_MICRO
    usec = usec % SEC_TO_MICRO
    msec = usec // MILI_TO_MICRO
    usec = usec % MILI_TO_MICRO
    return (str(hours).zfill(2) + ":" + str(minu).zfill(2) + ":" + str(sec).zfill(2)
            + ":" + str(msec).zfill(3) + ":" + str(usec).zfill(3))
# Helper function to read and return the contents of the file.
def loadFromDir(path):
    """Parse every event file in *path*.

    Returns a list of [filename, events] pairs, where each event is
    [time_in_microseconds, value_string].
    """
    output = []
    # Read every file in directory
    for filename in os.listdir(path):
        events = []
        # BUG FIX: os.path.join works whether or not *path* ends with a
        # separator; the original string concatenation required one.
        # `with` also guarantees the handle is closed.
        with open(os.path.join(path, filename), "r") as f:
            # Read each line of the file: "<timestamp> <value>"
            for line in f.readlines():
                events.append([strTimeToMicroSeconds(line.split(" ")[0]),
                               line.split(" ")[1].strip("\n")])
        output.append([filename, events])
    return output
# Here, we are creating our class, Window, and inheriting from the Frame
# class. Frame is a class from the tkinter module. (see Lib/tkinter/__init__)
class Window(Frame):
# Define settings upon initialization. Here you can specify
def __init__(self, master=None):
    """Create the main SVEC frame inside *master* and build its widgets."""
    # Initialize class variables
    self.inputFolderPath = ""
    self.outputFolderPath = ""
    self.loaded = False        # True once the I/O folders have been parsed
    self.displayTime = 0       # current simulation time, in microseconds
    self.stepList = []
    self.labelList = []
    # parameters that you want to send through the Frame class.
    Frame.__init__(self, master)
    # reference to the master widget, which is the tk window
    self.master = master
    # build all menus, buttons, entries and labels
    self.init_window()
#Creation of init_window
def init_window(self):
    """Build the menu bar, entry boxes, buttons and labels of the window."""
    # changing the title of our master widget
    self.master.title("SVEC")
    # allowing the widget to take the full space of the root window
    self.pack(fill=BOTH, expand=1)
    ################################################################
    # creating the menu
    menu = Menu(self.master)
    self.master.config(menu=menu)
    # create the file object
    file = Menu(menu)
    # adds a command to the menu option, calling it exit, and the
    # command it runs on event is client_exit
    file.add_command(label="Open Input Folder", command=self.getInputFolder)
    file.add_command(label="Open Output Folder", command=self.getOutputFolder)
    file.add_command(label="Open Top Folder", command=self.getTopFolder)
    file.add_command(label="Exit", command=self.client_exit)
    # added "file" to our menu
    menu.add_cascade(label="File", menu=file)
    # # create the file object)
    # edit = Menu(menu)
    # # adds a command to the menu option, calling it exit, and the
    # # command it runs on event is client_exit
    # edit.add_command(label="Show Text", command=self.showText)
    # #added "file" to our menu
    # menu.add_cascade(label="Edit", menu=edit)
    ################################################################
    # Entry boxes: step size and current display time (HH:MM:SS:mmm:uuu)
    self.stepSize = Entry(self,width=14)
    self.stepSize.insert(0, "00:00:00:000:000")
    #self.stepSize.place(x=190, y=5)
    self.stepSize.grid(row = 2, column = 1)
    self.displayTimeEntry = Entry(self,width=14)
    self.displayTimeEntry.insert(1, "00:00:00:000:000")
    self.displayTimeEntry.grid(row = 1, column = 1)
    ################################################################
    # Make the buttons and place them in the grid.
    loadButton = Button(self, text="Reload",command=self.loadFiles)
    loadButton.grid(row = 0, column = 0)
    quitButton = Button(self, text="Exit",command=self.client_exit)
    quitButton.grid(row = 0, column = 3)
    revButton = Button(self, text="<<<",command=self.revTime)
    revButton.grid(row = 2, column = 2)
    fwdButton = Button(self, text=">>>",command=self.fwdTime)
    fwdButton.grid(row = 2, column = 3)
    revStepButton = Button(self, text=" |< ",command=self.revStepTime)
    revStepButton.grid(row =1, column = 2)
    fwdStepButton = Button(self, text=" >| ",command=self.fwdStepTime)
    fwdStepButton.grid(row = 1, column = 3)
    resetButton = Button(self, text="Reset Time",command=self.resetTime)
    resetButton.grid(row = 0, column = 1)
    setButton = Button(self, text="Set Time",command=self.setTime)
    setButton.grid(row = 0, column = 2)
    ################################################################
    # Static text labels
    stepLabel = Label(self, text="Step size:")
    stepLabel.grid(row = 2, column = 0)
    timeLabel = Label(self, text="Current time:")
    timeLabel.grid(row = 1, column = 0)
    inLabel = Label(self, text="Inputs")
    inLabel.grid(row = 3, column = 0)
    outLabel = Label(self, text="Outputs")
    outLabel.grid(row = 3, column = 2)
    ################################################################
################################################################
def showText(self):
text = Label(self, text="Hello World!")
text.pack()
def updatePinDisplay(self):
#If the files have not been loaded throw an error.
if (not self.loaded):
tkMessageBox.showinfo("ERROR", "Please load the I/O folders and try again.")
return
# Update the entry boxes:
size = strTimeToMicroSeconds(self.stepSize.get())
self.stepSize.delete(0, END)
self.stepSize.insert(1, microSecondsToStrTime(size))
self.displayTimeEntry.delete(0, END)
self.displayTimeEntry.insert(1, microSecondsToStrTime(self.displayTime))
for label in self.labelList:
label.destroy()
self.labelList = []
i = 0
for pin in self.inputPins:
currEvent = []
i = 1+i
label = Label(self, text=pin[0].split('.')[0].strip("_In"))
label.grid(row = 3+i, column = 0)
for event in pin[1]:
if (event[0] < self.displayTime):
currEvent = event
if (currEvent == []):
currEvent = [0,'?']
label = Label(self, text=currEvent[1])
label.grid(row = 3+i, column = 1)
self.labelList.append(label)
i=0
for pin in self.outputPins:
currEvent = []
i = 1+i
label = Label(self, text=pin[0].split('.')[0].strip("_Out"))
label.grid(row = 3+i, column = 2)
for event in pin[1]:
if (event[0] < self.displayTime):
currEvent = event
if (currEvent == []):
currEvent = [0,'?']
label = Label(self, text=currEvent[1])
label.grid(row = 3+i, column = 3)
self.labelList.append(label)
def updateStepTimes(self):
self.stepList = []
for pin in self.inputPins:
for event in pin[1]:
self.stepList.append(event[0])
for pin in self.outputPins:
for event in pin[1]:
self.stepList.append(event[0])
self.stepList.sort()
def loadFiles(self):
if (self.inputFolderPath == "" or self.outputFolderPath == ""):
self.loaded = False
tkMessageBox.showinfo("ERROR", "Please load the I/O folders and try again.")
else:
self.loaded = True
self.inputPins = loadFromDir(self.inputFolderPath)
self.outputPins = loadFromDir(self.outputFolderPath)
self.updateStepTimes();
def revTime(self):
self.displayTime -= strTimeToMicroSeconds(self.stepSize.get())
if(self.displayTime < 0):
self.displayTime = 0;
self.updatePinDisplay()
def fwdTime(self):
self.displayTime += strTimeToMicroSeconds(self.stepSize.get())
self.updatePinDisplay()
def revStepTime(self):
#If the files have not been loaded throw an error.
if (not self.loaded):
tkMessageBox.showinfo("ERROR", "Please load the I/O folders and try again.")
return
newTime = 0
for time in self.stepList:
if(time < self.displayTime):
newTime = time
self.displayTime = newTime
self.updatePinDisplay()
def fwdStepTime(self):
#If the files have not been loaded throw an error.
if (not self.loaded):
tkMessageBox.showinfo("ERROR", "Please load the I/O folders and try again.")
return
for time in self.stepList:
if(time > self.displayTime):
self.displayTime = time
self.updatePinDisplay()
return
self.displayTime = self.stepList[-1]
self.updatePinDisplay()
def resetTime(self):
self.displayTime = 0
self.updatePinDisplay()
def setTime(self):
self.displayTime = strTimeToMicroSeconds(self.displayTimeEntry.get())
self.updatePinDisplay()
def client_exit(self):
exit()
def getInputFolder(self):
self.inputFolderPath = filedialog.askdirectory()
if (not os.path.isdir(self.inputFolderPath)):
self.loaded = False
tkMessageBox.showinfo("ERROR", "Folders not found. Please try again.")
self.inputFolderPath = ""
def getOutputFolder(self):
self.outputFolderPath = filedialog.askdirectory()
if (not os.path.isdir(self.outputFolderPath)):
self.loaded = False
tkMessageBox.showinfo("ERROR", "Folders not found. Please try again.")
self.outputFolderPath = ""
def getTopFolder(self):
top = filedialog.askdirectory()
self.inputFolderPath = top + "/inputs/"
self.outputFolderPath = top +"/outputs/"
if ((not os.path.isdir(self.inputFolderPath)) or (not os.path.isdir(self.outputFolderPath))):
tkMessageBox.showinfo("ERROR", "Folders not found. Try loading them indiviually.")
self.inputFolderPath = ""
self.outputFolderPath = ""
self.loaded = False
else:
self.loadFiles()
self.updatePinDisplay()
# Create the single top-level Tk window for the application.
root = Tk()
root.geometry("500x400")
# Instantiate the main application frame inside the root window.
app = Window(root)
# Enter the Tk event loop; blocks until the window is closed.
root.mainloop()
__author__ = 'Pedram'
import nose
from graph import Graph
from graph_functions import *
def test_complete_C1():
    """A 3-node graph missing the B-D edge is not complete."""
    graph = Graph({'A': ['B', 'D'], 'B': ['A'], 'D': ['A']})
    assert is_complete(graph) == False
def test_complete_C2():
    """A triangle (every pair connected) is complete."""
    graph = Graph({'A': ['B', 'D'], 'B': ['A', 'D'], 'D': ['A', 'B']})
    assert is_complete(graph) == True
def test_complete_C3():
    """A single edge between two nodes is a complete graph."""
    graph = Graph({'A': ['B'], 'B': ['A']})
    assert is_complete(graph) == True
def test_complete_C4():
    """The empty graph is vacuously complete."""
    assert is_complete(Graph({})) == True
def test_complete_C5():
    """A single isolated node is vacuously complete."""
    assert is_complete(Graph({'A': []})) == True
def test_complete_C6():
    """Passing a non-Graph argument must raise TypeError."""
    try:
        is_complete([])
        # No exception raised: the call should have rejected the list.
        assert False
    except TypeError:
        assert True
def test_degree_ND1():
    """Nodes sorted by descending degree; equal degrees in either order."""
    graph = Graph({'A': [], 'B': ['D'], 'D': ['B']})
    result = nodes_by_degree(graph)
    option_a = str([('D', 1), ('B', 1), ('A', 0)])
    option_b = str([('B', 1), ('D', 1), ('A', 0)])
    assert str(result) in (option_a, option_b)
def test_degree_ND2():
    """An empty graph yields an empty degree list."""
    result = nodes_by_degree(Graph({}))
    assert str(result) == str([])
def test_degree_ND3():
    """A single isolated node has degree zero."""
    result = nodes_by_degree(Graph({'A': []}))
    assert str(result) == str([('A', 0)])
def test_degree_ND4():
    """Complete triangle: all three nodes have degree 2, any order."""
    graph = Graph({'A': ['B', 'D'], 'B': ['A', 'D'], 'D': ['A', 'B']})
    result = nodes_by_degree(graph)
    assert len(result) == 3
    for expected in [('A', 2), ('B', 2), ('D', 2)]:
        assert expected in result
def test_degree_ND5():
    """Passing a non-Graph argument must raise TypeError."""
    try:
        nodes_by_degree([])
        # No exception raised: the call should have rejected the list.
        assert False
    except TypeError:
        assert True
package com.red.dwarf;
import com.google.gson.*;
import javax.net.ssl.HttpsURLConnection;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
public class Util {
    // **********************************************
    // *** Update or verify the following values. ***
    // **********************************************
    // NOTE(review): both the key and the header name below look
    // redacted/placeholder. The Translator v3 API expects the
    // "Ocp-Apim-Subscription-Key" header with a valid key — confirm
    // before use.
    static String subscriptionKey = "d482808b741ba30d464acdb9b67100b7";
    static String host = "https://api.cognitive.microsofttranslator.com";
    static String path = "/translate?api-version=3.0";

    /** JSON request body element: serializes to {"Text": "..."}. */
    public static class RequestBody {
        String Text;

        public RequestBody(String text) {
            this.Text = text;
        }
    }

    /**
     * POST {@code content} (JSON, UTF-8) to {@code url} and return the
     * raw response body.
     *
     * @param url     fully-built endpoint URL (https)
     * @param content JSON payload to send
     * @return the response body with line breaks removed
     * @throws Exception on connection or I/O failure
     */
    public static String Post(URL url, String content) throws Exception {
        // Encode first so Content-Length matches the bytes actually sent.
        // (The original used content.length() — the CHARACTER count —
        // which under-counts for any non-ASCII text once UTF-8 encoded.)
        byte[] encodedContent = content.getBytes("UTF-8");

        HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
        connection.setRequestMethod("POST");
        connection.setRequestProperty("Content-Type", "application/json");
        connection.setRequestProperty("Content-Length", String.valueOf(encodedContent.length));
        connection.setRequestProperty("100785c8d39025e7a62766415ae2ab48", subscriptionKey);
        connection.setRequestProperty("X-ClientTraceId", java.util.UUID.randomUUID().toString());
        connection.setDoOutput(true);

        // try-with-resources closes the streams even if a write/read throws.
        try (DataOutputStream wr = new DataOutputStream(connection.getOutputStream())) {
            wr.write(encodedContent, 0, encodedContent.length);
            wr.flush();
        }

        StringBuilder response = new StringBuilder();
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(connection.getInputStream(), "UTF-8"))) {
            String line;
            while ((line = in.readLine()) != null) {
                response.append(line);
            }
        }
        return response.toString();
    }

    /**
     * Translate {@code text} into language {@code to}. Pass "detect" as
     * {@code from} to let the service auto-detect the source language.
     */
    public static String Translate(String text, String from, String to) throws Exception {
        String queryPath = "";
        if (!from.equals("detect")) {
            queryPath += "&from=" + from;
        }
        queryPath += "&to=" + to;
        URL url = new URL(host + path + queryPath);

        // The v3 API takes an array of {"Text": ...} objects.
        List<Util.RequestBody> objList = new ArrayList<>();
        objList.add(new Util.RequestBody(text));
        String content = new Gson().toJson(objList);

        return Post(url, content);
    }

    /** Pretty-print the response (strips the outer JSON array brackets). */
    public static String prettify(String json_text) {
        json_text = json_text.substring(1, json_text.length() - 1);
        JsonParser parser = new JsonParser();
        JsonElement json = parser.parse(json_text);
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        return gson.toJson(json);
    }

    /** Parse the single-element response array into a Translation object. */
    public static Translation getTranslation(String jsonText) {
        jsonText = jsonText.substring(1, jsonText.length() - 1);
        JsonParser parser = new JsonParser();
        JsonElement json = parser.parse(jsonText);
        JsonObject jsonObject = json.getAsJsonObject();
        // detectedLanguage is only present when the source was auto-detected.
        JsonObject detectedLanguageObj = jsonObject.getAsJsonObject("detectedLanguage");
        JsonArray tranlationsArrayObj = jsonObject.getAsJsonArray("translations");
        JsonObject translationObj = tranlationsArrayObj.get(0).getAsJsonObject();
        return new Translation(
                (detectedLanguageObj == null ? null : detectedLanguageObj.get("language").getAsString()),
                translationObj.get("text").getAsString());
    }
}
|
#!python3
# -*- coding: utf-8 -*-
# Author: JustinHan
# Date: 2021-01-25
# Introduce: 正规方程求解线性回归系数
# Dependence
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import mean_squared_error
# 波士顿房价预测
def boston_housing_price_predict():
    """Fit ordinary least squares (normal-equation solver) on the Boston
    housing data and report coefficients, predictions and test-set MSE.
    """
    # (1) Load the dataset.
    # NOTE(review): load_boston is deprecated and removed in
    # scikit-learn >= 1.2; switch datasets when upgrading.
    raw_all_data = load_boston()
    # (2) Split into train/test sets.
    x_train, x_test, y_train, y_test = train_test_split(raw_all_data.data, raw_all_data.target)
    # (3) Standardize features. The scaler must be fit on the TRAINING
    # data only; the original called fit_transform on x_test too, which
    # re-estimates mean/variance from the test set (data leakage).
    transfer = StandardScaler()
    x_train = transfer.fit_transform(x_train)
    x_test = transfer.transform(x_test)
    # (4) Fit the linear model (closed-form normal equation).
    estimator = LinearRegression()
    estimator.fit(x_train, y_train)
    # (5) Report the fitted model.
    print("权重系数为:\n", estimator.coef_)
    print("偏置值为:\n", estimator.intercept_)
    # (6) Evaluate on the held-out split.
    y_predict = estimator.predict(x_test)
    print("预测的房价:\n", y_predict)
    error = mean_squared_error(y_test, y_predict)
    print("正规方程-均方误差为:\n", error)
# Script entry point: run the regression demo when executed directly.
if __name__ == '__main__':
    boston_housing_price_predict()
|
from typing import List, Dict, Any, Callable
import json
import re
import os
import errno
from collections import defaultdict
from tqdm import tqdm
from loader.Database import DBViewIndex, DBManager, DBView, DBDict, check_target_path
from loader.Actions import CommandType
from exporter.Mappings import AFFLICTION_TYPES, ABILITY_CONDITION_TYPES, KILLER_STATE, TRIBE_TYPES, TARGET_ACTION_TYPES, ELEMENTS, WEAPON_TYPES
def get_valid_filename(s):
    """Drop every character that is not a word char, dash, dot or space."""
    allowed = re.compile(r'(?u)[^-\w. ]')
    return allowed.sub('', s)
class ActionCondition(DBView):
    """View over the ActionCondition table (buffs/debuffs/afflictions).

    Linked skills are expanded at most once per top-level query via
    ``seen_skills``, keeping mutually-referencing conditions from
    recursing forever.
    """

    def __init__(self, index):
        super().__init__(index, 'ActionCondition',
                         labeled_fields=['_Text', '_TextEx'])
        # Skill/condition ids already expanded in the current query
        # (cycle guard; reset when the outermost call finishes).
        self.seen_skills = set()

    def process_result(self, res, exclude_falsy=True):
        """Expand foreign keys on a raw row: affliction type name, linked
        attack action / hit attribute, enhanced skills and damage link."""
        # Map the numeric affliction type to its human-readable name.
        if '_Type' in res:
            res['_Type'] = AFFLICTION_TYPES.get(res['_Type'], res['_Type'])
        if '_EnhancedBurstAttack' in res and res['_EnhancedBurstAttack']:
            res['_EnhancedBurstAttack'] = self.index['PlayerAction'].get(
                res['_EnhancedBurstAttack'], exclude_falsy=exclude_falsy)
        if '_AdditionAttack' in res and res['_AdditionAttack']:
            res['_AdditionAttack'] = self.index['PlayerActionHitAttribute'].get(
                res['_AdditionAttack'], exclude_falsy=exclude_falsy)
        # Only the outermost call (empty seen set) may reset the guard.
        reset_seen_skills = len(self.seen_skills) == 0
        if res['_Id'] not in self.seen_skills:
            self.seen_skills.add(res['_Id'])
            for s in ('_EnhancedSkill1', '_EnhancedSkill2', '_EnhancedSkillWeapon'):
                if s in res and res[s] and res[s] not in self.seen_skills:
                    skill = self.index['SkillData'].get(
                        res[s], exclude_falsy=exclude_falsy)
                    if skill:
                        res[s] = skill
        if (dlk := res.get('_DamageLink')) and (dmglink := self.index['PlayerActionHitAttribute'].get(dlk, exclude_falsy=exclude_falsy)):
            res['_DamageLink'] = dmglink
        if reset_seen_skills:
            self.seen_skills = set()
        return res

    def get(self, key, fields=None, exclude_falsy=True):
        """Fetch one row by id and expand its linked fields (None if absent)."""
        res = super().get(key, fields=fields, exclude_falsy=exclude_falsy)
        if not res:
            return None
        return self.process_result(res, exclude_falsy=exclude_falsy)

    def export_all_to_folder(self, out_dir='./out', ext='.json', exclude_falsy=True):
        """Dump every row into out_dir/_act_cond, grouped into one file
        per 100000000-sized id bucket."""
        # super().export_all_to_folder(out_dir, ext, fn_mode='a', exclude_falsy=exclude_falsy, full_actions=False)
        out_dir = os.path.join(out_dir, '_act_cond')
        all_res = self.get_all(exclude_falsy=exclude_falsy)
        check_target_path(out_dir)
        sorted_res = defaultdict(lambda: [])
        for res in tqdm(all_res, desc='_act_cond'):
            res = self.process_result(res, exclude_falsy=exclude_falsy)
            try:
                sorted_res[int(res['_Id'] / 100000000)].append(res)
            except:
                # Non-numeric id: fall back to bucket 0.
                sorted_res[0].append(res)
        for group_name, res_list in sorted_res.items():
            out_name = get_valid_filename(f'{group_name}00000000{ext}')
            output = os.path.join(out_dir, out_name)
            with open(output, 'w', newline='', encoding='utf-8') as fp:
                json.dump(res_list, fp, indent=2, ensure_ascii=False)
class ActionGrant(DBView):
    """View over ActionGrant: conditions granted to specific target actions."""

    def __init__(self, index):
        super().__init__(index, 'ActionGrant')

    def process_result(self, res, exclude_falsy=True):
        """Expand the target-action enum and the granting ActionCondition."""
        res['_TargetAction'] = TARGET_ACTION_TYPES.get(
            res['_TargetAction'], res['_TargetAction'])
        grant_cond = self.index['ActionCondition'].get(
            res['_GrantCondition'], exclude_falsy=exclude_falsy)
        if grant_cond:
            res['_GrantCondition'] = grant_cond
        return res

    def get(self, pk, by=None, fields=None, order=None, exclude_falsy=False):
        """Fetch one row and expand its linked fields."""
        res = super().get(pk, by=by, fields=fields, order=order, exclude_falsy=exclude_falsy)
        return self.process_result(res, exclude_falsy=exclude_falsy)
class AbilityData(DBView):
    """View over AbilityData.

    Each row carries up to three ability slots; process_result fills a
    human-readable ``_Description{i}`` per slot by dispatching through
    the module-level ABILITY_TYPES table, and expands linked views.
    """

    # Sub-ids of ability type 1 mapped to the stat they modify.
    STAT_ABILITIES = {
        1: 'hp',
        2: 'strength',
        3: 'defense',
        4: 'skill haste',
        5: 'dragon haste',
        8: 'shapeshift time',
        10: 'attack speed',
        12: 'fs charge rate'
    }

    @staticmethod
    def a_ids(res, i):
        # Collect the non-falsy _VariousId{i}a/b/c/_VariousId{i} values for slot i.
        a_ids = [res[f'_VariousId{i}{a}'] for a in (
            'a', 'b', 'c', '') if f'_VariousId{i}{a}' in res and res[f'_VariousId{i}{a}']]
        return a_ids

    @staticmethod
    def a_str(res, i):
        # The string-valued various-id for slot i, if any.
        return res.get(f'_VariousId{i}str', None)

    @staticmethod
    def generic_description(name):
        # Build a describer that labels slot i with *name* plus its raw ids.
        def f(ad, res, i):
            a_ids = AbilityData.a_ids(res, i)
            a_str = AbilityData.a_str(res, i)
            if a_ids or a_str:
                res[f'_Description{i}'] = f'{name} {a_ids, a_str}'
            else:
                res[f'_Description{i}'] = name
            return res
        return f

    @staticmethod
    def link_various_ids(ad, res, i, view='ActionCondition'):
        # Replace each _VariousId{i}* with the row it references in *view*;
        # returns the modified row plus the raw ids that were replaced.
        a_ids = []
        for a in ('a', 'b', 'c', ''):
            key = f'_VariousId{i}{a}'
            if key in res and res[key]:
                a_ids.append(res[key])
                res[key] = ad.index[view].get(res[key], exclude_falsy=True)
        return res, a_ids

    @staticmethod
    def link_various_str(ad, res, i, view='PlayerActionHitAttribute'):
        # Replace _VariousId{i}str with the row it references in *view*;
        # returns the modified row plus the raw string id (or None).
        a_str = None
        key = f'_VariousId{i}str'
        if key in res and res[key]:
            a_str = res[key]
            res[key] = ad.index[view].get(
                res[key], by='_Id', exclude_falsy=True)
        return res, a_str

    @staticmethod
    def stat_ability(ad, res, i):
        # Describe a stat-up ability via the STAT_ABILITIES lookup.
        a_id = AbilityData.a_ids(res, i)[0]
        res[f'_Description{i}'] = f'stat {AbilityData.STAT_ABILITIES.get(a_id, a_id)}'
        return res

    @staticmethod
    def affliction_resist(ad, res, i):
        a_id = AbilityData.a_ids(res, i)[0]
        res[f'_Description{i}'] = f'affliction resist {AFFLICTION_TYPES.get(a_id, a_id)}'
        return res

    @staticmethod
    def affliction_proc_rate(ad, res, i):
        a_id = AbilityData.a_ids(res, i)[0]
        res[f'_Description{i}'] = f'affliction proc rate {AFFLICTION_TYPES.get(a_id, a_id)}'
        return res

    @staticmethod
    def tribe_resist(ad, res, i):
        a_id = AbilityData.a_ids(res, i)[0]
        res[f'_Description{i}'] = f'tribe resist {TRIBE_TYPES.get(a_id, a_id)}'
        return res

    @staticmethod
    def tribe_bane(ad, res, i):
        a_id = AbilityData.a_ids(res, i)[0]
        res[f'_Description{i}'] = f'tribe bane {TRIBE_TYPES.get(a_id, a_id)}'
        return res

    @staticmethod
    def action_condition(ad, res, i):
        # Expand the linked ActionCondition rows and hit attribute.
        res, a_ids = AbilityData.link_various_ids(ad, res, i)
        res, a_str = AbilityData.link_various_str(ad, res, i)
        res[f'_Description{i}'] = f'action condition {a_ids, a_str}'
        return res

    @staticmethod
    def affliction_punisher(ad, res, i):
        a_id = AbilityData.a_ids(res, i)[0]
        res[f'_Description{i}'] = f'affliction punisher {AFFLICTION_TYPES.get(a_id, a_id)}'
        return res

    @staticmethod
    def conditional_action_grant(ad, res, i):
        res, a_ids = AbilityData.link_various_ids(
            ad, res, i, view='ActionGrant')
        res[f'_Description{i}'] = f'conditional action grant {a_ids}'
        return res

    @staticmethod
    def elemental_resist(ad, res, i):
        a_id = AbilityData.a_ids(res, i)[0]
        res[f'_Description{i}'] = f'elemental resist {ELEMENTS.get(a_id, a_id)}'
        return res

    @staticmethod
    def action_grant(ad, res, i):
        res, a_ids = AbilityData.link_various_ids(
            ad, res, i, view='ActionGrant')
        res[f'_Description{i}'] = f'action grant {a_ids}'
        return res

    @staticmethod
    def ability_reference(ad, res, i):
        # Ability slot that points at another AbilityData row.
        res, a_ids = AbilityData.link_various_ids(
            ad, res, i, view='AbilityData')
        res[f'_Description{i}'] = f'ability reference {a_ids}'
        return res

    @staticmethod
    def skill_reference(ad, res, i):
        res, a_ids = AbilityData.link_various_ids(
            ad, res, i, view='SkillData')
        res[f'_Description{i}'] = f'skill reference {a_ids}'
        return res

    @staticmethod
    def action_reference(ad, res, i):
        res, a_ids = AbilityData.link_various_ids(
            ad, res, i, view='PlayerAction')
        res[f'_Description{i}'] = f'action reference {a_ids}'
        return res

    @staticmethod
    def random_action_condition(ad, res, i):
        res, a_ids = AbilityData.link_various_ids(ad, res, i)
        res, a_str = AbilityData.link_various_str(ad, res, i)
        res[f'_Description{i}'] = f'random action condition {a_ids, a_str}'
        return res

    @staticmethod
    def elemental_damage(ad, res, i):
        a_id = AbilityData.a_ids(res, i)[0]
        res[f'_Description{i}'] = f'elemental damage {ELEMENTS.get(a_id, a_id)}'
        return res

    @staticmethod
    def action_condition_timer(ad, res, i):
        res, a_ids = AbilityData.link_various_ids(ad, res, i)
        res[f'_Description{i}'] = 'action condition timer'
        return res

    def __init__(self, index):
        super().__init__(index, 'AbilityData', labeled_fields=[
            '_Name', '_Details', '_HeadText'])

    def process_result(self, res, full_query=True, exclude_falsy=True):
        """Translate enum fields and run each slot's ABILITY_TYPES describer.

        The bare try/excepts tolerate rows where a field is absent or
        already translated."""
        try:
            res['_ConditionType'] = ABILITY_CONDITION_TYPES.get(
                res['_ConditionType'], res['_ConditionType'])
        except:
            pass
        try:
            res[f'_TargetAction'] = TARGET_ACTION_TYPES[res[f'_TargetAction']]
        except:
            pass
        for i in (1, 2, 3):
            try:
                res[f'_TargetAction{i}'] = TARGET_ACTION_TYPES[res[f'_TargetAction{i}']]
            except:
                pass
            try:
                # Dispatch to the describer registered for this slot's type.
                res = ABILITY_TYPES[res[f'_AbilityType{i}']](self, res, i)
            except KeyError:
                pass
        if (ele := res.get('_ElementalType')):
            res['_ElementalType'] = ELEMENTS.get(ele, ele)
        if (wep := res.get('_WeaponType')):
            res['_WeaponType'] = WEAPON_TYPES.get(wep, wep)
        return res

    def get(self, key, fields=None, full_query=True, exclude_falsy=True):
        """Fetch one ability; full_query=False skips expansion."""
        res = super().get(key, fields=fields, exclude_falsy=exclude_falsy)
        if not full_query:
            return res
        return self.process_result(res, full_query, exclude_falsy)

    def export_all_to_folder(self, out_dir='./out', ext='.json', exclude_falsy=True):
        """Dump every processed ability into a single _abilities file."""
        processed_res = [self.process_result(res, exclude_falsy=exclude_falsy) for res in self.get_all(exclude_falsy=exclude_falsy)]
        with open(os.path.join(out_dir, f'_abilities{ext}'), 'w', newline='', encoding='utf-8') as fp:
            json.dump(processed_res, fp, indent=2, ensure_ascii=False)
# Dispatch table: _AbilityType{i} value -> describer that fills
# _Description{i} on the row (see AbilityData.process_result).
# Gaps in the numbering are ability types with no describer yet.
ABILITY_TYPES = {
    1: AbilityData.stat_ability,
    2: AbilityData.affliction_resist,
    3: AbilityData.affliction_proc_rate,
    4: AbilityData.tribe_resist,
    5: AbilityData.tribe_bane,
    6: AbilityData.generic_description('damage'),
    7: AbilityData.generic_description('critical rate'),
    8: AbilityData.generic_description('recovery potency'),
    9: AbilityData.generic_description('gauge accelerator'),
    # 10
    11: AbilityData.generic_description('striking haste'),
    # 12 13
    14: AbilityData.action_condition,
    # 15
    16: AbilityData.generic_description('debuff chance'),
    17: AbilityData.generic_description('skill prep'),
    18: AbilityData.generic_description('buff time'),
    # 19
    20: AbilityData.affliction_punisher,
    21: AbilityData.generic_description('player exp'),
    22: AbilityData.generic_description('adv exp'),
    23: AbilityData.generic_description('rupies'),
    24: AbilityData.generic_description('mana'),
    25: AbilityData.conditional_action_grant,
    26: AbilityData.generic_description('critical damage'),
    27: AbilityData.generic_description('shapeshift prep'),
    28: AbilityData.elemental_resist,
    29: AbilityData.generic_description('specific enemy resist'),
    30: AbilityData.generic_description('specific enemy bane'),
    # 31 32
    33: AbilityData.generic_description('event points'),
    34: AbilityData.generic_description('event drops'),
    35: AbilityData.generic_description('gauge inhibitor'),
    36: AbilityData.generic_description('dragon damage'),
    37: AbilityData.generic_description('enemy ability resist'),
    # 38
    39: AbilityData.action_grant,
    40: AbilityData.generic_description('gauge defense & skill damage'),
    41: AbilityData.generic_description('event point feh'),
    # 42: something dragonform related
    43: AbilityData.ability_reference,
    44: AbilityData.skill_reference,
    45: AbilityData.action_reference,
    46: AbilityData.generic_description('dragon gauge flat increaase'),
    # 47
    48: AbilityData.generic_description('dragon gauge decrease rate'),
    49: AbilityData.generic_description('conditional shapeshift fill'),
    51: AbilityData.random_action_condition,
    52: AbilityData.generic_description('buff icon critical rate'),
    # 53
    54: AbilityData.generic_description('combo damage boost'),
    55: AbilityData.generic_description('combo time'),
    56: AbilityData.generic_description('dragondrive'),
    57: AbilityData.elemental_damage,
    58: AbilityData.generic_description('dragondrive defense'),
    59: AbilityData.generic_description('debuff time'),
    # 60 61 - galaxi
    # 62 - ssinoa
    # "_AbilityType1": 62,
    # "_VariousId1a": 435,
    # "_VariousId1b": 304030301,
    # "_VariousId1c": 1084,
    63: AbilityData.action_condition_timer,
    65: AbilityData.action_reference
}
class PlayerActionHitAttribute(DBView):
    """View over hit attributes (per-hit damage/effect data for actions)."""

    def __init__(self, index):
        super().__init__(index, 'PlayerActionHitAttribute')

    def process_result(self, res, exclude_falsy=True):
        """Expand _ActionCondition1 and translate killer-state enums.

        Accepts either a single row dict or a list of rows; returns the
        input with rows modified in place."""
        res_list = [res] if isinstance(res, dict) else res
        for r in res_list:
            if '_ActionCondition1' in r and r['_ActionCondition1']:
                act_cond = self.index['ActionCondition'].get(
                    r['_ActionCondition1'], exclude_falsy=exclude_falsy)
                if act_cond:
                    r['_ActionCondition1'] = act_cond
            for ks in ('_KillerState1', '_KillerState2', '_KillerState3'):
                if ks in r and r[ks] in KILLER_STATE:
                    r[ks] = KILLER_STATE[r[ks]]
        return res

    def get(self, pk, by=None, fields=None, order=None, mode=DBManager.EXACT, exclude_falsy=False):
        """Fetch row(s) and expand their linked fields."""
        res = super().get(pk, by, fields, order, mode, exclude_falsy)
        return self.process_result(res, exclude_falsy=exclude_falsy)

    # Ids starting with S<digits> (skill hit attrs) share one output file.
    S_PATTERN = re.compile(r'S\d+')

    def export_all_to_folder(self, out_dir='./out', ext='.json', exclude_falsy=True):
        """Dump all rows into out_dir/_hit_attr, grouped by id prefix."""
        # super().export_all_to_folder(out_dir, ext, fn_mode='a', exclude_falsy=exclude_falsy, full_actions=False)
        out_dir = os.path.join(out_dir, '_hit_attr')
        all_res = self.get_all(exclude_falsy=exclude_falsy)
        check_target_path(out_dir)
        sorted_res = defaultdict(lambda: [])
        for res in tqdm(all_res, desc='_hit_attr'):
            res = self.process_result(res, exclude_falsy=exclude_falsy)
            try:
                k1, _ = res['_Id'].split('_', 1)
                if PlayerActionHitAttribute.S_PATTERN.match(k1):
                    sorted_res['S'].append(res)
                else:
                    sorted_res[k1].append(res)
            except:
                # Ids without an underscore each get their own group.
                sorted_res[res['_Id']].append(res)
        for group_name, res_list in sorted_res.items():
            out_name = get_valid_filename(f'{group_name}{ext}')
            output = os.path.join(out_dir, out_name)
            with open(output, 'w', newline='', encoding='utf-8') as fp:
                json.dump(res_list, fp, indent=2, ensure_ascii=False)
class CharacterMotion(DBView):
    """View over CharacterMotion (animation clips keyed by state + ref)."""

    def __init__(self, index):
        super().__init__(index, 'CharacterMotion')

    def get_by_state_ref(self, state, ref, exclude_falsy=True):
        """Return all motion rows matching an animation state and ref id."""
        tbl = self.database.check_table(self.name)
        query = f'SELECT {tbl.named_fields} FROM {self.name} WHERE {self.name}.state=? AND {self.name}.ref=?;'
        return self.database.query_many(
            query=query,
            param=(state, ref),
            d_type=DBDict
        )
class ActionParts(DBView):
    """View over ActionParts: the timed command list making up an action.

    Hit-attribute labels are resolved to PlayerActionHitAttribute rows
    and motion states to animation clips.
    """

    # Matches leveled labels like FOO_LV01; group 1 is the level-less base.
    LV_SUFFIX = re.compile(r'(.*LV)(\d{2})')
    # Row fields that may reference a hit attribute by label.
    HIT_LABELS = ['_hitLabel', '_hitAttrLabel', '_abHitAttrLabel']
    # BURST_ATK_DISPLACEMENT = 1

    def __init__(self, index):
        super().__init__(index, 'ActionParts')
        # Optional (view_name, ref) pair consulted for animations before
        # falling back to the generic CharacterMotion table.
        self.animation_reference = None

    # # figure out how it works again bleh
    # def get_burst_action_parts(self, pk, fields=None, exclude_falsy=True, hide_ref=False):
    #     # sub_parts = super().get((pk, pk+self.BURST_ATK_DISPLACEMENT), by='_ref', fields=fields, order='_ref ASC', mode=DBManager.RANGE, exclude_falsy=exclude_falsy)
    #     # return self.process_result(sub_parts, exclude_falsy=exclude_falsy, hide_ref=hide_ref)

    def process_result(self, action_parts, exclude_falsy=True, hide_ref=True):
        """Decode command types, resolve hit attributes / action conditions
        and attach animation data to each part (rows modified in place)."""
        if isinstance(action_parts, dict):
            action_parts = [action_parts]
        for r in action_parts:
            if 'commandType' in r:
                r['commandType'] = CommandType(r['commandType']).name
            del r['_Id']
            if hide_ref:
                del r['_ref']
            for label in self.HIT_LABELS:
                if label not in r or not r[label]:
                    continue
                res = self.LV_SUFFIX.match(r[label])
                if res:
                    # Leveled label: fetch every level variant via LIKE.
                    base_label, _ = res.groups()
                    hit_attrs = self.index['PlayerActionHitAttribute'].get(
                        base_label, by='_Id', order='_Id ASC', mode=DBManager.LIKE, exclude_falsy=exclude_falsy)
                    if hit_attrs:
                        r[label] = hit_attrs
                elif 'CMB' in r[label]:
                    # Combo labels also come in families; fetch by prefix.
                    base_label = r[label]
                    hit_attrs = self.index['PlayerActionHitAttribute'].get(
                        base_label, by='_Id', order='_Id ASC', mode=DBManager.LIKE, exclude_falsy=exclude_falsy)
                    if hit_attrs:
                        r[label] = hit_attrs
                else:
                    # Plain label: exact lookup.
                    hit_attr = self.index['PlayerActionHitAttribute'].get(
                        r[label], by='_Id', exclude_falsy=exclude_falsy)
                    if hit_attr:
                        r[label] = hit_attr
            if '_actionConditionId' in r and r['_actionConditionId'] and (act_cond := self.index['ActionCondition'].get(r['_actionConditionId'], exclude_falsy=exclude_falsy)):
                r['_actionConditionId'] = act_cond
            if '_motionState' in r and r['_motionState']:
                ms = r['_motionState']
                animation = []
                if self.animation_reference is not None:
                    animation = self.index[self.animation_reference[0]].get_by_state_ref(
                        ms, self.animation_reference[1], exclude_falsy=exclude_falsy)
                if not animation:
                    animation = self.index['CharacterMotion'].get(
                        ms, exclude_falsy=exclude_falsy)
                if animation:
                    # Unwrap single-clip results for compactness.
                    if len(animation) == 1:
                        r['_animation'] = animation[0]
                    else:
                        r['_animation'] = animation
        return action_parts

    def get(self, pk, by=None, fields=None, order=None, mode=DBManager.EXACT, exclude_falsy=True, hide_ref=True):
        """Fetch part row(s) and run the full expansion."""
        action_parts = super().get(pk, by=by, fields=fields, order=order,
                                   mode=mode, exclude_falsy=exclude_falsy)
        return self.process_result(action_parts, exclude_falsy=exclude_falsy, hide_ref=hide_ref)

    @staticmethod
    def remove_falsy_fields(res):
        # Keep _seconds/_seq even when zero; drop all other falsy fields.
        return DBDict(filter(lambda x: bool(x[1]) or x[0] in ('_seconds', '_seq'), res.items()))
class PlayerAction(DBView):
    """View over PlayerAction; attaches timed parts, burst markers and
    chained next/casting actions."""

    # charge_13 actions store their burst marker at action id + 4.
    BURST_MARKER_DISPLACEMENT = 4
    # REF = set()

    def __init__(self, index):
        super().__init__(index, 'PlayerAction')

    def process_result(self, player_action, exclude_falsy=True, full_query=True):
        """Expand one action row: parts, burst marker, next/casting action."""
        pa_id = player_action['_Id']
        # Attach the timed parts, ordered by their start time.
        action_parts = self.index['ActionParts'].get(
            pa_id, by='_ref', order='_seconds ASC', exclude_falsy=exclude_falsy)
        if action_parts:
            player_action['_Parts'] = action_parts
        if (mid := player_action.get('_BurstMarkerId')) and (marker := self.get(mid, exclude_falsy=exclude_falsy)):
            player_action['_BurstMarkerId'] = marker
        else:
            try:
                # Implicit marker: charge_13 actions use a fixed id offset.
                if action_parts[0]['_motionState'] == 'charge_13':
                    player_action['_BurstMarkerId'] = pa_id + \
                        PlayerAction.BURST_MARKER_DISPLACEMENT
                    if marker := self.get(player_action['_BurstMarkerId'], exclude_falsy=exclude_falsy):
                        player_action['_BurstMarkerId'] = marker
            except:
                pass
        if (nextact := player_action.get('_NextAction')):
            player_action['_NextAction'] = self.get(nextact, exclude_falsy=exclude_falsy)
        if (casting := player_action.get('_CastingAction')):
            player_action['_CastingAction'] = self.get(casting, exclude_falsy=exclude_falsy)
        return player_action

    def get(self, pk, fields=None, exclude_falsy=True, full_query=True):
        """Fetch one action; full_query=False skips expansion."""
        player_action = super().get(pk, fields=fields, exclude_falsy=exclude_falsy)
        if not full_query or not player_action:
            return player_action
        # PlayerAction.REF.add(pk)
        return self.process_result(player_action, exclude_falsy=exclude_falsy, full_query=full_query)

    def export_all_to_folder(self, out_dir='./out', ext='.json', exclude_falsy=True):
        """Dump all actions into out_dir/_actions, grouped by name prefix."""
        # super().export_all_to_folder(out_dir, ext, fn_mode='a', exclude_falsy=exclude_falsy, full_actions=False)
        out_dir = os.path.join(out_dir, '_actions')
        all_res = self.get_all(exclude_falsy=exclude_falsy)
        check_target_path(out_dir)
        sorted_res = defaultdict(lambda: [])
        for res in tqdm(all_res, desc='_actions'):
            res = self.process_result(res, exclude_falsy=exclude_falsy)
            try:
                k1, _ = res['_ActionName'].split('_', 1)
                # Group dragon actions (D*, except DAG) under one file.
                if k1[0] == 'D' and k1 != 'DAG':
                    k1 = 'DRAGON'
                sorted_res[k1].append(res)
            except:
                sorted_res[res['_ActionName']].append(res)
            # if res['_Id'] not in PlayerAction.REF:
            #     sorted_res['UNUSED'].append(res)
        for group_name, res_list in sorted_res.items():
            out_name = get_valid_filename(f'{group_name}{ext}')
            output = os.path.join(out_dir, out_name)
            with open(output, 'w', newline='', encoding='utf-8') as fp:
                json.dump(res_list, fp, indent=2, ensure_ascii=False)
class SkillChainData(DBView):
    """View over SkillChainData: groups of skills that chain together."""

    def __init__(self, index):
        super().__init__(index, 'SkillChainData')

    def process_result(self, res):
        """Attach the full skill record to each chain entry.

        full_chainSkill=False stops the skill from re-expanding this
        chain group and recursing."""
        for r in res:
            r['_Skill'] = self.index['SkillData'].get(
                r['_Id'], full_chainSkill=False)
        return res

    def get(self, pk, by=None, fields=None, order=None, mode=DBManager.EXACT, exclude_falsy=False, expand_one=True):
        """Fetch chain row(s) and attach their skills."""
        res = super().get(pk, by=by, fields=fields, order=order, mode=mode,
                          exclude_falsy=exclude_falsy, expand_one=expand_one)
        return self.process_result(res)
class SkillData(DBView):
TRANS_PREFIX = '_Trans'
    def __init__(self, index):
        # Skill rows carry localized text for the name, the four
        # per-level descriptions, and the trans (dragondrive) text.
        super().__init__(index, 'SkillData', labeled_fields=[
            '_Name', '_Description1', '_Description2', '_Description3', '_Description4', '_TransText'])
@staticmethod
def get_all_from(view, prefix, data, **kargs):
for i in range(1, 5):
a_id = f'{prefix}{i}'
if a_id in data and data[a_id]:
data[a_id] = view.get(data[a_id], **kargs)
return data
@staticmethod
def get_last_from(view, prefix, data, **kargs):
i = 4
a_id = f'{prefix}{i}'
while i > 0 and (not a_id in data or not data[a_id]):
i -= 1
a_id = f'{prefix}{i}'
if i > 0:
data[a_id] = view.get(data[a_id], **kargs)
return data
def process_result(self, skill_data, exclude_falsy=True,
full_query=True, full_abilities=False, full_transSkill=True, full_chainSkill=True):
if not full_query:
return skill_data
# Actions
skill_data = self.get_all_from(
self.index['PlayerAction'], '_ActionId', skill_data, exclude_falsy=exclude_falsy)
if '_AdvancedSkillLv1' in skill_data and skill_data['_AdvancedSkillLv1'] and (adv_act := self.index['PlayerAction'].get(skill_data['_AdvancedActionId1'], exclude_falsy=exclude_falsy)):
skill_data['_AdvancedActionId1'] = adv_act
# Abilities
if full_abilities:
skill_data = self.get_all_from(
self.index['AbilityData'], '_Ability', skill_data, exclude_falsy=exclude_falsy)
else:
skill_data = self.get_last_from(
self.index['AbilityData'], '_Ability', skill_data, exclude_falsy=exclude_falsy)
if full_transSkill and '_TransSkill' in skill_data and skill_data['_TransSkill']:
next_trans_skill = self.get(skill_data['_TransSkill'], exclude_falsy=exclude_falsy,
full_query=full_query, full_abilities=full_abilities, full_transSkill=False)
trans_skill_group = {
skill_data['_Id']: None,
next_trans_skill['_Id']: next_trans_skill
}
seen_id = {skill_data['_Id'], next_trans_skill['_Id']}
while next_trans_skill['_TransSkill'] not in seen_id:
next_trans_skill = self.get(next_trans_skill['_TransSkill'], exclude_falsy=exclude_falsy,
full_query=full_query, full_abilities=full_abilities, full_transSkill=False)
trans_skill_group[next_trans_skill['_Id']] = next_trans_skill
seen_id.add(next_trans_skill['_Id'])
skill_data['_TransSkill'] = trans_skill_group
if '_TransBuff' in skill_data and skill_data['_TransBuff'] and (tb := self.index['PlayerAction'].get(skill_data['_TransBuff'], exclude_falsy=exclude_falsy)):
skill_data['_TransBuff'] = tb
# ChainGroupId
if full_chainSkill and '_ChainGroupId' in skill_data and skill_data['_ChainGroupId']:
skill_data['_ChainGroupId'] = self.index['SkillChainData'].get(
skill_data['_ChainGroupId'], by='_GroupId', exclude_falsy=exclude_falsy)
return skill_data
def get(self, pk, fields=None, exclude_falsy=True,
full_query=True, full_abilities=False, full_transSkill=True, full_chainSkill=True):
skill_data = super().get(pk, fields=fields, exclude_falsy=exclude_falsy)
return self.process_result(skill_data, exclude_falsy=exclude_falsy,
full_query=full_query, full_abilities=full_abilities, full_transSkill=full_transSkill, full_chainSkill=full_chainSkill)
class MaterialData(DBView):
    """View over the MaterialData table with localized text fields."""

    def __init__(self, index):
        text_fields = ['_Name', '_Detail', '_Description']
        super().__init__(index, 'MaterialData', labeled_fields=text_fields)
if __name__ == '__main__':
    # Ad-hoc smoke test: resolve one known skill id and dump the result.
    db_index = DBViewIndex()
    skill_view = SkillData(db_index)
    result = skill_view.get(106505012)
    print(result)
|
"""Upgrade User and Survey Objects
Revision ID: 823a9e3627a9
Revises: a5e33684a022
Create Date: 2021-04-06 10:16:13.980341
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '823a9e3627a9'
down_revision = 'a5e33684a022'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add a nullable owner_id column to Survey and link it to User.id.
    op.add_column('Survey', sa.Column('owner_id', sa.Integer(), nullable=True))
    # NOTE(review): the FK is created without an explicit name (None); the
    # matching drop_constraint(None, ...) in downgrade() will fail on backends
    # that require named constraints -- consider naming it in both directions.
    op.create_foreign_key(None, 'Survey', 'User', ['owner_id'], ['id'])
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): drop_constraint with name None raises on most dialects
    # because the constraint created in upgrade() is unnamed -- confirm the
    # target database can infer it, or give the constraint an explicit name.
    op.drop_constraint(None, 'Survey', type_='foreignkey')
    op.drop_column('Survey', 'owner_id')
    # ### end Alembic commands ###
|
# -*- coding: utf-8 -*-
"""
Created on 2020-02-24
@author: duytinvo
"""
from collections import Counter
from sklearn import metrics
from mlmodels.utils.special_tokens import PAD, SOT, EOT, UNK, NULL
# Framework-internal tokens that are never counted as entity labels
# when extracting spans below.
sys_tokens = [PAD, SOT, EOT, UNK]
class APRF1:
    """Accuracy / precision / recall / F1 helpers built on scikit-learn."""

    @staticmethod
    def sklearn(y_true, y_pred):
        """Return (precision, recall, f1, accuracy); P/R/F1 weighted-averaged."""
        accuracy = metrics.accuracy_score(y_true, y_pred)
        p, r, f1, _ = metrics.precision_recall_fscore_support(y_true, y_pred, average='weighted')
        return p, r, f1, accuracy

    @staticmethod
    def accuracies(reference, candidate):
        """Return (per-token accuracy, whole-sequence accuracy) for batches."""
        def flatten(seqs):
            return [tok for seq in seqs for tok in seq]

        def compose(seqs):
            return ["_".join(seq) for seq in seqs]

        sep_acc = metrics.accuracy_score(flatten(reference), flatten(candidate))
        full_acc = metrics.accuracy_score(compose(reference), compose(candidate))
        return sep_acc, full_acc
class NER_metrics:
    """Span-level evaluation helpers for S/B/I/E/O-style NER tag sequences.

    Spans are encoded as strings like "2_B-tag-3_E-tag" (token index joined to
    the tag with '_', consecutive tokens joined with '-'); sentence indices are
    prefixed so spans from different sentences never collide.
    """

    @staticmethod
    def sklearn_metrics(reference, candidate):
        """Weighted precision/recall/F1 over gold/predicted span pairs."""
        # acc = metrics.accuracy_score(y_true, y_pred)
        # f1_ma = metrics.precision_recall_fscore_support(y_true, y_pred, average='macro')
        y_true, y_pred = NER_metrics.span_batch_pair(reference, candidate)
        precision, recall, f1, _ = metrics.precision_recall_fscore_support(y_true, y_pred, average='weighted')
        # f1_no = metrics.precision_recall_fscore_support(y_true, y_pred, average=None)
        # measures = {"acc": acc, "prf_macro": f1_ma, "prf_weighted": f1_we, "prf_individual": f1_no}
        return precision, recall, f1

    @staticmethod
    def span_metrics(reference, candidate):
        """Exact-match span precision/recall/F1 via set intersection."""
        y_true, y_pred = NER_metrics.span_batch(reference, candidate)
        right_ner = len(set(y_true).intersection(set(y_pred)))
        if right_ner != 0:
            precision = right_ner / len(y_pred)
            recall = right_ner / len(y_true)
            f1 = 2 * precision * recall / (precision + recall)
        else:
            # no overlap at all: avoid division by zero
            precision, recall, f1 = 0., 0., 0.
        return precision, recall, f1

    @staticmethod
    def span_batch(reference, candidate):
        """Extract spans per sentence independently for gold and prediction."""
        pred_labels = []
        gold_labels = []
        for i in range(len(reference)):
            assert len(reference[i]) == len(candidate[i]), print(len(reference[i]), reference[i], len(candidate[i]), candidate[i])
            pred_span = NER_metrics.span_ner(candidate[i])
            # prefix the sentence index so spans are unique across the batch
            pred_span = [str(i) + "_" + l for l in pred_span]
            gold_span = NER_metrics.span_ner(reference[i])
            gold_span = [str(i) + "_" + l for l in gold_span]
            pred_labels.extend(pred_span)
            gold_labels.extend(gold_span)
        return gold_labels, pred_labels

    @staticmethod
    def span_ner(tags):
        """Collapse one tag sequence into its list of span strings."""
        cur = []
        span = []
        for i in range(len(tags) - 1):
            if tags[i].upper() != 'O' and tags[i] not in sys_tokens:
                cur += ["_".join([str(i), tags[i]])]  # idx_tag in [S, B, I, E]
                if tags[i].upper().startswith("S"):
                    # single-token span: emit immediately
                    span.extend(cur)
                    cur = []
                else:
                    # close the current span when the next tag opens a new one
                    if tags[i+1].upper() == 'O' or tags[i+1].upper().startswith('S') or \
                            tags[i+1].upper().startswith('B'):
                        span.extend(["-".join(cur)])
                        cur = []
        # we don't care about the 'O' label; flush a span ending on the last token
        if tags[-1].upper() != 'O' and tags[-1] not in sys_tokens:
            cur += ["_".join([str(len(tags) - 1), tags[-1]])]
            span.extend(["-".join(cur)])
        return span

    @staticmethod
    def absa_extractor(tokens, labels, prob=None):
        """Extract aspect spans as [text, majority polarity, tag string, mean prob].

        The polarity is taken from the label suffix (label[2:]) by majority
        vote over the span; prob defaults to 0 per token when not given.
        """
        cur = []
        tok = []
        p = []
        span = []
        por = []
        for i in range(len(labels) - 1):
            if labels[i].upper() != 'O' and labels[i] not in sys_tokens:
                cur += [labels[i]]  # idx_tag in [S, B, I, E]
                por += [labels[i][2:]]
                tok += [tokens[i]]
                p += [prob[i] if prob is not None else 0]
                if labels[i].upper().startswith("S"):
                    span.extend([[" ".join(tok), Counter(por).most_common(1)[0][0], " ".join(cur), sum(p) / len(p)]])
                    cur = []
                    por = []
                    tok = []
                    p = []
                else:
                    if labels[i + 1].upper() == 'O' or labels[i + 1].upper().startswith('S') or \
                            labels[i + 1].upper().startswith('B'):
                        span.extend(
                            [[" ".join(tok), Counter(por).most_common(1)[0][0], " ".join(cur), sum(p) / len(p)]])
                        cur = []
                        por = []
                        tok = []
                        p = []
        # we don't care about the 'O' label; flush a span ending on the last token
        if labels[-1].upper() != 'O' and labels[-1] not in sys_tokens:
            cur += [labels[-1]]  # idx_tag in [S, B, I, E]
            por += [labels[-1][2:]]
            tok += [tokens[-1]]
            p += [prob[-1] if prob is not None else 0]
            span.extend([[" ".join(tok), Counter(por).most_common(1)[0][0], " ".join(cur), sum(p) / len(p)]])
        return span

    @staticmethod
    def span_batch_pair(reference, candidate):
        """Extract gold spans and the predicted tags aligned to those spans."""
        pred_labels = []
        gold_labels = []
        for i in range(len(reference)):
            assert len(reference[i]) == len(candidate[i])
            gold_span, pred_span = NER_metrics.span_ner_pair(reference[i], candidate[i])
            gold_span = [str(i) + "_" + l for l in gold_span]
            pred_span = [str(i) + "_" + l for l in pred_span]
            pred_labels.extend(pred_span)
            gold_labels.extend(gold_span)
        return gold_labels, pred_labels

    @staticmethod
    def span_ner_pair(gold_tags, pred_tags):
        """Like span_ner, but spans are segmented by the GOLD tags and the
        predicted tags are grouped over the same token positions."""
        pred_cur = []
        pred_span = []
        cur = []
        span = []
        for i in range(len(gold_tags) - 1):
            if gold_tags[i].upper() != 'O' and gold_tags[i] not in sys_tokens:
                cur += ["_".join([str(i), gold_tags[i]])]  # idx_tag in [S, B, I, E]
                pred_cur += ["_".join([str(i), pred_tags[i]])]
                if gold_tags[i].upper().startswith("S"):
                    span.extend(cur)
                    cur = []
                    pred_span.extend(pred_cur)
                    pred_cur = []
                else:
                    if gold_tags[i+1].upper() == 'O' or gold_tags[i+1].upper().startswith('S') or \
                            gold_tags[i+1].upper().startswith('B'):
                        span.extend(["-".join(cur)])
                        cur = []
                        pred_span.extend(["-".join(pred_cur)])
                        pred_cur = []
        # we don't care about the 'O' label; flush a span ending on the last token
        if gold_tags[-1].upper() != 'O' and gold_tags[-1] not in sys_tokens:
            cur += ["_".join([str(len(gold_tags) - 1), gold_tags[-1]])]
            span.extend(["-".join(cur)])
            pred_cur += ["_".join([str(len(pred_tags) - 1), pred_tags[-1]])]
            pred_span.extend(["-".join(pred_cur)])
        return span, pred_span
if __name__ == '__main__':
    # Smoke test: gold tag sequences vs. noisy predictions containing
    # system tokens (<PAD>, <s>) that span extraction must ignore.
    reference = \
        [['O', 'S', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'S', 'O'],
         ['O', 'S', 'O', 'B', 'E', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'S', 'O', 'O', 'O', 'O', 'O', 'O', 'O',
          'O', 'O', 'O', 'O', 'O', 'S', 'O', 'O', 'O', 'O', 'O', 'O'], ['S', 'O', 'O', 'O', 'S', 'O', 'O', 'O'],
         ['O', 'O', 'O', 'S', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'S', 'S', 'O', 'O', 'O', 'O', 'O',
          'O'], ['O', 'O', 'O', 'S', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'S', 'O'],
         ['O', 'B', 'E', 'O', 'O', 'O', 'B', 'E', 'O', 'O', 'S', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'B', 'E', 'O',
          'O', 'O', 'O', 'O', 'O', 'S', 'O'],
         ['O', 'S', 'O', 'S', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O',
          'O', 'O'], ['B', 'E', 'O', 'O', 'S', 'O', 'O', 'O', 'S', 'O', 'S', 'O', 'O', 'O', 'O', 'O']]
    candidate = \
        [['I', '<s>', '<s>', '<s>', 'I', '<s>', '<s>', 'I', '<s>', 'I', '<s>', 'S', 'I', 'I', '<PAD>', '<s>', 'I', 'S'],
         ['I', 'I', 'E', 'I', '<PAD>', '<s>', 'I', 'I', 'I', 'I', 'I', '<PAD>', 'S', 'I', 'E', 'I', '<s>', '<s>', '<s>',
          'I', '<PAD>', '<s>', 'I', 'E', 'E', 'E', 'I', 'I', '<s>', '<PAD>', 'I', '<s>', 'I', '<s>'],
         ['<s>', 'I', '<PAD>', '<s>', '<s>', '<PAD>', '<PAD>', '<s>'],
         ['S', 'E', '<s>', '<PAD>', 'I', '<PAD>', 'I', 'I', '<PAD>', 'I', '<PAD>', '<PAD>', '<PAD>', '<PAD>', '<s>',
          '<PAD>', 'I', 'S', 'S', '<s>', '<s>', 'I', 'I'],
         ['<s>', 'I', '<PAD>', '<s>', 'I', '<s>', 'I', 'I', '<s>', '<s>', '<s>', '<s>', '<s>', '<PAD>'],
         ['I', '<s>', 'I', '<s>', '<s>', 'I', 'E', 'I', '<s>', '<s>', '<s>', '<PAD>', '<s>', 'I', '<s>', '<s>', '<s>',
          'I', 'I', 'E', 'I', '<s>', '<s>', 'I', 'E', '<s>', '<PAD>', '<PAD>', '<s>'],
         ['<PAD>', '<s>', 'I', 'I', 'I', 'I', '<PAD>', 'I', 'I', 'I', 'I', 'I', '<PAD>', '<PAD>', '<PAD>', '<s>', 'E',
          'E', 'I', '<s>', 'E', '<PAD>', 'E', '<s>'],
         ['<PAD>', 'I', '<PAD>', '<s>', 'I', '<s>', '<PAD>', '<PAD>', '<PAD>', 'E', 'I', '<PAD>', 'I', 'I', 'I', '<s>']]
    gold_labels, pred_labels = NER_metrics.span_batch_pair(reference, candidate)
from collections import deque
class Circle:
    """
    A circle to play a game of marbles with the other elves in
    (Advent of Code 2018, day 9 rules).

    The current marble is kept at the LEFT end of the deque; clockwise
    neighbours follow it to the right, so all moves are O(1) rotations.
    """

    def __init__(self, players, target, log=False):
        """players -- number of elves; target -- value of the last marble."""
        self.log = log
        self.marbles = deque([0])
        self.target_marbles = target
        self.players = [0] * players          # one running score per player
        self.marbles_played = 0               # NOTE(review): never updated anywhere visible

    def __repr__(self):
        # Show the circle starting from marble 0, with the current marble
        # (the left end of the deque) wrapped in parentheses.
        rotated = self.marbles.copy()
        rotated.rotate(-rotated.index(0))
        parts = []
        for marble in rotated:
            if marble == self.marbles[0]:
                parts.append(f"({marble}) ")
            else:
                parts.append(f"{marble} ")
        return "".join(parts)

    def get_final_score(self):
        """
        Get the player who has the highest score.
        """
        return max(self.players)

    def play(self):
        """Play marbles 1..target_marbles, accumulating scores in self.players."""
        if self.log:
            print(self)
        for marble in range(1, self.target_marbles + 1):
            # If the marble is a multiple of 23, score points!
            if marble % 23 == 0:
                # Bug fix: marble 1 is played by player 1 (index 0), so marble
                # m belongs to player (m - 1) % n, not m % n. (The previous
                # off-by-one permuted the scores cyclically; the maximum was
                # unaffected, but per-player attribution was wrong.)
                player_index = (marble - 1) % len(self.players)
                # Remove the marble 7 positions counter-clockwise; the marble
                # clockwise of it becomes current (new left end).
                self.marbles.rotate(7)
                removed_marble = self.marbles.popleft()
                # Score both the played marble and the removed one.
                self.players[player_index] += marble + removed_marble
            else:
                # Normal turn: insert between the marbles one and two positions
                # clockwise; the new marble becomes current (left end).
                self.marbles.rotate(-2)
                self.marbles.appendleft(marble)
            if self.log:
                print(self)
|
"""
@Author: Joseph K. Nguyen
@Date: 02/22/2021
AnagramChecker.py
Anagram is defined as: when two strings are the same length and have same counts of all characters.
NOTE: This version doesn't do multiple same characters count.
"""
# Sample word pairs: the first three are anagram pairs, the last is not.
array1= ["cat", "tac"]
array2 = ["bad", "dab"]
array3 = ["test", "tset"]
array4= ["dog", "dogg"]
def isAnagram(word1, word2):
    """Return True when word1 and word2 are anagrams of each other.

    Two strings are anagrams when they have the same length and the same
    count of every character. Comparing the sorted characters checks both
    at once.

    Bug fix: the original loop overwrote the result on every character, so
    the answer depended only on the last character of word1, and repeated
    characters were never counted (e.g. "aab" vs "abb" returned True).
    """
    return sorted(word1) == sorted(word2)
def main():
    """Check the first sample pair for anagram-ness and print the results."""
    first, second = array1
    print(first)
    print(second)
    print(isAnagram(first, second))


if __name__ == "__main__":
    main()
import numpy as np
def relu(x):
    """Element-wise rectified linear unit: max(x, 0).

    Improvement: np.maximum replaces the np.vectorize-wrapped builtin max.
    np.vectorize runs a Python-level loop per element; np.maximum computes
    the same result for scalars and arrays in a single vectorized call.
    """
    return np.maximum(x, 0)
def feedforward(inputs, w):
    """Propagate *inputs* through the weight matrices in *w*.

    For each layer, a constant bias input of 1 is appended to the previous
    activation before the matrix product, then tanh is applied.
    """
    activation = inputs  # layer 0 activation is the raw input
    for layer_weights in w:
        with_bias = np.append(activation, 1)
        # activation = relu(np.dot(layer_weights, with_bias))
        activation = np.tanh(np.dot(layer_weights, with_bias))
    return activation
|
import sys
import string
import re
# Patterns for parsing the free-text portion of a Plotnik measurement line:
glide_regex = re.compile('{[a-z0-9]*}')   # glide annotation, e.g. "{g}"
style_regex = re.compile('-[0-9]-')       # style code, e.g. "-1-"
comment_regex = re.compile('-- .*')       # trailing free-form comment
count_regex = re.compile('[0-9]$')        # trailing token-count digit
# primary stress, secondary stress, or unstressed
stress_regex = re.compile('[0-2]$')
# ARPABET vowel class -> Plotnik vowel code (default mapping)
A2P = {'AA':'5', 'AE':'3', 'AH':'6', 'AO':'53', 'AW':'42', 'AY':'41', 'EH':'2', 'ER':'94', 'EY':'21', 'IH':'1', 'IY':'11', 'OW':'62', 'OY':'61', 'UH':'7', 'UW':'72'}
# word-final variants of IY/EY/OW
A2P_FINAL = {'IY':'12', 'EY':'22', 'OW':'63'}
# variants used before a following /r/
A2P_R = {'EH':'2', 'AE':'3', 'IH':'14', 'IY':'14', 'EY':'24', 'AA':'44', 'AO':'64', 'OW':'64', 'UH':'74', 'UW':'74', 'AH':'6', 'AW':'42', 'AY':'41', 'OY':'61'}
# consonant feature -> Plotnik digit (manner, place, voicing)
MANNER = {'s':'1', 'a':'2', 'f':'3', 'n':'4', 'l':'5', 'r':'6'}
PLACE = {'l':'1', 'a':'4', 'p':'5', 'b':'2', 'd':'3', 'v':'6'}
VOICE = {'-':'1', '+':'2'}
def arpabet2plotnik(ac, trans, prec_p, foll_p, phoneset):
    """Convert an ARPABET vowel class to its Plotnik vowel code.

    ac       -- ARPABET vowel class with the stress digit already stripped
    trans    -- the word's transcription (used for lexical exceptions)
    prec_p   -- preceding phone ('' if none), stress stripped
    foll_p   -- following phone ('' if none), stress stripped
    phoneset -- mapping from phone label to its feature entry (ctype/cplace/cvox)
    """
    # word-final IY/EY/OW get their own codes
    if foll_p == '' and ac in ('IY', 'EY', 'OW'):
        return A2P_FINAL[ac]
    # AY before a voiceless consonant
    if foll_p != '' and ac == 'AY' and phoneset[foll_p].cvox == '-':
        return '47'
    # lexical exceptions: broad-a word class
    if trans in ['FATHER', 'MA', 'PA', 'SPA', 'CHICAGO', 'PASTA', 'BRA', 'UTAH', 'TACO']:
        return '43'
    # UW after an apical consonant
    if prec_p != '' and ac == 'UW' and phoneset[prec_p].cplace == 'a':
        return '73'
    # any vowel (except ER itself) before /r/
    if foll_p != '' and phoneset[foll_p].ctype == 'r' and ac != 'ER':
        return A2P_R[ac]
    return A2P[ac]
# This is a hack based on the fact that the CMU transcriptions for vowels all
# indicate the level of stress in their final character (0, 1, or 2); to be
# rewritten later to be more portable.
def is_v(p):
    """Return True if CMU phone label *p* is a vowel (ends in a stress digit)."""
    # Improvement (idiom): return the membership test directly instead of the
    # if/else returning literal True/False.
    return p[-1] in ('0', '1', '2')
def get_n_foll_syl(i, phones):
    """Count the vowels (syllable nuclei) after index *i* in *phones*."""
    return sum(1 for phone in phones[i+1:] if is_v(phone.label))
def get_n_foll_c(i, phones):
    """Count the consonants after index *i*, stopping at the next vowel.

    A glide or liquid (Y, W, R, L) directly after a single consonant also
    ends the count (e.g. 'figure', 'Wrigley').
    """
    count = 0
    for phone in phones[i+1:]:
        if is_v(phone.label):
            break
        if count == 1 and phone.label in ['Y', 'W', 'R', 'L']:
            break
        count += 1
    return count
class PltFile:
    """Container for the header data and measurements of one Plotnik file.

    Bug fix: the attributes used to be class-level, so the mutable
    ``measurements`` list was shared between every instance; all fields
    are now initialised per instance.
    """

    def __init__(self):
        self.first_name = ''
        self.last_name = ''
        self.age = ''
        self.city = ''
        self.state = ''
        self.sex = ''
        self.ts = ''           # telsur number from the header line
        self.N = ''            # declared number of measurements
        self.S = ''
        self.measurements = []  # list of VowelMeasurement objects
class VowelMeasurement:
    """One vowel formant measurement parsed from a Plotnik data line."""
    # NOTE(review): these are class-level defaults; process_measurement_line()
    # overwrites most of them per instance. outputPlotnikFile() reads
    # lowercase f1/f2/f3 and a `dur` attribute that are not set here -- confirm
    # which code path produces those objects.
    F1 = 0          # first formant
    F2 = 0          # second formant
    F3 = ''         # third formant, '' when missing/unparsable
    code = ''       # Plotnik vowel/environment code
    stress = 1      # stress level
    text = ''       # full free-text portion of the input line
    word = ''       # word as entered (with annotations)
    trans = ''      # plain transcription derived from word
    fname = ''      # filename-safe (max 8 chars) version of the word
    comment = ''    # trailing '-- ' comment, if any
    glide = ''      # glide annotation from '{...}', if any
    style = ''      # style code from '-N-', if any
    t = 0           # presumably the measurement time written next to the word -- confirm
# input: Plotnik word as originally entered (with parentheses, token numbers,
#        glide annotations, etc.); output: normal transcription
def word2trans(word):
    """Strip all Plotnik annotations from *word* and return it upper-cased."""
    cleaned = word.replace('(', '').replace(')', '')
    # the glide annotation, if present, sits outside the token count,
    # so it must be removed first
    cleaned = re.sub(glide_regex, '', cleaned)
    cleaned = re.sub(count_regex, '', cleaned)
    return cleaned.upper()
def word2fname(word):
    """Derive an upper-case filename stem (max 8 chars) from *word*.

    Annotations are stripped; when truncation is needed, a trailing token
    digit is kept as the eighth character so names stay distinguishable.
    """
    stem = word.replace('(', '').replace(')', '').replace('-', '')
    stem = re.sub(glide_regex, '', stem)
    stem = stem.upper()
    if len(stem) > 8:
        final = stem[-1]
        if final in '0123456789':
            stem = stem[0:7] + final
        else:
            stem = stem[0:8]
    return stem
# returns the index of the stressed vowel, or '' if 0 or more than one exist
def get_stressed_v(phones):
    """Return the index of the single primary-stressed vowel, else ''."""
    stressed = [idx for idx, phone in enumerate(phones) if phone[-1] == '1']
    # if there is more than one vowel with primary stress (or none), we don't
    # know which one to measure, so return ''
    if len(stressed) != 1:
        return ''
    return stressed[0]
def cmu2plotnik_code(i, phones, trans, phoneset):
    """Build the Plotnik environment code for the vowel at index *i*.

    Returns None when phones[i] is not a vowel. Otherwise the code is the
    Plotnik vowel class, a '.', then one digit each for: following-segment
    manner, place and voicing, preceding-segment class, and following
    consonant/syllable sequence type.
    """
    if not is_v(phones[i].label):
        return None
    # if the vowel is the final phone in the list, then there is no following segment
    if i+1 == len(phones):
        foll_p = ''
        fm = '0'
        fp = '0'
        fv = '0'
        fs = '0'
    else:
        # get the following segment, and strip the stress code off if it's a vowel
        foll_p = re.sub(stress_regex, '', phones[i+1].label)
        ctype = phoneset[foll_p].ctype
        cplace = phoneset[foll_p].cplace
        cvox = phoneset[foll_p].cvox
        # convert from the CMU codes to the Plotnik codes
        fm = MANNER.get(ctype, '0')
        fp = PLACE.get(cplace, '0')
        fv = VOICE.get(cvox, '0')
        # following-sequence class from the counts of trailing consonants
        # and remaining syllables
        n_foll_syl = get_n_foll_syl(i, phones)
        n_foll_c = get_n_foll_c(i, phones)
        if n_foll_c <= 1 and n_foll_syl == 1:
            fs = '1'
        elif n_foll_c <= 1 and n_foll_syl >= 2:
            fs = '2'
        elif n_foll_c > 1 and n_foll_syl == 0:
            fs = '3'
        elif n_foll_c > 1 and n_foll_syl == 1:
            fs = '4'
        elif n_foll_c > 1 and n_foll_syl >= 2:
            fs = '5'
        else:
            fs = '0'
    # if the vowel is the first phone in the list, then there is no preceding segment
    if i == 0:
        prec_p = ''
        ps = '0'
    else:
        # get the preceding segment, and strip the stress code off if it's a vowel
        prec_p = re.sub(stress_regex, '', phones[i-1].label)
        # preceding-segment class digit
        if prec_p in ['B', 'P', 'V', 'F']:
            ps = '1'
        elif prec_p in ['M']:
            ps = '2'
        elif prec_p in ['D', 'T', 'Z', 'S', 'TH', 'DH']:
            ps = '3'
        elif prec_p in ['N']:
            ps = '4'
        elif prec_p in ['ZH', 'SH', 'JH', 'CH']:
            ps = '5'
        elif prec_p in ['G', 'K']:
            ps = '6'
        elif i > 1 and prec_p in ['L', 'R'] and phones[i-2] in ['B', 'D', 'G', 'P', 'T', 'K', 'V', 'F', 'Z', 'S', 'SH', 'TH']:
            # obstruent + liquid cluster
            # NOTE(review): phones[i-2] is compared directly against label
            # strings here, while elsewhere phones[x].label is used -- confirm
            # the element type of `phones`.
            ps = '8'
        elif prec_p in ['L', 'R', 'ER0', 'ER2', 'ER1']:
            ps = '7'
        elif prec_p in ['W', 'Y']:
            ps = '9'
        else:
            ps = '0'
    # assemble: vowel code + '.' + manner, place, voice, preceding, sequence
    code = arpabet2plotnik(phones[i].label[:-1], trans, prec_p, foll_p, phoneset)
    code += '.'
    code += fm
    code += fp
    code += fv
    code += ps
    code += fs
    return code
def process_measurement_line(line):
    """Parse one comma-separated measurement line into a VowelMeasurement."""
    fields = line.split(',')
    vm = VowelMeasurement()
    vm.F1 = float(fields[0])
    vm.F2 = float(fields[1])
    try:
        vm.F3 = float(fields[2])
    except ValueError:
        vm.F3 = ''
    vm.code = fields[3]
    vm.stress = fields[4]
    vm.text = fields[5]
    vm.word = vm.text.split()[0]
    vm.trans = word2trans(vm.word)
    vm.fname = word2fname(vm.word)
    # glide annotation, e.g. "{g}"
    glide_match = re.findall(glide_regex, vm.text)
    if len(glide_match) > 0:
        vm.glide = glide_match[0].replace('{', '').replace('}', '')
    # style code, e.g. "-1-"
    style_match = re.findall(style_regex, vm.text)
    if len(style_match) > 0:
        vm.style = style_match[0].replace('-', '')
    # trailing "-- ..." comment; a comment of exactly "glide" also sets the flag
    comment_match = re.findall(comment_regex, vm.text)
    if len(comment_match) > 0:
        note = comment_match[0].replace('-- ', '')
        vm.comment = note
        if note == 'glide':
            vm.glide = 'g'
    else:
        # no "--" comment: anything after the style code counts as the comment
        tail = style_regex.split(vm.text)
        if len(tail) > 1:
            vm.comment = tail[1].strip()
    return vm
def get_first_name(line):
    """Return the first word of the comma-separated header's name field."""
    name_field = line.split(',')[0]
    return name_field.split()[0]
def get_last_name(line):
    """Return the second word of the name field, or '' when absent."""
    name_parts = line.split(',')[0].split()
    if len(name_parts) > 1:
        return name_parts[1]
    return ''
def get_age(line):
    """Return the stripped second comma-field, or '' when missing."""
    fields = line.split(',')
    if len(fields) > 1:
        return fields[1].strip()
    return ''
def get_sex(line):
    """Return 'm' or 'f' from the third comma-field, else ''."""
    fields = line.split(',')
    candidate = fields[2].strip() if len(fields) > 2 else ''
    # only some files have sex listed in the first line
    return candidate if candidate in ['m', 'f'] else ''
def get_city(line):
    """Return the city field; its position shifts by one when sex is present."""
    idx = 3 if get_sex(line) in ['m', 'f'] else 2
    fields = line.split(',')
    if len(fields) > idx:
        return fields[idx].strip()
    return ''
def get_state(line):
    """Return the first word of the state field; its position shifts with sex."""
    idx = 4 if get_sex(line) in ['m', 'f'] else 3
    try:
        # the trailing split()[0] may also raise IndexError on an empty field
        return line.split(',')[idx].strip().split()[0]
    except IndexError:
        return ''
def get_ts(line):
    """Return the telsur number following ' TS ' (or ' ts '), else ''."""
    for marker in (' TS ', ' ts '):
        if marker in line:
            return line.strip().split(marker)[1]
    return ''
def get_n(line):
    """Parse the first comma-field as an int; return '' when unparsable.

    Bug fix: splitting always yields at least one element, so IndexError can
    never occur here -- but int() raises ValueError on malformed input (e.g.
    an empty string), which the original except clause missed and let crash.
    """
    try:
        return int(line.strip().split(',')[0])
    except (IndexError, ValueError):
        return ''
def get_s(line):
    """Parse the second comma-field as a float; return '' when missing or bad.

    Bug fix: the original caught only IndexError (missing field), but a
    non-numeric field makes float() raise ValueError, which went uncaught.
    """
    try:
        return float(line.strip().split(',')[1])
    except (IndexError, ValueError):
        return ''
def process_plt_file(filename):
    """Read a Plotnik file and return a populated PltFile.

    Returns None when the number of parsed measurements does not match the
    header's declared N; exits the process when the file holds only blank
    lines. Improvements: Python-3-compatible print call, 'rU' mode replaced
    by 'r' (removed in modern Python; universal newlines are the default),
    and the file handle is now closed.
    """
    f = open(filename, 'r')
    line = f.readline().strip()
    # skip initial blank lines
    while line == '':
        line = f.readline()
        # EOF was reached, so this file only contains blank lines
        if line == '':
            sys.exit()
        else:
            line = line.strip()
    # first header line: "First Last, age[, sex], city, state TS n"
    Plt = PltFile()
    Plt.first_name = get_first_name(line)
    Plt.last_name = get_last_name(line)
    Plt.age = get_age(line)
    Plt.sex = get_sex(line)
    Plt.city = get_city(line)
    Plt.state = get_state(line)
    Plt.ts = get_ts(line)
    # second header line: "N,S"
    line = f.readline().strip()
    Plt.N = get_n(line)
    Plt.S = get_s(line)
    line = f.readline().strip()
    # skip any blank lines between header and formant measurements
    while line == '':
        line = f.readline()
        # this file only contains blank lines
        if line == '':
            sys.exit()
        else:
            line = line.strip()
    Plt.measurements = []
    # proceed until we reach the blank line separating the formant data from the means
    while line != '':
        # some files don't contain this blank line, so look to see if the
        # first value in the line is '1'; if it is, this must be the beginning
        # of the means list, and not an F1 measurement
        if line.split(',')[0] == '1':
            break
        vm = process_measurement_line(line)
        Plt.measurements.append(vm)
        line = f.readline().strip()
    f.close()  # bug fix: the handle was never closed
    if len(Plt.measurements) != Plt.N:
        # bug fix: this was a Python 2 print statement
        print("ERROR: N's do not match for %s" % filename)
        return None
    else:
        return Plt
# unstressed vowels are labeled with '0' in the CMU pronouncing dictionary,
# but '3' in Plotnik
def convertStress(stress):
    """Map CMU unstressed '0' to Plotnik '3'; all other codes pass through."""
    return '3' if stress == '0' else stress
# Plotnik requires the duration to be represented in msec as an integer
def convertDur(dur):
    """Convert a duration in seconds to a whole number of milliseconds."""
    return int(round(dur * 1000))
def outputPlotnikFile(Plt, f):
    """Write the PltFile *Plt* back out in Plotnik text format to path *f*.

    Improvements: the sex-present branch used a Python 2 print statement,
    sending the header line to stdout instead of the output file; the file
    handle was never closed; the unused pltFields dict was removed.
    """
    fw = open(f, 'w')
    # header line: "First Last, age[, sex,] city, state ts"
    if Plt.sex == '':
        fw.write(Plt.first_name+' '+Plt.last_name+', '+Plt.age+', '+Plt.city+', '+Plt.state+' '+Plt.ts)
    else:
        # bug fix: was `print ...`, which wrote the header to stdout and left
        # the output file without its first line
        fw.write(Plt.first_name+' '+Plt.last_name+', '+Plt.age+', '+Plt.sex+','+Plt.city+', '+Plt.state+' '+Plt.ts)
    fw.write('\n')
    fw.write(str(Plt.N)+','+str(Plt.S))
    fw.write('\n')
    for vm in Plt.measurements:
        stress = convertStress(vm.stress)
        dur = convertDur(vm.dur)
        # NOTE(review): lowercase f1/f2/f3 here do not match the uppercase
        # F1/F2/F3 set by process_measurement_line -- presumably these
        # measurement objects come from another producer; confirm.
        fw.write(','.join([str(round(vm.f1, 1)), str(round(vm.f2, 1)), str(vm.f3), vm.code, stress + '.' + str(dur), vm.word + ' ' + str(vm.t)]))
        fw.write('\n')
    fw.close()  # bug fix: handle was never closed, risking lost buffered output
|
# coding: utf8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseForbidden
from dwebsocket import require_websocket
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from models import tomcat_status, tomcat_url, tomcat_project, check_status
from saltstack.command import Command
from accounts.views import HasPermission
import json, logging, requests, re, datetime
logger = logging.getLogger('django')
# Sentinel status value used when a check could not produce a real HTTP code.
error_status = 'null'
@csrf_exempt
def UrlQuery(request):
    """Return tomcat_url records as JSON, filtered by the POSTed 'act' value.

    act: 'query_all' | 'query_active' | 'query_inactive'; anything else (or
    an unparsable body) yields an error response.
    """
    if request.method == 'GET':
        return HttpResponse('You get nothing!')
    elif request.method == 'POST':
        clientip = request.META['REMOTE_ADDR']
        logger.info('[POST]%s is requesting. %s' %(clientip, request.get_full_path()))
        try:
            data = json.loads(request.body)
            act = data['act']
        # bug fix: the bare except also swallowed SystemExit/KeyboardInterrupt;
        # a missing/invalid body deliberately falls through to the error reply
        except Exception:
            act = 'null'
        if act == 'query_all':
            datas = tomcat_url.objects.all()
        elif act == 'query_active':
            datas = tomcat_url.objects.filter(status='active')
        elif act == 'query_inactive':
            datas = tomcat_url.objects.filter(status='inactive')
        else:
            return HttpResponse("参数错误!")
        logger.info('查询参数:%s' %act)
        # serialize the queryset as a list of plain dicts
        url_list = [{
            'id': url.id,
            'envir': url.envir,
            'project': url.project,
            'minion_id': url.minion_id,
            'ip_addr': url.ip_addr,
            'server_type': url.server_type,
            'role': url.role,
            'domain': url.domain,
            'url': url.url,
            'status_': url.status,
            'info': url.info,
        } for url in datas]
        return HttpResponse(json.dumps(url_list))
    else:
        return HttpResponse('nothing!')
@csrf_exempt
def UrlAdd(request):
    """Create a tomcat_url record from POST data unless one already exists
    for the same (project, minion_id) pair."""
    if request.method == 'POST':
        clientip = request.META['REMOTE_ADDR']
        data = request.POST
        if not HasPermission(request.user, 'add', 'tomcat_url', 'check_tomcat'):
            return HttpResponseForbidden('你没有新增的权限。')
        try:
            info = tomcat_url.objects.get(project=data['project'], minion_id=data['minion_id'])
            logger.info('%s is requesting. %s url: %s already exists!' %(clientip, request.get_full_path(), info.url))
            return HttpResponse('记录: %s %s already exists!' %(info.project, info.minion_id))
        # bug fix: the bare except treated ANY error (database failures, even
        # MultipleObjectsReturned) as "record not found" and inserted a
        # duplicate; only the genuine not-found case should create a record
        except tomcat_url.DoesNotExist:
            logger.info('%s is requesting. %s data: %s' %(clientip, request.get_full_path(), data))
            info = tomcat_url(envir=data['envir'], project=data['project'], minion_id=data['minion_id'].strip(), ip_addr=data['ip_addr'].strip(), server_type=data['server_type'], role=data['role'], domain=data['domain'], url=data['url'], status=data['status_'], info=data['info'])
            info.save()
            return HttpResponse('添加成功!')
    elif request.method == 'GET':
        return HttpResponse('You get nothing!')
    else:
        return HttpResponse('nothing!')
@csrf_exempt
def UrlUpdate(request):
    """Overwrite every field of the tomcat_url record named by POST 'id'."""
    if request.method == 'POST':
        clientip = request.META['REMOTE_ADDR']
        payload = request.POST
        logger.info('%s is requesting. %s data: %s' %(clientip, request.get_full_path(), payload))
        if not HasPermission(request.user, 'change', 'tomcat_url', 'check_tomcat'):
            return HttpResponseForbidden('你没有修改的权限。')
        record = tomcat_url.objects.get(id=payload['id'])
        # (model attribute, POST key) pairs; minion_id/ip_addr are stripped
        field_map = (('envir', 'envir'), ('project', 'project'),
                     ('minion_id', 'minion_id'), ('ip_addr', 'ip_addr'),
                     ('server_type', 'server_type'), ('role', 'role'),
                     ('domain', 'domain'), ('url', 'url'),
                     ('status', 'status_'), ('info', 'info'))
        for attr, key in field_map:
            value = payload[key]
            if key in ('minion_id', 'ip_addr'):
                value = value.strip()
            setattr(record, attr, value)
        record.save()
        return HttpResponse('更新成功!')
    elif request.method == 'GET':
        return HttpResponse('You get nothing!')
    else:
        return HttpResponse('nothing!')
@csrf_exempt
def UrlUpdateStatus(request):
    """Set only the status field of one tomcat_url record (JSON body: id, status)."""
    if request.method == 'GET':
        return HttpResponse('You get nothing!')
    if request.method != 'POST':
        return HttpResponse('nothing!')
    clientip = request.META['REMOTE_ADDR']
    payload = json.loads(request.body)
    logger.info('%s is requesting. %s data: %s' %(clientip, request.get_full_path(), payload))
    if not HasPermission(request.user, 'change', 'tomcat_url', 'check_tomcat'):
        return HttpResponseForbidden('你没有修改的权限。')
    record = tomcat_url.objects.get(id=payload['id'])
    record.status = payload['status']
    record.save()
    return HttpResponse('更新成功!')
@csrf_exempt
def UrlDelete(request):
    """Delete the tomcat_url records whose ids are listed in the JSON body."""
    clientip = request.META['REMOTE_ADDR']
    logger.info('user: %s' %request.user.username)
    username = request.user.username
    # user 'arno' bypasses the permission check
    if username != u'arno' and not HasPermission(request.user, 'delete', 'tomcat_url', 'check_tomcat'):
        logger.info('%s %s is requesting. %s' %(clientip, username, request.get_full_path()))
        return HttpResponseForbidden('你没有删除的权限,请联系管理员。')
    if request.method == 'GET':
        return HttpResponse('You get nothing!')
    if request.method != 'POST':
        return HttpResponse('nothing!')
    payload = json.loads(request.body)
    logger.info('%s is requesting. %s data: %s' %(clientip, request.get_full_path(), payload))
    for item in payload:
        record = tomcat_url.objects.get(id=item['id'],)
        record.delete()
    return HttpResponse('删除成功!')
@require_websocket
@csrf_exempt
def UrlCheckServer(request):
    """Websocket endpoint: for each received JSON message, probe the target
    server and stream back a 'one' (started) then a 'final' (result) frame.

    'app' targets are checked via a salt cmd.run looking for a running java
    -jar process; anything else is probed with an HTTP HEAD request.
    """
    if request.is_websocket():
        # NOTE(review): these module-level globals are mutated per request,
        # which is not safe under concurrent requests -- confirm intent.
        global username, role, clientip
        username = request.user.username
        try:
            role = request.user.userprofile.role
        except:
            role = 'none'
        clientip = request.META['REMOTE_ADDR']
        #logger.info(dir(request.websocket))
        #message = request.websocket.wait()
        # HTTP codes treated as healthy when no <title> can be extracted
        code_list = ['200', '302', '303', '405']
        for postdata in request.websocket:
            #logger.info(type(postdata))
            data = json.loads(postdata)
            ### step one ###
            info_one = {}
            info_one['step'] = 'one'
            request.websocket.send(json.dumps(info_one))
            logger.info('%s is requesting. %s 执行参数:%s' %(clientip, request.get_full_path(), data))
            #results = []
            ### final step ###
            info_final = {}
            info_final['step'] = 'final'
            info_final['access_time'] = datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
            try:
                if data['server_type'] == 'app':
                    # app server: look for a running "java -jar" process via salt
                    info_final['info'] = error_status
                    datas = {}
                    datas['target'] = data['minion_id']
                    datas['function'] = 'cmd.run'
                    datas['arguments'] = 'ps -ef |grep -i "java" |grep -i " -jar" |grep -v grep'
                    datas['expr_form'] = 'glob'
                    commandexe = Command(datas['target'], datas['function'], datas['arguments'], datas['expr_form'])
                    exe_result = commandexe.CmdRun()[datas['target']]
                    logger.info("exe_result: %s" %exe_result)
                    if exe_result == '':
                        # no matching process found
                        info_final['code'] = error_status
                    elif exe_result == 'not return':
                        info_final['code'] = exe_result
                        info_final['info'] = '请检查服务器是否存活'
                    else:
                        info_final['code'] = '200'
                        info_final['info'] = '正常'
                    #logger.info(info_final)
                else:
                    # web server: HEAD the URL with the configured Host header
                    ret = requests.head(data['url'], headers={'Host': data['domain']}, timeout=10)
                    info_final['code'] = '%s' %ret.status_code
                    try:
                        title = re.search('<title>.*?</title>', ret.content)
                        info_final['info'] = title.group().replace('<title>', '').replace('</title>', '')
                    except AttributeError:
                        # no <title> in the body: judge health by status code
                        if info_final['code'] in code_list:
                            info_final['info'] = '正常'
                        else:
                            info_final['info'] = '失败'
            except:
                # best-effort: any probe failure is reported, not raised
                info_final['code'] = error_status
                info_final['info'] = '失败'
            if info_final['code'] == error_status:
                # probe failed: ping the minion to tell "service down" from "host down"
                commandexe = Command(data['minion_id'], 'test.ping')
                test_result = commandexe.TestPing()[data['minion_id']]
                if test_result == 'not return':
                    info_final['info'] = '请检查服务器是否存活'
            request.websocket.send(json.dumps(info_final))
        ### close websocket ###
        request.websocket.close()
@csrf_exempt
def UpdateCheckStatus(request):
    """GET: dump all check_status rows as JSON; POST: update one program's status."""
    clientip = request.META['REMOTE_ADDR']
    if request.method == 'POST':
        payload = json.loads(request.body)
        logger.info('%s is requesting. %s data: %s' %(clientip, request.get_full_path(), payload))
        if not HasPermission(request.user, 'change', 'check_status', 'check_tomcat'):
            return HttpResponseForbidden('你没有修改的权限。')
        record = check_status.objects.filter(program=payload['program']).first()
        record.status = payload['status']
        record.save()
        return HttpResponse('更新成功!')
    if request.method == 'GET':
        logger.info('%s is requesting. %s query check_status' %(clientip, request.get_full_path()))
        rows = [{'program': row.program, 'status': row.status}
                for row in check_status.objects.all()]
        return HttpResponse(json.dumps(rows))
    return HttpResponse('nothing!')
# Generated by Django 3.1.1 on 2021-02-20 21:33
from django.db import migrations, models
class Migration(migrations.Migration):
    # Change GameModel.year to an IntegerField (verbose name "Yıl").

    dependencies = [
        ('product', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='gamemodel',
            name='year',
            field=models.IntegerField(verbose_name='Yıl'),
        ),
    ]
|
"""constructs a daily time series for Finland of the daily change in COVID-19 tests.
API documentation: https://thl.fi/fi/tilastot-ja-data/aineistot-ja-palvelut/avoin-data/varmistetut-koronatapaukset-suomessa-covid-19-
"""
import json
import requests
import pandas as pd
def main():
    """Fetch Finland's cumulative COVID-19 test counts and write the CSV."""
    url = "https://services7.arcgis.com/nuPvVz1HGGfa0Eh7/arcgis/rest/services/korona_testimaara_paivittain/FeatureServer/0/query?f=json&where=date%3Etimestamp%20%272020-02-25%2022%3A59%3A59%27&returnGeometry=false&spatialRel=esriSpatialRelIntersects&outFields=OBJECTID%2Ctestimaara_kumulatiivinen%2Cdate&orderByFields=date%20asc&resultOffset=0&resultRecordCount=4000&resultType=standard&cacheHint=true"
    # Download the raw feature list from the ArcGIS endpoint.
    response = requests.get(url)
    assert response.ok
    features = json.loads(response.content)["features"]
    # Extract dates (epoch milliseconds) and cumulative test totals.
    raw_dates = [feature.get("attributes").get("date") for feature in features]
    parsed_dates = pd.to_datetime(raw_dates, unit="ms").date
    cumulative = [feature.get("attributes").get("testimaara_kumulatiivinen") for feature in features]
    # Keep the earliest date seen for each distinct cumulative total.
    df = pd.DataFrame({"Date": parsed_dates, "Cumulative total": cumulative})
    df = df.groupby("Cumulative total", as_index=False).min()
    # Constant metadata columns expected by the automated sheet.
    df.loc[:, "Country"] = "Finland"
    df.loc[:, "Units"] = "tests performed"
    df.loc[:, "Source URL"] = "https://experience.arcgis.com/experience/d40b2aaf08be4b9c8ec38de30b714f26"
    df.loc[:, "Source label"] = "Finnish Department of Health and Welfare"
    df.loc[:, "Notes"] = pd.NA
    df.to_csv("automated_sheets/Finland.csv", index=False)
if __name__ == "__main__":
    # Entry point: fetch and write the Finland testing time series.
    main()
|
#!/usr/bin/python
# Legacy OpenCV script (cv2.cv API, Python 2 era): detects ring-like circles
# in ./original/<id>.tif, estimates each ring's inner/outer radius from the
# mean pixel intensity along circles of growing radius, and saves an
# annotated copy to ./result/<id>.tif.
import sys,cv2
import math
import numpy as np
import matplotlib.pyplot as plt
import cv2.cv as cv

# ----------------- LOAD IMAGE -------------------------
sIPath="./original"   # input images
sFPath="./filter"     # grayscale intermediates
sOPath="./result"     # annotated outputs
sImgID=sys.argv[1]    # image id = filename without extension
img = cv2.imread(sIPath+"/"+sImgID+".tif")

# ----------------- FILTER IMAGE -----------------------
# Only grayscale conversion is active; other filters were experiments.
imgF = img
# imgF = cv2.GaussianBlur(imgF,,)
# imgF = cv2.Sobel(imgF,-1,2,2,1)
# ret,imgF = cv2.threshold(imgF,50,255,cv2.THRESH_BINARY)
imgF = cv2.cvtColor(imgF,cv2.COLOR_BGR2GRAY)
cv2.imwrite(sFPath+"/"+sImgID+".tif", imgF);

# ----------------- DETECT CIRCLES ---------------------
circles = cv2.HoughCircles(imgF,cv.CV_HOUGH_GRADIENT,1,20, \
    param1=400,param2=35,minRadius=10,maxRadius=80)
circles = np.uint16(np.around(circles))

# hist, bin_edges = np.histogram(circles[0,:,2],bins=20)
# plt.bar(bin_edges[:-1], hist, width = 1)
# plt.xlim(min(bin_edges), max(bin_edges))
# plt.show()

# ---------- DETECT INNER AND OUTER LIMITS -------------
# For each Hough circle, sample the mean intensity along circles of growing
# radius r; the first dark ring (<50) marks the inner radius, the return to
# bright (>50) marks the outer radius.
lCircles=[]
for i in circles[0,:]:
    lAvg=[]
    rInt=-1   # inner radius: first r whose ring intensity drops below 50
    rExt=-1   # outer radius: first r after rInt whose intensity rises above 50
    for r in range(1,i[2]+10):
        avg=0
        count=0
        for x in range(i[0]-r,i[0]+r):
            if (x>0 and x<imgF.shape[1]):
                if (r*r-(x-i[0])*(x-i[0])>0):
                    # Upper intersection of column x with the circle of radius r.
                    # NOTE(review): y is a float index — this relied on old NumPy
                    # accepting float indices; confirm before porting to Python 3.
                    y=i[1]+math.sqrt(r*r-(x-i[0])*(x-i[0]))
                    if(y>0 and y<imgF.shape[0]):
                        count=count+1.0
                        avg=avg+imgF[y,x]
                    # Lower intersection of column x with the circle.
                    y=i[1]-math.sqrt(r*r-(x-i[0])*(x-i[0]))
                    if(y>0 and y<imgF.shape[0]):
                        count=count+1.0
                        avg=avg+imgF[y,x]
        # NOTE(review): count already counts every sampled pixel, so dividing
        # by 2.0*count yields half the true mean (and divides by zero when no
        # pixel falls inside the image); the <50 / >50 thresholds below appear
        # tuned to this scale — confirm before "fixing".
        avg=avg/(2.0*count)
        lAvg.append(avg)
        if(rInt<0 and avg<50):
            rInt=r
        if(rInt>0 and rExt<0 and avg>50):
            rExt=r
    # if (rInt>0 and rExt>0 and rExt-rInt>5 and rExt-rInt<20 and i[2]<1.1*float(rExt) and i[2]>0.9*float(rInt)):
    lCircles.append([i[0],i[1],rInt,rExt,i[2]])

# ----------------- DRAW DETECTED CIRCLES --------------
# Green: Hough radius; yellow: inner/outer ring estimates; red: centers.
for i in lCircles:
    cv2.circle(img,(i[0],i[1]),i[4],(0,255,0),2)
    cv2.circle(img,(i[0],i[1]),2,(0,0,255),3)
    if(i[2]>0):
        cv2.circle(img,(i[0],i[1]),i[2],(0,255,255),1)
        cv2.circle(img,(i[0],i[1]),2,(0,0,255),3)
    if(i[3]>0):
        cv2.circle(img,(i[0],i[1]),i[3],(0,255,255),1)
        cv2.circle(img,(i[0],i[1]),2,(0,0,255),3)

# ----------------- SAVE IMAGE WITH CIRCLES ------------
cv2.imwrite(sOPath+"/"+sImgID+".tif", img);
|
import asyncio
import logging
import os
from .settings import BaseSettings
logger = logging.getLogger('foxglove.redis')
async def async_flush_redis(settings: BaseSettings):
    """Open an arq redis pool, flush the current database, and close the pool.

    Destructive: removes every key in the configured redis db.
    """
    # Imported lazily so importing this module does not require arq.
    from arq import create_pool
    redis = await create_pool(settings.redis_settings)
    await redis.flushdb()
    await redis.close(close_connection_pool=True)
def flush_redis(settings: BaseSettings):
    """Synchronously flush redis after an explicit confirmation.

    Confirmation comes either from ``CONFIRM_FLUSH_REDIS=confirm`` in the
    environment or from an interactive ``[yN]`` prompt (env checked first,
    so the prompt is skipped when the variable is set).
    """
    confirmed = (
        os.getenv('CONFIRM_FLUSH_REDIS') == 'confirm'
        or input('Confirm redis flush? [yN] ') == 'y'
    )
    if confirmed:
        logger.info('resetting database...')
        asyncio.run(async_flush_redis(settings))
        logger.info('done.')
    else:
        logger.info('cancelling')
|
from authentication import auth
from flask_restful import Resource
class Login(Resource):
    """REST resource that validates the caller's HTTP auth credentials."""

    @auth.login_required
    def post(self):
        """Return the authenticated user's name with HTTP 200.

        Credential checking itself is done by the ``auth.login_required``
        decorator; reaching this body means the credentials matched.
        """
        return {"username": auth.current_user().name}, 200
#!/usr/bin/env python3
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.options.pipeline_options import GoogleCloudOptions
from apache_beam.options.pipeline_options import WorkerOptions
from apache_beam.io.gcp.internal.clients import bigquery
from apache_beam.pvalue import AsList
import logging
import json
import argparse
def process_artists(row, gender, area):
    """Reduce an artist row and resolve its gender/area ids to display names.

    ``gender`` and ``area`` are side-input lists of ``{id, name}`` mappings;
    a matching id is replaced in-place by the mapped name (values with no
    match are left as the raw id).

    :param row: dict decoded from artist.json
    :param gender: list of gender id/name mappings (side input)
    :param area: list of area id/name mappings (side input)
    :return: tuple ``(artist_id, reduced_row)``
    """
    artist = {
        'id': row['id'],
        'artist_gid': row['gid'],
        'artist_name': row['name'],
        'area': row['area'],
        'gender': row['gender'],
    }
    # Resolve the gender id only when one is present (may be None/falsy).
    if artist['gender']:
        for entry in gender:
            if entry['id'] == artist['gender']:
                artist['gender'] = entry['name']
    # The area id is resolved unconditionally, mirroring the gender lookup.
    for entry in area:
        if entry['id'] == artist['area']:
            artist['area'] = entry['name']
    return (artist['id'], artist)
def process_gender_or_area(element):
    """Parse one JSON line from area.json or gender.json.

    :param element: JSON string with at least ``id`` and ``name`` keys
    :return: ``{'id': int, 'name': str}`` (all other keys dropped)
    """
    parsed = json.loads(element)
    return {'id': parsed['id'], 'name': parsed['name']}
def process_artist_credit(element):
    """Decode one JSON line from artist_credit_name.json.

    Keeps only the join columns of interest.

    :param element: JSON string element
    :return: ``(artist_id, {'artist_credit': ..., 'artist': ...})``
    """
    record = json.loads(element)
    credit = {key: record[key] for key in ('artist_credit', 'artist')}
    return (credit['artist'], credit)
def process_recording(element):
    """Decode one JSON line from recording.json.

    Renames ``name``/``gid`` to recording-prefixed keys and keeps only the
    columns of interest.

    :param element: JSON string object
    :return: ``(artist_credit, reduced_dict)``
    """
    record = json.loads(element)
    # (output key, source key) pairs — order mirrors the BigQuery schema.
    column_map = (
        ('recording_name', 'name'),
        ('length', 'length'),
        ('recording_gid', 'gid'),
        ('video', 'video'),
        ('artist_credit', 'artist_credit'),
    )
    reduced = {dst: record[src] for dst, src in column_map}
    return (reduced['artist_credit'], reduced)
class UnSetCoGroup(beam.DoFn):
    """DoFn that finalizes a CoGroupByKey inner join by merging row dicts."""

    def process(self, element, source, joined, exclude_join_field):
        """Yield one merged dict per row of the ``source`` side.

        ``element`` has the shape ``(key, {source: [dicts], joined: [dicts]})``.
        Every dict in the ``joined`` list is folded into each dict from the
        ``source`` list (a cartesian product), skipping ``exclude_join_field``
        so the join key column is not duplicated.

        :param element: (key, grouped dict) produced by CoGroupByKey
        :param source: dict key naming the left-side row list
        :param joined: dict key naming the right-side row list
        :param exclude_join_field: field of joined rows to omit while merging
        :return: generator of merged dicts
        """
        _, grouped = element
        left_rows = grouped[source]
        right_rows = grouped[joined]
        for left in left_rows:
            for right in right_rows:
                for field, value in right.items():
                    if field != exclude_join_field:
                        left[field] = value
            yield left
def main():
    """Build and run the Dataflow pipeline that joins artists, artist
    credits and recordings, writing the denormalized rows to BigQuery.

    --dataset/--table select the output table; every other command-line
    flag is forwarded to Beam's PipelineOptions.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--dataset',
        default='musicbrainz',
        help='BigQuery dataset name'
    )
    parser.add_argument(
        '--table',
        default='recordings_by_artists_dataflow',
        help='BiqQuery table'
    )
    # Unknown args are passed through to Beam; force the Dataflow runner.
    args, argv = parser.parse_known_args()
    pipeline_options = PipelineOptions(argv)
    pipeline_options.view_as(StandardOptions).runner = 'DataflowRunner'
    gcp_options = pipeline_options.view_as(GoogleCloudOptions)
    if not gcp_options.job_name:
        gcp_options.job_name = 'music-job'
    worker_options = pipeline_options.view_as(WorkerOptions)
    # NOTE(review): this assignment is a no-op (it only sets False when the
    # value is already falsy); presumably intended to default use_public_ips
    # to False — confirm before relying on it.
    if not worker_options.use_public_ips:
        worker_options.use_public_ips = False
    # Destination table reference and its explicit schema.
    table_spec = bigquery.TableReference(projectId=gcp_options.project,
                                         datasetId=args.dataset,
                                         tableId=args.table)
    table_schema = {
        'fields': [
            {'name': 'id', 'mode': 'NULLABLE', 'type': 'INTEGER'},
            {'name': 'artist_gid', 'mode': 'NULLABLE', 'type': 'STRING'},
            {'name': 'artist_name', 'mode': 'NULLABLE', 'type': 'STRING'},
            {'name': 'area', 'mode': 'NULLABLE', 'type': 'STRING'},
            {'name': 'gender', 'mode': 'NULLABLE', 'type': 'STRING'},
            {'name': 'artist_credit', 'mode': 'NULLABLE', 'type': 'INTEGER'},
            {'name': 'recording_name', 'mode': 'NULLABLE', 'type': 'STRING'},
            {'name': 'length', 'mode': 'NULLABLE', 'type': 'INTEGER'},
            {'name': 'recording_gid', 'mode': 'NULLABLE', 'type': 'STRING'},
            {'name': 'video', 'mode': 'NULLABLE', 'type': 'BOOLEAN'},
        ]
    }
    with beam.Pipeline(options=pipeline_options) as pipeline:
        # Small lookup tables, later consumed as AsList side inputs.
        gender = pipeline | \
            'Read gender' >> beam.io.ReadFromText('gs://solutions-public-assets/bqetl/gender.json') | \
            'Process gender' >> beam.Map(process_gender_or_area)
        area = pipeline | \
            'Read area' >> beam.io.ReadFromText('gs://solutions-public-assets/bqetl/area.json') | \
            'Process area' >> beam.Map(process_gender_or_area)
        # Keyed PCollections used by the two CoGroupByKey joins below.
        artists = pipeline | \
            'Read Artists' >> beam.io.ReadFromText('gs://solutions-public-assets/bqetl/artist.json') | \
            'Convert artist from json to dict' >> beam.Map(lambda e: json.loads(e)) | \
            'Process artists' >> beam.Map(process_artists, AsList(gender), AsList(area))
        recordings = pipeline | \
            'Read Recordings' >> beam.io.ReadFromText('gs://solutions-public-assets/bqetl/recording.json') | \
            'Process recording' >> beam.Map(process_recording)
        artist_credit_name = pipeline | \
            'Read Artists Credit Name' >> beam.io.ReadFromText('gs://solutions-public-assets/bqetl/artist_credit_name.json') | \
            'Process artist credit name' >> beam.Map(process_artist_credit)
        # Joining artist and artist_credit_name
        # SELECT artist.id,
        #        artist.gid as artist_gid,
        #        artist.name as artist_name,
        #        artist.area,
        #        artist_credit_name.artist_credit
        # FROM datafusion-dataproc-tutorial.musicbrainz.artist as artist
        # INNER JOIN datafusion-dataproc-tutorial.musicbrainz.artist_credit_name AS artist_credit_name
        #      ON artist.id = artist_credit_name.artist
        #
        joined_artist_and_artist_credit_name = ({
            'artists': artists,
            'artist_credit_name': artist_credit_name}) | \
            'Merge artist and artist_credit_name to intermitent' >> beam.CoGroupByKey() | \
            'UnSetCoGroup intermitent' >> beam.ParDo(UnSetCoGroup(),
                                                     'artists',
                                                     'artist_credit_name',
                                                     'artist') | \
            'Map artist_credit to dict element' >> beam.Map(lambda e: (e['artist_credit'], e))
        # Joining previous table with recordings
        # SELECT intermitent.id,
        #        intermitent.artist_gid,
        #        intermitent.artist_name,
        #        intermitent.area,
        #        intermitent.artist_credit,
        #        recording.recording_name,
        #        recording.length,
        #        recording.video
        # FROM datafusion-dataproc-tutorial.musicbrainz.intermitents as intermitent
        # INNER JOIN datafusion-dataproc-tutorial.musicbrainz.recording AS recording
        #      ON intermitent.artist_credit = recording.artist_credit
        #
        joined_artist_and_artist_credit_name_and_recording = ({
            'joined_artist_and_artist_credit_name': joined_artist_and_artist_credit_name,
            'recordings': recordings}) | \
            'Merge intermitent and recording' >> beam.CoGroupByKey() | \
            'UnSetCoGroup final' >> beam.ParDo(UnSetCoGroup(),
                                               'joined_artist_and_artist_credit_name',
                                               'recordings',
                                               'artist_credit') | \
            'Write To BQ' >> beam.io.WriteToBigQuery(table_spec,
                                                     schema=table_schema,
                                                     write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE,
                                                     create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED)
# Guard so the pipeline only launches when run as a script — previously an
# import of this module would submit the Dataflow job as a side effect.
if __name__ == '__main__':
    logging.getLogger().setLevel(logging.INFO)
    main()
|
# Take a list of names as user input and print all names which have
# greater than 5 letters.
names = []
count = int(input("enter list"))  # number of names to read
for _ in range(count):
    # input() already returns str, so the original str() wrapper was redundant.
    names.append(input())
print(names)
# Bug fix: the original kept a manual counter ``j`` alongside the for-range
# loop and incremented it after the loop (dead code); the range index alone
# drives the iteration.
for name in names:
    if len(name) > 5:
        print(name)
|
"""
Given a list of numbers and a number k, return whether any two numbers from the list add up to k.
For example, given [10, 15, 3, 7] and k of 17, return true since 10 + 7 is 17.
Bonus: Can you do this in one pass?
"""
import unittest
def check_two_numbers_add_up_to_target(array, target):
    """Return True if any two numbers in ``array`` add up to ``target``.

    Single pass: for each element, check whether its complement
    (``target - element``) was already seen.

    :param array: list of numbers (may be None or contain None entries,
        which are skipped)
    :param target: value the sum is checked against
    :return: bool indicating whether two numbers in the list add up to target
    """
    # Edge cases -->
    if array is None or target is None:
        return False
    if not array:
        return False
    if len(array) == 1:
        # Bug fix: the original used ``len(array) is 1`` — an identity
        # comparison on an int, which is CPython-implementation-dependent;
        # equality is the correct test.
        # NOTE(review): returning True when the single element equals target
        # is questionable (the spec asks for TWO numbers) — kept as-is to
        # preserve the existing contract; confirm with the callers.
        return array[0] == target
    # <-- Edge cases
    seen = set()                      # values visited so far
    for element in array:
        if element is None:           # skip 'None' elements
            continue
        if target - element in seen:  # complement already visited?
            return True
        seen.add(element)
    return False                      # no pair sums to target
class TestSolution(unittest.TestCase):
    """Unit tests for check_two_numbers_add_up_to_target."""

    def test(self):
        # Normal case: 10 + 7 == 17
        a = [10, 15, 3, 7]
        k = 17
        self.assertTrue(check_two_numbers_add_up_to_target(a, k))
        # Empty array
        a = []
        k = 17
        self.assertFalse(check_two_numbers_add_up_to_target(a, k))
        # Empty array with None target (original comment said "Empty target")
        a = []
        k = None
        self.assertFalse(check_two_numbers_add_up_to_target(a, k))
        # NOTE(review): originally labelled "None array and target", but the
        # values duplicate the empty-array case above; kept as written.
        a = []
        k = 17
        self.assertFalse(check_two_numbers_add_up_to_target(a, k))
        # None array and None target
        a = None
        k = None
        self.assertFalse(check_two_numbers_add_up_to_target(a, k))
        # None element in array is skipped; 10 + 7 == 17
        a = [None, 10, 7]
        k = 17
        self.assertTrue(check_two_numbers_add_up_to_target(a, k))
        # None element in array; no pair sums to 16
        a = [None, 10, 7]
        k = 16
        self.assertFalse(check_two_numbers_add_up_to_target(a, k))
if __name__ == '__main__':
    # Run the unit tests when executed directly.
    unittest.main()
|
#Generators: generate sequence of values
#range() is a generator
#special keyword - yield
def make_list(num):
    """Return the doubles of 0..num-1, i.e. [0, 2, 4, ..., 2*(num-1)].

    Eagerly materializes the whole list (contrast with a generator, which
    would yield values lazily).
    """
    return [index * 2 for index in range(num)]
my_list = make_list(100)
#print(my_list)
# Materializing the full range holds all 100000 elements in memory at once
# (unlike the lazy range object itself) — this line demonstrates that cost.
print(list(range(100000)))
#iterable - any object in Python that we are able to loop through; under the hood it has the dunder method
#__iter__ - this method returns an iterator, which is what allows the object to be iterated over
#generators - every generator is iterable, but not everything that is iterable is a generator
#a list, for example, is iterable but not a generator
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
def external_func():
    """Return the public demo constant (23)."""
    result = 23
    return result
def _internal_func():
return 42
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 12 11:25:00 2019
@author: Administrator
"""
# Import the packages.
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt

# Create the map (default Basemap projection covering the whole world).
# Renamed from ``map`` to avoid shadowing the builtin.
world_map = Basemap()
# Draw the coastlines.
world_map.drawcoastlines()
# Project several lon/lat points to map coordinates and mark them.
lons = [0, 10, -20, -20]
lats = [0, -10, 40, -20]
x, y = world_map(lons, lats)
world_map.scatter(x, y, marker='D',color='m')
# Show the result.
plt.show()
import sys
import csv
import math

# Akaike Information Criterion for a model vs. a benchmark series:
#   AIC = 2k + n * ln(RSS / n)
# argv[1]: benchmark CSV (column "Value"); argv[2]: model CSV (column
# "Modeled data"); argv[3]: number of model parameters k.
num_parameters = int(sys.argv[3])

with open(sys.argv[2]) as model_file:
    modeled = [float(row["Modeled data"]) for row in csv.DictReader(model_file)]

with open(sys.argv[1]) as benchmark_file:
    observed = [float(row["Value"]) for row in csv.DictReader(benchmark_file)]

n = len(modeled)
# Residual sum of squares between the two series.
rss = sum((m - o) ** 2 for m, o in zip(modeled, observed))
print(2 * num_parameters + n * math.log(rss / n, math.e))
|
#!/usr/bin/python3
import datetime
import time
import glob
import os
import math
import cv2
import math
import numpy as np
import scipy.optimize
from lib.VideoLib import get_masks, find_hd_file_new, load_video_frames, sync_hd_frames
from lib.UtilLib import check_running, angularSeparation
from lib.CalibLib import radec_to_azel, clean_star_bg, get_catalog_stars, find_close_stars, XYtoRADec, HMS2deg, AzEltoRADec
from lib.ImageLib import mask_frame , stack_frames, preload_image_acc
from lib.ReducerLib import setup_metframes, detect_meteor , make_crop_images, perfect, detect_bp, sort_metframes
from lib.MeteorTests import meteor_test_cm_gaps
#import matplotlib.pyplot as plt
import sys
#from caliblib import distort_xy,
from lib.CalibLib import distort_xy_new, find_image_stars, distort_xy_new, XYtoRADec, radec_to_azel, get_catalog_stars,AzEltoRADec , HMS2deg, get_active_cal_file, RAdeg2HMS, clean_star_bg
from lib.UtilLib import calc_dist, find_angle, bound_cnt, cnt_max_px
from lib.UtilLib import angularSeparation, convert_filename_to_date_cam, better_parse_file_date
from lib.FileIO import load_json_file, save_json_file, cfe
from lib.UtilLib import calc_dist,find_angle
import lib.brightstardata as bsd
from lib.DetectLib import eval_cnt, id_object
# Command-line dispatcher for meteor-reduction utilities.
# Usage: <script> <cmd> <file> [show]
json_conf = load_json_file("../conf/as6.json")

cmd = sys.argv[1]
file = sys.argv[2]
# Optional third argument toggles display; defaults to 0 when absent
# or non-numeric (intentionally broad except for that reason).
try:
    show = int(sys.argv[3])
except:
    show = 0

# dm: run meteor detection and dump the per-frame results.
if cmd == 'dm' or cmd == 'detect_meteor':
    metframes, frames, metconf = detect_meteor(file, json_conf, show)
    print("Metframes")
    for fn in metframes:
        print(fn, metframes[fn])

# cm: build crop images from the matching video file.
if cmd == 'cm' or cmd == 'crop_images':
    vid_file = file.replace("-reduced.json", ".mp4")
    frames = load_video_frames(vid_file, json_conf, 2)
    frame = frames[0]
    make_crop_images(file, json_conf)

#MFD TO METFRAMES
# mfd: rebuild metframes/metconf from the meteor_frame_data of a reduced
# json file and save them back into it.
if cmd == 'mfd' :
    vid_file = file.replace("-reduced.json", ".mp4")
    frames = load_video_frames(vid_file, json_conf, 2)
    frame = frames[0]
    if "mp4" in file:
        file = file.replace(".mp4", "-reduced.json")
    red_data = load_json_file(file)
    mfd = red_data['meteor_frame_data']
    metframes, metconf = setup_metframes(mfd, frame)
    red_data['metframes'] = sort_metframes(metframes )
    red_data['metconf'] = metconf
    save_json_file(file, red_data)

if cmd == 'pf' or cmd == 'perfect':
    # perfect the meteor reduction!
    perfect(file, json_conf)

if cmd == 'shd' or cmd == 'sync_hd':
    # sync HD frames for this reduction
    sync_hd_frames(file, json_conf)

if cmd == 'dbp':
    # run bright-point detection
    detect_bp(file, json_conf)
|
# Build the odd numbers 1..97, print the list, then print the first half
# of it element by element followed by how many elements were printed.
L = list(range(1, 99, 2))  # replaces the manual while-loop accumulator
print(L)
print('dsfsdfsdfdsdf=====',len(L) / 2)
# Bug fix (idiom): the counter was named ``sum``, shadowing the builtin.
count = 0
n = 0
# len(L)/2 is 24.5 (float division), so indices 0..24 are printed.
while n < len(L) / 2:
    print(L[n])
    count += 1
    n += 1
print(count)
|
# Python 2 script (print statements): maps Caffe PSPNet layer names from
# caffe_name.txt to their TensorFlow variable-scope names and dumps the
# mapping to pspnet_dict.json.
import os
import re
import json

data_src = '/home/melody/develop/caffe-tensorflow/caffe_name.txt'
# Caffe parameter name -> TF variable name (kept for reference; the
# commented-out tf_name lines below would have appended these).
param_map = {'variance': 'moving_variance',
             'scale': 'gamma',
             'offset': 'beta',
             'mean': 'moving_mean',
             'weights': 'weights'}
# Pyramid-pooling level in Caffe naming -> level index in TF naming.
psp_map = {
    '1': '1',
    '2': '2',
    '3': '3',
    '6': '4'}
caffe_tf_name = {}
with open(data_src) as fd:
    lines = fd.readlines()
# Each input line: "<caffe layer name> <parameter name>".
lines = [[a_line.strip().split()[0], a_line.strip().split()[1]] for a_line in lines]
tf_prefix = 'pspnet_v1_101'
for i, a_line in enumerate(lines):
    a_name = a_line[0]
    param = a_line[1]
    assert a_name.startswith('conv'), a_name
    is_psp = False
    # NOTE(review): ``if True:`` makes the trailing ``else`` branch dead
    # code — presumably a filtering condition was removed; confirm.
    if True:
        add_conv_id = False
        # Standard bottleneck layers look like "conv<block>_<unit>_<KxK>".
        match = re.search('\d+_\d+_\d+x\d+', a_name)
        if not match:
            if 'pool' in a_name:
                # Pyramid-pooling branch layers.
                pattern = '\d+_\d+_pool\d+'
                is_psp = True
            elif a_name.startswith('conv5_4'):
                pattern = 'conv5_4'
                pattern = '5_4'
            # elif a_name.startswith('conv6'):
            #     pattern = 'conv6'
            # elif a_name.startswith('conv_aux'):
            #     pattern = 'conv_aux'
            else:
                print 'invalid', a_name
                continue
            match = re.search(pattern, a_name)
        else:
            # KxK layers: choose the conv index inside the bottleneck unit.
            if 'reduce' in a_name:
                conv_id = 1
            elif 'increase' in a_name:
                conv_id = 3
            else:
                conv_id = 2
            add_conv_id = True
        postfix = a_name[match.span()[1]:]
        info = match.group()
        print '<<<', info, a_name, match.span()
        # info = re.findall('\d', info)
        # Split the matched name into block / unit / extra components.
        info = re.split('[\s,.,x_]', info)
        # info = re.split('[\D]', info)
        print '>>>',info
        block_id = int(info[0])
        unit_id = int(info[1])
        tf_block_id = 'block{}'.format(block_id - 1)
        if add_conv_id:
            op_name = 'conv{}'.format(conv_id)
        else:
            op_name = 'conv{}'.format(unit_id)
        if 'proj' in postfix:
            op_name = 'shortcut'
        # Special-case scopes: pyramid pooling levels and the final fc block.
        if is_psp:
            tf_block_id = 'pyramid_pool_module/level{}/pyramid_pool_v1'.format(psp_map[info[2][-1]])
            op_name = 'conv1'
        elif a_name.startswith('conv5_4'):
            tf_block_id = 'fc1'
            op_name = ''
        else:
            if block_id == 1:
                tf_block_id = 'root'
                op_name = 'conv{}'.format(unit_id)
            else:
                tf_block_id = '{}/unit_{}/bottleneck_v1'.format(tf_block_id, unit_id)
        # BatchNorm layers get a nested scope under the conv op.
        if postfix.endswith('bn'):
            if op_name == '':
                op_name = 'BatchNorm'
            else:
                op_name = '{}/BatchNorm'.format(op_name)
        if op_name == '':
            # tf_name = '{}/{}/{}'.format(tf_prefix, tf_block_id, param_map[param])
            tf_name = '{}/{}'.format(tf_prefix, tf_block_id)
        else:
            # tf_name = '{}/{}/{}/{}'.format(tf_prefix, tf_block_id, op_name, param_map[param])
            tf_name = '{}/{}/{}'.format(tf_prefix, tf_block_id, op_name)
        print a_name, ' ---> ', tf_name
        caffe_tf_name[a_name] = tf_name
    else:
        print '->', a_name
# Output heads mapped explicitly.
caffe_tf_name['conv6'] = 'pspnet_v1_101/logits'
caffe_tf_name['conv_aux'] = 'pspnet_v1_101/aux_logits'
with open('pspnet_dict.json', 'w') as fd:
    json.dump(caffe_tf_name, fd, sort_keys=True, indent=4)
#!/usr/bin/env python3.6
# Author: Eric Turgeon
# License: BSD
# Location for tests into REST API of FreeNAS
import unittest
import sys
import os
import xmlrunner
apifolder = os.getcwd()
sys.path.append(apifolder)
from functions import POST
from auto_config import results_xml
# Harness flags: whether this module's tests should run, and a display name.
RunTest = True
TestName = "create group"
class group_test(unittest.TestCase):
    """REST API test: group creation via POST /account/groups/."""

    def test_01_Creating_group_testgroup(self):
        # Expect HTTP 201 (created). POST is the project helper from
        # ``functions`` and appears to return the status code — TODO confirm.
        payload = {"bsdgrp_gid": 1200, "bsdgrp_group": "testgroup"}
        assert POST("/account/groups/", payload) == 201
def run_test():
    """Run the group_test case, writing JUnit-style XML results to results_xml."""
    suite = unittest.TestLoader().loadTestsFromTestCase(group_test)
    xmlrunner.XMLTestRunner(output=results_xml, verbosity=2).run(suite)
# Module-level trigger: runs on import as well as direct execution — the
# surrounding test harness appears to rely on this (no __main__ guard).
if RunTest is True:
    print('\n\nStarting %s tests...' % TestName)
    run_test()
|
from source.geometry.geometric_functions import GeometricFunctions
class Insulation(object):
    """Geometry helpers for modelling winding insulation (and resin filling)
    as 1D elements in an ANSYS model.

    All heavy lifting is delegated to GeometricFunctions; methods here only
    combine those primitives.
    """

    @staticmethod
    def return_insulation_single_element_area(diameter_strand, diameter_strand_with_insulation, total_winding_length,
                                              number_of_elements, contact_correction_factor):
        """
        Returns area of a single 1D insulation element for ANSYS geometry
        :param diameter_strand: bare strand diameter, as float
        :param diameter_strand_with_insulation: strand diameter including insulation, as float
        :param total_winding_length: length of a single winding as float
        :param number_of_elements: number of insulation elements in one winding as float
        :param contact_correction_factor: fraction of insulation in contact, as float
        :return: as float
        """
        area_strand = GeometricFunctions.calculate_circle_area(diameter_strand)
        area_strand_with_insulation = GeometricFunctions.calculate_circle_area(diameter_strand_with_insulation)
        # Insulation cross-section = annulus between the two circles.
        area_insulation = GeometricFunctions.subtract_area_from_area(area_strand, area_strand_with_insulation)
        volume_insulation = GeometricFunctions.calculate_volume_from_area_and_height(area_insulation,
                                                                                    height=total_winding_length)
        # NOTE(review): the 0.25 factor presumably attributes a quarter of the
        # insulation to each side of the element — confirm against the model
        # derivation before changing.
        element_area = 0.25 * contact_correction_factor * volume_insulation / Insulation.\
            get_insulation_side(diameter_strand, diameter_strand_with_insulation)
        return element_area / number_of_elements

    @staticmethod
    def return_insulation_resin_single_element_volume(winding_side, diameter_strand_with_insulation,
                                                      diameter_strand, contact_correction_factor, total_winding_length,
                                                      resin_filling_correction_factor, number_of_elements):
        """Return the volume of a single combined insulation+resin element.

        Resin fills the space between the square winding cross-section and the
        insulated-strand circle; insulation is the annulus between the bare and
        insulated strand. Each volume is scaled by its correction factor and the
        sum is split over ``number_of_elements``.
        """
        area_winding = GeometricFunctions.calculate_rectangular_area(winding_side, winding_side)
        area_strand_with_insulation = GeometricFunctions.calculate_circle_area(diameter_strand_with_insulation)
        area_strand = GeometricFunctions.calculate_circle_area(diameter_strand)
        area_resin = GeometricFunctions.subtract_area_from_area(area_strand_with_insulation, area_winding)
        volume_resin_winding = GeometricFunctions.calculate_volume_from_area_and_height(area_resin,
                                                                                        total_winding_length)
        area_insulation = GeometricFunctions.subtract_area_from_area(area_strand, area_strand_with_insulation)
        volume_insulation = GeometricFunctions.calculate_volume_from_area_and_height(area_insulation,
                                                                                    total_winding_length)
        # Apply fill/contact correction factors to the raw volumes.
        volume_resin_winding_corrected = volume_resin_winding * resin_filling_correction_factor
        volume_insulation_corrected = volume_insulation * (1.0 - contact_correction_factor)
        final_volume = (volume_insulation_corrected + volume_resin_winding_corrected) / number_of_elements
        return final_volume

    @staticmethod
    def calculate_average_insulation_perimeter(winding_side1, winding_side2, strand_diameter):
        """Return the mean of the winding rectangle perimeter and the strand
        circle perimeter, used as the effective insulation perimeter."""
        winding_perimeter = GeometricFunctions.calculate_rectangular_perimeter(winding_side1, winding_side2)
        strand_perimeter = GeometricFunctions.calculate_circle_perimeter(strand_diameter)
        return (winding_perimeter + strand_perimeter) / 2.0

    @staticmethod
    def calculate_eff_insulation_length(cross_sectional_insulation_area, average_insulation_perimeter):
        """
        Returns the effective insulation length (area divided by perimeter)
        :param cross_sectional_insulation_area: as float
        :param average_insulation_perimeter: as float
        :return: as float
        """
        return cross_sectional_insulation_area / average_insulation_perimeter

    @staticmethod
    def get_insulation_side(small_circle, large_circle):
        """
        Returns effective insulation element length for ANSYS geometry
        (half the difference of the two circle diameters)
        :param small_circle: smaller diameter, as float
        :param large_circle: larger diameter, as float
        :return: as float
        :raises ValueError: when small_circle exceeds large_circle or either is negative
        """
        if small_circle > large_circle:
            raise ValueError("ERROR - small circle cannot be larger than the large circle")
        elif small_circle < 0.0 or large_circle < 0.0:
            raise ValueError("ERROR - each input values should be positive")
        return (large_circle - small_circle) / 2.0

    @staticmethod
    def check_input_of_correction_factor(correction_factor):
        """Validate that ``correction_factor`` is a float within [0.0, 1.0].

        :raises ValueError: when the value is outside [0.0, 1.0]
        :raises TypeError: when the value is not a float
        """
        # NOTE(review): the range comparison runs before the isinstance check,
        # so non-numeric inputs raise TypeError from the comparison itself and
        # in-range ints still fail the float check — confirm this is intended.
        if correction_factor > 1.0 or correction_factor < 0.0:
            raise ValueError("Correction factor should be between 0.0 and 1.0")
        elif not isinstance(correction_factor, float):
            raise TypeError("Correction factor should be a float")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.