text stringlengths 38 1.54M |
|---|
from tkinter import *

# Simple grade-average GUI: the user enters a name and three grades, and
# pressing "Get result" appends a pass/fail line to the result box.
window = Tk()
window.geometry("310x150")

name = Entry(window, width=25)
frstgrade = Entry(window, width=25)
secgrade = Entry(window, width=25)
trdgrade = Entry(window, width=25)
resultbx = Text(window, height=1, width=25)

lbnme = Label(window, text="Name:")
lbfst = Label(window, text="First grade:")
lbscnd = Label(window, text="Second grade:")
lbthrd = Label(window, text="Third grade:")
lbrbx = Label(window, text="Result:")

def res():
    """Average the three grade entries and report pass/fail in the result box.

    Bug fix: the original used `if avg > 50 ... elif avg < 50 ...`, so an
    average of exactly 50 produced no output at all.  An average of 50 or
    more now counts as a pass.
    """
    avg = round((int(frstgrade.get()) + int(secgrade.get()) + int(trdgrade.get())) / 3)
    if avg >= 50:
        resultbx.insert(END, name.get() + " got " + str(avg) + " has passed")
    else:
        resultbx.insert(END, name.get() + " got " + str(avg) + " has failed")

resBtn = Button(window, text="Get result", command=res)

# Entry widgets in column 1, their labels in column 0.
name.grid(row=1, column=1)
frstgrade.grid(row=2, column=1)
secgrade.grid(row=3, column=1)
trdgrade.grid(row=4, column=1)
resultbx.grid(row=5, column=1)
resBtn.grid(row=6, column=0, sticky=W)
lbnme.grid(row=1, column=0, sticky=W)
lbfst.grid(row=2, column=0, sticky=W)
lbscnd.grid(row=3, column=0, sticky=W)
lbthrd.grid(row=4, column=0, sticky=W)
lbrbx.grid(row=5, column=0, sticky=W)

window.mainloop()
|
import os
import testinfra.utils.ansible_runner
# Resolve the Molecule inventory into the list of hosts this testinfra
# suite runs against (the 'all' group).
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')


def test_lib_svn_home_dir(host):
    """Verify the libsvn repository directory exists and is owned by svn:svn."""
    f = host.file('/srv/svn/repos/libsvn')
    assert f.exists
    assert f.user == 'svn'
    assert f.group == 'svn'
|
import os
def GetParentPath(path, layer):
    """Return *path* with *layer* trailing components stripped.

    Applies os.path.dirname *layer* times; layer == 0 returns the path
    unchanged.
    """
    result = path
    for _ in range(layer):
        result = os.path.dirname(result)
    return result
class Config:
    """Static judge configuration: scoring bounds, error codes with display
    messages, and case/runtime limits.

    NOTE(review): this file's indentation and encoding were mangled; the
    error-message strings below are mojibake of Chinese text and three of
    them contained stray line breaks that have been rejoined — restore the
    original UTF-8 text from version control.
    """

    # Deployment stage identifier.
    STAGE_ID = "pre"

    # Clamping bounds for a submission's score inputs.
    PWR_MAXN = 2999999999
    TIME_MAXN = 30
    # Maximum achievable combined score (see GetFinalScore).
    SCORE_MAXN = PWR_MAXN * TIME_MAXN + TIME_MAXN - 1

    def GetFinalScore(sumPwr, avgTime):
        """Combine clamped power and time into a single score.

        NOTE: declared without @staticmethod/self; under Python 3 it still
        works when called as Config.GetFinalScore(...).
        NOTE(review): the trailing `avgTime // Config.TIME_MAXN` evaluates
        to 0 for every clamped avgTime (< TIME_MAXN), which conflicts with
        the SCORE_MAXN formula above (that implies `+ avgTime`) — confirm
        the intended formula.
        """
        if sumPwr > Config.PWR_MAXN:
            sumPwr = Config.PWR_MAXN
        if avgTime >= Config.TIME_MAXN:
            avgTime = Config.TIME_MAXN - 1
        return sumPwr * Config.TIME_MAXN + avgTime // Config.TIME_MAXN

    # Result / error codes grouped by phase: build (1xx), run (2xx),
    # output validation (3xx), judging (4xx).
    ERR_CODE_SUCCESS = 90000000000
    ERR_CODE_BUILD_FAIL = 90000000101
    ERR_CODE_BUILD_TIMEOUT = 90000000102
    ERR_CODE_RUN_FAIL = 90000000201
    ERR_CODE_RUN_MEMOUT = 90000000202
    ERR_CODE_RUN_TIMEOUT = 90000000203
    ERR_CODE_OUTPUT_EMPTY = 90000000301
    ERR_CODE_OUTPUT_INVALID = 90000000302
    ERR_CODE_OUTPUT_FEW = 90000000303
    ERR_CODE_JUDGE_REPEAT_SEND = 90000000401
    ERR_CODE_JUDGE_REPEAT_RECV = 90000000402
    ERR_CODE_JUDGE_INVALID_SITE = 90000000403
    ERR_CODE_JUDGE_INVALID_SEND = 90000000404
    ERR_CODE_JUDGE_INVALID_RECV = 90000000405
    ERR_CODE_JUDGE_RECV_ALSO_MID = 90000000406
    ERR_CODE_JUDGE_EDGE_NONE = 90000000407
    ERR_CODE_JUDGE_EDGE_CONFLICT = 90000000408
    ERR_CODE_JUDGE_ROUTE_LENOUT = 90000000409
    ERR_CODE_JUDGE_SEND_LEFT = 90000000410

    # Error code -> human-readable message (mojibake-damaged Chinese).
    ERR_MSG_MGR = {
        ERR_CODE_BUILD_FAIL : "็ผ่ฏ้่ฏฏ",
        ERR_CODE_BUILD_TIMEOUT : "็ผ่ฏ่ถๆถ",
        ERR_CODE_RUN_FAIL : "่ฟ่ก้่ฏฏ",
        ERR_CODE_RUN_MEMOUT : "่ฟ่ก้่ฏฏ-ๅๅญๆบขๅบ",
        ERR_CODE_RUN_TIMEOUT : "่ฟ่ก่ถๆถ",
        ERR_CODE_OUTPUT_EMPTY : "่พๅบ้่ฏฏ-ๆ ่พๅบ",
        ERR_CODE_OUTPUT_INVALID : "่พๅบ้่ฏฏ-้ๆณ็ฌฆๅท",
        ERR_CODE_OUTPUT_FEW : "่พๅบ้่ฏฏ-ๆๅ็ปๆ",
        ERR_CODE_JUDGE_REPEAT_SEND : "ๆนๆก้่ฏฏ-้ๅค่งๅไบๅไธไธชๅๅฐๅบ็ซ",
        ERR_CODE_JUDGE_REPEAT_RECV : "ๆนๆก้่ฏฏ-้ๅค่งๅไบๅไธไธชๆฅๆถๅซๆ",
        ERR_CODE_JUDGE_INVALID_SITE : "ๆนๆก้่ฏฏ-่งๅไบ้ๆณ็็ซ็น",
        ERR_CODE_JUDGE_INVALID_SEND : "ๆนๆก้่ฏฏ-่งๅไบ้ๆณ็ๅๅฐๅบ็ซ",
        ERR_CODE_JUDGE_INVALID_RECV : "ๆนๆก้่ฏฏ-่งๅไบ้ๆณ็ๆฅๆถๅซๆ",
        ERR_CODE_JUDGE_RECV_ALSO_MID : "ๆนๆก้่ฏฏ-ๆฅๆถๅซๆๅๆถ่งๅๆไบไธญ่ฝฌๅซๆ",
        ERR_CODE_JUDGE_EDGE_NONE : "ๆนๆก้่ฏฏ-่งๅไบไธๅญๅจ็้ไฟก้้",
        ERR_CODE_JUDGE_EDGE_CONFLICT : "ๆนๆก้่ฏฏ-่งๅไบๅฒ็ช็้ไฟก้้",
        ERR_CODE_JUDGE_ROUTE_LENOUT : "ๆนๆก้่ฏฏ-่งๅไบ่ฟ้ฟ็้ไฟก่ทฏๅพ",
        ERR_CODE_JUDGE_SEND_LEFT : "ๆนๆก้่ฏฏ-ๆช่งๅๆๆ็ๅๅฐๅบ็ซ",
    }

    # Project root: parent directory of the current working directory.
    PATH_ROOT = GetParentPath(os.getcwd(), 1) + "/"

    # Per-case wall-clock limits, in seconds.
    TIME_MAX_RUN = 300
    TIME_MAX_BUILD = 60
    NODE_MAX_ID = 10000

    # Test case names; CASE_NUM is derived from this list.
    CASE_NAME_VEC = [
        "Example",
        "TestData_24",
    ]
    CASE_NUM = len(CASE_NAME_VEC)
#!/usr/bin/env python
import roslib; roslib.load_manifest('kurt_base')
import rospy
from sensor_msgs.msg import JointState


def fake_wheel_publisher():
    """Publish zeroed wheel JointState messages on /joint_states until shutdown."""
    publisher = rospy.Publisher('/joint_states', JointState, queue_size=100)
    rospy.init_node('fake_wheel_publisher')
    # Six wheel joints of the Kurt base, all reported at position 0.0.
    wheel_names = ['left_front_wheel_joint', 'left_middle_wheel_joint', 'left_rear_wheel_joint', 'right_front_wheel_joint', 'right_middle_wheel_joint', 'right_rear_wheel_joint']
    while not rospy.is_shutdown():
        state = JointState()
        state.header.stamp = rospy.Time.now()
        state.name = wheel_names
        state.position = [0.0] * 6
        publisher.publish(state)
        rospy.sleep(0.02)


if __name__ == '__main__':
    try:
        fake_wheel_publisher()
    except rospy.ROSInterruptException: pass
|
#-*- coding: utf-8 -*-
import requests
import pandas
from bs4 import BeautifulSoup
from collections import defaultdict
'''
spider is crawler bot
'''
def spider():
    """Crawl the DBWorld listing page and collect announcement links.

    Returns a dict mapping a running index k to a one-entry {href: title}
    mapping for every <td><a> link whose text is not 'web page'.
    (Original comments were Korean and mojibake-damaged; translated.)
    """
    cfp_dic = defaultdict(lambda: defaultdict(str))
    k = 0
    url = 'https://research.cs.wisc.edu/dbworld/browse.html'  # page to crawl
    source_code = requests.get(url)
    plain_text = source_code.text
    soup = BeautifulSoup(plain_text, 'lxml')  # parse the fetched HTML into a tag tree
    for link in soup.select('td > a'):  # only anchors directly inside table cells
        href = link.get('href')
        title = link.string
        if title != 'web page':  # skip the unwanted 'web page' links
            cfp_dic[k][href] = title
            k += 1
    return cfp_dic
def get_single_article(item_url):
    """Fetch *item_url* and return the concatenated text of its <body>."""
    response = requests.get(item_url)
    # Force UTF-8 before reading .text so the article decodes correctly.
    response.encoding = 'utf-8'
    soup = BeautifulSoup(response.text, 'lxml', from_encoding='utf-8')
    parts = []
    for section in soup.select('body '):
        parts.append(section.text)
    return ''.join(parts)
def save_dbworld(file_name, dbworld_list):
    """Download DBWorld articles and save them into ten text files.

    File *i* (0-9) receives the bodies of articles 100*i .. 100*i+99 from
    *dbworld_list* (as produced by spider(): {index: {url: title}}), one
    article per write, separated by newlines.

    Bug fix: the original accumulated every article into one ever-growing
    string that was never reset and was re-written after each index, so
    file N also contained the contents of files 0..N-1 (the original
    comment complained the files grew to multiple GB).  The redundant
    f.close() inside the `with` block is gone too.
    """
    for count in range(0, 10):
        with open('./gs_data/' + file_name + str(count) + '.txt', 'w', encoding='UTF-8') as f:
            for nlist in range(count * 100, 100 + count * 100):
                # Each entry maps the article URL to its title; only the
                # URL (key) is needed for the download.
                for key, value in dbworld_list[nlist].items():
                    f.write(get_single_article(key))
                    f.write('\n')
|
"""Extract data on near-Earth objects and close approaches from CSV and JSON files.
The `load_neos` function extracts NEO data from a CSV file, formatted as
described in the project instructions, into a collection of `NearEarthObject`s.
The `load_approaches` function extracts close approach data from a JSON file,
formatted as described in the project instructions, into a collection of
`CloseApproach` objects.
The main module calls these functions with the arguments provided at the command
line, and uses the resulting collections to build an `NEODatabase`.
You'll edit this file in Task 2.
"""
import csv
import json
from models import NearEarthObject, CloseApproach
def load_neos(neo_csv_path):
    """Read near-Earth object information from a CSV file.

    :param neo_csv_path: A path to a CSV file containing data about near-Earth objects.
    :return: A list of `NearEarthObject`s.
    """
    # Load NEO data from the given CSV file.
    neo_collection = []
    with open(neo_csv_path, 'r') as infile:
        reader = csv.DictReader(infile)
        for elem in reader:
            # 'pha' is "Y" for potentially hazardous asteroids; anything
            # else counts as not hazardous.
            hazardous = elem["pha"] == "Y"
            if elem["diameter"] != '':
                neo_collection.append(NearEarthObject(
                    elem["pdes"], elem["name"], hazardous, float(elem["diameter"])))
            else:
                # Diameter unknown: fall back to NearEarthObject's default.
                neo_collection.append(NearEarthObject(
                    elem["pdes"], elem["name"], hazardous))
    return neo_collection
# print(load_neos('./data/neos.csv'))
def load_approaches(cad_json_path):
    """Read close approach data from a JSON file.

    :param cad_json_path: A path to a JSON file containing data about close approaches.
    :return: A list of `CloseApproach`es.

    (Doc fix: the original docstring documented the parameter under the
    wrong name `neo_csv_path`.)
    """
    # Load close approach data from the given JSON file.
    ca_collection = []
    with open(cad_json_path, 'r') as infile:
        contents = json.load(infile)
    # Each entry of contents["data"] is a list; the feed puts
    # des/cd/dist/v_rel at indices 0/3/4/7 respectively.
    for entry in contents["data"]:
        pdes, time, distance, velocity = entry[0], entry[3], entry[4], entry[7]
        ca_collection.append(CloseApproach(pdes, time, distance, velocity))
    return ca_collection
# print(load_approaches('./data/cad.json'))
|
#!/usr/bin/env python
from folds import Vertex, Edge, collinear, foot_of_altitude, quadrance
# Smoke tests for the folds geometry primitives, executed at import time.

# Quadrance (squared distance) between two unit-separated vertices.
v1 = Vertex(0, 0, 0)
v2 = Vertex(0, 1, 0)
assert quadrance(v1, v2) == 1
# An edge built on the same pair reports the same quadrance.
e1 = Edge(v1, v2)
assert e1.quadrance() == 1
# Collinearity: v3 lies on the line through v1-v2, v4 does not.
v3 = Vertex(0, 2, 0)
v4 = Vertex(1, 0, 0)
assert collinear(v1, v2, v3) is True
assert collinear(v1, v2, v4) is False
# The foot of the altitude from v5 onto edge v1-v6 lands at v4.
v5 = Vertex(1, 2, 0)
v6 = Vertex(2, 0, 0)
e2 = Edge(v1, v6)
assert foot_of_altitude(v5, e2) == v4
|
# coding: utf-8

# Jupyter-notebook export: downloads the DigitalGlobe/Tomnod Hurricane Irma
# crowdsourcing dataset and then fetches every referenced image chip.
# Original note: "Downloads and Uploads data from AWS Earth Program and
# uploads to GCS. Super inefficient but it works."

# In[1]:
ec2_input_path = "/volumes/data/hackathon_for_good/input"

# In[2]:
ec2_output_path = "/volumes/data/hackathon_for_good/output"

# In[3]:
# Shell escapes from the notebook environment; require IPython at runtime.
get_ipython().system('mkdir -p {ec2_input_path}')
get_ipython().system('mkdir -p {ec2_output_path}')

# In[4]:
url = "http://opendata.digitalglobe.com/hurricane-irma/vector-data/2017-09-26/tomnod20170926/digitalglobe_crowdsourcing_hurricane_irma_20170926.zip"

# In[5]:
get_ipython().system('wget -O {ec2_input_path}/tomnod.zip {url}')

# In[6]:
get_ipython().system('unzip -o /volumes/data/hackathon_for_good/input/tomnod.zip -d /volumes/data/hackathon_for_good/input/')

# In[7]:
import geopandas as gpd
import subprocess

# In[8]:
input_path = "/volumes/data/hackathon_for_good/input/digitalglobe_crowdsourcing_hurricane_irma_20170926/digitalglobe_crowdsourcing_hurricane_irma_20170926.geojson"

# In[9]:
gdf = gpd.GeoDataFrame.from_file(input_path)

# In[10]:
gdf.shape

# In[11]:
gdf.head()

# In[12]:
# Fetch each crowdsourced image chip referenced by the GeoJSON.
for index, row in gdf.iterrows():
    print(index)  # progress indicator
    s3_input_url = row["chip_url"]
    # NOTE(review): format() receives two arguments but the template has a
    # single placeholder, so row["label"] is silently ignored — was
    # "{}_{}.jpg" intended?
    output_file_name = "{}.jpg".format(row["id"],row["label"])
    # NOTE(review): shell=True with an interpolated URL is acceptable for
    # this trusted dataset, but unsafe for untrusted input.
    command = "wget -O {}/{} {}".format(ec2_output_path,output_file_name,s3_input_url)
    subprocess.check_output(command,shell=True)

# In[ ]:
|
# Write a 10x10 multiplication table to Table.txt (tab-separated) and echo
# it to stdout.  Fix: the original opened the file without closing it via a
# context manager, leaking the handle if an error occurred mid-write.
with open('Table.txt', 'w') as f:
    for i in range(1, 11):
        for j in range(1, 11):
            f.write(str(i * j) + '\t')
            print(i * j, end='\t')
        # End of one table row (note: each row ends with a trailing tab).
        f.write('\n')
        print()
|
# -*- coding: utf-8 -*-
# Copyright (C) 2015-2019 by Brendt Wohlberg <brendt@ieee.org>
# All rights reserved. BSD 3-clause License.
# This file is part of the SPORCO package. Details of the copyright
# and user license can be found in the 'LICENSE.txt' file distributed
# with the package.
"""Classes for ADMM algorithms for Robust PCA optimisation"""
from __future__ import division
from __future__ import absolute_import
import copy
import numpy as np
from sporco.admm import admm
import sporco.prox as sp
from sporco.util import u
__author__ = """Brendt Wohlberg <brendt@ieee.org>"""
class RobustPCA(admm.ADMM):
    r"""ADMM algorithm for Robust PCA problem :cite:`candes-2011-robust`
    :cite:`cai-2010-singular`.

    Solve the optimisation problem

    .. math::
       \mathrm{argmin}_{X, Y} \;
       \| X \|_* + \lambda \| Y \|_1 \quad \text{such that}
       \quad X + Y = S \;\;.

    This problem is unusual in that it is already in ADMM form without
    the need for any variable splitting.

    After termination of the :meth:`solve` method, attribute :attr:`itstat` is
    a list of tuples representing statistics of each iteration. The
    fields of the named tuple ``IterationStats`` are:

       ``Iter`` : Iteration number

       ``ObjFun`` : Objective function value

       ``NrmNuc`` : Value of nuclear norm term :math:`\| X \|_*`

       ``NrmL1`` : Value of :math:`\ell_1` norm term :math:`\| Y \|_1`

       ``Cnstr`` : Constraint violation :math:`\| X + Y - S\|_2`

       ``PrimalRsdl`` : Norm of primal residual

       ``DualRsdl`` : Norm of dual residual

       ``EpsPrimal`` : Primal residual stopping tolerance
       :math:`\epsilon_{\mathrm{pri}}`

       ``EpsDual`` : Dual residual stopping tolerance
       :math:`\epsilon_{\mathrm{dua}}`

       ``Rho`` : Penalty parameter

       ``Time`` : Cumulative run time
    """

    class Options(admm.ADMM.Options):
        """RobustPCA algorithm options

        Options include all of those defined in
        :class:`sporco.admm.admm.ADMM.Options`, together with
        an additional option:

          ``fEvalX`` : Flag indicating whether the :math:`f` component
          of the objective function should be evaluated using variable
          X (``True``) or Y (``False``) as its argument.

          ``gEvalY`` : Flag indicating whether the :math:`g` component
          of the objective function should be evaluated using variable
          Y (``True``) or X (``False``) as its argument.
        """

        # Start from the base ADMM defaults and override the settings that
        # matter for Robust PCA (over-relaxation and automatic rho tuning).
        defaults = copy.deepcopy(admm.ADMM.Options.defaults)
        defaults.update({'gEvalY': True, 'fEvalX': True, 'RelaxParam': 1.8})
        defaults['AutoRho'].update({'Enabled': True, 'Period': 1,
                                    'AutoScaling': True, 'Scaling': 1000.0,
                                    'RsdlRatio': 1.2})

        def __init__(self, opt=None):
            """
            Parameters
            ----------
            opt : dict or None, optional (default None)
              RobustPCA algorithm options
            """
            if opt is None:
                opt = {}
            admm.ADMM.Options.__init__(self, opt)

            # Default residual-ratio target for the auto-rho heuristic.
            if self['AutoRho', 'RsdlTarget'] is None:
                self['AutoRho', 'RsdlTarget'] = 1.0

    # Field names / column headers used by the iteration-statistics
    # machinery in the admm.ADMM base class.
    itstat_fields_objfn = ('ObjFun', 'NrmNuc', 'NrmL1', 'Cnstr')
    hdrtxt_objfn = ('Fnc', 'NrmNuc', u('Nrmโ1'), 'Cnstr')
    hdrval_objfun = {'Fnc': 'ObjFun', 'NrmNuc': 'NrmNuc',
                     u('Nrmโ1'): 'NrmL1', 'Cnstr': 'Cnstr'}

    def __init__(self, S, lmbda=None, opt=None):
        """
        Parameters
        ----------
        S : array_like
          Signal vector or matrix
        lmbda : float
          Regularisation parameter
        opt : RobustPCA.Options object
          Algorithm options
        """
        if opt is None:
            opt = RobustPCA.Options()

        # Set dtype attribute based on S.dtype and opt['DataType']
        self.set_dtype(opt, S.dtype)

        # Set default lambda value if not specified
        if lmbda is None:
            lmbda = 1.0 / np.sqrt(S.shape[0])
        self.lmbda = self.dtype.type(lmbda)

        # Set penalty parameter
        self.set_attr('rho', opt['rho'], dval=(2.0*self.lmbda + 0.1),
                      dtype=self.dtype)

        # X and Y share S's shape; Nx is the total variable size.
        Nx = S.size
        super(RobustPCA, self).__init__(Nx, S.shape, S.shape, S.dtype, opt)
        self.S = np.asarray(S, dtype=self.dtype)

    def uinit(self, ushape):
        """Return initialiser for working variable U"""
        if self.opt['Y0'] is None:
            return np.zeros(ushape, dtype=self.dtype)
        else:
            # If initial Y is non-zero, initial U is chosen so that
            # the relevant dual optimality criterion (see (3.10) in
            # boyd-2010-distributed) is satisfied.
            return (self.lmbda/self.rho)*np.sign(self.Y)

    def solve(self):
        """Start (or re-start) optimisation and return the final (X, Y) pair."""
        super(RobustPCA, self).solve()
        return self.X, self.Y

    def xstep(self):
        r"""Minimise Augmented Lagrangian with respect to
        :math:`\mathbf{x}`.
        """
        # Nuclear-norm proximal operator (SVD soft-thresholding); the
        # singular values are cached in self.ss for reuse in eval_objfn.
        self.X, self.ss = sp.prox_nuclear(self.S - self.Y - self.U,
                                          1/self.rho)

    def ystep(self):
        r"""Minimise Augmented Lagrangian with respect to
        :math:`\mathbf{y}`.
        """
        # Elementwise l1 proximal operator (soft-thresholding).
        self.Y = np.asarray(sp.prox_l1(self.S - self.AX - self.U,
                                       self.lmbda/self.rho), dtype=self.dtype)

    def obfn_fvar(self):
        """Variable to be evaluated in computing regularisation term,
        depending on 'fEvalX' option value.
        """
        if self.opt['fEvalX']:
            return self.X
        else:
            return self.cnst_c() - self.cnst_B(self.Y)

    def obfn_gvar(self):
        """Variable to be evaluated in computing regularisation term,
        depending on 'gEvalY' option value.
        """
        if self.opt['gEvalY']:
            return self.Y
        else:
            return self.cnst_c() - self.cnst_A(self.X)

    def eval_objfn(self):
        """Compute components of objective function as well as total
        contribution to objective function.
        """
        if self.opt['fEvalX']:
            # Reuse the singular values computed in the last xstep.
            rnn = np.sum(self.ss)
        else:
            rnn = sp.norm_nuclear(self.obfn_fvar())
        rl1 = np.sum(np.abs(self.obfn_gvar()))
        cns = np.linalg.norm(self.X + self.Y - self.S)
        obj = rnn + self.lmbda*rl1
        return (obj, rnn, rl1, cns)

    def cnst_A(self, X):
        r"""Compute :math:`A \mathbf{x}` component of ADMM problem
        constraint. In this case :math:`A \mathbf{x} = \mathbf{x}`.
        """
        return X

    def cnst_AT(self, X):
        r"""Compute :math:`A^T \mathbf{x}` where :math:`A \mathbf{x}` is
        a component of ADMM problem constraint. In this case
        :math:`A^T \mathbf{x} = \mathbf{x}`.
        """
        return X

    def cnst_B(self, Y):
        r"""Compute :math:`B \mathbf{y}` component of ADMM problem
        constraint. In this case :math:`B \mathbf{y} = -\mathbf{y}`.
        """
        # NOTE(review): docstring says B y = -y but the code returns Y;
        # for the constraint X + Y = S, B is the identity, so the code
        # looks right and the docstring looks copy-pasted — confirm
        # against upstream SPORCO.
        return Y

    def cnst_c(self):
        r"""Compute constant component :math:`\mathbf{c}` of ADMM problem
        constraint. In this case :math:`\mathbf{c} = \mathbf{s}`.
        """
        return self.S
|
from django.contrib import admin
from django.contrib.auth.models import User as AuthUser
from django.contrib.auth.admin import UserAdmin as AuthUserAdmin
from django.template.defaultfilters import truncatechars
from django.urls import reverse
from django.utils.html import format_html
from . import models
from blog.lib import utils
# codestart:AuthorUser
class AuthorInline(admin.StackedInline):
    """Inline editor for the single Author record attached to an auth User."""
    model = models.Author
    max_num = 1
    can_delete = False


class AuthorUser(AuthUserAdmin):
    """Standard Django user admin extended with the Author inline."""
    inlines = [AuthorInline]


# Replace Django's default User admin with the Author-aware one.
admin.site.unregister(AuthUser)
admin.site.register(AuthUser, AuthorUser)
# codeend:AuthorUser
# codestart:AplModel
def set_author_by_user(request, org_list, item_name):
    """Return *org_list* as a list, dropping *item_name* for non-superusers.

    Used to hide author-related columns/filters from regular users, whose
    querysets are already scoped to their own Author.
    """
    visible = list(org_list)
    if request.user.is_superuser:
        return visible
    visible.remove(item_name)
    return visible
class AplModelAdmin(admin.ModelAdmin):
    """Base admin that scopes records to the logged-in user's Author.

    Superusers see and edit everything; regular users only see rows whose
    author is their own Author, and the 'author' field is hidden from them
    and filled in automatically on save.
    """

    def get_queryset(self, request):
        # Non-superusers only see their own records.
        queryset = super().get_queryset(request)
        if not request.user.is_superuser:
            queryset = queryset.filter(author=request.user.author)
        return queryset

    def get_list_display(self, request):
        # Hide the 'author' column from non-superusers.
        return set_author_by_user(request, super().get_list_display(request), 'author')

    def get_list_filter(self, request):
        return set_author_by_user(request, super().get_list_filter(request), 'author')

    def get_exclude(self, request, obj=None):
        # Hide the 'author' form field from non-superusers; save_model
        # below supplies the value instead.
        exclude = super().get_exclude(request, obj)
        if not request.user.is_superuser:
            if not exclude:
                exclude = []
            # NOTE(review): if super() returns a non-empty tuple this
            # append raises AttributeError; appending to a shared list
            # would also mutate it — confirm subclass configurations.
            exclude.append('author')
        return exclude

    def save_model(self, request, obj, form, change):
        # Force ownership for non-superusers regardless of form input.
        if not request.user.is_superuser:
            obj.author = request.user.author
        obj.save()
# codeend:AplModel
# codestart:RelatedModel
class RelatedModelAdmin(admin.ModelAdmin):
    """Base admin for models related to Post, scoped via post__author."""

    def get_queryset(self, request):
        # Non-superusers only see rows belonging to their own posts.
        queryset = super().get_queryset(request)
        if not request.user.is_superuser:
            queryset = queryset.filter(post__author=request.user.author)
        return queryset

    def author(sef, obj):
        # List-column helper.  ('sef' is a typo for 'self' used throughout
        # this file; Django calls these positionally, so it still works.)
        return obj.post.author

    def get_list_display(self, request):
        # Hide the author column/filter from non-superusers.
        return set_author_by_user(request, super().get_list_display(request), 'author')

    def get_list_filter(self, request):
        return set_author_by_user(request, super().get_list_filter(request), 'post__author')
# codeend:RelatedModel
# codestart:Author
class AuthorAdmin(admin.ModelAdmin):
    """Admin for Author profiles; adding and deleting are disabled because
    Authors are managed through the User admin inline."""
    list_display = ('title_text', 'author_name', 'email', 'flags', 'post')
    ordering = ('id',)
    search_fields = ('author_name', 'title_text', 'email')
    exclude = ('user',)

    def author_name(sef, obj):
        # Username linked to the underlying auth User change page.
        href = reverse('admin:auth_user_change', args=(obj.user.id,))
        return format_html('<a href="{}">{}</a>', href, obj.user.username)

    def post(sef, obj):
        # Post count, linked to the author's public blog index.
        href = reverse('blog:index', args=(obj.user.username,))
        return format_html('<a href="{}" target="blog">{}</a>', href, obj.post_set.count())

    def email(sef, obj):
        return obj.user.email

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False


admin.site.register(models.Author, AuthorAdmin)
# codeend:Author
# codestart:Post
class PostAdmin(AplModelAdmin):
    """Admin for Post, scoped per author by AplModelAdmin."""
    list_display = ['id', 'author', 'title_text', 'status', 'template_text', 'category', 'view_count']
    ordering = ('author', 'id')
    list_filter = ['author', 'status']
    search_fields = ('template_text',)

    def title_text(sef, obj):
        # Title linked to the public detail page of the post.
        href = reverse('blog:detail', args=(obj.author.user.username, obj.id))
        return format_html('<a href="{}" target="blog">{}</a>', href, obj.get_title_text())

    def category(sef, obj):
        # Number of categories attached to this post.
        return obj.postcategory_set.count()


admin.site.register(models.Post, PostAdmin)
# codeend:Post
# codestart:PostContent
class PostContentAdmin(RelatedModelAdmin):
    """Admin for per-language post content."""
    list_display = ('id', 'post', 'language_code_short', 'title_text_link', 'summary_text_short')
    ordering = ('post', 'id', 'language_code')
    list_filter = ('post__author', 'language_code')
    search_fields = ('language_code', 'title_text', 'summary_text')
    raw_id_fields = ('post',)

    def language_code_short(sef, obj):
        return obj.language_code
    language_code_short.short_description = 'lang'

    def title_text_link(sef, obj):
        # Language-aware link to the public detail page.
        href = utils.reverse('blog:detail', obj.language_code, obj.post.author.user.username, obj.post.id)
        return format_html('<a href="{}" target="blog">{}</a>', href, obj.title_text)
    title_text_link.short_description = "title_text"

    def summary_text_short(sef, obj):
        # Truncate to the first 20 characters to keep the column narrow.
        return obj.summary_text[:20]
    summary_text_short.short_description = "summary_text"


admin.site.register(models.PostContent, PostContentAdmin)
# codeend:PostContent
# codestart:Category
class CategoryAdmin(AplModelAdmin):
    """Admin for Category, scoped per author by AplModelAdmin."""
    list_display = ('author', 'category_text', 'order_number')
    ordering = ('author', 'order_number')
    list_filter = ('author',)
    search_fields = ('category_text',)


admin.site.register(models.Category, CategoryAdmin)
# codeend:Category
# codestart:PostCategory
class PostCategoryAdmin(RelatedModelAdmin):
    """Admin for the Post<->Category link table."""
    list_display = ('author', 'category', 'post')
    ordering = ('category', 'post')
    list_filter = ('post__author', 'category')
    search_fields = ('category', 'post')
    raw_id_fields = ('category', 'post')


admin.site.register(models.PostCategory, PostCategoryAdmin)
# codeend:PostCategory
# codestart:Comment
class CommentAdmin(RelatedModelAdmin):
    """Admin for comments, newest first, scoped via post__author."""
    list_display = ('author', 'id', 'comment_text', 'status', 'client_text', 'post', 'parent', 'created_date')
    ordering = ('-id',)
    list_filter = ('post__author', 'status',)
    search_fields = ('comment_text', 'post', 'client_text')


admin.site.register(models.Comment, CommentAdmin)
# codeend:Comment
# Interactive Star Wars text adventure ("Retirement").
# NOTE(review): this file's code indentation was lost; the reconstruction
# below assumes everything between `def intro():` and the final `intro()`
# call is the function body (the call at the end starts the story).
# ASCII-art string contents are preserved verbatim, including the
# mojibake-damaged block-character portrait.
print("made by xXFireShadow700Xx")
from time import sleep

# Title card; sleep() calls throughout are dramatic pacing.
sleep(9)
print(''' _____ ____ ____ __ ___
/ ___/| \ / | / ] / _]
( \_ | o ) o | / / / [_
\__ || _/| |/ / | _]
/ \ || | | _ / \_ | [_
\ || | | | \ || |
\___||__| |__|__|\____||_____|
__ _ __ _ _ __ ___
\ \ /\ / / _` | '__/ __|
\ V V / (_| | | \__ \
___\/\__/\__,__|_| |___/___
Retirement
''')


def intro():
    """Run the whole interactive story: food ordering, then Jedi training."""
    sleep(7)
    print("HELLO. I am 2BB7, your BB-unit who served with you during the war.")
    sleep(5)
    print(
        "the Battle on Exegol has finally ended. The Resistance has won, but a new threat remains..."
    )
    # Character portrait (block characters, mojibake-damaged in this copy).
    print('''
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโ โโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ ''')
    sleep(5)
    # X-wing art.
    print('''
||| |||
| | __ | |
|-|_____-----/ |_| |_| \-----_____|-|
|_|_________{ }| (^) |{ }__________|_|
|| |_| | ^ | |_| ||
| \| /\ |/ |
| \ |--| / |
= \ |__| / =
+ \ / +
\ /
\ /
\ /
\ /
\ /
\ /
\ /
\ /
\/
''')
    sleep(5)
    print( "You are alive. As a Resistance T-70 X-wing pilot, you will settle down in Batuu's Resistance encampment, located in the Black Spire Outpost.")
    # Starfield establishing shot.
    print(''' . + . . . . . .
. . . *
. * . . . . . . + .
"You Are Here on Batuu" . . + . . .
. | . . . . . .
| . . . +. + .
\|/ . . . .
. . V . * . . . . + .
+ . . . +
. . + .+. .
. . . + . . . . .
. . . . . . . . ! /
* . . . + . . - O -
. . . + . . * . . / |
. + . . . .. + .
. . . . * . * . +.. . *
. . . . . . . . + . . +
''')
    sleep(5)
    print("You have infinite credits because the Resistance thanks you for your one year of support. You have a lightsaber somehow. You are hungry. Let's go to Tuggs' Grub, in Docking Bay 7.")
    sleep(3)
    print("You walk to Docking Bay 7.")
    # Docking Bay 7 art.
    print('''
__________________________________________
|.'', ,''.|
|.'.'', ,''.'.|
|.'.'.'', | \ ,''.'.'.|
|.'.'.'.'', | \ ,''.'.'.'.|
|.'.'.'.'.| | \ |.'.'.'.'.|
|.'.'.'.'.|===; | ____\ ;===|.'.'.'.'.|
|.'.'.'.'.|:::|', |______D ,'|:::|.'.'.'.'.|
|.'.'.'.'.|---|'.|, |______|,|.'|---|.'.'.'.'.|
|.'.'.'.'.|:::|'.|'|Docking|'|.'|:::|.'.'.'.'.|
|,',',',',|---|',|'| bay 7 |'|,'|---|,',',',',|
|.'.'.'.'.|:::|'.|'| [ ]|'|.'|:::|.'.'.'.'.|
|.'.'.'.'.|---|',' /%%%\ ','|---|.'.'.'.'.|
|.'.'.'.'.|===:' /%%%%%\ ':===|.'.'.'.'.|
|.'.'.'.'.|%%%%%%%%%%%%%%%%%%%%%%%%%|.'.'.'.'.|
|.'.'.'.',' /%%%%%%%%%\ ','.'.'.'.|
|.'.'.',' /%%%%%%%%%%%\ ','.'.'.|
|.'.',' /%%%%%%%%%%%%%\ ','.'.|
|.',' /%%%%%%%%%%%%%%%\ ','.|
|;____________/%%%%%%%%%%%%%%%%%\____________;|
''')
    sleep(3)
    print(
        " Tuggs is not here, his ship is not here, but the place is still open. Guess I'll eat anyways" )
    sleep(5)
    print("You walk into Tuggs' Grub")
    sleep(7)
    # Food-ordering mini-game: collect items, then validate against the menu.
    requested_toppings = []
    print("Welcome to Tugg's Grub, in Docking Bay 7. We have polystarch portion bread,blue milk,a blue milkshake, Flameout, the space banana(hern jiu),Meiloorun Fruit,Bantha Steak beef soup,Rising Moons Overnight Oats,Bright Suns Breakfast Platter,Bright Suns Youngling Breakfast,Mustafarian Cinnamon Roll,a neuvian sundae,Batuubucha Tea,Phattro,Black Caf, Coke, Diet Coke, Sprite, sweetmallows, and Roasted Porg(The cute bird things native to Ach-to)")
    while True:
        choice = input("What would you like today? ")
        requested_toppings.append(choice)
        choice = input("Would you like another item? y/n ")
        if choice == 'n':
            break
    available_toppings = ['polystarch portion bread','blue milk','a blue milkshake', 'a neuvian sundae','the space banana(hern jiu)','Meiloorun Fruit','Bantha Steak beef soup','Rising Moons Overnight Oats','Bright Suns Breakfast Platter','Bright Suns Youngling Breakfast',
    'Mustafarian Cinnamon Roll','Batuubucha Tea','Phattro','Black Caf','Coke', 'Diet Coke', 'Sprite', 'sweetmallows', 'Flameout','Roasted Porg(The cute bird things native to Ach-to)']
    for topping in requested_toppings:
        if topping in available_toppings:
            print(f'Adding {topping} to your order!')
        # Exact string 'Roasted Porg' is not on the menu list above, so it
        # falls through to this easter-egg branch.
        elif topping == 'Roasted Porg':
            print(f'Why would you eat that? PORGS ARE THE CUTEST THING IN THE WORLD')
        else:
            print(f'We dont have any {topping} right now')
    print("Thank you for your order")
    sleep(10)
    print("you walk out of the resturant. But you notice...")
    sleep(5)
    print("NANI?!? a Teenager Yoda?")
    sleep(5)
    print("'mhm. Train you to be a Jedi, I shall.' He says.")
    sleep(5)
    # Branch point: 'W' refuses training and ends the game.
    choice = input("Do you accept your training? Y or W.")
    if choice == 'W':
        print(
            " He thinks you may be a Sith. He jumps up from behind you and stabs you in the back with his lightsaber."
        )
        sleep(5)
        print('''You died.
_____ __ __ ______ ______ ________ _____
/ ____| /\ | \/ | ____| / __ \ \ / / ____| __ \
| | __ / \ | \ / | |__ | | | \ \ / /| |__ | |__) |
| | |_ | / /\ \ | |\/| | __| | | | |\ \/ / | __| | _ /
| |__| |/ ____ \| | | | |____ | |__| | \ / | |____| | \ \
\_____/_/ \_\_| |_|______| \____/ \/ |______|_| \_\
''')
    else:
        # Accepting training continues the story to the end.
        print(
            "Teen Yoda happily smiles at you, an you can see Tuggs in the dark hallway ahead nodding."
        )
        sleep(5)
        print("Teen Yoda leads you down the dark hallway, and Tuggs joins him.")
        sleep(5)
        print(
            "Baby Yoda looks up at you and says 'Now, your training begins, young Padawan.'"
        )
        sleep(5)
        print(
            "Young? YOUNG? you are obviously the same age tha Teen Yoda is! You're 13!"
        )
        sleep(5)
        print("You roll your eyes and sigh.")
        sleep(5)
        print("Tuggs yells to Teen Yoda in Aurbesh.")
        sleep(5)
        print(
            "'Brought you here to groan, I have not.' he calmly states as he meditates and floats with the Force. ' I am 250 years old. I can read your mind.'"
        )
        sleep(5)
        print(
            "'teach you to lift things with the force, I will. Close your eyes. I see you have a Lightsaber. Do not look, and block the blasts.' He sits, and throws a harmless ball at you. However, it does not look so harmless, yet fires stun blasts at you! you block the blasts, eyes closed. something guides you. The Force. 'Good.' He exclaims. 'Good, good.'"
        )
        sleep(5)
        print(
            "Suddenly, remaing Sith troopers bust through the door, blasters raised. Teen Yoda ignites his lightsaber as Tuggs grabs you with his four hands. 'We need to go.'He whispers as he shoves you into the kitchen. 'They are excucuting members of the Resistance. go to the encampment.' You wonder how he knows about that. ")
        sleep(5)
        print("The Jedi tells you one last thing before sending you off...")
        sleep(5)
        print(
            "'Next week, I will see you in your bunk. Decide if you are yet worthy of training, we then will.'")
        sleep(5)
        print(
            "You run back to the encampment, blocking blaster fire with your lightsaber. you see that part of it is being attacked! You run to your X-Wing.")
        print(
            "As you fly off with your loyal droid(me) you tell me to remind you to fly back.")
        sleep(5)
        print("Soon.")
        sleep(20)


intro()
"""
872. Leaf-Similar Trees
Consider all the leaves of a binary tree. From left to right order, the values of those leaves form a leaf value sequence.
For example, in the given tree above, the leaf value sequence is (6, 7, 4, 9, 8).
Two binary trees are considered leaf-similar if their leaf value sequence is the same.
Return true if and only if the two given trees with head nodes root1 and root2 are leaf-similar.
Note:
Both of the given trees will have between 1 and 100 nodes.
Companies
Google 2
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def leafSimilar(self, root1, root2):
        """
        :type root1: TreeNode
        :type root2: TreeNode
        :rtype: bool

        Two trees are leaf-similar when their left-to-right leaf value
        sequences are identical.
        """
        def leaves(node):
            # Recursive generator yielding leaf values left-to-right.
            if node is None:
                return
            if not node.left and not node.right:
                yield node.val
                return
            for value in leaves(node.left):
                yield value
            for value in leaves(node.right):
                yield value

        return list(leaves(root1)) == list(leaves(root2))
|
class ExamplePolicy:
    """Minimal example of the test-policy interface: always probes size 5
    and declares itself done after two recorded responses."""

    @staticmethod
    def getParamValue(paramIndex):
        """Map a parameter index to its value (index + 1)."""
        return paramIndex + 1

    def __init__(self, paramValue):
        self.paramValue = paramValue
        # Count of responses recorded so far.
        self.nTested = 0

    def getNextSize(self):
        """Next probe size; this trivial policy always asks for 5."""
        return 5

    def recordResponse(self, size, ans):
        """Record one probe result (the values themselves are ignored)."""
        self.nTested = self.nTested + 1

    def isDone(self):
        """True once at least two responses have been recorded."""
        return self.nTested >= 2

    def getAnswer(self):
        """Final answer is simply the constructor-supplied parameter value."""
        return self.paramValue
from lib.BeautifulSoup import BeautifulSoup
import re
def strip_search(html):
    """Extract WebSoc's search <form> and rewrite it for local submission.

    Pulls the form that posts to websoc.reg.uci.edu out of *html*, points
    it at our /schedules endpoint, and swaps the original button row for a
    single "Display Results" submit button.
    """
    form_html = BeautifulSoup(html).find('form', action='http://websoc.reg.uci.edu/')
    #replace form submit with our own link
    form_html['action'] = '/schedules'
    #remove 'Display Text Results' button
    text_buttons = form_html.findAll(attrs={"class" : "banner-width"})
    for i in text_buttons:
        # NOTE(review): replaceWith() given a plain string inserts it as
        # text, so this markup may end up HTML-escaped in str(form_html) —
        # confirm against the BeautifulSoup version bundled in lib/.
        i.replaceWith('<p id=\"submit-container\"><input type="submit" value="Display Results" name="Submit"></p>')
    return str(form_html)
def strip_schedule(html):
    """Return the 'course-list' div from *html*, or an error paragraph if absent."""
    course_list = BeautifulSoup(html).find('div', 'course-list')
    if course_list is None:
        return "<p id=\"error\">No results were found.</p>"
    return str(course_list)
def strip_websoc_version(html):
    """Return the first 'version' snippet (plus up to 8 following characters)
    found in *html*, or a not-found message."""
    match = re.search('version.{,8}', html)
    if match is None:
        return 'Couldn\'t find a match'
    return match.group(0)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# FILE: app/alc_etm_searcher.py
# AUTHOR: haya14busa
# License: MIT license
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#=============================================================================
from app.database import WordDB
from app.nlp import NLTK
class ALCEtmSearcher(object):
    """Looks up words in the ALC etymology dictionary and rewrites text so
    that every recognized word becomes a link to its dictionary entry.

    Depends on project-local helpers: WordDB (dictionary lookups) and
    NLTK (tokenizing, lemmatizing, stemming).
    """

    def __init__(self):
        self.db = WordDB('mydict')
        self.n = NLTK()
        # URL template; '{unum}' is filled in per dictionary entry later.
        self.url_unum = \
            'http://home.alc.co.jp/db/owa/etm_sch?unum={unum}&stg=2'

    def find_word_with_unum(self, word):
        ''' Aggressively find word data of ALC etymology dictionary with NLP.

        Lookup strategies are tried in order until one returns data:
        1. word itself
        2. lemmatize
        3. Porter stemmer with lemma
        4. Porter stemmer with stem
        5. Lancaster stemmer with stem
        '''
        word = word.lower()
        lemma = self.n.lemmatizer(word)
        p_stem = self.n.porter_stemmer(word)
        l_stem = self.n.lancaster_stemmer(word)
        # `or` chains fall through to progressively looser lookups.
        word_data = self.db.find_with_unum(word, 'lemma') \
            or self.db.find_with_unum(lemma, 'lemma') \
            or self.db.find_with_unum(p_stem, 'lemma') \
            or self.db.find_with_unum(p_stem, 'stem') \
            or self.db.find_with_unum(l_stem, 'stem')
        return word_data

    def text_linker(self, text, is_newtab=None):
        """Return *text* as HTML where each dictionary word is an <a> link.

        Lines are joined with <br>.  Words shorter than 3 characters and
        stopwords are left untouched.  When *is_newtab* is truthy, links
        open in a new tab.
        """
        # link_format = '<a href="{url}" target="_blank">{w}</a>'.format(
        target = 'target="_blank"' if is_newtab else ''
        # Two-stage formatting: this first .format() substitutes {url} and
        # {target} but deliberately leaves {unum} (inside the inserted url)
        # and {w} as placeholders for the per-word .format() calls below.
        link_format = '<a href="{url}" {target}>{w}</a>'.format(
            url=self.url_unum, w='{w}', target=target)
        # Cache linked words so each word hits the database at most once.
        # {word: unum}
        # NOTE: Should it be with MongoDB or something?
        linked_word = {}
        sentences = text.splitlines()
        sentence_list = []
        for sentence in sentences:
            word_list = []
            for word in self.n.tokenize(sentence):
                # Filter stopwords and very short words: keep them verbatim.
                if len(word) < 3 or word.lower() in self.n.stopwords:
                    word_list.append(word)
                    continue
                # Not to look into the database twice for the same word.
                if word in linked_word:
                    word_list.append(
                        link_format.format(
                            unum=linked_word[word],
                            w=word
                        ))
                    continue
                # Try to find the word in the database.
                word_data = self.find_word_with_unum(word.lower())
                if word_data:
                    linked_word[word] = word_data['alc_etm']['unum']
                    word_list.append(
                        link_format.format(
                            unum=word_data['alc_etm']['unum'],
                            w=word
                        ))
                else:
                    word_list.append(word)
            sentence_list.append(' '.join(word_list))
        return '<br>'.join(sentence_list)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#Fox Ning
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import basic
# Three dictionaries for showing types of map or unit.
# NOTE(review): the string values below appear mojibake-garbled by a text
# extraction step (they were presumably Chinese originally) — they are kept
# byte-for-byte because they are runtime data; confirm against the original
# UTF-8 source file.  Literals that were split across lines by the
# extraction have been rejoined.
NumToMapType = {0:"ๅนณๅ",1:"ๅฑฑๅฐ",2:"ๆฃฎๆ",3:"ๅฑ้",4:"็ฎๅก",
                5:"้่ฟน",6:"ไผ ้้จ"}
NumToUnitType = {0:"ๅๅฃซ",1:"็ชๅปๆ",2:"็ๅปๆ",3:"ๆๆๆบ",
                 4:"่ๆ่", 5:"ๆฒป็ๅธ", 6:"็ๆๅฃซ", 7:"ๆๆ่",
                 8:"ๅคงๆณๅธ"}
NumToActionType = {0:"ๅพๆบ", 1:"ๆปๅป", 2:"ๆ่ฝ"}
NumToTempleType = {0:"ๆ ็ฅ็ฌฆ", 1:"ๅ้็ฅ็ฌฆ", 2:"ๆๆท็ฅ็ฌฆ", 3:"้ฒๅพก็ฅ็ฌฆ"}
NumToEffectType = {0:"ๆชๅฝไธญ", 1:"ๅฝไธญ", -1:"ๆช่ฟ่ก"}
# Qt stylesheet applied to the info QLineEdits (yellow background, blue text).
StyleSheet = """
QLineEdit{
    background-color: rgb(255, 255, 127);
    color: darkblue;
}
"""
class InfoWidget(QTabWidget):
    """Tabbed information panel with three pages: game status, the currently
    selected unit, and the currently selected map grid."""

    def __init__(self, parent=None):
        super(InfoWidget, self).__init__(parent)
        self.infoWidget_Game = InfoWidget1()
        self.infoWidget_Unit = InfoWidget2()
        self.infoWidget_Map = InfoWidget3()
        self.addTab(self.infoWidget_Game, "Game info")
        self.addTab(self.infoWidget_Unit, "Unit info")
        self.addTab(self.infoWidget_Map, "Map info")
        # BUG FIX: addTab assigns 0-based indices, so the three tabs are
        # 0, 1 and 2.  The original code used 1, 2 and 3, which attached
        # each tooltip to the wrong tab and one to a non-existent tab.
        self.setTabToolTip(0, "the global infos and game runing infos")
        self.setTabToolTip(1, "the selected unit's infos")
        self.setTabToolTip(2, "the selected map-grid's infos")

    # Reimplemented close event: merely hide the widget instead of closing it.
    def closeEvent(self, event):
        self.hide()
        event.ignore()

    # Emit a signal on hide so the main window can keep its menu in sync.
    def hideEvent(self, event):
        self.emit(SIGNAL("hided()"))

    # Show the info available at the beginning of a round.
    def beginRoundInfo(self, beginfo):
        self.infoWidget_Game.resetEnd()
        self.infoWidget_Game.setUnitinfo("Team %d Soldier %d" % (beginfo.id[0], beginfo.id[1]))
        self.beg_Flag = 1

    # Show the command and its outcome at the end of a round.
    def endRoundInfo(self, cmd, endinfo):
        self.infoWidget_Game.setCmdinfo(QString.fromUtf8("็งปๅจ่ณ%s,%s" % (cmd.move,
                                                         NumToActionType[cmd.order]
                                                         )))
        # order != 0 means the unit attacked or used a skill, so a target
        # and an effect are available.
        if cmd.order != 0:
            self.infoWidget_Game.setTargetinfo(QString.fromUtf8("%d้%dๅทๅไฝ" % (cmd.target[0], cmd.target[1])))
            self.infoWidget_Game.setEffectinfo(QString.fromUtf8("ๆปๅป%s๏ผๅๅป%s" % (NumToEffectType[endinfo.effect[0]],
                                                                NumToEffectType[endinfo.effect[1]])))
        self.infoWidget_Game.setTimeinfo("%d ms" % endinfo.timeused)
        #self.infoWidget_Game.setScoreinfo("%d : %d" %(endinfo.score[0],endinfo.score[1]))
        self.beg_Flag = 0

    # Jump the displayed info to an arbitrary round (replay navigation).
    def on_goToRound(self, _round, status, begInfo, endInfo):
        self.infoWidget_Game.info_round.setText("%d" % _round)
        self.beginRoundInfo(begInfo)
        if endInfo:
            self.endRoundInfo(*endInfo)
        # TODO: when jumping backwards, restore the settings emitted by the
        # replay class (original comment, translated).

    # Show the selected unit's basic attributes.
    def newUnitInfo(self, base_unit):
        self.infoWidget_Unit.info_type.setText(QString.fromUtf8(NumToUnitType[base_unit.kind]))
        self.infoWidget_Unit.info_life.setText("%d" % base_unit.life)
        self.infoWidget_Unit.info_attack.setText("%d" % base_unit.strength)
        self.infoWidget_Unit.info_defence.setText("%d" % base_unit.defence)
        #self.infoWidget_Unit.info_speed.setText("%d" %base_unit.agility)
        self.infoWidget_Unit.info_moverange.setText("%d" % base_unit.move_range)
        self.infoWidget_Unit.info_attackrange.setText("%s" % base_unit.attack_range)

    # Show the selected map grid's basic attributes.
    def newMapInfo(self, map_basic, tp):
        self.infoWidget_Map.info_type.setText(QString.fromUtf8(NumToMapType[map_basic.kind]))
        self.infoWidget_Map.info_score.setText("%d" % map_basic.score)
        self.infoWidget_Map.info_consumption.setText("%d" % map_basic.move_consumption)
        if isinstance(map_basic, basic.Map_Temple):
            self.infoWidget_Map.info_temple.setText(QString.fromUtf8("%s" % NumToTempleType[tp]))
            #cd = basic.TEMPLE_UP_TIME - map_basic.time if (basic.TEMPLE_UP_TIME - map_basic.time) > 0 else 0
            # self.infoWidget_Map.info_cd.setText("%d" %cd)
        else:
            self.infoWidget_Map.info_temple.setText("")
#ๅฑ็คบๆธธๆๅบ็กไฟกๆฏ
# Shows the game's basic/running information.
class InfoWidget1(QWidget):
    """Tab page displaying global game state: AI/map file paths, current
    round, acting unit, elapsed time, issued command, target and effect.
    All fields are read-only line edits updated through the setters below."""

    def __init__(self, parent=None):
        super(InfoWidget1, self).__init__(parent)
        self.label_aifile = QLabel("AI file path:")
        self.info_aifile1 = QLineEdit("")
        self.info_aifile1.setReadOnly(True)
        self.info_aifile2 = QLineEdit("")
        self.info_aifile2.setReadOnly(True)
        self.label_mapfile = QLabel("MAP file path:")
        self.info_mapfile = QLineEdit("")
        self.info_mapfile.setReadOnly(True)
        self.label_round = QLabel(QString.fromUtf8("ๅๅๆฐ:"))
        self.info_round = QLineEdit("")
        self.info_round.setReadOnly(True)
        self.label_unit = QLabel(QString.fromUtf8("่กๅจๅไฝ:"))
        self.info_unit = QLineEdit("")
        self.info_unit.setReadOnly(True)
        self.label_time = QLabel(QString.fromUtf8("็จๆถ:"))
        self.info_time = QLineEdit("")
        self.info_time.setReadOnly(True)
        self.label_cmd = QLabel(QString.fromUtf8("ๅฝไปค:"))
        self.info_cmd = QLineEdit("")
        self.info_cmd.setReadOnly(True)
        self.label_target = QLabel(QString.fromUtf8("็ฎๆ :"))
        self.info_target = QLineEdit("")
        # BUG FIX: the original repeated `self.info_cmd.setReadOnly(True)`
        # here, leaving info_target editable by the user.
        self.info_target.setReadOnly(True)
        self.label_effect = QLabel(QString.fromUtf8("ๆปๅปๆๆ:"))
        self.info_effect = QLineEdit("")
        self.info_effect.setReadOnly(True)
        #self.label_score = QLabel("socre:")
        #self.info_score = QLineEdit("0:0")
        #self.info_score.setReadOnly(True)
        # Two-column grid: labels on the left, values on the right.
        self.layout = QGridLayout()
        self.layout.addWidget(self.label_aifile, 0, 0)
        self.layout.addWidget(self.info_aifile1, 0, 1)
        self.layout.addWidget(self.info_aifile2, 1, 1)
        self.layout.addWidget(self.label_mapfile, 2, 0)
        self.layout.addWidget(self.info_mapfile, 2, 1)
        self.layout.addWidget(self.label_time, 3, 0)
        self.layout.addWidget(self.info_time, 3, 1)
        self.layout.addWidget(self.label_round, 4, 0)
        self.layout.addWidget(self.info_round, 4, 1)
        self.layout.addWidget(self.label_unit, 5, 0)
        self.layout.addWidget(self.info_unit, 5, 1)
        self.layout.addWidget(self.label_cmd, 6, 0)
        self.layout.addWidget(self.info_cmd, 6, 1)
        self.layout.addWidget(self.label_target, 7, 0)
        self.layout.addWidget(self.info_target, 7, 1)
        self.layout.addWidget(self.label_effect, 8, 0)
        self.layout.addWidget(self.info_effect, 8, 1)
        # self.layout.addWidget(self.label_score, 8, 0)
        # self.layout.addWidget(self.info_score, 8, 1)
        self.setLayout(self.layout)
        self.setStyleSheet(StyleSheet)

    def setAiFileinfo(self, loaded_ai):
        # Second AI slot falls back to "Default" when only one AI is loaded.
        self.info_aifile1.setText(loaded_ai[0])
        if len(loaded_ai) == 2:
            self.info_aifile2.setText(loaded_ai[1])
        else:
            self.info_aifile2.setText("Default")

    # NOTE(review): the setters below shadow the builtin `str` with their
    # parameter name; kept as-is to preserve the public signature.
    def setMapFileinfo(self, str):
        self.info_mapfile.setText(str)

    def setUnitinfo(self, str):
        self.info_unit.setText(str)

    def setTimeinfo(self, str):
        self.info_time.setText(str)

    def setCmdinfo(self, str):
        self.info_cmd.setText(str)

    def setTargetinfo(self, str):
        self.info_target.setText(str)

    def setEffectinfo(self, str):
        self.info_effect.setText(str)

    #def setScoreinfo(self, str):
    #    self.info_score.setText(str)

    def resetEnd(self):
        """Clear the per-round fields before a new round starts."""
        self.setTargetinfo("")
        self.setEffectinfo("")
        self.setCmdinfo("")
#ๅฑ็คบๅไฝๅบ็กไฟกๆฏ
# Shows the selected unit's basic information.
class InfoWidget2(QWidget):
    """Tab page displaying the selected unit's attributes (type, life,
    attack, defence, move range, attack range) as label/value pairs."""

    def __init__(self, parent=None):
        super(InfoWidget2, self).__init__(parent)
        # Value labels collected here so they can be styled uniformly below.
        self.infos = []
        self.label_type = QLabel(QString.fromUtf8("็ฑปๅ:"))
        self.info_type = QLabel("")
        self.infos.append(self.info_type)
        self.label_life = QLabel(QString.fromUtf8("็ๅฝ:"))
        self.info_life = QLabel("")
        self.infos.append(self.info_life)
        self.label_attack = QLabel(QString.fromUtf8("ๆปๅป:"))
        self.info_attack = QLabel("")
        self.infos.append(self.info_attack)
        #self.label_speed = QLabel(QString.fromUtf8("ๆๆท:"))
        #self.info_speed = QLabel("")
        #self.infos.append(self.info_speed)
        self.label_defence = QLabel(QString.fromUtf8("้ฒๅพก:"))
        self.info_defence = QLabel("")
        self.infos.append(self.info_defence)
        self.label_moverange = QLabel(QString.fromUtf8("็งปๅจๅ:"))
        self.info_moverange = QLabel("")
        self.infos.append(self.info_moverange)
        self.label_attackrange = QLabel(QString.fromUtf8("ๆปๅป่ๅด:"))
        self.info_attackrange = QLabel("")
        self.infos.append(self.info_attackrange)
        # NOTE(review): `labels` is only used by the commented-out font/
        # palette styling kept below; it is otherwise unused.
        labels = [self.label_type, self.label_attack, self.label_life,
                  self.label_defence, self.label_attackrange, self.label_moverange]
        #old_font = self.font()
        #new_font = QFont()
        #new_font.setBold(True)
        #new_font.setPointSize(old_font.pointSize() + 3)
        #pal = self.label_type.palette()
        #pal.setBrush(QPalette.WindowText, QColor(Qt.white))
        # Fix the vertical size of every value label.
        for info in self.infos:
            # info.setFrameStyle(QFrame.StyledPanel|QFrame.Sunken)
            info.setSizePolicy(QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed))
            # info.setFont(new_font)
            # info.setPalette(pal)
        # for label in labels:
        #     label.setFont(new_font)
        #     label.setPalette(pal)
        # Two-column grid: labels on the left, values on the right.
        self.layout = QGridLayout()
        self.layout.addWidget(self.label_type, 0, 0)
        self.layout.addWidget(self.info_type, 0, 1)
        self.layout.addWidget(self.label_life, 1, 0)
        self.layout.addWidget(self.info_life, 1, 1)
        self.layout.addWidget(self.label_attack, 2, 0)
        self.layout.addWidget(self.info_attack, 2, 1)
        self.layout.addWidget(self.label_defence, 3, 0)
        self.layout.addWidget(self.info_defence, 3, 1)
        # self.layout.addWidget(self.label_speed, 4, 0)
        # self.layout.addWidget(self.info_speed, 4, 1)
        self.layout.addWidget(self.label_moverange, 4, 0)
        self.layout.addWidget(self.info_moverange, 4, 1)
        self.layout.addWidget(self.label_attackrange, 5, 0)
        self.layout.addWidget(self.info_attackrange, 5, 1)
        self.setLayout(self.layout)
#ๅฑ็คบๅฐๅพๅบ็กไฟกๆฏ
# Shows the selected map grid's basic information.
class InfoWidget3(QWidget):
    """Tab page displaying the selected map grid's attributes (terrain type,
    score, move consumption, temple kind) as label/value pairs."""

    def __init__(self, parent=None):
        super(InfoWidget3, self).__init__(parent)
        # Value labels collected here so they can be styled uniformly below.
        self.infos = []
        self.label_type = QLabel(QString.fromUtf8("็ฑปๅ:"))
        self.info_type = QLabel("")
        self.infos.append(self.info_type)
        self.label_score = QLabel(QString.fromUtf8("ๅๅผ:"))
        self.info_score = QLabel("")
        self.infos.append(self.info_score)
        self.label_consumption = QLabel(QString.fromUtf8("็งปๅจๆถ่:"))
        self.info_consumption = QLabel("")
        self.infos.append(self.info_consumption)
        self.label_temple = QLabel(QString.fromUtf8("็ฅ็ฌฆ็ง็ฑป:"))
        self.info_temple = QLabel("")
        self.infos.append(self.info_temple)
        # self.label_cd = QLabel(QString.fromUtf8("็ฅ็ฌฆๅทๅด:"))
        # self.info_cd = QLabel("")
        # self.infos.append(self.info_cd)
        # NOTE(review): `labels` is only used by the commented-out styling
        # kept below; it is otherwise unused.
        labels = [self.label_type, self.label_consumption, self.label_score, self.label_temple]
        # self.label_cd]
        # old_font = self.font()
        # new_font = QFont()
        # new_font.setBold(True)
        # new_font.setPointSize(old_font.pointSize() + 3)
        # pal = self.label_type.palette()
        # pal.setBrush(QPalette.WindowText, QColor(Qt.white))
        # Fix the vertical size of every value label.
        for info in self.infos:
            # info.setFrameStyle(QFrame.StyledPanel|QFrame.Sunken)
            info.setSizePolicy(QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed))
            # info.setFont(new_font)
            # info.setPalette(pal)
        # for label in labels:
        #     label.setFont(new_font)
        #     label.setPalette(pal)
        # Two-column grid: labels on the left, values on the right.
        self.layout = QGridLayout()
        self.layout.addWidget(self.label_type, 0, 0)
        self.layout.addWidget(self.info_type, 0, 1)
        self.layout.addWidget(self.label_score, 1, 0)
        self.layout.addWidget(self.info_score, 1, 1)
        self.layout.addWidget(self.label_consumption, 2, 0)
        self.layout.addWidget(self.info_consumption, 2, 1)
        self.layout.addWidget(self.label_temple, 3, 0)
        self.layout.addWidget(self.info_temple, 3, 1)
        # self.layout.addWidget(self.label_cd, 4, 0)
        # self.layout.addWidget(self.info_cd, 4, 1)
        self.setLayout(self.layout)
# Manual smoke test: launch the info widget standalone.
if __name__ == "__main__":
    import sys

    application = QApplication(sys.argv)
    widget = InfoWidget()
    widget.show()
    application.exec_()
|
import sys
from pylab import *
# Demo data: N random 2-D observations and 3 random initial cluster centres.
N = 30
X = np.random.rand(N)
Y = np.random.rand(N)
clusterCentreX = np.random.rand(3)
clusterCentreY = np.random.rand(3)
clusters = [[], [], []]  # per-cluster (x, y) members, rebuilt by assign()
colors = ["red", "blue", "green"]  # one colour per cluster
size = 50  # marker size shared by all scatter plots
def plotCentre(ax):
    """Draw the three cluster centres on *ax* as opaque diamonds."""
    ax.scatter(clusterCentreX, clusterCentreY, marker="D", color=colors,
               s=size, alpha=1)
def plotClusters(ax):
    """Scatter each cluster's observations on *ax* in that cluster's colour."""
    for members, colour in zip(clusters, colors):
        xs = [point[0] for point in members]
        ys = [point[1] for point in members]
        ax.scatter(xs, ys, color=colour, s=size, alpha=0.5)
def formatSubplot(i):
    """Format the current subplot: unit square, no ticks, titled *i*."""
    xlim(0, 1)
    ylim(0, 1)
    for hide_ticks in (xticks, yticks):
        hide_ticks([])
    title(i)
def assign():
    """Rebuild `clusters`, assigning each observation to its nearest centre
    (squared Euclidean distance; ties go to the lowest index)."""
    global clusters
    clusters = [[], [], []]
    for x, y in zip(X, Y):
        nearest = min(
            range(3),
            key=lambda k: (x - clusterCentreX[k]) ** 2 + (y - clusterCentreY[k]) ** 2,
        )
        clusters[nearest].append((x, y))
def update():
    """Move each cluster centre to the mean of its currently assigned points.

    NOTE(review): an empty cluster yields np.mean([]) == nan, as in the
    original code.
    """
    global clusterCentreX, clusterCentreY
    for k, members in enumerate(clusters):
        xs = [point[0] for point in members]
        ys = [point[1] for point in members]
        clusterCentreX[k] = np.mean(xs)
        clusterCentreY[k] = np.mean(ys)
# Six-panel figure walking through three assign/update iterations of k-means.
figure(figsize = (8, 12), dpi = 80)
# THE FIRST FIGURE: raw data with the random initial centres.
subplot(3, 2, 1)
ax1 = gca()
formatSubplot("A");
plotCentre(ax1)
ax1.scatter(X, Y, color = "black", s = size, alpha = 0.5)
# THE SECOND FIGURE: first assignment to the initial centres.
subplot(3, 2, 2)
ax2 = gca()
formatSubplot("B");
plotCentre(ax2)
assign()
plotClusters(ax2)
# THE THIRD FIGURE: centres moved to the cluster means.
subplot(3, 2, 3)
ax3 = gca()
formatSubplot("C");
plotClusters(ax3)
update()
plotCentre(ax3)
# THE FOURTH FIGURE: second assignment.
subplot(3, 2, 4)
ax4 = gca()
formatSubplot("D");
plotCentre(ax4)
assign()
plotClusters(ax4)
# THE FIFTH FIGURE: second centre update.
subplot(3, 2, 5)
ax5 = gca()
formatSubplot("E")
plotClusters(ax5)
update()
plotCentre(ax5)
# THE SIXTH FIGURE: third assignment.
subplot(3, 2, 6)
ax6 = gca()
formatSubplot("F")
plotCentre(ax6)
assign()
plotClusters(ax6)
# Save figure and show.
savefig("KMeansDemo.png", dpi = 200)
show()
|
from django.db import models
from django.conf import settings
from django.urls import reverse
import photograph
import campi
class Directory(
    campi.models.labeledModel,
    campi.models.descriptionModel,
    campi.models.userModifiedModel,
):
    """
    A directory from the original image filesystem. Photographs must have one direct parent directory, which may itself have parent directories.
    """

    # Self-referential FK; deleting a directory cascades to its children.
    parent_directory = models.ForeignKey(
        "Directory",
        related_name="child_directories",
        on_delete=models.CASCADE,
        null=True,
        help_text="The immediate parent of this directory",
    )
class Job(
    campi.models.labeledModel,
    campi.models.descriptionModel,
    campi.models.userModifiedModel,
):
    """
    A job from the original archival description. Photographs may be associated with one job.
    """

    # NOTE(review): unique=True together with blank=True on a CharField means
    # a second Job with an empty job_code will violate the unique constraint
    # — confirm whether blank codes should be allowed (e.g. null=True).
    job_code = models.CharField(
        blank=True,
        unique=True,
        max_length=20,
        help_text="custom code used for this job",
    )
    date_start = models.DateField(help_text="Earliest date of this job")
    date_end = models.DateField(help_text="Latest date of this job")
|
import os
import sys
# Make the current working directory importable for project-local modules.
path = os.getcwd()
sys.path.append(path)
def calculate_filled_utilities(rated_products, product_num):
    """Return the fill rate of the user/product rating matrix.

    Parameters
    ----------
    rated_products : dict
        Maps each user id to the collection of products that user rated.
    product_num : int
        Total number of products available.

    Returns
    -------
    float
        total ratings / (number of users * product_num), i.e. the fraction
        of the implicit |users| x |products| matrix that is filled.
        Returns 0.0 for an empty *rated_products* (the original raised
        ZeroDivisionError).
    """
    if not rated_products:
        return 0.0
    total_ratings = sum(len(products) for products in rated_products.values())
    return total_ratings / len(rated_products) / product_num
|
import redis
# Connection settings for a local Redis server (default port, no password).
REDIS_HOST = "localhost"
REDIS_PORT = 6379
REDIS_PWD = ""
def hello_redis():
    """Example Hello Redis Program: store a greeting and print it back."""
    try:
        # decode_responses=True makes the client return Python strings
        # (decoded with the default utf-8 encoding) instead of raw bytes;
        # this behaviour is client specific.
        client = redis.StrictRedis(
            host=REDIS_HOST,
            port=REDIS_PORT,
            password=REDIS_PWD,
            decode_responses=True,
        )
        # Set, then retrieve and print, the hello message.
        client.set("msg:hello", "Hello Redis!!!")
        print(client.get("msg:hello"))
    except Exception as err:
        # Best-effort demo: report connection problems instead of crashing.
        print(err)
# Run the demo only when executed as a script.
if __name__ == "__main__":
    hello_redis()
|
#First *fork* your copy. Then copy-paste your code below this line ๐
#Finally click "Run" to execute the tests
#Imports the ceil function from the math module
from math import ceil
#Defines the function paint_calc with 3 parameters : height of the wall, width of the wall and coverage
def paint_calc(height, width, cover):
    """Print how many cans of paint cover a height x width wall.

    One can covers *cover* area units; partial cans round up via ceil().
    """
    wall_area = height * width
    cans_number = ceil(wall_area / cover)
    print(f"You'll need {cans_number} cans of paint.")
#Write your code above this line (pointer emoji lost in extraction)
# Don't change the code below (per the exercise instructions).
# Interactive driver: read wall dimensions from stdin and run the calculator.
test_h = int(input("Height of wall: "))
test_w = int(input("Width of wall: "))
coverage = 5
paint_calc(height=test_h, width=test_w, cover=coverage)
# Tests
import unittest
from unittest.mock import patch
from io import StringIO
class MyTest(unittest.TestCase):
    """Exercise harness: checks paint_calc's printed output for several
    wall sizes by capturing stdout."""

    # Testing print output: each case patches sys.stdout and compares the
    # captured text against the exact expected line.
    def test_1(self):
        with patch('sys.stdout', new = StringIO()) as fake_out:
            paint_calc(3, 6, 5)
            expected_print = "You'll need 4 cans of paint.\n"
            self.assertEqual(fake_out.getvalue(), expected_print)

    def test_2(self):
        with patch('sys.stdout', new = StringIO()) as fake_out:
            paint_calc(3, 9, 5)
            expected_print = "You'll need 6 cans of paint.\n"
            self.assertEqual(fake_out.getvalue(), expected_print)

    def test_3(self):
        with patch('sys.stdout', new = StringIO()) as fake_out:
            paint_calc(7, 9, 2)
            expected_print = "You'll need 32 cans of paint.\n"
            self.assertEqual(fake_out.getvalue(), expected_print)

    def test_4(self):
        with patch('sys.stdout', new = StringIO()) as fake_out:
            paint_calc(12, 45, 5)
            expected_print = "You'll need 108 cans of paint.\n"
            self.assertEqual(fake_out.getvalue(), expected_print)
print("\n")
print('Running some tests on your code:')
print(".\n.\n.\n.")
# exit=False keeps the interpreter alive after unittest finishes.
unittest.main(verbosity=1, exit=False)
"""
Do not change the input and output format.
If our script cannot run your code or the format is improper, your code will not be graded.
The only functions you need to implement in this template are linear_regression_noreg, linear_regression_invertible, regularized_linear_regression,
tune_lambda, test_error and mapping_data.
"""
import numpy as np
import pandas as pd
###### Q1.1 ######
def mean_absolute_error(w, X, y):
    """
    Compute the mean absolute error on test set given X, y, and model parameter w.
    Inputs:
    - X: A numpy array of shape (num_samples, D) containing test feature.
    - y: A numpy array of shape (num_samples, ) containing test label
    - w: a numpy array of shape (D, )
    Returns:
    - err: the mean absolute error
    """
    #####################################################
    # TODO 1: Fill in your code here                    #
    #####################################################
    # Vectorized form of the original per-sample loop: mean of |y - Xw|.
    # np.float64() preserves the original scalar return dtype.
    return np.float64(np.mean(np.absolute(y - np.dot(X, w))))
###### Q1.2 ######
def linear_regression_noreg(X, y):
    """
    Compute the weight parameter given X and y.
    Inputs:
    - X: A numpy array of shape (num_samples, D) containing feature.
    - y: A numpy array of shape (num_samples, ) containing label
    Returns:
    - w: a numpy array of shape (D, )
    """
    #####################################################
    # TODO 2: Fill in your code here                    #
    #####################################################
    # Normal equations: w = (X^T X)^{-1} X^T y.  Solving the linear system
    # is more numerically stable than forming the explicit inverse.
    Xt = X.transpose()
    return np.linalg.solve(np.dot(Xt, X), np.dot(Xt, y))
###### Q1.3 ######
def linear_regression_invertible(X, y):
    """
    Compute the weight parameter given X and y.
    Inputs:
    - X: A numpy array of shape (num_samples, D) containing feature.
    - y: A numpy array of shape (num_samples, ) containing label
    Returns:
    - w: a numpy array of shape (D, )
    """
    #####################################################
    # TODO 3: Fill in your code here                    #
    #####################################################
    Xt = X.transpose()
    Xnew = np.dot(Xt, X)
    # X^T X is symmetric, so use eigvalsh: it is guaranteed to return real
    # eigenvalues (np.linalg.eig can return complex values with tiny
    # imaginary parts, which would break the `<` comparison) and it skips
    # computing the eigenvectors, which were unused.
    while np.min(np.linalg.eigvalsh(Xnew)) < 10 ** (-5):
        # Inflate the diagonal until the smallest eigenvalue is large
        # enough for a stable inversion.
        Xnew = Xnew + 10 ** (-1) * np.identity(len(Xnew))
    return np.dot(np.dot(np.linalg.inv(Xnew), Xt), y)
###### Q1.4 ######
def regularized_linear_regression(X, y, lambd):
    """
    Compute the weight parameter given X, y and lambda.
    Inputs:
    - X: A numpy array of shape (num_samples, D) containing feature.
    - y: A numpy array of shape (num_samples, ) containing label
    - lambd: a float number containing regularization strength
    Returns:
    - w: a numpy array of shape (D, )
    """
    # Ridge closed form: w = (X^T X + lambda I)^{-1} X^T y.
    Xt = X.transpose()
    gram = np.dot(Xt, X)
    ridge = gram + lambd * np.identity(len(gram))
    return np.linalg.inv(ridge).dot(Xt).dot(y)
###### Q1.5 ######
def tune_lambda(Xtrain, ytrain, Xval, yval):
    """
    Find the best lambda value.
    Inputs:
    - Xtrain: A numpy array of shape (num_training_samples, D) containing training feature.
    - ytrain: A numpy array of shape (num_training_samples, ) containing training label
    - Xval: A numpy array of shape (num_val_samples, D) containing validation feature.
    - yval: A numpy array of shape (num_val_samples, ) containing validation label
    Returns:
    - bestlambda: the best lambda you find in lambds
    """
    # Sweep lambda over 10**-19 ... 10**19 and keep the first value that
    # achieves the lowest validation MAE (strict `<`, as in the original).
    minmae = np.inf
    for exponent in range(-19, 20):
        lambd = 10 ** exponent
        w = regularized_linear_regression(Xtrain, ytrain, lambd)
        mae = mean_absolute_error(w, Xval, yval)
        if mae < minmae:
            minmae = mae
            bestlambda = lambd
    return bestlambda
###### Q1.6 ######
def mapping_data(X, power):
    """
    Mapping the data.
    Inputs:
    - X: A numpy array of shape (num_training_samples, D) containing training feature.
    - power: A integer that indicate the power in polynomial regression
    Returns:
    - X: mapped_X, You can manully calculate the size of X based on the power and original size of X
    """
    # Append element-wise powers X^2 ... X^power as extra feature columns.
    powered = [np.power(X, p) for p in range(2, power + 1)]
    if not powered:
        # power < 2: nothing to append, return the input unchanged.
        return X
    return np.concatenate([X] + powered, axis=1)
|
import random
from datetime import datetime
from datetime import timezone
from senor_octopus.types import Stream
async def rand(events: int = 10, prefix: str = "hub.random") -> Stream:
    """
    Generate random numbers between 0 and 1.
    This source emits *events* random numbers in [0, 1) each time it is
    scheduled; it exists mainly for testing.
    Parameters
    ----------
    events
        Number of events to generate every time it runs
    prefix
        Prefix for events from this source
    Yields
    ------
    Event
        Events with random numbers
    """
    emitted = 0
    while emitted < events:
        emitted += 1
        # Timestamp is taken per event, in UTC.
        yield {
            "timestamp": datetime.now(timezone.utc),
            "name": prefix,
            "value": random.random(),
        }
|
import copy
from PIL import Image
import numpy as np
from numpy.core.multiarray import ndarray
from scipy.fftpack import dctn
from scipy.fftpack import idctn
import math
import matplotlib.pyplot as plt
class Dct:
    """Block-based DCT image codec demo.

    Splits an image into bs_n x bs_n blocks, applies a 2-D DCT per block,
    quantizes/keeps only the lowest-frequency coefficients (zig-zag order)
    and zeroes the rest, inverse-transforms, and exposes SNR and
    compression-rate measurements.
    """

    # bs_n is the block size n; imagepath is the input image file.
    def __init__(self, imagepath, bs_n):
        self.bs_n = bs_n
        image_int8 = Image.open(imagepath)
        # Original pixels, widened to int32 for exact arithmetic.
        self.image = np.asarray(image_int8, dtype="int32")
        self.Row = len(self.image)
        self.Col = len(self.image[0])
        # Float working copy, transformed in place by blockImage().
        self.procImage = np.asarray(self.image, dtype="float")

    def save(self, path):
        """Write the processed image to *path* (clamped to uint8)."""
        im = Image.fromarray(np.uint8(self.procImage))
        im.save(path)

    def CompressionRate(self):
        """Return the nominal compression ratio for the current block size.

        Derivation (per n*n block): 8*n*n original bits versus 3 bits for
        each first-group AC term, 2 bits for each second-group AC term and
        4 bits for the DC term — assumption inferred from the quantization
        levels used below (8, 4 and 16); confirm against the assignment spec.
        """
        n = self.bs_n
        return 8*n*n/(3*int((n*n-1)/10)+2*int((n*n-1)/10)+4)

    def Snr(self):
        """Return the signal-to-noise ratio in dB.

        Requires blockImage() to have run first: it sets self.signal.
        """
        self.noise = abs(self.signal-self.image)
        pnoise = np.mean(self.noise**2)
        psignal = np.mean(self.image**2)
        return 10*math.log10(psignal/pnoise)

    def blockImage(self):
        """Forward-DCT, quantize and inverse-DCT every n x n block in place."""
        DC_term=[]
        n = self.bs_n
        # Pass 1: DCT each block, quantize its AC terms in place via
        # getACterm(), and collect all DC terms for joint quantization.
        for row in range(0, self.Row, n):
            for col in range(0, self.Col, n):
                tempblock = self.procImage[row: row+n, col: col+n]
                dctblock = dctn(tempblock, norm='ortho')
                DC_term.append(dctblock[0][0])
                self.getACterm(dctblock)
                self.procImage[row: row + n, col: col + n] = dctblock
        # DC terms are quantized together with 16 levels.
        self.QdeQ_array(DC_term, 16)
        # Pass 2: restore each block's quantized DC term and inverse-DCT.
        for row in range(0, self.Row, n):
            for col in range(0, self.Col, n):
                dcColLen = int(self.Col/n)
                tempblock = self.procImage[row: row + n, col: col + n]
                # Row-major index of this block in the DC_term list.
                index=int(row / n * dcColLen + col / n)
                tempblock[0][0] = DC_term[index]
                idctblock = idctn(tempblock, norm='ortho')
                self.procImage[row: row + n, col: col + n] = idctblock
        # Reconstructed signal used by Snr().
        self.signal = np.uint8(self.procImage)
        pass

    def getACterm(self,mat):
        """Quantize the AC coefficients of one DCT block in zig-zag order.

        The first termNum coefficients are quantized with 8 levels, the
        next termNum with 4 levels, and all remaining ones are zeroed.
        """
        n = self.bs_n
        termNum = int((n * n - 1) / 10)
        firstTerm = []
        secondTerm = []
        zzIndex = 0
        # First zig-zag traversal: gather the two coefficient groups and
        # zero out everything beyond them.  i indexes the anti-diagonals;
        # the DC term (0,0) is skipped because i starts at 1.
        for i in range(1, 2 * n - 1):
            if i % 2 == 1:
                # down left
                x = 0 if i < n else i - n + 1
                y = i if i < n else n - 1
                while x < n and y >= 0:
                    if zzIndex < termNum:
                        firstTerm.append(mat[x][y])
                    elif termNum <= zzIndex < 2 * termNum:
                        secondTerm.append(mat[x][y])
                    else:
                        mat[x][y] = 0.0
                    x += 1
                    y -= 1
                    zzIndex += 1
            else:
                # up right
                x = i if i < n else n - 1
                y = 0 if i < n else i - n + 1
                while x >= 0 and y < n:
                    if zzIndex < termNum:
                        firstTerm.append(mat[x][y])
                    elif termNum <= zzIndex < 2 * termNum:
                        secondTerm.append(mat[x][y])
                    else:
                        mat[x][y] = 0.0
                    x -= 1
                    y += 1
                    zzIndex += 1
        # Quantize each group against its own min/max range.
        if firstTerm:
            self.QdeQ_array(firstTerm, 8)
        if secondTerm:
            self.QdeQ_array(secondTerm, 4)
        # Second zig-zag traversal: write the quantized values back in the
        # same order; stops once both groups have been restored.
        zzIndex = 0
        fi = 0
        si = 0
        for i in range(1, 2 * n - 1):
            if zzIndex >= 2 * termNum:
                break
            if i % 2 == 1:
                # down left
                x = 0 if i < n else i - n + 1
                y = i if i < n else n - 1
                while x < n and y >= 0:
                    if zzIndex < termNum:
                        mat[x][y] = firstTerm[fi]
                        fi += 1
                    elif termNum <= zzIndex < 2 * termNum:
                        mat[x][y] = secondTerm[si]
                        si += 1
                    else:
                        break
                    x += 1
                    y -= 1
                    zzIndex += 1
            else:
                # up right
                x = i if i < n else n - 1
                y = 0 if i < n else i - n + 1
                while x >= 0 and y < n:
                    if zzIndex < termNum:
                        mat[x][y] = firstTerm[fi]
                        fi += 1
                    elif termNum <= zzIndex < 2 * termNum:
                        mat[x][y] = secondTerm[si]
                        si += 1
                    else:
                        break
                    x -= 1
                    y += 1
                    zzIndex += 1

    def QdeQ_array(self, array, level):
        """Quantize-then-dequantize every element of *array* in place,
        using *level* uniform steps over the array's own [min, max] range.
        The small epsilon keeps the max value inside the last bin."""
        H = np.max(array) + 0.000001
        L = np.min(array)
        for i in range(len(array)):
            array[i] = self.QdeQ(H, L, array[i], level)

    def QdeQ(self,H,L,value,level):
        """Return *value* quantized to one of *level* uniform bins over
        [L, H] and dequantized to the bin's midpoint."""
        gap = (H-L)/level
        return (int((value - L)/gap)+0.5)*gap+L
        #quantized then dequantized
#quantized then dequantized
if __name__ == '__main__':
    # NOTE(review): hard-coded absolute paths — adjust to run locally.
    path = '/Users/shibowen/Documents/data compression/newpicture.png'
    pathlena = '/Users/shibowen/Documents/data compression/lena.png'
    # Block sizes to compare.
    n =[2,4,8,16,32,64]
    # First image: compress at each block size, record SNR and compression rate.
    snrplot=[]
    compplot = []
    for element in n:
        path2 = f'/Users/shibowen/Documents/data compression/newimage{element}.png'
        imagea = Dct(path,element)
        imagea.blockImage()
        imagea.save(path2)
        snr = imagea.Snr()
        comp = imagea.CompressionRate()
        snrplot.append(snr)
        compplot.append(comp)
    print('sky and bird result')
    print(n)
    print(snrplot)
    print(compplot)
    plt.plot(n,snrplot)
    plt.title("the sky and bird snr to n")
    plt.show()
    # Second image (lena): same sweep.
    snrplotlena=[]
    compplotlena = []
    for element in n:
        path2 = f'/Users/shibowen/Documents/data compression/lena{element}.png'
        imagea = Dct(pathlena,element)
        imagea.blockImage()
        imagea.save(path2)
        snr = imagea.Snr()
        comp = imagea.CompressionRate()
        snrplotlena.append(snr)
        compplotlena.append(comp)
    print('lena result')
    print(n)
    print(snrplotlena)
    print(compplotlena)
    plt.plot(n, snrplotlena)
    plt.title("the lena snr to n")
    plt.show()
|
from gaiatest import GaiaTestCase
from OWDTestToolkit.utils.utils import UTILS
from OWDTestToolkit.apps.messages import Messages
class test_main(GaiaTestCase):
    """Gaia UI test: types a fixed string into a new SMS and screenshots it."""

    # String typed into the SMS body; enterSMSMsg verifies it via typeThis().
    test_str = "abcdefghijklmnopqrstuvwxyz"

    def setUp(self):
        # Set up child objects...
        GaiaTestCase.setUp(self)
        self.UTILS = UTILS(self)
        self.messages = Messages(self)
        # Establish which phone number to use.
        self.phone_number = self.UTILS.general.get_config_variable("phone_number", "custom")
        self.UTILS.reporting.logComment("Sending sms to telephone number " + self.phone_number)

    def tearDown(self):
        # Flush collected results before the framework tears down.
        self.UTILS.reporting.reportResults()
        GaiaTestCase.tearDown(self)

    def test_run(self):
        # Launch messages app.
        self.messages.launch()
        """
        Type a message containing the required string
        (the test is already included in 'enterSMSMsg' because it uses 'typeThis()').
        """
        self.messages.startNewSMS()
        self.messages.enterSMSMsg(self.test_str, False)
        self.UTILS.debug.screenShot("5968")
|
# coding: utf-8
# In[1]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import networkx as nx
from sets import Set
import random as rd
# In[132]:
# NOTE(review): this script imports `from sets import Set` above, which only
# exists on Python 2; on Python 3 the import fails (and Set is unused here).
# Load per-class CBO weights and history counts from text files; genfromtxt
# with dtype=None infers a structured/record layout per row.
class_CBO = np.genfromtxt('class_weight.txt', dtype=None)
#class_CBO = list(class_CBO)
class_History = np.genfromtxt('history_list.txt', dtype=None)
#class_History = list(class_History)
# Combine the two factors per class: 20% CBO + 80% history.
class_combined_factors = []
for i in np.arange(0, class_CBO.shape[0]):
    name = class_CBO[i][1]
    CBO = float(class_CBO[i][2])
    History = float(class_History[i][1])
    total_factor = 0.2*CBO + 0.8*History
    combined_factors = [name, CBO, History, total_factor]
    class_combined_factors.append(combined_factors)
dataframe = pd.DataFrame(class_combined_factors, columns=['ClassName', 'ClassCBO', 'ClassHistory', 'TotalScore'])
dataframe.to_csv('TotalScore.csv', index=False, header=False)
# Report the top-scoring classes by each metric.
print("For maximum TotalScore : ")
print(dataframe.loc[dataframe['TotalScore'] == max(dataframe.TotalScore)])
print("\n\nFor maximum CBO : ")
print(dataframe.loc[dataframe['ClassCBO'] == max(dataframe.ClassCBO)])
print("\n\nFor maximum History : ")
print(dataframe.loc[dataframe['ClassHistory'] == max(dataframe.ClassHistory)])
print("\n\nLargest 5 TotalScores")
print(dataframe.TotalScore.nlargest(n=5))
#print("\n\n")
### Plot Graph
# Histogram of scores rounded to the nearest integer.
TotalScore, counts = np.unique(ar=(dataframe.TotalScore+0.5).astype(int), return_counts=True)
print("No of unique TotalScore range values : {}".format(len(TotalScore)))
print("Max Total Score : {}".format(max(dataframe.TotalScore)))
# NOTE(review): unused; on Python 3 zip() is lazy, so this is an iterator.
TotalMtx = zip(TotalScore, counts)
plt.figure(2)
plt.plot(TotalScore, counts)
plt.xlabel('Total score')
plt.ylabel('No. of classes')
plt.title('Distribution curve of Total score')
plt.savefig('dist_totalScore.png')
plt.show()
# coding=utf-8
from django.conf.urls import url
from . import views
# URL routes for this app.  Every pattern is anchored with ^...$ so a URL
# must match exactly (Django's url() uses re.search under the hood).
urlpatterns = [
    url(r'^$', views.index_views),
    url(r'^login/$', views.login_views),
    url(r'^register/$', views.register_views),
    url(r'^01-temp/$', views.temp_views),
    # BUG FIX: this pattern was missing the ^ and $ anchors, so it matched
    # any URL merely containing '02-var1/', unlike its siblings.
    url(r'^02-var1/$', views.var_views),
    url(r'^03-var2/$', views.var2_views),
    url(r'^04-tag/$', views.tag_views),
    url(r'^05-static/$', views.static_views),
]
#!/usr/bin/env python
#
# Copyright (c), 2018-2021, SISSA (International School for Advanced Studies).
# All rights reserved.
# This file is distributed under the terms of the MIT License.
# See the file 'LICENSE' in the root directory of the present
# distribution, or http://opensource.org/licenses/MIT.
#
# @author Davide Brunato <brunato@sissa.it>
#
import unittest
from unittest.mock import patch
import io
import xml.etree.ElementTree as ElementTree
from elementpath.xpath_nodes import AttributeNode, TextNode, TypedAttribute, \
TypedElement, NamespaceNode, is_etree_element, etree_iter_strings, \
etree_deep_equal, match_element_node, match_attribute_node, is_comment_node, \
is_document_node, is_processing_instruction_node, node_attributes, node_base_uri, \
node_document_uri, node_children, node_nilled, node_kind, node_name, \
etree_iter_nodes, etree_iter_paths
class DummyXsdType:
    """Minimal stand-in for an XSD type object.

    Every hook is a no-op returning None; tests override individual methods
    with unittest.mock.patch.multiple as needed.
    """

    name = local_name = None

    def is_matching(self, name, default_namespace):
        pass

    def is_empty(self):
        pass

    def is_simple(self):
        pass

    def has_simple_content(self):
        pass

    def has_mixed_content(self):
        pass

    def is_element_only(self):
        pass

    def is_key(self):
        pass

    def is_qname(self):
        pass

    def is_notation(self):
        pass

    def decode(self, obj, *args, **kwargs):
        pass

    def validate(self, obj, *args, **kwargs):
        pass
class XPathNodesTest(unittest.TestCase):
    """Tests for the node helper functions and wrapper classes
    (AttributeNode, TextNode, TypedAttribute, TypedElement, NamespaceNode)
    exported by elementpath.xpath_nodes."""

    # Shared minimal element with a single attribute, reused by several tests.
    elem = ElementTree.XML('<node a1="10"/>')

    def test_is_etree_element_function(self):
        """is_etree_element: True for Element, False for str and None."""
        self.assertTrue(is_etree_element(self.elem))
        self.assertFalse(is_etree_element('text'))
        self.assertFalse(is_etree_element(None))

    def test_elem_iter_nodes_function(self):
        """etree_iter_nodes yields elements and text nodes in document order;
        attributes only with with_attributes=True; comments are skipped."""
        root = ElementTree.XML('<A>text1\n<B1 a="10">text2</B1><B2/><B3><C1>text3</C1></B3></A>')
        result = [root, TextNode('text1\n', root),
                  root[0], TextNode('text2', root[0]), root[1],
                  root[2], root[2][0], TextNode('text3', root[2][0])]
        self.assertListEqual(list(etree_iter_nodes(root)), result)
        self.assertListEqual(list(etree_iter_nodes(root, with_root=False)), result[1:])
        with patch.multiple(DummyXsdType, has_mixed_content=lambda x: True):
            xsd_type = DummyXsdType()
            typed_root = TypedElement(root, xsd_type, 'text1')
            self.assertListEqual(list(etree_iter_nodes(typed_root)), result)
        # Attribute nodes appear right after their owner element's text node.
        result = result[:4] + [AttributeNode('a', '10', root[0])] + result[4:]
        self.assertListEqual(list(etree_iter_nodes(root, with_attributes=True)), result)
        comment = ElementTree.Comment('foo')
        root[1].append(comment)
        # Comments are not yielded, so the expected sequence is unchanged.
        self.assertListEqual(list(etree_iter_nodes(root, with_attributes=True)), result)

    def test_elem_iter_strings_function(self):
        """etree_iter_strings yields text then tails (depth-first); for a typed
        element with element-only content the extra whitespace is normalized."""
        root = ElementTree.XML('<A>text1\n<B1>text2</B1>tail1<B2/><B3><C1>text3</C1></B3>tail2</A>')
        result = ['text1\n', 'text2', 'tail1', 'tail2', 'text3']
        self.assertListEqual(list(etree_iter_strings(root)), result)
        with patch.multiple(DummyXsdType, has_mixed_content=lambda x: True):
            xsd_type = DummyXsdType()
            typed_root = TypedElement(elem=root, xsd_type=xsd_type, value='text1')
            self.assertListEqual(list(etree_iter_strings(typed_root)), result)
        norm_result = ['text1', 'text2', 'tail1', 'tail2', 'text3']
        with patch.multiple(DummyXsdType, is_element_only=lambda x: True):
            xsd_type = DummyXsdType()
            typed_root = TypedElement(elem=root, xsd_type=xsd_type, value='text1')
            self.assertListEqual(list(etree_iter_strings(typed_root)), norm_result)
            comment = ElementTree.Comment('foo')
            root[1].append(comment)
            # A comment does not contribute strings.
            self.assertListEqual(list(etree_iter_strings(typed_root)), norm_result)
        self.assertListEqual(list(etree_iter_strings(root)), result)

    def test_etree_deep_equal_function(self):
        """etree_deep_equal compares tag, text, tail, attributes and children
        recursively; namespace declarations alone do not break equality."""
        root = ElementTree.XML('<A><B1>10</B1><B2 max="20"/>end</A>')
        self.assertTrue(etree_deep_equal(root, root))
        elem = ElementTree.XML('<A><B1>11</B1><B2 max="20"/>end</A>')
        self.assertFalse(etree_deep_equal(root, elem))
        elem = ElementTree.XML('<A><B1>10</B1>30<B2 max="20"/>end</A>')
        self.assertFalse(etree_deep_equal(root, elem))
        elem = ElementTree.XML('<A xmlns:ns="tns"><B1>10</B1><B2 max="20"/>end</A>')
        self.assertTrue(etree_deep_equal(root, elem))
        elem = ElementTree.XML('<A><B1>10</B1><B2 max="20"><C1/></B2>end</A>')
        self.assertFalse(etree_deep_equal(root, elem))

    def test_match_element_node_function(self):
        """match_element_node: wildcard handling and malformed-name errors."""
        elem = ElementTree.Element('alpha')
        empty_tag_elem = ElementTree.Element('')
        self.assertTrue(match_element_node(elem))
        self.assertTrue(match_element_node(elem, '*'))
        self.assertFalse(match_element_node(empty_tag_elem, '*'))
        with self.assertRaises(ValueError):
            match_element_node(elem, '**')
        with self.assertRaises(ValueError):
            match_element_node(elem, '*:*:*')
        with self.assertRaises(ValueError):
            match_element_node(elem, 'foo:*')
        self.assertFalse(match_element_node(empty_tag_elem, 'foo:*'))
        self.assertFalse(match_element_node(elem, '{foo}*'))
        with patch.multiple(DummyXsdType, has_mixed_content=lambda x: True):
            xsd_type = DummyXsdType()
            typed_elem = TypedElement(elem=elem, xsd_type=xsd_type, value='text1')
            self.assertTrue(match_element_node(typed_elem, '*'))

    def test_match_attribute_node_function(self):
        """match_attribute_node: wildcards, typed attributes and namespaced names."""
        attr = AttributeNode('a1', '10', parent=None)
        self.assertTrue(match_attribute_node(attr, '*'))
        self.assertTrue(match_attribute_node(TypedAttribute(attr, None, 10), 'a1'))
        with self.assertRaises(ValueError):
            match_attribute_node(attr, '**')
        with self.assertRaises(ValueError):
            match_attribute_node(attr, '*:*:*')
        with self.assertRaises(ValueError):
            match_attribute_node(attr, 'foo:*')
        self.assertTrue(match_attribute_node(attr, '*:a1'))
        self.assertFalse(match_attribute_node(attr, '{foo}*'))
        self.assertTrue(match_attribute_node(AttributeNode('{foo}a1', '10'), '{foo}*'))
        attr = AttributeNode('{http://xpath.test/ns}a1', '10', parent=None)
        self.assertTrue(match_attribute_node(attr, '*:a1'))

    def test_is_comment_node_function(self):
        """is_comment_node: True only for ElementTree comment objects."""
        comment = ElementTree.Comment('nothing important')
        self.assertTrue(is_comment_node(comment))
        self.assertFalse(is_comment_node(self.elem))

    def test_is_document_node_function(self):
        """is_document_node: True for an ElementTree, False for an Element."""
        document = ElementTree.parse(io.StringIO('<A/>'))
        self.assertTrue(is_document_node(document))
        self.assertFalse(is_document_node(self.elem))

    def test_is_processing_instruction_node_function(self):
        """is_processing_instruction_node: True only for PI objects."""
        pi = ElementTree.ProcessingInstruction('action', 'nothing to do')
        self.assertTrue(is_processing_instruction_node(pi))
        self.assertFalse(is_processing_instruction_node(self.elem))

    def test_node_attributes_function(self):
        """node_attributes: the attrib dict for elements, None for non-elements."""
        self.assertEqual(node_attributes(self.elem), self.elem.attrib)
        self.assertIsNone(node_attributes('a text node'))

    def test_node_base_uri_function(self):
        """node_base_uri reads xml:base from elements and documents."""
        xml_test = '<A xmlns:xml="http://www.w3.org/XML/1998/namespace" xml:base="/" />'
        self.assertEqual(node_base_uri(ElementTree.XML(xml_test)), '/')
        document = ElementTree.parse(io.StringIO(xml_test))
        self.assertEqual(node_base_uri(document), '/')
        self.assertIsNone(node_base_uri(self.elem))
        self.assertIsNone(node_base_uri('a text node'))

    def test_node_document_uri_function(self):
        """node_document_uri: defined only for documents whose xml:base is an
        absolute path or a valid absolute URI."""
        self.assertIsNone(node_document_uri(self.elem))
        xml_test = '<A xmlns:xml="http://www.w3.org/XML/1998/namespace" xml:base="/root" />'
        document = ElementTree.parse(io.StringIO(xml_test))
        self.assertEqual(node_document_uri(document), '/root')
        xml_test = '<A xmlns:xml="http://www.w3.org/XML/1998/namespace" ' \
                   'xml:base="http://xpath.test" />'
        document = ElementTree.parse(io.StringIO(xml_test))
        self.assertEqual(node_document_uri(document), 'http://xpath.test')
        # Relative base -> no document URI.
        xml_test = '<A xmlns:xml="http://www.w3.org/XML/1998/namespace" xml:base="dir1/dir2" />'
        document = ElementTree.parse(io.StringIO(xml_test))
        self.assertIsNone(node_document_uri(document))
        # Malformed URI -> no document URI.
        xml_test = '<A xmlns:xml="http://www.w3.org/XML/1998/namespace" ' \
                   'xml:base="http://[xpath.test" />'
        document = ElementTree.parse(io.StringIO(xml_test))
        self.assertIsNone(node_document_uri(document))

    def test_attribute_nodes(self):
        """AttributeNode/TypedAttribute: repr, equality (parent identity
        matters) and as_item() tuple conversion."""
        parent = ElementTree.Element('element')
        attribute = AttributeNode('id', '0212349350')
        self.assertEqual(repr(attribute),
                         "AttributeNode(name='id', value='0212349350')")
        self.assertEqual(attribute, AttributeNode('id', '0212349350'))
        self.assertEqual(attribute.as_item(), ('id', '0212349350'))
        self.assertNotEqual(attribute.as_item(), AttributeNode('id', '0212349350'))
        self.assertNotEqual(attribute, AttributeNode('id', '0212349350', parent))
        attribute = AttributeNode('id', '0212349350', parent)
        self.assertEqual(attribute, AttributeNode('id', '0212349350', parent))
        self.assertEqual(attribute.as_item(), ('id', '0212349350'))
        self.assertNotEqual(attribute, AttributeNode('id', '0212349350'))
        # Equality compares parent by identity, not by value.
        self.assertNotEqual(attribute, AttributeNode('id', '0212349350',
                                                     parent=ElementTree.Element('element')))
        attribute = AttributeNode('value', '10', parent)
        self.assertEqual(repr(attribute)[:65],
                         "AttributeNode(name='value', value='10', parent=<Element 'element'")
        with patch.multiple(DummyXsdType, is_simple=lambda x: True):
            xsd_type = DummyXsdType()
            typed_attribute = TypedAttribute(attribute, xsd_type, 10)
            self.assertEqual(repr(typed_attribute), "TypedAttribute(name='value')")
            self.assertEqual(typed_attribute.as_item(), ('value', 10))
            # The xsd_type instance is not part of the equality comparison.
            self.assertEqual(typed_attribute, TypedAttribute(attribute, DummyXsdType(), 10))
            self.assertEqual(typed_attribute, TypedAttribute(attribute, None, 10))
            self.assertEqual(typed_attribute,
                             TypedAttribute(AttributeNode('value', '10', parent), xsd_type, 10))
            self.assertNotEqual(typed_attribute, TypedAttribute(attribute, xsd_type, '10'))
            self.assertNotEqual(typed_attribute,
                                TypedAttribute(AttributeNode('value', '10'), xsd_type, 10))

    def test_typed_element_nodes(self):
        """TypedElement repr shows only the wrapped element's tag."""
        element = ElementTree.Element('schema')
        with patch.multiple(DummyXsdType, is_simple=lambda x: True):
            xsd_type = DummyXsdType()
            typed_element = TypedElement(element, xsd_type, None)
            self.assertEqual(repr(typed_element), "TypedElement(tag='schema')")

    def test_text_nodes(self):
        """TextNode: equality (parent identity, tail flag) and repr; the tail
        flag is meaningful only when a parent is set."""
        parent = ElementTree.Element('element')
        self.assertEqual(TextNode('alpha'), TextNode('alpha'))
        self.assertEqual(TextNode('alpha', parent), TextNode('alpha', parent))
        self.assertEqual(TextNode('alpha', parent, tail=True),
                         TextNode('alpha', parent, tail=True))
        # tail is ignored when there is no parent.
        self.assertEqual(TextNode('alpha', tail=True), TextNode('alpha'))
        self.assertNotEqual(TextNode('alpha', parent), TextNode('alpha'))
        self.assertNotEqual(TextNode('alpha', parent, tail=True),
                            TextNode('alpha', parent))
        self.assertNotEqual(TextNode('alpha', parent),
                            TextNode('alpha', parent=ElementTree.Element('element')))  # != id()
        self.assertFalse(TextNode('alpha', parent).is_tail())
        self.assertTrue(TextNode('alpha', parent, tail=True).is_tail())
        self.assertFalse(TextNode('alpha', tail=True).is_tail())
        self.assertEqual(repr(TextNode('alpha')), "TextNode('alpha')")
        text = TextNode('alpha', parent)
        self.assertTrue(repr(text).startswith("TextNode('alpha', parent=<Element "))
        self.assertTrue(repr(text).endswith(", tail=False)"))
        text = TextNode('alpha', parent, tail=True)
        self.assertTrue(repr(text).endswith(", tail=True)"))

    def test_namespace_nodes(self):
        """NamespaceNode: repr, value, as_item() and parent-sensitive equality."""
        parent = ElementTree.Element('element')
        namespace = NamespaceNode('tns', 'http://xpath.test/ns')
        self.assertEqual(repr(namespace),
                         "NamespaceNode(prefix='tns', uri='http://xpath.test/ns')")
        self.assertEqual(namespace.value, 'http://xpath.test/ns')
        self.assertEqual(namespace, NamespaceNode('tns', 'http://xpath.test/ns'))
        self.assertEqual(namespace.as_item(), ('tns', 'http://xpath.test/ns'))
        self.assertNotEqual(namespace,
                            NamespaceNode('tns', 'http://xpath.test/ns', parent))
        namespace = NamespaceNode('tns', 'http://xpath.test/ns', parent)
        self.assertEqual(repr(namespace)[:81],
                         "NamespaceNode(prefix='tns', uri='http://xpath.test/ns', "
                         "parent=<Element 'element'")
        self.assertEqual(namespace, NamespaceNode('tns', 'http://xpath.test/ns', parent))
        self.assertEqual(namespace.as_item(), ('tns', 'http://xpath.test/ns'))
        self.assertNotEqual(namespace, NamespaceNode('tns', 'http://xpath.test/ns'))
        self.assertNotEqual(namespace, NamespaceNode('tns', 'http://xpath.test/ns',
                                                     parent=ElementTree.Element('element')))

    def test_node_children_function(self):
        """node_children: child elements for elements, the root for documents,
        None for non-node values."""
        self.assertListEqual(list(node_children(self.elem)), [])
        elem = ElementTree.XML("<A><B1/><B2/></A>")
        self.assertListEqual(list(node_children(elem)), [x for x in elem])
        document = ElementTree.parse(io.StringIO("<A><B1/><B2/></A>"))
        self.assertListEqual(list(node_children(document)), [document.getroot()])
        self.assertIsNone(node_children('a text node'))

    def test_node_nilled_function(self):
        """node_nilled reads the xsi:nil attribute."""
        xml_test = '<A xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:nil="true" />'
        self.assertTrue(node_nilled(ElementTree.XML(xml_test)))
        xml_test = '<A xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:nil="false" />'
        self.assertFalse(node_nilled(ElementTree.XML(xml_test)))
        self.assertFalse(node_nilled(ElementTree.XML('<A />')))
        self.assertFalse(node_nilled(TextNode('foo')))

    def test_node_kind_function(self):
        """node_kind maps each node variety to its XPath kind name; non-node
        values map to None."""
        document = ElementTree.parse(io.StringIO(u'<A/>'))
        element = ElementTree.Element('schema')
        attribute = AttributeNode('id', '0212349350')
        namespace = NamespaceNode('xs', 'http://www.w3.org/2001/XMLSchema')
        comment = ElementTree.Comment('nothing important')
        pi = ElementTree.ProcessingInstruction('action', 'nothing to do')
        text = TextNode('betelgeuse')
        self.assertEqual(node_kind(document), 'document-node')
        self.assertEqual(node_kind(element), 'element')
        self.assertEqual(node_kind(attribute), 'attribute')
        self.assertEqual(node_kind(namespace), 'namespace')
        self.assertEqual(node_kind(comment), 'comment')
        self.assertEqual(node_kind(pi), 'processing-instruction')
        self.assertEqual(node_kind(text), 'text')
        self.assertIsNone(node_kind(()))
        self.assertIsNone(node_kind(None))
        self.assertIsNone(node_kind(10))
        with patch.multiple(DummyXsdType, is_simple=lambda x: True):
            xsd_type = DummyXsdType()
            typed_attribute = TypedAttribute(attribute, xsd_type, '0212349350')
            self.assertEqual(node_kind(typed_attribute), 'attribute')
            typed_element = TypedElement(element, xsd_type, None)
            self.assertEqual(node_kind(typed_element), 'element')

    def test_node_name_function(self):
        """node_name returns the tag/name/prefix of a node, also through the
        typed wrappers; non-node values map to None."""
        elem = ElementTree.Element('root')
        attr = AttributeNode('a1', '20')
        namespace = NamespaceNode('xs', 'http://www.w3.org/2001/XMLSchema')
        self.assertEqual(node_name(elem), 'root')
        self.assertEqual(node_name(attr), 'a1')
        self.assertEqual(node_name(namespace), 'xs')
        self.assertIsNone(node_name(()))
        self.assertIsNone(node_name(None))
        with patch.multiple(DummyXsdType, is_simple=lambda x: True):
            xsd_type = DummyXsdType()
            typed_elem = TypedElement(elem=elem, xsd_type=xsd_type, value=10)
            self.assertEqual(node_name(typed_elem), 'root')
            typed_attr = TypedAttribute(attribute=attr, xsd_type=xsd_type, value=20)
            self.assertEqual(node_name(typed_attr), 'a1')

    def test_etree_iter_paths(self):
        """etree_iter_paths yields (element, path) pairs; duplicate sibling
        tags get positional predicates and comments are skipped."""
        root = ElementTree.XML('<a><b1><c1/><c2/></b1><b2/><b3><c3/></b3></a>')
        root[2].append(ElementTree.Comment('a comment'))
        root[2].append(ElementTree.Element('c3'))  # duplicated tag
        items = list(etree_iter_paths(root))
        self.assertListEqual(items, [
            (root, '.'), (root[0], './b1'), (root[0][0], './b1/c1'),
            (root[0][1], './b1/c2'), (root[1], './b2'), (root[2], './b3'),
            (root[2][0], './b3/c3[1]'), (root[2][2], './b3/c3[2]')
        ])
        self.assertListEqual(list(etree_iter_paths(root, path='')), [
            (root, ''), (root[0], 'b1'), (root[0][0], 'b1/c1'),
            (root[0][1], 'b1/c2'), (root[1], 'b2'), (root[2], 'b3'),
            (root[2][0], 'b3/c3[1]'), (root[2][2], 'b3/c3[2]')
        ])
        self.assertListEqual(list(etree_iter_paths(root, path='/')), [
            (root, '/'), (root[0], '/b1'), (root[0][0], '/b1/c1'),
            (root[0][1], '/b1/c2'), (root[1], '/b2'), (root[2], '/b3'),
            (root[2][0], '/b3/c3[1]'), (root[2][2], '/b3/c3[2]')
        ])
# Allow running this test module directly with `python <file>.py`.
if __name__ == '__main__':
    unittest.main()
|
# -*- coding: utf-8 -*-
import codecs
import os
import sys
from ltp_parse import LTP_parse
import time
import threading
# Initialize the LTP dependency parser once, at import time.
ltp_parser = LTP_parse()
# Module globals dict, used throughout this file to create dynamically named
# entries like names['self.<x>_word_id_dict'].
# NOTE(review): despite the 'self.' prefix, these are plain module-global
# keys, not instance attributes -- all Sub_Pre instances share them.
names = locals()
class Sub_Pre:
    """Mine subject/predicate word co-occurrences from LTP dependency parses.

    Word lists and per-category dictionaries are stored in the module-level
    `names` dict (see `names = locals()` above) under dynamically built keys
    such as 'self.<category>_word_id_dict'.
    NOTE(review): this locals()-based metaprogramming means state is module
    global and shared across instances -- confirm that only one Sub_Pre is
    ever created.
    """

    def __init__(self, file_dir, sub_list, pre_list):
        """Load subject/predicate dictionaries from files under *file_dir*.

        :param file_dir: directory containing one dictionary file per category
        :param sub_list: file names of the subject-word dictionaries
        :param pre_list: file names of the predicate-word dictionaries
        """
        #load dict
        sub_name = 'sub'
        pre_name = 'pre'
        # Module-global aliases for the two category lists.
        names['self.%s_list'%sub_name] = sub_list
        names['self.%s_list'%pre_name] = pre_list
        self.pre_list = pre_list
        # Merged word<->id dictionaries across all files of each category.
        self.sub_word_id_dict = self.mu_create_word_id_dict(file_dir, sub_list)
        self.sub_id_word_dict = self.mu_create_id_word_dict(file_dir, sub_list)
        self.pre_word_id_dict = self.mu_create_word_id_dict(file_dir, pre_list)
        self.pre_id_word_dict = self.mu_create_id_word_dict(file_dir, pre_list)
        # Per-file dictionaries, stored under dynamic module-global names.
        for sub in sub_list:
            sub_path = os.path.join(file_dir, sub)
            sub_name = sub
            names['self.%s_word_id_dict' %sub] = self.create_word_id_dict(sub_path)
            names['self.%s_raw_word_id_dict' %sub] = self.create_raw_word_id_dict(sub_path)
            names['self.%s_id_word_dict' %sub] = self.create_id_word_dict(sub_path)
        for pre in pre_list:
            pre_path = os.path.join(file_dir, pre)
            pre_name = pre
            names['self.%s_word_id_dict' %pre] = self.create_word_id_dict(pre_path)
            names['self.%s_raw_word_id_dict'%pre] = self.create_raw_word_id_dict(pre_path)
            names['self.%s_id_word_dict' %pre] = self.create_id_word_dict(pre_path)
        #res_file save result data
        # NOTE(review): uses the *last* sub/pre of the loops above.
        self.res_file_name = sub.split('_')[0] + '_' + pre.split('_')[0]
        #sub_pre dict
        sub_pre_name = 'sub_pre'
        pre_sub_name = 'pre_sub'
        # Co-occurrence accumulators (module-global, see class docstring).
        names['self.%s_dict'%sub_pre_name] = {}
        names['self.%s_dict'%pre_sub_name] = {}
        self.pre_sub_dict = {}
        self.pre_dict = {}
        self.sub_dict = {}

    def add_dicts(self,key1,key2,dicts):
        """Increment the nested counter dicts[key1][key2], creating missing
        levels; returns the (mutated) dict."""
        if key1 in dicts.keys() and key2 in dicts[key1].keys():
            dicts[key1][key2] += 1
        if key1 in dicts.keys() and key2 not in dicts[key1].keys():
            dicts[key1].update({key2:1})
        if key1 not in dicts.keys():
            dicts.update({key1:{key2:1}})
        return dicts

    def parse_dict(self, dicts, dict_name):
        """Split the accumulated co-occurrence counts by dictionary-file pair
        and write one sorted result file per pair under res_sub_pre/.

        :param dicts: nested counter dict (NOTE(review): apparently unused --
            the method reads names['self.<dict_name>_dict'] instead)
        :param dict_name: 'sub_pre' or 'pre_sub'
        """
        dict_name_list = dict_name.split('_')
        # One (word1-file, word2-file) result dict per file pair.
        for word1 in names['self.%s_list'%dict_name_list[0]]:
            for word2 in names['self.%s_list'%dict_name_list[1]]:
                names['%s_%s_dict' %(word1,word2)] = {}
        for key1,key2_value in names['self.%s_dict' %dict_name].items():
            #sorted larger to small
            #key1 is string ,key2_value is dict
            temp_key1 = ''
            temp_word1 = ''
            # Find which word1 dictionary file key1 belongs to.
            for word1 in names['self.%s_list' %dict_name_list[0]]:
                if key1 in names['self.%s_word_id_dict' %word1].keys():
                    temp_word1 = word1
                    temp_key1 = key1
            # Route each (key1, key2) count to its file-pair dict.
            for word2 in names['self.%s_list' %dict_name_list[1]]:
                for key2 ,val in key2_value.items():
                    if key2 in names['self.%s_word_id_dict' %word2].keys():
                        #print('sub:%s pre:%s val:%d' %(temp_key1,key2,val))
                        if temp_key1 in names['%s_%s_dict' %(temp_word1,word2)].keys():
                            names['%s_%s_dict' %(temp_word1,word2)][temp_key1].update({key2:val})
                        else:
                            names['%s_%s_dict' %(temp_word1,word2)].update({temp_key1:{key2:val}})
        # Emit each pair's counts, sorted by count descending.
        for word1 in names['self.%s_list' %dict_name_list[0]]:
            for word2 in names['self.%s_list' %dict_name_list[1]]:
                print('word1_word2:%s\t%s' %(word1,word2))
                word1_name = word1.split('_')[0]
                word2_name = word2.split('_')[0]
                f_name = word1_name + '_'+ word2_name
                # NOTE(review): assumes the res_sub_pre/ directory exists.
                save_f = open('res_sub_pre/%s' %f_name,'w')
                for key1,key2_value in names['%s_%s_dict' %(word1,word2)].items():
                    #print('key1:%s key2_val:%s' %(key1,key2_value))
                    sorted_word2 = sorted(key2_value.items(),key = lambda item:item[1],reverse = True)
                    num = 0
                    dict_num = 0
                    # Total occurrences of key1, taken from the matching side.
                    if dict_name == 'sub_pre':
                        if key1 in self.sub_dict.keys():
                            dict_num = self.sub_dict[key1]
                    else:
                        if key1 in self.pre_dict.keys():
                            dict_num = self.pre_dict[key1]
                    print("%s\t%d\t" %(key1,dict_num),end = '')
                    save_f.write("%s\t%d\t" %(key1,dict_num))
                    sys.stdout.flush()
                    for line in sorted_word2:
                        num_pre = 0
                        #if line[0] in names['self.%s_dict.keys()' %dict_name_list[1]]:
                        print('%s:%d' %(line[0],line[1]),end = '\t')
                        save_f.write('%s:%d\t'%(line[0], line[1]))
                        sys.stdout.flush()
                        num += 1
                    print('\n',end = '')
                    save_f.write('\n')
                print("~EOF:")
                save_f.close()

    def identify_words_depend(self,words1_dict,words2_dict,arcs):
        """Pair words from *words1_dict* with words from *words2_dict* whose
        dependency arc (SBV or ATT) points at them.

        NOTE(review): words_list is initialized as a dict but .append() is
        called on it, and `head_index in index2` applies `in` to an int index
        -- this method raises TypeError/AttributeError as soon as a SBV/ATT
        match occurs. Confirm intended behavior before relying on it.
        """
        words_list = {}
        for word1,index1 in words1_dict.items():
            depend = arcs[index1].relation
            head_index = arcs[index1].head - 1
            for word2,index2 in words2_dict.items():
                if ("SBV" == depend or "ATT" == depend) and head_index in index2:
                    words_set = [word1,word2]
                    words_list.append(words_set)
        return words_list

    #return dict
    '''
    words1_dict = {'word1':index1,'word2':index2,...,'wordn':indexn};
    words2_dict = {'word1':index1,'word2':index2,...,'wordn':indexn}.
    '''
    def find_words_in_dicts(self, words):
        """Split *words* (parsed token sequence) into subject-word and
        predicate-word positions; returns ({word: position}, {word: position})."""
        words1_dict = {}
        words2_dict = {}
        for i in range(len(words)):
            #find sub words
            if words[i] in self.sub_word_id_dict.keys():
                # NOTE(review): id_val is looked up but never used.
                id_val = self.sub_word_id_dict[words[i]]
                words1_dict[words[i]] = i
            #find pre words
            if words[i] in self.pre_word_id_dict.keys():
                words2_dict[words[i]] = i
        return words1_dict, words2_dict

    def main_function(self, des_dir):
        """Parse every description file in *des_dir* with LTP, report lines
        containing both a subject and a predicate word, then dump the
        accumulated co-occurrence dicts via parse_dict."""
        file_num = len(os.listdir(des_dir))
        count = 0
        #read all file
        for line in os.listdir(des_dir):
            # NOTE(review): count is never incremented, so this always prints 0.
            print('start file %d all file num:%d' %(count,file_num))
            des_path = os.path.join(des_dir,line)
            all_des = codecs.open(des_path, 'r' ,'utf-8')
            for des in all_des:
                # First tab-separated column is the free-text description.
                description = des.split('\t')[0]
                words, arcs = ltp_parser.des_parse(description)
                words1_dict, words2_dict = self.find_words_in_dicts(words)
                if len(words1_dict) != 0 and len(words2_dict) != 0:
                    print('---------------------------------------------')
                    print(des)
                    print('\t'.join(str(i + 1) for i in range(0, len(list(words)))))
                    print('\t'.join(word for word in list(words)))
                    print('\t'.join("%d:%s" % (arc.head, arc.relation) for arc in arcs))
                    print("find words1 words2 are: %s: %s" %(words1_dict, words2_dict))
                    res = self.identify_words_depend(words1_dict,words2_dict, arcs)
        sub_pre_name = 'sub_pre'
        pre_sub_name = 'pre_sub'
        self.parse_dict(names['self.%s_dict'%sub_pre_name],'sub_pre')
        self.parse_dict(names['self.%s_dict'%pre_sub_name],'pre_sub')

    def create_word_id_dict(self, inFile):
        """Map every whitespace-separated word in *inFile* to its 0-based
        line number (later lines overwrite duplicates)."""
        res_dict = {} #key is word,val is id
        temp_file = codecs.open(inFile, 'r', 'utf-8').readlines()
        for i in range(len(temp_file)):
            words = temp_file[i].split()
            for ele in words:
                res_dict[ele] = i
        return res_dict

    def create_raw_word_id_dict(self, inFile):
        """Map each whole stripped line of *inFile* to its 0-based line number."""
        res_dict = {} #key is word,val is id
        temp_file = codecs.open(inFile, 'r', 'utf-8').readlines()
        for i in range(len(temp_file)):
            words = temp_file[i].strip()
            res_dict[words] = i
        return res_dict

    def mu_create_word_id_dict(self,file_dir,file_list):
        """Merged word->id dict over several files; ids keep increasing
        across files so they are unique file-wide."""
        res_dict = {}
        dict_val = 0
        for file_name in file_list:
            file_path = os.path.join(file_dir,file_name)
            temp_file = codecs.open(file_path, 'r', 'utf-8').readlines()
            for line in temp_file:
                words = line.split()
                for ele in words:
                    res_dict[ele] = dict_val
                dict_val += 1
        return res_dict

    def create_id_word_dict(self, inFile):
        """Map 0-based line number -> stripped line content for *inFile*."""
        res_dict = {} #key is id, val is the word,val is a list
        temp_file = codecs.open(inFile, 'r', 'utf-8').readlines()
        i = 0
        for ele in temp_file:
            temp = ele.strip() #
            res_dict[i] = temp
            i += 1
        return res_dict

    def mu_create_id_word_dict(self, file_dir, file_list):
        """Merged id->line dict over several files, with ids continuing
        across file boundaries."""
        res_dict = {} #key is id, val is the word,val is a list
        dict_key = 0
        for file_name in file_list:
            file_path = os.path.join(file_dir, file_name)
            temp_file = codecs.open(file_path, 'r', 'utf-8').readlines()
            for ele in temp_file:
                temp = ele.strip() #
                res_dict[dict_key] = temp
                dict_key += 1
        return res_dict

    def delete_match(self):
        """Drop the sub_pre accumulator.

        NOTE(review): __init__ never assigns an attribute named sub_pre_dict
        (it only creates the module-global names['self.sub_pre_dict']), so
        this `del` raises AttributeError -- verify before calling.
        """
        del self.sub_pre_dict
# Script entry point: build Sub_Pre from the dictionary directory and mine
# the corpus directory. Paths and dictionary file names are hard-coded for
# the deployment environment.
if __name__ == '__main__':
    dict_file_dir = '/data/user/licheng/project/app/hospital_guide_robot/backends/nlp_licheng/dict.modify'
    #ori_file_dir = '/data/user/licheng/project/app/hospital_guide_robot/backends/nlp_licheng/ori'
    ori_file_dir = '/data/nlp/corpus.predict_disease/1.ori'
    # Subject-word and predicate-word dictionary file names.
    sub_list = ['organ_nlp','tissue_nlp','indicator_nlp','function_nlp','nutrition_nlp']
    pre_list = ['problem_nlp','appearance_nlp']
    sp = Sub_Pre(dict_file_dir, sub_list, pre_list)
    sp.main_function(ori_file_dir)
|
# Grid DP: read an n x m cost grid from stdin and print the minimum cost of
# a path from any top-row cell to any bottom-row cell, stepping one row down
# per move to column j-1, j or j+1, where the same move direction may not be
# used on two consecutive rows.
#
# Improvements over the original: uses the builtin min() instead of a
# hand-rolled `minn`, drops the unused extra DP row, removes a stray
# semicolon, and names the sentinel.
n, m = map(int, input().split())
grid = [list(map(int, input().split())) for _ in range(n)]

INF = 987654321

# dp[i][j][k]: minimum cost to reach cell (i, j), where k in {0, 1, 2}
# records which of the three moves entered row i (so that move cannot be
# repeated when leaving row i). Unreachable states keep their initial 0.
dp = [[[0] * 3 for _ in range(m)] for _ in range(n)]
for j in range(m):
    for k in range(3):
        dp[0][j][k] = grid[0][j]

for i in range(1, n):
    for j in range(m):
        if j == 0:  # left edge: no predecessor at j-1
            dp[i][j][1] = dp[i - 1][j + 1][0] + grid[i][j]
            dp[i][j][2] = min(dp[i - 1][j][1], dp[i - 1][j + 1][0]) + grid[i][j]
        elif j == m - 1:  # right edge: no predecessor at j+1
            dp[i][j][0] = min(dp[i - 1][j][1], dp[i - 1][j - 1][2]) + grid[i][j]
            dp[i][j][1] = dp[i - 1][j - 1][2] + grid[i][j]
        else:
            dp[i][j][0] = min(dp[i - 1][j - 1][2], dp[i - 1][j][1]) + grid[i][j]
            dp[i][j][1] = min(dp[i - 1][j - 1][2], dp[i - 1][j + 1][0]) + grid[i][j]
            dp[i][j][2] = min(dp[i - 1][j][1], dp[i - 1][j + 1][0]) + grid[i][j]

# Skip zero entries: those are unreachable states left at their initial 0.
# NOTE(review): a genuinely zero-cost path would also be skipped; the
# original code had the same limitation, preserved here.
ans = INF
for j in range(m):
    for k in range(3):
        if dp[n - 1][j][k] != 0 and dp[n - 1][j][k] < ans:
            ans = dp[n - 1][j][k]
print(ans)
|
# This approach was largely unsuccessful.
# The problem likely lies in training an AI to classify sentences using only positive or negative word lexicons.
# Refer to sentiment1.py for the working version.
# Train NaiveBayes classifier from text files
import nltk
import os
import sys
# from senticnet4 import senticnet
# print(senticnet['a_little'][7])
def main():
    """Train a NaiveBayes sentiment classifier from the lexicon files in the
    corpus directory given on the command line, then classify one sentence
    typed by the user and print the label probabilities."""
    if len(sys.argv) != 2:
        sys.exit("Usage: python sentiment.py corpus")
    positives, negatives = load_data(sys.argv[1])

    # Vocabulary: every word that has an intensity score in either lexicon.
    words = set(positives[0].keys()) | set(negatives[0].keys())

    # Labelled training samples built from both lexicons.
    training = (generate_features(positives[0], words, "Positive")
                + generate_features(negatives[0], words, "Negative"))

    classifier = nltk.NaiveBayesClassifier.train(training)

    sentence = input("sentence: ")
    distribution = classify(classifier, sentence, words, positives[0], negatives[0])
    for label in distribution.samples():
        print(f"{label}: {distribution.prob(label):.4f}")
    classifier.show_most_informative_features(5)
def extract_words(document):
    """Return the set of lower-cased tokens of *document* that contain at
    least one alphabetic character (drops pure punctuation/numbers)."""
    tokens = nltk.word_tokenize(document)
    words = set()
    for token in tokens:
        if any(ch.isalpha() for ch in token):
            words.add(token.lower())
    return words
def load_data(directory):
    """Read the word-intensity lexicons from *directory*.

    Each of positives.txt / negatives.txt contains one "word,intensity" pair
    per line. Returns [positive, negative] where each element is a
    one-element list holding a {word: float(intensity)} dict (callers index
    the inner list with [0]).

    Bug fixed: the original appended the shared `scores` dict to the list
    once per *line*, producing a list of N references to the same dict; it
    is now appended exactly once per file.
    """
    result = []
    for filename in ["positives.txt", "negatives.txt"]:
        scores = {}
        with open(os.path.join(directory, filename)) as f:
            for line in f.read().splitlines():
                word, intensity = line.rstrip('\n').split(',')
                scores[word] = float(intensity)
        result.append([scores])
    return result
def generate_features(documents, words, label):
    """Build one labelled feature dict per lexicon word.

    :param documents: {word: intensity} mapping
    :param words: unused here; kept for interface compatibility with callers
    :param label: class label ("Positive"/"Negative") attached to each sample
    :return: [({'score': intensity}, label), ...] suitable for
        nltk.NaiveBayesClassifier.train

    Improvement: removed the stray debug print of the first 20 features.
    """
    return [({'score': documents[word]}, label) for word in documents]
def classify(classifier, document, words, positives, negatives):
    """Score *document* against the lexicons and return the classifier's
    probability distribution over labels.

    The score is the mean intensity of the document's words that appear in
    either lexicon (positives take precedence over negatives).

    Fixes: guards against ZeroDivisionError when the document contains no
    alphabetic tokens; removes the stray debug print of the score.
    """
    document_words = extract_words(document)
    score = 0
    for word in document_words:
        if word in positives:
            score += positives[word]
        elif word in negatives:
            score += negatives[word]
    if document_words:  # avoid ZeroDivisionError on an empty token set
        score = score / len(document_words)
    return classifier.prob_classify({'score': score})
# Script entry point: train from the corpus directory given on the command line.
if __name__ == "__main__":
    main()
# split sentence into n grams using nltk.ngrams
# Get trending topics from Twitter api
# Using twint search to get tweets
# Use nltk NaiveBayes classifier to classify each tweet
# Return the overall sentiment of the trend |
# make svr model
import pandas as pd
import numpy as np
from sklearn.model_selection import KFold
import itertools
import ast
import sys
import libsvm.svmutil as svmutil
import os
import concurrent.futures as cc
import functools
import math
from tqdm import tqdm
# Wrapper function, accept param dictionary
def init_train_matrix(param):
    """Load PBM intensity data and build core-centered (score, sequence) lists.

    param keys used: outdir, pbmdata (tab-separated file with an ID_REF
    index), column_id, column_train, normalize, width, corelist, corepos,
    logit. Returns the dict produced by gen_seqwcore (core -> list of
    [score, sequence]), with scores optionally min-max normalized and/or
    logit-transformed.
    """
    # make output directory if not exist
    if not os.path.exists(param["outdir"]):
        os.makedirs(param["outdir"])
    data = pd.read_csv(param["pbmdata"], sep="\t", index_col="ID_REF")
    df = pd.DataFrame(data[[param["column_id"],param["column_train"],"Sequence"]])
    if param["normalize"]:
        # Min-max scaling of the training column to [0, 1].
        maxval, minval = df[param["column_train"]].max(), df[param["column_train"]].min()
        df[param["column_train"]] = df[param["column_train"]].apply(lambda x : (x - minval) / (maxval-minval))
    bound_idxs = df[param["column_id"]].str.contains("Bound")
    # just take the bound column, ignore the negative control
    df = df[bound_idxs].reset_index()[[param["column_train"],"Sequence"]]
    cores_centered = gen_seqwcore(df.values.tolist(), param["width"], param["corelist"], corepos=param["corepos"])
    # do the logistic transformation
    if param['logit']:
        cores_centered = {k: [(logit_score(val),seq) for (val, seq) in cores_centered[k]] for k in cores_centered}
    return cores_centered
def write_result(core_params, cores_centered, user_param):
    """Pick the best parameter set per core, save its SVR model, and write
    predicted-vs-measured statistics to a log file.

    :param core_params: core -> list of {"params", "avg_scc", "avg_mse"} dicts
    :param cores_centered: core -> list of (score, sequence) training rows
    :param user_param: dict with tfname, width, outdir, numfold, kmers

    Bug fixed: the log previously contained the mojibake string "Rยฒ"
    (a mis-encoded "R²"); it now writes plain ASCII "R^2".
    """
    model_fname = '%s_w%s' % (user_param['tfname'], user_param['width'])
    outdir = "%s/" % user_param['outdir'] if user_param['outdir'] else ""
    param_log = ""
    pmlist = []
    for core in core_params:
        # Best parameter combination = highest average correlation (avg_scc).
        best_dict = max(core_params[core], key=lambda p: p["avg_scc"])
        # Re-run cross validation with the winner to get predicted vs measured.
        pm = pd.DataFrame(predict_kfold(best_dict['params'], rows=cores_centered[core],
                                        numfold=user_param['numfold'], kmers=user_param['kmers']))
        pm["core"] = core
        pmlist.append(pm)
        # Train the final model on all rows of this core and persist it.
        model = generate_svm_model(cores_centered[core], best_dict["params"], user_param['kmers'])
        svmutil.svm_save_model('%s%s_%s.model' % (outdir, model_fname, core), model)
        param_log += "%s: %s\n" % (core, str(best_dict))
    pm_df = pd.concat(pmlist)
    rsq_all = pm_df["measured"].corr(pm_df["predicted"]) ** 2
    param_log += "R^2: %s\n" % rsq_all  # fixed: was the mojibake "Rยฒ"
    with open("%s%s.log" % (outdir, model_fname), 'w') as f:
        f.write(param_log)
# -----------------
def sparse_to_dense(sparse_matrix, totalfeat):
    """Expand a {1-based feature index: value} sparse map into a dense list
    of *totalfeat* floats, with 0.0 for absent indices."""
    dense = []
    for idx in range(1, totalfeat + 1):
        dense.append(float(sparse_matrix[idx]) if idx in sparse_matrix else 0.0)
    return dense
def count_dense_feat(lenseq, kmers):
    """Total dense feature count for a sequence of length *lenseq*: each k in
    *kmers* contributes (lenseq + 1 - k) positions times 4**k possible k-mers."""
    total = 0
    for k in kmers:
        total += (lenseq + 1 - k) * (4 ** k)
    return total
def libsvm_generate_matrix(seqlist, kmers, dense=False):
    """Generate the k-mer presence matrix from a list of (score, sequence) pairs.

    :param seqlist: list of (score, sequence); all sequences of equal length
    :param kmers: list of k values to extract features for
    :param dense: if True, return dense float vectors instead of sparse dicts
    :return: (np.array of scores, np.array of feature rows)  # y, x

    Features are binary presence flags: every occurrence of a k-mer sets the
    same {feature index: 1} entry, regardless of position.
    NOTE(review): count_dense_feat sizes the dense vector with one slot per
    *position* per k-mer, which is larger than the sum(4**k) indices actually
    used -- the dense output is zero-padded; preserved from the original.

    Bug fixed: the dense feature count previously hard-coded [1, 2, 3]
    instead of using the *kmers* argument, so dense output was mis-sized for
    any other k-mer selection.
    """
    # Map every k-mer string to a 1-based feature index within its k group.
    kmer_dict = {}
    for k in kmers:
        lst = ["".join(n) for n in itertools.product('ACGT', repeat=k)]
        kmer_dict[k] = {kmer: v + 1 for v, kmer in enumerate(lst)}
    scores = []
    features = []
    total_dense_feat = count_dense_feat(len(seqlist[0][1]), kmers)  # fixed: was [1,2,3]
    for score, seq in seqlist:
        scores.append(score)
        feat_pos = 0  # offset of the current k group in the feature space
        feat_dict = {}
        for k in kmers:
            hop = len(kmer_dict[k])
            for i in range(len(seq) - k + 1):
                feat_dict[feat_pos + kmer_dict[k][seq[i:i + k]]] = 1
            feat_pos += hop
        features.append(sparse_to_dense(feat_dict, total_dense_feat) if dense else feat_dict)
    return np.array(scores), np.array(features)  # y, x
def gen_seqwcore(seqintensities, width, corelist, corepos="center"):
    """
    Generate core-centered sequence windows, taking the median intensity when
    the same window occurs multiple times.
    :param seqintensities: list of [intensity, sequence] pairs; all sequences
        are assumed to have the same length
    :param width: length of the output sequence window
    :param corelist: candidate core motifs; all must have the same length
    :param corepos: "left", "right" or "center" -- where the core sits inside
        the window
    :return: dict mapping core -> list of [score, sequence], each sequence of
        length *width* containing that core at the requested position
    """
    corelen = len(corelist[0])
    if not all(len(core) == corelen for core in corelist):
        raise ValueError('not all cores have same length!')
    core_dict = {}
    seqlen = len(seqintensities[0][1])
    # s1 = window start, c1 = core start, both relative to the full sequence;
    # the core is always anchored around the sequence midpoint.
    if corepos == "left":
        s1 = int(math.ceil(0.5 * seqlen) - 0.5 * corelen)
        c1 = s1
    elif corepos == "right":
        s1 = int(math.ceil(0.5 * seqlen) - width + 0.5 * corelen)
        c1 = s1 + width - corelen
    else: #center
        s1 = int(math.ceil(0.5 * seqlen) - 0.5 * width)
        c1 = int(math.ceil(0.5 * seqlen - 0.5 * corelen))
    spos = (s1, s1 + width)
    cpos = (c1, c1 + corelen)
    # process each core separately and make sure that the list is unique
    for core in set(corelist):
        seq_wcore = [(score, seq[spos[0]:spos[1]]) for score, seq in seqintensities if seq[cpos[0]:cpos[1]] == core]
        seq_core_df = pd.DataFrame(seq_wcore, columns = ['score', 'seq'])
        # Median over duplicate windows collapses replicate probes.
        agg = seq_core_df.groupby(['seq'], as_index=False).median()
        core_dict[core] = agg[["score","seq"]].values.tolist()
    return core_dict
def benchmark_kfold(param_dict, rows, numfold, kmers=[1, 2, 3]):
    """run_kfold wrapper that averages per-fold performance.

    :param param_dict: SVR parameter mapping (flag name -> value)
    :param rows: list of (score, sequence) training rows
    :param numfold: number of cross-validation folds
    :param kmers: k-mer sizes for feature generation
    :return: {"params": param_dict, "avg_scc": ..., "avg_mse": ...}

    Bug fixed: *kmers* was ignored -- run_kfold was always called with
    kmers=[1, 2, 3]; the argument is now passed through.
    """
    scc_list = []  # squared correlation coefficient, one per fold
    mse_list = []  # mean squared error, one per fold
    for fold in run_kfold(param_dict, rows, numfold, kmers=kmers):
        # svm_predict returns (labels, (accuracy, mse, scc), values).
        _p_label, (_acc, mse, scc), _p_val = fold["svmpred"]
        scc_list.append(scc)
        mse_list.append(mse)
    return {"params": param_dict,
            "avg_scc": sum(scc_list) / numfold,
            "avg_mse": sum(mse_list) / numfold}
def predict_kfold(param_dict, rows, numfold, kmers=[1, 2, 3]):
    """run_kfold wrapper that collects per-row predictions.

    :return: list of {"sequence", "measured", "predicted", "fold"} dicts.

    Bugs fixed: *kmers* is now forwarded to run_kfold (it was hard-coded to
    [1, 2, 3]); and the "fold" field now records the fold number -- the
    original's inner comprehension reused the loop variable `i`, so "fold"
    actually held the per-sequence index + 1 instead of the fold index.
    """
    dflist = []
    kf = run_kfold(param_dict, rows, numfold, kmers=kmers)
    for fold_idx in range(len(kf)):
        predicted = kf[fold_idx]["svmpred"][0]
        seqs = [row[1] for row in kf[fold_idx]["test"]]
        measured = [row[0] for row in kf[fold_idx]["test"]]
        dflist.extend({"sequence": seqs[j], "measured": measured[j],
                       "predicted": predicted[j], "fold": fold_idx + 1}
                      for j in range(len(seqs)))
    return dflist
def run_kfold(param_dict, rows, numfold, kmers=[1, 2, 3]):
    """Run k-fold cross validation of an epsilon-SVR over *rows*.

    Args:
        param_dict: dictionary mapping libsvm flag name to its value
        rows: input rows, list of (score, sequence)
        numfold: k for cross validation
        kmers: list of kmers, default [1,2,3]
    Return:
        list with one dict per fold: {"test": held-out rows,
        "svmpred": svm_predict result tuple}.

    Cleanup: removed the unused `foldidx` counter and corrected the docstring
    (there is no `benchmark` flag -- this function always returns per-fold
    prediction results).
    """
    kf = KFold(numfold, shuffle=True)
    # -s 3: epsilon-SVR, -b 1: probability estimates, -q: quiet mode
    param_str = "-s 3 -b 1 -q "
    param_str += " ".join(["-{} {}".format(k, v) for k, v in param_dict.items()])
    params = svmutil.svm_parameter(param_str)
    fold_results = []
    for train_idx, test_idx in kf.split(rows):
        train_list = [rows[i] for i in train_idx]
        test_list = [rows[i] for i in test_idx]
        y_train, x_train = libsvm_generate_matrix(train_list, kmers)
        y_test, x_test = libsvm_generate_matrix(test_list, kmers)
        train_prob = svmutil.svm_problem(y_train, x_train)
        model = svmutil.svm_train(train_prob, params)
        # y_test is passed so svm_predict can report accuracy/MSE/SCC.
        svmpred = svmutil.svm_predict(y_test, x_test, model, options="-q")
        fold_results.append({"test": test_list, "svmpred": svmpred})
    return fold_results
def test_param_comb(traindata, param_dict, numfold=10, kmers=[1,2,3]):
    """Benchmark one parameter combination for every core in `traindata`.

    Args:
        traindata: mapping of core name -> training rows for that core.
        param_dict: dictionary mapping param string to its value.
        numfold: k for cross validation.
        kmers: list of k-mer lengths, default [1,2,3].
    Return:
        mapping of core name -> benchmark_kfold result dict.
    """
    # (removed unused local `param_log`)
    run_params = {}
    for core in traindata:
        run_params[core] = benchmark_kfold(param_dict, rows=traindata[core],
                                           numfold=numfold, kmers=kmers)
    return run_params
def generate_svm_model(rows, param, kmers=[1,2,3]):
    """Train an epsilon-SVR model on all of `rows` and return it."""
    labels, features = libsvm_generate_matrix(rows, kmers, dense=True)
    problem = svmutil.svm_problem(labels, features)
    # -s 3: epsilon-SVR, -b 1: probability estimates, -q: quiet mode
    option_str = "-s 3 -b 1 -q "
    option_str += " ".join("-{} {}".format(name, value) for name, value in param.items())
    svm_params = svmutil.svm_parameter(option_str)
    return svmutil.svm_train(problem, svm_params)
def logit_score(p):
# f(x) = 1 / ( 1 + exp(-x) ) to obtain only values between 0 and 1.
p_use = 0.999 if p == 1 else p # to avoid division by zero
return np.log(p_use/(1.0-p_use))
def genmodel_gridsearch(cores_centered, user_param, numworkers):
    """Grid-search SVR hyperparameters for every core via parallel k-fold CV.

    Args:
        cores_centered: mapping of core name -> training rows for that core.
        user_param: dict with keys "grid" (param name -> list of candidate
            values), "numfold" (k for cross validation) and "kmers".
        numworkers: number of worker processes for the grid search.

    Side effects:
        Persists the per-core benchmark results via write_result();
        returns nothing.
    """
    # get all user params needed. We use this instead of direct input functions
    # to make integration with the SLURM version easier
    param_dict = user_param["grid"]
    numfold = user_param["numfold"]
    kmers = user_param["kmers"]
    # Make list of params for grid search
    params = list(param_dict.keys())
    # Cartesian product of the candidate values -> one param dict per combination
    combinations = itertools.product(*param_dict.values())
    combinations = [{params[i]:c[i] for i in range(len(c))} for c in combinations]
    # ----- RUNNING CROSS VALIDATION -----
    core_params = {}
    for core in cores_centered:
        print("Working for core: %s" % core)
        # Freeze rows/numfold/kmers so each worker only receives a param dict.
        benchmark_kfold_partial = functools.partial(benchmark_kfold, rows=cores_centered[core], numfold=numfold, kmers=kmers)
        with cc.ProcessPoolExecutor(max_workers = numworkers) as executor:
            # TODO: update input to combinations to dictionary
            run_params = list(tqdm(executor.map(benchmark_kfold_partial, combinations), total=len(combinations)))
        core_params[core] = run_params
    write_result(core_params, cores_centered, user_param)
def get_weight(libsvm_model, width, kmers=[1,2,3]):
    """Compute the primal weight vector of a trained (linear) libsvm model.

    w = sum_i coef_i * SV_i, with the support vectors expanded into the
    dense feature space.

    Args:
        libsvm_model: trained libsvm model (must provide get_SV/get_sv_coef).
        width: sequence width used to build the features.
        kmers: list of k-mer lengths used for feature generation.
    Return:
        1-D numpy array of per-feature weights.
    """
    lendense = count_dense_feat(width, kmers)
    # BUG FIX: the body previously referenced an undefined name `mdl`
    # instead of the `libsvm_model` parameter (NameError at runtime).
    dense_vectors = np.array([sparse_to_dense(v, lendense) for v in libsvm_model.get_SV()])
    coefs = np.transpose(np.array(libsvm_model.get_sv_coef()))
    w = np.dot(coefs, dense_vectors)
    return w[0]
def get_feature_imp(weights, width, kmers=[1,2,3]):
    """Rank features by the magnitude of their SVM weights.

    Args:
        weights: 1-D sequence of feature weights, ordered to match the
            enumeration below (for each k, for each position 1..width-k+1,
            for each k-mer over ACGT).
        width: sequence width.
        kmers: list of k-mer lengths. BUG FIX: this used to be read from a
            global `kmers`, raising NameError when no such global existed;
            it is now a parameter with the conventional default.
    Return:
        pandas.DataFrame with columns [feature, position, weight], sorted
        by absolute weight in descending order.
    """
    # (removed unused local `nucleotide`)
    comb_dict = {}
    for k in kmers:
        comb_dict[k] = ["".join(n) for n in itertools.product('ACGT', repeat=k)]
    features = []
    for k in comb_dict:
        for i in range(1, width - k + 2):
            for comb in comb_dict[k]:
                features.append([comb, i])
    df = pd.DataFrame(features, columns = ["feature", "position"])
    df['weight'] = weights
    # Order by |weight| descending; renamed to avoid shadowing builtin sorted().
    ranked = df.iloc[(-df['weight'].abs()).argsort()]
    return ranked
def explain_imp(impdf):
    """Print the total absolute weight per position, largest first."""
    frame = pd.DataFrame(impdf)[["position", "weight"]]
    frame["weight"] = frame["weight"].abs()
    per_position = (
        frame.groupby("position")[["weight"]]
        .sum()
        .sort_values("weight", ascending=False)
    )
    print(per_position)
|
# Resource keys for the picker UI icons.
# NOTE(review): the key format appears to be "<provider>:<flags>:<resource id>"
# — confirm against the code that resolves these keys.
# noinspection SpellCheckingInspection
PickerRightArrowIconKey = "00B2D882:00000000:38D63245CC037C1F" # type: str
# noinspection SpellCheckingInspection
PickerGearIconKey = "00B2D882:00000000:188D7F9F936DEAA6" # type: str
# noinspection SpellCheckingInspection
PickerLockIconKey = "00B2D882:00000000:144FA1139F00C553" # type: str
# noinspection SpellCheckingInspection
PickerResetIconKey = "00B2D882:00000000:4B1A7F7632C7823F" # type: str
|
from model import firebase_token as token
import web
import pyrebase
# Template renderer rooted at ./views/ (insert.html lives there).
render=web.template.render('./views/')
class Insert:
    """web.py handler for the insert page, backed by a Firebase 'agenda' table."""

    def GET(self):
        """Render the insert.html form; on failure return the error as a list."""
        try:
            return render.insert()
        except Exception as e:
            result = []
            result.append('error'+ str(e.args))
            return result

    def POST(self):
        """Read the submitted form and append a new record to 'agenda'."""
        try:
            # Firebase app initialized from the stored project config.
            firebase = pyrebase.initialize_app(token.firebaseConfig)
            # Database connection.
            db = firebase.database()
            # Form fields submitted by insert.html.
            data = web.input()
            nombre = str(data.nombre)
            email = str(data.email)
            print(email)
            print(nombre)
            # Next numeric id = highest existing key + 1.
            agenda = db.child("agenda").get()
            r = 0
            # BUG FIX: the loop variable used to shadow `agenda` itself, and
            # .each() can return None when the table is empty.
            entries = agenda.each()
            if entries:
                for entry in entries:
                    r = int(entry.key())
            r = r + 1
            print(r)
            send_data = {"nombre": nombre, "email": email}
            print(send_data)
            # Store the record under the computed id.
            db.child("agenda").child(r).set(send_data)
            return web.seeother('/list')
        except Exception as error:
            # BUG FIX: error.args[1] raised IndexError for single-argument
            # exceptions; report the exception itself instead.
            print("Error :{}".format(error))
from intuitlib.client import AuthClient
from quickbooks import QuickBooks
from quickbooks.objects.customer import Customer
from quickbooks.objects.account import Account
import json
# SECURITY NOTE(review): OAuth client credentials and refresh token are
# hard-coded below. They should be rotated and loaded from environment
# variables or a secrets manager before this script is shared or deployed.
CLIENT_ID = "ABGA9WMqhlrmpP39UxG5Q3H2227bLiXsIWVTeoq0UnQVW2B8fw"
CLIENT_SECRET = "risgBjg1RqjqK8AVDHnYmbwnY46vG0K45v6kG3FH"
# REDIRECT_URI = "https://developer.intuit.com/v2/OAuth2Playground/RedirectUrl"
REDIRECT_URI = "https://sandbox-quickbooks.api.intuit.com/v3"
REFRESH_TOKEN = "AB11584091105kCAQoexKsxQzQ6XGgrhDRMUl2TX5cPoowTTXa"
# OAuth2 client for the Intuit sandbox environment.
auth_client = AuthClient(
    client_id= CLIENT_ID,
    client_secret=CLIENT_SECRET,
    environment= 'sandbox',
    redirect_uri= REDIRECT_URI,
)
# QuickBooks API client bound to a single sandbox company.
client = QuickBooks(
    auth_client=auth_client,
    refresh_token= REFRESH_TOKEN ,
    company_id='4620816365025351930'
)
# Smoke test: count the Account objects in the sandbox company.
account = Account.count(qb=client)
# print(account)
# res = json.loads(account)
# for r in account:
#     print(r)
print(account)
# account = Account.get(1, qb=client)
# account = Account.all(1, qb=client)
# json_data = account.to_json()
# print(json_data)
# customers = Customer.query("SELECT * FROM Customer WHERE Active = True", qb=client)
# print(customers)
# print(client.company_id)
|
from typing import Mapping, Optional, Type
import numpy as np
import pytest
from starfish import Log
from starfish.image import Filter
from starfish.types import ArrayLike, Axes, Coordinates, Number
from ..label_image import AttrKeys, CURRENT_VERSION, DOCTYPE_STRING, LabelImage
@pytest.mark.parametrize(
    "array, physical_ticks, log, expected_error",
    [
        # 3D label image
        [
            np.zeros((1, 1, 1), dtype=np.int32),
            {
                Coordinates.X: [0],
                Coordinates.Y: [0],
                Coordinates.Z: [0],
            },
            None,
            None,
        ],
        # 2D label image
        [
            np.zeros((1, 2), dtype=np.int32),
            {
                Coordinates.X: [0, 1],
                Coordinates.Y: [0],
            },
            None,
            None,
        ],
        # wrong dtype
        [
            np.zeros((1, 2), dtype=np.float32),
            {
                Coordinates.X: [0, 1],
                Coordinates.Y: [0],
            },
            None,
            TypeError,
        ],
        # missing some coordinates
        # BUG FIX: this case previously used float32, so the expected
        # KeyError could be masked by the wrong-dtype TypeError; use a
        # valid int32 dtype so the missing Y coordinate alone triggers it.
        [
            np.zeros((1, 2), dtype=np.int32),
            {
                Coordinates.X: [0, 1],
            },
            None,
            KeyError,
        ],
    ]
)
def test_from_array_and_coords(
        array: np.ndarray,
        physical_ticks: Mapping[Coordinates, ArrayLike[Number]],
        log: Optional[Log],
        expected_error: Optional[Type[Exception]],
):
    """Test that we can construct a LabelImage and that some common error conditions are caught."""
    if expected_error is not None:
        with pytest.raises(expected_error):
            LabelImage.from_label_array_and_ticks(array, None, physical_ticks, log)
    else:
        label_image = LabelImage.from_label_array_and_ticks(array, None, physical_ticks, log)
        assert isinstance(label_image.log, Log)
        assert label_image.xarray.attrs.get(AttrKeys.DOCTYPE, None) == DOCTYPE_STRING
        assert label_image.xarray.attrs.get(AttrKeys.VERSION, None) == str(CURRENT_VERSION)
def test_pixel_coordinates():
    """Verify that omitted pixel coordinates are filled in as 0..N-1."""
    labels = np.zeros((2, 3, 4), dtype=np.int32)
    pixel_ticks = {
        Axes.X: [2, 3, 4, 5],
        Axes.ZPLANE: [0, 1],
    }
    physical_ticks = {
        Coordinates.X: [0, 0.5, 1.0, 1.5],
        Coordinates.Y: [0, 0.2, 0.4],
        Coordinates.Z: [0, 0.1],
    }
    image = LabelImage.from_label_array_and_ticks(
        labels, pixel_ticks, physical_ticks, None)
    coords = image.xarray.coords
    assert np.array_equal(coords[Axes.X.value], [2, 3, 4, 5])
    # Y was not provided, so it should default to 0..N-1.
    assert np.array_equal(coords[Axes.Y.value], [0, 1, 2])
    assert np.array_equal(coords[Axes.ZPLANE.value], [0, 1])
def test_coordinates_key_type():
    """String keys (instead of the Axes/Coordinates enums) must be accepted."""
    labels = np.zeros((2, 3, 4), dtype=np.int32)
    pixel_ticks = {
        Axes.X.value: [2, 3, 4, 5],
        Axes.Y.value: [0, 1, 2],
        Axes.ZPLANE.value: [0, 1],
    }
    physical_ticks = {
        Coordinates.X.value: [0, 0.5, 1.0, 1.5],
        Coordinates.Y.value: [0, 0.2, 0.4],
        Coordinates.Z.value: [0, 0.1],
    }
    image = LabelImage.from_label_array_and_ticks(
        labels, pixel_ticks, physical_ticks, None)
    # every tick array must round-trip under its string name
    for name, expected in {**pixel_ticks, **physical_ticks}.items():
        assert np.array_equal(image.xarray.coords[name], expected)
def test_save_and_load(tmp_path):
    """Round-trip a label image through netcdf and compare data and attrs."""
    labels = np.zeros((2, 3, 4), dtype=np.int32)
    pixel_ticks = {
        Axes.X: [2, 3, 4, 5],
        Axes.ZPLANE: [0, 1],
    }
    physical_ticks = {
        Coordinates.X: [0, 0.5, 1.0, 1.5],
        Coordinates.Y: [0, 0.2, 0.4],
        Coordinates.Z: [0, 0.1],
    }
    # Populate the provenance log with a filter entry (the particular filter
    # is irrelevant here; we only need a non-trivial log to round-trip).
    log = Log()
    log.update_log(Filter.Reduce((Axes.ROUND,), func="max"))
    original = LabelImage.from_label_array_and_ticks(
        labels, pixel_ticks, physical_ticks, log)
    path = tmp_path / "label_image.netcdf"
    original.to_netcdf(path)
    restored = LabelImage.open_netcdf(path)
    assert original.xarray.equals(restored.xarray)
    assert original.xarray.attrs == restored.xarray.attrs
|
"""Eine Sammlung von Verschlรผsselungsfunktionen"""
def text_verschluesseln(text):
    """Encrypt the given text and return the ciphertext.

    The text is reversed, then every 'e' becomes '#' and every 'a'
    becomes '?'.
    """
    reversed_text = "".join(reversed(text))
    return reversed_text.replace("e", "#").replace("a", "?")
def text_entschluesseln(text):
    """Decrypt text produced by text_verschluesseln and return the plaintext.

    Reverses the text, then maps '#' back to 'e' and '?' back to 'a'.
    """
    plain = text[::-1]
    for cipher_char, plain_char in (("#", "e"), ("?", "a")):
        plain = plain.replace(cipher_char, plain_char)
    return plain
# Human-readable description of the key used (in German): '#' corresponds
# to 'e', '?' corresponds to 'a', and the text is read backwards.
schluesselbeschreibung = "# entspricht e, ? entspricht a. Text rรผckwรคrts lesen!"
from datetime import date

# Read the birth year and classify the athlete into a swimming category
# based on the age reached during the current calendar year.
ano_nascimento = int(input('ANO DE NASCIMENTO?'))
idade = date.today().year - ano_nascimento
# The elif chain already guarantees the lower bounds, so only the upper
# bound of each bracket needs to be tested.
if idade <= 9:
    print('Vocรช vai competir em: INFANTIS ')
elif idade <= 14:
    print('Vocรช vai competir em: INICIADOS')
elif idade <= 19:
    print('Vocรช vai competir em: JUNIORES')
elif idade <= 20:
    print('Vocรช vai competir em: SENIORES')
else:
    print('Vocรช vai competir em: MASTERS')
|
# -*- coding: utf-8 -*-
# TODO: Rewrite combinatorically a + b - (a \and b)
def run_problem(n=1000, multiple_set={3, 5}):
    """Sum all integers in [0, n) divisible by any member of multiple_set."""
    return sum(
        value
        for value in range(n)
        if any(value % divisor == 0 for divisor in multiple_set)
    )
def any_divides(num, multiple_set):
    """Return True if num is divisible by at least one element of multiple_set."""
    return any(num % divisor == 0 for divisor in multiple_set)
if __name__ == '__main__':
    # Self-check against the known answer for n=10 (the Project Euler #1
    # worked example: 3 + 5 + 6 + 9 == 23).
    answer = run_problem(n=10, multiple_set={3, 5})
    if answer == 23:
        print('Correct!')
    else:
        print('Incorrect!')
|
from django.db.models.signals import post_save
from django.conf import settings
from django.db import models
from django.db.models import Sum
from django.shortcuts import reverse
from django_countries.fields import CountryField
# Two-letter code -> human-readable item category.
CATEGORY_CHOICES =(
    ('S','Shirt'),
    ('SW','Sport Wear'),
    ('OW','Outwear')
)
# One-letter code -> display label style (presumably Bootstrap contextual
# classes; confirm against the templates that render Item.label).
LABEL_CHOICES =(
    ('P','primary'),
    ('S','secondary'),
    ('D','danger')
)
# Discriminator for Address.address_type: billing vs shipping.
ADDRESS_CHOICES =(
    ('B', 'Billing'),
    ('S', 'Shipping'),
)
class UserProfile(models.Model):
    """One-to-one extension of the auth user with Stripe payment settings."""
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    # Stripe customer id; nullable, so presumably set on first payment — confirm.
    stripe_customer_id = models.CharField(max_length=50, blank=True, null=True)
    # Whether the user enabled one-click purchasing.
    one_click_purchasing = models.BooleanField(default=False)
    def __str__(self):
        return self.user.username
class Item (models.Model):
    """A product that can be added to a cart/order."""
    title = models.CharField(max_length=100)
    price = models.FloatField()
    # Optional discounted price; when set, it is used instead of `price`
    # (see OrderItem.get_final_price).
    discount_price = models.FloatField(blank=True,null=True)
    category = models.CharField(choices=CATEGORY_CHOICES, max_length=2)
    label = models.CharField(choices=LABEL_CHOICES, max_length=1)
    # Slug used in all the product URLs below.
    slug = models.SlugField()
    description = models.TextField()
    image = models.ImageField(upload_to = 'static_in_env')
    def __str__(self):
        return self.title
    def get_absolute_url(self):
        """URL of this item's product detail page."""
        return reverse("core:product",kwargs={
            'slug':self.slug
        })
    def get_add_to_cart_url(self):
        """URL that adds this item to the current user's cart."""
        return reverse("core:add-to-cart",kwargs={
            'slug':self.slug
        })
    def get_remove_from_cart_url(self):
        """URL that removes this item from the current user's cart."""
        return reverse("core:remove-from-cart",kwargs={
            'slug':self.slug
        })
class OrderItem(models.Model):
    """A quantity of one Item inside a user's cart or completed order."""
    user=models.ForeignKey(settings.AUTH_USER_MODEL,on_delete=models.CASCADE)
    # True once the containing order has been placed.
    ordered = models.BooleanField(default=False)
    item = models.ForeignKey(Item,on_delete=models.CASCADE)
    quantity = models.IntegerField(default=1)
    def __str__(self):
        return f"{self.quantity} of {self.item.title}"
    def get_total_item_price(self):
        """Line total at the regular price."""
        return self.quantity * self.item.price
    def get_total_discount_item_price(self):
        """Line total at the discounted price.

        NOTE(review): raises TypeError if item.discount_price is None —
        callers must only use this when a discount is set (get_final_price
        guards for that; get_amount_saved does not).
        """
        return self.quantity * self.item.discount_price
    def get_amount_saved (self):
        """Difference between regular and discounted line totals."""
        return self.get_total_item_price() - self.get_total_discount_item_price()
    def get_final_price(self):
        """Line total, preferring the discounted price when available."""
        if self.item.discount_price:
            return self.get_total_discount_item_price()
        else:
            return self.get_total_item_price()
class Order(models.Model):
    """A user's cart that becomes an order once `ordered` is set.

    Lifecycle flags: ordered -> being_delivered -> received, with
    refund_requested/refund_granted tracking the refund flow.
    """
    user=models.ForeignKey(settings.AUTH_USER_MODEL,on_delete=models.CASCADE)
    # Short human-readable reference code assigned at checkout.
    ref_code = models.CharField(max_length =30,blank=True,null=True )
    items=models.ManyToManyField(OrderItem)
    start_date= models.DateTimeField(auto_now_add=True)
    ordered_date = models.DateTimeField()
    ordered = models.BooleanField(default=False)
    shipping_address = models.ForeignKey(
        'Address',related_name='shipping_address',on_delete=models.SET_NULL,blank=True,null=True)
    billing_address = models.ForeignKey(
        'Address',related_name='billing_address',on_delete=models.SET_NULL,blank=True,null=True)
    payment = models.ForeignKey(
        'Payment',on_delete=models.SET_NULL,blank=True,null=True)
    coupon = models.ForeignKey(
        'Coupon',on_delete=models.SET_NULL,blank=True,null=True)
    being_delivered = models.BooleanField(default=False)
    received = models.BooleanField(default=False)
    refund_requested = models.BooleanField(default=False)
    refund_granted = models.BooleanField(default=False)
    def __str__(self):
        return self.user.username
    def get_total(self):
        """Order total: sum of line totals, minus any coupon, floored at 1."""
        total =0
        for order_item in self.items.all():
            total += order_item.get_final_price()
        if self.coupon != None:
            total -= self.coupon.amount
            # A coupon can never push the charge below the 1-unit minimum.
            if total < 1:
                return 1
        return total
class Address (models.Model):
    """A billing or shipping address saved by a user."""
    user=models.ForeignKey(settings.AUTH_USER_MODEL,on_delete=models.CASCADE)
    street_address = models.CharField(max_length=100)
    apartment_address = models.CharField(max_length=100)
    country = CountryField(multiple = False)
    # NOTE(review): `zip` shadows the builtin, but it is a model field —
    # renaming it would require a migration and template/query updates.
    zip = models.CharField(max_length=100)
    # 'B' (billing) or 'S' (shipping); see ADDRESS_CHOICES.
    address_type =models.CharField(max_length=1,choices=ADDRESS_CHOICES)
    # Whether this is the user's default address for its type.
    default = models.BooleanField(default=False)
    def __str__(self):
        return self.user.username
    class Meta:
        verbose_name_plural = 'Addresses'
class Payment (models.Model):
    """A completed Stripe charge attached to an order."""
    # SET_NULL keeps the payment record even if the user is deleted.
    user=models.ForeignKey(settings.AUTH_USER_MODEL,on_delete=models.SET_NULL,blank=True,null=True)
    amount = models.FloatField()
    stripe_charge_id = models.CharField(max_length=50)
    timestamp = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        # NOTE(review): raises AttributeError when user is NULL (SET_NULL
        # above allows that) — confirm whether that case can occur in admin.
        return self.user.username
class Coupon (models.Model):
    """A discount code subtracted from the order total (see Order.get_total)."""
    code = models.CharField(max_length= 20)
    amount = models.FloatField()
    def __str__(self):
        return self.code
class Refound(models.Model):
    """A refund request for an order.

    NOTE(review): the name is presumably a misspelling of "Refund";
    renaming the model would require a migration, so it is kept as-is.
    """
    order = models.ForeignKey(Order, on_delete=models.CASCADE)
    reason = models.TextField()
    accepted = models.BooleanField(default=False)
    email = models.EmailField()
    def __str__(self):
        return f"{self.pk}"
def userprofile_receiver(sender, instance, created, *args, **kwargs):
    """post_save hook: create a UserProfile for every newly created user."""
    if created:
        # create() saves the profile; the return value was previously bound
        # to an unused local.
        UserProfile.objects.create(user=instance)
post_save.connect(userprofile_receiver, sender=settings.AUTH_USER_MODEL)
|
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Main entrypoint for containers with Kubeflow TFX component executors.
We cannot use the existing TFX container entrypoint for the following
reason: Say component A requires inputs from component B output O1 and
component C output O2. Now, the inputs to A is a serialized dictionary
containing O1 and O2. But we need Argo to combine O1 and O2 into the expected
dictionary of artifact/artifact_type types, which isn't possible. Hence, we
need each output from a component to be individual argo output parameters so
they can be passed into downstream components as input parameters via Argo.
TODO(ajaygopinathan): The input names below are hardcoded and can easily
diverge from the actual names and types expected by the underlying executors.
Look into how we can dynamically generate the required inputs.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import sys
from tfx.orchestration.kubeflow import executor_wrappers as wrappers
def main():
  """Parse the per-component CLI and run the selected TFX executor wrapper.

  One subcommand exists per supported component; each subcommand declares the
  inputs that component needs and binds the matching wrapper class as the
  `executor` default, which is then instantiated with the parsed args.
  """
  # Log to the container's stdout so Kubeflow Pipelines UI can display logs to
  # the user.
  logging.basicConfig(stream=sys.stdout, level=logging.INFO)
  # Arguments common to every component.
  parser = argparse.ArgumentParser()
  parser.add_argument('--exec_properties', type=str, required=True)
  parser.add_argument('--outputs', type=str, required=True)
  parser.add_argument('--executor_class_path', type=str, required=True)
  subparsers = parser.add_subparsers(dest='executor')
  subparser = subparsers.add_parser('CsvExampleGen')
  # NOTE(review): this is the only flag spelled with a hyphen
  # ('--input-base'); every other component uses underscores — confirm the
  # pipeline compiler emits the same spelling.
  subparser.add_argument('--input-base', type=str, required=True)
  subparser.set_defaults(executor=wrappers.CsvExampleGenWrapper)
  subparser = subparsers.add_parser('BigQueryExampleGen')
  subparser.set_defaults(executor=wrappers.BigQueryExampleGenWrapper)
  subparser = subparsers.add_parser('StatisticsGen')
  subparser.add_argument('--input_data', type=str, required=True)
  subparser.set_defaults(executor=wrappers.StatisticsGenWrapper)
  subparser = subparsers.add_parser('SchemaGen')
  subparser.add_argument('--stats', type=str, required=True)
  subparser.set_defaults(executor=wrappers.SchemaGenWrapper)
  subparser = subparsers.add_parser('ExampleValidator')
  subparser.add_argument('--stats', type=str, required=True)
  subparser.add_argument('--schema', type=str, required=True)
  subparser.set_defaults(executor=wrappers.ExampleValidatorWrapper)
  subparser = subparsers.add_parser('Transform')
  subparser.add_argument('--input_data', type=str, required=True)
  subparser.add_argument('--schema', type=str, required=True)
  subparser.set_defaults(executor=wrappers.TransformWrapper)
  subparser = subparsers.add_parser('Trainer')
  subparser.add_argument('--transformed_examples', type=str, required=True)
  subparser.add_argument('--transform_output', type=str, required=True)
  subparser.add_argument('--schema', type=str, required=True)
  subparser.set_defaults(executor=wrappers.TrainerWrapper)
  subparser = subparsers.add_parser('Evaluator')
  subparser.add_argument('--examples', type=str, required=True)
  subparser.add_argument('--model_exports', type=str, required=True)
  subparser.set_defaults(executor=wrappers.EvaluatorWrapper)
  subparser = subparsers.add_parser('ModelValidator')
  subparser.add_argument('--examples', type=str, required=True)
  subparser.add_argument('--model', type=str, required=True)
  subparser.set_defaults(executor=wrappers.ModelValidatorWrapper)
  subparser = subparsers.add_parser('Pusher')
  subparser.add_argument('--model_export', type=str, required=True)
  subparser.add_argument('--model_blessing', type=str, required=True)
  subparser.set_defaults(executor=wrappers.PusherWrapper)
  args = parser.parse_args()
  # args.executor is the wrapper class bound by set_defaults above.
  executor = args.executor(args)
  executor.run()
if __name__ == '__main__':
  main()
|
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.core.files import File
# Storage rooted at /media/photos.
# NOTE(review): the ImageFields below pass upload_to='fs' (a literal
# subdirectory name), not this storage object — confirm whether
# `storage=fs` was intended.
fs = FileSystemStorage(location='/media/photos')
class images(models.Model):
    """Content/style/output image triple (presumably for a style-transfer job)."""
    Title = models.CharField(max_length=10)
    content_image = models.ImageField(upload_to='fs', null=True, blank=True)
    style_image = models.ImageField(upload_to='fs', null=True, blank=True)
    output_image = models.ImageField(upload_to='fs', null=True, blank=True)
    def __str__(self):
        return '{}'.format(self.Title)
|
import libreria
def AgregarSubOpcionA():
    """Prompt for a university and its city, then append them to universidad.txt."""
    universidad = libreria.pedir_nombre("Ingrese universidad: ")
    ciudad = libreria.pedir_nombre("Ingrese ciudad de la universidad: ")
    # One record per line, fields separated by '-'.
    registro = "{}-{}\n".format(universidad, ciudad)
    libreria.guardar_datos("universidad.txt", registro, "a")
    print("universidad guardada")
def MostrarSubOpcionB():
    """Read universidad.txt and print each university with its city.

    BUG FIX: this previously called libreria.guardar_datos() (the writer)
    to read the file; it now uses libreria.obtener_datos(), consistent
    with MostrarSubOpcionC.
    """
    datos = libreria.obtener_datos("universidad.txt")
    # Only iterate when the file actually contained data.
    if datos != "":
        for item in datos:
            universidad, ciudad = item.split("-")
            msg = "{} y pertenece a la ciuda de {}"
            universidad = universidad.replace("\n", "")
            ciudad = ciudad.replace("\n", "")
            print(msg.format(universidad, ciudad))
    else:
        print("No hay datos")
def AgregarSubOpcionC():
    """Prompt for a name and a yearly exam count, then append them to universidad.txt."""
    nombre = libreria.pedir_nombre("Ingrese nombre: ")
    nro_de_examenes = libreria.pedir_numero("Ingrese nro de examenes: ", 0, 999999999)
    # One record per line, fields separated by '-'.
    registro = nombre + "-" + str(nro_de_examenes) + "\n"
    libreria.guardar_datos("universidad.txt", registro, "a")
    print("guardado con exito")
def MostrarSubOpcionC():
    """Read universidad.txt and print each name with its yearly exam count."""
    datos = libreria.obtener_datos("universidad.txt")
    # Only iterate when the file actually contained data.
    if datos != "":
        for item in datos:
            nombre, nro_de_examenes = item.split("-")
            msg = "{} su nuero de examnes al aรฑo es {}"
            nombre = nombre.replace("\n", "")
            # BUG FIX: this previously read from an undefined variable
            # `creditos` (NameError); strip the newline from the value itself.
            nro_de_examenes = nro_de_examenes.replace("\n", "")
            print(msg.format(nombre, nro_de_examenes))
    else:
        print("No hay datos")
def OpcionA():
    """Show the universities submenu and dispatch the chosen action."""
    print("##### universidades ########")
    print("#1. Agregar nombre #")
    print("#2. mostrar nombre #")
    print("#3. salir #")
    print("#############################")
    opcion = libreria.pedir_numero("ingrese opcion: ", 1, 3)
    if opcion == 1:
        AgregarSubOpcionA()
    elif opcion == 2:
        MostrarSubOpcionB()
    # option 3 falls through: return to the caller's menu
def OpcionB():
    """Show the exam-count submenu and dispatch the chosen action."""
    print("##### nro de examenes ########")
    print("#1. agregar datos #")
    print("#2. mostrar datos #")
    print("#3. salir #")
    print("#############################")
    opcion = libreria.pedir_numero("ingrese opcion: ", 1, 3)
    if opcion == 1:
        AgregarSubOpcionC()
    elif opcion == 2:
        MostrarSubOpcionC()
    # option 3 falls through: return to the caller's menu
# ---- main menu loop: repeat until the user picks the exit option ----
opc = ""
# BUG FIX: the sentinel was previously named `max`, shadowing the builtin.
MAX_OPCION = 3
while opc != MAX_OPCION:
    print("############ MENU #############")
    print("#1. universidad #")
    print("#2. nro de examnenes #")
    print("#3. Salir #")
    print("###############################")
    opc = libreria.pedir_numero("ingrese opcion: ", 1, 3)
    if opc == 1:
        OpcionA()
    if opc == 2:
        OpcionB()
print("fin")
|
import numpy as np
from numpy import *
import os, sys, argparse
import glob
import astropy
from astropy.io import fits
import astropy
from astropy.cosmology import Planck15 as cosmo
from joblib import Parallel, delayed
import scipy
from scipy.interpolate import UnivariateSpline
import yt
#This file will be used to store the profile of the momentum
def parse():
    '''
    Parse command line arguments.

    Returns a dict of option name -> value. The numeric options now carry
    type=int so values supplied on the command line arrive as integers
    (previously they arrived as strings while the defaults were ints).
    '''
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                     description='''\
                                Generate the cameras to use in Sunrise and make projection plots
                                of the data for some of these cameras. Then export the data within
                                the fov to a FITS file in a format that Sunrise understands.
                                ''')
    # NOTE(review): any non-empty string (even "False") is truthy here;
    # callers should treat this flag as present/absent — confirm usage.
    parser.add_argument('-run_parallel', '--run_parallel', default=False, help='Run parallel')
    parser.add_argument('-simname', '--simname', default='nref11n_nref10f', help='Simulation to be analyzed.')
    parser.add_argument('-snapname', '--snapname', default=None, help='Snapshot files to be analyzed.')
    parser.add_argument('-haloname', '--haloname', default='halo_008508', help='halo_name')
    parser.add_argument('-on_system', '--on_system', default='local', help='System being used (pfe or local)')
    # BUG FIX: these three previously had no type=int (string values from the
    # CLI) and ddmin/ddmax carried a copy-pasted 'halo_name' help text.
    parser.add_argument('-ddmin', '--ddmin', default=906, type=int, help='first output (DD) index to analyze')
    parser.add_argument('-ddmax', '--ddmax', default=907, type=int, help='last output (DD) index to analyze')
    parser.add_argument('-n_jobs', '--n_jobs', default=3, type=int, help='number of jobs')
    args = vars(parser.parse_args())
    return args
def recenter_2(amom):
    """Shift star/dark-matter positions and velocities to the halo frame.

    Assumes amom.cen_x/cen_y/cen_z are already set (unlike
    momentum_obj.recenter, which reads the center from galprops).
    Positions become offsets from the center; the bulk velocity is the
    mass-weighted mean of stars within 1 kpc of the center.
    Returns the mutated `amom`.
    """
    print 'Recentering...'
    #amom.cen_x, amom.cen_y, amom.cen_z = yt.YTArray(galprops['stars_center'][0], 'kpc')
    #amom.cen_x, amom.cen_y, amom.cen_z = yt.YTArray([ds.quan(0.4922914505, 'code_length'), ds.quan(0.482047080994, 'code_length'), ds.quan(0.504963874817, 'code_length')]).to('kpc')
    #amom.cen_x,
    # Positions relative to the halo center (kpc).
    amom.stars_x = amom.stars_x_box - amom.cen_x
    amom.stars_y = amom.stars_y_box - amom.cen_y
    amom.stars_z = amom.stars_z_box - amom.cen_z
    amom.stars_pos = array([amom.stars_x, amom.stars_y, amom.stars_z])
    amom.stars_pos_mag = sqrt(amom.stars_x**2. + amom.stars_y**2. + amom.stars_z**2.)
    amom.dark_x = amom.dark_x_box - amom.cen_x
    amom.dark_y = amom.dark_y_box - amom.cen_y
    amom.dark_z = amom.dark_z_box - amom.cen_z
    amom.dark_pos = array([amom.dark_x, amom.dark_y, amom.dark_z])
    amom.dark_pos_mag = sqrt(amom.dark_x**2. + amom.dark_y**2. + amom.dark_z**2.)
    #Determine the mass-weighted velocity of the stars in the inner 1 kpc
    stars_inner_1kpc = where(amom.stars_pos_mag < 1)[0]
    print len(stars_inner_1kpc)
    amom.cen_vx = np.average(amom.stars_vx_box[stars_inner_1kpc], weights = amom.star_mass[stars_inner_1kpc])
    amom.cen_vy = np.average(amom.stars_vy_box[stars_inner_1kpc], weights = amom.star_mass[stars_inner_1kpc])
    amom.cen_vz = np.average(amom.stars_vz_box[stars_inner_1kpc], weights = amom.star_mass[stars_inner_1kpc])
    # Velocities relative to the stellar bulk velocity (km/s).
    amom.stars_vx = amom.stars_vx_box - amom.cen_vx
    amom.stars_vy = amom.stars_vy_box - amom.cen_vy
    amom.stars_vz = amom.stars_vz_box - amom.cen_vz
    amom.stars_vel = array([amom.stars_vx, amom.stars_vy, amom.stars_vz])
    amom.stars_vel_mag = sqrt(amom.stars_vx**2. + amom.stars_vy**2. + amom.stars_vz**2.)
    amom.dark_vx = amom.dark_vx_box - amom.cen_vx
    amom.dark_vy = amom.dark_vy_box - amom.cen_vy
    amom.dark_vz = amom.dark_vz_box - amom.cen_vz
    amom.dark_vel = array([amom.dark_vx, amom.dark_vy, amom.dark_vz])
    amom.dark_vel_mag = sqrt(amom.dark_vx**2. + amom.dark_vy**2. + amom.dark_vz**2.)
    return amom
class momentum_obj():
    def __init__(self, simname, aname, snapfile, fits_name):
        """Load the snapshot with yt and record naming metadata.

        Args:
            simname: simulation name (e.g. 'nref11n_nref10f').
            aname: snapshot/output label.
            snapfile: path to the snapshot file to load.
            fits_name: output FITS file name for the derived profiles.
        """
        self.ds = yt.load(snapfile)
        self.simname = simname
        self.aname = aname
        self.snapfile = snapfile
        self.fits_name = fits_name
def load(self):
dd = self.ds.all_data()
def _stars(pfilter, data):
return data[(pfilter.filtered_type, "particle_type")] == 2
# these are only the must refine dark matter particles
def _darkmatter(pfilter, data):
return data[(pfilter.filtered_type, "particle_type")] == 4
yt.add_particle_filter("stars",function=_stars, filtered_type='all',requires=["particle_type"])
yt.add_particle_filter("darkmatter",function=_darkmatter, filtered_type='all',requires=["particle_type"])
self.ds.add_particle_filter('stars')
self.ds.add_particle_filter('darkmatter')
try:
print 'Loading stars particle indices...'
self.stars_id = dd['stars', 'particle_index']
assert self.stars_id.shape > 5
except AttributeError,AssertionError:
print "No star particles found, skipping: ", self.ds._file_amr
return
#self.stars_metallicity1 = dd['stars', 'particle_metallicity1']
#self.stars_metallicity2 = dd['stars', 'particle_metallicity2']
print 'Loading star velocities...'
self.stars_vx_box = dd['stars', 'particle_velocity_x'].in_units('km/s')
self.stars_vy_box = dd['stars', 'particle_velocity_y'].in_units('km/s')
self.stars_vz_box = dd['stars', 'particle_velocity_z'].in_units('km/s')
print 'Loading star positions...'
self.stars_x_box = dd['stars', 'particle_position_x'].in_units('kpc')
self.stars_y_box = dd['stars', 'particle_position_y'].in_units('kpc')
self.stars_z_box = dd['stars', 'particle_position_z'].in_units('kpc')
print 'Loading star mass...'
self.star_mass = dd['stars', 'particle_mass'].in_units('Msun')
print 'Loading star age...'
self.star_creation_time = dd['stars', 'creation_time'].in_units('yr')
self.star_age = self.ds.arr(cosmo.age(self.ds.current_redshift).value, 'Gyr').in_units('yr') - self.star_creation_time
print 'Loading dark matter particle indices...'
self.dark_id = dd['darkmatter', 'particle_index']
print 'Loading dark matter velocities...'
self.dark_vx_box = dd['darkmatter', 'particle_velocity_x'].in_units('km/s')
self.dark_vy_box = dd['darkmatter', 'particle_velocity_y'].in_units('km/s')
self.dark_vz_box = dd['darkmatter', 'particle_velocity_z'].in_units('km/s')
print 'Loading dark matter positions...'
self.dark_x_box = dd['darkmatter', 'particle_position_x'].in_units('kpc')
self.dark_y_box = dd['darkmatter', 'particle_position_y'].in_units('kpc')
self.dark_z_box = dd['darkmatter', 'particle_position_z'].in_units('kpc')
print 'Loading dark matter mass...'
self.dark_mass = dd['darkmatter', 'particle_mass'].in_units('Msun')
print 'Loading dark matter age...'
self.dark_creation_time = dd['darkmatter', 'creation_time'].in_units('yr')
self.dark_age = self.ds.arr(cosmo.age(self.ds.current_redshift).value, 'Gyr').in_units('yr') - self.dark_creation_time
if False:
print 'Loading gas velocity...'
self.gas_vx = dd['gas', 'velocity_x'].in_units('km/s')
self.gas_vy = dd['gas', 'velocity_y'].in_units('km/s')
self.gas_vz = dd['gas', 'velocity_z'].in_units('km/s')
print 'Loading gas cell position...'
self.gas_x = dd['gas', 'x'].in_units('kpc')
self.gas_y = dd['gas', 'y'].in_units('kpc')
self.gas_z = dd['gas', 'z'].in_units('kpc')
print 'Loading gas temperature...'
self.gas_temp = dd['gas', 'temperature']
print 'Loading gas cell mass...'
self.gas_mass = dd['gas', 'cell_mass']
print 'Finished loading...'
return 1
    def recenter(self, galprops):
        """Shift positions/velocities into the halo rest frame.

        The spatial center comes from galprops['stars_center']; the bulk
        velocity is the mass-weighted mean velocity of stars within 1 kpc
        of that center. Mutates self in place (no return value).
        """
        print 'Recentering...'
        self.cen_x, self.cen_y, self.cen_z = yt.YTArray(galprops['stars_center'][0], 'kpc')
        # Positions relative to the halo center (kpc).
        self.stars_x = self.stars_x_box - self.cen_x
        self.stars_y = self.stars_y_box - self.cen_y
        self.stars_z = self.stars_z_box - self.cen_z
        self.stars_pos = array([self.stars_x, self.stars_y, self.stars_z])
        self.stars_pos_mag = sqrt(self.stars_x**2. + self.stars_y**2. + self.stars_z**2.)
        self.dark_x = self.dark_x_box - self.cen_x
        self.dark_y = self.dark_y_box - self.cen_y
        self.dark_z = self.dark_z_box - self.cen_z
        self.dark_pos = array([self.dark_x, self.dark_y, self.dark_z])
        self.dark_pos_mag = sqrt(self.dark_x**2. + self.dark_y**2. + self.dark_z**2.)
        #Determine the mass-weighted velocity of the stars in the inner 1 kpc
        stars_inner_1kpc = where(self.stars_pos_mag < 1)
        self.cen_vx = np.average(self.stars_vx_box[stars_inner_1kpc], weights = self.star_mass[stars_inner_1kpc])
        self.cen_vy = np.average(self.stars_vy_box[stars_inner_1kpc], weights = self.star_mass[stars_inner_1kpc])
        self.cen_vz = np.average(self.stars_vz_box[stars_inner_1kpc], weights = self.star_mass[stars_inner_1kpc])
        # Velocities relative to the stellar bulk velocity (km/s).
        self.stars_vx = self.stars_vx_box - self.cen_vx
        self.stars_vy = self.stars_vy_box - self.cen_vy
        self.stars_vz = self.stars_vz_box - self.cen_vz
        self.stars_vel = array([self.stars_vx, self.stars_vy, self.stars_vz])
        self.stars_vel_mag = sqrt(self.stars_vx**2. + self.stars_vy**2. + self.stars_vz**2.)
        self.dark_vx = self.dark_vx_box - self.cen_vx
        self.dark_vy = self.dark_vy_box - self.cen_vy
        self.dark_vz = self.dark_vz_box - self.cen_vz
        self.dark_vel = array([self.dark_vx, self.dark_vy, self.dark_vz])
        self.dark_vel_mag = sqrt(self.dark_vx**2. + self.dark_vy**2. + self.dark_vz**2.)
    def calc_angular_momentum(self, ptype = 'stars'):
        """Compute per-particle specific angular momentum j = r x v.

        Requires recenter()/recenter_2() to have been called first so the
        relative position and velocity components exist. For ptype='gas'
        the gas fields must have been loaded (the gas section of load() is
        currently disabled). Sets <ptype>_j / <ptype>_j_mag on self and
        returns self.
        """
        print 'Calculating angular momentum for type: %s...'%ptype
        #Calculate momentum for stars
        if ptype == 'stars':
            # components of the cross product r x v
            self.stars_jx = self.stars_vz * self.stars_y - self.stars_z * self.stars_vy
            self.stars_jy = self.stars_vx * self.stars_z - self.stars_x * self.stars_vz
            self.stars_jz = self.stars_vy * self.stars_x - self.stars_y * self.stars_vx
            self.stars_j = array([self.stars_jx, self.stars_jy, self.stars_jz])
            self.stars_j_mag = sqrt(self.stars_jx**2. + self.stars_jy**2. + self.stars_jz**2.)
        if ptype =='darkmatter':
            self.dark_jx = self.dark_vz * self.dark_y - self.dark_z * self.dark_vy
            self.dark_jy = self.dark_vx * self.dark_z - self.dark_x * self.dark_vz
            self.dark_jz = self.dark_vy * self.dark_x - self.dark_y * self.dark_vx
            self.dark_j = array([self.dark_jx, self.dark_jy, self.dark_jz])
            self.dark_j_mag = sqrt(self.dark_jx**2. + self.dark_jy**2. + self.dark_jz**2.)
        if ptype == 'gas':
            #Calculate angular momentum for gas
            self.gas_jx = self.gas_vz * self.gas_y - self.gas_z * self.gas_vy
            self.gas_jy = self.gas_vx * self.gas_z - self.gas_x * self.gas_vz
            self.gas_jz = self.gas_vy * self.gas_x - self.gas_y * self.gas_vx
            self.gas_j = array([self.gas_jx, self.gas_jy, self.gas_jz])
            self.gas_j_mag = sqrt(self.gas_jx**2. + self.gas_jy**2. + self.gas_jz**2.)
        return self
def measure_potential(self, r_min = 0.1, r_step1 = 0.2, r_cen1 = 5, r_step2 = 1, r_cen2 = 15, r_step3 = 5, r_max = 200.):
    """Build an enclosed-mass profile M(<r) around the galaxy center and spline-fit it.

    Radii (kpc) are sampled finely near the center and coarsely outside:
    [r_min, r_cen1) in steps of r_step1, [r_cen1, r_cen2) in steps of r_step2,
    and [r_cen2, r_max) in steps of r_step3.  Stores the (radius, mass)
    samples in self.mass_profile and the fitted spline in self.spl, which
    measure_circularity() evaluates later.
    """
    print 'Measuring the potential...'
    center = self.ds.arr([self.cen_x, self.cen_y, self.cen_z], 'kpc')
    rad_steps = concatenate((arange(r_min, r_cen1, r_step1),
                             arange(r_cen1, r_cen2, r_step2),
                             arange(r_cen2, r_max, r_step3)))
    # Row 0: radius; row 1: total (gas cell + particle) mass inside that radius.
    self.mass_profile = zeros((2,len(rad_steps)))
    for i in arange(0,len(rad_steps)):
        print i, rad_steps[i], len(rad_steps)
        try:
            gc_sphere = self.ds.sphere(center, self.ds.arr(rad_steps[i],'kpc'))
            baryon_mass, particle_mass = gc_sphere.quantities.total_quantity(["cell_mass", "particle_mass"])
            self.mass_profile[0,i] = rad_steps[i]
            self.mass_profile[1,i] = baryon_mass + particle_mass
        except:
            # NOTE(review): bare except hides the real failure and records a
            # (0, 0) sample that the spline fit below will silently ingest.
            print '\tsomething broken in measure_potential..'
            self.mass_profile[0,i] = 0.
            self.mass_profile[1,i] = 0.
    # Smoothing spline of mass vs. radius, evaluated per particle later.
    self.spl = UnivariateSpline(self.mass_profile[0,:], self.mass_profile[1,:])
def measure_circularity(self, use_self = False):
    """Compute the orbit-circularity parameter epsilon = j_z / j_circ.

    j_circ is the specific angular momentum of a circular orbit at each
    particle's radius, v_circ * r with v_circ = sqrt(G M(<r) / r), where
    M(<r) comes from the spline built by measure_potential().  j_z is the
    projection of each particle's j onto the disk axis; both the measured
    axis (self.L_disk) and a fixed reference axis (self.L_disk_fixed) are
    used, producing *_fixed variants.  Gas is currently disabled
    (commented out).
    """
    print 'Calculating circularity...'
    # Gravitational constant in kpc^3 Msun^-1 s^-2 for the v_circ formula.
    G = yt.units.G.to('kpc**3*Msun**-1*s**-2')
    #internal_mass_gas = self.ds.arr(self.spl(self.gas_pos_mag),'g').in_units('Msun')
    #self.vcirc_gas = self.ds.arr(sqrt(G*internal_mass_gas/(self.gas_pos_mag)),'kpc/s').in_units('km/s')
    #self.jcirc_gas = self.vcirc_gas * self.gas_pos_mag
    # Circular-orbit velocity and angular momentum at each stellar radius.
    internal_mass_stars = self.ds.arr(self.spl(self.stars_pos_mag),'g').in_units('Msun')
    self.vcirc_stars = self.ds.arr(sqrt(G*internal_mass_stars/(self.stars_pos_mag)),'kpc/s').in_units('km/s')
    self.jcirc_stars = self.vcirc_stars * self.stars_pos_mag
    internal_mass_dark = self.ds.arr(self.spl(self.dark_pos_mag),'g').in_units('Msun')
    self.vcirc_dark = self.ds.arr(sqrt(G*internal_mass_dark/(self.dark_pos_mag)),'kpc/s').in_units('km/s')
    self.jcirc_dark = self.vcirc_dark * self.dark_pos_mag
    self.L_mag = sqrt(self.L_disk[0]**2.+self.L_disk[1]**2.+self.L_disk[2]**2.)
    self.L_mag_fixed = sqrt(self.L_disk_fixed[0]**2.+self.L_disk_fixed[1]**2.+self.L_disk_fixed[2]**2.)
    # cos(theta) between each stellar j vector and the disk axis.
    costheta_stars = np.dot(self.L_disk, self.stars_j)/(self.stars_j_mag*self.L_mag)
    costheta_stars_fixed = np.dot(self.L_disk_fixed, self.stars_j)/(self.stars_j_mag*self.L_mag_fixed)
    self.jz_stars = costheta_stars*self.stars_j_mag
    self.jz_stars_fixed = costheta_stars_fixed*self.stars_j_mag
    self.epsilon_stars = self.jz_stars/self.jcirc_stars
    self.epsilon_stars_fixed = self.jz_stars_fixed/self.jcirc_stars
    costheta_dark = np.dot(self.L_disk, self.dark_j)/(self.dark_j_mag*self.L_mag)
    costheta_dark_fixed = np.dot(self.L_disk_fixed, self.dark_j)/(self.dark_j_mag*self.L_mag_fixed)
    self.jz_dark = costheta_dark*self.dark_j_mag
    self.jz_dark_fixed = costheta_dark_fixed*self.dark_j_mag
    self.epsilon_dark = self.jz_dark/self.jcirc_dark
    self.epsilon_dark_fixed = self.jz_dark_fixed/self.jcirc_dark
    #costheta_gas = np.dot(self.L_disk, self.gas_j)/(self.gas_j_mag*self.L_mag)
    #self.jz_gas = costheta_gas*self.gas_j_mag
    #self.epsilon_gas = self.jz_gas/self.jcirc_gas
    #costheta_gas = np.dot(self.L_disk, self.gas_pos)/(self.gas_pos_mag*self.L_mag)
    #self.zz_gas = self.ds.arr(costheta_gas * self.gas_pos_mag, 'kpc')
    #self.rr_gas = sqrt(self.gas_pos_mag**2. - self.zz_gas**2.)
    #costheta_stars = np.dot(self.L_disk, self.stars_pos)/(self.stars_pos_mag*self.L_mag)
    #self.zz_stars = self.ds.arr(costheta_stars * self.stars_pos_mag, 'kpc')
    #self.rr_stars = sqrt(self.stars_pos_mag**2. - self.zz_stars**2.)
def gas_momentum_heatmap(self):
    """Build 2-D mass-weighted histograms of cold-gas circularity (mostly disabled).

    NOTE(review): everything after the first selection is inside a
    triple-quoted string (dead code), and self.rr_gas is only computed in
    commented-out lines of measure_circularity() — calling this method
    as-is will raise AttributeError; confirm before re-enabling.
    """
    print 'Measuring gas momentum profiles...'
    # Cold (< 1e4 K) gas within 30 kpc cylindrical radius of the disk plane.
    cold_gas_zz = where((abs(self.rr_gas) < 30) & (self.gas_temp < 1.e4))
    # Histogram bounds and binning used by the disabled heatmaps below.
    eps_min = -2.5
    eps_max = 2.5
    min_z = -10
    max_z = 10
    min_r = 0
    max_r = 30
    min_rad = 0
    max_rad = 100.
    bins_n = 200
    '''
    cold_gas_zz = where((abs(self.rr_gas) < max_r) & (self.gas_temp < 1.e4))
    weights = self.gas_mass[cold_gas_zz]
    self.cg_zz_heatmap, self.cg_zz_xedges, self.cg_zz_yedges = np.histogram2d(self.epsilon_gas[cold_gas_zz], self.zz_gas[cold_gas_zz],
                                                                              bins=[linspace(eps_min,eps_max,bins_n), linspace(min_z,max_z,bins_n)],
                                                                              weights = weights)
    cold_gas_rr = where((abs(self.zz_gas) < (max_z-min_z)/2.) & (self.gas_temp < 1.e4))
    weights = self.gas_mass[cold_gas_rr]
    print min_r, max_r
    self.cg_rr_heatmap, self.cg_rr_xedges, self.cg_rr_yedges = np.histogram2d(self.epsilon_gas[cold_gas_rr], self.rr_gas[cold_gas_rr],
                                                                              bins=[linspace(eps_min,eps_max,bins_n), linspace(min_r,max_r,bins_n)],
                                                                              weights = weights)
    print self.cg_rr_xedges.min()
    cold_gas = where(self.gas_temp < 1.e4)
    weights = self.gas_mass[cold_gas]
    self.cg_rad_heatmap, self.cg_rad_xedges, self.cg_rad_yedges = np.histogram2d(self.epsilon_gas[cold_gas], self.gas_pos_mag[cold_gas],
                                                                                 bins=[linspace(eps_min,eps_max,bins_n), linspace(min_rad,max_rad,bins_n)],
                                                                                 weights = weights)
    '''
def write_fits(self):
    """Serialize the computed momentum quantities to a multi-extension FITS file.

    Builds a primary header carrying the simulation name, scale factor and
    snapshot path, then appends one ImageHDU per quantity.  Several groups
    of HDUs are gated behind `if False:` — presumably disabled for the
    debug path in __main__ where circularity/potential/gas steps are
    skipped; re-enable with care.  Writes to self.fits_name (overwriting)
    and returns the assembled HDU list.
    """
    print '\tGenerating fits for %s...'%self.aname
    master_hdulist = []
    prihdr = fits.Header()
    prihdr['COMMENT'] = "Storing the momentum measurements in this FITS file."
    prihdr['simname'] = self.simname
    # aname looks like 'a0.330'; strip the leading 'a' to store the scale factor.
    prihdr['scale'] = self.aname.strip('a')
    prihdr['snapfile'] = self.snapfile
    prihdu = fits.PrimaryHDU(header=prihdr)
    master_hdulist.append(prihdu)
    colhdr = fits.Header()
    if False:
        # Disabled: requires L_disk / L_disk_fixed from measure_circularity().
        master_hdulist.append(fits.ImageHDU(data = self.L_disk , header = colhdr, name = 'net_angmomentum'))
        master_hdulist.append(fits.ImageHDU(data = self.L_disk_fixed , header = colhdr, name = 'net_angmomentum_fixed'))
    master_hdulist.append(fits.ImageHDU(data = self.stars_id , header = colhdr, name = 'stars_id'))
    master_hdulist.append(fits.ImageHDU(data = self.dark_id , header = colhdr, name = 'dark_id'))
    # Box-frame and galaxy-frame positions/velocities, stacked as (3, N).
    master_hdulist.append(fits.ImageHDU(data = np.stack((self.stars_x_box , self.stars_y_box , self.stars_z_box)) , header = colhdr, name = 'stars_box_position'))
    master_hdulist.append(fits.ImageHDU(data = np.stack((self.stars_vx_box , self.stars_vy_box , self.stars_vz_box)) , header = colhdr, name = 'stars_box_velocity'))
    master_hdulist.append(fits.ImageHDU(data = np.stack((self.stars_x , self.stars_y , self.stars_z)) , header = colhdr, name = 'stars_gal_position'))
    master_hdulist.append(fits.ImageHDU(data = np.stack((self.stars_vx , self.stars_vy , self.stars_vz)) , header = colhdr, name = 'stars_gal_velocity'))
    master_hdulist.append(fits.ImageHDU(data = np.stack((self.dark_x_box , self.dark_y_box , self.dark_z_box)) , header = colhdr, name = 'dark_box_position'))
    master_hdulist.append(fits.ImageHDU(data = np.stack((self.dark_vx_box , self.dark_vy_box , self.dark_vz_box)) , header = colhdr, name = 'dark_box_velocity'))
    master_hdulist.append(fits.ImageHDU(data = np.stack((self.dark_x , self.dark_y , self.dark_z)) , header = colhdr, name = 'dark_gal_position'))
    master_hdulist.append(fits.ImageHDU(data = np.stack((self.dark_vx , self.dark_vy , self.dark_vz)) , header = colhdr, name = 'dark_gal_velocity'))
    if False:
        # Disabled: requires calc_angular_momentum() + measure_circularity().
        master_hdulist.append(fits.ImageHDU(data = np.stack((self.stars_jx, self.stars_jy, self.stars_jz)) , header = colhdr, name = 'stars_gal_angmomentum'))
        master_hdulist.append(fits.ImageHDU(data = np.stack((self.dark_jx, self.dark_jy, self.dark_jz)) , header = colhdr, name = 'dark_gal_angmomentum'))
        master_hdulist.append(fits.ImageHDU(data = self.epsilon_stars , header = colhdr, name = 'stars_epsilon'))
        master_hdulist.append(fits.ImageHDU(data = self.epsilon_stars_fixed , header = colhdr, name = 'stars_epsilon_fixed'))
        master_hdulist.append(fits.ImageHDU(data = self.epsilon_dark , header = colhdr, name = 'dark_epsilon'))
        master_hdulist.append(fits.ImageHDU(data = self.epsilon_dark_fixed , header = colhdr, name = 'dark_epsilon_fixed'))
    master_hdulist.append(fits.ImageHDU(data = self.star_mass , header = colhdr, name = 'star_mass'))
    master_hdulist.append(fits.ImageHDU(data = self.star_age , header = colhdr, name = 'star_age'))
    master_hdulist.append(fits.ImageHDU(data = self.dark_mass , header = colhdr, name = 'dark_mass'))
    master_hdulist.append(fits.ImageHDU(data = self.dark_age , header = colhdr, name = 'dark_age'))
    if False:
        # Disabled: requires measure_potential().
        master_hdulist.append(fits.ImageHDU(data = self.mass_profile , header = colhdr, name = 'mass_profile'))
    if False:
        # save gas info — disabled: requires the gas pipeline (heatmaps etc.).
        master_hdulist.append(fits.ImageHDU(data = np.stack((self.cg_zz_xedges , self.cg_zz_yedges)) , header = colhdr, name = 'gas_zz_epsilon_edges'))
        master_hdulist.append(fits.ImageHDU(data = self.cg_zz_heatmap , header = colhdr, name = 'gas_zz_epsilon'))
        master_hdulist.append(fits.ImageHDU(data = np.stack((self.cg_rr_xedges , self.cg_rr_yedges)) , header = colhdr, name = 'gas_rr_epsilon_edges'))
        master_hdulist.append(fits.ImageHDU(data = self.cg_rr_heatmap , header = colhdr, name = 'gas_rr_epsilon'))
        master_hdulist.append(fits.ImageHDU(data = np.stack((self.cg_rad_xedges , self.cg_rad_yedges)) , header = colhdr, name = 'gas_rad_epsilon_edges'))
        master_hdulist.append(fits.ImageHDU(data = self.cg_rad_heatmap , header = colhdr, name = 'gas_rad_epsilon'))
        master_hdulist.append(fits.ImageHDU(data = np.stack((self.gas_x , self.gas_y , self.gas_z)) , header = colhdr, name = 'gas_xyz_position'))
        master_hdulist.append(fits.ImageHDU(data = np.stack((self.rr_gas, self.zz_gas)) , header = colhdr, name = 'gas_cylindrical_position'))
        master_hdulist.append(fits.ImageHDU(data = np.stack((self.gas_jx, self.gas_jy, self.gas_jz)) , header = colhdr, name = 'gas_momentum'))
        master_hdulist.append(fits.ImageHDU(data = self.epsilon_gas , header = colhdr, name = 'gas_epsilon'))
    #master_hdulist.append(fits.ImageHDU(data = self.gas_temp , header = colhdr, name = 'gas_temperature'))
    #master_hdulist.append(fits.ImageHDU(data = self.gas_mass , header = colhdr, name = 'gas_mass'))
    print '\tSaving to ' + self.fits_name
    thdulist = fits.HDUList(master_hdulist)
    # NOTE(review): `clobber` is the legacy astropy spelling of `overwrite`.
    thdulist.writeto(self.fits_name, clobber = True)
    return master_hdulist
def run_measure_momentum(haloname, simname, snapname, galprops, on_system = 'pfe'):
    """End-to-end momentum pipeline for one snapshot.

    Resolves the snapshot path for the given system ('pfe' = Pleiades,
    otherwise a local layout), loads each dataset, recenters on the galaxy,
    computes angular momenta, potential and circularity, and writes the
    results to a FITS file.  Returns the last momentum_obj processed.
    """
    if on_system == 'pfe':
        snaps = np.sort(np.asarray(glob.glob("/nobackupp2/mpeeples/%s/orig/%s/%s/%s"%(haloname, simname, snapname, snapname))))
        out_dir = '/nobackupp2/rcsimons/foggie_momentum/momentum_fits'
    else:
        print "/Volumes/gdrive/foggie/%s/nref11n/%s/%s/%s"%(haloname, simname, snapname, snapname)
        snaps = np.sort(np.asarray(glob.glob("/Volumes/gdrive/foggie/%s/nref11n/%s/%s/%s"%(haloname, simname, snapname, snapname))))
        out_dir = '/Users/rsimons/Dropbox/rcs_foggie/outputs'
    #snaps = np.sort(np.asarray(glob.glob("/Users/rsimons/Dropbox/rcs_foggie/data/%s/%s/%s/%s"%(haloname, simname, snapname, snapname))))
    #out_dir = '/Users/rsimons/Dropbox/rcs_foggie/outputs'
    # Fail fast if the snapshot or output directory is missing.
    assert os.path.lexists(snaps[0])
    assert os.path.lexists(out_dir)
    new_snapfiles = np.asarray(snaps)
    ts = yt.DatasetSeries(new_snapfiles)
    # Iterate newest-to-oldest; datasets and filenames stay paired.
    for ds,snapfile in zip(reversed(ts),np.flipud(new_snapfiles)):
        ad = ds.all_data()
        print 'Creating momentum fits file for '+ snapfile
        aname = snapfile.split('/')[-1]
        fits_name = out_dir+'/'+simname+'_'+aname+'_momentum.fits'
        print 'fits name : ', fits_name
        print 'Generating angular momentum object...'
        amom = momentum_obj(simname, aname, snapfile, fits_name)
        amom.load()
        amom.recenter(galprops)
        # Disk angular-momentum axis from galprops, plus a fixed reference axis.
        amom.L_disk = galprops['gas_L'][0]
        amom.L_disk_fixed = [-0.37085436, 0.14802026, 0.91681898]
        amom.calc_angular_momentum(ptype = 'stars')
        amom.calc_angular_momentum(ptype = 'darkmatter')
        amom.measure_potential()
        amom.measure_circularity()
        #amom.gas_momentum_heatmap()
        amom.write_fits()
    return amom
if __name__ == "__main__":
    # Command-line parsing is disabled; parameters are hard-coded for a debug run.
    #args = parse()
    #simname = args['simname']
    #snapname = args['snapname']
    #haloname = args['haloname']
    #run_parallel = args['run_parallel']
    #on_system = args['on_system']
    ddmin = 906
    ddmax = 908
    run_parallel = False
    haloname = 'halo_008508'
    simname = 'nref11n_nref10f'
    on_system = 'local'
    '''
    if on_system == 'pfe':
        galprops_outdir = '/nobackupp2/rcsimons/foggie_momentum/galprops'
        galaxy_props_file = galprops_outdir + '/' + simname + '_' + snapname + '_galprops.npy'
    else:
        galprops_outdir = '/Users/rsimons/Dropbox/rcs_foggie/outputs'
        galaxy_props_file = galprops_outdir + '/temp_galprops.npy'
    galprops = np.load(galaxy_props_file)[()]
    '''
    #print haloname, simname, snapname, run_parallel
    #ddmin, ddmax = int(args['ddmin']), int(args['ddmax'])
    #snapnames = ['DD%.4i'%i for i in arange(ddmin, ddmax)]
    #snapnames = ['DD0906']
    #snapnames = ['DD0907']
    snapnames = ['DD0956']
    '''
    if run_parallel:
        n_jobs = int(args['n_jobs'])
        if (simname is not None) & (haloname is not None):
            Parallel(n_jobs = n_jobs, backend = 'threading')(delayed(run_measure_momentum)(haloname = haloname, simname = simname, snapname = snapname, galprops = galprops, on_system = on_system) for snapname in snapnames)
        else:
            print 'run_all_parallel set to True, but no simname or haloname provided.'
    else:
        for snapname in snapnames:
            amom = run_measure_momentum(haloname = haloname, simname = simname, snapname = snapname, galprops = galprops, on_system = on_system)
    '''
    if True:
        # Test run_measure_momentum in ipython
        # Inlined copy of run_measure_momentum() with recentering replaced by
        # hard-coded per-snapshot galaxy centers (galprops not loaded here).
        for s, snapname in enumerate(snapnames):
            if on_system == 'pfe':
                snaps = np.sort(np.asarray(glob.glob("/nobackupp2/mpeeples/%s/%s/%s/%s"%(haloname, simname, snapname, snapname))))
                out_dir = '/nobackupp2/rcsimons/foggie_momentum/momentum_fits'
            else:
                print "/Volumes/gdrive/foggie/%s/nref11n/%s/%s/%s"%(haloname, simname, snapname, snapname)
                snaps = np.sort(np.asarray(glob.glob("/Users/rsimons/Dropbox/rcs_foggie/data/%s/%s/%s"%(haloname, snapname, snapname))))
                out_dir = '/Users/rsimons/Dropbox/rcs_foggie/outputs'
            #snaps = np.sort(np.asarray(glob.glob("/Users/rsimons/Dropbox/rcs_foggie/data/%s/%s/%s/%s"%(haloname, simname, snapname, snapname))))
            #out_dir = '/Users/rsimons/Dropbox/rcs_foggie/outputs'
            assert os.path.lexists(snaps[0])
            assert os.path.lexists(out_dir)
            new_snapfiles = np.asarray(snaps)
            ts = yt.DatasetSeries(new_snapfiles)
            for ds,snapfile in zip(reversed(ts),np.flipud(new_snapfiles)):
                ad = ds.all_data()
                print 'Creating momentum fits file for '+ snapfile
                aname = snapfile.split('/')[-1]
                fits_name = out_dir+'/'+simname+'_'+aname+'_momentum.fits'
                print 'fits name : ', fits_name
                print 'Generating angular momentum object...'
                amom = momentum_obj(simname, aname, snapfile, fits_name)
                amom.load()
                #amom.recenter(galprops)
                # Hard-coded galaxy centers (code units -> kpc) for known snapshots.
                if snapname == 'DD0906': cen = yt.YTArray([ds.quan(0.4922914505, 'code_length'), ds.quan(0.482047080994, 'code_length'), ds.quan(0.504963874817, 'code_length')]).to('kpc')
                if snapname == 'DD0907': cen = yt.YTArray([ds.quan(0.492289543152, 'code_length'), ds.quan(0.48203754425, 'code_length'), ds.quan(0.504967689514, 'code_length')]).to('kpc')
                if snapname == 'DD0956': cen = yt.YTArray([ds.quan(0.49216556549072266, 'code_length'), ds.quan(0.48153591156005865, 'code_length'), ds.quan(0.5051298141479492, 'code_length')]).to('kpc')
                amom.cen_x, amom.cen_y, amom.cen_z = cen[0], cen[1], cen[2]
                # NOTE(review): recenter_2 is not defined in this part of the
                # file — confirm it is defined elsewhere before running.
                amom = recenter_2(amom)
                #amom.L_disk = galprops['gas_L'][0]
                #amom.L_disk_fixed = [-0.37085436, 0.14802026, 0.91681898]
                #amom.calc_angular_momentum(ptype = 'stars')
                #amom.calc_angular_momentum(ptype = 'darkmatter')
                #amom.measure_potential()
                #amom.measure_circularity()
                #amom.gas_momentum_heatmap()
                amom.write_fits()
|
from django.contrib import admin
from ..models import Board
# Public API of this admin module.
__all__ = [
    'BoardAdmin'
]

@admin.register(Board)
class BoardAdmin(admin.ModelAdmin):
    """Django admin configuration for the Board model."""
    # Timestamp fields are auto-managed; expose them read-only in the form.
    readonly_fields = ('created', 'modified')
|
import sys

# Coordinate scale factor applied to every atom position (284/165 rescaling).
SCALE = 284.0 / 165.0

# Usage: python <script> <input file>
# Reads a structure file, skips three header lines, then converts each atom
# line into xyz format with rescaled coordinates.  The input and output
# files are now opened with `with` so they are closed even on error
# (original left both handles open if parsing raised).
file_name = str(sys.argv[1])
output = []
with open(file_name) as f:
    # Skip the three-line header.
    for _ in range(3):
        f.readline()
    # Fourth line: first token is the atom count.
    num_atoms = f.readline().split()[0]
    output.append(num_atoms + '\n' + '\n')
    for _ in range(int(num_atoms)):
        line = f.readline().split()
        # Column 3 holds the element symbol; columns 0-2 the coordinates.
        output.append(line[3] + '\t')
        for i in range(3):
            output.append("{:9.4f} \t".format(round(float(line[i]) * SCALE, 4)))
            if i == 2:
                output.append('\n')

# NOTE(review): the [10:-3] slice assumes a fixed 10-character input-path
# prefix and a 2-character extension — confirm against the caller's layout.
with open('xyz_files/' + str(file_name[10:-3]) + 'xyz', 'w') as out:
    out.writelines(output)
|
#coding ๏ผutf-8
from selenium import webdriver
from chandao_demo.common.Base import Base
"'ๅฐ่ฃ
ๆทปๅ BUG็ๆนๆณ " \
"1.็ปๅฝ " \
" 2.ๆทปๅ BUG " \
"3.ๅคๆญๆฏๅฆๆทปๅ ๆๅ '"
class ZenTaoBug(Base):  # inherits the generic page helpers (click, input_text, ...) from Base
    """Page object for the ZenTao demo site.

    Encapsulates adding a BUG: 1. log in, 2. add the BUG, 3. (caller)
    check whether the addition succeeded.
    """

    url = 'http://pro.demo.zentao.net/bug-browse-20.html'

    # --- login page locators ---
    admin_name = ('id', 'account')
    admin_pwd = ('name', 'password')
    login_button = ('id', 'submit')

    # --- add-BUG page locators ---
    test_name = ('xpath', ".//*[@id='navbar']/ul/li[4]/a")        # top navigation entry
    test_modle = ('xpath', ".//*[@id='subNavbar']/ul/li[1]/a")    # bug module tab
    button_bug = ('xpath', ".//*[@id='mainMenu']/div[3]/a[3]")    # "add bug" button
    version1 = ('xpath', ".//*[@id='openedBuild_chosen']/ul")     # version input box
    version2 = ('xpath', ".//*[@id='openedBuild_chosen']/div/ul/li")  # version list item
    bug_title = ('id', 'title')                                   # title field
    frmae = ('id', 'ke-edit-iframe')                              # rich-text editor iframe
    bug_step = ('xpath', 'html/body')                             # editor body (description)
    bug_sumit = ('id', 'submit')                                  # submit button

    def login(self, user="demo", pwd='123456'):
        """Log into the ZenTao demo with the given credentials."""
        # BUG FIX: was `driver.get(...)`, referencing a module-level global
        # that only exists in the debug __main__ block; use the instance's
        # driver (already used by add_bug below).
        self.driver.get('http://pro.demo.zentao.net/user-login.html')
        self.clear(self.admin_name)
        self.input_text(self.admin_name, user)
        self.clear(self.admin_pwd)
        self.input_text(self.admin_pwd, pwd)
        self.click(self.login_button)
        self.sleep_time(3)

    def add_bug(self):
        """Navigate to the bug module and submit a new BUG entry."""
        self.click(self.test_name)
        self.click(self.test_modle)
        self.click(self.button_bug)
        self.click(self.version1)
        self.click(self.version2)
        self.input_text(self.bug_title, 'ๆ ้ข325')
        # The description editor lives inside an iframe; switch into it first.
        frame = self.find_element('class name', 'ke-edit-iframe')
        # NOTE(review): switch_to_frame/switch_to_default_content are the
        # legacy selenium API; modern bindings use driver.switch_to.frame(...).
        self.driver.switch_to_frame(frame)
        # The rich-text body cannot be cleared, only typed into.
        self.input_text(self.bug_step, "bugๅๅฎน")
        self.driver.switch_to_default_content()  # leave the iframe
        self.click(self.bug_sumit)  # submit the bug
if __name__ == '__main__':  # manual debug run
    # Launch a browser, log in and file one bug end to end.
    driver = webdriver.Firefox()
    bug = ZenTaoBug(driver)
    bug.login()
    bug.add_bug()
|
# coding: utf-8
from os import *
from Net import *
from signal import *
from BaseModule import *
from GM import *
global server
def usr1Handler(signo, frame):
    """SIGUSR1 handler: log the shutdown and stop the global server."""
    global server
    GM().getLogMgr().logI("่ฟ็จๅณ้ญ")
    server.stop()
class Server:
    """Wires together configuration, message dispatch and the Net event loop."""

    def __init__(self, config):
        # Register this instance with the global manager so the signal
        # handler (usr1Handler) can reach it.
        GM().setServer(self)
        self._host = config["host"]
        self._port = config["port"]
        self._timeout = config["timeout"]
        # Create the message dispatcher.
        self._msgHandler = MsgHandler()
        # Register the base message-processing module with the dispatcher.
        baseModule = BaseModule(0x00010000)
        self._msgHandler.registerModule(baseModule)
        # Initialise the network layer and start listening.
        self._net = Net(self._host, self._port, self._timeout, self._msgHandler)

    def run(self):
        # Enter the network event loop (blocks until stop() is called).
        self._net.run()

    def stop(self):
        self._net.stop()

    def getNet(self):
        return self._net
if __name__ == "__main__":
    global server
    # Single-instance guard via a pid file.
    pidfile = "../proc/pid"
    if path.exists(pidfile):
        # NOTE(review): only logs the error — execution continues and the
        # server is still started below; confirm whether it should exit here.
        GM().getLogMgr().logE("pidๆไปถๅทฒ็ปๅญๅจ๏ผ่ฏทๆฃๆฅ็จๅบๆฏๅฆๅทฒ็ป่ฟ่ก")
    else:
        # Record our pid (file() is the Python 2 builtin; open() in Python 3).
        f = file(pidfile, "w")
        f.write(str(getpid()))
        f.close()
    config = {}
    config["host"] = "115.29.53.18"
    config["port"] = 8888
    config["timeout"] = 10
    server = Server(config)
    # SIGUSR1 triggers a graceful shutdown (see usr1Handler).
    signal(SIGUSR1, usr1Handler)
    server.run()
    # Normal exit: remove the pid file we created.
    if path.exists(pidfile):
        GM().getLogMgr().logD("็จๅบๆญฃๅธธ้ๅบ๏ผ็งป้คpidๆไปถ")
        remove(pidfile)
|
#Exercise: combine two lists/arrays
def combine(list_1, list_2):
    """Interleave two lists element by element.

    Leftover elements from the longer list are appended after the
    interleaved prefix, e.g. combine([1, 2, 3], [9]) -> [1, 9, 2, 3].

    BUG FIX: the original re-appended the last interleaved element of the
    longer list (its tail loop started at the final loop index instead of
    one past it) and raised NameError when the shorter list was empty.

    :param list_1: first sequence
    :param list_2: second sequence
    :return: new list with the elements of both inputs
    """
    joined = []
    shorter = min(len(list_1), len(list_2))
    # Alternate elements over the common prefix.
    for i in range(shorter):
        joined.append(list_1[i])
        joined.append(list_2[i])
    # Append whatever remains of the longer list (at most one is non-empty).
    joined.extend(list_1[shorter:])
    joined.extend(list_2[shorter:])
    return joined
if __name__ == "__main__":
    # Quick smoke test: interleave two equal-length lists and show the result.
    print(combine([1, 2, 3], [11, 22, 33]))
|
__author__ = 'cynthia_odonnell'
import urllib2
import sys
import numpy as np
import pylab
import scipy.stats as stats
#read data from uci data repository
target_url = ("https://archive.ics.uci.edu/ml/machine-learning-databases/undocumented/connectionist-bench/sonar/sonar.all-data")
# NOTE: urllib2 is Python 2 only; the handle is a one-shot iterator of lines,
# so only the first function that consumes `data` sees the rows.
data = urllib2.urlopen(target_url)
def count_rows_and_columns(data):
    """Parse comma-separated lines and report per-column type counts.

    For every column, counts how many entries parse as numbers, how many
    are non-empty strings, and how many are empty/other, writing a summary
    table plus the row/column totals to stdout.

    Fixes over the original: the `type` builtin is no longer shadowed, the
    unused `labels` list is gone, and the column count uses the first row
    (the original indexed xList[1] and raised IndexError on one-row input).

    :param data: iterable of comma-separated text lines (e.g. a file handle)
    :return: (xList, nrow, ncol) — parsed rows, row count, column count
    """
    # Split each line on commas into a list of lists.
    xList = []
    for line in data:
        xList.append(line.strip().split(","))
    nrow = len(xList)
    ncol = len(xList[0])
    # counts = [numeric, non-empty string, empty/other] for the current column.
    colCounts = []
    counts = [0] * 3
    for col in range(ncol):
        for row in xList:
            try:
                value = float(row[col])
                if isinstance(value, float):
                    counts[0] += 1
            except ValueError:
                # Not a number: distinguish real strings from empty fields.
                if len(row[col]) > 0:
                    counts[1] += 1
                else:
                    counts[2] += 1
        colCounts.append(counts)
        counts = [0] * 3
    sys.stdout.write("Col#" + '\t' + "Number" + '\t' + "Strings" + '\t' + "Other\n")
    for iCol, types in enumerate(colCounts):
        sys.stdout.write(str(iCol) + '\t\t' + str(types[0]) + '\t\t' +
                         str(types[1]) + '\t\t' + str(types[2]) + "\n")
    sys.stdout.write("Number of Rows of Data = " + str(nrow) + '\n')
    sys.stdout.write("Number of Columns of Data = " + str(ncol) + '\n')
    return xList, nrow, ncol
def summary_stats(data):
    """Print summary statistics for the sonar data set.

    Reports mean/std and quantile boundaries for numeric column 3, then the
    unique values and per-value counts of the categorical label column 60.
    All output goes to stdout; nothing is returned.
    """
    xList, nrow, ncol = count_rows_and_columns(data)
    # NOTE(review): `type` and `colCounts` are assigned but never used here.
    type = [0]*3
    colCounts = []
    #generate summary statistics for column 3 (e.g.)
    col = 3
    colData = []
    for row in xList:
        colData.append(float(row[col]))
    colArray = np.array(colData)
    colMean = np.mean(colArray)
    colsd = np.std(colArray)
    sys.stdout.write("Mean = " + '\t' + str(colMean) + '\t\t' +
                     "Standard Deviation = " + '\t ' + str(colsd) + "\n")
    #calculate quantile boundaries
    ntiles = 4
    percentBdry = []
    for i in range(ntiles+1):
        percentBdry.append(np.percentile(colArray, i*(100)/ntiles))
    sys.stdout.write("\nBoundaries for 4 Equal Percentiles \n")
    print(percentBdry)
    sys.stdout.write(" \n")
    #run again with 10 equal intervals
    ntiles = 10
    percentBdry = []
    for i in range(ntiles+1):
        percentBdry.append(np.percentile(colArray, i*(100)/ntiles))
    sys.stdout.write("Boundaries for 10 Equal Percentiles \n")
    print(percentBdry)
    sys.stdout.write(" \n")
    #The last column contains categorical variables
    col = 60
    colData = []
    for row in xList:
        colData.append(row[col])
    unique = set(colData)
    sys.stdout.write("Unique Label Values \n")
    print(unique)
    #count up the number of elements having each value
    # Map each distinct label to an index, then tally occurrences.
    catDict = dict(zip(list(unique),range(len(unique))))
    # NOTE(review): assumes exactly two label values (sonar has R/M);
    # a data set with more categories would raise IndexError here.
    catCount = [0]*2
    for elt in colData:
        catCount[catDict[elt]] += 1
    sys.stdout.write("\nCounts for Each Value of Categorical Label \n")
    print(list(unique))
    print(catCount)
def qqplot_attribute(data):
    """Show a normal Q-Q (probability) plot for numeric column 3.

    Parses the comma-separated lines in *data*, extracts column 3 as
    floats and displays a probability plot against the normal
    distribution.  Blocks on pylab.show(); nothing is returned.

    Fix over the original: the dead locals (labels, nrow, ncol, type,
    colCounts — `type` also shadowed the builtin) are removed; behavior is
    otherwise unchanged.

    :param data: iterable of comma-separated text lines (e.g. a file handle)
    """
    # Split each line on commas into a list of row lists.
    xList = []
    for line in data:
        xList.append(line.strip().split(","))
    # Extract the attribute of interest (column 3) as floats.
    col = 3
    colData = []
    for row in xList:
        colData.append(float(row[col]))
    # Probability plot against a normal distribution, rendered via pylab.
    stats.probplot(colData, dist="norm", plot=pylab)
    pylab.show()
#summary_stats(data)
# Run the Q-Q plot for column 3 (consumes the urlopen handle; blocks on show()).
qqplot_attribute(data)
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (http://tiny.be). All Rights Reserved
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from osv import osv
from osv import fields
class inmueble(osv.Model):
    """Property (inmueble) managed by the real-estate agency."""
    _name = 'inmueble'
    _description = 'Inmueble gestionado por la inmobiliaria'

    def _check_form(self, cr, uid, ids):
        """Constraint: every record must have a strictly positive price.

        BUG FIX: the original returned after inspecting only the first
        browsed record; now all records in ids are validated.
        """
        for clase in self.browse(cr, uid, ids):
            if not (clase.price > 0):
                return False
        return True

    def _totalVisitas(self, cr, uid, ids, field, arg, context=None):
        """Functional field: number of scheduled visits per property."""
        res = {}
        # Walk every record and count its linked visita records.
        for clase in self.browse(cr, uid, ids, context=context):
            res[clase.id] = len(clase.visita_ids)
        return res

    def on_change_class(self, cr, uid, ids, estado):
        """On-change handler: warn when the property is not available.

        BUG FIX: the original returned None on the valid path; on_change
        handlers are expected to return a dict, so return {} there.
        """
        warning = {'title': 'Estado Incorrecto',
                   'message': 'El inmueble debe estar tasado'}
        if estado != "disponible":
            return {'value': {'name': 'ERROR'}, 'warning': warning}
        return {}

    _columns = {
        'id_inmueble': fields.integer('Id', size=9, required=True),
        'name': fields.char('Direccion', size=60, required=True),
        'postal_code': fields.integer('Codigo postal', size=5, required=True),
        'price': fields.float('Precio', size=20, required=True),
        'data': fields.text('Datos'),
        'score': fields.float('Valoracion', size=20),
        'totalvisitas': fields.function(_totalVisitas, type='integer', string='Total de visitas', store=True),
        'visita_ids': fields.one2many('visita', 'inmueble_id', 'Visitas concertadas en el inmueble'),
        'contrato_id': fields.many2one('contrato', 'Contrato'),
        'state': fields.selection([('aceptado', 'Aceptado'), ('disponible', 'Disponible'), ('alquiladovendido', 'Alquilado o vendido')], 'Estado'),
        'caracteristica_ids': fields.many2many('caracteristica', 'inmueble_caracteristica_rel', 'inmueble_id_inmueble', 'caracteristica_name', 'Caracteristicas'),
        'propietario_id': fields.many2one('propietario', 'Propietario'),
        'tasador_dni': fields.many2one('tasador', 'Tasador'),
    }
    _defaults = {'state': 'aceptado'}
    _constraints = [(_check_form, 'ยก Errores en el formulario !', ['Precio'])]
    _sql_constraints = [('id_inmueble_uniq', 'unique (id_inmueble)', 'Id del inmueble ya registrado.')]
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
plt.rcParams.update({'figure.max_open_warning': 0})
from mpl_toolkits.mplot3d import Axes3D
from datetime import date
# REPORTLAB
from reportlab.pdfgen import canvas
from PIL import Image
from io import BytesIO
from reportlab.lib.units import inch, cm
from reportlab.lib.utils import ImageReader
from reportlab.graphics import renderPDF
from svglib.svglib import svg2rlg
from reportlab.lib import colors
from reportlab.platypus import SimpleDocTemplate, Table, TableStyle
from reportlab.lib.pagesizes import letter, A4
import os
import sys
#test
import traceback
try:
def logo_choose(self):
    """Return the index of the logo currently selected in the combo box."""
    return self.comboBox.currentIndex()
def generate_report(self):
# Importing raw data
survey_in_path = self.lineEdit_8.text()
survey_in = pd.read_csv(survey_in_path, skiprows = [0, 2])
#survey_in = survey_in.drop(["Unnamed: 26"], axis = 1)
survey_out_path = self.lineEdit_9.text()
survey_out = pd.read_csv(survey_out_path, skiprows = [0, 2])
#survey_out = survey_out.drop(["Unnamed: 26"], axis = 1)
# Creating Lists
survey1 = survey_in.drop(survey_in.index[1:-1])
survey1 = survey1[["Dip", "Azimuth", "Easting", "Northing", "Elevation"]]
survey1.index=(['Start of Survey','End of Survey'])
survey1.reset_index(level=0, inplace=True)
s1_lol = round(survey1, 2).T.reset_index().values.T.tolist()
survey2 = survey_out.drop(survey_out.index[1:-1])
survey2 = survey2[["Dip", "Azimuth", "Easting", "Northing", "Elevation"]]
survey2.index=(['Start of Survey','End of Survey'])
survey2.reset_index(level=0, inplace=True)
s2_lol = round(survey2, 2).T.reset_index().values.T.tolist()
depth = survey_in["Station"].iloc[-1] - survey_in["Station"].iloc[0]
#Inputs
project_id = str(self.lineEdit_17.text())
bh_id = str(self.lineEdit.text())
project_loc = str(self.lineEdit_2.text())
country = "Azerbaijan"
client = str(self.lineEdit_3.text())
surveyed_by = str(self.lineEdit_4.text())
report_by = str(self.lineEdit_5.text())
temp_survey_date = self.dateEdit.date()
survey_date = str(temp_survey_date.toPyDate())
drill_dia = str(self.lineEdit_10.text()) + " mm"
survey_run = str(self.lineEdit_11.text())
angular_unit = str(self.lineEdit_12.text())
linear_unit = str(self.lineEdit_13.text())
#Survey parameteres
station_inter = survey_in["Station"].iloc[1] - survey_in["Station"].iloc[0]
num_stations = len(survey_in["Station"])
survey_start = survey_in["Station"].iloc[0]
survey_end = survey_in["Station"].iloc[-1]
#Today's date
today = str(date.today().strftime("%d %b %Y"))
#Calculating misclosure
end_east_diff = round(survey_out["Easting"].iloc[-1]-survey_in["Easting"].iloc[-1], 2)
end_nort_diff = round(survey_out["Northing"].iloc[-1]-survey_in["Northing"].iloc[-1], 2)
end_elev_diff = round(survey_out["Elevation"].iloc[-1]-survey_in["Elevation"].iloc[-1], 2)
misclosure = round(np.sqrt(end_east_diff**2 + end_nort_diff**2 + end_elev_diff**2), 2)
# Misclosure QC
def misclosure_qc(misc, depth1):
global qc
if misc <= depth1 * 1/100:
qc = "Passed"
else:
qc = "Failed"
misclosure_qc(misclosure, depth)
end_dip_diff = round(survey_in["Dip"].iloc[-1] - survey_out["Dip"].iloc[-1], 2)
end_azimuth_diff = round(survey_in["Azimuth"].iloc[-1] - survey_out["Azimuth"].iloc[-1], 2)
end_east_diff = round(survey_in["Easting"].iloc[-1] - survey_out["Easting"].iloc[-1], 2)
end_north_diff = round(survey_in["Northing"].iloc[-1] - survey_out["Northing"].iloc[-1], 2)
end_elev_diff = round(survey_in["Elevation"].iloc[-1] - survey_out["Elevation"].iloc[-1], 2)
#Lists for table
end_of_surv_diff = [["Dip", "Azimuth", "Easting", "Northing", "Elevation"],
[end_dip_diff, end_azimuth_diff, end_east_diff, end_north_diff, end_elev_diff]]
end_of_surv_misc = [["Depth", "Misclosure", "Misclosure %", "Quality"],
[depth, misclosure, round(misclosure / depth * 100, 2), qc]]
#Misclosure plot
misclosure_line = np.linspace(0, misclosure, 50)
misclosure_max_line = np.linspace(0, (survey_out["Station"].iloc[-1] - survey_out["Station"].iloc[0])/100, 50)
depth_line = np.linspace(0, survey_out["Station"].iloc[-1], 50)
fig1, ax1 = plt.subplots(figsize=(6, 3))
ax1.plot(depth_line, misclosure_line, color = "orange", label = "Misclosure between surveys")
ax1.plot(depth_line, misclosure_max_line, color = "purple", label = "Maximum allowed misclosure")
ax1.set_xlabel("Depth (m)", size = 14)
ax1.set_ylabel("Meter", size = 14)
ax1.legend(loc = "best")
plt.grid(True)
ax1.set_title("Misclosure", size=14)
# Borehole path plot
fig2 = plt.figure(figsize=(7.5, 7.5))
ax2 = plt.axes(projection='3d')
# Data for three-dimensional scattered points
xdata_in = survey_in["Easting"]
ydata_in = survey_in["Northing"]
zdata_in = survey_in["Elevation"]
xdata_out = survey_out["Easting"]
ydata_out = survey_out["Northing"]
zdata_out = survey_out["Elevation"]
xdata_avg = (xdata_in + xdata_out) / 2
ydata_avg = (ydata_in + ydata_out) / 2
zdata_avg = (zdata_in + zdata_out) / 2
ax2.scatter3D(xdata_in, ydata_in, zdata_in, c="orange")
ax2.plot(xdata_in, ydata_in, zdata_in, c="orange", label="Survey1")
ax2.scatter3D(xdata_out, ydata_out, zdata_out, c="purple")
ax2.plot(xdata_out, ydata_out, zdata_out, c="purple", label ="Survey2")
plt.legend(loc="best")
x_ticks = np.linspace(xdata_in.min(), xdata_in.max(), 5)
y_ticks = np.linspace(ydata_in.min(), ydata_in.max(), 5)
z_ticks = np.linspace(zdata_in.min(), zdata_in.max(), 5)
plt.tight_layout()
ax2.set_xlabel('X', size = 14, labelpad=3)
ax2.set_ylabel('Y', size = 14, labelpad=15)
ax2.set_zlabel('Z', size = 14, labelpad=10)
ax2.get_xaxis().get_major_formatter().set_useOffset(False)
ax2.get_yaxis().get_major_formatter().set_useOffset(False)
ax2.set_title("Borehole Path", size = 14, pad=20)
# DIP PLOT
fig15, ax15 = plt.subplots(figsize=(7, 4))
ax15.plot(survey_in["Station"], survey_in["Dip"], color = "orange", label = "Survey 1")
ax15.plot(survey_out["Station"], survey_out["Dip"], color = "purple", label = "Survey 2")
ax15.set_xlabel("Depth (m)", size = 14)
ax15.set_ylabel("Dip", size = 14)
ax15.legend(loc = "best")
ax15.grid(True)
ax15.set_title("Dip", size=14)
# AZIMUTH PLOT
fig3, ax3 = plt.subplots(figsize=(7, 4))
ax3.plot(survey_in["Station"], survey_in["Azimuth"], color = "orange", label = "Survey 1")
ax3.plot(survey_out["Station"], survey_out["Azimuth"], color = "purple", label = "Survey 2")
ax3.set_xlabel("Depth (m)", size = 14)
ax3.set_ylabel("Azimuth", size = 14)
ax3.legend(loc = "best")
ax3.grid(True)
ax3.set_title("Azimuth", size=14)
# EASTING PLOT
fig4, ax4 = plt.subplots(figsize=(6, 2.5))
ax4.plot(survey_in["Station"], survey_in["Easting"], color = "orange", label = "Survey 1")
ax4.plot(survey_out["Station"], survey_out["Easting"], color = "purple", label = "Survey 2")
ax4.set_xlabel("Depth (m)", size = 14)
ax4.set_ylabel("Easting", size = 14)
ax4.legend(loc = "best")
ax4.grid(True)
ax4.get_yaxis().get_major_formatter().set_useOffset(False)
ax4.set_title("Easting", size=14)
# NORTHING PLOT
fig5, ax5 = plt.subplots(figsize=(6, 2.5))
ax5.plot(survey_in["Station"], survey_in["Northing"], color = "orange", label = "Survey 1")
ax5.plot(survey_out["Station"], survey_out["Northing"], color = "purple", label = "Survey 2")
ax5.set_xlabel("Depth (m)", size = 14)
ax5.set_ylabel("Northing", size = 14)
ax5.legend(loc = "best")
ax5.grid(True)
ax5.get_yaxis().get_major_formatter().set_useOffset(False)
ax5.set_title("Northing", size=14)
# ELEVATION PLOT
fig6, ax6 = plt.subplots(figsize=(6, 2.5))
ax6.plot(survey_in["Station"], survey_in["Elevation"], color = "orange", label = "Survey 1")
ax6.plot(survey_out["Station"], survey_out["Elevation"], color = "purple", label = "Survey 2")
ax6.set_xlabel("Depth (m)", size = 14)
ax6.set_ylabel("Elevation", size = 14)
ax6.legend(loc = "best")
ax6.grid(True)
ax6.get_yaxis().get_major_formatter().set_useOffset(False)
ax6.set_title("Elevation", size=14)
# POSTIONAL COMPARISON
fig7, ax7 = plt.subplots(figsize=(6.5, 3.5))
ax7.plot(survey_in["Easting"], survey_in["Northing"], color = "orange", label = "Survey 1")
ax7.plot(survey_out["Easting"], survey_out["Northing"], color = "purple", label = "Survey 2")
ax7.set_xlabel("Easting", size = 14)
ax7.set_ylabel("Northing", size = 14)
ax7.legend(loc = "best")
plt.xticks(rotation=40)
plt.yticks(rotation=40)
ax7.grid(True)
ax7.get_yaxis().get_major_formatter().set_useOffset(False)
ax7.get_xaxis().get_major_formatter().set_useOffset(False)
ax7.set_title("Positional Comparison", size=14)
#DLS PLOT
fig8, ax8 = plt.subplots(figsize=(6.5, 3.5))
ax8.plot(survey_in["Station"], survey_in["DLS"], color = "orange", label = "Survey 1")
ax8.plot(survey_out["Station"], survey_out["DLS"], color = "purple", label = "Survey 2")
ax8.set_xlabel("Depth (m)", size = 14)
ax8.set_ylabel("DLS/30m", size = 14)
ax8.legend(loc = "best")
ax8.grid(True)
ax8.get_yaxis().get_major_formatter().set_useOffset(False)
ax8.get_xaxis().get_major_formatter().set_useOffset(False)
ax8.set_title("DLS Comparison", size=14)
# UP and DOWN PLOT
fig9, ax9 = plt.subplots(figsize=(6.5, 3.5))
ax9.plot(survey_in["Station"], survey_in["UpDown"], color = "orange", label = "Survey 1")
ax9.plot(survey_out["Station"], survey_out["UpDown"], color = "purple", label = "Survey 2")
ax9.set_xlabel("Depth (m)", size = 14)
ax9.set_ylabel("UP and Down (m)", size = 14)
ax9.legend(loc = "best")
ax9.grid(True)
ax9.get_yaxis().get_major_formatter().set_useOffset(False)
ax9.get_xaxis().get_major_formatter().set_useOffset(False)
ax9.set_title("UP and Down Deviation", size=14)
# LEFT and RIGHT PLOT
fig10, ax10 = plt.subplots(figsize=(6.5, 3.5))
ax10.plot(survey_in["Station"], survey_in["LeftRight"], color = "orange", label = "Survey 1")
ax10.plot(survey_out["Station"], survey_out["LeftRight"], color = "purple", label = "Survey 2")
ax10.set_xlabel("Depth (m)", size = 14)
ax10.set_ylabel("Left and Right (m)", size = 14)
ax10.legend(loc = "best")
ax10.grid(True)
ax10.get_yaxis().get_major_formatter().set_useOffset(False)
ax10.get_xaxis().get_major_formatter().set_useOffset(False)
ax10.set_title("Left and Right Deviation", size=14)
# Target Deviation Plot - 2D (TOP)
#Collar Coordinates
collar_easting = survey_in["Easting"].iloc[0]
collar_northing = survey_in["Northing"].iloc[0]
collar_elevation = survey_in["Elevation"].iloc[0]
#Collar Parameters
collar_dip = float(self.lineEdit_6.text())
collar_azimuth = float(self.lineEdit_7.text())
# Calculating Target Coordinates
target_easting = ((survey_out["Station"].iloc[-1] - survey_out["Station"].iloc[0]
) / 2 * ((np.sin(np.radians(collar_dip + 90)) * np.sin(np.radians(collar_azimuth))
) + (np.sin(np.radians(collar_dip + 90)) * np.sin(np.radians(collar_azimuth))))
) + collar_easting
target_northing = ((survey_out["Station"].iloc[-1] - survey_out["Station"].iloc[0]
) / 2 * ((np.sin(np.radians(collar_dip + 90)) * np.cos(np.radians(collar_azimuth))
) + (np.sin(np.radians(collar_dip + 90)) * np.cos(np.radians(collar_azimuth))))
) + collar_northing
target_elevation = (-1 * (survey_out["Station"].iloc[-1] - survey_out["Station"].iloc[0]
) / 2 * (np.cos(np.radians(collar_dip + 90)) + np.cos(np.radians(collar_dip + 90)))
) + collar_elevation
#Actual Coordinates
#Survey 01
actual_easting1 = survey_in["Easting"].iloc[-1]
actual_northing1 = survey_in["Northing"].iloc[-1]
actual_elevation1 = survey_in["Elevation"].iloc[-1]
#Survey 02
actual_easting2 = survey_out["Easting"].iloc[-1]
actual_northing2 = survey_out["Northing"].iloc[-1]
actual_elevation2 = survey_out["Elevation"].iloc[-1]
# Average
average_easting = (actual_easting1 + actual_easting2) / 2
average_northing = (actual_northing1 + actual_northing2) / 2
average_elevation = (actual_elevation1 + actual_elevation2) / 2
# Target Differences
targ_east_differ1 = abs(target_easting - actual_easting1)
targ_nort_differ1 = abs(target_northing - actual_northing1)
targ_elev_differ1 = abs(target_elevation - actual_elevation1)
targ_east_differ2 = abs(target_easting - actual_easting2)
targ_nort_differ2 = abs(target_northing - actual_northing2)
targ_elev_differ2 = abs(target_elevation - actual_elevation2)
targ_east_differ_avg = abs(target_easting - average_easting)
targ_nort_differ_avg = abs(target_northing - average_northing)
targ_elev_differ_avg = abs(target_elevation - average_elevation)
tot_misc_to_targ1 = np.sqrt(targ_east_differ1 ** 2 + targ_nort_differ1 ** 2 + targ_elev_differ1 ** 2)
tot_misc_to_targ2 = np.sqrt(targ_east_differ2 ** 2 + targ_nort_differ2 ** 2 + targ_elev_differ2 ** 2)
tot_misc_to_targ_avg = np.sqrt(targ_east_differ_avg ** 2 + targ_nort_differ_avg ** 2 + targ_elev_differ_avg ** 2)
tot_misc_to_targ1_perc = tot_misc_to_targ1 / depth * 100
tot_misc_to_targ2_perc = tot_misc_to_targ2 / depth * 100
tot_misc_to_targ_avg_perc = tot_misc_to_targ_avg / depth * 100
fig11, ax11 = plt.subplots(figsize=(6, 6))
ax11.scatter(target_easting, target_northing,
marker = "o",
s = 300,
color = "green",
alpha = 0.5,
label = "Planned")
ax11.scatter(average_easting, average_northing,
marker="o",
color="orange",
label="Actual ")
# ax11.scatter(actual_easting1, actual_northing1,
# marker = "o",
# color = "orange",
# label = "Survey 1")
# ax11.scatter(actual_easting2, actual_northing2,
# marker = "o",
# color = "purple",
# label = "Survey 2")
#ax11.get_yaxis().get_major_formatter().set_useOffset(False)
#ax11.get_xaxis().get_major_formatter().set_useOffset(False)
ax11.set_xlabel("Northing", size=14, labelpad=147)
ax11.set_ylabel("Easting", size=14, labelpad=165)
ax11.set_title("Distance From Target", size=15, pad=28)
# ax11.legend(loc = "best", markerscale = 0.5)
#Legend
box = ax11.get_position()
ax11.set_position([box.x0, box.y0 + box.height * 0.1,
box.width, box.height * 0.9])
# Put a legend below current axis
ax11.legend(loc='upper center', bbox_to_anchor=(0.5, 1.09),
fancybox=True, shadow=False, ncol=5)
ax11.spines['left'].set_position(('data', target_easting))
ax11.spines['bottom'].set_position(('data', target_northing))
ax11.tick_params(axis='both', which='major', pad=0)
plt.xticks(rotation=40)
plt.yticks(rotation=40)
#ax11.grid(True)
ax11.xaxis.set_ticks_position('bottom')
ax11.yaxis.set_ticks_position('left')
ax11.spines['right'].set_color('none')
ax11.spines['top'].set_color('none')
ax11.set_yticklabels([])
ax11.set_xticklabels([])
plt.xlim(target_easting-100, target_easting+100)
plt.ylim(target_northing-100, target_northing+100)
ax11.text(actual_easting2, actual_northing2 + 3, str(round(tot_misc_to_targ_avg, 2)),horizontalalignment='center')
# Target Deviation Plot - 3D
#Planned Borehole data
xdata_plan = np.linspace(collar_easting, target_easting, len(xdata_in))
ydata_plan = np.linspace(collar_northing, target_northing, len(xdata_in))
zdata_plan = np.linspace(collar_elevation, target_elevation, len(xdata_in))
fig12 = plt.figure(figsize=(7.5, 7.5))
ax12 = plt.axes(projection='3d')
# ax12.scatter3D(xdata_in, ydata_in, zdata_in, c="orange")
# ax12.plot(xdata_in, ydata_in, zdata_in, c="orange", label="Survey 1")
# ax12.scatter3D(xdata_out, ydata_out, zdata_out, c="purple")
# ax12.plot(xdata_out, ydata_out, zdata_out, c="purple", label ="Survey 2")
ax12.scatter3D(xdata_plan, ydata_plan, zdata_plan, c="grey")
ax12.plot(xdata_plan, ydata_plan, zdata_plan, c="grey", label ="Planned")
ax12.scatter3D(xdata_avg, ydata_avg, zdata_avg, c="orange")
ax12.plot(xdata_avg, ydata_avg, zdata_avg, c="orange", label="Actual")
plt.legend(loc="best")
x_ticks = np.linspace(xdata_in.min(), xdata_in.max(), 5)
y_ticks = np.linspace(ydata_in.min(), ydata_in.max(), 5)
z_ticks = np.linspace(zdata_in.min(), zdata_in.max(), 5)
ax12.set_xlabel('X', size = 14, labelpad=15)
ax12.set_ylabel('Y', size = 14, labelpad=15)
ax12.set_zlabel('Z', size = 14, labelpad=5)
ax12.get_xaxis().get_major_formatter().set_useOffset(False)
ax12.get_yaxis().get_major_formatter().set_useOffset(False)
## 3D Plot for first page
fig13 = plt.figure(figsize=(6, 6))
ax13 = plt.axes(projection='3d')
# Data for three-dimensional scattered points
xdata_in = survey_in["Easting"]
ydata_in = survey_in["Northing"]
zdata_in = survey_in["Elevation"]
xdata_out = survey_out["Easting"]
ydata_out = survey_out["Northing"]
zdata_out = survey_out["Elevation"]
ax13.scatter3D(xdata_in, ydata_in, zdata_in, c="orange")
ax13.scatter3D(xdata_out, ydata_out, zdata_out, c="purple")
ax13.axes.xaxis.set_ticklabels([])
ax13.axes.yaxis.set_ticklabels([])
ax13.axes.zaxis.set_ticklabels([])
plt.box(on=None)
#Generating PDF Report (Survey QC)
script_dir = os.path.abspath(os.path.dirname(__file__)) # <-- absolute dir the script is in
rel_path1 = "logo\\AT-GEOTECH-logo.png"
rel_path2 = "logo\\dag_logo.jpg"
rel_path3 = "logo\\BLASTO-logo-New.png"
rel_path4 = "logo\\ATG_logo.jpg"
logo_at_g = os.path.join(script_dir, rel_path1)
logo_azdg = os.path.join(script_dir, rel_path2)
logo_blst = os.path.join(script_dir, rel_path3)
logo_atg = os.path.join(script_dir, rel_path4)
#logo_at_g = 'E:\MOOCs\My Projects\ReportINC\logo\AT-GEOTECH-logo.png'
#logo_azdg = 'E:\MOOCs\My Projects\ReportINC\logo\dag_logo.jpg'
#logo_blst = 'E:\MOOCs\My Projects\ReportINC\logo\BLASTO-logo-New.png'
#logo_atg = 'E:\MOOCs\My Projects\ReportINC\logo\ATG_logo.jpg'
logo_cb = logo_choose(self)
if logo_cb == 0:
logo = logo_at_g
logo_w = 70
logo_h = 50
if logo_cb == 1:
logo = logo_azdg
logo_w = 70
logo_h = 50
if logo_cb == 2:
logo = logo_blst
logo_w = 70
logo_h = 50
if logo_cb == 3:
logo = logo_atg
logo_w = 65
logo_h = 30
#Set Canvas
outfilename1 = project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) + \
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")" + "-QC Report.pdf"
output_path = "Output"
output_file1 = os.path.join(script_dir, output_path, outfilename1)
#outfiledir1 = os.path.expanduser("~"),
#outfilepath1 = os.path.join(os.path.expanduser("~"), "Documents/", outfilename1)
pdf = canvas.Canvas(output_file1, pagesize=A4)
pdf.setTitle("")
pdf.setFont("Helvetica-Bold", 24)
pdf.drawString(190, 790, "REFLEX - GYRO")
# 1st Page
imgdata = BytesIO()
ax13 = plt.Axes(fig13, [0., 0., 1., 1.])
ax13.set_axis_off()
fig13.add_axes(ax13)
fig13.savefig(imgdata, format='svg',dpi = 800)
imgdata.seek(0) # rewind the data
Image1 = svg2rlg(imgdata)
renderPDF.draw(Image1, pdf,21, 220)
pdf.setFont("Helvetica", 18)
pdf.drawString(233, 735, today)
pdf.setFont("Helvetica-Bold", 20)
pdf.drawString(155, 760, "QUALITY CHECK REPORT")
pdf.setFont("Helvetica", 16)
pdf.drawString(85, 230, "Project ID: " + project_id)
pdf.drawString(85, 210, "Borehole ID: " + bh_id)
pdf.drawString(85, 190, "Project location: " + project_loc)
pdf.drawString(85, 170, "Client: " + client)
pdf.drawString(85, 150, "Surveyor: " + surveyed_by)
pdf.drawString(85, 130, "Reporter: " + report_by)
pdf.drawString(85, 110, "Survey date: " + survey_date)
pdf.setFont("Helvetica", 14)
page_number = pdf.getPageNumber()
pdf.drawString(500, 20, "Page " + str(page_number))
pdf.drawImage(logo, 515, 775, width = logo_w, height = logo_h)
pdf.showPage()
# 2nd Page
pdf.setFont("Helvetica-Bold", 16)
pdf.drawString(160, 790, "DRILLING COLLAR PARAMETERS")
pdf.setFont("Helvetica", 14)
pdf.drawString(60, 765, "Easting: " + str(round(collar_easting, 2)))
pdf.drawString(60, 745, "Northing: " + str(round(collar_northing, 2)))
pdf.drawString(60, 725, "Elevation: " + str(round(collar_elevation, 2)))
pdf.setFont("Helvetica", 14)
pdf.drawString(380, 765, "Dip: " + str(collar_dip) + " deg")
pdf.drawString(380, 745, "Azimuth: " + str(collar_azimuth)+ " deg")
pdf.drawString(380, 725, "Drill diameter: " + str(drill_dia))
pdf.setFont("Helvetica-Bold", 16)
pdf.drawString(200, 700, "SURVEY PARAMETERS")
pdf.setFont("Helvetica", 14)
pdf.drawString(60, 640, "Survey interval: " + str(station_inter) + " m")
pdf.drawString(60, 620, "Number of stations: " + str(num_stations))
pdf.drawString(380, 680, "Survey run on: " + str(survey_run))
pdf.drawString(380, 660, "Angular units: " + str(angular_unit))
pdf.drawString(380, 640, "Linear units: " + str(linear_unit))
pdf.drawString(60, 680, "Survey start: " + str(survey_start) + " m")
pdf.drawString(60, 660, "Survey end: " + str(survey_end) + " m")
pdf.setFont("Helvetica-Bold", 16)
pdf.drawString(260, 600, "Survey 1")
from reportlab.lib import colors
from reportlab.lib.pagesizes import letter
from reportlab.platypus import SimpleDocTemplate, Table, TableStyle
width = 400
height = 100
#Survey 1
x1 = 125
y1 = 540
t1=Table(s1_lol)
t1.setStyle(TableStyle([('INNERGRID', (0,0), (-1,-1), 0.25, colors.black),
('BOX', (0,0), (-1,-1), 0.25, colors.black),
('TEXTCOLOR',(0,0),(0,0),colors.white)]))
t1.wrapOn(pdf, width, height)
t1.drawOn(pdf, x1, y1)
#Survey 2
pdf.setFont("Helvetica-Bold", 16)
pdf.drawString(260, 520, "Survey 2")
x2 = 125
y2 = 460
t2=Table(s2_lol)
t2.setStyle(TableStyle([('INNERGRID', (0,0), (-1,-1), 0.25, colors.black),
('BOX', (0,0), (-1,-1), 0.25, colors.black),
('TEXTCOLOR',(0,0),(0,0),colors.white)]))
t2.wrapOn(pdf, width, height)
t2.drawOn(pdf, x2, y2)
#Difference
pdf.setFont("Helvetica-Bold", 16)
pdf.drawString(70, 430, "End of Survey Difference")
pdf.drawString(328, 430, "End of Survey Misclosure")
x3 = 50
y3 = 385
t3=Table(end_of_surv_diff)
t3.setStyle(TableStyle([('INNERGRID', (0,0), (-1,-1), 0.25, colors.black),
('BOX', (0,0), (-1,-1), 0.25, colors.black),
('TEXTCOLOR',(0,0),(0,0),colors.black)]))
t3.wrapOn(pdf, width, height)
t3.drawOn(pdf, x3, y3)
#Misclosure table
qc_pass = ('TEXTCOLOR',(-1,-1),(-1,-1), colors.green)
qc_fail = ('TEXTCOLOR',(-1,-1),(-1,-1), colors.red)
global color
if misclosure <= depth * 1/100:
color = qc_pass
else:
color = qc_fail
x4 = 320
y4 = 385
t4=Table(end_of_surv_misc)
t4.setStyle(TableStyle([('INNERGRID', (0,0), (-1,-1), 0.25, colors.black),
('BOX', (0,0), (-1,-1), 0.25, colors.black),
('FONTNAME', (-1, 1), (-1, -1), 'Helvetica-Bold'),
color]))
t4.wrapOn(pdf, width, height)
t4.drawOn(pdf, x4, y4)
#Misclosure plot
imgdata1 = BytesIO()
ax1 = plt.Axes(fig1, [0., 0., 1., 1.])
ax1.set_axis_off()
fig1.add_axes(ax1)
fig1.savefig(imgdata1, format='svg',dpi = 800)
imgdata1.seek(0) # rewind the data
Image2 = svg2rlg(imgdata1)
renderPDF.draw(Image2, pdf,21, 100)
pdf.setFont("Helvetica", 14)
page_number = pdf.getPageNumber()
pdf.drawString(500, 20, "Page " + str(page_number))
pdf.drawString(45, 20, project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) +
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")")
pdf.showPage()
# 3rd Page
imgdata2 = BytesIO()
ax2 = plt.Axes(fig2, [0., 0., 1., 1.])
ax2.set_axis_off()
fig2.add_axes(ax2)
fig2.savefig(imgdata2, format='svg',dpi = 800)
imgdata2.seek(0) # rewind the data
Image3 = svg2rlg(imgdata2)
renderPDF.draw(Image3, pdf,-66, 70)
pdf.setFont("Helvetica", 14)
page_number = pdf.getPageNumber()
pdf.drawString(500, 20, "Page " + str(page_number))
pdf.drawString(45, 20, project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) +
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")")
pdf.showPage()
# 4th Page
imgdata3 = BytesIO()
ax15 = plt.Axes(fig15, [0., 0., 1., 1.])
ax15.set_axis_off()
fig15.add_axes(ax15)
fig15.savefig(imgdata3, format='svg',dpi = 800)
imgdata3.seek(0) # rewind the data
Image4 = svg2rlg(imgdata3)
renderPDF.draw(Image4, pdf,5, 450)
imgdata4 = BytesIO()
ax3 = plt.Axes(fig3, [0., 0., 1., 1.])
ax3.set_axis_off()
fig3.add_axes(ax3)
fig3.savefig(imgdata4, format='svg',dpi = 800)
imgdata4.seek(0) # rewind the data
Image5 = svg2rlg(imgdata4)
renderPDF.draw(Image5, pdf,5, 90)
pdf.setFont("Helvetica", 14)
page_number = pdf.getPageNumber()
pdf.drawString(500, 20, "Page " + str(page_number))
pdf.drawString(45, 20, project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) +
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")")
pdf.showPage()
# 5th Page
#Easting
imgdata5 = BytesIO()
ax4 = plt.Axes(fig4, [0., 0., 1., 1.])
ax4.set_axis_off()
fig4.add_axes(ax4)
fig4.savefig(imgdata5, format='svg',dpi = 800)
imgdata5.seek(0) # rewind the data
Image6 = svg2rlg(imgdata5)
renderPDF.draw(Image6, pdf,45, 580)
#Northing
imgdata6 = BytesIO()
ax5 = plt.Axes(fig5, [0., 0., 1., 1.])
ax5.set_axis_off()
fig5.add_axes(ax5)
fig5.savefig(imgdata6, format='svg',dpi = 800)
imgdata6.seek(0) # rewind the data
Image7 = svg2rlg(imgdata6)
renderPDF.draw(Image7, pdf,45, 320)
#Elevation
imgdata7 = BytesIO()
ax6 = plt.Axes(fig6, [0., 0., 1., 1.])
ax6.set_axis_off()
fig6.add_axes(ax6)
fig6.savefig(imgdata7, format='svg',dpi = 800)
imgdata7.seek(0) # rewind the data
Image8 = svg2rlg(imgdata7)
renderPDF.draw(Image8, pdf,45, 60)
pdf.setFont("Helvetica", 14)
page_number = pdf.getPageNumber()
pdf.drawString(500, 20, "Page " + str(page_number))
pdf.drawString(45, 20, project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) +
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")")
pdf.showPage()
# 6th Page
# Positional Comparison
imgdata8 = BytesIO()
ax7 = plt.Axes(fig7, [0., 0., 1., 1.])
ax7.set_axis_off()
fig7.add_axes(ax7)
fig7.savefig(imgdata8, format='svg',dpi = 800)
imgdata8.seek(0) # rewind the data
Image9 = svg2rlg(imgdata8)
renderPDF.draw(Image9, pdf,30, 490)
# DLS
imgdata9 = BytesIO()
ax8 = plt.Axes(fig8, [0., 0., 1., 1.])
ax8.set_axis_off()
fig8.add_axes(ax8)
fig8.savefig(imgdata9, format='svg',dpi = 800)
imgdata9.seek(0) # rewind the data
Image10 = svg2rlg(imgdata9)
renderPDF.draw(Image10, pdf,30, 90)
page_number = pdf.getPageNumber()
pdf.drawString(500, 20, "Page " + str(page_number))
pdf.drawString(45, 20, project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) +
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")")
pdf.showPage()
# 7th Page
# Ip and Down
imgdata10 = BytesIO()
ax9 = plt.Axes(fig9, [0., 0., 1., 1.])
ax9.set_axis_off()
fig9.add_axes(ax9)
fig9.savefig(imgdata10, format='svg', dpi=800)
imgdata10.seek(0) # rewind the data
Image11 = svg2rlg(imgdata10)
renderPDF.draw(Image11, pdf, 30, 490)
# Left and Right
imgdata11 = BytesIO()
ax10 = plt.Axes(fig10, [0., 0., 1., 1.])
ax10.set_axis_off()
fig10.add_axes(ax10)
fig10.savefig(imgdata11, format='svg', dpi=800)
imgdata11.seek(0) # rewind the data
Image12 = svg2rlg(imgdata11)
renderPDF.draw(Image12, pdf, 30, 90)
page_number = pdf.getPageNumber()
pdf.drawString(500, 20, "Page " + str(page_number))
pdf.drawString(45, 20, project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) +
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")")
pdf.save()
#Generating PDF Report (Drilling QC)
actual_dip = round((survey_in["Dip"].iloc[-1] + survey_out["Dip"].iloc[-1]) / 2, 1)
actual_azimuth = round((survey_in["Azimuth"].iloc[-1] + survey_out["Azimuth"].iloc[-1]) / 2, 1)
planned_actual_t = [[bh_id, "Planned", "Actual"], ["Dip", collar_dip, actual_dip],
["Azimuth", collar_azimuth, actual_azimuth]]
misc_to_target_t = [["Survey Name", "Survey 1", "Survey 2", "Average"],
["Easting Difference (m)", round(targ_east_differ1, 2), round(targ_east_differ2, 2),
round((targ_east_differ1 + targ_east_differ2) / 2, 2)],
["Northing Difference (m)", round(targ_nort_differ1, 2),
round(targ_nort_differ2, 2),
round((targ_nort_differ1 + targ_nort_differ2) / 2, 2)],
["Elevation Difference (m)", round(targ_elev_differ1, 2),
round(targ_elev_differ2, 2),
round((targ_elev_differ1 + targ_elev_differ2) / 2, 2)],
["Total Misclosure (m)", round(tot_misc_to_targ1, 2), round(tot_misc_to_targ2, 2),
round((tot_misc_to_targ1 + tot_misc_to_targ2) / 2, 2)],
["Percentage Misclosure %", round(tot_misc_to_targ1_perc, 2),
round(tot_misc_to_targ2_perc, 2),
round((tot_misc_to_targ1_perc + tot_misc_to_targ2_perc) / 2, 2)]]
#PDF 2
outfilename2 = project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) + \
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")" + "-Drilling Report.pdf"
output_path = "Output"
output_file2 = os.path.join(script_dir, output_path, outfilename2)
#outfiledir2 = os.path.expanduser("~"),
#outfilepath2 = os.path.join(os.path.expanduser("~"), "Documents/", outfilename2)
pdf2 = canvas.Canvas(output_file2, pagesize=A4)
pdf2.setTitle("")
# 1st Page
# Target plot
imgdata1 = BytesIO()
# ax11 = plt.Axes(fig11, [0., 0., 1., 1.])
# ax11.set_axis_off()
# fig11.add_axes(ax11)
fig11.savefig(imgdata1, format='svg', dpi=fig11.dpi)
# fig11.savefig(imgdata1, format='svg',dpi = 800)
imgdata1.seek(0) # rewind the data
Image1 = svg2rlg(imgdata1)
# Image1._showBoundary = False
renderPDF.draw(Image1, pdf2, 40, 185)
pdf2.setFont("Helvetica-Bold", 24)
pdf2.drawString(90, 745, "DRILLING QUALITY CHECK REPORT")
# target tabele
x1 = 380
y1 = 113
t1 = Table(planned_actual_t)
t1.setStyle(TableStyle([('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
('BOX', (0, 0), (-1, -1), 0.25, colors.black),
('TEXTCOLOR', (0, 0), (0, 0), colors.black)]))
t1.wrapOn(pdf2, width, height)
t1.drawOn(pdf2, x1, y1)
# Misclosure to target tabele
pdf2.setFont("Helvetica-Bold", 16)
pdf2.drawString(220, 180, "Misclosure to Target")
x2 = 80
y2 = 60
t2 = Table(misc_to_target_t)
t2.setStyle(TableStyle([('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
('BOX', (0, 0), (-1, -1), 0.25, colors.black),
('TEXTCOLOR', (0, 0), (0, 0), colors.black)]))
t2.wrapOn(pdf2, width, height)
t2.drawOn(pdf2, x2, y2)
pdf2.setFont("Helvetica", 14)
page_number = pdf2.getPageNumber()
pdf2.drawString(500, 20, "Page " + str(page_number))
pdf2.drawString(45, 20, project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) +
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")")
pdf2.drawImage(logo, 515, 775, width=logo_w, height=logo_h)
pdf2.showPage()
# 3D PLOT of DEVIATION FROM TARGET
imgdata2 = BytesIO()
ax12 = plt.Axes(fig12, [0., 0., 1., 1.])
ax12.set_axis_off()
fig12.add_axes(ax12)
fig12.savefig(imgdata2, format='svg',dpi = 800)
imgdata2.seek(0) # rewind the data
Image2 = svg2rlg(imgdata2)
renderPDF.draw(Image2, pdf2, -50, 100)
pdf2.setFont("Helvetica", 20)
pdf2.drawString(140, 720, "Planned and Actual Borehole Paths")
pdf2.setFont("Helvetica", 14)
page_number = pdf2.getPageNumber()
pdf2.drawString(500, 20, "Page " + str(page_number))
pdf2.drawString(45, 20, project_id + "-" + bh_id + " " + "(" + str(survey_in["Station"].iloc[0]) +
".0m" + "-" + str(survey_in["Station"].iloc[-1]) + ".0m" + ")")
pdf2.save()
return generate_report
except Exception:
traceback.print_exc()
while(True):
pass |
#Jonathan Poch
#CS 3923
# Print each line of the hash dump followed by its fourth comma-separated field.
# Fixes: the file handle was never closed (use a context manager), the mode was
# 'r+' although nothing is written, the builtin name `list` was shadowed, and
# readlines() loaded the entire file into memory at once.
with open("Password Hashes/AshleyMadison.txt", "r") as testFile:
    for line in testFile:
        fields = line.split(",")
        print(line.rstrip() + " " + fields[3])
|
class Solution:
    def myAtoi(self, str1):
        """Convert *str1* to a 32-bit signed integer (simplified atoi).

        Skips leading spaces, reads an optional single sign, consumes
        ASCII digits, and clamps the result to the 32-bit signed range.
        Returns 0 when no digits are found.
        """
        upper_bound = 2147483647
        lower_bound = -2147483648
        length = len(str1)
        pos = 0
        # Skip leading spaces.
        while pos < length and str1[pos] == " ":
            pos += 1
        # At most one explicit sign.
        negative = False
        if pos < length and (str1[pos] == "-" or str1[pos] == "+"):
            negative = str1[pos] == "-"
            pos += 1
        # Accumulate consecutive ASCII digits.
        magnitude = 0
        while pos < length and "0" <= str1[pos] <= "9":
            magnitude = magnitude * 10 + (ord(str1[pos]) - ord("0"))
            pos += 1
        value = -magnitude if negative else magnitude
        # Clamp to the 32-bit signed integer range.
        if value < lower_bound:
            return lower_bound
        if value > upper_bound:
            return upper_bound
        return value
# Note: Python has no built-in MAX_INT / MIN_INT constants, hence the explicit bounds above.
|
import sys
from collections import deque
NOT_VISITED = -1
# Read input
t = int(sys.stdin.readline().rstrip())  # number of test cases
answer = []  # one BFS distance per test case, printed at the end
# The eight knight-move offsets (row deltas and column deltas, paired by index).
dx = [-1, -2, -2, -1, 1, 2, 2, 1]
dy = [-2, -1, 1, 2, -2, -1, 1, 2]
# ํ์ ๊ฐ๋ฅํ ์ขํ์ธ์ง ํ์ธ
def isSearchableCoordiante(x, y, len):
    """Return True when (x, y) lies on a len x len board, else False.

    Note: the third parameter shadows the builtin ``len``; the name is
    kept unchanged for backward compatibility with existing callers.
    """
    # The original fell through and returned None for out-of-range
    # coordinates; make the falsy result an explicit bool.
    return 0 <= x < len and 0 <= y < len
# ์ฒด์คํ์ ์ด๋ ๊ฑฐ๋ฆฌ ๊ณ์ฐ
def getChessboardDistance(x, y, l, board):
    """Fill *board* in place with minimum knight-move counts from (x, y).

    Level-order BFS over an l x l board: every square reachable in k
    moves is stamped with k; unreachable squares keep NOT_VISITED.
    """
    board[x][y] = 0  # the start square is zero moves away
    frontier = deque([[x, y]])
    steps = 0
    while frontier:
        steps += 1  # one BFS level == one extra knight move
        # Process exactly the squares discovered at the previous level.
        for _ in range(len(frontier)):
            row, col = frontier.popleft()
            for move in range(8):
                next_row = row + dx[move]
                next_col = col + dy[move]
                if isSearchableCoordiante(next_row, next_col, l) and board[next_row][next_col] == NOT_VISITED:
                    board[next_row][next_col] = steps
                    frontier.append([next_row, next_col])
# Repeat for each test case
for _ in range(t):
    length = int(sys.stdin.readline().rstrip())  # board side length
    current_x, current_y = map(int, sys.stdin.readline().split())  # knight start square
    target_x, target_y = map(int, sys.stdin.readline().split())  # destination square
    # Fresh distance grid per test case; NOT_VISITED marks unreached squares.
    chessboard = [[NOT_VISITED] * length for _ in range(length)]
    # BFS stamps every square with its minimum knight-move count from the start.
    getChessboardDistance(current_x, current_y, length, chessboard)
    answer.append(chessboard[target_x][target_y])
# Print answers, one per line.
print(*answer, sep='\n')
from functools import wraps
from flask import g, request
from flask_restx import Resource as RestResource
from core.db import session
from core.exceptions import AuthError, AuthorizationError, BadRequestError
from services import services
def get_current_user():
    """Resolve the request's TOKEN header to a user, or None when the header is absent/empty."""
    raw_token = request.headers.get("TOKEN")
    if not raw_token:
        return None
    payload = services.token_service.decode_access_token(raw_token)
    return services.user.get(payload.user_id)
def login_required(func):
    """Decorator: require a TOKEN header and expose the decoded token as g.access_token."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        raw_token = request.headers.get("TOKEN")
        if not raw_token:
            raise AuthError("Access token required")
        # Stash the decoded token on the request context for the view to use.
        g.access_token = services.token_service.decode_access_token(raw_token)
        return func(*args, **kwargs)
    return wrapper
def is_authorized(permission_name):
    """Decorator factory: allow the view only when the caller holds *permission_name*."""
    def func_wrapper(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            raw_token = request.headers.get("TOKEN")
            if not raw_token:
                raise AuthError("Access token required")
            decoded = services.token_service.decode_access_token(raw_token)
            roles_and_perms = (
                services.authorization_service.get_user_roles_permissions(
                    user_id=decoded.user_id
                )
            )
            # Guard clause: reject before dispatching to the view.
            if permission_name not in roles_and_perms["user_permissions"]:
                raise AuthorizationError("Forbidden, you don't have permission to access")
            return func(*args, **kwargs)
        return wrapper
    return func_wrapper
def is_superuser(func):
    """Decorator: allow the view only for users holding the 'superuser' role."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        raw_token = request.headers.get("TOKEN")
        if not raw_token:
            raise AuthError("Access token required")
        decoded = services.token_service.decode_access_token(raw_token)
        roles_and_perms = (
            services.authorization_service.get_user_roles_permissions(
                user_id=decoded.user_id
            )
        )
        # Guard clause: reject before dispatching to the view.
        if "superuser" not in roles_and_perms["user_roles"]:
            raise AuthorizationError("Forbidden, you don't have permission to access")
        return func(*args, **kwargs)
    return wrapper
def captcha_challenge(func):
    """Decorator: require and verify a CAPTCHA_HASH_KEY header before running the view."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        captcha_key = request.headers.get("CAPTCHA_HASH_KEY")
        if not captcha_key:
            raise BadRequestError("Captcha hash key is required")
        # The captcha service raises on an invalid key, aborting the request.
        services.captcha.verify(captcha_key)
        return func(*args, **kwargs)
    return wrapper
class Resource(RestResource):
    """Base API resource that wraps every dispatch in a DB session transaction.

    Commits on success, rolls back on any exception, and always removes
    the scoped session afterwards.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Convenience handle; the Api object is expected to carry the
        # service registry (see `services` import at module top).
        self.services = self.api.services

    def dispatch_request(self, *args, **kwargs):
        """Dispatch the request, committing on success and rolling back on error."""
        try:
            resp = super().dispatch_request(*args, **kwargs)
            session.commit()
            return resp
        except Exception:
            session.rollback()
            # Bare `raise` re-raises the active exception with its
            # original traceback (instead of `raise exc`).
            raise
        finally:
            session.remove()
|
from django.contrib import admin
# Register your models here.
from .models import pharmacogenes, drug, snp, star_allele, study
# Register every pharmacogenomics model with the admin site (same order as before).
for _model in (pharmacogenes, drug, snp, star_allele, study):
    admin.site.register(_model)
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import csv
from operator import itemgetter
def getYearMean(df, year):
    """Return the mean of the `price` column over rows of *df* whose `year` equals *year*."""
    rows_for_year = df[df["year"] == year]
    return rows_for_year.price.mean()
def getLinkedProduct(product):
    """Look up the production-dataset product names linked to a price-dataset *product*.

    Reads the mapping from Linked_products.csv on every call and returns
    the unique `production_df_product` values for the matching rows.
    """
    mapping = pd.read_csv('Linked_products.csv', encoding='UTF-8', delimiter=";")
    matches = mapping[mapping["price_df_product"] == product]
    return matches.production_df_product.unique()
def plotScatter(productionDf, priceDf, country, priceProduct):
    """Scatter-plot yearly mean price of *priceProduct* against production volume.

    For each production-dataset product linked to *priceProduct*, pairs the
    years present in both datasets for *country* and shows one scatter plot
    per linked product (blocking on plt.show()).
    """
    # Restrict price observations to the requested product and country.
    priceDf = priceDf.query("_product==\"" + str(priceProduct) + "\" &country==\"" +str(country) + "\"")
    print(priceProduct)
    productionProducts = getLinkedProduct(priceProduct)
    print(productionProducts)
    priceYears = priceDf.year.unique()
    for productionProduct in productionProducts:
        prodPerProdDf = productionDf.query('Item=="' + str(productionProduct) + '"&Area=="'+str(country) + '"')
        productionYears = prodPerProdDf.Year.unique()
        # Only years appearing in both datasets can be paired on the plot.
        commonYears = list(set(priceYears).intersection(productionYears))
        productions = []
        prices = []
        for year in commonYears:
            # NOTE(review): Year is compared as a string here — confirm the
            # production CSV's Year column dtype matches.
            yearlyProduction = prodPerProdDf.query('Year=="' + str(year) + '"')
            productions.append(yearlyProduction.iloc[0]['Value'])
            prices.append(getYearMean(priceDf, year))
        print(commonYears)
        plt.scatter(productions, prices)
        plt.title("Relation between {} price and {} production in {}".format(priceProduct, productionProduct, country))
        plt.xlabel('{} price'.format(priceProduct))
        plt.ylabel('{} production (tonnes)'.format(productionProduct))
        plt.show()
    return
def findBestProducts(minimumData):
    """Return (overlap, country, price_product, production_product) tuples
    for every combination with at least *minimumData* overlapping years of
    data, sorted by overlap descending.

    Relies on the module-level linkedProductsDf, productionDf and priceDf
    frames loaded in the __main__ block.
    """
    priceProducts = linkedProductsDf.price_df_product.unique()
    countries = productionDf.Area.unique()
    availableData = []
    for country in countries:
        for priceProduct in priceProducts:
            priceProductDf = priceDf.query("_product==\"" + str(priceProduct) + "\" &country==\"" + str(country) + "\"")
            priceYears = priceProductDf.year.unique()
            productionProducts = getLinkedProduct(priceProduct)
            for productionProduct in productionProducts:
                prodPerProdDf = productionDf.query('Item=="' + str(productionProduct) + '"&Area=="' + str(country) + '"')
                productionYears = prodPerProdDf.Year.unique()
                # Number of years covered by both price and production data.
                commonYearsLen = len(set(priceYears).intersection(productionYears))
                if commonYearsLen >= minimumData:
                    availableData.append((commonYearsLen, country, priceProduct, productionProduct))
    # Best-covered combinations first.
    availableData.sort(key=itemgetter(0), reverse=True)
    print("There are {} products with these requirements".format(len(availableData)))
    return availableData
def plotBest(minimum):
    """Plot every (country, price product) pair that has at least *minimum*
    overlapping years of price and production data."""
    for overlap, country, price_product, production_product in findBestProducts(minimum):
        print((overlap, country, price_product, production_product))
        plotScatter(productionDf, priceDf, country, price_product)
if __name__ == "__main__":
    # Load the product-link table, the cleaned production data and the WFP
    # food-price dataset (module-level so the helper functions can see them).
    linkedProductsDf = pd.read_csv('Linked_products.csv', encoding='UTF-8', delimiter=";")
    productionDf = pd.read_csv('cleaned_reduced_production.csv')
    priceDf = pd.read_csv('WFPVAM_FoodPrices_05-12-2017.csv', encoding='latin-1')
    # Rename the WFP's abbreviated column names to readable ones
    # ('cm_name' -> '_product', 'mp_price' -> 'price', ...).
    priceDf.rename(columns={'adm0_id': 'country_ID', 'adm0_name': 'country', 'adm1_id' : 'district_ID', \
    'adm1_name' : 'district', 'mkt_id' : 'market_ID', 'mkt_name' : 'market' , \
    'cm_id' : 'product_ID','cm_name' : '_product', 'cur_id' : 'currency_ID', \
    'cur_name' : 'currency', 'pt_id' : 'sale_ID', 'pt_name' : 'sale', 'um_id' : 'unit_ID', \
    'um_name' : 'unit', 'mp_month' : 'month', 'mp_year' : 'year', 'mp_price' : 'price', \
    'mp_commoditysource' : 'source'}, inplace=True)
    # Plot every combination with at least 15 overlapping years of data.
    plotBest(15)
|
class No:
    """A singly-linked-list node: a payload plus a next-node reference."""

    def __init__(self, conteudo="", prox=None):
        # Instance attributes replace the original class-level defaults
        # (shared fallbacks on the class object); the optional arguments keep
        # the zero-argument `No()` construction used elsewhere working and
        # additionally allow building a node in one call.
        self.conteudo = conteudo  # payload value
        self.prox = prox          # next node, or None at the tail
class LSE:
    """Singly linked list (Lista Simplesmente Encadeada).

    Nodes are any objects exposing ``conteudo`` (payload) and ``prox``
    (next-node reference) attributes.
    """

    def __init__(self):
        self.cabeca = None  # head node
        self.cauda = None   # tail node
        self.tam = 0        # number of nodes (tamanho da lista)

    def tamanho(self):
        """Return the number of nodes in the list."""
        return self.tam

    def inserirInicio(self, novo):
        """Insert node *novo* at the head."""
        if self.cabeca is None:
            self.cabeca = novo
            self.cauda = novo
        else:
            novo.prox = self.cabeca
            self.cabeca = novo
        self.tam += 1

    def imprimir(self):
        """Print every node's payload, head to tail.

        Bug fix: the original dereferenced ``self.cabeca.prox`` and crashed
        on an empty list; an empty list now simply prints nothing.
        """
        noAux = self.cabeca
        while noAux is not None:
            print(noAux.conteudo)
            noAux = noAux.prox

    def removerInicio(self):
        """Remove the head node (prints a message when the list is empty)."""
        if self.cabeca is None:
            print("lista vazia!")
        elif self.cabeca is self.cauda:
            self.cabeca = None
            self.cauda = None
            self.tam -= 1
        else:
            self.cabeca = self.cabeca.prox
            self.tam -= 1

    def removerFinal(self):
        """Remove the tail node.

        Bug fix: the original only walked the list — it never unlinked the
        last node, never updated ``cauda`` and never decremented ``tam``.
        """
        if self.cabeca is None:
            print("lista vazia!")
        elif self.cabeca is self.cauda:
            self.cabeca = None
            self.cauda = None
            self.tam -= 1
        else:
            aux = self.cabeca
            while aux.prox is not self.cauda:
                aux = aux.prox
            aux.prox = None
            self.cauda = aux
            self.tam -= 1

    def inserirFinal(self, novo):
        """Append node *novo* at the tail."""
        if self.cabeca is None:
            self.cabeca = novo
            self.cauda = novo
        else:
            self.cauda.prox = novo
            self.cauda = novo
        self.tam += 1

    def inserir(self, novo, i):
        """Insert node *novo* at index *i* (0 == head, tam == tail)."""
        if i > self.tam or i < 0:
            print("indice invรกlido")
        elif i == self.tam:
            self.inserirFinal(novo)
        elif i == 0:
            self.inserirInicio(novo)
        else:
            aux = self.cabeca
            for _ in range(i - 1):
                aux = aux.prox
            novo.prox = aux.prox
            aux.prox = novo
            # Bug fix: mid-list insertion did not update the size counter.
            self.tam += 1
# Demo: build five numbered nodes, exercise the insertion methods and print
# the list before and after a head insertion.
nos = [No() for _ in range(5)]
n1, n2, n3, n4, n5 = nos
for indice, no in enumerate(nos, start=1):
    no.conteudo = str(indice)
lista = LSE()
lista.inserirInicio(n1)
lista.inserirInicio(n3)
lista.inserirFinal(n4)
lista.inserirInicio(n2)
lista.imprimir()
lista.inserir(n5, 0)
print("---------------------------")
lista.imprimir()
print("O tamanho da lista รฉ:", lista.tamanho())
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-04 14:16
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Project, Story and
    UserProfile tables for this app."""

    initial = True

    dependencies = [
        # UserProfile links one-to-one to the configured auth user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=30)),
                ('description', models.CharField(default='Description', max_length=200)),
                ('address', models.CharField(default='Address', max_length=30)),
                ('status', models.CharField(default='Status', max_length=30)),
                ('longitude', models.FloatField(default=0)),
                ('latitude', models.FloatField(default=0)),
            ],
        ),
        migrations.CreateModel(
            # A Story is a sticky-note-style user story belonging to a Project;
            # top/left/width/heigth hold CSS-style pixel strings.
            name='Story',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('text', models.CharField(default='As ... I want ...', max_length=128)),
                ('header', models.CharField(default='header', max_length=20)),
                ('top', models.CharField(default='200px', max_length=6)),
                ('left', models.CharField(default='50px', max_length=6)),
                ('width', models.CharField(default='200px', max_length=6)),
                # NOTE(review): 'heigth' is a typo for 'height' but renaming a
                # migrated column requires a new migration — leave as-is here.
                ('heigth', models.CharField(default='100px', max_length=6)),
                ('theme', models.CharField(default='sticky-note-green-theme', max_length=30)),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scrum.Project')),
            ],
        ),
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('website', models.URLField(blank=True)),
                ('picture', models.ImageField(blank=True, upload_to='profile_images')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
# coding:utf-8
from __future__ import unicode_literals
from gensim.models.word2vec import Word2Vec
from datetime import timedelta
from glob import glob
import numpy as np
import codecs
import time
import os
def current_time():
    """Return the current local time formatted as 'YYYY:MM:DD:HH:MM:SS'.

    Bug fix: the original called ``datetime.datetime.now()`` although the
    file only does ``from datetime import timedelta``, so every call raised
    NameError.  ``time.strftime`` (the ``time`` module is already imported)
    produces the same string.
    """
    return time.strftime('%Y:%m:%d:%H:%M:%S')
def get_time(start_time):
    """Return the elapsed wall-clock time since *start_time* (a time.time()
    stamp) as a timedelta rounded to whole seconds.

    Bug fix: the original computed ``start_time - end_time``, which yields a
    negative duration; the operands are now the right way around.
    """
    end_time = time.time()
    return timedelta(seconds=int(round(end_time - start_time)))
# NOTE(review): this script is Python 2 (print statements). The non-ASCII
# text in strings and comments is mojibake from an encoding mix-up; the
# runtime strings are kept byte-for-byte — confirm the original encoding.
if __name__ == '__main__':
    start_time = time.time()
    # Root directory of word-segmented (whitespace-tokenised) documents.
    # data_dir = '/home/abc/ssd/pzw/nlp/data/0523/word_sep_ht03/'
    data_dir = '/home/zhwpeng/abc/nlp/data/0324/word_sep/'
    if not os.path.exists('model/'):
        os.makedirs('model/')
    # Collect the .txt files from every subdirectory.
    txt_dirs = list()
    for fold in glob(data_dir+'*'):
        # Cap at 100 files per folder — a small local batch for quick runs
        # (original comment: ๆฌๅฐๅฐๆน้ๆฐๆฎ).
        # txt_dirs = txt_dirs + glob(fold+'/*.txt')
        txt_dirs = txt_dirs + glob(fold+'/*.txt')[:100]
    print "่ฎญ็ปๆ ทๆฌๆปๆฐๆฏ{}".format(len(txt_dirs))
    np.random.shuffle(txt_dirs)
    # Build the training corpus: one token list (first 400 tokens) per file.
    sample_content = []
    for txt in txt_dirs:
        with codecs.open(txt, 'r', encoding='utf-8') as fr:
            content = fr.read()
            if content == ' ':
                continue
            content = content.split(' ')[:400]
            if len(content):
                # (commented-out variant: drop single-character tokens)
                # content = [word for word in content if len(word) > 1][:400]
                # print ' '.join(content)
                sample_content.append(content)
    print "่ฎญ็ปๆ ทๆฌๆปๆฐๆฏ{}".format(len(sample_content))
    print 'take time {}'.format(get_time(start_time))
    # Train the word-vector model (original comment: ่ฎญ็ป่ฏๅ้ๆจกๅ).
    model = Word2Vec(sample_content, size=100, window=5, min_count=5, workers=4)
    # Save it in gensim format and in plain-text word2vec format
    # (original comment: ไฟๅญ่ฏๅ้ๆจกๅ).
    model.save('model/word2vec_v1.m')
    model.wv.save_word2vec_format('model/word2vec_v1.model', binary=False)
    print 'word2vec model saved successfully!'
    print 'take time {}'.format(get_time(start_time))
if 'ๅ
ฌๅธ' in model.wv:
print 'ๅ
ฌๅธ ็่ฏๅ้ๆฏ', model.wv['ๅ
ฌๅธ']
if 'ๅ
ฌๅธ็ ็ฉถ' in model.wv:
print 'ๅ
ฌๅธ็ ็ฉถ ็่ฏๅ้ๆฏ', model.wv['ๅ
ฌๅธ็ ็ฉถ']
if '็ญ็ฅ' in model.wv:
print '็ญ็ฅ ็่ฏๅ้ๆฏ', model.wv['็ญ็ฅ']
|
#coding: utf-8
#-------------------------------------------------------------------
# A program that reads two integers and prints their sum.
#-------------------------------------------------------------------
# Adding in Python - Exercise #003
#-------------------------------------------------------------------
# Prompts are in Portuguese ("enter a value" / "enter another value").
n1 = int(input('Digite um valor: '))
n2 = int(input('Digite outro valor: '))
soma = n1 + n2
print('--' * 22)
print('A soma entre o valor {} e {} รฉ {}.'.format(n1, n2, soma))
print('--' * 22)
|
class Solution:
    def maxSubArray(self, nums: List[int]) -> int:
        """Return the maximum sum of any contiguous subarray (Kadane's
        algorithm, O(n) time / O(1) space).

        Bug fixes vs. the original:
        * an empty input returned the list itself instead of an int — it
          now returns 0;
        * the 47-digit "huge negative literal" sentinel is replaced by
          float('-inf') (the returned value is still the int running sum).
        """
        if not nums:
            return 0
        best = float('-inf')
        running = 0
        for value in nums:
            running += value
            if running > best:
                best = running
            # A negative running sum can never help a later subarray.
            if running < 0:
                running = 0
        return best
|
import pygame
import time
import random
from math import sqrt
pygame.init()
# Window dimensions in pixels.
screenwidth = 1600
screenheight = 720
# These are the limits for the in-game wall so that players, enemies or
# bullets cannot move outside of the walls.
wall_limit_x1 =405
wall_limit_x2 = 1520
wall_limit_y1 = 50
wall_limit_y2 = 665
screen = pygame.display.set_mode((screenwidth,screenheight))
# Here is where all the images will be loaded.
# enemy_images[direction][frame]: one 3-frame goblin animation per facing
# (0=Right, 1=Left, 2=Up, 3=Down — see ParentObject.direction).
enemy_images = [[(pygame.image.load('Goblin1.png')), (pygame.image.load('Goblin2.png')), (pygame.image.load('Goblin3.png'))],[(pygame.image.load('Goblin1.png')), (pygame.image.load('Goblin2.png')), (pygame.image.load('Goblin3.png'))],[(pygame.image.load('Goblin1.png')), (pygame.image.load('Goblin2.png')), (pygame.image.load('Goblin3.png'))],[(pygame.image.load('Goblin1.png')), (pygame.image.load('Goblin2.png')), (pygame.image.load('Goblin3.png'))]]
dungeonBackground = pygame.image.load('DungeonBackground.png')
#Y Scale Factor = 1.2
#x Scale Factor = 1.575
dungeonBackground = pygame.transform.scale(dungeonBackground, (1260, 720))
horizontalDoor = pygame.image.load('HorizontalDoor.png')
horizontalDoor = pygame.transform.scale(horizontalDoor,(65,250))
verticalDoor = pygame.image.load('VerticalDoor.png')
verticalDoor = pygame.transform.scale(verticalDoor,(250,50))
# Collectable images, one per rarity tier (see spawnCollectables).
common_collectable = pygame.image.load("Shoe.png")
rare_collectable = pygame.image.load("Cube.png")
epic_collectable = pygame.image.load("Trophie.png")
legendary_collectable = pygame.image.load("Diamond.png")
# weapon_data[w] = [fire delay (s), damage, bullet speed, ammo, shop cost]
# (indices used by Player.shoot, Enemy.attack and shop()).
weapon_data = [[0.5, 5, 3, 20, 0], [0.05, 2, 6, 100, 500], [3, 50, 2, 5, 1500]]
# This is the 2D array to create the map. It can be edited but for a fully
# functioning map the tiles will need to piece together correctly.
#TRC = TopRightCorner
#TLC = TopLeftCorner
#BRC = BottomRightCorner
#BLC = BottomLefCorner
#XC = Horizontal Corridoor
#YC = Vertical Corridoor
#E = Empty
#UTJ = UpTJunction
#DTJ = DownTJunction
#LTJ = LeftTJunction
#RTJ = RightTJunction
#FWJ = 4 way junction
#OWU = OneWayUp
#OWD = OneWayDown
#OWL = OneWayleft
#OWR = OneWayRight
maps = [[["OWR","XC","UTJ","UTJ", "OWL","E","E","E"],["E","E","RTJ","XC","E","E","E", "E"], ["E","E","YC","E","E","E","E","E"],["E","TRC","DTJ","OWL","E","E","E","E"],["E","YC","E","E","E","E","E","E"],["E","YC","E","E","E","E","E","E"], ["E","E","E","E","E","E","E","E"]],
[],
[]]
# Minimap tile size in pixels (8x7 rooms -> a 336x192 panel).
tileWidth = 42
tileHeight = 24
def drawMiniMap(map_number):
    """Draw map *map_number* as a grid of tiles in the top-left corner of
    the screen: white for empty cells ("E"), black for rooms, plus a black
    border around the whole panel."""
    for row_idx, row in enumerate(maps[map_number]):
        for col_idx, cell in enumerate(row):
            colour = (255, 255, 255) if cell == "E" else (0, 0, 0)
            pygame.draw.rect(screen, colour,
                             [col_idx * tileWidth, row_idx * tileHeight, tileWidth, tileHeight])
    # Border around the minimap.
    pygame.draw.rect(screen, (0, 0, 0), (0, 0, 336, 192), 5)
#This is the parent class that the player and enemy will inherit
class ParentObject (object):
    """Base class for movable characters: position, size, health, weapon
    index (into weapon_data), facing direction and sprite animation state."""

    def __init__(self, x_pos, y_pos,health,weapon,damage_mult, image_array):
        self.x_pos = x_pos
        self.y_pos = y_pos
        self.width = 50
        self.height = 50
        self.health = health
        self.weapon = weapon              # index into weapon_data
        self.speed = 3                    # pixels moved per tick
        self.damage_mult = damage_mult    # multiplier applied to weapon damage
        #This has been changed from the previous method.
        #It makes other algorithms more efficient and simplifies implementation
        #0 = Right
        #1 = Left
        #2 = Up
        #3 = Down
        self.direction = 0
        self.image_array = image_array    # image_array[direction][frame]
        self.image_index = 0              # fractional animation frame counter
        # NOTE(review): the hitbox is computed once here and never updated
        # as the object moves — confirm whether anything relies on it.
        self.hitbox = ((self.x_pos - 3), (self.y_pos - 3), self.width, self.height)

    #This method is responsible for drawing the particular images of either the player or the object
    def draw(self, screen):
        """Blit the current animation frame for the current facing."""
        screen.blit(self.image_array[self.direction][int(self.image_index)], (self.x_pos, self.y_pos))

    def hit(self, damage):
        """Apply *damage* points of damage."""
        self.health -= damage
#The player object inherits the ParentObject class
class Player (ParentObject):
    """User-controlled character: adds score, current map/room coordinates,
    ammo and a mouse-driven shooting cooldown on top of ParentObject."""

    def __init__(self, x_pos, y_pos,health,weapon,damage_mult,map_num, image_array):
        self.score = 0
        self.map_num = map_num      # which entry of `maps` the player is on
        self.room_x_pos = 0         # current room column on the minimap grid
        self.room_y_pos = 0         # current room row on the minimap grid
        #This is the start time of the game. It is responsible for the delay when shooting
        self.previous_click = time.time()
        #Super means that the player inherits the parent class' attributes and methods
        super().__init__(x_pos, y_pos,health,weapon,damage_mult, image_array)
        # Starting ammo for the equipped weapon.
        self.ammo = weapon_data[self.weapon][3]

    #This method is responsible for the user controlling the player
    def movePlayer(self):
        """WASD movement clamped to the room walls; each step advances the
        walk animation by a quarter frame and updates the facing."""
        keys = pygame.key.get_pressed()
        if self.x_pos > wall_limit_x1:
            if keys[pygame.K_a]:
                self.x_pos -= self.speed
                self.image_index += 0.25
                self.direction = 1
            #This had to be changed so that the player's character was in the wall or stopping before the wall
        if self.x_pos + self.width//2 < wall_limit_x2:
            if keys[pygame.K_d]:
                self.x_pos += self.speed
                self.image_index += 0.25
                self.direction = 0
        if self.y_pos > wall_limit_y1:
            if keys[pygame.K_w]:
                self.y_pos -= self.speed
                self.image_index += 0.25
                self.direction = 2
        if self.y_pos + self.height < wall_limit_y2:
            if keys[pygame.K_s]:
                self.y_pos += self.speed
                self.image_index += 0.25
                self.direction =3
        # Wrap the animation back to the first frame.
        if self.image_index >=len(self.image_array[0]):
            self.image_index = 0

    def shoot(self):
        """Spawn a Projectile toward the mouse while LMB is held, ammo
        remains and the weapon cooldown (weapon_data[w][0] s) has elapsed.

        NOTE(review): self.ammo is checked but never decremented here —
        confirm whether ammo consumption is handled elsewhere.
        """
        global mouse, click
        click = pygame.mouse.get_pressed()
        mouse = pygame.mouse.get_pos()
        if click[0] == 1 and self.ammo > 0 and (self.previous_click + weapon_data[self.weapon][0]) < time.time():
            #These if statements are responsible for creating the bullets outside of the player's hitbox
            if mouse[0] > self.x_pos - 20 and mouse[0] < self.x_pos + self.width + 10 and mouse[1] < self.y_pos +20:#Down
                bulletArray.append(Projectile(round(self.x_pos + self.width // 2), (round(self.y_pos + self.height//2)-35), mouse[0], mouse[1], weapon_data[self.weapon][2], (weapon_data[self.weapon][1] * self.damage_mult), (0,0,0), 5))
            elif mouse[0] > self.x_pos - 20 and mouse[0] < self.x_pos + self.width + 10 and mouse[1] > self.y_pos -20: #Up
                bulletArray.append(Projectile(round(self.x_pos + self.width // 2), (round(self.y_pos + self.height//2)+35), mouse[0], mouse[1], weapon_data[self.weapon][2], (weapon_data[self.weapon][1] * self.damage_mult), (0,0,0), 5))
            elif self.x_pos < mouse[0]:#Right
                bulletArray.append(Projectile((round(self.x_pos + self.width // 2)+35), round(self.y_pos + self.height//2), mouse[0], mouse[1], weapon_data[self.weapon][2], (weapon_data[self.weapon][1] * self.damage_mult), (0,0,0), 5))
            elif self.x_pos > mouse[0]:#Left
                bulletArray.append(Projectile((round(self.x_pos + self.width // 2)-35), round(self.y_pos + self.height//2), mouse[0], mouse[1], weapon_data[self.weapon][2], (weapon_data[self.weapon][1] * self.damage_mult), (0,0,0), 5))
            self.previous_click = time.time()
class Enemy(ParentObject):
    """Computer-controlled character. Per the main loop, type "R" shoots
    projectiles at its target while any other type walks toward it."""

    def __init__(self,x_pos, y_pos,health,weapon,damage_mult,image_array,enemy_type):
        self.enemy_type = enemy_type   # "R" = ranged, "M" = chaser (see main loop)
        #This is the start time of the game. It is responsible for the delay when shooting
        self.previous_attack = time.time()
        #This is to prevent an error when a bullet is in two enemy hitboxes
        self.previous_collision = time.time()
        super().__init__(x_pos, y_pos,health,weapon,damage_mult, image_array)

    def moveEnemy(self, target):
        """Step toward *target* at self.speed, pausing for one second after
        a collision (previous_collision is refreshed by checkEnemyCollision).

        NOTE(review): if the enemy ever sits exactly on the target,
        self.distance is 0 and the division below raises — confirm whether
        that position is reachable.
        """
        if self.previous_collision + 1 < time.time():
            self.change_x = target.x_pos - self.x_pos
            self.change_y = target.y_pos - self.y_pos
            self.distance = sqrt(((self.change_x)**2 + (self.change_y)**2))
            # Normalised direction scaled by speed.
            self.x_vel = self.speed * (self.change_x/self.distance)
            self.y_vel = self.speed * (self.change_y/self.distance)
            self.x_pos += self.x_vel
            self.y_pos += self.y_vel

    def attack(self, target):
        """Fire a Projectile at *target*, respecting the weapon cooldown."""
        if (self.previous_attack + weapon_data[self.weapon][0]) < time.time():
            #These if statements are responsible for creating the bullets outside of the enemy's hitbox
            if target.x_pos > self.x_pos - 20 and target.x_pos < self.x_pos + self.width + 10 and target.y_pos < self.y_pos +20:#Down
                bulletArray.append(Projectile(round(self.x_pos + self.width // 2), (round(self.y_pos + self.height//2)-35), target.x_pos, target.y_pos, weapon_data[self.weapon][2], (weapon_data[self.weapon][1] * self.damage_mult), (0,0,0), 5))
            elif target.x_pos > self.x_pos - 20 and target.x_pos < self.x_pos + self.width + 10 and target.y_pos > self.y_pos -20: #Up
                bulletArray.append(Projectile(round(self.x_pos + self.width // 2), (round(self.y_pos + self.height//2)+35), target.x_pos, target.y_pos, weapon_data[self.weapon][2], (weapon_data[self.weapon][1] * self.damage_mult), (0,0,0), 5))
            elif self.x_pos < target.x_pos:#Right
                bulletArray.append(Projectile((round(self.x_pos + self.width // 2)+35), round(self.y_pos + self.height//2), target.x_pos, target.y_pos, weapon_data[self.weapon][2], (weapon_data[self.weapon][1] * self.damage_mult), (0,0,0), 5))
            elif self.x_pos > target.x_pos:#Left
                bulletArray.append(Projectile((round(self.x_pos + self.width // 2)-35), round(self.y_pos + self.height//2), target.x_pos, target.y_pos, weapon_data[self.weapon][2], (weapon_data[self.weapon][1] * self.damage_mult), (0,0,0), 5))
            self.previous_attack = time.time()
enemyArray = []
def spawnEnemies(num_enemies):
    """Append *num_enemies* goblins to enemyArray at random positions inside
    the walls; each is randomly ranged ("R") or melee ("M").

    The RNG call order (type roll, then x, then y) matches the original.
    """
    for _ in range(num_enemies):
        kind = random.randint(1, 2)
        behaviour = "R" if kind == 1 else "M"
        enemyArray.append(Enemy(random.randint(wall_limit_x1, wall_limit_x2),
                                random.randint(wall_limit_y1, wall_limit_y2),
                                50, 0, 0, enemy_images, behaviour))
bulletArray = []
class Projectile:
    """A bullet travelling in a straight line toward a target point."""

    def __init__(self, x_pos, y_pos, end_x_pos, end_y_pos, speed, damage, colour, radius):
        self.x_pos = x_pos
        self.y_pos = y_pos
        self.end_x_pos = end_x_pos
        self.end_y_pos = end_y_pos
        self.speed = speed
        self.damage = damage
        self.colour = colour
        self.radius = radius
        # Direction vector from spawn point to target.
        self.change_x = self.end_x_pos - self.x_pos
        self.change_y = self.end_y_pos - self.y_pos
        self.distance = sqrt(self.change_x ** 2 + self.change_y ** 2)
        # Bug fix: when the target coincides with the spawn point the
        # original divided by zero; such a projectile now just stays put.
        if self.distance == 0:
            self.x_vel = 0
            self.y_vel = 0
        else:
            self.x_vel = self.speed * (self.change_x / self.distance)
            self.y_vel = self.speed * (self.change_y / self.distance)

    def moveProjectile(self):
        """Advance one tick along the precomputed velocity vector."""
        self.x_pos += self.x_vel
        self.y_pos += self.y_vel

    def drawProjectile(self, screen):
        """Draw the projectile as a filled circle on *screen*."""
        pygame.draw.circle(screen, self.colour, (int(self.x_pos), int(self.y_pos)), self.radius)
#Change these to masks
def checkBulletCollision(entity, bullet):
    """Circle-vs-rectangle overlap test between *bullet* and *entity*.

    On a hit, the entity takes the bullet's damage and the bullet is removed
    from the global bulletArray.  Bug fix: the original bare ``except: pass``
    swallowed every error; only the expected "bullet already removed" case
    (ValueError, e.g. the bullet overlapped two enemies) is ignored now, and
    ``pop(index(...))`` is replaced by the equivalent ``remove``.
    """
    if bullet.x_pos - bullet.radius < entity.x_pos + entity.width and bullet.x_pos + bullet.radius > entity.x_pos:
        if bullet.y_pos - bullet.radius < entity.y_pos + entity.height and bullet.y_pos + bullet.radius > entity.y_pos:
            entity.hit(bullet.damage)
            try:
                bulletArray.remove(bullet)
            except ValueError:
                pass
def checkEnemyCollision(player, enemy):
    """Pixel-perfect sprite collision between player and enemy; on contact
    the player takes damage, rate-limited to once per second per enemy."""
    #This creates the masks of the player and the enemy
    player_mask = pygame.mask.from_surface(player.image_array[player.direction][int(player.image_index)])
    enemy_mask = pygame.mask.from_surface(enemy.image_array[enemy.direction][int(enemy.image_index)])
    #An offset is needed when overlapping
    offset = ((player.x_pos - int(enemy.x_pos)),(player.y_pos - int(enemy.y_pos)))
    #If the masks overlap, it will return a mask. Otherwise it will return nothing
    if player_mask.overlap(enemy_mask,offset) and (enemy.previous_collision + 1) < time.time():
        # NOTE(review): weapon_data[...][2] is used as bullet *speed*
        # elsewhere (Projectile) — using it as contact damage here may be a
        # copy-paste bug; confirm the intended column.
        player.hit(weapon_data[enemy.weapon][2])
        #Have a time delay so that the player doesn't die almost instantaneously
        enemy.previous_collision = time.time()
collectableArray = []
class Collectable :
    """A pickup sitting at a fixed position, worth *points* when collected."""

    def __init__(self, x_pos, y_pos, points, image):
        self.x_pos = x_pos
        self.y_pos = y_pos
        self.points = points
        self.image = image

    def drawCollectable (self,screen):
        """Blit the collectable's sprite at its position on *screen*."""
        screen.blit(self.image, (self.x_pos, self.y_pos))
def checkCollectableCollision(player,collectable):
    """Pixel-perfect pickup test: when the player's sprite overlaps the
    collectable's, add its points to the score and remove it from
    collectableArray."""
    player_mask = pygame.mask.from_surface(player.image_array[player.direction][int(player.image_index)])
    collectable_mask = pygame.mask.from_surface(collectable.image)
    # Offset of the player mask relative to the collectable mask.
    offset = ((player.x_pos - collectable.x_pos),(player.y_pos - collectable.y_pos))
    if player_mask.overlap(collectable_mask,offset):
        player.score += collectable.points
        collectableArray.pop(collectableArray.index(collectable))
def spawnCollectables():
    """Maybe spawn one collectable at a random in-wall position.

    Rarity bands from a single 0-100 roll: 100 -> legendary (100 pts),
    85-99 -> epic (50), 50-84 -> rare (20), 30-49 -> common (10).
    A roll of 0-29 spawns nothing (roughly a 30% no-drop chance) —
    presumably intentional; confirm.
    """
    rarity_num = random.randint(0,100)
    if rarity_num == 100:#Legendary
        #Add the collectable to the collactableArray
        collectableArray.append (Collectable((random.randint(wall_limit_x1,wall_limit_x2)), (random.randint(wall_limit_y1,wall_limit_y2)),100, legendary_collectable))
    elif rarity_num >= 85 and rarity_num < 100:#Epic
        collectableArray.append (Collectable((random.randint(wall_limit_x1,wall_limit_x2)), (random.randint(wall_limit_y1,wall_limit_y2)),50, epic_collectable))
    elif rarity_num >= 50 and rarity_num < 85:#Rare
        collectableArray.append (Collectable((random.randint(wall_limit_x1,wall_limit_x2)), (random.randint(wall_limit_y1,wall_limit_y2)),20, rare_collectable))
    elif rarity_num >= 30 and rarity_num < 50:#Commmon
        collectableArray.append (Collectable((random.randint(wall_limit_x1,wall_limit_x2)), (random.randint(wall_limit_y1,wall_limit_y2)),10, common_collectable))
# Door trigger rectangles (x, y, width, height) on each room edge; walking
# into one moves the player to the adjacent room on the minimap grid.
bottom_hitbox = (840, 645, 250, 50 )
top_hitbox = (840, 20, 250, 50 )
left_hitbox = (370, 240, 65, 250)
right_hitbox = (1520, 240, 65, 250)
def BottomHitbox(player):
    """Draw the bottom door; move the player one room down on overlap."""
    if player.x_pos < bottom_hitbox[0] + bottom_hitbox[2] and player.x_pos + player.width > bottom_hitbox[0]:
        if player.y_pos + player.height > bottom_hitbox[1] and player.y_pos < bottom_hitbox[1] + bottom_hitbox[3]:
            player.room_y_pos +=1
            #Not sure where player will spawn yet
            player.x_pos = 970
            player.y_pos = 70
    #Add additional checking to ensure that the player doesn't go to an empty room
    #if grid[player1.player_y_pos][player1.player_x_pos] == "E":
    #player1.player_y_pos -= 1
    # Debug outline of the trigger area plus the door sprite.
    pygame.draw.rect(screen,(255,0,0),bottom_hitbox, 5)
    screen.blit(verticalDoor, (840, 675))
def TopHitbox(player):
    """Draw the top door; move the player one room up on overlap."""
    if player.x_pos < top_hitbox[0] + top_hitbox[2] and player.x_pos + player.width > top_hitbox[0]:
        if player.y_pos + player.height > top_hitbox[1] and player.y_pos < top_hitbox[1] + top_hitbox[3]:
            player.room_y_pos -= 1
            #Not sure where player will spawn yet
            player.x_pos = 970
            player.y_pos = 585
    pygame.draw.rect(screen,(255,0,0),top_hitbox, 5)
    screen.blit(verticalDoor, (840, 0))
def LeftHitbox(player):
    """Draw the left door; move the player one room left on overlap."""
    if player.x_pos < left_hitbox[0] + left_hitbox[2] and player.x_pos + player.width > left_hitbox[0]:
        if player.y_pos + player.height > left_hitbox[1] and player.y_pos < left_hitbox[1] + left_hitbox[3]:
            player.room_x_pos -= 1
            player.x_pos = 1455
            player.y_pos = 360
    pygame.draw.rect(screen,(255,0,0),left_hitbox, 5)
    screen.blit(horizontalDoor, (340, 240))
def RightHitbox(player):
    """Draw the right door; move the player one room right on overlap."""
    if player.x_pos < right_hitbox[0] + right_hitbox[2] and player.x_pos + player.width > right_hitbox[0]:
        if player.y_pos + player.height > right_hitbox[1] and player.y_pos < right_hitbox[1] + right_hitbox[3]:
            player.room_x_pos +=1
            #Not sure where player will spawn yet
            player.x_pos = 450
            player.y_pos = 360
    pygame.draw.rect(screen,(255,0,0), right_hitbox, 5)
    screen.blit(horizontalDoor, (1535, 240))
#These procedures are responsible for checking if the player is in the room hitboxes
#Each type of dungeon room corresponds with a procedure: each room type
#activates only the door hitboxes of the exits that room shape has.
def UpTJunction (player):
    BottomHitbox(player)
    LeftHitbox(player)
    RightHitbox(player)
def DownTJunction(player):
    TopHitbox(player)
    LeftHitbox(player)
    RightHitbox(player)
def LeftTJunction(player):
    TopHitbox(player)
    LeftHitbox(player)
    BottomHitbox(player)
def RightTJunction(player):
    TopHitbox(player)
    RightHitbox(player)
    BottomHitbox(player)
def TopLeftCorner(player):
    # NOTE(review): identical to TopRightCorner — one of the two corners
    # presumably should use LeftHitbox instead of RightHitbox; confirm
    # against the map data before changing, since the sample map relies on
    # the current "TRC" behaviour.
    BottomHitbox(player)
    RightHitbox(player)
def TopRightCorner(player):
    BottomHitbox(player)
    RightHitbox(player)
def BottomRightCorner(player):
    TopHitbox(player)
    LeftHitbox(player)
def BottomLeftCorner(player):
    # Bug fix: WhatRoomIsPlayer dispatches "BLC" rooms to BottomLeftCorner,
    # but it was never defined, so entering one raised NameError.  Mirrored
    # from BottomRightCorner (top exit plus the opposite horizontal exit).
    TopHitbox(player)
    RightHitbox(player)
def VerticalCorridor (player):
    TopHitbox(player)
    BottomHitbox(player)
def HorizontalCorridor(player):
    LeftHitbox(player)
    RightHitbox(player)
def FourWayJunction(player):
    TopHitbox(player)
    BottomHitbox(player)
    LeftHitbox(player)
    RightHitbox(player)
def OneWayUp(player):
    TopHitbox(player)
def OneWayDown(player):
    BottomHitbox(player)
def OneWayLeft(player):
    LeftHitbox(player)
def OneWayRight(player):
    RightHitbox(player)
def WhatRoomIsPlayer(player):
    """Run the door-hitbox procedure matching the player's current room.

    Looks up the room code at the player's minimap coordinates once and
    dispatches on it; unknown codes (e.g. "E") do nothing.
    """
    room = maps[player.map_num][player.room_y_pos][player.room_x_pos]
    if room == "UTJ":
        UpTJunction(player)
    elif room == "DTJ":
        DownTJunction(player)
    elif room == "RTJ":
        RightTJunction(player)
    elif room == "LTJ":
        LeftTJunction(player)
    elif room == "TRC":
        TopRightCorner(player)
    elif room == "TLC":
        TopLeftCorner(player)
    elif room == "BRC":
        BottomRightCorner(player)
    elif room == "BLC":
        BottomLeftCorner(player)
    elif room == "XC":
        HorizontalCorridor(player)
    elif room == "YC":
        VerticalCorridor(player)
    elif room == "FWJ":
        FourWayJunction(player)
    elif room == "OWU":
        OneWayUp(player)
    elif room == "OWD":
        OneWayDown(player)
    elif room == "OWL":
        OneWayLeft(player)
    elif room == "OWR":
        OneWayRight(player)
#Shop buttons: (x, y, width, height) rectangles.
machine_gun_button = (550, 210, 170, 90)
wand_button= (850, 210, 170, 90)
upgrade_button = (1150, 210, 190, 90)
leave_button = (450,100,100,50)
def shop(player):
    """Draw the shop UI and process purchases.

    Uses the global `mouse`/`click` state set by Player.shoot each frame.
    Buttons highlight red on hover; clicking buys a weapon (weapon_data
    cost column) or a 5% damage boost when the player's score suffices.
    NOTE(review): purchases deduct from `score`, which is also the HUD
    score — confirm score doubles as currency by design.
    """
    font1 = pygame.font.SysFont('comicsans', 30)
    machine_gun_text1 = font1.render(("Machine Gun"),1,(0,0,0))
    machine_gun_text2 = font1.render(("Cost: "+ str(weapon_data[1][4])),1,(0,0,0))
    wand_text1 = font1.render(("Wand"),1,(0,0,0))
    wand_text2 = font1.render(("Cost: "+ str(weapon_data[2][4])),1,(0,0,0))
    damage_multiplier_text1 = font1.render(("5% Damage Boost"),1,(0,0,0))
    damage_multiplier_text2 = font1.render(("Cost: 150"),1,(0,0,0))
    leave_text = font1.render(("LEAVE"),1,(0,0,0))
    #Machine Gun Button
    pygame.draw.rect(screen, (255,255,255), machine_gun_button)
    screen.blit(machine_gun_text1, (machine_gun_button[0] +10, machine_gun_button[1] + 10))
    screen.blit(machine_gun_text2, (machine_gun_button[0] +10, machine_gun_button[1] + 40))
    if mouse[0] > machine_gun_button[0] and mouse[0] < machine_gun_button[0] + machine_gun_button[2]:
        if mouse[1] > machine_gun_button[1] and mouse[1] < machine_gun_button[1] + machine_gun_button[3]:
            pygame.draw.rect(screen, (255,0,0), machine_gun_button,3)
            if click[0] == 1 and player.score >= weapon_data[1][4]:
                # Equip weapon 1 and pay its cost.
                player.weapon = 1
                player.score -= weapon_data[1][4]
                #get a purchased sound effect
        else:
            pygame.draw.rect(screen, (0,0,0), machine_gun_button,3)
    else:
        pygame.draw.rect(screen, (0,0,0), machine_gun_button,3)
    #Wand button
    pygame.draw.rect(screen, (255,255,255), wand_button)
    screen.blit(wand_text1, (wand_button[0] + 25, wand_button[1] + 10))
    screen.blit(wand_text2, (wand_button[0] + 10, wand_button[1] + 40))
    if mouse[0] > wand_button[0] and mouse[0] < wand_button[0] + wand_button[2]:
        if mouse[1] > wand_button[1] and mouse[1] < wand_button[1] + wand_button[3]:
            pygame.draw.rect(screen, (255,0,0), wand_button,3)
            if click[0] == 1 and player.score >= weapon_data[2][4]:
                # Equip weapon 2 and pay its cost.
                player.weapon = 2
                player.score -= weapon_data[2][4]
        else:
            pygame.draw.rect(screen, (0,0,0), wand_button,3)
    else:
        pygame.draw.rect(screen, (0,0,0), wand_button,3)
    #Upgrade damage multiplier
    pygame.draw.rect(screen, (255,255,255), upgrade_button)
    screen.blit(damage_multiplier_text1, (upgrade_button[0] + 10, upgrade_button[1] + 10))
    screen.blit(damage_multiplier_text2, (upgrade_button[0] + 10, upgrade_button[1] + 40))
    if mouse[0] > upgrade_button[0] and mouse[0] < upgrade_button[0] + upgrade_button[2]:
        if mouse[1] > upgrade_button[1] and mouse[1] < upgrade_button[1] + upgrade_button[3]:
            pygame.draw.rect(screen, (255,0,0), upgrade_button,3)
            if click[0] == 1 and player.score >= 150:
                player.damage_mult += 0.05
                player.score -= 150
        else:
            pygame.draw.rect(screen, (0,0,0), upgrade_button,3)
    else:
        pygame.draw.rect(screen, (0,0,0), upgrade_button,3)
    #Leave Button (currently only hover feedback; no action on click)
    pygame.draw.rect(screen, (255,255,255), leave_button)
    screen.blit(leave_text, (leave_button[0] +10, leave_button[1]+10))
    if mouse[0] > leave_button[0] and mouse[0] < leave_button[0] + leave_button[2]:
        if mouse[1] > leave_button[1] and mouse[1] < leave_button[1] + leave_button[3]:
            pygame.draw.rect(screen, (255,0,0), leave_button, 3)
        else:
            pygame.draw.rect(screen, (0,0,0), leave_button, 3)
    else:
        pygame.draw.rect(screen, (0,0,0), leave_button, 3)
# The single player: x, y, health, weapon index, damage multiplier, map 0.
# NOTE(review): the player reuses the goblin sprite sheets (enemy_images) —
# presumably a placeholder; confirm.
player1 = Player(1340, 50, 50,0,1,0, enemy_images)
def redraw_game_window():
    """Redraw one frame: background, room doors (only once the level is
    cleared), minimap, score HUD, collectables, bullets, enemies, shop and
    finally the player, then flip the display."""
    screen.fill([255,255,255])
    screen.blit(dungeonBackground, (340,0))#Responsible for drawing the background
    if level_complete == True:
        #This procedure needs to be in the "redraw...()" as the game is drawing the doors to the next dungeons
        WhatRoomIsPlayer(player1)
    drawMiniMap(player1.map_num)#This draws a player on the minimap
    pygame.draw.rect(screen, (255, 0, 0), [((player1.room_x_pos*42)+13),((player1.room_y_pos*24)+8),15, 12])#Draw player1 on minimap
    #Draw the player's score
    font2 = pygame.font.SysFont('comicsans', 50)
    player_score_text = font2.render(("Score: " + str(player1.score)),1,(0,0,0))
    screen.blit(player_score_text, (25, 225))
    #Draw Collectables (only the first one is ever drawn)
    if len(collectableArray) >0 :
        collectableArray[0].drawCollectable(screen)
    #Draw Bullets
    for bullet in bulletArray:
        bullet.drawProjectile(screen)
    #Draw enemies
    for enemy in enemyArray:
        enemy.draw(screen)
    # NOTE(review): the shop UI is drawn every frame, not gated behind a
    # shop room/state — confirm this is intended.
    shop(player1)
    player1.draw(screen)#Draw player
    pygame.display.update()
def end_game():
    """Game-over handling — placeholder, intentionally does nothing yet."""
    pass
def main_game_loop():
    """Run the game loop until the window is closed: move/cull bullets,
    resolve collisions, update enemies, handle pickups, then move the
    player, fire, and redraw."""
    global level_complete,game_loop
    #http://pygametutorials.wikidot.com/book-time
    game_loop = True
    level_complete = False
    spawnEnemies(5)#Edit later
    spawnCollectables()
    while game_loop == True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                game_loop = False
        # Bug fix: iterate over snapshots (list(...)) of the bullet and
        # enemy arrays — the original removed items from the list being
        # iterated, which silently skips the element after every removal.
        for bullet in list(bulletArray):
            if wall_limit_x1 < bullet.x_pos < wall_limit_x2 and wall_limit_y1 < bullet.y_pos < wall_limit_y2:
                bullet.moveProjectile()
            else:
                # Bullet left the room: cull it.
                bulletArray.remove(bullet)
            checkBulletCollision(player1, bullet)
            for enemy in enemyArray:
                checkBulletCollision(enemy, bullet)
        for enemy in list(enemyArray):
            if enemy.health <= 0:
                player1.score += 50
                enemyArray.remove(enemy)
                # Bug fix: a dead enemy no longer collides with or attacks
                # the player on the tick it dies.
                continue
            checkEnemyCollision(player1, enemy)
            if enemy.enemy_type == "R":
                enemy.attack(player1)
            else:
                enemy.moveEnemy(player1)
        # The level is complete exactly when no enemies remain.
        level_complete = len(enemyArray) == 0
        if len(collectableArray) >0 :
            checkCollectableCollision(player1,collectableArray[0])
        player1.movePlayer()
        player1.shoot()
        redraw_game_window()
main_game_loop()
|
import pygame as pg
import math
from settings import *
import pytmx
vec = pg.math.Vector2
class TiledMap:
    """Loads a Tiled (.tmx) map via pytmx and renders it to a surface."""

    def __init__(self, filename):
        tm = pytmx.load_pygame(filename, pixelalpha = True)
        self.width = tm.width * tm.tilewidth       # map width in pixels
        self.height = tm.height * tm.tileheight    # map height in pixels
        self.tmxdata = tm

    def render(self, surface, generator):
        """Blit every tile of every visible tile layer onto *surface*.

        *generator* is advanced once per drawn tile — presumably a loading
        progress coroutine; confirm with the caller.
        """
        ti = self.tmxdata.get_tile_image_by_gid
        for layer in self.tmxdata.visible_layers:
            if isinstance(layer, pytmx.TiledTileLayer):
                for x, y, gid, in layer:
                    tile = ti(gid)
                    # gid 0 yields no image; set_colorkey then raises and
                    # the broad except below deliberately ignores it.
                    try:
                        tile.set_colorkey((255, 0, 255))   # magenta = transparent
                    except Exception as e:
                        pass
                    if tile:
                        #tile.set_alpha(100)
                        next(generator)
                        surface.blit(tile, (x * self.tmxdata.tilewidth + layer.offsetx, y * self.tmxdata.tileheight + layer.offsety))

    def make_map(self, generator):
        """Render the whole map onto a fresh surface and return it."""
        temp_surface = pg.Surface((self.width, self.height))
        self.render(temp_surface, generator)
        return temp_surface
class Camera:
    """Scrolling viewport that keeps a target centred, clamped so the view
    never shows past the map edges (WIDTH/HEIGHT come from settings)."""

    def __init__(self, width, height):
        self.pos = vec(0, 0)   # offset applied to world rects to get screen rects
        self.width = width     # map width in pixels
        self.height = height   # map height in pixels

    def apply(self, entity):
        """Return *entity*'s rect shifted into screen space."""
        return entity.rect.move(self.pos)

    def apply_rect(self, rect):
        """Return *rect* shifted into screen space."""
        return rect.move(self.pos)

    def update(self, target):
        """Centre the view on *target*, clamped to the map bounds."""
        raw_x = int(WIDTH / 2) - target.rect.centerx
        raw_y = int(HEIGHT / 2) - target.rect.centery
        self.pos.x = max(-(self.width - WIDTH), min(0, raw_x))
        self.pos.y = max(-(self.height - HEIGHT), min(0, raw_y))

    def onscreen(self, entity):
        """Return True when *entity*'s top-left corner lies inside the view."""
        in_x = -self.pos.x < entity.rect.x < (-self.pos.x + WIDTH)
        in_y = -self.pos.y < entity.rect.y < (-self.pos.y + HEIGHT)
        return in_x and in_y

    def inside(self):
        """Return the (row, col) map tile at the view's top-left corner."""
        return math.floor(-self.pos.y / TILEHEIGHT), math.floor(-self.pos.x / TILEWIDTH)
# class Map:
# def __init__(self, filename):
# self.data = []
# self.saved_data = []
# with open(filename, "rt") as f:
# for line in f:
# self.data.append(list(line.strip()))
#
# self.tilewidth = len(self.data[0])
# self.tileheight = len(self.data)
# self.width = self.tilewidth * TILEWIDTH
# self.height = self.tileheight * TILEHEIGHT
#
# def update(self, row, col, mod = "."):
# if mod != ".":
# self.saved_data.remove((mod, row, col))
# self.data[row].pop(col)
# else:
# self.saved_data.append((self.data[row].pop(col), row, col))
# self.data[row].insert(col, mod)
#
# def find_player(self):
# for row, tiles in enumerate(self.data):
# for col, tile in enumerate(tiles):
# if tile == PLAYER_LETTER:
# return col, row
|
import pandas as pd
from table_detect import table_detect, colfilter
import ast
def get_annotations_xlsx(path):
    """Parse a header-less label-annotation CSV into per-page bounding boxes.

    Expected layout:
      * every row but the last: column 0 is a page identifier, columns 1+
        hold dict literals like
        {'label': ..., 'left': ..., 'top': ..., 'width': ..., 'height': ...}
      * last row: column 1 holds the table column count ('ncols').

    Returns a dict mapping 'page <n>' -> list of {label: (x1, y1, x2, y2)}
    boxes, plus an 'ncols' entry.  (The name says xlsx for historical
    reasons; the input is CSV.)
    """
    df = pd.read_csv(path, header=None)
    annotate_dict = {}
    number_of_rows = df.shape[0]
    # The final row is the ncols record, so iterate all rows but the last.
    for r in range(number_of_rows - 1):
        curr_row = df.iloc[r, :].tolist()
        page_key = 'page ' + str(r + 1)
        annotate_dict[page_key] = []
        # Column 0 is the page identifier; annotation dicts start at column 1.
        for cell in curr_row[1:]:
            # Skip blank/NaN cells so ragged rows no longer crash literal_eval
            # (the original also left a debug print here; removed).
            if not isinstance(cell, str):
                continue
            res = ast.literal_eval(cell)
            x1 = int(res['left'])
            y1 = int(res['top'])
            x2 = x1 + int(res['width'])
            y2 = y1 + int(res['height'])
            annotate_dict[page_key].append({res['label']: (x1, y1, x2, y2)})
    annotate_dict['ncols'] = df.iloc[number_of_rows - 1, 1]
    return annotate_dict
# Smoke-run against the repo's sample file at import time.
# NOTE(review): consider guarding with `if __name__ == "__main__":` so the
# module can be imported without touching the filesystem.
temp = get_annotations_xlsx("annotate/def.csv")
print(temp) |
#!/usr/bin/python3
from math import cos,sin,exp
# Initial variables and constants
t=0      # integration starts at t = 0
h=0.05   # fixed timestep
# Function to integrate
def f(t, y):
    """Right-hand side of the ODE y' = exp(-t^2/2) - t*y."""
    return exp(-0.5 * t * t) - t * y
# Starting values for modified Euler, improved Euler, and Ralston
yme=0
yie=0
yre=0
# Apply timesteps until t>6
while t<=6:
    # Analytical solution y(t) = t*exp(-t^2/2), used as the reference
    yexact=t*exp(-0.5*t*t)
    # Print the solutions and error (columns: t, three numerical values,
    # exact value, then the three signed errors)
    print(t,yme,yie,yre,yexact,yme-yexact,yie-yexact,yre-yexact)
    # Modified Euler step (midpoint method: slope sampled at t + h/2)
    k1=h*f(t,yme)
    k2=h*f(t+0.5*h,yme+0.5*k1)
    yme+=k2
    # Improved Euler step (Heun's method: average of endpoint slopes)
    k1=h*f(t,yie)
    k2=h*f(t+h,yie+k1)
    yie+=0.5*(k1+k2)
    # Ralston's method (second-order Runge-Kutta, slope at t + 2h/3)
    k1=h*f(t,yre)
    k2=h*f(t+2*h/3.,yre+k1*2/3.)
    yre+=0.25*k1+0.75*k2
    # Update time
    t+=h
|
'''
็ฉ้ต็ธไน
'''
import tensorflow as tf
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# 2x3 zero matrix given explicitly as a nested Python list
mat0 = tf.constant([[0,0,0],[0,0,0]])
# 2x3 zero matrix built by TensorFlow
mat1 = tf.zeros([2,3])
# 3x2 matrix of ones
mat2 = tf.ones([3,2])
# 2x3 matrix filled with the constant 15.
# (In the original, this comment was split across two lines and the orphaned
# fragment `็ฉ้ต(15)` was left as bare code, raising NameError on import.)
mat3 = tf.fill([2,3],15)
# TF1-style graph execution: tf.Session was removed in TF2, this file targets TF1.
with tf.Session() as sess:
    print(sess.run(mat0))
    print('************')
    print(sess.run(mat1))
    print('************')
    print(sess.run(mat2))
    print('************')
    print(sess.run(mat3))
|
# -*- coding: utf-8 -*-
#################################################################################
#
# Copyright (c) 2016-Present site-module Software Pvt. Ltd. (<https://site-module.com/>)
# See LICENSE file for full copyright and licensing details.
###############################################################################
############## Overide Core classes for maintaining OpenCart Information #################
from odoo import api, fields, models, _
from odoo import tools
from odoo.exceptions import UserError
from odoo.tools.translate import _
import json
from . import oobapi
from .oobapi import OpencartWebService, OpencartWebServiceDict
from .core_updated_files import API_PATH
from .core_updated_files import _unescape
import logging
_logger = logging.getLogger(__name__)
class ProductAttribute(models.Model):
    """product.attribute override that mirrors OpenCart option records."""
    _inherit = 'product.attribute'

    @api.model
    def create(self, vals):
        """Create the attribute; when triggered from OpenCart, also store a
        row in opencart.product.option mapping the new record to the
        OpenCart-side option id carried in the context."""
        from_opencart = 'opencart' in self._context
        if from_opencart and 'name' in vals:
            vals['name'] = _unescape(vals['name'])
        record = super(ProductAttribute, self).create(vals)
        if from_opencart and record:
            self.env['opencart.product.option'].create({
                'name': record.id,
                'erp_id': record.id,
                'opencart_id': self._context['opencart_id'],
            })
        return record

    @api.multi
    def write(self, vals):
        """Unescape the name when the write originates from OpenCart."""
        if 'opencart' in self._context and 'name' in vals:
            vals['name'] = _unescape(vals['name'])
        return super(ProductAttribute, self).write(vals)


ProductAttribute()  # legacy pre-8.0 style registration call kept from the original
class ProductAttributeValue(models.Model):
    """product.attribute.value override that mirrors OpenCart option values."""
    _inherit = 'product.attribute.value'

    @api.model
    def create(self, vals):
        """Create the value; when triggered from OpenCart, also record the
        value/option mapping using the ids carried in the context."""
        from_opencart = 'opencart' in self._context
        if from_opencart and 'name' in vals:
            vals['name'] = _unescape(vals['name'])
        record = super(ProductAttributeValue, self).create(vals)
        if from_opencart and record:
            self.env['opencart.product.option.value'].create({
                'name': record.id,
                'erp_id': record.id,
                'opencart_value_id': self._context['opencart_id'],
                'erp_attr_id': vals['attribute_id'],
                'opencart_option_id': self._context['oc_attr_id'],
            })
        return record

    @api.multi
    def write(self, vals):
        """Unescape the name when the write originates from OpenCart."""
        if 'opencart' in self._context and 'name' in vals:
            vals['name'] = _unescape(vals['name'])
        return super(ProductAttributeValue, self).write(vals)


ProductAttributeValue()  # legacy pre-8.0 style registration call kept from the original
class ProductCategory(models.Model):
    """product.category override: maps categories created from OpenCart and
    flags Odoo-side edits for re-synchronisation."""
    _inherit= 'product.category'
    @api.model
    def create(self, vals):
        # When the create comes from OpenCart, unescape the name and record
        # the OpenCart category id.
        if 'opencart' in self._context:
            if 'name' in vals:
                vals['name'] = _unescape(vals['name'])
        cat_id = super(ProductCategory, self).create(vals)
        if 'opencart' in self._context:
            if cat_id:
                # NOTE(review): unlike the attribute overrides, the OpenCart id
                # is read from `vals` here (not from the context) — a missing
                # 'opencart_id' key would raise KeyError; confirm the caller
                # always supplies it.
                self.env['opencart.product.category'].create({
                    'odoo_id':cat_id.id,
                    'ecommerce_cat_id':int(vals['opencart_id']),
                    'created_by':'Opencart'
                })
        return cat_id
    @api.multi
    def write(self, vals):
        # OpenCart-originated writes: just unescape. Odoo-originated writes:
        # mark the mapped OpenCart category as needing a re-sync.
        if 'opencart' in self._context:
            if 'name' in vals:
                vals['name'] = _unescape(vals['name'])
        else:
            # NOTE(review): `self.id` assumes a single-record recordset —
            # a multi-record write would raise; confirm callers.
            opncart_category = self.env['opencart.product.category'].sudo().search([('odoo_id','=',self.id)])
            if opncart_category:
                opncart_category[0].sudo().need_sync = 'yes'
        return super(ProductCategory,self).write(vals)
ProductCategory()
class ResPartner(models.Model):
    """res.partner override: HTML-unescape identity/address fields on records
    coming from OpenCart."""
    _inherit = 'res.partner'

    @api.model
    def create(self, vals):
        """Create the partner, unescaping text fields for OpenCart imports."""
        if 'opencart' in self._context:
            for field in ('name', 'street', 'street2', 'city', 'email'):
                if field in vals:
                    vals[field] = _unescape(vals[field])
        return super(ResPartner, self).create(vals)

    @api.multi
    def write(self, vals):
        """Write the partner, unescaping text fields for OpenCart updates."""
        if 'opencart' in self._context:
            for field in ('name', 'street', 'street2', 'city', 'email'):
                if field in vals:
                    vals[field] = _unescape(vals[field])
        return super(ResPartner, self).write(vals)


ResPartner()  # legacy pre-8.0 style registration call kept from the original
class DeliveryCarrier(models.Model):
    """delivery.carrier override: create carriers pushed from OpenCart, wired
    to a virtual 'shipping' service product."""
    _inherit = 'delivery.carrier'
    @api.model
    def create(self, vals):
        context = self.env.context.copy()
        # NOTE(review): sibling overrides test `'opencart' in self._context`;
        # here the flag is expected inside `vals` — confirm this asymmetry
        # is intended and not a bug.
        if 'opencart' in vals:
            context['ecommerce'] = "opencart"
            context['type'] = 'shipping'
            # Resolve the service product used for OpenCart shipping lines.
            product_id = self.env['wk.skeleton'].with_context(context).get_opencart_virtual_product_id(vals)
            vals['name'] = _unescape(vals['name'])
            temp = self.env['res.users'].browse(self._uid)
            # NOTE(review): company_id is a record, not an id — confirm the
            # partner_id field accepts it (Odoo usually coerces via .id).
            vals['partner_id'] = temp.company_id
            vals['product_type'] = 'service'
            vals['taxes_id'] = False
            vals['product_id'] = product_id
            vals['supplier_taxes_id'] = False
        return super(DeliveryCarrier, self).create(vals)
class SaleOrder(models.Model):
    """sale.order override: links Odoo orders to OpenCart and pushes status
    changes (cancel / shipment / paid) back to the store."""
    _inherit= 'sale.order'
    # OpenCart-side order id for orders imported from the store
    opencart_id = fields.Integer(string='OpenCart Order Id')
    @api.model
    def _get_ecommerces(self):
        # Register 'opencart' as an available e-commerce channel.
        res = super(SaleOrder, self)._get_ecommerces()
        res.append(('opencart','Opencart'))
        return res
    @api.one
    def action_cancel(self):
        # Cancel the order; when the cancellation originates in Odoo (no
        # 'opencart' key in context), propagate the status to OpenCart.
        res = super(SaleOrder, self).action_cancel()
        if 'opencart' not in self._context:
            res = self.manual_opencart_order_status_operation("cancel")
        return res
    @api.one
    def manual_opencart_order_status_operation(self, operation):
        # Push an order-status change ('shipment'/'cancel'/'invoice') to the
        # mapped OpenCart order through the configured API connection.
        # Returns True on success (or when nothing maps), otherwise the
        # error key from the API response as a string.
        text = ''
        status = 'no'
        session = False
        mage_shipment = False  # NOTE(review): unused leftover (Magento naming)
        mapping_ids = self.env['wk.order.mapping'].sudo().search([('erp_order_id','=', self.id)])
        if mapping_ids:
            mapping_obj = mapping_ids[0]
            oc_order_id = mapping_obj.ecommerce_order_id
            # connection tuple layout: (session, session_key, url)
            connection = self.env['opencart.configuration'].sudo()._create_connection()
            if connection:
                url = connection[2]
                session = connection[0]
                session_key = connection[1]
            if session and oc_order_id:
                data = {}
                data['order_id'] = oc_order_id
                route = 'UpdateOrderStatus'
                data['session'] = session_key
                # Map the Odoo-side operation to the OpenCart status keyword.
                if operation == "shipment":
                    data['order_status_id'] = 'delivered'
                elif operation == "cancel":
                    data['order_status_id'] = 'cancel'
                elif operation == "invoice":
                    data['order_status_id'] = 'paid'
                data = json.dumps(data)
                resp = session.get_session_key(url+route, data)
                _logger.info('.......... %r ..............', resp)
                # Response shape: [key, status]
                resp = resp.json()
                key = str(resp[0])
                status = resp[1]
                if status:
                    return True
                return str(key)
        return True
    def manual_opencart_paid(self):
        # Mark the matching OpenCart order as 'paid'. Returns True on
        # success/no-op, the error key string on API failure.
        text = ''
        session = 0
        route = 'UpdateOrderStatus'
        oc_invoice = False
        param = {}
        connection = self.env['opencart.configuration'].sudo()._create_connection()
        if connection:
            url = connection[2]
            session = connection[0]
            session_key = connection[1]
        if session:
            # NOTE(review): uses self._ids[0] — assumes a non-empty recordset.
            map_id = self.env['wk.order.mapping'].sudo().search([('erp_order_id','=',self._ids[0])])
            if map_id:
                map_obj = map_id[0]
                oc_order_id = map_obj.ecommerce_order_id
                data={}
                data['order_id'] = oc_order_id
                data['session'] = session_key
                data['order_status_id'] = 'paid'
                data = json.dumps(data)
                resp = session.get_session_key(url+route, data)
                resp = resp.json()
                key = str(resp[0])
                status = resp[1]
                if status:
                    return True
                return str(key)
        # self._cr.commit()
        return True
SaleOrder()
class account_payment(models.Model):
    """account.payment override: after a payment is posted in Odoo, mark the
    related OpenCart order as paid on the store side."""
    _inherit = "account.payment"
    @api.multi
    def post(self):
        res = super(account_payment, self).post()
        # Skip when the payment itself was triggered by an OpenCart import.
        if 'opencart' not in self._context:
            sale_obj = self.env['sale.order']
            for rec in self:
                invoice_ids = rec.invoice_ids
                for inv_obj in invoice_ids:
                    # 'origin' holds the sale order name for invoices that
                    # were generated from orders.
                    invoices = inv_obj.read(['origin', 'state'])
                    if invoices[0]['origin']:
                        sale_ids = sale_obj.search(
                            [('name', '=', invoices[0]['origin'])])
                        for sale_order_obj in sale_ids:
                            order_id = self.env['wk.order.mapping'].sudo().search(
                                [('erp_order_id', '=', sale_order_obj.id)])
                            # Only push for mapped, invoiced OpenCart orders.
                            if order_id and sale_order_obj.ecommerce_channel == "opencart" and sale_order_obj.is_invoiced:
                                sale_order_obj.sudo().manual_opencart_paid()
        return res
class Picking(models.Model):
    """stock.picking override: when a delivery is validated in Odoo, push the
    'shipment' status to OpenCart."""
    _name = "stock.picking"
    _inherit = "stock.picking"
    @api.multi
    def action_done(self):
        res = super(Picking, self).action_done()
        _logger.info('/......Action_Done.... %r ............', self._context)
        # Skip when the transfer was triggered by an OpenCart import.
        if 'opencart' not in self._context:
            # The picking's 'origin' is the source sale order name.
            order_name = self.browse(self._ids[0]).origin
            sale_id = self.env['sale.order'].search([('name','=',order_name)])
            if len(sale_id):
                sale_id.sudo().manual_opencart_order_status_operation('shipment')
        # NOTE(review): the super() result `res` is discarded and True is
        # always returned — confirm callers do not rely on the original value.
        return True
class WkSkeleton(models.TransientModel):
    """wk.skeleton extension resolving the service product backing OpenCart
    shipping lines."""
    _inherit = "wk.skeleton"

    def get_opencart_virtual_product_id(self, order_line):
        """Return the carrier's product id for an OpenCart shipping line.

        Only acts when the context marks an OpenCart shipping line and a
        carrier id is supplied; otherwise falls through returning None,
        matching the original behaviour.
        """
        ctx = self._context
        if ctx.get('ecommerce') == "opencart" and ctx.get('type') == 'shipping':
            carrier = ctx.get('carrier_id', False)
            if carrier:
                return self.env['delivery.carrier'].browse(carrier).product_id.id
# class account_invoice(models.Model):
# _name = 'account.invoice'
# _inherit='account.invoice'
# def manual_opencart_invoice(self):
# text = ''
# session = 0
# oc_invoice = False
# param = {}
# connection = self.env['opencart.configuration'].search([('active','=',True)])
# if connection:
# config_obj = connection[0]
# url = config_obj.api_url+'/api/server.php'
# param['api_user'] = config_obj.api_user
# param['api_key'] = config_obj.api_key
# try:
# server = SOAPpy.SOAPProxy(url)
# session = server.login(param)
# except:
# pass
# if session:
# map_id = self.env['sale.order'].search([('id','in',self._ids),('opencart_id','!=',False)])
# if map_id:
# map_obj = map_id[0]
# oc_order_id = map_obj.opencart_id
# data={}
# data['order_id'] = oc_order_id
# data['order_status_id'] = 'paid'
# try:
# server = SOAPpy.SOAPProxy(url)
# oc_invoice = server.UpdateOrderStaus(session, data)
# except Exception,e:
# return str(e)
# self._cr.commit()
# return oc_invoice
# def write(self, vals):
# context = self.env.context.copy()
# ids = self._ids
# if isinstance(ids, (int, long)):
# ids = [ids]
# ######## manual_opencart_invoice method is used to update an invoice status on opencart end #########
# for id in ids:
# if vals.has_key('state'):
# if vals['state'] == 'paid':
# invoice_origin = self.browse(id).origin
# sale_origin_id = self.env['sale.order'].search([('name','=',invoice_origin)])
# if sale_origin_id:
# oc_invoice = sale_origin_id.manual_opencart_invoice()
# return super(account_invoice,self).write(cr,uid,ids,vals,context=context)
# ###################################################
# account_invoice()
|
from __future__ import unicode_literals
import frappe
from frappe.model.db_query import DatabaseQuery
from frappe.utils import nowdate
from frappe.utils import flt
from verp.stock.utils import get_stock_balance
@frappe.whitelist()
def get_data(item_code=None, warehouse=None, parent_warehouse=None,
    company=None, start=0, sort_by="stock_capacity", sort_order="desc"):
    """Return data to render the warehouse capacity dashboard.

    Applies optional item/warehouse/company filters, drops out early when the
    user has no warehouse permission, and sorts the page by `sort_by`.
    """
    filters = get_filters(item_code, warehouse, parent_warehouse, company)
    no_permission, filters = get_warehouse_filter_based_on_permissions(filters)
    if no_permission:
        return []
    rows = get_warehouse_capacity_data(filters, start)
    # Negating the key flips the order without needing reverse=True.
    direction = -1 if sort_order == "desc" else 1
    rows.sort(key=lambda row: row[sort_by] * direction)
    return rows
def get_filters(item_code=None, warehouse=None, parent_warehouse=None,
    company=None):
    """Build the Putaway Rule filter list from the optional dashboard inputs.

    A parent warehouse expands to all warehouses inside its nested-set
    (lft/rgt) interval.
    """
    filters = [['disable', '=', 0]]
    for field, value in (('item_code', item_code),
                         ('warehouse', warehouse),
                         ('company', company)):
        if value:
            filters.append([field, '=', value])
    if parent_warehouse:
        lft, rgt = frappe.db.get_value("Warehouse", parent_warehouse, ["lft", "rgt"])
        warehouses = frappe.db.sql_list("""
            select name from `tabWarehouse`
            where lft >=%s and rgt<=%s
        """, (lft, rgt))
        filters.append(['warehouse', 'in', warehouses])
    return filters
def get_warehouse_filter_based_on_permissions(filters):
    """Restrict `filters` to warehouses the session user may see.

    Returns (no_permission, filters): (True, []) when the user has no access
    to Warehouse at all, otherwise (False, possibly-extended filters).
    """
    try:
        # A non-empty match condition means user permissions restrict warehouses.
        restricted = DatabaseQuery('Warehouse', user=frappe.session.user).build_match_conditions()
        if restricted:
            allowed = [w.name for w in frappe.get_list('Warehouse')]
            filters.append(['warehouse', 'in', allowed])
        return False, filters
    except frappe.PermissionError:
        # User does not have access on warehouse.
        return True, []
def get_warehouse_capacity_data(filters, start):
    """Fetch a page of Putaway Rule capacities and annotate each row with the
    current stock balance and the percentage of capacity occupied.

    `start` is the pagination offset; the page length of 11 (10 rows plus one
    to detect a further page — TODO confirm) matches the dashboard widget.
    """
    capacity_data = frappe.db.get_all('Putaway Rule',
        fields=['item_code', 'warehouse','stock_capacity', 'company'],
        filters=filters,
        limit_start=start,
        limit_page_length='11'
    )
    for entry in capacity_data:
        balance_qty = get_stock_balance(entry.item_code, entry.warehouse, nowdate()) or 0
        capacity = flt(entry.stock_capacity)
        # Guard against zero/blank capacity: the original divided
        # unconditionally and raised ZeroDivisionError for rules with no
        # capacity set; report 0% occupancy instead.
        percent_occupied = flt((flt(balance_qty) / capacity) * 100, 0) if capacity else 0.0
        entry.update({
            'actual_qty': balance_qty,
            'percent_occupied': percent_occupied
        })
    return capacity_data
"""enterprise URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from rest_framework import routers
from scrapyd_manage.views import ScrapydHostViewSet
from user_token.views import AuthTokenView
# Custom router for the v1 API
router_v1 = routers.DefaultRouter()
router_v1.register(r'scrapyd-host', ScrapydHostViewSet)
# URL patterns matched by the project.
# (In the original, this comment was split across two lines, leaving the bare
# token `็่ทฏ็ฑ` in the module body, which raised NameError at import time.)
urlpatterns = [
    path('admin/', admin.site.urls),
    path('api-token-auth/', AuthTokenView.as_view()),  # token authentication
    path('api/v1/', include(router_v1.urls)),  # versioned API endpoints
]
|
import random
import logging
class NameAlreadyExists(Exception):
    """Raised when an event name collides with one that already exists."""
    def __init__(self, eventName):
        # Keep the conflicting name for display via str().
        self.name = eventName
    def __str__(self):
        return self.name
class trackVerification():
    """Tracks which users are registered/verified plus a queue of users
    awaiting verification.

    All usernames are normalised to lower case.  `self.users` maps
    username -> bool (True = registered); `self.userQueue` holds users
    waiting for verification.
    """
    def __init__(self, userlist):
        self.users = {}
        for item in userlist:
            self.users[item.lower()] = False
        # Python-3-compatible call (the original used the py2 print statement).
        print(self.users)
        self.userQueue = []
        self.__verify_log__ = logging.getLogger("UserVerification")
        self.__verify_log__.info("Verification Module initialized; Current users: %s", self.users.keys())

    def doesExist(self, user):
        """True when the user is known (case-insensitive)."""
        return user.lower() in self.users

    def isQueued(self, user):
        """True when the user is waiting in the verification queue."""
        return user.lower() in self.userQueue

    def queueUser(self, user):
        """Queue the user for verification; False when already queued."""
        user = user.lower()
        if user not in self.userQueue:
            self.userQueue.append(user)
            self.__verify_log__.debug("Queueing user: %s", user)
            return True
        self.__verify_log__.debug("Didn't queue user; Already queued: %s", user)
        return False

    def unqueueUser(self, user):
        """Remove the user from the queue; False when not queued."""
        user = user.lower()
        if user in self.userQueue:
            self.userQueue.remove(user)
            self.__verify_log__.debug("Removing user from queue: %s", user)
            return True
        self.__verify_log__.debug("Didn't remove user from queue; Wasn't queued: %s", user)
        return False

    def isRegistered(self, user):
        """True when the user exists and is marked registered."""
        user = user.lower()
        return user in self.users and self.users[user] == True

    def unregisterUser(self, user):
        """Mark the user unregistered (adds the user if previously unknown)."""
        user = user.lower()
        self.users[user] = False
        self.__verify_log__.debug("Unregistered user: %s", user)

    def registerUser(self, user):
        """Mark the user registered (adds the user if previously unknown)."""
        user = user.lower()
        self.users[user] = True
        self.__verify_log__.debug("Registered user: %s", user)

    def addUser(self, user):
        """Add a new, unregistered user."""
        user = user.lower()
        self.users[user] = False
        self.__verify_log__.debug("Added new user: %s", user)

    def remUser(self, user):
        """Delete the user entirely; warns when the user is unknown."""
        user = user.lower()
        if user in self.users:
            del self.users[user]
            self.__verify_log__.debug("Removed user: %s", user)
        else:
            self.__verify_log__.warning("Tried to remove user %s, but no such user exists", user)
|
from tqdm import tqdm
import sys
_ = input()  # first stdin line (test-case count) is read and discarded
LEN, J = (int(i) for i in input().split())  # LEN: bit length of candidates; J: jamcoins still needed
def change_base(n, base):
    """Reinterpret the binary digits of `n` as digits in `base`.

    Each set bit at position i contributes base**i; base 2 is the identity.
    Supports values up to 33 bits, like the original.
    """
    if base == 2:
        return n
    total = 0
    for position in range(33):
        if (n >> position) & 1:
            total += base ** position
    return total
import sympy
def check_number(n):
    """Classify `n`: (False, -1) when prime, else (True, nontrivial factor).

    Uses sympy's primality test and Pollard's rho factorisation.
    """
    if sympy.ntheory.primetest.isprime(n):
        return (False, -1)
    return (True, sympy.ntheory.factor_.pollard_rho(n))
def check_one(n):
    """Test whether `n`'s bit pattern is a jamcoin candidate.

    Returns (bitstring, factors) when the pattern is composite when read in
    every base 2..10 — `bitstring` is the LEN-bit binary form (MSB first) and
    `factors` holds one nontrivial divisor per base.  Returns None as soon as
    any interpretation is prime.
    """
    factors = []
    for base in range(2, 11):
        is_composite, divisor = check_number(change_base(n, base))
        if not is_composite:
            return None
        factors.append(divisor)
    bits = ''.join('1' if (n >> i) & 1 else '0' for i in range(LEN))
    return (bits[::-1], factors)
print("Case #1:")
from multiprocessing import Pool
pool = Pool(2)
for n in range((2**(LEN - 1))+1, 2**LEN, 22):
d = pool.apply_async(check_one, (n,))
try:
tmp = d.get(timeout = 1)
except:
continue
if tmp != None:
print(tmp[0], end=" ")
for i in tmp[1]:
print(i, end = " ")
print("")
J-=1
print(J, file=sys.stderr)
if J == 0:
break
|
#from sklearn.externals import joblib
#clf = joblib.load('classifier.pkl')
#def predict(a):
# predicted = clf.predict(a) # predicted is 1x1 numpy array
# return int(predicted[0])
from flask import Flask,render_template,url_for,request
import pickle
from sklearn.externals import joblib
app = Flask(__name__)  # new Flask instance;
# templates are resolved relative to this module's directory
# (the "templates" folder).  The route decorators below map
# URLs to the view functions that handle them.
@app.route('/')
# home() simply renders the home.html template from the templates/ folder.
def home():
    """Render the landing page."""
    return render_template('home.html')
@app.route('/predict',methods=['POST'])
# predict() reads the message the user submitted and classifies it.
def predict():
    """Classify the posted 'message' form field and render the result page."""
    # Load the pickled classifier via a context manager — the original opened
    # the file and never closed it, leaking one handle per request.
    # NOTE(review): loading the model on every request is wasteful; consider
    # loading once at startup.  Also, sklearn.externals.joblib was removed in
    # modern scikit-learn — the standalone `joblib` package is its replacement.
    with open('classifier.pkl', 'rb') as model:
        clf = joblib.load(model)
    # The POST method transports the form data in the request body.
    if request.method == 'POST':
        message = request.form['message']
        data = [message]
        my_prediction = clf.predict(data)[0]
        return render_template('result.html', prediction = my_prediction)
if __name__ == '__main__':
    # debug=True enables Flask's debugger and auto-reload on code changes;
    # the dev server only runs when this script is executed directly.
    app.run(debug=True)
|
from mrjob.job import MRJob
from mrjob.step import MRStep
from itertools import combinations
import sys
from math import*
import statistics
class ContentBasedRecommendation(MRJob):
    """Four-phase MRJob computing pairwise movie similarity from a ratings CSV.

    Input lines: userId,movie,rating,<ignored>.
    Phase 1 annotates each rating with the movie's rater count; phase 2 groups
    ratings per user and emits every movie pair that user rated; phase 3 turns
    each pair's rating lists into summary-statistic feature vectors and scores
    them with Euclidean and cosine similarity; phase 4 regroups the scores per
    movie.
    """
    # Keep reducer values sorted so pair generation is deterministic.
    SORT_VALUES = True
    def steps(self):
        return [
            MRStep(mapper=self.mapper_phase1, reducer=self.reducer_phase1),
            MRStep(mapper=self.mapper_phase2, reducer=self.reducer_phase2),
            MRStep(mapper=self.mapper_phase3, reducer=self.reducer_phase3),
            MRStep(mapper=self.mapper_phase4, reducer=self.reducer_phase4)
        ]
    def mapper_phase1(self, _, line):
        # Key by movie so the reducer can count how many users rated it.
        userId, movie, rating, _ = line.split(',')
        yield float(movie), (userId, float(rating))
    def reducer_phase1(self, key, values):
        # Re-emit each rating keyed by user, annotated with the rater count.
        movie = key
        items = list(values)
        numberOfRaters = len(items)
        for item in items:
            userId = item[0]
            rating = item[1]
            yield userId, (movie, rating, numberOfRaters)
    def mapper_phase2(self, key, value):
        # Identity mapper: phase-1 output is already keyed by user.
        (movie, rating, numberOfRaters) = value
        userId = key
        yield userId, (movie, rating, numberOfRaters)
    def reducer_phase2(self, key, values):
        # For each user, emit every (sorted) pair of movies they rated.
        userId = key
        items = list(values)
        items.sort()
        combs = combinations(items, 2)
        for comb in combs:
            # comb => ((movie1, rating1, number_of_raters1), (movie2, rating2, number_of_raters2))
            reducerKey = comb[0][0], comb[1][0]
            rating1 = comb[0][1]
            rating2 = comb[1][1]
            numberOfRaters1 = comb[0][2]
            numberOfRaters2 = comb[1][2]
            reducerValue = (rating1, numberOfRaters1, rating2,
                            numberOfRaters2)
            yield reducerKey, reducerValue
    def mapper_phase3(self, key, value):
        # Identity mapper: keep the (movie1, movie2) pair key as-is.
        movie1, movie2 = key
        rating1, numberOfRaters1, rating2, numberOfRaters2 = value
        yield (movie1, movie2), (rating1, numberOfRaters1, rating2, numberOfRaters2)
    def reducer_phase3(self, key, values):
        # Summarise both rating lists into [min, max, mean, geometric mean,
        # harmonic mean] feature vectors, then score the pair.
        # NOTE(review): geometric/harmonic means require strictly positive
        # ratings — a 0 rating would raise; confirm the input range.
        items = list(values)
        movie1, movie2 = key
        values1 = []
        values2 = []
        for item in items:
            (rating1, numberOfRaters1, rating2, numberOfRaters2) = item
            values1.append(rating1)
            values2.append(rating2)
        min1 = min(values1)
        min2 = min(values2)
        max1 = max(values1)
        max2 = max(values2)
        avg1 = statistics.mean(values1)
        avg2 = statistics.mean(values2)
        g1 = statistics.geometric_mean(values1)
        g2 = statistics.geometric_mean(values2)
        h1 = statistics.harmonic_mean(values1)
        h2 = statistics.harmonic_mean(values2)
        features1 = [min1, max1, avg1, g1, h1]
        features2 = [min2, max2, avg2, g2, h2]
        euclidian = self.calculate_euclidian(features1, features2)
        cosine = self.cosine_similarity(features1, features2)
        yield (movie1, movie2), (euclidian , cosine)
    def mapper_phase4(self, key, value):
        # Regroup: key by the first movie of the pair.
        movie1, movie2 = key
        yield movie1, (movie2, value)
    def reducer_phase4(self, key, values):
        # Collect all (other_movie, scores) entries for each movie.
        items = list(values)
        yield key, items
    def calculate_euclidian(self, x, y):
        # Similarity in (0, 1]: 1 / (1 + Euclidean distance).
        result = sqrt(sum(pow(a-b,2) for a, b in zip(x, y)))
        return 1 / (1 + result)
    def square_rooted(self, x):
        # L2 norm rounded to 3 decimals.
        return round(sqrt(sum([a*a for a in x])),3)
    def cosine_similarity(self, x,y):
        # Cosine of the angle between feature vectors, rounded to 3 decimals.
        numerator = sum(a*b for a,b in zip(x,y))
        denominator = self.square_rooted(x)*self.square_rooted(y)
        return round(numerator/float(denominator),3)
if __name__ == '__main__':
ContentBasedRecommendation.run() |
def swap_case(s):
    """Return `s` with the case of every alphabetic character inverted."""
    swapped = [
        ch.upper() if ch.islower() else ch.lower() if ch.isupper() else ch
        for ch in s
    ]
    # Equivalent built-in shortcut: s.swapcase()
    return ''.join(swapped)
if __name__ == '__main__':
    # Read one line from stdin, swap its case, and echo the result.
    s = input()
    result = swap_case(s)
    print(result)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Ansible module documentation block. Fix: the `enabled` option used the key
# `alias:`, which Ansible's doc schema does not recognise — the correct key
# is `aliases:`.
DOCUMENTATION = '''
---
module: fail2ban_jail
options:
  name:
    description: The name of the jail to configure.
    required: True
    aliases: []
  enabled:
    description: Whether the jail is enabled or not.
    choices: [true, false]
    default: true
    aliases: [state]
  filter:
    description: Name of the filter to apply.
    required: true
  logpath:
    description: Path to log file to filter. Supports a single logfile in a
      string or a list of several.
  ignoreregex:
    description: regex to ignore.
  maxretry:
    description: Max retry times.
  bantime:
    description: Ban time in seconds.
  findtime:
    description: Find time in seconds.
  action:
    description: Name of the action to take.
    required: true
  port:
    description: Port for the iptables blocking action.
  protocol:
    description: Protocol for the iptables blocking action.
  backend:
    description: Backend to use.
    choices: [auto, polling, gamin, pyinotify]
    default: auto
'''
|
from smoke.features.steps.openshift import Openshift
from kubernetes import client, config
# Module-level handles shared by every step (created once at import time).
oc = Openshift()
v1 = client.CoreV1Api()


def _delete_or_fail(kind, name, project):
    """Delete `name` of resource `kind` in `project`; fail the step when the
    oc wrapper signals an error by returning None.

    Replaces the repeated `res == None` pattern (identity comparison with
    `is None` is the correct idiom) in every step below.
    """
    if oc.delete(kind, name, project) is None:
        raise AssertionError("failed to delete %s %s" % (kind, name))


@then(u'we delete deploymentconfig.apps.openshift.io "jenkins"')
def del_dc(context):
    _delete_or_fail("deploymentconfig", "jenkins", context.current_project)


@then(u'we delete route.route.openshift.io "jenkins"')
def del_route(context):
    _delete_or_fail("route", "jenkins", context.current_project)


@then(u'delete configmap "jenkins-trusted-ca-bundle"')
def del_cm(context):
    _delete_or_fail("configmap", "jenkins-trusted-ca-bundle", context.current_project)


@then(u'delete serviceaccount "jenkins"')
def del_sa(context):
    _delete_or_fail("serviceaccount", "jenkins", context.current_project)


@then(u'delete rolebinding.authorization.openshift.io "jenkins_edit"')
def del_rb(context):
    _delete_or_fail("rolebinding", "jenkins_edit", context.current_project)


@then(u'delete service "jenkins"')
def del_svc(context):
    _delete_or_fail("service", "jenkins", context.current_project)


@then(u'delete service "jenkins-jnlp"')
def del_svc_jnlp(context):
    _delete_or_fail("service", "jenkins-jnlp", context.current_project)


@then(u'delete all buildconfigs')
def del_bc(context):
    _delete_or_fail("bc", "--all", context.current_project)


@then(u'delete all builds')
def del_builds(context):
    _delete_or_fail("builds", "--all", context.current_project)


@then(u'delete all deploymentconfig')
def del_alldc(context):
    _delete_or_fail("deploymentconfig", "--all", context.current_project)


@then(u'delete all services')
def del_allsvc(context):
    _delete_or_fail("service", "--all", context.current_project)


@then(u'delete all imagestream')
def del_all_is(context):
    _delete_or_fail("is", "--all", context.current_project)


@then(u'delete all remaining test resources')
@given(u'cleared from all test resources')
def del_all_remaining_test_resources(context):
    """Best-effort bulk cleanup by app label; errors are intentionally ignored."""
    delete_command = "all,rolebindings.authorization.openshift.io,bc,cm,is,pvc,sa,secret"
    for label in ("app=jenkins-ephemeral", "app=jenkins-persistent",
                  "app=openshift-jee-sample", "app=jenkins-pipeline-example"):
        oc.delete(delete_command, "-l " + label, context.current_project)
# -*- coding=UTF-8 -*-
# pyright: strict, reportTypeCommentUsage=false
from __future__ import absolute_import, division, print_function, unicode_literals
import os
TYPE_CHECKING = False
if TYPE_CHECKING:
from typing import Text
_DIR = os.path.dirname(os.path.abspath(__file__))
_ROOT = os.path.dirname(os.path.dirname(_DIR))


def workspace_path(*paths):
    # type: (Text) -> Text
    """Join *paths* onto the workspace root (two directory levels above this file)."""
    return os.path.join(_ROOT, *paths)
|
# coding:utf-8
'''
@time: Created on 2021-04-05 17:12:34
@author: Xinqi Chen
@Func: CNN_LSTM model and train-val-test
'''
from tensorflow.python.keras.models import Sequential, Model
from tensorflow.python.keras.layers import Dense, Lambda, Input, Reshape
from tensorflow.python.keras.optimizers import Adam
import os
import tensorflow as tf
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # remove warnings
from CODE.Android.Model import train_test_evalation_split, validatePR
from CODE.Android.main import read__data
from CODE.Android.config import predict_window, train_keyword, use_feature, train_folder, sigma, \
overlap_window, window_length, model_folder, train_data_rate, \
saved_dimension_after_pca, NB_CLASS, whether_shuffle_train_and_test, M, time_step, APP_CLASS, evaluation_ratio, load_already_min_max_data,\
one_label, use_time_and_fft, test_folder, train_or_test
from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau
from keras.layers import Conv1D, BatchNormalization, GlobalAveragePooling1D, Permute, Dropout
from keras.layers import Input, Dense, LSTM, multiply, concatenate, Activation, Masking, Reshape
from keras.models import Model
from keras.optimizers import Adam
from keras.layers import Concatenate, Lambda
from CODE.Android.o7_baseline_traditional import validatePR
from CODE.Android.o7_baseline_LSTM import get_full_dataset, get_weight, oneHot2List
import keras
import numpy as np
from sklearn.model_selection import StratifiedKFold
from collections import Counter
from sklearn import metrics
from keras.callbacks import EarlyStopping
def squeeze_excite_block(inputs):
    """Squeeze-and-excitation gate: channel-wise reweighting of `inputs`.

    Args:
        inputs: a rank-3 Keras tensor (batch, steps, channels) — TF channel
            axis is -1.
    Returns:
        The input tensor scaled by learned per-channel sigmoid weights.
    """
    n_channels = inputs.shape[-1]
    # Squeeze: global average over the time axis, one scalar per channel.
    gate = GlobalAveragePooling1D()(inputs)
    gate = Reshape((1, n_channels))(gate)
    # Excitation: bottleneck MLP (reduction factor 16) ending in a sigmoid gate.
    gate = Dense(n_channels // 16, activation='relu', kernel_initializer='he_normal', use_bias=False)(gate)
    gate = Dense(n_channels, activation='sigmoid', kernel_initializer='he_normal', use_bias=False)(gate)
    return multiply([inputs, gate])
def expand(x):
    # Append a trailing axis: (..., n) -> (..., n, 1); used to feed slices into Conv1D.
    return tf.expand_dims(x, axis=-1)
def reduce(x):
    # Inverse of `expand`: drop the trailing singleton axis.
    return tf.squeeze(x, axis=-1)
def transpose(x):
    # Swap the last two axes of a rank-3 tensor (batch axis 0 preserved).
    return tf.transpose(x,[0,2,1])
def triplet_loss(y_true, y_pred):
    """Triplet loss over an L2-normalised batch of stacked embeddings.

    `y_pred` is assumed to hold 3*batch rows laid out as
    [anchors; positives; negatives] — TODO confirm against the data pipeline.
    `y_true` is unused (Keras requires the argument).
    """
    y_pred = keras.backend.l2_normalize(y_pred,axis=1)
    # NOTE(review): hard-coded — must equal the training batch size; confirm.
    batch = 256
    ref1 = y_pred[0:batch,:]               # anchors
    pos1 = y_pred[batch:batch+batch,:]     # positives
    neg1 = y_pred[batch+batch:3*batch,:]   # negatives
    dis_pos = keras.backend.sum(keras.backend.square(ref1 - pos1), axis=1, keepdims=True)
    dis_neg = keras.backend.sum(keras.backend.square(ref1 - neg1), axis=1, keepdims=True)
    dis_pos = keras.backend.sqrt(dis_pos)
    dis_neg = keras.backend.sqrt(dis_neg)
    a1 = 17  # margin
    # NOTE(review): standard triplet loss is max(0, d_pos - d_neg + margin);
    # the extra `dis_pos +` term additionally penalises anchor-positive
    # distance — confirm this is intended.
    d1 = dis_pos + keras.backend.maximum(0.0, dis_pos - dis_neg + a1)
    return keras.backend.mean(d1)
def CNN(inp):
    """Three SE-gated Conv1D stages plus a 1x1 conv, globally pooled into a
    256-d vector and an NB_CLASS softmax head.

    Returns [x1, y1]: the 256-d intermediate vector and the class probabilities.
    """
    m = inp
    m = tf.expand_dims(m, axis=-1)  # add a channel axis for Conv1D
    y1 = Conv1D(16,kernel_size=5, padding='same', kernel_initializer='he_uniform')(m)
    y1 = BatchNormalization()(y1)
    y1 = Activation('relu')(y1)
    y1 = squeeze_excite_block(y1)
    y1 = Conv1D(32,kernel_size=3, padding='same', kernel_initializer='he_uniform')(y1)
    y1 = BatchNormalization()(y1)
    y1 = Activation('relu')(y1)
    y1 = squeeze_excite_block(y1)
    y1 = Conv1D(16,kernel_size=3, padding='same', kernel_initializer='he_uniform')(y1)
    y1 = BatchNormalization()(y1)
    y1 = Activation('relu')(y1)
    y1 = squeeze_excite_block(y1)
    y1 = Conv1D(8, 1, padding='same', kernel_initializer='he_uniform')(y1)
    y1 = BatchNormalization()(y1)
    y1 = Activation('relu')(y1)
    x1 = GlobalAveragePooling1D()(y1)
    # NOTE(review): softmax on this intermediate 256-unit layer is unusual
    # (relu is typical for an embedding); confirm it is intended.
    x1 = Dense(256, activation='softmax')(x1)
    y1 = Dense(NB_CLASS, activation='softmax')(x1)
    return [x1, y1]
def cnn_lstm_old():
    """Build the legacy model: four identical per-time-step CNN towers + LSTM.

    The input (time_step, M) is sliced into its first four time steps; each
    slice runs through an SE-augmented Conv1D stack producing an in-app head
    and a feature vector.  The four feature vectors feed an LSTM that predicts
    the app label.

    Returns:
        keras Model with outputs [inapp1, inapp2, inapp3, inapp4, app].

    The original had the four branches copy-pasted; they are factored into
    one helper (identical layer stacks, identical behavior).
    """
    inp = Input(shape=(time_step, M))

    def _cnn_branch(step):
        # One tower: slice time step `step`, run the Conv1D/SE stack, and
        # return (softmax in-app head, expanded 128-d feature vector).
        # Default-arg binding (i=step) avoids the late-binding closure pitfall.
        sliced = Lambda(lambda t, i=step: t[:, i, :])(inp)
        y = Lambda(expand)(sliced)
        y = Permute((2, 1))(y)
        for n_filters, ksize in ((16, 5), (32, 3), (16, 3)):
            y = Conv1D(n_filters, kernel_size=ksize, padding='same',
                       kernel_initializer='he_uniform')(y)
            y = BatchNormalization()(y)
            y = Activation('relu')(y)
            y = squeeze_excite_block(y)
        y = Conv1D(8, 1, padding='same', kernel_initializer='he_uniform')(y)
        y = BatchNormalization()(y)
        y = Activation('relu')(y)
        feat = GlobalAveragePooling1D()(y)
        # NOTE(review): softmax on a hidden layer is unusual -- kept from original.
        feat = Dense(128, activation='softmax')(feat)
        out = Dense(NB_CLASS, activation='softmax')(feat)
        return out, Lambda(expand)(feat)

    inapp_list = []
    vector_list = []
    for step in range(4):
        out, vec = _cnn_branch(step)
        inapp_list.append(out)
        vector_list.append(vec)

    # Merge the per-time-step feature vectors along the last axis.
    vector = Concatenate()(vector_list)

    # Pass each branch's in-app prediction through a 1x1 conv output head,
    # then drop the trailing singleton axis again.
    label_inapps = []
    for out in inapp_list:
        head = Conv1D(1, kernel_size=1, padding='same',
                      kernel_initializer='he_uniform')(Lambda(expand)(out))
        label_inapps.append(Lambda(reduce)(head))

    out_y = LSTM(32)(vector)  # 32 LSTM units over the concatenated branch features
    out_y = Dense(128, activation='softmax')(out_y)
    label_app = Dense(APP_CLASS, activation='softmax')(out_y)
    model = Model(inputs=inp, outputs=label_inapps + [label_app])
    model.summary()
    return model
def cnn_lstm():
    """Build the current model: four identical LSTM branches + app-level LSTM.

    Each branch takes one (saved_dimension_after_pca, 1) input (named
    'in_inapp1'..'in_inapp4'), reshapes it to 5 time steps of 30 features,
    runs a stacked LSTM, and emits an NB_CLASS softmax head
    ('out_inapp1'..'out_inapp4').  The branch feature vectors feed a final
    LSTM that predicts the APP_CLASS head ('out_app').  All layer names are
    preserved because train_fcn() compiles/fits by name.

    Returns:
        keras Model([in_inapp1..4] -> [out_inapp1..4, out_app]).

    The original had the four branches copy-pasted; they are factored into
    one helper (identical layers, identical names, identical behavior).
    """
    def _lstm_branch(index):
        # One in-app branch: PCA vector -> (5, 30) sequence -> stacked LSTM.
        branch_in = Input(shape=(saved_dimension_after_pca, 1),
                          name='in_inapp%d' % index)
        seq = Reshape((5, 30))(branch_in)
        h = LSTM(64, return_sequences=True)(seq)
        h = LSTM(32)(h)
        branch_out = Dense(NB_CLASS, activation='softmax',
                           name='out_inapp%d' % index)(h)
        return branch_in, branch_out, Lambda(expand)(h)

    inputs = []
    inapp_outputs = []
    vector_list = []
    for idx in range(1, 5):
        branch_in, branch_out, vec = _lstm_branch(idx)
        inputs.append(branch_in)
        inapp_outputs.append(branch_out)
        vector_list.append(vec)

    # Merge the per-branch feature vectors, then move units to the last axis
    # so the LSTM receives (time step, units) on its second/third dimensions.
    vector = keras.layers.concatenate(vector_list)
    vector = Permute((2, 1))(vector)

    out_y = LSTM(32)(vector)  # 32 LSTM units
    out_y = Dense(128, activation='softmax')(out_y)
    label_app = Dense(APP_CLASS, activation='softmax', name='out_app')(out_y)
    model = Model(inputs=inputs, outputs=inapp_outputs + [label_app])
    model.summary()
    return model
learning_rate = 1e-2  # initial Adam learning rate
monitor = 'val_loss'  # quantity watched by checkpoint/LR callbacks ('acc' is the alternative)
optimization_mode = 'min'  # 'min' because val_loss should decrease
compile_model = True  # whether train_fcn() compiles the model before fitting
factor = 1. / np.sqrt(2)  # ReduceLROnPlateau factor (not time series: 1. / np.sqrt(2))
def train_fcn():
    """Train and/or evaluate the four-branch cnn_lstm() model.

    Loads (or re-reads and caches) the preprocessed data, optionally runs
    stratified k-fold training (controlled by the local for_train flag), then
    reloads saved weights and reports in-app / app confusion matrices and
    accuracy on the held-out set.  Depends on many module-level globals
    (read__data, get_weight, validatePR, oneHot2List, time_step, M, NB_CLASS,
    APP_CLASS, model_folder, ...) -- not runnable in isolation.
    """
    train_tmp = '../data//tmp/Android//tmp/train/'
    if load_already_min_max_data == True:
        # Load previously saved, already-preprocessed arrays.
        data = np.load('D:/Pycharm Projects/CODE/data/tmp/Android/model/data.npy', allow_pickle=True)
        label = np.load('D:/Pycharm Projects/CODE/data/tmp/Android/model/label.npy', allow_pickle=True)
        category_len = np.load('D:/Pycharm Projects/CODE/data/tmp/Android/model/category_len.npy', allow_pickle=True).item()
    else:
        # Re-read the data from the raw files (and cache it when training).
        if train_or_test == 'train':
            data, label, category_len = read__data(train_folder, train_keyword, train_data_rate, train_tmp)
            np.save('D:/Pycharm Projects/CODE/data/tmp/Android/model/data.npy',data)
            np.save('D:/Pycharm Projects/CODE/data/tmp/Android/model/label.npy',label)
            np.save('D:/Pycharm Projects/CODE/data/tmp/Android/model/category_len.npy',category_len)
        else:
            data, label, category_len = read__data(test_folder, train_keyword, train_data_rate, train_tmp)
    # Single label vs (in-app, app) label pair.
    if one_label == True:
        X_train, X_test_left, y_train, y_test_left = train_test_evalation_split(data, label, category_len)
        X, y = X_train, y_train
    else:
        X_train, X_test_left, y_train, y_test_left = train_test_evalation_split(data, label, category_len)
        X, y = X_train, y_train[:, 0]  # column 0: in-app label
        app_label = y_train[:, 1]      # column 1: app label
    model = cnn_lstm()
    keras.utils.plot_model(model, "cnn_lstm_model.png", show_shapes=True)
    # Debug helper: inspect an intermediate layer's output.
    # intermediate_layer_model = Model(inputs=model.input, outputs=model.get_layer('lambda_4').output)
    # data = intermediate_layer_model.predict(X_train[:4].reshape(1,4,M))
    # print(data)
    # Debug helper: inspect the model output.
    #tmp_test = model.predict(X_test_left.reshape(-1,M,time_step))
    #print(tmp_test)
    print('after one hot,y shape:', y.shape)
    n_splits = 10
    epochs = 500
    skf_cv = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=None)
    ############ stratified k-fold cross validation ############
    i = 0
    for_train = False  # True -> train; False -> only evaluate on the held-out test set
    if for_train == True:
        for train_index, test_index in skf_cv.split(X, y):
            i += 1
            print('============ ็ฌฌ %d ๆไบคๅ้ช่ฏ =====================' % i)
            X_training, X_testing = X[train_index], X[test_index]
            y_training_inapp, y_test_orging_inapp = y[train_index], y[test_index]
            y_training_app, y_test_orging_app = app_label[train_index], app_label[test_index]
            print(dict(Counter(y_training_inapp[:])))
            weight_dict_inapp = get_weight(list(y_training_inapp[:]))
            weight_dict_app = get_weight(list(y_training_app[:]))
            weight_fn = "%s/%s_new_cnn_lstm_weights.h5" % (model_folder, train_tmp.split('/')[-2])
            print(weight_fn)
            # X_train, X_validate, y_train, y_validate = train_evalation_split(X_train, y_train)
            print(X_training.shape)
            ######## fine-grained model setup: callbacks, optimizer, compile
            model_checkpoint = ModelCheckpoint(weight_fn, verbose=1, mode=optimization_mode,
                                               monitor=monitor, save_best_only=True, save_weights_only=True)
            reduce_lr = ReduceLROnPlateau(monitor=monitor, patience=15, mode=optimization_mode,
                                          factor=factor, cooldown=0, min_lr=1e-4, verbose=2)
            early_stopping = EarlyStopping(monitor='val_loss', patience=30)
            callback_list = [model_checkpoint, reduce_lr, early_stopping]
            optm = Adam(lr=learning_rate)
            if compile_model:
                model.compile(optimizer=optm, loss={'out_inapp1':'categorical_crossentropy',
                                                    'out_inapp2':'categorical_crossentropy',
                                                    'out_inapp3': 'categorical_crossentropy',
                                                    'out_inapp4': 'categorical_crossentropy',
                                                    'out_app': 'categorical_crossentropy'},
                              metrics=[keras.metrics.CategoricalAccuracy()])
            # Convert to one-hot encoding (lengths truncated to a multiple of time_step first).
            y_training_inapp = y_training_inapp[:len(y_training_inapp)//time_step*time_step]
            y_training_app = y_training_app[:len(y_training_app)//time_step*time_step]
            y_training_inapp_label = keras.utils.to_categorical(y_training_inapp)
            y_training_app_label = keras.utils.to_categorical(y_training_app)
            # y_training_inapp_label1 = y_training_inapp_label[:, 0, :].reshape(-1, NB_CLASS)
            # y_training_inapp_label2 = y_training_inapp_label[:, 1, :].reshape(-1, NB_CLASS)
            # y_training_inapp_label3 = y_training_inapp_label[:, 2, :].reshape(-1, NB_CLASS)
            # y_training_inapp_label4 = y_training_inapp_label[:, 3, :].reshape(-1, NB_CLASS)
            # y_training_app_label = y_training_app_label[:, 0, :].reshape(-1, APP_CLASS)
            y_test_orging_inapp = y_test_orging_inapp[:(len(y_test_orging_inapp)//time_step)*time_step]
            y_test_orging_app = y_test_orging_app[:(len(y_test_orging_app)//time_step)*time_step]
            y_val_inapp_label = keras.utils.to_categorical(y_test_orging_inapp)
            y_val_app_label = keras.utils.to_categorical(y_test_orging_app)
            # y_val_inapp_label1 = y_val_inapp_label[:,0,:].reshape(-1,NB_CLASS)
            # y_val_inapp_label2 = y_val_inapp_label[:, 1, :].reshape(-1, NB_CLASS)
            # y_val_inapp_label3 = y_val_inapp_label[:, 2, :].reshape(-1, NB_CLASS)
            # y_val_inapp_label4 = y_val_inapp_label[:, 3, :].reshape(-1, NB_CLASS)
            # y_val_app_label = y_val_app_label[:, 0, :].reshape(-1, APP_CLASS)
            # Disabled: split every data segment into time_step in-app labels.
            # y_training_inapp_label_timestep = []
            # y_val_inapp_label_timestep = []
            # for i in range(len(y_training_inapp_label)):
            # train_tmp = []
            # for j in range(time_step):
            # train_tmp.append(y_training_inapp_label[i])
            # y_training_inapp_label_timestep.append(train_tmp)
            # for i in range(len(y_val_inapp_label)):
            # val_tmp = []
            # for j in range(time_step):
            # val_tmp.append(y_val_inapp_label[i])
            # y_val_inapp_label_timestep.append(val_tmp)
            # y_training_inapp_label_timestep = np.array(y_training_inapp_label_timestep)
            # y_val_inapp_label_timestep = np.array(y_val_inapp_label_timestep)
            #### At this point the data is finally ready for fitting.
            if use_time_and_fft:
                X_training = X_training.reshape(len(X_training)//time_step*time_step,time_step,2*M*time_step)
                X_val = X_testing.reshape(len(X_testing)//time_step,time_step,2*M*time_step)
            else:
                # Training set: truncate to a multiple of time_step; the SAME
                # data is fed to all four branch inputs (the [0:-1:4]-style
                # striding is disabled -- see the trailing comments).
                X_len = len(X_training)//time_step*time_step
                X_training= X_training[:X_len]
                in_app1 = X_training#[0:-1:4]
                in_app1 = in_app1.reshape(len(in_app1),150,1)
                in_app2 = X_training#[1:-1:4]
                in_app2 = in_app2.reshape(len(in_app1), 150, 1)
                in_app3 = X_training#[2:-1:4]
                in_app3 = in_app3.reshape(len(in_app1), 150, 1)
                in_app4 = X_training#[3:len(X_training):4]
                in_app4 = in_app4.reshape(len(in_app1), 150, 1)
                # Validation set, same treatment.
                X_val_len = len(X_testing)//time_step*time_step
                X_val = X_testing[:X_val_len]
                val_in_app1 = X_val#[0:-1:4]
                val_in_app1 = val_in_app1.reshape(len(val_in_app1), 150,1)
                val_in_app2 = X_val#[1:-1:4]
                val_in_app2 = val_in_app2.reshape(len(val_in_app1), 150, 1)
                val_in_app3 = X_val#[2:-1:4]
                val_in_app3 = val_in_app3.reshape(len(val_in_app1), 150, 1)
                val_in_app4 = X_val#[3:len(X_training):4]
                val_in_app4 = val_in_app4.reshape(len(val_in_app1), 150, 1)
            # NOTE(review): fit() uses in_app1..4, which only exist on the else
            # branch above -- the use_time_and_fft=True path would fail here.
            model.fit({"in_inapp1":in_app1,
                       "in_inapp2":in_app2,
                       "in_inapp3":in_app3,
                       "in_inapp4":in_app4},
                      # If the data is striped per branch, add [0:-1:4] / [3:len(...):4] etc.
                      {"out_inapp1":y_training_inapp_label.reshape(len(in_app1),NB_CLASS),
                       "out_inapp2": y_training_inapp_label.reshape(len(in_app1),NB_CLASS),
                       "out_inapp3": y_training_inapp_label.reshape(len(in_app1),NB_CLASS),
                       "out_inapp4": y_training_inapp_label.reshape(len(in_app1),NB_CLASS),
                       # Each segment in a sample belongs to the same app.
                       "out_app": y_training_app_label.reshape(len(in_app1),APP_CLASS)},
                      batch_size=256, epochs=epochs, callbacks=callback_list,
                      class_weight=[1,1,1,1,2],  # the final app head gets the largest weight
                      validation_data=[[val_in_app1,val_in_app2,val_in_app3,val_in_app4],
                                       [y_val_inapp_label.reshape(len(val_in_app1),NB_CLASS),
                                        y_val_inapp_label.reshape(len(val_in_app1),NB_CLASS),
                                        y_val_inapp_label.reshape(len(val_in_app1),NB_CLASS),
                                        y_val_inapp_label.reshape(len(val_in_app1),NB_CLASS),
                                        y_val_app_label.reshape(len(val_in_app1),APP_CLASS)]],
                      verbose=2)
    # Evaluate on the fully held-out set using the saved weights.
    model.load_weights('D:/Pycharm Projects/CODE/data/tmp/Android/model/train_cnn_lstm_weights.h5')
    X_test_len = len(X_test_left)//time_step*time_step
    X_test_left = X_test_left[:X_test_len]
    X_test_left = X_test_left.reshape(len(X_test_left),saved_dimension_after_pca,1)
    # Predict: the same test tensor is fed to all four branch inputs.
    predict_y_left = model.predict([X_test_left,X_test_left,X_test_left,X_test_left]) # now do the final test
    # Extract the predicted label of each time step's head.
    predict_y_left_inapp1 = np.array(oneHot2List(predict_y_left[0]))
    predict_y_left_inapp2 = np.array(oneHot2List(predict_y_left[1]))
    predict_y_left_inapp3 = np.array(oneHot2List(predict_y_left[2]))
    predict_y_left_inapp4 = np.array(oneHot2List(predict_y_left[3]))
    # Concatenate the in-app predictions of all four heads.
    predict_y_left_inapp = np.hstack((predict_y_left_inapp1,predict_y_left_inapp2))
    predict_y_left_inapp = np.hstack((predict_y_left_inapp, predict_y_left_inapp3))
    predict_y_left_inapp = np.hstack((predict_y_left_inapp, predict_y_left_inapp4))
    predict_y_left_app = np.array(oneHot2List(predict_y_left[4]))
    y_test_left_inapp = y_test_left[:,0]
    y_test_left_app_formatrix = y_test_left[:,1]
    # In-app confusion matrix and accuracy: ground truth is tiled x4 to match
    # the four concatenated head predictions.
    y_test_left_inapp_formatrix = np.array(y_test_left_inapp)
    y_test_left_inapp_formatrix = np.hstack((y_test_left_inapp_formatrix,y_test_left_inapp_formatrix))
    y_test_left_inapp_formatrix = np.hstack((y_test_left_inapp_formatrix,y_test_left_inapp_formatrix))
    confusion_inapp = metrics.confusion_matrix(predict_y_left_inapp, y_test_left_inapp_formatrix)
    np.savetxt(model_folder + 'cnn_lstm_test_inapp_confusion_matrix.csv', confusion_inapp.astype(int), delimiter=',', fmt='%d')
    print('\tfinal inapp confusion matrix:\n', confusion_inapp)
    precise,_,_,_,accuracy = validatePR(predict_y_left_inapp, y_test_left_inapp_formatrix)
    print('Inapp Accuracy :\n', accuracy)
    print('Inapp Precise :\n', precise)
    # App-level confusion matrix and accuracy.
    # a = len(y_test_left_app_formatrix) // time_step * time_step
    y_test = y_test_left_app_formatrix
    confusion_app = metrics.confusion_matrix(predict_y_left_app, y_test)
    np.savetxt(model_folder + 'cnn_lstm_test_app_confusion_matrix.csv', confusion_app.astype(int), delimiter=',', fmt='%d')
    print('\tfinal inapp confusion matrix:\n', confusion_app)
    precise,_,_,_,accuracy = validatePR(predict_y_left_app, y_test)
    print('App Accuracy :\n', accuracy)
    print('App Precise :\n', precise)
    return
if __name__ == "__main__":
    # Script entry point: run training/evaluation end to end.
    train_fcn()
# Generated by Django 2.1.11 on 2020-02-13 20:46
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relabel Device.hidden with verbose_name 'Hidden/private use' (no schema change)."""

    dependencies = [
        ('tracking', '0006_auto_20200213_1716'),
    ]

    operations = [
        migrations.AlterField(
            model_name='device',
            name='hidden',
            field=models.BooleanField(default=False, verbose_name='Hidden/private use'),
        ),
    ]
|
import os
import numpy as np
from sklearn import metrics
import pandas as pd
import joblib
from sklearn.metrics import roc_curve,precision_recall_curve,auc
import networkx as nx
from optparse import OptionParser
def get_graph_distance(graph, row):
    """Shortest-path distance between a row's motif pair, excluding the direct edge.

    The edge (from_motif_index, to_motif_index) itself is hidden via a
    restricted view so the distance reflects alternative paths only.

    Returns:
        Weighted shortest-path length, or the sentinel 100000 when no
        alternative path exists (exp(-100000) ~ 0 downstream).
    """
    graph_restricted = nx.restricted_view(
        graph, [], [(row.from_motif_index, row.to_motif_index)])
    try:
        distance = nx.shortest_path_length(graph_restricted,
                                           source=row.from_motif_index,
                                           target=row.to_motif_index,
                                           weight='path_weight')
    # Was a bare `except:`; only "no path" / "unknown node" are expected here,
    # and a bare except would also swallow KeyboardInterrupt.
    except (nx.NetworkXNoPath, nx.NodeNotFound):
        distance = 100000
    return distance
def run(model_path,sample_file,output_path,model_cell,sample_cell):
    """Score candidate loops with the two-stage (base + graph) random forests.

    Loads rf_base / rf_graph from model_path, computes a base probability per
    candidate edge, converts it to an edge weight (-log p), derives a
    graph-context feature ('gcp') from alternative shortest paths, rescores
    with the graph model, saves the probabilities and prints AUROC.

    Labels are sample.frequency >= 1 (interaction observed at least once).
    """
    model_base_path = model_path+'/rf_base.model'
    model_graph_path = model_path+'/rf_graph.model'
    data_path = sample_file
    print('Reading in features...')
    sample = pd.read_csv(data_path)
    # Assemble the base feature columns: per-anchor motif/ChIP features, a
    # 19x4 one-hot sequence block per side, distance, and between-anchor stats.
    f = []
    f_suffix = ['motif_strand', 'motif_score', 'ctcf_signalValue', 'rad_signalValue', 'age']
    f_suffix.extend([c + str(num) for num in range(19) for c in ['A', 'G', 'C', 'T']])
    f_from = ['from_' + name for name in f_suffix]
    f_to = ['to_' + name for name in f_suffix]
    f.extend(f_from)
    f.extend(f_to)
    f.append('distance')
    f.extend(['between_motif',
              'between_positive_motif',
              'between_negative_motif',
              'between_ctcf_score',
              'between_positive_ctcf_score',
              'between_negative_ctcf_score',
              'between_ctcf_signalValue',
              'between_positive_ctcf_signalValue',
              'between_negative_ctcf_signalValue'
              ])
    feature = sample[f].values
    label = sample.frequency.values >= 1
    rf_base = joblib.load(model_base_path)
    rf_graph=joblib.load(model_graph_path)
    # Base probability -> edge weight: -log(p), clamped to avoid log(0).
    sample_predict = rf_base.predict_proba(feature)[:, 1]
    sample_predict[sample_predict <= 0] = 0.000001
    sample_predict = -np.log(sample_predict)
    sample['path_weight'] = sample_predict
    graph = nx.convert_matrix.from_pandas_edgelist(sample, source='from_motif_index', target='to_motif_index',
                                                   edge_attr=True)
    # Graph-context feature: exp(-shortest alternative path length) per edge.
    shorest = [get_graph_distance(graph, row) for index, row in sample.iterrows()]
    shorest = list(np.exp(-np.array(shorest)))
    new_f = f.copy()
    new_f.append('gcp')
    sample['gcp'] = shorest
    new_feature = sample[new_f].values
    y_predict_prob=rf_graph.predict_proba(new_feature)
    np.save(output_path + "%s_%s_ccip_prob.npy" % (model_cell, sample_cell), y_predict_prob[:, 1])
    precision, recall, thresholds = precision_recall_curve(label, y_predict_prob[:, 1])
    au_pr = metrics.auc(recall, precision)  # NOTE(review): computed but never printed or returned
    fpr, tpr, thresholds = roc_curve(label, y_predict_prob[:, 1])
    au_roc = auc(fpr, tpr)
    print("auroc for the model: %0.4f"%(au_roc))
# CLI: parse options, normalize/create the output directory, then run.
# NOTE(review): optparse has been deprecated since Python 2.7; argparse is the
# modern replacement if this script is ever reworked.
parser = OptionParser()
parser.add_option('-o', '--output_path',help='Output path for storing the testing results')
parser.add_option('-m', '--model_path',help='Model file for predicting')
parser.add_option('-s', '--sample_file',help='Sample file for predicting')
parser.add_option('-M', '--model_cell',help='Model cell for predicting')
parser.add_option('-S', '--sample_cell',help='Sample cell for predicting')
(opts, args) = parser.parse_args()
output_path = opts.output_path
model_path = opts.model_path
sample_file = opts.sample_file
model_cell = opts.model_cell
sample_cell = opts.sample_cell
# Ensure output_path ends with '/' (run() concatenates file names onto it) and exists.
if not output_path.endswith('/'):
    output_path=output_path+'/'
if not os.path.exists(output_path):
    os.makedirs(output_path)
run(model_path,sample_file,output_path,model_cell,sample_cell)
|
import splitfolders

# Split the BdSL digit image folders into train/val/test = 80/10/10 with a
# fixed seed for reproducibility.
# NOTE(review): the "..Files/..." paths have no separator after ".." -- they
# refer to a directory literally named "..Files"; if "../Files" was intended,
# a slash is missing. Confirm against the actual directory layout.
splitfolders.ratio("..Files/BdSL/BdSL_digits/main",
                   output="..Files/BdSL/BdSL_digits/split", seed=13, ratio=(.8, .1, .1), group_prefix=None)
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import time
from selenium import webdriver
from selenium.webdriver.common.by import By
import chromedriver_binary
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
# In[2]:
import json
# Load test-account credentials from a local (non-committed) JSON file.
with open("./account.json") as json_file:
    json_data = json.load(json_file)
# In[3]:
driver = webdriver.Chrome()
action = ActionChains(driver)
wait = WebDriverWait(driver, 10)  # explicit-wait helper with a 10 s timeout
# In[4]:
# Open the hotel reservation site under test.
url = "http://staging.tripla-hotel.com/"
driver.get(url)
driver.maximize_window()
timeout = 5  # NOTE(review): unused -- `wait` carries its own 10 s timeout
# In[5]:
# Step 1: open the booking widget from the landing page.
try:
    # wait.until raises TimeoutException when the button never appears.
    availability_button = wait.until(EC.presence_of_element_located(
        (By.CSS_SELECTOR, 'body > div.search-widget-wrapper-872d0c > form > div > button')))
    availability_button.click()
    time.sleep(2)
    print("moved to booking_widget")
except TimeoutException:  # was a bare except; TimeoutException is imported for exactly this
    print("booking_widget is unloaded")
# In[6]:
# Step 2: switch into the reservation iframe and open the sign-in page.
try:
    # find_element_by_css_selector was removed in Selenium 4; use the By API.
    reservation_modal = driver.find_element(By.CSS_SELECTOR, "#tripla-booking-modal > div > div > iframe")
    driver.switch_to.frame(reservation_modal)
    sign_in_button = wait.until(EC.presence_of_element_located(
        (By.XPATH, "//*[@class='sign-in-button-container']/div[2]/button[1]")))
    sign_in_button.click()
    time.sleep(5)
    print("moved to sign-in page")
except Exception:  # narrowed from bare except so Ctrl-C / SystemExit still propagate
    print("failed to move to sign-in page")
# In[7]:
# Step 3: switch into the Facebook widget iframe and click its login button.
try:
    # find_element_by_css_selector was removed in Selenium 4; use the By API.
    fb_frame = driver.find_element(By.CSS_SELECTOR, "body > div.app-wrapper.webkit-overflow-scrolling > div > div.center-wrapper > form > div.mb-3.text-center > div > span > iframe")
    driver.switch_to.frame(fb_frame)
    sign_in_button_2 = wait.until(EC.presence_of_element_located(
        (By.CSS_SELECTOR, '#u_0_1 > div > table > tbody > tr:nth-child(2) > td:nth-child(1) > span')))
    sign_in_button_2.click()
    time.sleep(5)
    print("clicked the Facebook login")
except Exception:  # narrowed from bare except so Ctrl-C / SystemExit still propagate
    print("failed to move to facebook login")
# In[8]:
# Step 4: enter credentials in the Facebook popup window, then always clean up.
try:
    # switch_to_window() and find_element_by_id() were removed in Selenium 4.
    driver.switch_to.window(driver.window_handles[1])
    driver.find_element(By.ID, "email").send_keys(json_data['Account'][1]['id'])
    driver.find_element(By.ID, "pass").send_keys(json_data['Account'][1]['password'])
    driver.find_element(By.ID, "u_0_0").click()
    print("successfully logged in")
except Exception:  # narrowed from bare except so Ctrl-C / SystemExit still propagate
    print("login failed")
finally:
    time.sleep(5)
    driver.quit()
# In[ ]:
|
#!/usr/bin/env python #
# #
# Autor: Michela Negro, University of Torino. #
# On behalf of the Fermi-LAT Collaboration. #
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU GengReral Public License as published by #
# the Free Software Foundation; either version 3 of the License, or #
# (at your option) any later version. #
# #
#------------------------------------------------------------------------------#
"""Produces sky masks (work in progress)
"""
import os
import ast
import argparse
import numpy as np
import healpy as hp
import matplotlib.pyplot as plt
from Xgam import X_OUT
from Xgam.utils.logging_ import logger, startmsg
__description__ = 'Produce masks fits files'
"""Command-line switches.
"""
# Command-line interface definition.
formatter = argparse.ArgumentDefaultsHelpFormatter
PARSER = argparse.ArgumentParser(description=__description__,
                                 formatter_class=formatter)
PARSER.add_argument('-ff', '--maskfiles', type=str, required=True, nargs='*',
                    help='Mask files to combine.')
# NOTE(review): help text below looks copy-pasted from --maskfiles; the option
# actually labels the output file (MaskCombo_<label>.fits) -- confirm and fix.
PARSER.add_argument('--outflabel', type=str, required=True,
                    help='Mask files to combine.')
PARSER.add_argument('--show', type=ast.literal_eval, choices=[True, False],
                    default=False,
                    help='if True the mask map is displayed')
def combinemasks(**kwargs):
    """Multiply several HEALPix masks together and save the combined map.

    Reads every file in kwargs['maskfiles'], multiplies them pixel-wise, and
    writes the product to X_OUT/fits/MaskCombo_<outflabel>.fits, optionally
    displaying it when kwargs['show'] is True.
    """
    mask_paths = kwargs['maskfiles']
    logger.info('Reading mask %s...' % mask_paths[0])
    combined = hp.read_map(mask_paths[0])
    for path in mask_paths[1:]:
        combined = combined * hp.read_map(path)
    out_path = os.path.join(X_OUT, 'fits/MaskCombo_%s.fits' % kwargs['outflabel'])
    hp.write_map(out_path, combined)
    logger.info('Created %s' % out_path)
    if kwargs['show'] == True:
        hp.mollview(combined, cmap='bone')
        plt.show()
if __name__ == '__main__':
    # Entry point: parse CLI options and combine the masks.
    args = PARSER.parse_args()
    startmsg()
    combinemasks(**args.__dict__)
|
# Imports
import json, time, sys, requests
# Colors
class bcolors:
    """ANSI terminal escape codes for colored/styled console output."""
    HEADER = '\033[95m'     # bright magenta
    OKBLUE = '\033[94m'     # bright blue
    OKGREEN = '\033[92m'    # bright green
    WARNING = '\033[93m'    # bright yellow
    FAIL = '\033[91m'       # bright red
    ENDC = '\033[0m'        # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
# Decision Prompt
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
# Ask for a user!
user = raw_input(bcolors.BOLD + "Please enter a username or email. \n" + bcolors.ENDC)
# Do the request
r = requests.get('https://haveibeenpwned.com/api/v2/breachedaccount/' + user)
# Check if it failed.
if r.status_code is 200:
print bcolors.FAIL + "You have been pwned." + bcolors.ENDC
print bcolors.OKBLUE + "Do you want a list of pwns? (y/n)" + bcolors.ENDC
choice = raw_input().lower()
if choice in yes:
unload_json = r.text
load_json = json.loads(unload_json)
print bcolors.OKBLUE + "You appeared on the following leaks:" + bcolors.ENDC
for x in range (0, (len(load_json))):
print bcolors.HEADER + load_json[x]['Title'] + bcolors.ENDC
elif choice in no:
exit()
else:
sys.stdout.write(bcolors.WARNING + "Please respond with 'yes' or 'no'\n" + bcolors.ENDC)
else:
print bcolors.OKGREEN + "You have no records of being pwned, ;)" + bcolors.ENDC
x = 1
exit()
|
# -*- coding: utf-8 -*-
# Copyright European Organization for Nuclear Research (CERN) since 2012
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
    from rucio.vcsversion import VERSION_INFO
except ImportError:
    # Fallback for local development when the build-time generated
    # rucio/vcsversion.py is absent.
    VERSION_INFO = {'branch_nick': 'LOCALBRANCH', # NOQA
                    'revision_id': 'LOCALREVISION',
                    'version': 'VERSION',
                    'final': False,
                    'revno': 0}
RUCIO_VERSION = [VERSION_INFO['version'], ]  # list form so empty parts can be filtered out
FINAL = VERSION_INFO['final']  # This becomes true at Release Candidate time
def canonical_version_string():
    """Return the canonical dotted version string, skipping empty components."""
    non_empty_parts = [part for part in RUCIO_VERSION if part]
    return '.'.join(non_empty_parts)
def version_string():
    """Return the version string (alias for the canonical form)."""
    version = canonical_version_string()
    return version
def vcs_version_string():
    """Return the VCS identifier as '<branch>:<revision>'."""
    return "{0}:{1}".format(VERSION_INFO['branch_nick'], VERSION_INFO['revision_id'])
def version_string_with_vcs():
    """Return '<canonical version>-<vcs version>'."""
    return "{0}-{1}".format(canonical_version_string(), vcs_version_string())
|
from django.shortcuts import render
from django.http import HttpResponse
def home(request):
    """Render the site landing page."""
    return render(request, "home.html")
def about(request):
    """Render the about page."""
    return render(request, "about.html")
def registeration(request):
    """Render the registration form page.

    NOTE(review): 'registeration' (and its template name) is misspelled; the
    name is kept because URLconf entries elsewhere likely reference it exactly.
    """
    return render(request, "registeration.html")
def login(request):
    """Render the login page.

    Template file name is 'Login.html' (capital L); case matters on
    case-sensitive filesystems.
    """
    return render(request, "Login.html")
|
import xlrd
wb = xlrd.open_workbook("C:\Users\subbu\Desktop\Book.xlsx")
sheet = wb.sheet_by_index(0)
rows = sheet.nrows
for row in range(rows):
print sheet.cell_value(row,0) + sheet.cell_value(row,1)
|
# Generated by Django 3.2.4 on 2021-09-03 01:12
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the Signup model and drop the obsolete 'staff' model.

    NOTE(review): 'pwd' stores a password in a plain CharField; Django's
    hashed auth User passwords should normally be used instead.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('paper', '0012_auto_20210901_2232'),
    ]

    operations = [
        migrations.CreateModel(
            name='Signup',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('firstname', models.CharField(max_length=50)),
                ('lastname', models.CharField(max_length=50)),
                ('email', models.CharField(max_length=50)),
                ('pwd', models.CharField(max_length=50)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.DeleteModel(
            name='staff',
        ),
    ]
|
"""
Button
click and response
option: command
func() -> call a function, get the returned value
"""
from tkinter import *
def response():
    """Button callback: acknowledge the click on stdout."""
    message = "I was clicked!"
    print(message)
root = Tk()
root.title('Python GUI - Button')
root.geometry('640x480+300+300')  # 640x480 window placed at screen offset (300, 300)
root.config(bg='#ddddff')
# Pass the function object itself; the commented variant would CALL response()
# once at construction time and bind its None return value as the command.
btn1 = Button(root, text='Click me', command=response)
# btn1 = Button(root, text='Click me', command=response())
btn1.pack()
root.mainloop()
# Flask==1.1.1
# praw==6.5.1
# https://towardsdatascience.com/scraping-reddit-data-1c0af3040768
# https://blog.miguelgrinberg.com/post/designing-a-restful-api-with-python-and-flask
import praw
# from praw.models import MoreComments
import pandas as pd
import json
from flask import Flask, request, jsonify
from flask_cors import CORS
app = Flask(__name__)
CORS(app)  # allow cross-origin requests (front-end is served from another origin)
def getSubComments(comment, level, commentsData):
    """Recursively flatten a comment's reply tree into commentsData.

    Each reply is appended as a dict with its nesting level (parent level + 1),
    author, body, a default isChecked flag, and stringified score.
    """
    if not comment.replies:
        return
    child_level = level + 1
    for reply in comment.replies:
        commentsData.append({"level": child_level, "author": str(reply.author), "body": str(reply.body), "isChecked": False, "score": str(reply.score)})
        getSubComments(reply, child_level, commentsData)
# example url
# url="https://www.reddit.com/r/devops/comments/elqkkg/what_metrics_can_we_use_to_drive_the_things_we/"
def getDataStr(url):
    """Fetch a reddit submission and its flattened comment tree as a JSON-able dict.

    Args:
        url: full reddit thread URL.
    Returns:
        dict with the url, submission id (used downstream to name the JSON
        file / resource folder), submission metadata, and a flat comment list
        with nesting levels.
    """
    import os
    # SECURITY: credentials were hard-coded here and committed to source
    # control; read them from the environment, falling back to the legacy
    # values so existing deployments keep working. The old secret should be
    # rotated regardless.
    reddit = praw.Reddit(client_id=os.environ.get('REDDIT_CLIENT_ID', 'QyfE0lzekE-wDQ'),
                         client_secret=os.environ.get('REDDIT_CLIENT_SECRET', 'ME3ZIEoH1aLaOt7NasRe_tJb20M'),
                         user_agent=os.environ.get('REDDIT_USER_AGENT', 'videomaker'))
    submission = reddit.submission(url=url)
    # submission = reddit.submission(id="a3p0uq")
    subMissionId = str(submission.id)
    submissionData = {
        "author": str(submission.author),
        "subreddit": str(submission.subreddit),
        "title": str(submission.title),
        "body": str(submission.selftext),
        "score": str(submission.score),
    }
    # Flatten the comment tree; replace_more(limit=0) drops "load more" stubs.
    submission.comments.replace_more(limit=0)
    commentsData = []
    # tlc = top level comment (level 1); getSubComments walks the replies.
    for tlc in submission.comments:
        level = 1
        commentsData.append({"level":level, "author": str(tlc.author), "body": str(tlc.body), "isChecked": False, "score":str(tlc.score)})
        getSubComments(tlc, level, commentsData)
    totalJson = {
        "url": url,
        "id": subMissionId,
        "submissionData": submissionData,
        "commentsData": commentsData
    }
    print(totalJson)
    return totalJson
@app.route('/hello')
def hello_world():
    """Liveness probe: plain-text greeting."""
    greeting = 'Hello, World!'
    return greeting
@app.route('/getdatajson', methods=['POST'])
def getDataJSON():
    """POST {"url": ...} -> scraped submission + comments as JSON."""
    url = request.json['url']
    print("--url is: " + url)
    return jsonify(getDataStr(url))
# curl -i -H "Content-Type: application/json" -X POST -d '{"url":"https://www.reddit.com/r/devops/comments/elqkkg/what_metrics_can_we_use_to_drive_the_things_we/"}' http://127.0.0.1:5000/getdatajson
# {"url":"https://www.reddit.com/r/devops/comments/elqkkg/what_metrics_can_we_use_to_drive_the_things_we/"}
if __name__ == '__main__':
    # Flask development server — not suitable for production deployment.
    app.run()
from custom_modules import messages as m
from custom_modules import stock_data as s
from custom_modules import finviz as f
from custom_modules import more_stock_data as z
from custom_modules import investors_hub as ih
class Scraper:
    """Drives each site-specific scraper module against one user-chosen symbol."""

    def symbol_getter(self):
        """Ask the user for a ticker symbol and store it on the instance."""
        self.symbol = input("Enter stock symbol: ")

    def scrape_yahoo(self):
        """Fetch and print the Yahoo Finance summary table."""
        yahoo_scraper = s.YahooFinance(self.symbol)
        yahoo_scraper.build_url()
        yahoo_scraper.parser()
        yahoo_scraper.pull_table_data()

    def scrape_stock_twits(self):
        """Fetch and display recent StockTwits messages for the symbol."""
        twits_scraper = m.StockTwits(self.symbol)
        twits_scraper.open_parser()
        twits_scraper.find_element()
        twits_scraper.display()

    def scrape_finviz(self):
        """Fetch and print the FinViz fundamentals table."""
        finviz_scraper = f.FinViz(self.symbol)
        finviz_scraper.build_url()
        finviz_scraper.parser()
        finviz_scraper.pull_table_data()

    def more_yahoo_finance(self):
        """Fetch and print the extended Yahoo Finance statistics table."""
        extra_scraper = z.MoreYahooFinance(self.symbol)
        extra_scraper.build_url()
        extra_scraper.parser()
        extra_scraper.pull_table_data()

    def investors_hub(self):
        """Pull, filter and print candidate stocks from InvestorsHub."""
        hub_scraper = ih.InvestorsHub()
        hub_scraper.pull()
        hub_scraper.filter_results()
        self.potential_stocks = hub_scraper.results()
        print(self.potential_stocks)
# --- Interactive entry point ---------------------------------------------
scraper = Scraper()
# Show the InvestorsHub candidate list first so the user can pick from it.
scraper.investors_hub()
print(" ", '\n')
scraper.symbol_getter()
print(" ")  # blank separator between reports
scraper.scrape_yahoo()
print(" ")
scraper.more_yahoo_finance()
print(" ")
scraper.scrape_stock_twits()
print(" ")
scraper.scrape_finviz()
|
"""Forms"""
from flask_wtf import FlaskForm
from wtforms.fields import (
HiddenField,
PasswordField,
SelectField,
StringField,
SubmitField,
)
from wtforms.validators import (
DataRequired,
EqualTo,
Length,
)
from .models import ActionType
class LoginForm(FlaskForm):
    """Sign-in form: username + password."""
    # Field names double as the HTML input names POSTed back to the server.
    username = StringField(
        "Username", validators=[DataRequired(), Length(min=6, max=30)]
    )
    password = PasswordField(
        "Password", validators=[DataRequired(), Length(min=6, max=20)]
    )
    submit = SubmitField("Sign In")
class RegisterForm(FlaskForm):
    """Account registration form with double password entry."""
    username = StringField(
        "Username", validators=[DataRequired(), Length(min=6, max=30)]
    )
    # password and password_confirm each carry an EqualTo pointing at the
    # other, so a mismatch is reported whichever field validates first.
    password = PasswordField(
        "Password",
        validators=[
            DataRequired(),
            Length(min=6, max=20),
            EqualTo("password_confirm", message="Password Mismatched!"),
        ],
    )
    password_confirm = PasswordField(
        "Password Confirm",
        validators=[
            DataRequired(),
            Length(min=6, max=20),
            EqualTo("password", message="Password Mismatched!"),
        ],
    )
    submit = SubmitField("Register")
class ActionForm(FlaskForm):
    """Create/edit an action attached to a channel."""
    # Optional display label (may be left empty; only max length enforced).
    action_name = StringField("Name", validators=[Length(max=32)])
    # Choices come from the ActionType enum: (name stored, value displayed).
    action_type = SelectField(
        "Type",
        validators=[DataRequired()],
        choices=[(item.name, item.value) for item in ActionType],
    )
    # Populated by the page template, not typed by the user.
    channel_id = HiddenField("Channel ID", validators=[DataRequired()])
|
import requests
import json
from sys import argv
import os
from dotenv import load_dotenv
# Load .env so WEBHOOK_URL_FIBERMC can be supplied without exporting it.
load_dotenv()

WEBHOOK_URL_FIBERMC = os.getenv("WEBHOOK_URL_FIBERMC")

# Fail fast with clear messages instead of posting to `None` or crashing
# with an IndexError on missing argv.
if WEBHOOK_URL_FIBERMC is None:
    raise SystemExit("WEBHOOK_URL_FIBERMC is not set (check your .env)")
if len(argv) < 2:
    raise SystemExit("usage: python post_webhook.py <payload.json>")

post_headers = {
    "content-type": "application/json",
}

# Parse the payload up front so malformed JSON fails before any network I/O.
with open(argv[1]) as f:
    post_data = json.load(f)

# timeout= keeps the script from hanging forever on a dead endpoint.
res = requests.post(WEBHOOK_URL_FIBERMC, json=post_data, headers=post_headers, timeout=30)
print(res)
res.raise_for_status()  # a non-2xx reply should be an error, not silence
|
from array import array
import math
class Vector2d0:
    """Minimal 2-D vector: iterable, printable, comparable and measurable."""

    typecode = 'd'  # array type code used when serializing to bytes

    def __init__(self, x, y):
        self.x = float(x)
        self.y = float(y)

    def __iter__(self):
        yield self.x
        yield self.y

    def __repr__(self):
        x, y = self
        return '{}({!r}, {!r})'.format(type(self).__name__, x, y)

    def __str__(self):
        return str(tuple(self))

    def __bytes__(self):
        payload = array(self.typecode, self)
        return bytes([ord(self.typecode)]) + bytes(payload)

    def __eq__(self, other):
        return tuple(self) == tuple(other)

    def __abs__(self):
        return math.hypot(self.x, self.y)

    def __bool__(self):
        return abs(self) != 0
class Vector2d1:
    """Vector2d0 plus a bytes round-trip constructor and format() support."""

    typecode = 'd'  # array type code used when (de)serializing

    def __init__(self, x, y):
        self.x = float(x)
        self.y = float(y)

    @classmethod
    def frombytes(cls, octets):
        """Alternate constructor: rebuild a vector from bytes(v)."""
        code = chr(octets[0])
        coords = memoryview(octets[1:]).cast(code)
        return cls(*coords)

    def __iter__(self):
        yield self.x
        yield self.y

    def __repr__(self):
        x, y = self
        return '{}({!r}, {!r})'.format(type(self).__name__, x, y)

    def __str__(self):
        return str(tuple(self))

    def __bytes__(self):
        payload = array(self.typecode, self)
        return bytes([ord(self.typecode)]) + bytes(payload)

    def __eq__(self, other):
        return tuple(self) == tuple(other)

    def __abs__(self):
        return math.hypot(self.x, self.y)

    def __bool__(self):
        return abs(self) != 0

    def __format__(self, format_spec=''):
        x, y = (format(c, format_spec) for c in self)
        return '({}, {})'.format(x, y)
class Vector2d2:
    """Vector2d1 plus polar-coordinate formatting (format spec ending in 'p')."""

    typecode = 'd'  # array type code used when (de)serializing

    def __init__(self, x, y):
        self.x = float(x)
        self.y = float(y)

    @classmethod
    def frombytes(cls, octets):
        """Alternate constructor: rebuild a vector from bytes(v)."""
        code = chr(octets[0])
        coords = memoryview(octets[1:]).cast(code)
        return cls(*coords)

    def __iter__(self):
        yield self.x
        yield self.y

    def __repr__(self):
        x, y = self
        return '{}({!r}, {!r})'.format(type(self).__name__, x, y)

    def __str__(self):
        return str(tuple(self))

    def __bytes__(self):
        payload = array(self.typecode, self)
        return bytes([ord(self.typecode)]) + bytes(payload)

    def __eq__(self, other):
        return tuple(self) == tuple(other)

    def __abs__(self):
        return math.hypot(self.x, self.y)

    def __bool__(self):
        return abs(self) != 0

    def __format__(self, format_spec=''):
        # A trailing 'p' switches to polar coordinates: <magnitude, angle>.
        if format_spec.endswith('p'):
            inner = format_spec[:-1]
            mag, ang = (format(c, inner) for c in (abs(self), self.angle()))
            return '<{}, {}>'.format(mag, ang)
        x, y = (format(c, format_spec) for c in self)
        return '({}, {})'.format(x, y)

    def angle(self):
        """Direction of the vector in radians."""
        return math.atan2(self.y, self.x)
class Vector2d3:
    """2-D vector with read-only coordinates (name-mangled private storage).

    Instances are hashable, so they work in sets and as dict keys.

    >>> v3 = Vector2d3(3, 4)
    >>> print(v3.x, v3.y)
    3.0 4.0
    """
    # Fix: the original docstring also contained ``>>> int("323k")`` with
    # expected output ``456`` — an unrelated doctest that is guaranteed to
    # fail (it raises ValueError). Removed so doctest.testmod() passes.

    typecode = 'd'  # array type code used by __bytes__/frombytes

    def __init__(self, x, y):
        self.__x = float(x)
        self.__y = float(y)
        self.__h = None   # NOTE(review): appears unused here — confirm before removing
        self._a = 'asfg'  # NOTE(review): appears unused here — confirm before removing

    @property
    def x(self):
        """Read-only x coordinate."""
        return self.__x

    @property
    def y(self):
        """Read-only y coordinate."""
        return self.__y

    @classmethod
    def frombytes(cls, octets):
        """Alternate constructor: inverse of __bytes__."""
        typecode = chr(octets[0])
        memv = memoryview(octets[1:]).cast(typecode)
        return cls(*memv)

    def __hash__(self):
        # XOR of coordinate hashes; defined together with __eq__ below.
        return hash(self.x) ^ hash(self.y)

    def __iter__(self):
        return (i for i in (self.__x, self.__y))

    def __repr__(self):
        class_name = type(self).__name__
        return '{}({!r}, {!r})'.format(class_name, *self)

    def __str__(self):
        return str(tuple(self))

    def __bytes__(self):
        return (
            bytes([ord(self.typecode)]) + bytes(array(self.typecode, self)))

    def __eq__(self, other):
        return tuple(self) == tuple(other)

    def __abs__(self):
        return math.hypot(self.__x, self.__y)

    def __bool__(self):
        return bool(abs(self))

    def __format__(self, format_spec=''):
        # A trailing 'p' selects polar coordinates: <magnitude, angle>.
        if format_spec.endswith('p'):
            format_spec = format_spec[:-1]
            coords = (abs(self), self.angle())
            outer_fmt = '<{}, {}>'
        else:
            coords = self
            outer_fmt = '({}, {})'
        components = (format(c, format_spec) for c in coords)
        return outer_fmt.format(*components)

    def angle(self):
        """Direction of the vector in radians, via atan2."""
        return math.atan2(self.__y, self.__x)
v3 = Vector2d3(5, 1)
# Outside the class body there is no name mangling, so this creates a brand-new
# attribute literally named '__x'; it does NOT touch the private '_Vector2d3__x'.
v3.__x = 9
print(v3.__dict__)  # shows '_Vector2d3__x' (5.0) alongside the new '__x' (9)
print(v3.__x)       # 9 — the freshly created attribute
print(v3.x, v3.y)   # 5.0 1.0 — the real coordinates are unchanged
# import doctest
# doctest.testmod()
class Vector2d4:
    """2-D vector storing coordinates in __slots__ (no per-instance __dict__).

    Fix: the original spelled the attribute ``__slot__``. A misspelled dunder
    is silently ignored by Python, so instances still carried a __dict__ and
    the intended memory saving never happened.
    """

    # Private names listed in __slots__ are mangled like any other class-body
    # name, so they line up with the self.__x / self.__y assignments below.
    __slots__ = ('__x', '__y')

    typecode = 'd'  # array type code used by __bytes__/frombytes

    def __init__(self, x, y):
        self.__x = float(x)
        self.__y = float(y)

    @property
    def x(self):
        """Read-only x coordinate."""
        return self.__x

    @property
    def y(self):
        """Read-only y coordinate."""
        return self.__y

    @classmethod
    def frombytes(cls, octets):
        """Alternate constructor: inverse of __bytes__."""
        typecode = chr(octets[0])
        memv = memoryview(octets[1:]).cast(typecode)
        return cls(*memv)

    def __hash__(self):
        # XOR of coordinate hashes; defined together with __eq__ below.
        return hash(self.x) ^ hash(self.y)

    def __iter__(self):
        return (i for i in (self.__x, self.__y))

    def __repr__(self):
        class_name = type(self).__name__
        return '{}({!r}, {!r})'.format(class_name, *self)

    def __str__(self):
        return str(tuple(self))

    def __bytes__(self):
        return (
            bytes([ord(self.typecode)]) + bytes(array(self.typecode, self)))

    def __eq__(self, other):
        return tuple(self) == tuple(other)

    def __abs__(self):
        return math.hypot(self.__x, self.__y)

    def __bool__(self):
        return bool(abs(self))

    def __format__(self, format_spec=''):
        # A trailing 'p' selects polar coordinates: <magnitude, angle>.
        if format_spec.endswith('p'):
            format_spec = format_spec[:-1]
            coords = (abs(self), self.angle())
            outer_fmt = '<{}, {}>'
        else:
            coords = self
            outer_fmt = '({}, {})'
        components = (format(c, format_spec) for c in coords)
        return outer_fmt.format(*components)

    def angle(self):
        """Direction of the vector in radians, via atan2."""
        return math.atan2(self.__y, self.__x)
from qt_core import *
# CUSTOM LEFT MENU
class PyDiv(QWidget):
    """Thin vertical divider: a 1px colored line inside a 20px-wide widget."""

    def __init__(self, color):
        super().__init__()

        # Horizontal layout with a little vertical padding around the line.
        self.layout = QHBoxLayout(self)
        self.layout.setContentsMargins(0, 5, 0, 5)

        # The visible 1px line, tinted with the requested color.
        self.frame_line = QFrame()
        self.frame_line.setStyleSheet("background: {};".format(color))
        self.frame_line.setMinimumWidth(1)
        self.frame_line.setMaximumWidth(1)
        self.layout.addWidget(self.frame_line)

        # Pin the overall widget width.
        self.setMinimumWidth(20)
        self.setMaximumWidth(20)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from TechParser.query import *
from TechParser import db, get_conf
import secrets
import uuid
from Crypto import Random
def remove_from_blacklist(link):
    """Delete the blacklist entry whose link matches *link*."""
    main_db = db.Database.main_database
    main_db.execute_query(Q_DELETE_FROM_BLACKLIST, [(link,)])
def remove_from_history(link):
    """Delete the history entry whose link matches *link*."""
    main_db = db.Database.main_database
    main_db.execute_query(Q_DELETE_FROM_HISTORY, [(link,)])
def add_to_blacklist(article):
    """Store *article* in the blacklist; silently skip duplicates."""
    main_db = db.Database.main_database
    IntegrityError = main_db.userData  # userData holds the backend's IntegrityError class
    try:
        row = (
            article['title'],
            article['link'],
            article['summary'],
            article.get('fromrss', 0),
            article.get('icon', ''),
            article.get('color', '#000'),
            article['source'],
        )
        main_db.execute_query(Q_ADD_TO_BLACKLIST, [row])
    except IntegrityError:
        pass  # already blacklisted — duplicates are expected and ignored
def add_to_interesting(article):
    """Store *article* in the history table (marked interesting); skip duplicates."""
    main_db = db.Database.main_database
    IntegrityError = main_db.userData  # userData holds the backend's IntegrityError class
    try:
        row = (
            article['title'],
            article['link'],
            article['summary'],
            article.get('fromrss', 0),
            article.get('icon', ''),
            article.get('color', '#000'),
            article['source'],
        )
        main_db.execute_query(Q_ADD_TO_HISTORY, [row])
    except IntegrityError:
        pass  # already recorded — duplicates are expected and ignored
def get_blacklist():
    """Return all blacklisted articles as dicts flagged disliked."""
    main_db = db.Database.main_database
    main_db.execute_query(Q_GET_BLACKLIST)
    articles = []
    for row in main_db.fetchall():
        articles.append({
            'title': row[0],
            'link': row[1],
            'summary': row[2],
            'fromrss': row[3],
            'icon': row[4],
            'color': row[5],
            'source': row[6],
            'liked': False,
            'disliked': True,
        })
    return articles
def get_interesting_articles():
    """Return all history articles (marked interesting) as dicts flagged liked."""
    main_db = db.Database.main_database
    main_db.execute_query(Q_GET_HISTORY)
    articles = []
    for row in main_db.fetchall():
        articles.append({
            'title': row[0],
            'link': row[1],
            'summary': row[2],
            'fromrss': row[3],
            'icon': row[4],
            'color': row[5],
            'source': row[6],
            'liked': True,
            'disliked': False,
        })
    return articles
def generate_sessionid(num_bytes=16):
    """Return a random session UUID built from *num_bytes* of CSPRNG output.

    Uses the stdlib ``secrets`` module instead of PyCrypto's ``Random`` —
    same cryptographic strength, one fewer third-party dependency.
    Note: ``uuid.UUID`` accepts exactly 16 bytes; other values raise ValueError.
    """
    return uuid.UUID(bytes=secrets.token_bytes(num_bytes))
def add_session():
    """Create a fresh session id, persist it, and return it as a string."""
    session_id = str(generate_sessionid())
    db.Database.main_database.execute_query(Q_ADD_SESSIONID, [(session_id,)])
    return session_id
def delete_session(sid):
    """Remove the stored session id *sid*."""
    main_db = db.Database.main_database
    main_db.execute_query(Q_DELETE_SESSIONID, [(sid,)])
def check_password(password):
    """Return True when *password* matches the configured password.

    NOTE(review): plain-text comparison against stored config — consider
    hashed storage plus a constant-time compare.
    """
    expected = get_conf.config.password
    return expected == password
def check_session_existance(sid):
    """Purge expired sessions, then report whether *sid* is still valid."""
    remove_old_sessions()
    main_db = db.Database.main_database
    main_db.execute_query(Q_CHECK_SESSIONID, [(sid,)])
    return bool(main_db.fetchone())
def remove_old_sessions():
    """Purge expired session ids from the database."""
    main_db = db.Database.main_database
    main_db.execute_query(Q_REMOVE_OLD_SESSIONIDS)
def remove_session(sid):
    """Delete the session row for *sid*."""
    main_db = db.Database.main_database
    main_db.execute_query(Q_REMOVE_SESSIONID, [(sid,)])
def get_var(name, default=None):
    """Fetch the stored variable *name*; return *default* when it is absent."""
    main_db = db.Database.main_database
    main_db.execute_query(Q_GET_VAR, [(name,)])
    result = main_db.fetchone()
    try:
        return result[0]
    except TypeError:  # fetchone() returned None — no such variable
        return default
def set_var(name, value):
    """Insert variable *name*; on a duplicate key, roll back and update instead."""
    main_db = db.Database.main_database
    integrity_error = main_db.userData  # userData holds the backend's IntegrityError
    try:
        main_db.execute_query(Q_ADD_VAR, [(name, value)])
    except integrity_error:
        main_db.con.rollback()
        main_db.execute_query(Q_SET_VAR, [(value, name)])
|
from d7a.system_files.access_profile import AccessProfileFile
from d7a.system_files.dll_config import DllConfigFile
from d7a.system_files.firmware_version import FirmwareVersionFile
from d7a.system_files.system_file_ids import SystemFileIds
from d7a.system_files.uid import UidFile
class SystemFiles:
    """Registry mapping D7A system-file ids to their default file objects."""

    # One entry per system file; access profiles 0-14 each get their own
    # AccessProfileFile distinguished by access_specifier.
    files = {
        SystemFileIds.UID: UidFile(),
        SystemFileIds.FIRMWARE_VERSION: FirmwareVersionFile(),
        SystemFileIds.DLL_CONFIG: DllConfigFile(),
        SystemFileIds.ACCESS_PROFILE_0: AccessProfileFile(access_specifier=0),
        SystemFileIds.ACCESS_PROFILE_1: AccessProfileFile(access_specifier=1),
        SystemFileIds.ACCESS_PROFILE_2: AccessProfileFile(access_specifier=2),
        SystemFileIds.ACCESS_PROFILE_3: AccessProfileFile(access_specifier=3),
        SystemFileIds.ACCESS_PROFILE_4: AccessProfileFile(access_specifier=4),
        SystemFileIds.ACCESS_PROFILE_5: AccessProfileFile(access_specifier=5),
        SystemFileIds.ACCESS_PROFILE_6: AccessProfileFile(access_specifier=6),
        SystemFileIds.ACCESS_PROFILE_7: AccessProfileFile(access_specifier=7),
        SystemFileIds.ACCESS_PROFILE_8: AccessProfileFile(access_specifier=8),
        SystemFileIds.ACCESS_PROFILE_9: AccessProfileFile(access_specifier=9),
        SystemFileIds.ACCESS_PROFILE_10: AccessProfileFile(access_specifier=10),
        SystemFileIds.ACCESS_PROFILE_11: AccessProfileFile(access_specifier=11),
        SystemFileIds.ACCESS_PROFILE_12: AccessProfileFile(access_specifier=12),
        SystemFileIds.ACCESS_PROFILE_13: AccessProfileFile(access_specifier=13),
        SystemFileIds.ACCESS_PROFILE_14: AccessProfileFile(access_specifier=14),
    }

    def get_all_system_files(self):
        # Iterating a dict yields its KEYS, so this returns the SystemFileIds
        # members sorted by numeric id — not the file objects themselves.
        # NOTE(review): confirm callers expect ids rather than files here.
        return sorted(self.files, key=lambda t: t.value)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.