index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
991,800 | 0ad4157e9a72b4da6cde700a25baa640230d01d0 | from tensorflow import keras
def unet(channel):
    """Build a small 3-level U-Net over images with `channel` input channels."""
    inp = keras.layers.Input(shape=(None, None, channel), name='input')
    # Encoder: conv blocks separated by 2x2 max-pooling.
    enc1 = conv_layer(inp, 64)
    enc2 = conv_layer(pool(enc1), 128)
    # Bottleneck.
    bottleneck = conv_layer(pool(enc2), 256)
    # Decoder: upsample and fuse with matching encoder features.
    dec1 = deconv_layer(bottleneck, enc2, 128)
    dec2 = deconv_layer(dec1, enc1, 64)
    # Final projection to 2 maps with no BN/ReLU, then sigmoid output.
    logits = conv_layer(dec2, 2, False, False)
    out = keras.layers.Activation(activation="sigmoid")(logits)
    return keras.models.Model(inputs=inp, outputs=out)
def double_conv(input, filters):
    """Apply two successive conv_layer blocks with the same filter count."""
    x = conv_layer(input, filters)
    x = conv_layer(x, filters)
    return x
def pool(input):
    """2x2 max-pooling with stride 2 (halves each spatial dimension)."""
    layer = keras.layers.MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding="valid")
    return layer(input)
def conv_layer(input, filters, bn=True, ac=True):
    """3x3 same-padding convolution, optionally followed by BatchNorm and ReLU."""
    conv = keras.layers.Conv2D(
        filters=filters,
        kernel_size=(3, 3),
        strides=(1, 1),
        kernel_initializer='Orthogonal',
        padding='same',
        use_bias=False,  # bias is redundant when BatchNorm follows
    )
    out = conv(input)
    if bn:
        out = keras.layers.BatchNormalization(axis=-1, momentum=0.0, epsilon=0.0001)(out)
    if ac:
        out = keras.layers.Activation(activation='relu')(out)
    return out
def deconv_layer(input, conv_prev, filter):
    """Upsample 2x, project to `filter` channels, concat the skip, double-conv."""
    upsampled = keras.layers.UpSampling2D(size=(2, 2))(input)
    projected = keras.layers.Conv2D(
        filters=filter,
        kernel_size=(3, 3),
        strides=(1, 1),
        kernel_initializer='Orthogonal',
        padding='same',
    )(upsampled)
    merged = keras.layers.concatenate([conv_prev, projected])
    return double_conv(merged, filter)
if __name__ == "__main__":
    # Smoke test: build a single-channel U-Net, print its layer summary, and
    # render the architecture diagram (plot_model needs pydot/graphviz).
    model = unet(1)
    model.summary()
    keras.utils.plot_model(model, to_file='model.png', show_shapes=True)
|
991,801 | cc1d39d2b49b1998810e476c03986312ea52264e | #!/usr/bin/env python3
import torch.nn as nn
import torch as T
import torch.nn.functional as F
from torch.nn.modules.rnn import RNNCellBase
from subLSTM.functional import SubLSTMCell as SubLSTMCellF
import math
class SubLSTMCell(RNNCellBase):
    r"""A long sub-short-term memory (subLSTM) cell.

    Implements the cell from https://arxiv.org/abs/1711.02448, where gates
    combine subtractively rather than multiplicatively:

    .. math::
        \begin{array}{ll}
        i = \mathrm{sigmoid}(W_{ii} x + b_{ii} + W_{hi} h + b_{hi}) \\
        f = \mathrm{sigmoid}(W_{if} x + b_{if} + W_{hf} h + b_{hf}) \\
        g = \mathrm{sigmoid}(W_{ig} x + b_{ig} + W_{hc} h + b_{hg}) \\
        o = \mathrm{sigmoid}(W_{io} x + b_{io} + W_{ho} h + b_{ho}) \\
        c' = f * c + g - i \\
        h' = \mathrm{sigmoid}(c') - o \\
        \end{array}

    Args:
        input_size: number of expected features in the input ``x``.
        hidden_size: number of features in the hidden state ``h``.
        bias: if ``False``, the layer uses no ``b_ih`` / ``b_hh``. Default: ``True``.

    Inputs: input, (h_0, c_0)
        - **input** (batch, input_size): input features.
        - **h_0** (batch, hidden_size): initial hidden state per batch element.
        - **c_0** (batch, hidden_size): initial cell state per batch element.

    Outputs: h_1, c_1
        - **h_1** (batch, hidden_size): next hidden state.
        - **c_1** (batch, hidden_size): next cell state.

    Attributes:
        weight_ih: learnable input-hidden weights, shape ``(4*hidden_size, input_size)``.
        weight_hh: learnable hidden-hidden weights, shape ``(4*hidden_size, hidden_size)``.
        bias_ih: learnable input-hidden bias, shape ``(4*hidden_size)``.
        bias_hh: learnable hidden-hidden bias, shape ``(4*hidden_size)``.

    Example::

        >>> rnn = SubLSTMCell(10, 20)
        >>> hx, cx = torch.randn(3, 20), torch.randn(3, 20)
        >>> for step in range(6):
        ...     hx, cx = rnn(inputs[step], (hx, cx))
    """

    def __init__(self, input_size, hidden_size, bias=True):
        super(SubLSTMCell, self).__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.bias = bias
        gate_size = 4 * hidden_size  # i, f, g, o stacked along dim 0
        self.weight_ih = nn.Parameter(T.Tensor(gate_size, input_size))
        self.weight_hh = nn.Parameter(T.Tensor(gate_size, hidden_size))
        if not bias:
            self.register_parameter('bias_ih', None)
            self.register_parameter('bias_hh', None)
        else:
            self.bias_ih = nn.Parameter(T.Tensor(gate_size))
            self.bias_hh = nn.Parameter(T.Tensor(gate_size))
        self.reset_parameters()

    def reset_parameters(self):
        """Re-initialise every parameter uniformly in [-1/sqrt(H), 1/sqrt(H)]."""
        bound = 1.0 / math.sqrt(self.hidden_size)
        for param in self.parameters():
            param.data.uniform_(-bound, bound)

    def forward(self, input, hx):
        """Run one step; the gate math lives in the functional kernel."""
        return SubLSTMCellF(
            input, hx,
            self.weight_ih, self.weight_hh,
            self.bias_ih, self.bias_hh,
        )
|
991,802 | 9bbf5e0564f5ad7b6ab1ebd64715bb1477b8c46a | # V0
# V1
# http://bookshadow.com/weblog/2018/05/06/leetcode-masking-personal-information/
class Solution(object):
    def maskPII(self, S):
        """
        Mask personal information (email or phone number) in S.

        :type S: str
        :rtype: str
        """
        # Case 1: email such as "AB@qq.com" -> "a*****b@qq.com".
        if '@' in S:
            left, right = S.lower().split('@')
            return left[0] + '*****' + left[-1] + '@' + right
        # Case 2: phone number such as "1(234)567-890".
        # Fix: this snippet never imported `re`, so the call below raised
        # NameError; import locally to keep the block self-contained.
        import re
        digits = re.sub(r'\D', '', S)  # keep digits only
        country_code = len(digits) - 10  # digits beyond the local 10 form the country code
        prefix = '+' + '*' * country_code + '-' if country_code else ''
        return prefix + '***-***-' + digits[-4:]
# V1'
# https://blog.csdn.net/fuxuemingzhu/article/details/80644199
class Solution(object):
    def convert_phone(self, phone):
        """Strip separators and mask all but the last four digits."""
        phone = phone.strip()
        for sep in ' ()-+':
            phone = phone.replace(sep, '')
        if len(phone) == 10:
            return "***-***-" + phone[-4:]
        # Anything beyond ten digits is treated as a country code.
        return "+" + '*' * (len(phone) - 10) + "-***-***-" + phone[-4:]

    def convert_email(self, email):
        """Lower-case the address and mask the local part's interior."""
        first_name, host = email.lower().split('@')
        return first_name[0] + '*****' + first_name[-1] + '@' + host

    def maskPII(self, S):
        """
        :type S: str
        :rtype: str
        """
        return self.convert_email(S) if '@' in S else self.convert_phone(S)
# V2
# Time: O(1)
# Space: O(1)
# V2
# Time: O(1)
# Space: O(1)
class Solution(object):
    def maskPII(self, S):
        """
        Mask personal information (email or phone number) in S.

        :type S: str
        :rtype: str
        """
        if '@' in S:
            first, after = S.split('@')
            return "{}*****{}@{}".format(first[0], first[-1], after).lower()
        # Fix: `filter(...)` returns a lazy iterator on Python 3, so the
        # original's `digits[-4:]` / `len(digits)` raised TypeError; collect
        # the digits into a string instead.
        digits = "".join(c for c in S if c.isdigit())
        local = "***-***-{}".format(digits[-4:])
        if len(digits) == 10:
            return local
        return "+{}-{}".format('*' * (len(digits) - 10), local)
991,803 | 3278b2ca1650f8f6fdd0a9e62cd0878e28a4c12a | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds an optional (nullable) yes/no "student_loan" flag to the Client model.
    # NOTE(review): NullBooleanField is deprecated in modern Django in favour of
    # BooleanField(null=True); kept as-is because editing a historical migration
    # would change its recorded state.

    dependencies = [
        ('client', '0035_auto_20161205_1239'),
    ]

    operations = [
        migrations.AddField(
            model_name='client',
            name='student_loan',
            field=models.NullBooleanField(),
        ),
    ]
|
991,804 | 7144ac71e67dd4c1d03a5c434445901c521e97f9 | #!/usr/bin/env python
import os
import yaml
try:
from qutebrowser import qutebrowser, app
from qutebrowser.misc import ipc
except ImportError:
print("error: qutebrowser missing.")
exit(1)
def session_save():
    """Ask the running qutebrowser instance (via its IPC socket) to save the
    current session under the name ``get_urls``.

    NOTE(review): relies on qutebrowser internals (`ipc._get_socketname`,
    `app.standarddir`) that are private API and may move between versions.
    """
    args = qutebrowser.get_argparser().parse_args()
    app.standarddir.init(args)
    socket = ipc._get_socketname(args.basedir)
    ipc.send_to_running_instance(
        socket, [":session-save get_urls"], args.target
    )
session_save()

# Read back the session file just written and print every tab's URL.
# NOTE(review): the IPC save above is handled by the other process — the file
# may not be (re)written yet when we read it here; verify timing.
home = os.environ.get("HOME")  # assumes HOME is set (POSIX) — TODO confirm
session = os.path.join(home, ".local/share/qutebrowser/sessions/get_urls.yml")
with open(session) as f:
    y = yaml.load(f.read(), Loader=yaml.BaseLoader)
# First window, second tab, first history entry — presumably a sanity check.
print(y["windows"][0]["tabs"][1]["history"][0]["url"])
for win in y["windows"]:
    for tab in win["tabs"]:
        url = tab["history"][0]["url"]
        title = tab["history"][0]["title"]
        if url.startswith("data:"):
            # data: URLs are unwieldy; fall back to the last word of the title.
            url = title.split()[-1]
            title = url
        print(url, title)
|
991,805 | 769eb30864bc065e9908e0842d557329bed92bf0 | import tkinter
class Q:
    """Tiny tkinter demo: three buttons, each showing one song snippet in a label."""

    def __init__(self):
        # Snippets shown in the label (runtime strings kept verbatim).
        self.list = [
            'Таю, таю, таю на губах\nКак снежинка таю я в твоих руках\nСтаю, стаю, стаю наших птиц\nБоюсь спугнуть\nДвижением ресниц',
            'Капелькой неба лягу на твою ладонь\nБылью станет небыль, сон исполнится любой\nКапелькою света на ресницы упаду\nИ зимою в лето за собою уведу',
            'Девочкой своею ты меня назови,\nа потом обними,\nа потом обмани,\nА маленькие часики смеются тик-так\nни о чём не жалей\nи люби просто так.',
        ]
        self.main_window = tkinter.Tk()
        self.frame1 = tkinter.Frame(self.main_window)
        self.frame2 = tkinter.Frame(self.main_window)
        # The label tracks this StringVar; the buttons rewrite it.
        self.value = tkinter.StringVar()
        self.label = tkinter.Label(self.frame1, textvariable=self.value)
        self.label.pack()
        self.button1 = tkinter.Button(self.frame2, text='таю', command=self.print_text0)
        self.button2 = tkinter.Button(self.frame2, text='капелькою неба', command=self.print_text1)
        self.button3 = tkinter.Button(self.frame2, text='часики', command=self.print_text2)
        for btn in (self.button1, self.button2, self.button3):
            btn.pack(side='left')
        self.frame1.pack()
        self.frame2.pack(side='bottom')
        tkinter.mainloop()

    def print_text0(self):
        """Show the first snippet."""
        self.value.set(self.list[0])

    def print_text1(self):
        """Show the second snippet."""
        self.value.set(self.list[1])

    def print_text2(self):
        """Show the third snippet."""
        self.value.set(self.list[2])
# Instantiate the app; Q.__init__ enters tkinter.mainloop() and blocks
# until the window is closed.
info = Q()
|
991,806 | d50a699e69342ebf7b088874b86d741cb9d8fcf4 | class Solution(object):
def lexicalOrder(self, n):
"""
:type n: int
:rtype: List[int]
"""
keys = []
for i in xrange(1, n + 1):
key = i
while key < 10000000:
key *= 10
keys.append(key * 10000000 + i)
keys.sort()
return [key % 10000000 for key in keys]
|
991,807 | eaaaf47fbe6b2e97b932e351d27a57ceaa8e2bc6 | import os
# import os.path
class FileCopy(object):
    """File Backup function."""

    def __init__(self, src, dist):
        """Remember the source and destination directories.

        :param src: directory to back up from
        :param dist: directory to back up into
        """
        self.src = src
        self.dist = dist

    def read_files(self):
        """List (and print) every entry in the source directory."""
        entries = os.listdir(self.src)
        print(entries)

    def backup_file(self, file_name):
        """Back up one file by name (not implemented yet)."""
        pass
if __name__ == '__main__':
    # Resolve src/ and dist/ relative to this script's directory and print
    # the source directory's contents as a smoke test.
    base_path = os.path.dirname(os.path.abspath(__file__))
    print(base_path)
    src_path = os.path.join(base_path, 'src')
    dist_path = os.path.join(base_path, 'dist')
    fileC = FileCopy(src_path, dist_path)
    fileC.read_files()
991,808 | f9bf42ebe6f27aaba4f890dfa84bb69a06e4efa4 | # Fields or variables that are declared within a class but outside of any method are known as static variables. We will discuss static variables with the help of an example.
class Dog:
    """Demonstrates a class-level (static) counter shared by all instances."""

    # Declared on the class, outside any method, so every Dog shares it.
    num_of_dogs = 0

    def __init__(self, name="Unknown"):
        self.name = name
        Dog.num_of_dogs += 1  # bump the shared counter on each construction

    @staticmethod
    def get_num_of_dogs():
        """Print (not return) the running total of Dog instances."""
        print("Total number of dogs = {}".format(Dog.num_of_dogs))
def main():
    # Create two dogs, then report the shared counter (prints 2 on a
    # fresh interpreter).
    Teddy = Dog("Teddy")
    Tuffy = Dog("Tuffy")
    Dog.get_num_of_dogs()

main()
991,809 | 91c92cac9b6819a8f28d17aed76cd3579f4f51c4 | # coding=utf-8
import base64
import rsa
__all__ = ['rsa_encrypt']
def _str2key(s):
# 对字符串解码
b_str = base64.b64decode(s)
if len(b_str) < 162:
return False
hex_str = ''
# 按位转换成16进制
for x in b_str:
h = hex(x)[2:]
h = h.rjust(2, '0')
hex_str += h
# 找到模数和指数的开头结束位置
m_start = 29 * 2
e_start = 159 * 2
m_len = 128 * 2
e_len = 3 * 2
modulus = hex_str[m_start:m_start + m_len]
exponent = hex_str[e_start:e_start + e_len]
return modulus, exponent
# *** RSA encryption *** #
def rsa_encrypt(s, pubkey_str):
    '''
    RSA-encrypt data with a base64-encoded public key; returns base64 text.

    :param s: raw input data (bytes — rsa.encrypt expects bytes)
    :param pubkey_str: base64-encoded public-key blob (parsed by _str2key)
    :return: base64-encoded ciphertext as str
    '''
    # NOTE(review): _str2key returns False for short input, in which case
    # key[0] below raises TypeError — callers must pass a well-formed key.
    key = _str2key(pubkey_str)
    modulus = int(key[0], 16)
    exponent = int(key[1], 16)
    pubkey = rsa.PublicKey(modulus, exponent)
    # d1 = s.decode()
    # print(d1)
    d2 = rsa.encrypt(s, pubkey)
    d3 = base64.b64encode(d2)
    d4 = d3.decode()
    return d4
|
991,810 | b5ed887a1925ac723eda29f797430fb5697e51ca | #
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from abc import ABC, abstractmethod
from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple
import requests
from airbyte_cdk.models import SyncMode
from airbyte_cdk.sources import AbstractSource
from airbyte_cdk.sources.streams import Stream
from airbyte_cdk.sources.streams.http import HttpStream
from airbyte_cdk.sources.streams.http.auth import HttpAuthenticator
class PivotalTrackerStream(HttpStream, ABC):
    """Base stream for the Pivotal Tracker v5 REST API with offset pagination."""

    url_base = "https://www.pivotaltracker.com/services/v5/"
    primary_key = "id"

    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
        """Derive the next offset from Tracker's pagination response headers."""
        headers = response.headers
        if "X-Tracker-Pagination-Total" not in headers:
            # Endpoint is not paginated.
            return None
        limit = int(headers["X-Tracker-Pagination-Limit"])
        returned = int(headers["X-Tracker-Pagination-Returned"])
        offset = int(headers["X-Tracker-Pagination-Offset"])
        # A short page means there is nothing left to fetch.
        return {"offset": offset + limit} if returned >= limit else None

    def request_params(
        self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None
    ) -> MutableMapping[str, Any]:
        """Pass the pagination offset through as a query parameter."""
        if not next_page_token:
            return {}
        return {"offset": next_page_token["offset"]}

    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
        """Every endpooint answers with a JSON list; yield its elements."""
        yield from response.json()
class Projects(PivotalTrackerStream):
    # Top-level /projects endpoint; also used to discover accessible project ids.
    def path(
        self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
    ) -> str:
        return "projects"
class ProjectBasedStream(PivotalTrackerStream):
    """Base for endpoints that live under /projects/{project_id}/..."""

    @property
    @abstractmethod
    def subpath(self) -> str:
        """
        Within the project. For example, "stories" producing:
        https://www.pivotaltracker.com/services/v5/projects/{project_id}/stories
        """

    def __init__(self, project_ids: List[str], **kwargs):
        super().__init__(**kwargs)
        self.project_ids = project_ids

    def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
        """Build the per-project endpoint path for the current slice."""
        return "projects/{}/{}".format(stream_slice["project_id"], self.subpath)

    def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]:
        """One slice per configured project id."""
        return ({"project_id": pid} for pid in self.project_ids)
class Stories(ProjectBasedStream):
    # /projects/{id}/stories
    subpath = "stories"

class ProjectMemberships(ProjectBasedStream):
    # /projects/{id}/memberships
    subpath = "memberships"

class Labels(ProjectBasedStream):
    # /projects/{id}/labels
    subpath = "labels"

class Releases(ProjectBasedStream):
    # /projects/{id}/releases
    subpath = "releases"

class Epics(ProjectBasedStream):
    # /projects/{id}/epics
    subpath = "epics"
class Activity(ProjectBasedStream):
    """Project activity feed; records are keyed by "guid" instead of "id"."""

    subpath = "activity"
    primary_key = "guid"

    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
        """Flatten the nested project id onto each activity record."""
        for item in super().parse_response(response, **kwargs):
            if "project" in item:
                item["project_id"] = item["project"]["id"]
            yield item
# Custom token authenticator because no "Bearer": Tracker wants the raw token
# in its own header rather than an Authorization: Bearer scheme.
class PivotalAuthenticator(HttpAuthenticator):
    def __init__(self, token: str):
        # API token from the connector config.
        self._token = token

    def get_auth_header(self) -> Mapping[str, Any]:
        # Header dict merged into every outgoing request.
        return {"X-TrackerToken": self._token}
# Source
class SourcePivotalTracker(AbstractSource):
    """Airbyte source: discovers accessible projects, exposes one stream per entity."""

    @staticmethod
    def _get_authenticator(config: Mapping[str, Any]) -> HttpAuthenticator:
        # "api_token" comes from the connector's spec/config.
        token = config.get("api_token")
        return PivotalAuthenticator(token)

    @staticmethod
    def _generate_project_ids(auth: HttpAuthenticator) -> List[str]:
        """
        Args:
            auth: authenticator used to call the /projects endpoint
        Returns:
            List[str]: List of project ids accessible by the api_token
        """
        projects = Projects(authenticator=auth)
        records = projects.read_records(SyncMode.full_refresh)
        project_ids: List[str] = []
        for record in records:
            project_ids.append(record["id"])
        return project_ids

    def check_connection(self, logger, config) -> Tuple[bool, any]:
        # Listing projects both validates the token and exercises the API;
        # any exception propagates and is reported by the CDK.
        auth = SourcePivotalTracker._get_authenticator(config)
        self._generate_project_ids(auth)
        return True, None

    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
        # Project-scoped streams all share the discovered ids and the auth.
        auth = self._get_authenticator(config)
        project_ids = self._generate_project_ids(auth)
        project_args = {"project_ids": project_ids, "authenticator": auth}
        return [
            Projects(authenticator=auth),
            Stories(**project_args),
            ProjectMemberships(**project_args),
            Labels(**project_args),
            Releases(**project_args),
            Epics(**project_args),
            Activity(**project_args),
        ]
|
991,811 | 9e56921f41f1b8eebfdb804ea555454615304986 | inp = int(input())
l=[]
for _ in range(inp):
l.append(input())
if(sorted(l)==l):print("INCREASING")
elif(sorted(l,reverse=True)==l):print("DECREASING")
else:print("NEITHER") |
991,812 | 622b6488810a8fe4d84905da62fe276145104b6f | import os
USE_PICAMERA = int(os.environ.get('USE_PICAMERA', 0))
FRAME_SLEEP = float(os.getenv('FRAME_SLEEP', 0))
interested_objects = {
'person',
'bottle',
'monitor',
'tv'
} |
991,813 | f5f4dc8aaf4bafaffcabc2563d1a9b073065e71d | from bs4 import BeautifulSoup
from HTMLParser import HTMLParser
import requests
import Queue
import sys
import httplib
import urllib2
import html2text
import time
# Python 2 only: force UTF-8 default encoding and allow deep recursion for
# the recursive crawl in MyClass.mymethod.
reload(sys)
sys.setdefaultencoding("UTF-8")
sys.setrecursionlimit(9000)

q=Queue.Queue()  # BFS frontier of URLs still to visit
visited=[]  # URLs already crawled (capped at 90)
pagelink=""  # URL of the page currently being parsed
depth=dict()  # URL -> crawl depth; the seed URL (argv[2]) starts at 1
depth[sys.argv[2]]=1
basepage=[]  # canonical page URLs (fragment stripped) seen so far
keywords = []  # filled in main() with argv[1] and its Title-case form
class MyHTMLParser(HTMLParser):
    """Collects English-Wikipedia article links from anchor tags into the queue."""

    def handle_starttag(self, tag, attrs):
        # Inspect every tag's attributes for href values pointing at
        # /wiki/... or absolute https://en.wikipedia.org/... pages.
        try:
            global pagelink
            for t in attrs:
                if t[0]=='href':
                    if type(t[1]) is str:
                        if t[1].startswith('/wiki') or t[1].startswith('https://en.wikipedia.org'):
                            s=t[1]
                            if t[1].startswith('/wiki'):
                                s = 'https://en.wikipedia.org' + s  # make relative links absolute
                            if s not in q.queue and s not in visited and s != 'https://en.wikipedia.org':
                                # Exactly one ':' (the URL scheme) and no '#'
                                # means a plain article page.
                                if not s.__contains__("#") and s.count(":") == 1:
                                    if s not in basepage:
                                        basepage.append(s)
                                        q.put(s)
                                        if s != pagelink:
                                            depth[s] = depth[pagelink] + 1
                                elif s.__contains__("#") and s.count(":") == 1:
                                    # Deduplicate on the canonical part before '#'.
                                    st = s.split("#")
                                    if st[0] not in basepage:
                                        basepage.append(st[0])
                                        q.put(s)
                                        if s != pagelink:
                                            depth[s] = depth[pagelink] + 1
                                elif not s.count(":") > 1:
                                    q.put(s)
                                    if s != pagelink:
                                        depth[s] = depth[pagelink] + 1
        except:
            print("Unexpected error:", sys.exc_info()[0])
            raise
class MyClass:
    """Crawler driver: fetch pages, feed them to the parser, recurse on the queue."""

    def mymethod(self,link):
        # Crawl starting at `link`; stop after 90 visited pages or depth > 5.
        try:
            global pagelink
            parser = MyHTMLParser()
            while visited.__len__()<90 and depth[link]<=5:
                time.sleep(1)  # be polite to the server
                page = requests.get(link)
                pagelink=link
                soup = BeautifulSoup(page.content, 'html.parser')
                for tag in soup.find_all('html'):
                    if tag.get('lang')=='en':  # English pages only
                        # Only parse pages whose URL mentions one of the keywords.
                        if link.__contains__(keywords[0]) or link.__contains__(keywords[1]):
                            parser.feed(page.content)
                            if link not in visited:
                                visited.append(link)
                                print "Link crawled:", link
                if visited.__len__()==90:
                    break
                else:
                    self.mymethod(q.get())  # recurse on the next queued URL
        except Exception as inst:
            print "Link:",link
            print(type(inst))
            print(inst.args)
            print(inst)
            # NOTE(review): the unpack below assumes exactly two exception
            # args; other exceptions raise ValueError here instead.
            x, y = inst.args
            print('x =', x)
            print('y =', y)
            print "Link error:", link
def main():
    # argv[1]: topic keyword, argv[2]: seed URL.
    keywords.append(sys.argv[1])
    keywords.append(sys.argv[1].title())
    myobject = MyClass()
    myobject.mymethod(sys.argv[2])
    print "Size of Queue:",q.qsize()
    # Dump the visited URLs, one per line.
    f = open('Task2A.txt', 'w')
    for v in visited:
        f.write("%s\n" % v)
    f.close()
    # Re-fetch each visited page and save its text as BFS_file_text<N>.txt.
    counter=0
    for v in visited:
        page = urllib2.urlopen(v)
        html_content = page.read()
        rendered_content = html2text.html2text(html_content)
        filename='BFS_file_text'+str(counter)+'.txt'
        counter+=1
        html_file = open(filename, 'w')
        html_file.write(rendered_content)
        html_file.close()

main()
991,814 | d20bea2a973bcc0e12e99e3179f582ee075827b6 | import matplotlib.pyplot as plt
# Use a CJK-capable font so the Chinese labels render.
font = {'family' : 'DFKai-SB'}
plt.rc('font', **font)

# Yearly unit sales per brand.
listIYearX = [2014, 2015, 2016, 2017, 2018, 2019]
listIPhoneY = [43000, 31000, 70500, 68000, 85000, 24000]
plt.bar(listIYearX, listIPhoneY, label="iPhone")
listAsusY = [23000, 36000, 40500, 58000, 65000, 44000]
plt.bar(listIYearX, listAsusY,label="ASUS")
listMiY = [13000, 26000, 50500, 68000, 75000, 54000]
# NOTE(review): all three series use the same x positions, so bars are drawn
# on top of one another — consider offsetting x per series or stacking.
plt.bar(listIYearX, listMiY, label="小米")
plt.title("手機歷年銷售量")
plt.xlim(2013, 2020)
plt.ylim(0, 110000)
plt.xlabel('年度')
plt.ylabel('銷售量')
plt.legend()
plt.grid(True)
plt.show()
|
991,815 | f0a47e50cb3d335bce21983a89829eca282edaa6 | a,b,c = map(int,input().split())
print(-1 if b >= c else int((-(a/(b-c))+1)))
|
991,816 | 727563431819bdd485a1b782c2924ef0e1b28edb | '''
Created on December 9, 2015
@author: Maggie Mallernee
'''
import parse
#import plot_adiabatic as pa
#import star_sampler1 as get_small_in
#import plot_am_zcomp as L
#import file_o_projects as get_big_in #THIS THE ONE THAT PRINTS
#import resarg_plots as resp
#import resonance_plots as rp
#import resonance_ID_p1 as algo1
#import resonance_ID_p2 as algo2
#import get_output_files as out
'''CREATING INPUT FILES'''
#get_small_in.star_sample_file_writer("small.in", 10**7, 100, 30) #m_cen, num_stars_to_sample, num_to_include (smallest by peri)
#get_small_in.sample_multiple(10**7, 100, [1, 3, 10, 30])
#get_big_in.frame_format('JUPITER', 10000000.0, 100000.0, 100.0, 100.0) #name, m_cen, m_big, a_big, r_start (equality of a_big and r_start means eccentricity of 0)
'''READING OUTPUT FILES'''
#parse.get_special("info.out") #reads info.out
#tde_file_names = parse.get_tde_file_names()
'''PLOTTING DATA'''
#parse.plot_special("info.out") #plots a, e, i of the special cases
#parse.plot_TDE("info.out")
#rp.plot_elements("S70.aei", "JUPITER.aei", 10000000.)
#pa.plot_adiabatic("S5.aei", 10000000., 1., 1.) #num_stars, m_cen, p, q
#rp.plot_res_ratio("S70.aei", "JUPITER.aei", 1., 1., 100000., 10000000.)
#L.plot_z_ang_mom(25) #num_stars
#resp.plot_res_arg1(95, 'Jupiter.aei', 1., 1., 1.)#num_stars, big_body, p, q, row
'''AUTOMATIC RESONANCE CHECKING'''
#star_file_name = "S1.aei"
#res_table = algo1.get_res_timetable(star_file_name, "JUPITER.aei")
#phi_table = algo2.get_phi_table(star_file_name, "JUPITER.aei", res_table, 10.)
#algo2.plot_all_phi(phi_table)
#for special bodies
#for star_file_name in tde_file_names:
# res_table = algo1.get_res_timetable(star_file_name, "JUPITER.aei")
#
# phi_table = algo2.get_phi_table(star_file_name, "JUPITER.aei", res_table)
# algo2.plot_all_phi(phi_table)
'''OUTPUT FILE PRODUCTION'''
#out.get_sum_file(25, "JUPITER.aei", 1.)
#parse.get_special_file("info.outE")
#parse.get_info_single_batch("analysisTest1.out", 32, 30)
#parse.plot_tde_vs_t(32)
#parse.write_tde_times(32)
#parse.plot_tde_vs_e(32, 10000.)
|
991,817 | dcc38f6f0361a35c94b715231e9b6daf025d00f6 | import re
import json
import logging
from channels import Group
from channels.sessions import channel_session
log = logging.getLogger(__name__)

@channel_session
def ws_connect(message):
    """Handle a new WebSocket connection (stub — intentionally a no-op)."""
    pass

@channel_session
def ws_receive(message):
    """Handle an incoming WebSocket frame (stub — intentionally a no-op)."""
    pass

@channel_session
def ws_disconnect(message):
    """Handle a WebSocket disconnect (stub — intentionally a no-op)."""
    pass
|
991,818 | ded658d49018a17f29f82dc8e7f4dd07039a7706 | #!/usr/bin/env python
from distutils.core import setup

# Minimal distutils packaging script.
# NOTE(review): distutils is deprecated (removed in Python 3.12) — consider
# setuptools; a space in `name` is also unusual for a distribution name.
setup(name='Top Classifier',
      version='1.0',
      long_description=open("README.md").read(),
      author='Rahul Desai',
      author_email='rahuldesai@berkeley.edu',
      packages=['classifier']
      )
|
991,819 | 76d5e8b67a9aae800c42e19da6cd18d3fe0e2c6a | import zipfile
# import re
import urllib
# Python Challenge "channel" level (Python 2): each N.txt in channel.zip says
# "Next nothing is M"; follow the chain and print each member's zip comment.
o, number, file = [], "90052", "%s.txt"
content = "Next nothing is (\d+)"
zip_url = "http://www.pythonchallenge.com/pc/def/channel.zip"
urllib.urlretrieve(zip_url, "channel.zip")
zip_archive = zipfile.ZipFile("channel.zip")

# Map member filename -> its archive comment.
zipdict = {}
for info in zip_archive.infolist():
    zipdict[info.filename] = info.comment

current_nothing = '90052'
while True:
    print zipdict[current_nothing + '.txt'],  # Py2 print; trailing comma suppresses newline
    page = zip_archive.read(current_nothing + '.txt')
    try:
        current_nothing = page.split('Next nothing is ')[1]
    except IndexError:
        break  # the last file carries no "Next nothing" pointer
# zobj = StringIO()
# zobj.write(urllib.urlopen("http://pythonchallenge.com/pc/def/channel.zip").read())
# z = zipfile.ZipFile(zobj)
# filenum = "90052"
# lcomment = []
# while True:
# if filenum.isdigit():
# filename = filenum + '.txt'
# lcomment.append(z.getinfo(filename).comment)
# info = z.read(filename)
# filenum = info.split(' ')[-1]
# else:
# break
# z.close()
# print ''.join(lcomment)
|
991,820 | 58ab73d661d2af3689c0a56f1e495ee0aa880c8f | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Initial state for a pre-existing (legacy) database: every model is
    # declared with `managed = False` and no fields, so Django records the
    # tables in its migration state without creating or altering them.

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Areas',
            fields=[
            ],
            options={
                'db_table': 'areas',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Carreras',
            fields=[
            ],
            options={
                'db_table': 'carreras',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='ClasificacionInstitucion',
            fields=[
            ],
            options={
                'db_table': 'clasificacion_institucion',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Instituciones',
            fields=[
            ],
            options={
                'db_table': 'instituciones',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='InstitucionesCarreras',
            fields=[
            ],
            options={
                'db_table': 'instituciones_carreras',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='InstitucionesJornada',
            fields=[
            ],
            options={
                'db_table': 'instituciones_jornada',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='InstitucionesModalidades',
            fields=[
            ],
            options={
                'db_table': 'instituciones_modalidades',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='InstitucionesNiveles',
            fields=[
            ],
            options={
                'db_table': 'instituciones_niveles',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Jornada',
            fields=[
            ],
            options={
                'db_table': 'jornada',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Modalidades',
            fields=[
            ],
            options={
                'db_table': 'modalidades',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Niveles',
            fields=[
            ],
            options={
                'db_table': 'niveles',
                'managed': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='TipoInstitucion',
            fields=[
            ],
            options={
                'db_table': 'tipo_institucion',
                'managed': False,
            },
            bases=(models.Model,),
        ),
    ]
|
991,821 | 787703badfea24d43f9bdd5fda1e974cc9ea28bb | #coding: UTF-8
from keras.datasets import mnist
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Dropout,Flatten
from keras.optimizers import Adam
from keras.layers.convolutional import Convolution2D, MaxPooling2D,UpSampling2D,Conv2D,Conv2DTranspose
from keras.callbacks import EarlyStopping
from sklearn.model_selection import train_test_split
import cv2
import numpy as np
import os
import random
# Layer / training hyper-parameters.
nb_filters=128  # conv filters per layer
nb_conv=3  # conv kernel size
nb_pool=2  # pooling window
nb_classes=10
nb_epoch=200
batch_sample=64#100 previously
image_w=32  # working crop width
image_h=32  # working crop height
split=4  # down-scale factor for the low-res network input
pixels=image_w*image_h*3
nb_iter=int(10000/batch_sample)  # iterations per "epoch" of 10k samples
def img2arr(img_path):
    """Load an image file and return it as a numpy array (BGR order, per cv2)."""
    return np.array(cv2.imread(img_path))
def arr2img(arr):
    """Write a [0, 1]-scaled array to gray_scale.png as 0-255 pixel values.

    Fix: the original did ``arr *= 255``, silently mutating the caller's
    array in place; scale into a fresh array instead.
    """
    cv2.imwrite("gray_scale.png", arr * 255)
def img_trim(image, save_dir):
    """Crop the central region of `image` and save it with a .png extension."""
    cropped = image[62 - 2:62 * 3 + 2, 62 - 2:62 * 3 + 2]
    base, _ = os.path.splitext(save_dir)
    cv2.imwrite(base + '.png', cropped)
def img_comp(image,save_dir):
    """Down-sample an image to 8x8 by averaging each 4x4 block, then save it.

    NOTE(review): the fixed (8, 8, 3) output only matches when the input is
    32x32 (image_w/4 == 8) — confirm callers always pass trimmed crops.
    """
    compress=np.zeros([8,8,3])
    for i in range(int(image_w/4)):
        for j in range(int(image_h/4)):
            temp=image[i*4:(i+1)*4,j*4:(j+1)*4]
            temp=temp.astype('float32')  # avoid uint8 overflow while summing
            temp=temp.sum(axis=1)
            # print("temp_raw")
            # print(temp)
            temp=temp.sum(axis=0)
            temp/=16  # mean of the 4x4 block, per channel
            temp=temp.astype('uint8')
            compress[i,j]=temp
    cv2.imwrite(save_dir,compress)
    # return compress
def load_data(directory,batch_size):
    """Sample `batch_size` images from `directory` for super-resolution training.

    Returns (x_train, y_train): x is each crop resized down by `split`
    (the low-res input), y the full-size crop (the target); both float32
    scaled to [0, 1].
    """
    caltech_dir=directory
    file_list=os.listdir(caltech_dir)
    x_train=[]  # low-resolution network inputs
    y_train=[]  # full-resolution targets
    file_list=random.sample(file_list,batch_size)
    for file_path in file_list:
        image=cv2.imread(caltech_dir+"/"+file_path)
        image=image[1:image_h+1,1:image_w+1]  # crop to image_h x image_w
        image_train=cv2.resize(image,(int(image_h/split),int(image_w/split)))
        image=np.array(image)
        image_train=np.array(image_train)
        y_train.append(image)
        x_train.append(image_train)
    x_train=np.array(x_train)
    y_train=np.array(y_train)
    x_train=x_train.astype('float32')
    y_train=y_train.astype('float32')
    x_train/=255
    y_train/=255
    print(x_train.shape)
    print(y_train.shape)
    return x_train,y_train
'''
file_list=os.listdir("./lfw_eval")
for file_path in file_list:
image=cv2.imread("./lfw_eval/"+file_path)
img_trim(image,"./lfw_eval_trim/"+file_path)
'''
# Batch-compress every image in ./lfw_eval into ./lfw_eval_comp (8x8 means).
comp_dir="./lfw_eval"
file_list=os.listdir(comp_dir)
for file_path in file_list:
    image=cv2.imread(comp_dir+"/"+file_path)
    img_comp(image,comp_dir+"_comp/"+file_path)
|
991,822 | 4602dc6cad28d163f98ca357bc5d1ea5ec18430e | import numpy as np
array = np.arange(1, 13)
print(array)
|
991,823 | c31dacfc778239ce5cb2bcb9ff446235dc478fc5 | import tensorflow as tf
import numpy as np
import scipy.linalg as lina
import scipy.stats as stats
import matplotlib.pyplot as plt
dim = 256  # number of grid points / sample dimension
sigma = .1  # observation-noise standard deviation

#############generate data
# Noisy sine observations on [0, 2*pi], shifted up by 1.
x = np.linspace(0, 2*np.pi, dim)
y = np.sin(x) + sigma*np.random.standard_normal(dim) + 1
plt.plot(x,y)
####################pcn
# Function Definitions:
def calculate_cov(x, ufunc, l=.1):
    """Squared-exponential covariance matrix for the points in `x`.

    C[i, j] = exp(-d(x_i, x_j)^2 / (2 l)), with the distance d given by
    `ufunc` and length-scale parameter `l`.

    Fix: the original omitted the minus sign, so entries *grew* with
    distance (exp(+d^2/(2l))) instead of decaying like an RBF kernel.
    """
    N = len(x)
    C = np.zeros((N, N))
    for i in range(N):
        for j in range(N):
            C[i, j] = np.exp(-ufunc(x[i], x[j]) ** 2 / (2 * l))
    return C
phi = lambda x, y: np.linalg.norm(x-y)  # misfit between a sample and the data
C = calculate_cov(x, phi)

# Preconditioned Crank-Nicolson (pCN) Metropolis sampling.
beta = 0.01  # proposal step size
u = np.zeros_like(x)
path = []
for k in range(100):
    # pCN proposal. NOTE(review): standard pCN perturbs with a draw of
    # covariance C (e.g. via a Cholesky factor); C @ randn has covariance
    # C C^T — confirm the intended scaling.
    u_hat = np.sqrt(1-beta**2)*u + beta* C@np.random.standard_normal(dim)
    # Metropolis accept/reject on the misfit difference.
    if np.exp(phi(u,y) - phi(u_hat, y)) >= np.random.uniform():
        u = u_hat
    path.append(u)
path = np.array(path)

# Plot the last few samples over the data.
for p in path[-10:-1]:
    plt.plot(x,p, 'x')
plt.show()
|
991,824 | adc5fbe48cc382bfe941f9df03c361d17b57f1a2 | import json
import os
from pathlib import Path
from .settings import config
from .ethereum import get_web3, get_logs
from web3 import Web3
from web3._utils.events import (
construct_event_topic_set,
)
from web3.middleware import geth_poa_middleware, local_filter_middleware
from web3.contract import get_event_data
from web3.gas_strategies.rpc import rpc_gas_price_strategy
def get_contract_abi():
    """Load the BalancerPool contract ABI bundled next to this module.

    Fix: the original passed an unclosed file handle to json.load; a
    context manager releases the descriptor deterministically.
    """
    abi_path = Path(__file__).resolve().parent / 'abi' / 'BalancerPool.json'
    with open(abi_path) as fh:
        return json.load(fh)
def get_pair(address, web3):
    # Bind the Balancer pool ABI to an on-chain contract address.
    return web3.eth.contract(address,
                             abi=get_contract_abi())
def set_pools():
    """Attach contract handles and processor callbacks to configured balancer pools."""
    web3 = get_web3()
    for pool in config['pools']:
        # Only pools explicitly typed "balancer"; the default type is uniswap.
        if pool.get('type', 'uniswap') == 'balancer':
            pool['contract'] = get_pair(Web3.toChecksumAddress(pool['address']), web3)
            pool['history_processor'] = process_pool_history
            pool['weight_processor'] = get_pool_weight
def process_pool_history(pool, per_block, start_height, end_height):
    """Compute each LP's time-weighted share of `per_block` rewards.

    Replays the pool token's Transfer events between the reward start and
    `end_height`, integrating each holder's balance over blocks, and returns
    (reward_owed, start_height, end_height).
    """
    abi = pool['contract'].events.Transfer._get_event_abi()
    web3 = get_web3()
    topic = construct_event_topic_set(abi, web3.codec)
    weights = dict()  # addr -> integral of balance over blocks held
    balances = dict()  # addr -> current LP-token balance
    reward_start = max(pool['start_height'], config['reward_start'], start_height)
    last_height = reward_start
    end_height = min(web3.eth.blockNumber, end_height)
    def update_weights(since, current):
        # Accrue balance * elapsed-blocks for every positive holder.
        for addr, value in balances.items():
            if value > 0:
                weights[addr] = weights.get(addr, 0) + (value * (current-since))
    for i in get_logs(web3, pool['contract'], pool['start_height'], topics=topic):
        evt_data = get_event_data(web3.codec, abi, i)
        args = evt_data['args']
        height = evt_data['blockNumber']
        if height > end_height:
            break
        if height > reward_start:
            update_weights(last_height, height)
        # Move `amt` LP tokens from src to dst.
        balances[args['src']] = balances.get(args['src'], 0) - args.amt
        balances[args.dst] = balances.get(args.dst, 0) + args.amt
        last_height = height
    # Account for the tail segment up to end_height.
    height = end_height
    update_weights(last_height, height)
    total_weight = sum(weights.values())
    total_balance = sum([b for b in balances.values() if b > 0])
    weights = {a: w / total_weight for a, w in weights.items() if w > 0}
    print(weights)
    balparts = {a: w / total_balance for a, w in balances.items() if w > 0}
    print(balparts)
    total_blocks = height - reward_start
    reward_owed = {a: w*per_block*total_blocks for a, w in weights.items()}
    print(reward_owed)
    print("Total", sum(reward_owed.values()))
    return reward_owed, start_height, end_height
def get_pool_weight(pool):
    """Return the pool's token reserve scaled by the inverse of its normalized weight.

    getNormalizedWeight returns an 18-decimal fixed-point fraction; dividing
    by 10**18 converts it to a float before inverting.
    """
    aleph_weight = pool['contract'].functions.getNormalizedWeight(
        Web3.toChecksumAddress(config['token']['address'])).call() / (10**18)
    pool_ratio = 1/aleph_weight
    aleph_reserve = pool['contract'].functions.getBalance(
        Web3.toChecksumAddress(config['token']['address'])).call()
    return aleph_reserve * pool_ratio
991,825 | fe993d32f1b06ee048a93f481036516edb0e1fcc | import urllib
import re
core_ext=".cfm"  # pages on this site are ColdFusion
save_dir = "C:\\Users\\acron\\Desktop\\fvgh\\"  # local mirror target
base_url = 'http://www.forestviewguesthouse.co.uk/'

# (compiled regex, unused flag) pairs; each regex captures one page/asset link.
tests = [
    (re.compile('.*href\s*=\s*\"([^\"]*\.cfm)'), 0),
    (re.compile('.*href\s*=\s*\"([^\"]*\.css)'), 0),
    (re.compile('.*src\s*=\s*\"([^\"]*\.png)'), 0),
    (re.compile('.*src\s*=\s*\"([^\"]*\.jpg)'), 0),
    (re.compile('.*src\s*=\s*\"([^\"]*\.gif)'), 0),
    (re.compile('.*url\(([^\"]*\.png)'), 0),
    (re.compile('.*url\(([^\"]*\.jpg)'), 0),
    (re.compile('.*url\(([^\"]*\.gif)'), 0),
]
# -----------------------------------------------
urls = [base_url + 'index' + core_ext]  # crawl frontier (grown while iterating)
excluded = []  # URLs already fetched
# Python 2 mirror loop; appending to `urls` while iterating is what drives
# the crawl forward.
for url in urls:
    # Skip anything already fetched (index() raises when absent).
    try:
        excluded.index(url)
        continue
    except Exception:
        pass
    print 'Opening %s...' % url
    excluded.append(url)
    if url.find(core_ext) >= 0 or url.find('.css') >= 0:
        # Text resource: save a copy, then scan it for further links/assets.
        result = urllib.urlopen(url)
        response = result.read()
        inf = open(save_dir + url.replace(base_url, ''), 'w')
        inf.write(response)
        inf.close()
        response = response.replace('\t', '')
        response_lines = response.split('\n')
        for line in response_lines:
            for reg in tests:
                results = reg[0].match(line)
                if results:
                    print 'Matched: ' + str(results.group(1))
                    urls.append(base_url + results.group(1))
    else:
        # Binary asset: just download it, ignoring failures.
        try:
            urllib.urlretrieve(url, save_dir + url.replace(base_url, ''))
        except IOError:
            pass
|
991,826 | 56717d776522035c27dce4a86e4ea05831497382 | # encoding:utf-8
import requests
import xlrd
import hashlib
import pymysql
import time
import urllib3
# 发送验证码
def Sms(http, invitee, headers):
    """Request an SMS login code for *invitee*; return (code, biz)."""
    payload = {
        "data": {
            "phone": str(invitee),
            "code_type": "SMS_LOGIN",
        }
    }
    urllib3.disable_warnings()  # silence the self-signed-certificate warning
    response = requests.post(url=http + '/site/sms', json=payload,
                             headers=headers, verify=False)
    data = response.json()['data']
    return data['code'], data['biz']
# 填写邀请码,注册
def Sign(http, invitee, code, biz, sign, inviter):
    """Register *invitee* using the SMS *code* and the inviter's referral code.

    Returns the raw HTTP response.
    """
    payload = {
        'data': {
            'phone': str(invitee),
            'code': code,
            'biz': biz,
            'sign': sign,
            'invite': str(inviter),
        }
    }
    urllib3.disable_warnings()
    # NOTE(review): this uses the module-level `headers`, not a parameter —
    # confirm that is intentional.
    return requests.post(url=http + '/v22/site/sign', json=payload,
                         headers=headers, verify=False)
# 连接数据库
def Connect_mysql():
    """Open the MySQL connection and return (connection, cursor).

    SECURITY(review): credentials are hard-coded in source — move to config.
    """
    db = pymysql.connect('ibuyibuy.mysql.rds.aliyuncs.com', 'ibuy_test', 'ibuy9735!$)*', 'ibuy_test_v2')
    cursor = db.cursor()
    return db, cursor
# 修改密码
def Change_pwd(db, cursor, invitee):
    """Reset *invitee*'s password to the fixed md5 test value; commit on success."""
    statement = (
        "UPDATE amc_user SET `password` = "
        "'14e1b600b1fd579f47433b88e8d85291' WHERE phone = '{}'".format(invitee)
    )
    try:
        cursor.execute(statement)
    except Exception:
        db.rollback()
        print('{} ---------- 密码修改失败'.format(invitee))
    else:
        db.commit()
        print('{} ---------- 密码修改成功'.format(invitee))
# 修改成长值为990
def Change_grow_990(db, cursor, invitee):
    """Force *invitee*'s growth value to 990; commit on success, roll back on error."""
    statement = (
        "UPDATE amc_user_account SET growth_value = 990 "
        "WHERE phone = '{}'".format(invitee)
    )
    try:
        cursor.execute(statement)
    except Exception:
        db.rollback()
        print('{} ---------- 成长值修改990失败'.format(invitee))
    else:
        db.commit()
        print('{} ---------- 成长值修改990成功'.format(invitee))
# 修改成长值为1990
def Change_grow_1990(db, cursor, invitee):
    """Force *invitee*'s growth value to 1990; commit on success, roll back on error."""
    statement = (
        "UPDATE amc_user_account SET growth_value = 1990 "
        "WHERE phone = '{}'".format(invitee)
    )
    try:
        cursor.execute(statement)
    except Exception:
        db.rollback()
        print('{} ---------- 成长值修改1990失败'.format(invitee))
    else:
        db.commit()
        print('{} ---------- 成长值修改1990成功'.format(invitee))
# 关闭数据库
def Close_mysql(db, cursor):
    """Release the cursor first, then the underlying connection."""
    cursor.close()
    db.close()
# 密码登录
def login_pwd(http, headers, invitee, login_text):
    """Log *invitee* in with the fixed test password; return the access token string."""
    payload = {
        'data': {
            'phone': str(invitee),
            'password': '123456',
            'sign': str(login_text),
        }
    }
    urllib3.disable_warnings()
    response = requests.post(url=http + '/v22/site/login', json=payload,
                             headers=headers, verify=False)
    return str(response.json()['data']['access_token'])
# 分享素材圈
def Share_material(headers, http, access_token):
    """Share material id 36 via both endpoints; return the two responses in order."""
    token = str(access_token)
    urllib3.disable_warnings()
    # First call: the material-feed operation endpoint.
    first = requests.post(
        url=http + '/material/operation',
        json={"data": {"mid": "36", "type": 1}, "access_token": token},
        headers=headers, verify=False)
    # Second call: the product-share endpoint for the same material id.
    second = requests.post(
        url=http + '/product/share-product',
        json={'data': {'id': '36', 'type': 1}, "access_token": token},
        headers=headers, verify=False)
    return first, second
# 分享商品
def Share_goods(headers, http, access_token):
    """Share one fixed product (sku 2019021753539898775); return the response."""
    payload = {
        "data": {
            "sku": "2019021753539898775",  # always the same fixed product
            "type": 0,
        },
        "access_token": str(access_token),
    }
    urllib3.disable_warnings()  # silence the https certificate warning
    return requests.post(http + '/product/share-product', json=payload,
                         headers=headers, verify=False)
# 分享汇总
def Share():
    """Share material 36 and the fixed product with the freshly registered account.

    Relies on the module-level globals ``headers``, ``http``, ``access_token``
    and ``invitee`` that the ``__main__`` loop sets up.
    """
    # 分享素材圈
    materials = Share_material(headers=headers, http=http, access_token=str(access_token))
    if materials[0].status_code != 200 or materials[1].status_code != 200:
        print('{} ---------- 注册失败'.format(invitee))
    elif materials[0].status_code == 200 and materials[1].status_code == 200:
        if 'SUCCESS' in materials[0].json()['msg']:
            if 'SUCCESS' in materials[1].json()['msg']:
                print('{} ---------- 账号分享素材圈----------{}'.format(invitee, materials[1].json()['msg']))
            else:
                print('{} ---------- 账号分享素材圈----------{}'.format(invitee, materials[1].json()['msg']))
        else:
            print('{} ----------账号分享素材圈----------{}'.format(invitee, materials[0].json()['msg']))
    # 分享商品
    goods = Share_goods(headers, http, access_token)
    if goods.status_code != 200:
        print('{} ---------- 账号分享商品----------{}'.format(invitee, goods.json()['msg']))
    else:
        # BUG FIX: the original inspected the global registration response
        # (`sign`) here; success of the goods share must be judged from the
        # goods response itself.
        if 'SUCCESS' in goods.json()['msg']:
            print('{} ---------- 账号分享商品----------{}'.format(invitee, goods.json()['msg']))
        else:
            print('{} ---------- 账号分享商品----------{} '.format(invitee, goods.json()['msg']))
def Judge():
    """Report whether the registration request (module-level ``sign``) succeeded."""
    if sign.status_code != 200:
        print('{} ---------- 注册失败'.format(invitee))
    elif 'SUCCESS' in sign.json()['msg']:
        print('{} ---------- 账号创建成功。'.format(invitee) + ' 他的上级是 {}'.format(inviter))
    else:
        print('%s ---------- 账号的错误细信息: ' % invitee + sign.text)
if __name__ == '__main__':
    # http = 'https://apptest.ibuycoinamc.com'  # test environment
    http = 'https://app.ibuycoinamc.com'  # production environment
    headers = {
        'Content-Type': 'application/json;charset=utf-8'
    }
    # Spreadsheet layout: column 0 = invitee phone, column 1 = inviter phone.
    data = xlrd.open_workbook('手机号.xlsx')
    table = data.sheet_by_name('Sheet1')
    rows = table.nrows
    mysql = Connect_mysql()
    db = mysql[0]
    cursor = mysql[1]
    for i in range(1, rows):  # row 0 is the header
        invitee = int(table.row_values(i)[0])  # invitee phone number
        inviter = table.row_values(i)[1]  # inviter phone number
        login_text = 'APP_LOGIN' + '{}'.format(str(invitee))
        # Double-md5 the login signature, matching what the app client does.
        for p in range(2):
            m = hashlib.md5()
            m.update(b'%s' % login_text.encode())
            login_result = m.hexdigest()
            login_text = login_result
        # Request the SMS verification code.
        sms = Sms(http=http, invitee=invitee, headers=headers)
        # The endpoint returns both the code and a transaction id ("biz").
        code = sms[0]
        biz = sms[1]
        sign_text = 'APP_SIGN' + '{}{}'.format(str(invitee), str(code))
        # Double-md5 the registration signature as well.
        for n in range(2):
            m = hashlib.md5()
            m.update(b'%s' % sign_text.encode())
            sign_result = m.hexdigest()
            sign_text = sign_result
        # Register the invitee under the inviter's referral code.
        sign = Sign(http=http, invitee=invitee, code=code, biz=biz, sign=sign_text, inviter=inviter)
        # Report whether registration succeeded.
        Judge()
        # Reset the password (optional step):
        # Change_pwd(db=db, cursor=cursor, invitee=invitee)
        # Set the growth value to 990 (optional step):
        # Change_grow_990(db=db, cursor=cursor, invitee=invitee)
        # Throttle so the backend can keep up with the burst of requests.
        time.sleep(1)
        # Sharing requires a login first (optional steps):
        # login = login_pwd(http=http, headers=headers, invitee=invitee, login_text=login_text)
        # access_token = login
        # for k in range(3):
        #     Share()  # share the material feed and a product
        # Set the growth value to 1990 (optional step):
        # Change_grow_1990(db, cursor, invitee)
        # for k in range(3):
        #     Share()
    Close_mysql(db=db, cursor=cursor)
    print()
    # input('press any key to exit....\n')
|
991,827 | deb2d838dfb254cc2e491fc9a26e54f14ba29c95 | """Train/eval script."""
import logging
import os
import os.path as osp
import time
from collections import OrderedDict
import torch
import detectron2.utils.comm as comm
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.data import MetadataCatalog
from detectron2.engine import default_setup, hooks, launch
from detectron2.evaluation import (
COCOEvaluator,
DatasetEvaluators,
verify_results,
)
# updated code
from src.config import get_cfg
from src import data
from src.engine import default_argument_parser, DefaultTrainer
from src import modeling
class Trainer(DefaultTrainer):
    """
    Thin wrapper around detectron2's "DefaultTrainer" standard training
    workflow; it only adds dataset-aware evaluator construction. For anything
    fancier, fall back to "SimpleTrainer" or a hand-written training loop.
    """
    @classmethod
    def build_evaluator(cls, cfg, dataset_name, output_folder=None):
        """
        Create evaluator(s) for *dataset_name* from its registered
        "evaluator_type" metadata. Only COCO-style datasets are handled;
        any other type raises NotImplementedError.
        """
        if output_folder is None:
            output_folder = os.path.join(cfg.OUTPUT_DIR, "inference")
        evaluators = []
        evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type
        if evaluator_type in ("coco", "coco_panoptic_seg"):
            evaluators.append(COCOEvaluator(dataset_name, cfg, True, output_folder))
        if not evaluators:
            raise NotImplementedError(
                "no Evaluator for the dataset {} with the type {}".format(
                    dataset_name, evaluator_type
                )
            )
        return evaluators[0] if len(evaluators) == 1 else DatasetEvaluators(evaluators)
def setup(args):
    """
    Create configs and perform basic setups.
    """
    cfg = get_cfg()
    cfg.merge_from_file(args.config_file)
    # Command-line overrides ("opts") take precedence over the config file.
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    default_setup(cfg, args)
    return cfg
def main(args):
    """Entry point: eval-only, eval-during-train (checkpoint polling), or train."""
    cfg = setup(args)
    # eval_only and eval_during_train are mainly used for jointly
    # training detection and self-supervised models.
    if args.eval_only:
        model = Trainer.build_model(cfg)
        DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load(
            cfg.MODEL.WEIGHTS, resume=args.resume
        )
        res = Trainer.test(cfg, model)
        if comm.is_main_process():
            verify_results(cfg, res)
        if cfg.TEST.AUG.ENABLED:
            res.update(Trainer.test_with_TTA(cfg, model))
        return res
    elif args.eval_during_train:
        model = Trainer.build_model(cfg)
        check_pointer = DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR)
        saved_checkpoint = None
        best_res = {}
        best_file = None
        # Poll the output directory: whenever a new checkpoint appears, load
        # and evaluate it, tracking the best bbox AP seen so far.
        while True:
            if check_pointer.has_checkpoint():
                current_ckpt = check_pointer.get_checkpoint_file()
                if (
                    saved_checkpoint is None
                    or current_ckpt != saved_checkpoint
                ):
                    check_pointer._load_model(
                        check_pointer._load_file(current_ckpt)
                    )
                    saved_checkpoint = current_ckpt
                    print("evaluating checkpoint {}".format(current_ckpt))
                    # Checkpoint files are named "..._<iteration>.pth".
                    iters = int(
                        osp.splitext(osp.basename(current_ckpt))[0].split("_")[
                            -1
                        ]
                    )
                    res = Trainer.test(cfg, model)
                    if comm.is_main_process():
                        verify_results(cfg, res)
                    if cfg.TEST.AUG.ENABLED:
                        res.update(Trainer.test_with_TTA(cfg, model))
                    print(res)
                    if (len(best_res) == 0) or (
                        len(best_res) > 0
                        and best_res["bbox"]["AP"] < res["bbox"]["AP"]
                    ):
                        best_res = res
                        best_file = current_ckpt
                    print("best so far is from {}".format(best_file))
                    print(best_res)
                    # Stop once the final training iteration has been evaluated.
                    if iters + 1 >= cfg.SOLVER.MAX_ITER:
                        return best_res
            time.sleep(10)
    """
    If you'd like to do anything fancier than the standard training logic,
    consider writing your own training loop or subclassing the trainer.
    """
    trainer = Trainer(cfg)
    trainer.resume_or_load(resume=args.resume)
    if cfg.TEST.AUG.ENABLED:
        trainer.register_hooks(
            [
                hooks.EvalHook(
                    0, lambda: trainer.test_with_TTA(cfg, trainer.model)
                )
            ]
        )
    return trainer.train()
if __name__ == "__main__":
    args = default_argument_parser().parse_args()
    print("Command Line Args:", args)
    # Spawn one worker per GPU (and per machine) around main().
    launch(
        main,
        args.num_gpus,
        num_machines=args.num_machines,
        machine_rank=args.machine_rank,
        dist_url=args.dist_url,
        args=(args,),
    )
|
991,828 | 12b1a210ff790909524dbb3349b72af3f29b1a73 | import random
from collections import defaultdict
from world_defines import *
class GameObject(object):
    """Base map entity: grid position, display symbol, and an action table."""
    def __init__(self, x, y, symbol):
        self.x = x
        self.y = y
        self.symbol = symbol
        # Any action name that was never registered resolves to the no-op handler.
        self.action = defaultdict(lambda: self.no_action)
    def no_action(self):
        """Fallback handler: do nothing."""
        pass
class Player(GameObject):
    """Movable entity; registers its "move" action and steps one tile at a time."""
    def __init__(self, world, x,y, symbol):
        super().__init__(x,y,symbol)
        self.world = world
        self.dx = 0
        self.dy = 0
        self.action["move"] = self.move
    def move(self, nx,ny):
        """Step one tile toward (nx, ny), sliding along obstacles.

        Returns a (where, what, symbol, param) move record describing the old
        and new positions — or False when the target tile is occupied by
        another player.
        """
        x_old, y_old = self.x, self.y
        def return_move():
            # Build the move record from the pre-move and current positions.
            where = (x_old,y_old)
            what = "move"
            symbol = WPLAYER
            param = (self.x,self.y)
            return (where,what,symbol,param)
        # Clamp the target into the map bounds.
        nx = min(self.world.width-1,max(0,nx))
        ny = min(self.world.height-1,max(0,ny))
        dx = nx-self.x
        dy = ny-self.y
        if dx == 0 and dy == 0:
            # print("no movement")
            return return_move()
        # Reduce the delta to a single step of -1/0/+1 per axis.
        if dx != 0:
            dx = int(dx/abs(dx))
        if dy != 0:
            dy = int(dy/abs(dy))
        nx = self.x+dx
        ny = self.y+dy
        # move checks for nx,ny:
        # Diagonal step: both adjacent orthogonal tiles must have ground.
        if dx != 0 and dy != 0:
            if not self.world.get_object(self.x,ny,WGROUND) and not self.world.get_object(nx,self.y,WGROUND):
                return return_move()
        if not self.world.get_object(nx,ny,WGROUND):
            # print("no ground at {},{}".format(nx,ny))
            # No ground at the target: try to slide along one axis, picking
            # the axis to try first at random.
            if random.randint(0,1) == 0:
                if not self.world.get_object(nx,self.y,WGROUND):
                    # print("no ground at {},{}".format(nx,self.y))
                    if not self.world.get_object(self.x,ny,WGROUND):
                        # print("no ground at {},{}".format(self.x,ny))
                        return return_move()
                    nx = self.x
                else:
                    ny = self.y
            else:
                if not self.world.get_object(self.x,ny,WGROUND):
                    # print("no ground at {},{}".format(self.x,ny))
                    if not self.world.get_object(nx,self.y,WGROUND):
                        # print("no ground at {},{}".format(nx,self.y))
                        return return_move()
                    ny = self.y
                else:
                    nx = self.x
        if self.world.get_object(nx,ny,WPLAYER):
            # print("field occupied at {},{}".format(nx,ny))
            # NOTE(review): this path returns False while every other path
            # returns a move record — callers must handle both; confirm intended.
            return False
        # move checks all ok!
        self.world.move_object(self,nx,ny)
        self.x = nx
        self.y = ny
        # print("moving to {},{}".format(nx,ny))
        return return_move()
class Wall(GameObject):
    """Map tile created for the wall symbol (WWALL)."""
    def __init__(self, world, x,y, symbol):
        super().__init__(x,y,symbol)
        self.world = world
class Ground(GameObject):
    """Floor tile; Player.move requires ground (WGROUND) at the target square."""
    def __init__(self, world, x,y, symbol):
        super().__init__(x,y,symbol)
        self.world = world
class Ignore(GameObject):
    """Tile for the WIGNORE symbol — presumably a placeholder cell; confirm usage."""
    def __init__(self, world, x,y, symbol):
        super().__init__(x,y,symbol)
        self.world = world
# Factory table: maps a world symbol constant to the class that represents it.
OBJ_GENERATOR = dict()
OBJ_GENERATOR[WPLAYER] = Player
OBJ_GENERATOR[WWALL] = Wall
OBJ_GENERATOR[WGROUND] = Ground
OBJ_GENERATOR[WIGNORE] = Ignore
|
991,829 | a5239d5c560ca144866d53948eb02518943708bf | import pyvista as pv
# Build the default example sphere and count the edges of its first cell.
mesh = pv.Sphere()
# The expected output below shows the cell has three edges.
mesh.get_cell(0).n_edges
# Expected:
## 3
|
991,830 | c2052c0a37ef2abcb2333412b6112e50db87ba75 | from pyspark.sql import SparkSession, SQLContext
from pyspark.sql.types import StructType, StructField, StringType, DoubleType
if __name__ == '__main__':
    # Build (or reuse) the Spark session for this job.
    scSpark = SparkSession \
        .builder \
        .appName("HW9P5DATA603") \
        .getOrCreate()
    sc = scSpark.sparkContext
    sql = SQLContext(sc)
    # Explicit schema: three string columns plus GPA parsed as a double.
    schema = StructType([ \
        StructField("Student_ID", StringType(), True), \
        StructField("Student_Name", StringType(), True), \
        StructField("Student_Phone_Number", StringType(), True), \
        StructField("GPA", DoubleType(), True)])
    df = sql.read.csv(
        "data.csv", header=False, schema=schema, multiLine=True
    )
    df.createOrReplaceTempView("students")
    # BUG FIX: the original used strict '<' and '>' on both sides of each
    # band, so a GPA of exactly 3.6 or 3.2 matched no branch and produced a
    # NULL grade. Intervals are now closed on the lower bound:
    # A >= 3.6, 3.2 <= B < 3.6, 2.8 <= C < 3.2 (below 2.8 stays NULL).
    result = sql.sql("SELECT Student_ID, Student_Name, Student_Phone_Number, "
                     "CASE WHEN GPA >= 3.6 THEN 'A' WHEN GPA >= 3.2 THEN 'B' WHEN GPA >= 2.8 THEN 'C' END "
                     "AS Grade FROM students")
    result.show()
|
991,831 | 43a236d00bfbf5645214e6d4983fdff709194741 | import unittest
from widget.widget_resolver import WidgetResolver
from widget.widget import Widget
from database.database import WidgetUser
url_mock = 'www.example.com/rest'
class WidgetDaoMock:
    """Stub DAO that reports the fixed base URL for any widget."""
    def get_base_url(self, widget):
        return url_mock
class WidgetUserDaoMock:
    """Stub DAO returning one canned WidgetUser mapping for each known username."""
    def get_mapping(self, username):
        mapping = WidgetUser()
        mapping.mapping_id = 1
        mapping.position = 2
        mapping.context = 'Context: 123'
        if username == 'Fib':
            mapping.widget = "GenericWidget"
        elif username == 'Bif':
            mapping.widget = "Widget2"
        else:
            # Unknown usernames yield no mapping (implicit None in the original).
            return None
        mapping.username = username
        return [mapping]
class APIKey:
    """Value object pairing an API query-parameter name with its key string."""
    def __init__(self, name, key):
        self.name = name
        self.key = key
api_keys = {"GenericWidget": APIKey("APPID", "1234567890")}
class WidgetResolverTest(unittest.TestCase):
    """WidgetResolver.process_widgets: the API key is appended only for widgets
    that have one configured in *api_keys*."""
    def setUp(self):
        self.resolver = WidgetResolver(api_keys=api_keys,
            widget_user_dao=WidgetUserDaoMock(), widget_dao=WidgetDaoMock())
    def test_without_api_key(self):
        # "Bif" maps to Widget2, which has no API key: URL passes through unchanged.
        resolved = self.resolver.process_widgets('Bif')
        self.assertEqual(url_mock, resolved[0].url)
    def test_with_api_key(self):
        # "Fib" maps to GenericWidget: its APPID key is appended as a query parameter.
        resolved = self.resolver.process_widgets('Fib')
        self.assertEqual("www.example.com/rest?APPID=1234567890", resolved[0].url)
|
991,832 | df80915d4a28be49ae5356824167a7020a5868f2 | """
File: load_2d_interp.py
Author: Neil Bassett
Date: 20 Aug 2019
Description: Contains functions which load interpolations of height vs. dB
threshold grid for the coefficients a and b in the power law
a*(nu**b) where nu is the frequency. Grid was calculated
from fits to FDTD simulations of RFI diffraction around the moon.
"""
from __future__ import division
import numpy as np
import pickle
from scipy.interpolate import interp2d
def load_interp_2d():
    """
    Loads interpolation of h vs. dB grid for a and b power law parameters
    from 2d_interp_h_vs_dB.pkl

    Returns the (a_grid_interp, b_grid_interp) pair stored in the pickle.
    """
    # 'with' guarantees the file is closed even if unpickling raises
    # (the original leaked the handle on error).
    with open('2d_interp_h_vs_dB.pkl', 'rb') as f:
        interp_dict = pickle.load(f)
    return interp_dict['a_grid_interp'],\
        interp_dict['b_grid_interp']
|
991,833 | b9b8f60bd063a130eb47e21deea3ef91b5a48c6c | from building import *
import rtconfig
# get current directory
cwd = GetCurrentDir()
# The set of source files associated with this SConscript file.
src = Glob('*.c')
# Each example is compiled in only when its menuconfig option is enabled.
if GetDepend('KOBUKI_USING_GET_ODOMETRY_EXAMPLE'):
    src += Glob('examples/kobuki_get_odometry_example.c')
if GetDepend('KOBUKI_USING_GET_VERSION_EXAMPLE'):
    src += Glob('examples/kobuki_get_version_example.c')
if GetDepend('KOBUKI_USING_LED_EXAMPLE'):
    src += Glob('examples/kobuki_led_example.c')
if GetDepend('KOBUKI_USING_PLAY_SOUND_EXAMPLE'):
    src += Glob('examples/kobuki_play_sound_example.c')
if GetDepend('KOBUKI_USING_POWER_EXAMPLE'):
    src += Glob('examples/kobuki_power_example.c')
if GetDepend('KOBUKI_USING_SET_SPEED_EXAMPLE'):
    src += Glob('examples/kobuki_set_speed_example.c')
# Export the package as an RT-Thread build group gated on PKG_USING_KOBUKI.
path = [cwd]
LOCAL_CCFLAGS = ''
group = DefineGroup('kobuki', src, depend = ['PKG_USING_KOBUKI'], CPPPATH = path, LOCAL_CCFLAGS = LOCAL_CCFLAGS)
Return('group')
|
991,834 | 380ae747344c7ea63848214d33fc401f57d241c6 | #!python3
# coding=utf8
import pymongo
class DBUtil(object):
    """Thin wrapper owning a MongoDB client connected to the local server."""
    def __init__(self):
        # Default local mongod instance; no authentication.
        self.conn = pymongo.MongoClient('localhost', 27017)
if __name__ == '__main__':
    util = DBUtil()
    # Select the `comic` database (lazy handle; no server round-trip yet).
    db = util.conn.comic
|
991,835 | b3e9625d31f960268aa748ac251a742299d65341 | import sqlite3
def genInsert(filename):
    """Rebuild the sqlite `rankings` table from the CSV file *filename*.

    Drops any existing `rankings` table, then executes one INSERT per CSV row
    via a single executescript. Returns 0. (Python 2 source: `print` statement.)
    """
    conn = sqlite3.connect('datasetfull.db')
    db = []
    assets = []
    # NOTE(review): `notes VARCHAR2(55)` is an Oracle type name; sqlite treats
    # column types as advisory so it still works — confirm it is intentional.
    header = '''CREATE TABLE rankings(
    asset_id INTEGER NOT NULL PRIMARY KEY
    ,asset_name VARCHAR(121) NOT NULL
    ,ranking INTEGER
    ,team_id VARCHAR(3)
    ,notes VARCHAR2(55)
    ,y1 INTEGER
    ,y1g INTEGER
    ,y2 INTEGER
    ,y2g INTEGER
    ,y3 INTEGER
    ,y3g INTEGER
    ,y4 INTEGER
    ,y4g INTEGER
    ,y5 INTEGER
    ,y5g INTEGER
    ,playeropt INTEGER
    ,teamopt INTEGER
    ,eto INTEGER
    ,qo INTEGER
    ,bird VARCHAR(5)
    ,ebird VARCHAR(5)
    ,nonbird VARCHAR(5)
    ,rfa VARCHAR(5)
    ,ufa VARCHAR(5)
    ,rights VARCHAR(22)
    ,rightsinfo VARCHAR(30)
    ,ntc VARCHAR(30)
    ,agent VARCHAR(51)
    ,agency VARCHAR(76)
    ,espn VARCHAR(30)
    ,fivethirty VARCHAR(7)
    );\n\n'''
    pre = 'INSERT INTO rankings(asset_id,asset_name,ranking,team_id,notes,y1,y1g,y2,y2g,y3,y3g,y4,y4g,y5,y5g,playeropt,teamopt,eto,qo,bird,ebird,nonbird,rfa,ufa,rights,rightsinfo,ntc,agent,agency,espn,fivethirty) VALUES ('
    suff = ');\n'
    with open(filename) as csv:
        for line in csv: db.append(line.strip('\n\r'))
    for i in db:
        assets.append(i.split(','))
    insert = header
    for asset in assets:
        line = pre
        for i in range(len(asset)):
            # Empty CSV cells become SQL NULL; text columns (1=name, 3=team_id,
            # 4=notes, 19 onward) are single-quoted, numeric columns pass through.
            if (asset[i] == ''): line += 'NULL'
            else:
                if (i == 1) or (i == 3) or (i ==4) or (19 <= i <= len(asset)): line += "'" + asset[i] + "'"
                else: line += asset[i]
            if (i != len(asset)-1): line += ','
        line += suff
        insert += line
    c = conn.cursor()
    try:
        c.execute('''DROP TABLE rankings ;''')
    except sqlite3.OperationalError:
        # First run: nothing to drop yet.
        print "creating new table"
    c.executescript(insert)
    conn.commit()
    conn.close()
    return 0
|
991,836 | a2e9201fec986aa29d590b437b7e6609b48c5ff4 | from keras.datasets import mnist # standard dataset of hand drawn numbers - digit recognition
import matplotlib.pyplot as plt
import keras
from keras.layers import Input, Dense, Convolution2D, MaxPooling2D, Flatten
import numpy as np
(x_train, y_train), (x_test,y_test) = mnist.load_data()
# Train/evaluate on a subset to keep the run fast.
x_train = x_train[:10000, :]
y_train = y_train[:10000]
x_test = x_test[:1000, :]
y_test = y_test[:1000]
# x = input (images), y = output (numbers)
print(x_train.shape)
print(y_train.shape)
# Plot some images to see how they look. In grey. With a title.
# NOTE(review): range(0) is empty, so this preview loop is currently disabled.
for i in range(0):
    plt.imshow(x_train[i,:,:], cmap="gray")
    plt.title(y_train[i])
    plt.show()
# have now looked at the data and it looks smashin'
# last 4th dimentions is channels, but we're doing greyscale so dont need that
x_train = np.expand_dims(x_train,axis=-1)
x_test = np.expand_dims(x_test,axis=-1)
print(x_train.shape)
# must now convert images to floats
print(x_train.dtype)
# Scale pixel values from [0, 255] into [0, 1].
x_train = x_train.astype(np.float32)/255
x_test = x_test.astype(np.float32)/255
print(x_train.dtype)
# must now convert the output data. It is values, but we want it to match data
# we want onehotencoding
y_train = keras.utils.to_categorical(y_train,10)
y_test = keras.utils.to_categorical(y_test,10)
print(y_train.shape)
print(y_test.shape)
print(y_train[0,:])
# MAKING LAYER MAKING NETWORK DUDUDU ITS OURS THIS TIME
first_layer = Input(shape=x_train.shape[1:])
# nr of filters, size of filter, activationfilter, and x is the prev layer
x = Convolution2D(32, 3, activation="relu",padding="same")(first_layer)
x = Convolution2D(64, 3, activation="relu",padding="same")(x)
# half the width and height
x = MaxPooling2D((2,2))(x)
x = Flatten()(x)
# nr of neurons in this layer
x = Dense(128)(x)
x = Dense(128)(x)
# Final layer: one softmax probability per digit class.
x = Dense(10,activation="softmax")(x)
model = keras.Model(inputs=first_layer,outputs=x)
print(model.summary())
model.compile(loss="categorical_crossentropy",optimizer="adadelta")
model.fit(x_train,y_train,batch_size=24,epochs=3,validation_data=(x_test,y_test))
|
991,837 | 3b798ee6696dfc4afc73138a9fbc9197629bc372 | from backend.models import Model
from multipledispatch import dispatch
class Book(Model.Model):
    """Data-access object for the `books` table (create/read/update/delete).

    `getAll` is overloaded via multipledispatch: the no-arg form fetches every
    row; the str form filters by name with LIKE (the caller presumably must
    include '%' wildcards in *searchValue* — TODO confirm).
    """
    def create(self, payload):
        # *payload* is the parameter tuple for the single %s placeholder (name,).
        query = "INSERT INTO books (name, created_at, updated_at) values (%s, now(), now())"
        if self.execQ(query, payload):
            print("======= DATA Buku CREATED =========")
        else:
            print("======= DATA Buku FAILED TO CREATE ==========")
    def update(self, payload):
        # *payload* = (new_name, book_id).
        query = "UPDATE books set name=%s, updated_at = now() where id = %s"
        if self.execQ(query, payload):
            print("======= DATA Buku Updated =========")
        else:
            print("======= DATA Buku FAILED TO UPDATE ==========")
    @dispatch()
    def getAll(self):
        # Zero-argument overload: fetch every book.
        query = "SELECT * FROM books"
        if self.execQ(query):
            return self._cnx.getConnection().fetchall()
        else:
            print("Failed to GET ALL data Buku")
    def delete(self, id):
        query = "DELETE FROM books where id = %s"
        if self.execQ(query, (id,)):
            print("============= Buku berhasil dihapus ============")
        else:
            print("============= FAILED TO DELETE Buku ============")
    @dispatch(str)
    def getAll(self, searchValue):
        # One-argument overload: filter rows whose name matches *searchValue*.
        query = "SELECT * FROM books WHERE name LIKE %s"
        if self.execQ(query, (searchValue,)):
            return self._cnx.getConnection().fetchall()
        else:
            print("========= Failed to get data Buku =========")
991,838 | 4beaad51186e24b688c9eed63a8f044a59655724 | # creates db for SDV-Summary
from flask import Flask
import os
import sys
import getpass
from werkzeug import check_password_hash
from config import config
app = Flask(__name__)
config_name = os.environ.get("SDV_APP_SETTINGS", None)
app.config.from_object(config[config_name])
database_structure_dict = {
"md5": "TEXT",
"url": "TEXT",
"isMale": "TEXT",
"pantsColor0": "BIGINT",
"pantsColor1": "BIGINT",
"pantsColor2": "BIGINT",
"pantsColor3": "BIGINT",
"combatLevel": "BIGINT",
"maxHealth": "BIGINT",
"hair": "BIGINT",
"favoriteThing": "TEXT",
"maxItems": "BIGINT",
"skin": "BIGINT",
"friendshipsWilly": "BIGINT",
"friendshipsClint": "BIGINT",
"friendshipsJodi": "BIGINT",
"friendshipsHarvey": "BIGINT",
"friendshipsLeah": "BIGINT",
"friendshipsWizard": "BIGINT",
"friendshipsJas": "BIGINT",
"friendshipsAbigail": "BIGINT",
"friendshipsMaru": "BIGINT",
"friendshipsElliott": "BIGINT",
"friendshipsCaroline": "BIGINT",
"friendshipsPam": "BIGINT",
"friendshipsDwarf": "BIGINT",
"friendshipsShane": "BIGINT",
"friendshipsDemetrius": "BIGINT",
"friendshipsAlex": "BIGINT",
"friendshipsGus": "BIGINT",
"friendshipsVincent": "BIGINT",
"friendshipsSebastian": "BIGINT",
"friendshipsRobin": "BIGINT",
"friendshipsSam": "BIGINT",
"friendshipsLewis": "BIGINT",
"friendshipsMarnie": "BIGINT",
"friendshipsPenny": "BIGINT",
"friendshipsHaley": "BIGINT",
"friendshipsPierre": "BIGINT",
"friendshipsEvelyn": "BIGINT",
"friendshipsLinus": "BIGINT",
"friendshipsGeorge": "BIGINT",
"friendshipsEmily": "BIGINT",
"friendshipsKent": "BIGINT",
"friendshipsKrobus": "BIGINT",
"friendshipsSandy": "BIGINT",
"friendshipsHenchman": "BIGINT",
# 'friendshipsBouncer':'BIGINT',
# 'friendshipsGil':'BIGINT',
# 'friendshipsGovernor':'BIGINT',
# 'friendshipsGrandpa':'BIGINT',
# 'friendshipsGunther':'BIGINT',
# 'friendshipsMarlon':'BIGINT',
# 'friendshipsMorris':'BIGINT',
# 'friendshipsMr_Qi':'BIGINT',
"farmingLevel": "BIGINT",
"statsRocksCrushed": "BIGINT",
"statsDaysPlayed": "BIGINT",
"statsStepsTaken": "BIGINT",
"statsSpecificMonstersKilledFly": "BIGINT",
"statsSpecificMonstersKilledGhost": "BIGINT",
"statsSpecificMonstersKilledBat": "BIGINT",
"statsSpecificMonstersKilledSkeleton": "BIGINT",
"statsSpecificMonstersKilledGrub": "BIGINT",
"statsSpecificMonstersKilledDust_Spirit": "BIGINT",
"statsSpecificMonstersKilledStone_Golem": "BIGINT",
"statsSpecificMonstersKilledFrost_Bat": "BIGINT",
"statsSpecificMonstersKilledDuggy": "BIGINT",
"statsSpecificMonstersKilledRock_Crab": "BIGINT",
"statsSpecificMonstersKilledBig_Slime": "BIGINT",
"statsSpecificMonstersKilledSludge": "BIGINT",
"statsSpecificMonstersKilledFrost_Jelly": "BIGINT",
"statsSpecificMonstersKilledBug": "BIGINT",
"statsSpecificMonstersKilledGreen_Slime": "BIGINT",
"statsSpecificMonstersKilledLava_Crab": "BIGINT",
"statsSpecificMonstersKilledLava_Bat": "BIGINT",
"statsSpecificMonstersKilledMetal_Head": "BIGINT",
"statsSpecificMonstersKilledShadow_Brute": "BIGINT",
"statsSpecificMonstersKilledShadow_Shaman": "BIGINT",
"statsSpecificMonstersKilledMummy": "BIGINT",
"statsSpecificMonstersKilledSerpent": "BIGINT",
"statsSpecificMonstersKilledArmored_Bug": "BIGINT",
"statsSpecificMonstersKilledVoid_Spirit": "BIGINT",
"statsSpecificMonstersKilledSquid_Kid": "BIGINT",
"statsSpecificMonstersKilledPurple_Slime": "BIGINT",
"statsSpecificMonstersKilledRed_Slime": "BIGINT",
"statsSpecificMonstersKilledTransparent_Slime": "BIGINT",
"statsSlimesKilled": "BIGINT",
"statsPreservesMade": "BIGINT",
"statsGeodesCracked": "BIGINT",
"statsSeedsSown": "BIGINT",
"statsNotesFound": "BIGINT",
"statsMonstersKilled": "BIGINT",
"statsStumpsChopped": "BIGINT",
"statsCropsShipped": "BIGINT",
"statsCowMilkProduced": "BIGINT",
"statsFishCaught": "BIGINT",
"statsPiecesOfTrashRecycled": "BIGINT",
"statsTrufflesFound": "BIGINT",
"statsIridiumFound": "BIGINT",
"statsTimesFished": "BIGINT",
"statsStarLevelCropsShipped": "BIGINT",
"statsCopperFound": "BIGINT",
"statsBarsSmelted": "BIGINT",
"statsBouldersCracked": "BIGINT",
"statsCoinsFound": "BIGINT",
"statsCaveCarrotsFound": "BIGINT",
"statsStoneGathered": "BIGINT",
"statsQuestsCompleted": "BIGINT",
"statsGoatMilkProduced": "BIGINT",
"statsCoalFound": "BIGINT",
"statsIronFound": "BIGINT",
"statsCheeseMade": "BIGINT",
"statsItemsCooked": "BIGINT",
"statsWeedsEliminated": "BIGINT",
"statsTimesUnconscious": "BIGINT",
"statsChickenEggsLayed": "BIGINT",
"statsSheepWoolProduced": "BIGINT",
"statsDiamondsFound": "BIGINT",
"statsRabbitWoolProduced": "BIGINT",
"statsAverageBedtime": "BIGINT",
"statsBeveragesMade": "BIGINT",
"statsOtherPreciousGemsFound": "BIGINT",
"statsDuckEggsLayed": "BIGINT",
"statsItemsCrafted": "BIGINT",
"statsGiftsGiven": "BIGINT",
"statsSticksChopped": "BIGINT",
"statsPrismaticShardsFound": "BIGINT",
"statsDirtHoed": "BIGINT",
"statsGoldFound": "BIGINT",
"statsMysticStonesCrushed": "BIGINT",
"statsItemsShipped": "BIGINT",
"statsGoatCheeseMade": "BIGINT",
"shirt": "BIGINT",
"uniqueIDForThisGame": "BIGINT",
"miningLevel": "BIGINT",
"facialHair": "BIGINT",
"money": "BIGINT",
"newEyeColor0": "BIGINT",
"newEyeColor1": "BIGINT",
"newEyeColor2": "BIGINT",
"newEyeColor3": "BIGINT",
"maxStamina": "BIGINT",
"farmName": "TEXT",
"foragingLevel": "BIGINT",
"fishingLevel": "BIGINT",
"deepestMineLevel": "BIGINT",
"accessory": "BIGINT",
"catPerson": "TEXT",
"totalMoneyEarned": "BIGINT",
"millisecondsPlayed": "BIGINT",
"hairstyleColor0": "BIGINT",
"hairstyleColor1": "BIGINT",
"hairstyleColor2": "BIGINT",
"hairstyleColor3": "BIGINT",
"name": "TEXT",
"professions0": "TEXT",
"professions1": "TEXT",
"professions2": "TEXT",
"professions3": "TEXT",
"professions4": "TEXT",
"professions5": "TEXT",
"professions6": "TEXT",
"professions7": "TEXT",
"professions8": "TEXT",
"professions9": "TEXT",
"farm_info": "TEXT",
"farm_url": "TEXT",
"avatar_url": "TEXT",
"added_time": "FLOAT",
"ip": "TEXT",
"del_token": "BIGINT",
"views": "BIGINT",
# 'date':'TEXT',
"savefileLocation": "TEXT",
"petName": "TEXT",
"portrait_info": "TEXT",
"portrait_url": "TEXT",
"animals": "TEXT",
"download_enabled": "BOOLEAN",
"download_url": "TEXT",
"owner_id": "BIGINT",
"series_id": "BIGINT",
"map_url": "TEXT",
"currentSeason": "TEXT",
"failed_processing": "BOOLEAN",
"imgur_json": "TEXT",
"positive_votes": "BIGINT DEFAULT 1",
"negative_votes": "BIGINT DEFAULT 1",
"base_path": "TEXT",
"thumb_url": "TEXT",
"private": "BOOLEAN",
"planner_url": "TEXT",
"statsGoodFriends": "BIGINT",
"statsItemsForaged": "BIGINT",
"dayOfMonthForSaveGame": "TEXT",
"seasonForSaveGame": "TEXT",
"yearForSaveGame": "TEXT",
"farmhands": "JSONB",
}
# Backend-specific SQL fragments: parameter placeholder style and the
# autoincrementing primary-key clause differ between sqlite and postgres.
if app.config["USE_SQLITE"] == True:
    database_structure_dict["id"] = "INTEGER PRIMARY KEY AUTOINCREMENT"
    sqlesc = "?"
    idcode = "INTEGER PRIMARY KEY AUTOINCREMENT"
else:
    sqlesc = "%s"
    idcode = "SERIAL PRIMARY KEY"
    database_structure_dict["id"] = "SERIAL PRIMARY KEY"
users_structure_dict = {
"id": idcode,
"email": "TEXT",
"email_confirmed": "BOOLEAN",
"email_conf_token": "TEXT",
"pw_reset_token": "TEXT",
"password": "TEXT",
"imgur_json": "TEXT",
"imgur_id": "TEXT",
"patreon_info": "TEXT",
"patreon_token": "TEXT",
"patreon_refresh_token": "TEXT",
"patreon_expiry": "BIGINT",
"unconditional_api_access": "BOOLEAN", # designed to allow discretionary
# awarding of API usage; should not be used when API access is required:
# update check_api_availability() instead!
"auth_key": "TEXT",
"login_time": "BIGINT",
"api_key": "TEXT",
"api_secret": "TEXT",
"votes": "TEXT",
"privacy_default": "BOOLEAN DEFAULT FALSE",
}
# Pre-rendered comma-separated column lists for SQL statements; one variant
# omits the farm_info column.
database_fields = ""
database_fields_less_farminfo = ""
for key in sorted(database_structure_dict.keys()):
    database_fields += key + ","
    if key != "farm_info":
        database_fields_less_farminfo += key + ","
database_fields = database_fields[:-1]
database_fields_less_farminfo = database_fields_less_farminfo[:-1]
# Map lowercased column names back to their canonical capitalization.
capitalization_map = {key.lower(): key for key in database_structure_dict.keys()}
# Python 2/3 shim so interactive prompts work under both.
if sys.version_info >= (3, 0):
    raw_input = input
def connect_db():
    """Open a database connection: sqlite or postgres depending on app config.

    The driver is imported lazily so only the configured backend needs to be
    installed.
    """
    if app.config["USE_SQLITE"] == True:
        import sqlite3
        connection = sqlite3.connect(app.config["DB_SQLITE"])
    else:
        import psycopg2
        connection = psycopg2.connect(
            "dbname="
            + app.config["DB_NAME"]
            + " user="
            + app.config["DB_USER"]
            + " password="
            + app.config["DB_PASSWORD"]
        )
    return connection
def generate_db():
    """Create the main `playerinfo` table from database_structure_dict."""
    database_structure = ""
    for key in sorted(database_structure_dict.keys()):
        database_structure += key + " " + database_structure_dict[key] + ",\n"
    database_structure = database_structure[:-2]  # drop the trailing ",\n"
    connection = connect_db()
    c = connection.cursor()
    c.execute("CREATE TABLE playerinfo(" + database_structure + ")")
    connection.commit()
    # Close the connection like every other generate_* helper does
    # (the original leaked it).
    connection.close()
    print("done")
def generate_errors():
    """Create the `errors` table used to log upload/processing failures."""
    connection = connect_db()
    c = connection.cursor()
    statement = (
        "CREATE TABLE errors (id " + idcode + ", ip TEXT, time BIGINT, notes TEXT);"
    )
    c.execute(statement)
    connection.commit()
    connection.close()
    print("done")
def generate_todo():
    """Create the `todo` work-queue table for background processing tasks."""
    connection = connect_db()
    c = connection.cursor()
    statement = (
        "CREATE TABLE todo (id "
        + idcode
        + ", task TEXT, playerid TEXT, currently_processing BOOLEAN);"
    )
    c.execute(statement)
    connection.commit()
    connection.close()
    print("done")
def generate_blog():
    """Create the `blog` table (posts with author, title, body and live flag)."""
    connection = connect_db()
    c = connection.cursor()
    statement = (
        "CREATE TABLE blog(id "
        + idcode
        + ", time BIGINT, author TEXT, title TEXT, post TEXT, live BOOLEAN);"
    )
    c.execute(statement)
    connection.commit()
    connection.close()
    print("done")
def generate_users():
    """Create the `users` account table from users_structure_dict."""
    users_structure = ""
    for key in sorted(users_structure_dict.keys()):
        users_structure += key + " " + users_structure_dict[key] + ",\n"
    users_structure = users_structure[:-2]  # drop the trailing ",\n"
    connection = connect_db()
    c = connection.cursor()
    c.execute("CREATE TABLE users(" + users_structure + ")")
    connection.commit()
    connection.close()
    print("done")
def generate_serial():
    """Create the `series` table grouping uploads into owner-managed series."""
    connection = connect_db()
    c = connection.cursor()
    statement = (
        "CREATE TABLE series(id "
        + idcode
        + ", owner INT, members_json TEXT, auto_key_json TEXT);"
    )
    c.execute(statement)
    connection.commit()
    connection.close()
    print("done")
def generate_plans():
    """Create the `plans` table for farm-planner renders and their metadata."""
    connection = connect_db()
    c = connection.cursor()
    statement = (
        "CREATE TABLE plans(id "
        + idcode
        + ", failed_render BOOLEAN, added_time BIGINT, source_json TEXT, url TEXT, image_url TEXT, base_path TEXT, planner_url TEXT, views INT, owner_id TEXT, last_visited BIGINT, season TEXT, md5 TEXT, render_deleted BOOL);"
    )
    c.execute(statement)
    connection.commit()
    connection.close()
    print("done")
def generate_ad_log():
    """Create the ad_log table recording served advertisements."""
    connection = connect_db()
    c = connection.cursor()
    statement = (
        "CREATE TABLE ad_log(id "
        + idcode
        + ", time BIGINT, ip_address TEXT, ad_id TEXT, ad_place TEXT, ad_file TEXT, ad_url TEXT);"
    )
    c.execute(statement)
    connection.commit()
    connection.close()
    print("done")
def generate_api_clients():
    """Create the api_clients table (registered OAuth-style API clients)."""
    connection = connect_db()
    c = connection.cursor()
    statement = (
        "CREATE TABLE api_clients(id "
        + idcode
        + ", name TEXT, key TEXT, secret TEXT, redirect TEXT, info TEXT);"
    )
    c.execute(statement)
    connection.commit()
    connection.close()
    print("done")
def generate_api_users():
    """Create the api_users table linking clients to users via tokens."""
    connection = connect_db()
    c = connection.cursor()
    statement = (
        "CREATE TABLE api_users(id "
        + idcode
        + ", clientid INT, userid INT, token TEXT UNIQUE, refresh_token TEXT UNIQUE, expiry INT, scope TEXT);"
    )
    c.execute(statement)
    connection.commit()
    connection.close()
    print("done")
def set_indexes():
    """Create the playerinfo lookup indexes, skipping any that already exist.

    Each CREATE INDEX is committed individually so that one failure
    (typically "index already exists") does not abort the rest.
    """
    connection = connect_db()
    c = connection.cursor()
    indexes = [
        "CREATE INDEX series_id_index ON playerinfo (series_id)",
        "CREATE INDEX url_index ON playerinfo (url)",
        "CREATE INDEX views_index ON playerinfo (views)",
        "CREATE INDEX positive_votes_index ON playerinfo (positive_votes)",
        "CREATE INDEX negative_votes_index ON playerinfo (negative_votes)",
        "CREATE INDEX millisecondsPlayed ON playerinfo (millisecondsPlayed)",
    ]
    for index in indexes:
        try:
            c.execute(index)
            connection.commit()
            print("{} successful".format(index))
        # was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; narrow it so Ctrl-C still aborts the script
        except Exception:
            connection.rollback()
            print("{} failed (may already exist)".format(index))
    connection.close()
    print("done")
def delete_db():
    """Interactively drop every table except admin, after an admin login.

    Fixes two defects in the previous version: an unknown username made
    fetchone() return None and crashed with a TypeError, and the database
    connection was leaked on every path that did not actually delete.
    """
    connection = connect_db()
    c = connection.cursor()
    print("you must log in as admin to delete the database")
    username = raw_input("username: ")
    password = getpass.getpass("password: ")
    c.execute("SELECT password FROM admin WHERE username=" + sqlesc, (username,))
    passhash = c.fetchone()
    # fetchone() is None for an unknown username; treat it like a bad password
    if passhash is not None and check_password_hash(passhash[0], password):
        a = raw_input(
            "just to double check, you REALLY want to delete everything? (y/n): "
        )
        if a == "y":
            for table in ("playerinfo", "errors", "todo", "blog", "users", "series", "plans"):
                c.execute("DROP TABLE " + table)
            connection.commit()
            print("all (except admin) deleted")
    else:
        print("incorrect credentials")
    # previously only closed on the successful-delete path
    connection.close()
def update_playerinfo():
    """Interactively reconcile the live playerinfo table with database_structure_dict.

    Compares the columns Postgres reports (information_schema) against the
    designed schema, prints the differences (missing / redundant /
    wrong-typed columns), then — after an admin login — offers to ADD
    missing columns and DROP redundant ones. Type mismatches are only
    reported, never altered.
    """
    if app.config["USE_SQLITE"] == True:
        print("This is only for Postgres databases")
        return
    connection = connect_db()
    c = connection.cursor()
    c.execute(
        "SELECT * FROM information_schema.columns WHERE table_schema='public' AND table_name='playerinfo'"
    )
    # information_schema row: index 3 is column_name, index 7 is data_type
    returned_database_structure = {
        row[3].lower(): row[7].upper() for row in c.fetchall()
    }
    current_design_structure = {
        key.lower(): database_structure_dict[key].upper()
        for key in database_structure_dict.keys()
    }
    redundant = {}
    incorrect_type = {}
    # cross off every column the db already has; whatever remains in
    # current_design_structure afterwards is "not implemented"
    for key in returned_database_structure.keys():
        try:
            if current_design_structure[key] == returned_database_structure[key]:
                # print(key,'matches')
                pass
            else:
                # print(key,'by design:',current_design_structure[key],'db has:',returned_database_structure[key])
                incorrect_type[key] = {
                    "should be": current_design_structure[key],
                    "was": returned_database_structure[key],
                }
            del current_design_structure[key]
        except KeyError:
            # print(key,'in db but not in current design structure')
            redundant[key] = {"redundant": returned_database_structure[key]}
    not_implemented = current_design_structure
    print("not implemented in db:")
    for key in not_implemented.keys():
        print(key, not_implemented[key])
    print("redundant in db:")
    for key in redundant.keys():
        print(key, redundant[key])
    print("incorrect type in db:")
    for key in incorrect_type.keys():
        print(key, incorrect_type[key])
    a = raw_input("Alter database? (y/n): ")
    if a == "y":
        print("you must log in as admin to alter the database")
        username = raw_input("username: ")
        password = getpass.getpass("password: ")
        c.execute("SELECT password FROM admin WHERE username=" + sqlesc, (username,))
        passhash = c.fetchone()
        if check_password_hash(passhash[0], password) == True:
            print("implementing not-implemented keys (ADDing to database)")
            for key in not_implemented.keys():
                a = raw_input(
                    "Add column "
                    + str(key)
                    + " type "
                    + str(not_implemented[key])
                    + " to playerinfo? (y/n): "
                )
                if a == "y":
                    c.execute(
                        "ALTER TABLE playerinfo ADD COLUMN "
                        + str(key)
                        + " "
                        + str(not_implemented[key])
                    )
            print("done")
            print("removing no-longer-necessary keys (DROPping from database)")
            for key in redundant.keys():
                a = raw_input("Remove column " + str(key) + " from playerinfo? (y/n): ")
                if a == "y":
                    c.execute("ALTER TABLE playerinfo DROP COLUMN " + str(key))
        else:
            print("incorrect credentials")
    connection.commit()
    connection.close()
    print("all modifications committed")
def update_users():
    """Interactively reconcile the live users table with users_structure_dict.

    Mirror image of update_playerinfo(): report missing / redundant /
    wrong-typed columns, then after an admin login offer to ADD or DROP
    columns on the users table.
    """
    if app.config["USE_SQLITE"] == True:
        print("This is only for Postgres databases")
        return
    connection = connect_db()
    c = connection.cursor()
    c.execute(
        "SELECT * FROM information_schema.columns WHERE table_schema='public' AND table_name='users'"
    )
    # information_schema row: index 3 is column_name, index 7 is data_type
    returned_database_structure = {
        row[3].lower(): row[7].upper() for row in c.fetchall()
    }
    current_design_structure = {
        key.lower(): users_structure_dict[key].upper()
        for key in users_structure_dict.keys()
    }
    redundant = {}
    incorrect_type = {}
    # cross off every column the db already has; the leftovers are missing
    for key in returned_database_structure.keys():
        try:
            if current_design_structure[key] == returned_database_structure[key]:
                # print(key,'matches')
                pass
            else:
                # print(key,'by design:',current_design_structure[key],'db has:',returned_database_structure[key])
                incorrect_type[key] = {
                    "should be": current_design_structure[key],
                    "was": returned_database_structure[key],
                }
            del current_design_structure[key]
        except KeyError:
            # print(key,'in db but not in current design structure')
            redundant[key] = {"redundant": returned_database_structure[key]}
    not_implemented = current_design_structure
    print("not implemented in db:")
    for key in not_implemented.keys():
        print(key, not_implemented[key])
    print("redundant in db:")
    for key in redundant.keys():
        print(key, redundant[key])
    print("incorrect type in db:")
    for key in incorrect_type.keys():
        print(key, incorrect_type[key])
    a = raw_input("Alter database? (y/n): ")
    if a == "y":
        print("you must log in as admin to alter the database")
        username = raw_input("username: ")
        password = getpass.getpass("password: ")
        c.execute("SELECT password FROM admin WHERE username=" + sqlesc, (username,))
        passhash = c.fetchone()
        if check_password_hash(passhash[0], password) == True:
            print("implementing not-implemented keys (ADDing to database)")
            for key in not_implemented.keys():
                a = raw_input(
                    "Add column "
                    + str(key)
                    + " type "
                    + str(not_implemented[key])
                    + " to users? (y/n): "
                )
                if a == "y":
                    c.execute(
                        "ALTER TABLE users ADD COLUMN "
                        + str(key)
                        + " "
                        + str(not_implemented[key])
                    )
            print("done")
            print("removing no-longer-necessary keys (DROPping from database)")
            for key in redundant.keys():
                a = raw_input("Remove column " + str(key) + " from users? (y/n): ")
                if a == "y":
                    c.execute("ALTER TABLE users DROP COLUMN " + str(key))
        else:
            print("incorrect credentials")
    connection.commit()
    connection.close()
    print("all modifications committed")
def init_db(drop_all=False):
    """Interactive schema setup menu: prompt per table, create what the user confirms.

    With drop_all=True the whole database (except admin) is deleted first,
    after an admin login inside delete_db().
    """
    if drop_all:
        delete_db()
    print("---------")
    a = raw_input("Generate advertising log database? (y/n): ")
    if a == "y":
        generate_ad_log()
    a = raw_input("Generate playerinfo database? (y/n): ")
    if a == "y":
        generate_db()
    a = raw_input("Generate todo database? (y/n): ")
    if a == "y":
        generate_todo()
    a = raw_input("Generate errors database? (y/n): ")
    if a == "y":
        generate_errors()
    a = raw_input("Generate blog database? (y/n): ")
    if a == "y":
        generate_blog()
    a = raw_input("Generate user database? (y/n): ")
    if a == "y":
        generate_users()
    a = raw_input("Generate serial database? (y/n): ")
    if a == "y":
        generate_serial()
    a = raw_input("Generate api_clients database? (y/n): ")
    if a == "y":
        generate_api_clients()
    a = raw_input("Generate api_users database? (y/n): ")
    if a == "y":
        generate_api_users()
    a = raw_input("Generate plans database? (y/n): ")
    if a == "y":
        generate_plans()
    a = raw_input("Set indexes for optimized db access? (y/n): ")
    if a == "y":
        set_indexes()
    print("--------")
    a = raw_input("Update playerinfo database? (y/n): ")
    if a == "y":
        update_playerinfo()
    a = raw_input("Update users database? (y/n): ")
    if a == "y":
        update_users()
# Run the interactive schema setup when executed directly.
if __name__ == "__main__":
    init_db()
|
991,839 | e4b7469b21a081cf47aa59d0a2e9e95774a79bac | from utils import header
from file import write_money_slips, open_file_bank, write_bank_account
def main():
    """Generate the money-slip records, a blank separator line, then the bank accounts."""
    header()
    make_money_slips('w')
    separator = open_file_bank('a')
    separator.write('\n')
    separator.close()
    make_bank_account('a')
def make_money_slips(mode):
    """Open the bank file in *mode*, write the money-slip records, and report success."""
    slips_file = open_file_bank(mode)
    write_money_slips(slips_file)
    slips_file.close()
    print('Cédulas geradas com sucesso')
def make_bank_account(mode):
    """Open the bank file in *mode*, write the bank-account records, and report success."""
    accounts_file = open_file_bank(mode)
    write_bank_account(accounts_file)
    accounts_file.close()
    print('Contas geradas com sucesso')
main()
|
991,840 | 6cba270e5afc7430a2cb869b3543f3d37bd1de2f | # Generated by Django 3.0.7 on 2020-08-09 16:36
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: adds the nullable send_to_newsletter flag to blog_post.

    dependencies = [
        ('LVR', '0018_auto_20200807_1436'),
    ]

    operations = [
        migrations.AddField(
            model_name='blog_post',
            name='send_to_newsletter',
            field=models.BooleanField(default=False, null=True),
        ),
    ]
|
991,841 | c781737e60104a6fb2369710aa2212f56d2106c7 | from bs4 import BeautifulSoup
import keyword
def extractText(text):
    """
    Removes all code from the input text.
    Returns text without html tags.
    """
    soup = BeautifulSoup(text, 'html.parser')
    # decompose() deletes each <code> element (and its contents) from the tree
    for code in soup.find_all('code'):
        code.decompose()
    return soup.get_text()
def extractCode(text):
    """Finds all code within input text and returns code without tags"""
    soup = BeautifulSoup(text, 'html.parser')
    snippets = [tag.get_text() for tag in soup.find_all('code')]
    return ''.join(snippets)
def extractContent(content):
    """Returns input content without html tags"""
    return BeautifulSoup(content, 'html.parser').get_text()
def extractReservedWords(code):
    """Return the Python keywords found in *code*, joined by single spaces.

    Newlines are treated as token separators. (The previous version deleted
    them outright — ``replace("\\n", "")`` — which fused the surrounding
    tokens together, so keywords adjacent to a line break were missed.)
    """
    tokens = str(code).replace("\n", " ").split(" ")
    return " ".join(t for t in tokens if keyword.iskeyword(t))
def extractReservedWords_top50(code):
    """Return Python keywords plus the top-50 Q&A symbol/word tokens found in *code*.

    Tokens are matched whole, space-separated; newlines count as separators
    (the previous version deleted them, fusing adjacent tokens and missing
    matches next to a line break).
    """
    qa_top50_adjv2 = ['+', '<', 'of', '*', "'", '"', '==', 'File', '>', 'to', '#', '=', '+=', 'print', '%', '!=', '-', ':', 'i', 'x', 'line']
    picked = []
    for token in str(code).replace("\n", " ").split(" "):
        if keyword.iskeyword(token) or token in qa_top50_adjv2:
            picked.append(token)
    return " ".join(picked)
|
991,842 | e9747d2e8a1cbe24920bb7ace6aef79f633b54fc | import os
import sys
import atexit
import json
import subprocess
import logging
import re
import requests
from requests.exceptions import ConnectionError, ReadTimeout, Timeout
from django.utils import six
from . import node, npm
from .settings import (
PATH_TO_NODE, SERVER_PROTOCOL, SERVER_ADDRESS, SERVER_PORT, SERVER_TIMEOUT, SERVER_TEST_TIMEOUT,
NODE_VERSION_REQUIRED, NPM_VERSION_REQUIRED,
)
from .exceptions import (
NodeServerConnectionError, NodeServerStartError, NodeServerAddressInUseError, NodeServerError, ErrorAddingService,
NodeServerTimeoutError
)
from .utils import html_unescape
class NodeServer(object):
    """
    A persistent Node server which sits alongside the python process
    and responds over HTTP
    """
    protocol = SERVER_PROTOCOL
    address = SERVER_ADDRESS
    port = SERVER_PORT
    # the JS entry point shipped next to this module
    path_to_source = os.path.join(os.path.dirname(__file__), 'node_server.js')
    start_on_init = False
    resolve_dependencies_on_init = True
    shutdown_on_exit = True
    has_started = False
    has_stopped = False
    logger = logging.getLogger(__name__)
    timeout = SERVER_TIMEOUT
    test_timeout = SERVER_TEST_TIMEOUT
    # reserved endpoints used by the control protocol; user services may not
    # register on any of these
    _test_endpoint = '/__test__'
    _add_service_endpoint = '/__add_service__'
    _get_endpoints_endpoint = '/__get_endpoints__'
    _blacklisted_endpoints = (
        '', '*', '/', _test_endpoint, _add_service_endpoint, _get_endpoints_endpoint,
    )
    # sentinel strings exchanged with node_server.js to confirm state
    _expected_startup_output = '__NODE_SERVER_IS_RUNNING__\n'
    _expected_test_output = '__SERVER_TEST__'
    _expected_add_service_output = '__ADDED_ENDPOINT__'
    _process = None

    def __init__(self):
        """Optionally verify node/npm versions, install JS deps, and start the server."""
        if self.resolve_dependencies_on_init:
            # Ensure that the external dependencies are met
            node.ensure_version_gte(NODE_VERSION_REQUIRED)
            npm.ensure_version_gte(NPM_VERSION_REQUIRED)
            # Ensure that the required packages have been installed
            npm.install(os.path.dirname(__file__))
        if self.start_on_init:
            self.start()

    def start(self, debug=None, use_existing_process=None, blocking=None):
        """Start the Node server subprocess.

        debug runs `node debug` in a blocking foreground process;
        use_existing_process reuses a server already answering at the
        configured address; blocking runs the server in the foreground
        instead of as a managed child process.
        """
        if debug is None:
            debug = False
        if use_existing_process is None:
            use_existing_process = True
        if blocking is None:
            blocking = False
        if debug:
            use_existing_process = False
            blocking = True
        if use_existing_process and self.test():
            self.has_started = True
            self.has_stopped = False
            return
        if not use_existing_process and self.test():
            raise NodeServerAddressInUseError(
                'A process is already listening at {server_url}'.format(
                    server_url=self.get_server_url()
                )
            )
        # Ensure that the process is terminated if the python process stops
        if self.shutdown_on_exit:
            atexit.register(self.stop)
        cmd = (PATH_TO_NODE,)
        if debug:
            cmd += ('debug',)
        # NOTE(review): self.port is passed into the argv tuple directly —
        # presumably SERVER_PORT is configured as a string; confirm.
        cmd += (
            self.path_to_source,
            '--address', self.address,
            '--port', self.port,
            '--test-endpoint', self._test_endpoint,
            '--expected-test-output', self._expected_test_output,
            '--add-service-endpoint', self._add_service_endpoint,
            '--expected-add-service-output', self._expected_add_service_output,
            '--get-endpoints-endpoint', self._get_endpoints_endpoint,
            '--blacklisted-endpoints', json.dumps(self._blacklisted_endpoints),
        )
        if blocking:
            cmd += (
                '--expected-startup-output',
                'Node server listening at {server_url}'.format(server_url=self.get_server_url()),
            )
        else:
            cmd += ('--expected-startup-output', self._expected_startup_output,)
        self.log('Starting process with {cmd}'.format(cmd=cmd))
        if blocking:
            # Start the server in a blocking process
            subprocess.call(cmd)
            return
        # While rendering templates Django will silently ignore some types of exceptions,
        # so we need to intercept them and raise our own class of exception
        try:
            # TODO: set NODE_ENV. See `env` arg https://docs.python.org/2/library/subprocess.html#popen-constructor
            self._process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        except (TypeError, AttributeError):
            msg = 'Failed to start server with {arguments}'.format(arguments=cmd)
            six.reraise(NodeServerStartError, NodeServerStartError(msg), sys.exc_info()[2])
        # Block until the server is ready and pushes the expected output to stdout
        output = self._process.stdout.readline()
        output = output.decode('utf-8')
        if output != self._expected_startup_output:
            # Read in the rest of the error message
            output += self._process.stdout.read().decode('utf-8')
            if 'EADDRINUSE' in output:
                raise NodeServerAddressInUseError(
                    (
                        'Port "{port}" already in use. '
                        'Try changing the DJANGO_NODE[\'SERVER_PORT\'] setting. '
                        '{output}'
                    ).format(
                        port=self.port,
                        output=output,
                    )
                )
            else:
                raise NodeServerStartError(output)
        self.has_started = True
        self.has_stopped = False
        # Ensure that the server is running
        if not self.test():
            self.stop()
            raise NodeServerStartError(
                'Server does not appear to be running. Tried to test the server at "{test_endpoint}"'.format(
                    test_endpoint=self._test_endpoint,
                )
            )
        self.log('Started process')

    def ensure_started(self):
        """Start the server if it is not already marked as running."""
        if not self.has_started:
            self.start()

    def stop(self):
        """Terminate the managed child process, if one is running."""
        if self._process is not None and not self.has_stopped:
            self._process.terminate()
            self.log('Terminated process')
        self.has_stopped = True
        self.has_started = False

    def get_server_url(self):
        """Return e.g. 'http://127.0.0.1:port', or None if any part is unset."""
        if self.protocol and self.address and self.port:
            return '{protocol}://{address}:{port}'.format(
                protocol=self.protocol,
                address=self.address,
                port=self.port,
            )

    def absolute_url(self, url):
        """Join *url* onto the server base URL, inserting a '/' if needed."""
        separator = '/' if not url.startswith('/') else ''
        return '{server_url}{separator}{url}'.format(
            server_url=self.get_server_url(),
            separator=separator,
            url=url,
        )

    def _html_to_plain_text(self, html):
        """Roughly flatten an HTML error page into readable plain text."""
        html = html_unescape(html)
        # NOTE(review): .decode('utf-8') assumes a bytestring — this looks
        # like the Python 2 path (the file targets py2/py3 via six); confirm
        # it is not reached with a py3 str.
        html = html.decode('utf-8')
        # Replace HTML break rules with new lines
        html = html.replace('<br>', '\n')
        # Remove multiple spaces
        html = re.sub(' +', ' ', html)
        return html

    def _validate_response(self, response, url):
        """Raise NodeServerError for any non-200 response; otherwise pass it through."""
        if response.status_code != 200:
            error_message = self._html_to_plain_text(response.text)
            message = 'Error at {url}: {error_message}'
            if six.PY2:
                # Prevent UnicodeEncodeError
                message = unicode(message)
            raise NodeServerError(message.format(
                url=url,
                error_message=error_message,
            ))
        return response

    def _send_request(self, func, url, **kwargs):
        """Call *func* (a requests verb) and translate transport errors into our exceptions."""
        timeout = kwargs.pop('timeout', self.timeout)
        try:
            return func(url, timeout=timeout, **kwargs)
        except ConnectionError as e:
            six.reraise(NodeServerConnectionError, NodeServerConnectionError(url, *e.args), sys.exc_info()[2])
        except (ReadTimeout, Timeout) as e:
            six.reraise(NodeServerTimeoutError, NodeServerTimeoutError(url, *e.args), sys.exc_info()[2])

    def get_server_name(self):
        """Return the class name, used in log prefixes."""
        return self.__class__.__name__

    def log(self, message):
        """Log *message* at INFO level, prefixed with server name and URL."""
        self.logger.info(
            '{server_name} [Address: {server_url}] {message}'.format(
                server_name=self.get_server_name(),
                server_url=self.get_server_url(),
                message=message,
            )
        )

    def test(self):
        """Return True iff a server at the configured address answers the test endpoint."""
        if self.address is None or self.port is None:
            return False
        self.log('Testing server at {test_endpoint}'.format(test_endpoint=self._test_endpoint))
        absolute_url = self.absolute_url(self._test_endpoint)
        try:
            response = self._send_request(
                requests.get,
                absolute_url,
                timeout=self.test_timeout,
            )
        except (NodeServerConnectionError, NodeServerTimeoutError):
            return False
        if response.status_code != 200:
            return False
        return response.text == self._expected_test_output

    def get_endpoints(self):
        """Return the list of endpoints currently registered on the server."""
        self.ensure_started()
        response = self.get_service(self._get_endpoints_endpoint)
        endpoints = json.loads(response.text)
        return [endpoint for endpoint in endpoints]

    def service_factory(self, endpoint):
        """Return a callable that GETs *endpoint* with keyword args as query params."""
        def service(**kwargs):
            return self.get_service(endpoint, params=kwargs)
        service.endpoint = endpoint
        service.server_name = self.get_server_name()
        return service

    def add_service(self, endpoint, path_to_source):
        """Register a JS service file at *endpoint* and return a callable for it.

        If a non-reserved endpoint is already registered, the existing
        registration is reused without contacting the server again.
        """
        self.ensure_started()
        if endpoint not in self._blacklisted_endpoints and endpoint in self.get_endpoints():
            return self.service_factory(endpoint)
        self.log('Adding service at "{endpoint}" with source "{path_to_source}"'.format(
            endpoint=endpoint,
            path_to_source=path_to_source,
        ))
        absolute_url = self.absolute_url(self._add_service_endpoint)
        response = self._send_request(
            requests.post,
            absolute_url,
            data={
                'endpoint': endpoint,
                'path_to_source': path_to_source,
            },
        )
        response = self._validate_response(response, absolute_url)
        if response.text != self._expected_add_service_output:
            error_message = self._html_to_plain_text(response.text)
            raise ErrorAddingService(error_message)
        return self.service_factory(endpoint=endpoint)

    def get_service(self, endpoint, params=None):
        """GET *endpoint* with *params*, validating the response (raises on non-200)."""
        self.ensure_started()
        self.log('Sending request to endpoint "{url}" with params "{params}"'.format(
            url=endpoint,
            params=params,
        ))
        absolute_url = self.absolute_url(endpoint)
        response = self._send_request(
            requests.get,
            absolute_url,
            params=params,
        )
        return self._validate_response(response, endpoint)
991,843 | 296cba30f93eda2b5e2a3a40ce27e1b7226445f2 | # Generated by Django 2.1.4 on 2018-12-14 18:52
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: adds nullable private_key and summary text fields to baseactor.

    dependencies = [
        ('clubeleven_models', '0007_baseactor_public_key'),
    ]

    operations = [
        migrations.AddField(
            model_name='baseactor',
            name='private_key',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='baseactor',
            name='summary',
            field=models.TextField(blank=True, null=True),
        ),
    ]
991,844 | 0d2c595953564cb1dcf27dde5a7a02e5ac1bdc63 | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 28 21:44:25 2018
@author: zhujinhua
"""
#coding:utf-8
import urllib
from bs4 import BeautifulSoup
import csv
import re
import sys,imp
imp.reload(sys)
#sys.setdefaultencoding('utf-8')
urbanlist={'gulou':100,
'jianye':61,
'qinhuai':100,
'xuanwu':59,
'yuhuatai':28,
'qixia':55,
'jiangning':100,
'pukou':100
}
# open the csv output file ('a+' = append mode; 'wb' would overwrite)
csvfile = open('lianjia_nj_ershou.csv', 'a+',encoding='utf8',newline='')
writer = csv.writer(csvfile)
def getpage(urban,pagenum):
    """Scrape Nanjing Lianjia second-hand listings for one district.

    Fetches result pages 10..pagenum (NOTE(review): the range starts at 10 —
    presumably earlier pages were scraped in a previous run; confirm) and
    appends one row per listing to the module-level csv `writer`.
    """
    for k in range(10,pagenum+1):
        # fetch the listing page
        response = urllib.request.urlopen('https://nj.lianjia.com/ershoufang/'+urban+'/pg'+str(k))
        the_page = response.read()
        # parse the html
        soup = BeautifulSoup(the_page,"lxml")
        list0=[]        # district name
        list1=[]        # listing title
        listregion=[]   # estate / region name
        listdesc=[]     # house description (rooms / size)
        listflood=[]    # building / floor info
        list2=[]        # position link text
        list3=[]        # total price
        list4=[]        # unit price (per square metre)
        list5=[]
        list6=[]
        # extract one record per listing <li class="clear">
        count=0
        for tag in soup.find_all(name="li", attrs={"class": re.compile("clear")}):
            count+=1
            # district column
            list0.append(urban)
            ta1 = tag.find(name="div",attrs={"class": re.compile("title")}).find("a")
            list1.append(ta1.string)
            # address / estate name (0 if missing)
            ta2 = tag.find(name="div", attrs={"class": re.compile("address")})
            t2 = ta2.find(name="a")
            if t2 != None:
                listregion.append(t2.string)
            else:
                listregion.append(0)
            desc=ta2.find(name="div",attrs={"class":re.compile("houseInfo")})
            if desc!=None and len(desc)>=3:
                desc=desc.contents[2]
                listdesc.append(desc)
            else:
                listdesc.append(0)
            # building / floor information
            ta2 = tag.find(name="div", attrs={"class": re.compile("positionInfo")})
            flood=ta2.contents[1]
            listflood.append(flood)
            t2 = ta2.find(name="a")
            if t2 != None:
                list2.append(t2.string)
            else:
                list2.append(0)
            # total price field
            ta3 = tag.find(name="div", attrs={"class": re.compile("totalPrice")})
            list3.append(ta3.find(name="span").string)
            # unit price (per square metre) field
            ta4 = tag.find(name="div", attrs={"class": re.compile("unitPrice")})
            list4.append(ta4.find(name="span").string)
        # merge the extracted columns into rows
        data = []
        print(list0)
        print(list1)
        print(listregion)
        print(listdesc)
        print(list2)
        print(list3)
        print(list4)
        print(listflood)
        for i in range(0,count):
            print("i="+str(i))
            data.append((list0[i],list1[i], listregion[i],listdesc[i],list2[i], list3[i], list4[i], listflood[i]))
        print(data)
        # append the merged rows to the csv
        writer.writerows(data)
        #csvfile.close()
        print("第" + str(k) + "页完成")
getpage('gulou', 100)
'''
#根据网页数设置范围
for urban in urbanlist.keys():
print('Now get '+urban+' ',urbanlist[urban])
getpage(urban, urbanlist[urban])
''' |
991,845 | d942af46d037f9dbe05a1be8082fab948c2d12c3 | # https://www.codewars.com/kata/5838b5eb1adeb6b7220000f5/train/python
'''
A website named "All for Five", sells many products to registered clients that
cost all the same (5 dollars, the price is not relevant). Every user receives an
alphanumeric id code, like D085. The page tracks all the purchases, that the
clients do. For each purchase of a certain client, his/her id user will be
registered once.
You will be given an uncertain number of arrays that contains strings
(the id code users). Each array will represent the purchases that the users
do in a month. You should find the total number of purchases of the users
that have bought in all the given months (the clients that their id code are
present in all the arrays). e.g.:
'''
import pandas as pd
from collections import Counter
def id_best_users(*args):
    """Solve the "All for Five" kata.

    Each positional argument is one month's list of purchaser ids. Only
    clients appearing in EVERY month count; for those, sum their purchases
    across all months. Returns [[total, [ids...]], ...] grouped by total,
    totals descending, ids sorted alphabetically within each group.
    (The previous version was an unfinished stub that always returned [[]].)
    """
    if not args:
        return []
    # clients present in every month
    loyal = set(args[0]).intersection(*map(set, args[1:]))
    counts = Counter(uid for month in args for uid in month if uid in loyal)
    # group ids by identical purchase totals
    grouped = {}
    for uid, total in counts.items():
        grouped.setdefault(total, []).append(uid)
    return [[total, sorted(grouped[total])] for total in sorted(grouped, reverse=True)]
a1 = ['A042', 'B004', 'A025', 'A042', 'C025']
a2 = ['B009', 'B040', 'B004', 'A042', 'A025', 'A042']
a3 = ['A042', 'B004', 'A025', 'A042', 'C025', 'B009', 'B040', 'B004', 'A042', 'A025', 'A042']
a4 = ['A042', 'A025', 'B004']
test_values = id_best_users(a1, a2, a3, a4)
exact_values = [[9, ['A042']], [5, ['A025', 'B004']]]
|
991,846 | 1d3db6c50302c75843c6a3755f16bc97b1fb5594 | # Importing Plugins
import streamlit as st
#importing Newspaper3k
import newspaper
from newspaper import Article
#-------------------------------------------------------------
# Functions
# article Summarizer
def run_api(user_url_input):
    """Download, parse and NLP-summarize the article at the given URL."""
    piece = Article(user_url_input)
    piece.download()
    piece.parse()
    piece.nlp()
    return piece.summary
#--------------------------------------------------------------
# Formating
#Text Input
user_url_input = st.text_input("Enter URL of article", '')
if st.button('Summarize the Article'):
st.write(run_api(user_url_input))
else:
st.write('no_url/not_executed')
|
991,847 | 6fdd6ed02acbc98ef1a96108de4a18952ceab804 | from django.test import TestCase, RequestFactory
from nose.tools import *
from django_nose.tools import *
from . import factories as f
from ..forms import PostForm
class TestPostForm(TestCase):
    """Form-level validation tests for PostForm."""

    def test_post_to_closed_thread_not_valid(self):
        # Posting into a closed thread must fail validation with the
        # non-field error code 'closed'.
        t = f.ThreadFactory(board=f.BoardFactory(), is_closed=True)
        form = PostForm({'raw_body': 'Body'}, thread=t, request=RequestFactory().get('/'))
        ok_(not form.is_valid())
        eq_(form.errors.as_data()['__all__'][0].code, 'closed')
|
991,848 | dffe12eba1e536d832541877429cb4ba049ea760 | import numpy as np
from dezero import Variable
from dezero import as_array
import dezero.functions as F
import matplotlib.pyplot as plt
from dezero.models import MLP
from dezero import optimizers
import numpy as np
from dezero import Variable
from dezero.utils import plot_dot_graph
import dezero.functions as F
# Fit a small MLP to noisy sin(2*pi*x) samples with momentum SGD,
# then plot the data and the learned curve.
np.random.seed(0)  # reproducible data
x = np.random.rand(100, 1)
y = np.sin(2 * np.pi * x) + np.random.rand(100, 1)
lr = 0.2
max_iter = 800
hidden_size = (10, 1)

model = MLP(hidden_size)
optimizer = optimizers.MomentumSGD(lr).setup(model)

# Training loop
for i in range(max_iter):
    y_pred = model(x)
    loss = F.mean_squared_error(y, y_pred)

    model.cleargrads()
    loss.backward()
    optimizer.update()
    # NOTE(review): with max_iter=800 this only prints the loss at i=0 —
    # presumably leftover from a longer run; confirm intent.
    if i % 1000 == 0:
        print(loss)

# Plot
plt.scatter(x, y, s=10)
plt.xlabel('x')
plt.ylabel('y')
t = np.arange(0, 1, .01)[:, np.newaxis]
y_pred = model(t)
plt.plot(t, y_pred.data, color='r')
plt.show()
991,849 | aab5282e160ded426e60acf291074a91869f6cec | # Generated by Django 3.0.4 on 2020-05-01 16:02
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: replaces the Blog model with Blog_Entries + Category,
    # linking entries to a category via a nullable FK (SET_NULL on delete).

    dependencies = [
        ('blog', '0002_auto_20200501_1557'),
    ]

    operations = [
        migrations.CreateModel(
            name='Blog_Entries',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200, verbose_name='Title')),
                ('image', models.ImageField(blank=True, null=True, upload_to='')),
                ('tags', models.CharField(blank=True, max_length=200, verbose_name='Tags')),
                ('pub_date', models.DateTimeField(default=datetime.datetime.now, verbose_name='Published Date')),
                ('text', models.TextField(blank=True, verbose_name='Text')),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('category', models.CharField(max_length=200, verbose_name='Category')),
                ('description', models.CharField(max_length=600, verbose_name='Description')),
            ],
        ),
        migrations.DeleteModel(
            name='Blog',
        ),
        migrations.AddField(
            model_name='blog_entries',
            name='category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='blog.Category'),
        ),
    ]
|
991,850 | fed64721879ffe82d5afe27fb42db12e5a1eadae | #!/usr/bin/env python
# coding: utf-8
# author: LiuYue
# e-mail: liuyue@mobike.com
# blog : http://liuyue.club/
# Pw @ 2018/4/18 下午6:55
import os
import datetime
import logging
basedir = os.path.abspath(os.path.dirname(__file__))
class DevelopmentConfig(object):
    """Flask configuration for local development."""

    ENV = "development"
    DEBUG = True
    # NOTE(review): hard-coded secret key and passwordless MySQL root login —
    # acceptable only for local development; never reuse in production.
    SECRET_KEY = "DtPrdVSm22gWpstYbT9ti9b6TvhbdqIr"
    SQLALCHEMY_DATABASE_URI = 'mysql://root:@localhost:3306/lcmd?charset=utf8'
    SQLALCHEMY_COMMIT_ON_TEARDOWN = True
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    SQLALCHEMY_EXPIRE_ON_COMMIT = True
    # sessions expire after one hour
    PERMANENT_SESSION_LIFETIME = datetime.timedelta(minutes=60)
    LOG_LEVEL = logging.DEBUG
    LOG_FILE = os.path.join(basedir, "logs/access.log")
|
991,851 | acabc29f0e78b725ff94fe77baed3d4f2861ad77 | import time
import pigpio
import Adafruit_GPIO.SPI as SPI
import Adafruit_MCP3008
import sonar_scan
# Hardware SPI configuration:
SPI_PORT = 0
SPI_DEVICE = 0
mcp = Adafruit_MCP3008.MCP3008(spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE))
FREQ = 150
DC = 20
PWM1 = 17
REFRESH=1000
CHARSET={
' ': 0b00000000,
'0': 0b11111100,
'1': 0b01100000,
'2': 0b11011010,
'3': 0b11110010,
'4': 0b01100110,
'5': 0b10110110,
'6': 0b00111110,
'7': 0b11100000,
'8': 0b11111110,
'9': 0b11100110,
' .': 0b00000001,
'0.': 0b11111101,
'1.': 0b01100001,
'2.': 0b11011011,
'3.': 0b11110011,
'4.': 0b01100111,
'5.': 0b10110111,
'6.': 0b00111111,
'7.': 0b11100001,
'8.': 0b11111111,
'9.': 0b11100111,
'A': 0b11101110,
'b': 0b00111110,
'C': 0b10011100,
'c': 0b00011010,
'd': 0b01111010,
'E': 0b10011110,
'F': 0b10001110,
'H': 0b01101110,
'h': 0b00101110,
'L': 0b00011100,
'l': 0b01100000,
'O': 0b11111100,
'o': 0b00111010,
'P': 0b11001110,
'S': 0b10110110,
}
# This defines which gpios are connected to which segments
# a b c d e f g dp
SEG2GPIO=[ 4, 27, 18, 22, 23, 13, 24, 19]
# This defines the gpio used to switch on a LCD
# 1 2 3 4 5
LCD2GPIO=[ 5, 6, 16, 25]
wid = None
showing = [0]*len(LCD2GPIO)
CHARS=len(CHARSET)
def display(lcd, char):
    """Set digit *lcd* to show *char*; unknown characters blank the digit."""
    showing[lcd] = CHARSET.get(char, 0)
def update_display():
    """Rebuild the multiplexing waveform from `showing` and start it.

    One pulse per digit: each pulse drives exactly one digit's common pin
    with its segment gpios set, for REFRESH microseconds. The waveform
    repeats inside the pigpio daemon, so the display keeps multiplexing
    with no further CPU work; the previous waveform is deleted only after
    the new one is transmitting. NOTE(review): the common pins appear to be
    active-low (the selected digit's gpio goes into the *off* mask) —
    confirm against the wiring.
    """
    global wid
    wf = []
    for lcd in range(len(LCD2GPIO)):
        segments = showing[lcd] # segments on for current LCD
        on = 0 # gpios to switch on
        off = 0 # gpios to switch off
        # set this LCD on, others off
        for L in range(len(LCD2GPIO)):
            if L == lcd:
                off |= 1<<LCD2GPIO[L] # switch LCD on
            else:
                on |= 1<<LCD2GPIO[L] # switch LCD off
        # set used segments on, unused segments off
        for b in range(8):
            if segments & 1<<(7-b):
                on |= 1<<SEG2GPIO[b] # switch segment on
            else:
                off |= 1<<SEG2GPIO[b] # switch segment off
        wf.append(pigpio.pulse(on, off, REFRESH))
        #print(lcd, on, off, REFRESH) # debugging only
    p.wave_add_generic(wf) # add pulses to waveform
    new_wid = p.wave_create() # commit waveform
    p.wave_send_repeat(new_wid) # transmit waveform repeatedly
    if wid is not None:
        p.wave_delete(wid) # delete no longer used waveform
    #print("wid", wid, "new_wid", new_wid)
    wid = new_wid
p = pigpio.pi()
p.set_mode(PWM1,pigpio.OUTPUT)
p.set_PWM_frequency(PWM1,FREQ)
p.set_PWM_dutycycle(PWM1,DC)
for segment in SEG2GPIO:
p.set_mode(segment, pigpio.OUTPUT)
for lcd in LCD2GPIO:
p.set_mode(lcd, pigpio.OUTPUT)
time_flag = 0
start_time = 0
char=0
ck = CHARSET.keys()
sonar = sonar_scan.ranger(p, 21, 20, 2600)
try:
while True:
sonar.trig()
time.sleep(0.1)
distanz = (sonar.read()*34300)/(2*1000000)
values = [0]*8
for i in range(8):
values[i] = mcp.read_adc(i)
poti = (255*values[0])/1024
temp = (500*values[1])/1024
poti_proz = (poti * 100) / 255
poti_strg = str(poti_proz)
temp_strg = str(temp)
dist_strg = str(distanz)
p.set_PWM_dutycycle(PWM1,poti)
time_strg=str(time.strftime('%X'))
if distanz < 30:
time_flag = 1
start_time = time.time()
if poti_proz>20:
if len(dist_strg) > 1:
display(0,str(dist_strg[0]))
display(1,str(dist_strg[1]))
else:
display(0,' ')
display(1,str(dist_strg[0]))
if len(temp_strg) > 1:
display(2,str(temp_strg[0]))
display(3,str(temp_strg[1]))
else:
display(2,' ')
display(3,str(temp_strg[0]))
elif time_flag>0:
display(0,time_strg[0])
display(1,time_strg[1]+'.')
display(2,time_strg[3])
display(3,time_strg[4])
else:
display(0,' ')
display(1,' ')
display(2,' ')
display(3,' ')
update_display()
if (time.time()-start_time) > 3:
time_flag=0
except KeyboardInterrupt:
pass
sonar.cancel()
p.wave_delete(wid)
p.stop()
|
991,852 | f768f0e699f3bf2205cbe1a40286f0f70bd23eab | # -*- coding: utf-8 -*-
"""
Created on Tue Aug 21 10:12:28 2018
@author: eumartinez
"""
from BreadthFirst import BreadthFirst
from WaterJug import WaterJug
from Puzzle import Puzzle
# Search problem: 8-puzzle, initial state then goal state (0 = blank tile).
pz=Puzzle([1,2,3,8,6,0,7,5,4],[1,2,3,8,0,4,7,6,5])
# Alternative water-jug problem kept for reference:
#wj=WaterJug(4,3,(4,3),(2,0))
#wj=WaterJug()
bf=BreadthFirst(pz)
#bf=BreadthFirst(wj)
sol=bf.run()
print('Solution: '+str(sol))
|
991,853 | 606dabd38a704a0397d8419fe2c93e7213884408 | from typing import List
class Solution:
    def findDuplicate(self, nums: List[int]) -> int:
        """Return the duplicated value in nums (n+1 ints in 1..n).

        Treats nums as a function i -> nums[i]; the duplicate is the entry
        point of the resulting cycle (Floyd's tortoise-and-hare).
        O(1) extra space, nums is never modified.
        """
        # Phase 1: advance the pointers at speeds 1x and 2x until they meet
        # somewhere inside the cycle.
        tortoise = nums[0]
        hare = nums[nums[0]]
        while tortoise != hare:
            tortoise = nums[tortoise]
            hare = nums[nums[hare]]
        # Phase 2: restart one pointer at index 0; moving both one step at a
        # time, they meet exactly at the cycle entry = the duplicated value.
        tortoise = 0
        while tortoise != hare:
            tortoise = nums[tortoise]
            hare = nums[hare]
        return tortoise
# 给定一个包含 n + 1 个整数的数组 nums,其数字都在 1 到 n 之间(包括 1 和 n),可知至少存在一个重复的整数。假设只有一个重复的整数,找出这个重复的数。
# 示例 1:
# 输入: [1,3,4,2,2]
# 输出: 2
# 示例 2:
# 输入: [3,1,3,4,2]
# 输出: 3
# 说明:
# 不能更改原数组(假设数组是只读的)。
# 只能使用额外的 O(1) 的空间。
# 时间复杂度小于 O(n2) 。
# 数组中只有一个重复的数字,但它可能不止重复出现一次。
# 链接:https://leetcode-cn.com/problems/find-the-duplicate-number |
991,854 | db96d073ca7d6d2a0a863ca8ec9e5d148b5519a4 | """
inkex.py
A helper module for creating Inkscape extensions
Copyright (C) 2005,2010 Aaron Spike <aaron@ekips.org> and contributors
Contributors:
Aurélio A. Heckert <aurium(a)gmail.com>
Bulia Byak <buliabyak@users.sf.net>
Nicolas Dufour, nicoduf@yahoo.fr
Peter J. R. Moulder <pjrm@users.sourceforge.net>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
# All XML namespace prefixes found in a standard Inkscape document, mapped to
# their namespace URIs.
NSS = {
    'sodipodi' :'http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd',
    'cc'       :'http://creativecommons.org/ns#',
    'ccOLD'    :'http://web.resource.org/cc/',
    'svg'      :'http://www.w3.org/2000/svg',
    'dc'       :'http://purl.org/dc/elements/1.1/',
    'rdf'      :'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
    'inkscape' :'http://www.inkscape.org/namespaces/inkscape',
    'xlink'    :'http://www.w3.org/1999/xlink',
    'xml'      :'http://www.w3.org/XML/1998/namespace'
}


def addNS(tag, ns=None):
    """Return *tag* qualified in Clark notation ('{uri}tag') for prefix *ns*.

    The tag is returned unchanged when no known prefix is given, when the tag
    is empty, or when it is already namespace-qualified.
    """
    if ns is None or len(ns) == 0 or ns not in NSS:
        return tag
    if len(tag) == 0 or tag[0] == '{':
        return tag
    return "{%s}%s" % (NSS[ns], tag)
|
991,855 | 9d798d9165b60480b2f6bfac39980c740de99873 | from django.urls import path
from django.urls import re_path
from . import views
from django.conf.urls import url
#from pacient_result.views import ArticleDetailView
from pacient_result.views import PacientDetail
urlpatterns = [
    # Dashboard landing page.
    path('dashboard/', views.index, name="dashboard"),
    # Full patient list (class-based ListView).
    url(r'^pacients/$', views.PacientList.as_view(), name='pacients'),
    # Detail page for one patient, keyed by primary key.
    path('dashboard/<pk>', PacientDetail.as_view(), name="pacient-detail"),
    # Create a new recommendation.
    url(r'^recomendation/create/$', views.Recomend_create.as_view(), name='recomend_create'),
    # All recommendations.
    url(r'^recomendations/$', views.RecomendList.as_view(), name='recomends'),
    # Recommendations for a patient -- presumably filtered in the view; confirm.
    url(r'^recomendations_pacient/$', views.RecomendList_pacient.as_view(), name='recomends_pac'),
    #path('publishers/', ArticleListView.as_view()),
]
|
991,856 | 6ee656917aa9357fd0011ec520a5082f2c704368 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = '北极鱼'
from index import application
import tornado.wsgi
import tornado.httpserver
import tornado.ioloop
if __name__ == '__main__':
    # Wrap the WSGI app so Tornado's single-threaded IOLoop can serve it.
    container = tornado.wsgi.WSGIContainer(application)
    http_server = tornado.httpserver.HTTPServer(container)
    # NOTE(review): port 80 usually requires elevated privileges.
    http_server.listen(80)
    tornado.ioloop.IOLoop.instance().start()
991,857 | b72b107a143326ead9519954795781903158a7c5 | s = []
# Among all 3-digit substrings of the input digit string, print the minimum
# absolute difference from 753.
#
# Fixes over the original:
#  * the result was printed twice (duplicate `print(min)`);
#  * the builtin `min` was shadowed by a variable of the same name.
s = input()
best = min(abs(753 - int(s[i:i + 3])) for i in range(len(s) - 2))
print(best)
|
991,858 | 044e874b8f8a9545a71441fb36d1ab06d1a1391e | import pandas as pd
from legacy.read_pics import get_pics_from_file
from legacy.data_clean import detect_outliers
def generate_df(pics, label):
    """Build a DataFrame from the raw `pics` records, tagging each row with *label*."""
    frame = pd.DataFrame.from_records(pics)
    frame['label'] = label
    return frame
def generate_dataset_dataframe(alphanum, touchesspe, clean_data=False):
    """Load keystroke traces for the alphanumeric and special keys.

    Parameters
    ----------
    alphanum, touchesspe : iterables of key labels; a
        ``../input/Hackaton/data/pics_<label>.bin`` file is read per label.
    clean_data : bool
        When True, also build a second DataFrame with outliers removed
        (via ``detect_outliers``), re-tagged with the key label.

    Returns
    -------
    (raw_df, cleaned_df) : cleaned_df is empty unless clean_data is True.

    Fix: ``DataFrame.append`` (removed in pandas 2.0) replaced by collecting
    frames and a single ``pd.concat``; the two identical per-collection loops
    were merged into one.
    """
    frames = []
    cleaned_frames = []
    for c in list(alphanum) + list(touchesspe):
        pics, info = get_pics_from_file("../input/Hackaton/data/pics_" + c + ".bin")
        df_test = generate_df(pics, c)
        if clean_data:
            df_temp = detect_outliers(df_test)
            df_temp['label'] = c
            cleaned_frames.append(df_temp)
        frames.append(df_test)
    # Single concat preserves the original per-file indices, like append did.
    df_1 = pd.concat(frames) if frames else pd.DataFrame()
    df_cleaned = pd.concat(cleaned_frames) if cleaned_frames else pd.DataFrame()
    print("Loaded Dataframe")
    return df_1, df_cleaned
991,859 | 41662aaa77a60c4976b740c678cd69f20673e877 | # coding: utf-8
import os
import unittest
from lm.config import LM_MODEL_DIR
from lm.config import RESOURCE_DIR
from lm import lm
class MyTestCase(unittest.TestCase):
    """Exploratory tests for the `lm` hanzi/pinyin language-model helpers.

    Most cases only print results (the leading assertTrue(True) calls are
    no-ops); they require corpus files under RESOURCE_DIR and trained ARPA
    models under LM_MODEL_DIR, so they are smoke tests rather than unit tests.
    """

    def test_something(self):
        # NOTE(review): IDE-template placeholder that always fails; probably
        # meant to be removed or replaced with a real assertion.
        self.assertEqual(True, False)

    def test_build_train_data(self):
        """Split a raw corpus into parallel hanzi / pinyin training files."""
        self.assertTrue(True)
        lm.build_lm_train_data(os.path.join(RESOURCE_DIR, 'mobile', 'std.txt'),
                               os.path.join(RESOURCE_DIR, 'tmp', 'std.hanzi.txt'),
                               os.path.join(RESOURCE_DIR, 'tmp', 'std.pinyin.txt'))

    def test_lm_preprocess(self):
        """Preprocess mixed Latin/hanzi text into hanzi and pinyin sequences."""
        self.assertTrue(True)
        txt = 'Iphonex不知道会不会火, Iphone8肯定是不行了。'
        hanzis, pnyins = lm.BaseLM.preprocess(txt)
        print(hanzis)
        print(pnyins)

    def test_hanzi_lm(self):
        """Score a hanzi string with the character-level ARPA model."""
        self.assertTrue(True)
        hanzi_model_file = os.path.join(LM_MODEL_DIR, 'hanzi.arpa')
        hanziLM = lm.HanziLM(hanzi_model_file)
        prob = hanziLM.predict_prob('很好')
        print(prob)

    def test_pinyin_lm(self):
        """Score a sentence with the pinyin-level ARPA model."""
        self.assertTrue(True)
        pnyin_model_file = os.path.join(LM_MODEL_DIR, 'pinyin.arpa')
        pinyinLM = lm.PinyinLM(pnyin_model_file)
        prob = pinyinLM.predict_prob('手机性价比很高。')
        print(prob)

    def test_lm(self):
        """Rate several sentences with the combined hanzi+pinyin model."""
        self.assertTrue(True)
        hanzi_model_file = os.path.join(LM_MODEL_DIR, 'hanzi.arpa')
        pnyin_model_file = os.path.join(LM_MODEL_DIR, 'pinyin.arpa')
        model = lm.LM(hanzi_model_file, pnyin_model_file)
        txts = ['手机性价比很高。', '吃好喝好啊', '像素很高', '相素很高', '分辨率低', '看不清楚', '反应很快', '反映很快']
        for txt in txts:
            rate = model.rate(txt)
            print(txt, rate)

    def test_lm_pinyin2hanzi(self):
        """Interactive pinyin -> hanzi conversion loop.

        NOTE(review): loops forever reading stdin and swallows every
        exception -- unsuitable for automated test runs.
        """
        self.assertTrue(True)
        hanzi_model_file = os.path.join(LM_MODEL_DIR, 'hanzi.arpa')
        pnyin_model_file = os.path.join(LM_MODEL_DIR, 'pinyin.arpa')
        model = lm.LM(hanzi_model_file, pnyin_model_file)
        while True:
            try:
                line = input("请输入一串拼音:")
                hanzis = model.pinyin2hanzi(line.strip().split())
                print(hanzis)
            except Exception:
                pass


if __name__ == '__main__':
    unittest.main()
|
991,860 | 60a117329607d8b7cc1611cf25ca572db155f330 | # import os
# import sys
# import gym
# import math
# import numpy as np
# from Tools import SumoSDK
# from Tools.Statistics import Observer
# from RLlib import DDPG
# if 'SUMO_HOME' in os.environ:
# tools = os.path.join(os.environ['SUMO_HOME'], 'tools')
# sys.path.append(tools)
# else:
# sys.exit("please declare environment variable 'SUMO_HOME'")
# import traci
#
# SIM_STEP_LENGTH = 0.1
# N_CAR = 25
# ROUTE = ['edge290_0', 'edge390_0']
# ROUTE_LENGTH = 283.
# DQN_CAR = ['999']
#
# TURBULENCE_TERM = 20
# START_TURBULENCE = 2500
#
#
# class CustomEnv(gym.Env):
# metadata = {'render.modes': ['human']}
#
# def __init__(self):
# super(CustomEnv, self).__init__()
# self.action_space = gym.spaces.Box(low=-2, high=2, shape=(1,), dtype=np.float)
# # self.action_space = gym.spaces.Discrete(50)
# self.observation_space = gym.spaces.Box(low=np.zeros((4,)),
# high=np.ones((4,)),
# shape=(4,),
# dtype=np.float)
# # self.observation_space = gym.spaces.Box(low=np.zeros((48,)),
# # high=np.hstack((15 * np.ones((24,)), 650 * np.ones((24,)))),
# # shape=(48,),
# # dtype=np.float)
#
# self.route = ROUTE
# self.dqn_car = DQN_CAR
# self.observer = Observer()
# # self.reset_pos_x = [100.0, 98.76883405951378, 95.10565162951535, 89.10065241883679, 80.90169943749474,
# # 70.71067811865476, 58.778525229247315, 45.39904997395468, 30.901699437494745,
# # 15.643446504023093, 6.123233995736766e-15, -15.643446504023103, -30.901699437494734,
# # -45.39904997395467, -58.7785252292473, -70.71067811865474, -80.90169943749473,
# # -89.10065241883677, -95.10565162951535, -98.76883405951376, -100.0, -98.76883405951378,
# # -95.10565162951535, -89.10065241883682, -80.90169943749476, -70.71067811865477,
# # -58.77852522924732, -45.39904997395469, -30.901699437494756, -15.643446504023103,
# # -1.8369701987210297e-14, 15.643446504023068, 30.901699437494724, 45.39904997395466,
# # 58.77852522924729, 70.71067811865474, 80.90169943749473, 89.10065241883677,
# # 95.10565162951535, 98.76883405951376]
# # self.reset_pos_y = [200.0, 215.64344650402307, 230.90169943749473, 245.39904997395467, 258.7785252292473,
# # 270.71067811865476, 280.90169943749476, 289.1006524188368, 295.10565162951536,
# # 298.7688340595138, 300.0, 298.7688340595138, 295.10565162951536, 289.1006524188368,
# # 280.90169943749476, 270.71067811865476, 258.7785252292473, 245.39904997395467,
# # 230.90169943749476, 215.6434465040231, 200.0, 184.35655349597693, 169.09830056250522,
# # 154.60095002604538, 141.2214747707527, 129.28932188134524, 119.09830056250527,
# # 110.89934758116323, 104.89434837048465, 101.23116594048624, 100.0, 101.23116594048622,
# # 104.89434837048464, 110.89934758116321, 119.09830056250524, 129.28932188134524,
# # 141.22147477075265, 154.6009500260453, 169.09830056250524, 184.3565534959769]
# self.reset_pos_x = [45.0, 44.276831486938335, 42.13056917878817, 38.630195712083975, 33.88821597016249, 28.05704108364301, 21.324089811284942, 13.905764746872634, 6.040496961794498, -2.018917365773159, -10.013442028034145, -17.68612642442656, -24.79036416534461, -31.097819204408903, -36.40576474687263, -40.54359905560886, -43.378328731313395, -44.818843229785756, -44.818843229785756, -43.3783287313134, -40.54359905560886, -36.405764746872634, -31.097819204408914, -24.79036416534462, -17.68612642442657, -10.013442028034156, -2.01891736577318, 6.040496961794487, 13.905764746872626, 21.324089811284935, 28.057041083643, 33.88821597016249, 38.63019571208397, 42.13056917878817, 44.276831486938335]
#
# self.reset_pos_y = [200.0, 208.03506026593865, 215.81186708366042, 223.08046748326578, 229.60724266728707, 235.18241671106134, 239.6267989335532, 242.7975432332819, 244.59273927955707, 244.95468799435918, 243.87175604818208, 241.37874976481527, 237.55579641745862, 232.52576887223262, 226.4503363531613, 219.52476826029013, 211.9716580505004, 204.0337689006545, 195.96623109934552, 188.02834194949963, 180.4752317397099, 173.54966364683872, 167.47423112776738, 162.44420358254138, 158.62125023518473, 156.12824395181795, 155.04531200564085, 155.40726072044293, 157.2024567667181, 160.37320106644677, 164.81758328893866, 170.39275733271293, 176.91953251673422, 184.18813291633955, 191.96493973406135]
#
# # self.turbulence_car = '25'
# self.veh_list = []
# self.s = {}
# self.a = {}
# self.done = {}
# self.veh_sq = None
# self.act = {}
# self.act_old = {}
# self.reward = {}
# self.log_prob = {}
# self.acc = {}
# self.v = {}
# self.avg_v = [0.]
# self.headway = {}
# self.fuel = {}
# self.emer_brake_count = {}
# self.dang_headway_count = {}
# self.ok_flag = False
# self.last_turbulence_car = None
# for item in DQN_CAR:
# self.act_old[item] = 0.
# self.reward[item] = []
# self.log_prob[item] = []
# self.acc[item] = []
# self.v[item] = []
# self.headway[item] = []
# self.emer_brake_count[item] = 0
# self.dang_headway_count[item] = 0
# self.done[item] = 0
# self.i_episode = 0
# self.n_step = 0
# self.av_step = 0
#
# sumo_binary = 'sumo-gui'
# sumo_cmd = [sumo_binary, '-c', '/Users/sandymark/RL-sumo/net.sumocfg', '--collision.action', 'warn']
# traci.start(sumo_cmd)
#
# self.veh_list = SumoSDK.wait_all_vehicles(N_CAR)
#
# def reset_one(self, car):
# x = self.reset_pos_x
# y = self.reset_pos_y
# reset_idx = np.random.choice(len(x))
# if reset_idx < (len(x) // 2):
# edge = 'edge290'
# else:
# edge = 'edge390'
# traci.vehicle.moveToXY(car, edge, 0, x[reset_idx], y[reset_idx])
# traci.vehicle.setSpeed(car, 0)
# traci.simulationStep()
# traci.vehicle.setSpeed(car, -1)
#
# def reset(self, return_state=True):
# for _ in DQN_CAR:
# self.done[_] = 0
# x = self.reset_pos_x
# y = self.reset_pos_y
# reset_idx = list(np.random.choice(len(x), (N_CAR,), replace=False))
# veh_list = sorted(self.veh_list, key=lambda x: int(x))
# # traci.vehicle.moveToXY('0', 'edge2', 0, self.reset_pos_x[0], self.reset_pos_y[0])
# # traci.vehicle.moveToXY('999', 'edge2', 0, self.reset_pos_x[1], self.reset_pos_y[1])
# for idx, item in zip(range(len(reset_idx)), reset_idx):
# if item < (len(x) // 2):
# edge = 'edge290'
# else:
# edge = 'edge390'
# traci.vehicle.moveToXY(veh_list[idx], edge, 0, x[item], y[item])
# traci.vehicle.setSpeed(veh_list[idx], 0)
# # for idx in range(len(veh_list)):
# #
# # if idx < 20:
# # traci.vehicle.moveToXY(veh_list[idx], 'edge2', 1, self.reset_pos_x[idx], self.reset_pos_y[idx])
# # else:
# # traci.vehicle.moveToXY(veh_list[idx], 'edge3', 1, self.reset_pos_x[idx], self.reset_pos_y[idx])
# # traci.vehicle.setSpeed(veh_list[idx], 1)
# traci.simulationStep()
# for car_ in veh_list:
# traci.vehicle.setSpeed(car_, -1)
#
# if return_state:
# veh_sq = self._get_veh_sequence() # Generate initial state
# for car_ in DQN_CAR:
# car_idx = veh_sq[0].index(car_)
# veh_sq_ = [veh_sq[0][car_idx + 1:] + veh_sq[0][: car_idx],
# veh_sq[1][car_idx + 1:] + veh_sq[1][: car_idx]]
# veh_sq_[1] = self._get_interval(veh_sq[1][car_idx], veh_sq_)
# self.s[car_] = self._get_state(car_, veh_sq_)
# # get centralized state
# # s = []
# # for _ in DQN_CAR:
# # s += self.s[_]
# return self.s['999']
#
# def step(self, action: dict):
# # Take action
# # action = np.clip(action, -3., 2.)
# for _ in action.keys():
# traci.vehicle.setSpeed(_, max(0, traci.vehicle.getSpeed(_) + SIM_STEP_LENGTH * action[_]))
# # print(action)
# self.act[_] = action[_]
# # self.act['999'] = action / 10 - 3
# traci.simulationStep()
#
# # Get reward
# reward = {}
# for _ in DQN_CAR:
# reward[_] = self._get_reward_test(_)
# cent_reward = 0
# for value in reward.values():
# cent_reward += value
# # print(reward)
#
# # Get next state
# veh_sq = self._get_veh_sequence()
# for car_ in DQN_CAR:
# car_idx = veh_sq[0].index(car_)
# veh_sq_ = [veh_sq[0][car_idx + 1:] + veh_sq[0][: car_idx],
# veh_sq[1][car_idx + 1:] + veh_sq[1][: car_idx]]
# veh_sq_[1] = self._get_interval(veh_sq[1][car_idx], veh_sq_)
# s_ = self._get_state(car_, veh_sq_) # Only take 999's state for single-car test
# self.s[car_] = s_
# # get centralized state
# # s_ = []
# # for _ in DQN_CAR:
# # s_ += self.s[_]
#
# # Reset if collision occurred
# collision_list = traci.simulation.getCollidingVehiclesIDList()
# for _ in DQN_CAR:
# if _ in collision_list:
# # self.reset(return_state=False)
# self.done[_] = 1
# else:
# self.done[_] = 0
#
# self.avg_v.append(traci.vehicle.getSpeed('5') if traci.vehicle.getSpeed('5') >= 0 else 0)
#
# """ Manually change the leader's behaviour to train the dqn-car """
# if self.n_step % TURBULENCE_TERM == 0 and self.n_step >= START_TURBULENCE:
# # if self.step == 300:
# # if self.turbulence_car:
# # traci.vehicle.setSpeed(self.turbulence_car, -1)
# # self.turbulence_car = self.veh_list[25] \
# # if self.turbulence_car != self.veh_list[25] else self.veh_list[20 + np.random.randint(-3, 3)]
# # turbulence_car = self.turbulence_car[np.random.choice(len(self.turbulence_car))]
# self.last_turbulence_car = str(np.random.randint(N_CAR - 2))
# traci.vehicle.setSpeed(self.last_turbulence_car, np.random.random() * 5 + 1) # 0.01 + np.random.random() / 2)
#
# elif self.n_step % TURBULENCE_TERM == 10 and self.n_step > START_TURBULENCE:
# # elif self.step == 320 and self.step >= START_TURBULENCE:
# traci.vehicle.setSpeed(self.last_turbulence_car, -1)
#
# self.n_step += 1
# # if self.n_step == 0:
# # traci.vehicletype.setMinGap('car', 10)
# if self.n_step > 5000:
# self.observer.plot_var_dyn([reward['999'], self.act['999']], self.n_step, 300, [0, 0], 1, ['b', 'r'])
# return self.s, reward, self.done, {}
#
# def render(self, mode='human'):
# pass
#
# def _get_veh_sequence(self):
# """ Generate a list, storing the car sequence before dqn-car,
# in which the closest former car is the first element.
#
# Note that: ABSOLUTE POSITION will be stored in veh_list[1]"""
# while True:
# veh_list = [[], []]
# try:
# while True:
# for lane in self.route:
# veh_list[0] += traci.lane.getLastStepVehicleIDs(lane)
# if len(veh_list[0]) != N_CAR:
# traci.simulationStep()
# continue
# else:
# abs_pos = self._get_absolute_pos(veh_list[0])
# veh_list[1] = abs_pos
# # for item in veh_list[0]:
# # veh_list[1].append(self._get_interval(carID, item))
# # print('veh_list: ', veh_list)
# break
# break
# except ValueError:
# traci.simulationStep()
# print('ValueError')
# continue
#
# return veh_list
#
# def _get_absolute_pos(self, veh_sq):
# abs_pos = []
# for car_ in veh_sq:
# car_lane_index = self.route.index(traci.vehicle.getLaneID(car_))
# if car_lane_index == 0:
# pos = traci.vehicle.getLanePosition(car_)
# elif car_lane_index == 1:
# pos = traci.lane.getLength(self.route[0]) + traci.vehicle.getLanePosition(car_)
# abs_pos.append(pos)
# return abs_pos
#
# # def _get_cent_state(self, veh_sq_):
# # s = []
# # for _ in DQN_CAR:
# # den_former, den_later, vhf, vhl, nf, nl = self._get_density(veh_sq_)
# # s = [traci.vehicle.getSpeed(car),
# # veh_sq_[1][0],
# # (ROUTE_LENGTH - veh_sq_[1][-1]),
# # (traci.vehicle.getSpeed(car) - traci.vehicle.getSpeed(veh_sq_[0][0])),
# # (den_former - den_later)
# # ]
#
# def _get_state(self, car, veh_sq_):
# den_former, den_later, vhf, vhl, nf, nl = self._get_density(veh_sq_)
# s = [traci.vehicle.getSpeed(car),
# veh_sq_[1][0],
# (ROUTE_LENGTH - veh_sq_[1][-1]),
# # (traci.vehicle.getSpeed(car) - traci.vehicle.getSpeed(veh_sq_[0][0])),
# (den_former - den_later)
# ]
# s_norm = [s[0] / 15,
# s[1] / 110,
# s[2] / 110,
# # s[3] / 20 + 0.5,
# s[3] / 2000 + 0.5]
# # print('STATE: ', s_norm)
# return s_norm
#
# @staticmethod
# def _get_density(veh_sq):
# look_range = 100 # default: 200
# former_car_list = []
# later_car_list = []
# for idx in range(len(veh_sq[0])):
# if 0 <= veh_sq[1][idx] < look_range:
# former_car_list.append(veh_sq[0][idx])
# elif ROUTE_LENGTH - look_range < veh_sq[1][idx] < ROUTE_LENGTH:
# later_car_list.append(veh_sq[0][idx])
#
# n_former_car = len(former_car_list)
# n_later_car = len(later_car_list)
#
# v_h_for = 1e-6
# v_h_lat = 1e-6
# sum_d_for = 0
# sum_d_lat = 0
# w_v_f = 0
# w_v_l = 0
# w_p_f = 0
# w_p_l = 0
# for idx, car_ in zip(range(len(former_car_list)), former_car_list):
# # v_h_for += 1 / max(0.1, traci.vehicle.getSpeed(car_)) # Harmonic Sum
# # v_h_for += max(0.1, traci.vehicle.getSpeed(car_)) # Arithmetic Sum
# v_h_for += (1 - math.pow(veh_sq[1][idx] / 400, 1)) / max(0.1, traci.vehicle.getSpeed(
# car_)) # Weighted Harmonic Sum
# w_v_f += (1 - math.pow(veh_sq[1][idx] / 400, 1))
# # sum_d_for += (1 - traci.vehicle.getAcceleration(car_) / 3) / veh_sq[1][idx]
# # w_p_f += (1 - traci.vehicle.getAcceleration(car_) / 3)
# if idx == 0:
# sum_d_for += 1 / veh_sq[1][0]
# # w_p_f += 200
# w_p_f += 1
# else:
# sum_d_for += (1 - math.pow(veh_sq[1][idx - 1] / 400, 1)) / max(0.1, veh_sq[1][idx] - veh_sq[1][idx - 1])
# # w_p_f += (200 - veh_sq[1][idx - 1])
# w_p_f += (1 - math.pow(veh_sq[1][idx - 1] / 400, 1))
# # v_h_for = max(1 / v_h_for, 0.1) # Harmonic Mean
# # v_h_for = max(v_h_for / n_former_car, 0.1) # Arithmetic Mean
# v_h_for = max(w_v_f / v_h_for, 0.1) # Weighted Harmonic Mean
# for idx, car_ in zip(range(len(later_car_list)), later_car_list):
# # v_h_lat += 1 / max(0.1, traci.vehicle.getSpeed(car_))
# # v_h_lat += max(0.1, traci.vehicle.getSpeed(car_))
# v_h_lat += (1 - math.pow((ROUTE_LENGTH - veh_sq[1][idx - n_later_car]) / 400, 1)) / max(0.1,
# traci.vehicle.getSpeed(car_))
# w_v_l += (1 - math.pow((ROUTE_LENGTH - veh_sq[1][idx - n_later_car]) / 400, 1))
# # sum_d_lat += (1 - traci.vehicle.getAcceleration(car_) / 3) / (veh_sq[1][idx - n_later_car + 1] - veh_sq[1][idx - n_later_car])
# # w_p_l += (1 - traci.vehicle.getAcceleration(car_) / 3)
# if idx == n_later_car - 1:
# sum_d_lat += 1 / (ROUTE_LENGTH - veh_sq[1][-1])
# # w_p_l += 200
# w_p_l += 1
# else:
# sum_d_lat += (1 - math.pow((ROUTE_LENGTH - veh_sq[1][idx - n_later_car + 1]) / 400, 1)) / max(0.1, veh_sq[1][
# idx - n_later_car + 1] - veh_sq[1][idx - n_later_car])
# # w_p_l += (200 - (ROUTE_LENGTH - veh_sq[1][idx - n_later_car + 1]))
# w_p_l += (1 - math.pow((ROUTE_LENGTH - veh_sq[1][idx - n_later_car + 1]) / 400, 1))
# # v_h_lat = max(1 / v_h_lat, 0.1)
# # v_h_lat = max(v_h_lat / n_later_car, 0.1)
# v_h_lat = max(w_v_l / v_h_lat, 0.1)
# if n_former_car:
# # den_former = n_former_car + 5 * (1 - math.pow(veh_sq[1][0] / 600, 1)) / v_h_for + 1000 / max(0.1, veh_sq[1][0])
# den_former = 300 * sum_d_for / w_p_f + 15 / v_h_for + 500 / max(0.1, veh_sq[1][0])
# else:
# den_former = 0
# if n_later_car:
# # den_later = n_later_car + 5 * (1 - math.pow((629 - veh_sq[1][-1]) / 600, 1)) / v_h_lat + 1000 / max(0.1, 629 - veh_sq[1][-1])
# den_later = 300 * sum_d_lat / w_p_l + 15 / v_h_lat + 500 / max(0.1, (ROUTE_LENGTH - veh_sq[1][-1]))
# else:
# den_later = 0
#
# # print('den_former: ', den_former, 'sdf: ', sum_d_for, 'wpf: ', w_p_f, 'vh: ', v_h_for, 'itv: ', veh_sq[1][0])
# # print('den_later: ', den_later, 'sdll: ', sum_d_lat, 'wpl: ', w_p_l, 'vh: ', v_h_lat, 'itv: ', ROUTE_LENGTH - veh_sq[1][-1])
# return min(1000., den_former * 2.5), min(1000., den_later * 2.5), v_h_for, v_h_lat, n_former_car, n_later_car
# # return n_former_car*50, n_later_car * 50
#
# def _get_reward(self, carID):
# r = 0 # 3. Calculate the reward
# # s_star = 5 + 3 + max(0, self.s[carID][0] * T + self.s[carID][0] * (self.s[carID][2]) / (2 * math.sqrt(2. * 3.)))
# # a_star = min(2, max(-3, 2. * (1 - (self.s[carID][0] / 15.) ** 4 - (s_star / self.s[carID][1]) ** 2)))
# # print('a_star: ', carID, a_star)
# # r = -150 * abs(self.act[carID] - a_star) # for a
#
# # if ((traci.vehicle.getAcceleration(veh_sq[0]) <= -2 and s[car][1] <= 20) or
# # (s[car][3] >= 5 and s[car][1] <= (s[car][0]**2 - s[car][2]**2) / 2*2.5 + 3)) and act[car] > -2:
# # r -= 1000
# #
# # if s[car][1] <= 7 and act[car] >= -2:
# # r -= 1000
# # elif s[car][1] <= 7 and act[car] < -2:
# # r += 500
#
# # if (traci.vehicle.getAcceleration(veh_sequence[0][0]) <= -2 and self.s[carID][1] <= 20) or \
# # (self.s[carID][3] >= 5 and self.s[carID][1] <= (self.s[carID][0] ** 2 - traci.vehicle.getSpeed(veh_sequence[0][0]) ** 2) / 2 * 2.5 + 3): # or \
# # # s[1] <= 7:
# # if self.act[carID] > -2:
# # r -= 2000
# # # else:
# # # r += 500 # for essential emergency break
#
# if self.s[carID][1] * 40 <= 7:
# r -= 500
# if self.s[carID][0] > 0:
# r -= 500
# r -= self.act[carID] * 150
# # if act > 0:
# # r -= 1000
# # elif act <= 0:
# # r += abs(act) * 400
# # r -= 15 / max(0.1, self.s[carID][0]) * 100 # for dangerous headway (new collision punishment)
#
# # r -= min(400, abs(s[3]) ** 4)
# r -= 100 * abs(self.act[carID] - self.act_old[carID]) # for delta a
# # r += self.s[carID][-1] * 500 # for avg_v
# # r -= abs(self.act[carID]) * 300 # for fuel
# r -= 30 * abs(self.s[carID][4] * 4000 - 2000) # for density
# r -= 80 * abs(self.act[carID]) ** 2
# # if carID in traci.simulation.getCollidingVehiclesIDList():
# if self.s[carID][1] * 40 <= 5:
# r -= 10000
# return max(-20000, r) / 20000 + 1
#
# def _get_reward_test(self, car):
# r = 0
# r -= abs(self.s[car][3] - 0.5)
# # v = np.zeros((N_CAR,))
# # for car, idx in zip(self.veh_list, range(len(self.veh_list))):
# # v[idx] = traci.vehicle.getSpeed(car)
# # r += v.mean() / 5
# # if 5 < self.s[car][1] * 90 <= 7:
# # r = -1.5
# # elif self.s[car][1] * 90 <= 5:
# # r = -2
# return r
#
# @staticmethod
# def _get_interval(pos_cur_car, veh_sq):
# veh_sq[1] -= pos_cur_car * np.ones((len(veh_sq[1])))
# for idx in range(len(veh_sq[1])):
# if veh_sq[1][idx] < 0:
# veh_sq[1][idx] += ROUTE_LENGTH
# return list(veh_sq[1])
import gym
import math
import numpy as np
from Tools import SumoSDK
from Tools.Statistics import Observer
from RLlib import DDPG
import traci
SIM_STEP_LENGTH = 0.1  # SUMO simulation step length in seconds
N_CAR = 25  # total number of vehicles on the ring
ROUTE = ['edge290_0', 'edge390_0']  # lane IDs forming the circular route
ROUTE_LENGTH = 283.  # route length in metres -- TODO confirm against the net file
DQN_CAR = ['0']  # IDs of the RL-controlled vehicle(s)
TURBULENCE_TERM = 400  # steps between manually injected disturbances
START_TURBULENCE = 10000  # first step at which disturbances are injected
class CustomEnv(gym.Env):
metadata = {'render.modes': ['human']}
    def __init__(self):
        """Create the ring-road RL environment and launch SUMO via TraCI.

        Side effects: starts a `sumo-gui` process and blocks until all N_CAR
        vehicles have entered the network.
        """
        super(CustomEnv, self).__init__()
        # Continuous acceleration command in [-2, 2] -- presumably m/s^2.
        # NOTE(review): np.float is removed in NumPy >= 1.24; should be float.
        self.action_space = gym.spaces.Box(low=-2, high=2, shape=(1,), dtype=np.float)
        # self.action_space = gym.spaces.Discrete(50)
        # Four observation features, each normalised into [0, 1] (see _get_state).
        self.observation_space = gym.spaces.Box(low=np.zeros((4,)),
                                                high=np.ones((4,)),
                                                shape=(4,),
                                                dtype=np.float)
        # self.observation_space = gym.spaces.Box(low=np.zeros((48,)),
        #                                         high=np.hstack((15 * np.ones((24,)), 650 * np.ones((24,)))),
        #                                         shape=(48,),
        #                                         dtype=np.float)
        self.route = ROUTE
        self.dqn_car = DQN_CAR
        self.observer = Observer()
        # NOTE(review): a stale commented-out 40-slot spawn table was removed here
        # for readability; the active 35-slot table follows (x/y pairs share an index).
        self.reset_pos_x = [45.0, 44.276831486938335, 42.13056917878817, 38.630195712083975, 33.88821597016249, 28.05704108364301, 21.324089811284942, 13.905764746872634, 6.040496961794498, -2.018917365773159, -10.013442028034145, -17.68612642442656, -24.79036416534461, -31.097819204408903, -36.40576474687263, -40.54359905560886, -43.378328731313395, -44.818843229785756, -44.818843229785756, -43.3783287313134, -40.54359905560886, -36.405764746872634, -31.097819204408914, -24.79036416534462, -17.68612642442657, -10.013442028034156, -2.01891736577318, 6.040496961794487, 13.905764746872626, 21.324089811284935, 28.057041083643, 33.88821597016249, 38.63019571208397, 42.13056917878817, 44.276831486938335]
        self.reset_pos_y = [200.0, 208.03506026593865, 215.81186708366042, 223.08046748326578, 229.60724266728707, 235.18241671106134, 239.6267989335532, 242.7975432332819, 244.59273927955707, 244.95468799435918, 243.87175604818208, 241.37874976481527, 237.55579641745862, 232.52576887223262, 226.4503363531613, 219.52476826029013, 211.9716580505004, 204.0337689006545, 195.96623109934552, 188.02834194949963, 180.4752317397099, 173.54966364683872, 167.47423112776738, 162.44420358254138, 158.62125023518473, 156.12824395181795, 155.04531200564085, 155.40726072044293, 157.2024567667181, 160.37320106644677, 164.81758328893866, 170.39275733271293, 176.91953251673422, 184.18813291633955, 191.96493973406135]
        # self.turbulence_car = '25'
        self.veh_list = []  # all vehicle IDs reported by SUMO
        self.s = {}  # last observation per controlled car
        self.a = {}
        self.done = 0  # 1 after a collision ends the episode
        self.veh_sq = None
        self.act = {}  # last applied action per controlled car
        self.act_old = {}
        self.reward = {}
        self.log_prob = {}
        self.acc = {}
        self.v = {}
        self.avg_v = [0.]  # speed history of probe car '5' (see step)
        self.headway = {}
        self.fuel = {}
        self.emer_brake_count = {}
        self.dang_headway_count = {}
        self.ok_flag = False
        self.last_turbulence_car = None  # car currently slowed by the disturbance injector
        self.past_roue = [0.]  # rolling history of the density-difference feature
        # Per-agent bookkeeping containers.
        for item in DQN_CAR:
            self.act_old[item] = 0.
            self.reward[item] = []
            self.log_prob[item] = []
            self.acc[item] = []
            self.v[item] = []
            self.headway[item] = []
            self.emer_brake_count[item] = 0
            self.dang_headway_count[item] = 0
        self.i_episode = 0
        self.n_step = 0
        self.av_step = 0
        sumo_binary = 'sumo-gui'
        # NOTE(review): absolute user-specific config path; consider making configurable.
        sumo_cmd = [sumo_binary, '-c', '/Users/sandymark/RL-sumo/net.sumocfg', '--collision.action', 'warn']
        traci.start(sumo_cmd)
        self.veh_list = SumoSDK.wait_all_vehicles(N_CAR)
def reset_one(self, car):
x = self.reset_pos_x
y = self.reset_pos_y
reset_idx = np.random.choice(len(x))
if reset_idx < (len(x) // 2):
edge = 'edge290'
else:
edge = 'edge390'
traci.vehicle.moveToXY(car, edge, 0, x[reset_idx], y[reset_idx])
traci.vehicle.setSpeed(car, 0)
traci.simulationStep()
traci.vehicle.setSpeed(car, -1)
def reset(self, return_state=True):
self.done = 0
# self.past_roue = [0.]
x = self.reset_pos_x
y = self.reset_pos_y
reset_idx = list(np.random.choice(len(x), (N_CAR,), replace=False))
veh_list = sorted(self.veh_list, key=lambda x: int(x))
# traci.vehicle.moveToXY('0', 'edge2', 0, self.reset_pos_x[0], self.reset_pos_y[0])
# traci.vehicle.moveToXY('999', 'edge2', 0, self.reset_pos_x[1], self.reset_pos_y[1])
for idx, item in zip(range(len(reset_idx)), reset_idx):
if item < (len(x) // 2):
edge = 'edge290'
else:
edge = 'edge390'
traci.vehicle.moveToXY(veh_list[idx], edge, 0, x[item], y[item])
traci.vehicle.setSpeed(veh_list[idx], 0)
# for idx in reversed(range(N_CAR)):
# if idx < (len(x) // 2):
# traci.vehicle.moveToXY(veh_list[idx], 'edge290', 1, self.reset_pos_x[idx], self.reset_pos_y[idx])
# else:
# traci.vehicle.moveToXY(veh_list[idx], 'edge390', 1, self.reset_pos_x[idx], self.reset_pos_y[idx])
# traci.vehicle.setSpeed(veh_list[idx], 0)
# for idx in range(len(veh_list)):
#
# if idx < 20:
# traci.vehicle.moveToXY(veh_list[idx], 'edge2', 1, self.reset_pos_x[idx], self.reset_pos_y[idx])
# else:
# traci.vehicle.moveToXY(veh_list[idx], 'edge3', 1, self.reset_pos_x[idx], self.reset_pos_y[idx])
# traci.vehicle.setSpeed(veh_list[idx], 1)
traci.simulationStep()
for car_ in veh_list:
traci.vehicle.setSpeed(car_, -1)
if return_state:
veh_sq = self._get_veh_sequence() # Generate initial state
for car_ in DQN_CAR:
car_idx = veh_sq[0].index(car_)
veh_sq_ = [veh_sq[0][car_idx + 1:] + veh_sq[0][: car_idx],
veh_sq[1][car_idx + 1:] + veh_sq[1][: car_idx]]
veh_sq_[1] = self._get_interval(veh_sq[1][car_idx], veh_sq_)
self.s[car_] = self._get_state(car_, veh_sq_)
# get centralized state
# s = []
# for _ in DQN_CAR:
# s += self.s[_]
return self.s['999']
def step(self, action):
# Take action
# action = np.clip(action, -3., 2.)
traci.vehicle.setSpeed('999', max(0, traci.vehicle.getSpeed('999') + SIM_STEP_LENGTH * action[0]))
# print(action)
self.act['999'] = action[0]
# self.act['999'] = action / 10 - 3
traci.simulationStep()
# Get reward
reward = 0
# reward = self._get_reward_test('999')
reward = traci.vehicle.getSpeed('999')
# cent_reward = 0
# for value in reward.values():
# cent_reward += value
# # print(reward)
# Get next state
veh_sq = self._get_veh_sequence()
for car_ in DQN_CAR:
car_idx = veh_sq[0].index(car_)
veh_sq_ = [veh_sq[0][car_idx + 1:] + veh_sq[0][: car_idx],
veh_sq[1][car_idx + 1:] + veh_sq[1][: car_idx]]
veh_sq_[1] = self._get_interval(veh_sq[1][car_idx], veh_sq_)
s_ = self._get_state(car_, veh_sq_) # Only take 999's state for single-car test
self.s[car_] = s_
self.past_roue.append(self.s['999'][3])
if len(self.past_roue) > 20:
self.past_roue.pop(0)
# get centralized state
# s_ = []
# for _ in DQN_CAR:
# s_ += self.s[_]
# Reset if collision occurred
collision_list = traci.simulation.getCollidingVehiclesIDList()
if collision_list:
# self.reset(return_state=False)
self.done = 1
else:
self.done = 0
self.avg_v.append(traci.vehicle.getSpeed('5') if traci.vehicle.getSpeed('5') >= 0 else 0)
""" Manually change the leader's behaviour to train the dqn-car """
if self.n_step % TURBULENCE_TERM == 0 and self.n_step >= START_TURBULENCE:
# if self.step == 300:
# if self.turbulence_car:
# traci.vehicle.setSpeed(self.turbulence_car, -1)
# self.turbulence_car = self.veh_list[25] \
# if self.turbulence_car != self.veh_list[25] else self.veh_list[20 + np.random.randint(-3, 3)]
# turbulence_car = self.turbulence_car[np.random.choice(len(self.turbulence_car))]
self.last_turbulence_car = str(np.random.randint(N_CAR - 2))
traci.vehicle.setSpeed(self.last_turbulence_car, np.random.random() + 0.1) # 0.01 + np.random.random() / 2)
elif self.n_step % TURBULENCE_TERM == 100 and self.n_step > START_TURBULENCE:
# elif self.step == 320 and self.step >= START_TURBULENCE:
traci.vehicle.setSpeed(self.last_turbulence_car, -1)
self.n_step += 1
# if self.n_step == 0:
# traci.vehicletype.setMinGap('car', 10)
if self.n_step > 1000:
self.observer.plot_var_dyn([reward, self.act['999']], self.n_step, 100, [0, 0], 1, ['b', 'r'])
return self.s['999'], reward, self.done, {}
def render(self, mode='human'):
    """Gym-style rendering hook; intentionally a no-op for this environment."""
    return None
def _get_veh_sequence(self):
    """ Generate a list, storing the car sequence before dqn-car,
    in which the closest former car is the first element.
    Note that: ABSOLUTE POSITION will be stored in veh_list[1]

    Retries (stepping the simulation) until all N_CAR vehicles are present
    on the lanes of self.route; on ValueError it also steps and retries.
    """
    while True:
        veh_list = [[], []]  # [vehicle ids, absolute positions]
        try:
            while True:
                # Collect every vehicle currently on the route's lanes.
                for lane in self.route:
                    veh_list[0] += traci.lane.getLastStepVehicleIDs(lane)
                if len(veh_list[0]) != N_CAR:
                    # Not all cars spawned yet: advance the sim and try again.
                    # NOTE(review): veh_list[0] is NOT reset before the retry,
                    # so ids accumulate across iterations — confirm this branch
                    # can only fire before any car has been collected.
                    traci.simulationStep()
                    continue
                else:
                    abs_pos = self._get_absolute_pos(veh_list[0])
                    veh_list[1] = abs_pos
                    break
            break
        except ValueError:
            # Transient traci failure: advance one step and retry from scratch.
            traci.simulationStep()
            print('ValueError')
            continue
    return veh_list
def _get_absolute_pos(self, veh_sq):
    """Return the absolute route position for each vehicle id in veh_sq.

    A vehicle's absolute position is the sum of the full lengths of all
    lanes in self.route that precede its current lane, plus its position
    on that lane.

    Fix/generalization: the original handled only lane indices 0 and 1 and
    raised NameError (unbound `pos`) for any longer route; the summed form
    reproduces both original cases exactly and supports any route length.
    """
    abs_pos = []
    for car_ in veh_sq:
        car_lane_index = self.route.index(traci.vehicle.getLaneID(car_))
        pos = sum(traci.lane.getLength(lane) for lane in self.route[:car_lane_index])
        pos += traci.vehicle.getLanePosition(car_)
        abs_pos.append(pos)
    return abs_pos
# def _get_cent_state(self, veh_sq_):
# s = []
# for _ in DQN_CAR:
# den_former, den_later, vhf, vhl, nf, nl = self._get_density(veh_sq_)
# s = [traci.vehicle.getSpeed(car),
# veh_sq_[1][0],
# (ROUTE_LENGTH - veh_sq_[1][-1]),
# (traci.vehicle.getSpeed(car) - traci.vehicle.getSpeed(veh_sq_[0][0])),
# (den_former - den_later)
# ]
def _get_state(self, car, veh_sq_):
    """Build the normalized 4-feature observation for `car`.

    Features: own speed, gap to the car ahead, gap to the car behind
    (route-wrapped), and the former/later density difference — each scaled
    to roughly [0, 1].
    """
    den_former, den_later, vhf, vhl, nf, nl = self._get_density(veh_sq_)
    speed = traci.vehicle.getSpeed(car)
    gap_ahead = veh_sq_[1][0]
    gap_behind = ROUTE_LENGTH - veh_sq_[1][-1]
    density_diff = den_former - den_later
    return [
        speed / 15,
        gap_ahead / 110,
        gap_behind / 110,
        density_diff / 2000 + 0.5,
    ]
@staticmethod
def _get_density(veh_sq):
    """Compute distance-weighted traffic density ahead of and behind the car.

    veh_sq: [ids, relative positions] with positions wrapped onto
    [0, ROUTE_LENGTH), ordered so the closest former car comes first.

    Returns (den_former, den_later, v_h_for, v_h_lat, n_former, n_later)
    where the densities mix three terms: weighted inverse gaps, the inverse
    of a distance-weighted harmonic mean speed, and the inverse distance to
    the nearest car. Both densities are clipped to 1000.
    """
    look_range = 100  # observation horizon in meters (default was 200)
    former_car_list = []
    later_car_list = []
    # Cars within look_range ahead vs. within look_range behind (positions
    # near ROUTE_LENGTH are "behind" after the wrap).
    for idx in range(len(veh_sq[0])):
        if 0 <= veh_sq[1][idx] < look_range:
            former_car_list.append(veh_sq[0][idx])
        elif ROUTE_LENGTH - look_range < veh_sq[1][idx] < ROUTE_LENGTH:
            later_car_list.append(veh_sq[0][idx])
    n_former_car = len(former_car_list)
    n_later_car = len(later_car_list)
    v_h_for = 1e-6  # epsilon avoids division by zero when the list is empty
    v_h_lat = 1e-6
    sum_d_for = 0
    sum_d_lat = 0
    w_v_f = 0
    w_v_l = 0
    w_p_f = 0
    w_p_l = 0
    for idx, car_ in zip(range(len(former_car_list)), former_car_list):
        # Weighted Harmonic Sum: closer cars get weight closer to 1.
        v_h_for += (1 - math.pow(veh_sq[1][idx] / look_range, 1)) / max(0.1, traci.vehicle.getSpeed(
            car_))
        w_v_f += (1 - math.pow(veh_sq[1][idx] / look_range, 1))
        if idx == 0:
            # Gap to the immediately preceding car.
            sum_d_for += 1 / veh_sq[1][0]
            w_p_f += 1
        else:
            # Inter-car gap, weighted by how close the nearer car is.
            sum_d_for += (1 - math.pow(veh_sq[1][idx - 1] / look_range, 1)) / max(0.1, veh_sq[1][idx] - veh_sq[1][idx - 1])
            w_p_f += (1 - math.pow(veh_sq[1][idx - 1] / look_range, 1))
    v_h_for = max(w_v_f / v_h_for, 0.1)  # Weighted Harmonic Mean speed
    for idx, car_ in zip(range(len(later_car_list)), later_car_list):
        # NOTE(review): veh_sq[1][idx - n_later_car] indexes from the tail of
        # veh_sq — this assumes the later cars occupy exactly the last
        # n_later_car slots of the (ordered) sequence. TODO confirm.
        v_h_lat += (1 - math.pow((ROUTE_LENGTH - veh_sq[1][idx - n_later_car]) / look_range, 1)) / max(0.1,
        traci.vehicle.getSpeed(car_))
        w_v_l += (1 - math.pow((ROUTE_LENGTH - veh_sq[1][idx - n_later_car]) / look_range, 1))
        if idx == n_later_car - 1:
            # Gap to the immediately following car.
            sum_d_lat += 1 / (ROUTE_LENGTH - veh_sq[1][-1])
            w_p_l += 1
        else:
            sum_d_lat += (1 - math.pow((ROUTE_LENGTH - veh_sq[1][idx - n_later_car + 1]) / look_range, 1)) / max(0.1, veh_sq[1][
                idx - n_later_car + 1] - veh_sq[1][idx - n_later_car])
            w_p_l += (1 - math.pow((ROUTE_LENGTH - veh_sq[1][idx - n_later_car + 1]) / look_range, 1))
    v_h_lat = max(w_v_l / v_h_lat, 0.1)  # Weighted Harmonic Mean speed
    if n_former_car:
        # gap term + slow-traffic term + nearest-car term
        den_former = 300 * sum_d_for / w_p_f + 15 / v_h_for + 500 / max(0.1, veh_sq[1][0])
    else:
        den_former = 0
    if n_later_car:
        den_later = 300 * sum_d_lat / w_p_l + 15 / v_h_lat + 500 / max(0.1, (ROUTE_LENGTH - veh_sq[1][-1]))
    else:
        den_later = 0
    return min(1000., den_former * 2.5), min(1000., den_later * 2.5), v_h_for, v_h_lat, n_former_car, n_later_car
def _get_reward(self, carID):
    """Compute the (scaled) training reward for `carID`.

    Penalizes dangerous headways, positive acceleration in tight gaps,
    jerky control (delta-a), density imbalance and large |a|; the result is
    clipped at -20000 and mapped into [-2, 0].

    NOTE(review): the source dump lost indentation — the nesting of the
    first three penalties was reconstructed from the surrounding
    commented-out variants; confirm against the original file.
    """
    r = 0  # 3. Calculate the reward
    # (Commented-out IDM-style a_star and emergency-brake variants removed
    # for readability; see repository history.)
    if self.s[carID][1] * 110 <= 7:
        # Dangerously small headway (state feature 1 is gap / 110).
        r -= 500
        if self.s[carID][0] > 0:
            # Still moving while too close.
            r -= 500
            r -= self.act[carID] * 150
    r -= 100 * abs(self.act[carID] - self.act_old[carID])  # for delta a
    r -= 30 * abs(self.s[carID][3] * 2000 - 1000)  # for density imbalance
    r -= 80 * abs(self.act[carID]) ** 2  # control-effort penalty
    if self.s[carID][1] * 110 <= 5:
        # Near-collision headway.
        r -= 10000
    return max(-20000, r) / 20000 - 1
def _get_reward_test(self, car):
    """Evaluation reward over the whole platoon (not per-car).

    Rewards speeds close to 7 m/s and headways close to 12 m across all
    vehicles. The `car` parameter is kept for interface compatibility.

    Fixes: the loop variable shadowed the `car` parameter (renamed to
    `veh`); `zip(xs, range(len(xs)))` replaced with `enumerate`; unused
    initial `r = 0` removed.
    """
    v = np.zeros((N_CAR,))
    for idx, veh in enumerate(self.veh_list):
        v[idx] = traci.vehicle.getSpeed(veh)
    h = np.zeros((N_CAR,))
    for idx, veh in enumerate(self.veh_list):
        # NOTE(review): getLeader can return None when no leader is found
        # within dist — this would raise TypeError; confirm it cannot
        # happen on a closed ring route.
        h[idx] = traci.vehicle.getLeader(veh, dist=1000)[1] + 2
        print(traci.vehicle.getLeader(veh, dist=1000))
    r = - np.linalg.norm(7 * np.ones((N_CAR,)) - v) - 0.5 * np.sum(12 * np.ones((N_CAR,)) - h)
    return r
@staticmethod
def _get_interval(pos_cur_car, veh_sq):
    """Shift positions in veh_sq[1] relative to pos_cur_car, wrapping
    negative offsets onto [0, ROUTE_LENGTH), and return them as a list.

    Side effect: veh_sq[1] is rebound to the shifted numpy array, matching
    the original in-place behaviour.
    """
    offsets = np.asarray(veh_sq[1], dtype=float) - pos_cur_car
    offsets[offsets < 0] += ROUTE_LENGTH
    veh_sq[1] = offsets
    return list(offsets)
|
991,861 | 90e0f4229fc926f8cc520497ac2a4107530c4cf8 | #from AcceptTradeRequest import AcceptTradeRequest
#from AddItemToStoreRequest import AddItemToStoreRequest
from AddItemsToClanStashRequest import AddItemsToClanStashRequest
from AddItemsToClosetRequest import AddItemsToClosetRequest
from AddItemsToDisplayCaseRequest import AddItemsToDisplayCaseRequest
from AddMeatToClanStashRequest import AddMeatToClanStashRequest
from AddMeatToClosetRequest import AddMeatToClosetRequest
from AddPlayerToClanWhitelistRequest import AddPlayerToClanWhitelistRequest
from AdventureRequest import AdventureRequest
from ApiRequest import ApiRequest
from ArcaneTomesRequest import ArcaneTomesRequest
from AscensionHistoryRequest import AscensionHistoryRequest
from AutoSellRequest import AutoSellRequest
from BarrelRequest import BarrelRequest
from BootClanMemberRequest import BootClanMemberRequest
from BountyHunterRequest import BountyHunterRequest
#from CafeConsumeRequest import CafeConsumeRequest
from CafeMenuRequest import CafeMenuRequest
from CampgroundKitchenRequest import CampgroundKitchenRequest
from CampgroundRestRequest import CampgroundRestRequest
from CanadianStudiesRequest import CanadianStudiesRequest
#from CancelTradeRequest import CancelTradeRequest
from CharpaneRequest import CharpaneRequest
from ChoiceRequest import ChoiceRequest
from ClanLogRequest import ClanLogRequest
from ClanRaidLogRequest import ClanRaidLogRequest
from ClanStashRequest import ClanStashRequest
from ClanWhitelistRequest import ClanWhitelistRequest
from CocktailcraftingRequest import CocktailcraftingRequest
from CombatRequest import CombatRequest
from ComfySofaRequest import ComfySofaRequest
from CookingRequest import CookingRequest
from Crimbo2011ToyFactoryRequest import Crimbo2011ToyFactoryRequest
from Crimbo2011TradeInCandyRequest import Crimbo2011TradeInCandyRequest
from CrimboTreeRequest import CrimboTreeRequest
from CurrentEquipmentRequest import CurrentEquipmentRequest
from CursePlayerRequest import CursePlayerRequest
from DeclineTradeOfferRequest import DeclineTradeOfferRequest
from DeclineTradeResponseRequest import DeclineTradeResponseRequest
from DeleteMessagesRequest import DeleteMessagesRequest
from DeluxeMrKlawRequest import DeluxeMrKlawRequest
from DiscardItemRequest import DiscardItemRequest
from DrinkBoozeRequest import DrinkBoozeRequest
from DynamicRequest import DynamicRequest
from EatFoodRequest import EatFoodRequest
from EquipRequest import EquipRequest
from GenericAdventuringRequest import GenericAdventuringRequest
from GenericRequest import GenericRequest
from GetChatMessagesRequest import GetChatMessagesRequest
from GetMessagesRequest import GetMessagesRequest
from GetPendingTradesRequest import GetPendingTradesRequest
from GuildTrainRequest import GuildTrainRequest
from HermitRequest import HermitRequest
from HoboFlexRequest import HoboFlexRequest
from HomepageRequest import HomepageRequest
from InventoryRequest import InventoryRequest
from ItemDescriptionRequest import ItemDescriptionRequest
from ItemInformationRequest import ItemInformationRequest
from JukeboxRequest import JukeboxRequest
from LoadClanAdminRequest import LoadClanAdminRequest
from LoginRequest import LoginRequest
from LogoutRequest import LogoutRequest
from LookingGlassRequest import LookingGlassRequest
from MainMapRequest import MainMapRequest
from MakePasteRequest import MakePasteRequest
#from MallItemPriceSearchRequest import MallItemPriceSearchRequest
from MallItemPurchaseRequest import MallItemPurchaseRequest
from MallItemSearchRequest import MallItemSearchRequest
from MalusRequest import MalusRequest
from MeatBushRequest import MeatBushRequest
from MeatOrchidRequest import MeatOrchidRequest
from MeatTreeRequest import MeatTreeRequest
from MeatpastingRequest import MeatpastingRequest
from MindControlRequest import MindControlRequest
from MrKlawRequest import MrKlawRequest
from NashCrosbysStillRequest import NashCrosbysStillRequest
from OldTimeyRadioRequest import OldTimeyRadioRequest
from OpenChatRequest import OpenChatRequest
from ProposeTradeRequest import ProposeTradeRequest
from PulverizeRequest import PulverizeRequest
from QuestLogRequest import QuestLogRequest
from RespondToTradeRequest import RespondToTradeRequest
from RumpusRoomRequest import RumpusRoomRequest
from SearchPlayerRequest import SearchPlayerRequest
from SendChatRequest import SendChatRequest
from SendMessageRequest import SendMessageRequest
from SnackMachineRequest import SnackMachineRequest
from SodaMachineRequest import SodaMachineRequest
from StatusRequest import StatusRequest
#from StoreGetTransactionsRequest import StoreGetTransactionsRequest
from StoreInventoryRequest import StoreInventoryRequest
from StoreRequest import StoreRequest
#from StoreUpdateItemRequest import StoreUpdateItemRequest
from TakeItemFromClanStashRequest import TakeItemFromClanStashRequest
from TakeItemFromStoreRequest import TakeItemFromStoreRequest
from TakeMeatFromClosetRequest import TakeMeatFromClosetRequest
from TanULotsRequest import TanULotsRequest
from ToggleAcceptingClanApplicationsRequest import ToggleAcceptingClanApplicationsRequest
from TravelingTraderRequest import TravelingTraderRequest
from UneffectRequest import UneffectRequest
from UnequipRequest import UnequipRequest
from UseItemRequest import UseItemRequest
from UseMultipleRequest import UseMultipleRequest
from UseSkillRequest import UseSkillRequest
from UserProfileRequest import UserProfileRequest
from WokRequest import WokRequest
|
991,862 | db2bbfc0b482ded11de3327aacb78a4665f490f5 | import json
import responses
from twitch.client import TwitchClient
from twitch.constants import BASE_URL
# Shared fixture: a single emote record in the shape the Twitch API returns.
example_emote = {"code": "TwitchLit", "id": 115390}
@responses.activate
def test_get_badges_by_channel():
    """The badges endpoint is hit exactly once and its payload passed through."""
    channel_id = 7236692
    payload = {
        "admin": {
            "alpha": "https://static-cdn.jtvnw.net/chat-badges/admin-alpha.png",
            "image": "https://static-cdn.jtvnw.net/chat-badges/admin.png",
            "svg": "https://static-cdn.jtvnw.net/chat-badges/admin.svg",
        }
    }
    responses.add(
        responses.GET,
        "{}chat/{}/badges".format(BASE_URL, channel_id),
        body=json.dumps(payload),
        status=200,
        content_type="application/json",
    )

    badges = TwitchClient("client id").chat.get_badges_by_channel(channel_id)

    assert len(responses.calls) == 1
    assert isinstance(badges, dict)
    assert badges["admin"] == payload["admin"]
@responses.activate
def test_get_emoticons_by_set():
    """Emoticon sets are returned keyed by set id, payload passed through."""
    payload = {"emoticon_sets": {"19151": [example_emote]}}
    responses.add(
        responses.GET,
        "{}chat/emoticon_images".format(BASE_URL),
        body=json.dumps(payload),
        status=200,
        content_type="application/json",
    )

    result = TwitchClient("client id").chat.get_emoticons_by_set()

    assert len(responses.calls) == 1
    assert isinstance(result, dict)
    assert result["emoticon_sets"] == payload["emoticon_sets"]
    assert result["emoticon_sets"]["19151"][0] == example_emote
@responses.activate
def test_get_all_emoticons():
    """The full emoticon list is fetched once and returned unchanged."""
    payload = {"emoticons": [example_emote]}
    responses.add(
        responses.GET,
        "{}chat/emoticons".format(BASE_URL),
        body=json.dumps(payload),
        status=200,
        content_type="application/json",
    )

    result = TwitchClient("client id").chat.get_all_emoticons()

    assert len(responses.calls) == 1
    assert isinstance(result, dict)
    assert result["emoticons"] == payload["emoticons"]
    assert result["emoticons"][0] == example_emote
|
991,863 | 3ec35311f39369d57e101a521cbef15536da8ace | import numpy as np
from guess_and_check import GuessAndCheck
import matplotlib.pyplot as plt
# Demo script: fit a GuessAndCheck model on synthetic data where only the
# first two of 100 features matter, then inspect/plot the result.
n_samples = 5000000
n_total_features = 100
n_good_features = 2  # NOTE(review): unused with the linear target below — confirm
X = np.random.random(size=(n_samples, n_total_features))
# Target depends only on the first two features; its maximum is 1 + 1 = 2.
Y = X[:, 0] + X[:, 1]
max_y = 2
model = GuessAndCheck(leaf_size=500000, balance_param=0.5, max_y=max_y)
model.fit(X, Y)
if model.n_of_nodes == 1:
    # The tree never split: report why.
    print("Trust border : %f" % model.trust_border)
    print("Failed split : %s" % model.root.failed_split)
else:
    # Show which features the tree actually used.
    used_var = model.variables_used
    plt.bar(used_var.keys(), used_var.values())
# NOTE(review): indentation lost in source dump — these three calls were
# reconstructed at top level; confirm they are not inside the else branch.
model.show_graph()
t = model.compute_bounding_boxes()
model.plot_bounding_boxes(t)
|
991,864 | 9db7ef4ccaa3afc264f7af7feb7000c395f5b10f | # -*- coding: utf-8 -*-
"""Present Values modules
"""
def PV_SumInsInForce(t):
    """Present value of sum insured in force (current term undiscounted)."""
    if t > last_t:
        return 0
    else:
        return prj_InsInForce_BoP1(t) + PV_SumInsInForce(t + 1) / (1 + DiscRate(t))
def PV_IncomePremium(t):
    """Present value of premium income (premium received at period start)."""
    if t > last_t:
        return 0
    else:
        return prj_incm_Premium(t) + PV_IncomePremium(t + 1) / (1 + DiscRate(t))
def PV_BenefitSurrender(t):
    """Present value of surrender benefits (negative; whole term discounted,
    i.e. benefits are treated as paid at period end)."""
    if t > last_t:
        return 0
    else:
        return (-prj_bnft_Surrender(t) + PV_BenefitSurrender(t + 1)) / (1 + DiscRate(t))
def PV_BenefitDeath(t):
    """Present value of death benefits (negative; whole term discounted,
    i.e. benefits are treated as paid at period end)."""
    if t > last_t:
        return 0
    else:
        return (-prj_bnft_Death(t) + PV_BenefitDeath(t + 1)) / (1 + DiscRate(t))
def PV_ExpsCommTotal(t):
    """Present value of total commission expenses (negative, incurred at
    period start). Docstring fixed: it previously said "total expenses"."""
    if t > last_t:
        return 0
    else:
        return - prj_exps_CommTotal(t) + PV_ExpsCommTotal(t + 1) / (1 + DiscRate(t))
def PV_ExpsAcq(t):
    """Present value of acquisition expenses (negative, incurred at period
    start). Docstring fixed: it previously said "total expenses"."""
    if t > last_t:
        return 0
    else:
        return - prj_exps_Acq(t) + PV_ExpsAcq(t + 1) / (1 + DiscRate(t))
def PV_ExpsMaint(t):
    """Present value of maintenance expenses (negative, incurred at period
    start). Docstring fixed: it previously said "total expenses"."""
    if t > last_t:
        return 0
    else:
        return - prj_exps_Maint(t) + PV_ExpsMaint(t + 1) / (1 + DiscRate(t))
def PV_ExpsTotal(t):
    """Present value of total expenses (negative, incurred at period start)."""
    if t > last_t:
        return 0
    else:
        return - prj_exps_Total(t) + PV_ExpsTotal(t + 1) / (1 + DiscRate(t))
def PV_NetCashflows(t):
    """Present value of net liability cashflow.

    Premiums and expenses fall at period start (undiscounted this term);
    benefits and the future PV are discounted one period.
    """
    if t > last_t:
        return 0
    else:
        return (prj_incm_Premium(t)
                - prj_exps_Total(t)
                - prj_bnft_Total(t) / (1 + DiscRate(t))
                + PV_NetCashflows(t + 1) / (1 + DiscRate(t)))
|
991,865 | 53036919d42f6449071a47c869ab1563b831e608 | from datetime import datetime
from django.core.management.base import BaseCommand
def stats_for_type(role_type):
    """Print hourly registration counts for roles of the given type (Python 2).

    Output: the earliest/latest registration times, then one line per hour
    bucket as "<days since min> <hours within day> <count>".
    """
    from users.models import Role
    # NOTE(review): exclude() with two conditions removes only rows matching
    # BOTH (empty email AND inactive) — confirm that excluding either
    # condition independently was not intended.
    roles = Role.objects.filter(type=role_type).exclude(user__user__email='', user__user__is_active=False)
    times = roles.values_list('time', flat=True)
    min_time = min(times)
    # Truncate to the start of the hour so buckets align.
    min_time = datetime(min_time.year, min_time.month, min_time.day, min_time.hour)
    print "min time", min_time
    print "max time", max(times)
    counts = {}
    for time in times:
        # Bucket each registration into its hour.
        block_time = datetime(time.year, time.month, time.day, time.hour)
        counts.setdefault(block_time, 0)
        counts[block_time] += 1
    # NOTE(review): plain dict — bucket print order is arbitrary here.
    for block_time in counts:
        print (block_time-min_time).days, (block_time-min_time).seconds/3600, counts[block_time]
class Command(BaseCommand):
    """Management command printing registration statistics per role type."""
    help = "Provide registration statistics."

    def handle(self, *args, **options):
        # Same three role types, in the same order as before.
        for role_type in ('voter', 'observer', 'member'):
            stats_for_type(role_type)
|
991,866 | 76a90c859dd57d252cfb03bd0b13edd57c95b404 | from db import db
class PlayerModel(db.Model):
    """SQLAlchemy model for a poker player (name, strategy and chip state)."""
    __tablename__ = 'players'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80))
    strategy = db.Column(db.String(80))
    buyIn = db.Column(db.Float)
    chips = db.Column(db.Float)
    unitBet = db.Column(db.Float)

    # hand_id = db.Column(db.Integer, db.ForeignKey('hands.id'))
    # hand = db.relationship('HandModel')

    def __init__(self, name, strategy, buyIn, chips, unitBet):
        self.name, self.strategy = name, strategy
        self.buyIn, self.chips, self.unitBet = buyIn, chips, unitBet

    def json(self):
        """Serialize the player to a plain dict (id excluded)."""
        return {field: getattr(self, field)
                for field in ('name', 'strategy', 'buyIn', 'chips', 'unitBet')}

    @classmethod
    def find_by_name(cls, name):
        """Return the first player with this name, or None.

        Equivalent SQL: SELECT * FROM players WHERE name=? LIMIT 1.
        """
        return cls.query.filter_by(name=name).first()

    def save_to_db(self):
        """Insert or update this row via the session (upsert semantics)."""
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        """Delete this row: DELETE FROM players WHERE id=?."""
        db.session.delete(self)
        db.session.commit()
|
991,867 | f984da5d891529e2c26ee55bc90f19832b172cc9 | from caffe.proto import caffe_pb2
from caffe.io import blobproto_to_array
import os
def binaryfile_to_blobproto_to_array(file_path):
    """Load a caffe BlobProto written by WriteProtoToBinaryFile and return
    its contents as a numpy array.

    :param file_path: path to the binary protobuf file
    :raises AssertionError: if the file does not exist
    """
    # input the filepath save by function WriteProtoToBinaryFile in caffe
    # output the array data
    assert os.path.exists(file_path), 'File does not exists: %s' % file_path
    # Fix: close the file deterministically (the original left the handle open).
    with open(file_path, 'rb') as f:
        binary_data = f.read()
    blob_proto = caffe_pb2.BlobProto()
    blob_proto.ParseFromString(binary_data)
    array_data = blobproto_to_array(blob_proto)
    return array_data
|
991,868 | 57ffc4e9de1813e79e280bec412fa998b0637439 | from flask import Flask, request
from flask_restful import Resource, Api
from sqlalchemy import create_engine, desc
from json import dumps
from flask import jsonify, json
# Using two databases:
db_connect = create_engine('sqlite:///chinook.db')  # sample dataset for sqlite
db2_connect = create_engine('sqlite:///sf-food-inspection.sqlite')  # San Francisco Food Inspection Dataset. Downloaded from Public Affairs Data Journalism at Stanford starter pack site
app = Flask(__name__)
api = Api(app)  # flask-restful wrapper; resources registered below
class Restaurants(Resource):
    """REST resource over the inspection_records table."""

    def get(self):
        """Return all restaurants with their inspection scores as JSON."""
        conn = db2_connect.connect()
        query = conn.execute("select business_id, business_name, inspection_score from inspection_records")
        result = {'restaurants': [dict(zip(tuple(query.keys()), i)) for i in query.cursor]}
        return jsonify(result)  # returns as json

    def post(self):
        """Insert one inspection record from the JSON request body."""
        conn = db2_connect.connect()
        print(request.json)
        business_id = request.json['business_id']
        business_name = request.json['business_name']
        inspection_score = request.json['inspection_score']
        # Security fix: bind parameters instead of str.format — the original
        # interpolated untrusted request data into the SQL string (injection).
        conn.execute(
            "insert into inspection_records values(null, ?, ?, ?)",
            (business_id, business_name, inspection_score),
        )
        return {'status': 'success'}
class Restaurants_Score(Resource):
    """Lookup of inspection records by exact inspection score."""

    def get(self, score):
        """Return all records whose inspection_score equals `score`."""
        conn = db2_connect.connect()
        # Security/robustness: bind the value instead of %-formatting it
        # into the SQL string (int() still validates the path segment).
        query = conn.execute(
            "select * from inspection_records where inspection_score = ?",
            (int(score),),
        )
        result = {'%d' % int(score): [dict(zip(tuple(query.keys()), i)) for i in query.cursor]}
        return jsonify(result)
class Employees(Resource):
    """List of all employee ids from the chinook sample database."""

    def get(self):
        """Return every EmployeeId (first column of the employees table)."""
        conn = db_connect.connect()
        query = conn.execute("select * from employees")
        employee_ids = [row[0] for row in query.cursor.fetchall()]
        return jsonify({'employees': employee_ids})
class Employees_Name(Resource):
    """Lookup of a single employee row by id."""

    def get(self, employee_id):
        """Return the full row(s) for the given EmployeeId."""
        conn = db_connect.connect()
        # Security/robustness: bind the id instead of %-formatting it into
        # the SQL string.
        query = conn.execute(
            "select * from employees where EmployeeId = ?",
            (int(employee_id),),
        )
        result = {'data': [dict(zip(tuple(query.keys()), i)) for i in query.cursor]}
        return jsonify(result)
# Route table: resource classes mapped to their URL patterns.
api.add_resource(Employees, '/employees')
api.add_resource(Employees_Name, '/employees/<employee_id>')
api.add_resource(Restaurants, '/restaurants')
api.add_resource(Restaurants_Score, '/restaurants/<score>')

if __name__ == '__main__':
    # Pretty-print JSON responses in the dev server.
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = True
    app.run(port=5002)
|
991,869 | ea5e6d667e1c30937126f45c889897ea008bd5fe | str = input("enter any string")
# Check whether the input string (bound to `str` above — it shadows the
# builtin; rename upstream when possible) contains every vowel.
vowels = ['a', 'e', 'i', 'o', 'u']
# Idiom fix: all() replaces the manual flag-and-break loop; same semantics.
result = all(vowel in str for vowel in vowels)
if result == False:
    print("string not accepted")
if result == True:
    print("string accepted")
991,870 | 19950d535569d6ee8d33094f8eb42427fb564069 | import os
from bottle import Bottle, request
app = Bottle()


# Fix: the original had a stray print('test') between the decorator and the
# def, which is a SyntaxError (a decorator must be immediately followed by
# a function or class definition). The statement was removed.
@app.route("/ping", name="ping")
def ping():
    """Return request headers, the process environment and this route's URL."""
    return {
        "headers": dict(request.headers),
        "environ": dict(os.environ),
        "link": app.get_url("ping"),
    }
# Dev entry point: serve on localhost:8080 with bottle's built-in server.
if __name__ == "__main__":
    app.run(host="localhost", port=8080)
|
991,871 | 413cec298fd7ec9a1a119b4c1dd3e5ce5c5b4222 | # -*- coding: utf-8 -*-
"""
Created on Sun May 5 23:42:10 2019
@author: Heriz
"""
from MovieLens import MovieLens
from collections import defaultdict
from six import iteritems
import numpy as np
class Trainset:
    """In-memory ratings trainset built from the MovieLens train split.

    Attributes:
        dataset: the training DataFrame (userId, movieId, rating columns).
        n_users / n_items: number of distinct users / items.
        ur: dict user id -> list of (movie id, rating).
        ir: dict movie id -> list of (user id, rating).
    """

    def __init__(self):
        ml = MovieLens()
        self.dataset = ml.getTrainSet()
        # Fix: count distinct users. The original did int() on the whole
        # column and then len() of that int, which raises TypeError.
        self.n_users = len(set(self.dataset['userId']))
        self.n_items = len(set(self.dataset['movieId']))
        self._global_mean = None
        self.ur = defaultdict(list)
        self.ir = defaultdict(list)
        # Single pass fills both indexes. Fix: the original's second loop
        # appended the item index into self.ur instead of self.ir, leaving
        # self.ir empty (and knows_item always False).
        for index, row in self.dataset.iterrows():
            userId = row['userId']
            movieId = row['movieId']
            rating = row['rating']
            self.ur[userId].append((movieId, rating))
            self.ir[movieId].append((userId, rating))

    def knows_user(self, uid):
        """Return True if uid has at least one rating in the trainset."""
        return uid in self.ur

    def knows_item(self, iid):
        """Return True if iid has at least one rating in the trainset."""
        return iid in self.ir

    def all_ratings(self):
        """Generator function to iterate over all ratings.

        Yields:
            A tuple ``(uid, iid, rating)`` where ids are inner ids (see
            :ref:`this note <raw_inner_note>`).
        """
        # six.iteritems replaced with the py3 dict.items() equivalent.
        for u, u_ratings in self.ur.items():
            for i, r in u_ratings:
                yield u, i, r

    def all_users(self):
        """Generator function to iterate over all users.

        Yields:
            Inner id of users.
        """
        return range(self.n_users)

    def all_items(self):
        """Generator function to iterate over all items.

        Yields:
            Inner id of items.
        """
        return range(self.n_items)

    @property
    def global_mean(self):
        """Return the mean of all ratings.

        It's only computed once."""
        if self._global_mean is None:
            self._global_mean = np.mean([r for (_, _, r) in
                                         self.all_ratings()])
        return self._global_mean
991,872 | dfb0e8d134ef57636626f9250d6b3b4f8f092111 | #非常干净
class MissionAlgor(db.Model):
    """Algorithm record attached to a mission, ordered by `sequence`.

    NOTE(review): LEMIid/ALGOid look like foreign keys — confirm targets.
    """
    MIALid = db.Column(db.String(36), nullable=False)
    LEMIid = db.Column(db.String(36))
    ALGOid = db.Column(db.String(36))
    code = db.Column(db.String(30), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    type = db.Column(db.SMALLINT, nullable=False)
    sequence = db.Column(db.INTEGER, nullable=False)
    # primary key: MIALid
class MissionPar(db.Model):
    """Code/name/value parameter belonging to a MissionAlgor row (MIALid)."""
    MIPAid = db.Column(db.String(36), nullable=False)
    MIALid = db.Column(db.String(36))
    code = db.Column(db.String(50), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    value = db.Column(db.String(100), nullable=False)
    illustration = db.Column(db.String(200), nullable=False, default=' ')
    # primary key: MIPAid
class accessory(db.Model):
    """File attachment: stored/display filenames, type, path, owner and audit fields."""
    ACCEid = db.Column(db.String(36), nullable=False)
    ownerid = db.Column(db.String(36), nullable=False)  # id of the owning record
    realFilename = db.Column(db.String(100), nullable=False, default=' ')
    filename = db.Column(db.String(100), nullable=False, default=' ')
    filetype = db.Column(db.String(50), nullable=False, default=' ')
    path = db.Column(db.String(200), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')  # operator id
    # primary key: ACCEid
class algorithm(db.Model):
    """Algorithm catalogue entry (code, name, type, description)."""
    ALGOid = db.Column(db.String(36), nullable=False)
    code = db.Column(db.String(30), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    type = db.Column(db.SMALLINT, nullable=False)
    illustration = db.Column(db.String(300), nullable=False, default=' ')
    # primary key: ALGOid
class algorithmPar(db.Model):
    """Code/name/value parameter belonging to an algorithm row (ALGOid)."""
    ALPAid = db.Column(db.String(36), nullable=False)
    ALGOid = db.Column(db.String(36))
    code = db.Column(db.String(50), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    value = db.Column(db.String(100), nullable=False)
    illustration = db.Column(db.String(200), nullable=False, default=' ')
    # primary key: ALPAid
class ancientBook(db.Model):
    """Ancient-book literature reference attached to a REBOid record."""
    ANBOid = db.Column(db.String(36), nullable=False)
    REBOid = db.Column(db.String(36))
    literature_name = db.Column(db.String(20), nullable=False, default=' ')
    pubType = db.Column(db.String(10), nullable=False, default=' ')  # publication type
    benefit = db.Column(db.String(100), nullable=False, default=' ')
    # primary key: ANBOid
class ancientSpecialist(db.Model):
    """Specialist (name, major) attached to a REBOid record."""
    ANSPid = db.Column(db.String(36), nullable=False)
    REBOid = db.Column(db.String(36))
    name = db.Column(db.String(20), nullable=False)
    major = db.Column(db.String(100), nullable=False)
    # primary key: ANSPid
class background(db.Model):
    """Personal background profile for a STSTid record: hobbies, habits,
    cultural interests, career and honors."""
    BACKid = db.Column(db.String(36), nullable=False)
    STSTid = db.Column(db.String(36))
    # hobbies
    sport = db.Column(db.String(300), nullable=False)
    literature = db.Column(db.String(300), nullable=False)
    health = db.Column(db.String(300), nullable=False)
    tour = db.Column(db.String(300), nullable=False)
    other_hobby = db.Column(db.String(300), nullable=False)
    # habits
    smoke = db.Column(db.String(300), nullable=False)
    coffee = db.Column(db.String(300), nullable=False)
    alcohol = db.Column(db.String(300), nullable=False)
    tea = db.Column(db.String(300), nullable=False)
    movie = db.Column(db.String(300), nullable=False)
    other_habit = db.Column(db.String(300), nullable=False)
    # cultural interests
    chinese = db.Column(db.String(300), nullable=False)
    philosophy = db.Column(db.String(300), nullable=False)
    art = db.Column(db.String(300), nullable=False)
    custom = db.Column(db.String(300), nullable=False)
    belief = db.Column(db.String(300), nullable=False)
    other_culture = db.Column(db.String(300), nullable=False)
    other_experience = db.Column(db.Text, nullable=False, default=' ')
    # career and honors
    tiptop_duty = db.Column(db.String(300), nullable=False)
    years = db.Column(db.String(10), nullable=False)
    organization = db.Column(db.String(300), nullable=False)
    parttime_duty = db.Column(db.String(300), nullable=False)
    glory = db.Column(db.String(300), nullable=False)
    acquire_time = db.Column(db.String(10), nullable=False)
    # primary key: BACKid
class badInstance(db.Model):
    """Free-text negative example attached to a REBOid record."""
    BAINid = db.Column(db.String(36), nullable=False)
    REBOid = db.Column(db.String(36))
    content = db.Column(db.Text, nullable=False, default=' ')
    # primary key: BAINid
class basicinformation(db.Model):
    """Basic person record (note: `Dgree` spelling is part of the schema)."""
    Num = db.Column(db.String(50), nullable=False)
    Name = db.Column(db.String(50), nullable=False)
    Gender = db.Column(db.String(50), nullable=False)
    Age = db.Column(db.String(50), nullable=False)
    Diploma = db.Column(db.String(50), nullable=False)
    Dgree = db.Column(db.String(50), nullable=False)
    Rank = db.Column(db.String(50))
    PromotionTime = db.Column(db.String(50))
    Duty = db.Column(db.Text)
    HashId = db.Column(db.String(36), nullable=False)
    State = db.Column(db.SMALLINT)
    SpecialisId = db.Column(db.String(36))
    OperatorId = db.Column(db.String(36))
    CreateDate = db.Column(db.DateTime)
    # primary key: HashId
class bookResult(db.Model):
    """Literature search result (title, author, abstract, source, files)."""
    Num = db.Column(db.String(50))
    Title = db.Column(db.String(50))
    Author = db.Column(db.String(50))
    AuthorAffiliation = db.Column(db.String(50))
    Abstract = db.Column(db.Text)
    Source = db.Column(db.String(50))
    Files = db.Column(db.String(50))
    SpecialistId = db.Column(db.String(36))
    OperatorId = db.Column(db.String(36))
    HashId = db.Column(db.String(36), nullable=False)
    State = db.Column(db.SMALLINT)
    # primary key: HashId
class cDisease(db.Model):
    """Disease taxonomy node: hierarchical via parentcode/level, with audit fields."""
    CDISid = db.Column(db.String(36), nullable=False)
    code = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(100), nullable=False)
    parentcode = db.Column(db.String(20), nullable=False)  # code of the parent node
    level = db.Column(db.SMALLINT, nullable=False)  # depth in the hierarchy
    isClassical = db.Column(db.Boolean, nullable=False, default=1)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')  # operator id
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    # primary key: CDISid
    # unique key AK_CDISEASE_PK_CODE_CDISEASE: code
class caseAnalysis(db.Model):
    """Analysis of a diagnosis case (generated from legacy DDL)."""
    # Primary key restored from the generator's broken "#primary key" comment.
    CAANid = db.Column(db.String(36), primary_key=True)
    SPETid = db.Column(db.String(36))
    CASEid = db.Column(db.String(36), nullable=False)
    diagnose_mode = db.Column(db.SMALLINT, nullable=False, default=0)
    diagnose_method = db.Column(db.SMALLINT, nullable=False, default=0)
    look_body = db.Column(db.String(50), nullable=False)
    illustration = db.Column(db.Text, nullable=False)
    tongue = db.Column(db.SMALLINT, nullable=False, default=0)
    tongue1 = db.Column(db.String(50), nullable=False)
    look_place = db.Column(db.String(50), nullable=False)
    sound = db.Column(db.String(50), nullable=False)
    taste = db.Column(db.String(50), nullable=False)
    question_answer = db.Column(db.String(200), nullable=False)
    question_content = db.Column(db.Text, nullable=False)
    special_question = db.Column(db.String(200), nullable=False)
    feel_diagnose = db.Column(db.String(200), nullable=False)
    habit_dmethod = db.Column(db.String(200), nullable=False)
    important_question = db.Column(db.String(500), nullable=False)
    information_select = db.Column(db.String(200), nullable=False)
    analysis_way = db.Column(db.String(500), nullable=False)
    analysis_method = db.Column(db.SMALLINT, nullable=False, default=0)
    other_method = db.Column(db.String(200), nullable=False)
    analysis_evidence = db.Column(db.String(200), nullable=False)
    reason_evidence = db.Column(db.String(200), nullable=False)
    character_evidence = db.Column(db.String(200), nullable=False)
    place_evidence = db.Column(db.String(200), nullable=False)
    situation_evidence = db.Column(db.String(200), nullable=False)
    semiotics = db.Column(db.String(50), nullable=False)
    recipe_name = db.Column(db.String(50), nullable=False)
    produce_method = db.Column(db.String(200), nullable=False)
    takedrug_way = db.Column(db.String(200), nullable=False)
    doctor_advice = db.Column(db.Text, nullable=False)
    experience = db.Column(db.String(500), nullable=False)
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
class classicCase(db.Model):
    """Classic case record with up to 18 repeated diagnosis-visit column groups.

    The original DDL dump repeated the same 12 columns (diagTime, mainDisease,
    caseAbstract, zhengKeys, westdisease, tcmdisease, tcmsyndrome, therapy,
    fangYao, otherCure, doctorAdvice, efficacy) with suffixes 1..18; those
    columns are generated programmatically below the class body.
    """
    # Primary key restored from the generator's broken "#primary key" comment.
    clcaid = db.Column(db.String(36), primary_key=True)
    operatorId = db.Column(db.String(36))
    SPETid = db.Column(db.String(36))
    state = db.Column(db.SMALLINT)
    diagnosisNo = db.Column(db.String(50))
    creatDate = db.Column(db.DateTime)
    num = db.Column(db.String(36), nullable=False)
    title = db.Column(db.String(50))
    source = db.Column(db.SMALLINT)
    caseNum = db.Column(db.String(50))
    name = db.Column(db.String(50))
    gender = db.Column(db.String(50))
    birthday = db.Column(db.String(50))
    people = db.Column(db.String(50))
    job = db.Column(db.String(50))
    hometown = db.Column(db.String(50))
    married = db.Column(db.String(50))
    address = db.Column(db.String(50))
    postcode = db.Column(db.String(50))
    phone = db.Column(db.String(50))
    presentIll = db.Column(db.Text)
    pastIll = db.Column(db.Text)
    familyIll = db.Column(db.Text)
    personalIll = db.Column(db.Text)
    visit = db.Column(db.Text)
    note = db.Column(db.Text)
    remark = db.Column(db.Text)
    collectPerson = db.Column(db.String(50))
    collectTime = db.Column(db.String(50))
    verifyOpinion = db.Column(db.Text)
    verifyName = db.Column(db.String(50))
    verifyTime = db.Column(db.String(50))


# Generate the 18 repeated visit-column groups (diagTime1..18, mainDisease1..18,
# ...) instead of 216 hand-written declarations. SQLAlchemy's declarative
# extension supports adding Column attributes to an already-mapped class via
# setattr; creation order is preserved, so the table column order matches the
# original DDL (visit groups after the header columns).
for _visit in range(1, 19):
    for _base, _coltype in (
        ('diagTime', db.String(50)),
        ('mainDisease', db.Text),
        ('caseAbstract', db.Text),
        ('zhengKeys', db.Text),
        ('westdisease', db.String(50)),
        ('tcmdisease', db.String(50)),
        ('tcmsyndrome', db.String(50)),
        ('therapy', db.Text),
        ('fangYao', db.Text),
        ('otherCure', db.Text),
        ('doctorAdvice', db.Text),
        ('efficacy', db.String(50)),
    ):
        setattr(classicCase, '%s%d' % (_base, _visit), db.Column(_coltype))
del _visit, _base, _coltype
class classicCaseDiagnosis(db.Model):
    """One diagnosis visit belonging to a classicCase record."""
    # Primary key restored from the generator's broken "#primary key" comment.
    # NOTE(review): the comment names only clcaid, but a composite key
    # (clcaid, diagnosisNum) may have been intended — confirm against the DB.
    clcaid = db.Column(db.String(36), primary_key=True)
    caseNum = db.Column(db.String(50))
    diagnosisNum = db.Column(db.String(50))
    diagTime = db.Column(db.String(50))
    mainDisease = db.Column(db.Text)
    caseAbstract = db.Column(db.Text)
    zhengKeys = db.Column(db.Text)
    westdisease = db.Column(db.String(50))
    tcmdisease = db.Column(db.String(50))
    tcmsyndrome = db.Column(db.String(50))
    therapy = db.Column(db.Text)
    fangYao = db.Column(db.Text)
    otherCure = db.Column(db.Text)
    doctorAdvice = db.Column(db.Text)
    efficacy = db.Column(db.String(50))
class continuingeducation(db.Model):
    """Continuing-education/training record (generated from legacy DDL)."""
    Participent = db.Column(db.String(50))
    TrainingName = db.Column(db.String(50))
    Category = db.Column(db.String(50))
    TrainingDate = db.Column(db.String(50))
    Hours = db.Column(db.String(50))
    CreditHour = db.Column(db.String(50))
    # Primary key restored from the generator's broken "#primary key" comment.
    HashId = db.Column(db.String(36), primary_key=True)
    SpecialisId = db.Column(db.String(36))
    State = db.Column(db.SMALLINT)
    OperatorId = db.Column(db.String(36))
    CreateDate = db.Column(db.DateTime)
class dCase(db.Model):
    """Diagnosis case record (generated from legacy DDL)."""
    # Primary key restored from the generator's broken "#primary key" comment.
    CASEid = db.Column(db.String(36), primary_key=True)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    DTMPid = db.Column(db.String(36), nullable=False, default=' ')
    code = db.Column(db.String(20), nullable=False)
    outpatientCode = db.Column(db.String(20), nullable=False)
    caseKind = db.Column(db.SMALLINT, nullable=False)  # was db.smallint (no such attribute)
    name = db.Column(db.String(20), nullable=False)
    age = db.Column(db.SMALLINT, nullable=False)  # was db.smallint
    month = db.Column(db.SMALLINT, nullable=False, default=0)  # was db.smallint
    gender = db.Column(db.SMALLINT, nullable=False)
    nationality = db.Column(db.SMALLINT, nullable=False)
    personSort = db.Column(db.SMALLINT, nullable=False)
    afflication = db.Column(db.String(200), nullable=False, default=' ')
    job = db.Column(db.String(20), nullable=False, default=' ')
    tel = db.Column(db.String(20), nullable=False, default=' ')
    address = db.Column(db.String(200), nullable=False, default=' ')
    birthplace = db.Column(db.String(6), nullable=False)
    liveplace = db.Column(db.String(6), nullable=False)
    education = db.Column(db.SMALLINT, nullable=False)
    marriage = db.Column(db.SMALLINT, nullable=False)
    ohistory = db.Column(db.Text, nullable=False, default=' ')
    phistory = db.Column(db.Text, nullable=False, default=' ')
    fhistory = db.Column(db.Text, nullable=False, default=' ')
    allergy = db.Column(db.Text, nullable=False, default=' ')
    extraMed = db.Column(db.String(500), nullable=False, default=' ')
    nhistory = db.Column(db.Text, nullable=False, default=' ')
    mresult = db.Column(db.SMALLINT, nullable=False)
    vresult = db.Column(db.String(500), nullable=False)
    illustration = db.Column(db.Text, nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    hasFile = db.Column(db.Boolean, nullable=False, default=0)
    preState = db.Column(db.SMALLINT, nullable=False, default=0)
    # Restored from the mangled "check ... [age] >= 0 and [age] <= 999" line.
    __table_args__ = (db.CheckConstraint('age >= 0 AND age <= 999'),)
class dMethod(db.Model):
    """Diagnosis-method dictionary entry, organized as a code/parentcode tree."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DMETid = db.Column(db.String(36), primary_key=True)
    # unique=True restored from the "AK_DMETHOD_PK_CODE_DMETHOD" alternate-key comment.
    code = db.Column(db.String(20), nullable=False, unique=True)
    name = db.Column(db.String(100), nullable=False)
    parentcode = db.Column(db.String(20), nullable=False)
    level = db.Column(db.SMALLINT, nullable=False)
    isClassical = db.Column(db.Boolean, nullable=False, default=1)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
class dTemplate(db.Model):
    """Diagnosis template (generated from legacy DDL)."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DTMPid = db.Column(db.String(36), primary_key=True)
    # unique=True restored from the "AK_DTEMPLATE_PK_CODE_DTEMPLAT" alternate-key comment.
    code = db.Column(db.String(20), nullable=False, unique=True)
    name = db.Column(db.String(200), nullable=False)
    useClassCdis = db.Column(db.Boolean, nullable=False, default=0)
    useClassDmet = db.Column(db.Boolean, nullable=False, default=0)
    CDISid = db.Column(db.String(36), nullable=False, default=' ')
    WDISid = db.Column(db.String(36), nullable=False, default=' ')
    SEMCid = db.Column(db.String(36), nullable=False)
    DMETid = db.Column(db.String(36), nullable=False)
    takeWay = db.Column(db.SMALLINT, nullable=False)
    drugForm = db.Column(db.SMALLINT, nullable=False)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    useClassWdis = db.Column(db.Boolean, nullable=False, default=0)
    state = db.Column(db.SMALLINT, nullable=False, default=0)
class dataSet(db.Model):
    """Learning/analysis data set built from a diagnosis template."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DASEid = db.Column(db.String(36), primary_key=True)
    DTMPid = db.Column(db.String(36), nullable=False)
    code = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    missionType = db.Column(db.SMALLINT, nullable=False)
    sampleNum = db.Column(db.INTEGER, nullable=False, default=0)
    attributeNum = db.Column(db.INTEGER, nullable=False, default=0)
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
class diagExam(db.Model):
    """Examination result attached to a diagnosis."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DIEXid = db.Column(db.String(36), primary_key=True)
    DIAGid = db.Column(db.String(36))
    EXAMid = db.Column(db.String(36))
    value = db.Column(db.String(200), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    date = db.Column(db.DateTime)
    # Restored from the mangled "sequence INTEGER default==" line.
    sequence = db.Column(db.INTEGER, default=0)
    address = db.Column(db.String(100))
class diagItem(db.Model):
    """Single drug line item inside a diagnosis recipe."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DIITid = db.Column(db.String(36), primary_key=True)
    DIREid = db.Column(db.String(36))
    dru_DRUGid = db.Column(db.String(36))
    DRUGid = db.Column(db.String(36))
    # was db.decimal (no such attribute); Numeric is the SQLAlchemy equivalent
    quality = db.Column(db.Numeric(18, 4), nullable=False, default=0)
    sequence = db.Column(db.INTEGER, nullable=False, default=0)
    illustration = db.Column(db.Text, nullable=False, default=' ')
class diagRecipe(db.Model):
    """Prescription (recipe) attached to a diagnosis."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DIREid = db.Column(db.String(36), primary_key=True)
    DIAGid = db.Column(db.String(36))
    FREPid = db.Column(db.String(36))
    isCustomed = db.Column(db.Boolean, nullable=False, default=0)
    name = db.Column(db.String(36), nullable=False, default=' ')
    doctorAdvice = db.Column(db.Text, nullable=False, default=' ')
    drugForm = db.Column(db.SMALLINT, nullable=False)
    takeWay = db.Column(db.SMALLINT, nullable=False)
    quality = db.Column(db.SMALLINT, nullable=False)  # was db.smallint (no such attribute)
    produceMethod = db.Column(db.String(100), nullable=False)
    # Restored from the mangled '"usage" String(100), ...' line.
    usage = db.Column(db.String(100), nullable=False)
class diagSymptom(db.Model):
    """Symptom observation attached to a diagnosis."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DISYid = db.Column(db.String(36), primary_key=True)
    SYPMid = db.Column(db.String(36))
    DIAGid = db.Column(db.String(36))
    value = db.Column(db.String(200), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    # Restored from the mangled "sequence INTEGER default==" line.
    sequence = db.Column(db.INTEGER, default=0)
class diagnose(db.Model):
    """Diagnosis visit record for a case (generated from legacy DDL)."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DIAGid = db.Column(db.String(36), primary_key=True)
    CASEid = db.Column(db.String(36))
    CDISid = db.Column(db.String(36), nullable=False, default=' ')
    CDISid2 = db.Column(db.String(36), nullable=False, default=' ')
    WDISid = db.Column(db.String(36), nullable=False, default=' ')
    WDISid2 = db.Column(db.String(36), nullable=False, default=' ')
    SEMCid = db.Column(db.String(36), nullable=False)
    SEMCid2 = db.Column(db.String(36), nullable=False)
    SEMCid3 = db.Column(db.String(36), nullable=False)
    DMETid = db.Column(db.String(36), nullable=False)
    DMETid2 = db.Column(db.String(36), nullable=False)
    DMETid3 = db.Column(db.String(36), nullable=False)
    DIAGno = db.Column(db.SMALLINT, nullable=False, default=1)
    DIAGnum = db.Column(db.SMALLINT, nullable=False)
    DIAGday = db.Column(db.DateTime, nullable=False)
    lunarDay = db.Column(db.String(50), nullable=False, default=' ')
    solarTerm = db.Column(db.SMALLINT, nullable=False, default=0)
    DIAway = db.Column(db.SMALLINT, nullable=False, default=0)
    majorSue = db.Column(db.Text, nullable=False)
    illustration = db.Column(db.Text, nullable=False, default=' ')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    westernMed = db.Column(db.Text, nullable=False, default=' ')
    other = db.Column(db.Text, nullable=False, default=' ')
    preSEMCid = db.Column(db.String(36))
class district(db.Model):
    """Administrative district dictionary, organized as a code/parentcode tree."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DISTid = db.Column(db.String(36), primary_key=True)
    # unique=True restored from the "AK_DISTRICT_PK_CODE_DISTRICT" alternate-key comment.
    code = db.Column(db.String(20), nullable=False, unique=True)
    parentcode = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    level = db.Column(db.SMALLINT, nullable=False)
    py = db.Column(db.String(20), nullable=False, default=' ')
    wb = db.Column(db.String(20), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
class drug(db.Model):
    """Drug dictionary entry (generated from legacy DDL)."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DRUGid = db.Column(db.String(36), primary_key=True)
    # unique=True restored from the "AK_DRUG_PK_CODE_DRUG" alternate-key comment.
    code = db.Column(db.String(20), nullable=False, unique=True)
    name = db.Column(db.String(50), nullable=False)
    unit = db.Column(db.String(10), nullable=False)
    alias = db.Column(db.String(100), nullable=False, default=' ')
    py = db.Column(db.String(20), nullable=False, default=' ')
    wb = db.Column(db.String(20), nullable=False, default=' ')
    isClassical = db.Column(db.Boolean, nullable=False, default=1)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
class dtmpExamination(db.Model):
    """Examination item included in a diagnosis template."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DTEXid = db.Column(db.String(36), primary_key=True)
    DTMPid = db.Column(db.String(36))
    EXAMid = db.Column(db.String(36))
    sequence = db.Column(db.INTEGER, nullable=False)
    illustration = db.Column(db.Text, nullable=False)
class dtmpSymptom(db.Model):
    """Symptom item included in a diagnosis template."""
    # Primary key restored from the generator's broken "#primary key" comment.
    DTSYid = db.Column(db.String(36), primary_key=True)
    DTMPid = db.Column(db.String(36))
    SYPMid = db.Column(db.String(36))
    isFirst = db.Column(db.Boolean, nullable=False, default=1)
    sequence = db.Column(db.INTEGER, nullable=False, default=0)
    illustration = db.Column(db.Text, nullable=False, default=' ')
class examination(db.Model):
    """Examination dictionary entry (generated from legacy DDL)."""
    # Primary key restored from the generator's broken "#primary key" comment.
    EXAMid = db.Column(db.String(36), primary_key=True)
    # unique=True restored from the "AK_EXAMINATION_PK_COD_EXAMINAT" alternate-key comment.
    code = db.Column(db.String(20), nullable=False, unique=True)
    name = db.Column(db.String(100), nullable=False)
    abbreviation = db.Column(db.String(50), nullable=False, default=' ')
    kind = db.Column(db.SMALLINT, nullable=False, default=0)
    normalValue = db.Column(db.String(200), nullable=False, default=' ')
    hasFile = db.Column(db.Boolean, nullable=False, default=0)
    isClassical = db.Column(db.Boolean, nullable=False, default=1)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
class feature(db.Model):
    """Feature (attribute) definition belonging to a data set."""
    # Primary key restored from the generator's broken "#primary key" comment.
    FEATid = db.Column(db.String(36), primary_key=True)
    DASEid = db.Column(db.String(36))
    code = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    featType = db.Column(db.SMALLINT, nullable=False)
    valSort = db.Column(db.SMALLINT, nullable=False, default=0)
    sequence = db.Column(db.INTEGER, nullable=False)
class fileinfo(db.Model):
    """Uploaded-file metadata record (generated from legacy DDL)."""
    # Primary key restored from the generator's broken "#primary key" comment.
    # NOTE(review): attribute spelling "FIlEINFOid" (lowercase 'l') is kept
    # as-is because callers may rely on the exact name.
    FIlEINFOid = db.Column(db.String(36), primary_key=True)
    code = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    SPETid = db.Column(db.String(36), nullable=False)
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False)
    optrid = db.Column(db.String(36), nullable=False)
    state = db.Column(db.SMALLINT, nullable=False)
    fileinfoType = db.Column(db.SMALLINT, nullable=False)
class fixedrecipe(db.Model):
    """Fixed (pre-defined) recipe dictionary entry."""
    # Primary key restored from the generator's broken "#primary key" comment.
    FREPid = db.Column(db.String(36), primary_key=True)
    # unique=True restored from the "AK_FIXEDRECIPE_PK_COD_FIXEDREC" alternate-key comment.
    code = db.Column(db.String(20), nullable=False, unique=True)
    name = db.Column(db.String(50), nullable=False)
    effect = db.Column(db.String(200), nullable=False, default=' ')
    py = db.Column(db.String(20), nullable=False, default=' ')
    wb = db.Column(db.String(20), nullable=False, default=' ')
    isClassical = db.Column(db.Boolean, nullable=False, default=1)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
class fixedrecipeItem(db.Model):
    """Single drug line item inside a fixed recipe."""
    # Primary key restored from the generator's broken "#primary key" comment.
    FRITid = db.Column(db.String(36), primary_key=True)
    DRUGid = db.Column(db.String(36))
    FREPid = db.Column(db.String(36))
    # was db.decimal (no such attribute); Numeric is the SQLAlchemy equivalent
    quality = db.Column(db.Numeric(18, 4), nullable=False, default=0)
    sequence = db.Column(db.INTEGER, nullable=False, default=0)
    illustration = db.Column(db.Text, nullable=False, default=' ')
class genre(db.Model):
    """Academic genre/school record linked to an inheritance study."""
    # Primary key restored from the generator's broken "#primary key" comment.
    GENRid = db.Column(db.String(36), primary_key=True)
    INSTid = db.Column(db.String(36))
    main_specialist = db.Column(db.String(20), nullable=False, default=' ')
    genre_name = db.Column(db.String(20), nullable=False, default=' ')
    achievement = db.Column(db.String(100), nullable=False, default=' ')
class goodInstance(db.Model):
    """Free-text "good instance" note linked via REBOid."""
    # Primary key restored from the generator's broken "#primary key" comment.
    GOINid = db.Column(db.String(36), primary_key=True)
    REBOid = db.Column(db.String(36))
    content = db.Column(db.Text, nullable=False, default=' ')
class inherit(db.Model):
    """Inheritance (apprenticeship) period record."""
    # Primary key restored from the generator's broken "#primary key" comment.
    INHEid = db.Column(db.String(36), primary_key=True)
    INSTid = db.Column(db.String(36))
    name = db.Column(db.String(20), nullable=False)
    start_date = db.Column(db.String(16), nullable=False, default='1900-1-1')
    end_date = db.Column(db.String(16), nullable=False, default='1900-1-1')
    theoretics = db.Column(db.String(100), nullable=False, default=' ')
    introduction = db.Column(db.Text, nullable=False, default=' ')
    # The generator also emitted a commented-out key_factor String(100) column;
    # left disabled to preserve the original schema.
class inheritStudy(db.Model):
    """Inheritance-study profile for a specialist."""
    # Primary key restored from the generator's broken "#primary key" comment.
    INSTid = db.Column(db.String(36), primary_key=True)
    SPETid = db.Column(db.String(36))
    enlighten_teacher = db.Column(db.String(20), nullable=False)
    work_place = db.Column(db.String(50), nullable=False)
    major = db.Column(db.String(50), nullable=False)
    early_degree = db.Column(db.Text, nullable=False, default=' ')
    textbook_type = db.Column(db.SMALLINT, nullable=False)
    textbook = db.Column(db.Text, nullable=False, default=' ')
    other_book = db.Column(db.Text, nullable=False, default=' ')
    study_time = db.Column(db.Text, nullable=False, default=' ')
    wisdom = db.Column(db.Text, nullable=False, default=' ')
    aphorism = db.Column(db.Text, nullable=False, default=' ')
    ideal = db.Column(db.Text, nullable=False, default=' ')
    point = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
class integratedSym(db.Model):
    """Integrated symptom definition linked to a base symptom."""
    # Primary key restored from the generator's broken "#primary key" comment.
    INSYid = db.Column(db.String(36), primary_key=True)
    SYPMid = db.Column(db.String(36))
    name = db.Column(db.String(50), nullable=False)
    valSort = db.Column(db.INTEGER, nullable=False)
    sequence = db.Column(db.INTEGER, nullable=False)
class learnMission(db.Model):
    """Machine-learning mission configured over a data set."""
    # Primary key restored from the generator's broken "#primary key" comment.
    LEMIid = db.Column(db.String(36), primary_key=True)
    DASEid = db.Column(db.String(36))
    DTMPid = db.Column(db.String(36), nullable=False)
    code = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    missionType = db.Column(db.SMALLINT, nullable=False)
    testType = db.Column(db.SMALLINT, nullable=False)
    # was db.decimal (no such attribute); Numeric is the SQLAlchemy equivalent
    testPar = db.Column(db.Numeric(4, 1), nullable=False)
    illustration = db.Column(db.Text, nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
class literature(db.Model):
    """Published literature record linked via SCIEid."""
    # Primary key restored from the generator's broken "#primary key" comment.
    LITEid = db.Column(db.String(36), primary_key=True)
    SCIEid = db.Column(db.String(36))
    literature_name = db.Column(db.String(20), nullable=False, default=' ')
    pubType = db.Column(db.String(10), nullable=False, default=' ')
    publishing_date = db.Column(db.String(16), nullable=False, default='1900-1-1')
    publishing_company = db.Column(db.String(30), nullable=False, default=' ')
    paper = db.Column(db.String(30), nullable=False, default=' ')
    magazine = db.Column(db.String(30), nullable=False, default=' ')
class mainBook(db.Model):
    """Main book record linked to an inheritance study."""
    # Primary key restored from the generator's broken "#primary key" comment.
    MABOid = db.Column(db.String(36), primary_key=True)
    INSTid = db.Column(db.String(36))
    literature_name = db.Column(db.String(20), nullable=False, default=' ')
    pubType = db.Column(db.String(10), nullable=False, default=' ')
    publishing_date = db.Column(db.String(16), nullable=False, default='1900-1-1')
    edition = db.Column(db.String(30), nullable=False, default=' ')
    publishing_company = db.Column(db.String(30), nullable=False, default=' ')
class mediaInfo(db.Model):
    """Audio/video media metadata record (generated from legacy DDL)."""
    Num = db.Column(db.String(50))
    Name = db.Column(db.String(50))
    Category = db.Column(db.String(50))
    Abs = db.Column(db.Text)
    ProducedTime = db.Column(db.String(50))
    Maker = db.Column(db.String(50))
    FileName = db.Column(db.String(50))
    Longth = db.Column(db.String(50))
    DataFormat = db.Column(db.String(50))
    SpecialistId = db.Column(db.String(36))
    OperatorId = db.Column(db.String(36))
    # Primary key restored from the generator's broken "#primary key" comment.
    HashId = db.Column(db.String(36), primary_key=True)
    State = db.Column(db.SMALLINT)
    CreateDate = db.Column(db.DateTime)
class message(db.Model):
    """Internal message between operators (generated from legacy DDL)."""
    # Primary key restored from the generator's broken "#primary key" comment.
    MESGid = db.Column(db.String(36), primary_key=True)
    title = db.Column(db.String(100), nullable=False)
    content = db.Column(db.String(256), nullable=False)
    sender = db.Column(db.String(36), nullable=False)
    receivor = db.Column(db.String(36), nullable=False)
    readed = db.Column(db.Boolean, nullable=False)
    msgDate = db.Column(db.DateTime, nullable=False)
    sysMsg = db.Column(db.Boolean, nullable=False)
class modernBook(db.Model):
    """Modern book record linked via REBOid."""
    # Primary key restored from the generator's broken "#primary key" comment.
    MOBOid = db.Column(db.String(36), primary_key=True)
    REBOid = db.Column(db.String(36))
    literature_name = db.Column(db.String(20), nullable=False, default=' ')
    pubType = db.Column(db.String(10), nullable=False, default=' ')
    benefit = db.Column(db.String(100), nullable=False, default=' ')
class modernSpecialist(db.Model):
    """Modern specialist record linked via REBOid."""
    # Primary key restored from the generator's broken "#primary key" comment.
    MOSPid = db.Column(db.String(36), primary_key=True)
    REBOid = db.Column(db.String(36))
    name = db.Column(db.String(20), nullable=False)
    isprofession = db.Column(db.Boolean, nullable=False, default=0)
    afflication = db.Column(db.String(100), nullable=False)
    major = db.Column(db.String(100), nullable=False)
class newDTechnology(db.Model):
    """New technology/achievement record (generated from legacy DDL)."""
    OperatorId = db.Column(db.String(36))
    Num = db.Column(db.String(50))
    Name = db.Column(db.String(50))
    Content = db.Column(db.Text)
    FormationTime = db.Column(db.String(50))
    DevelopingPeople = db.Column(db.String(50))
    Possessor = db.Column(db.String(50))
    Bearer = db.Column(db.String(50))
    DevelopmentAffiliation = db.Column(db.Text)
    PossesionAffiliation = db.Column(db.Text)
    ApplicationAffiliation = db.Column(db.Text)
    ApplicationStartingTime = db.Column(db.String(50))
    Files = db.Column(db.String(50))
    SpecialisId = db.Column(db.String(36))
    # Primary key restored from the generator's broken "#primary key" comment.
    HashId = db.Column(db.String(36), primary_key=True)
    State = db.Column(db.SMALLINT)
    CreateDate = db.Column(db.DateTime)
class operator(db.Model):
    """System operator (user) account (generated from legacy DDL)."""
    # Primary key restored from the generator's broken "#primary key" comment.
    OPTRid = db.Column(db.String(36), primary_key=True)
    ROLEid = db.Column(db.String(36), nullable=False)
    # unique=True restored from the "AK_OPERATOR_PK_USERNA_OPERATOR" alternate-key comment.
    username = db.Column(db.String(20), nullable=False, unique=True)
    password = db.Column(db.String(100), nullable=False)
    realname = db.Column(db.String(20), nullable=False)
    gender = db.Column(db.SMALLINT, nullable=False, default=0)
    part = db.Column(db.String(100), nullable=False, default=' ')
    # was db.char (no such attribute); String(36) matches every other SPETid column
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    insertLock = db.Column(db.Boolean, nullable=False, default=0)
    editLock = db.Column(db.Boolean, nullable=False, default=0)
    deleteLock = db.Column(db.Boolean, nullable=False, default=0)
    state = db.Column(db.SMALLINT, nullable=False, default=0)
class operatorfun(db.Model):
    """Operator <-> system-function link table. Fix: composite primary key declared."""
    OPTRid = db.Column(db.String(36), primary_key=True, nullable=False)
    SFUNid = db.Column(db.String(36), primary_key=True, nullable=False)
    #primary key = db.Column(db.(OPTRid, SFUNid)
class otherBook(db.Model):
    """Other-book reference. Fix: OTBOid marked primary key (model had none)."""
    OTBOid = db.Column(db.String(36), primary_key=True, nullable=False)
    BACKid = db.Column(db.String(36))
    literature_name = db.Column(db.String(20), nullable=False, default=' ')
    pubType = db.Column(db.String(10), nullable=False, default=' ')
    benefit = db.Column(db.String(100), nullable=False, default=' ')
    #primary key = db.Column(db.(OTBOid)
class otherInformation(db.Model):
    """Miscellaneous literature information. Fix: OTINid marked primary key (model had none)."""
    OTINid = db.Column(db.String(36), primary_key=True, nullable=False)
    INSTid = db.Column(db.String(36))
    literature_name = db.Column(db.String(20), nullable=False, default=' ')
    publishing_date = db.Column(db.String(16), nullable=False, default='1900-1-1')
    edition = db.Column(db.String(30), nullable=False, default=' ')
    publishing_company = db.Column(db.String(30), nullable=False, default=' ')
    magazine = db.Column(db.String(30), nullable=False, default=' ')
    entrepreneur = db.Column(db.String(30), nullable=False, default=' ')
    #primary key = db.Column(db.(OTINid)
class paperResult(db.Model):
    """Published-paper result. Fix: HashId marked primary key (model had none)."""
    num = db.Column(db.String(50))
    Title = db.Column(db.String(150))
    Author = db.Column(db.String(50))
    AuthorAffiliation = db.Column(db.String(50))
    Abstract = db.Column(db.Text)
    Source = db.Column(db.String(50))
    Files = db.Column(db.String(50))
    SpeicalistId = db.Column(db.String(36))
    OperatorId = db.Column(db.String(36))
    HashId = db.Column(db.String(36), primary_key=True, nullable=False)
    State = db.Column(db.SMALLINT)
    CreateDate = db.Column(db.DateTime)
    #primary key = db.Column(db.(HashId)
class patentResult(db.Model):
    """Patent result. Fix: HashId marked primary key (model had none)."""
    Num = db.Column(db.String(50))
    Name = db.Column(db.String(50))
    ApplicationNum = db.Column(db.String(50))
    PatentNum = db.Column(db.String(50))
    PatentMandate = db.Column(db.String(50))
    Inventor = db.Column(db.String(50))
    Patentee = db.Column(db.String(50))
    Files = db.Column(db.String(50))
    SpecialistId = db.Column(db.String(36))
    OperatorId = db.Column(db.String(36))
    HashId = db.Column(db.String(36), primary_key=True, nullable=False)
    State = db.Column(db.SMALLINT)
    createDate = db.Column(db.DateTime)
    #primary key = db.Column(db.(HashId)
class pullulation(db.Model):
    """Professional-growth record.

    Fixes: PULLid marked primary key (model had none); `db.smallint` does not
    exist (AttributeError at import) -> `db.SMALLINT` as used elsewhere in the file.
    """
    PULLid = db.Column(db.String(36), primary_key=True, nullable=False)
    STSTid = db.Column(db.String(36))
    famous_domain = db.Column(db.String(100), nullable=False, default=' ')
    famous_date = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    famous_age = db.Column(db.SMALLINT, nullable=False)
    famous_reason = db.Column(db.String(50), nullable=False, default=' ')
    famous_achievement = db.Column(db.String(50), nullable=False, default=' ')
    revelation = db.Column(db.String(50), nullable=False, default=' ')
    experience = db.Column(db.Text, nullable=False)
    aphorism = db.Column(db.String(100), nullable=False, default=' ')
    advice = db.Column(db.String(100), nullable=False)
    credendum = db.Column(db.String(100), nullable=False)
    hope = db.Column(db.String(100), nullable=False)
    other_advice = db.Column(db.String(100), nullable=False)
    all_clinic_time = db.Column(db.SMALLINT, nullable=False)
    old_clinic_time = db.Column(db.SMALLINT, nullable=False)
    last_clinic_time = db.Column(db.SMALLINT, nullable=False)
    clinic_regard = db.Column(db.Text, nullable=False, default=' ')
    diagnose_custom = db.Column(db.Text, nullable=False, default=' ')
    #primary key = db.Column(db.(PULLid)
class readBook(db.Model):
    """Reading-history / book-study survey. Fix: REBOid marked primary key (model had none)."""
    REBOid = db.Column(db.String(36), primary_key=True, nullable=False)
    STSTid = db.Column(db.String(36))
    sequence = db.Column(db.String(300), nullable=False)
    study_emphases = db.Column(db.SMALLINT, nullable=False)
    emphases_reason = db.Column(db.Text, nullable=False)
    study_advice = db.Column(db.SMALLINT, nullable=False)
    advice_reason = db.Column(db.Text, nullable=False)
    con_book = db.Column(db.Text, nullable=False)
    extensive_book = db.Column(db.Text, nullable=False)
    bad_book = db.Column(db.Text, nullable=False)
    classic_opinion = db.Column(db.Text, nullable=False)
    genre_attitude = db.Column(db.Text, nullable=False)
    relation_opinion = db.Column(db.SMALLINT, nullable=False)
    opinion_reason = db.Column(db.Text, nullable=False)
    special_book = db.Column(db.String(500), nullable=False)
    ratio = db.Column(db.String(100), nullable=False)
    #primary key = db.Column(db.(REBOid)
class rediagnose(db.Model):
    """Follow-up diagnosis record. Fix: RDIAid marked primary key (model had none)."""
    RDIAid = db.Column(db.String(36), primary_key=True, nullable=False)
    CAANid = db.Column(db.String(36))
    RDIAno = db.Column(db.SMALLINT, nullable=False)
    disease_state = db.Column(db.String(200), nullable=False)
    tongue = db.Column(db.SMALLINT, nullable=False, default=0)
    tongue1 = db.Column(db.String(50), nullable=False)
    artery = db.Column(db.String(50), nullable=False)
    other_artery = db.Column(db.String(200), nullable=False)
    rediagnose_analysis = db.Column(db.Text, nullable=False)
    #primary key = db.Column(db.(RDIAid)
class researchItem(db.Model):
    """Research project item. Fix: HashId marked primary key (model had none)."""
    Name = db.Column(db.String(50))
    Leval = db.Column(db.String(50))
    Princial = db.Column(db.String(50))
    Participent = db.Column(db.String(50))
    Affiliation = db.Column(db.Text)
    Duration = db.Column(db.String(50))
    Source = db.Column(db.Text)
    Abstruct = db.Column(db.String(50))
    HashId = db.Column(db.String(36), primary_key=True, nullable=False)
    SpecialisId = db.Column(db.String(36))
    State = db.Column(db.SMALLINT)
    OperationId = db.Column(db.String(36))
    Category = db.Column(db.String(50))
    CreateDate = db.Column(db.DateTime)
    #primary key = db.Column(db.(HashId)
class resultReward(db.Model):
    """Award received for a research result. Fix: HashId marked primary key (model had none)."""
    Num = db.Column(db.String(50))
    ResultName = db.Column(db.String(50))
    Author = db.Column(db.String(50))
    AuthorAffiliation = db.Column(db.String(50))
    RewardName = db.Column(db.String(50))
    Leval = db.Column(db.String(50))
    RewardDate = db.Column(db.String(50))
    LicensingGroup = db.Column(db.String(50))
    Files = db.Column(db.String(50))
    SpecialistId = db.Column(db.String(36))
    OperatorId = db.Column(db.String(36))
    State = db.Column(db.SMALLINT)
    HashId = db.Column(db.String(36), primary_key=True, nullable=False)
    CreateDate = db.Column(db.DateTime)
    #primary key = db.Column(db.(HashId)
class science(db.Model):
    """Scientific-research summary per specialist. Fix: SCIEid marked primary key (model had none)."""
    SCIEid = db.Column(db.String(36), primary_key=True, nullable=False)
    SPETid = db.Column(db.String(36))
    recipe_description = db.Column(db.Text, nullable=False, default=' ')
    drugs = db.Column(db.Text, nullable=False)
    technique = db.Column(db.Text, nullable=False, default=' ')
    recipes = db.Column(db.Text, nullable=False, default=' ')
    study_opinion = db.Column(db.Text, nullable=False, default=' ')
    study_advice = db.Column(db.Text, nullable=False, default=' ')
    reports = db.Column(db.Text, nullable=False, default=' ')
    contents = db.Column(db.Text, nullable=False, default=' ')
    reference = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    #primary key = db.Column(db.(SCIEid)
class semiotic(db.Model):
    """Semiotic (sign/symptom coding) entry. Fix: SEMCid marked primary key (model had none)."""
    SEMCid = db.Column(db.String(36), primary_key=True, nullable=False)
    CDISid = db.Column(db.String(36))
    code = db.Column(db.String(20), nullable=False)
    groupCode = db.Column(db.String(20), nullable=False, default=' ')
    name = db.Column(db.String(100), nullable=False)
    isClassical = db.Column(db.Boolean, nullable=False, default=1)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    #primary key = db.Column(db.(SEMCid))
    #key AK_SEMIOTIC_PK_CODE_SEMIOTIC = db.Column(db.(code)
class sourceTechnology(db.Model):
    """Source-technology record. Fix: HashId marked primary key (model had none)."""
    OperatorId = db.Column(db.String(36))
    Num = db.Column(db.String(50))
    Name = db.Column(db.String(50))
    Content = db.Column(db.Text)
    FormationTime = db.Column(db.String(50))
    Possessor = db.Column(db.String(50))
    Bearer = db.Column(db.String(50))
    PossetionAffiliation = db.Column(db.Text)
    ApplicaionAffiliation = db.Column(db.Text)
    Duration = db.Column(db.String(50))
    Files = db.Column(db.String(50))
    SpecialisId = db.Column(db.String(36))
    HashId = db.Column(db.String(36), primary_key=True, nullable=False)
    State = db.Column(db.SMALLINT)
    CreateDate = db.Column(db.DateTime)
    #primary key = db.Column(db.(HashId)
class specialist(db.Model):
    """Specialist master record. Fix: SPETid marked primary key (model had none)."""
    SPETid = db.Column(db.String(36), primary_key=True, nullable=False)
    name = db.Column(db.String(20), nullable=False)
    code = db.Column(db.String(20))
    birthday = db.Column(db.DateTime, nullable=False)
    nationality = db.Column(db.SMALLINT, nullable=False)
    native_place = db.Column(db.String(6), nullable=False)
    gender = db.Column(db.SMALLINT, nullable=False)
    afflication = db.Column(db.String(100), nullable=False)
    telephone = db.Column(db.String(100), nullable=False)
    address = db.Column(db.String(100), nullable=False)
    postalcode = db.Column(db.String(20), nullable=False)
    status = db.Column(db.String(100), nullable=False)
    principalship = db.Column(db.String(100), nullable=False)
    major = db.Column(db.String(100), nullable=False)
    social_status = db.Column(db.Text, nullable=False)
    school_degree = db.Column(db.String(100), nullable=False)
    school = db.Column(db.String(100), nullable=False)
    graduation_date = db.Column(db.DateTime, nullable=False)
    other_degree = db.Column(db.String(100), nullable=False)
    learning_date = db.Column(db.DateTime, nullable=False)
    work_date = db.Column(db.DateTime, nullable=False)
    motivation = db.Column(db.String(100), nullable=False)
    mode = db.Column(db.String(100), nullable=False)
    resume = db.Column(db.Text, nullable=False)
    contribution = db.Column(db.Text, nullable=False)
    health_info = db.Column(db.String(50), nullable=False)
    clinic_info = db.Column(db.String(50), nullable=False)
    reseach_disease = db.Column(db.Text, nullable=False)
    recips = db.Column(db.Text, nullable=False)
    drugs = db.Column(db.Text, nullable=False)
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    #primary key = db.Column(db.(SPETid)
class student(db.Model):
    """Student of a specialist. Fix: STUDid marked primary key (model had none)."""
    STUDid = db.Column(db.String(36), primary_key=True, nullable=False)
    TEEXid = db.Column(db.String(36))
    name = db.Column(db.String(20), nullable=False)
    years = db.Column(db.String(20), nullable=False, default=' ')
    major = db.Column(db.String(100), nullable=False)
    domain = db.Column(db.String(30), nullable=False, default=' ')
    achievement = db.Column(db.String(100), nullable=False, default=' ')
    #primary key = db.Column(db.(STUDid)
class studyRelation(db.Model):
    """Study relation (influential person). Fix: STRLid marked primary key (model had none)."""
    STRLid = db.Column(db.String(36), primary_key=True, nullable=False)
    PULLid = db.Column(db.String(36))
    name = db.Column(db.String(20), nullable=False)
    afflication = db.Column(db.String(100), nullable=False)
    reason = db.Column(db.String(100), nullable=False, default=' ')
    #primary key = db.Column(db.(STRLid)
class studyStory(db.Model):
    """Study-history narrative.

    Fixes: STSTid marked primary key (model had none); `db.smallint` does not
    exist (AttributeError at import) -> `db.SMALLINT` as used elsewhere in the file.
    """
    STSTid = db.Column(db.String(36), primary_key=True, nullable=False)
    SPETid = db.Column(db.String(36))
    study_start_date = db.Column(db.DateTime, nullable=False)
    start_age = db.Column(db.SMALLINT, nullable=False, default=0)
    read_day = db.Column(db.SMALLINT, nullable=False, default=0)
    read_week = db.Column(db.SMALLINT, nullable=False, default=0)
    practice_day = db.Column(db.SMALLINT, nullable=False, default=0)
    practice_week = db.Column(db.SMALLINT, nullable=False, default=0)
    study_end_date = db.Column(db.DateTime, nullable=False)
    end_age = db.Column(db.SMALLINT, nullable=False, default=0)
    matter_type = db.Column(db.SMALLINT, nullable=False)
    matter = db.Column(db.Text, nullable=False, default=' ')
    work_start_date = db.Column(db.DateTime, nullable=False)
    work_age = db.Column(db.SMALLINT, nullable=False, default=0)
    clinic_day = db.Column(db.SMALLINT, nullable=False, default=0)
    clinic_week = db.Column(db.SMALLINT, nullable=False, default=0)
    study_day = db.Column(db.SMALLINT, nullable=False, default=0)
    study_week = db.Column(db.SMALLINT, nullable=False, default=0)
    clinic_years = db.Column(db.SMALLINT, nullable=False, default=0)
    root_years = db.Column(db.SMALLINT, nullable=False, default=0)
    root_place = db.Column(db.String(50), nullable=False)
    work_start = db.Column(db.String(20), nullable=False)
    work_middle = db.Column(db.String(20), nullable=False)
    work_end = db.Column(db.String(20), nullable=False)
    work_mode = db.Column(db.String(50), nullable=False)
    study_key = db.Column(db.String(50), nullable=False, default=' ')
    other_situation = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    #primary key = db.Column(db.(STSTid)
class symptom(db.Model):
    """Symptom code tree entry. Fix: SYPMid marked primary key (model had none)."""
    SYPMid = db.Column(db.String(36), primary_key=True, nullable=False)
    code = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(50), nullable=False)
    parentcode = db.Column(db.String(20), nullable=False)
    level = db.Column(db.SMALLINT, nullable=False)
    kind = db.Column(db.SMALLINT, nullable=False, default=0)
    sort = db.Column(db.SMALLINT, nullable=False, default=1)
    valSort = db.Column(db.INTEGER, nullable=False, default=0)
    hasFile = db.Column(db.Boolean, nullable=False, default=0)
    isClassical = db.Column(db.Boolean, nullable=False, default=1)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    #primary key = db.Column(db.(SYPMid))
    #key AK_SYMPTOM_PK_CODE_SYMPTOM = db.Column(db.(code)
class syscode(db.Model):
    """System code dictionary header. Fix: CODEid marked primary key (model had none)."""
    CODEid = db.Column(db.String(36), primary_key=True, nullable=False)
    no = db.Column(db.INTEGER, nullable=False)
    code = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(50), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    #primary key = db.Column(db.(CODEid))
    #key AK_SYSCODE_PK_CODE_SYSCODE = db.Column(db.(code))
    #key AK_SYSCODE_PK_NO_SYSCODE = db.Column(db.(no)
class syscodeValue(db.Model):
    """System code dictionary value.

    Fixes: SVALid marked primary key (model had none); missing commas in the
    `py`/`wb` column definitions (SyntaxError); the garbled `illustration`
    line reconstructed as a Text column with a blank default, matching the
    pattern used throughout the file.
    """
    SVALid = db.Column(db.String(36), primary_key=True, nullable=False)
    CODEid = db.Column(db.String(36), nullable=False, default=' ')
    subno = db.Column(db.INTEGER, nullable=False)
    subcode = db.Column(db.String(20), nullable=False)
    truevalue = db.Column(db.String(100))
    py = db.Column(db.String(20), default=' ')
    wb = db.Column(db.String(20), default=' ')
    illustration = db.Column(db.Text, default=' ')
    #primary key = db.Column(db.(SVALid)
class sysfun(db.Model):
    """System function (menu) tree entry. Fix: SFUNid marked primary key (model had none)."""
    SFUNid = db.Column(db.String(36), primary_key=True, nullable=False)
    code = db.Column(db.String(20), nullable=False)
    parentcode = db.Column(db.String(20), nullable=False, default='-1')
    level = db.Column(db.SMALLINT, nullable=False)
    name = db.Column(db.String(100), nullable=False)
    href = db.Column(db.String(200), nullable=False, default=' ')
    targetFrame = db.Column(db.String(100), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    #primary key = db.Column(db.(SFUNid))
    #key AK_SYSFUN_PK_CODE_SYSFUN = db.Column(db.(code)
class table(db.Model):
    """Generic id/name table.

    Fixes: stray `1` after the class header removed (SyntaxError); `db.char`
    does not exist -> `db.CHAR`; id marked primary key (model had none).
    """
    id = db.Column(db.CHAR(10), primary_key=True, nullable=False)
    name = db.Column(db.CHAR(10))
    #primary key = db.Column(db.(id)
class talentreward(db.Model):
    """Talent/honor award. Fix: HashId marked primary key (model had none)."""
    OperatorId = db.Column(db.String(36))
    Name = db.Column(db.String(50))
    Category = db.Column(db.String(50))
    Leval = db.Column(db.String(50))
    Principal = db.Column(db.String(50))
    Participent = db.Column(db.String(50))
    Affiliation = db.Column(db.Text)
    StaringTime = db.Column(db.String(50))
    Source = db.Column(db.Text)
    Absturct = db.Column(db.Text)
    SpecialisId = db.Column(db.String(36))
    HashId = db.Column(db.String(36), primary_key=True, nullable=False)
    State = db.Column(db.SMALLINT)
    CreateDate = db.Column(db.DateTime)
    #primary key = db.Column(db.(HashId)
class teachExperience(db.Model):
    """Teaching-experience survey.

    Fixes: TEEXid marked primary key (model had none); `period _ratio`
    contained an embedded space (SyntaxError) -> `period_ratio`.
    """
    TEEXid = db.Column(db.String(36), primary_key=True, nullable=False)
    STSTid = db.Column(db.String(36))
    study_gist = db.Column(db.String(300), nullable=False)
    clinic_gist = db.Column(db.String(300), nullable=False)
    interaction_gist = db.Column(db.String(300), nullable=False)
    other_gist = db.Column(db.String(300), nullable=False)
    schoolage_request = db.Column(db.String(300), nullable=False)
    knowledge_request = db.Column(db.String(300), nullable=False)
    moral_request = db.Column(db.String(300), nullable=False)
    other_request = db.Column(db.String(300), nullable=False)
    school_opinion = db.Column(db.Text, nullable=False)
    course_ratio = db.Column(db.String(300), nullable=False)
    period_ratio = db.Column(db.String(300), nullable=False)
    textbook = db.Column(db.Text, nullable=False)
    inherit_mode = db.Column(db.Text, nullable=False)
    teach_opinion = db.Column(db.Text, nullable=False)
    combine_opinion = db.Column(db.Text, nullable=False)
    system_opinion = db.Column(db.Text, nullable=False)
    department_opinion = db.Column(db.Text, nullable=False)
    research_opinion = db.Column(db.Text, nullable=False)
    support_opinion = db.Column(db.Text, nullable=False)
    #primary key = db.Column(db.(TEEXid)
class teaching(db.Model):
    """Teaching engagement period. Fix: TEACid marked primary key (model had none)."""
    TEACid = db.Column(db.String(36), primary_key=True, nullable=False)
    PULLid = db.Column(db.String(36))
    start_date = db.Column(db.String(16), nullable=False, default='1900-1-1')
    end_date = db.Column(db.String(16), nullable=False, default='1900-1-1')
    teach_place = db.Column(db.String(100), nullable=False, default=' ')
    major = db.Column(db.String(100), nullable=False)
    #primary key = db.Column(db.(TEACid)
class techCreative(db.Model):
    """Technical-creativity record. Fix: HashId marked primary key (model had none)."""
    Num = db.Column(db.String(50))
    Name = db.Column(db.Text)
    Conent = db.Column(db.Text)
    FormationTime = db.Column(db.String(50))
    Author = db.Column(db.String(50))
    Files = db.Column(db.String(50))
    HashId = db.Column(db.String(36), primary_key=True, nullable=False)
    State = db.Column(db.SMALLINT)
    SpecialisId = db.Column(db.String(36))
    CreateDate = db.Column(db.DateTime)
    OperatorId = db.Column(db.String(36))
    #primary key = db.Column(db.(HashId)
class technologyapplication(db.Model):
    """Technology application record. Fix: HashId marked primary key (model had none)."""
    OperatorId = db.Column(db.String(36))
    Num = db.Column(db.String(50))
    Name = db.Column(db.String(50))
    Disease = db.Column(db.String(50))
    Department = db.Column(db.String(50))
    Author = db.Column(db.String(50))
    Affiliation = db.Column(db.Text)
    Percentage = db.Column(db.String(50))
    Beneficiary = db.Column(db.Text)
    EfficacyAssement = db.Column(db.Text)
    HealthEconomics = db.Column(db.Text)
    SpecialisId = db.Column(db.String(36))
    HashId = db.Column(db.String(36), primary_key=True, nullable=False)
    State = db.Column(db.SMALLINT)
    CreateDate = db.Column(db.DateTime)
    #primary key = db.Column(db.(HashId)
class technologycase(db.Model):
    """Technology case record. Fix: HashId marked primary key (model had none)."""
    OperatorId = db.Column(db.String(36))
    Num = db.Column(db.String(50))
    Name = db.Column(db.String(50))
    Possessor = db.Column(db.String(50))
    PossesstionAffilation = db.Column(db.String(50))
    specialisId = db.Column(db.String(36))
    HashId = db.Column(db.String(36), primary_key=True, nullable=False)
    State = db.Column(db.SMALLINT)
    CreateDate = db.Column(db.DateTime)
    #primary key = db.Column(db.(HashId)
class userrole(db.Model):
    """User role definition. Fix: ROLEid marked primary key (model had none)."""
    ROLEid = db.Column(db.String(36), primary_key=True, nullable=False)
    code = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(100), nullable=False)
    illustration = db.Column(db.Text, nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    #primary key = db.Column(db.(ROLEid))
    #key AK_USERROLE_PK_CODE_USERROLE = db.Column(db.(code)
class userrolefun(db.Model):
    """Role <-> system-function link table. Fix: composite primary key declared."""
    ROLEid = db.Column(db.String(36), primary_key=True, nullable=False)
    SFUNid = db.Column(db.String(36), primary_key=True, nullable=False)
    #primary key = db.Column(db.(ROLEid, SFUNid)
class wDisease(db.Model):
    """Western-medicine disease code tree entry. Fix: WDISid marked primary key (model had none)."""
    WDISid = db.Column(db.String(36), primary_key=True, nullable=False)
    code = db.Column(db.String(20), nullable=False)
    name = db.Column(db.String(100), nullable=False)
    parentcode = db.Column(db.String(20), nullable=False)
    level = db.Column(db.SMALLINT, nullable=False)
    isClassical = db.Column(db.Boolean, nullable=False, default=1)
    SPETid = db.Column(db.String(36), nullable=False, default=' ')
    illustration = db.Column(db.Text, nullable=False, default=' ')
    createDay = db.Column(db.DateTime, nullable=False, default='1900-1-1')
    optrid = db.Column(db.String(36), nullable=False, default=' ')
    state = db.Column(db.SMALLINT, nullable=False, default=0)
    #primary key = db.Column(db.(WDISid))
    #key AK_WDISEASE_PK_CODE_WDISEASE = db.Column(db.(code)
class wisdom(db.Model):
    """Wisdom/quote attached to a readBook record. Fix: WISDid marked primary key (model had none)."""
    WISDid = db.Column(db.String(36), primary_key=True, nullable=False)
    REBOid = db.Column(db.String(36))
    content = db.Column(db.Text, nullable=False, default=' ')
    #primary key = db.Column(db.(WISDid)
|
991,873 | 85f1bad1c3fd98e7c2debe8556e898cee5a55d5f | import win32com.client as win32
# Copy a pivot table out of an Excel workbook and paste it into a new Outlook
# mail body via COM automation (Windows-only; requires Excel and Outlook).
xl = win32.Dispatch('Excel.Application')
# NOTE(review): hard-coded user-specific path — move to config before reuse.
xlsx = xl.Workbooks.Open(r"C:\Users\Raffaele.Sportiello\OneDrive - Wolters Kluwer\Documents\Dashboard inflow\Dashboard inflow canali e prodotti\Dashboard inflow - Raffaele.xlsb")
# Put the whole pivot-table range (headers included) on the clipboard.
xlsx.Sheets.Item('TuttotelFE').PivotTables('Tabella pivot1').TableRange2.Copy()
outlook = win32.Dispatch('outlook.application')
mail = outlook.CreateItem(0)  # 0 = olMailItem
mail.To = 'Raffaele.Sportiello@wolterskluwer.com'
mail.Subject = 'Prova'
mail.Body = 'Message body'
# Display() must come before grabbing the inspector so the mail window exists.
mail.Display()
inspector = outlook.ActiveInspector()
word_editor = inspector.WordEditor
word_range = word_editor.Application.ActiveDocument.Content
# Paste the clipboard Excel range into the mail body as a formatted table.
word_range.PasteExcelTable(False, False, True)
|
991,874 | a75e73ce2a72af50918b32603264550fa8155af9 | from flask import Flask
# Application factory-less Flask setup: create the app first, then import the
# views module so its route decorators can register against `app`.
app = Flask(__name__)
# Imported for its side effect (route registration); `noqa` silences the
# unused-import warning. NOTE(review): both lines import the same module —
# presumably one is redundant; verify before removing.
import hello_world.views  # noqa
from hello_world import views
|
991,875 | 1bbc1251612a0ac69e70275de754ea3197fdb81c | from django.shortcuts import render
import socket
from _thread import *
import threading
import pymongo
from scipy.spatial import distance
import json
from django.views.decorators.csrf import csrf_exempt
from django.http import JsonResponse
import datetime
from urllib.request import urlopen
from urllib.parse import urlencode, quote_plus
import urllib
from time import sleep
# Address the raw TCP device server binds to (loopback only).
HOST = '127.0.0.1'
PORT = 4000
# Guards mutations of `connect_device` / registration in MongoDB.
Lock1 = threading.Lock()
# A new thread is spawned for every client that connects.
myclient = pymongo.MongoClient("mongodb://localhost:27017/")  # default local MongoDB address
mydb = myclient["lucete"]
user_col = mydb["userID"]
mycol1 = mydb["KR_city"]
mycol2 = mydb["KR_weather"]
weather_col = mydb["KR_weather"]
connect_col = mydb["connectID"]
user_Alarm = mydb["user_Alarm"]
# def get_weather(lat, lng):
# Devices currently connected: list of {'_id', 'socket', 'ip', 'port'} dicts.
connect_device = []
# NOTE(review): captured once at import time — functions below treat this as
# "now", so it goes stale; confirm whether that is intentional.
time_now = datetime.datetime.now()
# `global` at module level is a no-op; flag controls the accept loop in start_server.
global server_flag
server_flag = True
# OpenWeatherMap current-weather endpoint used by weather_upload.
url2 = 'http://api.openweathermap.org/data/2.5/weather'
# Send a raise/lower command to a connected device.
def move_message(_id, value, mode):
    """Send an up/down command to the device registered under *_id*.

    value: True = up, False = down.  mode: True = continuous, False = once.
    Silently does nothing when no device with *_id* is connected.
    """
    print(_id)
    # Scan the registry; the last matching entry wins, printing each as we go.
    target = None
    for entry in connect_device:
        print(entry)
        if entry['_id'] == _id:
            target = entry['socket']
    if target is not None:
        if mode:
            message = "SERVER RECEIVE:CONTINUOUS UP ~" if value else "SERVER RECEIVE:CONTINUOUS DOWN ~"
        else:
            message = "SERVER RECEIVE:ONCE UP ~" if value else "SERVER RECEIVE:ONCE DOWN ~"
        target.send(message.encode('utf-8'))
    # else:
    #     target.send("Disconnected HW".encode('utf-8'))
# Send a power on/off command to a connected device.
def power_message(_id, power, client):
    """Forward a power ON/OFF command to the device for *_id*.

    When the device is not connected, notify the requesting app socket
    (*client*) instead.
    """
    print(_id)
    # Scan the registry; the last matching entry wins, printing each as we go.
    target = None
    for entry in connect_device:
        print(entry)
        if entry['_id'] == _id:
            target = entry['socket']
    print(target is not None)
    if target is not None:
        message = "SERVER RECEIVE:POWER ON ~" if power else "SERVER RECEIVE:POWER OFF ~"
        target.send(message.encode('utf-8'))
    else:
        client.send("Disconnected HW".encode('utf-8'))
# Send the security-mode polling period to a connected device.
def protection_message(_id, period):
    """Send the security-mode ("protection") polling period to device *_id*.

    Periods above 30 are sent verbatim; anything else turns protection off.
    BUG FIX: the original referenced `device_info` in the not-found branch,
    where it was never bound, raising NameError; now we just log instead.
    """
    print(_id)
    device_info = None
    for item2 in connect_device:
        print(item2)
        if item2['_id'] == _id:
            device_info = item2['socket']
    check = device_info is not None
    print(check)
    if check:
        if 30 < period:
            message = "SERVER RECEIVE:PROTECTION " + str(period) + " ~"
        else:
            message = "SERVER RECEIVE:PROTECTION OFF"
        device_info.send(message.encode('utf-8'))
    else:
        # No connected socket to notify — original crashed here (unbound name).
        print("Disconnected HW :", _id)
# Compare the current time with the stored alarm time and act on it.
def check_time(_id):
    """Fire the user's alarm if due, otherwise fall back to energy mode.

    Reads the alarm document for *_id* from ``user_Alarm``; ``power`` is a
    fired-today flag (0 = not fired yet).
    """
    print("check_time :", repr(time_now))
    item = user_Alarm.find_one({'_id': _id})
    # Minutes since midnight. NOTE(review): time_now is captured at import
    # time, so this does not reflect the actual current time — confirm intent.
    now_minute = time_now.hour*60 + time_now.minute
    # Alarms reset at 00:00; afterwards check the alarm time and notify.
    if item['power'] == 0:
        # Alarm-time check. NOTE(review): fires whenever the alarm is still in
        # the future ("time - now > 0"), not at the alarm time — verify.
        if 0 < item['time'] - now_minute:
            # Raise (open) the device, then mark the alarm as fired.
            move_message(_id, True, False)
            user_Alarm.update_one({'_id': _id}, {
                '$set': {
                    'power': 1
                }
            })
        # Not alarm time:
        else:
            # Fall back to automatic energy-saving behaviour.
            energy_mode(_id)
    else:
        # Safety window: only reset the fired flag after minute 10 past midnight.
        if 10 < now_minute:
            user_Alarm.update_one({'_id': _id}, {
                '$set': {
                    'power': 0
                }
            })
            # Fall back to automatic energy-saving behaviour.
            energy_mode(_id)
# Energy efficiency based on outdoor temperature.
def energy_mode(_id):
    """Drive the device up/down to save energy, comparing outdoor weather
    (nearest-city record) with the user's indoor temperature.

    Heuristics: in summer months lower the device outright; otherwise move
    only when indoor/outdoor temperatures differ by more than 3 degrees.
    """
    print("energy_mode :", repr(_id))
    # Defaults noted by the original author: cooling 18-20C, heating 24-26C.
    user = user_col.find_one({'_id': _id})
    weather = weather_col.find_one(({'_id': user['City_id']}))
    if weather['data']['temp'] >= 18:
        # Summer (June-August, using the stale module-level time_now).
        if 9 > time_now.month > 5:
            print('여름')
            # Lower the device.
            move_message(_id, False, False)
        # Compare indoor vs outdoor temperature for energy saving.
        if weather['data']['temp'] > user['Temp']:
            if weather['data']['temp'] - user['Temp'] > 3:
                print(' 내림 ')
                # Lower.
                move_message(_id, False, False)
        else:
            if user['Temp'] - weather['data']['temp'] > 3:
                # Raise.
                print(' 올림 ')
                move_message(_id, True, False)
    elif weather['data']['temp'] < 18:
        # Winter (December-February).
        if time_now.month < 3 or time_now.month > 11:
            print('겨울')
            # Raise.
            move_message(_id, True, False)
        # Compare indoor vs outdoor temperature for energy saving.
        if weather['data']['temp'] > user['Temp']:
            if weather['data']['temp'] - user['Temp'] > 3:
                print(' 내림 ')
                # Lower.
                move_message(_id, False, False)
        else:
            if user['Temp'] - weather['data']['temp'] > 3:
                # Raise.
                print(' 올림 ')
                move_message(_id, True, False)
# React to the measured light level (lux).
def landscape_mode(_id):
    """Raise/lower the device according to measured lux and the user's light
    sensitivity (``Lx_mode`` 1=sensitive, 2=normal, 3=insensitive).

    ``Lx_flag`` debounces: a move only happens on the second consecutive
    reading on the same side of the threshold (flag value = the side seen
    last time; 4 = below-threshold side).
    """
    user = user_col.find_one({'_id': _id})
    print("landscape_mode ")
    # Sensitive to light.
    if user['Lx_mode'] == 1:
        print('Lx_mode 1')
        # Lux threshold for this sensitivity: 0 ~ 500.
        maximum = 500
        if user['Lx'] > maximum:
            # Condition persisted since last check -> act.
            if user['Lx_flag'] == 1:
                # Lower.
                move_message(_id, False, False)
            # Otherwise just set the flag and wait for confirmation.
            else:
                user_col.update_one({'_id': _id}, {
                    '$set': {
                        'Lx_flag': 1
                    }
                })
        # Landscape condition met -> curtain should open (raise).
        else:
            if user['Lx_flag'] == 4:
                # Raise.
                move_message(_id, True, False)
            # Otherwise just set the flag and wait for confirmation.
            else:
                user_col.update_one({'_id': _id}, {
                    '$set': {
                        'Lx_flag': 4
                    }
                })
    # Normal sensitivity.
    elif user['Lx_mode'] == 2:
        print('Lx_mode 2')
        # Lux threshold for this sensitivity: 0 ~ 5000.
        maximum = 5000
        # Above the threshold:
        if user['Lx'] > maximum:
            # Condition persisted since last check -> act.
            if user['Lx_flag'] == 2:
                # Lower.
                move_message(_id, False, False)
            # Otherwise just set the flag and wait for confirmation.
            else:
                user_col.update_one({'_id': _id}, {
                    '$set': {
                        'Lx_flag': 2
                    }
                })
        # Landscape condition met -> curtain should open (raise).
        else:
            if user['Lx_flag'] == 4:
                # Raise.
                move_message(_id, True, False)
            # Otherwise just set the flag and wait for confirmation.
            else:
                user_col.update_one({'_id': _id}, {
                    '$set': {
                        'Lx_flag': 4
                    }
                })
    # Insensitive to light.
    elif user['Lx_mode'] == 3:
        print('Lx_mode 3')
        # Lux threshold for this sensitivity: 0 ~ 20000.
        maximum = 20000
        if user['Lx'] > maximum:
            # Condition persisted since last check -> act.
            if user['Lx_flag'] == 3:
                # Lower.
                move_message(_id, False, False)
            # Otherwise just set the flag and wait for confirmation.
            else:
                user_col.update_one({'_id': _id}, {
                    '$set': {
                        'Lx_flag': 3
                    }
                })
        # Landscape condition met -> curtain should open (raise).
        else:
            if user['Lx_flag'] == 4:
                # Raise.
                move_message(_id, True, False)
            # Otherwise just set the flag and wait for confirmation.
            else:
                user_col.update_one({'_id': _id}, {
                    '$set': {
                        'Lx_flag': 4
                    }
                })
# Dispatch on the user's configured mode.
def check_mode(_id):
    """Look up the user for *_id* and run the handler for its current mode."""
    # TODO (carried over): sunset/sunrise are not taken into account yet.
    user = user_col.find_one({'_id': _id})
    # weather = weather_col.find_one(({'_id': user['City_id']}))
    if user == 0:
        print("user not exist")
        return
    print(user['Mode'])
    mode = user['Mode']
    if mode == 1:
        # Energy-saving mode.
        energy_mode(_id)
    elif mode == 2:
        # Landscape (light-based) mode.
        landscape_mode(_id)
        # print(datetime.datetime.now().isoformat(timespec='seconds'))
    elif mode == 3:
        # Security mode - push the polling period to the device.
        protection_message(_id, user['time_period'])
    elif mode == 4:
        # Alarm mode - alarm first, then energy saving.
        check_time(_id)
        print("Mode 4")
# Per-connection worker function for the socket server.
def threaded(client_socket, addr):
    """Handle one TCP client (hardware device or mobile app) until disconnect.

    Protocol: space-separated tokens; first token "HW" (device registration /
    state reports), "APP" (mode & power commands from the app) or "test".
    """
    print('Connected by :', addr[0], ':', addr[1])
    # Loop until the client disconnects.
    while True:
        try:
            # Receive one message (echo-style request/response protocol).
            data = client_socket.recv(1024)
            if not data:
                print('Disconnected by ' + addr[0], ':', addr[1])
                break
            # print('Received from ' + addr[0], ':', addr[1], data.decode())
            # Registry entry for a newly connecting client.
            device = {
                '_id': '',
                'socket': client_socket,
                'ip': addr[0],
                'port': addr[1]
            }
            data_list = data.decode().split(' ')
            # Start talking to the hardware.
            try:
                if data_list[0] == "HW":
                    check = False
                    # "MKID": first-time registration of a device.
                    if data_list[1] == "MKID":
                        # Serialize registry/DB updates.
                        # NOTE(review): release is not in a finally block — an
                        # exception in this branch would leave Lock1 held.
                        Lock1.acquire()
                        # Insert into the DB here.
                        not_overlap = True
                        data_list[2] = int(data_list[2])
                        # Reject ids already registered among connected devices.
                        for check_overlap in connect_device:
                            if check_overlap['_id'] == data_list[2]:
                                client_socket.send("user_id overlap".encode('utf-8'))
                                not_overlap = False
                                break
                        if not_overlap:
                            # Current device location (lat, lon).
                            location = (float(data_list[3]), float(data_list[4]))
                            dst = -1
                            # Compare against every city in the DB; keep the nearest.
                            for item2 in weather_col.find():
                                loc = (float(item2['location']['lat']), float(item2['location']['lon']))
                                # Track the minimum euclidean distance.
                                if dst < 0:
                                    dst = distance.euclidean(loc, location)
                                elif dst > distance.euclidean(loc, location):
                                    dst = distance.euclidean(loc, location)
                                    # NOTE(review): city_id is only set when a
                                    # *closer* city is found after the first —
                                    # unset if the first city is nearest; verify.
                                    city_id = item2['_id']
                            device['_id'] = data_list[2]
                            dic = {
                                '_id': data_list[2],
                                'City_id': city_id,
                                'Power': 1,
                                'Mode': 0,
                                'State': data_list[5],
                                'Lx': 0.0,
                                'Temp': 0.0,
                                'time_period': 60,
                                'time': 0
                            }
                            user_col.insert_one(dic)
                            connect_device.append(device)
                            # print(connect_device)
                            # connect_col.insert_one(device)
                            client_socket.send("MK_ID Success".encode('utf-8'))
                        # Release the lock.
                        Lock1.release()
                    # "SET": an existing device reconnects and refreshes its info.
                    elif data_list[1] == "SET":
                        # print(connect_device)
                        Lock1.acquire()
                        data_list[2] = int(data_list[2])
                        for item in connect_device:
                            if item['_id'] == data_list[2]:
                                check = True
                                break
                        if check:
                            print("SET VALUE")
                            location = (float(data_list[3]), float(data_list[4]))
                            dst = -1
                            # Nearest-city search (same pattern as MKID above).
                            for item2 in weather_col.find():
                                loc = (float(item2['location']['lat']), float(item2['location']['lon']))
                                if dst < 0:
                                    dst = distance.euclidean(loc, location)
                                elif dst > distance.euclidean(loc, location):
                                    dst = distance.euclidean(loc, location)
                                    city_id = item2['_id']
                            user_col.update_one({'_id': data_list[2]}, {
                                "$set": {
                                    "City_id": city_id,
                                    "State": float(data_list[5])
                                }})
                            client_socket.send("SET Success".encode('utf-8'))
                        else:
                            # Unknown device: register it on the fly.
                            location = (float(data_list[3]), float(data_list[4]))
                            dst = -1
                            # Nearest-city search (same pattern as MKID above).
                            for item2 in weather_col.find():
                                loc = (float(item2['location']['lat']), float(item2['location']['lon']))
                                if dst < 0:
                                    dst = distance.euclidean(loc, location)
                                elif dst > distance.euclidean(loc, location):
                                    dst = distance.euclidean(loc, location)
                                    city_id = item2['_id']
                            dic = {
                                '_id': data_list[2],
                                'City_id': city_id,
                                'Power': 1,
                                'Mode': 0,
                                'State': float(data_list[5]),
                                'Lx': 0.0,
                                'Temp': 0.0,
                                'time_period': 3,
                                'time': 0
                            }
                            user_col.insert_one(dic)
                            connect_device.append(device)
                            client_socket.send("SET MK_ID Success".encode('utf-8'))
                            # connect_col.insert_one(device)
                        Lock1.release()
                    # "CR": periodic state report from the device (refresh values).
                    elif data_list[1] == "CR":
                        # Confirm the device is in the connected registry.
                        data_list[2] = int(data_list[2])
                        for item in connect_device:
                            if item['_id'] == data_list[2]:
                                check = True
                                break
                        # Known device: persist its reported state.
                        if check:
                            user_col.update_one({'_id': data_list[2]}, {
                                "$set": {
                                    "State": float(data_list[3]),
                                    "Lx": float(data_list[4]),
                                    "Temp": float(data_list[5]),
                                    "Power": int(data_list[6])
                                }})
                            # TODO (carried over): should also push a command when
                            # in energy-saving or landscape mode.
                            client_socket.send("CR Success".encode('utf-8'))
                        else:
                            client_socket.send("HW error : code 8".encode('utf-8'))
                # Communication with the mobile app.
                elif data_list[0] == "APP":
                    print("APP _id :", data_list[1])
                    data_list[1] = int(data_list[1])
                    user_data = user_col.find_one({'_id': data_list[1]})
                    if user_data == 0:
                        client_socket.send("_id value is Not valuable".encode('utf-8'))
                    else:
                        print("APP Connect")
                        data_list[2] = int(data_list[2])
                        data_list[3] = int(data_list[3])
                        # Power state changed: forward on/off to the device.
                        if data_list[2] != user_data['Power']:
                            # Power off.
                            if data_list[2] == 0:
                                power_message(data_list[1], False, client_socket)
                            # Power on.
                            elif data_list[2] == 1:
                                power_message(data_list[1], True, client_socket)
                        # Mode 3: alarm mode (data_list[4] = "HH:MM").
                        if data_list[3] == 3:
                            time = data_list[4].split(':')
                            hour = int(time[0])
                            minute = int(time[1])
                            user_col.update_one({'_id': data_list[1]}, {
                                '$set': {
                                    'Power': data_list[2],
                                    'Mode': data_list[3],
                                    'time_period': hour*60 + minute
                                }
                            }, upsert=True
                            )
                        # Mode 4: security mode.
                        elif data_list[3] == 4:
                            # Compute the period in minutes.
                            time = data_list[4].split(':')
                            hour = int(time[0])
                            minute = int(time[1])
                            # Persist to the user document.
                            user_col.update_one({'_id': data_list[1]}, {
                                '$set': {
                                    'Power': data_list[2],
                                    'Mode': data_list[3],
                                    'time': hour*60 + minute
                                }
                            }, upsert=True
                            )
                            # Cache the alarm time for faster lookups/notification.
                            user_Alarm.update_one({'_id': data_list[1]}, {
                                '$set': {
                                    '_id': data_list[1],
                                    'time': hour*60 + minute,
                                    'power': 0
                                }
                            }, upsert=True)
                        # Mode 5: manual (user-driven) mode.
                        elif data_list[3] == 5:
                            if data_list[4] == 1:
                                move_message(data_list[1], True, True)
                            else:
                                move_message(data_list[1], False, True)
                        # Mode 2: landscape mode.
                        elif data_list[3] == 2:
                            user_col.update_one({'_id': data_list[1]}, {
                                '$set': {
                                    'Power': data_list[2],
                                    'Mode': data_list[3],
                                    'Lx_mode': data_list[4],
                                }
                            }, upsert=True
                            )
                        # Initial mode / energy-efficiency mode.
                        else:
                            user_col.update_one({'_id': data_list[1]}, {
                                '$set': {
                                    'Power': data_list[2],
                                    'Mode': data_list[3],
                                }
                            }, upsert=True
                            )
                        # TODO (carried over): a function to push data to the Arduino.
                        # Run the mode dispatcher for the new configuration.
                        check_mode(data_list[1])
                        print("3")
                        # for item in connect_device:
                        #     if item['_id'] == int(data_list[1]):
                        #         item['socket'].send(" socket message ".encode('utf-8'))
                elif data_list[0] == "test":
                    check_time(data_list[1])
                else:
                    client_socket.send("APP error : code 8".encode('utf-8'))
            except ConnectionResetError as e:
                print('Error :' + addr[0], ':', addr[1], ' :', e)
                client_socket.send("Error :".encode('utf-8'))
        except ConnectionResetError as e:
            print('Disconnected by ' + addr[0], ':', addr[1], ' :', e)
            # TODO (carried over): remove the entry from connect_device on disconnect.
            break
    client_socket.close()
@csrf_exempt
def start_server(request):
    """Start the TCP control server and accept clients until the flag clears.

    Each accepted connection is handed to ``threaded`` on its own thread.
    NOTE: this view blocks until ``stop_server`` clears ``server_flag``
    AND one more client connects (accept() is blocking).
    """
    global server_flag
    server_flag = True

    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind((HOST, PORT))
    listener.listen()
    print('server start', request)

    # Accept loop: every client gets its own worker thread.
    while server_flag:
        print('wait')
        conn, peer = listener.accept()
        start_new_thread(threaded, (conn, peer))

    listener.close()
    return JsonResponse({"data": "server destroy"})
@csrf_exempt
def stop_server(request):
    """Clear the global run flag so the accept loop exits after its next client."""
    global server_flag
    print(server_flag)
    server_flag = False
    return JsonResponse({"data": "server finish"})
@csrf_exempt
def weather_upload(request):
    """Refresh the cached OpenWeatherMap data for every city id in ``mycol1``.

    City ids are processed in ascending order; for each one the current
    weather is fetched and the corresponding document in ``mycol2`` is
    replaced.  A failed fetch is retried once (a second failure propagates,
    matching the original behaviour).  After every 20 requests the loop
    sleeps 60 s to respect the API rate limit.
    """
    # SECURITY NOTE: the API key is hard-coded; move it to settings or an
    # environment variable.
    api_key = '008f6aadc0e813803c68f1a1e5dedf12'

    def fetch_and_store(city_id):
        # One API round-trip: download the current weather and store it.
        # (A data-less urllib Request already defaults to GET, so the
        # original's ``get_method = lambda: 'GET'`` override was redundant.)
        query = '?' + urlencode({
            quote_plus('id'): city_id, quote_plus('appid'): api_key
        })
        # Deliberately NOT named ``request`` — the original shadowed the
        # Django view argument here.
        api_request = urllib.request.Request(url2 + query)
        body = json.loads(urllib.request.urlopen(api_request).read())
        weather = {
            'temp': round(body['main']['temp'] - 273, 2),  # Kelvin -> Celsius
            'date': datetime.datetime.fromtimestamp(body['dt']),
            'sunset': datetime.datetime.fromtimestamp(body['sys']['sunset']),
            'sunrise': datetime.datetime.fromtimestamp(body['sys']['sunrise']),
            'weather': body['weather'],
            'cloud': body['clouds']['all']
        }
        mycol2.replace_one({'_id': city_id},
                           {"name": body['name'],
                            'location': body['coord'], 'data': weather})

    count = 0
    city_ids = [item['_id'] for item in mycol1.aggregate([{"$sort": {"_id": 1}}])]
    for city_id in city_ids:
        try:
            print(city_id)
            fetch_and_store(city_id)
        except Exception as ex:
            # Transient API errors are common; log and retry exactly once.
            print(ex)
            fetch_and_store(city_id)
        # The original rate-limited inconsistently (every 20 requests on the
        # happy path, 50 on the retry path); unified at the conservative 20.
        count += 1
        if count == 20:
            count = 0
            sleep(60)
    return JsonResponse({"data": "upload finish"})
def index(request):
    """Render the visualisation landing page (vis/beta.html)."""
    return render(request, "vis/beta.html")
|
991,876 | e1752695aba489fb7ccb0ca46156ac5307b56b12 | [
alg.createtemp (
"lineitem_filtered",
alg.projection (
[ "l_quantity", "l_extendedprice", "l_partkey" ],
alg.join (
( "l_partkey", "p_partkey" ),
alg.selection (
scal.AndExpr (
scal.EqualsExpr (
scal.AttrExpr ( "p_brand" ),
scal.ConstExpr ( "Brand#23", Type.STRING )
),
scal.EqualsExpr (
scal.AttrExpr ( "p_container" ),
scal.ConstExpr ( "MED BOX", Type.STRING )
)
),
alg.scan ( "part" )
),
alg.scan ( "lineitem" )
)
)
),
alg.projection (
[ "avg_yearly", "count_price" ],
alg.map (
"avg_yearly",
scal.DivExpr (
scal.AttrExpr ( "sum_price" ),
scal.ConstExpr ( "7.0f", Type.DOUBLE )
),
alg.aggregation (
[],
[ ( Reduction.SUM, "l_extendedprice", "sum_price" ),
( Reduction.COUNT, "l_extendedprice", "count_price" ) ],
alg.selection (
scal.SmallerExpr (
scal.AttrExpr ( "l_quantity" ),
scal.AttrExpr ( "lim_quan" )
),
alg.map (
"lim_quan",
scal.MulExpr (
scal.AttrExpr ( "avg_quan" ),
scal.ConstExpr ( "0.2f", Type.DOUBLE )
),
alg.join (
( "l1.l_partkey", "l2.l_partkey" ),
alg.aggregation (
[ "l_partkey" ],
[ ( Reduction.AVG, "l_quantity", "avg_quan" ) ],
alg.scan ( "lineitem_filtered", "l1" )
),
alg.scan ( "lineitem_filtered", "l2" )
)
)
)
)
)
)
]
|
991,877 | 4afe8d512ca7215347facae3f7f2034249e0c362 | from functions import *
import matplotlib.pyplot as plt
# received_packets = 0
# sent_packets = 0
# dropped_packets = 0
# total_delay = 0
# received_bytes = 0
# start_time = 1000000
# end_time = 0
# header_bytes = 20 # constants
# sent_time = {} # Empty Dictionary
# test_str = "GFG is for Geeks"
# N = 3
# # Get Nth word in String
# # using split()
# res = test_str.split()
# print(res[3])
# Metric accumulators, one dict per experiment dimension.  Each maps a
# metric name to the list of values collected across the trace files.
_METRICS = ('throughput', 'avgDelay', 'deliveryRatio', 'dropRatio')

Y_output_flow = {metric: [] for metric in _METRICS}
Y_output_node = {metric: [] for metric in _METRICS}
Y_output_area = {metric: [] for metric in _METRICS}

# X-axis values for each varied simulation parameter.
X_input = {
    'flow': [10, 20, 30, 40, 50],
    'node': [20, 40, 60, 80, 100],
    'area': [250, 500, 750, 1000, 1250],
}

# Trace files follow the pattern <area>_<nodes>_<flows>.tr
fileNames = ['500_40_%d.tr' % f for f in (10, 20, 30, 40, 50)]         # flow varying
fileNames1 = ['500_%d_20.tr' % n for n in (20, 40, 60, 80, 100)]       # node varying
fileNames2 = ['%d_40_20.tr' % a for a in (250, 500, 750, 1000, 1250)]  # area varying
# Process the flow-variation traces and accumulate the four summary metrics.
# BUG FIX: the original advanced its counter with ``++i`` — in Python that is
# a double unary plus (a no-op), so every file was labelled "File1".
# enumerate() provides a correct counter.  The trace file is also now closed
# via a context manager (the original leaked the handle).
for i, name in enumerate(fileNames):
    print("Flow Variation File{} :{}".format(i + 1, name))
    with open('FlowVariation/' + name, 'r') as trace:
        stats = myFunctionFlow(trace, 0)
    Y_output_flow['throughput'].append(stats['throughput'])
    Y_output_flow['avgDelay'].append(stats['avgDelay'])
    Y_output_flow['deliveryRatio'].append(stats['deliveryRatio'])
    Y_output_flow['dropRatio'].append(stats['dropRatio'])
    print("-------------------------------------\n")
# Process the node-variation traces and accumulate the four summary metrics.
# BUG FIX: ``++i`` is a no-op in Python, so the printed file number was
# always 1; enumerate() gives the real index.  The trace file is now closed
# via a context manager as well.
for i, name in enumerate(fileNames1):
    print("Node Variation File{} :{}".format(i + 1, name))
    with open('NodeVariation/' + name, 'r') as trace:
        stats = myFunctionFlow(trace, 0)
    Y_output_node['throughput'].append(stats['throughput'])
    Y_output_node['avgDelay'].append(stats['avgDelay'])
    Y_output_node['deliveryRatio'].append(stats['deliveryRatio'])
    Y_output_node['dropRatio'].append(stats['dropRatio'])
    print("-------------------------------------\n")
# Process the area-variation traces and accumulate the four summary metrics.
# BUG FIXES: (1) the progress message said "Node Variation" even though this
# loop reads the AreaVariation traces; (2) ``++i`` is a no-op in Python, so
# the printed file number was always 1.  The trace file is now closed via a
# context manager as well.
for i, name in enumerate(fileNames2):
    print("Area Variation File{} :{}".format(i + 1, name))
    with open('AreaVariation/' + name, 'r') as trace:
        stats = myFunctionFlow(trace, 0)
    Y_output_area['throughput'].append(stats['throughput'])
    Y_output_area['avgDelay'].append(stats['avgDelay'])
    Y_output_area['deliveryRatio'].append(stats['deliveryRatio'])
    Y_output_area['dropRatio'].append(stats['dropRatio'])
    print("-------------------------------------\n")
y_Attributes = ['throughput', 'avgDelay', 'deliveryRatio', 'dropRatio']
# Draw one graph per metric for each experiment dimension, in the same
# order as before: all flow plots, then node, then area.
for axis_label, x_key, y_data in (('Flow', 'flow', Y_output_flow),
                                  ('Node', 'node', Y_output_node),
                                  ('Area', 'area', Y_output_area)):
    for y_attribute in y_Attributes:
        plotGraph(X_input[x_key], y_data[y_attribute], axis_label, y_attribute,
                  '{} Variation: {} Vs {}'.format(axis_label, y_attribute, axis_label))
# plotGraph(X_input['flow'], Y_output_flow,
# 'Flow',y_Attributes,'Flow Variation:')
# print(Y_output_flow[0])
# for line in file:
# # print(line)
# count = 1
# words = line.split()
# event = words[0]
# time_sec = float(words[1])
# node = int(words[2].replace('_', ''))
# layer = words[3]
# packet_id = int(words[5])
# packet_type = words[6]
# packet_bytes = int(words[7])
# # print(node)
# # set start time for the first line
# if start_time > time_sec:
# start_time = time_sec
# if layer == "AGT" and packet_type == "tcp":
# if event == "s":
# sent_time[packet_id] = time_sec
# sent_packets += 1
# elif event == "r":
# delay = time_sec - sent_time[packet_id]
# total_delay += delay
# bytes = (packet_bytes - header_bytes)
# received_bytes += bytes
# received_packets += 1
# if packet_type == "tcp" and event == "D":
# dropped_packets += 1
# end_time = time_sec
# simulation_time = end_time - start_time
# print("Sent Packets :{}".format(sent_packets))
# print("Dropped Packets :{}".format(dropped_packets))
# print("Received Packets :{}".format(received_packets))
# print("-------------------------------------------------------------")
# print("Throughput :{} bits/sec".format(((received_bytes * 8) / simulation_time)))
# print("Average Delay :{} seconds".format((total_delay / received_packets)))
# print("Delivery ratio :{} ".format((received_packets / sent_packets)))
# print("Drop ratio :{} ".format((dropped_packets / sent_packets)))
# event = words[0]
# time_sec = float(words[1])
# node = words[2].replace('_', '')
# layer = words[3]
# packet_id = words[5]
# packet_type = words[6]
# packet_bytes = int(words[7])
# # print(node)
# # set start time for the first line
# if start_time > time_sec:
# start_time = time_sec
# if layer == "AGT" and packet_type == "tcp":
# if event == "s":
# sent_time[packet_id] = time_sec
# sent_packets += 1
# elif event == "r":
# delay = time_sec - sent_time[packet_id]
# total_delay += delay
# bytes = (packet_bytes - header_bytes)
# received_bytes += bytes
# received_packets += 1
# if packet_type == "tcp" and event == "D":
# dropped_packets += 1
# end_time = time_sec
# simulation_time = end_time - start_time
# print("Sent Packets :{}".format(sent_packets))
# print("Dropped Packets :{}".format(dropped_packets))
# print("Received Packets :{}".format(received_packets))
# print("-------------------------------------------------------------")
# print("Throughput :{} bits/sec".format(((received_bytes * 8) / simulation_time)))
# print("Average Delay :{} seconds".format((total_delay / received_packets)))
# print("Delivery ratio :{} ".format((received_packets / sent_packets)))
# print("Drop ratio :{} ".format((dropped_packets / sent_packets)))
|
991,878 | 4c72f61bd0723789a0abdd515302496e64dab404 | from nltk.corpus import stopwords
from textblob import Word
import joblib
import numpy as np
import config as cf
def analyze(data):
    """Classify the mood of *data* and return a matching 5-colour palette.

    The text is cleaned (punctuation stripped, stop words removed, words
    lemmatised), vectorised with a pre-trained vectoriser and classified
    word-by-word by a pre-trained random forest; the majority class selects
    one of six hard-coded palettes.

    Returns:
        list[str] | None: the palette for the majority class, or None if the
        predicted class is not in the palette table.
    """
    import re  # local import: only needed for the punctuation strip below

    # Palette per predicted emotion class.
    answers_decode = {
        0: ["#616161", "#9E9E9E", "#757575", "#536DFE", "#607D8B"],
        1: ["#00BCD4", "#B3E5FC", "#03A9F4", "#00BCD4", "#00796B"],
        2: ["#00796B", "#B2DFDB", "#009688", "#757575", "#FFC107"],
        3: ["#7B1FA2", "#E1BEE7", "#FF4081", "#9C27B0", "#D32F2F"],
        4: ["#E64A19", "#D32F2F", "#FFEB3B", "#E040FB", "#F44336"],
        5: ["#FFA000", "#FFECB3", "#FFC107", "#CDDC39", "#FFA000"],
    }

    # BUG FIX: the original called ``data.replace('[^\\w\\s].', ' ')`` —
    # str.replace is literal, not regex, so punctuation was never removed.
    # The commented-out pandas version below uses the same pattern with
    # pandas' regex replace, which shows the intent; re.sub implements it.
    data = re.sub(r'[^\w\s]', ' ', data).split()
    stop = stopwords.words('english')
    data = [" ".join(w for w in token.split() if w not in stop) for token in data]
    data = [" ".join(Word(w).lemmatize() for w in token.split()) for token in data]

    count_vect = joblib.load('../model/class_triple.joblib')
    # count_vect = joblib.load(cf.EMBEDDINGS_PATH)
    data_vect = count_vect.transform(data)
    rf = joblib.load('../model/rf_triple.joblib')
    # rf = joblib.load(cf.MODEL_PATH)

    # Majority vote over the per-word predictions.
    data_pred = list(rf.predict(data_vect))
    data_pred = max(set(data_pred), key=data_pred.count)
    return answers_decode.get(data_pred)
'''
tweetss = pd.DataFrame(['I am very happy today! The atmosphere looks cheerful',
'Things are looking great. It was such a good day',
'Success is right around the corner. Lets celebrate this victory',
'Everything is more beautiful when you experience them with a smile!',
'Now this is my worst, okay? But I am gonna get better.',
'I am tired, boss. Tired of being on the road, lonely as a sparrow in the rain. I am tired of all the pain I feel',
'This is quite depressing. I am filled with sorrow',
'His death broke my heart. It was a sad day'])
'''
'''
def analyze(tweets):
# Doing some preprocessing on these tweets as done before
tweets[0] = tweets[0].str.replace('[^\w\s]',' ')
from nltk.corpus import stopwords
stopp = stopwords.words('english')
tweets[0] = tweets[0].apply(lambda x: " ".join(x for x in x.split() if x not in stopp))
from textblob import Word
tweets[0] = tweets[0].apply(lambda x: " ".join([Word(word).lemmatize() for word in x.split()]))
# Extracting Count Vectors feature from our tweets
count_vect = joblib.load('../model/class_rf.joblib')
tweet_count = count_vect.transform(tweets[0])
rf = joblib.load('../model/rf.joblib')
#Predicting the emotion of the tweet using our already trained linear SVM
tweet_pred = rf.predict(tweet_count)
print(tweet_pred)
'''
# Manual smoke test: print the colour palette chosen for a sample sentence.
if __name__ == '__main__':
    print(analyze('I love you'))
|
991,879 | c191a0fbfb9ff5bb430576198b4e49fc4faf6fa0 | from loSynthTuning import synth
synth() |
991,880 | fb4f7347a3a60d1a53bc602816cd42ec29862e9e | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import sklearn.cluster as sk_clustering
# Side-by-side comparison of random-init k-means vs k-means++ on 2-D
# object sizes (width, height).
fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(12, 6))
object_sizes = pd.read_csv("data/object_sizes.csv")
# plt.scatter(x=object_sizes["width"], y=object_sizes["height"])
X = object_sizes[["width", "height"]]

print("K-means:")
# Plain k-means: a single random initialisation (n_init=1).
k_means_clustering_model = sk_clustering.KMeans(n_clusters=5, init="random", n_init=1)
k_means_clustering_model.fit(X)
k_means_classes = k_means_clustering_model.predict(X)
ax1.set_title("K-means")
ax1.scatter(x=object_sizes["width"], y=object_sizes["height"], c=k_means_classes, cmap="prism")

print("K-means++:")
k_means_pp_clustering_model = sk_clustering.KMeans(n_clusters=5, init="k-means++")
k_means_pp_clustering_model.fit(X)
k_means_pp_classes = k_means_pp_clustering_model.predict(X)
ax2.set_title("K-means++")
ax2.scatter(x=object_sizes["width"], y=object_sizes["height"], c=k_means_pp_classes, cmap="prism")

# BUG FIX: the centroids below were read from the plain k-means model even
# though the variable name (and intent) refer to the k-means++ run.
k_means_pp_centroids = [(int(round(cx)), int(round(cy)))
                        for cx, cy in k_means_pp_clustering_model.cluster_centers_]
print(k_means_pp_centroids)
plt.show()
|
991,881 | 4a455beedf7065326b15844388e82c8863d8cc16 |
import logging
import resource
from epopt import tree_format
def cpu_time():
    """Return this process's user-mode CPU time so far, in seconds."""
    usage = resource.getrusage(resource.RUSAGE_SELF)
    return usage.ru_utime
class DeferredMessage(object):
    """Lazily-formatted log message.

    Wraps a callable and its arguments; the (possibly expensive) call only
    happens when the message is rendered via str() — e.g. when a logging
    handler actually emits the record.
    """

    def __init__(self, func, *args):
        self.func, self.args = func, args

    def __str__(self):
        # Defer formatting until the message is actually needed.
        return self.func(*self.args)
def log_debug(f, *args):
    """Log at DEBUG level, deferring the formatting call ``f(*args)`` until
    the record is actually rendered by a handler."""
    logging.debug(DeferredMessage(f, *args))
def log_debug_expr(msg, expr):
    """Debug-log ``msg`` followed by the pretty-printed expression tree.

    The tree_format call is deferred, so the formatting cost is only paid
    when DEBUG logging is enabled.
    """
    log_debug(lambda msg, expr: msg + ":\n" + tree_format.format_expr(expr),
              msg, expr)
|
991,882 | 68c981ea94e870e1f2ca3039adafc94acd190684 | from . import HypergraphEmbedding
from .hypergraph_util import *
import numpy as np
import multiprocessing
from multiprocessing import Pool
from tqdm import tqdm
import logging
_shared_info = {}
log = logging.getLogger()
################################################################################
# AlgebraicDistance - Helper and runner #
################################################################################
## Helper functions to update embeddings ######################################
def _init_update_alg_dist(A2B, B2A, A2emb, B2emb):
  """Pool initializer: stash the incidence matrices and embeddings in the
  module-level ``_shared_info`` dict so worker processes can read them.

  ``A2B``/``B2A`` are transposed incidence matrices between the two sides
  of the bipartite node/edge structure; ``A2emb``/``B2emb`` hold one
  embedding row per entity on each side.
  """
  _shared_info.clear()

  # The two incidence matrices must be transposes of each other, and the
  # embedding row counts / dimensionality must line up.
  assert A2B.shape == (B2A.shape[1], B2A.shape[0])
  assert A2emb.shape[0] == A2B.shape[0]
  assert B2emb.shape[0] == A2B.shape[1]
  assert A2emb.shape[1] == B2emb.shape[1]

  _shared_info.update(A2B=A2B, B2A=B2A, A2emb=A2emb, B2emb=B2emb)
def _update_alg_dist(a_idx, A2B=None, B2A=None, A2emb=None, B2emb=None):
if A2B is None:
A2B = _shared_info["A2B"]
if B2A is None:
B2A = _shared_info["B2A"]
if A2emb is None:
A2emb = _shared_info["A2emb"]
if B2emb is None:
B2emb = _shared_info["B2emb"]
a_emb = A2emb[a_idx, :]
b_emb_weight = [
(B2emb[b_idx], 1 / B2A[b_idx].nnz) for b_idx in A2B[a_idx].nonzero()[1]
]
b_emb = sum(e * w for e, w in b_emb_weight) / sum(w for _, w in b_emb_weight)
return a_idx, (a_emb + b_emb) / 2
def _helper_update_embeddings(hypergraph, node_embeddings, edge_embeddings,
                              node2edges, edge2nodes, workers, disable_pbar):
  """Run one algebraic-distance smoothing pass over nodes, then edges.

  Each entity's embedding is replaced by the average of itself and the
  degree-weighted mean of its neighbours (see _update_alg_dist), computed
  in a process pool.  Nodes are updated from the current edge embeddings;
  edges are then updated from the *new* node embeddings.

  Returns:
    (new_node_embeddings, new_edge_embeddings) — fresh arrays; the inputs
    are not modified.
  """
  if not disable_pbar:
    log.info("Placing nodes with respect to edges")
  new_node_embeddings = np.copy(node_embeddings)
  with Pool(
      workers,
      initializer=_init_update_alg_dist,
      initargs=(
          node2edges,  # A2B: node -> incident edges
          edge2nodes,  # B2A: edge -> member nodes
          node_embeddings,  # A2emb
          edge_embeddings  # B2emb
      )) as pool:
    with tqdm(total=len(hypergraph.node), disable=disable_pbar) as pbar:
      for idx, emb in pool.imap(
          _update_alg_dist, hypergraph.node, chunksize=128):
        new_node_embeddings[idx, :] = emb
        pbar.update(1)
  if not disable_pbar:
    log.info("Placing edges with respect to nodes")
  new_edge_embeddings = np.copy(edge_embeddings)
  with Pool(
      workers,
      initializer=_init_update_alg_dist,
      initargs=(
          edge2nodes,  # A2B: edge -> member nodes
          node2edges,  # B2A: node -> incident edges
          edge_embeddings,  # A2emb
          new_node_embeddings  # B2emb: note — uses the freshly updated nodes
      )) as pool:
    with tqdm(total=len(hypergraph.edge), disable=disable_pbar) as pbar:
      for idx, emb in pool.imap(
          _update_alg_dist, hypergraph.edge, chunksize=128):
        new_edge_embeddings[idx, :] = emb
        pbar.update(1)
  return new_node_embeddings, new_edge_embeddings
## Helper functions to scale embeddings ########################################
def _helper_scale_embeddings(hypergraph, node_embeddings, edge_embeddings,
                             workers, disable_pbar):
  """Rescale node and edge embeddings in place into the [0, 1] hypercube.

  The per-dimension min/max are taken over nodes and edges jointly, so both
  sets end up in the same coordinate frame.  ``workers`` is accepted for
  signature parity with the update helper but is unused here.

  Returns:
    (node_embeddings, edge_embeddings) — the same (mutated) arrays.
  """
  if not disable_pbar:
    log.info("Getting min-max embedding per dimension")
  low = np.minimum(np.min(node_embeddings, axis=0),
                   np.min(edge_embeddings, axis=0))
  high = np.maximum(np.max(node_embeddings, axis=0),
                    np.max(edge_embeddings, axis=0))
  span = high - low

  if not disable_pbar:
    log.info("Scaling nodes to 0-1 hypercube")
  for idx in tqdm(hypergraph.node, disable=disable_pbar):
    node_embeddings[idx, :] = (node_embeddings[idx, :] - low) / span

  if not disable_pbar:
    log.info("Scaling edges to 0-1 hypercube")
  for idx in tqdm(hypergraph.edge, disable=disable_pbar):
    edge_embeddings[idx, :] = (edge_embeddings[idx, :] - low) / span

  return node_embeddings, edge_embeddings
def EmbedAlgebraicDistance(hypergraph,
                           dimension,
                           iterations=20,
                           run_in_parallel=True,
                           disable_pbar=False):
  """Embed a hypergraph's nodes and edges via iterated algebraic distance.

  Positions are randomly initialized in the unit hypercube, then repeatedly
  neighbour-averaged (_helper_update_embeddings) and rescaled back into
  [0, 1] (_helper_scale_embeddings).

  Args:
    hypergraph: hypergraph proto with ``node`` and ``edge`` maps.
    dimension: embedding dimensionality.
    iterations: number of smoothing/rescaling rounds.
    run_in_parallel: if True, use every CPU core for the update step.
    disable_pbar: if True, suppress the outer progress bar.

  Returns:
    A HypergraphEmbedding proto keyed by the ORIGINAL node/edge ids
    (indices are compressed internally and mapped back at the end).
  """
  workers = multiprocessing.cpu_count() if run_in_parallel else 1
  # Compress ids to a dense 0..N-1 range; node_map/edge_map restore them.
  hypergraph, node_map, edge_map = CompressRange(hypergraph)
  num_nodes = max(hypergraph.node) + 1
  num_edges = max(hypergraph.edge) + 1
  log.info("Random Initialization")
  # All embeddings start (and are kept) in the 0-1 interval.
  node_embeddings = np.random.random((num_nodes, dimension))
  edge_embeddings = np.random.random((num_edges, dimension))
  log.info("Getting node-edge matrix")
  node2edges = ToCsrMatrix(hypergraph)
  log.info("Getting edge-node matrix")
  edge2nodes = ToEdgeCsrMatrix(hypergraph)
  log.info("Performing iterations of Algebraic Distance Calculations")
  for iteration in tqdm(range(iterations), disable=disable_pbar):
    # Inner progress bars are disabled; only the outer loop reports.
    node_embeddings, edge_embeddings = _helper_update_embeddings(
        hypergraph,
        node_embeddings,
        edge_embeddings,
        node2edges,
        edge2nodes,
        workers,
        disable_pbar=True)
    node_embeddings, edge_embeddings = _helper_scale_embeddings(
        hypergraph,
        node_embeddings,
        edge_embeddings,
        workers,
        disable_pbar=True)
  # Pack results into the proto, translating back to the original ids.
  embedding = HypergraphEmbedding()
  embedding.dim = dimension
  embedding.method_name = "AlgebraicDistance"
  for node_idx in hypergraph.node:
    embedding.node[node_map[node_idx]].values.extend(
        node_embeddings[node_idx, :])
  for edge_idx in hypergraph.edge:
    embedding.edge[edge_map[edge_idx]].values.extend(
        edge_embeddings[edge_idx, :])
  return embedding
|
991,883 | 1b9504140d5903a7a2d05565ceb265c0e7651744 | N = int(input())
# Read the N checkpoint coordinates (x, y, z).
xyz = [list(map(int, input().split())) for _ in range(N)]
# d[i][j]: asymmetric travel cost from point i to point j —
# Manhattan distance in x/y plus a climbing cost only when the
# destination is higher (descending is free).
d = [[float('inf')]*N for _ in range(N)]
for i in range(N):
    for j in range(i+1, N):
        a, b, c = xyz[i]
        p, q, r = xyz[j]
        d[i][j] = abs(p-a)+abs(q-b)+max(0, r-c)
        d[j][i] = abs(a-p)+abs(b-q)+max(0, c-r)
# dp[S][v]: minimum cost starting at point 0, having visited the set S
# (bitmask), currently standing at v.  d[i][i] stays inf, which keeps
# self-loops out of the transitions.
dp = [[float('inf')]*N for _ in range(2**N)]
dp[0][0] = 0
# bitDP (Held-Karp over subsets); the answer is the cheapest full tour
# that returns to point 0.
for s in range(2**N):
    for u in range(N):
        for v in range(N):
            if s & (1 << v) == 0:
                dp[s | (1 << v)][v] = min(dp[s | (1 << v)][v], dp[s][u]+d[u][v])
print(dp[2**N-1][0])
|
991,884 | 3634241057af0126957d3dfba49fc1ec6dc2f6d4 | from PySide.QtNetwork import QNetworkAccessManager
from PySide.QtNetwork import QNetworkProxy
from PySide.QtCore import SIGNAL, QUrl
import urlparse
class PtNetworkAccessManager(QNetworkAccessManager):
_url_filter = []
    def __init__(self, parent):
        """Create the manager and route every finished reply to _request_ended."""
        QNetworkAccessManager.__init__(self, parent)
        self.finished.connect(self._request_ended)
    def _request_ended(self,reply):
        # Slot connected to QNetworkAccessManager.finished; placeholder
        # hook, currently a no-op.
        pass
    def createRequest(self, operation, request, outgoingData):
        """Intercept request creation to drop filtered URLs.

        If the requested URL is listed in ``_url_filter`` it is redirected
        to ``about:blank`` before being handed to the base implementation;
        otherwise the request passes through unchanged.
        """
        url = request.url().toString()
        for h in request.rawHeaderList():
            pass
            # self._debug(DEBUG, "  %s: %s" % (h, request.rawHeader(h)))
        if self._url_filter:
            if url in self._url_filter:
                # self._debug(INFO, "URL filtered: %s" % url)
                request.setUrl(QUrl("about:blank"))
            else:
                pass
                # self._debug(DEBUG, "URL not filtered: %s" % url)
        # print url
        # if url == "http://v5.ele.me/":
        #     request.setRawHeader("Accept-Encoding","")
        reply = QNetworkAccessManager.createRequest(self, operation, request, outgoingData)
        # self.emit(SIGNAL('networkRequestCreated(QNetworkReply*)'), reply)
        # if html[:6]=='\x1f\x8b\x08\x00\x00\x00':
        #     html=gzip.GzipFile(fileobj=StringIO(html)).read()
        return reply
    def get_proxy(self):
        """Return the currently configured proxy.

        NOTE(review): despite the original docstring's claim, this returns
        the QNetworkProxy object from ``self.proxy()``, not a string.
        """
        return self.proxy()
    def set_proxy(self, string_proxy=None):
        """Set proxy:
        url can be in the form:
        - hostname (http proxy)
        - hostname:port (http proxy)
        - username:password@hostname:port (http proxy)
        - http://username:password@hostname:port
        - socks5://username:password@hostname:port
        - https://username:password@hostname:port
        - httpcaching://username:password@hostname:port
        - ftpcaching://username:password@hostname:port
        """
        # NOTE(review): if string_proxy is empty/None, ``urlinfo`` is never
        # bound and the scheme checks below raise NameError — confirm that
        # callers always pass a non-empty proxy string.
        if not string_proxy:
            string_proxy = ''
        if string_proxy:
            urlinfo = urlparse.urlparse(string_proxy)
            # default to http proxy if we have a string with no scheme
            if not urlinfo.scheme:
                string_proxy = "http://%s" % string_proxy
                urlinfo = urlparse.urlparse(string_proxy)
        self.proxy_url = string_proxy
        proxy = QNetworkProxy()
        # Map the URL scheme onto the matching QNetworkProxy type.
        if urlinfo.scheme == 'socks5':
            proxy.setType(QNetworkProxy.Socks5Proxy)
        elif urlinfo.scheme in ['https', 'http']:
            proxy.setType(QNetworkProxy.HttpProxy)
        elif urlinfo.scheme == 'httpcaching':
            proxy.setType(QNetworkProxy.HttpCachingProxy)
        elif urlinfo.scheme == 'ftpcaching':
            proxy.setType(QNetworkProxy.FtpCachingProxy)
        else:
            proxy.setType(QNetworkProxy.NoProxy)
        # Copy over whichever URL components are present; user/password
        # default to empty strings when absent.
        if urlinfo.hostname != None:
            proxy.setHostName(urlinfo.hostname)
        if urlinfo.port != None:
            proxy.setPort(urlinfo.port)
        if urlinfo.username != None:
            proxy.setUser(urlinfo.username)
        else:
            proxy.setUser('')
        if urlinfo.password != None:
            proxy.setPassword(urlinfo.password)
        else:
            proxy.setPassword('')
        self.setProxy(proxy)
return self.proxy() |
991,885 | 914b11bdca0f300381cadf9af0157b62ab1d854f | from sqlalchemy import Column, String, Integer, BOOLEAN
from backend.Model.connection import MyBase
class VolumeModel(MyBase):
    """ORM row for the ``volume_info`` table (one storage volume)."""
    __tablename__ = 'volume_info'
    # Primary key (40-char string id).
    id = Column(String(40), primary_key=True, )
    uuid = Column(String(40), nullable=False)
    name = Column(String(40), nullable=False)
    status = Column(String(40), nullable=False)
    size = Column(Integer(), nullable=False)
    type = Column(Integer(), nullable=False)
    # Whether the volume is bootable.
    bootable = Column(BOOLEAN(), nullable=False)
    # VM / data-center associations; nullable, so a volume may be detached.
    vm_id = Column(String(40), )
    data_center_id = Column(String(40), )
    timestamp = Column(Integer(), nullable=False)
    # Version counter, defaults to 0 — presumably for optimistic
    # concurrency; confirm against the update code paths.
    version = Column(Integer(), nullable=False, default=0)
    def __init__(self, _id, uuid, name, status, size, type, bootable, vm_id, data_center_id, timestamp):
        """Populate every column except ``version`` (left to its default of 0)."""
        self.id = _id
        self.uuid = uuid
        self.name = name
        self.status = status
        self.size = size
        self.type = type
        self.bootable = bootable
        self.vm_id = vm_id
        self.data_center_id = data_center_id
        self.timestamp = timestamp
991,886 | 101c9145100dc2b4496d4f9af59e126bd7a7bd48 | import uuid
import pytz
from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Timestamp(models.Model):
    """Abstract base model adding creation and last-modification timestamps."""
    # Surrogate primary key, generated client-side so it exists before save.
    uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False, verbose_name="Primary key")
    created_at = models.DateTimeField(auto_now_add=True, verbose_name="Created at")
    last_modified_at = models.DateTimeField(default=timezone.now, verbose_name="Last modified at")

    class Meta:
        abstract = True

    def save(self, *args, **kwargs):
        """Persist the row, refreshing ``last_modified_at`` first.

        Bug fix: the original assigned to a misspelled attribute
        (``last_updated_at``) *after* calling ``super().save()``, so the
        modification time was never actually stored.
        """
        self.last_modified_at = timezone.now()
        super().save(*args, **kwargs)
|
991,887 | 341565a12b04b465113a32031fce05f278b3ba8a | class SteamAPIError(Exception):
""" Error raised when the Steam API has issues. """
def __init__(self, value):
self.errormessage = value |
991,888 | 58adfe2160441b849b939295fc05ce2bfe10eb95 | print "I will count all chicken"
print "Hens",25 + 30/6
print "Roosters",100 - 25 * 3 % 4
print "Now I will count eggs"
print "Is it true that (3 + 2) < (5 +7)?"
print 3 + 2 < 5 + 7
print "What is 3+2",3 + 2
print "what is 5 - 7?",5 - 7
print "Oh, that's why it is false"
print "How about more"
print "Is it greater?",5>-2
print "Is it greater or equal?",5 >=-2
print "Is it less or equal?",5 <=-2
|
991,889 | f9cd0d7e64f6a9ba28040b011c4ec73a685625b4 | from PySide2 import QtCore
from PySide2.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QFormLayout, QLabel, QGridLayout, QSizePolicy
from PySide2.QtWidgets import QStackedLayout
from tab2.workflow import workflow
from tab2.choose_option import choose_option
from tab2.cluster_arguments import cluster_arguments
from tab2.set_time import set_time
from tab2.set_cpus import set_cpus
from tab2.parameter_setter import parameter_setter_single
from custom_config_parser import custom_config_parser
from tooltip_label import tooltip_label
from utils import get_tooltip
def init_tab2(paths):
    """Build the settings tab (tab 2) of the GUI.

    Left side: always-visible workflow/profile options plus a stacked layout
    whose visible page depends on the chosen profile/parallelism. Right side:
    the OpenGL workflow view, or a plain label when OpenGL is disabled.

    *paths* maps config kinds ('nf', 'sh', 'pipe') to config file paths.
    Returns the assembled QWidget for the tab.
    """
    tab2 = QWidget()
    tab2_layout = QHBoxLayout()
    tab2.setLayout(tab2_layout)

    # NOTE(review): this single QLabel instance is reused as a spacer in many
    # layouts below (as in the original code). Qt reparents a widget each
    # time it is inserted, so it only actually shows in the last layout it
    # was added to — confirm this is intentional.
    filler = QLabel('')
    filler.setFixedWidth(1)

    # --- Left box: always-visible options ---------------------------------
    tab2_leftbox = QWidget()
    tab2_leftbox_layout = QVBoxLayout()
    tab2_leftbox.setLayout(tab2_leftbox_layout)

    tab2_leftbox_top = QWidget()
    tab2_leftbox_top_layout = QFormLayout()
    tab2_leftbox_top.setLayout(tab2_leftbox_top_layout)

    tab2_choose_option_workflow = choose_option('workflow', paths['nf'])
    tab2_choose_option_boxcar_convert = choose_option('boxcar_convert', paths['nf'])
    tab2_choose_option_profile = choose_option('profile', paths['sh'])
    tab2_choose_option_parallel_quandenser = choose_option('parallel_quandenser', paths['nf'])
    tab2_max_forks_quandenser = parameter_setter_single('parallel_quandenser_max_forks', paths['nf'])
    tab2_max_forks_msconvert = parameter_setter_single('parallel_msconvert_max_forks', paths['nf'])
    tab2_max_forks_boxcar = parameter_setter_single('parallel_boxcar_max_forks', paths['nf'])

    tab2_leftbox_top_layout.addRow(QLabel('Choose workflow'), tab2_choose_option_workflow)
    tab2_leftbox_top_layout.addRow(QLabel('Profile'), tab2_choose_option_profile)
    tab2_leftbox_top_layout.addRow(QLabel('Enable boxcar conversion'), tab2_choose_option_boxcar_convert)
    tab2_leftbox_top_layout.addRow(QLabel('Enable parallel quandenser'), tab2_choose_option_parallel_quandenser)
    tab2_leftbox_top_layout.addRow(QLabel('Max forks msconvert convert'), tab2_max_forks_msconvert)
    tab2_leftbox_top_layout.addRow(QLabel('Max forks boxcar'), tab2_max_forks_boxcar)
    tab2_leftbox_top_layout.addRow(QLabel('Max forks quandenser'), tab2_max_forks_quandenser)
    tab2_leftbox_top_layout.addRow(filler, filler)  # empty space
    tab2_leftbox_layout.addWidget(tab2_leftbox_top)

    # --- Stacked pages, shown/hidden depending on the profile option ------
    tab2_hidden = QWidget()
    tab2_hidden.hidden_object = True
    tab2_hidden_layout = QStackedLayout()
    tab2_hidden.setLayout(tab2_hidden_layout)

    # Stack 1: empty page — cluster disabled.
    tab2_hidden_stack_1 = QWidget()
    tab2_hidden_stack_1_layout = QFormLayout()
    tab2_hidden_stack_1.setLayout(tab2_hidden_stack_1_layout)

    # Stack 2: regular quandenser, cluster enabled.
    tab2_hidden_stack_2 = _make_stack_page(
        paths, filler,
        [('MSconvert cpus + time', 'msconvert'),
         ('Boxcar convert cpus + time', 'boxcar_convert'),
         ('Quandenser cpus + time', 'quandenser'),
         ('Tide cpus + time', 'tide_search'),
         ('Triqler cpus + time', 'triqler')],
        trailing_fillers=0)

    # Stack 3: parallel quandenser, cluster enabled.
    tab2_hidden_stack_3 = _make_stack_page(
        paths, filler,
        [('MSconvert cpus + time', 'msconvert'),
         ('Boxcar convert cpus + time', 'boxcar_convert'),
         ('Quandenser p1 cpus + time', 'quandenser_parallel_1_dinosaur'),
         ('Quandenser p2 cpus + time', 'quandenser_parallel_2_maracluster'),
         ('Quandenser p3 cpus + time', 'quandenser_parallel_3_match_features'),
         ('Quandenser p4 cpus + time', 'quandenser_parallel_4_consensus'),
         ('Tide cpus + time', 'tide_search'),
         ('Triqler cpus + time', 'triqler')],
        trailing_fillers=3)

    # Stack 4: parallel quandenser with parallel maracluster, cluster enabled.
    tab2_hidden_stack_4 = _make_stack_page(
        paths, filler,
        [('MSconvert cpus + time', 'msconvert'),
         ('Boxcar convert cpus + time', 'boxcar_convert'),
         ('Quandenser p1 cpus + time', 'quandenser_parallel_1_dinosaur'),
         ('Quandenser p2.1 cpus + time', 'quandenser_parallel_2_maracluster_parallel_1_index'),
         ('Quandenser p2.2 cpus + time', 'quandenser_parallel_2_maracluster_parallel_2_pvalue'),
         ('Quandenser p2.3 cpus + time', 'quandenser_parallel_2_maracluster_parallel_3_overlap'),
         ('Quandenser p2.4 cpus + time', 'quandenser_parallel_2_maracluster_parallel_4_cluster'),
         ('Quandenser p3 cpus + time', 'quandenser_parallel_3_match_features'),
         ('Quandenser p4.1 cpus + time', 'quandenser_parallel_4_consensus_parallel_1_index'),
         ('Quandenser p4.2 cpus + time', 'quandenser_parallel_4_consensus_parallel_2_pvalue'),
         ('Quandenser p4.3 cpus + time', 'quandenser_parallel_4_consensus_parallel_3_overlap'),
         ('Quandenser p4.4 cpus + time', 'quandenser_parallel_4_consensus_parallel_4_cluster'),
         ('Tide cpus + time', 'tide_search'),
         ('Triqler cpus + time', 'triqler')],
        trailing_fillers=3)

    # Register the pages and attach the stacked area below the fixed options.
    tab2_hidden_layout.addWidget(tab2_hidden_stack_1)
    tab2_hidden_layout.addWidget(tab2_hidden_stack_2)
    tab2_hidden_layout.addWidget(tab2_hidden_stack_3)
    tab2_hidden_layout.addWidget(tab2_hidden_stack_4)
    tab2_leftbox_layout.addWidget(tab2_hidden)

    # --- Right box: workflow view (or placeholder when OpenGL disabled) ---
    tab2_rightbox = QWidget()
    tab2_rightbox_layout = QHBoxLayout()
    tab2_rightbox.setLayout(tab2_rightbox_layout)
    pipe_parser = custom_config_parser()
    pipe_parser.load(paths['pipe'])
    if pipe_parser.get('disable-opengl') in ['false', '']:
        tab2_workflow = workflow()
        tab2_rightbox_layout.addWidget(tab2_workflow)
    else:
        tab2_workflow = tooltip_label("OpenGL disabled", "OpenGL disabled", style=True)
        tab2_rightbox_layout.addWidget(tab2_workflow)
    tab2_layout.addWidget(tab2_rightbox)

    # Combine (the original added the right box twice; preserved as-is).
    tab2_layout.addWidget(tab2_leftbox)
    tab2_layout.addWidget(tab2_rightbox)
    return tab2


def _make_stack_page(paths, filler, cpu_time_entries, trailing_fillers):
    """Create one stacked-layout page: shared cluster options followed by
    per-process cpu/time setters.

    *cpu_time_entries* is a list of (row label, parameter base name); each
    base is expanded to '<base>_cpus' and '<base>_time'. The widgets are
    consumed in (label, widget, widget) triples — the pair goes into a small
    container so each form row shows label + pair, exactly as the original
    three copy-pasted stack builders did.
    """
    page = QWidget()
    page_layout = QFormLayout()
    page_layout.setVerticalSpacing(0)
    page.setLayout(page_layout)

    widgets = [
        get_tooltip('cluster-type'),
        choose_option("process.executor", paths['nf']),
        filler,
        get_tooltip('cluster-arguments'),
        cluster_arguments("process.clusterOptions", paths['nf']),
        filler,
        get_tooltip('cluster-queue'),
        cluster_arguments("process.queue", paths['nf']),
        filler,
    ]
    for label_text, base in cpu_time_entries:
        widgets.append(QLabel(label_text))
        widgets.append(set_cpus(base + '_cpus', paths['nf']))
        widgets.append(set_time(base + '_time', paths['nf']))
    widgets.extend([filler] * trailing_fillers)  # empty space

    list_itr = iter(widgets)
    for label in list_itr:
        combine_widget = QWidget()
        combine_widget_layout = QFormLayout()
        combine_widget.setLayout(combine_widget_layout)
        widget1, widget2 = next(list_itr), next(list_itr)
        combine_widget_layout.addRow(widget1, widget2)
        page_layout.addRow(label, combine_widget)
    return page
|
991,890 | 049c3859c28bfd20e3929c672d9f76488c42aa5f |
# KasperConstant - descend num - ascend num == 6174
# Counts how many Kaprekar iterations (digits sorted descending minus digits
# sorted ascending) a 4-digit input needs before the difference is 6174.
n = input()
# t: the input's digits in ascending order; t1: descending order.
t = sorted(n)
t = ''.join(i for i in t)
t1 = t[::-1]
counter = 0
while int(t1)-int(t) != 6174:
    k1 = (int(t1)-int(t))
    if k1<1000:
        # NOTE(review): this pads sub-1000 differences back to four digits by
        # appending a '0' on the right — verify this matches the intended
        # zero-padding for the Kaprekar routine.
        k1 = str(k1)+'0'
        #break
    # Re-sort the new difference's digits for the next iteration.
    k2 = sorted(str(k1))
    k3 = ''.join(i for i in k2)
    k4 = k3[::-1]
    t1 = k4
    t = k3
    #print(k3, k3[::-1])
    #break
    counter += 1
    #print(k1)
# +1: the final subtraction that produced 6174 ends the loop before the
# counter is incremented.
print(counter+1)
# input 2111
# output 5
|
991,891 | 6770f4c67aede60d595300976ba0cf6b01a94288 | # coding: utf8
import logging
from summermvc.mvc import HandlerInterceptor
from summermvc.decorator import *
LOGGER = logging.getLogger(__name__)
@component
class LogInterceptor(HandlerInterceptor):
    """Interceptor that logs before and after request handling."""

    @override
    def pre_handle(self, request, response, model_and_view):
        # FIX: use the module-level LOGGER (defined above) instead of the
        # root logger, so records carry this module's name and level config.
        LOGGER.info("pre handle in LogInterceptor")

    @override
    def post_handle(self, request, response, model_and_view):
        LOGGER.info("post handle in LogInterceptor")

    @override
    def path_pattern(self):
        # Intercept every request path.
        return r"/.*"

    @override
    def get_order(self):
        return 1
|
991,892 | c8c527633dc97e5721430570d65b922124a29f10 | import math
# Create a class called Rectangle
class Rectangle:
    """A rectangle defined by its length and width."""

    def __init__(self, length: float, width: float):
        self.length = length
        self.width = width

    def calculate_area(self) -> float:
        """Return length * width, rounded to 2 decimal places."""
        area = self.length * self.width
        return round(area, 2)

    def calculate_perimeter(self) -> float:
        """Return 2*length + 2*width, rounded to 2 decimal places."""
        perimeter = 2 * self.length + 2 * self.width
        return round(perimeter, 2)
# Create a class called Circle
class Circle:
    """A circle defined by its radius."""

    def __init__(self, radius: float):
        self.radius = radius

    def calculate_area(self) -> float:
        """Return pi * r^2, rounded to 2 decimal places."""
        squared = self.radius * self.radius
        return round(math.pi * squared, 2)

    def calculate_circumference(self) -> float:
        """Return pi * 2r, rounded to 2 decimal places."""
        diameter = self.radius * 2
        return round(math.pi * diameter, 2)
991,893 | a1152fd0564ea579a379166a65600c68cebab59b | from flask import Flask, jsonify, render_template, request, g, session,\
redirect, url_for, flash, render_template_string
from flask_paginate import Pagination, get_page_parameter
from flask_login import LoginManager
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, PasswordField
from wtforms.validators import DataRequired
from flask_msearch import Search
from collections import defaultdict
import jieba
from pymongo import MongoClient
from flask_pymongo import PyMongo,DESCENDING,ASCENDING
# from page import get_page
import os
import json
import math
import ast
import pickle
import pymongo
from utils import safe_pickle_dump, strip_version, isvalidid, Config
from tqdm import tqdm
from datetime import datetime, timedelta
# from sshtunnel import SSHTunnelForwarder
# server = SSHTunnelForwarder(
# '10.141.2.222',
# ssh_username='fyw',
# ssh_password='password',
# remote_bind_address=('127.0.0.1', 27017)
# )
# server.start()
# --- Flask application and MongoDB setup -----------------------------------
app = Flask(__name__)
app.config.from_pyfile('settings.py')
# app.config['SQLALCHEMY_DATABASE_URI'] =
# MongoDB database; the most recent documents back the "Latest" section.
# client = MongoClient('127.0.0.1', server.local_bind_port)
client = MongoClient('127.0.0.1',27017)
db = client.PaperPal
# Full-text indexes used by the /query route.
db.paper.create_index([('title',pymongo.TEXT)])
db.ChinesePaper.create_index([('searchTitle',pymongo.TEXT)])
end_date = datetime.today().strftime('%Y-%m-%d')
start_date = (datetime.today() - timedelta(days=100)).strftime('%Y-%m-%d')
# The 200 most recent papers; served by the /recommend route.
PAPERS = [
    # i for i in db.paper.find(
    #     {
    #         'datetime': {
    #             '$gte': start_date,
    #             '$lt': end_date
    #         }
    #     }, limit=200).sort('datetime', pymongo.DESCENDING)
    i for i in db.paper.find().sort('datetime', pymongo.DESCENDING).limit(200)
]
# Venue catalog: category -> list of venue names, per conference/journal.
all_conference = {
    'conference':{
        'CCF-AI-A':['AAAI','ACL','CVPR','ICCV','ICML','IJCAI','NIPS'],
        'CCF-AI-B':['AAMAS','COLING','COLT','ECAI','ECCV','EMNLP','ICAPS','ICCBR','ICRA','KR','PPSN','UAI'],
        'CCF-DATA-A':['ICDE','SIGIR','SIGKDD','SIGMOD','VLDB'],
        'CCF-DATA-B':['CIDR','CIKM','DASFAA','ECML-PKDD','EDBT','ICDM','ICDT','ISWC','PODS','SDM','WSDM'],
    },
    'journal':{
        '中文期刊':['计算机学报','软件学报','计算机研究与发展','大数据']
    }
}
# Years available per venue (drives the year picker on /conference).
conference_years={
    "AAAI":['2019','2018','2017'],
    'ACL':['2019','2018','2017'],
    'CVPR':['2019','2018','2017'],
    'ICCV':['2017'],
    'ICML':['2019','2018','2017'],
    'IJCAI':['2019','2018','2017'],
    'NIPS':['2019','2018','2017'],
    'AAMAS':['2019','2018','2017'],
    'COLING':['2018'],
    'COLT':['2019','2018','2017'],
    'ECAI':['2018'],
    'ECCV':['2018'],
    'EMNLP':['2019','2018','2017'],
    'ICAPS':['2019','2018','2017'],
    'ICCBR':['2017'],
    'ICRA':['2019','2018','2017'],
    'KR':['2018'],
    'PPSN':['2018'],
    'UAI':['2019','2018','2017'],
    '计算机学报':['2020','2019','2018'],
    '软件学报':['2020','2019','2018','2017'],
    '计算机研究与发展':['2020','2019','2018','2017'],
    '大数据':['2020','2019','2018','2017'],
    'ICDE':['2019','2018','2017'],
    'SIGMOD':['2019','2018','2017'],
    'SIGIR':['2019','2018','2017'],
    'SIGKDD':['2019','2018','2017'],
    'VLDB':['2019','2018','2017'],
    'CIDR':['2020','2019','2017'],
    'CIKM':['2019','2018'],
    'DASFAA':['2019','2018','2017'],
    'ECML-PKDD':['2019','2018','2017'],
    'EDBT':['2020','2019','2018','2017'],
    'ICDM':['2019','2018','2017'],
    'ICDT':['2020','2019','2018','2017'],
    'ISWC':['2019','2018','2017'],
    'PODS':['2019','2018','2017'],
    'SDM':['2019','2018','2017'],
    'WSDM':['2020','2019','2018','2017'],
}
# Bootstrap button classes cycled through for the venue buttons on the page.
button_color = ['btn btn-success','btn btn-danger','btn btn-warning','btn btn-secondary','btn btn-primary']
# Maps the UI sort labels (relevance/date/citations) to document fields.
rank = {'相关':'similarity','时间':'datetime','被引':'citation'}
# NOTE(review): test1 appears unused in this file — confirm before removing.
test1 = []
for paper in PAPERS[:30]:
    test1.append(paper['paperId'])
# Papers shown per page.
PAPER_NUM = 6
TOTALS = math.ceil(len(PAPERS) / PAPER_NUM)
# Most recent papers (cache read by the /latest route).
latestPapers = list(db.paper.find().sort('datetime', pymongo.DESCENDING).limit(600))
# latestPapers = [
#     i for i in db.paper.aggregate([{'$match':{'year':'2020'}},{ '$sample':{'size':PAPER_NUM * 100 }}])
# ]
# p
class UserForm(FlaskForm):
    """Login/registration form: username and password, both required."""
    username = StringField('username',
                           render_kw={'placeholder': '用户名'},
                           validators=[DataRequired()])
    password = PasswordField('password',
                             render_kw={'placeholder': '密码'},
                             validators=[DataRequired()])
    submit = SubmitField(label='确定')
class InterestForm(FlaskForm):
    """Single-button confirmation form used on the interest page."""
    submit = SubmitField(label='确定')
def unique_papers(papers):
    """Return *papers* with duplicate titles removed, first occurrence kept.

    Order is preserved. Seen titles are tracked in a set, so the filter runs
    in O(n) rather than the original O(n^2) list-membership scan.
    """
    seen_titles = set()
    deduped = []
    for p in papers:
        if p['title'] not in seen_titles:
            seen_titles.add(p['title'])
            deduped.append(p)
    print(len(deduped))  # kept: the original logged the deduplicated count
    return deduped
@app.route('/')
def main():
    """Landing page: venue lists plus corpus statistics for the header."""
    journals = []
    # Collect every journal name across journal categories.
    for type in all_conference['journal'].keys():
        journals = journals + all_conference['journal'][type]
    conferences = []
    # Collect every conference name across conference categories.
    for type in all_conference['conference'].keys():
        conferences += all_conference['conference'][type]
    print(conferences)
    # Total number of papers in the collection.
    paperNum = db.paper.find({}).count()
    conferenceAndJournalNum = 0
    for key in all_conference.keys():
        for value in all_conference[key].values():
            conferenceAndJournalNum += len(value)
    # Number of papers whose code field links somewhere ('http' substring).
    codeNum = db.paper.find({'code':{'$regex':'http'}}).count()
    return render_template('main.html',
                           conferences=conferences,
                           journals=journals,
                           paperNum=format(paperNum,','),
                           conferenceAndJournalNum=format(conferenceAndJournalNum,','),
                           codeNum=format(codeNum,','))
@app.route('/query')
def query():
    """Full-text paper search.

    Query params: q (search text), c (optional: restrict to one venue),
    code (non-empty -> only papers whose code field matches), rankBy
    ('相关'/'时间'/'被引'), logout. When the English collection yields no
    hits, the query is re-segmented with jieba and retried against the
    Chinese-paper collection.
    """
    rankBy = request.args.get('rankBy') if request.args.get('rankBy') else '相关'  # default: relevance
    code = request.args.get('code',type=str,default='')
    logout = request.args.get('logout')
    if 'user' in session:
        print(f"user in session: {session['user']}")
    if 'user' not in session or logout == 'true':
        session['user'] = ''
    q = request.args.get('q')
    # c means the user restricted the search to venue c.
    c = request.args.get('c', type=str, default='')
    if c:
        total_papers = list(
            db.paper.find({
                'conference':c,
                'code':{'$regex':code},
                '$text': {'$search': q}},
                {'score': {'$meta': 'textScore'}}
            ).limit(600).sort([('score',{'$meta': 'textScore'})]))
    else:
        # Search across all papers.
        total_papers = list(
            db.paper.find({
                'code': {'$regex': code},
                '$text': {'$search': q}},
                {'score': {'$meta': 'textScore'}}
            ).limit(600).sort([('score', {'$meta': 'textScore'})]))
    if len(total_papers) == 0:
        # No English hits: retry against the Chinese collection with a
        # jieba-segmented query string.
        qJieba = ' '.join(jieba.cut_for_search(q))
        if qJieba.find("\"") != -1:
            qJieba = qJieba.replace("\" ","\"").replace(" \"","\"")
        # FIX: str.find() returns -1 (truthy) when absent, so the original
        # `elif qJieba.find("-"):` fired almost always; compare to -1.
        elif qJieba.find("-") != -1:
            qJieba = qJieba.replace("- ","-")
        if c:
            total_papers = list(
                db.ChinesePaper.find({
                    'conference': c,
                    'code': {'$regex': code},
                    '$text': {'$search': qJieba}},
                    {'score': {'$meta': 'textScore'}}
                ).limit(600).sort([('score', {'$meta': 'textScore'})]))
        else:
            total_papers = list(
                db.ChinesePaper.find({
                    'code': {'$regex': code},
                    '$text': {'$search': qJieba}},
                    {'score': {'$meta': 'textScore'}}
                ).limit(600).sort([('score', {'$meta': 'textScore'})]))
    # Deduplicate by title, keeping the first (best-scored) occurrence.
    q_papers = []
    unique_title = []
    for p in total_papers:
        if p['title'] not in unique_title:
            unique_title.append(p['title'])
            q_papers.append(p)
    page = request.args.get(get_page_parameter(), type=int, default=1)
    pagination = Pagination(page=page, total=len(q_papers), css_framework="foundation",per_page=PAPER_NUM)
    # Temporarily turn 'YYYY-MM-DD' into an int so date sorting is numeric.
    for i,paper in enumerate(q_papers):
        q_papers[i]['datetime'] = int(paper['datetime'].replace('-',''))
    if rankBy != '相关':
        q_papers.sort(key = lambda k: k.get(rank[rankBy], 0),reverse = True)
    papers = q_papers[PAPER_NUM * (page - 1):PAPER_NUM * page]
    # FIX: restore the 'YYYY-MM-DD' form on the page actually displayed.
    # The original wrote back into q_papers[i] while iterating `papers`, so
    # on any page after the first the wrong entries were converted and the
    # displayed ones kept their integer form.
    for paper in papers:
        paper['datetime'] = str(paper['datetime'])[:4] + '-' + str(paper['datetime'])[4:6] + '-' + str(paper['datetime'])[6:]
    # Chinese and English papers get different abstract-preview lengths.
    for p in papers:
        if p['conference'] not in ['计算机学报', '软件学报', '计算机研究与发展']:
            p['abstract'] = ' '.join(p['abstract'].split(' ')[:40]).rstrip('...') + '...'
        else:
            p['abstract'] = p['abstract'][:180].rstrip('...') + '...'
    return render_template('query.html',
                           rankBy = rankBy,
                           c=c,
                           papers=papers,
                           pagination = pagination,
                           page=page,
                           keyword=q,
                           user=session['user'])
@app.route('/recommend')
def recommend(user=''):
    """Paginated recommendation page backed by the pre-loaded PAPERS list."""
    logout = request.args.get('logout')
    # latest = request.args.get('/?latest=true')
    page = request.args.get(get_page_parameter(), type=int, default=1)  # 1
    if 'user' not in session or logout == 'true':
        session['user'] = ''
    # NOTE(review): db.user.find(...) returns a cursor, which is always
    # truthy, so this condition is effectively always True — and if it were
    # ever False the view would fall through and return None. Confirm the
    # intended check (likely list(...)/find_one).
    if session['user'] == '' or db.user.find({'user': session['user']}):
        papers = PAPERS[PAPER_NUM * (page - 1):PAPER_NUM * page]
        pagination = Pagination(page=page, total=len(PAPERS), css_framework="foundation",
                                per_page=PAPER_NUM)
        # Chinese and English papers get different abstract-preview lengths.
        for p in papers:
            if p['conference'] not in ['计算机学报', '软件学报', '计算机研究与发展']:
                p['abstract'] = ' '.join(p['abstract'].split(' ')[:40]) + '...'
            else:
                p['abstract'] = p['abstract'][:180] + '...'
        return render_template('index.html',
                               papers=papers,
                               page=page,
                               pagination = pagination,
                               # totals=paginate_list(page),
                               user=session['user'])
@app.route('/latest')
def latest():
    """Paginated list of the most recent papers (latestPapers cache)."""
    logout = request.args.get('logout')
    page = request.args.get(get_page_parameter(), type=int, default=1)
    # (Removed an unused `paper = db.paper.aggregate(...)` call that issued
    # a wasted random-sample query on every request.)
    pagination = Pagination(page=page,total= len(latestPapers), css_framework="foundation",per_page=PAPER_NUM)
    latestCurrentPapers = latestPapers[(page - 1)*PAPER_NUM : page * PAPER_NUM]
    print(latestPapers[(page - 1)*PAPER_NUM : page * PAPER_NUM])
    # Chinese and English papers get different abstract-preview lengths.
    for p in latestCurrentPapers:
        if p['conference'] not in ['计算机学报', '软件学报', '计算机研究与发展']:
            p['abstract'] = ' '.join(p['abstract'].split(' ')[:40]).rstrip('...') + '...'
        else:
            p['abstract'] = p['abstract'][:180].rstrip('...') + '...'
    if 'user' in session:
        print(f"user in session: {session['user']}")
    if 'user' not in session or logout == 'true':
        session['user'] = ''
    return render_template('index.html',
                           papers=latestCurrentPapers,
                           page=page,
                           user=session['user'],
                           pagination =pagination,
                           show='latest')
@app.route('/login', methods=['POST', 'GET'])
def login():
    """Login/registration form handler.

    ?status=register creates the account if the username is free; otherwise
    username/password are validated against the `users` collection.
    """
    form = UserForm()
    status = request.args.get('status')
    # FIX: session['user'] raised KeyError on a first visit; use .get().
    print(f"status is {status} and user is {session.get('user', '')}")
    if form.validate_on_submit():
        username = form.username.data
        password = form.password.data
        # SECURITY: do not log the password (the original printed it).
        print(f'username is {username}')
        # NOTE(review): passwords are stored and compared in plain text —
        # hash them (e.g. werkzeug.security) before production use.
        '''register'''
        if status == 'register':
            u = db.users.find_one({'username': username})
            if not u:  # username not taken yet
                db.users.insert_one({'username':username,'password':password})
                session['user'] = username
                return redirect(url_for('interest'))
            else:
                session['user'] = ''
                return render_template('login.html',
                                       form=form,
                                       user = session['user'],
                                       status=status,
                                       errorInfo = 'register')
        # login
        else:
            u = db.users.find_one({'username':username})
            if u and u['password'] == password:
                session['user'] = username
                return redirect(url_for('recommend', user=username))
            else:
                session['user'] = ''
                return render_template('login.html',
                                       form=form,
                                       user=session['user'],
                                       status=status,
                                       errorInfo='login')
    # FIX: same KeyError on a plain GET before any session user exists.
    return render_template('login.html',
                           status=status,
                           form=form,
                           user=session.get('user', ''))
@app.route('/about')
def about():
    """Render the about page for the current (possibly anonymous) user."""
    user = session.get('user', '')
    return render_template('about.html', user=user)
@app.route('/interest')
def interest():
    """Interest-selection page shown right after registration."""
    logout = request.args.get('logout')
    # (Removed an unused `request.args.get('/?latest=true')` lookup — the
    # key is nonsensical and the value was never read.)
    if 'user' in session:
        print(f"user in session: {session['user']}")
    if 'user' not in session or logout == 'true':
        session['user'] = ''
    form = InterestForm()
    return render_template('interest.html', form=form, user=session['user'])
@app.route('/dataset')
def dataset():
    """Render the dataset page; reset the session user on logout."""
    if 'user' in session:
        print(f"user in session: {session['user']}")
    if 'user' not in session or request.args.get('logout') == 'true':
        session['user'] = ''
    return render_template('dataset.html', user=session['user'])
@app.route('/conference',methods = ['GET','POST'])
def conference():
    """Browse papers of one venue/year, with sorting and code-only filter."""
    logout = request.args.get('logout')
    # Sort by date or citation count ('时间' is the default).
    rankBy = request.args.get('rankBy') if request.args.get('rankBy') else '时间'
    # Non-empty -> only papers whose code field matches.
    code= request.args.get('code', type=str, default='')
    # Journal or conference browsing mode.
    journalOrConference = request.args.get('journalOrConference') if request.args.get('journalOrConference') else 'conference'
    conference_type = request.args.get('type', type=str, default='CCF-AI-A') if journalOrConference == 'conference' else list(all_conference['journal'].keys())[0]
    # When a venue name is given, find which category it belongs to.
    if request.args.get('name'):
        # FIX: the original iterated all_conference.items(), whose values are
        # dicts of category -> names, so `name in value` tested the dict
        # *keys* and never matched — conference_type was never derived from
        # the requested name. Look it up inside the selected mode's
        # categories instead.
        requested_name = request.args.get('name')
        for category, names in all_conference[journalOrConference].items():
            if requested_name in names:
                conference_type = category
    # Default: the first venue of the category, most recent typical year.
    conference = request.args.get('name') if request.args.get('name') else all_conference[journalOrConference][conference_type][0]
    year = request.args.get('year') if request.args.get('year') else '2019'
    page = request.args.get(get_page_parameter(), type=int, default=1)
    if 'user' in session:
        print(f"user in session: {session['user']}")
    if 'user' not in session or logout == 'true':
        session['user'] = ''
    # All venues of the chosen category, with a button color every 2 venues.
    conferences = all_conference[journalOrConference][conference_type]
    conference_button_color = {}
    for c in enumerate(conferences):
        conference_button_color[c[1]] = button_color[c[0]//2%len(button_color)]
    print(code)
    current_page_papers = list(db.paper.find({
        'year':year,
        'conference':conference,
        'code':{'$regex':code}
    }).skip(PAPER_NUM*(page-1)).sort(rank[rankBy], pymongo.DESCENDING).limit(PAPER_NUM))
    # Pagination over the venue/year's full result count.
    pagination = Pagination(page=page, total=db.paper.find({ 'year':year,'conference':conference}).count(),css_framework="foundation",per_page=PAPER_NUM)
    # Chinese and English papers get different abstract-preview lengths.
    for p in current_page_papers:
        if p['conference'] not in ['计算机学报','软件学报','计算机研究与发展']:
            p['abstract'] = ' '.join(p['abstract'].split(' ')[:40]).rstrip('...') + '...'
        else:
            p['abstract'] = p['abstract'][:180].rstrip('...') + '...'
    return render_template('conference.html',
                           current_page_papers=current_page_papers,
                           page=page,
                           year=year,
                           rankBy = rankBy,
                           types = all_conference[journalOrConference].keys(),
                           journalOrConference = journalOrConference,
                           conference=conference,
                           user=session['user'],
                           pagination=pagination,
                           conferences = conferences,
                           conference_years = conference_years,
                           conference_button_color=conference_button_color,
                           conference_type = conference_type
                           )
@app.route('/home',methods = ['GET','POST'])
def home():
    """User home page: browsing history and collection (folder) management.

    Dispatches on ?choice= (collection / collectionSimilarPaper /
    collectionContent / collectionDelete / paperDelete / default history
    view); ?c + ?n create a new collection named n.
    """
    user = session['user']
    choice = request.args.get('choice')
    '''c->create collection n->the name of the collection'''
    if request.args.get('c'):
        useCollection=db.collec.find_one({
            'user':user
        })
        files = []
        # First collection for this user: initialise the document.
        if useCollection == None:
            db.collec.insert_one({'user':user,'files':[]})
            useCollection = db.collec.find_one({
                'user': user
            })
        # Collection names must be unique.
        # NOTE(review): `files` is still [] here — the real list is only
        # loaded *inside* this branch, so this uniqueness check is always
        # True and duplicate names are not actually prevented. Confirm.
        if request.args.get('n') not in files:
            files = useCollection['files']
            files.append(request.args.get('n'))
            db.collec.update_one(
                {
                    'user':user
                },{
                    '$set':{
                        'files':files,
                        request.args.get('n'): []
                    }
                }
            )
        return render_template('home.html',
                               user=user,
                               choice = 'collection',
                               files = files
                               )
    # Collections overview page.
    if choice == 'collection':
        useCollection = db.collec.find_one({
            'user': user
        })
        if useCollection == None:
            useCollection={'files':[]}
        return render_template('home.html',
                               user = user,
                               choice=choice,
                               files = useCollection['files'])
    # Similar papers for a given collection (top 60 recommendations).
    # NOTE(review): relies on the module-global user_sim loaded in the
    # __main__ block; raises KeyError for users/collections without entries.
    elif choice == 'collectionSimilarPaper':
        collectionSimilarPapers=[db.paper.find_one({'paperId':paperId}) for num,paperId in enumerate(user_sim[user][request.args.get('collectionName')]) if num < 60]
        print('collectionSimilarPapers:',collectionSimilarPapers)
        return render_template('home.html',
                               choice='collectionSimilarPaper',
                               user=session['user'],
                               collectionSimilarPapers=collectionSimilarPapers)
    # View one collection's contents.
    elif choice == 'collectionContent':
        collectionName = request.args.get('collectionName')
        collectionContent = db.collec.find_one({
            'user': user
        })[collectionName]
        return render_template('home.html',
                               choice='collectionContent',
                               user=session['user'],
                               collection = collectionName,
                               collectionContent=collectionContent)
    # Delete a whole collection.
    elif choice =='collectionDelete':
        collectionName=request.args.get('collectionName')
        files = db.collec.find_one({'user':user})['files']
        collectionFiles = [f for f in files if f != collectionName]
        db.collec.update_one({
            'user':user
        },{
            '$set':{'files':collectionFiles},
            '$unset': {collectionName: 1}
        })
        useCollection = db.collec.find_one({
            'user':user
        })
        return render_template('home.html',
                               user=user,
                               choice='collection',
                               files=useCollection['files'])
    # Remove one paper from a collection.
    elif choice == 'paperDelete':
        paperId = request.args.get('paperId',type=str,default = '')
        collectionName = request.args.get('collectionName')
        papers = db.collec.find_one({'user': user})[collectionName]
        newPapers = [p for p in papers if p['paperId'] != paperId]
        db.collec.update_one({
            'user': user
        }, {
            '$set': {collectionName: newPapers},
        })
        useCollection = db.collec.find_one({
            'user': user
        })
        return render_template('home.html',
                               user=user,
                               choice='collectionContent',
                               files=useCollection[collectionName])
    # Default: the user's browsing-history page, newest first.
    else:
        user_data = list(
            db.user.find({
                'user': user
            }).sort('date', pymongo.DESCENDING))
        for i in range(len(user_data)):
            user_data[i]['num'] = i + 1
        return render_template('home.html', user_data=user_data, user=user,choice = choice)
# Record a paper view in the user's browsing history.
@app.route('/record', methods=['POST', 'GET'])
def record():
    """Insert or refresh a browsing-history entry for the current user."""
    data = request.json
    paperId = data['id']
    user = session['user']
    print('record:',paperId)
    res = list(db.user.find({'user': user, 'paperId': paperId}))
    print("res:",res)
    paper = db.paper.find_one({'paperId': paperId})
    if len(res) == 0:
        date = datetime.now().strftime('%Y-%m-%d')
        print(date)
        # FIX: store paperId too — the lookup above queries on it, but the
        # original insert omitted it, so an existing entry was never found
        # and every repeat view inserted a duplicate history row.
        db.user.insert_one({
            'user': user,
            'paperId': paperId,
            'code': paper['code'],
            'title': paper['title'],
            'date': date
        })
    else:
        # Already recorded: just bump the last-viewed date.
        date = datetime.now().strftime('%Y-%m-%d')
        db.user.update_one({
            'user': user,
            'title': paper['title']
        }, {'$set': {
            'date': date,
        }})
    return render_template_string('Add log data to dataset.')
@app.route('/paperPage',methods=['POST', 'GET'])
def paperPage():
    """Paper detail page: metadata, similar papers, and save-to-collection."""
    print('herehere')
    user = session['user']
    logout = request.args.get('logout')
    paperId = request.args.get('paperId')
    # Comment/review feature (currently disabled):
    # if request.args.get('commentSubmit') == 'True':
    #     db.paperInfo.update_one({
    #         'paperId':paperId
    #     },{
    #         '$addToSet':{
    #             'reviews':{'content':request.args.get('comment'),'reviewer':user,'time':datetime.today().strftime('%Y-%m-%d')}
    #         }
    #     })
    # NOTE(review): sim_dict is the module-global loaded in the __main__
    # block; an unknown paperId raises KeyError here.
    similarPaperId = sim_dict[paperId]
    # Up to 10 similar papers, excluding the paper itself.
    similarPapers = [db.paper.find_one({'paperId':pid}) for num,pid in enumerate(similarPaperId) if num < 10 and pid != paperId]
    paperInfo = db.paper.find_one({
        'paperId': paperId
    })
    '''
    评论版块 关闭
    reviews = db.paperInfo.find_one({
        'paperId':paperId
    }
    )['reviews']
    '''
    # The user's collection (folder) names, for the save dropdown.
    useCollection = db.collec.find_one({
        'user': user
    })
    if useCollection == None:
        collections=[]
    else:
        collections = useCollection['files']
    if 'user' in session:
        print(f"user in session: {session['user']}")
    if 'user' not in session or logout == 'true':
        session['user'] = ''
    collectionName = request.args.get('whichCollection')
    if collectionName:  # save paperInfo into the chosen collection
        print('addtoset ',collectionName)
        print(type(paperInfo))
        db.collec.update_one({
            'user': user
        }, {
            '$addToSet': {
                collectionName: paperInfo
            }
        })
        return render_template('paperPage.html',
                               user=session['user'],
                               paperInfo=paperInfo)
    return render_template('paperPage.html',
                           user=user,
                           paperInfo=paperInfo,
                           # reviews = reviews,
                           similarPapers=similarPapers,
                           collections = collections
                           )
if __name__ == '__main__':
    # Load the precomputed paper-similarity mapping used by /paperPage.
    print('loading paper similarities', Config.sim_path)
    sim_dict = pickle.load(open(Config.sim_path, "rb"))
    # User-level recommendations are optional; default to an empty dict.
    print('loading user recommendations', Config.user_sim_path)
    user_sim = {}
    if os.path.isfile(Config.user_sim_path):
        user_sim = pickle.load(open(Config.user_sim_path, 'rb'))
    # NOTE(review): pickle.load assumes these files are trusted artifacts.
    app.run(host='127.0.0.1', debug=True)
|
991,894 | adcb64f0c74f0f25bdfd3240a132dba4a52785e9 | # -*- coding: utf-8 -*-
import json
from odoo import http
from odoo.http import request
import requests
from .. import defs
from .base import BaseController, UserException
from weixin.pay import WXAppPay
import time
import logging
_logger = logging.getLogger(__name__)
TIMEOUT = 5
def get_order_code(id):
    """Build an order number: local timestamp (yyyymmddHHMMSS) + record id."""
    timestamp = time.strftime('%Y%m%d%H%M%S', time.localtime())
    return timestamp + str(id)
class WxappPayment(http.Controller, BaseController):
    """WeChat mini-program payment endpoints.

    Handles access-token caching, the payment notify callback (stub), the
    post-payment template-message push, and the unified-order request.
    """
    def req_token(self, sub_domain):
        """Fetch a fresh access token from WeChat for this sub-domain's app.

        Returns the decoded JSON response (contains ``access_token`` and
        ``expires_in``) or an error payload when credentials are missing.
        """
        try:
            config = request.env['wxapp.config'].sudo()
            app_id = config.get_config('app_id', sub_domain)
            secret = config.get_config('secret', sub_domain)
            if not app_id or not secret:
                return self.res_err(404)
            params = {}  # NOTE(review): unused — candidate for removal
            url = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=%s&secret=%s" % (
                app_id, secret)
            res = requests.post(url)
            return res.json()
        except Exception as e:
            return self.res_err(-1, str(e))
    def get_token(self, sub_domain):
        """Return a cached access token for ``sub_domain``.

        Refreshes via :meth:`req_token` when no token is stored yet or the
        stored one has expired.
        """
        try:
            ret, entry = self._check_domain(sub_domain)
            if ret:
                return ret
            wxToken = request.env(user=1)['wxapp.token'].search([
                ('sub_domain', '=', sub_domain),
            ])
            if not wxToken:
                # First request for this sub-domain: fetch and persist a token.
                token = self.req_token(sub_domain)
                expires_in = time.time() + token['expires_in']
                data = {
                    'access_token': token['access_token'],
                    'expires_in': expires_in,
                    'sub_domain': sub_domain,
                }
                wxToken = request.env(user=1)['wxapp.token'].create(data)
            else:
                if wxToken.expires_in < time.time():
                    # Cached token expired: refresh it in place.
                    token = self.req_token(sub_domain)
                    expires_in = time.time() + token['expires_in']
                    data = {
                        'access_token': token['access_token'],
                        'expires_in': expires_in,
                        'sub_domain': sub_domain,
                    }
                    wxToken.write(data)
            return wxToken.access_token
        except Exception as e:
            return self.res_err(-1, str(e))
    @http.route('/<string:sub_domain>/notify', auth='public', methods=['POST', 'GET'], csrf=False, type='http')
    # Payment notify callback
    def notify(self, sub_domain, **kwargs):
        """WeChat pay notify callback (stub — echoes the token and params)."""
        # todo: implement real payment-callback handling
        token = self.get_token(sub_domain)
        return self.res_ok([token, kwargs])
    @http.route('/<string:sub_domain>/template-msg/wxa/formId', auth='public', methods=['POST', 'GET'], csrf=False, type='http')
    # Push the payment-success template message
    def formid(self, sub_domain, **kwargs):
        """Send the payment-success WeChat template message for an order.

        Looks up the session by ``token``, marks the sale order pending, and
        posts a template message keyed by the mini-program ``formId``.
        """
        # todo: payment message push
        try:
            token = kwargs.pop('token', None)
            formId = kwargs.pop('formId', None)
            orderId = kwargs.pop('orderId', '')
            wxapp_access_token = request.env(user=1)['wxapp.access_token'].search([
                ('token', '=', token),
            ])
            if not wxapp_access_token:
                return self.res_err(901)
            saleOrder = request.env(user=1)['sale.order'].search([('id', '=', orderId)])
            if not saleOrder:
                return self.res_err(901)
            saleOrder.write({'customer_status': 'pending'})
            openid = wxapp_access_token.open_id
            access_token = self.get_token(sub_domain)
            pay_goods = '云辅材小程序下单'
            pay_time = str(time.strftime(
                '%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
            pay_type = '微信支付'
            pay_fee = 100  # NOTE(review): unused — candidate for removal
            url = 'https://api.weixin.qq.com/cgi-bin/message/wxopen/template/send?access_token=%s' % access_token
            params = {
                "touser": openid,
                "template_id": "VtZGskB7XJ-EzTsCjR3LpOXJ-f_1OIDgEiZ8X2JWNCU",
                "page": "index",
                "form_id": formId,
                "data": {
                    "keyword1": {
                        "value": saleOrder.name
                    },
                    "keyword2": {
                        "value": pay_goods
                    },
                    "keyword3": {
                        "value": str(saleOrder.total)
                    },
                    "keyword4": {
                        "value": pay_type
                    },
                    "keyword5": {
                        "value": pay_time
                    }
                },
                "emphasis_keyword": "keyword1.DATA"
            }
            res = requests.post(url, json=params)
            return self.res_ok(params)
        except Exception as e:
            _logger.exception(e)
            return self.res_err(-1, str(e))
    @http.route('/<string:sub_domain>/pay/wx/wxapp', auth='public', methods=['POST', 'GET'], csrf=False, type='http')
    # WeChat pay: unified order
    def wxPay(self, sub_domain, **kwargs):
        """Create a WeChat unified order for the amount in ``money``.

        ``money`` arrives in yuan and is converted to fen (x100); the order
        id comes from the JSON-encoded ``nextAction`` parameter.
        """
        try:
            ret, entry = self._check_domain(sub_domain)
            if ret:
                return ret
            token = kwargs.pop('token', None)
            total_fee = int(float(kwargs.pop('money')) * 100)  # yuan -> fen
            nextAction = kwargs.pop('nextAction', None)
            actionJson = json.loads(nextAction)
            orderId = actionJson['id']
            body = str(orderId)
            out_trade_no = get_order_code(orderId)
            access_token = request.env(user=1)['wxapp.access_token'].search([
                ('token', '=', token),
            ])
            if not access_token:
                return self.res_err(901)
            openid = access_token.open_id
            # return self.res_ok([entry.app_id, entry.wechat_pay_id, entry.wechat_pay_secret, openid])
            wxPay = WXAppPay(entry.app_id, entry.wechat_pay_id,
                             partner_key=entry.wechat_pay_secret, notify_url='http://erp.yunfc.net/yunfc/notify')
            # return self.res_ok([body, out_trade_no, total_fee, openid, wxPay.mch_id])
            res = wxPay.unifiedorder(
                body=body, out_trade_no=out_trade_no, total_fee=total_fee, openid=openid)
            res.update({'orderId': orderId})
            return self.res_ok(res)
        except Exception as e:
            _logger.exception(e)
            return self.res_err(-1, str(e))
|
991,895 | df05160e17b4f848cae9f5469d977035a56d9a99 | import pyautogui
pyautogui.typewrite("Dhiraj loves Python Programming language",interval=0.30)
|
991,896 | 2a437a3408ff75c535f2a6b0a39cd368ffaedeed | import requests
import datetime
import time
def las_seven():
    """Print COVID-19 global totals for each of the seven days before 2020-07-21.

    Queries the RapidAPI covid-19-data endpoint once per day (newest first),
    sleeping one second between requests to respect the rate limit.
    Improvements over the original: the date is kept as a datetime instead of
    being re-parsed from a string every iteration, and the constant headers
    are built once outside the loop.
    """
    url = "https://covid-19-data.p.rapidapi.com/report/totals"
    headers = {
        # SECURITY: hard-coded API key — move it to an env var / config file.
        'x-rapidapi-key': "277ae669b3msh52f0bc0d898f114p159b11jsn27bf26ae14ea",
        'x-rapidapi-host': "covid-19-data.p.rapidapi.com"
    }
    base_date = datetime.datetime.strptime("2020-07-21", '%Y-%m-%d')
    for offset in range(1, 8):
        dater = (base_date - datetime.timedelta(days=offset)).strftime('%Y-%m-%d')
        print(dater)
        querystring = {"date": dater}
        response = requests.request("GET", url, headers=headers, params=querystring)
        print(response.text)
        time.sleep(1)
|
991,897 | 2e52a13ef9bdbaccd51521fab439241076f4f203 | # -*- coding: utf-8 -*-
"""单链表实现
1. 插入、删除、查找操作
2. 链表中存储的int类型数据
@Time : 2021-02-22 00:16
@Author : Hao
"""
class SinglyLinkedList:
class Node:
def __init__(self, data=None):
self.data = data
self.next = None
def __init__(self):
self.head = None
def find_by_value(self, value):
if not self.head:
return None
cur = self.head
while cur:
if cur.value == value:
return cur
cur = cur.next
return None
def find_by_index(self, index):
if not self.head:
return -1
cur = self.head
count = 0
while cur:
count += 1
if count == index:
return cur
cur = cur.next
return -1
def insert_to_head(self, value):
"""表头插入,无头节点逆序插入"""
new_node = SinglyLinkedList.Node(value)
if not self.head:
self.head = new_node
return
new_node.next = self.head
self.head = new_node
def insert_to_tail(self, value):
"""表尾插入,无头节点顺序插入"""
new_node = SinglyLinkedList.Node(value)
if not self.head:
self.head = new_node
return
cur = self.head
while cur.next:
cur = cur.next
cur.next = new_node
def insert_to_head_with_dummy_node(self, value):
"""表头插入,有哨兵头节点逆序插入"""
dummy_node = SinglyLinkedList.Node()
new_node = SinglyLinkedList.Node(value)
new_node.next = dummy_node.next
dummy_node.next = new_node
def insert_to_tail_dummy_node(self, value):
"""表尾插入,有哨兵节点顺序插入"""
dummy_node = SinglyLinkedList.Node()
new_node = SinglyLinkedList.Node(value)
cur = dummy_node
while cur.next:
cur = cur.next
cur.next = new_node
def insert_before(self, p, value):
"""插入到p节点之前"""
# find p
dummy_node = SinglyLinkedList.Node()
new_node = SinglyLinkedList.Node(value)
cur = dummy_node
while cur.next:
if cur.next.data == p.data:
new_node.next = cur.next
cur = new_node
break
cur = cur.next
raise Exception("Can not find p, please check!")
def insert_after(self, p, value):
"""插入到p节点之后"""
dummy_node = SinglyLinkedList.Node()
new_node = SinglyLinkedList.Node(value)
cur = dummy_node.next
while cur:
if cur.data == p.data:
if not cur.next:
cur.next = new_node
else:
new_node.next = cur.next
cur = new_node
break
cur = cur.next
raise Exception("Can not find p, please check!")
def delete_by_node(self, p):
dummy_node = SinglyLinkedList.Node()
cur = dummy_node
while cur.next:
if cur.next.data == p.value:
cur.next = cur.next.next
cur = cur.next
raise Exception("Can not find p, please check!")
def delete_by_value(self, value):
dummy_node = SinglyLinkedList.Node()
cur = dummy_node
while cur.next:
if cur.next.data == value:
cur.next = cur.next.next
cur = cur.next
raise Exception("Can not find p, please check!")
def print_all(self):
dummy_node = SinglyLinkedList.Node()
dummy_node.next = self.head
cur = dummy_node
print("[",end='')
while cur.next:
if cur.next.next:
print(cur.next.data,end=',')
else:
print(cur.next.data,end='')
cur = cur.next
print("]",end='')
def is_palindrome(self):
"""判断是否回文"""
# find middle
if not self.head:
return False
tmp = self.head
p = self.head
q = self.head
if not p.next:
return True
while q.next and q.next.next:
p = p.next
q = q.next.next
first_half_end = p
second_half_start = self.inverse_linked_list_with_dummy_node(first_half_end.next)
# reverse right linklist
first_position = self.head
second_position = second_half_start
# compare left and right list
result = True
while result and second_position:
if first_position.data != second_position.data:
result = False
break
second_position = second_position.next
first_position = first_position.next
# 还原链表并返回结果
p.next = self.inverse_linked_list_with_dummy_node(second_half_start)
self.head = tmp
return result
def inverse_linked_list_with_dummy_node(self, p):
"""从任意节点反转后半段"""
self.head = p
if self.head is None:
return
cur = self.head
last = None # 存储前一个节点指针
while cur:
tmp = cur.next
cur.next = last
last = cur
cur = tmp
self.head = last
return last
def inverse_linked_list_by_recuise(self):
"""递归方式反转链表"""
pass
def inverse_linked_list(self):
"""无头节点的链表反转"""
pass
if __name__ == "__main__":
sll = SinglyLinkedList()
sll.insert_to_head(1)
sll.insert_to_head(2)
sll.insert_to_head(3)
sll.insert_to_head(2)
sll.insert_to_head(1)
sll.print_all()
# sll.inverse_linked_list_with_dummy_node(sll.head)
res = sll.is_palindrome()
print(res)
sll.print_all()
|
991,898 | bc721c9f907a11213ef8ffd360d8153a41507403 | #-------------------------------------------------------------------------------
# Name: 100_3n_1_alt.py
# Purpose: Solves the ACM Problem
# https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=36
# count the number of times memoization saves
# Usage: python 100_3n_1_alt.py
# Author: Di Zhuang
# Created: 07/30/2015
# Copyright: (c) Di Zhuang 2015
#-------------------------------------------------------------------------------
import time
import numpy as np
def timeit(func):
def decorator(*args, **kwargs):
start = time.clock()
func(*args, **kwargs)
end = time.clock()
#print 'start(clock): %0.6f secs' % start
#print 'end: %0.6f secs' % end
print '%s took %0.6f secs' % (func.__name__, end - start)
return decorator
class max_cycle(object):
    """Memoized Collatz (3n+1) cycle-length computer.

    Cycle lengths for n < arrlen are cached in a numpy array; ``__count``
    tracks how many recursive calls the memoization (and the odd-number
    shortcut) saves.
    """
    def __init__(self, arrlen=1000000):
        # __arr[n] caches the cycle length of n (0 = not computed yet).
        # __arr[1] is seeded with 1, so cycle(1) reports length 1 and
        # func's n == 1 base case (which returns 0) is never hit via cycle().
        self.__arr = np.zeros(arrlen, dtype=np.int32)
        self.__arr[1] = 1
        self.__lenarr = arrlen
        self.__count = np.zeros(arrlen, dtype=np.int32) # count the number of calls saved via memoization
    def count(self):
        """Total number of calls saved so far across all memoized values."""
        return sum(self.__count)
    def cycle(self, n, opt=True):
        """Return the cycle length of n, using the cache when n < arrlen."""
        if n < self.__lenarr:
            if self.__arr[n] == 0:
                self.__arr[n] = self.func(n, opt)
            else:
                # Cache hit: one recursive computation avoided.
                self.__count[n] += 1
            return self.__arr[n]
        else:
            return self.func(n, opt)
    def func(self, n, opt=True):
        """Compute the cycle length of n recursively.

        With opt=True an odd n jumps straight to (3n+1)/2 = n + (n >> 1) + 1,
        covering two Collatz steps at once (hence the +2).
        """
        if n == 1:
            return 0
        else:
            if n % 2 == 1:
                if opt:
                    if n < self.__lenarr: # otherwise, the number is not memoized
                        self.__count[n] += 1
                    return self.cycle(n + (n >> 1) + 1) + 2
                else:
                    return self.cycle(3 * n + 1) + 1
            else:
                return self.cycle(n >> 1) + 1
    def trial(self, start, end, opt=True):
        """Compute the max cycle length over [start, end].

        Returns (max_cycle_length, saved_call_count, elapsed_seconds).
        """
        t0 = time.clock()
        max_cycle_length = 0
        for i in xrange(start, end+1):
            max_cycle_length = max(max_cycle_length, self.cycle(i, opt))
        duration = time.clock() - t0
        return max_cycle_length, self.count(), duration
def run_trials():
    """Benchmark the memoized solver on ranges 1..10^i (i = 2..6), with and
    without the odd-number shortcut, printing one comparison row per run."""
    print "{:^12s}\t{:>10s}\t{:>10s}\t{:>12s}\t{:s}".format("range", "max cycle", "shortcut", "saved calls", "time")
    for i in xrange(2, 7):
        for opt in [True, False]:
            # Fresh instance per configuration so each run starts with an
            # empty cache and the saved-call counts are comparable.
            mc = max_cycle()
            max_cycle_length, saved_calls, duration = mc.trial(1, 10**i, opt)
            print '{:1d} - {:>8d}\t{:>10d}\t{:>10s}\t{:>12d}\t{:>5.6f} secs'.format(
                1, 10**i, max_cycle_length, str(opt), saved_calls, duration)
if __name__ == "__main__":
run_trials() |
991,899 | 1d12bc4586bd7ba7a3c270ad1530a0cabbed71d1 | import risk.logger
import risk.commands
from risk.ai import RiskBot
from risk.errors.game_master import *
from risk.player import HumonRiskPlayer
class GameMaster(object):
    """Coordinates a Risk game: players, AI bots, turn flow and end-of-turn hooks."""

    def __init__(self, board, settings, num_ai=7):
        self.board = board
        # need to setup with settings later
        self.bots = [RiskBot() for i in xrange(num_ai)]
        risk.logger.debug(
            'Game master instance created with %s bots!' % num_ai)
        self.ended = False
        self.end_turn_callbacks = []
        self.players = []

    ###########################################################################
    ## Setup actions
    #
    def choose_territories(self):
        """Territory selection phase (not implemented yet)."""
        pass

    def add_end_turn_callback(self, callback):
        """Register a callable invoked with this master at the end of each turn."""
        self.end_turn_callbacks.append(callback)

    def generate_human_players(self, number_of_players):
        """Create the requested number of human players."""
        for i in xrange(number_of_players):
            self.players.append(HumonRiskPlayer("Human %s" % i))

    ###########################################################################
    ## Run time events/hooks
    #
    def call_end_turn_callbacks(self):
        """Fire all registered end-of-turn callbacks unless the game has ended."""
        risk.logger.debug('Calling end of turn callbacks')
        if not self.ended:
            for callback in self.end_turn_callbacks:
                callback(self)

    ###########################################################################
    ## Game state queries
    #
    def number_of_players(self):
        """Return how many (human) players are registered."""
        return len(self.players)

    def end_game(self):
        """Mark the game as finished; further end-turn callbacks are suppressed."""
        risk.logger.debug('Ending game!')
        self.ended = True

    ###########################################################################
    ## Player actions
    #
    def player_take_turn(self, player_index):
        """Let the player at ``player_index`` take a turn.

        Raises NoSuchPlayerError for an out-of-range index.
        """
        try:
            self.players[player_index].take_turn(self)
        except IndexError:
            # Bug fix: number_of_players is a method — the original passed
            # the bound method object instead of calling it.
            raise NoSuchPlayerError(player_index, self.number_of_players())

    def player_territories(self, player):
        # TODO implement
        return []

    def player_attack(self, player, origin, target):
        # TODO implement
        return 0, 0

    def player_add_infantry(self, player, territory):
        # TODO implement
        return 0, 0

    def player_add_cavalry(self, player, territory):
        # TODO implement
        return 0, 0

    def player_add_artilery(self, player, territory):
        # TODO implement
        return 0, 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.