index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
16,700 | 445590c24cac1bb222db0268a4b07ae8acdac1b8 | from jira import JIRA
from settings import JIRA_USERNAME, JIRA_PASSWORD, JIRA_SERVER
def create_jira_backend():
    """Build an authenticated JIRA client from the settings module.

    validate=True makes the constructor check the credentials immediately
    instead of failing on the first API call.
    """
    credentials = (JIRA_USERNAME, JIRA_PASSWORD)
    return JIRA(server=JIRA_SERVER, basic_auth=credentials, validate=True)
|
16,701 | 7a33ebd29eb31b204eb50c89178028cd99854b1c | S=[int(s) for s in list(input())]
# A number is divisible by 9 exactly when its digit sum is divisible by 9.
if sum(S)%9==0:
    print('Yes')
else:
    print('No') |
16,702 | 47538036bffee415199dd3bbdf4dd386e00c54b3 |
# -*- coding: utf-8 -*-
"""
models.py
flask
Created on 10|04
-----
20|18
@author: rdlc_Dev(alain)
@version:1.180410
"""
# Import the database object (db) from the main application module
# We will define this inside /app/__init__.py in the next sections.
from werkzeug.security import check_password_hash
class User():
    """Minimal Flask-Login style user object wrapping a username."""

    def __init__(self, username):
        self.username = username

    def is_authenticated(self):
        # Every constructed User represents a logged-in user.
        return True

    def is_active(self):
        # Accounts are never deactivated in this model.
        return True

    def is_anonymous(self):
        return False

    def get_id(self):
        # Flask-Login uses this value to reload the user from the session.
        return self.username

    @staticmethod
    def validate_login(password_hash, password):
        """Check a plaintext password against a werkzeug password hash."""
        return check_password_hash(password_hash, password)
class ListCollection():
    """Plain record describing one item of a music collection."""

    def __init__(self, artists, title, year, release, rangement, place):
        # Pure data holder: every field is stored verbatim.
        self.artists = artists
        self.title = title
        self.year = year
        self.release = release
        self.rangement = rangement
        self.place = place
|
16,703 | 3dc6b096ff1b14f52d54c0097003c059f76391fa | import unittest
from ps4a import *
# Test cases
class TestCodeEdX(unittest.TestCase):
    """Extra unit tests for ps4a (functions not covered by test_ps4a.py)."""
    def test_count(self):
        # Only adding tests for functions that are not in tested in test_ps4a.py
        # calculateHandlen should sum the letter counts: 3 + 2 + 1 = 6.
        self.assertEqual(calculateHandlen({'a': 3, 'b':2, 'c':1}), 6)
# Execute unit testing
if __name__ == '__main__':
unittest.main() |
16,704 | 1272b37d35d0d72d704a1e0ddc991e5ad22677cf | from logfile import filelog
|
16,705 | 5fc87dd2acd0e0f4f9b520558e66021320666f58 | import sys
def score_string(s):
    """Score a '{...}' group stream (Advent of Code 2017 day 9 rules).

    Rules: '!' cancels the next character; '<'...'>' delimits garbage whose
    interior characters are counted but otherwise ignored; each '{' opens a
    group worth its nesting depth.

    Returns a tuple (total_group_score, garbage_character_count).
    """
    depth = 0
    total = 0
    garbage_chars = 0
    inside_garbage = False
    cancel_next = False
    for ch in s:
        if cancel_next:
            cancel_next = False
            continue
        if ch == "!":
            cancel_next = True
            continue
        if inside_garbage:
            if ch == ">":
                inside_garbage = False
            else:
                garbage_chars += 1
            continue
        if ch == "<":
            inside_garbage = True
        elif ch == "{":
            depth += 1
            total += depth
        elif ch == "}":
            depth -= 1
    return total, garbage_chars
# CLI entry point: score the stream given as the first argument and print
# only the garbage character count (the group score is discarded).
score, garbage = score_string(sys.argv[1])
print(garbage)
|
16,706 | f69362c722d0c499511fbfa7cd38fe0cb69ba4ea | import torch
import torch.nn as nn
class CNN(nn.Module):
    """Two-block convolutional net for 1x28x28 inputs (e.g. MNIST).

    forward() returns (logits, flattened_features): the 10-way class scores
    and the flattened conv output used to produce them.
    """

    def __init__(self):
        super().__init__()
        # Each block keeps the spatial size (5x5 conv, padding 2) and then
        # halves it with 2x2 max pooling: 28 -> 14 -> 7.
        self.conv1 = nn.Sequential(
            nn.Conv2d(1, 16, kernel_size=5, stride=1, padding=2),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
        )
        self.conv2 = nn.Sequential(
            nn.Conv2d(16, 32, kernel_size=5, stride=1, padding=2),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
        )
        # 32 channels x 7 x 7 spatial -> 10 class scores.
        self.out = nn.Linear(32 * 7 * 7, 10)

    def forward(self, x):
        features = self.conv2(self.conv1(x))
        flat = features.view(features.size(0), -1)
        return self.out(flat), flat
|
16,707 | 6997ad57bab3e1ba012c77f5e15ee7c27180ebda | from backend import app, db
from backend.models import Movie
from flask import jsonify, request
# TODO set the url for accept request
# 1. /append
# 2. /all
# 3. /search
# 4. /delete
# 5. /modify
@app.route('/append', methods=['POST'])
def append():
    """Insert a new Movie from the JSON body: {'title': ..., 'year': ...}."""
    # title = request.form.get('title')
    # year = request.form.get('year')
    title = request.json.get('title')
    year = request.json.get('year')
    db.session.add(Movie(title = title, year = year))
    db.session.commit()
    return jsonify({
        'status': 'append success',
    })
@app.route('/all', methods=['POST'])
def all():
    """Return every movie, serialized.

    NOTE(review): shadows the builtin all() at module level; renaming would
    change the endpoint name used by url_for(), so it is kept as-is.
    """
    movies = Movie.query.all()
    return jsonify({
        'status': 'get all movies success',
        'data': list(map(lambda x: x.serialize(), movies)),
    })
@app.route('/search', methods=['POST'])
def search():
    """Exact-match search on 'title' or 'year' ({'field': ..., 'string': ...})."""
    # field = request.form.get('field')
    # string = request.form.get('string')
    field = request.json.get('field')
    string = request.json.get('string')
    movies = []
    status = 'search success'
    if field == 'title':
        movies = Movie.query.filter(Movie.title == string).all()
    elif field == 'year':
        movies = Movie.query.filter(Movie.year == string).all()
    else:
        # Unknown field: report failure with an empty result list.
        status = 'search failed'
    return jsonify({
        'status': status,
        'data': list(map(lambda x: x.serialize(), movies))
    })
@app.route('/delete', methods=['POST'])
def delete():
    """Delete the movie with the given id.

    NOTE(review): Movie.query.get(id) returns None for an unknown id, which
    makes session.delete raise — confirm callers always send valid ids.
    """
    id = request.json.get('id')
    movie = Movie.query.get(id)
    db.session.delete(movie)
    db.session.commit()
    return jsonify({
        'status': 'delete success',
    })
@app.route('/modify', methods=['POST'])
def modify():
    """Update title and/or year of a movie; absent fields keep old values."""
    id = request.json.get('id')
    title = request.json.get('title')
    year = request.json.get('year')
    movie = Movie.query.get(id)
    # Handle missing (null) values: fall back to the stored field.
    if title == None:
        title = movie.title
    if year == None:
        year = movie.year
    movie.title = title
    movie.year = year
    db.session.commit()
    return jsonify({
        'status': 'modify success',
    })
@app.route('/get', methods=['POST'])
def get():
    """Return a single movie by id."""
    id = int(request.json.get('id'))
    movie = Movie.query.get(id)
    return jsonify({
        'status': 'get success',
        'data': movie.serialize(),
    })
# NOTE(review): debug leftover — runs at import time; consider removing.
print('Hello')
|
16,708 | bf5ee4dec04b5e7fff55c96ff5f97703790f61c1 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-10-04 14:15
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds WorkTimeTable.hours and alters
    # created_date / organization. Field definitions should not be edited
    # by hand; create a follow-up migration instead.
    dependencies = [
        ('primary_docs', '0028_auto_20181002_1312'),
    ]
    operations = [
        migrations.AddField(
            model_name='worktimetable',
            name='hours',
            field=models.DecimalField(decimal_places=2, max_digits=10, null=True, verbose_name='Всего часов'),
        ),
        migrations.AlterField(
            model_name='worktimetable',
            name='created_date',
            field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='Дата создания'),
        ),
        migrations.AlterField(
            model_name='worktimetable',
            name='organization',
            field=models.ForeignKey(default=1, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='worktime_tables', to='primary_docs.Organization', verbose_name='Организация'),
        ),
    ]
|
16,709 | 4c6b40430491cf328ff8f973ed46857619a3304d | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 19 18:43:21 2020
@author: phalinp
"""
import cv2 as cv
import numpy as np
# Create a 512x512 black BGR canvas to draw on.
img = np.zeros((512,512,3),np.uint8)
#img is a black image to draw a line
cv.line(img,(0,0),(511,511),(255,0,0),10)
#Creates a line starting from (0,0) to (511,511) in img, (255,0,0) describes
#colour which is Blue as per BGR and 10 is thickness of line in px
cv.imshow("image",img)
# Block until any key is pressed, then tear the window down.
cv.waitKey(0)
cv.destroyAllWindows()
|
16,710 | 5e65b999940cc674062de23ca0b4ef4efebc6e4a | import datetime
from Status.logList import log
from Status.updateStatus import state_json_data
def analysis(stock):
    '''torxiong Form Data Analysis'''
    # Build an alert string for one stock quote dict, or return '' when no
    # configured threshold is crossed. Thresholds come from the shared
    # "optional_list" entry of the status JSON (sample fields shown below).
    result = ''
    email_title = ''
    email_html = ''
    TIME = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    json_status = state_json_data["optional_list"]
    # "ID": 600352,
    # "cost_price": 17.61,
    # "loss_limit": 0.05,
    # "profit_limit": 0.05,
    # "volume": 2000000,
    # "time_out": 90
    profit_limit = float(json_status["profit_limit"])
    loss_limit = float(json_status["loss_limit"])
    volume_limit = float(json_status["volume"])
    # 'code': 'sh600352',
    # 'name': '浙江龙盛',
    # 'type': 'GP-A',
    # 'priceChange': '1.60',
    # 'changePercent': '9.99',
    # 'open': '15.85',
    # 'close': '16.01',
    # 'price': '17.61',
    # 'high': '17.61',
    # 'low': '15.79',
    # 'volume': '1992987',
    # 'turnover': '337149',
    # 'turnoverRate': '6.13',
    # 'totalWorth': '572.91',
    # 'circulationWorth': '572.91',
    # 'date': '2021-02-10 15:40:55',
    # 'buy': [ ··· ]
    # 'sell': [ ··· ]
    # 'minData': [ [ ··· ], [ ··· ], [ ··· ] ]
    name = stock["name"]
    changePercent = float(stock["changePercent"])
    volume = float(stock["volume"])
    # TODO: alert when the price rises to the take-profit level
    # NOTE(review): changePercent is in percent (e.g. 9.99) while the sample
    # profit_limit looks like a fraction (0.05) — confirm the units match.
    if changePercent > profit_limit and profit_limit:
        result = "告警\n当前{}上涨{}%\n{}".format(name, changePercent, TIME)
    # TODO: alert when the price falls to the stop-loss level
    # NOTE(review): a drop makes changePercent negative, so comparing against
    # a positive loss_limit also matches small gains — confirm intended sign.
    elif changePercent < loss_limit and loss_limit:
        result = "告警\n当前{}下跌{}%\n{}".format(name, changePercent, TIME)
    # TODO: volume breakout alert
    elif volume > volume_limit and volume_limit:
        result = "当前{}成交额\n已达到{}股\n{}".format(name, volume, TIME)
        # email_title = "{}成交额{}股".format(name, volume)
        # email_html = "<h2>{}</h2>已达交易预警" \
        #              "<h1>当前成交额{}股</h1>" \
        #              "<p>{}</p>".format(name, volume, TIME)
    return result |
16,711 | 9e26f40cb0dde6a5f8dd52dc162c16a53567eeee | N = int(input())
# Interactive elimination game: print the smallest remaining number, then
# read a number to remove; 0 terminates the program.
remaining = list(range(1, 2 * N + 2))
while remaining:
    print(remaining.pop(0))
    choice = int(input())
    if choice == 0:
        exit()
    remaining.remove(choice)
|
16,712 | 09458199687e103dc45d8f9ed4272af352c07062 | from django.apps import AppConfig
class DailyAttandanceConfig(AppConfig):
    # App label used in INSTALLED_APPS. The class name keeps the original
    # "Attandance" spelling: renaming would break existing settings references.
    name = 'daily_tracker'
|
16,713 | 8d5b9825eeb7b29f4a0af0bda1db6fe7c89f990c | from django.db import models
from django.core.validators import MaxValueValidator, MinValueValidator
class Rating(models.Model):
    """
    Rating model for ratings.

    One integer score (0-10, enforced by validators) given by `from_user`
    to `to_user`; the unique_rating constraint allows each ordered pair of
    users at most one rating.
    """
    from_user = models.ForeignKey('auth.User',
                                  related_name="ratings_made",
                                  on_delete=models.CASCADE,
                                  )
    to_user = models.ForeignKey('auth.User',
                                related_name="ratings_received",
                                on_delete=models.CASCADE,
                                )
    # Set once when the row is inserted.
    created = models.DateTimeField(auto_now_add=True)
    rating = models.IntegerField(validators=[MaxValueValidator(10),
                                             MinValueValidator(0)])
    class Meta:
        constraints = [
            models.UniqueConstraint(fields=['from_user',
                                            'to_user'],
                                    name="unique_rating")
        ]
        ordering = ['created']
|
16,714 | 47750a7d7ad3105e637062bbda91f0745e1b88e5 |
import numpy as np
import cv2
import imutils
class Procesador(object):
    """Runs motion detection, face/region detection and recognition on frames,
    annotating them and optionally recording the result to a video file.
    """

    def __init__(self, detectorM, detectorR, reconocedor, output_path=None):
        # detectorM: motion detector; detectorR: face/region detector;
        # reconocedor: recognizer mapping a crop to (name, prediction).
        self.detectorM = detectorM
        self.detectorR = detectorR
        self.reconocedor = reconocedor
        self.output_path = output_path
        # Fix: always define `writer`. The original only assigned it when
        # output_path was truthy, so procesar()/stop() raised AttributeError
        # whenever no output path was given.
        self.writer = None

    def dibujaRectangulo(self, puntos, img):
        """Draw a grey rectangle on img for every (x, y, w, h) in puntos."""
        for (x, y, w, h) in puntos:
            cv2.rectangle(img, (x, y), (x + w, y + h), (200, 200, 200), 2)

    def dibujaRectangulo2(self, pts_cds, img):
        """Draw green rectangles for every (points, crops) pair in pts_cds."""
        if len(pts_cds) > 0:
            for (pts, cds) in pts_cds:
                for (x, y, w, h) in pts:
                    cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)

    def procesar(self, img):
        """Annotate one frame in place and (if configured) record it.

        Returns the annotated frame.
        """
        puntos, cuadros = self.detectorM.detectar(img)
        self.dibujaRectangulo(puntos, img)
        pts_cds = self.detectorR.detectar(img)
        self.dibujaRectangulo2([pts_cds], img)
        pts, cds = pts_cds
        for (x, y, w, h), cd in zip(pts, cds):
            nombre, predi = self.reconocedor.predecir(cd)
            cv2.putText(img, nombre, (x, y - 20), cv2.FONT_HERSHEY_SIMPLEX, 1, 255, 2)
        #cv2.imshow("detect", img)
        # Open the writer lazily on the first frame, once the size is known.
        if self.writer is None and self.output_path is not None:
            fourcc = cv2.VideoWriter_fourcc(*"MJPG")
            self.writer = cv2.VideoWriter(self.output_path, fourcc, 20,
                                          (img.shape[1], img.shape[0]), True)
        if self.writer is not None:
            self.writer.write(img)
        return img

    def stop(self):
        """Release the video writer if one was opened."""
        if self.writer is not None:
            self.writer.release()
|
16,715 | d6c970d7a716ba70c9f8c63df2245d340b41426f | import pandas as pd
import glob
import matplotlib.pyplot as plt
import seaborn as sns
plt.rcParams["figure.dpi"] = 150


def _plot_pollutant_heatmap(pattern, colorbar_label, out_path, clear_first):
    """Load every CSV matching *pattern*, keep 2019 rows, and save a
    station-by-day-of-year heatmap of the 'val' column to *out_path*.

    Returns the concatenated DataFrame so callers can inspect the data.
    The two original copy-pasted pipelines (MP2.5 / MP10) are deduplicated
    through this helper.
    """
    df = pd.DataFrame()
    for file_name in sorted(glob.glob(pattern), reverse=True):
        temp = pd.read_csv(file_name, usecols=['date', 'name', 'val'])
        temp = temp.set_index('date')
        temp.index = pd.to_datetime(temp.index)
        # Keep calendar year 2019 only.
        temp = temp[(temp.index >= '01/01/2019') & (temp.index < '01/01/2020')]
        df = pd.concat([df, temp])
    if clear_first:
        # Clear the previous figure before drawing the next heatmap.
        plt.clf()
    plt.rcParams['figure.figsize'] = [12, 3]
    ax = sns.heatmap(df.pivot_table(index='name', columns=df.index.dayofyear, values='val'),
                     cmap='coolwarm')
    plt.xlabel('Timestamp')
    plt.ylabel(None)
    ax.collections[0].colorbar.set_label(colorbar_label)
    plt.tight_layout()
    plt.savefig(out_path)
    return df


# MP2.5
df_mp25 = _plot_pollutant_heatmap('../data/*_mp25.csv', 'PM 2.5 ug/m³',
                                  '../plots/heat-map-mp25.png', clear_first=False)
# MP10
df_mp10 = _plot_pollutant_heatmap('../data/*_mp10.csv', 'PM 10 ug/m³',
                                  '../plots/heat-map-mp10.png', clear_first=True)
|
16,716 | bd6167d79dfea2ccb824eac5125e51a34b9f5d5f | from django.urls import path
from . import views
app_name = 'App_Login'
# URL routes for the login app; names below are referenced elsewhere as
# 'App_Login:<name>' (templates, redirects).
urlpatterns = [
    path('sign_up/',views.sign_up,name='sign_up'),
    path('login/',views.login_page,name='login'),
    path('logout/',views.logout_user,name='logout'),
    path('profile/',views.profile,name='profile'),
    path('change-profile/',views.user_change,name='user_change'),
    path('password/',views.pass_change,name='pass_change'),
    path('add-picture/',views.add_pro_pic,name='add_pro_pic'),
    path('change-picture/',views.change_pro_pic,name='change_pro_pic'),
]
|
16,717 | fa5aa267f65394c7f6300d9e8f5b86c2a09cdfde | from collections import defaultdict
import numpy as np
class ImagePose():
    """Per-image SfM record: image data, features, pose and match bookkeeping."""

    def __init__(self):
        self.img = np.empty(1)                # downsampled image used for display
        self.desc = np.empty(1)               # feature descriptor matrix
        self.kp = []                          # keypoints as (x, y) pairs (not (y, x)!)
        self.T = np.zeros((4,4))              # 4x4 pose transformation matrix
        self.P = np.zeros((3,4))              # 3x4 projection matrix
        self.kp_matches = defaultdict(int)    # (kp_idx, img_idx) -> match count
        self.kp_landmark = defaultdict(int)   # kp_idx -> 3d landmark id

    def kp_match_exist(self, kp_idx, img_idx):
        """True if keypoint kp_idx has been matched against image img_idx."""
        return self.kp_matches[(kp_idx, img_idx)] > 0

    def kp_3d_exist(self, kp_idx):
        """True if keypoint kp_idx is associated with a 3D landmark."""
        return self.kp_landmark[kp_idx] != 0
# 3D point
class Landmark():
    """A reconstructed 3D point with its colour and observation count."""

    def __init__(self):
        self.pt = np.zeros((3))     # 3D position (cv::Point3f equivalent)
        self.color = np.zeros((3))  # [R, G, B]
        self.seen = 0               # how many cameras have seen this point
# Helper class
class SFMStorage():
    """Aggregates the reconstruction state: image poses plus 3D landmarks."""

    def __init__(self):
        self.img_pose = []  # ImagePose instances, one per registered image
        self.landmark = []  # Landmark instances, indexed by landmark id
# main storage class
class LandmarksStorage(object):
    """
    Stores all landmarks and corresponding images with keypoint coordinates.
    """
    def __init__(self, sfm_storage, min_landmark_seen):
        """
        sfm_storage:
            SFMStorage
            Source of image poses and landmarks.
        min_landmark_seen:
            int
            Minimum number of cameras that must have observed a landmark
            for it to be indexed here.
        """
        # self.landmarks_info[landmark_id] -> [(img_id1, (x, y)), ...]
        self.landmarks_info = defaultdict(list)
        self.sfm_storage = sfm_storage
        self.min_landmark_seen = min_landmark_seen
        self._fill_landmark_info(self.sfm_storage, self.min_landmark_seen)

    def _fill_landmark_info(self, sfm_storage, min_landmark_seen):
        """Index every sufficiently-observed landmark by (image id, keypoint)."""
        for i in range(len(sfm_storage.img_pose)):
            # Fix: the original read `SFM.img_pose[i]`, but `SFM` is not
            # defined anywhere in this module — the parameter `sfm_storage`
            # is the intended object (NameError at runtime otherwise).
            curr_img_pose = sfm_storage.img_pose[i]
            for k in range(len(curr_img_pose.kp)):
                if curr_img_pose.kp_3d_exist(k):
                    landmark_id = curr_img_pose.kp_landmark[k]
                    if sfm_storage.landmark[landmark_id].seen >= min_landmark_seen:
                        self.landmarks_info[landmark_id].append((i, curr_img_pose.kp[k]))

    def get_num_landmarks(self):
        """
        Returns number of landmarks in the storage.
        return:
            int
            Number of landmarks in the storage
        """
        return len(self.landmarks_info)

    def get_landmark_info(self, landmark_id):
        """
        Returns list of image id's
        and coordinates of corresponding keypoint on each image.
        landmark_id:
            int
            Landmark's id
        return:
            list
            List in format: [(img_id, (x, y)), ...].
            (x, y) are the coordinates of the keypoint in pixels (x<->cols, y<->rows)
        """
        return self.landmarks_info[landmark_id]
|
16,718 | a08b3dc16cead67a7c1b3a67a65f70a1414859c7 | import struct
import numpy as np
import FakeSerial as serial
class TestClass:
    """Manual test harness for the Nanomodem serial command protocol,
    driven against a FakeSerial port. Each test_* method writes one
    command and byte-parses the echoed/ACK response.
    """
    ## init(): the constructor. Many of the arguments have default values
    # and can be skipped when calling the constructor.
    # same time as a Serial
    # same readtimeout and writetimeout as used in Nanomodem from MasterAnchor
    def __init__(self):
        self.soundspeed = 1500
        self.portname = "COM1"
        self.baudrate = 19500
        self.readtimeout = 1000
        self.writetimeout = 1000
        self.serport = serial.Serial(self.portname, self.baudrate, timeout=self.readtimeout,
                                     write_timeout=self.writetimeout)
    def run_test(self):
        # Orchestrates the individual tests; address setup must succeed
        # before the status/query tests can run.
        print("\ntest_set_self_address() should be called at the beginning of the test\n")
        self_address = False
        self_address = self.test_set_self_address()
        print ("self_address test is :", self_address)
        remote_node = "002"
        check_query_response = self.test_check_query_response(remote_node)
        print("check_query_response test is :", check_query_response)
        if self_address == False:
            print("self_address has not been set up correctly, the program can't continue")
            return
        else:
            self_status = self.test_query_self_status()
            print ("self_status test is :", self_status)
            query_node_status = self.test_query_node_id(remote_node)
            print("query_node_status is :", query_node_status)
            if query_node_status == True:
                check_query_response = self.test_check_query_response(remote_node)
                print("check_query_response test is :", check_query_response)
    def test_set_self_address(self):
        """
        test_set_self_address()
        test the setting of a node address, writing to and receiving from the modem
        The address should be of 3 characters, e.g., "000" to "255".
        If successful return True otherwise False
        """
        print('### Testing set up address ###')
        node_id = "001" # node_id of the form of 3 chr string already verified in Nanomodem.py
        command = b'$A' + node_id.encode()
        self.serport.write(command)
        received_bytes = self.serport.readline()
        index = received_bytes.find(b'#A')
        #print("SET_ADDRESS len is "+ str(len(received_bytes)) +" and index is "+str(index))
        if (index != -1) and (len(received_bytes) - index == 5 and received_bytes.decode()[1] == 'A'):
            # received_bytes[1] == b'A' as condition doesn't work because x = b'A' still stay b'A' and x[0] give 65 (the byte for A)
            #print("SET_ADDRESS A was spot on")
            # NOTE(review): if the echoed id below does not match, this
            # branch falls through and the method returns None (not False).
            if received_bytes[1:4] == command[1:4]:
                node_id = received_bytes.decode()[2:5]
                print("SET_ADDRESS node is :"+ node_id)
                print("set self address SUCCESS")
                return True
        else:
            print("set self address FAILURE")
            return False
    def test_query_self_status(self):
        """
        test_query_self_status()
        Checks that status of the parent Nanomodem.
        Return the address and voltage of the parent Nanomodem otherwise raises Exception
        See also: query_node_status
        """
        print('\n### Testing query self status ###')
        command = b'$?'
        self.serport.write(command)
        received_bytes = self.serport.readline()
        index = received_bytes.find(b'#A')
        if (index != -1) and (len(received_bytes) - index == 10):
            if received_bytes[index+5:index+6] == b'V':
                # print("check_for_valid_ack_signal SELF_STATUS V was spot on")
                # Digits after 'V' encode the raw ADC voltage reading.
                for i in range(index+6, index+10):
                    if b'0' <= received_bytes[i:i+1] <= b'9':
                        pass
                    else:
                        print("query self status FAILURE 1")
                        return False
                node_id = received_bytes.decode()[index+2:index+5]
                print("SELF_STATUS node is :", node_id)
                # Scale the 16-bit raw reading to volts (15 V full scale).
                voltage = round(float(received_bytes[index+6:index+10]) * (15.0/65536.0), 3)
                print("SELF_STATUS voltage is :", voltage)
                print("query self status SUCCESS")
                return True
        else:
            print(" query self status FAILURE 2")
            return False
    def test_query_node_id(self,node_id):
        """
        test_query_node_status(node_id)
        Requests the status of remote node with specific node_id.
        Returns 0 if query was sent, otherwise -1
        """
        print('\n### Testing query node status ACK ###')
        print('Remember that node_id must be a 3 characters string')
        command = b'$V' + node_id.encode()
        self.serport.write(command)
        received_bytes = self.serport.readline()
        index = received_bytes.find(b'$V')
        #ACK COMMAND
        if (index != -1) and (len(received_bytes) - index == 5 and received_bytes.decode()[1] == 'V'):
            #print("SET_ADDRESS V was spot on")
            # NOTE(review): as in test_set_self_address, a mismatch here
            # falls through and returns None rather than False.
            if received_bytes[1:4] == command[1:4]:
                node_id = received_bytes.decode()[2:5]
                print("command has well been sent to node :"+ node_id)
                print("acknowledgement of the command SUCCESS")
                return True
        else:
            print("acknowledgement of the command FAILURE")
            return False
    def test_check_query_response(self, node_id):
        """
        test_check_query_response(node_id)
        Checks if the node with specific address has replied for the status query.
        Returns the voltage of the queried node if response was received correctly,
        otherwise -1
        Note: Wait for sufficient time after issuing query_node_status command to
        receive the response. Re-issue the check_ping_response command after some
        waiting if required!
        See also
        query_node_status
        """
        print('\n### Testing query node status RESPONSE ###')
        print('Remember that node_id must be the same 3 characters string that in test_query_node_id(node_id)')
        received_bytes = self.serport.readline()
        if received_bytes == b'E\r\n':
            print("You received Error Msg!")
            print(f'Did not receive correct query status response from node {node_id}')
            print(f'Query again the node {node_id} if required')
            return False
        elif (len(received_bytes) == 13) and (received_bytes[0:8] == b'#B' + node_id.encode() + b'06V'):
            supply_voltage = received_bytes.decode()[8:13]
            print(f"supply_voltage of {node_id} is {supply_voltage}")
            print("response from the remote node SUCCESS")
            return True
        else:
            print(f'Did not receive correct query status response from node {node_id}')
            print(f'Query again the node {node_id} if required')
            return False |
16,719 | 693d082114b0fd5374896f9232d1db22e9509d01 | # Update for the User name and Password for the
# database service
# NOTE(review): hard-coded credentials; load these from environment
# variables or a secrets store before deploying beyond local development.
VIDEO_DATABASE_USER = "video_search"
VIDEO_DATABASE_PASSWORD = "passw0rd"
|
16,720 | 7ddaa444b7d29a7e1a85d8e528b7cfe06638221d | import random
class BernouliArm:
    """Bernoulli bandit arm: pull() pays 1 with probability s, else 0.

    (The class name keeps the original "Bernouli" spelling because callers
    reference it.)
    """

    def __init__(self, s):
        # s: success probability, expected in [0, 1].
        self.s = s

    def pull(self):
        """Sample a single reward from this arm."""
        return 1 if random.random() < self.s else 0
|
16,721 | 1d5bf548a98e24e4dcdad0ae77dc16980519f6b7 | #comprimento da escada
from math import sin
from math import pi
def main():
    """Read a wall height and a slope angle in degrees; print the ladder
    length needed to reach that height (length = height / sin(angle)).
    """
    # float() replaces eval(): eval executes arbitrary code typed by the
    # user, while float() accepts the same numeric inputs safely.
    altura = float(input("Digite a altura: "))
    graus = float(input("Digite a inclinação (graus): "))
    angulo = (pi/180)*graus  # degrees -> radians
    comprimento = altura / sin(angulo)
    print("O comprimento da escada ate a casa e: ", round(comprimento))
main() |
16,722 | 318918145e9da48522ca18a00478d9281300e321 | import json
import logging
from datetime import datetime
# from dateutil.tz import tzlocal
from flask import Blueprint, jsonify, abort, current_app, request
# import paho.mqtt.client as mqtt
from kafka import KafkaProducer
from .models import Task, TaskType, TaskStatus
from ..database import db
from ..factory.models import Factory
from ..workstation.models import Workstation
from ..equipment.models import Equipment
task_bp = Blueprint("task", __name__)
logger = logging.getLogger(__name__)
# logger = current_app.logger
def _parse_deadline(body):
    """Parse body['deadline'] as %Y-%m-%dT%H:%M:%S, aborting with 400 when
    missing or malformed."""
    deadline = body.get("deadline")
    try:
        return datetime.strptime(deadline, "%Y-%m-%dT%H:%M:%S")
    except (TypeError, ValueError) as e:
        logger.error(e)
        abort(400, "deadline not provided or not in %Y-%m-%dT%H:%M:%S format")


def _create_and_publish(task_type, topic, payload, deadline):
    """Persist a Task for *payload*, publish it to the Kafka *topic*, and
    return the JSON response.

    The row is flushed (not committed) first so task.id is available for the
    message; commit happens only after the publish succeeds, otherwise the
    transaction is rolled back. Shared by the three add_*_task handlers,
    which previously duplicated this whole sequence.
    """
    task = Task(type=task_type,
                create_time=datetime.now(),
                context=json.dumps(payload),
                deadline=deadline,
                status=TaskStatus.created)
    try:
        db.session.add(task)
        db.session.flush()  # assigns task.id without committing
    except Exception as e:
        logger.exception(e)
        abort(500, "database write error")
    payload.update({"taskId": task.id})
    message = json.dumps(payload)
    try:
        producer = KafkaProducer(bootstrap_servers=current_app.config.get("KAFKA_SERVER"))
        producer.send(topic=topic, value=message.encode())
        producer.close()
    except Exception as e:
        logger.error(e, exc_info=True)
        db.session.rollback()
        abort(500, "mq publish failed")
    else:
        db.session.commit()
    return jsonify(task.dict)


@task_bp.route("/task/equipment/equipment/<int:equipment_id>", methods=["POST"])
def add_equipment_task(equipment_id: int):
    """Create an 'equipment' check task for the given equipment."""
    deadline = _parse_deadline(request.get_json())
    equipment = Equipment.query.get_or_404(equipment_id)
    if equipment.equipment_camera is None:
        abort(400, "no camera is registered for this equipment for checking existence")
    payload = {"equipmentId": equipment_id, "deadline": deadline.strftime(format="%Y-%m-%dT%H:%M:%S")}
    return _create_and_publish(TaskType.equipment, "equipment", payload, deadline)


@task_bp.route("/task/equipment_active/equipment/<int:equipment_id>", methods=["POST"])
def add_equipment_active_task(equipment_id: int):
    """Create an 'equipment_active' check task for the given equipment."""
    deadline = _parse_deadline(request.get_json())
    equipment = Equipment.query.get_or_404(equipment_id)
    if equipment.equipment_active_camera is None:
        abort(400, "no camera is registered for this equipment for checking existence")
    payload = {"equipmentId": equipment_id,
               "deadline": deadline.strftime(format="%Y-%m-%dT%H:%M:%S")}
    return _create_and_publish(TaskType.equipment_active, "equipment_active", payload, deadline)


@task_bp.route("/task/keyperson/workstation/<int:workstation_id>", methods=["POST"])
def add_keyperson_task(workstation_id: int):
    """Create a 'keyperson' presence task for the given workstation.

    Requires an integer 'duration' in addition to the deadline. (Logging now
    goes through the module logger for consistency with the other handlers.)
    """
    body = request.get_json()
    deadline = _parse_deadline(body)
    duration = body.get("duration")
    if not isinstance(duration, int):
        abort(400, "task duration not provided or is not integer")
    workstation = Workstation.query.get_or_404(workstation_id)
    if workstation.camera is None:
        abort(400, "no camera is registered for this workstation for checking existence")
    payload = {
        "workstationId": workstation_id,
        "deadline": deadline.strftime(format="%Y-%m-%dT%H:%M:%S"),
        "duration": duration
    }
    return _create_and_publish(TaskType.keyperson, "keyperson", payload, deadline)


@task_bp.route("/task/<int:task_id>", methods=["GET"])
def get_task(task_id: int):
    """Return one task as JSON; 404 if it does not exist."""
    task = Task.query.get_or_404(task_id)
    return jsonify(task.dict)


@task_bp.route("/task/<int:task_id>", methods=["PUT"])
def set_task(task_id: int):
    """Partially update a task's times, status or result from the JSON body."""
    body = request.get_json()
    task = Task.query.get_or_404(task_id)
    if "startTime" in body:
        task.start_time = datetime.strptime(body.get("startTime"), "%Y-%m-%dT%H:%M:%S")
    if "endTime" in body:
        task.end_time = datetime.strptime(body.get("endTime"), "%Y-%m-%dT%H:%M:%S")
    if "status" in body:
        task.status = body.get("status")
    if "result" in body:
        task.result = body.get("result")
    db.session.commit()
    return "OK"
|
16,723 | 66150ad456d07e207a796347ee60a4e3ec4e2a8c | import requests
import json
import base64
import shutil
import cv2
addr = 'http://localhost:5000'
test_url = addr + '/predict'
# prepare headers for http request
content_type = 'image/jpg'
headers = {'content-type': content_type}
img = cv2.imread('pA.jpg')
# encode image as jpeg
_, img_encoded = cv2.imencode('.jpg', img)
# send http request with image and receive response
# Fix: ndarray.tostring() was deprecated and removed in modern NumPy;
# tobytes() is the identical-output replacement.
response = requests.post(test_url, data=img_encoded.tobytes(), headers=headers)
# decode response
#print(response.content)
# write the returned image bytes to disk
with open("resAPI.jpg", "wb") as f:
    f.write(response.content)
# expected output: {u'message': u'image received. size=124x124'} |
16,724 | aebc8dae9e39bc97d5078d0ae1b5ba4d0442c1c9 | #!/usr/bin/env python3
"""
Created on Wed Feb 12 10:44:59 2020
@author: German Sinuco
Skeleton modified from
https://www.tensorflow.org/tutorials/customization/custom_training
https://www.tensorflow.org/tutorials/customization/custom_training_walkthrough
Training of an RBM parametrization of the unitary matrix that diagonalises the 2x2 real,
and symmetric HAMILTONIAN:
==================== IMPORTANT NOTE ========================
as V2, but using complex parameters, which I used for the first time in TensorFlow_Floquet.py
============================================================
"""
from __future__ import absolute_import, division, print_function, unicode_literals
try:
# %tensorflow_version only exists in Colab.
get_ipython().run_line_magic('tensorflow_version', '2.x')
except Exception:
pass
import tensorflow as tf
import numpy as np
import math as m
from model import FloquetHamiltonian
from scipy.stats import unitary_group
class Model(object):
def __init__(self,delta=0.0,Omega=0.1,phase=0.0):
self.spin = True
self.omega_0 = 1.00
# Hamiltonian parameters
self.delta = 0.00
self.omega = self.delta + self.omega_0
self.Omega = 1.00
self.phase = phase # the phase in cos(omega t + phase)
# Initialize the spin value and number of floquet channels
self.hidden_n = 4 # hidden neurons
self.hidden_ph = 4 # hidden neurons
self.S = 4 # spin 3.2. Hilbert space dimension
#self.S = 2 # spin 1/2. Hilbert space dimension
self.N = 0 # Number of positive Floquet manifolds
self.dim = self.S*(2*self.N+1) # Dimension of the extended floquet space
zero_ = tf.constant(0.0,dtype=tf.float64)
one_ = tf.constant(1.0,dtype=tf.float64)
j_ = tf.constant(tf.complex(zero_,one_),dtype=tf.complex128)
#uf_ = tf.random.stateless_uniform([self.dim,self.dim],seed=[2,1],dtype=tf.float32,minval=0.0,maxval=1.0)
#s,u,v = tf.linalg.svd(uf_, full_matrices=True)
#uf_ = u
# Declaring training variables
# Training parameters defining the norm
self.W_n = tf.Variable(tf.random.stateless_uniform([self.hidden_n,self.dim*self.dim,2],
seed=[1,1],dtype=tf.float64,
minval=-1.0,maxval=1.0),trainable=True)
self.b_n = tf.Variable(tf.random.stateless_uniform([self.dim*self.dim,2],
seed=[1,1],dtype=tf.float64,
minval=-1.0,maxval=1.0),trainable=True)
self.c_n = tf.Variable(tf.random.stateless_uniform([self.hidden_n],
seed=[1,1],dtype=tf.float64,
minval=-1.0,maxval=1.0),trainable=True)
# Training parameters defining the phase
self.W_ph = tf.Variable(tf.random.stateless_uniform([self.hidden_ph,self.dim*self.dim,2],
seed=[1,1],dtype=tf.float64,
minval=-1.0,maxval=1.0),trainable=True)
self.b_ph = tf.Variable(tf.random.stateless_uniform([self.dim*self.dim,2],
seed=[1,1],dtype=tf.float64,
minval=-1.0,maxval=1.0),trainable=True)
self.c_ph = tf.Variable(tf.random.stateless_uniform([self.hidden_ph],
seed=[1,1],dtype=tf.float64,
minval=-1.0,maxval=1.0),trainable=True)
UF_aux = tf.Variable(np.zeros((self.dim*self.dim), dtype=np.complex128),trainable = False) # ext. micromotion operator
UF_n = tf.Variable(np.zeros((self.dim,self.dim), dtype=np.complex128),trainable = False) # ext. micromotion operator
UF_ph = tf.Variable(np.zeros((self.dim,self.dim), dtype=np.complex128),trainable = False) # ext. micromotion operator
self.UF = tf.Variable(np.zeros((self.dim,self.dim), dtype=np.complex128),trainable = False) # ext. micromotion operator
# defining the labels of the input layer, which are the components of the UF matrix
self.x = tf.Variable([[0.0,0.0]],dtype=tf.float64)
counter = 0
self.count = counter
for i in range(1,self.dim+1):
for j in range(1,self.dim+1):
if(self.S==4):
y = [[i-2.5,j-2.5]]
if(self.S==2):
y = [[i-1.5,j-1.5]]
self.x = tf.concat([self.x, y], 0)
counter +=1
self.count = counter
#Building of the marginal probability of the RBM using the training parameters and labels of the input layer
#P(x)(b,c,W) = exp(bji . x) Prod_l=1^M 2 x cosh(c_l + W_{x,l} . x)
# 1. Amplitude (norm)
WX_n = [tf.reduce_sum(tf.multiply(self.x[1:counter+1],self.W_n[0]),1)+self.c_n[0]]
for j in range(1,self.hidden_n):
y = tf.reduce_sum(tf.multiply(self.x[1:counter+1],self.W_n[j]),1)+self.c_n[j]
WX_n = tf.concat([WX_n, [y]], 0)
UF_aux = tf.sqrt(tf.abs(tf.multiply(tf.reduce_prod(tf.math.cosh(WX_n),0),tf.exp(
tf.transpose(tf.reduce_sum(tf.multiply(
self.x[1:counter+1],self.b_n),1))))))
UF_n = tf.reshape(UF_aux,[self.dim,self.dim])
# 2. Phase
WX_ph = [tf.reduce_sum(tf.multiply(self.x[1:counter+1],self.W_ph[0]),1)+self.c_ph[0]]
for j in range(1,self.hidden_ph):
y = tf.reduce_sum(tf.multiply(self.x[1:counter+1],self.W_ph[j]),1)+self.c_ph[j]
WX_ph = tf.concat([WX_ph, [y]], 0)
UF_aux = tf.multiply(tf.reduce_prod(tf.math.cosh(WX_ph),0),tf.exp(
tf.transpose(tf.reduce_sum(tf.multiply(self.x[1:counter+1],self.b_ph),1))))
UF_ph = tf.reshape(tf.math.log(UF_aux),[self.dim,self.dim])
UF_cos = tf.cos(UF_ph/2.0)
UF_sin = tf.sin(UF_ph/2.0)
UF = tf.complex(UF_n*UF_cos,UF_n*UF_sin)
# 1st of March 2020. Task: REVISE NORMALISATION AND GRAM-SCHMIDT PROCEDURE FOR COMPLEX VECTORS
# 5th of March 2020. Normalisation done by hand: OK. Now I am using the G-S algorithm
# reported in https://stackoverflow.com/questions/48119473/gram-schmidt-orthogonalization-in-pure-tensorflow-performance-for-iterative-sol.
# Task: incorparate a basis rotation in the training loop
UF = normalisation(UF)
UF = tf_gram_schmidt(UF)
self.UF = UF
if self.S == 2:
# spin 1/2
self.Identity = tf.constant([[1.0,0.0],[ 0.0, 1.0]],dtype = tf.complex128)
self.Sx = 0.5*tf.constant([[0.0,1.0],[ 1.0, 0.0]],dtype = tf.complex128)
self.Sy = j_*0.5*tf.constant([[0.0,1.0],[-1.0, 0.0]],dtype = tf.complex128)
self.Sz = 0.5*tf.constant([[1.0,0.0],[ 0.0,-1.0]],dtype = tf.complex128)
else:
if self.S == 4:
# spin 3/2
self.Identity = tf.constant([[1.0,0.0,0.0,0.0],
[0.0,1.0,0.0,0.0],
[0.0,0.0,1.0,0.0],
[0.0,0.0,0.0,1.0]],dtype = tf.complex128)
self.Sx = 0.5*tf.constant([[0.0, np.sqrt(3.0),0.0, 0.0],
[np.sqrt(3.0),0.0, np.sqrt(4.0), 0.0],
[0.0, np.sqrt(4.0),0.0, np.sqrt(4.0)],
[0.0, 0.0, np.sqrt(3.0), 0.0]],dtype = tf.complex128)
self.Sz = 0.5*tf.constant([[3.0,0.0, 0.0, 0.0],
[0.0,1.0, 0.0, 0.0],
[0.0,0.0,-1.0, 0.0],
[0.0,0.0, 0.0,-3.0]],dtype = tf.complex128)
self.Szero = tf.zeros([self.S,self.S],dtype=tf.complex128)
#else:
# if (self.S != 4 & self.S !=2):
# for j in range(0,self.S):
# H[j,j]
if self.N == 0:
self.H_TLS = tf.Variable(self.delta*self.Sz+0.5*self.Omega*self.Sx,shape=(self.dim,self.dim),dtype = tf.complex128,trainable = False) # ext. Hamiltonian
else:
self.H_TLS = FloquetHamiltonian(self) # ext. Hamiltonian
self.trainable_variables = [self.W_n,self.b_n,self.c_n,self.W_ph,self.b_ph,self.c_ph]
def getH(self):
return self.H_TLS
def __call__(trainable_variables):
return self.H_TLS
def normalisation(U_):
    """Normalise each column of ``U_`` to unit Euclidean norm.

    U_  (in)  matrix whose columns are (possibly unnormalised) vectors
        (out) the same matrix with every column divided by its norm

    Bug fix: the original passed a stray positional ``1`` as the third
    argument of ``tf.multiply`` (which is its ``name`` parameter, not an
    axis); the intended reduction axis is already given to ``reduce_sum``.
    """
    norms = tf.sqrt(tf.math.reduce_sum(tf.multiply(tf.math.conj(U_), U_), axis=0))
    return tf.math.truediv(U_, norms)
def tf_gram_schmidt(vectors):
    """Orthonormalise the columns of ``vectors`` with classical Gram-Schmidt.

    Returns a tensor whose *rows* are the orthonormalised vectors (same
    layout as the original implementation).  Assumes the input columns are
    already close to orthogonal, so no re-orthogonalisation pass is done.
    """
    first = vectors[:, 0] / tf.norm(vectors[:, 0])
    # keep a leading batch dimension so matmul works on row stacks
    ortho = tf.expand_dims(first, 0)
    for col in range(1, vectors.shape[0]):
        vec = tf.expand_dims(vectors[:, col], 0)
        # remove the projection of vec onto the span built so far
        resid = vec - tf.matmul(tf.matmul(vec, ortho, adjoint_b=True), ortho)
        ortho = tf.concat([ortho, resid / tf.norm(resid)], axis=0)
    return ortho
def Unitary_Matrix(model):
    """Build the candidate unitary ``UF`` from the RBM parameters of ``model``.

    The element-wise norm comes from the norm-RBM (W_n, b_n, c_n), the
    element-wise phase from the phase-RBM (W_ph, b_ph, c_ph); the columns
    are then orthogonalised with Gram-Schmidt.
    """
    UF = tf.Variable(np.zeros((model.dim*model.dim), dtype=np.complex64),trainable = False) # ext. micromotion operator
    UF_n = tf.Variable(np.zeros((model.dim,model.dim), dtype=np.complex64),trainable = False) # ext. micromotion operator
    UF_ph = tf.Variable(np.zeros((model.dim,model.dim), dtype=np.complex64),trainable = False) # ext. micromotion operator
    #dim = model.dim
    counter = model.count
    #Building of the marginal probability of the RBM using the training parameters and labels of the input layer
    #P(x)(b,c,W) = exp(bji . x) Prod_l=1^M 2 x cosh(c_l + W_{x,l} . x)
    # 1. Amplitude (norm): cosh-product over hidden units times exp(b . x)
    WX_n = [tf.reduce_sum(tf.multiply(model.x[1:counter+1],model.W_n[0]),1)+model.c_n[0]]
    for j in range(1,model.hidden_n):
        y = tf.reduce_sum(tf.multiply(model.x[1:counter+1],model.W_n[j]),1)+model.c_n[j]
        WX_n = tf.concat([WX_n, [y]], 0)
    UF_n = tf.sqrt(tf.multiply(tf.reduce_prod(tf.math.cosh(WX_n),0),tf.transpose(tf.exp(tf.reduce_sum(
                   tf.multiply(model.x[1:counter+1],model.b_n),1)))))
    UF_n = tf.reshape(UF_n,[model.dim,model.dim])
    # 2. Phase: same marginal form, but its log is used as the phase angle
    WX_ph = [tf.reduce_sum(tf.multiply(model.x[1:counter+1],model.W_ph[0]),1)+model.c_ph[0]]
    for j in range(1,model.hidden_ph):
        y = tf.reduce_sum(tf.multiply(model.x[1:counter+1],model.W_ph[j]),1)+model.c_ph[j]
        WX_ph = tf.concat([WX_ph, [y]], 0)
    UF_ph = tf.multiply(tf.reduce_prod(tf.math.cosh(WX_ph),0),tf.transpose(tf.exp(tf.reduce_sum(
                   tf.multiply(model.x[1:counter+1],model.b_ph),1))))
    UF_ph = tf.reshape(tf.math.log(UF_ph),[model.dim,model.dim])
    # Assemble norm * exp(i * phase/2) as a complex matrix.
    UF_cos = tf.cos(UF_ph/2.0)
    UF_sin = tf.sin(UF_ph/2.0)
    UF = tf.complex(UF_n*UF_cos,UF_n*UF_sin)
    # NOTE(review): unlike the __init__ path this skips normalisation()
    # before Gram-Schmidt -- confirm this asymmetry is intended.
    UF = tf_gram_schmidt(UF)
    #s,u,v = tf.linalg.svd(UF, full_matrices=True)
    #UF = u
    return UF
def train(model,learning_rate):
    """One manual gradient-descent step (legacy; unused below -- the
    Keras optimizer path is used instead).

    NOTE(review): ``t.gradient`` over a *list* of variables returns a list
    of tensors, so ``learning_rate*dU`` and ``model.UF.assign_sub`` look
    broken (``UF`` is not itself a trainable variable) -- confirm before
    relying on this function.
    """
    with tf.GradientTape() as t:
        current_loss = loss(model)
    dU = t.gradient(current_loss, model.trainable_variables)
    model.UF.assign_sub(learning_rate*dU)
# 3e. Loss function := Use U^dagger H U, sum over the columns, take the difference with the diagonal,
# the loss function is the summ of the square of these differences.
def loss(model):
    """Diagonalisation loss.

    For ``U = Unitary_Matrix(model)`` compare the diagonal of
    ``|U^dagger H U|`` against its row sums; the loss is the summed
    absolute difference, which is zero iff ``U^dagger H U`` is diagonal.

    Cleanup: the original allocated an unused ``tf.Variable``, an unused
    numpy array ``a`` and an unused ``counter`` on every call; removed.
    """
    UF = Unitary_Matrix(model)
    # Transformed Hamiltonian in the candidate eigenbasis.
    U_ = tf.abs(tf.transpose(tf.math.conj(UF))@model.H_TLS@UF)
    U_diag = tf.linalg.tensor_diag_part(U_)
    dotProd = tf.math.reduce_sum(abs(U_),axis=1)
    residual = tf.math.reduce_sum(tf.abs((U_diag-dotProd)),0)
    # Unitarity penalty, currently disabled (kept for experimentation).
    U_ = tf.abs(tf.transpose(tf.math.conj(UF))@UF)
    U_diag = tf.linalg.tensor_diag_part(U_)
    dotProd = tf.math.reduce_sum(abs(U_),axis=1)
    residual_unitary = tf.pow(tf.math.reduce_sum(dotProd,0) - model.dim,2.0)
    #residual += 1.0*residual_unitary
    return residual
# This is the gradient of the loss function. required for keras optimisers
def grad(model):
    """Evaluate the loss and its gradients w.r.t. the trainable variables.

    Returns ``(loss_value, gradients)`` suitable for
    ``optimizer.apply_gradients``.
    """
    with tf.GradientTape() as tape:
        value = loss(model)
    gradients = tape.gradient(value, model.trainable_variables)
    return value, gradients
# Optimiser for the RBM parameters; SGD alternative kept below for reference.
optimizer = tf.keras.optimizers.Adam(learning_rate=0.01, beta_1=0.9, beta_2=0.999,
                                     epsilon=1e-07, amsgrad=False,name='Adam')
#optimizer = tf.keras.optimizers.SGD(learning_rate=0.001)
model = Model()
loss_value = loss(model)
print("Initial UF guess: ", Unitary_Matrix(model))
print("Initial loss value: ",loss_value.numpy())
# Fixed-length training loop: 2048 Adam steps on the diagonalisation loss.
epochs = range(2048)
for i in epochs:
    loss_value, grads = grad(model)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
print("Final loss value: ",loss_value.numpy())
print("Final UF matrix:", Unitary_Matrix(model))
# Diagnostics: residual between the diagonal of |UF^dagger H UF| and its
# row sums (squared here, unlike the absolute value used in loss()).
UF = Unitary_Matrix(model)
U_ = tf.abs(tf.transpose(tf.math.conj(UF))@(model.H_TLS@UF))
U_diag = tf.linalg.tensor_diag_part(U_)
dotProd = tf.math.reduce_sum(abs(U_),axis=1,)
residual = tf.math.reduce_sum(tf.pow((U_diag-dotProd),2),0)
print(residual)
print(tf.abs(UF))
print(U_)
|
16,725 | 447d4b48cd898b06837a19f192c8b39e5ae10104 | from django.contrib import admin
from .models import *
# Expose the catalogue model in the Django admin with its custom
# ModelAdmin display class (both star-imported from .models).
admin.site.register(ResourceField,ResourceFieldDisplay)
admin.site.register(ReleaseCatalogue,ReleaseCatalogueDisplay) |
16,726 | bc5b1beec471d69b6264a56ed17151ca09230c55 | # coding: utf-8
import time
import datetime
import base64
from email import utils
from odoo import models, fields, api
from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT
class BlogPost(models.Model):
    """blog.post extension that publishes posts as an RSS feed cached in
    an ``ir.attachment`` at ``/blog_rss.xml``."""
    _inherit = 'blog.post'

    @api.multi
    def _compute_get_date(self):
        """Fill ``date_rfc2822`` with the post's write date in RFC 2822
        form -- the date format RSS readers expect."""
        for post in self:
            write_tuple = post.write_date.date().timetuple()
            timestamp = time.mktime(write_tuple)
            post.date_rfc2822 = utils.formatdate(timestamp)

    # Write date rendered as an RFC 2822 string for the feed template.
    date_rfc2822 = fields.Char(compute='_compute_get_date')

    @api.model
    def _get_previous_blog_rss(self):
        """Return the cached feed body (decoded bytes) when a fresh-enough
        attachment exists, else a falsy value.

        Freshness window in minutes comes from the ``blog.rss.cache.time``
        config parameter.
        """
        cache_time = self.env['ir.config_parameter'].sudo().get_param(
            'blog.rss.cache.time')
        blog_cache = datetime.datetime.now() - datetime.timedelta(
            minutes=int(cache_time))
        working_date = blog_cache.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
        blog_rss = self.env['ir.attachment'].search([
            ('name', 'ilike', '/blog_rss%.xml'),
            ('type', '=', 'binary'),
            ('create_date', '>=', working_date)], limit=1)
        return blog_rss and base64.b64decode(blog_rss.datas)

    @api.model
    def _get_blog_rss_content(self, blog=None):
        """Render and return the RSS XML built from all published posts,
        optionally restricted to *blog* (defaults to the first blog)."""
        view = self.env['ir.ui.view']
        blog_obj = self.env['blog.blog']
        blog_post = self.env['blog.post']
        blog_ids = blog.ids if blog else blog_obj.search([], limit=1).ids
        values = {}
        post_domain = [('website_published', '=', True)]
        if blog_ids:
            post_domain += [("blog_id", "in", blog_ids)]
            values['blog'] = blog_obj.browse(blog_ids)
        values['posts'] = blog_post.search(post_domain)
        values['company'] = self.env.user.company_id
        values['website_url'] = self.env[
            'ir.config_parameter'].get_param('web.base.url')
        values['url_root'] = '%s/' % values['website_url']
        return view.render_template('website_blog_rss.blog_rss_xml', values)

    @api.model
    def _update_blog_rss(self):
        """Regenerate the cached RSS attachment: remove any previous one,
        render fresh content, and store it as a public binary attachment.

        Fix: the original first fetched the previous cached feed into
        ``content`` and immediately overwrote it -- the dead lookup is
        removed.
        """
        att = self.env['ir.attachment'].search(
            [('name', '=like', '/blog_rss%.xml'), ('type', '=', 'binary')])
        att.unlink()
        content = self._get_blog_rss_content()
        att.create(dict(
            datas=base64.b64encode(content),
            mimetype='application/xml;charset=utf-8', type='binary',
            name='/blog_rss.xml', url='/blog_rss.xml',
            public=True))
        return True
|
16,727 | e204af9bfc1687348d1dd2b1304b9e397c91a39e | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import os
class DaomuPipeline(object):
    """Scrapy pipeline that writes each scraped chapter to
    ``<BASE_DIR>/<novel title>/<chapter name>.txt``."""

    # Output root.  Kept as a class attribute (same default path as before)
    # so it can be overridden, e.g. in tests, without changing the API.
    BASE_DIR = 'D:/novel'

    def process_item(self, item, spider):
        """Persist one chapter to disk and return the item unchanged.

        Fix: ``os.makedirs`` is called with ``exist_ok=True`` -- the
        original crashed only by luck avoidance of the pre-existing-dir
        case was handled with a racy ``os.path.exists`` check.
        """
        directory = os.path.join(self.BASE_DIR, item['title'])
        os.makedirs(directory, exist_ok=True)
        filename = os.path.join(directory, item['name'] + '.txt')
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(item['content'])
        return item
|
16,728 | 0fca2d33ff189d64fc4f5c3e8c9dff7ec073394c | from django.urls import path, include
from .views import index, get_user, get_user_info , get_images
# Route table: landing page plus three endpoints backed by views in .views.
urlpatterns = [
    path('', index),
    path('user/', get_user),
    path('search/', get_user_info),
    path('images/', get_images),
]
|
16,729 | bda2c9f19b585568b6b9e8d595305d81405aabb3 | # -*- coding: utf-8 -*-
# Copyright (c) 2008-2013 Infrae. All rights reserved.
# See also LICENSE.txt
# Zope 3
from zope.location.interfaces import ISite
from zope.traversing.interfaces import IContainmentRoot
from zope.interface import implementedBy
from zope import component
from five.localsitemanager.utils import get_parent
def findSite(container):
    """Return ``container`` itself when it is a site, otherwise delegate
    to ``findNextSite`` to locate the nearest site above it.
    """
    return container if ISite.providedBy(container) else findNextSite(container)
def findNextSite(container):
    """Return the next site above ``container``, or ``None``.

    Walks up the parent chain via ``get_parent``; stops with ``None`` at
    the containment root, when the chain ends, or when ``get_parent``
    raises ``TypeError`` (object without a resolvable parent).
    """
    while container:
        if IContainmentRoot.providedBy(container):
            return None
        try:
            container = get_parent(container)
            if container is None:
                return None
        except TypeError:
            return None
        if ISite.providedBy(container):
            return container
def queryAdapterOnClass(klass, interface=None, name=u''):
    """Query an adapter registered for the class itself rather than for
    instances of it; return ``None`` when no adapter applies or the
    factory produces ``None``.
    """
    registry = component.getGlobalSiteManager()
    provided = implementedBy(klass)
    factory = registry.adapters.lookup((provided,), interface, name)
    if factory is None:
        return None
    return factory(klass)
|
16,730 | 66d18583fbaf1de86a79b24fc6949f68ee6be838 | # coding:utf-8
import tkinter as tk
from tkinter import ttk
from tkinter import simpledialog
from MongoOperator import MongoOperator
# Shared MongoDB handle used by the GUI callbacks below (local server,
# ``web_news`` database, default collection ``test_collection``).
db = MongoOperator('127.0.0.1', 27017, 'web_news', 'test_collection')
# ##
def search_action():
    """Search button callback: regex-match the keyword against the
    ``content`` field of the selected collection and show the results.

    Reads the collection name and keyword from the two input widgets;
    writes status/result text into the shared ``display_text`` box.

    NOTE(review): ``resultItem.count()`` is deprecated in recent pymongo
    (use ``count_documents``) -- confirm against the installed version.
    """
    table_name = collection_name_text.get()
    if table_name == '':
        display_text.insert(tk.INSERT, u"请输入合法的表名!!!")
    else:
        keyWord = key_word_text.get()
        print(type(keyWord))
        if keyWord == '':
            display_text.insert(tk.INSERT, u"请输入至少一个关键字!!!")
            return
        # Substring match via Mongo $regex on the article body.
        expression = {"content": {"$regex": keyWord}}
        # expression = {"date": "1970-01-01"}
        resultItem = db.find(expression=expression, collection_name=table_name)
        result_count = resultItem.count()
        if result_count == 0:
            display_text.insert(tk.INSERT, u"对不起,没有查询到结果,请重试!!!")
        else:
            # Build one numbered line per hit: title + first 50 chars of body.
            content = u' 共找到%d篇相关文章 \n' % result_count
            idx = 1
            for item in resultItem:
                content += str(idx) + u"、 标题:" + item['title'] + u" 内容:" + item['content'][:50] + '\n'
                # content += str(idx) + u"、 标题:" + item['title'] + '\n'
                idx += 1
            clear_display()
            display_text.insert(tk.INSERT, content)
    # print(resultItem.count())
def clear_display():
    """Remove all text currently shown in the shared result box."""
    display_text.delete("1.0", "10000.end")
def cancel_action():
    """Cancel button callback: empty both input fields and the result box."""
    collection_name_text.delete(0, tk.END)
    key_word_text.delete(0, tk.END)
    clear_display()
# --- Main window -----------------------------------------------------------
root = tk.Tk()
root.title("CloudMinds")
root.geometry("550x550")
#root.resizable(False, False)
# Row 0: collection selector (label + combobox of known spider collections).
collection_name = tk.Label(root, text=u"数据库名: ", font=(10))
collection_name.grid(row=0, sticky=tk.NSEW, padx=50, pady=5, ipadx=10, ipady=0)
#collection_name.pack(side=tk.LEFT, fill=tk.NONE,expand=tk.YES,anchor=tk.CENTER)
# collection_name_text = tk.Entry(root, font=(10))
collection_name_text = ttk.Combobox(root, font=(10))
collection_name_text["values"] =("zsyh_spider", "tzj_spider", "rmw_spider", "jqrzj_spider", "hsjqr_spider", "zhjqr_spider")
collection_name_text.grid(row=0, column=1, sticky=tk.NSEW, padx=0, pady=40, ipadx=0)
#collection_name_text.pack(side=tk.LEFT, fill=tk.NONE,expand=tk.YES, ipadx=100, ipady=5,anchor=tk.CENTER)
# Row 1: keyword entry.
key_word = tk.Label(root, text=u"关键字: ", font=(10))
# key_word.pack(side=tk.LEFT, fill=tk.NONE, expand=tk.YES, anchor=tk.CENTER)
key_word.grid(row=1, sticky=tk.NSEW, padx=50, pady=5, ipadx=5, ipady=5)
# key_word_text = tk.Entry(root, font=(10))
key_word_text = tk.Entry(root, font=(10))
key_word_text.grid(row=1, column=1, sticky=tk.NSEW, padx=0, pady=40, ipadx=0)
# key_word_text.pack(side=tk.LEFT, fill=tk.NONE, expand=tk.YES, ipadx=100, ipady=5, anchor=tk.CENTER)
# Row 3: result display box written to by search_action/clear_display.
display_text = tk.Text(root, font=(10), width=40, height=10)
display_text.grid(row=3, columnspan=2, padx=30, pady=5,sticky=tk.NSEW)
# Row 5: action buttons wired to the callbacks above.
search_button = tk.Button(root, text=u'搜索', font=(10), command=search_action)
search_button.grid(row=5, sticky=tk.EW, padx=0, pady=5, ipadx=0)
cancel_button = tk.Button(root, text=u'取消', font=(10), command=cancel_action)
cancel_button.grid(row=5, column=1, sticky=tk.EW, padx=0, pady=5, ipadx=0)
root.mainloop()
|
16,731 | 442dba8d642c4e10d5eda84539b0008eb8c96436 | """Python Cookbook 2nd ed.
Chapter B, Bonus, recipe 5.
Raw data source: ftp://ftp.cmdl.noaa.gov/ccg/co2/trends/co2_mm_mlo.txt
Note: Output from this is used in Chapter 4 examples.
"""
from pathlib import Path
import csv
import json
from typing import Iterable, Iterator, Dict, TextIO
def non_comment_iter(source: TextIO) -> Iterator[str]:
    """Yield lines from *source*, skipping ``#`` comment lines.

    Fix: uses ``startswith`` instead of ``line[0]`` so an empty string in
    the iterable no longer raises ``IndexError``.
    """
    for line in source:
        if not line.startswith("#"):
            yield line
def raw_data_iter(source: Iterable[str]) -> Iterator[Dict[str, str]]:
    """Wrap *source* in a space-delimited ``csv.DictReader`` using the
    fixed column names of the CO2 dataset.
    """
    columns = [
        "year", "month", "decimal_date", "average",
        "interpolated", "trend", "days",
    ]
    return csv.DictReader(source, columns, delimiter=" ", skipinitialspace=True)
# from types import SimpleNamespace as Sample
from typing import NamedTuple
class Sample(NamedTuple):
    """One monthly CO2 reading from the co2_mm_mlo dataset."""
    year: int            # calendar year of the observation
    month: int           # calendar month (1-12)
    decimal_date: float  # year expressed as a decimal, e.g. 1958.208
    average: float       # monthly mean CO2 value (ppm) -- see source header
    interpolated: float  # mean with missing months filled in
    trend: float         # seasonally adjusted value -- TODO confirm vs. header
    days: int            # number of daily averages in the month
def cleanse(row: Dict[str, str]):
    """Convert one raw string-valued CSV row into a typed ``Sample``."""
    int_fields = ("year", "month", "days")
    float_fields = ("decimal_date", "average", "interpolated", "trend")
    values = {name: int(row[name]) for name in int_fields}
    values.update({name: float(row[name]) for name in float_fields})
    return Sample(**values)
def get_data(source_file: TextIO) -> Iterator[Sample]:
    """Full parsing pipeline: strip comment lines, split columns, and
    convert every row into a ``Sample``."""
    parsed_rows = raw_data_iter(non_comment_iter(source_file))
    return (cleanse(row) for row in parsed_rows)
from Chapter_B.chB_r03 import correlation
from Chapter_B.chB_r04 import regression
from statistics import mean, median
# Collect any test_* names for doctest/pytest discovery (none defined here).
__test__ = {n: v for n, v in locals().items() if n.startswith("test_")}
if __name__ == "__main__":
    # Load the interpolated CO2 series from the raw NOAA text file.
    source_path = Path("data") / "co2_mm_mlo.txt"
    with source_path.open() as source_file:
        co2_ppm = list(row.interpolated for row in get_data(source_file))
    print(len(co2_ppm))
    # Autocorrelation r_xx at lags 1..19 over two 60-sample windows
    # (only the first window is printed).
    for tau in range(1, 20):
        data = [{"x": x, "y": y} for x, y in zip(co2_ppm[:-tau], co2_ppm[tau:])]
        r_tau_0 = correlation(data[:60])
        r_tau_60 = correlation(data[60:120])
        print(f"r_{{xx}}(τ={tau:2d}) = {r_tau_0:6.3f}")
    # 12-sample (one year) window means -- despite the name these are
    # yearly, not monthly, means -- then a linear regression over them.
    monthly_mean = [
        {"x": x, "y": mean(co2_ppm[x : x + 12])} for x in range(0, len(co2_ppm), 12)
    ]
    alpha, beta = regression(monthly_mean)
    print(f"y = {alpha}+x*{beta}")
    r = correlation(monthly_mean)
    print(f"r^2 = {r**2}")
    for d in monthly_mean:
        print(f"{d} x={d['x']}, y={alpha+d['x']*beta}")
|
16,732 | eef3ab20f77983c3d8470461d754ac875d122a30 | #-*- coding:utf-8 -*-
"""
@author: M.Lee
@file: Statistics_Ques_Detail.py
@time: 2018/2/20-2018/4/21(final version)
"""
import nltk
from nltk.corpus import webtext as web
from nltk import *
from nltk.tokenize import RegexpTokenizer
from nltk.corpus import stopwords
from nltk.tag import UnigramTagger
from nltk.metrics import *
import string
'''import replacer
from replacer import RegexpReplacer
from replacer import RepeatReplacer'''
import linecache
import matplotlib.pyplot as plt
'''
Train Tagger
'''
from nltk.tag import DefaultTagger
from nltk.tag import UnigramTagger
from nltk.tag import BigramTagger
from nltk.corpus import treebank
# Backoff POS-tagger chain trained on the first 10k treebank sentences:
# bigram -> unigram -> default tag 'NN'.
train=treebank.tagged_sents()[:10000]
t0=DefaultTagger('NN')
t1=UnigramTagger(train,backoff=t0)
t2=BigramTagger(train,backoff=t1)
'''
Initialize
'''
# Corpus under study and running tag-frequency counters.
my_corp=web.sents(fileids='overheard.txt')
sent_count=0
ques_count=0
All_count=0
# Per-tag counters (NN_count/VB_count are kept but their tallying is
# commented out in the main loop below).
NN_count=0
NNS_count=0
NNP_count=0
VB_count=0
VBN_count=0
VBG_count=0
VBD_count=0
VBZ_count=0
#rep=RegexpReplacer()
# i indexes the 50-sentence reporting thresholds; plot axes fixed up front
# for live (interactive) scatter updates.
i=1
plt.axis([0, 15200, 0, 0.05])
plt.ion()
'''
Main
'''
# Main scan: walk the corpus, keep question sentences, POS-tag them,
# accumulate tag frequencies, and live-plot running ratios every 50
# sentences (stopping after 15000 sentences).
for eve_sent in my_corp:
    if my_corp[sent_count][-1]=='?':
        ques_count=ques_count+1
        # per-sentence state
        wordcount=0
        flag1=0
        # Strip a leading "speaker:" prefix if one is present.
        for word in my_corp[sent_count]:
            if my_corp[sent_count][wordcount]==':':
                flag1=1
                break
            wordcount=wordcount+1
        if flag1==1:
            curr_sent=my_corp[sent_count][wordcount+1:-1]
        else:
            curr_sent=my_corp[sent_count]
        tag_curr_sent=t2.tag(curr_sent)
        for words_tup in tag_curr_sent:
            # BUG FIX: the original condition was
            #   words_tup[1]!=(',' or '?' or '!' or '.')
            # which short-circuits to ``!= ','`` only; use membership so
            # all punctuation tags are excluded from the token total.
            if words_tup[1] not in (',', '?', '!', '.'):
                All_count=All_count+1
            if words_tup[1]=='NNP':
                NNP_count=NNP_count+1
            if words_tup[1]=='NNS':
                NNS_count=NNS_count+1
            if words_tup[1]=='VBN':
                VBN_count=VBN_count+1
            if words_tup[1]=='VBG':
                VBG_count=VBG_count+1
            if words_tup[1]=='VBD':
                VBD_count=VBD_count+1
            if words_tup[1]=='VBZ':
                VBZ_count=VBZ_count+1
    if sent_count==50*i:
        # BUG FIX: print the threshold we just reached; the original
        # incremented ``i`` first and so always reported the *next* one.
        print("--------------------------------\nIt is the ",50*i," 's line\n-----------------------------")
        i=i+1
        print("NNP:",NNP_count/All_count)
        print("NNS:",NNS_count/All_count)
        print("VBN:",VBN_count/All_count)
        print("VBG:",VBG_count/All_count)
        print("VBD:",VBD_count/All_count)
        print("VBZ:",VBZ_count/All_count)
        # One scatter point per tag at the current sentence index.
        type2=plt.scatter(sent_count,NNP_count/All_count,s=1,edgecolors='none',c='blue')
        type3=plt.scatter(sent_count,NNS_count/All_count,s=1,edgecolors='none',c='yellow')
        type6=plt.scatter(sent_count,VBN_count/All_count,s=1,edgecolors='none',c='purple')
        type7=plt.scatter(sent_count,VBG_count/All_count,s=1,edgecolors='none',c='green')
        type8=plt.scatter(sent_count,VBD_count/All_count,s=1,edgecolors='none',c='red')
        type9=plt.scatter(sent_count,VBZ_count/All_count,s=1,edgecolors='none',c='#00008B')
        plt.pause(0.001)
        # NOTE(review): legend labels u'VGD'/u'VGZ' look like typos for
        # VBD/VBZ; left unchanged to preserve the original figure output.
        plt.legend((type2, type3,type6,type7,type8,type9), ( u'NNP', u'NNS',u'VBN',u'VBG',u'VGD',u'VGZ'),loc='upper right')
    sent_count=sent_count+1
    if sent_count>15000:
        break
'''
Python 3.5.1 (v3.5.1:37a07cee5969, Dec 6 2015, 01:38:48) [MSC v.1900 32 bit (Intel)] on win32
Type "copyright", "credits" or "license()" for more information.
>>>
RESTART: D:\CollegeActivities\MainSubjects\自然科学类\2017大创1--NLP\Final-Result\1.Context\Statistics_Ques_Detail.py
--------------------------------
It is the 100 's line
-----------------------------
NNP: 0.0
NNS: 0.010869565217391304
VBN: 0.0
VBG: 0.010869565217391304
VBD: 0.010869565217391304
VBZ: 0.03260869565217391
Warning (from warnings module):
File "D:\python\lib\site-packages\matplotlib\backend_bases.py", line 2453
warnings.warn(str, mplDeprecation)
MatplotlibDeprecationWarning: Using default event loop until function specific to this GUI is implemented
--------------------------------
It is the 150 's line
-----------------------------
NNP: 0.0
NNS: 0.01680672268907563
VBN: 0.0
VBG: 0.012605042016806723
VBD: 0.008403361344537815
VBZ: 0.025210084033613446
--------------------------------
It is the 200 's line
-----------------------------
NNP: 0.0
NNS: 0.01818181818181818
VBN: 0.0
VBG: 0.01090909090909091
VBD: 0.01818181818181818
VBZ: 0.025454545454545455
--------------------------------
It is the 250 's line
-----------------------------
NNP: 0.0
NNS: 0.014925373134328358
VBN: 0.0
VBG: 0.011940298507462687
VBD: 0.01791044776119403
VBZ: 0.023880597014925373
--------------------------------
It is the 300 's line
-----------------------------
NNP: 0.002688172043010753
NNS: 0.013440860215053764
VBN: 0.0
VBG: 0.010752688172043012
VBD: 0.021505376344086023
VBZ: 0.024193548387096774
--------------------------------
It is the 350 's line
-----------------------------
NNP: 0.0071090047393364926
NNS: 0.016587677725118485
VBN: 0.002369668246445498
VBG: 0.011848341232227487
VBD: 0.02132701421800948
VBZ: 0.023696682464454975
--------------------------------
It is the 400 's line
-----------------------------
NNP: 0.006507592190889371
NNS: 0.019522776572668113
VBN: 0.0021691973969631237
VBG: 0.010845986984815618
VBD: 0.019522776572668113
VBZ: 0.021691973969631236
--------------------------------
It is the 450 's line
-----------------------------
NNP: 0.0057692307692307696
NNS: 0.01730769230769231
VBN: 0.0019230769230769232
VBG: 0.009615384615384616
VBD: 0.019230769230769232
VBZ: 0.025
--------------------------------
It is the 500 's line
-----------------------------
NNP: 0.01340033500837521
NNS: 0.01507537688442211
VBN: 0.0016750418760469012
VBG: 0.010050251256281407
VBD: 0.020100502512562814
VBZ: 0.023450586264656615
--------------------------------
It is the 550 's line
-----------------------------
NNP: 0.01832460732984293
NNS: 0.013089005235602094
VBN: 0.0013089005235602095
VBG: 0.009162303664921465
VBD: 0.01963350785340314
VBZ: 0.02617801047120419
--------------------------------
It is the 600 's line
-----------------------------
NNP: 0.01726263871763255
NNS: 0.016029593094944512
VBN: 0.0012330456226880395
VBG: 0.009864364981504316
VBD: 0.018495684340320593
VBZ: 0.025893958076448828
--------------------------------
It is the 650 's line
-----------------------------
NNP: 0.0170261066969353
NNS: 0.015891032917139614
VBN: 0.0011350737797956867
VBG: 0.01021566401816118
VBD: 0.019296254256526674
VBZ: 0.02383654937570942
--------------------------------
It is the 700 's line
-----------------------------
NNP: 0.017653167185877467
NNS: 0.01557632398753894
VBN: 0.004153686396677051
VBG: 0.01142263759086189
VBD: 0.017653167185877467
VBZ: 0.023883696780893044
--------------------------------
It is the 750 's line
-----------------------------
NNP: 0.01623686723973257
NNS: 0.015281757402101241
VBN: 0.0038204393505253103
VBG: 0.011461318051575931
VBD: 0.019102196752626553
VBZ: 0.02387774594078319
--------------------------------
It is the 800 's line
-----------------------------
NNP: 0.016183986371379896
NNS: 0.017035775127768313
VBN: 0.0034071550255536627
VBG: 0.01192504258943782
VBD: 0.018739352640545145
VBZ: 0.02129471890971039
--------------------------------
It is the 850 's line
-----------------------------
NNP: 0.015899581589958158
NNS: 0.016736401673640166
VBN: 0.0033472803347280333
VBG: 0.011715481171548118
VBD: 0.018410041841004185
VBZ: 0.02092050209205021
--------------------------------
It is the 900 's line
-----------------------------
NNP: 0.016483516483516484
NNS: 0.02040816326530612
VBN: 0.0031397174254317113
VBG: 0.011773940345368918
VBD: 0.01805337519623234
VBZ: 0.02119309262166405
--------------------------------
It is the 950 's line
-----------------------------
NNP: 0.018018018018018018
NNS: 0.02252252252252252
VBN: 0.003003003003003003
VBG: 0.01126126126126126
VBD: 0.018018018018018018
VBZ: 0.021021021021021023
--------------------------------
It is the 1000 's line
-----------------------------
NNP: 0.01858470335954253
NNS: 0.021443888491779844
VBN: 0.0035739814152966403
VBG: 0.01143674052894925
VBD: 0.01929949964260186
VBZ: 0.020014295925661188
--------------------------------
It is the 1050 's line
-----------------------------
NNP: 0.01917989417989418
NNS: 0.021825396825396824
VBN: 0.003968253968253968
VBG: 0.011243386243386243
VBD: 0.017857142857142856
VBZ: 0.021164021164021163
--------------------------------
It is the 1100 's line
-----------------------------
NNP: 0.018315018315018316
NNS: 0.020757020757020756
VBN: 0.003663003663003663
VBG: 0.01098901098901099
VBD: 0.017704517704517704
VBZ: 0.019536019536019536
--------------------------------
It is the 1150 's line
-----------------------------
NNP: 0.018028846153846152
NNS: 0.020432692307692308
VBN: 0.003605769230769231
VBG: 0.01141826923076923
VBD: 0.018028846153846152
VBZ: 0.019230769230769232
--------------------------------
It is the 1200 's line
-----------------------------
NNP: 0.019506597819850834
NNS: 0.019506597819850834
VBN: 0.0034423407917383822
VBG: 0.011474469305794608
VBD: 0.01778542742398164
VBZ: 0.019506597819850834
--------------------------------
It is the 1250 's line
-----------------------------
NNP: 0.02021276595744681
NNS: 0.02021276595744681
VBN: 0.00425531914893617
VBG: 0.011702127659574468
VBD: 0.01648936170212766
VBZ: 0.018617021276595744
--------------------------------
It is the 1300 's line
-----------------------------
NNP: 0.019397651863195507
NNS: 0.019397651863195507
VBN: 0.00408371618172537
VBG: 0.012761613067891782
VBD: 0.016845329249617153
VBZ: 0.017866258295048495
--------------------------------
It is the 1350 's line
-----------------------------
NNP: 0.02014098690835851
NNS: 0.019133937562940583
VBN: 0.004028197381671702
VBG: 0.012588116817724069
VBD: 0.017119838872104734
VBZ: 0.017623363544813697
--------------------------------
It is the 1400 's line
-----------------------------
NNP: 0.020124580737901295
NNS: 0.01820795400095831
VBN: 0.003833253473885961
VBG: 0.012937230474365118
VBD: 0.018687110685194058
VBZ: 0.01772879731672257
--------------------------------
It is the 1450 's line
-----------------------------
NNP: 0.02010968921389397
NNS: 0.018738574040219377
VBN: 0.003656307129798903
VBG: 0.013254113345521023
VBD: 0.018281535648994516
VBZ: 0.01736745886654479
--------------------------------
It is the 1500 's line
-----------------------------
NNP: 0.02030008826125331
NNS: 0.018093556928508385
VBN: 0.00353045013239188
VBG: 0.013680494263018535
VBD: 0.019417475728155338
VBZ: 0.0176522506619594
--------------------------------
It is the 1550 's line
-----------------------------
NNP: 0.02079722703639515
NNS: 0.018197573656845753
VBN: 0.0034662045060658577
VBG: 0.01386481802426343
VBD: 0.01949740034662045
VBZ: 0.01776429809358752
--------------------------------
It is the 1600 's line
-----------------------------
NNP: 0.020219039595619208
NNS: 0.017691659646166806
VBN: 0.003369839932603201
VBG: 0.013900589721988205
VBD: 0.019376579612468407
VBZ: 0.017691659646166806
--------------------------------
It is the 1650 's line
-----------------------------
NNP: 0.019834710743801654
NNS: 0.017768595041322315
VBN: 0.00371900826446281
VBG: 0.013636363636363636
VBD: 0.019421487603305785
VBZ: 0.01818181818181818
--------------------------------
It is the 1700 's line
-----------------------------
NNP: 0.020247469066366704
NNS: 0.01799775028121485
VBN: 0.0037495313085864268
VBG: 0.013498312710911136
VBD: 0.018747656542932135
VBZ: 0.018747656542932135
--------------------------------
It is the 1750 's line
-----------------------------
NNP: 0.020124405415294547
NNS: 0.01756311745334797
VBN: 0.003658982802780827
VBG: 0.01353823637028906
VBD: 0.018294914013904134
VBZ: 0.0190267105744603
--------------------------------
It is the 1800 's line
-----------------------------
NNP: 0.019400352733686066
NNS: 0.01728395061728395
VBN: 0.004232804232804233
VBG: 0.013403880070546737
VBD: 0.019400352733686066
VBZ: 0.01869488536155203
--------------------------------
It is the 1850 's line
-----------------------------
NNP: 0.019337016574585635
NNS: 0.01761049723756906
VBN: 0.004488950276243094
VBG: 0.013121546961325966
VBD: 0.019337016574585635
VBZ: 0.018646408839779006
--------------------------------
It is the 1900 's line
-----------------------------
NNP: 0.019471947194719473
NNS: 0.016831683168316833
VBN: 0.0046204620462046205
VBG: 0.01287128712871287
VBD: 0.01914191419141914
VBZ: 0.018151815181518153
--------------------------------
It is the 1950 's line
-----------------------------
NNP: 0.019426751592356687
NNS: 0.01751592356687898
VBN: 0.004777070063694267
VBG: 0.012738853503184714
VBD: 0.019745222929936305
VBZ: 0.01751592356687898
--------------------------------
It is the 2000 's line
-----------------------------
NNP: 0.019943907759426612
NNS: 0.017450919289498285
VBN: 0.004674353381115612
VBG: 0.012464942349641633
VBD: 0.019632284200685572
VBZ: 0.017762542848239325
--------------------------------
It is the 2050 's line
-----------------------------
NNP: 0.019714892326357293
NNS: 0.017591750075826508
VBN: 0.004549590536851683
VBG: 0.0124355474673946
VBD: 0.01910828025477707
VBZ: 0.01789505611161662
--------------------------------
It is the 2100 's line
-----------------------------
NNP: 0.019287833827893175
NNS: 0.017210682492581602
VBN: 0.004451038575667656
VBG: 0.012166172106824925
VBD: 0.018991097922848664
VBZ: 0.018397626112759646
--------------------------------
It is the 2150 's line
-----------------------------
NNP: 0.019169329073482427
NNS: 0.017426662794074933
VBN: 0.004647110078419983
VBG: 0.012198663955852455
VBD: 0.019169329073482427
VBZ: 0.018007551553877434
--------------------------------
It is the 2200 's line
-----------------------------
NNP: 0.019400855920114122
NNS: 0.017403708987161197
VBN: 0.00456490727532097
VBG: 0.011982881597717546
VBD: 0.018830242510699
VBZ: 0.017974322396576318
--------------------------------
It is the 2250 's line
-----------------------------
NNP: 0.01893622946254525
NNS: 0.017543859649122806
VBN: 0.004734057365636313
VBG: 0.011695906432748537
VBD: 0.018657755499860762
VBZ: 0.017543859649122806
--------------------------------
It is the 2300 's line
-----------------------------
NNP: 0.018970189701897018
NNS: 0.01707317073170732
VBN: 0.004878048780487805
VBG: 0.011924119241192412
VBD: 0.018970189701897018
VBZ: 0.017615176151761516
--------------------------------
It is the 2350 's line
-----------------------------
NNP: 0.018494055482166448
NNS: 0.017173051519154558
VBN: 0.0047556142668428005
VBG: 0.01215323645970938
VBD: 0.018758256274768823
VBZ: 0.017437252311756937
--------------------------------
It is the 2400 's line
-----------------------------
NNP: 0.018336776859504134
NNS: 0.017045454545454544
VBN: 0.0046487603305785125
VBG: 0.012396694214876033
VBD: 0.01962809917355372
VBZ: 0.017045454545454544
--------------------------------
It is the 2450 's line
-----------------------------
NNP: 0.018163471241170535
NNS: 0.017658930373360242
VBN: 0.00479313824419778
VBG: 0.012865792129162463
VBD: 0.019424823410696266
VBZ: 0.016902119071644805
--------------------------------
It is the 2500 's line
-----------------------------
NNP: 0.017813567593948268
NNS: 0.017325524646168863
VBN: 0.005124450951683748
VBG: 0.012933138116154222
VBD: 0.019277696437286482
VBZ: 0.01683748169838946
--------------------------------
It is the 2550 's line
-----------------------------
NNP: 0.01788235294117647
NNS: 0.016941176470588234
VBN: 0.004941176470588235
VBG: 0.012470588235294117
VBD: 0.019058823529411763
VBZ: 0.01647058823529412
--------------------------------
It is the 2600 's line
-----------------------------
NNP: 0.01781582600647848
NNS: 0.016658954187875982
VBN: 0.005321610365571495
VBG: 0.012956964368347987
VBD: 0.018741323461360482
VBZ: 0.016427579824155485
--------------------------------
It is the 2650 's line
-----------------------------
NNP: 0.017603249830737983
NNS: 0.016700519070187318
VBN: 0.005416384563303994
VBG: 0.013089596027984653
VBD: 0.019183028661701646
VBZ: 0.016249153689911984
--------------------------------
It is the 2700 's line
-----------------------------
NNP: 0.017485613103142984
NNS: 0.017264276228419653
VBN: 0.005312084993359893
VBG: 0.013058875608676405
VBD: 0.019034971226206288
VBZ: 0.01615759185480301
--------------------------------
It is the 2750 's line
-----------------------------
NNP: 0.01707738867271941
NNS: 0.016861219195849545
VBN: 0.005188067444876783
VBG: 0.01297016861219196
VBD: 0.019022913964548204
VBZ: 0.015780371811500216
--------------------------------
It is the 2800 's line
-----------------------------
NNP: 0.017079419299743808
NNS: 0.016652433817250213
VBN: 0.005123825789923143
VBG: 0.012809564474807857
VBD: 0.019000853970964987
VBZ: 0.015798462852263023
--------------------------------
It is the 2850 's line
-----------------------------
NNP: 0.01689545934530095
NNS: 0.01689545934530095
VBN: 0.005279831045406547
VBG: 0.012671594508975714
VBD: 0.01900739176346357
VBZ: 0.01562829989440338
--------------------------------
It is the 2900 's line
-----------------------------
NNP: 0.016587186398507155
NNS: 0.016794526228488493
VBN: 0.005390835579514825
VBG: 0.012647729628861704
VBD: 0.018867924528301886
VBZ: 0.015757827078581796
--------------------------------
It is the 2950 's line
-----------------------------
NNP: 0.016724454415663878
NNS: 0.017132367937997144
VBN: 0.005506832551499082
VBG: 0.012441362431164593
VBD: 0.018967978788496837
VBZ: 0.01611258413216398
--------------------------------
It is the 3000 's line
-----------------------------
NNP: 0.01642957323181727
NNS: 0.017231015828491285
VBN: 0.0054097375275495895
VBG: 0.012422360248447204
VBD: 0.019234622320176316
VBZ: 0.01582849128431176
--------------------------------
It is the 3050 's line
-----------------------------
NNP: 0.01619913077834848
NNS: 0.017186882655077045
VBN: 0.005333860134334255
VBG: 0.012248123271434215
VBD: 0.018964836033188465
VBZ: 0.015804030027657054
--------------------------------
It is the 3100 's line
-----------------------------
NNP: 0.01673477330219887
NNS: 0.016929363689433742
VBN: 0.005643121229811247
VBG: 0.012064604008561977
VBD: 0.018875267561782448
VBZ: 0.01576182136602452
--------------------------------
It is the 3150 's line
-----------------------------
NNP: 0.01719526175009553
NNS: 0.01700420328620558
VBN: 0.00573175391669851
VBG: 0.011845624761176921
VBD: 0.019105846388995033
VBZ: 0.015475735575085976
--------------------------------
It is the 3200 's line
-----------------------------
NNP: 0.0177836016473231
NNS: 0.01684762261325346
VBN: 0.005615874204417821
VBG: 0.011980531636091352
VBD: 0.018719580681392737
VBZ: 0.01647323099962561
--------------------------------
It is the 3250 's line
-----------------------------
NNP: 0.017781070568623818
NNS: 0.016669753658084832
VBN: 0.0055565845526949435
VBG: 0.012224486015928876
VBD: 0.018521948508983144
VBZ: 0.016669753658084832
--------------------------------
It is the 3300 's line
-----------------------------
NNP: 0.01772013153087322
NNS: 0.01680672268907563
VBN: 0.005480453050785532
VBG: 0.01205699671172817
VBD: 0.018268176835951774
VBZ: 0.016624040920716114
--------------------------------
It is the 3350 's line
-----------------------------
NNP: 0.017531176576902223
NNS: 0.016627507681185615
VBN: 0.005602747153442979
VBG: 0.012109163202602566
VBD: 0.01843484547261883
VBZ: 0.016988975239472258
--------------------------------
It is the 3400 's line
-----------------------------
NNP: 0.01737106017191977
NNS: 0.0164756446991404
VBN: 0.0057306590257879654
VBG: 0.012177650429799427
VBD: 0.018445558739255016
VBZ: 0.01683381088825215
--------------------------------
It is the 3450 's line
-----------------------------
NNP: 0.017458100558659217
NNS: 0.016410614525139665
VBN: 0.005761173184357542
VBG: 0.012395251396648045
VBD: 0.017981843575418995
VBZ: 0.01675977653631285
--------------------------------
It is the 3500 's line
-----------------------------
NNP: 0.017534722222222222
NNS: 0.016666666666666666
VBN: 0.005729166666666666
VBG: 0.012326388888888888
VBD: 0.018055555555555554
VBZ: 0.016666666666666666
--------------------------------
It is the 3550 's line
-----------------------------
NNP: 0.01749523644552226
NNS: 0.016629135631387494
VBN: 0.005716265373289451
VBG: 0.012298631560713667
VBD: 0.018014896934003117
VBZ: 0.016629135631387494
--------------------------------
It is the 3600 's line
-----------------------------
NNP: 0.017359917497421794
NNS: 0.016672396012375388
VBN: 0.005672052251632863
VBG: 0.012375386730835339
VBD: 0.0178755586112066
VBZ: 0.016500515641113784
--------------------------------
It is the 3650 's line
-----------------------------
NNP: 0.017466508394098693
NNS: 0.016449041885704594
VBN: 0.005765643547566559
VBG: 0.012209598100729184
VBD: 0.017975241648295743
VBZ: 0.016788197388502627
--------------------------------
It is the 3700 's line
-----------------------------
NNP: 0.017546791443850268
NNS: 0.01620989304812834
VBN: 0.005681818181818182
VBG: 0.012366310160427808
VBD: 0.01821524064171123
VBZ: 0.016544117647058824
--------------------------------
It is the 3750 's line
-----------------------------
NNP: 0.017509084902543774
NNS: 0.016022464486290057
VBN: 0.0056161215725140405
VBG: 0.012223323422530559
VBD: 0.018334985133795837
VBZ: 0.016518004625041296
--------------------------------
It is the 3800 's line
-----------------------------
NNP: 0.01762114537444934
NNS: 0.015989557839778103
VBN: 0.005710556371349323
VBG: 0.01207374775656714
VBD: 0.018436939141784958
VBZ: 0.016642192853646598
--------------------------------
It is the 3850 's line
-----------------------------
NNP: 0.01726894787336105
NNS: 0.01566997121842021
VBN: 0.0059162136232811
VBG: 0.0118324272465622
VBD: 0.019347617524784137
VBZ: 0.01646945954589063
--------------------------------
It is the 3900 's line
-----------------------------
NNP: 0.016962462698288047
NNS: 0.015548924140097377
VBN: 0.005811214072561646
VBG: 0.011936547824721219
VBD: 0.019161300455473537
VBZ: 0.01617716349929323
--------------------------------
It is the 3950 's line
-----------------------------
NNP: 0.016835541699142635
NNS: 0.015432579890880748
VBN: 0.005923616523772409
VBG: 0.01215900233826968
VBD: 0.019017926734216678
VBZ: 0.01652377240841777
--------------------------------
It is the 4000 's line
-----------------------------
NNP: 0.016827290806180204
NNS: 0.01529753709652746
VBN: 0.005813064096680434
VBG: 0.012085054306256693
VBD: 0.01896894599969405
VBZ: 0.016521340064249657
--------------------------------
It is the 4050 's line
-----------------------------
NNP: 0.016722408026755852
NNS: 0.015202189115232594
VBN: 0.005776831863788386
VBG: 0.012009729401033748
VBD: 0.018850714502888416
VBZ: 0.0164183642444512
--------------------------------
It is the 4100 's line
-----------------------------
NNP: 0.016568047337278107
NNS: 0.014940828402366864
VBN: 0.0057692307692307696
VBG: 0.011686390532544378
VBD: 0.01982248520710059
VBZ: 0.016124260355029587
--------------------------------
It is the 4150 's line
-----------------------------
NNP: 0.016827626573017267
NNS: 0.015071700321919812
VBN: 0.005853087503658179
VBG: 0.011559847819724904
VBD: 0.0196078431372549
VBZ: 0.016095990635059995
--------------------------------
It is the 4200 's line
-----------------------------
NNP: 0.016775126536514823
NNS: 0.014895155459146782
VBN: 0.005784526391901663
VBG: 0.011569052783803326
VBD: 0.019667389732465655
VBZ: 0.016052060737527116
--------------------------------
It is the 4250 's line
-----------------------------
NNP: 0.016652311225954636
NNS: 0.014786103933390755
VBN: 0.005742176284811944
VBG: 0.011484352569623888
VBD: 0.019666953775480906
VBZ: 0.015934539190353144
--------------------------------
It is the 4300 's line
-----------------------------
NNP: 0.016418966737438076
NNS: 0.014578910120311395
VBN: 0.005803255484784147
VBG: 0.011606510969568294
VBD: 0.019815994338287332
VBZ: 0.015711252653927813
--------------------------------
It is the 4350 's line
-----------------------------
NNP: 0.016491963661774984
NNS: 0.01453529000698812
VBN: 0.00573025856044724
VBG: 0.011879804332634521
VBD: 0.019846261355695317
VBZ: 0.01551362683438155
--------------------------------
It is the 4400 's line
-----------------------------
NNP: 0.016517549896765314
NNS: 0.014315209910529939
VBN: 0.005643496214728149
VBG: 0.01169993117687543
VBD: 0.019545767377838953
VBZ: 0.015554026152787337
--------------------------------
It is the 4450 's line
-----------------------------
NNP: 0.016339869281045753
NNS: 0.014297385620915032
VBN: 0.005718954248366013
VBG: 0.011710239651416121
VBD: 0.0196078431372549
VBZ: 0.015386710239651416
--------------------------------
It is the 4500 's line
-----------------------------
NNP: 0.016375838926174495
NNS: 0.014228187919463087
VBN: 0.0057718120805369125
VBG: 0.011543624161073825
VBD: 0.019328859060402683
VBZ: 0.015570469798657718
--------------------------------
It is the 4550 's line
-----------------------------
NNP: 0.016779864163004393
NNS: 0.014249567186043414
VBN: 0.0057264615794380075
VBG: 0.011586096683979225
VBD: 0.019176987614862164
VBZ: 0.01558130243707551
--------------------------------
It is the 4600 's line
-----------------------------
NNP: 0.01671530910055718
NNS: 0.014194746617139825
VBN: 0.005704430883523481
VBG: 0.011541522950384717
VBD: 0.01923587158397453
VBZ: 0.01552135845051738
--------------------------------
It is the 4650 's line
-----------------------------
NNP: 0.016329704510108865
NNS: 0.01412649040953862
VBN: 0.005702436495593572
VBG: 0.011404872991187144
VBD: 0.01918092275790565
VBZ: 0.015292897874546397
--------------------------------
It is the 4700 's line
-----------------------------
NNP: 0.01687332225488943
NNS: 0.014316758276875879
VBN: 0.005880097149431164
VBG: 0.011504537901060975
VBD: 0.019046401636200947
VBZ: 0.015467212066981977
--------------------------------
It is the 4750 's line
-----------------------------
NNP: 0.016972592406336435
NNS: 0.014206688458637163
VBN: 0.005908976615539351
VBG: 0.011440784510937893
VBD: 0.018984158913754087
VBZ: 0.015715363339200403
--------------------------------
It is the 4800 's line
-----------------------------
NNP: 0.017116441779110446
NNS: 0.01424287856071964
VBN: 0.005872063968015992
VBG: 0.011369315342328835
VBD: 0.01899050474762619
VBZ: 0.015617191404297851
--------------------------------
It is the 4850 's line
-----------------------------
NNP: 0.01707709441900755
NNS: 0.014107164954832323
VBN: 0.005816111867343151
VBG: 0.011260982551664397
VBD: 0.018809553273109764
VBZ: 0.015839623808934538
--------------------------------
It is the 4900 's line
-----------------------------
NNP: 0.016976062530532486
NNS: 0.014167073766487542
VBN: 0.005740107474352711
VBG: 0.011358085002442599
VBD: 0.018685881778212017
VBZ: 0.015999022960429897
--------------------------------
It is the 4950 's line
-----------------------------
NNP: 0.016873027433843165
NNS: 0.014202476329206118
VBN: 0.005826656955571741
VBG: 0.011289147851420248
VBD: 0.018572469045884922
VBZ: 0.015901917941247876
--------------------------------
It is the 5000 's line
-----------------------------
NNP: 0.01693490271438866
NNS: 0.014172471775162144
VBN: 0.005765073264472736
VBG: 0.011410040835935623
VBD: 0.018376171030506847
VBZ: 0.015974057170309874
--------------------------------
It is the 5050 's line
-----------------------------
NNP: 0.01683984235041204
NNS: 0.014092917711692344
VBN: 0.00573271228950197
VBG: 0.01146542457900394
VBD: 0.01827302042278753
VBZ: 0.01588439030216171
--------------------------------
It is the 5100 's line
-----------------------------
NNP: 0.016785714285714286
NNS: 0.014047619047619048
VBN: 0.005714285714285714
VBG: 0.011428571428571429
VBD: 0.018333333333333333
VBZ: 0.015833333333333335
--------------------------------
It is the 5150 's line
-----------------------------
NNP: 0.016717683070402636
NNS: 0.014127619496114905
VBN: 0.0056510477984459614
VBG: 0.011655286084294796
VBD: 0.01824817518248175
VBZ: 0.016129032258064516
--------------------------------
It is the 5200 's line
-----------------------------
NNP: 0.016713417484805985
NNS: 0.014375876577840112
VBN: 0.00584385226741468
VBG: 0.01168770453482936
VBD: 0.0182328190743338
VBZ: 0.016245909303412808
--------------------------------
It is the 5250 's line
-----------------------------
NNP: 0.016829971181556196
NNS: 0.014293948126801152
VBN: 0.005763688760806916
VBG: 0.01164265129682997
VBD: 0.018213256484149856
VBZ: 0.016138328530259365
--------------------------------
It is the 5300 's line
-----------------------------
NNP: 0.01683270353830299
NNS: 0.014199015229588916
VBN: 0.005725409366769724
VBG: 0.011679835108210237
VBD: 0.018321309973663116
VBZ: 0.01603114622695523
--------------------------------
It is the 5350 's line
-----------------------------
NNP: 0.01691451924168464
NNS: 0.014303553184243388
VBN: 0.005789533431717561
VBG: 0.011579066863435123
VBD: 0.018276762402088774
VBZ: 0.01589283687138154
--------------------------------
It is the 5400 's line
-----------------------------
NNP: 0.017067145744441948
NNS: 0.014260049404895576
VBN: 0.005838760386256457
VBG: 0.011565236918931058
VBD: 0.01841455198742421
VBZ: 0.01616887491578711
--------------------------------
It is the 5450 's line
-----------------------------
NNP: 0.017170710091946383
NNS: 0.014401240722277612
VBN: 0.005871275063697796
VBG: 0.01152099257782209
VBD: 0.018389276614600644
VBZ: 0.016173701118865624
--------------------------------
It is the 5500 's line
-----------------------------
NNP: 0.017036711365135196
NNS: 0.014288854693339195
VBN: 0.005935370411079358
VBG: 0.011431083754671357
VBD: 0.018465596834469113
VBZ: 0.016157397230160474
--------------------------------
It is the 5550 's line
-----------------------------
NNP: 0.01693443334780721
NNS: 0.014329135909683021
VBN: 0.005861919235779418
VBG: 0.011398176291793313
VBD: 0.0182370820668693
VBZ: 0.01617455492835432
--------------------------------
It is the 5600 's line
-----------------------------
NNP: 0.016887167903624826
NNS: 0.01430569000752931
VBN: 0.005808325266214908
VBG: 0.011401527374421856
VBD: 0.018393030009680542
VBZ: 0.016456921587608905
--------------------------------
It is the 5650 's line
-----------------------------
NNP: 0.016827438370846732
NNS: 0.014362272240085745
VBN: 0.005787781350482315
VBG: 0.011468381564844587
VBD: 0.018435155412647373
VBZ: 0.01639871382636656
--------------------------------
It is the 5700 's line
-----------------------------
NNP: 0.017234087544935505
NNS: 0.014167900190315077
VBN: 0.0057094523155001055
VBG: 0.011313174032565024
VBD: 0.018608585324592936
VBZ: 0.016493973355889195
--------------------------------
It is the 5750 's line
-----------------------------
NNP: 0.018071659876736656
NNS: 0.014102162331557505
VBN: 0.005849785856053484
VBG: 0.011177269403530764
VBD: 0.01890734357045858
VBZ: 0.01629583202757756
--------------------------------
It is the 5800 's line
-----------------------------
NNP: 0.017871900826446283
NNS: 0.013946280991735538
VBN: 0.005785123966942148
VBG: 0.011260330578512397
VBD: 0.01921487603305785
VBZ: 0.016322314049586777
--------------------------------
It is the 5850 's line
-----------------------------
NNP: 0.017871815940838125
NNS: 0.013968775677896467
VBN: 0.00585456039441249
VBG: 0.01119556285949055
VBD: 0.019309778142974528
VBZ: 0.016433853738701727
--------------------------------
It is the 5900 's line
-----------------------------
NNP: 0.01782258885833588
NNS: 0.013850697627049598
VBN: 0.0058050717995722576
VBG: 0.011100926774620634
VBD: 0.0194520826968123
VBZ: 0.016498625114573784
--------------------------------
It is the 5950 's line
-----------------------------
NNP: 0.01797253634894992
NNS: 0.013731825525040387
VBN: 0.0057552504038772215
VBG: 0.011106623586429725
VBD: 0.01938610662358643
VBZ: 0.016558966074313407
--------------------------------
It is the 6000 's line
-----------------------------
NNP: 0.018178166114291454
NNS: 0.01365873254996485
VBN: 0.005724615848147032
VBG: 0.011047504268353922
VBD: 0.019282916541126845
VBZ: 0.016571256402530883
--------------------------------
It is the 6050 's line
-----------------------------
NNP: 0.018201710761885818
NNS: 0.013526954445991645
VBN: 0.00566938531927591
VBG: 0.011139844837875473
VBD: 0.0197931171672966
VBZ: 0.016411378555798686
--------------------------------
It is the 6100 's line
-----------------------------
NNP: 0.018359490672194254
NNS: 0.013522850656401145
VBN: 0.005725002467673477
VBG: 0.011153884118053499
VBD: 0.019938801697759353
VBZ: 0.016385351890237883
--------------------------------
It is the 6150 's line
-----------------------------
NNP: 0.018471709119191132
NNS: 0.01370795255687342
VBN: 0.005930390822477153
VBG: 0.010985805949834726
VBD: 0.019735562901030526
VBZ: 0.016430099163912113
--------------------------------
It is the 6200 's line
-----------------------------
NNP: 0.01850067450375795
NNS: 0.013682790518404316
VBN: 0.005974176141838505
VBG: 0.010984775486606282
VBD: 0.019753324339949894
VBZ: 0.016284447870495277
--------------------------------
It is the 6250 's line
-----------------------------
NNP: 0.018427872156636913
NNS: 0.013724925616661868
VBN: 0.006046645551396487
VBG: 0.011037527593818985
VBD: 0.01996352816968999
VBZ: 0.016220366637873115
--------------------------------
It is the 6300 's line
-----------------------------
NNP: 0.018304890838020782
NNS: 0.013633330155400896
VBN: 0.00600629230622557
VBG: 0.010963866908189531
VBD: 0.01992563638097054
VBZ: 0.01630279340261226
--------------------------------
It is the 6350 's line
-----------------------------
NNP: 0.01805189921022941
NNS: 0.013538924407672057
VBN: 0.005923279428356525
VBG: 0.010812335464460324
VBD: 0.02002632568634825
VBZ: 0.016547574276043624
--------------------------------
It is the 6400 's line
-----------------------------
NNP: 0.018102080806195762
NNS: 0.013529905757208175
VBN: 0.006065130167024354
VBG: 0.010917234300643838
VBD: 0.020061584398619017
VBZ: 0.016515815993281703
--------------------------------
It is the 6450 's line
-----------------------------
NNP: 0.018114259173246633
NNS: 0.013469577333952624
VBN: 0.006038086391082211
VBG: 0.01096144914073386
VBD: 0.020436600092893636
VBZ: 0.01644217371110079
--------------------------------
It is the 6500 's line
-----------------------------
NNP: 0.01811292856482765
NNS: 0.013399870621938823
VBN: 0.006006838554662231
VBG: 0.010904722299232972
VBD: 0.020515663986692544
VBZ: 0.01635708344884946
--------------------------------
It is the 6550 's line
-----------------------------
NNP: 0.018032036613272313
NNS: 0.013272311212814645
VBN: 0.005949656750572082
VBG: 0.010892448512585813
VBD: 0.020778032036613273
VBZ: 0.01620137299771167
--------------------------------
It is the 6600 's line
-----------------------------
NNP: 0.01789444999545826
NNS: 0.013171041874829684
VBN: 0.00608592969388682
VBG: 0.010900172586065945
VBD: 0.020891997456626395
VBZ: 0.01625942410754837
--------------------------------
It is the 6650 's line
-----------------------------
NNP: 0.017877998382894618
NNS: 0.013386038990207528
VBN: 0.006109064774054443
VBG: 0.010870541730302758
VBD: 0.02093253076992184
VBZ: 0.016530410565088492
--------------------------------
It is the 6700 's line
-----------------------------
NNP: 0.017779358165170237
NNS: 0.01342341541470353
VBN: 0.006044981776157881
VBG: 0.010845408480753846
VBD: 0.02097964263490088
VBZ: 0.01653480309360832
--------------------------------
It is the 6750 's line
-----------------------------
NNP: 0.017691287041132243
NNS: 0.013445378151260505
VBN: 0.006015037593984963
VBG: 0.011057054400707651
VBD: 0.020875718708536047
VBZ: 0.01662980981866431
--------------------------------
It is the 6800 's line
-----------------------------
NNP: 0.017811704834605598
NNS: 0.013336843028867246
VBN: 0.006054224795998947
VBG: 0.011055540931824164
VBD: 0.020707203650083356
VBZ: 0.016671053786084057
--------------------------------
It is the 6850 's line
-----------------------------
NNP: 0.018258913118726204
NNS: 0.0132398753894081
VBN: 0.006143994461751471
VBG: 0.010989961924541363
VBD: 0.02068189685012115
VBZ: 0.0165282104534441
--------------------------------
It is the 6900 's line
-----------------------------
NNP: 0.01867077766358342
NNS: 0.013103802672147996
VBN: 0.006166495375128468
VBG: 0.010962658444672833
VBD: 0.020897567660157587
VBZ: 0.01661527920520726
--------------------------------
It is the 6950 's line
-----------------------------
NNP: 0.018717709833149826
NNS: 0.013381892097908021
VBN: 0.006182772931311933
VBG: 0.010841026509697637
VBD: 0.020835097823325145
VBZ: 0.016600321842974507
--------------------------------
It is the 7000 's line
-----------------------------
NNP: 0.01871433366901645
NNS: 0.013259483048002686
VBN: 0.0062101376300772075
VBG: 0.0108257804632427
VBD: 0.021064115474991606
VBZ: 0.016532393420610945
--------------------------------
It is the 7050 's line
-----------------------------
NNP: 0.018574046310178245
NNS: 0.013493253373313344
VBN: 0.00624687656171914
VBG: 0.010911211061136098
VBD: 0.02115608862235549
VBZ: 0.01699150424787606
--------------------------------
It is the 7100 's line
-----------------------------
NNP: 0.01847249834327369
NNS: 0.01350231941683234
VBN: 0.00621272365805169
VBG: 0.010851557322730285
VBD: 0.021040424121935058
VBZ: 0.017064280980781974
--------------------------------
It is the 7150 's line
-----------------------------
NNP: 0.01835542019919335
NNS: 0.013416742118692896
VBN: 0.006173347600625566
VBG: 0.010865091777100997
VBD: 0.020989381842126925
VBZ: 0.017285373281751584
--------------------------------
It is the 7200 's line
-----------------------------
NNP: 0.018404907975460124
NNS: 0.013333333333333334
VBN: 0.006134969325153374
VBG: 0.01096114519427403
VBD: 0.020858895705521473
VBZ: 0.01734151329243354
--------------------------------
It is the 7250 's line
-----------------------------
NNP: 0.018468578900057205
NNS: 0.013320258233227099
VBN: 0.006128953174797744
VBG: 0.01095039633897197
VBD: 0.02083844079431233
VBZ: 0.017324507640761624
--------------------------------
It is the 7300 's line
-----------------------------
NNP: 0.018391927083333332
NNS: 0.013264973958333334
VBN: 0.006103515625
VBG: 0.010904947916666666
VBD: 0.020833333333333332
VBZ: 0.017415364583333332
--------------------------------
It is the 7350 's line
-----------------------------
NNP: 0.018545513443472626
NNS: 0.01336248785228377
VBN: 0.006154842889536767
VBG: 0.010851959831551669
VBD: 0.020813087139617752
VBZ: 0.017411726595400065
--------------------------------
It is the 7400 's line
-----------------------------
NNP: 0.018563357546408393
NNS: 0.013317191283292978
VBN: 0.0062146892655367235
VBG: 0.010815173527037933
VBD: 0.020823244552058112
VBZ: 0.01735270379338176
--------------------------------
It is the 7450 's line
-----------------------------
NNP: 0.01862705740666399
NNS: 0.01332798073063027
VBN: 0.006182256122039342
VBG: 0.010919309514251305
VBD: 0.020794861501405058
VBZ: 0.017342432757928544
--------------------------------
It is the 7500 's line
-----------------------------
NNP: 0.018518518518518517
NNS: 0.013193450961691305
VBN: 0.00619933237958989
VBG: 0.010888570974407884
VBD: 0.02074391988555079
VBZ: 0.017485296455253537
--------------------------------
It is the 7550 's line
-----------------------------
NNP: 0.0185302002838669
NNS: 0.013168269988960733
VBN: 0.006150449455921779
VBG: 0.010881564422015455
VBD: 0.020738053934710614
VBZ: 0.01742627345844504
--------------------------------
It is the 7600 's line
-----------------------------
NNP: 0.01831359102244389
NNS: 0.013170199501246883
VBN: 0.006078553615960099
VBG: 0.011066084788029925
VBD: 0.020807356608478805
VBZ: 0.01753428927680798
--------------------------------
It is the 7650 's line
-----------------------------
NNP: 0.018328673501087293
NNS: 0.013280521901211557
VBN: 0.0060577819198508855
VBG: 0.011105933519726623
VBD: 0.020891581236408822
VBZ: 0.017474370922646785
--------------------------------
It is the 7700 's line
-----------------------------
NNP: 0.018260266584482626
NNS: 0.013329224131289005
VBN: 0.006009707989829725
VBG: 0.011017797981354495
VBD: 0.020802835349410586
VBZ: 0.017566838739502273
--------------------------------
It is the 7750 's line
-----------------------------
NNP: 0.01839503334099793
NNS: 0.0133363991722235
VBN: 0.005978385835824327
VBG: 0.010960374032344601
VBD: 0.02077105848087683
VBZ: 0.01755192764620219
--------------------------------
It is the 7800 's line
-----------------------------
NNP: 0.0183094293561184
NNS: 0.013350625572169667
VBN: 0.00595056454073848
VBG: 0.010909368324687214
VBD: 0.020674397314617027
VBZ: 0.017546536466280134
--------------------------------
It is the 7850 's line
-----------------------------
NNP: 0.018363939899833055
NNS: 0.013279708605251177
VBN: 0.005994839884656245
VBG: 0.010927303080892396
VBD: 0.020716345424191835
VBZ: 0.01768098345727728
--------------------------------
It is the 7900 's line
-----------------------------
NNP: 0.01834448537703932
NNS: 0.01330727013006541
VBN: 0.005939403052402075
VBG: 0.010976618299375986
VBD: 0.020675137207728744
VBZ: 0.0176678445229682
--------------------------------
It is the 7950 's line
-----------------------------
NNP: 0.018572387558737972
NNS: 0.013351234429775491
VBN: 0.005892444245543373
VBG: 0.010964421570821213
VBD: 0.020586260908480643
VBZ: 0.017901096442157084
--------------------------------
It is the 8000 's line
-----------------------------
NNP: 0.018497882772453754
NNS: 0.013297674764133422
VBN: 0.005868806180818661
VBG: 0.0109204368174727
VBD: 0.020652254661615035
VBZ: 0.017829284599955426
--------------------------------
It is the 8050 's line
-----------------------------
NNP: 0.018508959516259863
NNS: 0.013347098296585798
VBN: 0.005973010839908561
VBG: 0.010839908561315538
VBD: 0.020647444878696262
VBZ: 0.01777155077059214
--------------------------------
It is the 8100 's line
-----------------------------
NNP: 0.018462670099301214
NNS: 0.013313718278778962
VBN: 0.005958072820890033
VBG: 0.010812798823096726
VBD: 0.020595807282089002
VBZ: 0.0178742184626701
--------------------------------
It is the 8150 's line
-----------------------------
NNP: 0.018676588604362734
NNS: 0.013277887210914132
VBN: 0.005909389363099146
VBG: 0.010797402786897206
VBD: 0.02042751878602174
VBZ: 0.017801123513533232
--------------------------------
It is the 8200 's line
-----------------------------
NNP: 0.018603299178838746
NNS: 0.013225783009955671
VBN: 0.006031538405639125
VBG: 0.010755032337766151
VBD: 0.02042002761427222
VBZ: 0.01780393866724802
--------------------------------
It is the 8250 's line
-----------------------------
NNP: 0.018510484454085322
NNS: 0.013159797541576283
VBN: 0.006001446131597975
VBG: 0.010773680404916847
VBD: 0.020607375271149676
VBZ: 0.017715112075198842
--------------------------------
It is the 8300 's line
-----------------------------
NNP: 0.018758085381630013
NNS: 0.013080350725887596
VBN: 0.005965214891476211
VBG: 0.010708638781083801
VBD: 0.020482966796032773
VBZ: 0.017823774615495183
--------------------------------
It is the 8350 's line
-----------------------------
NNP: 0.018838304552590265
NNS: 0.012987012987012988
VBN: 0.005922648779791637
VBG: 0.010632224917939203
VBD: 0.02047952047952048
VBZ: 0.01783930355358927
--------------------------------
It is the 8400 's line
-----------------------------
NNP: 0.01886255249483949
NNS: 0.012954658694569009
VBN: 0.005979073243647235
VBG: 0.010605737063136167
VBD: 0.02042850024912805
VBZ: 0.018008399174318457
--------------------------------
It is the 8450 's line
-----------------------------
NNP: 0.019089884474499857
NNS: 0.01282051282051282
VBN: 0.005917159763313609
VBG: 0.010777683854606932
VBD: 0.02028740490278952
VBZ: 0.017962806424344887
--------------------------------
It is the 8500 's line
-----------------------------
NNP: 0.01901565995525727
NNS: 0.012723713646532438
VBN: 0.006012304250559284
VBG: 0.010766219239373601
VBD: 0.02041387024608501
VBZ: 0.017897091722595078
--------------------------------
It is the 8550 's line
-----------------------------
NNP: 0.019097222222222224
NNS: 0.012708333333333334
VBN: 0.0060416666666666665
VBG: 0.010763888888888889
VBD: 0.020555555555555556
VBZ: 0.017847222222222223
--------------------------------
It is the 8600 's line
-----------------------------
NNP: 0.01900172117039587
NNS: 0.012805507745266781
VBN: 0.0060585197934595525
VBG: 0.011015490533562823
VBD: 0.020516351118760757
VBZ: 0.017900172117039585
--------------------------------
It is the 8650 's line
-----------------------------
NNP: 0.018835733296935782
NNS: 0.012830137173275098
VBN: 0.006073841534156828
VBG: 0.010987511089879205
VBD: 0.020610113969835527
VBZ: 0.01781205213949362
--------------------------------
It is the 8700 's line
-----------------------------
NNP: 0.01881811412712381
NNS: 0.012928992080146213
VBN: 0.006024504163000067
VBG: 0.011033642455831584
VBD: 0.020645772693427197
VBZ: 0.01773505719894402
--------------------------------
It is the 8750 's line
-----------------------------
NNP: 0.01871927816308666
NNS: 0.012861086795501986
VBN: 0.006127533499427648
VBG: 0.011043027405561914
VBD: 0.02053733755302673
VBZ: 0.017843916234596996
--------------------------------
It is the 8800 's line
-----------------------------
NNP: 0.018856569709127382
NNS: 0.012905382815112002
VBN: 0.006084921430959545
VBG: 0.011099966566365764
VBD: 0.02039451688398529
VBZ: 0.017987295218990305
--------------------------------
It is the 8850 's line
-----------------------------
NNP: 0.01889177143617375
NNS: 0.013037983103838222
VBN: 0.006119869620168961
VBG: 0.011108893767045833
VBD: 0.02042173884121599
VBZ: 0.018027007250715092
--------------------------------
It is the 8900 's line
-----------------------------
NNP: 0.018945416004239532
NNS: 0.013116057233704292
VBN: 0.006094329623741388
VBG: 0.011128775834658187
VBD: 0.02033651298357181
VBZ: 0.018018018018018018
--------------------------------
It is the 8950 's line
-----------------------------
NNP: 0.019367588932806323
NNS: 0.013043478260869565
VBN: 0.006060606060606061
VBG: 0.011067193675889328
VBD: 0.020223978919631094
VBZ: 0.018115942028985508
--------------------------------
It is the 9000 's line
-----------------------------
NNP: 0.01923580214603507
NNS: 0.01308557969118032
VBN: 0.006019366657942947
VBG: 0.011122742737503272
VBD: 0.02015179272441769
VBZ: 0.018254383669196544
--------------------------------
It is the 9050 's line
-----------------------------
NNP: 0.01920072897682895
NNS: 0.013147617807862536
VBN: 0.005988023952095809
VBG: 0.01112991408487373
VBD: 0.02017703722988805
VBZ: 0.01822442072376985
--------------------------------
It is the 9100 's line
-----------------------------
NNP: 0.019183408943616333
NNS: 0.01322099805573558
VBN: 0.006027219701879456
VBG: 0.011082307193778354
VBD: 0.020220349967595592
VBZ: 0.018211276733635776
--------------------------------
It is the 9150 's line
-----------------------------
NNP: 0.019097222222222224
NNS: 0.013245884773662552
VBN: 0.005979938271604938
VBG: 0.011123971193415638
VBD: 0.020190329218106994
VBZ: 0.01819701646090535
--------------------------------
It is the 9200 's line
-----------------------------
NNP: 0.019095669945076
NNS: 0.013220079192744923
VBN: 0.0059394558692042404
VBG: 0.011176395452803679
VBD: 0.020181376931919785
VBZ: 0.018073828075105377
--------------------------------
It is the 9250 's line
-----------------------------
NNP: 0.01901743264659271
NNS: 0.013185419968304279
VBN: 0.006022187004754358
VBG: 0.01115689381933439
VBD: 0.020221870047543583
VBZ: 0.017939778129952455
--------------------------------
It is the 9300 's line
-----------------------------
NNP: 0.019041614123581338
NNS: 0.01317780580075662
VBN: 0.00605296343001261
VBG: 0.011223203026481715
VBD: 0.02030264817150063
VBZ: 0.018032786885245903
--------------------------------
It is the 9350 's line
-----------------------------
NNP: 0.019069125580228326
NNS: 0.01323547860996111
VBN: 0.0060218291305984195
VBG: 0.011290929619872036
VBD: 0.020260945928992597
VBZ: 0.017940032618241123
--------------------------------
It is the 9400 's line
-----------------------------
NNP: 0.019042877590391437
NNS: 0.013255336361939138
VBN: 0.0059742361067894705
VBG: 0.011263924326342647
VBD: 0.020100815234302072
VBZ: 0.01792270832036841
--------------------------------
It is the 9450 's line
-----------------------------
NNP: 0.01907238838318162
NNS: 0.013189671187070406
VBN: 0.005944640535017648
VBG: 0.011393894358783825
VBD: 0.020063161805684564
VBZ: 0.018019691621772246
--------------------------------
It is the 9500 's line
-----------------------------
NNP: 0.019058779991364953
NNS: 0.013137605625115649
VBN: 0.005982853265897737
VBG: 0.01134891753531117
VBD: 0.0201690001850367
VBZ: 0.0181335964966385
--------------------------------
It is the 9550 's line
-----------------------------
NNP: 0.01900369003690037
NNS: 0.013099630996309963
VBN: 0.005965559655596556
VBG: 0.01131611316113161
VBD: 0.02017220172201722
VBZ: 0.01814268142681427
--------------------------------
It is the 9600 's line
-----------------------------
NNP: 0.019014905232165858
NNS: 0.013065080046617187
VBN: 0.0060111635895234005
VBG: 0.011408943139299516
VBD: 0.020118996503710972
VBZ: 0.01815616757651966
--------------------------------
It is the 9650 's line
-----------------------------
NNP: 0.019058637276989587
NNS: 0.013030505997686171
VBN: 0.006028131279303416
VBG: 0.011447360409182244
VBD: 0.020154661145953844
VBZ: 0.018145284052852706
--------------------------------
It is the 9700 's line
-----------------------------
NNP: 0.019044153323629305
NNS: 0.012979136341581756
VBN: 0.006004366812227074
VBG: 0.011402231926249394
VBD: 0.02019650655021834
VBZ: 0.018134400776322175
--------------------------------
It is the 9750 's line
-----------------------------
NNP: 0.019029783120884432
NNS: 0.0129281701202199
VBN: 0.006041200990756963
VBG: 0.01141786987253066
VBD: 0.020177611309128256
VBZ: 0.018123602972270888
--------------------------------
It is the 9800 's line
-----------------------------
NNP: 0.018977839168818688
NNS: 0.012972193862230497
VBN: 0.006065701759654075
VBG: 0.01153083898864933
VBD: 0.02023902468320221
VBZ: 0.01807699237283046
--------------------------------
It is the 9850 's line
-----------------------------
NNP: 0.018878069179759843
NNS: 0.01296373737977179
VBN: 0.006033813250492861
VBG: 0.011470219248461677
VBD: 0.020192365135312744
VBZ: 0.017981958300973773
--------------------------------
It is the 9900 's line
-----------------------------
NNP: 0.018854457859989293
NNS: 0.01290667935526081
VBN: 0.006066734074823054
VBG: 0.011479212514125973
VBD: 0.02028192470112413
VBZ: 0.017962291084280022
--------------------------------
It is the 9950 's line
-----------------------------
NNP: 0.018833283979863783
NNS: 0.01291086763399467
VBN: 0.006159312999703879
VBG: 0.011667160201362155
VBD: 0.020254663902872373
VBZ: 0.017885697364524725
--------------------------------
It is the 10000 's line
-----------------------------
NNP: 0.018900242159352668
NNS: 0.012934853227806981
VBN: 0.006142578701789617
VBG: 0.011635461579351485
VBD: 0.020317760321304116
VBZ: 0.01783710353788908
--------------------------------
It is the 10050 's line
-----------------------------
NNP: 0.01921607803960745
NNS: 0.012869483457718751
VBN: 0.006111535523300229
VBG: 0.011576658635482165
VBD: 0.02021507903860845
VBZ: 0.017982017982017984
--------------------------------
It is the 10100 's line
-----------------------------
NNP: 0.01928600738613049
NNS: 0.012837798229673486
VBN: 0.006155108740254411
VBG: 0.011548156398382086
VBD: 0.020282548801219297
VBZ: 0.017996365554839088
--------------------------------
It is the 10150 's line
-----------------------------
NNP: 0.019301203418405907
NNS: 0.012731817917562933
VBN: 0.006104296261845241
VBG: 0.011627230974943318
VBD: 0.020405790361025522
VBZ: 0.017964071856287425
--------------------------------
It is the 10200 's line
-----------------------------
NNP: 0.01926536296640167
NNS: 0.012766204375326409
VBN: 0.006092961179133058
VBG: 0.011605640341205826
VBD: 0.020425927000522252
VBZ: 0.01798874252886903
--------------------------------
It is the 10250 's line
-----------------------------
NNP: 0.019357448283832197
NNS: 0.012770137524557957
VBN: 0.0060672599098578524
VBG: 0.011614468970299318
VBD: 0.02039754998266497
VBZ: 0.018028429446434763
--------------------------------
It is the 10300 's line
-----------------------------
NNP: 0.0195323720342391
NNS: 0.012753489975297294
VBN: 0.006146952375481129
VBG: 0.01154707876141782
VBD: 0.020336646176825415
VBZ: 0.018038720055150226
--------------------------------
It is the 10350 's line
-----------------------------
NNP: 0.019438568406609114
NNS: 0.0127494139843348
VBN: 0.006117431822079927
VBG: 0.011548796523926591
VBD: 0.020238980046881253
VBZ: 0.018009261906123147
--------------------------------
It is the 10400 's line
-----------------------------
NNP: 0.019414711910726485
NNS: 0.012810293782737417
VBN: 0.00614894101571396
VBG: 0.011500797084946482
VBD: 0.020268731496242313
VBZ: 0.017991345934866773
--------------------------------
It is the 10450 's line
-----------------------------
NNP: 0.019409234947940245
NNS: 0.01278859212313264
VBN: 0.006167949298325034
VBG: 0.011487098234495247
VBD: 0.020371208691715707
VBZ: 0.01805115436849253
--------------------------------
It is the 10500 's line
-----------------------------
NNP: 0.01931741383194413
NNS: 0.012784410903356611
VBN: 0.006195088984005406
VBG: 0.011489074115791845
VBD: 0.020387474656454158
VBZ: 0.018022077044379366
--------------------------------
It is the 10550 's line
-----------------------------
NNP: 0.01945873406396779
NNS: 0.01280474166853053
VBN: 0.006150749273093268
VBG: 0.011406844106463879
VBD: 0.020241556698725117
VBZ: 0.01806083650190114
--------------------------------
It is the 10600 's line
-----------------------------
NNP: 0.01935483870967742
NNS: 0.012736373748609565
VBN: 0.006229143492769744
VBG: 0.011401557285873193
VBD: 0.020189098998887652
VBZ: 0.018020022246941044
--------------------------------
It is the 10650 's line
-----------------------------
NNP: 0.019324473975636768
NNS: 0.01273532668881506
VBN: 0.006201550387596899
VBG: 0.011351052048726468
VBD: 0.020210409745293468
VBZ: 0.01816168327796235
--------------------------------
It is the 10700 's line
-----------------------------
NNP: 0.01934022213626568
NNS: 0.012709288832403161
VBN: 0.006188871083605017
VBG: 0.011383102171630657
VBD: 0.020169088799248495
VBZ: 0.018124551030557552
--------------------------------
It is the 10750 's line
-----------------------------
NNP: 0.019372592184920198
NNS: 0.01271326362135388
VBN: 0.006329113924050633
VBG: 0.01139240506329114
VBD: 0.020143093010456796
VBZ: 0.018106769400110072
--------------------------------
It is the 10800 's line
-----------------------------
NNP: 0.019547719432732848
NNS: 0.012648524338827136
VBN: 0.006351639927722718
VBG: 0.011334391939987953
VBD: 0.02004051908229754
VBZ: 0.018178831517275364
--------------------------------
It is the 10850 's line
-----------------------------
NNP: 0.019473081328751432
NNS: 0.012654775541373479
VBN: 0.0063273877706867395
VBG: 0.011400207276495936
VBD: 0.019963999345442643
VBZ: 0.018163966617574866
--------------------------------
It is the 10900 's line
-----------------------------
NNP: 0.019479812819675698
NNS: 0.012678202198280553
VBN: 0.006311894656654696
VBG: 0.011372292958972685
VBD: 0.01996952878441615
VBZ: 0.01811949069539667
--------------------------------
It is the 10950 's line
-----------------------------
NNP: 0.01964179427520156
NNS: 0.012715762134083654
VBN: 0.006330826253990585
VBG: 0.011363021481521563
VBD: 0.019858232779611493
VBZ: 0.0181808343704345
--------------------------------
It is the 11000 's line
-----------------------------
NNP: 0.019593067068575734
NNS: 0.012703197330175477
VBN: 0.006297771557756487
VBG: 0.01146517386155668
VBD: 0.02002368392722575
VBZ: 0.018193562277963184
--------------------------------
It is the 11050 's line
-----------------------------
NNP: 0.019569996246850036
NNS: 0.012707093453434131
VBN: 0.0062731220846067234
VBG: 0.01142029917966865
VBD: 0.02005254409951209
VBZ: 0.01817596911693743
--------------------------------
It is the 11100 's line
-----------------------------
NNP: 0.019528115135626772
NNS: 0.012733401102134717
VBN: 0.006259697180461185
VBG: 0.011395858969557542
VBD: 0.020170135359263815
VBZ: 0.018190573003049597
--------------------------------
It is the 11150 's line
-----------------------------
NNP: 0.019510369895231985
NNS: 0.012721830233055377
VBN: 0.0062540089801154585
VBG: 0.011385503527902501
VBD: 0.020258712850117596
VBZ: 0.01817404319007911
--------------------------------
It is the 11200 's line
-----------------------------
NNP: 0.01942365865308072
NNS: 0.01268375524067293
VBN: 0.006209202356312689
VBG: 0.011357002600435175
VBD: 0.02027278034283288
VBZ: 0.018309186435281007
--------------------------------
It is the 11250 's line
-----------------------------
NNP: 0.019422099915325994
NNS: 0.01264817950889077
VBN: 0.006244707874682473
VBG: 0.01132514817950889
VBD: 0.020268839966130398
VBZ: 0.01825783234546994
--------------------------------
It is the 11300 's line
-----------------------------
NNP: 0.01949317738791423
NNS: 0.012696907433749538
VBN: 0.006216743058848322
VBG: 0.011379800853485065
VBD: 0.020178072809651757
VBZ: 0.018334123597281492
--------------------------------
It is the 11350 's line
-----------------------------
NNP: 0.019435276861019434
NNS: 0.01272984441301273
VBN: 0.006181570538006181
VBG: 0.011472575829011473
VBD: 0.02022106972602022
VBZ: 0.018387553041018388
--------------------------------
It is the 11400 's line
-----------------------------
NNP: 0.019470689565172
NNS: 0.012736858589549512
VBN: 0.006159628334290338
VBG: 0.011431852586521898
VBD: 0.020201492926867463
VBZ: 0.018426684762749907
--------------------------------
It is the 11450 's line
-----------------------------
NNP: 0.019511941308080545
NNS: 0.012695769811124408
VBN: 0.006139757531609345
VBG: 0.011499037410895469
VBD: 0.020136323429939124
VBZ: 0.01857536812529268
--------------------------------
It is the 11500 's line
-----------------------------
NNP: 0.019435542230952136
NNS: 0.012612426341362556
VBN: 0.006151142355008787
VBG: 0.011526930631655122
VBD: 0.020159206037423757
VBZ: 0.01845342706502636
--------------------------------
It is the 11550 's line
-----------------------------
NNP: 0.01937744794887652
NNS: 0.012677798392084107
VBN: 0.006132756132756133
VBG: 0.011492475778190065
VBD: 0.020356627499484643
VBZ: 0.0183982683982684
--------------------------------
It is the 11600 's line
-----------------------------
NNP: 0.019326395652842568
NNS: 0.012764648587686472
VBN: 0.00610037422463731
VBG: 0.01148305736402317
VBD: 0.02035166863177321
VBZ: 0.018454913620751526
--------------------------------
It is the 11650 's line
-----------------------------
NNP: 0.019335748176113464
NNS: 0.012703433498290903
VBN: 0.006071118820468344
VBG: 0.01158104178358247
VBD: 0.020305086475179836
VBZ: 0.01841742768226111
--------------------------------
It is the 11700 's line
-----------------------------
NNP: 0.01932367149758454
NNS: 0.012763793541825578
VBN: 0.006102212051868803
VBG: 0.011543351131451818
VBD: 0.020340706839562676
VBZ: 0.018459191456903126
--------------------------------
It is the 11750 's line
-----------------------------
NNP: 0.019260986365249127
NNS: 0.012722388362309291
VBN: 0.006082416746920776
VBG: 0.011556591819149475
VBD: 0.020325409295960262
VBZ: 0.01850068427188403
--------------------------------
It is the 11800 's line
-----------------------------
NNP: 0.019253120420435595
NNS: 0.012784880489160644
VBN: 0.006063974935570266
VBG: 0.011572085502046591
VBD: 0.020263782909697308
VBZ: 0.018444590429026228
--------------------------------
It is the 11850 's line
-----------------------------
NNP: 0.019295512788302095
NNS: 0.012763177729762324
VBN: 0.006080096477563942
VBG: 0.01160745691171298
VBD: 0.02025023868147329
VBZ: 0.018441284357569972
--------------------------------
It is the 11900 's line
-----------------------------
NNP: 0.01922595503930306
NNS: 0.012717168177039002
VBN: 0.006058178541030391
VBG: 0.011565613578330747
VBD: 0.020177239273018576
VBZ: 0.018575076353076654
--------------------------------
It is the 11950 's line
-----------------------------
NNP: 0.019241313992323414
NNS: 0.012711230746224017
VBN: 0.006081451572703255
VBG: 0.011514879617167639
VBD: 0.020088729375405016
VBZ: 0.01854344250037386
--------------------------------
It is the 12000 's line
-----------------------------
NNP: 0.019315755499280003
NNS: 0.01271165400466756
VBN: 0.006107552510055117
VBG: 0.011519936441729977
VBD: 0.020110233874571726
VBZ: 0.018620586920899747
--------------------------------
It is the 12050 's line
-----------------------------
NNP: 0.019309595535582005
NNS: 0.012642599634549855
VBN: 0.006123759197985086
VBG: 0.011506741073633265
VBD: 0.02009975801274137
VBZ: 0.018519433058422637
--------------------------------
It is the 12100 's line
-----------------------------
NNP: 0.019323433965975023
NNS: 0.012636444094797914
VBN: 0.006096961353132068
VBG: 0.011456387058707837
VBD: 0.02015930769987216
VBZ: 0.018585898318418722
--------------------------------
It is the 12150 's line
-----------------------------
NNP: 0.01945124938755512
NNS: 0.01268985791278785
VBN: 0.006075453209211171
VBG: 0.011415972562469378
VBD: 0.02013718765311122
VBZ: 0.018618324350808426
--------------------------------
It is the 12200 's line
-----------------------------
NNP: 0.01952076521399639
NNS: 0.012639695476062661
VBN: 0.006051437216338881
VBG: 0.011370845737152897
VBD: 0.02005758625738129
VBZ: 0.01859352886633156
--------------------------------
It is the 12250 's line
-----------------------------
NNP: 0.019458092134066254
NNS: 0.012696405117478231
VBN: 0.006032008561560539
VBG: 0.011334338668093594
VBD: 0.020139125358758572
VBZ: 0.018679768448703605
--------------------------------
It is the 12300 's line
-----------------------------
NNP: 0.01941982662598673
NNS: 0.012688265775582352
VBN: 0.006005133420504625
VBG: 0.011380696401762798
VBD: 0.020049397065233183
VBZ: 0.01864497070076033
--------------------------------
It is the 12350 's line
-----------------------------
NNP: 0.019471756313861575
NNS: 0.012627723154038943
VBN: 0.006121071910545595
VBG: 0.011326392905340274
VBD: 0.020001927896664738
VBZ: 0.0186524002313476
--------------------------------
It is the 12400 's line
-----------------------------
NNP: 0.01938912294140176
NNS: 0.012590961317502872
VBN: 0.006080045959402528
VBG: 0.011441976254308693
VBD: 0.02005936422826503
VBZ: 0.018718881654538493
--------------------------------
It is the 12450 's line
-----------------------------
NNP: 0.01941561857395629
NNS: 0.01262493424513414
VBN: 0.00607335851943953
VBG: 0.011429391229496438
VBD: 0.020037300942087895
VBZ: 0.018746114485199177
--------------------------------
It is the 12500 's line
-----------------------------
NNP: 0.019340701219512195
NNS: 0.012623856707317074
VBN: 0.006049923780487805
VBG: 0.011385289634146341
VBD: 0.020007621951219513
VBZ: 0.01872141768292683
--------------------------------
It is the 12550 's line
-----------------------------
NNP: 0.0193057536852116
NNS: 0.012648597242035188
VBN: 0.0060389919163100336
VBG: 0.011412268188302425
VBD: 0.020019020446980503
VBZ: 0.01873514027579648
--------------------------------
It is the 12600 's line
-----------------------------
NNP: 0.019280083578687435
NNS: 0.012726754677557224
VBN: 0.006030962104663311
VBG: 0.01139709374109602
VBD: 0.019992401937505937
VBZ: 0.018710228891632635
--------------------------------
It is the 12650 's line
-----------------------------
NNP: 0.0192635365391897
NNS: 0.012731919727375994
VBN: 0.006010980689132904
VBG: 0.011359333585762969
VBD: 0.019973494888299886
VBZ: 0.01869556985990155
--------------------------------
It is the 12700 's line
-----------------------------
NNP: 0.01933207902163688
NNS: 0.012699905926622766
VBN: 0.005973659454374412
VBG: 0.011335841956726246
VBD: 0.020037629350893697
VBZ: 0.018673565380997178
--------------------------------
It is the 12750 's line
-----------------------------
NNP: 0.019504876219054765
NNS: 0.012659414853713429
VBN: 0.005954613653413353
VBG: 0.011299699924981246
VBD: 0.020114403600900225
VBZ: 0.018754688672168042
--------------------------------
It is the 12800 's line
-----------------------------
NNP: 0.01943743575366788
NNS: 0.012615643397813289
VBN: 0.0059340248574899545
VBG: 0.011260629847677787
VBD: 0.020138304831324175
VBZ: 0.018736566676011587
--------------------------------
It is the 12850 's line
-----------------------------
NNP: 0.019491998511350948
NNS: 0.012560476367696315
VBN: 0.00595459620394492
VBG: 0.011350949013770003
VBD: 0.020050241905470785
VBZ: 0.018747673985857832
--------------------------------
It is the 12900 's line
-----------------------------
NNP: 0.019470585508321356
NNS: 0.012516804969635158
VBN: 0.005980251263270131
VBG: 0.011311483009596217
VBD: 0.02007324648834083
VBZ: 0.018728848917528164
--------------------------------
It is the 12950 's line
-----------------------------
NNP: 0.019420169232903316
NNS: 0.012530633005039996
VBN: 0.005964766264391732
VBG: 0.011328432052526934
VBD: 0.020021269709159847
VBZ: 0.018680353262126047
--------------------------------
It is the 13000 's line
-----------------------------
NNP: 0.019414341710860043
NNS: 0.012497117823380217
VBN: 0.005948812543232649
VBG: 0.011390362001383445
VBD: 0.020244408577357622
VBZ: 0.018676504496195526
--------------------------------
It is the 13050 's line
-----------------------------
NNP: 0.01946617579383341
NNS: 0.012471237919926369
VBN: 0.005936493327197423
VBG: 0.011366774045098942
VBD: 0.02024850437183617
VBZ: 0.01863782788771284
--------------------------------
It is the 13100 's line
-----------------------------
NNP: 0.019695161142227528
NNS: 0.0124414654301717
VBN: 0.005922321182627858
VBG: 0.011339638233403728
VBD: 0.020200165274079515
VBZ: 0.018639243411991552
--------------------------------
It is the 13150 's line
-----------------------------
NNP: 0.019942368384942597
NNS: 0.01244111055207428
VBN: 0.005900379636829346
VBG: 0.01129762612633216
VBD: 0.02035402277820976
VBZ: 0.01866166582811142
--------------------------------
It is the 13200 's line
-----------------------------
NNP: 0.020034684191310697
NNS: 0.012413289521723256
VBN: 0.005932822197882439
VBG: 0.011317999269806499
VBD: 0.020308506754289888
VBZ: 0.018619934282584884
--------------------------------
It is the 13250 's line
-----------------------------
NNP: 0.019965435692195742
NNS: 0.01246134255048208
VBN: 0.005912315808622885
VBG: 0.011278879388757504
VBD: 0.020238311806439875
VBZ: 0.018692013825723122
--------------------------------
It is the 13300 's line
-----------------------------
NNP: 0.02006068016120998
NNS: 0.012407734456369153
VBN: 0.0058868813114160215
VBG: 0.011230358194085948
VBD: 0.020196531268396503
VBZ: 0.01879273649413576
--------------------------------
It is the 13350 's line
-----------------------------
NNP: 0.020148749154834347
NNS: 0.01235068740139734
VBN: 0.005859815190443994
VBG: 0.011178724363308542
VBD: 0.020329051160694164
VBZ: 0.01875140860942078
--------------------------------
It is the 13400 's line
-----------------------------
NNP: 0.020035858359480054
NNS: 0.012326311071268489
VBN: 0.00582698341550874
VBG: 0.011116091438816674
VBD: 0.020484087852980726
VBZ: 0.018870461676378306
--------------------------------
It is the 13450 's line
-----------------------------
NNP: 0.02002503128911139
NNS: 0.012381548364026461
VBN: 0.005855533702842839
VBG: 0.011085285177900948
VBD: 0.02047201859467191
VBZ: 0.018907563025210083
--------------------------------
It is the 13500 's line
-----------------------------
NNP: 0.01998037641601998
NNS: 0.012398537151012398
VBN: 0.005842476139505843
VBG: 0.011105164570511104
VBD: 0.020426366961020428
VBZ: 0.018954598162518956
--------------------------------
It is the 13550 's line
-----------------------------
NNP: 0.019939469467687376
NNS: 0.012373152928609578
VBN: 0.005830514509524657
VBG: 0.0110824283425316
VBD: 0.02051807014420509
VBZ: 0.0189157913476945
--------------------------------
It is the 13600 's line
-----------------------------
NNP: 0.019940621261133513
NNS: 0.012363185181902778
VBN: 0.005849248903265831
VBG: 0.011078122922851952
VBD: 0.020472371161430407
VBZ: 0.018965746443922543
--------------------------------
It is the 13650 's line
-----------------------------
NNP: 0.019895658325227696
NNS: 0.01233530816164117
VBN: 0.005836059775400124
VBG: 0.011097356088071447
VBD: 0.020470421787956494
VBZ: 0.019055619418162525
--------------------------------
It is the 13700 's line
-----------------------------
NNP: 0.019849146486701073
NNS: 0.012306470821754664
VBN: 0.005822416302765648
VBG: 0.0111155220325526
VBD: 0.02042256627409466
VBZ: 0.019055180627233027
--------------------------------
It is the 13750 's line
-----------------------------
NNP: 0.01989699344103535
NNS: 0.012281551261170049
VBN: 0.005810626403134216
VBG: 0.011137033939340582
VBD: 0.02051327199894352
VBZ: 0.01906061539815997
--------------------------------
It is the 13800 's line
-----------------------------
NNP: 0.01982195325176512
NNS: 0.012279086085164233
VBN: 0.005788712011577424
VBG: 0.011138885234398982
VBD: 0.020479761434898918
VBZ: 0.019076437310880146
--------------------------------
It is the 13850 's line
-----------------------------
NNP: 0.019816272965879265
NNS: 0.012292213473315836
VBN: 0.005861767279090113
VBG: 0.01115485564304462
VBD: 0.0205161854768154
VBZ: 0.019072615923009625
--------------------------------
It is the 13900 's line
-----------------------------
NNP: 0.019826571963919997
NNS: 0.01228811712928668
VBN: 0.0058826092640202185
VBG: 0.011198745043357008
VBD: 0.020654494749226544
VBZ: 0.018998649178613446
--------------------------------
It is the 13950 's line
-----------------------------
NNP: 0.01977401129943503
NNS: 0.01225554106910039
VBN: 0.005867014341590613
VBG: 0.011169056931768796
VBD: 0.020730117340286833
VBZ: 0.018948283355063017
--------------------------------
It is the 14000 's line
-----------------------------
NNP: 0.019757036012277896
NNS: 0.012191431412390298
VBN: 0.005836323548484718
VBG: 0.011153862781548571
VBD: 0.02070814059054948
VBZ: 0.018849163460291384
--------------------------------
It is the 14050 's line
-----------------------------
NNP: 0.019774254695846975
NNS: 0.0122350508357746
VBN: 0.005859038428399104
VBG: 0.011114940547992417
VBD: 0.02067895915905566
VBZ: 0.018826469067723593
--------------------------------
It is the 14100 's line
-----------------------------
NNP: 0.01976964070826887
NNS: 0.012291559222967164
VBN: 0.005844937252879491
VBG: 0.011174144748151968
VBD: 0.020672167784081142
VBZ: 0.018781158672855423
--------------------------------
It is the 14150 's line
-----------------------------
NNP: 0.019758601434646278
NNS: 0.012284695674584425
VBN: 0.005841673467634552
VBG: 0.011167905158713114
VBD: 0.02066062454361926
VBZ: 0.018770671362913965
--------------------------------
It is the 14200 's line
-----------------------------
NNP: 0.019784172661870502
NNS: 0.012247344981157931
VBN: 0.005909558067831449
VBG: 0.011133949982870846
VBD: 0.02068345323741007
VBZ: 0.01871360054813292
--------------------------------
It is the 14250 's line
-----------------------------
NNP: 0.01971326164874552
NNS: 0.01228878648233487
VBN: 0.005888376856118792
VBG: 0.011094043352107869
VBD: 0.020694657791431984
VBZ: 0.018731865506059056
--------------------------------
It is the 14300 's line
-----------------------------
NNP: 0.019781341727995914
NNS: 0.012336750755094227
VBN: 0.0059131322584761985
VBG: 0.011060535159739652
VBD: 0.020632152124898967
VBZ: 0.018760369251712258
--------------------------------
It is the 14350 's line
-----------------------------
NNP: 0.01975329574837862
NNS: 0.012377601627739392
VBN: 0.005934466533847654
VBG: 0.0110211521342885
VBD: 0.020558687635115085
VBZ: 0.01873595862829045
--------------------------------
It is the 14400 's line
-----------------------------
NNP: 0.01973154930770208
NNS: 0.012363975102680273
VBN: 0.00592793326840835
VBG: 0.01105136130753271
VBD: 0.0205783969174747
VBZ: 0.018715332175974934
--------------------------------
It is the 14450 's line
-----------------------------
NNP: 0.01968071627671256
NNS: 0.01237435594222485
VBN: 0.005912661542359997
VBG: 0.011107357040290564
VBD: 0.020652082101528847
VBZ: 0.018709350451896273
--------------------------------
It is the 14500 's line
-----------------------------
NNP: 0.01962105263157895
NNS: 0.012421052631578947
VBN: 0.005894736842105263
VBG: 0.011073684210526315
VBD: 0.02063157894736842
VBZ: 0.018694736842105264
--------------------------------
It is the 14550 's line
-----------------------------
NNP: 0.019550383053543768
NNS: 0.012349813706200025
VBN: 0.00590279231381086
VBG: 0.011010172897391887
VBD: 0.02051324988487462
VBZ: 0.018671243772763427
--------------------------------
It is the 14600 's line
-----------------------------
NNP: 0.019523591005798674
NNS: 0.012348254140419674
VBN: 0.005882107546618831
VBG: 0.010971590672062074
VBD: 0.02048308372616912
VBZ: 0.018730966584623088
--------------------------------
It is the 14650 's line
-----------------------------
NNP: 0.01957036606141189
NNS: 0.012340549299871193
VBN: 0.005858644617110566
VBG: 0.010927826484397723
VBD: 0.020526031495408652
VBZ: 0.01869780196950181
--------------------------------
It is the 14700 's line
-----------------------------
NNP: 0.01969139122161089
NNS: 0.012286435279030323
VBN: 0.005874322591320895
VBG: 0.010921275803582509
VBD: 0.02043602366276424
VBZ: 0.018739916435692713
--------------------------------
It is the 14750 's line
-----------------------------
NNP: 0.0197119341563786
NNS: 0.012263374485596707
VBN: 0.00588477366255144
VBG: 0.010946502057613168
VBD: 0.020534979423868314
VBZ: 0.018724279835390947
--------------------------------
It is the 14800 's line
-----------------------------
NNP: 0.019745484400656814
NNS: 0.012274220032840722
VBN: 0.005870279146141215
VBG: 0.010919540229885057
VBD: 0.02048440065681445
VBZ: 0.018760262725779967
--------------------------------
It is the 14850 's line
-----------------------------
NNP: 0.019777250020473345
NNS: 0.012243059536483499
VBN: 0.005896322987470314
VBG: 0.010973712226680862
VBD: 0.020432397019081155
VBZ: 0.018835476209974614
--------------------------------
It is the 14900 's line
-----------------------------
NNP: 0.019793494674121535
NNS: 0.012243398767497858
VBN: 0.005876831408398972
VBG: 0.010937436232298085
VBD: 0.020446475941721422
VBZ: 0.018773211443496713
--------------------------------
It is the 14950 's line
-----------------------------
NNP: 0.01977458599503601
NNS: 0.012287911461935957
VBN: 0.005899825039671238
VBG: 0.010904504211254424
VBD: 0.020466289620376776
VBZ: 0.018716686332750133
--------------------------------
It is the 15000 's line
-----------------------------
NNP: 0.019713625116618668
NNS: 0.012250030422260982
VBN: 0.006003326167200746
VBG: 0.01087088792439054
VBD: 0.020524885409483634
VBZ: 0.018740112765180707
--------------------------------
It is the 15050 's line
-----------------------------
NNP: 0.019782353655083136
NNS: 0.012217322707229257
VBN: 0.005987297220761358
VBG: 0.010922771956794369
VBD: 0.02051053845220276
VBZ: 0.01877098588130588
>>>
'''
|
16,733 | 8882921c26ac7732f482f3483a133e922884bfa5 | #coding=utf-8
import json
import requests
from handlers.base import BaseHandler
class view(BaseHandler):
    """Page handler: serves the request-builder landing page."""

    def get(self):
        # GET -> render the static index template
        return self.render("index.html")
class get_api(BaseHandler):
    """Proxy endpoint: forwards a user-described HTTP request and echoes the body.

    Expects form arguments:
        url     -- target URL
        method  -- one of "post", "get", "put", "delete"
        data    -- Python-literal text for the payload dict
        headers -- Python-literal text for the request headers dict
    """

    def post(self):
        import ast  # function-scope import: only needed for safe literal parsing

        res = dict()
        args = self.request.arguments
        for a in args:
            res[a] = self.get_argument(a)
        url = res['url']
        method = res['method']
        # SECURITY: the original called eval() on user-supplied text, which
        # executes arbitrary Python.  literal_eval accepts only literals
        # (dicts, lists, strings, numbers), which is all this endpoint needs.
        data = ast.literal_eval(res['data'])
        headers = ast.literal_eval(res['headers'])
        if method == "post":
            r = requests.post(url, data=json.dumps(data), headers=headers)
        elif method == "get":
            r = requests.get(url, headers=headers, params=data)
        elif method == "put":
            r = requests.put(url, data=json.dumps(data), headers=headers)
        elif method == "delete":
            r = requests.delete(url, headers=headers)
        else:
            # Unknown verb: the original fell through and crashed with a
            # NameError on `r`; report the problem to the caller instead.
            self.write("unsupported method: %s" % method)
            return
        self.write(r.text)
|
16,734 | b4ede7048e2c348d31dce2dbe7d8844ded0afca1 | import heapq
# Per-student records: a name string followed by integer scores.
stud = [['KRIS', 54, 56, 90], ['ANA', 66, 77, 88]]

# Build one {name: total_score} dict per student.
new_list = []
for record in stud:
    # Sum the integer fields; `total` avoids shadowing the builtin `sum`,
    # and isinstance() replaces the non-idiomatic `type(j) is int` check.
    total = sum(v for v in record if isinstance(v, int))
    # Whitespace-split tokens of the last string field become the keys
    # (the original recomputed fromkeys() on every inner iteration and
    # only the final result survived); fall back to the first record if
    # a row held no string at all, matching the original's default.
    names = [v for v in record if isinstance(v, str)]
    keys = names[-1].split() if names else stud[0]
    new_list.append(dict.fromkeys(keys, total))
print(new_list)
16,735 | 1fa9cebac8d2376b55490624291ab32920003a1a | #
# @lc app=leetcode.cn id=42 lang=python3
#
# [42] 接雨水
#
# https://leetcode-cn.com/problems/trapping-rain-water/description/
#
# algorithms
# Hard (48.85%)
# Likes: 977
# Dislikes: 0
# Total Accepted: 69K
# Total Submissions: 139.2K
# Testcase Example: '[0,1,0,2,1,0,1,3,2,1,2,1]'
#
# 给定 n 个非负整数表示每个宽度为 1 的柱子的高度图,计算按此排列的柱子,下雨之后能接多少雨水。
#
#
#
# 上面是由数组 [0,1,0,2,1,0,1,3,2,1,2,1] 表示的高度图,在这种情况下,可以接 6 个单位的雨水(蓝色部分表示雨水)。 感谢
# Marcos 贡献此图。
#
# 示例:
#
# 输入: [0,1,0,2,1,0,1,3,2,1,2,1]
# 输出: 6
#
#
from typing import List
# @lc code=start
class Solution:
    def trap(self, height: List[int]) -> int:
        """Two-pointer solution.

        Pointers walk inward from both ends while tracking the running
        maximum on each side; the side with the lower maximum is the
        short board, so the water above its current bar is fixed and
        can be accumulated immediately.
        """
        lo = 0
        hi = len(height) - 1
        best_lo = 0   # highest bar seen so far from the left
        best_hi = 0   # highest bar seen so far from the right
        total = 0
        while lo <= hi:
            if best_lo <= best_hi:
                if height[lo] > best_lo:
                    best_lo = height[lo]
                total += best_lo - height[lo]
                lo += 1
            else:
                if height[hi] > best_hi:
                    best_hi = height[hi]
                total += best_hi - height[hi]
                hi -= 1
        return total

    def trap2(self, height: List[int]) -> int:
        """Dynamic-programming solution.

        Precompute, for each index, the highest bar to its left and to
        its right; the water a cell holds is min(left peak, right peak)
        minus its own height.
        """
        size = len(height)
        if size == 0:
            return 0
        max_from_left = [0] * size
        max_from_right = [0] * size
        running = height[0]
        for idx in range(size):
            if height[idx] > running:
                running = height[idx]
            max_from_left[idx] = running
        running = height[-1]
        for idx in range(size - 1, -1, -1):
            if height[idx] > running:
                running = height[idx]
            max_from_right[idx] = running
        return sum(min(l, r) - h
                   for l, r, h in zip(max_from_left, max_from_right, height))

    def trap3(self, height: List[int]) -> int:
        """Monotonic-stack solution.

        The stack holds indices of bars in non-increasing height order.
        When a taller bar arrives, each popped index is the floor of a
        basin bounded by the new bar and the next index on the stack.
        """
        if not height:
            return 0
        total = 0
        indices = []
        for pos in range(len(height)):
            cur = height[pos]
            while indices and height[indices[-1]] < cur:
                bottom = indices.pop()          # floor of the basin
                bottom_h = height[bottom]
                # discard equal-height neighbors to avoid double counting
                while indices and height[indices[-1]] == bottom_h:
                    indices.pop()
                if indices:
                    width = pos - indices[-1] - 1
                    depth = min(height[indices[-1]], cur) - bottom_h
                    total += depth * width
            indices.append(pos)
        return total
# @lc code=end
h = [0,1,0,2,1,0,1,3,2,1,2,1]
s = Solution()
print(s.trap(h))
print(s.trap2(h))
print(s.trap3(h)) |
16,736 | f4e3f0d83fd73b92c9ca69345e49eed1656b28ed |
# def 함수이름(매개변수=값):
# 코드
# ---------- 초기값 지정하기
def personal_info(name, age, address='비공개'):
print('이름: ', name)
print('나이: ', age)
print('주소: ', address)
# --------------초기값 지정되어 있더라도 값을 넣으면 해당 값이 전달됨
personal_info('홍길동',40, '서울시 용산구 이촌동')
#----------------초기값 지정된 매개변수의 위치\
# 초기값이 없는 매개변수 올 수 없다.\
#
# def personal_info(name, address='비공개',age):
# print('이름: ', name)
# print('나이: ', age)
# print('주소: ', address)
#
# SyntaxError: non-default argument follows default argument
# def personal_info(name, age, address = '비공개'):
# print('이름',항열)
# 초기값 지정된 매개변수는 뒤쪽으로 몰아주기
def personal_info(name='비공개', age=0, address='비공개'):
# 매개변수에 초깃값 지정시
personal_info()
# 인수를 안넣어도 호출 가능
# $$$$$$$$$$$$$$$$$$$$$$$$$$$
# 함수와 *를 리스트에 사용하고 **는 딕셔너리에 사용한다
# $$$$$$$$$$$$$$$$$$$$$$$$$$$ |
16,737 | dca71ca85cac7275a35abe7c3a47cc5b91124ec1 | import os
import argparse
from collections import defaultdict
def extnum_func(args):
    """Count files under args.srcdir (recursively) by extension and print
    the directory followed by one indented "ext count" line per extension,
    most frequent first.
    """
    counts = defaultdict(int)  # int() == 0; clearer than lambda: 0
    for root, dirs, files in os.walk(args.srcdir):
        for srcname in files:
            _, ext = os.path.splitext(srcname)
            counts[ext] += 1
    print(args.srcdir)
    # Sort once; previously the dict variable was rebound to the sorted
    # list, which obscured its type mid-function.
    for ext, num in sorted(counts.items(), key=lambda kv: kv[1], reverse=True):
        print(' ', ext, num)
def print_help(parser):
    """Build a fallback command handler that ignores its args and prints
    *parser*'s help text (used when no subcommand is given)."""
    def show_usage(_args):
        parser.print_help()
    return show_usage
def main():
    """Entry point: build the CLI (one `extnum` subcommand), defaulting to
    printing usage when no subcommand is supplied, then dispatch."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--srcdir', default='D:\\Projects\\Project-IGI1\\game\\')
    parser.set_defaults(func=print_help(parser))
    subparsers = parser.add_subparsers()
    ext_cmd = subparsers.add_parser('extnum', help='Count files by extension')
    ext_cmd.set_defaults(func=extnum_func)
    parsed = parser.parse_args()
    parsed.func(parsed)
if __name__ == '__main__':
main()
|
16,738 | 164a48fa11a4b9aea93b60655e870013793d20f1 | # template for "Stopwatch: The Game"
import simplegui
# define global variables
total_millisecond = 0
accept_cnt = 0
total_cnt = 0
score = 0
mode = 0
width = 300
height = 200
per_second = 1000
# define helper function format that converts time
# in tenths of seconds into formatted string A:BC.D
def format(t):
    """Convert a tick count *t* (tenths of a second) into a display string.

    Easy   -> "M:SS.D"  (full precision)
    Medium -> "M:SS.*"  (tenths hidden)
    Hard   -> "M:S*.*"  (only the tens digit of the seconds shown)

    NOTE(review): relies on `/` being integer division -- this is
    CodeSkulptor/Python 2 code; under Python 3 the divisions yield floats
    and the output format breaks. Confirm target runtime.
    """
    millisecond = second = minute = 0
    minute = (t / 10) / 60    # whole minutes elapsed
    second = (t / 10) % 60    # leftover seconds within the minute
    millisecond = t % 10      # tenths-of-a-second digit
    if whichLevel() == "Easy":
        if second >= 10:
            return str(minute) + ":" + str(second) + "." + str(millisecond)
        else:
            # zero-pad single-digit seconds
            return str(minute) + ":0" + str(second) + "." + str(millisecond)
    elif whichLevel() == "Medium":
        # Tenths are masked so the player can't aim for the exact stop.
        if second >= 10:
            return str(minute) + ":" + str(second) + "." + "*"
        else:
            return str(minute) + ":0" + str(second) + "." + "*"
    else:
        # Hard: everything below tens-of-seconds is masked.
        return str(minute) + ":" + str(second / 10) + "*.*"
def judge(t):
    """Score one stop attempt.

    A stop is successful when the timer is past 1.0s (t > 10 ticks) and
    lands exactly on a whole second (tenths digit == 0). Updates the
    module-level attempt counters and recomputes the success percentage.
    """
    global accept_cnt, total_cnt, score
    if t > 10 and t % 10 == 0:
        accept_cnt += 1
    # Every stop counts as an attempt, successful or not.
    total_cnt += 1
    score = float(accept_cnt) / float(total_cnt) * 100.0
def whichLevel():
    """Map the module-level difficulty flag `mode` to its display name:
    0 -> Easy, 1 -> Medium, anything else -> Hard."""
    global mode
    return {0: "Easy", 1: "Medium"}.get(mode, "Hard")
# define event handlers for buttons; "Start", "Stop", "Reset"
def start():
timer.start()
def stop():
global total_millisecond
timer.stop()
judge(total_millisecond)
def reset():
global total_millisecond, accept_cnt, total_cnt, score, mode
timer.stop()
accept_cnt = 0
total_cnt = 0
total_millisecond = 0
score = 0
def easy():
global mode
mode = 0
def medium():
global mode
mode = 1
def hard():
global mode
mode = 2
# define event handler for timer with 0.1 sec interval
def update():
global total_millisecond
total_millisecond += 1
# define draw handler
def draw(canvas):
global accept_cnt, total_cnt, score
canvas.draw_text(format(total_millisecond), (width / 3, height / 2), 36, "White")
canvas.draw_text(str(accept_cnt) + "/" + str(total_cnt), (250, 25), 24, "Green")
canvas.draw_text("Score: " + str(int(score)) + "%", (25, 25), 24, "Red")
canvas.draw_text("Level: " + whichLevel(), (width / 3 + 50, height / 2 + 30), 18, "White")
# create frame
frame = simplegui.create_frame("Stopwatch", width, height)
# register event handlers
start_button = frame.add_button("Start", start, 100)
stop_button = frame.add_button("Stop", stop, 100)
reset_button = frame.add_button("Reset", reset, 100)
easy_button = frame.add_button("Easy", easy, 100)
medium_button = frame.add_button("Medium", medium, 100)
hard_button = frame.add_button("Hard", hard, 100)
frame.set_draw_handler(draw)
timer = simplegui.create_timer(per_second / 10, update)
# start frame
frame.start()
# Please remember to review the grading rubric
|
16,739 | a0e3bbea30e48fbb6df79c5d41f7343021da9b2b | import time
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from bs4 import BeautifulSoup
from selenium.webdriver.chrome.options import Options
import pandas as pd
options = Options()
options.headless = True
driver = webdriver.Chrome(chrome_options=options) #Safari()
#driver = webdriver.PhantomJS()
url = "http://www.capetown.gov.za/Family%20and%20home/residential-utility-services/residential-water-and-sanitation-services/this-weeks-dam-levels"
#url = 'http://www.capetown.gov.za/Family%20and%20home/residential-utility-services/residential-water-and-sanitation-services/this-weeks-dam-levels#Heading1.html'
driver.get(url)
time.sleep(8)
dados = driver.find_element_by_class_name("mobile-scroll")
dado = driver.find_element_by_id("Heading1") #tabela css:4746
#print(dado)
html = dado.get_attribute("innerHTML")#("innerHTML")
#print(html)
soup = BeautifulSoup(html, "html.parser")
table = soup.select_one("table")
#print(table)
line = []
# data = [d for d in table.select("tr")]
# for d in data:
# linha = ""
# for t in d.select("td"):
# linha += t.text+","
# line.append(linha)
driver.close()
data = [d for d in table.select("tr")]
#print(data)
row_list = []
for tr in data:
td = tr.find_all('td')
th = tr.find_all('th')
row = [tr.text.strip() for tr in td if tr.text.strip()]
head = [tr.text.strip() for tr in th if tr.text.strip()]
#print(head)
if head:
head[0] = 'Dam'
row_list.append(head)
if row:
row_list.append(row)
#print(row_list)
row_list_no_totals = row_list[1:-2]
#print(row_list_no_totals)
row_list_no_totals_clean = []
for r in row_list_no_totals:
#print(r)
row_list_no_totals_clean.append([s.replace('#','') for s in r])
df = pd.DataFrame(row_list_no_totals_clean, columns=row_list[0], dtype=float)
df_location = pd.read_csv('damLocations.txt', delimiter=',')
df_damData = df.merge(df_location, how='left')
#print(df_damData)
|
16,740 | 176ff59264635e708cf2aee1af3ce7cd3b7b7f6e | class School(object):
def __init__(
self, code, name, city, state, from_m,
from_y, to_m, to_y, grad_m, grad_y):
self.school_code = code
self.school_name = name
self.school_city = city
self.school_state = state
self.from_month = from_m
self.from_year = from_y
self.to_month = to_m
self.to_year = to_y
self.grad_month = grad_m
self.grad_year = grad_y
|
16,741 | 6408798a1796ddabb9e285d40046877ead8bd3c3 | import requests
import json
def getData(url):
req = requests.get(url)
return req.json()
class AttorneysRequest():
@classmethod
def getAttorneys(self):
attorneys = getData('https://www.lawyercentral.com/utils/maps.cfc?method=getAttorneysFromLatLng&lat=39.828185&lng=-98.57954&lawyerName=&stateAbb=&practiceareaID=89')
for attorney in attorneys['markers']:
print(attorney['name'])
practice_areas = getData('https://www.lawyercentral.com/utils/lawyer.cfc?method=returnPracticeAreaJSON&lawyer_id=%s' % attorney['id'])
for area in practice_areas['data']:
if(area['name'] == 'Divorce'):
print(area['y']) |
16,742 | 883a4cc802521d73e29fdf652ef69f591d75898b | import torch
from utils import Utils
from model import GRUEncoder, Classifier, ContrastiveLoss
import torch.optim as optim
import os
import sys
from gensim.models import Word2Vec
import numpy as np
import jieba
def build_model(voc_size, emb_dim, hidden_size):
encoder = GRUEncoder(emb_dim, hidden_size, voc_size, emb_dim)
classifier = Classifier(hidden_size)
if os.path.exists(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'ckpt/encoder')):
encoder.load_state_dict(torch.load(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),'ckpt/encoder')))
else:
word2vec = Word2Vec.load(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),'ckpt/word2vec_init/model'))
trained = np.random.random(size=(voc_size, emb_dim))
trained[:-2][:] = np.array(word2vec.wv.vectors).reshape(voc_size-2, emb_dim)
encoder.embedding.weight = torch.nn.Parameter(torch.tensor(trained, dtype=torch.float))
if os.path.exists(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'ckpt/classifier')):
classifier.load_state_dict(torch.load(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),'ckpt/classifier')))
return encoder, classifier
def train_iter(helper, encoder, classifier, encoder_optimizer, classifier_optimizer):
encoder.train()
classifier.train()
batch_size = 128
encoder_optimizer.zero_grad()
classifier_optimizer.zero_grad()
sens1, sens2, length1, length2, label, index = helper.get_random_batch(batch_size)
target = torch.tensor(label.view(-1, 1), dtype=torch.float)
_, hidden1, cell1 = encoder(sens1, length1)
_, hidden2, cell2 = encoder(sens2, length2)
loss = 0
for j in range(batch_size):
logit = classifier(cell1[0, index[j]], cell2[0, j])
loss += criterion(logit, label[j].view(1))
loss.backward()
encoder_optimizer.step()
classifier_optimizer.step()
return loss.item() / batch_size
def evaluate(encoder, classifier, helper):
encoder.eval()
classifier.eval()
with torch.no_grad():
result = []
label = []
with open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../data/test'), 'r') as file_sen, open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../data/test_label'), 'r') as file_label:
for line in file_sen:
lineno, sen1, sen2 = line.strip().split('\t')
words1 = [w for w in jieba.cut(sen1) if w.strip()]
words2 = [w for w in jieba.cut(sen2) if w.strip()]
sen_id_1 = []
sen_id_2 =[]
for word in words1:
if word in helper.dic.keys():
sen_id_1.append(helper.dic[word])
else:
sen_id_1.append(helper.dic['<UNK>'])
for word in words2:
if word in helper.dic.keys():
sen_id_2.append(helper.dic[word])
else:
sen_id_2.append(helper.dic['<UNK>'])
data_1 = torch.tensor(sen_id_1, dtype=torch.long).view(1, -1)
data_2 = torch.tensor(sen_id_2, dtype=torch.long).view(1, -1)
_, _, output_1 = encoder(data_1)
_, _, output_2 = encoder(data_2)
logit = classifier(output_1, output_2)
if logit.data[0][0] > logit.data[0][1]:
result.append("0")
else:
result.append("1")
for la in file_label:
label.append(la.strip().split('\t')[1])
TP = 0
FP = 0
TN = 0
FN = 0
for j in range(len(label)):
if result[j] == "1" and label[j] == "1":
TP += 1
if result[j] == "1" and label[j] == "0":
FP += 1
if result[j] == "0" and label[j] == "0":
TN += 1
if result[j] == "0" and label[j] == "1":
FN += 1
precision = 0.0
if not (TP + FP) == 0:
precision = TP / ((TP + FP) * 1.0)
recall = 0.0
if not (TP + FN) == 0:
recall = TP / ((TP + FN) * 1.0)
accuracy = (TP + TN) / ((TP + FP + TN + FN) * 1.0)
f1_score = 0.0
if not (precision + recall) == 0:
f1_score = 2 * precision * recall / (precision + recall)
return round(precision, 3), round(recall, 3), round(accuracy, 3), round(f1_score, 3)
def test(encoder, classifier, helper):
with torch.no_grad():
encoder.eval()
classifier.eval()
batch_size = 128
sens1, sens2, length1, length2, label, index = helper.get_random_batch(batch_size)
_, hidden1, cell1 = encoder(sens1, length1)
_, hidden2, cell2 = encoder(sens2, length2)
TP = 0
FP = 0
TN = 0
FN = 0
for j in range(batch_size):
logit = classifier(cell1[0, index[j]], cell2[0, j])
lo = 0
if logit.data[0][0] > logit.data[0][1]:
lo = 0
else:
lo = 1
if lo == 1 and label.data[j] == 1:
TP += 1
if lo == 1 and label.data[j] == 0:
FP += 1
if lo == 0 and label.data[j] == 0:
TN += 1
if lo == 0 and label.data[j] == 1:
FN += 1
precision = 0.0
if not (TP + FP) == 0:
precision = TP / ((TP + FP) * 1.0)
recall = 0.0
if not (TP + FN) == 0:
recall = TP / ((TP + FN) * 1.0)
accuracy = (TP + TN) / ((TP + FP + TN + FN) * 1.0)
f1_score = 0.0
if not (precision + recall) == 0:
f1_score = 2 * precision * recall / (precision + recall)
return round(precision, 3), round(recall, 3), round(accuracy, 3), round(f1_score, 3)
helper = Utils()
helper.build_dic()
helper.process_data(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../data/train/train_add'), os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../data/train/label_add'))
encoder, classifier = build_model(len(helper.dic), 128, 128)
encoder_optimizer = optim.Adam(encoder.parameters(), lr=0.0001)
classifier_optimizer = optim.Adam(classifier.parameters(), lr=0.0001)
criterion = torch.nn.CrossEntropyLoss()
loss_list = []
print "epoch precision recall accuracy F1_score train_loss"
eval_f1 = 0.
not_break = True
for i in range(20000):
if not_break:
loss = train_iter(helper, encoder, classifier, encoder_optimizer, classifier_optimizer)
loss_list.append(loss)
if (i + 1) % 100 == 0:
precision, recall, accuracy, f1_score = test(encoder, classifier, helper)
print(str(i + 1) + ' ' + str(precision) + ' ' + str(recall) + ' ' + str(
accuracy) + ' ' + str(f1_score) + ' ' + str(round(sum(loss_list[-100:]) / 100.0, 4)))
# torch.save(encoder.state_dict(), os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'ckpt/encoder'))
# torch.save(classifier.state_dict(),
# os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'ckpt/classifier'))
if (i + 1) % 200 == 0:
precision, recall, accuracy, f1_score = evaluate(encoder, classifier, helper)
if f1_score > eval_f1:
print(str(i + 1) + ' ' + str(precision) + ' ' + str(recall) + ' ' + str(
accuracy) + ' ' + str(f1_score))
torch.save(encoder.state_dict(),
os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'ckpt/encoder'))
torch.save(classifier.state_dict(),
os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'ckpt/classifier'))
eval_f1 = f1_score
elif f1_score < eval_f1:
not_break = False
else:
break
|
16,743 | f6eb3b6a3eb99939ea60c701fe8a8b6233fc6494 | from django.shortcuts import render
# Create your views here.
def information(request):
    """A view to return the Public Information page.

    Renders information/information.html with an (empty) context dict.
    """
    # Removed stray debug output: the original printed 'Hello, world' to
    # stdout on every request, a development leftover.
    context = {}
    return render(request, 'information/information.html', context)
|
16,744 | 59a3c46313588a7365975b1f20a7100775dc4506 | class DayOfDead(object):
def enter(self):
print "A nice looking skeleton woman with a big dress is standing in"
print "Front of you. You can see food offerings on a table, and "
print "Grandma and grandpa's bones sitting on chairs"
print "How do you say \"dead\" in spanish?"
answer = raw_input("> ")
if answer == "muerto":
print "Good Job !!"
return 'dance'
else:
print "Wrong !, you die. Bye !"
exit(1)
|
16,745 | 5125b933ce08ef57fb284e001e4eda94b5b5c9d4 | __all__ = ['Starter']
import httplib
from multiprocessing.managers import BaseManager
import socket
import time
import urllib2
from veetwo.lib.aggregation.spider import get_spider_config_value as c
from veetwo.lib.aggregation.spider.doc_store import DocStoreS3
from veetwo.lib.aggregation.spider.model import Document
from veetwo.lib.aggregation.spider.urlretriever import URLRetriever, URLRetrieveError
from veetwo.lib.aggregation.spider.docprocessor_queue import DocProcessorQueue
#from veetwo.tools.jobs.command import inject_logging
import logging
L = logging
D = logging.debug
I = logging.info
W = logging.warning
X = logging.exception
SECONDS_BETWEEN_PINGS = 10
SECONDS_BETWEEN_REQUESTS = 5
class UrlBrokerClient(BaseManager):
pass
UrlBrokerClient.register('get_broker')
URL_BROKER_ADDRESS = c('urlbroker.address')
URL_BROKER_PORT = int(c('urlbroker.port'))
URL_BROKER_AUTHKEY = c('urlbroker.authkey')
DOCUMENT_STORE_BUCKET_NAME = c('documentstore.bucketname')
TIME_TO_WAIT_FOR_OTHER_FETCHERS_TO_REGISTER = float(c('fetcher.register_wait_time', default=120))
class Starter(object):
log_name = 'aggregation.spider.fetcher'
def run(self):
f = Fetcher()
try:
f.run_forever()
except Exception, er:
I('Exception %s', er)
raise
class Fetcher(object):
def send_email(self, subject, body):
    """Send a plain-text failure notification to the maintainers via the
    local SMTP relay.

    Bug fixes: `smtplib` was never imported anywhere in this module, so
    the original raised NameError the first time it ran; and the To:
    header interpolated a tuple, producing "('a', 'b')" instead of a
    valid comma-separated address list.
    """
    import smtplib  # local import: module top has no smtplib import
    sender = "curl-uploader@nextag.com"
    receivers = ['utsav.sabharwal@nextag.com', 'jayant.yadav@nextag.com']
    msg = ("From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s"
           % (sender, ', '.join(receivers), subject, body))
    s = smtplib.SMTP('localhost')
    s.sendmail(sender, receivers, msg)
    s.quit()
def run_forever(self):
f=open("/mnt/curl/11/output/success.log").readlines()
for x in f:
print x.split(":::")
try:
url = x.split(":::")[2].strip()
except Exception, ex:
msg = ex[1]+chr(10)+x
self.send_mail("Uploader Failure: Failed to extract url", msg)
try:
text = open("/mnt/curl/11/"+x.split(":::")[0][3:].strip()).read()
except Exception, ex:
msg = ex[1]+chr(10)+"url:"+url
self.send_mail("Uploader Failure: File not found", msg)
try:
document = Document(url, text)
except Exception, ex:
msg = ex[1]+chr(10)+"url:"+url+chr(10)+"text:"+text
self.send_mail("Uploader Failure: Failed to create standard file", msg)
try:
DocStoreS3(bucket_name="com.wize.spider.sandbox.docstore").put(document)
except Exception, ex:
msg = ex[1]+chr(10)+"bucket_name: com.wize.spider.sandbox.docstore"+chr(10)+"url:"+url+chr(10)+"text"+text
self.send_mail("Uploader Failure: Failed to upload to S3", msg)
try:
DocProcessorQueue(name="docprocessor_queue_beta_spider", logger=L).put(url=url, docid=document.id)
except Exception, ex:
msg = ex[1]+chr(10)+"queue_name: docprocessor_queue_beta_spider"+chr(10)+"url:"+url+chr(10)+"text"+text
self.send_mail("Uploader Failure: Failed to upload to SQS", msg)
|
16,746 | e70f11179e6c7c612cc863775adb14b385581959 | import librosa
from mir_eval.separation import bss_eval_sources
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--in_wav1', type=str, required=True,
help="input wav 1")
parser.add_argument('--in_wav2', type=str, required=True,
help="input wav 2")
args = parser.parse_args()
srate=16000
s1, _ = librosa.load(args.in_wav1, sr=srate)
s2, _ = librosa.load(args.in_wav2, sr=srate)
min_len=min(len(s1), len(s2))
s1=s1[:min_len]
s2=s2[:min_len]
sdr = bss_eval_sources(s1, s2, False)[0][0]
print(sdr)
|
16,747 | 0c63d8645f22a4a1296ec877bb8f9768576e1649 | import os
import sys
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
def list_files(dir):
    """Return full paths of the regular files directly inside *dir*
    (subdirectories and other non-file entries are excluded)."""
    candidates = (os.path.join(dir, entry) for entry in os.listdir(dir))
    return [path for path in candidates if os.path.isfile(path)]
def read_images(filenames):
images = []
for f in filenames:
image = Image.open(f)
images.append(image)
# sort by image exposure time
images.sort(key=exposure_time)
times = [exposure_time(im) for im in images]
return images, times
def exposure_time(im):
exif = im._getexif()
a, b = exif[33434] # EXIF exposuture time tag
return float(a)/b
def recover_g(imgs, times):
n_imgs = len(imgs)
tmp = imgs[0].copy()
tmp.thumbnail((20, 20))
width, height = tmp.size
n_pixels = width*height
B = np.log(times)
Z_R = np.zeros((n_pixels, n_imgs))
Z_G = np.zeros((n_pixels, n_imgs))
Z_B = np.zeros((n_pixels, n_imgs))
# split rgb
for i, im in enumerate(imgs):
im = im.copy()
im.thumbnail((width, height))
r, g, b = im.split()
Z_R[:, i] = np.array(r, dtype=np.int).flatten()
Z_G[:, i] = np.array(g, dtype=np.int).flatten()
Z_B[:, i] = np.array(b, dtype=np.int).flatten()
l = 100
g_r, lE_r = solve_g(Z_R, B, l, weight_function)
g_g, lE_g = solve_g(Z_G, B, l, weight_function)
g_b, lE_b = solve_g(Z_B, B, l, weight_function)
# x = np.arange(0, 256)
# plt.plot(g_r, x)
# plt.plot(g_g, x)
# plt.plot(g_b, x)
# plt.show()
return g_r, g_g, g_b
def solve_g(Z, B, l, w):
    """Solve for a camera response curve (Debevec & Malik, SIGGRAPH '97).

    Z -- (n_pixels, n_images) int array of pixel values in [0, 255]
    B -- log exposure time of each image
    l -- smoothness weight lambda
    w -- per-intensity weighting function

    Returns (g, lE): g[z] is the log exposure for pixel value z (256
    entries) and lE the log irradiance of each sampled pixel.
    """
    n = 256
    n_pixels, n_images = Z.shape
    # One data-fitting row per (pixel, image) sample, one anchoring row
    # (g[128] = 0), and n-2 curvature/smoothness rows.
    A = np.zeros((n_pixels * n_images + n + 1, n + n_pixels))
    b = np.zeros(A.shape[0])
    k = 0
    for i in range(n_pixels):
        for j in range(n_images):
            wij = w(Z[i, j])
            A[k, Z[i, j]] = wij
            A[k, n + i] = -wij
            b[k] = wij * B[j]
            k += 1
    A[k, 128] = 1  # pin the curve's midpoint so the system has a unique offset
    k += 1
    for i in range(n - 2):
        wi = w(i + 1)  # hoisted: previously evaluated three times per row
        A[k, i] = l * wi
        A[k, i + 1] = -2 * l * wi
        A[k, i + 2] = l * wi
        k += 1
    # rcond=None selects NumPy's current default cutoff; the bare call used
    # the legacy default and emitted a FutureWarning on every solve.
    x = np.linalg.lstsq(A, b, rcond=None)[0]
    return x[:n], x[n:]
def weight_function(z):
    """Hat-shaped weight over 8-bit intensities: smallest (1) at the
    extremes 0 and 255, largest (129) at the mid-range value 128, and
    never zero, so every sample contributes to the fit."""
    if z <= 128:
        return z + 1
    return 256 - z
def gen_weight_map():
    """Precompute weight_function over every 8-bit intensity; returns a
    256-entry list indexed by pixel value."""
    return list(map(weight_function, range(256)))
def radiance(g, imgs, times, w):
n_imgs = len(imgs)
width, height = imgs[0].size
length = width*height
pixels = np.zeros((length, n_imgs), dtype=np.int8)
tmp = np.zeros((length, n_imgs))
weight = np.zeros((length, n_imgs))
for i, im in enumerate(imgs):
pixels[:, i] = np.array(im).flatten()
# for z in range(256):
# rows, cols = np.where(pixels == z)
# tmp[rows, cols] = g[z]
# weight[rows, cols] = w(z)
# tmp = tmp - np.log(times)
vfun = np.vectorize(lambda z: g[z])
tmp = vfun(pixels)
tmp = tmp - np.log(times)
vfun = np.vectorize(lambda z: w[z])
weight = vfun(pixels)
lnE = np.sum(weight*tmp, axis=1) / np.sum(weight, axis=1)
return lnE.reshape((height, width))
def tone_mapping(E):
    """Compress radiance E into the displayable 8-bit range via the simple
    global operator x / (1 + x), scaled by 255 (maps [0, inf) -> [0, 255))."""
    return 255 * (E / (E + 1))
def make_hdr(images, times):
print('recover g...')
g_r, g_g, g_b = recover_g(images, times)
R = []
G = []
B = []
for img in images:
r, g, b = img.split()
R.append(r)
G.append(g)
B.append(b)
w = gen_weight_map()
print('radiance r')
lnE_r = radiance(g_r, R, times, w)
E_r = np.exp(lnE_r)
p_r = tone_mapping(E_r)
print('radiance g')
lnE_g = radiance(g_g, G, times, w)
E_g = np.exp(lnE_g)
p_g = tone_mapping(E_g)
print('radiance b')
lnE_b = radiance(g_b, B, times, w)
E_b = np.exp(lnE_b)
p_b = tone_mapping(E_b)
print('display result')
r = Image.fromarray(np.array(p_r, dtype=np.uint8), mode='L')
g = Image.fromarray(np.array(p_g, dtype=np.uint8), mode='L')
b = Image.fromarray(np.array(p_b, dtype=np.uint8), mode='L')
img = Image.merge('RGB', (r, g, b))
return img
|
16,748 | 7608eb73ec51c816efe9b4416df6a1ccc3b30450 | # from common.Cloudscale import *
import boto.ec2
import time
from scripts.common.Cloudscale import check_args, parse_args
class EC2CreateAMI:
def __init__(self, config, logger):
self.logger = logger
self.cfg = config.cfg
self.config = config
self.key_name = self.cfg.get('EC2', 'key_name')
self.key_pair = self.cfg.get('EC2', 'key_pair')
self.conn = boto.ec2.connect_to_region(self.cfg.get('EC2', 'region'),
aws_access_key_id=self.cfg.get('EC2', 'aws_access_key_id'),
aws_secret_access_key=self.cfg.get('EC2', 'aws_secret_access_key'))
ami_id = self.create_ami(self.cfg.get('infrastructure', 'ip_address'))
self.config.save('infrastructure', 'ami_id', ami_id)
self.logger.log("Done")
def create_ami(self, instance_ip):
self.logger.log("Creating AMI from instance %s" % instance_ip)
if instance_ip is None:
self.logger.log("instance_ip is null")
exit(0)
instance_id = None
for instance in self.conn.get_only_instances():
if instance.ip_address == instance_ip:
instance_id = instance.id
break
if instance_id is None:
self.logger.log("Can't find any instances to create ami from!")
exit(0)
try:
image_id = self.conn.create_image(instance_id, 'cloudscale-as-image')
self.wait_available(image_id)
self.terminate_instance(instance_id)
return image_id
except boto.exception.EC2ResponseError as e:
if str(e.error_code) == 'InvalidAMIName.Duplicate':
image = self.conn.get_all_images(filters={'name' : 'cloudscale-as-image'})[0]
image.deregister()
return self.create_ami(instance_ip)
self.logger.log("Error creating AMI image")
exit(0)
def terminate_instance(self, instance_id):
self.conn.terminate_instances([instance_id])
self.wait_terminate(instance_id)
def wait_available(self, image_id):
self.logger.log("Waiting to create AMI from instance ..")
status = self.conn.get_all_images(image_ids=[image_id])[0].state
i=1
while status != 'available':
if i%10 == 0:
self.logger.log("\nPlease wait .")
self.logger.log(".", append_to_last=True)
status = self.conn.get_all_images(image_ids=[image_id])[0].state
time.sleep(3)
i=i+1
self.logger.log("Done")
def wait_terminate(self, instance_id):
self.logger.log("Waiting for instance to terminate\nPlease wait ..")
status = self.conn.get_all_instances([instance_id])[0].instances[0].state
i=1
while status != 'terminated':
if i%10 == 0:
self.logger.log("\nPlease wait .")
self.logger.log(".", append_to_last=True)
status = self.conn.get_all_instances([instance_id])[0].instances[0].state
time.sleep(3)
i=i+1
self.logger.log("Instance is terminated!")
if __name__ == "__main__":
check_args(1, "<config_path>")
config_file, cfg, key_name, key_pair = parse_args()
EC2CreateAMI(config_file, cfg, key_name, key_pair)
|
16,749 | b2529ad5570aa174a24775e90327468bb2eaefb6 | from django.contrib import admin
from EnoteApp.models import Notes
# Register your models here.
admin.site.register(Notes) |
16,750 | 433b26bac9c1a398dc4cf8c030b377921cafa779 | # osTest02.py
# os 모듈 : Operating System(운영체제)와 관련된 모듈
# 폴더 생성, 수정, 삭제 등등
# 파일과 관련된 여러 가지 속성 정보
import os
# 폴더 구분자를 사용할 때 /는 한번만, \는 반드시 두개 ex) c:/user, c:\\user
myfolder = 'd:\\'
newpath = os.path.join(myfolder, 'sample')
try:
os.mkdir(path=newpath) # mkdir : make directory
for i in range(1, 11):
newfile = os.path.join(newpath, 'folder' + str(i).zfill(2))
os.mkdir(path=newfile)
except FileExistsError as err:
print(err)
print('#'*30) |
16,751 | 2c7705eba52865f99f586e82e8c25341041a94b0 | __author__ = 'computer'
import unittest
from Lemon.class_09_17_test.MathMethod import MathMethod
class class_Test(unittest.TestCase):
    """Smoke tests for MathMethod's arithmetic helpers."""

    def test_sum(self):
        t = MathMethod()
        total = t.Sum(1, 2)  # renamed from `sum`, which shadowed the builtin
        print('两个数相加的值为:{}'.format(total))

    def test_sub(self):
        t = MathMethod()
        t.Sub(1, 2)
        # print('两个数相加的值为:{}'.format(sub))


# Bug fix: the module previously ended with the truncated statement
# `su = unittest.s`, which raised AttributeError the moment the file was
# imported; the author evidently meant to run the test suite.
if __name__ == '__main__':
    unittest.main()
16,752 | d56a6ad837fb91dc4ebc02976835b28670c3d60c | # Copyright (c) 2007, Enthought, Inc.
# License: BSD Style.
"""
This demo shows you how to use animated GIF files in a traits user interface.
"""
from os.path \
import join, dirname, abspath
from enthought.traits.api \
import HasTraits, File, Bool
from enthought.traits.ui.api \
import View, VGroup, HGroup, Item, EnumEditor
from enthought.traits.ui.wx.animated_gif_editor \
import AnimatedGIFEditor
# Some sample animated GIF files:
import enthought.traits as traits
base_path = join( dirname( traits.api.__file__ ),
'..', '..', 'examples', 'demo', 'Extras', 'images' )
files = [
abspath( join( base_path, 'logo_64x64.gif' ) ),
abspath( join( base_path, 'logo_48x48.gif' ) ),
abspath( join( base_path, 'logo_32x32.gif' ) )
]
class AnimatedGIFDemo ( HasTraits ):
# The animated GIF file to display:
gif_file = File( files[0] )
# Is the animation playing or not?
playing = Bool( True )
# The traits view:
view = View(
VGroup(
HGroup(
Item( 'gif_file',
editor = AnimatedGIFEditor( playing = 'playing' ),
show_label = False ),
Item( 'playing' ),
),
'_',
Item( 'gif_file',
label = 'GIF File',
editor = EnumEditor( values = files )
)
),
title = 'Animated GIF Demo',
buttons = [ 'OK' ]
)
# Create the demo:
demo = AnimatedGIFDemo()
# Run the demo (if invoked from the command line):
if __name__ == '__main__':
demo.configure_traits()
|
16,753 | 24e5bd41b63eee4b8bd98c8a7ec6c224bf656c17 | # -*- coding: utf-8 -*-
from openerp.osv import fields, osv
class ship_kind(osv.osv):
_name = 'ship.kind'
#_rec_name = 'complete_name' #显示的是name的值,没建的话重写 ------- 不能加,会不可用而显示出全部记录The field 'Name' (complete_name) can not be searched: non-stored function field without fnct_search
def name_get_full(self, cr, uid, ids, context=None):
if isinstance(ids, (list, tuple)) and not len(ids):
return []
if isinstance(ids, (long, int)):
ids = [ids]
reads = self.read(cr, uid, ids, ['name','parent_id'], context=context)
res = []
for record in reads:
name = record['name']
if record['parent_id']:
name = record['parent_id'][1]+' / '+name
res.append((record['id'], name))
return res
def _name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None):
res = self.name_get_full(cr, uid, ids, context=context)
return dict(res)
_description = "Ship Category"
_columns = {
'name': fields.char('Name', size=64, required=True, translate=True, select=True),
'complete_name': fields.function(_name_get_fnc, type="char", string='Name'),
'parent_id': fields.many2one('ship.kind','Parent Category', select=True, ondelete='cascade'),
'child_id': fields.one2many('ship.kind', 'parent_id', string='Child Categories'),
'sequence': fields.integer('Sequence', select=True, help="Gives the sequence order when displaying a list of product categories."),
'parent_left': fields.integer('Left Parent', select=1),
'parent_right': fields.integer('Right Parent', select=1),
}
_defaults = {
}
_parent_name = "parent_id"
_parent_store = True
_parent_order = 'sequence, name'
_order = 'parent_left'
def _check_recursion(self, cr, uid, ids, context=None):
level = 100
while len(ids):
cr.execute('select distinct parent_id from ship_kind where id IN %s',(tuple(ids),))
ids = filter(None, map(lambda x:x[0], cr.fetchall()))
if not level:
return False
level -= 1
return True
_constraints = [
(_check_recursion, 'Error ! You cannot create recursive categories.', ['parent_id'])
]
def child_get(self, cr, uid, ids):
return [ids]
ship_kind(
) |
16,754 | 1993ed207c66820301e6919f50cb7f656c540f11 | import os
from flask import Flask, render_template, session, redirect
def create_app(test_config=None):
app = Flask(__name__, instance_relative_config=True)
app.config.from_mapping(
SECRET_KEY='124sdsd'
)
if test_config is None:
# It probably fail, but it must shut up!!
app.config.from_pyfile('config.py', silent=True)
else:
app.config.from_mapping(test_config)
try:
# Ensure instance path exists
os.makedirs(app.instance_path)
except OSError:
# So it already exists... do nothing
pass
# Register db session hook, and db seed command
from . import model
model.init_app(app)
# Register auth blueprint
from . import auth
app.register_blueprint(auth.bp)
# Register app blueprint
from . import main_app
app.register_blueprint(main_app.bp)
@app.route('/hello')
def hello():
return 'Hello, World!'
@app.route('/')
def index():
if session.get('user_id', None) is not None:
return redirect('/app/data')
return render_template('index.html')
return app |
16,755 | f4333295cdd0b42775b94eda189c743d61b31bcf | import concurrent.futures
import logging
import mimetypes
import os
import socket
from datetime import datetime
from optparse import OptionParser
from urllib.parse import unquote
INDEX_PAGE = 'index.html'
def fill_file_info(path, need_body):
    """Stat *path* and build the HTTP headers needed to serve it.

    Returns (headers, body): headers carry Date (from mtime),
    Content-Length, and a guessed Content-Type (falling back to
    application/octet-stream); body is the file's bytes when need_body is
    true, otherwise b'' (HEAD requests).
    """
    info = os.stat(path)
    guessed, _ = mimetypes.guess_type(path)
    headers = {
        'Date': datetime.fromtimestamp(info.st_mtime).strftime('%d.%m.%Y'),
        'Content-Length': info.st_size,
        'Content-Type': guessed if guessed is not None else 'application/octet-stream',
    }
    if not need_body:
        return headers, b''
    with open(path, 'rb') as handle:
        return headers, handle.read()
def check_access(address):
    """Guard against path traversal: by the time this runs the path has
    been normalized, so any escape from the served root shows up as a
    leading parent-directory reference."""
    if address[:2] == '..':
        raise PermissionError
def read_http_request(client_socket):
    """Read one HTTP request from an open socket.

    Returns (request_line, header_dict, body_str). Raises RuntimeError when
    the peer closes the connection before the end of the headers arrives.
    """
    data = b''
    partlen = 1024
    while True:
        part = client_socket.recv(partlen)
        data += part
        if b'\r\n\r\n' in data:
            # Headers are complete; split[1] is whatever part of the body has
            # already arrived. NOTE(review): split() without maxsplit=1 would
            # truncate a body that itself contains a blank line -- confirm
            # whether callers ever POST such payloads.
            split = data.split(b'\r\n\r\n')
            body = split[1]
            request_splitted = split[0].decode().splitlines()
            request = request_splitted[0]
            # NOTE(review): assumes every header line contains ': '; a
            # malformed header raises IndexError here.
            request_headers = {y[0]: y[1] for y in [x.split(': ') for x in request_splitted[1:]]}
            break
        if not part:
            # Peer closed before the header terminator -- malformed request.
            raise RuntimeError
    # Keep reading until the advertised body length has fully arrived.
    if 'Content-Length' in request_headers.keys():
        while len(body) < int(request_headers['Content-Length']):
            part = client_socket.recv(partlen)
            body += part
    return request, request_headers, body.decode()
def prepare_address(address, base_folder):
    """Map an HTTP request target onto a normalised relative file path.

    Percent-escapes are decoded, any query string is dropped, directory
    requests get INDEX_PAGE appended, and an optional base folder is
    prefixed before normalisation.
    """
    address = unquote(address)
    query_start = address.find('?')
    if query_start != -1:
        address = address[:query_start]
    if address.endswith('/'):
        address += INDEX_PAGE
    prefix = f'/{base_folder}' if base_folder else ''
    return os.path.normpath(f'.{prefix}{address}')
def create_http_response(http, code, mnemonic, headers=None, body=b''):
    """Serialize an HTTP response message.

    http     -- protocol version string, e.g. 'HTTP/1.1'.
    code     -- numeric status code.
    mnemonic -- reason phrase ('OK', 'NOT FOUND', ...).
    headers  -- optional extra headers; the base Connection/Server headers
                are always added. The caller's dict is NOT mutated.
    body     -- payload appended after the blank line, as bytes.
    Returns the complete response as bytes.

    BUG FIX: the old signature used the mutable default ``headers={}`` and
    mutated it (and any caller-supplied dict) in place via ``.update``.
    """
    merged = dict(headers) if headers else {}
    merged.update({
        'Connection': 'keep-alive',
        'Server': 'Python socket'
    })
    response = f'{http} {code} {mnemonic}\r\n'
    response += '\r\n'.join([f'{k}: {v}' for k, v in merged.items()])
    response += '\r\n\r\n'
    return response.encode() + body
def connection_handling(client_socket, base_folder):
    """Serve one client connection: parse a single request, send one response.

    client_socket -- accepted socket; always closed before returning.
    base_folder   -- optional subdirectory served as the web root.
    Exceptions from the request pipeline are mapped onto HTTP error codes.
    """
    http = 'HTTP/1.1'
    try:
        request, _, _ = read_http_request(client_socket)
        method, address, http = request.split(' ')
        if method not in ['GET', 'HEAD']:
            response = create_http_response(
                http,
                405,
                'METHOD NOT ALLOWED',
                {'Allow': 'GET, HEAD'}
            )
        else:
            address = prepare_address(address, base_folder)
            check_access(address)
            # HEAD responses carry headers only, no body.
            file_head, file_body = fill_file_info(address, method != 'HEAD')
            response = create_http_response(
                http,
                200,
                'OK',
                file_head,
                file_body
            )
    except (FileNotFoundError, NotADirectoryError):
        response = create_http_response(http, 404, 'NOT FOUND')
    except PermissionError:
        response = create_http_response(http, 403, 'FORBIDDEN')
    except RuntimeError:
        response = create_http_response(http, 400, 'BAD REQUEST')
    except Exception as e:
        response = create_http_response(http, 500, e)
    try:
        # BUG FIX: socket.send() may transmit only part of a large response;
        # sendall() retries until everything is written.
        client_socket.sendall(response)
    finally:
        # Close even if the peer went away mid-send.
        client_socket.close()
if __name__ == "__main__":
op = OptionParser()
op.add_option("-p", "--port", action="store", type=int, default=8080)
op.add_option("-w", "--workers", action="store", default=1)
op.add_option("-r", "--root", action="store", default="")
(opts, args) = op.parse_args()
logging.basicConfig(filename=None, level=logging.INFO,
format='[%(asctime)s] %(levelname).1s %(message)s', datefmt='%Y.%m.%d %H:%M:%S')
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
server.bind(('', opts.port))
server.listen(50000)
with concurrent.futures.ThreadPoolExecutor(max_workers=int(opts.workers)) as executor:
while True:
client, addr = server.accept()
executor.submit(connection_handling, client, opts.root)
|
16,756 | 2a95f1c1b67ba8495ca0cab23bef64164d5116b6 | import numpy as np
from flareFind import procFlares
# Directory holding the GJ 1243 TESS light curves on the gradscratch store.
path = '/astro/store/gradscratch/tmp/scw7/tessData/lightcurves/gj1243/'
# One light-curve filename per line; '#' lines are skipped.
filenames = np.genfromtxt('gj1243_files.txt', comments='#', dtype='str')
# Run the flare-finding pipeline on every file, regenerating plots and GP fits.
procFlares('gj1243', filenames, path, makePlots=True, clobberGP=True)
|
16,757 | 9f0ce8d3de2c3e38338d3be6fce861518369c718 | import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk, Gdk, GObject
from game import Player, Board, desc, pointsCal
class SumGtk(Player, Gtk.Window):
    """Final-results window: ranks the players of a finished game by points.

    Blocks in Gtk.main() until the window is closed. Labels are in Polish
    ("Wyniki" = results, "miejsce" = place).
    """

    def __init__(self, data):
        """Build and show the summary window, then enter the Gtk main loop.

        data -- sequence of player objects; assumed to expose .Name, .Color
        and .points (TODO confirm against the game module).
        """
        Gtk.Window.__init__(self, title="Eclipse GTK")
        self.S = data            # player list; sorted in-place by getScore()
        self.SLen = len(data)
        self.PNum = 0
        # Apply the shared stylesheet screen-wide so CSS classes resolve.
        cssProvider = Gtk.CssProvider()
        cssProvider.load_from_path('styles.css')
        screen = Gdk.Screen.get_default()
        styleContext = Gtk.StyleContext()
        styleContext.add_provider_for_screen(screen, cssProvider,
            Gtk.STYLE_PROVIDER_PRIORITY_USER)
        self.set_position(Gtk.WindowPosition.CENTER)
        self.set_default_size(1000, 500)
        self.set_resizable(False)
        self.connect("destroy", Gtk.main_quit)
        self.sumMenu(self.S)
        self.show_all()
        # Blocks until the window is destroyed.
        Gtk.main()

    def sumMenu(self, Score):
        """Create the fixed-layout result grid (ranks, names, points)."""
        self.sumBox = Gtk.Fixed()
        self.sumlabel = Gtk.Label(label="Wyniki")
        self.sumlabel.get_style_context().add_class("titlelabel")
        self.g1 = Gtk.Label(label="1. miejsce")
        self.g1.get_style_context().add_class("sumlabel")
        self.g2 = Gtk.Label(label="2. miejsce")
        self.g2.get_style_context().add_class("sumlabel")
        # Places 3 and 4 start empty; getScore() fills them only when the
        # game actually had that many players.
        self.g3 = Gtk.Label(label="")
        self.g3.get_style_context().add_class("sumlabel")
        self.g4 = Gtk.Label(label="")
        self.g4.get_style_context().add_class("sumlabel")
        self.player1 = Gtk.Label(label="")
        self.player1.get_style_context().add_class("sumlabel")
        self.player2 = Gtk.Label(label="")
        self.player2.get_style_context().add_class("sumlabel")
        self.player3 = Gtk.Label(label="")
        self.player3.get_style_context().add_class("sumlabel")
        self.player4 = Gtk.Label(label="")
        self.player4.get_style_context().add_class("sumlabel")
        self.pkt1 = Gtk.Label(label="")
        self.pkt1.get_style_context().add_class("sumlabel")
        self.pkt2 = Gtk.Label(label="")
        self.pkt2.get_style_context().add_class("sumlabel")
        self.pkt3 = Gtk.Label(label="")
        self.pkt3.get_style_context().add_class("sumlabel")
        self.pkt4 = Gtk.Label(label="")
        self.pkt4.get_style_context().add_class("sumlabel")
        self.men = self.createMenu()
        # Pixel coordinates: columns at x=150 (rank), 350 (name), 650 (points).
        self.sumBox.put(self.men, 0, 0)
        self.sumBox.put(self.sumlabel, 450, 20)
        self.sumBox.put(self.g1, 150, 120)
        self.sumBox.put(self.g2, 150, 190)
        self.sumBox.put(self.g3, 150, 260)
        self.sumBox.put(self.g4, 150, 330)
        self.sumBox.put(self.player1, 350, 120)
        self.sumBox.put(self.player2, 350, 190)
        self.sumBox.put(self.player3, 350, 260)
        self.sumBox.put(self.player4, 350, 330)
        self.sumBox.put(self.pkt1, 650, 120)
        self.sumBox.put(self.pkt2, 650, 190)
        self.sumBox.put(self.pkt3, 650, 260)
        self.sumBox.put(self.pkt4, 650, 330)
        self.add(self.sumBox)
        self.getScore(Score)

    def getScore(self, S):
        """Sort players by points (descending) and fill the result labels.

        Note: S is the same list object as self.S, so the in-place sort
        affects both.
        """
        S.sort(key=lambda x: x.points, reverse=True)
        self.player1.set_text(self.S[0].Name)
        self.player1.get_style_context().add_class("color{}".format(self.S[0].Color))
        self.pkt1.set_text("{} pkt.".format(S[0].points))
        self.player2.set_text(self.S[1].Name)
        self.player2.get_style_context().add_class("color{}".format(self.S[1].Color))
        self.pkt2.set_text("{} pkt.".format(S[1].points))
        if len(S) >= 3:
            self.g3.set_text("3. miejsce")
            self.player3.set_text(self.S[2].Name)
            self.player3.get_style_context().add_class("color{}".format(self.S[2].Color))
            self.pkt3.set_text("{} pkt.".format(S[2].points))
        if len(S) == 4:
            self.g4.set_text("4. miejsce")
            self.player4.set_text(self.S[3].Name)
            self.player4.get_style_context().add_class("color{}".format(self.S[3].Color))
            self.pkt4.set_text("{} pkt.".format(S[3].points))

    def createMenu(self):
        """Build the menu bar with a single Info -> 'Opis aplikacji' entry."""
        self.menuBar = Gtk.MenuBar()
        self.infoMenu = Gtk.Menu()
        self.info = Gtk.MenuItem("Info")
        self.action = Gtk.MenuItem("Opis aplikacji")
        self.action.connect("activate", self.showInfo)
        self.info.set_submenu(self.infoMenu)
        self.infoMenu.append(self.action)
        self.menuBar.append(self.info)
        return self.menuBar

    def showInfo(self, arg):
        """Open a small non-resizable window showing the app description."""
        self.infoWidget = Gtk.Window()
        self.infoWidget.set_title("Opis aplikacji")
        self.infoWidget.set_position(Gtk.WindowPosition.CENTER)
        self.infoWidget.set_default_size(300, 100)
        self.infoWidget.set_resizable(False)
        infoText = Gtk.Label(label=desc.Opis)
        infoText.set_line_wrap(True)
        infoText.set_max_width_chars(48)
        infoText.set_margin_top(0)
        box = Gtk.HBox()
        box.pack_start(infoText, True, True, 0)
        self.infoWidget.add(box)
        self.infoWidget.show_all()
16,758 | d574d24bd86398efb499e2f8a11f2ed2595e9b17 | """Check Ceph overall cluster health.
This check parses `ceph status` output and generates various metrics. It is
intended to be run on all Ceph mons.
"""
import argparse
import json
import logging
import re
import subprocess
import nagiosplugin
DEFAULT_LOGFILE = "/var/log/ceph/ceph.log"
_log = logging.getLogger("nagiosplugin")
class CephStatus(object):
    """Wraps the JSON output of ``ceph status`` and exposes typed accessors."""

    def __init__(self, status_cmd):
        self.cmd = status_cmd      # shell command producing JSON status
        self._raw = None           # raw stdout of the last query
        self.status = None         # parsed JSON document

    def query(self):
        """Run the status command and cache the parsed JSON document."""
        _log.info('querying cluster status with "%s"', self.cmd)
        self._raw = subprocess.check_output(self.cmd, shell=True).decode()
        _log.debug("cluster status output:\n%s", self._raw)
        self.status = json.loads(self._raw)

    # -- helpers ----------------------------------------------------------

    def _pgmap(self):
        """The ``pgmap`` section, or an empty dict when absent."""
        return self.status.get("pgmap", {})

    def _pgmap_int(self, key):
        """Integer pgmap field; 0 when the key (or pgmap itself) is missing."""
        section = self._pgmap()
        return int(section[key]) if key in section else 0

    def _pgmap_pct(self, key):
        """Float pgmap ratio scaled to percent; 0.0 when missing."""
        section = self._pgmap()
        return float(section[key]) * 100.0 if key in section else 0.0

    # -- health -----------------------------------------------------------

    @property
    def overall(self):
        return self.status["health"]["overall_status"]

    @property
    def summary(self):
        """One-line health summary.

        When the "summary" entries are empty, fall back to the first
        "detail" entry (sometimes only details are populated).
        """
        joined = ", ".join(
            item["summary"] for item in self.status["health"]["summary"]
        )
        if joined:
            return joined
        details = self.detail
        return details[0] if details else ""

    @property
    def detail(self):
        """Detailed status (e.g., clock skew) as a list."""
        return self.status["health"]["detail"]

    # -- capacity ---------------------------------------------------------

    @property
    def data_bytes(self):
        """Net amount of saved data (excluding replicas)."""
        return int(self.status["pgmap"]["data_bytes"])

    @property
    def bytes_total(self):
        """Gross storage space in the cluster (including replicas)."""
        return int(self.status["pgmap"]["bytes_total"])

    @property
    def bytes_used(self):
        """Gross amount of saved data including replicas."""
        return int(self.status["pgmap"]["bytes_used"])

    @property
    def bytes_avail(self):
        return int(self.status["pgmap"]["bytes_avail"])

    @property
    def bytes_net_total(self):
        return self.bytes_used + self.bytes_avail

    # -- rates ------------------------------------------------------------

    @property
    def read_rate(self):
        return self._pgmap_int("read_bytes_sec")

    @property
    def write_rate(self):
        return self._pgmap_int("write_bytes_sec")

    @property
    def ops(self):
        return self._pgmap_int("op_per_sec")

    @property
    def recovery_rate(self):
        return self._pgmap_int("recovering_bytes_per_sec")

    @property
    def degraded_ratio(self):
        return self._pgmap_pct("degraded_ratio")

    @property
    def misplaced_ratio(self):
        return self._pgmap_pct("misplaced_ratio")
class Ceph(nagiosplugin.Resource):
    """Status data acquisition and parsing.

    Wraps a CephStatus object and turns it into nagiosplugin metrics.
    """

    def __init__(self, status):
        # `status` is a CephStatus instance; the cluster is only queried
        # when probe() runs.
        self.stat = status
        self.summary = ""
        self.usage_ratio = 0.0

    def probe(self):
        """Query the cluster once and yield all metrics (generator).

        The yield order determines the perfdata order in the plugin output.
        """
        self.stat.query()
        self.summary = self.stat.summary
        _log.debug("summary=%s", self.summary.strip())
        for detail in self.stat.detail:
            _log.info("detail=%s", detail.strip())
        yield nagiosplugin.Metric("health", self.stat.overall)
        yield nagiosplugin.Metric(
            "net data", self.stat.data_bytes, "B", min=0, context="default"
        )
        # Number of near-full OSDs is only available inside the textual
        # summary, hence the regex scrape.
        m = re.search(r"(\d+) near full osd", self.summary)
        nearfull = int(m.group(1)) if m else 0
        yield nagiosplugin.Metric(
            "nearfull", nearfull, min=0, context="nearfull"
        )
        # Guard against division by zero on an empty/odd status document.
        if self.stat.bytes_net_total:
            self.usage_ratio = self.stat.bytes_used / self.stat.bytes_net_total
        yield nagiosplugin.Metric(
            "usage",
            float("{:5.4}".format(100.0 * self.usage_ratio)),
            "%",
            min=0.0,
            max=100.0,
            context="default",
        )
        yield nagiosplugin.Metric(
            "client read", self.stat.read_rate, "B/s", min=0, context="default"
        )
        yield nagiosplugin.Metric(
            "client write",
            self.stat.write_rate,
            "B/s",
            min=0,
            context="default",
        )
        yield nagiosplugin.Metric(
            "client ops", self.stat.ops, "op/s", min=0, context="default"
        )
        yield nagiosplugin.Metric(
            "recovery rate",
            self.stat.recovery_rate,
            "B/s",
            min=0,
            context="default",
        )
        yield nagiosplugin.Metric(
            "degraded pgs",
            self.stat.degraded_ratio,
            "%",
            min=0.0,
            max=100.0,
            context="default",
        )
        yield nagiosplugin.Metric(
            "misplaced pgs",
            self.stat.misplaced_ratio,
            "%",
            min=0.0,
            max=100.0,
            context="default",
        )
class CephLog(nagiosplugin.Resource):
    """Scan log file for blocked requests.

    Uses nagiosplugin.LogTail with a persistent cookie so that each run
    only examines lines appended since the previous run.
    """

    def __init__(self, logfile, statefile):
        self.logfile = logfile
        # Cookie remembers the file offset between plugin invocations.
        self.cookie = nagiosplugin.Cookie(statefile)

    # Matches e.g. "... 12 slow requests ...; oldest blocked for > 34.5 secs"
    r_slow_req = re.compile(
        r" (\d+) slow requests.*; oldest blocked for > ([0-9.]+) secs"
    )

    def probe(self):
        """Return the worst slow-request counts seen in the new log lines."""
        blocked = 0
        oldest = 0.0
        _log.info("scanning %s for slow request logs", self.logfile)
        with nagiosplugin.LogTail(self.logfile, self.cookie) as newlines:
            for line in newlines:
                # LogTail yields bytes; decode before matching.
                m = self.r_slow_req.search(line.decode())
                if not m:
                    continue
                _log.debug("slow requests: %s", line.strip())
                blocked = max(blocked, int(m.group(1)))
                oldest = max(oldest, float(m.group(2)))
        return [
            nagiosplugin.Metric("req_blocked", blocked, min=0),
            nagiosplugin.Metric("req_blocked_age", oldest, "s", min=0),
        ]
class HealthContext(nagiosplugin.Context):
    """Map Ceph's textual health status onto Nagios plugin states.

    Checked most-severe first so that mixed strings resolve to the
    worst matching state.
    """

    def evaluate(self, metric, resource):
        health = metric.value
        hint = resource.summary
        state_table = (
            (("HEALTH_CRIT", "HEALTH_ERR"), nagiosplugin.Critical),
            (("HEALTH_WARN",), nagiosplugin.Warn),
            (("HEALTH_OK",), nagiosplugin.Ok),
        )
        for needles, state in state_table:
            if any(needle in health for needle in needles):
                return self.result_cls(state, hint, metric)
        raise RuntimeError("cannot parse health status", health)
class UsageSummary(nagiosplugin.Summary):
    """Custom green-status line: report overall cluster usage."""

    def ok(self, results):
        """Include overall usage information into green status output."""
        pct = results["usage"].resource.usage_ratio * 100.0
        return "{:5.2f}% capacity used".format(pct)
@nagiosplugin.guarded
def main():
    """Assemble the Ceph health check and run it.

    Builds the nagiosplugin.Check from the resources/contexts above and
    dispatches to nagiosplugin's runner.

    TODO(review): --warn-usage / --crit-usage are parsed but never wired
    into a ScalarContext for the "usage" metric; kept for CLI
    compatibility until that is resolved.
    """
    argp = argparse.ArgumentParser()
    argp.add_argument(
        "-w",
        "--warn-usage",
        metavar="RANGE",
        default="0.8",
        help="warn if cluster usage ratio is outside RANGE",
    )
    argp.add_argument(
        "-c",
        "--crit-usage",
        metavar="RANGE",
        default="0.9",
        help="crit if cluster usage ratio is outside RANGE",
    )
    argp.add_argument(
        "-k",
        "--command",
        default="ceph status --format=json",
        help="execute command to retrieve cluster status "
        '(default: "%(default)s")',
    )
    argp.add_argument(
        "-l",
        "--log",
        metavar="PATH",
        default=DEFAULT_LOGFILE,
        help="scan log file for slow requests (default: " "%(default)s)",
    )
    argp.add_argument(
        "-r",
        "--warn-requests",
        metavar="RANGE",
        default=1,
        help="warn if number of blocked requests exceeds range "
        "(default: %(default)s)",
    )
    argp.add_argument(
        "-R",
        "--crit-requests",
        metavar="RANGE",
        default=50,
        help="crit if number of blocked requests exceeds range "
        "(default: %(default)s)",
    )
    argp.add_argument(
        "-a",
        "--warn-blocked-age",
        metavar="RANGE",
        default=30,
        help="warn if age of oldest blocked request is outside "
        "range (default: %(default)s)",
    )
    argp.add_argument(
        "-A",
        "--crit-blocked-age",
        metavar="RANGE",
        default=90,
        help="crit if age of oldest blocked request is outside "
        "range (default: %(default)s)",
    )
    argp.add_argument(
        "-s",
        "--state",
        metavar="PATH",
        default="/var/lib/check_ceph_health.state",
        # typo fix: "logteil" -> "logtail"
        help="state file for logtail (default: %(default)s)",
    )
    argp.add_argument(
        "-v",
        "--verbose",
        action="count",
        default=0,
        help="increase output level",
    )
    argp.add_argument(
        "-t",
        "--timeout",
        default=30,
        metavar="SEC",
        help="abort execution after SEC seconds",
    )
    args = argp.parse_args()
    check = nagiosplugin.Check(
        Ceph(CephStatus(args.command)),
        HealthContext("health"),
        nagiosplugin.ScalarContext(
            "nearfull", critical="0:0", fmt_metric="{value} near full osd(s)"
        ),
        UsageSummary(),
    )
    # Log scanning is optional: pass an empty -l/--log to disable it.
    if args.log:
        check.add(
            CephLog(args.log, args.state),
            nagiosplugin.ScalarContext(
                "req_blocked", args.warn_requests, args.crit_requests
            ),
            nagiosplugin.ScalarContext(
                "req_blocked_age", args.warn_blocked_age, args.crit_blocked_age
            ),
        )
    check.main(args.verbose, args.timeout)


# BUG FIX: main() was defined but never invoked, so running the script
# directly did nothing. Harmless when imported via a packaging entry point.
if __name__ == "__main__":
    main()
|
16,759 | f798e6e4bdc836233feb1f8a0955d1fec5981924 | #!/usr/bin/python3
__author__ = "BaelTD"
__copyright__ = "Copyright 2019, Automatize the automator"
__credits__ = ["David Morelli"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "BaelTD"
__email__ = "morelli.d14@gmail.com"
__status__ = "Production"
import sys
import glob
import os
from pyzbar.pyzbar import decode
import pyzbar
import numpy as np
import cv2
import time
from PIL import Image
#Mostra codice a barre e QR core in locazione
def display(im, decodeObjects):
    """Draw the outline of every decoded barcode/QR symbol on *im* and show it.

    im            -- OpenCV image (numpy array), modified in place.
    decodeObjects -- iterable of pyzbar decode results with a .polygon attr.
    """
    for decoded in decodeObjects:
        corners = decoded.polygon
        # More than four corners: fall back to the convex hull so the drawn
        # outline stays a simple polygon.
        if len(corners) > 4:
            hull = cv2.convexHull(np.array([pt for pt in corners], dtype=np.float32))
            hull = list(map(tuple, np.squeeze(hull)))
        else:
            hull = corners
        vertex_count = len(hull)
        # Connect consecutive vertices, wrapping around to close the shape.
        for idx in range(0, vertex_count):
            cv2.line(im, hull[idx], hull[(idx + 1) % vertex_count], (255, 0, 0), 3)
    cv2.imshow("Barcode Image:", im)
    cv2.waitKey(10)
#main dove we search in specific directory every file with extention .jpg
if __name__ =='__main__':
    # Scan every *.jpg under the working directory, decode barcodes with
    # pyzbar and append the decoded text to barcode.txt.
    print("-.-.-.-.-.-.-.-.-Automate the AUTOMATOR-.-.-.-.-.-.-.-.-.-.")
    print("-.-.-.-.-.-.-.-.-Automate the AUTOMATOR-.-.-.-.-.-.-.-.-.-.")
    print("-.-.-.-.-.-.-.-.-Automate the AUTOMATOR-.-.-.-.-.-.-.-.-.-.")
    print("-.-.-.-.-.-.-.-.-Automate the AUTOMATOR-.-.-.-.-.-.-.-.-.-.")
    print("-.-.-.-.-.-.-.-.-Automate the AUTOMATOR-.-.-.-.-.-.-.-.-.-.")
    print("-.-.-.-.-.-.-.-.-Automate the AUTOMATOR-.-.-.-.-.-.-.-.-.-.")
    print("-.-.-.-.-.-CONVERT IMAGE BARCODE IN TEXTBARCODE-.-.-.-.-.-.")
    print("-.-.-.-.-SEARCH IN DIRECTORY BARCODE FILE IMG: *JPG :-.-.-.")
    # BUG FIX: raw_input() does not exist on Python 3 (see the python3
    # shebang) -- use input().
    input("Press key to continue....")
    os.chdir("/home/bael/Develop")
    for image_name in glob.glob("*.jpg"):
        print(image_name)
        im = cv2.imread('/home/bael/Develop/bar_image/'+image_name)
        print("Apertura immagine codice a barre....")
        time.sleep(2)
        if im is not None :
            print("THIS IS MATRIX BARCODE :")
            print(im)
            print("-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-")
            print("THIS IS BARCODE DECODIFIED :")
            # Decode once and reuse (the old code called decode(im) 3 times).
            decoded = decode(im)
            print(decoded)
            print("-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-")
            if decoded:
                for symbol in decoded:
                    # BUG FIX: pyzbar returns bytes; on Python 3 concatenating
                    # bytes with str raises TypeError, so decode first.
                    data_text = symbol.data.decode()
                    print("Only data ob BARCODE: ")
                    print("Data:" + data_text)
                    print("-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-")
                    print("Saving data on File (is do not exit it will create): ")
                    with open("barcode.txt", "a") as text_file:
                        print("Opening File named Barcode....")
                        text_file.write(data_text + "\n")
                        print("Write in file BARCODE.... ")
                    print("Succesfully Saved BARCODE. ")
                display(im, decoded)
            else : print("ERROR DURING READ OF BARCODE")
        else: print("Nessuna immagine con tale nome...")
    print("Grazie per aver utilzzato il nostro Software")
|
16,760 | 6f22e23f7a038ba99a85dc52fb6ae1466408bb30 | def max_two(a,b):
if a>b:
return a
else:
return b
def max_three(a,b,c):
    """Return the largest of the three arguments."""
    # Idiom: builtin max() replaces the nested max_two() calls.
    return max(a, b, c)
print(max_three(50,100,30))  # demo call: prints 100
|
16,761 | b9e4ce56b8f1ac54afeb3f7feb9d9dcdff7c10d4 | """Tables definitions for data from the EIA bulk electricity aggregates."""
from typing import Any
# Table metadata keyed by resource name; "schema" lists the ordered fields
# and the composite primary key of each table.
RESOURCE_METADATA: dict[str, dict[str, Any]] = {
    "fuel_receipts_costs_aggs_eia": {
        "description": (
            # typo fix: "electricty" -> "electricity"
            "Aggregated fuel receipts and costs from the EIA bulk electricity data."
        ),
        "schema": {
            "fields": [
                "fuel_agg",
                "geo_agg",
                "sector_agg",
                "temporal_agg",
                "report_date",
                "fuel_received_mmbtu",
                "fuel_cost_per_mmbtu",
            ],
            # All aggregation dimensions plus the report date identify a row.
            "primary_key": [
                "fuel_agg",
                "geo_agg",
                "sector_agg",
                "temporal_agg",
                "report_date",
            ],
        },
        "sources": ["eia_bulk_elec"],
        "field_namespace": "eia_bulk_elec",
        "etl_group": "eia_bulk_elec",
    },
}
|
16,762 | 81844f582dcdb0ca290190d71c5cb00f3f1adbc6 | #!/usr/local/bin/python3.4
import time
import csv
import urllib.request, urllib.parse, urllib.error
import xlrd
import pandas as pd
import platform
#from xlutils.display import cell_display
#from xlrd.sheet import Cell
timerStart = time.time()
# build an empty Pandas dataframe to concatenate the retrieved data into
curMonth = int(time.strftime("%m"))
curYear = int(time.strftime("%Y"))
# NOTE(review): after June the index extends into the next calendar year --
# looks like a July-June reporting-year boundary; confirm against the MSB
# publication schedule.
if curMonth > 6:
    curYear += 1
# Monthly periods from Jan-1900 through June of the target year.
numberOfRows = ((curYear - 1900)*12)+6
pd.options.display.float_format = '{:,.2f}'.format
prng = pd.period_range('Jan-1900', periods=numberOfRows, freq='M')
indexFrame = pd.DataFrame(index = prng)
def fetchSeries(chapter, file, series):
    """Extract one named-range data series from an Excel workbook.

    chapter -- label used as the resulting Series' name.
    file    -- path of the downloaded workbook.
    series  -- Excel defined-name of the range holding the data; names
               starting with "A" are stored as "<name>_Data" in the books.
    Returns a pandas Series indexed by the dates found in column 0 of the
    referenced sheet(s).
    """
    # BUG FIX: the pd.datetime alias was removed in pandas 1.0; use the
    # datetime class directly.
    from datetime import datetime
    book = xlrd.open_workbook(file)
    # important - set xlrd datemode to match the excel book - avoiding any 1900/1904 problems
    datemode = book.datemode
    # build a temporary Pandas Series to hold the data
    s = pd.Series(name=chapter)
    if series[0]=="A":
        series = series + "_Data"
    # name_obj_list is a list of all named ranges in the excel book
    for nobj in book.name_obj_list:
        name = nobj.name
        if name == series:
            print (name)
            # result.value holds the references of the series (there can be
            # multiple areas - A1:A10,A18:A50 etc.)
            value = nobj.result.value
            for i in range(len(value)):
                # Ref3d tuple: (shtxlo, shtxhi, rowxlo, rowxhi, colxlo, colxhi)
                ref3d = value[i]
                for shx in range(ref3d.shtxlo, ref3d.shtxhi):
                    # go to the worksheet specified for the range
                    sh = book.sheet_by_index(shx)
                    # in case the range spans multiple columns
                    for colx in range(ref3d.colxlo, ref3d.colxhi):
                        # iterate over the rows
                        for rowx in range(ref3d.rowxlo, ref3d.rowxhi):
                            cval = sh.cell_value(rowx, colx)
                            # column 0 holds the date for this series entry
                            cdate = sh.cell_value(rowx, 0)
                            date_tup = xlrd.xldate_as_tuple(cdate, datemode)
                            # set the series value for this date
                            s[datetime(date_tup[0], date_tup[1], date_tup[2])] = cval
    return s
# Unique (url, local filename) pairs; several series share one workbook.
links = set()
# BUG FIX: mode 'Ur' was removed in Python 3.11; newline='' is the csv-module
# convention for text files.
with open('MSB_Sources.csv', 'r', newline='') as f:
    datasources = list(tuple(rec) for rec in csv.reader(f, delimiter=','))
for item in datasources:
    links.add(tuple((item[1],item[2])))
for link in links:
    # Download each workbook once, then pull every series that lives in it.
    filename = urllib.request.urlretrieve(link[0], link[1])
    for item in datasources:
        if item[2] == link[1]:
            print (item)
            s = fetchSeries(item[0],item[2],item[3])
            s = s.to_period(freq='M')
            print (s)
            indexFrame = pd.concat([indexFrame, s], axis=1)
#indexFrame = indexFrame.groupby(indexFrame.index.month).sum()
print (indexFrame)
# BUG FIX: reindex_axis was removed from pandas, and the old call discarded
# its return value anyway (a no-op); assign the column-sorted frame back
# before writing it out.
indexFrame = indexFrame.reindex(sorted(indexFrame.columns), axis=1)
indexFrame.to_csv('./output.csv')
timerEnd = time.time()
timerInterval = timerEnd - timerStart
print("process took "+str(timerInterval)+" seconds")
print((platform.python_version()))
|
16,763 | 9bc1633a4c90e8c638c28382dcb292e4853db24c | from hadmin.conf import QueueGenerator
from unittest2 import TestCase
class QueueGeneratorTest(TestCase):
    """Checks the queue tree generated from the fixtures in data/queues.

    Expected hierarchy: root -> prod, dev; dev -> product1, product2.
    """

    def setUp(self):
        # Build the tree once per test and keep handles to every queue.
        self.root = QueueGenerator.load_dir('data/queues').generate()
        self.prod = self.root.subqueue('prod')
        self.dev = self.root.subqueue('dev')
        self.dev1 = self.dev.subqueue('product1')
        self.dev2 = self.dev.subqueue('product2')

    # --- root queue ---
    def testRootCapacity(self):
        self.assertEqual(100.0, self.root.cap_min)
    def testRootUserLimitFactor(self):
        self.assertEqual(1.0, self.root.user_limit_factor)
    def testRootUsers(self):
        self.assertEqual(['*'], self.root.users)
    def testRootAdmins(self):
        self.assertEqual(['*'], self.root.admins)

    # --- prod queue ---
    def testProdAdmins(self):
        self.assertEqual(['alec'], self.prod.admins)
    def testProdCapacity(self):
        self.assertEqual(90.0, self.prod.cap_min)
    def testProdUsers(self):
        self.assertEqual(['alec', 'trozamon'], self.prod.users)
    def testProdUserLimitFactor(self):
        # 50% per-user limit expressed as a fraction of the 90% queue capacity.
        self.assertAlmostEqual(50.0 / 90.0, self.prod.user_limit_factor)
    def testProdRunning(self):
        self.assertEqual(True, self.prod.running)
    def testProdMaxCapacity(self):
        self.assertEqual(100.0, self.prod.cap_max)

    # --- dev queue ---
    def testDevCapacity(self):
        self.assertEqual(10.0, self.dev.cap_min)
    def testDevMaxCapacity(self):
        self.assertEqual(100.0, self.dev.cap_max)
    def testDevAdmins(self):
        self.assertEqual(['alec', 'trozamon'], self.dev.admins)
    def testDevUsers(self):
        self.assertEqual(['alec', 'trozamon'], self.dev.users)
    def testDevUserLimitFactor(self):
        self.assertEqual(10.0, self.dev.user_limit_factor)
    def testDevRunning(self):
        self.assertEqual(True, self.dev.running)

    # --- dev/product1 queue ---
    def testDev1Capacity(self):
        self.assertEqual(50.0, self.dev1.cap_min)
    def testDev1MaxCapacity(self):
        self.assertEqual(100.0, self.dev1.cap_max)
    def testDev1Admins(self):
        self.assertEqual(['alec'], self.dev1.admins)
    def testDev1Users(self):
        self.assertEqual(['alec'], self.dev1.users)
    def testDev1UserLimitFactor(self):
        self.assertEqual(1.0, self.dev1.user_limit_factor)
    def testDev1Running(self):
        self.assertEqual(False, self.dev1.running)

    # --- dev/product2 queue ---
    def testDev2Capacity(self):
        self.assertEqual(50.0, self.dev2.cap_min)
    def testDev2MaxCapacity(self):
        self.assertEqual(100.0, self.dev2.cap_max)
    def testDev2Admins(self):
        self.assertEqual(['trozamon'], self.dev2.admins)
    def testDev2Users(self):
        self.assertEqual(['trozamon'], self.dev2.users)
    def testDev2UserLimitFactor(self):
        self.assertEqual(1.0, self.dev2.user_limit_factor)
    def testDev2Running(self):
        self.assertEqual(True, self.dev2.running)
|
16,764 | 75fa488b923d33d0733001b3d63a92b91848cde6 | #!/usr/bin/env python
"""
A very simple, rather ugly remote debugging client for Python, for viewing the state
of a remote Python process.
Run with ./rdb_client.py [hostname [port [passcode]]]
Author: Christopher Swenson (chris@caswenson.com)
Homepage: http://github.com/swenson/python_remote_debugger
License: MIT Public License
Copyright (c) 2009 Christopher Swenson
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from Tkinter import *
import cPickle as pickle
import threading
import traceback
import sys
import os
import time
import socket
from rdb import RDBClient
class RDBGui(object):
    """Tk window for browsing the threads, locals and stacks of a remote
    Python process through an RDBClient connection (Python 2 / Tkinter)."""

    def __init__(self, host="localhost", port=1235, password="abc"):
        """Starts up a simple GUI that can be used to remotely view a Python instance using
        the Python remote debugger protocol in rdb.py"""
        self.root = Tk()
        self.client = RDBClient(host, port, password)
        # Widgets are stacked in a single grid column; r is the next free row.
        r = 0
        send_button = Button(self.root, text="Refresh thread list", command=self.refresh_thread_list)
        send_button.grid(row=r)
        r += 1
        self.thread_list_label = Label(self.root, text="Thread List")
        self.thread_list_label.grid(row=r)
        r += 1
        self.thread_list = Listbox(self.root, selectmode=SINGLE)
        #self.thread_list.bind('<Button-1>', self.update_vars)
        self.thread_list.grid(row=r)
        # "View Thread" sits in a second column next to the thread list.
        view_thread_button = Button(self.root, text="View Thread", command=self.update_vars)
        view_thread_button.grid(row=r, column=1)
        r += 1
        self.locals_list_label = Label(self.root, text="Local variables")
        self.locals_list_label.grid(row=r)
        r += 1
        self.locals_list = Listbox(self.root)
        self.locals_list.grid(row=r)
        r += 1
        self.stack_text_label = Label(self.root, text="Stack trace")
        self.stack_text_label.grid(row=r)
        r += 1
        self.stack_text = Text(self.root)
        self.stack_text.grid(row=r)
        # Populate the thread list once at startup.
        self.refresh_thread_list()

    def update_vars(self, point=None):
        """Update the display to show variables and the stack"""
        # NOTE(review): raises IndexError if nothing is selected in the list.
        which = self.thread_list.curselection()
        which = int(which[0])
        id = self.thread_ids[which]
        self.client.send_msg("get_locals", id)
        self.locals = self.client.recv_msg()[0]
        self.locals_list.delete(0, END)
        for k, v in self.locals:
            self.locals_list.insert(END, str(k) + " = " + str(v))
        self.client.send_msg("get_stack", id)
        stack = self.client.recv_msg()[0]
        self.stack_text.delete(1.0, END)
        self.stack_text.insert(END, "".join(stack))

    def refresh_thread_list(self):
        """Update the thread list"""
        self.client.send_msg("get_thread_list")
        self.thread_ids = self.client.recv_msg()[0]
        self.thread_list.delete(0, END)
        for id in self.thread_ids:
            self.thread_list.insert(END, str(id))

    def run(self):
        # Enter the Tk event loop; blocks until the window closes.
        self.root.mainloop()
if __name__ == '__main__':
    # Usage: ./rdb_client.py [hostname [port [passcode]]]
    RDBGui(*sys.argv[1:]).run()
|
16,765 | fa24e79723f48a9c8830698df5c87155a20fe004 | #!/usr/bin/env python
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# Simple script to extract the path of the TAG outputs of Tier0 monitoring,
# open them and chain them in a single TChain
# Uses the pathExtract library to extract the EOS path
# Options:
# -r RUNNUMBER, --run RUNNUMBER : Run number
# -s STREAM, --stream STREAM : Stream without prefix: express, CosmicCalo, Egamma...
# -t TAG, --tag TAG : DAQ tag: data12_8TeV, data12_calocomm...
# -a AMITAG, --amiTag AMITAG : First letter of AMI tag: x->express / f->bulk
# Author : Benjamin Trocme / Summer 2012
import os, sys
import argparse
from DataQualityUtils import pathExtract
from six.moves import xmlrpc_client as xmlrpclib
from ROOT import TChain
from ROOT import gStyle
gStyle.SetPalette(1)
gStyle.SetOptStat("em")

# Main===========================================================================================================
# Parse options, resolve the data project tag (via atlasdqm when not given),
# then chain all TAG files of the run into a single TChain.
parser = argparse.ArgumentParser()
parser.add_argument('-r','--run',type=int,dest='runNumber',default='267599',help="Run number",action='store')
parser.add_argument('-s','--stream',dest='stream',default='express',help="Stream without prefix: express, CosmicCalo, Egamma...",action='store')
parser.add_argument('-t','--tag',dest='tag',default='',help="DAQ tag: data12_8TeV, data12_calocomm...By default retrieve it via atlasdqm",action='store')
parser.add_argument('-a','--amiTag',dest='amiTag',default='f',help="First letter of AMI tag: x->express / f->bulk",action='store')
parser.print_help()
args = parser.parse_args()
runNumber = args.runNumber
stream = args.stream
if args.tag != "":
    tag = args.tag
else: # Try to retrieve the data project tag via atlasdqm
    if (not os.path.isfile("atlasdqmpass.txt")):
        print("To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)")
        print("To generate a kay, go here : https://atlasdqm.cern.ch/dqauth/")
        print("You can also define by hand the data project tag wit hthe option -t")
        sys.exit()
    # Close the key file deterministically instead of relying on GC.
    with open("atlasdqmpass.txt") as passfile:
        passwd = passfile.read().strip()
    passurl = 'https://%s@atlasdqm.cern.ch'%passwd
    s = xmlrpclib.ServerProxy(passurl)
    run_spec = {'stream': 'physics_CosmicCalo', 'proc_ver': 1,'source': 'tier0', 'low_run': runNumber, 'high_run':runNumber}
    run_info= s.get_run_information(run_spec)
    if '%d'%runNumber not in run_info.keys() or len(run_info['%d'%runNumber])<2:
        print("Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option")
        sys.exit()
    tag = run_info['%d'%runNumber][1]
amiTag = args.amiTag
listOfFiles = pathExtract.returnEosTagPath(runNumber,stream,amiTag,tag)
tree = TChain("POOLCollectionTree")
# (removed an unused `file = {}` dict that was never referenced)
for fileNames in listOfFiles:
    print("Adding %s"%(fileNames))
    tree.AddFile("root://eosatlas/%s"%(fileNames))
entries = tree.GetEntries()
if entries != 0:
    print("The chained tree contains %d entries"%(entries))
else:
    print("Empty chain...")
|
16,766 | 7d491d9c4c9151c8c6754331801dae592f5573ae | from itertools import product
import json
import numpy
# 2x2x2 cube holding the integers 1..8.
cube = numpy.array(range(1, 9)).reshape(2, 2, 2)
# All eight "prefix" sub-blocks of the cube, from a single scalar up to the
# full 2x2x2 array; pairs of these exercise every broadcasting combination.
pcube = [
    cube[0 ,0 ,0 ],
    cube[0 ,0 ,0:2],
    cube[0 ,0:2,0:1],
    cube[0 ,0:2,0:2],
    cube[0:2,0:1,0:1],
    cube[0:2,0:1,0:2],
    cube[0:2,0:2,0:1],
    cube[0:2,0:2,0:2],
]
# Emit one PHP unit-test method per ordered pair of sub-blocks; the expected
# value is numpy's broadcasting product (Python 2 print statements).
for (i, (a, b)) in enumerate(product(pcube, repeat=2), start=1):
    print 'public function testBsxfun{0:0>2d}()'.format(i)
    print '{'
    print '$a = {0};'.format(json.dumps(a.tolist()))
    print '$b = {0};'.format(json.dumps(b.tolist()))
    print '$expected = {0};'.format(json.dumps((a * b).tolist()))
    print '$actual = Bsxfun::bsxfun($this->times, $a, $b);'
    print '$this->assertEquals($expected, $actual);'
    print '}'
    print
|
16,767 | 74c9254ac1ca3655e5c028179f5a0023059889b3 | import logging
from datetime import timedelta, datetime
import unicodedata
from urllib.error import HTTPError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_CODE, STATE_UNAVAILABLE
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CONF_DEFAULT_MENSA = 828
MEAL_CATEGORY = "category"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
CONST_NAME = "name"
CONST_MEALS = "meals"
STATE_ONLINE = "online"
STATE_NO_MEALS = "no_meals"
ICON = "mdi:hamburger"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_CODE, default=CONF_DEFAULT_MENSA): cv.positive_int}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
from openmensa import OpenMensa as om
_LOGGER.debug("setup openmensa sensor")
mensa = config.get(CONF_CODE)
add_entities([OpenmensaSensor(om, mensa)], True)
class OpenmensaSensor(Entity):
"""Representation of a Sensor."""
def __init__(self, om, mensa):
"""Initialize the sensor."""
self._om = om
self._mensa = mensa
self._state = STATE_UNAVAILABLE
self._categories = None
def meal_category_from_categories_list(self, categories, search_category):
for category in categories:
if category[CONST_NAME] == search_category:
return category
return None
def get_meals_of_the_day(self):
date_str = datetime.now().strftime("%Y-%m-%d")
categories = []
try:
meals = self._om.get_meals_by_day(self._mensa, date_str)
for meal in meals:
meal_category = meal[MEAL_CATEGORY]
category = self.meal_category_from_categories_list(
categories, meal_category
)
if category is None:
category = {CONST_NAME: meal[MEAL_CATEGORY], CONST_MEALS: []}
categories.append(category)
category[CONST_MEALS].append({CONST_NAME: meal[CONST_NAME]})
except HTTPError:
self._state = STATE_NO_MEALS
return categories
def normalize_string(self, my_string):
"""
Normalize a String with Umlauts and blanks to ascii, lowercase and _ instead of blanks.
:param my_string:
:return:
"""
return (
unicodedata.normalize("NFKD", my_string)
.encode("ASCII", "ignore")
.decode()
.replace(" ", "_")
.lower()
)
@property
def name(self):
return "Openmensa Sensor"
@property
def icon(self):
return ICON
@property
def state(self):
return self._state
@property
def state_attributes(self):
if self._state == STATE_UNAVAILABLE:
return {}
attr = {}
attr["categories"] = self._categories
return attr
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Fetch new meals for the day
"""
_LOGGER.debug("Updating meals of the day.")
self._state = STATE_ONLINE
self._categories = self.get_meals_of_the_day()
|
16,768 | de9ced14196fe38cadcde40cad107d07dc8b81f6 | import random
import string
def random_string_generator(size=15, chars=string.ascii_uppercase + string.digits):
    """Return a random string of *size* characters drawn from *chars*.

    Args:
        size: length of the generated string (default 15).
        chars: population to sample from (default uppercase letters + digits).

    Returns:
        str: the generated random string.
    """
    # ''.join over a generator avoids the quadratic cost of repeated
    # string concatenation in a loop.
    return ''.join(random.choice(chars) for _ in range(size))
class Employee:
    """A simple employee record with a console pretty-printer."""

    def __init__(self, name, age, salary):
        """Store the employee's name, age and salary."""
        self.name = name
        self.age = age
        self.salary = salary

    def getinfo(self):
        """Print one line per attribute (same wording/order as before)."""
        details = (("name", self.name), ("age", self.age), ("salary", self.salary))
        for label, value in details:
            print(f"The employee {label} is {value}")
class Programmer(Employee):
    """An employee who additionally has an address.

    Bug fix: the original ``__init__`` accepted only ``address`` and never
    initialized the inherited fields, so the existing call site
    ``Programmer("Akash", 30, 30000, "Varanasi")`` raised TypeError.
    It now forwards name/age/salary to ``Employee.__init__``.
    """

    def __init__(self, name, age, salary, address):
        super().__init__(name, age, salary)
        self.add = address

    def showDetails(self):
        """Print the programmer's address."""
        print(f"The programmer address is {self.add}")
# Demo: build a programmer and print the inherited employee details.
# Bug fix: the method is named ``getinfo`` (all lowercase); calling
# ``getInfo`` raised AttributeError.
akash = Programmer("Akash", 30, 30000, "Varanasi")
akash.getinfo()
16,770 | cd4151679e91255c0af04ca1b465bdec3e9f06d0 | import time
import pyspark.sql.functions as F
sc._jsc.hadoopConfiguration().set("fs.s3n.awsAccessKeyId", AWS_ACCESS_KEY)
sc._jsc.hadoopConfiguration().set("fs.s3n.awsSecretAccessKey", AWS_SECRET_KEY)
class LoanData():
def __init__(self, config):
"""
Initialize Loan Dataset object.
Input: config, a dictionary with the following keys:
> bucket (AWS S3 bucket raw loan data is read from)
> raw_folder (folder inside AWS S3 bucket where raw loan data is read)
> processed_folder (folder inside AWS S3 bucket where processed loan data will be written)
"""
self.bucket = config['bucket']
self.raw_folder = config['raw_folder']
self.processed_folder = config['processed_folder']
self.loans_raw = None
self.loans_processed = None
self.loan_status_distribution = None
self.annual_inc_summary = None
def readData(self, bucket, folder):
"""
Read CSV loan data from an AWS S3 bucket and folder
Input: AWS S3 bucket and folder to read from
"""
self.loans_raw = spark.read.option('header', 'true').csv('s3://{}/{}'.format(bucket, folder))
self.loans_raw.cache()
def writeData(self, df, bucket, folder, partition_key):
"""
Write loan data in parquet format to an AWS S3 bucket and folder
Input:
> df: dataframe to be written out
> bucket: AWS S3 bucket to write to
> folder: AWS S3 folder to write to
> partition_key: key to partition on when writing out parquet
"""
df.write.mode('append').partitionBy(partition_key).parquet("s3n://{}/{}".format(bucket, folder))
def processLoans(self):
"""
Perform preprocessing on raw loan data to facilitate exploratory data analysis and ETL pipelining
Input: none, but make sure readData() has already been run
"""
self.loans_raw.createOrReplaceTempView('loan_data')
# Perform the following transformations on the data:
# Simplify loan statuses to good, bad, or unknown standing
# Round loan amount and log of annual income to allow for analyses with bucketing
# Cast annual income field to int type
# Add a collection_timestamp field indicating when the data was processed to allow for new batches of data
self.loans_processed = sqlContext.sql('''
with loan_statuses_transformed as
(
select
"{}" as collection_timestamp,
(case
when loan_status in ('Fully Paid', 'Current') then 'good_standing'
when loan_status in ('Charged Off') or loan_status like 'Late%' then 'bad_standing'
else 'unknown'
end
) as loan_status_simplified,
round(int(loan_amnt), -4) as loan_amnt_rounded,
pow(10, round(log10(int(annual_inc)), 0)) as annual_inc_rounded,
*
from loan_data
)
select * from loan_statuses_transformed
'''.format(time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime())))
self.loans_processed = self.loans_processed.withColumn('annual_inc', F.col('annual_inc').cast('int'))
self.loans_processed = self.loans_processed.withColumn('dti', F.col('dti').cast('float'))
self.loans_processed.cache()
def main():
config = {
'bucket': 'lending-club-warehouse',
'raw_folder': 'raw_loan_data',
'processed_folder': 'processed'
}
l = LoanData(config)
l.readData(l.bucket, l.raw_folder)
l.processLoans()
l.writeData(l.loans_processed, l.bucket, l.processed_folder, 'collection_timestamp')
if __name__ == '__main__':
main() |
16,771 | 65fe7ad0d7180617b9564ba0cf71b5b82bd86542 | buah = int(8000)
jumlah = int(input("Masukan Berat Buah :"))
x = buah*jumlah
print("Harga Sebelum Diskon",x)
y = x*0.85
print("Harga Setelah Diskon",y) |
16,772 | 3b633868e02a64831b1fea473fc6cc01de2305d1 | import uuid
import jwt
class BaseAuthenticator(object):
    """Abstract base class for request authenticators."""

    def __init__(self, *args, **kwargs):
        pass

    def authenticate(self, request):
        """Return the authenticated payload for *request*, or None.

        Bug fix: ``raise NotImplemented`` raises a TypeError because
        ``NotImplemented`` is a constant, not an exception class;
        ``NotImplementedError`` is the correct abstract-method marker.
        """
        raise NotImplementedError

    def create_token(self):
        """Create and return an auth token. Subclasses must override."""
        raise NotImplementedError
class JWTAuthenticator(BaseAuthenticator):
    """Authenticator that validates JWT bearer tokens from the
    ``Authorization`` header and issues tokens signed with a shared secret."""

    def __init__(self, *args, **kwargs):
        super(JWTAuthenticator, self).__init__(*args, **kwargs)
        # Signing algorithm for PyJWT; HS256 (HMAC-SHA256) by default.
        self.algorithm = kwargs.get('algorithm', 'HS256')
        # NOTE(review): a random per-instance secret means tokens do not
        # survive restarts unless 'secret' is supplied — confirm intended.
        self.secret = kwargs.get('secret', uuid.uuid4().hex)
        self.issuer = kwargs.get('issuer', '')

    def authenticate(self, request):
        """Return the decoded JWT payload for *request*, or None.

        Returns None when the Authorization header is absent/empty or the
        token's 'iss' claim does not match this authenticator's issuer.
        NOTE(review): jwt.decode raises (e.g. on bad signature/expiry)
        rather than returning None — callers presumably handle that; the
        None check below looks purely defensive. TODO confirm.
        """
        hdr_key = 'Authorization'
        if hdr_key not in request.headers:
            return None
        authorization = request.headers[hdr_key]
        if authorization is None or authorization == '':
            return None
        # Strip the "Bearer" scheme prefix to get the raw token.
        token = authorization.replace('Bearer', '').strip()
        payload = jwt.decode(token, self.secret, algorithms=[self.algorithm])
        if payload is None or 'iss' not in payload or payload['iss'] != self.issuer:
            return None
        return payload

    def create_token(self, email=None, expire=None):
        """Return a signed JWT carrying only the issuer claim.

        NOTE(review): the ``email`` and ``expire`` parameters are accepted
        but currently ignored — the payload contains only 'iss'. TODO:
        confirm whether they should become 'email'/'exp' claims.
        """
        payload = {'iss': self.issuer}
        return jwt.encode(payload, self.secret, algorithm=self.algorithm)
|
16,773 | 99b98e3ec03966f461d6dee05b2c441bf01c441e | import random
# All possible answers the "magic 8-ball" can give.
list_result = ['outlook is good','ask again later','yes','no','most likely no','most likely yes','maybe','outlook is not good']

# Ask eight questions in a loop instead of eight copy-pasted blocks.
# random.choice(seq) is the idiomatic equivalent of
# seq[random.randint(0, len(seq) - 1)].
for _ in range(8):
    input("Please ask me a question:")
    print(random.choice(list_result))
|
16,774 | 6f16229e54964d0cded97e16c497b4a42620d09e | from django.core.management.base import BaseCommand, CommandError
import time
from getpass import getpass
import tweepy
from django.conf import settings
from socialbeer.core.tasks import process_tweet
USERNAME = getattr(settings, "TWITTER_USERNAME", None)
PASSWORD = getattr(settings, "TWITTER_PASSWORD", None)
TRACK_LIST = getattr(settings, "TWITTER_SEARCH_TERMS", None)
FOLLOW_LIST = None
class Command(BaseCommand):
def handle(self, *args, **options):
stream_twitter()
class StreamWatcherListener(tweepy.StreamListener):
def on_status(self, status):
process_tweet.delay(status)
return True
def on_error(self, status_code):
print 'An error has occured! Status code = %s' % status_code
return True # keep stream alive
def on_timeout(self):
print 'Snoozing Zzzzzz'
def stream_twitter():
# Prompt for login credentials and setup stream object
stream = tweepy.Stream(USERNAME, PASSWORD, StreamWatcherListener(), timeout=None)
mode = 'filter'
if mode == 'sample':
stream.sample()
elif mode == 'filter':
stream.filter(FOLLOW_LIST, TRACK_LIST)
if __name__ == '__main__':
try:
stream_twitter()
except KeyboardInterrupt:
print '\nGoodbye!'
|
16,775 | 38422fd55b7a3bfd8d58801e6109767d046a69e0 | M = int(input('Enter your full marks : '))
per = M/500*100
if(per>=90):
print('Your Grade is A')
elif(per>=80):
print('Your Grade is B+')
elif(per>=70):
print('Your Grade is B')
elif(per>=60):
print('Your Grade is C+')
elif(per>=50):
print('Your Grade is C')
else:
print('You got Grade D ') |
16,776 | cc74e55f439fbfd56a53b9b4f4583725ce1ff8d6 | ## IMPORTS ##
from tika import parser
## CLASS ##
class ReadPDF():
    """Extract and filter the text content of a PDF via Apache Tika."""

    def __init__(self, filepath, filename, *args, **kwargs):
        # Tika returns a dict; "content" holds the extracted plain text.
        self.content = str(parser.from_file("%s%s" % (filepath, filename))["content"])#, "G:\\Automation\\Tika\\tika-server.jar")

    def build_content_lines(self):
        """Return the extracted text as a list of non-blank lines.

        Bug fix: the method was declared without ``self``, so calling it
        on an instance raised TypeError and ``self.content`` was unbound.
        """
        lines = []
        for line in self.content.split("\n"):
            # Skip empty lines and lines that are a single space.
            if line not in ["", " "]:
                lines.append(line)
        return lines
|
16,777 | ec19aa83e5f36531f16ccea30e2e01b1f60b02a9 | from datetime import date
from decimal import Decimal
# pylint might have a problem with this pydantic import
# add this exception in ".pylintrc" so pylint ignores imports from pydantic
# "--extension-pkg-whitelist=pydantic"
from pydantic import BaseModel
class UserBase(BaseModel):
cpf : str
private : bool
class UserCreate(UserBase):
pass
class User(UserBase):
id : int
private : bool
incomplete : bool
last_order_date : date = None
avg_ticket : Decimal = None
last_ticket : Decimal = None
most_frequent_store_cnpj : str = None
last_store_cnpj : str = None
class Config:
orm_mode = True |
16,778 | 161319dbd4615edb0281e9e061bafa1c88b0814d | from tkinter import ttk
from tkinter import *
import pandas as pd
####################################################################################################################
# backend ##
####################################################################################################################
# movie lens data set import
ratings = pd.read_csv('DataSet/ratings.csv')
movies = pd.read_csv('DataSet/movies.csv')
# merging the two files to get the movies and ratings in one file + dropping unwanted columns
ratings = pd.merge(movies, ratings).drop(['genres', 'timestamp'], axis=1)
# changing the data structure to ease the work
user_ratings = ratings.pivot_table(index=['userId'], columns=['title'], values='rating')
# remove movies who were rated by lesser than 10 user and fill Nan with 0
user_ratings = user_ratings.dropna(thresh=10, axis=1).fillna(0)
# movie list shown in dropdown menu
movie_list = []
for i in range(0, len(user_ratings.columns)):
movie_list.append(user_ratings.columns[i])
# applying the pearson methode to get the similarities between the movies
item_similarity_df = user_ratings.corr(method='pearson')
def get_similar(movie_name, rating):
# get the similarity score and subtracting 2.5 from the rating to fix placement of bad movies in the list
similar_score = item_similarity_df[movie_name] * (rating - 2.5)
similar_score = similar_score.sort_values(ascending=False)
return similar_score
def checkkey(event):
value = event.widget.get()
# get data from movie list
if value == '':
data = movie_list
else:
data = []
for item in movie_list:
if value.lower() in item.lower():
data.append(item)
# update data in listbox
update(data)
def update(data):
# clear previous data
lb.delete(0, 'end')
# put new data
for item in data:
lb.insert('end', item)
################################################################################################################
# GUI CODE #
################################################################################################################
root = Tk()
root.title("Master 2 IAM")
root.configure(bg='#4b5162')
root.geometry('700x800')
root.resizable(False, False)
# create rappers to manage the gui
wrapper1 = Frame(root, bg='#383c4a')
wrapper2 = Frame(root, bg='#4b5162')
wrapper3 = Frame(root, bg='#4b5162')
wrapper1.pack(padx=0, pady=0, fill="both")
wrapper2.pack(padx=30, pady=10, fill="both", expand="yes")
wrapper3.pack(padx=30, pady=10, fill="both", expand="yes")
#########################
# header #
#########################
Label(wrapper1, text="MOVIE RECOMMENDER", bg='#383c4a', fg='white', font=("Chango", 33)).pack(pady=10)
#########################
# main content #
#########################
# drop down box with their label
Label(wrapper2, text="Select at least ONE movie", fg='white', bg='#4b5162', font=("Chango", 10)).grid(
row=0, column=0, padx=10, pady=10)
# dropdown menu 1
movie_combo = Entry(wrapper2, width=63)
rating_combo = ttk.Combobox(wrapper2, value=[0, 1, 2, 3, 4, 5], width=32)
# creating list box
lb = Listbox(wrapper2, width=95)
# grid element to the screen
rating_combo.grid(row=1, column=3, padx=10, pady=10)
movie_combo.grid(row=1, column=0, columnspan=3, padx=10, pady=10)
lb.grid(row=3, column=0, columnspan=4, padx=10, pady=10)
# call for the keystrock function
update(movie_list)
# button function to add selected movies
fake_user = []
def add_movie(event):
mov = [movie_combo.get(), int(rating_combo.get())]
fake_user.append(mov)
global labeltest
labeltest = Label(wrapper2, text=fake_user)
labeltest.grid(row=4, column=0,columnspan=4 , padx=10, pady=10)
def fill(event):
movie_combo.delete(0, END)
movie_combo.insert(0, lb.get(ACTIVE))
# comboSelect bind
rating_combo.bind("<<ComboboxSelected>>", add_movie)
# key release bind
movie_combo.bind('<KeyRelease>', checkkey)
# list box bind to print selected item in entry box
lb.bind("<<ListboxSelect>>", fill)
# label for wrapper 3
Label(wrapper3, text="Our Recommendation", bg='#4b5162', fg='white', font=("Chango", 10)).pack(pady=20)
# show the movie recommended with their ratings
tree = ttk.Treeview(wrapper3) # Treeview instead of listbox
# define columns
tree['columns'] = ("ID", "Movie Name")
# format Treeview
tree.column("#0", width=0, stretch=NO)
tree.column("ID", anchor=W, width=25)
tree.column("Movie Name", width=600, anchor=W)
# headings
tree.heading("#0", text="", anchor=W)
tree.heading("ID", text="#", anchor=W)
tree.heading("Movie Name", text="Movie Name", anchor=W)
# pack
tree.pack(padx=10, pady=10)
# # create error message when movies not selected
error_message = StringVar()
# button to suggest movie
def calculate():
# collecting similar movies so we can show the result
if not any(movie_combo.get()):
error_message = ['please enter at least one movie !']
tree.insert(parent='', index='end', iid=0, text='', values=(1, error_message))
else:
# error_message.config(text=" ")
similar_movies = pd.DataFrame()
for movie, rating in fake_user:
similar_movies = similar_movies.append(get_similar(movie, rating), ignore_index=True)
# printing the top 20 recommended movie
s = similar_movies.sum().sort_values(ascending=False)
i = 0
j = 1
for i in range(20):
tree.insert(parent='', index='end', iid=i, text='', values=(j, s.iloc[i:j].to_string()))
# tree.insert(END, s.iloc[i:j].to_string())
j = j + 1
# clear button function
def clear():
fake_user.clear()
movie_combo.delete(0, END)
rating_combo.delete(0, END)
lb.delete(0, END)
for item in movie_list:
lb.insert(END, item)
for record in tree.get_children():
tree.delete(record)
labeltest.destroy()
# create buttons
B1 = Button(wrapper2, text="Get Recommendation", bg='#7c818c', fg='white', padx=10, pady=10, borderwidth=2,
command=calculate).grid(row=8, column=0, padx=20, pady=10)
B2 = Button(wrapper2, text=" clear ", bg='#7c818c', fg='white', padx=24, pady=10, borderwidth=2,
command=clear).grid(row=8, column=3)
root.resizable(width=False, height=True)
root.mainloop()
|
16,779 | e47850bc34e5a870342f78d7beffff272306f50f | #coding:utf-8
'''
Data types

Demonstrates SymPy's exact numeric types: Integer, arbitrary-precision
Float, and Rational.
'''
import sympy
sympy.init_printing()
from sympy import I, pi, oo
# Integer symbol
i = sympy.Integer(19)
print('type of i:',type(i),',i is real:',i.is_real,',i is integer:',i.is_Integer,',i is odd:',i.is_odd)
print(i ** 50)
# Floating-point symbol
f = sympy.Float(2.35)
# Create a float with a specified number of significant digits
f1 = sympy.Float(0.3,25)
print(f1)
# Constructing from a string instead of a Python float
f2 = sympy.Float('0.3',25)
print(f2)
# Rational-number symbol
r = sympy.Rational(11,13)
print(r)
# Arithmetic with rational symbols
r1 = sympy.Rational(2,3)
r2 = sympy.Rational(4,5)
print(r1 * r2)
print(r1 / r2) |
16,780 | 4e62d65830b97b09c82f6c82af37ac0acf5c27af | # ____ e.. _______ E..
#
#
# c_ Equality E..
# SAME_REFERENCE 4
# SAME_ORDERED 3
# SAME_UNORDERED 2
# SAME_UNORDERED_DEDUPED 1
# NO_EQUALITY 0
#
#
# ___ check_equality list1, list2
# """Check if list1 and list2 are equal returning the kind of equality.
# Use the values in the Equality Enum:
# - return SAME_REFERENCE if both lists reference the same object
# - return SAME_ORDERED if they have the same content and order
# - return SAME_UNORDERED if they have the same content unordered
# - return SAME_UNORDERED_DEDUPED if they have the same unordered content
# and reduced to unique items
# - return NO_EQUALITY if none of the previous cases match"""
# equality_number = 0
# # SAME_REFERENCE
# __ ? __ ?
# ? += 1
# # SAME_ORDERED
# __ s.. ? __ s..
# ? += 1
# # SAME_UNORDERED
# __ s.. ? __ s.. ?
# ? += 1
# # SAME_UNORDERED_DEDUPED
# __ s.. s.. ? __ s.. s.. ?
# ? += 1
# r.. ? ? |
16,781 | 8346e9eb14fd0b36937e260b0b334f8f787a6d63 | import math
import numpy as np
from numpy import histogram
import matplotlib.pyplot as plt
with open("sdss-stars.txt") as f:
lines = f.readlines()
vmag = [float(line.split('|')[12]) for line in lines]
j = [float(line.split('|')[9]) for line in lines]
h = [float(line.split('|')[10]) for line in lines]
ks = [float(line.split('|')[11]) for line in lines]
v_j = [float(line.split('|')[13]) for line in lines]
pmra = [float(line.split('|')[7]) for line in lines]
pmdec = [float(line.split('|')[8]) for line in lines]
pm = [np.sqrt(pmra[i]**2+pmdec[i]**2) for i in range(len(pmra))]
j_h = [j[i]-h[i] for i in range(len(h))]
h_ks = [h[i]-ks[i] for i in range(len(h))]
#plt.hist(vmag, bins='auto',color='black') # plt.hist passes it's arguments to np.histogram
#plt.xlabel('mag')
#plt.ylabel('Number of stars')
#plt.title("Apparent magnitude distribution")
#plt.figure(1)
#plt.hist(v_j, bins='auto',color='black') # plt.hist passes it's arguments to np.histogram
#plt.xlabel('V - J, mag')
#plt.ylabel('Number of stars')
#plt.title("Color index distribution")
plt.figure(2)
plt.plot(h_ks,j_h,'kp')
plt.title('Color-Color Diagram')
plt.xlabel('H - Ks, mag')
plt.ylabel('J - H, mag')
#plt.figure(3)
#plt.hist(pm, bins='auto',color='black')
#plt.xlabel('Proper motion, mas/yr')
#plt.ylabel('Number of stars')
#plt.title("Proper motion distribution")
plt.show() |
16,782 | 7af5b0cf0c0a37cf42b78f2be6d79d3fb6af6e01 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import Counter
def decrypt_message(lines: list[str]) -> str:
    """
    Decrypt the message by selecting the most frequent character at each
    position across all repeated lines.

    Fixes: the annotation ``[str]`` was a list literal, not a valid type
    hint (now ``list[str]``), and an empty input raised IndexError
    (now returns "").

    Args:
        lines (list of str): The repeated message lines (equal lengths).

    Returns:
        str: The decrypted message, or "" when *lines* is empty.
    """
    if not lines:
        return ""
    # Counter.most_common(1) yields [(char, count)]; ties keep the
    # first-seen character, matching the original behavior.
    return "".join(
        Counter(line[i] for line in lines).most_common(1)[0][0]
        for i in range(len(lines[0]))
    )
if __name__ == "__main__":
repeated_message = [
"P + 2 l ! a t o",
"1 e 8 0 R $ 4 u",
"5 - r ] + a > /",
"P x w l b 3 k \\",
"2 e 3 5 R 8 y u",
"< ! r ^ ( ) k 0",
]
decrypted_message = decrypt_message(repeated_message)
print(decrypted_message)
|
16,783 | c3d9c2642ef36b5ad69d7cce37bbfa4ff0944820 | from django.apps import AppConfig
class ReqresConfig(AppConfig):
name = 'reqres'
|
16,784 | 0d2aef838a2463fbbceb3ee0a8b771b753b81d51 | from aiohttp import web
from Email.EmailRestfulClass import EmailRestful
from User.UserRestfulAPIClass import UserRestfulAPI
from Config.MySqlConnector import MySqlConnector
def Main():
app = web.Application()
rst = EmailRestful()
usr = UserRestfulAPI()
# EMAILS
app.add_routes(
[web.post("/send_new_email", rst.send_new_email),
web.post("/get_new_emails", rst.get_new_emails),
web.post("/get_outbox", rst.get_outbox),
web.post("/add_new_account", rst.add_new_account), ]
)
# USERS
app.add_routes(
[web.post("/get_users", usr.get_users), ])
# ACCOUNTS
app.add_routes(
[web.post("/edit_profile", rst.edit_profile),
web.post("/edit_account", rst.edit_account),
web.post("/delete_account", rst.delete_account),
web.post("/get_accounts", rst.get_accounts),
web.post("/get_account_details", rst.get_account_details), ])
# CONTACTS
app.add_routes(
[web.post("/get_contact_details", rst.get_contact_details),
web.post("/get_contacts", rst.get_contacts), ])
web.run_app(app, host='127.0.0.1', port=8009)
if __name__ == '__main__':
Main()
|
16,785 | ecd1d8531e06fef4a15d118774e913daab41bd49 | #!/usr/bin/python3
import pytest
from common import *
filename = os.path.basename(__file__)
@pytest.fixture
def tc_setup():
'''Initializes log file in TWT log module'''
TWT_set_log_filename(filename)
@pytest.mark.parametrize("sarg, carg", [
(' -serv -cauth -msgcb ', ' -cauth -msgcb '),
])
def test_t13_auth(tc_setup, sarg, carg):
run_test(inspect.stack()[0][3], sarg, carg)
|
16,786 | 9ad57a31e0e2b94cb41caa83031bbb1db0c1edd4 | # Generated by Django 3.1.7 on 2021-03-28 19:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('content', '0008_auto_20210328_1852'),
]
operations = [
migrations.CreateModel(
name='CaseGrades',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('club', models.TextField(blank=True, null=True, verbose_name='в каком клубе')),
('cost', models.IntegerField(blank=True, null=True, verbose_name='цена кейса')),
('text', models.TextField(blank=True, null=True, verbose_name='текст на кнопке')),
('rewards', models.TextField(blank=True, null=True, verbose_name='призы через запятую')),
],
options={
'verbose_name': 'цена кейса',
'verbose_name_plural': 'цены кейсов',
},
),
migrations.DeleteModel(
name='CasesCost',
),
migrations.AlterModelOptions(
name='casebody',
options={'verbose_name': 'акция с кейсами', 'verbose_name_plural': 'акции с кейсами'},
),
]
|
16,787 | 3d2b1492901ad48feebec39f02ac7756c4b7ef5a | # -*- coding: utf-8 -*-
"""
Created on Thu Feb 21 11:24:24 2019
@author: 3874034
"""
#from indexerSimple import IndexerSimple
from IRModel import IRModel
from TextRepresenter import PorterStemmer
from math import log
class Okapi(IRModel):
"""
index : objet IndexerSimple
"""
def __init__(self,index):
super().__init__(index)
def getScores(self,query):
ps = PorterStemmer()
requete = ps.getTextRepresentation(query) # {mot1 : nb1 , mot2 : nb2}
nb_documents = len(self._index.getIndex()) # nombre de documents dans la collection
listScores = []
k1 = 1.2
b = 0.75
indexInv = self._index.getIndexInv()
index = self._index.getIndex()
count = 0 # nb total d'occurences
score = 0
for key,dico in index.items():
for keyDico,itemDico in dico.items():
count += itemDico
avg = count / len(index)
"""
for keyIndex, itemIndex in index.items():
for key,item in dict(requete).items():
idf = log((1+nb_documents)/(1+len(self._index.getTfsForStem(key))))
f_qi_d = indexInv[key][keyIndex]
D = sum(self._index.getTfsForDoc(keyIndex)[term] for term in self._index.getTfsForDoc(keyIndex).keys())
score += idf * ( ( f_qi_d * (k1+1)) / f_qi_d + k1 * ( 1 - b + b * ( D / avg ) ) )
listScores[i-1] = score
i = i + 1
"""
"""
for keyIndexInv, itemIndexInv in indexInv.items():
if keyIndexInv in requete:
for keyDico, itemDico in itemIndexInv.items():
idf = log( ( 1 + nb_documents ) / ( 1 + len(itemIndexInv)) )
print("idf",idf)
f_qi_d = itemDico
print("f",f_qi_d)
D = sum(self._index.getTfsForDoc(keyDico)[term] for term in self._index.getTfsForDoc(keyDico).keys())
print("D",D)
score += idf * ( ( f_qi_d * (k1+1)) / f_qi_d + k1 * ( 1 - b + b * ( D / avg ) ) )
print("score",score)
listScores.append((keyDico,score))
score = 0
"""
dictScores = dict()
c = 1
liste_id = []
#print("aaa : ",dict(requete).items())
for mot, occu in dict(requete).items():
if mot in indexInv.keys():
#print("MOT : ",mot)
#print("indexinv : ",indexInv[mot].items())
for key,item in indexInv[mot].items():
#print("key : ",key)
#print("item : ",item)
idf = log( ( 1 + nb_documents ) / ( 1 + len(indexInv[mot] ) ) )
f_qi_d = item
D = sum(self._index.getTfsForDoc(key)[term] for term in self._index.getTfsForDoc(key).keys())
#print("c : ",c)
c += 1
#print("idf : ",idf)
#print("f_qi_d : ",f_qi_d)
#print("D : ",D)
score = idf * ( ( f_qi_d * 1 ) / ( f_qi_d + k1 * ( 1 - b + b * ( D / avg ) ) ) )
#print("avg : ",avg)
#print("score : ",score)
#print("key : ",key)
#print("list id : ",liste_id)
if key in liste_id:
dictScores[key] += score
else:
dictScores[key] = score
liste_id.append(key)
score = 0
return dictScores
|
16,788 | e3bdc863c67439d88e9d7348291d544e5c266a0f | # one of them is True will com back as True
print(True or False)
print(2 or 0) # one of them is True will return True i.e 2
# one of them is False will come back as False
print(True and False)
print(2 and 0) # one of them is False will return False i.e. 0 ( Falsy value)
# not True will return False
print(not True)
# not False will return True
print(not False)
"""
True
2
False
0
False
True
""" |
16,789 | 97a6274b78a916539d67eddeb0c2f6bc0e45b71c | from json import dumps as json_dumpstring
import time
class Logger:
"""
Utility class used to handle server-side logging in the event of certain actions
"""
def __init__(self):
pass
@staticmethod
def song_added(song, playlist_id):
"""
Prints server logging on success of addition of song to playlist.
"""
if song.added_by == 'cedmunds90':
print('Ruhpushuh {song_id} ({title}) ruhpush a shuh {playlist_id} rhup {added_by}.'
.format(song_id=song.id,
title=song.title,
playlist_id=playlist_id,
added_by=song.added_by))
pass
else:
print('Song {song_id} ({title}) added to playlist {playlist_id} by {added_by}.'
.format(song_id=song.id,
title=song.title,
playlist_id=playlist_id,
added_by=song.added_by))
pass
@staticmethod
def song_already_exists(song, playlist_id):
"""
Prints server logging on attempted song addition when song already exists in playlist.
"""
print('Song {title} already in playlist {playlist_id}, adding has been skipped.'
.format(title=song.title,
playlist_id=playlist_id))
pass
@staticmethod
def unrecognised_format(link):
"""
Server logging for receipt of unsupported YouTube link format.
"""
print('Message has been identified as a YouTube link, but the format is not recognised.')
print('Message was {}, support for this format should be added soon.'.format(link))
pass
@staticmethod
def unrecognised_service(service_name):
"""
Server logging for reciept of unsupported service/attachment type.
"""
print('Service {} not (yet) supported.'.format(service_name))
pass
@staticmethod
def future_supported_service(service_name):
"""
Server logging for receipt of service for which support is planned for a future version.
"""
print('Service {} linked.'.format(service_name))
pass
@staticmethod
def failed_to_find_relevant_youtube_video(track_name):
"""
Server logging when Spotify->YouTube cross-search fails to return a video to add to the playlist.
"""
print('YouTube Service search for {} did not bring back an appropriate video.'.format(track_name))
pass
@staticmethod
def failed_to_find_relevant_spotify_track(video_title):
"""
Server logging when YouTube->Spotify cross-search fails to return a track to add to the playlist.
"""
print('Spotify Service search for {} did not bring back an appropriate track.'.format(video_title))
pass
@staticmethod
def log_event_to_file(event):
"""
Logging of received event JSON to file.
"""
with open('eventlogs/{}.json'.format(time.time()), 'w') as event_write:
event_write.write(json_dumpstring(event))
pass
@staticmethod
def playlist_contents_requested(service_name):
"""
Server logging when a request for the contents of a playlist is requested
"""
print('Request for contents of {} playlist received.'.format(service_name))
pass
|
16,790 | 2529caa2bc4e8386f95a3d9d30ef98cfa897ffa2 | from django.contrib import admin
from django.utils.html import format_html
from django.core.urlresolvers import reverse
from django.db import models
from django_summernote.widgets import (SummernoteWidget)
# Register your models here.
from .models import (Venue, Event, EventRegistration)
# from django_google_maps import widgets as map_widgets
# from django_google_maps import fields as map_fields
class VenueAdmin(admin.ModelAdmin):
list_display = ('title', 'contact_number')
formfield_overrides = {
models.TextField: {'widget': SummernoteWidget},
}
# formfield_overrides = {
# map_fields.AddressField: {
# 'widget': map_widgets.GoogleMapsAddressWidget(attrs={'data-map-type': 'roadmap'})},
# }
class EventAdmin(admin.ModelAdmin):
    """Admin for events, with a per-event link to its attendee list."""
    list_display = ('id', 'title', 'venue', 'start_date',
                    'end_date', 'attendee_list', )
    search_fields = ('title', 'description')
    ordering = ('-id',)
    formfield_overrides = {
        models.TextField: {'widget': SummernoteWidget},
    }

    def attendee_list(self, obj):
        """Render a link to the registration changelist filtered to this event.

        Security fix: the original interpolated with ``%`` *before* calling
        ``format_html``, which bypasses autoescaping; passing the values as
        format arguments lets Django escape them.
        """
        return format_html(
            "<a href='{}?event_id={}'>Show</a>",
            reverse('admin:event_eventregistration_changelist'), obj.id)

    attendee_list.allow_tags = True
    attendee_list.short_description = 'Attendee List'
class EventRegistrationAdmin(admin.ModelAdmin):
    """Admin for event registrations with confirm/decline row actions."""
    list_display = ('id', 'nuid', 'user', 'email_address', 'custom_field',
                    'event', 'status', 'registration_actions', 'dietery_requirements')
    list_filter = ('status', 'event')
    search_fields = ('event', 'status')
    ordering = ('-id',)

    def custom_field(self, obj):
        """Show the id of the related event."""
        return '{}'.format(obj.event.id)
    custom_field.short_description = 'Event Id'

    def registration_actions(self, obj):
        """Render confirm/decline buttons for 'waiting' or 'confirm' rows.

        The original had two byte-identical branches for the two statuses;
        they are merged here. Any other status renders nothing (implicit
        None), exactly as before.
        """
        if obj.status in ('waiting', 'confirm'):
            return format_html(
                '<a class="button" href="{}">Confirm</a> '
                '<a class="button" href="{}" style="background-color: red;">Decline</a>',
                reverse('confirm_event', args=[obj.pk]),
                reverse('decline_event', args=[obj.pk]),
            )
    registration_actions.short_description = 'Registration Actions'
    registration_actions.allow_tags = True
admin.site.register(Venue, VenueAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(EventRegistration, EventRegistrationAdmin)
|
16,791 | 3b1d247aff186bc1b16fc599efa8de6a0ea95dbd | #!/usr/bin/env python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Simple test suite for toolchains espcially llvm arm toolchains.
Sample invocations
tools/toolchain_tester/toolchain_tester.py [options]+ test1.c test2.c ...
where options are
--config <config>
--append <tag>=<value>
--append_file=<filename>
--verbose
--show_console
--exclude=<filename>
--tmp=<path>
--check_excludes
--concurrency=<number>
e.g. --append "CFLAGS:-lsupc++" will enable C++ eh support
NOTE: the location of tmp files is intentionally hardcoded, so you
can only run one instance of this at a time.
"""
from __future__ import print_function
import getopt
import glob
import multiprocessing
import os
import shlex
import subprocess
import sys
import time
import toolchain_config
# ======================================================================
# Options
# ======================================================================
# list of streams being logged to (both normal and verbose output)
REPORT_STREAMS = [sys.stdout]
# max time (secs) to wait for command any command to complete
TIMEOUT = 120
# enable verbose output, e.g. commands being executed
VERBOSE = 0
# prefix for temporary files
TMP_PREFIX = '/tmp/tc_test_'
# show command output (stdout/stderr)
SHOW_CONSOLE = 1
# append these settings to config
APPEND = []
# append these settings to config, for a given test (specified by APPEND_FILES)
APPEND_PER_TEST = {}
# Files listing the APPEND_PER_TEST entries.
APPEND_FILES = []
# exclude these tests
EXCLUDE = {}
# check whether excludes are still necessary
CHECK_EXCLUDES = 0
# Files listing excluded tests
EXCLUDE_FILES = []
# module with settings for compiler, etc.
CFG = None
# Number of simultaneous test processes
CONCURRENCY = 1
# Child processes push failing test results onto this queue
ERRORS = multiprocessing.Queue()
# ======================================================================
# Hook print to we can print to both stdout and a file
def Print(message):
  """Write message to every registered report stream (stdout plus any log files)."""
  for stream in REPORT_STREAMS:
    print(message, file=stream)
# ======================================================================
def Banner(message):
  """Print message framed by 70-character separator rules."""
  rule = '=' * 70
  Print(rule)
  Print(message)
  Print(rule)
# ======================================================================
def RunCommand(cmd, always_dump_stdout_stderr):
  """Run a shell command given as an argv style vector.

  Returns the command's exit code, or -1 if the command exceeded TIMEOUT
  seconds and had to be killed. Dumps stdout/stderr on failure or when
  always_dump_stdout_stderr is true.
  """
  if VERBOSE:
    Print(str(cmd))
    Print(" ".join(cmd))
  start = time.time()
  # NOTE(review): stdout/stderr are only drained after the process exits, so a
  # child producing more output than the pipe buffer can stall -- confirm
  # whether any test produces that much output before restructuring.
  p = subprocess.Popen(cmd,
                       bufsize=1000*1000,
                       stderr=subprocess.PIPE,
                       stdout=subprocess.PIPE)
  while p.poll() is None:
    time.sleep(0.1)
    now = time.time()
    if now - start > TIMEOUT:
      Print('Error: timeout')
      Print('Killing pid %d' % p.pid)
      # BUG FIX: the old code only reaped already-finished children via
      # os.waitpid(-1, os.WNOHANG) and never actually terminated the runaway
      # process. Kill it explicitly and reap it so it cannot linger.
      p.kill()
      p.wait()
      return -1
  stdout = p.stdout.read()
  stderr = p.stderr.read()
  retcode = p.wait()
  if retcode != 0:
    Print('Error: command failed %d %s' % (retcode, ' '.join(cmd)))
    always_dump_stdout_stderr = True
  if always_dump_stdout_stderr:
    Print(stderr)
    Print(stdout)
  return retcode
def RemoveTempFiles():
  """Delete every temporary file created under the TMP_PREFIX pattern."""
  global TMP_PREFIX
  for leftover in glob.glob(TMP_PREFIX + '*'):
    os.remove(leftover)
def MakeExecutableCustom(config, test, extra):
  """Run every build/run phase of `config` for a single test file.

  Args:
    config: a toolchain_config object providing GetCommands(dict).
    test: path to the test source file.
    extra: dict of extra substitution values; copied, not mutated.
  Returns:
    '' on success, otherwise the name of the first phase that failed.
  """
  global TMP_PREFIX
  global SHOW_CONSOLE
  d = extra.copy()
  # Unique tmp name built from the test's parent directory and basename so
  # parallel tests of the same config do not collide.
  d['tmp'] = (TMP_PREFIX + '_' +
              os.path.basename(os.path.dirname(test)) + '_' +
              os.path.basename(test))
  d['src'] = test
  for phase, command in config.GetCommands(d):
    command = shlex.split(command)
    try:
      retcode = RunCommand(command, SHOW_CONSOLE)
    except Exception as err:
      # Failure to even launch the phase counts as a phase failure.
      Print("cannot run phase %s: %s" % (phase, str(err)))
      return phase
    if retcode:
      return phase
  # success
  return ''
def ParseExcludeFiles(config_attributes):
  ''' Parse the files containing tests to exclude (i.e. expected fails).
  Each line may contain a comma-separated list of attributes restricting
  the test configurations which are expected to fail. (e.g. architecture
  or optimization level). A test is only excluded if the configuration
  has all the attributes specified in the exclude line. Lines which
  have no attributes will match everything, and lines which specify only
  one attribute (e.g. architecture) will match all configurations with that
  attributed (e.g. both opt levels with that architecture)
  '''
  for excludefile in EXCLUDE_FILES:
    # FIX: use a context manager so the handle is closed even if a line
    # raises; the old code leaked the file on any exception before f.close().
    with open(excludefile) as f:
      for line in f:
        line = line.strip()
        if not line: continue
        if line.startswith('#'): continue
        tokens = line.split()
        if len(tokens) > 1:
          attributes = set(tokens[1].split(','))
          # Skip entries whose attribute restriction does not match this config.
          if not attributes.issubset(config_attributes):
            continue
          test = tokens[0]
        else:
          test = line
        if test in EXCLUDE:
          Print('ERROR: duplicate exclude: [%s]' % line)
        EXCLUDE[test] = excludefile
  Print('Size of excludes now: %d' % len(EXCLUDE))
def ParseAppendFiles():
  """Parse the file contain a list of test + CFLAGS to append for that test.

  Each non-comment line is `test,tag:value[,tag:value...]`; repeated tags for
  the same test are space-joined. Results populate APPEND_PER_TEST.
  """
  for append_file in APPEND_FILES:
    # FIX: context manager guarantees the file is closed even when a malformed
    # line raises (the old explicit f.close() was skipped on exceptions).
    with open(append_file) as f:
      for line in f:
        line = line.strip()
        if not line: continue
        if line.startswith('#'): continue
        tokens = line.split(',')
        test = tokens[0]
        to_append = {}
        for t in tokens[1:]:
          tag, value = t.split(':')
          if tag in to_append:
            # Multiple values for one tag accumulate space-separated.
            to_append[tag] = to_append[tag] + ' ' + value
          else:
            to_append[tag] = value
        if test in APPEND_PER_TEST:
          raise Exception('Duplicate append/flags for test %s (old %s, new %s)' %
                          (test, APPEND_PER_TEST[test], to_append))
        APPEND_PER_TEST[test] = to_append
def ParseCommandLineArgs(argv):
  """Process command line options and return the unprocessed left overs.

  Mutates the module-level configuration globals (VERBOSE, CFG, APPEND,
  EXCLUDE_FILES, ...) according to the recognised long options and exits the
  process with -1 on any malformed or unknown option.
  """
  global VERBOSE, COMPILE_MODE, RUN_MODE, TMP_PREFIX
  global CFG, APPEND, SHOW_CONSOLE, CHECK_EXCLUDES, CONCURRENCY
  try:
    opts, args = getopt.getopt(argv[1:], '',
                               ['verbose',
                                'show_console',
                                'append=',
                                'append_file=',
                                'config=',
                                'exclude=',
                                'check_excludes',
                                'tmp=',
                                'concurrency='])
  except getopt.GetoptError as err:
    Print(str(err)) # will print something like 'option -a not recognized'
    sys.exit(-1)
  for o, a in opts:
    # strip the leading '--'
    o = o[2:]
    if o == 'verbose':
      VERBOSE = 1
    elif o == 'show_console':
      SHOW_CONSOLE = 1
    elif o == 'check_excludes':
      CHECK_EXCLUDES = 1
    elif o == 'tmp':
      TMP_PREFIX = a
    elif o == 'exclude':
      # Parsing of exclude files must happen after we know the current config
      EXCLUDE_FILES.append(a)
    elif o == 'append':
      # "tag:value" -- split only on the first colon so values may contain ':'
      tag, value = a.split(":", 1)
      APPEND.append((tag, value))
    elif o == 'append_file':
      APPEND_FILES.append(a)
    elif o == 'config':
      CFG = a
    elif o == 'concurrency':
      CONCURRENCY = int(a)
    else:
      Print('ERROR: bad commandline arg: %s' % o)
      sys.exit(-1)
  # return the unprocessed options, i.e. the command
  return args
def RunTest(args):
  """Worker entry point: build/run one test and report failures via ERRORS.

  Args:
    args: (num, total, config, test, extra_flags) tuple as built by RunSuite.
  """
  num, total, config, test, extra_flags = args
  base_test_name = os.path.basename(test)
  extra_flags = extra_flags.copy()
  toolchain_config.AppendDictionary(extra_flags,
                                    APPEND_PER_TEST.get(base_test_name, {}))
  Print('Running %d/%d: %s' % (num + 1, total, base_test_name))
  try:
    result = MakeExecutableCustom(config, test, extra_flags)
  except KeyboardInterrupt:
    # Swallow the keyboard interrupt in the child. Otherwise the parent
    # hangs trying to join it.
    # BUG FIX: return immediately; the old code fell through to `if result`
    # with `result` never assigned, raising UnboundLocalError.
    return
  if result and config.IsFlaky():
    # TODO(dschuff): deflake qemu or switch to hardware
    # BUG=http://code.google.com/p/nativeclient/issues/detail?id=2197
    # try it again, and only fail on consecutive failures
    Print('Retrying ' + base_test_name)
    result = MakeExecutableCustom(config, test, extra_flags)
  if result:
    Print('[  FAILED  ] %s: %s' % (result, test))
    ERRORS.put((result, test))
def RunSuite(config, files, extra_flags, errors):
  """Run a collection of benchmarks.

  Failures recorded by the worker processes on the shared ERRORS queue are
  drained into the caller-supplied `errors` dict (phase -> list of tests).
  """
  global ERRORS, CONCURRENCY
  Banner('running %d tests' % (len(files)))
  pool = multiprocessing.Pool(processes=CONCURRENCY)
  # create a list of run arguments to map over
  argslist = [(num, len(files), config, test, extra_flags)
              for num, test in enumerate(files)]
  try:
    # let the process pool handle the test assignments, order doesn't matter
    pool.map(RunTest, argslist)
  finally:
    # BUG FIX: the pool was previously never shut down; close and join the
    # workers even if map() raises, so child processes are not leaked.
    pool.close()
    pool.join()
  while not ERRORS.empty():
    phase, test = ERRORS.get()
    errors[phase].append(test)
def FilterOutExcludedTests(files, exclude):
  """Return the subset of files whose basename is not listed in exclude."""
  kept = []
  for path in files:
    if os.path.basename(path) not in exclude:
      kept.append(path)
  return kept
def main(argv):
  """Entry point: parse args, run the suite for the chosen config, report.

  Returns True (nonzero exit) if any non-excluded test failed, else False.
  """
  files = ParseCommandLineArgs(argv)
  if not CFG:
    print('ERROR: you must specify a toolchain-config using --config=<config>')
    print('Available configs are: ')
    print('\n'.join(toolchain_config.TOOLCHAIN_CONFIGS.keys()))
    print()
    return -1
  global TMP_PREFIX
  global APPEND
  # Scope tmp files to this config so concurrent configs don't clash on names.
  TMP_PREFIX = TMP_PREFIX + CFG
  Banner('Config: %s' % CFG)
  config = toolchain_config.TOOLCHAIN_CONFIGS[CFG]
  ParseExcludeFiles(config.GetAttributes())
  for tag, value in APPEND:
    config.Append(tag, value)
  ParseAppendFiles()
  config.SanityCheck()
  Print('TMP_PREFIX: %s' % TMP_PREFIX)
  # initialize error stats
  errors = {}
  for phase in config.GetPhases():
    errors[phase] = []
  Print('Tests before filtering %d' % len(files))
  # In check-excludes mode run everything so unused excludes can be detected.
  if not CHECK_EXCLUDES:
    files = FilterOutExcludedTests(files, EXCLUDE)
  Print('Tests after filtering %d' % len(files))
  try:
    RunSuite(config, files, {}, errors)
  finally:
    RemoveTempFiles()
  # print error report
  USED_EXCLUDES = {}
  num_errors = 0
  for k in errors:
    lst = errors[k]
    if not lst: continue
    Banner('%d failures in config %s phase %s' % (len(lst), CFG, k))
    for e in lst:
      # Failures that were expected (excluded) don't count as errors.
      if os.path.basename(e) in EXCLUDE:
        USED_EXCLUDES[os.path.basename(e)] = None
        continue
      Print(e)
      num_errors += 1
  if CHECK_EXCLUDES:
    Banner('Unnecessary excludes:')
    for e in EXCLUDE:
      if e not in USED_EXCLUDES:
        Print(e + ' (' + EXCLUDE[e] + ')')
  return num_errors > 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
16,792 | 958a047cbfdd33091e3540c8227a0606fe702d63 | #
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from .source import SourceBraintree
__all__ = ["SourceBraintree"]
|
16,793 | 0b444d80d55f59bd545349e0fb5c51db3087794f | import os;
pfx = os.environ["PREFIX"];
cmds_ = [
{
"name": "predict",
"description": "Gives a random response to predict what you ask.",
"type": "Misc",
"args": "<question>"
},
{
"name": "snipe",
"description": "Sends the most recently deleted message.",
"type": "Misc"
},
{
"name": "kick",
"description": "Kicks mentioned user for optional given reason.",
"type": "Moderation",
"args": "<@user> <(optional) reason>"
},
{
"name": "ban",
"description": "Bans mentioned user for optional given reason.",
"type": "Moderation",
"args": "<@user> <(optional) reason>"
},
{
"name": "join",
"description": "Joins VC that message author is connected to.",
"type": "Voice"
},
{
"name": "leave",
"description": "Leaves VC that client is currently connected to.",
"type": "Voice"
},
{
"name": "neko",
"description": "Posts an image of a NSFW neko to the channel.",
"type": "NSFW",
},
{
"name": "hentai",
"description": "Posts a hentai image to the channel.",
"type": "NSFW"
}
] |
16,794 | 8f1f7892c28198eb2cfe6bcdb84b269581429bbe | # -*- coding: utf-8 -*-
from zope.interface import Interface
from zope.publisher.interfaces.browser import IDefaultBrowserLayer
class IEjnApplicants4FundingLayer(IDefaultBrowserLayer):
    """Marker interface that defines a browser layer for this add-on.

    Views/resources registered for this layer are only active when the
    package is installed on the site.
    """
class IFundingReqBase(Interface):
    """Marker interface for funding request content type.

    Carries no schema of its own; used for registration/lookup only.
    """
|
16,795 | cfcac25b4f017b3f950bfac126ebb1227048c097 | import typing
import pytest
import requests
from apimoex import client
@pytest.fixture(scope="module", name="session")
def make_session():
    """Yield one shared requests.Session for every test in this module."""
    http = requests.Session()
    try:
        yield http
    finally:
        # Equivalent to the context-manager form: Session.__exit__ closes it.
        http.close()
def test_iss_client_iterable():
assert issubclass(client.ISSClient, typing.Iterable)
def test_repr(session):
iss = client.ISSClient(session, "test_url", dict(a="b"))
assert str(iss) == "ISSClient(url=test_url, query={'a': 'b'})"
def test_get(session):
    """Integration test: a single get() against the live MOEX ISS server.

    NOTE(review): this hits iss.moex.com over the network and depends on the
    security '1-02-65104-D' still resolving to 4 rows -- brittle by design.
    """
    url = "https://iss.moex.com/iss/securities.json"
    query = dict(q="1-02-65104-D")
    iss = client.ISSClient(session, url, query)
    raw = iss.get()
    assert isinstance(raw, dict)
    assert len(raw) == 1
    data = raw["securities"]
    assert isinstance(data, list)
    assert len(data) == 4
    assert isinstance(data[0], dict)
    assert data[1]["regnumber"] == "1-02-65104-D"
def test_get_with_start(session):
url = "https://iss.moex.com/iss/securities.json"
query = dict(q="1-02-65104-D")
iss = client.ISSClient(session, url, query)
raw = iss.get(1)
assert isinstance(raw, dict)
assert len(raw) == 1
data = raw["securities"]
assert isinstance(data, list)
assert len(data) == 3
assert isinstance(data[0], dict)
assert data[1]["regnumber"] == "1-02-65104-D"
def test_get_wrong_url(session):
url = "https://iss.moex.com/iss/securities1.json"
iss = client.ISSClient(session, url)
with pytest.raises(client.ISSMoexError) as error:
iss.get()
assert "Неверный url" in str(error.value)
assert "https://iss.moex.com/iss/securities1.json?iss.json=extended&iss.meta=off" in str(error.value)
def test_get_wrong_json(monkeypatch, session):
url = "https://iss.moex.com/iss/securities.json"
iss = client.ISSClient(session, url)
# noinspection PyProtectedMember
monkeypatch.setattr(requests.Response, "json", lambda x: [0, 1, 2])
with pytest.raises(client.ISSMoexError) as error:
iss.get()
assert "Ответ содержит некорректные данные" in str(error.value)
assert "https://iss.moex.com/iss/securities.json?iss.json=extended&iss.meta=off" in str(error.value)
def test_make_query_empty(session):
    """Without user params, _make_query returns only the two ISS service keys."""
    iss = client.ISSClient(session, "test_url")
    # noinspection PyProtectedMember
    query = iss._make_query()
    assert isinstance(query, dict)
    assert query == {"iss.json": "extended", "iss.meta": "off"}
def test_make_query_not_empty(session):
iss = client.ISSClient(session, "test_url", dict(test_param="test_value"))
# noinspection PyProtectedMember
query = iss._make_query()
assert isinstance(query, dict)
assert len(query) == 3
assert query["iss.json"] == "extended"
assert query["iss.meta"] == "off"
assert query["test_param"] == "test_value"
def test_make_query_not_empty_with_start(session):
iss = client.ISSClient(session, "test_url", dict(test_param="test_value"))
# noinspection PyProtectedMember
query = iss._make_query(704)
assert isinstance(query, dict)
assert len(query) == 4
assert query["iss.json"] == "extended"
assert query["iss.meta"] == "off"
assert query["test_param"] == "test_value"
assert query["start"] == 704
def test_get_all_with_cursor(session):
url = "https://iss.moex.com/iss/history/engines/stock/markets/shares/securities/SNGSP.json"
query = {"from": "2018-01-01", "till": "2018-03-01"}
iss = client.ISSClient(session, url, query)
raw = iss.get_all()
assert isinstance(raw, dict)
assert len(raw) == 1
data = raw["history"]
assert isinstance(data, list)
assert len(data) > 100
assert data[0]["TRADEDATE"] == "2018-01-03"
assert data[-1]["TRADEDATE"] == "2018-03-01"
for row in data:
for column in ["TRADEDATE", "OPEN", "LOW", "HIGH", "CLOSE", "VOLUME"]:
assert column in row
def test_get_all_without_cursor(session):
url = "https://iss.moex.com/iss/history/engines/stock/markets/shares/boards/TQBR/securities/SNGSP.json"
query = {"from": "2018-01-03", "till": "2018-06-01"}
iss = client.ISSClient(session, url, query)
raw = iss.get_all()
assert isinstance(raw, dict)
assert len(raw) == 1
data = raw["history"]
assert isinstance(data, list)
assert len(data) > 100
assert data[0]["TRADEDATE"] == "2018-01-03"
assert data[-1]["TRADEDATE"] == "2018-06-01"
for row in data:
for column in ["TRADEDATE", "OPEN", "LOW", "HIGH", "CLOSE", "VOLUME"]:
assert column in row
def test_wrong_cursor_size(monkeypatch, session):
    """get_all() must raise ISSMoexError when the cursor block is malformed."""
    iss = client.ISSClient(session, "")
    fake_cursor = {"history.cursor": [0, 1]}
    monkeypatch.setattr(iss, "get", lambda x: fake_cursor)
    with pytest.raises(client.ISSMoexError) as error:
        iss.get_all()
    # FIX: dropped the pointless f-string prefix -- the literal contains no
    # placeholders (flake8 F541), so it was a plain string all along.
    assert "Некорректные данные history.cursor [0, 1] для начальной позиции 0" in str(
        error.value
    )
def test_wrong_cursor_index(monkeypatch, session):
iss = client.ISSClient(session, "")
fake_cursor = {"history.cursor": [{"INDEX": 1}]}
monkeypatch.setattr(iss, "get", lambda x: fake_cursor)
with pytest.raises(client.ISSMoexError) as error:
iss.get_all()
assert "Некорректные данные history.cursor [{'INDEX': 1}] для начальной позиции 0" in str(
error.value
)
|
16,796 | 631da250a19b325cb12c47d9274aeec525b0b675 | from google.appengine.ext import webapp
from google.appengine.api import users
from google.appengine.ext.webapp import util
from google.appengine.api import memcache
from google.appengine.ext.webapp import template
import os
from models import *
import misc
import datetime
class Main(webapp.RequestHandler):
    """Front page: paginated list of public articles (1-based `page` param)."""
    def get(self):
        page = self.request.get('page')
        if(page.isdigit()):
            page = int(page)
        else:
            # Non-numeric or missing page falls back to the first page.
            page = 1
        # getPublicArticles returns (articles_for_page, has_next_page).
        ret = misc.getPublicArticles(page)
        articles = ret[0]
        nextPageBool = ret[1]
        template_values = {
            'articles': articles,
            'nextpage': (page+1),
            'nextpagebool': nextPageBool,
            'prevpage': (page-1),
        }
        path = os.path.join(os.path.dirname(__file__), 'templates/articles.html')
        self.response.out.write(template.render(path, template_values))
class Search(webapp.RequestHandler):
    """Case-insensitive title search over public articles, paginated."""
    def get(self):
        allArticles = misc.getAllPublicArticles()
        articles = []
        search = self.request.get('s')
        page = self.request.get('page')
        if page.isdigit():
            # URL pages are 1-based; all arithmetic below is 0-based.
            page = int(page)-1
        else:
            # BUG FIX: was `page = 1`. Since `page` is used 0-based for the
            # slice below, a missing/invalid page number silently skipped the
            # first ARTICLES_PER_PAGE results. The 0-based default is 0.
            page = 0
        # An article matches if its title contains ANY word of the query.
        searchList = search.split(" ")
        for article in allArticles:
            for word in searchList:
                if(article.title.lower().find(word.lower()) != -1):
                    articles.append(article)
                    break
        nextPageBool = False
        if len(articles) > misc.ARTICLES_PER_PAGE * page + misc.ARTICLES_PER_PAGE:
            nextPageBool = True
        articles = articles[misc.ARTICLES_PER_PAGE*page:misc.ARTICLES_PER_PAGE*page+misc.ARTICLES_PER_PAGE]
        template_values = {
            'articles': articles,
            # Template links are 1-based again, hence the +2 / bare page.
            'nextpage': (page+2),
            'nextpagebool':nextPageBool,
            'prevpage':(page),
            'search': search,
        }
        path = os.path.join(os.path.dirname(__file__), 'templates/search.html')
        self.response.out.write(template.render(path, template_values))
class Article(webapp.RequestHandler):
    """Single-article page; only public articles are viewable, others redirect home."""
    def get(self):
        id = self.request.get('id')
        user = users.get_current_user()
        if(id.isdigit()):
            article = misc.getArticle(id)
            if(article == None):
                # Unknown article id: bounce to the front page.
                self.redirect('/')
            elif(article.public):
                loggedIn = False
                loginUrl = ""
                if user:
                    loggedIn = True
                else:
                    # Anonymous visitors get a login link back to this page.
                    loginUrl = users.create_login_url(self.request.uri)
                template_values = {
                    'article': article,
                    # Comment thread is pre-rendered to an HTML string.
                    'comments':printComments(article.comments),
                    'loggedin':loggedIn,
                    'loginurl': loginUrl,
                    'key': article.key(),
                }
                path = os.path.join(os.path.dirname(__file__), 'templates/article.html')
                self.response.out.write(template.render(path, template_values))
            else:
                # Private article: hide its existence.
                self.redirect('/')
        else:
            self.redirect('/')
class CommentPost(webapp.RequestHandler):
    """Creates a new top-level comment on an article and refreshes caches."""
    def post(self):
        article = db.get(self.request.get('key'))
        commentBody = self.request.get('commentBody')
        try:
            commentBody = misc.cleanHtml(commentBody)
            comment = Comment(author=users.get_current_user(),body=commentBody,date=datetime.datetime.now().date(),article=article.key())
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Catch only real errors from sanitizing/model
        # construction and keep the best-effort redirect behavior.
        except Exception:
            self.redirect('/article?id='+str(article.id))
            return
        comment.put()
        memcache.set(str(comment.key()),comment)
        article.comments.append(comment.key())
        article.put()
        # Invalidate/refresh the cached article and listing pages.
        memcache.set("article"+str(article.id),article)
        memcache.delete("publicArticles")
        memcache.delete("allArticles")
        self.redirect('/article?id='+str(article.id))
class EditCommentPost(webapp.RequestHandler):
    """Lets a comment's author replace its body with a sanitized new text."""
    def post(self):
        comment = db.get(self.request.get('commentKey'))
        # Only the original author may edit; everyone else just gets redirected.
        if(comment.author == users.get_current_user()):
            commentBody = self.request.get('commentBody')
            try:
                commentBody = misc.cleanHtml(commentBody)
                comment.body = commentBody
                comment.put()
                memcache.set(str(comment.key()), comment)
            # BUG FIX: was a bare `except:` (also caught SystemExit /
            # KeyboardInterrupt); narrowed to Exception, behavior otherwise kept.
            except Exception:
                self.redirect('/article?id='+str(comment.article.id))
                return
        self.redirect('/article?id='+str(comment.article.id))
class ReplyPost(webapp.RequestHandler):
    """Creates a reply under an existing comment."""
    def post(self):
        parentComment = db.get(self.request.get('commentKey'))
        commentBody = self.request.get('commentBody')
        commentBody = misc.cleanHtml(commentBody)
        # BUG FIX: the sanitized body was computed and then discarded -- the
        # raw request value was stored instead, bypassing cleanHtml entirely
        # (stored-XSS risk). Store the cleaned value.
        comment = Comment(author=users.get_current_user(),body=commentBody,date=datetime.datetime.now().date(),_parent=parentComment,article=parentComment.article.key())
        comment.put()
        parentComment.children.append(comment.key())
        parentComment.put()
        memcache.set(str(parentComment.key()),parentComment)
        memcache.delete("publicArticles")
        memcache.delete("allArticles")
        self.redirect('/article?id='+str(comment.article.id))
class DeleteCommentPost(webapp.RequestHandler):
    """Deletes (or tombstones) a comment owned by the current user.

    Leaf comments are removed outright; comments with replies are kept but
    overwritten with "deleted" placeholders so the thread structure survives.
    """
    def post(self):
        comment = db.get(self.request.get('key'))
        # Only the comment's author may delete it.
        if(comment.author == users.get_current_user()):
            if not comment.children:
                # Leaf comment: detach from the article and delete for real.
                article = comment.article
                article.comments.remove(comment.key())
                memcache.delete(str(comment.key()))
                comment.delete()
                article.put()
                memcache.delete("publicArticles")
                memcache.delete("allArticles")
                memcache.set("article"+str(article.id),article)
            else:
                # Has replies: tombstone in place to preserve the subtree.
                comment.title = "deleted"
                comment.body = "deleted"
                comment.author = users.User("deleted")
                comment.date = datetime.datetime.now().date()
                comment.put()
                memcache.set(str(comment.key()), comment)
        self.redirect('/article?id='+str(comment.article.id))
def printComments(comments,switch=False):
    """Render a comment thread (list of keys) to nested HTML divs.

    The container CSS class alternates per nesting depth via `switch`.
    """
    css_class = 'articleCommentsSwitch' if switch else 'articleComments'
    parts = ["<div class='%s'>" % css_class]
    for key in comments:
        comment = misc.getComment(key)
        parts.append(printComment(comment))
        if comment.children:
            # Recurse with the class toggled for the child level.
            parts.append(printComments(comment.children, not switch))
    parts.append("</div>")
    return "".join(parts)
def printComment(comment):
    """Render one comment entity to HTML via the comment template.

    Exposes to the template whether a user is logged in and whether that
    user authored the comment (enables edit/delete controls).
    """
    user = users.get_current_user()
    loggedin = False
    if user:
        loggedin = True
    commentauthor = False
    if(user == comment.author):
        commentauthor = True
    template_values = {
        'comment': comment,
        'loggedin':loggedin,
        'commentauthor': commentauthor,
        'key': comment.key(),
    }
    path = os.path.join(os.path.dirname(__file__), 'templates/comment.html')
    return template.render(path, template_values)
def main():
    """Wire up URL routes and start the WSGI application (GAE webapp)."""
    application = webapp.WSGIApplication([('/', Main),
                                        ('/article', Article),
                                        ('/commentpost', CommentPost),
                                        ('/deletecommentpost', DeleteCommentPost),
                                        ('/editcommentpost', EditCommentPost),
                                        ('/replypost', ReplyPost),
                                        ('/search', Search),],
                                       debug=True)
    util.run_wsgi_app(application)
if __name__ == '__main__':
    main()
|
16,797 | 024bafba4e0c03729568f62ec1609d18116dd8b9 | from sqlalchemy import create_engine, Column, Integer, String, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.engine.url import URL
import product_crawlers.settings
DeclarativeBase = declarative_base()
def db_connect():
    """Create a SQLAlchemy engine from the DATABASE settings dict.

    Connection parameters come from product_crawlers.settings.DATABASE.
    """
    connection_url = URL(**product_crawlers.settings.DATABASE)
    return create_engine(connection_url)
def create_products_table(engine):
    """Create all tables declared on DeclarativeBase (incl. Products) if absent."""
    DeclarativeBase.metadata.create_all(engine)
class Products(DeclarativeBase):
    """Sqlalchemy Products model: one row per scraped product listing."""
    __tablename__ = "products"
    id = Column(Integer, primary_key=True)  # surrogate key
    name = Column('Name', String)  # product display name as scraped
    origin_domain = Column('Origin_Domain', String)  # site the item came from
    origin_url = Column('Origin_URL', String)  # full URL of the product page
    extract_date = Column('extract_date', DateTime)  # when the crawl happened
    price = Column('Price', String, nullable=True)  # raw price text; may be absent
16,798 | b4ee1933b535458a59efe2c0e4c252fdb8afdbdc | from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class AppLogicConfig(AppConfig):
    """Django application config for the 'app_logic' app (translated label)."""
    name = 'app_logic'
    verbose_name = _('logic')
|
16,799 | 62f74678e008225a6b48e2806c1dd3286e60ba13 | from struct import *
import warnings
import re, os, json
from WADParser.Lumps import *
import matplotlib # required for DLL not found error
from skimage import io
import networkx as nx
from WADParser.WADFeatureExtractor import WADFeatureExtractor
import itertools
import subprocess
import numpy as np
from skimage import morphology
from WADParser.Dictionaries.ThingTypes import *
from WADParser.RoomTopology import topological_features
from WADParser.flags import linedef_flags_to_int
class LumpInfo(dict):
    """Directory entry describing one lump inside a WAD file (16 bytes on disk)."""
    def __init__(self, filepos=None, size=None, name=None):
        """
        :param filepos: An integer holding a pointer to the start of the lump's data in the file.
        :param size: An integer representing the size of the lump in bytes.
        :param name: A 8 byte encoded ascii string, eventually padded with 00
        """
        # BUG FIX: the original evaluated the bare expression `super()`, which
        # creates a super proxy and discards it without ever calling
        # dict.__init__. Call the base initializer properly.
        super().__init__()
        self['filepos'] = filepos
        self['size'] = size
        self['name'] = name

    def from_bytes(self, byte_stream):
        """Populate this entry from a 16-byte directory record; returns self."""
        self['filepos'], = unpack("i", byte_stream[0:4])
        self['size'], = unpack("i", byte_stream[4:8])
        # Lump names are 8 ASCII bytes, NUL-padded (decoded by the Lumps helper).
        self['name'] = decode_doomstring(byte_stream[8:16])
        return self

    def to_bytes(self):
        """Serialize this entry back to its 16-byte on-disk representation."""
        info_bytes = bytearray()
        info_bytes += pack("i", self['filepos'])
        info_bytes += pack("i", self['size'])
        info_bytes += encode_doomstring(self['name'])
        return info_bytes
class WAD(dict):
def __init__(self, mode):
"""
Dictionary structured representation of a WAD file. Fields that are dictionary keys are unprocessed data from
the file itself, while object attributes are "secondary access keys" (structures not directly encoded in the
WAD file but built for faster access to data.)
mode: 'R' for reading or 'W' for writing.
Example:
self['lumps'] contains the list of all the lumps.
Lumps describing levels are processed as list or dict(s), the others are kept as raw bytes.
self['directory'] is the list of lump info, the structure reflects the one encoded in the WAD file.
self.levels contains the lumps grouped by each level. (secondary key, not directly encoded into the file)
self.sectors contains the linedefs sorrounding each map sector (secondary key)
Warning:
It's suggested to use the WADReader and WADWriter class in order to read and write a WAD.
If you need to edit a WAD object, please consider copying it into another WAD() using from_bytes and
to_bytes methods.
"""
super()
self['header'] = {
'identification' : 'PWAD', # Ascii identifier: IWAD or PWAD
'numlumps' : 0, # An integer specifying the number of lumps in the WAD.
'infotableofs' : 0 # An integer holding a pointer to the location of the directory.
}
self['lumps'] = [] # List of lumps, some processed, other in byte format
self['directory'] = list() # List of lumpinfo
self.levels = [] # this division in levels is not part of the wad but it's done for fast access
self.map_regex = re.compile('MAP\d\d?')
self.em_regex = re.compile('E\d*M\d\d?')
self.errors = list()
self['exception'] = 0
self.mode = mode
self.current_lump_offset = 12 # Keeps track of the offset in bytes of the last. The header is always 12 bytes long
def from_bytes(self, byte_stream):
'''
Builds a WAD object from the byte stream from a .WAD file.
:param byte_stream:
:return:
'''
assert self.mode == 'R', "Cannot read a WAD opened in write mode. " \
"Please consider copying your WAD() into a new one " \
"using to_bytes and from_bytes methods"
try:
self['header']['identification'] = decode_doomstring(byte_stream[0:4])
self['header']['numlumps'], = unpack("i", byte_stream[4:8])
self['header']['infotableofs'], = unpack("i", byte_stream[8:12])
# the pattern for grouped record is
# [byte[start:start+length] for start in range(offset, offset+n_items*length, length)]
lump_info_records = [byte_stream[start:start+16] for start in range(self['header']['infotableofs'],
self['header']['infotableofs']
+self['header']['numlumps']*16, 16)]
# Populate the lump directory
for lump_info_bytes in lump_info_records:
lumpinfo = LumpInfo().from_bytes(lump_info_bytes)
self['directory'].append(lumpinfo)
# Parsing lumps
for lump in self['directory']:
if lump['size'] < 0:
self.errors.append({'object': lump, 'description': 'Negative size lump', 'fatal':False})
# Some files are corrupted and have a negative lump size. They'd cause a segfault if launched with doom
# We try to go on extracting as much data as we can from the WAD file.
continue
lumpname = lump['name']
if lumpname in ['F_START','TEXTURE1','TEXTURE2']:
self['exception'] = 1 # Ignoring WADs with custom flats and textures, i.e 1538 single floor levels out of 1969
if (self.map_regex.match(lump['name']) is not None) or (self.em_regex.match(lump['name']) is not None):
self.levels.append({'name':lumpname, 'lumps':{}})
lump_bytes = byte_stream[lump['filepos']:lump['filepos'] + lump['size']]
if lumpname in known_lumps_classes.keys() and len(lump_bytes) > 0:
# Got a level lump and need to parse it...
l = known_lumps_classes[lumpname]().from_bytes(lump_bytes)
if len(self.levels)>0: # otherwise we have found a level lump before the level description, which should not happen
self.levels[-1]['lumps'][lumpname] = l
# Adding processed lump to the lump list
self['lumps'].append(l)
else:
# got an empty lump or another type of lump (such textures etc) that is not useful.
# Adding raw format to the lump list
self['lumps'].append(lump_bytes)
# Cleaning empty levels (some wad files has random level descriptor with no lumps following them
for l in self.levels:
if 'SECTORS' not in l['lumps'] or 'LINEDEFS' not in l['lumps']:
self.levels.remove(l)
# Building other secondary access keys
# levels[sector][sector_id] = {sector: lump, sidedefs: list(lump), linedefs: list(lump), vertices=list(), vertex_path=list()}
# Retrieving linedefs for each sector
for level in self.levels:
level['sectors'] = {}
# This part of code makes the access to sectors and vertices easier.
# Lines, Vertices, Sidedef and Sectors are indexed by three lists, and they are connected in this way:
# Line -> Vertices, Line -> Sidedef(s) -> Sector
# Create an entry for each sector.
for sec_id, sec_lump in enumerate(level['lumps']['SECTORS']):
level['sectors'][sec_id] = {'lump': sec_lump, 'linedefs': list(), 'sidedefs': list(), 'vertex_path':list(), 'vertices_xy':list()}
# For each linedef, fill the corresponding sector record(s)
for linedef_id, linedef_lump in enumerate(level['lumps']['LINEDEFS']):
r_side_id = linedef_lump['right_sidedef']
r_sidedef = level['lumps']['SIDEDEFS'][r_side_id]
r_sector = r_sidedef['sector']
level['sectors'][r_sector]['linedefs'].append(linedef_lump)
level['sectors'][r_sector]['sidedefs'].append(r_sidedef)
l_side_id = linedef_lump['left_sidedef']
if l_side_id != -1:
l_sidedef = level['lumps']['SIDEDEFS'][l_side_id]
l_sector = l_sidedef['sector']
level['sectors'][l_sector]['linedefs'].append(linedef_lump)
level['sectors'][l_sector]['sidedefs'].append(l_sidedef)
# create vertex path for each sector for drawing
for sector_id, sector in level['sectors'].items():
# Make the graph G(Linedefs, Verices) undirected
edges = set()
for linedef in sector['linedefs']:
if (linedef['from'] != linedef['to']): # Avoids single-vertex linedefs
edges.add((linedef['from'],linedef['to']))
edges.add((linedef['to'],linedef['from']))
if len(edges) > 0: # Avoid crashes if some sectors are empty
# "hops" is the list of vertex indices as visited by a drawing algorithm
hops = list()
next_edge = min(edges)
if next_edge[0] not in hops:
hops.append(next_edge[0])
if next_edge[1] not in hops:
hops.append(next_edge[1])
while (len(edges) > 1):
edges.remove((next_edge[1], next_edge[0]))
edges.remove((next_edge[0], next_edge[1]))
next_edges = set(filter(lambda x: x[0] == hops[-1] or x[1] == hops[-1], edges))
if len(next_edges) == 0:
break
possible_next = min(next_edges)
if possible_next[1] == hops[-1]:
next_edge = (possible_next[1], possible_next[0])
else:
next_edge = possible_next
if next_edge[-1] not in hops:
hops.append(next_edge[-1])
sector['vertex_path'] = hops
sector['vertices_xy'] = [(level['lumps']['VERTEXES'][v_id]['x'], level['lumps']['VERTEXES'][v_id]['y']) for v_id in hops]
except Exception as e:
# All known exceptions found in the database are avoided, this exception will catch everything else that is unexpected
# and will produce a fatal error
self.errors = list()
self.errors.append({'object': self, 'description': e, 'fatal':True})
return self
    def add_lump(self, lumpname, lump):
        """
        Adds a new lump named lumpname and updates the information in the directory. Increments the current_lump_offset.
        :param lumpname: lump name. It will be converted in doomstring format.
        :param lump: a @Lumps object, or None for level descriptors or other zero-sized lumps.
        :return: None
        """
        assert self.mode == 'W', "Cannot write a WAD opened in read mode. " \
                                 "Please consider copying your WAD() into a new one " \
                                 "using to_bytes and from_bytes methods"
        # Zero-sized marker lumps (e.g. level descriptors) carry no payload.
        if lump is None:
            lump_bytes = bytes()
        else:
            lump_bytes = lump.to_bytes()
        size = len(lump_bytes)
        self['directory'].append(LumpInfo(filepos=self.current_lump_offset, size=size, name=lumpname))
        self['lumps'].append(lump_bytes)
        # Updating directory and header information
        self.current_lump_offset += size
        self['header']['numlumps'] += 1
        # The infotableoffset is always kept at the end of the file
        self['header']['infotableofs'] = self.current_lump_offset
def to_bytes(self):
# Build the entire file
# header to bytes
wad_bytes = bytearray()
wad_bytes += bytes('PWAD', encoding='ascii')
wad_bytes += pack('i', self['header']['numlumps'])
wad_bytes += pack('i', self['header']['infotableofs'])
# Adding Lumps
for lump in self['lumps']:
wad_bytes += lump
# Adding directory
for lumpinfo in self['directory']:
wad_bytes += lumpinfo.to_bytes()
return wad_bytes
class WADWriter(object):
    """
    Incremental builder for a PWAD file: accumulates THINGS/LINEDEFS/VERTEXES/
    SIDEDEFS/SECTORS lumps per level and commits them to a WAD on save or when
    a new level is started.
    """
    def __init__(self, scale_factor=128):
        """
        Class for writing a WAD file.
        Start by defining a new level with add_level(), then place new sectors and "things". Changes are submitted only
        on save or on the addition of a new level, since some optimization are due (duplicated vertices check, etc).

        :param scale_factor: multiplier applied by _rescale_coords when converting
            image/graph coordinates to Doom map units.
        """
        # due to the way the WAD works, for keeping track of the lumps size for filling the dictionary the byte representation is
        # needed. But for checking duplicated vertices/linedefs/etc it would be needed to convert back each lump before the
        # check. For avoiding this problem, a set of lumps is stored in the writer and written only when the level is
        # fully specified.
        self.wad = WAD('W')
        self.current_level = None
        self.lumps = {'THINGS':Things(), 'LINEDEFS':Linedefs(), 'VERTEXES':Vertexes(),'SIDEDEFS': Sidedefs(), 'SECTORS':Sectors()} # Temporary lumps for this level
        self.scale_factor = scale_factor
    def _sector_orientation(self, vertices):
        """
        Check if the polygon is oriented clock-wise or counter-clockwise.
        If the polygon is not closed, then closes it
        :param vertices: the input vertices
        :return: (Bool, vertices) True if clockwise, False otherwise.
        """
        if not vertices[0] == vertices[-1]:
            vertices.append(vertices[0])
        xy = np.transpose(np.array(vertices))
        x, y = xy[0], xy[1]
        # Shoelace formula: the sign of twice the signed area gives the winding order.
        return np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)) > 0, vertices
    def _rescale_coords(self, v):
        """Scale a coordinate tuple to Doom map units, truncating to int."""
        return tuple(int(a * self.scale_factor) for a in v)
    def _get_random_enemy(self):
        # Weighted choice among Doom thing type ids (enemy types); returns a plain Python int.
        return np.random.choice([3004, 9, 3001, 3002], 1, p=[0.3, 0.1, 0.4, 0.2]).item()
    def from_images(self, floormap, heightmap, wallmap, thingsmap, place_enemies=True):
        """
        Build a level from image maps (arrays or file paths). Computes the
        walkable area, extracts a region-adjacency graph and delegates to
        decorate_graph/from_graph.
        """
        # Each map may be given as a path; load and coerce dtype in that case.
        # NOTE(review): np.bool is deprecated/removed in recent numpy — confirm the pinned numpy version.
        if isinstance(floormap, str):
            floormap = io.imread(floormap).astype(dtype=np.bool)
        if isinstance(heightmap, str):
            heightmap = io.imread(heightmap).astype(np.uint8)
        if isinstance(wallmap, str):
            wallmap = io.imread(wallmap).astype(dtype=np.bool)
        if isinstance(thingsmap, str):
            thingsmap = io.imread(thingsmap).astype(np.uint8)
        # Walkable = floor minus walls (when a wallmap is provided).
        walkable = np.logical_and(floormap, np.logical_not(wallmap)) if wallmap is not None else floormap
        # walkable = morphology.remove_small_objects(walkable)
        # walkable = morphology.remove_small_holes(walkable)
        roommap, graph, metrics = topological_features(walkable, prepare_for_doom=True)
        graph = self.decorate_graph(graph, roommap, heightmap, thingsmap)
        self.from_graph(graph, place_enemies=place_enemies)
    def decorate_graph(self, G, roommap, heightmap, thingsmap):
        """
        Adds information about the heightmap and the thingsmap in the region adjacency graph.

        Node 0 is treated as the "outside" region and excluded. For each connected
        component ("floor") a spanning tree is built, heights along paths are
        clamped so consecutive rooms differ by at most 24 units, and start/exit/
        teleporter locations are chosen and stored as node attributes.

        :param roommap: labeled image mapping pixels to room ids
        :param heightmap: per-pixel floor heights
        :param thingsmap: per-pixel thing indices
        :return: updated G
        """
        # Selecting candidates for starting and exiting nodes:
        # leaves of the spanning tree are the most suitable
        # Connected components (floors)
        H = G.copy()
        # print(G)
        H.remove_node(0)
        floors = sorted(nx.connected_components(H), key=len, reverse=True)
        # print(floors)
        level_solution = list()
        corrected_heights = dict()
        for id, floor_rooms in enumerate(floors):
            # Creating a spanning tree for each floor
            F = H.subgraph(floor_rooms)
            T = nx.minimum_spanning_tree(F)
            degree = dict(T.degree())
            # Entry point has minimum node degree
            floor_entry = min(degree, key=degree.get)
            # Finding all paths in the level to determine the best exit (longest path)
            paths = list()
            for n in T.nodes():
                p = list(nx.all_simple_paths(T, floor_entry, n))
                if len(p) > 0:
                    paths += p
                else:
                    # If a floor has a single room then there are no path from n to n and a max cannot be calculated
                    paths += [[n]]
            floor_solution = max(paths, key=len)
            # Only the largest floor contributes its longest path to the level solution.
            # NOTE(review): this appends only once; smaller floors are traversed via teleporters below.
            if floor_rooms==max(floors, key=len):
                level_solution.append(floor_solution)
            # Fixing the heights along all paths so every path becomes walkable
            for path in paths:
                for rid, room in enumerate(path):
                    if room not in corrected_heights:
                        # Median height of the room's pixels (NaN outside the room).
                        height = np.nanmedian(np.where(roommap == room, heightmap, np.nan))
                        if rid > 0:
                            # Alter this room height to be walkable
                            # (Doom players can step up at most 24 map units)
                            if height > path[rid-1] + 24:
                                height = path[rid-1] + 24
                        corrected_heights[room] = int(height)
        nx.set_node_attributes(G, corrected_heights, "height")
        for id, floor_path in enumerate(level_solution):
            if id == 0:
                # Place the level start
                start_x, start_y = G.nodes[floor_path[0]]["centroid"]
                nx.set_node_attributes(G, {floor_path[0]: {"location": (start_x, start_y)}}, "level_start")
            else:
                # place a teleport source
                possible_places = np.stack(np.where(roommap==floor_path[0]), axis=1)
                random_pixel_index = np.random.choice(possible_places.shape[0])
                x, y = possible_places[random_pixel_index]
                nx.set_node_attributes(G, {floor_path[0]: {"location": (x, y)}}, "floor_start")
            if id == len(level_solution)-1:
                # This is the last floor to visit, place the level exit
                possible_places = np.stack(np.where(roommap == floor_path[-1]), axis=1)
                random_pixel_index = np.random.choice(possible_places.shape[0])
                x, y = possible_places[random_pixel_index]
                nx.set_node_attributes(G, {floor_path[-1]: {"location": (x, y)}}, "level_exit")
            else:
                # There's another unvisited floor, place a teleporter to the next floor
                possible_places = np.stack(np.where(roommap==floor_path[-1]), axis=1)
                random_pixel_index = np.random.choice(possible_places.shape[0])
                x, y = possible_places[random_pixel_index]
                nx.set_node_attributes(G, {floor_path[-1]: {"destination":level_solution[id+1][0], "location": (x, y)}}, "floor_exit")
        level_objects = {}
        # Scanning the room for objects
        for room in H.nodes():
            things_in_room = (roommap == room)*thingsmap
            # Unique nonzero thing pixel values in this room (0 = no thing).
            things_pixels_indices = np.delete(np.unique(things_in_room), 0)
            # Converting thing pixels to doom types, need to search from essentials
            things_types = [get_type_id_from_index(i, essential=True) for i in things_pixels_indices]
            categories = [get_category_from_type_id(t) for t in things_types]
            things_dict = {}
            for thing_id, thing_type, thing_cat in zip(things_pixels_indices, things_types, categories):
                # skipping generated player starts teleports and keys since they are placed statically
                if thing_cat is not None and thing_cat not in ["other", "start", "keys"]:
                    if thing_cat not in things_dict:
                        things_dict[thing_cat] = {}
                    if thing_type not in things_dict[thing_cat]:
                        things_dict[thing_cat][thing_type] = []
                    x_list, y_list = np.where(things_in_room==thing_id)
                    for x, y in zip(x_list, y_list):
                        things_dict[thing_cat][thing_type].append((x, y))
            level_objects[room] = things_dict
        nx.set_node_attributes(G, level_objects, "things")
        return G
    def from_graph(self, graph, place_enemies=True):
        """
        Builds a level exploiting information stored in the room adjacency graph. Treat each room as a different sector.
        :param graph: decorated region adjacency graph (see decorate_graph)
        :param place_enemies: if True, also place the things recorded on each node
        :return:
        """
        edge_attr_sidedef = dict() # Dictionary for updating edge attributes
        node_attr_sectors = dict() # Dictionary for updating node attributes
        heights = nx.get_node_attributes(graph, "height")
        # Creating a sector for each room
        for n in graph.nodes():
            # Node 0 is the outside region: no sector for it.
            if n == 0:
                continue
            # Create a sector
            node_attr_sectors[n] = self.lumps['SECTORS'].add_sector(floor_height=int(heights[n]), ceiling_height=128+int(max(heights.values())), floor_flat='FLOOR0_1', ceiling_flat='FLOOR4_1', lightlevel=255,
                                                                    special_sector=0, tag=int(n))
        nx.set_node_attributes(graph, node_attr_sectors, 'sector_id')
        # Creating two sidedefs for each edge and the corresponding linedef
        for i, j in graph.edges():
            if i == 0:
                # Edge to the outside: solid wall.
                # linedef flag is impassable and the right sidedef is j
                j_walls = graph.nodes[j]["walls"]
                walls = [w for w in j_walls if w[1] == i or w[1] is None] # Check this if there's any problem in the corners
                for wall_piece in walls:
                    start = self.lumps['VERTEXES'].add_vertex(self._rescale_coords(wall_piece[0][0]))
                    end = self.lumps['VERTEXES'].add_vertex(self._rescale_coords(wall_piece[0][1]))
                    # - Right sidedef is j
                    right_sidedef = self.lumps['SIDEDEFS'].add_sidedef(x_offset=0, y_offset=0,
                                                                       upper_texture='BRONZE1',
                                                                       lower_texture='BRONZE1',
                                                                       middle_texture='BRONZE1',
                                                                       sector=graph.nodes[j]["sector_id"])
                    # - Make a linedef
                    linedef = self.lumps['LINEDEFS'].add_linedef(start, end, flags=linedef_flags_to_int(impassable=True), types=0,
                                                                 trigger=0, right_sidedef_index=right_sidedef)
                    # Save the linedef into the edge
                    if (i,j) not in edge_attr_sidedef:
                        edge_attr_sidedef[(i,j)] = list()
                    edge_attr_sidedef[(i, j)].append(right_sidedef)
            else:
                # Edge between two rooms: two-sided invisible boundary.
                i_walls = graph.nodes[i]["walls"]
                # linedef is invisible
                # Get the boundaries from i to j
                walls_ij = [w for w in i_walls if w[1] == j]
                for wall_piece in walls_ij:
                    start = self.lumps['VERTEXES'].add_vertex(self._rescale_coords(wall_piece[0][0]))
                    end = self.lumps['VERTEXES'].add_vertex(self._rescale_coords(wall_piece[0][1]))
                    # - Right sidedef is i
                    right_sidedef = self.lumps['SIDEDEFS'].add_sidedef(x_offset=0, y_offset=0,
                                                                       upper_texture='BRONZE1',
                                                                       lower_texture='BRONZE1',
                                                                       middle_texture='-',
                                                                       sector=graph.nodes[i]["sector_id"])
                    # - Left sidedef is j (in j list there's the reversed linedef)
                    left_sidedef = self.lumps['SIDEDEFS'].add_sidedef(x_offset=0, y_offset=0,
                                                                      upper_texture='BRONZE1',
                                                                      lower_texture='BRONZE1',
                                                                      middle_texture='-',
                                                                      sector=graph.nodes[j]["sector_id"])
                    # - Make a linedef
                    linedef = self.lumps['LINEDEFS'].add_linedef(start, end, flags=linedef_flags_to_int(twosided=True), types=0,
                                                                 trigger=0, right_sidedef_index=right_sidedef,
                                                                 left_sidedef_index=left_sidedef)
                    # Save the linedef into the edge
                    if (i, j) not in edge_attr_sidedef:
                        edge_attr_sidedef[(i, j)] = list()
                    edge_attr_sidedef[(i, j)].append(right_sidedef)
                    edge_attr_sidedef[(i, j)].append(left_sidedef)
        # Actually update edge attributes
        nx.set_edge_attributes(graph, edge_attr_sidedef, 'sidedef')
        if place_enemies:
            # THINGS PLACEMENT
            level_things = nx.get_node_attributes(graph, "things")
            for n, catlist in level_things.items():
                for cat, thinglist in catlist.items():
                    for thingtype, coordlist in thinglist.items():
                        for coord in coordlist:
                            # THIS IS A FIX FOR AVOIDING TOO MANY BOSSES IN THE LEVEL
                            # if cat == "monsters":
                            #     thingtype = self._get_random_enemy()
                            x, y = self._rescale_coords(coord)
                            self.add_thing(x, y, thingtype)
        # START AND TELEPORTERS
        for n, l_start in nx.get_node_attributes(graph, "level_start").items():
            x,y = self._rescale_coords(l_start["location"])
            self.set_start(x, y)
            print("Setting Start at {},{}".format(x,y))
        for n, f_start in nx.get_node_attributes(graph, "floor_start").items():
            x, y = self._rescale_coords(f_start["location"])
            self.add_teleporter_destination(x, y)
            print("Setting teleport dest at {},{}".format(x, y))
        for n, f_exit in nx.get_node_attributes(graph, "floor_exit").items():
            x, y = self._rescale_coords(f_exit["location"])
            self.add_teleporter_source(x, y, to_sector=f_exit["destination"], inside=int(n))
            print("Setting teleport source at {},{}".format(x, y))
        for n, l_exit in nx.get_node_attributes(graph, "level_exit").items():
            x, y = self._rescale_coords(l_exit["location"])
            self.add_level_exit(x, y, inside=int(n), floor_height=int(graph.nodes[n]["height"])-16, ceiling_height=128+int(graph.nodes[n]["height"])-16)
            print("Setting Exit at {},{}".format(x,y))
    def add_teleporter_destination(self, x, y):
        """Place a teleporter landing spot (Doom thing type 14) at (x, y)."""
        self.add_thing(x, y, thing_type=14)
    def add_teleporter_source(self, x, y, to_sector, inside, size=32):
        """
        Place a teleporter cell to a sector
        :param x: x coordinate of the beacon
        :param y: y coordinate of the beacon
        :param to_sector: destination sector
        :param inside: Sector number in which this teleporter is placed
        :param size: size of the teleporter
        :return: None
        """""
        x=int(x)
        y=int(y)
        to_sector=int(to_sector)
        halfsize=size//2
        # Counter-clockwise square centred on (x, y); CCW order makes add_sector
        # treat it as an inner sector inside `inside`.
        vertices = list(reversed([(x-halfsize, y+halfsize),(x+halfsize, y+halfsize),(x+halfsize, y-halfsize),(x-halfsize, y-halfsize)]))
        # Linedef type 97 = walk-over teleport; trigger tag selects the destination sector.
        self.add_sector(vertices, floor_flat='GATE1', kw_sidedef={'upper_texture':'-', 'lower_texture':'-', 'middle_texture':'-'}, kw_linedef={'flags':4, 'type':97, 'trigger': to_sector}, surrounding_sector_id=inside)
    def set_start(self, x, y):
        """Place the Player 1 start (thing type 1) at (x, y)."""
        self.lumps['THINGS'].add_thing(int(x), int(y), angle=0, type=1, options=0)
    def add_thing(self, x,y, thing_type, options=7, angle=0):
        """Place a thing of the given Doom type at (x, y); no-op when thing_type is None."""
        if thing_type != None:
            self.lumps['THINGS'].add_thing(int(x), int(y), angle=angle, type=int(thing_type), options=options)
    def add_door(self,vertices_coords, parent_sector, tag=None, remote=False, texture='DOORTRAK'):
        """
        adds a door with a given tag. If tag is left unspecified, then it will be equal to the sector index.
        :param vertices_coords: door sector outline (see add_sector ordering rules)
        :param parent_sector: sector id the door lives in
        :param tag: sector tag; defaults to the new sector's index
        :param remote: if True the door is remotely triggered (linedef type 0 instead of 1)
        :param texture: texture for the door tracks
        :return: the new sector id
        """
        # Door sector: ceiling starts at the parent's floor height (door closed).
        height = self.lumps['SECTORS'][parent_sector]['floor_height']
        # NOTE(review): type 0 for remote doors looks suspicious (0 = no special) — confirm intended linedef type.
        type = 1 if not remote else 0
        tag = len(self.lumps['SECTORS']) if tag is None else tag
        return self.add_sector(vertices_coords, ceiling_height=height, kw_sidedef={'upper_texture':texture, 'lower_texture':texture, 'middle_texture':'-'}, kw_linedef={'type':type, 'flags':4, 'trigger':0}, tag=tag, surrounding_sector_id=parent_sector, hollow=False)
    def add_level_exit(self, x, y, inside, floor_height, ceiling_height, size=32):
        """
        Place the level exit
        :param x: x coordinate of the beacon
        :param y: y coordinate of the beacon
        :param inside: Sector number in which this teleporter is placed
        :param floor_height: floor height of the exit pad
        :param ceiling_height: Ceiling height for the "EXIT" sign
        :param size: size of the teleporter
        :return: None
        """""
        x=int(x)
        y=int(y)
        halfsize=size//2
        # Counter-clockwise square: inner sector inside `inside` (see add_sector).
        vertices = list(reversed([(x-halfsize, y+halfsize),(x+halfsize, y+halfsize),(x+halfsize, y-halfsize),(x-halfsize, y-halfsize)]))
        # Linedef type 52 = walk-over exit level.
        self.add_sector(vertices, floor_flat='GATE1', kw_sidedef={'upper_texture':'EXITSIGN', 'lower_texture':'-', 'middle_texture':'-'}, kw_linedef={'flags':4, 'type':52, 'trigger': 0}, surrounding_sector_id=inside, floor_height=floor_height,
                        ceiling_height=ceiling_height)
    def add_trigger(self, vertices_coords, parent_sector, trigger_type, trigger_tag, texture='SW1CMT'):
        """Add a switch/trigger sector inside parent_sector with the given linedef type and tag."""
        return self.add_sector(vertices_coords,
                               kw_sidedef={'upper_texture': 'BRONZE1', 'lower_texture': 'BRONZE1', 'middle_texture': texture},
                               kw_linedef={'type': trigger_type, 'flags': 1, 'trigger': trigger_tag}, tag=0,
                               surrounding_sector_id=parent_sector, hollow=False)
    def add_sector(self, vertices_coords, floor_height=0, ceiling_height=128, floor_flat='FLOOR0_1', ceiling_flat='FLOOR4_1', lightlevel=256, special=0, tag=0, surrounding_sector_id=None, hollow=False, kw_sidedef=None, kw_linedef=None):
        """
        Adds a sector with given vertices coordinates, creating all the necessary linedefs and sidedefs and return the relative
        sector id for passing the reference to other sectors or objects if needed
        :param vertices_coords: Vertices coordinates (x,y),(x2,y2).. If given in CLOCKWISE order then the room will have
        its right linedefs facing INWARD, the left one can be left unspecified (i.e. sorrounding_sector_id = None e.g. for the outermost sector)
        and the hollow parameter has no effect.
        If vertices are in COUNTER-CLOCKWISE order, then you are defining a sector with RIGHT SIDEDEFS facing outside,
        for that reason the sorrounding_sector_id parameter is mandatory and you are creating a sector inside another sector,
        like a column, a wall or a door. You can set if the linedefs can contain an actual sector or not with the "hollow" parameter.
        :param floor_height: height of the floor in doom map units
        :param ceiling_height: height of the ceiling in doom map units
        :param floor_flat: flat (texture) name for the floor
        :param ceiling_flat: flat (texture) name for the ceiling
        :param lightlevel: sector brightness
        :param special: special sector type
        :param tag: sector tag used by triggers
        :param kw_sidedef: (Optional) dict of sidedef texture parameters ('upper_texture', 'lower_texture', 'middle_texture'). Default: {'upper_texture':'-', 'lower_texture':'-', 'middle_texture':'BRONZE1'}
        :param surrounding_sector_id: sector id (returned by this function itself) for the sector that surrounds the one you are creating. Can be None only if the vertices are specified in clockwise order, since a linedef must have a sector on its right side.
        :param hollow: Has effect only for counter-clockwise specified sectors. Determines if the sector you are creating does actually contains a sector (like for doors) or it's just a hole surrounded by walls/linedefs, like the column or other static structures. Default to False.
        :param kw_linedef: (Optional) A list of dictionaries containing the parameters for each linedef, or a single dictionary if all linedefs share the same parameters. Must have the indices: 'type', 'trigger' and 'flags'. Default: {'type':0, 'flags':17, 'trigger':0}
        :return: the id of the newly created sector
        """
        # In order to add a sector one must add:
        # A vertex for each vertex in the sector, only if not already present
        # a linedef for each edge, if not already present
        # a sidedef for each linedef, put on the correct side of the linedef
        # the sector lump itself
        # Create a sector
        sector_id = self.lumps['SECTORS'].add_sector(floor_height, ceiling_height, floor_flat, ceiling_flat, lightlevel, special, tag)
        # Create and lookup vertices
        vertices_id = list()
        for v in vertices_coords:
            v_id = self.lumps['VERTEXES'].add_vertex(v)
            vertices_id.append(v_id)
        clockwise, _ = self._sector_orientation(vertices_coords)
        # Iterate over (v0, v1), (v1, v2), ..., (vn-1, vn).
        # Adding the first element at the end for closing the polygon
        startiter, enditer = itertools.tee(vertices_id+[vertices_id[0]], 2)
        next(enditer, None)  # shift the end iterator by one
        for segcounter, (start, end) in enumerate(zip(startiter, enditer)):
            if kw_sidedef is None:
                kw_sidedef = {'upper_texture':'-', 'lower_texture':'-', 'middle_texture':'BRONZE1'}
            if clockwise:
                # The room has the right sidedef facing surrounding_sector_id, toward the new sector
                right_sidedef = self.lumps['SIDEDEFS'].add_sidedef(x_offset=0, y_offset=0, upper_texture=kw_sidedef['upper_texture'], lower_texture=kw_sidedef['lower_texture'],
                                                                   middle_texture=kw_sidedef['middle_texture'], sector=sector_id)
                if surrounding_sector_id is not None:
                    left_sidedef = self.lumps['SIDEDEFS'].add_sidedef(x_offset=0, y_offset=0, upper_texture=kw_sidedef['upper_texture'], lower_texture=kw_sidedef['lower_texture'], middle_texture=kw_sidedef['middle_texture'], sector=surrounding_sector_id)
                else:
                    # -1 marks "no sidedef" (one-sided linedef).
                    left_sidedef=-1
            else:
                # The room has the right sidedef facing outside, towards the sorrounding sector
                right_sidedef = self.lumps['SIDEDEFS'].add_sidedef(x_offset=0, y_offset=0,
                                                                   upper_texture=kw_sidedef['upper_texture'],
                                                                   lower_texture=kw_sidedef['lower_texture'],
                                                                   middle_texture=kw_sidedef['middle_texture'],
                                                                   sector=surrounding_sector_id)
                if not hollow:
                    left_sidedef = self.lumps['SIDEDEFS'].add_sidedef(x_offset=0, y_offset=0,
                                                                      upper_texture=kw_sidedef['upper_texture'],
                                                                      lower_texture=kw_sidedef['lower_texture'],
                                                                      middle_texture=kw_sidedef['middle_texture'],
                                                                      sector=sector_id)
                else:
                    left_sidedef = -1
            # Linedef creation/Lookup
            if kw_linedef is None:
                linedef_params = {'type':0, 'flags':17, 'trigger':0}
            elif isinstance(kw_linedef, dict):
                linedef_params = kw_linedef
            elif isinstance(kw_linedef, list):
                # Per-segment parameters: one dict per polygon edge.
                linedef_params = kw_linedef[segcounter]
            else:
                raise ValueError("kw_linedef can only be None, a Dict or a list of Dict.")
            self.lumps['LINEDEFS'].add_linedef(start, end, flags=linedef_params['flags'], types=linedef_params['type'], trigger=linedef_params['trigger'], right_sidedef_index=right_sidedef, left_sidedef_index=left_sidedef)
        return sector_id
    def _commit_level(self):
        """
        Writes the set of lumps to the WAD object
        :return:
        """
        assert self.current_level is not None, "Cannot write a level with an empty name"
        # Create a new level descriptor in the lump directory
        self.wad.add_lump(self.current_level, None)
        # Add the lumps to WAD file
        self.wad.add_lump('THINGS', self.lumps['THINGS'])
        self.wad.add_lump('LINEDEFS', self.lumps['LINEDEFS'])
        self.wad.add_lump('SIDEDEFS', self.lumps['SIDEDEFS'])
        self.wad.add_lump('VERTEXES', self.lumps['VERTEXES'])
        self.wad.add_lump('SECTORS', self.lumps['SECTORS'])
        # Reset the per-level working lumps for the next level.
        self.lumps = {'THINGS':Things(), 'LINEDEFS':Linedefs(), 'VERTEXES':Vertexes(),'SIDEDEFS': Sidedefs(), 'SECTORS':Sectors()}
    def add_level(self, name='MAP01'):
        """Start a new level named *name*, committing the previous one if any."""
        # If it's not the first level, then commit the previous
        if self.current_level is not None:
            self._commit_level()
        self.current_level = name
    def save(self, fp, call_node_builder=True):
        """Commit the last level, write the WAD to *fp* and optionally run the node builder."""
        # Always commit the last level
        self._commit_level()
        wad_bytes = self.wad.to_bytes()
        with open(fp,'wb') as out:
            out.write(wad_bytes)
        if call_node_builder:
            # Calling ZenNode to build subsectors and other lumps needed to play the level on doom
            # NOTE(review): passing a list with shell=True ignores the extra args on POSIX;
            # use shell=False with the list, or a single command string — confirm target platform.
            print('Calling ZenNode...')
            subprocess.check_call(["ZenNode", fp, '-o', fp],shell=True)
class WADReader(object):
    """Batch reader for WAD files: parses WADs, extracts per-level feature maps and saves them."""
    def read(self, w):
        """
        Reads a wad file representing it as a dictionary
        :param w: path of the .WAD file
        :return: dict with keys 'wad_name', 'wad' (the parsed WAD object) and 'errors'
        """
        with open(w, 'rb') as file:
            wad_name = w.split('/')[-1]
            wad = WAD('R').from_bytes(file.read())
            record = {'wad_name': wad_name, 'wad': wad, 'errors': list()}
            if len(record['wad'].errors) > 0:
                # Non-fatal errors still produce a (possibly degraded) parse.
                if (not record['wad'].errors[0]['fatal']):
                    print("{}: Malformed file, results may be altered".format(w))
                else:
                    print("{}: Fatal error in file structure, skipping file.".format(w))
                record['errors'] += record['wad'].errors
            return record
    def save_sample(self, wad, path, root_path = '', wad_info=None):
        """
        Saves the wad maps (as .png) and features (as .json) to the "path" folder for each level in the wad.
        Also adds the produced file paths to the level features,
        if root_path is set then these paths are relative to that folder instead of being absolute paths
        if wad_info is not None, then adds its fields as features
        :param wad: the parsed wad file to save as feature maps
        :param path: the output folder
        :param root_path: the path to which the paths stored in the features should be relative to
        :param wad_info: optional dict of extra metadata (author, title, ...) merged into the features
        :return: None
        """
        os.makedirs(path, exist_ok=True)
        for level in wad['levels']:
            base_filename = path + wad['wad_name'].split('.')[-2] + '_' + level['name']
            # Path relative to the dataset root that will be stored in the database
            relative_path = base_filename.replace(root_path, '')
            # Adding the features
            for map in level['maps']:
                # Adding the corresponding path as feature for further access
                level['features']['path_{}'.format(map)] = relative_path + '_{}.png'.format(map)
                io.imsave(base_filename + '_{}.png'.format(map), level['maps'][map])
            # FIX: guard against wad_info=None (the default of extract's update_record),
            # which previously raised TypeError on iteration.
            if wad_info is not None:
                for wadinfo in wad_info:
                    # Adding wad info (author, etc) to the level features.
                    if wadinfo not in level['features']:  # Computed features have priority over provided features
                        level['features'][wadinfo] = wad_info[wadinfo]
            # Completing the features with the level slot
            level['features']['slot'] = level['name']
            # Doing the same for the other features
            level['features']['path_json'] = relative_path + '.json'
            with open(base_filename + '.json', 'w') as jout:
                json.dump(level['features'], jout)
            # Saving the text representation
            with open(base_filename + '.txt', 'wb') as txtout:
                txtout.writelines([bytes(row + [10]) for row in level['text']])
            # Saving the graph
            if 'graph' in level:
                with open(base_filename + '.networkx', 'wb') as graphout:
                    nx.write_gpickle(level['graph'], graphout)
    def extract(self, wad_fp, save_to=None, root_path=None, update_record=None):
        """
        Compute the image representation and the features of each level contained in the wad file.
        If 'save_to' is set, then also do the following:
        - saves a json file for each level inside the 'save_to' folder
        - saves the set of maps as png images inside the 'save_to' folder
        - adds the relative path of the above mentioned files as level features
        Morover, if 'save_to' is set, then you may want to specify a 'root_path' for avoiding to save the full path in the features.
        If 'update_record' is set to a json dictionary (perhaps containing info about the wad author, title, etc),
        then this function stores all the update_record fields into the level features dictionary.
        :return: Parsed Wad, or None on fatal parse errors or multi-level wads
        """
        parsed_wad = self.read(wad_fp)
        for error in parsed_wad['errors']:
            if error['fatal']:
                return None
        parsed_wad['levels'] = list()
        if len(parsed_wad['wad'].levels) == 1:  # Consider only single level
            for level in parsed_wad['wad'].levels:
                try:
                    extractor = WADFeatureExtractor()
                    features, maps, txt, graph = extractor.extract_features_from_wad(level)
                    parsed_wad['levels'] += [{'name': level['name'], 'features': features, 'maps': maps, 'text': txt, 'graph': graph}]
                # FIX: narrow the bare `except:` so KeyboardInterrupt/SystemExit propagate;
                # extraction failures are still reported as warnings (best-effort behavior kept).
                except Exception:
                    warnings.warn("Failed to extract data for level {}".format(level['name']))
            if save_to is not None:
                self.save_sample(parsed_wad, save_to, root_path, update_record)
            return parsed_wad
        else:
            return None
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.