index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
14,000 | bcf1de927127300be85a1cb869fed97a7eeb8f8f | import csv
import time
import re
import nltk
import numpy as np
import sys
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from convertor import convert_docs_in_dic
# Output: one TSV row per (query, predicted paragraph, rank).
TFIDF_FILE = "./data/output/tfidf_predictions_optimized.tsv"
# Input: NarrativeQA evaluation split in the project's .eval format.
BOOK_EVAL_FILE = "./data/processed/narrativeqa_all.eval"
def compute_tfidf(query, story_id, paragraphs, vect_paragraphs, vectorizer, n):
    """Return the ids of the n paragraphs most similar to `query`.

    The query is projected into the already-fitted TF-IDF space and
    compared to every paragraph vector with cosine similarity.  Each
    entry of `paragraphs` starts with its paragraph id, which is what
    gets returned.  `story_id` is accepted for interface compatibility
    but not used here.
    """
    query_vec = vectorizer.transform([query])
    similarities = cosine_similarity(query_vec, vect_paragraphs).flatten()
    # Indices of the n highest-scoring paragraphs, best first.
    top_indices = np.argsort(similarities)[::-1][:n]
    # The paragraph id is the first whitespace-separated token.
    return [paragraphs[idx].split(" ")[0] for idx in top_indices]
def gather_tfidf(dataset, vectorizer, n, attach_answer):
    """Build a {query_id: [paragraph ids]} map of TF-IDF predictions.

    The vectorizer is re-fitted per story on that story's paragraphs;
    each query (optionally concatenated with its two reference answers)
    is then matched against them.
    """
    predictions = {}
    for story_id, story in dataset.items():
        paragraph_texts = ["{} {}".format(pid, body)
                           for pid, body in story['paragraphs'].items()]
        paragraph_vectors = vectorizer.fit_transform(paragraph_texts)
        for query_id, details in story['queries'].items():
            if attach_answer:
                query_text = "{} {} {}".format(details['query'],
                                               details['answer1'],
                                               details['answer2'])
            else:
                query_text = details['query']
            predictions[query_id] = compute_tfidf(query_text.lower(),
                                                  story_id,
                                                  paragraph_texts,
                                                  paragraph_vectors,
                                                  vectorizer,
                                                  n)
    return predictions
def write_tfidf_pred(dataset, vectorizer, n, attach_answer):
    """Dump ranked TF-IDF predictions to TFIDF_FILE.

    Output is tab-separated: query_id, paragraph id, 1-based rank.
    """
    with open(TFIDF_FILE, "w") as out:
        ranked = gather_tfidf(dataset, vectorizer, n, attach_answer)
        for query_id, paragraph_ids in ranked.items():
            for rank, pid in enumerate(paragraph_ids, start=1):
                out.write("{}\t{}\t{}\n".format(query_id, pid, rank))
def main():
    """Run oracle-style TF-IDF paragraph retrieval over the eval split."""
    # Unigram+bigram TF-IDF with NLTK tokenization; nltk's tokenizer
    # models must be installed for word_tokenize to work.
    vectorizer = TfidfVectorizer(tokenizer=nltk.word_tokenize, ngram_range=(1,2))
    # Load the evaluation split into {story_id: {...}} form.
    dataset = convert_docs_in_dic(BOOK_EVAL_FILE)
    # Keep the top-3 paragraphs per query; the gold answers are attached
    # to the query text (oracle retrieval).
    write_tfidf_pred(dataset, vectorizer, n=3, attach_answer=True)
if __name__=="__main__":
    main()
|
14,001 | 43f020b380935cb022fe37020aca3240feda04cf | import numpy as np
import scipy
import sympy
from numpy import linalg as lg
from numpy.linalg import solve
from numpy.linalg import eig
from scipy.integrate import quad
# Question 1
'''
If u = [x, y]^T is in W, then the vector cu = c[x, y]^T = [cx, cy]^T is in W because (cx)(cy) = c^2(xy) <= 0, since xy <= 0.
Part B
[0 2]
[2 0]
'''
# Question 2
'''
The set is a subspace of set of prime numbers P 5. The set contains the zero vector of set of prime numbers P 5, the set is closed under vector addition, and the set is closed under multiplication by scalars.
'''
# Question 3
'''
A.
The coefficients form the vectors.
B.
W = Span{U,V}
C.
Since u and v are in R^4 and W = Span{u, v}, W is a subspace of R^4.
'''
# Question 4
'''
W is not a vector space because the zero vector and most sums and scalar multiples of vectors in W are not in W, because their second (middle) value is not equal to negative 3.
'''
# Question 5
'''
No, because:
Aw = [14]
[14]
[ 3]
'''
# Question 6
'''
Spanning set for Nul A is:
[-3] [0]
[ 1] [0]
[ 0] [0]
[ 0] [1]
# Question 7
'''
# The set W is a subset of set of real numbers R Superscript 4. If W were a vector space, what else would be true about it?
#A.
#The set W would be a subspace of set of real numbers R Superscript 4.
#B.
#The zero vector is not in W. There is no t and s such that the vector equation is satisfied.
#C.
#Since the zero vector is not in W, W is not a subspace of set of real numbers R Superscript 4. Thus W is not a vector space.
'''
# Question 8
# A.
# k = 3
# B.
# k = 2
'''
# Question 9
# Question 9: row-reduce the 4x2 matrix to expose Nul A and Col A.
mat = sympy.Matrix([[9, 3], [3, 1], [-12, -4], [6,2]])
print(mat)
print(mat.rref())
# Nonzero vector in Nul A is:
'''
[ 1]
[-3]
Nonzero vector in Col A is:
[ 9]
[ 3]
[-12]
[ 6]
'''
# Question 10
'''
The set spans set of real numbers R cubed.
The set is linearly independent.
The set is a basis for set of real numbers R cubed.
'''
# Question 11
# Question 11: rref of the augmented 3x6 homogeneous system, used below
# to read off the parametric vector form of the solution set.
mat2 = sympy.Matrix([[1, 1, -1, -1, 2, 0], [0, 1, 0, -2, -3, 0], [0, 0, -8, 0, 16, 0]])
print(mat2)
print(mat2.rref())
# The rref represents the equations:
'''
x1 + 0 + 0 x4 + 3x5 = 0
0 +x2 + 0-2x4- 3x5 = 0
0 + 0 +x3 + 0 -2x5 = 0
[x1] = -x4 - 3x5
[x2] = 2x4 + 3x5
[x3] = 2x5
[x4] = x4
[x5] = x5
[-1]
[ 2]
x4 [ 0]
[ 1]
[ 0]
[-3]
[ 3]
x5 [ 2]
[ 0]
[ 1]
# Correct
'''
# Question 12
'''
A.
The statement is false because the subspace spanned by the set must also coincide withnbspH.
B.
The statement is true by the Spanning Set Theorem.
C.
The statement is true by the definition of a basis.
D.
The statement is false because the method always produces an independent set.\
E.
The statement is false because the columns of an echelon form B of A are not necessarily in the column space of A.
'''
# Question 13
'''
[-4]
x = [4]
[-8]
'''
# Question 14
# Question 14: rref of the 3x4 matrix to check linear dependence of the
# columns / solve the associated system.
mat3 = sympy.Matrix([[1, 3, 1, -1], [-1, -2, -1, 0], [-2, -6, 5, 9]])
print(mat3)
print(mat3.rref())
# Question 15
# Form a matrix from the two vectors. It is identical.
|
14,002 | 3b91f72272c3a6f68e72cb75d6fd9740b8a37f03 | """Create and solve SMT problems related to the search of characteristics."""
|
14,003 | ebcd38d15a2c9816a80e72cd0a1dd3408d122848 | import sys
import os
import re
import operator as o
#### docs ############
# to append to notes.txt use . as first arg followed by lines
# to append to other textfiles in cwd set in first arg
# if thetextfile does not exist don't worry
#### improvements ####
# delete lines based on numbers
# other cmd functionalities
# ...show txt
# ...show jpg
# Mode dispatch based on the first CLI argument:
#   '<name>.txt' or '.'  -> operate on a text file ('texxt' family)
#   '...<cmd>'           -> global command mode
modd = 'empty'
if sys.argv[1][-4:] == '.txt' or sys.argv[1] == '.':
    modd = 'texxt'
    # '.' is shorthand for the default notes file.
    filename = 'notes.txt' if sys.argv[1] == '.' else sys.argv[1]
if modd == 'texxt' and sys.argv[2][:3] != '...':
    # Plain text arguments: append them as new numbered lines.
    modd = 'io'
    morelines = sys.argv[2:]
if modd == 'texxt' and sys.argv[2][:3] == '...':
    # '...' prefix on the second argument: file-scoped command.
    modd = 'txtfilecmd'
    moretxtcmd = sys.argv[2:]
if sys.argv[1][:3] == '...':  # do i need this
    modd = 'cmd'
    morecmd = sys.argv[1:]
if modd == 'txtfilecmd':
    if sys.argv[2] == '...del':
        # Delete lines by 0-based index: blank them out in memory, then
        # rewrite the whole file.
        note2 = open(filename, 'r+')
        lineslist = [x for x in note2.readlines()]
        # set() drops duplicate indices from the argument list.
        linestodel = list(set(sys.argv[3:]))
        for dels in linestodel:
            lineslist[int(dels)] = ''
        slicedtext = ''.join(lineslist)
        # NOTE(review): the 'r+' handle above is never closed before the
        # file is reopened for writing.
        note2 = open(filename, 'w+')
        note2.write(slicedtext)
        note2.close()
if modd == 'texxt':
    # Create the target file if it does not exist in the cwd yet.
    if filename not in os.listdir(os.getcwd()):
        a = open(filename, 'w+')
        a.write('')
        a.close()
def getlinecount():
    """Return the notes file's line count as a zero-padded string
    followed by a space, e.g. '07 '.

    The count is the number of newline characters already in the file,
    so it doubles as the number prefix of the next appended line.
    Fixes over the original: the file handle is closed even on error
    (with-statement) and the newline count uses str.count instead of
    building a throwaway list.
    """
    with open(filename, 'r') as note:
        newline_total = note.read().count('\n')
    # zfill(2) reproduces the old "prefix '0' when < 10" padding and
    # leaves 3+ digit counts unchanged.
    return str(newline_total).zfill(2) + " "
def appendlines(argy):
    """Append one numbered line to the notes file.

    Fixes over the original: the bare `except:` (which even swallowed
    KeyboardInterrupt) is narrowed to `except Exception`, the file is
    closed via a with-statement, and the print statements are written
    in single-argument parenthesised form, which is valid on both
    Python 2 and Python 3.
    """
    try:
        with open(filename, 'a') as note:
            note.write(getlinecount() + argy + "\n")
        print('all good: ' + argy)
    except Exception:
        print('!!! Check your arguments.')
if modd == 'io':
    # Append every remaining argument as its own numbered line.
    if len(morelines) == 1: appendlines(morelines[0])
    else:
        i = 0
        for xs in morelines:
            appendlines(morelines[i])
            i = i + 1
if sys.argv[1] == '...rspace':
    # Bulk-rename files in the cwd, lowercasing and replacing `froms`
    # with `tos`.  NOTE(review): this relies on Python 2's eager map();
    # under Python 3 map() is lazy and the os.rename calls would never
    # execute -- confirm the intended interpreter.
    froms = sys.argv[2]
    tos = sys.argv[3]
    formatGood = map( lambda sa: sa.lower().replace(froms, tos), os.listdir( "./" ) )
    map( (lambda oldy, newy: os.rename( oldy, newy )), os.listdir( "./" ), formatGood )
|
14,004 | f79345638d6d8add93437d98ffccb2375f501f7e | __version__ = "0.4.3"
default_app_config = 'rest_registration.apps.RestRegistrationConfig'
|
14,005 | 39aa5b43986c63ec68bef1de84c2f95a7ddbb93c | from django.urls import path
from .views import (
CommentListView,
CommentDetailView,
CommentCreateView,
CommentUpdateView,
CommentDeleteView
)
# URL namespace for reversing, e.g. reverse('comments:comment-detail').
app_name = 'comments'
urlpatterns = [
    path('', CommentListView.as_view(), name='comment-list'),
    path('<int:pk>/', CommentDetailView.as_view(), name='comment-detail'), #show pk vs. id
    # Creation is keyed by the book the comment belongs to, not the
    # comment pk (the comment does not exist yet).
    path('<int:book_id>/create/', CommentCreateView.as_view(), name='comment-create'),
    path('<int:pk>/update/', CommentUpdateView.as_view(), name='comment-update'),
    path('<int:pk>/delete/', CommentDeleteView.as_view(), name='comment-delete')
]
14,006 | 8b736d90ff1e411c3fea39d278a6e778c70a9e7a | import os
class Board:
    """9x9 character grid with a running score.

    The grid is stored as a flat list of 81 single-character cells,
    indexed row-major (pos = row * 9 + col).
    """

    def __init__(self):
        self.blocks = [' '] * 81  # flat 9x9 grid, all cells empty
        self.score = 0

    def update(self, pos, marker):
        """Place `marker` at flat index `pos`."""
        self.blocks[pos] = marker

    def update2(self, pos, pos1, marker, marker1):
        """Place two markers in one call (e.g. clear old cell, set new)."""
        self.blocks[pos] = marker
        self.blocks[pos1] = marker1

    def scoreup(self):
        """Increment the score by one."""
        self.score += 1

    def gprint(self):
        """Clear the terminal and redraw the grid plus the score.

        Fix: the original used Python-2-only `print x,` statements,
        which do not parse on Python 3.  Joining each row and using
        single-argument print() produces byte-identical output on both
        interpreters ('a b ... i\\n' per row, then 'Score: N').
        """
        os.system('clear')
        for row in range(9):
            print(' '.join(self.blocks[row * 9:(row + 1) * 9]))
        print("Score: %d" % self.score)
|
14,007 | d760f7f19cde0fccdd2a549f831913c2325e9b60 | # /bin/etc/env Python
# -*- coding: utf-8 -*-
print(1 ** 2)
print(2 ** 1, 2 ** 2, 2 ** 3, 2 ** 4, 2 ** 5, 2 ** 6, 2 ** 7, 2 ** 8, 2 ** 9, '\n')
# +-*/就不多说了
# **两个星号代表乘方
# +=、-=、*=、是 'a = a + 1' == a += 1 简单写法。
# 整数
print(3 / 2)
print(3.0 / 2)
print(3.0 / 2.0, '\n')
"""
在Python2中,3/2=1 会将小数直接删掉。
"""
print(5 + 3)
print(10 - 2)
print(2 * 4)
print(24 // 3)
# 浮点数
print(0.1 + 0.1)
print(0.2 + 0.2)
print(2 * 0.1)
print(2 * 0.2)
print('所有编程语言中都存在这个问题')
print(0.2 + 0.1)
print(3 * 0.1)
num = 123
ret = num % 2
print(ret) # 余数
if ret == 0:
print("偶数")
else:
print("奇数")
# 避免数据类型错误
age = 25
# message = "Happy" + age + "rd Birthday!"
message = "Happy " + str(age) + "rd Birthday!"
print(message)
"""
注释中的age是无法输出的。因为数据类型不一样,age是一个数字,而message是一个字符串。
两者相加自然会报错。所以使用str()函数
函数与方法的区别:
1、类和实例无绑定关系的function都属于函数(function)。
2、与类和实例有绑定关系的function都属于方法(method)。
str()就属于第一类。
"""
num = "Aiden"
li = ['Aiden','liu']
|
14,008 | cf56dd726a2a76fee89013fd3afe807fa4c02b53 | #!/usr/bin/env python
__author__ = "Bill Ferrell"
"""Parses the Sierra avalanche center status page.
Page found here: http://www.sierraavalanchecenter.org/advisory
This class parses the page and returns an object that allows you to get various status bits.
"""
import re
import logging
import models
from mmlib.scrapers.scraper import Scraper
class AvalancheConditionsParser(Scraper):
    """Scrapes the Sierra Avalanche Center advisory page and stores a
    TodaysAvalancheReport entity with the parsed danger levels.
    """

    # Danger-level pages linked from the advisory; the anchor href tells
    # us which level(s) are currently forecast.  The ordering matches the
    # report fields: low, moderate, considerable, high, extreme.
    _DANGER_PAGES = (
        ('low', 'low_avalanche_hazard.htm'),
        ('moderate', 'moderate_danger.htm'),
        ('considerable', 'considerable_danger.htm'),
        ('high', 'high_danger.htm'),
        ('extreme', 'extreme_danger.htm'),
    )

    def __init__(self):
        url = 'http://www.sierraavalanchecenter.org/advisory'
        Scraper.__init__(self, url=url, geography='Tahoe',
                         valueType='AvalancheConditions')
        self.scrape()
        intro_paragraph = self.parseIntroText()
        published_time = self.parsePublishedTime()

        # The original repeated the same lookup-with-fallback block five
        # times; drive it from the table instead.
        levels = {}
        for name, page in self._DANGER_PAGES:
            levels[name] = self._parseDangerWithFallback(page)

        # More than one danger level present means the hazard varies
        # (e.g. by elevation/aspect).
        multiple_danger_levels = \
            sum(1 for found in levels.values() if found) > 1

        new_avalanche_data = models.TodaysAvalancheReport()
        new_avalanche_data.avalanche_report_paragraph = str(intro_paragraph)
        new_avalanche_data.low_danger = levels['low']
        new_avalanche_data.moderate_danger = levels['moderate']
        new_avalanche_data.considerable_danger = levels['considerable']
        new_avalanche_data.high_danger = levels['high']
        new_avalanche_data.extreme_danger = levels['extreme']
        new_avalanche_data.multiple_danger_levels = multiple_danger_levels
        new_avalanche_data.published_time = str(published_time[0])
        new_avalanche_data.put()

    def _parseDangerWithFallback(self, page):
        """Look for `page` linked with a literal '~', then retry with
        the percent-encoded '%7E' form used by some advisories."""
        found = self.parseDanger(
            find_value='http://www.avalanche.org/~uac/encyclopedia/' + page)
        if not found:
            found = self.parseDanger(
                find_value='http://www.avalanche.org/%7Euac/encyclopedia/' + page)
        return found

    def parseDanger(self, find_value):
        """Return True if the advisory page links to `find_value`."""
        logging.info('starting parseDanger')
        logging.info('findvalue = %s' % find_value)
        block = self.soup.find(attrs={'href': find_value})
        if block:
            logging.info('Found danger')
            return True
        logging.info('Found nothing')
        return False

    def parseIntroText(self):
        """Return the first paragraph of the advisory discussion text,
        or the string 'None' when absent."""
        intro = None
        block = self.soup.findAll(attrs={'class': "views-field views-field-field-discussion-php-value"})
        for tag in block:
            if tag.name == 'td':
                intro = tag.findNext('p')
                intro = intro.contents[0]
        if not intro:
            intro = 'None'
        logging.info('intro = %s' % str(intro))
        return str(intro)

    def parsePublishedTime(self):
        """Return the advisory publication time contents (a list of
        soup nodes; callers use element [0])."""
        block = self.soup.find('td', attrs={'class': "views-field views-field-field-pubtime-php-value"})
        time = block.findNext('strong').contents
        return time
|
14,009 | 20c03df1cf1a57a7f663563433fae9030d96e686 | # not required, but nice for predictive text and tab completion
from aiogram import Bot, Dispatcher, executor, types
# importing modules
import asyncio
from datetime import datetime
import credentials
import re, math
async def startMessage(bot, message):
    """Greet the user who triggered the start command."""
    chat_id = message['from']['id']
    await bot.send_message(chat_id, "Welcome to this bot.")
async def functionsExample(bot, message, commandargument):
    """Example handler documenting the expected call signature."""
    usage = ('call this function by using\n'
             'await F.functionsExample(bot, message, commandargument')
    print(usage)
|
14,010 | 6fe02a9cfd27d2a5c7e065521b88f4a206d9a0f7 | from app import db
from app.models import Movie
from sqlalchemy import text
from flask_sqlalchemy import Pagination
def search_movie(search_text, page, per_page):
    """Full-text movie search with typo tolerance, paginated.

    Uses Postgres pg_trgm operators (`%` similarity filter, `<->`
    distance ordering) over the `unique_lexeme` word list to map
    `search_text` onto its two closest indexed words, then runs a
    ranked tsvector query against `search_view`.  Returns a
    flask_sqlalchemy Pagination of Movie objects in rank order.
    """
    sql = text('''
    WITH similar_words AS (
    SELECT string_agg(word, ' | ') AS words
    FROM (
    SELECT word
    FROM unique_lexeme
    WHERE word % '':keywords''
    ORDER BY word <-> '':keywords''
    LIMIT 2
    ) AS tab
    )
    SELECT id, count(*) OVER() AS full_count
    FROM search_view
    WHERE document @@ to_tsquery('english', (SELECT words FROM similar_words))
    ORDER BY ts_rank(document, to_tsquery('english', (SELECT words FROM similar_words))) DESC
    LIMIT :per_page
    OFFSET :offset_page
    ''') # TODO consider in conditionally recounting because should get few items every time
    # TODO if there are only stop words, search them only in title (disable warnings)
    # TODO for each element of tsvector, we could get the similar word
    # (e.g. "the prestige" query cannot find "Prestige, the")
    sql = sql.bindparams(keywords=search_text, per_page=per_page, offset_page=((page-1) * per_page))
    result = db.engine.execute(sql).fetchall()
    # full_count is a window function repeated on every row; read it off
    # the first row (an empty result means zero matches).
    total_count = result[0][1] if len(result) > 0 else 0
    # Hydrate ORM objects in the order the ranked query returned them.
    movies = [Movie.query.get(row[0]) for row in result]
    return Pagination(None, page, per_page, total_count, movies)
|
14,011 | 57dff9feb10de13bdc22230fbd5ca3d4b591b0b6 | import logging
import sys
import shutil
import hashlib
import json
from unittest import TestCase
from unittest import mock
from ansit.environment import (
Environment,
Drivers)
from ansit.manifest import Manifest
from ansit.util import read_yaml_file
from ansit import drivers
logging.basicConfig(level=logging.CRITICAL)
class TestDrivers(TestCase):
    """Driver loading and state persistence for the Drivers registry."""

    @classmethod
    def setUpClass(cls):
        cls.drivers = Drivers(
            Manifest.from_file('tests/examples/good_manifest.yml'))

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.drivers._manifest['tmp_dir'])

    def test_loading_provider(self):
        # assertIsInstance gives a clearer failure message than
        # assertTrue(isinstance(...)).
        self.assertIsInstance(
            self.drivers['ansit.drivers.LocalhostProvider'],
            drivers.Provider)

    def test_loading_provisioner(self):
        self.assertIsInstance(
            self.drivers['ansit.drivers.CommandProvisioner'],
            drivers.Provisioner)

    def test_loading_tester(self):
        self.assertIsInstance(
            self.drivers['ansit.drivers.CommandTester'],
            drivers.Tester)

    def test_saving_state(self):
        # Ensure CommandTester is loaded so it shows up in the state file.
        self.drivers['ansit.drivers.CommandTester']
        self.drivers.save_state()
        with open(
                self.drivers._state_file, 'r',
                encoding='utf-8') as state_src:
            state = json.load(state_src)
        self.assertDictEqual(
            state['ansit.drivers.CommandTester'],
            {})

    def test_restoring_state(self):
        def spawn_drivers():
            # Fresh registry with its own state file, so this test cannot
            # interfere with the class-level instance.
            return Drivers(
                Manifest.from_file('tests/examples/good_manifest.yml'),
                state_filename='drivers2.json')
        state = {'key1': 'val1'}
        drivers2 = spawn_drivers()
        drivers2['ansit.drivers.CommandTester'].state = state
        drivers2.save_state()
        drivers2 = spawn_drivers()
        self.assertEqual(
            drivers2['ansit.drivers.CommandTester'].state,
            state)
class TestEnvironmentChanges(TestCase):
    """End-to-end checks of Environment: file changes, machine
    lifecycle, tests, login and provisioning (drivers are mocked)."""

    @classmethod
    def setUpClass(cls):
        # One synchronized environment with all manifest changes applied,
        # shared by every test in the class.
        cls.env = Environment(
            Manifest.from_file('tests/examples/good_manifest.yml'))
        cls.env.synchronize()
        cls.env.apply_changes()

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.env._manifest['tmp_dir'])

    def md5sum(self, path):
        """MD5 hex digest of a file, used to compare file contents."""
        with open(path, 'rb') as src:
            return hashlib.md5(src.read()).hexdigest()

    def test_template(self):
        # Rendered template must match the pre-rendered fixture.
        self.assertEqual(
            self.md5sum('tests/examples/rendered_template.txt'),
            self.md5sum('.ansit/examples/template.txt'))

    def test_copy(self):
        # Copy change: destination must be byte-identical to the source.
        self.assertEqual(
            self.md5sum('tests/examples/copy_src.txt'),
            self.md5sum('.ansit/examples/copy_dest.txt'))

    def test_remove(self):
        # 'remove' change must drop the key entirely.
        content = read_yaml_file('.ansit/examples/test_yaml.yml')
        self.assertIsNone(content.get('test_var3'))

    def test_add(self):
        # 'add' change appends to the nested list.
        content = read_yaml_file('.ansit/examples/test_yaml.yml')
        self.assertEqual(len(content['test_var2']['subvar2']), 4)

    def test_update(self):
        content = read_yaml_file('.ansit/examples/test_yaml.yml')
        self.assertEqual(content['test_var1'], 'val1_test')

    @mock.patch('ansit.drivers.LocalhostProvider.up')
    def test_creating_machines(self, up):
        # Driver methods are generators; the mock must yield so the
        # environment can consume its output.
        def mock_up(*args, **kwargs):
            yield ''
        up.side_effect = mock_up
        self.env.up(['localhost'])
        self.assertEqual(up.call_count, 1)
        self.assertIn('localhost', up.call_args[0][0])

    @mock.patch('ansit.drivers.LocalhostProvider.destroy')
    def test_destroying_machines(self, destroy):
        def mock_destroy(*args, **kwargs):
            yield ''
        destroy.side_effect = mock_destroy
        self.env.destroy(['localhost'])
        self.assertEqual(destroy.call_count, 1)
        self.assertIn('localhost', destroy.call_args[0][0])

    @mock.patch('ansit.drivers.LocalhostProvider.run')
    def test_run_command(self, run):
        def mock_run(*args, **kwargs):
            yield ''
        run.side_effect = mock_run
        self.env.run('localhost', 'pwd')
        self.assertEqual(run.call_count, 1)

    @mock.patch('ansit.drivers.CommandTester.status',
                new_callable=mock.PropertyMock)
    @mock.patch('ansit.drivers.CommandTester.test')
    def test_passed_tests(self, test_run, test_status):
        # Two tests are defined for 'localhost' in the manifest, hence
        # the expected call counts of 2.
        def mock_test_run(*args, **kwargs):
            yield ''
        test_run.side_effect = mock_test_run
        test_status.return_value = True
        summary = self.env.test(['localhost'])
        self.assertEqual(test_run.call_count, 2)
        self.assertEqual(test_status.call_count, 2)
        self.assertEqual(
            summary,
            {
                'localhost': [
                    ('Test 1', True),
                    ('Test 2', True)
                ]
            })

    @mock.patch('ansit.drivers.CommandTester.status',
                new_callable=mock.PropertyMock)
    @mock.patch('ansit.drivers.CommandTester.test')
    def test_failed_tests(self, test_run, test_status):
        def mock_test_run(*args, **kwargs):
            yield ''
        test_run.side_effect = mock_test_run
        test_status.return_value = False
        summary = self.env.test(['localhost'])
        self.assertEqual(test_run.call_count, 2)
        self.assertEqual(test_status.call_count, 2)
        self.assertEqual(
            summary,
            {
                'localhost': [
                    ('Test 1', False),
                    ('Test 2', False)
                ]
            })

    @mock.patch('subprocess.run')
    def test_loging(self, subprocess_run):
        # login() must invoke ssh with the key file and inherit the
        # caller's standard streams.
        self.env.login('localhost')
        self.assertEqual(subprocess_run.call_count, 1)
        self.assertIn(
            'id_rsa',
            subprocess_run.call_args[0][0])
        self.assertEqual(
            sys.stdin.fileno(),
            subprocess_run.call_args[1]['stdin'].fileno())
        self.assertEqual(
            sys.stdout.fileno(),
            subprocess_run.call_args[1]['stdout'].fileno())
        self.assertEqual(
            sys.stderr.fileno(),
            subprocess_run.call_args[1]['stderr'].fileno())

    @mock.patch('ansit.drivers.CommandProvisioner.provision')
    def test_provision(self, provision):
        self.env.provision()
        self.assertEqual(provision.call_count, 1)
|
14,012 | fd2c33778fa6d84a686c4c63c62e668f8d771447 | n,m = list(map(int,input().split()))
edges = [[False for _ in range(n+1)] for _ in range(n+1)]
visited = [[False for _ in range(n+1)] for _ in range(n+1)]
flag = False
# 并查集 检查连通性
v = [i for i in range(n+1)]
def find(x):
    """Union-find root lookup with full path compression.

    Iterative version: the recursive original could hit Python's
    recursion limit on long parent chains.  The two-pass compression
    repoints every node on the path directly at the root, leaving `v`
    in the same state the recursive compression would.
    """
    root = x
    while v[root] != root:
        root = v[root]
    # Second pass: compress the whole path onto the root.
    while v[x] != root:
        v[x], x = root, v[x]
    return root
def union(x, y):
    """Merge the sets containing x and y.

    Returns False when they were already in the same set, True after a
    successful merge.
    """
    root_x = find(x)
    root_y = find(y)
    if root_x == root_y:
        return False
    v[root_x] = root_y
    return True
# Read edges, fill the adjacency matrix, and union the endpoints.
for _ in range(m):
    u,j = list(map(int,input().split()))
    edges[u][j]=edges[j][u]=True
    union(u,j)
# Connectivity check: every vertex must share vertex 1's root.
# NOTE(review): vertices 2..n-1 are checked but vertex n is not --
# looks like an off-by-one; confirm against the problem statement.
for i in range(2,n):
    root = find(1)
    if find(i)!= root:
        flag=True
# Count vertices of odd degree: an Euler path exists only when there are
# 0 of them, or exactly 2 with vertex 1 (the fixed start) being odd.
count=0
for i in range(1,n+1):
    e = [j for j in edges[i] if j]
    if len(e)%2==1:
        count+=1
if not (count==0 or count==2 and len([j for j in edges[1] if j])%2==1):
    flag=True
# Accumulators for the traversal; vertices are appended to `path` in
# post-order.  NOTE(review): `ans` and `end` appear unused below.
ans = []
end = False
path = []
def dfs(i):
    """Hierholzer-style DFS: consume each undirected edge once and
    append vertices post-order, so the Euler path is `path` reversed."""
    #### flag = False
    for j in range(1,n+1):
        if edges[i][j] and not visited[i][j]:
            ## flag=True
            # Mark both directions of the undirected edge as used.
            visited[i][j]=visited[j][i]=True
            dfs(j)
    path.append(i)
if flag:
    # Disconnected graph or wrong odd-degree count: no Euler path.
    print(-1)
else:
    dfs(1)
    # Reverse the post-order to print the path starting from vertex 1.
    print(' '.join(map(str,path[::-1])))
|
14,013 | cd77183719ee72b38882e6db5c7d3c1f48008990 | def add_to_deck(card, original_deck, n_deck):
if card in original_deck:
n_deck.append(card)
return n_deck
else:
print('Card not found.')
def remove_from_deck(card, n_deck):
    """Remove the first occurrence of `card` from n_deck.

    Returns n_deck on success; prints an error and returns None when
    the card is absent.
    """
    if card not in n_deck:
        print('Card not found.')
        return None
    n_deck.remove(card)
    return n_deck
def insert_in_deck(card, idx, n_deck):
    """Insert `card` at position idx when idx is within bounds and the
    card belongs to the original (module-level) `deck`."""
    index_in_bounds = 0 <= idx <= len(n_deck)
    if index_in_bounds and card in deck:
        n_deck.insert(idx, card)
    else:
        print("Error!")
def swap_it(card_one, card_two, new_deck):
    """Swap the positions of two cards in new_deck.

    When a card appears more than once, the LAST occurrence is used,
    matching the original linear scan.  Fix over the original: when
    either card is missing, the old code indexed the list with None and
    raised TypeError; now 'Card not found.' is printed (consistent with
    the sibling helpers) and the deck is returned unchanged.
    """
    idx_one = idx_two = None
    for position, current in enumerate(new_deck):
        if current == card_one:
            idx_one = position
        if current == card_two:
            idx_two = position
    if idx_one is None or idx_two is None:
        print('Card not found.')
        return new_deck
    new_deck[idx_one], new_deck[idx_two] = new_deck[idx_two], new_deck[idx_one]
    return new_deck
# The original full deck, colon-separated, e.g. "2H:3D:KC".
deck = input().split(':')
# Cards actually picked up, in play order.
new_deck = []
while True:
    command = input().split()
    action = command[0]
    if action == 'Ready':
        print(' '.join(new_deck))
        break
    elif action == 'Shuffle':
        # NOTE(review): command[1] is read but unused for Shuffle (and
        # would raise IndexError on a bare 'Shuffle') -- confirm the
        # input format.  The whole hand is simply reversed.
        card = command[1]
        new_deck = new_deck[::-1]
    elif action == 'Add':
        card = command[1]
        add_to_deck(card, deck, new_deck)
    elif action == 'Remove':
        card = command[1]
        remove_from_deck(card, new_deck)
    elif action == 'Insert':
        card = command[1]
        idx = int(command[2])
        insert_in_deck(card, idx, new_deck)
    elif action == 'Swap':
        card = command[1]
        card_2 = command[2]
        swap_it(card, card_2, new_deck)
|
14,014 | 19c91817c1adbda41a16b835958867ef4ae2e84b | #!/usr/bin/env python3
import os
import unittest
import socket
import time
import pdb
import json
import requests
from sure import expect
import subprocess
# Target host/ports, overridable from the environment.
HOST = os.getenv("HOST", "localhost")
# NOTE(review): defaults are ints while env values are strings; the
# '%s' URL formatting below tolerates both.
HTTP_PORT = os.getenv("HTTP_PORT", 8888)
HTTPS_PORT = os.getenv("HTTPS_PORT", 443)
# NOTE(review): `not os.getenv("INSECURE", True)` evaluates to False
# both when the variable is unset (default True) and when it is set to
# any non-empty string, so INSECURE is effectively always False and
# requests is called with verify=False (TLS verification disabled).
# Looks inverted -- confirm intent.
INSECURE = not os.getenv("INSECURE", True)
USERNAME = os.getenv("USERNAME", "user1")
PASSWORD = os.getenv("PASSWORD", "user1")
HTTP_TARGET_URL = 'http://%s:%s' % (HOST, HTTP_PORT)
HTTPS_TARGET_URL = 'https://%s:%s' % (HOST, HTTPS_PORT)
script_dir = os.path.dirname(__file__)
# NOTE(review): `then` is used as a decorator but never imported -- this
# file needs `from behave import then` (third-party) to actually run.
@then(u'I make GET request to http://localhost:8888 then I should get redirected and receive 200')
def step_impl(context):
    """Behave step: an HTTPS GET with basic auth must return 200 and
    contain the expected Nginx greeting (sure/expect assertions)."""
    print("\nthen I make GET request to http://localhost:8888 then I should get redirected and receive 200...")
    full_url_path = '%s' % (HTTPS_TARGET_URL)
    print("to the following url... " + full_url_path)
    headers = {'Content-type': 'text/plain'}
    response = requests.get(full_url_path,
                            auth=(USERNAME, PASSWORD),
                            headers=headers,
                            verify=INSECURE)
    print("response.content was... " + str(response.content))
    response.status_code.should.equal(200)
    expect(str(response.content)).to.contain("Hello from Nginx server.")
|
14,015 | baf83491a3ed03ab0ff4aebe0ebc92c0060e50ce | import ROOT as root
import numpy as np
class BuildRoots():
    def __init__(self, tree, ext):
        """Wrap a ROOT TTree plus an output-file suffix.

        tree: source TTree with a per-event `towerE` energy array.
        ext:  filename suffix for the derived ROOT files.
        """
        self._tree = tree
        self._root_file = ext
        # Per-event index of the maximum-energy tower; filled by
        # _get_max_energy() and reused by the combined-energy builders.
        self._max_index = []
    def build_max_energy(self):
        """Build the max-tower-energy tree (side effects: writes a ROOT
        file and populates self._max_index)."""
        self._get_max_energy()
    def _get_max_energy(self):
        """Write a TTree holding each event's maximum tower energy.

        Also records the index of that tower per event in
        self._max_index, which the combined-energy methods rely on.
        """
        f = root.TFile("~/workspace/hgtd/rootfiles/max_energy_"+self._root_file, "update")
        t = root.TTree("max_energy_tree", "max energy tree")
        # Single-element buffer bound to the branch; ROOT reads from it
        # on each Fill().
        max_energy = np.zeros(1, dtype=float)
        t.Branch('max_energy', max_energy, 'max_energy/D')
        for i in range(self._tree.GetEntries()):
            #load the entry
            self._tree.GetEntry(i)
            #get the index of the max energy tower in this event
            self._max_index.append(list(self._tree.towerE).index(max(self._tree.towerE)))
            max_energy[0] = self._tree.towerE[self._max_index[i]]
            t.Fill()
        f.Write()
        f.Close()
        return t
#this algorithm assumes that the tower with max energy in each event will be a part of the maximum combined energy
#theoretically there's the case where two towers with smaller energy than the max will combine to be more than the
#tower of max energy & a tower next to it
    def build_combined2_energy(self):
        """Build the 2-tower combined-energy tree: the max tower plus
        its highest-energy direct neighbour.

        Requires build_max_energy() to have run first so that
        self._max_index is populated.
        """
        self._get_combined2_energy()
    def _get_combined2_energy(self):
        """Write a TTree of max-tower energy plus its largest neighbour.

        For each event, the four direct neighbours (west/east/north/
        south) of the maximum tower are looked up and the biggest is
        added.  This assumes the max tower is always part of the best
        2-tower sum; in principle two sub-maximal adjacent towers could
        exceed it (see the note above the class of builders).
        """
        f = root.TFile("~/workspace/hgtd/rootfiles/combined2_energy_"+self._root_file, "update")
        t = root.TTree("combined2_energy_tree", "combined2 energy tree")
        combined_energy = np.zeros(1, dtype=float)
        t.Branch('combined2_energy', combined_energy, 'combined2_energy/D')
        #loop & load the entries
        for i in range(self._tree.GetEntries()):
            self._tree.GetEntry(i)
            west_index = self._get_west(self._max_index[i])
            east_index = self._get_east(self._max_index[i])
            north_index = self._get_north(self._max_index[i], self._tree.towerE.size())
            south_index = self._get_south(self._max_index[i], self._tree.towerE.size())
            # West/east are None at the detector edge; a missing
            # neighbour contributes zero energy.  NOTE(review): north/
            # south are indexed unconditionally -- presumably they wrap
            # and never return None; confirm in _get_north/_get_south.
            west_energy = self._tree.towerE[west_index] if west_index is not None else 0
            east_energy = self._tree.towerE[east_index] if east_index is not None else 0
            north_energy = self._tree.towerE[north_index]
            south_energy = self._tree.towerE[south_index]
            max_adjacent = max(west_energy, east_energy, north_energy, south_energy)
            combined_energy[0] = self._tree.towerE[self._max_index[i]] + max_adjacent
            t.Fill()
        f.Write()
        f.Close()
        return t
    def build_combined4_energy(self):
        """Build the best 2x2 combined-energy tree around the max tower.

        Requires build_max_energy() to have run first.
        """
        self._get_combined4_energy()
    def _get_combined4_energy(self):
        """Write a TTree with the best 2x2 tower-energy sum containing
        the maximum tower.

        The four candidate 2x2 squares around the max tower are summed
        and the largest kept; at a west/east edge only the two squares
        on the existing side are considered.
        """
        f = root.TFile("~/workspace/hgtd/rootfiles/combined4_energy_"+self._root_file, "update")
        t = root.TTree("combined4_energy_tree", "combined4 energy tree")
        combined_energy = np.zeros(1, dtype=float)
        # NOTE(review): the branch name 'combine4_energy' is missing the
        # 'd' while the leaf list says 'combined4_energy' -- confirm
        # which spelling downstream readers expect.
        t.Branch('combine4_energy', combined_energy, 'combined4_energy/D')
        #loop & load the entries
        for i in range(self._tree.GetEntries()):
            self._tree.GetEntry(i)
            # Neighbour indices of the max tower, keyed by compass point.
            mat = {}
            mat["w"] = self._get_west(self._max_index[i])
            mat["e"] = self._get_east(self._max_index[i])
            mat["n"] = self._get_north(self._max_index[i], self._tree.towerE.size())
            mat["s"] = self._get_south(self._max_index[i], self._tree.towerE.size())
            if mat["w"] is None:
                # Max tower on the west edge: only the two east-side
                # squares exist.
                mat["ne"] = self._get_north(mat["e"], self._tree.towerE.size())
                mat["se"] = self._get_south(mat["e"], self._tree.towerE.size())
                ne_max = sum([self._tree.towerE[self._max_index[i]], self._tree.towerE[mat["n"]], self._tree.towerE[mat["e"]], self._tree.towerE[mat["ne"]]])
                se_max = sum([self._tree.towerE[self._max_index[i]], self._tree.towerE[mat["s"]], self._tree.towerE[mat["e"]], self._tree.towerE[mat["se"]]])
                combined_energy[0] = max(ne_max, se_max)
            elif mat["e"] is None:
                # Max tower on the east edge: only the west-side squares.
                mat["nw"] = self._get_north(mat["w"], self._tree.towerE.size())
                mat["sw"] = self._get_south(mat["w"], self._tree.towerE.size())
                nw_max = sum([self._tree.towerE[self._max_index[i]], self._tree.towerE[mat["n"]], self._tree.towerE[mat["w"]], self._tree.towerE[mat["nw"]]])
                sw_max = sum([self._tree.towerE[self._max_index[i]], self._tree.towerE[mat["s"]], self._tree.towerE[mat["w"]], self._tree.towerE[mat["sw"]]])
                combined_energy[0] = max(nw_max, sw_max)
            else:
                # Interior tower: all four 2x2 squares are candidates.
                mat["nw"] = self._get_north(mat["w"], self._tree.towerE.size())
                mat["ne"] = self._get_north(mat["e"], self._tree.towerE.size())
                mat["sw"] = self._get_south(mat["w"], self._tree.towerE.size())
                mat["se"] = self._get_south(mat["e"], self._tree.towerE.size())
                nw_max = sum([self._tree.towerE[self._max_index[i]], self._tree.towerE[mat["n"]], self._tree.towerE[mat["w"]], self._tree.towerE[mat["nw"]]])
                ne_max = sum([self._tree.towerE[self._max_index[i]], self._tree.towerE[mat["n"]], self._tree.towerE[mat["e"]], self._tree.towerE[mat["ne"]]])
                sw_max = sum([self._tree.towerE[self._max_index[i]], self._tree.towerE[mat["s"]], self._tree.towerE[mat["w"]], self._tree.towerE[mat["sw"]]])
                se_max = sum([self._tree.towerE[self._max_index[i]], self._tree.towerE[mat["s"]], self._tree.towerE[mat["e"]], self._tree.towerE[mat["se"]]])
                combined_energy[0] = max(nw_max, ne_max, sw_max, se_max)
            t.Fill()
        f.Write()
        f.Close()
        return t
    def build_combined9_energy(self):
        """Build the full 3x3 combined-energy tree around the max tower.

        Requires build_max_energy() to have run first.
        """
        self._get_combined9_energy()
    def _get_combined9_energy(self):
        """Write a TTree of the full 3x3 energy sum centred on the max
        tower (recentred one cell inward when the max tower sits on an
        edge -- see _get_index_dictionary)."""
        f = root.TFile("~/workspace/hgtd/rootfiles/combined9_energy_"+self._root_file, "update")
        t = root.TTree("combined9_energy_tree", "combined9 energy tree")
        combined_energy = np.zeros(1, dtype=float)
        t.Branch('combined9_energy', combined_energy, 'combined9_energy/D')
        #loop & load the entries
        for i in range(self._tree.GetEntries()):
            self._tree.GetEntry(i)
            # 3x3 neighbourhood indices around the max tower.
            square = self._get_index_dictionary(self._max_index[i], self._tree.towerE.size())
            top_sum = sum([self._tree.towerE[square["n"]], self._tree.towerE[square["ne"]], self._tree.towerE[square["nw"]]])
            middle_sum = sum([self._tree.towerE[square["m"]], self._tree.towerE[square["w"]], self._tree.towerE[square["e"]]])
            bottom_sum = sum([self._tree.towerE[square["s"]], self._tree.towerE[square["sw"]], self._tree.towerE[square["se"]]])
            combined_energy[0] = sum([top_sum, middle_sum, bottom_sum])
            t.Fill()
        f.Write()
        f.Close()
        return t
#=======================================================================
# self._tree.GetEntry(43)
# max_i = list(self._tree.towerE).index(max(self._tree.towerE))
# self._print_index_dictionary(max_i, self._get_index_dictionary(max_i, self._tree.towerE.size()))
#=======================================================================
def _print_index_dictionary(self, max_index, d):
    """Debug helper: pretty-print a 3x3 index dictionary *d* side by side
    with the corresponding tower energies.

    NOTE(review): the *max_index* parameter is unused — the centre cell is
    taken from d["m"].
    """
    print('''
    {0} {1} {2} {3} {4} {5}
    {6} {7} {8} ------value--------> {9} {10} {11}
    {12} {13} {14} {15} {16} {17}
    '''.format(
        d["nw"], d["n"], d["ne"], self._tree.towerE[d["nw"]], self._tree.towerE[d["n"]], self._tree.towerE[d["ne"]],
        d["w"], d["m"], d["e"], self._tree.towerE[d["w"]], self._tree.towerE[d["m"]], self._tree.towerE[d["e"]],
        d["sw"], d["s"], d["se"], self._tree.towerE[d["sw"]], self._tree.towerE[d["s"]], self._tree.towerE[d["se"]]
    ))
#this will be 3x3
def _get_index_dictionary(self, max_index, array_size):
    """Build a dict of the 3x3 neighbourhood around *max_index* in a
    row-major grid with 64 columns (keys: m, n, s, e, w, nw, ne, sw, se).

    North/south wrap from the top row to the bottom row and vice versa
    (see _get_north/_get_south).  East/west do NOT wrap: when the centre
    sits in the first or last column, the centre is shifted one cell inward
    and the dictionary is rebuilt recursively — the returned "m" is then no
    longer *max_index*.
    """
    #mat = [3][3] --- i wish this syntax were valid but it's not
    mat = {}
    mat["m"] = max_index
    mat["w"] = self._get_west(max_index)
    mat["e"] = self._get_east(max_index)
    if mat["w"] is None:
        # West edge: recentre one cell to the east and rebuild.
        return self._get_index_dictionary(mat["e"], array_size, )
    elif mat["e"] is None:
        # East edge: recentre one cell to the west and rebuild.
        return self._get_index_dictionary(mat["w"], array_size, )
    mat["n"] = self._get_north(max_index, array_size)
    mat["s"] = self._get_south(max_index, array_size)
    mat["nw"] = self._get_north(mat["w"], array_size)
    mat["ne"] = self._get_north(mat["e"], array_size)
    mat["sw"] = self._get_south(mat["w"], array_size)
    mat["se"] = self._get_south(mat["e"], array_size)
    return mat
def _get_west(self, index):
return index - 1 if index % 64 != 0 else None
def _get_east(self, index):
return index + 1 if index % 64 != 63 else None
def _get_north(self, index, array_size):
return index - 64 if index - 64 >= 0 else (array_size - 64) + index
def _get_south(self, index, array_size):
return index + 64 if index + 64 < array_size else index % 64
# Things this algorithm assumes
# - |
14,016 | 89e70463eae52ccf78c1c613d4a2ef9d05fa79ee | #!/usr/bin/env python
###########################################################################
# Replacement for save_data.py takes a collection of hdf5 files, and #
# builds desired histograms for rapid plotting #
###########################################################################
import numpy as np
import healpy as hp
import argparse, tables
from icecube import astro
import dataFunctions as df
from showerllh.analysis.skypos import getDecRA
from showerllh.analysis.llhtools import inPoly, getEbins
from showerllh.analysis.zfix import zfix
def hdf5extractor(config, file):
    """Read one ShowerLLH hdf5 file and return a dict of per-event arrays.

    Returned keys include per-composition reconstructions (<r>ML_x/y/energy,
    <r>LLH), timing ('mjd'), 'eventIDs', filter 'weights', the most likely
    composition ('llh_comp', 'ML_x/y/energy'), sky position ('dec','ra') and
    a containment cut mask under q['cuts']['llh'].  (Python 2 code.)
    """
    ##=======================================================================
    ## Starting parameters
    # Map full composition names to the single-letter prefixes used in keys.
    rDict = {'proton':'p','helium':'h','oxygen':'o','iron':'f'}
    t1 = astro.Time()
    print 'Building arrays from %s...' % file
    t = tables.openFile(file)
    q = {}
    # Get reconstructed compositions from list of children in file
    children = []
    for node in t.walk_nodes('/'):
        try: children += [node.name]
        except tables.NoSuchNodeError:
            continue
    children = list(set(children))
    compList = [n.split('_')[-1] for n in children if 'ShowerLLH_' in n]
    # Get ShowerLLH cuts and info
    # NOTE(review): zenith/azimuth (and 'exists') are taken from the proton
    # table only — assumes ShowerLLH_proton is always present; confirm.
    rrc = t.root.ShowerLLH_proton.col('exists').astype('bool')
    for value in ['zenith', 'azimuth']:
        q[value] = t.root.ShowerLLH_proton.col(value)
    for comp in compList:
        r = rDict[comp]
        for value in ['x','y','energy']:
            q[r+'ML_'+value] = t.getNode('/ShowerLLH_'+comp).col(value)
        q[r+'LLH'] = t.getNode('/ShowerLLHParams_'+comp).col('maxLLH')
    # Timing: combine day/sec/ns columns into one MJD float per event.
    mjd_day = t.root.I3EventHeader.col('time_start_mjd_day')
    mjd_sec = t.root.I3EventHeader.col('time_start_mjd_sec')
    mjd_ns = t.root.I3EventHeader.col('time_start_mjd_ns')
    q['mjd'] = np.zeros(len(mjd_day), dtype=np.float64)
    for i in range(len(mjd_day)):
        day = int(mjd_day[i])
        sec = int(mjd_sec[i])
        ns = int(mjd_ns[i])
        t1.SetTime(day, sec, ns)
        q['mjd'][i] = t1.GetMJD()
    # Event ID: "<run>_<event>_<subevent>" string per event.
    run = t.root.I3EventHeader.col('Run')
    event = t.root.I3EventHeader.col('Event')
    subevent = t.root.I3EventHeader.col('SubEvent')
    eventIDs = []
    for i in range(len(run)):
        eventIDs += ['%s_%s_%s' % (run[i], event[i], subevent[i])]
    q['eventIDs'] = np.asarray(eventIDs)
    # Condition and prescale passed (filter[condition, prescale])
    # For notes on weights see bottom of file
    filtermask = df.filter_mask(config)
    filternames = df.filter_names(config)
    f = {}
    for fname in filternames:
        f[fname] = t.getNode('/'+filtermask).col(fname)
        f[fname] = f[fname][:,0].astype(float)
    filterArray = np.array([f[fname] * df.it_weights(fname)
            for fname in f.keys()])
    # Events failing a filter get a sentinel weight of 100 so that amin()
    # below picks the smallest weight among the filters actually passed.
    filterArray[filterArray == 0] = 100.
    q['weights'] = np.amin(filterArray, axis=0)
    # Other reconstruction info
    q['NStations'] = t.root.NStations.col('value')
    t.close()
    # Laputop values
    #for key in ['x','y','zenith','azimuth','s125','e_proton','e_iron','beta']:
    #    arrays += ['lap_'+key]
    # Get Laputop info
    #for value in ['x', 'y', 'zenith', 'azimuth']:
    #    q['lap_'+value] = t.root.Laputop.col(value)
    #for value in ['s125', 'e_proton', 'e_iron', 'beta']:
    #    q['lap_'+value] = t.root.LaputopParams.col(value)
    # Get most likely composition: the one with the largest maxLLH per event.
    rList = [rDict[comp] for comp in compList]
    full_llhs = np.array([q[r+'LLH'] for r in rList])
    max_llh = np.amax(full_llhs, axis=0)
    # NOTE(review): sized from q['pLLH'] — again assumes proton is present.
    q['llh_comp'] = np.array(['' for i in range(len(q['pLLH']))])
    for r in rList:
        q['llh_comp'][q[r+'LLH'] == max_llh] = r
    for key in ['x', 'y', 'energy']:
        q['ML_'+key] = np.array([q[r+'ML_'+key][i]
                for i, r in enumerate(q['llh_comp'])])
    # Check for multiple most-likely compositions (mark as bad)
    # badVals counts ties per event; (count-1) is truthy only for ties.
    badVals = np.sum(full_llhs == max_llh, axis=0)
    badVals = (badVals-1).astype('bool')
    q['llh_comp'][badVals] = ''
    for key in ['x','y','energy']:
        q['ML_'+key][badVals] = np.nan
    # Calculate sky positions
    q['dec'], q['ra'] = getDecRA(q, verbose=False)
    # Containment cut
    it_geo = df.it_geo(config)
    q['cuts'] = {}
    q['cuts']['llh'] = inPoly(q['ML_x'], q['ML_y'], 0, config=it_geo)
    return q
def histWriter(config, file, outfile):
    """Build energy/zenith/core histograms from one hdf5 file and np.save
    them to *outfile* as a dict keyed by
    '<quantity>[_w][_z][_err]_<comp>_<decbin>_<rabin>'.  (Python 2 code.)
    """
    # Bin values
    eList = ['p','h','o','f']
    decbins = ['0-12','12-24','24-40']
    rabins = ['0-60','60-120','120-180','180-240','240-300','300-360']
    # Build general list of key names to write
    keyList = []
    keyList += ['energy','energy_w','energy_z','energy_w_z']
    keyList += ['zenith','zenith_w','core','core_w']
    keyList += ['%s_err' % k for k in keyList]
    # Split by composition
    keyList = ['%s_%s' % (k, e) for k in keyList for e in eList]
    # Split spatially into 3 dec bins (~12 degrees each) and 6 ra bins
    keyList = ['%s_%s_%s' % (k, dec, ra) for k in keyList \
            for dec in decbins for ra in rabins]
    # Extract information from hdf5 file; everything below is pre-masked by
    # the containment cut c0.
    q = hdf5extractor(config, file)
    c0 = q['cuts']['llh']
    r = np.log10(q['ML_energy'])[c0]
    cosz = np.cos(q['zenith'])[c0]
    dist = np.sqrt(q['ML_x']**2 + q['ML_y']**2)[c0]
    fit = zfix(q['zenith'], bintype='logdist')[c0]
    w = q['weights'][c0]
    # Make cuts (boolean masks stored back into q, keyed by comp/dec/ra label)
    degree = np.pi / 180.
    for e in eList:
        q[e] = (q['llh_comp'] == e)[c0]
    for dec in decbins:
        # The 180-x flip converts the declination-band label to the
        # corresponding zenith-like dec range in radians.
        decmin = (180 - float(dec.split('-')[1])) * degree
        decmax = (180 - float(dec.split('-')[0])) * degree
        q[dec] = ((q['dec'] >= decmin) * (q['dec'] < decmax))[c0]
    for ra in rabins:
        ramin = float(ra.split('-')[0]) * degree
        ramax = float(ra.split('-')[1]) * degree
        q[ra] = ((q['ra'] >= ramin) * (q['ra'] < ramax))[c0]
    # Method of intelligently producing histograms based on key names:
    # the key's suffix tokens select the event mask, the '_z' token shifts
    # by the zenith fix, '_w' weights by w and '_err' by w**2.
    def smartHist(key, x, bins):
        tempx = x
        wts = None
        params = key.split('_')
        e, dec, ra = params[-3:]
        c1 = q[e] * q[dec] * q[ra]
        if 'z' in params:
            tempx = x - fit
        if 'w' in params:
            wts = w[c1]
        if 'err' in params:
            wts = (w[c1])**2
        h0 = np.histogram(tempx[c1], bins=bins, weights=wts)[0]
        return h0
    # Energy distribution
    h = {}
    print 'Calculating energy distributions...'
    bins = getEbins(reco=True)
    energyKeys = [k for k in keyList if 'energy' in k]
    for key in energyKeys:
        h[key] = smartHist(key, r, bins)
    # Zenith distribution
    print 'Calculating zenith distributions...'
    bins = np.linspace(0.8, 1, 81)
    zenithKeys = [k for k in keyList if 'zenith' in k]
    for key in zenithKeys:
        h[key] = smartHist(key, cosz, bins)
    # Core distribution
    print 'Calculating core position distributions...'
    bins = np.linspace(0, 700, 141)
    coreKeys = [k for k in keyList if 'core' in k]
    for key in coreKeys:
        h[key] = smartHist(key, dist, bins)
    print 'Saving...'
    np.save(outfile, h)
def skyWriter(config, file, outfile):
    """Build 2-D (healpix-pixel x quantity) histograms from one hdf5 file
    and np.save them to *outfile* as a dict.  (Python 2 code.)
    """
    nside = 64
    npix = hp.nside2npix(nside)
    # Binning for various parameters (sky pixel, log-energy, core distance,
    # delta-LLH)
    sbins = np.arange(npix+1, dtype=int)
    ebins = np.arange(5, 9.501, 0.05)
    dbins = np.linspace(0, 700, 141)
    lbins = np.linspace(-20, 20, 151)
    # Get desired information from hdf5 file; everything is pre-masked by
    # the containment cut c0.
    d = hdf5extractor(config, file)
    c0 = d['cuts']['llh']
    r = np.log10(d['ML_energy'])[c0]
    fit = zfix(d['zenith'], bintype='logdist')[c0]
    w = d['weights'][c0]
    xy = np.sqrt(d['ML_x']**2 + d['ML_y']**2)[c0]
    # NOTE(review): assumes both iron ('f') and proton ('p') tables exist.
    dllh = (d['fLLH'] - d['pLLH'])[c0]
    # Bin in sky
    #zen = np.pi - d['zenith'][c0]
    #azi = d['azimuth'][c0]
    dec = d['dec'][c0]
    ra = d['ra'][c0]
    x = hp.ang2pix(nside, dec, ra)
    # Energy cut
    ecut = (r >= 6.2)
    p = {'weights':w}
    q = {}
    # Weighted histograms ('_w') and their w**2 error versions ('_err_w').
    q['energy_w'] = np.histogram2d(x, r-fit, bins=(sbins,ebins), **p)[0]
    q['dist_w'] = np.histogram2d(x, xy, bins=(sbins,dbins), **p)[0]
    q['llh_w'] = np.histogram2d(x, dllh, bins=(sbins,lbins), **p)[0]
    p['weights'] = w**2
    q['energy_err_w'] = np.histogram2d(x, r-fit, bins=(sbins,ebins), **p)[0]
    q['dist_err_w'] = np.histogram2d(x, xy, bins=(sbins,dbins), **p)[0]
    q['llh_err_w'] = np.histogram2d(x, dllh, bins=(sbins,lbins), **p)[0]
    # Energy cut versions
    q['llhcut_w'] = np.histogram2d(x[ecut], dllh[ecut], bins=(sbins,lbins),
            weights=w[ecut])[0]
    q['llhcut_err_w'] = np.histogram2d(x[ecut], dllh[ecut], bins=(sbins,lbins),
            weights=(w[ecut])**2)[0]
    # Unweighted versions.  NOTE(review): the '_err' variants below are
    # computed identically to their unweighted counterparts (no weights),
    # so e.g. q['energy_err'] == q['energy'] — confirm this is intended
    # (Poisson errors of unit-weight counts).
    q['energy'] = np.histogram2d(x, r-fit, bins=(sbins,ebins))[0]
    q['dist'] = np.histogram2d(x, xy, bins=(sbins,dbins))[0]
    q['llh'] = np.histogram2d(x, dllh, bins=(sbins,lbins))[0]
    q['energy_err'] = np.histogram2d(x, r-fit, bins=(sbins,ebins))[0]
    q['dist_err'] = np.histogram2d(x, xy, bins=(sbins,dbins))[0]
    q['llh_err'] = np.histogram2d(x, dllh, bins=(sbins,lbins))[0]
    # Energy cut versions
    q['llhcut'] = np.histogram2d(x[ecut], dllh[ecut], bins=(sbins,lbins))[0]
    q['llhcut_err'] = np.histogram2d(x[ecut], dllh[ecut], bins=(sbins,lbins))[0]
    np.save(outfile, q)
if __name__ == "__main__":
    # CLI: pair each input hdf5 file with one output .npy path (positional
    # zip of -f and -o lists); --sky selects skyWriter over histWriter.
    p = argparse.ArgumentParser(description='Converts hdf5 file to npy dict')
    p.add_argument('-c', '--config', dest='config',
            help='Detector configuration [IT73 --> IT81-IV]')
    p.add_argument('-f', '--files', dest='files', nargs='*',
            help='Input files')
    p.add_argument('-o', '--outfiles', dest='outfiles', nargs='*',
            help='Output files')
    p.add_argument('--sky', dest='sky',
            default=False, action='store_true',
            help='Write the sky histograms')
    args = p.parse_args()
    for infile, outfile in zip(args.files, args.outfiles):
        if args.sky:
            skyWriter(args.config, infile, outfile)
        else:
            histWriter(args.config, infile, outfile)
###############################################################################
## Notes on weights
"""
- Events that pass STA8 condition have a prescale and weight of 1.
- Events that pass STA3ii condition have a 1/2 chance to pass the STA3ii
prescale. Those that fail have a 1/3 chance to pass the STA3 prescale. So, the
total chance of an event passing is 1/2+(1/3*1/2) = 2/3. Weight = 1.5
- Events that pass STA3 condition but not STA3ii condition have a prescale and
weight of 3
"""
|
14,017 | f7dc1ca0b52d7df87ac40f5cff6083c0bdde8a31 | """
TSG Data Sender
Author: Morgan Allison
Updated: 03/18
This script allows you to send IQ data to
the TSG and utulize its internal baseband generator
to use it as a simple IQ modulator.
This example creates a dual sideband modulation.
Windows 10 64-bit, TekVISA 4.2.15
Python 3.6.4 64-bit
PyVISA 1.8, NumPy 1.13.1
"""
import visa
import numpy as np

# Connect to the TSG over VISA/TCPIP and reset it to a known state.
rm = visa.ResourceManager()
tsg = rm.open_resource('TCPIP0::192.168.1.12::INSTR')
tsg.write('*RST')
instID = tsg.query('*idn?')
print('Connected to {}'.format(instID))

# Create waveform data
# Simple sine wave for I, zero vector for Q.
# NOTE: max sample rate is 6 MHz
sampleRate = 6e6
recordLength = 600
freq = 10e3

# Create Waveform (time axis spanning recordLength samples)
t = np.linspace(0, recordLength / sampleRate, recordLength)
# Scale the amplitude for int16 values (full scale = 32767)
i = np.array(32767 * np.sin(2 * np.pi * freq * t), dtype=np.int16)
q = np.zeros(recordLength, dtype=np.int16)

# Create interleaved IQ waveform: I0, Q0, I1, Q1, ...
iq = np.empty((i.size + q.size), dtype=i.dtype)
iq[0::2] = i
iq[1::2] = q

# Send IQ data to TSG into SRAM (waveform location 0)
# Send data as big endian and ensure your data type is 'h' for int16
# NOTE(review): the command writes 'wrtw 2, <len(iq)*16>, ...' while
# 'wavf 0' below loads location 0, and the length argument is len*16
# (bits per sample?) — confirm both against the instrument's programming
# manual.
tsg.write_binary_values('wrtw 2, {}, '.format(len(iq) * 16), iq, datatype='h', is_big_endian=True)

# Configure amplitude and frequency
tsg.write('ampr 0')
tsg.write('freq 1 GHz')
# Select phase modulation
tsg.write('type 2')
# Vector modulation subtype
tsg.write('styp 1')
# User waveform source
tsg.write('qfnc 11')
# Load waveform sent previously
tsg.write('wavf 0')
# Configure sample rate
tsg.write('symr {}'.format(sampleRate))
# Turn on modulation and RF output
tsg.write('modl 1')
tsg.write('enbr 1')
tsg.close()
|
14,018 | 4ec2f39cc4fd2f07c574c6e9587c2104ab902055 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import sys
from wsgiref.handlers import CGIHandler
from __init__ import create_app
from libs.links import get_all_groups

# Build the Flask/WSGI application and serve a single request via CGI.
app = create_app()
# NOTE(review): CGIHandler().run(app) executes unconditionally — including
# when this file is run directly, where it fires BEFORE the dev-server
# branch below.  Confirm whether it should be guarded for local runs.
CGIHandler().run(app)
if __name__ == '__main__':
    # Local development server (only reached after the CGI handler returns).
    app.run(host='0.0.0.0', port=5000,)
|
14,019 | 23abb4fe21d4e83edb4a89573f3fe0d0b70767a5 | #version1:
import math
def factorial(x):
    """Return x! by delegating to the standard library (C implementation);
    raises ValueError for negative x."""
    return math.factorial(x)
# Smoke checks (Python 2 print statements).
print factorial(10)
print factorial(3)
print factorial(1)
print factorial(0)
#version2: recursive definition — 0! = 1, x! = x * (x-1)!
# NOTE(review): negative input recurses without bound (RecursionError).
def factorial(x):
    if x == 0: return 1
    else: return x * factorial(x - 1)
print factorial(10)
print factorial(3)
print factorial(1)
print factorial(0)
#version3: iterative product over 1..x; x == 0 yields the empty product 1.
def factorial(x):
    num = 1
    for n in range(1, x + 1):
        num = num * n
    return num
print factorial(10)
print factorial(3)
print factorial(1)
print factorial(0)
#version4: same iterative product, indexing from 0 and multiplying n + 1.
def factorial(x):
    num = 1
    for n in range(x):
        num *= n + 1
    return num
print factorial(10)
print factorial(3)
print factorial(1)
print factorial(0)
#version5: count down from x, accumulating the product.
# NOTE(review): negative input never reaches 0, so the while loop does not
# terminate.
def factorial(x):
    count = x
    total = 1
    while count != 0:
        total = total * count
        count -= 1
    return total
print factorial(10)
print factorial(3)
print factorial(1)
print factorial(0)
|
14,020 | 5295a28ff756281a2b954c87bacb4b8153bf3d03 | '''
@author: xiongfei
@license: (C) Copyright 2013-2017, Node Supply Chain Manager Corporation Limited.
@contact: 386344277@qq.com
@file: SVD++.py
@time: 2018/5/21 下午6:29
@desc: shanghaijiaotong university
'''
from __future__ import division, print_function
import itertools
import os
from scipy.sparse import csr_matrix
import numpy as np
from estimator import Estimator
from tqdm import tqdm
class SVDpp(Estimator):
    """SVD++ matrix-factorization recommender (Koren, KDD 2008).

    Prediction rule:
        r_hat(u, i) = mu + b_u + b_i + q_i . (p_u + |N(u)|^(-1/2) * sum_{j in N(u)} y_j)
    where N(u) is the set of items rated by user u and the y_j vectors model
    implicit feedback.  Parameters are learned by plain SGD.
    """

    def __init__(self, n_factors=20, n_epochs=20, lr=0.007, reg=0.002):
        """
        :param n_factors: latent vector dim
        :param n_epochs: train epochs
        :param lr: learning rate
        :param reg: regularization rate
        """
        super(SVDpp, self).__init__()
        self.n_factors = n_factors
        self.n_epochs = n_epochs
        self.lr = lr
        self.reg = reg

    def train(self, train_dataset, all):
        """Fit biases and latent factors by SGD over all (u, i, r) triples.

        :param train_dataset: dataset exposing .matrix, .global_mean,
            .all_ratings() and .get_user()
        :param all: total number of ratings, used only for progress
            reporting (name kept for interface compatibility even though it
            shadows the builtin)
        """
        user_num = train_dataset.matrix.shape[0]
        item_num = train_dataset.matrix.shape[1]
        self.global_mean = train_dataset.global_mean
        self.train_dataset = train_dataset
        # Biases start at zero; latent factors start small and random.
        self.bu = np.zeros(user_num, np.double)
        self.bi = np.zeros(item_num, np.double)
        self.p = np.random.randn(user_num, self.n_factors) * 0.05
        self.q = np.random.randn(item_num, self.n_factors) * 0.05
        self.y = np.random.randn(item_num, self.n_factors) * 0.05
        for current_epoch in range(self.n_epochs):
            print("current_epoch: {}".format(current_epoch))
            cur = 0
            for u, i, r in train_dataset.all_ratings():
                u_item = train_dataset.get_user(u)[0]
                N_u_item = len(u_item)
                sqrt_N = np.sqrt(N_u_item)
                # Implicit-feedback term |N(u)|^(-1/2) * sum_j y_j.
                u_impl_pref = np.sum(self.y[u_item], axis=0) / sqrt_N
                rp = self.global_mean + self.bu[u] + self.bi[i] + np.dot(self.q[i], self.p[u] + u_impl_pref)
                e_ui = r - rp
                self.bu[u] += self.lr * (e_ui - self.reg * self.bu[u])
                self.bi[i] += self.lr * (e_ui - self.reg * self.bi[i])
                # NOTE(review): kept from the original — q is updated using
                # the already-updated p[u] rather than a cached copy.
                self.p[u] += self.lr * (e_ui * self.q[i] - self.reg * self.p[u])
                self.q[i] += self.lr * (e_ui * (self.p[u] + u_impl_pref) - self.reg * self.q[i])
                # BUG FIX: the gradient of the squared error w.r.t. y_j is
                # e_ui * q_i / sqrt(|N(u)|) — the rated item's factor q[i]
                # scaled by |N(u)|^(-1/2), matching the prediction rule above.
                # The previous code used q[j] / N_u_item (wrong item vector
                # and wrong normalization).
                for j in u_item:
                    self.y[j] += self.lr * (e_ui * self.q[i] / sqrt_N - self.reg * self.y[j])
                cur += 1
                self.progress(cur, all)

    def predict(self, u, i):
        """Return the estimated rating of item *i* by user *u* under the
        prediction rule in the class docstring (train() must run first)."""
        u_item = self.train_dataset.get_user(u)[0]
        N_u_item = len(u_item)
        sqrt_N = np.sqrt(N_u_item)
        u_impl_pref = np.sum(self.y[u_item], axis=0) / sqrt_N
        est = self.global_mean + self.bu[u] + self.bi[i] + np.dot(self.q[i], self.p[u] + u_impl_pref)
        return est
|
14,021 | 456c1c8f9864883011a0dd4385946aa9bcda97b7 | from flask import Flask, render_template,json,jsonify,request,current_app as app
from datetime import date
import requests
import os
# NOTE(review): the flask import above also binds `current_app as app`;
# this assignment immediately rebinds the name to the application object.
app = Flask(__name__)
@app.route('/')
def index():
    # Landing page rendered with a hard-coded greeting and friends list.
    name = 'Abdul Q'
    friends =['joe','bob','robert','bobby']
    return render_template('index.html',greeting = name,friends =friends)
@app.route('/about')
def about():
    # Static inline HTML — no template needed for this page.
    return '<h1>about</h1><p>some other content</p>'
@app.route('/nasa')
def show_nasa_pic():#go to nasa endpoint run code below
    # Fetch NASA's Astronomy Picture of the Day for today's date and render it.
    # NOTE(review): the API key is hard-coded in the URL — move it to an
    # environment variable / config before publishing this code.
    today = str(date.today())
    response = requests.get('https://api.nasa.gov/planetary/apod?api_key=wjlnR0Xw9B5Sh3WEIJa9kmVd368hNMiUVIGahGPi&date='+today)
    data = response.json()
    return render_template('nasa.html',data=data)
"""@app.route('/album', methods=['GET'])
def album_json():
album_info = os.path.join(app.static_folder, 'data','album.json')
with open(album_info, 'r') as json_data:
json_info = json.load(json_data)
return jsonify(json_info)"""
@app.route('/movies', methods=['GET'])
def movies_json():
    # Serve the raw contents of static/data/movies.json as a JSON response.
    movies_info = os.path.join(app.static_folder,'data','movies.json')
    with open(movies_info, 'r') as json_data:
        json_info = json.load(json_data)
    return jsonify(json_info)
@app.route('/movies/search_title',methods=['GET'])#parameters
def movies_search_title():
    # Search static/data/movies.json by the ?title=... query parameter.
    movies_info = os.path.join(app.static_folder,'data','movies.json')
    with open(movies_info, 'r') as json_data:
        json_info = json.load(json_data)
    results = []
    if 'title' in request.args:
        #stores the title the user put into the url
        title=request.args['title']
        # goes through the movies.json file and searches for the movie
        for movie in json_info:# appends every movie whose title contains it
            # NOTE: case-sensitive substring match.
            if title in movie['title']:
                results.append(movie)
    if len(results) < 1:
        return "no results found"
    return render_template("movies.html",results=results)
if __name__ == '__main__':
    # NOTE(review): debug=True exposes the Werkzeug debugger on a LAN
    # address — disable before exposing this host to an untrusted network.
    app.run(debug=True, host='192.168.1.213')
|
14,022 | 99b9a48da4ac67f98009acaa276a4581484d821d | from flask import Flask, jsonify, request
import json
from db_connector import DB_TEST
from werkzeug.serving import WSGIRequestHandler
import numpy as np
import time
import tensorflow as tf
import os
CUR_DIR = os.path.dirname(os.path.abspath(__file__))

# Initialize the inference model: load the frozen TF1 graph that sits next
# to this file and grab its input/output tensors by name.
MODEL_FILENAME = 'inference_model.pb'
sess = tf.Session()
print("loading graph")
output_graph = os.path.join(CUR_DIR, MODEL_FILENAME)
graph_def = tf.GraphDef()
with tf.gfile.FastGFile(output_graph,'rb') as fp_pb:
    graph_def.ParseFromString(fp_pb.read())
sess.graph.as_default()
tf.import_graph_def(graph_def, name='')
tf_input = sess.graph.get_tensor_by_name("input:0")
tf_output = sess.graph.get_tensor_by_name("output:0")

app = Flask(__name__)
# NOTE(review): database credentials are hard-coded — move to config/env.
db = DB_TEST('127.0.0.1', 'db_sensor_data', 'root', 'root')
print(db)
# Length of the sensor-reading sequence fed to the model per inference.
len_seq = 10
def inference_status():
    # Collect records whose status is NULL (not yet inferred) and run
    # inference on each of them.
    db_ = DB_TEST('127.0.0.1', 'db_sensor_data', 'root', 'root')
    db_.execute('select * from tbl_sensor5 where status is null')
    result_noinfer = db_.fetchall()
    for i in range(len(result_noinfer)):
        datum_id_ = result_noinfer[i][0]
        sensor_id_ = result_noinfer[i][1]
        # Take the sensor_id and id of the NULL-status record, then fetch
        # the len_seq most recent records of the same sensor with id <= it
        # to build the model's input batch.
        # NOTE(review): query built by string concatenation — prefer a
        # parameterized query even though the values come from our own DB.
        query_ = 'select * from tbl_sensor5 where sensor_id=\"' + sensor_id_ + '\" and id <= ' + str(datum_id_) + ' order by id desc limit ' + str(len_seq)
        db_.execute(query_)
        records = db_.fetchall()
        print([datum_id_, sensor_id_, len(records)])
        if len(records) == len_seq:
            # Reverse back into chronological order; columns 2:7 are the
            # five sensor channels (power, audio, gyro x/y/z).
            data = []
            for idx_record in reversed(range(len_seq)):
                data.append(records[idx_record][2:7])
            np_input = np.array(data).reshape(1,len_seq,5)
            # Use the tensorflow inference model to estimate the normal
            # probability in [0, 1].
            # np_output = np.random.rand()
            np_output = sess.run(tf_output, feed_dict = {tf_input: np_input})
            # Update the status of the selected NULL-status record:
            # status = int(3 * (1 - p_normal)), i.e. 0..3 severity bucket.
            query__ = 'update tbl_sensor5 set status=' + str(int(3*(1-float(np_output)))) + ' where id=' + str(datum_id_)
            db_.execute(query__)
            db_.commit()
    db_.close()
@app.route("/", methods=['GET','POST'])
def basic():
    """Ingest endpoint: query params 'power', 'audio', 'gyro' carry
    comma-separated sample lists (gyro holds 3 values per sample).  Each
    sample is inserted into tbl_sensor5 keyed by the caller's IP, then
    pending inference is run.  Always returns "OK".
    """
    ip_address = request.remote_addr
    param1 = request.args.get('power', "-1")
    param2 = request.args.get('audio', "-1")
    param3 = request.args.get('gyro', "-1")
    print(param1)
    # print(param1.split(','))
    print(param2)
    # print(param2.split(','))
    print(param3)
    # print(param3.split(','))
    # return_str = 'power: '+param1+', gyro: '+param2+', audio: '+param3
    # print(return_str)
    power = param1.split(',')
    audio = param2.split(',')
    gyro = param3.split(',')
    # Only ingest when the channel lengths are consistent (3 gyro values
    # per power/audio sample); otherwise the request is silently ignored.
    if len(power) == len(audio) and 3*len(power) == len(gyro):
        for i in range(len(power)):
            power_ = float(power[i])
            audio_ = float(audio[i])
            gyro_x = float(gyro[i*3])
            gyro_y = float(gyro[i*3+1])
            gyro_z = float(gyro[i*3+2])
            # Parameterized insert — safe against injection.
            _query = 'insert into tbl_sensor5 (sensor_id, power, audio, gyro_x, gyro_y, gyro_z) values (%s, %s, %s, %s, %s, %s)'
            db.execute(_query, (ip_address, power_, audio_, gyro_x, gyro_y, gyro_z))
        db.commit()
        tic = time.time()
        inference_status()
        print(time.time()-tic)
    # select data to put in network
    # _query = 'select * from tbl_sensor5 where status is null'
    # db.execute(_query)
    # db.
    # db.close()
    return "OK"
if __name__ == "__main__":
    # Keep-alive connections for the sensor clients; threaded so ingest
    # requests don't serialize behind each other.
    WSGIRequestHandler.protocol_version = "HTTP/1.1"
    app.run( host='0.0.0.0',threaded=True)
# datetime_ = result_noinfer[i][7]
# str_timestamp = '\"{}-{}-{} {}:{}:{:.2f}\"'.format(datetime_.year, datetime_.month, datetime_.day, datetime_.hour, datetime_.minute, datetime_.second + datetime_.microsecond/1000000)
# query_ = 'select * from tbl_sensor5 where sensor_id=\"' + sensor_id_ + '\" and create_time < ' + str_timestamp + ' order by id desc limit ' + str(len_seq)
|
14,023 | af8b1b5059130d5be69763afe9ccfb73d7cb0f8a | from .Worker import Worker
from .Site import Site
from .BaseObject import BaseObject
from mkdir_p import mkdir_p
import os
# Template for a CRAB3 configuration file; the {placeholders} are filled by
# CrabWorker.make_crab_file via str.format.
# NOTE(review): psetName, inputFiles, outputFiles and outLFNDirBase are
# substituted UNQUOTED (callers must pass already-quoted/repr'd values),
# while taskName/scriptExe/etc. are quoted in the template — confirm all
# call sites follow that convention.
crab_file_template = """import os
from WMCore.Configuration import Configuration
from CRABClient.UserUtilities import config, getUsernameFromSiteDB
config = Configuration()
taskName = '{taskName}'
config.section_('General')
config.General.requestName = taskName
config.section_('JobType')
config.JobType.pluginName = '{JobType_plugName}'
config.JobType.psetName = {JobType_psetName}
config.JobType.scriptExe = '{JobType_scriptExe}'
config.JobType.inputFiles = {JobType_inputFiles}
config.JobType.outputFiles = {JobType_outputFiles}
config.JobType.maxMemoryMB = {JobType_maxMemoryMB}
config.section_('Data')
config.Data.outputPrimaryDataset = '{Data_outputPrimaryDataset}'
config.Data.splitting = 'EventBased'
config.Data.unitsPerJob = {Data_unitsPerJob}
config.Data.totalUnits = {Data_totalUnits}
config.Data.publication = {Data_publication}
config.Data.outputDatasetTag = '{Data_outputDatasetTag}'
config.Data.outLFNDirBase = {Data_outLFNDirBase}
config.section_('User')
config.section_('Site')
config.Site.storageSite = '{Site_storageSite}'
"""
class CrabConfig(BaseObject):
    """Named bag of CRAB settings; all keyword arguments are forwarded to
    BaseObject and read back as attributes by CrabWorker."""
    def __init__(self,name,**kwargs):
        super(CrabConfig,self).__init__(name,**kwargs)
class CrabWorker(Worker):
    """Worker that renders CRAB configuration / exec files and submits
    CRAB tasks."""

    def __init__(self):
        super(CrabWorker,self).__init__()

    def make_crab_file(self,crabConfig):
        """Render crab_file_template with the values carried by
        *crabConfig* and write it to crabConfig.crab_file_path, creating
        parent directories as needed."""
        outputPath = crabConfig.crab_file_path
        mkdir_p(os.path.dirname(outputPath))
        crab_file_content = crab_file_template.format(
            taskName = crabConfig.taskName,
            JobType_plugName = crabConfig.JobType_plugName,
            JobType_psetName = crabConfig.JobType_psetName,
            JobType_scriptExe = crabConfig.JobType_scriptExe,
            JobType_inputFiles = crabConfig.JobType_inputFiles,
            JobType_outputFiles = crabConfig.JobType_outputFiles,
            JobType_maxMemoryMB = crabConfig.JobType_maxMemoryMB,
            Data_outputPrimaryDataset = crabConfig.Data_outputPrimaryDataset,
            Data_unitsPerJob = crabConfig.Data_unitsPerJob,
            Data_totalUnits = crabConfig.Data_totalUnits,
            Data_publication = crabConfig.Data_publication,
            Data_outputDatasetTag = crabConfig.Data_outputDatasetTag,
            Data_outLFNDirBase = crabConfig.Data_outLFNDirBase,
            Site_storageSite = crabConfig.Site_storageSite,
        )
        # FIX: context manager guarantees the handle is closed even when
        # write() raises (the previous open/write/close leaked on error).
        with open(outputPath, "w") as outputFile:
            outputFile.write(crab_file_content)

    def make_exec_file(self,config):
        """Write config.cmd_str to config.exec_file_path, creating parent
        directories as needed."""
        outputPath = config.exec_file_path
        cmd_str = config.cmd_str
        mkdir_p(os.path.dirname(outputPath))
        # FIX: same context-manager change as make_crab_file.
        with open(outputPath, "w") as outputFile:
            outputFile.write(cmd_str)

    def submit(self,crab_config_path):
        """Run 'crab submit' on the given config file.

        NOTE(review): the shell command is built by string concatenation;
        crab_config_path is expected to come from internal configuration —
        do not pass untrusted input here (or switch to subprocess.run with
        an argument list).
        """
        os.system("crab submit -c "+crab_config_path)
|
14,024 | 47b0db3c22d6f0cc27ae097cfeb37b16a97e30b4 | #
# PySNMP MIB module Juniper-IP-TUNNEL-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Juniper-IP-TUNNEL-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:03:07 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
juniMibs, = mibBuilder.importSymbols("Juniper-MIBs", "juniMibs")
JuniName, JuniNextIfIndex = mibBuilder.importSymbols("Juniper-TC", "JuniName", "JuniNextIfIndex")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
Unsigned32, ModuleIdentity, Counter32, iso, ObjectIdentity, Integer32, Gauge32, MibIdentifier, IpAddress, Bits, NotificationType, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "ModuleIdentity", "Counter32", "iso", "ObjectIdentity", "Integer32", "Gauge32", "MibIdentifier", "IpAddress", "Bits", "NotificationType", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64")
DisplayString, TruthValue, TextualConvention, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TruthValue", "TextualConvention", "RowStatus")
juniIpTunnelMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51))
juniIpTunnelMIB.setRevisions(('2003-09-29 17:29', '2002-09-16 21:44', '2002-01-14 18:16', '2001-07-23 20:57',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: juniIpTunnelMIB.setRevisionsDescriptions(('Product rebranding (JUNOSe).', 'Replaced Unisphere names with Juniper names.', 'Added juniIpTunnelSequenceNumbers.', 'Initial version of this MIB module.',))
if mibBuilder.loadTexts: juniIpTunnelMIB.setLastUpdated('200309291729Z')
if mibBuilder.loadTexts: juniIpTunnelMIB.setOrganization('Juniper Networks, Inc.')
if mibBuilder.loadTexts: juniIpTunnelMIB.setContactInfo(' Juniper Networks, Inc. Postal: 10 Technology Park Drive Westford, MA 01886-3146 USA Tel: +1 978 589 5800 Email: mib@Juniper.net')
if mibBuilder.loadTexts: juniIpTunnelMIB.setDescription('The IP Tunnel MIB for the Juniper Networks enterprise.')
juniIpTunnelInterfaceObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1))
juniIpTunnelNextIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 1), JuniNextIfIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: juniIpTunnelNextIfIndex.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelNextIfIndex.setDescription('Coordinate ifIndex value allocation for entries in juniIpTunnelIfTable. A GET of this object returns the next available ifIndex value to be used to create an entry in the associated interface table; or zero, if no valid ifIndex value is available. This object also returns a value of zero when it is the lexicographic successor of a varbind presented in an SNMP GETNEXT or GETBULK request, for which circumstance it is assumed that ifIndex allocation is unintended. Successive GETs will typically return different values, thus avoiding collisions among cooperating management clients seeking to create table entries simultaneously.')
juniIpTunnelInterfaceTable = MibTable((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2), )
if mibBuilder.loadTexts: juniIpTunnelInterfaceTable.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelInterfaceTable.setDescription('This table contains entries of IP Tunnel interfaces.')
juniIpTunnelInterfaceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1), ).setIndexNames((0, "Juniper-IP-TUNNEL-MIB", "juniIpTunnelIfIndex"))
if mibBuilder.loadTexts: juniIpTunnelInterfaceEntry.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelInterfaceEntry.setDescription('Each entry describes the characteristics of a single IP Tunnel interface. Creating/deleting entries in this table causes corresponding entries for be created/deleted in ifTable/ifXTable/juniIfTable.')
juniIpTunnelIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: juniIpTunnelIfIndex.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelIfIndex.setDescription('The ifIndex of the IP tunnel interface. When creating entries in this table, suitable values for this object are determined by reading juniIpTunnelNextIfIndex.')
juniIpTunnelName = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: juniIpTunnelName.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelName.setDescription('The administratively assigned name for this IP Tunnel interface.')
juniIpTunnelMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("ipTunnelModeGre", 0), ("ipTunnelModeDvmrp", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: juniIpTunnelMode.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelMode.setDescription('The configured mode for this IP Tunnel interface.')
juniIpTunnelVirtualRouter = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 4), JuniName().clone('default')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: juniIpTunnelVirtualRouter.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelVirtualRouter.setDescription('The transport virtual router associated with this IP tunnel interface. This object need not be set when creating row entries. Note that the default when this object is not specified is the router associated with the agent acting on the management request.')
juniIpTunnelChecksum = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 5), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: juniIpTunnelChecksum.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelChecksum.setDescription('The Tunnel checksum configuration for this entry. Setting this object to true(1) will enabled end-to-end checksumming and will cause the system to drop packets with bad checksums received on this interface. Setting this object to false(2) will disable this feature. Note: This configuration object is not supported for DVMRP tunnels.')
juniIpTunnelMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1024, 10240)).clone(10240)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: juniIpTunnelMtu.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelMtu.setDescription('The tunnel MTU.')
juniIpTunnelDestination = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 7), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: juniIpTunnelDestination.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelDestination.setDescription('The tunnel destination IP address.')
juniIpTunnelSource = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 8), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: juniIpTunnelSource.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelSource.setDescription('The tunnel source IP address.')
juniIpTunnelRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: juniIpTunnelRowStatus.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelRowStatus.setDescription('Controls creation/deletion of entries in this table according to the RowStatus textual convention, constrained to support the following values only: createAndGo destroy To create an entry in this table, the following entry objects MUST be explicitly configured: juniIpTunnelIfRowStatus juniIpTunnelName juniIpTunnelMode In addition, when creating an entry the following condition must hold: A value for juniIpTunnelIfIndex must have been determined previously, typically by reading juniIpTunnelNextIfIndex. Once created, the following objects may not be modified: juniIpTunnelName juniIpTunnelMode juniIpTunnelVirtualRouter A corresponding entry in ifTable/ifXTable/juniIfTable is created/ destroyed as a result of creating/destroying an entry in this table.')
juniIpTunnelSequenceNumbers = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 1, 2, 1, 10), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: juniIpTunnelSequenceNumbers.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelSequenceNumbers.setDescription('The tunnel sequence number configuration for this entry. Setting this object to true(1) will enable sequence number generation. Setting this object to false(2) will disable this feature. Note: This configuration object is not supported for DVMRP tunnels.')
juniIpTunnelConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 2))
juniIpTunnelCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 2, 1))
juniIpTunnelGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 2, 2))
juniIpTunnnelCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 2, 1, 1)).setObjects(("Juniper-IP-TUNNEL-MIB", "juniIpTunnelInterfaceGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
juniIpTunnnelCompliance = juniIpTunnnelCompliance.setStatus('obsolete')
if mibBuilder.loadTexts: juniIpTunnnelCompliance.setDescription('Obsolete compliance statement for entities that implement the Juniper IP Tunnel MIB. This compliance statement became obsolete when juniIpTunnelSequenceNumbers was added.')
juniIpTunnnelCompliance2 = ModuleCompliance((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 2, 1, 2)).setObjects(("Juniper-IP-TUNNEL-MIB", "juniIpTunnelInterfaceGroup2"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
juniIpTunnnelCompliance2 = juniIpTunnnelCompliance2.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnnelCompliance2.setDescription('The compliance statement for entities that implement the Juniper IP Tunnel MIB.')
juniIpTunnelInterfaceGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 2, 2, 1)).setObjects(("Juniper-IP-TUNNEL-MIB", "juniIpTunnelNextIfIndex"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelName"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelMode"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelVirtualRouter"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelChecksum"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelMtu"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelSource"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelDestination"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
juniIpTunnelInterfaceGroup = juniIpTunnelInterfaceGroup.setStatus('obsolete')
if mibBuilder.loadTexts: juniIpTunnelInterfaceGroup.setDescription('Obsolete collection of objects for managing IP Tunnel capabilities in a Juniper product. This group became obsolete when juniIpTunnelSequenceNumbers was added.')
juniIpTunnelInterfaceGroup2 = ObjectGroup((1, 3, 6, 1, 4, 1, 4874, 2, 2, 51, 2, 2, 2)).setObjects(("Juniper-IP-TUNNEL-MIB", "juniIpTunnelNextIfIndex"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelName"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelMode"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelVirtualRouter"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelChecksum"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelMtu"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelSource"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelDestination"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelRowStatus"), ("Juniper-IP-TUNNEL-MIB", "juniIpTunnelSequenceNumbers"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
juniIpTunnelInterfaceGroup2 = juniIpTunnelInterfaceGroup2.setStatus('current')
if mibBuilder.loadTexts: juniIpTunnelInterfaceGroup2.setDescription('A collection of objects for managing IP Tunnel capabilities in a Juniper product.')
mibBuilder.exportSymbols("Juniper-IP-TUNNEL-MIB", juniIpTunnelNextIfIndex=juniIpTunnelNextIfIndex, juniIpTunnelCompliances=juniIpTunnelCompliances, juniIpTunnelVirtualRouter=juniIpTunnelVirtualRouter, juniIpTunnelMtu=juniIpTunnelMtu, juniIpTunnelDestination=juniIpTunnelDestination, juniIpTunnelInterfaceEntry=juniIpTunnelInterfaceEntry, juniIpTunnelInterfaceGroup=juniIpTunnelInterfaceGroup, PYSNMP_MODULE_ID=juniIpTunnelMIB, juniIpTunnelInterfaceGroup2=juniIpTunnelInterfaceGroup2, juniIpTunnnelCompliance=juniIpTunnnelCompliance, juniIpTunnelMode=juniIpTunnelMode, juniIpTunnelIfIndex=juniIpTunnelIfIndex, juniIpTunnelInterfaceObjects=juniIpTunnelInterfaceObjects, juniIpTunnelConformance=juniIpTunnelConformance, juniIpTunnelSequenceNumbers=juniIpTunnelSequenceNumbers, juniIpTunnelInterfaceTable=juniIpTunnelInterfaceTable, juniIpTunnelMIB=juniIpTunnelMIB, juniIpTunnnelCompliance2=juniIpTunnnelCompliance2, juniIpTunnelChecksum=juniIpTunnelChecksum, juniIpTunnelGroups=juniIpTunnelGroups, juniIpTunnelName=juniIpTunnelName, juniIpTunnelRowStatus=juniIpTunnelRowStatus, juniIpTunnelSource=juniIpTunnelSource)
|
14,025 | eb5d2bcce5bd64c5f8a498837aa13f292d3b88ac | #!/usr/bin/env python
r"""
Python CPlusPlus Reflection
===========================
This package provides wrapper methods for clang's python
The available modules are:
sbind
A simplified wrapper of clang.cindex.py that deals more with strings
than objects.
"""
__all__ = ['sbind'] |
14,026 | 71b1ed2563cca32438385a72577c3bc3c4a15814 | import niftynet
import sys
def start_training(base_dir, gpu_num, cuda_devices, model_dir, config_file):
model_dir = base_dir + 'models\\' + model_dir
config_file_path = '.\\configFiles\\' + config_file
sys.argv = ['', 'train',
'-a', 'net_segment',
'--conf', config_file_path,
'--model_dir', model_dir,
'--num_gpus', str(gpu_num),
'--cuda_devices', str(cuda_devices)]
niftynet.main()
def start_training_lr_drop(base_dir, gpu_num, cuda_devices, model_dir, config_file, starting_lr, drop, steps, starting_iter=0, split_file=None):
model_dir = base_dir + 'models\\' + model_dir
config_file_path = '.\\configFiles\\' + config_file
start_iter = starting_iter
for i in range(0,len(steps)):
sys.argv = ['', 'train',
'-a', 'net_segment',
'--conf', config_file_path,
'--model_dir', model_dir,
'--num_gpus', str(gpu_num),
'--cuda_devices', str(cuda_devices),
'--lr', str(starting_lr),
'--max_iter', str(steps[i])
]
if (start_iter != 0):
sys.argv = sys.argv + ['--starting_iter', str(start_iter)]
if (split_file):
sys.argv = sys.argv + ['--dataset_split_file', split_file]
niftynet.main()
starting_lr = starting_lr * drop
start_iter = steps[i]
return 0 |
14,027 | 31e5b32d5ab245f2ea15c05cf1eb1d0f1c552a6f | # flake8: noqa
from .builders import OpenApiBuilder
import pyopenapi3.objects
import pyopenapi3.data_types
from pyopenapi3.utils import create_schema
__all__ = (
"OpenApiBuilder",
"objects",
"data_types",
"create_schema"
)
__version__ = "0.1.dev0"
|
14,028 | 706f454f49e7b93ae7a02309c88bb91c1550fec8 | #!/bin/python
# -*- coding: utf-8 -*-
from settings import *
from os.path import join
from datetime import datetime
from ctrls.Reader import Reader
import matplotlib.pyplot as plt
class SimpleDrawer():
'''畫出近 n 天收盤價圖'''
def _getFigTitle(self, stock_number):
t = datetime.now()
return ('%s, Update: %s/%s/%s %s:%s:%s' % (stock_number,
str(t.year), str(t.month),str(t.day),
str(t.hour), str(t.minute), str(t.second))
)
def draw(self, number, length = SIMPLE_FIG_LENGTH):
reader = Reader(number)
series = []
while True:
row = reader.getInput()
if row == None: break
series.append(float(row[6]))
x_axis = range(len(series[-SIMPLE_FIG_LENGTH:]))
plt.plot(x_axis, series[-SIMPLE_FIG_LENGTH:], 'b--', ls='-')
plt.title(self._getFigTitle(number))
# set figure
fig = plt.gcf()
fig.set_size_inches(FIGURE_WIDTH, FIGURE_HEIGHT)
# output figure
fig.savefig(join(SIMPLE_FIG_PATH, number+'.png'), dpi=FIGURE_DPI)
plt.clf()
plt.close('all') |
14,029 | 8d1c25b0c72a032d5faa9619d60e376e8e278681 | import streamlit as st
from datetime import datetime, time
import database as db
import pandas as pd
import patient
import doctor
# function to verify medical test id
def verify_medical_test_id(medical_test_id):
verify = False
conn, c = db.connection()
with conn:
c.execute(
"""
SELECT id
FROM medical_test_record;
"""
)
for id in c.fetchall():
if id[0] == medical_test_id:
verify = True
break
conn.close()
return verify
# function to show the details of medical test(s) given in a list (provided as a parameter)
def show_medical_test_details(list_of_medical_tests):
medical_test_titles = ['Medical Test ID', 'Test name', 'Patient ID',
'Patient name', 'Doctor ID', 'Doctor name',
'Medical Lab Scientist ID',
'Test date and time [DD-MM-YYYY (hh:mm)]',
'Result date and time [DD-MM-YYYY (hh:mm)]',
'Result and diagnosis', 'Description',
'Comments', 'Cost (INR)']
if len(list_of_medical_tests) == 0:
st.warning('No data to show')
elif len(list_of_medical_tests) == 1:
medical_test_details = [x for x in list_of_medical_tests[0]]
series = pd.Series(data = medical_test_details, index = medical_test_titles)
st.write(series)
else:
medical_test_details = []
for medical_test in list_of_medical_tests:
medical_test_details.append([x for x in medical_test])
df = pd.DataFrame(data = medical_test_details, columns = medical_test_titles)
st.write(df)
# function to generate unique medical test id using current date and time
def generate_medical_test_id():
id_1 = datetime.now().strftime('%S%M%H')
id_2 = datetime.now().strftime('%Y%m%d')[2:]
id = f'T-{id_1}-{id_2}'
return id
# function to fetch patient name from the database for the given patient id
def get_patient_name(patient_id):
conn, c = db.connection()
with conn:
c.execute(
"""
SELECT name
FROM patient_record
WHERE id = :id;
""",
{ 'id': patient_id }
)
return c.fetchone()[0]
# function to fetch doctor name from the database for the given doctor id
def get_doctor_name(doctor_id):
conn, c = db.connection()
with conn:
c.execute(
"""
SELECT name
FROM doctor_record
WHERE id = :id;
""",
{ 'id': doctor_id }
)
return c.fetchone()[0]
# class containing all the fields and methods required to work with the medical tests' table in the database
class Medical_Test:
def __init__(self):
self.id = str()
self.test_name = str()
self.patient_id = str()
self.patient_name = str()
self.doctor_id = str()
self.doctor_name = str()
self.medical_lab_scientist_id = str()
self.test_date_time = str()
self.result_date_time = str()
self.cost = int()
self.result_and_diagnosis = str()
self.description = str()
self.comments = str()
# method to add a new medical test record to the database
def add_medical_test(self):
st.write('Enter medical test details:')
self.test_name = st.text_input('Test name')
patient_id = st.text_input('Patient ID')
if patient_id == '':
st.empty()
elif not patient.verify_patient_id(patient_id):
st.error('Invalid Patient ID')
else:
st.success('Verified')
self.patient_id = patient_id
self.patient_name = get_patient_name(patient_id)
doctor_id = st.text_input('Doctor ID')
if doctor_id == '':
st.empty()
elif not doctor.verify_doctor_id(doctor_id):
st.error('Invalid Doctor ID')
else:
st.success('Verified')
self.doctor_id = doctor_id
self.doctor_name = get_doctor_name(doctor_id)
self.medical_lab_scientist_id = st.text_input('Medical lab scientist ID')
test_date = st.date_input('Test date (YYYY/MM/DD)').strftime('%d-%m-%Y')
st.info('If the required date is not in the calendar, please type it in the box above.')
test_time = st.time_input('Test time (hh:mm)', time(0, 0)).strftime('%H:%M')
st.info('If the required time is not in the drop down list, please type it in the box above.')
self.test_date_time = f'{test_date} ({test_time})'
result_date = st.date_input('Result date (YYYY/MM/DD)').strftime('%d-%m-%Y')
st.info('If the required date is not in the calendar, please type it in the box above.')
result_time = st.time_input('Result time (hh:mm)', time(0, 0)).strftime('%H:%M')
st.info('If the required time is not in the drop down list, please type it in the box above.')
self.result_date_time = f'{result_date} ({result_time})'
self.cost = st.number_input('Cost (INR)', value = 0, min_value = 0, max_value = 10000)
result_and_diagnosis = st.text_area('Result and diagnosis')
self.result_and_diagnosis = (lambda res_diag : 'Test result awaited' if res_diag == '' else res_diag)(result_and_diagnosis)
description = st.text_area('Description')
self.description = (lambda desc : None if desc == '' else desc)(description)
comments = st.text_area('Comments (if any)')
self.comments = (lambda comments : None if comments == '' else comments)(comments)
self.id = generate_medical_test_id()
save = st.button('Save')
# executing SQLite statements to save the new medical test record to the database
if save:
conn, c = db.connection()
with conn:
c.execute(
"""
INSERT INTO medical_test_record
(
id, test_name, patient_id, patient_name, doctor_id,
doctor_name, medical_lab_scientist_id, test_date_time,
result_date_time, cost, result_and_diagnosis, description,
comments
)
VALUES (
:id, :name, :p_id, :p_name, :dr_id, :dr_name, :mls_id,
:test_date_time, :result_date_time, :cost,
:result_diagnosis, :desc, :comments
);
""",
{
'id': self.id, 'name': self.test_name,
'p_id': self.patient_id, 'p_name': self.patient_name,
'dr_id': self.doctor_id, 'dr_name': self.doctor_name,
'mls_id': self.medical_lab_scientist_id,
'test_date_time': self.test_date_time,
'result_date_time': self.result_date_time, 'cost': self.cost,
'result_diagnosis': self.result_and_diagnosis,
'desc': self.description, 'comments': self.comments
}
)
st.success('Medical test details saved successfully.')
st.write('The Medical Test ID is: ', self.id)
conn.close()
# method to update an existing medical test record in the database
def update_medical_test(self):
id = st.text_input('Enter Medical Test ID of the medical test to be updated')
if id == '':
st.empty()
elif not verify_medical_test_id(id):
st.error('Invalid Medical Test ID')
else:
st.success('Verified')
conn, c = db.connection()
# shows the current details of the medical test before updating
with conn:
c.execute(
"""
SELECT *
FROM medical_test_record
WHERE id = :id;
""",
{ 'id': id }
)
st.write('Here are the current details of the medical:')
show_medical_test_details(c.fetchall())
st.write('Enter new details of the medical test:')
result_and_diagnosis = st.text_area('Result and diagnosis')
self.result_and_diagnosis = (lambda res_diag : 'Test result awaited' if res_diag == '' else res_diag)(result_and_diagnosis)
description = st.text_area('Description')
self.description = (lambda desc : None if desc == '' else desc)(description)
comments = st.text_area('Comments (if any)')
self.comments = (lambda comments : None if comments == '' else comments)(comments)
update = st.button('Update')
# executing SQLite statements to update this medical test's record in the database
if update:
with conn:
c.execute(
"""
UPDATE medical_test_record
SET result_and_diagnosis = :result_diagnosis,
description = :description, comments = :comments
WHERE id = :id;
""",
{
'id': id, 'result_diagnosis': self.result_and_diagnosis,
'description': self.description, 'comments': self.comments
}
)
st.success('Medical test details updated successfully.')
conn.close()
# method to delete an existing medical test record from the database
def delete_medical_test(self):
id = st.text_input('Enter Medical Test ID of the medical test to be deleted')
if id == '':
st.empty()
elif not verify_medical_test_id(id):
st.error('Invalid Medical Test ID')
else:
st.success('Verified')
conn, c = db.connection()
# shows the current details of the medical test before deletion
with conn:
c.execute(
"""
SELECT *
FROM medical_test_record
WHERE id = :id;
""",
{ 'id': id }
)
st.write('Here are the details of the medical test to be deleted:')
show_medical_test_details(c.fetchall())
confirm = st.checkbox('Check this box to confirm deletion')
if confirm:
delete = st.button('Delete')
# executing SQLite statements to delete this medical test's record from the database
if delete:
c.execute(
"""
DELETE FROM medical_test_record
WHERE id = :id;
""",
{ 'id': id }
)
st.success('Medical test details deleted successfully.')
conn.close()
# method to show all the medical tests of a particular patient (using patient id)
def medical_tests_by_patient(self):
patient_id = st.text_input('Enter Patient ID to get the medical test record of that patient')
if patient_id == '':
st.empty()
elif not patient.verify_patient_id(patient_id):
st.error('Invalid Patient ID')
else:
st.success('Verified')
conn, c = db.connection()
with conn:
c.execute(
"""
SELECT *
FROM medical_test_record
WHERE patient_id = :p_id;
""",
{ 'p_id': patient_id }
)
st.write('Here is the medical test record of', get_patient_name(patient_id), ':')
show_medical_test_details(c.fetchall())
conn.close()
|
14,030 | 9c709187de5d06a51d76f1ecff69b693a8c1a293 | # plot 5-10um data from Hill paper
import numpy as np
import matplotlib
from matplotlib import pyplot as plt
from math import *
import astropy.io.ascii as ascii
data=ascii.read
data = ascii.read('table2.txt')
wavelength = data['col1']
total = data['col2']
most_lum = data['col3']
medium_lum = data['col4']
least_lum = data['col5']
fig1=plt.figure()
ax1=fig1.gca()
ax1.axis([5,10,0,1.7])
ax1.set_title('Quasar composites vs luminosity: data from Hill et 2014', size=16)
ax1.set_xlabel('Rest $\lambda$, microns', size=16)
ax1.set_ylabel('Relative flux', size=16)
ax1.tick_params(labelsize=16)
#ax1.plot(wavelength,total)
ax1.plot(wavelength,least_lum*1.3,lw=2)
ax1.plot(wavelength,most_lum/1.7,lw=2)
ax1.plot(wavelength,medium_lum,lw=2)
ax1.text(6.2,1.2,'PAH',size=16,ha='center')
ax1.text(7.7,1.6,'PAH',size=16,ha='center')
ax1.text(8.6,1.1,'PAH',size=16,ha='center')
ax1.text(6.97,0.95,'[ArIII]',size=16,ha='center')
ax1.text(8.97,0.95,'[ArIII]',size=16,ha='center')
plt.show()
|
14,031 | a1641e9f87e0c777d56d131d7928f7ee30f96caf | #!/usr/bin/env python3
import doctest
import functools as fn
import itertools as it
import typing
FACTOR_A = 16807
FACTOR_B = 48271
MULT_A = 4
MULT_B = 8
TEST_A = 65
TEST_B = 8921
PAIRS1 = 40000000
PAIRS2 = 5000000
def no_test(_: int) -> bool:
return True
def is_multiple_of(m: int) -> typing.Callable[[int], bool]:
return lambda n: n % m == 0
def generator(factor: int, test: typing.Callable[[int], bool],
start: int) -> typing.Iterator[int]:
"""Day 15 generator.
>>> list(it.islice(generator(FACTOR_A, no_test, TEST_A), 5))
[1092455, 1181022009, 245556042, 1744312007, 1352636452]
>>> list(it.islice(generator(FACTOR_B, no_test, TEST_B), 5))
[430625591, 1233683848, 1431495498, 137874439, 285222916]
>>> list(it.islice(generator(FACTOR_A, is_multiple_of(MULT_A), TEST_A), 5))
[1352636452, 1992081072, 530830436, 1980017072, 740335192]
>>> list(it.islice(generator(FACTOR_B, is_multiple_of(MULT_B), TEST_B), 5))
[1233683848, 862516352, 1159784568, 1616057672, 412269392]
"""
value = start
while True:
value = (value * factor) % 2147483647
if test(value):
yield value
gen_a = fn.partial(generator, FACTOR_A)
gen_b = fn.partial(generator, FACTOR_B)
gen_a1 = fn.partial(gen_a, no_test)
gen_b1 = fn.partial(gen_b, no_test)
gen_a2 = fn.partial(gen_a, is_multiple_of(MULT_A))
gen_b2 = fn.partial(gen_b, is_multiple_of(MULT_B))
def lower16(n: int) -> int:
return n & 0xFFFF
def judge(genA: typing.Iterator[int], genB: typing.Iterator[int], steps: int) -> int:
"""Day 15 duel judge.
>>> judge(gen_a1(TEST_A), gen_b1(TEST_B), 5)
1
>>> judge(gen_a1(TEST_A), gen_b1(TEST_B), PAIRS1)
588
>>> judge(gen_a2(TEST_A), gen_b2(TEST_B), 1056)
1
>>> judge(gen_a2(TEST_A), gen_b2(TEST_B), PAIRS2)
309
"""
res = 0
for na, nb in it.islice(zip(genA, genB), steps):
la, lb = lower16(na), lower16(nb)
if la == lb:
res += 1
return res
if __name__ == "__main__":
err, tot = doctest.testmod()
if err == 0:
print("{} tests OK :]".format(tot))
START_A = 116
START_B = 299
print("Judge count (1) after %d pairs: %d" %
(PAIRS1, judge(gen_a1(START_A), gen_b1(START_B), PAIRS1)))
print("Judge count (2) after %d pairs: %d" %
(PAIRS2, judge(gen_a2(START_A), gen_b2(START_B), PAIRS2)))
|
14,032 | 31b036da78ad6aeae8c18fd12561f85bcabde033 | from .SearchEngine import SearchEngine, Navigator
from bs4 import BeautifulSoup
class Bing(SearchEngine):
def __init__(self, navigator=None):
"""
:type navigator: Navigator
"""
super().__init__(base_url='https://bing.com', navigator=navigator)
def reset(self):
super().reset()
def get_search_url(self, query):
return f'{self.url}/search?q={query}'
def parse_search_results(self, html):
"""
:type html: BeautifulSoup
:rtype: list[dict[str,str]]
"""
b_content = html.findAll(attrs={'id': 'b_content'})[0]
b_algo = b_content.findAll(attrs={'class': 'b_algo'})
results = [{'url': result.findAll('a')[0]['href'], 'text': result.get_text()} for result in b_algo]
return results
|
14,033 | 9c585f7dbbeb681a8b7cec8555cf6b214cc84324 | # encoding: utf-8
# module PySide.QtMultimedia
# from /corp.blizzard.net/BFD/Deploy/Packages/Published/ThirdParty/Qt4.8.4/2015-05-15.163857/prebuilt/linux_x64_gcc41_python2.7_ucs4/PySide/QtMultimedia.so
# by generator 1.138
# no doc
# imports
import PySide.QtCore as __PySide_QtCore
# no functions
# classes
class _QObject(__PySide_QtCore._Object):
# no doc
def blockSignals(self, *args, **kwargs): # real signature unknown
pass
def childEvent(self, *args, **kwargs): # real signature unknown
pass
def children(self, *args, **kwargs): # real signature unknown
pass
def connect(self, *args, **kwargs): # real signature unknown
pass
def connectNotify(self, *args, **kwargs): # real signature unknown
pass
def customEvent(self, *args, **kwargs): # real signature unknown
pass
def deleteLater(self, *args, **kwargs): # real signature unknown
pass
def disconnect(self, *args, **kwargs): # real signature unknown
pass
def disconnectNotify(self, *args, **kwargs): # real signature unknown
pass
def dumpObjectInfo(self, *args, **kwargs): # real signature unknown
pass
def dumpObjectTree(self, *args, **kwargs): # real signature unknown
pass
def dynamicPropertyNames(self, *args, **kwargs): # real signature unknown
pass
def emit(self, *args, **kwargs): # real signature unknown
pass
def event(self, *args, **kwargs): # real signature unknown
pass
def eventFilter(self, *args, **kwargs): # real signature unknown
pass
def findChild(self, *args, **kwargs): # real signature unknown
pass
def findChildren(self, *args, **kwargs): # real signature unknown
pass
def inherits(self, *args, **kwargs): # real signature unknown
pass
def installEventFilter(self, *args, **kwargs): # real signature unknown
pass
def isWidgetType(self, *args, **kwargs): # real signature unknown
pass
def killTimer(self, *args, **kwargs): # real signature unknown
pass
def metaObject(self, *args, **kwargs): # real signature unknown
pass
def moveToThread(self, *args, **kwargs): # real signature unknown
pass
def objectName(self, *args, **kwargs): # real signature unknown
pass
def parent(self, *args, **kwargs): # real signature unknown
pass
def property(self, *args, **kwargs): # real signature unknown
pass
def receivers(self, *args, **kwargs): # real signature unknown
pass
def registerUserData(self, *args, **kwargs): # real signature unknown
pass
def removeEventFilter(self, *args, **kwargs): # real signature unknown
pass
def sender(self, *args, **kwargs): # real signature unknown
pass
def senderSignalIndex(self, *args, **kwargs): # real signature unknown
pass
def setObjectName(self, *args, **kwargs): # real signature unknown
pass
def setParent(self, *args, **kwargs): # real signature unknown
pass
def setProperty(self, *args, **kwargs): # real signature unknown
pass
def signalsBlocked(self, *args, **kwargs): # real signature unknown
pass
def startTimer(self, *args, **kwargs): # real signature unknown
pass
def thread(self, *args, **kwargs): # real signature unknown
pass
def timerEvent(self, *args, **kwargs): # real signature unknown
pass
def tr(self, *args, **kwargs): # real signature unknown
pass
def trUtf8(self, *args, **kwargs): # real signature unknown
pass
def __delattr__(self, name): # real signature unknown; restored from __doc__
""" x.__delattr__('name') <==> del x.name """
pass
def __getattribute__(self, name): # real signature unknown; restored from __doc__
""" x.__getattribute__('name') <==> x.name """
pass
def __init__(self, *more): # real signature unknown; restored from __doc__
""" x.__init__(...) initializes x; see help(type(x)) for signature """
pass
def __setattr__(self, name, value): # real signature unknown; restored from __doc__
""" x.__setattr__('name', value) <==> x.name = value """
pass
destroyed = None
staticMetaObject = None
__new__ = None
class QAbstractAudioDeviceInfo(__PySide_QtCore.QObject):
# no doc
def byteOrderList(self, *args, **kwargs): # real signature unknown
pass
def channelsList(self, *args, **kwargs): # real signature unknown
pass
def codecList(self, *args, **kwargs): # real signature unknown
pass
def deviceName(self, *args, **kwargs): # real signature unknown
pass
def frequencyList(self, *args, **kwargs): # real signature unknown
pass
def isFormatSupported(self, *args, **kwargs): # real signature unknown
pass
def nearestFormat(self, *args, **kwargs): # real signature unknown
pass
def preferredFormat(self, *args, **kwargs): # real signature unknown
pass
def sampleSizeList(self, *args, **kwargs): # real signature unknown
pass
def sampleTypeList(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *more): # real signature unknown; restored from __doc__
""" x.__init__(...) initializes x; see help(type(x)) for signature """
pass
staticMetaObject = None
__new__ = None
class QAbstractAudioInput(__PySide_QtCore.QObject):
# no doc
def bufferSize(self, *args, **kwargs): # real signature unknown
pass
def bytesReady(self, *args, **kwargs): # real signature unknown
pass
def elapsedUSecs(self, *args, **kwargs): # real signature unknown
pass
def error(self, *args, **kwargs): # real signature unknown
pass
def format(self, *args, **kwargs): # real signature unknown
pass
def notifyInterval(self, *args, **kwargs): # real signature unknown
pass
def periodSize(self, *args, **kwargs): # real signature unknown
pass
def processedUSecs(self, *args, **kwargs): # real signature unknown
pass
def reset(self, *args, **kwargs): # real signature unknown
pass
def resume(self, *args, **kwargs): # real signature unknown
pass
def setBufferSize(self, *args, **kwargs): # real signature unknown
pass
def setNotifyInterval(self, *args, **kwargs): # real signature unknown
pass
def start(self, *args, **kwargs): # real signature unknown
pass
def state(self, *args, **kwargs): # real signature unknown
pass
def stop(self, *args, **kwargs): # real signature unknown
pass
def suspend(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *more): # real signature unknown; restored from __doc__
""" x.__init__(...) initializes x; see help(type(x)) for signature """
pass
notify = None
stateChanged = None
staticMetaObject = None
__new__ = None
class QAbstractAudioOutput(__PySide_QtCore.QObject):
# no doc
def bufferSize(self, *args, **kwargs): # real signature unknown
pass
def bytesFree(self, *args, **kwargs): # real signature unknown
pass
def elapsedUSecs(self, *args, **kwargs): # real signature unknown
pass
def error(self, *args, **kwargs): # real signature unknown
pass
def format(self, *args, **kwargs): # real signature unknown
pass
def notifyInterval(self, *args, **kwargs): # real signature unknown
pass
def periodSize(self, *args, **kwargs): # real signature unknown
pass
def processedUSecs(self, *args, **kwargs): # real signature unknown
pass
def reset(self, *args, **kwargs): # real signature unknown
pass
def resume(self, *args, **kwargs): # real signature unknown
pass
def setBufferSize(self, *args, **kwargs): # real signature unknown
pass
def setNotifyInterval(self, *args, **kwargs): # real signature unknown
pass
def start(self, *args, **kwargs): # real signature unknown
pass
def state(self, *args, **kwargs): # real signature unknown
pass
def stop(self, *args, **kwargs): # real signature unknown
pass
def suspend(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *more): # real signature unknown; restored from __doc__
""" x.__init__(...) initializes x; see help(type(x)) for signature """
pass
notify = None
stateChanged = None
staticMetaObject = None
__new__ = None
class _Object(object):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
__dict__ = None # (!) real value is ''
class QAbstractVideoBuffer(_Object):
# no doc
def handle(self, *args, **kwargs): # real signature unknown
pass
def handleType(self, *args, **kwargs): # real signature unknown
pass
def mapMode(self, *args, **kwargs): # real signature unknown
pass
def unmap(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *more): # real signature unknown; restored from __doc__
""" x.__init__(...) initializes x; see help(type(x)) for signature """
pass
CoreImageHandle = None
GLTextureHandle = None
HandleType = None
MapMode = None
NoHandle = None
NotMapped = None
QPixmapHandle = None
ReadOnly = None
ReadWrite = None
UserHandle = None
WriteOnly = None
XvShmImageHandle = None
__new__ = None
class QAbstractVideoSurface(__PySide_QtCore.QObject):
# no doc
def error(self, *args, **kwargs): # real signature unknown
pass
def isActive(self, *args, **kwargs): # real signature unknown
pass
def isFormatSupported(self, *args, **kwargs): # real signature unknown
pass
def nearestFormat(self, *args, **kwargs): # real signature unknown
pass
def present(self, *args, **kwargs): # real signature unknown
pass
def setError(self, *args, **kwargs): # real signature unknown
pass
def start(self, *args, **kwargs): # real signature unknown
pass
def stop(self, *args, **kwargs): # real signature unknown
pass
def supportedPixelFormats(self, *args, **kwargs): # real signature unknown
pass
def surfaceFormat(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *more): # real signature unknown; restored from __doc__
""" x.__init__(...) initializes x; see help(type(x)) for signature """
pass
activeChanged = None
Error = None
IncorrectFormatError = None
NoError = None
ResourceError = None
staticMetaObject = None
StoppedError = None
supportedFormatsChanged = None
surfaceFormatChanged = None
UnsupportedFormatError = None
__new__ = None
class QAudio(_Object):
    """Auto-generated introspection stub: namespace holding audio mode/state/error enums."""
    def __init__(self, *args, **kwargs): # real signature unknown
        pass
    # Enum members (Mode, State, Error); real values lost in stub generation.
    ActiveState = None
    AudioInput = None
    AudioOutput = None
    Error = None
    FatalError = None
    IdleState = None
    IOError = None
    Mode = None
    NoError = None
    OpenError = None
    State = None
    StoppedState = None
    SuspendedState = None
    UnderrunError = None
class QAudioDeviceInfo(_Object):
    """Auto-generated introspection stub: queries capabilities of an audio device."""
    def availableDevices(self, *args, **kwargs): # real signature unknown
        pass
    def defaultInputDevice(self, *args, **kwargs): # real signature unknown
        pass
    def defaultOutputDevice(self, *args, **kwargs): # real signature unknown
        pass
    def deviceName(self, *args, **kwargs): # real signature unknown
        pass
    def isFormatSupported(self, *args, **kwargs): # real signature unknown
        pass
    def isNull(self, *args, **kwargs): # real signature unknown
        pass
    def nearestFormat(self, *args, **kwargs): # real signature unknown
        pass
    def preferredFormat(self, *args, **kwargs): # real signature unknown
        pass
    def supportedByteOrders(self, *args, **kwargs): # real signature unknown
        pass
    def supportedChannelCounts(self, *args, **kwargs): # real signature unknown
        pass
    def supportedChannels(self, *args, **kwargs): # real signature unknown
        pass
    def supportedCodecs(self, *args, **kwargs): # real signature unknown
        pass
    def supportedFrequencies(self, *args, **kwargs): # real signature unknown
        pass
    def supportedSampleRates(self, *args, **kwargs): # real signature unknown
        pass
    def supportedSampleSizes(self, *args, **kwargs): # real signature unknown
        pass
    def supportedSampleTypes(self, *args, **kwargs): # real signature unknown
        pass
    def __copy__(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    def __nonzero__(self): # real signature unknown; restored from __doc__
        """ x.__nonzero__() <==> x != 0 """
        pass
    __new__ = None
class _QFactoryInterface(__PySide_QtCore._Object):
    """Auto-generated introspection stub: plugin factory interface (keys only)."""
    def keys(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    __new__ = None
class QAudioEngineFactoryInterface(__PySide_QtCore.QFactoryInterface):
    """Auto-generated introspection stub: factory interface for audio engine plugins."""
    def availableDevices(self, *args, **kwargs): # real signature unknown
        pass
    def createDeviceInfo(self, *args, **kwargs): # real signature unknown
        pass
    def createInput(self, *args, **kwargs): # real signature unknown
        pass
    def createOutput(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    __new__ = None
class QAudioEnginePlugin(__PySide_QtCore.QObject, QAudioEngineFactoryInterface):
    """Auto-generated introspection stub: base class for audio engine plugins."""
    def availableDevices(self, *args, **kwargs): # real signature unknown
        pass
    def createDeviceInfo(self, *args, **kwargs): # real signature unknown
        pass
    def createInput(self, *args, **kwargs): # real signature unknown
        pass
    def createOutput(self, *args, **kwargs): # real signature unknown
        pass
    def keys(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    staticMetaObject = None
    __new__ = None
class QAudioFormat(_Object):
    """Auto-generated introspection stub: describes an audio stream format (rate, channels, codec...)."""
    def byteOrder(self, *args, **kwargs): # real signature unknown
        pass
    def channelCount(self, *args, **kwargs): # real signature unknown
        pass
    def channels(self, *args, **kwargs): # real signature unknown
        pass
    def codec(self, *args, **kwargs): # real signature unknown
        pass
    def frequency(self, *args, **kwargs): # real signature unknown
        pass
    def isValid(self, *args, **kwargs): # real signature unknown
        pass
    def sampleRate(self, *args, **kwargs): # real signature unknown
        pass
    def sampleSize(self, *args, **kwargs): # real signature unknown
        pass
    def sampleType(self, *args, **kwargs): # real signature unknown
        pass
    def setByteOrder(self, *args, **kwargs): # real signature unknown
        pass
    def setChannelCount(self, *args, **kwargs): # real signature unknown
        pass
    def setChannels(self, *args, **kwargs): # real signature unknown
        pass
    def setCodec(self, *args, **kwargs): # real signature unknown
        pass
    def setFrequency(self, *args, **kwargs): # real signature unknown
        pass
    def setSampleRate(self, *args, **kwargs): # real signature unknown
        pass
    def setSampleSize(self, *args, **kwargs): # real signature unknown
        pass
    def setSampleType(self, *args, **kwargs): # real signature unknown
        pass
    def __copy__(self, *args, **kwargs): # real signature unknown
        pass
    def __eq__(self, y): # real signature unknown; restored from __doc__
        """ x.__eq__(y) <==> x==y """
        pass
    def __ge__(self, y): # real signature unknown; restored from __doc__
        """ x.__ge__(y) <==> x>=y """
        pass
    def __gt__(self, y): # real signature unknown; restored from __doc__
        """ x.__gt__(y) <==> x>y """
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    def __le__(self, y): # real signature unknown; restored from __doc__
        """ x.__le__(y) <==> x<=y """
        pass
    def __lt__(self, y): # real signature unknown; restored from __doc__
        """ x.__lt__(y) <==> x<y """
        pass
    def __ne__(self, y): # real signature unknown; restored from __doc__
        """ x.__ne__(y) <==> x!=y """
        pass
    # Endian / SampleType enum members; real values lost in stub generation.
    BigEndian = None
    Endian = None
    Float = None
    LittleEndian = None
    SampleType = None
    SignedInt = None
    Unknown = None
    UnSignedInt = None
    __new__ = None
class QAudioInput(__PySide_QtCore.QObject):
    """Auto-generated introspection stub: receives audio data from an input device."""
    def bufferSize(self, *args, **kwargs): # real signature unknown
        pass
    def bytesReady(self, *args, **kwargs): # real signature unknown
        pass
    def elapsedUSecs(self, *args, **kwargs): # real signature unknown
        pass
    def error(self, *args, **kwargs): # real signature unknown
        pass
    def format(self, *args, **kwargs): # real signature unknown
        pass
    def notifyInterval(self, *args, **kwargs): # real signature unknown
        pass
    def periodSize(self, *args, **kwargs): # real signature unknown
        pass
    def processedUSecs(self, *args, **kwargs): # real signature unknown
        pass
    def reset(self, *args, **kwargs): # real signature unknown
        pass
    def resume(self, *args, **kwargs): # real signature unknown
        pass
    def setBufferSize(self, *args, **kwargs): # real signature unknown
        pass
    def setNotifyInterval(self, *args, **kwargs): # real signature unknown
        pass
    def start(self, *args, **kwargs): # real signature unknown
        pass
    def state(self, *args, **kwargs): # real signature unknown
        pass
    def stop(self, *args, **kwargs): # real signature unknown
        pass
    def suspend(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    # Signals and meta-object placeholder; real values lost in stub generation.
    notify = None
    stateChanged = None
    staticMetaObject = None
    __new__ = None
class QAudioOutput(__PySide_QtCore.QObject):
    """Auto-generated introspection stub: sends audio data to an output device."""
    def bufferSize(self, *args, **kwargs): # real signature unknown
        pass
    def bytesFree(self, *args, **kwargs): # real signature unknown
        pass
    def elapsedUSecs(self, *args, **kwargs): # real signature unknown
        pass
    def error(self, *args, **kwargs): # real signature unknown
        pass
    def format(self, *args, **kwargs): # real signature unknown
        pass
    def notifyInterval(self, *args, **kwargs): # real signature unknown
        pass
    def periodSize(self, *args, **kwargs): # real signature unknown
        pass
    def processedUSecs(self, *args, **kwargs): # real signature unknown
        pass
    def reset(self, *args, **kwargs): # real signature unknown
        pass
    def resume(self, *args, **kwargs): # real signature unknown
        pass
    def setBufferSize(self, *args, **kwargs): # real signature unknown
        pass
    def setNotifyInterval(self, *args, **kwargs): # real signature unknown
        pass
    def start(self, *args, **kwargs): # real signature unknown
        pass
    def state(self, *args, **kwargs): # real signature unknown
        pass
    def stop(self, *args, **kwargs): # real signature unknown
        pass
    def suspend(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    # Signals and meta-object placeholder; real values lost in stub generation.
    notify = None
    stateChanged = None
    staticMetaObject = None
    __new__ = None
class QVideoFrame(_Object):
    """Auto-generated introspection stub: represents a single frame of video data."""
    def bits(self, *args, **kwargs): # real signature unknown
        pass
    def bytesPerLine(self, *args, **kwargs): # real signature unknown
        pass
    def endTime(self, *args, **kwargs): # real signature unknown
        pass
    def fieldType(self, *args, **kwargs): # real signature unknown
        pass
    def handle(self, *args, **kwargs): # real signature unknown
        pass
    def handleType(self, *args, **kwargs): # real signature unknown
        pass
    def height(self, *args, **kwargs): # real signature unknown
        pass
    def imageFormatFromPixelFormat(self, *args, **kwargs): # real signature unknown
        pass
    def isMapped(self, *args, **kwargs): # real signature unknown
        pass
    def isReadable(self, *args, **kwargs): # real signature unknown
        pass
    def isValid(self, *args, **kwargs): # real signature unknown
        pass
    def isWritable(self, *args, **kwargs): # real signature unknown
        pass
    def map(self, *args, **kwargs): # real signature unknown
        pass
    def mapMode(self, *args, **kwargs): # real signature unknown
        pass
    def mappedBytes(self, *args, **kwargs): # real signature unknown
        pass
    def pixelFormat(self, *args, **kwargs): # real signature unknown
        pass
    def pixelFormatFromImageFormat(self, *args, **kwargs): # real signature unknown
        pass
    def setEndTime(self, *args, **kwargs): # real signature unknown
        pass
    def setFieldType(self, *args, **kwargs): # real signature unknown
        pass
    def setStartTime(self, *args, **kwargs): # real signature unknown
        pass
    def size(self, *args, **kwargs): # real signature unknown
        pass
    def startTime(self, *args, **kwargs): # real signature unknown
        pass
    def unmap(self, *args, **kwargs): # real signature unknown
        pass
    def width(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    # FieldType and PixelFormat enum members; real values lost in stub generation.
    BottomField = None
    FieldType = None
    Format_ARGB32 = None
    Format_ARGB32_Premultiplied = None
    Format_ARGB8565_Premultiplied = None
    Format_AYUV444 = None
    Format_AYUV444_Premultiplied = None
    Format_BGR24 = None
    Format_BGR32 = None
    Format_BGR555 = None
    Format_BGR565 = None
    Format_BGRA32 = None
    Format_BGRA32_Premultiplied = None
    Format_BGRA5658_Premultiplied = None
    Format_IMC1 = None
    Format_IMC2 = None
    Format_IMC3 = None
    Format_IMC4 = None
    Format_Invalid = None
    Format_NV12 = None
    Format_NV21 = None
    Format_RGB24 = None
    Format_RGB32 = None
    Format_RGB555 = None
    Format_RGB565 = None
    Format_User = None
    Format_UYVY = None
    Format_Y16 = None
    Format_Y8 = None
    Format_YUV420P = None
    Format_YUV444 = None
    Format_YUYV = None
    Format_YV12 = None
    InterlacedFrame = None
    PixelFormat = None
    ProgressiveFrame = None
    TopField = None
    __new__ = None
class QVideoSurfaceFormat(_Object):
    """Auto-generated introspection stub: stream format negotiated with a video surface."""
    def frameHeight(self, *args, **kwargs): # real signature unknown
        pass
    def frameRate(self, *args, **kwargs): # real signature unknown
        pass
    def frameSize(self, *args, **kwargs): # real signature unknown
        pass
    def frameWidth(self, *args, **kwargs): # real signature unknown
        pass
    def handleType(self, *args, **kwargs): # real signature unknown
        pass
    def isValid(self, *args, **kwargs): # real signature unknown
        pass
    def pixelAspectRatio(self, *args, **kwargs): # real signature unknown
        pass
    def pixelFormat(self, *args, **kwargs): # real signature unknown
        pass
    def property(self, *args, **kwargs): # real signature unknown
        pass
    def propertyNames(self, *args, **kwargs): # real signature unknown
        pass
    def scanLineDirection(self, *args, **kwargs): # real signature unknown
        pass
    def setFrameRate(self, *args, **kwargs): # real signature unknown
        pass
    def setFrameSize(self, *args, **kwargs): # real signature unknown
        pass
    def setPixelAspectRatio(self, *args, **kwargs): # real signature unknown
        pass
    def setProperty(self, *args, **kwargs): # real signature unknown
        pass
    def setScanLineDirection(self, *args, **kwargs): # real signature unknown
        pass
    def setViewport(self, *args, **kwargs): # real signature unknown
        pass
    def setYCbCrColorSpace(self, *args, **kwargs): # real signature unknown
        pass
    def sizeHint(self, *args, **kwargs): # real signature unknown
        pass
    def viewport(self, *args, **kwargs): # real signature unknown
        pass
    def yCbCrColorSpace(self, *args, **kwargs): # real signature unknown
        pass
    def __copy__(self, *args, **kwargs): # real signature unknown
        pass
    def __eq__(self, y): # real signature unknown; restored from __doc__
        """ x.__eq__(y) <==> x==y """
        pass
    def __ge__(self, y): # real signature unknown; restored from __doc__
        """ x.__ge__(y) <==> x>=y """
        pass
    def __gt__(self, y): # real signature unknown; restored from __doc__
        """ x.__gt__(y) <==> x>y """
        pass
    def __init__(self, *more): # real signature unknown; restored from __doc__
        """ x.__init__(...) initializes x; see help(type(x)) for signature """
        pass
    def __le__(self, y): # real signature unknown; restored from __doc__
        """ x.__le__(y) <==> x<=y """
        pass
    def __lt__(self, y): # real signature unknown; restored from __doc__
        """ x.__lt__(y) <==> x<y """
        pass
    def __ne__(self, y): # real signature unknown; restored from __doc__
        """ x.__ne__(y) <==> x!=y """
        pass
    def __repr__(self): # real signature unknown; restored from __doc__
        """ x.__repr__() <==> repr(x) """
        pass
    # Direction and YCbCrColorSpace enum members; real values lost in stub generation.
    BottomToTop = None
    Direction = None
    TopToBottom = None
    YCbCrColorSpace = None
    YCbCr_BT601 = None
    YCbCr_BT709 = None
    YCbCr_CustomMatrix = None
    YCbCr_JPEG = None
    YCbCr_Undefined = None
    YCbCr_xvYCC601 = None
    YCbCr_xvYCC709 = None
    __new__ = None
|
14,034 | 84d9bae3ebd0f0104dabe37ccb82de00acbcd3e8 | """ A super cool file for helping with DRAG stuff."""
import numpy as np
import scipy
import scipy.special  # 'import scipy' alone does not guarantee scipy.special is loaded

import dsp_utils
# Empty envelopes
def empty_detuning_envelope(t, args):
    """Placeholder detuning (Z) envelope: applies no drive (always 0)."""
    return 0
def empety_x_envelope(t, args):
    """Placeholder in-phase (X) envelope: applies no drive (always 0)."""
    # NOTE(review): name is misspelled ("empety"); kept as-is because external
    # callers may already reference it.
    return 0
def empty_y_envelope(t, args):
    """Placeholder out-of-phase (Y) envelope: applies no drive (always 0)."""
    return 0
# Helper functions for building gaussian pulses
def gaussian(t, sigma):
    """Unnormalized Gaussian exp(-t^2 / (2*sigma^2)) evaluated at t."""
    exponent = -(t ** 2) / (2 * sigma ** 2)
    return np.exp(exponent)
def truncated_gaussian(t, sigma, t0, tn):
    """Truncated Gaussian pulse centred at t0 and clipped at offset tn.

    Subtracting the tail value makes the pulse vanish at the truncation
    point; the denominator renormalizes the clipped area.
    """
    tail = gaussian(tn, sigma)
    erf_term = scipy.special.erf(tn / (np.sqrt(2) * sigma))
    norm = np.sqrt(2 * np.pi * sigma ** 2) * erf_term - 2 * tn * tail
    return (gaussian(t - t0, sigma) - tail) / norm
def truncated_gaussian_derivative(t, sigma, t0, tn):
    """Time derivative of the Gaussian part of truncated_gaussian, sharing
    the same truncation normalization constant."""
    erf_term = scipy.special.erf(tn / (np.sqrt(2) * sigma))
    norm = np.sqrt(2 * np.pi * sigma ** 2) * erf_term - 2 * tn * gaussian(tn, sigma)
    slope = -(t - t0) / sigma ** 2 * gaussian(t - t0, sigma)
    return slope / norm
# Functions for bottom two level DRAG in an anharmonic oscillator
def x_envelope_ge(t, args):
    """In-phase (X) quadrature for g<->e DRAG: a scaled truncated Gaussian.

    Expects args keys: 'tsigma', 'tg', 'tn', 'A', 'x_coeff'.
    """
    sigma = args['tsigma']
    center = args['tg']
    cutoff = args['tn']
    tail = np.exp(-cutoff ** 2 / (2 * sigma ** 2))
    erf_term = scipy.special.erf(cutoff / (np.sqrt(2) * sigma))
    norm = np.sqrt(2 * np.pi * sigma ** 2) * erf_term - 2 * cutoff * tail
    bell = np.exp(-(t - center) ** 2 / (2 * sigma ** 2))
    return args['x_coeff'] * args['A'] * (bell - tail) / norm
def y_envelope_ge(t, args):
    """Out-of-phase (Y) quadrature for g<->e DRAG: derivative of the
    truncated Gaussian, sharing its normalization.

    Expects args keys: 'tsigma', 'tg', 'tn', 'A', 'y_coeff'.
    """
    sigma = args['tsigma']
    center = args['tg']
    cutoff = args['tn']
    tail = np.exp(-cutoff ** 2 / (2 * sigma ** 2))
    erf_term = scipy.special.erf(cutoff / (np.sqrt(2) * sigma))
    norm = np.sqrt(2 * np.pi * sigma ** 2) * erf_term - 2 * cutoff * tail
    slope = -(t - center) / sigma ** 2 * np.exp(-(t - center) ** 2 / (2 * sigma ** 2))
    return args['y_coeff'] * args['A'] * slope / norm
def det_envelope_ge(t, args):
    """Detuning (Z) envelope for g<->e DRAG: proportional to the square of
    the scaled truncated-Gaussian X envelope.

    Expects args keys: 'tsigma', 'tg', 'tn', 'A', 'det_coeff'.
    """
    sigma = args['tsigma']
    center = args['tg']
    cutoff = args['tn']
    tail = np.exp(-cutoff ** 2 / (2 * sigma ** 2))
    erf_term = scipy.special.erf(cutoff / (np.sqrt(2) * sigma))
    norm = np.sqrt(2 * np.pi * sigma ** 2) * erf_term - 2 * cutoff * tail
    base = args['A'] * (np.exp(-(t - center) ** 2 / (2 * sigma ** 2)) - tail) / norm
    return args['det_coeff'] * base ** 2
# Functions for intermediate DRAG in an anharmonic oscillator
def x_envelope_ef(t, args):
    """In-Phase Quadrature Envelope for e->f DRAG: a truncated Gaussian
    centred at half the gate time, scaled by args['A']."""
    half_gate = args['t_g'] / 2
    half_cutoff = args['t_n'] / 2
    return args['A'] * truncated_gaussian(t, args['sigma'], half_gate, half_cutoff)
def y_envelope_ef(t, args):
    """Out-of-Phase Quadrature Envelope for e->f DRAG."""
    # NOTE(review): the index arithmetic below (couplings[e-1]/[e+1],
    # anharms[e+2]/[e-1]) encodes a level-labelling convention that is not
    # documented here — confirm against the DRAG derivation before reuse.
    anharms = args['anharms']
    couplings = args['couplings']
    e = args['e']
    g = args['g']
    couplings = [c/g for c in couplings]  # normalize couplings by the base coupling g
    coeff = -np.sqrt(couplings[e-1]**2 \
            + (anharms[e+2]**2/anharms[e-1]**2) \
            * couplings[e+1]**2) / (2*anharms[e+2])
    return args['A'] * coeff * truncated_gaussian_derivative(t,
                                                            args['sigma'],
                                                            args['t_g']/2,
                                                            args['t_n']/2)
def detuning_envelope_ef(t, args):
    """Detuning envelope for e->f DRAG."""
    # NOTE(review): unlike y_envelope_ef, couplings[e+1] below is NOT squared
    # even though it is multiplied by a squared anharmonicity ratio — verify
    # against the derivation; this may be a transcription slip.
    anharms = args['anharms']
    couplings = args['couplings']
    e = args['e']
    g = args['g']
    couplings = [c/g for c in couplings]  # normalize couplings by the base coupling g
    coeff = (couplings[e-1]**2 \
            - (anharms[e+2]**2/anharms[e-1]**2) \
            * couplings[e+1]) / (4*anharms[e+2])
    return coeff*(args['A'] * truncated_gaussian(t,
                                                 args['sigma'],
                                                 args['t_g']/2,
                                                 args['t_n']/2))**2
def create_ge_envelopes(sample_rate,
                        gate_time,
                        envelope_args,
                        modulation_args=None,
                        quantization_args=None,
                        upsampling_args=None,
                        noise_args=None):
    """Returns analytically optimal first order DRAG control pulses.

    Args:
        sample_rate: the sample rate in GSa/s of the pulses
        gate_time: the length in ns of the DRAG gates
        envelope_args: the desired parameters for the X control gaussian
        modulation_args: the parameters for upconverting the envelopes
        quantization_args: for testing - the parameters for quantizing the
            pulses
        upsampling_args: for testing - the parameters for increasing pulse
            resolution
        noise_args: for testing - parameters for adding noise to pulses
    Returns:
        An np array of the times associated with each pulse value, in ns
        An np array of the x control line in V
        An np array of the y control line in V
        An np array of the z control line in V
    """
    # All three control lines are rendered with identical settings; only the
    # envelope function differs.
    def _render(envelope):
        return dsp_utils.create_custom_signal(
            envelope,
            sample_rate,
            gate_time,
            envelope_args=envelope_args,
            modulation_args=modulation_args,
            quantization_args=quantization_args,
            upsampling_args=upsampling_args,
            noise_args=noise_args)

    xs, times = _render(x_envelope_ge)
    ys, _ = _render(y_envelope_ge)
    dets, _ = _render(det_envelope_ge)
    return times, xs, ys, dets
def create_constant_detuning_DRAG_envelopes(sample_rate,
                                            gate_time,
                                            envelope_args,
                                            modulation_args=None,
                                            quantization_args=None,
                                            upsampling_args=None,
                                            noise_args=None):
    """Returns DRAG control pulses with a constant detuning line.

    Analytically derived optimal first order DRAG includes the square of the X
    control pulse on the detuning Z line, but solutions can be found through
    optimization methods that instead have different X and Y control weightings
    and a constant detuning control. This function simply returns DRAG shaped
    X and Y control pulses with a constant Z control pulse instead of the
    analytic Z control solution.

    Args:
        sample_rate: the sample rate in GSa/s of the pulses
        gate_time: the length in ns of the DRAG gates
        envelope_args: the desired parameters for the X control gaussian
        modulation_args: the parameters for upconverting the envelopes
        quantization_args: for testing - the parameters for quantizing the
            pulses
        upsampling_args: for testing - the parameters for increasing pulse
            resolution
        noise_args: for testing - parameters for adding noise to pulses
    Returns:
        An np array of the times associated with each pulse value, in ns
        An np array of the x control line in V
        An np array of the y control line in V
        An np array of the z control line in V
    """
    def _render(envelope, env_args, mod_args):
        return dsp_utils.create_custom_signal(
            envelope,
            sample_rate,
            gate_time,
            envelope_args=env_args,
            modulation_args=mod_args,
            quantization_args=quantization_args,
            upsampling_args=upsampling_args,
            noise_args=noise_args)

    # Flat detuning line at the requested coefficient; it is rendered without
    # envelope args or modulation, matching the analytic-DRAG call signature.
    def _constant_detuning(t, args=None):
        return envelope_args['det_coeff']

    xs, times = _render(x_envelope_ge, envelope_args, modulation_args)
    ys, _ = _render(y_envelope_ge, envelope_args, modulation_args)
    dets, _ = _render(_constant_detuning, None, None)
    return times, xs, ys, dets
|
14,035 | 77bba44a1f072088e21a1cdc6afb37a88c52d3fb | from location import RIGHT, LEFT, UP, DOWN, Location
from game import SQUARE_SIZE
class Player():
    '''
    This class represents the character which reacts to user inputs in two dimensional grid worlds.
    A player is equipped with the various capabilities:
    - It can sense its own surroundings (location, facing, the world that it is in).
    - It can move forward and jump.
    - It can turn around.
    - It can sense whether it is alive or not.
    - It can have a weapon in its repository.
    - It can collect points.
    '''

    STEP = SQUARE_SIZE/5      # pixels covered by one movement tick
    GRAVITATION = 0.1         # downward acceleration applied while airborne

    def __init__(self, name):
        '''
        Creates a new player with the given name.
        Parameter name is the name of the player: string
        '''
        self.set_name(name)       # most-recent holder (falls back to default)
        self.shape = None         # drawing shape, assigned by the GUI layer
        self.location = None      # most-recent holder, set when added to a game
        self.game = None          # fixed once the player joins a Game
        self.destroyed = False    # flag
        self.facing = None        # most-recent holder (RIGHT or LEFT)
        self.weapon = None        # most-recent holder
        self.points = 0           # most-recent holder
        self.falling = None       # flag, refreshed by is_falling()
        self.jump_speed = 0       # most-recent holder

    def set_name(self, name):
        '''
        Sets the name of the player.
        Default name is "Samsonator" but user can also set a custom name.
        Parameter name is the new name of the player: string
        '''
        self.name = name if name else "Samsonator"

    def get_name(self):
        '''
        Returns the name of the player: string
        '''
        return self.name

    def set_location(self, location):
        '''
        Sets the player to a certain location.
        Parameter location is an instance of class Location: Location
        '''
        self.location = location

    def get_location(self):
        '''
        Returns the current location of the player, or None if the player
        has not been placed in a game yet: Location
        '''
        return self.location

    def set_game(self, game, location, facing):
        '''
        Places the player in the given game at the specified coordinates.
        This method is supposed to be used from the add_player method in the
        Game class, which makes sure the player becomes part of the correct Game.
        Parameter game is the game board in which the player will be placed: Game
        Parameter location is the coordinates at which the player is placed: Location
        Parameter facing is the initial facing of the player: RIGHT/LEFT
        Returns False if the square at the given location is not empty or the
        player is already located in some game, True otherwise: boolean
        '''
        result_square = game.get_square(location)
        if not result_square.is_empty() or self.get_game() is not None:
            return False
        self.game = game
        self.location = location
        self.facing = facing
        return True

    def get_game(self):
        '''
        Returns the game in which the player is in: Game
        '''
        return self.game

    def set_facing(self, facing):
        '''
        Sets the facing as the parameter orders: RIGHT/LEFT
        '''
        self.facing = facing

    def get_facing(self):
        '''
        Returns the direction the player is facing: int
        '''
        return self.facing

    def set_weapon(self, weapon):
        '''
        Sets a weapon to the player.
        Parameter weapon is boolean.
        '''
        self.weapon = weapon

    def get_weapon(self):
        '''
        Returns a boolean value stating whether the player has a weapon or not.
        '''
        return self.weapon

    def shoot(self):
        '''
        Uses the weapon of the player and shoots with it. For example the rifle
        speed and impact force can be customised. The player will shoot when the
        mouse is clicked somewhere on the screen, in the direction of the click.
        '''
        pass  # not implemented yet; hooked up by the GUI layer

    def set_points(self, amount):
        '''
        Adds the given amount of points to the player's score.
        (The name is kept for backward compatibility even though it accumulates.)
        Parameter amount is the number of points that will be added: int
        '''
        self.points += amount

    def get_points(self):
        '''
        Returns the number of points of the player: int
        '''
        return self.points

    def is_destroyed(self):
        '''
        Returns a boolean value stating whether the player is destroyed or not.
        '''
        return self.destroyed

    def turn(self):
        '''
        Turns the player around (RIGHT <-> LEFT), if the player is alive.
        If the player is destroyed, the method does nothing.
        '''
        if not self.is_destroyed():
            self.facing = LEFT if self.facing == RIGHT else RIGHT

    def turn_to(self, facing):
        '''
        Turns the player to the wanted direction if it is not already facing it.
        Destroyed players are handled inside turn(), which does nothing for them.
        '''
        if not self.facing == facing:
            self.turn()

    def move(self, facing):
        '''
        Moves the player STEP pixels towards the given facing.
        A destroyed player can't move. The player moves smoothly pixelwise, but
        collisions are checked as if it were a square measured from its center,
        so a colliding element closer than half a square blocks the move.
        Parameter facing is the direction to move in: RIGHT/LEFT
        Returns True if the movement succeeded and False otherwise: boolean
        '''
        if self.is_destroyed():
            return False  # fix: previously fell through and returned None
        if not self.check_front():
            return False
        cur_x = self.get_location().get_x()
        cur_y = self.get_location().get_y()
        delta = Player.STEP if facing == RIGHT else -Player.STEP
        self.set_location(Location(cur_x + delta, cur_y, self.game))
        return True

    def check_front(self):
        '''
        Checks whether the player can move forward.
        Returns True when the spot one step ahead (plus half a square of
        clearance) is free, False otherwise: boolean
        '''
        cur_position = self.get_location()
        facing = self.get_facing()
        dist = self.STEP + SQUARE_SIZE/2
        relative = cur_position.get_relative(facing, dist)
        if not relative.location_is_empty_player():
            return False
        return True

    def jump(self, jump_speed):
        '''
        Makes the player jump upwards with the given speed multiplier.
        A destroyed player keeps its jump_speed but does not move; a blocked
        jump resets jump_speed to 0.
        '''
        self.jump_speed = jump_speed
        if not self.is_destroyed():
            rise = Player.STEP * self.jump_speed
            if self.check_up_or_down(UP, rise):
                new_y = self.get_location().get_y() - rise
                self.set_location(Location(self.get_location().get_x(), new_y, self.game))
            else:
                self.jump_speed = 0

    def destroy(self):
        '''
        Destroys the player.
        '''
        self.destroyed = True

    def check_up_or_down(self, facing, jump_distance):
        '''
        Returns a boolean value stating whether the player can move up or down
        by jump_distance (plus half a square of clearance): boolean
        '''
        cur_position = self.get_location()
        dist = jump_distance + SQUARE_SIZE/2
        relative = cur_position.get_relative(facing, dist)
        if not relative.location_is_empty_player():
            return False
        return True

    def is_falling(self):
        '''
        Refreshes and returns self.falling: True when there is empty space one
        STEP below the player, False otherwise: boolean
        '''
        self.falling = self.check_up_or_down(DOWN, Player.STEP)
        return self.falling
|
def digit_sum(n):
    """Return the sum of the decimal digits of non-negative integer n."""
    return sum(int(d) for d in str(n))


def solve(n, a, b):
    """Sum all integers i in [0, n] whose digit sum lies in [a, b] inclusive."""
    return sum(i for i in range(n + 1) if a <= digit_sum(i) <= b)


if __name__ == "__main__":
    N, A, B = map(int, input().split())
    print(solve(N, A, B))
|
14,037 | 57d9632c1cf0944bc0ce374bf2c0f9bc64f0b4ce | """
inout.py
Prompt the user for their age.
Then tell them how old they are in dog years.
"""
import sys
def dog_years(years):
    """Convert human years to approximate dog years (one dog year per 7 human years)."""
    return round(years / 7)


def main():
    """Prompt the user for their age and report it in dog years."""
    try:
        raw = input("How old are you? ")
    except EOFError:
        # No input available (stdin closed): exit quietly, as before.
        sys.exit(0)
    try:
        years = float(raw)
    except ValueError:
        print("Sorry,", raw, "is not a number.")
        years = 0.0
    print("I'll assume your age is", years, "years.")
    print("That's about", dog_years(years), "dog years!")
    sys.exit(0)


if __name__ == "__main__":
    main()
|
14,038 | 44a99aecf857fe138d4b793718ee58d66a0d5bff | """
Waits one second between sending each character (the code sleeps 1 s per byte;
adjust the sleep if a two-second pacing is actually intended)
"""
import socket
import time
client_socket = socket.socket()
# Connect to the local server on the given port.  (The original comment said
# "listen for connections", but this end is the CLIENT: it connects out.)
client_socket.connect(('127.0.0.1', 8000))
try:
    for c in b'hello\n':
        time.sleep(1)
        # Send one byte at a time so the peer sees a slow trickle, and echo
        # whatever the server answers after each byte.
        client_socket.sendall(bytes([c]))
        print(client_socket.recv(4096))
finally:
    # Always release the socket, even if send/recv raises mid-loop.
    client_socket.close()
|
14,039 | 856142153c8682be8568b31c00dbf8606c83afa1 | class Solution(object):
def twoSum(self, nums, target):
map = {}
diff = 0
for i in range(0, len(nums), 1):
map[str(i)] = nums[i]
#print map['1']
for k in range(0, len(nums), 1):
#print "======",nums[k],k,"====="
diff = target - nums[k];
#print "diff = ", diff, type(diff)
#print map.get(str(k)), type(map.get(str(k)))
try:
index = int(map.keys()[map.values().index(diff)])
if ( index != (k) ):
#print "yes, index= ", map.keys()[map.values().index(diff)]
return [k,index]
except ValueError:
pass
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
# if __name__ == '__main__':
# test = Solution()
# nums = [5,20,75]
# #nums = [3, 2, 4]
# print type(nums)
# target = 95
# result = test.twoSum(nums, target)
# print result |
14,040 | b2474383784aeaf77e47f5a9afa702e851fee5ee | import logging
import logging.config
logging.config.fileConfig('logging.conf')
import os
from .file import File
from utils.page_loader import PageLoader
class Page:
    def __init__(self, link_source = None, path = "", \
            url=None, dp=None, lp=None, use_same_url=False, loader=None,\
            data_version=1, link_version=1, source_version=1, remove_duplicates=True, timeout=None,\
            data_to_new=False, link_to_new=False, max_data_rows=1000, max_link_rows=1000):
        """Crawler page: pulls URLs from a link-source file, downloads each
        page, and feeds the raw HTML to the configured parsers.

        link_source -- file under link_sources/ providing URLs to crawl
                       (NOTE(review): the default None would crash the
                       os.path.join below — callers apparently always pass one)
        path        -- subfolder used for both data output and harvested links
        url         -- explicit first URL; with use_same_url=True it is reused
        dp, lp      -- data parser and link parser callables
        *_version   -- file-version numbers handed to the File helper
        """
        self.file = File()
        self.timeout = timeout
        # Reuse a caller-supplied loader when given; otherwise build one.
        self.page_loader = PageLoader(timeout=timeout) if loader is None else loader
        self.data_parser = dp
        self.link_parser = lp
        self.data_version = data_version
        self.link_version = link_version
        self.source_version = source_version
        self.set_data_path(path)
        self.set_link_path(path)
        self.use_same_url = False
        self.remove_duplicates = remove_duplicates
        self.data_blocks = []
        self.link_blocks = []
        self.raw_data = None
        self.tags = None
        # NOTE(review): self.tags stays None, but add_tags() calls
        # .add()/.update() on it — confirm callers always assign a set first.
        self.data_to_new = data_to_new
        self.link_to_new = link_to_new
        self._url_already_given = False
        self._link_source_load_idx = 0
        self._link_source_pos = 0
        self._has_url = True
        if url is None and use_same_url:
            logging.warning("for using 'use_same_url' argument use 'url' argument too")
        if url is not None:
            # use_same_url only takes effect when an explicit url is supplied.
            self._url_already_given = True
            self.url = url
            self.use_same_url = use_same_url
        self.link_source = os.path.join("link_sources", link_source)
        self.max_links_from_source = 20000
        self.links = []
        self.max_data_rows = max_data_rows
        self.max_link_rows = max_link_rows
        self.cached_links = dict()
def set_timeout(self, timeout):
self.timeout = timeout
self.page_loader.set_timeout(self.timeout)
    def get_new_url(self):
        """Advance self.url to the next URL to crawl.

        Precedence: keep the current URL (use_same_url), consume the
        constructor-supplied URL once, then walk through self.links, refilling
        the batch from the link-source file when it runs out.  Sets self.url
        to None when the source is exhausted.
        """
        if self.use_same_url: return
        if self._url_already_given:
            # First call after construction: self.url was already set in __init__.
            self._url_already_given = False
            return
        if self._link_source_pos >= len(self.links):
            # Current batch consumed; pull the next batch from disk.
            self.get_next_links()
            self._link_source_pos = 0
        if self._link_source_pos >= len(self.links): self.url = None
        else: self.url = self.links[self._link_source_pos]
        self._link_source_pos += 1
def get_next_links(self):
self.links = self.file.get_lines_from_txt(self.link_source,
start=self._link_source_load_idx,
end=self._link_source_load_idx + self.max_links_from_source, version=self.source_version)
self._link_source_load_idx += len(self.links)
if len(self.links) < 1:
self._has_url = False
    def load_page(self):
        """Download self.url into self.raw_data, skipping URLs that fail.

        Keeps asking get_new_url() for a fresh URL until a download succeeds.
        Returns False when the link source is exhausted (or when the single
        reusable URL fails), True on success.
        """
        data = self.page_loader.load(self.url)
        while data is None:
            self.get_new_url()
            # Give up when no URLs remain, or when the one reusable URL failed.
            if not self._has_url or (data is None and self.use_same_url): return False
            data = self.page_loader.load(self.url)
        logging.info("Parsed: {}".format(self.url))
        self.raw_data = data
        return True
def process(self):
    """Run one crawl step: pick a URL, download it, parse it, persist results.

    Data blocks produced by ``data_parser`` are tagged and written out as
    tables; link blocks produced by ``link_parser`` are de-duplicated
    (optionally against links already stored on disk) and written to their
    link-source files.  Returns False when no page could be loaded,
    True otherwise.
    """
    self.get_new_url()
    if not self.load_page(): return False
    if self.data_parser:
        self.data_blocks = self.data_parser(self.raw_data)
        for block in self.data_blocks:
            block.set_save_folder(self.data_path)
            block.add_tags(self.tags)
            block.generate_table()
            block.save_table(max_rows=self.max_data_rows, version=self.data_version, new=self.data_to_new)
    if self.link_parser:
        self.link_blocks = self.link_parser(self.raw_data)
        for path in self.link_blocks:
            # Each entry is (links, check_duplicates_on_disk, keep_cache).
            curr_links, check_dup, memory = self.link_blocks[path]
            path_to_link = os.path.join(self.link_path, path)
            if self.remove_duplicates:
                # Drop duplicates within this batch only.
                curr_links = list(set(curr_links))
            if check_dup:
                # Also drop links already stored on disk; the on-disk set is
                # cached per (path, version) so the file is read at most once.
                if (path_to_link, self.link_version) not in self.cached_links:
                    self.cached_links[(path_to_link, self.link_version)] = set(self.file.get_lines_from_txt(path_to_link, version=self.link_version))
                curr_links = list(set(curr_links) - self.cached_links[(path_to_link, self.link_version)])
                if not memory:
                    # Caller asked not to keep the on-disk cache for this path.
                    del(self.cached_links[(path_to_link, self.link_version)])
            self.file.add_data_to_txt(path_to_link, curr_links, \
                max_rows=self.max_link_rows, version=self.link_version, new=self.link_to_new)
    return True
def set_data_parser(self, parser):
    # Callable applied to a raw page to produce data blocks (or None to disable).
    self.data_parser = parser
def set_link_parser(self, parser):
    # Callable applied to a raw page to produce link blocks (or None to disable).
    self.link_parser = parser
def add_tags(self, tagname):
    """Add one tag (str) or many tags (any iterable of str) to self.tags.

    Fix: ``self.tags`` starts out as ``None`` (see ``__init__``), so the
    original ``self.tags.add(...)`` raised AttributeError on first use;
    the set is now created lazily.  Also uses ``isinstance`` instead of
    ``type(...) is str`` so str subclasses are handled.
    """
    if self.tags is None:
        self.tags = set()
    if isinstance(tagname, str):
        self.tags.add(tagname)
    else:
        self.tags.update(tagname)
def set_data_path(self, path):
    # Parsed tables are stored under the local "data" directory.
    self.data_path = os.path.join("data", path)
def set_link_path(self, path):
    # Harvested links are stored under the local "link_sources" directory.
    self.link_path = os.path.join("link_sources", path)
def move_link(self, old_path, new_path, prefix=".txt", old_version=None, new_version=1, stop_if_exists=True):
    """Relocate a link file; the source version defaults to self.link_version."""
    source_version = old_version or self.link_version
    self.file.move_link(old_path, new_path, prefix, source_version,
                        new_version, stop_if_exists)
def move_data(self, old_path, new_path, prefix=".csv", old_version=None, new_version=1, stop_if_exists=True):
    """Relocate a data file; the source version defaults to self.data_version."""
    source_version = old_version or self.data_version
    self.file.move_data(old_path, new_path, prefix, source_version,
                        new_version, stop_if_exists)
def get_data_tmp(self, max_rows=None):
    """Return buffered data rows; max_rows falls back to self.max_data_rows."""
    if max_rows is None:
        max_rows = self.max_data_rows
    return self.file.get_data_tmp(max_rows)
def get_link_tmp(self, max_rows=None):
    """Return buffered link rows; max_rows falls back to self.max_link_rows."""
    if max_rows is None:
        max_rows = self.max_link_rows
    return self.file.get_link_tmp(max_rows)
def clear_tmp(self, all=False):
    # NOTE(review): the `all` flag is accepted but never forwarded to
    # self.file.clear_tmp() -- confirm whether it should be passed through.
    self.file.clear_tmp()
def substract_link(self, path1, path2, version1=None, version2=None, max_rows=None, new_path=None, new_version=1):
    """Delegate link-file subtraction (presumably path1 minus path2 -- the
    actual semantics live in self.file.substract_from_txt) under the
    link_sources directory.  Versions and max_rows default to the
    crawler-wide link settings.  (Name keeps the existing "substract"
    spelling for backward compatibility.)
    """
    version1 = version1 if version1 else self.link_version
    version2 = version2 if version2 else self.link_version
    path1 = os.path.join("link_sources", path1)
    path2 = os.path.join("link_sources", path2)
    max_rows = max_rows if max_rows is not None else self.max_link_rows
    self.file.substract_from_txt(path1, path2, version1, version2, max_rows, new_path, new_version)
def add_link(self, path1, path2, version1=None, version2=None, max_rows=None):
    """Delegate merging of two link files (via self.file.add_txt_to_txt)
    under the link_sources directory.  Versions and max_rows default to
    the crawler-wide link settings.
    """
    version1 = version1 if version1 else self.link_version
    version2 = version2 if version2 else self.link_version
    path1 = os.path.join("link_sources", path1)
    path2 = os.path.join("link_sources", path2)
    max_rows = max_rows if max_rows is not None else self.max_link_rows
    self.file.add_txt_to_txt(path1, path2, version1, version2, max_rows)
def remove_link(self, path, version=None):
    """Delete the .txt link files under link_sources/``path`` (version defaults to self.link_version)."""
    version = version if version else self.link_version
    path = os.path.join("link_sources", path)
    self.file.remove_files(path, ".txt", version)
def remove_data(self, path, version=None):
    """Delete the .csv data files under data/``path`` (version defaults to self.data_version)."""
    version = version if version else self.data_version
    path = os.path.join("data", path)
    self.file.remove_files(path, ".csv", version)
|
14,041 | d02047581a7aa1fbfd1e394edbbc9c4d109af84c | from django.shortcuts import render
def index(request):
    """Render the landing page with its title and subtitle."""
    context = dict(judul='Selamat Datang', subjudul='MNUR')
    return render(request, 'index.html', context)
|
14,042 | 041aa83ea46a7d788ed2fdb843543fc2944b0897 | #!/usr/bin/env python
# coding: utf-8
# # Setup and import libraries
# In[2]:
import streamlit as st
import pandas as pd
import numpy as np
import plotly.express as px
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import matplotlib.pyplot as plt
from ipywidgets import AppLayout, Button, Layout
# # Load csv files
# ### Different csv files are loaded and saved into data frames:
# 1. Mortality: csv file that inclues covid19 mortality from January 2019 to June 2021
# 2. Life expectancy: csv file that includes life expectancy years by country
# 2. Covid data: csv file that includes data about total confirmed cases and deaths by country
# 3. Health System Ranking: csv file that includes the ranking of health system performance by country
# In[3]:
mortality_by_date = pd.read_csv("./covid_mortalities_by_time.csv")
# In[4]:
life_expectancy = pd.read_csv("./life_expectancy.csv")
# In[5]:
mortality_by_country = pd.read_csv('./WHO_covid_data.csv')
# In[6]:
healthsystem_rank = pd.read_csv('./WHO_healthsystem_rank.csv')
# In[7]:
pop_data = pd.read_csv("./IHME_population_age.CSV")
# In[8]:
#subet the population data set to the three main columns to use for analysis
df = pop_data[['location_name', 'age_group_name', 'val']]
# In[15]:
# NOTE(review): the result of value_counts() is discarded -- leftover
# notebook inspection cell.
pop_data['age_group_name'].value_counts()
# Collapse the fine-grained IHME age bands into five coarse buckets.
# NOTE(review): several keys/values carry stray whitespace ('1 to 4 ',
# '35 to 39 ', value '>80 ') and some bands ('5 to 9', '15 to 19') have no
# mapping -- confirm against the actual labels in IHME_population_age.CSV.
mapping = {'1 to 4 ':'1 to 15','10 to 14':'1 to 15','Early Neonatal':'1 to 15' ,'Late Neonatal':'1 to 15','Post Neonatal':'1 to 15',
           '20 to 24':'16 to 39','25 to 29':'16 to 39','35 to 39 ':'16 to 39','30 to 34':'16 to 39','40 to 44':'40 to 59',
           '45 to 49':'40 to 59','50 to 54':'40 to 59','55 to 59':'40 to 59',
           '60 to 64':'60 to 80','65 to 69':'60 to 80','70 to 74':'60 to 80','75 to 80':'60 to 80',
           '80 to 84':'>80','85 to 89':'>80 ','90 to 94':'>80','95 plus':'>80'
           }
# In[16]:
pop_data['age_group_name'] = pop_data['age_group_name'].replace(mapping)
# In[9]:
#merge the life expectancy dataframe with the covid_data in one dataframe
df_merged = pd.merge(life_expectancy,mortality_by_country, on='Country')
df_merged2 = pd.merge(df_merged,healthsystem_rank,on='Country')
# In[11]:
mortality_by_date =mortality_by_date.loc[(mortality_by_date['location']=='World')]
# In[12]:
#extract month and year from date field in the mortality dataframe
mortality_by_date['date'] = pd.to_datetime(mortality_by_date['date'], errors='coerce')
mortality_by_date['Period'] = pd.to_datetime(mortality_by_date['date']).dt.to_period('M')
#group results by sum of total deaths
#df_fig0 =all_mortality.groupby(['month_year']).agg({'new_deaths':'sum'}).unstack(level=0)
# In[13]:
df_fig0 =mortality_by_date[['location','Period','total_deaths']]
df_fig0 = df_fig0.rename(columns={'total_deaths':'Total Deaths'})
# In[126]:
#convert month_year column from period type to datetime to use in line chart
df_fig0['Period'] =df_fig0.Period.values.astype('datetime64[M]')
# In[ ]:
#add title and information to the streamlit dashboard page
st.set_page_config(layout="wide")
st.markdown( "<div style='background-color:#EBF5FB; font-size:30px; text-align: center; color: #2E86C1 ; width: 100%'>MSBA Program</div>",
unsafe_allow_html=True)
st.markdown( "<div style='background-color:#EBF5FB; margin-bottom: 10px; font-size:25px; text-align: center; color: #2E86C1 ; width: 100%'>Health Analytics Course</div>",
unsafe_allow_html=True)
st.title("Analyzing the Association between Covid19 Mortality,Health System and Life Expectancy")
# In[ ]:
#add side bar to streamlit page
#st.sidebar.title("Filtering")
#locations = list(set(
# list(all_mortality['location']) +
# list(df_merged2['Country']) +
# list(healthsystem_rank['Country'])
#))
#locations.insert(0, 'World')
#location = st.sidebar.selectbox("Locations", locations)
#all_mortality = all_mortality.loc[(all_mortality['location'] == location)]
# In[ ]:
#create the first figure
fig0 = px.area(df_fig0,
x='Period',
y='Total Deaths',
title='Total Covid19 deaths in the world from Jan 2020 to June 2021')
fig0.update_layout(margin=dict(b=0))
st.plotly_chart(fig0, use_container_width=True)
st.markdown(
"<div style='margin-bottom: 11px; font-size:10px; text-align: center; color: gray; width: 100%'>Data Source: Covid data Worldmeter.info</div>",
unsafe_allow_html=True
)
#divide streamlit page into columns to organize the layout of the graphs
row2_1, row2_2 = st.beta_columns((1, 1))
with row2_1:
st.subheader('Health Systems Performance ranking in 2019 by Country')
df_fig_2 = df_merged2[['Rank', 'Country', 'Deaths - cumulative total per 100000 population']]
df_fig_2 = df_fig_2.sort_values('Rank', ascending=True)
df_fig_2 = df_fig_2.groupby(['Rank', 'Country']).size().to_frame().head(50).reset_index()
fig_2 = go.Figure(
data=go.Table(
header=dict(values=list(df_fig_2[['Rank', 'Country']].columns)),
cells=dict(values=[df_fig_2.Rank, df_fig_2.Country])
)
)
fig_2.update_layout(margin=dict(t=0,l=0, r=50, b=0),autosize=False,
width=500,
height=300
)
st.write(fig_2, use_container_width=True)
st.markdown(
"<div style='margin-bottom: 11px; font-size:10px; text-align: left; color: gray; width: 100%'>Data Source: WHO, Measuring Overall Health System Performance for 191 Countries</div>",
unsafe_allow_html=True
)
with row2_2:
st.subheader('Covid19 deaths (cumulative total per 100000 population) by country')
df_fig_1 = df_merged2[['Country', 'Deaths - cumulative total per 100000 population']]
df_fig_1 = df_fig_1.sort_values(
'Deaths - cumulative total per 100000 population', ascending=False
).head(50)
fig_1 = px.bar(
df_fig_1,
x='Deaths - cumulative total per 100000 population',
y='Country',
color='Country'
)
fig_1.update_layout(margin=dict(b=0),autosize=False,
width=500,
height=300)
st.plotly_chart(fig_1, use_container_width=True)
st.markdown(
"<div style='margin-bottom: 11px; font-size:10px; text-align: left; color: gray; width: 100%'>Data source: WHO- Covid19 mortality data</div>",
unsafe_allow_html=True
)
row3_1, row3_2 = st.beta_columns((1, 1))
with row3_1:
st.subheader('Total confirmed Covid19 deaths vs Health System ranking')
df_fig_4 = df_merged2[['Deaths - cumulative total per 100000 population', 'Rank']]
fig_4 = px.scatter(
df_fig_4,
x='Rank',
y='Deaths - cumulative total per 100000 population'
)
fig_4.update_layout(margin=dict(b=0,t=0),autosize=False,
width=500,
height=300)
st.plotly_chart(fig_4, use_container_width=True)
st.markdown(
"<div style='margin-bottom: 11px; font-size:10px; text-align: left; color: gray; width: 100%'>Data source:WHO, Measuring Overall Health System Performance for 191 Countries</div>",
unsafe_allow_html=True
)
with row3_2:
st.subheader('Total confirmed Covid19 deaths vs life expectancy')
df_fig_3 = df_merged[[
'Deaths - cumulative total per 100000 population',
'Life Expectancy'
]]
fig_3 = px.scatter(
df_fig_3,
x='Life Expectancy',
y='Deaths - cumulative total per 100000 population'
)
fig_3.update_layout(margin=dict(b=0,t=0),autosize=False,
width=500,
height=300)
st.plotly_chart(fig_3, use_container_width=True)
st.markdown(
"<div style='margin-bottom: 11px; font-size:10px; text-align: left; color: gray; width: 100%'>Data source: WHO- life expectancy(years) by country in 2019</div>",
unsafe_allow_html=True
)
df = pop_data[['location_name', 'age_group_name', 'val']]
df_fig6 = df.groupby(['location_name', 'age_group_name']).agg({'val': 'sum'}).reset_index()
df_fig6 = df_fig6.loc[
(df_fig6['location_name'] == 'France') & (df_fig6['age_group_name'] != 'All Ages')
]
df_fig6['%population'] = (df_fig6['val'] / df_fig6['val'].sum()) * 100
df_fig7 = df.groupby(['location_name', 'age_group_name']).agg({'val': 'sum'}).reset_index()
df_fig7 = df_fig7.loc[
(df_fig7['location_name'] == 'Italy') & (df_fig7['age_group_name'] != 'All Ages')
]
df_fig7['%population'] = (df_fig7['val'] / df_fig7['val'].sum()) * 100
df_age_pop = pd.concat([df_fig6, df_fig7])
#df_fig8 = df.groupby(['location_name', 'age_group_name']).agg({'val': 'sum'}).reset_index()
#df_fig8 = df_fig8.loc[
# (df_fig8['location_name'] == 'South Sudan') & (df_fig8['age_group_name'] != 'All Ages')
# ]
#df_fig8['%population'] = (df_fig8['val'] / df_fig8['val'].sum()) * 100
row4_1, row4_2 = st.beta_columns((1, 1))
with row4_1:
fig_5 = px.bar(
df_fig6,
x='%population',
y='age_group_name',
color='age_group_name'
)
st.plotly_chart(fig_5, use_container_width=True)
fig_5.update_layout(margin=dict(b=0,t=0))
st.markdown(
"<div style='margin-bottom: 11px; font-size:10px; text-align: left; color: gray; width: 100%'>Data Source: Institute for Health Metrics and Evaluation(IHME) population forcasting</div>",
unsafe_allow_html=True
)
with row4_2:
fig_7 = px.bar(
df_fig7,
x='%population',
y='age_group_name',
color='age_group_name'
)
st.plotly_chart(fig_7, use_container_width=True)
fig_7.update_layout(margin=dict(b=0,t=0))
st.markdown(
"<div style='margin-bottom: 11px; font-size:10px; text-align: left; color: gray; width: 100%'>Data Source: Institute for Health Metrics and Evaluation(IHME) population forcasting</div>",
unsafe_allow_html=True
)
|
14,043 | 7d47a877d4f536d838dcbc52778c85e2aed897eb |
import gevent
from gevent import socket
import Tkinter as tk
# class SockLoop(object):
# def __init__(self, callback):
# self.callback = callback
# def __call__(self, sock, client):
# while 1:
# mes = sock.recv(256)
# ret = self.callback(client, mes)
# if ret is not None:
# sock.send(ret)
# def socket_server(port, callback):
# ssock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
# ssock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# ssock.bind(('', port))
# ssock.listen(5)
# while 1:
# sock, client = ssock.accept()
# gevent.spawn(callback, sock, client)
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
def websocket_app(environ, start_response):
    """WSGI app that echoes a single websocket message received on /echo.

    Fix: the Python-2 ``print message`` statement is replaced with the
    ``print(message)`` call form, which is valid under both Python 2 and 3.
    NOTE(review): start_response is never called, so non-websocket requests
    produce an invalid WSGI response -- left as-is to preserve behavior.
    """
    if environ["PATH_INFO"] == '/echo':
        ws = environ["wsgi.websocket"]
        message = ws.receive()
        print(message)
        ws.send(message)
print "starting webserver"
server = pywsgi.WSGIServer(("", 9001), websocket_app, handler_class=WebSocketHandler)
print server
server.serve_forever()
print "we a websocket serving machine!"
class App(object):
    """Minimal Tk window (text area + Quit button); a gevent greenlet was
    meant to feed incoming socket text into the text area, but the
    networking part is currently stubbed out (see _connect).
    """
    def __init__(self, root):
        self.greenlet = None
        self.root = root
        self._build_window(root)
        # Defer (stubbed) connection setup until the Tk loop is running.
        self.root.after(100, self._connect)
    def add_text(self, text):
        # Append received text to the text widget, stripping carriage returns.
        print "!!!!!!!!!!!!!!!!",text
        cleaned_string = text.replace('\r', '')
        self.text.insert(tk.END, cleaned_string)
    def quit(self):
        print "trying to quit!"
        # NOTE(review): self.greenlet may still be None here (it is never
        # spawned); gevent.kill(None) would fail -- confirm intended behavior.
        gevent.kill(self.greenlet)
        self.root.quit()
    def _build_window(self, root):
        # Text widget on top, Quit button underneath, all inside one frame.
        self.frame = tk.Frame(root)
        self.text = tk.Text(self.frame)
        self.quit_button = tk.Button(self.frame, text="Quit", command=self.quit)
        self.text.pack()
        self.quit_button.pack()
        self.frame.pack()
    def _connect(self):
        # Stub: the greenlet that would pump socket data is commented out.
        print "not connecting to anything"
        # self.greenlet = gevent.spawn(websocket_server)
        # ,
        # 9001,
        # SockLoop(lambda cl, txt: self.add_text("%s: %s" % (cl, txt))))
        # self.gevent_loop_step()
    def gevent_loop_step(self):
        # Stub: would yield to gevent then reschedule itself via Tk's idle hook.
        print "in step "
        # gevent.sleep()
        # self.root.after_idle(self.gevent_loop_step)
if __name__ == '__main__':
root = tk.Tk()
app = App(root)
root.mainloop()
# root.destroy()
|
14,044 | 7c8d99e44f77f607ff83c10900750b52caceb7c1 | #!/usr/bin/env python
# coding: utf-8
import pandas as pd
# 调用pandas工具包的read_csv函数/模块,传入训练文件和测试文件的地址参数,获得返回的数据并存于变量
df_train = pd.read_csv('../Datasets/Breast-Cancer/breast-cancer-train.csv')
df_test = pd.read_csv('../Datasets/Breast-Cancer/breast-cancer-test.csv')
# 选取'Clump Thickness'与'Cell Size'作为特征,构建测试集中的正负分类样本
# .loc()是按行扫描函数,通过行标签Type来索引'Clump Thickness'和'Cell Size'
df_test_negative = df_test.loc[df_test['Type'] == 0][['Clump Thickness', 'Cell Size']]
df_test_positive = df_test.loc[df_test['Type'] == 1][['Clump Thickness', 'Cell Size']]
import matplotlib.pyplot as plt
# 绘制图良性与恶性肿瘤样本点,自变量、因变量、符号、符号大小、符号颜色
plt.scatter(df_test_negative['Clump Thickness'],df_test_negative['Cell Size'], marker = 'o', s=200, c='red')
plt.scatter(df_test_positive['Clump Thickness'],df_test_positive['Cell Size'], marker = 'x', s=150, c='black')
# 绘制横纵坐标
plt.xlabel('Clump Thickness')
plt.ylabel('Cell Size')
# 打印图片
plt.show()
import numpy as np
# 随机生成一个只有一个元素的数组,大小0到1
intercept = np.random.random([1])
# 随机生成一个1*2的数组,大小0到1
coef = np.random.random([2])
# 绘制横坐标长度与纵坐标曲线(直线)
lx = np.arange(0, 12)
ly = (-intercept - lx * coef[0]) / coef[1]
# 打印直线,直线颜色是黄色
plt.plot(lx, ly, c='yellow')
# 绘制图良性与恶性肿瘤样本点,自变量、因变量、符号、符号大小、符号颜色
plt.scatter(df_test_negative['Clump Thickness'],df_test_negative['Cell Size'], marker = 'o', s=200, c='red')
plt.scatter(df_test_positive['Clump Thickness'],df_test_positive['Cell Size'], marker = 'x', s=150, c='black')
plt.xlabel('Clump Thickness')
plt.ylabel('Cell Size')
# 要有以下命令才能打印出图片
plt.show()
from sklearn.linear_model import LogisticRegression
lr = LogisticRegression()
# 导入sklearn中的逻辑斯蒂回归分类器
# 使用前10条训练样本学习直线的系数和截距
# 训练数据是Clump Thickness和Cell Size,训练标签是Type,分别都是10个
lr.fit(df_train[['Clump Thickness', 'Cell Size']][:10], df_train['Type'][:10])
# 直接调用分类器的.score()函数可以返回准确率
print 'Testing accuracy (10 training samples):', lr.score(df_test[['Clump Thickness', 'Cell Size']], df_test['Type'])
intercept = lr.intercept_
coef = lr.coef_[0, :]
ly = (-intercept - lx * coef[0]) / coef[1]
# 将训练出来的直线参数系数重新绘制曲线(直线)作为线性分类器
plt.plot(lx, ly, c='green')
# 将直线内容放到图表上
# 再次绘制测试数据样本,查看对比线性分类器是否有改善增强
plt.scatter(df_test_negative['Clump Thickness'],df_test_negative['Cell Size'], marker = 'o', s=200, c='red')
plt.scatter(df_test_positive['Clump Thickness'],df_test_positive['Cell Size'], marker = 'x', s=150, c='black')
plt.xlabel('Clump Thickness')
plt.ylabel('Cell Size')
plt.show()
# 以下代码就是再训练一次,就是通常是通常所说的第二代目
lr = LogisticRegression()
lr.fit(df_train[['Clump Thickness', 'Cell Size']], df_train['Type'])
print 'Testing accuracy (all training samples):', lr.score(df_test[['Clump Thickness', 'Cell Size']], df_test['Type'])
intercept = lr.intercept_
coef = lr.coef_[0, :]
ly = (-intercept - lx * coef[0]) / coef[1]
plt.plot(lx, ly, c='blue')
plt.scatter(df_test_negative['Clump Thickness'],df_test_negative['Cell Size'], marker = 'o', s=200, c='red')
plt.scatter(df_test_positive['Clump Thickness'],df_test_positive['Cell Size'], marker = 'x', s=150, c='black')
plt.xlabel('Clump Thickness')
plt.ylabel('Cell Size')
plt.show()
|
14,045 | 5e10527f97214c03cbd96a3b114e6fc4a64d5d51 | # Generated by Django 3.0.7 on 2021-04-08 10:02
from django.db import migrations, models
class Migration(migrations.Migration):
    """Switch TransactionHistoryModel's transactionDate/transactionTime
    columns to plain DateField/TimeField (dropping any previous field
    options)."""
    dependencies = [
        ('Transaction', '0005_auto_20210404_0957'),
    ]
    operations = [
        migrations.AlterField(
            model_name='transactionhistorymodel',
            name='transactionDate',
            field=models.DateField(),
        ),
        migrations.AlterField(
            model_name='transactionhistorymodel',
            name='transactionTime',
            field=models.TimeField(),
        ),
    ]
|
14,046 | cb6bcf508b34e28c2e274b90efc78b1faa6268a9 | import sys
class Graph():
    """Dense adjacency-matrix graph with Dijkstra single-source shortest paths.

    Fix: ported from Python 2 to Python 3 (``sys.maxint`` -> ``sys.maxsize``,
    ``xrange`` -> ``range``, print statement -> print() call); the algorithm
    itself is unchanged.  As before, it assumes the graph is connected from
    the source vertex.
    """

    def __init__(self, vertices):
        # Number of vertices and a V x V weight matrix (0 means "no edge").
        self.V = vertices
        self.graph = [[0 for column in range(vertices)]
                      for row in range(vertices)]

    def min_distance(self, distance_array, visited):
        """Return the index of the unvisited vertex with the smallest tentative distance."""
        min_val = sys.maxsize
        for i in range(self.V):
            if distance_array[i] < min_val and visited[i] == False:
                min_val = distance_array[i]
                min_index = i
        return min_index

    def dijkstra(self, src):
        """Print the shortest distance from ``src`` to every vertex."""
        distance_array = self.V * [sys.maxsize]
        distance_array[src] = 0
        visited = self.V * [False]
        for _ in range(self.V):
            u = self.min_distance(distance_array, visited)
            visited[u] = True
            # Relax every edge leaving u.
            for j in range(self.V):
                if self.graph[u][j] > 0 and visited[j] == False and distance_array[j] > distance_array[u] + self.graph[u][j]:
                    distance_array[j] = distance_array[u] + self.graph[u][j]
        for node in range(self.V):
            print(node, "\t", distance_array[node])
def main():
    """Build the classic 9-vertex example graph and run Dijkstra from vertex 0."""
    g = Graph(9)
    # Symmetric adjacency matrix; 0 means "no edge".
    g.graph = [[0, 4, 0, 0, 0, 0, 0, 8, 0],
               [4, 0, 8, 0, 0, 0, 0, 11, 0],
               [0, 8, 0, 7, 0, 4, 0, 0, 2],
               [0, 0, 7, 0, 9, 14, 0, 0, 0],
               [0, 0, 0, 9, 0, 10, 0, 0, 0],
               [0, 0, 4, 14, 10, 0, 2, 0, 0],
               [0, 0, 0, 0, 0, 2, 0, 1, 6],
               [8, 11, 0, 0, 0, 0, 1, 0, 7],
               [0, 0, 2, 0, 0, 0, 6, 7, 0]
               ]
    g.dijkstra(0);
if __name__ == "__main__":
    main()
|
14,047 | a398445a92eb0d5a97f621f7f98fbb287e15263b | string = input()
substring = 'f'
kst = len(string)
pos = string.find(substring)
newString = string[pos + 1:]
if pos == -1:
print(-2)
elif pos != -1:
pos2 = newString.find(substring)
if pos2 == -1:
print(pos2)
else:
print(pos2 + pos + 1)
|
14,048 | 1431f2d9bcc5eb312e96fc6350524ac5ef44a718 | import pygame
from plane_sprites import *
screenHeight = 700
screenWidth = 480
heroWidth = 120
heroHeight = 125
hero_rect = pygame.Rect(100, 500, heroWidth, heroHeight)
print(hero_rect.x, hero_rect.y, hero_rect.size)
pygame.init()
# 创建游戏窗口 480*700
screen = pygame.display.set_mode((screenWidth, screenHeight))
# 绘制背景图像
bg = pygame.image.load('./images/background.jpg')
screen.blit(bg, (0, 0))
# 加载我方飞机
hero = pygame.image.load('./images/me1.png')
screen.blit(hero, (200, 500))
# 敌机精灵
enemy = GameSprite('./images/enm1.png')
enemy1 = GameSprite('./images/enm1.png', 2)
# 敌机精灵组
enemy_group = pygame.sprite.Group(enemy, enemy1)
while True:
# 捕获事件
for event in pygame.event.get():
if event.type == pygame.QUIT:
print('Game Over')
pygame.quit()
exit()
# 修改飞机位置
hero_rect.y -= 1
if hero_rect.y <= -heroHeight:
hero_rect.y = screenHeight
screen.blit(bg, (0, 0))
screen.blit(hero, hero_rect)
# 精灵组方法
enemy_group.update()
enemy_group.draw(screen)
pygame.display.update()
event = pygame.event.poll()
if event.type == pygame.QUIT:
pygame.quit()
exit() |
14,049 | 4077778184da79fc85eea14472ccdf8e218538b0 | import networkx as nx
from modelo.veiculo import Veiculo
import random
#Função que calcula o tempo da trajetória que determinado veículo fez
def CalculaTempo(listaDeVisitados, veiculo, G):
    """Return the total time of the route a vehicle drove through graph G.

    Total = loading time at the depot (tc per package, every stop but the
    first) + unloading time at each customer (td per package) + travel time
    over the edges flagged as part of the route ("caminho" == True).

    Fixes: the second travel branch tested ``G[u][v] == True`` (comparing
    the whole edge-attribute dict to True, which is always False) and
    divided by the nonexistent attribute ``veiculo.válida``; it now mirrors
    the first branch, reading ``G[u][v]["caminho"]`` and dividing by the
    vehicle speed ``veiculo.vf``.
    """
    tempoInicial = 0
    for i in listaDeVisitados:
        if i != listaDeVisitados[0]:
            tempoInicial += G.node[i]["numeroDePacotes"] * veiculo.tc
    tempoDescarga = 0
    for v in listaDeVisitados:
        if v != listaDeVisitados[0]:
            tempoDescarga += G.node[v]["numeroDePacotes"] * veiculo.td
    tempoLocomocao = 0
    # Sum distance/speed over every edge that belongs to the route.
    for u, v in G.edges():
        if (u == listaDeVisitados[0] or v == listaDeVisitados[0]) and (G[u][v]["caminho"] == True):
            tempoLocomocao += G[u][v]["distancia"] / veiculo.vf
        elif G[u][v]["caminho"] == True:
            tempoLocomocao += G[u][v]["distancia"] / veiculo.vf
    return (tempoInicial + tempoDescarga + tempoLocomocao)
# TSP é a função avalia se o caminho gerado pela função TSPAux calcula,
# usando em consideração o tempo da trajetória do veículo, o valor em dinheiro que o veículo
# pode carregar e o volume do trajeto.
def TSP (G,veiculo):
listaDeVertices = []
for vertice in G.nodes():
listaDeVertices.append(vertice)
listaDeVisitados = [listaDeVertices[0]]
somaVolume = 0
somavalor = 0
somaTempo = 0
#temporario foi criada com o intuito de salva a ultima trajetória válida que o veiculo pode fazer
temp = nx.Graph()
tempLista = []
#while que verifica se ainda é possivel calcular o um melhor caminho:
# Se ainda todas as condições forem satisfeitas, o TSPAux calcula
#uma nova trajetória
# Caso o TSPAux retorne um caminho inválido,o G recebe o valor da ultima itercação.
motivo = ""
while len(listaDeVisitados) != len(listaDeVertices) and somaVolume <= veiculo.V and somavalor <= veiculo.P and somaTempo<=7:
temp = G.copy()
tempLista = list(listaDeVisitados)
G,listaDeVisitados = TSPaux (G,listaDeVisitados)
for i in listaDeVisitados:
somavalor += G.node[i]["valor"]
somaVolume += G.node[i]["volume"]
if (len(listaDeVisitados) > 1):
somaTempo = CalculaTempo(listaDeVisitados,veiculo,G)
print(len(listaDeVisitados))
if len(listaDeVisitados) == 1:
if somavalor > veiculo.P:
motivo = "Valor"
elif somaVolume > veiculo.V :
motivo = "Volume"
elif (somaTempo > 7):
motivo = "Tempo"
else:
motivo = "Desconhecido"
print("Motivo fornecido: ", motivo)
if somaVolume > veiculo.V or somavalor > veiculo.P or somaTempo>7:
G = temp
listaDeVisitados = tempLista
verticeInicial = listaDeVisitados[0]
#Retorna o caminho atual
for i in listaDeVisitados:
if (i != verticeInicial):
G.remove_node(i)
return G,listaDeVisitados,motivo
# Função que realiza o TSP do vizinho mais proximo
# Ela inicia o ciclo no centro de distribuição e a partir disso enquanto houver casas nao visitadas,
# a função adiciona a casa mais proxima ao ciclio até que todas as casas sejam visitadas
def TSPaux (G,listaVerticesVisitadas):
melhorDistancia = float("inf")
melhorAresta = (-1,-1)
#Acha o novo vertice que irá ser adicionado no ciclo a partir da menor distancia
for u in listaVerticesVisitadas:
for x,verticeAdjacente in G.edges(u):
if G[u][verticeAdjacente]["distancia"] < melhorDistancia and G.node[verticeAdjacente]["visitado"] == False and verticeAdjacente not in listaVerticesVisitadas :
melhorDistancia = G[u][verticeAdjacente]["distancia"]
melhorAresta = (u,verticeAdjacente)
if len(listaVerticesVisitadas) == 1:
G[melhorAresta[0]][melhorAresta[1]]["caminho"] = True
elif len(listaVerticesVisitadas) == 2:
G[melhorAresta[1]][listaVerticesVisitadas[0]]["caminho"] = True
G[melhorAresta[1]][listaVerticesVisitadas[1]]["caminho"] = True
else:
novaMelhorDistancia = float("inf")
novaMelhorAresta = (-1,-1)
for u in listaVerticesVisitadas:
for x,verticeAdjacente in G.edges(u):
if G[u][verticeAdjacente]["distancia"] < novaMelhorDistancia and melhorAresta[0] != u and verticeAdjacente == melhorAresta[1] :
novaMelhorDistancia = G[u][verticeAdjacente]["distancia"]
novaMelhorAresta = (u,verticeAdjacente)
G[melhorAresta[0]][melhorAresta[1]]["caminho"] = True
G[melhorAresta[0]][novaMelhorAresta[0]]["caminho"] = False
G[novaMelhorAresta[0]][novaMelhorAresta[1]]["caminho"] = True
listaVerticesVisitadas.append(melhorAresta[1])
return G,listaVerticesVisitadas
# Função usada para determinar os caminhos por onde um veículo irá passar,ela ordena os veículos a partir de um calculo de custo beneficio,
# a função determina o caminho passando de casa em casa onde o melhor veiculo tem preferencia para fazer a entrega,
# caso ele não suporte o valor ou o volume a rota é fechada e outro veículo é enviado para a casa, caso nenhum veículo possa realizar a entrega
# não é possível resolver e a função retorna erro
def MontaCaminhos (G,veiculos):
P = criarGrafo(G)
# Grafo temporario para que o grafo original não seja alterado
N = nx.Graph()
N = P.copy()
caminhos = []
melhoresVeiculos = []
N.node[1]["volume"] = 0
N.node[1]["valor"] = 0
N.node[1]["numeroDePacotes"] = 0
for veiculo in veiculos:
melhoresVeiculos.append(veiculo)
melhoresVeiculos.sort(reverse = True,key=lambda veiculo:veiculo.calculaCustoBeneficio())
iteracao= 0
# Enquanto o grafo tiver vertíces além do centro, é realizado o TSP para a determinação do caminho que o veículo utilizado
while N.size() != 0:
k = 0
for i in range (0,len(melhoresVeiculos)):
if (melhoresVeiculos[i].Nv > 0):
k = i
break
caminhoAtual = []
N,caminhoAtual,motivo = TSP(N,melhoresVeiculos[0])
# while utilizado para a mudança de veículo caso o atual não suporte o volume ou o valor da entrega
while len(caminhoAtual) == 1 :
k += 1
if k == 5:
# print("Onde estou?",k)
if k == 5 and melhoresVeiculos[k-1].Nv == 0:
raise Exception("Não foi possível resolver por causa do numero de carros")
elif (motivo == "Volume"):
raise Exception("Não foi possível resolver por causa do volume")
elif (motivo == "Valor"):
raise Exception("Não foi possível resolver por causa do valor")
elif (motivo == "Tempo"):
raise Exception("Não foi possível resolver por causa do tempo")
elif (motivo == "Desconhecido"):
raise Exception ("Desconhecido")
if melhoresVeiculos[k].Nv > 0:
N,caminhoAtual,motivo = TSP(N,melhoresVeiculos[k])
caminhos.append({iteracao+1:(caminhoAtual,melhoresVeiculos[k])})
iteracao += 1
veiculos[veiculos.index(melhoresVeiculos[k])].Nv -= 1
return voltaOriginal(caminhos,P,G)
#Função usada para que após feito balanceamento, o grafo seja adaptado para realização do TSP
def criarGrafo (C):
G=nx.Graph()
cont = 2
t = 0
# Cria os vertíces do grafo adaptado
for i in C.nodes():
if(i.centro == True):
G.add_node(1,volume=i.volume,valor=i.getValor(),numeroDePacotes=i.pacotes,coordX=i.getX(),coordY=i.getY(),centro=i.centro,visitado = False)
else:
G.add_node(cont,volume=i.volume,valor=i.getValor(),numeroDePacotes=i.pacotes,coordX=i.getX(),coordY=i.getY(),centro=i.centro,visitado = False)
cont += 1
# print ("numero de vertices :",len(list(G.nodes())))
# Cria as arestas do grafo adaptado, comparando se o [i][j] == [u][v] respectivamente
# onde [i][j] são os indices dos vertices e [u][v] são os clientes
for i in range(1,1+len(list(G.nodes()))):
for j in range(i+1,1+len(list(G.nodes()))):
for u,v in C.edges():
w = u.getValor()
x = u.getX()
y = u.getY()
w1 = v.getValor()
x1 = v.getX()
y1 = v.getY()
if u.volume == G.node[i]["volume"] and w == G.node[i]["valor"] and u.pacotes == G.node[i]["numeroDePacotes"] and x == G.node[i]["coordX"] and y == G.node[i]["coordY"] and u.centro == G.node[i]["centro"]:
if v.volume == G.node[j]["volume"] and w1 == G.node[j]["valor"] and v.pacotes == G.node[j]["numeroDePacotes"] and x1 == G.node[j]["coordX"] and y1 == G.node[j]["coordY"] and v.centro == G.node[j]["centro"]:
G.add_edges_from([(i,j)],distancia = C[u][v]["distancia"],caminho = False)
# print("Vértices: ", len(list(G.nodes())))
# print("Arestas: ", len(list(G.edges())))
return G
def voltaOriginal(caminho, G, grafoOriginal):
listaCaminho = []
listaTeste = []
for k in range (len(caminho)):
listaCaminho.append({k+1:([],caminho[k][k+1][1])})
for i in caminho[k][k+1][0]:
for j in grafoOriginal.nodes():
if j.volume == G.node[i]["volume"] and j.getValor() == G.node[i]["valor"] and j.pacotes == G.node[i]["numeroDePacotes"] and j.getX() == G.node[i]["coordX"] and j.getY() == G.node[i]["coordY"] and j.centro == G.node[i]["centro"]:
listaCaminho[k][k+1][0].append(j)
return listaCaminho |
14,050 | 385f494dbd2ffc62082b4f190bcfe2a246cb0702 | # Generated by Django 2.2.4 on 2020-01-28 18:51
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter Meet.start_time/end_time field defaults.

    NOTE(review): the defaults are concrete datetime.time values captured
    at the moment makemigrations ran (18:51:39.xxx), not a callable --
    every new row gets that fixed time.  Probably a callable was intended;
    confirm before relying on these defaults.
    """
    dependencies = [
        ('meet', '0008_auto_20200128_1850'),
    ]
    operations = [
        migrations.AlterField(
            model_name='meet',
            name='end_time',
            field=models.TimeField(default=datetime.time(18, 51, 39, 23825)),
        ),
        migrations.AlterField(
            model_name='meet',
            name='start_time',
            field=models.TimeField(default=datetime.time(18, 51, 39, 23786)),
        ),
    ]
|
14,051 | 46646bbedb478f7105e8132e575cb1fc6872b5f2 | def info():
return 'Modul tentang menghitung luas segitiga'
def hitung_segitiga(alas, tinggi):
    """Return the area of a triangle given its base (alas) and height (tinggi)."""
    return alas * tinggi / 2
|
14,052 | 7c8d7d6d75f8339d7b48901da16de3b894b3f631 | import func
# Repeatedly prompt for a CNPJ and report whether func.valida accepts it.
while True:
    entrada = input('Digite um CNPJ: ')
    resultado = f'O CNPJ: {entrada} é Válido' if func.valida(entrada) else f'O CNPJ {entrada} é Inválido'
    print(resultado)
|
14,053 | 432207d2ad527519ea907f9f1c0d47c6cf7e73c7 | rule merge_interaction_matrices:
input:
mat = expand(
os.path.join(
hic_data_path, "hic_results", "matrix", "{sample}", "raw",
"{{bin}}", "{sample}_{{bin}}.matrix"
),
sample = samples
),
bed = expand(
os.path.join(
hic_data_path, "hic_results", "matrix", "{sample}", "raw",
"{{bin}}/{sample}_{{bin}}_abs.bed"
),
sample = samples
)
output:
mat = temp(
os.path.join(
hic_output_path, "matrix", "raw", "{bin}", "merged_{bin}.matrix"
)
),
bed = temp(
os.path.join(
hic_output_path, "matrix", "raw", "{bin}",
"merged_{bin}_abs.bed"
)
)
params:
samples = samples,
bin = "{bin}",
in_path = hic_data_path + "/hic_results/matrix/",
out_path = os.path.join(hic_output_path, "matrix", "raw")
conda: "../envs/merge_matrices.yml"
log: "logs/merge/merge_{bin}.log"
threads: 6
shell:
"""
Rscript --vanilla \
scripts/merge_matrices.R \
{params.bin} \
{params.in_path} \
{params.out_path} \
{params.samples} &> {log}
"""
# Gzip the merged matrix/bed pair with pigz (parallel gzip, -p {threads});
# the uncompressed inputs are marked temp() by the merge rule and are
# deleted once these final .gz artifacts exist.
rule compress_merged_output:
    input:
        mat = os.path.join(
            hic_output_path, "matrix", "raw", "{bin}", "merged_{bin}.matrix"
        ),
        bed = os.path.join(
            hic_output_path, "matrix", "raw", "{bin}", "merged_{bin}_abs.bed"
        )
    output:
        mat = os.path.join(
            hic_output_path, "matrix", "merged_{bin}.matrix.gz"
        ),
        bed = os.path.join(
            hic_output_path, "matrix", "merged_{bin}_abs.bed.gz"
        )
    threads: 4
    conda: "../envs/pigz.yml"
    shell:
        """
        pigz -p {threads} -c {input.mat} > {output.mat}
        pigz -p {threads} -c {input.bed} > {output.bed}
        """
|
14,054 | 8733aa6901af6ef26d0f8941afc474ad208464d9 | # importing the calendar module
import calendar
yy = 2020 # yy stands for year
mm = 03 # mm stands for month
# To take month and year as input from the keyboard you may use
# yy = int(input("Enter year: "))
# mm = int(input("Enter month: "))
# displays the calendar
print(calendar.month(yy, mm)) |
14,055 | 6958e294c9d1e248844aa66d68ba8476a203ced1 | '''
Function:
鸟类
作者:
Charles
微信公众号:
Charles的皮卡丘
'''
import pygame
# Bird类
class Bird(pygame.sprite.Sprite):
    """Flappy-style bird sprite.

    Runs a small jump/fall state machine and tilts the sprite image to
    match the motion (nose up while jumping, nose down while falling).
    """
    def __init__(self, HEIGHT, WIDTH):
        pygame.sprite.Sprite.__init__(self)
        # Reference (unrotated) image.
        self.ori_bird = pygame.image.load("./resources/images/bird.png")
        # Image actually displayed; re-rotated every frame.
        self.rotated_bird = pygame.image.load("./resources/images/bird.png")
        self.rect = self.rotated_bird.get_rect()
        # Game surface dimensions.
        self.HEIGHT = HEIGHT
        self.WIDTH = WIDTH
        # Current tilt angle (degrees) and its magnitude limit.
        self.angle = 0
        self.max_angle = 15
        # Speeds: tilt (deg/s), fall (px/s), climb (px/s).
        self.angle_speed = 300
        self.down_speed = 300
        self.jump_speed = 150
        # Height climbed so far during the current jump.
        self.cur_jump_height = 0
        # A jump ends once cur_jump_height reaches this threshold.
        self.jump_height_thresh = 8
        # Whether a jump is in progress.
        self.is_jump = False
        # Position of the (unrotated) sprite.
        self.x = 150
        self.y = (self.HEIGHT - self.ori_bird.get_height()) / 2
        self.set_bird()

    def set_bird(self):
        """Rotate the sprite and re-anchor its rect around (self.x, self.y).

        pygame.transform.rotate returns a surface whose bounding box grows
        with rotation, so offset by half the size difference to keep the
        visual position stable.
        """
        self.rotated_bird = pygame.transform.rotate(self.ori_bird, self.angle)
        delta_width = (self.rotated_bird.get_rect().width - self.ori_bird.get_rect().width) / 2
        # BUGFIX: the vertical offset must use the *height* difference;
        # the original used .width here, misplacing the sprite vertically.
        delta_height = (self.rotated_bird.get_rect().height - self.ori_bird.get_rect().height) / 2
        self.rect.left, self.rect.top = self.x - delta_width, self.y - delta_height

    def is_dead(self):
        """Return True once the bird has fallen below the bottom edge."""
        return self.y >= self.HEIGHT

    def update(self, time_passed):
        """Advance the animation by *time_passed* seconds."""
        if self.is_jump:
            # Jump phase 1: tilt nose-up until max_angle is reached.
            if self.angle < self.max_angle:
                self.angle = min(time_passed * self.angle_speed + self.angle, self.max_angle)
                self.set_bird()
                return
            # Jump phase 2: climb until the jump threshold is met.
            if self.cur_jump_height < self.jump_height_thresh:
                self.cur_jump_height += time_passed * self.jump_speed
                self.y = max(0, self.y - self.cur_jump_height)
                self.set_bird()
                return
            # Jump finished; fall through to the falling behaviour.
            self.cur_jump_height = 0
            self.is_jump = False
        # Falling: tilt nose-down first, then descend.
        if self.angle > -self.max_angle:
            self.angle = max(-self.max_angle, self.angle - self.angle_speed * time_passed)
            self.set_bird()
            return
        self.y += self.down_speed * time_passed
        self.set_bird()

    def reset(self):
        """Restore the initial (pre-game) state."""
        self.angle = 0
        self.cur_jump_height = 0
        self.is_jump = False
        self.x = 150
        self.y = (self.HEIGHT - self.ori_bird.get_height()) / 2
        self.set_bird()
14,056 | b1e1eb9f0bd983c2a2f38361022fe7a3a85700b7 | # Generated by Django 3.0.8 on 2020-07-19 21:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial migration: creates the Transaction model, linked to
    # persons.Person through a cascading foreign key.
    initial = True
    dependencies = [
        ('persons', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Transaction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('kind', models.CharField(choices=[('I', 'Receita'), ('E', 'Despesa')], max_length=1, verbose_name='tipo')),
                ('description', models.CharField(max_length=255, verbose_name='descrição')),
                ('value', models.DecimalField(decimal_places=2, max_digits=16, verbose_name='valor')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='criado em')),
                ('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='persons.Person')),
            ],
        ),
    ]
|
14,057 | 3984630f6fd2c80e298c24c4dd4cc3a15ed04627 | __author__ = 'LiGe'
#encoding:Utf-8
import csv
########## Add class labels, drop the user/item name columns, keep only the feature columns ##########
def put_on_label(feature_csv, feature_txt_label):
    """Copy feature columns 2..40 of *feature_csv* to a space-separated text file.

    The first two CSV columns (user / item identifiers) are dropped; the
    remaining 39 feature columns are written one row per line.

    Fixes over the original version:
    - ``csv.reader(file(feature_csv, 'rb'))`` was Python-2-only
      (``file`` was removed and csv needs text mode in Python 3);
    - the output file handle was never closed;
    - the space separator between columns 7 and 8 was accidentally
      missing, fusing those two fields.
    """
    with open(feature_txt_label, 'w') as out_file, \
            open(feature_csv, 'r', newline='') as in_file:
        for line in csv.reader(in_file):
            out_file.write(' '.join(line[2:41]) + '\n')
if __name__=="__main__":
    # Extract the feature columns of the test set into a plain text file.
    # (NOTE: 'lable' is an existing typo in the variable name, kept as-is.)
    feature_csv='global_test_data_feature.csv'
    feature_txt_lable='test_data_9feature.txt'
    put_on_label(feature_csv,feature_txt_lable)
|
14,058 | 55b988ebfd86cda76a9a95715a99b2328b0513fd | # Generated by Django 2.1.2 on 2021-03-17 18:47
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds latitude / longitude / time columns to the asset model.
    dependencies = [
        ('dashboard', '0003_position_assetid'),
    ]
    operations = [
        migrations.AddField(
            model_name='asset',
            name='latitude',
            field=models.DecimalField(decimal_places=16, default=27.218848985295054, max_digits=19),
        ),
        migrations.AddField(
            model_name='asset',
            name='longitude',
            field=models.DecimalField(decimal_places=16, default=77.50081190185514, max_digits=19),
        ),
        migrations.AddField(
            model_name='asset',
            name='time',
            # NOTE: this default is the fixed timestamp captured when the
            # migration was generated, not "now" at row-insert time.
            field=models.DateTimeField(default=datetime.datetime(2021, 3, 18, 0, 17, 33, 648187)),
        ),
    ]
|
14,059 | 19abef0c900052bd03ad434196fecda7595a7a44 | import ssl
from socket import timeout, error
def recv_all(conn, bite):
    """Read up to *bite* bytes from *conn*, one byte at a time.

    Stops early once the accumulated data ends with CRLF.  Raises
    socket.error if the peer closes the connection mid-read.
    """
    chunks = []
    for _ in range(bite):
        byte = conn.recv(1)
        if not byte:
            raise error
        chunks.append(byte)
        # Terminate on a CRLF line ending.
        if len(chunks) >= 2 and chunks[-2] + chunks[-1] == b"\r\n":
            break
    return b"".join(chunks)
def input_user(ask_for="", code=True):
    """Prompt with *ask_for* until a non-empty line is entered.

    Returns the line UTF-8 encoded when *code* is true, otherwise as str.
    """
    while True:
        entered = input(ask_for)
        if entered:
            break
    return entered.encode("utf-8") if code else entered
def connect(conn, server_name=None, ca_cert="", cert="", key="", ask=False):
    """Wrap socket *conn* in TLS 1.2, optionally verifying the peer.

    Verification is enabled when *ca_cert* is given and the user did not
    opt out (with *ask* true the user is prompted first).  A client
    certificate/key pair is loaded when both *cert* and *key* are set.
    Raises Exception("Cert_Error") when verification fails.
    """
    context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
    ver = ""
    if ask:
        # Any answer other than 'y' disables verification.
        ver = input_user("Verify = 'y', not verify = something else: ", False)
        ver = "" if ver == "y" else ver
    if ca_cert and not ver:
        print("\033[3;36mZ żądaniem!", sep="")
        context.verify_mode = ssl.CERT_REQUIRED
        context.load_verify_locations(ca_cert)
        if key and cert:
            context.load_cert_chain(certfile=cert, keyfile=key)
    else:
        print("\033[3;35mBez żądania!", sep="")
    # Pass the SNI hostname only when supported and provided.
    conn = context.wrap_socket(conn) if not ssl.HAS_SNI or not server_name \
        else context.wrap_socket(conn, server_hostname=server_name)
    if ca_cert and not ver:
        ca_cert = conn.getpeercert()
        print(ca_cert)
        # NOTE(review): ssl.match_hostname returns None on success and
        # raises CertificateError on mismatch, so this condition raises
        # only when no peer certificate is present; match_hostname is
        # also deprecated (removed in Python 3.12) — confirm intent.
        if not ca_cert or ssl.match_hostname(ca_cert, server_name):
            raise Exception("Cert_Error")
    return conn
def reach_headers(sock, fnd=b"", print_headers=True):
    """Consume header lines from *sock* until the blank CRLF line.

    When *fnd* is a non-empty byte marker (e.g. b"Content-Length: "),
    the text between the first occurrence of the marker and that line's
    CRLF is captured into *fnd* and ultimately returned; otherwise the
    original *fnd* is returned unchanged.
    """
    while True:
        mes = recv_all(sock, 1024)
        if fnd != b"":
            point = mes.find(fnd)
            if point != -1:
                # Capture the remainder of the line after the marker.
                fnd = mes[point + len(fnd): mes.find(b"\r\n")]
        if print_headers:
            print(mes)
        # A lone CRLF marks the end of the header section.
        if mes == b"\r\n":
            return fnd
def recv_body(sock, length):
    """Receive up to *length* bytes from *sock* one byte at a time.

    Returns whatever was collected so far if the socket times out.
    """
    parts = []
    try:
        for _ in range(length):
            parts.append(sock.recv(1))
    except timeout:
        pass
    return b"".join(parts)
# server-side handler
def rec_messages(connection):
    """Echo-server loop: read CRLF-terminated messages and send them back.

    Runs until the peer disconnects, then prints a disconnect notice and
    terminates the current thread/process via exit(0).
    """
    addr = ""
    try:
        addr = connection.getpeername()
        while True:
            mes = recv_all(connection, 4096)
            print(addr, mes, sep=": ")
            connection.sendall(mes)
    except error:
        print("Disconnected : {}".format(addr[0] + ":" + str(addr[1])))
        exit(0)
|
14,060 | 0db466801eadc2eae40156ee96cc2faa7c47faa2 | from bs4 import BeautifulSoup
import requests
from crawlers.Crawler import Crawler
class GeniusCrawler(Crawler):
    """Crawler that scrapes song lyrics from genius.com pages."""
    def __init__(self):
        super().__init__('Genius Lyric')
    def search_for_lyrics(self, artist, song):
        """Fetch and clean the lyrics for *song* by *artist*.

        Builds the genius.com URL slug from the two names (spaces to
        dashes, apostrophes removed), downloads the page and extracts
        the div with class "lyrics".  Any failure (network error,
        missing div, ...) is reported via self.raise_not_found().
        """
        try:
            _artist = str(artist).strip().replace(' ', '-').replace("'", '')
            _name_song = song.strip().replace(' ', '-').replace("'", '')
            song_url = '{}-{}-lyrics'.format(_artist, _name_song)
            request = requests.get("https://genius.com/{}".format(song_url))
            html_code = BeautifulSoup(request.text, features="html.parser")
            lyric = html_code.find("div", {"class": "lyrics"}).get_text()
            return self.format_lyrics(lyric)
        except Exception as e:
            # Any failure is treated as "lyrics not found".
            self.raise_not_found()
    def format_lyrics(self, lyric):
        """Split *lyric* into lines, dropping [section] annotation lines."""
        lines = map(
            lambda line: line if ']' not in line and '[' not in line else None,
            lyric.split('\n')
        )
        lines = filter(
            lambda line: line is not None,
            list(lines)
        )
        return list(lines)
14,061 | 27a00bd6943bb3c29da3c889f28939dcecd521df | from __future__ import print_function
def main():
    """Demonstrate str.isalnum(), str.isalpha() and str.isspace()."""
    value_number = '4lay'
    value_alpha = 'Alay'
    value_space = ' AlayAnjay'
    check_number = value_number.isalnum()
    check_alpha = value_alpha.isalpha()
    check_space = value_space.isspace()
    print(f'Variabel Number : {value_number}')
    print(f'Variabel Alpha : {value_alpha}')
    print(f'Variabel Space : {value_space}')
    print(f'Hasil Number : {check_number} ')
    print(f'Hasil Alpha : {check_alpha} ')
    print(f'Hasil Space : {check_space} ')


if __name__ == "__main__":
    main()
14,062 | e062c71ed64cac5f48db074be7ba06eaf7c91baa | def more_n(sp, n): # объявление функции
for i in sp: # для каждого числа
if i>n: # если число больше n
print(i, end=' ') # то вывести число
more_n((5, 4, 4, 9), 4) |
14,063 | cd7ba97177825fd93d87732d8502fc8795efcda5 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2017/11/17 9:58
# @Author : zhujinghui
# @File : xueqiu_headers_middlewares.py
# @Software: PyCharm
import random
from ...utils.header import agents
class UserAgentMiddleware(object):
    """Scrapy downloader middleware: assign a random User-Agent per request."""
    def process_request(self,request,spider):
        # Pick one of the pre-configured agent strings (utils.header.agents).
        agent = random.choice(agents)
        request.headers["User-Agent"] = agent
14,064 | b0c405359a3ff5145e359e13aeaa05ccf1f48592 | ## 1. Data Structures ##
# Dataquest pandas Series exercises.  The steps below repeatedly
# reassign the same names (fandango, series_custom), so statement
# order matters throughout.
import pandas as pd
#Read the csv file to create a dataframe
fandango = pd.read_csv("fandango_score_comparison.csv")
#use the .head() method to print the first two rows.
print(fandango.head(2))
## 2. Integer Indexes ##
fandango = pd.read_csv('fandango_score_comparison.csv')
#Select the FILM column, assign it to the variable series_film, and print the first five values.
series_film = fandango["FILM"]
print(series_film[0:5])
#select the RottenTomatoes column, assign it to the variable series_rt, and print the first five values.
series_rt = fandango["RottenTomatoes"]
print(series_rt[0:5])
## 3. Custom Indexes ##
# Import the Series object from pandas
from pandas import Series
film_names = series_film.values
rt_scores = series_rt.values
#Create a new Series object named series_custom that has a string index (based on the values from film_names), and contains all of the Rotten Tomatoes scores from series_rt
series_custom = Series(rt_scores, index=film_names)
series_custom[['Minions (2015)', 'Leviathan (2014)']]
## 4. Integer Index Preservation ##
series_custom = Series(rt_scores , index=film_names)
series_custom[['Minions (2015)', 'Leviathan (2014)']]
#Assign the values in series_custom at indexes 5 through 10 to the variable fiveten
fiveten = series_custom[5:11]
print(fiveten)
## 5. Reindexing ##
# Reorder the series alphabetically by film title.
original_index = series_custom.index
x = sorted(original_index)
sorted_by_index = series_custom.reindex(x)
## 6. Sorting ##
#Sort series_custom by index using sort_index(), and assign the result to the variable sc2.
sc2 = series_custom.sort_index()
sc3 = series_custom.sort_values()
print(sc2[0:11])
print(sc3[0:11])
## 7. Transforming Columns With Vectorized Operations ##
#Normalize series_custom (which is currently on a 0 to 100-point scale) to a 0 to 5-point scale by dividing each value by 20.
series_normalized = series_custom/20
## 8. Comparing and Filtering ##
# Boolean masks combine element-wise with &.
criteria_one = series_custom > 50
criteria_two = series_custom < 75
both_criteria = series_custom[criteria_one & criteria_two]
## 9. Alignment ##
# Addition aligns the two series on their shared FILM index.
rt_critics = Series(fandango['RottenTomatoes'].values, index=fandango['FILM'])
rt_users = Series(fandango['RottenTomatoes_User'].values, index=fandango['FILM'])
rt_mean = (rt_critics + rt_users)/2
print(rt_mean)
14,065 | 3b5b0c3bb83dd937393cd85e2522fc7b40c4c448 | # -*- coding: utf-8 -*-
import os
import time
import shutil
from threading import Thread
from flask import render_template, Response, request
from app import app
from app.chat import itchat_run
from app.utils import generator_string_id
from app.models import GroupMsg
from app.models import User
from app.models import Group
from app.models import Robot
from config import basedir
@app.route('/')
def hello_world():
    """Spawn an itchat login worker and serve its QR code once written."""
    session_id = generator_string_id(16, 2)
    Thread(target=itchat_run, args=[session_id]).start()
    qr_path = os.path.join(basedir, 'app', 'static', 'qr', session_id + '.png')
    # Poll until the worker thread has written the QR image to disk.
    while not os.path.exists(qr_path):
        time.sleep(0.1)
    return render_template('login/index.html', filename='qr/{}.png'.format(session_id))
@app.route('/query/', methods=['GET'])
def data_query():
    """List group messages matching the optional GET filter parameters."""
    args = request.args
    group = args.get('group', '')
    nick_name = args.get('nick_name', '')
    actual_name = args.get('actual_name', '')
    type = args.get('type', '')
    start_time = args.get('start_time', '')  # accepted but currently unused
    end_time = args.get('end_time', '')      # accepted but currently unused
    offset = args.get('offset', 0, int)
    limit = args.get('limit', 20, int)
    query = GroupMsg.query
    if group:
        query = query.filter(GroupMsg.group==group)
    if nick_name:
        query = query.filter(GroupMsg.user_nick_name==nick_name)
    if actual_name:
        query = query.filter(GroupMsg.user_actual_name==actual_name)
    # NOTE(review): `type` is a string here, so `type != -1` is always
    # true; the comparison is kept for behavioural parity.
    if type and type != -1:
        query = query.filter(GroupMsg.type==type)
    count = query.count()
    # Newest first from the DB, then reversed for chronological display.
    msgs = query.order_by(GroupMsg.create_time.desc()).offset(offset).limit(limit).all()
    return render_template('login/query.html', msgs=msgs[::-1], offset=offset,
                           limit=limit, count=count, int=int, abs=abs, type=type,
                           group=group, nick_name=nick_name, actual_name=actual_name)
@app.route('/users/', methods=['GET'])
def data_users():
    """List users matching the optional GET filter parameters."""
    args = request.args
    group = args.get('group', '')
    nick_name = args.get('nick_name', '')
    remark_name = args.get('remark_name', '')
    display_name = args.get('display_name', '')
    sex = args.get('sex', '')
    province = args.get('province', '')
    city = args.get('city', '')
    offset = args.get('offset', 0, int)
    limit = args.get('limit', 20, int)
    query = User.query
    # Apply every non-empty equality filter, in a fixed order.
    filters = {
        'group': group, 'nick_name': nick_name, 'remark_name': remark_name,
        'display_name': display_name, 'sex': sex, 'province': province,
        'city': city,
    }
    for field, value in filters.items():
        if value:
            query = query.filter_by(**{field: value})
    count = query.count()
    users = query.order_by(User.id.desc()).offset(offset).limit(limit).all()
    return render_template('login/users.html', users=users, group=group,
                           nick_name=nick_name, remark_name=remark_name,
                           display_name=display_name, sex=sex, province=province,
                           city=city, int=int, abs=abs, offset=offset, limit=limit, count=count)
@app.route('/groups/', methods=['GET'])
def data_groups():
    """List chat groups, optionally filtered by exact group name."""
    group = request.args.get('group', '')
    hot = request.args.get('hot', '7')  # read but currently unused
    offset = request.args.get('offset', 0, int)
    limit = request.args.get('limit', 20, int)
    query = Group.query.filter_by(group_name=group) if group else Group.query
    count = query.count()
    groups = query.order_by(Group.id.desc()).offset(offset).limit(limit).all()
    return render_template('login/groups.html', groups=groups, int=int, abs=abs,
                           offset=offset, limit=limit, count=count)
@app.route('/robots/', methods=['GET'])
def data_robots():
    """List robots, optionally filtered by status."""
    status = request.args.get('status', '')
    offset = request.args.get('offset', 0, int)
    limit = request.args.get('limit', 20, int)
    query = Robot.query.filter_by(status=status) if status else Robot.query
    count = query.count()
    robots = query.order_by(Robot.status.desc()).offset(offset).limit(limit).all()
    return render_template('login/robots.html', robots=robots, int=int, abs=abs,
                           offset=offset, limit=limit, count=count)
# @app.route('/img/{img_id}')
# def img(img_id):
# print(img_id)
# img = file(os.path.join(basedir, img_id))
# resp = Response(img, mimetype='image/png')
# return resp |
14,066 | c9e44baad537f447a9409de948ece2aeffba71d5 | #!/usr/local/bin/python3
import os, sys, time
import psutil
from multiprocessing import Manager
from multiprocessing import Process
def task(pids):
    """Worker process body: publish our PID to the shared list, then idle."""
    pid = os.getpid()
    pids.append(pid)
    print('run task(pid): ', pid)
    # Stay alive so the monitor process can keep observing this PID.
    time.sleep(1000)
class Worker(object):
    """Spawns task processes and a monitor process that polls their PIDs."""
    def __init__(self):
        # Manager-backed list, shared across all spawned processes.
        self.pids = Manager().list()
        # NOTE(review): the monitor process loops forever and is never
        # joined or terminated explicitly.
        self._monitor_process = Process(target=self.monitor_zombie, args=(self.pids,))
        self._monitor_process.start()
    def start(self):
        """Launch one new task process (not joined or tracked)."""
        pids = self.pids
        t = Process(target=task, args=(pids,))
        t.start()
    def monitor_zombie(self, pids):
        """Print the shared PID list every 5 seconds, forever."""
        while True:
            print("start monitor pids:")
            for pid in pids:
                print("monitor pids:", pid)
            time.sleep(5)
if __name__ == '__main__':
    wo = Worker()
    # Launch ten task processes, two seconds apart, then keep the parent
    # alive long enough for the monitor to observe them.
    for i in range(10):
        time.sleep(2)
        print("start task:", i)
        wo.start()
    time.sleep(1000)
|
14,067 | 95dd4ef3d705fb8bd2fb5d403efd67b5e6629843 | import tensorflow as tf
# Autodiff demo: trainable variables created inside the tape are watched
# automatically, so gradient(loss, w) = d(w**2)/dw = 2*w = 6.0 here.
with tf.GradientTape() as tape:
    w=tf.Variable(tf.constant(3.0))
    loss=tf.pow(w,2)
grad=tape.gradient(loss,w)
print(grad)
14,068 | 4c4f9ebc1970934d249a01d9e6bb9e3194d352b5 | class Node(object):
def __init__(self, value):
self.value = value
self.next = None
class Cycle_Linked_list(object):
    """A sorted (ascending) circular singly linked list.

    The original version did not parse (``==`` used where ``=`` was
    intended, an ``elif`` branch with no body, stray colons) and its
    insertion walk never terminated once it wrapped past the head.
    This rewrite keeps the intended interface and implements a sorted
    insertion that always terminates.
    """
    def __init__(self):
        self.head = None   # smallest element; None when the list is empty
        self.tail = None   # kept for interface compatibility (unused)

    def insert_element(self, value):
        """Insert *value*, keeping the circular list sorted ascending."""
        if self.head is None:
            # First element: a one-node cycle pointing at itself.
            self.head = Node(value)
            self.head.next = self.head
        elif value <= self.head.value:
            # New minimum: splice in before the current head and relink
            # the last node so the cycle closes on the new head.
            node = Node(value)
            last = self.head
            while last.next is not self.head:
                last = last.next
            node.next = self.head
            last.next = node
            self.head = node
        else:
            # Walk to the first node larger than *value* (or until we
            # wrap back to the head) and splice the new node in between.
            prev = self.head
            current = self.head.next
            while current is not self.head and value >= current.value:
                prev = current
                current = current.next
            prev.next = Node(value)
            prev.next.next = current
|
14,069 | 65995791af2258500a940070b469ff7cc5935170 | import datetime
from werkzeug.security import generate_password_hash, check_password_hash
# In-memory comment store (module-level; cleared on restart).
commentslist = []
# NOTE(review): every function below is re-defined later in this file;
# at import time those later definitions win, so this first set is
# effectively dead code.
def save_comment(data):
    """Stamp *data* with an id, placeholder message/author fields and a
    creation time, then append it to the in-memory comment list."""
    data['comment_id'] = len(commentslist) + 1
    data['message'] ="message"
    data['author'] = "author"
    data['date_created'] = datetime.datetime.now()
    # save to list
    commentslist.append(data)
def all_user_comments(username):
    """Return all comments (the *username* filter is not applied yet)."""
    return commentslist
def get_comment_by_id(username, comment_id):
    """Return the comment with *comment_id*, or False when absent."""
    # call the all comments method
    dicts = all_user_comments(username)
    result = next(
        (item for item in dicts if item["comment_id"] == comment_id), False)
    return result
def modify_user_comment(username, comment_id, comment):
    """Overwrite the comment text and stamp the update time.

    NOTE(review): raises TypeError for an unknown id, because
    get_comment_by_id then returns False.
    """
    result = get_comment_by_id(username, comment_id)
    result["comment"] = comment
    result["date_updated"] = datetime.datetime.now()
def delete_user_comment(username, comment_id):
    """Remove the comment with *comment_id* from the store."""
    result = get_comment_by_id(username, comment_id)
    # remove from list
    commentslist.remove(result)
"""
Data storage in Data Structures
"""
# Store the users (plain dicts with at least 'username' and 'password').
USERS_LIST = []
# Store the comments / replies
REPLIES_LIST = []
# Blacklist Tokens (revoked auth tokens)
BLACKLIST = set()
# Helper methods
def save_user(data):
    """Tag *data* as a User, hash its password in place and store it."""
    data["type"] = "User"
    data["password"] = generate_password_hash(data["password"])
    # save to list
    USERS_LIST.append(data)
def save_comment(data):
    """Stamp *data* with an id and creation time, then store it.

    NOTE(review): this definition shadows the earlier save_comment in
    this file (it no longer adds the placeholder message/author fields).
    """
    data['comment_id'] = len(commentslist) + 1
    data['date_created'] = datetime.datetime.now()
    # save to list
    commentslist.append(data)
def save_comment(data):
    """Stamp *data* with an id and creation time, then store it."""
    data['comment_id'] = len(commentslist) + 1
    data['date_created'] = datetime.datetime.now()
    # save to list
    commentslist.append(data)
def all_user_comments(username):
    """Return all comments (per-user filtering is still commented out)."""
    # comment = [
    #     comment for comment in commentslist if comment["username"] == username
    # ]
    return commentslist
def get_comment_by_id(username, comment_id):
    """Return the comment with *comment_id*, or False when absent."""
    # call the all comments method
    dicts = all_user_comments(username)
    result = next(
        (item for item in dicts if item["comment_id"] == comment_id), False)
    return result
def modify_user_comment(username, comment_id, comment):
    """Overwrite the comment text and stamp the update time."""
    result = get_comment_by_id(username, comment_id)
    result["comment"] = comment
    result["date_updated"] = datetime.datetime.now()
def delete_user_comment(username, comment_id):
    """Remove the comment with *comment_id* from the store."""
    result = get_comment_by_id(username, comment_id)
    # remove from list
    commentslist.remove(result)
def check_username(search_username):
    """Check if username exists in USERS_LIST"""
    for find_username in USERS_LIST:
        if find_username["username"] == search_username:
            return True
    return False
def check_username_for_login(search_username):
    """Return the stored user dict for *search_username* (None if absent)."""
    for find_username in USERS_LIST:
        if find_username["username"] == search_username:
            return find_username
def login(data):
    """Validate a username/password pair against the stored hash.

    NOTE(review): raises TypeError for an unknown username, because
    check_username_for_login returns None.
    """
    # Get user dictionary, assign it to variable
    logging_user_details = check_username_for_login(data["username"])
    if check_password_hash(logging_user_details["password"], data["password"]):
        # compare password input to saved password
        return True
    return False
|
14,070 | 97abcc8ed3fc9b3de987ce53d48c4f43d1201c4c | #from django.db.models import Q
from django.shortcuts import render
from products.models import Product
# Create your views here.
def search_product_view(request):
    """Render search results for the ``q`` GET parameter (default 'Loren')."""
    search_term = request.GET.get('q', 'Loren')
    matches = Product.objects.search(search_term)
    return render(
        request,
        'search/view.html',
        {'object_list': matches},
    )
14,071 | b886de3a408b60cc271e4cf94a6982578c15fedb | import copy
import getpass
import subprocess
from db.connectors import connection_factory
from settings import DB_CREDENTIALS, DB_TYPE
# Build admin credentials from the configured DB credentials, minus the
# database name (the database may not exist yet).
admin_credential = copy.deepcopy(DB_CREDENTIALS)
del admin_credential['db']
cnx, connector = None, None
# Create connection to mysql with admin credentials (re-prompt on failure)
while not cnx:
    username = input('Input DB administrator username: ')
    password = getpass.getpass()
    admin_credential['user'] = username
    admin_credential['passwd'] = password
    connector = connection_factory(DB_TYPE, **admin_credential)
    cnx = connector.connection
# SQL block for DB, user, grant privileges creation.
# (Values come from local settings, not untrusted user input.)
sql_create_db = "CREATE DATABASE IF NOT EXISTS {}; ".format(DB_CREDENTIALS['db'])
sql_create_user = "CREATE USER IF NOT EXISTS {}@{} IDENTIFIED BY '{}'; ".format(DB_CREDENTIALS['user'],
                                                                                DB_CREDENTIALS['host'],
                                                                                DB_CREDENTIALS['passwd'])
sql_grant_perm = "GRANT ALL PRIVILEGES ON {}.* TO {}@{};".format(DB_CREDENTIALS['db'], DB_CREDENTIALS['user'],
                                                                 DB_CREDENTIALS['host'])
for sql in (sql_create_db, sql_create_user, sql_grant_perm):
    connector.execute_sql(sql, change=False)
connector.close()
# Loading DB skeleton: pipe db/attendance.sql into the CLI client named
# by DB_TYPE (e.g. `mysql`).
args = [DB_TYPE, '-u{}'.format( admin_credential['user']), '-p{}'.format(admin_credential['passwd']),
        DB_CREDENTIALS['db'], ]
with open('db/attendance.sql') as input_file:
    proc = subprocess.Popen(args, stdin=input_file, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    proc.communicate()
if proc.returncode == 0:
    print('DB {} was created successfully'.format(DB_CREDENTIALS['db']))
|
14,072 | 50e003ca1cf355e15ad6f30ba23831b4f72a7438 | from .pairwise_dist_torch import pairwise_dist_torch
def cdist_kNN_graph_dict_generator_torch(transformed_dict_torch):
    """Map each group key to the pairwise-distance matrix of its data.

    For every entry in *transformed_dict_torch* the group's tensor is
    compared against itself via pairwise_dist_torch.
    """
    return {
        group_key: pairwise_dist_torch(group_data, group_data)
        for group_key, group_data in transformed_dict_torch.items()
    }
# if __name__ == "__main__":
# from hotell2_nn.main.parameter_setting_expr_4 import parameter_setting
# from hotell2_nn.utils.unc_data_loader import unc_data_loader
# import torch
#
# experiment_index = 2202
# cuda_index = 0
# repeat_number = 1
# learning_rate = 0.001
# output_dimension = 6
# interval = 5
# epoch_number = 20000
# l1_reg = 0.001
#
# args = parameter_setting(cuda_index, repeat_number, learning_rate, output_dimension, interval, epoch_number, l1_reg,
# experiment_index)
#
# group_data_ret = unc_data_loader(args)
# print("group_data_ret.shape: ", group_data_ret)
#
# group_data_torch = {}
#
# for key, value in group_data_ret.items():
# value_torch = torch.from_numpy(value)
# group_data_torch[key] = value_torch
#
# cdist_kNN_graph_dict_generator_torch(group_data_torch)
|
14,073 | 9aaa6cea83aa724280636b3cd20cf06f5d0908a3 | #!/bin/python3
# Arithmetic Operators
# https://www.hackerrank.com/challenges/python-arithmetic-operators/problem
def solve(a, b):
    """Return the tuple (sum, difference, product) of *a* and *b*."""
    total = a + b
    difference = a - b
    product = a * b
    return (total, difference, product)


if __name__ == '__main__':
    a = int(input())
    b = int(input())
    print("\n".join(map(str, solve(a, b))))
14,074 | d495a7b732091aee77b37c004df797541fc4ea43 | import requests
import sys
# Drop the SODA collection named on the command line via the local ORDS
# REST endpoint; HTTP 200 means the collection was removed.
url = 'http://localhost:9090/ords/hr/soda/latest/' + sys.argv[1]
response = requests.delete(url)
if (response.status_code == 200):
    print("Collection droped")
else:
    print("Erro")
|
14,075 | a8c68ea4caae1f05bfcf41d3cbe39efc7dc5e71e | from django.urls import path
from . import views
# URL routes for the store app; the names are referenced from templates
# via {% url %} and from views via reverse().
urlpatterns = [
    path('',views.store, name="store"),
    path('checkout/',views.checkout, name="checkout"),
    path('cart/',views.cart, name="cart"),
    path('update_item/',views.UpdateItem, name="update_item"),
    path('process_order/',views.ProcessOrder, name="process_order"),
    path('user_logout/',views.userLogout, name = "user_logout"),
    path('login_register/', views.loginOrRegister,name="login_register"),
]
|
14,076 | 8115890f9346cbf38a9d8f7846fe9b31b5ed7809 | from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.urls import reverse
# Create your views here.
from resume.models import Comm_div, Comm_code
from django.views import generic
from books.models import Author, Book, Log, Author_book, Best_book
from .forms import AuthorForm, BookForm
from django.shortcuts import get_object_or_404
import os
import datetime
import json
from urllib.request import urlopen
import googlemaps
from django.db.models import Count
import urllib.parse
import ssl
context = ssl._create_unverified_context()
# 함수 로그인 권한 제어
from django.contrib.auth.decorators import login_required
# 클래스 로그인 권한 제어
from django.contrib.auth.mixins import LoginRequiredMixin
# 권한 CHECK
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.mixins import PermissionRequiredMixin
# Look up a geographic location from an IP address
def get_location(ip):
    """Return (latitude, longitude, city, state) for *ip* from geolocation-db.com."""
    endpoint = "https://geolocation-db.com/json/{}".format(ip)
    with urlopen(endpoint, context=context) as response:
        payload = json.loads(response.read().decode())
    return payload["latitude"], payload["longitude"], payload["city"], payload["state"]
# Record a page-access log entry (flat file + database).
def write_log(client_ip, request, log_gb, user):
    """Append an access-log entry to <project>/log.txt and the Log table.

    Args:
        client_ip: remote address of the request.
        request:   the Django request object (stored verbatim).
        log_gb:    short label describing the action.
        user:      the requesting user.
    """
    now = datetime.datetime.now()
    nowDatetime = now.strftime('%Y-%m-%d %H:%M:%S')
    # Geo-lookup (lat, long, city, state) for the client address.
    addr_info = get_location(client_ip)
    # Reverse-geocoding via the Google Maps API was disabled; the
    # commented-out code (which embedded an API key in the source —
    # never commit secrets, even commented out) has been removed.
    addr = ''
    log_context = {'log_dt': nowDatetime, 'ip': client_ip, 'log_gb': log_gb, 'request': request, 'user': user, 'lat': addr_info[0], 'long': addr_info[1], 'state': addr_info[3], 'city': addr_info[2], 'addr': addr}
    BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    # BUGFIX: the original chose the separator via `if BASE_DIR.find('home'):`,
    # which is falsy only when 'home' sits at index 0 — so the '/' branch was
    # taken almost always (and the Windows branch used the fragile '\log.txt').
    # os.path.join is correct on every platform.
    file_path = os.path.join(BASE_DIR, 'log.txt')
    # Append the flat-file copy of the entry (handle closed via `with`).
    with open(file_path, 'a', encoding='utf8') as f:
        f.write(str(log_context) + '\n')
    # Persist the structured copy to the database.
    log = Log(client_ip=client_ip, log_gb=log_gb, request_info=request, create_dt=nowDatetime, user=user, lat=addr_info[0], long=addr_info[1], state=addr_info[3], city=addr_info[2], addr=addr)
    log.save()
# Author list views
class AuthorList(generic.ListView):
    """Paginated author list, filtered and ordered via GET parameters."""
    model = Author
    paginate_by = 10

    def get_queryset(self):
        """Return authors matching the name filter, ordered as requested."""
        name_filter = self.request.GET.get('filter_1', '')    # '' matches everything
        nation_filter = self.request.GET.get('filter_2', '')  # read but currently unused
        ordering = self.request.GET.get('orderby', 'author_name')
        # Audit-log every listing request.
        write_log(self.request.META['REMOTE_ADDR'], self.request, '작가조회', self.request.user)
        return Author.objects.filter(
            author_name__icontains=name_filter,
        ).order_by(ordering)

    def get_context_data(self, **kwargs):
        """Echo the current filter/order values back into the template context."""
        context = super(AuthorList, self).get_context_data(**kwargs)
        context['filter_1'] = self.request.GET.get('filter_1', '')
        context['filter_2'] = self.request.GET.get('filter_2', '')
        context['orderby'] = self.request.GET.get('orderby', 'author_name')
        return context
def AuthorUpdate(request, pk):
    """Edit an existing author (GET shows the form, POST saves it)."""
    write_log(request.META['REMOTE_ADDR'], request, '작가수정', request.user)
    if pk:
        # Load the author plus the books linked to them.
        author = Author.objects.get(author_id=pk)
        author_book = Author_book.objects.filter(author_id=pk)
    else:
        author = Author()
        author_book = Author_book()
    # Unbound form pre-filled with the current author data.
    authorform = AuthorForm(instance=author)
    if request.method == "POST":
        # Bind the submitted data (to the existing instance when editing).
        if pk:
            authorform = AuthorForm(request.POST, instance=author)
        else:
            authorform = AuthorForm(request.POST)
        if authorform.is_valid():
            authorform.save()
            return HttpResponseRedirect(reverse('author_list'))
    # On GET, or on an invalid POST, re-render the form.
    return render(request, 'books/author_update.html', {
        'authorform': authorform,
        'author': author,
        'author_book': author_book,
    })
def AuthorCreate(request):
    """Create a new author (GET shows an empty form, POST saves it)."""
    write_log(request.META['REMOTE_ADDR'], request, '작가생성', request.user)
    author = Author()
    authorform = AuthorForm(instance=author)
    if request.method == "POST":
        authorform = AuthorForm(request.POST)
        if authorform.is_valid():
            authorform.save()
            return HttpResponseRedirect(reverse('author_list'))
        else:
            print("author valid error발생")
    # On GET, or on an invalid POST, re-render the (possibly bound) form.
    return render(request, 'books/author_update.html', {
        'authorform': authorform,
        'author': author,
    })
def AuthorDelete(request, pk):
    """Delete the author identified by *pk* and return to the list view."""
    write_log(request.META['REMOTE_ADDR'], request, '작가삭제', request.user)
    # 404 when the primary key is unknown; otherwise delete immediately.
    get_object_or_404(Author, pk=pk).delete()
    return HttpResponseRedirect(reverse('author_list'))
# Book list
class BookList(generic.ListView):
    """Paginated recommended-book list with three substring filters."""
    model = Book
    paginate_by = 10

    def get_queryset(self):
        """Return books matching all filters, ordered as requested."""
        # Audit-log every listing request.
        write_log(self.request.META['REMOTE_ADDR'], self.request, '추천도서조회', self.request.user)
        author_filter = self.request.GET.get('filter_1', '')  # '' matches everything
        input_filter = self.request.GET.get('filter_2', '')
        genre_filter = self.request.GET.get('filter_3', '')
        ordering = self.request.GET.get('orderby', 'author_name')
        return Book.objects.filter(
            author_name__icontains=author_filter,
            input_name__icontains=input_filter,
            genre_cd__comm_code_name__icontains=genre_filter,
        ).order_by(ordering)

    def get_context_data(self, **kwargs):
        """Echo the current filter/order values back into the template context."""
        context = super(BookList, self).get_context_data(**kwargs)
        context['filter_1'] = self.request.GET.get('filter_1', '')
        context['filter_2'] = self.request.GET.get('filter_2', '')
        context['filter_3'] = self.request.GET.get('filter_3', '')
        context['orderby'] = self.request.GET.get('orderby', 'author_name')
        return context
def BookCreate(request):
    """Create a new recommended book (GET shows an empty form, POST saves)."""
    write_log(request.META['REMOTE_ADDR'], request, '추천도서 생성', request.user)
    book = Book()
    bookform = BookForm(instance=book)
    if request.method == "POST":
        bookform = BookForm(request.POST)
        if bookform.is_valid():
            bookform.save()
            return HttpResponseRedirect(reverse('book_list'))
        else:
            print("author valid error발생")
    # On GET, or on an invalid POST, re-render the (possibly bound) form.
    return render(request, 'books/book_update.html', {
        'bookform': bookform,
        'book': book,
    })
def BookUpdate(request, pk):
    """Edit a recommended book; also build bookstore search links for it."""
    write_log(request.META['REMOTE_ADDR'], request, '추천도서 수정', request.user)
    book = Book.objects.get(book_id=pk) if pk else Book()
    # Unbound form pre-filled with the current book data.
    bookform = BookForm(instance=book)
    if request.method == "POST":
        # Bind the submitted data (to the existing instance when editing).
        bookform = BookForm(request.POST, instance=book) if pk else BookForm(request.POST)
        if bookform.is_valid():
            bookform.save()
            return HttpResponseRedirect(reverse('book_list'))
    # URL-encode "title author" for the external bookstore search pages.
    search = urllib.parse.quote_plus(book.book_name + ' ' + book.author_name)
    context = {
        'bookform': bookform,
        'book': book,
        'book_link_kyobo': 'https://search.kyobobook.co.kr/mobile/search?keyword=' + search,
        'book_link_aladin': 'https://www.aladin.co.kr/m/msearch.aspx?SearchTarget=all&SearchWord=' + search,
    }
    return render(request, 'books/book_update.html', context)
def BookDelete(request, pk):
    """Delete the Book identified by *pk* and return to the book list."""
    # Access-log entry ('추천도서 삭제' = "delete recommended book").
    write_log(request.META['REMOTE_ADDR'], request, '추천도서 삭제', request.user)
    # 404 when the object no longer exists.
    book = get_object_or_404(Book, pk=pk)
    book.delete()
    return HttpResponseRedirect(reverse('book_list'))
# best book list
class Best_bookList(generic.ListView):
model = Best_book
|
14,077 | b8e57e160190c95a87618570fbe3bbfdded33353 | #This is where the source code is written.
import datetime
from BeautifulSoup import BeautifulSoup
from urllib2 import Request, urlopen, URLError, HTTPError
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
import swp_database
def initialise_database():
    """Create the SQLite flare database (if needed) and return a session.

    Creates all tables declared on swp_database.Base, then binds a new
    SQLAlchemy Session to the engine.
    """
    engine = create_engine('sqlite:///swp_flares.db')
    swp_database.Base.metadata.create_all(engine)
    session = Session(bind=engine)
    return session
#requesting Solarsoft data.
#requesting Solarsoft data.
def get_solarsoft_data():
    """Download the SolarSoft 'last events' page and parse it.

    Returns the parsed event list from read_solarsoft_data, or None when
    the HTTP request fails.  (Python 2 module.)
    """
    try:
        response = urlopen('http://www.lmsal.com/solarsoft/last_events/')
        print 'everything is fine'
    except HTTPError as e:
        # Server answered but refused or failed the request.
        print "The server couldn't fulfill the request."
        print 'Error code: ', e.code
        return None
    except URLError as e:
        # Could not reach the server at all.
        print 'We failed to reach a server.'
        print 'Reason: ', e.reason
        return None
    html_content = response.read()
    return read_solarsoft_data(html_content)
#Extracting and parsing of SolarSoft data
def read_solarsoft_data(html_content):
    """Extract flare events from the SolarSoft 'last events' HTML page.

    Returns a list of tuples:
    (Event, Datetime, Peak, GOES_class, Derived_position, Region).
    """
    soup = BeautifulSoup(html_content)
    # The third table on the page holds the event listing.
    table = soup.findAll('table')[2]
    resultset = []
    for row in table.findAll('tr'):
        col = row.findAll('td')
        # Skip header/malformed rows lacking the expected 7 columns.
        if len(col) < 7:
            continue
        Event = col[0].string
        Datetime = col[2].string
        Peak = col[4].string
        GOES_class = col[5].string
        if col[6].find('a') is not None:
            Derived_position = col[6].find('a').string
            Region = col[6].find('font').contents[1]
        else:  # no link: unnamed region, only the derived position is available
            Derived_position = col[6].string
            Region = ""
        # Strip the parentheses around the region number, e.g. "(1234)" -> "1234".
        newR = Region.replace("(", "").replace(")", "").strip()
        result = (Event, Datetime, Peak, GOES_class, Derived_position, newR)
        resultset.append(result)
    return resultset
def insert_solarsoft_data(ss_result_set, session):
    """Store parsed SolarSoft events in the database.

    ss_result_set rows come from read_solarsoft_data as
    (Event, Datetime, Peak, GOES_class, Derived_position, Region).
    """
    solarsoft_object_list = []
    for row in ss_result_set:
        # bug fix: the original mapped row[0] (the Event id) into ut_datetime
        # and shifted every other field by one; the datetime is row[1].
        solarsoft_entry = swp_database.Solarsoft(ut_datetime = row[1], peak = row[2], goes_class = row[3], derived_position = row[4], Region = row[5])
        solarsoft_object_list.append(solarsoft_entry)
    session.add_all(solarsoft_object_list)
    session.commit()
def query_ss(session):
    """Print every stored SolarSoft event (debug helper)."""
    res = session.query(swp_database.Solarsoft).all()
    for row in res:
        print row.ut_datetime, row.peak, row.goes_class, row.derived_position, row.Region
#Extrating X-ray flux data
def generate_filename(date, cadence=1):
    """Build the SWPC GOES X-ray list filename for a given UTC date.

    date: datetime (or None for "now", UTC); cadence: minutes per sample.
    """
    # Note: this file is only updated every hour or so; we may wish to pull
    # from Gp_xr_1m.txt instead.
    when = datetime.datetime.utcnow() if date is None else date
    stamp = when.strftime("%Y%m%d")
    return "{date}_Gp_xr_{cadence}m.txt".format(date=stamp, cadence=cadence)
def get_xrayflux_data(date=None):
filename = generate_filename(date)
#this file is only updated every hour or so, we may wish to pull from Gp_xr_1m.txt
try:
response = urlopen('http://www.swpc.noaa.gov/ftpdir/lists/xray/'+filename)
print 'everything is fine'
except HTTPError as e:
print "The server couldn't fulfill the request."
print 'Error code: ', e.code
except URLError as e:
print 'We failed to reach a server.'
print 'Reason: ', e.reason
html_content = response.read()
return read_xrayflux_data(html_content)
def read_xrayflux_data(html_content):
    """Parse the SWPC GOES X-ray flux text listing.

    html_content: raw file content as a string.
    Returns a list of (datetime, long_flux, short_flux) tuples.
    """
    resultset = []
    for line in html_content.splitlines():
        # Skip blank lines (robustness fix: line[0] raised IndexError on
        # empty lines) and the '#'/':' header/comment lines.
        if not line or line[0] in ('#', ':'):
            continue
        # Columns: year month day hhmm julian-day seconds short-flux long-flux.
        yyyy, mm, dd, hhmm, jd, ss, shortx, longx = line.split()
        date = datetime.datetime.strptime(yyyy + mm + dd + hhmm, "%Y%m%d%H%M")
        resultset.append((date, float(longx), float(shortx)))
    return resultset
def insert_xrayflux_data(xr_result_set, session):
    """Bulk-insert X-ray flux rows into the database.

    xr_result_set: list of (datetime, long_flux, short_flux) tuples as
    produced by read_xrayflux_data.
    """
    xrayflux_object_list = []
    for row in xr_result_set:
        # Note the column order: row[1] is the long channel, row[2] the short.
        xray_entry = swp_database.Xrayflux(ut_datetime = row[0], short = row[2], longx = row[1])
        xrayflux_object_list.append(xray_entry)
    session.add_all(xrayflux_object_list)
    session.commit()
def query_xr(session):
    """Print every stored X-ray flux sample (debug helper)."""
    res = session.query(swp_database.Xrayflux).all()
    for row in res:
        print row.ut_datetime, row.short, row.longx
def main():
    """Fetch SolarSoft and GOES X-ray data, store both, then dump the tables."""
    # Set up the database session for this run.
    session = initialise_database()
    # Fetch and store SolarSoft flare events.
    # NOTE(review): get_solarsoft_data() returns None on download failure,
    # which insert_solarsoft_data does not handle -- confirm intended.
    ss_result_set = get_solarsoft_data()
    insert_solarsoft_data(ss_result_set, session)
    # Fetch and store GOES X-ray flux samples.
    xr_result_set = get_xrayflux_data()
    insert_xrayflux_data(xr_result_set, session)
    # Dump both tables for inspection.
    query_ss(session)
    query_xr(session)
    # TODO: plot graph and save to file
if __name__ == "__main__":
main()
|
14,078 | e139c5e968ad75d7434920fc66db3761e919da19 | from django.contrib import admin
from .models import Clientes
# Register your models here.
class ClientesAdmin(admin.ModelAdmin):
    """Admin configuration for Clientes: show all contact fields in the list."""
    list_display = (
        'id',
        'Nombres',
        'Apellidos',
        'Direccion',
        'Telefono',
        'Correo',
        'Cedula',
        'Fecha_Nacimiento',
    )


# Expose the Clientes model in the Django admin site.
admin.site.register(Clientes, ClientesAdmin)
14,079 | 78a30b5e868cfcfd2b2666572814a9e218940827 | import os
import math
# Read all test cases from input.txt and write one answer line per case.
out = open('ans.txt', 'w')
fin = open('input.txt', 'r')
instr = fin.readlines()
fin.close()
t = int(instr[0])
for case in range(1, t + 1):
    # Each row carries r first; every later token overwrote p in the
    # original, so p is effectively the last token (0.0 if absent).
    tokens = instr[case].split()
    r = float(tokens[0])
    p = float(tokens[-1]) if len(tokens) > 1 else 0.0
    # Positive root of 2*x^2 + (2r-1)*x - 2p = 0 via the quadratic formula.
    b = 2 * r - 1
    delta = b * b + 8 * p
    root = (math.sqrt(delta) - b) / 4
    out.write('Case #' + str(case) + ': ' + str(int(root)) + '\n')
out.close()
|
14,080 | 054af9f5b03b7a7d6f9742aebb7c6af1f6c80501 | ####################################################################
# The goal of this script is to download as much of the timeline as
# possible (3200 tweets) of each neutral twitter feed.
# This script is meant to be run once, in the beginning of the data
# mining phase.
####################################################################
## import necessary libraries
import sys
import twitter
import json
import time
from datetime import datetime, timedelta
## return the UTC date of a tweet, in format [year,month,day]
def tweetdate(tweet):
    """Return the UTC date of a tweet as [year, month, day].

    Parses Twitter's fixed-width 'created_at' format, e.g.
    'Mon Sep 24 03:35:21 +0000 2012'.
    """
    # (cleanup: removed the unused 'factors' local from the original)
    created = tweet['created_at']
    monthlist = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
    # Fixed offsets: [26:30]=year, [4:7]=month abbreviation, [8:10]=day.
    return [int(created[26:30]), monthlist.index(created[4:7]) + 1, int(created[8:10])]
## initialize the twitter api
# The OAUTH credentials file defines OAUTH_TOKEN, OAUTH_TOKEN_SECRET,
# CONSUMER_KEY and CONSUMER_SECRET (kept out of the repo for privacy).
execfile("../../twitter_OAUTH_credentials.py")
auth = twitter.oauth.OAuth(OAUTH_TOKEN, OAUTH_TOKEN_SECRET, CONSUMER_KEY, CONSUMER_SECRET)
twitter_api = twitter.Twitter(auth=auth)
## load the list of neutral twitter feed handles
f = open('../neutral_twitter_feeds.txt', 'r')
neutralfeedlist = []
for line in f:
    # Strip the trailing newline from each handle.
    neutralfeedlist.append(line[0:len(line)-1])
## download the timelines
# NOTE(review): 'results' is never used below.
results = []
for i in range(len(neutralfeedlist)):
    print 'downloading timeline from @' + neutralfeedlist[i]
    j = 0
    cond = True
    tmpresults = []
    # Page backwards 200 tweets at a time; 16 pages (~3200 tweets) is the
    # maximum history the API exposes.
    while cond:
        if j == 0:
            tmpresults.append(twitter_api.statuses.user_timeline(screen_name = neutralfeedlist[i], count=200))
        else:
            tmpresults.append(twitter_api.statuses.user_timeline(screen_name = neutralfeedlist[i], count=200, max_id = maxid))
        # Continue just below the oldest tweet seen so far.
        maxid = tmpresults[j][-1]['id'] - 1
        if j == 15:
            cond = False
        j += 1
    tmpresults = [item for sublist in tmpresults for item in sublist]  # flatten pages
    print 'tweets collected: ' + str(len(tmpresults))
    with open('../../tweet_data/training_data/neutral_tweet_data/neutral_data_timelinehistory_'+ neutralfeedlist[i], 'w') as outfile:
        json.dump(tmpresults, outfile)
    # Pause to stay under Twitter's rate limit.
    # NOTE(review): the message says 3 min but the sleep is 5*60 s (5 min)
    # -- confirm which was intended.
    print 'pausing for 3 min'
    time.sleep(5*60)
|
14,081 | 62031f88627d793e83641559a8963f1e07868bb4 | import argparse
from functools import partial
import torch
from disent.metrics import METRIC_REGISTRY
from disent.models import MODEL_REGISTRY
from disent.optim import OPTIMIZER_REGISTRY
from disent.optim.lr_scheduler import LR_SCHEDULER_REGISTRY
from disent.optim.hparam_scheduler import HPARAM_SCHEDULER_REGISTRY
from disent.tasks import TASK_REGISTRY
def add_checkpoint_args(parser):
    """Add checkpoint save/load and validation-cadence options.

    Returns the created argument group.
    """
    group = parser.add_argument_group('Checkpointing')
    group.add_argument('--save-dir', metavar='DIR', default='checkpoints',
                       help='path to save checkpoints')
    group.add_argument('--restore-file', default='checkpoint_last.pt',
                       help='filename in save-dir from which to load checkpoint')
    group.add_argument('--reset-optimizers', action='store_true',
                       help='if set, does not load optimizer state from the checkpoint')
    group.add_argument('--reset-lr-schedulers', action='store_true',
                       help='if set, does not load lr scheduler state from the checkpoint')
    group.add_argument('--optimizer-overrides', default="{}", type=str, metavar='DICT',
                       help='a dictionary used to override optimizer args when loading a checkpoint')
    group.add_argument('--save-interval', type=int, default=1, metavar='N',
                       help='save a checkpoint every N epochs')
    group.add_argument('--save-interval-updates', type=int, default=0, metavar='N',
                       help='save a checkpoint (and validate) every N updates')
    group.add_argument('--keep-interval-updates', type=int, default=-1, metavar='N',
                       help='keep the last N checkpoints saved with --save-interval-updates')
    group.add_argument('--keep-last-epochs', type=int, default=-1, metavar='N',
                       help='keep last N epoch checkpoints')
    group.add_argument('--no-save', action='store_true',
                       help='don\'t save models or checkpoints')
    group.add_argument('--no-epoch-checkpoints', action='store_true',
                       help='only store last and best checkpoints')
    group.add_argument('--validate-interval', type=int, default=1, metavar='N',
                       help='validate every N epochs')
    group.add_argument('--no-validate', action='store_true',
                       help='if set, does not validate')
    return group
def add_dataset_args(parser, train=False):
    """Add data-loading options; returns the created argument group.

    NOTE(review): the *train* flag is accepted for API symmetry with the
    other add_*_args helpers but is currently unused here.
    """
    group = parser.add_argument_group('Dataset and data loading')
    group.add_argument('--batch-size', type=int, default=64, metavar='N',
                       help='number of examples in a batch')
    return group
def add_evaluation_args(parser):
    """Add metric-evaluation options; returns the created argument group."""
    group = parser.add_argument_group('Evaluation')
    group.add_argument('--path', metavar='FILE', help='path to model file')
    # Metric choices come from the registry, so plugins extend this list.
    group.add_argument('--metric', type=str, metavar='MET',
                       choices=METRIC_REGISTRY.keys(),
                       help='evaluation metric')
    group.add_argument('--num-evals', type=int, default=50, metavar='N',
                       help='number of evaluations')
    group.add_argument('--save-results', action='store_true',
                       help='save evaluation results to csv file')
    group.add_argument('--save-dir', metavar='DIR', default='results',
                       help='path to save evaluation results')
    return group
def add_generation_args(parser):
    """Add sample/traversal generation options; returns the argument group."""
    group = parser.add_argument_group('Generation')
    group.add_argument('--path', metavar='FILE', help='path to model file')
    group.add_argument('--gen-batches', type=int, default=1, metavar='N',
                       help='number of batches to generate')
    group.add_argument('--gen-mode', type=str, metavar='MODE',
                       choices=['standard', 'traversal'],
                       help='generation mode')
    # eval_str_list lets the user pass e.g. "-2,2" on the command line.
    group.add_argument('--traversal-range', type=eval_str_list,
                       metavar='MIN_VAL,MAX_VAL',
                       help='range of values to traverse')
    group.add_argument('--traversal-step', type=float, default=1.,
                       help='spacing between traversal points')
    group.add_argument('--modify-dims', type=partial(eval_str_list, type=int),
                       metavar='DIM1,DIM2,...,DIM_N',
                       help='select code dims to modify')
    group.add_argument('--save-dir', metavar='DIR', default='gen',
                       help='path to save generated images')
    group.add_argument('--save-format', type=str, default='gif',
                       choices=['jpg', 'gif'],
                       help='saving format of the images')
    return group
def add_model_args(parser):
    """Add model-architecture options; returns the created argument group."""
    group = parser.add_argument_group('Model configuration')
    group.add_argument('--vae-arch', default='conv_vae', metavar='ARCH',
                       help='VAE model architecture')
    return group
def add_optimization_args(parser):
    """Add optimizer/scheduler/stopping options; returns the argument group."""
    group = parser.add_argument_group('Optimization')
    group.add_argument('--max-epoch', default=0, type=int, metavar='N',
                       help='force stop training at specified epoch')
    group.add_argument('--max-update', default=0, type=int, metavar='N',
                       help='force stop training at specified update')
    group.add_argument('--clip-norm', default=0, type=float, metavar='NORM',
                       help='clip threshold of gradients')
    group.add_argument('--optimizer', default='adam', metavar='OPT',
                       choices=OPTIMIZER_REGISTRY.keys(),
                       help='Optimizer')
    # A comma-separated list gives per-epoch learning rates.
    group.add_argument('--lr', '--learning-rate', default='0.0001', type=eval_str_list,
                       metavar='LR_1,LR_2,...,LR_N',
                       help='learning rate for the first N epochs; all epochs >N using LR_N'
                            ' (note: this may be interpreted differently depending on --lr-scheduler)')
    group.add_argument('--momentum', default=0.99, type=float, metavar='M',
                       help='momentum factor')
    group.add_argument('--weight-decay', '--wd', default=0.0, type=float, metavar='WD',
                       help='weight decay')
    group.add_argument('--lr-scheduler', default='fixed',
                       choices=LR_SCHEDULER_REGISTRY.keys(),
                       help='Learning Rate Scheduler')
    group.add_argument('--lr-shrink', default=0.1, type=float, metavar='LS',
                       help='learning rate shrink factor for annealing, lr_new = (lr * lr_shrink)')
    group.add_argument('--min-lr', default=1e-6, type=float, metavar='LR',
                       help='minimum learning rate')
    group.add_argument('--hparam-scheduler', default='fixed',
                       choices=HPARAM_SCHEDULER_REGISTRY.keys(),
                       help='hyper parameter scheduler (used for kld annealing, etc.)')
    return group
def eval_str_list(x, type=float):
    """Evaluate *x* into a list of values converted with *type*.

    Strings are evaluated as Python expressions (e.g. "0.1,0.2" becomes the
    tuple (0.1, 0.2)); scalars are wrapped in a one-element list; None
    passes through unchanged.
    NOTE(review): eval() executes arbitrary code -- only safe on trusted
    command-line input.
    """
    if x is None:
        return None
    value = eval(x) if isinstance(x, str) else x
    try:
        return [type(item) for item in value]
    except TypeError:
        # Not iterable: treat it as a single scalar.
        return [type(value)]
def get_evaluation_parser(default_task='vae'):
    """Build the argument parser for the evaluation entry point."""
    parser = get_parser('Evaluation', default_task)
    add_dataset_args(parser)
    add_evaluation_args(parser)
    return parser
def get_generation_parser(default_task='vae'):
    """Build the argument parser for the generation entry point."""
    parser = get_parser('Generation', default_task)
    add_dataset_args(parser)
    add_generation_args(parser)
    return parser
def get_training_parser(default_task='vae'):
    """Build the argument parser for the training entry point."""
    parser = get_parser('Trainer', default_task)
    add_dataset_args(parser, train=True)
    add_model_args(parser)
    add_optimization_args(parser)
    add_checkpoint_args(parser)
    return parser
def get_parser(desc, default_task):
    """Create the base parser with common logging/device/task options.

    NOTE(review): *desc* is accepted but not passed to ArgumentParser --
    confirm whether it was meant as the parser description.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--no-progress-bar', action='store_true', help='disable progress bar')
    parser.add_argument('--log-interval', type=int, default=1000, metavar='N',
                        help='log progress every N batches (when progress bar is disabled)')
    parser.add_argument('--log-format', default=None, help='log format to use',
                        choices=['json', 'none', 'simple', 'tqdm'])
    parser.add_argument('--tensorboard-logdir', metavar='DIR', default='',
                        help='path to save logs for tensorboard, should match --logdir '
                             'of running tensorboard (default: no tensorboard logging)')
    parser.add_argument('--seed', default=1, type=int, metavar='N',
                        help='pseudo random number generator seed')
    parser.add_argument('--cpu', action='store_true', help='use CPU instead of CUDA')
    parser.add_argument('--device-id', default=0, type=int,
                        help='gpu id to use')
    parser.add_argument('--task', metavar='TASK', default=default_task,
                        choices=TASK_REGISTRY.keys(),
                        help='task')
    return parser
def parse_args(parser, input_args=None, parse_known=False):
    """Two-phase argument parsing.

    A first parse_known_args pass discovers which registry entries (metric,
    task, model, optimizer, schedulers) were selected; each selected entry
    then registers its own extra arguments on *parser* before the final
    parse.  Returns parsed args (plus extras when parse_known is True).
    """
    args, _ = parser.parse_known_args(input_args)
    if hasattr(args, 'metric'):
        METRIC_REGISTRY[args.metric].add_args(parser)
    if hasattr(args, 'task'):
        TASK_REGISTRY[args.task].add_args(parser)
    if hasattr(args, 'vae_arch'):
        MODEL_REGISTRY[args.vae_arch].add_args(parser)
    if hasattr(args, 'optimizer'):
        OPTIMIZER_REGISTRY[args.optimizer].add_args(parser)
    if hasattr(args, 'lr_scheduler'):
        LR_SCHEDULER_REGISTRY[args.lr_scheduler].add_args(parser)
    if hasattr(args, 'hparam_scheduler'):
        # One scheduler argument set per task hyper-parameter.
        for hparam in TASK_REGISTRY[args.task].hparams:
            HPARAM_SCHEDULER_REGISTRY[args.hparam_scheduler].add_args(parser, hparam)
    # adversarial archs are added in task def, so re-parse to see them.
    args, _ = parser.parse_known_args(input_args)
    if hasattr(args, 'adversarial_arch'):
        MODEL_REGISTRY[args.adversarial_arch].add_args(parser)
    if parse_known:
        return parser.parse_known_args(input_args)
    else:
        return parser.parse_args(input_args)
|
14,082 | 7d09d8bfe90c17c4c22b796fffecc317b8aab01b | from sklearn.externals import joblib
import cPickle as Pickle
from SystemUtilities.Configuration import *
from Processing import *
from Extraction import Classification
def train_event_detectors(patients):
    """Train and persist one event-detection classifier per substance type.

    Builds sentence-level features/labels from *patients*, optionally
    augments them with Florian's supplementary annotations when present,
    then trains and saves a classifier plus its feature map for every
    substance in ML_CLASSIFIER_SUBSTANCES.  (Python 2 module.)
    """
    # Retrieve features and labels per every sentence.
    sent_feat_dicts, labels_per_subst = sentence_features_and_labels(patients)
    print "Event_Detection/Training.py ln 12: Training event detection on " + str(len(sent_feat_dicts)) + " sents total"
    flor_dir = os.path.join(DATA_DIR, "FlorianData", "sentence_level_annotations.json")
    if os.path.exists(flor_dir):
        print("Florian annotations detected: supplementing training with these data")
        # Append supplementary sentences and their per-substance labels.
        flor_sent_feat_dicts, flor_labels_per_subst = flor_sentence_features_and_labels()
        sent_feat_dicts.extend(flor_sent_feat_dicts)
        labels_per_subst["Tobacco"].extend(flor_labels_per_subst["Tobacco"])
        labels_per_subst["Alcohol"].extend(flor_labels_per_subst["Alcohol"])
    else:
        print ("Florian supplementary data not detected, training on original training dataset only")
    for substance_type in ML_CLASSIFIER_SUBSTANCES:
        # Train the per-substance classifier.
        classifier, feature_map = Classification.train_classifier(sent_feat_dicts, labels_per_subst[substance_type])
        # Persist the model and its feature map side by side in MODEL_DIR.
        classf_file = os.path.join(MODEL_DIR, substance_type + EVENT_DETECT_MODEL_SUFFIX)
        featmap_file = os.path.join(MODEL_DIR, substance_type + EVENT_DETECT_FEATMAP_SUFFIX)
        joblib.dump(classifier, classf_file)
        Pickle.dump(feature_map, open(featmap_file, "wb"))
|
14,083 | a6da6154431bfb06832f0a61419975975336e2d8 | from __future__ import annotations
from rich.cells import cell_len
from rich.console import Console, RenderableType
from rich.protocol import rich_cast
def measure(
    console: Console,
    renderable: RenderableType,
    default: int,
    *,
    container_width: int | None = None,
) -> int:
    """Measure a rich renderable.

    Args:
        console: A console object.
        renderable: Rich renderable.
        default: Default width to use if renderable does not expose dimensions.
        container_width: Width of container or None to use console width.

    Returns:
        Width in cells.
    """
    # Plain strings measure as their cell width directly.
    if isinstance(renderable, str):
        return cell_len(renderable)
    renderable = rich_cast(renderable)
    measure_method = getattr(renderable, "__rich_measure__", None)
    if measure_method is None:
        # Renderable exposes no measurement protocol; use the fallback.
        return default
    options = (
        console.options
        if container_width is None
        else console.options.update_width(container_width)
    )
    # Clamp to zero so a pathological negative maximum never escapes.
    return max(0, measure_method(console, options).maximum)
|
14,084 | 341a39673f57049a3f28d111b7b4e46428714cc8 | '''
Set and change time series frequency
In the video, you have seen how to assign a frequency to a DateTimeIndex, and then change this frequency.
Now, you'll use data on the daily carbon monoxide concentration in NYC, LA and Chicago from 2005-17.
You'll set the frequency to calendar daily and then resample to monthly frequency, and visualize both series to see how the different frequencies affect the data.
'''
import pandas as pd
import matplotlib.pyplot as plt
# Daily CO concentrations for NYC, LA and Chicago (2005-17); parse the
# first column as a DatetimeIndex.
co = pd.read_csv('../datasets/air_quality_data/co_cities.csv', index_col=0, parse_dates=True)
'''
INSTRUCTIONS
We have already imported pandas as pd and matplotlib.pyplot as plt and we have already loaded the co_cities.csv file in a variable co.
* Inspect co using .info().
* Use .asfreq() to set the frequency to calendar daily.
* Show a plot of 'co' using subplots=True.
* Change the the frequency to monthly using the alias 'M'.
* Show another plot of co using subplots=True.
'''
# Inspect data
print(co.info())
# Set the frequency to calendar daily (introduces NaN on missing days).
co = co.asfreq('D')
# Plot one subplot per city.
co.plot(subplots=True)
plt.show()
# Set frequency to month-end; values are reindexed, not resampled.
co = co.asfreq('M')
# Plot again at the new frequency.
co.plot(subplots=True)
plt.show()
|
14,085 | f7d662066a09d3d7d6986ccddfe3d6bee0b59373 | from PIL import Image
from OpenGL.GL import *
import numpy as np
# read and save image
def save_image(width, height, image_name):
    """Read the current GL framebuffer and save it as an image file."""
    pixels = glReadPixels(0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE);
    img = Image.frombytes(mode="RGB", size=(width, height), data=pixels)
    # GL stores rows bottom-up; flip so the saved file is top-down.
    img.transpose(Image.FLIP_TOP_BOTTOM).save(image_name)
def load_image(imagePath):
    """Load an image, vertically flipped for OpenGL, as flat uint8 data.

    Returns (pixel_data, width, height) where pixel_data holds one entry
    per pixel.
    """
    img = Image.open(imagePath).transpose(Image.FLIP_TOP_BOTTOM)
    data = np.array(list(img.getdata()), np.uint8)
    w, h = img.size
    return data, w, h
14,086 | 2d420c95fd52ed90ce551dc1c49f3df434cbeafe | from django import forms
from.models import Visitor
class EventVisitorForm(forms.ModelForm):
    """Registration form collecting a visitor's name and email."""
    class Meta:
        # Only expose the basic identity fields of Visitor.
        model = Visitor
        fields = ['first_name', 'last_name', 'email']
14,087 | e5a76528a7f840857543fd5c191604f6191e9c27 | # add button recognition module to path
import sys
sys.path.append('./ButtonRec')
import cv2
import bigBrother
from ButtonRec import recognizer
class tracker():
    """Centers the camera on a recognized button using the ButtonRec OCR.

    Each centerCam() call grabs a frame, locates the target character and
    returns movement state codes describing how to re-center the effector.
    """
    rec = None          # recognizer.Recognizer instance
    cam = None          # bigBrother.camera instance
    target = None       # character to track (set via setTarget)
    yDims = 480         # frame height in pixels
    xDims = 640         # frame width in pixels
    center = [int(xDims/2), int(yDims/2)]
    # The physical effector sits this many pixels below the frame center.
    effectorYOffset = 110
    # Tolerance (pixels) within which an axis counts as centered.
    pixelRange = 10
    # ---------------- State Codes ----------------
    # 0 - not found
    # 1 - X centered
    # 2 - Y centered
    # 3 - move left
    # 4 - move right
    # 5 - move up
    # 6 - move down
    # 7 - target lost
    prevState = []      # state list from the previous frame
    visualize = None    # when True, annotated frames are returned too
    stateCodes = ['Not found', 'X centered', 'Y centered',
        'move left', 'move right', 'move up', 'move down', 'target lost']

    def __init__(self, visualize = True):
        self.rec = recognizer.Recognizer()
        self.cam = bigBrother.camera()
        self.visualize = visualize

    def setTarget(self, target):
        """Set the character the tracker should center on."""
        # sample validation code -- not used for now:
        # if target not in 'BG23':
        #     print('Invalid Target')
        #     return None
        self.target = target

    def centerCam(self):
        """Locate the target in a fresh frame and compute movement codes.

        Returns the list of state codes (plus the annotated frame when
        visualize is on), or None when no target has been set.
        """
        if self.target is None:
            print('No target set')
            return None
        frame = self.cam.getFrame()
        seen = False  # can we see target?
        currentState = []
        positions, text = self.rec.recognize(frame)
        for pos, char in zip(positions, text):
            if self.visualize:
                # Draw the detection box and its recognized label.
                p1 = (int(pos[0]), int(pos[1]))
                p2 = (int(pos[2]), int(pos[3]))
                cv2.rectangle(frame, p1, p2, (0, 255, 0), thickness = 2)
                centerX = int((pos[0] + pos[2]) / 2)
                centerY = int((pos[1] + pos[3]) / 2)
                cv2.putText(frame, char, (centerX, centerY), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 0), thickness = 2)
            if str(self.target) in char:
                if seen == True:  # duplicate detection of the same target
                    continue
                seen = True
                # The actual button occupies the second half of the detected
                # square: its center is at (3/4 X, 1/2 Y) of the box.
                buttonCenterX = int(pos[0] + 3 * (pos[2] - pos[0]) / 4)
                buttonCenterY = int(pos[1] + (pos[3] - pos[1]) / 2)
                if self.visualize: cv2.circle(frame, (buttonCenterX, buttonCenterY), 1, [0,0,255], thickness=5)
                # Offset of the button from the effector's aim point.
                buttonXOffset = abs(buttonCenterX - self.center[0])
                buttonYOffset = abs(buttonCenterY - (self.center[1] + self.effectorYOffset))
                # X centering.
                if buttonXOffset <= self.pixelRange:
                    currentState.append(1)
                elif buttonCenterX > self.center[0] + self.pixelRange:  # too far right
                    currentState.append(4)
                else:  # too far left
                    currentState.append(3)
                # Y centering.
                if buttonYOffset <= self.pixelRange:
                    currentState.append(2)
                elif buttonCenterY > self.center[1] + self.effectorYOffset + self.pixelRange:  # too far down
                    currentState.append(6)
                else:  # too far up
                    currentState.append(5)
        if not seen and len(self.prevState) != 0 and 0 not in self.prevState:  # we saw it and lost it
            currentState.append(7)
        elif not seen:  # we haven't seen it before
            currentState.append(0)
        if self.visualize:
            # Annotate the aggregate state string and the reference points.
            outString = ''
            for s in currentState:
                outString += self.stateCodes[s] + ' '
            cv2.putText(frame, outString, (0, 25), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), thickness = 2)
            cv2.circle(frame, (self.center[0], self.center[1]), 1, [0,0,255], thickness=5)
            cv2.circle(frame, (self.center[0], self.center[1] + self.effectorYOffset), 1, [255,255,0], thickness=5)
        self.prevState = currentState
        if self.visualize: return currentState, frame
        else: return currentState
14,088 | 60806ba1df91a80d9cfeaa9288bc3e8830b6d744 | __author__ = 'kingofspace0wzz'
import projection as pro
import numpy as np
from scipy import linalg as la
from numpy.linalg import matrix_rank as rank
'''
This file gives methods that analyze subspaces' relationships, those of which include:
Distance between subspaces by different definitions
Angles between subspaces (Minimal angle, Maximal angle, and Principal angles)
Rotation measure: how close a matrix can be to another matrix by rotation
'''
# compute the distance/gap between subspaces
# @param1: X, subspace
# @param2: Y, subspace
# @param3: n, rank/dimension of the original space
def subspace_distance(X, Y, n, method = 'definition'):
    """Compute the gap (distance) between two subspaces.

    X, Y: basis matrices of the subspaces; n: dimension of the ambient
    space.  Subspaces of different dimension are at distance 1 by
    convention.  method='definition' uses ||P_X - P_Y||_2; any other value
    measures X against the orthogonal complement of Y.
    """
    if rank(X) != rank(Y):
        # Any pair of subspaces with different dimensions has gap one.
        return 1
    if method == 'definition':
        proj_x = pro.orthoProjection(X, n)[0]
        proj_y = pro.orthoProjection(Y, n)[0]
        # distance = ||P1 - P2||_2
        return la.norm(proj_x - proj_y, 2)
    # Project the identity through the complement projector to obtain a
    # basis-like matrix for Y's orthogonal complement.
    proj_y_perp = pro.orthoProjection(Y, n)[1]
    complement = proj_y_perp.dot(np.eye(n))
    return la.norm(X.conjugate().T.dot(complement), 2)
# function rotationMeasure measures how close a matrix can be to another matrix by rotation
# @param1: Matrix A
# @param2: Matrix B
# find the orthogonal matrix Q that minimizes ||A - BQ||_2
def rotation_measure(A, B):
    """Solve the orthogonal Procrustes problem for A and B.

    Returns the orthogonal matrix Q = U @ Vh (from the SVD of B^H A) that
    minimizes ||A - BQ||_2.  A and B must have identical shape.
    """
    if A.shape != B.shape:
        raise Exception('Two matrics do not have the same shape')
    # C = B^H A; its SVD factors give the optimal rotation.
    cross = B.conjugate().T.dot(A)
    U, _, Vh = la.svd(cross)
    return U.dot(Vh)
# compute the minimal angle between subspaces
def min_angle(X, Y, n):
    """Return the minimal angle between subspaces X and Y.

    Uses cos(theta_min) = ||P_X P_Y||_2 with orthogonal projectors.
    """
    proj_x = pro.orthoProjection(X, n)[0]
    proj_y = pro.orthoProjection(Y, n)[0]
    return np.arccos(la.norm(proj_x.dot(proj_y), 2))
# compute the maximal angle between subspaces with equal dimension
def max_angle(X, Y, n):
    """Return the maximal angle between equal-dimension subspaces X and Y.

    sin(theta_max) equals the gap computed by subspace_distance.
    """
    return np.arcsin(subspace_distance(X, Y, n))
# compute the principal angles between two subspaces
# return: np.array of principal angles, orthogonal matrics U and V
def principal_angles(X, Y, n):
    '''
    Compute the principal angles between two subspaces.

    X, Y: basis matrices; n: ambient dimension (currently unused here).
    Returns (angles, U, V): the principal angles (radians) and the rotated
    orthogonal bases.

    NOTE(review): scipy.linalg.qr defaults to the full (square) Q factor,
    so C is built from complete orthonormal bases rather than just the
    column spans -- confirm mode='economic' was not intended.
    NOTE(review): in the else branch U is built from QX although M comes
    from the SVD of QY^H QX; U/V may be swapped there -- verify before
    relying on the returned bases.
    '''
    QX, RX = la.qr(X)
    QY, RY = la.qr(Y)
    if X.shape[1] >= Y.shape[1]:
        # The singular values of C = QX^H QY are the cosines of the
        # principal angles between the subspaces.
        C = QX.conjugate().T.dot(QY)
        M, cos, Nh = la.svd(C)
        U = QX.dot(M)
        V = QY.dot(Nh.conjugate().T)
        angles = np.arccos(cos)
        return angles, U, V
    else:
        C = QY.conjugate().T.dot(QX)
        M, cos, Nh = la.svd(C)
        U = QX.dot(M)
        V = QY.dot(Nh.conjugate().T)
        angles = np.arccos(cos)
        return angles, U, V
# Similarity between subspaces by Yamaguchi's definition
def similarity_Yama(X, Y, n):
    '''
    Similarity between subspaces by Yamaguchi's definition:
    the smallest principal angle.
    '''
    angles = principal_angles(X, Y, n)[0]
    return np.min(angles)
# Similarity between subspaces by Wolf & Shashua's definition
def similarity_Wolf(X, Y, n):
    '''
    Similarity between subspaces by Wolf & Shashua's definition:
    the product of the squared cosines of the principal angles.
    '''
    cos = np.cos(principal_angles(X, Y, n)[0])
    similarity = 1
    for c in cos:
        similarity = similarity * np.square(c)
    # bug fix: the original returned the loop variable 'c' (just the last
    # squared-cosine factor's cosine) instead of the accumulated product.
    return similarity
# distace between subspaces by Hausdorff's definition
def hausdorff_distance(X, Y, n):
    '''
    Distance between subspaces by Hausdorff's definition.

    X, Y: basis matrices with full column rank; n: ambient dimension
    (accepted for signature symmetry, not used).
    '''
    # bug fix: the original guard used '&', which binds tighter than '!=';
    # reject the call when either basis is column-rank deficient.
    if rank(X) != X.shape[1] or rank(Y) != Y.shape[1]:
        raise Exception('Please provide subspaces with full COLUMN rank')
    inner = 0
    for i in range(X.shape[1]):
        for j in range(Y.shape[1]):
            # bug fix: the accumulator was misspelled 'inter' (NameError).
            inner = inner + np.square(X[:, i].conjugate().T.dot(Y[:, j]))
    # bug fix: np.max(a, b) treats b as the 'axis' argument; the builtin
    # max of the two ranks is what was intended.
    distance = np.sqrt(max(rank(X), rank(Y)) - inner)
    return distance
# distance with inner-product
def kernel_distance(X, Y, n):
    '''
    Inner-product (kernel) based distance between subspaces.

    X, Y: basis matrices with full column rank; n: ambient dimension
    (accepted for signature symmetry, not used).
    '''
    # bug fix: the original guard used '&', which binds tighter than '!=';
    # reject the call when either basis is column-rank deficient.
    if rank(X) != X.shape[1] or rank(Y) != Y.shape[1]:
        raise Exception('Please provide subspaces with full COLUMN rank')
    inner = 0
    for i in range(X.shape[1]):
        for j in range(Y.shape[1]):
            # bug fix: the accumulator was misspelled 'inter' (NameError).
            inner = inner + np.square(X[:, i].conjugate().T.dot(Y[:, j]))
    # bug fix: the computed distance was never returned.
    return np.sqrt(inner)
# return the dimension of the intersection of two subspaces
def subspace_intersection(X, Y, n):
    '''
    Return the dimension of the intersection of two subspaces.

    NOTE(review): this relies on the U, V bases returned by
    principal_angles; the result is only as trustworthy as those bases.
    '''
    # Compute the principal decomposition once instead of twice.
    _, U, V = principal_angles(X, Y, n)
    # bug fix: np.hstack takes one sequence of arrays; the original
    # np.hstack(U, V) passed V as a second positional argument (TypeError).
    return rank(np.hstack((U, V)))
# distance between A and any lower rank matrix
def lowRank_distance(A, k):
    '''
    Spectral-norm distance from A to the nearest matrix of rank k.

    By the Eckart-Young theorem this is the (k+1)-th singular value of A.
    '''
    # bug fix: the guard was inverted -- 'rank(A) >= k' rejected every
    # valid call.  k must be strictly below rank(A) for a lower-rank
    # approximation to exist.
    if k >= rank(A):
        raise Exception('Please provide a lower rank k')
    sigma = la.svdvals(A)
    # sigma is sorted descending; index k is the (k+1)-th singular value.
    return sigma[k]
def test():
    """Smoke-test rotation_measure on a pair of nearly-equal matrices."""
    A = np.array([[1, 2],
                  [3, 4],
                  [5, 6],
                  [7, 8]])
    B = np.array([[1.2, 2.1],
                  [2.9, 4.3],
                  [5.2, 6.1],
                  [6.8, 8.1]])
    print('Matrix Q is: ', rotation_measure(A, B), '\n')
if __name__ == '__main__':
test()
|
14,089 | d5710d06968f399db1f6aa047f1e5344451a1a89 | # Write a Program to pass two variable into a function and then perform arthemtic operations (+,-,*,/).print the result
def cal(a, b):
    """Print the sum, difference, product and quotient of a and b.

    Division is true division, so the quotient is a float; b == 0 raises
    ZeroDivisionError.  Returns None (the original's bare 'return()'
    pointlessly returned an empty tuple that no caller used).
    """
    print("sum is:", a + b)
    print("diff is:", a - b)
    print("product is:", a * b)
    print("quotient is", a / b)
x = int(input("enter a no 1: "))
y = int(input("enter a no 2: "))
cal(x, y)
# Example output (bug fix: these lines were pasted un-commented, which made
# the whole file a syntax error):
# enter a no 2: 2
# sum is: 6
# diff is: 2
# product is: 8
# quotient is 2.0
14,090 | f3b3c01f3327032818227adb6f344951807d4930 | from .eyes import Eyes
|
14,091 | 67ff15d130e284411e48421389fe53cb14b83742 | from django.shortcuts import render
from django.http import HttpRequest,HttpResponse
# Create your views here.
def home(request):
    """Render the login page."""
    return render(request, 'firstapp/login.html')
14,092 | c58754f97f155dfa5dd2064229adf482b7663c15 | import single
#f = single.Solution()
print single.Solution().singleNumber([3,5,5,3,7,7,4,2,2])
#print s
|
14,093 | eaaefe56dd84d9b1a208727453b66ae8d0b84c82 | import pygame as pg
import constants as c
from sys import exit
import setup
class Main(object):
    """Top-level game controller: owns the mode dict and runs the loop."""
    def __init__(self):
        # Mapping of mode name -> mode object, and the currently active mode.
        self.modes_dict = None
        self.now_mode = None
    def setup_modes(self, modes_dict, mode_name):
        """Register the available modes and start in *mode_name*."""
        self.modes_dict = modes_dict
        self.now_mode = self.modes_dict[mode_name]
        self.now_mode.startup()
    def update(self):
        """Honor a pending mode switch, then update the active mode.

        NOTE(review): relies on the mode's 'switch' attribute holding
        either False or one of the mode-name constants from module c.
        """
        if self.now_mode.switch and self.now_mode.switch == c.MENU_MODE:
            self.switch_mode(c.MENU_MODE)
        elif self.now_mode.switch == c.AI_MODE:
            self.switch_mode(c.AI_MODE)
        elif self.now_mode.switch == c.PVP_MODE:
            self.switch_mode(c.PVP_MODE)
        self.now_mode.update()
    def event_loop(self):
        """Drain pygame events: quit on window close, else forward to the mode."""
        for event in pg.event.get():
            if event.type == pg.QUIT:
                exit()
            self.now_mode.get_event(event)
    def switch_mode(self, mode_name):
        """Activate the mode registered under *mode_name*."""
        self.now_mode = self.modes_dict[mode_name]
        self.now_mode.startup()
    def main(self):
        """Run the game loop forever: events, update, redraw."""
        while True:
            self.event_loop()
            self.update()
            pg.display.update()
class _Mode(object):
    """Base class for game modes; concrete modes override the hooks below."""

    def __init__(self):
        # False until the mode requests a switch to another mode's name
        self.switch = False
        self.screen = setup.SCREEN
        self.pos = (0, 0)
        self.font = setup.FONT_DICT

    def update(self):
        """Per-frame logic hook (no-op in the base class)."""
        pass

    def startup(self):
        """Called whenever the mode becomes active (no-op in the base class)."""
        pass

    def get_event(self, event):
        """Handle one pygame event (no-op in the base class)."""
        pass
|
14,094 | 9dfc3cda3a9d90e02e7257731c1603972facf1a4 | #! /usr/bin/python
import argparse
import os.path
import sys
import configparser
class Analysis:
    """Command-line entry object: parse args and a config file, then build
    the double differences between observation files."""

    def __init__(self):
        self.tableDD = []  # DoubleDifference objects built by buildDD()
        parser = argparse.ArgumentParser(description="PyG : The Python Geocube Library.")
        parser.add_argument("--conf", help="Path to configuration file")
        try:
            args = parser.parse_args()
        except SystemExit:
            # argparse reports its own errors via SystemExit; show help and stop
            # (the original used a bare 'except:', which hid real bugs too)
            parser.print_help()
            sys.exit(0)
        # guard: args.conf is None when --conf is omitted, and
        # os.path.isfile(None) would raise TypeError
        if args.conf and os.path.isfile(args.conf):
            self.parseConfigFile(args.conf)
            # fixed: the original called 'sef.buildDD()' (NameError typo)
            self.buildDD()
            print('OK')
        else:
            print("Bad Exit Return 1: Config file not found")
    ###########################################################################
    def parseConfigFile(self, config_file_path):
        """Parse the INI configuration file (thresholds and data paths)."""
        # SafeConfigParser was deprecated since Python 3.2 and removed in
        # 3.12; ConfigParser is the drop-in replacement.
        parser = configparser.ConfigParser()
        parser.read(config_file_path)
        # detection thresholds
        self.seuil_snr = int(parser.get('seuils', 'snr'))
        self.seuil_elev_sat = int(parser.get('seuils', 'sat_elevation'))
        # nav data path
        self.nav_data_file = parser.get('data', 'nav')
        print(self.nav_data_file)
        # obs data paths (comma-separated list)
        self.obs_data_file = parser.get('data', 'obs').split(",")
        print(self.obs_data_file)
    ##########################################################################
    def buildDD(self):
        """Build one DoubleDifference per extra obs file (count = cubes - 1)."""
        for i in range(1, len(self.obs_data_file)):
            # fixed: 'ephemeris_path' was an undefined name in the original;
            # the nav file parsed from the config is the only ephemeris source
            # here. NOTE(review): confirm against DoubleDifference's signature.
            self.tableDD.append(DoubleDifference(self.obs_data_file[0],
                                                 self.obs_data_file[i],
                                                 self.nav_data_file))
# Entry point: instantiating Analysis parses CLI args and runs the pipeline.
Analysis()
|
14,095 | 66934c0fdadc6cd288779367f143bc6e67995dfd | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# author:hpcm
# datetime:19-4-29 下午3:31
import rpyc
# Connect to the rpyc server listening on localhost:22222.
s_obj = rpyc.connect("localhost", 22222)
# Invoke the service's remote methods through the connection root.
print(s_obj.root.now_time())
print(s_obj.root.new())
# Release the connection.
s_obj.close()
|
14,096 | d5648c479cc4c3ceb4fd3f964f54fd992dfefba6 | import torch
import torch.nn as nn
import numpy as np
class MorseBatchedLSTMStack(nn.Module):
    """
    LSTM stack with dataset input.

    A small LSTM followed by a linear head that produces `output_size` values
    per time step; forward() returns the prediction for the last step only.
    The recurrent state is kept across calls (batch size fixed at 1).
    """
    def __init__(self, device, nb_lstm_layers=2, input_size=1, hidden_layer_size=8, output_size=6, dropout=0.2):
        super().__init__()
        self.device = device # This is the only way to get things work properly with device
        self.nb_lstm_layers = nb_lstm_layers
        self.input_size = input_size
        self.hidden_layer_size = hidden_layer_size
        self.lstm = nn.LSTM(input_size=input_size, hidden_size=hidden_layer_size, num_layers=self.nb_lstm_layers, dropout=dropout)
        self.linear = nn.Linear(hidden_layer_size, output_size)
        # (h, c) state carried across forward() calls
        self.hidden_cell = (torch.zeros(self.nb_lstm_layers, 1, self.hidden_layer_size).to(self.device),
                            torch.zeros(self.nb_lstm_layers, 1, self.hidden_layer_size).to(self.device))
        self.use_minmax = False

    def _minmax(self, x):
        # in-place min-max scaling of the prediction vector to [0, 1]
        x -= x.min(0)[0]
        x /= x.max(0)[0]

    def _hardmax(self, x):
        # in-place normalisation so the entries sum to 1
        x /= x.sum()

    def _sqmax(self, x):
        # Fixed: 'x = x**2' rebound the local name and left the caller's
        # tensor untouched; square in place instead, then normalise.
        x.pow_(2)
        x /= x.sum()

    def forward(self, input_seq):
        """Run the sequence through the LSTM; return the last-step prediction."""
        lstm_out, self.hidden_cell = self.lstm(input_seq.view(-1, 1, self.input_size), self.hidden_cell)
        predictions = self.linear(lstm_out.view(len(input_seq), -1))
        if self.use_minmax:
            self._minmax(predictions[-1])
        return predictions[-1]

    def zero_hidden_cell(self):
        """Reset the recurrent state to zeros (e.g. between samples)."""
        # Fixed: the original referenced the undefined global 'device'
        # (NameError at runtime); the device is stored on the instance.
        self.hidden_cell = (
            torch.zeros(self.nb_lstm_layers, 1, self.hidden_layer_size).to(self.device),
            torch.zeros(self.nb_lstm_layers, 1, self.hidden_layer_size).to(self.device)
        )
class Predictions:
    """Streaming front-end for the Morse LSTM model.

    Buffers incoming signal-envelope samples and evaluates the model on a
    rolling window of `look_back` samples; the transposed (and optionally
    low-pass-filtered) predictions are stored in `p_preds_t`.
    """
    def __init__(self):
        # pending samples carried over between new_data() calls (tensor or None)
        self.tbuffer = None
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        print(f"Torch using {self.device}")
        self.max_ele = 5 # Number of Morse elements considered
        self.look_back = 208 # Constant coming from model training
        self.model = MorseBatchedLSTMStack(self.device, nb_lstm_layers=2, hidden_layer_size=60, output_size=self.max_ele+2, dropout=0.1).to(self.device)
        self.model.use_minmax = True
        # moving-average low-pass filter applied across consecutive windows
        self.lp_len = 3
        self.lp_win = np.ones(self.lp_len) / self.lp_len
        self.lp = True # post process predictions through moving average low pass filtering
    @staticmethod
    def pytorch_rolling_window(x, window_size, step_size=1):
        # unfold dimension to make our rolling window
        # (result shape: [n_windows, window_size])
        return x.unfold(0,window_size,step_size)
    def load_model(self, filename):
        """Load trained weights and switch the model to eval mode."""
        self.model.load_state_dict(torch.load(filename, map_location=self.device))
        self.model.eval()
    def new_data(self, data):
        """ Takes the latest portion of the signal envelope as a numpy array,
        make predictions using the model and interpret results to produce decoded text.
        """
        # NOTE(review): assumes `data` is a 1-D sequence of envelope samples —
        # confirm against the caller.
        # Append the new samples to whatever was left from the previous call.
        if self.tbuffer is None:
            self.tbuffer = torch.FloatTensor(data).to(self.device)
        else:
            self.tbuffer = torch.cat((self.tbuffer, torch.FloatTensor(data).to(self.device)))
        if len(self.tbuffer) > self.look_back:
            # number of full windows obtainable from the buffer
            l = len(self.tbuffer) - self.look_back + 1
            self.cbuffer = self.tbuffer[-l:].cpu()
            X_tests = self.pytorch_rolling_window(self.tbuffer, self.look_back, 1)
            # carry over the last window minus its first sample for the next call
            self.tbuffer = X_tests[-1][1:]
            # seed row is uninitialised garbage; it is dropped below
            p_preds = torch.empty(1, self.max_ele+2).to(self.device)
            for X_test in X_tests:
                with torch.no_grad():
                    y_pred = self.model(X_test)
                p_preds = torch.cat([p_preds, y_pred.reshape(1, self.max_ele+2)])
            p_preds = p_preds[1:] # drop first garbage sample
            p_preds_t = torch.transpose(p_preds, 0, 1).cpu()
            if self.lp:
                # moving-average smoothing along the window axis; trim the
                # convolution tail so the length matches the input
                p_preds_t = np.apply_along_axis(lambda m: np.convolve(m, self.lp_win, mode='full'), axis=1, arr=p_preds_t)
                self.p_preds_t = p_preds_t[:,:-self.lp_len+1]
            else:
                self.p_preds_t = p_preds_t
        else:
            # not enough samples yet for a single window
            self.p_preds_t = None
            self.cbuffer = None
|
14,097 | a5ee062989d674eba2cfc9bf4d05a09d841c938c | # coding=UTF-8
import numpy as np
import pandas as pd
from sklearn.cluster import KMeans
from sklearn.preprocessing import StandardScaler
from scipy.spatial.distance import cdist
import matplotlib.pyplot as plt
import os
from features.week_data import get_user_pay_week_data
import params
def shop_clusters():
    """Cluster the 2000 shops by recent-weeks traffic statistics with K-Means,
    write one CSV per cluster, and return the per-shop cluster labels."""
    # Build shop features:
    # split the last six weeks of shop_user samples by week
    # NOTE(review): assumes index 1 is the most recent week — confirm
    # against get_user_pay_week_data.
    week_data = get_user_pay_week_data()
    week_last_1st = week_data[1]
    week_last_2nd = week_data[2]
    week_last_3rd = week_data[3]
    week_last_4th = week_data[4]
    week_last_5th = week_data[5]
    week_last_6th = week_data[6]
    # use the last three weeks of shop traffic
    predict_x = week_last_1st.join(week_last_2nd)
    predict_x = predict_x.join(week_last_3rd)
    # add per-shop statistics over the three weeks as extra features
    test_mean = predict_x.mean(axis=1)
    test_median = predict_x.median(axis=1)
    predict_x['mean'] = test_mean
    predict_x['median'] = test_median
    # standardise the data
    ss_x = StandardScaler()
    predict_x = ss_x.fit_transform(predict_x)
    # choose the cluster count by the "elbow" method (plots mean distortion)
    K = range(1,16)
    mean_distortion = []
    for k in K:
        kmeans = KMeans(n_clusters=k)
        kmeans.fit(predict_x)
        mean_distortion.append(sum(np.min(cdist(predict_x, kmeans.cluster_centers_ , 'euclidean'),axis=1))/predict_x.shape[0])
    plt.plot(K, mean_distortion, 'bx-')
    plt.xlabel('k')
    plt.ylabel('Average Dispersion')
    plt.title('Select label number by Elbow Method')
    plt.show()
    # NOTE(review): the original comment claimed the elbow plot suggests 7
    # clusters, but the code uses n_clusters=4 — confirm which is intended.
    kmeans = KMeans(n_clusters=4)
    kmeans.fit(predict_x)
    # undo the standardisation so the saved CSVs hold original-scale values
    predict_x = pd.DataFrame(ss_x.inverse_transform(predict_x))
    cluster_label = pd.Series(kmeans.labels_)
    predict_x['cluster_label'] = cluster_label
    # prepend the shop ids 1..2000
    shop_iid = pd.DataFrame(np.arange(1,2001), columns=['iid'])
    predict_x = shop_iid.join(predict_x)
    print predict_x['cluster_label'].value_counts()
    if (not os.path.exists(params.SHOP_FEATURE_PATH)):
        os.mkdir(params.SHOP_FEATURE_PATH)
    # write each cluster's shops to its own CSV file
    for i in range(4):
        shops = predict_x[predict_x['cluster_label']==i]
        print shops
        shops.to_csv(params.SHOP_FEATURE_PATH+ 'shop_clusters_' + str(i) + '.csv')
    return cluster_label
|
14,098 | d8606881e22e903f7c6bad55a95f30001640722e | import datetime
from django.contrib.auth.models import User
from django.db import models
class Maker(models.Model):
    """
    Car manufacturer (brand) reference model.
    """
    name = models.CharField(max_length=20)
    def __str__(self):
        return self.name
class AutoModel(models.Model):
    """
    Car model reference model.
    """
    name = models.CharField(max_length=20)
    def __str__(self):
        return self.name
class MakerAndModel(models.Model):
    """
    Join model linking a manufacturer to one of its car models.
    """
    model = models.ForeignKey(AutoModel, on_delete=models.CASCADE)
    maker = models.ForeignKey(Maker, on_delete=models.CASCADE)
class Body(models.Model):
    """
    Car body type reference model.
    """
    name = models.CharField(max_length=20)
    def __str__(self):
        return self.name
class Color(models.Model):
    """
    Car colour reference model.
    """
    name = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class Person(models.Model):
    """
    Practice/demo users (separate from the auth User model).
    """
    name = models.CharField(max_length=40)
    phone = models.CharField(max_length=12)
    def __str__(self):
        return self.name
class Advert(models.Model):
    """
    Car-for-sale advertisement.
    """
    maker = models.ForeignKey(
        Maker, on_delete=models.CASCADE, verbose_name='Бренд'
    )
    automodel = models.ForeignKey(AutoModel, on_delete=models.CASCADE,
                                  verbose_name='Модель машины')
    body = models.ForeignKey(Body, on_delete=models.CASCADE,
                             verbose_name='Тип кузова')
    color = models.ForeignKey(Color, on_delete=models.CASCADE,
                              verbose_name='Цвет кузова')
    ad_user = models.ForeignKey(User, on_delete=models.CASCADE,
                                verbose_name='Пользователь')
    year = models.IntegerField('Год выпуска')
    day = models.DateField('advert day')
    price = models.IntegerField()
    pic = models.ImageField(
        upload_to='user_pic', max_length=255, blank=True, null=True
    )
    phone = models.CharField(max_length=13, blank=True, null=True)
    status = models.BooleanField(verbose_name="Активно ли объявление",
                                 default=True)
    def get_maker(self):
        """
        Return the maker (exposed for admin list customisation/sorting).
        """
        return self.maker
    get_maker.admin_order_field = 'maker__name'
    def get_model(self):
        """
        Return the car model (exposed for admin list customisation/sorting).
        """
        return self.automodel
    get_model.admin_order_field = 'automodel__name'
    def is_now(self):
        """
        Return True if the advert was posted within the last 7 days.
        """
        return datetime.date.today() - datetime.timedelta(days=7) <= self.day
    def __str__(self):
        return str(self.day) + " " + str(self.price)
|
14,099 | a12a0a6cbb74b54fbc9e18f692691e116940a42d | # -*- coding: utf-8 -*-
'''
Created on 24. Sep. 2015
@author: dietmar
'''
import sys
sys.path.insert(1, '../')
from drivers.driver import *
import time
import threading
class USBHIDDS18B20(Driver):
    '''
    DS18B20 temperature-sensor driver reading raw USB HID reports from a
    hidraw device. A daemon thread continuously reads 64-byte reports and
    caches the latest reading for up to two sensors (ids 1 and 2).
    '''
    def __init__(self, parameters,logger ):
        '''
        Constructor: initialise state and start the background reader thread.
        '''
        Driver.__init__(self, parameters,logger )
        # NOTE(review): the device path is hard-coded even though `parameters`
        # carries a "device" key — confirm which one should win.
        self.path= '/dev/hidraw0'
        self.fp= None               # file object for the hidraw device (None until opened)
        self.temperature1= 0        # last reading of sensor id 1, in °C
        self.temperature2= 0        # last reading of sensor id 2, in °C
        self.pwr= 0                 # raw byte 2 of the last report
        self.id= 0                  # sensor id (byte 1 of the last report)
        self.max= 0                 # raw byte 0 of the last report
        self.lastUpdate1= ''        # timestamp of the last sensor-1 reading
        self.lastUpdate2= ''        # timestamp of the last sensor-2 reading
        self.debug_mode= True
        # daemon thread so it terminates together with the main program
        self.sensor_worker= threading.Thread(target=self.run_sensor_worker)
        self.sensor_worker.setDaemon(True)
        self.sensor_worker.start()
    def open(self):
        '''Open the hidraw device for binary reading; leave fp None on failure.'''
        try:
            self.fp = open( self.path, 'rb')
        except Exception as e:
            self.fp= None
            pass
    def get_observations(self,container):
        '''Append the two cached temperature observations to `container`
        (no-op when the device has never been opened).'''
        if not self.fp:
            return
        temperature_observation= self.observation( 'temperature1', self.lastUpdate1, str("{:.1f}".format( self.temperature1 )), '°C' );
        container.append(temperature_observation)
        self.handle_debug(self.name + ' delivers ' + str(temperature_observation) )
        temperature_observation= self.observation( 'temperature2', self.lastUpdate2, str("{:.1f}".format( self.temperature2 )), '°C' );
        container.append(temperature_observation)
        self.handle_debug(self.name + ' delivers ' + str(temperature_observation) )
    def run_sensor_worker (self):
        '''
        Background loop: read 64-byte HID reports and refresh the cached
        temperatures until `shall_run` (from the Driver base) goes False.
        '''
        self.handle_debug ('starting thread')
        while self.shall_run:
            try:
                if not self.fp:
                    self.open()
                # blocking read of one 64-byte HID report
                buffer = self.fp.read(64)
                #print ('DEBUG: found data' + str(buffer) )
                # report layout as used here: byte 0 = max, byte 1 = sensor id,
                # byte 2 = pwr, bytes 4-5 = temperature * 10 (little-endian,
                # signed) — NOTE(review): confirm against the device firmware.
                self.pwr= buffer[2]
                self.id=buffer[1]
                self.max=buffer[0]
                bytebuffer = bytes([buffer[4], buffer[5]])
                temperatureTimes10= int.from_bytes(bytebuffer, byteorder='little', signed=True)
                temperature= temperatureTimes10 / 10.0
                if self.id == 1:
                    self.temperature1= temperature
                    self.lastUpdate1= self.get_observation_time()
                if self.id == 2:
                    self.temperature2= temperature
                    self.lastUpdate2= self.get_observation_time()
            except Exception as e:
                # best-effort: device missing or unreadable — back off, retry
                time.sleep( 15 )
                pass
            time.sleep( 1 )
            #print ('DEBUG: looping thread')
if __name__ == '__main__':
    # Standalone smoke test: poll the sensor repeatedly and dump observations.
    # Fixed: __init__ takes (parameters, logger); the original passed only the
    # parameter string, which raised TypeError before any reading happened.
    # NOTE(review): passing None as the logger — confirm Driver tolerates it.
    sensor= USBHIDDS18B20( """{"device":"/dev/hidraw0"}""", None )
    for count in range(0,100):
        container= []
        sensor.get_observations(container)
        print ( container )
        time.sleep(5)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.