text stringlengths 38 1.54M |
|---|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def rotateRight(self, head: 'ListNode', k: int) -> 'ListNode':
        """Rotate a singly-linked list right by k places; return the new head.

        Fixes over the original: removes a leftover debug print, quotes the
        ListNode annotations (the bare names raised NameError at class
        creation since ListNode is only defined elsewhere), and collapses the
        three duplicated branches into one length-based pass, so huge k costs
        O(n) instead of O(k).
        """
        # Nothing to rotate: empty list or zero shift.
        if head is None or k == 0:
            return head
        # One pass to find the length and the current tail.
        length = 1
        tail = head
        while tail.next is not None:
            tail = tail.next
            length += 1
        # Rotating by a multiple of the length is a no-op.
        k %= length
        if k == 0:
            return head
        # The new tail sits length - k - 1 hops from the old head.
        new_tail = head
        for _ in range(length - k - 1):
            new_tail = new_tail.next
        new_head = new_tail.next
        # Close the list into a ring at the old tail, then cut after new_tail.
        tail.next = head
        new_tail.next = None
        return new_head
|
# A variable is a container for a value, which can be of various types.
"""
Triple-quoted strings (single or double quotes) span multiple lines; when
not assigned they act as multiline comments or docstrings.
"""
# VARIABLE RULES:
# - names are case sensitive (name and NAME are different variables)
# - must start with a letter or an underscore
# - may contain digits, but cannot start with one

# Multiple assignment: unpack a tuple into several variables at once.
x, y, name, is_cool = (1, 2.5, 'sumit', True)

# Basic math.
a = x + y

# Casting between the scalar built-in types.
x = str(x)    # int  -> str
y = int(y)    # float -> int (truncates toward zero)
z = float(y)  # int  -> float
print(type(z), z)
# print(type(y), y)
|
# import pygame
# import pgzrun
# from pygame.locals import QUIT, MOUSEBUTTONDOWN
# def win_check():
# for l in reversed(tilestatus):
# ren = 0
# for i in l:
# if i == 1:
# ren += 1
# ren = ren % 10
# elif i == 2:
# ren += 10
# ren = ren - ren % 10
# elif ren == 4:
# print("player 1 win")
# elif ren == 40:
# print("player 2 win")
# elif i == 0:
# ren = 0
# break
# def taketurn():
# if turn1:
# turn1 = False
# return True
# else:
# turn1 = True
# return False
# def tile_flip(xpos, isturn1):
# if tilestatus[xpos][-1] ==0:
# taketurn()
# return False
# index_to_input = tilestatus[xpos].index(0)
# if isturn1:
# tilestatus[xpos][index_to_input] = 1
# return True
# else:
# tilestatus[xpos][index_to_input] = 2
# return True
# def init_tile():
# global tilestatus = [
# [0,0,0,0,0,0],
# [0,0,0,0,0,0],
# [0,0,0,0,0,0],
# [0,0,0,0,0,0],
# [0,0,0,0,0,0],
# [0,0,0,0,0,0],
# [0,0,0,0,0,0]
# ]
# def draw():
# def on_mouse_down(pos, button):
# def init():
# turn1 = True
# """main"""
# while True:
# SURFACE.fill((255,255,255))
# for event in pygame.event.get():
# if event.type == QUIT:
# pygame.quit()
# sys.exit()
# elif even.type == MOUSEBUTTONDOWN and event.button == 1:
# flip(floor(event.pos[0] / SIZE))
# pygame.display.update()
# 0
# SURFACE = pygame.display.set_mode((700,600))
# pygame.display.set_caption("Four")
# init()
# pgzrun.go()
# BUG FIX: range(5, 0) is empty (start > stop with the default step of +1),
# so the original loop printed nothing. A negative step counts 5 down to 1.
for i in range(5, 0, -1):
    print(i)
import random
class Humanbeing:
    """A toy person with class-level limb counts and an alive flag."""

    arm = 2   # number of arms
    leg = 2   # number of legs
    live = 1  # 1 = alive, 0 = dead

    def limbs(self):
        """Print the current limb count, or a death notice when not alive."""
        if self.live == 0:
            print("Human is dead now We are sorry")
        else:
            print(self.arm, "arms", self.leg, "legs")

    def carAccident(self):
        """Simulate an accident: randomly lose an arm, a leg, or life itself."""
        print("There was an accident. You are at hospital now")
        outcome = random.randrange(0, 3)
        if outcome == 0:
            self.arm -= 1
            print("You lose an arm. We are sorry")
        elif outcome == 1:
            self.leg -= 1
            print("You lose a leg. We are sorry")
        else:
            self.live = 0
            print("We lost the patient.We are sorry")
# Demo: report limbs, put the patient through a random accident, report again.
ahmet = Humanbeing()
ahmet.limbs()
ahmet.carAccident()
ahmet.limbs()
|
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# Shared heatmap palette: light-to-dark ramp based on xkcd's "dark pink".
COLOR_MAP = sns.light_palette('dark pink', input='xkcd')
def norm_cm(arr):
    """Return *arr* with each row normalized to sum to 1, rounded to 2 d.p."""
    row_totals = arr.sum(axis=1)
    return (arr / row_totals[:, np.newaxis]).round(2)
def show_cm(arrs, n, label_cm, filename=None):
    """Plot the first *n* confusion matrices of *arrs* side by side.

    Each matrix is row-normalized and drawn as an annotated heatmap with the
    per-matrix accuracy (trace / grand total) in the title.

    Fixes: reuses the shared norm_cm() helper instead of duplicating its
    logic, and gives *filename* a default of None for consistency with
    show_avg_cm (backward compatible — positional callers are unaffected).

    :param arrs: sequence of 2-D raw confusion matrices
    :param n: number of matrices / subplot columns to draw
    :param label_cm: tick labels used on both axes
    :param filename: optional path; when given the figure is also saved
    :return: the matplotlib Figure
    """
    fig, ax = plt.subplots(nrows=1, ncols=n, figsize=(n * 3 + 3, 3), squeeze=False)
    for i in range(n):
        # Row-normalize via the shared helper (was inlined/duplicated before).
        arr = norm_cm(arrs[i])
        curr_ax = ax[0][i]
        sns.heatmap(arr,
                    xticklabels=label_cm,
                    yticklabels=label_cm,
                    cmap=COLOR_MAP,
                    square=True,
                    cbar=False,
                    annot=True,
                    ax=curr_ax)
        curr_ax.set_ylabel('True')
        curr_ax.set_xlabel('Predicted')
        # Accuracy of the normalized matrix: diagonal mass over total mass.
        curr_ax.set_title(f'Accuracy: {(sum(arr.diagonal()) / sum(sum(arr))).round(2)}')
        # bottom, top = curr_ax.get_ylim()
        # curr_ax.set_ylim(bottom + 0.5, top - 0.5)
    if filename is not None:
        plt.savefig(filename)
    return fig
def show_avg_cm(arrs, n, label_cm, filename=None):
    """Plot one heatmap of the first *n* matrices in *arrs* summed together.

    The element-wise sum is row-normalized (equivalent to a weighted average
    of the individual matrices) and drawn as a single annotated heatmap.

    Fix: reuses the shared norm_cm() helper instead of duplicating its logic.

    :param arrs: sequence of 2-D raw confusion matrices (equal shapes)
    :param n: how many matrices to accumulate
    :param label_cm: tick labels used on both axes
    :param filename: optional path; when given the figure is also saved
    :return: the matplotlib Figure
    """
    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(3, 3), squeeze=False)
    total = np.zeros(arrs[0].shape)
    for i in range(n):
        total += arrs[i]
    arr = norm_cm(total)
    # No explicit ax: heatmap draws on the current (only) axes, as before.
    sns.heatmap(arr,
                xticklabels=label_cm,
                yticklabels=label_cm,
                cmap=COLOR_MAP,
                square=True,
                cbar=False,
                annot=True)
    ax[0][0].set_ylabel('True')
    ax[0][0].set_xlabel('Predicted')
    ax[0][0].set_title(f'Accuracy: {(sum(arr.diagonal()) / sum(sum(arr))).round(2)}')
    # bottom, top = ax[0][0].get_ylim()
    # ax[0][0].set_ylim(bottom + 0.5, top - 0.5)
    if filename is not None:
        plt.savefig(filename)
    return fig
import random
def apresentacao():
    """Display the game title banner and wait for the player to press Enter."""
    # NOTE: the local variable deliberately shadows the function name.
    apresentacao = """
Jogo de adivinhação
Aperte enter para começar\n\n
"""
    input(apresentacao)
def gera_numero_aleartorio():
    """Announce and return a random secret number from 1 to 100 inclusive."""
    print('Gerando um número entre 1 e 100...')
    sorteado = random.randint(1, 100)
    return sorteado
def main():
    """Run the guessing loop: read guesses until the player wins and quits."""
    apresentacao()
    numero_secreto = gera_numero_aleartorio()
    while True:
        # int() raises ValueError on non-numeric input; not handled here.
        numero_chutado = int(input('Chute um número de 1 a 100: '))
        if numero_secreto == numero_chutado:
            print('Voce Acertou !!')
            # The trailing backslash continues the string literal onto the
            # next source line, so that line's leading whitespace is part of
            # the prompt shown to the user.
            sair = input('Deseja continuar?\nPara sair digite s \
\nPara continuar, aperte a tecla ENTER\n -> ')
            if sair.lower() == 's':
                break
            else:
                # Play again with a fresh secret number.
                numero_secreto = gera_numero_aleartorio()
        elif numero_secreto < numero_chutado:
            print('Chute um numero menor\n')
        else:
            print('Chute um numero maior\n')
# Run the game only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
|
import random
import time
import json
import requests
import datetime
def main(pk):
    """Endlessly PUT fake sensor readings for sensor *pk* to a local dev API.

    One random reading per second, timestamped 24 hours in the past and
    serialized as ISO-8601 with a literal 'Z' suffix. Runs until interrupted.
    """
    while(True):
        # Simulated reading in a deliberately wide range, 2 decimal places.
        sensor_input = random.uniform(-5, 110)
        sensor_input = round(sensor_input, 2)
        # Backdate the record by one day.
        created_at = datetime.datetime.utcnow() - datetime.timedelta(days=1)
        json_created_at = created_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        url = "http://127.0.0.1:8000/sensor/api/for/bc-transit/" + str(pk) + "/new/record/"
        headers = {'Content-Type': "application/json", 'Accept': "application/json"}
        data = {}
        data['created_at'] = json_created_at
        data['value'] = sensor_input
        print(data)
        # NOTE(review): PUT is used to *create* a record — confirm the API
        # really expects PUT rather than POST for this endpoint.
        res = requests.put(url, json=data, headers=headers,)
        print(res.status_code)
        time.sleep(1)
# Feed fake data for sensor id 6 when the module runs.
main(6)
|
#!/usr/bin/env python3
"""
Created on 20 Apr 2021
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
DESCRIPTION
The configuration_monitor_check utility is used to report on the configuration monitor's attempt to access all of
the devices known to the system. Status levels are:
* NOR - NO RESPONSE
* ERR - ERROR
* M - MALFORMED:
* MSA - MALFORMED:SAMPLE
* MCO - MALFORMED:CONFIG
* NSP - NOT SUPPORTED
* R - RECEIVED:
* RNW - RECEIVED:NEW
* RUN - RECEIVED:UNCHANGED
* RUP - RECEIVED:UPDATED
SYNOPSIS
configuration_monitor_check.py [-c CREDENTIALS] { -f TAG | [-t TAG [-x]] [-o] } [-i INDENT] [-v]
EXAMPLES
configuration_monitor_check.py -r ERR | node.py -s | csv_writer.py
DOCUMENT EXAMPLE
{"tag": "scs-ph1-26", "rec": "2021-05-18T14:36:00Z", "result": "ERROR",
"context": ["TimeoutExpired(['./configuration'], 30)"]}
SEE ALSO
scs_analysis/configuration_csv
scs_analysis/configuration_monitor
scs_analysis/monitor_auth
scs_mfr/configuration
BUGS
Result code not currently in use.
"""
import sys
from scs_analysis.cmd.cmd_configuration_monitor_check import CmdConfigurationMonitorCheck
from scs_core.aws.manager.configuration_check_finder import ConfigurationCheckFinder
from scs_core.aws.manager.configuration_check_requester import ConfigurationCheckRequester
from scs_core.aws.security.cognito_client_credentials import CognitoClientCredentials
from scs_core.aws.security.cognito_login_manager import CognitoLoginManager
from scs_core.client.http_exception import HTTPException
from scs_core.data.json import JSONify
from scs_core.sys.logging import Logging
from scs_host.sys.host import Host
# --------------------------------------------------------------------------------------------------------------------
if __name__ == '__main__':

    logger = None

    try:
        # ------------------------------------------------------------------------------------------------------------
        # cmd...

        # Parse and validate command-line options; exit code 2 on bad usage.
        cmd = CmdConfigurationMonitorCheck()

        if not cmd.is_valid():
            cmd.print_help(sys.stderr)
            exit(2)

        Logging.config('configuration_monitor_check', verbose=cmd.verbose)
        logger = Logging.getLogger()

        logger.info(cmd)

        # ------------------------------------------------------------------------------------------------------------
        # authentication...

        # Cognito login with stored credentials; any failure exits with 1.
        credentials = CognitoClientCredentials.load_for_user(Host, name=cmd.credentials_name)

        if not credentials:
            exit(1)

        gatekeeper = CognitoLoginManager()
        auth = gatekeeper.user_login(credentials)

        if not auth.is_ok():
            logger.error("login: %s." % auth.authentication_status.description)
            exit(1)

        # ------------------------------------------------------------------------------------------------------------
        # resources...

        finder = ConfigurationCheckFinder()
        requester = ConfigurationCheckRequester()

        # ------------------------------------------------------------------------------------------------------------
        # run...

        # -f TAG: force a fresh check of one device and report only the result.
        if cmd.force:
            response = requester.request(auth.id_token, cmd.force)
            print(response.result, file=sys.stderr)
            exit(0 if response.result == 'OK' else 1)

        # Otherwise: find matching check records and emit them as sorted JSON.
        response = finder.find(auth.id_token, cmd.tag_filter, cmd.exact_match, cmd.response_mode())

        print(JSONify.dumps(sorted(response.items), indent=cmd.indent))
        logger.info('retrieved: %s' % len(response.items))

        # ------------------------------------------------------------------------------------------------------------
        # end...

    except KeyboardInterrupt:
        # Ctrl-C: end the stderr line cleanly, exit without error report.
        print(file=sys.stderr)

    except HTTPException as ex:
        logger.error(ex.error_report)
        exit(1)
|
import glob
import gzip
import os
import re
from Bio import SeqIO
# Gene identifiers to extract from the FASTA record descriptions.
IDS = [
    "frdC", "frdA", "epmA", "mscM", "psd", "rsgA", "orn", "queG", "nnr",
    "tsaE", "amiB", "mutL", "miaA", "hfq", "hflX", "hflK", "hflC", "nsrR",
    "rnr",
]
# Matches e.g. "[gene=hfq]" and captures the gene name.
# FIX: the pattern must be a *raw* f-string — in a plain string "\[" is an
# invalid escape sequence (DeprecationWarning today, SyntaxError in future
# Python). Also joins IDS directly instead of a pointless generator.
PATTERN = re.compile(rf"\[gene=({'|'.join(IDS)})\]")
# One output bucket of (sample, sequence) pairs per gene of interest.
seqs = {key: [] for key in IDS}
# Scan every gzipped FASTA under data/; collect sequences whose description
# carries one of the target [gene=...] tags.
for i, file in enumerate(glob.glob("data/*.gz")):
    # Sample name: strip the "data/" prefix and a fixed 24-char suffix.
    sample = file[5:-24]
    with gzip.open(file, "rt") as handle:
        for seq_record in SeqIO.parse(handle, "fasta"):
            if m := re.search(PATTERN, seq_record.description):
                gene = m.group(1)
                seqs[gene].append((sample, seq_record.seq))
    # Debug escape hatch: stop after a few files.
    #if i > 1:
    # break
# Write one FASTA file per gene under result/, one record per sample.
for gene, lines in seqs.items():
    with open(os.path.join("result", f"{gene}.fasta"), "w") as file:
        for sample, line in lines:
            file.write(f">{sample}\n{line}\n")
|
import math

# Rental tariff: a flat base rate covers the first 12 hours; every extra
# hour costs a fixed surcharge.
TarifRental1 = 200000  # base tariff (covers 12 hours)
TarifRental2 = 10000   # surcharge per additional hour

# Rental period on a 24-hour clock.
JamMulaiSewa = 6
MenitMulaiSewa = 0
JamSelesaiSewa = 23
MenitSelesaiSewa = 50

# Whole hours rented, rounded down (start minutes are zero here).
LamaSewa = math.floor((JamSelesaiSewa - JamMulaiSewa) + MenitSelesaiSewa / 60)

# Total = base + surcharge for every hour beyond the twelfth.
TarifSewa = TarifRental1 + (TarifRental2 * (LamaSewa - 12))
print("Tarif yang harus dibayarkan=", TarifSewa)
|
import re, os
import pandas as pd

# NOTE(review): hard-coded absolute Windows path — breaks on any other machine.
os.chdir('C:/Learning/Python/eleven_projects_python/data_sources')
df = pd.read_csv('survey.csv')
print(df.head())
print('================')
print(df.describe())
print('================')
# The bare string literals below are no-op statements used as section labels.
'1. 빈도 분석: value_counts()'  # "1. Frequency analysis: value_counts()"
print(df.sex.value_counts())
print(df.income.value_counts())
print(df.stress.value_counts())
'2. 두 집단 평균 구하기: groupby()'  # "2. Compute group means: groupby()"
'df.groupby(그룹을 나누는 변수).연산'  # "df.groupby(<grouping variable>).<operation>"
print(df.groupby(df.sex).mean())
print(df.groupby(df.income).mean())
import opentracing
import requests
from basictracer import BasicTracer
from requests.models import Response
from opentracing_utils import trace_requests
from tests.conftest import Recorder
def assert_send_request_mock(resp):
    """Build a fake low-level send that checks OT headers and returns *resp*."""
    def send_request_mock(self, request, **kwargs):
        # A traced request must carry both OpenTracing propagation headers.
        for header in ('ot-tracer-traceid', 'ot-tracer-spanid'):
            assert header in request.headers
        return resp
    return send_request_mock
def test_trace_requests_span_extractor(monkeypatch):
    """trace_requests() must parent its HTTP span on the extractor's span."""
    # Canned HTTP response so no real network traffic happens.
    resp = Response()
    resp.status_code = 200
    resp.url = "http://example.com/"
    recorder = Recorder()
    t = BasicTracer(recorder=recorder)
    t.register_required_propagators()
    opentracing.tracer = t
    top_span = opentracing.tracer.start_span(operation_name='top_span')
    def span_extractor(*args, **kwargs):
        # Always hand back the same parent span, whatever the request is.
        return top_span
    trace_requests(span_extractor=span_extractor)
    # Replace the low-level send with a mock that asserts the OT headers.
    monkeypatch.setattr('opentracing_utils.libs._requests.__requests_http_send',
                        assert_send_request_mock(resp))
    # disable getting the span from stack
    monkeypatch.setattr('opentracing_utils.span.inspect_span_from_stack',
                        lambda: None)
    response = requests.get("http://example.com/")
    top_span.finish()
    # Two spans: the HTTP child finishes first, the parent span last.
    assert len(recorder.spans) == 2
    assert recorder.spans[0].context.trace_id == top_span.context.trace_id
    assert recorder.spans[0].parent_id == recorder.spans[1].context.span_id
    assert recorder.spans[0].operation_name == 'http_send_get'
    assert recorder.spans[-1].operation_name == 'top_span'
    assert response.status_code == resp.status_code
|
import os
# Django settings must be configured before any model can be imported.
os.environ.setdefault('DJANGO_SETTINGS_MODULE',
                      'first_project.settings')
import django
django.setup()

## FAKE POP SCRIPT
from first_app.models import User
from faker import Faker

# Shared fake-data generator for the populate() function below.
fakegen = Faker()
def populate(N=5):
    """Insert *N* fake users (first name, last name, email) into the database."""
    for _ in range(N):
        first = fakegen.first_name()
        last = fakegen.last_name()
        email = fakegen.email()
        # get_or_create returns (object, created); index [0] keeps the object.
        User.objects.get_or_create(first_name=first,
                                   last_name=last,
                                   email=email)[0]
# Seed the database with 200 fake users when run directly as a script.
if __name__ == '__main__':
    print("populating script...")
    populate(200)
    print("populating complete!")
|
# 1) Generate 1-100 and print: divisible by 5 -> 'Fizz', divisible by 7 ->
#    'Buzz', divisible by both -> 'FizzBuzz', otherwise the number itself.
# BUG FIX: the original tested i == 100 *before* the divisibility checks, so
# 100 (divisible by 5) was printed as "100" instead of "Fizz". The newline
# handling for the last item is now separate from the word selection.
for i in range(1, 101):
    if i % 5 == 0 and i % 7 == 0:
        word = 'FizzBuzz'
    elif i % 5 == 0:
        word = 'Fizz'
    elif i % 7 == 0:
        word = 'Buzz'
    else:
        word = i
    # Comma-separate every item; terminate the final one with a newline.
    print(word, end='\n' if i == 100 else ', ')
# 2) Read a tree height and print a centered triangle "tree":
height = 3
for row in range(height):
    padding = ' ' * (height - row)
    stars = '*' * (2 * row + 1)
    print(padding + stars)
# Upside-down variant:
# for row in range(height, -1, -1):
#     print(' ' * (height - row) + '*' * (2 * row + 1))
# 3) Determine whether a positive integer is a prime number.
number = 6
if number <= 1:
    print(f'{number} is not a prime number')
else:
    for i in range(2, number):
        if number % i == 0:
            print(f'{number} is not a prime number because {i} * {number // i} = {number}')
            break
    else:
        # for-else: no divisor found, the loop ran to completion.
        print(f'{number} is a prime number')
#%%
# -*- coding UTF-8 -*-
'''
@Project : MyProjects
@File : mySignDialog.py
@Author : chenbei
@Date : 2021/3/6 11:11
'''
import matplotlib.pyplot as plt
from matplotlib.pylab import mpl

# Global matplotlib configuration for the application's plots.
plt.rcParams['font.sans-serif'] = ['Times New Roman']  # base font; must be set before the Chinese font below
mpl.rcParams['font.size'] = 10.5                       # figure font size
mpl.rcParams['font.sans-serif'] = ['SimHei']           # font that can render Chinese characters
mpl.rcParams['axes.unicode_minus'] = False             # render the minus sign correctly with SimHei
mpl.rcParams['agg.path.chunksize'] = 10000
plt.rcParams['figure.figsize'] = (7.8, 3.8)            # default figure size
plt.rcParams['savefig.dpi'] = 600                      # saved-image resolution
plt.rcParams['figure.dpi'] = 600                       # on-screen resolution
from matplotlib.font_manager import FontProperties
# SimSun font for labels that need an explicit FontProperties object.
font_set = FontProperties(fname=r"C:\Windows\Fonts\simsun.ttc", size=10.5)
import numpy as np
import pandas as pd
import sys
from PyQt5.QtWidgets import (QMenu,QDialog,QMainWindow,QAction ,QApplication ,QTableWidgetItem, QAbstractItemView,QLabel)
from PyQt5.QtCore import Qt,pyqtSlot,QTimer,QTime,QSize,pyqtSignal
from PyQt5.QtGui import QIcon , QPainter,QFont,QPen,QColor,QBrush
from MyPlatform import signDialog
class mySignDialog(QDialog):
    """Login dialog that validates fixed credentials and reports the outcome.

    On confirmation it emits ``signState(password_ok, username_ok)`` — note
    the argument order: password first, username second.
    """

    # (password_is_valid, username_is_valid)
    signState = pyqtSignal(bool, bool)

    def __init__(self, parent=None):
        super().__init__(parent)
        self.ui = signDialog.Ui_signDialog()
        self.ui.setupUi(self)
        self.show()

    def checkUsernameIsTrue(self):
        """Return True when the entered username matches the fixed account."""
        # Simplified from if/else returning True/False literals.
        return self.ui.username.text() == "admin"

    def checkPasswordIsTrue(self):
        """Return True when the entered password matches the fixed password."""
        return self.ui.password.text() == "123456"

    def __del__(self):
        # Ensure the widget is closed when the wrapper is garbage-collected.
        self.close()

    @pyqtSlot()
    def on_sure_Button_clicked(self):
        """Slot for the confirm button: broadcast the validation outcome."""
        self.signState.emit(self.checkPasswordIsTrue(), self.checkUsernameIsTrue())
# Manual test: show the dialog inside a standalone Qt application.
if __name__ == '__main__':
    app = QApplication(sys.argv)
    form = mySignDialog ()
    sys.exit(app.exec_())
#
# The Multiverse Platform is made available under the MIT License.
#
# Copyright (c) 2012 The Multiverse Foundation
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
#
from multiverse.mars import *
from multiverse.mars.objects import *
from multiverse.mars.core import *
from multiverse.mars.events import *
from multiverse.mars.util import *
from multiverse.mars.plugins import *
from multiverse.server.plugins import *
from multiverse.server.math import *
from multiverse.server.events import *
from multiverse.server.objects import *
from multiverse.server.engine import *
from java.lang import *
# Mesh catalogue: mesh file -> list of [submesh name, material] pairs.
# Female ("_f_") meshes carry a separate transparent-hair submesh in
# addition to the body; male ("_m_") meshes have the body only.
meshInfo = { "casual07_f_mediumpoly.mesh" : [[ "casual07_f_mediumpoly-mesh.0", "casual07_f_mediumpoly.body" ],
                                             [ "casual07_f_mediumpoly-mesh.1", "casual07_f_mediumpoly.hair_transparent" ]],
             "casual06_f_mediumpoly.mesh" : [[ "casual06_f_mediumpoly-mesh.0", "casual06_f_mediumpoly.body" ],
                                             [ "casual06_f_mediumpoly-mesh.1", "casual06_f_mediumpoly.hair_transparent" ]],
             "casual15_f_mediumpoly.mesh" : [[ "casual15_f_mediumpoly-mesh.0", "casual15_f_mediumpoly.body" ],
                                             [ "casual15_f_mediumpoly-mesh.1", "casual15_f_mediumpoly.hair_transparent" ]],
             "casual19_f_mediumpoly.mesh" : [[ "casual19_f_mediumpoly-mesh.0", "casual19_f_mediumpoly.body" ],
                                             [ "casual19_f_mediumpoly-mesh.1", "casual19_f_mediumpoly.hair_transparent" ]],
             "casual21_f_mediumpoly.mesh" : [[ "casual21_f_mediumpoly-mesh.0", "casual21_f_mediumpoly.body" ],
                                             [ "casual21_f_mediumpoly-mesh.1", "casual21_f_mediumpoly.hair_transparent" ]],
             "business04_f_mediumpoly.mesh" : [[ "business04_mediumpoly-mesh.0", "business04_f_mediumpoly.body" ],
                                               [ "business04_mediumpoly-mesh.1", "business04_f_mediumpoly.hair_transparent" ]],
             "sportive01_f_mediumpoly.mesh" : [[ "sportive01_f_mediumpoly-mesh.0", "sportive01_f_mediumpoly.body" ],
                                               [ "sportive01_f_mediumpoly-mesh.1", "sportive01_f_mediumpoly.hair_transparent" ]],
             "sportive02_f_mediumpoly.mesh" : [[ "sportive02_f_mediumpoly-mesh.0", "sportive02_f_mediumpoly.body" ],
                                               [ "sportive02_f_mediumpoly-mesh.1", "sportive02_f_mediumpoly.hair_transparent" ]],
             "sportive05_f_mediumpoly.mesh" : [[ "sportive05_f_mediumpoly-mesh.0", "sportive05_f_mediumpoly.body" ],
                                               [ "sportive05_f_mediumpoly-mesh.1", "sportive05_f_mediumpoly.hair_transparent" ]],
             "sportive07_f_mediumpoly.mesh" : [[ "sportive07_f_mediumpoly-mesh.0", "sportive07_f_mediumpoly.body" ]],
             "casual03_m_mediumpoly.mesh" : [[ "casual03_m_medium-mesh.0", "casual03_m_mediumpoly.body" ]],
             "casual04_m_mediumpoly.mesh" : [[ "casual04_m_mediumpoly-mesh.0", "casual04_m_mediumpoly.body" ]],
             "casual07_m_mediumpoly.mesh" : [[ "casual07_m_mediumpoly-mesh.0", "casual07_m_mediumpoly.body" ]],
             "casual10_m_mediumpoly.mesh" : [[ "casual10_m_mediumpoly-mesh.0", "casual10_m_mediumpoly.body" ]],
             "casual16_m_mediumpoly.mesh" : [[ "casual16_m_mediumpoly-mesh.0", "casual16_m_mediumpoly.body" ]],
             "casual21_m_mediumpoly.mesh" : [[ "casual21_m_mediumpoly-mesh.0", "casual21_m_mediumpoly.body" ]],
             "business03_m_mediumpoly.mesh" : [[ "business03_m_medium-mesh.0", "business03_m_mediumpoly.body" ]],
             "business05_m_mediumpoly.mesh" : [[ "business05_m_mediumpoly-mesh.0", "business05_m_mediumpoly.body" ]],
             "sportive01_m_mediumpoly.mesh" : [[ "sportive01_m_mediumpoly-mesh.0", "sportive01_m_mediumpoly.body" ]],
             "sportive09_m_mediumpoly.mesh" : [[ "sportive09_m_mediumpoly-mesh.0", "sportive09_m_mediumpoly.body" ]] }

# Fallback display used by the default player template below.
displayContext = DisplayContext("casual07_f_mediumpoly.mesh", True)

# default player template
player = Template("MVSocialPlayer")
player.put(WorldManagerClient.NAMESPACE,
           WorldManagerClient.TEMPL_DISPLAY_CONTEXT,
           displayContext)
player.put(WorldManagerClient.NAMESPACE,
           WorldManagerClient.TEMPL_OBJECT_TYPE,
           ObjectTypes.player)
ObjectManagerClient.registerTemplate(player)
# character factory
# character factory
class MVSocialFactory (CharacterFactory):
    """Creates player objects for the MVSocial world when a character is made."""
    def createCharacter(self, worldName, uid, properties):
        ot = Template()
        name = properties.get("characterName");
        # get the account name for this player
        if not name:
            db = Engine.getDatabase()
            name = db.getUserName(uid)
        if not name:
            name = "default"
        # set the spawn location
        loc = Point(368917, 71000, 294579)
        meshName = properties.get("model")
        gender = properties.get("sex")  # NOTE(review): read but never used below — confirm
        if meshName:
            # Register every submesh listed for this mesh in meshInfo.
            displayContext = DisplayContext(meshName, True)
            submeshInfo = meshInfo[meshName]
            for entry in submeshInfo:
                displayContext.addSubmesh(DisplayContext.Submesh(entry[0],
                                                                 entry[1]))
            ot.put(WorldManagerClient.NAMESPACE,
                   WorldManagerClient.TEMPL_DISPLAY_CONTEXT, displayContext)
        # get default instance oid
        instanceOid = InstanceClient.getInstanceOid("default")
        if not instanceOid:
            # Creation fails: report via properties and return oid 0.
            Log.error("MVSocialFactory: no 'default' instance")
            properties.put("errorMessage", "No default instance")
            return 0
        # override template
        ot.put(WorldManagerClient.NAMESPACE,
               WorldManagerClient.TEMPL_NAME, name)
        ot.put(WorldManagerClient.NAMESPACE,
               WorldManagerClient.TEMPL_INSTANCE, Long(instanceOid))
        ot.put(WorldManagerClient.NAMESPACE,
               WorldManagerClient.TEMPL_LOC, loc)
        ot.put(Namespace.OBJECT_MANAGER,
               ObjectManagerClient.TEMPL_PERSISTENT, Boolean(True));
        # Fallback restore point so the player can always re-enter "default".
        restorePoint = InstanceRestorePoint("default", loc);
        restorePoint.setFallbackFlag(True);
        restoreStack = LinkedList();
        restoreStack.add(restorePoint);
        ot.put(Namespace.OBJECT_MANAGER,
               ObjectManagerClient.TEMPL_INSTANCE_RESTORE_STACK, restoreStack)
        ot.put(Namespace.OBJECT_MANAGER,
               ObjectManagerClient.TEMPL_CURRENT_INSTANCE_NAME, "default")
        # generate the object
        objOid = ObjectManagerClient.generateObject("MVSocialPlayer", ot)
        Log.debug("MVSocialFactory: generated obj oid=" + str(objOid))
        return objOid
# Register the factory so the login plugin uses it for character creation.
mvSocialFactory = MVSocialFactory()
LoginPlugin.getCharacterGenerator().setCharacterFactory(mvSocialFactory)
|
from binance.client import Client
from binance import enums
import config
import candle
def get_winning_trends(symbol):
    """Mine 1-minute klines of *symbol* for 2-candle patterns that tend to win.

    Pass 1: simulate a buy at each candle's close with a +0.30% take-profit
    and a -0.60% stop-loss over the next 10 candles; when take-profit is hit,
    record the two candles preceding the entry as a "winning" pattern.
    Pass 2: count how often each such pattern occurs anywhere in the data.
    Returns the patterns that win in at least 50% of their occurrences.
    """
    client = Client(config.apiKey, config.apiSecret, tld='us')
    winningTrends = {} # Stores the determined trends from first pass, list of lists of candles (length of numOfCandlesToLookFor)
    totalTrends = {} # Stores total trends amount, winning and losing
    # Get Historical Klines
    oneMinData = client.get_historical_klines(symbol=symbol, interval=enums.KLINE_INTERVAL_1MINUTE, start_str='1609542363000',
                                              klines_type=enums.HistoricalKlinesType.SPOT)
    # Look for areas where price increases in a way we want it to
    for x in range(len(oneMinData)):
        # Skip the head/tail so the 10-candle lookahead and the 2-candle
        # lookback never index out of range.
        if x < 10:
            continue
        if x == len(oneMinData) - 10:
            break
        # Entry candle plus the next 10 candles for the trade simulation.
        barsToCheck = [oneMinData[x], oneMinData[x + 1], oneMinData[x + 2], oneMinData[x + 3], oneMinData[x + 4], oneMinData[x + 5],
                       oneMinData[x + 6], oneMinData[x + 7], oneMinData[x + 8], oneMinData[x + 9], oneMinData[x + 10]]
        buyPrice = float(barsToCheck[0][candle.CLOSE_INDEX])
        takeProfitPrice = buyPrice + buyPrice * .0030
        stopLossPrice = buyPrice - buyPrice * .0060
        takeProfitHit = False
        stopLossHit = False
        # Walk forward; whichever target the price touches first decides.
        for z in range(len(barsToCheck)):
            if z == 0:
                continue
            if float(barsToCheck[z][candle.LOW_INDEX]) <= stopLossPrice:
                stopLossHit = True
                break
            if float(barsToCheck[z][candle.HIGH_INDEX]) >= takeProfitPrice:
                takeProfitHit = True
                break
        # If no target was hit, then make it a loss
        if not takeProfitHit and not stopLossHit:
            stopLossHit = True
        # Add to the list of candles
        if takeProfitHit:
            # Classify the two candles that preceded the entry.
            candleList = [candle.Candle(), candle.Candle()]
            candleList[0].candleStick = oneMinData[x-1]
            candleList[0].leadingCandleStick = oneMinData[x-2]
            candleList[0].determine_classification()
            candleList[0].determine_direction()
            candleList[0].determine_volume_change()
            candleList[1].candleStick = oneMinData[x]
            candleList[1].leadingCandleStick = oneMinData[x-1]
            candleList[1].determine_classification()
            candleList[1].determine_direction()
            candleList[1].determine_volume_change()
            # if candleList[0].classification == candle.Classification.unclassified or \
            # candleList[1].classification == candle.Classification.unclassified or \
            # continue
            if tuple(candleList) in winningTrends.keys():
                winningTrends[tuple(candleList)] += 1
            else:
                winningTrends[tuple(candleList)] = 1
    filteredWinningTrends = []
    print(len(winningTrends.keys()))
    # Initialize occurrence counters for every winning pattern found above.
    for pt in winningTrends.keys():
        totalTrends[tuple(pt)] = 0
    # calculate total trends for all bars
    for y in range(len(oneMinData)):
        if y + 2 >= len(oneMinData):
            break
        tempBarList = [candle.Candle(), candle.Candle()]
        tempBarList[0].candleStick = oneMinData[y]
        tempBarList[0].leadingCandleStick = oneMinData[y - 1]
        tempBarList[0].determine_classification()
        tempBarList[0].determine_direction()
        tempBarList[0].determine_volume_change()
        tempBarList[1].candleStick = oneMinData[y + 1]
        tempBarList[1].leadingCandleStick = oneMinData[y]
        tempBarList[1].determine_classification()
        tempBarList[1].determine_direction()
        tempBarList[1].determine_volume_change()
        if tuple(tempBarList) in totalTrends.keys():
            totalTrends[tuple(tempBarList)] += 1
    # compare and return winners
    for trend in winningTrends.keys():
        numWinningOccurrences = winningTrends[trend]
        numTotalOccurrences = totalTrends[trend]
        print(numWinningOccurrences, numTotalOccurrences)
        # Keep patterns that win at least half the times they appear.
        if float(numWinningOccurrences) >= (float(numTotalOccurrences) * .50):
            # for t in trend:
            # print(t.classification, t.direction, t.volumeChange)
            # print('**************************************************************************')
            filteredWinningTrends.append(trend)
    return filteredWinningTrends
|
'''
3 Написать функцию ask_user() чтобы с помощью input() спрашивать пользователя “Как дела?”, пока он не ответит “Хорошо”
'''
def ask_user():
    """Keep asking "how are you" until the user answers 'Хорошо', then say bye."""
    while input('Как дела ') != 'Хорошо':
        print('Ну нет, я не это хотел увидеть')
    print('Ну пока, раз хорошо')
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import organization
import settings
from django.utils import simplejson as json
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp import util
from twilio.util import TwilioCapability
class IntercomHandler(webapp.RequestHandler):
    """Serve the intercom page with a Twilio capability token for the user."""
    def get(self, user_filter="all"):
        # Client name = local part of the signed-in user's email address.
        user = users.get_current_user()
        client_name = user.email().split("@")[0]
        # The token permits both placing app calls and receiving client calls.
        capability = TwilioCapability(settings.ACCOUNT_SID,
                                      settings.AUTH_TOKEN)
        capability.allow_client_outgoing(settings.APP_SID)
        capability.allow_client_incoming(client_name)
        tp = {
            'token': capability.generate(),
            "user_filter": user_filter,
        }
        path = os.path.join(os.path.dirname(__file__),
                            'templates', 'intercom.html')
        self.response.out.write(template.render(path, tp))
class UsersHandler(webapp.RequestHandler):
    """Return the organization's user list as a JSON document."""
    def get(self):
        self.response.headers["Content-Type"] = "application/json"
        self.response.out.write(json.dumps(organization.users()))
class TwimlHandler(webapp.RequestHandler):
    """Emit TwiML that dials the client named in the 'Person' query parameter."""
    def get(self):
        tp = {"client": self.request.get("Person")}
        self.response.headers["Content-Type"] = "application/xml"
        path = os.path.join(os.path.dirname(__file__),
                            'templates', 'twiml.html')
        self.response.out.write(template.render(path, tp))
    def post(self):
        # Twilio delivers call webhooks via POST; reuse the GET logic.
        self.get()
def main():
    """Wire up the URL routes and start the WSGI application."""
    application = webapp.WSGIApplication([
        ('/(available|unavailable)?', IntercomHandler),
        ('/users', UsersHandler),
        ('/call', TwimlHandler),
    ], debug=True)
    util.run_wsgi_app(application)
if __name__ == '__main__':
    main()
|
from django.http import HttpResponseRedirect,HttpResponse
from django.core.exceptions import PermissionDenied
from django.conf import settings
from functools import wraps
import re
HEADER='HTTP_X_BEARER'
MODERN_HEADER='HTTP_AUTHORIZATION'
import logging
logger = logging.getLogger(__name__)
def superuser(function):
    """View decorator: allow only authenticated users flagged is_privileged."""
    @wraps(function)
    def wrap(request, *args, **kwargs):
        user = request.user
        if not user.is_anonymous and user.is_privileged:
            return function(request, *args, **kwargs)
        # Quell some odd 'code 400, message Bad request syntax ('tag=1-2-3-4')'
        request.POST
        # raise PermissionDenied
        return HttpResponse("XS denied", status=403, content_type="text/plain")
    return wrap
def superuser_or_bearer_required(function):
    """Allow privileged users, or requests carrying a configured bearer secret.

    The secret may arrive in the legacy X-BEARER header or in a standard
    RFC 6750 ``Authorization: Bearer <token>`` header.
    """
    @wraps(function)
    def wrap(request, *args, **kwargs):
        user = request.user
        if not user.is_anonymous and user.is_privileged:
            return function(request, *args, **kwargs)
        if hasattr(settings, 'UT_BEARER_SECRET'):
            secret = None
            # Legacy pedantic header takes precedence.
            if request.META.get(HEADER):
                secret = request.META.get(HEADER)
            # Also accept a modern RFC 6750 style header.
            elif request.META.get(MODERN_HEADER):
                match = re.search(r'\bbearer\s+(\S+)', request.META.get(MODERN_HEADER), re.IGNORECASE)
                if match:
                    secret = match.group(1)
            # Settings value is a whitespace-separated list of valid secrets.
            if secret is not None and secret in settings.UT_BEARER_SECRET.split():
                return function(request, *args, **kwargs)
        # Quell some odd 'code 400, message Bad request syntax ('tag=1-2-3-4')'
        request.POST
        # raise PermissionDenied
        return HttpResponse("XS denied", status=403, content_type="text/plain")
    return wrap
def user_or_kiosk_required(function):
    """Intended gate: allow logged-in Users, or requests from localhost."""
    @wraps(function)
    def wrap(request, *args, **kwargs):
        # NOTE(review): this unconditional return short-circuits ALL of the
        # checks below, so every request is currently allowed through. It
        # looks like a debugging leftover — confirm whether the gate should
        # be re-enabled before shipping.
        return function(request, *args, **kwargs)
        if request.user and type(request.user).__name__ == 'User':
            return function(request, *args, **kwargs)
        # Prefer the originating client IP when behind a reverse proxy.
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            ip = x_forwarded_for.split(',')[0]
        else:
            ip = request.META.get('REMOTE_ADDR')
        if ip == '127.0.0.1':
            return function(request, *args, **kwargs)
        # Quell some odd 'code 400, message Bad request syntax ('tag=1-2-3-4')'
        request.POST
        # raise PermissionDenied
        return HttpResponse("XS denied",status=403,content_type="text/plain")
    return wrap
def login_or_priveleged(function):
    """Allow privileged users, or the user whose id equals kwargs['src']."""
    @wraps(function)
    def wrap(request, *args, **kwargs):
        user = request.user
        # Short-circuits: kwargs['src'] is only consulted for non-privileged users.
        allowed = user.is_privileged or user.id == kwargs['src']
        if not allowed:
            return HttpResponse("Denied - are not that user - nor are you priveleged",status=404,content_type="text/plain")
        return function(request, *args, **kwargs)
    return wrap
|
from django import apps
class AppConfig(apps.AppConfig):
    """Django application configuration for the galaxy_api.api app."""
    name = 'galaxy_api.api'
    label = 'galaxy_api'
|
from flask import Flask, render_template, request
import json
import urllib2
app = Flask(__name__)
def azureMl(formList):
    """Score one feature row against an Azure ML kidney-disease web service.

    formList: list of 23 string feature values, ordered to match the
    ColumnNames list below. Returns the scored label string (column 23 of
    the service's output row) on success; falls through returning None
    when the HTTP request fails (the error is printed).
    """
    # Request payload in the Azure ML "execute" API shape.
    data = {
        "Inputs": {
            "input1":
            {
                "ColumnNames": ["age", "bloodpressure", "specificgravity", "albumin", "sugar", "puscell", "puscellclumps", "bacteria", "bloodglucoserandom", "bloodurea", "serumcreatinine", "sodium", "potassium", "hemoglobin", "packedcellvolume", "whitebloodcellcount", "redbloodcellcount", "hypertension", "diabetesmellitus", "coronaryarterydisease", "appetite", "pedalenema", "anemia"],
                "Values": [ formList ]
            }, },
        "GlobalParameters": {}
    }
    body = str.encode(json.dumps(data))
    url = 'https://ussouthcentral.services.azureml.net/workspaces/4ad7b48daa0f4455bd1d812507c6edfe/services/7d25282d3b2a4326881597f8f1287b1a/execute?api-version=2.0&details=true'
    # NOTE(review): placeholder credential — a real API key must be supplied
    # (ideally from configuration, not source) before this can work.
    api_key = 'Paste your API key here'
    headers = {'Content-Type':'application/json', 'Authorization':('Bearer '+ api_key)}
    req = urllib2.Request(url, body, headers)
    try:
        response = urllib2.urlopen(req)
        result = response.read()
        print("result is" + result)
        result = json.loads(result)
        # Index 23 of the single returned row holds the scored label.
        result = result["Results"]["output1"]["value"]["Values"][0][23]
        return result
    except urllib2.HTTPError, error:
        print("The request failed with status code: " + str(error.code))
        print(error.info())
        print(json.loads(error.read()))
@app.route('/')
def home_page():
    """Serve the landing page containing the input form."""
    return render_template('home.html')
@app.route('/result',methods = ['POST', 'GET'])
def result():
    """Collect the kidney-disease form fields, pad unsupplied features with
    fixed defaults, score the row via the Azure ML service and render the
    verdict page. (GET requests fall through and return None, as before.)"""
    if request.method == 'POST':
        # Order must match the ColumnNames list sent to Azure ML.
        formList = [
            str(request.form['age']),
            str(request.form['BloodPressure']),
            str(1.02),            # specificgravity: fixed default
            str(request.form['albumin']),
            str(request.form['sugar']),
            str("normal"),        # puscell: fixed default
            str("notpresent"),    # puscellclumps: fixed default
            str("notpresent"),    # bacteria: fixed default
            str(request.form['bloodGlucose']),
            str(request.form['bloodUrea']),
            str(5.23),            # serumcreatinine: fixed default
            str(130),             # sodium: fixed default
            str(6.5),             # potassium: fixed default
            str(request.form['haemoglobin']),
            str(request.form['packed']),
            str(request.form['white']),
            str(request.form['red']),
            str(request.form['hypertension']),
            str("no"),            # diabetesmellitus: fixed default
            str("no"),            # coronaryarterydisease: fixed default
            str(request.form['appetite']),
            str("no"),            # pedalenema: fixed default
            str(request.form['anaemia']),
        ]
        prediction = azureMl(formList)
        if prediction == "ckd":
            verdict = "Chronic Kidney Disease."
        else:
            verdict = "Not a Chronic Kidney Disease."
        return render_template("result.html",result = verdict)
if __name__ == '__main__':
    # app.run() blocks until the server exits, so the original second call
    # `app.run(debug = True)` was unreachable dead code; debug mode is now
    # enabled once via the attribute before the single run() call.
    app.debug = True
    app.run()
|
# -*- coding: utf-8 -*-
###########################################################
# Aldebaran Behavior Complementary Development Kit
# Persistent memory: a class to store data to a shared memory, in a persistent mode (even if restarting naoqi, nao, ...)
# Aldebaran Robotics (c) 2010 All Rights Reserved - This file is confidential.
###########################################################
"Persistent memory: a class to store data to a shared memory, in a persistent mode (even if restarting naoqi, nao, ...)"
# Legacy naoqi-era import trace.
print( "importing abcdk.persistent_memory" );
import mutex
import os
import struct
import time
import debug
import filetools
import naoqitools
import pathtools
unknownValue = -4242.42; # sentinel default returned when a key holds no stored value (how access to module level ?)
class PersistentMemoryData:
    """One persistently-stored variable: its timestamped value history,
    guarded by a mutex, loaded from / autosaved to a .dat file on disk."""
    def __init__(self, strName = None ):
        self.mutex = mutex.mutex();
        self.strName = strName;
        self.allValue = []; # a big array of triple [time, type, value] but because type is self explained => [time, value] # from older to newer
        # precomputed value:
        self.lastValue = None;
        if( strName != None ):
            self.readFromFile();
        self.timeLastSave = time.time(); # for autosaving
    # __init__ - end
    def __del__(self):
        print( "INF: PersistentMemoryData.__del__ called" );
        self.writeToFile();
    # __del__ - end
    def updateData( self, value ):
        """Append a new [timestamp, value] pair, trim history to 100 entries
        past 200, and autosave to disk every 60 seconds."""
        # print( "INF: PersistentMemoryData.updateData: %s set to '%s'" % ( self.strName, str( value ) ) );
        while( self.mutex.testandset() == False ):
            print( "PersistentMemoryData(%s).updateData: locked" % self.strName );
            time.sleep( 0.1 );
        # self.allValue.append( [ time.time(), typeToString( value ), value ] );
        self.allValue.append( [ time.time(), value ] );
        if( len( self.allValue ) > 200 ):
            print( "INF: %s: PersistentMemoryData(%s).updateData: reducing history" % ( debug.getHumanTimeStamp(), self.strName ) ); # also helps spot values that get posted a bit too often
            self.allValue = self.allValue[-100:]; # keep only the last 100 entries!
        self.lastValue = value;
        self.mutex.unlock();
        # locking would be done in the write method
        if( time.time() - self.timeLastSave > 60 ): # save each variables every 60 seconds
            print( "INF: PersistentMemoryData(%s).updateData: autosaving data..." % self.strName );
            self.writeToFile();
    # updateData - end
    def getData( self, defaultValue = -4242.42 ):
        "return last value"
        # print( "INF: PersistentMemoryData.getData of %s return '%s'" % ( self.strName, str( self.lastValue ) ) );
        if( len( self.allValue ) != 0 ):
            return self.lastValue;
        debug.debug( "WRN: PersistentMemoryData.getData not found: '%s' returning default" % ( self.strName ), bIgnoreDuplicateMessage = True );
        return defaultValue;
    # getData - end
    def getDataAndTime( self, defaultValue = -4242.42 ):
        "return last value and time of value"
        # print( "INF: PersistentMemoryData.getDataAndTime of %s return '%s'" % ( self.strName, str( self.lastValue ) ) );
        if( len( self.allValue ) != 0 ):
            return self.allValue[-1];
        debug.debug( "WRN: PersistentMemoryData.getDataAndTime not found: '%s' returning default" % ( self.strName ), bIgnoreDuplicateMessage = True );
        # NOTE(review): order is [defaultValue, 0] here, while stored entries
        # are [time, value] — callers relying on position should confirm.
        return [defaultValue,0];
    # getDataAndTime - end
    def eraseData( self ):
        "destroy this data, history and all disk files"
        debug.debug( "INF: PersistentMemoryData.eraseData: erasing %d value(s) for '%s'" % ( len( self.allValue ), self.strName ) );
        while( self.mutex.testandset() == False ):
            print( "PersistentMemoryData(%s).eraseData: locked" % self.strName );
            time.sleep( 0.1 );
        self.allValue = []; # a big array of triple [time, type, value] but because type is self explained => [time, value]
        self.lastValue = None;
        # '/' is not valid in a filename: variable names are flattened.
        cleanedName = str.replace( self.strName, "/", "__" );
        filename = self.__getVarPath_Inner__() + cleanedName + '.dat';
        try:
            os.unlink( filename );
        except BaseException, err:
            # debug.debug( "INF: PersistentMemoryData.eraseData(%s): normal error if no previous save: '%s'" % ( self.strName, err ) );
            pass
        self.mutex.unlock();
    # eraseData - end
    def getDataHist( self, nNbrValue = 3 ):
        """Return up to the nNbrValue most recent [time, value] pairs."""
        if( len( self.allValue ) < 1 ):
            return [];
        elif( len( self.allValue ) < nNbrValue ):
            nNbrValue = len( self.allValue );
        return self.allValue[-nNbrValue:];
    # getDataHist - end
    def getDataHistLength( self ):
        """Return the number of stored history entries."""
        return len( self.allValue );
    # getDataHistLength - end
    @staticmethod
    def getVarPath():
        """Return the directory where variable .dat files live."""
        return pathtools.getCachePath() + 'mem' + pathtools.getDirectorySeparator();
    # getVarPath - end
    # NOTE (translated): for some reason PersistentMemoryData.getVarPath()
    # can't be called from a non-static method of this class (annoying!)
    def __getVarPath_Inner__( self ):
        import pathtools # module already unloaded when called from __del__
        return pathtools.getCachePath() + "mem" + pathtools.getDirectorySeparator();
    # getVarPath - end
    def readFromFile( self ):
        """Reload this variable's history from its .dat file (if any)."""
        print( "INF: PersistentMemoryData.readFromFile: reading previous value for '%s'" % self.strName );
        while( self.mutex.testandset() == False ):
            print( "PersistentMemoryData(%s).readFromFile: locked" % self.strName );
            time.sleep( 0.1 );
        cleanedName = str.replace( self.strName, "/", "__" );
        filename = self.__getVarPath_Inner__() + cleanedName + '.dat';
        try:
            file = open( filename, 'rb' );
            if( file ):
                buf = file.read();
                file.close();
                # NOTE(review): eval() of on-disk data — only safe because the
                # file is written by writeToFile() below; not for untrusted input.
                self.allValue = eval( buf );
                if( len( self.allValue ) > 0 ):
                    self.lastValue = self.allValue[len(self.allValue)-1][1]; # 1 is the index of the value
                print( "INF: PersistentMemoryData.readFromFile: lastValue: %s (%d value(s))"% ( str( self.lastValue ), len( self.allValue ) ) );
        except BaseException, err:
            debug.debug( "WRN: PersistentMemoryData.readFromFile(%s)\nWRN: error: '%s'\nWRN: => no value readed" % ( filename, err) );
        self.mutex.unlock();
    # readFromFile - end
    def writeToFile( self ):
        """Persist this variable's full history to its .dat file."""
        print( "INF: PersistentMemoryData.writeToFile: storing value for '%s' (%d value(s))" % ( self.strName, len( self.allValue ) ) );
        while( self.mutex.testandset() == False ):
            print( "PersistentMemoryData(%s).writeToFile: locked" % self.strName );
            time.sleep( 0.1 );
        if( len( self.allValue ) == 0 ):
            import debug # module already unloaded when called from __del__
            debug.debug( "WRN: PersistentMemoryData.writeToFile(%s): no write because no value in the object" % self.strName );
            # don't save empty object
            self.mutex.unlock();
            return;
        try:
            # print( "allValue: %s"% str( self.allValue ) );
            cleanedName = str.replace( self.strName, "/", "__" );
            filename = self.__getVarPath_Inner__() + cleanedName + '.dat';
        except BaseException, err:
            # NOTE(review): if this fails, `filename` is unbound and the open()
            # below raises NameError (caught by the next handler) — confirm intended.
            print( "ERR: PersistentMemoryData.writeToFile(%s) error: '%s'" % ( self.strName, err ) );
            pass
        try:
            file = open( filename, 'wb' );
            if( file ):
                buf = str( self.allValue );
                #~ buf = "[";
                #~ for value in self.allValue:
                #~ buf += "[%s,%s,%s]" % ( value[0], typeToString, str( value[2] ) );
                #~ buf += "]";
                file.write( buf );
                file.close();
        except BaseException, err:
            print( "WRN: PersistentMemoryData.writeToFile(%s), filename: '%s' error: '%s'" % ( self.strName, filename, err ) );
        self.timeLastSave = time.time();
        self.mutex.unlock();
    # writeToFile - end
    def exportToALMemory( self ):
        "write all value of this variable to the ALMemory"
        mem = naoqitools.myGetProxy( "ALMemory" );
        while( self.mutex.testandset() == False ):
            print( "PersistentMemoryData(%s).exportToALMemory: locked" % self.strName );
            time.sleep( 0.1 );
        strKeyname = "behaviordata/" + self.strName;
        # print( "INF: PersistentMemoryData.exportToALMemory: exporting value for '%s' (%d value(s))" % ( self.strName, len( self.allValue ) ) );
        if( mem != None ):
            mem.insertData( strKeyname, self.allValue );
        self.mutex.unlock();
    # exportToALMemory - end
    def importFromALMemory( self, strName, strSpecificIP = "localhost" ):
        "read a value from a distant ALMemory on a robot"
        mem = naoqitools.myGetProxy( "ALMemory", strSpecificIP );
        while( self.mutex.testandset() == False ):
            print( "PersistentMemoryData(%s).importFromALMemory: locked" % self.strName );
            time.sleep( 0.1 );
        self.strName = strName;
        strKeyname = "behaviordata/" + self.strName;
        self.allValue = mem.getData( strKeyname );
        self.mutex.unlock();
        print( "self.allValue: " + str( self.allValue ) );
    # importFromALMemory - end
    def generateGraph( self ):
        """Build a pylab plot of the history: numeric entries are plotted,
        string entries become text labels on the curve."""
        import matplotlib
        import pylab
        while( self.mutex.testandset() == False ):
            print( "PersistentMemoryData(%s).generateGraph: locked" % self.strName );
            time.sleep( 0.1 );
        valueToGraph = [];
        listLibelle = [];
        bHasLibelle = False;
        bHasValue = False;
        for i in range( len( self.allValue ) ):
            val = self.allValue[i][1];
            # NOTE(review): `typetools` is not imported at module level —
            # this line looks like it would raise NameError; confirm.
            if( typetools.isString( val ) ):
                valueToGraph.append( None );
                listLibelle.append( val );
                bHasLibelle = True;
            else:
                valueToGraph.append( val );
                listLibelle.append( '' );
                bHasValue = True;
        # valueToGraph = [ 0, 3, 2, 0, 5, 7 ];
        pylab.plot(valueToGraph);
        pylab.grid( True );
        pylab.title( self.strName );
        if( bHasLibelle ):
            if( not bHasValue ):
                pylab.axis([0,len( self.allValue ),-3,3] );
            # pylab.legend( listLibelle ); # no — actually I want labels, not a legend!
            for i in range( len( listLibelle ) ):
                pylab.text( i, ((i+2)%5)-2, listLibelle[i] );
            pass
        self.mutex.unlock();
    # generateGraph - end
    def drawGraph( self, nPosX = 0, nPosY = 0, nSizeX = 320, nSizeY = 200 ):
        "draw a graph on screen showing all values of this data"
        import matplotlib
        import pylab
        self.generateGraph();
        matplotlib.pyplot.show()
        matplotlib.pyplot.close();
    # drawGraph - end
    def saveGraph( self, strFilename = "" ):
        "save a png file showing all values into a graph"
        import matplotlib
        import pylab
        try:
            if( len( self.allValue ) < 1 ):
                return False;
            strGraphPath = self.__getVarPath_Inner__() + "graph/";
            if( strFilename == "" ):
                try:
                    os.makedirs( strGraphPath );
                except:
                    pass
                strFilename = strGraphPath + str.replace( self.strName, "/", "__" ) + ".png";
            print( "INF: PersistentMemoryData.saveGraph: saving graph of variable to file '%s'" % ( strFilename ) );
            self.generateGraph();
            matplotlib.pyplot.savefig( strFilename, format="png", transparent=True); # dpi=50 => 400x300 instead of 800x600
            matplotlib.pyplot.close()
        except BaseException, err:
            debug.debug( "WRN: PersistentMemoryData.saveGraph(%s) error: '%s'" % ( self.strName, err ) );
            return False;
        return True;
    # saveGraph - end
# class PersistentMemoryData - end
class PersistentMemory:
    """ store data with history"""
    # Manager of named PersistentMemoryData objects; the dict of variables
    # is itself guarded by its own mutex.
    def __init__(self):
        debug.debug( "INF: PersistentMemoryDataManager.__init__ called" );
        self.allData = {};
        self.mutexListData = mutex.mutex();
        try:
            os.makedirs( PersistentMemoryData.getVarPath() );
        except:
            pass # the folder already exists!
    # __init__ - end
    def __del__(self):
        import debug # module already unloaded when called from __del__
        debug.debug( "INF: PersistentMemoryDataManager.__del__ called" );
        self.exportToALMemory(); # before that we export one time to the ALMemory, it doesn't cost a lot and can help users later (debug or...)
        self.allData = {};
    # __del__ - end
    def updateData( self, strName, value ):
        """Create the variable on first use, then append a new value to it."""
        while( self.mutexListData.testandset() == False ):
            print( "PersistentMemoryDataManager.updateData(%s): locked" % strName );
            time.sleep( 0.1 );
        if( not strName in self.allData ):
            self.allData[strName] = PersistentMemoryData( strName );
        self.mutexListData.unlock();
        self.allData[strName].updateData( value ); # no mutex around the update (it is done inside the method)
    # updateData - end
    def getData( self, strName, defautValue = unknownValue ):
        "return the value of some data"
        if( not strName in self.allData ):
            # we create it (or reading it from disk)
            self.allData[strName] = PersistentMemoryData( strName );
        return self.allData[strName].getData( defautValue );
    # getData - end
    def getDataAndtime( self, strName, defautValue = unknownValue ):
        "return the [value, time_of_this_value] of some data"
        if( not strName in self.allData ):
            # we create it (or reading it from disk)
            self.allData[strName] = PersistentMemoryData( strName );
        return self.allData[strName].getDataAndTime( defautValue );
    # getDataAndtime - end
    def removeData( self, strName, defautValue = unknownValue ):
        """Erase a variable's history and its on-disk file, then forget it."""
        if( not strName in self.allData ):
            # nothing to do!
            print( "WRN: PersistentMemoryDataManager.removeData(%s): data not found" % strName );
            return;
        self.allData[strName].eraseData();
        del self.allData[strName]; # erase the object (no backup would be made, since we erase the data)
    # getData - end
    def loadAll( self ):
        "load all variables present on disk in the normal path"
        "That's usefull before calling saveGraphs"
        print( "INF: PersistentMemoryDataManager.loadAll called" );
        while( self.mutexListData.testandset() == False ):
            print( "PersistentMemoryDataManager.loadAll: locked" );
            time.sleep( 0.1 );
        strPath = PersistentMemoryData.getVarPath();
        allFiles = filetools.findFile( strPath, ".dat", False );
        for file in allFiles:
            # Recover the variable name from its filename.
            strVarName = str.replace( file, strPath, "" );
            # strVarName = str.replace( strVarName, "extracted_data__", "" );
            strVarName = str.replace( strVarName, ".dat", "" );
            # print( strVarName );
            if( not strVarName in self.allData ):
                self.allData[strVarName] = PersistentMemoryData( strVarName );
        self.mutexListData.unlock();
        print( "loadAll: %d variable(s) loaded" % len( allFiles ) );
    # loadAll - end
    def storeAll( self ):
        "store all variable"
        while( self.mutexListData.testandset() == False ):
            print( "PersistentMemoryDataManager.storeAll: locked" );
            time.sleep( 0.1 );
        print( "INF: PersistentMemoryDataManager.storeAll: storing %d variable(s)" % len( self.allData ) );
        for k, v in self.allData.iteritems():
            v.writeToFile();
        self.mutexListData.unlock();
    # storeAll - end
    def saveGraphs( self ):
        "store all variable"
        while( self.mutexListData.testandset() == False ):
            print( "PersistentMemoryDataManager.saveGraphs: locked" );
            time.sleep( 0.1 );
        print( "INF: PersistentMemoryDataManager.saveGraphs: graphing %d variable(s)" % len( self.allData ) );
        for k, v in self.allData.iteritems():
            v.saveGraph();
        self.mutexListData.unlock();
    # saveGraphs - end
    def exportToALMemory( self ):
        "copy all variable to ALMemory"
        if( len( self.allData ) == 0 ):
            return;
        print( "INF: PersistentMemoryDataManager.exportToALMemory: exporting %d variable(s)" % len( self.allData ) );
        import naoqitools # module already unloaded when called from __del__
        mem = naoqitools.myGetProxy( "ALMemory" );
        if( mem == None ):
            print( "WRN: PersistentMemoryDataManager.exportToALMemory: can't connect to ALMemory" );
            return;
        while( self.mutexListData.testandset() == False ):
            print( "PersistentMemoryDataManager.exportToALMemory: locked" );
            time.sleep( 0.1 );
        allVarName = [];
        mem = naoqitools.myGetProxy( "ALMemory" );
        for k, v in self.allData.iteritems():
            allVarName.append( v.strName );
            v.exportToALMemory();
        # Publish the directory of exported names alongside the data.
        mem.insertData( "PersistentMemoryDataManager_all_vars", allVarName );
        self.mutexListData.unlock();
    # exportToALMemory - end
    def importFromALMemory( self, strSpecificIP = "localhost" ):
        "import all variables from a (remote) ALMemory"
        try:
            mem = naoqitools.myGetProxy( "ALMemory", strSpecificIP );
            allVarName = mem.getData( "PersistentMemoryDataManager_all_vars" );
        except BaseException, err:
            debug.debug( "WRN: importFromALMemory: %s" % str( err ) );
            return;
        while( self.mutexListData.testandset() == False ):
            print( "PersistentMemoryDataManager.importFromALMemory: locked" );
            time.sleep( 0.1 );
        self.allData = {};
        for strVarName in allVarName:
            someData = PersistentMemoryData();
            someData.importFromALMemory( strVarName, strSpecificIP );
            self.allData[strVarName] = someData;
        self.mutexListData.unlock();
        print( "importFromALMemory: %d variable(s) loaded" % len( self.allData ) );
    # exportToALMemory - end
    def dumpAll( self ):
        "dump to print all the outputted extracted data"
        print( "INF: PersistentMemoryDataManager.dumpAll at %d - humantime: %s" % ( int( time.time() ), debug.getHumanTimeStamp() ) );
        print( "*" * 30 );
        while( self.mutexListData.testandset() == False ):
            print( "PersistentMemoryDataManager.dumpAll: locked" );
            time.sleep( 0.1 );
        for k, v in self.allData.iteritems():
            strOut = "%s " % ( k );
            strOut += "(%d val): " % v.getDataHistLength();
            aLastValue = v.getDataHist( 3 );
            for val in aLastValue:
                # NOTE(review): `timeToHuman` is not defined/imported in this
                # module's visible code — confirm it exists at runtime.
                strOut += "%s: %s; " % ( timeToHuman( val[0] ), str( val[1] ) );
            print( strOut );
        print( "*" * 30 );
        self.mutexListData.unlock();
    # dumpAll - end
    def getHist( self, strDataName, nNbrValue = 3 ):
        """Return up to nNbrValue recent [time, value] pairs for a variable."""
        if( not strDataName in self.allData ):
            return [];
        return self.allData[strDataName].getDataHist( nNbrValue );
    # getHist - end
# class PersistentMemory - end
persistentMemory = PersistentMemory(); # the singleton
def autoTest():
    "exercise the singleton: default read, update, read back, history, erase"
    timeTest_get_first = persistentMemory.getData( "timeTest", 421 );
    print( "*** timeTest_get_first: %s" % str( timeTest_get_first ) );
    timeTest = time.time();
    persistentMemory.updateData( "timeTest", timeTest );
    timeTest_get = persistentMemory.getData( "timeTest", 421 );
    print( "*** timeTest_get: %s" % str( timeTest_get ) );
    print( "*** timeTest_get hist: %s" % str( persistentMemory.getHist( "timeTest", 20 ) ) );
    persistentMemory.removeData( "timeTest" );
    timeTest_get = persistentMemory.getData( "timeTest", 421 );
    print( "*** timeTest_get(after erase): %s" % str( timeTest_get ) );
    print( "*** timeTest_get hist(after erase): %s" % str( persistentMemory.getHist( "timeTest", 20 ) ) );
# autoTest - end
# autoTest();
import zipfile
import string
import itertools
from threading import Thread
def crack(zf, pwd):
    """Try to extract every member of *zf* using *pwd* as the password.

    Prints the password on success; failures (wrong password, bad CRC) are
    swallowed so the brute-force loop can keep going.

    Fix: the original called ``zip.extracktall`` — a typo for
    ``extractall`` — so every attempt raised AttributeError, which the
    bare except silently ate: no password could ever be reported. Also
    renamed the ``zip`` parameter to avoid shadowing the builtin, and
    narrowed ``except:`` to ``except Exception`` so KeyboardInterrupt
    still works.
    """
    try:
        zf.extractall(pwd=str.encode(pwd))
        print("Success: Password is " + pwd)
    except Exception:
        # Wrong password or corrupt member — move on to the next candidate.
        pass
# Target archive to brute-force (hard-coded local path).
zipFile = zipfile.ZipFile("C:\\Users\\Timo_Zuerner\\Desktop\\nezip.zip")
# Candidate alphabet: a-z, A-Z, 0-9.
myLetters = string.ascii_letters + string.digits
# Try every candidate password of length 0..5.
# NOTE(review): this spawns one unjoined thread per candidate with no bound —
# the thread count explodes combinatorially; a worker pool would be safer.
for r in range(6):
    print("1")
    for x in map(''.join, itertools.product(myLetters, repeat=r)):
        t = Thread(target=crack, args=(zipFile, x))
        t.start()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def euler(x0, xn, y0, n, fx):
    """Approximate y(xn) for y' = f(x, y) with the explicit Euler method.

    Args:
        x0, xn: endpoints of the integration interval.
        y0: initial value y(x0).
        n: number of steps.
        fx: expression string in the local names ``xj`` and ``yj``
            (evaluated with eval — trusted input only).
    Returns:
        The final approximation yi (also printed).

    Fixes: the x update used ``xj + (i * h)``, advancing x by a *growing*
    amount each iteration instead of one step h; and the final line did
    ``print(...) % (yi)``, applying ``%`` to print's None return value.
    """
    # Step size between consecutive nodes.
    h = (xn - x0) / n
    yj = y0
    xj = x0
    # March n explicit-Euler steps: y_{k+1} = y_k + f(x_k, y_k) * h.
    for i in range(n):
        # eval sees the pre-update xj/yj, i.e. f(x_k, y_k).
        yi = yj + eval(fx) * h
        xj = xj + h
        yj = yi
        print (str(xj) + " " + str(yj))
    print("La aproximación es : %f" % yi)
    return yi
#Funcion que nos ayuda a evaluar las funciones
def fx(x, f):
    """Evaluate the expression string *f* with *x* bound in the local scope."""
    value = eval(f)
    return value
# Demo: integrate y' = y - x^2 + 1 on [0, 2] with y(0) = 2 in 10 steps.
x0 = 0.0
xn = 2.0
y0 = 2.0
n = 10
# NOTE: this rebinds the name `fx` from the helper function above to an
# expression string consumed (via eval) by euler().
fx = 'yj - (xj ** 2) + 1'
euler(x0, xn, y0, n, fx)
|
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 26 14:17:13 2019
@author: schuhles
"""
'''If Som isn't initialized, its value might be randomized depending on the language'''
'''If all the values are below 0, the divisor will be equal to 0, and programs don't handle division by zero well'''
# Sample data used by the exercises below.
tab_list=[1,2,3,4,5,6,-9]
import numpy as np
# 12-element int32 array of zeros.
tab_zeros= np.zeros(12, dtype=np.int32)
# Convert the Python list into a numpy array.
tab_from_list=np.array(tab_list)
for id in range(len(tab_from_list)):
    # Two equivalent ways of printing an element: concatenation vs format().
    print('tab['+str(id)+']='+str(tab_from_list[id]))
    print('tab[{index}]={val}'.format(index=id, val=tab_from_list[id]))
    if tab_from_list[id]>0:
        print('Youpi')
print('Finished')
def mySum(param1, param2):
    """Sum the two input parameters.

    Args:
        param1: an int value
        param2: an int value
    Returns:
        param1 + param2
    """
    total = param1 + param2
    return total
# Quick smoke test of mySum.
print('sumTest=', mySum(1,2))
'''Exercices'''
def average_above_zero(table):
    """Compute the mean of the strictly positive values of *table*.

    Args:
        table: a non-empty list of numbers.
    Returns:
        The average of the positive entries.
    Raises:
        ValueError: if the input is not a non-empty list of numbers, or if
        it contains no positive value (division by zero).
    """
    if not isinstance(table, list):
        raise ValueError('average_above_zero, expected a list as input')
    if len(table) == 0:
        raise ValueError('expected a non empty list as input')
    if not isinstance(table[0], (int, float)):
        raise ValueError('average_above_zero, expected a list of numbers')
    positives = [value for value in table if value > 0]
    if not positives:
        raise ValueError('Division by 0')
    return sum(positives) / len(positives)
print('Moyenne = ', average_above_zero(tab_list))
'''
MAX VALUE
max <- 0
for i <- 1 to NMAX do
if Tab[i] > Max then
Max <- Tab[i]
IndexMax <- i
Display(Max, i)
'''
def function_max_value(table):
    """Find the maximum value of a list and its index.

    Args:
        table: a non-empty list of numbers.
    Returns:
        A tuple (max_value, max_index).
    Raises:
        ValueError: if the input is not a non-empty list of numbers.

    Fix: the running maximum was initialised to 0, so lists containing only
    negative numbers wrongly returned (0, 0); it now starts from the first
    element.
    """
    if not(isinstance(table, list)):
        raise ValueError('function_max_value, expected a list as input')
    if len(table)==0:
        raise ValueError('expected a non empty list as input')
    if not(isinstance(table[0], (int, float))):
        raise ValueError('function_max_value, expected a list of numbers')
    max_value = table[0]
    max_index = 0
    for id in range(1, len(table)):
        if table[id] > max_value:
            max_value = table[id]
            max_index = id
    return (max_value, max_index)
print('Valeur max = ', function_max_value(tab_list))
'''
REVERSE TABLE
size <- NMAX
index <- size - 1
iterations <- size / 2
for i <- 1 to NMAX do
Temp <- Tab[index]
Tab[index] <- Tab[i]
Tab[i] <- Temp
Index <- Index - 1
Display(Tab)
'''
def function_reverse_table(table):
    """Reverse *table* in place by swapping symmetric pairs.

    Args:
        table: a list; it is mutated in place (the function returns None).
    """
    left = 0
    right = len(table) - 1
    while left < right:
        table[left], table[right] = table[right], table[left]
        left += 1
        right -= 1
print ('Liste ', tab_list)
function_reverse_table(tab_list)
print ('Liste inversée ',tab_list)
'''
Bounding box
for i <- 1 to NMAX do
for i <-
for j <-
'''
import numpy as np
# 10x10 int32 matrix with a 3x4 block of ones at rows 3-5, cols 4-7.
matrix = np.zeros((10,10), dtype=np.int32)
matrix[3:6, 4:8]=np.ones((3,4), dtype=np.int32)
import cv2
# Read 'Image.png' as grayscale (flag 0) and display it until a key press.
img=cv2.imread('Image.png', 0)
cv2.imshow('read image', img)
cv2.waitKey()
# Scan every cell of the matrix (pixVal is read but currently unused).
for idrow in range(matrix.shape[0]):
    for idcol in range(matrix.shape[1]):
        pixVal=matrix[idrow, idcol]
#x,y,w,h = cv2.boundingRect(cnt) for x,y top-left coordinates and w,h bottom-right coordinates
#img = cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,0),2)
#############################
# Assignment 1:
# average of only the positive table values
# .... this is ugly code
# FIXME : find out error cases
def average_above_zero(tab):
    """Return (mean of the positive values, index of the last positive value).

    Raises:
        ZeroDivisionError: when *tab* holds no positive element.
    """
    total = 0
    count = 0
    last_positive_index = None
    for index, value in enumerate(tab):
        if value > 0:
            count += 1
            total += value
            last_positive_index = index
    if count == 0:
        raise ZeroDivisionError('No positive element found in provided list')
    return total / count, last_positive_index
#test section: sample run of the ugly-average assignment
tab_list=[1,2,3,-4,6,-9]
test, lastID=average_above_zero(tab_list)
print('test_sum=',test)
Python 3.7.2 (tags/v3.7.2:9a3ffc0492, Dec 23 2018, 23:09:28) [MSC v.1916 64 bit (AMD64)] on win32
Type "help", "copyright", "credits" or "license()" for more information.
>>> from nltk.tokenize import word_tokenize
>>> from collections import Counter
>>> Counter(word_tokenize("The cat is in teh box.The cat likes teh box.The box is over the cat."))
Counter({'cat': 3, 'is': 2, 'teh': 2, 'box.The': 2, 'The': 1, 'in': 1, 'likes': 1, 'box': 1, 'over': 1, 'the': 1, '.': 1})
>>>
counter.most
Traceback (most recent call last):
File "<pyshell#3>", line 2, in <module>
counter.most
NameError: name 'counter' is not defined
>>> counter.most_commomn(2)
Traceback (most recent call last):
File "<pyshell#4>", line 1, in <module>
counter.most_commomn(2)
NameError: name 'counter' is not defined
>>> counter.most_common(2)
Traceback (most recent call last):
File "<pyshell#5>", line 1, in <module>
counter.most_common(2)
NameError: name 'counter' is not defined
>>> Counter.most_common(2)
Traceback (most recent call last):
File "<pyshell#6>", line 1, in <module>
Counter.most_common(2)
File "C:\Users\Srushti\AppData\Local\Programs\Python\Python37\lib\collections\__init__.py", line 583, in most_common
return sorted(self.items(), key=_itemgetter(1), reverse=True)
AttributeError: 'int' object has no attribute 'items'
>>>
|
# https://leetcode.com/problems/monotone-increasing-digits
class Solution(object):
    def monotoneIncreasingDigits(self, N):
        """
        :type N: int
        :rtype: int

        Return the largest number <= N whose digits are monotone
        non-decreasing.

        Fix: on Python 3 ``map()`` returns a lazy iterator, so the original
        ``digits = map(int, list(str(N)))`` broke both ``len(digits)`` and
        indexing; the digits are now materialised with ``list()`` (identical
        behavior on Python 2).
        """
        # get every digit in N as a list of ints
        digits = list(map(int, str(N)))
        l = len(digits)
        # find the first position that breaks the monotone property
        i = 1
        while i < l and digits[i - 1] <= digits[i]:
            i += 1
        # if all digits are monotone, N itself is the answer
        if i == l:
            return N
        # walk back while decrementing this digit would still break
        # monotonicity against its predecessor
        while i > 0 and digits[i] - 1 < digits[i - 1]:
            i -= 1
        digits[i] -= 1
        # every digit after position i becomes 9
        res = 0
        for j in range(l):
            res *= 10
            if j > i:
                res += 9
            else:
                res += digits[j]
        return res
#!/usr/bin/env python
"""Exchange management classes."""
__author__ = 'Michael Meisinger'
__license__ = 'Apache 2.0'
from pyon.net import messaging
from pyon.util.log import log
ION_URN_PREFIX = "urn:ionx"  # URN prefix for all ION exchange identifiers
ION_ROOT_XS = "ioncore"      # name of the root exchange space
def valid_xname(name):
    """Truthy when *name* is non-empty and contains neither ':' nor ' '.

    (A falsy input is returned unchanged, matching short-circuit semantics.)
    """
    if not name:
        return name
    text = str(name)
    return ":" not in text and " " not in text
class ExchangeManager(object):
    """
    Manager object for the CC to manage Exchange related resources.
    """
    def __init__(self, container):
        log.debug("ExchangeManager initializing ...")
        self.container = container
        # Define the callables that can be added to Container public API
        self.container_api = [self.create_xs,
                              self.create_xp,
                              self.create_xn]
        # Add the public callables to Container
        for call in self.container_api:
            setattr(self.container, call.__name__, call)
        self.xs_by_name = {}
        # Root exchange space used as the default scope.
        self.default_xs = ExchangeSpace(ION_ROOT_XS)
        # TODO: Do more initializing here, not in container
    def start(self):
        """Start the manager (broker connection/declaration still TODO)."""
        # Establish connection to broker
        #self.container.node, self.container.ioloop = messaging.make_node() # TODO: shortcut hack
        # Declare root exchange
        #self.default_xs.ensure_exists(self.container.node)
        log.debug("ExchangeManager starting ...")
    def create_xs(self, name):
        """Placeholder: create an exchange space (not yet implemented)."""
        pass
    def create_xp(self, xs, name, xptype):
        """Placeholder: create an exchange point (not yet implemented)."""
        pass
    def create_xn(self, xs, name):
        """Placeholder: create an exchange name (not yet implemented)."""
        pass
    def stop(self, *args, **kwargs):
        """Stop the manager; currently just logs."""
        log.debug("ExchangeManager stopping ...")
class ExchangeSpace(object):
    """A named exchange scope, addressed by the URN ``urn:ionx:<name>``."""
    ION_DEFAULT_XS = "ioncore"
    def __init__(self, name):
        assert name, "Invalid XS name %s" % name
        name = str(name)
        # Accept fully-qualified URNs: strip the prefix plus its ':'.
        if name.startswith(ION_URN_PREFIX):
            name = name[len(ION_URN_PREFIX)+1:]
        assert valid_xname(name), "Invalid XS name %s" % name
        self.name = name
        self.qname = self.build_qname()
    def build_qname(self):
        """Return the fully-qualified URN for this exchange space."""
        qname = "%s:%s" % (ION_URN_PREFIX, self.name)
        return qname
    def build_xname(self):
        """Return the broker-level exchange name for this exchange space."""
        xname = "ion.xs.%s" % (self.name)
        return xname
    def ensure_exists(self, node):
        xname = self.build_xname()
        # Fix: the format string was "xname=%" — a bare '%' with no
        # conversion specifier, which raises ValueError ("incomplete format")
        # at runtime; it now uses '%s'.
        log.debug("ExchangeSpace.ensure_exists() xname=%s" % xname)
        #ch = node.basic_channel()
        #log.debug("ExchangeSpace.ensure_exists. Got basic channel %s" % ch)
    def __str__(self):
        return self.name
    def __repr__(self):
        return "ExchangeSpace(%s)" % self.qname
class ExchangeName(object):
    """
    Exchange names have the following format:
    urn:ionx:<XS-Name>:<Name>
    """
    def __init__(self, name, xs=None):
        assert name, "Invalid XS name %s" % name
        name = str(name)
        if name.startswith(ION_URN_PREFIX):
            # Fully-qualified form: drop the URN prefix (and its ':'),
            # then split the remainder into XS part and plain name.
            name = name[len(ION_URN_PREFIX)+1:]
            xs, name = name.split(":")
        assert xs, "XS not given"
        assert valid_xname(name), "Invalid XN name %s" % name
        self.xs = xs
        self.name = str(name)
        self.qname = self.build_qname()
    def build_qname(self):
        """Return the fully-qualified urn:ionx:<xs>:<name> string."""
        return "%s:%s:%s" % (ION_URN_PREFIX, str(self.xs), self.name)
    def build_xlname(self):
        """Return the broker-level name (note: the XS is not included here)."""
        return "ion.xs.%s" % (self.name)
    def __str__(self):
        return self.name
    def __repr__(self):
        return "ExchangeName(%s)" % self.qname
class ExchangePoint(ExchangeName):
    """An ExchangeName specialised with a point type ('basic' or 'ttree')."""
    XPTYPES = {
        'basic':'basic',
        'ttree':'ttree',
    }
    def __init__(self, name, xs=None, xptype=None):
        ExchangeName.__init__(self, name, xs)
        # Any falsy xptype (None, '') falls back to 'basic', as before.
        self.xptype = 'basic' if not xptype else xptype
    def build_xname(self):
        """Return the broker exchange name: ion.xs.<xs>.xp.<name>."""
        return "ion.xs.%s.xp.%s" % (self.xs, self.name)
|
from enum import Enum
__author__ = 'attakei'
class _RadikoArea(Enum):
"""Behavior extension for RadikoArea.
"""
def get_id(self):
return 'JP{}'.format(self.value)
@property
def area_id(self):
return self.get_id()
class RadikoArea(_RadikoArea):
    """Radiko Area id enumerations.
    Names is based from http://jprs.jp/about/jp-dom/prefecture.html#labels

    Member values are the JIS X 0401 prefecture codes (1-47); the string
    id 'JP<value>' comes from the inherited get_id()/area_id.
    """
    Hokkaido = 1
    Aomori = 2
    Iwate = 3
    Miyagi = 4
    Akita = 5
    Yamagata = 6
    Fukushima = 7
    Ibaraki = 8
    Tochigi = 9
    Gunma = 10
    Saitama = 11
    Chiba = 12
    Tokyo = 13
    Kanagawa = 14
    Niigata = 15
    Yamanashi = 16
    Nagano = 17
    Ishikawa = 18
    Toyama = 19
    Fukui = 20
    Aichi = 21
    Gifu = 22
    Shizuoka = 23
    Mie = 24
    Osaka = 25
    Hyogo = 26
    Kyoto = 27
    Shiga = 28
    Nara = 29
    Wakayama = 30
    Okayama = 31
    Hiroshima = 32
    Tottori = 33
    Shimane = 34
    Yamaguchi = 35
    Kagawa = 36
    Tokushima = 37
    Ehime = 38
    Kochi = 39
    Fukuoka = 40
    Saga = 41
    Nagasaki = 42
    Kumamoto = 43
    Oita = 44
    Miyazaki = 45
    Kagoshima = 46
    Okinawa = 47
class RadikoStation(object):
    """Radiko station struct: a plain holder for a station's id and name."""
    def __init__(self):
        # Both fields are populated by from_dom(); None until then.
        self.id = None
        self.name = None

    @classmethod
    def from_dom(cls, dom):
        """Build a RadikoStation from a station XML element having
        <id> and <name> children."""
        station = cls()
        station.id = dom.findall('id')[0].text
        station.name = dom.findall('name')[0].text
        return station
class RadikoApi(object):
    """Radiko API caller.
    """
    ENDPOINT = 'http://radiko.jp/v2'
    DEFAULT_AREA = RadikoArea.Tokyo

    def __init__(self, area=None):
        # Fall back to the default area (Tokyo) when none is supplied.
        if area is None:
            self.area = self.DEFAULT_AREA
        else:
            self.area = area

    def fetch_stations(self):
        """Fetch and parse the station list for the configured area."""
        # TODO: Should return list of struct object.
        import xml.etree.ElementTree as ET
        from urllib.request import urlopen
        endpoint = '{}/station/list/{}.xml'.format(self.ENDPOINT, self.area.area_id)
        response = urlopen(endpoint)
        stations_root = ET.fromstring(response.read())
        stations = []
        for node in stations_root.findall('./station'):
            stations.append(RadikoStation.from_dom(node))
        return stations
|
import numpy as np
from numba import jit
import sys
import matplotlib.pyplot as plt
import mpl_toolkits.mplot3d.axes3d as p3
#from ast2000tools.solar_system import SolarSystem
# Fixed seed so particle positions/velocities are reproducible between runs.
np.random.seed(1)
#system = SolarSystem(seed)
def escape_velocity(M, R, G=6.674e-11):
    """Return the escape velocity sqrt(2*G*M/R) in m/s.

    Args:
        M: mass of the body [kg].
        R: distance from the body's centre [m].
        G: gravitational constant [m^3 kg^-1 s^-2]. Parameterised with the
           standard default because no module-level ``G`` was defined —
           the original raised NameError when called.
    """
    v_escape = np.sqrt((2 * G * M) / R)
    return v_escape
class rocket_engine():
    def __init__(self, dimensions = 3, temperature = 10000, N = 1E5, mass = 1.67e-27, length = 1e-6):
        """Set up a box of N ideal-gas particles with uniform random
        positions and Gaussian (Maxwell-Boltzmann component) velocities.

        dimensions: number of spatial dimensions of the box.
        temperature: gas temperature [K].
        N: number of particles.
        mass: particle mass [kg] (default is approximately a proton mass).
        length: side length of the box [m].
        """
        self.k = 1.38064852e-23  # Boltzmann constant [J/K]
        self.T = temperature
        self.N = N
        self.m = mass
        self.L = length
        self.dim = dimensions
        # Per-component velocity std. dev., sigma = sqrt(k*T/m).
        self.sigma = np.sqrt((self.k*self.T)/self.m)
        self.x = self.position()
        self.v = self.velocities()
def velocities(self):
return np.random.normal(0,self.sigma, size=(int(self.N),self.dim))
def position(self):
return np.random.uniform(0,self.L, size=(int(self.N),self.dim))
#Calculating the mean velocity
def meanvel(self):
self.v_s = 0
for i in range(int(self.N)):
self.v_s += np.sqrt(self.v[i,0]**2+self.v[i,1]**2+self.v[i,2]**2)
return self.v_s
def meankin(self):
m = self.m
vel = 0
for i in range(int(self.N)):
vel += self.v[i,0]**2 + self.v[i,1]**2 + self.v[i,2]**2
return 0.5 * m * vel
def test_mean(self):
"""
making a test function that runs meankin() and meanvel() and checks the
computed velocity and kinetic energy and the relative error between them
anything below 1% is perfectly acceptable
"""
m = self.m
analytical_mean = 1.5*self.T*self.k
computed_mean = 0
for j in self.v:
computed_mean += self.meankin()
computed_mean = computed_mean/self.N
relative_error = abs(analytical_mean - computed_mean)/analytical_mean
print("----------Kinetic energy----------")
print("{:<20}{:g}".format("Computed mean:", computed_mean))
print("{:<20}{:g}".format("Analytical mean:", analytical_mean))
print("{:<20}{:.2f}%".format("Relative error:", relative_error * 100))
print("-----------------------------")
break
assert relative_error < 0.002, "the mean kinetic energy is off"
print("----------Velocity----------")
analytical_vel = np.sqrt(8*self.k*self.T/(np.pi*m))
computed_vel = 0
for i in self.v:
computed_vel += self.meanvel()
computed_vel = computed_vel/self.N
relative_error = abs(analytical_vel - computed_vel)/analytical_vel
print("{:<20}{:g}".format("Computed velocity:", computed_vel))
print("{:<20}{:g}".format("Analytical velocity:", analytical_vel))
print("{:<20}{:.2f}%".format("Relative error:", relative_error *100))
print("-----------------------------")
break
assert relative_error < 0.02, "the mean velocity is off"
def box_escape(self, steps = 1e4, t_end = 1e-9, dt = 1e-12):
"""
Checking how much of the particles actually escape the rocket
steps:
t_end:
dt:
"""
x, v = self.x,self.v
exiting = 0.0
exiting_velocities = 0.0
for t in range(int(t_end/dt)):
x += v * dt
v_exiting = np.abs(v[:,2])
collision_points = np.logical_or(np.less_equal(x, 0.), np.greater_equal(x, self.L))
x_mask = np.logical_or(np.greater_equal(x[:,0], 0.25*self.L), np.less_equal(x[:,0], 0.75*self.L))
y_mask = np.logical_and(np.greater_equal(x[:,0], 0.25*self.L), np.less_equal(x[:,0], 0.75*self.L))
exit_points = np.logical_and(x_mask, y_mask)
exit_points = np.logical_and(np.less_equal(x[:,2], 0), exit_points)
exit_indices = np.where(exit_points == True)
not_exit_indices = np.where(exit_points == False)
v_exiting[not_exit_indices] = 0.
exiting_velocities += np.sum(v_exiting)
collision_indices = np.where(collision_points == True)
exiting += len(exit_indices[0])
s_matrix = np.ones_like(x)
s_matrix[collision_indices] = -1
s_matrix[:,2][exit_indices] = 1
r_matrix = np.zeros_like(x)
x[:,2][exit_indices] += 0.99*self.L
v = np.multiply(v,s_matrix)
particle_per_second = exiting_velocities/t_end
return exiting_velocities, exiting, particle_per_second
def maxwell(self, x):
sigma = np.sqrt(self.k * self.T/self.m)
exponent = -x**2/(2*sigma**2)
return 1/(sigma*np.sqrt(2*np.pi))*np.exp(exponent)
# Histogram each simulated velocity component against the analytic
# Maxwell-Boltzmann pdf (one figure per component).
x1 = np.linspace(-25000, 25000, 51)
x_label = ["v_x", "v_y", "v_z"]
gas = rocket_engine()
for component, _name in enumerate(x_label):
    plt.style.use("classic")
    plt.grid()
    plt.hist(gas.v[:, component], bins=31, density=True, histtype="step")
    plt.plot(x1, gas.maxwell(x1), "r-")
    plt.show()
if __name__ == "__main__":
    # Build a fresh engine and run the statistical self-test; the expensive
    # box_escape() run is left commented out.
    A = rocket_engine()
    #result2 = A.box_escape()
    result3 = A.test_mean()  # test_mean prints its report and returns None
|
# Generated by Django 3.0.6 on 2020-07-03 11:20
# Adds an ``img_alt`` CharField to five content models. Each AddField
# back-fills existing rows with the one-off default 0 (an int handed to a
# CharField -- presumably stored as '0'; verify), and preserve_default=False
# drops that default from the model state afterwards.
from django.db import migrations, models
class Migration(migrations.Migration):
    dependencies = [
        ('app', '0006_auto_20200703_0950'),
    ]
    operations = [
        migrations.AddField(
            model_name='addesc',
            name='img_alt',
            field=models.CharField(default=0, max_length=150),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='blog',
            name='img_alt',
            field=models.CharField(default=0, max_length=150),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='dsdesc',
            name='img_alt',
            field=models.CharField(default=0, max_length=150),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='mldesc',
            name='img_alt',
            field=models.CharField(default=0, max_length=150),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='wddesc',
            name='img_alt',
            field=models.CharField(default=0, max_length=150),
            preserve_default=False,
        ),
    ]
|
# Fixed: the original imported the lowercase ``process`` module but then
# instantiated ``Process``, which raised NameError at runtime.
from multiprocessing import Process, Pipe
import os, time
# Create the pipe object.
# For a one-way pipe: fd1 --> read end only, fd2 --> write end only
# (the default duplex Pipe allows both ends to send and receive).
fd1, fd2 = Pipe()
def fun(name):
    """Worker: sleep briefly, then push *name* through the pipe."""
    time.sleep(3)
    fd1.send(name)
jobs = []
# Spawn five workers; each sends its index through the pipe.
for i in range(5):
    p = Process(target=fun, args=(i,))
    jobs.append(p)
    p.start()
# Collect exactly one message per worker (order depends on scheduling).
for i in range(5):
    data = fd2.recv()
    print(data)
for i in jobs:
    i.join()
|
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 29 17:34:01 2017
@author: dori
"""
from netCDF4 import Dataset
import matplotlib.pyplot as plt
plt.close('all')
import matplotlib.dates as md
import numpy as np
import pandas as pd
from glob import glob
import os
import argparse
from radar_settings import radarlib, hydrodict
parser = argparse.ArgumentParser(description='do plots for QuickLookBrowser')
parser.add_argument('-d','--date', nargs=1,
help='gimme datestring in the format YYYYMMDD')
parser.add_argument('-hy','--hydroset', nargs=1,
help='gimme hydrosettings',
choices=hydrodict.keys())
parser.add_argument('--rootpath', nargs=1, help='gimme full path for saving output')
parser.add_argument('-m1', '--moment1', nargs=1, help='tell me the modifier to data and plot folders')
parser.add_argument('-p', '--patch', nargs=1, help='tell me the 3 padded patch number like 001')
parser.print_help()
args = parser.parse_args()
datestr = args.date[0]
hydrostr = args.hydroset[0]
print datestr, hydrostr
plt.close('all')
rootpath = '/data/optimice/pamtra_runs/tripex-pol/'
if args.rootpath is not None:
rootpath = args.rootpath[0]
mod=''
if args.moment1 is not None:
mod = args.moment1[0]
patch = ''
if args.patch is not None:
patch = args.patch[0]
runFld = rootpath + 'data' + mod + '/'
plotFld = rootpath + 'plots' + mod + '/'
runs = ['all_hydro', 'no_snow', 'only_ice', 'only_liquid', 'only_snow', 'only_graupel_hail']
titles = ['all Hydrometeors', 'No Snow', 'Only Ice', 'Only liquid (cloud drops and rain)', 'only Snow', 'only Graupel and Hail']
runTitles=dict(zip(runs,titles))
# Define Plotting Function
def plot_variable(x,y,v,axes,
                  xlab=None,ylab=None,vlab=None,title=None,
                  vmin=None,vmax=None,xlim=None,ylim=None,
                  cmap='jet'):
    """Draw v(x, y) as a pcolormesh on *axes* and attach a colorbar.

    Optional axis labels, an in-axes boxed title, color limits and axis
    limits are applied only when given.
    """
    mesh = axes.pcolormesh(x, y, v, vmin=vmin, vmax=vmax, cmap=cmap)
    if title is not None:
        # boxed label drawn inside the axes (axes-fraction coordinates)
        axes.text(0.1, 0.9, title, transform=axes.transAxes, weight='black',
                  bbox=dict(facecolor='white'))
    plt.colorbar(mesh, label=vlab, ax=axes)
    for setter, label in ((axes.set_xlabel, xlab), (axes.set_ylabel, ylab)):
        if label is not None:
            setter(label)
    axes.set_xlim(xlim)
    axes.set_ylim(ylim)
versus = -1 # Top Down
versus = 1 # Bottom Up
xfmt = md.DateFormatter('%m-%d %H')
ylim=(0,12000)
xDataLim = -1
figsize31=(18,18)
figsize21=(18,12)
# Open the netcdf results file
runFile10 = runFld + hydrostr + '/' + datestr + hydrostr + patch + '_mom_'+'Joyrad10.nc'
runFile35 = runFld + hydrostr + '/' + datestr + hydrostr + patch + '_mom_'+'Joyrad35.nc'
runFile94 = runFld + hydrostr + '/' + datestr + hydrostr + patch + '_mom_'+'Grarad94.nc'
if int(datestr) < 20180930:
runFile10 = runFld + hydrostr + '/' + datestr + hydrostr + '_mom_'+'KiXPol.nc'
runFile35 = runFld + hydrostr + '/' + datestr + hydrostr + '_mom_'+'Joyrad35.nc'
runFile94 = runFld + hydrostr + '/' + datestr + hydrostr + '_mom_'+'Joyrad94.nc'
print runFile10
print runFile35
print runFile94
def readPamtra_nc(ncfile):
    """Read one PAMTRA moments netCDF file, return (H, tt, A, Ze, MDV, SW).

    Depends on the module-level globals ``xDataLim`` (row truncation) and
    ``versus`` (direction of the attenuation cumsum along height).
    NOTE(review): the Dataset is never closed.
    """
    runDataset = Dataset(ncfile)
    runVars = runDataset.variables
    # heights per time step, truncated to the first xDataLim rows
    H = (runVars['height'][:,0,:])[:xDataLim,:]
    ttt = pd.to_datetime(runVars['datatime'][:,0],unit='s')
    # replicate the timestamps across the height dimension so tt matches H
    tt = (np.tile(ttt,(H.shape[1],1)).T)[:xDataLim,:]
    print(tt.shape, H.shape)
    # two-way attenuation (hydrometeors + gases), accumulated along height
    a = 2.0*(runVars['Attenuation_Hydrometeors'][:,0,:,0,0] + runVars['Attenuation_Atmosphere'][:,0,:,0])
    A = a[:,::versus].cumsum(axis=1)[:,::versus][:xDataLim,:]
    Ze = runVars['Ze'][:,0,:,0,0,0][:xDataLim,:]
    # NOTE(review): sign of MDV is flipped here -- presumably so falling
    # targets come out negative; confirm the convention.
    MDV = -runVars['Radar_MeanDopplerVel'][:,0,:,0,0,0][:xDataLim,:]
    SW = runVars['Radar_SpectrumWidth'][:,0,:,0,0,0][:xDataLim,:]
    return H, tt, A, Ze, MDV, SW
Hx, ttx, Ax, Zex, MDVx, SWx = readPamtra_nc(runFile10)
Ha, tta, Aa, Zea, MDVa, SWa = readPamtra_nc(runFile35)
Hw, ttw, Aw, Zew, MDVw, SWw = readPamtra_nc(runFile94)
# Plot Attenuation
f,((ax1,ax2,ax3)) = plt.subplots(3, 1, sharex=False, figsize=figsize31)
plot_variable(ttx,Hx,Ax,ax1,None,'height [km]','dB','X-band 2-way Attenuation',0,1,ylim=ylim)
plot_variable(tta,Ha,Aa,ax2,None,'height [km]','dB','Ka-band 2-way Attenuation',0,5,ylim=ylim)
plot_variable(ttw,Hw,Aw,ax3,'time','height [km]','dB', 'W-band 2-way Attenuation',0,15,ylim=ylim)
f.suptitle(runTitles[hydrostr], weight='black',bbox=dict(facecolor='white'))
ax1.set_title('X-band')
ax2.set_title('Ka-band')
ax3.set_title('W-band')
ax1.xaxis.set_major_formatter(xfmt)
ax2.xaxis.set_major_formatter(xfmt)
ax3.xaxis.set_major_formatter(xfmt)
ax1.grid(color='k')
ax2.grid(color='k')
ax3.grid(color='k')
f.tight_layout(pad=0)
f.savefig(plotFld+hydrostr+'/'+datestr+hydrostr+patch+'_attenuation'+'.png', dpi=200, bbox_inches='tight')
# Plot Ze
f,((ax1,ax2,ax3)) = plt.subplots(3,1,sharex=False,figsize=figsize31)
plot_variable(ttx,Hx,Zex,ax1,None,'height [km]','dBZ','X-band Ze',-35,25,ylim=ylim)
plot_variable(tta,Ha,Zea,ax2,None,'height [km]','dBZ', 'Ka-band Ze',-35,25,ylim=ylim)
plot_variable(ttw,Hw,Zew,ax3,'time','height [km]','dBZ', 'W-band Ze',-35,25,ylim=ylim)
ax1.xaxis.set_major_formatter(xfmt)
ax2.xaxis.set_major_formatter(xfmt)
ax3.xaxis.set_major_formatter(xfmt)
ax1.set_title('X-band')
ax2.set_title('Ka-band')
ax3.set_title('W-band')
ax1.grid(color='k')
ax2.grid(color='k')
ax3.grid(color='k')
f.tight_layout(pad=0)
f.savefig(plotFld+hydrostr+'/'+datestr+hydrostr+patch+'_Ze'+'.png', dpi=200, bbox_inches='tight')
# make DWRs and plot
DWRxa = Zex-Zea
DWRaw = Zea-Zew
f,((ax1,ax2)) = plt.subplots(2,1,sharex=False,figsize=figsize21)
plot_variable(ttx,Hx,DWRxa,ax1,None,'height [km]','dB','DWR$_{X Ka}$',-5,20, ylim=ylim,cmap='nipy_spectral')
plot_variable(ttx,Hx,DWRaw,ax2,'time','height [km]','dB','DWR$_{Ka W}$',-5,20, ylim=ylim,cmap='nipy_spectral')
f.suptitle(runTitles[hydrostr], weight='black',bbox=dict(facecolor='white'))
ax1.xaxis.set_major_formatter(xfmt)
ax2.xaxis.set_major_formatter(xfmt)
ax1.set_title('X-Ka')
ax2.set_title('Ka-W')
ax1.grid(color='k')
ax2.grid(color='k')
f.tight_layout(pad=0)
f.savefig(plotFld+hydrostr+'/'+datestr+hydrostr+patch+'_DWRe'+'.png', dpi=200, bbox_inches='tight')
# make attenuated Z and DWRs and respective plots
Zx = Zex-Ax
Za = Zea-Aa
Zw = Zew-Aw
f,((ax1,ax2,ax3)) = plt.subplots(3,1,sharex=False,figsize=figsize31)
plot_variable(ttx,Hx,Zx,ax1,None,'height [km]','dBZ','X-band Z attenuated',-35,25,ylim=ylim)
plot_variable(tta,Ha,Za,ax2,None,'height [km]','dBZ','Ka-band Z attenuated',-35,25,ylim=ylim)
plot_variable(ttw,Hw,Zw,ax3,'time','height [km]','dBZ', 'W-band Z attenuated',-35,25,ylim=ylim)
ax1.set_title('X-band')
ax2.set_title('Ka-band')
ax3.set_title('W-band')
ax1.xaxis.set_major_formatter(xfmt)
ax2.xaxis.set_major_formatter(xfmt)
ax3.xaxis.set_major_formatter(xfmt)
ax1.grid(color='k')
ax2.grid(color='k')
ax3.grid(color='k')
f.tight_layout(pad=0)
f.savefig(plotFld+hydrostr+'/'+datestr+hydrostr+patch+'_Zattenuated'+'.png', dpi=200, bbox_inches='tight')
DWRxa = Zx-Za
DWRaw = Za-Zw
f,((ax1,ax2)) = plt.subplots(2,1,sharex=False,figsize=figsize21)
plot_variable(ttx,Hx,DWRxa,ax1,None,'height [km]','dB','DWR$_{X Ka}$ attenuated',-5,20,ylim=ylim,cmap='nipy_spectral')
plot_variable(ttx,Hx,DWRaw,ax2,'time','height [km]','dB','DWR$_{Ka W}$ attenuated',-5,20,ylim=ylim,cmap='nipy_spectral')
ax1.set_title('X-Ka')
ax2.set_title('Ka-W')
ax1.xaxis.set_major_formatter(xfmt)
ax2.xaxis.set_major_formatter(xfmt)
ax1.grid(color='k')
ax2.grid(color='k')
f.tight_layout(pad=0)
f.savefig(plotFld+hydrostr+'/'+datestr+hydrostr+patch+'_DWRattenuated'+'.png', dpi=200, bbox_inches='tight')
# Plot mean doppler velocity
f,((ax1,ax2,ax3)) = plt.subplots(3,1,sharex=False,figsize=figsize31)
plot_variable(ttx,Hx,MDVx,ax1,None, 'height [km]','m/s',' X-band MDV',-3,0,ylim=ylim)
plot_variable(tta,Ha,MDVa,ax2,None, 'height [km]','m/s','Ka-band MDV',-3,0,ylim=ylim)
plot_variable(ttw,Hw,MDVw,ax3,'time','height [km]','m/s', 'W-band MDV',-3,0,ylim=ylim)
ax1.set_title('X-band')
ax2.set_title('Ka-band')
ax3.set_title('W-band')
ax1.xaxis.set_major_formatter(xfmt)
ax2.xaxis.set_major_formatter(xfmt)
ax3.xaxis.set_major_formatter(xfmt)
ax1.grid(color='k')
ax2.grid(color='k')
ax3.grid(color='k')
f.tight_layout(pad=0)
f.savefig(plotFld+hydrostr+'/'+datestr+hydrostr+patch+'_MDV'+'.png', dpi=200, bbox_inches='tight')
f,((ax1,ax2,ax3)) = plt.subplots(3,1,sharex=False,figsize=figsize31)
plot_variable(ttx,Hx,SWx,ax1,None, 'height [km]','m/s','Ku-band SW',0,1,ylim=ylim)
plot_variable(tta,Ha,SWa,ax2,None, 'height [km]','m/s','Ka-band SW',0,1,ylim=ylim)
plot_variable(ttw,Hw,SWw,ax3,'time','height [km]','m/s', 'W-band SW',0,1,ylim=ylim)
ax1.set_title('X-band')
ax2.set_title('Ka-band')
ax3.set_title('W-band')
ax1.xaxis.set_major_formatter(xfmt)
ax2.xaxis.set_major_formatter(xfmt)
ax3.xaxis.set_major_formatter(xfmt)
ax1.grid(color='k')
ax2.grid(color='k')
ax3.grid(color='k')
f.tight_layout(pad=0)
f.savefig(plotFld+hydrostr+'/'+datestr+hydrostr+patch+'_SW'+'.png', dpi=200, bbox_inches='tight')
# Plot dual doppler velocity
DDWxa = MDVx-MDVa
DDWaw = MDVa-MDVw
f,((ax1,ax2)) = plt.subplots(2,1,sharex=False,figsize=figsize21)
plot_variable(ttx,Hx,DDWxa,ax1,None,'height [km]','m/s','DDV$_{X Ka}$',-0.3,0.3,ylim=ylim,cmap='nipy_spectral')
plot_variable(ttx,Hx,DDWaw,ax2,'time','height [km]','m/s','DDV$_{Ka W}$',-0.3,0.3,ylim=ylim,cmap='nipy_spectral')
ax1.set_title('X-Ka')
ax2.set_title('Ka-W')
ax1.xaxis.set_major_formatter(xfmt)
ax2.xaxis.set_major_formatter(xfmt)
ax1.grid(color='k')
ax2.grid(color='k')
f.tight_layout(pad=0)
f.savefig(plotFld+hydrostr+'/'+datestr+hydrostr+patch+'_DDV'+'.png', dpi=200, bbox_inches='tight')
# Plot dual spectral width
DSWxa = SWx-SWa
DSWaw = SWa-SWw
f,((ax1,ax2)) = plt.subplots(2,1,sharex=False,figsize=figsize21)
plot_variable(ttx,Hx,DSWxa,ax1,None,'height [km]','m/s','DSW$_{X Ka}$',-0.3,0.3,ylim=ylim,cmap='nipy_spectral')
plot_variable(ttx,Hx,DSWaw,ax2,'time','height [km]','m/s','DSW$_{Ka W}$',-0.3,0.3,ylim=ylim,cmap='nipy_spectral')
ax1.set_title('X-Ka')
ax2.set_title('Ka-W')
ax1.grid(color='k')
ax2.grid(color='k')
ax1.xaxis.set_major_formatter(xfmt)
ax2.xaxis.set_major_formatter(xfmt)
f.tight_layout(pad=0)
f.savefig(plotFld+hydrostr+'/'+datestr+hydrostr+patch+'_DSW'+'.png', dpi=200, bbox_inches='tight')
plt.close('all')
|
"""Test report utility methods for retrieving summarized
test execution information from features and scenarios"""
import re
import os
import platform
from re import match
from configobj import ConfigObj
CONFIG = ConfigObj(os.path.join(os.getcwd(), "..", "config", "config.cfg"))
def gather_steps(features):
    """Return {step name: {'quantity', 'total_duration', 'appearances'}}
    aggregated over every scenario of every feature.

    NOTE(review): each scenario's step list is concatenated with a copy of
    itself, so every step is counted twice -- preserved as-is, but confirm
    this doubling is intentional.
    """
    steps = {}
    for feature in features:
        for scenario in feature['scenarios']:
            scenario_steps = scenario['steps']
            for step in scenario_steps + list(scenario_steps):
                entry = steps.setdefault(
                    step['name'],
                    {'quantity': 0, 'total_duration': 0, 'appearances': 0})
                entry['appearances'] += 1
                # skipped steps appear in the report but were not executed
                if step['status'] != 'skipped':
                    entry['quantity'] += 1
                entry['total_duration'] += step['duration']
    return steps
def get_summary(features, stories_v1, all_scenarios):
    """Summarize an execution against the stories tracked in VersionOne.

    :param features: features of the execution
    :param stories_v1: stories from VersionOne (or another server); falsy
        when no tracker is configured
    :param all_scenarios: every executed scenario
    :return: tuple (only_yarara, set_yarara, story_uncovered, stories_both)
    """
    stories = gather_stories(
        features, CONFIG["story_tag"]["regex"],
        CONFIG["story_tag"]["prefix"])[0]
    story_uncovered = {}
    stories_both = {}
    # every tag seen on any executed scenario
    set_yarara = set(sum((scenario['tags'] for scenario in all_scenarios), []))
    if stories_v1:
        by_number = {
            story['Number']: {'Name': story['Name'],
                              'Description': story['Description']}
            for story in stories_v1
        }
        set_v1 = set(by_number)
        uncovered_numbers = set_v1 - set_yarara
        shared_numbers = set_v1 & set_yarara
        only_yarara = set_yarara - set_v1
        story_uncovered = {number: by_number[number]
                           for number in uncovered_numbers}
        stories_both = {
            number: {'scenarios': stories[number],
                     'description': by_number[number]['Name']}
            for number in shared_numbers
        }
    else:
        only_yarara = set_yarara
    return only_yarara, set_yarara, story_uncovered, stories_both
def get_tags_process(all_scenarios):
    """Map every non-story tag to (scenario list, aggregate status).

    Position 0 of each value holds (name, status, row) tuples for the
    scenarios carrying the tag; position 1 is 'failed' if any failed,
    else 'passed' if any passed, else 'skipped'.
    :param all_scenarios: list of scenario dicts
    :return: dict
    """
    # all distinct tags across every scenario
    tags = set(sum([scenario['tags'] for scenario in all_scenarios], []))
    tags_scenario = {}
    for tag in tags:
        # story tags (matching the configured regex) are summarized elsewhere
        if not match(CONFIG["story_tag"]["regex"], tag):
            tags_scenario[tag] = [
                (scenario['name'], scenario['status'], scenario.get('row', ''))
                for scenario in all_scenarios if tag in scenario['tags']]
    # augment each entry with the sets of statuses and rows observed
    tags_scenario.update(
        {key: (value, set([status for name, status, row in value]),
               set([row for name, status, row in value]))
         for key, value in tags_scenario.items()})
    # priority failed > passed > skipped, implemented as dict lookups on the
    # status set (x.get('failed', False) returns the string when present)
    get_status = lambda x: x.get('failed', False) or x.get('passed', False) \
        or x.get('skipped', 'skipped')
    tags_process = {key: (value[0], get_status({key: key for key in value[1]}))
                    for key, value in tags_scenario.items()}
    return tags_process
def gather_errors(scenario, retrieve_step_name=False):
    """Retrieve the error message related to a particular failing scenario.

    NOTE(review): ``error_msg`` and ``error_lines`` are never populated, so
    the step-extraction loop below is dead code and the function effectively
    returns (None, [], None) or (None, []) depending only on the path depth
    of scenario['filename']. The two branches also return tuples of
    different arity, and *retrieve_step_name* is unused. Confirm intent
    against callers before relying on this.
    """
    error_msg = None
    error_lines = []
    error_step = None
    dotted_name = os.path.splitext(os.path.basename(scenario['filename']))[0]
    folders = os.path.split(scenario['filename'])[0].split(os.sep)
    if len(folders) > 1:
        # prefix the file name with its containing folders, dot-separated
        for folder in reversed(folders[1:]):
            dotted_name = folder + "." + dotted_name
        for line in error_lines:  # dead: error_lines is always empty here
            regex = (r"Failing\sstep:\s[given|when|then|and|or]+ "
                     r"\s+(.+)\s\.\.\.\s.+")
            match_obj = re.match(regex, line, re.M | re.I)
            if match_obj:
                error_step = match_obj.group(1)
                break
        return error_msg, error_lines, error_step
    return error_msg, error_lines
# """
# return all stories storaged in server configured in config file
# :return: False|list
# """
# if CONFIG['software_manager']['manager'] != 'versionOne':
# return False
# config_v1 = configuration.get_config_v1()
#
# if not config_v1 or (config_v1 and '' in config_v1['versionOne'].values()):
# return False
#
# os.environ.setdefault('https_proxy', config_v1['proxy']['https_proxy'])
# v_1 = V1Meta(**config_v1['versionOne'])
#
# try:
# if config_v1['query']['use'] == 'where':
# stories_v1 = v_1.Story.select('Number', 'Name', 'Description')\
# .where(**config_v1['where'])
#
# elif config_v1['query']['use'] == 'filter':
# stories_v1 = v_1.Story.select('Number', 'Name', 'Description')\
# .filter(config_v1['filter']['contain'])
#
# else:
# stories_v1 = v_1.Story.select('Number', 'Name', 'Description')
#
# return [story for story in stories_v1]
# except:
# return False
def second_user_format(seconds_float):
    """Format a duration in seconds as 'HH:MM:SS.cc'.

    Durations below 0.009 s collapse to the sentinel '00:00:00.0'.
    Fixed: the original used true division ('/'), which under Python 3
    yields floats and made _normalize_time emit strings like '01.5';
    floor division ('//') restores the intended integer components and
    is identical under Python 2.
    """
    if seconds_float < 0.009:
        return '00:00:00.0'
    seconds = int(seconds_float)
    hours = seconds // (60 * 60)
    minutes, seconds_ = (seconds // 60) % 60, seconds % 60
    # hundredths of a second from the fractional part
    centis = int((seconds_float - seconds) * 100)
    def _normalize_time(unidad):
        """Zero-pad a time component to two digits."""
        if int(unidad) <= 9:
            return '0' + str(int(unidad))
        else:
            return str(int(unidad))
    return '%s:%s:%s.%s' % (
        _normalize_time(hours),
        _normalize_time(minutes),
        _normalize_time(seconds_),
        _normalize_time(centis))
def get_status_traceability(stories):
    """Map each story number to an aggregate status for the config file.

    :param stories: mapping of story number -> list of scenario dicts
    :return: dict number -> 'failed' | 'passed' | 'skipped'
    """
    dict_status = {}
    for number, scenarios in stories.items():
        statuses = [scenario['status'] for scenario in scenarios]
        if any(status not in ('passed', 'skipped') for status in statuses):
            # any non-passed/skipped scenario fails the whole story
            dict_status[number] = 'failed'
        elif 'passed' in statuses:
            dict_status[number] = 'passed'
        else:
            dict_status[number] = 'skipped'
    return dict_status
def _get_root_xml(output_path, filename):
    """Parse the junit report 'TESTS-<filename>.xml' and return its root.

    :param output_path: folder containing the report
    :param filename: base name (without the TESTS- prefix / .xml suffix)
    :return: root XML element
    """
    report = os.path.join(output_path, "TESTS-" + filename + ".xml")
    if platform.system() == 'Windows':
        # lxml's recovering parser tolerates malformed reports on Windows
        from lxml import etree
        parser = etree.XMLParser(recover=True)
        tree = etree.parse(report, parser=parser)
    else:
        import xml.etree.ElementTree as ET
        tree = ET.parse(report)
    return tree.getroot()
def normalize(string):
    """
    Normalize *string* for safe use as a file/folder name: spaces become
    underscores; parentheses, double quotes and slashes are removed.
    """
    replacements = ((" ", "_"), ("(", ""), (")", ""), ('"', ""), ("/", ""))
    for old, new in replacements:
        string = string.replace(old, new)
    return string
|
from tkinter import *
from time import sleep
root = Tk()
root.title("Calculator First try")
#display the input and output
e = Entry(root,width=40)
e.grid(row=0,column=0,padx=15,pady=15,columnspan = 3)
def button_add(number):
    # Append *number* to whatever is currently shown in the entry widget
    # (the widget is cleared and rewritten because Entry has no append).
    x = e.get()
    e.delete(0,END)
    e.insert(0,str(x) + str(number))
def button_clear():
    # Wipe the entry widget.
    e.delete(0,END)
def button_equal():
    """'=' button: apply the pending operation (global ``math``) between the
    stored first operand (global ``f_n``) and the number on the display.

    NOTE(review): pressing '=' before any operation button raises NameError,
    because ``math``/``f_n`` are only created by cadd/cminus/cdividing/
    cmultiply -- confirm this flow is acceptable.
    """
    if e.get() == "":
        e.insert(0,"please enter a number")
    elif e.get() == "please enter a number":
        e.delete(0,END)
        e.insert(0,"please enter a number not letters")
    elif e.get() == "please enter a number not letters":
        e.delete(0, END)
        e.insert(0, "please enter a number not letters")
        print("باعوص")
    elif e.get() == "Error we can't dividing by 0":
        e.delete(0, END)
        e.insert(0, "please enter a number not letters")
    # division is special-cased so a zero divisor shows an error message
    elif math == "dividing":
        sn = e.get()
        e.delete(0, END)
        s_n = float(sn)
        if s_n == 0:
            e.insert(0,"Error we can't dividing by 0")
        else:
            e.insert(0, f_n / s_n)
    else:
        sn=e.get()
        s_n=float(sn)
        e.delete(0,END)
        if math == "add":
            e.insert(0, f_n + s_n)
        elif math == "minus":
            e.insert(0, f_n - s_n)
        elif math == "multiply":
            e.insert(0, f_n * s_n)
        elif math == "dividing":
            e.insert(0, f_n / s_n)
def _start_operation(op_name):
    """Shared implementation behind cadd/cminus/cdividing/cmultiply.

    Validates the display: an empty display gets a prompt, a leftover status
    message is replaced; otherwise the shown number is stored in the global
    ``f_n`` and *op_name* in the global ``math``, then the display is cleared
    for the second operand. (Consolidates four byte-identical copies.)
    """
    global f_n
    global math
    current = e.get()
    if current == "":
        e.insert(0, "please enter a number")
    elif current in ("please enter a number",
                     "please enter a number not letters"):
        e.delete(0, END)
        e.insert(0, "please enter a number not letters")
        if current == "please enter a number not letters":
            print("باعوص")
    else:
        math = op_name
        f_n = float(current)
        e.delete(0, END)
def cadd():
    """'+' button: start an addition."""
    _start_operation("add")
def cminus():
    """'-' button: start a subtraction."""
    _start_operation("minus")
def cdividing():
    """'÷' button: start a division."""
    _start_operation("dividing")
def cmultiply():
    """'X' button: start a multiplication."""
    _start_operation("multiply")
def _press_digit(number):
    """Clear any status message from the display, then append *number*.

    Consolidates the ten byte-identical digit handlers c0..c9.
    """
    if e.get() in ("please enter a number",
                   "please enter a number not letters"):
        e.delete(0, END)
    button_add(number)
def c1():
    """Digit button 1."""
    _press_digit(1)
def c2():
    """Digit button 2."""
    _press_digit(2)
def c3():
    """Digit button 3."""
    _press_digit(3)
def c4():
    """Digit button 4."""
    _press_digit(4)
def c5():
    """Digit button 5."""
    _press_digit(5)
def c6():
    """Digit button 6."""
    _press_digit(6)
def c7():
    """Digit button 7."""
    _press_digit(7)
def c8():
    """Digit button 8."""
    _press_digit(8)
def c9():
    """Digit button 9."""
    _press_digit(9)
def c0():
    """Digit button 0."""
    _press_digit(0)
#button for each number
button1 = Button(root,text = 1,padx = 40 , pady =20, command = c1,).grid(row =3, column = 0)
button2 = Button(root,text = 2,padx = 40 , pady =20, command =c2).grid(row =3, column = 1)
button3 = Button(root,text = 3,padx = 40 , pady =20, command = c3).grid(row =3, column = 2)
button4 = Button(root,text = 4,padx = 40 , pady =20, command = c4).grid(row =2, column = 0)
button5 = Button(root,text = 5,padx = 40 , pady =20, command = c5).grid(row =2, column = 1)
button6 = Button(root,text = 6,padx = 40 , pady =20, command = c6).grid(row =2, column = 2)
button7 = Button(root,text = 7,padx = 40 , pady =20, command = c7).grid(row =1, column = 0)
button8 = Button(root,text = 8,padx = 40 , pady =20, command = c8).grid(row =1, column = 1)
button9 = Button(root,text = 9,padx = 40 , pady =20, command = c9).grid(row =1, column = 2)
button0 = Button(root,text = 0,padx = 40 , pady =20, command = c0).grid(row =4, column = 0)
add_button = Button(root,text = "+",padx = 39, pady = 20, command =cadd,bg="#D1E3D1").grid(row =4,column =1)
equal_button = Button(root,text = "=", padx = 39,pady=20, command = button_equal,bg="#e9def2").grid(row = 5,column=0)
minus_button = Button(root,text = "-", padx=40,pady=20 ,command = cminus,bg="#D1E3D1").grid(row=4,column=2)
multiply_button = Button(root,text = "X", padx=40,pady=20 ,command = cmultiply,bg="#D1E3D1").grid(row=5,column=1)
dividing_button = Button(root,text = "÷", padx=40,pady=20 ,command = cdividing,bg="#D1E3D1").grid(row=5,column=2)
clear_Button = Button(root,text = "Clear",padx = 125, pady = 20,bg="#D9DEF2", command=button_clear).grid(row = 6 , column =0,columnspan = 3 )
root.mainloop()
|
# Differences between Programmers and Baekjoon:
# 1. The solution function receives its input as an array (here, an array of strings)!!
# 2. Running on the judge site is more intuitive than running in PyCharm..
# 3. Need to get used to it by solving several problems
def solution(record):
    """Build the displayed chat log for a list of 'Enter/Leave/Change uid [nick]'
    records, where every message shows a user's FINAL nickname.

    Fixed: the original recomputed ``mes.split(' ')`` up to three times per
    record entry; the split is now done once per entry. Behavior unchanged.
    """
    answer = []
    user_dict = {}
    # First pass: the last Enter/Change per uid fixes the final nickname.
    for mes in record:
        parts = mes.split(' ')
        if parts[0] in ('Enter', 'Change'):
            user_dict[parts[1]] = parts[2]
    # Second pass: render Enter/Leave events using the final nicknames.
    for mes in record:
        parts = mes.split(' ')
        if parts[0] == 'Enter':
            answer.append(user_dict[parts[1]] + '님이 들어왔습니다.')
        elif parts[0] == 'Leave':
            answer.append(user_dict[parts[1]] + '님이 나갔습니다.')
    return answer
|
#!/usr/bin/python3
from setuptools.command.setopt import config_file
import nntplib
from fastemail import FastEmail
# REST Service for managing FastEmail
# creates:
# endpoint: /addmailbox
application_name = "Thomson Reuters FastEmail"
default_port = 10285
default_mailrootdir = "/vmail"
default_mailbox_config = default_mailrootdir + "/cfg/mailbox.json"
mail_uid = 5000
mail_gid = 5000
mailbox_config = default_mailbox_config
service_port = default_port
mailrootdir = default_mailrootdir
verbose = 0
import os
import glob
import time
import sys
import syslog
import getopt
import requests
import subprocess
import pwd
import grp
import base64
import boto3
from flask import Flask, url_for
from flask import Response
from flask import request
from flask import json
app = Flask(__name__)
@app.route('/')
def api_root():
    """Landing page: greet the caller and show the active config file path."""
    return ('Welcome to the FastEmail Configuration Service\n'
            'The config file is located at: ' + mailbox_config)
@app.route('/addmailbox', methods = ['POST'])
def api_addmailbox():
    """Create/update a mailbox from a JSON POST body.

    Expected keys: email, ses_region, description, enabled, locations.
    Responds 200 with {'success': '1'} on success. NOTE(review): non-JSON
    requests get a plain "415 ..." string returned with HTTP status 200.
    """
    if request.headers['Content-Type'] == 'application/json':
        msg = request.json
    else:
        return "415 Unsupported Media Type ;)"
    if (verbose):
        print (json.dumps(msg))
    # lock -> read -> mutate -> write -> unlock around the shared config file
    fe = FastEmail(msg["email"], mailbox_config, mailrootdir, msg["ses_region"], msg["description"], mail_uid, mail_gid)
    fe.lock()
    config = fe.read_local_config()  # NOTE(review): return value unused
    fe.add_local_config(msg["enabled"])
    fe.add_locations(msg["locations"])
    fe.write_local_config()
    fe.unlock()
    data = { 'success' : '1' }
    js = json.dumps(data)
    resp = Response(js, status=200, mimetype='application/json')
    return resp
def parse_argv():
    """Parse sys.argv into an options dict.

    Returns {"mailboxfile", "rootdir", "verbose", "port", "gid", "uid"},
    each None unless supplied. Unknown options print usage and exit(2);
    -h/--help prints usage and exits.
    """
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], "hr:m:p:u:g:v",
            ["help", "rootdir=", "mailboxfile=", "port=", "uid=", "gid=",
             "verbose"])
    except getopt.GetoptError as err:
        print(err)  # e.g. "option -a not recognized"
        usage()
        sys.exit(2)
    parsed = {"mailboxfile": None, "rootdir": None, "verbose": None,
              "port": None, "gid": None, "uid": None}
    for option, value in opts:
        if option in ("-h", "--help"):
            usage()
            sys.exit()
        elif option in ("-m", "--mailboxfile"):
            parsed["mailboxfile"] = value
        elif option in ("-p", "--port"):
            parsed["port"] = value
        elif option in ("-r", "--rootdir"):
            parsed["rootdir"] = value
        elif option in ("-g", "--gid"):
            parsed["gid"] = value
        elif option in ("-u", "--uid"):
            parsed["uid"] = value
        elif option in ("-v", "--verbose"):
            parsed["verbose"] = True
        else:
            assert False, "unhandled option"
    return parsed
def usage():
    """Print command line usage.

    NOTE(review): the help text omits the -r/-u/-g/-v options that
    parse_argv actually accepts, and the closing ``]`` of [-p|--port=<port>.
    """
    print("""Usage: fastemail_REST.py [-m|--mailboxfile=<mailboxfile>] [-p|--port=<port>""")
# parse arguments
if len(sys.argv) > 1:
opts = parse_argv()
if opts["mailboxfile"] != None:
mailbox_config = opts["mailboxfile"]
if opts["port"] != None:
service_port = opts["port"]
if opts["rootdir"] != None:
mailrootdir = opts["rootdir"]
if opts["uid"] != None:
mail_uid = int(opts["uid"])
if opts["gid"] != None:
mail_gid = int(opts["gid"])
if opts["verbose"] != None:
verbose=True
if (verbose):
print ("FastEmail REST service started:")
print (" mailroot at " + mailrootdir)
print (" mailbox config at " + mailbox_config)
print (" port is %d" % service_port)
print (" uid is %d" % mail_uid)
print (" gid is %d" % mail_gid)
if __name__ == '__main__':
app.run(host="0.0.0.0", port=service_port)
|
from models.PyCryptoBot import PyCryptoBot
from models.exchange.binance import AuthAPI as BAuthAPI
from models.exchange.coinbase_pro import AuthAPI as CAuthAPI
# Coinbase Pro fees
app = PyCryptoBot(exchange='coinbasepro')
api = CAuthAPI(app.getAPIKey(), app.getAPISecret(), app.getAPIPassphrase(), app.getAPIURL())
#print (api.getTakerFee())
#print (api.getTakerFee('BTC-GBP'))
#print (api.getMakerFee())
#print (api.getMakerFee('BTC-GBP'))
#print (api.getFees('BTCGBP'))
#print (api.getFees())
print (app.getMakerFee())
print (app.getTakerFee())
# Binance fees
app = PyCryptoBot(exchange='binance')
api = BAuthAPI(app.getAPIKey(), app.getAPISecret(), app.getAPIURL())
#print (api.getTakerFee())
#print (api.getTakerFee('BTCGBP'))
#print (api.getMakerFee())
#print (api.getMakerFee('BTCGBP'))
#print (api.getFees('BTCGBP'))
#print (api.getFees())
print (app.getMakerFee())
print (app.getTakerFee()) |
# Read a line of nine space-separated integers from stdin (Python 2:
# raw_input) and print the smallest one.
inp1 = raw_input()
tokens = inp1.split(" ")
values = []
for position in range(0, 9):
    values.append(int(tokens[position]))
print(min(values))
|
import os
from typing import Iterable, List
import pytest
from jina.drivers.search import KVSearchDriver
from jina.executors.indexers.keyvalue import BinaryPbIndexer
from jina.flow import Flow
from jina import Document, DocumentArray
from tests import validate_callback
cur_dir = os.path.dirname(os.path.abspath(__file__))
class SearchDocIndexer(BinaryPbIndexer):
    """BinaryPbIndexer that looks up one serialized document by jina id."""

    def query(self, jina_id: str = None, mongo_ids: List[str] = None):
        # Delegate to the parent lookup with a single-element key list and
        # unwrap the lone serialized document from the result.
        results = super().query([jina_id])
        return results[0]

    def post_init(self):
        super().post_init()
        # key to have user workaround https://github.com/jina-ai/jina/issues/2295.
        # Underlying problem in https://github.com/jina-ai/jina/issues/2299
        self.name = 'doc_idx_file'
class SearchDocDriver(KVSearchDriver):
    """KVSearchDriver that enriches query docs with their stored payload."""

    def _apply_all(
        self, doc_sequences: Iterable['DocumentArray'], *args, **kwargs
    ) -> None:
        for doc_array in doc_sequences:
            for doc in doc_array:
                serialized = self.exec_fn(jina_id=doc.id)
                if serialized:
                    # Merge the indexed, serialized document into the query doc.
                    doc.MergeFrom(Document(serialized))
@pytest.fixture
def test_workspace(tmpdir):
    """Expose the pytest tmpdir via TEST_2295_WORKSPACE for one test.

    Fix: the original deleted the variable after a bare ``yield``; if the
    test body raised, the exception was thrown into the generator and the
    cleanup was skipped, leaking the env var into later tests.
    """
    os.environ['TEST_2295_WORKSPACE'] = str(tmpdir)
    try:
        yield
    finally:
        del os.environ['TEST_2295_WORKSPACE']
def test_issue_2295(test_workspace, mocker):
    """Index one doc, query by id, and verify the stored text is merged back.

    Exercises the SearchDocIndexer/SearchDocDriver workaround defined above.
    """
    # This tests the proposed workaround to user in 2295, once https://github.com/jina-ai/jina/issues/2299 this test
    # can be removed
    def validate_response(resp):
        # The query doc carried only an id; the text must come from the index.
        assert resp.search.docs[0].id == 'id'
        assert resp.search.docs[0].text == 'text'
    index_set = DocumentArray([Document(id='id', text='text')])
    query_set = DocumentArray([Document(id='id')])
    # Index flow and query flow are configured by YAML files next to this test.
    with Flow.load_config(os.path.join(cur_dir, 'flow_index.yml')) as f:
        f.index(inputs=index_set)
    mock_on_done = mocker.Mock()
    with Flow.load_config(os.path.join(cur_dir, 'flow_query.yml')) as f:
        f.search(inputs=query_set, on_done=mock_on_done)
    # validate_callback replays the mocked on_done against validate_response.
    validate_callback(mock_on_done, validate_response)
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: Top Block
# Generated: Sat Oct 20 11:58:58 2018
##################################################
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print "Warning: failed to XInitThreads()"
from gnuradio import analog
from gnuradio import audio
from gnuradio import blocks
from gnuradio import eng_notation
from gnuradio import filter
from gnuradio import gr
from gnuradio import wxgui
from gnuradio.eng_option import eng_option
from gnuradio.fft import window
from gnuradio.filter import firdes
from gnuradio.wxgui import fftsink2
from grc_gnuradio import wxgui as grc_wxgui
from optparse import OptionParser
import osmosdr
import wx
class top_block(grc_wxgui.top_block_gui):
def __init__(self):
grc_wxgui.top_block_gui.__init__(self, title="Top Block")
##################################################
# Variables
##################################################
self.samp_rate = samp_rate = 2e6
self.freq = freq = 102e6
##################################################
# Blocks
##################################################
self.wxgui_fftsink2_0 = fftsink2.fft_sink_c(
self.GetWin(),
baseband_freq=freq,
y_per_div=10,
y_divs=10,
ref_level=0,
ref_scale=2.0,
sample_rate=samp_rate,
fft_size=1024,
fft_rate=15,
average=False,
avg_alpha=None,
title='source Plot',
peak_hold=False,
)
self.Add(self.wxgui_fftsink2_0.win)
self.show_rational = fftsink2.fft_sink_c(
self.GetWin(),
baseband_freq=freq,
y_per_div=10,
y_divs=10,
ref_level=0,
ref_scale=2.0,
sample_rate=samp_rate,
fft_size=1024,
fft_rate=15,
average=False,
avg_alpha=None,
title=' rational Plot',
peak_hold=False,
)
self.Add(self.show_rational.win)
self.rational_resampler_xxx_1 = filter.rational_resampler_fff(
interpolation=25,
decimation=25,
taps=None,
fractional_bw=None,
)
self.rational_resampler_xxx_0 = filter.rational_resampler_ccc(
interpolation=1,
decimation=8,
taps=None,
fractional_bw=None,
)
self.osmosdr_source_0 = osmosdr.source( args="numchan=" + str(1) + " " + '' )
self.osmosdr_source_0.set_sample_rate(samp_rate)
self.osmosdr_source_0.set_center_freq(freq, 0)
self.osmosdr_source_0.set_freq_corr(0, 0)
self.osmosdr_source_0.set_dc_offset_mode(0, 0)
self.osmosdr_source_0.set_iq_balance_mode(0, 0)
self.osmosdr_source_0.set_gain_mode(False, 0)
self.osmosdr_source_0.set_gain(10, 0)
self.osmosdr_source_0.set_if_gain(20, 0)
self.osmosdr_source_0.set_bb_gain(20, 0)
self.osmosdr_source_0.set_antenna('', 0)
self.osmosdr_source_0.set_bandwidth(0, 0)
self.low_pass_filter_0 = filter.fir_filter_ccf(1, firdes.low_pass(
1, samp_rate, 256e3, 100e3, firdes.WIN_HAMMING, 6.76))
self.blocks_multiply_const_change_the_sound_db = blocks.multiply_const_vff((3, ))
self.audio_sink_0 = audio.sink(24000, '', True)
self.analog_wfm_rcv_0 = analog.wfm_rcv(
quad_rate=250e3,
audio_decimation=10,
)
self.after_lowfil_0 = fftsink2.fft_sink_f(
self.GetWin(),
baseband_freq=25e3,
y_per_div=10,
y_divs=10,
ref_level=0,
ref_scale=2.0,
sample_rate=samp_rate,
fft_size=1024,
fft_rate=15,
average=False,
avg_alpha=None,
title='before audio card Plot',
peak_hold=False,
)
self.Add(self.after_lowfil_0.win)
self.after_lowfil = fftsink2.fft_sink_c(
self.GetWin(),
baseband_freq=freq,
y_per_div=10,
y_divs=10,
ref_level=0,
ref_scale=2.0,
sample_rate=samp_rate,
fft_size=1024,
fft_rate=15,
average=False,
avg_alpha=None,
title='lowfil Plot',
peak_hold=False,
)
self.Add(self.after_lowfil.win)
##################################################
# Connections
##################################################
self.connect((self.analog_wfm_rcv_0, 0), (self.rational_resampler_xxx_1, 0))
self.connect((self.blocks_multiply_const_change_the_sound_db, 0), (self.after_lowfil_0, 0))
self.connect((self.blocks_multiply_const_change_the_sound_db, 0), (self.audio_sink_0, 0))
self.connect((self.low_pass_filter_0, 0), (self.after_lowfil, 0))
self.connect((self.low_pass_filter_0, 0), (self.analog_wfm_rcv_0, 0))
self.connect((self.osmosdr_source_0, 0), (self.rational_resampler_xxx_0, 0))
self.connect((self.osmosdr_source_0, 0), (self.wxgui_fftsink2_0, 0))
self.connect((self.rational_resampler_xxx_0, 0), (self.low_pass_filter_0, 0))
self.connect((self.rational_resampler_xxx_0, 0), (self.show_rational, 0))
self.connect((self.rational_resampler_xxx_1, 0), (self.blocks_multiply_const_change_the_sound_db, 0))
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.wxgui_fftsink2_0.set_sample_rate(self.samp_rate)
self.show_rational.set_sample_rate(self.samp_rate)
self.osmosdr_source_0.set_sample_rate(self.samp_rate)
self.low_pass_filter_0.set_taps(firdes.low_pass(1, self.samp_rate, 256e3, 100e3, firdes.WIN_HAMMING, 6.76))
self.after_lowfil_0.set_sample_rate(self.samp_rate)
self.after_lowfil.set_sample_rate(self.samp_rate)
def get_freq(self):
return self.freq
def set_freq(self, freq):
self.freq = freq
self.wxgui_fftsink2_0.set_baseband_freq(self.freq)
self.show_rational.set_baseband_freq(self.freq)
self.osmosdr_source_0.set_center_freq(self.freq, 0)
self.after_lowfil.set_baseband_freq(self.freq)
def main(top_block_cls=top_block, options=None):
tb = top_block_cls()
tb.Start(True)
tb.Wait()
if __name__ == '__main__':
main()
|
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.http import HttpResponse
from .models import Picture, Ocassion, Person
# Create your views here.
def index(request):
    """Landing page: the ten most recently uploaded pictures."""
    # Only show the first 10 most recent photos
    pictures = Picture.objects.order_by('-uploadDate')[:10]
    # For the page header
    ocassions = Ocassion.objects.all()
    # render_to_response() was deprecated in Django 2.0 and removed in 3.0;
    # render() (already imported in this module) also passes the request so
    # context processors keep working.
    return render(request, 'photos/index.html',
                  {'pictures': pictures, 'ocassions': ocassions})
def ocassion(request, ocassion_id):
    """Show the pictures belonging to one ocassion."""
    pictures = Picture.objects.filter(ocassion__id=ocassion_id)
    selected_ocassion = Ocassion.objects.get(pk=ocassion_id)
    # For the page header
    ocassions = Ocassion.objects.all()
    # Removed a dead `people` queryset that was built but never passed to the
    # template context. render() replaces the removed render_to_response().
    return render(request, 'photos/index.html',
                  {'pictures': pictures,
                   'ocassions': ocassions,
                   'ocassion_id': ocassion_id,
                   'selected_ocassion': selected_ocassion})
def people(request):
    """List every person, ordered by last then first name."""
    # For the page header
    ocassions = Ocassion.objects.all()
    people = Person.objects.order_by('lastName', 'firstName')
    # render() replaces the removed render_to_response(); commented-out
    # per-person variant dropped (see person() below).
    return render(request, 'photos/index.html',
                  {'ocassions': ocassions, 'people': people})
def person(request, person_id):
    """Show the pictures featuring one person."""
    pictures = Picture.objects.filter(people__id=person_id)
    # For the page header
    ocassions = Ocassion.objects.all()
    # render() replaces the removed render_to_response().
    return render(request, 'photos/index.html',
                  {'ocassions': ocassions, 'pictures': pictures})
|
import FileOperations
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
import tensorflow_docs as tfdocs
import tensorflow_docs.modeling
def build_model():
    """Build and compile a small dense regression net mapping measured
    (x, y) positions to reference (x, y) positions."""
    # Dense units: dimensionality of each layer's output space.
    # 'relu' activation: f(z) = 0 for z < 0, f(z) = z otherwise.
    # input_shape: number of columns of the measurement dataframe.
    # NOTE(review): reads the module-level train_dataset_measurement list
    # populated in __main__ — must be called after that setup.
    model = keras.Sequential([
        layers.Dense(8, activation='relu', input_shape=[len(train_dataset_measurement[0].keys())]),
        layers.Dense(16, activation='relu'),
        layers.Dense(32, activation='relu'),
        layers.Dense(64, activation='relu'),
        layers.Dense(2)
    ])
    # Loss 'mse' (mean squared error): expected squared difference between the
    # estimate and the true value.  Metric 'mae' (mean absolute error): the
    # average absolute deviation of the prediction from the true value.
    model.compile(loss='mse', optimizer = tf.keras.optimizers.RMSprop(0.001), metrics=['mae', 'mse'])
    return model
if __name__ == '__main__':
    # NOTE(review): these bind the pandas DataFrame *class*, not instances;
    # every one is overwritten before use, so they serve only as declarations.
    solution = pd.DataFrame
    solution_measurements = pd.DataFrame
    solution_references = pd.DataFrame
    train_dataset_measurement = []
    train_dataset_reference = []
    train_datasets_measurement = pd.DataFrame
    train_datasets_reference = pd.DataFrame
    # Load the 12 measurement workbooks, splitting each into measurement
    # columns (inputs) and reference columns (targets).
    for i in range(12):
        train_dataset_measurement.append(FileOperations.FileOperations.load_data_from_excel
            (r'pozyxAPI_dane_pomiarowe\pozyxAPI_only_localization_measurement' + str(i+1) + '.xlsx', "measurement", "D:H"))
        train_dataset_reference.append(train_dataset_measurement[i][['reference x', 'reference y']])
        train_dataset_measurement[i] = train_dataset_measurement[i][['measurement x', 'measurement y']]
    # Held-out evaluation workbook.
    solution = FileOperations.FileOperations.load_data_from_excel(r'pozyxAPI_only_localization_dane_testowe_i_dystrybuanta.xlsx', "pomiar", "D:M")
    solution_measurements = solution[['measurement x', 'measurement y']].dropna()
    solution_references = solution[['reference x', 'reference y']].dropna()
    # Stack the 12 per-file frames into one training set.
    train_datasets_measurement = pd.concat(train_dataset_measurement, ignore_index=True)
    train_datasets_reference = pd.concat(train_dataset_reference, ignore_index=True)
    model = build_model()
    # model.summary()
    # validation split - Fraction of the training data to be used as validation data
    # verbose: Integer. 0, 1, or 2. Verbosity mode. 0 = silent, 1 = progress bar, 2 = one line per epoch.
    # tfdocs.EpochDots simply prints a . for each epoch, and a full set of metrics every 100 epochs.
    # EarlyStopping callback tests a training condition for every epoch. If a set amount of epochs elapses without showing improvement, then it stops the training
    history = model.fit(train_datasets_measurement, train_datasets_reference, epochs=1000, validation_split=0.1, verbose=0, callbacks=[tfdocs.modeling.EpochDots()])
    solution_predictions = model.predict(solution_measurements)
    solution_predictions_dataframe = pd.DataFrame(solution_predictions, columns=['x', 'y'])
    # Visual comparison: predictions vs references vs raw measurements.
    sns.relplot(x='x', y='y', data=solution_predictions_dataframe)
    sns.relplot(x='reference x', y='reference y', data=solution_references)
    sns.relplot(x='measurement x', y='measurement y', data=solution_measurements)
    plt.show()
|
"""
https://contest.yandex.ru/contest/23389/problems/H/
H. Двоичная система
Ограничение времени 0.07 секунд
Ограничение памяти 39Mb
Тимофей спросил у Гоши, умеет ли тот работать с числами в двоичной системе
счисления. Он ответил, что проходил это на одной из первых лекций по
информатике. Тимофей предложил Гоше решить задачку. Два числа записаны в
двоичной системе счисления. Нужно вывести их сумму, также в двоичной
системе. Встроенную в язык программирования возможность сложения двоичных
чисел применять нельзя.
Решение должно работать за O(N), где N –— количество разрядов максимального
числа на входе.
Формат ввода
Два числа в двоичной системе счисления, каждое на отдельной строке. Длина
каждого числа не превосходит 10 000 символов.
Формат вывода
Одно число в двоичной системе счисления.
Пример 1
Ввод
1010
1011
Вывод
10101
Пример 2
Ввод
1
1
Вывод
10
"""
import logging
# Module-level DEBUG logger writing to stderr; used to trace the operands and
# intermediate digit lists while checking answers from input.txt.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(levelname)s, %(message)s'))
logger.addHandler(handler)
def _add_binary(first, second):
    """Add two binary-number strings with a manual carry in O(N).

    Built-in binary addition is deliberately not used (task constraint).
    Returns the sum as a binary string.
    """
    # Work least-significant digit first.
    digits1 = [int(ch) for ch in first[::-1]]
    logger.debug(digits1)
    digits2 = [int(ch) for ch in second[::-1]]
    # Zero-pad the shorter operand to a common length.
    size = max(len(digits1), len(digits2))
    digits1 += [0] * (size - len(digits1))
    digits2 += [0] * (size - len(digits2))
    carry = 0
    summed = []
    for d1, d2 in zip(digits1, digits2):
        carry, bit = divmod(d1 + d2 + carry, 2)
        summed.append(bit)
    if carry == 1:
        summed.append(1)
    return ''.join(str(bit) for bit in reversed(summed))


def run():
    """Check the binary-addition answers stored in input.txt.

    The file holds groups of three lines: operand 1, operand 2 and the
    expected sum, all in base 2.  Each complete group is verified with an
    assert; a trailing partial group is ignored, matching the original
    hand-rolled line-counter state machine this replaces.
    """
    with open('input.txt') as file:
        lines = [line.replace('\n', '') for line in file]
    for start in range(0, len(lines) - len(lines) % 3, 3):
        digit1, digit2, answer = lines[start:start + 3]
        logger.debug(f'{digit1} + {digit2} = {answer}')
        res = _add_binary(digit1, digit2)
        logger.debug(f'Ответ: {res}')
        assert answer == res, f'{answer} != {res}'
        # print(res)
# Run the checker when executed as a script.
if __name__ == '__main__':
    run()
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
INCLUDES = """
#include <openssl/x509_vfy.h>
/*
* This is part of a work-around for the difficulty cffi has in dealing with
* `STACK_OF(foo)` as the name of a type. We invent a new, simpler name that
* will be an alias for this type and use the alias throughout. This works
* together with another opaque typedef for the same name in the TYPES section.
* Note that the result is an opaque type.
*/
typedef STACK_OF(ASN1_OBJECT) Cryptography_STACK_OF_ASN1_OBJECT;
typedef STACK_OF(X509_OBJECT) Cryptography_STACK_OF_X509_OBJECT;
"""
TYPES = """
static const long Cryptography_HAS_102_VERIFICATION_ERROR_CODES;
static const long Cryptography_HAS_102_VERIFICATION_PARAMS;
static const long Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST;
static const long Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN;
typedef ... Cryptography_STACK_OF_ASN1_OBJECT;
typedef ... Cryptography_STACK_OF_X509_OBJECT;
typedef ... X509_OBJECT;
typedef ... X509_STORE;
typedef ... X509_VERIFY_PARAM;
typedef ... X509_STORE_CTX;
/* While these are defined in the source as ints, they're tagged here
as longs, just in case they ever grow to large, such as what we saw
with OP_ALL. */
/* Verification error codes */
static const int X509_V_OK;
static const int X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT;
static const int X509_V_ERR_UNABLE_TO_GET_CRL;
static const int X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE;
static const int X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE;
static const int X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY;
static const int X509_V_ERR_CERT_SIGNATURE_FAILURE;
static const int X509_V_ERR_CRL_SIGNATURE_FAILURE;
static const int X509_V_ERR_CERT_NOT_YET_VALID;
static const int X509_V_ERR_CERT_HAS_EXPIRED;
static const int X509_V_ERR_CRL_NOT_YET_VALID;
static const int X509_V_ERR_CRL_HAS_EXPIRED;
static const int X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD;
static const int X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD;
static const int X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD;
static const int X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD;
static const int X509_V_ERR_OUT_OF_MEM;
static const int X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT;
static const int X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN;
static const int X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY;
static const int X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE;
static const int X509_V_ERR_CERT_CHAIN_TOO_LONG;
static const int X509_V_ERR_CERT_REVOKED;
static const int X509_V_ERR_INVALID_CA;
static const int X509_V_ERR_PATH_LENGTH_EXCEEDED;
static const int X509_V_ERR_INVALID_PURPOSE;
static const int X509_V_ERR_CERT_UNTRUSTED;
static const int X509_V_ERR_CERT_REJECTED;
static const int X509_V_ERR_SUBJECT_ISSUER_MISMATCH;
static const int X509_V_ERR_AKID_SKID_MISMATCH;
static const int X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH;
static const int X509_V_ERR_KEYUSAGE_NO_CERTSIGN;
static const int X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER;
static const int X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION;
static const int X509_V_ERR_KEYUSAGE_NO_CRL_SIGN;
static const int X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION;
static const int X509_V_ERR_INVALID_NON_CA;
static const int X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED;
static const int X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE;
static const int X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED;
static const int X509_V_ERR_INVALID_EXTENSION;
static const int X509_V_ERR_INVALID_POLICY_EXTENSION;
static const int X509_V_ERR_NO_EXPLICIT_POLICY;
static const int X509_V_ERR_DIFFERENT_CRL_SCOPE;
static const int X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE;
static const int X509_V_ERR_UNNESTED_RESOURCE;
static const int X509_V_ERR_PERMITTED_VIOLATION;
static const int X509_V_ERR_EXCLUDED_VIOLATION;
static const int X509_V_ERR_SUBTREE_MINMAX;
static const int X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE;
static const int X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX;
static const int X509_V_ERR_UNSUPPORTED_NAME_SYNTAX;
static const int X509_V_ERR_CRL_PATH_VALIDATION_ERROR;
static const int X509_V_ERR_SUITE_B_INVALID_VERSION;
static const int X509_V_ERR_SUITE_B_INVALID_ALGORITHM;
static const int X509_V_ERR_SUITE_B_INVALID_CURVE;
static const int X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM;
static const int X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED;
static const int X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256;
static const int X509_V_ERR_HOSTNAME_MISMATCH;
static const int X509_V_ERR_EMAIL_MISMATCH;
static const int X509_V_ERR_IP_ADDRESS_MISMATCH;
static const int X509_V_ERR_APPLICATION_VERIFICATION;
/* Verification parameters */
static const long X509_V_FLAG_CB_ISSUER_CHECK;
static const long X509_V_FLAG_USE_CHECK_TIME;
static const long X509_V_FLAG_CRL_CHECK;
static const long X509_V_FLAG_CRL_CHECK_ALL;
static const long X509_V_FLAG_IGNORE_CRITICAL;
static const long X509_V_FLAG_X509_STRICT;
static const long X509_V_FLAG_ALLOW_PROXY_CERTS;
static const long X509_V_FLAG_POLICY_CHECK;
static const long X509_V_FLAG_EXPLICIT_POLICY;
static const long X509_V_FLAG_INHIBIT_ANY;
static const long X509_V_FLAG_INHIBIT_MAP;
static const long X509_V_FLAG_NOTIFY_POLICY;
static const long X509_V_FLAG_EXTENDED_CRL_SUPPORT;
static const long X509_V_FLAG_USE_DELTAS;
static const long X509_V_FLAG_CHECK_SS_SIGNATURE;
static const long X509_V_FLAG_TRUSTED_FIRST;
static const long X509_V_FLAG_SUITEB_128_LOS_ONLY;
static const long X509_V_FLAG_SUITEB_192_LOS;
static const long X509_V_FLAG_SUITEB_128_LOS;
static const long X509_V_FLAG_PARTIAL_CHAIN;
static const long X509_LU_X509;
static const long X509_LU_CRL;
"""
FUNCTIONS = """
int X509_verify_cert(X509_STORE_CTX *);
/* X509_STORE */
X509_STORE *X509_STORE_new(void);
int X509_STORE_add_cert(X509_STORE *, X509 *);
int X509_STORE_add_crl(X509_STORE *, X509_CRL *);
int X509_STORE_load_locations(X509_STORE *, const char *, const char *);
int X509_STORE_set1_param(X509_STORE *, X509_VERIFY_PARAM *);
int X509_STORE_set_default_paths(X509_STORE *);
int X509_STORE_set_flags(X509_STORE *, unsigned long);
void X509_STORE_free(X509_STORE *);
/* X509_STORE_CTX */
X509_STORE_CTX *X509_STORE_CTX_new(void);
void X509_STORE_CTX_cleanup(X509_STORE_CTX *);
void X509_STORE_CTX_free(X509_STORE_CTX *);
int X509_STORE_CTX_init(X509_STORE_CTX *, X509_STORE *, X509 *,
Cryptography_STACK_OF_X509 *);
void X509_STORE_CTX_trusted_stack(X509_STORE_CTX *,
Cryptography_STACK_OF_X509 *);
void X509_STORE_CTX_set_cert(X509_STORE_CTX *, X509 *);
void X509_STORE_CTX_set_chain(X509_STORE_CTX *,Cryptography_STACK_OF_X509 *);
X509_VERIFY_PARAM *X509_STORE_CTX_get0_param(X509_STORE_CTX *);
void X509_STORE_CTX_set0_param(X509_STORE_CTX *, X509_VERIFY_PARAM *);
int X509_STORE_CTX_set_default(X509_STORE_CTX *, const char *);
void X509_STORE_CTX_set_verify_cb(X509_STORE_CTX *,
int (*)(int, X509_STORE_CTX *));
Cryptography_STACK_OF_X509 *X509_STORE_CTX_get_chain(X509_STORE_CTX *);
Cryptography_STACK_OF_X509 *X509_STORE_CTX_get1_chain(X509_STORE_CTX *);
int X509_STORE_CTX_get_error(X509_STORE_CTX *);
void X509_STORE_CTX_set_error(X509_STORE_CTX *, int);
int X509_STORE_CTX_get_error_depth(X509_STORE_CTX *);
X509 *X509_STORE_CTX_get_current_cert(X509_STORE_CTX *);
int X509_STORE_CTX_set_ex_data(X509_STORE_CTX *, int, void *);
void *X509_STORE_CTX_get_ex_data(X509_STORE_CTX *, int);
/* X509_VERIFY_PARAM */
X509_VERIFY_PARAM *X509_VERIFY_PARAM_new(void);
int X509_VERIFY_PARAM_set_flags(X509_VERIFY_PARAM *, unsigned long);
int X509_VERIFY_PARAM_clear_flags(X509_VERIFY_PARAM *, unsigned long);
unsigned long X509_VERIFY_PARAM_get_flags(X509_VERIFY_PARAM *);
int X509_VERIFY_PARAM_set_purpose(X509_VERIFY_PARAM *, int);
int X509_VERIFY_PARAM_set_trust(X509_VERIFY_PARAM *, int);
void X509_VERIFY_PARAM_set_time(X509_VERIFY_PARAM *, time_t);
int X509_VERIFY_PARAM_add0_policy(X509_VERIFY_PARAM *, ASN1_OBJECT *);
int X509_VERIFY_PARAM_set1_policies(X509_VERIFY_PARAM *,
Cryptography_STACK_OF_ASN1_OBJECT *);
void X509_VERIFY_PARAM_set_depth(X509_VERIFY_PARAM *, int);
int X509_VERIFY_PARAM_get_depth(const X509_VERIFY_PARAM *);
void X509_VERIFY_PARAM_free(X509_VERIFY_PARAM *);
"""
MACROS = """
/* this CRYPTO_EX_DATA function became a macro in 1.1.0 */
int X509_STORE_CTX_get_ex_new_index(long, void *, CRYPTO_EX_new *,
CRYPTO_EX_dup *, CRYPTO_EX_free *);
/* X509_STORE_CTX */
void X509_STORE_CTX_set0_crls(X509_STORE_CTX *,
Cryptography_STACK_OF_X509_CRL *);
/* X509_VERIFY_PARAM */
int X509_VERIFY_PARAM_set1_host(X509_VERIFY_PARAM *, const char *,
size_t);
void X509_VERIFY_PARAM_set_hostflags(X509_VERIFY_PARAM *, unsigned int);
int X509_VERIFY_PARAM_set1_email(X509_VERIFY_PARAM *, const char *,
size_t);
int X509_VERIFY_PARAM_set1_ip(X509_VERIFY_PARAM *, const unsigned char *,
size_t);
int X509_VERIFY_PARAM_set1_ip_asc(X509_VERIFY_PARAM *, const char *);
int sk_X509_OBJECT_num(Cryptography_STACK_OF_X509_OBJECT *);
X509_OBJECT *sk_X509_OBJECT_value(Cryptography_STACK_OF_X509_OBJECT *, int);
X509_VERIFY_PARAM *X509_STORE_get0_param(X509_STORE *);
Cryptography_STACK_OF_X509_OBJECT *X509_STORE_get0_objects(X509_STORE *);
X509 *X509_OBJECT_get0_X509(X509_OBJECT *);
int X509_OBJECT_get_type(const X509_OBJECT *);
"""
CUSTOMIZATIONS = """
/* OpenSSL 1.0.2beta2+ verification error codes */
#if CRYPTOGRAPHY_OPENSSL_102BETA2_OR_GREATER && \
!defined(LIBRESSL_VERSION_NUMBER)
static const long Cryptography_HAS_102_VERIFICATION_ERROR_CODES = 1;
#else
static const long Cryptography_HAS_102_VERIFICATION_ERROR_CODES = 0;
static const long X509_V_ERR_SUITE_B_INVALID_VERSION = 0;
static const long X509_V_ERR_SUITE_B_INVALID_ALGORITHM = 0;
static const long X509_V_ERR_SUITE_B_INVALID_CURVE = 0;
static const long X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM = 0;
static const long X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED = 0;
static const long X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256 = 0;
static const long X509_V_ERR_HOSTNAME_MISMATCH = 0;
static const long X509_V_ERR_EMAIL_MISMATCH = 0;
static const long X509_V_ERR_IP_ADDRESS_MISMATCH = 0;
#endif
/* OpenSSL 1.0.2beta2+ verification parameters */
#if CRYPTOGRAPHY_OPENSSL_102BETA2_OR_GREATER && \
!defined(LIBRESSL_VERSION_NUMBER)
static const long Cryptography_HAS_102_VERIFICATION_PARAMS = 1;
#else
static const long Cryptography_HAS_102_VERIFICATION_PARAMS = 0;
/* X509_V_FLAG_TRUSTED_FIRST is also new in 1.0.2+, but it is added separately
below because it shows up in some earlier 3rd party OpenSSL packages. */
static const long X509_V_FLAG_SUITEB_128_LOS_ONLY = 0;
static const long X509_V_FLAG_SUITEB_192_LOS = 0;
static const long X509_V_FLAG_SUITEB_128_LOS = 0;
int (*X509_VERIFY_PARAM_set1_host)(X509_VERIFY_PARAM *, const char *,
size_t) = NULL;
int (*X509_VERIFY_PARAM_set1_email)(X509_VERIFY_PARAM *, const char *,
size_t) = NULL;
int (*X509_VERIFY_PARAM_set1_ip)(X509_VERIFY_PARAM *, const unsigned char *,
size_t) = NULL;
int (*X509_VERIFY_PARAM_set1_ip_asc)(X509_VERIFY_PARAM *, const char *) = NULL;
void (*X509_VERIFY_PARAM_set_hostflags)(X509_VERIFY_PARAM *,
unsigned int) = NULL;
#endif
/* OpenSSL 1.0.2+ or Solaris's backport */
#ifdef X509_V_FLAG_PARTIAL_CHAIN
static const long Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN = 1;
#else
static const long Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN = 0;
static const long X509_V_FLAG_PARTIAL_CHAIN = 0;
#endif
/* OpenSSL 1.0.2+, *or* Fedora 20's flavor of OpenSSL 1.0.1e... */
#ifdef X509_V_FLAG_TRUSTED_FIRST
static const long Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST = 1;
#else
static const long Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST = 0;
static const long X509_V_FLAG_TRUSTED_FIRST = 0;
#endif
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110PRE6 || defined(LIBRESSL_VERSION_NUMBER)
Cryptography_STACK_OF_X509_OBJECT *X509_STORE_get0_objects(X509_STORE *ctx) {
return ctx->objs;
}
X509_VERIFY_PARAM *X509_STORE_get0_param(X509_STORE *store) {
return store->param;
}
int X509_OBJECT_get_type(const X509_OBJECT *x) {
return x->type;
}
#endif
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110PRE5 || defined(LIBRESSL_VERSION_NUMBER)
X509 *X509_OBJECT_get0_X509(X509_OBJECT *x) {
return x->data.x509;
}
#endif
"""
|
from keras import *
from keras.layers import *
from keras.regularizers import *
from preprocess import Preprocess
import keras
class Model_KWS():
    """Keyword-spotting models over 40x32 single-channel feature maps with
    12 softmax output classes (labels one-hot encoded)."""
    def __init__(self):
        # Load the preprocessed train/test split and one-hot encode labels.
        processed = Preprocess()
        self.train_set, self.test_set, self.y_train, self.y_test = processed.train_test_split()
        self.y_train = keras.utils.to_categorical(self.y_train)
        self.y_test = keras.utils.to_categorical(self.y_test)
    def cnn_model(self):
        """Plain 3-conv-layer CNN; returns an uncompiled keras Model."""
        cnn_input = Input(shape = (40,32,1))
        # Each conv block: 3x3 'same' conv + L2 weight decay + 20% dropout.
        conv = Conv2D(32, (3,3), padding='same', activation= 'relu', kernel_regularizer=regularizers.l2(0.01))(cnn_input)
        conv = Dropout(.2)(conv)
        conv = Conv2D(32, (3,3), padding='same', activation= 'relu', kernel_regularizer=regularizers.l2(0.01))(conv)
        conv = Dropout(.2)(conv)
        conv = Conv2D(32, (3,3), padding='same', activation= 'relu', kernel_regularizer=regularizers.l2(0.01))(conv)
        conv = Dropout(.2)(conv)
        conv = Flatten()(conv)
        conv = Dense(50, activation = 'relu',kernel_regularizer=regularizers.l2(0.01))(conv)
        # conv = Dense(100, activation = 'relu',kernel_regularizer=regularizers.l2(0.01))(conv)
        conv = Dense(12, activation = 'softmax')(conv)
        model = Model(cnn_input, conv)
        return model
    def resnet_model(self):
        """CNN variant with one residual shortcut from the input; returns an
        uncompiled keras Model.  Not used by run_model() by default."""
        cnn_input = Input(shape = (40,32,1))
        conv = Conv2D(64, (3,3), padding='same', activation= 'relu', kernel_regularizer=regularizers.l2(0.01))(cnn_input)
        conv = Dropout(.2)(conv)
        conv = Conv2D(32, (3,3), padding='same', activation= 'relu', kernel_regularizer=regularizers.l2(0.01))(conv)
        conv = Dropout(.2)(conv)
        conv = Conv2D(8, (3,3), padding='same', activation= 'relu', kernel_regularizer=regularizers.l2(0.01))(conv)
        # conv = Dropout(.2)(conv)
        # Shortcut path projects the input to 8 channels, then is added in.
        conv_shortcut = Conv2D(8, (3,3), padding='same', activation= 'relu', kernel_regularizer=regularizers.l2(0.01))(cnn_input)
        conv_merged = Add()([conv, conv_shortcut])
        conv_merged = Activation('relu')(conv_merged)
        conv = Flatten()(conv_merged)
        conv = Dense(50, activation = 'relu',kernel_regularizer=regularizers.l2(0.01))(conv)
        conv = Dense(100, activation = 'relu',kernel_regularizer=regularizers.l2(0.01))(conv)
        conv = Dense(12, activation = 'softmax')(conv)
        model = Model(cnn_input, conv)
        return model
    def run_model(self):
        """Reshape data to (N, 40, 32, 1), train the CNN for 100 epochs and
        validate against the test split."""
        self.train_set = self.train_set.reshape(self.train_set.shape[0],40,32,1)
        self.test_set = self.test_set.reshape(self.test_set.shape[0],40,32,1)
        model = self.cnn_model()
        model.summary()
        model.compile(optimizer = 'adam', loss = 'categorical_crossentropy', metrics = ['accuracy'])
        model.fit(self.train_set, self.y_train, batch_size = 32, epochs = 100, validation_data = (self.test_set, self.y_test), verbose = 1)
        # model.save_weights('kws.h5')
# Train the CNN variant when executed as a script.
if __name__ == '__main__':
    model_kws = Model_KWS()
    model_kws.run_model()
|
# exam_2_star.py
# link_spam
g = {'a': ['a', 'b', 'c'], 'b': ['a'], 'c': ['d'], 'd': ['a']}
def lookup_link(graph, link_start, link_target, depth):
    """Return True if link_start is found per the exam's link-spam rules.

    Checks, in order: a self-link on link_start (any depth); then, for
    depth >= 1, whether link_target links to link_start; then, for
    depth >= 2, the same question recursively for each page link_target
    links to, with one less step of depth.
    """
    if link_start in graph[link_start]:
        return True
    if depth == 0:
        return False
    if link_start in graph[link_target]:
        return True
    if depth == 1:
        return False
    # depth >= 2 guard preserved: non-positive leftovers must return False.
    if depth >= 2:
        return any(
            lookup_link(graph, link_start, neighbour, depth - 1)
            for neighbour in graph[link_target])
    return False
# for start in ['a', 'b', 'c']:
# for end in ['a', 'b', 'c']:
# for n in range(4):
# print "link from %r to %r in %r steps?: %r" % (
# start, end, n, lookup_link(g, start, end, n))
|
from Tkinter import *
from tkFont import Font
from math import *
from tkFont import Font
from math import *
from tkSimpleDialog import *
import tkMessageBox
import tkFileDialog
import re
import ttk
import json
# Newline shorthand used when assembling G-code text.
CR = '\n'
# Bugfix 25.02.2019 - on MacOS Mojave, Tkinter has trouble displaying some
# widgets (e.g. Buttons): the text is not visible on a Button (Label).
# Fix: import ttk and make all buttons ttk.Button, ttk.Checkbutton, ...
#
# A shared ttk style is an easy way to configure every button identically.
#
# ttk.Radiobuttons are declared inside sub python modules (like contourArc.py)
#
ttkStyle = ttk.Style()
ttkStyle.map(
    "C.TButton",
    foreground=[('pressed', 'blue'), ('active', 'blue')],
    background=[('pressed', '!disabled', 'black'), ('active', 'white')])
ttkStyle.map(
    "C.Tttk.Radiobutton",
    foreground=[('pressed', 'green'), ('active', 'red')],
    background=[('pressed', '!disabled', 'black'), ('active', 'white')])
class GeometricalFrame(Frame):
    def __init__(self, app, master, frame, title, winGeometry="900x750"):
        """Base frame for the geometry editors: sizes the window, loads the
        system config and milling parameters, and applies the defaults."""
        Frame.__init__(self)
        self.app = app
        self.parentFrame = frame
        self.title = title
        self.master = master
        self.master.geometry(winGeometry)
        self.app.master.title(title)
        self.dicMaterials = {} # complete material json list
        self.dicSelectedMaterial = {} # current selected material
        self.dicSystemConfig = {} # loaded system configuration parameters
        # Container for the subclass-specific button row.
        self.frmButtonsIndividualContent = Frame(
            self.parentFrame,
            highlightbackground="darkgray",
            highlightcolor="darkgray",
            highlightthickness=1)
        # format [notuse/use (0/1), GCODE, ....]
        #
        # deprecated in future
        # Default G-code templates; entries are overridden from config.json
        # by updateDefaultFields() below.
        self._standardGCodeSeq = {
            "HOMEING": [
                "G00 Z{0:08.3f} F{1:05.1f} {2}",
                "G00 X{0:08.3f} Y{1:08.3f} F{2:05.1f} {3}"
            ],
            "SPINDLE_CW": ["M3 S{0:08.4f} {1}"],
            "SPINDLE_CCW": ["M4 S{0:08.4f} {1}"],
            "TOOL": ["T{0:03d} {1}M6 {1}"],
            "PREAMBLE": ["G90 G64 G17 G40 G49 {0}"],
            "POSTAMBLE": ["G00 Z10 F100 M2 {0}"],
            "ZAXIS": [3.0, 10.0],
            "TRAVEL_SPEEDXYZ": ["500", "500", "400"], #[X,Y,Z]
        }
        # Order matters: config must be loaded before defaults are applied.
        self.__loadSystemConfig()
        self.__loadMillingParameterFile()
        self.updateDefaultFields()
def destroy(self):
self.frmImage.destroy()
self.frmStandardContent.destroy()
self.frmButtonsIndividualContent.destroy()
self.frmButtons.destroy()
pass
    def show(self,
             showImage=True,
             showStandardContent=True,
             showStandartButton=True):
        """Build the UI: image pane, standard fields, the subclass's
        individual content, and the standard button row; each part can be
        suppressed via its flag."""
        print "Show"
        #
        if showImage:
            self.__frmImage()
        #
        if showStandardContent:
            self.__frmStandardContent()
        #
        # Hook implemented by subclasses for their own widgets.
        self._frmIndividualContent()
        #
        if showStandartButton:
            self.__buttonBox()
        self.setBtnStates(state=NORMAL)
        pass
def __frmImage(self):
print "__frmImage"
self.frmImage = Frame(
self.parentFrame,
highlightbackground="darkgray",
highlightcolor="darkgray",
highlightthickness=1)
self.frmImage.grid(row=0, column=0, rowspan=5)
self.frmImage.pack(expand=True)
pass
def __loadSystemConfig(self):
print("load system json config file")
with open("config.json", "r") as read_file:
self.dicSystemConfig = json.load(read_file)
def updateDefaultFields(self):
'''
set default values from config file
'''
if bool(self.dicSystemConfig):
self._standardGCodeSeq["PREAMBLE"][0] = self.dicSystemConfig[
"Preamble"]
self._standardGCodeSeq["POSTAMBLE"][0] = self.dicSystemConfig[
"Postamble"]
self._standardGCodeSeq["ZAXIS"][0] = self.dicSystemConfig["StartZ"]
self._standardGCodeSeq["ZAXIS"][1] = self.dicSystemConfig[
"SafetyZ"]
self._standardGCodeSeq["TRAVEL_SPEEDXYZ"] = self.dicSystemConfig[
"TravelSpeedXYZ"]
else:
print("no system config file available")
pass
def __loadMillingParameterFile(self):
'''
load json file and save content into self.__millingparameters
'''
print("load JSON Milling parameter file....")
with open("millingparameters.json", "r") as read_file:
self.dicMaterials = json.load(read_file)
#
# insert an unique id for every materials
uid = 0
for (cat,v) in self.dicMaterials.items():
for mat in self.dicMaterials[cat]:
mat['uid'] = uid
uid += 1
#print("--------------------------")
#print(self.dicMaterials)
#print("--------------------------")
def setMaterialDict(self, value):
'''
set internal material dict (dicSelectedMaterial).
'value' is a string representing the text inside material combobox
'''
self.dicSelectedMaterial = self.getMaterialData(value)
def getMaterialData(self, value):
'''
split the combobox string into "Main-Key (e.g. Metal)" and Sub-Key "Material" (e.g. Alu)
Return the compete substring dict "
'''
sp = value.split(',')
# [0] Major Key (e.g Plastic)
# [1] Material (e.g. Polystorol)
# [2] Tool
# [3] ToolDia
# [4] uid (e.g. 11) => id is a "hidden" field, generated during loading json file
return self.getMaterialDataDict(sp[0], sp[1], sp[4])
def upateMaterialFields(self, value):
print("def upateMaterialFields(self, value): {}".format(value))
value = self.material.current()
mat = self.getOptionMenuMillingData()[value]
print(">>Current: {}, {}".format(value, mat))
self.setMaterialDict(mat)
print(self.dicSelectedMaterial)
if "ToolID" in self.dicSelectedMaterial:
self.currentToolID.set(self.dicSelectedMaterial["ToolID"])
self.currentSpindleRPM.set(self.dicSelectedMaterial["Spindel-RPM"])
self.tooldia.set(self.dicSelectedMaterial["Tool dia"])
self.speed_XY_G02G03.set(
self.dicSelectedMaterial["Feed rate mm/min"])
self.speed_Z_G01.set(
self.dicSelectedMaterial["Infeed rate mm/min"])
def getOptionMenuMillingData(self):
'''
create a sorted array of all defined materials
Name of material: (Category) material => Plastic,Polystorol; Wood,beech plywood; ...
'''
data = []
for (k, v) in self.dicMaterials.items():
for list in self.dicMaterials[k]:
s = "{0},{1},({2}mm,{3}mm), {4}".format(k, list["Material"],
list["Tool"], list["Tool dia"],list["uid"])
data.append(s)
return sorted(data)
def getMaterialDataDict(self, category, material, uid):
'''
Return a dictionary for this category and material.
Is used to prefill entry widges like speed, tool dia, ...
Example:
"Material" : "PMMA 4 mm (Plexiglas)",
"Tool" : "Single cutter",
"ToolID" : 12,
"Tool dia" : 2.0,
"Feed rate 1" : 25,
"Feed rate 2" : 1500,
"Infeed rate" : 0.4,
"Spindel-RPM" : 20000,
"Info" : "Einschneid Fraeser",
"uid" : 18
'''
for cat in self.dicMaterials[category]:
print(cat)
if cat["Material"] == material:
if str(cat["uid"]) == uid.strip():
print("found (({}) {},{})").format(uid, cat, material)
return cat
return {}
def __frmStandardContent(self,
showPreamble=True,
showPostamble=True,
showSpindleAndTool=True):
print "__frmStandardContent"
self.frmStandardContent = Frame(
self.parentFrame,
highlightbackground="darkgray",
highlightcolor="darkgray",
highlightthickness=1)
row = 0
if showPreamble:
self._preamble = StringVar()
txt = self._standardGCodeSeq["PREAMBLE"][0].format(CR)
self._preamble.set(txt)
Label(
self.frmStandardContent, text="PreGCode").grid(
row=row, column=0, sticky=W)
FloatEntry(
self.frmStandardContent,
width=70,
mandatory=False,
textvariable=self._preamble).grid(
row=row, column=1, columnspan=6, sticky=W)
if showPostamble:
row += 1
self._postamble = StringVar()
txt = self._standardGCodeSeq["POSTAMBLE"][0].format(CR)
self._postamble.set(txt)
Label(
self.frmStandardContent, text="PostGCode").grid(
row=row, column=0, sticky=W)
FloatEntry(
self.frmStandardContent,
width=70,
mandatory=False,
textvariable=self._postamble).grid(
row=row, column=1, columnspan=6, sticky=W)
# bugfix / requirement #10
if showSpindleAndTool:
row += 1
# New in V0.12.5 -------------------------------
choices = self.getOptionMenuMillingData()
self.selectedMaterial = StringVar()
self.selectedMaterial.set(choices[0])
matWidth = len(max(choices, key=len)) - 20
Label(
self.frmStandardContent, text='Material').grid(
row=row, column=0, sticky=W)
self.material = ttk.Combobox(
self.frmStandardContent,
textvariable=self.selectedMaterial,
values=choices)
self.material.configure(width=matWidth)
print(">>Current: {}".format(self.material.current()))
self.material.bind("<<ComboboxSelected>>",
self.upateMaterialFields)
self.material.config(width=matWidth)
self.material.grid(row=row, column=1, columnspan=2, sticky=W)
#-----------------------------------------------
self.currentSpindleRPM = StringVar(value="")
self.currentToolID = StringVar(value="")
self._spindleCCW = StringVar(value="CW")
Label(
self.frmStandardContent, text="ToolID").grid(
row=row, column=3, sticky=W)
IntEntry(
self.frmStandardContent,
width=10,
mandatory=False,
textvariable=self.currentToolID).grid(
row=row, column=4, sticky=W)
Label(
self.frmStandardContent, text="Spindle Speed").grid(
row=row, column=5, sticky=W)
IntEntry(
self.frmStandardContent,
width=10,
mandatory=False,
textvariable=self.currentSpindleRPM).grid(
row=row, column=6, sticky=W)
# workaround MacOS, Mohjave with TKinter-Buttons
#ttk.Checkbutton(self.frmStandardContent, text="Spindle CCW",
# var=self._spindleCCW, onvalue="CCW", offvalue="CW").grid(
# row=row, column=6, sticky=W
# )
self.frmStandardContent.pack(expand=True, fill=BOTH)
pass
def _frmIndividualContent(self):
#override in subcluss
pass
def __buttonBox(self):
print "__buttonBox"
self.frmButtons = Frame(
self.parentFrame,
highlightbackground="darkgray",
highlightcolor="darkgray",
highlightthickness=1)
# bugfix 25.02.2019, change from "To AXIS" to "To Console"
self.btnAxis = ttk.Button(
self.frmButtons,
text="To Console",
width=10,
command=self.copyConsole,
state=DISABLED,
style="C.TButton")
self.btnAxis.grid(row=0, column=0)
self.btnClip = ttk.Button(
self.frmButtons,
text="To Clipboard",
width=10,
command=self.copyClipboard,
state=DISABLED,
style="C.TButton")
self.btnClip.grid(row=0, column=1)
self.btnSave = ttk.Button(
self.frmButtons,
text="To File",
width=10,
command=self.saveFile,
state=DISABLED,
style="C.TButton")
self.btnSave.grid(row=0, column=2)
self.btnGCode = ttk.Button(
self.frmButtons,
text="gen. GCode",
width=10,
command=self.showGCode,
state=NORMAL,
style="C.TButton")
self.btnGCode.grid(row=0, column=3)
#self.btnGCode = ttk.Button(self.frmButtons, text="Material", width=10,
# command=self.showMaterial, state=DISABLED, style="C.TButton")
#self.btnGCode.grid(
# row=0, column=4
# )
self.btnCancel = ttk.Button(
self.frmButtons,
text="Cancel",
width=10,
command=self.cancel,
state=NORMAL,
style="C.TButton")
self.btnCancel.grid(row=0, column=4)
self.frmButtons.pack(expand=True, fill=BOTH)
def generateGCode(self):
# override from subclass
pass
def getGCode_Homeing(self, x=0, y=0, z=10, fxy=100, fz=100):
gc = "(HOMEING)" + CR
gc += self._standardGCodeSeq["HOMEING"][0].format(z, fz, CR)
gc += self._standardGCodeSeq["HOMEING"][1].format(x, y, fxy, CR)
return gc
def getGCode_SpindleAndTool(self, additional=""):
temp = "(Tool handling)" + CR
#------- Tool handling -----------
if self.currentToolID.get() != "":
t = int(self.currentToolID.get())
if t < 0:
t = 0
msg = "{0} {1:5.2f}mm".format(self.dicSelectedMaterial["Tool"],
self.dicSelectedMaterial["Tool dia"])
temp += "(MSG, change tool to {0} {1}".format(msg, CR)
temp += "T{0:03d} M6 {1}".format(t, CR)
temp += "(Spindel control)" + CR
#------- Spindle control ---------
if self.currentSpindleRPM.get() != "":
s = int(self.currentSpindleRPM.get())
sdir = self._spindleCCW.get()
if s < 0:
s = 0
if sdir == "CW":
temp += "M3 S{0:04d} {1}".format(s, CR)
else:
temp += "M4 S{0:04d} {1}".format(s, CR)
if s == 0:
temp += "M5" + CR
if additional != "":
temp += "(additional)" + CR
temp += additional + CR
return temp + CR
def getGCode_Preamble(self, additional=""):
temp = ""
# Preamble
temp += CR + "(set general preamble)" + CR
temp += self._preamble.get() + CR
if (additional != ""):
temp += "(additional)" + CR
temp += additional + CR
temp += self.getGCode_SpindleAndTool()
return temp
def getGCode_Postamble(self, additional=""):
temp = ""
# Preamble
temp += CR + "(set general postamble)" + CR
temp += self._postamble.get() + CR
if (additional != ""):
temp += "(additional)" + CR
temp += additional + CR
return temp
def getGCodeCutterComp(self, compensation="G40", toolDia=0.0, x=0.0,
y=0.0):
'''
return a GCode for given cutter compensation
This cutter compensation do not use tool table adjustment for
tool diameters.
if toolDia is not set (0.0) than preset tool diameter is used
# if cutter compensation is used please remember:
# G41 is LEFT from path
# G42 is RIGHT from path
#
# if our start position is at 3-clock and G41 is used, tool is inside
# arc (circle), because we should start LEFT from path.
#
# if G42 is used, tool is outside of arc (circle) (RIGHT)
#
# this behaviour depends on general contour direction (CW or CCW)
# CW => above behaviour
# CCW => G41 is RIGHT, G42 is LEFT
'''
gc = ""
#if toolDia == 0.0:
# compensation = "G40"
gc += "(-- cutter compensation --)" + CR
if (compensation == "G41"):
if toolDia == 0.0:
gc += "G41 {1}".format(CR)
else:
gc += "G41.1 D{0:05.2f} X{2:08.3f} Y{3:08.3f}{1}".format(
toolDia, CR, x, y)
#gc += "G41.1 D{0:05.2f}{1}".format(toolDia, CR)
elif (compensation == "G42"):
if toolDia == 0.0:
gc += "G42 {1}".format(CR)
else:
gc += "G42.1 D{0:05.2f} X{2:08.3f} Y{3:08.3f}{1}".format(
toolDia, CR, x, y)
#gc += "G42.1 D{0:05.2f}{1}".format(toolDia, CR)
else: # G40
gc += "G40 {0}".format(CR)
return gc
def copyClipboard(self, event=None):
print "copyClipboard"
gc = self.getGCode()
if gc is None:
return None
self.app.clipboard_clear()
self.app.clipboard_append(gc)
pass
def saveFile(self, event=None):
gc = self.getGCode()
if gc is None:
return None
fname = tkFileDialog.asksaveasfilename(
initialdir="./",
title="Save file",
defaultextension="*.ngc",
filetypes=(("Axis ", "*.ngc"), ("Gcode ", "*.gcode"), ("all files",
"*.*")))
if (fname == None):
# cancle button
return None
print("Save gcode to '{}'".format(fname))
f = open(fname, "w")
f.write(gc)
f.close()
pass
def copyConsole(self, event=None):
print("---------------- copyConsole -----------------")
gc = self.getGCode()
if gc is None:
return None
sys.stdout.write(self.getGCode())
#self.quit()
def showGCode(self, event=None):
gc = self.getGCode()
if gc is None:
return None
d = GCodeDialog(self.app, title="generated GCode")
d.init(gc)
d.update(gc)
pass
def showMaterial(self, event=None):
data = {}
d = EntryGridDialog(self.app, title="MaterialParameters")
d.init(data)
d.update(data)
pass
def setBtnStates(self, state):
self.btnClip.config(state=state)
self.btnSave.config(state=state)
self.btnAxis.config(state=state, default=ACTIVE)
self.btnGCode.config(state=state)
def defaultUserValidation(self):
pre = self._preamble.get()
post = self._postamble.get()
try:
spindle = int(self.currentSpindleRPM.get())
except ValueError:
spindle = -1
try:
toolID = int(self.currentToolID.get())
except ValueError:
toolID = -1
if pre == "":
self.MessageBox(
state="WARN",
title="Warn",
text="Are you shure? There is no preamble gcode available")
return False
if post == "":
self.MessageBox(
state="WARN",
title="Warn",
text="Are you shure? There is no postamble gcode available")
return False
if (spindle < -1 or toolID < -1):
self.MessageBox(
state="INFO",
title="INFO",
text="You set no tool id and/or spindel control")
return True
return True
def userInputValidation(self):
# override in subclass
'''
This function is called from getGCode() and validate all important
input fields in the current dialog.
Implementation should be done inside subclass
This method should return True or False
True if everything is ok
False something is wrong - no GCode generation
'''
pass
def getGCode(self):
if self.defaultUserValidation() == False:
return None
if self.userInputValidation() == False:
return None
gc = "%"
#
# removed because some trouble with GCode-Interpreters (BK-13.12.2018)
gc += '''
; (--------------------------)
; ( __ )
; ( _(\ |@@| )
; ( (__/\__ \--/ __ )
; ( \___|----| | __ )
; ( \ }{ /\ )_ / _\ )
; ( /\__/\ \__O (__ )
; ( (--/\--) \__/ )
; ( _)( )(_ )
; ( `---''---` )
; ( (c) by LunaX 2018 )
; (--------------------------)
'''
gc += CR
gc += self.generateGCode()
gc += "%" + CR
return gc
#------ EXIT --------------------------
def cancel(self, event=None):
print "cancel"
#self.destroy()
self.master.quit()
pass
def MessageBox(self, state="INFO", title="", text=""):
if state == "INFO":
tkMessageBox.showinfo(title, text)
elif state == "WARN":
tkMessageBox.showinfo(title, text)
elif state == "ERROR":
tkMessageBox.showerror(title, text)
else:
tkMessageBox.showtitle("!!!unknown - State !!!", text)
def getDepthSteps(self, total, step):
'''
calculate how many depth steps we need to mill to total depth.
Return two values:
1. Value = numberOfWindings
2. Value = rest depth
'''
r = round((total % step), 3)
w = int(abs(total / step))
return w, r
class ToolTip:
    '''
    It creates a tooltip for a given widget as the mouse goes on it.
    see:
    http://stackoverflow.com/questions/3221956/
    what-is-the-simplest-way-to-make-tooltips-
    in-tkinter/36221216#36221216
    http://www.daniweb.com/programming/software-development/
    code/484591/a-tooltip-class-for-tkinter
    - Originally written by vegaseat on 2014.09.09.
    - Modified to include a delay time by Victor Zaccardo on 2016.03.25.
    - Modified
        - to correct extreme right and extreme bottom behavior,
        - to stay inside the screen whenever the tooltip might go out on
          the top but still the screen is higher than the tooltip,
        - to use the more flexible mouse positioning,
        - to add customizable background color, padding, waittime and
          wraplength on creation
      by Alberto Vassena on 2016.11.05.
    Tested on Ubuntu 16.04/16.10, running Python 3.5.2
    TODO: themes styles support
    '''

    def __init__(self,
                 widget,
                 bg='#FFFFEA',
                 pad=(5, 3, 5, 3),
                 text='widget info',
                 waittime=400,
                 wraplength=300):
        self.waittime = waittime  # in milliseconds, originally 500
        self.wraplength = wraplength  # in pixels, originally 180
        self.widget = widget
        self.text = text
        # show the tip on hover/focus, hide it again on leave/blur/click
        self.widget.bind("<Enter>", self.onEnter)
        self.widget.bind("<FocusIn>", self.onEnter)
        self.widget.bind("<FocusOut>", self.onLeave)
        self.widget.bind("<Leave>", self.onLeave)
        self.widget.bind("<ButtonPress>", self.onLeave)
        self.bg = bg
        self.pad = pad
        self.id = None  # pending `after` timer id, if any
        self.tw = None  # tooltip Toplevel window, when visible

    def onEnter(self, event=None):
        # start the delayed show
        self.schedule()

    def onLeave(self, event=None):
        # cancel any pending show and hide an already-visible tip
        self.unschedule()
        self.hide()

    def schedule(self):
        # restart the wait timer; only one timer may be pending at a time
        self.unschedule()
        self.id = self.widget.after(self.waittime, self.show)

    def unschedule(self):
        # cancel the pending timer, if there is one
        id_ = self.id
        self.id = None
        if id_:
            self.widget.after_cancel(id_)

    def show(self):
        def tip_pos_calculator(widget,
                               label,
                               tip_delta=(10, 5),
                               pad=(5, 3, 5, 3)):
            # Place the tip near the mouse but keep it fully on screen:
            # flip to the other side of the cursor when it would overflow
            # right/bottom, clamp to the top edge as a last resort.
            w = widget
            s_width, s_height = w.winfo_screenwidth(), w.winfo_screenheight()
            width, height = (pad[0] + label.winfo_reqwidth() + pad[2],
                             pad[1] + label.winfo_reqheight() + pad[3])
            mouse_x, mouse_y = w.winfo_pointerxy()
            x1, y1 = mouse_x + tip_delta[0], mouse_y + tip_delta[1]
            x2, y2 = x1 + width, y1 + height
            x_delta = x2 - s_width
            if x_delta < 0:
                x_delta = 0
            y_delta = y2 - s_height
            if y_delta < 0:
                y_delta = 0
            offscreen = (x_delta, y_delta) != (0, 0)
            if offscreen:
                if x_delta:
                    x1 = mouse_x - tip_delta[0] - width
                if y_delta:
                    y1 = mouse_y - tip_delta[1] - height
            offscreen_again = y1 < 0  # out on the top
            if offscreen_again:
                # No further checks will be done.
                # TIP:
                # A further mod might automagically augment the
                # wraplength when the tooltip is too high to be
                # kept inside the screen.
                y1 = 0
            return x1, y1

        bg = self.bg
        pad = self.pad
        widget = self.widget
        # creates a toplevel window
        self.tw = Toplevel(widget)
        # Leaves only the label and removes the app window
        self.tw.wm_overrideredirect(True)
        win = Frame(self.tw, background=bg, borderwidth=0)
        label = Label(
            win,
            text=self.text,
            justify=LEFT,
            background=bg,
            relief=SOLID,
            borderwidth=0,
            wraplength=self.wraplength)
        label.grid(padx=(pad[0], pad[2]), pady=(pad[1], pad[3]), sticky=NSEW)
        win.grid()
        x, y = tip_pos_calculator(widget, label)
        self.tw.wm_geometry("+%d+%d" % (x, y))

    def hide(self):
        tw = self.tw
        if tw:
            tw.destroy()
        self.tw = None
|
# Return the list of all primes up to n (sieve of Eratosthenes).
# Memory grows with n: values around 1,000,000 are fine, but values
# around 10,000,000 may exhaust memory and stall the machine.
def primeTable(n):
    """Return all primes <= n via the sieve of Eratosthenes.

    Fixes/improvements:
    - range() instead of the Python-2-only xrange() (same semantics on
      both interpreters).
    - crossing-out starts at i*i: every smaller multiple of i was already
      removed by a smaller prime factor.
    """
    sieve = [True] * (n + 1)
    i = 2
    while i * i <= n:
        if sieve[i]:
            j = i * i
            while j <= n:
                sieve[j] = False
                j += i
        i += 1
    # 0 and 1 are not prime, hence the i >= 2 filter
    return [i for i in range(n + 1) if sieve[i] and i >= 2]
def getFactrial(n):
    """Return n! for n >= 0 (0! == 1).

    Name kept for backward compatibility; xrange() replaced with range()
    so the helper also runs on Python 3.
    """
    result = 1
    for i in range(1, n + 1):
        result *= i
    return result
def getCombinationNum(n, r):
    """Return the binomial coefficient C(n, r) as an exact integer.

    Bugfix: uses floor division (//). The original '/' returned a float
    under Python 3, losing exactness for large n.
    """
    return getFactrial(n) // (getFactrial(r) * getFactrial(n - r))
# Decompose the given number into its prime factors.
def primeDecomposition(n):
    """Return the prime factorization of n as an ascending list
    (with multiplicity), e.g. 12 -> [2, 2, 3].

    Bugfix: uses integer division (n //= i). The original 'n /= i'
    turns n into a float under Python 3 and corrupts the loop tests.
    """
    i = 2
    factors = []
    while i * i <= n:
        while n % i == 0:
            n //= i
            factors.append(i)
        i += 1
    if n > 1:
        # whatever is left is itself prime
        factors.append(n)
    return factors
# Generator yielding start+1, start+2, ..., end; also works with decimal
# start values and very large numbers (no materialized range).
def drange(start, end):
    """Yield start+1, start+2, ... up to (and including) end.

    Note: the first yielded value is always start+1, even when
    start >= end -- this mirrors the original generator's contract.
    """
    value = start + 1
    yield value
    while value < end:
        value += 1
        yield value
def totientFunction(n, pSet):
    """Euler's totient of n, using pSet as a prime lookup shortcut.

    For a known prime p the result is p-1; otherwise phi(n) is computed
    as n * product(1 - 1/p) over the distinct prime factors of n,
    rounded back to an integer.
    """
    if n in pSet:
        return n - 1
    distinct_primes = set(primeDecomposition(n))
    fraction = 1.
    for p in distinct_primes:
        fraction *= (1. - 1. / p)
    return int(round(fraction * n))
|
# Generated by Django 3.2.4 on 2021-08-03 12:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the inventory app.

    Creates the catalog tables (Cargo, Comuna, Estado, Marca, Region,
    tipo_dispositivo), then Usuario, Sucursal, Modelo and Dispositivo
    with their foreign keys, and finally links Comuna to Region.
    Auto-generated by Django 3.2.4 -- do not edit the operations by hand.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Cargo',
            fields=[
                ('idCargo', models.AutoField(primary_key=True, serialize=False)),
                ('nomCargo', models.CharField(max_length=60)),
            ],
            options={
                'db_table': 'cargo',
            },
        ),
        migrations.CreateModel(
            name='Comuna',
            fields=[
                ('idComuna', models.IntegerField(primary_key=True, serialize=False)),
                ('nomComuna', models.CharField(max_length=70)),
            ],
            options={
                'db_table': 'comuna',
            },
        ),
        migrations.CreateModel(
            name='Estado',
            fields=[
                ('idEstado', models.IntegerField(primary_key=True, serialize=False)),
                ('nomEstado', models.CharField(max_length=20)),
            ],
            options={
                'db_table': 'estado',
            },
        ),
        migrations.CreateModel(
            name='Marca',
            fields=[
                ('idMarca', models.AutoField(db_column='idMarca', primary_key=True, serialize=False)),
                ('nomMarca', models.CharField(max_length=20)),
            ],
            options={
                'db_table': 'marca',
            },
        ),
        migrations.CreateModel(
            name='Region',
            fields=[
                ('idRegion', models.IntegerField(primary_key=True, serialize=False)),
                ('nomRegion', models.CharField(max_length=70)),
            ],
            options={
                'db_table': 'region',
            },
        ),
        migrations.CreateModel(
            name='tipo_dispositivo',
            fields=[
                ('idTipoDisp', models.AutoField(db_column='idTipoDisp', primary_key=True, serialize=False)),
                ('nomTipo', models.CharField(max_length=30)),
            ],
            options={
                'db_table': 'tipo_dispositivo',
            },
        ),
        # Usuario references Cargo, so it is created after the catalogs
        migrations.CreateModel(
            name='Usuario',
            fields=[
                ('Rut', models.CharField(max_length=10, primary_key=True, serialize=False)),
                ('nombre', models.CharField(max_length=30)),
                ('apellido_pa', models.CharField(blank=True, max_length=30, null=True)),
                ('apellido_ma', models.CharField(blank=True, max_length=30, null=True)),
                ('cargoo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.cargo')),
            ],
            options={
                'db_table': 'usuario',
            },
        ),
        migrations.CreateModel(
            name='Sucursal',
            fields=[
                ('idSucursal', models.IntegerField(primary_key=True, serialize=False)),
                ('nomSucursal', models.CharField(max_length=70)),
                ('direccion', models.CharField(max_length=120)),
                ('comunaa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.comuna')),
            ],
            options={
                'db_table': 'sucursal',
            },
        ),
        migrations.CreateModel(
            name='Modelo',
            fields=[
                ('idModelo', models.AutoField(db_column='idModelo', primary_key=True, serialize=False)),
                ('nomModelo', models.CharField(max_length=60)),
                ('marcaa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.marca')),
                ('tipo_dispositivoo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.tipo_dispositivo')),
            ],
            options={
                'db_table': 'modelo',
            },
        ),
        # Dispositivo is the central table; 'rutt' (assigned user) is optional
        migrations.CreateModel(
            name='Dispositivo',
            fields=[
                ('idCorrel', models.AutoField(primary_key=True, serialize=False)),
                ('nroSerie', models.CharField(max_length=20, unique=True)),
                ('comentario', models.CharField(blank=True, max_length=2000, null=True)),
                ('estadoo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.estado')),
                ('modeloo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.modelo')),
                ('rutt', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.usuario')),
                ('sucursall', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.sucursal')),
            ],
            options={
                'db_table': 'dispositivo',
            },
        ),
        # added afterwards to break the Comuna <-> Region creation cycle
        migrations.AddField(
            model_name='comuna',
            name='regionn',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.region'),
        ),
    ]
|
# -*- coding: utf-8 -*-
from imp import reload
__author__ = 'silencedut'
# import MySQLdb
import pymysql
import json
from flask import g
# from sae.const import (MYSQL_HOST, MYSQL_HOST_S,
# MYSQL_PORT, MYSQL_USER, MYSQL_PASS, MYSQL_DB
# )
import sys
reload(sys)
# sys.setdefaultencoding('utf8')
def connect():
    # Open the MySQL connection, stash it on flask.g and force utf8 on the
    # session. Returns 0 on success, 1 on any failure (status-code style,
    # no exception escapes).
    status=0
    try:
        # g.db = MySQLdb.connect(MYSQL_HOST, MYSQL_USER, MYSQL_PASS,
        #                MYSQL_DB, port=int(MYSQL_PORT))
        # localhost - 127.0.0.1
        # NOTE(review): credentials are hard-coded here; consider moving
        # them to configuration or environment variables.
        g.db = pymysql.connect(
            host='localhost',
            port=3306,
            user='huangweiqi',
            passwd='123456',
            db='nba_plus_server',
            charset='utf8')
        g.db.ping(True)  # reconnect automatically if the link dropped
        dbc=g.db.cursor()
        # belt-and-braces: force utf8 on both connection and session
        g.db.set_character_set('utf8')
        dbc.execute('SET NAMES utf8;')
        dbc.execute('SET CHARACTER SET utf8;')
        dbc.execute('SET character_set_connection=utf8;')
    except Exception as e:
        status=1
    return status
def select_teamsort_sql():
    """Fetch the single stored team-ranking row from `teamsort`."""
    cursor = g.db.cursor()
    cursor.execute("select sort from teamsort ")
    return cursor.fetchone()
def select_perstat_sql(colum):
    """Fetch the requested column of the single `nbastat` row.

    `colum` is interpolated into the SQL text (identifiers cannot be
    bound as parameters) -- callers must pass a trusted column name.
    """
    cursor = g.db.cursor()
    query = "select %s from nbastat " % (colum)
    cursor.execute(query)
    return cursor.fetchone()
def select_latest_news_sql():
    """Return the most recent `news` row (highest id)."""
    cursor = g.db.cursor()
    cursor.execute("select * from news order by id DESC limit 1")
    return cursor.fetchone()
def select_latest_blog_sql():
    """Return the 10 newest blog rows as a JSON string of the form
    {'nextId': ..., 'newslist': [...]}.

    Bugfix: the original did json.dumps(...).decode("unicode-escape"),
    which is Python-2-only (str has no decode() on Python 3). Passing
    ensure_ascii=False keeps non-ASCII characters unescaped, matching
    the old intent.
    """
    c = g.db.cursor()
    c.execute("select * from blog order by id DESC limit 10")
    results = c.fetchall()
    # column 2 holds the JSON-encoded blog payload
    blogs = [json.loads(perblog[2]) for perblog in results]
    # NOTE(review): results[4][1] assumes at least 5 rows exist -- confirm
    blogjsonlist = {'nextId': results[4][1], 'newslist': blogs}
    return json.dumps(blogjsonlist, ensure_ascii=False)
def select_news_by_id_sql(newsid):
    """Return the content column (index 2) of the news row with `newsid`.

    Security fix: the id is passed as a bound parameter instead of being
    %-formatted into the SQL string (SQL injection).
    """
    c = g.db.cursor()
    c.execute("select * from news where newsid =%s", (newsid,))
    news = c.fetchone()
    return str(news[2])
def select_blogs_by_id_sql(newsid):
    """Return the 8 blog entries just below the blog row with `newsid`,
    newest first, as a JSON string {'nextId': ..., 'newslist': [...]}.

    Fixes:
    - bound parameters instead of %-formatted SQL (SQL injection);
    - json.dumps(..., ensure_ascii=False) replaces the Python-2-only
      str.decode("unicode-escape"), which raises AttributeError on
      Python 3.
    """
    c = g.db.cursor()
    c.execute("select * from blog where newsid =%s", (newsid,))
    anchor_row = c.fetchone()
    anchor_id = anchor_row[0]
    # window of 8 rows with ids strictly between anchor_id-9 and anchor_id
    c.execute("select * from blog where id< %s and id>%s",
              (anchor_id, anchor_id - 9))
    results = c.fetchall()
    # NOTE(review): assumes exactly 8 rows are returned -- confirm
    blogs = []
    for i in range(8):
        # walk the window backwards so the newest entry comes first
        blogs.append(json.loads(results[7 - i][2]))
    blogjsonlist = {'nextId': results[0][1], 'newslist': blogs}
    return json.dumps(blogjsonlist, ensure_ascii=False)
def select_all_blog_sql(colum, table_name):
    """Fetch column `colum` for every row of `table_name`.

    Identifiers cannot be bound as SQL parameters, so both arguments are
    interpolated into the statement -- pass trusted values only.
    """
    cursor = g.db.cursor()
    query = "select %s from %s" % (colum, table_name)
    cursor.execute(query)
    return cursor.fetchall()
def news_insert_sql(table_name, newsid, newslist, date):
    """Insert one news row into `table_name`.

    Security fix: the values are bound parameters (the old %-formatting
    was injectable and broke on quotes in the payload). The table name
    cannot be bound and is still interpolated -- callers must pass a
    trusted table name.
    """
    c = g.db.cursor()
    sql = "insert into %s (newsid,newslist,date) VALUES (%%s,%%s,%%s)" % (table_name,)
    c.execute(sql, (newsid, newslist, date))
def newscontent_insert_sql(newsid, content):
    """Insert one article-content row.

    Security fix: bound parameters replace %-formatted SQL, which was
    injectable and broke whenever the content contained a quote.
    """
    c = g.db.cursor()
    c.execute("insert into articlecontent (articleId,content) VALUES (%s,%s)",
              (newsid, content))
def teamsort_insertsql(teamsort):
    """Replace the stored team ranking with `teamsort`.

    Security fix: the value is passed as a bound parameter instead of
    being %-formatted into the SQL string.
    """
    c = g.db.cursor()
    # the table holds a single row: clear it first
    delete_sql('teamsort')
    c.execute("insert into teamsort VALUES (%s)", (teamsort,))
def stat_insertsql(allstat):
    """Replace the single `nbastat` row with the 9 stat values in allstat.

    Opens its own connection (kept from the original -- this function is
    called outside a Flask request context). Security fix: values are
    bound parameters instead of %-formatted SQL.
    """
    g.db = pymysql.connect(
        host='localhost',
        port=3306,
        user='huangweiqi',
        passwd='123456',
        db='nba_plus_server',
        charset='utf8')
    c = g.db.cursor()
    # single-row table: clear before inserting
    delete_sql('nbastat')
    sql = "insert into nbastat VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
    c.execute(sql, (1,) + tuple(allstat[:9]))
def delete_sql(table_name):
    """Delete every row of `table_name` (identifier is interpolated --
    pass trusted table names only)."""
    cursor = g.db.cursor()
    cursor.execute('delete from %s' % (table_name))
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 8 11:43:50 2020
@author: goblo
"""
import os
import pickle  # used to load the dumped control-point surface below

import numpy as np
from scipy.interpolate import RectBivariateSpline
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from matplotlib import cm
from matplotlib.colors import Normalize

import geomdl
from geomdl import NURBS
from geomdl import knotvector
from geomdl import utilities
from geomdl import operations
from geomdl.visualization import VisMPL
# Load the previously pickled 25x25 raw surface object.
# (`pickle` is provided by the module-level import block.)
file = open('./prgm/AV/brute_25x25', 'rb')
surf = pickle.load(file)
file.close()
surf.enr_points_finaux(surf.points)

# Regularly-spaced, coarse grid
dx, dy = 0.1,0.1
xmax, ymax = 4, 4
x = np.arange(-xmax, xmax, dx)
y = np.arange(-ymax, ymax, dy)
X, Y = np.meshgrid(x, y)

R = 3;
B = (X)**2 + Y**2 <= R**2; # B is a mask simulating the outline of a lens
# NOTE(review): astype() returns a new array -- this result is discarded,
# and np.int is deprecated; the multiply on the next line does the 0/1
# conversion anyway.
B.astype(np.int)
B = np.multiply(B, 1)
def f(X, Y):
    """Gaussian bump centered at the origin: exp(-(X^2 + Y^2)/10).
    Works element-wise on numpy arrays and on plain scalars."""
    return np.exp(-0.1 * (X ** 2 + Y ** 2))
Z = f(X,Y)
# NOTE(review): Zcut is the same array object as Z -- the masked
# assignment below also zeroes Z itself.
Zcut = Z
Zcut[B == 0] = False

# reproducible noise on top of the analytic surface
np.random.seed(1)
noise = (np.random.randn(*Z.shape) * 0.05)
# act as if a contour-detection filter had removed the points that do
# not belong to the lens
noisy = (Z + noise)*B
noisy[B == 0] = -3 #float("NaN") #False

# homogeneous control points [x, y, z, w=1] in column-major order
ctrlpts = [[[X[j,i],Y[j,i],noisy[j,i],1] for j in range(len(y))]for i in range(len(x))]

# quick 3D scatter of the noisy samples
fig = plt.figure()
ax = plt.axes(projection='3d')
#ax.view_init(elev=5, azim=45)
ax.set_axis_off()
ax.set_xlim(-4,4)
ax.set_ylim(-4,4)
ax.set_zlim(-2,6)
ax.scatter(X, Y, noisy, s=5, c='r')
#ax.plot_surface(X, Y, Z*B, color='b' , alpha = 0.9)
fig.tight_layout()
plt.show()
# Generate surface: fit a cubic NURBS surface through the noisy samples
surf = NURBS.Surface()
surf.degree_u = 3
surf.degree_v = 3
surf.ctrlpts2d = ctrlpts
surf.knotvector_u = knotvector.generate(surf.degree_u, len(ctrlpts))
#surf.knotvector_u[4:len(ctrlpts)-1] = 0.6*np.ones(len(ctrlpts)-1-4)
surf.knotvector_v = knotvector.generate(surf.degree_v, len(ctrlpts[0]))
#surf.knotvector_v[4:len(ctrlpts[0])-1] = 0.6*np.ones(len(ctrlpts)-1-4)
operations.refine_knotvector(surf, [0,0])

# number of points in the output .csv file (one direction)
surf.sample_size = len(ctrlpts)

# Visualize surface
surf.vis = VisMPL.VisSurfTriangle(ctrlpts=True, axes=False, legend=False)
surf.render(colormap=cm.summer)

# lens points after sampling - to export as .csv or .txt
surface_points = surf.evalpts
# rebuild a regular grid from the flat evalpts list (row-major in u)
ZRES = np.zeros([len(ctrlpts),len(ctrlpts[0])])
Xres = [surface_points[len(ctrlpts[0])*i][0] for i in range(len(ctrlpts))]
Yres = [surface_points[i][1] for i in range(len(ctrlpts[0]))]
for i in range(len(ctrlpts)):
    for j in range(len(ctrlpts[0])):
        ZRES[i,j] = surface_points[len(ctrlpts[0])*i + j][2]
XRES,YRES = np.meshgrid(np.asarray(Xres),np.asarray(Yres))
ZINIT = f(XRES,YRES)*B

## comparison of the underlying surface vs the fitted result
#xres = np.arange(-xmax, xmax, (2*xmax)/surf.sample_size[0])
#yres = np.arange(-ymax, ymax, (2*ymax)/surf.sample_size[0])
#XRES, YRES = np.meshgrid(xres, yres)
#ZRES = 4*np.exp(- XRES**2 - YRES**2) - 1.5*np.exp(-1/0.05 * ((XRES-1)**2 + (YRES-1)**2))
#fig = plt.figure()
#ax = plt.axes(projection='3d')
#ax.scatter(XRES, YRES, ZRES, s=5, c='r')
#ax.scatter(XRES, YRES, ZINIT, s=5, c='b')
#
#fig = plt.figure()
#ax = plt.axes(projection='3d')
#ax.plot_surface(XRES, YRES, 100*(np.abs(ZRES - ZINIT))/(1+np.abs(ZINIT)),alpha=0.7,cmap=cm.summer)
#
#ax.set_xlim(-R*0.4,R*0.4)
#ax.set_ylim(-R*0.4,R*0.4)
|
#!/usr/bin/env python3
""" Simple numpy demo"""
import os
import sys
import numpy as np
def numpy_demo():
    """Print a short tour of numpy constants, functions and matrix math,
    then verify that .dot() and the * operator agree on np.matrix."""
    print("NumPy has a lot of standard math functions and constants:")
    print("np.linspace(0,1,11): "+str(np.linspace(0,1,11)))
    print("np.pi: "+str(np.pi))
    print("np.e: "+str(np.e))
    print("np.sin(np.pi/2): "+str(np.sin(np.pi/2)))
    print("Numpy has matrices:")
    left = np.matrix([[1, 2], [3, 4], [5, 6]], dtype=float)
    # same content the original built via np.zeros + element assignment
    right = np.matrix([[0.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
    print("a{} = ".format(left.shape))
    print(left)
    print("b{} = ".format(right.shape))
    print(right)
    product = left.dot(right)
    print("a . b = a * b = ")
    # for np.matrix, '*' is matrix multiplication -- bail out if they differ
    if not np.array_equal(product, left * right):
        print("These should be the same!")
        sys.exit(1)
    print(product)
# Run the demo only when executed as a script.
if __name__ == "__main__":
    numpy_demo()
|
import math
import time
def rank_clusters(cluster_objects):
    """Given a list of cluster objects, rank them (best first) according
    to the learnt scoring function; each cluster's score is cached in
    cluster.metrics['score'].

    Bugfix: the score is now computed once per cluster before sorting.
    The original evaluated rank_formula() twice per cluster (as the sort
    key and again when storing), and since the formula depends on
    time.time() the stored score could disagree with the sort order.
    """
    for cluster in cluster_objects:
        cluster.metrics['score'] = rank_formula(cluster)
    # sort on the cached score, highest first
    return sorted(cluster_objects,
                  key=lambda cluster: cluster.metrics['score'],
                  reverse=True)
# rank_clusters() ends
def rank_formula(cluster):
    """Score a single cluster from its metrics.

    Combines entity richness, cluster size, spatial spread and recency;
    singleton clusters (num_articles == 1) always score 0.
    Metrics read: avg_named_entities, num_articles,
    avg_distance_from_center, average_publishing_time.
    """
    now = int(time.time())
    entity_factor = math.log(1.1 + cluster.metrics['avg_named_entities'])
    size_factor = math.log(cluster.metrics['num_articles'])
    spread_penalty = math.exp(cluster.metrics['avg_distance_from_center'] * 10 + 0.1)
    recency_factor = math.exp(cluster.metrics['average_publishing_time'] / now)
    if cluster.metrics['num_articles'] == 1:
        return 0
    return entity_factor * size_factor / spread_penalty * recency_factor
|
# -*- coding: utf-8 -*-
import hashlib
import time
from http.server import BaseHTTPRequestHandler, HTTPServer
class calcSig(object):
    """Computes the 'as' and 'cp' signature fields appended to API URLs.

    The signature is an MD5 over the sorted, concatenated query values,
    interleaved with two fixed keys shuffled by the hex timestamp.
    """
    key1 = '57218436'
    key2 = '15387264'
    rstr = 'efc84c17'

    def shuffle(self, p1, p2):
        # Reorder the first 8 chars of p1, using the digits of p2 as
        # 1-based indices; the result is lower-cased.
        p = ''
        p += p1[int(p2[0], 10) - 1]
        p += p1[int(p2[1], 10) - 1]
        p += p1[int(p2[2], 10) - 1]
        p += p1[int(p2[3], 10) - 1]
        p += p1[int(p2[4], 10) - 1]
        p += p1[int(p2[5], 10) - 1]
        p += p1[int(p2[6], 10) - 1]
        p += p1[int(p2[7], 10) - 1]
        return p.lower()

    # build the 'as' and 'cp' fields
    def ppp(self, u_md5, u_key1, u_key2):
        # Interleave md5 characters with the two shuffled keys into a
        # fixed 36-char layout: chars [:18] become 'as', [18:] become 'cp'.
        ascp = [0] * 36
        ascp[0] = 'a'
        ascp[1] = '1'
        for i in range(0, 8):
            ascp[2 * (i + 1)] = u_md5[i]
            ascp[2 * i + 3] = u_key2[i]
            ascp[2 * i + 18] = u_key1[i]
            ascp[2 * i + 1 + 18] = u_md5[i + 24]
        ascp[-2] = 'e'
        ascp[-1] = '1'
        return ''.join(ascp)

    # parse the URL query parameters
    def parseURL(self, url):
        # Sort the 'key=value' pairs (plus the fixed rstr) and concatenate
        # their values, with '+' and ' ' normalized to 'a'.
        param_index = url.find('?')
        param = url[param_index + 1:]
        param_list = param.split('&')
        param_list.append('rstr='+self.rstr)
        param_list = sorted(param_list)
        result = ''
        for a in param_list:
            tmp = a.split('=')
            tmp[1] = tmp[1].replace('+', 'a')
            tmp[1] = tmp[1].replace(' ', 'a')
            result += tmp[1]
        return result

    # compute the md5 hex digest of a string
    def calcMD5(self, str_encode):
        m = hashlib.md5()
        m.update(str_encode.encode('utf-8'))
        return m.hexdigest()

    def work(self, url, curtime):
        # Full pipeline: md5 of the sorted param values (hashed twice for
        # odd timestamps), keys shuffled by the hex timestamp, interleaved
        # into the signature and appended to the URL as as/cp.
        url_param = self.parseURL(url)
        p_md5 = self.calcMD5(url_param)
        if curtime & 1:
            p_md5 = self.calcMD5(p_md5)
        hexTime = hex(curtime)[2:]
        aa = self.shuffle(hexTime, self.key1)
        bb = self.shuffle(hexTime, self.key2)
        sig = self.ppp(p_md5, aa, bb)
        return ('%s&as=%s&cp=%s' % (url, sig[:18], sig[18:]))
        # return (sig[:18], sig[18:])
class testHTTPServer_RequestHandler(BaseHTTPRequestHandler):
    """Signs POSTed URLs: the request body is a raw URL string and the
    response is that URL with ts/as/cp parameters appended."""

    def outputtxt(self, content):
        """Send `content` as a 200 JSON response with permissive CORS."""
        self.send_response(200)
        self.send_header('Content-type', 'application/json')
        self.send_header('Access-Control-Allow-Origin', '*')
        self.end_headers()
        self.wfile.write(bytes(content, "utf-8"))

    def do_POST(self):
        # Body length comes from the Content-Length header.
        body_size = int(self.headers['Content-Length'])
        raw_body = self.rfile.read(body_size)
        # The POSTed data is the bare URL to sign.
        target_url = raw_body.decode('utf-8')
        signer = calcSig()
        now = int(time.time())
        target_url = target_url + '&ts=' + str(now)
        self.outputtxt(signer.work(target_url, now))
if __name__ == '__main__':
    # Stand up the signing service on all interfaces.
    port = 8100
    print('starting server, port', port)
    bind_addr = ('0.0.0.0', port)
    server = HTTPServer(bind_addr, testHTTPServer_RequestHandler)
    print('running server...')
    server.serve_forever()
"""CPU functionality."""
import sys
class CPU:
    """Main CPU class.

    LS-8-style emulator: 32 bytes of RAM, 8 general-purpose registers, a
    program counter, and a 255-slot call stack addressed through reg[7].
    """

    def __init__(self):
        """Construct a new CPU with cleared memory and opcode constants."""
        self.ram = [0b00000000] * 32
        self.reg = [0b00000000] * 8
        self.pc = 0
        self.stack = [0b0] * 255
        # Low nibble of each opcode. The upper bits of an instruction encode
        # the operand count (bits 7-6), an ALU flag (bit 5) and a sets-the-PC
        # flag (bit 4); run() decodes those separately, which is why e.g.
        # MUL and LDI may share the same low nibble.
        self.HLT = 0b0001
        self.LDI = 0b0010
        self.PRN = 0b0111
        self.MUL = 0b0010
        self.ADD = 0b0000
        self.POP = 0b0110
        self.PUSH = 0b0101
        self.CALL = 0b0000
        self.RET = 0b0001

    def ram_read(self, mar):
        """Return the byte stored at RAM address `mar`."""
        return self.ram[mar]

    def ram_write(self, mar, mdr):
        """Store value `mdr` at RAM address `mar`."""
        self.ram[mar] = mdr

    def load(self, path):
        """Load a program into memory.

        Each line may hold one binary instruction optionally followed by a
        '#' comment; blank and comment-only lines are skipped.
        """
        try:
            # 'with' guarantees the handle is closed even if a line fails to
            # parse (the original leaked the handle on such errors).
            with open(path, 'r') as load_file:
                address = 0
                for line in load_file:
                    possible_command = line.split("#")[0].strip()
                    if possible_command == "":
                        continue
                    self.ram[address] = int(possible_command, 2)
                    address += 1
        except FileNotFoundError:
            print("file not found")

    def alu(self, op, reg_a, reg_b):
        """ALU operations: apply `op` to reg_a/reg_b, result into reg_a."""
        if op == "ADD":
            self.reg[reg_a] += self.reg[reg_b]
        elif op == "MUL":
            self.reg[reg_a] *= self.reg[reg_b]
        else:
            raise Exception("Unsupported ALU operation")

    def trace(self):
        """
        Handy function to print out the CPU state. You might want to call this
        from run() if you need help debugging.
        """
        print(f"TRACE: %02X | %02X %02X %02X |" % (
            self.pc,
            self.ram_read(self.pc),
            self.ram_read(self.pc + 1),
            self.ram_read(self.pc + 2)
        ), end='')
        for i in range(8):
            print(" %02X" % self.reg[i], end='')
        print()

    def handle_CALL(self, reg_address):
        """Push the return address onto the stack and jump to reg[reg_address]."""
        return_add = self.pc + 2
        self.reg[7] -= 1            # reg[7] is the stack pointer
        sp = self.reg[7]
        self.stack[sp] = return_add
        self.pc = self.reg[reg_address]

    def handle_LDI(self, reg_address, value):
        """Load an immediate `value` into a register."""
        self.reg[reg_address] = value

    def handle_PRN(self, reg_address):
        """Print the numeric value held in a register."""
        print(self.reg[reg_address])

    def handle_POP(self, reg_address):
        """Pop the top of the stack into a register."""
        sp = self.reg[7]
        self.reg[reg_address] = self.stack[sp]
        self.reg[7] += 1

    def handle_PUSH(self, reg_address):
        """Push a register's value onto the stack."""
        self.reg[7] -= 1
        sp = self.reg[7]
        self.stack[sp] = self.reg[reg_address]

    def handle_RET(self):
        """Pop the return address off the stack and resume there."""
        sp = self.reg[7]
        return_add = self.stack[sp]
        self.reg[7] += 1
        self.pc = return_add

    def run(self):
        """Run the CPU: fetch/decode/execute until HLT."""
        running = True
        self.reg[7] = 0b11111111    # initialize the stack pointer
        while running:
            ir = self.ram[self.pc]
            operands = (ir >> 6)                # how many operand bytes follow
            function = ir & 0b00001111          # low-nibble opcode
            alu = (ir >> 5) & 0b001             # instruction is an ALU op
            set_pc = (ir >> 4) & 0b0001         # instruction sets PC itself
            if set_pc == 1:
                # CALL/RET manage the PC; do not advance it afterwards.
                if function == self.CALL:
                    self.handle_CALL(self.ram[self.pc + 1])
                elif function == self.RET:
                    self.handle_RET()
            else:
                if alu == 1:
                    if function == self.MUL:
                        self.alu("MUL", self.ram[self.pc + 1],
                                 self.ram[self.pc + 2])
                    elif function == self.ADD:
                        self.alu("ADD", self.ram[self.pc + 1],
                                 self.ram[self.pc + 2])
                elif function == self.HLT:
                    running = False
                elif function == self.LDI:
                    self.handle_LDI(self.ram[self.pc + 1], self.ram[self.pc + 2])
                elif function == self.PRN:
                    self.handle_PRN(self.ram[self.pc + 1])
                elif function == self.POP:
                    self.handle_POP(self.ram[self.pc + 1])
                elif function == self.PUSH:
                    self.handle_PUSH(self.ram[self.pc + 1])
                else:
                    print("ERROR")
                self.pc += (operands + 1)
|
import scanorama
import scanpy as sc
# Merge several 10x PBMC runs with Scanorama batch correction and write the
# concatenated, corrected result to a single .h5ad file.
base_path = '/Users/zhongyuanke/data/pbmc/'
file1 = base_path+'cd19_b/hg19'
file2 = base_path+'cd4_t/hg19'
file3 = base_path + 'cd4_r_t/hg19'
file4 = base_path+'cd14_monocytes/hg19'
file5 = base_path+'cd34/hg19'
file6 = base_path+'cd56_nk/hg19'
file7 = base_path+'cd8_c/hg19'
file8 = base_path+'3k/hg19'
file9 = base_path + '293t/hg19'
file10 = base_path + 'jurkat/hg19'
file11 = base_path + '293t_jurkat_50_50/hg19'
out_path = '/Users/zhongyuanke/data/result/merge3_scanorama.h5ad'
# Alternative batch selections kept for reference:
# files = [file1, file2, file4, file5, file6, file7, file8, file9, file10, file11]
# files = [file1, file2, file4, file5, file6]
files = [file1, file4, file5]
# Load every 10x matrix into an AnnData object.
adatas = [sc.read_10x_mtx(path) for path in files]
integrated, corrected = scanorama.correct_scanpy(adatas, return_dimred=True)
# Attach each low-dimensional embedding to its corrected batch.
for embedding, batch in zip(integrated, corrected):
    batch.obsm['mid'] = embedding
    print(len(embedding))
adata = corrected[0]
for batch in corrected[1:len(integrated)]:
    adata = adata.concatenate(batch)
adata.write_h5ad(out_path)
|
"""
Registery for associations. Inspired by Fantomas42/django-tagging
"""
from .models import linked_to, related_to
# Module-level list of model classes already wired up via register().
registry = []
class AlreadyRegistered(Exception):
    """
    Raised when register() is called twice for the same model class.
    """
    pass
def register(model, linked_attr='linked', related_attr='related'):
    """
    Sets the given model class up for working with association
    """
    if model in registry:
        raise AlreadyRegistered(
            "The model '%s' has already been registered." %
            model._meta.object_name)
    # Refuse to clobber existing attributes; checked in linked-then-related
    # order, mirroring the customization parameters.
    for attr_name, param_name in ((linked_attr, 'linked_attr'),
                                  (related_attr, 'related_attr')):
        if hasattr(model, attr_name):
            raise AttributeError(
                "'%s' already has an attribute '%s'. You must "
                "provide a custom %s to register." % (
                    model._meta.object_name,
                    attr_name,
                    param_name, ))
    # Add linked method
    setattr(model, linked_attr, linked_to)
    # Add related method
    setattr(model, related_attr, related_to)
    # Finally register in registry
    registry.append(model)
|
def fibonacci(x):
    """Return the x-th Fibonacci number (F(0)=0), or None for non-int input."""
    if not isinstance(x, int):
        return None  # Only accept integers
    prev, curr = 0, 1
    for _ in range(x):
        prev, curr = curr, prev + curr
    return prev
if __name__ == '__main__':
    # Fix: `print map(...)` is Python-2-only syntax (SyntaxError on Python 3);
    # list() materializes the lazy py3 map so the full list is printed.
    print(list(map(fibonacci, range(20))))
# -*- coding: utf-8 -*-
"""
Trong phần này mình sẽ scan snmp để lấy thông tin cơ bản của server về parse ra đồng thời brute force snmp nếu port open
, tương tự như scan tcp
thông tin
- SNMP: UDP port 161
- SNMP Version 1, 2c, v3
- SNMP Authen: community
- SNMP HEADER
|========================================================================================|
| [version]: 1/2/v2c/3 |
|----------------------------------------------------------------------------------------|
| [community]: pass |
|----------------------------------------------------------------------------------------|
| [pdu type] | request id | error-status| error-index| Name | Value | Name | Value | .. |
|========================================================================================|
[pdu_type] : - Get : 0xa0
- GetNext : 0xa1
- Response : 0xa2
- Set: 0xa3
[request id] : là id của request gởi tới agent vào được respone = với ID đó
[error-status] : noError(0), tooBig(1), noSuchName(2), badValue(3), readOnly(4), genErr(5)
[error-index] : số thứ tự của OID có lỗi (nếu có).
[Name]: tên OID
[Value] : value
"""
import argparse
from scapy.all import *
import threading
import signal
# list community default for scann
from scapy.layers.inet import IP, UDP
from scapy.layers.snmp import SNMP, SNMPnext, SNMPvarbind, SNMPresponse, ASN1F_SNMP_PDU_RESPONSE
# Community strings tried against every target (overridden by -f FILE).
default_communities = ['public', 'mtopjcsea', '0']
# default_communities = ['public', 'mtopjcsea', '0', '0392a0', '1234', '2read', '3com', '3Com', '3COM', '4changes', 'access', 'adm', 'admin', 'Admin', 'administrator', 'agent', 'agent_steal', 'all', 'all private', 'all public', 'anycom', 'ANYCOM', 'apc', 'bintec', 'blue', 'boss', 'c', 'C0de',
# 'cable-d',
# 'cable_docsispublic@es0', 'cacti', 'canon_admin', 'cascade', 'cc', 'changeme', 'cisco', 'CISCO', 'cmaker', 'comcomcom', 'community', 'core', 'CR52401', 'crest', 'debug', 'default', 'demo', 'dilbert', 'enable', 'entry', 'field', 'field-service', 'freekevin', 'friend', 'fubar',
# 'guest', 'hello', 'hideit', 'host', 'hp_admin', 'ibm', 'IBM', 'ilmi', 'ILMI', 'intel', 'Intel', 'intermec', 'Intermec', 'internal', 'internet', 'ios', 'isdn', 'l2', 'l3', 'lan', 'liteon', 'login', 'logon', 'lucenttech', 'lucenttech1', 'lucenttech2', 'manager', 'master',
# 'microsoft', 'mngr', 'mngt', 'monitor', 'mrtg', 'nagios', 'net', 'netman', 'network', 'nobody', 'NoGaH$@!', 'none', 'notsopublic', 'nt', 'ntopia', 'openview', 'operator', 'OrigEquipMfr', 'ourCommStr', 'pass', 'passcode', 'password', 'PASSWORD', 'pr1v4t3', 'pr1vat3', 'private',
# ' private', 'private ', 'Private', 'PRIVATE', 'private@es0', 'Private@es0', 'private@es1', 'Private@es1', 'proxy', 'publ1c', 'public', ' public', 'public ', 'Public', 'PUBLIC', 'public@es0', 'public@es1', 'public/RO', 'read', 'read-only', 'readwrite', 'read-write', 'red',
# 'regional', '<removed>', 'rmon', 'rmon_admin', 'ro', 'root', 'router', 'rw', 'rwa', 'sanfran', 'san-fran', 'scotty', 'secret', 'Secret', 'SECRET', 'Secret C0de', 'security', 'Security', 'SECURITY', 'seri', 'server', 'snmp', 'SNMP', 'snmpd', 'snmptrap', 'snmp-Trap',
# 'SNMP_trap', 'SNMPv1/v2c', 'SNMPv2c', 'solaris', 'solarwinds', 'sun', 'SUN', 'superuser', 'supervisor', 'support', 'switch', 'Switch', 'SWITCH', 'sysadm', 'sysop', 'Sysop', 'system', 'System', 'SYSTEM', 'tech', 'telnet', 'TENmanUFactOryPOWER', 'test', 'TEST', 'test2',
# 'tiv0li', 'tivoli', 'topsecret', 'traffic', 'trap', 'user', 'vterm1', 'watch', 'watchit', 'windows', 'windowsnt', 'workstation', 'world', 'write', 'writeit', 'xyzzy', 'yellow', 'ILMI']
# Shared work queue of target IPs consumed by the worker_scan threads.
# NOTE(review): `Queue` (the Python 2 module) is never imported explicitly —
# it only resolves if `from scapy.all import *` re-exports it; confirm.
q = Queue.Queue()
#===================================================================#
# default OID global #
#===================================================================#
# RouteOIDS = {
# 'ROUTDESTOID': [".1.3.6.1.2.1.4.21.1.1", "Destination"],
# 'ROUTHOPOID': [".1.3.6.1.2.1.4.21.1.7", "Next Hop"],
# 'ROUTMASKOID': [".1.3.6.1.2.1.4.21.1.11", "Mask"],
# 'ROUTMETOID': [".1.3.6.1.2.1.4.21.1.3", "Metric"],
# 'ROUTINTOID': [".1.3.6.1.2.1.4.21.1.2", "Interface"],
# 'ROUTTYPOID': [".1.3.6.1.2.1.4.21.1.8", "Route type"],
# 'ROUTPROTOID': [".1.3.6.1.2.1.4.21.1.9", "Route protocol"],
# 'ROUTAGEOID': [".1.3.6.1.2.1.4.21.1.10", "Route age"]
# }
#
# InterfaceOIDS = {
# # Interface Info
# 'INTLISTOID': [".1.3.6.1.2.1.2.2.1.2", "Interfaces"],
# 'INTIPLISTOID': [".1.3.6.1.2.1.4.20.1.1", "IP address"],
# 'INTIPMASKOID': [".1.3.6.1.2.1.4.20.1.3", "Subnet mask"],
# 'INTSTATUSLISTOID': [".1.3.6.1.2.1.2.2.1.8", "Status"]
# }
#
# ARPOIDS = {
# # Arp table
# 'ARPADDR': [".1.3.6.1.2.1.3.1 ", "ARP address method A"],
# 'ARPADDR2': [".1.3.6.1.2.1.3.1 ", "ARP address method B"]
# }
# OIDs queried during the scan: 1.3.6.1.2.1.1.1 is sysDescr (system description).
OIDS = {
    'SYSTEM': ["1.3.6.1.2.1.1.1", "SYSTEM Info"]
}
#
# WINDOWS_OIDS = {
# 'RUNNING PROCESSES': ["1.3.6.1.2.1.25.4.2.1.2", "Running Processes"],
# 'INSTALLED SOFTWARE': ["1.3.6.1.2.1.25.6.3.1.2", "Installed Software"],
# 'SYSTEM INFO': ["1.3.6.1.2.1.1", "System Info"],
# 'HOSTNAME': ["1.3.6.1.2.1.1.5", "Hostname"],
# 'DOMAIN': ["1.3.6.1.4.1.77.1.4.1", "Domain"],
# 'USERS': ["1.3.6.1.4.1.77.1.2.25", "Users"],
# 'UPTIME': ["1.3.6.1.2.1.1.3", "UpTime"],
# 'SHARES': ["1.3.6.1.4.1.77.1.2.27", "Shares"],
# 'DISKS': ["1.3.6.1.2.1.25.2.3.1.3", "Disks"],
# 'SERVICES': ["1.3.6.1.4.1.77.1.2.3.1.1", "Services"],
# 'LISTENING TCP PORTS': ["1.3.6.1.2.1.6.13.1.3.0.0.0.0", "Listening TCP Ports"],
# 'LISTENING UDP PORTS': ["1.3.6.1.2.1.7.5.1.2.0.0.0.0", "Listening UDP Ports"]
# }
#
# LINUX_OIDS = {
# 'RUNNING PROCESSES': ["1.3.6.1.2.1.25.4.2.1.2", "Running Processes"],
# 'SYSTEM INFO': ["1.3.6.1.2.1.1", "System Info"],
# 'HOSTNAME': ["1.3.6.1.2.1.1.5", "Hostname"],
# 'UPTIME': ["1.3.6.1.2.1.1.3", "UpTime"],
# 'MOUNTPOINTS': ["1.3.6.1.2.1.25.2.3.1.3", "MountPoints"],
# 'RUNNING SOFTWARE PATHS': ["1.3.6.1.2.1.25.4.2.1.4", "Running Software Paths"],
# 'LISTENING UDP PORTS': ["1.3.6.1.2.1.7.5.1.2.0.0.0.0", "Listening UDP Ports"],
# 'LISTENING TCP PORTS': ["1.3.6.1.2.1.6.13.1.3.0.0.0.0", "Listening TCP Ports"]
# }
#
# CISCO_OIDS = {
# 'LAST TERMINAL USERS': ["1.3.6.1.4.1.9.9.43.1.1.6.1.8", "Last Terminal User"],
# 'INTERFACES': ["1.3.6.1.2.1.2.2.1.2", "Interfaces"],
# 'SYSTEM INFO': ["1.3.6.1.2.1.1.1", "System Info"],
# 'HOSTNAME': ["1.3.6.1.2.1.1.5", "Hostname"],
# 'SNMP Communities': ["1.3.6.1.6.3.12.1.3.1.4", "Communities"],
# 'UPTIME': ["1.3.6.1.2.1.1.3", "UpTime"],
# 'IP ADDRESSES': ["1.3.6.1.2.1.4.20.1.1", "IP Addresses"],
# 'INTERFACE DESCRIPTIONS': ["1.3.6.1.2.1.31.1.1.1.18", "Interface Descriptions"],
# 'HARDWARE': ["1.3.6.1.2.1.47.1.1.1.1.2", "Hardware"],
# 'TACACS SERVER': ["1.3.6.1.4.1.9.2.1.5", "TACACS Server"],
# 'LOG MESSAGES': ["1.3.6.1.4.1.9.9.41.1.2.3.1.5", "Log Messages"],
# 'PROCESSES': ["1.3.6.1.4.1.9.9.109.1.2.1.1.2", "Processes"],
# 'SNMP TRAP SERVER': ["1.3.6.1.6.3.12.1.2.1.7", "SNMP Trap Server"]
# }
#==========================END DEFINE OID=====================================
def banner(hav=True):
    """Print the tool's ASCII-art banner; pass hav=False to suppress it."""
    # Fix: the original used the Python-2-only `print <expr>` statement;
    # the single-argument call form below behaves the same on Python 2 and 3.
    if hav:
        print("""
"##################################"
" _____ _ ____ _______ "
" / ___// | / / |/ / __ \ "
" \\__ \\/ |/ / /|_/ / /_/ "
" ___/ / /| / / / / ____/ "
"/____/_/ |_/_/ /_/_/ "
" "
"SNMP Scanner & Enumeration Script "
"##################################"
    ##################################################
    """)
class SNMPError(Exception):
    """
    Thin wrapper around Exception for SNMP-related failures.
    """
    pass
class SNMPVersion():
    """Maps between SNMP version labels ('v1'/'v2c'/'v3') and wire numbers."""
    v1 = 0
    v2c = 1
    v3 = 2

    @classmethod
    def iversion(cls, v):
        """String label -> integer version; raises ValueError when unknown."""
        for labels, number in ((('1', 'v1'), cls.v1),
                               (('2', 'v2', 'v2c'), cls.v2c),
                               (('3', 'v3'), cls.v3)):
            if v in labels:
                return number
        raise ValueError('No such version %s' % v)

    @classmethod
    def sversion(cls, v):
        """Integer version -> string label; raises ValueError when unknown."""
        if not v:
            return 'v1'
        if v == 1:
            return 'v2c'
        if v == 2:
            return 'v3'
        raise ValueError('No such version number %s' % v)
###########################################################
# parse input ip
def ipRange(ip):
    """
    Expand "A-B" (or a single address) into the inclusive list of IPs.

    "192.168.1.1-192.168.1.3" -> ["192.168.1.1", "192.168.1.2", "192.168.1.3"]
    """
    pieces = str(ip).strip(" ").split("-")
    start_ip = pieces[0]
    end_ip = pieces[1] if len(pieces) > 1 else pieces[0]
    octets = [int(part) for part in start_ip.split(".")]
    end = [int(part) for part in end_ip.split(".")]
    addresses = [start_ip]
    while octets != end:
        octets[3] += 1
        # Propagate carries right-to-left when an octet overflows 255.
        for pos in (3, 2, 1):
            if octets[pos] == 256:
                octets[pos] = 0
                octets[pos - 1] += 1
        addresses.append(".".join(str(o) for o in octets))
    return addresses
def ipList(list_ip):
    """
    Split a comma-separated list of IPs into individual addresses.

    "192.168.6.200,13.228.20.244" -> ["192.168.6.200", "13.228.20.244"]

    Bug fix: the original returned [] when no comma was present, silently
    dropping a single -li target; a lone IP now yields a one-element list.
    """
    parts = str(list_ip).strip(" ").split(",")
    return [x for x in parts]
def read_file_dic(fi):
    """Return all lines (newlines kept) of the community dictionary file.

    Prints a message and exits the process when the file does not exist.
    """
    if not os.path.exists(fi):
        print("file " + str(fi) + " không tồn tại")
        sys.exit(1)
    # 'with' already closes the handle; the original also called close()
    # redundantly inside the with-block.
    with open(str(fi), "r") as f:
        return f.readlines()
def worker_scan(dic, version, timeout):
    """Brute-force one IP (taken from the global queue `q`) with every
    community string in `dic`.

    Sends an SNMP GetNext for sysDescr (1.3.6.1.2.1.1.1) over UDP/161 and
    prints the first community that yields an error-free response.
    """
    ip = q.get()
    output = ""
    for com in dic:
        try:
            ans, unans = sr(IP(dst=ip) / UDP(sport=RandShort(), dport=161) / SNMP(community=str(com), version=str(version), PDU=SNMPnext(varbindlist=[SNMPvarbind(oid=ASN1_OID("1.3.6.1.2.1.1.1"))])), timeout=float(timeout), verbose=0)
            for send_ans, reci_ans in ans:
                # error-status "0" (noError) means the community was accepted.
                if str(reci_ans.getlayer("SNMP")[SNMPresponse].error.val) == "0":
                    output = ip + ": " + "[" + com + "]" + " - " + str(reci_ans.getlayer("SNMP")[SNMPresponse][SNMPvarbind].value.val)
                    print(output)
                    # Bug fix: the original called sys.exit(0) here, but its
                    # SystemExit was swallowed by the bare `except:` below, so
                    # the early exit never happened; return stops this worker.
                    return
        except Exception:
            # Malformed/missing responses: try the next community string.
            continue
if __name__ == '__main__':
    # Show the ASCII banner.
    banner()
    # Command-line options.
    parse = argparse.ArgumentParser()
    parse.add_argument("-i", "--ip", help="lựa chọn IP để scan", default=None)
    parse.add_argument("-li", "--list_ip", help="lựa chọn IP để scan", default=None)
    parse.add_argument("-f", "--file", help="chon file dict community", default=None)
    parse.add_argument("-t", "--timeout", help="chọn thời gian timeout cho mỗi community", default=1)
    parse.add_argument("-v", "--version", help="chọn version của SNMP", default="v2c")
    args = parse.parse_args()
    if args.file != None:
        default_communities = read_file_dic(args.file)
    if args.ip != None:
        list_ip = ipRange(str(args.ip))
    if args.list_ip != None:
        list_ip = ipList(str(args.list_ip))
    if args.ip == None and args.list_ip == None:
        # Fix: print statements converted to the call form (works on py2/py3).
        print("thêm ip")
        sys.exit(1)
    time_out = args.timeout
    version = args.version
    # One worker thread per target IP.
    NUMBER_THREAD = len(list_ip)
    threads = []
    # Queue up every target for the workers.
    for ip in list_ip:
        q.put(str(ip))
    try:
        for x in range(NUMBER_THREAD):
            w = threading.Thread(target=worker_scan, args=(default_communities, version, time_out))
            # daemon attribute instead of the deprecated setDaemon().
            w.daemon = True
            threads.append(w)
            w.start()
        for t in threads:
            t.join()
    except (KeyboardInterrupt):
        print("Ctrl+C Exit..")
        sys.exit(1)
    # Fix: strftime "%H:%M:%s" used the nonstandard lowercase %s (epoch
    # seconds on glibc); %S (seconds 00-59) was clearly intended.
    print("[#] Scan done at.. " + time.strftime("%Y-%m-%d %H:%M:%S"))
import numpy as np
import pytest
import xarray as xr
import climpred
from climpred import HindcastEnsemble, PerfectModelEnsemble
from climpred.constants import HINDCAST_CALENDAR_STR, PM_CALENDAR_STR
from climpred.tutorial import load_dataset
from climpred.utils import convert_time_index
# Calendar name for xr.cftime_range, derived from the PM calendar string.
# NOTE(review): str.strip("Datetime") removes the characters D/a/t/e/i/m from
# both ends, not the literal prefix "Datetime" — works for the current
# constant but is fragile; confirm against PM_CALENDAR_STR's actual value.
CALENDAR = PM_CALENDAR_STR.strip("Datetime").lower()
# Render xarray objects as plain text (not HTML), e.g. in doctest output.
xr.set_options(display_style="text")
@pytest.fixture(autouse=True)
def add_standard_imports(
    doctest_namespace,
    hindcast_hist_obs_1d,
    hindcast_recon_3d,
    perfectModelEnsemble_initialized_control,
):
    """Populate the doctest namespace with standard imports and sample data."""
    xr.set_options(display_style="text")
    for alias, module in {"np": np, "xr": xr, "climpred": climpred}.items():
        doctest_namespace[alias] = module
    # Seed numpy so doctest outputs are deterministic.
    np.random.seed(42)
    # Pre-built climpred objects for the examples.
    doctest_namespace["HindcastEnsemble"] = hindcast_hist_obs_1d
    doctest_namespace["HindcastEnsemble_3D"] = hindcast_recon_3d
    doctest_namespace["PerfectModelEnsemble"] = perfectModelEnsemble_initialized_control
@pytest.fixture()
def PM_ds3v_initialized_1d():
    """Three-variable MPI perfect-model initialized timeseries xr.Dataset."""
    full = load_dataset("MPI-PM-DP-1D")
    return full.isel(area=1, period=-1, drop=True)
@pytest.fixture()
def PM_ds_initialized_1d(PM_ds3v_initialized_1d):
    """Single-variable MPI perfect-model initialized timeseries xr.Dataset."""
    trimmed = PM_ds3v_initialized_1d.drop_vars(["sos", "AMO"])
    return trimmed
@pytest.fixture()
def PM_ds_initialized_1d_lead0(PM_ds_initialized_1d):
    """MPI perfect-model initialized timeseries shifted into a lead-0 framework."""
    shifted = PM_ds_initialized_1d
    # Shift coordinates so the first lead is 0 instead of 1.
    shifted["lead"] = shifted["lead"] - 1
    shifted["init"] = shifted["init"] + 1
    return shifted
@pytest.fixture()
def PM_ds_initialized_3d_full():
    """MPI perfect-model initialized global maps xr.Dataset."""
    maps = load_dataset("MPI-PM-DP-3D")
    return maps
@pytest.fixture()
def PM_ds_initialized_3d(PM_ds_initialized_3d_full):
    """MPI perfect-model initialized maps subselected to the North Atlantic."""
    region = dict(x=slice(120, 130), y=slice(50, 60))
    return PM_ds_initialized_3d_full.sel(**region)
@pytest.fixture()
def PM_ds3v_control_1d():
    """Three-variable control timeseries matching the perfect-model runs."""
    full = load_dataset("MPI-control-1D")
    return full.isel(area=1, period=-1, drop=True)
@pytest.fixture()
def PM_ds_control_1d(PM_ds3v_control_1d):
    """Single-variable control timeseries matching the perfect-model runs."""
    trimmed = PM_ds3v_control_1d.drop_vars(["sos", "AMO"])
    return trimmed
@pytest.fixture()
def PM_ds_control_3d_full():
    """Global control maps matching the perfect-model runs."""
    maps = load_dataset("MPI-control-3D")
    return maps
@pytest.fixture()
def PM_ds_control_3d(PM_ds_control_3d_full):
    """Control maps subselected to the North Atlantic."""
    region = dict(x=slice(120, 130), y=slice(50, 60))
    return PM_ds_control_3d_full.sel(**region)
@pytest.fixture()
def perfectModelEnsemble_initialized_control_3d_North_Atlantic(
    PM_ds_initialized_3d, PM_ds_control_3d
):
    """PerfectModelEnsemble with `initialized` and `control` for the North Atlantic."""
    ensemble = PerfectModelEnsemble(PM_ds_initialized_3d)
    return ensemble.add_control(PM_ds_control_3d)
@pytest.fixture()
def perfectModelEnsemble_initialized_control(PM_ds_initialized_1d, PM_ds_control_1d):
    """PerfectModelEnsemble with `initialized`, `control` and an uninitialized run."""
    ensemble = PerfectModelEnsemble(PM_ds_initialized_1d)
    ensemble = ensemble.add_control(PM_ds_control_1d)
    return ensemble.generate_uninitialized()
@pytest.fixture()
def perfectModelEnsemble_3v_initialized_control_1d(
    PM_ds3v_initialized_1d, PM_ds3v_control_1d
):
    """1d PerfectModelEnsemble built from the three-variable datasets."""
    ensemble = PerfectModelEnsemble(PM_ds3v_initialized_1d)
    return ensemble.add_control(PM_ds3v_control_1d)
@pytest.fixture()
def hind_ds_initialized_1d():
    """CESM-DPLE initialized hindcast timeseries, mean removed, integer inits."""
    ds = load_dataset("CESM-DP-SST")
    ds["SST"].attrs["units"] = "C"
    # Integer init years for the non-cftime fixtures.
    ds["init"] = ds.init.astype("int")
    return ds
@pytest.fixture()
def hind_ds_initialized_1d_cftime(hind_ds_initialized_1d):
    """CESM-DPLE initialized hindcast timeseries with cftime initializations."""
    converted = convert_time_index(
        hind_ds_initialized_1d, "init", "ds.init", calendar=HINDCAST_CALENDAR_STR
    )
    converted.lead.attrs["units"] = "years"
    return converted
@pytest.fixture()
def hind_ds_initialized_1d_lead0(hind_ds_initialized_1d):
    """CESM-DPLE initialized hindcast timeseries shifted to a lead-0 framework."""
    shifted = hind_ds_initialized_1d
    # Keep variable attrs while shifting the coordinates.
    with xr.set_options(keep_attrs=True):
        shifted["init"] = shifted["init"] + 1
        shifted["lead"] = shifted["lead"] - 1
    return shifted
@pytest.fixture()
def hind_ds_initialized_3d_full():
    """CESM-DPLE initialized hindcast Pacific maps with the init-mean removed."""
    ds = load_dataset("CESM-DP-SST-3D")
    anomalies = ds - ds.mean("init")
    return anomalies
@pytest.fixture()
def hind_ds_initialized_3d(hind_ds_initialized_3d_full):
    """Subselected CESM-DPLE initialized hindcast Pacific maps."""
    window = dict(nlon=slice(0, 10), nlat=slice(0, 12))
    return hind_ds_initialized_3d_full.isel(**window)
@pytest.fixture()
def hist_ds_uninitialized_1d():
    """CESM-LE uninitialized historical timeseries, member mean removed."""
    ds = load_dataset("CESM-LE")
    # add member coordinate
    ds["member"] = range(1, 1 + ds.member.size)
    ds = ds - ds.mean("time")
    # Set units after the subtraction (arithmetic drops variable attrs, which
    # is why the original re-set them here; its earlier, pre-subtraction
    # assignment of the same value was redundant and has been removed).
    ds["SST"].attrs["units"] = "C"
    return ds
@pytest.fixture()
def reconstruction_ds_1d():
    """CESM-FOSI historical reconstruction timeseries, time mean removed."""
    ds = load_dataset("FOSI-SST")
    ds = ds - ds.mean("time")
    ds["SST"].attrs["units"] = "C"
    return ds
@pytest.fixture()
def reconstruction_ds_1d_cftime(reconstruction_ds_1d):
    """CESM-FOSI historical reconstruction timeseries with cftime time axis."""
    ds = reconstruction_ds_1d
    # Fix: the label argument said "ds.init" (copy-paste from the init-axis
    # fixtures) although the converted coordinate here is "time".
    ds = convert_time_index(ds, "time", "ds.time", calendar=HINDCAST_CALENDAR_STR)
    return ds
@pytest.fixture()
def reconstruction_ds_3d_full():
    """CESM-FOSI historical Pacific reconstruction maps, time mean removed."""
    ds = load_dataset("FOSI-SST-3D")
    anomalies = ds - ds.mean("time")
    return anomalies
@pytest.fixture()
def reconstruction_ds_3d(reconstruction_ds_3d_full):
    """Subselected CESM-FOSI historical reconstruction maps."""
    window = dict(nlon=slice(0, 10), nlat=slice(0, 12))
    return reconstruction_ds_3d_full.isel(**window)
@pytest.fixture()
def observations_ds_1d():
    """Observed historical timeseries (ERSST) matching the hindcast fixtures,
    time mean removed."""
    ds = load_dataset("ERSST")
    ds = ds - ds.mean("time")
    ds["SST"].attrs["units"] = "C"
    return ds
@pytest.fixture()
def hindcast_recon_3d(hind_ds_initialized_3d, reconstruction_ds_3d):
    """HindcastEnsemble of 3D `initialized` plus reconstruction observations,
    as anomalies relative to 1964-2014."""
    ensemble = HindcastEnsemble(hind_ds_initialized_3d)
    ensemble = ensemble.add_observations(reconstruction_ds_3d)
    climatology = ensemble.sel(time=slice("1964", "2014")).mean("time").sel(
        init=slice("1964", "2014")
    ).mean("init")
    return ensemble - climatology
@pytest.fixture()
def hindcast_recon_1d_ym(hind_ds_initialized_1d, reconstruction_ds_1d):
    """Yearly-mean HindcastEnsemble of `initialized` and `recon` anomalies."""
    ensemble = HindcastEnsemble(hind_ds_initialized_1d).add_observations(
        reconstruction_ds_1d
    )
    climatology = ensemble.sel(time=slice("1964", "2014")).mean("time").sel(
        init=slice("1964", "2014")
    ).mean("init")
    ensemble = ensemble - climatology
    # Restore the variable attrs that the arithmetic stripped.
    ensemble._datasets["initialized"]["SST"].attrs = hind_ds_initialized_1d["SST"].attrs
    ensemble._datasets["observations"]["SST"].attrs = reconstruction_ds_1d["SST"].attrs
    return ensemble
@pytest.fixture()
def hindcast_hist_obs_1d(
    hind_ds_initialized_1d, hist_ds_uninitialized_1d, observations_ds_1d
):
    """HindcastEnsemble with `initialized`, `uninitialized` and `obs` attached."""
    ensemble = HindcastEnsemble(hind_ds_initialized_1d)
    ensemble = ensemble.add_uninitialized(hist_ds_uninitialized_1d)
    ensemble = ensemble.add_observations(observations_ds_1d)
    # keep_attrs so units survive the anomaly computation.
    with xr.set_options(keep_attrs=True):
        climatology = ensemble.sel(time=slice("1964", "2014")).mean("time").sel(
            init=slice("1964", "2014")
        ).mean("init")
        ensemble = ensemble - climatology
    return ensemble
@pytest.fixture()
def hindcast_obs_1d_for_alignment(
    hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime
):
    """HindcastEnsemble with cftime `initialized` and `obs` for alignment tests.

    Fix: the @pytest.fixture() decorator was missing, so this could not be
    requested as a fixture even though its parameters are themselves fixtures.
    """
    hindcast = HindcastEnsemble(hind_ds_initialized_1d_cftime)
    hindcast = hindcast.add_observations(reconstruction_ds_1d_cftime)
    return hindcast
@pytest.fixture()
def reconstruction_ds_1d_mm(reconstruction_ds_1d_cftime):
    """CESM-FOSI reconstruction linearly interpolated to monthly resolution."""
    monthly = reconstruction_ds_1d_cftime.resample(time="1MS")
    return monthly.interpolate("linear")
@pytest.fixture()
def hindcast_recon_1d_mm(hindcast_recon_1d_ym, reconstruction_ds_1d_mm):
    """HindcastEnsemble with monthly observations and a short initialized
    series (no grid)."""
    initialized = hindcast_recon_1d_ym.get_initialized().sel(init=slice("1964", "1970"))
    # Drop valid_time before re-wrapping (presumably stale after relabelling
    # leads as months — confirm).
    del initialized.coords["valid_time"]
    initialized["lead"].attrs["units"] = "months"
    ensemble = HindcastEnsemble(initialized)
    return ensemble.add_observations(reconstruction_ds_1d_mm)
@pytest.fixture()
def hindcast_recon_1d_dm(hindcast_recon_1d_ym):
    """HindcastEnsemble with daily-interpolated observations (no grid)."""
    hindcast = hindcast_recon_1d_ym.sel(time=slice("1964", "1970"))
    hindcast._datasets["initialized"].lead.attrs["units"] = "days"
    hindcast._datasets["observations"] = (
        hindcast._datasets["observations"].resample(time="1D").interpolate("linear")
    )
    # Fix: restore the Dataset-level attrs. The original assigned the whole
    # observations Dataset (not its .attrs) to the attrs property.
    hindcast._datasets["observations"].attrs = hindcast_recon_1d_ym._datasets[
        "observations"
    ].attrs
    assert "units" in hindcast.get_initialized()["SST"].attrs
    assert "units" in hindcast_recon_1d_ym.get_observations()["SST"].attrs
    assert "units" in hindcast.get_observations()["SST"].attrs
    return hindcast
@pytest.fixture()
def hindcast_S2S_Germany():
    """S2S ECMWF on-the-fly hindcasts (daily leads, weekly inits) with
    matching CPC (t2m, pr) and ERA5 (gh_500) observations."""
    initialized = load_dataset("ECMWF_S2S_Germany")
    observations = load_dataset("Observations_Germany")
    return HindcastEnsemble(initialized).add_observations(observations)
@pytest.fixture()
def hindcast_NMME_Nino34():
    """NMME monthly hindcasts plus OIv2 SST observations for Nino3.4."""
    initialized = load_dataset("NMME_hindcast_Nino34_sst")
    observations = load_dataset("NMME_OIv2_Nino34_sst")
    for ds in (initialized, observations):
        ds["sst"].attrs["units"] = "C"
    # Align obs onto the hindcast grid except the forecast dims.
    matched_obs = observations.broadcast_like(initialized, exclude=("L", "S", "M"))
    return HindcastEnsemble(initialized).add_observations(matched_obs)
@pytest.fixture()
def da_lead():
    """Small random xr.DataArray with `init` and `lead` coords."""
    leads = np.arange(5)
    inits = np.arange(5)
    values = np.random.rand(len(leads), len(inits))
    return xr.DataArray(values, dims=["init", "lead"], coords=[inits, leads])
@pytest.fixture()
def ds1():
    """Small multi-dimensional xr.Dataset with plain coords."""
    values = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
    return xr.Dataset(
        {"air": (("lon", "lat"), values)},
        coords={"lon": [1, 3, 4], "lat": [5, 6, 7]},
    )
@pytest.fixture()
def ds2():
    """Same values as ds1 but on shifted coords (for alignment tests)."""
    values = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
    return xr.Dataset(
        {"air": (("lon", "lat"), values)},
        coords={"lon": [1, 3, 6], "lat": [5, 6, 9]},
    )
@pytest.fixture()
def da1():
    """Small plain two-dimensional xr.DataArray."""
    rows = [[0, 1], [3, 4], [6, 7]]
    return xr.DataArray(rows, dims=("x", "y"))
@pytest.fixture()
def da2():
    """Like da1 but with different values in the middle row."""
    rows = [[0, 1], [5, 6], [6, 7]]
    return xr.DataArray(rows, dims=("x", "y"))
@pytest.fixture()
def multi_dim_ds():
    """xr.Dataset with multi-dimensional coords plus a derived variable."""
    ds = xr.tutorial.open_dataset("air_temperature")
    return ds.assign(**{"airx2": ds["air"] * 2})
@pytest.fixture()
def da_SLM():
    """Random xr.DataArray with dims `S` (init), `L` (lead), `M` (member)."""
    axis = np.arange(5)
    data = np.random.rand(axis.size, axis.size, axis.size)
    return xr.DataArray(data, dims=["S", "L", "M"], coords=[axis, axis, axis])
@pytest.fixture()
def da_dcpp():
    """Random xr.DataArray named like `intake-esm` `hindcastA-dcpp` output:
    dims dcpp_init_year, time, member_id."""
    axis = np.arange(5)
    data = np.random.rand(axis.size, axis.size, axis.size)
    return xr.DataArray(
        data,
        dims=["dcpp_init_year", "time", "member_id"],
        coords=[axis, axis, axis],
    )
@pytest.fixture()
def PM_ds_initialized_1d_ym_cftime(PM_ds_initialized_1d):
    """Perfect-model initialized timeseries with cftime `init` and yearly leads."""
    converted = convert_time_index(
        PM_ds_initialized_1d,
        "init",
        "PM_ds_initialized_1d.init",
        calendar=PM_CALENDAR_STR,
    )
    converted["lead"].attrs["units"] = "years"
    return converted
@pytest.fixture()
def PM_ds_control_1d_ym_cftime(PM_ds_control_1d):
    """Matching control timeseries with a cftime `time` axis."""
    converted = convert_time_index(
        PM_ds_control_1d, "time", "PM_ds_control_1d.time", calendar=PM_CALENDAR_STR
    )
    return converted
@pytest.fixture()
def perfectModelEnsemble_initialized_control_1d_ym_cftime(
    PM_ds_initialized_1d_ym_cftime, PM_ds_control_1d_ym_cftime
):
    """Annual-mean PerfectModelEnsemble with cftime coordinates."""
    ensemble = PerfectModelEnsemble(PM_ds_initialized_1d_ym_cftime)
    return ensemble.add_control(PM_ds_control_1d_ym_cftime)
@pytest.fixture()
def PM_ds_initialized_1d_mm_cftime(PM_ds_initialized_1d):
    """Initialized timeseries faking monthly-spaced cftime inits and
    monthly lead units."""
    monthly_inits = xr.cftime_range(
        start="3004",
        periods=PM_ds_initialized_1d.init.size,
        freq="MS",
        calendar=CALENDAR,
    )
    PM_ds_initialized_1d["init"] = monthly_inits
    PM_ds_initialized_1d["lead"].attrs["units"] = "months"
    return PM_ds_initialized_1d
@pytest.fixture()
def PM_ds_control_1d_mm_cftime(PM_ds_control_1d):
    """Control timeseries faking a monthly-mean cftime time axis."""
    monthly_times = xr.cftime_range(
        start="3000", periods=PM_ds_control_1d.time.size, freq="MS", calendar=CALENDAR
    )
    PM_ds_control_1d["time"] = monthly_times
    return PM_ds_control_1d
@pytest.fixture()
def perfectModelEnsemble_initialized_control_1d_mm_cftime(
    PM_ds_initialized_1d_mm_cftime, PM_ds_control_1d_mm_cftime
):
    """PerfectModelEnsemble built from the monthly-mean cftime initialized and
    control timeseries fixtures."""
    return PerfectModelEnsemble(PM_ds_initialized_1d_mm_cftime).add_control(
        PM_ds_control_1d_mm_cftime
    )
@pytest.fixture()
def PM_ds_initialized_1d_dm_cftime(PM_ds_initialized_1d):
    """MPI perfect-model initialized timeseries Dataset with ``init`` replaced
    by a daily cftime range (fakes daily init separation within one year) and
    lead units set to days."""
    ds = PM_ds_initialized_1d
    daily_inits = xr.cftime_range(
        start="3004", periods=ds.init.size, freq="D", calendar=CALENDAR
    )
    ds["init"] = daily_inits
    ds["lead"].attrs["units"] = "days"
    return ds
@pytest.fixture()
def PM_ds_control_1d_dm_cftime(PM_ds_control_1d):
    """Control timeseries Dataset resampled (with replacement) to 5000 steps and
    re-labelled with a daily cftime axis, faking daily-mean resolution."""
    ds = PM_ds_control_1d.isel(
        time=np.random.randint(0, PM_ds_control_1d.time.size, 5000)
    )
    ds["time"] = xr.cftime_range(
        start="3000", periods=ds.time.size, freq="D", calendar=CALENDAR
    )
    return ds
@pytest.fixture()
def perfectModelEnsemble_initialized_control_1d_dm_cftime(
    PM_ds_initialized_1d_dm_cftime, PM_ds_control_1d_dm_cftime
):
    """PerfectModelEnsemble built from the daily-mean cftime initialized and
    control timeseries fixtures."""
    return PerfectModelEnsemble(PM_ds_initialized_1d_dm_cftime).add_control(
        PM_ds_control_1d_dm_cftime
    )
@pytest.fixture()
def small_initialized_da():
    """Tiny mock of an initialized forecasting system: 4 inits x 1 lead."""
    init_years = [1990, 1991, 1992, 1993]
    leads = [1]
    data = np.random.rand(len(init_years), len(leads))
    return xr.DataArray(
        data,
        dims=["init", "lead"],
        coords=[init_years, leads],
        name="var",
    )
@pytest.fixture()
def small_verif_da():
    """Tiny mock of a verification product: 5 annual time steps."""
    years = [1990, 1991, 1992, 1993, 1994]
    values = np.random.rand(len(years))
    return xr.DataArray(values, dims=["time"], coords=[years], name="var")
|
import math
n = int(input())
i = 2
while i <= n:
if n%i == 0:
print (i)
break
i = i + 1 |
# SECURITY WARNING: hard-coded Twitter API credentials committed to source.
# These secrets should be revoked immediately and loaded from environment
# variables or a secrets manager instead (e.g. os.environ["CONSUMER_KEY"]).
CONSUMER_KEY = "MlQHYIaVcsmjr2h4defzRi88H"
CONSUMER_SECRET = "2u7cPGQwiNLMQHpZh0iw0qEj6aJB6INQw35FSlnwfyL0tIlmdU"
ACCESS_TOKEN = "836816176235888640-6lFKqe8OkkUA8NxYz6cWRdv7cJTEALb"
ACCESS_SECRET = "XjrusK6qGpZtrIZkNgq4fjV8oZLO5DF7FZtrs1RxQnF4X"
"""
Tehnyt Toni Musta. Koodin alussa on muutamia yleisesti käytettäviä funktioita, joille en löytänyt järkevämpää paikkaa. Ohjelman pääfunktio koostuu pelkästään start()-funktiosta(rivi 98), josta voidaan suurinpiirtein seurata koodia ohjelman suorittamassa järjestyksessä. Mikäli haluat arvioida vain normaalin miinaharavan koodin, jätä rivit 127-136, 160-168, 194-213 ja 436-820 lukematta, niissä suoritetaan kolme- ja neljäulotteinen miinaharava.
Ohjelmassa on käytetty python 3.5.1-versiota.
"""
import csv
import random
import re
import datetime
import os
import math
import _thread
# These module-level constants translate user-typed letters into coordinates:
# lower-case letters give the x coordinate, upper-case letters the z coordinate.
LETTERS = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z"]
C_LETTERS = ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z"]
def question(options, q): # Asks the user the question `q` until an acceptable answer is given.
    """Prompt with `q` until the answer is acceptable per `options`.

    options: "int" -> return the answer converted to int;
             "all" -> return any answer verbatim;
             otherwise a container of allowed strings.
    Special answers at any prompt: "q"/"quit" exits the program,
    "h"/"help" clears the screen and prints the help file, then re-asks.
    """
    while True:
        a = input(q)
        if a == "q" or a == "quit":
            quit()
        elif a == "h" or a =="help":
            os.system('clear')
            help()
        elif options == "int":
            try:
                return int(a)
            except(TypeError, ValueError):
                print("Invalid input")
        elif options == "all":
            return a
        elif not a in options:
            print("Invalid input")
        else:
            return a
def scale_question(dimension): # Asks the user for the minefield size and validates the bounds. 26 because there are 26 English letters; 60 because w coordinates come from the minute's remainder; 3 just for fun. Kept in its own function for clarity.
    """Interactively collect field dimensions for a 2D/3D/4D game.

    dimension is the string "2", "3" or "4"; returns (bx, by), (bx, by, bz) or
    (bx, by, bz, bw) respectively, each an int already range-checked.
    """
    while True:
        bx = question("int","\n Now, Write a number for the width of the field(min 3, max 26 ): ")
        if int(bx) <= 26 and int(bx) >= 3:
            break
        else:
            print("Invalid input. Number must be bigger than 3, but smaller than 26")
    while True:
        by = question("int","\n Write a number for the height of the field(min 3): ")
        if int(by) >= 3:
            break
        else:
            print("Invalid input. Number must be bigger than 3")
    if dimension == "3" or dimension == "4":
        while True:
            bz = question("int","\n Write a number for the depth of the field (min 3, max 26): ")
            if int(bz) <= 26 and int(bz) >= 3:
                break
            else:
                print("Invalid input. Number must be bigger than 3, but smaller than 26")
    if dimension == "4":
        while True:
            bw = question("int","\n Write a number for the duration of the field (min 3, max 60): ")
            if int(bw) <= 60 and int(bw) >= 3:
                break
            else:
                print("Invalid input. Number must be bigger than 3, but smaller than 60")
    if dimension == "2":
        return bx, by
    elif dimension == "3":
        return bx, by, bz
    elif dimension == "4":
        return bx, by, bz, bw
def help():
    """Print the instructions file, shown when the player types 'help'/'h'.

    NOTE: shadows the builtin ``help`` — kept for backward compatibility.
    """
    with open("minerake_help.txt") as guide:
        for row in guide.read().splitlines():
            print(row.rstrip())
def opening_screen():
    """Print the ASCII-art splash screen shown at program start-up."""
    with open("minerake_sick_graphics.txt") as art:
        for row in art.read().splitlines():
            print(row.rstrip())
def write_stats(dim, scale, mines, turn, time, date, result):
    """Append one finished game as a CSV row to the stats file."""
    record = [dim, scale, mines, turn, time, date, result]
    with open("minerake_stats.txt", "a", newline="") as stats_file:
        writer = csv.writer(stats_file)
        writer.writerow(record)
def read_stats():
    """Print the recorded game history, newest game first, as a tab table.

    Prints "No current stats" when the stats file does not exist. A malformed
    (short) row aborts the listing silently via the IndexError handler, which
    sits outside the loop — preserved from the original behaviour.
    """
    try:
        with open("minerake_stats.txt") as stats_file:
            print("Dimensions \tSize \tMines \tTurns \tTime \t\tDate \t\t\tEndresult \n")
            rows = list(csv.reader(stats_file))
            for row in reversed(rows):
                print(row[0] + "\t\t" + row[1] + "\t" + row[2] + "\t" + row[3] + "\t" + row[4] + "\t" + row[5] + "\t" + row[6])
            print("\n")
    except IOError:
        print("No current stats")
    except IndexError:
        pass
def start(): # "Main program"
    """Top-level menu loop: show the splash screen, then repeatedly offer
    'play' (start a game) and 'stats' (list past games) until game() returns
    "OFF" (player declined a rematch) or the user quits via question()."""
    os.system("clear")
    start = "ON"  # local loop flag; shadows the function name deliberately
    opening_screen()
    print("\nWelcome to the 4D-minerake. Made by Toni Musta.\n")
    while start != "OFF":
        print(" Write 'play' for a new game, 'stats ' for list of played games and 'help' for instructions. Write 'quit' at any point for quitting.")
        n = question(["play", "stats"],"Write command: ")
        if n == 'play':
            start = game()
        elif n == 'stats':
            read_stats()
def game(): # Game set-up: ask field dimensions, mine count, then offer a rematch.
    """Run set-up + play loop for consecutive games.

    Asks the number of dimensions (2/3/4), the field size via scale_question(),
    and a mine count strictly between 0 and the field volume, then hands off to
    play(). Returns "OFF" once the player declines a rematch, which tells
    start() to exit its menu loop.
    """
    bx = 0
    by = 0
    bz = 0
    bw = 0
    os.system("clear")
    game = "ON"  # loop flag; shadows the function name as elsewhere in this file
    vol = 0
    while game != "OFF":
        a = question(["2","3","4"],"\n Great! Now pick the amount of dimensions you want (2,3 or 4):")
        if a == "2":
            bx, by = scale_question("2")
            if int(bx) < 7 and int(by) < 7:
                print("\nWhew, that's a small square. Well, I suppose size doesn't really matter :^)")
            vol = bx * by
        if a == "3":
            bx, by, bz = scale_question("3")
            if int(bx) < 7 and int(by) < 7 and int(bz) < 7:
                print("\nWhew, that's a small cube. Well, I suppose size doesn't really matter :^)")
            vol = bx * by * bz
        if a == "4":
            bx, by, bz, bw = scale_question("4")
            # BUG FIX: the original tested `int(bz)` (plain truthiness) instead
            # of `int(bz) < 7`, so the joke triggered for any non-zero depth.
            if int(bx) < 7 and int(by) < 7 and int(bz) < 7 and int(bw) < 7:
                print("\nWhew, that's a small terasect. Well, I suppose size doesn't really matter :^)")
            vol = bx * by * bz * bw
        c = question("int","\nHow many mines do you want in it? ( Must be lesser than previous numbers multiplied with each other): ")
        # Mine count must be at least 1 and strictly below the field volume.
        while not (int(c) > 0 and int(c) < vol):
            print("Invalid amount of mines!")
            c = question("int","\nHow many mines do you want in it? ( Must be lesser than previous numbers multiplied with each other): ")
        if int(c) < math.sqrt(vol):
            print("\nWell, you sure do know how to play it safe :v)")
        print("\nLet's start the game!\n")
        play(int(a), int(c), int(bx), int(by), int(bz), int(bw))
        m = input("Do you want to play again?(y/n)")
        if not m == "y":
            game = "OFF"
    return game
def play(dimension, bombs, scalex, scaley, scalez = 0, scalew = 0): # The game's user interface. Only receives the player's commands; the `new` object takes care of running the game itself.
    """Command loop for one game.

    Builds a Plane (2D), Space (3D) or Time (4D) board, then reads commands of
    the form "<verb> <coordinate>" until the board sets its status to "OFF".
    Verbs: step/s, flag/f, and (3D/4D only) go/g. Coordinates mix a lower-case
    letter (x), digits (y) and an upper-case letter (z), e.g. "s c12F".
    """
    os.system("clear")
    play = "ON"
    while True:
        if dimension == 2:
            new = Plane(scalex, scaley, bombs)
            print("Mines: "+str(bombs))
            new.pic()
            new.print_pic()
            break
        elif dimension == 3:
            new = Space(scalex, scaley, scalez, bombs)
            print("Mines: "+ str(bombs))
            new.pic(1)
            break
        elif dimension == 4:
            new = Time(scalex, scaley, scalez, scalew, bombs)
            print("Mines: " + str(bombs))
            new.pic(1, 0)
            break
        else:
            print("Invalid input!")
    while new.status != "OFF":
        try:
            c = question("all","Your next move: ")
            d = c.split(sep=" ")
            if not len(d) == 2:
                print("Invalid input")
            else:
                # Parse the coordinate token: lower-case letter -> x (1-based),
                # digit run -> y, upper-case letter -> z. Missing parts leave
                # the variable unbound, which the except clauses below catch.
                if re.compile(r'[a-z]+').search(d[1]):
                    x = int(LETTERS.index(re.compile('[a-z]+').search(d[1]).group(0)) + 1)
                if re.compile(r'[1-9]+').search(d[1]):
                    y = int(re.compile('[0-9]+').search(d[1]).group(0))
                if re.compile(r'[A-Z]+').search(d[1]):
                    z = int(C_LETTERS.index(re.compile('[A-Z]+').search(d[1]).group(0)) + 1)
                if dimension == 2:
                    if d[0] == "step" or d[0] == "s":
                        if not new.step(x, y) == "Stop":
                            new.evaluate_empties()
                    elif d[0] == "flag" or d[0] == "f":
                        new.flag(x, y)
                        new.evaluate_flags()
                    else:
                        print("Invalid input: Unrecognized command")
                elif dimension == 3:
                    if d[0] == "step" or d[0] == "s":
                        if not new.step(x, y, z) == "Stop":
                            new.evaluate_empties()
                    elif d[0] == "flag" or d[0] == "f":
                        new.flag(x, y, z)
                        new.evaluate_flags()
                    elif d[0] == "go" or d[0] == "g":
                        new.pic(z)
                    else:
                        print("Invalid input: Unrecognized command")
                elif dimension == 4:
                    if d[0] == "step" or d[0] == "s":
                        if not new.wait_step(x, y, z) == "Stop":
                            new.evaluate_empties()
                    elif d[0] == "flag" or d[0] == "f":
                        new.wait_flag(x, y, z)
                        new.evaluate_flags()
                    elif d[0] == "go" or d[0] == "g":
                        new.wait(z)
                    else:
                        print("Invalid input: Unrecognized command")
        except(IndexError, ValueError):
            print("Invalid coordinate!")
            new.evaluate_empties()
        except(UnboundLocalError, TypeError):
            # Raised when the coordinate token lacked a required part above.
            new.evaluate_empties()
            if dimension == 3:
                new.pic(1)
            elif dimension == 4:
                new.pic(1,1)
            print("Invalid input: Incorrect coordinates")
        # Reset so stale coordinates from a previous turn cannot leak through.
        x, y, z = "empty", "empty", "empty"
    del new
    return "OFF"
class Plane:
    """One 2D minefield: state, rendering and game rules for classic mode.

    The board is rendered as a list of strings (self.picture); row 0 is the top
    border, rows 1..scaley are playable, the last row holds column letters.
    Cell (x, y) lives at self.picture[y][2*x].
    """
    # NOTE(review): these are class attributes. `start` is stamped once at
    # class-definition time (module import), so every game's duration is
    # measured from program start, not from construction — suspected bug.
    # `turns += 1` in step() creates an instance attribute, which is fine.
    status = "ON"
    start = datetime.datetime.now() # Used at game end to measure how long the game took
    turns = 0
    def __init__(self, nox, noy, bomb):
        """Create a nox-by-noy field and place `bomb` mines at random."""
        self.number_of_mines = bomb # number_of_mines is only used when recording results
        self.scalex=nox
        self.scaley=noy
        self.boxes_left = nox * noy # Used to decide whether the game is over
        self.picture = [] # Multi-line string list of the printable field
        self.flags=[]
        self.empty_boxes=[] # Work list used to loop the flood fill, temporary
        self.number_boxes=[] # Numbered boxes are stored here, temporary
        self.bombs=[]
        # Draw random coordinates until `bomb` distinct mine positions exist.
        while len(self.bombs) < bomb:
            x=random.randint(1,nox)
            y=random.randint(1,noy)
            coordinate = []
            coordinate.append(x)
            coordinate.append(y)
            if not coordinate in self.bombs:
                self.bombs.append(coordinate)
        self.pic_init()
    def pic_init(self): # Builds the empty field
        """Build the initial picture: border, scaley rows of closed cells
        ("_") with row numbers, and a final row of column letters."""
        firstline = " "
        for i in range(self.scalex):
            firstline += " _"
        self.picture.append(firstline)
        for i in range(self.scaley):
            row = ""
            row += " |"
            for j in range(self.scalex):
                row += "_|"
            row += " " + str(i+1)
            self.picture.append(row)
        x_coord = " "
        for k in range(self.scalex):
            x_coord += LETTERS[k] + " "
        self.picture.append(x_coord)
    def pic(self): # After every player action this applies the pending changes to the picture list.
        """Flush empty_boxes ("#" = opened empty cell) and number_boxes
        (digit = adjacent-mine count) into the picture, clearing empty_boxes."""
        for j in self.empty_boxes:
            t = list(self.picture[j[1]])
            t[j[0]*2] = "#" # '#' marks an opened empty square
            t = "".join(t)
            self.picture[j[1]] = t
        self.empty_boxes = []
        for i in self.number_boxes:
            s = list(self.picture[i[1]])
            s[i[0]*2] = str(i[2])
            s = "".join(s)
            self.picture[i[1]] = s
    def print_pic(self): # This actually prints the picture each turn
        """Print the current board to stdout."""
        for k in self.picture:
            print(k)
    def hit_bomb(self): # Updates the picture when the player steps on a mine. Ouch!
        """Reveal every mine as "X" and print the final board."""
        for i in self.bombs:
            s = list(self.picture[i[1]])
            s[i[0]*2 ] = "X"
            s = "".join(s)
            self.picture[i[1]] = s
        self.print_pic()
    def is_bomb(self,x,y): # Checks whether there is a bomb at the player-given coordinates
        """Return True when (x, y) holds a mine."""
        step_coord=[]
        step_coord.append(x)
        step_coord.append(y)
        if step_coord in self.bombs:
            return True
        else:
            return False
    def if_next_to_bomb(self,x,y): # Checks whether there are mines around the coordinates, i.e. whether this square is a number box
        """Count mines in the 8-neighbourhood of (x, y).

        A positive count also records [x, y, count] in number_boxes as a side
        effect; returns 0 for a cell with no adjacent mines.
        NOTE(review): the distance test counts (x, y) itself too if it is a
        mine — callers check is_bomb() first, so this does not surface.
        """
        number=0
        for k in self.bombs:
            l=x-k[0]
            m=y-k[1]
            if l<=1 and m <=1 and l>=-1 and m>=-1:
                number+=1
        if number>0:
            insertion = []
            insertion.append(x)
            insertion.append(y)
            insertion.append(number)
            self.number_boxes.append(insertion)
            return number
        else:
            return 0
    def add_to_empties(self,x,y): # If the above returns 0, this adds the surrounding boxes to the empty_boxes list, unless those coordinates are already there
        """Queue the 3x3 neighbourhood of (x, y) (clipped to the field) into
        empty_boxes, skipping cells already queued, numbered or mined; each
        newly queued cell decrements boxes_left."""
        x=x-1
        y=y-1
        for v in [0,1,2]:
            x1=x+v
            if x1==0 or x1 > self.scalex:
                continue
            else:
                for w in [0,1,2]:
                    y1=y+w
                    if y1==0 or y1 > self.scaley:
                        continue
                    else:
                        temp_coord = []
                        temp_coord.append(x1)
                        temp_coord.append(y1)
                        if temp_coord in self.empty_boxes:
                            continue
                        elif temp_coord in self.number_boxes:
                            continue
                        elif temp_coord in self.bombs:
                            continue
                        else:
                            self.empty_boxes.append(temp_coord)
                            self.boxes_left -= 1
    def step(self, a, b): # Player-invoked function that opens a coordinate and flood-fills the field.
        """Open cell (a, b). On a mine: reveal, record a loss and return "Stop".
        On a numbered cell: open just that cell. Otherwise flood-fill outward
        by repeatedly expanding empty_boxes (the list grows while iterated —
        intentional here, it is what drives the fill)."""
        self.turns += 1
        if self.is_bomb(a, b) == True:
            os.system("clear")
            self.hit_bomb()
            print("\n BOOOOOOOOM! You're dead. Hope you had a good life. \n")
            print(" You're such a square :3 \n")
            self.end_game("Lost")
            return "Stop"
        elif self.picture[b][2*a] != "_" and self.picture[b][2*a] != "F":
            print("Invalid coordinate")
        else:
            os.system("clear")
            if self.if_next_to_bomb(a, b) > 0:
                self.boxes_left -= 1
                self.pic()
                self.print_pic()
            else:
                self.add_to_empties(a, b)
                for i in self.empty_boxes: # Flood fill
                    c = i[0]
                    d = i[1]
                    if self.if_next_to_bomb(c, d) == False:
                        self.add_to_empties(c, d)
                self.pic()
                self.print_pic()
    def flag(self, a, b): # Places a flag where the player suspects a mine is.
        """Toggle a flag at (a, b): set "F" on a closed cell, restore "_" when
        the cell is already flagged; reject opened cells."""
        os.system("clear")
        f_coord = []
        f_coord.append(a)
        f_coord.append(b)
        if self.picture[b][a*2] == "_" or self.picture[b][2*a] == "F":
            if not f_coord in self.flags:
                self.flags.append(f_coord)
                f = list(self.picture[b])
                f[a*2] = "F"
                f = "".join(f)
                self.picture[b] = f
            else:
                self.flags.remove(f_coord)
                f = list(self.picture[b])
                f[a*2] = "_"
                f = "".join(f)
                self.picture[b] = f
        else:
            print("Invalid input!")
        self.pic()
        self.print_pic()
    def evaluate_empties(self): # Each turn this checks whether more unopened squares than mines remain. If not, the player wins. Also removes flags from opened squares.
        """Check the win condition (only mined cells remain closed) and prune
        stale flags.

        NOTE(review): the condition `!= "_" or != "F"` is always True, so every
        flag is considered stale, and removing from self.flags while iterating
        it skips elements — suspected double bug; confirm intended behaviour.
        """
        for i in self.flags:
            if self.picture[i[1]][2*i[0]] != "_" or self.picture[i[1]][2*i[0]] != "F":
                self.flags.remove(i)
        if self.boxes_left == len(self.bombs):
            print("You won! Congratulations!")
            self.pic()
            self.end_game("Won")
        else:
            self.pic()
            return False
    def evaluate_flags(self): # When the player has placed as many flags as there are mines, this checks whether the player's suspicions were correct.
        """Offer a win check once flag count equals mine count; win only when
        every flag sits on a mine."""
        if len(self.flags) == len(self.bombs):
            q = input("Do you want to see if you got it? (y/n)")
            if q == "y":
                result = ""
                for e in self.flags:
                    if not e in self.bombs:
                        result = "fail"
                        break
                    else:
                        result = "win"
                if result == "win":
                    print("You won! Congratulations!")
                    self.end_game("Won")
                else:
                    print("Try again!")
        self.pic()
    def end_game(self, result): # Writes the result to file once the game ends
        """Record the finished game to the stats file and reset this object."""
        time = (datetime.datetime.now() - self.start).seconds
        write_stats(2, self.scalex * self.scaley, self.number_of_mines, self.turns, str(time// 60)+" min "+str(time%60) + " sec ", datetime.datetime.now().strftime("%d.%m.%y %H:%M:%S"), result)
        self.empty()
    def empty(self): # Clears the object's variables. I did not know whether this is necessary in Python, but as far as I understand this is how it is done in C++
        """Reset all state and flip status to "OFF" so play() exits its loop."""
        self.scalex = 0
        self.scaley = 0
        self.bombs = []
        self.flags = []
        self.empty_boxes = []
        self.number_boxes = []
        self.picture = []
        self.boxes_left = 0
        self.status = "OFF"
class Space: # Here begin the 3D and 4D objects. They mostly contain functions with the same names as Plane's, doing the same things in a different way. After these comes only the one-line main program: start()
    """3D minefield built from `scalez` stacked Plane objects.

    The planes do not interact directly; Space itself coordinates cross-plane
    concerns such as 3D neighbour counting and 3D flood fill. Player z
    coordinates are 1-based; self.planes is 0-based.
    """
    # NOTE(review): class-level `start` is stamped at import time — same
    # timing caveat as on Plane.
    status = "ON"
    start = datetime.datetime.now()
    turns = 0
    def __init__(self, nox, noy, noz, bombs):
        """Create noz Plane layers and distribute `bombs` mines among them
        uniformly at random."""
        self.empty_planes = []
        self.scalex = nox
        self.scaley = noy
        self.scalez = noz
        self.planes = []
        self.number_of_mines = bombs
        local_bombs = []
        for l in range(noz):
            local_bombs.append(0)
        while bombs > 0:
            local = random.randint(1, noz)
            local_bombs[local - 1] += 1
            bombs -= 1
        for num in local_bombs:
            self.planes.append(Plane(nox, noy, num)) # So a Space consists of z two-dimensional planes. They do not directly affect each other; Space takes care of e.g. the three-dimensional flood fill.
        self.pic_init()
    def pic_init(self):
        """Label each plane's first row with its depth letter (A, B, ...)."""
        for pic in range(len(self.planes)):
            self.planes[pic].picture[1] += " " + C_LETTERS[pic]
    def pic(self, z):
        """Clear the screen and draw the plane at depth z (1-based)."""
        os.system("clear")
        self.planes[z - 1].pic()
        self.planes[z - 1].print_pic()
    def is_bomb(self, x, y, z):
        """Return True when (x, y) on plane z holds a mine."""
        return self.planes[z - 1].is_bomb(x, y)
    def if_next_to_bomb(self, x, y, z):
        """3D neighbour count: sum the 2D counts from the plane itself and its
        front/back neighbours, merging the three per-plane number_boxes entries
        into a single entry on plane z (the neighbours' entries are popped)."""
        front = 0
        back = 0
        if z < self.scalez:
            front = self.planes[z].if_next_to_bomb(x, y)
        middle = self.planes[z - 1].if_next_to_bomb(x, y)
        if z > 1:
            back = self.planes[z - 2].if_next_to_bomb(x, y)
        if front == 0 and back == 0:
            return middle
        else:
            if middle == 0:
                add = []
                add.append(x)
                add.append(y)
                add.append(0)
                self.planes[z - 1].number_boxes.append(add)
            self.planes[z - 1].number_boxes[-1][2] += front
            self.planes[z - 1].number_boxes[-1][2] += back
            if front != 0:
                self.planes[z].number_boxes.pop()
            if back != 0:
                self.planes[z - 2].number_boxes.pop()
            return back + middle + front
    def add_to_empties(self, x, y, z):
        """Queue the (x, y) neighbourhood on plane z and both adjacent planes."""
        if z < self.scalez:
            self.planes[z].add_to_empties(x, y)
        self.planes[z - 1].add_to_empties(x, y)
        if z > 1:
            self.planes[z - 2].add_to_empties(x, y)
    def add_to_empty_planes(self, z): # In the 3D model the flood fill must also loop over planes as empty squares are found on them; otherwise empty squares start appearing in odd places.
        """Record [plane, pending-empty-count] work items for plane z and its
        neighbours so step() keeps flood-filling across planes."""
        try:
            add = []
            if z < self.scalez:
                add.append(z + 1)
                add.append(len(self.planes[z].empty_boxes))
                if not add in self.empty_planes:
                    self.empty_planes.append(add)
                add = []
            add.append(z)
            add.append(len(self.planes[z - 1].empty_boxes))
            if not add in self.empty_planes:
                self.empty_planes.append(add)
            add = []
            if z > 1:
                add.append(z - 1)
                add.append(len(self.planes[z - 2].empty_boxes))
                if not add in self.empty_planes:
                    self.empty_planes.append(add)
                add = []
        except(IndexError):
            print(z)
    def step(self, a, b, c):
        """Open (a, b) on plane c: mine -> loss + "Stop"; numbered cell ->
        open it; empty cell -> 3D flood fill driven by empty_planes (the work
        lists grow while iterated, which drives the fill)."""
        self.turns += 1
        current = self.planes[c - 1]
        if self.is_bomb(a, b, c) == True:
            os.system("clear")
            current.hit_bomb()
            print("\n BOOOOOOOOM! You're dead. Hope you had a good life. \n")
            print("\n Remember, 3D-minesweeper is just like regular one, just deeper :] \n")
            self.end_game("Lost")
            return "Stop"
        elif current.picture[b][2*a] != "_" and current.picture[b][2*a] != "F":
            print("Invalid coordinate!")
        else:
            os.system("clear")
            if self.if_next_to_bomb(a, b, c) > 0:
                current.boxes_left -= 1
                self.pic(c)
            else:
                self.add_to_empties(a, b, c)
                self.add_to_empty_planes(c)
                for j in self.empty_planes:
                    for i in self.planes[j[0] - 1].empty_boxes:
                        d = i[0]
                        e = i[1]
                        f = j[0]
                        if self.if_next_to_bomb(d, e, f) == 0:
                            self.add_to_empties(d, e, f)
                            self.add_to_empty_planes(j[0])
                self.pic(c)
    def flag(self, a, b, c):
        """Toggle a flag at (a, b) on plane c."""
        os.system("clear")
        self.planes[c - 1].flag(a, b)
    def evaluate_empties(self):
        """Prune stale flags on every plane and declare a win when each plane
        has only its mined cells left closed.

        NOTE(review): inherits Plane's always-true prune condition and
        remove-while-iterating pattern — confirm intended behaviour.
        """
        for a in self.planes:
            for e in a.flags:
                if a.picture[e[1]][2*e[0]] != "_" or a.picture[e[1]][2*e[0]] != "F":
                    a.flags.remove(e)
        check = True
        for a in self.planes:
            if not len(a.bombs) == a.boxes_left:
                check = False
        if check == True:
            print("You won! Congratulations!")
            self.end_game("Won")
    def evaluate_flags(self):
        """Offer a win check when total flags equal total mines across planes."""
        f_length = 0
        b_length = 0
        for a in self.planes:
            f_length += len(a.flags)
            b_length += len(a.bombs)
        if f_length == b_length:
            q = input("Do you want to see if you got it? (y/n)")
            if q == "y":
                result = ""
                for e in self.planes:
                    for f in e.flags:
                        if not f in e.bombs:
                            result = "fail"
                            break
                        else:
                            result = "win"
                if result == "win":
                    print("You won! Congratulations!")
                    self.end_game("Won")
                else:
                    print("Try again")
    def end_game(self, result):
        """Record the finished 3D game to the stats file and reset the object."""
        time = (datetime.datetime.now() - self.start).seconds
        write_stats(3, self.scalex * self.scaley * self.scalez, self.number_of_mines, self.turns, str(time// 60)+" min "+str(time%60) + " sec ", datetime.datetime.now().strftime("%d.%m.%y %H:%M:%S"), result)
        self.empty()
    def empty(self):
        """Reset all state (including each Plane) and set status to "OFF"."""
        for a in self.planes:
            a.empty()
        self.empty_planes = []
        self.scalex = 0
        self.scaley = 0
        self.scalez = 0
        self.planes = []
        local_bombs = []
        self.status = "OFF"
class Time:
    """4D minefield: `scalew` Space objects shown one at a time as wall-clock
    seconds tick, giving the fourth (time) dimension. Player w coordinates come
    from the second-of-minute modulo scalew via wait()."""
    # NOTE(review): class-level `start` stamped at import time — same timing
    # caveat as on Plane/Space.
    status = "ON"
    start = datetime.datetime.now()
    turns = 0
    def __init__(self, nox, noy, noz, now, bombs):
        """Create `now` Space slices and distribute `bombs` among them at
        random."""
        self.empty_spaces = []
        self.scalex = nox
        self.scaley = noy
        self.scalez = noz
        self.scalew = now
        self.spaces = []
        self.number_of_mines = bombs
        local_bombs = []
        for l in range(now):
            local_bombs.append(0)
        while bombs > 0:
            local = random.randint(1, now)
            local_bombs[local - 1] += 1
            bombs -= 1
        for num in local_bombs:
            self.spaces.append(Space(nox, noy, noz, num))
        self.pic_init()
    def pic_init(self):
        """Stamp each plane header with its time index.

        NOTE(review): the inner loop runs over ``len(self.spaces)`` (= scalew)
        while indexing ``planes`` (length scalez) — suspected bug whenever
        scalew != scalez; confirm.
        """
        for sp in range(len(self.spaces)):
            for pic in range(len(self.spaces)):
                self.spaces[sp].planes[pic].picture[1] += " Time: " + str(sp)
    def pic(self, z, w):
        """Draw plane z of time slice w (w is 0-based here)."""
        self.spaces[w].pic(z)
    def is_bomb(self, x, y, z, w):
        """Return True when (x, y, z) in time slice w holds a mine."""
        return self.spaces[w].planes[z - 1].is_bomb(x, y)
    def if_next_to_bomb(self, x, y, z, w):
        """4D neighbour count: sum 3D counts from this slice and the adjacent
        time slices, merging the per-slice number_boxes entries into one entry
        on slice w (neighbours' entries are popped)."""
        before = 0
        after = 0
        if w < self.scalew - 1:
            after = self.spaces[w + 1].if_next_to_bomb(x, y, z)
        now = self.spaces[w].if_next_to_bomb(x, y, z)
        if w > 0:
            before = self.spaces[w - 1].if_next_to_bomb(x, y, z)
        if before == 0 and after == 0:
            return now
        else:
            if now == 0:
                add = []
                add.append(x)
                add.append(y)
                add.append(0)
                self.spaces[w].planes[z - 1].number_boxes.append(add)
            self.spaces[w].planes[z - 1].number_boxes[-1][2] += after
            self.spaces[w].planes[z - 1].number_boxes[-1][2] += before
            if after != 0:
                self.spaces[w + 1].planes[z - 1].number_boxes.pop()
            if before != 0:
                self.spaces[w - 1].planes[z - 1].number_boxes.pop()
            return before + now + after
    def add_to_empties(self, x, y, z, w):
        """Queue the (x, y, z) neighbourhood in slice w and adjacent slices."""
        if w < self.scalew - 1:
            self.spaces[w + 1].add_to_empties(x, y, z)
        self.spaces[w].add_to_empties(x, y, z)
        if w > 0:
            self.spaces[w - 1].add_to_empties(x, y, z)
    def add_to_empty_planes(self, z, w):
        """Propagate plane work items to slice w and its time neighbours."""
        if w < self.scalew - 1:
            self.spaces[w + 1].add_to_empty_planes(z)
        self.spaces[w].add_to_empty_planes(z)
        if w > 0:
            self.spaces[w - 1].add_to_empty_planes(z)
    def add_to_empty_spaces(self, w): # Here the spaces themselves must also be looped
        """Record [slice, pending-plane-count] work items for slice w and its
        neighbours so step() keeps flood-filling across time slices."""
        add = []
        if w < self.scalew - 1:
            add.append(w + 1)
            add.append(len(self.spaces[w + 1].empty_planes))
            if not add in self.empty_spaces:
                self.empty_spaces.append(add)
            add = []
        add.append(w)
        add.append(len(self.spaces[w].empty_planes))
        if not add in self.empty_spaces:
            self.empty_spaces.append(add)
        add = []
        if w > 0:
            add.append(w - 1)
            add.append(len(self.spaces[w - 1].empty_planes))
            if not add in self.empty_spaces:
                self.empty_spaces.append(add)
            add = []
    def input_thread(self, list, prompt): # This small function is copied from StackOverflow. The user stops the function below with it.
        """Background thread: block on input(), then signal via the shared list.
        (The parameter name `list` shadows the builtin — kept as-is.)"""
        input(prompt)
        list.append(None)
    def wait(self, c, prompt = "stop"): # To achieve four-dimensionality the program prints different slices as time passes: one thread prints a slice every second while another waits for user input. When input arrives, the function stops.
        """Cycle through the time slices (one per wall-clock second) until the
        user presses Enter; returns the selected 0-based w coordinate.

        NOTE(review): the busy `continue` loop spins a full core between
        second boundaries, and `return then - 1` can yield -1 (wrapping to the
        last slice via negative indexing) — confirm both are intended.
        """
        a = self.spaces[c]
        del a
        then = int('{0:%S}'.format(datetime.datetime.now()))% self.scalew
        list = []
        _thread.start_new_thread(self.input_thread, (list, ""))
        while not list:
            if then == int('{0:%S}'.format(datetime.datetime.now()))% self.scalew:
                continue
            else:
                self.pic(c, then)
                print("Press Enter when you want to {}".format(prompt))
                then = int('{0:%S}'.format(datetime.datetime.now()))% self.scalew
        return then - 1
    def wait_step(self, a, b, c): # Receives the time coordinate for the user's step command
        """Let the user pick a time slice interactively, then step there."""
        d = self.wait(c, "step")
        if self.step(a, b, c, d) == "Stop":
            return "Stop"
    def step(self, a, b, c, w):
        """Open (a, b) on plane c of slice w: mine -> loss + "Stop"; numbered
        cell -> open it; empty cell -> 4D flood fill driven by the growing
        empty_spaces / empty_planes / empty_boxes work lists."""
        self.turns += 1
        zed = c - 1
        current = self.spaces[w].planes[zed]
        if self.is_bomb(a, b, c, w) == True:
            os.system("clear")
            current.hit_bomb()
            print("\n BOOOOOOOOM! You're dead. Hope you had a good life. \n")
            print("\n Perhaps 4 dimensions are too much for you :v) :>) :7) :^) \n")
            self.end_game("Lost")
            return "Stop"
        elif current.picture[b][2*a] != "_" and current.picture[b][2*a] != "F":
            print("Invalid coordinate!")
        else:
            os.system("clear")
            if self.if_next_to_bomb(a, b, c, w) > 0:
                current.boxes_left -= 1
                self.pic(c, w)
            else:
                self.add_to_empties(a, b, c, w)
                self.add_to_empty_planes(c, w)
                self.add_to_empty_spaces(w)
                for i in self.empty_spaces:
                    g = i[0]
                    for j in self.spaces[g].empty_planes:
                        f = j[0]
                        for k in self.spaces[g].planes[f - 1].empty_boxes:
                            d = k[0]
                            e = k[1]
                            if self.if_next_to_bomb(d, e, f, g) == 0:
                                self.add_to_empties(d, e, f, g)
                                self.add_to_empty_planes(f, g)
                                self.add_to_empty_spaces(g)
                self.pic(c, w)
    def wait_flag(self, a, b, c): # Receives the time coordinate for the user's flag command
        """Let the user pick a time slice interactively, then flag there."""
        d = self.wait(c, "flag")
        self.flag(a, b, c, d)
    def flag(self, a, b, c, d):
        """Toggle a flag at (a, b) on plane c of slice d."""
        os.system("clear")
        self.spaces[d].planes[c - 1].flag(a, b)
    def evaluate_empties(self):
        """Prune stale flags everywhere and declare a win when every plane of
        every slice has only its mined cells left closed.

        NOTE(review): the prune condition mixes `e.picture` and `a.picture`
        (a is the Space, which has no picture attribute) — since the first
        half of the always-true `or` short-circuits, the bug is latent;
        confirm `e.picture` was intended in both places.
        """
        for a in self.spaces:
            for e in a.planes:
                for i in e.flags:
                    if e.picture[i[1]][2*i[0]] != "_" or a.picture[i[1]][2*i[0]] != "F":
                        e.flags.remove(i)
        check = True
        for a in self.spaces:
            for b in a.planes:
                if not b.boxes_left == len(b.bombs):
                    check = False
        if check == True:
            print("You won! Congratulations!")
            self.end_game("Won")
    def evaluate_flags(self):
        """Offer a win check when total flags equal total mines across all
        slices. (The `length` local is unused — kept byte-identical.)"""
        length = "yes"
        f_length = 0
        b_length = 0
        if not len(self.spaces) == 0:
            for a in self.spaces:
                for b in a.planes:
                    f_length += len(b.flags)
                    b_length += len(b.bombs)
            if f_length == b_length:
                q = input("Do you want to see if you got it? (y/n)")
                if q == "y":
                    result = ""
                    for d in self.spaces:
                        for e in d.planes:
                            for f in e.flags:
                                if not f in e.bombs:
                                    result = "fail"
                                    break
                                else:
                                    result = "win"
                    if result == "win":
                        print("You won! Congratulations!")
                        self.end_game("Won")
                    else:
                        print("Wrong. Try again!")
    def end_game(self, result):
        """Record the finished 4D game to the stats file and reset the object
        (inline reset rather than a separate empty() method)."""
        time = (datetime.datetime.now() - self.start).seconds
        write_stats(4, self.scalex * self.scaley * self.scalez * self.scalew, self.number_of_mines, self.turns, str(time// 60)+" min "+str(time%60)+" sec", datetime.datetime.now().strftime("%d.%m.%y %H:%M:%S"), result)
        for a in self.spaces:
            a.empty()
        self.empty_spaces = []
        self.scalex = 0
        self.scaley = 0
        self.scalez = 0
        self.scalew = 0
        self.spaces = []
        local_bombs = []
        self.status = "OFF"
# Program entry point: launch the interactive main menu.
start()
|
import time
import requests
from lxml import etree
class doubanTop250:
    """Scrape and print the douban.com Top-250 movie chart page by page."""

    def __init__(self):
        pass

    def getHTMLText(self, url, code='UTF-8'):
        """Fetch `url` and return the decoded body.

        Returns the literal string 'exception' on any request failure, keeping
        the original error contract for existing callers.
        """
        try:
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.79 Safari/537.36'
            }
            r = requests.get(url, headers=headers, timeout=30)
            r.raise_for_status()
            r.encoding = code
            return r.text
        # BUG FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and programming errors. Only network/HTTP failures should map to the
        # 'exception' sentinel; anything else now propagates.
        except requests.RequestException:
            return 'exception'

    def parseContent(self, html_str):
        """Parse one chart page and print rank, title, link and director/year
        for each movie, sleeping 1 s between entries to stay polite."""
        # transfer html_str to HTML object
        selector = etree.HTML(html_str)
        # get li element lists
        li = selector.xpath('//ol[@class="grid_view"]/li')
        for li_item in li:
            # get the rank
            rank = li_item.xpath('div[@class="item"]/div[@class="pic"]/em/text()')
            print(rank[0])
            # get the title
            title = li_item.xpath('div[@class="item"]/div[@class="info"]//span[@class="title"]/text()')
            print(title[0])
            # get the link
            link = li_item.xpath('div[@class="item"]/div[@class="info"]/div[@class="hd"]/a//@href')
            print(link[0].strip())
            # get the director
            directorAndYear = li_item.xpath('div[@class="item"]/div[@class="info"]/div[@class="bd"]/p/text()')
            print(directorAndYear[0].strip())
            print(directorAndYear[1].strip())
            print('--------------------------split line-------------------------------------')
            time.sleep(1)

    def spider(self, index):
        ''' 豆瓣top250 '''
        # `index` is the zero-based offset of the first movie on the page.
        URL = 'https://movie.douban.com/top250?start={0}&filter='.format(index)
        # get the html_str for dangdang.com
        html_str = self.getHTMLText(URL)
        self.parseContent(html_str)

    def process(self):
        """Walk all 10 pages (25 movies each) of the Top-250 chart."""
        for i in range(0, 10):
            self.spider(25 * i)
if __name__ == '__main__':
    # Use a distinct variable name so the instance does not shadow the
    # doubanTop250 class itself (the original rebound the class name).
    crawler = doubanTop250()
    crawler.process()
|
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import URL
class FindRedditVideoForm(FlaskForm):
    """Form asking the user for a reddit submission URL.

    The field only validates that the input is a syntactically valid URL;
    reddit-specific validation presumably happens in the view — confirm.
    NOTE(review): SubmitField is imported at module level but no submit button
    is declared here — confirm templates render their own.
    """
    reddit_url = StringField('Reddit submission URL', validators=[URL()])
#!/usr/bin/env python
#--------------------------------------------------------
# Linac Accelerator Nodes for Transport Matrices generation
# These nodes are using the Initial Coordinates particles Attributes.
# Each node (if it is not the first one) calculates the transport matrix
# between the previous node and itself.
# The matrix is a 7x7 matrix that transforms the initial particles
# coordinates to the final ones that are in the bunch.
#--------------------------------------------------------
import math
import sys
import os
# import the finalization function
from orbit.utils import orbitFinalize
# import general accelerator elements and lattice
from orbit.lattice import AccLattice, AccNode, AccActionsContainer
from orbit.py_linac.lattice import MarkerLinacNode
import orbit_utils
from orbit_utils import bunch_utils_functions
from bunch_utils_functions import copyCoordsToInitCoordsAttr
from bunch_utils_functions import transportMtrxFromInitCoords
from orbit_utils import Matrix
class LinacTrMatrixGenNode(MarkerLinacNode):
"""
Linac Accelerator Nodes for Transport Matrices generation.
These nodes are using thethe Initial Coordinates particles Attrubutes.
Each node (if it is not the first one) calculates the transport matrix
between the previous node and itself.
The matrix is a 7x7 matrix that transforms the initial particles
coordinates to the final ones that are in the bu
"""
def __init__(self, trMatricesController, name = "TrMatrixGen"):
if(name == "TrMatrixGen"):
name += name+":"+str(trMatricesController.getCount())
MarkerLinacNode.__init__(self,name)
self.trMatricesController = trMatricesController
self.trMtrxNode_ind = trMatricesController.getCount()
self.use_twiss_weight_x = 0
self.use_twiss_weight_y = 0
self.use_twiss_weight_z = 0
self.relativistic_beta = 0.
self.relativistic_gamma = 0.
#--------------------------------------
self.trMtrx = Matrix(7,7)
#--------------------------------------
self.trMatricesController.addNode(self)
def setInternalIndex(self,ind):
"""
Sets the index of the TrMatrxGenNode in the controller
"""
self.trMtrxNode_ind = ind
def getTrMatricesController(self):
"""
Returns the LinacTrMatricesContrioller that keeps the references to the TrMatrxGenNodes.
"""
return self.trMatricesController
def getTwissWeightUse(self):
"""
Returns (use_x,use,use_z) tuple where use_{} == 1 means the Twiss weights will be used.
"""
res_arr = [True,True,True]
if(self.use_twiss_weight_x == 0): res_arr[0] = False
if(self.use_twiss_weight_y == 0): res_arr[1] = False
if(self.use_twiss_weight_z == 0): res_arr[2] = False
return tuple(res_arr)
def setTwissWeightUse(self,use_twiss_weight_x,use_twiss_weight_y,use_twiss_weight_z):
"""
Sets (use_x,use,use_z) tuple where use_{} == 1 means the Twiss weights will be used.
"""
self.use_twiss_weight_x = 0
self.use_twiss_weight_y = 0
self.use_twiss_weight_z = 0
if(use_twiss_weight_x == True): self.use_twiss_weight_x = 1
if(use_twiss_weight_y == True): self.use_twiss_weight_y = 1
if(use_twiss_weight_z == True): self.use_twiss_weight_z = 1
def track(self, paramsDict):
bunch = paramsDict["bunch"]
self.relativistic_beta = bunch.getSyncParticle().beta()
self.relativistic_gamma = bunch.getSyncParticle().gamma()
if(self.trMtrxNode_ind == 0):
self.trMtrx.unit()
copyCoordsToInitCoordsAttr(bunch)
else:
transportMtrxFromInitCoords(bunch,self.trMtrx,self.use_twiss_weight_x,self.use_twiss_weight_y,self.use_twiss_weight_z)
def trackDesign(self, paramsDict):
"""
This method does nothing for the aperture case.
"""
pass
def getBeta(self):
"""
Returns relativistic beta at this node.
"""
return self.relativistic_beta
def getGamma(self):
"""
Returns relativistic gamma at this node.
"""
return self.relativistic_gamma
def getTransportMatrix(self):
"""
Return transport matrix (7x7).
"""
return self.trMtrx
def getDetXYZ(self, trMtrx = None):
    """
    Return (det_x, det_y, det_z): determinants of the 2x2 sub-matrices of the
    transport matrix for the x, y and z phase-space planes.

    trMtrx defaults to this node's own accumulated matrix.
    """
    # BUG FIX: use `is None` instead of `== None` — a Matrix class that
    # overloads __eq__ could make the equality test misbehave or error out.
    if trMtrx is None:
        trMtrx = self.trMtrx
    det_x = trMtrx.get(0, 0) * trMtrx.get(1, 1) - trMtrx.get(1, 0) * trMtrx.get(0, 1)
    det_y = trMtrx.get(2, 2) * trMtrx.get(3, 3) - trMtrx.get(3, 2) * trMtrx.get(2, 3)
    det_z = trMtrx.get(4, 4) * trMtrx.get(5, 5) - trMtrx.get(5, 4) * trMtrx.get(4, 5)
    return (det_x, det_y, det_z)
def getNormDetXYZ(self):
    """Return the (x, y, z) determinants normalized by the relativistic
    beta*gamma factors of the two nodes the matrix connects."""
    (node_in, node_out) = self.getTwoNodes()
    beta_in = node_in.getBeta()
    beta_out = node_out.getBeta()
    gb_in = beta_in * node_in.getGamma()
    gb_out = beta_out * node_out.getGamma()
    (det_x, det_y, det_z) = self.getDetXYZ(self.trMtrx)
    transverse_factor = gb_out / gb_in
    return (transverse_factor * det_x,
            transverse_factor * det_y,
            (beta_in / beta_out) * det_z)
def getTwoNodes(self):
    """Return the pair of LinacTrMatrixGenNode nodes the transport matrix
    connects: (first node of the controller — or self at index 0, this node)."""
    first = self if self.trMtrxNode_ind == 0 else self.trMatricesController.getNode(0)
    second = self.trMatricesController.getNode(self.trMtrxNode_ind)
    return (first, second)
def printMatrix(self):
    """
    Print the transport matrix together with the names of the two nodes
    it connects. (Python 2 print-statement syntax, as in the rest of the file.)
    """
    name0 = "None"
    if(self.trMtrxNode_ind > 0):
        # NOTE(review): uses node (ind-1) as the "from" node here, while
        # getTwoNodes() pairs this node with node 0 — confirm which is intended.
        name0 = self.trMatricesController.getNode(self.trMtrxNode_ind-1).getName()
    name1 = self.trMatricesController.getNode(self.trMtrxNode_ind).getName()
    print "----Transport matrix--- from name0=",name0," to name1=",name1
    m = self.trMtrx
    # print each row on one line, element by element
    for i in xrange(m.size()[0]):
        for j in xrange(m.size()[1]):
            print ("m(" + str(i) + "," + str(j)+")="+"%12.5g"%m.get(i,j) + " "),
        print ""
class LinacTrMatricesContrioller:
    """
    Keeps the references to the LinacTrMatrixGenNode instances and installs
    them into a linac lattice as child nodes.
    (Class-name spelling kept as-is for backward compatibility with callers.)
    """

    def __init__(self):
        # ordered list of LinacTrMatrixGenNode instances
        self.trMatrxNodes = []

    def getCount(self):
        """Return the number of registered nodes."""
        return len(self.trMatrxNodes)

    def getNode(self, ind):
        """Return the node at the given internal index."""
        return self.trMatrxNodes[ind]

    def addNode(self, trMatrxNode):
        """Register a new TrMatrxGenNode."""
        self.trMatrxNodes.append(trMatrxNode)

    def getNodes(self):
        """Return the list of all registered nodes."""
        return self.trMatrxNodes

    def init(self):
        """Sort the nodes by lattice position and assign internal indices."""
        self.trMatrxNodes = sorted(self.trMatrxNodes, key=lambda node: node.getPosition())
        for node_ind, node in enumerate(self.trMatrxNodes):
            node.setInternalIndex(node_ind)

    def addTrMatrxGenNodes(self, accLattice, node_or_nodes, place=MarkerLinacNode.ENTRANCE):
        """
        Add a LinacTrMatrixGenNode child at `place` to each given lattice node
        (a single node or a tuple/list of nodes), record their positions by
        tracking the lattice actions, and return the sorted node list.
        """
        if isinstance(node_or_nodes, (tuple, list)):
            nodes = list(node_or_nodes)
        else:
            nodes = [node_or_nodes]
        for node in nodes:
            trMatrxGenNode = LinacTrMatrixGenNode(self, node.getName() + ":trMatrx")
            node.addChildNode(trMatrxGenNode, place)
        #----- set up the position of the TrMatrix nodes
        actions = AccActionsContainer()

        def accNodeExitAction(paramsDict):
            """Non-bound function. Sets the position of the TrMatrix nodes."""
            node = paramsDict["node"]
            if isinstance(node, LinacTrMatrixGenNode):
                node.setPosition(paramsDict["path_length"])

        actions.addAction(accNodeExitAction, AccNode.EXIT)
        accLattice.trackActions(actions)
        self.init()
        return self.trMatrxNodes

    def addTrMatrxGenNodesAtEntrance(self, accLattice, node_or_node):
        """Add TrMatrxGenNode children at the entrance of the given node(s)."""
        # BUG FIX: the original referenced the undefined name `node_or_nodes`
        # (the parameter is `node_or_node`) and raised NameError when called.
        return self.addTrMatrxGenNodes(accLattice, node_or_node, MarkerLinacNode.ENTRANCE)

    def addTrMatrxGenNodesAtExit(self, accLattice, node_or_node):
        """Add TrMatrxGenNode children at the exit of the given node(s)."""
        # BUG FIX: same undefined-name bug as in addTrMatrxGenNodesAtEntrance.
        return self.addTrMatrxGenNodes(accLattice, node_or_node, MarkerLinacNode.EXIT)
|
__author__ = 'vasilev_is'
import pickle
import matplotlib.pyplot as plt
from Cases.CasesUtilStuff import IterationInfoAcceptor
import numpy as np
import math
import os, sys
def norm1 (b1,b2):
    """Chebyshev (max-abs) distance between the paired entries of b1 and b2."""
    return max(math.fabs(u - v) for u, v in zip(b1, b2))
def norm2 (b1,b2):
    """Euclidean distance between b1 and b2."""
    difference = np.asarray(b1) - np.asarray(b2)
    return np.linalg.norm(difference)
def norm3 (b1,b2):
    """Manhattan (sum-of-abs) distance between the paired entries of b1 and b2."""
    return sum(math.fabs(u - v) for u, v in zip(b1, b2))
# Path to the pickled list of experiment cases.
datafile = 'resfiles/resdump205_DISP.dat'
with open(datafile, 'rb') as f:
    rrlist = pickle.load(f)
# Filter out the cases that did not converge within 30 iterations.
rrlist_filtered = [case for case in rrlist if case.ll()<30]
print ('total', len(rrlist))
print ('dif', len(rrlist)-len(rrlist_filtered))
folder = "resfiles/chists_spread/chists_spread_n/"
for coeff in range (0,3):
    os.mkdir(folder+"changes_hists_b{0}".format(coeff))
    # For every iteration, build a histogram of the scatter of changes
    # of coefficient `coeff` across all converged cases.
    for it in range (10):
        #it=1
        dflist=[] # list of per-case changes of this coefficient at iteration `it`
        for case in rrlist_filtered:
            #dflist.append(norm3(case[0]['b'], case[1]['b']))
            try:
                dflist.append(case[it+1]['b'][coeff]- case[it]['b'][coeff])
            except:
                # NOTE(review): bare except silently skips cases with fewer
                # iterations — presumably IndexError/KeyError; confirm.
                pass
        fig, ax = plt.subplots( nrows=1, ncols=1 ) # create figure & 1 axis
        ax.hist(dflist,30)
        # These are plots of the vector-component values across iterations.
        fig.savefig (folder+'changes_hists_b{1}/img_{0}.png'.format(it, coeff))
        plt.close(fig)
|
#!/usr/bin/python
#Major keys
# Chromatic scale spelled with sharps and with flats, plus the whole/half-step
# interval patterns for the major and (natural) minor scales.
SHARP_NOTES = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B']
FLAT_NOTES = ['C', 'Db', 'D', 'Eb', 'E', 'F', 'Gb', 'G', 'Ab', 'A', 'Bb', 'B']
MAJOR_STEPS = [2, 2, 1, 2, 2, 2, 1]
MINOR_STEPS = [2, 1, 2, 2, 1, 2, 2]
def letterShift(note):
    # NOTE(review): this one-argument version is shadowed by the two-argument
    # letterShift(notes, letter) defined later in the file, so this definition
    # is dead code; any call letterShift(note) actually reaches the
    # two-argument version and raises TypeError.
    pos = SHARP_NOTES.index(note)
    return SHARP_NOTES[pos:] + SHARP_NOTES[:pos]
def majorAlgo(note):
    """
    Return the major scale of `note` as a space-separated string
    (with a trailing space), built from the sharp-note chromatic scale.

    BUG FIX: the original called letterShift(note) with one argument, but
    that definition is shadowed by the later two-argument
    letterShift(notes, letter), so the call raised TypeError. The rotation
    is now computed inline.
    """
    pos = SHARP_NOTES.index(note)
    seedList = SHARP_NOTES[pos:] + SHARP_NOTES[:pos]
    # scale degrees: whole steps up to the fourth, then from the fourth on
    newList = [seedList[i] for i in range(0, 5, 2)]
    newList += [seedList[i] for i in range(5, 12, 2)]
    return ''.join(n + ' ' for n in newList)
def letterShift(notes, letter):
    """Rotate `notes` so that it starts at `letter`."""
    start = notes.index(letter)
    return notes[start:] + notes[:start]
def major():
    """Interval pattern (whole/half steps) of the major scale."""
    return MAJOR_STEPS
def minor():
    """Interval pattern (whole/half steps) of the natural minor scale."""
    return MINOR_STEPS
def litmus(notes):
    """True when stripping '#' marks does not merge any two distinct notes,
    i.e. each letter name appears at most once in the scale spelling."""
    stripped = ''.join(n.replace('#', '') for n in notes)
    return len(set(notes)) == len(set(stripped))
def createList(seedList, key):
    """Walk `seedList` by the step pattern `key`, collecting one note per step."""
    picked = []
    position = 0
    for step in key:
        picked.append(seedList[position])
        position += step
    return picked
def toFlat(seedList):
    """Replace every sharp note with its flat spelling (B# maps to Cb,
    matching the original replacement table); other notes pass through."""
    enharmonic = {'G#': 'Ab', 'A#': 'Bb', 'B#': 'Cb',
                  'C#': 'Db', 'D#': 'Eb', 'F#': 'Gb'}
    return [enharmonic.get(note, note) for note in seedList]
def toFlatOrSharp(seedList):
    """Respell F as E# and C as B#; other notes pass through unchanged."""
    respell = {'F': 'E#', 'C': 'B#'}
    return [respell.get(note, note) for note in seedList]
def makeScale(letter, maj):
    """
    Build the scale starting at `letter`; `maj` truthy selects the major
    pattern, otherwise the minor pattern. Returns an apology string when
    `letter` is not a known note.

    BUG FIX: the original had `returnList` as a bare no-op expression where a
    `return returnList` was clearly intended, so a successfully respelled
    scale was discarded. The two trailing `key == major()` / `key == minor()`
    branches returned the identical expression and are collapsed into one
    fallback.
    """
    key = major() if maj else minor()
    if letter not in SHARP_NOTES and letter not in FLAT_NOTES:
        return 'Sorry, {0} does not have a key.'.format(letter)
    # build from the sharp or flat chromatic scale depending on the letter
    if 'b' not in letter:
        returnList = createList(letterShift(SHARP_NOTES, letter), key)
    else:
        returnList = createList(letterShift(FLAT_NOTES, letter), key)
    if not litmus(returnList):
        returnList = toFlatOrSharp(returnList)
        if litmus(returnList):
            return returnList
        # fallback: rebuild from sharps and convert to flats
        return toFlat(createList(letterShift(SHARP_NOTES, letter), key))
    return returnList
'''
for i in SHARP_NOTES:
print(i + ' = ' + str(makeScale(i, minor())))
for i in SHARP_NOTES:
print(i + ' = ' + str(makeScale(i, major())))
print('a' + ' = ' + str(makeScale('A', major())))
print('a' + ' = ' + str(makeScale('A', minor())))
print makeScale("F", major())
''' |
import pygame
import math
import numpy as np
import random
import time
def checkpattern(col, row):
    """True when col and row have the same parity — used to alternate the
    checkerboard square colours."""
    return (row % 2) == (col % 2)
def drawSpirit(screen, col, row, type, myfont):
    """
    Draws a spirit at pos col, row of type = [E (empty), B (bomb), 1, 2, 3, 4, 5, 6]

    'E' draws only the background square (shade chosen by checkerboard
    parity); any other type first repaints the background via a recursive
    'E' call and then blits the type's glyph centered in the square.
    NOTE(review): a type with no matching branch (e.g. 'B') leaves `text`
    unbound and raises UnboundLocalError — confirm callers never pass it.
    """
    if type == 'E':
        if checkpattern(col,row):
            c = (242, 244, 247)
        else:
            c = (247, 249, 252)
        pygame.draw.rect(screen, c, pygame.Rect(col*SIZEOFSQ, row*SIZEOFSQ, SIZEOFSQ, SIZEOFSQ))
    else:
        # repaint the empty background before drawing the glyph on top
        drawSpirit(screen, col, row, 'E', myfont)
        if type == 1:
            text = myfont.render("1", 1, (0, 204, 0))
        elif type == 2:
            text = myfont.render("2", 1, (255, 204, 0))
        elif type == 3:
            text = myfont.render("3", 1, (204, 0, 0))
        elif type == 4:
            text = myfont.render("4", 1, (0, 51, 153))
        elif type == 5:
            text = myfont.render("5", 1, (255, 102, 0))
        elif type == 6:
            text = myfont.render("6", 1, (255, 102, 0))
        elif type == 'flag':
            text = myfont.render("F", 1, (255, 0, 0))
        #Get the text rectangle and center it inside the rectangles
        textRect = text.get_rect()
        textRect.center = (col*SIZEOFSQ + int(0.5*SIZEOFSQ)),(row*SIZEOFSQ + int(0.5*SIZEOFSQ))
        screen.blit(text, textRect)
def findNeighbors2(y, x, grid): #Taken online, y = col x = row, return [(row,col),(row,col)]
    """Return the in-bounds 8-neighborhood of (y, x) as (y2, x2) tuples.

    NOTE(review): the comment says y = col, x = row, yet x is bounded by the
    column count and y by the row count — the axis naming looks swapped;
    findNeighbors below is the version actually used elsewhere.
    """
    n_cols = grid.shape[1]
    n_rows = grid.shape[0]
    result = []
    for x2 in range(x - 1, x + 2):
        for y2 in range(y - 1, y + 2):
            if (-1 < x < n_cols and -1 < y < n_rows
                    and (x != x2 or y != y2)
                    and 0 <= x2 < n_cols and 0 <= y2 < n_rows):
                result.append((y2, x2))
    return result
def findNeighbors(rowin, colin, grid):
    """Return the valid 8-neighborhood of cell (rowin, colin) as a list of
    (row, col) tuples, clipped to the grid bounds."""
    n_cols = grid.shape[1]
    n_rows = grid.shape[0]
    if not (-1 < rowin < n_rows and -1 < colin < n_cols):
        # center cell itself out of bounds: no neighbors (matches original)
        return []
    result = []
    for c in range(colin - 1, colin + 2):
        for r in range(rowin - 1, rowin + 2):
            if (r != rowin or c != colin) and 0 <= c < n_cols and 0 <= r < n_rows:
                result.append((r, c))
    return result
def sumMines(grid, col, row):
    """Count the bombs ('B') in the 8-neighborhood of cell (row, col)."""
    return sum(1 for r, c in findNeighbors(row, col, grid) if grid[r, c] == 'B')
def initBoard(screen, grid, startcol, startrow, mines):
    """Place `mines` bombs at random cells — avoiding the first-clicked cell
    and its neighborhood — then fill every remaining cell with its
    adjacent-mine count, or 'E' when that count is zero. Returns the grid."""
    n_cols = grid.shape[1]
    n_rows = grid.shape[0]
    remaining = mines
    while remaining > 0:
        r = random.randint(0, n_rows - 1)
        c = random.randint(0, n_cols - 1)
        # keep the first click and its surroundings bomb-free; skip duplicates
        if ((r, c) not in findNeighbors(startrow, startcol, grid)
                and (r, c) != (startrow, startcol)
                and grid[r][c] != 'B'):
            grid[r][c] = 'B'
            remaining -= 1
    # fill in the numbers once all bombs are placed
    for c in range(n_cols):
        for r in range(n_rows):
            if grid[r][c] == 'B':
                continue
            adjacent = sumMines(grid, c, r)
            grid[r][c] = adjacent if adjacent > 0 else 'E'
    return grid
def printBoard(grid):
    """Print the grid row by row (a blank-ish separator line before each row,
    cells space-separated on one line)."""
    total_rows = grid.shape[0]
    total_cols = grid.shape[1]
    for r in range(total_rows):
        print(' ')
        for c in range(total_cols):
            print(grid[r][c], end=' ')
def reveal(screen, grid, col, row, myfont, checked, press = "LM"):
    """
    Reveal the cell at (col, row).

    Left click ("LM"): if the cell was not visited yet, mark it in `checked`,
    draw its value unless it is a bomb, and flood-fill recursively through
    empty ('E') neighbors. Right click ("RM"): draw a flag on the cell.
    `checked` is a per-call visited matrix indexed [row][col].
    """
    if press == "LM":
        if checked[row][col] != 0:
            return
        checked[row][col] = checked[row][col] + 1
        if grid[row][col] != 'B':
            #print(grid[row][col])
            drawSpirit(screen, col, row, grid[row][col], myfont)
            #pygame.display.flip()
            #time.sleep(0.2)
            #print(checked)
            #time.sleep(5)
            if grid[row][col] == 'E':
                # flood-fill: recurse into every unvisited neighbor
                neighbors = findNeighbors(row, col, grid)
                for n in neighbors:
                    if not checked[n[0],n[1]]:
                        reveal(screen, grid, n[1], n[0], myfont, checked)
    elif press == "RM":
        drawSpirit(screen, col, row, "flag", myfont)
if __name__ == "__main__":
    # Board configuration: grid dimensions, square size in pixels, bomb count.
    ROWS = 6
    COLS = 6
    SIZEOFSQ = 100
    MINES = 6
    grid = np.zeros((ROWS,COLS), dtype=object)
    #color of squares
    c1 = (4, 133, 223)
    c2 = (4, 145, 223)
    pygame.init()
    pygame.font.init()
    myfont = pygame.font.SysFont("monospace-bold", 100)
    screen = pygame.display.set_mode((COLS * SIZEOFSQ, ROWS * SIZEOFSQ))
    rects = []
    #Initialize Game: draw the checkerboard and remember each square's rect
    # (rects is indexed row-major: i = row * COLS + col).
    for row in range(ROWS):
        for col in range(COLS):
            if checkpattern(col, row):
                c = c1
            else:
                c = c2
            rects.append(pygame.draw.rect(screen, c, pygame.Rect(col*SIZEOFSQ, row*SIZEOFSQ, SIZEOFSQ, SIZEOFSQ)))
    done = False
    firstClick = True
    while not done:
        for event in pygame.event.get(): #If someone clicks or does something
            #pygame.draw.rect(screen, (0, 128, 255), pygame.Rect(30, 30, 60, 60))
            if event.type == pygame.QUIT:
                done = True
            if event.type == pygame.MOUSEBUTTONDOWN:
                pos = pygame.mouse.get_pos()
                for i, rect in enumerate(rects):
                    if rect.collidepoint(pos):
                        #print(i)
                        # map flat rect index back to board coordinates
                        col = i % COLS
                        row = math.floor(i/COLS)
                        print(row, col)
                        if firstClick:
                            # bombs are placed only after the first click so
                            # the first click can never be a bomb
                            grid = initBoard(screen, grid, col, row, MINES)
                            firstClick = False
                            printBoard(grid)
                        # NOTE(review): a fresh zeros matrix is passed as
                        # `checked` on every click, so revealed state is not
                        # remembered between clicks — confirm intended.
                        if pygame.mouse.get_pressed() == (1, 0, 0):
                            reveal(screen, grid, col, row, myfont, np.zeros_like(grid))
                        elif pygame.mouse.get_pressed() == (0, 0, 1):
                            reveal(screen, grid, col, row, myfont, np.zeros_like(grid), press = "RM")
        """
        neighbors = findNeighbors(col,row, grid)
        for n in neighbors:
            drawSpirit(screen, n[0], n[1], 'one', myfont)
        """
        pygame.display.flip()
"""
Define model functions for discriminators.
Author: Nikolay Lysenko
"""
import tensorflow as tf
def basic_mnist_d_network_fn(d_input: tf.Tensor, reuse: bool) -> tf.Tensor:
    """
    Build the graph of a basic MNIST-related discriminator and return its
    logits.

    Layer sizes are chosen for the setup:
    * n_fragments_per_image = 7,
    * internal_size = 2,
    * frame_size = 1.
    Define a separate function for any other setup.

    :param d_input:
        tensor to be passed to the discriminator as input
    :param reuse:
        if `False`, all variables are created from scratch,
        if `True`, variables that already exist are used
    :return:
        logits that are calculated by discriminator
    """
    with tf.variable_scope('discriminator', reuse=reuse):
        conv_one = tf.layers.conv2d(
            d_input, filters=16, kernel_size=[2, 2], activation=tf.nn.relu)
        conv_two = tf.layers.conv2d(
            conv_one, filters=32, kernel_size=[2, 2], activation=tf.nn.relu)
        flattened = tf.layers.flatten(conv_two)
        hidden = tf.layers.dense(
            flattened, units=64, activation=tf.nn.relu)
        # dropout is active only while training (i.e. on the first build)
        regularized = tf.layers.dropout(
            hidden, rate=0.4, training=(not reuse))
        logits = tf.layers.dense(regularized, units=2)
    return logits
def basic_mnist_d_train_op_fn(
        logits: tf.Tensor, labels: tf.Tensor,
        learning_rate: float, beta_one: float
) -> tf.Operation:
    """
    Build the training operation for the basic MNIST-related discriminator.

    :param logits:
        logits that are calculated by discriminator
    :param labels:
        true labels
    :param learning_rate:
        learning rate for Adam optimizer
    :param beta_one:
        exponential decay rate for the first moment estimates
    :return:
        training operation
    """
    # named loss node so it can be fetched by name elsewhere
    loss = tf.identity(
        tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits),
        name='d_loss')
    optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate, beta1=beta_one)
    return optimizer.minimize(
        loss=loss,
        global_step=tf.train.get_global_step(),
        name='d_train_op')
|
from datetime import date, datetime
import sys
class PriceCalculator():
    """
    Calculates the quote price depending on the services price
    and the data provided by the user.
    """

    def __init__(self, total=0):
        # running total for the quote
        self.total = total

    def process_data(self, service_data, user_data):
        """
        Compute the quote total from the service price multipliers and the
        user-submitted data.

        service_data: object exposing `deadline_price` and `concept_price`.
        user_data: mapping with optional 'deadline_date' ("%m/%d/%Y" string)
            and 'concept_amount' (numeric string) entries.
        """
        # todays date
        today = date.today()
        deadline_price_mult = service_data.deadline_price
        concept_price_mult = service_data.concept_price
        deadline_date = user_data.get('deadline_date') or False
        concept_amount = user_data.get('concept_amount') or False
        # calculate the price of concept
        # NOTE(review): int(False) == 0, so a missing amount contributes nothing
        concept_price = int(concept_amount) * concept_price_mult
        if deadline_date:
            deadline_date = datetime.strptime(deadline_date, "%m/%d/%Y").date()
            # calculate the difference between now and deadline data in days
            diff_in_days = abs((today - deadline_date).days)
            multiplier = (24 - diff_in_days)
            if multiplier <= 0:
                self.total = concept_price + deadline_price_mult
            else:
                self.total = (multiplier * deadline_price_mult) + concept_price
        else:
            # BUG FIX: this fallback previously ran unconditionally after the
            # deadline branch, clobbering the deadline-based total; it now
            # applies only when the user did not set a deadline.
            self.total = deadline_price_mult + concept_price

    def get_total(self):
        """Return the computed total (a fixed 100 under 'test' CLI runs)."""
        if 'test' in sys.argv:
            return 100
        return self.total
|
import sys

# Redirect stdin so input() reads the bundled test-data file.
sys.stdin = open("input4865.txt", "r")

T = int(input())
for tc in range(1, T + 1):
    str1 = input()
    str2 = input()
    # Characters of interest: those present in str1. A set gives O(1)
    # membership tests instead of the original O(n) list scan.
    wanted = set(str1)
    # Frequency of each wanted character within str2.
    counts = {}
    for ch in str2:
        if ch in wanted:
            counts[ch] = counts.get(ch, 0) + 1
    # BUG FIX (idiom): the original stored the maximum in a local named `max`,
    # shadowing the builtin; use the builtin max() over the counts instead.
    print("#{} {}".format(tc, max(counts.values())))
class Post:
    """A post record plus its related entities, parsed via the helpers module."""

    def __init__(self, post_id, name, tagline, created_at, day, comments_count, votes_count, discussion_url,
                 redirect_url, screenshot_url, maker_inside, user, current_user, comments=None, votes=None,
                 related_links=None, install_links=None, related_posts=None, media=None, description=None, topics=None,
                 external_links=None, featured=None, exclusive=None, product_state=None,
                 category_id=None, badges=None, reviews_count=None, positive_reviews_count=None,
                 negative_reviews_count=None, neutral_reviews_count=None, makers=None, platforms=None):
        # imported here rather than at module level — presumably to avoid a
        # circular import at load time; TODO confirm
        from .. import helpers
        # plain scalar fields, stored as received
        self.id = post_id
        self.name = name
        self.tagline = tagline
        self.created_at = created_at
        self.day = day
        self.comments_count = comments_count
        self.votes_count = votes_count
        self.discussion_url = discussion_url
        self.redirect_url = redirect_url
        self.screenshot_url = screenshot_url
        self.maker_inside = maker_inside
        self.current_user = current_user
        # fields parsed into richer objects by the helpers module
        self.user = helpers.parse_users(user)
        self.comments = helpers.parse_comments(comments)
        self.votes = helpers.parse_votes(votes)
        self.related_links = helpers.parse_related_links(related_links)
        self.install_links = helpers.parse_install_links(install_links)
        #
        self.description = description
        self.featured = featured
        self.exclusive = exclusive
        self.product_state = product_state
        self.category_id = category_id
        self.reviews_count = reviews_count
        self.positive_reviews_count = positive_reviews_count
        self.negative_reviews_count = negative_reviews_count
        self.neutral_reviews_count = neutral_reviews_count
        self.makers = helpers.parse_users(makers)
        self.platforms = helpers.parse_platforms(platforms)
        self.topics = helpers.parse_topics(topics)
        self.external_links = helpers.parse_external_links(external_links) # around the web
        self.badges = helpers.parse_badges(badges)
        self.related_posts = helpers.parse_related_posts(related_posts)
        self.media = helpers.parse_media(media)
|
class Shelf(object):
    """ Abstract class to build the
    concrete shelve classes which are
    implemented as queues.

    BUG FIX: the original did not parse — `class Shelf(Object)` referenced an
    undefined name, and `-> []Orders` is Go-style, not Python. Annotations are
    now strings so the module imports without the project-level Order type;
    method signatures (no `self`, stub bodies) are kept as in the original.
    """
    def pop_by_id(id: int) -> "Order":
        """Remove and return the order with the given id."""
        pass

    def push(order: "Order"):
        """Add an order to the shelf."""
        pass

    def get_next() -> "list[Order]":
        """Return the next orders to process."""
        pass
|
import numpy as np
def compute_rank(array):
    """Rank the entries of `array` ("task 2: rigorous ranking").

    Stub: not implemented yet — raises NotImplementedError unconditionally.
    """
    # implement task 2: rigorous ranking here
    raise NotImplementedError
|
numbers = [ 4, 8, 3, 9]

# quadruple every entry
multi_num = [value * 4 for value in numbers]

print(multi_num)
|
import numpy as np
import os
import random
import glob
class MelSplitter(object):
    """
    Splits mel arrays into batch segments of (optionally) random length and
    random starting frame.

    Methods:
        get_seg_assignment: choose a (start, end) frame pair for every mel input
        split_mel: slice one mel array according to such a pair
        split_all_mels: apply split_mel to every mel in a data dictionary
    """

    def __init__(self, config):
        """
        Args:
            config: dict with keys
                min_frames: minimum segment length (frames)
                max_frames: maximum segment length (frames)
                fft_hop_size: fft hop size (msec)
                fft_window_size: fft window size (msec)
                random_segment: if True, length and start frame are randomized
                uniform_seg_frames: fixed segment length used when
                    random_segment is off
        """
        self.min_frames = config["min_frames"]       # e.g. 140
        self.max_frames = config["max_frames"]       # e.g. 180
        self.hop_size = config["fft_hop_size"]       # e.g. 10 ms
        self.win_len = config["fft_window_size"]     # e.g. 25 ms
        self.rand_seg = config["random_segment"]
        self.seg_len = config["uniform_seg_frames"]  # used only when random_segment is off

    def get_seg_assignment(self, datadict, rand_seg):
        """
        Assign a (start, end) frame index pair for each mel input.

        Args:
            datadict: dict where 'mel' is a list of np arrays and 'mel_len'
                is a list of their lengths
            rand_seg: if True, segment length and start frames are random
        Returns:
            list of (start, end) tuples, index-aligned with datadict['mel']
        """
        print(f'seg random {rand_seg}')
        all_mel_len = datadict['mel_len']
        if rand_seg:
            shortest_idx = np.argmin(all_mel_len)
            shortest_mel_len = all_mel_len[shortest_idx]
            print(f"shortest_idx {shortest_idx} shortest_mel_len {shortest_mel_len}")
            # random extra length on top of the minimum, bounded so the
            # segment still fits inside the shortest mel
            max_extra = shortest_mel_len - self.min_frames
            print(f"max_extra {max_extra}")
            batch_frame_len = self.min_frames + random.randint(0, max_extra)
        else:
            batch_frame_len = self.seg_len
        starts = [random.randint(0, mel_len - batch_frame_len) for mel_len in all_mel_len]
        return [(start, start + batch_frame_len) for start in starts]

    def split_mel(self, mel, start_end_frame):
        """
        Slice one mel array to the [start, end) frame range.

        Args:
            mel: np array, mel feature
            start_end_frame: (start, end) frame index tuple
        Returns:
            the sliced mel segment
        """
        start, end = start_end_frame
        return mel[start:end]

    def split_all_mels(self, start_end_frame_for_each_mel, datadict):
        """
        Apply split_mel to every mel in datadict using its assigned range.

        Args:
            start_end_frame_for_each_mel: list of (start, end) tuples
            datadict: dict where 'mel' is a list of np arrays
        Returns:
            list of sliced mel segments
        """
        mels = datadict['mel']
        return [self.split_mel(mels[i], frame_range)
                for i, frame_range in enumerate(start_end_frame_for_each_mel)]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from itertools import chain
from operator import attrgetter
from .._compat import ffilter
from ._summarizer import AbstractSummarizer
class EdmundsonLocationMethod(AbstractSummarizer):
    """Edmundson's location method: sentences are scored by occurrences of
    heading-derived significant words (weight w_h) plus positional bonuses
    for the first/last paragraph (w_p1/w_p2) and first/last sentence of a
    paragraph (w_s1/w_s2)."""

    def __init__(self, stemmer, null_words):
        super(EdmundsonLocationMethod, self).__init__(stemmer)
        self._null_words = null_words

    def __call__(self, document, sentences_count, w_h, w_p1, w_p2, w_s1, w_s2):
        keywords = self._compute_significant_words(document)
        scores = self._rate_sentences(document, keywords, w_h, w_p1, w_p2, w_s1, w_s2)
        return self._get_best_sentences(document.sentences, sentences_count, scores)

    def _compute_significant_words(self, document):
        # stem all heading words, then keep those passing the null-word filter
        heading_words = chain(*map(attrgetter("words"), document.headings))
        stemmed = map(self.stem_word, heading_words)
        return frozenset(ffilter(self._is_null_word, stemmed))

    def _is_null_word(self, word):
        return word in self._null_words

    def _rate_sentences(self, document, significant_words, w_h, w_p1, w_p2, w_s1, w_s2):
        scores = {}
        paragraphs = document.paragraphs
        last_paragraph = len(paragraphs) - 1
        for p_idx, paragraph in enumerate(paragraphs):
            sentences = paragraph.sentences
            last_sentence = len(sentences) - 1
            for s_idx, sentence in enumerate(sentences):
                score = w_h * self._rate_sentence(sentence, significant_words)
                if p_idx == 0:
                    score += w_p1
                elif p_idx == last_paragraph:
                    score += w_p2
                if s_idx == 0:
                    score += w_s1
                elif s_idx == last_sentence:
                    score += w_s2
                scores[sentence] = score
        return scores

    def _rate_sentence(self, sentence, significant_words):
        # number of (stemmed) words of the sentence that are significant
        return sum(self.stem_word(word) in significant_words for word in sentence.words)

    def rate_sentences(self, document, w_h=1, w_p1=1, w_p2=1, w_s1=1, w_s2=1):
        keywords = self._compute_significant_words(document)
        return self._rate_sentences(document, keywords, w_h, w_p1, w_p2, w_s1, w_s2)
|
import os
import yaml

# Path to a YAML file, supplied interactively by the user.
user_input = input()
with open(user_input) as exploit_file:
    # safe_load refuses arbitrary Python object construction, so loading a
    # user-supplied file here is acceptable
    contents = yaml.safe_load(exploit_file) #ok
import matplotlib.pyplot as plt
import math
import numpy as np
from scipy.integrate import odeint
import networkx as nx
from NORMtree import N
from NORMtree import W
from NORMtree import N_1
from NORMtree import diff_dop
# 1. Parameters from the paper
Beta = 2 * 10 ** (-7)
Omega = 0.8 * 10 ** -7
Lambda = 10 ** 5
d = 0.1
p = 100
c = 5
Delta = 0.5
mBL = 1000
pV = 2
pBL = 25
mLB = pBL * mBL
tauj = 1
# N is taken from the graph module
print()
print('кол-во узлов:', N)
print()
print('матрица связей')
print(W)
# L - total number of edges, including the blood compartment
L = 0
for i in range(len(W)):
    for j in range(len(W[i])):
        L += W[i][j]
print('всего ребер', int(L))
# Lj - number of outgoing edges per node
Lj = []
for row in W:
    Lj.append(int(sum(row)))
print('число выходящих ребер из узла j', Lj)
# mkj = mkk/Lk - the migration rate from k to j equals the rate of leaving k
# divided by the number of outgoing vessels
mjj = 1/tauj # rate of leaving node j
# m - matrix of rates
m = np.zeros([N+1,N+1])
for i in range(N):
    for j in range(N):
        if W[i][j] != 0:
            m[i][j] = mjj/Lj[i]
for i in range (N-1): # add the mjj that flows out of node j
    m[i][i] = mjj
# add the connection with blood for the last node and for the first ones
for i in range(N_1):
    m[N][i] = mBL/Lj[-1]
m[N][N] = mBL
m[N-1][N-1] = mLB
m[N-1][N] = mLB
print('матрица m')
print(m)
mkj = np.zeros([N+1,N+1]) # node-to-node rates kept separately
for i in range(N):
    for j in range(N):
        if W[i][j] != 0:
            mkj[i][j] = mjj/Lj[i]
print('mkj=', mkj)
# initial concentrations for the cell vectors
print('начальные условия для T')
T_0 = []
for i in range(N):
    T_0.append(Lambda/d)
print(T_0)
print('введите начальные условия для I')
I_0 = []
for i in range(N):
    #I_0.append(int(input()))
    I_0.append(0)
print(I_0)
print('введите начальные условия для V')
V_0 = [10]
for i in range(N-1):
    #V_0.append(int(input()))
    V_0.append(0)
print(V_0)
# initial concentrations in blood, all zero for now
TB_0 = 0
IB_0 = 0
VB_0 = 0
# vector of initial concentrations; blood values go at the end
y0 = T_0 + I_0 + V_0
y0.append(TB_0)
y0.append(IB_0)
y0.append(VB_0)
print(y0)
ts = np.linspace(0, 30, 10000) # time grid
# solve the system dy/dt = f(y, t)
def f(y, t):
    """Right-hand side of the ODE system: the state vector holds the N
    lymphatic-node values of T, then I, then V, followed by the blood
    compartment values TB, IB, VB."""
    T_I_V = [] # T, I, V values plus TB, IB, VB
    for i in range(N * 3 + 3):
        T_I_V.append(y[i])
    F = []
    # in the lymphatic system
    for i in range(N):
        # dT_i/dt: production, infection loss, death/outflow, inflow from blood
        F.append(Lambda - (Beta * T_I_V[i + 2 * (N)] + Omega * T_I_V[i + (N)]) * T_I_V[i] - (d + m[i][i]) * T_I_V[i] + (m[N][i]) * T_I_V[3 * (N)])
        for k in range(N):
            # plus migration inflow from every connected node k
            F[-1] = F[-1] + mkj[k][i] * T_I_V[k]
    for i in range(N):
        # dI_i/dt
        F.append((Beta * T_I_V[i + 2 * (N)] + Omega * T_I_V[i + N]) * T_I_V[i] - (Delta + m[i][i]) * T_I_V[i + N] + (m[N][i]) * T_I_V[3 * (N) + 1 ])
        for k in range(N):
            F[-1] = F[-1] + mkj[k][i] * T_I_V[k + N]
    for i in range(N):
        # dV_i/dt
        F.append(p * T_I_V[i + N] - (c + pV * m[i][i]) * T_I_V[i + 2 * (N)] + pV * (m[N][i]) * T_I_V[3 * (N)+2])
        for k in range(N):
            F[-1] = F[-1] + mkj[k][i] * pV * T_I_V[k + 2 * N]
    # blood compartment equations (TB, IB, VB)
    F.append(- (d + mBL) * T_I_V[3 * (N)] + mLB * T_I_V[N - 1])
    F.append(- (Delta + mBL) * T_I_V[3 * (N) + 1 ] + mLB * T_I_V[(2 * N) - 1])
    F.append(- (c + pV * mBL) * T_I_V[3 * (N) + 2] + pV * mLB * T_I_V[(3 * N) - 1])
    return F
# actually solve the system
soln = odeint(f, y0, ts)
#for i in range(3*N):
# plt.plot(ts, soln[:,i], label='TVI'+str(i), color = (0.3 + i * 0.02 , 0.3, 0.1 + i * 0.02))
#plt.xlabel('Time, days')
#plt.ylabel('T cell [cells/ml], virus [virion/ml]')
#plt.legend(loc=0)
# per-node log-scale curves: T red, I green, V blue
for i in range(N):
    plt.semilogy(ts, soln[:,i], label='T'+str(i), color = 'red')
for i in range(N, 2*N):
    plt.semilogy(ts, soln[:, i], label='I' + str(i), color = 'green')
for i in range(2*N, 3*N):
    plt.semilogy(ts, soln[:, i], label='V' + str(i), color = 'blue')
plt.title("1")
plt.xlabel('Time, days')
plt.ylabel('T cell [cells/ml], virus [virion/ml] in LS')
plt.legend(bbox_to_anchor=(1, 1), ncol = 2)
plt.show()
"""plt.figure()
for i in range(N):
    plt.plot(ts, np.log10(soln[:,i]), label='T'+str(i), color = 'red')
for i in range(N, 2*N):
    plt.plot(ts, np.log10(soln[:, i]), label='V' + str(i), color = 'green')
for i in range(2*N, 3*N):
    plt.plot(ts, np.log10(soln[:, i]), label='I' + str(i), color = 'blue')
plt.xlabel('Time, days')
plt.ylabel('T cell [cells/ml], virus [virion/ml] in LS, log')
plt.legend(bbox_to_anchor=(1, 1), ncol = 2)
plt.title("2")
plt.show()"""
# in blood
plt.figure()
plt.semilogy(ts, soln[:,3*N], label='T', color = (0.5 , 0.2, 0.3 ))
plt.semilogy(ts, soln[:, 3*N+1], label='I' , color=(0.75, 0.75, 0))
plt.semilogy(ts, soln[:,3*N+2], label='V' , color=(0, 0, 1))
plt.xlabel('Time, days')
plt.ylabel('T cell [cells/ml], virus [virion/ml] in Blood')
plt.legend(bbox_to_anchor=(1, 1))
plt.title("в крови")
plt.show()
# blood, log scale (disabled)
"""plt.figure()
plt.plot(ts, np.log10(soln[:,3*N]), label='T', color = 'red')
plt.plot(ts, np.log10(soln[:, 3*N+1]), label='I', color = 'green')
plt.plot(ts, np.log10(soln[:,3*N+2]), label='V', color = 'blue')
plt.xlabel('Time')
plt.ylabel('T cell [cells/ml], virus [virion/ml] in Blood, log')
plt.legend(bbox_to_anchor=(1, 1))
plt.title("в крови лог")
plt.show()"""
# by levels
print(diff_dop)
# sum over all nodes, log scale
plt.semilogy(ts, np.sum(soln[:, :N], axis=1), label='T', color='red')
plt.semilogy(ts, np.sum(soln[:, N:2 * N], axis=1), label='I', color='green')
plt.semilogy(ts, np.sum(soln[:, 2 * N:3 * N], axis=1), label='V', color='blue')
plt.title("сумма semilogy")
plt.xlabel('Time, days')
plt.ylabel('T cell [cells/ml], virus [virion/ml] in LS')
plt.legend(bbox_to_anchor=(1, 1))
plt.show()
# sum over all nodes, linear scale
plt.plot(ts, np.sum(soln[:, :N], axis=1), label='T', color='red')
plt.plot(ts, np.sum(soln[:, N:2 * N], axis=1), label='V', color='green')
plt.plot(ts, np.sum(soln[:, 2 * N:3 * N], axis=1), label='I', color='blue')
plt.title("сумма ")
plt.xlabel('Time, days')
plt.ylabel('T cell [cells/ml], virus [virion/ml] in LS')
plt.legend(bbox_to_anchor=(1, 1))
plt.show()
# HERE WE COMPUTE THE MEAN, MINIMUM AND MAXIMUM across nodes
t_history_average = np.sum(soln[:, 0:N], axis=1) / N;
i_history_average = np.sum(soln[:, N:2*N], axis=1) / N;
v_history_average = np.sum(soln[:, 2*N:3*N], axis=1) / N;
t_history_minimal = np.min(soln[:, 0:N], axis=1);
i_history_minimal = np.min(soln[:, N:2*N], axis=1);
v_history_minimal = np.min(soln[:, 2*N:3*N], axis=1);
t_history_maximal = np.max(soln[:, 0:N], axis=1);
i_history_maximal = np.max(soln[:, N:2*N], axis=1);
v_history_maximal = np.max(soln[:, 2*N:3*N], axis=1);
plt.semilogy(ts, (t_history_minimal), label='T_min', color = 'red', linestyle = 'dashed')
plt.semilogy(ts, (t_history_average), label='T_avg', color = 'red')
plt.semilogy(ts, (t_history_maximal), label='T_max', color = 'red', linestyle = 'dashed')
plt.semilogy(ts, (i_history_minimal), label='I_min', color = 'green', linestyle = 'dashed')
plt.semilogy(ts, (i_history_average), label='I_avg', color = 'green')
plt.semilogy(ts, (i_history_maximal), label='I_max', color = 'green', linestyle = 'dashed')
plt.semilogy(ts, (v_history_minimal), label='V_min', color = 'blue', linestyle = 'dashed')
plt.semilogy(ts, (v_history_average), label='V_avg', color = 'blue')
plt.semilogy(ts, (v_history_maximal), label='V_max', color = 'blue', linestyle = 'dashed')
plt.title("cреднее")
plt.xlabel('Time, days')
plt.ylabel('T cell [cells/ml], virus [virion/ml] in LS')
plt.legend(loc=0)
plt.show()
# first node (top) vs last node (bottom)
plt.figure()
plt.subplot(2, 1, 1)
plt.semilogy(ts, soln[:,0], label='T'+str(1), color = 'red')
plt.semilogy(ts, soln[:, N], label='I' + str(1), color = 'green')
plt.semilogy(ts, soln[:, 2*N], label='V' + str(1), color = 'blue')
plt.title("a")
plt.ylabel('T cell [cells/ml], virus [virion/ml] in L1')
plt.legend(bbox_to_anchor=(1, 1))
plt.subplot(2, 1, 2)
plt.semilogy(ts, soln[:,(N-1)], label='T'+str(N-1), color = 'red')
plt.semilogy(ts, soln[:, (2*N-1)], label='I' + str(N-1), color = 'green')
plt.semilogy(ts, soln[:, (3*N-1)], label='V' + str(N-1), color = 'blue')
plt.title("b")
plt.xlabel('Time, days')
plt.ylabel('T cell [cells/ml], virus [virion/ml] in LN')
plt.legend(bbox_to_anchor=(1, 1))
plt.show()
import matplotlib
matplotlib.use("macosx")
import matplotlib.pyplot as plt
import sys
sys.path.append("/Users/lls/Documents/mlhalos_code/")
import numpy as np
from mlhalos import plot
from mlhalos import machinelearning as ml
from mlhalos import parameters
from mlhalos import distinct_colours
path = "/Users/lls/Documents/CODE/stored_files/shear/classification/"
#path = "/home/lls/stored_files/shear_quantities/"
def get_testing_index():
    """Boolean mask over all 256**3 particles selecting those NOT used in training."""
    trained = np.load("/Users/lls/Documents/CODE/stored_files/all_out/50k_features_index.npy")
    return ~np.in1d(np.arange(256 ** 3), trained)
def get_training_features(features):
    """Select the rows of *features* that were used for training."""
    train_idx = np.load("/Users/lls/Documents/CODE/stored_files/all_out/50k_features_index.npy")
    return features[train_idx]
def get_testing_features(features):
    """Select the rows of *features* held out for testing."""
    return features[get_testing_index()]
def get_false_positives(ids, y_predicted, y_true, threshold=None):
    """Return the IDs predicted positive (class-1 score >= threshold, default 0.5)
    whose true label is not 1."""
    cut = 0.5 if threshold is None else threshold
    predicted_positive = y_predicted[:, 1] >= cut
    truly_positive = y_true == 1
    return ids[predicted_positive & ~truly_positive]
def get_false_negatives(ids, y_predicted, y_true, threshold=None):
    """Return the IDs predicted negative (class-1 score < threshold, default 0.5)
    whose true label is 1."""
    cut = 0.5 if threshold is None else threshold
    predicted_positive = y_predicted[:, 1] >= cut
    truly_positive = y_true == 1
    return ids[~predicted_positive & truly_positive]
def get_false_positives_index(y_predicted, y_true, threshold=None):
    """Boolean mask marking false positives at the given threshold (default 0.5)."""
    cut = 0.5 if threshold is None else threshold
    predicted_positive = y_predicted[:, 1] >= cut
    truly_positive = y_true == 1
    return predicted_positive & ~truly_positive
def get_false_negatives_index(y_predicted, y_true, threshold=None):
    """Boolean mask marking false negatives at the given threshold (default 0.5)."""
    cut = 0.5 if threshold is None else threshold
    predicted_positive = y_predicted[:, 1] >= cut
    truly_positive = y_true == 1
    return ~predicted_positive & truly_positive
def get_percentage_false_positives_in_out_halos_per_threshold(y_predicted, y_true, threshold, halos_particles):
    """For every threshold, return the fraction of false positives that sit
    inside a halo (halo mass > 0) and outside one (halo mass == 0).

    Returns two arrays of len(threshold); entries stay 0 when there are no
    FPs of that kind at a given threshold.
    """
    n_thresholds = len(threshold)
    FPs_in_halos = np.zeros(n_thresholds, )
    FPs_out_halos = np.zeros(n_thresholds, )
    for i, thr in enumerate(threshold):
        fp_mask = get_false_positives_index(y_predicted, y_true, threshold=thr)
        fp_halo_masses = halos_particles[fp_mask]
        n_in = len(np.where(fp_halo_masses > 0)[0])
        n_out = len(np.where(fp_halo_masses == 0)[0])
        if n_in != 0:
            FPs_in_halos[i] = n_in / len(fp_halo_masses)
        if n_out != 0:
            FPs_out_halos[i] = n_out / len(fp_halo_masses)
        if n_in != 0 and n_out != 0:
            # Every FP is either in or out of a halo, so fractions must sum to 1.
            assert FPs_in_halos[i] + FPs_out_halos[i] == 1
    return FPs_in_halos, FPs_out_halos
def false_positives_ids_index_per_threshold(y_predicted, y_true, threshold_list):
    """Stack the FP boolean masks for every threshold into one
    (n_thresholds x n_samples) array."""
    masks = [get_false_positives_index(y_predicted, y_true, threshold=t)
             for t in threshold_list]
    return np.array(masks)
def plot_difference_fraction_FPs(FPs_run_one, FPs_run_two,
                                 title="Fraction FPs in density - FPs density + den-sub ellipticity",
                                 label="belong to halos"):
    """Plot the difference between two FP-fraction curves vs classification threshold.

    NOTE(review): reads the module-level ``threshold`` array rather than taking
    it as a parameter -- callers must have defined it before calling this.
    """
    colors = distinct_colours.get_distinct(2)
    plt.plot(threshold, FPs_run_one - FPs_run_two, label=label, color=colors[0])
    plt.axhline(y=0, color="k")
    plt.xlabel("Threshold")
    # Fix: mathtext must be wrapped in $...$, otherwise matplotlib prints the
    # raw "\mathrm{...}" commands literally on the axis label.
    plt.ylabel(r"$\mathrm{\Delta} \mathrm{FPs}$")
    plt.title(title)
    plt.legend(loc="best")
    plt.tight_layout()
def get_ids_and_halos_test_set():
    """Return (particle IDs, halo masses) for the test-set particles.

    Halo masses are stored in particle-ID order, while the feature arrays list
    all IN particles first and then all OUT particles; concatenating ic.ids_IN
    and ic.ids_OUT reproduces that ordering so both arrays index consistently.
    """
    ic = parameters.InitialConditionsParameters(path="/Users/lls/Documents/CODE/")
    ordered_ids = np.concatenate((ic.ids_IN, ic.ids_OUT))
    halo_masses = np.load("/Users/lls/Documents/CODE/stored_files/halo_mass_particles.npy")
    ids_tested = ordered_ids[get_testing_index()]
    return ids_tested, halo_masses[ids_tested]
# Load predictions of the two classification runs (density-only features vs
# density + density-subtracted ellipticity features).
y_pred_den_sub_ell = np.load(path + "predicted_den+den_sub_ell.npy")
y_true_den_sub_ell = np.load(path + "true_den+den_sub_ell.npy")
y_pred_den = np.load(path + "predicted_den.npy")
y_true_den = np.load(path + "true_den.npy")
# Both runs score the same test set, so their true labels must agree.
assert (y_true_den_sub_ell == y_true_den).all()

############# FALSE POSITIVES #############
# Find FPs of density run and FPs of density+density-subtracted ellipticity run
ids_tested, halos_testing_particles = get_ids_and_halos_test_set()
FPs_den_den_sub_ell_ind = get_false_positives_index(y_pred_den_sub_ell, y_true_den_sub_ell)
halos_FPs_den_plus_den_sub_ell = halos_testing_particles[FPs_den_den_sub_ell_ind]
FPs_den_ind = get_false_positives_index(y_pred_den, y_true_den)
halos_FPs_den = halos_testing_particles[FPs_den_ind]

############# FALSE NEGATIVES #############
# Find FNs of density run and FNs of density+density-subtracted ellipticity run
FNs_den_den_sub_ell_ind = get_false_negatives_index(y_pred_den_sub_ell, y_true_den_sub_ell)
halos_FNs_den_plus_den_sub_ell = halos_testing_particles[FNs_den_den_sub_ell_ind]
FNs_den_ind = get_false_negatives_index(y_pred_den, y_true_den)
halos_FNs_den = halos_testing_particles[FNs_den_ind]

# NOTE(review): unused below -- presumably a reference halo mass; confirm.
h_400_mass = 1836194204280.7886

############## PLOTS #################
# Plot which halos do the FPs live in in the density run and the density + density-subtracted ellipticity run
# (halo mass 0 means "not in a halo", hence the > 0 filter before log10).
n, bins, patch = plt.hist(np.log10(halos_FPs_den_plus_den_sub_ell[halos_FPs_den_plus_den_sub_ell > 0]), bins=20,
                          #log=True,
                          histtype="step", label="density + den-sub ellipticity")
n1, bins1, patch1 = plt.hist(np.log10(halos_FPs_den[halos_FPs_den > 0]), bins=bins,
                             #log=True,
                             histtype="step", label="density")
plt.legend(loc="best")

# Plot difference in fraction of FPs in halos as a function of threshold for density-only and density+den-sub
# ellipticity case
threshold = np.linspace(0., 1., 50)[::-1]
FPs_den_in_halos, FPs_den_out_halos = get_percentage_false_positives_in_out_halos_per_threshold(y_pred_den,
                                                                                                y_true_den,
                                                                                                threshold,
                                                                                                halos_testing_particles)
FPs_den_sub_ell_in_halos, FPs_den_sub_ell_out_halos = get_percentage_false_positives_in_out_halos_per_threshold(
    y_pred_den_sub_ell, y_true_den, threshold, halos_testing_particles)
plot_difference_fraction_FPs(FPs_den_out_halos, FPs_den_sub_ell_out_halos, label="not belong to halos")

# Which FPs went from wrong to right classification?
FPs_den_ind = false_positives_ids_index_per_threshold(y_pred_den, y_true_den, threshold)
FPs_den_den_sub_ell_ind = false_positives_ids_index_per_threshold(y_pred_den_sub_ell, y_true_den, threshold)
assert FPs_den_ind.shape[0] == len(threshold)
assert FPs_den_den_sub_ell_ind.shape[0] == len(threshold)
# FPs shared by both runs, and density-run FPs fixed by adding ellipticity.
common_FPs = np.array([ids_tested[FPs_den_ind[i] & FPs_den_den_sub_ell_ind[i]] for i in range(len(threshold))])
wrong_to_correct_density_FPs = np.array([ids_tested[FPs_den_ind[i] & ~FPs_den_den_sub_ell_ind[i]]
                                         for i in range(len(threshold))])
halos_common_FPs = np.array([halos_testing_particles[FPs_den_ind[i] & FPs_den_den_sub_ell_ind[i]]
                             for i in range(len(threshold))])
halos_wrong_to_correct_density_FPs = np.array([halos_testing_particles[FPs_den_ind[i] & ~FPs_den_den_sub_ell_ind[i]]
                                               for i in range(len(threshold))])

# Fraction of the "fixed" FPs that live inside (col 0) / outside (col 1) halos.
frac_w_to_cor = np.zeros((50, 2))
for i in range(len(threshold)):
    in_h = len(np.where(halos_wrong_to_correct_density_FPs[i] > 0)[0])
    out_h = len(np.where(halos_wrong_to_correct_density_FPs[i] == 0)[0])
    if in_h != 0:
        in_halos = in_h/ len(halos_wrong_to_correct_density_FPs[i])
    else:
        in_halos = in_h
    if out_h != 0:
        out_halos = out_h / len(halos_wrong_to_correct_density_FPs[i])
    else:
        out_halos = out_h
    frac_w_to_cor[i,0] = in_halos
    frac_w_to_cor[i, 1] = out_halos

plt.figure(figsize=(8,6))
plt.plot(threshold, frac_w_to_cor[:,0], label="in halos")
plt.axhline(y=0.5, color="k", ls="--")
#plt.plot(threshold, frac_w_to_cor[:,1], label="out halos")
plt.xlabel("Threshold")
plt.ylabel("Fraction FPs in halos")
plt.legend(loc="best")
plt.title("FPs in density run correctly classified by den+ell run")
plt.tight_layout()

# Print the same in/out-halo fractions per threshold.
for i in range(len(wrong_to_correct_density_FPs)):
    w_to_c_threshold = wrong_to_correct_density_FPs[i]
    w_to_c_threshold_in_halos = w_to_c_threshold[halos_testing_particles[w_to_c_threshold]>0]
    w_to_c_threshold_out_halos = w_to_c_threshold[halos_testing_particles[w_to_c_threshold]==0]
    print("Threshold " + str(threshold[i]) + str(" :"))
    print("The fraction of false positives IN HALOS which flipped from wrongly to correctly classified is " +
          str(len(w_to_c_threshold_in_halos)/len(w_to_c_threshold)))
    print("The fraction of false positives OUT HALOS which flipped from wrongly to correctly classified is " +
          str(len(w_to_c_threshold_out_halos)/len(w_to_c_threshold)))

# plt.plot(threshold, FPs_den_in_halos, label="density")
# plt.plot(threshold, FPs_den_plus_den_sub_ell_in_halos, label="den + den-sub ell")
# plt.xlabel("Threshold")
# plt.ylabel("Number of FPs")
# plt.yscale("log")
# plt.legend(loc="best")
# plt.title("FPs not belonging to a halo")
# plt.tight_layout()
# fpr_den, tpr_den, auc_den, th = ml.roc(y_pred_den, y_true_den)
# fpr_den_sub_ell, tpr_den_sub_ell, auc_den_sub_ell, th = ml.roc(y_pred_den_sub_ell, y_true_den_sub_ell)
# plot.roc_plot(np.column_stack((fpr_den,fpr_den_sub_ell)), np.column_stack((tpr_den,tpr_den_sub_ell )),
#               [auc_den,auc_den_sub_ell], labels=["density", "den + den-sub ell"])
# plt.savefig(path + "FPs_in_no_halo.pdf")
|
# https://open.kattis.com/problems/trik
# Three-cup shell game: the ball starts under cup 1; each move letter swaps a
# fixed pair of cups.  Print the final cup number (1-3) holding the ball.
SWAPS = {'A': (0, 1), 'B': (1, 2), 'C': (0, 2)}

cups = [True, False, False]
for move in input():
    if move in SWAPS:
        i, j = SWAPS[move]
        # Tuple swap instead of the verbose temp-variable dance.
        cups[i], cups[j] = cups[j], cups[i]

# 1-based position of the ball.
print(cups.index(True) + 1)
|
infile = 'C:/Users/DELL/music-top-recommend/data/cf_result.data'
outfile = 'C:/Users/DELL/music-top-recommend/data/cf_reclist.redis'

MAX_RECLIST_SIZE = 100  # keep only the top-N related items per item
PREFIX = 'CF_'

# item_id -> list of (related_item_id, score) pairs read from the CF output.
rec_dict = {}
with open(infile, 'r') as fd:
    for line in fd:
        itemid_A, itemid_B, score = line.strip().split('\t')
        # Create the list for itemid_A on first sight, then append.
        rec_dict.setdefault(itemid_A, []).append((itemid_B, score))

# Format each item's list as "itemB:score_itemB:score_..." sorted by score.
with open(outfile, 'w') as ofile:
    for k, v in rec_dict.items():
        key = PREFIX + k
        # BUG FIX: scores are strings read from the file; sort numerically,
        # not lexicographically (e.g. "9" used to sort above "10").
        # Also renamed "list" -> "top_items" to stop shadowing the builtin.
        top_items = sorted(v, key=lambda x: float(x[1]), reverse=True)[:MAX_RECLIST_SIZE]
        result = '_'.join([':'.join([str(val[0]), str(round(float(val[1]), 6))]) for val in top_items])
        ofile.write(' '.join([key, result]))
        ofile.write("\n")
|
import pandas as pd
import numpy as np
def readData():
    """Reads in Lineup and Boxscore data to pandas Data Frame"""
    lineup = pd.read_csv('Lineup.csv', header=1)
    boxscore = pd.read_csv('Boxscore.csv')
    # Drop the trailing totals row from the lineup sheet.
    lineup = lineup.drop(lineup.index[len(lineup) - 1])
    return lineup, boxscore
def playerNames(lu):
    """Returns list of player names and lineup names.

    Player columns sit between the 'TS%' column and the first 'Unnamed:'
    column; lineup columns follow that separator up to the next 'Unnamed:'
    column.
    """
    columns = lu.columns
    player_names = []
    pos = 0
    seen_ts = False
    while 'Unnamed:' not in columns[pos]:
        if seen_ts:
            player_names.append(columns[pos])
        if columns[pos] == 'TS%':
            seen_ts = True
        pos += 1
    # Skip the 'Unnamed:' separator column.
    pos += 1
    lineup_names = []
    while 'Unnamed:' not in columns[pos]:
        lineup_names.append(columns[pos])
        pos += 1
    return player_names, lineup_names
def calculateAdvancedStatistics(df, bs, temp_list):
    """Calculates advanced statistics and appends to temp_list

    df is the subset of lineup rows to aggregate.  Values are appended in the
    order MP, Poss., +/-, OffRtg, DefRtg, NetRtg (matching the column headers
    used in advancedBoxscore).  Returns temp_list.
    """
    #MP (total minutes, 1 decimal)
    temp_list.append(round(np.sum(df['Time']),1))
    #Possessions
    temp_list.append(round(np.sum(df['NYU Poss.'])))
    #+/- (own points minus opponent points; 'PTS.1' is the opponent column)
    temp_list.append(int(np.sum(pd.to_numeric(df['PTS'], errors='coerce')) - np.sum(pd.to_numeric(df['PTS.1'], errors='coerce'))))
    #OffRtg: points per 100 possessions, guarded against zero possessions
    if (np.sum(pd.to_numeric(df['NYU Poss.'])) == 0):
        temp_list.append(0)
    else:
        temp_list.append(round((np.sum(df['PTS'])/np.sum(df['NYU Poss.'])*100),1))
    #DefRtg: opponent points per 100 opponent possessions
    if (np.sum(pd.to_numeric(df['Opp. Poss.'])) == 0):
        temp_list.append(0)
    else:
        temp_list.append(round(np.sum(pd.to_numeric(df['PTS.1'], errors='coerce'))/np.sum(pd.to_numeric(df['Opp. Poss.'], errors='coerce'))*100,1))
    #NetRtg = OffRtg - DefRtg (the last two appended values)
    temp_list.append(round(temp_list[-2] - temp_list[-1],1))
    #TOV% (disabled)
    #temp_list.append(round(int(bs.TO[bs['Player'] == temp_list[0]])/np.sum(pd.to_numeric(df['NYU Poss.'], errors='coerce'))*100,1))
    return temp_list
def advancedBoxscore(lu, bs, player_names):
    """creates an advanced statistics boxscore for players and returns it as a dataframe

    One row per name in *player_names* (aggregating the rows of *lu* where that
    name's indicator column == 1), plus a final 'Team' row computed over all
    of *lu*.  Prints the result before returning it.
    """
    col_headers = ['Player', 'MP', 'Poss.', '+/-', 'OffRtg', 'DefRtg', 'NetRtg']

    def _row_frame(stats):
        # One-row frame; passing through np.array stringifies the values,
        # matching the original construction (and its repeated index 0).
        frame = pd.DataFrame(np.array(stats).reshape(1, len(col_headers)))
        frame.columns = col_headers
        return frame

    # Build every row first and concatenate once: repeated pd.concat inside
    # the loop (the previous approach) copies the frame each time (quadratic).
    rows = []
    for name in player_names:
        player_df = lu[lu[name] == 1]
        rows.append(_row_frame(calculateAdvancedStatistics(player_df, bs, [name])))
    rows.append(_row_frame(calculateAdvancedStatistics(lu, bs, ['Team'])))
    advanced_boxscore_df = pd.concat(rows, axis=0)
    print(advanced_boxscore_df)
    return advanced_boxscore_df
# Script entry: build player-level and lineup-level advanced boxscores.
lu, bs = readData()
player_names, lineup_names = playerNames(lu)
player_stats_df = advancedBoxscore(lu, bs, player_names)
lineup_stats_df = advancedBoxscore(lu, bs, lineup_names)
# CSV export left disabled:
#player_stats_df.to_csv('MedgarPlayer.csv')
#lineup_stats_df.to_csv('MedgarLineup.csv')
|
import argparse
import torch
import logging
import json
import os
import numpy as np
from NeuralBLBF.evaluate import run_test_set
from NeuralBLBF.train import train
from NeuralBLBF.model import TinyEmbedFFNN, SmallEmbedFFNN, SparseLinear, \
LargeEmbedFFNN, CrossNetwork, SparseFFNN
if __name__ == "__main__":
    # CLI entry point: parse options, build the chosen model, optionally
    # resume from a checkpoint, then train or evaluate.
    logging.basicConfig(level="INFO", format="%(asctime)s - %(levelname)s - %(message)s")
    parser = argparse.ArgumentParser(description='Process some integers.')
    parser.add_argument('--mode', default='train', choices=['train', 'test'])

    # Paths to datasets
    parser.add_argument('--train', default='data/vw_compressed_train')
    parser.add_argument('--valid', default='data/vw_compressed_validate')
    parser.add_argument('--test', default='data/vw_compressed_validate')
    parser.add_argument('--stop_idx', type=int, default=1000000)
    parser.add_argument('--step_size', type=int, default=100000)
    parser.add_argument('--device_id', type=int, default=1)
    parser.add_argument('--feature_dict_name', type=str,
                        default='data/features_to_keys.json')

    # Parameters related to training
    parser.add_argument('--lamb', type=float, default=1)
    parser.add_argument('--gamma', type=float, default=0)
    parser.add_argument('--epochs', type=int, default=10)
    parser.add_argument('--learning_rate', type=float, default=0.00005)
    parser.add_argument('--batch_size', type=int, default=256)
    parser.add_argument('--enable_cuda', action='store_true')
    parser.add_argument('--training_eval', action='store_true',
                        help="Also perform evaluation on training set")
    parser.add_argument('--weight_decay', type=float, default=0)

    # Parameters related to the layout of the network
    parser.add_argument('--model_type', default="TinyEmbedFFNN",
                        choices=["TinyEmbedFFNN", "SmallEmbedFFNN", "SparseLinear",
                                 "LargeEmbedFFNN", "CrossNetwork", "SparseFFNN"])
    parser.add_argument('--sparse', action='store_true')
    parser.add_argument('--embedding_dim', type=int, default=20)
    parser.add_argument('--hidden_dim', type=int, default=100)
    parser.add_argument('--dropout', type=float, default=0)

    # Parameters related to loading or storing models
    parser.add_argument('--resume', type=str, default=None)
    parser.add_argument('--save', action='store_true')
    parser.add_argument('--save_model_path', type=str, default='data/models')
    args = vars(parser.parse_args())

    # device stays None for CPU execution.
    if args['enable_cuda'] and torch.cuda.is_available():
        device = torch.device('cuda', args['device_id'])
    else:
        device = None

    logging.info("Parameters:")
    for k, v in args.items():
        logging.info(" %12s : %s" % (k, v))

    # Load dict mapping features to keys
    with open(args['feature_dict_name']) as f: feature_dict = json.load(f)

    # Checkpoints are stored under <save_model_path>/<model_type>_<embedding_dim>.
    if not os.path.exists(args['save_model_path']):
        os.mkdir(args['save_model_path'])
    args['save_model_path'] = '{}/{}_{}'.format(
        args['save_model_path'], args['model_type'], args['embedding_dim']
    )

    # Initialize neural architecture and optimizer to use
    # NOTE(review): TinyEmbedFFNN is only built when --sparse is NOT set, so
    # "--model_type TinyEmbedFFNN --sparse" falls through to
    # NotImplementedError -- confirm this is intended.
    if args['model_type'] == "TinyEmbedFFNN" and not args['sparse']:
        model = TinyEmbedFFNN(feature_dict, device, **args)
    elif args['model_type'] == "SparseLinear":
        model = SparseLinear(len(feature_dict))
    elif args['model_type'] == "SparseFFNN":
        model = SparseFFNN(len(feature_dict))
    elif args['model_type'] == "LargeEmbedFFNN":
        model = LargeEmbedFFNN(feature_dict, device, **args)
    elif args['model_type'] == "SmallEmbedFFNN":
        model = SmallEmbedFFNN(feature_dict, device, **args)
    elif args['model_type'] == "CrossNetwork":
        model = CrossNetwork(feature_dict, device, **args)
    else:
        raise NotImplementedError()
    if args["enable_cuda"] and torch.cuda.is_available(): model.to(device)

    # Optionally resume: restores model weights, optimizer state and epoch.
    start_epoch = 0
    optim_checkpoint = None
    if args['resume'] is not None:
        checkpoint = torch.load(args['resume'])
        model.load_state_dict(checkpoint['model'])
        optim_checkpoint = checkpoint['optimizer']
        start_epoch = checkpoint['epoch']
        logging.info("Resuming from model {}. Start at epoch: {}".format(args['resume'], start_epoch))

    n_params = sum([np.prod(par.size()) for par in model.parameters() if par.requires_grad])
    optimizer = torch.optim.SGD(model.parameters(), lr=args["learning_rate"], weight_decay=args['weight_decay'], momentum=0.9)
    if optim_checkpoint is not None:
        optimizer.load_state_dict(optim_checkpoint)
    logging.info("Initialized model and optimizer. Number of parameters: {}".format(n_params))

    if args['mode'] == 'train':
        train(model, optimizer, feature_dict, start_epoch, device, **args)
    else:
        run_test_set(model=model, test_filename=args['test'],
                     feature_dict=feature_dict, device=device, **args)
|
#! /usr/bin/env python
# --*-- coding:utf-8 --*--
import json
class QueryParser(object):
    """Parses a druid query, given either as a dict or a JSON string, and
    exposes accessors for its individual sections."""

    def __init__(self, druidQuery):
        self.druidQuery = druidQuery
        self.get_json()

    def get_json(self):
        """Normalise the raw query into ``self.jsonQuery`` (a dict).

        Raises whatever ``json.loads`` raises when the input string is not
        valid JSON.
        """
        if isinstance(self.druidQuery, dict):
            self.jsonQuery = self.druidQuery
        else:
            # The previous try/except only re-raised the same exception;
            # let json.loads errors propagate directly.
            self.jsonQuery = json.loads(self.druidQuery)

    def get_time(self):
        """Return settings.time."""
        return self.jsonQuery.get('settings').get('time')

    def get_pagination(self):
        """Return settings.pagination."""
        return self.jsonQuery.get('settings').get('pagination')

    def get_data(self):
        """Return the 'data' section."""
        return self.jsonQuery.get('data')

    def get_group(self):
        """Return the 'group' section."""
        return self.jsonQuery.get('group')

    def get_filter(self):
        """Return the conjunction list filters.$and."""
        return self.jsonQuery.get('filters').get("$and")

    def get_sort(self):
        """Return the 'sort' section."""
        return self.jsonQuery.get('sort')
|
#Django lib
from django.shortcuts import render
from rest_framework import viewsets
from general.serializers import *
from general.controllers import *
from django.http import HttpResponse
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework.authentication import SessionAuthentication, BasicAuthentication, TokenAuthentication
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
# Create your views here.
class GeneralViewSet(viewsets.ModelViewSet, GeneralController):
    '''
    API endpoint that allows to get general information about Servers.

    Effectively read-only: list/retrieve delegate to GeneralController,
    while create/update/destroy are intentionally no-ops.
    '''
    permission_classes = (IsAuthenticated,)        # every action requires auth
    authentication_classes = (TokenAuthentication,)  # token-based auth only
    serializer_class = GeneralSerializer

    def list(self, request, *args, **kwargs):
        # Delegate to the controller mixin; response body is whatever it returns.
        return HttpResponse(self.get_general_info_list(request))

    def retrieve(self, request, pk=None):
        # Single-server lookup by primary key, again via the controller mixin.
        return HttpResponse(self.get_general_info(request, pk))

    def create(self, request, *args, **kwargs):
        # Intentionally unsupported (silently does nothing).
        pass

    def update(self, request, *args, **kwargs):
        # Intentionally unsupported (silently does nothing).
        pass

    def destroy(self, request, pk=None):
        # Intentionally unsupported (silently does nothing).
        pass
|
"""Helper functions for registering and configuring IOLoop watchers."""
from julythontweets import config
from julythontweets.callbacks import JulythonLiveTwitterCallback
from julythontweets.parsers.github_parser import GitHubParser
from julythontweets.watchers.twitter_watcher import TwitterWatcher
def dummy_callback(commit):
    """Placeholder until we hit the API."""
    # Python 2 print statement: just dump the parsed commit to stdout.
    print commit
def register_watchers(ioloop):
    """
    Registers watchers to the ioloop. Eventually, this will be
    configurable somewhere...
    """
    watcher_config = get_watcher_configuration("twitter", ioloop)
    watcher = TwitterWatcher(ioloop, dummy_callback, watcher_config)
    watcher.start()
def get_watcher_configuration(watcher_name, ioloop):
    """Retrieves configuration based on watcher name.

    Returns None for unknown names.  NOTE(review): the dict value is built
    eagerly, so _get_twitter_configuration (and its parser/callback
    construction) runs on every call regardless of watcher_name -- confirm
    that is acceptable before adding more watchers.
    """
    configuration = {
        "twitter": _get_twitter_configuration(ioloop),
    }.get(watcher_name)
    return configuration
def _get_twitter_configuration(ioloop):
    """Build twitter configuration.

    Bundles the credentials/search term from the config module with one
    GitHubParser shared by both github hostnames and the live callback.
    """
    # parser stuff should be moved into parser_registry module.
    github_parser = GitHubParser(ioloop, {})
    julython_live_twitter_callback = JulythonLiveTwitterCallback(ioloop)
    return {
        "twitter_consumer_key": config.TWITTER_CONSUMER_KEY,
        "twitter_consumer_secret": config.TWITTER_CONSUMER_SECRET,
        "twitter_access_token": config.TWITTER_ACCESS_TOKEN,
        "twitter_access_token_secret": config.TWITTER_ACCESS_TOKEN_SECRET,
        "twitter_search_term": config.TWITTER_SEARCH_TERM,
        # Same parser handles links with and without the www prefix.
        "parsers": {
            "github.com": github_parser,
            "www.github.com": github_parser
        },
        "callbacks": [julython_live_twitter_callback]
    }
|
"""
A function that fills out and attaches a coversheet to a submission file.
Author: Nicholas Read
"""
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from .submission import Submission
def attach_generic_coversheet(submission: 'Submission') -> None:
    """
    Fills in and attaches the generic Voiceworks coversheet to the
    associated file in `submission`.
    """
    # TODO: not yet implemented.
    pass
def attach_poetry_coversheet(submission: 'Submission') -> None:
    """
    Fills in and attaches the poetry Voiceworks coversheet to the
    associated file in `submission`.
    """
    # TODO: not yet implemented.
pass |
from tkinter import *

# Minimal Tk "Hello World" window.
root = Tk()
# Fix: the original used typographic quotes (”Hello World”), which is a
# SyntaxError in Python; they must be plain ASCII double quotes.
label = Label(root, text="Hello World")  # create the label widget
label.pack()                             # lay it out with the pack geometry manager
root.mainloop()                          # enter the Tk event loop
|
import os
import urllib2
from bs4 import BeautifulSoup
import random
import json
def getJoke():
    """Fetch one joke (a string) from a random category of the crackmeup API.

    Python 2 code (urllib2).  NOTE(review): no error handling -- any network
    failure or malformed JSON propagates to the caller.
    """
    # siteList allows adding further joke APIs later; each entry describes the
    # base URL, its category paths, and the JSON key holding the joke text.
    siteList = [{'urlPath' : 'https://crackmeup-api.herokuapp.com/',
                 'categories' : ['random', 'blond', 'dark', 'dirty', 'gender', 'gross','walks-into-a-bar'],
                 'key' : 'joke'
                }]
    siteIndex = 0
    site = siteList[siteIndex]
    # Pick a random category and fetch <base>/<category>.
    randomCategory = random.randint(0, len(site['categories']) - 1)
    urlPath = site['urlPath'] + site['categories'][randomCategory]
    jokeSite = urllib2.urlopen(urlPath)
    joke = jokeSite.read()
    jsonLoader = json.loads(joke)
    jokeSite.close()
    return jsonLoader[site['key']]
|
# Read n "name score" lines and print the name holding the second-highest
# position after a stable sort by numeric score.
entry_count = int(input())
entries = [input().split() for _ in range(entry_count)]
runner_up = sorted(entries, key=lambda entry: int(entry[1]))[-2]
print(runner_up[0])
|
"""
Created by Tomas Knapen on 2011-04-27.
Copyright (c) 2011 __MyCompanyName__. All rights reserved.
"""
import os, _thread, time, datetime
import numpy as np
import matplotlib
matplotlib.use('Agg')
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pylab as pl
from tables import *
#class for callbacks
class DataContainer(object):
    """Collects simulation results and persists/loads them via an HDF5 file."""

    def __init__(self, hdf5_filename):
        # Path of the HDF5 file used by the save/load methods below.
        self.hdf5_filename = hdf5_filename

    def setup_for_simulation(self, nr_timepoints, nr_simulations, nr_variables, nr_noise):
        """Pre-allocate storage for *nr_simulations* runs of the simulation."""
        self.result_array = np.zeros((nr_simulations, nr_timepoints, nr_variables + nr_noise))
        self.parameter_array = []
        self.lock = _thread.allocate_lock()
        self.count = 0

    #the callback function
    def save_to_array(self, value):
        """Callback storing one (parameters, results) pair from a run.

        NOTE(review): the lock acquire/release is commented out although this
        appends and increments self.count -- not safe if callbacks can fire
        concurrently; confirm callers are serialized.
        """
        # we must use lock here because array stuff is not atomic
        # self.lock.acquire()
        self.parameter_array.append( value[0] )
        self.result_array[self.count] = value[1]
        # self.lock.release()
        self.count += 1

    def save_to_hdf_file(self, run_name, sim_results, add_to_file = True):
        """Write *sim_results* (iterable of (params, data) pairs) under the
        HDF5 group *run_name*.

        With add_to_file=True an existing file is opened for append;
        otherwise the file is recreated.  Data is only written when the
        group does not exist yet.
        """
        # saving the data
        parameter, results = zip(*sim_results)
        self.parameter_array = parameter
        self.result_array = np.array(results)
        if os.path.isfile(self.hdf5_filename) and add_to_file:
            # os.system('rm ' + self.hdf5_filename)
            h5file = open_file(self.hdf5_filename, mode = "r+", title = "simulation results file")
        elif os.path.isfile(self.hdf5_filename) and not add_to_file:
            os.system('rm ' + self.hdf5_filename)
            h5file = open_file(self.hdf5_filename, mode = "w", title = "simulation results file")
        else:
            h5file = open_file(self.hdf5_filename, mode = "w", title = "simulation results file")
        try:
            thisRunGroup = h5file.get_node(where = "/", name = run_name, classname='Group')
        except NoSuchNodeError:
            # Group is new: create it and store data + parameters.
            now = datetime.datetime.now()
            thisRunGroup = h5file.create_group("/", run_name, run_name + ' created at ' + now.strftime("%Y-%m-%d_%H.%M.%S"))
            h5file.create_array(thisRunGroup, 'simulation_data', self.result_array, '')
            # One float64 column per distinct parameter name across all runs.
            ptd = [(k, np.float64) for k in np.unique(np.concatenate(([list(k.keys()) for k in self.parameter_array])))]
            self.parameterTypeDictionary = np.dtype(ptd)
            # create a table for the parameters of these runs
            parameterTable = h5file.create_table(thisRunGroup, 'simulation_parameters', self.parameterTypeDictionary)
            # fill up the table
            trial = parameterTable.row
            for r in self.parameter_array:
                for par in r.keys():
                    trial[par] = r[par]
                trial.append()
            parameterTable.flush()
        h5file.close()

    def data_from_hdf_file(self, run_name):
        """Read back (parameters, data) for *run_name*; (None, None) if absent."""
        if not os.path.isfile(self.hdf5_filename):
            print(self.hdf5_filename + ' is not a file')
        self.h5file = open_file(self.hdf5_filename, mode = "r")
        try:
            thisRunGroup = self.h5file.get_node(where = "/", name = run_name, classname='Group')
        except NoSuchNodeError:
            # Unknown run: report and bail out.
            print(run_name + ' is not a run in ' + self.hdf5_filename)
            return (None, None)
        simulation_parameters, simulation_data = thisRunGroup.simulation_parameters.read(), thisRunGroup.simulation_data.read()
        self.h5file.close()
        return (simulation_parameters, simulation_data)
|
#!/usr/bin/env python3
"""
This file defines unit tests for the Color enum.
"""
from clarity.Color import Color
class TestColorEnum:
    """
    This class tests the Color enum.
    """

    def test_switch(self):
        """
        Tests the switch() function of the Color enum.

        switch() should flip BLACK <-> WHITE and leave NEITHER unchanged.
        """
        assert Color.WHITE == Color.switch(Color.BLACK)
        assert Color.BLACK == Color.switch(Color.WHITE)
        assert Color.NEITHER == Color.switch(Color.NEITHER)
        # Fix: the original ended with a verbatim duplicate of the
        # BLACK == switch(WHITE) assertion, which added no coverage.
|
#!/usr/bin/python
"""
ID: ten.to.1
TASK: barn1
LANG: PYTHON3
"""
# Greedy solution: one board covering all occupied stalls would have length
# (last - first + 1); each extra board lets us cut out one gap between
# consecutive occupied stalls, so drop the (num_max_boards - 1) largest gaps.
with open("barn1.in", "r") as filein:
    num_max_boards, num_total_stalls, num_occupied_stalls = map(int, filein.readline().split())
    occupied_stalls = sorted(map(int, filein.read().split()))

# Gaps between consecutive occupied stalls.
gaps_stalls = [occupied_stalls[n] - occupied_stalls[n - 1]
               for n in range(1, len(occupied_stalls))]

# Fix: removed leftover debug prints and use context managers so both file
# handles are closed deterministically.
with open("barn1.out", "w") as fileout:
    if num_max_boards == 1:
        # Single board spanning first..last occupied stall.
        fileout.write(str(sum(gaps_stalls) + 1) + '\n')
    elif num_max_boards >= len(occupied_stalls):
        # Enough boards for one 1-wide board per occupied stall.
        fileout.write(str(len(occupied_stalls)) + '\n')
    else:
        # Keep all but the (num_max_boards - 1) largest gaps; each board also
        # covers one stall beyond its gaps, hence "+ num_max_boards".
        covered = sum(sorted(gaps_stalls)[:-(num_max_boards - 1)]) + num_max_boards
        fileout.write(str(covered) + '\n')
|
from collections import defaultdict

# Unit steps as (dy, dx): down, up, right, left.
DIRS = [
    (1, 0),
    (-1, 0),
    (0, 1),
    (0, -1)
]

def solveMaze(maze):
    """BFS shortest path from portal AA to portal ZZ in a donut maze.

    *maze* is a grid (list of rows of characters): '.' cells are corridors and
    pairs of capital letters adjacent to a corridor cell name a portal that
    teleports between its two endpoints.  Returns the number of steps, or
    None if ZZ is unreachable.
    """
    from collections import deque

    portalToPoints = defaultdict(list)  # label -> [(x, y), ...]
    pointToPortals = {}                 # (x, y) -> label

    def hasPortal(x, y):
        # A corridor cell belongs to a portal when a letter sits next to it;
        # the label is the two letters read outward, sorted so both ends of a
        # portal produce the same canonical key.
        for dy, dx in DIRS:
            mv1 = maze[dy + y][dx + x]
            if mv1.isalpha():
                return ''.join(sorted(maze[dy + y][dx + x] + maze[2*dy + y][2*dx + x]))
        return None

    # Scan the grid for portal-labelled corridor cells.
    for y, r in enumerate(maze):
        for x, c in enumerate(r):
            if c == '.':
                p = hasPortal(x, y)
                if p:
                    portalToPoints[p].append((x, y))
                    pointToPortals[(x, y)] = p

    start = portalToPoints['AA'][0]
    target = portalToPoints['ZZ'][0]

    def getEdges(loc):
        x, y = loc
        for dy, dx in DIRS:
            neighbor = (dx + x, dy + y)
            if maze[neighbor[1]][neighbor[0]] == '.':
                yield neighbor
        # A portal cell also connects to its twin endpoint.
        if loc in pointToPortals:
            for p in portalToPoints[pointToPortals[loc]]:
                if p != loc:
                    yield p

    # Plain BFS.  Fix: use a deque -- list.pop(0) is O(n) per pop, which made
    # the original queue handling quadratic (solveMazePart2 already used deque).
    visited = {start}
    toVisit = deque([(0, start)])
    while toVisit:
        cost, location = toVisit.popleft()
        for n in getEdges(location):
            if n not in visited:
                costToNeighbor = cost + 1
                if n == target:
                    return costToNeighbor
                visited.add(n)
                toVisit.append((costToNeighbor, n))
def solveMazePart2(maze):
    """Recursive-maze variant: BFS over (level, position) states where inner
    portals descend one level and outer portals ascend; AA/ZZ only exist on
    level 0.  Returns the step count, or None if unreachable."""
    portalToPoints = defaultdict(list)  # label -> [(x, y), ...]
    pointToPortals = {}                 # (x, y) -> label
    height = len(maze)
    width = len(maze[0])

    def isOutside(p):
        # Portal cells within a few cells of the grid border are "outer".
        x, y = p
        return y < 3 or height - y < 4 or x < 3 or width - x < 4

    def hasPortal(x, y):
        # Same canonical two-letter label construction as in solveMaze.
        for dy,dx in DIRS:
            mv1 = maze[dy+y][dx+x]
            if mv1.isalpha():
                return ''.join(sorted(maze[dy+y][dx+x] + maze[2*dy+y][2*dx+x]))
        return None

    # Scan for portals
    for y, r in enumerate(maze):
        for x, c in enumerate(r):
            if c == '.':
                p = hasPortal(x, y)
                if p:
                    portalToPoints[p].append((x, y))
                    pointToPortals[(x, y)] = p

    # BFS over (level, position); the target only counts on level 0.
    start = portalToPoints['AA'][0]
    visited = {(0, start)}
    from collections import deque
    toVisit = deque([(0, (0, start))])
    target = (0, portalToPoints['ZZ'][0])

    def getEdges(loc):
        level, p = loc
        x, y = p
        for dy,dx in DIRS:
            neighbor = (dx+x, dy+y)
            c = maze[neighbor[1]][neighbor[0]]
            if c == '.':
                yield (level, neighbor)
        # Check if neighbor is in a portal
        if p in pointToPortals:
            portal = pointToPortals[p]
            points = portalToPoints[portal]
            # Outer portals act as walls on level 0 (AA/ZZ have no twin anyway).
            if not(isOutside(p) and level == 0):
                levelDelta = -1 if isOutside(p) else 1
                for pn in points:
                    if pn != p:
                        yield (level + levelDelta, pn)

    while toVisit:
        cost, node = toVisit.popleft()
        for n in getEdges(node):
            if n not in visited:
                costToNeighbor = cost + 1
                if n == target:
                    return costToNeighbor
                visited.add(n)
                toVisit.append((costToNeighbor, n))
if __name__ == "__main__":
    # Read the puzzle grid; strip only line endings -- interior spaces are
    # meaningful in the donut-maze layout.
    maze = []
    with open("input_day20") as f:
        maze = list(map(list, map(lambda x: x.strip('\n').strip('\r'), f.readlines())))
    print(solveMaze(maze))
    print(solveMazePart2(maze))
|
def print_info(name, age, no):
    """Print the three fields, one per line, in the order given."""
    for field in (name, age, no):
        print(field)
# 实参和形参的位置是一一对应的
# print_info("田昌", 25, "007")
print_info("007", "田昌", 25) |
def LATERAL_LOAD_TRANSFER_f(ay):
    """Front-axle lateral load transfer at lateral acceleration *ay*.

    Sum of the unsprung-mass term, the sprung-mass geometric (roll-centre)
    term, and the sprung-mass elastic (roll-stiffness-share) term.  Relies on
    the vehicle parameters defined at module level.
    """
    unsprung = 2*m_wheel*ay*h_wheel/tf
    geometric = (xr/w)*ms*ay*(hrc_f/tf)
    elastic = ms*ay*(ha/tf)*(Kr_f+2*Qarb_f/tf)/(Kr_f+Kr_r+2*(Qarb_f+Qarb_r)/tf)
    return unsprung + geometric + elastic
def LATERAL_LOAD_TRANSFER_r(ay):
    """Rear-axle lateral load transfer at lateral acceleration *ay*.

    Mirrors LATERAL_LOAD_TRANSFER_f with the rear track/roll-centre/stiffness
    parameters.  Relies on the vehicle parameters defined at module level.
    """
    unsprung = 2*m_wheel*ay*h_wheel/tr
    geometric = (xf/w)*ms*ay*(hrc_r/tr)
    elastic = ms*ay*(ha/tr)*(Kr_r+2*Qarb_r/tr)/(Kr_f+Kr_r+2*(Qarb_f+Qarb_r)/tr)
    return unsprung + geometric + elastic
def LOAD_fo(ay, R_corner):
    """Vertical load on the front-outer tyre: static + aero + load transfer."""
    static = m*g*(xr/w)/2
    aero = 0.5*af*0.5*Cz*rho*S*ay*R_corner
    return static + aero + LATERAL_LOAD_TRANSFER_f(ay)
def LOAD_fi(ay, R_corner):
    """Vertical load on the front-inner tyre: static + aero - load transfer."""
    static = m*g*(xr/w)/2
    aero = 0.5*af*0.5*Cz*rho*S*ay*R_corner
    return static + aero - LATERAL_LOAD_TRANSFER_f(ay)
def LOAD_ro(ay, R_corner):
    """Vertical load on the rear-outer tyre: static + aero + load transfer."""
    static = m*g*(xf/w)/2
    aero = 0.5*ar*0.5*Cz*rho*S*ay*R_corner
    return static + aero + LATERAL_LOAD_TRANSFER_r(ay)
def LOAD_ri(ay, R_corner):
    """Vertical load on the rear-inner tyre: static + aero - load transfer."""
    static = m*g*(xf/w)/2
    aero = 0.5*ar*0.5*Cz*rho*S*ay*R_corner
    return static + aero - LATERAL_LOAD_TRANSFER_r(ay)
#%% PLOT
# Sweep lateral acceleration from 1 g to 1.6 g and plot the four wheel loads
# (straight-line aero case: R_corner = 0 zeroes the aero term).
# NOTE(review): relies on np, plt, g and the vehicle parameters defined
# earlier in the file.
ay = np.linspace(1,1.6,30)
R_corner=0
plt.clf()
plt.plot(ay,LOAD_fo(ay*g,R_corner),label='front outter load')
plt.plot(ay,LOAD_fi(ay*g,R_corner),label='front inner load')
plt.plot(ay,LOAD_ro(ay*g,R_corner),label='rear outter load')
plt.plot(ay,LOAD_ri(ay*g,R_corner),label='rear inner load')
plt.xlabel('Acceleration (g)')
plt.ylabel('Vertical load (N)')
plt.grid(True)
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc='lower left', ncol=2, mode="expand", borderaxespad=0.)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.