text stringlengths 38 1.54M |
|---|
from django.db import models
from django.utils import timezone
from django.template.defaultfilters import slugify
import itertools
class Holliday(models.Model):
    """A holiday; Rooms reference it via a ForeignKey."""
    name = models.CharField(max_length=200)
    created_date = models.DateTimeField(default=timezone.now)
    updated_at = models.DateTimeField(default=timezone.now)
    slug = models.SlugField(null=True, blank=True, unique=True, max_length=100)

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        """Refresh ``updated_at`` and derive a slug unique across Hollidays."""
        self.updated_at = timezone.now()
        # Base slug from the name, capped at 50 chars to leave room for a suffix.
        self.slug = orig = slugify(self.name)[:50]
        for x in itertools.count(1):
            # Stop as soon as no *other* row holds this slug.
            if not self.__class__.objects.filter(slug=self.slug).exclude(pk=self.pk).exists():
                break
            # BUG FIX: this line was commented out, so the loop never changed
            # the slug and spun forever whenever a duplicate existed (compare
            # the working Room.save below).  Truncate the original slug
            # dynamically; minus 1 for the hyphen.
            self.slug = "%s-%d" % (orig[:50 - len(str(x)) - 1], x)
        return super(Holliday, self).save(*args, **kwargs)
class Room(models.Model):
    """A page/room belonging to a Holliday; rooms may nest via ``parent_room``."""
    ### Relationships
    holliday = models.ForeignKey(Holliday, on_delete=models.CASCADE)
    parent_room = models.ForeignKey('self', on_delete=models.CASCADE, null=True, blank=True, related_name='children')
    ### Attributes
    name = models.CharField(max_length=200)
    image_url = models.URLField(max_length=200, blank=True)
    page_number = models.PositiveIntegerField(default=0)
    content = models.CharField(max_length=50000, blank=True)
    afi_komen = models.BooleanField(default=False)
    created_date = models.DateTimeField(default=timezone.now)
    updated_at = models.DateTimeField(default=timezone.now)
    slug = models.SlugField(null=True, blank=True, unique=True, max_length=100)

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        """Refresh ``updated_at`` and derive a slug unique across Rooms."""
        self.updated_at = timezone.now()
        # Base slug from the name, capped at 50 chars to leave room for a suffix.
        self.slug = orig = slugify(self.name)[:50]
        for x in itertools.count(1):
            # Stop as soon as no *other* row holds this slug.
            if not self.__class__.objects.filter(slug=self.slug).exclude(pk=self.pk).exists():
                break
            # Truncate the original slug dynamically. Minus 1 for the hyphen.
            self.slug = "%s-%d" % (orig[:50 - len(str(x)) - 1], x)
        return super(Room, self).save(*args, **kwargs)
|
__author__ = 'Administrator'
import json
import urllib.parse
import urllib.request
from suds.client import Client
import configparser
class HttpGet:
    """Simple HTTP GET client configured from an INI file.

    The INI file must provide ``[url] Http_url`` plus ``[HttpData] key`` and
    ``[HttpData] date``; the latter two become the query parameters.
    """

    def __init__(self, path):
        conf = configparser.ConfigParser()
        conf.read(path)
        self.url = conf.get("url", "Http_url")
        key = conf.get("HttpData", "key")
        date = conf.get("HttpData", "date")
        self.data = {"key": key, "date": date}

    def Req(self):
        """Issue the GET request and print the decoded response body."""
        # BUG FIX: the original assigned the urlencoded string back onto
        # ``self.data``, so a second call to Req() urlencoded an already
        # encoded string.  Keep the encoding in a local so Req() is repeatable.
        query = urllib.parse.urlencode(self.data)
        url = self.url + "?" + query
        try:
            response = urllib.request.urlopen(url)
            body = response.read().decode("utf-8")
            print(body)
        except Exception:
            # Best-effort: report the failure rather than crash the caller.
            print("HttpGet请求错误")
# Project secrets configuration file
# Placed in the project root directory
# NOTE(review): live credentials are hard-coded below.  This file must be kept
# out of version control; rotate these secrets and load them from environment
# variables instead.
### Django app secret key
SECRET_KEY = '-qhsgt6r3a4lb1*181+hl141#o@7@am29wa8v$^@dgp(1e)=yj'
QQ_SECRET = 'B5T4EEMD2MHnmGyX'
### MySQL configuration
MYSQL_HOST = 'cdb-07n3b91f.gz.tencentcdb.com'
MYSQL_PORT = 10081
MYSQL_USER = 'root'
MYSQL_PASSWORD = '18759799353gjb!'
### Email sending configuration
# Sender's SMTP authorization code
EMAIL_HOST_PASSWORD = 'xfYC4mkT2QLPuBQv'
### Celery configuration
# Redis broker address
BROKER_URL = 'redis://www.gaoblog.cn:6379/6'
# Celery result backend, usable for tracking task results
CELERY_RESULT_BACKEND = 'redis://www.gaoblog.cn:6379/0'
#Importing all the important modules that are reuired
import pandas as pd
import numpy as np
import random
import string
# Dataset is loaded (hard-coded local path — TODO: parameterize).
df = pd.read_csv ('C:/Users/Hp 840/Desktop/Shakespeare_data.csv')
df.head ()
# Rows with no Player or ActSceneLine (stage directions etc.) are removed.
df = df.dropna (subset = ['Player', 'ActSceneLine'])
df.tail ()
lines = df ['PlayerLine'].tolist ()
print (lines [:4])
# Maximum number of words per generated line; also protects against infinite loops.
Line = 20
# Two probabilities whose difference is below this threshold are treated as equal.
Diff = 0.0001
# Sentinel marking the end of a line in the second-order chain.
End1 = 'endl'
# Tokenizer: strips whitespace and punctuation from a line, yielding lowercase words.
def token(line):
    """Normalize *line* and return its words as a list of lowercase tokens."""
    normalized = line.strip().lower()
    without_punct = ''.join(ch for ch in normalized if ch not in string.punctuation)
    return without_punct.split()
# Record a (key -> value) pairing in the chain dictionary.
def paired(dictionary, key, value, debug=False):
    """Append *value* to the list stored under *key*, creating the list if absent."""
    bucket = dictionary.setdefault(key, [])
    if debug:
        # Show the bucket contents *before* the append, as a trace aid.
        print(key, bucket)
    bucket.append(value)
# Convert a list of occurrences to probability values.
def probability_convertion(chain):
    """Map each word in *chain* to its relative frequency, rounded to 3 dp.

    Raises ZeroDivisionError for an empty chain (unchanged from original).
    """
    frequency = {}
    probability = {}
    num_of_words = len(chain)
    for word in chain:
        # BUG FIX: the original read from the undefined name ``frequencies``
        # (NameError at first call); the accumulator is ``frequency``.
        frequency[word] = frequency.get(word, 0) + 1
    for word, freq in frequency.items():
        probability[word] = round(float(freq) / num_of_words, 3)
    return probability
# Main function for building the Markov model.
def markov_model_built(corpus, first_order_markov_chain, second_order_markov_chain):
    """Build first- and second-order Markov chains in place from *corpus*.

    Both chain dicts are mutated: occurrence lists are accumulated and then
    converted to probability distributions.  Returns the de-duplicated list of
    words that start a line (used to seed generation).
    """
    # Words which are used to start a line in Shakespeare's plays.
    words = []
    for line in corpus:
        tokens = token(line)
        num_of_tokens = len(tokens)
        for idx in range(num_of_tokens):
            # BUG FIX: the original assigned to the name ``token`` here, which
            # made ``token`` function-local and raised UnboundLocalError at the
            # ``token(line)`` call above.  Use a distinct local name.
            tok = tokens[idx]
            if idx == 0:
                words.append(tok)
                # The first word of a line has no predecessor to chain from.
                continue
            # First-order markov chain: previous word -> current word.
            last = tokens[idx - 1]
            paired(first_order_markov_chain, last, tok)
            # The second word in a line can only have a first-level
            # markov chain since there is only a single word before it.
            if idx == 1:
                continue
            # Chain the final pair of a line with the end marker so that it
            # can be used during prediction.
            # BUG FIX: the original referenced undefined ``END_TOKEN``; the
            # module's end-of-line sentinel is ``End1``.
            if idx == num_of_tokens - 1:
                paired(second_order_markov_chain, (last, tok), End1)
            # Second-order markov chain: (word[-2], word[-1]) -> current word.
            # BUG FIX: the original keyed on the bare second-last word instead
            # of the (second_last, last) pair, which text_generated looks up.
            second_last_token = tokens[idx - 2]
            paired(second_order_markov_chain, (second_last_token, last), tok)
    # Converting first-order markov chain to probability values.
    for word, chain in first_order_markov_chain.items():
        first_order_markov_chain[word] = probability_convertion(chain)
    # Converting second-order markov chain to probability values.
    for pair, chain in second_order_markov_chain.items():
        second_order_markov_chain[pair] = probability_convertion(chain)
    print('Successfully built Markov Model!\n')
    return list(set(words))
# Helper functions for using the Markov model for text generation.
# Pick the word with the highest probability from a chain entry; near-ties
# (within Diff) are collected and one is chosen at random.
def next_word_prediction(key, dictionary, debug=False):
    """Return a (near-)maximal-probability word from *dictionary*."""
    best = 0.0
    candidates = []
    for candidate, prob in dictionary.items():
        if prob > best:
            # New strict maximum: restart the candidate list.
            best = prob
            candidates = [candidate]
        elif best - prob < Diff:
            # Effectively tied with the current maximum.
            candidates.append(candidate)
    if debug:
        print(key, candidates)
    return random.choice(candidates)
# Randomly pick a word from a first-order chain entry.
def next_word_picked(key, dictionary, debug=False):
    """Return a uniformly random key from *dictionary*."""
    if debug:
        print(dictionary)
    # BUG FIX: random.choice() requires an indexable sequence; passing the
    # dict_keys view raised TypeError in Python 3.  Materialize the keys.
    return random.choice(list(dictionary))
# Generate a line of text from the corpus chains, seeded by one word.
def text_generated(first_word, markov_chain_one, markov_chain_two):
    """Generate one line seeded from *first_word*.

    Returns the line as a string, or 0 when the (lowercased) seed is unknown
    to the first-order chain — the original falsy sentinel, kept so callers'
    ``if line:`` checks still work.
    """
    line = []
    word = first_word.lower()
    if word not in markov_chain_one:
        return 0
    line.append(word)
    # BUG FIX: the original called undefined ``pick_next_word`` and indexed
    # the chain with the raw ``first_word``; use the helper and the
    # lowercased word that was membership-checked above.
    next_word = next_word_picked(word, markov_chain_one[word])
    line.append(next_word)
    n = 0
    while n < Line:
        # BUG FIX: the original misspelled ``next_word_prediciton`` and then
        # referenced the undefined name ``next_word2``.
        # NOTE(review): assumes every generated (word, next_word) pair exists
        # in the second-order chain — a KeyError is possible; confirm the
        # model-build covers all pairs.
        candidate = next_word_prediction((word, next_word), markov_chain_two[(word, next_word)])
        if candidate == End1:
            return ' '.join(line)
        word = next_word
        next_word = candidate
        line.append(candidate)
        n += 1
    # BUG FIX: the original fell off the end and returned None when the
    # length cap was reached, silently dropping the generated line.
    return ' '.join(line)
# Write a play of the given length: one generated line per hint word.
def play(hints, markovchain1, markovchain2):
    """Print a generated line for each seed word in *hints* (falsy results skipped)."""
    for word in hints:
        # BUG FIX: the original called undefined ``write_line``; the line
        # generator in this module is ``text_generated``.
        line = text_generated(word, markovchain1, markovchain2)
        if line:
            print(line)
##################################################################
# Prediction
# The last usable word of the sequence is used as the lookup seed.
def predict(sequence, markovchain1, markovchain2):
    """Generate a continuation line seeded from the last known word of *sequence*.

    Raises ValueError when *sequence* is empty/blank.
    """
    # Sanity checks
    sequence = sequence.strip()
    if sequence == "":
        raise ValueError('Sequence is empty')
    # BUG FIX: the original's first parameter was misspelled ``markovvhain1``
    # while the body used ``markovchain1`` (NameError); callers pass it
    # positionally (see module bottom), so the rename is safe.
    tokens = token(sequence)
    line = ''
    # BUG FIX: the original loop variable was named ``token``, shadowing the
    # tokenizer and raising UnboundLocalError at the ``token(sequence)`` call.
    for tok in reversed(tokens):
        line = text_generated(tok, markovchain1, markovchain2)
        if line:
            break
    return line
# First-order markov chain: word -> probability distribution of followers.
markovchain_1 = {}
# Second-order markov chain: (word, word) pair -> distribution of followers.
markovchain_2 = {}
# Build both chains from the corpus; returns the de-duplicated line-starting words.
words = markov_model_built(lines, markovchain_1, markovchain_2)
play_length = 20
# Seed words for the generated play, sampled from the line-starters.
hints = [random.choice (words) for x in range (play_length)]
play (hints, markovchain_1, markovchain_2)
predict('Lead us from hence', markovchain_1, markovchain_2)
|
'''
Copyright (c) 2020 Thomas Wilkinson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Written by Thomas Wilkinson - github.com/T-Wilko
'''
from micropython import const
from machine import SPI, Pin
from time import sleep_ms
import ustruct
## Display resolution (pixels)
EPD_WIDTH = const(400)
EPD_HEIGHT = const(300)
# EPD_WIDTH must be divisible by 8 for this driver to work
# (the frame buffer packs 8 horizontal pixels per byte).
## Display command addresses (SPI command bytes for the panel controller)
PANEL_SETTING = const(0x00)
POWER_SETTING = const(0x01)
POWER_OFF = const(0x02)
POWER_OFF_SEQ_SET = const(0x03)
POWER_ON = const(0x04)
POWER_ON_MEASURE = const(0x05)
BOOSTER_SOFT_START_CONTROL = const(0x06)
DEEP_SLEEP_MODE = const(0x07)
START_TRANSMISSION_1 = const(0x10)
DATA_STOP = const(0x11)
DISP_REFRESH = const(0x12)
START_TRANSMISSION_2 = const(0x13)
PLL_CONTROL = const(0x30)
TEMP_CALIBRATION = const(0x40)
TEMP_SENSOR_SELECTION = const(0x41)
TEMP_SENSOR_WRITE = const(0x42)
TEMP_SENSOR_READ = const(0x43)
VCOM_DATA_INT_SETTING = const(0x50)
LOW_POWER_DETECT = const(0x51)
TCON_SETTING = const(0x60)
RES_SETTING = const(0x61)
GSST_SETTING = const(0x65)
REVISION = const(0x70)
GET_STATUS = const(0x71)
AUTO_MEASURE_VCOM = const(0x80)
VCOM_READ = const(0x81)
VCM_DC_SETTING = const(0x82)
#PARTIAL_WINDOW = const(0x90)
PARTIAL_IN = const(0x91)
PARTIAL_OUT = const(0x92)
PROGRAM_MODE = const(0xA0)
ACTIVE_PROG = const(0xA1)
READ_OTP = const(0xA2)
CASCADE_SETTING = const(0xE0)
POWER_SAVING = const(0xE3)
FORCE_TEMP = const(0xE5)
class EPD:
    """MicroPython driver for a 400x300 B&W e-paper display over SPI."""

    def __init__(self):
        self.spi = SPI(1, baudrate=20000000, polarity=0, phase=0, sck=Pin(18), mosi=Pin(23), miso=Pin(19))
        #self.spi = SPI(1, 10000000, sck=Pin(14), mosi=Pin(13), miso=Pin(12))
        self.spi.init()
        dc = Pin(27)
        cs = Pin(5)
        rst = Pin(14)
        busy = Pin(4)
        self.cs = cs
        self.dc = dc
        self.rst = rst
        self.busy = busy
        self.cs.init(self.cs.OUT, value=1)
        self.dc.init(self.dc.OUT, value=0)
        self.rst.init(self.rst.OUT, value=0)
        self.busy.init(self.busy.IN)
        self.width = EPD_WIDTH
        self.height = EPD_HEIGHT
        # One bit per pixel, packed 8 pixels per byte.
        self.size = self.width * self.height // 8
        self.buf = bytearray(self.size)

    def clearBuffer(self):
        """Fill the local frame buffer with white (0xFF) and send it to the panel."""
        self._command(bytearray([START_TRANSMISSION_2]))
        for i in range(0, len(self.buf)):
            self.buf[i] = 255
            self._data(bytearray([self.buf[i]]))
        self._command(bytearray([DATA_STOP]))

    def displayBuffer(self, buf):
        """Transmit *buf* to the panel and trigger a full refresh."""
        self._command(bytearray([START_TRANSMISSION_2]))
        for i in range(0, len(buf)):
            self._data(bytearray([buf[i]]))
        self._command(bytearray([DATA_STOP]))
        self._command(bytearray([DISP_REFRESH]))
        self.wait_until_idle()

    def _command(self, command, data=None):
        """Send a command byte (DC low), optionally followed by data bytes."""
        self.cs(1)  # according to LOLIN_EPD
        self.dc(0)
        self.cs(0)
        self.spi.write(command)
        self.cs(1)
        if data is not None:
            self._data(data)

    def _data(self, data):
        """Send data bytes (DC high)."""
        self.cs(1)  # according to LOLIN_EPD
        self.dc(1)
        self.cs(0)
        self.spi.write(data)
        self.cs(1)

    def init(self):
        """Power up and configure the panel for B&W full-resolution use."""
        # Reset EPD driver IC
        self.reset()
        # Booster soft start
        self._command(bytearray([BOOSTER_SOFT_START_CONTROL]), b'\x17\x17\x17')
        # Power setting
        self._command(bytearray([POWER_SETTING]), b'\x03\x00\x2B\x2B\x09')
        # Power on
        self._command(bytearray([POWER_ON]))
        # Check busy pin and proceed if idle
        self.wait_until_idle()
        # Panel setting - B&W and full resolution
        self._command(bytearray([PANEL_SETTING]), b'\x1F')
        # PLL control - Set to 100Hz, default is 50Hz
        self._command(bytearray([PLL_CONTROL]), b'\x3A')
        # Resolution setting
        self._command(bytearray([RES_SETTING]), b'\x01\x90\x01\x2C')
        # VCM_DC setting - Currently set to -1V, default is -0.1V i.e. b'\x00'
        self._command(bytearray([VCM_DC_SETTING]), b'\x12')
        # VCOM and Data Interval setting
        self._command(bytearray([VCOM_DATA_INT_SETTING]), b'\x87')

    def wait_until_idle(self):
        """Block until the panel's BUSY pin reports idle.

        BUG FIX: the original compared the Pin *object* to 0
        (``self.busy == 0``), which is always False, so the method returned
        immediately and refreshes were never actually awaited.  Read the pin
        level instead (Pin objects are callable, as used for cs/dc above).
        """
        # NOTE(review): assumes BUSY is active-low (reads 0 while busy) —
        # confirm against the panel datasheet.
        while self.busy() == 0:
            pass
        return

    def reset(self):
        """Hardware-reset the driver IC via the RST pin."""
        self.rst(1)
        sleep_ms(1)
        self.rst(0)
        sleep_ms(10)
        self.rst(1)

    # to wake call reset() or init()
    def sleep(self):
        """Put the panel into deep sleep; wake with reset() or init()."""
        self._command(bytearray([DEEP_SLEEP_MODE]))
        self.wait_until_idle()
|
# Findings: no invalid or unknown data found in any row;
# no invalid gender data found in any row;
# the total data instance count is complete;
# the "group" column uses many formats to describe this data.
import csv
import sys

# Scan gender_age_train.csv, report invalid/unknown cells, and track the
# min/max user age.  Expected columns: device_id, gender, age, group.
with open('gender_age_train.csv') as myCSV:
    csvReader = csv.reader(myCSV)
    findMaxAge = -sys.maxsize
    findMinAge = sys.maxsize
    userAge = None
    count = -1  # start at -1 so the header row is not counted
    for row in csvReader:
        count = count + 1
        # BUG FIX: reset per row — previously a row with a non-numeric age
        # silently reused the *previous* row's age in the min/max checks.
        userAge = None
        if row[2].isdigit():
            userAge = int(row[2])
        # check invalid and unknown data
        if row[0] == "unknown" or row[0] == '':
            print("found invalid data")
        if row[1] == "unknown" or row[1] == '':
            print("found invalid data")
        if row[2] == "unknown" or row[2] == '':
            print("found invalid data")
        if row[3] == "unknown" or row[3] == '':
            print("found invalid data")
        # find min and max of user age
        if userAge is not None:
            if userAge > findMaxAge:
                findMaxAge = userAge
            if userAge < findMinAge:
                findMinAge = userAge
        # gender must be "M" or "F"; anything else is invalid
        # (the header row intentionally triggers this once)
        if row[1] != "M" and row[1] != "F":
            print("found invalid gender data")
print("total data instance: ", count)
print("maximum and minimum age of user is:", findMaxAge, "and", findMinAge)
|
from guizero import App, Text, Box, PushButton, Picture, TextBox, Drawing
from lemon_pi.car.display_providers import *
from lemon_pi.car.event_defs import (
LeaveTrackEvent, StateChangePittedEvent, StateChangeSettingOffEvent, CompleteLapEvent, OBDConnectedEvent,
OBDDisconnectedEvent, GPSConnectedEvent, GPSDisconnectedEvent, RaceFlagStatusEvent, DriverMessageEvent,
DriverMessageAddendumEvent, ExitApplicationEvent, EnterTrackEvent, RadioReceiveEvent, ButtonPressEvent,
AudioAlarmEvent)
import logging
import platform
import random
import time
from python_settings import settings
from lemon_pi.shared.events import EventHandler
from lemon_pi.shared.gui_components import AlertBox, FadingBox
from lemon_pi.shared.time_provider import TimeProvider
# Module-level logger.
logger = logging.getLogger(__name__)
# Conversion factor for the fuel display (millilitres per gallon).
MILLILITRES_PER_GALLON = 3785
class ToggleImage(Picture):
    """A Picture that toggles between an 'on' and an 'off' image; starts off."""

    def __init__(self, parent, on_image, off_image, **kwargs):
        # Begin in the off state.
        Picture.__init__(self, parent, image=off_image, **kwargs)
        self.on_image = on_image
        self.off_image = off_image

    def on(self):
        """Switch to the 'on' image."""
        self.image = self.on_image

    def off(self):
        """Switch to the 'off' image."""
        self.image = self.off_image
class AlertLight(Drawing):
    """A pulsing circular alert lamp drawn on a 64x64 canvas."""

    def __init__(self, parent, color="yellow"):
        Drawing.__init__(self, parent, width=64, height=64)
        self.bg = "black"
        self.size = 32
        self.adjust = 2
        self.color = color
        self.o = self.__draw_oval()
        # Re-draw every 50ms to animate the pulse.
        self.repeat(50, self.__grow_and_shrink)

    def __draw_oval(self):
        """Draw the oval for the current radius and return its handle."""
        centre, radius = 32, self.size
        return self.oval(centre - radius, centre - radius,
                         centre + radius, centre + radius, color=self.color)

    def __grow_and_shrink(self):
        """Advance the pulse one step, bouncing the radius between 0 and 32."""
        if self.size <= 0:
            self.adjust = 2
        if self.size >= 32:
            self.adjust = -2
        self.size += self.adjust
        self.delete(self.o)
        self.o = self.__draw_oval()
class Gui(EventHandler):
    """Main Lemon-Pi driver display: splash screen, three data columns and
    a lower message row, updated by registered providers and events."""

    # these are not really constants, as they get overridden first thing based on
    # settings, but it's ok to think of them as constants
    WIDTH = 800
    HEIGHT = 480
    COL_WIDTH = 266
    SCALE_FACTOR = 1
    TEXT_TINY = 16
    TEXT_SMALL = 24
    TEXT_MED = 32
    TEXT_LARGE = 48
    TEXT_XL = 64

    def __init__(self, width, height):
        Gui.WIDTH = width
        Gui.HEIGHT = height
        Gui.COL_WIDTH = int(width / 3)
        Gui.LOWER_ROW_HEIGHT = int(Gui.HEIGHT / 5)
        Gui.SCALE_FACTOR = Gui.WIDTH / 800
        # Larger screens get bigger fonts.
        if width > 1000:
            Gui.TEXT_TINY = 24
            Gui.TEXT_SMALL = 32
            Gui.TEXT_MED = 48
            Gui.TEXT_LARGE = 64
            Gui.TEXT_XL = 72
        self.start_time = 0
        self.font = self.__identify_font(platform.system())
        self.root = App("Lemon-Pi",
                        bg="black",
                        width=Gui.WIDTH,
                        height=Gui.HEIGHT)
        self.splash = Box(self.root, width=Gui.WIDTH, height=Gui.HEIGHT, visible=True)
        Box(self.splash, width=Gui.WIDTH, height=int(100 * Gui.SCALE_FACTOR))
        Picture(self.splash, image="resources/images/perplexuslogoslpash.gif")
        Text(self.splash, "Powered by Normtronix", size=Gui.TEXT_SMALL, font=self.font, color="white")
        Box(self.splash, width=Gui.WIDTH, height=int(50 * Gui.SCALE_FACTOR))
        splash_lower = Box(self.splash, width=Gui.WIDTH, height=107, align="right")
        Text(splash_lower, "in conjunction with", size=Gui.TEXT_SMALL, font=self.font, color="white", align="right")
        self.app = Box(self.root, width=Gui.WIDTH, height=Gui.HEIGHT, visible=False)
        # this is our lower text area
        self.lower_row = Box(self.app, align="bottom", width=Gui.WIDTH, height=int(64 * Gui.SCALE_FACTOR))
        self.msg_area = Text(self.lower_row, "", align="left", size=48, font=self.font, color="white", bg="purple")
        self.col1 = Box(self.app, align="left", width=Gui.COL_WIDTH, height=Gui.HEIGHT - Gui.LOWER_ROW_HEIGHT)
        self.col2 = Box(self.app, align="left", width=Gui.COL_WIDTH, height=Gui.HEIGHT - Gui.LOWER_ROW_HEIGHT)
        self.col3 = Box(self.app, align="left", width=Gui.COL_WIDTH, height=Gui.HEIGHT - Gui.LOWER_ROW_HEIGHT)
        # these are invisible displays used to show special case data when the car is pitting
        self.col4 = Box(self.app, align="left", width=self.col3.width, height=self.col3.height, visible=False)
        self.col5 = Box(self.app, align="left", width=self.col3.width, height=self.col3.height, visible=False)
        self.time_widget = self.create_time_widget(self.col1)
        Box(self.col1, height=24, width=int(Gui.COL_WIDTH * 0.8))
        self.lap_display = self.create_lap_widget(self.col1)
        Box(self.col2, height=24, width=int(Gui.COL_WIDTH * 0.8))
        self.temp_widget: AlertBox = self.create_temp_widget(self.col2)
        Box(self.col2, height=24, width=int(Gui.COL_WIDTH * 0.8))
        self.speed_heading_widget = self.create_speed_widget(self.col2)
        self.fuel_display = self.create_fuel_widget(self.col3)
        Box(self.col2, height=24, width=int(Gui.COL_WIDTH * 0.8))
        # adding obd + gps images
        (self.gps_image, self.radio_signal, self.obd_image) = self.create_gps_obd_images(self.col2)
        # add a quit button
        if settings.EXIT_BUTTON_ENABLED:
            PushButton(self.col2, image="resources/images/exitbutton.gif", command=self.quit)
        Box(self.col2, height=24, width=int(Gui.COL_WIDTH * 0.8))
        a = AlertLight(self.col2, color="cyan")
        a.color = "yellow"
        a.visible = False
        self.stint_ending_display = self.create_stint_end_instructions(self.col4)
        self.stint_starting_display = self.create_stint_start_instructions(self.col5)
        # Subscribe to every event this GUI reacts to.
        LeaveTrackEvent.register_handler(self)
        EnterTrackEvent.register_handler(self)
        StateChangePittedEvent.register_handler(self)
        StateChangeSettingOffEvent.register_handler(self)
        CompleteLapEvent.register_handler(self)
        OBDConnectedEvent.register_handler(self)
        OBDDisconnectedEvent.register_handler(self)
        GPSConnectedEvent.register_handler(self)
        GPSDisconnectedEvent.register_handler(self)
        RaceFlagStatusEvent.register_handler(self)
        DriverMessageEvent.register_handler(self)
        DriverMessageAddendumEvent.register_handler(self)
        RadioReceiveEvent.register_handler(self)

    def present_main_app(self):
        """Hide the splash (shown for at least 5s) and reveal the main app."""
        elapsed_time = time.time() - self.start_time
        logger.info("elapsed time to initialize = {}".format(elapsed_time))
        if elapsed_time < 5:
            time.sleep(5 - elapsed_time)
        self.splash.destroy()
        self.app.visible = True

    def quit(self):
        self.root.destroy()
        ExitApplicationEvent.emit()

    def __remove_message_highlight(self):
        self.msg_area.bg = "black"

    def __remove_message(self):
        self.msg_area.bg = "black"
        self.msg_area.value = ""

    def handle_event(self, event, **kwargs):
        """Dispatch UI updates for application events."""
        if event == LeaveTrackEvent:
            self.col3.hide()
            self.col4.show()
            self.col5.hide()
            return
        if event == StateChangePittedEvent:
            self.col3.hide()
            self.col4.hide()
            self.col5.show()
            return
        if event == StateChangeSettingOffEvent or event == EnterTrackEvent:
            self.col3.show()
            self.col4.hide()
            self.col5.hide()
            return
        if event == RadioReceiveEvent:
            self.radio_signal.brighten()
            return
        if event == RaceFlagStatusEvent:
            # if it's green, make sure the background of the speed dial is black
            flag = kwargs.get("flag")
            self.speed_heading_widget.text_color = "white"
            if flag == "GREEN":
                self.speed_heading_widget.bg = "black"
            elif flag == "YELLOW":
                self.speed_heading_widget.bg = "yellow"
                self.speed_heading_widget.text_color = "black"
            elif flag == "RED":
                self.speed_heading_widget.bg = "red"
            elif flag == "BLACK":
                self.speed_heading_widget.bg = "dark-blue"
            else:
                logger.warning("unknown flag state : {}".format(flag))
            # CONSISTENCY FIX: every other branch returns; nothing below
            # applies to this event (behavior unchanged — later tests could
            # never match — but the fall-through was accidental).
            return
        if event == DriverMessageEvent:
            self.msg_area.text_size = Gui.TEXT_LARGE
            self.msg_area.value = kwargs.get("text")
            duration_secs = kwargs.get("duration_secs")
            self.msg_area.bg = "purple"
            # we cancel any remove message callback to ensure this message
            # stays until it is replaced or stays for the configured time
            self.msg_area.cancel(self.__remove_message)
            self.msg_area.after(3000, self.__remove_message_highlight)
            self.msg_area.after(duration_secs * 1000, self.__remove_message)
            return
        # when the car behind us crosses the line we get an update on the time
        # between them and us, so we add this to the message on show
        if event == DriverMessageAddendumEvent:
            self.msg_area.text_size = Gui.TEXT_SMALL
            self.msg_area.value = self.msg_area.value + kwargs.get("text")
            return
        # go back to the fuel display if we complete a lap and it is not showing.
        if event == CompleteLapEvent and not self.col3.visible:
            self.col3.show()
            self.col4.hide()
            self.col5.hide()
            return
        if event == OBDConnectedEvent:
            self.obd_image.on()
            return
        if event == OBDDisconnectedEvent:
            self.obd_image.off()
            return
        if event == GPSConnectedEvent:
            self.gps_image.on()
            return
        if event == GPSDisconnectedEvent:
            self.gps_image.off()
            return

    def handle_keyboard(self, event_data):
        """Keyboard shortcuts (mostly for development/simulation)."""
        logger.info("Key Pressed : {}".format(event_data.key))
        # check if we got a CTRL-C
        if ord(event_data.key) == 3:
            self.quit()
            return
        if event_data.key == "s":
            # imitate start/finish behavior
            self.col3.hide()
            self.col4.show()
            self.col5.hide()
        if event_data.key == "f":
            self.col3.hide()
            self.col4.hide()
            self.col5.show()
        if event_data.key == "h":
            self.col3.show()
            self.col4.hide()
            self.col5.hide()
        if event_data.key == 'g':
            self.gps_image.on()
        if event_data.key == 'G':
            self.gps_image.off()
        if event_data.key == 'o':
            self.obd_image.on()
        if event_data.key == 'O':
            self.obd_image.off()
        if event_data.key == 'l':
            self.__updateLap(randomLapTimeProvider)
        if event_data.key == 'p':
            self.handle_event(RadioReceiveEvent)
        if event_data.key == 'b':
            ButtonPressEvent.emit(button=0)

    def display(self):
        self.root.when_key_pressed = self.handle_keyboard
        # on raspberry pi we go full screen
        if platform.system() == "Linux":
            self.root.set_full_screen()
        self.start_time = time.time()
        self.root.display()
        # don't put any code here ... the display loop never returns

    def register_temp_provider(self, provider: TemperatureProvider):
        # might need to store in order to cancel
        self.temp_widget.repeat(1000, self.__updateTemp, args=[provider])

    def register_time_provider(self, provider: TimeProvider):
        # might need to store in order to cancel
        self.time_widget.repeat(1000, self.__updateTime, args=[provider])
        self.time_widget.repeat(500, self.__update_time_beat)

    def register_lap_provider(self, provider: LapProvider):
        self.time_widget.repeat(500, self.__updateLap, args=[provider])

    def register_speed_provider(self, provider: SpeedProvider):
        self.speed_heading_widget.repeat(200, self.__updateSpeed, args=[provider])

    def register_fuel_provider(self, provider: FuelProvider):
        self.fuel_display.repeat(1000, self.__updateFuel, args=[provider])

    def create_gps_obd_images(self, parent):
        """Build the GPS / radio / OBD status indicators; returns the triple."""
        result = Box(parent, width=int(Gui.COL_WIDTH * 0.8), height=int(48 * Gui.SCALE_FACTOR))
        #result.set_border(4, "darkgreen")
        gps = ToggleImage(result,
                          "resources/images/gps_ok.gif",
                          "resources/images/gps_off.gif",
                          align="left")
        Box(result, width=32, height=32, align="left")
        Text(result, "Radio", size=Gui.TEXT_TINY, color="darkgreen", align="left")
        radio = FadingBox(result, width=32, height=32, align="left")
        obd = ToggleImage(result,
                          "resources/images/obd_ok.gif",
                          "resources/images/obd_off.gif",
                          align="right")
        return gps, radio, obd

    def create_temp_widget(self, parent):
        result = AlertBox(parent, width=int(Gui.COL_WIDTH * 0.8), height=int(112 * Gui.SCALE_FACTOR))
        result.set_range(settings.TEMP_BAND_LOW, settings.TEMP_BAND_WARN, settings.TEMP_BAND_HIGH)
        result.set_alarm_cb(lambda: AudioAlarmEvent.emit(message="Engine Overheating"))
        result.set_border(4, "darkgreen")
        Text(result, "TEMP", size=Gui.TEXT_SMALL, color="white")
        Text(result, "???", size=Gui.TEXT_XL, font=self.font, color="white")
        return result

    def create_time_widget(self, parent):
        # Children indexes matter: [1]=hours, [2]=blinking colon, [3]=minutes
        # (see __updateTime / __update_time_beat).
        result = Box(parent, width=int(Gui.COL_WIDTH * 0.8), height=int(112 * Gui.SCALE_FACTOR))
        result.set_border(4, "darkgreen")
        Text(result, "TIME", size=Gui.TEXT_SMALL, font=self.font, color="white")
        Text(result, "hh", size=Gui.TEXT_XL, font=self.font, color="white", align="left")
        Text(result, ":", size=Gui.TEXT_MED, font=self.font, color="white", align="left")
        Text(result, "mm", size=Gui.TEXT_XL, font=self.font, color="white", align="left")
        # Text(result, "ss", size=Gui.TEXT_XL, font=self.font, color="grey", align="left")
        return result

    def create_speed_widget(self, parent):
        result = Box(parent, width=int(Gui.COL_WIDTH * 0.8), height=int(100 * Gui.SCALE_FACTOR))
        result.set_border(4, "darkgreen")
        Text(result, "???", size=Gui.TEXT_XL, font=self.font, color="white", align="left")
        Text(result, "mph", size=Gui.TEXT_TINY, color="white", font=self.font, align="left")
        return result

    def create_lap_widget(self, parent):
        # Children indexes matter: [1]=lap count, [2]=lap timer, [5]=last lap
        # (see __updateLap).
        result = Box(parent, width=int(Gui.COL_WIDTH * 0.8), height=int(260 * Gui.SCALE_FACTOR))
        result.set_border(4, "darkgreen")
        Text(result, "LAP", size=Gui.TEXT_SMALL, font=self.font, color="white")
        Text(result, "---", size=Gui.TEXT_SMALL, font=self.font, color="white")
        Text(result, "mm:ss", size=Gui.TEXT_MED, font=self.font, color="white")
        Box(result, width=200, height=16)
        Text(result, "Last Lap", size=Gui.TEXT_TINY, font=self.font, color="white")
        Text(result, "mm:ss.S", size=Gui.TEXT_MED, font=self.font, color="white")
        return result

    def create_fuel_widget(self, parent):
        result = Box(parent)
        result.set_border(4, "darkgreen")
        Text(result, "FUEL (Gal)", size=Gui.TEXT_SMALL, color="lightgreen", font=self.font)
        total_box = Box(result, height=int(100 * Gui.SCALE_FACTOR), width=int(Gui.COL_WIDTH * 0.8))
        Text(total_box, "Total\nUsed", size=Gui.TEXT_TINY, color="lightgreen", font=self.font, align="left")
        Text(total_box, "--.--", size=Gui.TEXT_MED, color="lightgreen", font=self.font, align="left")
        last_lap_box = Box(result, height=int(100 * Gui.SCALE_FACTOR), width=int(Gui.COL_WIDTH * 0.8))
        Text(last_lap_box, "Last\nLap", size=Gui.TEXT_TINY, color="lightgreen", font=self.font, align="left")
        # a wee spacer to align this row with the one above
        Box(last_lap_box, width=12 * Gui.SCALE_FACTOR, height=10, align="left")
        Text(last_lap_box, "--.--", size=Gui.TEXT_MED, color="lightgreen", font=self.font, align="left")
        remaining_box = Box(result, height=int(100 * Gui.SCALE_FACTOR), width=int(Gui.COL_WIDTH * 0.8))
        Text(remaining_box, "Rem.", size=Gui.TEXT_TINY, color="lightgreen", font=self.font, align="left")
        Text(remaining_box, "--.--", size=Gui.TEXT_MED, color="lightgreen", font=self.font, align="left")
        Text(remaining_box, "%", size=Gui.TEXT_TINY, color="lightgreen", font=self.font, align="left")
        return result

    def create_stint_end_instructions(self, parent):
        result = Box(parent)
        result.set_border(4, "darkgreen")
        Text(result, "INSTRUCTIONS", size=Gui.TEXT_SMALL, color="lightgreen", font=self.font)
        instructions = TextBox(result, multiline=True,
                               width=parent.width - 8, height=parent.height - 24)
        instructions.text_size = Gui.TEXT_SMALL
        instructions.text_color = "white"
        instructions.font = self.font
        instructions.value = settings.ENTER_PIT_INSTRUCTIONS
        return result

    def create_stint_start_instructions(self, parent):
        result = Box(parent)
        result.set_border(4, "darkgreen")
        Text(result, "INSTRUCTIONS", size=Gui.TEXT_SMALL, color="lightgreen", font=self.font)
        instructions = TextBox(result, multiline=True,
                               width=parent.width - 8, height=parent.height - 24)
        instructions.text_size = Gui.TEXT_SMALL
        instructions.text_color = "white"
        instructions.font = self.font
        instructions.value = settings.SET_OFF_INSTRUCTIONS
        return result

    def __updateTemp(self, provider: TemperatureProvider):
        val = provider.get_temp_f()
        self.temp_widget.update_value(val)

    def __updateTime(self, provider: TimeProvider):
        self.time_widget.children[1].value = "{:02d}".format(provider.get_hours())
        self.time_widget.children[3].value = "{:02d}".format(provider.get_minutes())

    def __update_time_beat(self):
        # Blink the colon between hours and minutes once per call.
        beat: Text = self.time_widget.children[2]
        if beat.text_color == "white":
            beat.text_color = self.app.bg
        else:
            beat.text_color = "white"

    def __updateSpeed(self, provider: SpeedProvider):
        self.speed_heading_widget.children[0].value = "{:02d}".format(provider.get_speed())
        # self.speed_heading_widget.children[2].value = str(provider.get_heading())

    def __updateLap(self, provider: LapProvider):
        self.lap_display.children[1].value = provider.get_lap_count()
        if provider.get_lap_count() != 999:
            minutes = int(provider.get_lap_timer() / 60)
            seconds = int(provider.get_lap_timer()) % 60
            self.lap_display.children[2].value = "{:02d}:{:02d}".format(minutes, seconds)
        if provider.get_last_lap_time() > 0:
            ll = provider.get_last_lap_time()
            minutes = int(ll / 60)
            seconds = int(ll) % 60
            tenths = int((ll - int(ll)) * 10)
            self.lap_display.children[5].value = "{:02d}:{:02d}.{:01d}".format(minutes, seconds, tenths)

    def __updateFuel(self, provider: FuelProvider):
        # children offsets:
        total_used_box: Box = self.fuel_display.children[1]
        # last_hour_box : Box = self.fuel_display.children[2]
        last_lap_box: Box = self.fuel_display.children[2]
        remaining_box: Box = self.fuel_display.children[3]
        total_used_box.children[1].value = "{:02.2f}".format(provider.get_fuel_used_ml() / MILLILITRES_PER_GALLON)
        # last_hour_box.children[1].value = "{:02.2f}".format(provider.get_fuel_used_last_hour_ml() / MILLILITRES_PER_GALLON)
        last_lap_box.children[1].value = "{:1.02f}".format(provider.get_fuel_used_last_lap_ml() / MILLILITRES_PER_GALLON)
        remaining_box.children[1].value = "{:02d}".format(provider.get_fuel_percent_remaining())

    def __identify_font(self, platform):
        """Return a font name for the host OS; raises on unsupported platforms."""
        if platform == "Darwin":
            return "arial"
        elif platform == "Linux":
            return "freesans"
        else:
            # BUG FIX: the original constructed the Exception but never raised
            # it, silently returning None (and later crashing wherever the
            # font was used).
            raise Exception("no font defined for {}".format(platform))
### test classes
class RandomLapTimeProvider(LapProvider):
    """Test stub: fixed lap count, wall-clock lap timer, random last-lap time."""

    def __init__(self):
        self.start_time = time.time()

    def get_last_lap_time(self) -> float:
        # A random lap time between 100.000s and 300.000s.
        return random.randint(100000, 300000) / 1000

    def get_lap_timer(self) -> int:
        elapsed = time.time() - self.start_time
        return int(elapsed)

    def get_lap_count(self) -> int:
        return 145
# Module-level stub instance used by Gui's 'l' keyboard shortcut.
randomLapTimeProvider = RandomLapTimeProvider()
|
# coding: utf-8
"""
Xero Finance API
The Finance API is a collection of endpoints which customers can use in the course of a loan application, which may assist lenders to gain the confidence they need to provide capital. # noqa: E501
Contact: api@xero.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class BankTransactionResponse(BaseModel):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
          and the value is attribute type.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
    """
    # Python attribute name -> OpenAPI type, consumed by BaseModel (de)serialization.
    openapi_types = {
        "bank_transaction_id": "str",
        "batch_payment_id": "str",
        "contact": "ContactResponse",
        "date": "date",
        "amount": "float",
        "line_items": "list[LineItemResponse]",
    }
    # Python attribute name -> JSON key in the API payload.
    attribute_map = {
        "bank_transaction_id": "bankTransactionId",
        "batch_payment_id": "batchPaymentId",
        "contact": "contact",
        "date": "date",
        "amount": "amount",
        "line_items": "lineItems",
    }
    def __init__(
        self,
        bank_transaction_id=None,
        batch_payment_id=None,
        contact=None,
        date=None,
        amount=None,
        line_items=None,
    ):  # noqa: E501
        """BankTransactionResponse - a model defined in OpenAPI"""  # noqa: E501
        # Backing fields; None means "not present in the payload".
        self._bank_transaction_id = None
        self._batch_payment_id = None
        self._contact = None
        self._date = None
        self._amount = None
        self._line_items = None
        self.discriminator = None
        # Setters run only for explicitly supplied values.
        if bank_transaction_id is not None:
            self.bank_transaction_id = bank_transaction_id
        if batch_payment_id is not None:
            self.batch_payment_id = batch_payment_id
        if contact is not None:
            self.contact = contact
        if date is not None:
            self.date = date
        if amount is not None:
            self.amount = amount
        if line_items is not None:
            self.line_items = line_items
    @property
    def bank_transaction_id(self):
        """Gets the bank_transaction_id of this BankTransactionResponse.  # noqa: E501
        Xero Identifier of transaction  # noqa: E501
        :return: The bank_transaction_id of this BankTransactionResponse.  # noqa: E501
        :rtype: str
        """
        return self._bank_transaction_id
    @bank_transaction_id.setter
    def bank_transaction_id(self, bank_transaction_id):
        """Sets the bank_transaction_id of this BankTransactionResponse.
        Xero Identifier of transaction  # noqa: E501
        :param bank_transaction_id: The bank_transaction_id of this BankTransactionResponse.  # noqa: E501
        :type: str
        """
        self._bank_transaction_id = bank_transaction_id
    @property
    def batch_payment_id(self):
        """Gets the batch_payment_id of this BankTransactionResponse.  # noqa: E501
        Xero Identifier of batch payment. Present if the transaction is part of a batch.  # noqa: E501
        :return: The batch_payment_id of this BankTransactionResponse.  # noqa: E501
        :rtype: str
        """
        return self._batch_payment_id
    @batch_payment_id.setter
    def batch_payment_id(self, batch_payment_id):
        """Sets the batch_payment_id of this BankTransactionResponse.
        Xero Identifier of batch payment. Present if the transaction is part of a batch.  # noqa: E501
        :param batch_payment_id: The batch_payment_id of this BankTransactionResponse.  # noqa: E501
        :type: str
        """
        self._batch_payment_id = batch_payment_id
    @property
    def contact(self):
        """Gets the contact of this BankTransactionResponse.  # noqa: E501
        :return: The contact of this BankTransactionResponse.  # noqa: E501
        :rtype: ContactResponse
        """
        return self._contact
    @contact.setter
    def contact(self, contact):
        """Sets the contact of this BankTransactionResponse.
        :param contact: The contact of this BankTransactionResponse.  # noqa: E501
        :type: ContactResponse
        """
        self._contact = contact
    @property
    def date(self):
        """Gets the date of this BankTransactionResponse.  # noqa: E501
        Date of transaction - YYYY-MM-DD  # noqa: E501
        :return: The date of this BankTransactionResponse.  # noqa: E501
        :rtype: date
        """
        return self._date
    @date.setter
    def date(self, date):
        """Sets the date of this BankTransactionResponse.
        Date of transaction - YYYY-MM-DD  # noqa: E501
        :param date: The date of this BankTransactionResponse.  # noqa: E501
        :type: date
        """
        self._date = date
    @property
    def amount(self):
        """Gets the amount of this BankTransactionResponse.  # noqa: E501
        Amount of transaction  # noqa: E501
        :return: The amount of this BankTransactionResponse.  # noqa: E501
        :rtype: float
        """
        return self._amount
    @amount.setter
    def amount(self, amount):
        """Sets the amount of this BankTransactionResponse.
        Amount of transaction  # noqa: E501
        :param amount: The amount of this BankTransactionResponse.  # noqa: E501
        :type: float
        """
        self._amount = amount
    @property
    def line_items(self):
        """Gets the line_items of this BankTransactionResponse.  # noqa: E501
        The LineItems element can contain any number of individual LineItem sub-elements. Not included in summary mode  # noqa: E501
        :return: The line_items of this BankTransactionResponse.  # noqa: E501
        :rtype: list[LineItemResponse]
        """
        return self._line_items
    @line_items.setter
    def line_items(self, line_items):
        """Sets the line_items of this BankTransactionResponse.
        The LineItems element can contain any number of individual LineItem sub-elements. Not included in summary mode  # noqa: E501
        :param line_items: The line_items of this BankTransactionResponse.  # noqa: E501
        :type: list[LineItemResponse]
        """
        self._line_items = line_items
|
# -*- coding: utf-8 -*-
import scrapy
import re
try:
    # Python 2 name for the URL-parsing module.
    import urlparse as parse
except ImportError:
    # Bug fix: bare `except:` would also swallow KeyboardInterrupt/SystemExit;
    # only an ImportError should trigger the Python 3 fallback.
    from urllib import parse
class FishcSpider(scrapy.Spider):
    """Spider that logs into fishc.com.cn and scrapes one forum post for
    resource entries (title, download addresses, passwords)."""
    name = 'fishc'
    # Bug fix: allowed_domains must contain bare domain names, not URLs;
    # scrapy's offsite filtering ignores entries with a scheme/path.
    allowed_domains = ['fishc.com.cn']
    start_urls = ['https://fishc.com.cn/thread-51842-1-1.html']
    headers = {
        "HOST": "fishc.com.cn",
        "Referer": "https://fishc.com.cn/",
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.87 Safari/537.36"
    }
    custom_settings = {
        "COOKIES_ENABLED": True
    }

    def parse(self, response):
        """Parse the thread post body.

        Each <br>-separated line may carry a <strong> title, an <a href>
        download address, and a trailing password; addresses attach to the
        most recent title, passwords to the most recent address.
        """
        # Renamed from `str`, which shadowed the builtin.
        body = response.css("#postmessage_2051388").extract()[0]
        lines = body.split("<br>")
        resultList = []
        for line in lines:
            titleResult = re.findall(r'.*<strong>(.*?)</strong>.*', line)
            if titleResult:
                print(titleResult)
                resultList.append({"title": titleResult[0]})
            addrResult = re.findall(r'.*<a href="(.*?)" target.*', line)
            if addrResult:
                if "addrList" in resultList[-1]:
                    resultList[-1]["addrList"].append({"address": addrResult[0]})
                else:
                    resultList[-1]["addrList"] = [
                        {"address": addrResult[0]}
                    ]
            passResult = re.findall(r'.*密码:(.*?)$', line)
            if passResult:
                resultList[-1]["addrList"][-1]["password"] = passResult[0]
        print(resultList)

    # Log in before crawling.
    def start_requests(self):
        """Submit the login form first; crawling starts from check_login."""
        post_url = "https://fishc.com.cn/member.php?mod=logging&action=login&loginsubmit=yes&infloat=yes&lssubmit=yes&inajax=1"
        # NOTE(review): credentials are hard-coded in source — move them to
        # settings/environment before publishing this code.
        post_data = {
            "username": "shen1986",
            "password": "4456d776d1c73dddaa25d88b317ace7b",
            "quickforward": "yes",
            "handlekey": "ls"
        }
        return [scrapy.FormRequest(
            url=post_url,
            formdata=post_data,
            headers=self.headers,
            callback=self.check_login
        )]

    def check_login(self, response):
        # Crude success check: any HTTP 200 response is treated as logged in.
        if response.status == 200:
            for url in self.start_urls:
                yield scrapy.Request(url, dont_filter=True, headers=self.headers)
|
from .Weapons import Wand
class OakWand(Wand):
    """Tier 1 wand: base +5 INT, 2 damage, 3 action cost."""
    def __init__(self, name="Oak Wand", value=10, weight=0.5,
                 strBuff=0, agiBuff=0, intBuff=5, damage=2, actionCost=3, scaleValue=1.0, tier=1):
        super().__init__(name, value, weight, strBuff, agiBuff, intBuff, damage, actionCost, scaleValue, tier)
        # Presumably these randomize stats/damage/weight within the given
        # ranges — confirm against the Wand base class.
        self.setStats([0, 0, 2])
        self.setDamage(-1, 1)
        self.setWeight(-0.1, 0.2)
class BirchWand(Wand):
    """Tier 2 wand: base +7 INT, 3 damage, 3 action cost."""
    def __init__(self, name="Birch Wand", value=25, weight=0.6,
                 strBuff=0, agiBuff=0, intBuff=7, damage=3, actionCost=3, scaleValue=1.3, tier=2):
        super().__init__(name, value, weight, strBuff, agiBuff, intBuff, damage, actionCost, scaleValue, tier)
        self.setStats([0, 0, 3])
        self.setDamage(-2, 3)
        self.setWeight(-0.2, 0.3)
class DogwoodWand(Wand):
    """Tier 3 wand: adds agility and a lower action cost."""
    def __init__(self, name="Dogwood Wand", value=35, weight=0.7,
                 strBuff=0, agiBuff=2, intBuff=6, damage=2, actionCost=2, scaleValue=1.9, tier=3):
        super().__init__(name, value, weight, strBuff, agiBuff, intBuff, damage, actionCost, scaleValue, tier)
        self.setStats([0, 1, 3])
        self.setDamage(-1, 3)
        self.setWeight(-0.2, 0.3)
class HemlockWand(Wand):
    """Tier 4 wand: agility-heavy (+10 AGI) with higher damage."""
    def __init__(self, name="Hemlock Wand", value=45, weight=0.4,
                 strBuff=0, agiBuff=10, intBuff=5, damage=5, actionCost=4, scaleValue=2.5, tier=4):
        super().__init__(name, value, weight, strBuff, agiBuff, intBuff, damage, actionCost, scaleValue, tier)
        self.setStats([0, 5, 3])
        self.setDamage(-1, 3)
        self.setWeight(-0.1, 0.3)
class WalnutWand(Wand):
    """Tier 5 wand: heavy, high damage, high action cost."""
    def __init__(self, name="Walnut Wand", value=55, weight=1.5,
                 strBuff=0, agiBuff=3, intBuff=7, damage=7, actionCost=7, scaleValue=3.0, tier=5):
        super().__init__(name, value, weight, strBuff, agiBuff, intBuff, damage, actionCost, scaleValue, tier)
        self.setStats([0, 1, 4])
        self.setDamage(-1, 4)
        self.setWeight(-0.1, 0.4)
class SycamoreWand(Wand):
    """Tier 6 wand: buffs all three stats."""
    def __init__(self, name="Sycamore Wand", value=65, weight=1.5,
                 strBuff=2, agiBuff=5, intBuff=10, damage=9, actionCost=7, scaleValue=3.5, tier=6):
        super().__init__(name, value, weight, strBuff, agiBuff, intBuff, damage, actionCost, scaleValue, tier)
        self.setStats([0, 2, 7])
        self.setDamage(-1, 5)
        self.setWeight(-0.2, 0.3)
class ElderWand(Wand):
    """Tier 7 (top) wand: strongest buffs and damage."""
    def __init__(self, name="Elder Wand", value=75, weight=1,
                 strBuff=3, agiBuff=12, intBuff=15, damage=13, actionCost=10, scaleValue=4.0, tier=7):
        super().__init__(name, value, weight, strBuff, agiBuff, intBuff, damage, actionCost, scaleValue, tier)
        self.setStats([1, 10, 12])
        self.setDamage(-2, 5)
        self.setWeight(-0.5, 0.7)
|
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# ails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
#
# Author Trevor Steyn <trevor@webon.co.za
# (warn, crit) thresholds in degrees Celsius.
patton_temperature_default_values = (55, 60)
def inventory_patton_temperature(info):
    # One unnamed service per device, parametrised by the default thresholds.
    return [ ( None, "patton_temperature_default_values" ) ]
def check_patton_temperature(item, params, info):
    """Check the Patton device temperature against (warn, crit) thresholds.

    :param item: unused (single unnamed service)
    :param params: (warn, crit) thresholds in degrees Celsius
    :param info: SNMP table; info[0][0] is the temperature as a string
    :return: (state, message, perfdata) Check_MK tuple
    """
    warn, crit = params
    temp = int(info[0][0])
    perfdata = [("temp", temp, warn, crit)]
    message = "Temperature is '%d'C" % temp
    # temp is already an int — the original re-cast it in every branch and
    # carried an unreachable UNKNOWN fallback; check crit before warn.
    if temp >= crit:
        return (2, message, perfdata)
    if temp >= warn:
        return (1, message, perfdata)
    return (0, message, perfdata)
# Register the check in the Check_MK catalogue.
# NOTE(review): `check_info` is injected by the Check_MK check API at load
# time; it is not defined in this file.
check_info["patton_temperature"] = {
    'check_function'          : check_patton_temperature,
    'inventory_function'      : inventory_patton_temperature,
    'service_description'     : 'Temperature',
    'snmp_info'               : ( '.1.3.6.1.4.1.1768.100.70.30.2.1', [ '2.1'] ),
    'snmp_scan_function'      : lambda oid: oid(".1.3.6.1.4.1.1768.100.70.30.2.1.2.1") != None,
    'has_perfdata'            : True
}
|
# coding:utf-8
# 把KNN的结果作为输入
import math
# Merge KNN class probabilities (as log-odds) into the training feature file.
# Bug fixes: the py2-only `file()` builtin is replaced with `open()`, and all
# three handles are now closed via context managers.
original_path = '../../../features/3_refine_street/train.csv'
knn_path = '../../../features/4_points/knn_train.csv'
target_path = '../../../features/4_points/train.csv'
with open(original_path) as original, \
        open(knn_path) as knn, \
        open(target_path, 'w') as target:
    knn.readline()  # skip the KNN file's header row
    line_cnt = 0
    for line in original:
        target.write(line[:-1])
        if line[0] == 'C':
            # Header line of the feature file: append the new column names.
            for i in range(39):
                target.write(',point_' + str(i))
            target.write('\n')
        else:
            knn_line = knn.readline().split(',')
            for i in range(39):
                # Nudge p away from exact 0/1 so the logit is finite.
                p = float(knn_line[i + 1]) + 0.0001
                target.write(',' + str(math.log(p / (1 - p))))
            target.write('\n')
        if line_cnt % 50000 == 0:
            print(line_cnt)
        line_cnt += 1
|
# Generated by Django 3.2.7 on 2021-10-27 17:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds Product.product_detail and redefines Product.name."""
    dependencies = [
        ('product', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='product',
            name='product_detail',
            field=models.TextField(default='', max_length=1000),
        ),
        migrations.AlterField(
            model_name='product',
            name='name',
            field=models.CharField(default='', max_length=1000),
        ),
    ]
|
from django.shortcuts import render
from django.http import Http404
from .models import Artist, Genre, Album
from .filters import ArtistFilter, GenreFilter
def artist_list(request):
    """Render all artists, filterable via GET parameters (ArtistFilter)."""
    artist_list = Artist.objects.all()
    artist_filter = ArtistFilter(request.GET, queryset=artist_list)
    return render(request, 'app1/artist.html', {'filter': artist_filter})
def tracks_list(request, artist_name):
    """Render one artist's tracks plus the distinct albums they appear on.

    Raises Http404 when no artist with that name exists.
    """
    try:
        artist = Artist.objects.get(name=artist_name)
        albums = Album.objects.filter(tracks_of_album__artist=artist).values('name').distinct()
    except Artist.DoesNotExist:
        raise Http404
    return render(request, 'app1/tracks.html', {
        'tracks': artist.tracks_of_artist.all(),
        'artist': artist,
        'albums': albums,
    })
def genre_list_track(request, genre_name):
    """Render one genre and its tracks; 404 when the genre does not exist."""
    try:
        genre = Genre.objects.get(name=genre_name)
    except Genre.DoesNotExist:
        raise Http404
    return render(request, 'app1/genre.html', {
        'genre': genre,
        'tracks': genre.tracks_of_genre.all()
    })
def genge_list(request):
    """Render all genres, filterable via GET parameters (GenreFilter).

    NOTE(review): function name has a typo ("genge"); kept because URLconf
    may reference it by name — rename together with the URL pattern.
    """
    genre = Genre.objects.all()
    genre_filter = GenreFilter(request.GET, queryset=genre)
    return render(request, 'app1/genre_list.html', {'filter': genre_filter})
def album_list(request):
    """Render the full album list."""
    albums = Album.objects.all()
    return render(request, 'app1/album_list.html', {'albums': albums})
def album_list_track(request, album_name):
    """Render one album and its tracks; 404 when the album does not exist.

    Bug fix: the except clause previously caught Genre.DoesNotExist, so a
    missing album escaped as an unhandled Album.DoesNotExist (HTTP 500)
    instead of the intended 404.
    """
    try:
        album = Album.objects.get(name=album_name)
    except Album.DoesNotExist:
        raise Http404
    return render(request, 'app1/album.html', {
        'album': album,
        'tracks': album.tracks_of_album.all()
    })
class TreeNode(object):
    """Binary-tree node: a value plus optional left/right children."""
    def __init__(self, x):
        self.val = x       # node payload
        self.left = None   # left child, set by the caller
        self.right = None  # right child, set by the caller
class Solution(object):
    """Subtree of Another Tree (LeetCode 572) via height-bucketed candidates."""

    def height(self, node, h):
        """Return the height of *node* (leaf = 1, None = 0), recording every
        node into h[height] so subtree candidates can be found by height.
        """
        if not node:
            return 0
        d = max(self.height(node.left, h), self.height(node.right, h)) + 1
        h.setdefault(d, []).append(node)
        return d

    def isequal(self, s, t):
        """True iff the trees rooted at s and t are structurally identical
        with equal values.

        Bug fixes vs the original stack-lockstep version: it returned
        bool(Q2) at the end (True exactly when t had LEFTOVER nodes, i.e.
        when the trees were unequal), and popping both stacks in lockstep
        could not distinguish a left child from a right child.
        """
        if s is None and t is None:
            return True
        if s is None or t is None:
            return False
        return (s.val == t.val
                and self.isequal(s.left, t.left)
                and self.isequal(s.right, t.right))

    def isSubtree(self, s, t):
        """
        :type s: TreeNode
        :type t: TreeNode
        :rtype: bool
        """
        h = {}
        self.height(s, h)
        dt = self.height(t, {})
        # Bug fix: when t is taller than s there are no candidates of that
        # height; the original raised KeyError on h[dt].
        for node in h.get(dt, []):
            if self.isequal(node, t):
                return True
        return False
|
import re
import os
import numpy as np
import pandas as pd
from pathlib import Path
from datetime import datetime
import tensorflow as tf
from tensorflow.keras.applications import ResNet50, MobileNetV2, InceptionV3
from tensorflow.keras.preprocessing.image import ImageDataGenerator
def list_dataset():
    """Print the path of every file found under ml/dataset (recursive)."""
    for root, _dirs, files in os.walk('ml/dataset'):
        for fname in files:
            print(os.path.join(root, fname))
# Add class name prefix to each path based on class name include in filename
def add_class_name_prefix(df, col_name):
    """Prefix each filename in df[col_name] with its class name.

    Filenames look like 'glass123.jpg'; the class name is everything before
    the first digit, so the value becomes 'glass/glass123.jpg' (matching the
    on-disk directory layout). Mutates and returns df.
    """
    # Raw string fixes the invalid-escape-sequence warning of "\d";
    # compiling once hoists the pattern out of the per-row lambda.
    first_digit = re.compile(r"\d")
    df[col_name] = df[col_name].apply(
        lambda x: x[:first_digit.search(x).start()] + '/' + x)
    return df
def class_id_to_label(id):
    """Translate a numeric garbage-class id (1..6) into its label string."""
    labels = {1: 'glass', 2: 'paper', 3: 'cardboard',
              4: 'plastic', 5: 'metal', 6: 'trash'}
    return labels[id]
IMAGES_DIR = Path('ml/dataset/Garbage classification/Garbage classification/').absolute()
train_file = Path('ml/dataset/one-indexed-files-notrash_train.txt').absolute()
val_file = Path('ml/dataset/one-indexed-files-notrash_val.txt').absolute()
test_file = Path('ml/dataset/one-indexed-files-notrash_test.txt').absolute()
# Each index file holds lines of the form "<filename> <class_id>".
df_train = pd.read_csv(train_file, sep=' ', header=None, names=['rel_path', 'label'])
df_valid = pd.read_csv(val_file, sep=' ', header=None, names=['rel_path', 'label'])
# Bug fix: the test split previously re-read val_file, so "test" metrics
# were silently computed on the validation set.
df_test = pd.read_csv(test_file, sep=' ', header=None, names=['rel_path', 'label'])
df_train = add_class_name_prefix(df_train, 'rel_path')
df_valid = add_class_name_prefix(df_valid, 'rel_path')
df_test = add_class_name_prefix(df_test, 'rel_path')
df_train['label'] = df_train['label'].apply(class_id_to_label)
df_valid['label'] = df_valid['label'].apply(class_id_to_label)
df_test['label'] = df_test['label'].apply(class_id_to_label)
print(f'Found {len(df_train)} training, {len(df_valid)} validation and {len(df_test)} samples.')
# One shared generator (no augmentation configured); images are resolved
# from the dataframe rows relative to IMAGES_DIR.
datagen = ImageDataGenerator()
datagen_train = datagen.flow_from_dataframe(
    dataframe=df_train,
    directory=IMAGES_DIR,
    x_col='rel_path',
    y_col='label',
    color_mode="rgb",
    class_mode="categorical",
    batch_size=32,
    shuffle=True,
    seed=7,
)
# Validation uses identical settings (note: shuffle is kept on here).
datagen_valid = datagen.flow_from_dataframe(
    dataframe=df_valid,
    directory=IMAGES_DIR,
    x_col='rel_path',
    y_col='label',
    color_mode="rgb",
    class_mode="categorical",
    batch_size=32,
    shuffle=True,
    seed=7,
)
def build_model(num_classes):
    """Build a frozen-backbone ResNet50 classifier with a fresh dense head.

    :param num_classes: number of output classes for the softmax layer
    :return: an uncompiled tf.keras.Model
    """
    backbone = ResNet50(weights='imagenet', include_top=False)
    # Head: global average pooling -> 1024-unit ReLU -> softmax over classes.
    pooled = tf.keras.layers.GlobalAveragePooling2D()(backbone.output)
    hidden = tf.keras.layers.Dense(1024, activation='relu')(pooled)
    outputs = tf.keras.layers.Dense(num_classes, activation='softmax')(hidden)
    model = tf.keras.Model(inputs=backbone.input, outputs=outputs)
    # Freeze the pretrained backbone; only the new head trains.
    for layer in backbone.layers:
        layer.trainable = False
    return model
net = build_model(num_classes=6)
net.compile(optimizer='Adam',
            loss='categorical_crossentropy',
            metrics=[tf.keras.metrics.categorical_accuracy])
net.summary()
# Stop once val_loss has not improved for 5 epochs; restore the best weights.
early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=1, restore_best_weights=True)
# NOTE(review): fit_generator/evaluate_generator are deprecated in TF2 —
# Model.fit/Model.evaluate accept these generators directly.
history = net.fit_generator(
    generator=datagen_train,
    validation_data=datagen_valid,
    epochs=30,
    validation_freq=1,
    callbacks=[early_stop]
)
import matplotlib.pyplot as plt
# Plot train/validation loss and accuracy curves side by side.
fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(20, 4))
axs[0].plot(history.history['loss'], label='loss')
axs[0].plot(history.history['val_loss'], label='val_loss')
axs[1].plot(history.history['categorical_accuracy'], label='acc')
axs[1].plot(history.history['val_categorical_accuracy'], label='val_acc')
plt.legend()
plt.show()
# batch_size=1 and shuffle=False keep predictions aligned with filenames.
test_generator = datagen.flow_from_dataframe(
    dataframe=df_test,
    directory=IMAGES_DIR,
    x_col='rel_path',
    y_col='label',
    color_mode="rgb",
    class_mode="categorical",
    batch_size=1,
    shuffle=False,
    seed=7
)
# y_pred = net.predict(test_generator, batch_size=None, verbose=0, steps=None, callbacks=None, max_queue_size=10, workers=1, use_multiprocessing=False)
filenames = test_generator.filenames
nb_samples = len(filenames)
net.evaluate_generator(test_generator, nb_samples)
# Save the trained model under a timestamped filename.
model_path = Path('ml/model')
model_path.mkdir(parents=True, exist_ok=True)
model_name = datetime.now().strftime('%m_%d_%y_%H%M%S') + '.h5'
print((model_path / model_name).absolute())
net.save((model_path / model_name).absolute())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.DiagnosisDisease import DiagnosisDisease
class Diagnosis(object):
    """Alipay API model: wraps a list of DiagnosisDisease entries."""
    def __init__(self):
        # Backing field; None until the setter is called with a list.
        self._diseases = None
    @property
    def diseases(self):
        return self._diseases
    @diseases.setter
    def diseases(self, value):
        # Accepts a list of DiagnosisDisease objects or plain dicts; dicts
        # are converted through DiagnosisDisease.from_alipay_dict.
        if isinstance(value, list):
            self._diseases = list()
            for i in value:
                if isinstance(i, DiagnosisDisease):
                    self._diseases.append(i)
                else:
                    self._diseases.append(DiagnosisDisease.from_alipay_dict(i))
    def to_alipay_dict(self):
        """Serialize to a plain dict suitable for the Alipay API."""
        params = dict()
        if self.diseases:
            if isinstance(self.diseases, list):
                # Convert elements in place to their dict form.
                for i in range(0, len(self.diseases)):
                    element = self.diseases[i]
                    if hasattr(element, 'to_alipay_dict'):
                        self.diseases[i] = element.to_alipay_dict()
            if hasattr(self.diseases, 'to_alipay_dict'):
                params['diseases'] = self.diseases.to_alipay_dict()
            else:
                params['diseases'] = self.diseases
        return params
    @staticmethod
    def from_alipay_dict(d):
        """Build a Diagnosis from an Alipay response dict (None for empty)."""
        if not d:
            return None
        o = Diagnosis()
        if 'diseases' in d:
            o.diseases = d['diseases']
        return o
|
import UdGraph
import random
import math
"""
Mathematics of Gerrymandering
Washington Experimental Mathematics Lab, 18 Sp
Project GitHub: https://github.com/weifanjiang/WXML-18wi-Research
This file contains the model to perform Metropolis-Ising algorithm on
a graph which represents an actual state
"""
class WashingtonModel:
    """
    Program to simulate Metropolis-Ising model.
    In this case, specific for Washington.
    """
    def __init__(self, adj_graph, bound, population_dict, district_num=10):
        """
        Initializing the model
        :param adj_graph: adjacency graph for Washington
        :param bound: nodes on the state boundary (used by compactness_energy)
        :param population_dict: dictionary maps from Washington redistricting units to populations
        :param district_num: number of districts every valid plan must contain
        """
        self.adj_graph = adj_graph
        self.population_dict = population_dict
        self.district_num = district_num
        # Total state population, summed once up front.
        self.total_population = 0
        for n in self.adj_graph.get_nodes():
            self.total_population += self.population_dict[n]
        self.bound = bound
    def get_boundary(self, redistricting):
        """
        Get a set of edges which are on boundary districts
        :param redistricting: a redistricting plan (node -> district label)
        :return: a set of edges whose endpoints lie in different districts
        """
        all_edges = self.adj_graph.get_edges()
        boundary_edges = set()
        for e in all_edges:
            if redistricting[e[0]] != redistricting[e[1]]:
                boundary_edges.add(e)
        return boundary_edges
    def get_candidate(self, redistricting):
        """
        Get a candidate for the next redistricting
        :param redistricting: the current redistricting
        :return: a new redistricting as candidate

        Picks a boundary edge with one endpoint labelled 0 (0 appears to
        mark unassigned units — confirm), flips that endpoint to the other
        side, then validates that the flipped unit's former district stays
        connected and that all district labels are still present.
        """
        validated = False
        bad_choice = set()
        while not validated:
            candidate = redistricting.copy()
            boundary = self.get_boundary(redistricting)
            # Resample until the chosen boundary edge touches a 0-labelled unit.
            edge = random.choice(list(boundary))
            while 0 not in (candidate[edge[0]], candidate[edge[1]]):
                edge = random.choice(list(boundary))
            # flag selects which endpoint gets relabelled (the 0 side).
            if candidate[edge[0]] == 0:
                flag = 0
            else:
                flag = 1
            '''
            flag = random.choice((0, 1))
            '''
            if flag == 0:
                candidate[edge[0]] = candidate[edge[1]]
            else:
                candidate[edge[1]] = candidate[edge[0]]
            if (edge, flag) not in bad_choice:
                changed = edge[flag]
                original_belong = redistricting[changed]
                # Same-district neighbours of the flipped unit; the old
                # district stays connected only if BFS from one of them
                # reaches all the others.
                neighbors = set()
                for n in self.adj_graph.get_neighbors(changed):
                    if redistricting[n] == original_belong:
                        neighbors.add(n)
                if len(neighbors) == 0:
                    validated = True
                else:
                    init = random.choice(list(neighbors))
                    active = [init]
                    seen = set()
                    while active != [] and validated == False:
                        curr = active[0]
                        active = active[1:]
                        if curr not in seen:
                            seen.add(curr)
                            for n in self.adj_graph.get_neighbors(curr):
                                if n not in seen and candidate[n] == original_belong:
                                    active.append(n)
                            finished = True
                            for neighbor in neighbors:
                                if neighbor not in seen:
                                    finished = False
                            if finished:
                                validated = True
                    # NOTE(review): this finished/seen check duplicates the
                    # one inside the BFS loop verbatim — likely redundant.
                    finished = True
                    for neighbor in neighbors:
                        if neighbor not in seen:
                            finished = False
                    if finished:
                        validated = True
            # Reject candidates that lost a district label entirely.
            if validated:
                for i in range(self.district_num):
                    if i not in candidate.values():
                        validated = False
            if not validated:
                bad_choice.add((edge, flag))
        return candidate
    def population_energy(self, redistricting):
        """
        Calculate the population energy of current redistricting
        :param redistricting: redistricting
        :return: a float

        Sum of squared deviations of each district's population from an
        even share, scaled down by 1e11.
        NOTE(review): divides by the constant 4, not self.district_num —
        confirm whether this was tuned for a 4-district experiment.
        """
        pop = 0
        district_pop = {}
        for n in self.adj_graph.get_nodes():
            label = redistricting[n]
            prev_pop = district_pop.get(label, 0)
            district_pop[label] = prev_pop + self.population_dict[n]
        for district, population in district_pop.items():
            val = population - self.total_population / 4
            val = val ** 2
            pop += val
        return int(pop // 100000000000)
    def compactness_energy(self, redistricting):
        """
        Calculate the compactness energy of current redistricting
        :param redistricting: redistricting
        :return: a float

        For each district: (number of its units on a state or district
        boundary / total units)^2, summed over districts.
        """
        com = 0
        count = {}
        param = {}
        boundary_edges = self.get_boundary(redistricting)
        boundary_nodes = set()
        for e in boundary_edges:
            boundary_nodes.add(e[0])
            boundary_nodes.add(e[1])
        for n in self.adj_graph.get_nodes():
            label = redistricting[n]
            prev_count = count.get(label, 0)
            count[label] = prev_count + 1
            if n in self.bound or n in boundary_nodes:
                prev_param = param.get(label, 0)
                param[label] = prev_param + 1
        for district, parameter in param.items():
            val = parameter / count[district]
            val = val ** 2
            com += val
        return round(com, 2)
    def calc_ratio(self, redistricting, param_func,iter):
        """
        Calculated energy ratio
        :param redistricting: redistricting
        :param param_func: callable iter -> (alpha, beta) weighting the two energies
        :param iter: iteration number
        :return: a float
        """
        compactness_energy = self.compactness_energy(redistricting)
        population_energy = self.population_energy(redistricting)
        (alpha, beta) = param_func(iter)
        return math.exp(round(alpha * compactness_energy + beta * population_energy, 2))
    def pop_error(self, redistricting):
        # Percentage deviation of district populations from an even split.
        # NOTE(review): dict.iteritems() is Python 2 only — under Python 3
        # this raises AttributeError; .items() is the intended call.  The
        # hard-coded 10 also ignores self.district_num.
        result = redistricting
        total_pop = 0
        pop_dict = dict()
        for pre, dis in result.iteritems():
            pop_dict[dis] = pop_dict.get(dis, 0) + self.population_dict[pre]
            total_pop += self.population_dict[pre]
        population_error = 0
        for j in range(10):
            population_error += abs(pop_dict[j] - total_pop / 10)
        error = population_error * 100.0 / total_pop
        return error
    def make_one_move(self, redistricting, param_func, iter):
        """
        Make one movement based on current redistricting
        :param redistricting:
        :param iter: number of iteration
        :return: new redistricting

        NOTE(review): the Metropolis accept/reject step is commented out
        below, so every candidate is currently accepted unconditionally.
        """
        candidate = self.get_candidate(redistricting)
        self_energy = self.calc_ratio(redistricting, param_func, iter)
        candidate_energy = self.calc_ratio(candidate, param_func, iter)
        return candidate
        '''
        if candidate_energy < self_energy:
            return candidate
        else:
            rand_num = random.uniform(0.0, 1.0)
            ratio = self_energy / candidate_energy
            if rand_num < ratio:
                return candidate
            else:
                return redistricting
        '''
    def run(self, initial, iter, param_func):
        """
        Run the algorithm with certain number of iterations, given an specific parameter function
        :param initial: initial map
        :param iter: number of iterations
        :param param_func: parameter function
        :return: final sample
        """
        curr = initial
        for i in range(iter):
            sample = self.make_one_move(curr, param_func, i)
            curr = sample
        return curr
|
import copy
def get_num_left_most_pos(number, num):
    """Return the index of the right-most occurrence of num in number.

    Returns -1 when num does not occur.  (Name kept for compatibility;
    the scan intentionally runs from the right, which is what the
    big-number swap needs.)

    Fixes vs original: the stray per-element debug print is removed, and
    the loop no longer iterates the list while separately decrementing an
    index variable.
    """
    # O(n) reverse scan.
    for pos in range(len(number) - 1, -1, -1):
        if number[pos] == num:
            return pos
    return -1
def big_number():
    """Print the largest number obtainable from the digit list with at most
    one swap: find the first position where the digits diverge from their
    descending order and swap in the right-most occurrence of the wanted
    digit."""
    # number = [9,8,5,6,9,4,9]
    number = [9, 8, 7, 6, 5, 4]
    # O(nlogn) sorting
    sorted_number = copy.deepcopy(number)
    sorted_number.sort(reverse=True)
    print(number, sorted_number)
    idx = 0
    for want, have in zip(sorted_number, number):
        if want == have:
            # Prefix already matches the ideal ordering; keep scanning.
            idx += 1
        else:
            # First divergence: swap in the right-most copy of the wanted digit.
            pos = get_num_left_most_pos(number, want)
            print(pos, want, number)
            number[idx], number[pos] = number[pos], number[idx]
            break
    # Overall O(nlogn)
    print('Result:', number)
big_number()
import os
import pytest
from app import create_app, db
from users.daos.user_dao import add_user, get_user_by_name
@pytest.fixture(scope='module')
def client():
    """Flask test client backed by a throw-away SQLite file database."""
    # arrange
    filename = '/tmp/fresh_test.db'
    db_path = 'sqlite:///' + filename
    app = create_app({'TESTING': True, 'SQLALCHEMY_DATABASE_URI': db_path})
    with app.test_client() as cli:
        with app.app_context():
            db.create_all()
        yield cli
    # cleanup
    # NOTE(review): os.remove raises if the DB file was never created —
    # consider contextlib.suppress(FileNotFoundError).
    os.remove(filename)
def test_insert_admin_user(client):
    """Adding a user makes it retrievable by name."""
    # arrange
    n = 'admin'
    m = 'admin@example.com'
    # act
    add_user(n, m)
    # assert
    stored_user = get_user_by_name(n)
    assert stored_user.username == 'admin'
'''
def test_insert_user_jose(client):
# arrange
n = 'jose'
m = 'jose@example.com'
# act
add_user(n, m)
# assert
stored_user = get_user_by_name(n)
assert stored_user.username == 'jose'
'''
def test_insert_user_twice(client):
    """Adding the same username twice raises ValueError (even with a
    different email address)."""
    # arrange
    n = 'raul'
    m = 'raul@example.com'
    # act
    add_user(n, m)
    # assert
    with pytest.raises(ValueError, match=r"User with name raul already in the DB"):
        add_user(n, 'raul@anothermail.com')
|
# -*- coding: utf-8 -*-
from hearthstone.entities import Entity
from entity.spell_entity import SpellEntity
class LETL_648(SpellEntity):
    """
    Chaos Amulet 4 (混乱护符4)
    Increases the damage dealt by Eye Beam by 6.
    """
    def __init__(self, entity: Entity):
        super().__init__(entity)
    def equip(self, hero):
        # NOTE(review): effect not implemented yet — equipping is a no-op.
        pass
|
import pandas as pd
import math
class WeightCalculationForPyfolio:
    """Adjust per-window strategy returns by the weights recorded in wtsDF.

    wtsDF: one row per out-of-sample window, with 'Out_Sample_Start_Date',
    'Out_Sample_End_Date' and one weight column per strategy.
    returnsDF: returns indexed by date label.
    """

    def __init__(self, wtsDF, returnsDF):
        self.wtsDF = wtsDF
        self.returnsDF = returnsDF

    def returnsCalculation(self, strategyName):
        """Return a copy of returnsDF with each window's rows adjusted by
        the sign of the strategy's weight in that window (zero weight zeroes
        the returns, negative weight flips their sign).

        Bug fix: the original computed the adjusted slices but never
        assigned them back, so it always returned the unmodified copy.
        """
        adjusted = self.returnsDF.copy()
        for _, row in self.wtsDF.iterrows():
            start = row['Out_Sample_Start_Date']
            end = row['Out_Sample_End_Date']
            weight = row[strategyName]
            if weight == 0:
                # Strategy inactive in this window: zero out its returns.
                adjusted[start:end] = adjusted[start:end] * 0
            elif weight < 0:
                # Shorted strategy: flip the sign of its returns.
                adjusted[start:end] = adjusted[start:end] * (-1)
            # weight > 0: multiply by 1, i.e. leave the rows unchanged.
        return adjusted

    def PositionCalculation():
        # NOTE(review): placeholder (also missing `self`); unimplemented.
        pass

    def TransactionCalculation():
        # NOTE(review): placeholder (also missing `self`); unimplemented.
        pass
# Load per-window strategy weights and the raw returns/transactions exports.
wtsDF = pd.read_csv("IPStrategy.csv")
returnsDF = pd.read_csv("Return BRACKET_ORDER_STR.csv", names=["DateTime", "Returns"])
# Keep only the date part so label-based date slicing works.
returnsDF["DateTime"] = returnsDF["DateTime"].apply(lambda x: x.split(" ")[0])
returnsDF.index = returnsDF['DateTime']
del returnsDF['DateTime']
######################################################################################################
transactionDF = pd.read_csv("Transactions BRACKET_ORDER_STR.csv")
transactionDF["date"] = transactionDF["date"].apply(lambda x: x.split(" ")[0])
transactionDF.index = transactionDF['date']
del transactionDF['date']
transactionDFCopied = transactionDF.copy()
prevWeight = 0
strategyName = "BRACKET_ORDER_STR"
transactionInRange = pd.DataFrame()
# Scale each window's transaction amounts by that window's strategy weight.
for idx, i in wtsDF.iterrows():
    startDate = i['Out_Sample_Start_Date']
    endDate = i['Out_Sample_End_Date']
    startegyWt = i[strategyName]
    transactionInRange = transactionDFCopied[startDate:endDate]
    print(transactionInRange)
    if startegyWt != 0:
        transactionInRange["amount"] = transactionInRange["amount"] * startegyWt
        prevWeight = startegyWt
# Bug fix: this loop referenced undefined names (`weights`, `data`), reused
# the stale loop variable `i`, and compared with `is not 0` (identity, not
# equality). It now iterates wtsDF and slices the copied transactions.
for _, fr in wtsDF.iterrows():
    dt = transactionDFCopied[fr["Out_Sample_Start_Date"]: fr["Out_Sample_End_Date"]]
    if fr["BRACKET_ORDER_SG"] != 0:
        dt["amount"] = dt["amount"] * fr["BRACKET_ORDER_SG"]
        print(dt["amount"])
######################################################################################################
object1 = WeightCalculationForPyfolio(wtsDF, returnsDF)
z = object1.returnsCalculation("BRACKET_ORDER_STR")
z.to_csv("updated_return_file.csv")
|
#!/usr/bin/env python
import numpy as np
import pyopencl as cl
ctx = cl.create_some_context()
queue = cl.CommandQueue(ctx, properties=cl.command_queue_properties.PROFILING_ENABLE)
MAX_GRID = 65535
kernels ="""
__kernel void update_h(int nx, int ny, int nz, float cl,
__global float *ex, __global float *ey, __global float *ez,
__global float *hx, __global float *hy, __global float *hz,
__global float *chx, __global float *chy, __global float *chz,
__local float *s) {
int tx = get_local_id(0);
int bx = get_group_id(0);
int idx = get_global_id(0);
int k = idx/(nx*ny);
int j = (idx - k*nx*ny)/nx;
int i = idx%nx;
float* sx = (float*) s;
float* sy = (float*) &sx[bx+2];
float* sz = (float*) &sy[bx+3];
sx[tx] = ex[idx];
sy[tx] = ey[idx];
sz[tx] = ez[idx];
if( tx == 0 && i > 1 ) {
sy[tx-1] = ey[idx-1];
sz[tx-1] = ez[idx-1];
sy[tx-2] = ey[idx-2];
sz[tx-2] = ez[idx-2];
}
if( tx == bx-1 && i < nx-1 ) {
sy[tx+1] = ey[idx+1];
sz[tx+1] = ez[idx+1];
}
barrier(CLK_LOCAL_MEM_FENCE);
if( j>1 && j<ny-1 && k>1 && k<nz-1 )
hx[idx] -= cl*chx[idx]*( 27*( sz[tx] - ez[idx-nx] - sy[tx] + ey[idx-nx*ny] )
- ( ez[idx+nx] - ez[idx-2*nx] - ey[idx+nx*ny] + ey[idx-2*nx*ny] ) );
if( i>1 && i<nx-1 && k>1 && k<nz-1 )
hy[idx] -= cl*chy[idx]*( 27*( ex[idx] - ex[idx-nx*ny] - ez[idx] + ez[idx-1] )
- ( ex[idx+nx*ny] - ex[idx-2*nx*ny] - ez[idx+1] + ez[idx-2] ) );
if( i>1 && i<nx-1 && j>1 && j<ny-1 )
hz[idx] -= cl*chz[idx]*( 27*( ey[idx] - ey[idx-1] - ex[idx] + ex[idx-nx] )
- ( ey[idx+1] - ey[idx-2] - ex[idx+nx] + ex[idx-2*nx] ) );
}
__kernel void update_e(int nx, int ny, int nz, float dl,
__global float *ex, __global float *ey, __global float *ez,
__global float *hx, __global float *hy, __global float *hz,
__global float *cex, __global float *cey, __global float *cez,
__local float *s) {
int idx = get_global_id(0);
int k = idx/(nx*ny);
int j = (idx - k*nx*ny)/nx;
int i = idx%nx;
if( j>0 && j<ny-2 && k>0 && k<nz-2 )
ex[idx] += dl*cex[idx]*( 27*( hz[idx+nx] - hz[idx] - hy[idx+nx*ny] + hy[idx] )
- ( hz[idx+2*nx] - hz[idx-nx] - hy[idx+2*nx*ny] + hy[idx-nx*ny] ) );
if( i>0 && i<nx-2 && k>0 && k<nz-2 )
ey[idx] += dl*cey[idx]*( 27*( hx[idx+nx*ny] - hx[idx] - hz[idx+1] + hz[idx] )
- ( hx[idx+2*nx*ny] - hx[idx-nx*ny] - hz[idx+2] + hz[idx-1] ) );
if( i>0 && i<nx-2 && j>0 && j<ny-2 )
ez[idx] += dl*cez[idx]*( 27*( hy[idx+1] - hy[idx] - hx[idx+nx] + hx[idx] )
- ( hy[idx+2] - hy[idx-1] - hx[idx+2*nx] + hx[idx-nx] ) );
}
__kernel void update_src(int nx, int ny, int nz, float tn,
__global float *f) {
int idx = get_global_id(0);
int ijk = (nz/2)*nx*ny + (ny/2)*nx + idx;
if( idx < nx ) f[ijk] += sin(0.1*tn);
}
"""
if __name__ == '__main__':
nx, ny, nz = 256, 256, 240
nnx, nny, nnz = np.int32(nx), np.int32(ny), np.int32(nz)
S = 0.743 # Courant stability factor
# symplectic integrator coefficients
c1, c2, c3 = 0.17399689146541, -0.12038504121430, 0.89277629949778
d1, d2 = 0.62337932451322, -0.12337932451322
clst = np.array([c1,c2,c3,c2,c1], dtype=np.float32)
dlst = np.array([d1,d2,d2,d1,0], dtype=np.float32)
print 'dim (%d, %d, %d)' % (nx, ny, nz)
total_bytes = nx*ny*nz*4*12
if total_bytes/(1024**3) == 0:
print 'mem %d MB' % ( total_bytes/(1024**2) )
else:
print 'mem %1.2f GB' % ( float(total_bytes)/(1024**3) )
# memory allocate
f = np.zeros((nx,ny,nz), 'f', order='F')
#f = np.random.randn(nx*ny*nz).astype(np.float32).reshape((nx,ny,nz),order='F')
cf = np.ones_like(f)*(S/24)
mf = cl.mem_flags
ex_gpu = cl.Buffer(ctx, mf.READ_WRITE | mf.COPY_HOST_PTR, hostbuf=f)
ey_gpu = cl.Buffer(ctx, mf.READ_WRITE | mf.COPY_HOST_PTR, hostbuf=f)
ez_gpu = cl.Buffer(ctx, mf.READ_WRITE | mf.COPY_HOST_PTR, hostbuf=f)
hx_gpu = cl.Buffer(ctx, mf.READ_WRITE | mf.COPY_HOST_PTR, hostbuf=f)
hy_gpu = cl.Buffer(ctx, mf.READ_WRITE | mf.COPY_HOST_PTR, hostbuf=f)
hz_gpu = cl.Buffer(ctx, mf.READ_WRITE | mf.COPY_HOST_PTR, hostbuf=f)
cex_gpu = cl.Buffer(ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=cf)
cey_gpu = cl.Buffer(ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=cf)
cez_gpu = cl.Buffer(ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=cf)
chx_gpu = cl.Buffer(ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=cf)
chy_gpu = cl.Buffer(ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=cf)
chz_gpu = cl.Buffer(ctx, mf.READ_ONLY | mf.COPY_HOST_PTR, hostbuf=cf)
# prepare kernels
prg = cl.Program(ctx, kernels).build()
Gs = (nx*ny*nz,) # global size
Ls = (512,) # local size
lmem_size = 4*(3*512+6) # local memory size 4bytes
# prepare for plot
from matplotlib.pyplot import *
ion()
imsh = imshow(np.ones((ny,nz),'f'), cmap=cm.hot, origin='lower', vmin=0, vmax=0.01)
colorbar()
# measure kernel execution time
from datetime import datetime
t1 = datetime.now()
# main loop
for tn in xrange(1, 100+1):
for sn in xrange(4):
prg.update_h(
queue, Gs, nnx, nny, nnz, clst[sn],
ex_gpu, ey_gpu, ez_gpu, hx_gpu, hy_gpu, hz_gpu,
chx_gpu, chy_gpu, chz_gpu,
cl.LocalMemory(lmem_size, local_size=Ls)
prg.update_e(
queue, Gs, nnx, nny, nnz, dlst[sn],
ex_gpu, ey_gpu, ez_gpu, hx_gpu, hy_gpu, hz_gpu,
cex_gpu, cey_gpu, cez_gpu,
cl.LocalMemory(lmem_size, local_size=Ls)
prg.update_src(queue, (nx,), nnx, nny, nnz, np.float32(tn+sn/5.), ex_gpu)
prg.update_h(
queue, Gs, nnx, nny, nnz, clst[4],
ex_gpu, ey_gpu, ez_gpu, hx_gpu, hy_gpu, hz_gpu,
chx_gpu, chy_gpu, chz_gpu,
cl.LocalMemory(lmem_size, local_size=Ls)
if tn%10 == 0:
#if tn == 100:
print 'tn =', tn
cl.enqueue_read_buffer(queue, ex_gpu, f)
imsh.set_array( f[nx/2,:,:]**2 )
draw()
#savefig('./png-wave/%.5d.png' % tstep)
cl.enqueue_read_buffer(queue, ex_gpu, f).wait()
print datetime.now() - t1
|
#!/usr/bin/python
import math
def recipe_batches(recipe, ingredients):
    """Return how many whole batches of *recipe* can be made from *ingredients*.

    recipe      -- dict mapping ingredient name -> amount needed per batch
    ingredients -- dict mapping ingredient name -> amount available
    Returns 0 if the recipe is empty or any required ingredient is missing.
    """
    if not recipe:
        return 0
    batches = None
    for name, needed in recipe.items():
        if name not in ingredients:
            return 0
        # Whole batches this single ingredient allows (floor division).
        possible = int(ingredients[name] // needed)
        # BUG FIX: the original used `count == 0 or tempCount < count`, so a
        # legitimate 0-batch limit from an earlier ingredient was treated as
        # "unset" and overwritten by later ingredients. Track "unset" with
        # None instead.
        if batches is None or possible < batches:
            batches = possible
    return batches
if __name__ == '__main__':
    # Change the entries of these dictionaries to test
    # your implementation with different inputs
    recipe = { 'milk': 2}
    ingredients = { 'milk': 200 }
    # Manual smoke test: prints the batch count for the sample data above.
    print("{batches} batches can be made from the available ingredients: {ingredients}.".format(batches=recipe_batches(recipe, ingredients), ingredients=ingredients))
from app import db
class Environment(db.Model):
    """
    SQLAlchemy model for an environment record.

    Holds status/approval bookkeeping and a one-to-many relationship to
    Server rows (backref 'server_in_env').
    """
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(120), index=True, unique=True)
    env_status = db.Column(db.Integer, index=True, default=1)
    status_timestamp = db.Column(db.DateTime, index=True, default=None)
    approval_status = db.Column(db.Integer, index=True)
    approval_timestamp = (db.Column(db.DateTime, index=True))
    timing = (db.Column(db.Integer))
    connection_status = (db.Column(db.Integer, default=1))
    user_id_fk = db.Column(db.Integer, db.ForeignKey('user.id'))
    servers = db.relationship('Server', backref='server_in_env', lazy='dynamic')
    def __repr__(self):
        """
        Defines how objects of this Class are printed
        :return: environment as dictionary
        """
        # BUG FIX: the original omitted the ',' separators after the id and
        # timing values, producing e.g. '{id: 1name: x...'.
        environment = '{id: ' + str(self.id) + ',' + 'name: ' + self.name + ',' + 'env_status: ' \
                      + str(self.env_status) + ',' + 'status_timestamp: ' + str(self.status_timestamp) \
                      + ',' + 'approval_status: ' + str(self.approval_status) + ',' + 'approval_timestamp: ' \
                      + str(self.approval_timestamp) + ',' + 'timing: ' + str(self.timing) + ',' + 'connection_status: ' \
                      + str(self.connection_status) + '}'
        return environment
|
# -*- coding: utf-8
from django.db import models
class VIP(models.Model):
    """Per-VIP-level game configuration (limits and bonuses).

    Field verbose_names are Chinese; English meanings are noted per field.
    """
    id = models.IntegerField(primary_key=True)
    sycee = models.IntegerField("直接充值元宝", unique=True)            # sycee (gold) recharged directly to reach this level
    levy = models.IntegerField("征收次数")                              # number of levies allowed
    hang_addition = models.IntegerField("挂机收益加成")                 # idle/AFK income bonus
    friends = models.IntegerField("好友数量上限")                       # max number of friends
    arena_buy = models.IntegerField("比武购买次数")                     # arena attempts purchasable
    stage_elite_buy = models.IntegerField("精英关卡重置次数")           # elite-stage resets per day
    stage_elite_buy_total = models.IntegerField("精英关卡总重置次数")   # total elite-stage resets
    plunder = models.IntegerField("掠夺次数")                           # plunder attempts
    plunder_addition = models.IntegerField("掠夺资源加成")              # plundered-resource bonus
    prisoner_get = models.IntegerField("招降概率加成")                  # capture/recruit probability bonus
    horse_strength_free = models.IntegerField("坐骑免费培养次数")       # free mount-training attempts
    union_checkin = models.IntegerField("工会签到次数")                 # guild check-ins allowed
    des = models.TextField()                                            # free-form description
    class Meta:
        db_table = 'vip'
        verbose_name = 'VIP'
        verbose_name_plural = 'VIP'
class VIPReward(models.Model):
    """Reward package granted per VIP level: up to three (type, id, amount) items.

    NOTE(review): ForeignKey without on_delete -- this model targets Django < 2.0,
    where on_delete defaulted to CASCADE; confirm before upgrading Django.
    """
    id = models.IntegerField(primary_key=True)
    item_one_type = models.ForeignKey('config.ResourceType', related_name='vip_reward_item_one')
    item_one_id = models.IntegerField()
    item_one_amount = models.IntegerField(default=1)
    item_two_type = models.ForeignKey('config.ResourceType', related_name='vip_reward_item_two')
    item_two_id = models.IntegerField()
    item_two_amount = models.IntegerField(default=1)
    item_three_type = models.ForeignKey('config.ResourceType', related_name='vip_reward_item_three')
    item_three_id = models.IntegerField()
    item_three_amount = models.IntegerField(default=1)
    package = models.ForeignKey('goodspackage.Package')
    class Meta:
        db_table = 'vip_reward'
        verbose_name = "VIP奖励"          # "VIP reward"
        verbose_name_plural = "VIP奖励"
|
from django.db import models
from django.core.validators import MinValueValidator
from User import models as user_models
from Client import models as client_models
from Restaurant import models as restaurant_models
from Orders import models as order_models
# Choices for InvigilatorClientMessage.message_from: which side sent the message.
choices = (
    ('Client', 'Client'),
    ('Invigilator', 'Invigilator')
)
class Invigilator(models.Model):
    """Invigilator profile: optional link to a User plus a coarse location."""
    user = models.ForeignKey(user_models.User, on_delete=models.CASCADE, related_name="invigilator_invigilator_user", null=True, blank=True)
    city = models.CharField(max_length=100, null=True, blank=True)
    latitude = models.CharField(max_length=30, null=True, blank=True)
    longitude = models.CharField(max_length=30, null=True, blank=True)
    created = models.DateTimeField(auto_now_add=True, editable=False)
    last_updated = models.DateTimeField(auto_now=True, editable=False)
    class Meta:
        verbose_name = 'Invigilator'
        verbose_name_plural = 'Invigilators'
    def __str__(self):
        # BUG FIX: user is nullable (null=True); the original dereferenced
        # self.user.name unconditionally and raised AttributeError for rows
        # without a user. Guard like InvigilatorClientMessage.__str__ does.
        if self.user:
            return self.user.name
        return ''
class InvigilatorRestaurant(models.Model):
    """Join table assigning an invigilator to a restaurant (both nullable)."""
    invigilator = models.ForeignKey(Invigilator, on_delete=models.CASCADE, related_name="invigilator_invigilator_restaurant_invigilator", null=True, blank=True)
    restaurant = models.ForeignKey(restaurant_models.Restaurant, on_delete=models.CASCADE, related_name="invigilator_invigilator_restaurant_restaurant", null=True, blank=True)
    class Meta:
        verbose_name = 'Invigilator Restaurant'
        verbose_name_plural = 'Invigilator Restaurants'
class InvigilatorClientMessage(models.Model):
    """One chat message between an invigilator and a client.

    message_from records which side sent it (see module-level `choices`).
    """
    invigilator = models.ForeignKey(Invigilator, on_delete=models.CASCADE, related_name="invigilator_invigilator_client_chat_invigilator", null=True, blank=True)
    client = models.ForeignKey(client_models.Client, on_delete=models.CASCADE, related_name="invigilator_invigilator_client_chat_client", null=True, blank=True)
    message = models.TextField(null=True, blank=True)
    message_from = models.TextField(choices=choices, null=True, blank=True)
    created = models.DateTimeField(auto_now_add=True, editable=False)
    last_updated = models.DateTimeField(auto_now=True, editable=False)
    class Meta:
        verbose_name = 'Invigilator Client Message'
        verbose_name_plural = 'Invigilator Client Message'
    def __str__(self):
        # Nullable chain: invigilator and its user may be absent.
        if self.invigilator.user:
            return self.invigilator.user.name
        return ''
|
# Get the top n stories by word x
import pandas as pd
# Show full cell contents when printing (titles can be long).
pd.options.display.max_colwidth = 5000
stories = 10          # number of top stories to keep
word = 'china'        # search term (matched case-insensitively below)
df = pd.read_csv('../data/data/data.csv', sep=',', low_memory=False, encoding = 'ISO-8859-1')
df = df.drop(labels=['subreddit', 'over_18', 'time_created', 'down_votes'], axis=1)
# BUG FIX: na=False -- rows with a missing title made str.contains return NaN,
# which raises "cannot index with vector containing NA / NaN values" when used
# as a boolean mask. case=False so 'China' in headlines matches too.
df = df[df['title'].str.contains(word, case=False, na=False)]
df = df.sort_values(by=['up_votes'], ascending=False)
df[0:stories].to_csv('../data/top-{}-stories-{}.csv'.format(stories, word), sep=',', encoding='utf-8')
|
'''
NOTE: this program uses the psutil library, which must be installed first via 'pip' or another source appropriate for your system.
Created on 24 Apr 2018
@authors: Gal , Mark , Noy
Process_monitor scans the system (processes running in the background) and exports two csv files:
1. process_list.csv - history of all processes caught so far, in the format [pid Num, Name, Time, Status]
2. Status_Log_File.csv - indication of processes that were closed/opened
The program is still in development.
Our goals:
(*) exporting two encrypted files with a GUI.
'''
import psutil
from time import sleep
# Poll forever: take two process snapshots per iteration and log the diff.
while True:
    # First snapshot: pid -> (name, status, create_time); also appended to CSV.
    dict1 = {}
    for p in psutil.process_iter():
        dict1[p.pid] = p.name(), p.status(), p.create_time()
        with open("process_list.csv", "a") as process_list:
            process_list.write(str(p.pid) + "," + p.name() + "," + str(p.create_time())+","+p.status())
            process_list.write("\r\n")
    # for i,value in dict1.items():
    # print i,", value: " ,value
    # Second snapshot, taken immediately after (optional sleep disabled).
    dict2 = {}
    # sleep(5)
    for p in psutil.process_iter():
        dict2[p.pid] = p.name(), p.status(), p.create_time()
        with open("process_list.csv", "a") as process_list:
            process_list.write(str(p.pid) + "," + p.name() + "," + str(p.create_time())+","+p.status())
            process_list.write("\r\n")
    # Pids present in the second snapshot only => processes that opened.
    value = {k: dict2[k] for k in set(dict2) - set(dict1)}
    if (value):
        for key, value1 in value.items():
            with open("Status_Log_File.csv", "a") as status_log:
                name, status, time = value1
                print "opend...."
                status_log.write(str(key) + "," + name + "," + "opened" + "," +str(time))
                status_log.write("\r\n")
    # with open("Status_Log_File.csv", "a") as Status_Log_File: #
    # Status_Log_File.write(l.getName() + "," + l.getPid() + "," + l.getCTime() + "," + "Closed")
    # Status_Log_File.write("\r\n")
    # Pids present in the first snapshot only => processes that closed.
    value = {k: dict1[k] for k in set(dict1) - set(dict2)}
    if (value):
        for key, value1 in value.items():
            with open("Status_Log_File.csv", "a") as status_log:
                name, status, time = value1
                print "opaa closed...."
                status_log.write(str(key) + "," + name + "," + "closed" + "," +str(time))
                status_log.write("\r\n")
    # def Matches(dict,key):
    # return dict.has_key(key)
|
__author__ = 'frankhe'
import time
import sys
import tensorflow as tf
import numpy as np
from mpi4py import MPI
import copy
import interaction
import neural_networks
import agents
# Command-line flag definitions (TF1 tf.app.flags). Grouped by concern below.
FLAGS = tf.app.flags.FLAGS
# Experiment settings
tf.app.flags.DEFINE_integer('epochs', 40, 'Number of training epochs')
tf.app.flags.DEFINE_integer('steps_per_epoch', 250000, 'Number of steps per epoch')
tf.app.flags.DEFINE_integer('test_length', 125000, 'Number of steps per test')
tf.app.flags.DEFINE_integer('seed', 123456, 'random seed')
tf.app.flags.DEFINE_bool('diff_seed', True, 'enable different seed for each process')
tf.app.flags.DEFINE_integer('summary_fr', 6000, 'summary every x training steps')
tf.app.flags.DEFINE_string('logs_path', './logs', 'tensor board path')
tf.app.flags.DEFINE_bool('test', False, 'enable test mode')
tf.app.flags.DEFINE_bool('ckpt', False, 'enable save models')
tf.app.flags.DEFINE_integer('feeding_threads', 1, 'feeding data threads')
tf.app.flags.DEFINE_integer('feeding_queue_size', 50, 'feeding queue capacity')
tf.app.flags.DEFINE_float('gpu_memory_fraction', 0.3, 'gpu memory fraction, 0.0 = no limit')
# ALE Environment settings
tf.app.flags.DEFINE_string('rom', 'breakout', 'game ROM')
tf.app.flags.DEFINE_string('roms_path', './roms/', 'game ROMs path')
tf.app.flags.DEFINE_integer('frame_skip', 4, 'every frame_skip frames to act')
tf.app.flags.DEFINE_integer('buffer_length', 2, 'screen buffer size for one image')
tf.app.flags.DEFINE_float('repeat_action_probability', 0, 'Probability that action choice will be ignored')
tf.app.flags.DEFINE_float('input_scale', 255.0, 'image rescale')
tf.app.flags.DEFINE_integer('input_width', 84, 'environment to agent image width')  # 128 vgg
tf.app.flags.DEFINE_integer('input_height', 84, 'environment to agent image width')
tf.app.flags.DEFINE_integer('num_actions', 2, 'environment accepts x actions')
tf.app.flags.DEFINE_integer('max_start_no_op', 30, 'Maximum number of null_ops at the start')
tf.app.flags.DEFINE_bool('lol_end', True, 'lost of life ends training episode')
# Agent settings
tf.app.flags.DEFINE_float('lr', 0.0002, 'learning rate')
tf.app.flags.DEFINE_float('lr_min', 0.00005, 'learning rate minimum')
tf.app.flags.DEFINE_integer('lr_decay_a', 250000*FLAGS.epochs/10/4, 'learning rate decay a, training steps')  # 1/10
tf.app.flags.DEFINE_integer('lr_decay_b', 250000*FLAGS.epochs/10/4*5, 'learning rate decay b, training steps')  # 1/2
tf.app.flags.DEFINE_float('discount', 0.99, 'discount rate')
tf.app.flags.DEFINE_float('ep_st', 1.0, 'epsilon start value')
tf.app.flags.DEFINE_float('ep_min', 0.1, 'epsilon minimum value')
tf.app.flags.DEFINE_float('ep_decay', 250000*FLAGS.epochs/10, 'steps for epsilon reaching minimum')  # 1/10
tf.app.flags.DEFINE_float('ep_decay_b', 250000*FLAGS.epochs/10*5, 'trainings for epsilon reaching 0.01')  # 1/2
tf.app.flags.DEFINE_integer('phi_length', 4, 'frames for representing a state')
tf.app.flags.DEFINE_integer('memory', 1000000, 'replay memory size')
tf.app.flags.DEFINE_integer('batch', 32, 'training batch size')
tf.app.flags.DEFINE_string('network', 'nature', 'neural network type, linear, nature, vgg')
tf.app.flags.DEFINE_integer('freeze', 2500, """freeze interval between updates, update network every x trainings.
To be noticed, Nature paper is inconsistent with its code.""")
tf.app.flags.DEFINE_string('loss_func', 'huber', 'loss function: huber; quadratic')
tf.app.flags.DEFINE_string('optimizer', 'adam', 'optimizer type')
tf.app.flags.DEFINE_integer('train_fr', 4, 'training frequency: train a batch every x steps')
tf.app.flags.DEFINE_integer('train_st', 50000, 'training start: training starts after x steps')
tf.app.flags.DEFINE_integer('percentile', 75, 'confidence interval')
tf.app.flags.DEFINE_bool('clip_reward', True, 'clip reward to -1, 1')
# Multi threads settings
tf.app.flags.DEFINE_integer('threads', 4, 'CPU threads for agents')
tf.app.flags.DEFINE_bool('use_gpu', True, 'use GPUs')
tf.app.flags.DEFINE_integer('gpus', 4, 'number of GPUs for agents')
tf.app.flags.DEFINE_string('gpu_config',
                           """{'gpu0': [0], 'gpu1': [1], 'gpu2': [2], 'gpu3': [3]}""",
                           'GPU configuration for agents, default gpu0')
tf.app.flags.DEFINE_string('threads_specific_config', "{}",
                           """{0: {'rom': 'breakout'}, 1: {'rom': 'pong'}, 2: {'rom': 'beam_rider'},
3: {'rom': 'space_invaders'}} configuration for each agent""")
# optimality tightening
tf.app.flags.DEFINE_bool('ot', False, 'optimality tightening')
tf.app.flags.DEFINE_bool('close2', True, 'close bounds')
tf.app.flags.DEFINE_bool('one_bound', True, 'only use lower bounds')
tf.app.flags.DEFINE_integer('nob', 4, 'number of bounds')
tf.app.flags.DEFINE_float('pw', 0.8, 'penalty weight')
def initialize(pid, device, flags, comm, share_comm):
    """Set up one worker process: ALE environment, network, agent; then run it.

    pid        -- MPI rank of this worker
    device     -- GPU name string like 'gpu0' (last char used as CUDA device id)
    flags      -- per-process copy of the global FLAGS
    comm       -- world communicator, used to send log messages to the printer rank
    share_comm -- sub-communicator used for parameter sharing between workers
    Blocks inside Interaction.start() until the experiment ends.
    """
    message = 'initialize process: {:d} with GPU: {} game: {}'.format(pid, device, flags.rom)
    # Route the message to the printer process (rank == flags.threads).
    comm.send([-1, 'print', message], dest=flags.threads)
    import os
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    # Pin this process to a single GPU (device string ends with its index).
    os.environ["CUDA_VISIBLE_DEVICES"] = device[-1]
    np.random.seed(flags.seed)
    tf.set_random_seed(flags.seed)
    # ALE bindings may come from either package name depending on install.
    try:
        import ale_python_interface
    except ImportError:
        import atari_py.ale_python_interface as ale_python_interface
    # initialize ALE environment
    if flags.rom.endswith('.bin'):
        rom = flags.rom
    else:
        rom = "%s.bin" % flags.rom
    full_rom_path = os.path.join(flags.roms_path, rom)
    ale = ale_python_interface.ALEInterface()
    ale.setInt('random_seed', flags.seed)
    ale.setBool('sound', False)
    ale.setBool('display_screen', False)
    ale.setFloat('repeat_action_probability', flags.repeat_action_probability)
    ale.loadROM(full_rom_path)
    num_actions = len(ale.getMinimalActionSet())
    # adjust flags: the real action count comes from the ROM, and each worker
    # logs into its own '#<pid>_<rom>' subdirectory.
    flags.num_actions = num_actions
    flags.logs_path = os.path.join(flags.logs_path, '#' + str(pid) + '_' + flags.rom)
    tf.gfile.MakeDirs(flags.logs_path)
    # print settings
    setting_file = open(os.path.join(flags.logs_path, 'flags.txt'), mode='w+')
    for key, item in flags.__flags.items():
        setting_file.write(key + ' : ' + str(item) + '\n')
    # initialize agent (optimality-tightening variant when flags.ot is set)
    if flags.ot:
        network = neural_networks.OptimalityTighteningNetwork(pid, flags, device, share_comm)
    else:
        network = neural_networks.DeepQNetwork(pid, flags, device, share_comm)
    setting_file.write(network.nn_structure_file)
    setting_file.close()
    if flags.ot:
        agent = agents.OptimalityTigheningAgent(pid, network, flags, comm, share_comm)
    else:
        agent = agents.QLearning(pid, network, flags, comm, share_comm)
    interaction.Interaction(pid, ale, agent, flags, comm).start()
def display_threads(message_dict, flags=FLAGS):
    """Render one status line (plus any queued prints) for all worker processes.

    message_dict -- {pid: {'step': [...], 'speed': [...]}} per worker, with
                    pid == -1 reserved for plain 'print' messages.
    Writes a carriage-return-rewritten status line to stdout (Python 2 prints).
    """
    # '\r\033[K' returns to line start and clears it before rewriting.
    one_line = '\r\033[K'
    for pid, element in message_dict.items():
        if pid == -1:
            print
            for message in element.get('print', []):
                print message
        else:
            if 'step' in element:
                total_steps = flags.steps_per_epoch if element['step'][0] == 'TRAIN' else flags.test_length
                one_line += ' #{:d}:{} E{:d} {:.1f}% '.format(
                    pid, element['step'][0], element['step'][1],
                    (1.0 - float(element['step'][2]) / total_steps) * 100)
            if 'speed' in element:
                one_line += ' St/Sec: cur:{:d} avg:{:d} '.format(element['speed'][0], element['speed'][1])
    # Truncate over-long lines so they fit one terminal row.
    # NOTE(review): threshold 160 but slice at 182 -- looks inconsistent; confirm intent.
    if len(one_line) > 160:
        one_line = one_line[:182]
    sys.stdout.write(one_line)
    sys.stdout.flush()
    return
def main(argv=None):
    """MPI entry point: rank == flags.threads acts as printer/coordinator,
    every other rank becomes a worker via initialize().
    """
    # comm is used for message transmitting
    comm = MPI.COMM_WORLD
    pid = comm.Get_rank()
    # Map each worker rank to a GPU name from the --gpu_config dict literal.
    pid_device = {}
    d = eval(FLAGS.gpu_config)
    for device, pids in d.items():
        for i in pids:
            pid_device[i] = device
    # Per-process copy so ranks can mutate flags independently.
    flags = copy.deepcopy(FLAGS)
    flags.seed += int(flags.diff_seed) * pid
    if flags.test:
        # Shrunken configuration for a quick smoke run (mpirun -np 3 ... --test).
        flags.threads = 2  # np=3
        flags.gpus = 2
        flags.epochs = 2
        flags.steps_per_epoch = 10000
        flags.test_length = 2000
        flags.summary_fr = 100
        flags.network = 'linear'
        flags.train_st = 2000
        flags.freeze = 100
        flags.memory = 5000
        flags.ot = False
        flags.one_bound = True
    # Split: the printer rank gets its own communicator color.
    if pid == flags.threads:
        color = 0
    else:
        color = 1
    # share_comm is used for sharing parameters
    share_comm = MPI.COMM_WORLD.Split(color, pid)
    # print share_comm.Get_rank(), share_comm.Get_rank()
    if pid == flags.threads:
        # process=threads is the printer process and the main process
        if tf.gfile.Exists(FLAGS.logs_path):
            tf.gfile.DeleteRecursively(FLAGS.logs_path)
        comm.Barrier()
        if flags.logs_path == './logs':
            print 'WARNING: logs_path is not specified, default to ./logs'
        # Message protocol received from workers:
        """
        [pid, 'step', [testing, epoch, steps_left]]
        [pid, 'speed', [current, avg]]
        [-1, 'print', message]
        """
        end_threads = np.zeros(flags.threads, dtype=np.bool_)
        while True:
            # Batch a handful of messages, then redraw the status line.
            message_dict = {}
            for i in xrange(flags.threads * 2):
                if np.all(end_threads):
                    comm.Barrier()
                    return
                pid, key, message = comm.recv(source=MPI.ANY_SOURCE)
                element = message_dict.setdefault(pid, {})
                if key == 'step' or key == 'speed':
                    element[key] = message
                if key == 'print':
                    element.setdefault(key, []).append(message)
                if key == 'END':
                    print '\n', pid, 'join',
                    end_threads[pid] = True
            if message_dict:  # not empty
                display_threads(message_dict)
    else:
        # Worker ranks wait for the printer to clear logs, then start.
        comm.Barrier()
        threads_specific_config = eval(flags.threads_specific_config)
        for key, val in threads_specific_config.get(pid, {}).items():
            setattr(flags, key, val)
        initialize(pid, pid_device.get(pid, "gpu0")[-1], flags, comm, share_comm)
if __name__ == '__main__':
    # tf.app.run parses flags then calls main(); launch via mpirun (see below).
    tf.app.run()
# mpirun -np threads + 1 python start_here.py
# mpirun -np 3 python start_here.py --test True
|
# -*- coding:utf-8 -*-
'''
__author__ = 'XD'
__mtime__ = 2021/1/22
__project__ = Pon-Sol2
Fix the Problem, Not the Blame.
'''
# Standard library
from unittest import TestCase
import logging
# Third-party
import pandas as pd
import numpy as np
# Project-local
import feature_extraction as fe
import logconfig
import model
class FeatureTest(TestCase):
    """Exercises feature_extraction and the PonSol2 model on a sample sequence."""
    def setUp(self) -> None:
        # Shared fixture: logging config, a protein sequence and one substitution.
        logconfig.setup_logging()
        self.log = logging.getLogger("ponsol.test.feature_extraction")
        self.seq = """MSNVRVSNGSPSLERMDARQAEHPKPSACRNLFGPVDHEELTRDLEKHCRDMEEASQRKW
NFDFQNHKPLEGKYEWQEVEKGSLPEFYYRPPRPPKGACKVPAQESQDVSGSRPAAPLIG
APANSEDTHLVDPKTDPSDSQTGLAEQCAGIRKRPATDDSSTQNKRANRTEENVSDGSPN
AGSVEQTPKKPGLRRRQT"""
        # Amino-acid substitution in "<from><position><to>" notation.
        self.aa = "S2A"
    def test_feature_extraction(self):
        """Combine the individual feature extractors into one DataFrame row."""
        features = {}
        features.update(fe.get_length(self.seq, self.aa))
        features.update(fe.get_aaindex(self.seq, self.aa))
        features.update(fe.get_neighborhood_features(self.seq, self.aa))
        df_features = pd.DataFrame([features])
        self.log.debug("df_features = \n%s", df_features)
        self.log.debug("df_features[0] = \n%s", df_features.iloc[0])
    def test_get_all_features(self):
        """Smoke-test the all-in-one feature extractor."""
        features = fe.get_all_features(self.seq, self.aa)
        self.log.debug("all features = \n%s", features)
    def test_feature_selected(self):
        """Log the model's selected feature sets and validate the feature matrix."""
        features = fe.get_all_features(self.seq, self.aa)
        ponsol = model.PonSol2()
        self.log.debug("feature1 = %s", ponsol.model.fs1)
        self.log.debug("feature2 = %s", ponsol.model.fs2)
        # self.log.debug("fs1 = %s", features.loc[:, ponsol.model.fs1.values], )
        # self.log.debug("fs2 = %s", features.loc[:, ponsol.model.fs2.values], )
        self.log.debug("check features: %s", ponsol.check_X(features))
    def test_predict_single_aa(self):
        """End-to-end: features -> PonSol2 prediction for a single substitution."""
        features = fe.get_all_features(self.seq, self.aa)
        ponsol = model.PonSol2()
        p = ponsol.predict(features)
        self.log.debug("p = %s", p)
|
import sprite
import struct
class Map:
    """Tile map: a g_width x g_height grid of indices into a sprite bank.

    Grid entries are -1 for empty, otherwise an index into sprite_bank.
    Binary formats are little-endian (Python 2 byte-string I/O).
    """
    g_width = 0        # grid width in tiles
    g_height = 0       # grid height in tiles
    sprite_bank = []   # list of sprite.Sprite objects referenced by the grid
    grid = []          # flat list of bank indices (or -1), row-major
    def __init__(self, g_width=0, g_height=0, sprite_bank=None, grid=None):
        self.g_width = g_width
        self.g_height = g_height
        self.sprite_bank = sprite_bank
        self.grid = grid
    def save(self, location):
        """Write the map: width/height, 32-byte sprite-name table, grid int16s."""
        f = open(location, 'wb')
        # write width as int16
        width_bytes = struct.pack('<h', self.g_width)
        f.write(width_bytes)
        # write height as int16
        height_bytes = struct.pack('<h', self.g_height)
        f.write(height_bytes)
        # create new list of sprites (only those actually used, de-duplicated)
        new_bank = []
        new_bank_pointers = [-1]*len(self.sprite_bank)
        for tile in self.grid:
            if tile != -1:
                if not self.bank_contains_sprite(new_bank, self.sprite_bank[tile]):
                    new_bank_pointers[tile] = len(new_bank)
                    new_bank += [self.sprite_bank[tile]]
        # sprites flagged keep_in_bank are always retained
        for s in self.sprite_bank:
            if s.keep_in_bank:
                new_bank += [s]
        # remap grid to use new bank
        new_grid = []
        for tile in self.grid:
            if tile != -1:
                new_grid += [new_bank_pointers[tile]]
            else:
                new_grid += [-1]
        # write sprite look up table: each name zero-padded to 32 bytes
        for s in new_bank:
            sprite_name_len = len(s.sprite_name)
            sprite_name_bytes = s.sprite_name
            sprite_name_bytes += chr(0)*(32-sprite_name_len)
            f.write(sprite_name_bytes)
        # 32 dashes terminate the name table (see load())
        f.write('-'*32)
        # f.seek(16388, 0)
        for tile in new_grid:
            tile_bytes = struct.pack('<h', tile)
            if len(tile_bytes) != 2:
                placeholder = 0  # debug hook; '<h' always packs 2 bytes
            f.write(tile_bytes)
        f.close()
    def save_zipmap(self, location):
        """Write a packed map: header, shared 256-color palette, embedded
        remapped sprite data, sprite-offset table, then the grid."""
        # create new list of sprites (same de-duplication as save())
        new_bank = []
        new_bank_pointers = [-1]*len(self.sprite_bank)
        for tile in self.grid:
            if tile != -1:
                if not self.bank_contains_sprite(new_bank, self.sprite_bank[tile]):
                    new_bank_pointers[tile] = len(new_bank)
                    new_bank += [self.sprite_bank[tile]]
        for s in self.sprite_bank:
            if s.keep_in_bank:
                new_bank += [s]
        # remap grid to use new bank
        new_grid = []
        for tile in self.grid:
            if tile != -1:
                new_grid += [new_bank_pointers[tile]]
            else:
                new_grid += [-1]
        f = open(location, 'wb')
        # write width as int16
        width_bytes = struct.pack('<h', self.g_width)
        f.write(width_bytes)
        # write height as int16
        height_bytes = struct.pack('<h', self.g_height)
        f.write(height_bytes)
        sprite_starts = []
        # shared palette: 255 slots + fixed final entry (0, 63, 0)
        global_palette = [0]*(255*3) + [0, 63, 0]
        global_pal_pos = 0
        # remap/register sprite palette to global palette; sprite data starts
        # after header(8) + palette(768) + count/offset table
        f.seek(776+(4*len(new_bank))+2)
        for s in new_bank:
            sprite_remap = [0]*256
            for i in range(255):
                # sprite palettes store 8-bit channels; pack down to 6-bit VGA
                test_color = [(ord(s.palette[(i*3)+0])/4), (ord(s.palette[(i*3)+1])/4), (ord(s.palette[(i*3)+2])/4)]
                if test_color == [0, 0, 0]:
                    continue
                global_index = self.palette_index_of_color(global_palette, test_color)
                if global_index == -1:
                    sprite_remap[i] = global_pal_pos
                    global_palette[global_pal_pos*3:(global_pal_pos*3)+3] = test_color
                    global_pal_pos += 1
                else:
                    sprite_remap[i] = global_index
            sprite_starts += [f.tell()]
            # write remapped sprite data
            width_bytes = struct.pack('<h', s.f_width)
            f.write(width_bytes)
            height_bytes = struct.pack('<h', s.f_height)
            f.write(height_bytes)
            iso_bottom_bytes = struct.pack('<h', s.iso_bottom)
            f.write(iso_bottom_bytes)
            iso_left_bytes = struct.pack('<h', s.iso_left)
            f.write(iso_left_bytes)
            f.write(chr(s.tiles_z))
            f.write(chr(s.anim_f_count))
            # sprite name, zero-padded to 16 bytes
            for c in s.sprite_name:
                f.write(c)
            f.write('\0'*(16-len(s.sprite_name)))
            # write bitmap data through the palette remap table
            for b in s.bitmap_bytes:
                f.write(chr(sprite_remap[ord(b)]))
        # write grid start and grid
        f.write('-'*32)
        grid_start = f.tell()
        for tile in new_grid:
            tile_bytes = struct.pack('<h', tile)
            f.write(tile_bytes)
        # back-patch the grid offset into the header (uint32 at byte 4)
        f.seek(4, 0)
        grid_start_bytes = struct.pack('<I', grid_start)
        f.write(grid_start_bytes)
        # write palette
        f.seek(8, 0)
        for i in range(256*3):
            f.write(chr(global_palette[i]))
        # write sprite count and start positions
        f.seek(776, 0)
        sprite_count_bytes = struct.pack('<h', len(new_bank))
        f.write(sprite_count_bytes)
        for start in sprite_starts:
            sprite_start_bytes = struct.pack('<I', start)
            f.write(sprite_start_bytes)
        f.close()
    def load(self, location):
        """Read a map written by save(): header, name table, then the grid."""
        f = open(location, 'rb')
        self.g_width = struct.unpack('<h', f.read(2))[0]
        self.g_height = struct.unpack('<h', f.read(2))[0]
        self.sprite_bank = []
        bank_end = False
        while not bank_end:
            # names are zero-padded to 32 bytes; '-'*32 terminates the table
            sprite_name = f.read(32).replace('\0', '')
            if sprite_name == '-'*32:
                bank_end = True
                continue
            if sprite_name != '':
                self.sprite_bank += [sprite.Sprite('SPRITES\\compiled\\%s.sprite' % sprite_name)]
        # f.seek(16388, 0)
        self.grid = []
        for tile_index in range(self.g_width*self.g_height):
            data = f.read(2)
            tile_pointer = struct.unpack('<h', data)[0]
            # out-of-range pointers (and -1) become empty tiles
            if tile_pointer < len(self.sprite_bank):
                self.grid += [tile_pointer]
            else:
                self.grid += [-1]
        f.close()
    def palette_index_of_color(self, palette, color):
        """Return the index of *color* ([r, g, b]) in a flat 768-entry palette, or -1."""
        for i in range(256):
            if palette[i*3:i*3+3] == color:
                return i
        return -1
    def bank_contains_sprite(self, bank, spr):
        """True if *bank* already holds a sprite with spr's sprite_name.

        BUG FIX: the original returned False as soon as the FIRST bank entry
        did not match (else: return False inside the loop), so duplicates were
        re-added whenever the matching sprite was not at index 0. Scan the
        whole bank before concluding the sprite is absent.
        """
        for s in bank:
            if s.sprite_name == spr.sprite_name:
                return True
        return False
import unittest
import json
import sys
# Probe that the target file exists before the test cases run.
try:
    filename = sys.argv[1]
    with open('tmp/%s' % filename, 'r') as f:
        pass
except FileNotFoundError:
    print('[X] file not exist. tmp/filename')
    # BUG FIX: exit with a non-zero status -- the original used sys.exit(0),
    # which reports success to the shell/CI on this error path.
    sys.exit(1)
except IndexError:
    print('[X] please enter filename.')
    sys.exit(1)
class ResultTestCase(unittest.TestCase):
    """Checks that every scraped record in the JSON dump has its fields filled in.

    Each test collects the offending rows and asserts the collection is empty,
    so the failure message lists exactly which rows are incomplete.
    """
    def setUp(self):
        # `filename` is the module-level name parsed from sys.argv above.
        with open('tmp/%s' % filename, 'r') as f:
            self.data = json.loads( f.read() )
    def tearDown(self):
        pass
    def _empty_field(self, field):
        """Return (url, value) pairs for every row whose *field* is falsy."""
        return [(row['url'], row[field]) for row in self.data if not row[field]]
    def test_url(self):
        bad = [row for row in self.data if not row['url']]
        self.assertEqual(len(bad), 0, bad)
    def test_title(self):
        bad = self._empty_field('title')
        self.assertEqual(len(bad), 0, bad)
    def test_publish_date(self):
        bad = self._empty_field('publish_date')
        self.assertEqual(len(bad), 0, bad)
    def test_authors(self):
        bad = self._empty_field('authors')
        self.assertEqual(len(bad), 0, bad)
    def test_tags(self):
        bad = self._empty_field('tags')
        self.assertEqual(len(bad), 0, bad)
    def test_text(self):
        bad = self._empty_field('text')
        self.assertEqual(len(bad), 0, bad)
    def test_text_html(self):
        bad = self._empty_field('text_html')
        self.assertEqual(len(bad), 0, bad)
    def test_images(self):
        bad = self._empty_field('images')
        self.assertEqual(len(bad), 0, bad)
    def test_video(self):
        bad = self._empty_field('video')
        self.assertEqual(len(bad), 0, bad)
    def test_links(self):
        bad = [(row['url'], row['links']) for row in self.data if len(row['links']) == 0]
        self.assertEqual(len(bad), 0, bad)
if __name__ == '__main__':
    # Pass only argv[0] so unittest does not try to parse our filename argument.
    unittest.main(argv = [sys.argv[0]])
|
import os,re
import numpy as np
import gslibUtil as gu
import arrayUtil as au
# Grid geometry and coordinate offset of the model domain.
prefix = 'Q2'
nrow,ncol = 197,116
delc,delr = 2650.,2650.
offset = 668350.,288415.
#--load hard data (well locations, first two columns are x,y)
harddata_file = 'tbl_29.dat'
title,harddata_names,harddata = gu.loadGslibFile(harddata_file)
hard_xy = np.zeros((len(harddata),2),dtype='float')
hard_xy[:,0] = harddata[:,0].copy()
hard_xy[:,1] = harddata[:,1].copy()
#--load omni probs
omni_file = 'reals\\'+prefix+'_thkcdf_omni_probs.dat'
otitle,ovar_names,omni_array = gu.loadGslibFile(omni_file)
#-load aniso probs
aniso_file = 'reals\\'+prefix+'_thkcdf_aniso_probs.dat'
atitle,avar_names,aniso_array = gu.loadGslibFile(aniso_file)
# Both files must carry the same variables, in the same order.
assert len(ovar_names) == len(avar_names)
# For each variable: plot the omni map, the aniso map, and their difference.
for var in range(0,len(ovar_names)):
    print ovar_names[var]
    thisO = omni_array[:,var].copy()
    print np.shape(thisO)
    thisO.resize(nrow,ncol)
    au.plotArray(np.flipud(thisO),delr,delc,offset=offset,gpts=hard_xy,title=prefix+'_omni_'+ovar_names[var],outputFlag='save')
    thisA = aniso_array[:,var].copy()
    thisA.resize(nrow,ncol)
    au.plotArray(np.flipud(thisA),delr,delc,offset=offset,gpts=hard_xy,title=prefix+'_aniso_'+ovar_names[var],outputFlag='save')
    diff = thisO - thisA
    print np.mean(diff),np.std(diff)
    au.plotArray(np.flipud(diff),delr,delc,offset=offset,gpts=hard_xy,title='diff_'+ovar_names[var],outputFlag='save')
|
from pysensationcore import *
import math
# === IntensityModulation Block ===
# A Block which can be used to modulate the intensity (strength) of a Sensation at a supplied frequency
intensityModulationBlock = defineBlock("IntensityModulation")
defineInputs(intensityModulationBlock,
"t",
"point",
"modulationFrequency")
# Default modulation frequency 143 (scalar carried in the x component of a 3-vector).
defineBlockInputDefaultValue(intensityModulationBlock.modulationFrequency, (143.0, 0.0, 0.0))
setMetaData(intensityModulationBlock.modulationFrequency, "Type", "Scalar")
def modulateIntensity(inputs):
    """Attach a sinusoidally modulated intensity to a point.

    inputs[0][0] -- time t, inputs[1] -- the (x, y, z) point,
    inputs[2][0] -- modulation frequency.
    Returns (x, y, z, intensity) with intensity a raised cosine in [0, 1].
    """
    t = inputs[0][0]
    pos = inputs[1]
    freq = inputs[2][0]
    # Raised-cosine envelope: 0 at t = 0, peaking at half the modulation period.
    strength = 0.5 * (1 - math.cos(2 * math.pi * t * freq))
    return (pos[0], pos[1], pos[2], strength)
# Wire modulateIntensity as the block's single output evaluator.
defineOutputs(intensityModulationBlock, "out")
defineBlockOutputBehaviour(intensityModulationBlock.out, modulateIntensity)
# This block transforms a point; it does not itself produce a Sensation.
setMetaData(intensityModulationBlock.out, "Sensation-Producing", False)
|
# Runtime 48 ms, Memory Usage 14 MB
def selfDividingNumbers(left: int, right: int) -> list:
    """Return every self-dividing number in the inclusive range [left, right].

    A self-dividing number contains no zero digit and is divisible by each
    of its digits (e.g. 128: 128 % 1 == 128 % 2 == 128 % 8 == 0).  Every
    number 1-9 is trivially self-dividing.

    An empty range (left > right) yields an empty list.  Unlike the
    original, 0 is correctly excluded (division by its only digit is
    undefined); the original's `x < 10` shortcut accepted it.
    """
    def _is_self_dividing(num: int) -> bool:
        # Each digit must be non-zero and divide the number evenly.
        return all(d != '0' and num % int(d) == 0 for d in str(num))

    return [x for x in range(left, right + 1) if _is_self_dividing(x)]
# Weekend assignment
# Build the classifier from Dense layers only, input_shape=(28*28,)
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.datasets import mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
print(x_train.shape, y_train.shape) # (60000, 28, 28), (60000,) <- grayscale
print(x_test.shape, y_test.shape) # (10000, 28, 28), (10000,)
print(x_train[0])
print(y_train[0])
print(x_train[0].shape) # (28, 28)
# Flatten each 28x28 image to 784 floats and scale pixels to [0, 1].
x_train = x_train.reshape(60000,28 *28).astype('float32')/255.
x_test = x_test.reshape(10000, 28 * 28)/255.
print(x_train.shape, x_test.shape) # (60000, 784), (10000, 784)
# OneHotEncoding
# (exercise part: turn the integer labels into 10-way one-hot vectors)
from tensorflow.keras.utils import to_categorical
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
print(y_train)
print(y_test)
# Exercise!! Complete the model!!
# Target metric: acc of 0.985 or better
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
# Funnel-shaped MLP; dropout on the wide layers curbs overfitting.
model = Sequential()
model.add(Dense(1024, activation='relu', input_shape=(784,)))
model.add(Dropout(0.2))
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(64, activation='relu'))
model.add(Dense(32, activation='relu'))
model.add(Dense(10, activation='softmax'))  # one probability per digit class
model.summary()
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['acc'])
from tensorflow.keras.callbacks import EarlyStopping
# epochs=4000 is only an upper bound; stop after 10 epochs with no loss improvement.
early_stopping = EarlyStopping(monitor='loss', patience=10, mode='auto')
model.fit(x_train, y_train, epochs=4000, batch_size=32, validation_split=0.2, verbose=2, callbacks=[early_stopping])
loss, acc = model.evaluate(x_test, y_test)
print('loss :', loss)
print('acc :', acc)
# loss : 0.15171925723552704
# acc : 0.984499990940094
# Application:
# print 10 y_test labels side by side with the 10 matching predictions
y_pred = model.predict(x_test)
print(y_pred)
print('==========================')
print(' 예상 ' ,'|',' 예측 ')
for i in range(10):
    print(' ', np.argmax(y_test[i+40]), ' |', np.argmax(y_pred[i+40]))
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from UM.Scene.SceneNodeDecorator import SceneNodeDecorator
from UM.Signal import Signal, SignalEmitter
from UM.Application import Application
from copy import deepcopy
## A decorator that can be used to override individual setting values.
class SettingOverrideDecorator(SceneNodeDecorator, SignalEmitter):
    """Scene-node decorator that stores per-node overrides of machine settings."""
    def __init__(self):
        super().__init__()
        self._settings = {}        # key -> Setting object registered via addSetting()
        self._setting_values = {}  # key -> raw (unparsed) override value
        self._temp_values = {}     # scratch cache populated while getAllSettingValues() runs

    settingAdded = Signal()
    settingRemoved = Signal()
    settingValueChanged = Signal()  # emitted with the affected Setting object

    def getAllSettings(self):
        # Dict of the Setting objects currently overridden on this node.
        return self._settings

    def getAllSettingValues(self):
        # Build a key -> value dict for all registered settings; dependent
        # settings get their defaults (evaluated relative to this decorator).
        self._temp_values = {}
        instance = Application.getInstance().getMachineManager().getActiveMachineInstance()
        for key in self._settings:
            setting = instance.getMachineDefinition().getSetting(key)
            if key in self._setting_values:
                self._temp_values[key] = setting.parseValue(self._setting_values[key])
                for required_by_key in self._getDependentSettingKeys(key):
                    if required_by_key not in self._temp_values:
                        self._temp_values[required_by_key] = instance.getMachineDefinition().getSetting(required_by_key).getDefaultValue(self)
            # NOTE(review): this line runs unconditionally, so a value parsed
            # just above is immediately clobbered with the default -- looks
            # like a missing `else`/`continue`; confirm against upstream.
            self._temp_values[key] = setting.getDefaultValue(self)
        values = self._temp_values
        self._temp_values = {}  # reset so getSettingValue() stops serving the cache
        return values

    def addSetting(self, key):
        """Register an override slot for *key*; silently ignores unknown keys."""
        instance = Application.getInstance().getMachineManager().getActiveMachineInstance()
        setting = instance.getMachineDefinition().getSetting(key)
        if not setting:
            return
        self._settings[key] = setting
        self.settingAdded.emit()
        Application.getInstance().getController().getScene().sceneChanged.emit(self.getNode())

    ## Recursively find all settings that directly or indirectly depend on certain setting.
    def _getDependentSettingKeys(self, key):
        required_setting_keys = set()
        instance = Application.getInstance().getMachineManager().getActiveMachineInstance()
        setting = instance.getMachineDefinition().getSetting(key)
        for dependent_key in setting.getRequiredBySettingKeys():
            required_setting_keys.add(dependent_key)
            # Walk the dependency graph transitively.
            required_setting_keys.update(self._getDependentSettingKeys(dependent_key))
        return required_setting_keys

    def setSettingValue(self, key, value):
        """Store a raw override value for an already-registered setting."""
        if key not in self._settings:
            return
        self._setting_values[key] = value
        self.settingValueChanged.emit(self._settings[key])
        Application.getInstance().getController().getScene().sceneChanged.emit(self.getNode())

    def getSetting(self, key):
        # Return the Setting for key, walking up the scene tree when this
        # node does not override it.
        if key not in self._settings:
            parent = self._node.getParent()
            # It could be that the parent does not have a decoration but it's parent parent does.
            while parent is not None:
                if parent.hasDecoration("getSetting"):
                    # NOTE(review): `key` is not forwarded here -- the parent is
                    # asked for a setting without saying which one; verify intent.
                    return parent.callDecoration("getSetting")
                else:
                    parent = parent.getParent()
        else:
            return self._settings[key]

    def getSettingValue(self, key):
        # Resolve a value: fall back to the node profile / working profile
        # when the key is not overridden on this node.
        if key not in self._settings and key not in self._temp_values:
            if self.getNode().callDecoration("getProfile"):
                return self.getNode().callDecoration("getProfile").getSettingValue(key)
            return Application.getInstance().getMachineManager().getWorkingProfile().getSettingValue(key)
        if key in self._temp_values:
            # Mid-getAllSettingValues(): serve from the scratch cache.
            return self._temp_values[key]
        setting = self._settings[key]
        if key in self._setting_values:
            return setting.parseValue(self._setting_values[key])
        return setting.getDefaultValue(self)

    def removeSetting(self, key):
        """Drop the override (and any stored value) for *key*."""
        if key not in self._settings:
            return
        del self._settings[key]
        if key in self._setting_values:
            del self._setting_values[key]
        self.settingRemoved.emit()
        Application.getInstance().getController().getScene().sceneChanged.emit(self.getNode())
|
import urllib.request
from bs4 import BeautifulSoup as b
import json
import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
from urllib import parse
import csv
import datetime
import time
import re
import uuid
import sqlite3
from db_location import dbLoc
# this is a scraper for mtg deck lists that uses mtgtop8.com
# It takes a format URL as input
# It writes the decklist, sideboard, and deck information to a database file as output
dbPath = dbLoc  # sqlite3 database path, configured in db_location.py
#beautiful soup scraper to insert lists into db
#create three tables:
#1 : deck metadata (event name, date, player, format, uuid key, deck name, record)
"""
create table if not exists deck_meta
(eventname text,
eventdate text,
playername text,
format text,
uuid text primary key,
deckname text,
place text);
"""
#2 mainboard: one row per card, keyed back to deck_meta by uuid
"""
create table if not exists main_deck
(uuid text,
cardname text,
quantity real);
"""
#3 sideboard: same shape as main_deck
"""
create table if not exists side_board
(uuid text,
cardname text,
quantity real);
"""
#indexes used by downstream queries
"""
create index maindeck_name on main_deck(cardname);
create index maindeck_uuid on main_deck(uuid);
create index sideboard_uuid on side_board(uuid);
create index sideboard_name on side_board(cardname);
"""
#scraper
#legacy decks from top8; open_meta() also slices this constant to build
#"next page" URLs, so keep it ending in "?f=LE".
legacy_url = "https://mtgtop8.com/format?f=LE"
#open the meta time period dropdown on a format and run open_meta for each
def format_metas(url):
    """Open a format page, find the first 'metas_list' drop-down, and hand
    every meta-period link (e.g. "Last 2 Months") to open_meta()."""
    print('running format_metas')
    html = urllib.request.urlopen(url).read()
    soup = b(html, 'html.parser')
    for x in soup.find_all('div',{'id':'metas_list'}):
        print('finding meta lists')
        # i should be making a list of the 'a' tags and then running my
        # open_meta loop after, for garbage collection
        for y in x.find_all('a'):
            # y['href'] is a relative link; y.string is the period label.
            open_meta("https://mtgtop8.com/format" + y['href'], y.string)
        # Only the first metas_list container is processed.
        break
# opens every 'next page' link on a format url
# produces a 'new url' for the next page
#open_meta finds the next page on a meta until there isn't a next page
#url is url for the selected dropdown
# meta is the string tag of that dropdown, something like "last 2 months" or "all 2017 decks"
def open_meta(url, meta):
    """Scrape one page of a meta period, then recurse through its "Next"
    pagination links until none remains.

    url  -- page URL for the selected meta drop-down entry
    meta -- label of that entry, e.g. "last 2 months" or "legacy"
    """
    print('i am processing url in open_meta:',url)
    html = urllib.request.urlopen(url).read()
    soup = b(html, 'html.parser')
    # Scrape every event listed on this page.
    try:
        print('processing events from meta page')
        explore_event(url, meta)
    except:
        # NOTE(review): bare except hides all failures, including typos.
        print('could not process events from meta')
    # Recurse into the next page, if any; the AttributeError from a missing
    # "Next" anchor is what terminates the recursion.
    try:
        next_button = soup.find("a", string="Next")
        # legacy_url[:-5] strips "?f=LE" so the relative href can be appended.
        new_url = legacy_url[:-5] + next_button.get('href')
        print('the new_url:',new_url)
        print('starting up explore_event')
        print('running recursion')
        open_meta(new_url, meta)
    except:
        print('there is no next url')
# explore_event finds every event(and link) on the page, runs open_meta
#passes url, meta, and date to event_processor
def explore_event(event_url, meta):
    """Collect every event link (plus its date) from a meta page's
    "last events" table and hand each to event_processor()."""
    url = event_url
    html = urllib.request.urlopen(url).read()
    soup = b(html, 'html.parser')
    try:
        # The "last events" table is the second 'Stable' table on the page.
        last_events = soup.find_all('table',class_='Stable')[1]
    except:
        print('could not find_all table')
    # Build [href, date] pairs for every row of the table.
    links = []
    try:
        rows = last_events.find_all('tr')
        for row in rows:
            cols = row.find_all('td')
            col_list = []
            for item in cols:
                col_list.append(item)
            a_href=col_list[1].a['href']     # event link lives in column 2
            col_date = col_list[3].text.strip()  # date lives in column 4
            links.append([a_href,col_date])
    except:
        print('could not comprehend table')
    try:
        for event_data in links:
            try:
                print('starting event_processor for link')
                event_processor("https://mtgtop8.com/" + event_data[0],meta, event_data[1])
            except:
                # Best-effort: a broken event page must not stop the crawl.
                None
        """
        for atag in s14:
            #print("https://mtgtop8.com/" + atag.a['href'])
            links.append("https://mtgtop8.com/" + atag.a['href'])
            #implement deck scrape for that href
        for x in links:
            try:
                print('starting event_processor for ',x)
                event_processor(x, meta)
            except:
                None
        """
    except:
        print('failed to find, probably table comprehension issue')
# this scrapes the deck that's on the page
def deck_scrape(deck_scrape_url, meta, event_date, deck_name,place,player_name):
    """Scrape one deck page and persist it through the module-level cursor ``c``.

    Writes one deck_meta row, then one main_deck/side_board row per card
    parsed from the page's "Export" plain-text decklist.

    deck_scrape_url -- URL of the deck page on mtgtop8
    meta            -- format label for the deck_meta row (e.g. "legacy")
    event_date      -- event date string taken from the event table
    deck_name, place, player_name -- listing details from the event sidebar
    """
    url=deck_scrape_url
    html = urllib.request.urlopen(url).read()
    time.sleep(.600)  # throttle between the page fetch and the export fetch
    soup = b(html, 'html.parser')
    # The event name is the first element with class "event_title".
    event_title = soup.find_all(class_='event_title')
    # Fresh UUID ties the deck_meta row to its main_deck/side_board rows.
    deck_uuid = uuid.uuid1()
    print('deck_uuid: ', deck_uuid)
    event_title = event_title[0].string
    print('event title: ',event_title)
    print('deck name: ',deck_name)
    print('player name: ',player_name)
    try:
        print('place: ',place, 'meta: ',meta)
    except Exception:
        print('could not print place or meta')
    try:
        c.execute('insert or ignore into deck_meta values (?,?,?,?,?,?,?)',(
            str(event_title),
            str(event_date),
            str(player_name),
            str(meta),
            str(deck_uuid),
            str(deck_name),
            str(place),
            ))
    except Exception:
        print('could not push deck_meta vals to sql')
    # The "Export" link points at the plain-text decklist file.
    for x in soup.find_all(string=re.compile('Expor')):
        txt_file = x.parent.a['href']
        txt_url = "https://mtgtop8.com/" + txt_file
        print('scraping ',txt_url)
        data = urllib.request.urlopen(txt_url)
        file_list = []
        for line in data:
            dec_line = line.decode('UTF-8')
            # Strip trailing CRLF, then split "<count> <card name>" once.
            file_list.append(dec_line[:-2].split(" ",1))
        sb = 0
        main_board = []
        side_board = []
        for slot in file_list:
            if slot[0] == 'Sideboard':
                # Every line after this marker belongs to the sideboard.
                sb = 1
            elif sb == 1:  # BUG FIX: was "sb is 1", an identity test on an int
                try:
                    c.execute('insert or ignore into side_board values (?,?,?)',(
                        str(deck_uuid),
                        str(slot[1]),
                        slot[0]
                        ))
                    side_board.append(slot)
                except Exception:
                    print('could not push side_board to sql')
            else:
                main_board.append(slot)
                try:
                    c.execute('insert or ignore into main_deck values (?,?,?)',(
                        str(deck_uuid),
                        str(slot[1]),
                        slot[0]
                        ))
                except Exception:
                    print('could not push main_deck to sql')
        print('main board: ',main_board)
        print('side board: ',side_board)
    # NOTE(review): accented characters in file names have crashed this
    # scraper before (see comment at the bottom of the file) -- TODO confirm.
#this works except for accents in the file name
def add_mainboard():
    """Placeholder: mainboard inserts are currently done inline in deck_scrape()."""
    return None
def add_sideboard():
    """Placeholder: sideboard inserts are currently done inline in deck_scrape()."""
    return None
# event_processor receives a list of events, their meta, and their date
# checks the list of decks in the event
# it creates a link to that deck's page, finds player name
# it calls the deck scraper
def event_processor(event_url, meta, event_date):
    """Walk the deck listings in an event page's left sidebar and call
    deck_scrape() for each one.

    event_url  -- full URL of the event page
    meta       -- format label passed through to deck_scrape()
    event_date -- date string passed through to deck_scrape()
    """
    url=event_url
    html = urllib.request.urlopen(url).read()
    soup = b(html, 'html.parser')
    hrefs = set()
    for x in soup.find_all('div',attrs={'style':'margin:0px 4px 0px 4px;'}):
        deck_line = x.find_all('div',attrs={'style':'padding:3px 0px 3px 0px;'})
        deck_list = []
        # deck_line is a list of decks pulled from the left hand sidebar;
        # each deck_listing carries place, deck name + link, and player name.
        for deck_listing in deck_line:
            print('new y line')
            # First S14 div is the placement, second holds name and link.
            S14 = deck_listing.find_all('div',class_='S14')
            placement = S14[0].text
            deck_name = S14[1].text
            deck_link = S14[1].find('a').get('href')
            print('deck_link: ',deck_link)
            print('deck_name; ',deck_name)
            player_name = deck_listing.find_all('div',class_='G11')[0].string
            print("player name: ",player_name)
            try:
                print('deckscraping')
                deck_scrape("https://mtgtop8.com/event" + deck_link,meta,event_date, deck_name,placement,player_name)
            except:
                # Best-effort: one bad deck must not abort the event.
                print('could not manage decklist')
        # Only the first sidebar container is processed.
        break
# --- script entry: scrape the legacy "last 2 weeks" meta into the DB ---
print('starting decklist scraper for legacy 2 weeks')
print('connecting to db')
cardsDb = sqlite3.connect(dbPath)
# Module-level cursor used by deck_scrape() for all inserts.
c = cardsDb.cursor()
#"""
last2weeks = "https://mtgtop8.com/format?f=LE&meta=34&a="
open_meta(last2weeks, "legacy")
#"""
cardsDb.commit()
print('im closing the db')
cardsDb.close()
# this script is mostly completed. You pass it a format link on mtgtop8, and need to hardcode the format.
# it also needs tuning up around the open_meta stuff. I think I still need to fix the ascii/utf-8 thing where accent marks crash the scraper
# it also needs to delay at the deck scraping thing
#last 2 weeks
#https://www.mtgtop8.com/format?f=LE&meta=34&cp=1
#open_meta("https://mtgtop8.com/format?f=LE&meta=34&a=","last 2 weeks")
# Quest cutscene script -- `sm` is the script manager supplied by the host
# (looks like a MapleStory private-server scripting API; TODO confirm).
sm.lockUI()
FANZY = 1500010  # NPC id used as the second speaker in the dialogue
sm.removeEscapeButton()
# Player speaks first (dialogue flipped to show the player as speaker).
sm.flipDialoguePlayerAsSpeaker()
sm.sendNext("#bBleh! I almost drowned!#k")
sm.setSpeakerID(FANZY)
sm.sendSay("There must be some kind of enchantment to keep people from swimming across.")
sm.flipDialoguePlayerAsSpeaker()
sm.sendSay("#bYou could have told me that in advance!#k")
sm.setSpeakerID(FANZY)
sm.sendSay("I'm not omniscient, and you make a good test subject. We'll have to find another way.")
sm.unlockUI()
# Start and immediately complete quest 32102, then warp the player out.
sm.startQuest(32102)
sm.completeQuest(32102)
sm.warp(101070000, 0)
import requests
from django.shortcuts import render
from django.urls import reverse_lazy
from django.views.generic import View
from django.views.generic.edit import CreateView,UpdateView
from django.contrib.auth.views import LoginView
from .forms import IndexForm,AuthUserForm,CustomUserCreationForm, CustomUserChangeForm
from .models import CustomUser,Weather,City
from rest_framework import viewsets
from .serializers import CitySerializer,WeatherSerializer
# Create your views here.
class WeatherLoginView(LoginView):
    ''' View for User Login. '''
    template_name='login.html'
    authentication_form=AuthUserForm
    # Redirect target after a successful login.
    next = 'project1:profile_url'
class CustomUserCreate(CreateView):
    ''' View for User registration '''
    model = CustomUser
    form_class = CustomUserCreationForm
    template_name = "weather/signup.html"

    def get_success_url(self):
        # Always return to the index page after a successful sign-up.
        return '/'
class CustomUserUpdate(UpdateView):
    '''View for update user settings. '''
    model = CustomUser
    template_name = 'weather/profile.html'
    form_class = CustomUserChangeForm

    def get_object(self,queryset=None):
        # Edit the user identified by the pk in the URL (not request.user).
        return CustomUser.objects.get(pk=self.kwargs['pk'])
class IndexView(View):
    ''' View for drawing index page of site.

    GET renders the favourite-cities weather (personal list for
    authenticated users, site defaults otherwise); POST additionally looks
    up the city submitted through IndexForm via the OpenWeatherMap API.
    '''
    def get(self,request):
        '''Function for http GET method. '''
        form = IndexForm()
        if request.user.is_authenticated:
            # Weather for the three cities on the user's personal list.
            weather_api1,weather_api2,weather_api3 = CustomUser.objects.get_weather_favorite_cities_auth_users(request.user.pk)
            context = {'weather_api1':weather_api1,'weather_api2':weather_api2,'weather_api3':weather_api3,'form':form}
        else:
            # Anonymous users see the site-wide default city list.
            all_cities=CustomUser.objects.get_weather_favorite_cities_notauth_users()
            context = {'weather_info':all_cities,'form':form}
        return render(request,'weather/index.html',context)

    def post(self,request):
        '''Function for http POST method. '''
        bound_form=IndexForm(request.POST)  # Form bound to the submitted data.
        cities=[]
        if bound_form.is_valid():
            # Look up the requested city through the OpenWeatherMap API.
            cities = bound_form.get_weather_open_weather_map()
        # BUG FIX: the original built `context` only inside the is_valid()
        # branch, so an invalid form raised UnboundLocalError at render time.
        # Now the page always renders, with `cities` empty on invalid input.
        form = IndexForm()
        if request.user.is_authenticated:
            weather_api1,weather_api2,weather_api3 = CustomUser.objects.get_weather_favorite_cities_auth_users(request.user.pk)
            context = {'weather_api1':weather_api1,'weather_api2':weather_api2,'weather_api3':weather_api3,'form':form,'cities':cities}
        else:
            all_cities=CustomUser.objects.get_weather_favorite_cities_notauth_users()
            context = {'weather_info':all_cities,'cities':cities,'form':form}
        return render (request, 'weather/index.html', context)
class CityAPIView(viewsets.ModelViewSet):
    '''Simple class for CRUD-model city model object. '''
    # DRF ModelViewSet supplies list/retrieve/create/update/destroy.
    serializer_class = CitySerializer
    queryset = City.objects.all()
class WeatherAPIView(viewsets.ModelViewSet):
    ''' Simple class for CRUD-model weather model object. '''
    # DRF ModelViewSet supplies list/retrieve/create/update/destroy.
    serializer_class = WeatherSerializer
    queryset = Weather.objects.all()
# this is Bi-Directional Bubble Sort
def bdBubbleSort(alist):
    """Sort `alist` in place with bi-directional (cocktail shaker) bubble sort.

    Alternates left-to-right and right-to-left bubbling passes, shrinking the
    unsorted window from both ends, and stops early as soon as a full pass
    performs no swap.  Prints the list after every pass for inspection.
    """
    swapped = True
    remaining_passes = len(alist) - 1
    lo, hi = 0, len(alist) - 1
    going_right = True
    while remaining_passes > 0 and swapped:
        swapped = False
        if going_right:
            # Push the largest remaining element toward the right end.
            for j in range(lo, hi):
                if alist[j] > alist[j + 1]:
                    alist[j], alist[j + 1] = alist[j + 1], alist[j]
                    swapped = True
            hi -= 1  # rightmost slot of the window is now settled
        else:
            # Push the smallest remaining element toward the left end.
            for j in range(hi, lo, -1):
                if alist[j] < alist[j - 1]:
                    alist[j], alist[j - 1] = alist[j - 1], alist[j]
                    swapped = True
            lo += 1  # leftmost slot of the window is now settled
        going_right = not going_right
        remaining_passes -= 1
        # Show progress after each pass.
        print(alist)
# test the method
# test the method
if __name__ == '__main__':
    alist = [54,26,93,17,77,31,44,55,20]
    # alist = [7,6,5,4,3,2,1]
    bdBubbleSort(alist)
    print(alist)  # expected: the list sorted ascending
from BasePlotter import BasePlotter
from MeasurementStatistics import MeasurementStatistics
from CSVWriter import CSVWriter
import os
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
class DiagramPlotter(BasePlotter):
    """Plots each measurement entry as a samples-vs-dB line diagram, saving
    one image per (file pattern, frequency) pair into the output directory."""

    def __init__(self, outputDir):
        # outputDir: path prefix where figures are written (used verbatim
        # in string concatenation below, so it should end with a separator).
        BasePlotter.__init__(self, outputDir)

    def initPlotting(self, filePatterns, measurementResult):
        # filePatterns and measurementResult are parallel lists: entry i of
        # measurementResult holds the measurement entries for filePatterns[i]
        # (see BasePlotter -- TODO confirm).
        BasePlotter.initPlotting(self, filePatterns, measurementResult)

    def plot(self):
        """Render and save one figure per measurement entry of every file."""
        for i, filename in enumerate(self.filePatterns):
            measurementEntryList = self.measurementResult[i]
            for entry in measurementEntryList:
                freq = entry.frequency
                data = entry.data
                x = np.arange(0, len(data), 1)  # sample indices for the x axis
                fig, ax = plt.subplots()
                ax.plot(x, data)
                ax.set_title(filename + ' @ ' + str(freq) + ' Hz')
                ax.set_xlabel('samples')
                ax.set_ylabel('dB')
                ax.grid()
                # File name example: <outputDir><filename>_1000 for 1 kHz.
                fig.savefig(self.outputDir + filename + '_' + str(int(freq)))

    def terminate(self):
        BasePlotter.terminate(self)
import httplib
import json
from hashing import ConsistentHashRing
# Smoke test: spread ten expense POSTs across three local servers chosen by
# consistent hashing.  Python 2 script (httplib, xrange, print statements).
cr = ConsistentHashRing()
# NOTE(review): calling dunders directly is unidiomatic -- cr["server1"] = "5001"
# reads better and is equivalent; same for the __getitem__ call below.
cr.__setitem__("server1","5001")
cr.__setitem__("server2","5002")
cr.__setitem__("server3","5003")
for i in xrange(1,11):
    # The ring maps the stringified id onto one of the three ports.
    port = cr.__getitem__(str(i))
    url = "localhost:" + str(port)
    print url
    connection = httplib.HTTPConnection(url)
    headers = {'Content-type': 'application/json'}
    foo = {
        "id": i,
        "category": "training",
        "description": "iPhone for training",
        "email": "foo1@bar.com",
        "estimated_costs": "6760",
        "link": "http://www.apple.com/shop/buy-ipad/ipad-pro",
        "name": "Foo Bar",
        "submit_date": "09-08-2016"
    }
    json_foo = json.dumps(foo)
    url2 = "/v1/expenses"
    print url2
    connection.request('POST', url2, json_foo, headers)
    response = connection.getresponse()
    print(response.read().decode())
import pandas as pd
import pdb
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import os
def plot_RZ_heatmap(R_data_coords_mesh, Z_data_coords_mesh,
                    R_edges_mesh, Z_edges_mesh, data_grid,
                    file_name,
                    fig_height = 9, fig_width = 13, colormap = 'magma',
                    Norm = 'linear', vmin=None, vmax=None,
                    linthresh = None, linscale = None,
                    ylabel = 'Z [pc]', xlabel = 'R [pc]',
                    cb_label = ' ', counts_mask = True):
    """Save a pcolormesh heatmap of ``data_grid`` over the R-Z plane.

    R_edges_mesh / Z_edges_mesh -- bin-edge meshes passed to pcolormesh.
    R_data_coords_mesh / Z_data_coords_mesh -- accepted for signature
        symmetry with plot_RZ_heatmap_and_lines; not used here.
    data_grid    -- 2-D values to plot; cells where ``counts_mask`` is False
                    are masked out (True, the default, keeps every cell).
    Norm         -- 'linear', 'lognorm' or 'symlognorm'; linthresh/linscale
                    apply to 'symlognorm' only.
    file_name    -- output path handed to plt.savefig.

    Raises ValueError for an unknown ``Norm`` (the original silently fell
    through and later crashed with NameError on ``im``).
    """
    fig, axes = plt.subplots(ncols=2, nrows=1, gridspec_kw={"width_ratios":[15,1]})
    fig.set_figheight(fig_height)
    fig.set_figwidth(fig_width)
    ax = axes[0]    # heatmap panel
    cbax = axes[1]  # colorbar panel
    ax.set_aspect('equal')
    # Hide bins excluded by counts_mask.
    data_grid_masked = np.ma.masked_where(np.logical_not(counts_mask), data_grid)
    if Norm == 'lognorm':
        im = ax.pcolormesh(R_edges_mesh, Z_edges_mesh, data_grid_masked,
                           cmap = colormap, norm=colors.LogNorm(vmin=vmin, vmax=vmax))
    elif Norm == 'symlognorm':
        im = ax.pcolormesh(R_edges_mesh, Z_edges_mesh, data_grid_masked,
                           cmap = colormap,
                           norm=colors.SymLogNorm(vmin=vmin, vmax=vmax,
                                                  linthresh=linthresh,
                                                  linscale=linscale))
    elif Norm == 'linear':
        im = ax.pcolormesh(R_edges_mesh, Z_edges_mesh, data_grid_masked,
                           cmap = colormap, vmin=vmin, vmax=vmax)
    else:
        raise ValueError("Norm must be 'linear', 'lognorm' or 'symlognorm', got %r" % (Norm,))
    ax.set_ylabel(ylabel)
    ax.set_xlabel(xlabel)
    cb = fig.colorbar(im, cax=cbax)
    cb.set_label(label=cb_label)
    plt.savefig(file_name)
    # Release the figure; plot_RZ_histograms calls this many times and the
    # original leaked one open figure per call.
    plt.close(fig)
    return
def plot_RZ_heatmap_and_lines(R_data_coords_mesh, Z_data_coords_mesh,
                              R_edges_mesh, Z_edges_mesh,
                              data_grid, data_error_upper, data_error_lower,
                              file_name, fig_height = 10, fig_width = 25, colormap = 'magma',
                              Norm = 'linear', vmin=None, vmax=None,
                              linthresh = None, linscale = None,
                              ylabel = 'Z [pc]', xlabel = 'R [pc]',
                              cb_label = ' ', counts_mask=True):
    """Save a two-panel figure: an R-Z heatmap of ``data_grid`` (left) and,
    sharing the Z axis, one vertical profile per R bin with shaded
    upper/lower error bands (right), plus a small scale bar below.

    data_error_upper / data_error_lower -- per-cell error offsets added
        to / subtracted from data_grid for the shaded band.
    counts_mask -- boolean grid (or True) selecting cells to keep.
    Norm, vmin, vmax, linthresh, linscale -- heatmap normalisation, as in
        plot_RZ_heatmap.
    """
    num_R_bins = len(R_data_coords_mesh[:,0])
    # Layout: [heatmap | line panel] over [colorbar | scale bar].
    fig, axes = plt.subplots(ncols=2, nrows=2,
                             gridspec_kw={"height_ratios":[10,1],"width_ratios":[15,50]})
    fig.set_figheight(fig_height)
    fig.set_figwidth(fig_width)
    plt.subplots_adjust(wspace=0.1)
    heat_ax = axes[0,0] # Heatmap
    cbax = axes[1,0] # Colorbar
    line_ax = axes[0,1] # Line plot
    scale_ax = axes[1,1] # Scale for line plot
    scale_ax.get_shared_x_axes().join(line_ax, scale_ax)
    line_ax.get_shared_y_axes().join(line_ax, heat_ax)
    # Apply the same mask to the data and both error grids.
    data_grid_masked = np.ma.masked_where(np.logical_not(counts_mask), data_grid)
    data_error_upper_masked = np.ma.masked_where(np.logical_not(counts_mask), data_error_upper)
    data_error_lower_masked = np.ma.masked_where(np.logical_not(counts_mask), data_error_lower)
    # Heat Map
    if Norm == 'lognorm':
        im = heat_ax.pcolormesh(R_edges_mesh, Z_edges_mesh, data_grid_masked,
                                cmap = colormap, norm=colors.LogNorm(vmin=vmin, vmax=vmax))
    elif Norm == 'symlognorm':
        im = heat_ax.pcolormesh(R_edges_mesh, Z_edges_mesh, data_grid_masked,
                                cmap = colormap,
                                norm=colors.SymLogNorm(vmin=vmin, vmax=vmax,
                                                       linthresh=linthresh,
                                                       linscale=linscale))
    elif Norm== 'linear':
        im = heat_ax.pcolormesh(R_edges_mesh, Z_edges_mesh, data_grid_masked,
                                cmap = colormap, vmin=vmin, vmax=vmax)
    heat_ax.set_ylabel(ylabel)
    heat_ax.set_xlabel(xlabel)
    # Colorbar
    cb = fig.colorbar(im, cax=cbax, orientation = 'horizontal')
    cb.set_label(label=cb_label)
    # Line plot: profiles are offset horizontally by one spacing_param per R
    # bin and scaled so the largest value spans 1.5 offsets.
    spacing_param = 1
    scaling_param = 1.5/np.nanmax(data_grid_masked)
    min_data_and_err = min(0.,np.nanmin(data_grid_masked), np.nanmin(data_grid_masked+data_error_lower_masked))
    # NOTE(review): np.nanmin here looks like it should be np.nanmax for an
    # upper bound -- confirm before relying on these axis limits.
    max_data_and_err = max(0.,np.nanmax(data_grid_masked), np.nanmin(data_grid_masked+data_error_upper_masked))
    line_ax.axhline(0, xmin=min_data_and_err*scaling_param,
                    xmax=max_data_and_err*scaling_param,
                    ls='-')
    for RR, Rval in enumerate(R_data_coords_mesh[:,0]):
        zero_point = RR * spacing_param  # x offset of this R bin's profile
        Z_values = Z_data_coords_mesh[RR,:]
        data_values = data_grid_masked[RR,:]
        data_upper = data_error_upper_masked[RR,:]
        data_lower = data_error_lower_masked[RR,:]
        main_line = line_ax.plot(data_values*scaling_param + zero_point, Z_values, ls='-', linewidth=2)
        line_color = main_line[0].get_color()
        # Vertical zero line plus faint error envelopes in the same colour.
        line_ax.axvline(zero_point,ls='-', color = line_color)
        line_ax.plot((data_values+data_upper)*scaling_param + zero_point, Z_values, ls='-',
                     color = line_color, alpha=0.5, linewidth=1)
        line_ax.plot((data_values-data_lower)*scaling_param + zero_point, Z_values, ls='-',
                     color = line_color, alpha=0.5, linewidth=1)
        line_ax.fill_betweenx(Z_values,(data_values-data_lower)*scaling_param + zero_point,
                              (data_values+data_upper)*scaling_param + zero_point,
                              color=line_color, alpha=0.1)
    # Strip the frame so only the profiles are visible.
    line_ax.spines['top'].set_visible(False)
    line_ax.spines['left'].set_visible(False)
    line_ax.spines['bottom'].set_visible(False)
    line_ax.spines['right'].set_visible(False)
    line_ax.xaxis.set_ticks_position('none')
    line_ax.yaxis.set_ticks_position('none')
    # Label each profile's offset with its R coordinate.
    line_ax.set_xticks(range(len(R_data_coords_mesh[:,0])))
    line_ax.xaxis.set_ticklabels(R_data_coords_mesh[:,0],
                                 rotation='vertical')
    line_ax.xaxis.set_label_coords(0.08, -0.05)
    line_ax.set_xlabel(xlabel + u"\u2192")
    line_ax.yaxis.set_ticklabels([])
    # Scale bar beneath the line panel, showing what one profile unit means.
    scale_ax.yaxis.set_ticklabels([])
    scale_ax.xaxis.set_ticklabels([])
    scale_ax.yaxis.set_ticks_position('none')
    scale_ax.spines['top'].set_visible(False)
    scale_ax.spines['left'].set_visible(False)
    scale_ax.spines['right'].set_visible(False)
    scale_ax.spines['bottom'].set_visible(False)
    scale_ax.set_ylim([-1,1])
    # Round the data maximum to the nearest power of ten for the scale length.
    scale_max = 10**int(round(np.log10(np.nanmax(data_grid_masked))))
    scale_ax.plot((0,scale_max*scaling_param), (0,0), ls='-', lw=2)
    scale_ax.scatter(np.array([0,scale_max*scaling_param]), np.array([0,0]), marker='+',s=70)
    scale_ax.xaxis.set_ticklabels([0,scale_max])
    scale_ax.set_xticks([0,scale_max*scaling_param])
    scale_ax.xaxis.set_label_coords(0.25, -0.5)
    scale_ax.set_xlabel(cb_label)
    plt.savefig(file_name)
    return
def plot_matrix_heatmap(matrix, out_file_name,
                        fig_height = 9, fig_width = 13, colormap = 'magma',
                        cb_label = 'Correlation'):
    """Save a square heatmap of ``matrix`` (row 0 at the top) with a colorbar.

    matrix        -- 2-D array handed straight to pcolormesh.
    out_file_name -- output path passed to plt.savefig.
    cb_label      -- caption for the colorbar (defaults to 'Correlation').
    """
    figure, (map_ax, bar_ax) = plt.subplots(ncols=2, nrows=1,
                                            gridspec_kw={"width_ratios":[15,1]})
    figure.set_figheight(fig_height)
    figure.set_figwidth(fig_width)
    mesh = map_ax.pcolormesh(matrix, cmap = colormap)
    # Flip the y axis so the matrix reads top-down like printed output.
    map_ax.invert_yaxis()
    map_ax.set_aspect('equal')
    colorbar = figure.colorbar(mesh, cax=bar_ax)
    colorbar.set_label(label=cb_label)
    plt.savefig(out_file_name)
    plt.close()
def plot_RZ_histograms(oscar_obj, counts_threshold = 50, phi_slice = 0):
#Set up file structure
plot_folder = oscar_obj.data_root + oscar_obj.data_file_name.split('.')[0] + \
'/samplings_' + str(oscar_obj.N_samplings) + \
'_Rlim_' + str(oscar_obj.Rmin) + '_' + str(oscar_obj.Rmax) + \
'_philim_' + str(oscar_obj.phimin) + '_' + str(oscar_obj.phimax) + \
'_Zlim_' + str(oscar_obj.Zmin) + '_' + str(oscar_obj.Zmax) + \
'_Rbins_' + str(oscar_obj.num_R_bins) + \
'_phibins_' + str(oscar_obj.num_phi_bins) + \
'_Zbins_' + str(oscar_obj.num_Z_bins) + \
oscar_obj.binning_type + '/'
if not os.path.isdir(plot_folder):
os.makedirs(plot_folder)
# PLOT RESULTS
skewness_stat_counts_grid,\
skewness_stat_vbar_R1_dat_grid, skewness_stat_vbar_p1_dat_grid,\
skewness_stat_vbar_Z1_dat_grid, skewness_stat_vbar_RR_dat_grid,\
skewness_stat_vbar_pp_dat_grid, skewness_stat_vbar_ZZ_dat_grid,\
skewness_stat_vbar_RZ_dat_grid = oscar_obj.skewness_stat_grids
skewness_pval_counts_grid,\
skewness_pval_vbar_R1_dat_grid, skewness_pval_vbar_p1_dat_grid,\
skewness_pval_vbar_Z1_dat_grid, skewness_pval_vbar_RR_dat_grid,\
skewness_pval_vbar_pp_dat_grid, skewness_pval_vbar_ZZ_dat_grid,\
skewness_pval_vbar_RZ_dat_grid = oscar_obj.skewness_pval_grids
kurtosis_stat_counts_grid,\
kurtosis_stat_vbar_R1_dat_grid, kurtosis_stat_vbar_p1_dat_grid,\
kurtosis_stat_vbar_Z1_dat_grid, kurtosis_stat_vbar_RR_dat_grid,\
kurtosis_stat_vbar_pp_dat_grid, kurtosis_stat_vbar_ZZ_dat_grid,\
kurtosis_stat_vbar_RZ_dat_grid = oscar_obj.kurtosis_stat_grids
kurtosis_pval_counts_grid,\
kurtosis_pval_vbar_R1_dat_grid, kurtosis_pval_vbar_p1_dat_grid,\
kurtosis_pval_vbar_Z1_dat_grid, kurtosis_pval_vbar_RR_dat_grid,\
kurtosis_pval_vbar_pp_dat_grid, kurtosis_pval_vbar_ZZ_dat_grid,\
kurtosis_pval_vbar_RZ_dat_grid = oscar_obj.kurtosis_pval_grids
gaussianity_stat_counts_grid,\
gaussianity_stat_vbar_R1_dat_grid, gaussianity_stat_vbar_p1_dat_grid,\
gaussianity_stat_vbar_Z1_dat_grid, gaussianity_stat_vbar_RR_dat_grid,\
gaussianity_stat_vbar_pp_dat_grid, gaussianity_stat_vbar_ZZ_dat_grid,\
gaussianity_stat_vbar_RZ_dat_grid = oscar_obj.gaussianity_stat_grids
gaussianity_pval_counts_grid,\
gaussianity_pval_vbar_R1_dat_grid, gaussianity_pval_vbar_p1_dat_grid,\
gaussianity_pval_vbar_Z1_dat_grid, gaussianity_pval_vbar_RR_dat_grid,\
gaussianity_pval_vbar_pp_dat_grid, gaussianity_pval_vbar_ZZ_dat_grid,\
gaussianity_pval_vbar_RZ_dat_grid = oscar_obj.gaussianity_pval_grids
# TRACER DENSITY
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], oscar_obj.nu_dat_grid[:,phi_slice,:],
plot_folder + 'nu_data.pdf', colormap = 'magma',
Norm = 'lognorm', cb_label='Tracer density stars [stars pc$^{-3}$]')
masked_cmap = plt.cm.viridis
masked_cmap.set_bad(color='grey')
masked_counts = np.ma.masked_where(oscar_obj.counts_grid[:,phi_slice,:] < counts_threshold, oscar_obj.counts_grid[:,phi_slice,:])
counts_above_threshold = oscar_obj.counts_grid[:,phi_slice,:] >= counts_threshold
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:],
masked_counts,
plot_folder + 'nu_data_pure_counts.pdf', colormap = masked_cmap,
Norm = 'lognorm', vmin=50.,
cb_label='Star count [stars per bin]',
counts_mask = True)
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], gaussianity_pval_counts_grid[:,phi_slice,:],
plot_folder + 'nu_gauss_pval.pdf', colormap = 'magma',
Norm = 'lognorm', vmin=1e-2, vmax=1.,
cb_label='Tracer density gaussianity p-value')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], skewness_stat_counts_grid[:,phi_slice,:],
plot_folder + 'nu_skew_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = 'Tracer density Skewness z-score')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], kurtosis_stat_counts_grid[:,phi_slice,:],
plot_folder + 'nu_kurt_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = 'Tracer density kurtosis z-score')
#Vertical Velocity vZ1
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], oscar_obj.vbar_Z1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_Z1_data.pdf', colormap = 'seismic',
Norm = 'linear',
vmin=-np.nanmax(abs(oscar_obj.vbar_Z1_dat_grid[:,phi_slice,:][counts_above_threshold])),
vmax=np.nanmax(abs(oscar_obj.vbar_Z1_dat_grid[:,phi_slice,:][counts_above_threshold])),
cb_label='Vertical velocity $\overline{v_Z}$ [km s$^{-1}$]',
counts_mask = counts_above_threshold)
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], gaussianity_pval_vbar_Z1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_Z1_gauss_pval.pdf', colormap = 'magma',
Norm = 'lognorm', vmin=1e-2, vmax=1.,
cb_label='$\overline{v_Z}$ gaussianity p-value')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], skewness_stat_vbar_Z1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_Z1_skew_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_Z}$ Skewness z-score')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], kurtosis_stat_vbar_Z1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_Z1_kurt_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_Z}$ kurtosis z-score')
#Vertical Velocity vZZ
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], oscar_obj.vbar_ZZ_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_ZZ_data.pdf', colormap = 'nipy_spectral',
Norm = 'linear',
vmin=np.nanmin(oscar_obj.vbar_ZZ_dat_grid[:,phi_slice,:][counts_above_threshold]),
vmax=np.nanmax(oscar_obj.vbar_ZZ_dat_grid[:,phi_slice,:][counts_above_threshold]),
cb_label='Vertical velocity $\overline{v_Z v_Z}$ [km$^{2}$ s$^{-2}$]',
counts_mask = counts_above_threshold)
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], gaussianity_pval_vbar_ZZ_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_ZZ_gauss_pval.pdf', colormap = 'magma',
Norm = 'lognorm', vmin=1e-2, vmax=1.,
cb_label='$\overline{v_Z v_Z}$ gaussianity p-value')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], skewness_stat_vbar_ZZ_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_ZZ_skew_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_Z v_Z}$ skewness z-score')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], kurtosis_stat_vbar_ZZ_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_ZZ_kurt_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_Z v_Z}$ kurtosis z-score')
#Radial Velocity vR
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], oscar_obj.vbar_R1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_R1_data.pdf', colormap = 'seismic',
Norm = 'linear',
vmin=-np.nanmax(abs(oscar_obj.vbar_R1_dat_grid[:,phi_slice,:][counts_above_threshold])),
vmax=np.nanmax(abs(oscar_obj.vbar_R1_dat_grid[:,phi_slice,:][counts_above_threshold])),
cb_label='Radial velocity $\overline{v_R}$ [km s$^{-1}$]',
counts_mask = counts_above_threshold)
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], gaussianity_pval_vbar_R1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_R1_gauss_pval.pdf', colormap = 'magma',
Norm = 'lognorm', vmin=1e-2, vmax=1.,
cb_label='$\overline{v_R}$ gaussianity p-value')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], skewness_stat_vbar_R1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_R1_skew_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_R}$ Skewness z-score')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], kurtosis_stat_vbar_R1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_R1_kurt_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_R}$ kurtosis z-score')
#Radial Velocity vRR
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], oscar_obj.vbar_RR_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_RR_data.pdf', colormap = 'nipy_spectral',
Norm = 'linear',
vmin=np.nanmin(oscar_obj.vbar_RR_dat_grid[:,phi_slice,:][counts_above_threshold]),
vmax=np.nanmax(oscar_obj.vbar_RR_dat_grid[:,phi_slice,:][counts_above_threshold]),
cb_label='Radial velocity $\overline{v_R v_R}$ [km$^{2}$ s$^{-2}$]',
counts_mask = counts_above_threshold)
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], gaussianity_pval_vbar_RR_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_RR_gauss_pval.pdf', colormap = 'magma',
Norm = 'lognorm', vmin=1e-2, vmax=1.,
cb_label='$\overline{v_R v_R}$ gaussianity p-value')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], skewness_stat_vbar_RR_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_RR_skew_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_R v_R}$ Skewness z-score')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], kurtosis_stat_vbar_RR_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_RR_kurt_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_R v_R}$ kurtosis z-score')
#Tangential Velocity vp
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], oscar_obj.vbar_p1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_p1_data.pdf', colormap = 'magma',
Norm = 'linear',
vmin=np.nanmin(oscar_obj.vbar_p1_dat_grid[:,phi_slice,:][counts_above_threshold]),
vmax=np.nanmax(oscar_obj.vbar_p1_dat_grid[:,phi_slice,:][counts_above_threshold]),
cb_label='Angular Velocity $\overline{v_\phi}$ [rad s$^{-1}$]',
counts_mask = counts_above_threshold)
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], gaussianity_pval_vbar_p1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_p1_gauss_pval.pdf', colormap = 'magma',
Norm = 'lognorm', vmin=1e-2, vmax=1.,
cb_label='$\overline{v_p}$ gaussianity p-value')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], skewness_stat_vbar_p1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_p1_skew_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_p}$ Skewness z-score')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], kurtosis_stat_vbar_p1_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_p1_kurt_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_p}$ kurtosis z-score')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:],
oscar_obj.vbar_p1_dat_grid[:,phi_slice,:]*oscar_obj.R_data_coords_mesh[:,phi_slice,:]*3.086E1, #picorad/s *
plot_folder + 'vbar_T1_data.pdf', colormap = 'nipy_spectral',
Norm = 'linear',
vmin=np.nanmin((oscar_obj.vbar_p1_dat_grid[:,phi_slice,:]*oscar_obj.R_data_coords_mesh[:,phi_slice,:]*3.086E1)[counts_above_threshold]),
vmax=np.nanmax((oscar_obj.vbar_p1_dat_grid[:,phi_slice,:]*oscar_obj.R_data_coords_mesh[:,phi_slice,:]*3.086E1)[counts_above_threshold]),
cb_label='Tangential velocity $\overline{v_p}$ [km s$^{-1}$]',
counts_mask = counts_above_threshold)
#Tilt Term vRvZ
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], oscar_obj.vbar_RZ_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_RZ_data.pdf', colormap = 'seismic',
Norm = 'symlognorm',
vmin=-np.nanmax(abs(oscar_obj.vbar_RZ_dat_grid[:,phi_slice,:][counts_above_threshold])),
vmax=np.nanmax(abs(oscar_obj.vbar_RZ_dat_grid[:,phi_slice,:][counts_above_threshold])),
linthresh = 200, linscale = 1.0,
cb_label='RZ velocity cross term $\overline{v_R v_Z}$ [km$^{2}$ s$^{-2}$]',
counts_mask = counts_above_threshold)
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], gaussianity_pval_vbar_RZ_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_RZ_gauss_pval.pdf', colormap = 'magma',
Norm = 'lognorm', vmin=1e-2, vmax=1.,
cb_label='$\overline{v_R v_Z}$ gaussianity p-value')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], skewness_stat_vbar_RZ_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_RZ_skew_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_R v_Z}$ Skewness z-score')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:], kurtosis_stat_vbar_RZ_dat_grid[:,phi_slice,:],
plot_folder + 'vbar_RZ_kurt_stat.pdf', colormap = 'magma',
Norm = 'linear', vmin=0., vmax=1.,
cb_label = '$\overline{v_R v_Z}$ kurtosis z-score')
plot_RZ_heatmap(oscar_obj.R_data_coords_mesh[:,phi_slice,:], oscar_obj.Z_data_coords_mesh[:,phi_slice,:],
oscar_obj.R_edges_mesh[:,phi_slice,:], oscar_obj.Z_edges_mesh[:,phi_slice,:],
oscar_obj.vbar_RZ_dat_grid[:,phi_slice,:] - oscar_obj.vbar_R1_dat_grid[:,phi_slice,:]*oscar_obj.vbar_Z1_dat_grid[:,phi_slice,:],
plot_folder + 'sigma_RZ_data.pdf', colormap = 'seismic',
Norm = 'symlognorm',
vmin=-np.nanmax(abs(oscar_obj.vbar_RZ_dat_grid[:,phi_slice,:][counts_above_threshold])),
vmax=np.nanmax(abs(oscar_obj.vbar_RZ_dat_grid[:,phi_slice,:][counts_above_threshold])),
linthresh = 200, linscale = 1.0,
cb_label='RZ velocity cross term $\sigma_{RZ} = \overline{v_R v_Z} \
- \overline{v_R}\,\overline{v_Z}$ [km$^{2}$ s$^{-2}$]',
counts_mask = counts_above_threshold)
def plot_correlation_matrix(oscar_obj):
    """Plot the full data correlation matrix plus one block per observable.

    The correlation matrix is laid out as 8 equally sized diagonal blocks
    (counts, then the velocity moments vR, vphi, vZ, vRvR, vpvp, vZvZ, vRvZ);
    each diagonal block is also plotted on its own.

    Parameters
    ----------
    oscar_obj : object
        Must expose ``data_corr`` (2-D correlation matrix) and
        ``R_data_coords_mesh`` (the data grid, whose size fixes the block
        length).
    """
    # Total correlation matrix across all observables.
    plot_matrix_heatmap(oscar_obj.data_corr, 'correlation_matrix_all.pdf')
    # Each observable occupies one block of side = number of grid cells.
    # .size is the number of elements — same value as len(mesh.flatten())
    # without materializing a flattened copy.
    block_size = oscar_obj.R_data_coords_mesh.size
    file_name_vec = ['correlation_matrix_counts.pdf',
                     'correlation_matrix_vbar_R1.pdf',
                     'correlation_matrix_vbar_p1.pdf',
                     'correlation_matrix_vbar_Z1.pdf',
                     'correlation_matrix_vbar_RR.pdf',
                     'correlation_matrix_vbar_pp.pdf',
                     'correlation_matrix_vbar_ZZ.pdf',
                     'correlation_matrix_vbar_RZ.pdf']
    # enumerate pairs each diagonal block with its output filename directly,
    # instead of indexing file_name_vec by a hard-coded range(0, 8).
    for NN, file_name in enumerate(file_name_vec):
        plot_matrix_heatmap(oscar_obj.data_corr[NN*block_size:(NN+1)*block_size,
                                                NN*block_size:(NN+1)*block_size],
                            file_name, colormap='seismic')
# plot_RZ_heatmap_and_lines(oscar_obj.R_data_coords_mesh, oscar_obj.Z_data_coords_mesh,
# oscar_obj.R_edges_mesh, oscar_obj.Z_edges_mesh,
# oscar_obj.nu_dat_grid[:,phi_slice,:], oscar_obj.nu_std_grid[:,phi_slice,:], oscar_obj.nu_std_grid[:,phi_slice,:],
# plot_folder + 'nu_data_w_line.pdf',colormap = 'magma',
# Norm = 'lognorm', cb_label='Tracer density stars [stars pc$^{-3}$]')
#
# plot_RZ_heatmap_and_lines(oscar_obj.R_data_coords_mesh, oscar_obj.Z_data_coords_mesh,
# oscar_obj.R_edges_mesh, oscar_obj.Z_edges_mesh,
# oscar_obj.vbar_Z1_dat_grid[:,phi_slice,:], oscar_obj.vbar_Z1_std_grid[:,phi_slice,:],oscar_obj.vbar_Z1_std_grid[:,phi_slice,:],
# plot_folder + 'vbar_Z1_data_w_line.pdf', colormap = 'seismic',
# Norm = 'linear',
# vmin=-np.nanmax(abs(oscar_obj.vbar_Z1_dat_grid[:,phi_slice,:][counts_above_threshold])),
# vmax=np.nanmax(abs(oscar_obj.vbar_Z1_dat_grid[:,phi_slice,:][counts_above_threshold])),
# cb_label='Vertical velocity $\overline{v_Z}$ [km s$^{-1}$]',
# counts_mask = counts_above_threshold)
# plot_RZ_heatmap_and_lines(oscar_obj.R_data_coords_mesh, oscar_obj.Z_data_coords_mesh,
# oscar_obj.R_edges_mesh, oscar_obj.Z_edges_mesh,
# oscar_obj.vbar_ZZ_dat_grid[:,phi_slice,:], oscar_obj.vbar_ZZ_std_grid[:,phi_slice,:],oscar_obj.vbar_ZZ_std_grid[:,phi_slice,:],
# plot_folder + 'vbar_ZZ_data_w_line.pdf', colormap = 'nipy_spectral',
# Norm = 'linear',
# vmin=np.nanmin(oscar_obj.vbar_ZZ_dat_grid[:,phi_slice,:][counts_above_threshold]),
# vmax=np.nanmax(oscar_obj.vbar_ZZ_dat_grid[:,phi_slice,:][counts_above_threshold]),
# cb_label='Vertical velocity $\overline{v_Z v_Z}$ [km$^{2}$ s$^{-2}$]',
# counts_mask = counts_above_threshold)
#
# plot_RZ_heatmap_and_lines(oscar_obj.R_data_coords_mesh, oscar_obj.Z_data_coords_mesh,
# oscar_obj.R_edges_mesh, oscar_obj.Z_edges_mesh,
# oscar_obj.vbar_R1_dat_grid[:,phi_slice,:], oscar_obj.vbar_R1_std_grid[:,phi_slice,:],oscar_obj.vbar_R1_std_grid[:,phi_slice,:],
# plot_folder + 'vbar_R1_data_w_line.pdf', colormap = 'seismic',
# Norm = 'linear',
# vmin=-np.nanmax(abs(oscar_obj.vbar_R1_dat_grid[:,phi_slice,:][counts_above_threshold])),
# vmax=np.nanmax(abs(oscar_obj.vbar_R1_dat_grid[:,phi_slice,:][counts_above_threshold])),
# cb_label='Radial velocity $\overline{v_R}$ [km s$^{-1}$]',
# counts_mask = counts_above_threshold)
#
# plot_RZ_heatmap_and_lines(oscar_obj.R_data_coords_mesh, oscar_obj.Z_data_coords_mesh,
# oscar_obj.R_edges_mesh, oscar_obj.Z_edges_mesh,
# oscar_obj.vbar_RR_dat_grid[:,phi_slice,:], oscar_obj.vbar_RR_std_grid[:,phi_slice,:],oscar_obj.vbar_RR_std_grid[:,phi_slice,:],
# plot_folder + 'vbar_RR_data_w_line.pdf', colormap = 'nipy_spectral',
# Norm = 'linear',
# vmin=np.nanmin(oscar_obj.vbar_RR_dat_grid[:,phi_slice,:][counts_above_threshold]),
# vmax=np.nanmax(oscar_obj.vbar_RR_dat_grid[:,phi_slice,:][counts_above_threshold]),
# cb_label='Radial velocity $\overline{v_R v_R}$ [km$^{2}$ s$^{-2}$]',
# counts_mask = counts_above_threshold)
#
#
# plot_RZ_heatmap_and_lines(oscar_obj.R_data_coords_mesh, oscar_obj.Z_data_coords_mesh,
# oscar_obj.R_edges_mesh, oscar_obj.Z_edges_mesh,
# oscar_obj.vbar_p1_dat_grid[:,phi_slice,:], oscar_obj.vbar_p1_std_grid[:,phi_slice,:],oscar_obj.vbar_p1_std_grid[:,phi_slice,:],
# plot_folder + 'vbar_p1_data_w_line.pdf', colormap = 'magma',
# Norm = 'linear',
# vmin=np.nanmin(oscar_obj.vbar_p1_dat_grid[:,phi_slice,:][counts_above_threshold]),
# vmax=np.nanmax(oscar_obj.vbar_p1_dat_grid[:,phi_slice,:][counts_above_threshold]),
# cb_label='Angular Velocity $\overline{v_\phi}$ [rad s$^{-1}$]',
# counts_mask = counts_above_threshold)
#
# plot_RZ_heatmap_and_lines(oscar_obj.R_data_coords_mesh, oscar_obj.Z_data_coords_mesh,
# oscar_obj.R_edges_mesh, oscar_obj.Z_edges_mesh,
# oscar_obj.vbar_p1_dat_grid[:,phi_slice,:]*oscar_obj.R_data_coords_mesh[:,phi_slice,:]*3.086E1,
# oscar_obj.vbar_p1_std_grid[:,phi_slice,:]*oscar_obj.R_data_coords_mesh[:,phi_slice,:]*3.086E1,
# oscar_obj.vbar_p1_std_grid[:,phi_slice,:]*oscar_obj.R_data_coords_mesh[:,phi_slice,:]*3.086E1,
# plot_folder + 'vbar_T1_data_w_line.pdf', colormap = 'nipy_spectral',
# Norm = 'linear',
# vmin=None,#np.amin(oscar_obj.vbar_p1_dat_grid[:,phi_slice,:][~np.isnan(oscar_obj.vbar_p1_dat_grid[:,phi_slice,:])]),
# vmax=None,#np.amax(oscar_obj.vbar_p1_dat_grid[:,phi_slice,:][~np.isnan(oscar_obj.vbar_p1_dat_grid[:,phi_slice,:])]),
# cb_label='Tangential velocity $\overline{v_p}$ [km s$^{-1}$]',
# counts_mask = counts_above_threshold)
#
# plot_RZ_heatmap_and_lines(oscar_obj.R_data_coords_mesh, oscar_obj.Z_data_coords_mesh,
# oscar_obj.R_edges_mesh, oscar_obj.Z_edges_mesh,
# oscar_obj.vbar_RZ_dat_grid[:,phi_slice,:], oscar_obj.vbar_RZ_std_grid[:,phi_slice,:],oscar_obj.vbar_RZ_std_grid[:,phi_slice,:],
# plot_folder + 'vbar_RZ_data_w_line.pdf', colormap = 'seismic',
# Norm = 'symlognorm',
# vmin=-np.nanmax(abs(oscar_obj.vbar_RZ_dat_grid[:,phi_slice,:][counts_above_threshold])),
# vmax=np.nanmax(abs(oscar_obj.vbar_RZ_dat_grid[:,phi_slice,:][counts_above_threshold])),
# linthresh = 200, linscale = 1.0,
# cb_label='RZ velocity cross term $\overline{v_R v_Z}$ [km$^{2}$ s$^{-2}$]',
# counts_mask = counts_above_threshold)
|
# Write a fixed set of strings to a user-supplied file.
filename = input('Enter new filename: ')
strings = [
    'Hey',
    'Men',
    'Aye\n',
    'Hello\n',
]
try:
    # The with-block guarantees the file is closed even if a write fails,
    # replacing the manual try/finally + close() dance. Opening inside the
    # try also reports open() failures instead of crashing unhandled.
    with open(filename, 'w') as f:
        f.writelines(strings)
except Exception as e:
    print('Error caught', e)
finally:
    print('Closed')
import os
import logging
class ScraperLogger:
    """
    Thin wrapper around stdlib logging: a named logger that writes DEBUG-level
    records to both ../logs/scraping.log and the console.
    """
    # Log file lives in ../logs relative to this module.
    log_file = os.path.join(os.path.dirname(__file__), '../logs/scraping.log')
    formatter = logging.Formatter('%(asctime)s | %(name)s | %(levelname)s | %(message)s')

    def __init__(self, name):
        """
        Create (or fetch) the logger called *name* and attach handlers.

        Parameters
        ----------
        name : str
            Logger name passed to logging.getLogger.
        """
        # exist_ok avoids the check-then-create race of the previous
        # os.path.exists + os.makedirs pair.
        os.makedirs(os.path.dirname(self.log_file), exist_ok=True)
        self.logger = logging.getLogger(name)
        self.logger.setLevel(logging.DEBUG)
        # getLogger returns the same object per name, so handlers must only be
        # attached once or every record would be emitted multiple times.
        # Building them inside the guard also avoids constructing (and opening
        # the file for) handlers that would just be thrown away.
        if not self.logger.handlers:
            # File handler
            fh = logging.FileHandler(self.log_file)
            fh.setLevel(logging.DEBUG)
            fh.setFormatter(self.formatter)
            # Commandline handler
            ch = logging.StreamHandler()
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(self.formatter)
            self.logger.addHandler(fh)
            self.logger.addHandler(ch)
|
import torch
from torch.distributions import Normal
from torch import nn
from torch.nn import functional as F
class Encoder(nn.Module):
    """Encodes each (x_i, y_i) pair into r_i and mean-pools them into r.

    Parameters
    ----------
    x_dim : int
        Dimension of x values.
    y_dim : int
        Dimension of y values.
    h_dim : int
        Dimension of hidden layer.
    r_dim : int
        Dimension of output representation r.
    """
    def __init__(self, x_dim, y_dim, h_dim, r_dim):
        super(Encoder, self).__init__()
        self.x_dim = x_dim
        self.y_dim = y_dim
        self.h_dim = h_dim
        self.r_dim = r_dim
        self.input_to_hidden = nn.Sequential(
            nn.Linear(x_dim + y_dim, h_dim),
            nn.ReLU(inplace=True),
            nn.Linear(h_dim, r_dim),
        )

    def forward(self, x, y):
        """Encode a batch of point sets.

        x : torch.Tensor
            Shape (batch_size, num_points, x_dim)
        y : torch.Tensor
            Shape (batch_size, num_points, y_dim)

        Returns a tensor of shape (batch_size, r_dim).
        """
        batch_size, num_points, _ = x.size()
        # Join x and y per point, then collapse batch and point dimensions so
        # the MLP sees one (x_dim + y_dim) row per point.
        pairs = torch.cat((x, y.contiguous()), dim=-1)
        flat = pairs.view(batch_size * num_points, self.x_dim + self.y_dim)
        # Per-point representations, restored to (B, N, r_dim).
        r_i = self.input_to_hidden(flat).view(batch_size, num_points, self.r_dim)
        # Permutation-invariant aggregation: mean over the point dimension.
        return r_i.mean(dim=1)
class MuSigmaEncoder(nn.Module):
    """
    Maps (x, y) sets to the mu and sigma defining the normal distribution
    from which the latent variable z is sampled.

    Parameters
    ----------
    x_dim, y_dim, h_dim : int
        Dimensions forwarded to the internal Encoder.
    r_dim : int
        Dimension of the aggregated representation r.
    z_dim : int
        Dimension of latent variable z.
    """
    def __init__(self, x_dim, y_dim, h_dim, r_dim, z_dim):
        super(MuSigmaEncoder, self).__init__()
        self.r_dim = r_dim
        self.z_dim = z_dim
        self.xy_to_r = Encoder(x_dim, y_dim, h_dim, r_dim)
        self.r_to_hidden = nn.Linear(r_dim, r_dim)
        self.hidden_to_mu = nn.Linear(r_dim, z_dim)
        self.hidden_to_sigma = nn.Linear(r_dim, z_dim)

    def forward(self, x, y):
        """Return (mu, sigma) for q(z | x, y).

        x : torch.Tensor
            Shape (batch_size, num_points, x_dim)
        y : torch.Tensor
            Shape (batch_size, num_points, y_dim)
        """
        rep = self.xy_to_r(x, y)
        hidden = torch.relu(self.r_to_hidden(rep))
        # Sigma convention from "Empirical Evaluation of Neural Process
        # Objectives" / "Attentive Neural Processes": squashed into (0.1, 1)
        # so it is bounded away from zero.
        sigma = 0.1 + 0.9 * torch.sigmoid(self.hidden_to_sigma(hidden))
        return self.hidden_to_mu(hidden), sigma
class Decoder(nn.Module):
    """
    Maps target inputs x_target and a per-point representation rep (the latent
    sample z concatenated with the deterministic representation r) to
    predictions y_target.

    Parameters
    ----------
    x_dim : int
        Dimension of x values.
    rep_dim : int
        Dimension of the per-point representation (z_dim + r_dim upstream).
    h_dim : int
        Dimension of hidden layers.
    y_dim : int
        Dimension of y values.
    fixed_sigma : float or None
        If not None, the predicted sigma is replaced by this constant.
    min_sigma : float
        Lower bound on the predicted sigma. Previously accepted but ignored
        (the bound was hard-coded to 0.1); the default min_sigma=0.1 upstream
        reproduces the old behavior exactly.
    """
    def __init__(self, x_dim, rep_dim, h_dim, y_dim, fixed_sigma, min_sigma):
        super(Decoder, self).__init__()
        self.x_dim = x_dim
        self.rep_dim = rep_dim
        self.h_dim = h_dim
        self.y_dim = y_dim
        self.fixed_sigma = fixed_sigma
        self.min_sigma = min_sigma
        layers = [nn.Linear(x_dim + rep_dim, h_dim),
                  nn.ReLU(inplace=True),
                  nn.Linear(h_dim, h_dim),
                  nn.ReLU(inplace=True)]
        self.xz_to_hidden = nn.Sequential(*layers)
        self.hidden_to_mu = nn.Linear(h_dim, y_dim)
        self.hidden_to_sigma = nn.Linear(h_dim, y_dim)

    def forward(self, x, rep):
        """
        x : torch.Tensor
            Shape (batch_size, num_points, x_dim)
        rep : torch.Tensor
            Shape (batch_size, num_points, rep_dim)

        Returns
        -------
        Returns mu and sigma for the output distribution. Both have shape
        (batch_size, num_points, y_dim).
        """
        batch_size, num_points, _ = x.size()
        # Flatten x and rep to fit with the linear layers.
        x_flat = x.view(batch_size * num_points, self.x_dim)
        rep_flat = rep.view(batch_size * num_points, self.rep_dim)
        # Input is the concatenation of the representation with every row of x.
        input_pairs = torch.cat((x_flat, rep_flat), dim=-1)
        hidden = self.xz_to_hidden(input_pairs)
        mu = self.hidden_to_mu(hidden)
        pre_sigma = self.hidden_to_sigma(hidden)
        # Reshape outputs into the expected (B, N, y_dim) shape.
        mu = mu.view(batch_size, num_points, self.y_dim)
        pre_sigma = pre_sigma.view(batch_size, num_points, self.y_dim)
        if self.fixed_sigma is None:
            # Sigma convention from "Empirical Evaluation of Neural Process
            # Objectives" / "Attentive Neural Processes", generalized so the
            # previously-unused min_sigma parameter takes effect
            # (min_sigma=0.1 gives the old 0.1 + 0.9 * softplus exactly).
            sigma = self.min_sigma + (1 - self.min_sigma) * F.softplus(pre_sigma)
        else:
            # full_like keeps device and dtype consistent with mu, unlike the
            # previous torch.Tensor(mu.shape) which allocated an uninitialized
            # CPU tensor before fill_.
            sigma = torch.full_like(mu, self.fixed_sigma)
        return mu, sigma
class NeuralProcess(nn.Module):
    """
    Implements Neural Process for functions of arbitrary dimensions.

    Parameters
    ----------
    x_dim : int
        Dimension of x values.
    y_dim : int
        Dimension of y values.
    r_dim : int
        Dimension of output representation r.
    z_dim : int
        Dimension of latent variable z.
    h_dim : int
        Dimension of hidden layer in encoder and decoder.
    fixed_sigma : float or None
        Forwarded to the Decoder; if set, the decoder emits this constant
        output sigma.
    min_sigma : float
        Forwarded to the Decoder as the lower bound on the predicted sigma.
    """
    def __init__(self, x_dim, y_dim, r_dim, z_dim, h_dim, fixed_sigma=None, min_sigma=0.1):
        super(NeuralProcess, self).__init__()
        self.x_dim = x_dim
        self.y_dim = y_dim
        self.r_dim = r_dim
        self.z_dim = z_dim
        self.h_dim = h_dim
        self.fixed_sigma = fixed_sigma
        # Model-variant tag.
        self.id = 'NP'
        # Initialize networks. The decoder consumes [z, r] concatenated per
        # point, hence rep_dim = z_dim + r_dim.
        self.xy_to_r = Encoder(x_dim, y_dim, h_dim, r_dim)
        self.xy_to_mu_sigma = MuSigmaEncoder(x_dim, y_dim, h_dim, r_dim, z_dim)
        self.xrep_to_y = Decoder(x_dim, z_dim+r_dim, h_dim, y_dim, fixed_sigma, min_sigma)

    def forward(self, x_context, y_context, x_target, y_target=None):
        """
        Given context pairs (x_context, y_context) and target points x_target,
        returns a distribution over target points y_target.

        Parameters
        ----------
        x_context : torch.Tensor
            Shape (batch_size, num_context, x_dim).
        y_context : torch.Tensor
            Shape (batch_size, num_context, y_dim)
        x_target : torch.Tensor
            Shape (batch_size, num_target, x_dim)
        y_target : torch.Tensor or None
            Shape (batch_size, num_target, y_dim). Only used during training.

        Returns
        -------
        In training mode the triple (p_y_pred, q_target, q_context); in eval
        mode just p_y_pred. All three are torch Normal distributions.

        Note
        ----
        We follow the convention given in "Empirical Evaluation of Neural
        Process Objectives" where context is a subset of target points. This was
        shown to work best empirically.
        """
        # Infer quantities from tensor dimensions
        batch_size, num_context, x_dim = x_context.size()
        _, num_target, _ = x_target.size()
        _, _, y_dim = y_context.size()
        if self.training:
            # Encode target and context (context needs to be encoded to
            # calculate kl term)
            # NOTE(review): despite its name, r_target is computed from the
            # *context* set — confirm whether encoding the target set was
            # intended here.
            r_target = self.xy_to_r(x_context, y_context) # B x r_dim <-- B x N x xy_dim
            mu_target, sigma_target = self.xy_to_mu_sigma(x_target, y_target)
            mu_context, sigma_context = self.xy_to_mu_sigma(x_context, y_context)
            # Sample from encoded distribution using reparameterization trick
            q_target = Normal(mu_target, sigma_target)
            q_context = Normal(mu_context, sigma_context)
            z_sample = q_target.rsample()
            # Repeat z (and r), so it can be concatenated with every x. This changes shape
            # from (batch_size, z_dim) to (batch_size, num_points, z_dim)
            z_sample = z_sample.unsqueeze(1).repeat(1, num_target, 1)
            r = r_target.unsqueeze(1).repeat(1, num_target, 1)
            rep = torch.cat([z_sample, r], dim=-1)
            # Get parameters of output distribution (Decoder)
            y_pred_mu, y_pred_sigma = self.xrep_to_y(x_target, rep)
            p_y_pred = Normal(y_pred_mu, y_pred_sigma)
            return p_y_pred, q_target, q_context
        else:
            # At testing time, encode only context
            r_context = self.xy_to_r(x_context, y_context)
            mu_context, sigma_context = self.xy_to_mu_sigma(x_context, y_context)
            # Sample from distribution based on context
            q_context = Normal(mu_context, sigma_context)
            z_sample = q_context.rsample()
            # Repeat z, so it can be concatenated with every x. This changes shape
            # from (batch_size, z_dim) to (batch_size, num_points, z_dim)
            z_sample = z_sample.unsqueeze(1).repeat(1, num_target, 1)
            r = r_context.unsqueeze(1).repeat(1, num_target, 1)
            rep = torch.cat([z_sample, r], dim=-1)
            # Predict target points based on context
            y_pred_mu, y_pred_sigma = self.xrep_to_y(x_target, rep)
            p_y_pred = Normal(y_pred_mu, y_pred_sigma)
            return p_y_pred

    def sample_z(self, x_context, y_context, num_target=1):
        """Sample a latent z from q(z | context).

        Returns (z_sample, q_context) where z_sample has shape
        (batch_size, num_target, z_dim) and q_context is the Normal it was
        drawn from.
        """
        mu_context, sigma_context = self.xy_to_mu_sigma(x_context, y_context)
        # Sample from distribution based on context
        q_context = Normal(mu_context, sigma_context)
        z_sample = q_context.rsample()
        # Repeat z, so it can be concatenated with every x. This changes shape
        # from (batch_size, z_dim) to (batch_size, num_points, z_dim)
        z_sample = z_sample.unsqueeze(1).repeat(1, num_target, 1)
        return z_sample, q_context
|
'''
写一个函数get_score()来获取用户输入的学生成绩(0-100的整数),如果输入
出现错误,则此函数返回0,如果用户输入的数是0-100之间的数,返回这个数
'''
def get_score():
    """Read a student score from stdin and return it.

    Returns the entered integer when it lies in (0, 100]; returns 0 for
    out-of-range values AND for non-numeric input. The module docstring
    promises "return 0 on input error", but the previous version let
    int() raise ValueError instead — that is now handled here.
    """
    score = input("输入学生成绩:")
    try:
        i = int(score)
    except ValueError:
        # Non-numeric input counts as an input error -> score 0.
        return 0
    if 0 < i <= 100:
        return i
    return 0
# Read one score; a non-numeric entry raises ValueError from int(), which is
# translated into a score of 0 here.
try:
    student_score = get_score()
except ValueError:
    student_score = 0
print('学生成绩:', student_score)
"""Models for asteroids app."""
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
from flask_login import UserMixin
db = SQLAlchemy()
class User(UserMixin, db.Model):
    """A user account."""

    __tablename__ = 'users'

    user_id = db.Column(db.Integer,
                        autoincrement=True,
                        primary_key=True)
    username = db.Column(db.String, unique=True)
    fname = db.Column(db.String)
    lname = db.Column(db.String)
    email = db.Column(db.String, unique=True)
    password = db.Column(db.String)

    def __repr__(self):
        return f'<User user_id={self.user_id} email={self.email} username={self.username}>'

    def is_active(self):
        """True, as all users are active."""
        return True

    def get_id(self):
        """Return the user id, as a string, for Flask-Login.

        Flask-Login requires get_id to return a unicode string so the id can
        round-trip through the session; returning the raw int broke that.
        """
        return str(self.user_id)

    def is_authenticated(self):
        """Return True if the user is authenticated.

        The previous body returned ``self.authenticated``, an attribute never
        defined on this model, so any call raised AttributeError. Users of
        this app are authenticated whenever a User object exists.
        """
        return True
class Favorite(db.Model):
    """Association between a user and an asteroid they favorited."""

    __tablename__ = 'favorites'

    favorite_id = db.Column(db.Integer,
                            autoincrement=True,
                            primary_key=True)
    # Foreign keys to the owning user and the favorited asteroid.
    user_id = db.Column(db.Integer, db.ForeignKey('users.user_id'))
    asteroid_id = db.Column(db.Integer, db.ForeignKey('asteroids.asteroid_id'))
    # Relationships expose .users / .asteroids on a Favorite and add a
    # .favorites backref on User and Asteroid.
    users = db.relationship('User', backref='favorites')
    asteroids = db.relationship('Asteroid', backref='favorites')

    def __repr__(self):
        return f'<Favorite favorite_id={self.favorite_id} user_id={self.user_id} asteroid_id={self.asteroid_id}>'
class Asteroid(db.Model):
    """Asteroid information as fetched from the external API."""

    __tablename__ = 'asteroids'

    asteroid_id = db.Column(db.Integer,
                            autoincrement=True,
                            primary_key=True)
    # Asteroid id as assigned by the upstream API (distinct from our PK).
    api_asteroid_id = db.Column(db.Integer)
    name = db.Column(db.String)
    potentially_hazardous = db.Column(db.String) #values can be T or F or/and 1 or 0
    close_approach_date = db.Column(db.DateTime)
    nasa_jpl_url = db.Column(db.String)
    # Velocity of the close approach, in both unit systems.
    relative_velocity_kilometers_per_hour = db.Column(db.Float)
    relative_velocity_miles_per_hour = db.Column(db.Float)
    orbiting_body = db.Column(db.String)
    # Closest distance of the approach, in both unit systems.
    miss_distance_kilometers = db.Column(db.Float)
    miss_distance_miles = db.Column(db.Float)
    # Estimated diameter range, in both unit systems.
    estimated_diameter_kilometers_min = db.Column(db.Float)
    estimated_diameter_kilometers_max = db.Column(db.Float)
    estimated_diameter_miles_min = db.Column(db.Float)
    estimated_diameter_miles_max = db.Column(db.Float)

    def __repr__(self):
        return f'<Asteroid asteroid_id={self.asteroid_id} name={self.name} nasa_jpl_url={self.nasa_jpl_url}>'
def connect_to_db(flask_app, db_uri='postgresql:///asteroidsdb', echo=True):
    """Configure *flask_app* for SQLAlchemy and bind the shared db instance.

    Parameters
    ----------
    flask_app : Flask
        Application to attach the database to.
    db_uri : str
        SQLAlchemy database URI.
    echo : bool
        When True, SQLAlchemy logs every SQL statement it executes.
    """
    flask_app.config.update(
        SQLALCHEMY_DATABASE_URI=db_uri,
        SQLALCHEMY_ECHO=echo,
        SQLALCHEMY_TRACK_MODIFICATIONS=False,
    )
    db.app = flask_app
    db.init_app(flask_app)
    print('Connected to the db!')
if __name__ == '__main__':
    # Running this module directly connects the Flask app to the database.
    from server import app

    # Call connect_to_db(app, echo=False) if your program output gets
    # too annoying; this will tell SQLAlchemy not to print out every
    # query it executes.
    connect_to_db(app)
def send_email(text):
    """Send *text* as the body of a "Device Information" email.

    Connection settings and addresses come from the project-local secrets
    module. Failures are reported to stdout rather than raised, keeping the
    original best-effort behavior.
    """
    import smtplib
    # NOTE(review): "secrets" here is a project-local config module, which
    # shadows the stdlib secrets module — consider renaming it.
    from secrets import sender_address
    from secrets import sender_password
    from secrets import sender_server
    from secrets import sender_port
    from secrets import recipient_address
    try:
        message = "From: " + sender_address + "\nTo: " + recipient_address + "\nSubject: Device Information\n\n" + text
        server = smtplib.SMTP(sender_server, sender_port)
        server.ehlo()
        # Upgrade the connection to TLS before authenticating.
        server.starttls()
        server.login(sender_address, sender_password)
        server.sendmail(sender_address, recipient_address, message)
        server.close()
        print("Message sent:\n", message)
    except Exception as e:
        # Narrowed from a bare except (which also swallowed KeyboardInterrupt
        # and SystemExit) and now reports what actually failed.
        print("failed to send email:", e)
def send_sms(text):
    """Send *text* as an SMS via Twilio to a hard-coded recipient."""
    from twilio.rest import Client

    # SECURITY: the Twilio account SID, auth token, and both phone numbers
    # are hard-coded here (and therefore live in source control). Move them
    # into the local secrets module / environment variables and rotate the
    # token.
    account_sid = 'AC96bfff1c6422104b584ad9788ea2ece8'
    auth_token = '3e3839be893f6f2eee47d408cf983f75'
    client = Client(account_sid, auth_token)
    # message.sid (unused) would uniquely identify the sent message.
    message = client.messages.create(
        body=text,
        from_='+13476953808',
        to='+17075839017',
    )
def _parse_vcgencmd_temp(output):
    """Extract the numeric Celsius value from vcgencmd output like "temp=42.8'C"."""
    return float(output.split("=")[1].split("'")[0])

def get_raspi_temp():
    """Return the Raspberry Pi SoC temperature in degrees Celsius.

    Invokes the VideoCore tool `vcgencmd measure_temp` directly and parses
    its stdout, instead of the previous shell pipeline that round-tripped
    the value through a temp.txt file (shell=True plus create/read/delete of
    a scratch file).
    """
    import subprocess
    out = subprocess.check_output(["/opt/vc/bin/vcgencmd", "measure_temp"],
                                  universal_newlines=True)
    return _parse_vcgencmd_temp(out)
# Temperature (degrees C) above which warning notifications are sent.
threshold_temp = 40
current_temp = get_raspi_temp()
message = f"Your RasPi temperature is {current_temp}"
print(message)
if current_temp > threshold_temp:
    print('TOO HOT!!!!! sending email and sms')
    warning = "warning! " + message
    send_email(warning)
    send_sms(warning)
|
# Databricks notebook source
# Function to call connection config
# Function to call connection config
class load_connect_config():
    """Load the key-vault scope and secret URL for a given config id.

    Joins de_barney.test_process_metadata_events_v4 against
    de_barney.conn_config via Spark SQL and keeps the first matching row.
    """
    def __init__(self, p_config_id):
        # NOTE(review): the query is built with %-interpolation; this is only
        # safe if p_config_id is a trusted integer — never pass user input.
        table_metadata_query = "select \
        conn.kv_scope, conn.secret_url \
        from de_barney.test_process_metadata_events_v4 pm \
        inner join de_barney.conn_config conn\
        on \
        pm.environment_name = conn.env and pm.conn_name = conn.db_name \
        where config_id = %s" %(p_config_id)
        # `spark` is the SparkSession global provided by the Databricks runtime.
        r_conn_info=spark.sql(table_metadata_query).rdd.first()
        self.kv_scope = r_conn_info[0]
        # (sic) the "secrect" spelling is kept because get_secret_key reads it.
        self.secrect_key = r_conn_info[1]
    def get_kv_scope(self):
        # Accessor for the kv_scope column of the matched row.
        return self.kv_scope
    def get_secret_key(self):
        # Accessor for the secret_url column of the matched row.
        return self.secrect_key
# COMMAND ----------
# connect_info = load_connect_config(3)
# connect_info.get_kv_scope()
|
def bubble_sort(items):
    """Sort *items* in place (ascending) with bubble sort; return the list."""
    swapped = True
    while swapped:
        swapped = False
        for idx in range(len(items) - 1):
            if items[idx] > items[idx + 1]:
                # Tuple swap of the out-of-order neighbours.
                items[idx], items[idx + 1] = items[idx + 1], items[idx]
                swapped = True
    return items
def selection_sort(items):
    """Sort *items* in place (ascending) with selection sort; return the list."""
    for fill in range(len(items) - 1):
        # Index of the smallest remaining element (first one on ties,
        # matching the strict '<' scan of a classic selection sort).
        smallest = min(range(fill, len(items)), key=items.__getitem__)
        if smallest != fill:
            items[fill], items[smallest] = items[smallest], items[fill]
    return items
# Demo: sort a sample list in place and show the result.
unsorted_list = [9,8,7,6,5,2,1,4]
print(selection_sort(unsorted_list))
|
#!/usr/bin/python
# Python 2 script: bridges a TCP socket to an SPI-attached ATtiny.
# Newline-terminated commands from a TCP client are clocked out over SPI
# and the ATtiny's reply bytes are sent back to the client.
import socket
import time
import spidev
import array
from subprocess import call, PIPE
TCP_IP = '192.168.0.116'
TCP_PORT = 5044
# Reset Attiny
# Pulse BCM pin 22 low for 100 ms to hard-reset the ATtiny.
call("gpio -g mode 22 out", shell = True)
call("gpio -g write 22 0", shell = True)
time.sleep(0.1)
call("gpio -g write 22 1", shell = True)
#call("sudo raspi-gpio set 22 op dl", shell = True)
#time.sleep(0.1)
#call("sudo raspi-gpio set 22 op dh", shell = True)
# Open SPI interface
spi = spidev.SpiDev()
spi.open(0,0)
spi.max_speed_hz = 10000
spi.mode = 0b00
# Open TCP server socket
print("open TCP server socket")
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((TCP_IP, TCP_PORT))
# Outer loop: accept one client at a time, forever.
while(True):
    print("Waiting for TCP client ...")
    sock.listen(True)
    conn, addr = sock.accept()
    print("Connected: " + addr[0]);
    conn.setblocking(1)
    #call("sudo raspi-gpio set 22 op dl", shell = True)
    #time.sleep(0.1)
    #call("sudo raspi-gpio set 22 op dh", shell = True)
    # Reset the ATtiny again at the start of each client session.
    call("gpio -g mode 22 out", shell = True)
    call("gpio -g write 22 0", shell = True)
    time.sleep(0.1)
    call("gpio -g write 22 1", shell = True)
    # Inner loop: one newline-terminated request per iteration.
    while(True):
        try:
            print 'Waiting for incoming data from TCP client...'
            # NOTE(review): 'input' and (below) 'str' shadow Python builtins.
            input = ""
            # Accumulate one byte at a time until a full line has arrived.
            while not input.endswith('\n'):
                data = conn.recv(1)
                if not data:
                    break
                input += data
            if input:
                print 'TCP-Input', input
                # Convert the line to a byte list; xfer2 returns the bytes
                # shifted back in by the ATtiny during the transfer.
                chars = []
                for c in input:
                    chars.append(ord(c))
                res = spi.xfer2(chars)
                time.sleep(0.1)
                str = ''
                for i in res:
                    str += chr(i)
                str += '\n'
                # The first response byte is dropped -- presumably the SPI
                # echo of the first request byte; confirm against firmware.
                conn.send(str[1:])
                print "attiny: ", str[1:]
                # "gp" commands appear to need a second dummy transfer
                # (seven '0' bytes) to fetch the actual payload.
                if "gp" in input:
                    time.sleep(0.1)
                    chars = [48,48,48,48,48,48,48]
                    res = spi.xfer2(chars)
                    str = ''
                    for i in res:
                        str += chr(i)
                    str += '\n'
                    print "attiny-res: ", str[1:]
                    conn.send(str[1:])
            else:
                # connection was closed by client -> exit loop
                print 'connection was closed by client'
                break
        except socket.error, e:
            print 'socket error occurred'
            break
|
#! /usr/bin/env python
# ROS node: republishes FakeSensor readings as yaw-only Quaternions.
import rospy, tf
from math import pi
from geometry_msgs.msg import Quaternion
from fake_sensor import FakeSensor
# Convert from radians to Quaternion
def make_quaternion(angle):
    # Yaw-only rotation (roll = pitch = 0).
    q = tf.transformations.quaternion_from_euler(0, 0, angle)
    return Quaternion(*q)
def publish_value(value):
    # Map a sensor reading (apparently 0..100) onto a full turn in radians
    # and publish it on the 'angle' topic.
    angle = value * 2 * pi / 100.0
    q = make_quaternion(angle)
    pub.publish(q)
if __name__ == '__main__':
    # NOTE(review): ``global`` at module level is a no-op; ``pub`` becomes a
    # module global by assignment alone.
    global pub
    rospy.init_node('fake_sensor')
    pub = rospy.Publisher('angle', Quaternion, queue_size=10)
    sensor = FakeSensor()
    # NOTE(review): double attribute access ``sensor.sensor`` -- verify that
    # FakeSensor really exposes a nested ``sensor`` with register_callback.
    sensor.sensor.register_callback(publish_value)
|
//Use xrange() if you ever have break. range() pre-creates the list and that can be memory inefficieint.
//The code below is a bit ugly because I used some measures to optimize it.
//Solution by Andrew Xing
n = input()
breaker = False
if n == 1 or n == 2:
print 2
elif n == 3:
print 3
elif n == 5 or n == 4:
print 5
else:
for x in xrange(n, n*2):
if breaker == True:
break
if x % 2 == 0 or x % 3 == 0 or x % 5 == 0:
continue
else:
for y in xrange(2, int(x**0.5)):
if x % y == 0:
break
else:
print x
breaker = True
|
# coding utf-8
# @time :2019/6/12 10:39
# @Author :zjunbin
# @Email :648060307@qq.com
# @File :OrganizationManagement.py
import time
from common.basepage import BasePage
from PageLocator.OrgManagement import OrgManage as om
class OrgManagemnet(BasePage):
    """Selenium page object for the organisation-structure management screens.

    NOTE(review): the class name has a typo ("Managemnet"); kept as-is
    because renaming would break importers.
    """
    def loginPage(self):
        # Open the "more features" menu, then the org-structure entry.
        self.wait_element_Visible(locator=om.mor,model_name="更多功能")
        self.click_element("更多功能",locator=om.mor)
        self.wait_element_Visible(locator=om.bicycle_menu,model_name="组织结构管理")
        self.click_element("组织结构管理",locator=om.bicycle_menu)
    def new_org(self,pcnumber,pcname,pltype,areas):
        """Fill in and submit the "new organisation" form.

        :param pcnumber: station number (派出所编号)
        :param pcname: station name (派出所名称)
        :param pltype: organisation type, selected by dropdown text
        :param areas: owning area, selected by dropdown text
        """
        # The form lives inside two nested iframes.
        self.switch_iframe(iframe=om.ifram411)
        self.switch_iframe(iframe=om.iframStaiton)
        self.click_element(model_name='新建按钮',locator=om.btnAdd)
        # self.switch_iframe(iframe=om.iframeDiv)
        self.switch_iframe(iframe=om.iframeAdd)
        self.wait_element_Visible(locator=om.txtPcNumber,model_name="派出所编号")
        self.input_text(model_name='派出所编号',locator=om.txtPcNumber,value=pcnumber)
        self.input_text(model_name='派出所名称',locator=om.txtPcName,value=pcname)
        self.selector(locator=om.drpPLType, model_name='组织机构类型',type='text', value=pltype)
        self.selector(locator=om.drpAreas, model_name='所属区域',type='text', value=areas)
        self.click_element(model_name='新增提交',locator=om.btnSave)
    def success_message(self):
        # Return the toast text shown after a successful creation.
        text= self.get_element_text(model_name='新增成功',locator=om.success_message)
        return text
from dataclasses import dataclass
from flask import render_template, Blueprint
from flask_login import login_required, current_user
from app.permissions.permissions import army_name_required
# Blueprint for the authenticated landing page; templates live in ./templates.
base = Blueprint('base', __name__, template_folder='templates')
@dataclass
class UserResources:
    """One army resource as shown in the UI."""
    # Quantity of the resource the current user's army owns.
    amount: int
    # Icon/picture name used to render the resource.
    picture: str
def get_user_resources():
    """Build the resource-name -> UserResources mapping for the logged-in
    user's army (coin, metal, wood, field, diamond)."""
    army = current_user.army
    names = ("coin", "metal", "wood", "field", "diamond")
    return {name: UserResources(getattr(army, name), name) for name in names}
@base.route("/")
@login_required
@army_name_required
def index():
    # Landing page: renders the base template with the user's army resources.
    user_resources = get_user_resources()
    return render_template('base/index.html', user_resources=user_resources)
|
import sys
import pickle
import random
import numpy as np
from difflib import SequenceMatcher as Sqm
# Fraction of the samples that becomes the training split; the rest is test.
split_ratio=float(sys.argv[1])
# Load the precomputed pickles produced by earlier pipeline stages.
f=open("/home/ubuntu/results/saliency/featured.pkl","rb")
featured_list=pickle.load(f)
f.close()
f=open("/home/ubuntu/results/saliency/distanced.pkl","rb")
dis_list=pickle.load(f)
f.close()
f=open("/home/ubuntu/results/saliency/keywords_list","rb")
keywords_list=pickle.load(f)
f.close()
f=open("/home/ubuntu/results/ontology/ontology_wordlist.pkl","rb")
word_list=pickle.load(f)
f.close()
f=open("/home/ubuntu/results/ontology/ontology_word2taxonomy.pkl","rb")
word2tvec=pickle.load(f)
f.close()
f=open("/home/ubuntu/results/saliency/idf.pkl","rb")
idf=pickle.load(f)
f.close()
f=open("/home/ubuntu/results/saliency/centrality.pkl","rb")
centrality=pickle.load(f)
f.close()
# Build labelled samples: one row per abstract entity, labelled 1 when the
# entity appears in the title/keyword annotations or fuzzy-matches one of
# the extracted keywords.
sample_prelist=[]
count=0
p_count=0
for i in range(0,len(dis_list)):
    # Skip documents with no abstract entities or no title/keyword labels.
    if not featured_list[i]['abs']:
        continue
    if not dis_list[i]['title'] and not dis_list[i]['keywords']:
        continue
    label_set=set()
    body_set=set()
    for item in dis_list[i]['title']:
        label_set.add(item[1])
    for item in dis_list[i]['keywords']:
        label_set.add(item[1])
    for item in dis_list[i]['body']:
        body_set.add(item[1])
    for key in featured_list[i]['abs'].keys():
        # count+=1
        label=0
        if key in label_set:
            label=1
        # Fuzzy match against extracted keywords (ratio >= 0.5).
        for kw in keywords_list[i]:
            if Sqm(None,kw,key.lower()).ratio()>=0.5:
                label=1
                break
        # p_count+=label
        if key in word_list:
            k_idf=idf[word_list.index(key)]
        else:
            # Out-of-vocabulary entities get zero idf.
            k_idf=0.0
        sample_prelist.append([key,featured_list[i]['abs'][key][0],featured_list[i]['abs'][key][1]-featured_list[i]['abs'][key][0],featured_list[i]['abs'][key][2],k_idf,centrality[key],label]) #(str)entity, (float)distance, (float)spread, (int)count, (float)idf, (float)centrality, label
split=int(split_ratio*len(sample_prelist))
# key_phrase[j] counts positive labels of vocabulary word j in the
# training split only.
key_phrase=[0 for i in range(0,len(word_list))]
for i in range(0,split):
    try:
        key_phrase[word_list.index(sample_prelist[i][0])]+=sample_prelist[i][6]
    except:
        # NOTE(review): bare except silently skips out-of-vocabulary words;
        # catching ValueError (or testing `in word_list`) would be safer.
        pass
f=open("/home/ubuntu/results/saliency/keyphrase.pkl","wb")
pickle.dump(key_phrase,f)
f.close()
# Assemble feature vectors: taxonomy dims (assumed 16 to match the zero
# fallback -- TODO confirm) + [keyphrase, distance, spread, count, idf,
# centrality, label].
training_list=[]
for i in range(0,split):
    if sample_prelist[i][0] in word_list:
        kpns=key_phrase[word_list.index(sample_prelist[i][0])]
        _list=list(np.array(word2tvec[sample_prelist[i][0]])+1.0)
    else:
        kpns=0.0
        _list=[0.0 for i in range(0,16)]
    _list.extend([kpns,sample_prelist[i][1],sample_prelist[i][2],sample_prelist[i][3],sample_prelist[i][4],sample_prelist[i][5],sample_prelist[i][6]])
    training_list.append(_list)
f=open("/home/ubuntu/results/saliency/trainlist.pkl","wb")
pickle.dump(training_list,f)
f.close()
# Same featurisation for the held-out test split.
test_list=[]
for i in range(split,len(sample_prelist)):
    if sample_prelist[i][0] in word_list:
        kpns=key_phrase[word_list.index(sample_prelist[i][0])]
        _list=list(np.array(word2tvec[sample_prelist[i][0]])+1.0)
    else:
        kpns=0.0
        _list=[0.0 for i in range(0,16)]
    _list.extend([kpns,sample_prelist[i][1],sample_prelist[i][2],sample_prelist[i][3],sample_prelist[i][4],sample_prelist[i][5],sample_prelist[i][6]])
    test_list.append(_list)
f=open("/home/ubuntu/results/saliency/testlist.pkl","wb")
pickle.dump(test_list,f)
f.close()
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import numpy as np
import cv2, sys, time, os
from pantilthat import *
from pan_tilt.msg import MsgState
# from sensor_msgs.msg import Image #이미지 캡쳐
# from cv_bridge import CvBridge, CvBridgeError
class PID:
    """Container for PID controller gains; the control law itself is
    applied inline in the tracking loop of this script."""
    def __init__(self, kP=1, kI=0, kD=0):
        # initialize gains
        self.kP = kP  # proportional gain
        self.kI = kI  # integral gain
        self.kD = kD  # derivative gain
if __name__ == "__main__":
    # Face-tracking loop: detect a face with a Haar cascade and drive the
    # pan/tilt servos so the face stays centred in frame.
    # rospy.init_node('pantilt_node', anonymous=False)
    face_cascade = cv2.CascadeClassifier('./pretrained_data/data/haarcascades/haarcascade_frontalface_default.xml')
    cap = cv2.VideoCapture(0)
    # Load the BCM V4l2 driver for /dev/video0
    os.system('sudo modprobe bcm2835-v4l2')
    # Set the framerate ( not sure this does anything! )
    os.system('v4l2-ctl -p 8')
    # bridge=CvBridge()
    # rasimage_pub = rospy.Publisher('rasimage', Image, queue_size=10)
    # Home position for the rig, in degrees.
    cam_pan = 0
    cam_tilt = 50
    pan(cam_pan) # Turn the camera to the default position
    tilt(cam_tilt)
    light_mode(WS2812)
    pid = PID(4, 0.8, 0.5)
    start_time = time.time()
    # Previous-frame tracking error and timestamp for the derivative term.
    error_x_prev = 0.
    error_y_prev = 0.
    time_prev = 0.
    dt_sleep = 0.01
    while cap is not None:
        ret, img = cap.read()
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        # flip(-1) flips both axes, flip(1) flips horizontally again --
        # net effect is a vertical flip matching the camera mounting.
        gray = cv2.flip(gray, -1)
        gray = cv2.flip(gray, 1)
        height, width = gray.shape
        faces = face_cascade.detectMultiScale(gray, 1.3, 5)
        time_cur = time.time()
        for (x,y,w,h) in faces:
            gray = cv2.rectangle(gray,(x,y),(x+w,y+h),(255,0,0),2)
            roi_gray = gray[y:y+h, x:x+w]
            roi_color = img[y:y+h, x:x+w]
            # Offset of the face centre from the frame centre, normalised
            # to [-1, 1] per axis.
            error_x = (x+w/2.0) - (width/2.0)
            error_y = (y+h/2.0) - (height/2.0)
            error_x /= (width/2.0) # VFOV
            error_y /= (height/2.0) # HFOV
            de_x = error_x - error_x_prev
            de_y = error_y - error_y_prev
            dt = time_cur - time_prev
            # cam_pan += error_x * 5
            # cam_tilt += error_y * 5
            # pid.kP * error_x + pid.kD * de_x / dt + pid.kI * error_x * dt
            # print(time_cur, " ", time_prev, " ", dt)
            # print(pid.kP * error_x, " ", pid.kD * de_x / dt, " ", pid.kI * error_x * dt)
            # NOTE(review): the kI term multiplies the instantaneous error
            # by dt rather than an accumulated integral, so this is not a
            # true PID integral.  The dt < 10 guard also skips the very
            # first detection (time_prev starts at 0, so dt is huge).
            if dt < 10 :
                cam_pan += pid.kP * error_x + pid.kD * de_x / dt + pid.kI * error_x * dt
                cam_tilt += pid.kP * error_y + pid.kD * de_y / dt + pid.kI * error_y * dt
            # Only command the servos within their +/-90 degree range.
            if (cam_pan >= -90 and cam_pan <= 90) and (cam_tilt >= -90 and cam_tilt <= 90) :
                pan(cam_pan) # Turn the camera to the default position
                tilt(cam_tilt)
            error_x_prev = error_x
            error_y_prev = error_y
            time_prev = time_cur
            # print("width : ",width , "height : ", height)
            # print("box_x : ",x , "box_y : ", y)
            # print("x : ", cam_pan, "y : ", cam_tilt)
        # rasimage = gray
        # rasimage_msg = bridge.cv2_to_imgmsg(rasimage, encoding="passthrough")
        # rasimage_msg.encoding="mono8"
        # rasimage_pub.publish(rasimage_msg)
        cv2.imshow('img',gray)
        time.sleep(dt_sleep)
        # ESC exits the loop.
        k = cv2.waitKey(30) & 0xff
        if k == 27:
            break
    cap.release()
    # cv2.destroyAllWindows()
# https://towardsdatascience.com/text-summarization-96079bf23e83
# Extractive summariser: scores sentences by normalised word frequency and
# prints the top-scoring sentences of every .txt file under data/.
import glob
import os
import nltk
import re
import heapq
from collections import Counter

stop_words = set(nltk.corpus.stopwords.words('english'))
stop_words.add('figure')

max_sentence_length = 30   # longer sentences are never summary candidates
max_summary_sentences = 3

os.chdir('data')
for text_file in glob.glob('*.txt'):
    with open(text_file, 'r', encoding='windows-1252') as f:
        text = f.read()

    # Clean: strip citation markers like [12] and collapse whitespace.
    text = text.strip()
    text = re.sub(r'\[[0-9]*\]', ' ', text)
    text = re.sub(r'\s+', ' ', text)

    # Lower-cased, punctuation/digit-free stream for word frequencies.
    word_stream = text.lower()
    word_stream = re.sub(r'\W', ' ', word_stream)
    word_stream = re.sub(r'\d', ' ', word_stream)
    word_stream = re.sub(r'\s+', ' ', word_stream)

    # Word frequencies (stop words excluded), normalised by the max count.
    word_count = Counter(w for w in nltk.word_tokenize(word_stream)
                         if w not in stop_words)
    max_count = max(word_count.values())
    word_count = Counter({word: count / max_count
                          for word, count in word_count.items()})

    # Score sentences.
    # BUG FIX 1: the tokenised sentences were filtered independently and
    # then zip()ed with the unfiltered raw sentences, so as soon as one
    # long sentence was dropped every later sentence was scored with the
    # wrong tokens.  Tokenise and filter each sentence together instead.
    # BUG FIX 2: scores were negated ("because min heap") but then fed to
    # heapq.nlargest, which therefore selected the LEAST important
    # sentences.  Accumulate positive scores and let nlargest pick the top.
    sentence_score = {}
    for sentence in nltk.sent_tokenize(text):
        words = nltk.word_tokenize(sentence.lower())
        if len(words) > max_sentence_length:
            continue
        if words:
            # Counter returns 0 for unknown/stop words.
            sentence_score[sentence] = sum(word_count[w] for w in words)

    # Print top sentences
    summary_sentences = heapq.nlargest(
        max_summary_sentences, sentence_score, key=sentence_score.get)
    print(text_file)
    print(' '.join(summary_sentences))
    print()
|
import pandas as pd
import matplotlib.pyplot as plt
from os import path
from os import makedirs
# Directory where all generated reports (CSVs and figures) are written.
dir_path = 'reports'


def list_to_csv(l, name='dataframe.csv'):
    """Write *l* to ``reports/<name>``.

    ``l[0]`` must be the list of column names and ``l[1:]`` the data rows.
    """
    # exist_ok avoids the check-then-create race of the old helper.
    makedirs(dir_path, exist_ok=True)
    # BUG FIX: columns must be the header row itself (l[0]); the old
    # ``columns=l[:1]`` passed a list *containing* the header row, which
    # pandas treats as (multi-)index arrays rather than plain column names.
    df = pd.DataFrame(l[1:], columns=l[0])
    df.to_csv(dir_path + '/' + name, index=False)
def create_dir(dir_path):
    """Ensure *dir_path* exists, creating it (and parents) when missing."""
    if path.exists(dir_path):
        return
    makedirs(dir_path)
def plot_serial_process_time(name='fig1.png', df_name='dataframe.csv'):
    """Plot serial vs. multi-process execution times from a report CSV and
    save the figure to ``reports/<name>``.

    :param name: output image file name
    :param df_name: input CSV (needs amount/serial_time/process_time columns)
    """
    df = pd.read_csv(dir_path + "/" + df_name)
    df.sort_values(by=['amount'], inplace=True)
    x = df['amount']
    serial_y = df['serial_time']
    process_y = df['process_time']
    # BUG FIX: start a fresh figure and close it afterwards; the old code
    # drew onto the implicit current figure, so repeated calls overlaid
    # lines and leaked matplotlib figures.
    plt.figure()
    plt.plot(x, serial_y, ls='--', marker='o')
    plt.plot(x, process_y, ls='--', marker='o')
    plt.xlabel('Cantidad de muestras')
    plt.ylabel('Tiempo de ejecución')
    plt.legend(['Serial', 'Procesos'])
    plt.title('Comparación de tiempos de ejecución')
    create_dir(dir_path)
    plt.savefig(dir_path + '/' + name, dpi=300)
    plt.close()
'''
Created on Jan 22, 2018
@author: PATI
'''
from domain.jucator import Jucator
class ExceptionR(Exception):
    """Repository-layer exception (clasa de exceptii pentru repo)."""

    def __init__(self, *args, **kwargs):
        super(ExceptionR, self).__init__(*args, **kwargs)
class RepoJ():
    """File-backed repository for players (jucatori).

    Each line of the backing file stores one player as
    ``nume;prenume;inaltime;post``.
    """

    def __init__(self, nume):
        # ``nume`` is the path of the backing file.
        self.__nume = nume

    def __loadFromFile(self):
        """Read all players from the file; a missing file yields [].

        BUG FIX: the original handler only did ``rez=[]`` on IOError and
        then fell through to ``f.readline()`` with ``f`` unbound, raising
        NameError instead of returning the empty list.
        """
        try:
            f = open(self.__nume, "r")
        except IOError:
            return []
        rez = []
        with f:
            # Stop at the first empty line (original behaviour).
            line = f.readline().strip()
            while line != "":
                inf = line.split(";")
                rez.append(Jucator(str(inf[0]), str(inf[1]), int(inf[2]), str(inf[3])))
                line = f.readline().strip()
        return rez

    def __storeToFile(self, lista):
        """Overwrite the file with the players in *lista*."""
        with open(self.__nume, "w") as f:
            for x in lista:
                f.write("%s;%s;%s;%s\n" % (x.get_nume(), x.get_prenume(),
                                           x.get_inaltime(), x.get_post()))

    def getAll(self):
        """Return the full player list (a fresh list on every call)."""
        return self.__loadFromFile()

    def add(self, ju):
        """Add *ju*; raise ExceptionR if an equal player already exists."""
        alls = self.__loadFromFile()
        if any(x == ju for x in alls):
            raise ExceptionR("Un jucator cu acest nume exista deja!!!")
        alls.append(ju)
        self.__storeToFile(alls)

    def add2(self, ju):
        """Add *ju* only if not already present; never raises."""
        alls = self.__loadFromFile()
        if not any(x == ju for x in alls):
            alls.append(ju)
            self.__storeToFile(alls)

    def mod(self, nume, prenume, inaltime):
        """Update the height of the player matching nume/prenume.

        Raises ExceptionR when no such player exists.
        """
        alls = self.__loadFromFile()
        # A throwaway "Fundas" post builds the equality probe; Jucator
        # equality is assumed to ignore post/height (otherwise a changed
        # height could never match) -- TODO confirm in domain.jucator.
        ju = Jucator(nume, prenume, inaltime, "Fundas")
        for x in alls:
            if x == ju:
                x.set_inaltime(inaltime)
                self.__storeToFile(alls)
                return
        raise ExceptionR("Nu exista acest jucator!!!")

    def __cautaPost(self, post):
        # Shared filter for the three public post-lookup methods below.
        return [x for x in self.__loadFromFile() if x.get_post() == post]

    def cautaFundas(self):
        """Return all defenders (fundasi)."""
        return self.__cautaPost("Fundas")

    def cautaPivot(self):
        """Return all pivots."""
        return self.__cautaPost("Pivot")

    def cautaExtrema(self):
        """Return all wings (extreme)."""
        return self.__cautaPost("Extrema")

    def sortare(self, lista):
        """Sort *lista* in place by height, tallest first (stable, like the
        original bubble sort)."""
        lista.sort(key=lambda x: x.get_inaltime(), reverse=True)

    def echipa(self):
        """Pick a team: the 2 tallest defenders, the tallest pivot and the
        2 tallest wings.

        Raises ExceptionR when any post has too few players.
        """
        fundas = self.cautaFundas()
        pivot = self.cautaPivot()
        extrema = self.cautaExtrema()
        self.sortare(fundas)
        self.sortare(pivot)
        self.sortare(extrema)
        if len(pivot) < 1:
            raise ExceptionR("Nu sunt suficienti pivoti!!!")
        if len(fundas) < 2:
            raise ExceptionR("Nu sunt suficienti fundasi!!!")
        if len(extrema) < 2:
            raise ExceptionR("Nu sunt suficienti jucatori cu postul de extrema!!!")
        return [fundas[0], fundas[1], pivot[0], extrema[0], extrema[1]]
#!/usr/bin/env python
import os
from flask_mongoengine import MongoEngine
from flask_script import Manager
from flask_script import Server
from flask_script import Shell
from pymongo import MongoClient
from pymongo.errors import ServerSelectionTimeoutError
from application import create_app
from application import db
from configuration import DB_NAME, DEV_DB_NAME, DB_PORT, DEV_DB_PORT
# Build the Flask app for the configured environment and wrap it with
# flask-script's Manager to expose CLI commands.
app = create_app(os.environ.get('APP_CONFIG') or 'development')
manager = Manager(app)
def make_shell_context():
    """Build the default namespace for the flask-script shell.

    :return: mapping exposing the application and database handles
    :rtype: dict
    """
    return {'app': app, 'db': db}
# Register CLI commands: a dev server on 127.0.0.1:9060 and an app shell.
manager.add_command('runserver', Server(host='127.0.0.1', port='9060', use_debugger=True, use_reloader=True))
manager.add_command('shell', Shell(make_context=make_shell_context))
@manager.command
def initdb():
"""
Initializes the database with values necessary for rapid usability feedback
:return initialized: Boolean value specifying the status of the initialization
operation | function
:type <type, 'bool'>
>>> initdb()
True
"""
#: Get the current environment of execution
environment = os.environ.get('APP_CONFIG')
joblivery_port = os.environ.get('DB_PORT') or DB_PORT
joblivery_host = os.environ.get('DB_HOST') or DB_HOST
if joblivery_host and joblivery_port and (environment=='production'):
initialized = __initialize_db(DB_NAME)
else:
initialized = __initialize_db(DEV_DB_NAME)
#: As always no news is good news philosophy preferred
if not initialized:
print 'Database Initialization Failed: Check To Ensure MongoDB Is Installed and Running'
return initialized
@manager.command
def dropdb():
    """
    Drops the database with values necessary for rapid usability feedback
    :return dropped: Boolean value specifying the status of the initialization
    operation | function
    :type <type, 'bool'>
    >>> dropdb()
    True
    """
    #: Get the current environment of execution
    environment = os.environ.get('APP_CONFIG')
    # Production drops the live DB; anything else drops the dev DB.
    if environment == 'production':
        dropped = __drop_db(DB_NAME)
    else:
        dropped = __drop_db(DEV_DB_NAME)
    #: As always no news is good news philosophy preferred
    if not dropped:
        print 'Database Drop Failed: Ensure DB Exists and MongoDB Is Running'
    return dropped
@manager.command
def populatedb():
    """
    Populates the mongodb database with default values especially for testing purposes
    :return:
    """
    # NOTE(review): unimplemented stub -- the import below is unused and
    # the command currently does nothing.
    from mongoengine import connect
    pass
def __initialize_db(database_name):
    """
    Private helper method to initialize a MongoDB database and prevent small but still same old
    code repetition
    """
    client = MongoClient(serverSelectionTimeoutMS=10000)
    marker_collection = 'initialization'
    try:
        database = client[database_name]
        # Creating the marker collection forces the database into existence.
        if marker_collection not in database.collection_names():
            database.create_collection(marker_collection)
    except ServerSelectionTimeoutError:
        # Server unreachable within the 10 s selection timeout.
        return False
    return True
def __drop_db(database_name):
    """
    Drops the databases created
    :param database_name: The name of the database to drop
    """
    try:
        from mongoengine import connect
        connection = connect(database_name)
        connection.drop_database(database_name)
    except ValueError:
        return False
    return True
# WSGI entry-point alias expected by deployment tooling.
application = app
if __name__ == '__main__':
    manager.run()
|
# Generated by Django 3.1.2 on 2020-10-02 17:13
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: makes Profile.DateOfBirth optional
    # (NULL allowed in the DB, blank allowed in forms).
    dependencies = [
        ('users', '0005_auto_20201002_1710'),
    ]
    operations = [
        migrations.AlterField(
            model_name='profile',
            name='DateOfBirth',
            field=models.DateField(blank=True, null=True),
        ),
    ]
|
# https://stackoverflow.com/a/4142178
# This file signals to Python that it is okay to import from this directory.
import vim
from abstract_formatter import AbstractFormatter
class BashFormatter(AbstractFormatter):
    """Formatter backend that shells out to beautysh for bash buffers."""

    def __init__(self):
        # BUG FIX: ``super(self.__class__, self)`` recurses infinitely as
        # soon as this class is subclassed, because self.__class__ is then
        # the subclass.  Name the class explicitly.
        super(BashFormatter, self).__init__()
        self._beautysh = self._getAbsPath(self._getRootDir(),
                                          "build/venv/bin/beautysh")

    def _getFormatCommand(self, formattedFilename, guideFilename):
        """Return the shell command that beautifies *formattedFilename* in
        place and then prints it (``guideFilename`` is unused here)."""
        indent = vim.eval("g:VimFormatterBashIndent")
        functionStyle = vim.eval("g:VimFormatterBashFuncStyle")
        return '{} --indent-size {} --force-function-style {} "{}" && cat "{}"'.format(
            self._beautysh, indent, functionStyle, formattedFilename,
            formattedFilename)
|
# Python 2 maintenance script: manually updates one column of a resume row
# in the local SQLite database using hard-coded test values.
import sqlite3
conn = sqlite3.connect("sqlit3_LOCAL.db")
Mobile_Number="9986146542"
RequirementID="1"
Client="2"
name="3"
skills="4"
Yearsofexperience="5"
CURRENT_LOCATION="6"
lOCATION_OF_INTEREST="7"
CTC="8"
ECTC="9"
Notice_Period="10"
Email="12"
Source="13"
Date_of_birth="14"
PANCARD_NO="15"
dateOfSub="16"
Note="17"
##q= "SELECT Mobile_Number from Submit_resume where Mobile_Number = "+'"'+str(Mobile_Number)+'"'
#### RequirementID,Client,name,skills,Yearsofexperience,CURRENT_LOCATION,lOCATION_OF_INTEREST,CTC,ECTC,Notice_Period,Mobile_Number,Email,Source,Date_of_birth,PANCARD_NO,dateOfSub,Note
##q="UPDATE Submit_resume SET RequirementID="+'"'+str(RequirementID)+'"' +"where Mobile_Number = "+'"'+str(Mobile_Number)+'"'
##
##cursor=conn.execute(q)
##print cursor.fetchall()
# NOTE(review): the database is connected a second time here, leaking the
# first connection.
conn = sqlite3.connect("sqlit3_LOCAL.db")
print "IN UpdateAll function data ::::::::",RequirementID
##cursor=conn.execute("UPDATE Submit_resume SET RequirementID="+'"'+str(RequirementID)+'"' WHERE Mobile_Number = "+'"'+str(Mobile_Number)+'"')
# NOTE(review): the query is built by string concatenation; prefer a
# parameterised query ("... SET RequirementID=? WHERE Mobile_Number=?")
# if these values ever come from user input.
q = "UPDATE Submit_resume SET RequirementID = '" + str(RequirementID) + "' WHERE Mobile_Number = '"+ str(Mobile_Number) + "'"
print "q= ",q
cursor=conn.execute(q)
conn.commit()
# NOTE(review): UPDATE produces no result rows, so this always prints [].
print cursor.fetchall()
|
from PyQt5.QtWidgets import QApplication, QLabel
def hello_qt(a_message):
    """Show *a_message* in a minimal Qt window and block until it closes."""
    application = QApplication([])
    greeting = QLabel(a_message)
    greeting.show()
    application.exec_()
# Press the green button in the gutter to run the script.
# Script entry point: show a demo greeting window.
if __name__ == '__main__':
    hello_qt('Hello, Qt')
# See PyCharm help at https://www.jetbrains.com/help/pycharm/
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import lib.shortestPaths as sp
def generateMetaGraph (mazeMap, playerLocation, coins):
    """
    Generate a metaGraph from mazeMap, containing all coins and the player.
    This function is built on the shortestPaths lib.

    Returns (metaGraph, bestPaths): metaGraph[a][b] is the shortest distance
    between nodes a and b, bestPaths[a][b] the corresponding cell path.
    """
    nodes = [playerLocation] + coins
    metaGraph = {}
    bestPaths = {}
    # One Dijkstra run per node; each run fills in the distances/paths to
    # every node with a smaller index (pairs are symmetric).
    i = len(nodes)-1
    while i >= 0:
        routingTable = sp.dijkstra(mazeMap, nodes[i])
        j = 0
        while j < i:
            # Lazily create the per-node sub-dicts on first sight.
            if nodes[i] not in bestPaths :
                bestPaths[nodes[i]] = {}
                metaGraph[nodes[i]] = {}
            if nodes[j] not in bestPaths :
                bestPaths[nodes[j]] = {}
                metaGraph[nodes[j]] = {}
            if not metaGraph[nodes[j]].get(nodes[i], False):
                path = sp.orderPath(routingTable, nodes[i], nodes[j], [])
                distance = routingTable[nodes[j]][1]
                # Store both directions; the reverse path is just reversed.
                metaGraph[nodes[i]][nodes[j]] = distance
                bestPaths[nodes[i]][nodes[j]] = path
                metaGraph[nodes[j]][nodes[i]] = distance
                bestPaths[nodes[j]][nodes[i]] = path[::-1]
            j += 1
        i -= 1
    return metaGraph, bestPaths
# Module-level best-so-far state shared by the exhaustive searches below.
bestDistance = float('inf')
bestPaths = []
def TSM_auxi(nodeStart, nodes, distance, path):
    """Recursive brute-force helper: tries every ordering of ``nodes``.

    Each element of ``nodes`` appears to be a (node, edge-weight) pair:
    node[1] is added to the running distance and node[0] to the path --
    TODO confirm against the callers.  ``nodeStart`` is unused here.
    """
    global bestDistance
    global bestPaths
    if not nodes:
        # Every node consumed: record this tour if it beats the best one.
        if distance < bestDistance:
            bestDistance = distance
            bestPaths = path
    else:
        for node in nodes:
            toseeNodes = list(nodes)
            toseeNodes.remove(node)
            TSM_auxi(node, toseeNodes, distance + node[1], path+[node[0]])
def travellingSalesman(nodeStart, nodes):
    """
    Implementation of the travelling salesman problem algorithm with naïve try.

    Resets the shared best-so-far globals, then exhaustively enumerates all
    orderings of ``nodes`` (O(n!) -- only viable for small node counts).
    Returns (bestDistance, bestPaths).
    """
    global bestDistance
    global bestPaths
    bestDistance = float('inf')
    bestPaths = []
    distance = 0
    path = []
    TSM_auxi(nodeStart, nodes, distance, path)
    return bestDistance, bestPaths
def backTrack(metaGraph, startNode, path, deep):
    """
    Implementation of the backTracking algorithm.
    """
    # NOTE(review): this function is broken -- ``BT_auxi``, ``nodeStart``,
    # ``nodes`` and ``distance`` are all undefined here, so any call raises
    # NameError.  It looks like an unfinished copy of travellingSalesman;
    # it needs a real BT_auxi helper plus arguments derived from
    # metaGraph/startNode before it can be used.
    global bestDistance
    global bestPaths
    bestDistance = float('inf')
    bestPaths = []
    BT_auxi(nodeStart, nodes, distance, path)
    return bestDistance, bestPaths
def findNearestCoin(mazeMap, playerLocation, coinsList):
    """
    Returns the best path for the nearest coin.
    """
    routingTable = sp.dijkstra(mazeMap, playerLocation)
    nearest = -1
    distance = float('inf')
    for coin in coinsList:
        if routingTable[coin][1] < distance :
            # BUG FIX: the original assigned the *comparison result* (a
            # bool) to ``distance`` instead of the coin's distance, so
            # every later coin "won" and the function returned the path to
            # the last coin examined, not the nearest one.
            distance = routingTable[coin][1]
            nearest = coin
    return sp.orderPath(routingTable, playerLocation, nearest, [])
|
#!/usr/bin/python
#-*- coding: utf-8 -*-
# Python 2 only: the ``commands`` module was removed in Python 3
# (use subprocess there).
import commands
def system_running_time():
    """Parse ``uptime`` output and return the system uptime as a
    Chinese-formatted string '<d>天<h>小时<m>分' (days/hours/minutes)."""
    s = commands.getoutput('uptime').split()
    # ``uptime`` prints a different number of fields depending on whether
    # the box has been up for days, hours:minutes, or only minutes; the
    # field counting below appears to distinguish those layouts -- TODO
    # confirm against the exact uptime build deployed.
    if len(s) == 10:
        d = 0
    else:
        d = s[2]
    if len(s[-8].split(':')) < 2:
        # "up N min" layout: no hh:mm field present.
        d = 0
        h = 0
        m = s[-9]
    else:
        h = s[-8].split(':')[0]
        m = s[-8].split(':')[1].rstrip(',')
    res = '%s天%s小时%s分' % (d,h,m)
    return res
if __name__ == '__main__':
    res = system_running_time()
    print res
|
"""
created by Bradley Sheneman
modified version of SpockBot physics.py
changed the walk, sprint, jump functions to a single move function for code clarity
more importantly, it allows for single movements that are composites of
different motions (e.g. jump while walking northeast)
does not directly impact any data. calculates a new vector
which the client can use at its discretion
"""
# documentation for SpockBot physics plugin
"""
PhysicsPlugin is planned to provide vectors and tracking necessary to implement
SMP-compliant client-side physics for entities. Primarily this will be used to
keep the client position updated for gravity/knockback/water-flow etc. But it should
also eventually provide functions to track other entities affected by SMP
physics
Minecraft client/player physics is unfortunately very poorly documented. Most of
these values are based of experimental results and the contributions of a
handful of people (Thank you 0pteron!) to the Minecraft wiki talk page on
Entities and Transportation. Ideally someone will decompile the client with MCP
and document the totally correct values and behaviors.
"""
# Gravitational constants defined in blocks/(client tick)^2
# (in the names below, "GAV" = gravity and "DRG" = drag)
PLAYER_ENTITY_GAV = 0.08
THROWN_ENTITY_GAV = 0.03
RIDING_ENTITY_GAV = 0.04
BLOCK_ENTITY_GAV = 0.04
ARROW_ENTITY_GAV = 0.05
# Air drag constants defined in 1/tick
PLAYER_ENTITY_DRG = 0.02
THROWN_ENTITY_DRG = 0.01
RIDING_ENTITY_DRG = 0.05
BLOCK_ENTITY_DRG = 0.02
ARROW_ENTITY_DRG = 0.01
# Player ground acceleration isn't actually linear, but we're going to pretend
# that it is. Max ground velocity for a walking client is 0.215blocks/tick, it
# takes a dozen or so ticks to get close to max velocity. Sprint is 0.28, just
# apply more acceleration to reach a higher max ground velocity
PLAYER_WLK_ACC = 0.15
PLAYER_SPR_ACC = 0.20
PLAYER_GND_DRG = 0.41
# we can add more if there are more speeds that a player can move
# Maps a movement-mode id (1 = walk, 2 = sprint) to its acceleration.
motions = { 1: PLAYER_WLK_ACC,
            2: PLAYER_SPR_ACC }
# Seems about right, not based on anything
PLAYER_JMP_ACC = 0.45
import math
from spock.mcmap import mapdata
from spock.utils import pl_announce, BoundingBox
from spock.vector import Vector3
import logging
logger = logging.getLogger('spock')
class NewPhysicsCore:
    """Computes movement impulses and accumulates them into the shared
    acceleration vector; positions themselves are applied elsewhere."""
    def __init__(self, vec, pos):
        # vec: shared Vector3 accumulator; pos: client position object
        # (provides/receives the on_ground flag).
        self.vec = vec
        self.pos = pos
    def move(self, direction, motion, jump):
        """Add one movement impulse.

        direction -- heading in degrees
        motion    -- movement-mode id (key into ``motions``: 1 walk, 2 sprint)
        jump      -- if truthy and on the ground, add a vertical impulse
        """
        acc = motions[motion]
        # as before, we assume angles are in degrees
        angle = math.radians(direction)
        z = math.sin(angle)*acc
        x = math.cos(angle)*acc
        y = 0.0
        if jump:
            if self.pos.on_ground:
                self.pos.on_ground = False
                y = PLAYER_JMP_ACC
        #self.vec.add_vector(x=x, y=y, z=z)
        # NOTE(review): unless Vector3.__iadd__ mutates in place, += rebinds
        # self.vec to a NEW object while the plugin keeps the old one --
        # confirm Vector3 semantics in spock.vector.
        self.vec += Vector3(x,y,z)
@pl_announce('NewPhysics')
class NewPhysicsPlugin:
    """SpockBot plugin: applies gravity, drag and collisions to the client
    position on every physics tick and exposes NewPhysicsCore for moves."""
    def __init__(self, ploader, settings):
        self.vec = Vector3(0.0, 0.0, 0.0)
        # Player bounding box: 0.8 wide/deep, 1.8 tall.
        self.playerbb = BoundingBox(0.8, 1.8)
        self.world = ploader.requires('World')
        self.event = ploader.requires('Event')
        clinfo = ploader.requires('ClientInfo')
        self.pos = clinfo.position
        ploader.reg_event_handler('physics_tick', self.tick)
        self.pycore = NewPhysicsCore(self.vec, self.pos)
        ploader.provides('NewPhysics', self.pycore)
    def tick(self, _, __):
        # One physics step: collide, damp horizontal motion, move.
        self.check_collision()
        self.apply_horizontal_drag()
        self.apply_vector()
    def check_collision(self):
        """Zero out velocity components that would push into blocks and
        maintain the on_ground flag (applying gravity while airborne)."""
        cb = Vector3(math.floor(self.pos.x), math.floor(self.pos.y), math.floor(self.pos.z))
        if self.block_collision(cb, y=2): #we check +2 because above my head
            self.vec.y = 0
        if self.block_collision(cb, y=-1): #we check below feet
            # Standing on a block: stop falling and snap to the cell's y.
            self.pos.on_ground = True
            self.vec.y = 0
            self.pos.y = cb.y
        else:
            self.pos.on_ground = False
            #self.vec.add_vector(y = -PLAYER_ENTITY_GAV)
            self.vec += Vector3(0, -PLAYER_ENTITY_GAV, 0)
            self.apply_vertical_drag()
        #feet or head collide with x
        if self.block_collision(cb, x=1) or self.block_collision(cb, x=-1) or self.block_collision(cb, y=1, x=1) or self.block_collision(cb, y=1, x=-1):
            self.vec.x = 0
            #replace with real info in event
            self.event.emit("phy_collision", "x")
        #feet or head collide with z
        if self.block_collision(cb, z=1) or self.block_collision(cb, z=-1) or self.block_collision(cb, y=1, z=1) or self.block_collision(cb, y=1, z=-1):
            self.vec.z = 0
            #replace with real info in event
            self.event.emit("phy_collision", "z")
    def block_collision(self, cb, x = 0, y = 0, z = 0):
        """Axis-aligned bounding-box overlap test between the player and the
        block at cell cb + (x, y, z); returns True on intersection."""
        block_id, meta = self.world.get_block(cb.x+x, cb.y+y, cb.z+z)
        block = mapdata.get_block(block_id, meta)
        if block == None:
            return False
        #possibly we want to use the centers of blocks as the starting points for bounding boxes instead of 0,0,0
        #this might make thinks easier when we get to more complex shapes that are in the center of a block aka fences but more complicated for the player
        #uncenter the player position and bump it up a little down to prevent colliding in the floor
        pos1 = Vector3(self.pos.x-self.playerbb.w/2, self.pos.y-0.2, self.pos.z-self.playerbb.d/2)
        bb1 = self.playerbb
        bb2 = block.bounding_box
        if bb2 != None:
            pos2 = Vector3(cb.x+x+bb2.x, cb.y+y+bb2.y, cb.z+z+bb2.z)
            # Standard AABB overlap check on all three axes.
            if ((pos1.x + bb1.w) >= (pos2.x) and (pos1.x) <= (pos2.x + bb2.w)) and \
            ((pos1.y + bb1.h) >= (pos2.y) and (pos1.y) <= (pos2.y + bb2.h)) and \
            ((pos1.z + bb1.d) >= (pos2.z) and (pos1.z) <= (pos2.z + bb2.d)):
                return True
        return False
    def apply_vertical_drag(self):
        # Air drag while falling/jumping.
        self.vec.y = self.vec.y - self.vec.y*PLAYER_ENTITY_DRG
    def apply_horizontal_drag(self):
        # Ground friction on both horizontal axes.
        self.vec.x -= self.vec.x * PLAYER_GND_DRG
        self.vec.z -= self.vec.z * PLAYER_GND_DRG
    def apply_vector(self):
        # Integrate velocity into the shared client position.
        p = self.pos
        p.x = p.x + self.vec.x
        p.y = p.y + self.vec.y
        p.z = p.z + self.vec.z
|
from __future__ import print_function
import airflow
import pytz
import logging
from datetime import datetime, timedelta
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.models import Variable
# Fixed, timezone-aware anchor date for scheduling (UTC).
start_date = datetime(2017, 10, 24, 0, 0, 0, tzinfo=pytz.utc)
# Defaults applied to every task created on this DAG.
# NOTE(review): 'schedule_interval' is not a task-level default — Airflow
# ignores it here; the effective value is the one passed to DAG() below.
default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': start_date,
    'schedule_interval': None,
    'email': [''],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 0,
    'retry_delay': timedelta(minutes=5)
}
# schedule_interval=None: the DAG only runs when triggered manually.
dag = DAG('lab2',
          description = 'Using BashOperator and WGET to download files',
          schedule_interval = None,
          default_args = default_args)
"""
# Step 1.
download_file = BashOperator(
task_id = 'download_file',
bash_command = 'wget https://raw.githubusercontent.com/umg/data-science-summit-airflow/master/data/shazam/shazam_AR_20171029.txt -O /tmp/shazam_AR_20171029.txt',
dag = dag
)
"""
"""
# Step 2
download_file = BashOperator(
task_id = 'download_file',
bash_command = 'wget $URL/shazam_AR_20171029.txt -O /tmp/shazam_AR_20171029.txt',
env = {'URL': '{{ var.value.shazam_files_url }}'},
dag = dag
)
"""
"""
# Step 3
download_file = BashOperator(
task_id = 'download_file',
bash_command = 'wget $URL/shazam_AR_20171029.txt -O /tmp/shazam_AR_20171029.txt; echo $?' ,
env = {'URL': '{{ var.value.shazam_files_url }}'},
xcom_push = True,
dag = dag
)
"""
"""
# Step 4
download_file = BashOperator(
task_id = 'download_file',
bash_command = 'wget $URL/shazam_AR_$EXEC_DATE.txt -O /tmp/shazam_AR_$EXEC_DATE.txt; echo $?',
env={'URL': '{{ var.value.shazam_files_url }}',
'EXEC_DATE': '{{ ds_nodash }}'},
xcom_push = True,
dag = dag
)
"""
# Step 5
# One download task per configured country code.  The Variable lookup runs
# at DAG-parse time on the scheduler, so the task list tracks the Airflow
# Variable 'shazam_country_list' (comma-separated codes, e.g. "AR,US").
shazam_country_list = Variable.get('shazam_country_list').split(',')
for country in shazam_country_list:
    # Each iteration registers a distinct task (unique task_id) on the DAG.
    download_file = BashOperator(
        task_id = 'download_file_{}'.format(
            country
        ),
        # 'echo $?' prints wget's exit status as the last stdout line, which
        # xcom_push=True publishes as the task's XCom value.
        bash_command = 'wget $URL/shazam_{}_$EXEC_DATE.txt -O /tmp/shazam_{}_$EXEC_DATE.txt; echo $?'.format(
            country,
            country
        ),
        env={'URL': '{{ var.value.shazam_files_url }}',
             'EXEC_DATE': '{{ ds_nodash }}'},
        xcom_push = True,
        dag = dag
    )
|
import random
from copy import copy
class Player:
    """One merchant in the trading/building simulation.

    A player owns money, a resource inventory, a market of own sale prices,
    constructed buildings and a hand of building cards.  Every decision
    (what to buy, build or produce) is taken uniformly at random, so the
    class doubles as a random baseline agent.
    """

    def __init__(self, player_id):
        self.player_id = player_id
        self.market = {} # {item: price}
        self.inventory = {} # {item: amount}
        self.merchant_pos = "Home"
        self.money = 50
        self.victory_points = 0
        self.buildings = []
        self.building_card_hand = []
        # bookkeeping history: per-turn snapshots appended by log_data()
        self.player_inventory_history = list()
        self.player_money_history = list()

    def log(self, log_string):
        """Print a message prefixed with this player's id."""
        print("Player {}: {}".format(self.player_id, log_string))

    def buy(self, item, amount, other_player):
        """Buy `amount` of `item` from `other_player` at their market price.

        Transfers money and stock in both directions.  Raises Exception if
        the seller lacks stock or this player lacks money.
        """
        cost = other_player.market[item]*amount
        if amount > other_player.inventory[item]:
            raise Exception("Tried to buy {} {} items, but only {} available".format(amount,item,other_player.inventory[item]))
        if self.money < cost:
            raise Exception("Tried to buy {} {} items. Not enough money: {} < {}".format(amount, item,self.money,cost))
        # make transaction
        self.money -= cost
        other_player.money += cost
        other_player.inventory[item] -= amount
        self.inventory[item] += amount
        self.log("Buying: {} {}'s for {} from player {}, {}$ left".format(amount, item, cost, other_player.player_id, self.money))

    def set_player_refs(self, other_players_ref, building_deck_ref):
        """Store live references to the other players and the shared deck."""
        # get latest info about other player
        self.other_players_ref = other_players_ref
        self.building_deck_ref = building_deck_ref

    def init_inventory(self,start_inventory,building_deck):
        """Zero every resource known to the deck, then apply start amounts."""
        # init inventory
        all_resources = building_deck.all_resource_list
        self.inventory = dict()
        for item in all_resources:
            self.inventory[item] = 0
        for key, value in start_inventory.items():
            self.inventory[key] = value

    def draw_start_cards(self, building_deck):
        """Draw the opening hand of 5 building cards."""
        for i in range(5):
            card = building_deck.draw_card()
            self.building_card_hand.append(card)

    def adjust_market(self):
        """Re-price every in-stock item at a random 5-20$ unit price."""
        self.log("ADJUST MARKET")
        # Player adjusts market price of its inventory items
        self.market = {}
        for item, amount in self.inventory.items():
            if amount > 0:
                price = random.randint(5, 20)
                self.market[item] = price
        self.log({x:"{} / {}$".format(self.inventory[x],y) for x,y in self.market.items()})

    def trade(self):
        """Pick a random other player and buy a random affordable item."""
        self.log("TRADE")
        # Player select a fellow merchant player to trade with
        fellow_merchant = random.choice(self.other_players_ref)
        self.log("Choosing player {} to trade with".format(fellow_merchant.player_id))
        # items whose *unit* price this player can afford at least once
        can_afford_items = [x for x,y in fellow_merchant.market.items() if y <= self.money]
        if len(can_afford_items) > 0:
            item = random.choice(can_afford_items)
            price = fellow_merchant.market[item]
            max_amount = self.money // price
            max_amount = min(max_amount,fellow_merchant.inventory[item])
            # NOTE(review): `> 1` means a single affordable unit is never
            # bought — confirm whether `>= 1` was intended.
            if max_amount > 1:
                amount = random.randint(1, max_amount)
                self.buy(item, amount, fellow_merchant)
        return None

    def build(self):
        """Build one random affordable building (if any), then swap one
        card out of the hand and refill the hand to 5 cards."""
        self.log("BUILD")
        # build building action
        can_build_buildings = [(i,x) for i, x in enumerate(self.building_card_hand) if x.can_build(self.inventory)]
        if len(can_build_buildings) > 0:
            to_build_choice = random.choice(can_build_buildings)
            to_build_idx = to_build_choice[0]
            to_build = to_build_choice[1]
            # pay building cost
            for item, cost in to_build.cost.items():
                self.inventory[item] -= cost
            self.log("building {} at cost {} ".format(to_build.name,to_build.cost))
            # build
            self.buildings.append(to_build)
            self.building_card_hand.pop(to_build_idx)
        else:
            self.log("Cant build any of {}".format([x.name for x in self.building_card_hand]))
        # change building card action: discard one random card back to the deck
        if len(self.building_card_hand) > 0:
            to_change_idx = random.randint(0,len(self.building_card_hand)-1)
            discard_card = self.building_card_hand[to_change_idx]
            self.building_card_hand.pop(to_change_idx)
            self.building_deck_ref.discard_card(discard_card)
            # TODO discard pile
        # draw cards to fill hand
        while len(self.building_card_hand) < 5:
            card = self.building_deck_ref.draw_card()
            self.building_card_hand.append(card)
        return None

    def collect(self):
        """For each owned building, randomly pick one affordable production
        option, pay its cost and add its result to the inventory."""
        self.log("COLLECT")
        # collect resources
        for building in self.buildings:
            # choose among production options
            can_afford_options = []
            for production_option in building.production:
                can_afford = True
                for item_name, item_amount in production_option["cost"].items():
                    if self.inventory[item_name] - item_amount < 0:
                        can_afford = False
                if can_afford:
                    can_afford_options.append(production_option)
            if len(can_afford_options) > 0:
                # select what to produce
                production_option = random.choice(can_afford_options)
                # pay production cost
                for item_name, item_amount in production_option["cost"].items():
                    self.inventory[item_name] -= item_amount
                # get produced items
                for item_name, item_amount in production_option["result"].items():
                    self.inventory[item_name] += item_amount
                self.log("Producing {} {} from {} at cost {}".format(item_amount,item_name,building.name,production_option["cost"]))
        return None

    def log_data(self):
        """Snapshot the inventory and money into the history lists."""
        self.player_inventory_history.append(copy(self.inventory))
        self.player_money_history.append(copy(self.money))
# ==============================
# Unit tests
def test_player_buy():
players = [Player(player_id=x) for x in range(2)]
builidng_deck = BuildingDeck()
# Bug, inventory goes to negative |
import zipfile
import json
import glob
import os
def save_json(filename, json_str):
    """Parse the JSON text *json_str* and write it back out to *filename*."""
    with open(filename, "w") as out_file:
        parsed = json.loads(json_str)
        json.dump(parsed, out_file)
        print("加载入文件完成...")
def zip_files(files, zip_name):
    """Zip every path matching the glob pattern *files* into *zip_name*.

    Archive members are stored flat under their basenames (no directories).
    """
    matched = glob.glob(files)
    # FIX: context manager guarantees the archive is closed (and its central
    # directory written) even if a write raises mid-way.
    with zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED) as archive:
        for path in matched:
            archive.write(path, os.path.basename(path))
# files = ['.\\123.txt', '.\\3.txt'] # file locations; separate multiple files with commas
# zip_file = '.\\m66y.zip' # name of the output zip archive
# zip_files(files, zip_file)
if __name__ == "__main__":
    # Zip everything under doc/Vectors into test.zip.
    # NOTE(review): paths are relative to the current working directory, not
    # to this script's location — confirm it is always run from its own dir.
    files ="../../doc/Vectors/*"
    zip_files(files,'../../doc/Vectors/test.zip')
|
import os
import sys
if __name__ == '__main__':
    # Reinstall a package via conda instead of pip, then snapshot the env.
    # Usage: python <script> <package-name>
    import subprocess  # local import: the module header (above) lacks it
    pkgname = sys.argv[1]
    # FIX: argument-list form avoids shell interpretation of the package name
    # (os.system passed it through a shell unquoted).  check=False mirrors
    # os.system's "ignore the exit status" behavior.
    subprocess.run(['pip', 'uninstall', '-y', pkgname], check=False)
    subprocess.run(['conda', 'install', pkgname], check=False)
    # Replace the shell redirection `> environment.yml` with an explicit file.
    with open('environment.yml', 'w') as env_file:
        subprocess.run(['conda', 'env', 'export'], stdout=env_file, check=False)
|
# Base URL of the demo shop the test-suite runs against.
LINK = "http://selenium1py.pythonanywhere.com/"
# Page URLs derived from the base so they stay in sync with it.
LOGIN_LINK = LINK + "accounts/login/"
CODERS_LINK = LINK + "catalogue/coders-at-work_207/"
STARS_LINK = LINK + "en-gb/catalogue/the-city-and-the-stars_95/"
|
# -*- coding: utf-8 -*-
"""
Created on Mon May 16 10:44:25 2016
Constants used across modules.
@author: lvanhulle
"""
import logging
import importlib
ARC_NUMPOINTS = 20  # number of line segments used to approximate an arc
CW = -1 #Circle direction clockwise
CCW = 1 #circle direction counter-clockwise
X, Y, Z = 0, 1, 2  # axis indices into point tuples/arrays
START = 0 #start of circle
END = 1 #end of circle
DIR = 2 #direction to travel
CENTER = 3 #center of circle
INSIDE = 1 #Point is inside shape
OUTSIDE = 0 #point is outside shape
ALLOW_PROJECTION = True
EPSILON = 1.0/10000 # The precision level to use when doing numerical comparisons
OMIT_Z = True # When moving to a new point this chooses the Z level should not be used
INCLUDE_Z = False
USED = True
NOT_USED = False
LEFT, ON_EDGE, RIGHT = -1,0,1  # side-of-line classification results
""" Used in InFill """
PARTIAL_ROW = 0
FULL_ROW = 1
FULL_FIELD = 2
CENTERED_FIELD = 3
TRIMMED_FIELD = 4
# Output flavors.
GCODE = 0
ROBOTCODE = 1
# Parameter/placeholder keys used elsewhere in the toolchain.
STL_FLAG = 'stl_file'
THE_LABEL = 'the_label'
MODEL = 'model'
BED_TEMP_FLAG = '#BED_TEMP'
EXTRUDER_TEMP_FLAG = '#EXTRUDER_TEMP'
LOG_LEVEL = logging.WARN # logging.INFO
# reload() clears any handlers configured by earlier imports so that the
# basicConfig call below actually takes effect (module-import side effect).
importlib.reload(logging)
logging.basicConfig(format='\n\nLog Message\n-----------\nModule: %(name)s\n' +
                    'Log Level: %(levelname)s\nMsg: %(message)s', level=LOG_LEVEL)
# cfg
# Globalized things needed throughout all modules
from datetime import datetime
import os
from os.path import dirname
import sys
#Variables modified by JSONReader
#Variables modified by JSONReader
# Scalar settings — empty strings until the JSON reader fills them in.
token = ''
cmdPrefix = ''
soundTime = ''
fileSizeAllowed = ''
maxSoundFiles = ''
# List settings — each starts as its own empty list.
adminUsers = []
noSoundTimer = []
disabledIntros = []
blockedChannels = []
# Paths: config.json and the sounds folder live one level above this module.
fileDir = dirname(__file__)
configPath = dirname(fileDir) + '/config.json'
soundsPath = dirname(fileDir) + '/sounds'
#Basic logging function to timestamp output
#Might be expanded to write to a log file
def Log(_logString):
    """Print *_logString* prefixed with an 'MM/DD/YYYY HH:MM:SS: ' timestamp.

    Might be expanded to write to a log file.
    """
    stamp = datetime.now().strftime("%m/%d/%Y %H:%M:%S")
    print(stamp + ': ' + _logString)
|
from tasks.set_tasks.unique_in_list import get_unique_in_list
def test_get_unique_in_list(new_set):
    # Deduplicating a list should yield the set of its distinct elements.
    # NOTE(review): `new_set` looks like a pytest fixture but is never used —
    # confirm whether it can be dropped from the signature.
    assert get_unique_in_list([1, 2, 1, 3, 2]) == {1, 2, 3}
|
#ImportModules
import ShareYourSystem as SYS
"""
Nicolas tests
rates 5-15 sigmaext 5-5
taums 20-10 thresholds 20-20 resets 10-10 taurps 0-0
delays 1-1 taur 0.5-0.5 taud 5-5
JEE, JEI, JIE, JII=100,100,100,100:
stable, eigenvalue w largest real part = -36,0
100,100,100,0:
unstable, 128, 184 (f=29Hz)
0,100,100,100:
stable, -15, 338
100,0,0,0:
unstable, 135, 0
0,100,100,0:
unstable, 99, 240 (f=38Hz)
"""
#ImportModules
import ShareYourSystem as SYS
# Candidate 2x2 lateral weight matrices [[EE, EI], [IE, II]] to scan;
# all but one configuration are commented out.
LateralWeightVariablesList=[
    #[[100,-100],[100,-100]],
    [[100,-100],[100,-0]],
    #[[0,-100],[100,-100]],
    #[[100,-0],[0,-0]],
    #[[0,-100],[100,-0]]
    #[[0,0],[0,-100]],
]
#Check
for __LateralWeightVariable in LateralWeightVariablesList:
    # Build a stabilizer for this weight matrix: first find the stationary
    # state, then run the linear stability analysis around it.
    #Define
    MyStabilizer=SYS.StabilizerClass(
    ).stationarize(
        _MeanWeightVariable=__LateralWeightVariable,
        _ConstantTimeVariable=[0.02,0.01],
        _RateVariable=[5.,15.],
        _InteractionStr="Spike"
    ).stabilize(
        _DelayTimeVariable=0.001,
        _DecayTimeVariable=0.005,
        _RiseTimeVariable=0.0005,
        #_ScanFrequencyVariable=[10.]
    )
    #Choose the parameters to print
    KeyStrsList=[
        'StationarizingMeanWeightVariable',
        'StabilizingConstantTimeVariable', #in ms
        'StabilizingDelayTimeVariable',
        'StabilizedPerturbationComplex',
        'StabilizedTotalPerturbationComplexesArray', #matrix M
        'StabilizedDeterminantFloatsTuple', #If it has converged, then it has to be closed to (0,0)
        'StabilizedBiggestLambdaFloatsTuple',
        'StabilizedInstabilityLambdaFloatsTuple', # real part should be negative if stable, (from this particular initial condition)
        'StabilizedInstabilityFrequencyFloat'
    ]
    #print the selected attributes of the stabilizer as an ordered mapping
    SYS._print(SYS.collections.OrderedDict(zip(KeyStrsList,MyStabilizer.mapGet(KeyStrsList))))
|
from django.urls import path
from . import views
# URL routes for this Django app: index page, a per-address/pool monitor
# page, and a JSON endpoint feeding the chart on that page.
urlpatterns = [
    path('', views.index, name='index'),
    # path('monitor/<str:address>', views.monitor, name='monitor'),
    path('monitor/<str:address>/<int:pool_id>', views.monitor_pool, name='monitor_pool'),
    path('chart_data_json/<str:addr>/<int:pool_id>', views.chart_data_json, name='chart_data_json'),
]
|
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from odoo import api, fields, models
class ImportItem(models.Model):
    """One line of an imported journal entry (a debit/credit split, optionally
    linked to a product and partner)."""
    _name = 'account.import.item'

    name = fields.Char(string="Name")
    import_entry_id = fields.Many2one('account.import.entry', string="Import journal entry")
    # FIX: default was the *string* "True", which only "worked" because any
    # non-empty string is truthy (a "False" string would also be truthy).
    active = fields.Boolean(string="Active", default=True)
    # NOTE(review): hard-coded res.currency id 2 (was the string "2");
    # consider defaulting to the company currency via a lambda instead.
    currency_id = fields.Many2one('res.currency', string="Currency", default=2)
    account_id = fields.Many2one('account.account', string="Account")
    debit = fields.Float(string="Debit")
    credit = fields.Float(string="Credit")
    fully_imported = fields.Boolean(string="Fully imported")
    journal_entry_id = fields.Many2one('account.move', string="Journal entry")
    product_id = fields.Many2one('product.template', string="Product")
    price_unit = fields.Float(string="Unit Price")
    quantity = fields.Float(string="Quantity")
    partner_id = fields.Many2one('res.partner', string="Contact")
|
import requests
import json
from dotenv import load_dotenv
import os
load_dotenv()
nyt_api_key1 = os.getenv("nyt_api_key")
rainforest_api1 = os.getenv("rainforest_api")
list_names = f"https://api.nytimes.com/svc/books/v3/lists/names.json?api-key={nyt_api_key1}"
api_url = f"https://api.nytimes.com/svc/books/v3/lists/current/hardcover-fiction.json?api-key={nyt_api_key1}"
def get_products(api_url):
    """GET *api_url* and return the decoded JSON payload."""
    response = requests.get(api_url)
    return json.loads(response.text)
def parse_json(api_url):
    """GET *api_url* and return the decoded JSON payload.

    Same contract as get_products(); kept as a separate name because both
    are called throughout the script.
    """
    raw_text = requests.get(api_url).text
    return json.loads(raw_text)
def read_date(date):
    """Convert an ISO-style date ('YYYY-MM-DD') to e.g. 'October 30, 2021'."""
    date = str(date)
    day = date[-2:]
    # FIX: strip only a *leading* zero ('05' -> '5').  The old test
    # `'0' in day` also matched '10', '20' and '30' and mangled them to '0'.
    if day.startswith('0'):
        day = day[1:]
    year = date[0: 4]
    month = date[5: 7]
    # FIX: 'Janurary' typo in the emitted month name.
    month_table = {'01':'January', '02':'February', '03':'March', '04':'April', '05':'May', '06':'June', '07':'July', '08':'August', '09':'September', '10':'October', '11':'November', '12':'December'}
    month_name = month_table[month]
    return f"{month_name} {day}, {year}"
# Module-level bootstrap: fetch the NYT "list of lists" and index it.
# NOTE(review): this first call's result is discarded — looks redundant with
# the parse_json call below; confirm it can be removed.
get_products(list_names)
nyt_list = parse_json(list_names)
nyt_list_names = []   # display names of every best-seller list
nyt_list_info = {}    # display name -> human-readable summary sentence
nyt_list_dict = {}    # display name -> URL-encoded list name for the API
for item in nyt_list['results']:
    list_name = item['list_name']
    list_name_nyt = item['list_name_encoded']
    last_date = item['newest_published_date']
    last_updated = item['updated']
    # NOTE(review): `string` is built but never used afterwards.
    string = f"List Name: {list_name}\nUpdated on: {last_date} Updated {last_updated}"
    nyt_list_names.append(list_name)
    nyt_list_dict[list_name] = list_name_nyt
    readable_date = read_date(last_date)
    nyt_list_info[item['list_name']] = f"The best seller list for {list_name} was last updated on {readable_date} and is typically updated {last_updated.lower()}."
#Print welcoming message to user with a description of the app
welcome_message = "\nWelcome to the Best Read Discovery App!\nIn this app you'll be able to search for a book by genre, browse best seller lists, and book purchasing options.\nAll in one place."
print(welcome_message)
#For this app you can indicate book genre, and we'll pull best selling from the NYT
prompt = "\nWhen you're ready to begin press enter.."
begin = input(prompt)
welcome_message2 = "\nLet's browse New York Times Best Seller Lists to find a book!\n"
print(welcome_message2)
#Allow the user to see what genres are available
#Prompt the user for book genre
#Print thank you, and print the genre selected
#Key Feature 1: Search New York Times Best Seller List by Genre
def list_browse():
    """Interactively pick a best-seller list (genre).

    Loops until the user enters a known genre ('show me' prints them all),
    then fetches that list and stores its books in the global
    `nyt_list_data_adj`; the chosen genre name goes into the global `genre`.
    """
    global genre
    global nyt_list_data_adj
    while True:
        prompt1 = "\nEnter in the genre of best seller list you want to browse (ex. 'Hardcover Fiction').\nIf you want to see a complete list of genres, enter 'show me'.\nEnter here: "
        genre = input(prompt1)
        if genre == 'show me':
            for lis in nyt_list_names:
                print(lis)
        elif genre in nyt_list_names:
            matching_genre = f"\nWe found a matching genre!\n{nyt_list_info[genre]}\n\nThe New York Times Top 5 Best Selling {genre} Books: \n"
            print(matching_genre)
            url_list = nyt_list_dict[genre]
            list_data_url = f"https://api.nytimes.com/svc/books/v3/lists/current/{url_list}.json?api-key={nyt_api_key1}"
            nyt_list_data = parse_json(list_data_url)
            nyt_list_data_adj = nyt_list_data['results']['books']
            break
        else:
            no_match = " \nWe couldn't find a matching genre! Please try again"
            print(no_match)
# The List_Browse function is intended search New York Times and show a list of Available Best Seller Lists for the User
# The function also allows the user to enter a best seller list and return the top 5 books from the list
list_browse()
#From NYT API, return a top 5 list of the best selling books within that genre for that timeframe
book_list = {}  # title -> author for the currently displayed top-5
read_list = {}  # title -> author for books the user saved to read later
#Key Feature 2: Add Book Titles and Authors to a Read List to be printed at the end of using the application
def add_books_to_list():
    """Prompt for a title and author and record them in the global read_list."""
    prompt6 = "Enter in the book title here: "
    book_add = input(prompt6)
    prompt7 = "Enter the author here: "
    author_add = input(prompt7)
    read_list[book_add] = author_add
    add_statement = f"\n{book_add}, written by {author_add} has been added to the Read List!\n"
    print(add_statement)
#Key Feature 3: The book_ranking function is an extension of feature 1. It also allows the user to enter a best seller list and return the top 5 books from the list
def book_ranking():
    """Print the top-5 books of the currently selected list
    (global nyt_list_data_adj) and record title -> author in global book_list."""
    for book in nyt_list_data_adj:
        global book_list
        global author
        # the API returns books pre-ranked; only show ranks 1-5
        if book['rank'] <= 5:
            rank = book['rank']
            book_name = book['title']
            author = book['author']
            book_list[book_name] = author
            rank_statement = f"{rank}. {book_name}\nWritten by {author}\n"
            print(rank_statement)
        else:
            pass
book_ranking()
#The book_description function allows the user the ability to see a book description from New York Times API. This is part of feature 1.
def book_description():
    """Print the NYT description of the book whose title the user typed
    (global browse_or_read), if it is in the current list."""
    for book in nyt_list_data_adj:
        if browse_or_read == book['title']:
            desc = book['description']
            description = f"\nDESCRIPTON: {desc}\n"
            print(description)
        else:
            pass
#The function below allows you to continue to browse, to add a book to a Read List, or to pass on to Searching a book on Amazon.
#The Read List functionality helps complete feature 2.
def more_options():
    """Interactive menu: show a description, browse another list, add a book
    to the read list, or 'pass' to continue to the Amazon search step."""
    while True:
        global browse_or_read
        prompt2 = "\nMore Actions Below:\n\n1. Would you like to see the description of a book to learn more? If so, enter the title of the book (ex. 'SULLY').\n2. Do you want to browse other book lists? If so, enter 'browse'.\n3. Do you want to add a book to your read list? If so, enter 'add'.\n \nIf you want to continue to Search on Amazon, type 'pass'.\n \nEnter here: "
        browse_or_read = input(prompt2)
        if browse_or_read =='browse':
            list_browse()
            book_ranking()
        elif browse_or_read == 'pass':
            break
        elif browse_or_read == 'add':
            add_books_to_list()
        elif browse_or_read in book_list:
            book_description()
#Key Feature 4: The function below allows users to search Amazon for a Book Title and Author
def amazon_search_results(product_data):
    """Print title, rating, Prime eligibility, link and prices for the top 3
    hits in a Rainforest API search response."""
    hits = product_data['search_results']
    print("\nTop 3 Amazon Search Results:")
    for hit in hits:
        # only the first three ranked positions are shown
        if hit['position'] >= 4:
            continue
        title = hit['title']
        link = hit['link']
        try:
            rating = hit['rating']
        except KeyError:
            rating = 'Rating not available'
        prime = ''
        if hit['is_prime'] == True:
            prime += 'Yes'
        else:
            prime += 'No'
        prices = {}
        try:
            for entry in hit['prices']:
                prices[entry['name']] = entry['raw']
        except KeyError:
            prices['Price Not Available'] = ''
        print(f"\n{title}\nAverage Rating: {rating}\nPrime Eligible: {prime}\nNavigate to Link: {link}\n Price List:")
        for label in prices:
            print(f" {label.capitalize()} {prices[label]}")
#The function below navigates Amazon product data to pull book title, price, URL, avg. rating, and prime eligibility.
def search_books():
    """Interactive Amazon step: 'search' a title/author on Amazon via the
    Rainforest API, 'browse' more NYT lists, or 'done' to finish (printing
    the saved read list, if any).  Sets the global `search_book`."""
    while browse_or_read != 'done':
        prompt3 = "\nSearch and Finish Options: \n1. Would you like to search any book title on Amazon.com? If so, enter 'search'.\n2. Would you like to finish searching books? If so, enter 'done'.\n3. Would you like to continue to browse? If so, enter 'browse'.\nEnter here: "
        global search_book
        search_book = input(prompt3)
        if search_book == 'browse':
            list_browse()
            book_ranking()
            break
        elif search_book == 'done':
            if read_list:
                for read in read_list:
                    print(f"\nRead List:\n{read}, written by {read_list[read]}.\nWe're done finding books! Keep on reading!\n")
                break
            else:
                print("\nWe're done finding books! Keep on reading!")
                break
        elif search_book == 'search':
            prompt5 = "\nEnter Book Title here: "
            search_book_title = input(prompt5)
            prompt4 = "Enter Book Author here: "
            search_book_author = input(prompt4)
            # build a '+'-separated search term as the API expects
            search_book_title = search_book_title.replace(" ", "+")
            search_book_author = search_book_author.replace(" ", "+")
            search_term = f"{search_book_title}+{search_book_author}"
            amazon_url = f"https://api.rainforestapi.com/request?api_key={rainforest_api1}&type=search&amazon_domain=amazon.com&search_term={search_term}"
            get= get_products(amazon_url)
            amazon_search_results(get)
            break
#The function below displays a final prompt, providing a circular user experience: the user can continue researching or finish.
def final_prompt():
    """Closing menu: if the user is not 'done', offer one more research loop
    (more_options + search_books); otherwise print the goodbye message."""
    if search_book == 'done':
        print("\nThank you for using the Best Read Discovery App! Have a great day!")
        pass
    elif search_book != 'done':
        prompt_final = ("\nWould you like to continue to research books?\nIf so type 'yes', if no type 'done'.\nEnter here: ")
        final = input(prompt_final)
        if final == 'yes':
            more_options()
            search_books()
        else:
            print("\nThank you for using the Best Read Discovery App! Have a great day!\n")
#Ask the user if he/she wants to see the book on amazon
#From Amazon API, pull in price and kindle format data
#If not on Amazon, print statement saying that not available on Amazon
#If possible, print the URL for purchase for all items on the list
# Main interaction loop: browse/read-list menu, then the Amazon search step;
# exits once the user types 'done', otherwise offers the final prompt.
while True:
    more_options()
    search_books()
    if search_book == 'done':
        break
    final_prompt()
#Allow the user to browse books and search again if he/she chooses to
#End the program by printing the Read List and a thank you message
|
import torch
import random
import cairo
import numpy as np
import math
import os
import torchvision
import skimage.draw
import skimage.io
from models import LocationBasedGenerator
from PIL import Image
from torchvision.datasets import ImageFolder
from torch.utils.data import Dataset, DataLoader
from utils import return_default_mat, show2, compute_iou
from os import listdir
from os.path import isfile, join
def draw_circle(cent, color):
    """Rasterize a filled radius-10 circle centered at (row, col) = cent
    onto a black 128x128 RGB canvas and return it as uint8."""
    mat = np.zeros([128, 128, 3], dtype=np.uint8)
    # FIX: skimage.draw.circle was deprecated and removed (skimage >= 0.19);
    # disk((r, c), radius) is the drop-in replacement.
    circle = skimage.draw.disk((cent[0], cent[1]), 10)
    mat[circle] = color
    return mat
def draw_sq(cent, color, size=7):
    """Rasterize a filled axis-aligned square of half-width *size* centered
    at (row, col) = cent onto a black 128x128 RGB canvas."""
    mat = np.zeros([128, 128, 3], dtype=np.uint8)
    top, bottom = cent[0] - size, cent[0] + size
    left, right = cent[1] - size, cent[1] + size
    rows = [top, bottom, bottom, top]
    cols = [left, left, right, right]
    mat[skimage.draw.polygon(rows, cols)] = color
    return mat
def draw_tri(cent, color, size=7):
    """Rasterize a filled isosceles triangle (apex pointing left in column
    space) centered at (row, col) = cent onto a black 128x128 RGB canvas."""
    mat = np.zeros([128, 128, 3], dtype=np.uint8)
    rows = [cent[0] - size, cent[0], cent[0] + size]
    cols = [cent[1] + size, cent[1] - size, cent[1] + size]
    tri = skimage.draw.polygon(rows, cols)
    mat[tri] = color
    return mat
# Color name -> RGB triple used when generating the synthetic shapes.
name2color = {
    'blue': [66, 0, 192],
    'pink': [194, 0, 192],
    'brown': [194, 128, 64],
    'green': [66, 128, 64],
    'red': [194, 0, 64],
    'navy': [66, 128, 192],
    'pink2': [194, 128, 192],
    'c-0': [3, 153, 183], 'c-1': [29, 81, 34], 'c-2': [143, 64, 188], 'c-3': [242, 219, 176],
    'c-4': [196, 218, 69], 'c-5': [165, 67, 114], 'c-6': [187, 187, 253]
}
# Shape name -> rasterizer function (all share the (cent, color) signature).
shape2func = {
    "circle": draw_circle,
    "square": draw_sq,
    "tri": draw_tri
}
def create_data():
    """Generate 1000 randomly-placed images per (color, shape) pair under
    data/fine-scale/<color>-<shape>/mat-<i>.png."""
    nb_samples = 1000
    for color in name2color:
        for shape in shape2func:
            print(color, shape)
            save_dir = "data/fine-scale/%s-%s" % (color, shape)
            # FIX: imsave fails if the target directory does not exist yet.
            os.makedirs(save_dir, exist_ok=True)
            for i in range(nb_samples):
                # keep the shape center >= 14 px from every border so the
                # rasterized shape stays fully inside the 128x128 canvas
                cent = [random.randint(14, 114), random.randint(14, 114)]
                im = shape2func[shape](cent, name2color[color])
                skimage.io.imsave("%s/mat-%d.png" % (save_dir, i), im)
def analyze():
    """Evaluate the pretrained LocationBasedGenerator on every
    (color, shape) folder and print per-folder, per-color and per-shape
    mean/variance of the reconstruction IoU."""
    device = "cuda"  # NOTE(review): hard-coded; fails on CPU-only machines
    sae = LocationBasedGenerator()
    sae.to(device)
    sae.load_state_dict(torch.load("pre_models/model-sim-20200725-114336", map_location=device))
    sae.eval()
    color_data = {}  # color -> list of mean IoUs (one per shape)
    shape_data = {}  # shape -> list of mean IoUs (one per color)
    for color in name2color:
        for shape in shape2func:
            root_dir = "data/fine-scale/%s-%s" % (color, shape)
            all_files = [join(root_dir, f) for f in listdir(root_dir) if isfile(join(root_dir, f))]
            data = []
            trans = torchvision.transforms.ToTensor()
            for a_file in all_files:
                img_pil = Image.open(a_file).convert('RGB')
                img = trans(img_pil).unsqueeze(0)
                data.append(img)
            data = torch.cat(data, dim=0)
            data_loader = DataLoader(data, batch_size=64, shuffle=False)
            ious = []
            for batch in data_loader:
                # default (canonical) mats serve as the model's reference input
                def_mat = torch.cat([return_default_mat(batch[i])[0].unsqueeze(0)
                                     for i in range(batch.size(0))], dim=0).to(device)
                batch = batch.to(device)
                with torch.no_grad():
                    pred = sae.infer(batch, def_mat)
                ious.extend(compute_iou(pred, batch)[0])
                # show2([pred.cpu(), batch.cpu(), def_mat.cpu()], "test", 4)
            print(root_dir, np.mean(ious))
            if color not in color_data:
                color_data[color] = [np.mean(ious)]
            else:
                color_data[color].append(np.mean(ious))
            if shape not in shape_data:
                shape_data[shape] = [np.mean(ious)]
            else:
                shape_data[shape].append(np.mean(ious))
    # Aggregate: mean/variance of the IoU per color, then overall.
    du1, du2 = [], []
    for c in color_data:
        dm1, dm2 = np.mean(color_data[c]), np.var(color_data[c])
        du1.append(dm1)
        du2.append(dm2)
        print(dm1, dm2)
    print(np.mean(du1), np.mean(du2))
    print()
    # Same aggregation per shape.
    du1, du2 = [], []
    for c in shape_data:
        dm1, dm2 = np.mean(shape_data[c]), np.var(shape_data[c])
        du1.append(dm1)
        du2.append(dm2)
        print(dm1, dm2)
    print(np.mean(du1), np.mean(du2))
def random_colors():
    """Generate 7 random RGB colors labelled c-0..c-6 and print the mapping
    (nothing is returned)."""
    colors = {}
    for i in range(7):
        r = random.randint(0,255)
        g = random.randint(0,255)
        b = random.randint(0,255)
        # NOTE(review): the tensor is the dict *key*; tensors hash by object
        # identity, so this builds {tensor: name}.  The inverted mapping
        # {name: [r, g, b]} (like name2color above) was probably intended —
        # confirm before relying on this output.
        colors[torch.tensor([r, g, b])] = "c-%d" % i
    print(colors)
if __name__ == '__main__':
    # Entry point: run the pretrained-model evaluation over the dataset.
    analyze()
from django.contrib import admin
from .models import Departamento
# Expose the Departamento model in the Django admin with default options.
admin.site.register(Departamento)
'''
Manage everything from the install to the grub-install
'''
import os
import shutil
import subprocess
class Grub:
    '''
    Manage preparing the environment for grub: rewrite menu.lst and fstab
    for the target device naming, copy the boot stages, and run grub.
    '''
    def __init__(self, opts, target, nbd):
        # opts: dict with at least 'generic' (bool) and 'image' (disk image path)
        # target: mount point of the guest filesystem
        # nbd: nbd device the image is attached to (e.g. '/dev/nbd0')
        self.opts = opts
        self.target = target
        self.nbd = nbd

    def _grub_conf(self):
        '''
        Edit boot/grub/menu.lst under the target and return the grub root
        (e.g. '(hd0,0)') needed for the grub install.
        '''
        # FIX: the TODO asked for it — use `with` so the handles are always
        # closed (and writes flushed) even on error.
        lst = os.path.join(self.target, 'boot/grub/menu.lst')
        with open(lst, 'r') as conf:
            lines = conf.readlines()
        grub_root = ''
        for ind, line in enumerate(lines):
            if line.startswith('#'):
                continue
            if line.startswith('root'):
                grub_root = line[line.index('('):]
            if line.startswith('kernel'):
                if self.opts['generic']:
                    # generic image: kernel should boot from /dev/sda
                    lines[ind] = line.replace(self.nbd + 'p', '/dev/sda')
                else:
                    # virtio guest: kernel should boot from /dev/vda
                    # NOTE(review): this branch replaces '/dev/sda' while
                    # _fstab replaces the nbd partition prefix — confirm the
                    # asymmetry is intentional.
                    lines[ind] = line.replace('/dev/sda', '/dev/vda')
        with open(lst, 'w') as conf:
            conf.writelines(lines)
        return grub_root

    def _fstab(self):
        '''
        Edit the fstab with the proper devices for the final environment.
        '''
        fstab = os.path.join(self.target, 'etc/fstab')
        with open(fstab, 'r') as conf:
            lines = conf.readlines()
        for ind, line in enumerate(lines):
            if line.startswith('#'):
                continue
            if line.startswith('/dev/mapper/loop'):
                if self.opts['generic']:
                    lines[ind] = line.replace(self.nbd + 'p', '/dev/sda')
                else:
                    lines[ind] = line.replace(self.nbd + 'p', '/dev/vda')
        with open(fstab, 'w') as conf:
            conf.writelines(lines)

    def _copy_stages(self):
        '''
        Copy the host's grub boot stages into the target.
        '''
        shutil.copy('/boot/grub/stage1',
                    os.path.join(self.target, 'boot/grub/'))
        shutil.copy('/boot/grub/stage2',
                    os.path.join(self.target, 'boot/grub/'))

    def _install_grub(self, grub_root):
        '''
        Run grub in batch mode against the image and install it to (hd0).
        Returns grub's exit status.
        '''
        g_cmd = 'grub --batch --no-floppy --device-map=/dev/null'
        g_lines = 'device (hd0) ' + self.opts['image'] + '\n'\
            + 'root ' + grub_root + '\n'\
            + 'setup (hd0)\n'\
            + 'quit\n'
        g_lines = str.encode(g_lines)
        grub = subprocess.Popen(g_cmd, shell=True, stdin=subprocess.PIPE)
        grub.communicate(g_lines)
        # surface the exit status instead of discarding it
        return grub.wait()

    def setup_boot(self):
        '''
        Run the routines that will setup booting the virtual machine.
        '''
        grub_root = self._grub_conf()
        self._fstab()
        self._copy_stages()
        self._install_grub(grub_root)
|
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 8 08:59:46 2019
@author: lopez
"""
'''
while loop, find ma height
'''
v0 = 5 # meters per sec (initial upward velocity)
g = 9.81  # gravitational acceleration, m/s^2
n = 2000  # number of time samples
X= 1000 # for github use
# time steps
# time
import numpy as np
a_t = np.linspace(0,1,n) # we created an array of n time samples over [0, 1] s
# computations
# projectile height at each time sample: y(t) = v0*t - g*t^2/2
y = v0*a_t - 0.5*g*a_t**2
print( a_t)
print(y)
# find max height in while loop
i = 1
# Walk forward while the height is still increasing; the last value
# recorded before it starts decreasing is the peak.
# NOTE(review): if y[1] <= y[0] the loop body never runs and
# largest_height is unbound (NameError) — fine for these parameters,
# but confirm for other v0/g choices.
while y[i] > y[i-1]:
    largest_height = y[i]
    i += 1
print( 'max. hieght: %10.2f'%(largest_height) )
import matplotlib.pyplot as plt
plt.plot( a_t, y )
plt.show()
# the difference between a while loop and a for loop is that it will stop and it is more usful for comparing
# for a for loop you would need a break statement
#
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# __author__ = 'Benjamin'
import json
import time
from osc_sdk_python import Gateway
TIMEOUT=60
def create_vm(profile, vmtype, storage, omi):
    """Create a VM with an attached volume on Outscale.

    profile: credentials profile name; vmtype: instance type; storage: dict
    with 'size' and 'type'; omi: image key in config/omi.json for the region.
    Returns (ok, vm_dict_or_None, error_or_None).
    """
    import os  # local import: the module header (outside this block) lacks it
    gw = Gateway(**{'profile': profile})
    # FIX: resolve the current user's home directory instead of the
    # hard-coded /Users/benjaminlaplane path.
    with open(os.path.expanduser('~/.oapi_credentials')) as creds:
        credentials = json.load(creds)
    region = credentials[profile]['region']
    with open('config/omi.json') as omis:
        omi_list = json.load(omis)
    image = omi_list[region][omi]
    try:
        # create the shared keypair once, saving the private key locally
        if 'db_manager_key' not in [key['KeypairName'] for key in gw.ReadKeypairs()['Keypairs']]:
            new_keypair = gw.CreateKeypair(KeypairName='db_manager_key')
            with open('../db_manager_key.rsa', 'w') as newkey:
                newkey.write(new_keypair['Keypair']['PrivateKey'])
        new_vm = gw.CreateVms(ImageId=image, VmType=vmtype, KeypairName='db_manager_key')['Vms'][0]
        new_vol = gw.CreateVolume(Size=storage['size'], VolumeType=storage['type'], SubregionName=region + 'a')
        gw.LinkVolume(VmId=new_vm['VmId'], VolumeId=new_vol['Volume']['VolumeId'], DeviceName='/dev/xvdb')
    except Exception as errorExcept:
        return False, None, errorExcept
    # refresh the VM description, then wait for it to reach 'running'
    new_vm = gw.ReadVms(Filters={'VmIds': [new_vm['VmId']]})['Vms'][0]
    if waitforit(gw=gw, vms=[new_vm], state='running'):
        return True, new_vm, None
    return False, new_vm, None
def delete_vm(profile, vm_id):
    """Delete the VM *vm_id* if it exists under *profile*.

    Returns True when a deletion was requested, False otherwise (including
    API errors and unknown vm ids).
    """
    gw = Gateway(**{'profile': profile})
    try:
        if vm_id in [vm['VmId'] for vm in gw.ReadVms()['Vms']]:
            gw.DeleteVms(VmIds=[vm_id])
            return True
    # FIX: was a bare `except:` which also swallowed SystemExit/KeyboardInterrupt
    except Exception:
        return False
    # FIX: unknown vm id previously fell through returning None implicitly
    return False
def waitforit(gw, vms, state):
    """Poll the gateway until every VM in *vms* reaches *state*.

    Polls once per second for at most TIMEOUT seconds.  Returns True when
    all VMs reached the state, False on timeout or when no state is
    reported for the VMs.
    """
    waited = 0
    while waited < TIMEOUT:
        response = gw.ReadVmsState(AllVms=True, Filters={'VmIds': [vm['VmId'] for vm in vms]})
        vm_states = response['VmStates']
        if not vm_states:
            return False
        # idiom fix: any() instead of building a list just to test its length
        if any(vm['VmState'] != state for vm in vm_states):
            time.sleep(1)
            waited += 1
        else:
            return True
    return False
|
#!/usr/bin/env python
import logging
import time
from collections import Counter
import numpy as np
import pickle
import pysam
from util import initialize_iterator, prepare_file_name
logger = logging.getLogger(__name__)
# Sample barcodes multiplexed in this sequencing run.
samples = ['GACCGC', 'AAAACT', 'GGCGTC', 'AAAGTT', 'GTTCGA', 'ATATAG',
           'TAAAGT', 'ATCAAA', 'TCTGCA' , 'CCCTGG', 'TTAATC', 'CCGGAC']
organism = 'Mouse'
# Hard-coded data locations on the analysis server.
path = '/data/UMI/data/MUS/RP/'
pt = '/data/UMI/data/MUS/'
def repeat_families(repeats):
    """Group repeat gene names by family prefix (the part before '_dupN').

    Returns (family_names, counts) — parallel dict views from one Counter.
    """
    families = []
    for gene in repeats:
        # strip the duplicate suffix, e.g. 'Alu_dup1' -> 'Alu'
        # NOTE(review): if 'dup' is absent, rfind() == -1 chops the last two
        # characters (gene[:-2]) — preserved from the original; confirm all
        # inputs contain 'dup'.
        prefix = gene[:gene.rfind('dup') - 1]
        families.append(prefix)
    # FIX: build the Counter once instead of twice (the old code counted the
    # list separately for keys() and values()).
    counts = Counter(families)
    return counts.keys(), counts.values()
def categorize_genes(genes):
    """Split gene annotations into repeats and Ensembl genes.

    None entries are dropped first.  Returns (repeats, ensembl_genes), or
    None when nothing remains after filtering.
    """
    present = [item for item in genes if item is not None]
    if not present:
        return None
    repeats = []
    ensembl = []
    for gene in present:
        # Ensembl mouse gene ids all carry the 'ENSMUS' prefix
        target = ensembl if 'ENSMUS' in gene else repeats
        target.append(gene)
    return repeats, ensembl
def build_r2g_table(pysam_iterator):
    """Map read query_name -> list of 'GE' gene tags seen for that read.

    Reads without a 'GE' tag are skipped.
    """
    r2g = {}
    for read in initialize_iterator(pysam_iterator):
        if not read.has_tag('GE'):
            continue
        # setdefault creates the list on first sight of this read name
        r2g.setdefault(read.query_name, []).append(read.get_tag('GE'))
    return r2g
def build_umi_based_r2g_table(st):
    """Build {UMI: {query_name: [GE-tag-or-None, ...]}} for multimapped reads.

    *st* is a pysam AlignmentFile; it is reset and iterated to end of file.
    Only mapped reads with NH > 1 (multimappers) contribute; the gene entry
    is None when the read carries no 'GE' tag.
    """
    st.reset()
    reads = st.fetch(until_eof=True)
    r2g = {}
    for r in reads:
        if not r.is_unmapped and r.get_tag('NH') > 1:
            xm = r.get_tag('XM')  # UMI barcode
            ge = None
            if r.has_tag('GE'):
                ge = r.get_tag('GE')
            qname = r.query_name
            # nested insert: first per-UMI, then per-read-name
            if not xm in r2g:
                r2g.update({xm:{qname:[ge]}})
            else:
                if qname in r2g[xm]:
                    r2g[xm][qname].append(ge)
                else:
                    r2g[xm].update({qname: [ge]})
    return r2g
def get_non_isolated_umis(pysam_iterator):
    """Partition the multimapped UMIs into non-isolated and isolated sets.

    A UMI is "non-isolated" when it is carried by more than one distinct
    multimapped read name, or when it also occurs among uniquely-mapped
    reads; otherwise it is "isolated".  Returns (non_isolated, isolated).
    """
    reads = initialize_iterator(pysam_iterator)
    umis = {}  # UMI -> query names of multimapped reads carrying it
    u1 = []    # UMIs seen on uniquely-mapped reads (NH == 1)
    for r in reads:
        if not r.is_unmapped:
            xm = r.get_tag('XM')
            nh = r.get_tag('NH')
            if nh > 1:
                qn = r.query_name
                if xm in umis:
                    umis[xm].append(qn)
                else:
                    umis.update({xm: [qn]})
            elif nh == 1:
                u1.append(xm)
    uniqs_umis = Counter(u1).keys()
    non_isolated = []
    pending = []
    for umi in umis:
        # number of *distinct* read names sharing this UMI
        p = len(np.unique(umis[umi]))
        if p > 1:
            non_isolated.append(umi)
        else:
            pending.append(umi)
    # a single-read UMI that also appears among unique reads is non-isolated
    intersect = list(set(pending).intersection(uniqs_umis))
    non_isolated += intersect
    isolated = list(set(pending) - set(intersect))
    return non_isolated, isolated
'''
def gene_filtered_read_generator(alignmentIterable, exist=True, pattern=''):
"""Generator of type pysam.alignedSegment.
Takes an iterable of type pysam.alignedSegment as input.
Filters the input file as follows based on 'exist' and 'pattern'.
Yields filtered reads."""
for alignedSegment in alignmentIterable:
if alignedSegment.has_tag("GE"):
tag = alignedSegment.get_tag("GE")
if pattern in tag:
if exist:
yield alignedSegment
else:
if not(exist):
yield alignedSegment
'''
'''
def repeat_stats(alignmentFile, domain='reads', report=True, draw_pie=False):
domain_str = ''
if domain == 'read':
domain_str = ' bam file '
gen = gene_filtered_read_generator(alignmentFile, exist=False, pattern='ENSMUS')
alignmentFile.reset()
total = len([i for i in ngene_annotated_generator(alignmentFile, annotate=True)])
elif domain == 'unique':
domain_str = ' uniquely mapped '
gen = gene_filtered_read_generator(stats.uniques_generator(alignmentFile), exist=False, pattern='ENSMUS')
alignmentFile.reset()
_,total = stats.count_um(alignmentFile)
elif domain == 'multimap':
domain_str = ' multimapped '
gen = gene_filtered_read_generator(stats.multimap_generator(alignmentFile), exist=False, pattern='ENSMUS')
alignmentFile.reset()
total,_ = stats.count_um(alignmentFile)
else:
logger.info("Domain should be one of: 'read' , 'unique' , 'multimap'.")
return
alignmentFile.reset()
repeats_cnt = len([i for i in gen])
if report:
logger.info("\t" + "Total number of" + domain_str + "reads:" + "%s" % format(total, ",").rjust(25))
logger.info("\t" + "Number of repeats in" + domain_str + "reads:" + "%s" % format(repeats_cnt, ",").rjust(20))
if draw_pie:
labels = domain_str , domain_str + "repeats"
plt.pie([total-repeats_cnt,repeats_cnt], labels=labels, autopct='%1.1f%%', shadow=True, startangle=90)
return total, repeats_cnt
'''
if __name__ == "__main__":
    # Log both to the console (basicConfig) and to a per-run report file.
    logging.basicConfig(level=logging.INFO)
    fh = logging.FileHandler('/data/UMI/data/MUS/RP/RepeatDistReport.log')
    fh.setLevel(logging.INFO)
    logger.addHandler(fh)
    logger.info('Call to Repeat Solver Library.')
    logger.info('Repeat distribution report for %d samples.' %len(samples))
    logger.info('Organism : %s\n' %organism)
    # Pair each sample barcode with its input BAM and output pickle path.
    file_names = []
    for sample in samples:
        f1 = prepare_file_name(sample, pt, 'sample_', '.bam')
        f2 = prepare_file_name(sample, path, 'r2r_', '.pkl')
        file_names.append((f1, f2))
    counter = 1
    for in_file, out_file in file_names:
        logger.info('\n')
        logger.info('-' * 100)
        logger.info('Sample no. %d : %s' % (counter, in_file))
        logger.info('-' * 100)
        logger.info('\n')
        start_time = time.time()
        st = pysam.AlignmentFile(in_file, "rb")
        # Split multi-mapper UMIs into shared (non-isolated) vs. isolated.
        non_iso, iso = get_non_isolated_umis(st)
        r2g_table = build_umi_based_r2g_table(st)
        # Total number of (UMI, read) multimapped entries across all UMIs.
        mm = sum([len(item.keys()) for item in r2g_table.values()])
        r2r_table = {}
        total = 0
        # For each non-isolated UMI keep only the repeat (non-ENSMUS) genes of
        # every read, building read name -> repeat list.
        for umi in non_iso:
            if umi in r2g_table:
                for qname in r2g_table[umi]:
                    tup = categorize_genes(r2g_table[umi][qname])
                    if tup:
                        r, _ = tup
                        r2r_table.update({qname: r})
                        total += 1
    # Persist the read -> repeats table for downstream analysis.
        pickle.dump(r2r_table, open(out_file, 'wb'), protocol=pickle.HIGHEST_PROTOCOL)
        repeat_tagged = []
        family_tagged = []
        for qname in r2r_table:
            repeats = r2r_table[qname]
            if repeats:
                repeat_tagged.append(qname)
                _, v = repeat_families(repeats)
                # Keep reads whose every repeat family occurs at least twice.
                if min(v) > 1:
                    family_tagged.append(qname)
        con1 = len(repeat_tagged)
        per_con1 = str(round(float(con1)/total, 3))
        con2 = len(family_tagged)
        per_con2 = str(round(float(con2)/total, 3))
        end_time = time.time()
        logger.info('Total number of multi reads: %s\n' % (format(mm,',')))
        logger.info('-'*80)
        logger.info('Number of multi reads with non-isolated UMIs: %s\n' %(format(total,',')))
        logger.info('Non-isolated multi reads with at least 1 repeat copy: %s (%s%%)\n' % (format(con1,','), per_con1))
        logger.info('Non-isolated multi reads with at least 2 copies of the same repeat family: %s (%s%%)\n' % (format(con2,','), per_con2))
        counter += 1
|
# -*- coding: utf-8 -*-
# Created on Sun Dec 17 2017 11:28:47
# Author: WuLC
# EMail: liangchaowu5@gmail.com
# dp, O(1) space
class Solution(object):
    def minCostClimbingStairs(self, cost):
        """
        :type cost: List[int]
        :rtype: int

        Bottom-up DP keeping only the minimum total cost of standing on the
        two most recent steps; the top is reachable from either of them.
        """
        prev, curr = cost[0], cost[1]
        for step_cost in cost[2:]:
            prev, curr = curr, min(prev, curr) + step_cost
        return min(prev, curr)
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import numpy as np
import pandas as pd
import sys
# In[3]:
# BUG FIX: the guard was `if "__main__":` — a non-empty string literal is
# always truthy, so the body also ran whenever the module was imported.
if __name__ == "__main__":
    # Expects the survey CSV path as the first command-line argument.
    data = pd.read_csv(sys.argv[1])
    # Studies 1 and 2 are the no-repair condition; 3 and 4 the repair one.
    # (The "Conditon" typo is preserved: it is a data label other tooling may match.)
    by_rep = pd.Series("No-repair(Conditon 1 and 2)" if ((i==1) or (i==2)) else "Repair(Condition 3 and 4)" for i in data['Participant study:'])
    data['repair'] = by_rep
    # Expected answer per study to "Did this voice agent ever make a mistake?".
    sanity_dict = {1:3,2:1,3:3,4:1}
    # Expected answer per study to the repair question.
    sanity_dict_2 = {1:5,2:5,3:4,4:4}
    data_sanity_check = data[[sanity_dict[data['Participant study:'][i]] == data['Did this voice agent ever make a mistake?'][i] for i in range(0,(len(data)))]]
    data_sanity_check_2 = data_sanity_check[[sanity_dict_2[data_sanity_check['Participant study:'][i]]
                          == data_sanity_check['Did this agent ever try to repair a mistake it made?'][i] for i in data_sanity_check.index]]
    data_sanity_check_2.to_csv(sys.argv[1].split('.csv')[0]+"_modified.csv",index = False)
|
from function_base import my_abs
from function_base import power
from function_base import enroll
from function_base import add_end
from function_base import calc
from function_base import calc_change
from function_base import person
from function_base import move
from function_base import lazy_sum
from function_base import now
from function_base import moveUse
#import sys
import datetime as dt
from function_base import fact
#print (sys.version_info)
# Demo driver exercising the helpers defined in function_base.
print (dt.datetime.now())
print (my_abs(-78))
print ('8 * 8 = %d ' % power(8))
print ('---enroll : --- \n' )
print (enroll('Sarah','F'))
# add_end() is called three times in a row — presumably to demonstrate the
# mutable-default-argument pitfall (shared default list) — confirm in function_base.
print (add_end())
print (add_end())
print (add_end())
# Positional (immutable) arguments
print(calc([1,2,3]))
# Variadic (*args) arguments
print(calc_change(1,3,5,7))
nums = [1,2,3]
print(nums)
# Unpack a list into variadic arguments with *.
print(calc_change(*nums))
print(calc(nums))
# Keyword arguments
person('Michael',80)
person('Bob',101,city='shanghai')
person('Adam', 45, gender='M', job='Engineer')
extra ={'city':"Beijing",'job':"Engineer"}
# Unpack a dict into keyword arguments with **.
person('Jack',35,**extra)
# Recursive functions
print(fact(3))
print(fact(10))
# Tower of Hanoi
move(2,'A','B','C')
print('----------')
# moveUse(2,'A','B','C')
# move(3,'A','B','C')
# Returning a function (closure)
f = lazy_sum(1,3,5,7,9)
print(f)
print(f())
print (now())
|
import requests_mock
import requests.exceptions
import pytest
from .. import weather
def test_simple_result():
    """Check that the weather info can be parsed."""
    sample_good_output = '{"coord":{"lon":0.08,"lat":52.24},"weather":[{"id":804,"main":"Clouds","description":"overcast clouds","icon":"04d"}],"base":"stations","main":{"temp":282.57,"pressure":1034,"humidity":76,"temp_min":282.15,"temp_max":283.15},"visibility":10000,"wind":{"speed":3.6,"deg":260},"clouds":{"all":90},"dt":1546262760,"sys":{"type":1,"id":1482,"message":0.0031,"country":"GB","sunrise":1546243745,"sunset":1546271815},"id":2648627,"name":"Girton","cod":200}'
    with requests_mock.Mocker() as m:
        m.get(weather.WEATHER_API_ENDPOINT, text=sample_good_output)
        # Exercise both the functional and the class-based entry points.
        results = [
            weather.get_weather('apikey', 'Girton,GB'),
            weather.Weather('apikey', 'Girton,GB').get_weather()
        ]
        for parsed in results:
            assert 'temperature' in parsed
            assert parsed['temperature'] == 282.57
            for key in ('temperature', 'humidity', 'sunset', 'sunrise'):
                assert key in parsed
class TestErrorCases(object):
    sample_error_output = '{"cod":"500","message":"Internal error: 500001"}'

    def test_error_response(self):
        """Check that appropriate exception is raised on error."""
        with requests_mock.Mocker() as m:
            m.get(weather.WEATHER_API_ENDPOINT, text=self.sample_error_output,
                  status_code=500)
            # Both entry points must surface the HTTP 500 as WeatherServiceError.
            calls = (
                lambda: weather.get_weather('apikey', 'Girton,GB'),
                lambda: weather.Weather('apikey', 'Girton,GB').get_weather(),
            )
            for call in calls:
                with pytest.raises(weather.WeatherServiceError):
                    call()
|
#!/usr/bin/env python3
import math
def solve0(problem):
    """Brute-force simulation of the 'steal from across the circle' game.

    Elves 1..n sit in a circle; the current elf removes the elf directly
    across (rounding down), then play passes to the next elf.  Returns the
    number of the surviving elf.
    """
    count = int(problem)
    circle = [elf for elf in range(1, count + 1)]
    cursor = 0
    while True:
        across = (cursor + len(circle) // 2) % len(circle)
        circle.pop(across)
        if len(circle) == 1:
            return circle[0]
        if cursor > across:
            # Removal happened before the cursor: same elf, shifted index.
            cursor = cursor % len(circle)
        else:
            cursor = (cursor + 1) % len(circle)
def dump_data():
    """Print (n, winner) pairs as CSV for n in [2, 2210) to eyeball the pattern."""
    pairs = ((n, solve0(str(n))) for n in range(2, 2210))
    rows = (', '.join((str(n), str(winner))) for n, winner in pairs)
    print('n, x')
    print('\n'.join(rows))
def solve(problem):
    """Closed-form winner of the across-the-circle elimination game.

    With r the largest power of 3 not exceeding n: the winner is n itself
    when n == r, n - r while n <= 2r, and 2n - 3r beyond that.
    """
    elves = int(problem)
    # FIX: compute r with integer arithmetic.  int(math.log(elves, 3)) can be
    # off by one near exact powers of 3 because of float rounding.
    r = 1
    while r * 3 <= elves:
        r *= 3
    R = r * 3
    m = R - r
    if elves == r:
        return r
    elif elves <= m:
        return elves - r
    else:
        return elves * 2 - R
def test():
    """Regression cases for solve(), including values around 2**16."""
    expected = {'2': 1, '3': 3, '4': 1, '5': 2, '6': 3, '7': 5,
                '10': 1, '65535': 6486, '65536': 6487, '65537': 6488}
    for problem, winner in expected.items():
        assert solve(problem) == winner
def getinput():
    """Read stdin (or the files named on argv) as one whitespace-stripped string."""
    import fileinput
    with fileinput.input() as stream:
        return ''.join(stream).strip()
if __name__ == '__main__':
    # test()  # uncomment to run the regression checks before solving
    print(solve(getinput()))
|
import pandas as pd
import numpy as np
import re
import sys
import pyper
from time import sleep
def cleansing(one_day):
    """Normalise one day of bout results into (loser, winner) rows.

    NOTE(review): relies on the module-level globals ``year``, ``month`` and
    ``day`` set by the driver loop below — not usable standalone.
    """
    # The table lists each bout from both sides; keep only the first half.
    half = int(one_day.shape[0]/2)
    one_day = one_day.iloc[:half]
    one_day = one_day.drop('Unnamed: 0', axis=1)
    for i in range(one_day.shape[0]):
        if one_day.loc[i, 'e_win'] == 1.0:
            # East side won: swap so the loser always ends up in 'e_rikishi'.
            one_day.loc[i, 'e_rikishi'], one_day.loc[i, 'w_rikishi'] = one_day.loc[i, 'w_rikishi'], one_day.loc[i, 'e_rikishi']
        elif one_day.loc[i, 'e_win'] == 0.0:
            pass
        elif one_day.loc[i, 'e_win'] == 1.01 or one_day.loc[i, 'e_win'] == -0.1:
            # Sentinel values (presumably forfeits/absences — TODO confirm
            # against the scraper): drop the bout entirely.
            one_day = one_day.drop(i, axis=0)
        else:
            sys.exit(f'cleansing error: {year}, {month}, {day}')
    one_day = one_day.drop(['e_win', 'w_win'], axis=1)
    one_day.columns = ['loser', 'winner']
    one_day['year'] = year
    one_day['month'] = month
    one_day['day'] = day
    return one_day
if __name__ == '__main__':
    # Grand sumo tournaments run in odd-numbered months, 15 days each.
    year_list = [
        '2001', '2002', '2003', '2004', '2005',
        '2006', '2007', '2008', '2009', '2010', '2011',
        '2012', '2013', '2014',
        '2015', '2016', '2017', '2018'
    ]
    month_list = ['01', '03', '05', '07', '09', '11']
    day_list = range(1, 16)
    data = pd.DataFrame(columns=['loser', 'winner', 'year', 'month', 'day'])
    for year in year_list:
        for month in month_list:
            for day in day_list:
                fname = year+month+f'&day={day}_bt.csv'
                try:
                    one_day = pd.read_csv(f'input/{fname}')
                # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt,
                # SystemExit and real parsing errors.  Only a missing input
                # file should be reported and skipped.
                except FileNotFoundError:
                    print(f'"{fname}" is not here')
                else:
                    one_day = cleansing(one_day)
                    data = pd.concat([data, one_day], axis=0)
    data = data.reset_index(drop=True)
    data.to_csv('data.csv')
    # sys.exit()

# 200205, 201103 and 201105 have no data at all
|
#!/usr/bin/env python
# coding: utf-8
# Author: SCU Wuyuzhang College, Wei Chengan & Wan Ziyi
# Data: 2020/5
import os
import h5py
import json
import datetime
import random
import numpy as np
import tensorflow as tf
from tensorflow.keras import callbacks
from tensorflow.keras import backend as K
from tensorflow.keras import layers
from tensorflow.keras import Input
from tensorflow.keras import regularizers
from tensorflow.keras import models
from sklearn.model_selection import StratifiedKFold
from sklearn import metrics
from cnn_models import get_model
from dataset import load_data, get_article_aug_data, load_gen_data
''' Set GPU settings '''
# TF1-style session setup: allocate GPU memory on demand instead of
# reserving the whole device up front (tf.ConfigProto / tf.Session require
# TensorFlow 1.x or the tf.compat.v1 shim).
config = tf.ConfigProto()
config.gpu_options.allow_growth=True
sess = tf.Session(config=config)
K.set_session(sess)
''' Logging'''
def log_to_json(log, path):
    """Serialise *log* to JSON at *path*, coercing NumPy and Keras types.

    NumPy scalars become plain ints/floats, arrays become lists, and Keras
    regularizer objects are stored via their string representation.
    """
    class _CoercingEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, np.integer):
                return int(obj)
            if isinstance(obj, np.floating):
                return float(obj)
            if isinstance(obj, np.ndarray):
                return obj.tolist()
            if isinstance(obj, regularizers.Regularizer):
                return str(obj)
            return super().default(obj)

    with open(path, 'w') as out:
        json.dump(log, out, cls=_CoercingEncoder)
def get_metrics(y_score, y_true):
    """Compute binary-classification metrics at a 0.5 decision threshold.

    Any ratio whose denominator is zero is reported as -1 so downstream
    summaries can filter it out as an outlier.
    """
    preds = np.array([1 if score > 0.5 else 0 for score in y_score])
    pos = preds == 1
    neg = preds == 0
    # Confusion-matrix cells: predictions vs. ground-truth labels.
    tp = int(np.count_nonzero(preds[pos] == y_true[pos]))
    fp = int(np.count_nonzero(preds[pos] != y_true[pos]))
    tn = int(np.count_nonzero(preds[neg] == y_true[neg]))
    fn = int(np.count_nonzero(preds[neg] != y_true[neg]))

    def safe_ratio(numerator, denominator):
        # -1 marks an undefined metric (empty denominator).
        try:
            return numerator / denominator
        except ZeroDivisionError:
            return -1

    return {
        'accuracy': safe_ratio(tp + tn, tp + fp + tn + fn),
        'sensitivity': safe_ratio(tp, tp + fn),
        'specificity': safe_ratio(tn, tn + fp),
        'precision': safe_ratio(tp, tp + fp),
        'f1score': safe_ratio(2 * tp, 2 * tp + fp + fn),
        'auc': metrics.roc_auc_score(y_true, y_score),
        'confusion_matrix': [[tp, fn], [fp, tn]]
    }
''' Compute metrics of kfold training '''
def summary_kfold(eval_dir):
    """Aggregate per-fold evaluate*.json files into one summary dict.

    Collects every scalar metric across folds, then adds <metric>_mean /
    <metric>_std computed over the valid values (-1 markers excluded) and
    <metric>_outlier with the number of excluded folds.
    """
    fold_results = []
    for fname in os.listdir(eval_dir):
        with open(os.path.join(eval_dir, fname), 'r') as f:
            ev = json.load(f)
        # Only scalar metrics are aggregated.
        del ev['confusion_matrix']
        del ev['y_score']
        fold_results.append(ev)
    summary = {key: [] for key in
               ('accuracy', 'sensitivity', 'specificity', 'precision',
                'f1score', 'auc', 'loss')}
    for result in fold_results:
        for key, value in result.items():
            summary[key].append(value)
    stats = {}
    for key in summary.keys():
        valid = [v for v in summary[key] if v != -1]
        stats[key + '_mean'] = np.mean(valid)
        stats[key + '_std'] = np.std(valid)
        # Folds where this metric was undefined (division by zero).
        stats[key + '_outlier'] = len(summary[key]) - len(valid)
    for key, value in stats.items():
        summary[key] = value
    return summary
''' Evaluate model '''
def evaluate_model(model, x, y, batch_size=30):
    """Evaluate *model* on (x, y) and return a metric dict.

    Adds the Keras evaluation loss and the raw prediction scores to the
    dict produced by get_metrics().
    """
    # BUG FIX: batch_size was accepted but silently ignored (Keras fell back
    # to its default of 32); forward it so callers actually control batching.
    test_hist = model.evaluate(x=x, y=y, batch_size=batch_size)
    predict_p = model.predict(x, batch_size=batch_size)
    p = np.array(predict_p).squeeze()
    eval_dict = get_metrics(p, y)
    eval_dict['loss'] = test_hist[0]
    eval_dict['y_score'] = p
    return eval_dict
def compile_model(model_name, hp, op='adam'):
    ''' Construct model '''
    # Builds the architecture via cnn_models.get_model and compiles it for
    # binary classification; returns the model together with the hyper-parameter
    # dict (as returned by get_model, possibly with defaults filled in).
    model, hparams = get_model(name=model_name, hparams=hp)
    model.compile(loss='binary_crossentropy', optimizer=op, metrics=['acc'])
    # print(model.summary())
    return model, hparams
def train_kfold(
        log_dir,
        hparams,
        model_name,
        k,
        state,
        X,
        Y,
        X_test=None,
        Y_test=None,
        X_train_add=None,
        Y_train_add=None,
        batch_size=20,
        epochs=200):
    '''Run stratified k-fold training, logging everything under a timestamped dir.

    X: The training set
    X_test: The testing set
    X_train_add: The additional set for model training, used for the 2nd experiment in paper.

    Side effects: creates <log_dir>/<timestamp>/{models,history,evaluate} with
    per-fold checkpoints, training history and metrics, plus params.json,
    summary.json and (when a test set is given) test.json.
    '''
    ''' Training '''
    # Log
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S-%f')
    model_dir = os.path.join(log_dir, timestamp, 'models')
    os.makedirs(model_dir)
    hist_dir = os.path.join(log_dir, timestamp, 'history')
    os.makedirs(hist_dir)
    eval_dir = os.path.join(log_dir, timestamp, 'evaluate')
    os.makedirs(eval_dir)
    params_savename = os.path.join(log_dir, timestamp, 'params.json')
    summary_savename = os.path.join(log_dir, timestamp, 'summary.json')
    test_hist_savename = os.path.join(log_dir, timestamp, 'test.json')
    # Start training
    kfold = StratifiedKFold(n_splits=k, shuffle=True, random_state=state)
    fold = 1
    accs = []
    for train_index, val_index in kfold.split(X, Y):
        # Fresh model per fold so folds do not share weights.
        model, params = compile_model(model_name, hparams)
        if fold == 1:
            print(model.summary())
            print(params)
        print('\n' + '='*60 + ' Fold: ' + str(fold) + ' ' + '='*60 + '\n')
        # Callback functions
        model_savename = os.path.join(model_dir, 'model{0}.h5'.format(str(fold)))
        hist_savename = os.path.join(hist_dir, 'history{0}.json'.format(str(fold)))
        val_savename = os.path.join(eval_dir, 'evaluate{0}.json'.format(str(fold)))
        cb_list = [
            callbacks.ModelCheckpoint(
                filepath=model_savename,
                monitor='val_acc',
                save_best_only=True
            ),
            callbacks.EarlyStopping(
                monitor='acc',
                patience=6,
            )
        ]
        # Add new training sets
        # NOTE(review): when the add-on sets are None, .any() raises
        # AttributeError, which deliberately routes execution to the
        # plain-X branch in the except clause below.
        try:
            if X_train_add.any() and Y_train_add.any():
                x_train = np.concatenate([X[train_index], X_train_add], axis=0)
                y_train = np.concatenate([Y[train_index], Y_train_add], axis=0)
                # Deterministic shuffle of the merged set (seeded by `state`).
                index = list(range(len(y_train)))
                random.seed(state + 1)
                random.shuffle(index)
                x_train = x_train[index]
                y_train = y_train[index]
        except AttributeError:
            x_train = X[train_index]
            y_train = Y[train_index]
        history = model.fit(
            x_train,
            y_train,
            validation_data=(X[val_index], Y[val_index]),
            batch_size=batch_size,
            epochs=epochs,
            callbacks=cb_list,
            verbose=2
        )
        # Log
        hist_dict = history.history
        # Reload the checkpointed (best val_acc) weights before evaluating.
        m = models.load_model(model_savename, custom_objects={'tf': tf})
        val_dict = evaluate_model(m, X[val_index], Y[val_index])
        accs.append(val_dict['accuracy'])
        log_to_json(hist_dict, hist_savename)
        log_to_json(val_dict, val_savename)
        fold += 1
        K.clear_session()
        print('Session cleared.')
    # Summary
    try:
        if X_test.any() and Y_test.any():
            # Evaluate the fold model with the best validation accuracy.
            model_path = os.path.join(model_dir, 'model{0}.h5'.format(accs.index(max(accs))+1))
            m = models.load_model(model_path, custom_objects={'tf': tf})
            test_dict = evaluate_model(m, X_test, Y_test)
            log_to_json(test_dict, test_hist_savename)
    except AttributeError:
        pass
    log_to_json(hparams, params_savename)
    summary = summary_kfold(eval_dir)
    print(summary)
    log_to_json(summary, summary_savename)
if __name__ == '__main__':
    # HDF5 sequence dataset (dm3); presumably 1000-bp one-hot encoded
    # windows given inp_shape below — confirm against dataset.load_data.
    datapath = r'dataset\seqs_dm3\dm3_seqs.h5'
    hparams = {
        'inp_shape': [1000, 4],
        'embed_n': 4,
        'embed_dim': 64,
        'cnn_filters': [32],
        'cnn_kernels': [2],
        'cnn_dilations': [[1, 1, 1]],
        'cnn_dropouts': [0.5],
        'cnn_regularizers': None,
        'pooling': 'local',
        'max_pool': 2,
        'dense_regularizer': None,
        'batchnormal': False
    }
    # 80/20 train/test split with a fixed seed for reproducibility.
    X_train, Y_train, X_test, Y_test = load_data(datapath, train_size=0.8, state=99)
    train_kfold(
        log_dir=r'test',
        hparams=hparams, model_name='cnn', k=10, state=1,
        X=X_train, Y=Y_train, X_test=X_test, Y_test=Y_test, X_train_add=None, Y_train_add=None,
        batch_size=50, epochs=200)
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import fnmatch
def process_package_filters(available_packages: list[str], package_filters: list[str] | None):
    """Filters the package list against a set of filters.

    A packet is returned if it matches at least one filter. The function keeps the order of the packages.
    """
    if not package_filters:
        return available_packages
    # A filter that matches nothing is almost certainly a typo — fail loudly.
    unmatched = [
        pattern
        for pattern in package_filters
        if not any(fnmatch.fnmatch(pkg, pattern) for pkg in available_packages)
    ]
    if unmatched:
        raise SystemExit(
            f"Some filters did not find any package: {unmatched}, Please check if they are correct."
        )
    selected = []
    for pkg in available_packages:
        if any(fnmatch.fnmatch(pkg, pattern) for pattern in package_filters):
            selected.append(pkg)
    return selected
|
import boto3
BUCKET_NAME = "parwizforogh12"
s3_client = boto3.client('s3')
def upload_files(file_name, bucket, object_name=None, args=None):
    """Upload *file_name* to the S3 bucket *bucket*.

    object_name defaults to the local file name; *args* is passed through to
    boto3 as ExtraArgs (e.g. ACLs or content type).
    """
    if object_name is None:
        object_name = file_name
    s3_client.upload_file(file_name, bucket, object_name, ExtraArgs=args)
    # BUG FIX: the message printed the global BUCKET_NAME even when the caller
    # passed a different bucket; report the bucket actually used.
    print("{} has been uploaded to {} bucket".format(file_name, bucket))
# Module-level side effect: uploads file.txt as soon as the script runs
# (no __main__ guard).
upload_files("file.txt", BUCKET_NAME)
'''
# uploading with resource
BUCKET_NAME = "parwizforogh7777"
s3_client = boto3.resource('s3')
def upload_files(file_name, bucket, object_name=None, args=None):
if object_name is None:
object_name = file_name
s3_client.meta.client.upload_file(file_name, bucket, object_name, ExtraArgs=args)
print("{} has been uploaded to {} bucket".format(file_name, BUCKET_NAME))
upload_files("myfile.txt", BUCKET_NAME)
'''
|
from flask import Flask, request, render_template, json, Response, make_response
import logging
from config import server_host, server_port
from config import LOGGER, DEBUG_LEVEL
from classes import PackageSearch
app = Flask(__name__)
# Ensure that the required JSON data file are pre-loaded in memory at the time of server start.
package_search = PackageSearch.load()
@app.route('/')
@app.route('/sdt/faq')
@app.route('/sdt/')
def index():
    """Serve the landing page with HTTP caching disabled."""
    response = make_response(render_template('index.html'))
    # Forbid caching so clients always fetch the latest page.
    no_cache_headers = (
        ('Cache-Control', 'no-cache, no-store, must-revalidate'),
        ('Pragma', 'no-cache'),
        ('Expires', '0'),
    )
    for header, value in no_cache_headers:
        response.headers.set(header, value)
    return response
@app.route('/getSupportedDistros')
@app.route('/sdt/getSupportedDistros')
def getSupportedDistros():
    """Return the supported distributions as non-cacheable JSON."""
    # Re-load so a refreshed data file is picked up without a server restart.
    distros = PackageSearch.load().getSupportedDistros()
    resp = Response(json.dumps(distros), mimetype="application/json")
    no_cache_headers = (
        ('Cache-Control', 'no-cache, no-store, must-revalidate'),
        ('Pragma', 'no-cache'),
        ('Expires', '0'),
    )
    for header, value in no_cache_headers:
        resp.headers.set(header, value)
    return resp
@app.route('/searchPackages')
@app.route('/sdt/searchPackages')
def searchPackages():
    """Search packages and return JSON results.

    Query parameters: search_term, exact_match, search_bit_flag, page_number.
    """
    # Re-load so a refreshed data file is picked up without a server restart.
    package_search = PackageSearch.load()
    search_term = ''
    exact_match = False
    search_bit_flag = 0
    page_number = 0
    try:
        search_term = str(request.args.get('search_term', ''))
        search_term = search_term.lstrip().rstrip()
        exact_match = request.args.get('exact_match', False)
        search_bit_flag = int(request.args.get('search_bit_flag', '0'))
        page_number = int(request.args.get('page_number', '0'))
        json_data = package_search.searchPackages(search_term, exact_match, search_bit_flag, page_number)
        resp = Response(json_data,mimetype="application/json")
        resp.headers.set('Cache-Control','no-cache, no-store, must-revalidate')
        resp.headers.set('Pragma','no-cache')
        resp.headers.set('Expires','0')
        return resp
    except Exception as ex:
        LOGGER.error('Error in searchPackages with search parameters: %s', str(ex))
        # BUG FIX: the handler previously fell through and returned None,
        # which made Flask raise a TypeError and send an opaque 500 page.
        # Return an explicit JSON error instead.
        return Response(json.dumps({'error': 'invalid search parameters'}),
                        mimetype="application/json", status=400)
# Logic to start flask server if executed via command line.
if __name__ == '__main__':
    # Enable Flask's debugger/reloader only when the configured level is DEBUG.
    if DEBUG_LEVEL == logging.DEBUG:
        app.debug = True
    app.run(host=server_host, port=server_port)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.