text stringlengths 38 1.54M |
|---|
lista = []


class Individual(object):
    """A candidate solution: a genotype paired with its accuracy (fitness)."""

    def __init__(self, genotype, accuracy):
        self.genotype = genotype
        self.accuracy = accuracy

    def __repr__(self):
        return str(self.genotype) + ":" + str(self.accuracy) + "\n"


def getBestFromGeneration(currentPopulation, crossoverRate):
    """Return the `crossoverRate` fittest individuals, best first.

    Sorts `currentPopulation` in place by descending accuracy, then keeps
    the first `crossoverRate` entries.
    """
    best = []
    currentPopulation.sort(key=lambda x: x.accuracy, reverse=True)
    # BUG FIX: the original read `retrievedPeople` before ever assigning it
    # (NameError at runtime); the counter is the number requested.
    retrievedPeople = crossoverRate
    for individual in currentPopulation:
        print("examining individual", individual)
        if retrievedPeople > 0:
            print("individual is better", individual)
            best.append(individual)
            retrievedPeople = retrievedPeople - 1
    return best


# Demo population: same genotype, varying accuracies.
for acc in (0.3, 0.7, 0.2, 0.8, 1, 0.1):
    lista.append(Individual([1, 23, 4], acc))

print(getBestFromGeneration(lista, 2))
###############################################################################
# $Id$
#
# Project: Sub1 project of IRRI
# Purpose: Quality Assessment extraction from MODIS
# Author: Yann Chemin, <yann.chemin@gmail.com>
#
###############################################################################
# Copyright (c) 2008, Yann Chemin <yann.chemin@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
#!/usr/bin/python
import wx
import wx.lib.filebrowsebutton as filebrowse
import os
# For Image Processing
import numpy as N
from osgeo import gdalnumeric
from osgeo import gdal
from osgeo import gdal_array
from osgeo.gdalconst import *
# For icons, pngs, etc coming from images.py
from wx import ImageFromStream, BitmapFromImage, EmptyIcon
import cStringIO
import images
# Define satellite bands
# Based on Landsat channels
qc = ''
# Define output file name
output = ''
# Define list of MODIS types
NameMOD = ['250','500']
# Define list of QA types
NameQC = ['modland_qa_bits','cloud','data_quality','atcorr','adjcorr','diff_orbit_from_500m']
# Define band number
bandno = ['1','2','3','4','5','6','7']
# Define Info Message
overview = """MODIS Quality Assessment Extractor
Makes Human-readable images of Quality Assessment binary bits from MOD09 products.
500m does not have "cloud" and "diff_orbit_from_500m" options.
# MODLAND QA Bits 250m Unsigned Int bits[0-1]
#00 -> class 0: Corrected product produced at ideal quality -- all bands
#01 -> class 1: Corrected product produced at less than idel quality -- some or all bands
#10 -> class 2: Corrected product NOT produced due to cloud effect -- all bands
#11 -> class 3: Corrected product NOT produced due to other reasons -- some or all bands maybe fill value (Note that a value of [11] overrides a value of [01])
# Cloud State 250m Unsigned Int bits[2-3]
#00 -> class 0: Clear -- No clouds
#01 -> class 1: Cloudy
#10 -> class 2: Mixed
#11 -> class 3: Not Set ; Assumed Clear
# Band-wise Data Quality 250m Unsigned Int bits[4-7][8-11]
# Band-wise Data Quality 500m long Int bits[2-5][6-9][10-13][14-17][18-21][22-25][26-29]
#0000 -> class 0: highest quality
#0111 -> class 1: noisy detector
#1000 -> class 2: dead detector; data interpolated in L1B
#1001 -> class 3: solar zenith >= 86 degrees
#1010 -> class 4: solar zenith >= 85 and < 86 degrees
#1011 -> class 5: missing input
#1100 -> class 6: internal constant used in place of climatological data for at least one atmospheric constant
#1101 -> class 7: correction out of bounds, pixel constrained to extreme allowable value
#1110 -> class 8: L1B data faulty
#1111 -> class 9: not processed due to deep ocean or cloud
#Class 10-15: Combination of bits unused
# Atmospheric correction 250m Unsigned Int bit[12]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
# Adjacency correction 250m Unsigned Int bit[13]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
# Different orbit from 500m product, 250m Unsigned Int bit[14]
#0 -> class 0: same orbit as 500m
#1 -> class 1: different orbit from 500m
"""
class MyFrame(wx.Frame):
    def __init__(self,parent, id=-1, title='MODIS Quality Bits Extractor',
            pos=(0,0),
            size=(400,650),
            style=wx.DEFAULT_FRAME_STYLE):
        """Build the main frame: icon, default state, widgets and layout."""
        wx.Frame.__init__(self, parent, id, title, pos, size, style)
        ico = images.getPngGipeIcon()
        self.SetIcon(ico)
        # Suppress wx log pop-ups for the lifetime of the frame.
        self.lognull = wx.LogNull()
        # Input Filenames
        self.qc = qc
        # Defaults: MODLAND QA bits, 250m product, band 1.
        self.qc_type = 'modland_qa_bits'
        self.pixelres = '250'
        self.band_no = '1'
        # Instance copies of the module-level option lists.
        self.NameMOD = NameMOD
        self.NameQC = NameQC
        self.bandno = bandno
        self.output = output
        # Construct Interface
        self.make_text()
        self.make_buttons()
        self.make_radiobuttons1()
        self.make_radiobuttons2()
        self.make_radiobuttons3()
        self.make_fb()
        # Stack all widgets vertically in a single sizer.
        self.mbox = wx.BoxSizer(wx.VERTICAL)
        self.mbox.Add((10,10))
        self.mbox.Add(self.text, 1, wx.EXPAND|wx.CENTER, 10)
        self.mbox.Add(self.cc2, 1, wx.EXPAND, 0)
        self.mbox.Add(self.cc6, 1, wx.EXPAND, 0)
        self.mbox.Add(self.rbox1, 1, wx.CENTER, 0)
        self.mbox.Add(self.rbox2, 1, wx.CENTER, 0)
        self.mbox.Add(self.rbox3, 1, wx.CENTER, 0)
        self.mbox.Add((10,10))
        self.mbox.Add((50,10))
        self.mbox.Add(self.bbox, 1, wx.CENTER, 10)
        self.mbox.Add((10,10))
        self.SetSizer(self.mbox)
        self.bindEvents()
# Process Equations, Handling and saving of output
    def OnOK(self,event):
        """Run the extraction: decode every pixel of the QC band and write
        the class values to the output raster, then close the frame."""
        #print "qc: ", self.qc
        #print "out:", self.output
        if(self.qc==''):
            self.OnFileInError()
        else:
            self.qcF = gdal.Open(self.qc)
            self.bqc = self.qcF.GetRasterBand(1)
            self.test = gdal.Open(self.qc)
            # Create the output file with the input's georeferencing.
            self.CrAr( self.qc, self.output, 'GTiff' )
            self.result = gdal.Open(self.output, GA_Update)
            # Process one scanline at a time, bottom row first.
            for self.y in range(self.bqc.YSize - 1, -1, -1):
                print self.y
                self.scanline1=self.bqc.ReadAsArray(0, self.y, self.bqc.XSize, 1, self.bqc.XSize, 1)
                for self.x in range(0, self.bqc.XSize - 1, 1):
                    # NOTE(review): the exclusive bound skips the last
                    # column (XSize-1) -- confirm that is intended.
                    self.pix1 = self.scanline1[0][self.x]
                    self.scanline1[0][self.x]=self.qcbits(self.pix1,self.qc_type,int(self.pixelres),int(self.band_no))
                self.result.GetRasterBand(1).WriteArray(N.reshape(self.scanline1,(1,self.bqc.XSize)), 0, self.y)
            self.Destroy()
#def bin(self,i):
#"""
#Convert Binary to Integer Bit Field
#Manish Jethani (manish.j at gmx.net)
#http://bytes.com/forum/thread20381.html
#"""
#b = ''
#while i > 0:
#j = i & 1
#b = str(j) + b
#i >>= 1
#return b
    def qcbits(self,qcbit,qcflag,pixres,bandno):
        """Decode one raw QC pixel value into a class number.

        qcbit  -- raw QC integer read from the raster
        qcflag -- QA field to extract (one of NameQC)
        pixres -- pixel resolution, 250 or 500 (m)
        bandno -- band number, used by "data_quality" only
        Returns the extracted class (int); shows an error dialog for
        flag/resolution combinations that do not exist.
        """
        outclas = 0
        #calculate modland QA bits extraction
        if (qcflag=="modland_qa_bits"):
            if (pixres==500):
                # 500m product
                outclas = self.qc500a(qcbit)
            else:
                # 250m product
                outclas = self.qc250a(qcbit)
        #calculate cloud state
        elif (qcflag=="cloud"):
            if (pixres==500):
                # 500m product
                # Signal user that the flag name is badly written
                # therefore not understood by the application
                print "flag name unavailable for 500m, please restart"
                self.OnQCInError()
            else:
                # ONLY 250m product!
                outclas = self.qc250b(qcbit)
        #calculate modland QA bits extraction
        elif (qcflag=="data_quality"):
            if (pixres==500):
                # 500m product
                outclas = self.qc500c(qcbit, bandno)
            else:
                # 250m product
                outclas = self.qc250c(qcbit, bandno)
        #calculate atmospheric correction flag
        elif (qcflag=="atcorr"):
            if (pixres==500):
                # 500m product
                outclas = self.qc500d(qcbit)
            else:
                # 250m product
                outclas = self.qc250d(qcbit)
        #calculate adjacency correction flag
        elif (qcflag=="adjcorr"):
            if (pixres==500):
                # 500m product
                outclas = self.qc500e(qcbit)
            else:
                # 250m product
                outclas = self.qc250e(qcbit)
        #calculate different orbit from 500m flag
        elif (qcflag=="diff_orbit_from_500m"):
            if (pixres==500):
                # 500m product
                # Signal user that the flag name is badly written
                # therefore not understood by the application
                print "flag name unavailable for 500m, please restart"
                self.OnQCInError()
            else:
                # ONLY 250m product!
                outclas = self.qc250f(qcbit)
        else:
            # Signal user that the flag name is badly written
            # therefore not understood by the application
            print "Unknown flag name, please check spelling"
            self.OnQCInError()
        return outclas
def qc250a(self, pixel):
"""
# MODLAND QA Bits 250m Unsigned Int bits[0-1]
#00 -> class 0: Corrected product produced at ideal quality -- all bands
#01 -> class 1: Corrected product produced at less than idel quality -- some or all bands
#10 -> class 2: Corrected product NOT produced due to cloud effect -- all bands
#11 -> class 3: Corrected product NOT produced due to other reasons -- some or all bands maybe fill value (Note that a value of [11] overrides a value of [01])
"""
pixel = pixel & 3
return pixel
def qc250b(self, pixel):
"""
# Cloud State 250m Unsigned Int bits[2-3]
#00 -> class 0: Clear -- No clouds
#01 -> class 1: Cloudy
#10 -> class 2: Mixed
#11 -> class 3: Not Set ; Assumed Clear
"""
pixel >> 2
pixel = pixel & 3
return pixel
def qc250c(self,pixel,bandno):
"""
# Band-wise Data Quality 250m Unsigned Int bits[0-1]
#0000 -> class 0: highest quality
#0111 -> class 1: noisy detector
#1000 -> class 2: dead detector; data interpolated in L1B
#1001 -> class 3: solar zenith >= 86 degrees
#1010 -> class 4: solar zenith >= 85 and < 86 degrees
#1011 -> class 5: missing input
#1100 -> class 6: internal constant used in place of climatological data for at least one atmospheric constant
#1101 -> class 7: correction out of bounds, pixel constrained to extreme allowable value
#1110 -> class 8: L1B data faulty
#1111 -> class 9: not processed due to deep ocean or cloud
#Class 10-15: Combination of bits unused
"""
pixel >> 4 + (4*(bandno-1))
pixel = pixel & 15
return pixel
def qc250d(self, pixel):
"""
# Atmospheric correction 250m Unsigned Int bit[12]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
"""
pixel >> 12
pixel = pixel & 1
return pixel
def qc250e(self,pixel):
"""
# Adjacency correction 250m Unsigned Int bit[13]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
"""
pixel >> 13
pixel = pixel & 1
return pixel
def qc250f(self,pixel):
"""
# Different orbit from 500m product, 250m Unsigned Int bit[14]
#0 -> class 0: same orbit as 500m
#1 -> class 1: different orbit from 500m
"""
pixel >> 14
pixel = pixel & 1
return pixel
def qc500a(self,pixel):
"""
# MODLAND QA Bits 500m long int bits[0-1]
#00 -> class 0: Corrected product produced at ideal quality -- all bands
#01 -> class 1: Corrected product produced at less than idel quality -- some or all bands
#10 -> class 2: Corrected product NOT produced due to cloud effect -- all bands
#11 -> class 3: Corrected product NOT produced due to other reasons -- some or all bands mayb be fill value (Note that a value of [11] overrides a value of [01])
"""
pixel = pixel & 3
return pixel
def qc500c(self,pixel,bandno):
"""
# Band-wise Data Quality 500m long Int
#bits[2-5][6-9][10-13][14-17][18-21][22-25][26-29]
#0000 -> class 0: highest quality
#0111 -> class 1: noisy detector
#1000 -> class 2: dead detector; data interpolated in L1B
#1001 -> class 3: solar zenith >= 86 degrees
#1010 -> class 4: solar zenith >= 85 and < 86 degrees
#1011 -> class 5: missing input
#1100 -> class 6: internal constant used in place of climatological data for at least one atmospheric constant
#1101 -> class 7: correction out of bounds, pixel constrained to extreme allowable value
#1110 -> class 8: L1B data faulty
#1111 -> class 9: not processed due to deep ocean or cloud
#Class 10-15: Combination of bits unused
"""
pixel >> 2 + (4*(bandno-1))
pixel = pixel & 15
return pixel
def qc500d(self,pixel):
"""
# Atmospheric correction 500m long Int bit[30]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
"""
pixel >> 30
pixel = pixel & 1
return pixel
def qc500e(self,pixel):
"""
# Adjacency correction 500m long Int bit[31]
#0 -> class 0: Not Corrected product
#1 -> class 1: Corrected product
"""
pixel >> 31
pixel = pixel & 1
return pixel
def CrAr(self, src_flnm, dst_flnm, format ):
"""
CrAr(): Create Array with Georeferencing from another file (src_flnm), save it in file (dst_flnm) with format (format)
CrAr( self, src_flnm, dst_flnm, format )
"""
cr_opts=[]
# Read information from source file.
src_ds = gdal.Open(str(src_flnm))
gt = src_ds.GetGeoTransform()
pj = src_ds.GetProjection()
src_ds = None
# Standard checking on the GDAL driver
Driver = gdal.GetDriverByName( str(format) )
if Driver is None:
raise ValueError, "CrAr: No DriverFound "+format
DriverMTD = Driver.GetMetadata()
if not DriverMTD.has_key('DCAP_CREATE'):
print 'Format Driver %s does not support creation and piecewise writing.\nPlease select a format that does, such as GTiff or HFA (Erdas/Imagine).' % format
sys.exit( 1 )
# Set up the band number
nbands = 1
#print "nbands =", nbands
# Collect information on source files
flinfos = self.names_to_fileinfos( str(src_flnm) )
ulx = flinfos[0].ulx
uly = flinfos[0].uly
lrx = flinfos[0].lrx
lry = flinfos[0].lry
# get largest extends
for fi in flinfos:
ulx = min(ulx, fi.ulx)
uly = max(uly, fi.uly)
lrx = max(lrx, fi.lrx)
lry = min(lry, fi.lry)
# Set other info
psize_x = flinfos[0].geotransform[1]
psize_y = flinfos[0].geotransform[5]
band_type = flinfos[0].band_type
# Try opening as an existing file
gdal.PushErrorHandler( 'CPLQuietErrorHandler' )
out_fh = gdal.Open( str(dst_flnm), gdal.GA_Update )
gdal.PopErrorHandler()
# Otherwise create a new file
if out_fh is None:
geot = [ulx, psize_x, 0, uly, 0, psize_y]
print geot[0], geot[1], geot[2], geot[3], geot[4]
xsize = int((lrx-ulx)/geot[1]+0.5)
ysize = int((lry-uly)/geot[5]+0.5)
out_fh=Driver.Create(str(dst_flnm),xsize,ysize,nbands,band_type,cr_opts)
if out_fh is None:
raise ValueError, "CrAr: Failed to create new file "+dst_flnm
sys.exit( 1 )
out_fh.SetGeoTransform( gt )
out_fh.SetProjection( pj )
#out_fh.GetRasterBand(1).SetRasterColorTable(flinfos[0].ct)
nodata = None
iband = 1
for fi in flinfos:
fi.copy_into( out_fh, 1, iband, nodata )
iband=iband+1
iband = 0
def names_to_fileinfos( self, name ):
file_infos = []
fi = file_info()
if fi.init_from_name( name ) == 1:
file_infos.append( fi )
return file_infos
    def OnFileInError(self):
        """Warn the user that required input/output filenames are missing."""
        dlg = wx.MessageDialog(self,
            'Minimum files to add:\n\n Input files => NDVI and Modis Band7\n One Output file',
            'Error',wx.OK | wx.ICON_INFORMATION)
        dlg.ShowModal()
        dlg.Destroy()

    def OnQCInError(self):
        """Warn the user that the selected QC type/resolution is invalid."""
        dlg = wx.MessageDialog(self,
            'QC type error\n\n Please check your input',
            'Error',wx.OK | wx.ICON_INFORMATION)
        dlg.ShowModal()
        dlg.Destroy()
# Path+filename seek and set
    def make_fb(self):
        """Create the input (QC) and output file browse widgets."""
        # get current working directory
        self.dirnm = os.getcwd()
        self.cc2 = filebrowse.FileBrowseButton(
            self, -1, size=(50, -1), labelText='QC File:',
            startDirectory = self.dirnm,
            fileMode=wx.OPEN,
            changeCallback = self.fbbCallback2,
            )
        self.cc6 = filebrowse.FileBrowseButton(
            self, -1, size=(50, -1), labelText='OUT File: ',
            startDirectory = self.dirnm,
            fileMask='*.tif',
            fileMode=wx.SAVE,
            changeCallback = self.fbbCallback6
            )

    # Collect path+filenames
    def fbbCallback2(self, evt):
        """Store the QC input path chosen in the browse widget."""
        self.qc = str(evt.GetString())

    def fbbCallback6(self, evt):
        """Store the output path chosen in the browse widget."""
        self.output = str(evt.GetString())

    # Front text
    def make_text(self):
        """Create the static description label shown at the top."""
        self.text = wx.StaticText(self, -1, "This is processing MODIS Quality Assessment Bits through the use of gdal and numeric.")
# QC type radio buttons
    def make_radiobuttons1(self):
        """Radio box for selecting the MODIS pixel resolution (250/500m)."""
        self.rbox1 = wx.BoxSizer(wx.HORIZONTAL)
        self.rb1 = wx.RadioBox(self, -1, "Select MODIS Type",
            wx.DefaultPosition, wx.DefaultSize,
            self.NameMOD, 2, wx.RA_SPECIFY_COLS)
        self.rb1.SetToolTip(wx.ToolTip("Select MODIS type"))
        self.rb1.SetLabel("MODIS Type")
        self.rbox1.Add(self.rb1,1,wx.ALL,10)
def EvtRadioBox1(self, evt):
self.nb = evt.GetInt()
self.pixelres = NameMOD[self.nb]
#print self.pixelres
    def make_radiobuttons2(self):
        """Radio box for the band number (only used by data_quality)."""
        self.rbox2 = wx.BoxSizer(wx.HORIZONTAL)
        self.rb2 = wx.RadioBox(self, -1, "Select Band number (data quality only)",
            wx.DefaultPosition, wx.DefaultSize,
            self.bandno, 7, wx.RA_SPECIFY_COLS)
        self.rb2.SetToolTip(wx.ToolTip("Select Band number (for data_quality)"))
        self.rb2.SetLabel("Band Number (for \"data quality\" only)")
        self.rbox2.Add(self.rb2,1,wx.ALL,10)

    def EvtRadioBox2(self, evt):
        """Record the band number picked in rb2."""
        self.nb = evt.GetInt()
        self.band_no = self.bandno[self.nb]
        #print self.band_no
    def make_radiobuttons3(self):
        """Radio box for selecting which QA field to extract."""
        self.rbox3 = wx.BoxSizer(wx.HORIZONTAL)
        self.rb3 = wx.RadioBox(self, -1, "Select QC Type",
            wx.DefaultPosition, wx.DefaultSize,
            self.NameQC, 2, wx.RA_SPECIFY_COLS)
        self.rb3.SetToolTip(wx.ToolTip("Select QC type"))
        self.rb3.SetLabel("QC Type")
        self.rbox3.Add(self.rb3,1,wx.ALL,10)
def EvtRadioBox3(self, evt):
self.nb = evt.GetInt()
self.qc_type = NameQC[self.nb]
#print self.qc_type
# Bottom buttons
    def make_buttons(self):
        """Create the OK / Cancel / Info bitmap buttons in a row."""
        self.bbox = wx.BoxSizer(wx.HORIZONTAL)
        # OnOK
        bmp0 = images.getPngDialogOKBitmap()
        self.b0 = wx.BitmapButton(self, 20, bmp0, (20, 20),
            (bmp0.GetWidth()+50, bmp0.GetHeight()+10), style=wx.NO_BORDER)
        self.b0.SetToolTipString("Process")
        self.bbox.Add(self.b0,1,wx.CENTER,10)
        # OnCancel
        bmp1 = images.getPngDialogCancelBitmap()
        self.b1 = wx.BitmapButton(self, 30, bmp1, (20, 20),
            (bmp1.GetWidth()+50, bmp1.GetHeight()+10), style=wx.NO_BORDER)
        self.b1.SetToolTipString("Abort")
        self.bbox.Add(self.b1,1,wx.CENTER,10)
        # OnInfo
        bmp2 = images.getPngHelpAboutBitmap()
        self.b2 = wx.BitmapButton(self, 40, bmp2, (20, 20),
            (bmp2.GetWidth()+50, bmp2.GetHeight()+10), style=wx.NO_BORDER)
        self.b2.SetToolTipString("Help/Info.")
        self.bbox.Add(self.b2,1,wx.CENTER,10)

    def bindEvents(self):
        """Wire window, button and radio-box events to their handlers."""
        self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
        self.Bind(wx.EVT_BUTTON, self.OnOK, self.b0)
        self.Bind(wx.EVT_BUTTON, self.OnCancel, self.b1)
        self.Bind(wx.EVT_BUTTON, self.OnInfo, self.b2)
        self.Bind(wx.EVT_RADIOBOX, self.EvtRadioBox1, self.rb1)
        self.Bind(wx.EVT_RADIOBOX, self.EvtRadioBox2, self.rb2)
        self.Bind(wx.EVT_RADIOBOX, self.EvtRadioBox3, self.rb3)
    def OnCloseWindow(self, event):
        """Close the frame when the window-manager close button is used."""
        self.Destroy()

    def OnCancel(self, event):
        """Abort: close the frame without processing."""
        self.Destroy()

    def OnInfo(self,event):
        """Show the module-level overview text in a help dialog."""
        dlg = wx.MessageDialog(self, overview,
            'Help', wx.OK | wx.ICON_INFORMATION
            )
        dlg.ShowModal()
        dlg.Destroy()
class file_info:
    """A class holding information about a GDAL file."""

    def init_from_name(self, filename):
        """
        Initialize file_info from filename
        filename -- Name of file to read.
        Returns 1 on success or 0 if the file can't be opened.
        """
        fh = gdal.Open( str(filename) )
        if fh is None:
            return 0
        # Cache raster geometry and georeferencing for later window math.
        self.filename = filename
        self.bands = fh.RasterCount
        self.xsize = fh.RasterXSize
        self.ysize = fh.RasterYSize
        self.band_type = fh.GetRasterBand(1).DataType
        self.projection = fh.GetProjection()
        self.geotransform = fh.GetGeoTransform()
        # Upper-left corner in georeferenced units...
        self.ulx = self.geotransform[0]
        self.uly = self.geotransform[3]
        # ...and lower-right, derived from pixel size times raster size.
        self.lrx = self.ulx + self.geotransform[1] * self.xsize
        self.lry = self.uly + self.geotransform[5] * self.ysize
        ct = fh.GetRasterBand(1).GetRasterColorTable()
        if ct is not None:
            self.ct = ct.Clone()
        else:
            self.ct = None
        return 1

    def copy_into( self, t_fh, s_band = 1, t_band = 1, nodata_arg=None ):
        """
        Copy this files image into target file.

        t_fh       -- open target dataset (GA_Update)
        s_band     -- source band number (1-based)
        t_band     -- target band number (1-based)
        nodata_arg -- value treated as transparent during the copy
        Returns 1 when there is nothing to copy, else the raster_copy result.
        """
        t_geotransform = t_fh.GetGeoTransform()
        t_ulx = t_geotransform[0]
        t_uly = t_geotransform[3]
        t_lrx = t_geotransform[0] + t_fh.RasterXSize * t_geotransform[1]
        t_lry = t_geotransform[3] + t_fh.RasterYSize * t_geotransform[5]
        # figure out intersection region
        tgw_ulx = max(t_ulx,self.ulx)
        tgw_lrx = min(t_lrx,self.lrx)
        # The vertical ordering depends on the sign of the pixel height.
        if t_geotransform[5] < 0:
            tgw_uly = min(t_uly,self.uly)
            tgw_lry = max(t_lry,self.lry)
        else:
            tgw_uly = max(t_uly,self.uly)
            tgw_lry = min(t_lry,self.lry)
        # do they even intersect?
        if tgw_ulx >= tgw_lrx:
            return 1
        if t_geotransform[5] < 0 and tgw_uly <= tgw_lry:
            return 1
        if t_geotransform[5] > 0 and tgw_uly >= tgw_lry:
            return 1
        # compute target window in pixel coordinates.
        tw_xoff = int((tgw_ulx - t_geotransform[0]) / t_geotransform[1] + 0.1)
        tw_yoff = int((tgw_uly - t_geotransform[3]) / t_geotransform[5] + 0.1)
        tw_xsize = int((tgw_lrx-t_geotransform[0])/t_geotransform[1] + 0.5) - tw_xoff
        tw_ysize = int((tgw_lry-t_geotransform[3])/t_geotransform[5] + 0.5) - tw_yoff
        if tw_xsize < 1 or tw_ysize < 1:
            return 1
        # Compute source window in pixel coordinates.
        sw_xoff = int((tgw_ulx - self.geotransform[0]) / self.geotransform[1])
        sw_yoff = int((tgw_uly - self.geotransform[3]) / self.geotransform[5])
        sw_xsize = int((tgw_lrx - self.geotransform[0]) / self.geotransform[1] + 0.5) - sw_xoff
        sw_ysize = int((tgw_lry - self.geotransform[3]) / self.geotransform[5] + 0.5) - sw_yoff
        if sw_xsize < 1 or sw_ysize < 1:
            return 1
        # Open the source file, and copy the selected region.
        s_fh = gdal.Open( str(self.filename) )
        return self.raster_copy( s_fh, sw_xoff, sw_yoff, sw_xsize, sw_ysize, s_band, t_fh, tw_xoff, tw_yoff, tw_xsize, tw_ysize, t_band, nodata_arg )

    def raster_copy( self, s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n, t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n, nodata=None ):
        """Copy (and resample) one window between two open datasets."""
        if nodata is not None:
            # Delegate so nodata source pixels keep the target's values.
            return self.raster_copy_with_nodata(
                s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n,
                t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n,
                nodata )
        s_band = s_fh.GetRasterBand( s_band_n )
        t_band = t_fh.GetRasterBand( t_band_n )
        # ReadRaster resamples the source window to the target window size.
        data = s_band.ReadRaster( s_xoff, s_yoff, s_xsize, s_ysize, t_xsize, t_ysize, t_band.DataType )
        t_band.WriteRaster( t_xoff, t_yoff, t_xsize, t_ysize, data, t_xsize, t_ysize, t_band.DataType )
        return 0

    def raster_copy_with_nodata( self, s_fh, s_xoff, s_yoff, s_xsize, s_ysize, s_band_n,t_fh, t_xoff, t_yoff, t_xsize, t_ysize, t_band_n, nodata ):
        """Copy a window, preserving target pixels where the source is nodata."""
        # NOTE(review): this uses the obsolete Numeric package while the
        # rest of the file imports numpy -- confirm Numeric is available.
        import Numeric as Num
        s_band = s_fh.GetRasterBand( s_band_n )
        t_band = t_fh.GetRasterBand( t_band_n )
        data_src = s_band.ReadAsArray( s_xoff, s_yoff, s_xsize, s_ysize, t_xsize, t_ysize )
        data_dst = t_band.ReadAsArray( t_xoff, t_yoff, t_xsize, t_ysize )
        nodata_test = Num.equal(data_src,nodata)
        # Where the source equals nodata, keep the destination value.
        to_write = Num.choose(nodata_test, (data_src, data_dst))
        t_band.WriteArray( to_write, t_xoff, t_yoff )
        return 0
class MainApp(wx.App):
    """wx application bootstrap (unused by the __main__ block below, which
    builds a bare wx.App instead; kept for embedding/reuse)."""

    def OnInit(self):
        # BUG FIX: the original instantiated `MainFrame`, a name that does
        # not exist anywhere in this module; the frame class is `MyFrame`.
        frame = MyFrame(None)
        frame.Show(True)
        self.SetTopWindow(frame)
        return True
if __name__ == '__main__':
    # Launch directly with a bare wx.App; the MainApp subclass defined
    # above is not used by this entry point.
    app = wx.App()
    frame = MyFrame(None)
    frame.Show()
    app.MainLoop()
|
### Rocket Dashboard version 0.1 -- Andrew R Gross -- 2020-10-29
from plotly.subplots import make_subplots
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
app = dash.Dash() # This declares the dash object and names it "app"
### Define toggle switches - These are not used currently
toggle_one_status = 0
toggle_two_status = 0
toggle_three_status = 0
toggle_four_status = 0
slider = 75
# Initialize figure with subplots, starting with the collback function
@app.callback(Output(component_id='page', component_property='figure'), [Input('interval-component', 'n_intervals')]) # The callback decorator
def update_page(n): # The function the callback wraps
    """Rebuild the full dashboard figure; triggered by the interval timer.

    n -- tick counter supplied by dcc.Interval (unused, required by Dash).
    Returns the assembled plotly figure.
    """
    # PERF FIX: the original re-read and re-parsed test-data.csv three
    # times per 200 ms tick; read the first data row once and slice it.
    latest = pd.read_csv('test-data.csv').iloc[0]
    fig = make_subplots( # Defining the figure object
        rows=2, cols=5, # The number of subfigures in the master figure
        column_widths=[0.2, 0.2, 0.2, 0.2, 0.2],
        row_heights=[1.4, 0.8],
        specs=[[None, {"type": "scattergeo", "colspan": 2}, None, None, {"type": "bar", "rowspan":2}], # Contents of the subfigures
               [{"type": "scatter", "colspan": 4}, None, None, None, None]])
    # The next section defines the subfigures
    # This defines a scattergeo figure (the Earth)
    fig.add_trace(
        go.Scattergeo(mode="markers",
                      hoverinfo="text",
                      showlegend=False,
                      marker=dict(color="crimson", size=4, opacity=0.8)),
        row=1, col=2 )
    # Figure definition: the four dots (as a scatter plot)
    fig.add_trace(
        go.Scatter(x=[0,1,2,3.2], y=[1,1,1,1], mode='markers',
                   marker=dict(
                       color=latest.iloc[0:4], opacity = [1,1,1,1],
                       size=[55, 55, 55, 80],
                       showscale=False,
                       colorscale=[[0.0, "rgb(190,190,190)"],
                                   [1.0, "rgb(240,40,40)"]],
                       line=dict(width=6, color='DarkSlateGrey'))),
        row=2, col=1)
    # Figure definition: Throttle bar
    fig.add_trace(
        go.Bar(y=latest[4:5], width = 0.5,
               marker=dict(color=latest[4:5], colorscale= 'Peach', cmin = 0, cmax = 100, showscale = True )),
        row=1, col=5)
    # Subfigure Parameters - These modify the subfigures
    # Earth parameters
    fig.update_geos(
        projection_type="orthographic",
        landcolor="white",
        oceancolor="MidnightBlue",
        showocean=True,
        lakecolor="LightBlue")
    # Parameters for all plots
    fig.update_yaxes(title_text="Throttle %", range=[0, 100], row=1, col=5)
    fig.update_layout(
        paper_bgcolor='rgba(0,0,0,0)',
        plot_bgcolor='rgba(0,0,0,0)',
        xaxis=dict(
            autorange=True,
            showgrid=False,
            ticks='',
            showticklabels=False,
            visible=False ),
        yaxis=dict(
            autorange=True,
            showgrid=False,
            ticks='',
            showticklabels=False,
            visible=False ),
        showlegend=False,
        margin=dict(r=10, t=25, b=40, l=60),
        annotations=[
            dict(
                text="Source: NOAA",
                showarrow=False,
                xref="paper",
                yref="paper",
                x=0,
                y=0) ])
    return fig
### The Layout: this defines the page display settings
app.layout = html.Div([
    dcc.Graph(id='page', style={'height':700}), # The page only has two components: the main figure, and a timer
    dcc.Interval( # This timer triggers the callback function every 200 ms
        id='interval-component',
        interval=200, # in milliseconds
        n_intervals=0)
])
### The app run command creates and updates the server with the page
app.run_server(debug=False, use_reloader=False) #
|
import FWCore.ParameterSet.Config as cms
from Configuration.Eras.Era_Phase2C11_cff import Phase2C11
from Configuration.ProcessModifiers.dd4hep_cff import dd4hep
# CMSSW era definition: Phase-2 C11 era combined with the DD4hep
# geometry-description process modifier.
Phase2C11_dd4hep = cms.ModifierChain(Phase2C11, dd4hep)
|
from bs4 import BeautifulSoup
import requests
import re
from get_contaminent_details1 import get_contaminent_details
from pprint import pprint
def assign_data(contam_name, contam_data):
    """given the name of a contamination and the data for it, assign by dict values

    contam_name: str (currently unused; kept for interface compatibility)
    contam_data: list of 9 scraped values, positional
    Returns a dict with the five fields of interest.
    """
    # Positions of the fields we keep within the 9-element record; a dict
    # mapping replaces the original range(9) + if/elif chain.
    field_by_index = {
        0: 'potential_effect',
        1: 'times_greater_than_legal',
        4: 'ppb',
        6: 'health_guideline',
        8: 'legal_limit',
    }
    if len(contam_data) != 9:
        print('error: data missing')
    return {name: contam_data[i] for i, name in field_by_index.items()}
if __name__ == "__main__":
    # Scrape the EWG tap-water report for one water system and reshape
    # each contaminant's raw field list into a named dict.
    URL = 'https://www.ewg.org/tapwater/system.php?pws=WA5372250'
    page = requests.get(URL)
    soup = BeautifulSoup(page.content, 'html.parser')
    contam_dict = {}
    contam_dict0 = get_contaminent_details(soup)
    for c in contam_dict0.keys():
        contam_dict[c] = assign_data(c, contam_dict0[c])
    pprint(contam_dict)
|
import numpy as np
import json
import Plots
import SpiralMapping
def analog_generator(filename, CSNR, symbols, alpha, sigma):
    """Simulate spiral-mapped analog transmission over AWGN and Rayleigh
    channels, one run per CSNR point, save the results and return SDRs.

    filename -- path handed to save() for the JSON dump
    CSNR     -- array of channel SNR values (linear scale)
    symbols  -- number of source-symbol pairs per CSNR point
    alpha    -- mapping exponent passed to SpiralMapping.mapping
    sigma    -- per-CSNR noise standard deviations
    Returns (SDR_AWGN, SDR_Ray), both in dB.
    """
    # Working buffers, one row (or slot) per CSNR point.
    rxSignal_aux = np.zeros((len(CSNR), symbols))
    rxSignal_ML_aux = np.zeros((len(CSNR), symbols))
    gamma = np.zeros(len(CSNR))
    delta = np.zeros(len(CSNR))
    txSignal = np.zeros((len(CSNR), symbols))
    rxSignal_Ray_aux = np.zeros((len(CSNR), symbols))
    rxSignal_ML_Ray_aux = np.zeros((len(CSNR), symbols))
    gamma_Ray = np.zeros(len(CSNR))
    delta_Ray = np.zeros((len(CSNR), symbols))
    txSignal_Ray = np.zeros((len(CSNR), symbols))
    h_aux = np.zeros((len(CSNR), symbols))
    data = np.zeros((2, symbols, len(CSNR)))
    DataOut = np.zeros((2, symbols, len(CSNR)))
    data_ray = np.zeros((2, symbols, len(CSNR)))
    DataOut_Ray = np.zeros((2, symbols, len(CSNR)))
    MSE_AWGN = np.zeros(len(CSNR))
    SDR_AWGN = np.zeros(len(CSNR))
    MSE_Ray = np.zeros(len(CSNR))
    SDR_Ray = np.zeros(len(CSNR))
    #
    for i in range(len(CSNR)):
        print(f'{i+1}/{len(CSNR)}')
        # Fixed seed so every CSNR point sees the same source sequence.
        np.random.seed(987654321)
        # Spiral spacing chosen as a function of the channel SNR.
        delta[i] = 2 * np.pi * ((6 * 0.16 ** 2) / (CSNR[i])) ** (1 / 4)
        x = np.zeros((2, symbols))
        for k in range(symbols):
            s1 = np.random.randn()
            s2 = np.random.randn()
            # x[:, k] = [s1, s2]
            x[0, k] = s1
            x[1, k] = s2
            # Map each Gaussian pair onto a single channel symbol.
            txSignal[i, k] = SpiralMapping.mapping(s1, s2, delta[i], alpha)
        data[:, :, i] = x
        # RMS transmit amplitude, used to scale the channel noise below.
        gamma[i] = np.sqrt(np.mean(np.absolute(txSignal[i, :]) ** 2))
        # Received signal
        rxSignal_aux[i, :] = txSignal[i, :] + gamma[i] * sigma[i] * np.random.randn(1, symbols)
        # ML decoding with MMSE filter
        rxSignal_ML_aux[i, :] = rxSignal_aux[i, :] / (1 + 2 * sigma[i] ** 2)
        theta_est = np.sign(rxSignal_ML_aux[i, :]) * (np.absolute(rxSignal_ML_aux[i, :]) ** (1 / alpha))
        # Invert the spiral: recover both source estimates from the angle.
        s1_hat = delta[i] / np.pi * np.sign(theta_est) * theta_est * np.sin(theta_est)
        s2_hat = delta[i] / np.pi * theta_est * np.cos(theta_est)
        # print(np.shape([s1_hat, s2_hat]))
        DataOut[:, :, i] = [s1_hat, s2_hat]
        # MSE_AWGN[i] = (1 / 2) * np.mean((data[0, :, i] - DataOut[0, :, i]) ** 2 + (data[1, :, i] - DataOut[1, :, i]) ** 2)
        MSE_AWGN[i] = np.mean(np.mean(np.square(data[:, :, i] - DataOut[:, :, i])))
        SDR_AWGN[i] = 10 * np.log10(1 / MSE_AWGN[i])
        # Rayleigh Channel
        np.random.seed(987654321)
        # Rayleigh-distributed channel magnitudes (complex Gaussian modulus).
        h_aux[i, :] = np.absolute(np.sqrt(0.5) * (np.random.randn(1, symbols) + 1.j * np.random.randn(1, symbols)))
        # Per-symbol spiral spacing adapted to the instantaneous gain.
        delta_Ray[i, :] = 2 * np.pi * ((6 * 0.16 ** 2) / (h_aux[i, :] ** 2 * CSNR[i])) ** (1 / 4)
        x = np.zeros((2, symbols))
        for k in range(symbols):
            s1 = np.random.randn()
            s2 = np.random.randn()
            x[:, k] = [s1, s2]
            txSignal_Ray[i, k] = SpiralMapping.mapping(s1, s2, delta_Ray[i, k], alpha)
        data_ray[:, :, i] = x
        gamma_Ray[i] = np.sqrt(np.mean(np.absolute(txSignal_Ray[i, :]) ** 2))
        np.random.seed(987654321)
        # Received signal (fading times signal plus scaled AWGN)
        rxSignal_Ray_aux[i, :] = txSignal_Ray[i, :] * h_aux[i, :] + gamma_Ray[i] * sigma[i] * np.random.randn(1, symbols)
        # ML decoding with MMSE filter (channel-matched)
        rxSignal_ML_Ray_aux[i, :] = h_aux[i, :] * rxSignal_Ray_aux[i, :] / (h_aux[i, :] ** 2 + 2 * sigma[i] ** 2)
        theta_est = np.sign(rxSignal_ML_Ray_aux[i, :]) * (np.absolute(rxSignal_ML_Ray_aux[i, :]) ** (1 / alpha))
        s1_hat = delta_Ray[i] / np.pi * np.sign(theta_est) * theta_est * np.sin(theta_est)
        s2_hat = delta_Ray[i] / np.pi * theta_est * np.cos(theta_est)
        DataOut_Ray[:, :, i] = [s1_hat, s2_hat]
        MSE_Ray[i] = 1 / 2 * np.mean((data_ray[0, :, i] - DataOut_Ray[0, :, i]) ** 2 + (data_ray[1, :, i] - DataOut_Ray[1, :, i]) ** 2)
        SDR_Ray[i] = 10 * np.log10(1 / MSE_Ray[i])
    out_AWGN = DataOut # (2, symbols, CSNR)
    out_Ray = rxSignal_ML_Ray_aux # (CSNR, symbols)
    out_delta_Ray = delta_Ray # (CSNR, symbols)
    save(filename, out_AWGN, out_Ray, out_delta_Ray, data_ray, CSNR, symbols)
    print('Generate Done!')
    return SDR_AWGN, SDR_Ray
def save(filename, out_AWGN, out_Ray, out_delta_Ray, data_ray, CSNR, symbols):
    """Serialize the simulation arrays to `filename` as JSON.

    All array arguments must be numpy arrays (converted via tolist());
    symbols is stored as-is.
    """
    data = {"out_AWGN": out_AWGN.tolist(),
            "out_Ray": out_Ray.tolist(),
            "out_delta_Ray": out_delta_Ray.tolist(),
            "data_ray": data_ray.tolist(),
            "CSNR": CSNR.tolist(),
            "symbols": symbols,
            }
    # FIX: `with` guarantees the handle is closed even if json.dump raises
    # (the original leaked the file object on error).
    with open(filename, "w") as f:
        json.dump(data, f)
def load(filename):
    """Inverse of save(): read `filename` and return the stored fields.

    Returns (out_AWGN, out_Ray, out_delta_Ray, data_ray, CSNR, symbols)
    as plain Python lists/scalars (no numpy conversion).
    """
    # FIX: `with` guarantees the handle is closed even if json.load raises
    # (the original leaked the file object on error).
    with open(filename, "r") as f:
        data = json.load(f)
    return (data["out_AWGN"], data["out_Ray"], data["out_delta_Ray"],
            data["data_ray"], data["CSNR"], data["symbols"])
# test
# CSNRdB = np.array([40, 50, 60])
# CSNR = 10 ** (CSNRdB / 10)
# sigma = np.sqrt(10 ** (-CSNRdB / 10))
# # print(sigma)
# filename = 'test_generation.txt'
# SDR_AWGN, SDR_Ray = analog_generator(filename, CSNR, 200, 2, sigma)
#
# Plots.PlotSDR(CSNRdB, SDR_AWGN, SDR_Ray)
#
# print('Loading...')
#
# out_AWGN, out_Ray, out_delta_Ray, data_ray, CSNR, symbols = load(filename)
# print(np.shape(out_AWGN))
# print(np.shape(out_Ray))
# print(np.shape(out_delta_Ray))
|
import tkinter as tool
from tkinter import filedialog
import pyautogui

root = tool.Tk()
canvas1 = tool.Canvas(root, width=300, height=300)
canvas1.pack()


def takeScreenshot():
    """Grab the full screen and save it where the user chooses."""
    screenshot = pyautogui.screenshot()
    file_path = filedialog.asksaveasfilename(defaultextension='.png')
    # file_path=filedialog.asksaveasfile("C:\Users\pgarabadu\Desktop\TECO\screenshots")
    # Guard against the user cancelling the dialog (empty path).
    if file_path:
        screenshot.save(file_path)


# BUG FIX: `command` was the string 'capture', which is not callable, so
# clicking the button did nothing; it must reference the handler. The
# widget is also renamed so it no longer shadows tkinter's Button class.
capture_button = tool.Button(text='capture', command=takeScreenshot, bg='green', font=10)
canvas1.create_window(150, 150, window=capture_button)
root.mainloop()
|
import requests, time
import sikhgenerator

# GroupMe API auth token (redacted in this copy) sent with every request.
request = {'token' : '[redacted]'}

# Poll the group's message feed forever; whenever someone posts the trigger
# phrase, reply through the bot endpoint with a generated name.
while True:
    response = requests.get('https://api.groupme.com/v3/groups/27409006/messages', params=request)
    if(response.status_code == 200):
        messages = response.json()['response']['messages']
        for m in messages:
            if(m['text'] == 'Generate sikh'):
                post = {'bot_id' : 'afb038714798f3441bbd41f2ea', 'text': sikhgenerator.make_name()}
                requests.post('https://api.groupme.com/v3/bots/post', params=post)
                # Remember the trigger message id so the next poll only
                # fetches messages newer than it (GroupMe `since_id`).
                request['since_id'] = m['id']
                break
        # Throttle polling to one request every 4 seconds.
        time.sleep(4)
|
from injector import Module, Injector, inject, singleton
from flask import Flask, Request, jsonify
from flask_injector import FlaskInjector
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy import Column, String
# We use standard SQLAlchemy models rather than the Flask-SQLAlchemy magic, as
# it requires a global Flask app object and SQLAlchemy db object.
Base = declarative_base()


class KeyValue(Base):
    """A single key/value row stored in the `data` table."""

    __tablename__ = 'data'

    key = Column(String, primary_key=True)
    value = Column(String)

    def __init__(self, key, value):
        self.key = key
        self.value = value

    def serializable(self):
        """Return a plain-dict representation suitable for jsonify().

        BUG FIX: the original body was a bare `return`, so the method always
        returned None and was useless for building responses.
        """
        return {'key': self.key, 'value': self.value}
class PostgresModule(Module):
    """Injector module that creates and binds the application's database."""

    def __init__(self, app):
        self.app = app

    def configure(self, binder):
        """Configure the application.

        We configure the DB here, explicitly, as Flask-SQLAlchemy requires
        the DB to be configured before request handlers are called.
        (The original file had this docstring stranded between methods.)
        """
        db = self.configure_db(self.app)
        binder.bind(SQLAlchemy, to=db, scope=singleton)

    def configure_db(self, app):
        """Create the schema, seed two demo rows, and return the db handle."""
        db = SQLAlchemy(app)
        Base.metadata.create_all(db.engine)
        db.session.add_all([KeyValue('hello', 'world'), KeyValue('goodbye', 'cruel world')])
        db.session.commit()
        return db
# Generated by Django 3.1.5 on 2021-02-01 06:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add default `ordering = ['order']` to four models and restrict
    SiteMenu.title to a unique one-char choice (Header/Footer)."""

    dependencies = [
        ('website', '0057_auto_20210201_0655'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='application',
            options={'ordering': ['order']},
        ),
        migrations.AlterModelOptions(
            name='component',
            options={'ordering': ['order']},
        ),
        migrations.AlterModelOptions(
            name='product',
            options={'ordering': ['order']},
        ),
        migrations.AlterModelOptions(
            name='researchapplication',
            options={'ordering': ['order']},
        ),
        migrations.AlterField(
            model_name='sitemenu',
            name='title',
            # unique=True means at most one Header and one Footer menu can exist.
            field=models.CharField(choices=[('H', 'Header'), ('F', 'Footer')], default='H', max_length=1, unique=True),
        ),
    ]
|
#!/usr/bin/env python3
from sys import argv, stdin
if __name__ == '__main__':
    # Usage: script [file|-] squareSize ; one arg means read stdin.
    if len(argv) == 2:
        filename = "-"
        squareSize = int(argv[1])
    else:
        filename = argv[1]
        squareSize = int(argv[2])

    def _process(lines, size):
        """Print each comma-separated line in runs of `size` fields joined
        together; the final remainder is printed as a bracketed list."""
        for line in lines:
            fields = line[:-1].split(',')
            i = 0
            while i + size < len(fields):
                print(''.join(fields[i:i + size]))
                i += size
            print("[" + ', '.join(fields[i:]) + "]\n")

    if filename == "-":
        _process(stdin, squareSize)
    else:
        # BUG FIX: the original opened the file without ever closing it;
        # iterating the handle directly also avoids reading it all at once.
        with open(filename, 'r') as fh:
            _process(fh, squareSize)
|
from django.contrib import admin
from .models import (
User, AuctionListing, Bid, Comment, Category, Watchlist, WinnerList
)
# Register your models here.
# Branding shown in the Django admin header, browser title, and index page.
admin.site.site_header = "CS50 Project2 Dashboard"
admin.site.site_title = "CS50 Project2 Dashboard"
admin.site.index_title = "Auctions Dashboard"
class UserAdmin(admin.ModelAdmin):
    """Admin list view for users; username and email link to the change page."""
    list_display = ("username", "first_name", "last_name",
                    "email", "is_superuser", )
    list_display_links = ("username", "email",)
class ListAdmin(admin.ModelAdmin):
    """Admin list view for auction listings, filterable by state/owner/category."""
    list_display = ("owner", "title", "category",
                    "price", "is_active", "post_time")
    list_display_links = ("title",)
    list_filter = ("is_active", "owner", "category",)
class BidAdmin(admin.ModelAdmin):
    """Admin list view for bids."""
    list_display = ("bid_price", "bid_user", "bid_count", "of_product",)
class CommentAdmin(admin.ModelAdmin):
    """Admin list view for listing comments."""
    list_display = ("post_comment", "comment_user", "of_product",)
class WatchlistAdmin(admin.ModelAdmin):
    """Admin list view for watchlist entries."""
    list_display = ("product", "watcher",)
class WinnerAdmin(admin.ModelAdmin):
    """Admin list view for auction winners."""
    list_display = ("winner", "won_product")
# Wire each model to its customized admin class (Category uses the default).
admin.site.register(User, UserAdmin)
admin.site.register(AuctionListing, ListAdmin)
admin.site.register(Bid, BidAdmin)
admin.site.register(Comment, CommentAdmin)
admin.site.register(Category)
admin.site.register(Watchlist, WatchlistAdmin)
admin.site.register(WinnerList, WinnerAdmin)
|
import random
import requests
import json
class MyRequests:
    """Test client for the s-kv-center order API: list free pickup times,
    list in-stock lagers (products), fill a basket, and create/advance orders.

    NOTE(review): the hard-coded host, tokens and filial ids suggest this is a
    manual-testing script against an internal staging service.
    """

    # Endpoint URLs (host/ids are hard-coded for the test environment).
    get_time_url = "http://s-kv-center-v20:44330/api/FreeTime?FilialId=1998&TicketToken=ODNjMjVhYTgyMmRlN2NmYjNjNDViNjIxNWZlYjMzYWE6eyJpZCI6MzV9"
    get_lagers_url = "http://s-kv-center-v20:44330/api/Lager?filialId=2382&ticketToken=56"
    create_lagers_in_basket_url = "http://s-kv-center-v20:44330/api/Lager"
    create_order_url = "http://s-kv-center-v20:44330/api/Orders"
    # Common query parameters sent with most GET requests.
    request_data = {
        "filial_id": 2382,
        "tiket_token": "ODNjMjVhYTgyMmRlN2NmYjNjNDViNjIxNWZlYjMzYWE6eyJpZCI6MzV9",
        "user_id": "56"
    }

    def get_free_time(self):
        """Return the list of available pickup start times (`begin` fields)."""
        response = requests.get(self.get_time_url, self.request_data)
        free_time_dict = response.json()
        free_time_list = []
        for free_time in free_time_dict:
            free_time_list.append(free_time["begin"])
        # print(free_time_list)
        return free_time_list

    def get_lagers_id(self):
        """Return ids of lager items that are in stock (remnant/count > 1)."""
        get_lager_request = {
            'filial_id': self.request_data['filial_id'],
            'user_id': self.request_data['user_id']
        }
        my_response = requests.get(self.get_lagers_url, get_lager_request)
        lager_dict = my_response.json()
        lager_id_list = []
        for lager in lager_dict['lagers']:
            if lager['remnant'] > 1:
                for items in lager['items']:
                    if items['count'] > 1:
                        lager_id = (items['id'])
                        lager_id_list.append(lager_id)
        # print(lager_id_list)
        return lager_id_list

    def create_lagers_in_basket(self, howe_mach_lagers):
        """POST the first `howe_mach_lagers`+1 available lagers into the basket.

        NOTE(review): the slice `[0:howe_mach_lagers + 1]` adds one item more
        than the argument says — confirm whether the +1 is intentional.
        """
        lager_id_list = self.get_lagers_id()
        lagers = lager_id_list[0:howe_mach_lagers + 1]
        headers = {
            "accept": "text/plain",
            "Content-Type": "application/json-patch+json"
        }
        for lager_id in lagers:
            lager_dict = {
                "ticketToken": self.request_data['user_id'],
                "filialId": self.request_data['filial_id'],
                "lager": {
                    "id": lager_id,
                    "count": 1
                }
            }
            # print(lager_id)
            requests_lager = requests.post(self.create_lagers_in_basket_url, json.dumps(lager_dict), headers=headers)
            # print(requests_lager.json())

    def create_order(self, howe_mach_lagers=0, howe_mach_order=0):
        """Create `howe_mach_order` orders, each with a freshly filled basket
        and a randomly chosen free pickup time."""
        for i in range(howe_mach_order):
            self.create_lagers_in_basket(howe_mach_lagers)
            free_time = random.choice(self.get_free_time())
            headers = {
                "accept": "text/plain",
                "Content-Type": "application/json-patch+json"
            }
            order_field = {
                'customerPhone': "+380777777777",
                'ticketToken': self.request_data['user_id'],
                'filialId': self.request_data['filial_id'],
                'readyDate': free_time
            }
            response = requests.post(self.create_order_url, json.dumps(order_field), headers=headers)
            print(response.json())
            print(response)
    #
    # """________________________________________My_practice_____________________________________"""
    # Maps API status UUIDs to human-readable (Ukrainian) status names.
    status_dict = {
        "0995336d-8483-46c7-a012-437df3cd40ab": "Нове",
        "2f916855-834b-47c0-bd26-c615a411dfdf": "Підтверджене",
        "acd8914b-11c6-43fb-9cbd-deddb9a220aa": "Скасоване",
        "2f257b3f-89cc-4e7e-8aec-cef9700034b6": "Прострочене",
        "f9e38007-b27c-409f-8ff7-b2c7c39eb294": "Готове",
        "15225f94-ad0e-4a8a-a04d-ac83ac41d23d": "Утилізований",
        "07baae3c-cff6-424a-a455-470909e70fe2": "Розукомплектований",
        "f1a67b87-fbd2-4f91-97eb-1764c42dc39e": "Видане"
    }
    # Shared headers for the JSON PUT/POST requests below.
    headers = {
        "accept": "text/plain",
        "Content-Type": "application/json-patch+json"
    }

    def get_all_order_with_status(self, date):
        """Print all orders and their statuses for the chosen date (date in format - yyyy-mm-dd)"""
        created_orders_url = "http://s-kv-center-v20:44330/api/Orders/Sync?FilialId=2382&Date=" + date
        orders_responds = requests.get(created_orders_url, self.request_data)
        order_list = orders_responds.json()
        info_of_orders = []
        statuses = []
        for data in order_list:
            info_of_orders.append(data['orderNumber'])
            info_of_orders.append(data['statusId'])
        # info_of_orders alternates [number, statusId, number, statusId, ...];
        # rebuild `statuses` as alternating [number, status-name, ...].
        for status in info_of_orders:
            if type(status) == int:
                statuses.append(status)  # order numbers are the int entries
            for status_id in self.status_dict:
                if status == status_id:
                    statuses.append(self.status_dict.get(status_id))  # translate UUID to name
        for i in range(0, len(statuses), 2):
            print('Номер ордеру та його статус:' + str(statuses[i:i + 2]) + ' дата початку синхронізації ' + date)

    def go_to_next_status(self, order_num, date):
        """Advance order `order_num` (for `date`) one step along the workflow:
        New -> Confirmed -> Ready -> Issued; prints a message when already issued."""
        created_orders_url = "http://s-kv-center-v20:44330/api/Orders/Sync?FilialId=2382&Date=" + date
        update_order_url = "http://s-kv-center-v20:44330/api/Orders/Sync"
        orders_responds = requests.get(created_orders_url, self.request_data)
        order_list = orders_responds.json()
        info_of_orders = []
        orders = []
        order_id = []
        next_status_id = []
        for data in order_list:
            info_of_orders.append(data['orderNumber'])
            info_of_orders.append(data['statusId'])
            info_of_orders.append(data['orderId'])
        # Regroup the flat list into [number, statusId, orderId] triples.
        for i in range(0, len(info_of_orders), 3):
            orders.append(info_of_orders[i:i + 3])
        # Find the requested order: order_id becomes [orderId, statusId].
        for j in range(len(orders)):
            if orders[j][0] == order_num:
                order_id.append(orders[j][2])
                order_id.append(orders[j][1])
        # NOTE(review): `insert(1, ...)` on an empty list appends at index 0,
        # which is what `next_status_id[0]` below reads — works, but fragile.
        if order_id[1] == '0995336d-8483-46c7-a012-437df3cd40ab':
            next_status_id.insert(1,('2f916855-834b-47c0-bd26-c615a411dfdf'))
        elif order_id[1] == '2f916855-834b-47c0-bd26-c615a411dfdf':
            next_status_id.insert(1,('f9e38007-b27c-409f-8ff7-b2c7c39eb294'))
        elif order_id[1] == 'f9e38007-b27c-409f-8ff7-b2c7c39eb294':
            next_status_id.insert(1,('f1a67b87-fbd2-4f91-97eb-1764c42dc39e'))
        else:
            print('заказ видано')
            return True
        next_status = {
            "orderId": str(order_id[0]),
            "statusId": str(next_status_id[0])
        }
        next_status_responds = requests.put(update_order_url, json.dumps(next_status), headers=self.headers)
        print(next_status_responds)
        print("заказ № " + str(order_num) + " переведено в наступний статус")
        print('some massage')
print('some massage')
|
# -*- coding: utf-8 -*-
"""
validator handlers boolean module.
"""
from pyrin.core.globals import _
from pyrin.validator.handlers.base import ValidatorBase
from pyrin.validator.handlers.exceptions import ValueIsNotBooleanError
class BooleanValidator(ValidatorBase):
    """
    boolean validator class.
    """

    # exception type (and message template) raised when the value is not a bool.
    invalid_type_error = ValueIsNotBooleanError
    invalid_type_message = _('The provided value for [{param_name}] '
                             'must be of boolean type.')

    def __init__(self, domain, field, **options):
        """
        initializes an instance of BooleanValidator.

        :param type[BaseEntity] | str domain: the domain in which this validator
                                              must be registered. it could be a
                                              type of a BaseEntity subclass.
                                              if a validator must be registered
                                              independent from any BaseEntity subclass,
                                              the domain could be a unique string name.
                                              note that the provided string name must be
                                              unique at application level.

        :param InstrumentedAttribute | str field: validator field name. it could be a
                                                  string or a column. each validator will
                                                  be registered with its field name in
                                                  corresponding domain. to enable automatic
                                                  validations, the provided field name must
                                                  be the exact name of the parameter which
                                                  this validator will validate. if you pass
                                                  a column attribute, some constraints
                                                  such as `nullable`, `min_length`, `max_length`,
                                                  `min_value`, `max_value`, `allow_blank`,
                                                  `allow_whitespace`, `check_in` and
                                                  `check_not_in` could be extracted
                                                  automatically from that column if not provided
                                                  in inputs.

        :keyword bool nullable: specifies that null values should be accepted as valid.
                                defaults to True if not provided.

        :keyword str localized_name: localized name of the parameter
                                     which this validator will validate.
                                     it must be passed using `_` method
                                     from `pyrin.core.globals`.
                                     defaults to `name` if not provided.

        :keyword bool is_list: specifies that the value must be a list of items.
                               defaults to False if not provided.

        :keyword bool null_items: specifies that list items could be None.
                                  it is only used if `is_list=True` is provided.
                                  defaults to False if not provided.

        :keyword bool allow_single: specifies that list validator should also
                                    accept single, non list values.
                                    it is only used if `is_list=True` is provided.
                                    defaults to False if not provided.

        :keyword bool allow_empty_list: specifies that list validators should also
                                        accept empty lists.
                                        it is only used if `is_list=True` is provided.
                                        defaults to False if not provided.

        :keyword str name: a custom name for this validator.
                           if provided, the name of `field` will be ignored.

        :keyword bool for_find: specifies that this validator must only
                                be used on validation for find.
                                defaults to False if not provided.

        :raises ValidatorFieldIsRequiredError: validator field is required error.
        :raises ValidatorNameIsRequiredError: validator name is required error.
        :raises InvalidValidatorDomainError: invalid validator domain error.
        :raises InvalidNotAcceptedTypeError: invalid not accepted type error.
        :raises ValidatorFixerMustBeCallable: validator fixer must be callable.
        :raises InvalidValidationExceptionTypeError: invalid validation exception type error.
        """

        # force the accepted type to bool, overriding any caller-provided
        # `accepted_type` option; everything else is handled by the base class.
        options.update(accepted_type=bool)
        super().__init__(domain, field, **options)
|
# ************************************* BANK_APPLICATION **********************************
import ast
class Bank:
    """Interactive console bank: sign in/up, deposit and withdraw.

    Account data lives in the module-level dict `d1` with shape
    {'detail': {id: {'Name': ..., 'Balance': ...}}, 'auth': {id: pin}},
    persisted as a stringified dict in 'demo.txt'.
    """

    def __init__(self):
        pass

    # User login Function/method
    def signin(self):
        """Authenticate by numeric id or by name, then run the deposit/withdraw menu."""
        account_id = input(" Enter Your ID/Name ")
        account_pwd = str(input(" Enter Your password: "))
        decison = check_id(account_id,account_pwd)
        x=account_id
        if decison == None:
            # id lookup failed: fall back to a case-insensitive name lookup.
            x,decison = check_name(account_id,account_pwd) # here x is the id
        if decison != None:
            print("We have this account ", decison)
            while (True):
                n = str(input('For DEPOSIT : type 1\nfor WITHDRAW : type 2\nfor EXIT:\ttype 0\n'))
                if (n=='1'):
                    self.deposit(x) # here x is the id
                elif (n=='2'):
                    self.withdraw(x)
                elif (n=='0'):
                    print("**************** Have a Nice Day!!! ******************** ")
                    break
                else:
                    print('enter correct keyword')
        else:
            print("No such Account")

    # New User Signup Function/method
    def signup(self):
        """Create a new account with the next free id and persist `d1` to disk.

        NOTE(review): `max(d1['detail'].keys())` raises on an empty detail
        dict — assumes at least one account already exists.
        """
        global d1
        sub_d1 = dict()
        id = max(d1['detail'].keys()) + 1
        sub_d1["Name"] = input('Enter ur name : ')
        sub_d1['Balance'] = 0
        d1['detail'][id] = sub_d1
        d1['auth'][id] = input('Enter new PIN : ')
        with open('demo.txt','w') as file1:
            file1.write(str(d1))
        print(f"New Account Created\nId : {id}\nName : {d1['detail'][id]['Name']}\nBalance : {d1['detail'][id]['Balance']}\nPIN : {d1['auth'][id]}\n")
        return d1

    def deposit(self,id): # for deposit and updating Balance
        """Add the entered amount to account `id` and persist `d1`."""
        global d1
        id = int(id)
        amount = int(input('Enter deposit amount : '))
        d1['detail'][id]['Balance'] += amount
        print(d1['detail'][id])
        with open('demo.txt','w') as file1:
            file1.write(str(d1))

    def withdraw(self,id): # for withdrawing and updating Balance
        """Subtract the entered amount from account `id` if covered; persist `d1`."""
        global d1
        id = int(id)
        amount = int(input('Enter withdraw amount : '))
        if (d1['detail'][id]['Balance']>=amount):
            d1['detail'][id]['Balance'] -= amount
            print(d1['detail'][id])
        else:
            print('You Have Insufficiant Balance!!!! ')
        with open('demo.txt','w') as file1:
            file1.write(str(d1))
# Checking Name and pasword entered by user is correct or not
def check_name(name, pwd):
    """Look up an account by case-insensitive name and verify its PIN.

    Returns (id, details) on success, otherwise (last-seen id, None); callers
    only use the first element when the second is not None.

    BUG FIX: the original attached `else: return id, None` to the try/except
    inside the loop (`try`'s no-exception `else`), so the function returned
    after the FIRST account that didn't raise — later accounts were never
    examined. The failure return now happens only after the whole loop.
    """
    for id, details in d1['detail'].items():
        try:
            if (d1['detail'][id]['Name']).lower() == name.lower():
                if (pwd == d1['auth'][id]):
                    return id, details
                else:
                    print('Wrong Password')
        except Exception:  # narrowed from bare except; skip malformed entries
            pass
    # No (or wrong-password) match: `id` is the last key iterated, as before.
    return id, None
# Checking ID and pasword entered by user is correct or not
def check_id(id, pwd):
    """Validate a numeric account id + PIN pair.

    Returns the account's detail dict on success; None when the id is not
    numeric, unknown, or the PIN is wrong (a wrong PIN also prints a notice).
    """
    try:
        id = int(id)
        if (pwd == d1['auth'][id]):
            acc_detail = d1['detail'][id]
            return acc_detail
        else:
            print('Wrong Password')
    except (ValueError, KeyError):  # non-numeric id or unknown account
        return None
# Load the persisted account store; ast.literal_eval safely parses the
# stringified dict written by Bank.signup/deposit/withdraw.
with open('demo.txt','r') as file1: # use path if demo.txt in other location inside 'file_pathname'
    i = file1.read()
i = ast.literal_eval(i) #converted string to dictinory
d1 = dict(i)

# Main menu loop: sign in (then exit) or sign up (then loop back).
while (True):
    print('\n******************** Welcome To ABC Bank **************************' )
    log = input('For SIGNIN : type 1\nfor SIGNUP : type 2\n')
    x = Bank()
    if log == '1':
        x.signin()
        break
    elif log == '2':
        x.signup()
        continue
    else:
        print('press correct keyword')
|
#!/usr/bin/python
#-*-coding:utf-8-*-
#(40)(39)で構築したデータベースを読み込み,標準入力から読み込んだ文の生起確率を計算せよ.
#入力された文が単語列(w1, w2, ..., wN)で構成されるとき,生起確率はP(w2|w1)P(w3|w2)...P(wN|wN-1)と求めればよい.
#試しに,"this paper is organized as follows"と"is this paper organized as follows"の生起確率を計算せよ.
import kyotocabinet as kc
import sys
from test39 import KyotoCabinet
def get_sent_probability(db, sent):
    """Return the sentence occurrence probability P(w2|w1)*...*P(wN|wN-1).

    `db.get_str((w_i, w_i+1))` yields the stored bigram probability as a
    string; a missing bigram counts as 0, zeroing the whole product.
    """
    probability = 1
    for left, right in zip(sent, sent[1:]):
        stored = db.get_str((left, right))
        probability *= float(stored) if stored else 0.0
    return probability
def main():
    # Python 2 script: opens the bigram database built by test39, reads one
    # sentence from stdin, and prints its occurrence probability.
    db = KyotoCabinet()
    db.open("test39_db.kch", kc.DB.OWRITER | kc.DB.OCREATE)
    sent = raw_input('input a sentence\n')
    sent = sent.strip().split()
    probability = get_sent_probability(db, sent)
    print probability
if __name__ == '__main__':
    main()
# Automate opening Chrome and running a Google search with pyautogui
# (install via pip: the python package manager).
import pyautogui

# Screen coordinates of the Chrome icon on the launcher.
CHROME_ICON_X = 690
CHROME_ICON_Y = 873

# Glide the cursor to the icon over 2 seconds, then click it.
pyautogui.moveTo(CHROME_ICON_X, CHROME_ICON_Y, 2)
pyautogui.click()

# COMMAND+T opens a new tab in Chrome.
pyautogui.hotkey('command','t')

# Type the query and hit Enter (the trailing \n), 0.1 s between keystrokes.
pyautogui.typewrite('What can I use python for?\n', interval=0.1)
|
## Taken from yt/fields/xray_emission_fields.py
## https://yt-project.org/doc/analyzing/analysis_modules/xray_emission_fields.html#
from yt.utilities.on_demand_imports import _h5py as h5py
import numpy as np
import os
from yt.config import ytcfg
from yt.fields.derived_field import DerivedField
from yt.funcs import \
mylog, \
only_on_root, \
parse_h5_attr
from yt.utilities.exceptions import YTFieldNotFound
from yt.utilities.exceptions import YTException
from yt.utilities.linear_interpolators import \
UnilinearFieldInterpolator, BilinearFieldInterpolator
from yt.units.yt_array import YTArray, YTQuantity
from yt.utilities.cosmology import Cosmology
data_version = {"cloudy": 2,
"apec": 2}
def _get_data_file(table_type, data_dir=None):
    """Locate the emissivity table for `table_type` ("cloudy" or "apec").

    Looks in `data_dir` when given; otherwise in yt's supplementary-data
    directory, falling back to the CWD. Raises IOError when the file is absent.
    """
    data_file = "%s_emissivity_v%d.h5" % (table_type, data_version[table_type])
    if data_dir is None:
        supp_data_dir = ytcfg.get("yt", "supp_data_dir")
        data_dir = supp_data_dir if os.path.exists(supp_data_dir) else "."
    data_path = os.path.join(data_dir, data_file)
    if not os.path.exists(data_path):
        # BUG FIX: the message interpolated data_path (which already ends with
        # the file name) where the search directory was meant.
        msg = "Failed to find emissivity data file %s in %s! " % (data_file, data_dir) \
            + "Please download from http://yt-project.org/data!"
        mylog.error(msg)
        raise IOError(msg)
    return data_path
class EnergyBoundsException(YTException):
    """Raised when a requested energy range falls outside the table's bins."""

    def __init__(self, lower, upper):
        self.lower = lower
        self.upper = upper

    def __str__(self):
        return "Energy bounds are %e to %e keV." % (self.lower, self.upper)
class ObsoleteDataException(YTException):
    """Raised when the on-disk emissivity table is older than this code expects."""

    # BUG FIX: `data_url` was referenced but never defined anywhere in this
    # module, so raising this exception crashed with a NameError. This is the
    # download location the message already points users to.
    data_url = "http://yt-project.org/data"

    def __init__(self, table_type):
        data_file = "%s_emissivity_v%d.h5" % (table_type, data_version[table_type])
        self.msg = "X-ray emissivity data is out of date.\n"
        self.msg += "Download the latest data from %s/%s." % (self.data_url, data_file)

    def __str__(self):
        return self.msg
class XrayEmissivityIntegrator(object):
    r"""Class for making X-ray emissivity fields. Uses hdf5 data tables
    generated from Cloudy and AtomDB/APEC.

    Initialize an XrayEmissivityIntegrator object.

    Parameters
    ----------
    table_type : string
        The type of data to use when computing the emissivity values. If "cloudy",
        a file called "cloudy_emissivity.h5" is used, for photoionized
        plasmas. If, "apec", a file called "apec_emissivity.h5" is used for
        collisionally ionized plasmas. These files contain emissivity tables
        for primordial elements and for metals at solar metallicity for the
        energy range 0.1 to 100 keV.
    redshift : float, optional
        The cosmological redshift of the source of the field. Default: 0.0.
    data_dir : string, optional
        The location to look for the data table in. If not supplied, the file
        will be looked for in the location of the YT_DEST environment variable
        or in the current working directory.
    use_metals : boolean, optional
        If set to True, the emissivity will include contributions from metals.
        Default: True
    """
    def __init__(self, table_type, redshift=0.0, data_dir=None, use_metals=True):
        # Level 50 (CRITICAL) silences yt's logger for the info messages below.
        mylog.setLevel(50)
        filename = _get_data_file(table_type, data_dir=data_dir)
        only_on_root(mylog.info, "Loading emissivity data from %s." % filename)
        in_file = h5py.File(filename, "r")
        if "info" in in_file.attrs:
            only_on_root(mylog.info, parse_h5_attr(in_file, "info"))
        # Refuse tables whose on-disk version differs from data_version.
        if parse_h5_attr(in_file, "version") != data_version[table_type]:
            raise ObsoleteDataException(table_type)
        else:
            only_on_root(mylog.info, "X-ray '%s' emissivity data version: %s." % \
                         (table_type, parse_h5_attr(in_file, "version")))
        # Copy the datasets we need, then close the file immediately.
        self.log_T = in_file["log_T"][:]
        self.emissivity_primordial = in_file["emissivity_primordial"][:]
        if "log_nH" in in_file:
            self.log_nH = in_file["log_nH"][:]
        if use_metals:
            self.emissivity_metals = in_file["emissivity_metals"][:]
        self.ebin = YTArray(in_file["E"], "keV")
        in_file.close()
        # Energy-bin widths and (erg) midpoints of the table's energy grid.
        self.dE = np.diff(self.ebin)
        self.emid = 0.5*(self.ebin[1:]+self.ebin[:-1]).to("erg")
        self.redshift = redshift

    def get_interpolator(self, data_type, e_min, e_max, energy=True):
        """Build an interpolator for emissivity integrated over [e_min, e_max]
        (observer-frame keV; blueshifted into the source frame below).
        `data_type` selects "primordial" or "metals"; energy=False converts
        to photon emissivity by dividing by the bin midpoint energy."""
        data = getattr(self, "emissivity_%s" % data_type)
        if not energy:
            data = data[..., :] / self.emid.v
        # Shift requested bounds into the emitting frame.
        e_min = YTQuantity(e_min, "keV")*(1.0+self.redshift)
        e_max = YTQuantity(e_max, "keV")*(1.0+self.redshift)
        # Allow a 0.1% tolerance before declaring the range out of bounds.
        if (e_min - self.ebin[0]) / e_min < -1e-3 or \
           (e_max - self.ebin[-1]) / e_max > 1e-3:
            raise EnergyBoundsException(self.ebin[0], self.ebin[-1])
        # Indices of the bins spanned by the requested range.
        e_is, e_ie = np.digitize([e_min, e_max], self.ebin)
        e_is = np.clip(e_is - 1, 0, self.ebin.size - 1)
        e_ie = np.clip(e_ie, 0, self.ebin.size - 1)

        my_dE = self.dE[e_is: e_ie].copy()
        # clip edge bins if the requested range is smaller
        my_dE[0] -= e_min - self.ebin[e_is]
        my_dE[-1] -= self.ebin[e_ie] - e_max

        # Integrate emissivity over the selected (partial) bins.
        interp_data = (data[..., e_is:e_ie]*my_dE).sum(axis=-1)
        if data.ndim == 2:
            # Table depends on T only.
            emiss = UnilinearFieldInterpolator(np.log10(interp_data),
                                               [self.log_T[0],  self.log_T[-1]],
                                               "log_T", truncate=True)
        else:
            # Table depends on (nH, T).
            emiss = BilinearFieldInterpolator(np.log10(interp_data),
                                              [self.log_nH[0], self.log_nH[-1],
                                               self.log_T[0],  self.log_T[-1]],
                                              ["log_nH", "log_T"], truncate=True)

        return emiss
def get_xray_emissivity(T, Z=1.0, emin=0.5, emax=7.0, table_type='apec', energy=True):
    """Return the total X-ray emissivity in [emin, emax] keV for temperature
    T (Kelvin, scalar or array) and metallicity Z (solar units).

    Total = primordial + Z * metal contribution, interpolated from the table
    at log_nH = 0 (the tables live in ../../data relative to this module).
    NOTE(review): interpolators return log10 emissivity, hence the 10** here.
    """
    dirname = os.path.dirname(__file__)
    data_dir = os.path.join(dirname, '../../data')
    x = XrayEmissivityIntegrator(table_type, data_dir=data_dir)
    log_em_0 = x.get_interpolator('primordial', emin, emax, energy=energy)
    log_em_z = x.get_interpolator('metals', emin, emax, energy=energy)
    dd = dict(log_nH=0.0, log_T=np.log10(T))
    em_tot = 10.0**log_em_0(dd) + Z*10.0**log_em_z(dd)
    return em_tot
|
import random
class Node(object):
    """Skip-list node: a key plus one forward pointer per level (0..level)."""

    def __init__(self, key, level):
        self.key = key
        # level + 1 slots; index i holds this node's successor at level i.
        self.forward = [None] * (level + 1)
class SkipList(object):
    """Probabilistic skip list: sorted linked lists stacked in levels, where a
    node appears at level i+1 with probability P given it is at level i."""

    def __init__(self, max_lvl, P):
        # Highest level any node may reach.
        self.MAXLVL = max_lvl
        # Promotion probability for randomLevel().
        self.P = P
        # Sentinel header with key -1, present at every level.
        self.header = self.createNode(self.MAXLVL, -1)
        # Current highest level actually in use.
        self.level = 0

    # create new node
    def createNode(self, lvl, key):
        n = Node(key, lvl)
        return n

    def randomLevel(self):
        # Flip coins: each success (prob. P) promotes the node one level,
        # capped at MAXLVL.
        lvl = 0
        while random.random()<self.P and lvl<self.MAXLVL:lvl += 1
        return lvl

    def insertElement(self, key):
        """Insert `key` if absent; duplicates are silently ignored."""
        # update[i] = rightmost node at level i whose key < key.
        update = [None]*(self.MAXLVL+1)
        current = self.header
        for i in range(self.level, -1, -1):
            while current.forward[i] and current.forward[i].key < key:
                current = current.forward[i]
            update[i] = current
        current = current.forward[0]
        if current == None or current.key != key:
            rlevel = self.randomLevel()
            # Grow the list's level if the new node is taller than any so far.
            if rlevel > self.level:
                for i in range(self.level+1, rlevel+1):
                    update[i] = self.header
                self.level = rlevel
            n = self.createNode(rlevel, key)
            # Splice the node in at every level it occupies.
            for i in range(rlevel+1):
                n.forward[i] = update[i].forward[i]
                update[i].forward[i] = n
            print("Successfully inserted key {}".format(key))

    def deleteElement(self, search_key):
        """Unlink `search_key` at every level it occupies (no-op if absent)."""
        update = [None]*(self.MAXLVL+1)
        current = self.header
        for i in range(self.level, -1, -1):
            while(current.forward[i] and current.forward[i].key < search_key):
                current = current.forward[i]
            update[i] = current
        current = current.forward[0]
        if current != None and current.key == search_key:
            for i in range(self.level+1):
                # Levels above the node's height no longer point at it.
                if update[i].forward[i] != current:
                    break
                update[i].forward[i] = current.forward[i]
            # Shrink the list's level while the top levels are empty.
            while(self.level>0 and self.header.forward[self.level] == None):
                self.level -= 1
            print("Successfully deleted {}".format(search_key))

    def searchElement(self, key):
        """Print a message if `key` exists (descends level by level)."""
        current = self.header
        for i in range(self.level, -1, -1):
            while(current.forward[i] and current.forward[i].key < key):
                current = current.forward[i]
        current = current.forward[0]
        if current and current.key == key:
            print("Found key ", key)

    def displayList(self):
        """Print the keys present at each level, bottom (all keys) to top."""
        print("\n*****Skip List******")
        head = self.header
        for lvl in range(self.level+1):
            print("Level {}: ".format(lvl), end=" ")
            node = head.forward[lvl]
            while(node != None):
                print(node.key, end=" ")
                node = node.forward[lvl]
            print("")
def main():
    """Demo: build a small skip list, then search for and delete key 19."""
    lst = SkipList(3, 0.5)
    lst.insertElement(3)
    lst.insertElement(6)
    lst.insertElement(7)
    lst.insertElement(9)
    lst.insertElement(12)
    lst.insertElement(19)
    lst.insertElement(17)
    lst.insertElement(26)
    lst.insertElement(21)
    lst.insertElement(25)
    lst.displayList()
    # Search 19
    lst.searchElement(19)
    # Delete 19
    lst.deleteElement(19)
    lst.displayList()
main()
|
from rest_framework import generics
from . import serializers
from Drives import models
class ListDrivers(generics.ListAPIView):
    """Read-only endpoint listing all Driver records."""
    queryset = models.Driver.objects.all()
    serializer_class = serializers.DriverSerializer
class ListVehicle(generics.ListAPIView):
    """Read-only endpoint listing all Vehicle records."""
    queryset = models.Vehicle.objects.all()
    serializer_class = serializers.VehicleSerializer
# -*- coding: utf-8 -*-
import scrapy
from foodmate.items import FoodmateItem
import re
from lxml import html
import time
import copy
class YingyangSpider(scrapy.Spider):
    """Scrape the foodmate.net nutrition database: food categories -> foods
    within a category -> per-food nutrition name/value pairs."""
    name = 'yingyang'
    allowed_domains = ['foodmate.net']
    start_urls = ["http://db.foodmate.net/yingyang"]

    def parse(self, response):
        # Only the first two category links are followed.
        food_kind_list=response.xpath("//div[@id='top']/a")[0:2]
        for food_kind in food_kind_list:
            item=FoodmateItem()
            item['kind']=food_kind.xpath("./text()").extract_first()
            food_kind_url = food_kind.xpath("./@href").extract_first()
            print(food_kind_url)
            yield scrapy.Request(self.start_urls[0]+"/"+food_kind_url,
                                 callback=self.parse_food,
                                 meta={'item':item})
            # Be polite: pause between category requests.
            time.sleep(2)

    def parse_food(self,response):
        item=response.meta['item']
        food_list=response.xpath("//div[@id='dibu']/li")
        for food in food_list:
            # A deepcopy is needed here: the `item` above was created for the
            # parent category, so every food in this category would otherwise
            # share the same item object and later assignments would
            # overwrite the fields set by earlier ones.
            item=copy.deepcopy(item)
            item['food_name']=food.xpath("./a/text()").extract_first()
            detail_url=food.xpath("./a/@href").extract_first()
            print(detail_url)
            yield scrapy.Request(self.start_urls[0]+"/"+detail_url,
                                 callback=self.parse_detail,
                                 meta={'item':item}
                                 )

    def parse_detail(self,response):
        item=response.meta['item']
        detail_list=response.xpath("//div[@id='rightlist']//div[@class='list']").extract()
        nutrition={}
        for detail in detail_list:
            #print(detail)
            # group(1) = nutrient name, group(2) = its value.
            ret=re.match(r"<div class=\"list\"><div class=\"list_m\">(.*)</div>(.*)</div>",detail)
            if(ret):
                nutrition[ret.group(1)]=ret.group(2)
        item['nutrition']=nutrition
        yield item
|
from imagepy.core.engine import Free
from imagepy import IPy
import platform
import subprocess
import json
import wx
from imagepy.ui.panelconfig import ParaDialog
from imagepy.core.util import fileio
import os
class Plugin(Free):
    """ImagePy plugin that classifies the open image via the external
    DeepClas4Bio API (invoked as subprocess scripts that write data.json)."""
    title = 'DeepClas4BioPy'
    # Selected model/framework and the interpreter + API path chosen in load().
    model = ''
    framework = ''
    python = ''
    pathAPI = ''

    def load(self):
        """Ask for the API directory, then let the user pick framework/model.

        Returns False when the user cancels either dialog."""
        dirdialog=wx.DirDialog(IPy.get_window(),message=wx.DirSelectorPromptStr, defaultPath="",
                               style=wx.DD_DEFAULT_STYLE, pos=wx.DefaultPosition, size=wx.DefaultSize,
                               name=wx.DirDialogNameStr)
        if dirdialog.ShowModal() == wx.ID_OK:
            self.pathAPI = dirdialog.GetPath()
            self.pathAPI=self.pathAPI+os.path.sep
        else:
            return False
        # The API scripts need python3 explicitly outside Windows.
        if platform.system() == 'Windows':
            self.python = 'python'
        else:
            self.python = 'python3'
        # Each helper script communicates its result through data.json.
        subprocess.check_output([self.python, self.pathAPI + 'listFrameworks.py'])
        data = json.load(open('data.json'))
        frameworks = data['frameworks']
        subprocess.check_output([self.python, self.pathAPI + 'listModels.py', '-f', 'Keras'])
        data = json.load(open('data.json'))
        models = data['models']
        Para = {'f': 'Keras', 'm': 'VGG16'}
        View = [('lab', 'Select the framework and the model'),
                (list, frameworks, str, 'Framework', 'f', ''),
                (list, models, str, 'Model', 'm', '')
                ]
        md = MyDialog(None, 'DeepClas4BioPy', self.pathAPI, self.python, View, Para)
        md.initView()
        if md.ShowModal() == wx.ID_OK:
            self.framework = md.para['f']
            self.model = md.para['m']
            md.Destroy()
            return True
        else:
            md.Destroy()
            return False

    def run(self, para=None):
        """Resolve the open image's file path from the recent-files list,
        run predict.py on it, and show the predicted class."""
        imp = IPy.get_ips()
        if imp is None:
            IPy.alert("Please open the image you want to classify", 'Error')
            return
        name = imp.title
        recent = fileio.recent
        # Match the window title against recent file paths (basename, no ext).
        # NOTE(review): if no recent entry matches, `image` is unbound and the
        # subprocess call below raises NameError — confirm intended handling.
        for i in recent:
            pos1 = i.rfind(os.sep)
            pos2 = i.rfind('.')
            if name == i[pos1 + 1:pos2]:
                image = i
        subprocess.check_output(
            [self.python, self.pathAPI + 'predict.py', '-i', image, '-f', self.framework, '-m', self.model])
        data = json.load(open('data.json'))
        className = data['class']
        IPy.alert("The class which the image belongs is " + className, 'Prediction')
class MyDialog(ParaDialog):
    """Framework/model chooser dialog; changing the framework reloads the
    model list from the API and rebuilds the dialog's widgets."""
    pathAPI = ''
    python = ''

    def __init__(self, parent, title, pathApi, python, view, para):
        ParaDialog.__init__(self, parent, title)
        self.para = para
        self.view = view
        self.pathAPI = pathApi
        self.python = python

    def para_changed(self, key):
        """On framework change ('f'): re-query models, reset the selection to
        the first model, and rebuild the whole view."""
        ParaDialog.para_changed(self, key)
        if key == 'f':
            subprocess.check_output([self.python, self.pathAPI + 'listFrameworks.py'])
            data = json.load(open('data.json'))
            frameworks = data['frameworks']
            framework = self.para[key]
            subprocess.check_output([self.python, self.pathAPI + 'listModels.py', '-f', framework])
            data = json.load(open('data.json'))
            models = data['models']
            self.para = {'f': framework, 'm': models[0]}
            self.view = [('lab', 'Select the framework and the model'),
                         (list, frameworks, str, 'Framework', 'f', ''),
                         (list, models, str, 'Model', 'm', '')
                         ]
            # Tear down the old widgets before re-rendering the new view.
            for child in self.GetChildren():
                child.Destroy()
            self.tus = []
            self.initView()
            self.Layout()

    def initView(self):
        ParaDialog.init_view(self, self.view, self.para)
|
from typing import Any, Dict, Iterable, Optional
import numpy as np
import torch
from torch import nn
from autoPyTorch.constants import CLASSIFICATION_TASKS, STRING_TO_TASK_TYPES
from autoPyTorch.pipeline.components.setup.forecasting_target_scaling.base_target_scaler import BaseTargetScaler
from autoPyTorch.pipeline.components.setup.network.base_network import NetworkComponent
from autoPyTorch.pipeline.components.setup.network.forecasting_architecture import (
ForecastingDeepARNet,
ForecastingNet,
ForecastingSeq2SeqNet,
NBEATSNet
)
from autoPyTorch.pipeline.components.setup.network_head.forecasting_network_head.distribution import \
DisForecastingStrategy
from autoPyTorch.pipeline.components.training.base_training import autoPyTorchTrainingComponent
from autoPyTorch.utils.common import (
FitRequirement,
get_device_from_fit_dictionary
)
class ForecastingNetworkComponent(NetworkComponent):
    def __init__(
        self,
        network: Optional[torch.nn.Module] = None,
        random_state: Optional[np.random.RandomState] = None,
    ) -> None:
        """Initialize the forecasting network component.

        Replaces the base NetworkComponent's fit requirements with the
        forecasting-specific set consumed by fit() (network structure,
        encoder/decoder configs, scaler, feature metadata, ...).

        :param network: optional pre-built network module.
        :param random_state: optional numpy RandomState for reproducibility.
        """
        super(ForecastingNetworkComponent, self).__init__(network=network, random_state=random_state)
        # Drop the base class's requirements entirely; forecasting needs its own.
        self._fit_requirements.clear()
        self.add_fit_requirements([
            FitRequirement('dataset_properties', (Dict,), user_defined=False, dataset_property=True),
            FitRequirement('window_size', (int,), user_defined=False, dataset_property=False),
            FitRequirement('network_structure', (Dict,), user_defined=False, dataset_property=False),
            FitRequirement("network_embedding", (torch.nn.Module,), user_defined=False, dataset_property=False),
            FitRequirement("network_encoder", (Dict,), user_defined=False,
                           dataset_property=False),
            FitRequirement("network_decoder", (Dict,), user_defined=False,
                           dataset_property=False),
            FitRequirement("network_head", (Optional[torch.nn.Module],), user_defined=False, dataset_property=False),
            FitRequirement("auto_regressive", (bool,), user_defined=False, dataset_property=False),
            FitRequirement("target_scaler", (BaseTargetScaler,), user_defined=False, dataset_property=False),
            FitRequirement("net_output_type", (str,), user_defined=False, dataset_property=False),
            FitRequirement("feature_names", (Iterable,), user_defined=False, dataset_property=True),
            FitRequirement("feature_shapes", (Iterable,), user_defined=False, dataset_property=True),
            FitRequirement('transform_time_features', (bool,), user_defined=False, dataset_property=False),
            FitRequirement('static_features', (tuple,), user_defined=True, dataset_property=True),
            FitRequirement('time_feature_names', (Iterable,), user_defined=True, dataset_property=True),
        ])
def fit(self, X: Dict[str, Any], y: Any = None) -> autoPyTorchTrainingComponent:
# Make sure that input dictionary X has the required
# information to fit this stage
self.check_requirements(X, y)
network_structure = X['network_structure']
network_encoder = X['network_encoder']
network_decoder = X['network_decoder']
net_output_type = X['net_output_type']
feature_names = X['dataset_properties']['feature_names']
feature_shapes = X['dataset_properties']['feature_shapes']
transform_time_features = X['transform_time_features']
known_future_features = X['dataset_properties']['known_future_features']
if transform_time_features:
time_feature_names = X['dataset_properties']['time_feature_names']
else:
time_feature_names = ()
network_init_kwargs = dict(network_structure=network_structure,
network_embedding=X['network_embedding'],
network_encoder=network_encoder,
network_decoder=network_decoder,
temporal_fusion=X.get("temporal_fusion", None),
network_head=X['network_head'],
auto_regressive=X['auto_regressive'],
window_size=X['window_size'],
dataset_properties=X['dataset_properties'],
target_scaler=X['target_scaler'],
output_type=net_output_type,
feature_names=feature_names,
feature_shapes=feature_shapes,
known_future_features=known_future_features,
time_feature_names=time_feature_names,
static_features=X['dataset_properties']['static_features']
)
if net_output_type == 'distribution':
dist_forecasting_strategy = X['dist_forecasting_strategy'] # type: DisForecastingStrategy
network_init_kwargs.update(dict(forecast_strategy=dist_forecasting_strategy.forecast_strategy,
num_samples=dist_forecasting_strategy.num_samples,
aggregation=dist_forecasting_strategy.aggregation, ))
if X['auto_regressive']:
first_decoder = next(iter(network_decoder.items()))[1]
if first_decoder.decoder_properties.recurrent:
self.network = ForecastingSeq2SeqNet(**network_init_kwargs)
else:
self.network = ForecastingDeepARNet(**network_init_kwargs)
else:
first_decoder = next(iter(network_decoder.items()))[1]
if first_decoder.decoder_properties.multi_blocks:
self.network = NBEATSNet(**network_init_kwargs)
else:
self.network = ForecastingNet(**network_init_kwargs)
# Properly set the network training device
if self.device is None:
self.device = get_device_from_fit_dictionary(X)
self.to(self.device)
if STRING_TO_TASK_TYPES[X['dataset_properties']['task_type']] in CLASSIFICATION_TASKS:
self.final_activation = nn.Softmax(dim=1)
self.is_fitted_ = True
return self
def predict(self, loader: torch.utils.data.DataLoader) -> torch.Tensor:
"""
Performs batched prediction given a loader object
"""
assert self.network is not None
self.network.eval()
# Batch prediction
Y_batch_preds = list()
for i, (X_batch, Y_batch) in enumerate(loader):
# Predict on batch
past_targets = X_batch['past_targets']
past_features = X_batch['past_features']
future_features = X_batch["future_features"]
past_observed_targets = X_batch['past_observed_targets']
if past_targets.ndim == 2:
past_targets = past_targets.unsqueeze(-1)
pred_kwargs = {"past_targets": past_targets,
"past_features": past_features,
"future_features": future_features}
for key in pred_kwargs.keys():
if pred_kwargs[key] is not None:
pred_kwargs[key] = pred_kwargs[key].float()
pred_kwargs.update({'past_observed_targets': past_observed_targets})
with torch.no_grad():
Y_batch_pred = self.network.predict(**pred_kwargs)
Y_batch_preds.append(Y_batch_pred.cpu())
return torch.cat(Y_batch_preds, 0).cpu().numpy()
|
# The MIT License
#
# Copyright (c) 2009 John Schember <john@nachtimwald.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE
from python_qt_binding.QtCore import Qt
from python_qt_binding.QtGui import QPainter
try:
from python_qt_binding.QtGui import QWidget, QFrame, QHBoxLayout
except Exception:
from python_qt_binding.QtWidgets import QWidget, QFrame, QHBoxLayout
class LineNumberWidget(QFrame):
    """Composite widget pairing a text edit with a line-number gutter.

    The nested ``NumberBar`` paints line numbers next to the wrapped
    edit; the frame installs itself as an event filter on the edit and
    its viewport so the bar repaints on any relevant event.
    """
    class NumberBar(QWidget):
        """Vertical bar that draws line numbers for the attached edit."""
        def __init__(self, *args):
            QWidget.__init__(self, *args)
            self.edit = None
            # it is the highest line that is currently visible.
            self.highest_line = 0
        def set_text_edit(self, edit):
            """Attach the text edit whose line numbers should be drawn."""
            self.edit = edit
        def update(self, *args):
            """Resize to fit the widest visible line number, then repaint.

            Deliberately overrides QWidget.update so that every repaint
            request first adjusts the bar's width.
            """
            # the +4 is used to compensate for the current line being bold.
            width = self.fontMetrics().width(str(self.highest_line)) + 4
            if self.width() != width:
                self.setFixedWidth(width)
            QWidget.update(self, *args)
        def paintEvent(self, event):
            """Paint one number per text block; the current line is bold.

            NOTE(review): assumes ``set_text_edit`` was called first --
            ``self.edit`` must not be None here.
            """
            contents_y = self.edit.verticalScrollBar().value()
            page_bottom = contents_y + self.edit.viewport().height()
            font_metrics = self.fontMetrics()
            current_block = self.edit.document().findBlock(self.edit.textCursor().position())
            painter = QPainter(self)
            painter.setPen(Qt.darkGray)
            line_count = 0
            # Iterate over all text blocks in the document.
            block = self.edit.document().begin()
            while block.isValid():
                line_count += 1
                # the top left position of the block in the document
                position = self.edit.document().documentLayout().blockBoundingRect(block).topLeft()
                # check if the position of the block is out side of visible area
                if position.y() > page_bottom:
                    break
                # we want the line number for the selected line to be bold.
                bold = False
                if block == current_block:
                    bold = True
                    font = painter.font()
                    font.setBold(True)
                    painter.setFont(font)
                    painter.setPen(Qt.black)
                # Draw the line number right justified at the y position of the
                # line. 3 is the magic padding number. drawText(x, y, text)
                painter.drawText(self.width() - font_metrics.width(str(line_count)) - 3, round(position.y()) - contents_y + font_metrics.ascent(), str(line_count))
                if bold:
                    font = painter.font()
                    font.setBold(False)
                    painter.setFont(font)
                    painter.setPen(Qt.darkGray)
                block = block.next()
            self.highest_line = line_count
            painter.end()
            QWidget.paintEvent(self, event)
    def __init__(self, editor, *args):
        QFrame.__init__(self, *args)
        self.setFrameStyle(QFrame.StyledPanel | QFrame.Sunken)
        self.edit = editor
        self.number_bar = self.NumberBar()
        self.number_bar.set_text_edit(self.edit)
        hbox = QHBoxLayout(self)
        hbox.setSpacing(0)
        hbox.setContentsMargins(0, 0, 0, 0)
        # hbox.setMargin(0) # removed: it is not supported by Qt5
        hbox.addWidget(self.number_bar)
        hbox.addWidget(self.edit)
        # Filter events from both the edit and its viewport so the number
        # bar stays in sync with scrolling, resizing and text changes.
        self.edit.installEventFilter(self)
        self.edit.viewport().installEventFilter(self)
    def eventFilter(self, obj, event):
        """Refresh the number bar on any event from the edit or viewport."""
        # Update the line numbers for all events on the text edit and the viewport.
        # This is easier than connecting all necessary signals.
        try:
            if obj in (self.edit, self.edit.viewport()):
                self.number_bar.update()
                return False
            # BUG FIX: the unbound base-class call must receive ``self``
            # explicitly; the original ``QFrame.eventFilter(obj, event)``
            # raised a TypeError (silently swallowed below) for every
            # event coming from any other object.
            return QFrame.eventFilter(self, obj, event)
        except Exception:
            # Best-effort: never let a filter error break the event loop,
            # but return a valid bool (the original fell through to None,
            # which some Qt bindings reject as a filter result).
            return False
    def get_text_edit(self):
        """Return the wrapped text edit widget."""
        return self.edit
|
#!/usr/bin/env python2
"""
Thin layer chromatography spot segmentation & quantification.
"""
# Need matplotlib for saving image
import matplotlib
import matplotlib.pyplot as plt
# Import other Python libraries we use
import argparse
from collections import defaultdict
from sys import stdout
from glob import glob
from datetime import datetime
import time
import csv
import multiprocessing
#from imageio import imread # This causes come problems; using PIL instead
import PIL
import numpy as np
from scipy.spatial.distance import euclidean
from skimage import dtype_limits
from skimage.feature import (peak_local_max,
blob_log,
)
from skimage.color import (rgb2gray,
label2rgb,
)
from skimage.measure import label
from skimage.morphology import watershed
from skimage.io import imsave
from skimage.segmentation import find_boundaries
from skimage.util import invert
#We use Tkinter for GUI
import Tkinter as tk
from PIL import Image, ImageTk
# Import image analysis library
import appaloosa
# Define and parse arguments; use custom MyFormatter to do both ArgumentDefault
# and RawDescription Formatters via multiple inheritence, this is a trick to
# preserve docstring formatting in --help output
class MyFormatter(argparse.ArgumentDefaultsHelpFormatter,
                  argparse.RawDescriptionHelpFormatter,
                  ):
    """Help formatter combining default-value display with raw description."""
    pass
parser = argparse.ArgumentParser(description=__doc__,
                                 formatter_class=MyFormatter,
                                 )
image_filename_helpstring = "Image of TLC plate"
parser.add_argument('image_filename',
                    help=image_filename_helpstring,
                    )
intermediate_images_helpstring = ("Output intermediate image steps to PNGs. "
                                  "Useful for understanding what's happening."
                                  )
# Figure size (matplotlib) shared by all intermediate debug images.
intermediate_images_figsize = 10
parser.add_argument('--intermediate_images',
                    action='store_true',
                    default=False,
                    help=intermediate_images_helpstring,
                    )
zoom_helpstring = ("Image display zoom. "
                   "This determines how large the image and window are."
                   )
parser.add_argument('--zoom',
                    type=float,
                    default=3,
                    help=zoom_helpstring,
                    )
args = parser.parse_args()
# Load plate image
image = np.array(PIL.Image.open(args.image_filename))
plate = appaloosa.Plate(image=image,
                        #image=imread(args.image_filename),
                        tag_in='original_image',
                        source_filename=args.image_filename,
                        )
if args.intermediate_images:
    plate.display(tag_in='original_image',
                  figsize=intermediate_images_figsize,
                  output_filename="original_image.png",
                  )
# Segment the plates from the background
plate.crop_to_plate(tag_in='original_image',
                    tag_out='cropped_image',
                    feature_out='crop_rotation',
                    second_pass=False,
                    )
if args.intermediate_images:
    plate.display(tag_in='cropped_image',
                  figsize=intermediate_images_figsize,
                  output_filename="cropped_image.png",
                  )
# Trim the outermost pixels a bit to make sure no background remains around the
# edges
cropped_image = plate.image_stash['cropped_image']
cropped_image_height, cropped_image_width = cropped_image.shape[:2]
cropped_image_min_dimension = min(cropped_image_height, cropped_image_width)
# Border width is a fixed fraction of the smaller image dimension.
percent_crop = 0.03
border = int(round(cropped_image_min_dimension * percent_crop))
plate.crop_border(tag_in='cropped_image',
                  tag_out='border_cropped_image',
                  border=border,
                  )
if args.intermediate_images:
    plate.display(tag_in='border_cropped_image',
                  figsize=intermediate_images_figsize,
                  output_filename="border_cropped_image.png",
                  )
# Rescale image to standard size
# This is very important because the image morphology parameters we use for analysis are defined
# in terms of pixels and therefore are specific to a (ballpark) resolution.
cropped_image = plate.image_stash['border_cropped_image']
cropped_height, cropped_width = cropped_image.shape[:2]
largest_dimension = max(cropped_height, cropped_width)
# Scale so the largest dimension is ~500 px, matching the pixel-based
# morphology parameters used below (e.g. median_disk_radius=31).
target_scale = 500
scaling_factor = float(target_scale) / largest_dimension
plate.rescale_image(tag_in='border_cropped_image',
                    tag_out='rescaled_image',
                    scaling_factor=scaling_factor,
                    )
if args.intermediate_images:
    plate.display(tag_in='rescaled_image',
                  figsize=intermediate_images_figsize,
                  output_filename="rescaled_image.png",
                  )
# Median correct the image to correct uneven intensity over the plate
uncorrected_image = plate.image_stash['rescaled_image']
corrected_image = appaloosa.Plate.median_correct_image(image=uncorrected_image,
                                                       median_disk_radius=31,
                                                       )
plate.image_stash['corrected_rescaled_image'] = corrected_image
if args.intermediate_images:
    plate.display(tag_in='corrected_rescaled_image',
                  figsize=intermediate_images_figsize,
                  output_filename="corrected_rescaled_image.png",
                  )
# Let's try segmenting the spots using the waterfall algorithm
plate.waterfall_segmentation(tag_in='corrected_rescaled_image',
                             feature_out='waterfall_basins',
                             R_out='R_img',
                             mg_out='mg_img',
                             median_disk_radius=31,
                             smoothing_sigma=2,
                             threshold_opening_size=2,
                             basin_open_close_size=5,
                             skeleton_label=0,
                             debug_output=False,
                             )
if args.intermediate_images:
    plate.display(tag_in='corrected_rescaled_image',
                  basins_feature='waterfall_basins',
                  figsize=intermediate_images_figsize,
                  output_filename="waterfall_basins.png",
                  )
# The largest item found is the background; we need to get rid of it
plate.remove_most_frequent_label(basins_feature='waterfall_basins',
                                 feature_out='filtered_waterfall_basins',
                                 debug_output=False,
                                 )
# Overlay finegrained watershed over waterfall segmentation
plate.overlay_watershed(tag_in='corrected_rescaled_image',
                        intensity_image_tag='corrected_rescaled_image',
                        median_radius=None,
                        filter_basins=True,
                        waterfall_basins_feature='filtered_waterfall_basins',
                        feature_out='overlaid_watershed_basins',
                        min_localmax_dist=5,
                        smoothing_sigma=1,
                        min_area=10,
                        min_intensity=0.1,
                        rp_radius_factor=None,
                        debug_output=False,
                        basin_open_close_size=None,
                        )
if args.intermediate_images:
    plate.display(tag_in='corrected_rescaled_image',
                  basins_feature='overlaid_watershed_basins',
                  figsize=intermediate_images_figsize,
                  output_filename='overlaid_watershed_basins.png',
                  )
# Measure basins
plate.measure_basin_intensities(tag_in='corrected_rescaled_image',
                                median_radius=None,
                                filter_basins=True,
                                radius_factor=None,
                                basins_feature='overlaid_watershed_basins',
                                feature_out='basin_intensities',
                                multiplier=10.0,
                                )
plate.find_basin_centroids(tag_in='corrected_rescaled_image',
                           basins_feature='overlaid_watershed_basins',
                           feature_out='basin_centroids',
                           )
# Each spot is given a unique integer identifier
#Its intensity is shown as I= <- this is currently omitted
if args.intermediate_images:
    plate.display(tag_in='rescaled_image',
                  figsize=70,
                  basins_feature='overlaid_watershed_basins',
                  basin_alpha=0.1,
                  baseline_feature=None,
                  solvent_front_feature=None,
                  lanes_feature=None,
                  basin_centroids_feature='basin_centroids',
                  basin_lane_assignments_feature=None,
                  #basin_intensities_feature='basin_intensities',
                  basin_rfs_feature=None,
                  lines_feature=None,
                  draw_boundaries=True,
                  side_by_side=False,
                  display_labels=True,
                  output_filename="initial_output.png",
                  )
# Display basins in GUI and begin interactive segmentation
# 'iterated_basins' is the working copy that the GUI callbacks below mutate.
plate.feature_stash['iterated_basins'] = \
    plate.feature_stash['overlaid_watershed_basins'].copy()
resize_ratio = args.zoom
def make_pil_image(color_image,
                   basins,
                   resize_ratio=3,
                   background_grid=5,
                   assignment_feature='base_assignments',
                   ):
    """Render *color_image* with basin boundaries as a resized PIL image.

    Background pixels are optionally overlaid with a sparse gray grid,
    all basin boundaries are blacked out, and basins already assigned to
    a baseline are re-outlined in that baseline's color.  Reads the
    module globals ``plate``, ``baseline_colors`` and ``canvas``.
    """
    if background_grid is not None:
        # Mark every background_grid-th background pixel (value 100) so
        # the user can see where the background is.
        gridded_image = color_image.copy()
        for (h, w), basin in np.ndenumerate(basins):
            if basin != 0:
                continue
            if h % background_grid != 0:
                continue
            if w % background_grid != 0:
                continue
            gridded_image[h, w] = 100
        color_image = gridded_image
    basin_boundaries = find_boundaries(basins,
                                       mode='inner',
                                       )
    h, w, num_channels = color_image.shape
    if num_channels == 4:
        # If alpha channel is not treated separately, then alpha for boundaries
        # becomes 1, so the boundaries appear white
        boundaries_list = [basin_boundaries] * (num_channels - 1)
        boundaries_list.append(np.zeros_like(basin_boundaries).astype(np.bool))
    else:
        boundaries_list = [basin_boundaries] * num_channels
    stacked_boundaries = np.stack(boundaries_list,
                                  axis=-1,
                                  )
    # Zero out boundary pixels (multiplying by the inverted boolean mask).
    segmented_image = color_image * ~stacked_boundaries
    global plate, baseline_colors, canvas
    if (assignment_feature is not None
        and assignment_feature in plate.feature_stash
        ):
        # Re-draw boundaries of assigned basins in their baseline color.
        assignments = plate.feature_stash[assignment_feature]
        for basin, base_assign_state in assignments.iteritems():
            isolated_basin = np.where(basins == basin,
                                      1,
                                      0,
                                      )
            isolated_boundary = find_boundaries(isolated_basin,
                                                mode='inner',
                                                )
            if num_channels == 4:
                boundaries_list = [isolated_boundary] * (num_channels - 1)
                boundaries_list.append(
                    np.zeros_like(isolated_boundary).astype(np.bool)
                )
            else:
                boundaries_list = [isolated_boundary] * num_channels
            stacked_boundaries = np.stack(boundaries_list,
                                          axis=-1,
                                          )
            baseline_color = baseline_colors[base_assign_state - 1]
            baseline_rgb = canvas.winfo_rgb(baseline_color)  # 16-bit
            # Scale the 16-bit Tk color channels into the image's dtype range.
            imin, imax = dtype_limits(color_image, clip_negative=False)
            baseline_r = float(baseline_rgb[0]) * imax / 65535
            baseline_g = float(baseline_rgb[1]) * imax / 65535
            baseline_b = float(baseline_rgb[2]) * imax / 65535
            for (h, w), boundary in np.ndenumerate(isolated_boundary):
                if boundary:
                    segmented_image[h, w, 0] = baseline_r
                    segmented_image[h, w, 1] = baseline_g
                    segmented_image[h, w, 2] = baseline_b
    # NOTE(review): assumes segmented_image values are in [0, 1] here --
    # scaling by 255 for 8-bit output; confirm against Plate image dtypes.
    uint8_image = np.rint(segmented_image * 255).astype(np.uint8)
    pil_image = Image.fromarray(uint8_image)
    #display_size = 1000
    image_width, image_height = pil_image.size
    #resize_ratio = min(float(display_size) / image_width,
    #                   float(display_size) / image_height,
    #                   )
    resized_width = int(round(image_width * resize_ratio))
    resized_height = int(round(image_height * resize_ratio))
    pil_image = pil_image.resize((resized_width, resized_height))
    return pil_image
#meshgrid would be faster, but I this is easier to understand
# Precomputed dot lattice (1 every grid_spacing pixels in both axes) used
# by grid_background() below.
background_grid = np.zeros_like(plate.feature_stash['iterated_basins'])
grid_spacing = 3
for h, w in np.ndindex(background_grid.shape):
    if h % grid_spacing == 0 and w % grid_spacing == 0:
        background_grid[h, w] = 1
background_ovals = []
def grid_background(canvas,
                    basins,
                    ):
    """Draw gray dots on the canvas over background (basin == 0) pixels.

    Uses the precomputed module-level ``background_grid`` lattice and
    records the created ovals in ``background_ovals``.
    """
    global background_grid
    this_grid = np.where(basins == 0,
                         background_grid,
                         0,
                         )
    global background_ovals
    background_ovals = []
    oval_radius = 3
    global resize_ratio
    for (h, w), g in np.ndenumerate(this_grid):
        if g == 1:
            oval = canvas.create_oval(w * resize_ratio - oval_radius,
                                      h * resize_ratio - oval_radius,
                                      w * resize_ratio + oval_radius,
                                      h * resize_ratio + oval_radius,
                                      width=0,
                                      fill='gray',
                                      )
            background_ovals.append(oval)
# Build the Tk main window: the segmented plate image on a canvas plus a
# button bar below.
root = tk.Tk()
color_image = plate.image_stash['rescaled_image']
pil_image = make_pil_image(color_image=color_image,
                           basins=plate.feature_stash['iterated_basins'],
                           resize_ratio=resize_ratio,
                           )
tk_image = ImageTk.PhotoImage(master=root,
                              image=pil_image,
                              )
image_width, image_height = pil_image.size
canvas = tk.Canvas(root,
                   width=image_width,
                   height=image_height,
                   )
canvas.pack()
canvas_image = canvas.create_image(0, 0,
                                   anchor='nw',
                                   image=tk_image,
                                   )
bottom_frame = tk.Frame(root)
bottom_frame.pack(side=tk.BOTTOM)
# 'quit' here is the site builtin; it terminates the interpreter.
quit_button = tk.Button(bottom_frame,
                        text="Quit",
                        command=quit,
                        )
quit_button.grid(column=0, row=1)
def order_centroids(basin_centroids,
                    line,
                    ):
    """Rank basins by their centroid's projection onto *line*.

    Each centroid (h, w) is projected onto the line segment; basins are
    then numbered 1..N by projected position and a mapping
    {original basin label: rank} is returned.
    """
    segment_start, segment_end = line
    # Project every centroid onto the reference segment.
    projections = {}
    for basin, (h, w) in basin_centroids.iteritems():
        projections[basin] = appaloosa.Plate.project_point_on_segment(
            point=(h, w),
            segment=(segment_start, segment_end),
        )
    # Sort by projected position and hand out consecutive 1-based ranks.
    ordered = sorted(projections.items(), key=lambda item: item[1])
    return {basin: rank
            for rank, (basin, _position) in enumerate(ordered, start=1)
            }
def on_save():
    """Save-and-quit callback: write the segmented image and a CSV report.

    Renumbers basins by solvent-front order (when a solvent front was
    drawn), renders the final annotated image, and writes per-spot
    intensity / baseline / Rf rows to <hash>_intensities.csv.
    """
    global plate
    basin_centroids = plate.feature_stash['basin_centroids']
    solvent_front = plate.feature_stash.get('solvent_front', None)
    if solvent_front is not None:
        # Renumber spots along the solvent front direction.
        basin_map = order_centroids(basin_centroids=basin_centroids,
                                    line=solvent_front,
                                    )
    else:
        # No solvent front: keep original labels.
        basin_map = {basin: basin for basin in basin_centroids.iterkeys()}
    basins = plate.feature_stash['iterated_basins']
    sorted_basins = np.zeros_like(basins)
    for basin, sorted_basin in basin_map.iteritems():
        sorted_basins = np.where(basins == basin,
                                 sorted_basin,
                                 sorted_basins,
                                 )
    plate.feature_stash['sorted_basins'] = sorted_basins
    sorted_centroids = {basin_map[basin]: centroid
                        for basin, centroid in basin_centroids.iteritems()
                        }
    plate.feature_stash['sorted_centroids'] = sorted_centroids
    # Hash of the current time becomes the output file basename.
    epoch_hash = appaloosa.epoch_to_hash(time.time())
    output_basename = epoch_hash
    print("Saving using basename " + str(output_basename))
    image_filename = output_basename + "_segmented.png"
    plate.display(tag_in='rescaled_image',
                  figsize=70,
                  #basins_feature='iterated_basins',
                  basins_feature='sorted_basins',
                  basin_alpha=0.1,
                  baseline_feature=None,
                  solvent_front_feature=None,
                  lanes_feature=None,
                  #basin_centroids_feature='basin_centroids',
                  basin_centroids_feature='sorted_centroids',
                  basin_lane_assignments_feature=None,
                  #basin_intensities_feature='basin_intensities',
                  basin_rfs_feature=None,
                  lines_feature=None,
                  draw_boundaries=True,
                  side_by_side=False,
                  display_labels=True,
                  output_filename=image_filename,
                  )
    csv_filename = output_basename + "_intensities.csv"
    basin_intensities = plate.feature_stash['basin_intensities']
    indexed_basin_rfs = plate.feature_stash.get('indexed_basin_rfs', {})
    # Flatten {baseline: {basin: rf}} into {basin: (baseline, rf)};
    # each basin may be assigned to at most one baseline.
    collated_basin_rfs = {}
    for base_assign_state, rf_dict in indexed_basin_rfs.iteritems():
        for basin, rf in rf_dict.iteritems():
            assert basin not in collated_basin_rfs
            collated_basin_rfs[basin] = (base_assign_state, rf)
    with open(csv_filename, 'w') as csv_file:
        csv_writer = csv.writer(csv_file)
        csv_header = ["Spot #", "Intensity", "Baseline #", "Rf"]
        csv_writer.writerow(csv_header)
        for basin, sorted_basin in sorted(basin_map.iteritems(),
                                          key=lambda x:x[1],
                                          ):
            #for basin, intensity in sorted(basin_intensities.iteritems(),
            #                               key=lambda x:x[0],
            #                               ):
            intensity = basin_intensities[basin]
            base_assign_state, rf = collated_basin_rfs.get(basin, (None, None))
            basin_row = [sorted_basin, intensity, base_assign_state, rf]
            csv_writer.writerow(basin_row)
    print("Finished writing CSV.")
save_button = tk.Button(bottom_frame,
                        text="Save & Quit",
                        command=on_save,
                        )
save_button.grid(column=1, row=1)
def alive():
    """Debug callback for the 'Alive?' button: confirm the GUI responds."""
    message = "Alive!"
    print(message)
# Debug button plus the entry controlling watershed subdivision resolution.
alive_button = tk.Button(bottom_frame,
                         text="Alive?",
                         command=alive,
                         )
alive_button.grid(column=8, row=1)
maxima_distance_label = tk.Label(bottom_frame,
                                 text="Subdivision resolution",
                                 )
maxima_distance_label.grid(column=0, row=2)
maxima_distance_entry = tk.Entry(bottom_frame)
# Default minimum distance between local maxima when subdividing a spot.
maxima_distance_entry.insert(0, 2)
maxima_distance_entry.grid(column=1, row=2)
def remeasure_basins(plate):
    """Recompute spot intensities and centroids after basins changed."""
    plate.measure_basin_intensities(tag_in='corrected_rescaled_image',
                                    median_radius=None,
                                    filter_basins=True,
                                    radius_factor=None,
                                    basins_feature='iterated_basins',
                                    feature_out='basin_intensities',
                                    multiplier=10.0,
                                    )
    plate.find_basin_centroids(tag_in='corrected_rescaled_image',
                               basins_feature='iterated_basins',
                               feature_out='basin_centroids',
                               )
# Last-clicked canvas points (at display scale) and their marker ovals;
# the buffer keeps only the most recent left_click_buffer_size clicks.
left_click_buffer = []
left_click_buffer_size = 2
left_click_shapes = []
def right_click(event):
    """Right-click callback: delete the spot under the cursor.

    Maps the click from display to image coordinates, zeroes out the
    clicked basin, remeasures, and refreshes the canvas image.
    """
    stdout.write("Deleting spot...")
    stdout.flush()
    w, h = event.x, event.y
    global plate, canvas, canvas_image, tk_image, pil_image, resize_ratio
    basins = plate.feature_stash['iterated_basins']
    # Convert display-scale click coordinates back to image pixels.
    mapped_w = int(round(float(w) / resize_ratio))
    mapped_h = int(round(float(h) / resize_ratio))
    basin = basins[mapped_h, mapped_w]
    if basin == 0:
        print("This is background; not deleting.")
        return
    basins = np.where(basins == basin,
                      0,
                      basins,
                      )
    plate.feature_stash['iterated_basins'] = basins
    remeasure_basins(plate)
    color_image = plate.image_stash['rescaled_image']
    pil_image = make_pil_image(color_image=color_image,
                               basins=plate.feature_stash['iterated_basins'],
                               resize_ratio=resize_ratio,
                               )
    # Keep a global reference to tk_image so Tk does not garbage-collect it.
    tk_image = ImageTk.PhotoImage(image=pil_image)
    canvas.itemconfig(canvas_image, image=tk_image)
    #grid_background(canvas=canvas,
    #                basins=basins,
    #                )
    stdout.write("complete\n")
    stdout.flush()
canvas.bind('<Button 3>', right_click)
def subdivide_spot():
    """Watershed-subdivide the spot under the last left-click.

    The subdivision resolution (minimum distance between local maxima)
    is read from the maxima_distance entry widget.
    """
    stdout.write("Subdivision...")
    stdout.flush()
    global plate, resize_ratio, canvas, canvas_image, pil_image, tk_image
    global maxima_distance_entry
    maxima_distance = int(maxima_distance_entry.get())
    if len(left_click_buffer) < 1:
        print("Insufficient points defined")
        return
    w, h = left_click_buffer[-1]
    mapped_w = int(round(float(w) / resize_ratio))
    mapped_h = int(round(float(h) / resize_ratio))
    basins = plate.feature_stash['iterated_basins']
    basin = basins[mapped_h, mapped_w]  # Note h, w reversed vs event
    if basin == 0:
        print("This is background; not splitting.")
        return
    plate.subdivide_basin(tag_in='corrected_rescaled_image',
                          feature_out='iterated_basins',
                          basins_feature='iterated_basins',
                          target_basin=basin,
                          smoothing_sigma=None,
                          maxima_distance=maxima_distance,
                          )
    remeasure_basins(plate)
    color_image = plate.image_stash['rescaled_image']
    pil_image = make_pil_image(color_image=color_image,
                               basins=plate.feature_stash['iterated_basins'],
                               resize_ratio=resize_ratio,
                               )
    tk_image = ImageTk.PhotoImage(image=pil_image)
    canvas.itemconfig(canvas_image, image=tk_image)
    stdout.write("complete\n")
    stdout.flush()
# NOTE(review): this name is reassigned to the "Linear split" button
# further below; harmless since the widget stays referenced by Tk.
linear_split_button = tk.Button(bottom_frame,
                                text="Watershed subdivide spot",
                                command=subdivide_spot,
                                )
linear_split_button.grid(column=3, row=2)
def left_click(event):
    """Left-click callback: record the point and draw orange markers.

    Keeps at most left_click_buffer_size recent points; older marker
    ovals are removed from the canvas.
    """
    w, h = event.x, event.y
    global left_click_buffer, left_click_buffer_size, left_click_shapes
    left_click_buffer.append((w, h))
    left_click_buffer = left_click_buffer[-left_click_buffer_size:]
    oval_radius = 3
    for w, h in left_click_buffer:
        oval = canvas.create_oval(w - oval_radius,
                                  h - oval_radius,
                                  w + oval_radius,
                                  h + oval_radius,
                                  width=0,
                                  fill='orange',
                                  )
        left_click_shapes.append(oval)
    # Delete markers that fell out of the click buffer.
    for oval in left_click_shapes[:-left_click_buffer_size]:
        canvas.delete(oval)
canvas.bind('<Button 1>', left_click)
def linear_split():
    """Split the spot under the first of the last two clicks along the
    line defined by those two clicks."""
    stdout.write("Subdivision...")
    stdout.flush()
    global left_click_buffer
    global plate, resize_ratio, canvas, canvas_image, pil_image, tk_image
    if len(left_click_buffer) < 2:
        print("Insufficient points defined")
        return
    (w1, h1), (w2, h2) = left_click_buffer[-2:]
    # Convert both click points from display to image coordinates.
    mapped_w1 = int(round(float(w1) / resize_ratio))
    mapped_h1 = int(round(float(h1) / resize_ratio))
    mapped_w2 = int(round(float(w2) / resize_ratio))
    mapped_h2 = int(round(float(h2) / resize_ratio))
    line = (mapped_h1, mapped_w1), (mapped_h2, mapped_w2)
    basins = plate.feature_stash['iterated_basins']
    basin = basins[mapped_h1, mapped_w1]  # Note h, w reversed vs event
    if basin == 0:
        print("This is background; not splitting.")
        return
    plate.linear_split_basin(feature_out='iterated_basins',
                             basins_feature='iterated_basins',
                             line=line,
                             target_basin=basin,
                             )
    remeasure_basins(plate)
    color_image = plate.image_stash['rescaled_image']
    pil_image = make_pil_image(color_image=color_image,
                               basins=plate.feature_stash['iterated_basins'],
                               resize_ratio=resize_ratio,
                               )
    tk_image = ImageTk.PhotoImage(image=pil_image)
    canvas.itemconfig(canvas_image, image=tk_image)
    stdout.write("complete\n")
    stdout.flush()
linear_split_button = tk.Button(bottom_frame,
                                text="Linear split",
                                command=linear_split,
                                )
linear_split_button.grid(column=4, row=1)
# Canvas item id of the drawn solvent front line (None until drawn) and
# the line's endpoints in image coordinates.
solvent_front = None
solvent_front_line = None
def solvent():
    """Define the solvent front from the last two clicks and draw it.

    Stores the extended line (image coordinates) in the plate's
    feature stash and draws its display-scale extension in red.
    """
    global left_click_buffer, left_click_buffer_size, solvent_front, plate
    global canvas, pil_image, solvent_front_line
    if len(left_click_buffer) < 2:
        print("Insufficient points defined")
        return
    line = (w1, h1), (w2, h2) = left_click_buffer[-2:]
    mapped_w1 = int(round(float(w1) / resize_ratio))
    mapped_h1 = int(round(float(h1) / resize_ratio))
    mapped_w2 = int(round(float(w2) / resize_ratio))
    mapped_h2 = int(round(float(h2) / resize_ratio))
    mapped_line = (mapped_w1, mapped_h1), (mapped_w2, mapped_h2)
    # Extend the clicked segment to the image borders.
    extended_line = appaloosa.Plate.extend_line(
        line=mapped_line,
        image=plate.feature_stash['iterated_basins'],
    )
    plate.feature_stash['solvent_front'] = extended_line
    # Extend again at display scale for drawing on the canvas.
    pil_w, pil_h = pil_image.size
    (ew1, eh1), (ew2, eh2) = appaloosa.Plate.extend_line(line=line,
                                                         image=np.zeros((pil_h, pil_w)),
                                                         )
    if solvent_front is not None:
        canvas.delete(solvent_front)
    solvent_front_line = ((mapped_w1, mapped_h1), (mapped_w2, mapped_h2))
    solvent_front = canvas.create_line(ew1, eh1, ew2, eh2, fill='red')
solvent_front_button = tk.Button(bottom_frame,
                                 text="Solvent front",
                                 command=solvent,
                                 )
solvent_front_button.grid(column=5, row=1)
# One display color per supported baseline (max four baselines).
baseline_colors = ['orange',
                   'orange red',
                   'deep pink',
                   'maroon',
                   ]
baselines = []
def add_baseline():
    """Add a baseline parallel to the solvent front through the last click.

    The clicked point is projected onto the solvent front; the front is
    then translated by that offset, stored in the plate's 'baselines'
    feature, and drawn in the next available baseline color.
    """
    global left_click_buffer, plate, canvas, pil_image, solvent_front
    global baseline_colors, baselines, solvent_front_line, pil_image
    global resize_ratio
    if solvent_front is None:
        print("Solvent front not defined")
        return
    if len(left_click_buffer) < 1:
        print("Insufficient points defined")
        return
    if len(baselines) >= len(baseline_colors):
        print("All baselines have been defined; ignoring.")
        return
    point = w, h = left_click_buffer[-1]
    mapped_w = int(round(float(w) / resize_ratio))
    mapped_h = int(round(float(h) / resize_ratio))
    mapped_point = mapped_w, mapped_h
    projected_w, projected_h = appaloosa.Plate.project_point_on_segment(
        point=mapped_point,
        segment=solvent_front_line,
    )
    # Offset between the click and its projection = translation vector.
    delta_w, delta_h = mapped_w - projected_w, mapped_h - projected_h
    basins = plate.feature_stash['iterated_basins']
    baseline = appaloosa.Plate.translate_line(line=solvent_front_line,
                                              h=delta_h, w=delta_w,
                                              extend=True,
                                              image=np.zeros_like(basins),
                                              )
    baseline_dict = plate.feature_stash.setdefault('baselines', {})
    baseline_dict[len(baselines)] = baseline
    baselines.append(baseline)
    (sw1, sh1), (sw2, sh2) = baseline
    # Scale endpoints back up to display coordinates for drawing.
    ew1 = int(round(sw1 * resize_ratio))
    eh1 = int(round(sh1 * resize_ratio))
    ew2 = int(round(sw2 * resize_ratio))
    eh2 = int(round(sh2 * resize_ratio))
    baseline_color = baseline_colors[len(baselines) - 1]
    # NOTE(review): the canvas item id overwrites the local 'baseline'
    # and is not stored, so drawn baselines cannot be erased later.
    baseline = canvas.create_line(ew1, eh1, ew2, eh2, fill=baseline_color)
baseline_button = tk.Button(bottom_frame,
                            text="Add baseline",
                            command=add_baseline,
                            )
baseline_button.grid(column=6, row=1)
# Currently selected baseline for spot assignment (0 = none selected).
base_assign_state = 0
def _toggle_base_assign(target):
    """Toggle the global baseline-assignment selection to *target*.

    Clicking a baseline button selects that baseline (1-4) for spot
    assignment; clicking the already-active button deselects it
    (state 0).  Button reliefs are refreshed so only the active button
    appears sunken.  Replaces four copy-pasted per-button bodies.
    """
    global base_assign_state
    # Toggle semantics: re-clicking the active baseline deactivates it.
    if base_assign_state != target:
        base_assign_state = target
    else:
        base_assign_state = 0
    # Buttons are created at module level further below; names resolve
    # at call time, exactly as in the original per-button functions.
    buttons = (base1_assign_button,
               base2_assign_button,
               base3_assign_button,
               base4_assign_button,
               )
    for index, button in enumerate(buttons, start=1):
        relief = tk.SUNKEN if index == base_assign_state else tk.RAISED
        button.config(relief=relief)
def base1_assign():
    """Button callback: toggle assignment mode for baseline #1."""
    _toggle_base_assign(1)
def base2_assign():
    """Button callback: toggle assignment mode for baseline #2."""
    _toggle_base_assign(2)
def base3_assign():
    """Button callback: toggle assignment mode for baseline #3."""
    _toggle_base_assign(3)
def base4_assign():
    """Button callback: toggle assignment mode for baseline #4."""
    _toggle_base_assign(4)
# One toggle button per possible baseline (1-4).
base1_assign_button = tk.Button(bottom_frame,
                                text="1",
                                command=base1_assign,
                                )
base1_assign_button.grid(column=4, row=2)
base2_assign_button = tk.Button(bottom_frame,
                                text="2",
                                command=base2_assign,
                                )
base2_assign_button.grid(column=5, row=2)
base3_assign_button = tk.Button(bottom_frame,
                                text="3",
                                command=base3_assign,
                                )
base3_assign_button.grid(column=6, row=2)
base4_assign_button = tk.Button(bottom_frame,
                                text="4",
                                command=base4_assign,
                                )
base4_assign_button.grid(column=7, row=2)
def assign(event):
    # Assign the spot (watershed basin) under the double-clicked point to the
    # currently selected baseline, recompute Rf values for that baseline's
    # spots, and refresh the canvas image.
    global base_assign_state, plate, canvas, canvas_image, pil_image, tk_image
    global resize_ratio
    if base_assign_state == 0:
        print("No baseline chosen for assignment; ignoring.")
        return
    if len(baselines) < base_assign_state:
        # NOTE(review): `baselines` here is a module-level global, while the
        # lookup below reads plate.feature_stash['baselines'] -- confirm the
        # two are kept in sync.
        print("baseline #"
              + str(base_assign_state)
              + " not yet defined; ignoring."
              )
        return
    # Map display (canvas) coordinates back to image coordinates.
    w, h = event.x, event.y
    mapped_w = int(round(float(w) / resize_ratio))
    mapped_h = int(round(float(h) / resize_ratio))
    basins = plate.feature_stash['iterated_basins']
    basin = basins[mapped_h, mapped_w]
    if basin == 0:
        # Basin label 0 is background.
        print("Clicked on background; not assigned.")
        return
    # Record (or overwrite) this basin's baseline assignment.
    assignments = plate.feature_stash.setdefault('base_assignments', {})
    assignments[basin] = base_assign_state
    baseline = plate.feature_stash['baselines'][base_assign_state - 1]
    plate.feature_stash['temp_baseline'] = baseline
    # Rf computation is restricted to centroids assigned to this baseline.
    basin_centroids = plate.feature_stash['basin_centroids']
    filtered_basin_centroids = {Label: centroid
                                for Label, centroid
                                in basin_centroids.iteritems()
                                if (Label in assignments
                                    and assignments[Label] == base_assign_state
                                    )
                                }
    plate.feature_stash['temp_basin_centroids'] = filtered_basin_centroids
    plate.compute_basin_rfs(basin_centroids_feature='temp_basin_centroids',
                            baseline_feature='temp_baseline',
                            solvent_front_feature='solvent_front',
                            feature_out='temp_basin_rfs',
                            )
    # Cache the per-baseline Rf table for the 'd' keyboard overlay.
    indexed_basin_rfs = plate.feature_stash.setdefault('indexed_basin_rfs', {})
    indexed_basin_rfs[base_assign_state] = plate.feature_stash['temp_basin_rfs']
    # Redraw with assignment coloring.
    color_image = plate.image_stash['rescaled_image']
    pil_image = make_pil_image(color_image=color_image,
                               basins=plate.feature_stash['iterated_basins'],
                               resize_ratio=resize_ratio,
                               assignment_feature='base_assignments',
                               )
    tk_image = ImageTk.PhotoImage(image=pil_image)
    canvas.itemconfig(canvas_image, image=tk_image)
# Double-left-click on the canvas assigns the clicked spot to a baseline.
canvas.bind('<Double-Button-1>', assign)
canvas.focus_set()
# Maps basin label -> canvas text item showing its intensity/Rf overlay.
basin_texts = {}
def make_boolean_circle(image,
                        h, w,
                        radius,
                        ):
    """Return a boolean mask with image's 2-D shape: True inside the circle.

    The circle is centered at (h, w) with the given radius; center and
    radius are rounded to the nearest integer first.  Trailing axes of
    `image` (e.g. color channels) are ignored.
    """
    center_h, center_w = int(round(h)), int(round(w))
    rounded_radius = int(round(radius))
    n_rows, n_cols = image.shape[:2]
    # Open grid broadcasts to the full (n_rows, n_cols) shape on comparison.
    rows, cols = np.ogrid[:n_rows, :n_cols]
    squared_distance = (rows - center_h) ** 2 + (cols - center_w) ** 2
    return squared_distance <= rounded_radius ** 2
def circle_filter(image,
                  basins,
                  basin,
                  radius,
                  ):
    """Trim `basin` down to its best-fitting circle of the given radius.

    Pixels labelled `basin` that fall outside the best circle (as found by
    appaloosa.Plate.best_circle) are relabelled 0 (background); every other
    basin is left untouched.  Returns the updated basins array.
    """
    best_h, best_w, best_circle, best_value = appaloosa.Plate.best_circle(
        image=image,
        basins=basins,
        basin=basin,
        radius=radius,
    )
    # True exactly where this basin sticks out past its best circle.
    outside_circle = (basins == basin) & ~best_circle
    return np.where(outside_circle,
                    np.zeros_like(basins),
                    basins,
                    )
def circle_filter_all(image,
                      basins,
                      radius,
                      ):
    """Apply circle_filter() once for every basin label present in `basins`.

    The input array is not modified in place; each pass produces a fresh
    array via np.where and the accumulated result is returned.
    """
    current = basins
    for basin_label in set(basins.reshape(-1)):
        current = circle_filter(image=image,
                                basins=current,
                                basin=basin_label,
                                radius=radius,
                                )
    return current
def circle_filter_all_button():
mode = 'isolated_LoG_MP'
if mode == 'LoG':
stdout.write("Applying circle filter to all basins...")
stdout.flush()
global plate, circle_filter_entry
radius = int(circle_filter_entry.get())
plate.image_stash['inverted_corrected_rescaled_image'] = \
invert(plate.image_stash['corrected_rescaled_image'])
plate.find_blobs(tag_in='inverted_corrected_rescaled_image',
feature_out='blobs_log',
min_sigma=5,
max_sigma=max_radius,
num_sigma=10,
threshold=0.01,
overlap=0.5,
)
#plate.display(tag_in='corrected_rescaled_image',
# figsize=20,
# blobs_feature='blobs_log',
# output_filename='BLOBS.png',
# )
basins = plate.feature_stash['iterated_basins']
per_basin_blobs = defaultdict(list)
for blob in plate.feature_stash['blobs_log']:
h, w, r = blob
h, w, r = int(h), int(w), int(r)
basin = basins[h, w]
if basin == 0:
continue
per_basin_blobs[basin].append((h, w, r))
largest_per_basin_blobs = {basin: max(blobs, key=lambda x:x[2])
for basin, blobs
in per_basin_blobs.iteritems()
}
circle_scaling = 1.5
for basin, (h, w, r) in largest_per_basin_blobs.iteritems():
boolean_circle_array = appaloosa.Plate.make_boolean_circle(
image=plate.image_stash['corrected_rescaled_image'],
h=h, w=w,
radius=r,
)
updated_basins = circle_filter(image=plate.image_stash['corrected_rescaled_image'],
basins=plate.feature_stash['iterated_basins'],
basin=basin,
radius=r*circle_scaling,
)
plate.feature_stash['iterated_basins'] = updated_basins
remeasure_basins(plate)
global tk_image, canvas, pil_image, canvas_image, resize_ratio
color_image = plate.image_stash['rescaled_image']
pil_image = make_pil_image(
color_image=color_image,
basins=plate.feature_stash['iterated_basins'],
resize_ratio=resize_ratio,
)
tk_image = ImageTk.PhotoImage(image=pil_image)
canvas.itemconfig(canvas_image, image=tk_image)
stdout.write("complete\n")
stdout.flush()
elif mode == 'isolated_LoG':
raise NotImplementedError("Superseded by isolated_LoG_MP")
stdout.write("Applying circle filter to all basins...")
stdout.flush()
global plate, circle_filter_entry
max_radius = int(circle_filter_entry.get())
plate.image_stash['inverted_corrected_rescaled_image'] = \
invert(plate.image_stash['corrected_rescaled_image'])
basins = plate.feature_stash['iterated_basins']
per_basin_blobs = defaultdict(list)
all_blobs = []
inverted_image_min = \
np.amin(plate.image_stash['inverted_corrected_rescaled_image'])
for basin in iter(set(basins.reshape(-1))):
if basin == 0:
continue
plate.image_stash['isolated_image'] = np.where(
basins == basin,
plate.image_stash['inverted_corrected_rescaled_image'],
inverted_image_min,
)
feature_name = 'isolated_' + str(basin) + '_blobs'
plate.find_blobs(tag_in='isolated_image',
feature_out=feature_name,
min_sigma=5,
max_sigma=max_radius,
num_sigma=10,
threshold=0.01,
overlap=0.5,
)
for blob in plate.feature_stash[feature_name]:
h, w, r = blob
h, w, r = int(h), int(w), int(r)
per_basin_blobs[basin].append((h, w, r))
all_blobs.append((h, w, r))
plate.feature_stash['all_blobs'] = all_blobs
#plate.display(tag_in='corrected_rescaled_image',
# figsize=20,
# blobs_feature='all_blobs',
# output_filename='ISOLATED_BLOBS.png',
# )
largest_per_basin_blobs = {basin: max(blobs, key=lambda x:x[2])
for basin, blobs
in per_basin_blobs.iteritems()
}
circle_scaling = 1.5
for basin, (h, w, r) in largest_per_basin_blobs.iteritems():
boolean_circle_array = appaloosa.Plate.make_boolean_circle(
image=plate.image_stash['corrected_rescaled_image'],
h=h, w=w,
radius=r,
)
updated_basins = circle_filter(image=plate.image_stash['corrected_rescaled_image'],
basins=plate.feature_stash['iterated_basins'],
basin=basin,
radius=r*circle_scaling,
)
plate.feature_stash['iterated_basins'] = updated_basins
remeasure_basins(plate)
global tk_image, canvas, pil_image, canvas_image, resize_ratio
color_image = plate.image_stash['rescaled_image']
pil_image = make_pil_image(
color_image=color_image,
basins=plate.feature_stash['iterated_basins'],
resize_ratio=resize_ratio,
)
tk_image = ImageTk.PhotoImage(image=pil_image)
canvas.itemconfig(canvas_image, image=tk_image)
stdout.write("complete\n")
stdout.flush()
elif mode == 'isolated_LoG_MP':
stdout.write("Applying circle filter to all basins...")
stdout.flush()
global plate, circle_filter_entry
max_radius = int(circle_filter_entry.get())
plate.image_stash['inverted_corrected_rescaled_image'] = \
invert(plate.image_stash['corrected_rescaled_image'])
basins = plate.feature_stash['iterated_basins']
per_basin_blobs = defaultdict(list)
all_blobs = []
inverted_image_min = \
np.amin(plate.image_stash['inverted_corrected_rescaled_image'])
pool = multiprocessing.Pool(processes=None,
maxtasksperchild=None,
)
processes = []
for basin in iter(set(basins.reshape(-1))):
if basin == 0:
continue
plate.image_stash['isolated_image'] = np.where(
basins == basin,
plate.image_stash['inverted_corrected_rescaled_image'],
inverted_image_min,
)
isolated_image = plate.image_stash['isolated_image']
min_sigma = 5
max_sigma = max_radius
num_sigma = 10
threshold = 0.01
overlap = 0.5
process = pool.apply_async(blob_log,
(isolated_image,
min_sigma,
max_sigma,
num_sigma,
threshold,
overlap,
)
)
processes.append((basin, process))
pool.close()
pool.join()
for basin, process in processes:
blobs = process.get()
for blob in blobs:
h, w, r = blob
h, w, r = int(h), int(w), int(r)
per_basin_blobs[basin].append((h, w, r))
all_blobs.append((h, w, r))
plate.feature_stash['all_blobs'] = all_blobs
#plate.display(tag_in='corrected_rescaled_image',
# figsize=20,
# blobs_feature='all_blobs',
# output_filename='ISOLATED_BLOBS.png',
# )
#best_per_basin_blobs = {basin: max(blobs, key=lambda x:x[2])
# for basin, blobs
# in per_basin_blobs.iteritems()
# }
best_per_basin_blobs = {}
for basin, blobs in per_basin_blobs.iteritems():
best_h, best_w, best_r, best_value = None, None, None, None
for blob in blobs:
h, w, r = blob
h, w, r = int(h), int(w), int(r)
image = plate.image_stash['corrected_rescaled_image']
boolean_circle = make_boolean_circle(image=image,
h=h, w=w,
radius=r,
)
circle_sum = np.sum(np.where(boolean_circle, image, 0))
circle_area = np.sum(np.where(boolean_circle, 1, 0))
circle_value = circle_sum / float(circle_area)
if best_value is None or circle_value < best_value:
best_h = h
best_w = w
best_r = r
best_value = circle_value
best_per_basin_blobs[basin] = (best_h, best_w, best_r)
circle_scaling = 1.5
pool = multiprocessing.Pool(processes=None,
maxtasksperchild=None,
)
processes = []
updated_basin_list = []
for basin, (h, w, r) in best_per_basin_blobs.iteritems():
image = plate.image_stash['corrected_rescaled_image']
radius = r
process = pool.apply_async(make_boolean_circle,
(image,
h, w,
radius*circle_scaling,
)
)
processes.append((basin, process))
pool.close()
pool.join()
updated_basins = basins
for basin, process in processes:
boolean_circle_array = process.get()
logical_filter = np.where(
(updated_basins == basin) & ~boolean_circle_array,
False,
True,
)
updated_basins = np.where(logical_filter,
updated_basins,
np.zeros_like(updated_basins),
)
plate.feature_stash['iterated_basins'] = updated_basins
remeasure_basins(plate)
global tk_image, canvas, pil_image, canvas_image, resize_ratio
color_image = plate.image_stash['rescaled_image']
pil_image = make_pil_image(
color_image=color_image,
basins=plate.feature_stash['iterated_basins'],
resize_ratio=resize_ratio,
)
tk_image = ImageTk.PhotoImage(image=pil_image)
canvas.itemconfig(canvas_image, image=tk_image)
stdout.write("complete\n")
stdout.flush()
elif mode == 'manual':
stdout.write("Applying circle filter to all basins...")
stdout.flush()
global plate, circle_filter_entry
basins = plate.feature_stash['iterated_basins']
image = plate.image_stash['corrected_rescaled_image']
radius = int(circle_filter_entry.get())
updated_basins = circle_filter_all(image=image,
basins=basins,
radius=radius,
)
plate.feature_stash['iterated_basins'] = updated_basins
remeasure_basins(plate)
global tk_image, canvas, pil_image, canvas_image, resize_ratio
color_image = plate.image_stash['rescaled_image']
pil_image = make_pil_image(
color_image=color_image,
basins=plate.feature_stash['iterated_basins'],
resize_ratio=resize_ratio,
)
tk_image = ImageTk.PhotoImage(image=pil_image)
canvas.itemconfig(canvas_image, image=tk_image)
stdout.write("complete\n")
stdout.flush()
else:
print("Invalid circle filter mode...skipping.")
def keyboard(event):
    # Keyboard dispatcher for the canvas; keys act on the spot (basin)
    # under the mouse pointer:
    #   'd' -- draw the spot's intensity (and Rf, if assigned) as text
    #   'c' -- apply a circle filter to just that spot
    char = event.char
    global resize_ratio, plate, canvas
    # Map display coordinates to image coordinates.
    w, h = event.x, event.y
    mapped_w = int(round(float(w) / resize_ratio))
    mapped_h = int(round(float(h) / resize_ratio))
    basins = plate.feature_stash['iterated_basins']
    basin = basins[mapped_h, mapped_w]
    if basin == 0:
        print("Mouse over background; ignoring.")
        return
    height, width = basins.shape
    # NOTE(review): this bounds check runs after basins[mapped_h, mapped_w]
    # has already indexed the array (negative indices wrap; too-large ones
    # raise IndexError) -- confirm whether the check should come first.
    outside_bounds = (mapped_w < 0
                      or mapped_h < 0
                      or mapped_w >= width
                      or mapped_h >= height
                      )
    if outside_bounds:
        return
    if char == 'd':
        # Show "I = <intensity>" (plus Rf if the spot was assigned to a
        # baseline) at the spot's centroid, replacing any earlier text.
        basin_intensity = plate.feature_stash['basin_intensities'][basin]
        ch, cw = basin_centroid = plate.feature_stash['basin_centroids'][basin]
        pcw = int(round(cw * resize_ratio))
        pch = int(round(ch * resize_ratio))
        basin_text = "I = " + str(basin_intensity)
        indexed_basin_rfs = plate.feature_stash.get('indexed_basin_rfs', None)
        rf_text = ""
        if indexed_basin_rfs is not None:
            # Search every baseline's Rf table for this basin.
            for base_assign_state, basin_rfs in indexed_basin_rfs.iteritems():
                for b, rf in basin_rfs.iteritems():
                    if b == basin:
                        rf_text = "\nRf = " + str(round(rf, 2))
        basin_text += rf_text
        global basin_texts
        if basin in basin_texts:
            canvas.delete(basin_texts[basin])
        bt = canvas.create_text(pcw, pch,
                                text=basin_text,
                                )
        basin_texts[basin] = bt
    elif char == 'c':
        stdout.write("Applying circle filter...")
        stdout.flush()
        # Strategy is hard-coded; the 'manual' branch below is unreachable.
        mode = 'isolated_LoG'
        global plate, circle_filter_entry
        if mode == 'isolated_LoG':
            # Blob-detect on an image where everything outside this basin is
            # flattened to the minimum, then keep only the darkest circle.
            max_radius = int(circle_filter_entry.get())
            plate.image_stash['inverted_corrected_rescaled_image'] = \
                invert(plate.image_stash['corrected_rescaled_image'])
            inverted_image_min = \
                np.amin(plate.image_stash['inverted_corrected_rescaled_image'])
            plate.image_stash['isolated_image'] = np.where(
                basins == basin,
                plate.image_stash['inverted_corrected_rescaled_image'],
                inverted_image_min,
            )
            plate.find_blobs(tag_in='isolated_image',
                             feature_out='isolated_blobs',
                             min_sigma=5,
                             max_sigma=max_radius,
                             num_sigma=10,
                             threshold=0.01,
                             overlap=0.5,
                             )
            blobs = plate.feature_stash['isolated_blobs']
            best_h, best_w, best_r, best_value = None, None, None, None
            image = plate.image_stash['corrected_rescaled_image']
            for blob in blobs:
                h, w, r = blob
                h, w, r = int(h), int(w), int(r)
                boolean_circle = make_boolean_circle(image=image,
                                                     h=h, w=w,
                                                     radius=r,
                                                     )
                # Mean intensity inside the candidate circle; darkest wins.
                circle_sum = np.sum(np.where(boolean_circle, image, 0))
                circle_area = np.sum(np.where(boolean_circle, 1, 0))
                circle_value = circle_sum / float(circle_area)
                if best_value is None or circle_value < best_value:
                    best_h = h
                    best_w = w
                    best_r = r
                    best_value = circle_value
            circle_scaling = 1.5
            # NOTE(review): if find_blobs returned no blobs, best_h/w/r stay
            # None and the call below will fail -- confirm blobs are always
            # found for a clicked basin.
            boolean_circle_array = make_boolean_circle(
                image=image,
                h=best_h, w=best_w,
                radius=best_r*circle_scaling,
            )
            updated_basins = basins
            # Keep a pixel unless it is in this basin and outside the circle.
            logical_filter = np.where(
                (updated_basins == basin) & ~boolean_circle_array,
                False,
                True,
            )
            updated_basins = np.where(logical_filter,
                                      updated_basins,
                                      np.zeros_like(updated_basins),
                                      )
            plate.feature_stash['iterated_basins'] = updated_basins
        elif mode == 'manual':
            # Fixed-radius circle filter using the entry box value.
            radius = int(circle_filter_entry.get())
            updated_basins = circle_filter(
                image=plate.image_stash['corrected_rescaled_image'],
                basins=basins,
                basin=basin,
                radius=radius,
            )
            plate.feature_stash['iterated_basins'] = updated_basins
        else:
            print("Unrecognized circle filter mode; ignoring.")
        remeasure_basins(plate)
        global tk_image, canvas, pil_image, canvas_image, resize_ratio
        color_image = plate.image_stash['rescaled_image']
        pil_image = make_pil_image(
            color_image=color_image,
            basins=plate.feature_stash['iterated_basins'],
            resize_ratio=resize_ratio,
        )
        tk_image = ImageTk.PhotoImage(image=pil_image)
        canvas.itemconfig(canvas_image, image=tk_image)
        stdout.write("complete\n")
        stdout.flush()
    else:
        pass
# Keystrokes over the canvas drive per-spot actions (see keyboard()).
canvas.bind('<Key>', keyboard)
def add_basin():
    # Manually add a circular spot: the last two left-clicks define opposite
    # ends of the new circle's diameter.
    global left_click_buffer, resize_ratio
    if len(left_click_buffer) < 2:
        print("Insufficient points defined")
        return
    stdout.write("Adding spot...")
    stdout.flush()
    (w1, h1), (w2, h2) = left_click_buffer[-2:]
    # Map display coordinates back to image coordinates.
    mapped_w1 = int(round(float(w1) / resize_ratio))
    mapped_h1 = int(round(float(h1) / resize_ratio))
    mapped_w2 = int(round(float(w2) / resize_ratio))
    mapped_h2 = int(round(float(h2) / resize_ratio))
    center_h = float(mapped_h1 + mapped_h2) / 2
    center_w = float(mapped_w1 + mapped_w2) / 2
    radius = euclidean((mapped_w1, mapped_h1), (mapped_w2, mapped_h2)) / 2.0
    global plate
    basins = plate.feature_stash['iterated_basins']
    # The new spot gets the next unused basin label.
    largest_basins_tag = np.amax(basins)
    print("largest_basins_tag = " + str(largest_basins_tag))
    new_basin_tag = largest_basins_tag + 1
    updated_basins = basins.copy()
    #min_h, max_h = min(mapped_h1, mapped_h2), max(mapped_h1, mapped_h2)
    #min_w, max_w = min(mapped_w1, mapped_w2), max(mapped_w1, mapped_w2)
    # Bounding box around the circle limits the per-pixel distance test.
    min_h, max_h = center_h - radius, center_h + radius
    min_w, max_w = center_w - radius, center_w + radius
    for (h, w), basin in np.ndenumerate(basins):
        if not (min_h <= h <= max_h and min_w <= w <= max_w):
            continue
        distance = euclidean((h, w), (center_h, center_w))
        if distance > radius:
            continue
        updated_basins[h, w] = new_basin_tag
    plate.feature_stash['iterated_basins'] = updated_basins
    remeasure_basins(plate)
    # Redraw the overlay with the new spot included.
    global tk_image, canvas, pil_image
    color_image = plate.image_stash['rescaled_image']
    pil_image = make_pil_image(color_image=color_image,
                               basins=plate.feature_stash['iterated_basins'],
                               resize_ratio=resize_ratio,
                               assignment_feature='base_assignments',
                               )
    tk_image = ImageTk.PhotoImage(image=pil_image)
    canvas.itemconfig(canvas_image, image=tk_image)
    stdout.write("complete\n")
    stdout.flush()
# Button: add a manually-drawn circular spot from the last two left-clicks.
add_basin_button = tk.Button(bottom_frame,
                             text="Add spot",
                             command=add_basin,
                             )
add_basin_button.grid(column=7, row=1)
def post_front():
    """Delete every spot on the same side of the solvent front as the last
    left-clicked point, then redraw.

    Requires 'solvent_front' in plate.feature_stash (a line given as
    ((w1, h1), (w2, h2))) and at least one point in left_click_buffer.
    """
    global plate
    solvent_front_line = plate.feature_stash.get('solvent_front', None)
    if solvent_front_line is None:
        print("Solvent front not defined; ignoring.")
        return
    (mapped_w1, mapped_h1), (mapped_w2, mapped_h2) = solvent_front_line
    basins = plate.feature_stash['iterated_basins']
    # Boolean mask marking one side of the solvent-front line.
    # FIX: `np.bool` was deprecated and removed in NumPy 1.24; the builtin
    # bool is the documented replacement and is behaviorally identical here.
    split_plate = np.zeros_like(basins).astype(bool)
    if mapped_h1 == mapped_h2:
        # Horizontal line: split by height.
        for (h, w) in np.ndindex(*split_plate.shape):
            if h > mapped_h1:
                split_plate[h, w] = True
    elif mapped_w1 == mapped_w2:
        # Vertical line: split by width.
        for (h, w) in np.ndindex(*split_plate.shape):
            if w > mapped_w1:
                split_plate[h, w] = True
    else:
        # General line: compare each pixel's w against the line's w at h.
        slope = float(mapped_w2 - mapped_w1) / (mapped_h2 - mapped_h1)
        for (h, w) in np.ndindex(*split_plate.shape):
            coord_value = slope * (h - mapped_h1) + mapped_w1
            if coord_value > w:
                split_plate[h, w] = True
    global left_click_buffer
    if len(left_click_buffer) < 1:
        print("No point defined; ignoring.")
        return
    w, h = left_click_buffer[-1]
    global resize_ratio
    mapped_w = int(round(float(w) / resize_ratio))
    mapped_h = int(round(float(h) / resize_ratio))
    if appaloosa.Plate.point_line_distance(point=(mapped_h, mapped_w),
                                           line=((mapped_h1, mapped_w1),
                                                 (mapped_h2, mapped_w2)),
                                           ) < 1:
        # Too ambiguous to decide which side the click meant.
        print("Point too close to solvent front; ignoring.")
        return
    # Delete every basin whose centroid is on the same side as the click.
    basin_centroids = plate.feature_stash['basin_centroids']
    to_delete = []
    for basin, (ch, cw) in basin_centroids.iteritems():
        ich, icw = int(round(ch)), int(round(cw))
        if split_plate[ich, icw] == split_plate[mapped_h, mapped_w]:
            to_delete.append(basin)
    for delete_basin in to_delete:
        basins = np.where(basins == delete_basin,
                          0,
                          basins,
                          )
    plate.feature_stash['iterated_basins'] = basins
    remeasure_basins(plate)
    global pil_image, tk_image, canvas
    color_image = plate.image_stash['rescaled_image']
    pil_image = make_pil_image(color_image=color_image,
                               basins=plate.feature_stash['iterated_basins'],
                               resize_ratio=resize_ratio,
                               )
    tk_image = ImageTk.PhotoImage(image=pil_image)
    canvas.itemconfig(canvas_image, image=tk_image)
# Button: remove all spots on the clicked side of the solvent front.
post_front_button = tk.Button(bottom_frame,
                              text="Remove spots above front",
                              command=post_front,
                              )
post_front_button.grid(column=3, row=1)
# Holds the annotated overlay image while the original is temporarily shown.
pil_cache = None
def overlay_original(event):
    """Temporarily replace the annotated canvas image with the raw rescaled
    image; the current overlay is stashed in pil_cache for restoration."""
    global plate, pil_image, tk_image, canvas, pil_cache, resize_ratio
    global canvas_image
    pil_cache = pil_image
    raw_image = plate.image_stash['rescaled_image']
    # Convert the float image to 8-bit for PIL display.
    as_uint8 = np.rint(raw_image * 255).astype(np.uint8)
    original = Image.fromarray(as_uint8)
    original_width, original_height = original.size
    display_size = (int(round(original_width * resize_ratio)),
                    int(round(original_height * resize_ratio)),
                    )
    pil_image = original.resize(display_size)
    tk_image = ImageTk.PhotoImage(image=pil_image)
    canvas.itemconfig(canvas_image, image=tk_image)
def unoverlay_original(event):
    # Restore the annotated overlay cached by overlay_original().
    global plate, pil_image, tk_image, canvas, pil_cache, resize_ratio
    global canvas_image
    pil_image = pil_cache
    tk_image = ImageTk.PhotoImage(image=pil_image)
    canvas.itemconfig(canvas_image, image=tk_image)
# Press-and-hold button to peek at the unannotated image.
show_original_button = tk.Button(bottom_frame,
                                 text="Show original",
                                 )
show_original_button.grid(column=1, row=3)
show_original_button.bind('<ButtonPress-1>', overlay_original)
show_original_button.bind('<ButtonRelease-1>', unoverlay_original)
# Entry holding the max LoG sigma / radius used by the circle filters.
circle_filter_label = tk.Label(bottom_frame,
                               text="Circle filter sigma",
                               )
circle_filter_label.grid(column=3, row=3)
circle_filter_entry = tk.Entry(bottom_frame)
circle_filter_entry.insert(0, 10)
circle_filter_entry.grid(column=4, row=3)
# NOTE(review): this assignment rebinds the name `circle_filter_all_button`
# (previously the function) to the widget; it works only because command=
# captured the function object first -- consider renaming the widget.
circle_filter_all_button = tk.Button(bottom_frame,
                                     text="Circle filter all",
                                     command=circle_filter_all_button,
                                     )
circle_filter_all_button.grid(column=5, row=3)
root.mainloop()
|
import math as m

# Problem dimensionality and search-space bounds for the benchmark.
D = 100
LB = -1
UB = 1


def FitnessFunction(x):
    """Evaluate the D-dimensional benchmark objective at point x.

    Combines an index-weighted quadratic term with two oscillatory penalty
    sums over consecutive coordinate triples; the total is rounded to two
    decimal places.
    """
    oscillatory = sum(
        20 * i * m.sin(x[i - 1] * m.sin(x[i]) + m.sin(x[i + 1])) ** 2
        for i in range(1, D - 1)
    )
    logarithmic = sum(
        i * m.log10(1 + i * ((x[i - 1] ** 2) - 2 * x[i] + 3 * x[i + 1]
                             - m.cos(x[i]) + 1) ** 2)
        for i in range(1, D - 1)
    )
    quadratic = sum(i * x[i] * x[i] for i in range(0, D))
    return round(quadratic + oscillatory + logarithmic, 2)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the Crimes model, deletes the
    # Rides model, and drops three fields from the existing Result model.
    # NOTE(review): the RemoveField operations assume the 'result' model and
    # these fields exist at state 0004_rides -- confirm against earlier
    # migrations before editing.

    dependencies = [
        ('crimeapp', '0004_rides'),
    ]

    operations = [
        migrations.CreateModel(
            name='Crimes',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('date', models.DateField()),
                # time and location are optional (blank allowed in forms).
                ('time', models.TimeField(blank=True)),
                ('offense', models.CharField(max_length=180)),
                ('surname', models.CharField(max_length=50)),
                ('first_name', models.CharField(max_length=50)),
                ('location', models.CharField(max_length=50, blank=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.DeleteModel(
            name='Rides',
        ),
        migrations.RemoveField(
            model_name='result',
            name='created',
        ),
        migrations.RemoveField(
            model_name='result',
            name='result',
        ),
        migrations.RemoveField(
            model_name='result',
            name='result_name',
        ),
    ]
|
import smtplib, ssl
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import numpy as np
import pandas as pd
import config
def sendEmail(df, email, passwordy, errors_and_warnings, program_emails):
    """Email each program its slice of the QA-issue DataFrame.

    df                   -- DataFrame of flagged issues with a 'Program' column.
    email / passwordy    -- SMTP (Office 365) credentials.
    errors_and_warnings  -- dict with 'errors' and 'warnings' lists; problems
                            encountered here are appended and the dict is
                            returned.
    program_emails       -- map of lower-cased program name -> list of
                            recipient addresses.
    """
    # As df only includes flagged issues, programs with zero issues are
    # absent from it and get no email.
    programs = df.Program.unique()
    programEmails = program_emails
    context = ssl.create_default_context()
    with smtplib.SMTP("smtp.office365.com", 587) as server:
        sender_email = email
        password = passwordy
        server.ehlo()  # Can be omitted
        server.starttls(context=context)
        server.ehlo()  # Can be omitted
        try:
            server.login(sender_email, password)
        except smtplib.SMTPAuthenticationError:
            errors_and_warnings['errors'].append("Could not login. Check credentials and try again.")
            return errors_and_warnings
        except (TypeError, smtplib.SMTPSenderRefused):
            # FIX: these two excepts had identical bodies; merged. Both mean
            # the supplied credentials were missing or unusable.
            errors_and_warnings['errors'].append('Please enter valid email credentials and try again.')
            return errors_and_warnings
        for program in programs:
            try:
                receiver_email = programEmails[program.lower()]
            except KeyError as ke:
                # No recipients configured for this program; warn and skip.
                errors_and_warnings['warnings'].append(str(ke))
                continue
            message = MIMEMultipart("alternative")
            message["Subject"] = "MONTHLY QA AUDIT"
            message["From"] = sender_email
            message["To"] = ", ".join(receiver_email)
            # Create the plain-text and HTML version of your message
            text = """\
An HTML email could not be delivered. Please reply to this email at your earliest convenience."""
            # FIX: removed a stray '"' left inside the <style> block and
            # corrected '</body>' (which closed an unopened body right after
            # </head>) to '<body>' so the generated HTML is well-formed.
            html = '''<html><head><style>#customers {
  font-family: Arial, Helvetica, sans-serif;
  font-size: 12px;
  border-collapse: collapse;
  width: 90%;
}

#customers td, #customers th {
  border: 1px solid #ddd;
  padding: 8px;
}

#customers tr:nth-child(even){background-color: #f2f2f2;}

#customers tr:hover {background-color: #ddd;}

#customers th {
  padding-top: 12px;
  padding-bottom: 12px;
  text-align: left;
  background-color: #003366;
  color: white;
}
</style>
</head>
<body>
<p>Please see below for the monthly data review of all of your program's documentation. Issues do not need to be fixed. Please review the issues/concerns moving forward. Please see the flag descriptions at the bottom of the email.</p>
'''
            # Only this program's rows; the id lets the CSS above style it.
            df_html = df[df["Program"] == program].to_html(index=False).replace("<table border", "<table id=customers border")
            notes = '''
<ul>
<li>
DUPLICATED NOTES: there should only be one note per person per shift. A duplicated note indicates that there are two notes for the same date and start time.
</li>
<li>
DUPLICATED CONTENT: Vary your progress notes, make it more person-centered, etc. Please do not copy-paste.
</li>
<li>
FORM: the Progress Note - Residential form must be included with all progress notes except where otherwise indicated.
</li>
<li>
SHORT NOTE: Generally, this is due to an additional progress note that was started and not finished. Otherwise, check to see if notes are actually being completed.
</li>
<li>
SERVICE TYPE: In group homes, we generally only provide the following services: Individual Supports, Behavioral Supports, Physical Therapy. If the Service Type=Not Applicable, you will have to modify their plan in Plans and Reviews and choose the correct Service Type.
</li>
<li>
PN's < 3: This is not always bad. If there was no staff on a 7-3, for example, then this would be fine. But if you know that that does not happen much in your program, then this flag will help you identify staff that missed notes.
</li>
<li>
Note: an extra rating for a note that was started and never finished or deleted will result in many flags being triggered.
</li>
<li>
NOTE TYPE: all residential services (except for PT/OT/Speech/Transp/BehSupp) must be Service/Treatment Plan Linked notes.
</li>
<li>
NAME CHECKER: there may be false positives. Please don't use names of individuals in notes other than their own.
</li>
</ul>
'''
            html += df_html + notes + "</body></html>"
            # Turn these into plain/html MIMEText objects
            part1 = MIMEText(text, "plain")
            part2 = MIMEText(html, "html")
            # Add HTML/plain-text parts to MIMEMultipart message
            # The email client will try to render the last part first
            message.attach(part1)
            message.attach(part2)
            try:
                server.sendmail(sender_email, receiver_email, message.as_string())
            except (smtplib.SMTPSenderRefused, smtplib.SMTPServerDisconnected) as e:
                errors_and_warnings['errors'].append(str(e))
            except Exception:
                # FIX: was a bare `except:` (also caught KeyboardInterrupt /
                # SystemExit) and misspelled "uknown".
                errors_and_warnings['errors'].append("An unknown issue occurred.")
    return errors_and_warnings
#if __name__ == "__main__":
# sendEmail(df)
|
# Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
from abc import ABC, abstractmethod, abstractproperty
from collections import defaultdict, deque
from contextlib import AbstractContextManager
from threading import Lock
from typing import Any, Iterator, NamedTuple, Optional, cast
from weakref import WeakValueDictionary
from intervaltree.interval import Interval
from structlog import get_logger
from hathor.conf import HathorSettings
from hathor.indexes import IndexesManager
from hathor.indexes.height_index import HeightInfo
from hathor.profiler import get_cpu_profiler
from hathor.pubsub import PubSubManager
from hathor.transaction.base_transaction import BaseTransaction
from hathor.transaction.block import Block
from hathor.transaction.storage.exceptions import (
TransactionDoesNotExist,
TransactionIsNotABlock,
TransactionNotInAllowedScopeError,
)
from hathor.transaction.storage.migrations import BaseMigration, MigrationState, add_min_height_metadata
from hathor.transaction.storage.tx_allow_scope import TxAllowScope, tx_allow_context
from hathor.transaction.transaction import Transaction
from hathor.transaction.transaction_metadata import TransactionMetadata
from hathor.util import not_none
# Network/consensus configuration and CPU profiler singletons.
settings = HathorSettings()
cpu = get_cpu_profiler()

# these are the timestamp values to be used when resetting them, 1 is used for the node instead of 0, so it can be
# greater, that way if both are reset (which also happens on a database that never run this implementation before) we
# guarantee that indexes will be initialized (because they would be "older" than the node timestamp).
NULL_INDEX_LAST_STARTED_AT = 0
NULL_LAST_STARTED_AT = 1

# Prefix used for per-index storage attribute keys.
INDEX_ATTR_PREFIX = 'index_'
class AllTipsCache(NamedTuple):
    """Precalculated snapshot of all tips at a given timestamp, with its
    merkle tree, used by the sync algorithm (see _all_tips_cache below)."""
    # Latest timestamp this snapshot refers to.
    timestamp: int
    # Tip intervals at that timestamp.
    tips: set[Interval]
    # Merkle tree computed over `hashes`.
    merkle_tree: bytes
    # Hashes of the tip vertices.
    hashes: list[bytes]
class TransactionStorage(ABC):
    """Legacy sync interface, please copy @deprecated decorator when implementing methods."""

    # Optional managers wired in by concrete backends.
    pubsub: Optional[PubSubManager]
    indexes: Optional[IndexesManager]

    log = get_logger()

    # Key storage attribute to save if the network stored is the expected network
    _network_attribute: str = 'network'

    # Key storage attribute to save if the full node is running a full verification
    _running_full_verification_attribute: str = 'running_full_verification'

    # Key storage attribute to save if the manager is running
    _manager_running_attribute: str = 'manager_running'

    # Key storage attribute to save the last time the node started
    _last_start_attribute: str = 'last_start'

    # history of migrations that have to be applied in the order defined here
    _migration_factories: list[type[BaseMigration]] = [
        add_min_height_metadata.Migration,
    ]

    # Instantiated in __init__ from _migration_factories.
    _migrations: list[BaseMigration]
    def __init__(self) -> None:
        """Initialize the in-memory state shared by all storage backends."""
        # Weakref is used to guarantee that there is only one instance of each transaction in memory.
        self._tx_weakref: WeakValueDictionary[bytes, BaseTransaction] = WeakValueDictionary()
        self._tx_weakref_disabled: bool = False

        # This lock is needed everytime a storage is getting a tx from the weakref and,
        # in the case the tx is not there, it creates a new object to save there.
        # We were having some concurrent access and two different objects were being saved
        # in the weakref, what is an error (https://github.com/HathorNetwork/hathor-core/issues/70)
        # With this lock we guarantee there isn't going to be any problem with concurrent access
        self._weakref_lock_per_hash: WeakValueDictionary[bytes, Lock] = WeakValueDictionary()

        # This is a global lock used to prevent concurrent access when getting the tx lock in the dict above
        self._weakref_lock: Lock = Lock()

        # Flag to allow/disallow partially validated vertices.
        self._allow_scope: TxAllowScope = TxAllowScope.VALID

        # Cache for the best block tips
        # This cache is updated in the consensus algorithm.
        self._best_block_tips_cache: Optional[list[bytes]] = None

        # If should create lock when getting a transaction
        self._should_lock = False

        # Provide local logger
        self.log = self.log.new()

        # Cache for the latest timestamp of all tips with merkle tree precalculated to be used on the sync algorithm
        # This cache is invalidated every time a new tx or block is added to the cache and
        # self._all_tips_cache.timestamp is always self.latest_timestamp
        self._all_tips_cache: Optional[AllTipsCache] = None

        # Initialize cache for genesis transactions.
        self._genesis_cache: dict[bytes, BaseTransaction] = {}

        # Internal toggle to choose when to select topological DFS iterator, used only on some tests
        self._always_use_topological_dfs = False

        # True while genesis vertices are being persisted.
        self._saving_genesis = False

        # Migrations instances
        self._migrations = [cls() for cls in self._migration_factories]
        # XXX: sanity check -- every migration db name must be unique.
        migration_names = set()
        for migration in self._migrations:
            migration_name = migration.get_db_name()
            if migration_name in migration_names:
                raise ValueError(f'Duplicate migration name "{migration_name}"')
            migration_names.add(migration_name)
    def set_allow_scope(self, allow_scope: TxAllowScope) -> None:
        """Set the allow scope for the current storage.

        This method should not normally be used directly, use one of the `allow_*_scope` methods instead."""
        # Controls which validation states of vertices the storage will serve.
        self._allow_scope = allow_scope
    def get_allow_scope(self) -> TxAllowScope:
        """Get the current allow scope, i.e. which validation states of vertices the storage accepts/returns."""
        return self._allow_scope
    @abstractmethod
    def reset_indexes(self) -> None:
        """Reset all the indexes, making sure that no persisted value is reused.

        Subclasses must drop any persisted index data so indexes are rebuilt from scratch."""
        raise NotImplementedError
    @abstractmethod
    def is_empty(self) -> bool:
        """True when only genesis is present, useful for checking for a fresh database.

        :return: whether the database contains nothing beyond the genesis vertices"""
        raise NotImplementedError
    def update_best_block_tips_cache(self, tips_cache: Optional[list[bytes]]) -> None:
        """Overwrite the cached list of best block tips; pass None to invalidate the cache.

        Per the cache's declaration, it is updated by the consensus algorithm.
        """
        # XXX: check that the cache update is working properly, only used in unittests
        # XXX: this might not actually hold true in some cases, commenting out while we figure it out
        # if settings.SLOW_ASSERTS:
        #     calculated_tips = self.get_best_block_tips(skip_cache=True)
        #     self.log.debug('cached best block tips must match calculated',
        #                    calculated=[i.hex() for i in calculated_tips],
        #                    cached=[i.hex() for i in tips_cache])
        #     assert set(tips_cache) == set(calculated_tips)
        self._best_block_tips_cache = tips_cache
    def pre_init(self) -> None:
        """Storages can implement this to run code before transaction loading starts.

        The base implementation validates the stored network name and then applies
        any pending migrations, in that order.
        """
        self._check_and_set_network()
        self._check_and_apply_migrations()
    @abstractmethod
    def get_migration_state(self, migration_name: str) -> MigrationState:
        """Return the persisted state of the migration identified by `migration_name`."""
        raise NotImplementedError
    @abstractmethod
    def set_migration_state(self, migration_name: str, state: MigrationState) -> None:
        """Persist `state` as the current state of the migration identified by `migration_name`."""
        raise NotImplementedError
def _check_and_apply_migrations(self) -> None:
"""Check which migrations have not been run yet and apply them in order."""
from hathor.transaction.storage.exceptions import OutOfOrderMigrationError, PartialMigrationError
db_is_empty = self.is_empty()
self.log.debug('step through all migrations', count=len(self._migrations))
migrations_to_run = []
# XXX: this is used to ensure migrations don't advance out of order
previous_migration_state = MigrationState.COMPLETED
for migration in self._migrations:
migration_name = migration.get_db_name()
self.log.debug('step migration', migration=migration_name)
# short-cut to avoid running migrations on empty database
if migration.skip_empty_db() and db_is_empty:
self.log.debug('migration is new, but does not need to run on an empty database',
migration=migration_name)
self.set_migration_state(migration_name, MigrationState.COMPLETED)
continue
# get the migration state to decide whether to run, skip or error
migration_state = self.get_migration_state(migration_name)
if migration_state > previous_migration_state:
raise OutOfOrderMigrationError(f'{migration_name} ran after a migration that wasn\'t advanced')
previous_migration_state = migration_state
should_run_migration: bool
if migration_state is MigrationState.NOT_STARTED:
self.log.debug('migration is new, will run', migration=migration_name)
should_run_migration = True
elif migration_state is MigrationState.STARTED:
self.log.warn('this migration was started before, but it is not marked as COMPLETED or ERROR, '
'it will run again but might fail', migration=migration_name)
should_run_migration = True
elif migration_state is MigrationState.COMPLETED:
self.log.debug('migration is already complete', migration=migration_name)
should_run_migration = False
elif migration_state is MigrationState.ERROR:
self.log.error('this migration was run before but resulted in an error, the database will need to be '
'either manually fixed or discarded', migration=migration_name)
raise PartialMigrationError(f'Migration error state previously: {migration_name}')
else:
raise ValueError(f'Unexcepted migration state: {migration_state!r}')
# run if needed, updating the state along the way
if should_run_migration:
migrations_to_run.append(migration)
self.log.debug('stepped through all migrations')
if migrations_to_run:
self.log.info('there are migrations that need to be applied')
migrations_to_run_count = len(migrations_to_run)
for i, migration in enumerate(migrations_to_run):
migration_name = migration.get_db_name()
self.log.info(f'running migration {i+1} out of {migrations_to_run_count}', migration=migration_name)
self.set_migration_state(migration_name, MigrationState.STARTED)
try:
migration.run(self)
# XXX: we catch "any" exception because just we want to mark the state as "ERROR"
except Exception as exc:
self.set_migration_state(migration_name, MigrationState.ERROR)
raise PartialMigrationError(f'Migration error state: {migration_name}') from exc
else:
self.set_migration_state(migration_name, MigrationState.COMPLETED)
if migrations_to_run:
self.log.info('all migrations have been applied')
def _check_and_set_network(self) -> None:
"""Check the network name is as expected and try to set it when none is present"""
from hathor.transaction.storage.exceptions import WrongNetworkError
network = settings.NETWORK_NAME
stored_network = self.get_network()
if stored_network is None:
# no network is set, let's try to infer it
self._checked_set_network(network)
elif stored_network != network:
# the stored network does not match, something is wrong
raise WrongNetworkError(f'Databases created on {stored_network}, expected {network}')
else:
# the network is what is expected, nothing to do here
pass
    def _checked_set_network(self, network: str) -> None:
        """Tries to set the network name on storage, while checking if we can safely do so.

        :param network: the network name to persist
        :raises WrongNetworkError: when the database contents are incompatible with `network`
        """
        from hathor.transaction.storage.exceptions import WrongNetworkError
        if self.is_empty():
            # we're fresh out of a new database, let's just make sure we don't have the wrong genesis
            for tx in self.get_all_transactions():
                # XXX: maybe this can happen if you start a fresh database on one network and the genesis is saved
                #      somehow (is this even possible?) and you then start on a different network, hopefully this
                #      can be safely removed in a few releases
                if not tx.is_genesis:
                    raise WrongNetworkError(f'Transaction {tx.hash_hex} is not from {network}')
            self.set_network(network)
        else:
            # XXX: the database IS NOT empty, what do we do?
            #      - for the sake of compatibility we will accept this on the mainnet, and set it as mainnet,
            #        this is mostly so everyone running on the mainnet has a no-interaction auto-migration
            #      - for the sake of cleaning up the mess of foxtrot->golf testnet migration, we'll refuse to use
            #        the database when it is not the mainnet
            #      - in a few releases we can be confident that everyone running the network has made a smooth
            #        upgrade and we should be able to remove these workarounds and refuse older databases, and
            #        instead individually assist (like suggesting a snapshot or fresh start) to anyone that is
            #        unable to use a very old database
            if network == 'mainnet':
                self.set_network(network)
            else:
                raise WrongNetworkError(f'This database is not suitable to be used on {network}')
def get_best_block(self) -> Block:
"""The block with highest score or one of the blocks with highest scores. Can be used for mining."""
assert self.indexes is not None
block_hash = self.indexes.height.get_tip()
block = self.get_transaction(block_hash)
assert isinstance(block, Block)
assert block.get_metadata().validation.is_fully_connected()
return block
    def _save_or_verify_genesis(self) -> None:
        """Save all genesis in the storage.

        For every genesis vertex: if it is already stored, assert that the stored copy is
        identical; otherwise save and index it. Also populates `_genesis_cache`.
        `_saving_genesis` is toggled so other code can tell these saves apart.
        """
        self._saving_genesis = True
        for tx in self._get_genesis_from_settings():
            try:
                assert tx.hash is not None
                tx2 = self.get_transaction(tx.hash)
                # A stored genesis must match the expected one exactly.
                assert tx == tx2
            except TransactionDoesNotExist:
                self.save_transaction(tx)
                self.add_to_indexes(tx)
                tx2 = tx
            assert tx2.hash is not None
            self._genesis_cache[tx2.hash] = tx2
        self._saving_genesis = False
    def _get_genesis_from_settings(self) -> list[BaseTransaction]:
        """Return all genesis from settings."""
        # imported locally — presumably to avoid an import cycle; confirm before moving to top-level
        from hathor.transaction.genesis import _get_genesis_transactions_unsafe
        return _get_genesis_transactions_unsafe(self)
def _save_to_weakref(self, tx: BaseTransaction) -> None:
""" Save transaction to weakref.
"""
if self._tx_weakref_disabled:
return
assert tx.hash is not None
tx2 = self._tx_weakref.get(tx.hash, None)
if tx2 is None:
self._tx_weakref[tx.hash] = tx
else:
assert tx is tx2, 'There are two instances of the same transaction in memory ({})'.format(tx.hash_hex)
    def _remove_from_weakref(self, tx: BaseTransaction) -> None:
        """Remove transaction from weakref.

        No-op when the weakref map is disabled or the hash is not present.
        """
        if self._tx_weakref_disabled:
            return
        assert tx.hash is not None
        self._tx_weakref.pop(tx.hash, None)
def get_transaction_from_weakref(self, hash_bytes: bytes) -> Optional[BaseTransaction]:
""" Get a transaction from weakref if it exists. Otherwise, returns None.
"""
if self._tx_weakref_disabled:
return None
return self._tx_weakref.get(hash_bytes, None)
    # TODO: check if the method below is currently needed
    def allow_only_valid_context(self) -> AbstractContextManager[None]:
        """This method is used to temporarily reset the storage back to only allow valid transactions.

        The implementation will OVERRIDE the current scope to allowing only valid transactions on the observed
        storage.
        """
        return tx_allow_context(self, allow_scope=TxAllowScope.VALID)
def allow_partially_validated_context(self) -> AbstractContextManager[None]:
"""This method is used to temporarily make the storage allow partially validated transactions.
The implementation will INCLUDE allowing partially valid transactions to the current allow scope.
"""
new_allow_scope = self.get_allow_scope() | TxAllowScope.PARTIAL
return tx_allow_context(self, allow_scope=new_allow_scope)
def allow_invalid_context(self) -> AbstractContextManager[None]:
"""This method is used to temporarily make the storage allow invalid transactions.
The implementation will INCLUDE allowing invalid transactions to the current allow scope.
"""
new_allow_scope = self.get_allow_scope() | TxAllowScope.INVALID
return tx_allow_context(self, allow_scope=new_allow_scope)
    def is_only_valid_allowed(self) -> bool:
        """Whether only valid transactions are allowed to be returned/accepted by the storage, the default state."""
        return self.get_allow_scope() is TxAllowScope.VALID
    def is_partially_validated_allowed(self) -> bool:
        """Whether partially validated transactions are allowed to be returned/accepted by the storage."""
        return TxAllowScope.PARTIAL in self.get_allow_scope()
    def is_invalid_allowed(self) -> bool:
        """Whether invalid transactions are allowed to be returned/accepted by the storage."""
        return TxAllowScope.INVALID in self.get_allow_scope()
def _enable_weakref(self) -> None:
""" Weakref should never be disabled unless you know exactly what you are doing.
"""
self._tx_weakref_disabled = False
def _disable_weakref(self) -> None:
""" Weakref should never be disabled unless you know exactly what you are doing.
"""
self._tx_weakref_disabled = True
    @abstractmethod
    def save_transaction(self: 'TransactionStorage', tx: BaseTransaction, *, only_metadata: bool = False) -> None:
        # XXX: although this method is abstract (because a subclass must implement it) the implementer
        #      should call the base implementation for correctly interacting with the index
        """Saves the tx.

        The base implementation runs `pre_save_validation` and must be called by subclasses.

        :param tx: Transaction to save
        :param only_metadata: Don't save the transaction, only the metadata of this transaction
        """
        assert tx.hash is not None
        meta = tx.get_metadata()
        self.pre_save_validation(tx, meta)
    def pre_save_validation(self, tx: BaseTransaction, tx_meta: TransactionMetadata) -> None:
        """ Must be run before every save, will raise AssertionError or TransactionNotInAllowedScopeError

        A failure means there is a bug in the code that allowed the condition to reach the "save" code. This is a last
        second measure to prevent persisting a bad transaction/metadata.

        This method receives the transaction AND the metadata in order to avoid calling ".get_metadata()" which could
        potentially create a fresh metadata.
        """
        assert tx.hash is not None
        assert tx_meta.hash is not None
        assert tx.hash == tx_meta.hash, f'{tx.hash.hex()} != {tx_meta.hash.hex()}'
        self._validate_partial_marker_consistency(tx_meta)  # voided_by marker must agree with ValidationState
        self._validate_transaction_in_scope(tx)             # tx must fit the current allow scope
        self._validate_block_height_metadata(tx)            # blocks must carry a height
    def post_get_validation(self, tx: BaseTransaction) -> None:
        """ Must be run after every get, will raise AssertionError or TransactionNotInAllowedScopeError

        A failure means there is a bug in the code that allowed the condition to reach the "get" code. This is a last
        second measure to prevent getting a transaction while using the wrong scope.
        """
        tx_meta = tx.get_metadata()
        self._validate_partial_marker_consistency(tx_meta)  # voided_by marker must agree with ValidationState
        self._validate_transaction_in_scope(tx)             # tx must fit the current allow scope
        self._validate_block_height_metadata(tx)            # blocks must carry a height
    def _validate_partial_marker_consistency(self, tx_meta: TransactionMetadata) -> None:
        """Assert the PARTIALLY_VALIDATED_ID voided_by marker agrees with the ValidationState."""
        voided_by = tx_meta.get_frozen_voided_by()
        # XXX: PARTIALLY_VALIDATED_ID must NOT be present when the tx is fully connected,
        #      and must be present otherwise (this is exactly what the assert below checks)
        has_partially_validated_marker = settings.PARTIALLY_VALIDATED_ID in voided_by
        validation_is_fully_connected = tx_meta.validation.is_fully_connected()
        assert (not has_partially_validated_marker) == validation_is_fully_connected, \
            'Inconsistent ValidationState and voided_by'
def _validate_transaction_in_scope(self, tx: BaseTransaction) -> None:
if not self.get_allow_scope().is_allowed(tx):
tx_meta = tx.get_metadata()
raise TransactionNotInAllowedScopeError(tx.hash_hex, self.get_allow_scope().name, tx_meta.validation.name)
def _validate_block_height_metadata(self, tx: BaseTransaction) -> None:
if tx.is_block:
tx_meta = tx.get_metadata()
assert tx_meta.height is not None
    @abstractmethod
    def remove_transaction(self, tx: BaseTransaction) -> None:
        """Remove the tx.

        The base implementation removes `tx` from all indexes; subclasses implement
        the actual deletion of the persisted data.

        :param tx: Transaction to be removed
        """
        if self.indexes is not None:
            self.del_from_indexes(tx, remove_all=True, relax_assert=True)
    def remove_transactions(self, txs: list[BaseTransaction]) -> None:
        """Will remove all the transactions on the list from the database.

        Special notes:

        - will refuse and raise an error when removing all transactions would leave dangling transactions, that is,
          transactions without existing parent. That is, it expects the `txs` list to include all children of deleted
          txs, from both the confirmation and funds DAGs
        - inputs's spent_outputs should not have any of the transactions being removed as spending transactions,
          this method will update and save those transaction's metadata
        - parent's children metadata will be updated to reflect the removals
        - all indexes will be updated
        """
        parents_to_update: dict[bytes, list[bytes]] = defaultdict(list)
        dangling_children: set[bytes] = set()
        # set of all hashes being removed; anything outside it is "kept"
        txset = {not_none(tx.hash) for tx in txs}
        for tx in txs:
            assert tx.hash is not None
            tx_meta = tx.get_metadata()
            # checkpoints must never be removed
            assert not tx_meta.validation.is_checkpoint()
            # parents that are kept will need their children metadata updated
            for parent in set(tx.parents) - txset:
                parents_to_update[parent].append(tx.hash)
            # any kept child (confirmation DAG) or kept spender (funds DAG) would dangle
            dangling_children.update(set(tx_meta.children) - txset)
            for spending_txs in tx_meta.spent_outputs.values():
                dangling_children.update(set(spending_txs) - txset)
            # detach this tx from the spent_outputs of the txs it spends from
            for tx_input in tx.inputs:
                spent_tx = tx.get_spent_tx(tx_input)
                spent_tx_meta = spent_tx.get_metadata()
                if tx.hash in spent_tx_meta.spent_outputs[tx_input.index]:
                    spent_tx_meta.spent_outputs[tx_input.index].remove(tx.hash)
                    self.save_transaction(spent_tx, only_metadata=True)
        assert not dangling_children, 'It is an error to try to remove transactions that would leave a gap in the DAG'
        # drop the removed txs from their kept parents' children metadata
        for parent_hash, children_to_remove in parents_to_update.items():
            parent_tx = self.get_transaction(parent_hash)
            parent_meta = parent_tx.get_metadata()
            for child in children_to_remove:
                parent_meta.children.remove(child)
            self.save_transaction(parent_tx, only_metadata=True)
        # finally remove the transactions themselves
        for tx in txs:
            self.log.debug('remove transaction', tx=tx.hash_hex)
            self.remove_transaction(tx)
    @abstractmethod
    def transaction_exists(self, hash_bytes: bytes) -> bool:
        """Returns `True` if transaction with hash `hash_bytes` exists.

        :param hash_bytes: Hash in bytes that will be checked.
        """
        raise NotImplementedError
def compare_bytes_with_local_tx(self, tx: BaseTransaction) -> bool:
"""Compare byte-per-byte `tx` with the local transaction."""
assert tx.hash is not None
# XXX: we have to accept any scope because we only want to know what bytes we have stored
with tx_allow_context(self, allow_scope=TxAllowScope.ALL):
local_tx = self.get_transaction(tx.hash)
local_tx_bytes = bytes(local_tx)
tx_bytes = bytes(tx)
if tx_bytes == local_tx_bytes:
return True
self.log.critical('non-equal transactions with same id', tx_id=tx.hash.hex(),
local_tx=local_tx_bytes.hex(), tx=tx_bytes.hex())
return False
    @abstractmethod
    def _get_transaction(self, hash_bytes: bytes) -> BaseTransaction:
        """Returns the transaction with hash `hash_bytes`.

        Internal: callers should use `get_transaction`, which adds locking and scope validation.

        :param hash_bytes: Hash in bytes that will be checked.
        """
        raise NotImplementedError
def disable_lock(self) -> None:
""" Turn off lock
"""
self._should_lock = False
def enable_lock(self) -> None:
""" Turn on lock
"""
self._should_lock = True
def _get_lock(self, hash_bytes: bytes) -> Optional[Lock]:
""" Get lock for tx hash in the weakref dictionary
"""
if not self._should_lock:
return None
with self._weakref_lock:
lock = self._weakref_lock_per_hash.get(hash_bytes, None)
if lock is None:
lock = Lock()
self._weakref_lock_per_hash[hash_bytes] = lock
return lock
def get_transaction(self, hash_bytes: bytes) -> BaseTransaction:
"""Acquire the lock and get the transaction with hash `hash_bytes`.
:param hash_bytes: Hash in bytes that will be checked.
"""
if self._should_lock:
lock = self._get_lock(hash_bytes)
assert lock is not None
with lock:
tx = self._get_transaction(hash_bytes)
else:
tx = self._get_transaction(hash_bytes)
self.post_get_validation(tx)
return tx
def get_transaction_by_height(self, height: int) -> Optional[BaseTransaction]:
"""Returns a transaction from the height index. This is fast."""
assert self.indexes is not None
ancestor_hash = self.indexes.height.get(height)
return None if ancestor_hash is None else self.get_transaction(ancestor_hash)
def get_metadata(self, hash_bytes: bytes) -> Optional[TransactionMetadata]:
"""Returns the transaction metadata with hash `hash_bytes`.
:param hash_bytes: Hash in bytes that will be checked.
:rtype :py:class:`hathor.transaction.TransactionMetadata`
"""
try:
tx = self.get_transaction(hash_bytes)
return tx.get_metadata(use_storage=False)
except TransactionDoesNotExist:
return None
def get_all_transactions(self) -> Iterator[BaseTransaction]:
"""Return all vertices (transactions and blocks) within the allowed scope.
"""
# It is necessary to retain a copy of the current scope because this method will yield
# and the scope may undergo changes. By doing so, we ensure the usage of the scope at the
# time of iterator creation.
scope = self.get_allow_scope()
for tx in self._get_all_transactions():
if scope.is_allowed(tx):
yield tx
    @abstractmethod
    def _get_all_transactions(self) -> Iterator[BaseTransaction]:
        """Internal implementation that iterates over all transactions/blocks.

        Unlike `get_all_transactions`, this does not filter by the allow scope.
        """
        raise NotImplementedError
    @abstractmethod
    def get_vertices_count(self) -> int:
        # TODO: verify the following claim:
        """Return the number of transactions/blocks stored.

        :rtype int
        """
        raise NotImplementedError
    @abstractproperty
    def latest_timestamp(self) -> int:
        """The latest timestamp among the stored vertices."""
        raise NotImplementedError
    @abstractproperty
    def first_timestamp(self) -> int:
        """The earliest timestamp among the stored vertices."""
        raise NotImplementedError
    @abstractmethod
    def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> list[bytes]:
        """ Return a list of blocks that are heads in a best chain. It must be used when mining.

        When more than one block is returned, it means that there are multiple best chains and
        you can choose any of them.

        :param timestamp: when given, compute the tips at that moment instead of the latest state
        :param skip_cache: force recomputation even when the cache is populated
        """
        # fast path: the cache only represents the latest state, so it can only be
        # used when no timestamp was requested
        if timestamp is None and not skip_cache and self._best_block_tips_cache is not None:
            return self._best_block_tips_cache[:]
        best_score = 0.0
        best_tip_blocks: list[bytes] = []
        for block_hash in (x.data for x in self.get_block_tips(timestamp)):
            meta = self.get_metadata(block_hash)
            assert meta is not None
            if meta.voided_by and meta.voided_by != set([block_hash]):
                # If anyone but the block itself is voiding this block, then it must be skipped.
                continue
            # scores are floats, so ties are detected with a small tolerance
            if abs(meta.score - best_score) < 1e-10:
                best_tip_blocks.append(block_hash)
            elif meta.score > best_score:
                best_score = meta.score
                best_tip_blocks = [block_hash]
        if timestamp is None:
            # refresh the cache whenever the latest state was computed
            self._best_block_tips_cache = best_tip_blocks[:]
        return best_tip_blocks
    @abstractmethod
    def get_n_height_tips(self, n_blocks: int) -> list[HeightInfo]:
        """Return height info for the `n_blocks` most recent heights (delegates to the height index)."""
        assert self.indexes is not None
        return self.indexes.height.get_n_height_tips(n_blocks)
def get_weight_best_block(self) -> float:
heads = [self.get_transaction(h) for h in self.get_best_block_tips()]
highest_weight = 0.0
for head in heads:
if head.weight > highest_weight:
highest_weight = head.weight
return highest_weight
def get_height_best_block(self) -> int:
""" Iterate over best block tips and get the highest height
"""
heads = [self.get_transaction(h) for h in self.get_best_block_tips()]
highest_height = 0
for head in heads:
assert isinstance(head, Block)
head_height = head.get_height()
if head_height > highest_height:
highest_height = head_height
return highest_height
    @cpu.profiler('get_merkle_tree')
    def get_merkle_tree(self, timestamp: int) -> tuple[bytes, list[bytes]]:
        """ Generate a hash to check whether the DAG is the same at that timestamp.

        :rtype: tuple[bytes(hash), list[bytes(hash)]]
        """
        # cache covers any timestamp at or after the cached one
        if self._all_tips_cache is not None and timestamp >= self._all_tips_cache.timestamp:
            return self._all_tips_cache.merkle_tree, self._all_tips_cache.hashes
        intervals = self.get_all_tips(timestamp)
        if timestamp >= self.latest_timestamp:
            # get_all_tips will add to cache in that case
            assert self._all_tips_cache is not None
            return self._all_tips_cache.merkle_tree, self._all_tips_cache.hashes
        return self.calculate_merkle_tree(intervals)
def calculate_merkle_tree(self, intervals: set[Interval]) -> tuple[bytes, list[bytes]]:
""" Generate a hash of the transactions at the intervals
:rtype: tuple[bytes(hash), list[bytes(hash)]]
"""
hashes = [x.data for x in intervals]
hashes.sort()
merkle = hashlib.sha256()
for h in hashes:
merkle.update(h)
return merkle.digest(), hashes
    @abstractmethod
    def get_block_tips(self, timestamp: Optional[float] = None) -> set[Interval]:
        """Return the set of intervals for block tips at the given `timestamp`."""
        raise NotImplementedError
    @abstractmethod
    def get_all_tips(self, timestamp: Optional[float] = None) -> set[Interval]:
        """Return the set of intervals for all tips (blocks and transactions) at the given `timestamp`."""
        raise NotImplementedError
    @abstractmethod
    def get_tx_tips(self, timestamp: Optional[float] = None) -> set[Interval]:
        """Return the set of intervals for transaction tips at the given `timestamp`."""
        raise NotImplementedError
    @abstractmethod
    def get_newest_blocks(self, count: int) -> tuple[list[Block], bool]:
        """ Get blocks from the newest to the oldest

        :param count: Number of blocks to be returned
        :return: List of blocks and a boolean indicating if has more blocks
        """
        raise NotImplementedError
    @abstractmethod
    def get_newest_txs(self, count: int) -> tuple[list[BaseTransaction], bool]:
        """ Get transactions from the newest to the oldest

        :param count: Number of transactions to be returned
        :return: List of transactions and a boolean indicating if has more txs
        """
        raise NotImplementedError
    @abstractmethod
    def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes,
                               count: int) -> tuple[list[Block], bool]:
        """ Get blocks from the timestamp/hash_bytes reference to the oldest

        :param timestamp: Timestamp reference to start the search
        :param hash_bytes: Hash reference to start the search
        :param count: Number of blocks to be returned
        :return: List of blocks and a boolean indicating if has more blocks
        """
        raise NotImplementedError
    @abstractmethod
    def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes,
                               count: int) -> tuple[list[BaseTransaction], bool]:
        """ Get blocks from the timestamp/hash_bytes reference to the newest

        NOTE(review): the return annotation says BaseTransaction while the sibling
        `get_older_blocks_after` uses Block — confirm which is intended.

        :param timestamp: Timestamp reference to start the search
        :param hash_bytes: Hash reference to start the search
        :param count: Number of blocks to be returned
        :return: List of blocks and a boolean indicating if has more blocks
        """
        raise NotImplementedError
    @abstractmethod
    def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]:
        """ Get transactions from the timestamp/hash_bytes reference to the oldest

        :param timestamp: Timestamp reference to start the search
        :param hash_bytes: Hash reference to start the search
        :param count: Number of transactions to be returned
        :return: List of transactions and a boolean indicating if has more txs
        """
        raise NotImplementedError
    @abstractmethod
    def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]:
        """ Get transactions from the timestamp/hash_bytes reference to the newest

        :param timestamp: Timestamp reference to start the search
        :param hash_bytes: Hash reference to start the search
        :param count: Number of transactions to be returned
        :return: List of transactions and a boolean indicating if has more txs
        """
        raise NotImplementedError
    @abstractmethod
    def _manually_initialize(self) -> None:
        # XXX: maybe refactor, this is actually part of the public interface
        """Caches must be initialized. This function should not be called, because
        usually the HathorManager will handle all this initialization.
        """
        pass
    def topological_iterator(self) -> Iterator[BaseTransaction]:
        """This method will return the fastest topological iterator available based on the database state.

        This will be:

        - self._topological_sort_timestamp_index() when the timestamp index is up-to-date
        - self._topological_sort_metadata() otherwise, metadata is assumed to be up-to-date
        - self._topological_sort_dfs() when the private property `_always_use_topological_dfs` is set to `True`

        :return: an iterator that yields vertices in topological order (parents before children)
        """
        # TODO: we currently assume that metadata is up-to-date, and thus this method can only run when that assumption
        #       is known to be true, but we could add a mechanism similar to what indexes use to know they're
        #       up-to-date and get rid of that assumption so this method can be used without having to make any
        #       assumptions
        assert self.indexes is not None
        if self._always_use_topological_dfs:
            self.log.debug('force choosing DFS iterator')
            return self._topological_sort_dfs()
        db_last_started_at = self.get_last_started_at()
        sorted_all_db_name = self.indexes.sorted_all.get_db_name()
        if sorted_all_db_name is None:
            can_use_timestamp_index = False
        else:
            # the timestamp index is trustworthy only when it was initialized in the
            # same run as the database itself (matching last-started timestamps)
            sorted_all_index_last_started_at = self.get_index_last_started_at(sorted_all_db_name)
            can_use_timestamp_index = db_last_started_at == sorted_all_index_last_started_at
        iter_tx: Iterator[BaseTransaction]
        if can_use_timestamp_index:
            self.log.debug('choosing timestamp-index iterator')
            iter_tx = self._topological_sort_timestamp_index()
        else:
            self.log.debug('choosing metadata iterator')
            iter_tx = self._topological_sort_metadata()
        return iter_tx
    @abstractmethod
    def _topological_sort_dfs(self) -> Iterator[BaseTransaction]:
        """Return an iterable of the transactions in topological ordering, i.e., from genesis to the most recent
        transactions. The order is important because the transactions are always valid --- their parents and inputs
        exist. This method is designed to be used for rebuilding metadata or indexes, that is, it does not make use of
        any metadata, only the transactions parents data is used and no index is used.

        XXX: blocks are prioritized so as soon as a block can be yielded it will, which means that it is possible for a
        block to be yielded much sooner than an older transaction that isn't being confirmed by that block.
        """
        raise NotImplementedError
    @abstractmethod
    def _topological_sort_timestamp_index(self) -> Iterator[BaseTransaction]:
        """Return an iterable of the transactions in topological ordering, i.e., from genesis to the most recent
        transactions. The order is important because the transactions are always valid --- their parents and inputs
        exist. This method makes use of the timestamp index, so it is crucial that that index is correct and complete.

        XXX: blocks are still prioritized over transactions, but only within the same timestamp, which means that it
        will yield a different sequence than _topological_sort_dfs, but the sequence is still topological.
        """
        raise NotImplementedError
    @abstractmethod
    def _topological_sort_metadata(self) -> Iterator[BaseTransaction]:
        """Return an iterable of the transactions in topological ordering, using only info from metadata.

        This is about as good as _topological_sort_timestamp_index but only needs the transaction's metadata to be
        consistent and up-to-date. It could replace _topological_sort_timestamp_index if we can show it is faster or at
        least not slower by most practical cases.
        """
        raise NotImplementedError
    @abstractmethod
    def add_to_indexes(self, tx: BaseTransaction) -> None:
        """Add `tx` to the storage's indexes."""
        raise NotImplementedError
    @abstractmethod
    def del_from_indexes(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: bool = False) -> None:
        """Remove `tx` from the storage's indexes."""
        raise NotImplementedError
    @abstractmethod
    def get_block_count(self) -> int:
        """Return the number of blocks stored."""
        raise NotImplementedError
    @abstractmethod
    def get_tx_count(self) -> int:
        """Return the number of transactions stored."""
        raise NotImplementedError
    @abstractmethod
    def get_genesis(self, hash_bytes: bytes) -> Optional[BaseTransaction]:
        """Returning hardcoded genesis block and transactions.

        :param hash_bytes: hash of the genesis vertex to return (None when not a genesis hash)
        """
        raise NotImplementedError
    @abstractmethod
    def get_all_genesis(self) -> set[BaseTransaction]:
        """Return the set of all genesis vertices (block and transactions)."""
        raise NotImplementedError
    @abstractmethod
    def get_transactions_before(self, hash_bytes: bytes, num_blocks: int = 100) -> list[BaseTransaction]:
        """Run a BFS starting from the giving `hash_bytes`.

        :param hash_bytes: Starting point of the BFS, either a block or a transaction.
        :param num_blocks: Number of blocks to be returned.
        :return: List of transactions
        """
        raise NotImplementedError
    @abstractmethod
    def get_blocks_before(self, hash_bytes: bytes, num_blocks: int = 100) -> list[Block]:
        """Run a BFS starting from the giving `hash_bytes`.

        :param hash_bytes: Starting point of the BFS.
        :param num_blocks: Number of blocks to be returned.
        :return: List of blocks
        """
        raise NotImplementedError
    def add_value(self, key: str, value: str) -> None:
        """ Save value on storage

        Need to be a string to support all storages, including rocksdb, that needs bytes
        """
        raise NotImplementedError
    def remove_value(self, key: str) -> None:
        """ Remove value from storage

        No-op semantics for missing keys are up to the concrete storage.
        """
        raise NotImplementedError
    def get_value(self, key: str) -> Optional[str]:
        """ Get value from storage

        :return: the stored string, or None when the key is absent
        """
        raise NotImplementedError
def get_network(self) -> Optional[str]:
""" Return the stored network name
"""
return self.get_value(self._network_attribute)
def set_network(self, network: str) -> None:
""" Save the network name
"""
return self.add_value(self._network_attribute, network)
def start_full_verification(self) -> None:
""" Save full verification on storage
"""
self.add_value(self._running_full_verification_attribute, '1')
def finish_full_verification(self) -> None:
""" Remove from storage that the full node is initializing with a full verification
"""
self.remove_value(self._running_full_verification_attribute)
def is_running_full_verification(self) -> bool:
""" Return if the full node is initializing with a full verification
or was running a full verification and was stopped in the middle
"""
return self.get_value(self._running_full_verification_attribute) == '1'
def start_running_manager(self) -> None:
""" Save on storage that manager is running
"""
self.add_value(self._manager_running_attribute, '1')
def stop_running_manager(self) -> None:
""" Remove from storage that manager is running
"""
self.remove_value(self._manager_running_attribute)
def is_running_manager(self) -> bool:
""" Return if the manager is running or was running and a sudden crash stopped the full node
"""
return self.get_value(self._manager_running_attribute) == '1'
def get_last_started_at(self) -> int:
""" Return the timestamp when the database was last started.
"""
# XXX: defaults to 1 just to force indexes initialization, by being higher than 0
return int(self.get_value(self._last_start_attribute) or NULL_LAST_STARTED_AT)
def set_last_started_at(self, timestamp: int) -> None:
""" Update the timestamp when the database was last started.
"""
self.add_value(self._last_start_attribute, str(timestamp))
def get_index_last_started_at(self, index_db_name: str) -> int:
""" Return the timestamp when an index was last started.
"""
attr_name = INDEX_ATTR_PREFIX + index_db_name
return int(self.get_value(attr_name) or NULL_INDEX_LAST_STARTED_AT)
def set_index_last_started_at(self, index_db_name: str, timestamp: int) -> None:
""" Update the timestamp when a specific index was last started.
"""
attr_name = INDEX_ATTR_PREFIX + index_db_name
self.add_value(attr_name, str(timestamp))
def update_last_started_at(self, timestamp: int) -> None:
""" Updates the respective timestamps of when the node was last started.
Using this mehtod ensures that the same timestamp is being used and the correct indexes are being selected.
"""
assert self.indexes is not None
self.set_last_started_at(timestamp)
for index in self.indexes.iter_all_indexes():
index_db_name = index.get_db_name()
if index_db_name is None:
continue
self.set_index_last_started_at(index_db_name, timestamp)
@abstractmethod
def flush(self) -> None:
"""Flushes the storage. It's called during shutdown of the node, for instance.
Should be implemented by storages that provide some kind of in-memory cache
"""
raise NotImplementedError
def iter_mempool_tips_from_tx_tips(self) -> Iterator[Transaction]:
""" Same behavior as the mempool index for iterating over the tips.
This basically means that the returned iterator will yield all transactions that are tips and have not been
confirmed by a block on the best chain.
This method requires indexes to be enabled.
"""
assert self.indexes is not None
tx_tips = self.indexes.tx_tips
for interval in tx_tips[self.latest_timestamp + 1]:
tx = self.get_transaction(interval.data)
tx_meta = tx.get_metadata()
assert isinstance(tx, Transaction) # XXX: tx_tips only has transactions
# XXX: skip txs that have already been confirmed
if tx_meta.first_block:
continue
yield tx
def iter_mempool_from_tx_tips(self) -> Iterator[Transaction]:
""" Same behavior as the mempool index for iterating over all mempool transactions.
This basically means that the returned iterator will yield all transactions that have not been confirmed by a
block on the best chain. Order is not guaranteed to be the same as in the mempool index.
This method requires indexes to be enabled.
"""
from hathor.transaction.storage.traversal import BFSTimestampWalk
root = self.iter_mempool_tips_from_tx_tips()
walk = BFSTimestampWalk(self, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=False)
for tx in walk.run(root):
tx_meta = tx.get_metadata()
# XXX: skip blocks and tx-tips that have already been confirmed
if tx_meta.first_block is not None or tx.is_block:
walk.skip_neighbors(tx)
else:
assert isinstance(tx, Transaction)
yield tx
def iter_mempool_tips_from_best_index(self) -> Iterator[Transaction]:
"""Get tx tips in the mempool, using the best available index (mempool_tips or tx_tips)"""
assert self.indexes is not None
if self.indexes.mempool_tips is not None:
yield from self.indexes.mempool_tips.iter(self)
else:
yield from self.iter_mempool_tips_from_tx_tips()
def iter_mempool_from_best_index(self) -> Iterator[Transaction]:
"""Get all transactions in the mempool, using the best available index (mempool_tips or tx_tips)"""
assert self.indexes is not None
if self.indexes.mempool_tips is not None:
yield from self.indexes.mempool_tips.iter_all(self)
else:
yield from self.iter_mempool_from_tx_tips()
def compute_transactions_that_became_invalid(self, new_best_height: int) -> list[BaseTransaction]:
""" This method will look for transactions in the mempool that have become invalid due to the reward lock.
It compares each tx's `min_height` to the `new_best_height`, accounting for the fact that the tx can be
confirmed by the next block.
"""
from hathor.transaction.validation_state import ValidationState
to_remove: list[BaseTransaction] = []
for tx in self.iter_mempool_from_best_index():
tx_min_height = tx.get_metadata().min_height
assert tx_min_height is not None
# We use +1 here because a tx is valid if it can be confirmed by the next block
if new_best_height + 1 < tx_min_height:
tx.set_validation(ValidationState.INVALID)
to_remove.append(tx)
return to_remove
class BaseTransactionStorage(TransactionStorage):
    """Base class for concrete storages: implements the index-backed queries
    of TransactionStorage on top of an optional IndexesManager.

    Most query methods raise NotImplementedError when indexes are disabled.
    """
    # Optional indexes manager; None means index-backed queries are unavailable.
    indexes: Optional[IndexesManager]
    def __init__(self, indexes: Optional[IndexesManager] = None, pubsub: Optional[Any] = None) -> None:
        super().__init__()
        # Pubsub is used to publish tx voided and winner but it's optional
        self.pubsub = pubsub
        # Indexes.
        self.indexes = indexes
        # Either save or verify all genesis.
        self._save_or_verify_genesis()
        # Cache for get_n_height_tips(); entry [0] is the current best block.
        self._latest_n_height_tips: list[HeightInfo] = []
    @property
    def latest_timestamp(self) -> int:
        # Timestamp of the newest stored vertex, taken from the info index.
        assert self.indexes is not None
        return self.indexes.info.get_latest_timestamp()
    @property
    def first_timestamp(self) -> int:
        # Timestamp of the oldest stored vertex, taken from the info index.
        assert self.indexes is not None
        return self.indexes.info.get_first_timestamp()
    @abstractmethod
    def _save_transaction(self, tx: BaseTransaction, *, only_metadata: bool = False) -> None:
        # Concrete storages implement the actual persistence of a vertex.
        raise NotImplementedError
    def reset_indexes(self) -> None:
        """Reset all indexes. This function should not be called unless you know what you are doing."""
        assert self.indexes is not None, 'Cannot reset indexes because they have not been enabled.'
        self.indexes.force_clear_all()
        # Caches derived from the indexes are now stale: drop them too.
        self.update_best_block_tips_cache(None)
        self._all_tips_cache = None
    def remove_cache(self) -> None:
        """Remove all caches in case we don't need it."""
        self.indexes = None
    def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> list[bytes]:
        # Delegates to the base implementation (kept for explicitness/overridability).
        return super().get_best_block_tips(timestamp, skip_cache=skip_cache)
    def get_n_height_tips(self, n_blocks: int) -> list[HeightInfo]:
        """Return height info for the latest n_blocks, serving from cache when possible."""
        block = self.get_best_block()
        if self._latest_n_height_tips:
            best_block = self._latest_n_height_tips[0]
            # Cache hit: best block unchanged and cache already holds >= n_blocks entries.
            if block.hash == best_block.id and n_blocks <= len(self._latest_n_height_tips):
                return self._latest_n_height_tips[:n_blocks]
        self._latest_n_height_tips = super().get_n_height_tips(n_blocks)
        return self._latest_n_height_tips[:n_blocks]
    def get_weight_best_block(self) -> float:
        # Delegates to the base implementation.
        return super().get_weight_best_block()
    def get_block_tips(self, timestamp: Optional[float] = None) -> set[Interval]:
        """Return the block tips at *timestamp* (defaults to latest_timestamp)."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        if timestamp is None:
            timestamp = self.latest_timestamp
        return self.indexes.block_tips[timestamp]
    def get_tx_tips(self, timestamp: Optional[float] = None) -> set[Interval]:
        """Return the transaction tips at *timestamp* (defaults to latest_timestamp)."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        if timestamp is None:
            timestamp = self.latest_timestamp
        tips = self.indexes.tx_tips[timestamp]
        if __debug__:
            # XXX: this `for` is for assert only and thus is inside `if __debug__:`
            for interval in tips:
                meta = self.get_metadata(interval.data)
                assert meta is not None
                # assert not meta.voided_by
        return tips
    def get_all_tips(self, timestamp: Optional[float] = None) -> set[Interval]:
        """Return all tips (blocks and txs) at *timestamp*, caching the latest result."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        if timestamp is None:
            timestamp = self.latest_timestamp
        # Serve from cache when asking for the present (or future) state.
        if self._all_tips_cache is not None and timestamp >= self._all_tips_cache.timestamp:
            assert self._all_tips_cache.timestamp == self.latest_timestamp
            return self._all_tips_cache.tips
        tips = self.indexes.all_tips[timestamp]
        if timestamp >= self.latest_timestamp:
            # Precompute the merkle tree alongside the tips for the cache entry.
            merkle_tree, hashes = self.calculate_merkle_tree(tips)
            self._all_tips_cache = AllTipsCache(self.latest_timestamp, tips, merkle_tree, hashes)
        return tips
    def get_newest_blocks(self, count: int) -> tuple[list[Block], bool]:
        """Return the newest *count* blocks and whether more are available."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        block_hashes, has_more = self.indexes.sorted_blocks.get_newest(count)
        blocks = [cast(Block, self.get_transaction(block_hash)) for block_hash in block_hashes]
        return blocks, has_more
    def get_newest_txs(self, count: int) -> tuple[list[BaseTransaction], bool]:
        """Return the newest *count* transactions and whether more are available."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        tx_hashes, has_more = self.indexes.sorted_txs.get_newest(count)
        txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes]
        return txs, has_more
    def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[Block], bool]:
        """Paginate blocks older than (timestamp, hash_bytes)."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        block_hashes, has_more = self.indexes.sorted_blocks.get_older(timestamp, hash_bytes, count)
        blocks = [cast(Block, self.get_transaction(block_hash)) for block_hash in block_hashes]
        return blocks, has_more
    def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes,
                               count: int) -> tuple[list[BaseTransaction], bool]:
        """Paginate blocks newer than (timestamp, hash_bytes).

        NOTE(review): unlike the other block getters this one neither casts to
        Block nor annotates list[Block] — presumably an oversight; confirm.
        """
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        block_hashes, has_more = self.indexes.sorted_blocks.get_newer(timestamp, hash_bytes, count)
        blocks = [self.get_transaction(block_hash) for block_hash in block_hashes]
        return blocks, has_more
    def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]:
        """Paginate transactions older than (timestamp, hash_bytes)."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        tx_hashes, has_more = self.indexes.sorted_txs.get_older(timestamp, hash_bytes, count)
        txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes]
        return txs, has_more
    def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]:
        """Paginate transactions newer than (timestamp, hash_bytes)."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        tx_hashes, has_more = self.indexes.sorted_txs.get_newer(timestamp, hash_bytes, count)
        txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes]
        return txs, has_more
    def _manually_initialize(self) -> None:
        # Hook for manual (re)initialization; currently only rebuilds indexes.
        self._manually_initialize_indexes()
    def _manually_initialize_indexes(self) -> None:
        if self.indexes is not None:
            self.indexes._manually_initialize(self)
    def _topological_sort_timestamp_index(self) -> Iterator[BaseTransaction]:
        """Yield all vertices ordered by timestamp, blocks before txs within a timestamp."""
        assert self.indexes is not None
        cur_timestamp: Optional[int] = None
        cur_blocks: list[Block] = []
        cur_txs: list[Transaction] = []
        for tx_hash in self.indexes.sorted_all.iter():
            tx = self.get_transaction(tx_hash)
            if tx.timestamp != cur_timestamp:
                # Timestamp changed: flush the previous group, blocks first.
                yield from cur_blocks
                cur_blocks.clear()
                yield from cur_txs
                cur_txs.clear()
                cur_timestamp = tx.timestamp
            if tx.is_block:
                assert isinstance(tx, Block)
                cur_blocks.append(tx)
            else:
                assert isinstance(tx, Transaction)
                cur_txs.append(tx)
        # Flush the final timestamp group.
        yield from cur_blocks
        yield from cur_txs
    def _topological_sort_metadata(self, *, include_partial: bool = False) -> Iterator[BaseTransaction]:
        """Yield vertices in topological order by walking metadata children from genesis,
        using a (timestamp, is_transaction) min-heap so parents come before children."""
        import heapq
        from dataclasses import dataclass, field
        @dataclass(order=True)
        class Item:
            timestamp: int
            # XXX: because bools are ints, and False==0, True==1, is_transaction=False < is_transaction=True, which
            # will make blocks be prioritized over transactions with the same timestamp
            is_transaction: bool
            tx: BaseTransaction = field(compare=False)
            def __init__(self, tx: BaseTransaction):
                self.timestamp = tx.timestamp
                self.is_transaction = tx.is_transaction
                self.tx = tx
        to_visit: list[Item] = list(map(Item, self.get_all_genesis()))
        seen: set[bytes] = set()
        heapq.heapify(to_visit)
        while to_visit:
            item = heapq.heappop(to_visit)
            assert item.tx.hash is not None
            yield item.tx
            # XXX: We can safely discard because no other tx will try to visit this one, since timestamps are strictly
            # higher in children, meaning we cannot possibly have item.tx as a descendant of any tx in to_visit.
            seen.discard(item.tx.hash)
            for child_tx_hash in item.tx.get_metadata().children:
                if child_tx_hash in seen:
                    continue
                child_tx = self.get_transaction(child_tx_hash)
                heapq.heappush(to_visit, Item(child_tx))
                seen.add(child_tx_hash)
    def _topological_sort_dfs(self) -> Iterator[BaseTransaction]:
        """Yield all vertices in topological order via DFS, starting from blocks."""
        # TODO We must optimize this algorithm to remove the `visited` set.
        #      It will consume too much memory when the number of transactions is big.
        #      A solution would be to store the ordering in disk, probably indexing by tx's height.
        #      Sorting the vertices by the lengths of their longest incoming paths produces a topological
        #      ordering (Dekel, Nassimi & Sahni 1981). See: https://epubs.siam.org/doi/10.1137/0210049
        #      See also: https://gitlab.com/HathorNetwork/hathor-python/merge_requests/31
        visited: dict[bytes, int] = dict()  # dict[bytes, int]
        for tx in self.get_all_transactions():
            if not tx.is_block:
                continue
            yield from self._run_topological_sort_dfs(tx, visited)
        for tx in self.get_all_transactions():
            yield from self._run_topological_sort_dfs(tx, visited)
    def _run_topological_sort_dfs(self, root: BaseTransaction, visited: dict[bytes, int]) -> Iterator[BaseTransaction]:
        """Iterative post-order DFS from *root* over parents and spent inputs.

        *visited* marks each hash 0 while in progress and 1 once emitted, so
        repeated calls sharing the dict never emit a vertex twice.
        """
        if root.hash in visited:
            return
        stack = [root]
        while stack:
            tx = stack[-1]
            assert tx.hash is not None
            if tx.hash in visited:
                if visited[tx.hash] == 0:
                    # All dependencies emitted: emit this vertex and pop it.
                    visited[tx.hash] = 1  # 1 = Visited
                    yield tx
                    assert tx == stack.pop()
                continue
            visited[tx.hash] = 0  # 0 = Visit in progress
            # The parents are reversed to go first through the blocks and only then
            # go through the transactions. It works because blocks must have the
            # previous block as the first parent. For transactions, the order does not
            # matter.
            for parent_hash in tx.parents[::-1]:
                if parent_hash not in visited:
                    try:
                        parent = self.get_transaction(parent_hash)
                    except TransactionDoesNotExist:
                        # XXX: it's possible transactions won't exist because of missing dependencies
                        pass
                    else:
                        stack.append(parent)
            for txin in tx.inputs:
                if txin.tx_id not in visited:
                    try:
                        txinput = self.get_transaction(txin.tx_id)
                    except TransactionDoesNotExist:
                        # XXX: it's possible transactions won't exist because of missing dependencies
                        pass
                    else:
                        stack.append(txinput)
    def add_to_indexes(self, tx: BaseTransaction) -> None:
        """Add *tx* to all enabled indexes, invalidating the all-tips cache."""
        if self.indexes is None:
            if self._saving_genesis:
                # XXX: avoid failing on some situations where this is called before we know it's OK to skip
                #      see: https://github.com/HathorNetwork/hathor-core/pull/436
                return
            else:
                raise NotImplementedError
        assert self.indexes is not None
        self._all_tips_cache = None
        self.indexes.add_tx(tx)
    def del_from_indexes(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: bool = False) -> None:
        """Remove *tx* from the enabled indexes."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        self.indexes.del_tx(tx, remove_all=remove_all, relax_assert=relax_assert)
    def get_block_count(self) -> int:
        """Return the number of blocks, from the info index."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        return self.indexes.info.get_block_count()
    def get_tx_count(self) -> int:
        """Return the number of transactions, from the info index."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        return self.indexes.info.get_tx_count()
    def get_vertices_count(self) -> int:
        """Return the total number of vertices (blocks + txs), from the info index."""
        if self.indexes is None:
            raise NotImplementedError
        assert self.indexes is not None
        return self.indexes.info.get_vertices_count()
    def get_genesis(self, hash_bytes: bytes) -> Optional[BaseTransaction]:
        """Return the genesis vertex with this hash, or None if it is not a genesis."""
        assert self._genesis_cache is not None
        return self._genesis_cache.get(hash_bytes, None)
    def get_all_genesis(self) -> set[BaseTransaction]:
        """Return the set of all genesis vertices."""
        assert self._genesis_cache is not None
        return set(self._genesis_cache.values())
    def get_transactions_before(self, hash_bytes: bytes,
                                num_blocks: int = 100) -> list[BaseTransaction]:  # pragma: no cover
        """Return up to *num_blocks* non-block ancestors of *hash_bytes* in topological order.

        NOTE(review): despite the parameter name, this counts transactions, not blocks.
        """
        ref_tx = self.get_transaction(hash_bytes)
        visited: dict[bytes, int] = dict()  # dict[bytes, int]
        result = [x for x in self._run_topological_sort_dfs(ref_tx, visited) if not x.is_block]
        result = result[-num_blocks:]
        return result
    def get_blocks_before(self, hash_bytes: bytes, num_blocks: int = 100) -> list[Block]:
        """Return up to *num_blocks* block ancestors of block *hash_bytes* (BFS over parents).

        Raises TransactionIsNotABlock when *hash_bytes* is not a block.
        """
        ref_tx = self.get_transaction(hash_bytes)
        if not ref_tx.is_block:
            raise TransactionIsNotABlock
        result = []  # list[Block]
        pending_visits = deque(ref_tx.parents)  # list[bytes]
        used = set(pending_visits)  # set[bytes]
        while pending_visits:
            tx_hash = pending_visits.popleft()
            tx = self.get_transaction(tx_hash)
            if not tx.is_block:
                continue
            assert isinstance(tx, Block)
            result.append(tx)
            if len(result) >= num_blocks:
                break
            for parent_hash in tx.parents:
                if parent_hash not in used:
                    used.add(parent_hash)
                    pending_visits.append(parent_hash)
        return result
    def flush(self) -> None:
        # No in-memory cache in the base class: flushing is a no-op.
        pass
|
# Maps each root note to the notes of its major scale.
# (Some entries repeat the tonic at the octave; membership tests are unaffected.)
chords_to_notes_mappings = {
    "A": ['A', 'B', 'C#', 'D', 'E', 'F#', 'G#', 'A'],
    "A#": ['A#', 'C', 'D', 'D#', 'F', 'G', 'A', 'A#'],
    "B": ['B', 'C#', 'D#', 'E', 'F#', 'G#', 'A#', 'B'],
    "C": ['C', 'D', 'E', 'F', 'G', 'A', 'B'],
    "C#": ['C#', 'D#', 'F', 'F#', 'G#', 'A#', 'C', 'C#'],
    "D": ['D', 'E', 'F#', 'G', 'A', 'B', 'C#', 'D'],
    "D#": ['D#', 'F', 'G', 'G#', 'A#', 'C', 'D'],
    "E": ['E', 'F#', 'G#', 'A', 'B', 'C#', 'D#'],
    "F": ['F', 'G', 'A', 'A#', 'C', 'D', 'E'],
    "F#": ['F#', 'G#', 'A#', 'B', 'C#', 'D#', 'F'],
    # Bug fix: G major contains F#, not F (G A B C D E F#).
    "G": ['G', 'A', 'B', 'C', 'D', 'E', 'F#'],
    "G#": ['G#', 'A#', 'C', 'C#', 'D#', 'F', 'G']
}


def count_matched_notes(notes):
    """Return {scale_root: how many of *notes* belong to that major scale}.

    Generalized from the original hard-coded 6 notes: any number of notes works.
    """
    return {
        root: sum(1 for note in notes if note in scale)
        for root, scale in chords_to_notes_mappings.items()
    }


def best_matching_roots(matched_notes_count):
    """Return every scale root that reaches the highest match count."""
    max_matched_count = max(matched_notes_count.values())
    return [root for root, count in matched_notes_count.items()
            if count == max_matched_count]


if __name__ == "__main__":
    # Whitespace-separated note names are expected, e.g. "C D C# A G F".
    notes = input("Enter 6 notes: ").split()
    matched_notes_count = count_matched_notes(notes)
    print('Matched Count : ', matched_notes_count)
    max_matched_count = max(matched_notes_count.values())
    print('Max matched :', max_matched_count)
    for key in best_matching_roots(matched_notes_count):
        print(key)
import sys
import string
import re
import math
# Bernoulli/binomial probability calculator (Python 2).
# Usage: script.py <#successes|X> <#trials> <probability-of-success>
if len(sys.argv) == 1:
    # NOTE(review): prints usage but does not exit; execution falls through
    # to sys.argv[1] below and raises IndexError when no args are given.
    print " #Successes, #Trials, #Probability of Success"
    print " #Successes - an integer or the letter X"
#print sys.argv[1]
#print sys.argv[2]
#print sys.argv[3]
# r is either an int (exact success count) or the string "X" (all counts).
if sys.argv[1].isdigit():
    r = int(sys.argv[1])
elif sys.argv[1].upper() == "X":
    r = sys.argv[1].upper()
else:
    print "invalid success number"
    sys.exit(0);
# NOTE(review): if sys.argv[2] is not a digit, n is never assigned and the
# loop below raises NameError — a validation `else` branch is missing here.
if sys.argv[2].isdigit():
    n = int(sys.argv[2])
    if n < r and isinstance(r,int):
        print "More Successes than Trials, invalid numbers"
        sys.exit(0);
# Probability accepted as a fraction ("3/4", "3\4") or a decimal (".75").
# NOTE(review): an input matching none of the three branches (e.g. "1")
# leaves Ps unassigned and the range check below raises NameError.
try:
    if '/' in sys.argv[3]:
        Ps=float(sys.argv[3].split('/')[0]) / float(sys.argv[3].split('/')[1])
    elif '\\' in sys.argv[3]:
        Ps=float(sys.argv[3].split('\\')[0]) / float(sys.argv[3].split('\\')[1])
    elif '.' in sys.argv[3]:
        Ps=float(sys.argv[3])
except ValueError:
    # NOTE(review): prints the error but does not exit before using Ps.
    print "Invalid Probability given"
if Ps > 1.0 or Ps < 0.0:
    print "Invalid Probability given"
    sys.exit(0);
Pf = 1 - Ps  # probability of failure
#r = int(sys.argv
#n =
#C(n,r) = n! / ( r!(n - r)! )
#C = math.factorial(n) / (math.factorial(r)*math.factorial(n-r))
#print "%s Combinations, Ps=%s, Pf=%s" % (C,Ps,Pf)
# bernoulli[i] = P(exactly i successes in n trials) = C(n,i) * Ps^i * Pf^(n-i)
bernoulli={}
Ptotal=0.0
Pless=0.0
Plessequal=0.0
Pequal=0.0
Pgreater=0.0
Pgreaterequal=0.0
for i in range(0,n+1):
    C = math.factorial(n) / (math.factorial(i)*math.factorial(n-i))
    bernoulli[i]=C*math.pow(Ps,i)*math.pow(Pf,n-i)
    #print "%s Combinations, Ps=%s, Pf=%s" % (C,Ps,Pf)
    #bern=C*math.pow(Ps,i)*math.pow(Pf,n-i)
    print "C(%d,%d) %f" % (n,i,bernoulli[i]),
    Ptotal += bernoulli[i]
    if isinstance(r,int):
        # Accumulate the cumulative distributions relative to r.
        print
        if i < r: Pless += bernoulli[i]
        if i <= r: Plessequal += bernoulli[i]
        if i == r: Pequal += bernoulli[i]
        if i > r: Pgreater += bernoulli[i]
        if i >= r: Pgreaterequal += bernoulli[i]
    elif r=="X":
        # "X" mode: print the running CDF for every success count.
        Pequal += bernoulli[i]
        print " Pi = %f Pd = %f" % (Pequal,1.0-Pequal)
#print "-----"
#print Ptotal
print
if isinstance(r,int):
    print "The probability of:"
    print "Having less than %d successes: %f" % (r,Pless)
    print "Having less than or %d successes: %f" % (r,Plessequal)
    print "Having exactly %d successes: %f" % (r,Pequal)
    print "Having greater than %d successes: %f" % (r,Pgreater)
    print "Having greater than or %d successes: %f" % (r,Pgreaterequal)
|
from flask_sqlalchemy import SQLAlchemy
# Single SQLAlchemy instance shared by every model below; bound to the Flask app elsewhere.
db = SQLAlchemy()
class User_SW(db.Model):
    """Application user for the Star Wars API exercise."""
    __tablename__ = 'user_sw'
    id_user = db.Column(db.Integer, primary_key=True)
    name_user = db.Column(db.String(250), nullable=False)
    password = db.Column(db.String(250), nullable=False)
    email_user = db.Column(db.String(250), nullable=False)
    birth_user = db.Column(db.String(250), nullable=False)
    is_active = db.Column(db.Boolean(), unique=False, nullable=False)
    def __repr__(self):
        return '<User_SW %r>' % self.name_user
    def serialize_user(self):
        """Return a JSON-serializable dict; the password is deliberately omitted."""
        return {
            "id_user": self.id_user,
            "name_user": self.name_user,
            # NOTE(review): key is "email" while the column is email_user —
            # inconsistent with the other keys; confirm API consumers before renaming.
            "email": self.email_user,
            "birth_user": self.birth_user,
            "is_active": self.is_active
        }
class Planets_SW(db.Model):
    """Planet entity mirroring the SWAPI planet fields."""
    __tablename__ = 'planets_sw'
    id_planets = db.Column(db.Integer, primary_key=True)
    planets_name = db.Column(db.String(250), nullable=False)
    planets_diameter = db.Column(db.Integer, nullable=False)
    planets_rotation_period = db.Column(db.Integer, nullable=False)
    planets_orbital_period = db.Column(db.Integer, nullable=False)
    planets_gravity = db.Column(db.String(250), nullable=False)
    planets_population = db.Column(db.Integer, nullable=False)
    planets_climate = db.Column(db.String(250), nullable=False)
    planets_terrain = db.Column(db.String(250), nullable=False)
    planets_surface_water = db.Column(db.Integer, nullable=False)
    planets_created = db.Column(db.String(250), nullable=False)
    planets_edited = db.Column(db.String(250), nullable=False)
    planets_url = db.Column(db.String(250), nullable=False)
    def __repr__(self):
        return '<Planets_SW %r>' % self.planets_name
    def serialize_planets(self):
        """Return a JSON-serializable dict of every planet column."""
        return {
            "id_planets": self.id_planets,
            "planets_name": self.planets_name,
            "planets_diameter": self.planets_diameter,
            "planets_rotation_period": self.planets_rotation_period,
            "planets_orbital_period": self.planets_orbital_period,
            "planets_gravity": self.planets_gravity,
            "planets_population": self.planets_population,
            "planets_climate": self.planets_climate,
            "planets_terrain": self.planets_terrain,
            "planets_surface_water": self.planets_surface_water,
            "planets_created": self.planets_created,
            "planets_edited": self.planets_edited,
            "planets_url": self.planets_url
        }
class People_SW(db.Model):
    """Character entity mirroring the SWAPI people fields, linked to a home planet."""
    __tablename__ = 'people_sw'
    id_people = db.Column(db.Integer, primary_key=True)
    people_height = db.Column(db.Integer, nullable=False)
    people_mass = db.Column(db.Integer, nullable=False)
    people_hair_color = db.Column(db.String(250), nullable=False)
    people_skin_color = db.Column(db.String(250), nullable=False)
    people_eye_color = db.Column(db.String(250), nullable=False)
    people_birth_year = db.Column(db.String(250), nullable=False)
    people_gender = db.Column(db.String(250), nullable=False)
    people_created = db.Column(db.String(250), nullable=False)
    people_edited = db.Column(db.String(250), nullable=False)
    people_name = db.Column(db.String(250), nullable=False)
    people_homeworld = db.Column(db.String(250), nullable=False)
    people_url = db.Column(db.String(250), nullable=False)
    # Optional FK to the character's home planet row.
    planets_id_fk = db.Column(db.Integer, db.ForeignKey('planets_sw.id_planets'), nullable=True)
    planets = db.relationship('Planets_SW')
    def __repr__(self):
        return '<People_SW %r>' % self.people_name
    def serialize_people(self):
        """Return a JSON-serializable dict of every people column (FK included)."""
        return {
            "id_people": self.id_people,
            "people_height": self.people_height,
            "people_mass": self.people_mass,
            "people_hair_color": self.people_hair_color,
            "people_skin_color": self.people_skin_color,
            "people_eye_color": self.people_eye_color,
            "people_birth_year": self.people_birth_year,
            "people_gender": self.people_gender,
            "people_created": self.people_created,
            "people_edited": self.people_edited,
            "people_name": self.people_name,
            "people_homeworld": self.people_homeworld,
            "people_url": self.people_url,
            "planets_id_fk": self.planets_id_fk
        }
# Last line #
class User(db.Model):
    """Minimal boilerplate auth user model."""
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(120), unique=True, nullable=False)
    password = db.Column(db.String(80), unique=False, nullable=False)
    is_active = db.Column(db.Boolean(), unique=False, nullable=False)
    def __repr__(self):
        # Bug fix: the model has no `username` attribute, so the old
        # `self.username` raised AttributeError whenever a User was repr()'d.
        return '<User %r>' % self.email
    def serialize(self):
        """Return a JSON-serializable dict; the password is deliberately omitted."""
        return {
            "id": self.id,
            "email": self.email,
            # do not serialize the password, its a security breach
        }
class Favorite_SW(db.Model):
    """Join table: a user's favorite planet/person.

    NOTE(review): both planets_id_fk and people_id_fk are nullable=False, so a
    row must always reference *both* a planet and a person even though
    type_favorite suggests only one applies — confirm whether one should be nullable.
    """
    __tablename__ = 'favorite_sw'
    id_favorite = db.Column(db.Integer, primary_key=True)
    user_id_fk = db.Column(db.Integer, db.ForeignKey('user_sw.id_user'), nullable=False)
    planets_id_fk = db.Column(db.Integer, db.ForeignKey('planets_sw.id_planets'), nullable=False)
    people_id_fk = db.Column(db.Integer,db.ForeignKey('people_sw.id_people'), nullable=False)
    # Discriminator telling which kind of favorite this row represents.
    type_favorite = db.Column(db.String(250), nullable=False)
    user = db.relationship('User_SW')
    planets = db.relationship('Planets_SW')
    people = db.relationship('People_SW')
    def __repr__(self):
        return '<Favorite_SW %r>' % self.user_id_fk
    def serialize_favorite(self):
        """Return a JSON-serializable dict of the favorite row."""
        return {
            "id_favorite": self.id_favorite,
            "user_id_fk": self.user_id_fk,
            "planets_id_fk": self.planets_id_fk,
            "people_id_fk": self.people_id_fk,
            "type_favorite": self.type_favorite
        }
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-08-06 13:15
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.auth.models
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import re
class Migration(migrations.Migration):
    """Initial migration for the `usuario` app (auto-generated by Django's
    makemigrations). Do not hand-edit applied migrations — create a new
    migration for schema changes instead.

    Creates Cargo, Documento, Usuario (extends auth.User), Administrador and
    Empleado (both extend Usuario), then adds Usuario.documento FK.
    """
    initial = True
    dependencies = [
        ('empresa', '0001_initial'),
        ('auth', '0008_alter_user_username_max_length'),
    ]
    operations = [
        migrations.CreateModel(
            name='Cargo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=30)),
                ('descripcion', models.CharField(blank=True, max_length=200, null=True)),
                ('estado', models.BooleanField(default=True)),
                ('empresa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='empresa.Empresa')),
            ],
        ),
        migrations.CreateModel(
            name='Documento',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=30)),
                ('descripcion', models.CharField(blank=True, max_length=200, null=True)),
                ('estado', models.BooleanField(default=True)),
            ],
        ),
        migrations.CreateModel(
            name='Usuario',
            fields=[
                ('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
                ('identificacion', models.CharField(max_length=15, unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[0-9]+$'), 'identificacion no valida', 'invalid')])),
                ('telefono_fijo', models.CharField(blank=True, max_length=15, validators=[django.core.validators.RegexValidator(re.compile('^[0-9]+$'), 'telefono no valido', 'invalid')], verbose_name='Telefono fijo')),
                ('telefono_celular', models.CharField(max_length=15, validators=[django.core.validators.RegexValidator(re.compile('^[0-9]+$'), 'telefono no valido', 'invalid')], verbose_name='Celular')),
                ('estado', models.BooleanField(default=True)),
            ],
            options={
                'abstract': False,
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
            },
            bases=('auth.user',),
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        migrations.CreateModel(
            name='Administrador',
            fields=[
                ('usuario_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='usuario.Usuario')),
                ('direccion', models.CharField(blank=True, max_length=50, null=True)),
                ('fecha_nacimiento', models.DateField(blank=True, null=True)),
                ('foto', models.ImageField(blank=True, null=True, upload_to='administrador/')),
                ('tienda', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='empresa.Tienda')),
            ],
            options={
                'verbose_name': 'Administrador Tienda',
                'verbose_name_plural': 'Administradores de Tiendas',
            },
            bases=('usuario.usuario',),
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        migrations.CreateModel(
            name='Empleado',
            fields=[
                ('usuario_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='usuario.Usuario')),
                ('direccion', models.CharField(max_length=50)),
                ('fecha_nacimiento', models.DateField(blank=True, null=True)),
                ('foto', models.ImageField(blank=True, null=True, upload_to='empleado/')),
                ('tienda', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='empresa.Tienda')),
            ],
            options={
                'verbose_name': 'Empleado',
                'verbose_name_plural': 'Empleados',
            },
            bases=('usuario.usuario',),
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        migrations.AddField(
            model_name='usuario',
            name='documento',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='usuario.Documento', verbose_name='Tipo de documento'),
        ),
    ]
|
import numpy as np
import datarobot as dr
from AE_ts_model import open_data
import pandas as pd
"""Load the data"""
X_train, X_val, y_train, y_val = open_data('./UCR_TS_Archive_2015')
X_train, X_train_out = X_train[:, :-1], X_train[:, -1]
X_val, X_val_out = X_val[:, :-1], X_val[:, -1]
N = X_train.shape[0]
Nval = X_val.shape[0]
D = X_train.shape[1]
ave = 10
Z_train = np.zeros((N, D - ave))
Z_val = np.zeros((Nval, D - ave))
for i in range(D - ave):
Z_train[:, i] = np.mean(X_train[:, i:(i+ave)], axis=1)
Z_val[:, i] = np.mean(X_val[:, i:(i+ave)], axis=1)
data_train_zave = np.concatenate((X_train_out.reshape((N, 1)),
Z_train), axis=1)
data_test_zave = np.concatenate((X_val_out.reshape((Nval, 1)),
Z_val), axis=1)
dtr_zave = pd.DataFrame(data_train_zave)
dte_zave = pd.DataFrame(data_test_zave)
dtr_zave.to_excel('dtr_zave.xlsx', index=False)
dte_zave.to_excel('dte_zave.xlsx', index=False)
# ========= #
# Datarobot #
# ========= #
API_TOKEN = '-aP9mLf539Zy_1FLr2FzZkY8ZeoI59uA'
END_POINT = 'https://app.datarobot.com/api/v2'
# Intantiate DataRobot Client
dr.Client(token= API_TOKEN, endpoint=END_POINT)
def get_projects_by_name(name):
    """Return every DataRobot project whose name contains *name* as a substring."""
    return [project for project in dr.Project.list() if name in project.project_name]
# ========== #
# Model zave #
# ========== #
# create project
# NOTE(review): absolute user-specific paths — parameterize for portability.
TRAIN_SET = '/Users/alex/Desktop/roar/test/dtr_zave.xlsx'
TEST_SET = '/Users/alex/Desktop/roar/test/dte_zave.xlsx'
# TRAIN_SET = '/Users/yihewang/Desktop/test/dtr_z.xlsx'
# TEST_SET = '/Users/yihewang/Desktop/test/dte_z.xlsx'
# Kick off DataRobot quick autopilot on the smoothed training set;
# target '0' is the prepended label column.
project_autopilot = dr.Project.create(TRAIN_SET, project_name='AE_zave')
project_autopilot.set_target(target='0', mode=dr.AUTOPILOT_MODE.QUICK, worker_count=4)
models = project_autopilot.get_models()
# prediction
projects = get_projects_by_name('AE_zave')
project_autopilot = projects[0]
dataset = project_autopilot.upload_dataset(TEST_SET)
models = project_autopilot.get_models()
# models[0] is the leaderboard's top model.
predict_job = models[0].request_predictions(dataset.id)
predictions = predict_job.get_result_when_complete()
# Mean squared error of the top model's predictions on the held-out targets.
MSE_Z = np.sum((np.array(predictions.iloc[:, 0]) - X_val_out)**2)/Nval
MSE_Z
# 0.10906995778314993
import numpy as np
import scipy as sp
from quaternion import from_rotation_matrix, quaternion
from rlbench.environment import Environment
from rlbench.action_modes import ArmActionMode, ActionMode
from rlbench.observation_config import ObservationConfig
from rlbench.tasks import *
from pyrep.const import ConfigurationPathAlgorithms as Algos
import copy
import ipdb
import pyrep
import math
def skew(x):
    """Return the 3x3 skew-symmetric (cross-product) matrix of a 3-vector."""
    a, b, c = x[0], x[1], x[2]
    return np.array([
        [0, -c, b],
        [c, 0, -a],
        [-b, a, 0],
    ])
def sample_normal_pose(pos_scale, rot_scale):
    '''
    Samples a 6D pose from a zero-mean isotropic normal distribution
    '''
    # Position: straight Gaussian sample per axis.
    position = np.random.normal(scale=pos_scale)
    # Rotation: sample a small rotation via the exponential map of a
    # random skew-symmetric matrix, then convert to a (w, x, y, z) quaternion.
    rotation_matrix = sp.linalg.expm(skew(np.random.normal(scale=rot_scale)))
    quat_wxyz = from_rotation_matrix(rotation_matrix)
    return position, quat_wxyz
class Scene:
    """Wrapper around an RLBench Environment/Task: registers the task's scene
    objects, repositions them, queries their poses, and runs a scripted
    pick-and-place "reset" routine.

    NOTE(review): several methods reference the module-level globals ``env``,
    ``task`` and ``scene`` (created in the __main__ block below) instead of
    the instance attributes — this class only works when driven by that script.
    """
    def __init__(self, env, task, mode):
        # mode selects how update() issues commands: "abs_joint_pos" plans a
        # path via the arm planner, otherwise targets go straight to task.step.
        self._env = env
        self._scene_objs = {}
        self._task = task
        self._pos_scale = [0.005] * 3  # noise params
        self._rot_scale = [0.01] * 3
        self._mode = mode
    def register_objs(self):
        '''
        This function creates a dictionary {obj_name : class_object of actual object}
        '''
        # Walk the whole object tree of the active task (all generations, not
        # just direct children) and index every object by its scene name.
        objs = self._env._scene._active_task.get_base().get_objects_in_tree(exclude_base=True, first_generation_only=False)
        for obj in objs:
            name = obj.get_name()
            self._scene_objs[name] = obj
        # self.update_reset_positions()
    def set_positions(self):
        """Manually rearrange task objects before an episode; currently only
        raises the cupboard by 0.75 m (earlier layouts are kept commented out)."""
        objs = self._env._scene._active_task.get_base().get_objects_in_tree(exclude_base=True, first_generation_only=False)
        x = 0
        for obj in objs:
            name = obj.get_name()
            # if((name == 'crackers') or (name == 'crackers_visual')
            # or (name == 'chocolate_jello') or (name == 'chocolate_jello_visual')
            # or (name == 'strawberry_jello') or (name == 'strawberry_jello_visual')
            # or (name == 'tuna') or (name == 'tuna_visual')
            # or (name == 'spam') or (name == 'spam_visual')
            # or (name == 'coffee') or (name == 'coffee_visual')
            # or (name == 'mustard') or (name == 'mustard_visual')
            # or (name == 'sugar') or (name == 'sugar_visual')):
            #
            # obj.set_position([x, 0.03, 0.1])
            # x += 0.01
            #
            # if((name == 'soup') or (name == 'soup_visual')):
            # obj.set_position([0.3, 0, 0.8])
            #
            # if((name == 'soup_grasp_point')):
            # obj.set_position([0.3, 0, 0.825])
            if(name == 'cupboard'):
                cupboard_pose = obj.get_position()
                cupboard_pose[2] += 0.75
                obj.set_position(cupboard_pose)
        self.update()
    def update(self, joint_positions=None, gripper_state=None, ignore_collisions=False):
        """Advance the simulation by one command.

        With a target given (gripper_state not None): in "abs_joint_pos" mode
        the target is planned to and executed with the requested gripper state
        (despite the name, ``joint_positions`` is consumed here as a 7-D pose:
        xyz + quaternion — TODO confirm); otherwise the target plus gripper
        action is sent straight to task.step.  With no target, the current
        configuration is re-issued so the scene keeps simulating.

        NOTE(review): uses the global ``env`` for path planning, not self._env.
        """
        obs = self._task._scene.get_observation()
        if(gripper_state != None):
            if(self._mode == "abs_joint_pos"):
                path = env._robot.arm.get_path(position=joint_positions[0:3], quaternion=joint_positions[3:],
                                               max_configs = 500, trials = 1000, algorithm=Algos.BiTRRT,
                                               ignore_collisions=ignore_collisions)
                self.execute_path(path, gripper_state)
            else:
                action = joint_positions.tolist() + [gripper_state]
                self._task.step(action)
                # pass
        else:
            # No target: hold the current joint/pose configuration and keep
            # the gripper at its current opening.
            gripper_state = self._env._robot.gripper.get_open_amount()[0]
            if(self._mode == "abs_joint_pos"):
                # print(obs.joint_positions.tolist() + [gripper_state])
                self._task.step(obs.joint_positions.tolist() + [gripper_state])
            else:
                gripper_state = self._env._robot.gripper.get_open_amount()[0]
                self._task.step(obs.gripper_pose.tolist() + [gripper_state])
    def get_noisy_poses(self):
        """Return {object_name: 7-D pose} for every object in the task tree.
        The noise injection the name refers to is currently commented out, so
        the returned poses are exact."""
        objs = self._env._scene._active_task.get_base().get_objects_in_tree(exclude_base=True, first_generation_only=False)
        obj_poses = {}
        for obj in objs:
            name = obj.get_name()
            pose = obj.get_pose()
            # pos, quat_wxyz = sample_normal_pose(self._pos_scale, self._rot_scale)
            # gt_quat_wxyz = quaternion(pose[6], pose[3], pose[4], pose[5])
            # perturbed_quat_wxyz = quat_wxyz * gt_quat_wxyz
            # pose[:3] += pos
            # pose[3:] = [perturbed_quat_wxyz.x, perturbed_quat_wxyz.y, perturbed_quat_wxyz.z, perturbed_quat_wxyz.w]
            obj_poses[name] = pose
        return obj_poses
    def where_to_place(self, curr_obj_name):
        """Rejection-sample a placement pose for the object whose grasp point
        is *curr_obj_name* (the '<obj>_grasp_point' name, hence the [:-12]
        strips): draw random points in an ellipse in front of the robot until
        the spot clears every other object's bounding-box half-diagonal."""
        # TODO: where to place the objects while reset
        curr_obj = self._scene_objs[curr_obj_name[:-12]]
        obj_grasp_point = self._scene_objs[curr_obj_name]
        bb0 = curr_obj.get_bounding_box()
        half_diag = (bb0[0]**2 + bb0[2]**2)**0.5
        h = curr_obj.get_pose()[2]-0.25
        # h = abs(bb0[4]*2)
        while True:
            check = True
            # Candidate point on an axis-aligned ellipse centred at x=0.25.
            a = np.random.uniform(0,0.25)
            b = np.random.uniform(0, 0.4)
            theta = np.random.uniform(0, 2*math.pi)
            x = a*math.cos(theta) + 0.25
            y = b*math.sin(theta)
            # print(x,y,h)
            obj_poses = self.get_noisy_poses()
            # action = [x,y,h] + list(obj_poses[name+'_grasp_point'][3:]) + [False]
            objs = self._env._scene._active_task.get_base().get_objects_in_tree(exclude_base=True, first_generation_only=False)
            for obj in objs:
                # print(obj.get_name())
                pose = obj.get_pose()
                dist = np.sum((pose[0:2]-np.array([x,y]))**2) ** 0.5
                bb = obj.get_bounding_box()
                # Reject if the candidate overlaps this object's footprint.
                if dist < half_diag + (bb[0]**2 + bb[2]**2)**0.5:
                    check = False
                    break
            if not check:
                continue
            else:
                break
        #[x, y, z, q1, q2, q3, q4]
        return np.array([x,y,h] + obj_grasp_point.get_pose()[3:].tolist())
        # return np.array([x,y,h] + [0,0,0,1])
    def reset(self):
        '''
        Scripted pick-and-place pass over every '*_grasp_point' object:
        move above it, descend, close the gripper, attach the object, lift,
        rejection-sample a free spot, place, release.  Aborts (and releases
        the gripper) on a planning failure.
        TODO
        1. Check for every box in a sequence, from closer to farther
        2. Generate a series of waypoints to pick the object and place it in its set loc.
        '''
        obj_poses = self.get_noisy_poses()
        # import ipdb; ipdb.set_trace()
        grasp_points = [] #[x, y, z, q1, q2, q3, q4]
        # iterate through all the objects
        for k, v in obj_poses.items():
            if 'grasp' not in k:
                pass
            else:
                grasp_points.append((k,v))
        # sort object positions based on distance from the base
        # grasp_points = sorted(grasp_points, key = lambda x: (x[0]**2 + x[1]**2))
        while grasp_points:
            try:
                obj_name, gsp_pt = grasp_points.pop(0)
                print("Grasping: ", obj_name[:-12])
                pre_gsp_pt = self.pre_grasp(gsp_pt.copy())
                print("Move to pre-grasp point for: ", obj_name[:-12])
                self.update(pre_gsp_pt, True)
                print("Move to grasp point for: ", obj_name[:-12])
                self.update(gsp_pt, True)
                print("Close gripper for: ", obj_name[:-12])
                self.update(gsp_pt, False, ignore_collisions=True)
                print("Attach object to gripper: " + obj_name[:-12], env._robot.gripper.grasp(scene._scene_objs[obj_name[:-12]]))
                self.update()
                print("Just move up while holding: ", obj_name[:-12])
                self.update(pre_gsp_pt, False, ignore_collisions=True)
                # ipdb.set_trace()
                while True:
                    print("Trying new positions")
                    place_pt = self.where_to_place(obj_name)
                    pre_place_pt = self.pre_grasp(place_pt.copy())
                    try:
                        print("Going to pre_place_pt with gripper close")
                        self.update(pre_place_pt, False)
                        print("Going to place_pt with gripper close")
                        self.update(place_pt, False)
                        break
                    # NOTE(review): bare except also swallows non-planning
                    # errors; intended to retry on ConfigurationPathError.
                    except:
                        print("Path not found")
                        continue
                print("opening gripper")
                self.update(place_pt, True, ignore_collisions=True)
                print("DeGrasp: " + obj_name[:-12])
                env._robot.gripper.release()
                self.update()
                print("Going in air")
                self.update(pre_place_pt, True)
            except pyrep.errors.ConfigurationPathError:
                print("Could Not find Path")
                env._robot.gripper.release()
                return
    def pre_grasp(self, grasp_vect):
        """Return the pose lifted 0.3 m along z (mutates and returns the
        array passed in — callers pass a .copy())."""
        pre_grasp_point = grasp_vect
        pre_grasp_point[2] += 0.3
        return pre_grasp_point
    def execute_path(self, path, gripper_open):
        """Step through a planned arm path, appending a constant gripper
        action (1 = open, 0 = closed) to every waypoint.
        NOTE(review): steps the global ``task``, not self._task.
        """
        path_points = path._path_points.reshape(-1, path._num_joints)
        if(gripper_open):
            path_joints = np.hstack((path_points, np.ones((path_points.shape[0], 1))))
        else:
            path_joints = np.hstack((path_points, np.zeros((path_points.shape[0], 1))))
        i = 0
        while not path._path_done and i < path_joints.shape[0]:
            task.step(path_joints[i])
            i += 1
if __name__ == "__main__":
    # Initializes environment and task
    mode = "abs_joint_pos" # ee_pose_plan
    # mode = "ee_pose_plan"
    if(mode == "ee_pose_plan"):
        action_mode = ActionMode(ArmActionMode.ABS_EE_POSE_PLAN) # See rlbench/action_modes.py for other action modes
    elif(mode == "abs_joint_pos"):
        action_mode = ActionMode(ArmActionMode.ABS_JOINT_POSITION)
    else:
        print("Mode Not Found")
    env = Environment(action_mode, '', ObservationConfig(), False, static_positions=False)
    task = env.get_task(PutGroceriesInCupboard) # available tasks: EmptyContainer, PlayJenga, PutGroceriesInCupboard, SetTheTable
    task.reset()
    scene = Scene(env, task, mode) # Initialize our scene class
    scene.register_objs() # Register all objects in the environment
    # TODO - RL Forward Policy
    scene.set_positions() # Run an episode of forward policy or set object locations manually
    scene.reset()
    # NOTE(review): infinite loop — env.shutdown() below is unreachable.
    while True:
        scene.update()
        scene.reset()
    env.shutdown()
|
'''
查詢歷史資料 範例
https://rate.bot.com.tw/xrt/all/2023-03-20
'''
import requests
from bs4 import BeautifulSoup
def get_html_data1(url):
    """Fetch *url* with requests and return the Response object, or None when
    the server does not answer with HTTP 200 (OK)."""
    print('取得網頁資料: ', url)
    resp = requests.get(url)
    # Guard clause: only a requests.codes.ok (200) response is usable.
    if resp.status_code == requests.codes.ok:
        return resp
    print('讀取網頁資料錯誤, url: ', resp.url)
    return None
# Scrape the Bank of Taiwan daily exchange-rate table and pair each currency
# name with its spot buying rate.
print('查詢中央銀行匯率')
url = 'https://rate.bot.com.tw/xrt/all/day'
# https://rate.bot.com.tw/xrt?Lang=zh-TW is an alternative URL
html_data = get_html_data1(url)
soup = BeautifulSoup(html_data.text, 'html.parser')
print(soup.prettify()) # prettify() renders the DOM tree in an indented, readable form.
#print('多重條件選擇')
cells = soup.select('table tr td') # select td inside tr inside table (all three levels must match)
print(type(cells))
print('符合條件的資料', len(cells), '筆')
print(cells)
print('--------------------------------------------------------')
i = 0
for cell in cells:
    print(i)
    print(cell.text.strip())
    i = i + 1
print('--------------------------------------------------------')
# Currency names: td[data-table=幣別] ("currency"), phone-visible div variant.
print('顯示 幣別')
print('多重條件選擇')
dnames = soup.select('table tr td[data-table=幣別] div.visible-phone')
#print(type(dnames))
print('符合條件的資料', len(dnames), '筆')
#print(dnames)
names = list()
for dname in dnames:
    names.append(dname.text.strip())
print(names)
print('--------------------------------------------------------')
# Spot buying rates: td[data-table=本行即期買入] ("bank spot buying").
print('顯示 本行即期買入')
print('多重條件選擇')
buyingrate = soup.select('table tr td[data-table=本行即期買入]')
print(type(buyingrate))
print('符合條件的資料', len(buyingrate), '筆')
print(buyingrate)
i = 0
for price in buyingrate:
    print(i)
    print(price.text.strip())
    i = i + 1
print('--------------------------------------------------------')
prices = list()
for price in buyingrate:
    prices.append(price.text.strip())
print(prices)
print('--------------------------------------------------------')
print(names)
print(prices)
# Pair each currency with its buying rate (positional zip).
rates = zip(names, prices)
for rate in rates:
    print(rate)
print('--------------------------------------------------------')
|
import pygame
import colors
class button(pygame.sprite.Sprite):
    """Clickable rectangular UI button.

    Any keyword argument overrides the matching default attribute set in
    __init__ (e.g. ``onClick`` — a zero-argument callback, ``colors`` —
    (normal, hover) fill colours, ``instaKill`` — kill() the sprite after a
    click, ``center`` — centre the caption inside the rect).
    Relies on the owning ``game`` exposing ``menuFont``, ``antialiasing`` and
    the per-frame ``events`` list.
    """
    def __init__(self, game, pos,**kwargs):
        self.game = game
        self.onClick = False  # zero-arg callback; False means "no action"
        self.groups = []
        self.rect = (0, 0, 200, 60)
        # Normal Selected
        self.colors = ((50, 255, 255), (255, 255, 255))
        self.spriteInit = False
        self.hover = False
        self.clicked = False
        self.instaKill = False
        self.text = ''
        self.center = False
        # kwargs overwrite any of the defaults above.
        for k, v in kwargs.items():
            self.__dict__[k] = v
        pygame.sprite.Sprite.__init__(self, self.groups)
        self.rect = pygame.Rect(self.rect)
        self.rect.x, self.rect.y = pos
        self.image = pygame.Surface(self.rect.size, pygame.SRCALPHA)
        self.rendText = self.game.menuFont.render(self.text, self.game.antialiasing, (0, 0, 0))
        self.textRect = self.rendText.get_rect()
        if self.center:
            self.textRect.center = pygame.Rect(0, 0, self.rect.width, self.rect.height).center
        else:
            self.textRect.x += 2
            self.textRect.y += 2
    def update(self):
        """Per-frame: recompute hover/click state from the game's event list,
        fire onClick on a mouse-down while hovered, and redraw the surface."""
        self.image = pygame.Surface(self.rect.size)
        self.hover = False
        self.clicked = False
        mouseRect = pygame.Rect(pygame.mouse.get_pos()[0], pygame.mouse.get_pos()[1], 1, 1)
        if mouseRect.colliderect(self.rect):
            self.hover = True
        if self.hover:
            for event in self.game.events:
                if event.type == pygame.MOUSEBUTTONDOWN:
                    self.clicked = True
                    if self.onClick:
                        self.onClick()
                    if self.instaKill:
                        self.kill()
            self.image.fill(self.colors[1])  # hover colour
        else:
            self.image.fill(self.colors[0])  # normal colour
        self.image.blit(self.rendText, self.textRect)
    def reset(self):
        # Clear the click latch.
        self.clicked = False
class settingSlider(pygame.sprite.Sprite):
    """Horizontal drag slider; get_ratio()/setRatio() expose the knob position
    as a 0..1 fraction of the track.

    NOTE(review): the knob's x is stored relative to the widget surface, but
    while dragging it is derived from absolute mouse coordinates minus
    self.rect.x — assumes the widget is blitted at self.rect's screen
    position; verify against the caller.
    """
    def __init__(self, game, pos,**kwargs):
        self.game = game
        self.rect = (0, 0, 200, 60)
        self.sliderRect = (0, 0, 20, 10)
        self.bgColor = colors.black
        # line (normal) Rect
        self.colors = ((50, 255, 255), colors.yellow, (255, 255, 255))
        self.clicked = False
        self.text = ''
        self.center = False
        self.groups = [] ## These few lines are the lines for component objects
        self.addGroups = []
        # kwargs overwrite any of the defaults above.
        for k, v in kwargs.items():
            self.__dict__[k] = v
        self.groups = self.groups + self.addGroups
        pygame.sprite.Sprite.__init__(self, self.groups)
        self.rect = pygame.Rect(self.rect)
        self.rect.x, self.rect.y = pos
        self.image = pygame.Surface(self.rect.size)
        # Knob starts at the far right (ratio == 1).
        self.sliderRect = pygame.Rect(self.sliderRect)
        self.sliderRect.centery = self.rect.height/2
        self.sliderRect.x = self.rect.width - self.sliderRect.width
    def reset(self):
        # Re-seat the knob at the far-right position.
        self.sliderRect = pygame.Rect(self.sliderRect)
        self.sliderRect.centery = self.rect.height/2
        self.sliderRect.x = self.rect.width - self.sliderRect.width
    def update(self):
        """Per-frame: while dragging, follow the mouse (clamped to the track);
        otherwise test whether a drag starts. Always redraw."""
        if self.clicked:
            if not pygame.mouse.get_pressed()[0]:
                self.clicked = False
            else:
                self.sliderRect.x = min(self.rect.right-self.sliderRect.width, pygame.mouse.get_pos()[0]-self.sliderRect.width) - self.rect.x
                self.sliderRect.x = max(0, self.sliderRect.x)
        else:
            self.checkClicked()
        self.render()
    def get_ratio(self):
        # Knob position as a fraction of the usable track length.
        return self.sliderRect.x/(self.rect.width-self.sliderRect.width)
    def setRatio(self, percent): # Set between 0 & 1
        self.sliderRect.x = (self.rect.width-self.sliderRect.width)*percent
    def render(self):
        # Filled (left) and empty (right) track halves, then the knob.
        self.image.fill(self.bgColor)
        pygame.draw.line(self.image, self.colors[1],(0, self.rect.height/2), (self.sliderRect.centerx, self.rect.height/2), 4)
        pygame.draw.line(self.image, self.colors[0],(self.sliderRect.centerx, self.rect.height/2), (self.rect.width, self.rect.height/2), 4)
        pygame.draw.rect(self.image, self.colors[2], self.sliderRect)
    def checkClicked(self):
        # Begin a drag when the left button is held over the widget.
        if pygame.mouse.get_pressed()[0]:
            mouseRect = pygame.Rect(pygame.mouse.get_pos()[0], pygame.mouse.get_pos()[1], 1, 1)
            if mouseRect.colliderect(self.rect):
                self.clicked = True
class menuItem(pygame.sprite.Sprite):
    """Menu icon that zooms toward ``zoomMax`` while the mouse hovers over it
    and shrinks back to 1x when it does not."""
    def __init__(self, game, pos, image, **kwargs):
        self.game = game
        self.rect = (0, 0, 100, 100)
        self.bgColor = (0, 0, 0, 0)
        # line (normal) Rect
        self.hover = False
        self.zoom = 1
        self.zoomMax = 2
        self.zoomSpeed = 0.4
        self.desc = ''
        self.text = ''
        self.groups = [] ## These few lines are the lines for component objects
        # kwargs overwrite any of the defaults above.
        for k, v in kwargs.items():
            self.__dict__[k] = v
        pygame.sprite.Sprite.__init__(self, self.groups)
        self.rect = pygame.Rect(self.rect)
        self.rect.topleft = pos
        self.image = pygame.Surface((self.rect.w, self.rect.h), pygame.SRCALPHA)
        self.imageSrc = pygame.image.load(image)
    def setIcon(self):
        # Rescale the source image by the current zoom factor.
        self.icon = pygame.transform.scale(self.imageSrc, (int(self.imageSrc.get_width()*self.zoom), int(self.imageSrc.get_height()*self.zoom)))
    def setRect(self):
        # Adopt the source image's native rect (resets position to (0, 0)).
        self.rect = self.imageSrc.get_rect()
    def render(self):
        # Redraw the zoomed icon centred on the widget surface.
        self.image.fill(self.bgColor)
        self.setIcon()
        rect = self.icon.get_rect(center=(self.rect.w/2, self.rect.h/2))
        self.image.blit(self.icon, rect)
    def update(self):
        """Per-frame: detect hover, ease the zoom toward its target, redraw."""
        self.hover = False
        mouseRect = pygame.Rect(pygame.mouse.get_pos()[0], pygame.mouse.get_pos()[1], 1, 1)
        if mouseRect.colliderect(self.rect):
            self.hover = True
        if self.hover:
            self.zoom = min(self.zoomMax, self.zoom + self.zoomSpeed)
        else:
            self.zoom = max(1, self.zoom - self.zoomSpeed)
        self.render()
|
#!/usr/bin/env python
import rospy
import numpy as np
from visualization_msgs.msg import Marker
from geometry_msgs.msg import Point
def callback(msg):
    """Republish an incoming Point as the marker's position, remapping axes
    (x<-z, y<- -x, z<- -y) — presumably optical-frame to ROS body-frame
    conversion; TODO confirm against the publisher of "topic".

    NOTE: ``marker`` and ``mypub`` are module-level globals defined below.
    """
    marker.pose.position.x = msg.z
    marker.pose.position.y = -msg.x
    marker.pose.position.z = -msg.y
    mypub.publish(marker)
# Node setup: publish a sphere Marker on "pongo", repositioned from Points on
# "topic" by callback() above.
rospy.init_node("pong_pong_marker_publisher", anonymous = False)
mypub = rospy.Publisher("pongo", Marker, queue_size=1)
rospy.Subscriber("topic", Point, callback)
# Marker template: 4 cm orange-ish sphere in the camera_link frame.
marker = Marker()
marker.header.frame_id = "camera_link"
# NOTE(review): stamp is set once at startup and never refreshed — confirm
# whether consumers require a current timestamp.
marker.header.stamp = rospy.Time.now()
marker.ns = "what"
marker.id = 0
marker.type = Marker.SPHERE
marker.action = Marker.ADD
marker.pose.position.x = 0.04
marker.pose.position.y = 0.04
marker.pose.position.z = 0.04
marker.pose.orientation.x = 0.0
marker.pose.orientation.y = 0.0
marker.pose.orientation.z = 0.0
marker.pose.orientation.w = 1.0
marker.scale.x = 0.04
marker.scale.y = 0.04
marker.scale.z = 0.04
marker.color.a = 0.9
marker.color.r = 0.8
marker.color.g = 0.5
marker.color.b = 0.2
rospy.spin()
|
from typing import List


class Solution:
    def maxProfit(self, prices: List[int]) -> int:
        """Maximum profit from one buy followed by one later sell.

        Single pass: track the lowest price seen so far and the best profit
        achievable by selling at the current price.  Returns 0 for an empty
        list or monotonically falling prices.
        """
        # BUGFIX: `List` was used without importing it from typing, which
        # raises NameError when the class is defined outside a judge harness.
        local_min = float('inf')
        answer = 0
        for price in prices:
            if local_min > price:
                local_min = price
            answer = max(answer, price - local_min)
        return answer
#!/usr/bin/env python
import rospy
from nav_msgs.msg import OccupancyGrid
from std_msgs.msg import Int16
from geometry_msgs.msg import Twist
from compressed_image_transport import *
from nav_msgs.msg import Odometry
from sensor_msgs.msg import LaserScan
from tf2_msgs.msg import TFMessage
import tf
import math
import cv2
import numpy as np
from maze_solving_algorithm import Solver
import threading
def existance(arr, num):
    """Return True if *num* occurs in sequence *arr*, else False.

    Idiomatic containment test (uses == per element, exactly like the
    original index loop).
    """
    return num in arr
def configure(arr):
    """Return a copy of *arr* with duplicates removed, keeping the order of
    first occurrence.

    Uses the `in` operator directly instead of the existance() helper; `in`
    compares with == just like the helper did, so unhashable items such as
    [row, col] lists still work.
    """
    unique = []
    for item in arr:
        if item not in unique:
            unique.append(item)
    return unique
def distance_dot2line(a, b, c, x0, y0):
    """Perpendicular distance from point (x0, y0) to the line a*x + b*y + c = 0."""
    numerator = abs(x0 * a + y0 * b + c)
    denominator = math.sqrt(a * a + b * b)
    return numerator / denominator
def distance_dot2dot(x1, y1, x2, y2):
    """Euclidean distance between points (x1, y1) and (x2, y2).

    math.hypot is the standard-library form and is more robust against
    intermediate overflow/underflow than sqrt of a sum of squares.
    """
    return math.hypot(x2 - x1, y2 - y1)
def collision_test(start, goal, map, difference_low, difference_col):
    """Sample the straight segment between *start* and *goal* on occupancy
    grid *map* (both endpoints first shifted by the given row/col offsets)
    and return 'danger' if any sampled cell is True, otherwise 'safe'.
    """
    r0, c0 = start[0] - difference_low, start[1] - difference_col
    r1, c1 = goal[0] - difference_low, goal[1] - difference_col
    if r0 == r1:
        # Same row: scan the columns between the two endpoints.
        for col in range(min(c0, c1), max(c0, c1)):
            if map[r0][col] == True:
                return 'danger'
    else:
        # One sample per row along the line through the two points.
        slope = (c1 - c0) / (r1 - r0)
        intercept = -slope * r0 + c0
        for row in range(min(r0, r1), max(r0, r1)):
            if map[row][int(slope * row + intercept)] == True:
                return 'danger'
    return 'safe'
def euler_from_quaternion(quaternion):
    """Extract the yaw (z-axis Euler angle) from *quaternion*, offset by
    -pi/2 and normalised into [0, 2*pi)."""
    yaw = tf.transformations.euler_from_quaternion(quaternion)[2]
    theta = yaw - 3.141592 / 2
    if theta < 0:
        theta += 3.141592 * 2
    return theta
class Maze_pathfinder():
    """ROS node logic (Python 2) for click-to-navigate on a SLAM map.

    The user clicks a goal in the OpenCV 'SLAM' window; a path is planned by
    the external Solver, and the robot alternately rotates toward and drives
    along successive waypoints.  The map is assumed to be 384x384 cells at
    roughly 20 cells per metre (see the hard-coded constants) — TODO confirm
    against the map server configuration.

    State machine held in self.state:
        'stop' -> 'path_finding' -> 'direction_setting' -> 'going' and back,
    driven concurrently from the ROS callbacks below.
    """
    def __init__(self):
        # Map, odometry and laser-scan subscriptions; velocity commands are
        # published on /cmd_vel.
        self._sub = rospy.Subscriber('/map', OccupancyGrid, self.callback, queue_size=1)
        self._sub = rospy.Subscriber('/odom', Odometry, self.callback2, queue_size=1)
        self._sub = rospy.Subscriber('/scan', LaserScan, self.callback3, queue_size=1)
        # self._sub = rospy.Subscriber('/tf', TFMessage, self.callback4, queue_size=1)
        self._pub = rospy.Publisher('/cmd_vel', Twist, queue_size=1)
        self.img = np.zeros((384, 384, 3), np.uint8)
        # Current robot cell (row/col) and goal cell on the 384x384 grid.
        self.low_position = 0
        self.col_position = 0
        self.destination_low = 0
        self.destination_col = 0
        self.theta = 0
        self.state = 'stop' # path_finding, stop, going, direction_setting
        self.shortest_path = [[0,0]]
        self.path = [0,0]
    def define_destination(self, event, x, y, flags, param):
        """OpenCV mouse callback: a left click sets the goal cell and kicks
        the state machine into 'path_finding'."""
        if event == cv2.EVENT_LBUTTONDOWN:
            self.destination_low = y
            self.destination_col = x
            self.state = 'path_finding'
    def callback(self, map):
        """/map callback: hand the occupancy grid to path_finding() on a
        daemon thread so the subscriber queue is not blocked."""
        thread1 = threading.Thread(target=self.path_finding, args=(map,))
        thread1.setDaemon(True)
        thread1.start()
    def path_finding(self, map):
        """Redraw the map image from the occupancy grid, run the Solver when
        a new goal is pending, prune waypoints that are directly reachable,
        and pick the next waypoint to steer toward."""
        self.img = np.zeros((384, 384, 3), np.uint8)
        # Colour scheme: unknown cells blue, free cells green, occupied black.
        for i in range(0, 384):
            for j in range(0, 384):
                if map.data[384*j + i] == -1:
                    self.img[i][j][0] = 255
                if map.data[384*j + i] == 0:
                    self.img[i][j][1] = 255
                if map.data[384*j + i] == 100:
                    self.img[i][j][0] = 0
                    self.img[i][j][1] = 0
                    self.img[i][j][2] = 0
        # Draw direction
        self.img = cv2.line(self.img, (self.col_position, self.low_position), (self.col_position + int(10*math.cos(self.theta)), self.low_position - int(10*math.sin(self.theta))), (0, 255, 255), 1)
        if self.state == 'path_finding':
            print 'path finding....'
            solver = Solver([self.low_position, self.col_position], [self.destination_low, self.destination_col], map.data)
            solver.solve_distance()
            solver.find_shortest_path()
            self.shortest_path = solver.shortest_path
            self.state = 'direction_setting'
            print 'path finding end!'
        # Prune waypoints the robot can reach in a straight collision-free
        # line, so it heads for the farthest directly visible one.
        while(1):
            if len(self.shortest_path) > 2:
                if solver.collision_test([self.low_position, self.col_position], self.shortest_path[len(self.shortest_path)-3]) == 'safe':
                    _ = self.shortest_path.pop()
                    print 'poped out'
                else:
                    print 'collision'
                    break
            else:
                break
        self.path = self.shortest_path[len(self.shortest_path)-2]
        if self.path == [0,0]:
            print "something wrong!"
        # Overlay the chosen waypoint and the remaining path on the image.
        if self.state != 'stop' and self.state != 'path_finding':
            self.img = cv2.line(self.img, (self.path[1], self.path[0]), (self.path[1], self.path[0]), (0, 170, 255), 2)
            for i in range(len(self.shortest_path)):
                self.img = cv2.line(self.img, (self.shortest_path[i][1], self.shortest_path[i][0]), (self.shortest_path[i][1], self.shortest_path[i][0]), (255, 0, 255), 2)
                if i != 0:
                    self.img = cv2.line(self.img, (self.shortest_path[i][1], self.shortest_path[i][0]), (self.shortest_path[i - 1][1], self.shortest_path[i - 1][0]), (0, 255, 255), 1)
                else:
                    self.img = cv2.line(self.img, (self.col_position, self.low_position), (self.shortest_path[i - 1][1], self.shortest_path[i - 1][0]), (0, 255, 255), 1)
    def callback2(self, odometry):
        """/odom callback: run the motion state machine.

        'direction_setting': rotate (shorter way round) toward the current
        waypoint with a speed stepped down as the heading error shrinks;
        when aligned, switch to 'going'.
        'going': drive forward at a fixed 0.06 m/s, falling back to
        'direction_setting' when the lateral error grows, to 'path_finding'
        on waypoint arrival, and to 'stop' at the destination.
        Pose itself comes from the TF listener thread, not from *odometry*.
        """
        #print 'map_to_odom', self.tf_map_to_odom[0], self.tf_map_to_odom[1]
        #print 'odom_to_base', self.tf_odom_to_base[0], self.tf_odom_to_base[1]
        #print 'odom', odometry.pose.pose.position.x, odometry.pose.pose.position.y
        #print self.tf_map_to_odom[0] + self.tf_odom_to_base[0], self.tf_map_to_odom[1] + self.tf_odom_to_base[1]
        #quaternion = (odometry.pose.pose.orientation.x, odometry.pose.pose.orientation.y, odometry.pose.pose.orientation.z, odometry.pose.pose.orientation.w)
        #self.theta = euler_from_quaternion(quaternion)
        direction_desired = math.atan2(self.low_position - self.path[0], self.path[1] - self.col_position)
        if direction_desired < 0:
            direction_desired = direction_desired + 3.141592*2
        if self.state == 'direction_setting':
            # calculate degree and direction
            if direction_desired > self.theta:
                if direction_desired - self.theta < 3.141592:
                    turn_direction = 'left'
                else:
                    turn_direction = 'right'
            else:
                if self.theta - direction_desired < 3.141592:
                    turn_direction = 'right'
                else:
                    turn_direction = 'left'
            # publish topic
            difference = abs(direction_desired - self.theta)
            if difference > 3.141592:
                difference = 3.141592*2 - difference
            # Stepped angular speed: coarse far from the target heading,
            # fine near it; below 0.01 rad we consider ourselves aligned.
            if difference > 0.3:
                turn_speed = 0.6
            elif difference > 0.2:
                turn_speed = 0.3
            elif difference > 0.1:
                turn_speed = 0.1
            elif difference > 0.01:
                turn_speed = 0.05
            else:
                turn_speed = 0
                self.state = 'going'
            vel = Twist()
            if turn_direction =='left':
                vel.angular.z = turn_speed
            else:
                vel.angular.z = - turn_speed
            vel.angular.x = 0
            vel.angular.y = 0
            vel.linear.x = 0
            vel.linear.y = 0
            vel.linear.z = 0
            self._pub.publish(vel)
        if self.state == 'going':
            # Distance from the waypoint to the line through the robot along
            # its heading (lateral drift check).
            a = math.tan(self.theta + 3.141592/2)
            b = -1
            c = -a*self.low_position + self.col_position
            distance_expected = distance_dot2line(a, b, c, self.path[0], self.path[1])
            distance_now = distance_dot2dot(self.low_position, self.col_position, self.path[0], self.path[1])
            distance_from_destination = distance_dot2dot(self.low_position, self.col_position, self.destination_low, self.destination_col)
            # print 'expected : ', distance_expected, 'now : ', distance_now
            if distance_expected > 1:
                self.state = 'direction_setting'
            if distance_from_destination == 0:
                self.state = 'stop'
            elif distance_now == 0:
                self.state = 'path_finding'
            vel = Twist()
            vel.angular.x = 0
            vel.angular.y = 0
            vel.angular.z = 0
            vel.linear.x = 0.06
            vel.linear.y = 0
            vel.linear.z = 0
            self._pub.publish(vel)
        if self.state == 'stop':
            # Full stop: zero every velocity component.
            vel = Twist()
            vel.angular.x = 0
            vel.angular.y = 0
            vel.angular.z = 0
            vel.linear.x = 0
            vel.linear.y = 0
            vel.linear.z = 0
            self._pub.publish(vel)
    def callback3(self, scan):
        """/scan callback: overlay the laser hits and robot cell on a copy of
        the map image and display it (also registers the click handler)."""
        cv2.namedWindow('SLAM')
        cv2.setMouseCallback('SLAM', self.define_destination)
        img_copy = np.zeros((384, 384, 3), np.uint8)
        np.copyto(img_copy, self.img)
        # Project each beam into map cells (20 cells per metre, rotated by
        # the robot heading) and paint it red.
        for i in range(360):
            low_scan = int(scan.ranges[i] * math.sin(i*3.141592/180 + self.theta) * 20)
            col_scan = int(scan.ranges[i] * math.cos(i*3.141592/180 + self.theta) * 20)
            img_copy[self.low_position - low_scan][self.col_position + col_scan][0] = 0
            img_copy[self.low_position - low_scan][self.col_position + col_scan][1] = 0
            img_copy[self.low_position - low_scan][self.col_position + col_scan][2] = 255
        img_copy = cv2.line(img_copy, (self.col_position, self.low_position), (self.col_position, self.low_position), (0, 0, 255), 2)
        img_large = cv2.resize(img_copy,(1000,1000))
        cv2.imshow("SLAM", img_copy), cv2.waitKey(1)
        cv2.imshow("SLAM_large", img_large), cv2.waitKey(1)
    def tf_listener_map_to_base(self):
        """Background thread: poll the map->base_footprint transform at 10 Hz
        and convert it to grid cell + heading (the +7/+8 offsets look like an
        empirical map-origin correction — TODO confirm)."""
        listener = tf.TransformListener()
        rate = rospy.Rate(10.0)
        while not rospy.is_shutdown():
            try:
                (trans, rot) = listener.lookupTransform('/map', '/base_footprint', rospy.Time(0))
                self.low_position = 192 + int((trans[0]) * 20) + 7
                self.col_position = 192 + int((trans[1]) * 20) + 8
                self.theta = euler_from_quaternion(rot)
                print 'basefootprint', trans, rot
            except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                continue
            rate.sleep()
    def main(self):
        """Start the TF-listener thread and spin the ROS event loop."""
        thread_tf_listener_map_to_base = threading.Thread(target = self.tf_listener_map_to_base)
        thread_tf_listener_map_to_base.start()
        rospy.spin()
if __name__ == '__main__':
    # Start the ROS node and hand control to the pathfinder's main loop.
    rospy.init_node('maze_pathfinder')
    mazesolver = Maze_pathfinder()
    mazesolver.main()
|
import numpy as np
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
from matplotlib import animation
def make_animation1d(x, y, y_, E, optimizer, xlim, ylim, answer, print_error=True, epoch_per_frame=1, **kwargs):
    """Animate 1-D regression training with matplotlib.

    x, y, y_, E appear to be nodes of a small computation-graph framework
    exposing get_result() (inputs, targets, model output, error) — TODO
    confirm.  ``answer`` is the ground-truth function, plotted once; each
    frame runs ``epoch_per_frame`` optimizer.minimize() steps and redraws the
    model's predictions on the training inputs.  Extra kwargs are forwarded
    to matplotlib.animation.FuncAnimation, whose return value is returned.
    """
    train_x = np.squeeze(x.get_result())
    train_y = np.squeeze(y.get_result())
    # Dense grid over xlim for drawing the ground-truth curve.
    test_x = np.expand_dims(np.arange(xlim[0], xlim[1], (xlim[1] - xlim[0]) / 300), 1)
    test_y = answer(test_x)
    fig = plt.figure()
    ax = plt.axes(xlim=xlim, ylim=ylim)
    line, = ax.plot([], [], lw=2)
    ans, = ax.plot(test_x, test_y)
    def animate(i):
        # One frame: train, optionally report the error, redraw predictions.
        for _ in range(epoch_per_frame):
            optimizer.minimize()
        if print_error:
            print('E:', E.get_result())
        line.set_data(train_x, np.squeeze(y_.get_result()))
        return line,
    return animation.FuncAnimation(fig, animate, **kwargs)
def make_animation2d(x, y, y_, E, optimizer, xlim, ylim, print_error=True, epoch_per_frame=1, **kwargs):
    """Animate training of a 2-D binary classifier's decision regions.

    Same node convention as make_animation1d (get_result()/set_result() on
    computation-graph nodes — TODO confirm).  Each frame classifies a
    100x100 grid over (xlim, ylim) with threshold 0.5, scatter-plotting the
    two predicted classes, then runs ``epoch_per_frame`` training steps.
    Extra kwargs are forwarded to matplotlib.animation.FuncAnimation.
    """
    train_x = x.get_result()
    train_y = y.get_result()
    dx = (xlim[1] - xlim[0]) / 100
    dy = (ylim[1] - ylim[0]) / 100
    test_x = []
    for i in np.arange(xlim[0], xlim[1], dx):
        for j in np.arange(ylim[0], ylim[1], dy):
            test_x.append([i, j])
    test_x = np.array(test_x)
    fig = plt.figure()
    ax = plt.axes(xlim=xlim, ylim=ylim)
    red_scatter = ax.scatter([], [])
    blue_scatter = ax.scatter([], [])
    train_scatter = ax.scatter(train_x[:,0], train_x[:,1], c=np.squeeze(train_y))
    def anim_update(i):
        # Temporarily swap the grid in as input to get predictions, then
        # restore the training data before optimizing.
        x.set_result(test_x)
        test_y = y_.get_result().T[0]
        x.set_result(train_x)
        for _ in range(epoch_per_frame):
            optimizer.minimize()
        if print_error:
            print('E:', E.get_result())
        # Split the grid by predicted class at threshold 0.5.
        red = []
        blue = []
        for idx, rex in enumerate(test_x):
            if test_y[idx] > 0.5:
                red.append(rex)
            else:
                blue.append(rex)
        if len(red) > 0:
            red_scatter.set_offsets(np.array(red))
        if len(blue) > 0:
            blue_scatter.set_offsets(np.array(blue))
        return blue_scatter, red_scatter, train_scatter
    return animation.FuncAnimation(fig, anim_update, **kwargs)
|
import logging
import click
from pypgatk.cgenomes.cosmic_downloader import CosmicDownloadService
from pypgatk.toolbox.general import read_yaml_from_file
log = logging.getLogger(__name__)
@click.command('cosmic-downloader', short_help='Command to download the cosmic mutation database')
@click.option('-c', '--config_file', help='Configuration file for the ensembl data downloader pipeline')
@click.option('-o', '--output_directory', help='Output directory for the peptide databases')
@click.option('-u', '--username',
              help="Username for cosmic database -- please if you don't have one register here (https://cancer.sanger.ac.uk/cosmic/register)")
@click.option('-p', '--password',
              help="Password for cosmic database -- please if you don't have one register here (https://cancer.sanger.ac.uk/cosmic/register)")
@click.option("--url_file", help='Add the url to a downloaded file')
@click.pass_context
def cosmic_downloader(ctx, config_file, output_directory, username, password, url_file):
    """Download the COSMIC mutation database.

    Base settings come from the optional YAML config file; any CLI option
    given (output directory, credentials) overrides the corresponding config
    value via the pipeline_arguments dict.
    """
    config_data = None
    if config_file is not None:
        config_data = read_yaml_from_file(config_file)
    # CLI overrides: only set keys for options the user actually provided.
    pipeline_arguments = {}
    if output_directory is not None:
        pipeline_arguments[CosmicDownloadService.CONFIG_OUTPUT_DIRECTORY] = output_directory
    if username is not None:
        pipeline_arguments[CosmicDownloadService.CONFIG_COSMIC_FTP_USER] = username
    if password is not None:
        pipeline_arguments[CosmicDownloadService.CONFIG_COSMIC_FTP_PASSWORD] = password
    cosmic_downloader_service = CosmicDownloadService(config_data, pipeline_arguments)
    cosmic_downloader_service.download_mutation_file(url_file_name=url_file)
|
from porc import Client

# NOTE(review): the API key is hard-coded in source — move it to an
# environment variable or secrets store before committing/publishing.
API_KEY = 'e6e56d2e-b91e-4dc2-aac7-ec6028c378e2'

client = Client(API_KEY)
# Verify the key/connection up front; raises on a non-2xx response.
client.ping().raise_for_status()

# Fetch the 'income' record for ZIP code 97229 and print its median value.
zipcode = client.get('income', 97229)
print(zipcode['median'])
import random
import uuid
# Zadanie 1
# Replace the First Law with its generalised form, prepend the Zeroth Law,
# then print the resulting four laws with their indices.
laws_of_robotics = [
    'A robot may not injure a human being or, through inaction, allow a human being to come to harm.',
    'A robot must obey the orders given it by human beings except where such orders would conflict with the First Law.',
    'A robot must protect its own existence as long as such protection does not conflict with the First or Second Laws.',
]
new_first_law = 'No machine may harm humanity; or, through inaction, allow humanity to come to harm.'
zeroth_law = 'A robot may not injure humanity, or, by inaction, allow humanity to come to harm.'
laws_of_robotics[0] = new_first_law
laws_of_robotics.insert(0, zeroth_law)
print('Four Laws Of Robotics')
for i, law in enumerate(laws_of_robotics):
    print(f'{i}. {law}')
# Zadanie 2
# Find the oldest place and report its encyclopedia description.
places = {
    'Aurora': {
        'age': 20_000,
        'description': 'Originally named New Earth, in later millennia the planet would be renamed "Aurora", which means "dawn", to signify the dawning of a new age for the Spacer culture.'
    },
    'Dahl': {
        'age': 1000,
        'description': 'Dahl is a district of Trantor. It is a small and rather down-trodden sector, which does not necessarily seem politically ambitious.'
    },
    'Cinna': {
        'age': 2000,
        'description': 'Cinna is the native planet of Dors Venabili.'
    },
    'Helicon': {
        'age': 10_000,
        'description': 'Helicon is the native planet of Hari Seldon. As the character says, Helicon is characterized by her Fight Abilities.'
    }
}
max_age = 0
oldest_name = None
for place, place_detail_dict in places.items():
    if place_detail_dict['age'] > max_age:
        max_age = place_detail_dict['age']
        # BUGFIX: was assigned to a misspelled variable (pldest_place),
        # leaving oldest_name forever None in the printed message.
        oldest_name = place
print(
    f"The oldest place we know {oldest_name}. It's description according to encyclopedia: {places[oldest_name]['description']}")
# Zadanie 3
# • Znajdź z którego miejsca (origin) pochodzi najwięcej bohaterów
# • Który z bohaterów ma największą średnią wiedzy z matematyki i historii (średnia kluczy math i history)
# • Utwórz dwie nowe listy: robots i humans, które zawierać będą nazwy bohaterów z kluczami robot: True i False (odpowiednio)
heroes = [
    {
        'name': 'Hari Seldon',
        'robot': True,
        'math': 10,
        'history': 4,
        'origin': 'Helicon'
    },
    {
        'name': 'Yugo Amaryl',
        'robot': False,
        'math': 10,
        'history': 1,
        'origin': 'Dahl'
    },
    {
        'name': 'Dors Venabili',
        'robot': True,
        'math': 3,
        'history': 9,
        'origin': 'Cinna'
    },
    {
        'name': 'R. Daneel Olivaw',
        'robot': True,
        'math': 8,
        'history': 10,
        'origin': 'Aurora'
    },
    {
        'name': 'Raych Seldon',
        'robot': False,
        'math': 3,
        'history': 5,
        'origin': 'Dahl'
    },
]
# Count how many heroes come from each origin, then report the most common one.
origin_count_dict = {}
for hero in heroes:
    origin = hero['origin']
    origin_count_dict[origin] = origin_count_dict.get(origin, 0) + 1
print(origin_count_dict)
# Origins ordered by descending hero count; the first entry wins.
sorted_origins_count = sorted(origin_count_dict, key=origin_count_dict.get, reverse=True)
print(sorted_origins_count[0])
# Split hero names into two lists by the 'robot' flag.
humans = []
robots = []
for hero in heroes:
    (robots if hero['robot'] else humans).append(hero['name'])
print(humans, robots)
# Zadanie 4
# Używając w pokazany sposób funkcji random_planet(), która zwraca słownik opisujący szukaj tak długo, aż znajdziesz planetę spełniającą następujące warunki:
# • has_water o wartości True
# • oxygen_percentage większe od 20 a maksymalnie 25
# • Is_solid o wartości True
# TIP: https://www.w3schools.com/python/python_while_loops.asp
def random_planet():
    """Return a randomly generated planet record: unique name, water flag,
    oxygen percentage (0-100) and solidity flag."""
    return {
        'name': f'planet{uuid.uuid4()}',
        'has_water': random.choice((True, False,)),
        'oxygen_percentage': random.randint(0, 100),
        'is_solid': random.choice((True, False,)),
    }


def _is_perfect(planet):
    # Task spec: has_water, is_solid, and oxygen STRICTLY above 20 up to 25.
    # BUGFIX: the second search loop used `20 <=`, contradicting both the
    # spec ("większe od 20") and the first loop; both now share this test.
    return planet['has_water'] and planet['is_solid'] and (20 < planet['oxygen_percentage'] <= 25)


# Variant 1: flag-controlled loop.
perfect_planet, my_planet = False, {}
while not perfect_planet:
    my_planet = random_planet()
    # print(f'Random planet:{my_planet}')
    perfect_planet = _is_perfect(my_planet)
print(my_planet)
# Variant 2 (INACZEJ): the test lives in the while condition itself.
my_planet = random_planet()
while not _is_perfect(my_planet):
    my_planet = random_planet()
print(my_planet)
from envparse import Env
from jinja2 import Environment, FileSystemLoader
# Render the nginx default.conf from its Jinja2 template, with feature flags
# taken from environment variables.
template_env = Environment(loader=FileSystemLoader('/usr/local/docker'))
template = template_env.get_template('default.conf.j2')
env = Env()
context = {
    'NGINX_DEV': env.bool('NGINX_DEV', default=False),
    'NGINX_ENABLE_PROXY_HEADERS': env.bool('NGINX_ENABLE_PROXY_HEADERS', default=False),
}
# BUGFIX: Template.render() returns str; writing str to a file opened in
# binary mode ("wb") raises TypeError under Python 3. Open in text mode.
with open("/etc/nginx/conf.d/default.conf", "w") as fh:
    fh.write(template.render(**context))
|
import logging
import os
import time
from urllib.parse import urljoin
from django.conf import settings
from django.core.cache import cache
from django.http import JsonResponse
from common import utils, errors, config
from libs.http import render_json
from user import logic
from user.forms import ProfileForm
from user.models import User, Profile
logger = logging.getLogger('inf')
def verify_phone(request):
    """Validate the posted phone number and send it a verification code.

    Returns PhoneNumError for a malformed number, SmsSendError when the
    SMS could not be sent, and an empty success payload otherwise.
    """
    phone_num = request.POST.get('phone_num', '').strip()
    if not utils.is_phone_num(phone_num):
        return render_json(code=errors.PhoneNumError.code)
    if logic.send_verify_code(phone_num):
        return render_json()
    return render_json(code=errors.SmsSendError.code)
def login(request):
    """Log a user in via SMS code, registering the phone number on first use.

    Checks the submitted code against the cached verification code, then
    fetches or creates the User and stores its id in the session.
    """
    phone_num = request.POST.get('phone_num', '').strip()
    code = request.POST.get('code', '').strip()
    # 1. Check the verification code against the cache.
    cached_code = cache.get(config.VERIFY_CODE_CACHE_PREFIX % phone_num)
    if cached_code != code:
        return render_json(code=errors.VerifyCodeError.code)
    # 2. Log in or register.
    # BUG FIX: Django model managers live on `.objects`; the bare
    # `User.get_or_create(...)` raised AttributeError (cf. the
    # `User.objects.create(...)` pattern used previously in this module).
    user, created = User.objects.get_or_create(phonenum=phone_num)
    # NOTE(review): a one-to-one Profile row keyed by user.id used to be
    # created alongside the User — confirm a signal or Profile default now
    # covers newly created users.
    request.session['uid'] = user.id
    # Lazy %-style args keep formatting off the hot path.
    logger.info('user.login, uid:%s', user.id)
    return render_json(data=user.to_dict())
def get_profile(request):
    """Return the current user's profile data using a cache-aside pattern."""
    user = request.user
    # 1. Try the cache first.
    key = config.PROFILE_DATA_CACHE_PREFIX % user.id
    profile_data = cache.get(key)
    logger.debug('get from cache')
    # 2. On a miss, rebuild the payload from the database.
    if profile_data is None:
        profile = user.profile
        profile_data = profile.to_dict(exclude=['vibration', 'only_matche', 'auto_play'])
        logger.debug('get from DB')
    # 3. (Re)store the payload in the cache.
    # FIX: removed a leftover debug `print('get from cache')` that wrote to
    # stdout on every request.
    cache.set(key, profile_data)
    logger.debug('set cache')
    return render_json(data=profile_data)
def set_profile(request):
    """Update the user's profile from POST data and refresh its cache entry."""
    user = request.user
    form = ProfileForm(request.POST, instance=user.profile)
    if not form.is_valid():
        return render_json(data=form.errors)
    profile = form.save()
    # Write-through: refresh the cached payload after a successful save so
    # the next read is a guaranteed hit (deleting the key would also work).
    key = config.PROFILE_DATA_CACHE_PREFIX % user.id
    payload = profile.to_dict(exclude=['vibration', 'only_matche', 'auto_play'])
    cache.set(key, payload)
    return render_json()
def upload_avatar(request):
    """Accept an uploaded avatar file and hand it off for async storage."""
    user = request.user
    avatar = request.FILES.get('avatar')
    # Storage (previously a local MEDIA_ROOT write) is delegated to an
    # asynchronous task in the logic layer.
    if logic.async_upload_avatar(user, avatar):
        return render_json()
    return render_json(code=errors.AvatarUploadError.code)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import math
import os
import xml.etree.ElementTree as xml
import cv2
from . import common as com
from .wbia_object import IBEIS_Object
class IBEIS_Image(object): # NOQA
    def __init__(ibsi, filename_xml, absolute_dataset_path, **kwargs):
        """Parse one PASCAL-VOC style annotation XML into an image record.

        Reads folder/filename/source/size metadata, builds IBEIS_Object
        instances for each annotated <object>, and optionally mines
        negative boxes and sliding-window patches.

        kwargs consumed here: object_min_width, object_min_height,
        mine_exclude_categories, mine_negatives, mine_max_attempts,
        mine_max_keep, mine_width_min/max, mine_height_min/max,
        mine_overlap_margin, mine_patches, mine_patch_width/height,
        mine_patch_stride_suggested, mine_patch_overlap_margin.
        """
        with open(filename_xml, 'r') as _xml:
            # Newlines are stripped so text nodes parse as single strings.
            _xml = xml.XML(_xml.read().replace('\n', ''))
            ibsi.folder = com.get(_xml, 'folder')
            ibsi.absolute_dataset_path = absolute_dataset_path
            ibsi.filename = com.get(_xml, 'filename')
            source = com.get(_xml, 'source', text=False)
            ibsi.source_database = com.get(source, 'database')
            ibsi.source_annotation = com.get(source, 'annotation')
            ibsi.source_image = com.get(source, 'image')
            size = com.get(_xml, 'size', text=False)
            ibsi.width = int(com.get(size, 'width'))
            ibsi.height = int(com.get(size, 'height'))
            try:
                ibsi.depth = int(com.get(size, 'depth'))
            except TypeError:
                # Missing <depth> tag: assume 3-channel color.
                ibsi.depth = 3
            ibsi.segmented = com.get(size, 'segmented') == '1'
            ibsi.objects = []
            ibsi.objects_patches = []
            ibsi.objects_invalid = []
            # Size-filter annotated objects into valid vs. invalid lists.
            for obj in com.get(_xml, 'object', text=False, singularize=False):
                temp = IBEIS_Object(obj, ibsi.width, ibsi.height)
                if (
                    temp.width > kwargs['object_min_width']
                    and temp.height > kwargs['object_min_height']
                ):
                    ibsi.objects.append(temp)
                else:
                    ibsi.objects_invalid.append(temp)
            # Skip negative mining entirely if the image contains any
            # excluded category.
            flag = True
            for cat in ibsi.categories():
                if cat in kwargs['mine_exclude_categories']:
                    flag = False
            if kwargs['mine_negatives'] and flag:
                # Rejection-sample random boxes that do not overlap any
                # annotated object; keep at most mine_max_keep of them.
                negatives = 0
                for i in range(kwargs['mine_max_attempts']):
                    if negatives >= kwargs['mine_max_keep']:
                        break
                    width = com.randInt(
                        kwargs['mine_width_min'],
                        min(ibsi.width - 1, kwargs['mine_width_max']),
                    )
                    height = com.randInt(
                        kwargs['mine_height_min'],
                        min(ibsi.height - 1, kwargs['mine_height_max']),
                    )
                    x = com.randInt(0, ibsi.width - width - 1)
                    y = com.randInt(0, ibsi.height - height - 1)
                    obj = {
                        'xmax': x + width,
                        'xmin': x,
                        'ymax': y + height,
                        'ymin': y,
                    }
                    overlap_names = ibsi._overlaps(
                        ibsi.objects, obj, kwargs['mine_overlap_margin']
                    )
                    if len(overlap_names) > 0:
                        continue
                    ibsi.objects.append(
                        IBEIS_Object(obj, ibsi.width, ibsi.height, name='MINED')
                    )
                    negatives += 1
            if kwargs['mine_patches']:
                # Tile the image with fixed-size patches on an (at most)
                # suggested stride, labelling each patch by whichever
                # annotated objects it overlaps, or 'NEGATIVE'.
                patch_width = kwargs['mine_patch_width']
                patch_height = kwargs['mine_patch_height']
                x_length = float(ibsi.width - patch_width - 1)
                y_length = float(ibsi.height - patch_height - 1)
                x_bins = int(x_length / kwargs['mine_patch_stride_suggested'])
                y_bins = int(y_length / kwargs['mine_patch_stride_suggested'])
                x_bins = max(1, x_bins)
                y_bins = max(1, y_bins)
                patch_stride_x = x_length / x_bins
                patch_stride_y = y_length / y_bins
                # ibsi.show()
                for x in range(x_bins + 1):
                    for y in range(y_bins + 1):
                        x_min = int(x * patch_stride_x)
                        y_min = int(y * patch_stride_y)
                        x_max = x_min + patch_width
                        y_max = y_min + patch_height
                        assert (
                            0 <= x_min
                            and x_max < ibsi.width
                            and 0 <= y_min
                            and y_max < ibsi.height
                        )
                        # Add patch
                        obj = {
                            'xmax': x_max,
                            'xmin': x_min,
                            'ymax': y_max,
                            'ymin': y_min,
                        }
                        overlap_names = ibsi._overlaps(
                            ibsi.objects, obj, kwargs['mine_patch_overlap_margin']
                        )
                        if len(overlap_names) > 0:
                            # One patch record per overlapped object name.
                            for overlap_name in overlap_names:
                                name = '%s' % overlap_name.upper()
                                ibsi.objects_patches.append(
                                    IBEIS_Object(obj, ibsi.width, ibsi.height, name=name)
                                )
                        else:
                            ibsi.objects_patches.append(
                                IBEIS_Object(
                                    obj, ibsi.width, ibsi.height, name='NEGATIVE'
                                )
                            )
def __str__(ibsi):
return '<IBEIS Image Object | %s | %d objects>' % (
ibsi.filename,
len(ibsi.objects),
)
    def __repr__(ibsi):
        """Terse representation showing only the filename."""
        return '<IBEIS Image Object | %s>' % (ibsi.filename)
    def __len__(ibsi):
        """Number of valid (size-filtered) annotated objects."""
        return len(ibsi.objects)
def __lt__(ibsi1, ibsi2):
if ibsi1.filename < ibsi2.filename:
return -1
if ibsi1.filename < ibsi2.filename:
return 1
return 0
def _distance(pt1, pt2):
(x1, y1) = pt1
(x2, y2) = pt2
return math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
def _overlaps(
ibsi, objects, obj, margin=0.50, bins=['left', 'front', 'right', 'back']
):
names = []
for _obj in objects:
x_overlap = max(0, min(obj['xmax'], _obj.xmax) - max(obj['xmin'], _obj.xmin))
y_overlap = max(0, min(obj['ymax'], _obj.ymax) - max(obj['ymin'], _obj.ymin))
area_overlap = float(x_overlap * y_overlap)
width = obj['xmax'] - obj['xmin']
height = obj['ymax'] - obj['ymin']
area_total = min(width * height, _obj.area)
score = area_overlap / area_total
# print(score)
if score >= margin:
names.append(_obj.name + ':' + _obj.pose_str)
return list(set(names))
    def image_path(ibsi):
        """Absolute path of the image file under the dataset's JPEGImages dir."""
        return os.path.join(ibsi.absolute_dataset_path, 'JPEGImages', ibsi.filename)
def categories(ibsi, unique=True, sorted_=True, patches=False):
temp = [_object.name for _object in ibsi.objects]
if patches:
temp += [_object.name for _object in ibsi.objects_patches]
if unique:
temp = list(set(temp))
if sorted_:
temp = sorted(temp)
return temp
    def bounding_boxes(ibsi, **kwargs):
        """Bounding boxes (via IBEIS_Object.bounding_box) for all valid objects."""
        return [_object.bounding_box(**kwargs) for _object in ibsi.objects]
    def _accuracy_match(ibsi, prediction, object_list):
        """Match one non-suppressed prediction to the ground-truth object
        with the highest IoU, breaking exact score ties by center distance.

        Returns:
            (index_best, score_best): index into object_list (None when the
            list is empty) and the winning IoU score.
        """
        # For this non-supressed prediction, compute and assign to the closest bndbox
        centerx, centery, minx, miny, maxx, maxy, confidence, supressed = prediction
        index_best = None
        score_best = -1.0
        for index, _object in enumerate(object_list):
            width = maxx - minx
            height = maxy - miny
            x_overlap = max(0, min(maxx, _object.xmax) - max(minx, _object.xmin))
            y_overlap = max(0, min(maxy, _object.ymax) - max(miny, _object.ymin))
            area_overlap = float(x_overlap * y_overlap)
            area_total = (width * height) + _object.area
            # Intersection over union: area_total double-counts the overlap,
            # so subtract it from the denominator.
            score = area_overlap / (area_total - area_overlap)
            if score >= score_best:
                # Wooo! Found a (probably) better candidate, but...
                if score == score_best:
                    # Well, this is awkward?
                    assert index_best is not None  # Just to be sure
                    _object_best = object_list[index_best]
                    # NOTE(review): these bound calls supply the instance plus
                    # two points, so _distance must accept three parameters.
                    a = ibsi._distance(
                        (centerx, centery), (_object_best.xcenter, _object_best.ycenter)
                    )
                    b = ibsi._distance(
                        (centerx, centery), (_object.xcenter, _object.ycenter)
                    )
                    if a < b:
                        # Not a better candidate based on distance
                        continue
                    elif a == b:
                        # First come, first serve
                        continue
                # Save new best
                score_best = score
                index_best = index
        return index_best, score_best
    def accuracy(ibsi, prediction_list, category, alpha=0.5):
        """Score predictions for `category` against this image's ground truth.

        Returns:
            (score, TP, FP, FN) where score = TP / (TP + FP + FN).

        NOTE(review): despite the local names, `precision` and `recall`
        below hold the numerator and denominator of that single ratio --
        they are not the usual precision/recall statistics.
        """
        # PASCAL ACCURACY MEASUREMENT
        object_list = []
        for _object in ibsi.objects + ibsi.objects_invalid:
            if _object.name == category:
                object_list.append(_object)
        # Trivial case
        if len(object_list) == 0 and len(prediction_list) == 0:
            return 1.0, 0.0, 0.0, 0.0
        true_positive = 0
        false_positive = 0
        counters = [0] * len(object_list)
        for prediction in prediction_list:
            centerx, centery, minx, miny, maxx, maxy, confidence, supressed = prediction
            if supressed == 0.0:
                index_best, score_best = ibsi._accuracy_match(prediction, object_list)
                # A match only counts when its IoU reaches the alpha threshold.
                if score_best >= alpha:
                    counters[index_best] += 1
                    true_positive += 1
                else:
                    false_positive += 1
        # Ground-truth boxes never matched are false negatives.
        false_negative = counters.count(0)
        precision = float(true_positive)
        recall = true_positive + false_positive + false_negative
        assert recall != 0
        return precision / recall, true_positive, false_positive, false_negative
    def show(
        ibsi,
        objects=True,
        parts=True,
        display=True,
        prediction_list=None,
        category=None,
        alpha=0.5,
        label=True,
    ):
        """Draw annotations (valid objects, invalid objects, mined patches,
        and optionally predictions for `category`) onto the image.

        When `display` is True the result is shown in an OpenCV window,
        otherwise the annotated image array is returned.

        NOTE(review): the `objects` flag is accepted but never consulted
        below (object boxes are always drawn) -- confirm intent.
        """

        def _draw_box(
            img, annotation, xmin, ymin, xmax, ymax, color, stroke=2, position='top'
        ):
            # Closure: `label` from the enclosing call toggles the text chip.
            font = cv2.FONT_HERSHEY_SIMPLEX
            scale = 0.5
            width, height = cv2.getTextSize(annotation, font, scale, -1)[0]
            cv2.rectangle(img, (xmin, ymin), (xmax, ymax), color, stroke)
            if label:
                if position in ['top']:
                    cv2.rectangle(
                        img, (xmin, ymin), (xmin + width, ymin + height), color, -1
                    )
                    cv2.putText(
                        img,
                        annotation,
                        (xmin + 5, ymin + height),
                        font,
                        0.4,
                        (255, 255, 255),
                    )
                elif position in ['bottom']:
                    cv2.rectangle(
                        img, (xmin, ymax - height), (xmin + width, ymax), color, -1
                    )
                    cv2.putText(
                        img, annotation, (xmin + 5, ymax), font, 0.4, (255, 255, 255)
                    )

        original = com.openImage(ibsi.image_path(), color=True)
        color_dict = {}
        # Valid objects: a random color each, remembered for the prediction
        # overlay further down.
        for _object in ibsi.objects:
            color = com.randColor()
            color_dict[_object] = color
            _draw_box(
                original,
                _object.name.upper(),
                _object.xmin,
                _object.ymin,
                _object.xmax,
                _object.ymax,
                color,
            )
            if parts:
                for part in _object.parts:
                    _draw_box(
                        original,
                        part.name.upper(),
                        part.xmin,
                        part.ymin,
                        part.xmax,
                        part.ymax,
                        color,
                    )
        # Invalid (size-filtered) objects: drawn in black.
        for _object in ibsi.objects_invalid:
            color = [0, 0, 0]
            color_dict[_object] = color
            _draw_box(
                original,
                _object.name.upper(),
                _object.xmin,
                _object.ymin,
                _object.xmax,
                _object.ymax,
                color,
            )
            if parts:
                for part in _object.parts:
                    _draw_box(
                        original,
                        part.name.upper(),
                        part.xmin,
                        part.ymin,
                        part.xmax,
                        part.ymax,
                        color,
                    )
        # Mined patches: NEGATIVE patches are skipped, the rest drawn in a
        # fixed color.
        for _object in ibsi.objects_patches:
            if _object.name.upper() == 'NEGATIVE':
                continue
                # NOTE(review): unreachable -- follows `continue`.
                color = [255, 0, 0]
            else:
                color = [0, 0, 255]
            color_dict[_object] = color
            _draw_box(
                original,
                _object.name.upper(),
                _object.xmin,
                _object.ymin,
                _object.xmax,
                _object.ymax,
                color,
            )
            if parts:
                for part in _object.parts:
                    _draw_box(
                        original,
                        part.name.upper(),
                        part.xmin,
                        part.ymin,
                        part.xmax,
                        part.ymax,
                        color,
                    )
        # Optional prediction overlay for a single category.
        if prediction_list is not None:
            assert category is not None
            object_list = []
            for _object in ibsi.objects + ibsi.objects_invalid:
                if _object.name == category:
                    object_list.append(_object)
            for prediction in prediction_list:
                (
                    centerx,
                    centery,
                    minx,
                    miny,
                    maxx,
                    maxy,
                    confidence,
                    supressed,
                ) = prediction
                if supressed == 0.0:
                    if len(object_list) > 0:
                        index_best, score_best = ibsi._accuracy_match(
                            prediction, object_list
                        )
                        _object_best = object_list[index_best]
                        color = color_dict[_object_best]
                        if score_best >= alpha:
                            annotation = 'DETECT [TRUE POS %.2f]' % score_best
                        else:
                            annotation = 'DETECT [FALSE POS %.2f]' % score_best
                        # Connect each prediction corner to its matched
                        # ground-truth corner.
                        cv2.line(
                            original,
                            (int(minx), int(miny)),
                            (_object_best.xmin, _object_best.ymin),
                            color,
                            1,
                        )
                        cv2.line(
                            original,
                            (int(minx), int(maxy)),
                            (_object_best.xmin, _object_best.ymax),
                            color,
                            1,
                        )
                        cv2.line(
                            original,
                            (int(maxx), int(miny)),
                            (_object_best.xmax, _object_best.ymin),
                            color,
                            1,
                        )
                        cv2.line(
                            original,
                            (int(maxx), int(maxy)),
                            (_object_best.xmax, _object_best.ymax),
                            color,
                            1,
                        )
                    else:
                        annotation = 'DETECT [FALSE POS]'
                        color = [0, 0, 255]
                    _draw_box(
                        original,
                        annotation,
                        int(minx),
                        int(miny),
                        int(maxx),
                        int(maxy),
                        color,
                        stroke=1,
                        position=False,
                    )
        if display:
            cv2.imshow(ibsi.filename + ' with Bounding Boxes', original)
            cv2.waitKey(0)
            cv2.destroyAllWindows()
        else:
            return original
|
from io import open
import re
from setuptools import setup, find_packages
def find_version(pth):
    """Extract the `__version__` string from the module file at `pth`.

    Raises RuntimeError if no version assignment is found.
    """
    with open(pth, encoding='utf8') as f:
        source = f.read()
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", source, re.M)
    if match:
        return match.group(1)
    raise RuntimeError("Unable to find version string.")
# Package metadata for the PyCon APAC 2015 web tooling.
setup(
    name='pyapac-webtool',
    version=find_version('pyapacweb.py'),
    license='MIT',
    description='Web content tools for PyCon APAC 2015',
    author='PyCon APAC 2015 organizers',
    author_email='organizers@pycon.tw',
    url='https://github.com/ccwang002/pyapac_web_uploader',
    classifiers=[
        # BUG FIX: 'Development Status :: 5 - Stable' is not a valid trove
        # classifier; the accepted form is '5 - Production/Stable'.
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
    ],
    keywords=['pycon', 'apac', '2015', 'taiwan'],
    install_requires=[
        'requests>2.5', 'beautifulsoup4>4.3', 'click', 'six',
        'pandas', 'lxml',  # for review and stat
    ],
    extras_require={
        # Backport of pathlib for interpreters that predate it.
        ':python_version=="2.7"': ['pathlib'],
        ':python_version=="3.3"': ['pathlib'],
    },
    packages=find_packages(
        exclude=[
            'contrib', 'docs', 'examples', 'deps',
            '*.tests', '*.tests.*', 'tests.*', 'tests',
        ]
    ),
    py_modules=['pyapacweb'],
    test_suite='nose.collector',
    entry_points={
        'console_scripts': [
            'pyapac-web = pyapacweb:cli',
        ]
    },
)
|
# Read a grade; when the user answers "S", add a 10% bonus to it.
nota = float(input("digite a nota"))
mensagem = input("mensagem")
acrescimo = 10 / 100 * nota
resposta = nota + acrescimo if mensagem.upper() == "S" else nota
print(resposta) |
import requests
# Form payload; replace the placeholder values before use.
data = {
    'data1': 'XXXXX',
    'data2': 'XXXXX'
}
url = 'xxx'
# Requests: `data=` takes a dict (form-encoded); use `json=` for a JSON body.
response = requests.post(url=url, data=data) |
# Standard library
import sys
import time
import urllib2
import webbrowser

# Third-party
import clipboard
import tinyurl
from pyshorteners import Shortener
#------------------------HELPER UTILITIES--------------------
def getOnlyQRCode(shortened_url):
    """Open a Google Charts QR code image for `shortened_url` in the browser."""
    url = "http://chart.apis.google.com/chart?cht=qr&chl={}&chs=120x120".format(shortened_url)
    print "\n\tQRCode is on the URL: {}".format(url)
    webbrowser.open_new_tab(url)
    time.sleep(2)
def printShortener(shortener, url):
    """Shorten `url` with `shortener`, copy it to the clipboard, and
    optionally fetch its QR code."""
    #print "\n\tinside print."
    try:
        getshorted = shortener.short(url)
        print "\n\tShortened url is {}".format(getshorted)
        clipboard.copy(getshorted)
        print "\n\tDone, your shortened url is on clipboard.!"
        print "\n\tLaunch your browser and use 'Command-V' OR 'Ctrl + V' to paste the link in \n\tyour browser."
        time.sleep(5)
        print "\n\tWant to Fetch QRCode? press 1 else press 0"
        # NOTE(security): Python 2 input() evaluates the typed text;
        # int(raw_input(...)) would be safer.
        choice=int(input("\n\t"))
        if choice == 1:
            getQRCode(shortener,url)
        elif choice == 0:
            return
        else:
            print "Error!"
            return
    except Exception as e:
        # Broad catch: any provider/network error is printed, not raised.
        print str(e)
def getQRCode(shortener, url):
    """Open the provider-generated QR code for `url` in a browser tab,
    falling back to copying the short link to the clipboard."""
    # short() is called first so the provider has a link to encode.
    shortener.short(url)
    print "\n\tQRCode is on the URL: {}".format(shortener.qrcode())
    try:
        webbrowser.open_new_tab(shortener.qrcode())
        time.sleep(2)
    except Exception as e:
        print "\n\tLaunch your browser and use 'Command-V' OR 'Ctrl + V' to paste the link in \n\t your browser."
        clipboard.copy(shortener.short(url))
        time.sleep(2)
#you could also save your qrcode locally by simply calling urllib on the image url
#----------------------- MAIN FUNCTIONS --------------------
def googleShortener(url):
    """Shorten `url` via the Google shortener service."""
    # NOTE(security): hard-coded API key checked into source -- load it
    # from the environment or a config file instead.
    key = 'AIzaSyAkWqqAmotOf98k421TC3PetlPbeZlXoEA'
    shortener = Shortener('Google', api_key = key)
    printShortener(shortener,url)
def bitlyShortener(url):
    """Shorten `url` via the Bitly service."""
    # NOTE(security): hard-coded access token checked into source -- load
    # it from the environment or a config file instead.
    access_token = '03cf036ff2a4aa31b93c369af9e33478ddd44f02'
    shortener = Shortener('Bitly', bitly_token = access_token)
    printShortener(shortener,url)
def tinyurlShortener(url):
    """Shorten `url` via TinyURL, copy it to the clipboard, and optionally
    show a QR code."""
    shortened = tinyurl.create_one(url)
    print "\n\tShortened url is {}".format(shortened)
    clipboard.copy(shortened)
    print "\n\tDone, your shortened url is on clipboard.!"
    print "\n\tLaunch your browser and use 'Command-V' OR 'Ctrl + V' to paste the link in\n\tyour browser."
    time.sleep(5)
    print "\n\tWant to Fetch QRCode? press 1 else press 0"
    # NOTE(security): Python 2 input() evaluates the typed text;
    # int(raw_input(...)) would be safer.
    choice=int(input("\n\t"))
    if choice == 1:
        getOnlyQRCode(shortened)
    elif choice == 0:
        return
    else:
        print "Error!"
        return
def isgdShortener(tourl):
    """Shorten `tourl` via the is.gd simple API, copy the result to the
    clipboard, and optionally show a QR code."""
    # NOTE(review): `tourl` is interpolated without percent-encoding; URLs
    # containing '&' or '#' will be truncated -- consider urllib.quote().
    url = 'https://is.gd/create.php?format=simple&url={}'.format(tourl)
    shortened = urllib2.urlopen(url).read()
    print "\n\tShortened url is {}".format(shortened)
    clipboard.copy(shortened)
    print "\n\tDone, your shortened url is on clipboard.!"
    print "\n\tLaunch your browser and use 'Command-V' OR 'Ctrl + V' to paste the link in\n\tyour browser."
    time.sleep(5)
    print "\n\tWant to Fetch QRCode? press 1 else press 0"
    # NOTE(security): Python 2 input() evaluates the typed text;
    # int(raw_input(...)) would be safer.
    choice=int(input("\n\t"))
    if choice == 1:
        getOnlyQRCode(shortened)
    elif choice == 0:
        return
    else:
        print "Error!"
        return
'''
def adflyShortener(tourl):
UID = 18844965
API_KEY = 'd8a2283a6bbafbe31b442776fdc108ab'
url = 'http://api.adf.ly/api.php?key={}&uid={}&advert_type=int&domain=adf.ly&url={}'.format(API_KEY,UID,tourl)
r = urllib2.urlopen(url).read()
print r
'''
def main():
    """Interactive entry point: pick a shortener service, obtain a URL
    (typed or taken from the clipboard), and dispatch to it."""
    print "\n\tList of URL Shortener Services:\n\t\t1. Google\n\t\t2. Bit.ly\n\t\t3. TinyURL\n\t\t4. IS.GD"
    try:
        choice = int(raw_input("\n\tChoose from any of the services mentioned above: "))
        print "\n\tTo enter url, you can type manually in your console or else you can copy the url using 'Command-V' or 'Ctrl + V'\n\tfrom browser."
        print "\n\t1. Manually in console\n\t2. Copy from browser\t"
        urlchoice = int(raw_input("\n\tEnter choice: "))
        if urlchoice == 1:
            print "\n\tEnter url to be shortened: ",
            url = str(raw_input(""))
        elif urlchoice == 2:
            # Give the user a moment to copy the URL from the address bar.
            print "\tYou have five seconds..copy the url from address bar you wish to shorten!"
            time.sleep(5)
            url = clipboard.paste()
            print "\n\tYour url is: {}".format(url)
        else:
            print "\n\tInvalid Option.! Quitting.."
            time.sleep(1)
            sys.exit(0)
        # Dispatch to the selected shortener backend.
        if choice == 1:
            googleShortener(url)
        elif choice == 2:
            bitlyShortener(url)
        elif choice == 3:
            tinyurlShortener(url)
        elif choice == 4:
            isgdShortener(url)
        else:
            print "Invalid Service."
    except Exception as e:
        # Broad catch: report any failure instead of crashing the CLI.
        print str(e)
if __name__ == '__main__':
main() |
#!/usr/bin/env python
# Start a MessageBird conversation and send a text message, driven by
# required command-line arguments.
import argparse
import messagebird
from messagebird.conversation_message import MESSAGE_TYPE_TEXT
parser = argparse.ArgumentParser()
parser.add_argument('--accessKey', help='access key for MessageBird API', type=str, required=True)
parser.add_argument('--channelId', help='channel that you want to start a conversation', type=str, required=True)
parser.add_argument('--phoneNumber', help='phone number that you want to send a message', type=str, required=True)
parser.add_argument('--textMessage', help='text that you want to send', type=str, required=True)
args = vars(parser.parse_args())
try:
    client = messagebird.Client(args['accessKey'])
    # Starts the conversation and delivers the first text message in one call.
    msg = client.conversation_start(
        {'channelId': args['channelId'], 'to': args['phoneNumber'], 'type': MESSAGE_TYPE_TEXT,
         'content': {'text': args['textMessage']}})
    # Print the object information.
    print('The following information was returned as a Conversation object:')
    print(msg)
except messagebird.client.ErrorException as e:
    # API errors carry a list of structured error entries.
    print('An error occured while requesting a Message object:')
    for error in e.errors:
        print('  code        : %d' % error.code)
        print('  description : %s' % error.description)
        print('  parameter   : %s\n' % error.parameter)
|
from __future__ import annotations
from sympy.core import Basic
from sympy import Matrix, Symbol, sympify, ones
from typing import Tuple, Union
from copy import deepcopy
from functools import cmp_to_key
import re
from ._methods import (
_cartan_matrix,
_cocartan_matrix,
_quadratic_form,
_reflection_matricies,
_annotate_matrix,
Basis,
_basis_lookup
)
from ._backend import create_backend
class NumericSymbol(Symbol):
    """Extension of Sympy symbol that allows
    latex formatting but also tracks the underlying
    integer value. Useful for dimension representations
    of irreps"""

    def __new__(cls, dim: int, fmtted_dim: str):
        obj = super().__new__(cls, fmtted_dim)
        # Keep the raw integer dimension alongside the formatted label.
        obj.numeric_dim = int(dim)
        return obj

    @classmethod
    def from_symbol(cls, symbol: Symbol):
        """Converts from sympy.Symbol into NumericSymbol by
        regex search for digits from latex display pattern and
        returns a NumericSymbol. Raises ValueError if no numeric is
        present in the symbol.
        """
        try:
            # str() is the idiomatic spelling of symbol.__str__().
            s = str(symbol)
            num = re.findall(r"\d+", s)[0]
            return cls(int(num), s)
        except (IndexError, ValueError) as err:
            # FIX: chain the original exception so the failure context
            # (missing digits vs. bad int) is preserved in tracebacks.
            raise ValueError("Could not extract numerical from sympy.Symbol") from err
class LieAlgebra(Basic):
"""The base class for all lie algebras. The methods and properties
in this class are basis independent and apply in a general sense. In
order to write down the roots as matricies and vectors, we choose a
representation.
"""
    def __new__(cls, series: str, rank: int):
        """
        Returns a new instance of a Sympy object
        Args:
            series (str): The series type
            rank (int): The rank of the algebra
        """
        # Only the rank participates in sympy's args/hashing; the series
        # letter is stored as a plain attribute.
        obj = super().__new__(cls, sympify(rank))
        obj._series = series
        return obj
    def __init__(self, *args, **kwargs):
        """Used to set lazy properties.

        All caches start as None and are filled on first access by the
        corresponding memoized property.
        """
        self._simple_roots = None
        self._positive_roots = None
        self._cartan_matrix = None
        self._omega_matrix = None
        self._quadratic_form = None
        self._cocartan_matrix = None
        self._reflection_matricies = None
        self._fundamental_weights = None
        self._backend = None
        self._root_system = None
        self._adjoint_casimir = None
    @property
    def series(self) -> str:
        """Algebra series type (e.g. "A", "B", ...)."""
        return self._series
    @property
    def rank(self) -> int:
        """Algebra rank (stored as the single sympy arg set in __new__)."""
        return self.args[0]
    @property
    def dimension(self) -> int:
        """Algebra dimension
        Abstract
        """
        # Abstract: subclasses override; the base implementation returns None.
    @property
    def n_pos_roots(self) -> int:
        """Total number of positive roots in the algebra
        Abstract
        """
        # Abstract: subclasses override; the base implementation returns None.
    @property
    def n_roots(self) -> int:
        """Total number of roots in the algebra (positive + negative + rank zeros)."""
        return 2 * self.n_pos_roots + self.rank
    @property
    def simple_roots(self) -> list[Matrix]:
        """Returns a list of Sympy matrix (1,dimension)
        objects representing a chosen basis of the algebra.
        Basis: Orthogonal
        This method can be overridden to choose your own basis,
        be sure to do this before any other properties are called
        as they are lazily evaluated and the simple_roots define
        the entire representation of the algebra.
        Examples
        ========
        .. code-block:: python
            from liesym import F4
            algebra = F4()
            my_simple_roots = [
                # my basis
            ]
            algebra.simple_roots = my_simple_roots
        """
        # Re-annotate on every access so returned matrices carry a basis tag.
        return [_annotate_matrix(x) for x in self._simple_roots]
    @simple_roots.setter
    def simple_roots(self, val: list[Matrix]):
        """Overrides the default representation of the algebras simple_roots.
        Please ensure that roots are in Orthogonal Basis
        """
        # NOTE(review): validation via `assert` is stripped under `python -O`;
        # consider raising ValueError instead.
        assert len(val) == len(
            self._simple_roots), "Incorrect number of simple roots"
        self._simple_roots = val
@property
def cartan_matrix(self) -> Matrix:
r"""For a given simple Lie algebra the elements $a_{ij}$ can be
generated by
.. math::
a_{ji} = 2 \langle\alpha_i, \alpha_j\rangle / \langle\alpha_j, \alpha_j\rangle
where $a_i$ is the i'th simple root and $\langle,\rangle$ is the scalar product.
Sources:
- https://en.wikipedia.org/wiki/Cartan_matrix
- https://mathworld.wolfram.com/CartanMatrix.html
Returns:
Matrix: Cartan Matrix as a Sympy object
"""
if self._cartan_matrix is None:
self._cartan_matrix = _cartan_matrix(self.simple_roots)
return self._cartan_matrix
@property
def cocartan_matrix(self) -> Matrix:
"""The cocartan matrix rows are generated from the coroots of
the algebra such that multiplication by a simple root will
generate a row of the cartan matrix.
Returns:
Matrix: Cocartan Matrix as a Sympy object
"""
if self._cocartan_matrix is None:
self._cocartan_matrix = _cocartan_matrix(self.simple_roots)
return self._cocartan_matrix
@property
def omega_matrix(self) -> Matrix:
"""The rows of the omega matrix are the fundamental weights
of the algebra.
Returns:
Matrix: Omega Matrix as a Sympy object
"""
if self._omega_matrix is None:
self._omega_matrix = self.cocartan_matrix.pinv().T
return self._omega_matrix
@property
def metric_tensor(self) -> Matrix:
"""Also known as the quadratic form, the metric tensor
serves as the metrix for the inner product of two roots or weights
when they are not in the orthogonal basis.
Returns:
Matrix: Metric Tensor as a Sympy object
"""
if self._quadratic_form is None:
self._quadratic_form = _quadratic_form(
self.cartan_matrix, self.simple_roots)
return self._quadratic_form
@property
def reflection_matricies(self) -> list[Matrix]:
"""Returns a list of reflection matrices built from
rotations about each simple root.
Returns:
list[Matrix]: list of Sympy Matrices
"""
if self._reflection_matricies is None:
self._reflection_matricies = _reflection_matricies(
self.simple_roots)
return self._reflection_matricies
@property
def fundamental_weights(self) -> list[Matrix]:
"""Returns the fundamental weights of the algebra.
Basis: Orthogonal
Returns:
list[Matrix]: list of Sympy Matrices
"""
if self._fundamental_weights is None:
self._fundamental_weights = [
_annotate_matrix(self.omega_matrix.row(i))
for i in range(self.omega_matrix.rows)
]
return self._fundamental_weights
@property
def positive_roots(self) -> list[Matrix]:
"""Returns the postive roots of the algebra. They are sorted
first by their distance from the highest root and then by
tuple ordering (convention).
Basis: Orthogonal
Returns:
list[Matrix]: list of Sympy Matrices
"""
if self._positive_roots is None:
self._positive_roots = self.root_system()[:self.n_pos_roots]
return self._positive_roots
@property
def _backend_instance(self):
if self._backend is None:
self._backend = create_backend(self)
return self._backend
def orbit(self, weight: Matrix, stabilizers=None, basis="ortho") -> list[Matrix]:
"""Returns the orbit of the weight or root by reflecting it
a plane. A stabilizer may be passed to calculate the orbit using
the Orbit-Stabilizer theorem.
Basis: Ortho
Args:
weight (Matrix): A Matrix of shape (1, rank)
stabilizer (Iterable of ints, optional): Per Orbit-Stabilizer
theorem, integer iterable of simple root indexes. Defaults to None.
Sources:
- https://en.wikipedia.org/wiki/Coadjoint_representation#Coadjoint_orbit
- https://en.wikipedia.org/wiki/Group_action#Orbits_and_stabilizers
"""
weight = self.to_ortho(weight, "ortho")
return [self.to_ortho(x, "ortho") for x in self._backend_instance.orbit(weight, stabilizers)]
    def dim_name(self, irrep: Matrix, basis="omega") -> NumericSymbol:
        r"""Returns a sympy formatted symbol for the irrep.
        This is commonly used in physics literature. Returns
        a NumericSymbol object that is a simple extension of
        sympy.Symbol.
        Examples
        =========
        >>> from liesym import A
        >>> from sympy import Matrix
        >>> a3 = A(3)
        >>> assert str(a3.dim_name(Matrix([[1, 1, 0]]))) == '\\bar{20}'
        """
        irrep = self.to_omega(irrep, basis)
        dim = self.dim(irrep)
        max_dd = self.max_dynkin_digit(irrep)
        # All irreps sharing this dimension; primes/conjugation disambiguate.
        same_dim_irreps: list[Matrix] = self.get_irrep_by_dim(dim, max_dd)
        num_primes = 0
        conjugate = 0
        so8label = ""
        if len(same_dim_irreps) > 1:
            # group by index
            index_pairs = {}  # type: ignore
            for i in same_dim_irreps:
                index = self._backend_instance.index_irrep(i, dim)
                index_pairs[index] = index_pairs.get(index, []) + [i]
            groups = [sorted(dimindex, key=cmp_to_key(self._dimindexsort))
                      for dimindex in index_pairs.values()]
            positions = []
            for id1, grps in enumerate(groups):
                for id2, g in enumerate(grps):
                    if g == irrep:
                        positions.append([id1, id2])
            # Group position -> number of primes; in-group position marks
            # the conjugate member of a conjugate pair.
            [num_primes, conjugate] = positions[0]
            so8label = self._is_s08(irrep)
        has_conjugate = conjugate == 1 if so8label == "" else False
        return self._dim_name_fmt(dim, has_conjugate, num_primes, so8label)
    def irrep_lookup(self, dim: Union[Symbol, str], max_dynkin_digit=5) -> Matrix:
        """Returns the irrep matrix for the dimension.
        Args:
            dim (Union[Symbol, str]): Can either be a sympy.Symbol or string.
            max_dynkin_digit (int): Upper bound on the Dynkin-digit search.
        Raises:
            KeyError: Dim not found
        Returns:
            Matrix: Returns irrep in Omega basis
        Examples
        ========
        >>> from liesym import A
        >>> A3 = A(3)
        >>> A3.irrep_lookup(r"\\bar{4}")
        Matrix([[0, 0, 1]])
        >>> A3.irrep_lookup("4")
        Matrix([[1, 0, 0]])
        """
        # Normalize the query down to a NumericSymbol so both the numeric
        # dimension and the exact formatted label can be compared.
        if isinstance(dim, str):
            dim = Symbol(dim)
        if isinstance(dim, Symbol) and not isinstance(dim, NumericSymbol):
            dim = NumericSymbol.from_symbol(dim)
        n_dim = dim.numeric_dim
        # Widen the Dynkin-digit search until the formatted name matches.
        dd = 0
        while dd < max_dynkin_digit:
            dd += 1
            for c in self.get_irrep_by_dim(n_dim, dd):
                if self.dim_name(c) == dim:
                    return c
        raise KeyError(f"Irrep {dim} not found.")
    def conjugate(self, irrep: Matrix) -> Matrix:
        """Finds the conjugate irrep. If it is the same
        as the original irrep, you have a Real Irrep, otherwise
        it's a Complex Irrep.
        Examples
        ========
        .. code-block:: python
            from liesym import A,D
            from sympy import Matrix
            SU4 = A(3)
            irrep_20 = Matrix([[0,1,1]])
            irrep_20bar = Matrix([[1,1,0]])
            assert irrep_20 == SU4.conjugate(irrep_20bar)
            SO10 = D(5)
            irrep_10 = Matrix([[1, 0, 0, 0, 0]])
            assert irrep_10 == SO10.conjugate(irrep_10)
        """
        # Backend returns a one-element list; tag the result as omega basis.
        return self.to_omega(self._backend_instance.conjugate(irrep)[0], "omega")
    def _is_s08(self, irrep):
        # Hook for subclasses to attach an SO(8)-style disambiguation label;
        # base algebras have none, so return the empty string.
        return ""
    def _dimindexsort(self, irrep1, irrep2):
        """cmp-style comparator ordering same-dimension irreps by their
        congruency class (used via functools.cmp_to_key)."""
        cong1 = self._congruency_class(irrep1)
        cong2 = self._congruency_class(irrep2)
        # Tuple-valued classes compare on their last component, reversed
        # relative to the scalar case.
        if isinstance(cong1, tuple):
            return 1 if cong1[-1] <= cong2[-1] else -1
        else:
            return -1 if cong1 <= cong2 else 1
    def _congruency_class(self, irrep):
        # Hook for subclasses; the base algebra has a single trivial class.
        return 0
    def max_dynkin_digit(self, irrep: Matrix) -> int:
        """Returns the max Dynkin Digit for the representations"""
        # Abstract: subclasses override; the base implementation returns None.
        pass
def _dim_name_fmt(self, dim: int, conj=False, primes=0, sub="") -> NumericSymbol:
if conj:
irrep = r"\bar{" + str(dim) + "}"
else:
irrep = str(dim)
if primes > 0:
irrep += r"^{" + " ".join([r"\prime"] * primes) + r"}"
if sub != "":
irrep += r"_{" + str(sub) + r"}"
return NumericSymbol(dim, irrep)
    def get_irrep_by_dim(self, dim: int, max_dd: int = 3, with_symbols=False) -> list[Union[Matrix, Tuple[Matrix, NumericSymbol]]]:
        r"""Gets all irreps by dimension and max dynkin digit. This
        algorithm brute-force searches using `itertools.product`, which can
        become expensive for large algebras, so searching max_dd > 3 will be
        very expensive.
        Args:
            dim (int): Dimension to query
            max_dd (int, optional): The max dynkin digit to use. Defaults to 3.
            with_symbols (bool, optional): Returns list of tuples of rep and latex fmt. Defaults to False.
        Returns:
            list[Union[Matrix, Tuple[Matrix,NumericSymbol]]]: If `with_symbols=True` will return a list of tuples.
        Examples
        =========
        >>> from liesym import A
        >>> from sympy import Matrix
        >>> a3 = A(3)
        >>> expected = a3.get_irrep_by_dim(20)
        >>> result = [
        ...     Matrix([[1, 1, 0]]),
        ...     Matrix([[0, 1, 1]]),
        ...     Matrix([[0, 2, 0]]),
        ...     Matrix([[3, 0, 0]]),
        ...     Matrix([[0, 0, 3]])]
        >>> assert expected == result
        >>> a3.get_irrep_by_dim(20, with_symbols=True)
        [(Matrix([[1, 1, 0]]), \bar{20}), (Matrix([[0, 1, 1]]), 20), (Matrix([[0, 2, 0]]), 20^{\prime}), (Matrix([[3, 0, 0]]), \bar{20}^{\prime \prime}), (Matrix([[0, 0, 3]]), 20^{\prime \prime})]
        """
        # Backend may return None; normalize to an empty list.
        backend_results: list[Matrix] = self._backend_instance.get_irrep_by_dim(
            dim, max_dd) or []
        results = [self.to_omega(x, "omega") for x in backend_results]
        if with_symbols:
            results = [(x, self.dim_name(x)) for x in results]
        return results
    def dim(self, irrep: Matrix, basis="omega") -> int:
        r"""Returns the dimension of the weight, root or irreducible representations.
        This follows Weyl's dimension formula:
        .. math::
            dim(w) = \prod_{\alpha\in\Delta^{+}} \frac{\langle \alpha, w + \rho\rangle}{\langle\alpha,\rho\rangle}
        where $\Delta^{+}$ are the positive roots and $\rho$ is the sum of
        the positive roots: `[1] * rank`.
        Examples
        ========
        >>> from liesym import A
        >>> from sympy import Matrix
        >>> a2 = A(2)
        >>> assert a2.dim(Matrix([[1,0]])) == 3
        """
        # Tag the incoming matrix with its declared basis, then rotate to
        # omega for the backend computation.
        basis = _basis_lookup(basis)
        _annotate_matrix(irrep, basis)
        irrep = self.to_omega(irrep)
        return sympify(self._backend_instance.dim(irrep))
    def root_system(self) -> list[Matrix]:
        """Returns the entire rootsystem of the algebra. This
        includes the positive, negative and zeros of the algebra.
        Basis: Orthogonal
        Returns:
            list[Matrix]: list of ordered roots.
        """
        # Backend emits roots in the omega basis; rotate each to orthogonal
        # and cache the full ordered list.
        if self._root_system is None:
            self._root_system = [self.to_ortho(
                x, basis="omega") for x in self._backend_instance.root_system()]
        return self._root_system
def tensor_product_decomposition(self, weights: list[Matrix], basis="omega", **_) -> list[Matrix]:
"""Returns the tensor product between irreducible representations
as a the tensor sum of the irreducible representations of their
highest weights. This algorithm is based on Klimky's formula.
Args:
weights (list[Matrix]): A list of fundamental weights to take the tensor product between
basis (str, Optional): Basis of incoming weights. If not set, will implicitly set. Defaults to 'omega'.
Returns:
list[Matrix]: list of weights decomposed from the tensor product. Basis: Omega
Examples
=========
>>> from liesym import A
>>> from sympy import Matrix
>>> a2 = A(2)
>>> results = a2.tensor_product_decomposition([Matrix([[1,0]]), Matrix([[1,0]])])
>>> print(results)
[Matrix([[0, 1]]), Matrix([[2, 0]])]
"""
weights = [self.to_omega(x, basis) for x in weights]
w = deepcopy(weights)
i = w.pop()
j = w.pop()
decomp = self._backend_instance.tensor_product_decomposition(i, j)
while len(w) > 0:
j = w.pop()
results = []
for i in decomp:
# i,j reversed because pop takes from -1 index
results += self._backend_instance.tensor_product_decomposition(
j, i)
decomp = results
return [self.to_omega(x, "omega") for x in decomp]
def to_ortho(self, x: Matrix, basis=None) -> Matrix:
"""Rotates to orthogonal basis
Args:
x (Matrix): Matrix to be rotated
basis (optional): If `basis` attribute is not set on `x` define it here. Defaults to None.
Raises:
ValueError: If no `x.basis` is set and None is passed to `basis` kwarg.
Returns:
Matrix: Matrix in orthogonal basis.
"""
basis = _basis_lookup(basis)
_annotate_matrix(x, basis)
if x.basis is Basis.ORTHO:
r = x
elif x.basis is Basis.OMEGA:
r = x * self.omega_matrix
elif x.basis is Basis.ALPHA:
r = x * self.cartan_matrix * self.omega_matrix
else:
raise ValueError(
"Basis arg cannot be None if attribute `basis` has not been set on Matrix.")
r.basis = Basis.ORTHO
return r
def to_omega(self, x: Matrix, basis=None) -> Matrix:
"""Rotates to omega basis
Args:
x (Matrix): Matrix to be rotated
basis (optional): If `basis` attribute is not set on `x` define it here. Defaults to None.
Raises:
ValueError: If no `x.basis` is set and None is passed to `basis` kwarg.
Returns:
Matrix: Matrix in omega basis.
"""
basis = _basis_lookup(basis)
_annotate_matrix(x, basis)
if x.basis is Basis.OMEGA:
r = x
elif x.basis is Basis.ORTHO:
r = x * self.omega_matrix.pinv()
elif x.basis is Basis.ALPHA:
r = x * self.cartan_matrix
else:
raise ValueError(
"Basis arg cannot be None if attribute `basis` has not been set on Matrix.")
r.basis = Basis.OMEGA
return r
def to_alpha(self, x: Matrix, basis=None) -> Matrix:
"""Rotates to alpha basis
Args:
x (Matrix): Matrix to be rotated
basis (optional): If `basis` attribute is not set on `x` define it here. Defaults to None.
Raises:
ValueError: If no `x.basis` is set and None is passed to `basis` kwarg.
Returns:
Matrix: Matrix in alpha basis.
"""
basis = _basis_lookup(basis)
_annotate_matrix(x, basis)
if x.basis is Basis.ALPHA:
r = x
elif x.basis is Basis.ORTHO:
r = x * self.omega_matrix.pinv() * self.cartan_matrix.pinv()
elif x.basis is Basis.OMEGA:
r = x * self.cartan_matrix.pinv()
else:
raise ValueError(
"Basis arg cannot be None if attribute `basis` has not been set on Matrix.")
r.basis = Basis.ALPHA
return r
def _scalar_product(self, irrep1, irrep2, basis="ortho"):
"""Scalar product between two irreps."""
irrep1 = self.to_ortho(irrep1, basis=basis)
irrep2 = self.to_ortho(irrep2, basis=basis)
return irrep1.dot(irrep2)
def quadratic_casimir(self, irrep: Matrix = None, basis="omega", **kwargs) -> Basic:
r"""Returns the quadratic casimir for an arbitrary irreducible
representation, $R$.
.. math::
\frac{1}{2}<R | R + \rho>
Args:
irrep (Matrix): Irreducible representation
basis (str, optional): Basis of irrep. Defaults to "omega".
Returns:
Basic: Rational number
"""
if irrep is None:
# return sympify(1)
irrep = self.adjoint_rep
basis = irrep.basis
irrep = self.to_omega(irrep, basis=basis)
rho = 2 * ones(1, self.rank)
return self._scalar_product(irrep, irrep + rho, basis="omega") / 2
def dynkin_index(self, irrep: Matrix = None, basis="omega") -> Basic:
r"""Calculates the dynkin index of an arbitrary irrep. This
is also called in literature second-order index, $I_2(R)$. If no irrep
is passed, basis and rep default to the algebra's adjoint rep.
For arbitrary irrep, $R$, the dynkin index, $I_2(R)$ can be calculated
as:
.. math::
I_2(R) = \frac{Dim(R)}{Dim(adj)}c(R)
where the $dim$ is the dimension formula, $adj$ is the adjoint irrep
and $c$ is the quadratic casimir (`Liealgebra.casimir`).
"""
irrep = irrep or self.adjoint_rep
basis = basis if irrep else self.adjoint_rep.basis
d_r = self.dim(irrep, basis=basis)
d_a = self.dim(self.adjoint_rep)
c_r = self.quadratic_casimir(irrep, basis=basis)
return d_r * c_r / d_a
@property
def adjoint_rep(self) -> Matrix:
"""Returns the adjoint irrep. This is the highest weight of the
root system."""
return self.positive_roots[0]
|
"""
Author: Tyson Bradford
Assignment: Checkpoint 1
"""
"""
When you physically exercise to strengthen your heart, you
should maintain your heart rate within a range for at least 20
minutes. To find that range, subtract your age from 220. This
difference is your maximum heart rate per minute. Your heart
simply will not beat faster than this maximum (220 - age).
When exercising to strengthen your heart, you should keep your
heart rate between 65% and 85% of your heart's maximum.
"""
text = input("Please enter your age: ")
age = int(text)
max_rate = 220 - age
slowest = max_rate * 0.65
fastest = max_rate * 0.85
print("When you exercise to strengthen your heart, you should keep")
print(f"your heart rate between {slowest:.0f} and {fastest:.0f} beats per minute.")
|
# basicAnimationDemo2.py
# version 0.5
# Barebones timer, mouse, and keyboard events
# without (much) event-based programming or (much) object-oriented programming
# To run this, you need to download basicAnimation.py
# and save that file in the same folder as this one.
from Tkinter import *
from basicAnimation import BasicAnimationRunner
def onMousePressed(canvas, event):
    # Jump the circle's center to the clicked location.
    canvas.data.x = event.x
    canvas.data.y = event.y
def onKeyPressed(canvas, event):
    # Arrow keys nudge the circle vertically by 5 pixels; other keys ignored.
    key = event.keysym
    if key == "Up":
        canvas.data.y -= 5
    elif key == "Down":
        canvas.data.y += 5
def onTimerFired(canvas):
    # Drift right 10px per tick, wrapping around at the canvas width.
    advanced = canvas.data.x + 10
    canvas.data.x = advanced % canvas.app.width
def init(canvas):
    # 100ms tick; circle starts at (50, 50) with radius 25.
    canvas.app.setTimerDelay(100)
    canvas.data.x = canvas.data.y = 50
    canvas.data.r = 25
def redrawAll(canvas):
    # Wipe the canvas and repaint the circle plus the instruction text.
    x = canvas.data.x
    y = canvas.data.y
    r = canvas.data.r
    canvas.delete(ALL)
    canvas.create_oval(x-r, y-r, x+r, y+r, fill="green")
    canvas.create_text(canvas.app.width/2, 20, text="Click to move circle")
    canvas.create_text(canvas.app.width/2, 40, text="Up/Down arrows also work")
def myBasicAnimation(app, canvas, extraArg):
    # Main animation loop (Python 2 print syntax): poll events from the
    # runner and dispatch them to the handlers above, redrawing after
    # every event.
    print "Running animation, extraArg =", extraArg
    init(canvas)
    while app.isRunning():
        (eventType, event) = app.getEvent()
        if (eventType == "mousePressed"): onMousePressed(canvas, event)
        elif (eventType == "keyPressed"): onKeyPressed(canvas, event)
        elif (eventType == "timerFired"): onTimerFired(canvas)
        redrawAll(canvas)
    print "Done!"
# Start the runner; it invokes myBasicAnimation with (app, canvas, extraArg).
BasicAnimationRunner(myBasicAnimation, width=300, height=300, extraArg="wow!")
|
import argparse
import h5py
import numpy as np
from geodesic import GeodesicDistanceComputation
def main(input_animation_file, output_sploc_file):
    """Compute geodesic distances from every vertex and store them in HDF5.

    Args:
        input_animation_file: HDF5 file containing 'verts' and 'tris' datasets.
        output_sploc_file: destination HDF5 file; receives 'Gnum' (vertex
            count) and one 'geodisNNNNN' dataset per source vertex.
    """
    with h5py.File(input_animation_file, 'r') as f:
        # Dataset.value was removed in h5py 3.0; [()] reads the full array.
        # np.float was removed in numpy 1.20; builtin float is the same type.
        verts = f['verts'][()].astype(float)
        tris = f['tris'][()]
    N, _ = verts.shape
    compute_distance = GeodesicDistanceComputation(verts, tris)
    with h5py.File(output_sploc_file, 'w') as f:
        f['Gnum'] = N
        for i in range(N):
            f['geodis%05d' % i] = compute_distance(i)
#main('F:\\yangjiee\\yangjie\\tracking\\paper\\_tem_\\inputfine.h5','F:\\yangjiee\\yangjie\\tracking\\paper\\_tem_\\inputfine.h5')
if __name__ == '__main__':
    # CLI entry point: main(<input_animation_file>, <output_sploc_file>)
    parser = argparse.ArgumentParser(
        description='Find Sparse Localized Deformation Components')
    parser.add_argument('input_animation_file')
    parser.add_argument('output_sploc_file')
    args = parser.parse_args()
    main(args.input_animation_file,
         args.output_sploc_file)
|
# -*- encoding: utf-8 -*-
"""Create a WeChat Official Account custom menu via the menu/create API."""
import urllib.request
from urllib.parse import urlencode
import json
import sys
import importlib

importlib.reload(sys)  # Python 2 relic (reset default encoding); harmless in py3

# SECURITY NOTE: credentials are hard-coded here; move them to a config file
# or environment variables for any real deployment.
appid = 'wxb596c90e795d46b3'
secret = 'f6bc8c361cb46bd8b4dc227ad01e3201'

# Exchange the app credentials for an access token.
gettoken = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=' + appid + '&secret=' + secret
f = urllib.request.urlopen(gettoken)
stringjson = f.read()
access_token = json.loads(stringjson)['access_token']
print(access_token)

posturl = "https://api.weixin.qq.com/cgi-bin/menu/create?access_token=" + access_token

# BUGFIX: the original literal started with ''''' (five quotes), which opened
# a triple-quoted string whose content began with two stray apostrophes —
# the posted payload was ''{...}, i.e. invalid JSON rejected by the API.
menu = '''{
     "button":
     [
      {
           "name": "校区1",
           "sub_button":
           [
            {
                "type": "view",
                "name": "浴室",
                "url": "http://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1418702138&token=&lang=zh_CN"
            },
            {
                "type": "view",
                "name": "水房",
                "url": "http://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1418702138&token=&lang=zh_CN"
            }
           ]
       },
       {
           "name": "校区2",
           "sub_button":
           [
            {
                "type": "view",
                "name": "浴室",
                "url": "http://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1418702138&token=&lang=zh_CN"
            },
            {
                "type": "view",
                "name": "水房",
                "url": "http://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1418702138&token=&lang=zh_CN"
            }
           ]
       },
       {
           "type": "click",
           "name": "其他",
           "key": "mpGuide"
       }
     ]
 }'''

request = urllib.request.urlopen(posturl, menu.encode('utf-8'))
print(request.read())
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import animation

# Left panel: the animated stick figure; right panel: the 11 pose-parameter
# curves plotted against time.
fig = plt.figure(figsize=(15,5))
ax = plt.subplot2grid((1,4),(0, 0))
ax.set_xlim(-0.5,0.5)
ax.set_ylim(-2,2)
# One Line2D artist per body segment; updated in place by param2pos().
spine, = ax.plot([], [], lw=2)
left_arm, = ax.plot([], [], lw=2)
right_arm, = ax.plot([], [], lw=2)
left_leg, = ax.plot([], [], lw=2)
right_leg, = ax.plot([], [], lw=2)
px = plt.subplot2grid((1,4),(0, 1), colspan=3)
px.set_xlim(0,200)
px.set_ylim(-0.4,0.4)
# One curve per pose parameter (11 parameters, see param2pos docstring).
curves = [px.plot([],[],"-")[0] for i in range(11)]
# 21 key frames, evenly spaced over t in [0, 200]; poses alternate between
# two hand-tuned parameter vectors so the motion loops.
keytime = np.linspace(0, 200, 21)
keyframe = []
keyframe.append(np.array([0.,-0.05, -0.2,-0.2, 0.2,-0.2, 0.25, -0.3, 0.3, 0.1, 0.2]))
keyframe.append(np.array([0.,0.0, 0.2,-0.1, -0.2,-0.1, 0.1, 0.1,0.2, -0.3,0.3]))
for i in range(9):
    keyframe.append(keyframe[0])
    keyframe.append(keyframe[1])
keyframe.append(keyframe[0])
assert(len(keytime) == len(keyframe))
def rotate( v, angle ):
    """Rotate the 2D vector *v* counterclockwise by *angle* radians."""
    sin_a = np.sin(angle)
    cos_a = np.cos(angle)
    return np.array([v[0] * cos_a - v[1] * sin_a,
                     v[0] * sin_a + v[1] * cos_a])
def param2pos( param ):
    """
    Computes positions of joints for the stick guy and updates the five
    module-level Line2D artists (spine, arms, legs) in place.

    Inputs:
    param : list of parameters describing the pose
        param[0]: height of hip
        param[1]: angle of spine to vertical axis
        param[2]: angle of upper arm 0 to spine
        param[3]: angle of lower arm 0 to upper arm 0
        param[4,5]: as above, other arm
        param[6]: angle of neck/head to spine
        param[7]: angle of upper leg 0 to vertical axis
        param[8]: angle of lower leg 0 to upper leg 0
        param[9,10]: as above, other leg
    """
    # Hip sits on the vertical axis; the spine is a unit vector tilted by param[1].
    hip_pos = np.array([0.0, param[0]])
    spine_vec = np.array([0.0, 1.0])
    spine_vec = rotate(spine_vec, param[1])
    neck_pos = hip_pos + spine_vec
    # Arms hang from the neck; each segment is rotated relative to its parent.
    basic_arm_vec = -0.6*spine_vec
    arm_vec = rotate(basic_arm_vec, param[2])
    left_elbow_pos = neck_pos + arm_vec
    arm_vec = rotate(arm_vec, param[3])
    left_hand_pos = left_elbow_pos + arm_vec
    lad = np.array([neck_pos,left_elbow_pos,left_hand_pos])
    left_arm.set_data(lad[:,0],lad[:,1])
    arm_vec = rotate(basic_arm_vec, param[4])
    right_elbow_pos = neck_pos + arm_vec
    arm_vec = rotate(arm_vec, param[5])
    right_hand_pos = right_elbow_pos + arm_vec
    rad = np.array([neck_pos,right_elbow_pos,right_hand_pos])
    right_arm.set_data(rad[:,0],rad[:,1])
    # Head extends the spine by 0.3, tilted by the neck angle.
    neck_vec = 0.3*spine_vec
    neck_vec = rotate( neck_vec, param[6])
    head_pos = neck_pos + neck_vec
    sd = np.array([hip_pos,neck_pos,head_pos])
    spine.set_data(sd[:,0],sd[:,1])
    # Legs hang from the hip, length 0.7 per segment, chained like the arms.
    basic_leg_vec = (0.0,-0.7)
    leg_vec = rotate(basic_leg_vec, param[7])
    left_knee_pos = hip_pos + leg_vec
    leg_vec = rotate(leg_vec, param[8])
    left_foot_pos = left_knee_pos + leg_vec
    lld = np.array([hip_pos,left_knee_pos,left_foot_pos])
    left_leg.set_data(lld[:,0],lld[:,1])
    leg_vec = rotate(basic_leg_vec, param[9])
    right_knee_pos = hip_pos + leg_vec
    leg_vec = rotate(leg_vec, param[10])
    right_foot_pos = right_knee_pos + leg_vec
    rld = np.array([hip_pos,right_knee_pos,right_foot_pos])
    right_leg.set_data(rld[:,0],rld[:,1])
def animate(t):
    """Advance the animation to frame *t*: pose the figure, extend the curves."""
    global curves_x, curves_y
    frame = interpolate(t)
    param2pos(frame)
    if t == 0:
        # Restart the parameter traces at the beginning of each loop.
        curves_x = [float(t)]
        curves_y = frame
    else:
        curves_x.append(float(t))
        curves_y = np.c_[curves_y, frame]
    for idx in range(len(curves)):
        curves[idx].set_data(curves_x, curves_y[idx])
    return left_arm,right_arm,spine,left_leg, right_leg
def interpolate(t):
    """Linearly interpolate the keyframe parameter vectors at time *t*."""
    # Index of the keyframe at or just before t (keytime is sorted).
    idx = np.searchsorted(keytime, t, side='right') - 1
    frac = (t - keytime[idx]) / (keytime[idx + 1] - keytime[idx])
    return (1.0 - frac) * keyframe[idx] + frac * keyframe[idx + 1]
anim = animation.FuncAnimation(fig, animate, frames=200, interval=10, blit=False)
plt.subplots_adjust(left=0.04, right=0.98, top=0.98, bottom=0.05)
plt.show()

# Static plots of the interpolated parameter curves, drawn after the
# animation window is closed.
plt.subplot(2,1,1)
tx = np.linspace(0,200, 1000, endpoint=False)
ty = np.zeros((1000,11))
for i, t in enumerate(tx):
    ty[i] = interpolate(t)
for i in range(11):
    plt.plot(tx, ty[:,i], '-')
plt.xlim(0,200)
plt.ylim(-0.4,0.4)
# Zoom around the loop seam (t=200) to check the curves join periodically.
plt.subplot(2,1,2)
for i in range(11):
    plt.plot(np.hstack((tx, tx+200.0)), np.hstack((ty[:,i],ty[:,i])), '-')
plt.xlim(180,220)
plt.ylim(-0.4,0.4)
plt.subplots_adjust(left=0.06, right=0.97, top=0.98, bottom=0.05)
plt.savefig('./figures/aufgabe2a.pdf')
plt.show()
|
# -*- coding: utf-8 -*-
import psycopg2, psycopg2.extras
import urllib2
import ConfigParser
from string import join
import os
from time import sleep
class loadtables():
    """Synchronize public GIS table names into an Odoo database (Python 2).

    Reads connection settings from ``config.ini``, validates that each public
    table in the GIS database carries the mandatory mining-registry columns,
    and registers valid table names in ``tablesgis_tablesgis`` on the Odoo side.
    """
    def conect(self):
        # Connect to the GIS PostgreSQL database configured in config.ini
        # and return the cursor (also stored on self).
        thisfolder = os.path.dirname(os.path.abspath(__file__))
        initfile = os.path.join(thisfolder, 'config.ini')
        config = ConfigParser.ConfigParser()
        config.read(initfile)
        ## 'Publicacion' (publication) section
        #self.host = config.get('Publicacion', 'host')
        #self.port = config.get('Publicacion', 'port')
        self.codprov = config.get('Publicacion', 'codprov')
        # DATABASE credentials (GIS side)
        nameDB = config.get('DB', 'nameDBgis')
        userDB = config.get('DB', 'userDBgis')
        passDB = config.get('DB', 'passDBgis')
        hostDB = config.get('DB', 'hostDBgis')
        print ("CONECTANDO DB--....", nameDB)
        self.conn = psycopg2.connect(dbname=nameDB, user=userDB, password=passDB, host=hostDB)
        #self.conn = psycopg2.connect(dbname='nacion',user='postgres',password='23462', host='localhost')
        try:
            self.cursor = self.conn.cursor()
        except psycopg2.DatabaseError as e:
            # NOTE(review): if cursor() raised, self.cursor is never assigned
            # and the check below raises AttributeError — confirm intent.
            #logging.info("Error de conexion con la BD local")
            print ((e.pgerror))
        if self.cursor:
            #logging.info("Conexion correcta con la BD local: " + nameDB)
            print (("Conexion correcta con la BD local: " + nameDB))
        else:
            print ("Error de conexion con la BD local")
        return self.cursor
    def conectodoo(self):
        # Connect to the Odoo PostgreSQL database and return the connection.
        thisfolder = os.path.dirname(os.path.abspath(__file__))
        initfile = os.path.join(thisfolder, 'config.ini')
        config = ConfigParser.ConfigParser()
        config.read(initfile)
        ## 'Publicacion' (publication) section
        #self.host = config.get('Publicacion', 'host')
        #self.port = config.get('Publicacion', 'port')
        self.codprov = config.get('Publicacion', 'codprov')
        # DATABASE credentials (Odoo side)
        nameDB = config.get('DB', 'nameDBodoo')
        userDB = config.get('DB', 'userDBodoo')
        passDB = config.get('DB', 'passDBodoo')
        hostDB = config.get('DB', 'hostDBodoo')
        print ("CONECTANDO DB--...." + nameDB + " - " + userDB + " - " + passDB + " - " + hostDB)
        #self.conn = psycopg2.connect(dbname='nacion',user='postgres',password='23462', host='localhost')
        try:
            self.conn2 = psycopg2.connect(dbname=nameDB, user=userDB, password=passDB, host=hostDB)
            #self.cursor = self.conn2.cursor()
        except psycopg2.DatabaseError as e:
            #logging.info("Error de conexion con la BD local")
            print ((e.pgerror))
        return self.conn2
    def validColumnNames(self, cursor3, tablename):
        # Probe each mandatory column with a SELECT; stops at the first
        # missing column and returns False, True when all are present.
        obligatory = ['id', 'expediente', 'nombre', 'titular', 'mineral', 'estado_legal']
        #"create_date", "write_date"]
        print (("VALIDANDO CAMPOS DE TABLA... "+ tablename))
        for field in obligatory:
            print (("VALIDANDO CAMPO... "+ field))
            try:
                #query = """SELECT * FROM %s WHERE %s IS NULL"""
                query = """SELECT %s FROM %s"""
                res = cursor3.execute(query % (field, tablename))
                res = True
            except (Exception, psycopg2.DatabaseError) as error:
                res = False
                print (("FALTA CAMPOS OBLIGATORIO:", field))
                break
        return res
    def validColumnNames2(self, cursor3, tablename):
        # Variant that reads information_schema once and counts how many of
        # the mandatory columns the table actually has.
        obligatory = ['id', 'expediente', 'nombre', 'titular', 'mineral', 'estado_legal']
        # Must be the number of mandatory columns minus one.
        cant_obligatory = 5
        #"create_date", "create_date"]
        print (("VALIDANDO CAMPOS DE TABLA... "+ tablename))
        try:
            #query = """SELECT * FROM %s WHERE %s IS NULL"""
            query = """SELECT *
                FROM information_schema.columns
                WHERE table_schema = 'public'
                AND table_name = '%s'"""
            cursor3.execute(query % (tablename))
            #res = True
        except (Exception, psycopg2.DatabaseError) as error:
            print (("ERROR", error))
        contar_campos = 1
        res = False
        # NOTE(review): iterates self.cursor3 (set by the caller), not the
        # cursor3 parameter — works only because loadTableNames2 assigns it.
        for field in self.cursor3.fetchall():
            if contar_campos > cant_obligatory:
                res = True
            if field[3] in obligatory:
                print ((" ***************CAMPOS DE LA TABLA: ", field[3], ' EN OBLIGATORIO'))
                contar_campos = contar_campos + 1
            #else:
                #print ((" ***************CAMPOS DE LA TABLA: ", field[3], ' no esta en OBLIGATORIO'))
        return res
    def loadTableNames2(self):
        # Refresh tablesgis_tablesgis in Odoo with every valid public table
        # found in the GIS database.
        #print (("Script Temporizado..."))
        sleep(7)
        results = []
        self.conn2 = self.conectodoo()
        self.cursor2odoo = self.conn2.cursor()
        self.cursor2odoo.execute('DELETE FROM public.tablesgis_tablesgis CASCADE')
        #self.conn2.commit()
        #print (("EL PUERTO EN LA BASE ES ..." + str(baseObj.port)))
        print (("CARGANDO TABLAS..." ))
        self.cursor2 = self.conect()
        self.cursor3 = self.conect()
        #self._conectar()
        try:
            self.cursor2.execute("""SELECT table_name
                FROM information_schema.tables
                WHERE table_schema='public' AND table_type='BASE TABLE'""" )
        except psycopg2.Error as e:
            pass
            print ((e.pgerror))
        #self.conn.commit()
        #self.conn.close()
        for row in self.cursor2.fetchall():
            results.append(dict(zip("1", row)))
            #self.env['tablesgis.tablesgis'].create({'name': str(row)})
            if self.validColumnNames2(self.cursor3, join(row)):
                try:
                    print (("DARIOINSERTANDO EL VALOR ES: " + join(row)))
                    self.cursor2odoo.execute('INSERT INTO public.tablesgis_tablesgis(create_uid, create_date, name, write_uid, cod, write_date)'\
                        ' VALUES (%s, %s, %s, %s, %s, %s)', (1, '2018-12-05 12:44:51.572921',
                        join(row), 1, join(row), '2018-12-05 12:44:51.572921'))
                except psycopg2.Error as e:
                    pass
                    print ((e.pgerror))
        #print (("TABLAS ENCONTRADAS: "+str(results)))
        self.conn2.commit()
        #self.cursor2odoo.close()
        return {'value': {'encontrados_ids': False}}
# Run the synchronization immediately on import/execution.
u = loadtables()
u.loadTableNames2()
import configparser
import glob
import inspect
import logging
import os
import pprint
import sys
import unittest
from itertools import takewhile, dropwhile
from pathlib import Path
from securify.solidity import compile_cfg
from securify.staticanalysis import static_analysis
from securify.__main__ import fix_pragma
USE_COMPILATION_CACHE = False
def make_test_case(path_src, logger):
    """Build a unittest test method that checks Securify pattern matches on
    the Solidity file at *path_src* against the expectations embedded in it.

    Expectations come from an INI block inside the file's first /** ... */
    comment ([Specs] pattern/compliant/violation) plus inline
    `// compliant` / `// violation` markers, which expand to line tags L<n>.
    """
    def test_case(self: unittest.TestCase):
        # if USE_COMPILATION_CACHE:
        #     cfg, ast, *_ = compile_cached(path_src)
        # else:
        #path_src = fix_pragma(path_src)
        # Normalize the pragma, compile to a CFG and run the static analysis.
        new_src = fix_pragma(path_src)
        cfg, ast, *_ = compile_cfg(new_src)
        result = static_analysis.analyze_cfg(cfg, logger=logger).facts_out
        with open(path_src, 'r') as f:
            src_lines = f.readlines()
        # Extract the INI-formatted spec from the first block comment.
        lines = dropwhile(lambda l: "/**" not in l, src_lines)
        lines = takewhile(lambda l: "*/" not in l, lines)
        lines = list(lines)[1:]
        config = configparser.ConfigParser()
        config.read_string("".join(lines))
        specs = config["Specs"]
        pattern = specs["pattern"]
        compliant = [c.strip() for c in specs.get("compliant", "").split(",")]
        violation = [c.strip() for c in specs.get("violation", "").split(",")]
        # Inline // compliant and // violation comments add L<lineno> tags.
        compliant += [f"L{i + 1}" for i, l in enumerate(src_lines) if
                      "//" in l and "compliant" in l.split("//")[1].lower()]
        violation += [f"L{i + 1}" for i, l in enumerate(src_lines) if
                      "//" in l and "violation" in l.split("//")[1].lower()]
        compliant = [s.strip() for s in compliant if "" != s.strip()]
        violation = [s.strip() for s in violation if "" != s.strip()]
        # Collect the analysis output for the pattern under test.
        pattern_matches = [t[1:] for t in result["patternMatch"] if t[0] == pattern]
        pattern_matches_lines = [t[1:] for t in result["patternMatchInfo"] if t[0] == pattern]
        pattern_matches_lines = {match: line for match, key, line in pattern_matches_lines if key == "line"}
        compliant_output = [pattern_matches_lines[m] for m, c in pattern_matches if c == "compliant"]
        violation_output = [pattern_matches_lines[m] for m, c in pattern_matches if c == "violation"]
        conflict_output = [pattern_matches_lines[m] for m, c in pattern_matches if c == "conflict"]
        def compare(expected, actual, e):
            # Set comparison with error messages reworded in terms of
            # expected vs. reported matches.
            try:
                self.assertSetEqual(set(expected), set(actual), e)
            except AssertionError as e:  # Fix ambiguous error messages
                msg = e.args[0]
                msg = msg.replace("Items in the first set but not the second",
                                  "Items expected but not reported")
                msg = msg.replace("Items in the second set but not the first",
                                  "Items incorrectly reported")
                raise AssertionError(msg) from None
        if conflict_output:
            data = pprint.pformat(conflict_output)
            raise Exception("Conflict\n" + data)
        compare(compliant, compliant_output, "Compliance")
        compare(violation, violation_output, "Violations")
    return test_case
class TestPatterns(unittest.TestCase):
    """Dynamically generates one test method per .sol file under this directory."""
    base_path = os.path.dirname(os.path.abspath(__file__)) + "/"
    # The current frame's f_locals IS the class namespace while the class body
    # executes, so assigning into it adds test_* methods to the class.
    frame = inspect.currentframe()
    for filename in glob.iglob(f'{base_path}**/*.sol', recursive=True):
        path = Path(filename)
        # Derive a dotted test name from the file's relative path.
        test_name = str(path.relative_to(Path(os.path.abspath(__file__)).parent)) \
            .replace(".sol", "") \
            .replace("\\", ".") \
            .replace("/", ".")
        frame.f_locals[f'test_{test_name}'] = make_test_case(str(path), logging)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 7 13:03:11 2017
@author: bgris
"""
import numpy as np
import structured_vector_fields as struct
import cmath
import group
class function_2D_scalingdisplacement():
    """Summary features of 2D vector fields, plus an exact registration solver.

    ``function`` reduces a vector field to two 2-vectors (its integral and the
    barycenter of its magnitude); ``solver`` recovers the scaling/rotation/
    translation velocity that maps one summary onto another.
    """
    def __init__(self, space, kernel):
        # space: discretized function space; kernel: used to build the
        # structured -> unstructured vector-field operator.
        self.space = space
        self.unstructured_op = struct.get_from_structured_to_unstructured(space, kernel)
    def function(self, vect_field):
        """
        returns a list of two vectors : \int v and
        (1 / \int |v| ) * \int x |v(x)|
        """
        dim = 2
        function_one = self.space.one()
        # Componentwise integral of v: inner product with the constant-1 image.
        value0 = [function_one.inner(vect_field[i]) for i in range(dim)]
        points = self.space.points().T
        # |v| summed over components, then integrated to get the normalizer.
        vect_field_abs = self.space.tangent_bundle.element(np.abs(vect_field))
        image_norm = sum([vect_field_abs[i] for i in range(dim)])
        norm1 = function_one.inner(image_norm)
        if (norm1 < 1e-10):
            raise ValueError('problem in function function_2D_scalingdisplacement : norm1 is zero')
        # Barycenter of |v| over the grid points.
        value1 = [(1 / norm1) * function_one.inner(points[i] * image_norm) for i in range(dim)]
        return [value0, value1]
    def function_structured(self, structured_field):
        # Same summary, computed from a structured field representation.
        unstructured_field = self.unstructured_op(structured_field)
        return self.function(unstructured_field)
    def solver(self, w1, w2):
        """
        returns the velocity element such that exponential(g).w1 = w2
        w1 and w2 are lists of 2 lists with 2 elements
        """
        # Encode each 2-vector as a complex number; scaling/rotation become
        # modulus/argument operations.
        comp1_0 = complex(w1[0][0], w1[0][1])
        comp1_1 = complex(w1[1][0], w1[1][1])
        comp2_0 = complex(w2[0][0], w2[0][1])
        comp2_1 = complex(w2[1][0], w2[1][1])
        norm1 = abs(comp1_0)
        norm2 = abs(comp2_0)
        if norm1 < 1e-10:
            raise ValueError('problem in solver dim1 : norm1 is zero')
        ratio = norm2 / norm1
        # log taken because the result is a velocity, not the group element
        lam = np.log(ratio)
        # Rotation angle between the scaled first components.
        theta = cmath.phase(comp2_0 / (ratio * comp1_0))
        translation_group = comp2_1 - cmath.rect(1,theta) * comp1_1
        # need to take the 'log' for translation too
        translation = (1 / group.sinc(theta/2)) * cmath.rect(1, - theta / 2) * translation_group
        return np.array([lam, theta, translation.real, translation.imag])
|
#August Challenge 2020
# CodeChef "card game": each player needs the fewest digits whose digit sum
# equals his final power; using digit 9 everywhere is optimal, so the answer
# is ceil(power / 9). Rick wins ties.
import math


def getScore(finalPower):
    """Minimum digit count of a positive integer whose digit sum is finalPower."""
    # Exact integer ceil(finalPower / 9); the original used float division
    # (finalPower/9) which returned a float in the divisible case.
    return -(-finalPower // 9)


def declareWinner(ChefScore, RickScore):
    """Print '<winner> <digits>': 0 = Chef, 1 = Rick (Rick wins ties)."""
    if RickScore > ChefScore:
        print("0", int(ChefScore))
    else:
        print("1", int(RickScore))


try:
    TestCases = int(input())
    for _ in range(TestCases):
        Pc, Pr = map(int, input().split())
        declareWinner(getScore(Pc), getScore(Pr))
except (EOFError, OSError, ValueError):
    # Tolerate missing/truncated stdin (e.g. when imported); the original
    # bare `except: pass` silently swallowed every error, including bugs.
    pass
# Chef is playing a card game with his friend Rick Sanchez. He recently won against Rick's grandson Morty; however, Rick is not as easy to beat. The rules of this game are as follows:
# The power of a positive integer is the sum of digits of that integer. For example, the power of 13 is 1+3=4.
# Chef and Rick receive randomly generated positive integers. For each player, let's call the integer he received final power.
# The goal of each player is to generate a positive integer such that its power (defined above) is equal to his final power.
# The player who generated the integer with fewer digits wins the game. If both have the same number of digits, then Rick cheats and wins the game.
# You are given the final power of Chef PC and the final power of Rick PR. Assuming that both players play optimally, find the winner of the game and the number of digits of the integer he generates.
# Input
# The first line of the input contains a single integer T denoting the number of test cases. The description of T test cases follows.
# The first and only line of each test case contains two space-separated integers PC and PR.
# Output
# For each test case, print a single line containing two space-separated integers. The first of these integers should be either 0 if Chef wins or 1 if Rick wins. The second integer should be the number of digits of the integer generated by the winner.
# Constraints
# 1≤T≤105
# 1≤PC,PR≤106
# Subtasks
# Subtask #1 (100 points): original constraints
# Example Input
# 3
# 3 5
# 28 18
# 14 24
# Example Output
# 1 1
# 1 2
# 0 2
# Explanation
# Example case 1: Chef and Rick generate the optimal integers 3 and 5 respectively. Each of them has 1 digit, so Rick cheats and wins the game.
# Example case 2: Chef and Rick could generate e.g. 6877 and 99 respectively. Since Rick's integer has 2 digits and Chef cannot generate an integer with less than 4 digits, Rick wins.
# Example case 3: Chef and Rick could generate e.g. 86 and 888 respectively. Chef's integer has 2 digits and Rick cannot generate an integer with less than 3 digits, so Chef wins.
|
# 查找和替换模式 (Find and Replace Pattern, LeetCode 890)
class Solution:
    def findAndReplacePattern(self, words, pattern):
        """
        Return the words that match *pattern* under a one-to-one letter mapping.

        A word matches when some bijection between its letters and the
        pattern's letters maps the word onto the pattern.

        :type words: List[str]
        :type pattern: str
        :rtype: List[str]
        """
        matches = []
        for word in words:
            # Words of a different length can never match; the original
            # indexed pattern[j] for every j in the word and raised
            # IndexError on longer words.
            if len(word) != len(pattern):
                continue
            forward = {}   # word letter -> pattern letter
            backward = {}  # pattern letter -> word letter
            ok = True
            for w_ch, p_ch in zip(word, pattern):
                # Enforce the bijection per character (the original only
                # compared key/value set sizes after the fact, and kept an
                # unused len(set(pattern)) variable).
                if (forward.setdefault(w_ch, p_ch) != p_ch
                        or backward.setdefault(p_ch, w_ch) != w_ch):
                    ok = False
                    break
            if ok:
                matches.append(word)
        return matches
|
'''
Simple python cache system.
Example usage:
import json
from chainedcache import DictCache, FileCache, S3Cache, ChainedCache
json2bytes = lambda d: json.dumps(d).encode('UTF-8')
bytes2json = lambda d: json.loads(d.decode('UTF-8'))
stream2json = lambda d: json.load(d)
dict_cache = DictCache()
file_cache = FileCache('./cache', mode='bytes',
put_transformer=json2bytes, get_transformer=bytes2json)
s3_cache = S3Cache('my_s3_bucket', 'cache', region='us-east-1',
put_transformer=json2bytes, get_transformer=stream2json)
cache = ChainedCache([dict_cache, file_cache, s3_cache])
def data_generator(key):
return { "the_key_is": key }
json_data = cache.get_put("hello", data_generator)
'''
from .chainedcache import DictCache, FileCache, S3Cache, ChainedCache
|
from import_export import resources
from .models import Visitor,Visit
from import_export.fields import Field
from import_export.widgets import ForeignKeyWidget
class VisitorResource(resources.ModelResource):
    """django-import-export resource exposing all Visitor model fields."""
    class Meta:
        model = Visitor
class VisitResource(resources.ModelResource):
    """django-import-export resource for Visit rows, bulk-optimized.

    The 'cellphone' column is resolved to a Visitor foreign key by looking
    up Visitor.cellphone.
    """
    cellphone = Field(
        column_name='cellphone',
        attribute='cellphone',
        widget=ForeignKeyWidget(Visitor, 'cellphone'))

    class Meta:
        model = Visit
        # Import in bulk batches and skip per-row instance lookups for speed.
        use_bulk = True
        batch_size = 1000
        force_init_instance = True
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
File: level14.py
Author: rshen <anticlockwise5@gmail.com>
Description: The Python Challenge Level 14 (Walk Around): http://www.pythonchallenge.com/pc/return/italy.html
'''
import urllib
import Image
img_url = "http://huge:file@www.pythonchallenge.com/pc/return/wire.png"
req = urllib.urlopen(img_url)
open("wire.png", 'w').write(req.read())
img = Image.open("wire.png")
img_new = Image.new(img.mode, (100, 100))
seq = [i for i in range(1,101)]
seq = seq * 2
seq.sort()
seq.reverse()
seq = seq[1:]
x, y, step = -1, 99, 0
directions = [(1, 0), (0, -1), (-1, 0), (0, 1)]
direction = 0
for num in seq:
for i in range(num):
x += directions[direction][0]
y += directions[direction][1]
img_new.putpixel((x, y), img.getpixel((step, 0)))
step += 1
direction += 1
if direction == 4:
direction = 0
img_new.save("wire14.png")
|
# Scan the redwood data file (tab-separated: name, place, diameter, height)
# and report the tree with the largest diameter and the tallest tree.
with open(r'd:\Projects\ExperisAcademy\Exercises\IO\redwood-data.txt', 'r') as input_file:
    # Skip the two header lines.
    next(input_file)
    next(input_file)
    tree_data = []
    highest_ind = biggest_ind = 0
    highest_height = biggest_diameter = 0
    for current_ind, line in enumerate(input_file):
        cols = [col.strip() for col in line.split('\t')]
        tree_name = cols[0]
        tree_place = cols[1]
        tree_diameter = float(cols[2])
        tree_height = float(cols[3])
        tree_data.append([tree_name, tree_place, tree_diameter, tree_height])
        if tree_height > highest_height:
            highest_height = tree_height
            highest_ind = current_ind
        if tree_diameter > biggest_diameter:
            biggest_diameter = tree_diameter
            biggest_ind = current_ind

print(f'The biggest tree is {tree_data[biggest_ind][0]} with a diameter of {tree_data[biggest_ind][2]}')
print(f'The highest tree is {tree_data[highest_ind][0]} with a height of {tree_data[highest_ind][3]}')
|
# -*- coding: utf-8 -*-
#
# Test all end points are working as expected
#
# :copyright: 2023 Sonu Kumar
# :license: BSD-3-Clause
#
import unittest
import pyquery
from django.test import TestCase
from util import TestBase
from django.conf import settings
from error_tracker.django.models import ErrorModel
class ViewTestCase(TestCase, TestBase):
    """End-to-end checks of the error-tracker list/detail/delete/pagination views."""
    def test_list_view(self):
        # Trigger two distinct errors, then verify the dashboard lists both.
        self.get('/value-error')
        self.post('/post-view')
        html = self.get('/dev', follow=True).content
        urls = [node.attrib['href'] for node in pyquery.PyQuery(html)('a.view-link, a.home-link, a.delete')]
        # 2 links for delete operation and 2 links to navigate and 1 link to home page
        self.assertEqual(len(urls), 2 + 3)
        urls = [node.attrib['href'] for node in pyquery.PyQuery(html)('a.view-link')]
        self.assertEqual(len(urls), 2)
    def test_detail_view(self):
        # Follow the first detail link and check the expected page structure.
        self.get('/value-error')
        html = self.get('/dev', follow=True).content
        url = [node.attrib['href'] for node in pyquery.PyQuery(html)('a.view-link')][0]
        response = self.get(url).content
        row = pyquery.PyQuery(response)('.mb-4')
        self.assertEqual(2, len(row))
        divs = pyquery.PyQuery(response)('.row>div')
        self.assertEqual(len(divs), 11)
    def test_delete_view(self):
        # Deleting via the list-page link should remove the stored exception.
        self.get('/value-error')
        html = self.get('/dev', follow=True).content
        url = [node.attrib['href'] for node in pyquery.PyQuery(html)('.delete')][0]
        self.get(url, follow=True)
        self.assertEqual(len(self.get_exceptions()), 0)
    def test_pagination(self):
        # Clone one stored exception 20 times (unique hashes) so the list
        # spans multiple pages, then walk the pagination links.
        self.get('/value-error')
        exception = self.get_exceptions()[0]
        hashx = exception.hash
        inserted = 0
        i = 0
        while inserted < 20:
            i += 1
            idx = str(i) + hashx[2:]
            inserted += 1
            ErrorModel.create_or_update_entity(idx, exception.host, exception.path,
                                               exception.method, exception.request_data,
                                               exception.exception_name,
                                               exception.traceback)
        response = self.get('/dev', follow=True).content
        urls = [node.attrib['href'] for node in pyquery.PyQuery(response)('a.view-link, a.delete, a.pagelink, a.home-link')]
        # Page 1: view+delete per row, home link, and a "next" page link.
        self.assertEqual(len(urls), settings.EXCEPTION_APP_DEFAULT_LIST_SIZE * 2 + 2)
        self.assertTrue('/dev/?page=2' in urls)
        response = self.get('/dev/?page=2', follow=True).content
        urls = [node.attrib['href'] for node in pyquery.PyQuery(response)('a.view-link, a.delete, a.pagelink, a.home-link')]
        # Middle page: both previous and next page links are present.
        self.assertEqual(len(urls), settings.EXCEPTION_APP_DEFAULT_LIST_SIZE * 2 + 3)
        self.assertTrue('/dev/?page=1' in urls)
        self.assertTrue('/dev/?page=3' in urls)
        response = self.get('/dev/?page=5', follow=True).content
        urls = [node.attrib['href'] for node in pyquery.PyQuery(response)('a.view-link, a.delete, a.pagelink, a.home-link')]
        self.assertTrue('/dev/?page=4' in urls)
        # Past the last page: only the navigation links remain.
        response = self.get('/dev/?page=6', follow=True).content
        urls = [node.attrib['href'] for node in pyquery.PyQuery(response)('a.view-link, a.delete, a.pagelink, a.home-link')]
        self.assertEqual(len(urls), 2)
if __name__ == '__main__':
    # Run this module's tests when executed directly (outside the Django runner).
    unittest.main()
# Demo: sorted() returns a new list and leaves the original alone, while the
# list methods reverse() and sort() mutate the list in place.
travel = ['Jerusalem', 'Hong Kong', 'Singapore', 'Disney World', 'Silicon Valley']

# sorted() never touches the original list, so the odd prints are unchanged.
for snapshot in (travel, sorted(travel), travel, sorted(travel, reverse=True), travel):
    print(snapshot)

# reverse() flips in place; applying it twice restores the original order.
for _ in range(2):
    travel.reverse()
    print(travel)

# sort() orders in place; the final reverse sort is deliberately not printed.
travel.sort()
print(travel)
travel.sort(reverse=True)
#!/usr/bin/python
#########################
# python script 21: demonstrates wrapping a shell invocation in a class method
########################
from os import system

# Class wrapping a single shell invocation.
class Class_name():
    def function1(self):
        # Launches three shell commands; '&' backgrounds each one, so the
        # listing, the disk-usage report and the helper script run concurrently.
        # NOTE(review): os.system runs through the shell; subprocess.run would
        # be safer, though the command string here is a fixed literal.
        system("ls -ltr & df -h & ./dict_to_json.py")

# Instantiate and immediately invoke -- note this executes the shell
# commands as a side effect of importing this module.
class_object = Class_name()
# calling a def in class
class_object.function1()
|
class Car:
    """A car that traverses an ordered list of street objects.

    Each street is expected to expose a ``queue`` list and an
    ``end_intersection`` with an ``id`` (seen from the usage below).
    On construction the car enqueues itself on its first street.
    """
    def __init__(self, streets):
        self.streets = streets                    # ordered route of street objects
        self.num_streets = len(streets)           # route length, cached
        self.next_street = 0                      # index of the street being traversed
        self.time_to_intersection = 0             # remaining travel time on current street
        self.finish_time = None                   # presumably set when the route completes -- not set here
        # add to first intersection
        self.streets[self.next_street].queue.append(self)
    def __str__(self):
        # NOTE(review): self.is_done() is not defined anywhere in this snippet;
        # confirm it exists on the full class before relying on __str__.
        return(f"""
        Car:
        next_intersection: {self.streets[self.next_street].end_intersection.id}
        time_to_intersection: {self.time_to_intersection}
        is_done: {self.is_done()}
        """)
|
# Transcribe a local MP3 with the IBM Watson Speech-to-Text service.
# First we import SpeechToTextV1 from ibm_watson. For more information on the API,
# see the IBM Watson SDK documentation.
from ibm_watson import SpeechToTextV1
import json
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
import wget as wg
# The service endpoint is based on the location of the service instance; we store it in url_s2t.
# To find out which URL to use, view the service credentials.
url_s2t = "https://api.us-south.speech-to-text.watson.cloud.ibm.com/instances/ad1503f8-bcd4-4607-9602-31e5ef4f300d"
# SECURITY NOTE(review): a live-looking API key is hard-coded below. It should
# be loaded from an environment variable / secrets store, and this key rotated.
iam_apikey_s2t = "2a5pItIM96XXDpiSTTP0AnnDY9ei6HAw3fSNVE6bi7iu"
# Create a Speech To Text adapter object; the parameters are the endpoint and API key.
authenticator = IAMAuthenticator(iam_apikey_s2t)
s2t = SpeechToTextV1(authenticator=authenticator)
s2t.set_service_url(url_s2t)
print(s2t)
#Lets download the audio file that we will use to convert into text.
#url = "https://s3-api.us-geo.objectstorage.softlayer.net/cf-courses-data/CognitiveClass/PY0101EN/labs/PolynomialRegressionandPipelines.mp3"
#We have the path of the wav file we would like to convert to text
#wg.download(url, 'G:\PIAIC\PIAIC AI COURSE\Quarter 1\Classes\Quarter 1\Python\A.I\PolynomialRegressionandPipelines.mp3')
# NOTE(review): the backslash sequences below (\P, \Q, ...) happen to be
# literal today, but unrecognized escapes are deprecated -- prefer a raw
# string r'G:\...' for Windows paths.
filename='G:\PIAIC\PIAIC AI COURSE\Quarter 1\Classes\Quarter 1\Python\A.I\PolynomialRegressionandPipelines.mp3'
# Open the audio file in "rb" (binary read) mode and call recognize();
# the parameter audio is the file object, content_type is the audio format.
with open(filename, mode="rb") as wav:
    response = s2t.recognize(audio=wav, content_type='audio/mp3')
#The attribute result contains a dictionary that includes the translation:
#print(response.result)
# NOTE(review): pandas.io.json.json_normalize is deprecated; newer pandas
# exposes it as pandas.json_normalize.
from pandas.io.json import json_normalize
print(json_normalize(response.result['results'],"alternatives"))
print(response)
# Extract the first alternative's transcript from the JSON result.
recognized_text=response.result['results'][0]["alternatives"][0]["transcript"]
#print(type(recognized_text))
print(recognized_text)
from django.db import models
class AuditFieldBasic(models.Model):
    """Abstract base model adding created/updated audit timestamps."""
    # Set once on INSERT; editable=False hides it from forms/admin.
    created_at = models.DateTimeField(auto_now_add=True, editable=False)
    # Refreshed automatically on every save().
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        # No table of its own; the fields are inherited by concrete models.
        abstract = True
|
# ~ STATES
# ----------------------------------
# State and trigger name constants for the USMOTOS chat-bot state machine.
START = "start"
INFO = "info"
CANCEL = "cancel"
# > Constants for the motorcycle categories (also used as trigger names).
# NOTE(review): "chooper"/"accesories" are misspellings of "chopper"/
# "accessories", but the string values are wire-level identifiers --
# changing them would break existing clients.
NAKED = "naked"
SCOOTER = "scooter"
CRUCERO = "crucero"
CHOOPER = "chooper"
CUSTOM = "custom"
SIDECAR = "sidecar"
TOURING = "touring"
RACER = "racer"
TRIAL = "trial"
# Detail states that display the motorcycles of one category.
SHOW_MOTO_NAKED = "show_moto_naked"
SHOW_MOTO_SCOOTER = "show_moto_scooter"
SHOW_MOTO_CRUCERO = "show_moto_crucero"
SHOW_MOTO_CHOOPER = "show_moto_chooper"
SHOW_MOTO_CUSTOM = "show_moto_custom"
SHOW_MOTO_SIDECAR = "show_moto_sidecar"
SHOW_MOTO_TOURING = "show_moto_touring"
SHOW_MOTO_RACER = "show_moto_racer"
SHOW_MOTO_TRIAL = "show_moto_trial"
# ~ USMOTOS product states
PRODUCTS = "products"
MOTORCYCLE = "motorcycle"
ACCESORIES = "accesories"
PROMOTIONS = "promotions"
CATEGORYS_MOTO = "categorys_moto"
CATEGORY_MOTORCYCLE = "category_motorcycle"
CATEGORYS_M = "categorys_m"
LIST_ASISTENCIAS = "list_asistencias"
# ----------------------------------
# ~ TRIGGERS
# ----------------------------------
SHOW_START = "show_start"
SHOW_INFO = "show_info"
SHOW_LOGIN = "show_login"
START_DESTROY = "start_destroy"
ADD_CEDULA_DESTROY = "add_cedula_destroy"
SHOW_MISSION = "show_mission"
SHOW_VIEW = "show_view"
SHOW_SCHEDULE = "show_schedule"
SHOW_ADDRESS = "show_address"
MISSION = "mission"
VIEW = "view"
SCHEDULE = "schedule"
ADDRESS = "address"
SHOW_INFORMATION = "show_information"
INFORMATION_BUSINESS = "information_business"
# Motorcycle / product triggers.
# NOTE(review): SHOW_ACCESORIES's value spells "accessories" while the
# ACCESORIES state's value spells "accesories" -- inconsistent on the wire.
SHOW_PRODUCTS = "show_products"
SHOW_MOTORCYCLES = "show_motorcycles"
SHOW_ACCESORIES = "show_accessories"
SHOW_PROMOTIONS = "show_promotions"
SHOW_REPARACION = "show_reparacion"
SHOW_CATEGORY_MOTORCYCLE = "show_category_motorcycle"
SHOW_ASISTENCIAS = "show_asistencias"
# ----------------------------------
# ~ CONDITIONS
# ----------------------------------
HAS_SESSION = "has_session"
HAS_USER = "has_user"
PROCESS_USER = "process_user"
ADD_CEDULA = "add_cedula"
# ~ STATES CONFIG
# ----------------------------------
# All machine states, listed once each (the original listed MOTORCYCLE twice;
# duplicate entries are redundant when handed to the state machine).
states = [
    START, INFO,
    PRODUCTS, MOTORCYCLE, ACCESORIES, PROMOTIONS, CATEGORY_MOTORCYCLE,
    CATEGORYS_MOTO, SHOW_MOTO_NAKED, SHOW_MOTO_SCOOTER, SHOW_MOTO_CRUCERO,
    SHOW_MOTO_CHOOPER, SHOW_MOTO_CUSTOM, SHOW_MOTO_SIDECAR, SHOW_MOTO_TOURING,
    SHOW_MOTO_RACER, SHOW_MOTO_TRIAL,
    SHOW_PROMOTIONS, SHOW_ACCESORIES, PROCESS_USER, LIST_ASISTENCIAS,
    SHOW_MISSION, SHOW_VIEW, SHOW_SCHEDULE, SHOW_ADDRESS,
    SHOW_INFORMATION, INFORMATION_BUSINESS,
]
# ~ TRANSITIONS
# ----------------------------------
def _transition(source, dest, trigger, render_target, conditions=None):
    """Build one transition dict; the 'after' callback is the renderer
    (reply) for *render_target*."""
    return {
        'source': source, 'dest': dest,
        'trigger': trigger,
        'after': 'render_' + render_target,  # Reply
        'conditions': list(conditions) if conditions else [],
    }


# (trigger, detail state) for every motorcycle category. Each category follows
# the same four-transition navigation pattern, generated in the loop below.
# This replaces ~360 copy-pasted lines and drops the exact-duplicate entries
# the hand-written list contained (PRODUCTS->START/CANCEL, the SIDECAR and
# TOURING blocks, and SHOW_MOTO_CRUCERO->START were each listed twice).
_MOTO_PAIRS = [
    (NAKED, SHOW_MOTO_NAKED),
    (SCOOTER, SHOW_MOTO_SCOOTER),
    (CRUCERO, SHOW_MOTO_CRUCERO),
    (CHOOPER, SHOW_MOTO_CHOOPER),
    (CUSTOM, SHOW_MOTO_CUSTOM),
    (SIDECAR, SHOW_MOTO_SIDECAR),
    (TOURING, SHOW_MOTO_TOURING),
    (RACER, SHOW_MOTO_RACER),
    (TRIAL, SHOW_MOTO_TRIAL),
]

transitions = [
    # ------ USMOTOS main menu ------
    _transition(START, PRODUCTS, SHOW_PRODUCTS, PRODUCTS),
    _transition(PRODUCTS, START, CANCEL, START),
    _transition(START, INFO, SHOW_INFO, INFO),
    _transition(PRODUCTS, CATEGORY_MOTORCYCLE, SHOW_CATEGORY_MOTORCYCLE, CATEGORYS_MOTO),
    _transition(CATEGORY_MOTORCYCLE, START, CANCEL, START),
    _transition(CATEGORYS_MOTO, START, CANCEL, START),
]

# Category navigation: enter a detail state from either category menu, go back
# to the category list, or return to the start menu.
for _trigger, _detail in _MOTO_PAIRS:
    transitions.extend([
        _transition(CATEGORY_MOTORCYCLE, _detail, _trigger, _detail),
        _transition(_detail, CATEGORYS_MOTO, CATEGORYS_M, CATEGORYS_MOTO),
        _transition(CATEGORYS_MOTO, _detail, _trigger, _detail),
        _transition(_detail, START, START, START),
    ])

transitions.extend([
    # Promotions and accessories.
    _transition(PRODUCTS, PROMOTIONS, SHOW_PROMOTIONS, PROMOTIONS),
    _transition(PROMOTIONS, START, START, START),
    _transition(PRODUCTS, ACCESORIES, SHOW_ACCESORIES, ACCESORIES),
    _transition(ACCESORIES, START, START, START),
    _transition(ACCESORIES, PRODUCTS, PRODUCTS, PRODUCTS),
    # Mechanical assistance flow.
    _transition(START, PROCESS_USER, SHOW_REPARACION, PROCESS_USER),
    _transition(PROCESS_USER, PROCESS_USER, ADD_CEDULA, PROCESS_USER, [HAS_USER]),
    _transition(PROCESS_USER, LIST_ASISTENCIAS, SHOW_ASISTENCIAS, LIST_ASISTENCIAS),
    _transition(PROCESS_USER, START, START_DESTROY, START),
    _transition(LIST_ASISTENCIAS, START, START_DESTROY, START),
    _transition(LIST_ASISTENCIAS, PROCESS_USER, ADD_CEDULA_DESTROY, PROCESS_USER, [HAS_USER]),
    # Company information menu.
    _transition(START, SHOW_INFORMATION, INFORMATION_BUSINESS, SHOW_INFORMATION),
])

# Information sub-pages: each can be opened from the information menu,
# returns to it, or goes back to the start menu.
for _trigger, _page in [
    (MISSION, SHOW_MISSION),
    (VIEW, SHOW_VIEW),
    (ADDRESS, SHOW_ADDRESS),
    (SCHEDULE, SHOW_SCHEDULE),
]:
    transitions.extend([
        _transition(SHOW_INFORMATION, _page, _trigger, _page),
        _transition(_page, SHOW_INFORMATION, INFORMATION_BUSINESS, SHOW_INFORMATION),
        _transition(_page, START, START, START),
    ])

transitions.append(_transition(SHOW_INFORMATION, START, START, START))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Generate empty prediction rows (2016-07-01 .. 2016-07-07) for every hotel
row and every distinct (hotel_id, food_id) pair found in the daily CSVs."""

# The seven target dates paired with their Chinese weekday names.
DATES = [
    ('2016-7-1', '星期五'),
    ('2016-7-2', '星期六'),
    ('2016-7-3', '星期日'),
    ('2016-7-4', '星期一'),
    ('2016-7-5', '星期二'),
    ('2016-7-6', '星期三'),
    ('2016-7-7', '星期四'),
]


def _write_predictions(src_path, dst_path, header, out_columns, dedupe_columns=None):
    """Copy selected columns of *src_path* into *dst_path*, one line per date.

    The source header line is skipped; for each retained data row, the fields
    at *out_columns* are written followed by the date, the weekday and a '0'
    placeholder. When *dedupe_columns* is given, rows repeating an earlier
    row's values at those columns are skipped.
    """
    seen = set()
    # 'with' guarantees both files are closed even if a row is malformed
    # (the original left them open on error).
    with open(src_path) as src, open(dst_path, 'w') as dst:
        dst.write(header)
        next(src, None)  # skip the source header
        for line in src:
            line = line.strip('\n')
            if not line:
                continue
            fields = line.split(',')
            if dedupe_columns is not None:
                # Tuple key: the original concatenated the fields into one
                # string, which collides (e.g. '1'+'23' == '12'+'3').
                key = tuple(fields[i] for i in dedupe_columns)
                if key in seen:
                    continue
                seen.add(key)
            values = [fields[i] for i in out_columns]
            for the_date, dayofweek in DATES:
                dst.write(','.join(values + [the_date, dayofweek, '0\n']))


# Hotel predictions: id + hotel_id per source row, no de-duplication.
_write_predictions(
    'hotel_daily.csv', 'hotel_predict',
    'id,hotel_id,the_date,dayofweek,total_need_money\n',
    (0, 1),
)
# Food predictions: one block per distinct (hotel_id, food_id) pair.
_write_predictions(
    'food_daily.csv', 'food_predict',
    'hotel_id,food_id,cate_code,unit_money,check_date,dayofweek,total_num\n',
    (1, 3, 5, 7),
    dedupe_columns=(1, 3),
)
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 4 20:16:28 2018

@author: boston

Forward-model the secondary magnetic field of a conductive sphere excited by
a transmitter loop, then draw a plan-view contour map of |B| plus two
orthogonal profiles on one 3-D figure.
"""
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import numpy as np
import MagDipole as md
import MagLoops as ml
import Moment as mm

# Scenario parameters (originally intended to come from a GUI):
#   loop:   current 20 A, radius 0.25 m, height 1 m, frequency 1000 Hz
#   object: sphere of radius 0.2 m at (1, 1, -1)
# define loop
loop = ml.Loops(20, 0.25, 1, 1000)
# define object
obj = mm.Moment(1, 1, 1000, 0.2)
# Primary field at the target and resulting induced magnetic moment.
# (The dead np.zeros pre-assignments of the original were removed --
# both values were immediately overwritten.)
Hp = loop.AllField(3, 1, -1)
mom = obj.SphereM(Hp)


def mo(a, b, c=0):
    """Euclidean magnitude of a 2- or 3-component (complex) vector."""
    return np.sqrt(a**2 + b**2 + c**2)


xmin, xmax, ymin, ymax, z = -5., 5., -5., 5., 1.  # x, y bounds and elevation
profile_x = 0.    # x-coordinate of y-profile
profile_y = 0.    # y-coordinate of x-profile
h = 0.2           # grid interval
radii = (2., 5.)  # how many layers of field lines for plotting
Naz = 10          # number of azimuth

# Observation grid at elevation z.
xi, yi = np.meshgrid(np.r_[xmin:xmax + h:h], np.r_[ymin:ymax + h:h])
x1, y1 = xi.flatten(), yi.flatten()
z1 = np.full(x1.shape, z)
Bx, By, Bz = np.zeros(len(x1), dtype=complex), np.zeros(len(x1), dtype=complex), np.zeros(len(x1), dtype=complex)
Ba1 = np.zeros(len(x1), dtype=complex)
for i in np.arange(len(x1)):
    Bx[i], By[i], Bz[i] = md.MagneticDipoleField((1, 1, -1), (x1[i], y1[i], z1[i]), mom)
    Ba1[i] = mo(Bx[i], By[i], Bz[i])
# get x-profile
x2 = np.r_[xmin:xmax + h:h]
y2, z2 = np.full(x2.shape, profile_y), np.full(x2.shape, z)
Bx, By, Bz = np.zeros(len(x2), dtype=complex), np.zeros(len(x2), dtype=complex), np.zeros(len(x2), dtype=complex)
Ba2 = np.zeros(len(x2), dtype=complex)
for i in np.arange(len(x2)):
    Bx[i], By[i], Bz[i] = md.MagneticDipoleField((1, 1, -1), (x2[i], y2[i], z2[i]), mom)
    Ba2[i] = mo(Bx[i], By[i], Bz[i])
# get y-profile
y3 = np.r_[ymin:ymax + h:h]
x3, z3 = np.full(y3.shape, profile_x), np.full(y3.shape, z)
Bx, By, Bz = np.zeros(len(x3), dtype=complex), np.zeros(len(x3), dtype=complex), np.zeros(len(x3), dtype=complex)
Ba3 = np.zeros(len(x3), dtype=complex)
for i in np.arange(len(x3)):
    Bx[i], By[i], Bz[i] = md.MagneticDipoleField((1, 1, -1), (x3[i], y3[i], z3[i]), mom)
    Ba3[i] = mo(Bx[i], By[i], Bz[i])
# plot
fig = plt.figure()
# fig.gca(projection='3d') stopped working in Matplotlib 3.6; add_subplot is
# the supported way to get a 3-D axes.
ax = fig.add_subplot(projection='3d')
ax.scatter(x1, y1, z1, s=2, alpha=0.3)
Bt = Ba1.reshape(xi.shape) * 1e9  # contour and color scale in nT
# NOTE(review): Bt is complex; contourf/min/max effectively use the real part
# and will warn -- consider np.abs(Bt) if the amplitude is what is wanted.
c = ax.contourf(xi, yi, Bt, alpha=1, zdir='z', offset=z - max(radii) * 2, cmap='jet',
                levels=np.linspace(Bt.min(), Bt.max(), 50, endpoint=True))
fig.colorbar(c)
# auto-scaling for profile plot
# ndarray.ptp() was removed in NumPy 2.0; np.ptp() is the portable spelling.
ptpmax = np.max((np.ptp(Ba2), np.ptp(Ba3)))  # dynamic range
autoscaling = np.max(radii) / ptpmax
# plot x-profile
ax.scatter(x2, y2, z2, s=2, c='black', alpha=0.3)
ax.plot(x2, Ba2 * autoscaling, zs=ymax, c='black', zdir='y')
# plot y-profile
ax.scatter(x3, y3, z3, s=2, c='black', alpha=0.3)
ax.plot(y3, Ba3 * autoscaling, zs=xmin, c='black', zdir='x')
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
ax.set_xlim(xmin, xmax)
ax.set_ylim(ymin, ymax)
ax.set_zlim(z - max(radii) * 2, max(radii) * 1.5)
plt.show()
|
import datetime
import json
from json import JSONDecodeError
from django.conf import settings
from django.contrib.postgres.search import TrigramSimilarity
from django.db import models
from django.db.models import Case, When
from django.db.models.query_utils import Q
from django_filters import rest_framework as filters
from djqscsv import render_to_csv_response
from drf_spectacular.utils import extend_schema, extend_schema_view
from dry_rest_permissions.generics import DRYPermissionFiltersBase, DRYPermissions
from rest_framework import filters as rest_framework_filters
from rest_framework import mixins, serializers, status, viewsets
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.filters import BaseFilterBackend
from rest_framework.generics import get_object_or_404
from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin
from rest_framework.pagination import PageNumberPagination
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from care.facility.api.serializers.patient import (
FacilityPatientStatsHistorySerializer,
PatientDetailSerializer,
PatientListSerializer,
PatientNotesSerializer,
PatientSearchSerializer,
PatientTransferSerializer,
)
from care.facility.api.serializers.patient_icmr import PatientICMRSerializer
from care.facility.api.viewsets.mixins.history import HistoryMixin
from care.facility.models import (
CATEGORY_CHOICES,
COVID_CATEGORY_CHOICES,
DISCHARGE_REASON_CHOICES,
FACILITY_TYPES,
BedTypeChoices,
DailyRound,
Facility,
FacilityPatientStatsHistory,
PatientNotes,
PatientRegistration,
ShiftingRequest,
)
from care.facility.models.base import covert_choice_dict
from care.facility.models.bed import AssetBed
from care.facility.models.patient_base import DISEASE_STATUS_DICT
from care.users.models import User
from care.utils.cache.cache_allowed_facilities import get_accessible_facilities
from care.utils.filters.choicefilter import CareChoiceFilter
from care.utils.filters.multiselect import MultiSelectFilter
from care.utils.queryset.patient import get_patient_notes_queryset
from config.authentication import (
CustomBasicAuthentication,
CustomJWTAuthentication,
MiddlewareAuthentication,
)
# Reverse lookup tables (choice label -> stored value) for the filters below.
REVERSE_FACILITY_TYPES = covert_choice_dict(FACILITY_TYPES)
REVERSE_BED_TYPES = covert_choice_dict(BedTypeChoices)
# Valid discharge-reason values: first element of each choice pair.
DISCHARGE_REASONS = [choice[0] for choice in DISCHARGE_REASON_CHOICES]
VENTILATOR_CHOICES = covert_choice_dict(DailyRound.VentilatorInterfaceChoice)
class PatientFilterSet(filters.FilterSet):
    """django-filter filter set backing the patient list end point.

    Filters are grouped below into demographics, consultation fields,
    location, vaccination, permission and miscellaneous filters.
    """
    source = filters.ChoiceFilter(choices=PatientRegistration.SourceChoices)
    disease_status = CareChoiceFilter(choice_dict=DISEASE_STATUS_DICT)
    facility = filters.UUIDFilter(field_name="facility__external_id")
    facility_type = CareChoiceFilter(
        field_name="facility__facility_type",
        choice_dict=REVERSE_FACILITY_TYPES,
    )
    phone_number = filters.CharFilter(field_name="phone_number")
    emergency_phone_number = filters.CharFilter(field_name="emergency_phone_number")
    allow_transfer = filters.BooleanFilter(field_name="allow_transfer")
    name = filters.CharFilter(field_name="name", lookup_expr="icontains")
    patient_no = filters.CharFilter(
        field_name="last_consultation__patient_no", lookup_expr="icontains"
    )
    gender = filters.NumberFilter(field_name="gender")
    age = filters.NumberFilter(field_name="age")
    age_min = filters.NumberFilter(field_name="age", lookup_expr="gte")
    age_max = filters.NumberFilter(field_name="age", lookup_expr="lte")
    deprecated_covid_category = filters.ChoiceFilter(
        field_name="last_consultation__deprecated_covid_category",
        choices=COVID_CATEGORY_CHOICES,
    )
    # Delegates to filter_by_category so the newest category source wins.
    category = filters.ChoiceFilter(
        method="filter_by_category",
        choices=CATEGORY_CHOICES,
    )
    def filter_by_category(self, queryset, name, value):
        """Match the latest known category: the last daily round's patient
        category when a daily round exists, otherwise the consultation's."""
        if value:
            queryset = queryset.filter(
                (
                    Q(last_consultation__last_daily_round__isnull=False)
                    & Q(last_consultation__last_daily_round__patient_category=value)
                )
                | (
                    Q(last_consultation__last_daily_round__isnull=True)
                    & Q(last_consultation__category=value)
                )
            )
        return queryset
    created_date = filters.DateFromToRangeFilter(field_name="created_date")
    modified_date = filters.DateFromToRangeFilter(field_name="modified_date")
    srf_id = filters.CharFilter(field_name="srf_id")
    is_declared_positive = filters.BooleanFilter(field_name="is_declared_positive")
    date_declared_positive = filters.DateFromToRangeFilter(
        field_name="date_declared_positive"
    )
    date_of_result = filters.DateFromToRangeFilter(field_name="date_of_result")
    last_vaccinated_date = filters.DateFromToRangeFilter(
        field_name="last_vaccinated_date"
    )
    is_antenatal = filters.BooleanFilter(field_name="is_antenatal")
    is_active = filters.BooleanFilter(field_name="is_active")
    # Location Based Filtering
    district = filters.NumberFilter(field_name="district__id")
    district_name = filters.CharFilter(
        field_name="district__name", lookup_expr="icontains"
    )
    local_body = filters.NumberFilter(field_name="local_body__id")
    local_body_name = filters.CharFilter(
        field_name="local_body__name", lookup_expr="icontains"
    )
    state = filters.NumberFilter(field_name="state__id")
    state_name = filters.CharFilter(field_name="state__name", lookup_expr="icontains")
    # Consultation Fields
    is_kasp = filters.BooleanFilter(field_name="last_consultation__is_kasp")
    last_consultation_kasp_enabled_date = filters.DateFromToRangeFilter(
        field_name="last_consultation__kasp_enabled_date"
    )
    last_consultation_admission_date = filters.DateFromToRangeFilter(
        field_name="last_consultation__admission_date"
    )
    last_consultation_discharge_date = filters.DateFromToRangeFilter(
        field_name="last_consultation__discharge_date"
    )
    last_consultation_symptoms_onset_date = filters.DateFromToRangeFilter(
        field_name="last_consultation__symptoms_onset_date"
    )
    last_consultation_admitted_bed_type_list = MultiSelectFilter(
        field_name="last_consultation__current_bed__bed__bed_type"
    )
    last_consultation_admitted_bed_type = CareChoiceFilter(
        field_name="last_consultation__current_bed__bed__bed_type",
        choice_dict=REVERSE_BED_TYPES,
    )
    last_consultation_discharge_reason = filters.ChoiceFilter(
        field_name="last_consultation__discharge_reason",
        choices=DISCHARGE_REASON_CHOICES,
    )
    last_consultation_assigned_to = filters.NumberFilter(
        field_name="last_consultation__assigned_to"
    )
    last_consultation_is_telemedicine = filters.BooleanFilter(
        field_name="last_consultation__is_telemedicine"
    )
    ventilator_interface = CareChoiceFilter(
        field_name="last_consultation__last_daily_round__ventilator_interface",
        choice_dict=VENTILATOR_CHOICES,
    )
    # Vaccination Filters
    covin_id = filters.CharFilter(field_name="covin_id")
    is_vaccinated = filters.BooleanFilter(field_name="is_vaccinated")
    number_of_doses = filters.NumberFilter(field_name="number_of_doses")
    # Permission Filters
    assigned_to = filters.NumberFilter(field_name="assigned_to")
    # Other Filters
    has_bed = filters.BooleanFilter(field_name="has_bed", method="filter_bed_not_null")
    def filter_bed_not_null(self, queryset, name, value):
        """Filter by bed assignment on the still-admitted consultation.

        NOTE(review): has_bed=True maps to bed_number__isnull=True, i.e.
        patients WITHOUT a bed number -- the sense looks inverted; confirm
        against the front-end usage before changing.
        """
        return queryset.filter(
            last_consultation__bed_number__isnull=value,
            last_consultation__discharge_date__isnull=True,
        )
class PatientDRYFilter(DRYPermissionFiltersBase):
    """Row-level visibility rules for patient querysets."""
    def filter_queryset(self, request, queryset, view):
        """Restrict *queryset* to patients the requesting user may access.

        Order matters: list-specific filtering first, then the asset-user
        short-circuit, then role-based scoping for non-superusers.
        """
        if view.action == "list":
            queryset = self.filter_list_queryset(request, queryset, view)
        # Users tied to an asset only see patients whose latest daily round
        # places them in a bed linked to that asset.
        if request.user.asset:
            return queryset.filter(
                last_consultation__last_daily_round__bed_id__in=AssetBed.objects.filter(
                    asset=request.user.asset
                ).values("id"),
                last_consultation__last_daily_round__bed__isnull=False,
            )
        if not request.user.is_superuser:
            # State/district lab admins see their whole region; everyone else
            # is limited to accessible facilities or direct assignment.
            # "transfer" is exempted from the facility scoping.
            if request.user.user_type >= User.TYPE_VALUE_MAP["StateLabAdmin"]:
                queryset = queryset.filter(facility__state=request.user.state)
            elif request.user.user_type >= User.TYPE_VALUE_MAP["DistrictLabAdmin"]:
                queryset = queryset.filter(facility__district=request.user.district)
            elif view.action != "transfer":
                allowed_facilities = get_accessible_facilities(request.user)
                q_filters = Q(facility__id__in=allowed_facilities)
                q_filters |= Q(last_consultation__assigned_to=request.user)
                q_filters |= Q(assigned_to=request.user)
                queryset = queryset.filter(q_filters)
        return queryset
    def filter_list_queryset(self, request, queryset, view):
        """Honor ?without_facility=true for list views.

        A missing or malformed parameter defaults to showing only
        facility-linked patients.
        """
        try:
            show_without_facility = json.loads(
                request.query_params.get("without_facility")
            )
        except (
            JSONDecodeError,
            TypeError,
        ):
            show_without_facility = False
        return queryset.filter(facility_id__isnull=show_without_facility)
class PatientCustomOrderingFilter(BaseFilterBackend):
    """Support ?ordering=category_severity / -category_severity by annotating
    each patient with the rank of its consultation's clinical category."""

    def filter_queryset(self, request, queryset, view):
        requested = request.query_params.get("ordering", "")
        if requested in ("category_severity", "-category_severity"):
            # Rank categories 1-based by their position in CATEGORY_CHOICES;
            # rows with an unknown/missing category sort after all known ones.
            whens = [
                When(last_consultation__category=choice, then=rank)
                for rank, (choice, _label) in enumerate(CATEGORY_CHOICES, start=1)
            ]
            severity = Case(
                *whens,
                default=(len(CATEGORY_CHOICES) + 1),
                output_field=models.IntegerField(),
            )
            queryset = queryset.annotate(category_severity=severity).order_by(requested)
        return queryset
@extend_schema_view(history=extend_schema(tags=["patient"]))
class PatientViewSet(
    HistoryMixin,
    mixins.CreateModelMixin,
    mixins.ListModelMixin,
    mixins.RetrieveModelMixin,
    mixins.UpdateModelMixin,
    GenericViewSet,
):
    """CRUD + transfer + CSV export for patient registrations."""

    authentication_classes = [
        CustomBasicAuthentication,
        CustomJWTAuthentication,
        MiddlewareAuthentication,
    ]
    permission_classes = (IsAuthenticated, DRYPermissions)
    lookup_field = "external_id"
    # select_related keeps the list endpoint from issuing N+1 queries for
    # these frequently-serialized relations.
    queryset = PatientRegistration.objects.all().select_related(
        "local_body",
        "district",
        "state",
        "ward",
        "assigned_to",
        "facility",
        "facility__ward",
        "facility__local_body",
        "facility__district",
        "facility__state",
        # "nearest_facility",
        # "nearest_facility__local_body",
        # "nearest_facility__district",
        # "nearest_facility__state",
        "last_consultation",
        "last_consultation__assigned_to",
        "last_edited",
        "created_by",
    )
    ordering_fields = [
        "facility__name",
        "id",
        "name",
        "created_date",
        "modified_date",
        "review_time",
        "last_consultation__current_bed__bed__name",
        "date_declared_positive",
    ]
    serializer_class = PatientDetailSerializer
    # Order matters: PatientDRYFilter scopes rows by permission before the
    # user-controlled filters/ordering are applied.
    filter_backends = (
        PatientDRYFilter,
        filters.DjangoFilterBackend,
        rest_framework_filters.OrderingFilter,
        PatientCustomOrderingFilter,
    )
    filterset_class = PatientFilterSet
    # Date-range filters checked when exporting to CSV (see list()).
    date_range_fields = [
        "created_date",
        "modified_date",
        "date_declared_positive",
        "date_of_result",
        "last_vaccinated_date",
        "last_consultation_admission_date",
        "last_consultation_discharge_date",
        "last_consultation_symptoms_onset_date",
    ]
    # Maximum date span (in days) allowed for a CSV export.
    CSV_EXPORT_LIMIT = 7

    def get_queryset(self):
        # filter_query = self.request.query_params.get("disease_status")
        queryset = super().get_queryset()
        # if filter_query:
        #     disease_status = filter_query if filter_query.isdigit() else DiseaseStatusEnum[filter_query].value
        #     return queryset.filter(disease_status=disease_status)
        # if self.action == "list":
        #     queryset = queryset.filter(is_active=self.request.GET.get("is_active", True))
        return queryset

    def get_serializer_class(self):
        # Lighter serializers for list/special actions; detail otherwise.
        if self.action == "list":
            return PatientListSerializer
        elif self.action == "icmr_sample":
            return PatientICMRSerializer
        elif self.action == "transfer":
            return PatientTransferSerializer
        else:
            return self.serializer_class

    def list(self, request, *args, **kwargs):
        """
        Patient List
        `without_facility` accepts boolean - default is false -
            if true: shows only patients without a facility mapped
            if false (default behaviour): shows only patients with a facility mapped
        `disease_status` accepts - string and int -
            SUSPECTED = 1
            POSITIVE = 2
            NEGATIVE = 3
            RECOVERY = 4
            RECOVERED = 5
            EXPIRED = 6
        """
        if settings.CSV_REQUEST_PARAMETER in request.GET:
            # Start Date Validation
            # Re-run the filterset only to inspect cleaned date ranges;
            # exports must be bounded to CSV_EXPORT_LIMIT days.
            temp = filters.DjangoFilterBackend().get_filterset(
                self.request, self.queryset, self
            )
            temp.is_valid()
            within_limits = False
            for field in self.date_range_fields:
                slice_obj = temp.form.cleaned_data.get(field)
                if slice_obj:
                    # A half-open range cannot be size-checked, so reject it.
                    if not slice_obj.start or not slice_obj.stop:
                        raise ValidationError(
                            {
                                field: "both starting and ending date must be provided for export"
                            }
                        )
                    days_difference = (
                        temp.form.cleaned_data.get(field).stop
                        - temp.form.cleaned_data.get(field).start
                    ).days
                    if days_difference <= self.CSV_EXPORT_LIMIT:
                        within_limits = True
                    else:
                        raise ValidationError(
                            {
                                field: f"Cannot export more than {self.CSV_EXPORT_LIMIT} days at a time"
                            }
                        )
            if not within_limits:
                raise ValidationError(
                    {
                        "date": f"Atleast one date field must be filtered to be within {self.CSV_EXPORT_LIMIT} days"
                    }
                )
            # End Date Limiting Validation
            queryset = self.filter_queryset(self.get_queryset()).values(
                *PatientRegistration.CSV_MAPPING.keys()
            )
            return render_to_csv_response(
                queryset,
                field_header_map=PatientRegistration.CSV_MAPPING,
                field_serializer_map=PatientRegistration.CSV_MAKE_PRETTY,
            )
        return super(PatientViewSet, self).list(request, *args, **kwargs)

    @extend_schema(tags=["patient"])
    @action(detail=True, methods=["POST"])
    def transfer(self, request, *args, **kwargs):
        """Transfer a patient to another facility (if the source allows it)."""
        # NOTE(review): fetched directly by external_id, not via
        # get_queryset() — permission scoping relies on PatientDRYFilter's
        # "transfer" exemption above; confirm this is intentional.
        patient = PatientRegistration.objects.get(external_id=kwargs["external_id"])
        if patient.allow_transfer is False:
            return Response(
                {"Patient": "Cannot Transfer Patient , Source Facility Does Not Allow"},
                status=status.HTTP_406_NOT_ACCEPTABLE,
            )
        patient.allow_transfer = False
        patient.is_active = True
        serializer = self.get_serializer_class()(patient, data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        # Re-fetch so the response reflects serializer side effects.
        patient = PatientRegistration.objects.get(external_id=kwargs["external_id"])
        response_serializer = self.get_serializer(patient)
        # Update all Active Shifting Request to Rejected
        # (status codes 30/50/80 presumably mean rejected/completed —
        # TODO confirm against the ShiftingRequest status enum.)
        for shifting_request in ShiftingRequest.objects.filter(
            ~Q(status__in=[30, 50, 80]), patient=patient
        ):
            shifting_request.status = 30
            shifting_request.comments = f"{shifting_request.comments}\n The shifting request was auto rejected by the system as the patient was moved to {patient.facility.name}"
            shifting_request.save(update_fields=["status", "comments"])
        return Response(data=response_serializer.data, status=status.HTTP_200_OK)
class FacilityPatientStatsHistoryFilterSet(filters.FilterSet):
    # Exposes `entry_date_after` / `entry_date_before` range query params.
    entry_date = filters.DateFromToRangeFilter(field_name="entry_date")
class FacilityPatientStatsHistoryViewSet(viewsets.ModelViewSet):
    """CRUD for per-facility patient statistics snapshots.

    Rows are scoped to the facility named in the URL and to the
    requesting user's reach (superuser > state admin > district admin >
    facility member).
    """

    lookup_field = "external_id"
    permission_classes = (
        IsAuthenticated,
        DRYPermissions,
    )
    queryset = FacilityPatientStatsHistory.objects.filter(
        facility__deleted=False
    ).order_by("-entry_date")
    serializer_class = FacilityPatientStatsHistorySerializer
    filter_backends = (filters.DjangoFilterBackend,)
    filterset_class = FacilityPatientStatsHistoryFilterSet
    http_method_names = ["get", "post", "delete"]

    def get_queryset(self):
        """Scope stats to the URL's facility and the user's privilege level."""
        user = self.request.user
        queryset = self.queryset.filter(
            facility__external_id=self.kwargs.get("facility_external_id")
        )
        if user.is_superuser:
            return queryset
        elif self.request.user.user_type >= User.TYPE_VALUE_MAP["StateLabAdmin"]:
            return queryset.filter(facility__state=user.state)
        elif self.request.user.user_type >= User.TYPE_VALUE_MAP["DistrictLabAdmin"]:
            return queryset.filter(facility__district=user.district)
        return queryset.filter(facility__users__id__exact=user.id)

    def get_object(self):
        return get_object_or_404(
            self.get_queryset(), external_id=self.kwargs.get("external_id")
        )

    def get_facility(self):
        """Resolve the facility from the URL, enforcing membership for
        non-superusers; 404s when not found/allowed."""
        facility_qs = Facility.objects.filter(
            external_id=self.kwargs.get("facility_external_id")
        )
        if not self.request.user.is_superuser:
            # BUG FIX: .filter() returns a NEW queryset; the result was
            # previously discarded, so the membership restriction never
            # actually applied. Reassign it.
            facility_qs = facility_qs.filter(users__id__exact=self.request.user.id)
        return get_object_or_404(facility_qs)

    def perform_create(self, serializer):
        return serializer.save(facility=self.get_facility())

    def list(self, request, *args, **kwargs):
        """
        Patient Stats - List
        Available Filters
        - entry_date_after: date in YYYY-MM-DD format, inclusive of this date
        - entry_date_before: date in YYYY-MM-DD format, inclusive of this date
        """
        return super(FacilityPatientStatsHistoryViewSet, self).list(
            request, *args, **kwargs
        )
class PatientSearchSetPagination(PageNumberPagination):
    # Large page size: search results serialize only a few narrow fields.
    page_size = 200
class PatientSearchViewSet(ListModelMixin, GenericViewSet):
    """Fuzzy patient search by birth date/year, phone number, name, or age."""

    http_method_names = ["get"]
    # only() trims the SELECT to the columns the search serializer emits.
    queryset = PatientRegistration.objects.only(
        "id",
        "external_id",
        "name",
        "gender",
        "phone_number",
        "state_id",
        "facility",
        "allow_transfer",
        "is_active",
    )
    serializer_class = PatientSearchSerializer
    permission_classes = (IsAuthenticated, DRYPermissions)
    pagination_class = PatientSearchSetPagination

    def get_queryset(self):
        if self.action != "list":
            return super(PatientSearchViewSet, self).get_queryset()
        else:
            # Validate the raw query params through the serializer.
            serializer = PatientSearchSerializer(
                data=self.request.query_params, partial=True
            )
            serializer.is_valid(raise_exception=True)
            # Name search is reserved for district-level admins and above.
            if self.request.user.user_type >= User.TYPE_VALUE_MAP["DistrictLabAdmin"]:
                search_keys = [
                    "date_of_birth",
                    "year_of_birth",
                    "phone_number",
                    "name",
                    "age",
                ]
            else:
                search_keys = [
                    "date_of_birth",
                    "year_of_birth",
                    "phone_number",
                    "age",
                ]
            search_fields = {
                key: serializer.validated_data[key]
                for key in search_keys
                if serializer.validated_data.get(key)
            }
            if not search_fields:
                raise serializers.ValidationError(
                    {
                        "detail": [
                            f"None of the search keys provided. Available: {', '.join(search_keys)}"
                        ]
                    }
                )
            # if not self.request.user.is_superuser:
            #     search_fields["state_id"] = self.request.user.state_id
            if "age" in search_fields:
                age = search_fields.pop("age")
                year_of_birth = datetime.datetime.now().year - age
                # NOTE(review): this computes a BIRTH YEAR but then filters
                # the `age` field with it (age__gte/age__lte = year ± 5).
                # Looks like it should filter `year_of_birth__gte/lte`
                # instead — confirm against the model before relying on it.
                search_fields["age__gte"] = year_of_birth - 5
                search_fields["age__lte"] = year_of_birth + 5
            # Name is matched fuzzily (trigram similarity), not exactly.
            name = search_fields.pop("name", None)
            queryset = self.queryset.filter(**search_fields)
            if name:
                queryset = (
                    queryset.annotate(similarity=TrigramSimilarity("name", name))
                    .filter(similarity__gt=0.2)
                    .order_by("-similarity")
                )
            return queryset

    @extend_schema(tags=["patient"])
    def list(self, request, *args, **kwargs):
        """
        Patient Search
        ### Available filters -
        - year_of_birth: in YYYY format
        - date_of_birth: in YYYY-MM-DD format
        - phone_number: in E164 format: eg: +917795937091
        - name: free text search
        - age: number - searches age +/- 5 years
        **SPECIAL NOTE**: the values should be urlencoded
        `Eg: api/v1/patient/search/?year_of_birth=1992&phone_number=%2B917795937091`
        """
        return super(PatientSearchViewSet, self).list(request, *args, **kwargs)
class PatientNotesViewSet(
    ListModelMixin, RetrieveModelMixin, CreateModelMixin, GenericViewSet
):
    """List, retrieve, and create notes attached to a patient."""

    queryset = (
        PatientNotes.objects.all()
        .select_related("facility", "patient", "created_by")
        .order_by("-created_date")
    )
    serializer_class = PatientNotesSerializer
    permission_classes = (IsAuthenticated, DRYPermissions)

    def get_queryset(self):
        """Scope notes to the URL's patient and the user's privilege level."""
        user = self.request.user
        queryset = self.queryset.filter(
            patient__external_id=self.kwargs.get("patient_external_id")
        )
        # BUG FIX: the check was inverted ("if not user.is_superuser:
        # return queryset"), which handed regular users the UNSCOPED
        # queryset and applied the narrowing filters to superusers.
        # Superusers see everything; everyone else is scoped below.
        if user.is_superuser:
            return queryset
        if user.user_type >= User.TYPE_VALUE_MAP["StateLabAdmin"]:
            queryset = queryset.filter(patient__facility__state=user.state)
        elif user.user_type >= User.TYPE_VALUE_MAP["DistrictLabAdmin"]:
            queryset = queryset.filter(patient__facility__district=user.district)
        else:
            # Regular users: accessible facilities or direct assignment.
            allowed_facilities = get_accessible_facilities(user)
            q_filters = Q(patient__facility__id__in=allowed_facilities)
            q_filters |= Q(patient__last_consultation__assigned_to=user)
            q_filters |= Q(patient__assigned_to=user)
            queryset = queryset.filter(q_filters)
        return queryset

    def perform_create(self, serializer):
        """Create a note for an active, accessible patient."""
        patient = get_object_or_404(
            get_patient_notes_queryset(self.request.user).filter(
                external_id=self.kwargs.get("patient_external_id")
            )
        )
        if not patient.is_active:
            raise ValidationError(
                {"patient": "Only active patients data can be updated"}
            )
        return serializer.save(
            facility=patient.facility,
            patient=patient,
            created_by=self.request.user,
        )
|
from .db import db
import datetime
class Pad(db.Model):
    """A user's sound pad: block/note sequences plus display metadata."""

    __tablename__ = 'pads'

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(255), nullable=False)
    color = db.Column(db.String(64), default='#AFB1D4')
    multiplier = db.Column(db.Integer, default=1)
    block_seq = db.Column(db.ARRAY(db.Integer), nullable=False)
    note_seq = db.Column(db.ARRAY(db.String), nullable=False)
    # BUG FIX: default was `datetime.datetime.today()` — called once at
    # import time, stamping every row with the server's start date. Pass
    # the callable (and use `date` to match the Date column) so SQLAlchemy
    # evaluates it per insert.
    date_created = db.Column(db.Date, default=datetime.date.today)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    board_id = db.Column(db.Integer, db.ForeignKey(
        'boards.id'))
    user = db.relationship('User', back_populates='pads')
    board = db.relationship('Board', back_populates='pads')

    def to_dict(self):
        """Serialize the pad to a JSON-ready dict."""
        return {
            'id': self.id,
            'title': self.title,
            'color': self.color,
            'multiplier': self.multiplier,
            'block_seq': self.block_seq,
            'note_seq': self.note_seq,
            'user_id': self.user_id,
            'board_id': self.board_id,
            'date_created': self.date_created
        }
|
#!/usr/bin/env python
# coding=utf-8
import codecs
import sys
#only for query
#the node class
#the node class
class edge():
    """A trie node.

    words  -- number of words ending exactly at this node
    prefix -- number of words passing through (strictly below) this node
    sons   -- dict mapping the next character to the child node
    """

    def __init__(self):
        self.words = 0
        self.prefix = 0
        self.sons = {}  # use dictionary, hash map faster

    def addWord(self, word):
        """Insert *word*, updating word/prefix counters along the path."""
        if not word:
            self.words += 1
        else:
            self.prefix += 1
            child = self.sons.get(word[0])
            if child is None:  # idiom fix: `is None`, not `== None`
                child = self.sons[word[0]] = edge()
            child.addWord(word[1:])

    def countWord(self, word):
        """Return how many times *word* was inserted, or -1 if its path
        does not exist in the trie."""
        if not word:
            return self.words
        child = self.sons.get(word[0])
        if child is None:
            return -1
        return child.countWord(word[1:])

    def countPrefix(self, word):
        """Return how many inserted words have *word* as a strict prefix,
        or -1 if the path does not exist."""
        if not word:
            return self.prefix
        child = self.sons.get(word[0])
        if child is None:
            return -1
        return child.countPrefix(word[1:])
# Collect every stored word at or below *node* into *result*.
def get_subword(node, word, result):
    """Append to *result* all complete words reachable from *node*;
    *word* is the prefix spelled by the path to *node*."""
    if not node.sons:
        # Leaf: the path itself is a word (deduplicated).
        if word not in result:
            result.append(word)
    else:
        for letter, child in node.sons.items():
            candidate = word + letter
            if child.words > 0:
                result.append(candidate)
            get_subword(child, candidate, result)
def queryWord(word, result, node, initial_word):
    """Collect *initial_word* and all its completions into *result*.

    Returns *result*; returns -1 only when the very first character of
    *word* has no edge (deeper misses still return the — possibly
    empty — *result*, preserving the original quirk).
    """
    if word == '':
        result.append(initial_word)
        # Gather every stored word below this node as a completion.
        get_subword(node, initial_word, result)
        return result
    child = node.sons.get(word[0])
    if child is None:
        return -1
    # Recursion return value intentionally ignored, as in the original.
    queryWord(word[1:], result, child, initial_word)
    return result
## data processing
def gener(file_name):
    """Yield one flat list of fields per line of *file_name*.

    Fields are comma-separated; the LAST comma-field may itself contain
    semicolon-separated values, which are flattened into the same list.
    """
    with codecs.open(file_name, "r", "utf-8") as fh:
        for raw in fh:
            raw = raw.strip('\n')
            fields = raw.split(u',')
            row = list(fields[:-1])
            row.extend(fields[-1].split(u';'))
            yield row
## add more data
def gener_more(file_name):
    """Yield one whole word per line of *file_name* (newline stripped)."""
    with codecs.open(file_name, "r", "utf-8") as fh:
        for raw in fh:
            yield raw.strip('\n')
# Interactive prefix-query REPL (Python 2: print statements / raw_input).
if __name__ == '__main__':
    print "########################################################"
    print "Hello Prof:"
    print "Initializing: buiding tree..."
    # Build the trie from both data files.
    tree = edge()
    for lines in gener('data.txt'):
        for word in lines:
            tree.addWord(word)
    for word in gener_more('more_data.txt'):
        tree.addWord(word)
    print "Initializing finished!"
    print "########################################################"
    print "Please enter a word:"
    while True:
        # Decode terminal input to unicode (Python 2).
        word = raw_input().decode(sys.stdin.encoding)
        if word == '':
            print "Please enter some words!"
            print "******************************************\n\n"
            continue
        result = []
        initial_word = word
        res = queryWord(word, result, tree, initial_word)
        # -1: first character missing; empty list: deeper miss.
        if res == -1 or len(res)==0:
            print "******************************************"
            print "Not Found!"
            print "******************************************\n\n"
        else:
            print "******************************************"
            #show 15 items at most
            show_len = len(res) if len(res)<=15 else 15
            for item in res[:show_len]:
                print item," Prefix: ",tree.countPrefix(item)," Word: ", tree.countWord(item)
            print "******************************************\n\n"
|
import argparse
def _str2bool(value):
    """Parse a command-line boolean: '0'/'false'/'no' etc. -> False.

    BUG FIX helper: `type=bool` makes argparse call bool("False"),
    which is True for any non-empty string.
    """
    if isinstance(value, bool):
        return value
    return value.strip().lower() in ("1", "true", "t", "yes", "y")


def get_argument():
    """Build the CLI argument parser for the Neural Cleanse experiments.

    Returns the configured argparse.ArgumentParser (not parsed args).
    """
    parser = argparse.ArgumentParser()
    # Directory option
    parser.add_argument("--checkpoints", type=str, default="../../checkpoints/")
    parser.add_argument("--data_root", type=str, default="../../data/")
    parser.add_argument("--device", type=str, default="cuda")
    parser.add_argument("--result", type=str, default="./results")
    parser.add_argument("--dataset", type=str, default="mnist")
    parser.add_argument("--attack_mode", type=str, default="all2one")
    parser.add_argument("--temps", type=str, default="./temps")
    # ---------------------------- For Neural Cleanse --------------------------
    # Model hyperparameters
    parser.add_argument("--batchsize", type=int, default=64)
    parser.add_argument("--lr", type=float, default=1e-1)
    # Input dims default to None so the caller can fill dataset-specific values.
    parser.add_argument("--input_height", type=int, default=None)
    parser.add_argument("--input_width", type=int, default=None)
    parser.add_argument("--input_channel", type=int, default=None)
    parser.add_argument("--init_cost", type=float, default=1e-3)
    parser.add_argument("--atk_succ_threshold", type=float, default=99.0)
    # BUG FIX: was type=bool — "--early_stop False" parsed as True.
    parser.add_argument("--early_stop", type=_str2bool, default=True)
    parser.add_argument("--early_stop_threshold", type=float, default=99.0)
    parser.add_argument("--early_stop_patience", type=int, default=25)
    parser.add_argument("--patience", type=int, default=5)
    parser.add_argument("--cost_multiplier", type=float, default=2)
    parser.add_argument("--epoch", type=int, default=50)
    parser.add_argument("--num_workers", type=int, default=8)
    parser.add_argument("--target_label", type=int)
    parser.add_argument("--total_label", type=int)
    parser.add_argument("--EPSILON", type=float, default=1e-7)
    # BUG FIX: was type=bool (same pitfall as --early_stop).
    parser.add_argument("--to_file", type=_str2bool, default=True)
    parser.add_argument("--n_times_test", type=int, default=5)
    return parser
|
import json
import time
import requests
import datetime
import numpy as np
from PIL import Image
from io import BytesIO
import tensorflow as tf
from azureml.core.model import Model
def init():
    """Load the classifier into the module-global `model`.

    Tries the Azure ML model registry first; falls back to the baked-in
    container path when the registry is unavailable (local runs).
    """
    global model
    try:
        model_path = Model.get_model_path('tacosandburritos')
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt. Fall back to the local path.
        model_path = '/model/latest.h5'
    print('Attempting to load model')
    model = tf.keras.models.load_model(model_path)
    model.summary()
    print('Done!')
    print('Initialized model "{}" at {}'.format(model_path, datetime.datetime.now()))
def run(raw_data):
    """Score one request: {'image': url_or_path} -> prediction payload.

    Returns a dict with the elapsed time (seconds), the predicted label
    ('burrito' if score > 0.5 else 'tacos'), and the raw score.
    """
    global model
    prev_time = time.time()
    post = json.loads(raw_data)
    img_path = post['image']
    tensor = process_image(img_path, 160)
    t = tf.reshape(tensor, [-1, 160, 160, 3])
    o = model.predict(t, steps=1)#[0][0]
    print(o)
    o = o[0][0]
    # BUG FIX: the end timestamp was taken BEFORE preprocessing and
    # prediction, so the reported time only covered JSON parsing.
    current_time = time.time()
    inference_time = datetime.timedelta(seconds=current_time - prev_time)
    payload = {
        'time': inference_time.total_seconds(),
        'prediction': 'burrito' if o > 0.5 else 'tacos',
        'scores': str(o)
    }
    print('Input ({}), Prediction ({})'.format(post['image'], payload))
    return payload
def process_image(path, image_size):
    """Load an image from a URL or local path and return a float32
    tensor resized to (image_size, image_size), scaled to [0, 1]."""
    # Extract image (from web or path)
    if path.startswith('http'):
        raw = requests.get(path).content
        img = np.array(Image.open(BytesIO(raw)))
    else:
        img = np.array(Image.open(path))
    tensor = tf.convert_to_tensor(img, dtype=tf.float32)
    #tf.image.decode_jpeg(img_raw, channels=3)
    return tf.image.resize(tensor, [image_size, image_size]) / 255
def info(msg, char="#", width=75):
    """Print *msg* inside a full-width banner made of *char*."""
    banner = char * width
    print("")
    print(banner)
    # Left-pad the message to width-5 inside the border characters.
    print(char + " %0*s" % ((-1 * width) + 5, msg) + char)
    print(banner)
# Local smoke test: score two known images through the full pipeline.
if __name__ == "__main__":
    images = {
        'tacos': 'https://c1.staticflickr.com/5/4022/4401140214_f489c708f0_b.jpg',
        'burrito': 'https://www.exploreveg.org/files/2015/05/sofritas-burrito.jpeg'
    }
    # init() loads the model into the module-global before scoring.
    init()
    for k, v in images.items():
        print('{} => {}'.format(k, v))
    info('Taco Test')
    taco = json.dumps({ 'image': images['tacos'] })
    print(taco)
    run(taco)
    info('Burrito Test')
    burrito = json.dumps({ 'image': images['burrito'] })
    print(burrito)
    run(burrito)
class Solution:
    def convert(self, s, numRows):
        """
        :type s: str
        :type numRows: int
        :rtype: str

        Re-order *s* as read row-by-row after writing it in a zigzag
        over numRows rows (LeetCode 6).

        Improvement over the original: instead of computing (col, row)
        coordinates for every char and sorting them by row (O(n log n)),
        sweep once over the string, bouncing a row cursor between the
        top and bottom rows and appending each char to its row bucket.
        Runtime: O(n).
        """
        # One row (or more rows than chars) leaves the string unchanged.
        if numRows <= 1 or numRows >= len(s):
            return s
        rows = [[] for _ in range(numRows)]
        row, step = 0, 1
        for ch in s:
            rows[row].append(ch)
            # Reverse direction at the top and bottom rows.
            if row == 0:
                step = 1
            elif row == numRows - 1:
                step = -1
            row += step
        return ''.join(''.join(bucket) for bucket in rows)
# Smoke tests (LeetCode 6 examples); these run on import.
s = Solution()
assert s.convert('PAYPALISHIRING', 3) == 'PAHNAPLSIIGYIR'
assert s.convert('PAYPALISHIRING', 4) == 'PINALSIGYAHRPI'
assert s.convert('ABC', 1) == 'ABC'
|
"""
Tests for some application-specific db invariants
"""
import itertools
from sqlalchemy import func, or_, and_, literal, union_all, select, true
from sqlalchemy import orm
from clld.db.meta import DBSession
from clld.db.models import Config, Language, LanguageIdentifier, Identifier, ValueSet
from glottolog3.models import Languoid, LanguoidLevel, TreeClosureTable, Ref
class CheckMeta(type):
    """Metaclass that keeps a registry of every check class.

    A class is registered only if it defines its own `invalid_query`
    in its class body; iterating any class using this metaclass (e.g.
    `for cls in Check`) yields all registered checks.
    """

    __instances = []

    def __init__(self, name, bases, dct):
        super(CheckMeta, self).__init__(name, bases, dct)
        if 'invalid_query' in dct:
            # Register only classes that define their own query.
            self.__instances.append(self)

    def __iter__(self):
        return iter(self.__instances)
class Check(metaclass=CheckMeta):
    """Base class for a db invariant check.

    Subclasses implement `invalid_query`, returning a query whose rows
    VIOLATE the invariant (so an empty result means the check passes).
    """

    detail = True  # fetch and print offending rows when the check fails

    def __init__(self):
        self.query = self.invalid_query(DBSession)

    def validate(self):
        """Run the check; return True iff no violating rows exist."""
        self.invalid_count = self.query.count()
        print(self)
        if self.invalid_count:  # pragma: no cover
            if self.detail:
                self.invalid = self.query.all()
                self.display()
            return False
        else:
            self.invalid = ()
            return True

    def invalid_query(self, session, **kw):
        raise NotImplementedError()  # pragma: no cover

    def display(self, number=25):  # pragma: no cover
        # Print up to *number* offending ids, with an ellipsis if truncated.
        ids = (i.id for i in itertools.islice(self.invalid, number))
        cont = ', ...' if number < self.invalid_count else ''
        print(' %s%s' % (', '.join(ids), cont))

    def __str__(self):
        # The class docstring doubles as the invariant's description.
        if self.invalid_count:  # pragma: no cover
            msg = '%d invalid\n (violating %s)' % (self.invalid_count, self.__doc__)
        else:
            msg = 'OK'
        return '%s: %s' % (self.__class__.__name__, msg)
class FamiliesDistinct(Check):
    """Each family node has a unique set of member languages."""
    def invalid_query(self, session, exclude=u'Unclassified'):
        member = orm.aliased(Languoid, flat=True)
        # Array of all active member-language pks under the family,
        # ordered so equal sets compare equal.
        extent = func.array(
            session.query(member.pk)
            .filter_by(active=True, level=LanguoidLevel.language)
            .join(TreeClosureTable, TreeClosureTable.child_pk == member.pk)
            .filter_by(parent_pk=Languoid.pk)
            .order_by(member.pk).scalar_subquery())
        cte = session.query(Languoid.id, extent.label('extent'))\
            .filter_by(active=True, level=LanguoidLevel.family)\
            .filter(~Languoid.name.startswith(exclude)).cte()
        dup = orm.aliased(cte)
        # Families whose extent matches some OTHER family's extent.
        return session.query(cte.c.id)\
            .filter(session.query(dup).filter(
                dup.c.id != cte.c.id, dup.c.extent == cte.c.extent).exists())\
            .order_by(cte.c.extent, cte.c.id)
class DialectFather(Check):
    """Father of a dialect is a language or dialect."""
    def invalid_query(self, session, **kw):
        # Dialects whose father is neither a language nor a dialect.
        return session.query(Languoid)\
            .filter_by(active=True, level=LanguoidLevel.dialect)\
            .order_by(Languoid.id)\
            .join(Languoid.father, aliased=True)\
            .filter(Languoid.level.notin_(
                [LanguoidLevel.language, LanguoidLevel.dialect]))
class FamilyChildren(Check):
    """Family has at least one subfamily or language."""
    def invalid_query(self, session, **kw):
        # Families with no ACTIVE child of level family or language.
        return session.query(Languoid)\
            .filter_by(active=True, level=LanguoidLevel.family)\
            .filter(~Languoid.children.any(and_(
                Languoid.active == true(),
                Languoid.level.in_([LanguoidLevel.family, LanguoidLevel.language]))))\
            .order_by(Languoid.id)
class FatherFamily(Check):
    """Languoids have correct top-level family."""
    def invalid_query(self, session, **kw):
        # Recursive CTE walking each languoid up its father chain.
        cte = session.query(Languoid.pk, Languoid.father_pk)\
            .cte(recursive=True)
        parent = orm.aliased(Languoid)
        cte = cte.union_all(session.query(cte.c.pk, parent.father_pk)
            .join(parent, cte.c.father_pk == parent.pk)
            .filter(parent.father_pk != None)) # noqa
        family = orm.aliased(Languoid)
        # The chain's root (father with no father) must equal family_pk.
        return session.query(Languoid).join(cte, Languoid.pk == cte.c.pk)\
            .outerjoin(family, and_(
                cte.c.father_pk == family.pk, family.father_pk == None))\
            .filter(Languoid.family_pk != family.pk)\
            .order_by(Languoid.id) # noqa
class TreeClosure(Check):
    """Treeclosuretable is correct."""

    # Offending rows are raw tuples without .id, so skip the detail dump.
    detail = False

    def invalid_query(self, session, **kw):
        # Recompute the transitive closure with a recursive CTE...
        cte = session.query(
            Languoid.pk, Languoid.pk.label('father_pk'), literal(0).label('depth'))\
            .cte(recursive=True)
        parent = orm.aliased(Languoid)
        cte = cte.union_all(session.query(cte.c.pk, parent.father_pk, cte.c.depth + 1)
            .join(parent, cte.c.father_pk == parent.pk)
            .filter(parent.father_pk != None)) # noqa
        tree1 = session.query(
            TreeClosureTable.child_pk,
            TreeClosureTable.parent_pk,
            TreeClosureTable.depth)
        tree2 = session.query(cte.c.pk, cte.c.father_pk, cte.c.depth)
        # ...and take the symmetric difference against the stored table.
        diff = union_all(tree1.except_all(tree2), tree2.except_all(tree1))
        return session.query(diff.alias())
class ChildCounts(Check):
    """Languoids have correct child family/language/dialect counts."""
    def invalid_query(self, session, **kw):
        # CTE of (descendant pk, ancestor pk, descendant level) pairs.
        cte = session.query(Languoid.pk, Languoid.father_pk, Languoid.level)\
            .filter(Languoid.father_pk != None).cte(recursive=True) # noqa
        parent = orm.aliased(Languoid)
        cte = cte.union_all(session.query(cte.c.pk, parent.father_pk, cte.c.level)\
            .join(parent, cte.c.father_pk == parent.pk)\
            .filter(parent.father_pk != None)) # noqa
        # Compare stored counts with counts derived from the CTE;
        # count(nullif(level != X, True)) counts rows where level == X.
        return session.query(Languoid)\
            .outerjoin(cte, Languoid.pk == cte.c.father_pk)\
            .group_by(Language.pk, Languoid.pk)\
            .having(or_(
                func.coalesce(Languoid.child_family_count, -1) !=
                func.count(func.nullif(cte.c.level != LanguoidLevel.family, True)),
                func.coalesce(Languoid.child_language_count, -1) !=
                func.count(func.nullif(cte.c.level != LanguoidLevel.language, True)),
                func.coalesce(Languoid.child_dialect_count, -1) !=
                func.count(func.nullif(cte.c.level != LanguoidLevel.dialect, True))))\
            .order_by((Languoid.id))
class FamilyLanguages(Check):
    """Family has at least two languages."""
    def invalid_query(self, session, exclude=None):
        exclude = ['Unclassified', 'Unattested'] if exclude is None else exclude
        child = orm.aliased(Languoid, flat=True)
        # Count descendant languages (depth > 0 excludes the family itself).
        return session.query(Languoid)\
            .filter_by(active=True, level=LanguoidLevel.family)\
            .filter(Languoid.family.has(Languoid.name.notin_(exclude)))\
            .join(TreeClosureTable, TreeClosureTable.parent_pk == Languoid.pk)\
            .outerjoin(child, and_(
                TreeClosureTable.child_pk == child.pk,
                TreeClosureTable.depth > 0,
                child.level == LanguoidLevel.language))\
            .group_by(Language.pk, Languoid.pk)\
            .having(func.count(child.pk) < 2)\
            .order_by(Languoid.id)
class BookkeepingNoChildren(Check):
    """Bookkeeping languoids lack children."""
    def invalid_query(self, session, **kw):
        return session.query(Languoid)\
            .filter(Languoid.category == 'Bookkeeping')\
            .filter(Languoid.children.any())\
            .order_by(Languoid.id)
class IsolateInactive(Check):
    """Inactive languoids lack parent and children."""
    def invalid_query(self, session, **kw):
        # Inactive nodes must be fully detached from the tree.
        return session.query(Languoid)\
            .filter_by(active=False).filter(or_(
                Languoid.father_pk != None,
                Languoid.children.any()))\
            .order_by(Languoid.id) # noqa
class UniqueIsoCode(Check):
    """Active languoids do not share iso639-3 identifiers."""

    @staticmethod
    def _ident_query(session, type=u'iso639-3'):
        # Build (languoid alias, identifier alias, joined query) so the
        # same shape can be correlated against itself below.
        lang = orm.aliased(Languoid)
        ident = orm.aliased(Identifier)
        query = session.query(lang).filter_by(active=True)\
            .join(LanguageIdentifier, LanguageIdentifier.language_pk == lang.pk)\
            .join(ident, and_(
                LanguageIdentifier.identifier_pk == ident.pk, ident.type == type))
        return lang, ident, query

    def invalid_query(self, session, **kw):
        lang, ident, query = self._ident_query(session)
        other, other_ident, other_query = self._ident_query(session)
        # A DIFFERENT languoid carrying the same identifier name exists.
        return query.filter(other_query.filter(
            other.pk != lang.pk, ident.name == other_ident.name).exists())\
            .order_by(lang.id)
class CleanName(Check):
    """Glottolog names lack problematic characters."""
    def invalid_query(self, session, type='name',
                      description=Languoid.GLOTTOLOG_NAME, **kw):
        # Flags leading/trailing whitespace or known-bad characters.
        return session.query(Languoid)\
            .filter(Languoid.identifiers.any(or_(
                Identifier.name.op('~')(r'^\s|\s$'),
                Identifier.name.op('~')(r'[`_*:\xa4\xab\xb6\xbc]'),
            ), type=type, description=description))\
            .order_by(Languoid.id)
class UniqueName(Check):
    """Among active languages Glottolog names are unique."""

    @staticmethod
    def _ident_query(session, type='name',
                     description=Languoid.GLOTTOLOG_NAME):
        # Same self-correlation shape as UniqueIsoCode, restricted to
        # Glottolog name identifiers of active languages.
        lang = orm.aliased(Languoid)
        ident = orm.aliased(Identifier)
        query = session.query(lang).filter_by(
            active=True, level=LanguoidLevel.language)\
            .join(LanguageIdentifier, LanguageIdentifier.language_pk == lang.pk)\
            .join(ident, and_(
                LanguageIdentifier.identifier_pk == ident.pk,
                ident.type == type, ident.description == description))
        return lang, ident, query

    def invalid_query(self, session, **kw):
        lang, ident, query = self._ident_query(session)
        other, other_ident, other_query = self._ident_query(session)
        # A different active language sharing the same name exists.
        return query.filter(other_query.filter(
            other.pk != lang.pk, ident.name == other_ident.name).exists())\
            .order_by(lang.id)
class RefRedirects(Check):
    """Redirects of reference ids target an unredirected id."""
    def invalid_query(self, session, **kw):
        # Redirect rows are Config entries keyed '__Source_<id>__';
        # flag those whose target is itself a redirect key.
        return session.query(
            func.regexp_replace(Config.key, r'\D', '', u'g').label('id'),
            func.nullif(Config.value, '__gone__').label('target'))\
            .filter(Config.key.like('__Source_%%__'))\
            .filter(
                session.query(orm.aliased(Config))
                .filter_by(key=func.format('__Source_%s__', Config.value)).exists())\
            .order_by('id', 'target')
class MarkupRefLinks(Check):
    """Classification description source links are valid."""
    def invalid_query(self, session, **kw):
        # Extract every **<id>** token from valueset descriptions...
        vs_rid = select([
            ValueSet.pk,
            func.unnest(func.regexp_matches(
                ValueSet.description, r'\*\*(\d+)\*\*', 'g')).label('ref_id')]).alias()
        # ...and flag valuesets whose token matches no Ref id.
        return session.query(ValueSet)\
            .filter(ValueSet.pk.in_(
                session.query(vs_rid.c.pk)
                .filter(~session.query(Ref).filter_by(id=vs_rid.c.ref_id).exists())))\
            .order_by(ValueSet.id)
class RefPages(Check):
    """References do not have zero/negative page count."""
    def invalid_query(self, session, **kw):
        return session.query(Ref)\
            .filter(Ref.pages_int < 1)\
            .order_by(Ref.pk)
def main(args):
    """Instantiate and run every registered check.

    Iterating `Check` goes through CheckMeta.__iter__; the abstract base
    (which also defines invalid_query and is therefore registered) is
    skipped by name.
    """
    for cls in Check:
        if cls.__name__ != 'Check':
            check = cls()
            check.validate()
|
# coding: utf-8
# In[11]:
# Webcam capture tool: press 'c' to start saving 50x50 crops of the
# green on-screen rectangle into ./dataset/Y, 'q' to quit. Stops after
# total_pic images have been saved.
import cv2
import numpy as np
vc = cv2.VideoCapture(0)
pic_no = 0
total_pic = 1200
flag_capturing = False
path = './dataset/Y'
while(vc.isOpened()):
    # read image
    rval, frame = vc.read()
    frame = cv2.flip(frame, 1)
    # get hand data from the rectangle sub window on the screen
    cv2.rectangle(frame, (300,300), (100,100), (0,255,0),0)
    cv2.imshow("image", frame)
    crop_img = frame[100:300, 100:300]
    if flag_capturing:
        pic_no += 1
        save_img = cv2.resize( crop_img, (50,50) )
        save_img = np.array(save_img)
        cv2.imwrite(path + "/" + str(pic_no) + ".jpg", save_img)
    keypress = cv2.waitKey(1)
    if pic_no == total_pic:
        # NOTE(review): the flag reset is dead code — `break` exits the
        # loop immediately afterwards.
        flag_capturing = False
        break
    if keypress == ord('q'):
        break
    elif keypress == ord('c'):
        flag_capturing = True
vc.release()
cv2.destroyAllWindows()
# Extra waitKey lets HighGUI process the window-destroy events.
cv2.waitKey(1)
|
import FWCore.ParameterSet.Config as cms
# Geometry for simulation
from Geometry.ForwardCommonData.totemTest2021XML_cfi import *
from Geometry.TrackerNumberingBuilder.trackerNumberingGeometry_cff import *
from Geometry.EcalCommonData.ecalSimulationParameters_cff import *
from Geometry.HcalCommonData.hcalDDConstants_cff import *
from Geometry.MuonNumbering.muonGeometryConstants_cff import *
from Geometry.MuonNumbering.muonOffsetESProducer_cff import *
|
import struct
import string
from .utils import chunkify, rchunkify, int_to_byte, byte_to_int
# chr(x) - int to single char
# bin(x) - int to bin repr
# ba = bytearray()
# ba.append(num) (num <= 255)
# ba.extend(b'\x01\x02\x03')
# HPACK static header table (RFC 7541, Appendix A). Indices are 1-based
# on the wire; entry 1 is (':authority', '').
_static_table = (
    (':authority', ''),
    (':method', 'GET'),
    (':method', 'POST'),
    (':path', '/'),
    (':path', '/index.html'),
    (':scheme', 'http'),
    (':scheme', 'https'),
    (':status', '200'),
    (':status', '204'),
    (':status', '206'),
    (':status', '304'),
    (':status', '400'),
    (':status', '404'),
    (':status', '500'),
    ('accept-charset', ''),
    ('accept-encoding', 'gzip, deflate'),
    ('accept-language', ''),
    ('accept-ranges', ''),
    ('accept', ''),
    ('access-control-allow-origin', ''),
    ('age', ''),
    ('allow', ''),
    ('authorization', ''),
    ('cache-control', ''),
    ('content-disposition', ''),
    ('content-encoding', ''),
    ('content-language', ''),
    ('content-length', ''),
    ('content-location', ''),
    ('content-range', ''),
    ('content-type', ''),
    ('cookie', ''),
    ('date', ''),
    ('etag', ''),
    ('expect', ''),
    ('expires', ''),
    ('from', ''),
    ('host', ''),
    ('if-match', ''),
    ('if-modified-since', ''),
    ('if-none-match', ''),
    ('if-range', ''),
    ('if-unmodified-since', ''),
    ('last-modified', ''),
    ('link', ''),
    ('location', ''),
    ('max-forwards', ''),
    ('proxy-authenticate', ''),
    ('proxy-authorization', ''),
    ('range', ''),
    ('referer', ''),
    ('refresh', ''),
    ('retry-after', ''),
    ('server', ''),
    ('set-cookie', ''),
    ('strict-transport-security', ''),
    ('transfer-encoding', ''),
    ('user-agent', ''),
    ('vary', ''),
    ('via', ''),
    ('www-authenticate', ''),
)
def uint_encode(val, n=8):
    """
    Encode an unsigned integer according to RFC 7541, section 5.1.

    :param val:
        non-negative integer to encode.
    :param n:
        prefix size in bits (1..8). Values below 2**n - 1 fit in a
        single byte; larger values continue in 7-bit groups with the
        high bit as a continuation flag. The caller is responsible for
        OR-ing any pattern bits into the first returned byte.
    :raises ValueError: if *val* is negative.
    :returns: bytearray with the encoded integer.
    """
    if val < 0:
        raise ValueError('Integer must be equal or greater than zero!')
    limit = 2**n - 1
    if val < limit:
        # Fits in the prefix: single byte. (bytes([val]) replaces the
        # project helper int_to_byte; val < 255 on this path.)
        return bytearray([val])
    result = bytearray([limit])
    i = val - limit
    while i >= 128:
        result.append(i % 128 + 128)
        i = i // 128
    result.append(i)
    return result
def uint_decode(array, n=8):
    """
    Decodes an unsigned integer according to [RFC7541, section 5.1].

    :param array:
        byte sequence beginning with the n-bit-prefixed integer
    :param n:
        prefix size in bits
    :returns: the decoded integer
    """
    prefix_max = (1 << n) - 1
    value = array[0] & prefix_max
    # A prefix below its maximum already holds the whole value.
    if value < prefix_max:
        return value
    shift = 0
    pos = 0
    while True:
        pos += 1
        octet = array[pos]
        # Low 7 bits carry payload, least-significant group first.
        value += (octet & 0x7F) << shift
        shift += 7
        # High bit clear means this was the final continuation byte.
        if not octet & 0x80:
            break
    return value
def bytestr_encode(bytestr, huffman=False, encoding='ascii'):
    """
    Encodes a string literal according to [RFC7541, section 5.2].

    :param bytestr:
        the payload; ``str`` values are encoded to bytes with *encoding*
    :param huffman:
        request Huffman coding of the payload (not implemented yet)
    :param encoding:
        codec applied when *bytestr* is a ``str``
    :returns: length-prefixed literal as a ``bytearray``
    :raises ValueError: if *bytestr* is not str/bytes/bytearray
    :raises NotImplementedError: if Huffman coding is requested
    """
    if isinstance(bytestr, str):
        bytestr = bytestr.encode(encoding)
    if not isinstance(bytestr, (bytes, bytearray)):
        raise ValueError('bytestr must be an instance of bytes or bytearray')
    if huffman:
        # When implemented, Huffman coding must also set the H bit:
        #   result[0] = 128 | result[0]
        # (the former post-encoding `if huffman:` branch was unreachable
        # because of this raise, so it has been removed).
        raise NotImplementedError()
    # Length goes into a 7-bit-prefixed integer; the 8th bit is the H flag.
    result = uint_encode(len(bytestr), n=7)
    result.extend(bytestr)
    return result
class IndexTable(object):
    """HPACK index address space: the static table followed by a dynamic
    table. Indices are 1-based, static entries first ([RFC7541, 2.3.3])."""

    STATIC_LENGTH = len(_static_table)

    def __init__(self):
        self._dyn_table = []

    def find(self, name, value=None):
        """Return the 1-based index of a header, or None if absent.

        Looks up by name only when *value* is None, otherwise by the
        exact (name, value) pair.
        """
        if value is None:
            return self._find_name(name)
        return self._find_field(name, value)

    def _find_name(self, name):
        # First matching name across static then dynamic entries.
        all_names = [entry[0] for entry in _static_table]
        all_names += [entry[0] for entry in self._dyn_table]
        try:
            return all_names.index(name) + 1
        except ValueError:
            return None

    def _find_field(self, name, value):
        # Exact (name, value) match across static then dynamic entries.
        entries = list(_static_table) + self._dyn_table
        try:
            return entries.index((name, value)) + 1
        except ValueError:
            return None

    def add(self, name, field):
        # Newest dynamic entry receives the lowest dynamic index.
        self._dyn_table.insert(0, (name, field))

    def get(self, index):
        """Return the (name, value) pair stored at 1-based *index*."""
        offset = index - 1
        if offset < self.STATIC_LENGTH:
            return _static_table[offset]
        return self._dyn_table[offset - self.STATIC_LENGTH]

    def __getitem__(self, index):
        return self.get(index)

    def __len__(self):
        return self.STATIC_LENGTH + len(self._dyn_table)
class Encoder(object):
    """Encodes header lists into HPACK header blocks."""

    def __init__(self):
        # Shared static+dynamic index address space.
        self.index_table = IndexTable()

    def encode_headers(self, headers):
        """Encode newline-separated ``name: value`` lines into one block.

        :param headers: str or bytes, one header per line
        :returns: the encoded header block as ``bytes``
        """
        if isinstance(headers, str):
            headers = headers.encode('ascii')
        result = bytearray()
        headers = headers.split(b'\n')
        for header in headers:
            # rsplit keeps pseudo-header names such as b':path' intact.
            # NOTE(review): a value containing ':' would still be split at
            # its last colon -- confirm inputs never contain such values.
            h = header.rsplit(b':', 1)
            h[1] = h[1].strip()
            result.extend(self.encode_header(h[0], h[1]))
        return bytes(result)

    def encode_header(self, name, value):
        """Encode one header field, preferring indexed representations."""
        # NOTE(review): name/value arrive here as bytes while the index
        # table stores str pairs, so these lookups may never match --
        # verify the intended type of table entries.
        ind = self.index_table.find(name, value)
        if ind is not None:
            return self._encode_indexed_field(ind)
        ind = self.index_table.find(name)
        return self._encode_literal_field(
            name=ind if ind is not None else name,
            value=value
        )

    def _encode_indexed_field(self, idx):
        # Indexed field: pattern bit '1' over a 7-bit-prefixed index.
        result = uint_encode(idx, n=7)
        result[0] = 128 | result[0]
        return result

    def _encode_literal_field(self, name, value, indexed=True, never_indexed=False):
        """Encode a literal header field ([RFC7541, section 6.2]).

        :param name: table index (int) or literal name (bytes)
        :param value: literal value bytes
        :param indexed: use the 'with incremental indexing' representation
        :param never_indexed: mark the field as never-indexed
        :raises ValueError: if both *indexed* and *never_indexed* are set
        """
        if indexed and never_indexed:
            raise ValueError()
        if isinstance(name, int):
            # Indexed name: prefix width depends on the representation.
            header = uint_encode(name, n=6 if indexed else 4)
        else:
            header = bytearray(b'\x00')
        # First-byte pattern for each (indexed, never_indexed) combination.
        _map = {
            (True, False): b'\x40',
            (False, False): b'\x00',
            (False, True): b'\x10',
        }
        header[0] = byte_to_int(
            _map[(indexed, never_indexed)]
        ) | header[0]
        result = header
        if isinstance(name, bytes):
            # Literal name: 7-bit-prefixed length, H (Huffman) bit clear.
            l = uint_encode(len(name), n=7)
            # l[0] = 128 | l[0]
            l.extend(name)
            result.extend(l)
        l = uint_encode(len(value), n=7)
        # l[0] = 128 | l[0]
        l.extend(value)
        result.extend(l)
        return bytes(result)
class Decoder(object):
    """Decodes HPACK header blocks (decoding itself not implemented yet)."""

    def __init__(self):
        # Mirror of the encoder's index address space.
        self.index_table = IndexTable()

    def decode(self, header_block):
        """Placeholder: decoding is not implemented; returns None."""
        pass
# Generated by Django 2.1.4 on 2019-01-14 15:04
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: tightens the validator on quote.Quote.phone
    # to exactly 11 characters (regex '^.{11}$'), max_length 11.

    dependencies = [
        ('quote', '0004_auto_20190114_1242'),
    ]

    operations = [
        migrations.AlterField(
            model_name='quote',
            name='phone',
            field=models.CharField(max_length=11, validators=[django.core.validators.RegexValidator(code='nomatch', message='Please use a valid phone number', regex='^.{11}$')]),
        ),
    ]
|
import config
class Wall:
    """Marks wall cells in a grid: the outer border plus interior pillars
    at every even (row, col) coordinate."""

    def __init__(self, grid):
        """Mutate ``grid.matrix`` in place, writing 'wall' into wall cells.

        :param grid: object exposing ``rows``, ``cols`` and a 2-D ``matrix``
        """
        self.rows = grid.rows
        self.cols = grid.cols
        # Outer border is solid wall.
        for i in range(self.rows):
            for j in range(self.cols):
                if i in [0, self.rows-1] or j in [0, self.cols-1]:
                    grid.matrix[i][j] = 'wall'
        # Interior pillars at even/even coordinates.
        # Fix: compare ints with '==', not 'is' -- identity comparison of
        # ints is implementation-dependent and a SyntaxWarning on modern
        # Python.
        for i in range(1, grid.rows-1):
            for j in range(1, grid.cols-1):
                if i % 2 == 0 and j % 2 == 0:
                    grid.matrix[i][j] = 'wall'
from subprocess import Popen, PIPE
from crm import app
@app.cli.command()
def generate_graphql_docs():
    """
    Generates schema.graphql IDL file and the GraphQL API documentation
    for queries and mutations.

    Requires the `graphdoc` CLI tool to be installed; exits with status 1
    if documentation generation fails.
    """
    from crm import app
    sc = app.graphql_schema
    # Dump the schema IDL next to the project root.
    with open('./schema.graphql', "w") as f:
        f.write(str(sc))
    # graphdoc renders the IDL into static HTML under docs/graphqlapi.
    p = Popen(['graphdoc', '--force', '-s', './schema.graphql', '-o',
               'docs/graphqlapi'], stdout=PIPE, stderr=PIPE)
    p.communicate()[0]
    if p.returncode != 0:
        print("Failed to generate graphqlapi docs.")
        exit(1)
|
from bs4 import BeautifulSoup
from email.mime.text import MIMEText
from email.header import Header
import urllib.request
import json
import smtplib
import datetime
import locale
locale.setlocale(locale.LC_ALL)
today = datetime.date.today()
tarih1=""
tarih2=""
tarih3=""
eskitarih1=""
eskitarih2=""
eskitarih3=""
mail_1_alici = ['__MAIL__@gmail.com','__MAIL__@gmail.com']
bir_mesaj_1parca= """\
<html>
<head></head>
<body>
<p>1. Sınıf Ders Programı Değişmiştir<br /><em>Güncel Tarih: <span style="color: #ff0000;">
</html>
"""
bir_mesaj_2parca = """\
<html>
</span></em></p>
<p>İndirmek için tıklayınız</p>
<p>(<a href="http://www.ctf.edu.tr/egitim_ogretim/ders/2017_2018/17_18_dersprog1.pdf"><strong>pdf</strong></a>) (<a href="http://www.ctf.edu.tr/egitim_ogretim/ders/2017_2018/17_18_dersprog1.doc"><strong>word</strong></a>)</p>
<p>çalışmazsa: http://www.ctf.edu.tr/egitim_ogretim/ders/2017_2018/17_18_dersprog1.doc</p>
<p style="text-align: center;"><strong><em>CerrahApp - raydingoz - 2017</em></strong></p>
<p>takip listesinden çıkmak için <a href="https://goo.gl/forms/B8vRMOUuhz3dRrOG2">tıklayınız</a></p>
</html>
"""
mail_2_alici = ['__MAIL__@gmail.com','__MAIL__@gmail.com']
iki_mesaj_1parca= """\
<html>
<head></head>
<body>
<p>2. Sınıf Ders Programı Değişmiştir<br /><em>Güncel Tarih: <span style="color: #ff0000;">
</html>
"""
iki_mesaj_2parca = """\
<html>
</span></em></p>
<p>İndirmek için tıklayınız</p>
<p>(<a href="http://www.ctf.edu.tr/egitim_ogretim/ders/2017_2018/17_18_dersprog2.pdf"><strong>pdf</strong></a>) (<a href="http://www.ctf.edu.tr/egitim_ogretim/ders/2017_2018/17_18_dersprog2.doc"><strong>word</strong></a>)</p>
<p>çalışmazsa: http://www.ctf.edu.tr/egitim_ogretim/ders/2017_2018/17_18_dersprog2.doc</p>
<p style="text-align: center;"><strong><em>CerrahApp - raydingoz - 2017</em></strong></p>
<p>takip listesinden çıkmak için <a href="https://goo.gl/forms/B8vRMOUuhz3dRrOG2">tıklayınız</a></p>
</html>
"""
mail_3_alici = ['__MAIL__@gmail.com','__MAIL__@gmail.com']
uc_mesaj_1parca= """\
<html>
<head></head>
<body>
<p>3. Sınıf Ders Programı Değişmiştir<br /><em>Güncel Tarih: <span style="color: #ff0000;">
</html>
"""
uc_mesaj_2parca = """\
<html>
</span></em></p>
<p>İndirmek için tıklayınız</p>
<p>(<a href="http://www.ctf.edu.tr/egitim_ogretim/ders/2017_2018/17_18_dersprog3.pdf"><strong>pdf</strong></a>) (<a href="http://www.ctf.edu.tr/egitim_ogretim/ders/2017_2018/17_18_dersprog3.doc"><strong>word</strong></a>)</p>
<p>çalışmazsa: http://www.ctf.edu.tr/egitim_ogretim/ders/2017_2018/17_18_dersprog3.doc</p>
<p style="text-align: center;"><strong><em>CerrahApp - raydingoz - 2017</em></strong></p>
<p>takip listesinden çıkmak için <a href="https://goo.gl/forms/B8vRMOUuhz3dRrOG2">tıklayınız</a></p>
</html>
"""
def website():
    """Scrape the current timetable dates for class years 1-3 from the
    faculty website and store them in the tarih1/tarih2/tarih3 globals."""
    url = "http://www.ctf.edu.tr/egitim_ogretim/dersprog.htm"
    url_oku = urllib.request.urlopen(url)
    soup = BeautifulSoup(url_oku, 'html.parser')
    # Skip the two table header rows.
    for tr in soup.find_all('tr')[2:]:
        tds = tr.find_all('td')
        # Column 1 holds the class year, column 2 its last-update date.
        sinif = str(tds[1].text)
        sinif = sinif.strip('\t\r\n')
        if (sinif == "1"):
            global tarih1
            tarih1 = str(tds[2].text)
            tarih1 = tarih1.strip('\t\r\n')
            print("web : 1. sınıf : " + tarih1)
        elif (sinif == "2"):
            global tarih2
            tarih2 = str(tds[2].text)
            tarih2 = tarih2.strip('\t\r\n')
            print("web : 2. sınıf : " + tarih2)
        elif (sinif == "3"):
            global tarih3
            tarih3 = str(tds[2].text)
            tarih3 = tarih3.strip('\t\r\n')
            print("web : 3. sınıf : " + tarih3)
def eskial():
    """Load the previously stored dates from the remote JSON API into the
    eskitarih1/eskitarih2/eskitarih3 globals (the "old" values)."""
    url = "____READ_DATA_API___.php"
    r = urllib.request.urlopen(url)
    # Decode with the charset announced by the server, UTF-8 otherwise.
    data = json.loads(r.read().decode(r.info().get_param('charset') or 'utf-8'))
    print("")
    global eskitarih1
    eskitarih1 = data["api"][0]["deger"]
    print("eski : Birinci Sınıf : " + data["api"][0]["deger"])
    global eskitarih2
    eskitarih2 = data["api"][1]["deger"]
    print("eski : İkinci Sınıf : " + data["api"][1]["deger"])
    global eskitarih3
    eskitarih3 = data["api"][2]["deger"]
    print("eski : Üçüncü Sınıf : " + data["api"][2]["deger"])
def yenitarihgir(bir, iki, uc):
    """Persist the three timetable dates to the remote API (placeholder URL).

    :param bir: year-1 date string
    :param iki: year-2 date string
    :param uc: year-3 date string
    """
    urllib.request.urlopen("____ADD_DATA_API___.php?bir="+bir+"&iki="+iki+"&uc="+uc).read()
def karsilastir():
    """Compare the freshly scraped dates with the stored ones; on any
    change, persist the new dates and notify the matching recipient list.

    Relies on the module-level tarih*/eskitarih* globals populated by
    website() and eskial().
    """
    if (tarih1 != eskitarih1):
        print("bir eşit değil")
        yenitarihgir(tarih1, tarih2, tarih3)
        mailat(mail_1_alici, "Ders Programı", bir_mesaj_1parca + tarih1 + bir_mesaj_2parca)
    if (tarih2 != eskitarih2):
        print("iki eşit değil")
        yenitarihgir(tarih1, tarih2, tarih3)
        # Fix: year-2 changes go to the year-2 recipient list (mail_2_alici
        # was defined but never used; mail_1_alici was notified instead).
        mailat(mail_2_alici, "Ders Programı", iki_mesaj_1parca + tarih2 + iki_mesaj_2parca)
    if (tarih3 != eskitarih3):
        print("üç eşit değil")
        yenitarihgir(tarih1, tarih2, tarih3)
        # Fix: year-3 recipients; also dropped a stray "d" debug character
        # that leaked into the message body.
        mailat(mail_3_alici, "Ders Programı", uc_mesaj_1parca + tarih3 + uc_mesaj_2parca + "tarih: " + today.strftime('%d %b'))
def mailat(alici, baslik, msg):
    """Send *msg* as an HTML e-mail via Gmail SMTP; errors are printed,
    never raised.

    :param alici: list of recipient addresses
    :param baslik: subject line
    :param msg: HTML message body
    """
    try:
        server = smtplib.SMTP('smtp.gmail.com', 587)
        server.starttls()
        # NOTE(review): credentials are committed placeholders -- move to
        # environment variables / a secrets store.
        server.login("____EMAIL___", "___PASSWORD___")
        # NOTE(review): 'message' is built but never used -- confirm it can
        # be removed.
        message = 'Subject: {}\n\n{}'.format(baslik, msg)
        m = MIMEText(msg.encode('utf-8'), 'html', 'utf-8')
        m['Subject'] = Header(baslik, 'utf-8')
        server.sendmail("____EMAIL___", alici, m.as_string())
        server.quit()
        print('mail başarıyla gönderildi')
    except Exception as e:
        print(e)
        print("mail gönderilirken hata")
# Script entry: scrape current dates, load stored ones, then diff & notify.
website()
eskial()
karsilastir()
|
class Authentication(object):
    """Request-scoped controller exposing login/logout/callback endpoints."""

    def __init__(self, request, response):
        """Keep references to the current request/response pair."""
        self.request = request
        self.response = response

    def login(self):
        """Handle a login request."""
        return 'Authentication.Login'

    def logout(self):
        """Handle a logout request."""
        return 'Authentication.Logout'

    def callback(self):
        """Handle an authentication-provider callback."""
        return 'Authentication.Callback'
|
from django.conf import settings
from django.db import models
class Follower(models.Model):
    """Subscription link: *subscriber* follows *user*."""
    # The account being followed; reverse accessor: some_user.owner.
    # NOTE(review): the related_name values read oddly relative to the
    # field names (user -> 'owner', subscriber -> 'subscribers') -- confirm
    # intent before relying on reverse lookups.
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='owner')
    # The account doing the following; reverse accessor: some_user.subscribers.
    subscriber = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='subscribers')
|
import math
# Read a shape name, then its dimensions, and print its area to 3 decimals.
shape = input()
area = 0.0  # unknown shape names fall through and print 0.000
if shape == "square":
    a = float(input())
    area = a * a
elif shape == "rectangle":
    a = float(input())
    b = float(input())
    area = a * b
elif shape == "circle":
    r = float(input())
    area = r * r * math.pi
elif shape == "triangle":
    # Triangle area from base and height.
    a = float(input())
    h = float(input())
    area = a * h / 2
print(f"{area:.3f}")
|
from __future__ import division
# Day labels shown to the user; the menu is selected by number.
# NOTE(review): Python 2 only (raw_input / print statements); days 4-7
# have no menu branch, so unknown answers print nothing.
pazartesi = "(1) pazartesi"
sali = "(2) sali"
carsamba = "(3) carsamba"
persembe = "(4) persembe"
cuma = "(5) cuma"
cumartesi = "(6) cumartesi"
pazar = "(7) pazar"
isim = raw_input("isiminiz")
print pazartesi, sali, carsamba, persembe, cuma, cumartesi, pazar
soru = raw_input("hangi gunun listesi ?")
# Print the menu of the chosen day.
if soru == "1":
    print ("Gun: Pazartesi")
    print ("musteri ismi: %s") %(isim)
    print "Gunun menusu"
    print "corba    : mercimek"
    print "ana yemek: tavuk sote"
    print "tatli    : kadayif "
if soru == "2":
    print ("Gun: Sali")
    print ("musteri ismi: %s") %(isim)
    print "Gunun menusu"
    print "corba    : sehriye"
    print "ana yemek: tavuk "
    print "tatli    : baklava "
if soru == "3":
    print ("Gun: Carsamba")
    print ("musteri ismi: %s") %(isim)
    print "Gunun menusu"
    print "corba    : ezogelin"
    print "ana yemek: tavuk "
    print "tatli    : gullac "
|
from BrythonAnimation import BrythonAnimation
import browser
def rgbString(r, g, b):
    """Return '#rrggbb' for the given 0-255 color components.

    Out-of-range components fall back to black ('#000000'), preserving the
    original defensive behavior.

    :param r: red component, 0-255
    :param g: green component, 0-255
    :param b: blue component, 0-255
    :returns: CSS hex color string
    """
    if not (0 <= r < 256 and 0 <= g < 256 and 0 <= b < 256):
        return "#000000"
    # '{:02x}' zero-pads each component to two hex digits, replacing the
    # manual one-digit padding of the original.
    return "#{:02x}{:02x}{:02x}".format(r, g, b)
class Demo(BrythonAnimation):
    """Multi-touch demo: draws a circle under every active touch point."""

    def init(self):
        # Maps touch identifier -> last known touch coordinates.
        self.ovals = dict()

    def onTouch(self, touches):
        # Record/refresh the position of every reported touch.
        for touchID in touches:
            self.ovals[touchID] = touches[touchID]

    def onTouchDrag(self, touches):
        # Dragging just updates positions, same as a new touch.
        self.onTouch(touches)

    def onTouchRelease(self, touches):
        # Released touches disappear from the canvas.
        for touchID in touches:
            del self.ovals[touchID]

    def onTouchCancel(self, touches):
        # A cancelled touch is treated like a release.
        self.onTouchRelease(touches)

    def onKeyDown(self, event):
        print(event.keysym)

    def redrawAll(self):
        # One filled blue circle per active touch.
        for identifier in self.ovals:
            coords = self.ovals[identifier]
            self.context.create_circle(coords.x, coords.y, 30, fill="#0000ff")
Demo(touch=True, keys=True) |
"""0MQ Frame pure Python methods."""
# Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
import zmq
from zmq.backend import Frame as FrameBase
from .attrsettr import AttributeSetter
def _draft(v, feature):
    """Ensure libzmq >= *v* and that pyzmq was built with draft API support.

    Raises RuntimeError when draft support is missing.
    """
    zmq.error._check_version(v, feature)
    if zmq.DRAFT_API:
        return
    raise RuntimeError(
        "libzmq and pyzmq must be built with draft support for %s" % feature
    )
class Frame(FrameBase, AttributeSetter):
    """Frame(data=None, track=False, copy=None, copy_threshold=zmq.COPY_THRESHOLD)
    A zmq message Frame class for non-copying send/recvs and access to message properties.
    A ``zmq.Frame`` wraps an underlying ``zmq_msg_t``.
    Message *properties* can be accessed by treating a Frame like a dictionary (``frame["User-Id"]``).
    .. versionadded:: 14.4, libzmq 4
    Frames created by ``recv(copy=False)`` can be used to access message properties and attributes,
    such as the CURVE User-Id.
    For example::
        frames = socket.recv_multipart(copy=False)
        user_id = frames[0]["User-Id"]
    This class is used if you want to do non-copying send and recvs.
    When you pass a chunk of bytes to this class, e.g. ``Frame(buf)``, the
    ref-count of `buf` is increased by two: once because the Frame saves `buf` as
    an instance attribute and another because a ZMQ message is created that
    points to the buffer of `buf`. This second ref-count increase makes sure
    that `buf` lives until all messages that use it have been sent.
    Once 0MQ sends all the messages and it doesn't need the buffer of ``buf``,
    0MQ will call ``Py_DECREF(s)``.
    Parameters
    ----------
    data : object, optional
        any object that provides the buffer interface will be used to
        construct the 0MQ message data.
    track : bool [default: False]
        whether a MessageTracker_ should be created to track this object.
        Tracking a message has a cost at creation, because it creates a threadsafe
        Event object.
    copy : bool [default: use copy_threshold]
        Whether to create a copy of the data to pass to libzmq
        or share the memory with libzmq.
        If unspecified, copy_threshold is used.
    copy_threshold: int [default: zmq.COPY_THRESHOLD]
        If copy is unspecified, messages smaller than this many bytes
        will be copied and messages larger than this will be shared with libzmq.
    """

    def __getitem__(self, key):
        # map Frame['User-Id'] to Frame.get('User-Id')
        return self.get(key)

    @property
    def group(self):
        """The RADIO-DISH group of the message.
        Requires libzmq >= 4.2 and pyzmq built with draft APIs enabled.
        .. versionadded:: 17
        """
        # _draft raises unless the runtime supports this draft-only feature.
        _draft((4, 2), "RADIO-DISH")
        return self.get('group')

    @group.setter
    def group(self, group):
        # Draft-gated like the getter; stored via the C-level set().
        _draft((4, 2), "RADIO-DISH")
        self.set('group', group)

    @property
    def routing_id(self):
        """The CLIENT-SERVER routing id of the message.
        Requires libzmq >= 4.2 and pyzmq built with draft APIs enabled.
        .. versionadded:: 17
        """
        _draft((4, 2), "CLIENT-SERVER")
        return self.get('routing_id')

    @routing_id.setter
    def routing_id(self, routing_id):
        # Draft-gated like the getter; stored via the C-level set().
        _draft((4, 2), "CLIENT-SERVER")
        self.set('routing_id', routing_id)
# keep deprecated alias
Message = Frame
__all__ = ['Frame', 'Message']
|
import json
import cherrypy
class notesController:
    """CherryPy handlers exposing CRUD operations over a notes store.

    Every handler returns a JSON string with a "result" status flag.
    """

    def __init__(self, notesdb):
        # Injected persistence layer; must provide add/get/set/delete methods.
        self.notesdb = notesdb

    # create a new note
    def POST_NOTE(self):
        """Create a new note from the JSON request body."""
        msg = json.loads(cherrypy.request.body.read())
        self.notesdb.add_note(msg)
        return json.dumps({"result": "success"})

    # get existing notes
    def GET_NOTES(self):
        """Return all notes."""
        notes = self.notesdb.get_all_notes()
        # Fix: the result flag was misspelled "sucess", unlike every other
        # handler in this controller.
        return json.dumps({"result": "success", "notes": notes})

    # get an existing note
    def GET_NOTE(self, note_id):
        """Return a single note by id."""
        return json.dumps(self.notesdb.get_note(note_id))

    # change an existing note
    def POST_UPDATED_NOTE(self):
        """Replace an existing note with the JSON request body."""
        # (removed a stray debug print("here"))
        self.notesdb.set_note(json.loads(cherrypy.request.body.read()))
        return json.dumps({"result": "success"})

    # delete an existing note
    def DELETE_NOTE(self, note_id):
        """Delete a note by id."""
        self.notesdb.delete_note(note_id)
        return json.dumps({"result": "success"})
|
#!/usr/bin/python
'''
filter MulTiXcan and PrediXcan WB rm bad map results Mappability>0.8 and no cross-mappability between pairs
to also rm likely false positives based on NCBI gene summaries
grep retro, pseudogene, or paralog
needed for Table 1 tested count and Fig 2 QQ plot
'''
import gzip
import sys
import argparse
import os
def check_arg(args=None):
    """Parse command-line options for the mappability filter.

    :param args: argument list (e.g. sys.argv[1:])
    :returns: argparse.Namespace with ``infile`` and ``infilepath``
    """
    parser = argparse.ArgumentParser(description='Script to filter poor mappability genes')
    # Fix: 'required' expects a bool; the string 'True' only worked because
    # any non-empty string is truthy.
    parser.add_argument('-f', '--infile',
                        help='input file',
                        required=True
                        )
    parser.add_argument('-p', '--infilepath',
                        help='path to input file',
                        required=True
                        )
    return parser.parse_args(args)
# retrieve command line arguments
args = check_arg(sys.argv[1:])
resfilestring = args.infile
resfilepath = args.infilepath
# Cluster directory layout for inputs/outputs.
mydir = "/gpfs/data/im-lab/nas40t2/hwheeler/trans-px/"
rmdir = mydir + "rm_bad_mapping_genes/"
sumdir = mydir + "NCBI_Gene_Summaries/"
suspectfile = sumdir + "hgFixed.refSeqSummary_gencode.v19.annotations_suspect_list.txt.gz"
resfile = resfilepath + resfilestring
# Build a lookup of suspect genes (retro/pseudogene/paralog per NCBI
# summaries) keyed by the 15-character Ensembl gene id.
# NOTE(review): Python 2 style -- on Python 3, gzip.open yields bytes, so
# the str splits and str writes below would raise TypeError; confirm the
# target interpreter.
suspect = dict()
for line in gzip.open(suspectfile):
    arr = line.strip().split('\t')
    ensgene = arr[2][:15]
    suspect[ensgene] = line
outres = gzip.open(sumdir + "rm_suspect_NCBIgenes_" + resfilestring, "wb")
# Keep the header row (gene1 == 'snps') and any gene pair where neither
# member is on the suspect list.
for line in gzip.open(resfile):
    arr = line.strip().split()
    gene1 = arr[0][:15]
    gene2 = arr[1][:15]
    if gene1 == 'snps':
        outres.write('\t'.join(arr) + '\n')
    elif gene1 not in suspect and gene2 not in suspect:
        outres.write('\t'.join(arr) + '\n')
outres.close()
|
# A child is playing a cloud hopping game. In this game, there are sequentially numbered clouds that can be thunderheads or cumulus clouds. The character must jump from cloud to cloud until it reaches the start again.
# There is an array of clouds, c and an energy level e = 100 . The character starts from c[0] and uses 1 unit of energy to make a jump of size k to cloud c[(i+k) % n]. If it lands on a thundercloud, c[i] = 1, its energy (e) decreases by 2 additional units. The game ends when the character lands back on cloud .
# Given the values of n, k, and the configuration of the clouds as an array c, determine the final value of e after the game ends.
# Function Description
# Complete the jumpingOnClouds function in the editor below.
# jumpingOnClouds has the following parameter(s):
# int c[n]: the cloud types along the path
# int k: the length of one jump
# Returns
# int: the energy level remaining.
def jumpingOnClouds(c, k):
    """Play the circular cloud-hopping game and return the remaining energy.

    The player starts on cloud 0 with 100 energy, repeatedly jumps k clouds
    forward around the circle (1 energy per jump, 2 extra if the landing
    cloud is a thunderhead, c[i] == 1) and stops upon landing back on
    cloud 0.

    :param c: cloud types (0 = cumulus, 1 = thunderhead)
    :param k: jump length
    :returns: remaining energy when the game ends
    """
    n = len(c)
    e = 100
    # Fix: the old pre-loop special case for k == n charged for c[n-1]
    # instead of the actual landing cloud c[0], giving a wrong result
    # whenever the two differ. A uniform modular walk handles every case.
    i = k % n  # first landing position
    while True:
        e -= 1  # every jump costs one unit
        if c[i] == 1:
            e -= 2  # thunderhead penalty
        if i == 0:
            break  # back at the start: game over
        i = (i + k) % n
    return e
print(jumpingOnClouds([1, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1], 19)) # 97
print(jumpingOnClouds([0, 0, 1, 0, 0, 1, 1, 0], 2)) # 92
print(jumpingOnClouds([1, 1, 1, 0, 1, 1, 0, 0, 0, 0, ], 3)) # 80
|
import torch
import torch.nn as nn
import numpy as np
import time
from torch.autograd import Variable
import torch.optim as optim
import torch.backends.cudnn as cudnn
from torch.utils.data import DataLoader
from models import resnet18
import torchvision.models as models
import torchvision.transforms as transforms
import torchvision.datasets as datasets
from torch.utils.data import Dataset
from torch.utils.data.sampler import SubsetRandomSampler
from torch.utils.data.sampler import SubsetRandomSampler
from sklearn.metrics import accuracy_score,roc_auc_score
import os
import nibabel as nib
import argparse
from utils import AverageMeter
from distutils.version import LooseVersion
import math
from losses import DICELoss
# from losses import DICELossMultiClass
def train(train_loader, model, criterion, optimizer, epoch, args):
    """Run one training epoch.

    :param train_loader: DataLoader yielding (image, target) batches
    :param model: network under training (CUDA, DataParallel-wrapped)
    :param criterion: loss applied to the positive-class channel
    :param optimizer: optimizer whose lr is decayed every iteration
    :param epoch: 1-based epoch number (drives the lr schedule)
    :param args: namespace providing epoch_iters/lr/lr_pow/max_iters/running_lr
    """
    losses = AverageMeter()
    model.train()
    for iteration, (sample, target) in enumerate(train_loader):
        # print(iteration,target)
        image = Variable(sample).float().cuda()
        label = Variable(target).float().cuda()
        out = model(image)
        # out = torch.clamp(out, min = 0.0, max = 1.0)
        out = out.contiguous()
        label = label.contiguous()
        # print(out[:,1], label)
        # Loss is computed on the positive-class output channel only.
        loss = criterion(out[:, 1], label)
        # NOTE(review): loss.data[0] only works on old PyTorch (< 0.5);
        # loss.item() is the modern equivalent -- confirm the pinned version.
        losses.update(loss.data[0], image.size(0))
        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # adjust learning rate
        cur_iter = iteration + (epoch - 1) * args.epoch_iters
        adjust_learning_rate(optimizer, cur_iter, args)
        if iteration % 200 == 0:
            print(' * i {} | lr: {:.7f} | Training Loss: {losses.avg:.3f}'.format(iteration, args.running_lr, losses=losses))
    print(' * EPOCH {epoch} | Training Loss: {losses.avg:.3f}'.format(epoch=epoch, losses=losses))
def validation(val_loader, model, epoch, args):
    """Evaluate the model on the validation set and return the ROC AUC of
    the positive-class scores against the true labels."""
    model.eval()
    label_list = []
    prob_list = []
    for iteration, (sample, target) in enumerate(val_loader):
        image = Variable(sample).float().cuda()
        label = Variable(target).long().data.cpu().numpy()
        out = model(image)
        with torch.no_grad():
            out_label = torch.max(out, 1)[1].cpu().numpy()
            out = out.data.cpu().numpy()
        # print(out,label)
        # Accumulate per-sample labels and positive-class scores.
        for j in range(out.shape[0]):
            # name_list.append(name[j])
            label_list.append(label[j])
            # prob = out[j,1]/(out_label[j,0]+out_label[j,1])
            prob_list.append(out[j, 1])
            # prob_list.append(out_label[j])
    # label = np.array(label_list)
    # prob = np.array(prob_list)
    #
    # accu1 = float(np.sum(label))/flaot(len(label))
    # accu1 = accuracy_score(label[label==1],prob[prob==1])
    # accu = accuracy_score(label[label==0],prob[prob==0])
    accu = roc_auc_score(np.array(label_list), np.array(prob_list))
    print(' * EPOCH {epoch} | Validation AUC: {losses} '.format(epoch=epoch, losses=accu))
    return accu  # print(out[0,0])
    # break
def save_checkpoint(state, epoch, args):
    """Persist *state* to '<args.ckpt>/<epoch>_checkpoint.pth.tar'."""
    path = '{}/{}_checkpoint.pth.tar'.format(args.ckpt, epoch)
    print(path)
    torch.save(state, path)
# def load_checkpoint(epoch, args):
# torch.save(state, filename)
def adjust_learning_rate(optimizer, cur_iter, args):
    """Polynomial lr decay: lr = args.lr * (1 - cur_iter/max_iters)**lr_pow.

    Stores the new rate in args.running_lr and applies it to every
    optimizer parameter group.
    """
    decay = (1. - float(cur_iter) / args.max_iters) ** args.lr_pow
    args.running_lr = args.lr * decay
    for group in optimizer.param_groups:
        group['lr'] = args.running_lr
def main(args):
    """Build the model, optimizer and data loaders, then train with
    per-epoch checkpointing and validation.

    :param args: parsed command-line namespace (see the argparse setup)
    """
    # import network architecture (squeezenet chosen among the
    # alternatives tried during experimentation)
    model = models.squeezenet1_0(num_classes=2)
    model = torch.nn.DataParallel(model, device_ids=list(range(args.num_gpus))).cuda()
    cudnn.benchmark = True
    # collect the number of parameters in the network
    print("------------------------------------------")
    num_para = 0
    for name, param in model.named_parameters():
        num_mul = 1
        for x in param.size():
            num_mul *= x
        num_para += num_mul
    print(model)
    print("Number of trainable parameters %d in Model" % (num_para))
    print("------------------------------------------")
    # set the optimizer and loss
    optimizer = optim.Adam(model.parameters(), args.lr, eps=args.eps, weight_decay=args.weight_decay)
    criterion = DICELoss()
    # optionally resume from a checkpoint
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> Loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            start_epoch = checkpoint['epoch']
            model.load_state_dict(checkpoint['state_dict'])
            optimizer.load_state_dict(checkpoint['opt_dict'])
            print("=> Loaded checkpoint (epoch {})".format(checkpoint['epoch']))
        else:
            print("=> No checkpoint found at '{}'".format(args.resume))
    # loading data
    data_transforms = transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
    ])
    dataset = datasets.ImageFolder(root=args.root_path, transform=data_transforms)
    num_train = len(dataset)
    indices = list(range(num_train))
    # Fix: integer division -- np.random.choice requires an integer size;
    # num_train/10 is a float on Python 3 and raises TypeError ('//' is
    # also correct under Python 2).
    split = num_train // 10
    # Random 90/10 train/validation split over the same ImageFolder.
    validation_idx = np.random.choice(indices, size=split, replace=False)
    train_idx = list(set(indices) - set(validation_idx))
    train_sampler = SubsetRandomSampler(train_idx)
    validation_sampler = SubsetRandomSampler(validation_idx)
    train_loader = DataLoader(dataset, batch_size=args.batch_size, shuffle=False, num_workers=args.num_workers, pin_memory=True, sampler=train_sampler)
    validation_loader = DataLoader(dataset, batch_size=args.batch_size, shuffle=False, num_workers=args.num_workers, pin_memory=True, sampler=validation_sampler)
    print("Start training ...")
    best_epoch = 0
    best_accu = 0
    for epoch in range(args.start_epoch + 1, args.num_epochs + 1):
        start_time = time.time()
        train(train_loader, model, criterion, optimizer, epoch, args)
        # Save, then reload the checkpoint before validating so the
        # validated weights are exactly the ones persisted to disk.
        save_checkpoint({'epoch': epoch, 'state_dict': model.state_dict(), 'opt_dict': optimizer.state_dict()}, epoch, args)
        filename = args.ckpt + '/' + str(epoch) + '_checkpoint.pth.tar'
        checkpoint = torch.load(filename)
        state_dict = checkpoint['state_dict']
        model.load_state_dict(state_dict)
        accu = validation(validation_loader, model, epoch, args)
        if accu > best_accu:
            best_epoch = epoch
            best_accu = accu
        elapsed_time = time.time() - start_time
        print('epoch time ' + str(time.strftime("%H:%M:%S", time.gmtime(elapsed_time))) + ', remaining time ' + str(time.strftime("%H:%M:%S", time.gmtime(elapsed_time * (args.num_epochs - epoch)))))
    print("Training Done")
    print("Best epoch " + str(best_epoch) + ', auc score ' + str(best_accu))
if __name__ == '__main__':
assert LooseVersion(torch.__version__) >= LooseVersion('0.3.0'), \
'PyTorch>=0.3.0 is required'
parser = argparse.ArgumentParser()
# Model related arguments
# Path related arguments
parser.add_argument('--root_path', default='/home/wynonna/Documents/Research/Brats2018/course_pro/data/train/',
help='root directory of data')
parser.add_argument('--ckpt', default='./saved_models',
help='folder to output ciheckpoints')
parser.add_argument('--num_round', default=21, type=int)
# Data related arguments
parser.add_argument('--num_classes', default=2, type=int,
help='number of classes')
parser.add_argument('--num_workers', default=8, type=int,
help='number of data loading workers')
parser.add_argument('--shuffle', default=True, type=bool,
help='if shuffle the data during training')
parser.add_argument('--rotate_range', default=5, type=int,
help='if shuffle the data during training')
# optimization related arguments
parser.add_argument('--random_sample', action='store_true', default=True, help='whether to sample the dataset with random sampler')
parser.add_argument('--num_gpus', default=1, type=int, help='number of GPUs to use')
parser.add_argument('--batch_size', default=48, type=int,
help='training batch size')
parser.add_argument('--num_epochs', default=60, type=int,
help='epochs for training')
parser.add_argument('--start_epoch', default=0, type=int,
help='epoch to start training. useful if continue from a checkpoint')
parser.add_argument('--lr', default=8e-5, type=float,
help='start learning rate')
parser.add_argument('--lr_pow', default=0.98, type=float,
help='power in poly to drop learning rate')
parser.add_argument('--optim', default='RMSprop', help='optimizer')
parser.add_argument('--alpha', default='0.9', type=float, help='alpha in RMSprop')
# parser.add_argument('--betas', default='(0.9,0.999)', type=float, help='betas in Adam')
parser.add_argument('--eps', default=10**(-4), type=float, help='eps in RMSprop')
parser.add_argument('--weight_decay', default=1e-3, type=float, help='weights regularizer')
parser.add_argument('--momentum', default=0.8, type=float, help='momentum for RMSprop')
parser.add_argument('--save_epochs_steps', default=1, type=int,
help='frequency to save models after a particular number of epochs')
parser.add_argument('--save_epochs_steps_before', default=10, type=int,
help='frequency to save models after a particular number of epochs')
parser.add_argument('--particular_epoch', default=40, type=int,
help='after this number, we will save models more frequently')
parser.add_argument('--resume', default='',
help='the checkpoint that resumes from')
args = parser.parse_args()
print("Input arguments:")
for key, val in vars(args).items():
print("{:16} {}".format(key, val))
# train_file = open(args.train_path, 'r')
# train_dir = train_file.readlines()
args.ckpt = os.path.join(args.ckpt, str(args.num_round))
print('Models are saved at %s' % (args.ckpt))
if not os.path.isdir(args.ckpt):
os.makedirs(args.ckpt)
if args.start_epoch > 1:
args.resume = args.ckpt + '/' + str(args.start_epoch) + '_checkpoint.pth.tar'
args.running_lr = args.lr
args.epoch_iters = math.ceil(int(30000/args.batch_size))
args.max_iters = args.epoch_iters * args.num_epochs
main(args)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.