blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6bc12f0a49aa1c1b9ea82d0bfd50f225658caa67 | bd8acf5db2b9983470986b5badc500c20285ee24 | /example02_graph.py | 093ad01a2d47acfd69515c5e9264e15ed0a6cd58 | [] | no_license | JiXuanyuan/tensorflow_example | ff47e98b313abfce7b90b3e49803e4e4ef07a125 | 092abaa24a6e7cde61955c559c0141c83f17351b | refs/heads/master | 2020-06-17T19:05:59.913884 | 2019-07-21T09:31:56 | 2019-07-21T09:31:56 | 196,018,646 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,021 | py | import tensorflow as tf
# graph: the TensorFlow computation graph
# 1. A graph describes the computation flow; it does not hold data or run ops.
# 2. A graph is made of nodes and edges, corresponding to operators and tensors.
# 3. TensorFlow provides a default graph; newly created nodes join it by default.
# ==============================================
# 1. Using the default computation graph
# Build the computation graph (these constants become nodes in the default graph).
a1 = tf.constant(12, dtype=tf.float32, name="input1")
a2 = tf.constant(5, dtype=tf.float32, name="input2")
# Printing a tensor shows its name/shape/dtype, not a computed value
# (TF1 graph mode — no session is run in this example).
print(a1)
print(a2)
b5 = tf.add(a1, a2, name="add")
b6 = tf.subtract(a1, a2, name="sub")
b7 = tf.multiply(a1, a2, name="mul")
b8 = tf.divide(a1, a2, name="div")
print(b5)
print(b6)
print(b7)
print(b8)
# Fetch the default graph and print its serialized GraphDef representation.
# NOTE(review): tf.get_default_graph is TF1 API; under TF2 this needs
# tf.compat.v1 — confirm the targeted TensorFlow version.
print(tf.get_default_graph().as_graph_def())
# ==============================================
# 2. Creating a new, separate graph
g = tf.Graph()
with g.as_default():
    # Build the computation graph inside g instead of the default graph.
    c1 = tf.constant(3)
    c2 = tf.constant(4)
    d1 = c1 * c2
    print(c1)
    print(c2)
    print(d1)
    print(g.as_graph_def())
| [
"chenjiahui@chenjiahuideMacBook-Air.local"
] | chenjiahui@chenjiahuideMacBook-Air.local |
1947266d3ef696f1a305bc46c4a1750ba67c0529 | 54796f604e365646432244e577a9d1a0c220c7d3 | /todo/admin.py | 680332f8602ab4d278ed525ade4bda07ba7f1668 | [] | no_license | summerbdbd/todo_site | dbf120c68158febb3e27f3182c9db75b35e0fe27 | 2d1b5fa82a44ddaf12591a43ef1c9c56c66b0163 | refs/heads/master | 2022-06-25T12:09:47.585289 | 2020-04-30T13:24:15 | 2020-04-30T13:24:15 | 257,938,778 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | from django.contrib import admin
from .models import Todo
# todo/admin.py
# Register the Todo model with the Django admin site so its entries can be
# created and edited through the auto-generated admin interface.
admin.site.register(Todo)
| [
"noreply@github.com"
] | summerbdbd.noreply@github.com |
27ee695315777b4265254966acca17340270eb5a | c54841dd6617b6bf3c692cef623d249b99ae13ff | /homework/m2_iteration/prime.py | 79b1d947b95dd3aa27d1093d69fcab3d37f8888d | [] | no_license | ZhenJie-Zhang/Python | e95415a3dbd49b1e77bc62a9244d2aec64e4626e | 6c9802272aa12eb94ec488f77e3259247619d183 | refs/heads/master | 2020-08-13T21:29:31.294697 | 2019-10-20T15:27:24 | 2019-10-20T15:27:24 | 215,040,958 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,344 | py | # 5. 迴圈的練習-prime
# Given a positive integer bound, find how many primes are <= that bound,
# comparing two trial-division strategies and timing each one with
# time.process_time() (CPU time, not wall clock).
import time
t_0 = time.process_time()
# Inclusive upper bound for the search.
num = 10000
print('小於或等於{}的質數: '.format(num), end="")
count_prime = 0
# Strategy 1 (slow baseline): count every divisor of `test` from 1..test.
# A number is prime exactly when it has two divisors (1 and itself).
# This makes the whole scan O(n^2).
for test in range(1, num + 1):
    # print(test)
    count_factor = 0
    for factor in range(1, test + 1):
        if test % factor == 0:
            count_factor += 1
            # print(factor)
    if count_factor == 2:
        count_prime += 1
    # if count_prime == 1:
    #     print('{:d}'.format(test), end="")
    # elif count_prime != 1:
    #     print(', {:d}'.format(test), end=" ")
print()
print('共有{}個質數'.format(count_prime))
# Elapsed CPU time of strategy 1.
print(time.process_time() - t_0)
t_0 = time.process_time()
print('小於或等於{}的質數: '.format(num), end="")
count_prime = 0
# Strategy 2 (faster): only try divisors up to floor(sqrt(test)).  A prime
# then has exactly one divisor in that range (namely 1), hence `== 1`.
# Starting at test=2 also keeps 1 from being miscounted as prime.
for test in range(2, num + 1):
    # print(test)
    count_factor = 0
    # print(int((test ** 0.5) // 1))
    for factor in range(1, int((test ** 0.5) // 1)+1):
        if test % factor == 0:
            count_factor += 1
    if count_factor == 1:
        count_prime += 1
    # if count_prime == 1:
    #     print('{:d}'.format(test), end="")
    # elif count_prime != 1:
    #     print(', {:d}'.format(test), end=" ")
print()
print('共有{}個質數'.format(count_prime))
# Elapsed CPU time of strategy 2.
print(time.process_time() - t_0)
| [
"53026360+ZhenJie-Zhang@users.noreply.github.com"
] | 53026360+ZhenJie-Zhang@users.noreply.github.com |
317288bb41c5c374236f56788577a76f1c080b9c | 42fe2827d14a82043ade9393beaedf53e22a69f5 | /bebop_ws/devel/.private/bebop_msgs/lib/python2.7/dist-packages/bebop_msgs/msg/_CommonCommonStateCurrentDateChanged.py | 55096047d13f8e60d5b3ab4a3aa26cae99d7e236 | [] | no_license | cjbanks/bebop-software-framework | a3714646545e9d7d71299a365814bc87437f5e14 | 7da1bbdef4e84aa0ed793cfaad9fe133959ebe21 | refs/heads/master | 2023-04-30T17:52:23.255302 | 2020-11-18T18:32:41 | 2020-11-18T18:32:41 | 368,626,051 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,233 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from bebop_msgs/CommonCommonStateCurrentDateChanged.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import std_msgs.msg
class CommonCommonStateCurrentDateChanged(genpy.Message):
  # NOTE: This class is auto-generated by genpy from the ROS .msg definition
  # embedded in _full_text below; do not edit by hand — regeneration will
  # overwrite manual changes.
  _md5sum = "7b1c2ad09d95986b33cc46dd275d6aad"
  _type = "bebop_msgs/CommonCommonStateCurrentDateChanged"
  _has_header = True # flag to mark the presence of a Header object
  _full_text = """# CommonCommonStateCurrentDateChanged
# auto-generated from up stream XML files at
# github.com/Parrot-Developers/libARCommands/tree/master/Xml
# To check upstream commit hash, refer to last_build_info file
# Do not modify this file by hand. Check scripts/meta folder for generator files.
#
# SDK Comment: Date changed.\n Corresponds to the latest date set on the drone.\n\n **Please note that you should not care about this event if you are using the libARController API as this library is handling the connection process for you.**
Header header
# Date with ISO-8601 format
string date
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
string frame_id
"""
  # Message fields and their ROS types, kept in .msg declaration order.
  __slots__ = ['header','date']
  _slot_types = ['std_msgs/Header','string']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       header,date
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(CommonCommonStateCurrentDateChanged, self).__init__(*args, **kwds)
      # message fields cannot be None, assign default values for those that are
      if self.header is None:
        self.header = std_msgs.msg.Header()
      if self.date is None:
        self.date = ''
    else:
      self.header = std_msgs.msg.Header()
      self.date = ''
  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      _x = self
      # Header triple (seq, stamp.secs, stamp.nsecs) packed via the cached Struct.
      buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
      _x = self.header.frame_id
      # Strings are written as a uint32 length prefix followed by UTF-8 bytes.
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self.date
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    # NOTE: generated code shadows the builtin ``str`` with this parameter name.
    try:
      if self.header is None:
        self.header = std_msgs.msg.Header()
      end = 0
      _x = self
      start = end
      end += 12
      (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.header.frame_id = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.date = str[start:end].decode('utf-8')
      else:
        self.date = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) # most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    # This message has no array fields, so the body mirrors serialize().
    try:
      _x = self
      buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
      _x = self.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self.date
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    # This message has no array fields, so the body mirrors deserialize().
    try:
      if self.header is None:
        self.header = std_msgs.msg.Header()
      end = 0
      _x = self
      start = end
      end += 12
      (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.header.frame_id = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.date = str[start:end].decode('utf-8')
      else:
        self.date = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) # most likely buffer underfill
# genpy's shared Struct instance for a single length-prefix uint32, used by
# the string (de)serialization above.
_struct_I = genpy.struct_I
def _get_struct_I():
    # Accessor kept for symmetry with the other generated _get_struct_* helpers.
    global _struct_I
    return _struct_I
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
| [
"Chewie_Alex@nder1"
] | Chewie_Alex@nder1 |
0c45c116dcc4ff0eb06de34ab770795f920d7bda | f286c1a98f995b2206facd19347eb3ab5695adaf | /Proyecto1/settings.py | edc85581cfa0e99acea87ff695b22500f588cf81 | [] | no_license | manursanchez/practicasdjango | 8815fc0b038551ff7027fedcfd6e65af6df395ee | 5ae594213b68af7399a8adc909710d9af7ea8803 | refs/heads/main | 2023-07-20T15:38:28.022797 | 2021-08-25T16:53:53 | 2021-08-25T16:53:53 | 396,749,929 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,248 | py | """
Django settings for Proyecto1 project.
Generated by 'django-admin startproject' using Django 3.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'django-insecure-+ug=o9zx5f*3hd+-9%nvidz^jw6q_v+51957p!5k_d@axfa7d&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list is fine while DEBUG is True; must list real hostnames in production.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Proyecto1.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'Proyecto1.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# Default development database: file-backed SQLite next to the project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"mrodrigue212@alumno.uned.es"
] | mrodrigue212@alumno.uned.es |
73c728462aaa1aeb1ff14b80acd3d67f327d7557 | 106983cf0b8df622f514ecff2bb2fa4c794c9dac | /Misc/OpenCV/camshiftTest.py | 5677142b105f693d0656e9845a8b7bfcaa575dc3 | [] | no_license | michael5486/Senior-Design | 2d9ae521c637abf7c0825f85b32752ad61c62744 | 6b6c78bed5f20582a9753a9c10020c709d6b6e53 | refs/heads/master | 2021-01-19T09:58:35.378164 | 2017-05-26T17:17:13 | 2017-05-26T17:17:13 | 67,556,475 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,367 | py | #!/usr/bin/env python
import cv2.cv as cv
import serial
#ser = serial.Serial("/dev/ttyACM0",9600)
def is_rect_nonzero(r):
    """Return True when the (x, y, w, h) rectangle has positive area."""
    width, height = r[2], r[3]
    return width > 0 and height > 0
class CamShiftDemo:
    """Interactive CamShift colour tracker (Python 2, legacy ``cv`` API).

    Drag a rectangle over the object in the "CamShiftDemo" window to build
    a hue histogram; CamShift then tracks the object frame to frame and the
    tracked position is printed as a left/right steering hint.
    """
    def __init__(self):
        # Open the default camera and create the two display windows.
        self.capture = cv.CaptureFromCAM(0)
        cv.NamedWindow( "CamShiftDemo", 1 )
        cv.NamedWindow( "Histogram", 1 )
        cv.SetMouseCallback( "CamShiftDemo", self.on_mouse)
        self.drag_start = None # Set to (x,y) when mouse starts drag
        self.track_window = None # Set to rect when the mouse drag finishes
        print( "Keys:\n"
            " ESC - quit the program\n"
            " b - switch to/from backprojection view\n"
            "To initialize tracking, drag across the object with the mouse\n" )
    def hue_histogram_as_image(self, hist):
        """ Returns a nice representation of a hue histogram """
        histimg_hsv = cv.CreateImage( (320,200), 8, 3)
        # Log-scale the bins so small counts stay visible, then normalize to 0..255.
        mybins = cv.CloneMatND(hist.bins)
        cv.Log(mybins, mybins)
        (_, hi, _, _) = cv.MinMaxLoc(mybins)
        cv.ConvertScale(mybins, mybins, 255. / hi)
        w,h = cv.GetSize(histimg_hsv)
        hdims = cv.GetDims(mybins)[0]
        for x in range(w):
            xh = (180 * x) / (w - 1) # hue sweeps from 0-180 across the image
            val = int(mybins[int(hdims * x / w)] * h / 255)
            cv.Rectangle( histimg_hsv, (x, 0), (x, h-val), (xh,255,64), -1)
            cv.Rectangle( histimg_hsv, (x, h-val), (x, h), (xh,255,255), -1)
        histimg = cv.CreateImage( (320,200), 8, 3)
        cv.CvtColor(histimg_hsv, histimg, cv.CV_HSV2BGR)
        return histimg
    def on_mouse(self, event, x, y, flags, param):
        # Mouse callback: track the drag rectangle; releasing the button
        # commits the selection as the initial CamShift search window.
        if event == cv.CV_EVENT_LBUTTONDOWN:
            self.drag_start = (x, y)
        if event == cv.CV_EVENT_LBUTTONUP:
            self.drag_start = None
            self.track_window = self.selection
        if self.drag_start:
            # Normalize so the rectangle is valid regardless of drag direction.
            xmin = min(x, self.drag_start[0])
            ymin = min(y, self.drag_start[1])
            xmax = max(x, self.drag_start[0])
            ymax = max(y, self.drag_start[1])
            self.selection = (xmin, ymin, xmax - xmin, ymax - ymin)
    def run(self):
        """Main capture/track/display loop; ESC exits, 'b' toggles the view."""
        hist = cv.CreateHist([180], cv.CV_HIST_ARRAY, [(0,180)], 1 )
        backproject_mode = False
        print "hitting run section"
        x = 0
        while True:
            #print x
            #x = x + 1
            frame = cv.QueryFrame( self.capture )
            cv.Flip(frame, frame, 1)
            # Convert to HSV and keep the hue
            hsv = cv.CreateImage(cv.GetSize(frame), 8, 3)
            cv.CvtColor(frame, hsv, cv.CV_BGR2HSV)
            self.hue = cv.CreateImage(cv.GetSize(frame), 8, 1)
            cv.Split(hsv, self.hue, None, None, None)
            # Compute back projection
            backproject = cv.CreateImage(cv.GetSize(frame), 8, 1)
            # Run the cam-shift
            cv.CalcArrBackProject( [self.hue], backproject, hist )
            if self.track_window and is_rect_nonzero(self.track_window):
                crit = ( cv.CV_TERMCRIT_EPS | cv.CV_TERMCRIT_ITER, 10, 1)
                print self.track_window
                (iters, (area, value, rect), track_box) = cv.CamShift(backproject, self.track_window, crit)
                self.track_window = rect
                print self.track_window
                try:
                    #prints the center x and y value of the tracked ellipse
                    coord = track_box[0]
                    print "center = {}".format(coord)
                    # Steering hint relative to the horizontal image centre
                    # (320 assumes a 640-pixel-wide frame).
                    if (coord[0] < 320):
                        print "move right"
                        # ser.write("R")
                    elif (coord[0] == 320):
                        print "do nothing"
                    else:
                        print "move left"
                        # ser.write("L")
                except UnboundLocalError:
                    print "track_box is None"
            # If mouse is pressed, highlight the current selected rectangle
            # and recompute the histogram
            if self.drag_start and is_rect_nonzero(self.selection):
                sub = cv.GetSubRect(frame, self.selection)
                save = cv.CloneMat(sub)
                # Dim the whole frame, then restore the selection at full
                # brightness so the chosen region stands out.
                cv.ConvertScale(frame, frame, 0.5)
                cv.Copy(save, sub)
                x,y,w,h = self.selection
                cv.Rectangle(frame, (x,y), (x+w,y+h), (255,255,255))
                sel = cv.GetSubRect(self.hue, self.selection )
                cv.CalcArrHist( [sel], hist, 0)
                (_, max_val, _, _) = cv.GetMinMaxHistValue( hist)
                if max_val != 0:
                    cv.ConvertScale(hist.bins, hist.bins, 255. / max_val)
            elif self.track_window and is_rect_nonzero(self.track_window):
                # Same condition as the CamShift branch above, so track_box
                # was computed earlier in this iteration.
                print track_box
                cv.EllipseBox( frame, track_box, cv.CV_RGB(255,0,0), 3, cv.CV_AA, 0 )
            if not backproject_mode:
                cv.ShowImage( "CamShiftDemo", frame )
            else:
                cv.ShowImage( "CamShiftDemo", backproject)
            cv.ShowImage( "Histogram", self.hue_histogram_as_image(hist))
            c = cv.WaitKey(7) % 0x100
            if c == 27:
                break
            elif c == ord("b"):
                backproject_mode = not backproject_mode
# Entry point: run the interactive CamShift demo, then tear down all
# HighGUI windows on exit.
if __name__=="__main__":
    demo = CamShiftDemo()
    demo.run()
    cv.DestroyAllWindows()
| [
"michael5486@gmail.com"
] | michael5486@gmail.com |
3717212a7d113d2ba3c9aa7d7a5f7215bf8452ee | f452375e2cbd4bd9cae6ec9d037c4f6847b64146 | /printcolors.py | 3284fcaa7097f6bdf88f39c8f4a44678308449d6 | [] | no_license | jangdoyeon/printcolors | 167b3e699440955305d80a2061933d3fb96d6664 | 4853a9debf9999add785ecf2dfad38c445c37a14 | refs/heads/master | 2022-11-16T20:18:14.522319 | 2020-07-02T05:10:55 | 2020-07-02T05:10:55 | 268,464,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 382 | py | class Pcolors:
RED = '\033[95m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLO = '\033[93m'
ORANGE = '\033[91m'
GRAY = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
print(f"""
{Pcolors.RED}RED
{Pcolors.BLUE}BLUE
{Pcolors.GREEN}GREEN
{Pcolors.YELLO}YELLO
{Pcolors.ORANGE}ORANGE
{Pcolors.GRAY}GRAY
{Pcolors.BOLD}BOLD
{Pcolors.UNDERLINE}UNDERLINE
""")
| [
"ehdus85@naver.com"
] | ehdus85@naver.com |
1595a26a907b6a2ec61ed4c05a82c89b56bbc3ee | 1ab98b7c2ebb8b22fa51538901313055eca5ce8a | /Oplevering 2/palindroom-s1096607-ifict-poging1.py | 2c21255c2c1b80a8e99f07dd3833feb04ca6dcde | [] | no_license | Miesvanderlippe/ISCRIP | 69482bc18a09a4b50a3fedbf945af4f7eaeedcc3 | a3738ab4dd7be00a7d00a948888b35476422d786 | refs/heads/master | 2021-10-17T03:10:00.525229 | 2019-02-13T13:03:20 | 2019-02-13T13:03:20 | 110,585,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 706 | py | import math
def is_palindrome(word: str) -> bool:
    """Return True when *word* reads the same forwards and backwards.

    Comparing the whole string against its reverse gives the same answer
    as comparing the first half with the reversed second half: the middle
    character of an odd-length word never affects the result, and the
    empty string is trivially a palindrome.
    """
    return word == word[::-1]
# Script mode: prompt for a filename, then print every line of that file
# that is a palindrome.  Lines are stripped of surrounding whitespace and
# lowercased before the check, so comparison is case-insensitive.
if __name__ == '__main__':
    with open(input("File:\n")) as f:
        for line in f:
            stripped = line.strip().lower()
            if is_palindrome(stripped):
                print(stripped)
| [
"miesvanderlippe@gmail.com"
] | miesvanderlippe@gmail.com |
743a2d6746845c99593574ae9290a8a6cc8670f7 | d62b493e638700d1cbc031ed3d72ccaae1409ca5 | /www/camera.py | 5134eec9419556cde005fd505264c98b76a31cf6 | [
"MIT"
] | permissive | wwwins/OpenCV-Samples | c0f4b96248387bce2bddc011826427ee574c0565 | 6a88c411064d5a8d012fbc2299a6d85b4526785e | refs/heads/master | 2021-01-21T02:11:05.577850 | 2019-05-09T06:59:56 | 2019-05-09T06:59:56 | 58,920,978 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,957 | py | from imutils.video import VideoStream
import cv2
import time
import sys
import numpy as np
import os
# Path to the OpenCV face-detection cascade.  Alternative cascades tried on
# other machines are kept below for reference.
#cas_path = os.getcwd()
#cas_path += "/data/haarcascade_frontalface_default.xml"
#cas_path = "/home/pi/opencv_samples/data/lbpcascade_frontalface.xml"
#cas_path = "/home/pi/opencv_samples/data/haarcascade_frontalface_alt2.xml"
#cas_path = "~/Downloads/opencv_samples/data/haarcascade_frontalface_alt2.xml"
# NOTE(review): absolute, machine-specific path — breaks on any other host.
cas_path = "/Users/isobar/github/opencv_samples/data/lbpcascade_frontalface.xml"
faceCascade = cv2.CascadeClassifier(cas_path)
# Capture resolution requested from the webcam.
FRAME_WIDTH = 640
FRAME_HEIGHT = 480
class VideoCamera(object):
    """Webcam wrapper that yields JPEG-encoded frames, optionally with
    detected faces outlined (used by the streaming web front-end)."""
    def __init__(self):
        # Using OpenCV to capture from device 0. If you have trouble capturing
        # from a webcam, comment the line below out and use a video file
        # instead.
        self.video = self.VideoCapture()
        # If you decide to use video.mp4, you must have this file in the folder
        # as the main.py.
        # self.video = cv2.VideoCapture('video.mp4')
    def VideoCapture(self):
        # Open camera device 0 and request the configured capture resolution.
        video = cv2.VideoCapture(0)
        video.set(cv2.CAP_PROP_FRAME_WIDTH, FRAME_WIDTH)
        video.set(cv2.CAP_PROP_FRAME_HEIGHT, FRAME_HEIGHT)
        return video
    def __del__(self):
        # Release the camera handle when the wrapper is garbage-collected.
        self.video.release()
    def get_frame(self):
        """Grab one frame, draw a green box around each detected face, and
        return the frame encoded as JPEG bytes."""
        success, frame = self.video.read()
        # Face detection runs on the grayscale image.
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        faces = faceCascade.detectMultiScale(
            gray,
            scaleFactor=1.3,
            minNeighbors=5,
            minSize=(80, 80),
            flags=cv2.CASCADE_SCALE_IMAGE
        )
        # detectMultiScale returns an empty tuple when nothing is found.
        if faces is not ():
            for (x, y, w, h) in faces:
                cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
        ret, jpeg = cv2.imencode('.jpg', frame)
        return jpeg.tobytes()
    def get_simpleFrame(self):
        # Same as get_frame but without face detection/markup.
        success, frame = self.video.read()
        rec, jpeg = cv2.imencode('.jpg', frame)
        return jpeg.tobytes()
| [
"jacky.huang@isobar.com"
] | jacky.huang@isobar.com |
78c5d06ca90944e1b636708222886b7055533241 | 83b11f1c010ae1c13b4f51fb5b32502b613a153d | /第四模块 socket/windows巡检test.py | ccc8e61410661865fcce3f76ade12b0909c72f3b | [] | no_license | w312752508/Python_study | 2c4b98e294e0ec81a0bf99ad61e4fc871f5a4228 | f3e7bcfaf753a8ccd612cf405c7c0bcca0f4eca0 | refs/heads/master | 2021-09-21T00:53:45.612281 | 2018-08-18T05:52:06 | 2018-08-18T05:52:06 | 114,959,219 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | py | import sys,os,subprocess,time,json,re,psutil
# Ad-hoc check of psutil's cumulative disk I/O counters, converted from
# bytes to GiB (rounded to the nearest integer).
disk = psutil.disk_io_counters()
a= round(int(disk.read_bytes)/1024/1024/1024)
print(a)
print(round(int(disk.read_bytes)/1024/1024/1024))
print(round(int(disk.write_bytes)/1024/1024/1024))
| [
"312752508@qq.com"
] | 312752508@qq.com |
47befcf66e46b26472ad8cb956c2fc14284c7c9e | 3794bc772676d34a6794d19eedb41c2d8a7d39c0 | /ge_dqn/monitor.py | 53024700f3b5ca11545565d3ad057f2807cd0141 | [] | no_license | geyang/reinforcement_learning_learning_notes | 3a79af021b6b126e37b09bf1871cfe9852690abe | f862dbf496f7f5d6cb091604dfb808511de5aa9c | refs/heads/master | 2021-08-23T11:32:14.127137 | 2017-12-04T18:28:35 | 2017-12-04T18:28:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 970 | py | from copy import deepcopy
import numpy
def contextify(env):
    """Patch *env*'s class with context-manager hooks so instances can be
    used in a ``with`` statement; leaving the block calls ``close()``.

    Returns the same *env* instance for convenient chaining.  Note the
    patch is applied to the class, so it affects every instance of it.
    """
    def _enter(self):
        return self

    def _exit(self, *exc_info):
        # Propagate close()'s return value, matching the original lambda.
        return self.close()

    cls = type(env)
    cls.__enter__ = _enter
    cls.__exit__ = _exit
    return env
def monitor(env):
    """Wrap *env*'s ``step`` so that, when an episode ends, the returned
    ``info`` is augmented with episode statistics.

    On the terminal step, ``info`` gains (or is extended with) a dict of
    ``total_reward``, ``average_reward`` and ``timesteps``; rewards are
    accumulated across calls and reset after each episode.  Returns the
    same *env* with ``env.step`` replaced.
    """
    episode_rewards = []
    _step = env.step

    def step(action):
        s, rew, done, info = _step(action)
        episode_rewards.append(rew)
        if not done:
            return s, rew, done, info
        episode_info = dict(
            total_reward=sum(episode_rewards),
            average_reward=numpy.mean(episode_rewards),
            timesteps=len(episode_rewards)
        )
        episode_rewards.clear()
        if type(info) is list:
            info = deepcopy(info) + [episode_info]
        elif type(info) is tuple:
            # BUG FIX: the original `tuple(*deepcopy(info), *episode_info)`
            # raised TypeError (tuple() takes at most one argument) and would
            # have splatted only the dict's keys; append the stats dict as a
            # trailing element, mirroring the list branch above.
            info = deepcopy(info) + (episode_info,)
        elif hasattr(info, 'update'):
            info = deepcopy(info)
            info.update(**episode_info)
        return s, rew, done, info

    env.step = step
    return env
| [
"yangge1987@gmail.com"
] | yangge1987@gmail.com |
cc48714970e03a701c840881b03fb876dff65246 | b939c20a7dcc8140c653a35867e8468026ebe085 | /lpthw-part2/ex20.py | 56b4fd82a211d024b876bda2dbedaba288170b54 | [] | no_license | jupiterhub/learn-python-the-hard-way | bbadaab0c37059dfbd82d92c5304daf4b90c7245 | c3a256acaa4fd7212c6cf76d1430f522f612f92c | refs/heads/master | 2021-07-20T03:30:29.400604 | 2017-10-27T09:36:35 | 2017-10-27T09:36:35 | 105,427,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 675 | py | # More functions, with file
from sys import argv
# Unpack the command line: argv[0] is the script name, argv[1] the file to read.
script, input_file = argv
def print_all(f):
    """Print the entire remaining contents of file object *f*."""
    contents = f.read()
    print(contents)
def rewind(f):
    """Move the read position of *f* back to the start, like rewinding a tape."""
    f.seek(0)
def print_a_line(line_count, f):
    """Print *line_count* followed by the next line read from *f*."""
    current_line = f.readline()
    print(line_count, current_line)
# Demo driver: print the whole file, rewind it, then print its first three
# lines with manual line numbering.
current_file = open(input_file)
print("First Let's print the while file:\n")
print_all(current_file)
print("\nNow let's rewind, kind of like a tape.\n")
rewind(current_file)
print("Let's print three lines:")
current_line = 1
print_a_line(current_line, current_file)
current_line += 1
print_a_line(current_line, current_file)
current_line += 1
print_a_line(current_line, current_file)
| [
"jupiter.adverts@gmail.com"
] | jupiter.adverts@gmail.com |
54fbb5addb43bb517f26faddc374f91f535422f3 | 56b8de541b846423d9e0f4182dbcbd7ed625a4c7 | /project_internship/user_model/custom_model/account/models.py | 0c43116099eb31023fbbd2458df93c89bded7c33 | [] | no_license | 8264044685/django_project | a042d64070458318e0e1830de71f966193277242 | 6dab5e6090fbf9152cb5131de6e0eb4c34d15c23 | refs/heads/master | 2020-12-13T17:01:59.514053 | 2020-01-23T06:51:36 | 2020-01-23T06:51:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,287 | py | from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
from django import forms
# from django.contrib.auth.hashers import make_password
class MyAccountManager(BaseUserManager):
    """Model manager for the custom Account user model.

    Provides the two factory methods Django expects from a custom user
    manager: ``create_user`` and ``create_superuser``.
    """
    def create_user(self, email, username, password=None):
        # Both identifying fields are mandatory.
        if not email:
            raise ValueError('Users must have an email address')
        if not username:
            raise ValueError('Users must have a username')
        user = self.model(
            email=self.normalize_email(email),
            username=username,
        )
        # Hash the password (never stored in plain text) and persist.
        user.set_password(password)
        user.save(using=self._db)
        return user
    def create_superuser(self, email, username, password):
        user = self.create_user(
            email=self.normalize_email(email),
            password=password,
            username=username,
        )
        # Promote the freshly created user to full admin rights.
        user.is_admin = True
        user.is_staff = True
        user.is_superuser = True
        user.save(using=self._db)
        return user
class Account(AbstractBaseUser):
    """Custom user model keyed on e-mail (USERNAME_FIELD) with profile data."""
    email = models.EmailField(verbose_name="email", max_length=60, unique=True)
    username = models.CharField(max_length=30, unique=True)
    first_name =models.CharField(max_length=30,blank=True)
    last_name = models.CharField(max_length=30,blank=True)
    address = models.TextField(blank=True)
    # NOTE(review): IntegerField drops leading zeros and limits length — a
    # CharField is usually safer for phone numbers; confirm requirements.
    mobile_no = models.IntegerField()
    city = models.CharField(max_length=50,blank=True)
    state = models.CharField(max_length=50,blank=True)
    country = models.CharField(max_length=50, blank=True)
    profilePicture = models.FileField(upload_to='photos/%Y/%m/%d', blank=True)
    # password = forms.CharField(widget=forms.PasswordInput)
    date_joined = models.DateTimeField(verbose_name='date joined', auto_now_add=True)
    last_login = models.DateTimeField(verbose_name='last login', auto_now=True)
    is_admin = models.BooleanField(default=False)
    is_active = models.BooleanField(default=True)
    is_staff = models.BooleanField(default=False)
    is_superuser = models.BooleanField(default=False)
    # Authenticate with the e-mail address; username is still required when
    # creating an account (e.g. via createsuperuser).
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['username']
    objects = MyAccountManager()
    def __str__(self):
        return self.email
    # For checking permissions. to keep it simple all admin have ALL permissons
    def has_perm(self, perm, obj=None):
        return self.is_admin
    # Does this user have permission to view this app? (ALWAYS YES FOR SIMPLICITY)
    def has_module_perms(self, app_label):
        return True
| [
"parasdabhi1996@gmail.com"
] | parasdabhi1996@gmail.com |
815fb3177d93f4c5b3da4d57786399655d7a5e2b | 493a36f1f8606c7ddce8fc7fe49ce4409faf80be | /.history/B073040023/client_20210614185342.py | 411412020365d07802e69305599262f66838a62f | [] | no_license | ZhangRRz/computer_network | f7c3b82e62920bc0881dff923895da8ae60fa653 | 077848a2191fdfe2516798829644c32eaeded11e | refs/heads/main | 2023-05-28T02:18:09.902165 | 2021-06-15T06:28:59 | 2021-06-15T06:28:59 | 376,568,344 | 0 | 0 | null | 2021-06-13T14:48:36 | 2021-06-13T14:48:36 | null | UTF-8 | Python | false | false | 5,078 | py | import socket
import threading
import tcppacket
import struct
from time import sleep
# socket.socket() will create a TCP socket (default)
# socket.socket(socket.AF_INET, socket.SOCK_STREAM) to explicitly define a TCP socket
# Module-level UDP socket plus the server endpoint shared by the request
# helpers below.
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM) # explicitly define a UDP socket
udp_host = '127.0.0.1' # Host IP
udp_port = 12345 # specified port to connect
def init_new_calc_req(msg):
    """Send one calculation request *msg* to the UDP server and print the
    answer, ACKing every reply until the server sets the FIN flag.

    The text is wrapped in a custom TCP-like packet (tcppacket.TCPPacket)
    and sent to (udp_host, udp_port).
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    oldmsg = msg.encode('utf-8')
    # BUG FIX: the original printed the undefined name `data` here, which
    # raised NameError before anything was sent.
    print(oldmsg)
    # Send the encoded payload, consistent with the other request helpers.
    tcp = tcppacket.TCPPacket(data=oldmsg)
    tcp.assemble_tcp_feilds()
    sock.sendto(tcp.raw, (udp_host, udp_port))
    # print("UDP target IP:", udp_host)
    # print("UDP target Port:", udp_port) # Sending message to UDP server
    while True:
        data, address = sock.recvfrom(512*1024)
        sock.connect(address)
        # Strip the fixed-size header; the remainder is the UTF-8 answer.
        s = struct.calcsize('!HHLLBBH')
        unpackdata = struct.unpack('!HHLLBBH', data[:s])
        msg = data[s:].decode('utf-8')
        print(oldmsg, "is", msg)
        # Field 5 is the flags byte; an odd value is treated as FIN
        # (last packet of the reply).
        if unpackdata[5] % 2:
            fin_flag = 1
        else:
            fin_flag = 0
        # Acknowledge the reply, echoing FIN back when it was set.
        tcp = tcppacket.TCPPacket(
            data="ACK".encode('utf-8'),
            flags_ack=1,
            flags_fin=fin_flag)
        tcp.assemble_tcp_feilds()
        print("ACK send to (IP,port):", address)
        sock.sendto(tcp.raw, address)
        if fin_flag:
            break
def init_new_videoreq_req(i):
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
msg = "video 1".encode('utf-8')
# print("UDP target IP:", udp_host)
# print("UDP target Port:", udp_port)
tcp = tcppacket.TCPPacket(data=msg)
tcp.assemble_tcp_feilds()
sock.sendto(tcp.raw, (udp_host, udp_port)) # Sending message to UDP server
recvdata = b''
ack_seq = 0
seq = 0
counter = 0
while True:
data, address = sock.recvfrom(512*1024)
s = struct.calcsize('!HHLLBBHHH')
raw = struct.unpack('!HHLLBBHHH', data[:s])
print("receive packet from ", address,
"with header", raw)
if(raw[2] == ack_seq and raw[7] == 0):
recvdata += data[s:]
if(raw[5] % 2):
# fin_falg
fin_flag = 1
else:
fin_flag = 0
ack_seq += 1
counter += 1
else:
print("Receive ERROR packet from ", address)
fin_flag = 1
counter = 3
# --------------------------------------------
# send ACK
if(counter == 3):
tcp = tcppacket.TCPPacket(
data=str("ACK").encode('utf-8'),
seq=seq, ack_seq=ack_seq,
flags_ack=1,
flags_fin=fin_flag)
tcp.assemble_tcp_feilds()
print("ACK send to (IP,port):", address,
"with ack seq: ", ack_seq, " and seq: ", seq)
sock.sendto(tcp.raw, address)
if(not fin_flag):
counter = 0
seq += 1
# --------------------------------------------
print(fin_flag)
if(fin_flag):
break
savename = str(i+1)+"received.mp4"
f = open(savename, "wb")
f.write(recvdata)
f.close()
def init_new_dns_req(i):
# ---------------------
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
oldmsg = msg = "dns google.com"
msg = msg.encode('utf-8')
tcp = tcppacket.TCPPacket(data=msg)
tcp.assemble_tcp_feilds()
sock.sendto(tcp.raw, (udp_host, udp_port))
# print("UDP target IP:", udp_host)
# print("UDP target Port:", udp_port)
while True:
data, address = sock.recvfrom(512*1024)
sock.connect(address)
s = struct.calcsize('!HHLLBBH')
unpackdata = struct.unpack('!HHLLBBH', data[:s])
msg = data[s:].decode('utf-8')
print(oldmsg,"is", msg)
if(unpackdata[5] % 2):
# fin_falg
fin_falg = 1
else:
fin_falg = 0
tcp = tcppacket.TCPPacket(
data="ACK".encode('utf-8'),
flags_ack=1,
flags_fin=fin_falg)
tcp.assemble_tcp_feilds()
print("ACK send to (IP,port):", address)
sock.sendto(tcp.raw, address)
if(fin_falg):
break
# ----------------------
# def init_new
threads = []
#Calculation
print("Demo calculation function")
init_new_calc_req("calc 2 + 6")
sleep(0.25)
init_new_calc_req("calc 2 - 6")
sleep(0.25)
init_new_calc_req("calc 2 * 6")
sleep(0.25)
init_new_calc_req("calc 2 / 6")
sleep(0.25)
init_new_calc_req("calc 2 ^ 6")
sleep(0.25)
init_new_calc_req("calc 16 sqrt")
sleep(0.25)
# threads.append(threading.Thread(target = init_new_calc_req, args = (i,)))
# threads[-1].start()
# for i in range(1):
# threads.append(threading.Thread(target = init_new_dns_req, args = (i,)))
# threads[-1].start()
# for i in range(1):
# threads.append(threading.Thread(target = init_new_videoreq_req, args = (i,)))
# threads[-1].start() | [
"tom95011@gmail.com"
] | tom95011@gmail.com |
ac61d410d9419c6949dc2e7bb0e4fd3b37e85afe | 2b7efe276d1dfdc70a4b5cd59ae863b7b7a1bd58 | /euler35.py | 24b79c529bb65377213bed68a3834c21df6f4544 | [] | no_license | mckkcm001/euler | 550bbd126e8d9bb5bc7cb854147399060f865cfc | 8cf1db345b05867d47921b01e8c7e4c2df4ee98d | refs/heads/master | 2021-01-01T17:43:28.799946 | 2017-11-07T02:17:34 | 2017-11-07T02:17:34 | 18,375,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 546 | py | import math
n = [2]
def is_prime(n):
if n % 2 == 0 and n > 2:
return False
for i in range(3, int(math.sqrt(n)) + 1, 2):
if n % i == 0:
return False
return True
def is_circ(n):
a = n
for i in range(len(str(n))):
a = 10**(len(str(a))-1)*(a%10)+ a//10
if not is_prime(a):
return False
return True
for i in range(3,1000000,2):
if i%10 == 0:
continue
if is_circ(i):
n.append(i)
print(len(n))
| [
"noreply@github.com"
] | mckkcm001.noreply@github.com |
b3a1fb9a0ba8e08ed80a035a97995f48e1306764 | 1bc1ddab0e65a65629edd19544bfadaab46c1b93 | /src/cnn_utils.py | 65993eeb2fb7b2504b1293c6002581d7c4a0150c | [
"MIT"
] | permissive | melanierbutler/cassava | 24e87fd4938721bb26d6859525a948758b3ac688 | 6b1feda4bc779025d3e8629d4d4966871e998460 | refs/heads/main | 2023-03-18T04:59:19.713003 | 2021-03-05T00:29:49 | 2021-03-05T00:29:49 | 334,276,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,354 | py | # import packages
import numpy as np
import os
import pandas as pd
import pathlib
import PIL
import PIL.Image
import matplotlib.pyplot as plt
from matplotlib import rcParams
from sklearn.utils import shuffle
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
def set_plot(size):
"""Sets style preferences and text sizes for matplotlib plots."""
rcParams['font.family'] = 'sans-serif'
rcParams['font.sans-serif'] = ['Arial']
rcParams['axes.grid']=False
rcParams['xtick.minor.visible']=True
rcParams['ytick.minor.visible']=True
rcParams['xtick.direction']='in'
rcParams['ytick.direction']='in'
plt.rc('axes', titlesize=size) # fontsize of the axes title
plt.rc('axes', labelsize=size) # fontsize of the x and y labels
plt.rc('xtick', labelsize=size*0.8) # fontsize of the tick labels
plt.rc('ytick', labelsize=size*0.8) # fontsize of the tick labels
def balance_df(df, label='label', class_size=1000):
"""Resamples data frame containing class labels so that every class has an equal class size.
Classes are sampled with replacement if they exceed the desired class size, and without
replacement if they do not.
Inputs
- df : name of dataframe containing sample data.
- label : name of column containing one-hot encoded class labels.
- class_size : desired size of each class after resampling.
Returns
balanced_df - a dataframe with balanced classes."""
balanced_df = pd.DataFrame()
n_classes = df[label].nunique()
for i in range(n_classes):
one_class = df[df[label] == i]
if len(one_class) >= 2000:
replace=False
else:
replace=True
idx = np.random.choice(df[df[label] == i].index, size=class_size, replace=replace)
temp = df.iloc[idx]
balanced_df = pd.concat([balanced_df, temp])
balanced_df = balanced_df.sample(frac=1).reset_index().rename(columns={'index':'old_index'})
return balanced_df
def plot_batch(dfiterator, label_key=None, cutmix=False):
"""Plots the next batch of images and labels in a keras dataframe iterator.
Inputs:
-dfiterator: keras dataframe iterator
-label_key: series or dictionary such that label_key[image_label] returns a class string.
-cutmix: whether or not the generator is a cutmix generator."""
bs = dfiterator.batch_size
images, labels = next(dfiterator)
if cutmix:
cols = 3
else:
cols = 4
rows = int(bs / cols) + int(bs % cols > 0)
fig, axes = plt.subplots(rows, cols, figsize=(16, 5 * rows))
axes = axes.flatten()
for i, (img, label) in enumerate(zip(images, labels)):
axes[i].imshow(img)
axes[i].axis('off')
if label_key is not None:
if cutmix:
max_class = np.argsort(label)[-1]
second_class = np.argsort(label)[-2]
max_pct = round(label[max_class] * 100)
second_pct = round(label[second_class] * 100)
title = label_key[max_class]+ ' / ' + str(max_pct) + '%\n' + label_key[second_class] + ' / ' + str(second_pct) + '%'
else:
title = label_key[np.argmax(label)]
axes[i].set_title(title) | [
"butler.464@gmail.com"
] | butler.464@gmail.com |
bf357e5ecbd0d62ab712fa96cf653e5384b55992 | 368370e5539e3799fa94eacd51a53b0cbda06f80 | /model_factory/models/MolWeightRegressor/model.py | 0666bb3946745ab3215e8b20598c6803bcb7b440 | [] | no_license | ytbai/chem_transformer | 6700fa3ee09fa5705ec14185bcc33a5bb796200f | aab9161df9cab50c5440e95bfa27e72e52cbb4d6 | refs/heads/master | 2022-11-27T21:07:06.268138 | 2020-08-06T06:00:09 | 2020-08-06T06:00:09 | 282,987,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 318 | py | import torch
import numpy as np
from torch import nn
from model_factory.modules import *
class MolWeightRegressor(nn.Module):
def __init__(self):
super().__init__()
self.d_model = 64
self.n_cxt = 97
self.head = RegressorHead(self.d_model, self.n_cxt)
def forward(self, x):
return self.head(x) | [
"ytbai@princeton.edu"
] | ytbai@princeton.edu |
320aa009bc8015194f321089be13615ebf99be42 | 8b83d79425985e9c87ff4b641c2dcb6a151f3aa1 | /recipes/templatetags/markdown.py | 75cbd8e9f44234b8d253b147c3548fd001844065 | [] | no_license | akx/pyttipannu | e1366d982bae62a70da24b7da1a93c40efb51217 | 7b02f7d18d594beddb64beb99283c738ca06b8f0 | refs/heads/master | 2021-01-13T09:15:59.086825 | 2016-09-29T13:52:17 | 2016-09-29T13:52:17 | 69,023,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py | from django.template import Library
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
from markdown import markdown as render_markdown
register = Library()
@register.filter
def markdown(s):
return mark_safe(render_markdown(force_text(s)))
| [
"akx@iki.fi"
] | akx@iki.fi |
1e755eb994acd901dde6244445fe1f9da6f63909 | ceeb00e3bde18c2caccdf48a6b3be2f2f08c6602 | /To print inverted half pyramid using characters.py | 63bb2aff8076fb07f9ee3526e8d59ce444efb31b | [] | no_license | RAHULSHARMA63/To-print-inverted-half-pyramid-using-characters | a2ae1ffe7fb181d1a632d5d16c405437e3ffb35b | 0f1ea65787960d51ac018ef943cdb37baf8f9656 | refs/heads/main | 2023-08-15T18:50:41.693654 | 2021-10-20T13:12:01 | 2021-10-20T13:12:01 | 419,335,443 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | rows = int(input("Enter number of rows: "))
for i in range(rows, 0, -1):
for j in range(0, i):
print("* ", end=" ")
print("\n") | [
"noreply@github.com"
] | RAHULSHARMA63.noreply@github.com |
dfa802d2eab75f6143932b9db16d2742cd829829 | 84ee74894d1e6d76281dd1d3b76ee1dcde0d36b5 | /plotting/visualisePhi.py | 1c7e4c4f7e5da898e37f21b143394c229a9fa1a5 | [] | no_license | pyccel/pygyro | e3f13e5679b37a2dfebbd4b10337e6adefea1105 | a8562e3f0dd8fd56159785e655f017bbcae92e51 | refs/heads/master | 2023-03-10T07:43:17.663359 | 2022-08-17T12:06:25 | 2022-08-17T12:06:25 | 170,837,738 | 4 | 3 | null | 2023-01-02T10:09:08 | 2019-02-15T09:27:22 | Python | UTF-8 | Python | false | false | 2,652 | py | import argparse
from mpi4py import MPI
import numpy as np
from pygyro.model.grid import Grid
from pygyro.model.layout import LayoutSwapper, getLayoutHandler
from pygyro.poisson.poisson_solver import DensityFinder, QuasiNeutralitySolver
from pygyro.utilities.grid_plotter import SlicePlotterNd
from pygyro.initialisation.setups import setupCylindricalGrid
from pygyro.diagnostics.norms import l2
parser = argparse.ArgumentParser(
description='Plot the intial electric potential')
parser.add_argument('const_filename', type=str,
help='The constants file describing the setup')
args = parser.parse_args()
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
distribFunc, constants, t = setupCylindricalGrid(constantFile=args.const_filename,
layout='v_parallel',
comm=comm,
allocateSaveMemory=True)
nprocs = distribFunc.getLayout(distribFunc.currentLayout).nprocs[:2]
layout_poisson = {'v_parallel_2d': [0, 2, 1],
'mode_solve': [1, 2, 0]}
layout_vpar = {'v_parallel_1d': [0, 2, 1]}
layout_poloidal = {'poloidal': [2, 1, 0]}
remapperPhi = LayoutSwapper(comm, [layout_poisson, layout_vpar, layout_poloidal],
[nprocs, nprocs[0], nprocs[1]
], distribFunc.eta_grid[:3],
'mode_solve')
remapperRho = getLayoutHandler(
comm, layout_poisson, nprocs, distribFunc.eta_grid[:3])
phi = Grid(distribFunc.eta_grid[:3], distribFunc.getSpline(slice(0, 3)),
remapperPhi, 'mode_solve', comm, dtype=np.complex128)
rho = Grid(distribFunc.eta_grid[:3], distribFunc.getSpline(slice(0, 3)),
remapperRho, 'v_parallel_2d', comm, dtype=np.complex128)
density = DensityFinder(6, distribFunc.getSpline(3),
distribFunc.eta_grid, constants)
QNSolver = QuasiNeutralitySolver(distribFunc.eta_grid[:3], 7, distribFunc.getSpline(0),
constants, chi=0)
distribFunc.setLayout('v_parallel')
density.getPerturbedRho(distribFunc, rho)
QNSolver.getModes(rho)
rho.setLayout('mode_solve')
phi.setLayout('mode_solve')
QNSolver.solveEquation(phi, rho)
phi.setLayout('v_parallel_2d')
rho.setLayout('v_parallel_2d')
QNSolver.findPotential(phi)
norm = l2(distribFunc.eta_grid, remapperPhi.getLayout('v_parallel_2d'))
val = norm.l2NormSquared(phi)
print(val)
plotter = SlicePlotterNd(phi, 0, 1, True, sliderDimensions=[
2], sliderNames=['z'])
if (rank == 0):
plotter.show()
else:
plotter.calculation_complete()
| [
"noreply@github.com"
] | pyccel.noreply@github.com |
0f59ddf53e19bb9c1f3b0b8ef1a3e04546cc89e4 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/brackets_20200810105706.py | e35d818bc5c2a83d99fa7e410edda4e403b93436 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | def brackets(S):
# "{[()()]}"
stack = []
for i in S:
stack.append(i)
for i in S:
if i == "(" and stack.pop()
print(brackets("{[()()]}"))
| [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
ce8fbb9544928f2604a8b3505be8f46ed4db75c8 | 29e07a75504ed71a3706b77db06c75565ec90acc | /app/utils/client_session.py | 588dda291ad3ed41e5ec63ecc099ca7f8cbc129d | [] | no_license | adilamirov/cheap-flights | 423bdd450fea4661f1b224df2f4c2bb99a34b341 | f8606a370789b24a1028168bf151f090bde29599 | refs/heads/master | 2022-11-28T09:44:08.666557 | 2020-08-06T07:16:34 | 2020-08-06T07:16:34 | 285,501,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | import logging
from aiohttp import ClientSession
from aiohttp.abc import Application
log = logging.getLogger(__name__)
async def setup_client_session(app: Application):
log.info('Creating aiohttp.ClientSession')
app['client_session'] = ClientSession(
headers={'Accept': 'application/json'}
)
try:
yield
finally:
log.info('Closing ClientSession')
await app['client_session'].close()
log.info('ClientSession closed')
| [
"adil.e.amirov@ya.ru"
] | adil.e.amirov@ya.ru |
f764ad8a7ee26405601aec311bef1e00d846245e | 2fa492f92b222ddaa42140ddafffae0cd55b83d5 | /src/extensions/quran/__init__.py | a4b74d35083eaca0fa083525dc2afa499b9a0040 | [
"MIT"
] | permissive | Durkastan/durkabot | 09cec082f1239f1ab13b3b1a2c3d89e0feb088f1 | 4ada75626952bf66376d96c254ca0e9ada9d33cc | refs/heads/development | 2021-10-25T10:15:22.084376 | 2019-01-03T10:42:19 | 2019-01-03T10:42:19 | 136,093,659 | 3 | 2 | MIT | 2019-02-25T02:00:24 | 2018-06-04T23:27:44 | HTML | UTF-8 | Python | false | false | 87 | py | from extensions.quran.quran import Quran
def setup(bot):
bot.add_cog(Quran(bot))
| [
"40224433+martomato@users.noreply.github.com"
] | 40224433+martomato@users.noreply.github.com |
0b6f64806cc4dba27b669fa678ab67215c794eaa | c645e0340f9674a9140110e3cef726e4e1087c0a | /Case/chart/migrations/0003_auto_20181223_0241.py | 31669ddaabb454b08f8d7b0af7677a7dc2573143 | [] | no_license | blu3cat3803/alcohol-density-detector | 858480a29bb6f30e7e8863e6963c357056c0f778 | fe61dea16734b39823d9f1337701e25163b4c029 | refs/heads/master | 2020-04-16T10:09:57.155566 | 2019-01-19T21:16:02 | 2019-01-19T21:16:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-12-23 02:41
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('chart', '0002_auto_20181222_1000'),
]
operations = [
migrations.AlterField(
model_name='alcohol_1',
name='v',
field=models.DecimalField(decimal_places=1, max_digits=5),
),
]
| [
"noreply@github.com"
] | blu3cat3803.noreply@github.com |
5328be94b6b7b5d34270b3276badb49bfb04b4f1 | d886f41ac037343b6b9652977f753808117e6246 | /Behaviors/FK_Relative_Reverse_01.py | 0a1594dbcc59fe2ead208d9d03c6eabe281422a2 | [] | no_license | TPayneExperience/TrevorPaynes_RigAndAnimSuite | 5e918be2de896fdacf2da039815e85b91cf0d7ed | 18e0482ca6d70277b6455d9a14e6b10406f1553f | refs/heads/master | 2023-09-03T04:14:48.862905 | 2021-11-10T02:50:54 | 2021-11-10T02:50:54 | 275,663,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,254 | py |
import pymel.core as pm
import Abstracts.Abstract_Behavior as absBhv
import Utilities.Rig_Utilities as rigUtil
import Utilities.Logger as log
class FK_Relative_01(absBhv.Abstract_Behavior):
bhvType = 'FK Relative Reverse'
validLimbTypes = (4,) # rigData.LIMB_TYPES
groupType = 'FKR' # LookAt, IKPV...
groupShape = 'Cube_Poly'
groupCount = 1
groupMoveable = False # for moving control pivots
uiOrderIndex = 250
usesJointControls = False
usesLimbControls = True
bakeLosesData = True
duplicateJointGroups = False
def InitLimb(self, limb):
log.funcFileDebug()
limbGroup = rigUtil.GetLimbGroups(limb, self.groupType)[0]
jointGroups = pm.listConnections(limb.jointGroups)
jointGroup = rigUtil.SortGroups(jointGroups)[-1]
joint = pm.listConnections(jointGroup.joint)[0]
pm.parent(limbGroup, joint)
rigUtil.ResetAttrs(limbGroup)
pm.parent(limbGroup, limb)
def CleanupLimb(self, limb):
log.funcFileDebug()
#============= FOR BEHAVIOR OPERATION ============================
def Setup_ForBhvOp(self, limb):
pass
def Teardown_ForBhvOp(self, limb):
pass
#============= SETUP ============================
def Setup_Rig_Controls(self, limb):
log.funcFileDebug()
limbGroup = rigUtil.GetLimbGroups(limb, self.groupType)[0]
limbControl = pm.listConnections(limbGroup.control)[0]
jointGroups = pm.listConnections(limb.jointGroups)
jointGroups = rigUtil.SortGroups(jointGroups)[::-1]
controls = []
# Parent control hierarchy
for i in range(len(jointGroups)-1):
childGroup = jointGroups[i+1]
parentCtr = pm.listConnections(jointGroups[i].control)[0]
pm.parent(childGroup, parentCtr)
controls.append(parentCtr)
# Parent Root Joint group to Control
childGroup = jointGroups[0]
pm.parentConstraint(limbControl, childGroup, mo=1)
# Bind rotations
multNode = pm.createNode('multiplyDivide')
pm.connectAttr(limbControl.rotate, multNode.input1)
scalar = 1.0/max(len(controls)-2, 1)
multNode.input2.set(scalar, scalar, scalar)
for childControl in controls[1:]:
pm.connectAttr(multNode.output, childControl.rotate)
# External
parentControl = rigUtil.GetParentControl(limb)
if parentControl:
pm.parentConstraint(parentControl, limbGroup, mo=1)
def Setup_Constraint_JointsToControls(self, limb):
log.funcFileDebug()
for group in pm.listConnections(limb.jointGroups):
joint = pm.listConnections(group.joint)[0]
control = pm.listConnections(group.control)[0]
pm.parentConstraint(control, joint, mo=1)
def Setup_Constraint_ControlsToXforms(self, limb,
xforms, hasPosCst, hasRotCst, hasScaleCst):
log.funcFileDebug()
limbGroup = rigUtil.GetLimbGroups(limb, self.groupType)[0]
limbControl = pm.listConnections(limbGroup.control)[0]
xform = xforms[-1]
if hasPosCst:
pm.pointConstraint(xform, limbControl, mo=1)
if hasRotCst:
pm.orientConstraint(xform, limbControl, mo=1)
if hasScaleCst:
pm.scaleConstraint(xform, limbControl)
return [limbControl]
#============= TEARDOWN ============================
def Teardown_Rig_Controls(self, limb):
log.funcFileDebug()
limbGroup = rigUtil.GetLimbGroups(limb, self.groupType)[0]
limbControl = pm.listConnections(limbGroup.control)[0]
conversionNode = pm.listConnections(limbControl.r)[0]
multNodes = pm.listConnections(conversionNode.output)
pm.delete(multNodes) # delete mult node
groups = pm.listConnections(limb.jointGroups)
groups = rigUtil.SortGroups(groups)[:-1]
pm.parent(groups, limb)
if pm.listConnections(limb.limbParent):
group = rigUtil.GetLimbGroups(limb, self.groupType)[0]
cst = pm.listRelatives(group, c=1, type='parentConstraint')
pm.delete(cst)
def Teardown_Constraint_JointsToControls(self, limb):
log.funcFileDebug()
jointGroups = pm.listConnections(limb.jointGroups)
joints = [pm.listConnections(g.joint)[0] for g in jointGroups]
for joint in joints:
cst = pm.listRelatives(joint, c=1, type='parentConstraint')
pm.delete(cst)
def Teardown_Constraint_ControlsToXforms(self, limb):
log.funcFileDebug()
group = rigUtil.GetLimbGroups(limb, self.groupType)[0]
control = pm.listConnections(group.control)[0]
pm.delete(pm.listRelatives(control, c=1, type='constraint'))
#============= EDITABLE UI ============================
def Setup_Behavior_Limb_UI(self, limb):
log.funcFileDebug()
return False
#============= ANIMATION UI ============================
def Setup_AnimationTools_Limb_UI(self, limb):
return False # return if UI is enabled
# Copyright (c) 2021 Trevor Payne
# See user license in "PayneFreeRigSuite\Data\LicenseAgreement.txt"
| [
"crashandexplode@hotmail.com"
] | crashandexplode@hotmail.com |
10bf94250ae78f7e23d7e6bd2890662625883c6b | 555002c30895a1e2267d05d67d5167275ade3845 | /server/server.py | d2f825a62b33cfc1b7403d77eceaecff86615fcd | [] | no_license | odbite/jkpghack2016 | 159b2938fd8ab7a2a815c664a38c791f2fb440ec | 8b4f5b3ec555f3436f764c2b49927c200ff335a4 | refs/heads/master | 2021-01-10T05:52:52.600618 | 2016-02-27T17:41:07 | 2016-02-27T17:41:07 | 52,673,289 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 559 | py | from animals import AnimalApi
from flask import Flask, render_template
from flask_restful import Api
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
static_folder = os.path.join(BASE_DIR, 'client', 'app', 'dist')
print(static_folder)
app = Flask(__name__, template_folder='../client/app', static_path='/static', static_folder=static_folder)
api = Api(app)
api.add_resource(AnimalApi, '/api/animals')
@app.route("/")
def hello():
return render_template('index.html')
if __name__ == '__main__':
app.run(debug=True)
| [
"draso.odin@gmail.com"
] | draso.odin@gmail.com |
720fad10c8eeb20b6e8a660dadb600d8bb28608d | 0e8b852655b8e1a6f3124da8abb1108fed1efe16 | /getaudio.py | 7287c3580b89ab9a391771d85c9a95a1c0465d8a | [] | no_license | Tusharmaa/AccentRecognitionSystem | f56e0feb077b228f29fac97fcba1ee01c0fc6c95 | 943e02aa6c273e78a0bfff67cb5efca4d5d226b9 | refs/heads/master | 2022-11-06T08:13:25.244773 | 2020-06-23T18:22:03 | 2020-06-23T18:22:03 | 274,477,200 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,202 | py | import pandas as pd
import urllib.request
import os
import sys
from pydub import AudioSegment
class GetAudio:
def __init__(self, csv_filepath, destination_folder= 'audio/', wait= 1.5, debug=False ):
'''
Initializes GetAudio class object
:param destination_folder (str): Folder where audio files will be saved
:param wait (float): Length (in seconds) between web requests
:param debug (bool): Outputs status indicators to console when True
'''
self.csv_filepath = csv_filepath
self.audio_df = pd.read_csv(csv_filepath)
self.url = 'http://chnm.gmu.edu/accent/soundtracks/{}.mp3'
self.destination_folder = destination_folder
self.wait = wait
self.debug = False
def check_path(self):
'''
Checks if self.distination_folder exists. If not, a folder called self.destination_folder is created
'''
if not os.path.exists(self.destination_folder):
if self.debug:
print('{} does not exist, creating'.format(self.destination_folder))
os.makedirs('../' + self.destination_folder)
def get_audio(self):
'''
Retrieves all audio files from 'language_num' column of self.audio_df
If audio file already exists, move on to the next
:return (int): Number of audio files downloaded
'''
self.check_path()
counter = 0
for lang_num in self.audio_df['language_num']:
if not os.path.exists(self.destination_folder +'{}.wav'.format(lang_num)):
if self.debug:
print('downloading {}'.format(lang_num))
(filename, headers) = urllib.request.urlretrieve(self.url.format(lang_num))
sound = AudioSegment.from_mp3(filename)
sound.export( self.destination_folder + "{}.wav".format(lang_num), format="wav")
counter += 1
return counter
if __name__ == '__main__':
'''
Example console command
python GetAudio.py audio_metadata.csv
'''
# csv_file = sys.argv[1]
csv_file = 'bio_data.csv'
ga = GetAudio(csv_filepath=csv_file)
ga.get_audio()
| [
"saytotushar@gmail.com"
] | saytotushar@gmail.com |
340f9e8061a4b226c9e54dd46cb4c72dbd1febcc | 3e980e6caa46b055380dcb9e2c779f77265059ad | /datagenerator.py | 12da171b2a3894e95f9bf3c38f7a74eab1d78272 | [] | no_license | rouyunpan/ObjectRecognitionWithTensorflow | 3c51c62cd8c74ae73727a1cfba04191eeb4c0e9b | 18ee3b9b7378e53b66d9da3091f0597d4174bf79 | refs/heads/master | 2021-01-06T20:39:45.991482 | 2017-08-07T07:55:44 | 2017-08-07T07:55:44 | 99,539,181 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,932 | py | """Containes a helper class for image input pipelines in tensorflow."""
import tensorflow as tf
import numpy as np
from tensorflow.contrib.data import Dataset
from tensorflow.python.framework import dtypes
from tensorflow.python.framework.ops import convert_to_tensor
VGG_MEAN = tf.constant([123.68, 116.779, 103.939], dtype=tf.float32)
class ImageDataGenerator(object):
"""Wrapper class around the new Tensorflows dataset pipeline.
Requires Tensorflow >= version 1.12rc0
"""
def __init__(self, txt_file, mode, batch_size, num_classes, shuffle=True,
buffer_size=1000):
"""Create a new ImageDataGenerator.
Recieves a path string to a text file, which consists of many lines,
where each line has first a path string to an image and seperated by
a space an integer, referring to the class number. Using this data,
this class will create TensrFlow datasets, that can be used to train
e.g. a convolutional neural network.
Args:
txt_file: Path to the text file.
mode: Either 'training' or 'validation'. Depending on this value,
different parsing functions will be used.
batch_size: Number of images per batch.
num_classes: Number of classes in the dataset.
shuffle: Wether or not to shuffle the data in the dataset and the
initial file list.
buffer_size: Number of images used as buffer for TensorFlows
shuffling of the dataset.
Raises:
ValueError: If an invalid mode is passed.
"""
self.txt_file = txt_file
self.num_classes = num_classes
# retrieve the data from the text file
self._read_txt_file()
# number of samples in the dataset
self.data_size = len(self.labels)
# initial shuffling of the file and label lists (together!)
if shuffle:
self._shuffle_lists()
# convert lists to TF tensor
self.img_paths = convert_to_tensor(self.img_paths, dtype=dtypes.string)
self.labels = convert_to_tensor(self.labels, dtype=dtypes.int32)
# create dataset
data = Dataset.from_tensor_slices((self.img_paths, self.labels))
# distinguish between train/infer. when calling the parsing functions
if mode == 'training':
data = data.map(self._parse_function_train)
elif mode == 'inference':
data = data.map(self._parse_function_inference)
else:
raise ValueError("Invalid mode '%s'." % (mode))
# shuffle the first `buffer_size` elements of the dataset
if shuffle:
data = data.shuffle(buffer_size=buffer_size)
# create a new dataset with batches of images
data = data.batch(batch_size)
self.data = data
def _read_txt_file(self):
"""Read the content of the text file and store it into lists."""
self.img_paths = []
self.labels = []
with open(self.txt_file, 'r') as f:
lines = f.readlines()
for line in lines:
items = line.split(' ')
self.img_paths.append(items[0])
self.labels.append(int(items[1]))
def _shuffle_lists(self):
"""Conjoined shuffling of the list of paths and labels."""
path = self.img_paths
labels = self.labels
permutation = np.random.permutation(self.data_size)
self.img_paths = []
self.labels = []
for i in permutation:
self.img_paths.append(path[i])
self.labels.append(labels[i])
def _parse_function_train(self, filename, label):
"""Input parser for samples of the training set."""
# convert label number into one-hot-encoding
one_hot = tf.one_hot(label, self.num_classes)
# load and preprocess the image
img_string = tf.read_file(filename)
img_decoded = tf.image.decode_png(img_string, channels=3)
img_resized = tf.image.resize_images(img_decoded, [227, 227])
"""
Dataaugmentation comes here.
"""
img_centered = tf.subtract(img_resized, VGG_MEAN)
# RGB -> BGR
img_bgr = img_centered[:, :, ::-1]
return img_bgr, one_hot
def _parse_function_inference(self, filename, label):
"""Input parser for samples of the validation/test set."""
# convert label number into one-hot-encoding
one_hot = tf.one_hot(label, self.num_classes)
# load and preprocess the image
img_string = tf.read_file(filename)
img_decoded = tf.image.decode_png(img_string, channels=3)
img_resized = tf.image.resize_images(img_decoded, [227, 227])
img_centered = tf.subtract(img_resized, VGG_MEAN)
# RGB -> BGR
img_bgr = img_centered[:, :, ::-1]
return img_bgr, one_hot
| [
"rouyun.pan@gmail.com"
] | rouyun.pan@gmail.com |
c7fd4d7c7e6fcf220651884afc5c13fafcf92ac4 | c98e146ba8231e4151d2585dc2b396621358a21f | /colheritage/homepage/cached_templates/templates/rentals.html.py | 04fb904051f06fb599a54b626d81148db26a6838 | [] | no_license | cjpwrs/Colheritage | 1ea9c024ce7f8f6f381e5d41d9cef22d34069a2e | 81be5869e21a9ec66e580b946db393b070d0993d | refs/heads/master | 2021-01-15T23:35:29.698690 | 2015-04-06T20:44:05 | 2015-04-06T20:44:05 | 33,500,785 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,792 | py | # -*- coding:ascii -*-
from mako import runtime, filters, cache
UNDEFINED = runtime.UNDEFINED
__M_dict_builtin = dict
__M_locals_builtin = locals
_magic_number = 10
_modified_time = 1428206939.167545
_enable_loop = True
_template_filename = '/Users/cjpowers/colheritage/homepage/templates/rentals.html'
_template_uri = 'rentals.html'
_source_encoding = 'ascii'
import os, os.path, re
_exports = ['content']
def _mako_get_namespace(context, name):
try:
return context.namespaces[(__name__, name)]
except KeyError:
_mako_generate_namespaces(context)
return context.namespaces[(__name__, name)]
def _mako_generate_namespaces(context):
pass
def _mako_inherit(template, context):
_mako_generate_namespaces(context)
return runtime._inherit_from(context, 'base.htm', _template_uri)
def render_body(context,**pageargs):
__M_caller = context.caller_stack._push_frame()
try:
__M_locals = __M_dict_builtin(pageargs=pageargs)
rental = context.get('rental', UNDEFINED)
def content():
return render_content(context._locals(__M_locals))
__M_writer = context.writer()
__M_writer('\n\n')
if 'parent' not in context._data or not hasattr(context._data['parent'], 'content'):
context['self'].content(**pageargs)
return ''
finally:
context.caller_stack._pop_frame()
def render_content(context,**pageargs):
__M_caller = context.caller_stack._push_frame()
try:
rental = context.get('rental', UNDEFINED)
def content():
return render_content(context)
__M_writer = context.writer()
__M_writer('\n\t\n\t<div class="clearfix"></div>\n\t<div class="text-right">\n\t\t<a href="/homepage/rentals.create/" class="btn btn-primary">Create New Event </a>\n\t</div>\n\n\t<table class="table table-striped table-bordered">\n\t\t\t<tr>\n\t\t\t\t<th>ID</th>\n\t\t\t\t<th>Date Out</th>\n\t\t\t\t<th>Date Due</th>\n\t\t\t\t<th>Date In</th>\n\t\t\t\t<th>Discount Percent</th>\n\t\t\t\t<th>Rental Product</th>\n <th>Renter</th>\n <th>Actions</th>\n\t\t\t</tr>\n')
for Rented_Item in rental:
__M_writer('\t\t\t<tr>\n\t\t\t\t<td>')
__M_writer(str( Rented_Item.id ))
__M_writer('</td>\n\t\t\t\t<td>')
__M_writer(str( Rented_Item.date_out ))
__M_writer('</td>\n\t\t\t\t<td>')
__M_writer(str( Rented_Item.date_due ))
__M_writer('</td>\n\t\t\t\t<td>')
__M_writer(str( Rented_Item.date_in ))
__M_writer('</td>\n\t\t\t\t<td>')
__M_writer(str( Rented_Item.discount_percent ))
__M_writer('</td>\n <td>')
__M_writer(str( Rented_Item.rental_product ))
__M_writer('</td>\n <td>')
__M_writer(str( Rented_Item.renter ))
__M_writer('</td>\n\t\t\t\t<td><a href="/homepage/rentals.rentalreturn/')
__M_writer(str( Rented_Item.id ))
__M_writer('/" class="btn btn-xs btn-default">Return </a>\n <a href="/homepage/rentals.damagefee/" class="btn btn-xs btn-default">Damage Fee </a>\n </td>\n\n\t\t\t</tr>\n')
__M_writer('\t</table>\n\n')
return ''
finally:
context.caller_stack._pop_frame()
"""
__M_BEGIN_METADATA
{"source_encoding": "ascii", "uri": "rentals.html", "line_map": {"64": 27, "65": 28, "66": 28, "67": 29, "68": 29, "69": 30, "70": 30, "71": 36, "77": 71, "27": 0, "35": 1, "45": 3, "52": 3, "53": 21, "54": 22, "55": 23, "56": 23, "57": 24, "58": 24, "59": 25, "60": 25, "61": 26, "62": 26, "63": 27}, "filename": "/Users/cjpowers/colheritage/homepage/templates/rentals.html"}
__M_END_METADATA
"""
| [
"cjpwrs@gmail.com"
] | cjpwrs@gmail.com |
13bdc405645b0b39b6eddec012b2abcc8c01a3de | 78dc15505e17cef3e49410bbadc1bb4812cdbbad | /foiamachine/local/apps/users/utils.py | 824afac2280c58b95134ab546eea5bc2cd47db3c | [
"MIT"
] | permissive | jgillum/foiamachine | 4a7e4ef9fec681341c014dbe7c98bbce79debb4e | 26d3b02870227696cdaab639c39d47b2a7a42ae5 | refs/heads/master | 2020-06-29T11:19:46.232758 | 2019-08-19T02:27:45 | 2019-08-19T02:27:45 | 200,519,075 | 3 | 1 | null | 2019-08-04T16:57:27 | 2019-08-04T16:57:27 | null | UTF-8 | Python | false | false | 54 | py | /home/foiamachine/repo/foiamachine/apps/users/utils.py | [
"jgillum@gmail.com"
] | jgillum@gmail.com |
92ee837f3a588f235c1b667af3f4a6641030358f | 33e4a85dd6a6fbd1fb8cc999a6c03b8a676a2009 | /HW_10.py | 27cf29377e3d604d51641dbb871958d67f2e4dde | [] | no_license | skavya90/AdvancedData | e6c0f1f57b91503cb100f5e1a8a0ba9d4df50a00 | 2d1dbe3d1b8d48b1bbaa793688b6ccc6b6aeddb5 | refs/heads/master | 2020-05-14T09:06:20.408484 | 2019-04-20T19:00:20 | 2019-04-20T19:00:20 | 181,734,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,436 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Apr 19 18:55:53 2019
@author: skavy
"""
import numpy as np
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func,and_
from flask import Flask, jsonify
import datetime as dt
#################################################
# Database Setup
#################################################
engine = create_engine("sqlite:///C:/Users/skavy/Desktop/Bootcamp/AdvancedData/hawaii.sqlite",connect_args={'check_same_thread': False})
# reflect an existing database into a new model
Base = automap_base()
# reflect the tables
Base.prepare(engine, reflect=True)
# Save reference to the table
Measurement = Base.classes.measurement
# Create our session (link) from Python to the DB
session = Session(engine)
#################################################
# Flask Setup
#################################################
app = Flask(__name__)
#################################################
# Flask Routes
#################################################
@app.route("/")
def Home():
"""List all available api routes."""
return (
f"Welcome to the Home Page<br/>"
f"Available Routes:<br/>"
f"/api/v1.0/precipitation<br/>"
f"/api/v1.0/stations<br/>"
f"/api/v1.0/tobs<br/>"
f"/api/v1.0/<start><br/>"
f"/api/v1.0/<start>/<end>"
)
#####################################################
@app.route("/api/v1.0/precipitation")
def precipitation():
last_entry=session.query(Measurement.date).order_by(Measurement.date.desc()).first()
last_entry = last_entry[0]
last_year = dt.datetime.strptime(last_entry, '%Y-%m-%d') - dt.timedelta(days=365)
"""Convert the query results to a Dictionary using date as the key and prcp as the value."""
prcp_result = session.query(Measurement.date,Measurement.prcp).\
filter(Measurement.date >= last_year).\
order_by(Measurement.date).all()
p_dict=dict(prcp_result)
"""Return the JSON representation of your dictionary."""
return jsonify(p_dict)
#####################################################
@app.route("/api/v1.0/stations")
def stations():
"""Return a JSON list of stations from the dataset."""
stations_result = session.query(Measurement.station).group_by(Measurement.station).all()
s_list = list(np.ravel(stations_result))
return jsonify(s_list)
#####################################################
@app.route("/api/v1.0/tobs")
def tobs():
last_entry=session.query(Measurement.date).order_by(Measurement.date.desc()).first()
last_entry = last_entry[0]
last_year = dt.datetime.strptime(last_entry, '%Y-%m-%d') - dt.timedelta(days=365)
"""query for the dates and temperature observations from a year from the last data point."""
tobs_result = session.query(Measurement.date, Measurement.tobs).\
filter(Measurement.date >= last_year).order_by(Measurement.date).all()
t_list=list(np.ravel(tobs_result))
"""Return a JSON list of Temperature Observations (tobs) for the previous year."""
return jsonify(t_list)
#####################################################
@app.route("/api/v1.0/<start>")
def start_date(start):
"""Return a JSON list of the minimum temperature, the average temperature, and the max temperature for a given start or start-end range."""
"""When given the start only, calculate TMIN, TAVG, and TMAX for all dates greater than and equal to the start date."""
strt_date = session.query(func.min(Measurement.tobs), \
func.max(Measurement.tobs),\
func.avg(Measurement.tobs)).\
filter(Measurement.date >= start).all()
return jsonify(strt_date)
#####################################################
@app.route("/api/v1.0/<start>/<end>")
def start_end(start,end):
strtend_date = session.query(func.min(Measurement.tobs), \
func.max(Measurement.tobs),\
func.avg(Measurement.tobs)).\
filter(and_(Measurement.date >= start, Measurement.date <= end)).all()
return jsonify(strtend_date)
#####################################################
if __name__ == '__main__':
app.run(debug=False)
| [
"skavya90@gmail.com"
] | skavya90@gmail.com |
c3f065c3d818245096ad71f216b0edd590b52ae2 | 97c4d3d0509897c8a3591cd340dc2b77c4eeba77 | /5.18/03_Operator/operator1.py | 947a91e1d294e63c378dc59eb73880c7630b7ac3 | [] | no_license | asterinwl/2021-K-Digital-Training_selfstudy | a056f3e7fa92c6b51914171d5171f05fd13fd9ec | cd9c136ddba5df4be6ae2ceb447ee6bb8d5d53bc | refs/heads/master | 2023-08-29T02:18:13.234499 | 2021-10-09T15:21:55 | 2021-10-09T15:21:55 | 373,746,421 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,363 | py | #문제.10000초는 몇 시간 몇 분 몇 초 인가?
s=10000
s=10000
#m = s//60
#r = s%60
#h = m//60
#print("%d시 %d분 %d초" % (m,r,h) )
#산술연산자 우선순위:
#()
#지수**
#곱셈,나눗셈,나머지,몫
#덧셈,뺄셈
#산술연산자:+,-,*,/,//(몫),%(나머지),^
#할당연산자:=
#대입연산자:+=,-=,*=,/=,//=,%=,**=
#a=100
#a=a+10
#a=a+10 # a += 10 과 같음
#a=a+10 #값을 계속 연속해서 더하는 것=값을 누적해가면서 더하는 것
#b-= 10 #b=b-10
#c*=100 #c=c*100
#d/=10 #d=d/10
#e**=3 #e=e**3
#관계연산자:>,<,>=,<=,==,!= 결과값이 참 혹은 거짓
'''
100>3 #True
a=100
b=1001
a>b
print(a>b)
#논리연산자 and,or,not(항이 하나)
print(a>b and b==1001)
print(a>b or b==1001)
print(not(a>b)
'''
#비트연산자 : 정수를 2진수로 변환한 수 각각의 비트별로 연산
#&(논리곱) |(논리합) ^(xor) ~(부정not) <<(왼쪽 시프트) >>(오른쪽 시프트)
print(10&3) #1010&0011 ->0010
print(10|3) #1010|0011 ->1011 (11)
print(10^3) #1010^0011 ->1001 (9)
print(~3) #~0011 -> 1100 (12)
print(~3+1)
print(10<<1) #해당숫자에 2를 곱한 꼴
print(10<<2) #해당숫자에 2^2를 곱한 꼴
print(10<<3)
print(10>>1) #해당숫자에 2를 나눈 꼴
print(10>>2)
print(10>>3)
#bin=>2진수 hex->16진수 oct->8진수 | [
"noreply@github.com"
] | asterinwl.noreply@github.com |
9f4802a0adb12e9e53c888ddc1d995e8c04f2963 | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/web/get_static_site_user_provided_function_app_for_static_site.py | f4136b23143201325f3c527173a8c7c478e1d846 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 5,754 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetStaticSiteUserProvidedFunctionAppForStaticSiteResult',
'AwaitableGetStaticSiteUserProvidedFunctionAppForStaticSiteResult',
'get_static_site_user_provided_function_app_for_static_site',
]
@pulumi.output_type
class GetStaticSiteUserProvidedFunctionAppForStaticSiteResult:
"""
Static Site User Provided Function App ARM resource.
"""
def __init__(__self__, created_on=None, function_app_region=None, function_app_resource_id=None, id=None, kind=None, name=None, type=None):
if created_on and not isinstance(created_on, str):
raise TypeError("Expected argument 'created_on' to be a str")
pulumi.set(__self__, "created_on", created_on)
if function_app_region and not isinstance(function_app_region, str):
raise TypeError("Expected argument 'function_app_region' to be a str")
pulumi.set(__self__, "function_app_region", function_app_region)
if function_app_resource_id and not isinstance(function_app_resource_id, str):
raise TypeError("Expected argument 'function_app_resource_id' to be a str")
pulumi.set(__self__, "function_app_resource_id", function_app_resource_id)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="createdOn")
def created_on(self) -> str:
"""
The date and time on which the function app was registered with the static site.
"""
return pulumi.get(self, "created_on")
@property
@pulumi.getter(name="functionAppRegion")
def function_app_region(self) -> Optional[str]:
"""
The region of the function app registered with the static site
"""
return pulumi.get(self, "function_app_region")
@property
@pulumi.getter(name="functionAppResourceId")
def function_app_resource_id(self) -> Optional[str]:
"""
The resource id of the function app registered with the static site
"""
return pulumi.get(self, "function_app_resource_id")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetStaticSiteUserProvidedFunctionAppForStaticSiteResult(GetStaticSiteUserProvidedFunctionAppForStaticSiteResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetStaticSiteUserProvidedFunctionAppForStaticSiteResult(
created_on=self.created_on,
function_app_region=self.function_app_region,
function_app_resource_id=self.function_app_resource_id,
id=self.id,
kind=self.kind,
name=self.name,
type=self.type)
def get_static_site_user_provided_function_app_for_static_site(function_app_name: Optional[str] = None,
name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetStaticSiteUserProvidedFunctionAppForStaticSiteResult:
"""
Static Site User Provided Function App ARM resource.
API Version: 2020-12-01.
:param str function_app_name: Name of the function app registered with the static site.
:param str name: Name of the static site.
:param str resource_group_name: Name of the resource group to which the resource belongs.
"""
__args__ = dict()
__args__['functionAppName'] = function_app_name
__args__['name'] = name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:web:getStaticSiteUserProvidedFunctionAppForStaticSite', __args__, opts=opts, typ=GetStaticSiteUserProvidedFunctionAppForStaticSiteResult).value
return AwaitableGetStaticSiteUserProvidedFunctionAppForStaticSiteResult(
created_on=__ret__.created_on,
function_app_region=__ret__.function_app_region,
function_app_resource_id=__ret__.function_app_resource_id,
id=__ret__.id,
kind=__ret__.kind,
name=__ret__.name,
type=__ret__.type)
| [
"noreply@github.com"
] | morrell.noreply@github.com |
e42e6d9eff0feb26d86e7fe6faaa06618d7c3faa | bdec02b1681bc866e96007317ae2feb6680b81be | /general_speech_restoration/unet/model_kqq_lstm_mask_gan/train.py | e041e20d94623f2068e693c916f2be172dd957fb | [] | no_license | TrendingTechnology/voicefixer_main | 022407fc35ca3543e9496730fda2a26fc606242d | 3496d965bfe89c4e49c2b953c7c7a98ee68f816d | refs/heads/main | 2023-08-19T15:59:14.887287 | 2021-09-27T12:44:00 | 2021-09-27T12:44:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,896 | py | import sys
sys.path.append("/Users/admin/Documents/projects/arnold_workspace/src")
sys.path.append("/opt/tiger/lhh_arnold_base/arnold_workspace/src")
import torchaudio
torchaudio.set_audio_backend("sox_io")
from pytorch_lightning import Trainer
from pynvml import *
from pytorch_lightning.callbacks import ModelCheckpoint, LearningRateMonitor
from pytorch_lightning.loggers.tensorboard import TensorBoardLogger
from pytorch_lightning.plugins import DDPPlugin
from general_speech_restoration.unet.get_model import *
from general_speech_restoration.unet.dm_sr_rand_sr_order import SrRandSampleRate
from dataloaders.main import DATA
from callbacks.base import *
from callbacks.verbose import *
import time
from argparse import ArgumentParser
from iclr_2022.config import Config
from tools.dsp.lowpass import *
def report_dataset(names):
res = "#"
for each in names:
res += each
return res+"#"
# Clipping effects only
Config.aug_sources = ["vocals"]
# Config.aug_effects = ["clip"]
# Config.aug_conf['clip'] = {
# 'prob': [1.0], # todo
# 'louder_time': [1.0, 12.0]
# }
# Config.aug_effects = ["reverb_rir"]
# Config.aug_conf['reverb_rir'] = {
# 'prob': [1.0],
# 'rir_file_name': None
# }
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-m", "--model", default="lstm", help="Model name you wanna use.")
parser.add_argument("-l", "--loss", default="l1_sp", help="Loss function")
parser.add_argument("-t", "--train_dataset", nargs="+", default=["vctk","vocal_wav_44k","vd_noise","dcase"], help="Train dataset")
parser.add_argument("-v", "--val_dataset", nargs="+", default=["vctk"], help="validation datasets.")
parser.add_argument("-t_type", "--train_data_type", nargs="+", default=["vocals","noise"], help="Training data types.")
parser.add_argument("-c", "--check_val_epoch", type=int, default=50,help="Every 10 hours of training data is called an epoch.")
parser.add_argument("-r", '--reload', type=str, default="")
parser.add_argument("-n", '--name', type=str, default="fix_samplerate")
parser.add_argument("-g", '--gpu_nums', type=int, default=0)
parser.add_argument("-san", '--sanity_val_steps', type=int, default=2)
parser.add_argument("--dl", type=str, default="FixLengthAugRandomDataLoader") # "FixLengthFixSegRandomDataLoader", "FixLengthThreshRandDataLoader"
parser.add_argument("--overlap_num", type=int, default=1)
# experiment
parser.add_argument("--source_sample_rate_low", type=int, default=8000)
parser.add_argument("--source_sample_rate_high", type=int, default=24000)
parser.add_argument("--lr", type=float, default=0.001, help="Learning rate.")
parser.add_argument("--gamma", type=float, default=0.8, help="lr exponential decay.")
parser.add_argument("--batchsize", type=int, default=16, help="training batch size.")
parser.add_argument("--frame_length", type=float, default=3.0, help="frame length in seconds.")
parser.add_argument("--warmup_data", type=float, default=26.6, help="Hours of warmup dataloaders.")
parser.add_argument("--reduce_lr_period", type=float, default=400, help="How many hours of data per lr reduction.")
parser.add_argument("--max_epoches", type=int, default=5000, help="Maximum epoches")
parser.add_argument("--back_hdfs_every_hours", type=int, default=53, help="Every how many epoch do you want back up file to hdfs")
parser.add_argument("--save_top_k", type=int, default=-1, help="")
parser.add_argument("--save_metric_monitor", type=str, default="val_loss")
parser.add_argument("--sample_rate", type=int, default=44100)
parser.add_argument("--early_stop_tolerance", type=int, default=5)
parser.add_argument("--early_stop_crateria", default="min", help="min or max")
ROOT = Config.ROOT
if ("tiger" in ROOT): ARNOLD = True
else: ARNOLD = False
assert len(Config.TRAIL_NAME) != 0
if (os.path.exists("temp_path.json")):
os.remove("temp_path.json")
if (os.path.exists("path.json")):
os.remove("path.json")
parser = pl.Trainer.add_argparse_args(parser)
args = parser.parse_args()
current = time.strftime('%Y-%m-%d', time.localtime(time.time()))
name = current + "-" + args.model+"-"+report_dataset(args.train_data_type)+"-"+\
report_dataset(args.train_dataset)+"-" + \
report_dataset(args.val_dataset) + "-" +\
args.name + "-" + args.loss + "#"+str(args.source_sample_rate_low)+"_"+ str(args.source_sample_rate_high) + "#"
if (len(args.reload) != 0):
name += "_reload_" + (args.reload).replace("/", ".")
if (ARNOLD):
nvmlInit()
if(args.gpu_nums == 0): gpu_nums = int(nvmlDeviceGetCount())
else: gpu_nums = args.gpu_nums
accelerator = 'ddp'
distributed = True if (gpu_nums > 1) else False
else:
gpu_nums = args.gpu_nums
accelerator = None
distributed = False
logger = TensorBoardLogger(save_dir=Config.TRAIL_NAME + "_log", name=name)
if (gpu_nums != 0): seconds_per_step = gpu_nums * args.batchsize * args.frame_length
else: seconds_per_step = args.batchsize * args.frame_length
model = get_model(args.model)(channels=1, type_target="vocals", loss=args.loss,
# training
lr=args.lr,
gamma=args.gamma,
batchsize=args.batchsize,
frame_length=args.frame_length,
sample_rate=args.sample_rate,
check_val_every_n_epoch = args.check_val_epoch,
warm_up_steps=int(args.warmup_data * 3600 / seconds_per_step),
reduce_lr_steps=int(args.reduce_lr_period * 3600 / seconds_per_step))
print(Config.aug_conf)
print(Config.aug_sources)
print(Config.aug_effects)
dm = SrRandSampleRate(
source_sample_rate_low = args.source_sample_rate_low,source_sample_rate_high = args.source_sample_rate_high,
target_sample_rate=args.sample_rate,
distributed=distributed, overlap_num=args.overlap_num,
train_loader=args.dl,
train_data=DATA.merge([DATA.get_trainset(set) for set in args.train_dataset]),
val_data=DATA.merge([DATA.get_testset(set) for set in args.val_dataset]),
train_data_type=args.train_data_type, val_datasets=args.val_dataset,
batchsize=args.batchsize, frame_length=args.frame_length, num_workers=22, sample_rate=args.sample_rate,
aug_conf=Config.aug_conf, aug_sources=Config.aug_sources, aug_effects=Config.aug_effects,
hours_for_an_epoch=100
)
callbacks = []
callbacks.extend([
ArgsSaver(args),
LearningRateMonitor(logging_interval='step'),
ModelCheckpoint(
filename='{epoch}',
# monitor=args.save_metric_monitor,
save_top_k=-1,
mode='min',
),
BackUpHDFS(
model=model,
current_dir=os.getcwd(),
save_step_frequency=int(args.back_hdfs_every_hours * 103 * 3600 / seconds_per_step)
),
initLogDir(current_dir=os.getcwd()),
# ReportDatasets(dm=dm, config=Config),
# EarlyStop(tolerance=args.early_stop_tolerance,type=args.early_stop_crateria)
]
)
print("eval_callbacks: ")
for each in callbacks: print(each)
trainer = Trainer.from_argparse_args(args,
gpus=gpu_nums,
plugins=DDPPlugin(find_unused_parameters=True),
max_epochs=args.max_epoches,
terminate_on_nan=True,
num_sanity_val_steps=args.sanity_val_steps,
resume_from_checkpoint=args.reload if (len(args.reload) != 0) else None,
callbacks=callbacks,
accelerator=accelerator,
sync_batchnorm=True,
replace_sampler_ddp=False,
check_val_every_n_epoch=args.check_val_epoch,
checkpoint_callback=True, logger=logger, log_every_n_steps=10,
progress_bar_refresh_rate=1, flush_logs_every_n_steps=200)
dm.setup('fit')
trainer.fit(model, datamodule=dm)
| [
"867390095@qq.com"
] | 867390095@qq.com |
649a99b0302a7335459a416c74b79e78637a1018 | 4025ccf7ca0043d39e914687ab243f3666e44ae7 | /nft-generator-gui.py | 34e007a82409d5374178e346c54260aca25159c2 | [] | no_license | FullDev333/-NFT-generator | 3fc86a992491d3bf5d4cf22a59445a76612ffe59 | 7579a157e9d5d46009f9ca3c5e0d5a511e343b9a | refs/heads/main | 2023-08-11T02:15:36.130557 | 2021-10-03T14:06:33 | 2021-10-03T14:06:33 | 413,092,585 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,988 | py | import streamlit as st
import os
from generator import NFTGenerator
from pathlib import Path
if 'gogo' not in st.session_state:
print('init gogo')
st.session_state.gogo = False
with st.sidebar:
input_dir = st.text_input('input dir')
is_animate = st.checkbox('animate?', )
if is_animate:
fps = st.number_input('fps', 1)
n_frame = st.number_input('no. of frame', 1)
test = st.button('test')
st.session_state['test'] = True
with st.form(key="generate?"):
amount = st.number_input('amount', 1)
output_dir = st.text_input('output dir', 'generated')
unique = st.checkbox("unique mode")
st.write('*unique mode will generate in order (not random)')
submit_button = st.form_submit_button(label='go go')
if submit_button:
print('GOGO')
print(output_dir)
print(amount)
p = Path(output_dir)
p.mkdir(parents=True, exist_ok=True)
the_bar = st.progress(0)
if is_animate:
nft_generator = NFTGenerator(input_dir=input_dir, animate=is_animate, fps=fps, n_frame=n_frame, unique=unique)
for i in range(amount):
the_bar.progress((i + 1) / amount)
nft_generator.generate(save_path=output_dir, file_name=i)
else:
nft_generator = NFTGenerator(input_dir=input_dir, unique=unique)
for i in range(amount):
the_bar.progress((i + 1) / amount)
nft_generator.generate(save_path=output_dir, file_name=i)
st.header("DONE!")
st.subheader(f"pls check out {p.absolute()}")
if test:
if is_animate:
nft_generator = NFTGenerator(input_dir=input_dir, animate=is_animate, fps=fps, n_frame=n_frame, unique=unique)
sample = nft_generator.generate()
st.image(sample, caption=[f'frame {i + 1}' for i in range(len(sample))])
else:
nft_generator = NFTGenerator(input_dir=input_dir, unique=unique)
sample = nft_generator.generate()
st.image(sample, caption="sample")
| [
"istardev0103@gmail.com"
] | istardev0103@gmail.com |
66d28ef2484aa90bdbdec1fec326ff7355cf4dd5 | b68de833bc3bd8ec12686a059fe275e195a2e6b8 | /qa/rpc-tests/fundrawtransaction-hd.py | a198380c6f1e830a5dab887ae1f0cda56b3f9dd4 | [
"MIT"
] | permissive | voicechain/voicechain | 0e3fd61875d706e7049355c3c240d9f26d26e67a | 7c48aabad58c4681ee7c1663bb1e6c232c418b4e | refs/heads/master | 2023-03-02T01:41:18.885950 | 2021-02-16T00:01:26 | 2021-02-16T00:01:26 | 336,066,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,468 | py | #!/usr/bin/env python3
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 4
def setup_network(self, split=False):
self.nodes = start_nodes(4, self.options.tmpdir, [['-usehd=1']] * self.num_nodes, redirect_stderr=True)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test(self):
self.log.info("Mining blocks...")
min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
# if the fee's positive delta is higher than this value tests will fail,
# neg. delta always fail the tests.
# The size of the signature of every input may be at most 2 bytes larger
# than a minimum sized signature.
# = 2 bytes * minRelayTxFeePerByte
feeTolerance = 2 * min_relay_tx_fee/1000
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(121)
self.sync_all()
watchonly_address = self.nodes[0].getnewaddress()
watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
watchonly_amount = Decimal(2000)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 15)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 50)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enought inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 22 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 26 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 26, self.nodes[1].getnewaddress() : 25 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 50:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
# test a fundrawtransaction with which will not get a change output #
#####################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 50:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(50) - fee - feeTolerance }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 10:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
# 4-byte version + 1-byte vin count + 36-byte prevout then script_len
rawtx = rawtx[:82] + "0100" + rawtx[84:]
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 10:
utx = aUtx
if aUtx['amount'] == 50:
utx2 = aUtx
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 60 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 10:
utx = aUtx
if aUtx['amount'] == 50:
utx2 = aUtx
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 60, self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 10}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
raise AssertionError("Spent more than available")
except JSONRPCException as e:
assert("Insufficient" in e.error['message'])
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():11}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 11)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():11,self.nodes[1].getnewaddress():12,self.nodes[1].getnewaddress():1,self.nodes[1].getnewaddress():13,self.nodes[1].getnewaddress():2,self.nodes[1].getnewaddress():3}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendmany("", outputs)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
inputs = []
outputs = {mSigObj:11}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 11)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
addr3Obj = self.nodes[1].validateaddress(addr3)
addr4Obj = self.nodes[1].validateaddress(addr4)
addr5Obj = self.nodes[1].validateaddress(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
inputs = []
outputs = {mSigObj:11}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 11)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# send 12 VOC to msig addr
txId = self.nodes[0].sendtoaddress(mSigObj, 12)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():11}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawTx)
signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('11.0000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.nodes[1].encryptwallet("test")
self.nodes.pop(1)
stop_node(self.nodes[0], 0)
stop_node(self.nodes[1], 2)
stop_node(self.nodes[2], 3)
self.nodes = start_nodes(4, self.options.tmpdir, [['-usehd=1']] * self.num_nodes, redirect_stderr=True)
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
# drain the keypool
self.nodes[1].getnewaddress()
self.nodes[1].getrawchangeaddress()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
try:
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('Keypool ran out' in e.error['message'])
#refill the keypool
self.nodes[1].walletpassphrase("test", 100)
self.nodes[1].keypoolrefill(2) #need to refill the keypool to get an internal change address
self.nodes[1].walletlock()
try:
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 12)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('walletpassphrase' in e.error['message'])
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():11}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#now we need to unlock
self.nodes[1].walletpassphrase("test", 100)
signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('511.0000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[1].sendmany("", outputs)
signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('500.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
##################################################
# test a fundrawtransaction using only watchonly #
##################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 1)
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
assert("fee" in result.keys())
assert_greater_than(result["changepos"], -1)
###############################################################
# test fundrawtransaction using the entirety of watched funds #
###############################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 2)
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
assert_greater_than(result["fee"], 0)
assert_greater_than(result["changepos"], -1)
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
signedtx = self.nodes[3].signrawtransaction(result["hex"])
assert(not signedtx["complete"])
signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
if __name__ == '__main__':
    # Entry point: run the fundrawtransaction functional test suite.
    RawTransactionsTest().main()
| [
"60183029+AndresCHz45@users.noreply.github.com"
] | 60183029+AndresCHz45@users.noreply.github.com |
2be9fbd4078f8c3e4c254cf4f1588a5e860afda6 | 87abf9676115e1bce89473167da593b951c96571 | /users/migrations/0002_alter_profile_id.py | e1a2086fe20770293ef3767d639567c8de0bf3d7 | [] | no_license | yousefshalby/blog_web_app | 929a64997d053337ec188cada96e5f84586fd9ba | f6cf99d5ccb7adb4ee6f673893a8ec02b50d6d7c | refs/heads/main | 2023-07-27T06:34:31.583734 | 2021-09-17T20:40:41 | 2021-09-17T20:40:41 | 369,969,311 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | # Generated by Django 3.2.3 on 2021-05-22 08:39
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Switch ``Profile.id`` to a unique, non-editable UUID primary key."""

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profile',
            name='id',
            field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True),
        ),
    ]
| [
"yousifshalby2016@gmail.com"
] | yousifshalby2016@gmail.com |
61a80292405f553b920a1da32bc42bf812b2727c | b49ecf5ff8815ee5f4095ab3e431834d50723d43 | /ardrone/constant.py | 68a479cab163b397546f193f123e7f610321f6f1 | [
"MIT"
] | permissive | chalios/python-ardrone | 1271bb03032ed3648c8a1d848394d3bf8956ca93 | 132423a86e434b1520f0c164b0fa9532684635df | refs/heads/master | 2022-04-25T16:02:53.194890 | 2020-04-29T08:21:35 | 2020-04-29T08:21:35 | 259,860,727 | 0 | 0 | MIT | 2020-04-29T07:53:40 | 2020-04-29T07:53:40 | null | UTF-8 | Python | false | false | 58 | py | NAVDATA_PORT = 5554
VIDEO_PORT = 5555    # video stream service port
COMMAND_PORT = 5556  # command service port
| [
"fkmclane@gmail.com"
] | fkmclane@gmail.com |
51f1291b2afb40a6c8d8781e7bc461ba3d058225 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03838/s497427946.py | 53bb3ecb9f9567bfcac1e11d066677c406a3138b | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 302 | py | x, y = map(int, input().split())
def count_operations(x, y):
    """Minimum number of moves to get from x to y on the number line.

    Allowed moves: add 1, or flip the sign.  With d = |x| - |y|, exactly
    |d| increment steps are needed, plus one sign flip for each endpoint
    sitting on the "wrong" side for the chosen direction of travel.
    """
    cnt = 0
    diff = abs(x) - abs(y)
    if diff == 0:
        # Same magnitude: a single flip iff the signs differ.
        if x * y < 0:
            cnt += 1
    elif diff > 0:
        # Shrinking the magnitude requires stepping up on the negative
        # side, so each positive endpoint costs one flip.
        cnt += diff
        if x > 0:
            cnt += 1
        if y > 0:
            cnt += 1
    else:
        # Growing the magnitude happens on the positive side, so each
        # negative endpoint costs one flip.
        cnt += -diff
        if x < 0:
            cnt += 1
        if y < 0:
            cnt += 1
    return cnt


if __name__ == '__main__':
    # Competitive-judge I/O: read "x y" from stdin, print the answer.
    x, y = map(int, input().split())
    print(count_operations(x, y))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
658d5bef1d744722a60927196f44ca49a022b4de | 8ff0a962d8ea988768eb93ec7862b7da19fba7e7 | /euler69.py | 95c68342686829a5176c2c2dd931f574085cb0c5 | [] | no_license | metodj/Project-Euler | 6c9a69054afe6f2c981ec274c641707a6346eb59 | 974c7e4d73f91f735f60d95de929027bb67c77f8 | refs/heads/master | 2021-01-12T06:35:25.810484 | 2016-12-26T14:48:37 | 2016-12-26T14:48:37 | 77,389,904 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,019 | py | '''def GCD (m, n):
if n == 0:
return m
else:
return GCD(n, m % n)'''
'''from fractions import gcd'''
def gcd(x, y):
    """Greatest common divisor via the iterative Euclidean algorithm."""
    while y != 0:
        (x, y) = (y, x % y)
    return x
def euler69(n):
    """Project Euler 69 brute force: value <= n maximising n/phi(n).

    phi is counted directly by testing coprimality of every smaller
    number with gcd(); O(n^2 log n), so only usable for small n.
    """
    best_ratio, resitev = 0.0, 0  # renamed from 'max' to avoid shadowing the builtin
    for stevilo in range(2, n + 1):
        totatives = [k for k in range(1, stevilo) if gcd(stevilo, k) == 1]
        if stevilo / len(totatives) > best_ratio:
            best_ratio = stevilo / len(totatives)
            resitev = stevilo
    return resitev
'''zgornja koda je pravilna, vendar je time inefficient'''
def je_prastevilo(n):
    """Trial-division primality test ("is it prime" in Slovenian).

    NOTE(review): the original indentation was lost; the ``else`` is
    reconstructed as a for/else clause (True only when no divisor was
    found), the only reading under which primes() is correct.  Quirk
    kept: n <= 1 reports True because the loop body never runs.
    """
    for i in range(2, int(n ** (1 / 2)) + 1):
        if n % i == 0:
            return False
    else:
        return True
def primes(n):
    """List the numbers 1..n accepted by je_prastevilo.

    NOTE(review): 1 passes je_prastevilo and is therefore included;
    this is harmless for euler69_2, which only multiplies the values.
    """
    return [i for i in range(1, n + 1) if je_prastevilo(i)]
def euler69_2(meja):
    """Fast Project Euler 69: n/phi(n) is maximised by a primorial.

    Multiplies successive primes until the running product exceeds
    ``meja``, then returns the last product that still fit.  Returns
    None if the product never exceeds the limit (quirk preserved).
    """
    niz_prastevil, resitev = primes(100), 1
    for x in niz_prastevil:
        resitev *= x
        if resitev > meja:
            return int(resitev / x)
| [
"noreply@github.com"
] | metodj.noreply@github.com |
7b8534c78010fa84a8218a35cda0e5eab37dff3d | 944cd4c8247441eb2f3ea680e96677094df16141 | /1167.py | 4106a617d163f544a08b575698cb222ed02c52d3 | [] | no_license | williamabreu/uri-online-judge | c90cd80b7b6bed7b6962a2bfe38677fc2bc82708 | e6617822fcccb51b6569946dde75e0b6e1add02f | refs/heads/master | 2023-03-12T17:46:17.155846 | 2021-03-03T21:43:27 | 2021-03-03T21:43:27 | 322,916,648 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,480 | py | # Constants.
# Tuple indices for the (card_number, child_name) pairs kept in `circle`.
NUMBER = 0
NAME = 1
def get_position(number, position, length):
    """Return the next circle index after advancing ``number`` steps.

    Even card numbers move clockwise (decreasing index), odd ones
    anticlockwise (increasing index); all arithmetic is modulo
    ``length``.
    """
    # Brute force... (Modular arithmetic got wrong)
    if number % 2 == 0:
        # even - clockwise (-)
        j = (position - 1) % length
        for i in range(number - 1):
            j = (j - 1) % length
        return j
    else:
        # odd - anticlockwise (+)
        j = position % length
        for i in range(number - 1):
            j = (j + 1) % length
        return j
if __name__ == '__main__':
    # Josephus-style elimination game driven by stdin; terminates on N == 0.
    N = int(input())  # FIRST
    while N != 0:
        # Circular list.
        circle = []  # format: [(card_number, child_name), ...]

        # Fill the circle.
        for _ in range(N):
            name, number = input().split()  # READ
            circle.append((int(number), name))
        # Circle filled.

        # First move: direction depends on the first card's parity.
        number = circle[0][NUMBER]
        position = 0
        if number % 2 == 0:
            # even - clockwise (-)
            position = -number % len(circle)
        else:
            # odd - anticlockwise (+)
            position = number % len(circle)
        if len(circle) > 1:
            number = circle.pop(position)[NUMBER]

        # Next move.
        while len(circle) > 1:
            position = get_position(number, position, len(circle))
            number = circle.pop(position)[NUMBER]

        # Announce the winner.
        print('Vencedor(a):', circle[0][NAME])
        N = int(input())  # NEXT
| [
"contato@williamabreu.net"
] | contato@williamabreu.net |
fc1b5a96808a0168ed881bbfc75c480525b45f33 | fc8a588f19d611584a3130a05c2f4b4e975b0124 | /features/pages/manage_your_money_page.py | 1ba22dde3799ef64e7e0fc0cff42b33fad14d043 | [] | no_license | hytyip/TM | de5d615aa8f1bd4cb71ceeeaafc7c0b178da7f7c | 3d675a6a6df7f9fce6bc34d08300e45953cf7e74 | refs/heads/main | 2023-03-25T09:20:28.355058 | 2021-03-21T11:27:19 | 2021-03-21T11:27:19 | 349,970,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,042 | py | from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from browser import Browser
class ManageYourMoneyPageLocator(object):
    """(By, selector) locator tuples for the Manage Your Money page."""
    # Manage Your Money Page Locators
    header_text = (By.XPATH, "//h1")
    continue_to_login_button = (By.XPATH, "//*[contains(text(), 'continue to login')]")
class ManageYourMoneyPage(Browser):
    # Manage Your Money Page Actions

    def get_header(self):
        """Return the text of the page's <h1> element."""
        return self.get_element(*ManageYourMoneyPageLocator.header_text).text

    def click_continue_to_login_button(self):
        """Click the 'continue to login' button."""
        self.click_element(*ManageYourMoneyPageLocator.continue_to_login_button)

    def wait_for_element_login_button(self):
        """Wait up to 10s for the login button to become visible."""
        try:
            # BUG FIX: visibility_of_element_located() takes the whole
            # (By, selector) locator tuple as a single argument; the
            # original unpacked it with '*', which raised TypeError and
            # sent every call into the except branch.
            element_visible = expected_conditions.visibility_of_element_located(
                ManageYourMoneyPageLocator.continue_to_login_button)
            WebDriverWait(self.driver, 10).until(element_visible)
        except Exception:
            print("Timed out waiting for page to load")
| [
"tony.yip@sage.com"
] | tony.yip@sage.com |
cc4cc73a1658745c6207a8ff22ba53c8853a68c4 | 96334e06781b65e64415d08efb644800a1744ae1 | /myapp/models.py | c41fd7bd91052d27e780b3a32e8036aeba378c11 | [] | no_license | Boundman/test_for_backend_developer | b0a413c270cda27a53c59084f050582bed1006e0 | b86700e420f91d3417e898f3900062dbce1f6b70 | refs/heads/master | 2020-04-26T03:32:46.589990 | 2019-03-24T16:45:06 | 2019-03-24T16:45:06 | 173,270,369 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 527 | py | from django.db import models
class ImageModel(models.Model):
    """Uploaded image together with its dimensions and size."""

    width = models.IntegerField(null=True)
    height = models.IntegerField(null=True)
    size = models.IntegerField(null=True)
    picture = models.FileField(null=True, blank=True, upload_to='static/')

    def __unicode__(self):
        # NOTE(review): despite the name, this returns a dict rather
        # than a unicode string -- confirm callers expect that.
        dictionary = dict()
        dictionary['width'] = self.width
        dictionary['height'] = self.height
        dictionary['size'] = self.size
        dictionary['picture_name'] = self.picture.name
        return dictionary
| [
"victor00x@mail.ru"
] | victor00x@mail.ru |
a58f17eb9e4d2910963c6e15fe22d40f6bb4337b | d134ad541f23b2152d30a1d482a8ed21dfb1fa43 | /home/my_module/auction_crawling.py | 560e5cb9a595fd303dc61cc252208e28d259913b | [] | no_license | ChanHHOO/Design_pattern | 171ac590cbdfba5a43ba2c74da8e1036bd73112a | 18d09c5a8f4bbdefdc4e579715bbb5f647902511 | refs/heads/master | 2022-12-14T03:40:26.620189 | 2019-03-31T10:43:09 | 2019-03-31T10:43:09 | 171,880,551 | 0 | 0 | null | 2022-12-08T00:45:58 | 2019-02-21T13:52:03 | Python | UTF-8 | Python | false | false | 1,960 | py |
import logging
from selenium import webdriver as webb
#webdirver path setting module
from webdriver_manager.chrome import ChromeDriverManager
import os
import sys
from bs4 import BeautifulSoup as bs
class WebdriveSetting:
    """Create a Chrome WebDriver and open the local site under test."""

    def __init__(self):
        # webdriver_manager downloads a chromedriver matching the
        # installed Chrome and returns its path.
        self.driver = webb.Chrome(ChromeDriverManager().install())
        self.driver.implicitly_wait(3)
        # driver = webdriver.PhantomJS('~/pysrc/myweb/home/phantomjs')
        self.driver.get("http://localhost:8001")
class login_page(WebdriveSetting):
    """Fill in and submit the site's login form.

    SECURITY NOTE(review): credentials are hard-coded below; they
    should be moved to configuration or environment variables.
    """

    def __init__(self):
        WebdriveSetting.__init__(self,)
        self.driver.find_element_by_name('email').send_keys('hpyho33@naver.com')
        self.driver.find_element_by_name('password').send_keys('7513aa')
        self.driver.find_element_by_xpath('//*[@id="login"]').click()
class goAnotherPage(login_page):
    """Navigate to the sold-items page and parse its table body."""

    def __init__(self):
        login_page.__init__(self,)
        self.driver.get('http://localhost:8001/selled_item')
        self.html = self.driver.page_source
        self.soup = bs(self.html, 'html.parser')
        # All <tbody> elements of the page's main table.
        self.val = self.soup.select('body > div > table > tbody')

    def get_value(self):
        """Return the parsed <tbody> elements."""
        return self.val
class get_product_item(goAnotherPage):
    """Extract table headers and per-row cell data from the sold-items table."""

    def __init__(self):
        goAnotherPage.__init__(self,)
        self.tr_data = self.val[0].find_all('th')
        self.tr_list = [i.get_text() for i in self.tr_data]
        # zz
        self.td_data = self.val[0].find_all('td')
        self.td_list = []
        self.td_dic = {}
        index = 0
        self.td_keys = ['seller', 'start_price', 'current_price', 'ended_time']
        # Each table row contributes 4 consecutive <td> cells; map them
        # onto td_keys and collect one dict per row.
        for value in range(int(len(self.td_data) / 4)):
            for i in range(4):
                self.td_dic[self.td_keys[i]] = self.td_data[index].get_text()
                index += 1
            # print(td_dic)
            self.td_list.append(self.td_dic.copy())
        print(self.td_list)

    def get_value(self):
        """Return (row dicts, header texts)."""
        return self.td_list, self.tr_list
| [
"noreply@github.com"
] | ChanHHOO.noreply@github.com |
8e9f5df2ff2d86feb903629ae8536f6271a75a52 | 13964da45cc481ce41fdd9bbb23b615854b323dc | /Firstpage/migrations/0009_auto_20210326_1702.py | 8a0851d2993dabeb5806699c33db6e86a82d60c9 | [] | no_license | pratiksha2395/TO_DO_Project | c57fa9b1f77e565cbf3382487bda3b73558662c2 | 0b6f8a5fb21cdba8119a14f5ad53dfcaa50db687 | refs/heads/master | 2023-03-29T12:13:32.766570 | 2021-04-02T15:17:26 | 2021-04-02T15:17:26 | 351,514,007 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | # Generated by Django 3.1.5 on 2021-03-26 11:32
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make ``Todo.memo`` blank-able with max_length=3 (mirrors model state)."""

    dependencies = [
        ('Firstpage', '0008_auto_20210326_1700'),
    ]

    operations = [
        migrations.AlterField(
            model_name='todo',
            name='memo',
            field=models.TextField(blank=True, max_length=3),
        ),
    ]
| [
"pratikshakude@gmail.com"
] | pratikshakude@gmail.com |
44c23c600a6f7aeb2fa02ecf1d7585be1b333669 | 020b874b24dfad20f27be9852cd07e77be0779cf | /loss.py | 6964f2b64e5ce4e77e8af1601d90fbe67f505133 | [] | no_license | zonghaofan/pytorch_sample_project | 22531c24c548dce9511aa255fb5819545b3506b2 | 0973140da217672ad5d199d51c13d7de459d302f | refs/heads/master | 2022-12-05T19:13:53.161117 | 2020-08-24T02:49:21 | 2020-08-24T02:49:21 | 289,810,209 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 79 | py | import torch.nn as nn
criterion = nn.CrossEntropyLoss() # 交叉熵损失函数 | [
"2465521333@qq.com"
] | 2465521333@qq.com |
37cd9106b660771559602b682d72efb66a90c2bb | 737c4b081582752f32ee8cfb94f573655669ed23 | /auctions/migrations/0003_auto_20201201_2256.py | 7a649f867aa3a270a14bc4be46ce7caa07eceada | [] | no_license | amanaligit/auctions | 4a28f20046668c48b4df8770dc4130c5eb9f9b56 | 5392f96dc711e29363d475fa211fe302a26ad21c | refs/heads/main | 2023-04-09T04:01:01.648890 | 2021-04-10T13:26:44 | 2021-04-10T13:26:44 | 317,600,790 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | # Generated by Django 3.1.4 on 2020-12-01 17:26
from django.db import migrations, models
class Migration(migrations.Migration):
    """Allow the ``Listings.image`` URL field to be left blank."""

    dependencies = [
        ('auctions', '0002_bids_comments_listings'),
    ]

    operations = [
        migrations.AlterField(
            model_name='listings',
            name='image',
            field=models.URLField(blank=True),
        ),
    ]
| [
"ali.aman.2010@gmail.com"
] | ali.aman.2010@gmail.com |
71f2e5e6ecb1290716f644a6b14935db9cc59a3f | a62f34b565d99df0a8af01e2decc3696bfef1ba0 | /adagu_ds_spider/model/DsMatchEventText.py | 04473a8783970621ccdcb3aa503c0a88dbcfe4ba | [] | no_license | JUNWSGJ/ds_spider | 6e7a50d5c6df5e0e558a58d4c8eaf362fee0a427 | 4a56a141251a9b46528b92ec13a81ebd8b64d483 | refs/heads/master | 2021-08-15T04:02:01.495336 | 2017-11-17T09:42:14 | 2017-11-17T09:42:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 726 | py | from . import Base
from sqlalchemy import Column, String, Integer, DateTime
import datetime
class DsMatchEventText(Base):
    """ORM mapping for the ``ds_match_event_text`` commentary table."""

    __tablename__ = 'ds_match_event_text'

    id = Column(Integer, primary_key=True)
    match_id = Column(Integer)
    home_away = Column(String(45))
    team_id = Column(Integer)
    team_name = Column(String(200))
    timestamp = Column(Integer)
    txt = Column(String(400))
    info = Column(String(45))
    created_time = Column(DateTime, nullable=False)
    updated_time = Column(DateTime, nullable=False)

    def __init__(self):
        # Stamp both audit columns at construction time.  NOTE(review):
        # the values are formatted strings, not datetime objects -- the
        # DB driver is relied on to coerce them into DateTime columns.
        self.created_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        self.updated_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
| [
"shihao@quyiyuan.com"
] | shihao@quyiyuan.com |
eae409f0cfe112314878b3129c19172958517b96 | d3210868266ce3f0c17d0777c157da82402d3ed7 | /horizon/openstack_dashboard/dashboards/project/instances/tables.py | 8e610ce626c3c1a3077c52d4315550ca1a3ece88 | [
"Apache-2.0"
] | permissive | cauberong099/openstack | 4f0bb1671bf3f2421a756c8b3bfcd7b344e07096 | 4fc261d37d84126d364de50fbc6ca98b8dc8dd39 | refs/heads/master | 2021-01-10T19:44:22.108399 | 2015-03-28T02:46:21 | 2015-03-28T02:46:21 | 33,003,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 40,216 | py | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.conf import settings
from django.core import urlresolvers
from django.http import HttpResponse # noqa
from django import shortcuts
from django import template
from django.template.defaultfilters import title # noqa
from django.utils.http import urlencode
from django.utils.translation import npgettext_lazy
from django.utils.translation import pgettext_lazy
from django.utils.translation import string_concat # noqa
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import conf
from horizon import exceptions
from horizon import messages
from horizon import tables
from horizon.templatetags import sizeformat
from horizon.utils import filters
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.access_and_security.floating_ips \
import workflows
from openstack_dashboard.dashboards.project.instances import tabs
from openstack_dashboard.dashboards.project.instances.workflows \
import resize_instance
from openstack_dashboard.dashboards.project.instances.workflows \
import update_instance
from openstack_dashboard import policy
LOG = logging.getLogger(__name__)

# Instance statuses in which "running instance" operations are permitted.
ACTIVE_STATES = ("ACTIVE",)
# Statuses from which a volume may be attached.
VOLUME_ATTACH_READY_STATES = ("ACTIVE", "SHUTOFF")
# Statuses from which a snapshot may be taken.
SNAPSHOT_READY_STATES = ("ACTIVE", "SHUTOFF", "PAUSED", "SUSPENDED")

# Mapping from nova's numeric power-state codes to display names.
POWER_STATES = {
    0: "NO STATE",
    1: "RUNNING",
    2: "BLOCKED",
    3: "PAUSED",
    4: "SHUTDOWN",
    5: "SHUTOFF",
    6: "CRASHED",
    7: "SUSPENDED",
    8: "FAILED",
    9: "BUILDING",
}

# Indices into the (do, undo) label tuples returned by the toggle
# actions' action_present()/action_past() methods.
PAUSE = 0
UNPAUSE = 1
SUSPEND = 0
RESUME = 1
def is_deleting(instance):
    """Return True if *instance*'s transient task state is "deleting".

    Nova exposes the task state via the ``OS-EXT-STS:task_state``
    extension attribute; a missing or empty value means no task runs.
    """
    task_state = getattr(instance, "OS-EXT-STS:task_state", None)
    if not task_state:
        return False
    return task_state.lower() == "deleting"
class TerminateInstance(policy.PolicyTargetMixin, tables.BatchAction):
    """Batch action that deletes the selected instances via nova."""
    name = "terminate"
    classes = ("btn-danger",)
    icon = "remove"
    policy_rules = (("compute", "compute:delete"),)
    help_text = _("Terminated instances are not recoverable.")

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Terminate Instance",
            u"Terminate Instances",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Scheduled termination of Instance",
            u"Scheduled termination of Instances",
            count
        )

    def allowed(self, request, instance=None):
        """Allow terminate action if instance not currently being deleted."""
        return not is_deleting(instance)

    def action(self, request, obj_id):
        api.nova.server_delete(request, obj_id)
class RebootInstance(policy.PolicyTargetMixin, tables.BatchAction):
    """Batch action performing a hard reboot of the selected instances."""
    name = "reboot"
    classes = ('btn-danger', 'btn-reboot')
    policy_rules = (("compute", "compute:reboot"),)
    help_text = _("Restarted instances will lose any data"
                  " not saved in persistent storage.")

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Hard Reboot Instance",
            u"Hard Reboot Instances",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Hard Rebooted Instance",
            u"Hard Rebooted Instances",
            count
        )

    def allowed(self, request, instance=None):
        # Reboot only makes sense for running or powered-off servers not
        # already being deleted; with no row context the action is
        # considered available.
        if instance is not None:
            return ((instance.status in ACTIVE_STATES
                     or instance.status == 'SHUTOFF')
                    and not is_deleting(instance))
        else:
            return True

    def action(self, request, obj_id):
        api.nova.server_reboot(request, obj_id, soft_reboot=False)
class SoftRebootInstance(RebootInstance):
    """Variant of RebootInstance requesting a graceful (soft) reboot."""
    name = "soft_reboot"

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Soft Reboot Instance",
            u"Soft Reboot Instances",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Soft Rebooted Instance",
            u"Soft Rebooted Instances",
            count
        )

    def action(self, request, obj_id):
        api.nova.server_reboot(request, obj_id, soft_reboot=True)
class TogglePause(tables.BatchAction):
    """Row action that pauses a running instance or unpauses a paused one.

    The label and the policy rule checked are chosen per row depending
    on whether the instance is currently paused; the (pause, unpause)
    label pairs are indexed via the module-level PAUSE/UNPAUSE constants.
    """
    name = "pause"
    icon = "pause"

    @staticmethod
    def action_present(count):
        return (
            ungettext_lazy(
                u"Pause Instance",
                u"Pause Instances",
                count
            ),
            ungettext_lazy(
                u"Resume Instance",
                u"Resume Instances",
                count
            ),
        )

    @staticmethod
    def action_past(count):
        return (
            ungettext_lazy(
                u"Paused Instance",
                u"Paused Instances",
                count
            ),
            ungettext_lazy(
                u"Resumed Instance",
                u"Resumed Instances",
                count
            ),
        )

    def allowed(self, request, instance=None):
        """Show the action only for pausable/unpausable instances.

        Requires the nova ``AdminActions`` extension and, when a policy
        check function is configured, permission for the matching
        pause/unpause rule.
        """
        if not api.nova.extension_supported('AdminActions',
                                            request):
            return False
        if not instance:
            return False
        self.paused = instance.status == "PAUSED"
        if self.paused:
            self.current_present_action = UNPAUSE
            # Named 'policy_rules' (not 'policy') so it does not shadow
            # the module-level 'policy' import.
            policy_rules = (("compute",
                             "compute_extension:admin_actions:unpause"),)
        else:
            self.current_present_action = PAUSE
            policy_rules = (("compute",
                             "compute_extension:admin_actions:pause"),)

        has_permission = True
        policy_check = getattr(settings, "POLICY_CHECK_FUNCTION", None)
        if policy_check:
            has_permission = policy_check(
                policy_rules, request,
                target={'project_id': getattr(instance, 'tenant_id', None)})

        return (has_permission
                and (instance.status in ACTIVE_STATES or self.paused)
                and not is_deleting(instance))

    def action(self, request, obj_id):
        # Mirror the state determined in allowed() when acting.
        if self.paused:
            api.nova.server_unpause(request, obj_id)
            self.current_past_action = UNPAUSE
        else:
            api.nova.server_pause(request, obj_id)
            self.current_past_action = PAUSE
class ToggleSuspend(tables.BatchAction):
    """Row action that suspends a running instance or resumes a suspended one.

    The label and the policy rule checked are chosen per row depending
    on whether the instance is currently suspended; the label pairs are
    indexed via the module-level SUSPEND/RESUME constants.
    """
    name = "suspend"
    classes = ("btn-suspend",)

    @staticmethod
    def action_present(count):
        return (
            ungettext_lazy(
                u"Suspend Instance",
                u"Suspend Instances",
                count
            ),
            ungettext_lazy(
                u"Resume Instance",
                u"Resume Instances",
                count
            ),
        )

    @staticmethod
    def action_past(count):
        return (
            ungettext_lazy(
                u"Suspended Instance",
                u"Suspended Instances",
                count
            ),
            ungettext_lazy(
                u"Resumed Instance",
                u"Resumed Instances",
                count
            ),
        )

    def allowed(self, request, instance=None):
        """Show the action only for suspendable/resumable instances."""
        if not api.nova.extension_supported('AdminActions',
                                            request):
            return False
        if not instance:
            return False
        self.suspended = instance.status == "SUSPENDED"
        if self.suspended:
            self.current_present_action = RESUME
            # Named 'policy_rules' (not 'policy') so it does not shadow
            # the module-level 'policy' import.
            policy_rules = (("compute",
                             "compute_extension:admin_actions:resume"),)
        else:
            self.current_present_action = SUSPEND
            policy_rules = (("compute",
                             "compute_extension:admin_actions:suspend"),)

        has_permission = True
        policy_check = getattr(settings, "POLICY_CHECK_FUNCTION", None)
        if policy_check:
            has_permission = policy_check(
                policy_rules, request,
                target={'project_id': getattr(instance, 'tenant_id', None)})

        return (has_permission
                and (instance.status in ACTIVE_STATES or self.suspended)
                and not is_deleting(instance))

    def action(self, request, obj_id):
        # Mirror the state determined in allowed() when acting.
        if self.suspended:
            api.nova.server_resume(request, obj_id)
            self.current_past_action = RESUME
        else:
            api.nova.server_suspend(request, obj_id)
            self.current_past_action = SUSPEND
class LaunchLink(tables.LinkAction):
    """Table action that opens the "Launch Instance" workflow.

    Rendered via AJAX so quota information can be re-checked every time
    the table is drawn; when the tenant has no instance, core or RAM
    quota left, the button is disabled and the label annotated.
    """
    name = "launch"
    verbose_name = _("Launch Instance")
    url = "horizon:project:instances:launch"
    classes = ("ajax-modal", "btn-launch")
    icon = "cloud-upload"
    policy_rules = (("compute", "compute:create"),)
    ajax = True

    def __init__(self, attrs=None, **kwargs):
        kwargs['preempt'] = True
        super(LaunchLink, self).__init__(attrs, **kwargs)

    def allowed(self, request, datum):
        try:
            limits = api.nova.tenant_absolute_limits(request, reserved=True)

            instances_available = limits['maxTotalInstances'] \
                - limits['totalInstancesUsed']
            cores_available = limits['maxTotalCores'] \
                - limits['totalCoresUsed']
            ram_available = limits['maxTotalRAMSize'] - limits['totalRAMUsed']

            if instances_available <= 0 or cores_available <= 0 \
                    or ram_available <= 0:
                if "disabled" not in self.classes:
                    self.classes = list(self.classes) + ['disabled']
                # BUG FIX: build the label from the base string each time;
                # concatenating onto self.verbose_name appended another
                # "(Quota exceeded)" on every repeated allowed() call.
                self.verbose_name = string_concat(_("Launch Instance"), ' ',
                                                  _("(Quota exceeded)"))
            else:
                self.verbose_name = _("Launch Instance")
                self.classes = [c for c in self.classes if c != "disabled"]
        except Exception:
            LOG.exception("Failed to retrieve quota information")
            # If we can't get the quota information, leave it to the
            # API to check when launching
        return True  # The action should always be displayed

    def single(self, table, request, object_id=None):
        # Re-run the quota check and return the freshly rendered button.
        self.allowed(request, None)
        return HttpResponse(self.render())
class LaunchLinkNG(LaunchLink):
name = "launch-ng"
verbose_name = _("Launch Instance NG")
ajax = False
classes = ("btn-launch")
def __init__(self,
attrs={
"ng-controller": "LaunchInstanceModalCtrl",
"ng-click": "openLaunchInstanceWizard()"
},
**kwargs):
kwargs['preempt'] = True
super(LaunchLink, self).__init__(attrs, **kwargs)
def get_link_url(self, datum=None):
return "javascript:void(0);"
class EditInstance(policy.PolicyTargetMixin, tables.LinkAction):
name = "edit"
verbose_name = _("Edit Instance")
url = "horizon:project:instances:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("compute", "compute:update"),)
def get_link_url(self, project):
return self._get_link_url(project, 'instance_info')
def _get_link_url(self, project, step_slug):
base_url = urlresolvers.reverse(self.url, args=[project.id])
next_url = self.table.get_full_url()
params = {"step": step_slug,
update_instance.UpdateInstance.redirect_param_name: next_url}
param = urlencode(params)
return "?".join([base_url, param])
def allowed(self, request, instance):
return not is_deleting(instance)
class EditInstanceSecurityGroups(EditInstance):
name = "edit_secgroups"
verbose_name = _("Edit Security Groups")
def get_link_url(self, project):
return self._get_link_url(project, 'update_security_groups')
def allowed(self, request, instance=None):
return (instance.status in ACTIVE_STATES and
not is_deleting(instance) and
request.user.tenant_id == instance.tenant_id)
class CreateSnapshot(policy.PolicyTargetMixin, tables.LinkAction):
name = "snapshot"
verbose_name = _("Create Snapshot")
url = "horizon:project:images:snapshots:create"
classes = ("ajax-modal",)
icon = "camera"
policy_rules = (("compute", "compute:snapshot"),)
def allowed(self, request, instance=None):
return instance.status in SNAPSHOT_READY_STATES \
and not is_deleting(instance)
class ConsoleLink(policy.PolicyTargetMixin, tables.LinkAction):
name = "console"
verbose_name = _("Console")
url = "horizon:project:instances:detail"
classes = ("btn-console",)
policy_rules = (("compute", "compute_extension:consoles"),)
def allowed(self, request, instance=None):
# We check if ConsoleLink is allowed only if settings.CONSOLE_TYPE is
# not set at all, or if it's set to any value other than None or False.
return bool(getattr(settings, 'CONSOLE_TYPE', True)) and \
instance.status in ACTIVE_STATES and not is_deleting(instance)
def get_link_url(self, datum):
base_url = super(ConsoleLink, self).get_link_url(datum)
tab_query_string = tabs.ConsoleTab(
tabs.InstanceDetailTabs).get_query_string()
return "?".join([base_url, tab_query_string])
class LogLink(policy.PolicyTargetMixin, tables.LinkAction):
name = "log"
verbose_name = _("View Log")
url = "horizon:project:instances:detail"
classes = ("btn-log",)
policy_rules = (("compute", "compute_extension:console_output"),)
def allowed(self, request, instance=None):
return instance.status in ACTIVE_STATES and not is_deleting(instance)
def get_link_url(self, datum):
base_url = super(LogLink, self).get_link_url(datum)
tab_query_string = tabs.LogTab(
tabs.InstanceDetailTabs).get_query_string()
return "?".join([base_url, tab_query_string])
class ResizeLink(policy.PolicyTargetMixin, tables.LinkAction):
name = "resize"
verbose_name = _("Resize Instance")
url = "horizon:project:instances:resize"
classes = ("ajax-modal", "btn-resize")
policy_rules = (("compute", "compute:resize"),)
def get_link_url(self, project):
return self._get_link_url(project, 'flavor_choice')
def _get_link_url(self, project, step_slug):
base_url = urlresolvers.reverse(self.url, args=[project.id])
next_url = self.table.get_full_url()
params = {"step": step_slug,
resize_instance.ResizeInstance.redirect_param_name: next_url}
param = urlencode(params)
return "?".join([base_url, param])
def allowed(self, request, instance):
return ((instance.status in ACTIVE_STATES
or instance.status == 'SHUTOFF')
and not is_deleting(instance))
class ConfirmResize(policy.PolicyTargetMixin, tables.Action):
name = "confirm"
verbose_name = _("Confirm Resize/Migrate")
classes = ("btn-confirm", "btn-action-required")
policy_rules = (("compute", "compute:confirm_resize"),)
def allowed(self, request, instance):
return instance.status == 'VERIFY_RESIZE'
def single(self, table, request, instance):
api.nova.server_confirm_resize(request, instance)
class RevertResize(policy.PolicyTargetMixin, tables.Action):
name = "revert"
verbose_name = _("Revert Resize/Migrate")
classes = ("btn-revert", "btn-action-required")
policy_rules = (("compute", "compute:revert_resize"),)
def allowed(self, request, instance):
return instance.status == 'VERIFY_RESIZE'
def single(self, table, request, instance):
api.nova.server_revert_resize(request, instance)
class RebuildInstance(policy.PolicyTargetMixin, tables.LinkAction):
name = "rebuild"
verbose_name = _("Rebuild Instance")
classes = ("btn-rebuild", "ajax-modal")
url = "horizon:project:instances:rebuild"
policy_rules = (("compute", "compute:rebuild"),)
def allowed(self, request, instance):
return ((instance.status in ACTIVE_STATES
or instance.status == 'SHUTOFF')
and not is_deleting(instance))
def get_link_url(self, datum):
instance_id = self.table.get_object_id(datum)
return urlresolvers.reverse(self.url, args=[instance_id])
class DecryptInstancePassword(tables.LinkAction):
name = "decryptpassword"
verbose_name = _("Retrieve Password")
classes = ("btn-decrypt", "ajax-modal")
url = "horizon:project:instances:decryptpassword"
def allowed(self, request, instance):
enable = getattr(settings,
'OPENSTACK_ENABLE_PASSWORD_RETRIEVE',
False)
return (enable
and (instance.status in ACTIVE_STATES
or instance.status == 'SHUTOFF')
and not is_deleting(instance)
and get_keyname(instance) is not None)
def get_link_url(self, datum):
instance_id = self.table.get_object_id(datum)
keypair_name = get_keyname(datum)
return urlresolvers.reverse(self.url, args=[instance_id,
keypair_name])
class AssociateIP(policy.PolicyTargetMixin, tables.LinkAction):
name = "associate"
verbose_name = _("Associate Floating IP")
url = "horizon:project:access_and_security:floating_ips:associate"
classes = ("ajax-modal",)
icon = "link"
policy_rules = (("compute", "network:associate_floating_ip"),)
def allowed(self, request, instance):
if not api.network.floating_ip_supported(request):
return False
if api.network.floating_ip_simple_associate_supported(request):
return False
return not is_deleting(instance)
def get_link_url(self, datum):
base_url = urlresolvers.reverse(self.url)
next_url = self.table.get_full_url()
params = {
"instance_id": self.table.get_object_id(datum),
workflows.IPAssociationWorkflow.redirect_param_name: next_url}
params = urlencode(params)
return "?".join([base_url, params])
class SimpleAssociateIP(policy.PolicyTargetMixin, tables.Action):
name = "associate-simple"
verbose_name = _("Associate Floating IP")
icon = "link"
policy_rules = (("compute", "network:associate_floating_ip"),)
def allowed(self, request, instance):
if not api.network.floating_ip_simple_associate_supported(request):
return False
return not is_deleting(instance)
def single(self, table, request, instance_id):
try:
# target_id is port_id for Neutron and instance_id for Nova Network
# (Neutron API wrapper returns a 'portid_fixedip' string)
target_id = api.network.floating_ip_target_get_by_instance(
request, instance_id).split('_')[0]
fip = api.network.tenant_floating_ip_allocate(request)
api.network.floating_ip_associate(request, fip.id, target_id)
messages.success(request,
_("Successfully associated floating IP: %s")
% fip.ip)
except Exception:
exceptions.handle(request,
_("Unable to associate floating IP."))
return shortcuts.redirect(request.get_full_path())
class SimpleDisassociateIP(policy.PolicyTargetMixin, tables.Action):
name = "disassociate"
verbose_name = _("Disassociate Floating IP")
classes = ("btn-danger", "btn-disassociate",)
policy_rules = (("compute", "network:disassociate_floating_ip"),)
def allowed(self, request, instance):
if not api.network.floating_ip_supported(request):
return False
if not conf.HORIZON_CONFIG["simple_ip_management"]:
return False
return not is_deleting(instance)
def single(self, table, request, instance_id):
try:
# target_id is port_id for Neutron and instance_id for Nova Network
# (Neutron API wrapper returns a 'portid_fixedip' string)
targets = api.network.floating_ip_target_list_by_instance(
request, instance_id)
target_ids = [t.split('_')[0] for t in targets]
fips = [fip for fip in api.network.tenant_floating_ip_list(request)
if fip.port_id in target_ids]
# Removing multiple floating IPs at once doesn't work, so this pops
# off the first one.
if fips:
fip = fips.pop()
api.network.floating_ip_disassociate(request, fip.id)
messages.success(request,
_("Successfully disassociated "
"floating IP: %s") % fip.ip)
else:
messages.info(request, _("No floating IPs to disassociate."))
except Exception:
exceptions.handle(request,
_("Unable to disassociate floating IP."))
return shortcuts.redirect(request.get_full_path())
def instance_fault_to_friendly_message(instance):
fault = getattr(instance, 'fault', {})
message = fault.get('message', _("Unknown"))
default_message = _("Please try again later [Error: %s].") % message
fault_map = {
'NoValidHost': _("There is not enough capacity for this "
"flavor in the selected availability zone. "
"Try again later or select a different availability "
"zone.")
}
return fault_map.get(message, default_message)
def get_instance_error(instance):
if instance.status.lower() != 'error':
return None
message = instance_fault_to_friendly_message(instance)
preamble = _('Failed to perform requested operation on instance "%s", the '
'instance has an error status') % instance.name or instance.id
message = string_concat(preamble, ': ', message)
return message
class UpdateRow(tables.Row):
ajax = True
def get_data(self, request, instance_id):
instance = api.nova.server_get(request, instance_id)
try:
instance.full_flavor = api.nova.flavor_get(request,
instance.flavor["id"])
except Exception:
exceptions.handle(request,
_('Unable to retrieve flavor information '
'for instance "%s".') % instance_id,
ignore=True)
error = get_instance_error(instance)
if error:
messages.error(request, error)
return instance
class StartInstance(policy.PolicyTargetMixin, tables.BatchAction):
name = "start"
classes = ('btn-confirm',)
policy_rules = (("compute", "compute:start"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Start Instance",
u"Start Instances",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Started Instance",
u"Started Instances",
count
)
def allowed(self, request, instance):
return ((instance is None) or
(instance.status in ("SHUTDOWN", "SHUTOFF", "CRASHED")))
def action(self, request, obj_id):
api.nova.server_start(request, obj_id)
class StopInstance(policy.PolicyTargetMixin, tables.BatchAction):
name = "stop"
classes = ('btn-danger',)
policy_rules = (("compute", "compute:stop"),)
help_text = _("To power off a specific instance.")
@staticmethod
def action_present(count):
return npgettext_lazy(
"Action to perform (the instance is currently running)",
u"Shut Off Instance",
u"Shut Off Instances",
count
)
@staticmethod
def action_past(count):
return npgettext_lazy(
"Past action (the instance is currently already Shut Off)",
u"Shut Off Instance",
u"Shut Off Instances",
count
)
def allowed(self, request, instance):
return ((instance is None)
or ((get_power_state(instance) in ("RUNNING", "SUSPENDED"))
and not is_deleting(instance)))
def action(self, request, obj_id):
api.nova.server_stop(request, obj_id)
class LockInstance(policy.PolicyTargetMixin, tables.BatchAction):
name = "lock"
policy_rules = (("compute", "compute_extension:admin_actions:lock"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Lock Instance",
u"Lock Instances",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Locked Instance",
u"Locked Instances",
count
)
# TODO(akrivoka): When the lock status is added to nova, revisit this
# to only allow unlocked instances to be locked
def allowed(self, request, instance):
if not api.nova.extension_supported('AdminActions', request):
return False
return True
def action(self, request, obj_id):
api.nova.server_lock(request, obj_id)
class UnlockInstance(policy.PolicyTargetMixin, tables.BatchAction):
name = "unlock"
policy_rules = (("compute", "compute_extension:admin_actions:unlock"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Unlock Instance",
u"Unlock Instances",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Unlocked Instance",
u"Unlocked Instances",
count
)
# TODO(akrivoka): When the lock status is added to nova, revisit this
# to only allow locked instances to be unlocked
def allowed(self, request, instance):
if not api.nova.extension_supported('AdminActions', request):
return False
return True
def action(self, request, obj_id):
api.nova.server_unlock(request, obj_id)
def get_ips(instance):
template_name = 'project/instances/_instance_ips.html'
ip_groups = {}
for ip_group, addresses in instance.addresses.iteritems():
ip_groups[ip_group] = {}
ip_groups[ip_group]["floating"] = []
ip_groups[ip_group]["non_floating"] = []
for address in addresses:
if ('OS-EXT-IPS:type' in address and
address['OS-EXT-IPS:type'] == "floating"):
ip_groups[ip_group]["floating"].append(address)
else:
ip_groups[ip_group]["non_floating"].append(address)
context = {
"ip_groups": ip_groups,
}
return template.loader.render_to_string(template_name, context)
def get_size(instance):
if hasattr(instance, "full_flavor"):
template_name = 'project/instances/_instance_flavor.html'
size_ram = sizeformat.mb_float_format(instance.full_flavor.ram)
if instance.full_flavor.disk > 0:
size_disk = sizeformat.diskgbformat(instance.full_flavor.disk)
else:
size_disk = _("%s GB") % "0"
context = {
"name": instance.full_flavor.name,
"id": instance.id,
"size_disk": size_disk,
"size_ram": size_ram,
"vcpus": instance.full_flavor.vcpus,
"flavor_id": instance.full_flavor.id
}
return template.loader.render_to_string(template_name, context)
return _("Not available")
def get_keyname(instance):
if hasattr(instance, "key_name"):
keyname = instance.key_name
return keyname
return _("Not available")
def get_power_state(instance):
return POWER_STATES.get(getattr(instance, "OS-EXT-STS:power_state", 0), '')
STATUS_DISPLAY_CHOICES = (
("deleted", pgettext_lazy("Current status of an Instance", u"Deleted")),
("active", pgettext_lazy("Current status of an Instance", u"Active")),
("shutoff", pgettext_lazy("Current status of an Instance", u"Shutoff")),
("suspended", pgettext_lazy("Current status of an Instance",
u"Suspended")),
("paused", pgettext_lazy("Current status of an Instance", u"Paused")),
("error", pgettext_lazy("Current status of an Instance", u"Error")),
("resize", pgettext_lazy("Current status of an Instance",
u"Resize/Migrate")),
("verify_resize", pgettext_lazy("Current status of an Instance",
u"Confirm or Revert Resize/Migrate")),
("revert_resize", pgettext_lazy(
"Current status of an Instance", u"Revert Resize/Migrate")),
("reboot", pgettext_lazy("Current status of an Instance", u"Reboot")),
("hard_reboot", pgettext_lazy("Current status of an Instance",
u"Hard Reboot")),
("password", pgettext_lazy("Current status of an Instance", u"Password")),
("rebuild", pgettext_lazy("Current status of an Instance", u"Rebuild")),
("migrating", pgettext_lazy("Current status of an Instance",
u"Migrating")),
("build", pgettext_lazy("Current status of an Instance", u"Build")),
("rescue", pgettext_lazy("Current status of an Instance", u"Rescue")),
("deleted", pgettext_lazy("Current status of an Instance", u"Deleted")),
("soft_deleted", pgettext_lazy("Current status of an Instance",
u"Soft Deleted")),
("shelved", pgettext_lazy("Current status of an Instance", u"Shelved")),
("shelved_offloaded", pgettext_lazy("Current status of an Instance",
u"Shelved Offloaded")),
)
TASK_DISPLAY_NONE = pgettext_lazy("Task status of an Instance", u"None")
# Mapping of task states taken from Nova's nova/compute/task_states.py
TASK_DISPLAY_CHOICES = (
("scheduling", pgettext_lazy("Task status of an Instance",
u"Scheduling")),
("block_device_mapping", pgettext_lazy("Task status of an Instance",
u"Block Device Mapping")),
("networking", pgettext_lazy("Task status of an Instance",
u"Networking")),
("spawning", pgettext_lazy("Task status of an Instance", u"Spawning")),
("image_snapshot", pgettext_lazy("Task status of an Instance",
u"Snapshotting")),
("image_snapshot_pending", pgettext_lazy("Task status of an Instance",
u"Image Snapshot Pending")),
("image_pending_upload", pgettext_lazy("Task status of an Instance",
u"Image Pending Upload")),
("image_uploading", pgettext_lazy("Task status of an Instance",
u"Image Uploading")),
("image_backup", pgettext_lazy("Task status of an Instance",
u"Image Backup")),
("updating_password", pgettext_lazy("Task status of an Instance",
u"Updating Password")),
("resize_prep", pgettext_lazy("Task status of an Instance",
u"Preparing Resize or Migrate")),
("resize_migrating", pgettext_lazy("Task status of an Instance",
u"Resizing or Migrating")),
("resize_migrated", pgettext_lazy("Task status of an Instance",
u"Resized or Migrated")),
("resize_finish", pgettext_lazy("Task status of an Instance",
u"Finishing Resize or Migrate")),
("resize_reverting", pgettext_lazy("Task status of an Instance",
u"Reverting Resize or Migrate")),
("resize_confirming", pgettext_lazy("Task status of an Instance",
u"Confirming Resize or Migrate")),
("rebooting", pgettext_lazy("Task status of an Instance", u"Rebooting")),
("reboot_pending", pgettext_lazy("Task status of an Instance",
u"Reboot Pending")),
("reboot_started", pgettext_lazy("Task status of an Instance",
u"Reboot Started")),
("rebooting_hard", pgettext_lazy("Task status of an Instance",
u"Rebooting Hard")),
("reboot_pending_hard", pgettext_lazy("Task status of an Instance",
u"Reboot Pending Hard")),
("reboot_started_hard", pgettext_lazy("Task status of an Instance",
u"Reboot Started Hard")),
("pausing", pgettext_lazy("Task status of an Instance", u"Pausing")),
("unpausing", pgettext_lazy("Task status of an Instance", u"Resuming")),
("suspending", pgettext_lazy("Task status of an Instance",
u"Suspending")),
("resuming", pgettext_lazy("Task status of an Instance", u"Resuming")),
("powering-off", pgettext_lazy("Task status of an Instance",
u"Powering Off")),
("powering-on", pgettext_lazy("Task status of an Instance",
u"Powering On")),
("rescuing", pgettext_lazy("Task status of an Instance", u"Rescuing")),
("unrescuing", pgettext_lazy("Task status of an Instance",
u"Unrescuing")),
("rebuilding", pgettext_lazy("Task status of an Instance",
u"Rebuilding")),
("rebuild_block_device_mapping", pgettext_lazy(
"Task status of an Instance", u"Rebuild Block Device Mapping")),
("rebuild_spawning", pgettext_lazy("Task status of an Instance",
u"Rebuild Spawning")),
("migrating", pgettext_lazy("Task status of an Instance", u"Migrating")),
("deleting", pgettext_lazy("Task status of an Instance", u"Deleting")),
("soft-deleting", pgettext_lazy("Task status of an Instance",
u"Soft Deleting")),
("restoring", pgettext_lazy("Task status of an Instance", u"Restoring")),
("shelving", pgettext_lazy("Task status of an Instance", u"Shelving")),
("shelving_image_pending_upload", pgettext_lazy(
"Task status of an Instance", u"Shelving Image Pending Upload")),
("shelving_image_uploading", pgettext_lazy("Task status of an Instance",
u"Shelving Image Uploading")),
("shelving_offloading", pgettext_lazy("Task status of an Instance",
u"Shelving Offloading")),
("unshelving", pgettext_lazy("Task status of an Instance",
u"Unshelving")),
)
POWER_DISPLAY_CHOICES = (
("NO STATE", pgettext_lazy("Power state of an Instance", u"No State")),
("RUNNING", pgettext_lazy("Power state of an Instance", u"Running")),
("BLOCKED", pgettext_lazy("Power state of an Instance", u"Blocked")),
("PAUSED", pgettext_lazy("Power state of an Instance", u"Paused")),
("SHUTDOWN", pgettext_lazy("Power state of an Instance", u"Shut Down")),
("SHUTOFF", pgettext_lazy("Power state of an Instance", u"Shut Off")),
("CRASHED", pgettext_lazy("Power state of an Instance", u"Crashed")),
("SUSPENDED", pgettext_lazy("Power state of an Instance", u"Suspended")),
("FAILED", pgettext_lazy("Power state of an Instance", u"Failed")),
("BUILDING", pgettext_lazy("Power state of an Instance", u"Building")),
)
class InstancesFilterAction(tables.FilterAction):
filter_type = "server"
filter_choices = (('name', _("Instance Name"), True),
('status', _("Status ="), True),
('image', _("Image ID ="), True),
('flavor', _("Flavor ID ="), True))
class InstancesTable(tables.DataTable):
TASK_STATUS_CHOICES = (
(None, True),
("none", True)
)
STATUS_CHOICES = (
("active", True),
("shutoff", True),
("suspended", True),
("paused", True),
("error", False),
("rescue", True),
("shelved", True),
("shelved_offloaded", True),
)
name = tables.Column("name",
link="horizon:project:instances:detail",
verbose_name=_("Instance Name"))
image_name = tables.Column("image_name",
verbose_name=_("Image Name"))
ip = tables.Column(get_ips,
verbose_name=_("IP Address"),
attrs={'data-type': "ip"})
size = tables.Column(get_size,
verbose_name=_("Size"),
attrs={'data-type': 'size'})
keypair = tables.Column(get_keyname, verbose_name=_("Key Pair"))
status = tables.Column("status",
filters=(title, filters.replace_underscores),
verbose_name=_("Status"),
status=True,
status_choices=STATUS_CHOICES,
display_choices=STATUS_DISPLAY_CHOICES)
az = tables.Column("availability_zone",
verbose_name=_("Availability Zone"))
task = tables.Column("OS-EXT-STS:task_state",
verbose_name=_("Task"),
empty_value=TASK_DISPLAY_NONE,
status=True,
status_choices=TASK_STATUS_CHOICES,
display_choices=TASK_DISPLAY_CHOICES)
state = tables.Column(get_power_state,
filters=(title, filters.replace_underscores),
verbose_name=_("Power State"),
display_choices=POWER_DISPLAY_CHOICES)
created = tables.Column("created",
verbose_name=_("Time since created"),
filters=(filters.parse_isotime,
filters.timesince_sortable),
attrs={'data-type': 'timesince'})
class Meta(object):
name = "instances"
verbose_name = _("Instances")
status_columns = ["status", "task"]
row_class = UpdateRow
table_actions_menu = (StartInstance, StopInstance, SoftRebootInstance)
launch_actions = ()
if getattr(settings, 'LAUNCH_INSTANCE_LEGACY_ENABLED', True):
launch_actions = (LaunchLink,) + launch_actions
if getattr(settings, 'LAUNCH_INSTANCE_NG_ENABLED', False):
launch_actions = (LaunchLinkNG,) + launch_actions
table_actions = launch_actions + (TerminateInstance,
InstancesFilterAction)
row_actions = (StartInstance, ConfirmResize, RevertResize,
CreateSnapshot, SimpleAssociateIP, AssociateIP,
SimpleDisassociateIP, EditInstance,
DecryptInstancePassword, EditInstanceSecurityGroups,
ConsoleLink, LogLink, TogglePause, ToggleSuspend,
ResizeLink, LockInstance, UnlockInstance,
SoftRebootInstance, RebootInstance,
StopInstance, RebuildInstance, TerminateInstance)
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
bda6827f0e45a54787510cf4db41b02b4412e477 | 229e5783414a9f665530cec710bf89c411b2428a | /bitwise or.py | 3be9df19bec4253ee46643dbc5a14be4cad8619d | [] | no_license | rupinirudrappan/guvi_codekata | 1a1cc5befd577a9e84c0e672cd8e6b24200700f5 | bca68e0fa650db590ccfff25fad2de1a1fac9f73 | refs/heads/master | 2020-07-22T18:05:21.473193 | 2019-09-10T03:58:49 | 2019-09-10T03:58:49 | 207,284,186 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | py | array=[]
result=0
try:
size=int(input())
array=list(map(int,input().split()))
if len(array)==size:
for i in array:
result=result|i
print(result)
else:
raise IndexError
except IndexError:
pass
except ValueError:
pass
| [
"noreply@github.com"
] | rupinirudrappan.noreply@github.com |
6234237d22d1e2a20cc1766714f825df98311bc5 | b0b21bd961031083ef2ff04e2c71648192e181bc | /snippets/popular_libraries/click-lib/intermediate-features/click_prompting.py | 821cd2007448e2464760e7ef0930ebb6d7949af7 | [
"Apache-2.0"
] | permissive | melvio/python3-examples | 3406b732b071fb8a23e296b9a2e5aab9e7a04deb | 5340fe17e0a5001a81cf195e63f825b77dc16fca | refs/heads/main | 2023-08-25T06:33:34.743019 | 2021-10-11T11:29:20 | 2021-10-11T11:29:20 | 396,391,616 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | import click
user_input = click.prompt(text="Folder name", default="Download")
print(f"{user_input=}")
| [
"31448155+melvio@users.noreply.github.com"
] | 31448155+melvio@users.noreply.github.com |
a211b243c189461e1ce8c113460aca29ec888327 | c191afe7ebe9295d8c023d35d2d7a30f3f5a5c2d | /download_data.py | 6801710beb36429a6f1b11dd2c0779230510a43b | [] | no_license | idanbudin93/epitope-predictor | 008baf85993f5763db507cdf160504956f23ed97 | 7356a63f618464a7b632796ab84d82350c8c58d7 | refs/heads/master | 2022-12-27T00:36:17.031687 | 2020-10-05T20:17:16 | 2020-10-05T20:17:16 | 286,854,541 | 0 | 2 | null | 2020-09-24T23:07:47 | 2020-08-11T21:48:26 | Python | UTF-8 | Python | false | false | 1,221 | py | import os
import pathlib
import urllib
import urllib.request
import shutil
import random
import zipfile
TEMP_FILENAME = "tmp.zip"
def download_data(out_path, csv_filename, url, force=False):
""" downloads the data to the specified out_path """
dir_path = pathlib.Path(out_path)
dir_path.mkdir(exist_ok=True)
tmp_path = dir_path.joinpath(TEMP_FILENAME)
csv_path = dir_path.joinpath(csv_filename)
if csv_path.is_file() and not force:
print(f'csv file {str(csv_path)} exists, skipping download.')
else:
if tmp_path.is_file() and not force:
print(f'zip file {str(tmp_path)} exists, skipping download.')
else:
print(f'Downloading {url}...')
with urllib.request.urlopen(url) as response, open(str(tmp_path), 'wb') as out_file:
shutil.copyfileobj(response, out_file)
print(f'Saved to {str(tmp_path)}.')
print(f'Extracting zip {str(tmp_path)}.')
with zipfile.ZipFile(str(tmp_path), 'r') as zip_ref:
zipinfo = zip_ref.infolist()[0]
zipinfo.filename = str(csv_path)
zip_ref.extract(zipinfo)
tmp_path.unlink()
return csv_filename | [
"smadarlagazit@gmail.com"
] | smadarlagazit@gmail.com |
050bbf688cf7d218631a19ef68de581fb04dd53c | 839db939ac031a78d6a28de6372a00f96a97d1e6 | /code_2020/src/ssi6_train0616_parole3.py | 0f8bb30922b6f4fe8a02f26cf5ec016c62816d01 | [] | no_license | chaoyixue/Stage_Silent_Speech | 25770aa11db47a1507c2f47717a7d932e3f84751 | 36958a5f895feaccc92c488dfd89ff7736ecd052 | refs/heads/main | 2023-07-16T16:20:08.471874 | 2021-08-30T10:22:15 | 2021-08-30T10:22:15 | 343,715,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,537 | py | #import struct
import os
# os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
# os.environ["CUDA_VISIBLE_DEVICES"]="3"
from ssi11_input_data_new import *
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
from torchvision import transforms
import torch.nn.functional as F
from torch.utils.data import Dataset,DataLoader,TensorDataset
from sklearn.metrics import r2_score, mean_absolute_error
from sklearn import preprocessing
import matplotlib.pyplot as plt
import sys
import cv2
import numpy as np
# import librosa
# import librosa.display
import time
#from torch.utils.checkpoint import checkpoint
from pytorchtools import EarlyStopping
# from torchvision.models import AlexNet
# from torchviz import make_dot
# from torchvision import models
root="../out/"  # directory where checkpoints / figures / logs are written
# Training hyper-parameters.
BATCH_SIZE = 128
BASE_LR= 1e-5       # initial learning rate
NUM_EPOCH = 50
WEIGHT_DECAY=1e-7
MOMENTUM=0.9
PATIENCE=5          # early-stopping patience, in epochs
DROPOUT=0.2         # dropout probability used throughout the CNN
# Non-interactive backend so figures can be saved on a headless machine.
plt.switch_backend('agg')
# Number of consecutive video frames stacked channel-wise per sample
# (also the input channel count of the CNN branches).
i=2
# ---------- input data ----------
def match_image_label(image_data, window=None):
    """Stack consecutive frames along the channel (last) axis.

    Slides a window of ``window`` consecutive images over the first axis
    and concatenates each window on the last axis.  For frames of shape
    (H, W, C) the result has shape (N - window + 1, H, W, C * window),
    e.g. 2 frames of (64, 64, 3) -> (64, 64, 6) per sample.

    Args:
        image_data: np.ndarray of frames; axis 0 is the time/sample axis.
        window: number of consecutive frames to stack.  Defaults to the
            module-level ``i`` for backward compatibility.

    Returns:
        np.ndarray of the stacked windows.
    """
    if window is None:
        window = i  # module-wide frame-window setting
    n_frames = image_data.shape[0]
    return np.array([
        np.concatenate(image_data[m:m + window], axis=-1)
        for m in range(n_frames - window + 1)
    ])
def SSIDatasets():
    """Build train/eval/test TensorDatasets and DataLoaders.

    Lips and tongue frames are stacked into windows of ``i`` consecutive
    images, converted to float tensors in NCHW layout, and paired with
    the label vectors.  The eval and test sets are built from the same
    held-out data; only the loader shuffling differs (eval shuffled,
    test in order).

    Returns:
        (train_datasets, train_loader,
         eval_datasets, eval_loader,
         test_datasets, test_loader)
    """
    print('[INFO] -------------------------------------------------')
    print('[INFO] set datasets')
    (train_lips, test_lips, train_tongue,
     test_tongue, train_label, test_label) = load_dataset()
    # For frame windows larger than 2, trim trailing labels so the label
    # count matches the number of stacked image windows.
    if i not in (1, 2):
        train_label = train_label[:-i + 2, :]
        test_label = test_label[:-i + 2, :]
    # Stack i consecutive frames along the channel axis.
    train_lips = match_image_label(train_lips)
    train_tongue = match_image_label(train_tongue)
    test_lips = match_image_label(test_lips)
    test_tongue = match_image_label(test_tongue)
    # Images: float tensors, reordered NHWC -> NCHW for nn.Conv2d.
    train_lips = torch.from_numpy(train_lips).float().permute(0, 3, 1, 2)
    test_lips = torch.from_numpy(test_lips).float().permute(0, 3, 1, 2)
    train_tongue = torch.from_numpy(train_tongue).float().permute(0, 3, 1, 2)
    test_tongue = torch.from_numpy(test_tongue).float().permute(0, 3, 1, 2)
    # Labels stay 2-D float tensors.
    train_label = torch.from_numpy(train_label).float()
    test_label = torch.from_numpy(test_label).float()
    # Wrap in datasets / loaders.
    train_datasets = TensorDataset(train_lips, train_tongue, train_label)
    train_loader = DataLoader(dataset=train_datasets, batch_size=BATCH_SIZE, shuffle=True)
    eval_datasets = TensorDataset(test_lips, test_tongue, test_label)
    eval_loader = DataLoader(dataset=eval_datasets, batch_size=BATCH_SIZE, shuffle=True)
    test_datasets = TensorDataset(test_lips, test_tongue, test_label)
    test_loader = DataLoader(dataset=test_datasets, batch_size=BATCH_SIZE, shuffle=False)
    return train_datasets, train_loader, eval_datasets, eval_loader, test_datasets, test_loader
#output console information-----------
# class Logger(object):
# def __init__(self,fileN='Default.log'):
# self.terminal=sys.stdout
# self.log=open(fileN,'w')
# def write(self,message):
# self.terminal.write(message)
# self.log.write(message)
# self.flush()
# def flush(self):
# self.log.flush()
# sys.stdout = Logger(root+'console information.txt')
# ---------- learning-rate schedule ----------
def adjust_lr(optimizer, epoch):
    """Decay every parameter group's learning rate by 10x after each
    10th epoch (i.e. when ``epoch + 1`` is a multiple of 10)."""
    if (epoch + 1) % 10 != 0:
        return
    for group in optimizer.param_groups:
        group['lr'] *= 0.1
# ---------- CNN model ----------
class CNN(nn.Module):
    """Two-stream CNN: separate lips and tongue convolution branches whose
    flattened features are concatenated and passed through a dense head.

    Relies on the module-level globals ``i`` (input channel count) and
    ``DROPOUT`` (dropout probability).
    """

    @staticmethod
    def _conv_block(in_ch, out_ch, use_bias):
        # conv-relu-conv-relu-pool-bn-dropout; halves the spatial size
        return nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1, bias=use_bias),
            nn.ReLU(),
            nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1, bias=use_bias),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.BatchNorm2d(out_ch),
            nn.Dropout(DROPOUT))

    def __init__(self):
        super(CNN, self).__init__()
        # lips branch (registration order kept: conv1..conv6, dense1, dense2)
        self.conv1 = self._conv_block(i, 16, True)
        self.conv2 = self._conv_block(16, 32, False)
        self.conv3 = self._conv_block(32, 64, False)
        # tongue branch (mirrors the lips branch)
        self.conv4 = self._conv_block(i, 16, True)
        self.conv5 = self._conv_block(16, 32, False)
        self.conv6 = self._conv_block(32, 64, False)
        # 8192 = 2 branches * 64 channels * 8 * 8 -- assumes 64x64 inputs
        # (TODO confirm against the data pipeline)
        self.dense1 = nn.Sequential(
            nn.Linear(8192, 1024),
            nn.ReLU(),
            nn.Dropout(DROPOUT))
        self.dense2 = nn.Sequential(
            nn.Linear(1024, 128),
            nn.ReLU())

    def forward(self, lips, tongue):
        """Run both branches, concatenate the flat features and regress."""
        lips_feat = self.conv3(self.conv2(self.conv1(lips)))
        lips_feat = lips_feat.view(lips_feat.size(0), -1)
        tongue_feat = self.conv6(self.conv5(self.conv4(tongue)))
        tongue_feat = tongue_feat.view(tongue_feat.size(0), -1)
        merged = torch.cat((lips_feat, tongue_feat), dim=1)
        return self.dense2(self.dense1(merged))
# Instantiate the network and move all parameters to the GPU.
model = CNN()
model.cuda()
print('[INFO] cnn model ---------------------------------------')
print(model)
# inputs = torch.randn(6,2,64,64)
# # g=make_dot(model(lips,tongue))
# g=make_dot(model(inputs), params=dict(model.named_parameters()))
# g.render(root+'cnn_model', view=False)
# optimizer and loss function ----------
# optimizer = optim.SGD(model.parameters(), lr=BASE_LR, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY) # stochastic gradient descent
optimizer = optim.Adam(model.parameters(), lr=BASE_LR, betas=(0.9, 0.999),eps=1e-08, weight_decay=WEIGHT_DECAY) # weight_decay acts as L2 regularization
loss_func = nn.MSELoss() # default reduce=True returns a scalar; size_average=True returns the mean loss
# loss_func = nn.BCEWithLogitsLoss()
# # multiple optim
# optimizer = optim.Adam(model.parameters(), lr=BASE_LR, weight_decay=WEIGHT_DECAY) # L2 regularization via weight decay
# optimizer = optim.SGD(model.parameters(), lr=BASE_LR, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY) # stochastic gradient descent
# optimizer = [optimizer_Adam, optimizer_SGD]
# loss_func = nn.MSELoss()
# losses_history = [[],[]]
def main():
    """Train the global ``model`` with early stopping.

    Uses the module-level ``model``, ``optimizer``, ``loss_func`` and the
    loaders/datasets produced by ``SSIDatasets()``.

    Returns
    -------
    tuple
        (train_losses, eval_losses): average per-epoch losses.
    """
    train_losses, eval_losses = [], []
    early_stopping = EarlyStopping(patience=PATIENCE, verbose=True)
    for epoch in range(NUM_EPOCH):
        print('[INFO] start training ')
        model.train()  # enable batchnorm updates and dropout
        train_loss = 0.0
        for step, (train_lips, train_tongue, train_label) in enumerate(train_loader):
            # NOTE(review): Variable is a no-op wrapper in modern PyTorch
            train_lips, train_tongue, train_label = Variable(train_lips).cuda(), Variable(train_tongue).cuda(), Variable(train_label).cuda()
            optimizer.zero_grad()  # reset accumulated gradients
            output = model(train_lips, train_tongue)
            loss = loss_func(output, train_label)
            loss.backward()  # backpropagation
            optimizer.step()  # parameter update
            # accumulate the per-sample loss sum so the epoch average is exact
            train_loss += float(loss.item()*train_lips.size(0))
            if step % 100 == 99:
                print('Epoch:[%d/%d], Step:[%d/%d], Step loss: %.4f' % (epoch + 1, NUM_EPOCH, step + 1, len(train_datasets) // BATCH_SIZE, loss.item()))
        train_losses.append(train_loss/len(train_datasets))
        print('=====> Epoch:', epoch+1, ' | Average epoch train loss: %.4f' % (train_loss/len(train_datasets)))
        adjust_lr(optimizer, epoch)
        # evaluation -----------
        print('[INFO] start evaluation')
        model.eval()  # disable batchnorm updates and dropout
        with torch.no_grad():
            eval_loss = 0.0
            for step, (test_lips, test_tongue, test_label) in enumerate(eval_loader):
                test_lips, test_tongue, test_label = Variable(test_lips).cuda(), Variable(test_tongue).cuda(), Variable(test_label).cuda()
                output = model(test_lips, test_tongue)
                loss = loss_func(output, test_label)
                eval_loss += float(loss.item()*test_lips.size(0))
        eval_losses.append(eval_loss/len(eval_datasets))
        print('=====> Epoch:', epoch+1, ' | Average epoch eval loss: %.4f ' % (eval_loss/len(eval_datasets)))
        print('[INFO] evaluation complete')
        # BUGFIX: normalize by len(eval_datasets) (was len(test_datasets)) so
        # the early-stopping metric matches the logged evaluation loss
        early_stopping(eval_loss/len(eval_datasets), model)
        if early_stopping.early_stop:
            print('[INFO] early stop')
            break
    return train_losses, eval_losses
def test_model():
    """Load the early-stopping checkpoint and predict on the test set.

    Returns
    -------
    torch.Tensor or None
        all batch outputs concatenated row-wise, or None if the test loader
        yielded no batches.
    """
    model.load_state_dict(torch.load(root+'checkpoint.pt'))
    print('[INFO] start testing, output predict')
    model.eval()  # disable batchnorm updates and dropout
    test_loss = 0.0
    prediction = None  # BUGFIX: defined even if the loader is empty
    with torch.no_grad():  # BUGFIX: inference should not build autograd graphs
        for step, (test_lips, test_tongue, test_label) in enumerate(test_loader):
            test_lips, test_tongue, test_label = Variable(test_lips).cuda(), Variable(test_tongue).cuda(), Variable(test_label).cuda()
            output = model(test_lips, test_tongue)
            loss = loss_func(output, test_label)
            test_loss += float(loss.item()*test_lips.size(0))
            if step == 0:
                prediction = output
            else:
                prediction = torch.cat((prediction, output), 0)  # stack batches row-wise
    print('=====> Average loss: %.4f ' % (test_loss/len(test_datasets)))
    print('[INFO] test complete')
    return prediction
if __name__ == "__main__":
    start = time.perf_counter()
    train_datasets, train_loader, eval_datasets, eval_loader, test_datasets, test_loader = SSIDatasets()
    train_losses, eval_losses = main()
    # prediction = test_model()
    print('[INFO] save train result picture')
    fig = plt.figure(figsize=(10, 8))
    plt.plot(train_losses, color='red')
    plt.plot(eval_losses, color='blue')
    # mark the epoch with the lowest evaluation loss
    minloss = eval_losses.index(min(eval_losses))
    plt.axvline(minloss, linestyle='--', color='green')
    plt.legend(['Train Loss', 'Eval Loss'], loc='upper right')
    plt.title('epoch loss')
    plt.xlabel('epoch')
    plt.ylabel('loss')
    plt.grid(True)
    plt.savefig(root+"epoch_loss.png")
    fig2 = plt.figure(figsize=(10, 8))
    plt.plot(eval_losses, color='green')
    plt.legend(['Eval loss'], loc='upper right')
    plt.xlabel('epoch')
    plt.ylabel('Eval loss')
    plt.grid(True)
    plt.savefig(root+"eval_loss.png")
    np.save(root+"train_losses.npy", np.array(train_losses))
    np.save(root+"eval_losses.npy", np.array(eval_losses))
    # print('[INFO] save model parameters')
    # torch.save(model.state_dict(),root+'test_model_ssi.pth') # save only the parameters, not the full model
    # print('[INFO] training complete')
    # # save the full model:
    # print('[INFO] save model')
    # torch.save(model,'model_ssi.pth')
    # print('training complete')
    # print('[INFO] save test output')
    # spec = prediction.cpu().detach().numpy()
    # # spec = min_max_scaler2.inverse_transform(spec)
    # np.save(root+"test_predict.npy", spec)
    # print('[INFO] finished')
    end = time.perf_counter()
    # BUGFIX: '%.4s' truncated the string form of the float; '%.4f' prints seconds
    print('[INFO] running time: %.4f seconds' % (end-start))
"chaoyi.xue@gmail.com"
] | chaoyi.xue@gmail.com |
b5787674ba00b1b8c5c9c14c74abd19e72a89b73 | ac0ef4aecb6053020b7af6c756b39446bbb19a91 | /setup.py | ac54f10d8c34fae90939c9bf50227239c6ad5ba3 | [] | no_license | nolanbconaway/rpi-camera | 2ccce96f20d5483931d38f8793394341f98d7ba1 | f721e44da93918a828afaf425871b3094b651218 | refs/heads/master | 2023-03-26T04:30:50.095865 | 2021-03-27T13:51:32 | 2021-03-27T13:51:32 | 316,606,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | import setuptools
# Package definition for the Raspberry Pi camera service.
setuptools.setup(
    name="camera",
    version="0.0.1",
    packages=["camera"],
    python_requires=">=3.7",
    # pinned runtime dependencies
    install_requires=[
        "picamera==1.13",
        "flask==1.1.2",
        "pytz==2020.4",
        "gevent==20.9.0",
    ],
    # ship the bundled HTML templates with the package
    package_data={
        "camera": ["html/*.html"],
    },
    # optional computer-vision extra: `pip install camera[cv]`
    extras_require={"cv": ["opencv-python-headless==4.4.0.46", "tqdm==4.54.0"]},
)
| [
"nolanbconaway@gmail.com"
] | nolanbconaway@gmail.com |
83fa096992b60ee9f25862dd01b9c52b2c6c1ea5 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_3_neat/16_0_3_JawBone_main.py | 9103e18ea816c4880314a942a1d0134a68bf0711 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 1,228 | py | def cpaf(rn):
for divisor in xrange(2, 100):
if not rn % divisor:
return (False, divisor)
return (True, 1)
def baseconverter(rn, basefrom):
    """Return the digit string of non-negative integer *rn* in base *basefrom*.

    Only decimal digit characters are used, so bases 2..10 are supported.
    BUGFIX: the original used ``rn / basefrom``, which is float division under
    Python 3; ``divmod`` keeps the Python 2 integer semantics on both versions.
    """
    digits = "0123456789"
    result = ""
    while True:
        rn, remains = divmod(rn, basefrom)
        result = digits[remains] + result
        if rn == 0:
            break
    return result
lines = raw_input()
for question_index in xrange(1, int(lines) + 1):
length_of_jamcoin, types_of_jamcoin = [int(s) for s in raw_input().split(" ")]
answer_list = []
count = 0
for index in xrange(1, pow(2, length_of_jamcoin)):
inside = baseconverter(index, 2)
if len(str(inside)) < length_of_jamcoin - 1:
result = str(inside).zfill(length_of_jamcoin - 2)
temp_testcase = '1' + result + '1'
answers = temp_testcase
for i in xrange(2, 11):
temp = cpaf(int(temp_testcase, i))
if not temp[0]:
answers += ' ' + str(temp[1])
if answers.count(' ') >= 9:
answer_list.append(answers)
if len(answer_list) >= types_of_jamcoin:
break
print 'Case #1:'
for ans in answer_list:
print ans | [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
2ddd55605050c3b2aa54d92f04a82d1be4921a8f | 727f1bc2205c88577b419cf0036c029b8c6f7766 | /out-bin/py/google/fhir/models/run_locally.runfiles/com_google_fhir/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/layers/layers.py | 5b4a0b637a1140402fc92bb72547a407526c443b | [
"Apache-2.0"
] | permissive | rasalt/fhir | 55cf78feed3596a3101b86f9e9bbf6652c6ed4ad | d49883cc4d4986e11ca66058d5a327691e6e048a | refs/heads/master | 2020-04-13T00:16:54.050913 | 2019-01-15T14:22:15 | 2019-01-15T14:22:15 | 160,260,223 | 0 | 0 | Apache-2.0 | 2018-12-03T22:07:01 | 2018-12-03T22:07:01 | null | UTF-8 | Python | false | false | 175 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/c4bcd65252c8f8250f091ba96375f9a5/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/layers/layers.py | [
"ruchika.kharwar@gmail.com"
] | ruchika.kharwar@gmail.com |
1bfbb57c71c850399d3c6919018d085525a33a0d | 9b313b1a4d36bdc3883479f58f7f38d681b009b0 | /helpers.py | 8ab66a9de874ad92875accd06e2c672d451c22c0 | [
"MIT"
] | permissive | jgliss/aerocom_obs_props | 6fd0de913bff3da97c32977ed341d7061e461658 | 8c2824b2cdf558a74edc0f180a653e86faf9a442 | refs/heads/master | 2020-03-25T17:26:24.842939 | 2018-09-20T11:58:19 | 2018-09-20T11:58:19 | 143,977,644 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,384 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 7 08:19:06 2018
@author: jonasg
"""
import os
import numpy as np
import pyaerocom as pya
from functools import reduce
import pandas as pd
import matplotlib.pyplot as plt
class AnalysisSetup(pya._lowlevel_helpers.BrowseDict):
    """Setup class for model / obs intercomparison

    An instance of this setup class can be used to run a collocation analysis
    between a model and an observation network and will create a number of
    :class:`pya.CollocatedData` instances and save them as netCDF file.

    Note
    ----
    This is a very first draft and may change

    Attributes
    ----------
    vars_to_analyse : list
        variables to be analysed (should be available in model and obs data)
    model_id : str
        ID of the gridded model data
    obs_id : str
        ID of the observation network
    years : list
        years to be analysed
    filter_name : str
        name of the region / altitude filter applied during collocation
    ts_type_setup : _TS_TYPESetup
        mapping of read frequency -> analysis frequencies
    out_basedir : str
        base directory for all analysis output
    """
    def __init__(self, vars_to_analyse=None, model_id=None, obs_id=None,
                 years=None, filter_name='WORLD-noMOUNTAINS',
                 ts_type_setup=None, out_basedir=None, **kwargs):
        self.vars_to_analyse = vars_to_analyse
        self.model_id = model_id
        self.obs_id = obs_id
        self.filter_name = filter_name
        # coerce a plain dict into the helper class
        # NOTE(review): passing ts_type_setup=None raises TypeError here
        # (** expansion of None) -- confirm callers always supply a mapping
        if not isinstance(ts_type_setup, _TS_TYPESetup):
            ts_type_setup = _TS_TYPESetup(**ts_type_setup)
        self.ts_type_setup = ts_type_setup
        self.years = years
        self.out_basedir = out_basedir
        # extra keyword args become additional attributes (BrowseDict)
        self.update(**kwargs)
def get_all_vars(OBS_INFO_DICT):
    """Return the unique variable names over all obs networks.

    Parameters
    ----------
    OBS_INFO_DICT : dict
        mapping of obs network ID -> iterable of variable names

    Returns
    -------
    list
        unique variable names in first-seen order
    """
    all_vars = []
    # the obs IDs (keys) are not needed, only the variable lists
    for variables in OBS_INFO_DICT.values():
        for variable in variables:
            if variable not in all_vars:
                all_vars.append(variable)
    return all_vars
class _TS_TYPESetup(pya._lowlevel_helpers.BrowseDict):
    """Mapping of read frequency -> list of analysis frequencies.

    The special key ``read_alt`` maps a ts_type to alternative ts_types that
    may be read instead when the primary one is unavailable.
    """
    def __init__(self, *args, **kwargs):
        self.read_alt = {}
        super(_TS_TYPESetup, self).__init__(*args, **kwargs)

    def __str__(self):
        s ='ts_type settings (<read>: <analyse>)\n'
        for key, val in self.items():
            if key == 'read_alt':
                continue
            s+=' {}:{}\n'.format(key, val)
        if self['read_alt']:
            s+=' Alternative ts_types (read)\n'
            for key, val in self['read_alt'].items():
                s+=' {}:{}\n'.format(key, val)
        return s  # BUGFIX: __str__ previously returned None (missing return)
def check_prepare_dirs(basedir, model_id):
    """Create (if needed) and return the output directories for one model.

    Parameters
    ----------
    basedir : str
        base output directory; falls back to ``pya.const.OUT_BASEDIR`` if it
        does not exist
    model_id : str
        model ID, used as sub-directory name

    Returns
    -------
    dict
        keys 'data' and 'scatter_plots' pointing to
        ``<basedir>/<model_id>/data`` and ``<basedir>/<model_id>/scatter_plots``
    """
    def chk_make_dir(base, name):
        # makedirs(exist_ok=True) avoids the check-then-create race of
        # os.path.exists + os.mkdir
        d = os.path.join(base, name)
        os.makedirs(d, exist_ok=True)
        return d

    dirs = {}
    if not os.path.exists(basedir):
        basedir = pya.const.OUT_BASEDIR
    out_dir = chk_make_dir(basedir, model_id)
    dirs['data'] = chk_make_dir(out_dir, 'data')
    dirs['scatter_plots'] = chk_make_dir(out_dir, 'scatter_plots')
    return dirs
def prepare_ts_types(model_reader, ts_type_setup):
    """Determine the read frequencies supported by setup AND model reader.

    If a requested frequency is unavailable, the alternatives listed under
    ``read_alt`` are tried in order; the first available one inherits the
    analysis frequencies of the primary.

    Returns
    -------
    tuple
        (matching ts_types, possibly extended ts_type_setup)
    """
    available = model_reader.ts_types
    matches = list(np.intersect1d(list(ts_type_setup.keys()), available))
    if 'read_alt' in ts_type_setup:
        for primary, alternatives in ts_type_setup.read_alt.items():
            if primary in matches:
                continue
            for candidate in alternatives:
                if candidate not in available:
                    continue
                matches.append(candidate)
                ts_type_setup[candidate] = ts_type_setup[primary]
                break
    return (matches, ts_type_setup)
def start_stop_from_year(year):
    """Return (start, stop) pandas timestamps spanning the given year."""
    first_instant = pya.helpers.to_pandas_timestamp(year)
    last_instant = pya.helpers.to_pandas_timestamp('{}-12-31 23:59:59'.format(year))
    return (first_instant, last_instant)
def colldata_save_name(model_data, model_id, obs_id, ts_type_ana, filter_name,
                       start=None, stop=None):
    """Compose the AeroCom netCDF file name for a collocated-data object.

    ``start`` / ``stop`` default to the time range of ``model_data``; any
    other value is converted to a pandas timestamp first.
    """
    start = (model_data.start_time if start is None
             else pya.helpers.to_pandas_timestamp(start))
    stop = (model_data.stop_time if stop is None
            else pya.helpers.to_pandas_timestamp(stop))
    name = pya.CollocatedData._aerocom_savename(
        model_data.var_name,
        obs_id,
        model_id,
        model_data.ts_type,
        pya.helpers.to_datestring_YYYYMMDD(start),
        pya.helpers.to_datestring_YYYYMMDD(stop),
        ts_type_ana,
        filter_name)
    return name + '.nc'
def check_colldata_exists(data_dir, colldata_save_name):
    """Return True if an entry with this exact name exists in ``data_dir``."""
    return colldata_save_name in os.listdir(data_dir)
def get_file_list(result_dir, models, verbose=True):
    """Collect all '*COLL.nc' result files below ``result_dir``.

    Only sub-directories whose name is contained in ``models`` are searched
    (in their ``data/`` sub-folder).
    """
    collected = []
    for entry in os.listdir(result_dir):
        if entry not in models:
            continue
        data_dir = os.path.join(result_dir, entry, 'data/')
        files = os.listdir(data_dir)
        if files and verbose:
            print('Importing {} result files from model {}'
                  .format(len(files), entry))
        collected.extend(os.path.join(data_dir, f)
                         for f in files if f.endswith('COLL.nc'))
    return collected
def load_result_files(file_list, verbose=True):
    """Load collocated-data netCDF files and attach their statistics.

    For each file, the metadata is parsed from the file name, the data is
    read into a DataFrame and basic statistics (bias, RMS, R, ...) are
    computed via :func:`pya.mathutils.calc_statistics`. A Jupyter progress
    bar is shown if ipywidgets is available.

    Returns
    -------
    list
        one info dict per input file
    """
    results = []
    data = pya.CollocatedData()
    # BUGFIX: ensure the progress-bar handle always exists; previously a
    # failing ipywidgets import left `f` unbound -> NameError in the loop
    f = None
    try:
        from ipywidgets import FloatProgress
        from IPython.display import display
        max_count = len(file_list)
        f = FloatProgress(min=0, max=max_count) # instantiate the bar
        display(f) # display the bar
    except Exception as e:
        print('Failed to instantiate progress bar: {}'.format(repr(e)))
    for file in file_list:
        info = data.get_meta_from_filename(file)
        # data_source holds (obs_id, model_id) in the AeroCom file name
        info['model_id'] = info['data_source'][1]
        info['obs_id'] = info['data_source'][0]
        info['year'] = info['start'].year
        info['data'] = data.read_netcdf(file).to_dataframe()
        obs = info['data']['ref'].values
        model = info['data']['data'].values
        stats = pya.mathutils.calc_statistics(model, obs)
        info.update(stats)
        results.append(info)
        if f is not None:
            f.value += 1
    return results
def results_to_dataframe(results):
    """Convert the output of :func:`load_result_files` into a DataFrame.

    The frame is indexed by (Model, Year, Variable, Obs) and sorted by that
    MultiIndex; columns hold the frequencies and the statistics.
    """
    header = ['Model', 'Year', 'Variable', 'Obs', 'Freq', 'FreqSRC',
              'Bias', 'RMS', 'R', 'FGE']
    keys = ('model_id', 'year', 'var_name', 'obs_id', 'ts_type',
            'ts_type_src', 'nmb', 'rms', 'R', 'fge')
    rows = [[info[k] for k in keys] for info in results]
    df = pd.DataFrame(rows, columns=header)
    df.set_index(['Model', 'Year', 'Variable', 'Obs'], inplace=True)
    df.sort_index(inplace=True)
    return df
def slice_dataframe(df, values, levels):
    """Crop a selection from a (Multi)Index DataFrame.

    Parameters
    ----------
    df : DataFrame
        input dataframe; may have a simple index or a MultiIndex
    values : list
        for each entry in ``levels``, the index values to keep (each entry is
        expected to be a sized container -- see the droplevel step below)
    levels : list, str, int or None
        index level(s) -- by name or position -- to which ``values`` apply;
        None defaults to level 0 (MultiIndex case only)

    Returns
    -------
    DataFrame
        the sliced, sorted dataframe; levels reduced to a single value are
        dropped from the index
    """
    names = df.index.names
    num_indices = len(names)
    if num_indices == 1:
        # no Multiindex: values can be passed straight to .loc
        return df.loc[values, :]
    else:
        # Multiindex
        if levels is None:
            print("Input levels not defined for MultiIndex, assuming 0")
            levels = [0]
        elif isinstance(levels, str): #not a list
            levels, values = [levels], [values]
        else: #not a string and not None, so either a list or a number (can be checked using iter())
            try:
                iter(levels)
            except:
                #input is single level / value pair
                # NOTE(review): bare except also swallows KeyboardInterrupt;
                # `except TypeError` would be the precise guard here
                levels, values = [levels], [values]
        if isinstance(levels[0], str):
            # translate level names into positional level numbers
            level_nums = [names.index(x) for x in levels]
        else:
            level_nums = levels
        # build a per-level indexer: requested values at the selected levels,
        # slice(None) ("take everything") at all other levels
        indexer = []
        for idx in range(len(names)):
            if idx in level_nums:
                pos = level_nums.index(idx)
                indexer.append(values[pos])
            else:
                indexer.append(slice(None))
        df = df.loc[tuple(indexer), :]
        # drop index levels that were reduced to a single value
        # NOTE(review): len(values[i]) assumes each entry is a container; a
        # scalar value would raise TypeError here -- confirm caller contract
        for i, level in enumerate(levels):
            if len(values[i]) == 1:
                df.index = df.index.droplevel(level)
        df.sort_index(inplace=True)
        return df
def perform_analysis(vars_to_analyse, model_id, obs_id, years, filter_name,
                     ts_type_setup, out_basedir=None, logfile=None,
                     reanalyse_existing=False):
    """Run the model/obs collocation, dispatching on the obs data type.

    First tries to interpret ``obs_id`` as an ungridded network; if the
    network is not supported, falls back to gridded/gridded collocation.
    Matplotlib interactive mode is switched off for the duration of the run.
    """
    plt.ioff()
    try:
        try:
            pya.io.ReadUngridded(obs_id)
            _run_gridded_ungridded(vars_to_analyse, model_id, obs_id, years,
                                   filter_name, ts_type_setup, out_basedir,
                                   logfile, reanalyse_existing)
        except pya.exceptions.NetworkNotSupported:
            _run_gridded_gridded(vars_to_analyse, model_id, obs_id, years,
                                 filter_name, ts_type_setup, out_basedir,
                                 logfile, reanalyse_existing)
    finally:
        # BUGFIX: restore interactive plotting even if the analysis raises
        plt.ion()
def _run_gridded_ungridded(vars_to_analyse, model_id, obs_id, years, filter_name,
                           ts_type_setup, out_basedir=None, logfile=None,
                           reanalyse_existing=False):
    """Collocate gridded model data with an ungridded observation network.

    For every matching year / read-frequency / variable combination a
    :class:`pya.CollocatedData` object is computed, stored as netCDF in the
    model output directory and visualised as a scatter plot. Existing result
    files are skipped unless ``reanalyse_existing`` is True. Progress and
    skip messages are written to ``logfile`` (writable file-like) if given.
    """
    # all temporal resolutions that are supposed to be read
    dirs = check_prepare_dirs(out_basedir, model_id)
    obs_reader = pya.io.ReadUngridded()
    obs_data = obs_reader.read(obs_id, vars_to_analyse)
    # ordered list of supported frequencies; index order is used below to
    # decide which analysis frequencies are coarser than the read frequency
    ts_types = pya.const.GRID_IO.TS_TYPES
    model_reader = pya.io.ReadGridded(model_id)
    # variables requested AND provided by the model AND present in the obs data
    var_matches = list(reduce(np.intersect1d, (vars_to_analyse,
                                               model_reader.vars_provided,
                                               obs_data.contains_vars)))
    if len(var_matches) == 0:
        raise pya.exceptions.DataCoverageError('No variable matches between '
                                               '{} and {} for input vars: {}'
                                               .format(model_id, obs_id,
                                                       vars_to_analyse))
    year_matches = list(np.intersect1d(years, model_reader.years))
    if len(year_matches) == 0:
        raise pya.exceptions.DataCoverageError('No year matches between '
                                               '{} and {} for input vars: {}'
                                               .format(model_id, obs_id,
                                                       vars_to_analyse))
    ts_type_matches, ts_type_setup = prepare_ts_types(model_reader,
                                                      ts_type_setup)
    if len(ts_type_matches) == 0:
        raise pya.exceptions.DataCoverageError('No ts_type matches between '
                                               '{} and {} for input vars: {}'
                                               .format(model_id, obs_id,
                                                       vars_to_analyse))
    for year in year_matches:
        start, stop = start_stop_from_year(year)
        for ts_type in ts_type_matches:
            ts_types_ana = ts_type_setup[ts_type]
            model_reader.read(var_matches,
                              start_time=year,
                              ts_type=ts_type,
                              flex_ts_type=False)
            if len(model_reader.data) == 0:
                if logfile:
                    logfile.write('No model data available ({}, {})\n'.format(year,
                                  ts_type))
                continue
            for var, model_data in model_reader.data.items():
                # NOTE(review): availability is checked against the reader
                # cache (obs_reader.data), not obs_data -- confirm intended
                if not var in obs_reader.data:
                    if logfile:
                        logfile.write('No obs data available ({}, {})\n'.format(year,
                                      ts_type))
                    continue
                for ts_type_ana in ts_types_ana:
                    # only aggregate to equal or coarser resolutions
                    if ts_types.index(ts_type_ana) >= ts_types.index(ts_type):
                        out_dir = dirs['data']
                        savename = colldata_save_name(model_data,
                                                      model_id,
                                                      obs_id,
                                                      ts_type_ana,
                                                      filter_name,
                                                      start,
                                                      stop)
                        file_exists = check_colldata_exists(out_dir,
                                                            savename)
                        if file_exists:
                            if not reanalyse_existing:
                                if logfile:
                                    logfile.write('SKIP: {}\n'.format(savename))
                                continue
                            else:
                                # reanalysis requested: remove stale result
                                os.remove(os.path.join(out_dir, savename))
                        data_coll = pya.collocation.collocate_gridded_ungridded_2D(
                                model_data, obs_data,
                                ts_type=ts_type_ana,
                                start=start, stop=stop,
                                filter_name=filter_name)
                        data_coll.to_netcdf(out_dir)
                        save_name_fig = data_coll.save_name_aerocom + '_SCAT.png'
                        if logfile:
                            logfile.write('WRITE: {}\n'.format(savename))
                        data_coll.plot_scatter(savefig=True,
                                               save_dir=dirs['scatter_plots'],
                                               save_name=save_name_fig)
                        # free the figures opened by plot_scatter
                        plt.close('all')
def _run_gridded_gridded(vars_to_analyse, model_id, obs_id, years, filter_name,
                         ts_type_setup, out_basedir=None, logfile=None,
                         reanalyse_existing=False):
    """Collocate gridded model data with gridded observation data.

    Mirrors :func:`_run_gridded_ungridded`, but reads the observations with
    :class:`pya.io.ReadGridded` as well and therefore also intersects the
    available years of both readers. Results are written as netCDF plus a
    scatter plot per collocation; existing files are skipped unless
    ``reanalyse_existing`` is True.
    """
    # all temporal resolutions that are supposed to be read
    dirs = check_prepare_dirs(out_basedir, model_id)
    ts_types = pya.const.GRID_IO.TS_TYPES
    model_reader = pya.io.ReadGridded(model_id)
    obs_reader = pya.io.ReadGridded(obs_id)
    # variables requested AND provided by the model AND by the obs reader
    var_matches = list(reduce(np.intersect1d, (vars_to_analyse,
                                               model_reader.vars_provided,
                                               obs_reader.vars)))
    if len(var_matches) == 0:
        raise pya.exceptions.DataCoverageError('No variable matches between '
                                               '{} and {} for input vars: {}'
                                               .format(model_id, obs_id,
                                                       vars_to_analyse))
    year_matches = list(reduce(np.intersect1d, (years,
                                                model_reader.years,
                                                obs_reader.years)))
    if len(year_matches) == 0:
        raise pya.exceptions.DataCoverageError('No year matches between '
                                               '{} and {} for input vars: {}'
                                               .format(model_id, obs_id,
                                                       vars_to_analyse))
    ts_type_matches, ts_type_setup = prepare_ts_types(model_reader,
                                                      ts_type_setup)
    if len(ts_type_matches) == 0:
        raise pya.exceptions.DataCoverageError('No ts_type matches between '
                                               '{} and {} for input vars: {}'
                                               .format(model_id, obs_id,
                                                       vars_to_analyse))
    for year in year_matches:
        start, stop = start_stop_from_year(year)
        for ts_type in ts_type_matches:
            ts_types_ana = ts_type_setup[ts_type]
            # reads only year if starttime is provided but not stop time
            model_reader.read(var_matches,
                              start_time=year,
                              ts_type=ts_type,
                              flex_ts_type=False)
            # obs may be read at a flexible (nearest available) frequency
            obs_reader.read(var_matches, start_time=year,
                            ts_type = ts_type,
                            flex_ts_type=True)
            if len(model_reader.data) == 0:
                if logfile:
                    logfile.write('No model data available ({}, {})\n'.format(year,
                                  ts_type))
                continue
            for var, model_data in model_reader.data.items():
                if not var in obs_reader.data:
                    if logfile:
                        logfile.write('No obs data available ({}, {})\n'.format(year,
                                      ts_type))
                    continue
                for ts_type_ana in ts_types_ana:
                    # only aggregate to equal or coarser resolutions
                    if ts_types.index(ts_type_ana) >= ts_types.index(ts_type):
                        obs_data = obs_reader.data[var]
                        out_dir = dirs['data']
                        savename = colldata_save_name(model_data,
                                                      model_id,
                                                      obs_id,
                                                      ts_type_ana,
                                                      filter_name,
                                                      start,
                                                      stop)
                        file_exists = check_colldata_exists(out_dir,
                                                            savename)
                        if file_exists:
                            if not reanalyse_existing:
                                if logfile:
                                    logfile.write('SKIP: {}\n'.format(savename))
                                continue
                            else:
                                # reanalysis requested: remove stale result
                                os.remove(os.path.join(out_dir, savename))
                        data_coll = pya.collocation.collocate_gridded_gridded(
                                model_data, obs_data,
                                ts_type=ts_type_ana,
                                start=start, stop=stop,
                                filter_name=filter_name)
                        # sanity check: computed file name must match the
                        # precomputed one used for the existence check above
                        if data_coll.save_name_aerocom + '.nc' != savename:
                            raise Exception
                        data_coll.to_netcdf(out_dir)
                        save_name_fig = data_coll.save_name_aerocom + '_SCAT.png'
                        if logfile:
                            logfile.write('WRITE: {}\n'.format(savename))
                        data_coll.plot_scatter(savefig=True,
                                               save_dir=dirs['scatter_plots'],
                                               save_name=save_name_fig)
                        # free the figures opened by plot_scatter
                        plt.close('all')
def print_file(file_path):
    """Print every non-blank line of a text file.

    Raises
    ------
    IOError
        if ``file_path`` does not exist
    """
    if not os.path.exists(file_path):
        raise IOError('File not found...')
    with open(file_path) as fh:
        for line in fh:
            if not line.strip():
                continue
            print(line)
| [
"jonasgliss@gmail.com"
] | jonasgliss@gmail.com |
e1d28343bba645d8be668da7b073af3541987896 | 383d711b269aa42ec051a8300f9bad8cd3384de8 | /docker/models.py | 718aa7f04973c627897a573e40c8adb538b13cc7 | [] | no_license | Lupino/docker-server | 7af8dab451528704f470a19ae07fbd99afb47435 | 4a199e7e75dcf5ba5161a5373214bb03e8e2cf25 | refs/heads/master | 2021-01-10T19:30:42.888559 | 2014-04-01T07:23:22 | 2014-04-01T07:23:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,867 | py | from docker.conf import prefix
from lee import Model, query, Table, conf as lee_conf
from docker.logging import logger
class _Container(Model):
    """lee-ORM model for a managed docker container (table ``<prefix>container``)."""
    table_name = '{}container'.format(prefix)
    # column layout consumed by the lee ORM; container_id is the primary key,
    # ssh_port/server_port are host ports mapped into the container
    columns = [
        {'name': 'container_id', 'type': 'str', 'primary': True, 'length': 32},
        {'name': 'image_id', 'type': 'str', 'length': 32},
        {'name': 'passwd', 'type': 'str', 'length': 32},
        {'name': 'ssh_port', 'type': 'int', 'unsigned': True, 'length': 5, 'default': 0},
        {'name': 'server_port', 'type': 'int', 'unsigned': True, 'length': 5, 'default': 0},
        {'name': 'created_at', 'type': 'int', 'unsigned': True, 'length': 10, 'default': 0},
        {'name': 'stop_at', 'type': 'int', 'unsigned': True, 'length': 10, 'default': 0},
    ]

# table accessor used by the rest of the application
Container = Table(_Container)
class _UserContainer(Model):
    """Association between a user and a container (table ``<prefix>user_container``)."""
    table_name = '{}user_container'.format(prefix)
    # composite primary key; container_id is additionally unique, so each
    # container belongs to at most one user
    columns = [
        {'name': 'user_id', 'type': 'int', 'length': 10, 'unsigned': True, 'primary': True},
        {'name': 'container_id', 'type': 'str', 'length': 32, 'primary': True, 'unique': True}
    ]

UserContainer = Table(_UserContainer)
class _User(Model):
    """Registered user account (table ``<prefix>user``)."""
    table_name = '{}user'.format(prefix)
    # auto-increment surrogate key; username and email must be unique
    columns = [
        {'name': 'user_id', 'type': 'int', 'length': 10, 'unsigned': True, 'primary': True, 'auto_increment': True},
        {'name': 'username', 'type': 'str', 'length': 50, 'unique': True},
        {'name': 'passwd', 'type': 'str', 'length': 32},
        {'name': 'email', 'type': 'str', 'length': 100, 'unique': True}
    ]

User = Table(_User)
class Sequence(Model):
    """Named auto-increment counters (table ``sequence``), namespaced by ``prefix``."""
    table_name = 'sequence'
    columns = [
        {'name': 'name', 'type': 'str', 'primary': True, 'length': 20},
        {'name': 'id', 'type': 'int', 'default': 0}
    ]

    @query(autocommit=True)
    def next(self, name, cur):
        """Increment and return the counter value for *name*.

        ``cur`` is a DB cursor injected by the ``@query`` decorator
        (presumably -- verify against lee's implementation).
        """
        name = '{}:{}'.format(prefix, name)
        last_id = 0
        if lee_conf.use_mysql:
            # MySQL path: single-statement upsert; LAST_INSERT_ID(...) makes
            # the incremented value available via cur.lastrowid
            sql = 'INSERT INTO `sequence` (`name`) VALUES (?) ON DUPLICATE KEY UPDATE `id` = LAST_INSERT_ID(`id` + 1)'
            args = (name, )
            logger.debug('Query> SQL: %s | ARGS: %s'%(sql, args))
            cur.execute(sql, args)
            last_id = cur.lastrowid
        else:
            # Fallback path: read-modify-write via the ORM.
            # NOTE(review): this is not safe under concurrent increments
            # unless the decorator serializes the transaction, and the value
            # returned here should be compared against the MySQL branch
            # (which yields the post-increment id) -- confirm both agree.
            seq = self._table.find_by_id(name)
            if seq:
                sql = 'UPDATE `sequence` SET `id` = `id` + 1 WHERE `name` = ?'
                args = (name, )
                logger.debug('Query> SQL: %s | ARGS: %s'%(sql, args))
                cur.execute(sql, args)
            else:
                self._table.save({'name': name})
            seq = self._table.find_by_id(name)
            last_id = seq['id']
        return last_id

    def save(self, name, id):
        """Set the counter for *name* to *id* (name is prefixed first)."""
        name = '{}:{}'.format(prefix, name)
        return self._table.save({'name': name, 'id': id})

# module-level singleton accessor for the sequence table
seq = Table(Sequence)()
| [
"lmjubuntu@gmail.com"
] | lmjubuntu@gmail.com |
030b62f7d497bdfb140784be25d63d997ab83331 | ab453e04b34eb8d510ef8c1663cd6e3da4d67c71 | /tests/urls.py | 187d1394df4ee85b5a91332eb3f8fe5e11e3b026 | [
"MIT"
] | permissive | gbere/django-template-obfuscator | 400d2fab5a17bf534938fb195d625e6477133479 | 5211bda5b9f37e8da6a44425f0362837e9d5a6ad | refs/heads/master | 2020-07-29T12:57:50.466308 | 2019-09-17T15:35:01 | 2019-09-17T15:35:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from django.conf.urls import url, include
urlpatterns = [
    # Route all paths to the app under test, namespaced for reverse() lookups.
    url(r'^', include('django_template_obfuscator.urls', namespace='django_template_obfuscator')),
]
| [
"rafahuelin@gmail.com"
] | rafahuelin@gmail.com |
454d744eedb4d7ef6400ff1daf55405c7d179bc0 | feb2ad26f596045ddccf8a36b514fb0460a37e01 | /expression_data/data/models.py | dcdcdcd4988deac32f133e4a6f8e228f877dc7bc | [
"BSD-2-Clause"
] | permissive | lamarck2008/expression-data-server | 9a06de7bd3f69cfe92dcf9d7400715e8096d2c1c | 7f70fd5d5a9569a315716c389f828b17a487fdbc | refs/heads/master | 2021-01-16T20:24:14.289633 | 2012-11-19T02:52:06 | 2012-11-19T02:52:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,797 | py | '''These models control the data saved into the database for a given experiment.
There is a generic base class named Data, which is then further subclassed into specific data models.
'''
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from genes.models import Gene
class BaseData(models.Model):
    '''This is the abstract base class for all data objects.

    This model contains data for a given :class:`~experiments.models.mRNASeqExperiment` or :class:`~experiments.models.MicroArrayExperiment`.
    The experiment is defined by a Generic ForeignKey to one of those two :class:`~experiments.models.Experiment` objects.
    '''

    # Generic foreign key to the experiment: the content type is restricted
    # to the two supported experiment models via limit_choices_to.
    experiment_type_choices = models.Q(app_label = 'experiments', model = 'mrnaseqexperiment') | models.Q(app_label = 'experiments', model = 'microarrayexperiment')
    experiment_type = models.ForeignKey(ContentType, limit_choices_to = experiment_type_choices, help_text="Experiment Type")
    experiment_id = models.PositiveIntegerField()
    experiment = generic.GenericForeignKey('experiment_type', 'experiment_id')

    gene = models.ForeignKey(Gene, help_text="The gene for these data.")

    def __unicode__(self):
        '''The unicode representation is the name.'''
        return "%s" % self.gene

    class Meta:
        '''This is an abstract model.'''
        abstract = True
class GeneExperimentData(BaseData):
    '''These data are for gene-level data, aggregated per experiment.

    These data can be used with :class:`~experiments.models.mRNASeqExperiment` or :class:`~experiments.models.MicroArrayExperiment` experiments.
    This is an extension of the abstract base model :class:`data.models.BaseData`.
    The fields in this model are based on the columns in the gene_exp.diff from cufflinks. See http://cufflinks.cbcb.umd.edu/manual.html#cuffdiff_output for more details.
    The required fields are **gene**, **experiment**, **fold_change**, **p_value** and **q_value**.
    '''
    # Optional descriptive fields (nullable); required statistics fields are
    # fold_change, p_value and q_value below.
    locus = models.CharField(max_length=20, blank=True, null=True, help_text="Chromosomal location of this gene.")
    internal_id = models.CharField(max_length=20, blank=True, null=True, help_text="The probe id, or internal identification code for this gene.")
    sample_1 = models.CharField(max_length=20, blank=True, null=True, help_text="The name of the first group in the comparason.")
    sample_2 = models.CharField(max_length=20, blank=True, null=True, help_text="The name of the second group in the comparason.")
    amount_1 = models.DecimalField(max_digits=15, decimal_places=6, blank=True, null=True, help_text="The amount in the first group.")
    amount_2 = models.DecimalField(max_digits=15, decimal_places=6, blank=True, null=True, help_text="The amount in the second group.")
    status = models.CharField(max_length=20, blank=True, null=True, help_text="The status code of the test.")
    fold_change = models.FloatField(help_text="The log(2) fold change.")
    test_statistic = models.FloatField(blank=True, null=True, help_text="The value of the test statistic used to compute significance.")
    p_value = models.DecimalField(max_digits=9, decimal_places=8, help_text="Unadjusted p-value.")
    q_value = models.DecimalField(max_digits=9, decimal_places=8, help_text="Multiple Comparason Adjusted p-value (Typically FDR)")
    significant = models.CharField(max_length=3, blank=True, null=True, help_text="Is the q-value < 0.05?")

    class Meta:
        '''Updated the verbose name of the datum.'''
        verbose_name_plural = 'Experiment Level Data for a Gene'
        verbose_name = 'Experiment Level Datum for a Gene'
| [
"dave.bridges@gmail.com"
] | dave.bridges@gmail.com |
3d90cc6849daaac96ce8740ed0e1ae26e39fc671 | e88a1c909ee3c60e0e74fea09d7ed2d4ce545d89 | /settings.py | 148eac4271656b1a9c4ca60ee0a7f5330eb678da | [] | no_license | Faulik/jumpdie-backend | b74c042edd6f2e3d2bd870e395bfeae22370fa5c | 292e7981c429060204c0f789b53f17288671625b | refs/heads/master | 2021-01-23T21:35:22.129175 | 2015-08-19T08:47:09 | 2015-08-19T08:47:09 | 39,410,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,785 | py | """
Django settings for djchat project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(__file__)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'fux9z2i)6ab$b_5*^z@96hdtqfj5=ct7b)m6_6cfrr5g%x#=81'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'pulsar.apps.pulse',
'djchat',
'game',
'registration'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'djchat.views.middleware',
'game.views.middleware'
)
ROOT_URLCONF = 'djchat.urls'
WSGI_APPLICATION = 'djchat.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'django_db',
'USER': 'django_db',
'PASSWORD': 'whynot',
'HOST': '0.0.0.0', # Empty for localhost through
# domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = 'static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static/")
)
#Registration
ACCOUNT_ACTIVATION_DAYS = 7 # One-week activation window; you may, of course, use a different value.
REGISTRATION_AUTO_LOGIN = True # Automatically log the user in.
LOGIN_REDIRECT_URL = '/'
REGISTRATION_EMAIL_HTML = False
| [
"faullik@gmail.com"
] | faullik@gmail.com |
f4b7ae8e9946c91cded7fe2092eda6da7b6a3cdf | 4090d8b4e8e9e28d620d222651c73a12a753be36 | /contextadv/migrations/0006_alter_contextadvertisementdescription_description.py | d762b37198cad99a6353794de8fe7074771fc939 | [] | no_license | isaev4lex/220studio | 91aa08f9d10ff55e98effe2542e26799efb6e2f2 | 6188403eeed7ee590b21da15c67af9e6f06ab06b | refs/heads/main | 2023-08-20T07:14:18.203593 | 2021-10-31T07:24:19 | 2021-10-31T07:24:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 522 | py | # Generated by Django 3.2.4 on 2021-08-05 12:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contextadv', '0005_metatags'),
]
operations = [
migrations.AlterField(
model_name='contextadvertisementdescription',
name='description',
field=models.TextField(verbose_name='Описание инструмента\n\n(для переноса строки использовать <br>)'),
),
]
| [
"FWorld21@protonmail.com"
] | FWorld21@protonmail.com |
358be1517a6567c187fc0c758e6e8ce6b61d5ae6 | 0a1356b97465cc1d5c3f661f61b3b8c51fb05d46 | /android_binding/.buildozer/android/platform/build-armeabi-v7a/build/other_builds/hostpython3/desktop/hostpython3/Tools/msi/make_zip.py | 58f3b15ef8524e3b3487ec688380a8d5b9de0e2c | [
"GPL-1.0-or-later",
"Python-2.0",
"MIT"
] | permissive | Rohan-cod/cross_platform_calc | 00360f971e4da68dd36d6836c9ddbb157f6b77d5 | 5785a5e8150d174019b330c812e7eb012cc4dd79 | refs/heads/master | 2022-12-22T10:29:05.317051 | 2021-06-05T10:52:44 | 2021-06-05T10:52:44 | 237,465,912 | 2 | 1 | MIT | 2022-12-09T05:18:55 | 2020-01-31T16:07:31 | C | UTF-8 | Python | false | false | 7,729 | py | import argparse
import py_compile
import re
import sys
import shutil
import stat
import os
import tempfile
from itertools import chain
from pathlib import Path
from zipfile import ZipFile, ZIP_DEFLATED
TKTCL_RE = re.compile(r'^(_?tk|tcl).+\.(pyd|dll)', re.IGNORECASE)
DEBUG_RE = re.compile(r'_d\.(pyd|dll|exe|pdb|lib)$', re.IGNORECASE)
PYTHON_DLL_RE = re.compile(r'python\d\d?\.dll$', re.IGNORECASE)
DEBUG_FILES = {
'_ctypes_test',
'_testbuffer',
'_testcapi',
'_testconsole',
'_testimportmultiple',
'_testmultiphase',
'xxlimited',
'python3_dstub',
}
EXCLUDE_FROM_LIBRARY = {
'__pycache__',
'idlelib',
'pydoc_data',
'site-packages',
'tkinter',
'turtledemo',
}
EXCLUDE_FROM_EMBEDDABLE_LIBRARY = {
'ensurepip',
'venv',
}
EXCLUDE_FILE_FROM_LIBRARY = {
'bdist_wininst.py',
}
EXCLUDE_FILE_FROM_LIBS = {
'liblzma',
'python3stub',
}
EXCLUDED_FILES = {
'pyshellext',
}
def is_not_debug(p):
if DEBUG_RE.search(p.name):
return False
if TKTCL_RE.search(p.name):
return False
return p.stem.lower() not in DEBUG_FILES and p.stem.lower() not in EXCLUDED_FILES
def is_not_debug_or_python(p):
return is_not_debug(p) and not PYTHON_DLL_RE.search(p.name)
def include_in_lib(p):
name = p.name.lower()
if p.is_dir():
if name in EXCLUDE_FROM_LIBRARY:
return False
if name == 'test' and p.parts[-2].lower() == 'lib':
return False
if name in {'test', 'tests'} and p.parts[-3].lower() == 'lib':
return False
return True
if name in EXCLUDE_FILE_FROM_LIBRARY:
return False
suffix = p.suffix.lower()
return suffix not in {'.pyc', '.pyo', '.exe'}
def include_in_embeddable_lib(p):
if p.is_dir() and p.name.lower() in EXCLUDE_FROM_EMBEDDABLE_LIBRARY:
return False
return include_in_lib(p)
def include_in_libs(p):
if not is_not_debug(p):
return False
return p.stem.lower() not in EXCLUDE_FILE_FROM_LIBS
def include_in_tools(p):
if p.is_dir() and p.name.lower() in {'scripts', 'i18n', 'pynche', 'demo', 'parser'}:
return True
return p.suffix.lower() in {'.py', '.pyw', '.txt'}
BASE_NAME = 'python{0.major}{0.minor}'.format(sys.version_info)
FULL_LAYOUT = [
('/', '$build', 'python.exe', is_not_debug),
('/', '$build', 'pythonw.exe', is_not_debug),
('/', '$build', 'python{}.dll'.format(sys.version_info.major), is_not_debug),
('/', '$build', '{}.dll'.format(BASE_NAME), is_not_debug),
('DLLs/', '$build', '*.pyd', is_not_debug),
('DLLs/', '$build', '*.dll', is_not_debug_or_python),
('include/', 'include', '*.h', None),
('include/', 'PC', 'pyconfig.h', None),
('Lib/', 'Lib', '**/*', include_in_lib),
('libs/', '$build', '*.lib', include_in_libs),
('Tools/', 'Tools', '**/*', include_in_tools),
]
EMBED_LAYOUT = [
('/', '$build', 'python*.exe', is_not_debug),
('/', '$build', '*.pyd', is_not_debug),
('/', '$build', '*.dll', is_not_debug),
('{}.zip'.format(BASE_NAME), 'Lib', '**/*', include_in_embeddable_lib),
]
if os.getenv('DOC_FILENAME'):
FULL_LAYOUT.append(('Doc/', 'Doc/build/htmlhelp', os.getenv('DOC_FILENAME'), None))
if os.getenv('VCREDIST_PATH'):
FULL_LAYOUT.append(('/', os.getenv('VCREDIST_PATH'), 'vcruntime*.dll', None))
EMBED_LAYOUT.append(('/', os.getenv('VCREDIST_PATH'), 'vcruntime*.dll', None))
def copy_to_layout(target, rel_sources):
count = 0
if target.suffix.lower() == '.zip':
if target.exists():
target.unlink()
with ZipFile(str(target), 'w', ZIP_DEFLATED) as f:
with tempfile.TemporaryDirectory() as tmpdir:
for s, rel in rel_sources:
if rel.suffix.lower() == '.py':
pyc = Path(tmpdir) / rel.with_suffix('.pyc').name
try:
py_compile.compile(str(s), str(pyc), str(rel), doraise=True, optimize=2)
except py_compile.PyCompileError:
f.write(str(s), str(rel))
else:
f.write(str(pyc), str(rel.with_suffix('.pyc')))
else:
f.write(str(s), str(rel))
count += 1
else:
for s, rel in rel_sources:
dest = target / rel
try:
dest.parent.mkdir(parents=True)
except FileExistsError:
pass
if dest.is_file():
dest.chmod(stat.S_IWRITE)
shutil.copy(str(s), str(dest))
if dest.is_file():
dest.chmod(stat.S_IWRITE)
count += 1
return count
def rglob(root, pattern, condition):
dirs = [root]
recurse = pattern[:3] in {'**/', '**\\'}
while dirs:
d = dirs.pop(0)
for f in d.glob(pattern[3:] if recurse else pattern):
if recurse and f.is_dir() and (not condition or condition(f)):
dirs.append(f)
elif f.is_file() and (not condition or condition(f)):
yield f, f.relative_to(root)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', metavar='dir', help='The directory containing the repository root', type=Path)
parser.add_argument('-o', '--out', metavar='file', help='The name of the output archive', type=Path, default=None)
parser.add_argument('-t', '--temp', metavar='dir', help='A directory to temporarily extract files into', type=Path, default=None)
parser.add_argument('-e', '--embed', help='Create an embedding layout', action='store_true', default=False)
parser.add_argument('-b', '--build', help='Specify the build directory', type=Path, default=None)
ns = parser.parse_args()
source = ns.source or (Path(__file__).resolve().parent.parent.parent)
out = ns.out
build = ns.build or Path(sys.exec_prefix)
assert isinstance(source, Path)
assert not out or isinstance(out, Path)
assert isinstance(build, Path)
if ns.temp:
temp = ns.temp
delete_temp = False
else:
temp = Path(tempfile.mkdtemp())
delete_temp = True
if out:
try:
out.parent.mkdir(parents=True)
except FileExistsError:
pass
try:
temp.mkdir(parents=True)
except FileExistsError:
pass
layout = EMBED_LAYOUT if ns.embed else FULL_LAYOUT
try:
for t, s, p, c in layout:
if s == '$build':
fs = build
else:
fs = source / s
files = rglob(fs, p, c)
extra_files = []
if s == 'Lib' and p == '**/*':
extra_files.append((
source / 'tools' / 'msi' / 'distutils.command.bdist_wininst.py',
Path('distutils') / 'command' / 'bdist_wininst.py'
))
copied = copy_to_layout(temp / t.rstrip('/'), chain(files, extra_files))
print('Copied {} files'.format(copied))
if ns.embed:
with open(str(temp / (BASE_NAME + '._pth')), 'w') as f:
print(BASE_NAME + '.zip', file=f)
print('.', file=f)
print('', file=f)
print('# Uncomment to run site.main() automatically', file=f)
print('#import site', file=f)
if out:
total = copy_to_layout(out, rglob(temp, '**/*', None))
print('Wrote {} files to {}'.format(total, out))
finally:
if delete_temp:
shutil.rmtree(temp, True)
if __name__ == "__main__":
sys.exit(int(main() or 0))
| [
"rohaninjmu@gmail.com"
] | rohaninjmu@gmail.com |
6cd2f5bcb891ae0f058d81069997a2dfc4b61e90 | c1fabf9660cf8cd05ee1d6343d4e5be3f37d069a | /plot_bias_term.py | dc12f437e83d3d1126061d0a7c43aa84369241a6 | [] | no_license | DanielHolmelund/Learning-and-Visualizing-Bipartite-Network-Embeddings | 087175747981d411af6e291b64c621eff5d45603 | 19138b3a8bb6e0d6f8b0f88398f159a4c0635fee | refs/heads/master | 2023-05-31T12:47:41.392938 | 2021-06-21T20:00:40 | 2021-06-21T20:00:40 | 337,565,110 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,861 | py | """
PLot the latent embedding together with the bias values.
"""
import matplotlib.pyplot as plt
import pandas as pd
import torch
import csv
import seaborn as sns
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
method = "torch" #set datatype for files "torch" or "csv"
#Importing gene names as a list
with open('Datasets/Single_cell/critical_period_genes.csv', newline='') as f:
reader = csv.reader(f)
data = list(reader)
data = data[1:]
#Getting index for most common:
idx = []
names = [["Entpd2"], ["Gm4577"], ["Kcnip4"]]
for j in range(len(names)):
index_active = data.index(names[j])
idx.append(index_active)
### Load in the embeddings
iteration = 28000
embeddings_filename_i = f"results/embedding/latent_i_{iteration}"
embeddings_filename_j = f"results/embedding/latent_j_{iteration}"
beta_file = f"results/embedding/beta_{iteration}"
gamma_file = f"results/embedding/gamma_{iteration}"
if method == "torch":
data = torch.load(embeddings_filename_i)
latent_i = data.cpu().data.numpy()
data = torch.load(embeddings_filename_j)
latent_j = data.cpu().data.numpy()
data = torch.load(beta_file)
beta = data.cpu().data.numpy()
data = torch.load(gamma_file)
gamma = data.cpu().data.numpy()
else:
#latent_i = np.genfromtxt(embeddings_filename_i, delimiter = "\n")
#latent_j = np.genfromtxt(embeddings_filename_j, delimiter = " ")
latent_i = pd.read_csv(embeddings_filename_i).to_numpy()
latent_j = pd.read_csv(embeddings_filename_j).to_numpy()
cmap = sns.color_palette("viridis", as_cmap = True)
f, ax = plt.subplots()
points = ax.scatter(latent_i[:, 0], latent_i[:, 1], s=0.2, c = beta, cmap = cmap)
f.colorbar(points)
plt.show()
f, ax = plt.subplots()
points = ax.scatter(latent_j[:, 0], latent_j[:, 1], s=0.2, c = gamma, cmap = cmap)
f.colorbar(points)
plt.show()
| [
"noreply@github.com"
] | DanielHolmelund.noreply@github.com |
253b6652ddac0a3ffbcf6e0fd96dfc8abecaf9b8 | a3bb97955ad28e8c83a23e4466bb5352ee86847d | /revision/apps/public/forms.py | 9b3b57cd9930137d58592f723e09c96bb6e411bb | [] | no_license | rosscdh/revision | 23ac75385cca5b44032ff2637eb635fa954bb2ec | 090fb2a82072c5570d89878c6f506dd22d5c5ed5 | refs/heads/master | 2016-09-05T10:53:33.652493 | 2014-11-29T10:57:41 | 2014-11-29T10:57:41 | 23,582,177 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,484 | py | # -*- coding: utf-8 -*-
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from django.core.urlresolvers import reverse_lazy
from parsley.decorators import parsleyfy
from crispy_forms.helper import FormHelper, Layout
from crispy_forms.layout import ButtonHolder, Div, Field, Fieldset, HTML, Submit
from revision.utils import _get_unique_username
import logging
logger = logging.getLogger('django.request')
@parsleyfy
class SignUpForm(forms.Form):
username = forms.CharField(
required=False,
widget=forms.HiddenInput
)
first_name = forms.CharField(
error_messages={
'required': "First name can't be blank."
},
label='',
max_length=30,
widget=forms.TextInput(attrs={'placeholder': 'First name'})
)
last_name = forms.CharField(
error_messages={
'required': "Last name can't be blank."
},
label='',
max_length=30,
widget=forms.TextInput(attrs={'placeholder': 'Last name'})
)
email = forms.EmailField(
error_messages={
'invalid': "Email is invalid.",
'required': "Email can't be blank."
},
label='',
max_length=75,
widget=forms.EmailInput(attrs={'placeholder': 'Email address', 'autocomplete': 'off'})
)
password = forms.CharField(
error_messages={
'required': "Password can't be blank."
},
label='',
widget=forms.PasswordInput(attrs={'placeholder': 'Password'})
)
password_confirm = forms.CharField(
error_messages={
'required': "Confirm password can't be blank."
},
label='',
widget=forms.PasswordInput(attrs={'placeholder': 'Password again'})
)
t_and_c = forms.BooleanField(
error_messages={
'required': "You must agree to the Terms and Conditions."
},
initial=False,
label='I agree to the Terms and Conditions.',
required=True
)
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.attrs = {
'id': 'signup-form',
'parsley-validate': ''
}
self.helper.form_show_errors = False
self.helper.layout = Layout(
HTML('{% include "partials/form-errors.html" with form=form %}'),
Fieldset(
'',
Div(
Field('first_name', css_class=''),
Field('last_name', css_class=''),
css_class='form-name clearfix'
),
Field('email'),
Field('password'),
Field('password_confirm'),
Field('t_and_c', template='partials/t_and_c.html'),
),
ButtonHolder(
Submit('submit', 'Create Account')
)
)
super(SignUpForm, self).__init__(*args, **kwargs)
# Override the label with a link to the terms (can't go higher as the urls aren't loaded yet)
self.fields['t_and_c'].label = 'I agree to the <a href="%s" target="_blank">Terms and Conditions</a>.' % reverse_lazy('public:terms')
def clean_username(self):
final_username = self.data.get('email').split('@')[0]
final_username = _get_unique_username(username=final_username)
logger.info('Username %s available' % final_username)
return final_username
def clean_password_confirm(self):
password_confirm = self.cleaned_data.get('password_confirm')
password = self.cleaned_data.get('password')
if password != password_confirm:
raise forms.ValidationError("The two password fields didn't match.")
return password_confirm
def clean_email(self):
"""
Ensure the email is normalised
"""
email = User.objects.normalize_email(self.cleaned_data.get('email'))
user = User.objects.filter(email=email).first()
if user is None:
return email
else:
#
# NOTE! We cant be specific about the email in use as a message here as
# it could be used to determine if that email address exists (which it does
# and its prety clear but making the text a bit less specific may put them off)
#
raise forms.ValidationError("Sorry, but you cant use that email address.")
def save(self):
user = User.objects.create_user(self.cleaned_data.get('username'),
self.cleaned_data.get('email'),
self.cleaned_data.get('password'),
first_name=self.cleaned_data.get('first_name'),
last_name=self.cleaned_data.get('last_name'))
return user
@parsleyfy
class SignInForm(forms.Form):
email = forms.EmailField(
error_messages={
'required': "Email can't be blank."
},
label='',
widget=forms.EmailInput(attrs={'placeholder': 'Email address'})
)
password = forms.CharField(
error_messages={
'required': "Password can't be blank."
},
label='',
widget=forms.PasswordInput(attrs={'placeholder': 'Password'})
)
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.attrs = {
'parsley-validate': '',
}
self.helper.form_show_errors = False
self.helper.layout = Layout(
HTML('{% include "partials/form-errors.html" with form=form %}'),
Fieldset(
'',
Field('email', css_class='input-hg'),
Field('password', css_class='input-hg'),
),
ButtonHolder(
Submit('submit', 'Secure Sign In', css_class='btn btn-primary btn-lg')
)
)
super(SignInForm, self).__init__(*args, **kwargs)
def clean(self):
user = None
if 'email' in self.cleaned_data and 'password' in self.cleaned_data:
user = authenticate(username=self.cleaned_data['email'], password=self.cleaned_data['password'])
if user is None:
raise forms.ValidationError("Sorry, no account with those credentials was found.")
return super(SignInForm, self).clean()
| [
"ross@lawpal.com"
] | ross@lawpal.com |
4e009c93c039eb04670636eb123f6a973e479fd8 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-dbss/huaweicloudsdkdbss/v1/model/batch_delete_resource_tag_request.py | 951851e88d7c6383d31b3e128954862b7a8c1840 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 4,953 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class BatchDeleteResourceTagRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'resource_type': 'str',
'resource_id': 'str',
'body': 'ResourceTagRequest'
}
attribute_map = {
'resource_type': 'resource_type',
'resource_id': 'resource_id',
'body': 'body'
}
def __init__(self, resource_type=None, resource_id=None, body=None):
"""BatchDeleteResourceTagRequest
The model defined in huaweicloud sdk
:param resource_type: 资源类型。审计:auditInstance
:type resource_type: str
:param resource_id: 资源ID
:type resource_id: str
:param body: Body of the BatchDeleteResourceTagRequest
:type body: :class:`huaweicloudsdkdbss.v1.ResourceTagRequest`
"""
self._resource_type = None
self._resource_id = None
self._body = None
self.discriminator = None
self.resource_type = resource_type
self.resource_id = resource_id
if body is not None:
self.body = body
@property
def resource_type(self):
"""Gets the resource_type of this BatchDeleteResourceTagRequest.
资源类型。审计:auditInstance
:return: The resource_type of this BatchDeleteResourceTagRequest.
:rtype: str
"""
return self._resource_type
@resource_type.setter
def resource_type(self, resource_type):
"""Sets the resource_type of this BatchDeleteResourceTagRequest.
资源类型。审计:auditInstance
:param resource_type: The resource_type of this BatchDeleteResourceTagRequest.
:type resource_type: str
"""
self._resource_type = resource_type
@property
def resource_id(self):
"""Gets the resource_id of this BatchDeleteResourceTagRequest.
资源ID
:return: The resource_id of this BatchDeleteResourceTagRequest.
:rtype: str
"""
return self._resource_id
@resource_id.setter
def resource_id(self, resource_id):
"""Sets the resource_id of this BatchDeleteResourceTagRequest.
资源ID
:param resource_id: The resource_id of this BatchDeleteResourceTagRequest.
:type resource_id: str
"""
self._resource_id = resource_id
@property
def body(self):
"""Gets the body of this BatchDeleteResourceTagRequest.
:return: The body of this BatchDeleteResourceTagRequest.
:rtype: :class:`huaweicloudsdkdbss.v1.ResourceTagRequest`
"""
return self._body
@body.setter
def body(self, body):
"""Sets the body of this BatchDeleteResourceTagRequest.
:param body: The body of this BatchDeleteResourceTagRequest.
:type body: :class:`huaweicloudsdkdbss.v1.ResourceTagRequest`
"""
self._body = body
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, BatchDeleteResourceTagRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
a8e18bbc0e52088184d9e116b167dacf54bd45b6 | e3613f4e249fd9986c7a5e18e2e02ba2c4b9bf44 | /test/test_base.py | 896f2713d1cbcbb291d2124a5d989a024366c2d5 | [
"BSD-3-Clause"
] | permissive | FrancescoSaverioZuppichini/vcstools | b1b880d881ab8712d1fa76decde85575fb031c62 | 3ae59b1a428055f1be665e613d7a52c8431f97fb | refs/heads/master | 2020-03-31T03:27:40.089224 | 2013-06-14T23:17:14 | 2013-06-14T23:17:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,432 | py | from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
import io
import unittest
import tempfile
import shutil
from mock import Mock
import vcstools
from vcstools.vcs_base import VcsClientBase, VcsError
from vcstools.common import sanitized, normalized_rel_path, \
run_shell_command, urlretrieve_netrc, _netrc_open, urlopen_netrc
class BaseTest(unittest.TestCase):
def test_normalized_rel_path(self):
self.assertEqual(None, normalized_rel_path(None, None))
self.assertEqual('foo', normalized_rel_path(None, 'foo'))
self.assertEqual('/foo', normalized_rel_path(None, '/foo'))
self.assertEqual('../bar', normalized_rel_path('/bar', '/foo'))
self.assertEqual('../bar', normalized_rel_path('/bar', '/foo/baz/..'))
self.assertEqual('../bar', normalized_rel_path('/bar/bam/foo/../..', '/foo/baz/..'))
self.assertEqual('bar', normalized_rel_path('bar/bam/foo/../..', '/foo/baz/..'))
def test_sanitized(self):
self.assertEqual('', sanitized(None))
self.assertEqual('', sanitized(''))
self.assertEqual('"foo"', sanitized('foo'))
self.assertEqual('"foo"', sanitized('\"foo\"'))
self.assertEqual('"foo"', sanitized('"foo"'))
self.assertEqual('"foo"', sanitized('" foo"'))
try:
sanitized('bla"; foo"')
self.fail("Expected Exception")
except VcsError:
pass
try:
sanitized('bla";foo"')
self.fail("Expected Exception")
except VcsError:
pass
try:
sanitized('bla";foo \"bum')
self.fail("Expected Exception")
except VcsError:
pass
try:
sanitized('bla";foo;"bam')
self.fail("Expected Exception")
except VcsError:
pass
try:
sanitized('bla"#;foo;"bam')
self.fail("Expected Exception")
except VcsError:
pass
def test_shell_command(self):
self.assertEqual((0, "", None), run_shell_command("true"))
self.assertEqual((1, "", None), run_shell_command("false"))
self.assertEqual((0, "foo", None), run_shell_command("echo foo", shell = True))
(v, r, e ) = run_shell_command("[", shell = True)
self.assertFalse(v == 0)
self.assertFalse(e is None)
self.assertEqual(r, '')
(v, r, e ) = run_shell_command("echo foo && [", shell = True)
self.assertFalse(v == 0)
self.assertFalse(e is None)
self.assertEqual(r, 'foo')
# not a great test on a system where this is default
_, env_langs, _ = run_shell_command("/usr/bin/env |grep LANG=", shell = True, us_env = True)
self.assertTrue("LANG=en_US.UTF-8" in env_langs.splitlines())
try:
run_shell_command("two words")
self.fail("expected exception")
except: pass
def test_shell_command_verbose(self):
# just check no Exception happens due to decoding
run_shell_command("echo %s"%(b'\xc3\xa4'.decode('UTF-8')), shell=True, verbose=True)
run_shell_command(["echo", b'\xc3\xa4'.decode('UTF-8')], verbose=True)
def test_netrc_open(self):
root_directory = tempfile.mkdtemp()
machine = 'foo.org'
uri = 'https://%s/bim/bam' % machine
netrcname = os.path.join(root_directory, "netrc")
mock_build_opener = Mock()
mock_build_opener_fun = Mock()
mock_build_opener_fun.return_value = mock_build_opener
back_build_opener = vcstools.common.build_opener
try:
vcstools.common.build_opener = mock_build_opener_fun
filelike = _netrc_open(uri, netrcname)
self.assertFalse(filelike)
with open(netrcname, 'w') as fhand:
fhand.write(
'machine %s login fooname password foopass' % machine)
filelike = _netrc_open(uri, netrcname)
self.assertTrue(filelike)
filelike = _netrc_open('other', netrcname)
self.assertFalse(filelike)
filelike = _netrc_open(None, netrcname)
self.assertFalse(filelike)
finally:
shutil.rmtree(root_directory)
vcstools.common.build_opener = back_build_opener
def test_urlopen_netrc(self):
mockopen = Mock()
mock_result = Mock()
backopen = vcstools.common.urlopen
backget = vcstools.common._netrc_open
try:
#monkey-patch with mocks
vcstools.common.urlopen = mockopen
vcstools.common._netrc_open = Mock()
vcstools.common._netrc_open.return_value = mock_result
ioe = IOError('MockError')
mockopen.side_effect = ioe
self.assertRaises(IOError, urlopen_netrc, 'foo')
ioe.code = 401
result = urlopen_netrc('foo')
self.assertEqual(mock_result, result)
finally:
vcstools.common.urlopen = backopen
vcstools.common._netrc_open = backget
def test_urlretrieve_netrc(self):
root_directory = tempfile.mkdtemp()
examplename = os.path.join(root_directory, "foo")
outname = os.path.join(root_directory, "fooout")
with open(examplename, "w") as fhand:
fhand.write('content')
mockget = Mock()
mockopen = Mock()
mock_fhand = Mock()
backopen = vcstools.common.urlopen
backget = vcstools.common._netrc_open
try:
# vcstools.common.urlopen = mockopen
# vcstools.common.urlopen.return_value = mock_fhand
# mock_fhand.read.return_value = 'content'
mockopen.open.return_value
vcstools.common._netrc_open = Mock()
vcstools.common._netrc_open.return_value = mockget
(fname, headers) = urlretrieve_netrc('file://' + examplename)
self.assertTrue(fname)
self.assertFalse(os.path.exists(outname))
(fname, headers) = urlretrieve_netrc('file://' + examplename,
outname)
self.assertEqual(outname, fname)
self.assertTrue(os.path.isfile(outname))
finally:
vcstools.common.urlopen = backopen
vcstools.common._netrc_open = backget
shutil.rmtree(root_directory)
| [
"kruset@in.tum.de"
] | kruset@in.tum.de |
90ce17400257d8e886aa3c49973efb6bbe7e3d0f | 8830831a87f35ff2628f379d8230928ec6b5641a | /BNPParibas/code/gbc_deviance.py | f947f44609ebf50d5d1c3aa5f5f6442aa072e2f5 | [] | no_license | nickmcadden/Kaggle | e5882c9d68a81700d8d969328d91c059a0643868 | cbc5347dec90e4bf64d4dbaf28b8ffb362efc64f | refs/heads/master | 2019-07-18T08:09:40.683168 | 2018-01-26T14:35:38 | 2018-01-26T14:35:38 | 40,735,982 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,263 | py | import sys
import pandas as pd
import numpy as np
import scipy as sp
import xgboost as xgb
import data
import argparse
import pickle as pkl
from scipy import stats
from collections import OrderedDict
from sklearn.utils import shuffle
from sklearn.cross_validation import StratifiedShuffleSplit, KFold
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.utils import shuffle
def log_loss(act, pred):
""" Vectorised computation of logloss """
epsilon = 1e-15
pred = sp.maximum(epsilon, pred)
pred = sp.minimum(1-epsilon, pred)
ll = sum(act*sp.log(pred) + sp.subtract(1, act)*sp.log(sp.subtract(1, pred)))
ll = ll * -1.0/len(act)
return ll
parser = argparse.ArgumentParser(description='XGBoost for BNP')
parser.add_argument('-f','--n_features', help='Number of features', type=int, default=1000)
parser.add_argument('-n','--n_rounds', help='Number of iterations', type=int, default=350)
parser.add_argument('-e','--eta', help='Learning rate', type=float, default=0.0125)
parser.add_argument('-r','--r_seed', help='Set random seed', type=int, default=3)
parser.add_argument('-b','--minbin', help='Minimum categorical bin size', type=int, default=1)
parser.add_argument('-ct','--cat_trans', help='Category transformation method', type=str, default='std')
parser.add_argument('-cv','--cv', action='store_true')
parser.add_argument('-codetest','--codetest', action='store_true')
parser.add_argument('-getcached', '--getcached', action='store_true')
parser.add_argument('-extra', '--extra', action='store_true')
m_params = vars(parser.parse_args())
# Load data
X, y, X_sub, ids = data.load(m_params)
print("BNP Parabas: classification...\n")
clf = GradientBoostingClassifier(loss='deviance', learning_rate=m_params['eta'], n_estimators=m_params['n_rounds'], subsample=1, max_features= 35, min_samples_split= 4, max_depth = 12, min_samples_leaf= 2, verbose=2, random_state=1)
if m_params['cv']:
# do cross validation scoring
kf = KFold(X.shape[0], n_folds=4, shuffle=True, random_state=1)
scr = np.zeros([len(kf)])
oob_pred = np.zeros(X.shape[0])
sub_pred = np.zeros((X_sub.shape[0], 4))
for i, (tr_ix, val_ix) in enumerate(kf):
clf.fit(X[tr_ix], y[tr_ix])
pred = clf.predict_proba(X[val_ix])
oob_pred[val_ix] = np.array(pred[:,1])
sub_pred[:,i] = clf.predict_proba(X_sub)[:,1]
scr[i] = log_loss(y[val_ix], np.array(pred[:,1]))
print('Train score is:', scr[i])
print(log_loss(y, oob_pred))
print oob_pred[1:10]
sub_pred = sub_pred.mean(axis=1)
oob_pred_filename = '../output/oob_pred_gbcdeviance_' + str(np.mean(scr))
sub_pred_filename = '../output/sub_pred_gbcdeviance_' + str(np.mean(scr))
pkl.dump(oob_pred, open(oob_pred_filename + '.p', 'wb'))
pkl.dump(sub_pred, open(sub_pred_filename + '.p', 'wb'))
preds = pd.DataFrame({"ID": ids, "PredictedProb": sub_pred})
preds.to_csv(sub_pred_filename + '.csv', index=False)
else:
X, y = shuffle(X, y)
# Train on full data
print("Training on full data")
clf.fit(X,y)
print("Creating predictions")
pred = clf.predict_proba(X_sub)
print("Saving Results.")
model_name = '../output/pred_gbcdev_' + str(m_params['n_rounds'])
preds = pd.DataFrame({"ID": ids, "PredictedProb": pred[:,1]})
preds.to_csv(model_name + '.csv', index=False)
| [
"nmcadden@globalpersonals.co.uk"
] | nmcadden@globalpersonals.co.uk |
13783e3a2dddc08461ab600d7cbfcdd269974415 | c1dfeaf0d560198e60e03476fabdb33067b4680a | /xkcdpwgen.py | 4485035e94ec6f8e03e838eddb59865cb04c48cb | [] | no_license | amandadupell/passwordgeneratorproject | b80b4bcfd91f934541feaac30c5cd8f37fcb84fa | b3a13c14e51f4c7cc52a96c345021db6065f2b0c | refs/heads/master | 2020-07-23T03:37:56.003476 | 2019-09-10T01:14:19 | 2019-09-10T01:14:19 | 207,435,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,095 | py | #!/usr/local/bin/python3
import argparse
import random
wordlist = open("wordlist.txt", "r").readlines()
parser = argparse.ArgumentParser("Generate a secure, memorable password using the XKCD method")
parser.add_argument("-w", "--words", type=int, default=4, help="include WORDS words in the password (default=4)")
parser.add_argument("-c", "--caps", type=int, default=0,
help="capitalize the first letter of CAPS random words (default=0)")
parser.add_argument("-n", "--numbers", type=int, default=0,
help="insert NUMBERS random numbers in the password (default=0)")
parser.add_argument("-s", "--symbols", type=int, default=0,
help="insert SYMBOLS random symbols in the password (default=0)")
args = parser.parse_args()
numWords = args.words
numCaps = args.caps
numNums = args.numbers
numSyms = args.symbols
password = []
output = ""
x = 0
y = 0
z = 0
s = 0
while x < numWords:
randomWord = wordlist[random.randint(0, len(wordlist) - 1)]
password.append(randomWord)
x += 1
cappedIndex = []
while y < numCaps:
randomInt = random.randint(0, len(password) - 1)
capitalize = password[randomInt]
if randomInt in cappedIndex:
continue
cappedIndex.append(randomInt)
begin = capitalize[:1]
end = capitalize[1:]
capitalizedWord = begin.upper() + end
password.remove(capitalize)
password.insert(randomInt, capitalizedWord)
y += 1
while z < numNums:
insertNum = str(random.randint(0, 9))
insertAt = random.randint(0, len(password))
if insertAt is 0:
password.insert(0, insertNum)
else:
password.insert(insertAt, insertNum)
z += 1
symbols = ["~", "!", "@", "#", "$", "%", "^", "&", "*", ".", ":", ";"]
while s < numSyms:
insertSym = symbols[random.randint(0, len(symbols) - 1)]
insertAt = random.randint(0, len(password))
if insertAt is 0:
password.insert(0, insertSym)
else:
password.insert(insertAt, insertSym)
s += 1
for x in range(0, len(password)):
output += password[x].strip('\n')
print(output)
| [
"anddupell@gmail.com"
] | anddupell@gmail.com |
12c795df6468b2b398c176ca41df8df3249bd69e | 5150ceffc9be27ae17077b30b3e26c8203f450fc | /gis/__init__.py | f7fc55a2b7b053b3b7296d141796a25fb46580bf | [
"Apache-2.0"
] | permissive | DiviPeople/gis | 2fd1e9d50734f395426b8121c1e06f22cc09475c | b9de79f3985787685a00d407488909da183a5ccf | refs/heads/master | 2023-05-15T01:15:10.762333 | 2021-05-12T18:57:19 | 2021-06-10T18:12:55 | 316,723,852 | 0 | 0 | null | 2021-06-10T18:12:56 | 2020-11-28T11:59:27 | null | UTF-8 | Python | false | false | 144 | py | """Module initializing the package."""
import os
import django
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gis.docker')
django.setup()
| [
"49566826+Themanwhosmellslikesugar@users.noreply.github.com"
] | 49566826+Themanwhosmellslikesugar@users.noreply.github.com |
a5b0c5f8741798c0ac66138f3bbe7463fa715838 | 8afd515ce74d93716e1996dae71a793b7143e206 | /ex.11720_sumofnums.py | 365ffe6cb6bd67ea5d7805fb2fb6ea5f2a6bf4ac | [] | no_license | NikkieS/Algorithm_practice | 9684d65a571688aaa772de6801df2d36c023a760 | cde2486663c0d26796166ef7b4e3fbcc4b0fb148 | refs/heads/master | 2023-01-18T15:29:37.624421 | 2020-11-18T01:41:48 | 2020-11-18T01:41:48 | 253,959,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 279 | py | """
Date : 11/06/20
Author : GaRam Song
URL : https://www.acmicpc.net/problem/11720
Description : sum of numbers
"""
# My Answer
l = int(input())
num = input()
ans = 0
for i in range(l):
ans += int(num[i])
print(ans)
# Short Coding
input()
print(sum(map(int,input())))
| [
"21400389@handong.edu"
] | 21400389@handong.edu |
1d92b49f32e9e43e80212f55a3859782d21b2ef1 | b777fc0715c1d20faf0c849f6894cf41cfe90cbe | /tests/test_decompressor_fuzzing.py | a7f81adab4e4d6d70076e5aa4efea84caea7078d | [
"BSD-3-Clause"
] | permissive | pombredanne/python-zstandard | 31a417d3a7d5a05594346235fd88e89e9a01d698 | 70dcc805d7761b1d7e35fd219a4d5d4512acd96a | refs/heads/master | 2020-09-16T08:18:39.956344 | 2019-11-16T04:40:30 | 2019-11-16T04:41:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,091 | py | import io
import os
import unittest
try:
import hypothesis
import hypothesis.strategies as strategies
except ImportError:
raise unittest.SkipTest("hypothesis not available")
import zstandard as zstd
from .common import (
make_cffi,
NonClosingBytesIO,
random_input_data,
)
@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_stream_reader_fuzzing(unittest.TestCase):
@hypothesis.settings(
suppress_health_check=[hypothesis.HealthCheck.large_base_example]
)
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
streaming=strategies.booleans(),
source_read_size=strategies.integers(1, 1048576),
read_sizes=strategies.data(),
)
def test_stream_source_read_variance(
self, original, level, streaming, source_read_size, read_sizes
):
cctx = zstd.ZstdCompressor(level=level)
if streaming:
source = io.BytesIO()
writer = cctx.stream_writer(source)
writer.write(original)
writer.flush(zstd.FLUSH_FRAME)
source.seek(0)
else:
frame = cctx.compress(original)
source = io.BytesIO(frame)
dctx = zstd.ZstdDecompressor()
chunks = []
with dctx.stream_reader(source, read_size=source_read_size) as reader:
while True:
read_size = read_sizes.draw(strategies.integers(-1, 131072))
chunk = reader.read(read_size)
if not chunk and read_size:
break
chunks.append(chunk)
self.assertEqual(b"".join(chunks), original)
# Similar to above except we have a constant read() size.
@hypothesis.settings(
suppress_health_check=[hypothesis.HealthCheck.large_base_example]
)
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
streaming=strategies.booleans(),
source_read_size=strategies.integers(1, 1048576),
read_size=strategies.integers(-1, 131072),
)
def test_stream_source_read_size(
self, original, level, streaming, source_read_size, read_size
):
if read_size == 0:
read_size = 1
cctx = zstd.ZstdCompressor(level=level)
if streaming:
source = io.BytesIO()
writer = cctx.stream_writer(source)
writer.write(original)
writer.flush(zstd.FLUSH_FRAME)
source.seek(0)
else:
frame = cctx.compress(original)
source = io.BytesIO(frame)
dctx = zstd.ZstdDecompressor()
chunks = []
reader = dctx.stream_reader(source, read_size=source_read_size)
while True:
chunk = reader.read(read_size)
if not chunk and read_size:
break
chunks.append(chunk)
self.assertEqual(b"".join(chunks), original)
@hypothesis.settings(
suppress_health_check=[hypothesis.HealthCheck.large_base_example]
)
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
streaming=strategies.booleans(),
source_read_size=strategies.integers(1, 1048576),
read_sizes=strategies.data(),
)
def test_buffer_source_read_variance(
self, original, level, streaming, source_read_size, read_sizes
):
cctx = zstd.ZstdCompressor(level=level)
if streaming:
source = io.BytesIO()
writer = cctx.stream_writer(source)
writer.write(original)
writer.flush(zstd.FLUSH_FRAME)
frame = source.getvalue()
else:
frame = cctx.compress(original)
dctx = zstd.ZstdDecompressor()
chunks = []
with dctx.stream_reader(frame, read_size=source_read_size) as reader:
while True:
read_size = read_sizes.draw(strategies.integers(-1, 131072))
chunk = reader.read(read_size)
if not chunk and read_size:
break
chunks.append(chunk)
self.assertEqual(b"".join(chunks), original)
# Similar to above except we have a constant read() size.
@hypothesis.settings(
suppress_health_check=[hypothesis.HealthCheck.large_base_example]
)
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
streaming=strategies.booleans(),
source_read_size=strategies.integers(1, 1048576),
read_size=strategies.integers(-1, 131072),
)
def test_buffer_source_constant_read_size(
self, original, level, streaming, source_read_size, read_size
):
if read_size == 0:
read_size = -1
cctx = zstd.ZstdCompressor(level=level)
if streaming:
source = io.BytesIO()
writer = cctx.stream_writer(source)
writer.write(original)
writer.flush(zstd.FLUSH_FRAME)
frame = source.getvalue()
else:
frame = cctx.compress(original)
dctx = zstd.ZstdDecompressor()
chunks = []
reader = dctx.stream_reader(frame, read_size=source_read_size)
while True:
chunk = reader.read(read_size)
if not chunk and read_size:
break
chunks.append(chunk)
self.assertEqual(b"".join(chunks), original)
@hypothesis.settings(
suppress_health_check=[hypothesis.HealthCheck.large_base_example]
)
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
streaming=strategies.booleans(),
source_read_size=strategies.integers(1, 1048576),
)
def test_stream_source_readall(self, original, level, streaming, source_read_size):
cctx = zstd.ZstdCompressor(level=level)
if streaming:
source = io.BytesIO()
writer = cctx.stream_writer(source)
writer.write(original)
writer.flush(zstd.FLUSH_FRAME)
source.seek(0)
else:
frame = cctx.compress(original)
source = io.BytesIO(frame)
dctx = zstd.ZstdDecompressor()
data = dctx.stream_reader(source, read_size=source_read_size).readall()
self.assertEqual(data, original)
@hypothesis.settings(
suppress_health_check=[hypothesis.HealthCheck.large_base_example]
)
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
streaming=strategies.booleans(),
source_read_size=strategies.integers(1, 1048576),
read_sizes=strategies.data(),
)
def test_stream_source_read1_variance(
self, original, level, streaming, source_read_size, read_sizes
):
cctx = zstd.ZstdCompressor(level=level)
if streaming:
source = io.BytesIO()
writer = cctx.stream_writer(source)
writer.write(original)
writer.flush(zstd.FLUSH_FRAME)
source.seek(0)
else:
frame = cctx.compress(original)
source = io.BytesIO(frame)
dctx = zstd.ZstdDecompressor()
chunks = []
with dctx.stream_reader(source, read_size=source_read_size) as reader:
while True:
read_size = read_sizes.draw(strategies.integers(-1, 131072))
chunk = reader.read1(read_size)
if not chunk and read_size:
break
chunks.append(chunk)
self.assertEqual(b"".join(chunks), original)
@hypothesis.settings(
suppress_health_check=[hypothesis.HealthCheck.large_base_example]
)
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
streaming=strategies.booleans(),
source_read_size=strategies.integers(1, 1048576),
read_sizes=strategies.data(),
)
def test_stream_source_readinto1_variance(
self, original, level, streaming, source_read_size, read_sizes
):
cctx = zstd.ZstdCompressor(level=level)
if streaming:
source = io.BytesIO()
writer = cctx.stream_writer(source)
writer.write(original)
writer.flush(zstd.FLUSH_FRAME)
source.seek(0)
else:
frame = cctx.compress(original)
source = io.BytesIO(frame)
dctx = zstd.ZstdDecompressor()
chunks = []
with dctx.stream_reader(source, read_size=source_read_size) as reader:
while True:
read_size = read_sizes.draw(strategies.integers(1, 131072))
b = bytearray(read_size)
count = reader.readinto1(b)
if not count:
break
chunks.append(bytes(b[0:count]))
self.assertEqual(b"".join(chunks), original)
@hypothesis.settings(
suppress_health_check=[hypothesis.HealthCheck.large_base_example]
)
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 1048576),
seek_amounts=strategies.data(),
read_sizes=strategies.data(),
)
def test_relative_seeks(
self, original, level, source_read_size, seek_amounts, read_sizes
):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(frame, read_size=source_read_size) as reader:
while True:
amount = seek_amounts.draw(strategies.integers(0, 16384))
reader.seek(amount, os.SEEK_CUR)
offset = reader.tell()
read_amount = read_sizes.draw(strategies.integers(1, 16384))
chunk = reader.read(read_amount)
if not chunk:
break
self.assertEqual(original[offset : offset + len(chunk)], chunk)
@hypothesis.settings(
suppress_health_check=[hypothesis.HealthCheck.large_base_example]
)
@hypothesis.given(
originals=strategies.data(),
frame_count=strategies.integers(min_value=2, max_value=10),
level=strategies.integers(min_value=1, max_value=5),
source_read_size=strategies.integers(1, 1048576),
read_sizes=strategies.data(),
)
def test_multiple_frames(
self, originals, frame_count, level, source_read_size, read_sizes
):
cctx = zstd.ZstdCompressor(level=level)
source = io.BytesIO()
buffer = io.BytesIO()
writer = cctx.stream_writer(buffer)
for i in range(frame_count):
data = originals.draw(strategies.sampled_from(random_input_data()))
source.write(data)
writer.write(data)
writer.flush(zstd.FLUSH_FRAME)
dctx = zstd.ZstdDecompressor()
buffer.seek(0)
reader = dctx.stream_reader(
buffer, read_size=source_read_size, read_across_frames=True
)
chunks = []
while True:
read_amount = read_sizes.draw(strategies.integers(-1, 16384))
chunk = reader.read(read_amount)
if not chunk and read_amount:
break
chunks.append(chunk)
self.assertEqual(source.getvalue(), b"".join(chunks))
@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_stream_writer_fuzzing(unittest.TestCase):
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
write_size=strategies.integers(min_value=1, max_value=8192),
input_sizes=strategies.data(),
)
def test_write_size_variance(self, original, level, write_size, input_sizes):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
dctx = zstd.ZstdDecompressor()
source = io.BytesIO(frame)
dest = NonClosingBytesIO()
with dctx.stream_writer(dest, write_size=write_size) as decompressor:
while True:
input_size = input_sizes.draw(strategies.integers(1, 4096))
chunk = source.read(input_size)
if not chunk:
break
decompressor.write(chunk)
self.assertEqual(dest.getvalue(), original)
@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_copy_stream_fuzzing(unittest.TestCase):
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
read_size=strategies.integers(min_value=1, max_value=8192),
write_size=strategies.integers(min_value=1, max_value=8192),
)
def test_read_write_size_variance(self, original, level, read_size, write_size):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
source = io.BytesIO(frame)
dest = io.BytesIO()
dctx = zstd.ZstdDecompressor()
dctx.copy_stream(source, dest, read_size=read_size, write_size=write_size)
self.assertEqual(dest.getvalue(), original)
@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_decompressobj_fuzzing(unittest.TestCase):
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
chunk_sizes=strategies.data(),
)
def test_random_input_sizes(self, original, level, chunk_sizes):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
source = io.BytesIO(frame)
dctx = zstd.ZstdDecompressor()
dobj = dctx.decompressobj()
chunks = []
while True:
chunk_size = chunk_sizes.draw(strategies.integers(1, 4096))
chunk = source.read(chunk_size)
if not chunk:
break
chunks.append(dobj.decompress(chunk))
self.assertEqual(b"".join(chunks), original)
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
write_size=strategies.integers(
min_value=1, max_value=4 * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE
),
chunk_sizes=strategies.data(),
)
def test_random_output_sizes(self, original, level, write_size, chunk_sizes):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
source = io.BytesIO(frame)
dctx = zstd.ZstdDecompressor()
dobj = dctx.decompressobj(write_size=write_size)
chunks = []
while True:
chunk_size = chunk_sizes.draw(strategies.integers(1, 4096))
chunk = source.read(chunk_size)
if not chunk:
break
chunks.append(dobj.decompress(chunk))
self.assertEqual(b"".join(chunks), original)
@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_read_to_iter_fuzzing(unittest.TestCase):
@hypothesis.given(
original=strategies.sampled_from(random_input_data()),
level=strategies.integers(min_value=1, max_value=5),
read_size=strategies.integers(min_value=1, max_value=4096),
write_size=strategies.integers(min_value=1, max_value=4096),
)
def test_read_write_size_variance(self, original, level, read_size, write_size):
cctx = zstd.ZstdCompressor(level=level)
frame = cctx.compress(original)
source = io.BytesIO(frame)
dctx = zstd.ZstdDecompressor()
chunks = list(
dctx.read_to_iter(source, read_size=read_size, write_size=write_size)
)
self.assertEqual(b"".join(chunks), original)
@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
class TestDecompressor_multi_decompress_to_buffer_fuzzing(unittest.TestCase):
@hypothesis.given(
original=strategies.lists(
strategies.sampled_from(random_input_data()), min_size=1, max_size=1024
),
threads=strategies.integers(min_value=1, max_value=8),
use_dict=strategies.booleans(),
)
def test_data_equivalence(self, original, threads, use_dict):
kwargs = {}
if use_dict:
kwargs["dict_data"] = zstd.ZstdCompressionDict(original[0])
cctx = zstd.ZstdCompressor(
level=1, write_content_size=True, write_checksum=True, **kwargs
)
if not hasattr(cctx, "multi_compress_to_buffer"):
self.skipTest("multi_compress_to_buffer not available")
frames_buffer = cctx.multi_compress_to_buffer(original, threads=-1)
dctx = zstd.ZstdDecompressor(**kwargs)
result = dctx.multi_decompress_to_buffer(frames_buffer)
self.assertEqual(len(result), len(original))
for i, frame in enumerate(result):
self.assertEqual(frame.tobytes(), original[i])
frames_list = [f.tobytes() for f in frames_buffer]
result = dctx.multi_decompress_to_buffer(frames_list)
self.assertEqual(len(result), len(original))
for i, frame in enumerate(result):
self.assertEqual(frame.tobytes(), original[i])
| [
"gregory.szorc@gmail.com"
] | gregory.szorc@gmail.com |
c2864dbd08f04b309b4a5c594929169cbd5d96f1 | 879e6e92d212f7db201dbaa8d6868754d8b082eb | /car/migrations/0008_auto_20200429_0110.py | c074a9440d25177b1fe2b3674eb66e126b185ad9 | [] | no_license | tugcegungoru/DjangoProject | 518726bb879ca26aa90637bb3b32722fc6280953 | 954ea699ed749ac15dd57e50f886f905ef2d52a1 | refs/heads/master | 2023-01-02T16:41:17.618397 | 2020-10-11T15:00:03 | 2020-10-11T15:00:03 | 250,832,866 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,391 | py | # Generated by Django 3.0.4 on 2020-04-28 22:10
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
class Migration(migrations.Migration):
dependencies = [
('car', '0007_car_slug'),
]
operations = [
migrations.AddField(
model_name='category',
name='level',
field=models.PositiveIntegerField(default=0, editable=False),
preserve_default=False,
),
migrations.AddField(
model_name='category',
name='lft',
field=models.PositiveIntegerField(default=0, editable=False),
preserve_default=False,
),
migrations.AddField(
model_name='category',
name='rght',
field=models.PositiveIntegerField(default=0, editable=False),
preserve_default=False,
),
migrations.AddField(
model_name='category',
name='tree_id',
field=models.PositiveIntegerField(db_index=True, default=0, editable=False),
preserve_default=False,
),
migrations.AlterField(
model_name='category',
name='parent',
field=mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='car.Category'),
),
]
| [
"tucegungoru@gmail.com"
] | tucegungoru@gmail.com |
ab6f3554063761fefaf2451f10bfaf0b4936c995 | 15a9342c7dad3176b7b8ff993748f4643216e75a | /scanner/inter_image.py | fa3c000121997bbe0cee1088e5d69ca7f01c5a63 | [] | no_license | limu007/Charlay | 4ce763603b46b2af4fa3c2091c8a58c37284d51a | efe3d5b9875627cd693e5c898066bc4a7abf4c11 | refs/heads/master | 2021-01-14T08:03:37.227666 | 2017-03-24T12:52:42 | 2017-03-24T12:52:42 | 2,166,287 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,155 | py | import pyfits
from scipy import ndimage
def shape_me(ifile="soi6um-red.fits"):
qred=pyfits.open(ifile)[0].data
qpos=lambda x:median(x[x>5000]) if sum(x>5000)>2 else 0
medtst=array([[qpos(qred[i:i+50,j:j+50]) for j in range(0,750,50)] for i in range(0,500,50)])
#rescaled map
#medful=array([[1 if (qred[i,j]>medtst[i//50,j//50]) else 0 for j in range(0,750)] for i in range(0,500)])
#medful[qred[0:500,0:750]<5000]=-1
x,y=indices(medtst.shape)*50+25
spins=interpolate.bisplrep(x[medtst>0],y[medtst>0],medtst[medtst>0],nxest=25,nyest=25)
flat=interpolate.bisplev(r_[:500],r_[:750],spins)
flat[qred[0:500,0:750]<5000]=4000
qnew=qred[0:500,0:750]/flat
qnew[qred[0:500,0:750]<5000]=0
def imanal(qnew,cmin=10,nit=1,rep=0):
if nit<0: qsel=ndimage.binary_dilation(ndimage.binary_erosion(qnew,iterations=-nit),iterations=-nit)
elif nit>0: qsel=ndimage.binary_erosion(ndimage.binary_dilation(qnew,iterations=nit),iterations=nit)
qout=ndimage.label(qsel)
if rep==1: return qout[0]
cens=[(qout[0]==i).sum() for i in range(qout[1])]
from numpy import r_,where
cpos=where(r_[cens]>cmin)[0]
print 'found %i regions -> reduced %i'%(qout[1],len(cpos))
if rep==2: return cens,cpos
cmax=len(cpos)
qs=qout[0]
j=0
for i in range(1,qout[1]):
if i in cpos:
qs[qs==i]=j
j+=1
else:
qs[qs==i]=cmax
return qs
def testfile(ifile="/home/limu/Dropbox/Data/profs.pck"):
if ifile:
import cPickle
wave=cPickle.load(open(ifile,"r"))
dgrn,dred,dnir=[pyfits.open("soi3mm-"+ep+".fits")[0].data for ep in ['green','red','830']]
wave=array([d[290:490,290:310].mean(1) for d in [dgrn,dred,dnir]])
pnir=spectra.extrema(wave[2])[0]
pred=spectra.extrema(wave[1])[0]
#not flat background
def linanal():
import spectra
from numpy import polyfit
pgrn=spectra.extrema(wave[0],msplit='sort')[0]
pfit2=polyfit(pgrn,wave[0][pgrn],2) #quad. model
y=wave[0]-polyval(pfit2,r_[:len(wave[0])])
pgrn2=spectra.extrema(y,msplit='sort')[0]
i=3
pos=[pgrn2,pred,pnir]
wlns=[532.,650.,830.]
dir=-1
interpt = lambda i,seq:seq[(seq>pnir[i])*(seq<pnir[i+1])]
posk=lambda s,k,lam:(dir*(s-pnir[i])/float(pnir[i+1]-pnir[i])*830+k*830)/lam
#sres=array([[posk(s,k,650) for s in ip] for k in range(2,20)])
skipmax = lambda arr:arr[arange(len(arr))!=arr.argmax()]
fracint = lambda arr:skipmax((arr-(arr+0.5).astype('int'))**2).sum() #
gfall=[fracint(array([[posk(s,k,wlns[j]) for s in interpt(i,pos[j])] for k in range(2,20)])) for j in range(2)]
fdif=lambda d:array([fracint((d-polyval(afit,pos[0]))/wlns[i]) for i in range(3)])
mins=array([3373, 3764, 4388, 4897, 5342, 5816, 6409, 7003]) # nejmensi 3373, a posledni 2
#final estimate
afit=array([polyfit(pos[j],r_[:len(pos[j])]/2.*wlns[j],3) for j in range(3)]).mean(0) #cubic profile
def model1(pts,xsize=200,ysize=200):
from scipy import interpolate as ip
from numpy import mgrid,sin,indices
gr=indices((6,6))
#pz=sin((gr[0]**2+gr[1]**2)/2.).ravel()
pz=exp(-((gr[0]-2)**2+(gr[1]-3)**2)/2.).ravel()
zmodel=ip.LSQBivariateSpline(gr[0].ravel(),gr[1].ravel(),pz,r_[:6:2],r_[:6:2])
gr2=mgrid[:4:xsize*1j,:4:ysize*1j]
fine=zmodel.ev(gr2[0].ravel(),gr2[1].ravel())
imshow(sin(phas*2/pi))
| [
"limu007@gmail.com"
] | limu007@gmail.com |
50b0d0d43f43bcda2ef5a05062a45b32b719010f | 4bd5cdb67fdd6a6f0ceb3af025ceaf977b661273 | /gconv_experiments/groupy/garray/D4h_array.py | 1c0b4b98530c21dfa94b6937ccf955673ddf5fa0 | [] | no_license | andreiqv/gconv | 93d7d313cdc78e2bfefd53820918293526fc4680 | 23f9ec62b119c64cc87f8727cc1e409a469db0f1 | refs/heads/master | 2020-05-07T21:05:28.840973 | 2019-04-11T23:25:31 | 2019-04-11T23:25:31 | 180,890,960 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,376 | py | import numpy as np
from groupy.garray.finitegroup import FiniteGroup
from groupy.garray.matrix_garray import MatrixGArray
from groupy.garray.D4ht_array import D4htArray
from groupy.garray.Z3_array import Z3Array
"""
Implementation of dihedral finite group D4h, consisting of 16 elements in total.
These are the elements of C4h, with added reflection.
Int parameterisation contains an extra parameter, m (in {0, 1}) to represent this reflection.
"""
class D4hArray(MatrixGArray):
parameterizations = ['int', 'mat', 'hmat']
_g_shapes = {'int': (3,), 'mat': (3, 3), 'hmat': (4, 4)}
_left_actions = {}
_reparameterizations = {}
_group_name = 'D4h'
def __init__(self, data, p='int'):
data = np.asarray(data)
assert data.dtype == np.int
# classes OArray can be multiplied with
self._left_actions[D4hArray] = self.__class__.left_action_hmat
self._left_actions[D4htArray] = self.__class__.left_action_hmat
self._left_actions[Z3Array] = self.__class__.left_action_vec
super(D4hArray, self).__init__(data, p)
self.elements = self.get_elements()
def mat2int(self, mat_data):
'''
Transforms 3x3 matrix representation to int representation.
To handle any size and shape of mat_data, the original mat_data
is reshaped to a long list of 3x3 matrices, converted to a list of
int representations, and reshaped back to the original mat_data shape.
mat-2-int is achieved by taking the matrix, and looking up whether it
exists in the element list. If not, the matrix should be multiplied with -1
to retrieve the reflection. The resulting matrix can be looked up in the
element list, and that index can be converted to y and z.
'''
input = mat_data.reshape((-1, 3, 3))
data = np.zeros((input.shape[0], 3), dtype=np.int)
for i in xrange(input.shape[0]):
mat = input[i]
# check for reflection
if mat.tolist() not in self.elements:
mat = np.array(mat) * -1
data[i, 2] = 1
# determine z and y
index = self.elements.index(mat.tolist())
z = int(index % 4)
y = int((index - z) / 4)
data[i, 0] = y
data[i, 1] = z
data = data.reshape(mat_data.shape[:-2] + (3,))
return data
def int2mat(self, int_data):
'''
Transforms integer representation to 3x3 matrix representation.
Original int_data is flattened and later reshaped back to its original
shape to handle any size and shape of input.
'''
# rotations over y, z and reflection
y = int_data[..., 0].flatten()
z = int_data[..., 1].flatten()
m = int_data[..., 2].flatten()
data = np.zeros((len(y),) + (3, 3), dtype=np.int)
for j in xrange(len(y)):
index = (y[j] * 4) + z[j]
mat = self.elements[index]
mat = np.array(mat) * ((-1) ** m[j]) # mirror if reflection
data[j, 0:3, 0:3] = mat.tolist()
data = data.reshape(int_data.shape[:-1] + (3, 3))
return data
def _multiply(self, element, generator, times):
'''
Helper function to multiply an _element_ with a _generator_
_times_ number of times.
'''
element = np.array(element)
for i in range(times):
element = np.dot(element, np.array(generator))
return element
def get_elements(self):
'''
Function to generate a list containing elements of group D4h,
similar to get_elements() of BArray.
Elements are stored as lists rather than numpy arrays to enable
lookup through self.elements.index(x).
'''
# specify generators
g1 = np.array([[-1, 0, 0], [0, 1, 0], [0, 0, -1]]) # 180 degrees over y
g2 = np.array([[0, -1, 0], [1, 0, 0], [0, 0, 1]]) # 90 degrees over z
element_list = []
element = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) # starting point = identity matrix
for i in range(0, 2):
element = self._multiply(element, g1, i)
for j in range(0, 4):
element = self._multiply(element, g2, j)
element_list.append(element.tolist())
return element_list
class D4hGroup(FiniteGroup, D4hArray):
def __init__(self):
D4hArray.__init__(
self,
data=np.array([[i, j, m] for i in xrange(2) for j in xrange(4) for m in xrange(2)]),
p='int'
)
FiniteGroup.__init__(self, D4hArray)
def factory(self, *args, **kwargs):
return D4hArray(*args, **kwargs)
D4h = D4hGroup()
def rand(size=()):
'''
Returns an D4hArray of shape size, with randomly chosen elements in int parameterization.
'''
data = np.zeros(size + (3,), dtype=np.int)
data[..., 0] = np.random.randint(0, 2, size)
data[..., 1] = np.random.randint(0, 4, size)
data[..., 2] = np.random.randint(0, 2, size)
return D4hArray(data=data, p='int')
def identity(p='int'):
'''
Returns the identity element: a matrix with 1's on the diagonal.
'''
li = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
e = D4hArray(data=np.array(li, dtype=np.int), p='mat')
return e.reparameterize(p)
| [
"phxv@mail.ru"
] | phxv@mail.ru |
5b03345483725c4fa99083528da2354c33d38c16 | 96b67d9253c3d9381d32758decdd9343684492bf | /healthtools_ke_api/tests/test_nurses_api.py | 968f98405ad38ffe5e26ae41244f5935f9475ecc | [
"MIT"
] | permissive | andela-ookoro/HealthTools.API | 14e369b33f7d595021ee329e9f7bf6f37d17e29a | 0bae579a5876819433cfe071ad81dbe6922b41a8 | refs/heads/master | 2021-08-24T21:06:31.545816 | 2017-09-11T16:03:32 | 2017-09-11T16:03:32 | 103,128,202 | 0 | 0 | null | 2017-09-11T16:00:02 | 2017-09-11T11:35:15 | Python | UTF-8 | Python | false | false | 1,225 | py | from unittest import TestCase
from healthtools_ke_api import app
from healthtools_ke_api.views.nurses import get_nurses_from_nc_registry
class TestNursesAPI(TestCase):
def setUp(self):
self.client = app.test_client()
def test_gets_nurses_from_nc_registry(self):
nurses = get_nurses_from_nc_registry("Marie")
self.assertTrue(len(nurses) > 0)
def test_gets_nurses_from_nc_registry_handle_inexistent_nurse(self):
nurses = get_nurses_from_nc_registry("ihoafiho39023u8")
self.assertEqual(len(nurses), 0)
def test_nurses_endpoint_handles_bad_query(self):
response = self.client.get("/nurses/search.json?q=")
self.assertIn("A query is required.", response.data)
def test_nurses_endpoint_gets_nurses(self):
response = self.client.get("/nurses/search.json?q=Marie")
self.assertIn("success", response.data)
def test_nurses_endpoint_can_retrieve_cached_result(self):
# call once
self.client.get("/nurses/search.json?q=Marie")
# second time should retrieve cached result
response = self.client.get("/nurses/search.json?q=Marie")
self.assertIn("X-Retrieved-From-Cache", response.headers.keys())
| [
"marvinryan@ymail.com"
] | marvinryan@ymail.com |
ef955cf11a1cd96660828ba53df533af7add7417 | a9b5bc48a89329aa44cb4dd63ce47a3c0dfc90ba | /tests/test_withings_object.py | 8fa9c36a34257583ebac5c15851c1621aa312ca8 | [
"MIT"
] | permissive | randi120/python-withings | d050a263f5c500ad258072dbb3661a43dd225de3 | 016bb3cc2d62a4e2813df422829eba21530570bc | refs/heads/master | 2021-01-22T13:47:49.355343 | 2014-12-26T00:47:12 | 2014-12-26T00:47:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 938 | py | import time
import unittest
from datetime import datetime
from withings import WithingsObject
class TestWithingsObject(unittest.TestCase):
def test_attributes(self):
data = {
"date": "2013-04-10",
"string": "FAKE_STRING",
"integer": 55555,
"float": 5.67
}
obj = WithingsObject(data)
self.assertEqual(datetime.strftime(obj.date, '%Y-%m-%d'), data['date'])
self.assertEqual(obj.string, data['string'])
self.assertEqual(obj.integer, data['integer'])
self.assertEqual(obj.float, data['float'])
# Test time as epoch
data = {"date": 1409596058}
obj = WithingsObject(data)
self.assertEqual(time.mktime(obj.date.timetuple()), data['date'])
# Test funky time
data = {"date": "weird and wacky date format"}
obj = WithingsObject(data)
self.assertEqual(obj.date, data['date'])
| [
"bradpitcher@gmail.com"
] | bradpitcher@gmail.com |
cc1454d122573184c132666c2fe8f7e97e045849 | d8416cd4c8f532809c4c9d368d43fa773b3b198c | /torchsupport/flex/examples/cifar_tdre.py | 546881ac3c571f5f93a027d84caa06030768d4c4 | [
"MIT"
] | permissive | DavidMetzIMT/torchsupport | a53a0d532b7542d81dc158d3d67f195cbce86bf9 | a0ca719c820a4895e98091c52e43c5300e1a71a3 | refs/heads/master | 2023-05-28T21:45:09.302210 | 2021-06-14T17:30:58 | 2021-06-14T17:30:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,151 | py | from functools import partial
from torchsupport.data.namedtuple import namespace
import torch
import torch.nn as nn
import torch.nn.functional as func
from torch.distributions import Normal
from torch.utils.data import Dataset
from torchvision.datasets import CIFAR10
from torchvision.transforms import ToTensor
from torchsupport.modules import ReZero
from torchsupport.training.samplers import Langevin
from torchsupport.utils.argparse import parse_options
from torchsupport.flex.log.log_types import LogImage
from torchsupport.flex.context.context import TrainingContext
from torchsupport.flex.data_distributions.data_distribution import DataDistribution
from torchsupport.flex.tasks.energy.density_ratio import direct_mixing, noise_contrastive_estimation, probability_surface_estimation, random_dim_mixing, tdre_mixing, tdre_step, tnce_step, independent_mixing, vp_mixing
from torchsupport.flex.training.density_ratio import telescoping_density_ratio_training
def valid_callback(args, ctx: TrainingContext=None):
    """Log the validation batch plus, per predicted class, its member images."""
    ctx.log(images=LogImage(args.sample))
    predicted = args.prediction.argmax(dim=1)
    for class_id in range(10):
        members = args.sample[predicted == class_id]
        if members.size(0) > 0:
            ctx.log(**{f"classified {class_id}": LogImage(members)})
def generate_step(energy, base, integrator: Langevin=None, ctx=None):
    """Sample images by annealed Langevin dynamics over descending noise levels."""
    sample = base.sample(ctx.batch_size)
    # NOTE(review): `opt` is the module-level options object created under
    # __main__; this function assumes it exists at call time — confirm.
    levels = torch.arange(0.0, 1.0, 0.01, device=opt.device)
    for level in reversed(levels):
        this_level = level * torch.ones(sample.size(0), device=sample.device)
        # Tether each Langevin update to the current sample via ConditionalEnergy.
        sample = integrator.integrate(
            ConditionalEnergy(energy, sample, shift=0.025), sample, this_level, None
        )
    # Map from [-1, 1] model space back to [0, 1] image space for logging.
    result = ((sample + 1) / 2).clamp(0, 1)
    ctx.log(samples=LogImage(result))
class CIFAR10Dataset(Dataset):
    """Wrap an (image, label) dataset: add dequantization noise, rescale to [-1, 1]."""

    def __init__(self, data):
        self.data = data

    def __getitem__(self, index):
        image, _ = self.data[index]
        # Dequantize with per-pixel Gaussian noise of roughly one grey level.
        noisy = image + torch.randn_like(image) / 255
        return noisy * 2 - 1, []

    def __len__(self):
        return len(self.data)
class Base(nn.Module):
    """Learnable diagonal-Gaussian base density over 3x32x32 images.

    ``sample`` deliberately draws from a fixed standard normal N(0, I); the
    learned ``mean``/``logv`` parameters only affect density evaluation in
    ``forward``.
    """

    def __init__(self):
        super().__init__()
        # Per-channel parameters, broadcast over the 32x32 spatial grid.
        self.mean = nn.Parameter(torch.zeros(3, 1, 1))
        self.logv = nn.Parameter(torch.zeros(3, 1, 1))

    def sample(self, batch_size):
        """Draw ``batch_size`` standard-normal images on the parameters' device.

        Removed the previous dead code that built (and discarded) a Normal
        distribution before returning plain ``torch.randn`` noise.
        """
        return torch.randn(batch_size, 3, 32, 32, device=self.mean.device)

    def log_prob(self, data, condition):
        """Return a zero log-density with the same shape as ``forward``'s score."""
        return torch.zeros_like(self(data, condition)[0])

    def forward(self, data, condition):
        """Per-image Gaussian log-density, shape (batch, 1), plus the distribution."""
        dist = Normal(self.mean, self.logv.exp())
        log_p = dist.log_prob(data)
        # Flatten (C, H, W) and sum to one log-likelihood per image.
        log_p = log_p.view(*log_p.shape[:-3], -1)
        return log_p.sum(dim=-1, keepdim=True), namespace(
            distribution=dist
        )
class SineEmbedding(nn.Module):
    """Embed a scalar level/time value via stacked sine-activated linear layers."""

    def __init__(self, size, depth=2):
        super().__init__()
        layers = [nn.Linear(1, size)]
        layers += [nn.Linear(size, size) for _ in range(depth - 1)]
        self.blocks = nn.ModuleList(layers)

    def forward(self, time):
        # Promote the (batch,) scalar input to a (batch, 1) column.
        result = time[:, None]
        for layer in self.blocks:
            result = layer(result).sin()
        return result
class ResBlock(nn.Module):
    """Residual 3x3 conv block, FiLM-conditioned on a scalar noise level.

    Both the residual branch and the skip path receive level-dependent
    scale/bias pairs derived from sine embeddings; ReZero gates the merge.
    """

    def __init__(self, size):
        super().__init__()
        # Each embedding yields 2*size values: a scale and a bias per channel.
        self.condify = SineEmbedding(2 * size)
        self.skip = SineEmbedding(2 * size)
        self.blocks = nn.ModuleList([
            nn.Conv2d(size, size, 3, padding=1)
            for idx in range(2)
        ])
        self.zero = ReZero(size)

    def forward(self, inputs, levels):
        cond = self.condify(levels)
        # Reshape (batch, 2*size) -> (batch, 2*size, 1, 1) for broadcasting.
        cond = cond.view(*cond.shape, 1, 1)
        skip = self.skip(levels)
        skip = skip.view(*skip.shape, 1, 1)
        scale, bias = cond.chunk(2, dim=1)
        skip_scale, skip_bias = skip.chunk(2, dim=1)
        out = func.silu(self.blocks[0](inputs))
        # FiLM modulation between the two convolutions.
        out = scale * out + bias
        out = self.blocks[1](out)
        # The skip path is modulated as well before the gated residual sum.
        inputs = skip_scale * inputs + skip_bias
        return self.zero(inputs, out)
class Energy(nn.Module):
    """Level-conditioned convolutional energy network over 3x32x32 images."""

    def __init__(self, base):
        super().__init__()
        self.base = base
        # Four conv stages, doubling channels while halving resolution.
        self.conv = nn.ModuleList([
            nn.Conv2d(3, 32, 3, padding=1),
            nn.Conv2d(32, 64, 3, padding=1),
            nn.Conv2d(64, 128, 3, padding=1),
            nn.Conv2d(128, 256, 3, padding=1)
        ])
        self.res = nn.ModuleList([
            ResBlock(32),
            ResBlock(64),
            ResBlock(128),
            ResBlock(256),
        ])
        # Quadratic + linear readout over the pooled feature vector.
        self.W = nn.Linear(256, 256)
        self.b = nn.Linear(256, 1)

    def forward(self, inputs, levels, *args):
        hidden = inputs
        for res_block, conv in zip(self.res, self.conv):
            hidden = func.silu(conv(hidden))
            hidden = res_block(hidden, levels)
            # 2x2 sum-pooling (average pooling scaled by the window area).
            hidden = 4 * func.avg_pool2d(hidden, 2)
        # Global sum-pooling to one 256-d feature vector per image.
        features = hidden.size(-1) ** 2 * func.adaptive_avg_pool2d(hidden, 1)
        features = features.view(features.size(0), -1)
        quadratic = (features * self.W(features)).sum(dim=1, keepdim=True)
        linear = self.b(features)
        return quadratic + linear
class TotalEnergy(nn.Module):
    """Sum a level-conditioned energy over a fixed set of noise levels.

    For each input item the wrapped energy is evaluated at every level in
    ``levels`` and the per-level scores are summed, giving one (batch, 1)
    score tensor.
    """

    def __init__(self, energy, levels):
        super().__init__()
        self.energy = energy
        self.levels = levels

    def forward(self, data: torch.Tensor, *args):
        # Pair every sample with every level: samples vary slowest
        # (repeat_interleave), levels fastest (tiled copies).
        inputs = data.repeat_interleave(len(self.levels), dim=0)
        levels = torch.cat(data.size(0) * [self.levels], dim=0)
        factors = self.energy(inputs, levels, *args)
        # BUGFIX: the flattened scores are sample-major, so reshape to
        # (batch, levels, 1) and reduce over the level axis. The previous
        # view(-1, batch, 1).sum(dim=0) grouped with the wrong stride and
        # mixed scores across different samples.
        result = factors.view(data.size(0), -1, 1).sum(dim=1)
        return result
class ConditionalEnergy(nn.Module):
    """Energy tethered to an anchor sample by a Gaussian proximity penalty."""

    def __init__(self, energy, origin, shift=0.025):
        super().__init__()
        self.energy = energy
        # Detach so gradients never flow back into the anchor sample.
        self.origin = origin.detach()
        self.shift = shift

    def forward(self, data, level, *args):
        base_score = self.energy(data, level)
        anchor = Normal(self.origin, self.shift)
        proximity = anchor.log_prob(data)
        # Average the per-element log-densities to one penalty per item.
        proximity = proximity.view(proximity.size(0), -1).mean(dim=1, keepdim=True)
        return base_score + proximity
if __name__ == "__main__":
    # Command-line options (paths, device, batching) with the defaults below.
    opt = parse_options(
        "CIFAR10 EBM using TNCE in flex.",
        path="flexamples/cifar10-tdre-10",
        device="cuda:0",
        batch_size=8,
        max_epochs=1000,
        report_interval=1000
    )
    cifar10 = CIFAR10("examples/", download=False, transform=ToTensor())
    data = CIFAR10Dataset(cifar10)
    data = DataDistribution(
        data, batch_size=opt.batch_size,
        device=opt.device
    )
    base = Base().to(opt.device)
    energy = Energy(base).to(opt.device)
    # 100 evenly spaced noise levels in [0, 1) for the telescoping ratios.
    levels = torch.arange(0.0, 1.0, 0.01, device=opt.device)
    training = telescoping_density_ratio_training(
        energy, base, data,
        mixing=partial(
            independent_mixing,
            mixing=tdre_mixing,
            levels=levels
        ),
        optimizer_kwargs=dict(lr=1e-4),
        telescoping_step=tdre_step,
        train_base=False,
        path=opt.path,
        device=opt.device,
        batch_size=opt.batch_size,
        max_epochs=opt.max_epochs,
        report_interval=opt.report_interval
    )
    # add generating images every few steps:
    integrator = Langevin(
        rate=-0.01, noise=0.01,
        steps=5, max_norm=None,
        clamp=(-1, 1)
    )
    training.add(
        generate_step=partial(
            generate_step, energy=energy,
            base=base, integrator=integrator,
            ctx=training
        ),
        every=opt.report_interval
    )
    # training.get_step("tdre_step").extend(
    #     lambda args, ctx=None:
    #     ctx.log(real_images=LogImage(args.real_data.clamp(0, 1)))
    # )
    # Resume from a checkpoint if one exists, then run the training loop.
    training.load()
    training.train()
| [
"jendrusch@stud.uni-heidelberg.de"
] | jendrusch@stud.uni-heidelberg.de |
fa79356736541e36a07ddcd18a65b5cb23c60ad7 | b2c24abff86b28ca8a495b3a3c3227f070737aa2 | /parlai/tasks/opensubtitles/build_2018.py | 271858012beab88fe3ac123c42db3a991c6a3074 | [
"MIT"
] | permissive | hengyicai/AdaND | d5dda1b2fcd2abd17be6603de632f0515382b37b | 5e3fefb1cf40c42215a37246efc64958ae6db005 | refs/heads/master | 2023-09-01T07:38:49.076947 | 2020-10-19T04:58:00 | 2020-10-19T04:58:00 | 204,633,631 | 10 | 2 | MIT | 2023-08-11T19:52:23 | 2019-08-27T06:20:39 | Python | UTF-8 | Python | false | false | 11,239 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# Download and build the data if it does not exist.
import parlai.core.build_data as build_data
import glob
import gzip
import multiprocessing
import os
import re
import sys
import time
import tqdm
import xml.etree.ElementTree as ET
# Expected corpus size; used as sanity checks after scanning the download.
NUM_MOVIE_FOLDERS = 140044
NUM_SUBTITLES_FILES = 446612
# Utterances further apart than this many seconds start a new conversation.
MAX_TIME_DIFFERENCE_S = 2
# Accepted sentence length bounds, in tokens.
MIN_WORD_LENGTH = 2
MAX_WORD_LENGTH = 20

# remove brackets (HTML comments/tags and any (), [], {} blocks, plus ## and ~)
CLEAN_BRACKETS_REGEX = re.compile(
    r'<!--.*?-->|<[^>]*>|\([^\)]*\)|\[[^\]]*\]|\{[^\}]*\}|##|~'
)
# Usually, unbalanced brackets correspond to very noisy sentences
# '#' is usually pretty bad and means lyrics of the song
BRACKETS_CHARACTERS = ['[', ']', '(', ')', '{', '}', '<', '>', '#']
MULTI_WHITESPACES_REGEX = re.compile(r'\s+')
# Existing apostrophe tokenization in Open Subtitles is not compatible with nltk
APOSTROPHE_REPLACEMENT_REGEX = [
    (re.compile(r"(\s?)n(\s?)'(\s?)t(\s|$)"), "\\1n't\\4"),
    (re.compile(r"'(\s?)(s|re|em|im|bout|cause|ve|d|ll|ne)(\s+|$)"), " '\\2\\3"),
    # it's a common (in OpenSubtitles) spelling error to use 'il instead of 'll
    (re.compile(r"'(\s?)il(\s|$)"), " 'll\\2"),
    (re.compile(r"(\s|^)i(\s?)'(\s?)(m|mm)(\s|$)"), "\\1i 'm\\5"),
    (re.compile(r"in(\s?)'(\s|$)"), "ing\\2"),
    (re.compile(r"(\s|^)ma(\s?)'(\s?)am(\s|$)"), "\\1ma'am\\4"),
    (re.compile(r"(\s|^)c(\s?)'(\s?)mon(\s|$)"), "\\1c'mon\\4"),
    (re.compile(r"(\s|^)o(\s?)'(\s?)clock(\s|$)"), "\\1o'clock\\4"),
    (re.compile(r"(\s|^)y(\s?)'(\s?)all(\s|$)"), "\\1y'all\\4"),
]
# Some cleaning steps are adapted from existing OpenSubtitles preprocessing
# pipelines (the original source reference was not recorded).
CLEANUP_REGEX_RULES = [
    # remove speaker tag "xxx: "
    (re.compile(r'^\s*[A-z]*\s*:'), ''),
    # remove unnecessary symbols
    (re.compile(r"-{2,}"), ' '),
    # delete a space right before a period for titles
    (re.compile(r'(?<=( mr| jr| ms| dr| st|mrs)) \.'), '. '),
]
CLEANUP_REPLACE_RULES = [
    ('"', ' '),
    ("``", " "),
    ("''", " "),
    ("% %", " "),
    ("i̇", "i"),
]
def get_movie_id(filename_path):
    """Return the movie id for a subtitle file; the parent directory name is the id."""
    parent_dir = os.path.dirname(filename_path)
    movie_id = os.path.basename(parent_dir)
    return int(movie_id)
# OpenSubtitles2016 contains have several subtitles per movie,
# stored in a separate folders.
# We gather all subtitles files based on the movie they correspond to
# and apply deduplication for the extracted replicas
def get_list_of_files(top_path):
    """Map movie id -> list of .xml subtitle files found anywhere under top_path."""
    by_movie = {}
    for dirpath, _dirs, filenames in os.walk(top_path):
        for name in filenames:
            if not name.endswith('.xml'):
                continue
            full_path = os.path.realpath(os.path.join(dirpath, name))
            assert os.path.isfile(full_path), 'Bad file ' + full_path
            movie_id = get_movie_id(full_path)
            by_movie.setdefault(movie_id, []).append(full_path)
    return by_movie
def parse_xml(filepath):
    """Parse an XML file, transparently handling gzip-compressed '.gz' input."""
    if os.path.splitext(filepath)[1] == '.gz':
        with gzip.open(filepath, 'r') as handle:
            return ET.parse(handle)
    return ET.parse(filepath)
def normalize_whitespaces(sentence):
    """Collapse whitespace runs to single spaces and trim both ends."""
    collapsed = MULTI_WHITESPACES_REGEX.sub(' ', sentence)
    return collapsed.strip()
def normalize_apostrophe(sentence):
    """Retokenize apostrophe contractions so they follow nltk conventions."""
    result = normalize_whitespaces(sentence)
    for pattern, replacement in APOSTROPHE_REPLACEMENT_REGEX:
        result = pattern.sub(replacement, result)
    return result
def clean_text(words):
    """Join a token list into a normalized lowercase sentence, or None if noisy.

    Returns None for speaker prompts (trailing ':'), sentences still holding
    bracket characters after bracket-block removal, unbalanced double quotes,
    or sentences outside the accepted token-length bounds.
    """
    if len(words) > 0 and words[-1] == ':':
        return None
    sentence = ' '.join(words).strip(' -').lower()
    sentence = CLEAN_BRACKETS_REGEX.sub('', sentence)
    # Any leftover bracket char means an unbalanced (hence noisy) block.
    if len([ch for ch in BRACKETS_CHARACTERS if ch in sentence]) > 0:
        return None
    # Unescape backslash-escaped apostrophes.
    sentence = sentence.replace('\\\'', '\'')
    if sentence.count('"') % 2 == 1:
        # There are unmatched double-quotes.
        # Usually, it means a quote got splitted into separate utterances,
        # so it's bad example of a dialog
        return None
    sentence = normalize_apostrophe(sentence)
    for (regex, replacement) in CLEANUP_REGEX_RULES:
        sentence = regex.sub(replacement, sentence)
    for (pattern, replacement) in CLEANUP_REPLACE_RULES:
        sentence = sentence.replace(pattern, replacement)
    words = normalize_whitespaces(sentence).split()
    # Keep only sentences with at least one word character and an acceptable
    # token count.
    if (
        len(words) > 0
        and any(map(lambda k: re.search(r'\w', k) is not None, words))
        and len(words) >= MIN_WORD_LENGTH
        and len(words) <= MAX_WORD_LENGTH
    ):
        return ' '.join(words)
    else:
        return None
def parse_time_str(time_value_str):
    """Convert an SRT timestamp 'HH:MM:SS,mmm' to whole seconds, or None.

    Milliseconds are discarded; malformed or non-numeric input yields None.
    """
    valid_shape = (
        time_value_str is not None
        and len(time_value_str) == 12
        and time_value_str[2] == ':'
        and time_value_str[5] == ':'
        and time_value_str[8] == ','
    )
    if not valid_shape:
        return None
    try:
        hours = int(time_value_str[0:2])
        minutes = int(time_value_str[3:5])
        seconds = int(time_value_str[6:8])
    except ValueError:
        return None
    return hours * 3600 + minutes * 60 + seconds
def extract_data_from_xml(xml_object):
    """Yield conversations (lists of cleaned sentences) from one subtitle XML.

    Consecutive subtitles are grouped into one conversation while the gap
    between them stays within MAX_TIME_DIFFERENCE_S seconds; a rejected
    sentence or a larger gap closes the current conversation. Only
    conversations with more than one sentence are yielded.
    """
    previous_end_time = -1000
    conversation = []
    for sentence_node in xml_object.getroot():
        if sentence_node.tag != 's':
            continue
        words = []
        start_time, end_time = None, None
        for node in sentence_node:
            if node.tag == 'time':
                time_value = parse_time_str(node.get('value'))
                if time_value is None:
                    continue
                # Time ids end in 'S' (start marker) or 'E' (end marker);
                # keep the earliest start and latest end seen.
                if node.get('id')[-1] == 'S':
                    start_time = (
                        time_value
                        if start_time is None
                        else min(time_value, start_time)
                    )
                elif node.get('id')[-1] == 'E':
                    end_time = (
                        time_value if end_time is None else max(time_value, end_time)
                    )
                else:
                    raise Exception('Unknown time-id for node: %s' % node)
            elif node.tag == 'w':
                # Word token; empty nodes are skipped.
                if node.text is not None and len(node.text) > 0:
                    words.append(node.text)
            else:
                pass
        sentence = clean_text(words)
        # Fall back to the previous subtitle's end time when missing.
        start_time = start_time or previous_end_time
        end_time = end_time or previous_end_time
        # add to the conversation
        # flush and start new conversation
        if (
            sentence is not None
            and start_time - previous_end_time <= MAX_TIME_DIFFERENCE_S
        ):
            conversation.append(sentence)
        else:
            if len(conversation) > 1:
                yield conversation
            conversation = []
            if sentence is not None:
                conversation.append(sentence)
        previous_end_time = max(start_time, end_time)
def conversation_to_fb_format(conversation):
    """Render a conversation as numbered Facebook-dialog lines of utterance pairs.

    Each line holds 'N utterance<TAB>reply'; a trailing unpaired utterance is
    emitted on its own numbered line without a reply.
    """
    assert len(conversation) > 1
    lines = []
    for pair_index, start in enumerate(range(0, len(conversation), 2), start=1):
        if start + 1 < len(conversation):
            lines.append('%d %s\t%s' % (pair_index, conversation[start], conversation[start + 1]))
        else:
            lines.append('%d %s' % (pair_index, conversation[start]))
    return '\n'.join(lines)
def conversation_to_basic_format(conversation):
    """Render a conversation as independent single-turn '1 utterance<TAB>reply' lines."""
    assert len(conversation) > 1
    pairs = [
        '1 %s\t%s' % (first, second)
        for first, second in zip(conversation, conversation[1:])
    ]
    return '\n'.join(pairs)
class DataProcessor(object):
    """Callable worker that converts one movie's subtitle files to dialog text.

    Instances are handed to multiprocessing.Pool.imap, so the class holds only
    picklable configuration (the output-format flag).
    """

    def __init__(self, use_history):
        # When True, emit numbered multi-turn dialogs (fb format); otherwise
        # independent single-turn pairs.
        self.use_history = use_history

    def __call__(self, movie_id_with_files):
        movie_id, files = movie_id_with_files
        # A set deduplicates identical conversations extracted from multiple
        # subtitle versions of the same movie (output order is unspecified).
        data = set()
        for filepath in files:
            try:
                xml_object = parse_xml(filepath)
                for conversation in extract_data_from_xml(xml_object):
                    if self.use_history:
                        data.add(conversation_to_fb_format(conversation))
                    else:
                        data.add(conversation_to_basic_format(conversation))
            except ET.ParseError:
                # TODO: We possibly can log these errors,
                # but I'm not sure how it would intervene with the PrograssLogger
                pass
            except Exception:
                print(
                    'Unexpected error for file %s:\n%s' % (filepath, sys.exc_info()[0]),
                    file=sys.stderr,
                )
                raise
        data_str = '\n'.join(data) + ('\n' if len(data) > 0 else '')
        return data_str
def create_fb_format(inpath, outpath, use_history):
    """Convert the extracted OpenSubtitles corpus into train/valid/test files.

    Movies are split 8/1/1 (train/test/valid) by enumeration order; extraction
    runs in a process pool, one movie per task.
    """
    print('[building fbformat]')
    start_time = time.time()
    ftrain = open(os.path.join(outpath, 'train.txt'), 'w')
    fvalid = open(os.path.join(outpath, 'valid.txt'), 'w')
    ftest = open(os.path.join(outpath, 'test.txt'), 'w')
    movie_dirs = get_list_of_files(inpath)
    total_movie_dirs = len(movie_dirs)
    total_files = sum([len(l) for l in movie_dirs.values()])
    print(
        '[Found %d movie folders and %d subtitles within %s in %d seconds]'
        % (total_movie_dirs, total_files, inpath, time.time() - start_time)
    )
    # Guard against partial downloads before spending hours on extraction.
    assert total_movie_dirs == NUM_MOVIE_FOLDERS, 'Incorrect number of movies'
    assert total_files == NUM_SUBTITLES_FILES, 'Incorrect number of files'
    processor = DataProcessor(use_history)
    with multiprocessing.Pool(processes=os.cpu_count()) as pool:
        for i, s in enumerate(pool.imap(processor, tqdm.tqdm(movie_dirs.items()))):
            handle = ftrain
            # TODO: Shall we use smaller valid/test sets? Even 10% is A LOT here
            if i % 10 == 0:
                handle = ftest
            if i % 10 == 1:
                handle = fvalid
            handle.write(s)
    ftrain.close()
    fvalid.close()
    ftest.close()
    print(
        '[Data has been successfully extracted in %d seconds]'
        % (time.time() - start_time,)
    )
def build(datapath, use_history):
    """Download/extract OpenSubtitles2018 and build the fbformat files once.

    Uses ParlAI's build_data markers so repeated calls are no-ops; returns the
    directory holding the generated train/valid/test files.
    """
    dpath = os.path.join(datapath, 'OpenSubtitles2018')
    if not use_history:
        dpath += '_no_history'
    version = '1'
    if not build_data.built(dpath, version_string=version):
        print('[building data: ' + dpath + ']')
        if build_data.built(dpath):
            # An older version exists, so remove these outdated files.
            build_data.remove_dir(dpath)
        build_data.make_dir(dpath)
        untar_path = os.path.join(dpath, 'OpenSubtitles', 'xml', 'en')
        # Skip the download/untar if a complete extraction is already present.
        if len(glob.glob(untar_path + '/*/*/*.xml')) != NUM_SUBTITLES_FILES:
            # Download the data.
            url = 'https://object.pouta.csc.fi/OPUS-OpenSubtitles/v2018/xml/en.zip'
            build_data.download(url, dpath, 'OpenSubtitles2018.zip')
            build_data.untar(dpath, 'OpenSubtitles2018.zip')
        create_fb_format(untar_path, dpath, use_history)
        # Mark the data as built.
        build_data.mark_done(dpath, version_string=version)
    return dpath
| [
"caihengyi@ict.ac.cn"
] | caihengyi@ict.ac.cn |
256c7ad0a85da1ed140fc481079c81fb2324dffd | 05390b91e88413591a50b5128ee452ced0955630 | /task/pretrained/baike/parser.py | 59f4b0f93454fa70575e38bcefb1f4d44708b934 | [
"Apache-2.0"
] | permissive | ssunqf/nlp-exp | d0387ce770522b99747ae505ab0b8d45b7b762b9 | f4ebc77d2f7b85b95a12d2cb47196a950c8a1a1f | refs/heads/master | 2020-03-13T13:09:38.473593 | 2019-11-04T09:25:04 | 2019-11-04T09:25:04 | 131,133,319 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,663 | py | #!/usr/bin/env python3.6
# -*- coding: utf-8 -*-
from typing import Dict, List, Tuple
import torch
from torch import nn
from .base import make_masks
from .attention import MultiHeadedAttention
class SpanScorer(nn.Module):
    """Score every span of each sequence in a batch with per-label logits."""

    def __init__(self, hidden_size, num_labels):
        super(SpanScorer, self).__init__()
        self.hidden_size = hidden_size
        self.num_labels = num_labels
        # Two-layer MLP over span representations.
        # BUGFIX: the output layer must project hidden_size -> num_labels;
        # the previous nn.Linear(num_labels, num_labels) could not consume
        # the hidden_size-dimensional activations of the first layer.
        self.scorer = nn.Sequential(
            nn.Linear(hidden_size, self.hidden_size),
            nn.ReLU(),
            nn.Linear(self.hidden_size, self.num_labels)
        )

    def forward(self, hiddens: torch.Tensor, lens: torch.Tensor) -> Tuple[List[Dict[Tuple[int, int], int]], List[torch.Tensor]]:
        """Return per-sequence span->row-index charts and matching score tensors.

        hiddens: (time, batch, hidden) encoder states. The right+1 lookup in
        _make_chart assumes time covers seq_len + 2 positions — TODO confirm
        against the encoder's padding convention.
        """
        charts, spans = self._make_chart(hiddens, lens)
        scores = self.scorer(spans).split([len(chart) for chart in charts])
        charts = [{span: i for i, span in enumerate(chart)} for chart in charts]
        return charts, scores

    def _make_chart(self, hiddens, lens):
        """Enumerate (left, right) spans per sequence and build their vectors.

        A span vector concatenates forward-state differences at the span
        boundaries with backward-state differences (BiLSTM-minus features).
        """
        spans = []
        charts = []
        for bid, seq_len in enumerate(lens):
            charts.append([])
            for length in range(1, seq_len):
                for left in range(0, seq_len + 1 - length):
                    right = left + length
                    charts[-1].append((left, right))
                    spans.append(torch.cat((hiddens[right, bid, :self.hidden_size//2] - hiddens[left, bid, :self.hidden_size//2],
                                            hiddens[left+1, bid, self.hidden_size//2:] - hiddens[right+1, bid, self.hidden_size//2:]), dim=-1))
        spans = torch.stack(spans, dim=0)
        return charts, spans
class Parser(nn.Module):
    # NOTE(review): this class appears unfinished and does not parse:
    # - __init__ stores the nn.LSTM and SpanScorer *classes* instead of the
    #   `encoder`/`scorer` arguments it receives;
    # - forward calls self.scorers (undefined attribute), leaves a sum(...)
    #   expression syntactically unclosed, and references `spans`, which is
    #   never bound;
    # - loss indexes `seq_spans[]` with an empty subscript.
    # Kept byte-identical pending a decision on the intended implementation.
    def __init__(self,
                 embedding: nn.Embedding,
                 encoder: nn.LSTM,
                 attention: MultiHeadedAttention,
                 scorer: SpanScorer):
        super(Parser, self).__init__()
        self.embedding = embedding
        self.encoder = nn.LSTM
        self.attention = attention
        self.scorer = SpanScorer

    def forward(self, input, lens, trees):
        embed = self.embedding(input)
        hidden, _ = self.encoder(embed)
        hidden, _ = self.attention(hidden, hidden, hidden, mask=make_masks(input, lens))
        charts, scores = self.scorers(hidden, lens)
        sum(self.loss(score, chart, root) for chart, _scores, root in zip(charts, scores, trees)
        loss = sum(self.loss(_spans, tree) for _spans, tree in zip(spans, trees))
        return loss

    def loss(self, seq_spans, chart, root):
        loss = 0
        for node in root.traverse():
            loss += seq_spans[]
        return loss
| [
"ssunqf@163.com"
] | ssunqf@163.com |
b822848a1ac865bd4987aa491f38e5c30cbc4753 | bba055ef8f57536e1829aa10dcb2529302bb2226 | /main.py | ca7dd534eca351dcd33be6b750829d8def9dc40b | [] | no_license | vikas-ukani/fast-news | 391297bc8c779ead0951f6e540ea98a03ac67673 | b27ea14fec353c870fdb833cc945ee4a71f011cf | refs/heads/main | 2023-07-23T03:23:32.918497 | 2021-08-25T20:17:42 | 2021-08-25T20:17:42 | 399,896,155 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,097 | py | # from db.db_config import db_close, db_connection
from fastapi import FastAPI, Depends
from sqlalchemy.orm.session import Session
from api_requests.news_request import NewsCreate
from db.db_config import engine, get_db
from model import models
# Application instance; tables are created eagerly at import time.
app = FastAPI()
models.Base.metadata.create_all(bind=engine)

# Startup/shutdown handlers for an explicit database connection, kept for
# reference but currently disabled.
# @app.on_event("startup")
# async def database_connect():
#     await db_connection()
# # Database Connection DROP
# @app.on_event("shutdown")
# async def database_disconnect():
#     await db_close()
@app.get('/')
def index():
    """Root endpoint returning a static welcome message."""
    payload = {"message": "Welcome to FastAPI"}
    return payload
@app.get('/news')
def news():
    """List endpoint; currently returns an empty collection placeholder."""
    response = {"news": [], 'message': 'All news available here'}
    return response
@app.get('/news/{id}')
def getNews(id: int):
    """Detail endpoint echoing the requested news id."""
    detail = {"id": id}
    return {
        'detail': detail,
        'message': f"News Details available for id={id} news"
    }
@app.post('/news')
def create_news(request: NewsCreate, db: Session = Depends(get_db)):
    """Persist a news item from the request body and return the stored record."""
    stored = models.create_news(db=db, news=request)
    return {
        "data": stored,
        "message": "News saved..."
    }
| [
"vikasukani5@gmail.com"
] | vikasukani5@gmail.com |
6f30be9808f9005bfe03e507dd059d99ee5ba97b | 6604f5fdecd62cd59908b3b359b34991c8ef1148 | /blog/views.py | 4cab991e1e1665bed8c30710861e8061187680a6 | [] | no_license | LEMB1999/django_blog | d4bb9e2ca39492094e867730ed4553da00515eab | 4f665b9e8a36fdd91f7cbbcec75eb1f299fd4a7a | refs/heads/master | 2023-04-10T08:11:50.083546 | 2021-04-23T18:12:28 | 2021-04-23T18:12:28 | 360,970,086 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 292 | py | # blog/views.py
from django.views.generic import ListView,DetailView
from .models import Post
# Create your views here.
class BlogListView(ListView):
    """List all Post objects using the 'home.html' template."""
    model = Post
    template_name = 'home.html'
class BlogDetailView(DetailView):
    """Show a single Post using the 'post_detail.html' template."""
    model = Post
    template_name = 'post_detail.html'
"mendezemmanuel1999@gmail.com"
] | mendezemmanuel1999@gmail.com |
efdfe5a0a9fd4511946056b84590b1ff8569b14c | 4e7669f4234dbbcc6ef8206ac43bba33c53b8d1e | /Predictions/DataProcessing.py | 73ecbdb3270823e4d440ea305f64b9d0f26fce93 | [] | no_license | chouhansolo/edbn | 46cadbcb8d4e079cee746868663379b5b825286b | 63cfcd7e5e5e17242aed3b1968119e85b2796015 | refs/heads/master | 2023-04-05T06:54:26.380161 | 2021-04-21T08:50:30 | 2021-04-21T08:50:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,175 | py | """
Author: Stephen Pauwels
"""
import os
import pickle
import pandas as pd
from RelatedMethods.Camargo.support_modules.role_discovery import role_discovery
from Utils.LogFile import LogFile
# Dataset identifiers accepted by get_data().
BPIC15 = "BPIC15"
BPIC15_1 = "BPIC15_1"
BPIC15_2 = "BPIC15_2"
BPIC15_3 = "BPIC15_3"
BPIC15_4 = "BPIC15_4"
BPIC15_5 = "BPIC15_5"
BPIC12 = "BPIC12"
BPIC12W = "BPIC12W"
HELPDESK = "HELPDESK"
BPIC18 = "BPIC18"

# Directory where preprocessed logfiles are cached as pickles.
LOGFILE_PATH = "../Data/Logfiles"
def preprocess(logfile, add_end, reduce_tasks, resource_pools, resource_attr, remove_resource):
    """Apply optional role discovery, start/end padding and event dedup, then int-encode.

    Mutates and returns `logfile`. `resource_attr` names the resource column;
    after role discovery (or renaming) the column used downstream is "role".
    """
    # Discover Roles
    if resource_pools and resource_attr is not None:
        # Cluster resources into roles (0.5 similarity threshold) and join
        # the role assignment onto the event log.
        resources, resource_table = role_discovery(logfile.get_data(), resource_attr, 0.5)
        log_df_resources = pd.DataFrame.from_records(resource_table)
        log_df_resources = log_df_resources.rename(index=str, columns={"resource": resource_attr})
        print(logfile.data)
        logfile.data = logfile.data.merge(log_df_resources, on=resource_attr, how='left')
        logfile.categoricalAttributes.add("role")
        if remove_resource:
            logfile.data = logfile.data.drop([resource_attr], axis=1)
        resource_attr = "role"
    else:
        # Without pooling, simply treat the raw resource column as the role.
        logfile.data = logfile.data.rename(columns={resource_attr: "role"})
        logfile.categoricalAttributes.add("role")
    print(logfile.data)
    if add_end:
        # Wrap every case with artificial "start" and "end" events.
        cases = logfile.get_cases()
        new_data = []
        for case_name, case in cases:
            record = {}
            for col in logfile.data:
                if col == logfile.trace:
                    record[col] = case_name
                else:
                    record[col] = "start"
            new_data.append(record)
            for i in range(0, len(case)):
                new_data.append(case.iloc[i].to_dict())
            record = {}
            for col in logfile.data:
                if col == logfile.trace:
                    record[col] = case_name
                else:
                    record[col] = "end"
            new_data.append(record)
        logfile.data = pd.DataFrame.from_records(new_data)
    # Check for duplicate events with the same resource and keep only the
    # first of each consecutive (activity, resource) repetition.
    if reduce_tasks and resource_attr is not None:
        cases = logfile.get_cases()
        reduced = []
        for case_name, case in cases:
            reduced.append(case.iloc[0].to_dict())
            current_trace = [case.iloc[0][[logfile.activity, resource_attr]].values]
            for i in range(1, len(case)):
                if case.iloc[i][logfile.activity] == current_trace[-1][0] and \
                        case.iloc[i][resource_attr] == current_trace[-1][1]:
                    pass
                else:
                    current_trace.append(case.iloc[i][[logfile.activity, resource_attr]].values)
                    reduced.append(case.iloc[i].to_dict())
        logfile.data = pd.DataFrame.from_records(reduced)
        print("Removed duplicated events")
    # Integer-encode all categorical attributes in place.
    logfile.convert2int()
    return logfile
def get_data(dataset, dataset_size, k, add_end, reduce_tasks, resource_pools, remove_resource):
    """Load a preprocessed LogFile for a named dataset, caching the result.

    The cache key encodes the dataset name, size cap, context size k and the
    four boolean preprocessing flags. Returns (logfile, cache_name), or None
    for an unknown dataset name.
    """
    filename_parts = [dataset, str(dataset_size), str(k)]
    # Encode each boolean flag as "1"/"0" in the cache file name.
    for v in [add_end, reduce_tasks, resource_pools, remove_resource]:
        if v:
            filename_parts.append(str(1))
        else:
            filename_parts.append(str(0))
    print(filename_parts)
    cache_file = LOGFILE_PATH + "/" + "_".join(filename_parts)
    colTitles = []
    if os.path.exists(cache_file):
        print("Loading file from cache")
        # NOTE(review): pickle.load assumes the cache directory is trusted;
        # never point LOGFILE_PATH at user-writable data.
        with open(cache_file, "rb") as pickle_file:
            preprocessed_log = pickle.load(pickle_file)
    else:
        resource_attr = None
        if dataset == BPIC15_1 or dataset == BPIC15:
            logfile = LogFile("../Data/BPIC15_1_sorted_new.csv", ",", 0, dataset_size, "Complete Timestamp", "Case ID", activity_attr="Activity", convert=False, k=k)
            resource_attr = "Resource"
            colTitles = ["Case ID", "Activity", "Resource"]
            logfile.keep_attributes(colTitles)
            logfile.filter_case_length(5)
        elif dataset == BPIC15_2:
            logfile = LogFile("../Data/BPIC15_2_sorted_new.csv", ",", 0, dataset_size, "Complete Timestamp", "Case ID",
                              activity_attr="Activity", convert=False, k=k)
            resource_attr = "Resource"
            colTitles = ["Case ID", "Activity", "Resource"]
            logfile.keep_attributes(colTitles)
            logfile.filter_case_length(5)
        elif dataset == BPIC15_3:
            logfile = LogFile("../Data/BPIC15_3_sorted_new.csv", ",", 0, dataset_size, "Complete Timestamp", "Case ID", activity_attr="Activity", convert=False, k=k)
            resource_attr = "Resource"
            colTitles = ["Case ID", "Activity", "Resource"]
            logfile.keep_attributes(colTitles)
            logfile.filter_case_length(5)
        elif dataset == BPIC15_4:
            logfile = LogFile("../Data/BPIC15_4_sorted_new.csv", ",", 0, dataset_size, "Complete Timestamp", "Case ID", activity_attr="Activity", convert=False, k=k)
            resource_attr = "Resource"
            colTitles = ["Case ID", "Activity", "Resource"]
            logfile.keep_attributes(colTitles)
            logfile.filter_case_length(5)
        elif dataset == BPIC15_5:
            logfile = LogFile("../Data/BPIC15_5_sorted_new.csv", ",", 0, dataset_size, "Complete Timestamp", "Case ID", activity_attr="Activity", convert=False, k=k)
            resource_attr = "Resource"
            colTitles = ["Case ID", "Activity", "Resource"]
            logfile.keep_attributes(colTitles)
            logfile.filter_case_length(5)
        elif dataset == BPIC12:
            logfile = LogFile("../Data/BPIC12.csv", ",", 0, dataset_size, "completeTime", "case", activity_attr="event", convert=False, k=k)
            resource_attr = "org:resource"
            colTitles = ["case", "event", "org:resource"]
            logfile.keep_attributes(colTitles)
            logfile.filter_case_length(5)
        elif dataset == BPIC12W:
            logfile = LogFile("../Data/BPIC12W.csv", ",", 0, dataset_size, "completeTime", "case", activity_attr="event", convert=False, k=k)
            resource_attr = "org:resource"
            colTitles = ["case", "event", "org:resource"]
            logfile.keep_attributes(colTitles)
            logfile.filter_case_length(5)
        elif dataset == HELPDESK:
            logfile = LogFile("../Data/Helpdesk.csv", ",", 0, dataset_size, "completeTime", "case", activity_attr="event", convert=False, k=k)
            resource_attr = "Resource"
            colTitles = ["case", "event", "Resource"]
            logfile.keep_attributes(colTitles)
            logfile.filter_case_length(3)
        elif dataset == BPIC18:
            # BPIC18 has no resource attribute configured; "subprocess" is kept instead.
            logfile = LogFile("../Data/BPIC18.csv", ",", 0, dataset_size, "startTime", "case", activity_attr="event", convert=False, k=k)
            colTitles = ["case", "event", "subprocess"]
            logfile.keep_attributes(colTitles)
        else:
            print("Unknown Dataset")
            return None
        # Preprocess, build the k-history context, and cache for next time.
        preprocessed_log = preprocess(logfile, add_end, reduce_tasks, resource_pools, resource_attr, remove_resource)
        preprocessed_log.create_k_context()
        with open(cache_file, "wb") as pickle_file:
            pickle.dump(preprocessed_log, pickle_file)
    return preprocessed_log, "_".join(filename_parts)
def calc_charact():
    """Print basic statistics (events, cases, activities) for each known dataset."""
    import numpy as np
    print("Calculating characteristics")
    datasets = [BPIC12, BPIC12W, BPIC15_1, BPIC15_2, BPIC15_3, BPIC15_4, BPIC15_5, HELPDESK]
    for dataset in datasets:
        # Load the full dataset (size cap effectively unlimited, no preprocessing flags).
        logfile, name = get_data(dataset, 20000000, 0, False, False, False, True)
        cases = logfile.get_cases()
        case_lengths = [len(c[1]) for c in cases]
        print("Logfile:", name)
        print("Num events:", len(logfile.get_data()))
        print("Num cases:", len(cases))
        print("Num activities:", len(logfile.get_data()[logfile.activity].unique()))
        print("Avg activities in case:", np.average(case_lengths))
        print("Max activities in case:", max(case_lengths))
        print()


if __name__ == "__main__":
    calc_charact()
| [
"stephen.pauwels@uantwerpen.be"
] | stephen.pauwels@uantwerpen.be |
86e31e072867f5db0e20e27b4ccb8b3ed9e54b0f | 06e485d37f1b34e6354275ed97aff9805384e716 | /tests/test_serialisation.py | f2857c90a419d2c789633e446e596bc45c4d9050 | [
"MIT"
] | permissive | binaryaffairs/a-la-mode | af697461978d33c1e7a48dc58143f7ea5d6e1fea | d3ff176e473ac8ce606e6907c060aa9c128cd49e | refs/heads/master | 2020-12-31T11:26:41.591048 | 2020-05-19T10:10:50 | 2020-05-19T10:10:50 | 239,017,686 | 73 | 4 | null | null | null | null | UTF-8 | Python | false | false | 606 | py | import pytest
import hashlib
from deepdiff import DeepDiff
from a_la_mode import Dag, sha
from .shared import eg_dag, encoded_dag
def sha_file(file):
    """Return the hex SHA-256 digest of *file* (a Path-like), read in 4 KiB chunks."""
    digest = hashlib.sha256()
    with file.open("rb") as handle:
        while True:
            chunk = handle.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def test_roundtrip(tmpdir):
    """Saving then loading eg_dag must reproduce encoded_dag exactly."""
    tmpfile = tmpdir.join("eg.dag")
    eg_dag.save(tmpfile)
    # BUGFIX: DeepDiff returns an empty (falsy) diff when the objects match,
    # so the round-trip check must assert the diff is empty. The previous
    # `assert DeepDiff(...)` asserted that the objects DIFFER.
    assert not DeepDiff(Dag.load(tmpfile), encoded_dag)
def test_sha_of_file(tmpdir):
    """The sha of the in-memory bencoding must equal the sha of the saved file."""
    saved = tmpdir.join("eg.dag")
    eg_dag.save(saved)
    assert sha(eg_dag.bencode()) == sha_file(saved)
| [
"thattommyhall@gmail.com"
] | thattommyhall@gmail.com |
6b446d65a205357f8259e3c079c4ec1ea290d095 | 364ade8c1f0aca17a4123bee1cac338cfe4a7010 | /mardesign/wsgi.py | de85e1f1031182f76b787bad3c7df628ad0695d6 | [] | no_license | crootz/mardesign | d5509101b40eadaa5a6e2029b542a63c9f9debec | 8c5966d5ffe38f3aa55a12292cfe449b18159cd3 | refs/heads/master | 2023-02-12T14:18:57.704635 | 2021-01-08T11:54:08 | 2021-01-08T11:54:08 | 326,398,057 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
WSGI config for mardesign project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings, then expose the WSGI callable.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mardesign.settings')
application = get_wsgi_application()
| [
"adam_ac_@hotmail.com"
] | adam_ac_@hotmail.com |
240b97aea52be8a26c3a5cf1be0c510ebeff50e0 | bff37773d1e6c3f4bf8ae4eaa64d7a2d563ecf68 | /backend/users/migrations/0002_auto_20201217_0711.py | 46bfcb7d1b2be14df2fe21ac2b64e683539ccceb | [] | no_license | crowdbotics-apps/mobile-17-dec-dev-16856 | d405478f85248047e00ed97cd4b61fa5ca2a8fd6 | b5c60c39b4e6715b17fa1e7dff6c72527f6ae967 | refs/heads/master | 2023-02-03T00:16:51.489994 | 2020-12-17T14:12:46 | 2020-12-17T14:12:46 | 322,203,166 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py | # Generated by Django 2.2.17 on 2020-12-17 07:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make users.User.name nullable and blank-allowed."""

    dependencies = [
        ("users", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="user",
            name="name",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
ffe3bf11275679f55860c58974e3e10318d71af7 | 8a20138d6d46ea262d89a1bc076fc66da250b1c9 | /carrots/carrots.py | 01a4b5914d226c49a4b71dd7335ab2e8094358fc | [
"MIT"
] | permissive | chrisgzf/kattis | cb0dec15b3610cdf7ab0d6f94087e76eef9261b4 | 7b66474c040a31cfc997863141f57a7c81f6ebab | refs/heads/master | 2020-05-03T03:20:41.198620 | 2019-04-01T12:41:09 | 2019-04-01T12:41:09 | 178,395,276 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32 | py | a, b = input().split()
print(b)
| [
"chrisgzf@gmail.com"
] | chrisgzf@gmail.com |
1c975da9b297dfac0f22eb4c928d6c4b3970af84 | 44a1eb956a2d1ee328cce83becda7202ac983a9f | /api/migrations/0006_auto__del_field_message_author__add_field_message_author_id__add_field.py | ee11de83b958ac67d09429c3fad2c857f822c5a0 | [] | no_license | pandotree/Ring | d6a5f938d9acf20fc6ebc0ae705cf81a567b8263 | 9f35d137a3a019c0aef823eeeb85d1bc9f34f7c8 | refs/heads/master | 2021-08-18T01:07:31.572007 | 2012-09-19T02:37:39 | 2012-09-19T02:37:39 | 5,176,096 | 0 | 0 | null | 2021-06-10T17:37:59 | 2012-07-25T07:35:53 | JavaScript | UTF-8 | Python | false | false | 7,943 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the `api` app.

    Replaces the single integer `author` column on Message and PinnedItem
    with denormalized `author_id` + `author_name` columns.
    """

    def forwards(self, orm):
        # Deleting field 'Message.author'
        db.delete_column('api_message', 'author')

        # Adding field 'Message.author_id'
        db.add_column('api_message', 'author_id',
                      self.gf('django.db.models.fields.IntegerField')(default=-1),
                      keep_default=False)

        # Adding field 'Message.author_name'
        db.add_column('api_message', 'author_name',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=64),
                      keep_default=False)

        # Deleting field 'PinnedItem.author'
        db.delete_column('api_pinneditem', 'author')

        # Adding field 'PinnedItem.author_id'
        db.add_column('api_pinneditem', 'author_id',
                      self.gf('django.db.models.fields.IntegerField')(default=-1),
                      keep_default=False)

        # Adding field 'PinnedItem.author_name'
        db.add_column('api_pinneditem', 'author_name',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=64),
                      keep_default=False)

    def backwards(self, orm):
        # Adding field 'Message.author'
        db.add_column('api_message', 'author',
                      self.gf('django.db.models.fields.IntegerField')(default=-1),
                      keep_default=False)

        # Deleting field 'Message.author_id'
        db.delete_column('api_message', 'author_id')

        # Deleting field 'Message.author_name'
        db.delete_column('api_message', 'author_name')

        # Adding field 'PinnedItem.author'
        db.add_column('api_pinneditem', 'author',
                      self.gf('django.db.models.fields.IntegerField')(default=-1),
                      keep_default=False)

        # Deleting field 'PinnedItem.author_id'
        db.delete_column('api_pinneditem', 'author_id')

        # Deleting field 'PinnedItem.author_name'
        db.delete_column('api_pinneditem', 'author_name')

    # Frozen ORM state generated by South; used to reconstruct model classes
    # at migration time. Do not edit by hand.
    models = {
        'api.groups': {
            'Meta': {'object_name': 'Groups'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'group_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'group_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'user_set'", 'symmetrical': 'False', 'to': "orm['api.Users']"})
        },
        'api.message': {
            'Meta': {'object_name': 'Message'},
            'author_id': ('django.db.models.fields.IntegerField', [], {}),
            'author_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'content': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'sent': ('django.db.models.fields.DateTimeField', [], {}),
            'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['api.MessageThread']"})
        },
        'api.messagethread': {
            'Meta': {'object_name': 'MessageThread'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['api.Groups']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'subject': ('django.db.models.fields.TextField', [], {})
        },
        'api.pinneditem': {
            'Meta': {'object_name': 'PinnedItem'},
            'author_id': ('django.db.models.fields.IntegerField', [], {}),
            'author_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'caption': ('django.db.models.fields.TextField', [], {}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['api.Groups']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '300'})
        },
        'api.users': {
            'Meta': {'object_name': 'Users'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'phone_number': ('django.db.models.fields.IntegerField', [], {}),
            'preferred_contact_method': ('django.db.models.fields.IntegerField', [], {}),
            'university': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }
complete_apps = ['api'] | [
"gracewang92@gmail.com"
] | gracewang92@gmail.com |
3e70084529dd783e8832f46cb66eacd6cdde9156 | a0f6ff6f0c7ba13398f1d7a59fbdbaf42c3e6ece | /爬取各大娱乐网/1.py | c5f9246af140ecc87e51d86c8833b20c0a2dd87b | [] | no_license | yuzsmc/python | 236ba66d2a2287280fe0a96c3c083d5f4dd0438f | 36d1d9e3129e3b5c0d04b2a25b5178625fb549c6 | refs/heads/master | 2020-06-07T17:21:19.882746 | 2019-06-14T09:10:43 | 2019-06-14T09:10:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,375 | py | from bs4 import BeautifulSoup
import urllib.request
import pymysql
import datetime
import time
import threading
# Today's date as MM-DD; scraped posts are matched against this stamp.
now_time = datetime.datetime.now().strftime('%m-%d')
sqlhost = input("输入数据库地址:")
sqluser = input("输入数据库账号:")
sqlpass = input("输入数据库密码:")
sqldb = input("输入数据库名:")
settime = input("输入多久执行一次(秒):")
# Connect to the MySQL database using the credentials prompted above.
#conn = pymysql.connect(host="127.0.0.1", user="root",password="root",database="daohang",charset="utf8")
conn = pymysql.connect(host=sqlhost, user=sqluser,password=sqlpass,database=sqldb,charset="utf8")
# Cursor object used to execute SQL statements.
cursor = conn.cursor()
# Helper below inserts one scraped row into the yq_dh_content table.
def insertdh(title, time, taburl, user, index):
    """Insert one scraped article into yq_dh_content, skipping duplicates.

    :param title: article title (scraped, untrusted text)
    :param time: MM-DD date stamp
    :param taburl: absolute source URL of the article
    :param user: owning user name
    :param index: source-site category id (string)
    """
    # Security fix: the original built SQL by concatenating
    # pymysql.escape_string()-ed values into the statement; a crafted
    # title could still break out of the quoting. Parameterized queries
    # let the driver do all escaping safely.
    selsql = 'select * from yq_dh_content where title=%s and user=%s'
    cursor.execute(selsql, (title, user))
    info = cursor.fetchall()
    if len(info) <= 0:
        sql = ('insert into yq_dh_content set `title`=%s, `time`=%s, '
               '`class`=%s, `from`=%s, `user`=%s')
        if cursor.execute(sql, (title, time, index, taburl, user)):
            print("新增一条")
def xd0():  # xd0.com entertainment site
    """Scrape today's posts from xd0.com and store them as category '1'."""
    base = 'https://www.xd0.com'
    page = urllib.request.urlopen(base).read()
    soup = BeautifulSoup(page, 'html.parser', from_encoding='utf-8')
    for item in soup.find_all('li', class_="column half"):
        # Keep only entries whose date stamp matches today's MM-DD.
        if item.find("span").get_text() == now_time:
            link = item.find("a")
            article_url = base + link.attrs["href"]
            insertdh(link.get_text(), now_time, article_url, "admin", '1')
def iqshw():  # iqshw.com life site
    """Scrape today's posts from iqshw.com and store them as category '2'."""
    base = 'https://www.iqshw.com'
    page = urllib.request.urlopen(base).read()
    soup = BeautifulSoup(page, 'html.parser', from_encoding='GBK')
    rows = soup.find_all('div', class_="news-comm-wrap")[0].find_all("ul", class_="f_l")[0].find_all("li")
    for item in rows:
        stamp = item.find("span")
        # Some <li> rows are separators without a date span; skip those.
        if stamp and stamp.get_text() == now_time:
            link = item.find("a")
            article_url = base + link.attrs["href"]
            insertdh(link.get_text(), now_time, article_url, "admin", '2')
def z115():  # 115z.com
    """Scrape today's posts from 115z.com and store them as category '3'."""
    base = 'https://www.115z.com'
    page = urllib.request.urlopen(base).read()
    soup = BeautifulSoup(page, 'html.parser', from_encoding='GBK')
    rows = soup.find_all('div', class_="r-content")[0].find("ul").find_all("li")
    for item in rows:
        if item.find("a").find("i"):
            # An <i> tag inside the link marks an advertisement row.
            print("绕过一条广告")
        elif item.find("font").get_text() == now_time:
            link = item.find("a")
            article_url = base + link.attrs["href"]
            insertdh(link.get_text(), now_time, article_url, "admin", '3')
def xkw():  # kjsv.com
    """Scrape today's posts from kjsv.com and store them as category '4'."""
    base = 'https://www.kjsv.com'
    page = urllib.request.urlopen(base).read()
    soup = BeautifulSoup(page, 'html.parser', from_encoding='GBK')
    for item in soup.find_all('li', id="li-box"):
        if item.find("div"):
            # A nested <div> marks an advertisement row.
            print("绕过一条广告")
        elif item.find("a"):
            if item.find("span").get_text() == now_time:
                link = item.find("a")
                article_url = base + link.attrs["href"]
                insertdh(link.get_text(), now_time, article_url, "admin", '4')
def qqyewu():  # qqyewu.com (QQ services site)
    """Scrape today's posts from both qqyewu.com sections as category '5'."""
    base = 'http://www.qqyewu.com'
    page = urllib.request.urlopen(base).read()
    soup = BeautifulSoup(page, 'html.parser', from_encoding='GBK')
    main_rows = soup.find('div', class_="link_con").find("ul").find_all("li")
    recommend_rows = soup.find('div', class_="recommend bor").find("ul").find_all("li")
    for item in main_rows:
        if item.find('span').find('em').get_text() == now_time:
            link = item.find("a", class_="titname")
            article_url = base + link.attrs["href"]
            insertdh(link.get_text(), now_time, article_url, "admin", '5')
    for item in recommend_rows:
        if item.find('span').find('em').get_text() == now_time:
            link = item.find("a")
            article_url = base + link.attrs["href"]
            # The recommended list carries the title in the anchor's attribute.
            insertdh(link.attrs['title'], now_time, article_url, "admin", '5')
def t1():
    """Run one scraping pass over every supported site, in fixed order."""
    for scraper in (xd0, iqshw, z115, xkw, qqyewu):
        scraper()
def sleeptime(hour, min, sec):
    """Convert an hours/minutes/seconds interval into total seconds."""
    total_seconds = 3600 * hour
    total_seconds += 60 * min
    total_seconds += sec
    return total_seconds
second = sleeptime(0, 0, int(settime))
# Bug fix: the cursor/connection cleanup sat below an unconditional
# `while 1==1:` loop and was unreachable. Running the loop inside
# try/finally guarantees cleanup even on Ctrl-C or an error.
try:
    while True:
        time.sleep(second)
        t1()
finally:
    # Close the cursor object.
    cursor.close()
    # Close the database connection.
    conn.close()
| [
"noreply@github.com"
] | yuzsmc.noreply@github.com |
243c8b1408674edcd8f9d8117420f863ac30e23d | 47545577669a841633f40765f32ac6cbac84e884 | /tp_player.py | 9e8599d5456317eb6b61bb320aa088cdbd244bde | [] | no_license | jrod006/sp-trivial-purfruit | 1b2e31d7f621437d7e487a8f893caee23bd29ebc | 013051e67f7547e8e00023de79cc46879fe89973 | refs/heads/master | 2022-12-04T06:30:04.525831 | 2020-08-19T21:06:15 | 2020-08-19T21:06:15 | 279,123,891 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 571 | py |
class Player:
    """A Trivial-Pursuit-style player: board position plus earned chips."""

    def __init__(self, ID):
        self.chips = []      # categories earned so far (no duplicates)
        self.id = ID         # identifier shown by __str__
        self.location = 33   # starting board square
        self.active = True   # player still in the game

    def addChip(self, category):
        """Record a category chip, ignoring duplicates."""
        if category not in self.chips:
            self.chips.append(category)

    def updateLocation(self, location):
        """Move the player to a new board square.

        Bug fix: the original signature `updateLocation(location)` had no
        `self`, so every instance call raised TypeError and the body's
        `self.location` was an unresolved name.
        """
        self.location = location

    def __str__(self):
        outstr = ''
        outstr += 'Player ' + str(self.id) + ':\n'
        outstr += 'Location = ' + str(self.location) + '\n'
        outstr += 'Chips: ' + str(self.chips)
        return outstr
| [
"mtmanger@gmail.com"
] | mtmanger@gmail.com |
78e88ae9748d81fd701191515d86a7dd92904b4d | a2487bdbd12b6803f86f6b3a04f63bfa9d84622a | /Assignment05/ex43_mod.py | 7d7f5f2b2e55f5c537c1d6f3e0ef37aba8e2d5fc | [] | no_license | radetar/hort503 | 71620b2558478ecb8353d23d28f5eb899bda5b86 | b38b9bf88d94208321b6e4cca4178bf99094754d | refs/heads/master | 2021-05-13T11:56:57.548248 | 2018-05-04T16:05:33 | 2018-05-04T16:05:33 | 117,145,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,433 | py | from sys import exit
from random import randint
from textwrap import dedent
class Scene(object):
    """Abstract base class for game scenes.

    Concrete scenes override enter(); this default aborts the program to
    flag a missing implementation.
    """

    def enter(self):
        for warning in ("This scene is not yet configured",
                        "Subclass it and implement enter()."):
            print(warning)
        exit(1)
class Engine(object):
    """Drives the game loop: runs scenes until the 'finished' scene."""

    def __init__(self, scene_map):
        # Map-like object providing opening_scene() and next_scene(name).
        self.scene_map = scene_map

    def play(self):
        """Run scenes in sequence, entering the final scene as well."""
        current_scene = self.scene_map.opening_scene()
        last_scene = self.scene_map.next_scene('finished')
        while current_scene != last_scene:
            next_scene_name = current_scene.enter()
            current_scene = self.scene_map.next_scene(next_scene_name)
        # Bug fix: the original said `current_scene.enter` (no call), so
        # the closing scene was referenced but never actually run.
        current_scene.enter()
class Death(Scene):
    """Game-over scene: prints one randomly chosen taunt, then quits."""

    quips = ["You died, you suck at this", "Another insult you died", "you died", "you died", "some insult"]

    def enter(self):
        last_index = len(self.quips) - 1
        print(Death.quips[randint(0, last_index)])
        exit(1)
class CentralCorridor(Scene):
    """Opening scene: the player's typed reply decides the next scene."""

    def enter(self):
        print(dedent("""Blah blah blah blah blah blah blah you are on an alien ship kill em' all."""))
        action = input("> ")
        # Guard-clause style: return as soon as a choice is recognized.
        if action == "shoot":
            print("bad choice")
            return 'death'
        if action == "joke":
            print("good choice")
            return 'laser_weapon_armory'
        print("DOES NOT COMPUTE!")
        return 'central_corridor'
class LaserWeaponArmory(Scene):
    """Keypad puzzle: the player gets up to 11 tries at a 3-digit code."""

    def enter(self):
        print(dedent(""" You get into the armory and need to put in a 3 digit code"""))
        code = "123"
        guess = input("[keypad]> ")
        attempts_left = 10
        # Keep asking until the code matches or the retries run out.
        while guess != code and attempts_left > 0:
            print("WRONG. Try again")
            attempts_left -= 1
            guess = input("[keypad]> ")
        if guess == code:
            print("you got it.")
            return 'the_bridge'
        print("you suck")
        return 'death'
class TheBridge(Scene):
    """Bomb scene: only one of the two recognized actions is survivable."""

    def enter(self):
        print("You are on the bridge. There is an alien. What do you do?")
        action = input("> ")
        if action == "throw the bomb":
            print(dedent("you die"))
            return 'death'
        if action == "set bomb":
            print("good choice")
            return 'escape_pod'
        print("DOES NOT COMPUTE")
        return 'the_bridge'
class EscapePod(Scene):
    """Final choice: any pod other than pod 1 (or junk input) is fatal."""

    def enter(self):
        print("Ok you get the pods. Which pod you take?")
        good_pod = 1
        guess = input("[pod #]> ")
        # Bug fix: int() raised ValueError on non-numeric input and
        # crashed the game; treat unparseable input as a wrong pod.
        try:
            chosen = int(guess)
        except ValueError:
            chosen = -1
        if chosen != good_pod:
            print("Bad choice")
            return 'death'
        else:
            print("You live")
            return 'finished'
class Finished(Scene):
    """Victory scene; returns 'finished' so the engine's loop terminates."""

    def enter(self):
        message = "You won!"
        print(message)
        return 'finished'
class Map(object):
    """Name -> scene-instance registry; one instance of each scene is
    created once when the class body executes."""

    scenes = {'central_corridor':CentralCorridor(), 'laser_weapon_armory':LaserWeaponArmory(), 'the_bridge':TheBridge(), 'escape_pod':EscapePod(), 'death': Death(), 'finished':Finished()}

    def __init__(self, start_scene):
        # Name of the scene the game begins in.
        self.start_scene = start_scene

    def next_scene(self, scene_name):
        """Return the scene registered under scene_name (None if unknown)."""
        return Map.scenes.get(scene_name)

    def opening_scene(self):
        """Return the scene object for the configured starting scene."""
        return self.next_scene(self.start_scene)
# Build the scene map and start the game only when run as a script, so
# importing this module (e.g. for testing) does not block on input().
if __name__ == '__main__':
    a_map = Map('central_corridor')
    a_game = Engine(a_map)
    a_game.play()
| [
"rachael.detar@wsu.edu"
] | rachael.detar@wsu.edu |
2c705f84317dd7fb1c449692c21c19157c862a5f | e87d793b3a5facc6e54e0263fbd67703e1fbb382 | /duckietown-world-venv/bin/jupyter-trust | b408b0d21630fc79d1e9443daa6b2a05bc45d37c | [] | no_license | llingg/behaviour-benchmarking | a860bbe709309e13f3e1133d916944882199a40f | 85bbf1a9c2c628ba74480fe7abac3804d6afdac4 | refs/heads/v1 | 2022-10-06T08:21:29.068329 | 2020-06-11T07:02:46 | 2020-06-11T07:02:46 | 259,622,704 | 0 | 0 | null | 2020-06-02T17:52:46 | 2020-04-28T11:52:08 | C++ | UTF-8 | Python | false | false | 302 | #!/home/linuslingg/duckietown-world/duckietown-world-venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from nbformat.sign import TrustNotebookApp
if __name__ == '__main__':
    # Auto-generated console-script shim: strip the setuptools wrapper
    # suffix from argv[0], then hand control to the jupyter-trust app.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(TrustNotebookApp.launch_instance())
| [
"linggl@student.ethz.ch"
] | linggl@student.ethz.ch | |
a771616d7922f435f00c65682134094a8523c0a6 | eeba24b69d4cbe1b60844e12649737f58ccb3d7b | /background_subtraction/background_subtraction.py | 5314bc103ad43f2b1b37aa35ebf238cd2b0a732a | [
"MIT"
] | permissive | ahzam1/CMPUT414Project | 5f31a3be2fe9db4e34902afe40624f5b3266cefe | c86fed0c5c8f2d4211ef3495fbf5fe37b759df34 | refs/heads/master | 2021-04-15T01:38:30.406946 | 2020-04-29T03:04:06 | 2020-04-29T03:04:06 | 249,283,772 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,289 | py | from __future__ import print_function
import cv2 as cv
import argparse
parser = argparse.ArgumentParser(description='This program shows how to use background subtraction methods provided by \
OpenCV. You can process both videos and images.')
parser.add_argument('--input', type=str, help='Path to a video or a sequence of image.', default='vtest.avi')
parser.add_argument('--algo', type=str, help='Background subtraction method (KNN, MOG2).', default='MOG2')
args = parser.parse_args()
if args.algo == 'MOG2':
backSub = cv.createBackgroundSubtractorMOG2()
else:
backSub = cv.createBackgroundSubtractorKNN()
capture = cv.VideoCapture(cv.samples.findFileOrKeep(args.input))
if not capture.isOpened:
print('Unable to open: ' + args.input)
exit(0)
while True:
ret, frame = capture.read()
if frame is None:
break
fgMask = backSub.apply(frame)
cv.rectangle(frame, (10, 2), (100,20), (255,255,255), -1)
cv.putText(frame, str(capture.get(cv.CAP_PROP_POS_FRAMES)), (15, 15),
cv.FONT_HERSHEY_SIMPLEX, 0.5 , (0,0,0))
cv.imshow('Frame', frame)
cv.imshow('FG Mask', fgMask)
keyboard = cv.waitKey(30)
if keyboard == 'q' or keyboard == 27:
break | [
"ZIYUANLIU98@users.noreply.github.com"
] | ZIYUANLIU98@users.noreply.github.com |
cbc77e1e365239cfd27fe846d403ecea5659a664 | 612be36eb2f8d64aad7327a7fc8cd133ec2eb6cc | /day01/7.py | 9b41dfa260fe8f8ed9661362679e781028a6b3cc | [] | no_license | tndud042713/python_study | 568f2c567a8a120aecc6722d9f75ce7df9387c55 | a493bffa10a6b9ed44f892c9b305910ee64008a8 | refs/heads/master | 2022-12-11T08:08:17.475770 | 2020-09-11T16:22:43 | 2020-09-11T16:22:43 | 290,159,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | print("안녕하세요.""반갑습니다")
# print() inserts a single space between comma-separated arguments.
print("안녕하세요",40)
print("안녕하세요","반갑습니다.")
# Mixing a string label with a number ("나이:" means "age:").
print("나이:",44)
| [
"noreply@github.com"
] | tndud042713.noreply@github.com |
a1b70d1dfd1d7b10d6a760f539ab5101bc40a785 | d8b9c898c2d66ec3ac98222617104332e2c8ce4c | /visit.py | e0e14b7a8993530dbe3d7b20fe0c682b65d4ed8a | [] | no_license | dwitek/Compilers | 8d5a1169be8613963edbfbd2fa1730e19c952f7b | 4a3f15c1dfd7a6efbeacae4c0cad865a62d5fa98 | refs/heads/master | 2020-04-28T03:41:51.483857 | 2019-06-02T16:22:51 | 2019-06-02T16:22:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,827 | py | #!/usr/bin/env python2
import inspect
__all__ = ['on', 'when']
def on(param_name):
    """Decorator factory: turn the first definition of a visitor function
    into a Dispatcher keyed on the parameter named *param_name*."""
    def decorate(fn):
        return Dispatcher(param_name, fn)
    return decorate
def when(param_type):
    """Decorator: register the decorated function as the visitor to use
    when the dispatch parameter is an instance of *param_type*.

    Used after @on has turned the first definition of the same name into
    a Dispatcher; each @when definition adds one dispatch target to it.
    """
    # f - actual decorator
    # fn - decorated method, i.e. visit
    # ff - fn gets replaced by ff in the effect of applying @when decorator
    # dispatcher is a function object
    def f(fn):
        # Look up the name being (re)defined in the caller's scope: it is
        # either the Dispatcher created by @on or a previous @when wrapper.
        frame = inspect.currentframe().f_back
        dispatcher = frame.f_locals[fn.__name__]
        if not isinstance(dispatcher, Dispatcher):
            dispatcher = dispatcher.dispatcher
        dispatcher.add_target(param_type, fn)
        def ff(*args, **kw):
            return dispatcher(*args, **kw)
        ff.dispatcher = dispatcher
        return ff
    return f
class Dispatcher(object):
    """Dispatch table mapping the class of one parameter to a visitor.

    NOTE(review): Python 2 era code — inspect.getargspec and
    dict.iterkeys no longer exist on modern Python 3.
    """

    def __init__(self, param_name, fn):
        # Position of the dispatch parameter in the visitor's signature.
        self.param_index = inspect.getargspec(fn).args.index(param_name)
        self.param_name = param_name
        self.targets = {}  # maps class -> registered visitor function

    def __call__(self, *args, **kw):
        """
        If there is a visit function defined explicitly
        for the class of `typ`, result of the `visit` function is returned.
        If the visit function is defined for superclass(es)
        of `typ`, a list of `visit` results for all `typ`
        superclasses is returned.
        """
        typ = args[self.param_index].__class__
        d = self.targets.get(typ)
        if d is not None:
            return d(*args, **kw)
        else:
            # No exact match: collect results from visitors registered
            # for any superclass of typ.
            class_to_visitorfun = self.targets
            classes = class_to_visitorfun.iterkeys()
            results = [class_to_visitorfun[c](*args, **kw) for c in classes if issubclass(typ, c)]
            if results == []:
                print("No visitor found for class {}".format(typ))
            return results

    def add_target(self, typ, target):
        """Register *target* as the visitor for dispatch class *typ*."""
self.targets[typ] = target | [
"dwitekw@gmail.com"
] | dwitekw@gmail.com |
f144f7b73ebe30a7e76a64da83d93ef60ed349b9 | 79cd7b2cb1d80053ab87adb118d3c9ff934261e8 | /blog/migrations/0002_post_text.py | 1d13898f07b306ad66060b1a28836e9cd4bfe92b | [] | no_license | jhussyelleReis/my-first-blog | 0359024e290595f4b7ff0284811b1538476d1a82 | 059132f98831fb37a9ce8c276078ce4027a69e64 | refs/heads/master | 2021-01-18T13:16:33.448383 | 2017-08-18T12:36:12 | 2017-08-18T12:36:12 | 100,373,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 428 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the required `text` body field to blog.Post.

    `preserve_default=False`: the 'Texto Default' value only back-fills
    existing rows during this migration; it is not kept as the field's
    default afterwards.
    """

    dependencies = [
        ('blog', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='post',
            name='text',
            field=models.TextField(default='Texto Default'),
            preserve_default=False,
        ),
    ]
| [
"jhussyelle.reis@gmail.com"
] | jhussyelle.reis@gmail.com |
693351d13cbca26f5dc2f674b07e879c28cc09eb | b3a55844de9ff46972448b56ccadc1e3088adae1 | /poptimizer/data/views/go.py | 8e124ea774a85d74b096f602bcad3c5d32f544ed | [
"Unlicense"
] | permissive | tjlee/poptimizer | 480a155e2f4ffd5d6eda27323c5baa682d7f9f00 | 3a67544fd4c1bce39d67523799b76c9adfd03969 | refs/heads/master | 2023-08-15T10:16:11.161702 | 2021-10-15T15:35:38 | 2021-10-15T15:35:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 583 | py | """Предварительная версия интеграции с Go."""
import aiohttp
from bson import json_util
from poptimizer.shared import connections
async def rest_reader(session: aiohttp.ClientSession = connections.HTTP_SESSION):
    """Fetch trading dates from the local Go service and decode the
    MongoDB extended-JSON payload with bson.json_util."""
    url = "http://localhost:3000/trading_dates/trading_dates"
    async with session.get(url) as respond:
        respond.raise_for_status()
        payload = await respond.text()
    return json_util.loads(payload)
if __name__ == "__main__":
    import asyncio

    # asyncio.get_event_loop() outside a running loop is deprecated since
    # Python 3.10; asyncio.run() creates, runs and closes a loop properly.
    print(asyncio.run(rest_reader()))
| [
"wlmike@gmail.com"
] | wlmike@gmail.com |
9fae61cc9b66874728fc4205ee72c995ee529830 | faefef027f1878833589db2e1eab0859bb9c64ee | /back_lib.py | 394f5cce2bf7a5e3c62cd275bc44de19b781c0a2 | [
"MIT"
] | permissive | LeeBergstrand/BackBLAST2 | 174f0d424bcb071a4b0b6bb4c8218cd5f72f5d35 | 9f6e63d9a13386b4f6ddd1833354437dbed301f0 | refs/heads/master | 2021-01-10T16:50:16.267720 | 2015-06-04T17:05:43 | 2015-06-04T17:05:43 | 36,826,981 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,324 | py | import csv
import subprocess
import sys
from Bio import SeqIO
# Functions:
# =================================================================================================
def run_blastp(query_file, database_file, e_value_cutoff, processes):
    """
    Runs BLASTp...
    :param query_file: The amino acid query FASTA file.
    :param database_file: The amino acid BLAST database location (amino acid FASTA file at this location)
    :param e_value_cutoff: The e-value cutoff for BLASTp.
    :param processes: Number of processes for BLASTp to use.
    :return: A csv formatted BLASTp output (query_sequence_id, subject_sequence_id, percent_identity, e-value, query coverage, bitscore)
    """
    # Fix: universal_newlines=True makes check_output return text rather
    # than bytes on Python 3; filterBLASTCSV feeds this output to
    # csv.reader, which requires str rows.
    BLASTOut = subprocess.check_output(
        ["blastp", "-db", database_file, "-query", query_file, "-evalue", str(e_value_cutoff), "-num_threads",
         str(processes), "-outfmt", "10 qseqid sseqid pident evalue qcovhsp bitscore"],
        universal_newlines=True)
    return BLASTOut
# -------------------------------------------------------------------------------------------------
# 3: Filters HSPs by Percent Identity...
def filterBLASTCSV(BLASTOut):
    """Keep only HSPs whose percent identity is >= 25, converting the
    numeric columns (identity, e-value, coverage, bitscore) to floats.

    :param BLASTOut: csv-formatted BLASTp output as a single string.
    :return: list of HSP rows that passed the identity filter.
    """
    minIdent = 25
    BLASTCSVOut = BLASTOut.splitlines(True)  # raw csv text -> list of csv rows
    BLASTreader = csv.reader(BLASTCSVOut)
    BLASTCSVOutFiltred = []
    for HSP in BLASTreader:
        # Bug fix: csv.reader yields strings, so the old check compared
        # str >= int (always True on Python 2, TypeError on Python 3).
        # Convert the identity to a number *before* filtering.
        HSP[2] = float(HSP[2])
        if HSP[2] >= minIdent:  # filter by minimum percent identity
            HSP[3] = float(HSP[3])
            HSP[4] = float(HSP[4])
            HSP[5] = float(HSP[5])
            BLASTCSVOutFiltred.append(HSP)
    return BLASTCSVOutFiltred
# -------------------------------------------------------------------------------------------------
# 5: Creates a python dictionary (hash table) that contains the the FASTA for each protein in the proteome.
def createProteomeHash(ProteomeFile):
    """Build a dict mapping each sequence id in *ProteomeFile* to its
    FASTA-formatted text.

    Exits the program with status 1 if the file cannot be read.
    """
    ProteomeHash = dict()
    try:
        # Fix: 'with' guarantees the handle is closed even if parsing
        # fails part-way through (the old code leaked it on error), and
        # the "rU" mode flag is removed in Python 3.11+.
        with open(ProteomeFile, "r") as handle:
            for record in SeqIO.parse(handle, "fasta"):
                ProteomeHash[record.id] = record.format("fasta")
    except IOError:
        print("Failed to open " + ProteomeFile)
        sys.exit(1)
    return ProteomeHash
| [
"leemacboy@gmail.com"
] | leemacboy@gmail.com |
f611a878a16540a8544d96b179da3dbe91d2edf7 | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /MY_REPOS/INTERVIEW-PREP-COMPLETE/notes-n-resources/Data-Structures-N-Algo/_DS-n-Algos/_Another-One/Project Euler/Problem 04/sol1.py | ba8a39290c9cd8d45a5050c08b2e276e81e6c6f9 | [
"MIT"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 872 | py | """
Problem:
A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 x 99.
Find the largest palindrome made from the product of two 3-digit numbers which is less than N.
"""
from __future__ import print_function
def largest_palindrome_below(limit):
    """Return the largest palindrome < *limit* that is a product of two
    3-digit numbers, or None if there is none above 10000."""
    for number in range(limit - 1, 10000, -1):
        text = str(number)
        # A palindrome reads the same forwards and backwards.
        if text == text[::-1]:
            # Try every possible 3-digit divisor from 999 down to 100.
            for divisor in range(999, 99, -1):
                # Bug fix: '/' is true division in Python 3, so the
                # cofactor printed as e.g. '99.0' and len(...) == 3 never
                # matched; floor division keeps it an integer.
                if number % divisor == 0 and len(str(number // divisor)) == 3:
                    return number
    return None

if __name__ == '__main__':
    # Guarding the interactive part keeps the module importable/testable.
    limit = int(input("limit? "))
    answer = largest_palindrome_below(limit)
    if answer is not None:
        print(answer)
| [
"bryan.guner@gmail.com"
] | bryan.guner@gmail.com |
d60d4722685e75032e6f9efa8201bd9e533268ce | 2837bb900c2abb8d7ba34d92a771c430aeef90c8 | /player s2 13.py | 64fedc06cf024a5c2c0728362ffcabee302dc1dd | [] | no_license | Aravindandeva/Python-files | 9e81919db80f18e28ac76f37bcb63ef3d7477db0 | 4c259478efd8d7d014d56542400d3444951ea97b | refs/heads/master | 2020-06-14T12:11:41.237085 | 2019-07-31T11:16:14 | 2019-07-31T11:16:14 | 195,001,196 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | u=int(input())
# Sum the squares of the decimal digits of u (read above) and print it.
digit_square_sum = 0
remaining = u
while remaining > 0:
    digit = remaining % 10
    digit_square_sum += digit ** 2
    remaining //= 10
print(digit_square_sum)
| [
"noreply@github.com"
] | Aravindandeva.noreply@github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.