blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
960c42bb96022428399d3f6c90afa9aafe47ae6a | 09e8c92187ff8d7a726727041e2dd80850dcce3d | /leetcode/965_univalued_binary_tree.py | e0da41251305a9165e61a661dbd73b04870d0e67 | [] | no_license | kakru/puzzles | 6dd72bd0585f526e75d026f3ba2446b0c14f60e0 | b91bdf0e68605f7e517446f8a00b1e0f1897c24d | refs/heads/master | 2020-04-09T09:47:31.341475 | 2019-05-03T21:24:41 | 2019-05-03T21:24:41 | 160,246,660 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 970 | py | #!/usr/bin/env python3
# Definition for a binary tree node.
class TreeNode(object):
    """A binary-tree node: a value plus optional left/right children."""

    def __init__(self, x, left=None, right=None):
        # Same attribute names the traversal code relies on.
        self.val, self.left, self.right = x, left, right
class Solution(object):
    """LeetCode 965: check whether every node of a binary tree holds one value."""

    def isUnivalTree(self, root):
        """
        Return True if all nodes in the tree rooted at ``root`` share one value.

        :type root: TreeNode (or None)
        :rtype: bool
        """
        # Fix: the original dereferenced root.val unconditionally and crashed
        # on an empty tree; an empty tree is vacuously univalued.
        if root is None:
            return True
        val = root.val
        # Iterative DFS with an explicit stack avoids recursion-depth limits.
        stack = [root]
        while stack:
            node = stack.pop()
            if node.val != val:
                return False
            if node.left:
                stack.append(node.left)
            if node.right:
                stack.append(node.right)
        return True
# Ad-hoc smoke tests (run on import): print the computed answer next to the
# expected one rather than asserting.
# Tree of all 1s -> expect True.
t = TreeNode(1,
    TreeNode(1,
        TreeNode(1),
        TreeNode(1)
    ),
    TreeNode(1,
        None,
        TreeNode(1)
    )
)
print(Solution().isUnivalTree(t), True)
# Tree with a stray 5 among 2s -> expect False.
t = TreeNode(2,
    TreeNode(2,
        TreeNode(5),
        TreeNode(2)
    ),
    TreeNode(2)
)
print(Solution().isUnivalTree(t), False)
| [
"karol@kruzelecki.com"
] | karol@kruzelecki.com |
609021985877b887a2b29d318d6e4ee09ffbc4df | 95b9fc9e1a109ee8612583221269aa5eb4f7de4a | /mysitea/settings.py | b82bd4078e0e56e73872ed0b18115c84b22b545c | [] | no_license | Choi0427/mysitea | d3af19cf8cca3e28b13e8c6f8ec9ec1331ad9636 | bda03507ba3f0a5f22ea061811ac13b0afe6bfa4 | refs/heads/master | 2023-01-28T12:57:21.857301 | 2020-12-07T13:04:28 | 2020-12-07T13:04:28 | 319,324,689 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,065 | py | """
Django settings for mysitea project.
Generated by 'django-admin startproject' using Django 3.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repo; rotate it and load from the
# environment before any deployment.
SECRET_KEY = '0drzt5l8uuu#m-%vj(i#2^-koq!rs@xk8+%fqi+-knrb4m-hm^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list is fine while DEBUG=True; must list real hosts in production.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysitea.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'mysitea.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# Default SQLite database stored next to the project; development-only.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| [
"you@example.com"
] | you@example.com |
a65ad9748193a80ca6ea3a3b9948f43ba7938fbe | 60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24 | /IronPythonStubs/release/stubs.min/System/ComponentModel/__init___parts/DataObjectAttribute.py | fa31cc9ac8a0bb5e911aa72b7329df96aa63c06d | [
"MIT"
class DataObjectAttribute(Attribute, _Attribute):
    """Marks a type as bindable to a System.Web.UI.WebControls.ObjectDataSource
    object (sealed .NET attribute; this is an IronPython stub).

    DataObjectAttribute()
    DataObjectAttribute(isDataObject: bool)
    """

    def Equals(self, obj):
        """Equals(self: DataObjectAttribute, obj: object) -> bool

        Return True when *obj* fits the pattern of this attribute instance.
        """
        pass

    def GetHashCode(self):
        """GetHashCode(self: DataObjectAttribute) -> int

        Return the 32-bit signed integer hash code for this instance.
        """
        pass

    def IsDefaultAttribute(self):
        """IsDefaultAttribute(self: DataObjectAttribute) -> bool

        Return True if the attribute currently holds its default value.
        """
        pass

    def __eq__(self, *args):
        """x.__eq__(y) <==> x==y"""
        pass

    def __init__(self, *args):
        """x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
        pass

    @staticmethod
    def __new__(self, isDataObject=None):
        """__new__(cls: type)
        __new__(cls: type, isDataObject: bool)
        """
        pass

    def __ne__(self, *args):
        pass

    # Design-time flag: whether the type should be offered for data binding.
    IsDataObject = property(lambda self: object(), lambda self, v: None, lambda self: None)

    # Stubbed static members of the .NET attribute.
    DataObject = None
    Default = None
    NonDataObject = None
| [
"magnetscoil@gmail.com"
] | magnetscoil@gmail.com |
237d4c4a57a192116b0989a1faf1b1895c0795f5 | 6242f818537cf4c6f4f3580f1ca79c0683a0dc36 | /Brownian_Movie_Analysis_nodata/EPR_inference/Dissipative_Component_Analysis.py | 8a4e368a4c3e9a4178dccf1b5afcf872aa4944b6 | [
"MIT"
] | permissive | montarou/NonequilibriumBrownianMovies | bbe3b3d765e2e762ec2237110045b7088d2c647e | 748a2d4f90933423fdf119bb1d9b8e783ea7b524 | refs/heads/master | 2023-06-30T23:31:17.671243 | 2020-08-08T17:21:58 | 2020-08-08T17:21:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,451 | py | import numpy as np
import matplotlib.pyplot as plt
import scipy.linalg as scilg
from numpy import linalg as nlg
def DCA(X,output_directory):
    """
    Perform Dissipative Component Analysis (DCA) on data already projected
    onto principal components. For details refer to the Supplementary
    Material (SM), Sec. IV.

    Args:
        X: 2D array (frames x PCA modes); only the first 200 modes are used.
        output_directory: path prefix where the .npy results are saved
            (NOTE(review): treated as a string prefix, not joined with os.path
            -- it must end with a separator).

    Returns:
        Test-set data projected onto the dissipative components (DCs).
    """
    print("Perfoming DCA..")
    dt=0.005 #Time step of simulation
    max_modes=200 #maximum number of retained modes
    X = X - np.mean(X,axis=0) #center data around average
    print("Splitting dataset into train/test sets")
    # NOTE(review): train set is only the first 10% of frames, test is the
    # remaining 90% -- confirm this split is intentional.
    X_train=X[:X.shape[0]//10,:max_modes] #Split trajectories in train/test sets
    X_test=X[X.shape[0]//10:,:max_modes]
    print('Computing Adot..')
    dX=X_train[1:]-X_train[:-1] #Compute one time-step difference
    X_times_dX=np.tensordot(X_train[:-1],dX,axes=([0],[0]))
    Adot=(X_times_dX-X_times_dX.T)/(2.*X_train.shape[0]*dt) #Compute area enclosing rate
    print('Computing change-of-basis matrix B..')
    Covariance=np.dot(X_train.T,X_train)/(X_train.shape[0]-1) #Compute Covariance: note that we are already in PC-coordinates, so Covariance is diagonal.
    Covariance_evals,Covariance_evecs=np.linalg.eigh(Covariance) #Diagonalize
    B=(Covariance_evecs[:,::-1])/np.sqrt(Covariance_evals[::-1]) #Compute change-of-basis matrix to covariance-identity-coordinates (cic). In the manuscript it is indicated as C_pca
    B_inv=Covariance_evecs[:,::-1]*np.sqrt(Covariance_evals[::-1]) #Inverse of B
    print('Computing Diffusion matrix D..')
    D=np.dot(dX.T,dX)/(2*dX.shape[0]*dt) #Compute diffusion matrix via mean-square-displacement
    D_inv=nlg.inv(D) #Inverse of D
    print('Transforming into cic..')
    Adot_cic=(B.T).dot(Adot).dot(B) #Area enclosing rate in cic
    D_inv_cic=B_inv.dot(D_inv.dot(B_inv.T)) #Diffusion matrix in cic
    Adot_cic_vals,Adot_cic_vecs=nlg.eigh(Adot_cic.dot(Adot_cic.T)) #Diagonalize product A_dot_cic.Adot_cic^T (Eigenvalues are denoted as 'lambda' in Sec. IV of SM)
    D_inv_scic=Adot_cic_vecs.T.dot(D_inv_cic).dot(Adot_cic_vecs) #D_inv in special-cic (scic)
    print('Ordering terms..')
    # Eigenvalues of the antisymmetric Adot come in +/- pairs, hence the
    # [::2]/[1::2] pairing below.
    Sdot_terms=Adot_cic_vals[::2]*(np.diag(D_inv_scic)[::2]+np.diag(D_inv_scic)[1::2]) #Form Sdot pairs that will be used for ordering the dissipative components (see Eq. S4 of Sec. IV of SM)
    vec_pairs=np.array([[Adot_cic_vecs[:,i],Adot_cic_vecs[:,i+1]] for i in range(0,Adot_cic_vecs.shape[1],2)]) #Form pairs of eigenvectors of Adot_cic
    Sdot_ordering_args=np.argsort(Sdot_terms)[::-1] #Find indices that sort Sdot_terms from largest to smallest contributions.
    DCA_components=vec_pairs[Sdot_ordering_args,:,:].reshape(Adot_cic_vecs.shape).T #Dissipative components: they are the reordered eigenvectors of A_dot_cic according to Sdot_terms
    DCA_eigenvalues=Adot_cic_vals[::2][Sdot_ordering_args] #Reordering eigenvalues (lambda in SM Sec. IV)
    DCA_components=DCA_components.T #Transpose and put vectors in rows for convenience
    X_test_cic=B.T.dot((X_test-np.mean(X_test,axis=0)).T) #Change basis of X_test to cic
    DCA_projections_test=X_test_cic.T.dot(DCA_components.T) #project the test set onto DCs
    np.save(output_directory+"DCA_components.npy", DCA_components[:max_modes])
    np.save(output_directory+"DCA_projections_test.npy", DCA_projections_test[:,:max_modes])
    np.save(output_directory+"DCA_eigenvalues.npy", DCA_eigenvalues)
    print('Done.')
    return DCA_projections_test
| [
"noreply@github.com"
] | noreply@github.com |
ec80910561a64702c69266b5f78a7aef95964ee5 | 3756953b4fbeeb7581e5c18c91f0d96a993f932b | /others/bpi-test-rfid/usr/local/bin/rfid_epc_usb.py | dbbfaf1fd05bc5568c2a60bf55519ef3b0527fcd | [] | no_license | bmk10/BPI-files | 1c17b00c13ada7db0bbbed262dfb6caa6281c642 | 6ea84539846126245da5ec3453bdb2edd16fc697 | refs/heads/master | 2022-12-19T08:47:16.959673 | 2020-09-20T12:29:00 | 2020-09-20T12:29:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,553 | py | #!/usr/bin/python
#import jogtek
import serial
import sys
import time
import numpy as np
def crc16(data):
    """CRC-16-CCITT over a buffer of ASCII hex digits.

    Consecutive digit pairs are packed into one byte (high nibble first);
    a trailing unpaired digit is discarded, matching the tag-reader protocol.
    Returns the checksum as a numpy uint16.
    """
    POLY = 0x1021
    crc = 0xFFFF
    pending = None  # high nibble waiting for its partner, or None
    for ch in bytearray(data):
        # Map '0'-'9' / 'A'-'F' ASCII codes to their numeric nibble values.
        nibble = ch - 0x41 + 10 if ch > 0x39 else ch - 0x30
        if pending is None:
            pending = nibble << 4
            continue
        value = pending + nibble
        pending = None
        shifted = value << 8
        for _ in range(8):
            crc = ((crc << 1) ^ POLY) if (crc ^ shifted) & 0x8000 else (crc << 1)
            shifted <<= 1
        crc &= 0xFFFF
    return np.uint16(crc)
# Serial port selection: default to the USB adapter, allow override via argv[1].
#comport = '/dev/ttyAMA0'
comport = '/dev/ttyUSB0'
if len(sys.argv) >= 2:
    comport = sys.argv[1]
#print 'set: ' + comport
# multiepc selects the reader command: 'U' (multi-tag) vs 'Q' (single-tag).
#multiepc = False
multiepc = True
# TM915 reader link: 38400 8N1, 2 s read timeout.
ser = serial.Serial(port=comport, baudrate = 38400,
                    parity=serial.PARITY_NONE,
                    stopbits=serial.STOPBITS_ONE,
                    bytesize=serial.EIGHTBITS,
                    timeout=2)
#print
#print("Read EPC by TM915")
#print
try:
    # Poll forever: issue a read command, collect CRC-valid EPC lines, print.
    while True:
        epc = ""
        if multiepc == True :
            #print("epc:1")
            ser.write(b'\nU\r')
        else:
            #print("epc:2")
            ser.write(b'\nQ\r')
        while True:
            # First readline discards the echoed command; second is the payload.
            buf = ser.readline()
            buf = ser.readline().decode('utf-8')
            #print("epc:3")
            if multiepc == True :
                # Short line marks end of the multi-tag response burst.
                if len(buf) <= 4 :
                    break
                #print("epc:4")
                #print("buf: " + buf)
                #print(buf[1:33])
                print(buf[1:17] + " " + buf[17:31])
                # Residue 0x1D0F indicates a valid CRC over the 32 hex chars.
                crcdata=bytes(buf[1:33],"ascii")
                if crc16(crcdata) == 0x1D0F :
                    epc = epc + buf[1:33] + "\r\n"
            else:
                print("epc:5")
                if len(buf) > 5 :
                    crcdata=bytes(buf[1:33],"ascii")
                    if crc16(crcdata) == 0x1D0F :
                        epc = epc + buf[1:33] + "\r\n"
                break
            #print("epc:6")
        if epc != "" :
            # Strip the trailing CRLF before printing the accumulated EPCs.
            epc = epc[:len(epc)-2]
            print (epc)
        time.sleep(0.2)
except KeyboardInterrupt:
    ser.close()
    print("Exit.")
    sys.exit()
finally:
    ser.close()
    sys.exit()
| [
"lionwang@sinovoip.com.cn"
] | lionwang@sinovoip.com.cn |
91e2b4dcec3ec942698bfbf77e1be3df16cfbb8a | 4b8af906e787ac27cc138201594298f758a02ca5 | /SSD/models/base_models.py | 240fc2ddf2be1e6545c1531b23de46f0db2d373e | [] | no_license | WangDeyu/Stronger-One-stage-detector-with-much-Tricks | 2ea72a29843cfbca51288254ab786f61baa3e40a | 09bc1f5cd0b069731bfe228a386311b2c1bf9747 | refs/heads/master | 2020-11-29T05:27:20.769014 | 2019-06-04T13:37:30 | 2019-06-04T13:37:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,148 | py | import torch
import torch.nn as nn
def vgg(cfg, i, batch_norm=False):
    """Build the truncated-VGG backbone layer list used by SSD.

    cfg entries: int -> 3x3 conv (+ReLU), 'M' -> 2x2 max-pool,
    'C' -> 2x2 max-pool with ceil_mode. A pool5/conv6/conv7 tail
    (dilated fc6/fc7 replacements) is always appended.

    Args:
        cfg: list of channel counts and pooling markers.
        i: number of input channels (3 for RGB).
        batch_norm: use BasicConv (conv+BN) instead of plain Conv2d.
    Returns:
        A plain Python list of nn.Module layers.
    """
    layers = []
    channels = i
    for entry in cfg:
        if entry == 'M':
            layers.append(nn.MaxPool2d(kernel_size=2, stride=2))
        elif entry == 'C':
            layers.append(nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True))
        else:
            if batch_norm:
                conv = BasicConv(channels, entry, kernel_size=3, padding=1,
                                 relu=False, bn=batch_norm, bias=True)
            else:
                conv = nn.Conv2d(channels, entry, kernel_size=3, padding=1)
            layers.append(conv)
            layers.append(nn.ReLU(inplace=True))
            channels = entry
    # Dilated "fc6"/"fc7" replacements on top of a stride-1 pool5.
    pool5 = nn.MaxPool2d(kernel_size=3, stride=1, padding=1)
    if batch_norm:
        conv6 = BasicConv(512, 1024, kernel_size=3, padding=6, dilation=6, relu=False)
        conv7 = BasicConv(1024, 1024, kernel_size=1, relu=False)
    else:
        conv6 = nn.Conv2d(512, 1024, kernel_size=3, padding=6, dilation=6)
        conv7 = nn.Conv2d(1024, 1024, kernel_size=1)
    layers.extend([pool5, conv6, nn.ReLU(inplace=True), conv7, nn.ReLU(inplace=True)])
    return layers
# Per-input-resolution VGG configurations: channel counts interleaved with
# 'M' (max-pool) and 'C' (ceil-mode max-pool) markers, consumed by vgg().
vgg_base = {
    '300': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M',
            512, 512, 512],
    '512': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M',
            512, 512, 512],
}
class BasicConv(nn.Module):
    """Conv2d followed by optional BatchNorm2d and optional in-place ReLU."""

    def __init__(self, in_planes, out_planes, kernel_size, stride=1, padding=0,
                 dilation=1, groups=1, relu=True, bn=True, bias=False):
        super(BasicConv, self).__init__()
        self.out_channels = out_planes
        self.conv = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size,
                              stride=stride, padding=padding, dilation=dilation,
                              groups=groups, bias=bias)
        # Optional stages are stored as None when disabled so forward() skips them.
        self.bn = nn.BatchNorm2d(out_planes, eps=1e-5, momentum=0.01, affine=True) if bn else None
        self.relu = nn.ReLU(inplace=True) if relu else None

    def forward(self, x):
        out = self.conv(x)
        if self.bn is not None:
            out = self.bn(out)
        if self.relu is not None:
            out = self.relu(out)
        return out
| [
"844518133@qq.com"
] | 844518133@qq.com |
4996e301ea38ba37f316c14cfed17c8e1282f1d2 | 5b6190ab59c6a16193a1adb2b4d9651215587235 | /reading_assistant/review/view_review.py | 1dc3079e7cebed89857f8013c00c9f52bb92a569 | [] | no_license | panyzzing/reading_assistant_python | 0b8926783a62593cffbb5ab08a73cc33cb893fd6 | 936e7756a29cc3bc7b4da81390ccf3c03e927caf | refs/heads/master | 2021-01-17T20:25:22.220176 | 2013-11-21T08:19:16 | 2013-11-21T08:19:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,000 | py | # -*- coding: utf-8 -*-
'''
Created on 2013. 5. 29.
'''
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from reading_assistant.models import Book, ReviewForm, Review, BestWord
def insert_form(request):
    """Render the review-insert form for the POSTed user email and book ISBN."""
    email = request.POST['user']
    isbn = request.POST['book']
    # NOTE(review): builds an unsaved Book shell just to carry the ISBN into
    # the template; nothing is looked up or persisted here -- confirm intended.
    book = Book(isbn=isbn)
    return render_to_response('review/review_insert.html', {
        'user_email' : email,
        'book_isbn' : isbn,
        'book' : book,
    }, context_instance = RequestContext(request));
def insert(request):
    """Validate and save a posted review, then re-render the book detail page."""
    reviewForm = ReviewForm(request.POST)
    if reviewForm.is_valid() :
        reviewForm.save()
    else :
        # Python 2 print statement -- this module targets Python 2 / old Django.
        print reviewForm.errors
        # Alert text is Korean for "form validation failed".
        return HttpResponse('<script>alert("폼 유효성 검사 실패!\n");history.back();</script>')
    isbn = request.POST['isbn']
    book = Book.objects.get(isbn=isbn)
    # Rebuild the same context the book detail view uses.
    return render_to_response('book/book_detail.html', {
        'book' : book,
        'bestword_list' : BestWord.objects.select_related().filter(book=book).order_by('-reg_dt'),
        'review_list' : Review.objects.select_related().filter(book=book).order_by('-reg_dt'),
        'current_ratio' : float(book.current_page) / float(book.total_page) * 100.0,
    }, context_instance = RequestContext(request));
def delete(request):
    """Delete the named review, then re-render the book detail page."""
    review_name = request.POST['review_name']
    # NOTE(review): constructs an unsaved Review keyed only by review_name and
    # calls delete() on it; this deletes the DB row only if review_name is the
    # primary key -- confirm against the model definition.
    review = Review(review_name=review_name)
    review.delete()
    isbn = request.POST['isbn']
    book = Book.objects.get(isbn=isbn)
    return render_to_response('book/book_detail.html', {
        'book' : book,
        'bestword_list' : BestWord.objects.select_related().filter(book=book).order_by('-reg_dt'),
        'review_list' : Review.objects.select_related().filter(book=book).order_by('-reg_dt'),
        'current_ratio' : float(book.current_page) / float(book.total_page) * 100.0,
    }, context_instance = RequestContext(request));
"skytear7@naver.com"
] | skytear7@naver.com |
56aab08d1b6c26fa14b194206cbfa5efc8283483 | 461fd5b3e74493fae216f196a0f4f1e2cfc674d2 | /src/5.1.py | 07ed2cb80d98e0c2d400bf018f307371d2593616 | [] | no_license | xietx1995/python-pdb-intro | 48270548962a4ce92dccd288620a79b7814d7d09 | 99672618c51f03d44d3b60bf4203beaeff5fd3b7 | refs/heads/master | 2023-07-16T21:29:53.983152 | 2021-08-22T04:38:17 | 2021-08-22T04:38:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | """代码清单 5.1.py"""
import json
import util
def load_info(path):
    """Parse the JSON file at *path* and return the resulting object."""
    with open(path) as handle:
        return json.load(handle)
if __name__ == '__main__':
    # Script entry point: read info.json from the working directory and
    # delegate pretty-printing to the sibling util module.
    info = load_info('info.json')
    util.print_info(info)
| [
"noreply@github.com"
] | noreply@github.com |
f1340d03564b55084aa43cbf65a62e0069f8210d | e7d5450f6c0a99be41d437eb377e6fee95366c69 | /client/migrations/0002_alter_client_balance.py | 71792b6f948aa28728c710ac5fb43af4a4fc4197 | [] | no_license | SofiaSh/MetaminsTest | ea0f5b31b2473a5d2dba832106870a2a551f1820 | 43518c90a13102c109c6f2ac7231f557220fd965 | refs/heads/main | 2023-04-09T01:51:41.767942 | 2021-04-12T19:07:03 | 2021-04-12T19:07:03 | 356,824,622 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 372 | py | # Generated by Django 3.2 on 2021-04-11 17:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 3.2: alter Client.balance to an IntegerField
    # defaulting to 0. Prefer regenerating over hand-editing migrations.
    dependencies = [
        ('client', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='client',
            name='balance',
            field=models.IntegerField(default=0),
        ),
    ]
| [
"79996200981@ya.ru"
] | 79996200981@ya.ru |
6da658a7cd0fa783707ae8e692a3eebd505ebfec | 92a13acc1316d3cbe60b86bf262cdd1f13b10bc8 | /app/core/C.py | d126dbc2c7f66971cea5ce737abf5dfe4625e902 | [] | no_license | sbhkmndl/flask-autosklearn | 06d86d18d0d964ee88ba0fd51e675736c163d623 | 9859d499824f5814451f2e882e95ad1dba7c3bfd | refs/heads/main | 2023-08-28T12:17:53.944013 | 2021-11-09T04:50:03 | 2021-11-09T04:50:03 | 424,578,604 | 0 | 1 | null | 2021-11-04T13:48:02 | 2021-11-04T11:58:55 | Python | UTF-8 | Python | false | false | 103 | py | import uuid
DATETIME_FORMAT = "%y-%m-%d %H:%M:%S"
def get_unique_id():
    """Return a fresh random UUID4 in its canonical 36-character string form."""
    return "{}".format(uuid.uuid4())
| [
"soubhikmandal@datoin.com"
] | soubhikmandal@datoin.com |
15d66bf881c1a50056be62d9e4df57b8b88c1f1f | 351977b81d80bdbf611a3fe885494bc805b2cdc0 | /geocoder.py | cb18b7086d6f3bd5d6b1b2946fede86932aa7eb0 | [
"MIT"
] | permissive | xyx0826/sea_fires_around_me | 707688dd2dfcf3156fd2eb3c5e3e6032c44076a3 | 81c8240cfeaab724b28b7554459405fb7703959d | refs/heads/master | 2023-04-17T06:39:03.636011 | 2021-04-21T07:05:18 | 2021-04-21T07:05:18 | 359,633,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,381 | py | # geocoding.py: Geocodes addresses using Google Maps Geocoding API.
from typing import Tuple
import requests
# MapQuest geocoding API
API_ENDPOINT = "https://www.mapquestapi.com/geocoding/v1/address"
# Suffix for Seattle addresses
ADDRESS_SUFFIX = ", Seattle, WA"
class Geocoder:
    """A MapQuest-backed geocoder converting Seattle addresses to coordinates."""

    def __init__(self, key: str) -> None:
        """Create a geocoder holding the MapQuest API *key* and an HTTP session."""
        self._key = key
        self._ses = requests.Session()

    def _make_url_params(self, addr: str) -> dict:
        """Build the query-string parameters MapQuest expects for *addr*."""
        return {"key": self._key, "location": addr, "thumbMaps": False}

    def _send_request(self, query: dict) -> dict:
        """GET the geocoding endpoint; return parsed JSON, or None on HTTP error."""
        resp = self._ses.get(API_ENDPOINT, params=query)
        if resp.status_code == 200:
            return resp.json()
        print("Mapquest request error: " + resp.text)
        return None

    def geocode(self, addr: str) -> Tuple[float, float]:
        """Geocode a Seattle address.

        Returns (lat, lon) of the first result located in Seattle, or
        (0.0, 0.0) when the request fails or yields no valid location.
        """
        # Intersections use '&' in MapQuest; append the city/state suffix.
        full_addr = addr.replace("/", "&") + ADDRESS_SUFFIX
        payload = self._send_request(self._make_url_params(full_addr))
        if payload is None:
            return (0.0, 0.0)
        results = payload["results"]
        if not results:
            print("Mapquest didn't return any results.")
            return (0.0, 0.0)
        for result in results:
            for loc in result["locations"]:
                if loc["adminArea5"] == "Seattle":
                    return (loc["latLng"]["lat"], loc["latLng"]["lng"])
        print("Mapquest didn't return any valid locations.")
        return (0.0, 0.0)
| [
"xyx0826@hotmail.com"
] | xyx0826@hotmail.com |
400d7622016fcfb492740dda163779cef6503a7d | 0912f9d024a2b1f256237f4c10c794b7afbdc7e2 | /sorts/selection/selection.py | 41587aede39c7ff32cbabe169ea126967e9f5611 | [] | no_license | aleksandragaworska/algorithms | 9f8a444dd60c72e5303454acc73f6a9c4bd63961 | 44aec20fa1d198f52d7f9df66e20d9ed4221345e | refs/heads/main | 2023-07-18T01:03:11.401202 | 2021-08-29T15:29:58 | 2021-08-29T15:29:58 | 399,578,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,405 | py |
def selection_sort_recursive(not_sorted):
    """Selection sort via recursion: repeatedly move the minimum to the output.

    Returns a new sorted list; the input list is left untouched.
    """
    remaining = list(not_sorted)

    def pick_smallest(pool, acc):
        # Base case: nothing left to select.
        if not pool:
            return acc
        smallest = min(pool)
        acc.append(smallest)
        pool.remove(smallest)
        return pick_smallest(pool, acc)

    return pick_smallest(remaining, [])
def selection_sort_for(not_sorted):
    """Selection sort: repeatedly extract the minimum into a result list.

    Returns a new sorted list; the input list is left untouched.
    """
    pool = list(not_sorted)
    ordered = []
    while pool:
        smallest = min(pool)
        ordered.append(smallest)
        pool.remove(smallest)
    return ordered
def selection_sort_swap(not_sorted):
    """Selection sort by swapping the minimum into each successive slot.

    Improvement over the original: the minimum's index is tracked during the
    scan instead of re-finding the value afterwards with list.index() (an
    extra O(n) scan per pass) and copying the tail via slicing. Behavior is
    unchanged: returns a new sorted list, input untouched.
    """
    items = not_sorted.copy()
    for slot in range(len(items)):
        # Find the index of the smallest element in items[slot:].
        min_idx = slot
        for j in range(slot + 1, len(items)):
            if items[j] < items[min_idx]:
                min_idx = j
        items[slot], items[min_idx] = items[min_idx], items[slot]
    return items
def selection_sort_swap_2(not_sorted):
    """Selection sort scanning with enumerate and swapping via a helper.

    Returns a new sorted list; the input list is left untouched.
    """
    result = not_sorted.copy()

    def exchange(seq, a, b):
        # Swap the elements at positions a and b, returning the list.
        seq[a], seq[b] = seq[b], seq[a]
        return seq

    for pos in range(len(result)):
        best_val, best_idx = result[pos], pos
        for idx, candidate in enumerate(result[pos:], pos):
            if candidate < best_val:
                best_val, best_idx = candidate, idx
        result = exchange(result, pos, best_idx)
    return result
def selection_sort_swap_3(not_sorted):
    """Selection sort using index-only scans and tuple-unpacking swaps.

    Returns a new sorted list; the input list is left untouched.
    """
    result = not_sorted.copy()
    length = len(result)
    for pos in range(length):
        best_idx = pos
        for j in range(pos, length):
            if result[j] < result[best_idx]:
                best_idx = j
        result[pos], result[best_idx] = result[best_idx], result[pos]
    return result
| [
"agaworska@cloudferro.com"
] | agaworska@cloudferro.com |
50a16c67461ce02e724913ae7d872f1247fe0fd7 | 07379959ff4df8b88adf9cadba3406a68247c658 | /src/rc_buttons.py | 926fac902e311d852252914a266fcd7b777b9b27 | [] | no_license | dargo600/ir-server | 212400f6598c8e0aa648eb4553b46f7bc1b1b6dc | f4234a41f6026e36229b607817526d6e15206ef9 | refs/heads/master | 2020-06-07T08:04:06.743609 | 2019-08-12T20:34:43 | 2019-08-12T20:34:43 | 192,968,807 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,685 | py | """
This is the rc buttons module and supports all the ReST actions for the
RC Buttons collection
"""
from flask import make_response, abort
from config import db
from models import RCButton, RCButtonSchema
def read_all():
    """
    Respond to GET /api/rc_buttons with the complete list of rc_buttons,
    ordered by id.

    :return: serialized list of rc_buttons
    """
    rc_buttons = RCButton.query.order_by(RCButton.rc_button_id).all()
    # many=True serializes the whole collection in one pass.
    rc_button_schema = RCButtonSchema(many=True)
    data = rc_button_schema.dump(rc_buttons).data
    return data
def read_one(rc_button_id):
    """
    Respond to GET /api/rc_buttons/{rc_button_id} with the matching rc_button.

    :param rc_button_id: id of the rc_button to find
    :return: serialized rc_button, or a 404 abort if no row matches
    """
    # one_or_none() yields None instead of raising when the id is absent.
    rc_button = RCButton.query\
        .filter(RCButton.rc_button_id == rc_button_id)\
        .one_or_none()
    if rc_button is not None:
        rc_button_schema = RCButtonSchema()
        data = rc_button_schema.dump(rc_button).data
        return data
    else:
        abort(404, f"RC_button not found for Id: {rc_button_id}")
def create(rc_button):
    """
    Create a new rc_button from the posted data.

    Uniqueness is enforced on the (rc_type, device_config_id) pair.

    :param rc_button: dict describing the rc_button to create
    :return: (serialized rc_button, 201) on success; aborts with 409 if the
             (rc_type, device_config_id) pair already exists
    """
    rc_button_type = rc_button.get("rc_type")
    device_config_id = rc_button.get("device_config_id")
    existing_rc_button = RCButton.query\
        .filter(RCButton.rc_type == rc_button_type)\
        .filter(RCButton.device_config_id == device_config_id)\
        .one_or_none()
    if existing_rc_button is None:
        schema = RCButtonSchema()
        # schema.load builds a model instance bound to the session.
        new_rc_button = schema.load(rc_button, session=db.session).data
        db.session.add(new_rc_button)
        db.session.commit()
        # Dump after commit so generated fields (e.g. the new id) are included.
        data = schema.dump(new_rc_button).data
        return data, 201
    else:
        abort(409,
              f"rc_button {rc_button_type} and {device_config_id} exists already")
def update(rc_button_id, rc_button):
    """
    Update an existing rc_button.

    :param rc_button_id: id of the rc_button to update
    :param rc_button: dict with the new rc_button field values
    :return: (serialized rc_button, 200) on success; aborts with 404 if the
             id does not exist
    """
    update_rc_button = RCButton.query\
        .filter(RCButton.rc_button_id == rc_button_id)\
        .one_or_none()
    if update_rc_button is not None:
        schema = RCButtonSchema()
        new_update = schema.load(rc_button, session=db.session).data
        # Force the URL id onto the loaded object so merge targets the same row.
        new_update.rc_button_id = update_rc_button.rc_button_id
        db.session.merge(new_update)
        db.session.commit()
        # update_rc_button is the same identity-mapped row, so it reflects the merge.
        data = schema.dump(update_rc_button).data
        return data, 200
    else:
        abort(404, f"rc_button not found for Id: {rc_button_id}")
def delete(rc_button_id):
    """
    Delete an rc_button.

    :param rc_button_id: id of the rc_button to delete
    :return: 200 response on successful delete; aborts with 404 if not found
    """
    rc_button = RCButton.query.filter(RCButton.rc_button_id == rc_button_id)\
        .one_or_none()
    if rc_button is not None:
        db.session.delete(rc_button)
        db.session.commit()
        return make_response(f"rc_button {rc_button_id} deleted", 200)
    else:
        abort(404, f"rc_button not found for Id: {rc_button_id}")
"david.turgeon@gmail.com"
] | david.turgeon@gmail.com |
cc1b9a672163c2594baee1485636929c3ba41bf0 | 3955c3f367a3a60f8602dcb4609faec9898438bb | /graylog/apis/systemshutdown_api.py | 255334f55b57fe4bd65f0fe6abe937ca07111116 | [
"Apache-2.0"
] | permissive | MinhKMA/graylog.py | e89c34defa5422d59d0a501355058f5eb2dfe68c | 3118f4a49c91c2cbbd660523b0ab99e56fbfd861 | refs/heads/master | 2021-05-06T21:03:06.946509 | 2016-09-23T04:31:13 | 2016-09-23T04:31:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,316 | py | # coding: utf-8
"""
No descripton provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.1.1+01d50e5
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class SystemshutdownApi(object):
    """
    Client for the Graylog system-shutdown endpoint.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually -- regenerate it instead.
    Ref: https://github.com/swagger-api/swagger-codegen
    """
    def __init__(self, api_client=None):
        # Use the supplied ApiClient, else the (lazily created) shared one.
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client
    def shutdown(self, **kwargs):
        """
        Shutdown this node gracefully.

        Attempts to process all buffered and cached messages before exiting,
        shuts down inputs first to make sure that no new messages are accepted.

        Synchronous by default; pass a `callback` function to make the request
        asynchronous, e.g.:

        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.shutdown(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: None. If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.shutdown_with_http_info(**kwargs)
        else:
            (data) = self.shutdown_with_http_info(**kwargs)
            return data
    def shutdown_with_http_info(self, **kwargs):
        """
        Shutdown this node gracefully (full HTTP-info variant).

        Same semantics as shutdown(); see that method for the callback usage.

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: None. If the method is called asynchronously,
            returns the request thread.
        """
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method shutdown" % key
                )
            params[key] = val
        del params['kwargs']
        resource_path = '/system/shutdown/shutdown'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept([])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type([])
        # Authentication setting
        auth_settings = []
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))
| [
"on99@users.noreply.github.com"
] | on99@users.noreply.github.com |
1d0988a109e7416e5983d95d3000c5a379261685 | 6feff4f9560c6f78b2cf86256e301190657b4726 | /Problem 3_6.py | 57d14b512e983e07b2092eb552f334303626786d | [] | no_license | NebiyouSAbebe/Python-Programming. | 6f41fb6173476f1a3f22b3f8bc416e4632428c98 | 97102f16f92440e6d97c5ee212cad7123c59b1af | refs/heads/master | 2021-04-18T20:37:34.230657 | 2018-03-26T20:16:07 | 2018-03-26T20:16:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,643 | py | #%%
"""
Problem 3_6:
Write a program (not just a function, but a stand alone program or script) that
reads through a file and writes another file that gives the length of each line
in the first file. If line is the line that you've read into your program, use
line.strip("\n") to strip away the invisible newline character at the end of
each line. Otherwise, your count will be one higher than the autograder's.
Note that since this is a program running from the Command Window (or terminal
or cmd.exe), it won't be runnable as our usual functions are by entering
Shift-Enter. You should use the File menu in Spyder to create you own file.
But, if you prefer, there is a starter file called problem3_6starter.py.
Here is a run of my solution program using the HumptyDumpty.txt file. The run
is followed by a printout of HumptyDumpty.txt and the written file
HumptyLength.txt. Note that your program does not print anything out. It does
write a text file though. To see these files we have to use type on a PC ( but
it would be cat for Mac or Linux).
C:>python problem3_6.py humptydumpty.txt humptylength.txt
C:>type humptydumpty.txt
Humpty Dumpty sat on a wall,
Humpty Dumpty had a great fall.
All the king's horses and all the king's men
Couldn't put Humpty together again.
C:>type humptylength.txt
28
31
44
35
Solution Starter:
"""
import sys
infile = sys.argv[1]
outfile = sys.argv[2]
infile = open(infile)
outfile = open(outfile, 'w')
for line in infile:
line = line.strip("\n")
outfile.write(str(len(line)) + "\n")
infile.close()
outfile.close()
"""
| [
"noreply@github.com"
] | noreply@github.com |
fc8988a55bceef6e1ae0c856c3c021d96b304a03 | ac29c5a593d664ebfcc057cf689ade7ce6c8b3de | /practice/python/ABC180/C.py | 31e7dfba60563a25a73c083abbda44565bc21cf4 | [] | no_license | TakahiroSono/atcoder | 193c186078f3b7240bb630a242d2e339dc6e8d6a | da65bcbf6d11d8d6a4481a708d5e020779f43419 | refs/heads/master | 2023-05-31T15:32:46.732591 | 2021-06-13T11:15:20 | 2021-06-13T11:15:20 | 294,786,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | N = int(input())
ans = []
for i in range(1, int(N ** 0.5) + 1):
if N % i:
continue
else:
ans.append(i)
if i != N // i:
ans.append(N // i)
ans.sort()
print(*ans)
| [
"taka.sono889@gmail.com"
] | taka.sono889@gmail.com |
df61da301719a0bb3bd40237de169034740203a2 | 357eee1fcb340f828a24e988f459771cf1ace57a | /do_sum_proogramming_dud.py | 4f60173f1d4504e2252f6bde0c2249ab6c93b4c0 | [] | no_license | MoonRaccoon/CS101 | 85cfc8e47db2b57fce8c1031157189030e8d2129 | 147edc1b956233fbfcd35fed0ae8d924aeb62827 | refs/heads/master | 2020-05-31T19:19:36.485604 | 2014-10-11T01:47:10 | 2014-10-11T01:47:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42 | py | print 3 - 60
print "I am da best at meth"
| [
"shamoun723@gmail.com"
] | shamoun723@gmail.com |
039b5a5d6166730f71fa8dbae29bca022fb667b1 | a3cc7286d4a319cb76f3a44a593c4a18e5ddc104 | /lib/surface/logging/metrics/delete.py | 0fce2d106ce17d119936f16b3a3b14351d2e6cd1 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | jordanistan/Google-Cloud-SDK | f2c6bb7abc2f33b9dfaec5de792aa1be91154099 | 42b9d7914c36a30d1e4b84ae2925df7edeca9962 | refs/heads/master | 2023-09-01T01:24:53.495537 | 2023-08-22T01:12:23 | 2023-08-22T01:12:23 | 127,072,491 | 0 | 1 | NOASSERTION | 2023-08-22T01:12:24 | 2018-03-28T02:31:19 | Python | UTF-8 | Python | false | false | 1,918 | py | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""'logging metrics delete' command."""
from googlecloudsdk.api_lib.logging import util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
class Delete(base.DeleteCommand):
"""Deletes a logs-based metric."""
@staticmethod
def Args(parser):
"""Register flags for this command."""
parser.add_argument(
'metric_name', help='The name of the metric to delete.')
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
"""
console_io.PromptContinue(
'Really delete metric [%s]?' % args.metric_name, cancel_on_no=True)
util.GetClient().projects_metrics.Delete(
util.GetMessages().LoggingProjectsMetricsDeleteRequest(
metricName=util.CreateResourceName(
util.GetCurrentProjectParent(), 'metrics', args.metric_name)))
log.DeletedResource(args.metric_name)
Delete.detailed_help = {
'DESCRIPTION': """\
Deletes a logs-based metric called high_severity_count.
""",
'EXAMPLES': """\
To delete a metric called high_severity_count, run:
$ {command} high_severity_count
""",
}
| [
"jordan.robison@gmail.com"
] | jordan.robison@gmail.com |
77cfd36b65d609b44e3fa1b960fbfb54748bfadd | e9172452ed3777653ec7a4c7ef6d2269a2309a4c | /pandasRollingStats.py | 30244426f797b5c72c4db22f9f4bba209db9fc6a | [] | no_license | aiporre/QuinoaMarketForecast | ec7163ea52e7c63c34448c302d4539b96270a3dd | b76bf5380b930859392a7c6c46eade2464a94143 | refs/heads/master | 2021-09-24T09:20:13.704502 | 2016-10-03T06:51:14 | 2016-10-03T06:51:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,524 | py |
import pandas as pd
import quandl
import matplotlib.pyplot as plt
from matplotlib import style
style.use('fivethirtyeight')
def get_zinc_price():
api_key = open('data/myApiKey.txt', 'r').read()
gold = quandl.get('ODA/PZINC_USD', authtoken=api_key)
pd.DataFrame(gold).to_pickle('data/zinc.pickle')
def get_wheat_price():
api_key = open('data/myApiKey.txt', 'r').read()
gold = quandl.get('ODA/PWHEAMT_USD', authtoken=api_key)
pd.DataFrame(gold).to_pickle('data/wheat.pickle')
fig = plt.figure()
ax1 = plt.subplot2grid((4,1),(0,0))
ax2 = plt.subplot2grid((4,1),(1,0))
ax3 = plt.subplot2grid((4,1),(2,0))
ax4 = plt.subplot2grid((4,1),(3,0))
# read prices of zinc
try:
zinc = pd.read_pickle('data/zinc.pickle')
except:
zinc = get_zinc_price()
# read prices of wheat
try:
wheat = pd.read_pickle('data/wheat.pickle')
except:
wheat = get_wheat_price()
# calculatin rollings
zinc.columns = ['price_z']
wheat.columns = ['price_w']
zw = zinc.join(wheat)
zinc['priceRA'] = pd.rolling_mean(zinc['price_z'],12)
zinc['priceRS'] = pd.rolling_std(zinc['price_z'],12)
print zw.head(10)
zinc_wheat_corr = pd.rolling_corr(zw['price_z'],zw['price_w'],12)
print zinc.head(15)
print zinc_wheat_corr.head(15)
# zinc.dropna(inplace = True) # posible to use dorpna
zinc[['price_z','priceRA']].plot(ax = ax1)
zinc['priceRS'].plot(ax = ax2)
zw.plot(ax = ax3)
zinc_wheat_corr.plot(ax = ax4)
plt.show()
# standrd deviatio help to filter date that doesnlt fit
# an to undersatd the volatitty of the data.
| [
"ariel.iporre.rivas@gmail.com"
] | ariel.iporre.rivas@gmail.com |
fac205b61649966e510a8828aedfcc3387602619 | c3461b299c8fce34cc99d881631e0023dc95d465 | /Anurag/Database_Feature_Selection/databaseFeatureSelection.py | cb40afb53d939c537315b03a1acd3610e27bd100 | [] | no_license | RawatVimal/internship | 563c1743e41a852cc2be7b94a6c6b712f7baa7c0 | 5993828c654d8f1cdcbd5079569d40e74c6bf976 | refs/heads/main | 2023-05-07T11:09:36.271402 | 2021-05-31T20:34:02 | 2021-05-31T20:34:02 | 317,539,882 | 0 | 2 | null | 2021-05-31T20:34:03 | 2020-12-01T12:51:09 | Jupyter Notebook | UTF-8 | Python | false | false | 2,429 | py | import pandas as pd
from pandas import DataFrame
import requests
top_db_count = 20 # user can change the count to get that many top databases
#===========================getting database names ======================================
df_list = pd.read_html('https://db-engines.com/en/ranking') # this parses all the tables in webpages to a list
df_list = df_list[3]
df_list = df_list.iloc[3:top_db_count+3,3]
df=df_list.to_frame()
df.columns =['Database Names']
df.reset_index(drop=True, inplace=True)
df['Database Names'] = df['Database Names'].str.replace('Detailed vendor-provided information available', '')
df = df.T
df.columns = df.iloc[0]
df = df[1:]
df.insert(loc=0, column='Features', value='')
df.columns = df.columns.str.strip()
#================================== getting feature names ================================
df_list_features_names = pd.read_html('https://db-engines.com/en/system/Oracle')
df_list_features_names = df_list_features_names[3]
df_list_features_names= df_list_features_names.iloc[2:35,0]
df_list_features_names=df_list_features_names.to_frame()
df_list_features_names.columns =['Features']
df_list_features_names.reset_index(drop=True, inplace=True)
df['Features'] = df_list_features_names['Features']
#======================================== Saving features of each database ======================
count = 1
while count != top_db_count+1: #outer while loop to loop over defined top databases
db_name = df.columns[count]
databse_name = df.columns[count]
db_name = db_name.replace(" ", "+")
df_list_features_values = pd.read_html('https://db-engines.com/en/system/'+db_name)
df_list_features_db = df_list_features_values[3]
feature = 0
while feature != 33: #inner while loop to get only features which are present for that particular database.Feature which are not present are filled with null.
boolean_finding = df_list_features_db[0].str.match(df['Features'][feature]).any()
if (boolean_finding):
index = df_list_features_db[df_list_features_db[0] == df['Features'][feature]].index.item()
df[databse_name][feature] = df_list_features_db[1][index]
else:
df[databse_name][feature] = ""
feature += 1
count = count + 1
#======================================== Writing dataframe to csv file ======================
df.to_csv('databaseFeatures.csv')
| [
"joshianurag144@gmail.com"
] | joshianurag144@gmail.com |
ef4f04aa2147edecf62a633709bacc946ccb5d38 | 60c5173b2fd79afdff613ed980e718f8867015b0 | /15 clone project - blog/mysite/mysite/settings.py | 81be91b26c0f0b0b0e41a16484e93fc8f7544877 | [] | no_license | susan8213/Python-and-Django-Full-Stack-Web-Developer-Bootcamp | bc27e5127d80dfa826503950e475e07e1c868aa4 | 7ead38ec9dbbb60e2213e7ffa85e03293b80a08d | refs/heads/master | 2022-11-29T09:59:24.917236 | 2020-04-12T15:27:17 | 2020-04-12T15:27:17 | 228,852,519 | 0 | 0 | null | 2022-11-22T05:17:25 | 2019-12-18T14:03:51 | Python | UTF-8 | Python | false | false | 3,125 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 3.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '1k1nxrrxkua76td*97ho1)j3^*6$uceaodht$ty0!^sz=wr)aq'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
LOGIN_REDIRECT_URL = '/' | [
"susan8213@gmail.com"
] | susan8213@gmail.com |
f50a2e2610bbc7e0d8b231d6e7da88dcde121bc8 | 28829fc9f340a731da8a8b5d35dfc5e8c1e981fe | /tiler-deployment/tests/test_api.py | dae7f9e1813c83b31c435c5646a848787a8723d7 | [] | no_license | geohackweek/ghw2019_cog-pipeline | a6750ac23b8304066cbf9dfb3ab75b55d1d5168e | 2fbf15fd50568ca51af36f4ec6476076c5c7b19c | refs/heads/master | 2020-07-24T12:59:43.568979 | 2019-10-09T17:53:33 | 2019-10-09T17:53:33 | 207,936,431 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 21,958 | py | import os
import json
import base64
import pytest
import vector_tile_base
from tiler.api import APP
# Paths to the Cloud-Optimized GeoTIFF fixtures used throughout the tests.
_fixtures_dir = os.path.join(os.path.dirname(__file__), "fixtures")

file_sar = os.path.join(_fixtures_dir, "sar_cog.tif")
file_rgb = os.path.join(_fixtures_dir, "rgb_cog.tif")
file_lidar = os.path.join(_fixtures_dir, "lidar_cog.tif")
file_nodata = os.path.join(_fixtures_dir, "rgb_cog_nodata.tif")
file_mos1 = os.path.join(_fixtures_dir, "mosaic_cog1.tif")
file_mos2 = os.path.join(_fixtures_dir, "mosaic_cog2.tif")
# The mosaic endpoints take a comma-separated list of dataset URLs.
mosaic_url = ",".join([file_mos1, file_mos2])
@pytest.fixture()
def event():
    """Return a minimal API-Gateway-style GET event for the lambda handler."""
    return dict(
        path="/",
        httpMethod="GET",
        headers={},
        queryStringParameters={},
    )
def test_API_favicon(event):
    """GET /favicon.ico should return an empty 204 response."""
    event["path"] = "/favicon.ico"
    event["httpMethod"] = "GET"

    expected = {
        "body": "",
        "headers": {
            "Access-Control-Allow-Credentials": "true",
            "Access-Control-Allow-Methods": "GET",
            "Access-Control-Allow-Origin": "*",
            "Content-Type": "text/plain",
        },
        "statusCode": 204,
    }
    assert APP(event, {}) == expected
def test_API_bbox(event):
    """Test /bbox route.

    Exercises the error paths (missing/malformed parameters) and the
    success paths (full statistics, band subset via ``indexes``, nodata
    handling with ``histogram_bins``, and a bbox outside the data).
    """
    # No parameters at all: the error message must name both 'url' and 'bbox'.
    event["path"] = "/bbox"
    event["httpMethod"] = "GET"
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert "url" in body["errorMessage"]
    assert "bbox" in body["errorMessage"]

    # 'url' provided but 'bbox' missing.
    event["path"] = "/bbox"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_rgb}
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert "bbox" in body["errorMessage"]

    # Malformed bbox with only three values.
    event["path"] = "/bbox"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_rgb,
        "bbox": "-61.56544,-61.563559,16.226925",
    }
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["errorMessage"] == "BBOX must be a 4 values array"

    # Valid request: statistics for all three bands of the RGB fixture.
    event["path"] = "/bbox"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_rgb,
        "bbox": "-61.56544,16.22859,-61.563559,16.226925",
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert body["bbox"]
    assert body["band_descriptions"]
    assert len(body["statistics"].keys()) == 3

    # 'indexes' limits the statistics to the requested bands.
    event["path"] = "/bbox"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_rgb,
        "indexes": "1,2",
        "bbox": "-61.56544,16.22859,-61.563559,16.226925",
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert len(body["statistics"].keys()) == 2

    # Explicit 'nodata' plus a custom histogram bin count.
    event["path"] = "/bbox"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_nodata,
        "nodata": "0",
        "histogram_bins": "10",
        "bbox": "-104.77514,38.9536719,-104.7749354,38.9535883",
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert len(body["statistics"]["1"]["histogram"][0]) == 10

    # A bbox entirely outside the dataset is an error.
    event["path"] = "/bbox"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_nodata,
        "nodata": "0",
        "bbox": "-61.56544,16.22859,-61.563559,16.226925",
    }
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
def test_API_point(event):
    """Test /point route.

    Exercises missing-parameter errors and point queries against the
    RGB fixture, with and without band subsetting via ``indexes``.
    """
    # Missing 'url' parameter -> 500 with an error message naming it.
    event["path"] = "/point"
    event["httpMethod"] = "GET"
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert "url" in body["errorMessage"]

    # 'url' provided but 'coordinates' missing.
    event["path"] = "/point"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_rgb}
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert "coordinates" in body["errorMessage"]

    # Valid request: one pixel value per band of the RGB fixture.
    event["path"] = "/point"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_rgb,
        "coordinates": "-61.56463623161228,16.227860775481847",
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert body["band_descriptions"]
    assert len(body["coordinates"]) == 2
    assert body["values"] == {"1": 82, "2": 126, "3": 99}

    # 'indexes' restricts the response to the requested band only.
    event["queryStringParameters"] = {
        "url": file_rgb,
        "coordinates": "-61.56463623161228,16.227860775481847",
        "indexes": "2",
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert body["band_descriptions"]
    assert len(body["coordinates"]) == 2
    assert body["values"] == {"2": 126}
def test_API_metadata(event):
    """Test /metadata route.

    Covers the missing-url error and metadata responses for the SAR,
    lidar, RGB and nodata fixtures, including the histogram_bins,
    nodata, indexes, overview_level and max_size query parameters.
    """
    # Missing 'url' parameter -> 500 with an error message naming it.
    event["path"] = f"/metadata"
    event["httpMethod"] = "GET"
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert "url" in body["errorMessage"]

    # Single-band SAR fixture: exactly one entry in 'statistics'.
    event["path"] = f"/metadata"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_sar}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert len(body["bounds"]["value"]) == 4
    assert body["bounds"]["crs"] == "EPSG:4326"
    assert len(body["statistics"].keys()) == 1
    assert body["minzoom"]
    assert body["maxzoom"]
    assert body["band_descriptions"]

    # Lidar fixture with a custom histogram bin count and named bands.
    event["path"] = f"/metadata"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_lidar, "histogram_bins": "20"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert len(body["bounds"]["value"]) == 4
    assert body["bounds"]["crs"] == "EPSG:4326"
    assert len(body["statistics"]['1']["histogram"][0]) == 20
    assert body["minzoom"]
    assert body["maxzoom"]
    assert body["band_descriptions"] == [
        [1, 'min'], [2, 'max'], [3, 'mean'], [4, 'idw'], [5, 'stdev']
    ]

    # Three-band RGB fixture: three statistics entries.
    event["path"] = f"/metadata"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_rgb}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert len(body["bounds"]["value"]) == 4
    assert body["bounds"]["crs"] == "EPSG:4326"
    assert len(body["statistics"].keys()) == 3
    assert body["minzoom"]
    assert body["maxzoom"]
    assert body["band_descriptions"]

    # Explicit 'nodata' value is accepted (path/method reused from above).
    event["queryStringParameters"] = {"url": file_nodata, "nodata": "0"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert len(body["bounds"]["value"]) == 4
    assert body["bounds"]["crs"] == "EPSG:4326"
    assert len(body["statistics"].keys()) == 3

    # 'indexes' limits statistics to the requested band.
    event["queryStringParameters"] = {"url": file_rgb, "indexes": "1"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert len(body["bounds"]["value"]) == 4
    assert body["bounds"]["crs"] == "EPSG:4326"
    assert len(body["statistics"].keys()) == 1

    # 'overview_level' is accepted (presumably selects which COG
    # overview is read for statistics — confirm against the API).
    event["queryStringParameters"] = {"url": file_rgb, "overview_level": "1"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert len(body["bounds"]["value"]) == 4
    assert body["bounds"]["crs"] == "EPSG:4326"
    assert len(body["statistics"].keys()) == 3

    # 'max_size' is accepted (presumably caps the size of the array
    # used to compute statistics — confirm against the API).
    event["queryStringParameters"] = {"url": file_rgb, "max_size": "512"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["address"]
    assert len(body["bounds"]["value"]) == 4
    assert body["bounds"]["crs"] == "EPSG:4326"
    assert len(body["statistics"].keys()) == 3
def test_API_tiles(event):
    """Test /tiles route.

    Covers the missing-url error, linear rescaling, colormaps, @2x
    (512px) tiles, nodata handling (explicit ``nan`` and the nodata
    fixture), RGB output, band subsetting and color operations.
    """
    # test missing url in queryString
    event["path"] = f"/tiles/12/2180/2049.jpg"
    event["httpMethod"] = "GET"
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["errorMessage"] == "Missing 'url' parameter"

    # test valid jpg request with linear rescaling
    event["path"] = f"/tiles/12/2180/2049.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_sar, "rescale": "-1,1"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # test valid jpg request with rescaling and colormap
    event["path"] = f"/tiles/12/2180/2049.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_sar,
        "rescale": "-1,1",
        "color_map": "cfastie",
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # test scale (512px tile size) via the "@2x" path suffix
    event["path"] = f"/tiles/12/2180/2049@2x.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_sar, "rescale": "-1,1"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # test with nodata == nan (passed as the string "nan")
    event["path"] = f"/tiles/12/2180/2049.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_sar,
        "rescale": "-1,1",
        "nodata": "nan",
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # test internal nodata (uses the rgb_cog_nodata.tif fixture)
    event["path"] = f"/tiles/20/219109/400917.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_nodata, "rescale": "0,2000"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # test rgb file
    event["path"] = f"/tiles/18/86242/119093.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_rgb}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # test indexes option (single-band tile from the RGB fixture)
    event["path"] = f"/tiles/18/86242/119093.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_rgb, "indexes": "1"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # test color_ops (rio-color style operations string)
    event["path"] = f"/tiles/18/86242/119093.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_rgb, "color_ops": "gamma rgb 3"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]
def test_API_tilejson(event):
    """Test /tilejson.json route.

    Checks the missing-url error, the default tile template (.png),
    a pbf tile_format, and propagation of extra query parameters
    (tile_format + rescale) into the tile URL template.
    """
    # Missing 'url' parameter -> 500 with an error message naming it.
    event["path"] = f"/tilejson.json"
    event["httpMethod"] = "GET"
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert "url" in body["errorMessage"]

    # Default request: tile template ends in .png and carries the url.
    event["path"] = f"/tilejson.json"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_sar}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["name"] == os.path.basename(file_sar)
    assert body["tilejson"] == "2.1.0"
    assert body["tiles"]
    assert body["tiles"][0].endswith(f"{{z}}/{{x}}/{{y}}.png?url={file_sar}")
    assert len(body["bounds"]) == 4
    assert len(body["center"]) == 2
    assert body["minzoom"] == 9
    assert body["maxzoom"] == 10

    # 'tile_format=pbf' switches the tile template extension.
    event["path"] = f"/tilejson.json"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_sar, "tile_format": "pbf"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["name"] == os.path.basename(file_sar)
    assert body["tilejson"] == "2.1.0"
    assert body["tiles"]
    assert body["tiles"][0].endswith(f"{{z}}/{{x}}/{{y}}.pbf?url={file_sar}")
    assert len(body["bounds"]) == 4
    assert len(body["center"]) == 2
    assert body["minzoom"] == 9
    assert body["maxzoom"] == 10

    # test with kwargs and image_format: extra query params (rescale)
    # are forwarded into the tile URL template.
    event["path"] = f"/tilejson.json"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_sar, "tile_format": "jpg", "rescale": "-1,1"
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["name"] == os.path.basename(file_sar)
    assert body["tilejson"] == "2.1.0"
    assert body["tiles"]
    assert body["tiles"][0].endswith(f"{{z}}/{{x}}/{{y}}.jpg?url={file_sar}&rescale=-1,1")
    assert len(body["bounds"]) == 4
    assert len(body["center"]) == 2
    assert body["minzoom"] == 9
    assert body["maxzoom"] == 10
def test_API_Vtiles(event):
    """Test /tiles .pbf (vector tile) route.

    Decodes the base64 protobuf body with vector_tile_base and checks
    feature properties derived from the lidar fixture's bands.
    """
    # test missing url in queryString
    event["path"] = f"/tiles/12/2161/2047.pbf"
    event["httpMethod"] = "GET"
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert "url" in body["errorMessage"]

    # Default vector tile: first feature carries a 'min' property
    # (band values are encoded as strings in the tile).
    event["path"] = f"/tiles/12/2161/2047.pbf"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"url": file_lidar}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/x-protobuf"
    assert res["body"]
    assert res["isBase64Encoded"]
    body = base64.b64decode(res["body"])
    vt = vector_tile_base.VectorTile(body)
    props = vt.layers[0].features[0].properties
    assert props["min"]
    assert props["min"] == "11.0"

    # With 'nodata' and polygon features, the per-band properties
    # (min/max/mean/idw) are all present.
    event["path"] = f"/tiles/12/2161/2047.pbf"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "url": file_lidar,
        "nodata": "-9999",
        "feature_type": "polygon"
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/x-protobuf"
    assert res["body"]
    assert res["isBase64Encoded"]
    body = base64.b64decode(res["body"])
    vt = vector_tile_base.VectorTile(body)
    props = vt.layers[0].features[0].properties
    assert props["min"]
    assert props["max"]
    assert props["mean"]
    assert props["idw"]
def test_API_tilejson_mosaic(event):
    """Test the /mosaic/tilejson.json route.

    Fix: dropped the extraneous ``f`` prefix from string literals that
    contain no placeholders.
    """
    # Missing "urls" in the query string must return a server error.
    event["path"] = "/mosaic/tilejson.json"
    event["httpMethod"] = "GET"
    res = APP(event, {})
    assert res["statusCode"] == 500

    # Valid request: TileJSON document describing the mosaic.
    event["path"] = "/mosaic/tilejson.json"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"urls": mosaic_url, "rescale": "-1,1"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["bounds"]
    assert body["minzoom"] == 9
    assert body["maxzoom"] == 11
def test_API_tiles_mosaic(event):
    """Test the /mosaic/{z}/{x}/{y} tile routes (raster jpg and vector pbf).

    Fix: dropped the extraneous ``f`` prefix from string literals that
    contain no placeholders.
    """
    # Missing "urls" in the query string must return a server error.
    event["path"] = "/mosaic/12/2156/2041.jpg"
    event["httpMethod"] = "GET"
    res = APP(event, {})
    assert res["statusCode"] == 500
    headers = res["headers"]
    assert headers["Content-Type"] == "application/json"
    body = json.loads(res["body"])
    assert body["errorMessage"] == "Missing 'urls' parameter"

    # Valid jpg request with linear rescaling.
    event["path"] = "/mosaic/12/2156/2041.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"urls": mosaic_url, "rescale": "-1,1"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # Valid jpg request with rescaling and a colormap.
    event["path"] = "/mosaic/12/2156/2041.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "urls": mosaic_url,
        "rescale": "-1,1",
        "color_map": "cfastie",
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # "@2x" scale modifier (512px tile size).
    event["path"] = "/mosaic/12/2156/2041@2x.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"urls": mosaic_url, "rescale": "0,10"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # nodata given as "nan".
    event["path"] = "/mosaic/12/2156/2041.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "urls": mosaic_url,
        "rescale": "-1,1",
        "nodata": "nan",
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # Band "indexes" option.
    event["path"] = "/mosaic/12/2156/2041.jpg"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {
        "urls": mosaic_url, "indexes": "1", "rescale": "0,10"
    }
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "image/jpg"
    assert res["body"]
    assert res["isBase64Encoded"]

    # Vector-tile output with the default (point) feature type.
    event["path"] = "/mosaic/12/2156/2041.pbf"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"urls": mosaic_url}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/x-protobuf"
    assert res["body"]
    assert res["isBase64Encoded"]
    body = base64.b64decode(res["body"])
    vt = vector_tile_base.VectorTile(body)
    props = vt.layers[0].features[0].properties
    assert props["idw"]

    # Vector-tile output with polygon features.
    event["path"] = "/mosaic/12/2156/2041.pbf"
    event["httpMethod"] = "GET"
    event["queryStringParameters"] = {"urls": mosaic_url, "feature_type": "polygon"}
    res = APP(event, {})
    assert res["statusCode"] == 200
    headers = res["headers"]
    assert headers["Content-Type"] == "application/x-protobuf"
    assert res["body"]
    assert res["isBase64Encoded"]
    body = base64.b64decode(res["body"])
    vt = vector_tile_base.VectorTile(body)
    props = vt.layers[0].features[0].properties
    assert props["idw"]
| [
"aimee@developmentseed.org"
] | aimee@developmentseed.org |
71017150041135c6351523ec3d66b81fb131421f | 443c435416376df1cd0ebf9fa6544ebdb23ede57 | /cluster_hiercluster.py | 14584e5c44244358c2a52bbb987e98c2d2de7e89 | [] | no_license | yinchuandong/mycaffe | 2c73fb6d2946f4e702cad40d4f0c07fbbf6b33fe | 0a39c2c7fdd814319b1e5814507426654d02a9d8 | refs/heads/master | 2021-01-09T21:50:14.221839 | 2016-03-21T09:44:27 | 2016-03-21T09:44:27 | 54,076,346 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,566 | py | import os
import sys
from PIL import Image
from PCV.clustering import hcluster
from matplotlib.pyplot import *
from numpy import *
# hierarchical clustering
def loadData(path):
    """Collect the .jpg images under *path* and compute a color-histogram
    feature vector for each one.

    Returns ``(imlist, features)``: the image paths (capped at 100 to keep
    clustering tractable) and an array with one flattened 20x20x20 RGB
    histogram (8000 bins) per image.
    """
    imlist = [os.path.join(path, f) for f in os.listdir(path) if f.endswith('.jpg')]
    imlist = imlist[:100]
    # extract feature vector (20 bins per color channel -> 20**3 = 8000)
    features = zeros((len(imlist), 8000))
    for i, f in enumerate(imlist):
        im = array(Image.open(f))
        # Multi-dimensional histogram over the RGB values of all pixels.
        # FIX: the deprecated (and now removed) `normed` kwarg of
        # numpy.histogramdd was dropped; raw counts (the old normed=False
        # behaviour) are the default.
        h, edges = histogramdd(im.reshape(-1, 3), 20, range=[(0, 255), (0, 255), (0, 255)])
        features[i] = h.flatten()
    return imlist, features
if __name__ == '__main__':
    path = '../data_exterior/exterior/'
    # NOTE(review): result_path is defined but never used below — confirm intent.
    result_path = '../cluster_result/'
    imlist, features = loadData(path)
    # Build the hierarchical cluster tree from the histogram features.
    tree = hcluster.hcluster(features)
    # visualize clusters with some (arbitrary) threshold
    clusters = tree.extract_clusters(0.23 * tree.distance)
    # plot images for cluster_result with more than 3 elements
    # print len(clusters)
    # sys.exit()
    for c in clusters:
        elements = c.get_cluster_elements()
        nbr_elements = len(elements)
        # print nbr_elements
        # continue
        if nbr_elements >= 3:
            figure()
            # Show at most 20 images of the cluster in a 4x5 grid.
            for p in range(minimum(nbr_elements, 20)):
                subplot(4, 5, p + 1)
                im = array(Image.open(imlist[elements[p]]))
                imshow(im)
                axis('off')
            show()
    # hcluster.draw_dendrogram(tree, imlist, filename='exterior_result.png')
"yincd520@sina.com"
] | yincd520@sina.com |
70e4497255159185bbd2c4946a1eb958f6f1520f | 4178f2916d2da72cbb45454fbed941dcfe8f6460 | /POM_test/TestCase/Detail_Profile/TC_005.py | a8f7d88a1374eb3444ef223474fdf03a291f71c2 | [] | no_license | maxcrup007/Selenium_Webdriver_Python | 15196cb04ba5cafdc5b776c26d167f0b48fb0e14 | 6be7f0b9f53df1ba592957029e8a4d22e409d1c4 | refs/heads/main | 2023-03-24T21:04:31.976451 | 2021-03-22T09:16:04 | 2021-03-22T09:16:04 | 349,379,454 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,008 | py |
# Tests access to / usage of the "personal information" (profile) page.
import time
import unittest
import sys
from selenium import webdriver
from selenium.webdriver import ActionChains
from POM_test.login import *
from POM_test.profilePage import *
from POM_test.scrollbar import *
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "...", "..."))
class TestProfile_5(unittest.TestCase):
    """End-to-end Selenium test: log in and fill out the profile form."""

    @classmethod
    def setUpClass(self):
        # NOTE(review): classmethod first parameter is conventionally named
        # `cls`; also the chromedriver path is machine-specific.
        self.driver = webdriver.Chrome(executable_path="C:/Users/voraw/Downloads/Compressed/webdriver/chromedriver/chromedriver")
        self.driver.implicitly_wait(10)
        self.driver.maximize_window()

    def test_login_valid(self):
        """Log in with a demo account and submit the profile form."""
        driver = self.driver
        self.driver.get("https://top-upstream-client.mulberrysoft.com/#/older/activity")
        # Page objects wrapping the login form and the scrolling helpers.
        login = LoginPage(driver)
        scroll = ScrollbarPage(driver)
        login.enter_username("demo005")
        login.enter_password("123456")
        login.click_login()
        time.sleep(2)
        # Navigate to the profile page and fill in each field, pausing for
        # the UI to settle between steps.
        profile = ProfilePage(driver)
        profile.into_profilePage()
        time.sleep(5)
        profile.profile_name_input("vatcharapong mahachot")
        time.sleep(2)
        profile.profile_email_input("vatcharapong11@hotmail.com")
        time.sleep(2)
        profile.profile_phone_number("086799315")
        time.sleep(2)
        scroll.profile_scrolling()
        time.sleep(2)
        profile.profile_address_text("555 หมู่17")
        time.sleep(2)
        scroll.profile_scrolling2()
        time.sleep(2)
        profile.profile_submit_confirm()
        time.sleep(2)

    @classmethod
    def tearDownClass(cls):
        # Close the current window, then shut the driver down entirely.
        cls.driver.close()
        cls.driver.quit()
        print("Test Completed")
if __name__ == '__main__':
unittest.main()
| [
"36732487+maxcrup007@users.noreply.github.com"
] | 36732487+maxcrup007@users.noreply.github.com |
9b30be79b4640fc18d4b5a9fb2f16622061e4b49 | 650a431056d08d2ad431ef7ce88884089eadf884 | /backend/config/wsgi.py | 844432239f5a5cc2588f07a311e7a73eae85a283 | [
"MIT"
] | permissive | krish300/settle | ea36b514d5fcc652b94f6a7fcc400ff536bcfc4c | 9bd8863b54d342a9fe4fafb98397cf8be2620036 | refs/heads/master | 2023-08-04T07:38:37.499353 | 2023-08-02T09:02:05 | 2023-08-02T09:02:05 | 245,692,012 | 0 | 0 | MIT | 2023-08-02T09:02:07 | 2020-03-07T19:23:32 | Vue | UTF-8 | Python | false | false | 174 | py | import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module (standard WSGI boilerplate).
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
# WSGI callable picked up by application servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| [
"devlife404@gmail.com"
] | devlife404@gmail.com |
b8f80b76863b79a5fbe498e0238d5dbb929bfd6e | 74fb7288a3bfa7cd2a2e313cc66d631c95cd8d0f | /Practical2/ex4.py | 8c75b093d437ac7937c9df847667eb46e8c5d776 | [] | no_license | jacobwindsor/BIST_SDA_Practicals | 27ed24d293431edcbf7a261956f50fb49bda2119 | 6bce969050be830f3ade72ac559c9ff7dab98a84 | refs/heads/master | 2020-08-31T01:16:22.395792 | 2019-11-27T17:43:54 | 2019-11-27T17:43:54 | 218,543,830 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,040 | py | import pandas as pd
import statsmodels as sm
import statsmodels.api as statsmodels
from statsmodels.formula.api import ols
from scipy import stats
import numpy as np
import matplotlib.pyplot as plt
from pathlib import Path
from statsmodels.stats.multicomp import MultiComparison
from statsmodels.sandbox.stats.multicomp import tukeyhsd
import math
from statsmodels.graphics.factorplots import interaction_plot
""""Ex 1 ANOVA 2 (ex 4, practical 2)
Limpets ANOVA
"""
# Create the data
spring = [1.167, 0.5, 1.667, 1.5, 0.833, 1, 0.667, 0.667, 0.75]
summer = [4, 3.83, 3.83, 3.33, 2.58, 2.75, 2.54, 1.83, 1.63]
density = [6] * 3 + [12] * 3 + [24] * 3
data = {
"DENSITY": density * 2,
"SEASON": ["SPRING"] * len(spring) + ["SUMMER"] * len(summer),
"EGGS": spring + summer
}
df = pd.DataFrame(data)
df["DENSITY"] = df["DENSITY"].astype(object)
# Look at dispesion of eggs of each factor
df.boxplot(column="EGGS", by="DENSITY")
print("See graphs/ex4_boxplot_eggs_density.png")
plt.savefig(Path.cwd() / "Practical2/graphs/ex4_boxplot_eggs_density.png")
df.boxplot(column="EGGS", by="SEASON")
print("See graphs/ex4_boxplot_eggs_season.png")
plt.savefig(Path.cwd() / "Practical2/graphs/ex4_boxplot_eggs_season.png")
# And together
df.boxplot(column="EGGS", by=["DENSITY", "SEASON"])
print("See graphs/ex4_boxplot_eggs_density_season.png")
plt.savefig(Path.cwd() / "Practical2/graphs/ex4_boxplot_eggs_density_season.png")
# Perform two way ANOVA
print("Performing two way ANOVA")
mod = ols('EGGS ~ DENSITY + SEASON + DENSITY:SEASON', data = df).fit()
print(sm.stats.anova.anova_lm(mod))
print("Both the density and season affect the eggs and there IS an interaction between the two factors.")
# Create interaction plot
print("Creating interaction plot")
interaction_plot(df['DENSITY'], df['SEASON'], df['EGGS'])
print("See graphs/ex4_interaction_plot.png")
plt.savefig(Path.cwd() / "Practical2/graphs/ex4_interaction_plot.png")
print("More eggs are laid during spring")
print("Lines are not parallel so an interaction occurs.")
| [
"me@jcbwndsr.com"
] | me@jcbwndsr.com |
81a2e919b5687a4a574388dc21d4a0acf316a28e | 61047afc9ebe5f1e4ed0878322a95a1d4e6f71d6 | /test.py | 722a924e55c96ca760c8cb903cab603a5c44b36c | [] | no_license | wesclock777/nlpbuildteam | ce4776c18a403491d24c16c8e0d56363ef1c9b3e | f22b745fa8a3d579e1fe42e9ec41593969069e23 | refs/heads/master | 2020-03-07T02:23:18.307564 | 2018-03-28T22:57:39 | 2018-03-28T22:57:39 | 123,489,014 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | import requests, json
url = "https://7etxwojv97.execute-api.us-east-1.amazonaws.com/prod/message"
auth_header = {'content-type': 'application/json'}
data = {
"to_number": "+15123630687",
"from_number": "+16284000095",
"message": "Hello from Python Requests Test to Prod!"
}
r = requests.post(url, data=json.dumps(data), headers=auth_header)
print(r.status_code," ", r.content)
| [
"wesleyklock@utexas.edu"
] | wesleyklock@utexas.edu |
f0a1c7bf4be3b747a196d83b4498c7e8d69d4a77 | f6e0014ba63fe7c01881420f03d2763a2695ac88 | /learning_logs/views.py | 99ce17e1e80068e277723ccf90ff44b6edaa4966 | [] | no_license | shaobinqiu/learning_log | ab857607decf57d70a05f046f58c260053955034 | da6bfc2a68c522224f03f18c59cf7c1c358ccc57 | refs/heads/master | 2021-07-11T05:17:17.049501 | 2017-10-15T03:39:31 | 2017-10-15T03:39:31 | 106,246,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,124 | py | from django.shortcuts import render
from django.http import HttpResponseRedirect, Http404
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from .models import Topic, Entry
from .forms import TopicForm, EntryForm
def index(request):
    """The home page for Learning Log."""
    return render(request, 'learning_logs/index.html')
@login_required
def topics(request):
    """Show all topics owned by the current user, oldest first."""
    owned_topics = Topic.objects.filter(owner=request.user).order_by('date_added')
    return render(request, 'learning_logs/topics.html', {'topics': owned_topics})
@login_required
def topic(request, topic_id):
    """Show a single topic and all its entries, newest first."""
    topic= Topic.objects.get(id=topic_id)
    # Make sure the topic belongs to the current user.
    if topic.owner != request.user:
        raise Http404
    entries = topic.entry_set.order_by('-date_added')
    context = {'topic': topic,'entries': entries}
    return render(request, 'learning_logs/topic.html', context)
@login_required
def new_topic(request):
    """Add a new topic owned by the current user."""
    if request.method == 'POST':
        # POST data submitted; process the form.
        form = TopicForm(request.POST)
        if form.is_valid():
            new_topic = form.save(commit=False)
            new_topic.owner = request.user
            new_topic.save()
            return HttpResponseRedirect(reverse('learning_logs:topics'))
    else:
        # No data submitted; create a blank form.
        form = TopicForm()
    # Invalid or initial form: re-render the page with it.
    context = {'form': form}
    return render(request,'learning_logs/new_topic.html', context)
@login_required
def new_entry(request, topic_id):
    """Add a new entry for a particular topic."""
    topic = Topic.objects.get(id=topic_id)
    if request.method == 'POST':
        # POST data submitted; process the form.
        form = EntryForm(data=request.POST)
        if form.is_valid():
            new_entry = form.save(commit=False)
            new_entry.topic = topic
            new_entry.save()
            return HttpResponseRedirect(reverse('learning_logs:topic',
                                                args=[topic_id]))
    else:
        # No data submitted; create a blank form.
        form = EntryForm()
    # Invalid or initial form: re-render the page with it.
    context = {'topic': topic, 'form': form}
    return render(request, 'learning_logs/new_entry.html', context)
@login_required
def edit_entry(request, entry_id):
    """Edit an existing entry."""
    entry = Entry.objects.get(id=entry_id)
    topic = entry.topic
    # Only the topic's owner may edit its entries.
    if topic.owner != request.user:
        raise Http404
    if request.method != 'POST':
        # Initial request; pre-fill the form with the current entry.
        form = EntryForm(instance=entry)
    else:
        # POST data submitted; process the form.
        form = EntryForm(instance=entry, data=request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(reverse('learning_logs:topic',
                                        args=[topic.id]))
    context = {'entry':entry, 'topic':topic, 'form':form}
    return render(request, 'learning_logs/edit_entry.html', context)
| [
"18814122627@163.com"
] | 18814122627@163.com |
ef02ad50203e98899613e15fe68a62ebf283e4ce | fed6c6bdb6276d195bc565e527c3f19369d22b74 | /selection_bias/bias_check/multi_pole_fit.py | 4baa664a7c606616ae4c495cdcb11ac8dcdde1f8 | [] | no_license | hekunlie/astrophy-research | edbe12d8dde83e0896e982f08b463fdcd3279bab | 7b2b7ada7e7421585e8993192f6111282c9cbb38 | refs/heads/master | 2021-11-15T05:08:51.271669 | 2021-11-13T08:53:33 | 2021-11-13T08:53:33 | 85,927,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,795 | py | import os
my_home = os.popen("echo $MYWORK_DIR").readlines()[0][:-1]
from sys import path, argv
path.append('%s/work/mylib/' % my_home)
import numpy
from mpi4py import MPI
import h5py
from plot_tool import Image_Plot
from Fourier_Quad import Fourier_Quad
import component_fit
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
cpus = comm.Get_size()
data_path = argv[1]
data_nm = argv[2]
g1t = [0, -0.04, 0, 0.04, -0.02, 0, 0.02, 0, 0.02]
g2t = [0, 0, 0.04, -0.04, 0, -0.02, 0, 0.02, -0.02]
g1 = g1t[rank]
g2 = g2t[rank]
scale = 100
xy_bin_num, radius_bin_num = 200, 60
h5f = h5py.File(data_path + "/data_%s_%d.hdf5" % (data_nm, rank), "r")
print("g1:", g1, "g2:", g2)
print(data_path + "/data_%s_%d.hdf5" % (data_nm, rank))
mg1 = h5f["/mg1"][()] / scale
mg2 = h5f["/mg2"][()] / scale
mn = h5f["/mn"][()] / scale
mu = h5f["/mu"][()] / scale
h5f.close()
img = Image_Plot(xpad=0.15, ypad=0.2, fig_x=4, fig_y=3)
img.subplots(6, 6)
xy_bin = component_fit.get_bin(mg1, mg2, xy_bin_num)
num, xgrid, ygrid, radius_grid = component_fit.get_2dhist(mg1, mg2, xy_bin)[:4]
dpl, radius_bin, radius_mask, mean_of_annuli = component_fit.get_dipole(num, radius_grid, radius_bin_num)
qpl, dpl_fit, sin_theta, cos_theta = component_fit.get_quadrupole(dpl, xgrid, ygrid, radius_bin, radius_bin_num)
qpl_fit, sin_2theta, cos_2theta = component_fit.fit_quadrupole(qpl, xgrid, ygrid, radius_bin, radius_bin_num)
dpl = numpy.nan_to_num(dpl)
qpl = numpy.nan_to_num(qpl)
fig1 = img.axs[0][0].imshow(dpl)
img.figure.colorbar(fig1, ax=img.axs[0][0])
img.axs[0][1].plot(ygrid[:, 0], dpl.sum(axis=1), label="G1")
img.axs[0][1].plot(xgrid[0], dpl.sum(axis=0), label="G2")
img.axs[0][1].legend()
fig1 = img.axs[0][2].imshow(dpl_fit)
img.figure.colorbar(fig1, ax=img.axs[0][2])
fig1 = img.axs[0][3].imshow(qpl)
img.figure.colorbar(fig1, ax=img.axs[0][3])
img.axs[0][4].plot(ygrid[:, 0], qpl.sum(axis=1), label="G1")
img.axs[0][4].plot(xgrid[0], qpl.sum(axis=0), label="G2")
img.axs[0][4].legend()
fig1 = img.axs[0][5].imshow(qpl_fit)
img.figure.colorbar(fig1, ax=img.axs[0][5])
g1s = numpy.linspace(g1 - 0.1, g1 + 0.1, 5)
g2s = numpy.linspace(g2 - 0.1, g2 + 0.1, 5)
for j in range(5):
mg1_sym = mg1 - g1s[j] * (mn + mu)
mg2_sym = mg2 - g2s[j] * (mn - mu)
num_sym, xgrid_sym, ygrid_sym, radius_grid_sym = component_fit.get_2dhist(mg1_sym, mg2_sym, xy_bin)[:4]
dpl_sym, radius_bin_sym, radius_mask_sym, mean_of_annuli_sym = component_fit.get_dipole(num_sym,
radius_grid_sym,
radius_bin_num)
qpl_sym, dpl_sym_fit, sin_theta_sym, cos_theta_sym = component_fit.get_quadrupole(dpl_sym, xgrid_sym, ygrid_sym,
radius_bin_sym,
radius_bin_num)
qpl_sym_fit, sin_2theta_sym, cos_2theta_sym = component_fit.fit_quadrupole(qpl_sym, xgrid_sym, ygrid_sym,
radius_bin_sym, radius_bin_num)
dpl_sym = numpy.nan_to_num(dpl_sym)
qpl_sym = numpy.nan_to_num(qpl_sym)
fig1 = img.axs[1 + j][0].imshow(dpl_sym)
img.figure.colorbar(fig1, ax=img.axs[1 + j][0])
img.axs[1 + j][1].plot(ygrid_sym[:, 0], dpl_sym.sum(axis=1), label="G1")
img.axs[1 + j][1].plot(xgrid_sym[0], dpl_sym.sum(axis=0), label="G2")
img.axs[1 + j][1].legend()
fig1 = img.axs[1 + j][2].imshow(dpl_sym_fit)
img.figure.colorbar(fig1, ax=img.axs[1 + j][2])
fig1 = img.axs[1 + j][3].imshow(qpl_sym)
img.figure.colorbar(fig1, ax=img.axs[1 + j][3])
img.axs[1 + j][4].plot(ygrid_sym[:, 0], qpl_sym.sum(axis=1), label="G1")
img.axs[1 + j][4].plot(xgrid_sym[0], qpl_sym.sum(axis=0), label="G2")
img.axs[1 + j][4].legend()
fig1 = img.axs[1 + j][5].imshow(qpl_sym_fit)
img.figure.colorbar(fig1, ax=img.axs[1 + j][5])
for i in range(6):
for j in range(6):
if j in [0, 2, 3, 5]:
img.del_ticks(i, j, [0, 1])
img.set_label(i, j, 0, "+ G1 -")
img.set_label(i, j, 1, "- G2 +")
else:
img.axs[i][j].yaxis.major.formatter.set_powerlimits((0, 0))
img.axs[i][j].xaxis.major.formatter.set_powerlimits((0, 0))
img.save_img(data_path + "/%s_%d_vary_g.png" % (data_nm, rank))
img.show_img()
img.close_img()
exit()
# NOTE(review): UNREACHABLE CODE — the exit() call above stops the script
# before this point. Kept as-is; delete or re-enable deliberately.
fq = Fourier_Quad(12,1234)
data_path = argv[1]
xy_bin_num, radius_bin_num = int(argv[2]), int(argv[3])
shear_scale = float(argv[4])
pic_path = data_path + "/multipole_pic_%.1f"%shear_scale
if rank == 0:
    if not os.path.exists(pic_path):
        os.makedirs(pic_path)
scale = 1000
# data_nm = [["noise_free"], ["noisy_cpp"], ["cross_term"], ["noise_residual"], ["cross_term", "noise_residual"]]
data_nm = [["noise_residual_1"], ["noise_residual_50"], ["noise_residual_2500"], ["noise_residual_12500"]]
h5f = h5py.File(data_path + "/shear.hdf5", "r")
g1t = h5f["/g1"][()]
g2t = h5f["/g2"][()]
h5f.close()
gh = numpy.linspace(-0.1, 0.1, 21)
for tag, nm in enumerate(data_nm):
    pic_nm = "-".join(nm)
    # Sum the estimator columns over all components named in `nm`.
    for sub_tag, sub_nm in enumerate(nm):
        if rank == 0:
            print(pic_nm)
        if sub_tag == 0:
            h5f = h5py.File(data_path + "/data_%s_%d.hdf5"%(sub_nm, rank), "r")
            data = h5f["/data"][()]/scale
            mg1 = data[:,0]
            mg2 = data[:,1]
            mn = data[:,2]
            mu = data[:,3]
            h5f.close()
        else:
            h5f = h5py.File(data_path + "/data_%s_%d.hdf5"%(sub_nm, rank), "r")
            data = h5f["/data"][()]/scale
            mg1 = mg1 + data[:,0]
            mg2 = mg2 + data[:,1]
            mn = mn + data[:,2]
            mu = mu + data[:,3]
            h5f.close()
    num, xgrid, ygrid, radius_grid = component_fit.get_bingrid(mg1, mg2, xy_bin_num, 1, 0.3, 99.7)[:4]
    dpl, radius_bin, radius_mask, mean_of_annuli = component_fit.get_dipole(num, radius_grid, radius_bin_num)
    qpl, dpl_fit, sin_theta, cos_theta = component_fit.get_quadrupole(dpl, xgrid, ygrid, radius_bin, radius_bin_num)
    qpl_fit, sin_2theta, cos_2theta = component_fit.fit_quadrupole(qpl,xgrid, ygrid, radius_bin, radius_bin_num)
    # PDF-SYM: subtract the (scaled) true shear and redo the decomposition.
    mnu1 = mn + mu
    mnu2 = mn - mu
    mg1_sym = mg1 - g1t[rank]*mnu1*shear_scale
    mg2_sym = mg2 - g2t[rank]*mnu2*shear_scale
    num_sym, xgrid_sym, ygrid_sym, radius_grid_sym = component_fit.get_bingrid(mg1_sym, mg2_sym, xy_bin_num, 1, 0.3, 99.7)[:4]
    dpl_sym, radius_bin_sym, radius_mask_sym, mean_of_annuli_sym = component_fit.get_dipole(num_sym, radius_grid_sym, radius_bin_num)
    qpl_sym, dpl_fit_sym, sin_theta_sym, cos_theta_sym = component_fit.get_quadrupole(dpl_sym, xgrid_sym, ygrid_sym, radius_bin_sym, radius_bin_num)
    qpl_sym_fit, sin_2theta_sym, cos_2theta_sym = component_fit.fit_quadrupole(qpl_sym,xgrid_sym, ygrid_sym, radius_bin_sym, radius_bin_num)
    chisq1 = fq.get_chisq_range(mg1, mnu1, 10, gh)[1]
    chisq2 = fq.get_chisq_range(mg2, mnu2, 10, gh)[1]
    chisq1_sym = fq.get_chisq_range(mg1_sym, mnu1, 10, gh)[1]
    chisq2_sym = fq.get_chisq_range(mg2_sym, mnu2, 10, gh)[1]
    numpy.savez(pic_path + "/cache_%s_%d.npz"%(pic_nm,rank), num, dpl, qpl, dpl_fit, qpl_fit)
    numpy.savez(pic_path + "/cache_%s_sym_%d.npz"%(pic_nm,rank), num_sym, dpl_sym, qpl_sym, dpl_fit_sym, qpl_sym_fit)
    numpy.savez(pic_path + "/cache_%s_chisq_%d.npz"%(pic_nm,rank), chisq1, chisq2, chisq1_sym, chisq2_sym)
    #################################################################################
    # hist of data
    img = Image_Plot(fig_x=6, fig_y=5)
    img.subplots(2, 3)
    plot_data = [[num, dpl, dpl_fit], [0,qpl, qpl_fit]]
    titles = [["G1-G2-hist", "dipole", "dipole-fit"], ["$\chi^2$", "quadrupole","quadrupole-fit"]]
    img.axs[1][0].plot(gh, chisq1, label="$\chi^2_{g1}$,g1=%.3f" % g1t[rank])
    img.axs[1][0].plot(gh, chisq2, label="$\chi^2_{g2}$,g2=%.3f" % g2t[rank])
    img.set_label(1, 0, 0, "$\chi^2$")
    img.set_label(1, 0, 1, "$\hat{g}$")
    img.axs[1][0].legend()
    for i in range(2):
        if i == 0:
            st = 0
        else:
            st = 1
        for j in range(st,3):
            fig = img.axs[i][j].imshow(plot_data[i][j])
            img.figure.colorbar(fig, ax=img.axs[i][j])
            img.del_ticks(i,j,[0,1])
            img.set_label(i,j,0, "+ G1 -")
            img.set_label(i,j,1, "- G2 +")
        for j in range(3):
            img.axs[i][j].set_title(titles[i][j])
    pic_name = pic_path + "/%s_%d.png"%(pic_nm, rank)
    img.save_img(pic_name)
    img.close_img()
    #################################################################################
    # hist of PDF_SYM_data
    img = Image_Plot(fig_x=6, fig_y=5)
    img.subplots(2, 3)
    plot_data = [[num_sym, dpl_sym, dpl_fit_sym], [0, qpl_sym, qpl_sym_fit]]
    titles =[["PDF_SYM-G1-G2-hist", "PDF_SYM-dipole", "PDF_SYM-dipole-fit"],
             ["PDF_SYM-$\chi^2$", "PDF_SYM-quadrupole", "PDF_SYM-quadrupole-fit"]]
    img.axs[1][0].plot(gh, chisq1_sym, label="$\chi^2_{g1}$,g1=%.3f" % g1t[rank])
    img.axs[1][0].plot(gh, chisq2_sym, label="$\chi^2_{g2}$,g2=%.3f" % g2t[rank])
    img.set_label(1, 0, 0, "$\chi^2$")
    img.set_label(1, 0, 1, "$\hat{g}$")
    img.axs[1][0].legend()
    for i in range(2):
        if i == 0:
            st = 0
        else:
            st = 1
        for j in range(st, 3):
            fig = img.axs[i][j].imshow(plot_data[i][j])
            img.figure.colorbar(fig, ax=img.axs[i][j])
            img.del_ticks(i, j, [0, 1])
            img.set_label(i, j, 0, "+ G1 -")
            img.set_label(i, j, 1, "- G2 +")
        for j in range(3):
            img.axs[i][j].set_title(titles[i][j])
    pic_name = pic_path + "/%s_%d_sym.png" % (pic_nm, rank)
    img.save_img(pic_name)
    img.close_img()
    #################################################################################
    # compare
    img = Image_Plot(fig_x=5, fig_y=4,xpad=0.25, ypad=0.25)
    img.subplots(2, 3)
    titles = [["$\chi^2$", "dipole-fit", "quadrupole-fit"],
              ["$\chi^2-SYM$", "dipole-fit_SYM", "quadruple-fit_SYM"]]
    img.axs[0][0].plot(gh, chisq1, label="$\chi^2_{g1}$,g1=%.3f" % g1t[rank])
    img.axs[0][0].plot(gh, chisq2, label="$\chi^2_{g2}$,g2=%.3f" % g2t[rank])
    img.axs[1][0].plot(gh, chisq1_sym, label="$\chi^2_{g1}$,g1=%.3f" % g1t[rank])
    img.axs[1][0].plot(gh, chisq2_sym, label="$\chi^2_{g2}$,g2=%.3f" % g2t[rank])
    img.set_label(0, 0, 0, "$\chi^2$")
    img.set_label(0, 0, 1, "$\hat{g}$")
    img.axs[0][0].legend()
    img.set_label(1, 0, 0, "$\chi^2-SYM$")
    img.set_label(1, 0, 1, "$\hat{g}$")
    img.axs[1][0].legend()
    dpl_fit = numpy.nan_to_num(dpl_fit)
    dpl_fit_sym = numpy.nan_to_num(dpl_fit_sym)
    qpl_fit = numpy.nan_to_num(qpl_fit)
    qpl_sym_fit = numpy.nan_to_num(qpl_sym_fit)
    # Shared color limits so raw and SYM panels are directly comparable.
    vmax_dpl = max(numpy.abs(dpl_fit).max(), numpy.abs(dpl_fit_sym).max())
    vmax_qpl = max(numpy.abs(qpl_fit).max(), numpy.abs(qpl_sym_fit).max())
    fig = img.axs[0][1].imshow(dpl_fit, vmin=-vmax_dpl, vmax=vmax_dpl)
    img.figure.colorbar(fig, ax=img.axs[0][1])
    fig = img.axs[1][1].imshow(dpl_fit_sym, vmin=-vmax_dpl, vmax=vmax_dpl)
    img.figure.colorbar(fig, ax=img.axs[1][1])
    fig = img.axs[0][2].imshow(qpl_fit, vmin=-vmax_qpl, vmax=vmax_qpl)
    img.figure.colorbar(fig, ax=img.axs[0][2])
    fig = img.axs[1][2].imshow(qpl_sym_fit, vmin=-vmax_qpl, vmax=vmax_qpl)
    img.figure.colorbar(fig, ax=img.axs[1][2])
    for i in range(2):
        for j in range(3):
            if j > 0:
                img.del_ticks(i, j, [0, 1])
                img.set_label(i, j, 0, "+ G1 -")
                img.set_label(i, j, 1, "- G2 +")
            img.axs[i][j].set_title(titles[i][j])
    pic_name = pic_path + "/%s_%d_compare.png" % (pic_nm, rank)
    img.save_img(pic_name)
    img.close_img()
    #################################################################################
    # x & y grid, radius ....
    img = Image_Plot()
    img.subplots(2, 5)
    titles = [["x-grid", "y-grid", "radius-grid", "radius-bin", "mean_num_annuli"],
              ["PDF_SYM-x-grid", "PDF_SYM-y-grid", "PDF_SYM-radius-grid", "PDF_SYM-radius-bin", "PDF_SYM-mean_num_annuli"]]
    plot_data = [[xgrid, ygrid, radius_grid, radius_mask, mean_of_annuli],
                 [xgrid_sym, ygrid_sym, radius_grid_sym, radius_mask_sym, mean_of_annuli_sym]]
    for i in range(2):
        for j in range(5):
            fig = img.axs[i][j].imshow(plot_data[i][j])
            img.figure.colorbar(fig, ax=img.axs[i][j])
            img.del_ticks(i,j,[0,1])
            img.axs[i][j].set_title(titles[i][j])
    pic_name = pic_path + "/%s_%d_check1.png"%(pic_nm, rank)
    img.save_img(pic_name)
    img.close_img()
    #################################################################################
    # sin_theta .....
    img = Image_Plot()
    img.subplots(2, 4)
    titles = [["sin$\\theta$", "cos$\\theta$", "sin$2\\theta$", "cos$2\\theta$"],
              ["PDF_SYM-sin$\\theta$", "PDF_SYM-cos$\\theta$", "PDF_SYM-sin$2\\theta$", "PDF_SYM-cos$2\\theta$"]]
    plot_data = [[sin_theta, cos_theta, sin_2theta, cos_2theta],
                 [sin_theta_sym, cos_theta_sym, sin_2theta_sym, cos_2theta_sym]]
    for i in range(2):
        for j in range(4):
            fig = img.axs[i][j].imshow(plot_data[i][j])
            img.figure.colorbar(fig, ax=img.axs[i][j])
            img.del_ticks(i,j,[0,1])
            img.axs[i][j].set_title(titles[i][j])
    pic_name = pic_path + "/%s_%d_check2.png"%(pic_nm, rank)
    img.save_img(pic_name)
    img.close_img()
"hekun_lee@sjtu.edu.cn"
] | hekun_lee@sjtu.edu.cn |
c4553e8d45b0af5117b6476b2a45fc654ded335e | c83fe8dacbe9af4bb405432978ec72a53060d508 | /reg_app/migrations/0004_publish_userid.py | 5c64d81ff78619d3ae7d9a024a9c07ba16a77ff0 | [] | no_license | booksandbakes/Tamilan | 1dac0be8adc4dd8af6389048c131fa6cd8ce2068 | 1ed3287d1bbe252a3901257ff2599746bd54c808 | refs/heads/master | 2023-04-13T14:38:16.057816 | 2021-04-29T07:20:20 | 2021-04-29T07:20:20 | 283,239,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | # Generated by Django 3.0.6 on 2020-07-06 11:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds a "Userid" foreign key to the
    # "publish" model, pointing at reg_app.Userdata.

    dependencies = [
        ('reg_app', '0003_auto_20200706_1335'),
    ]
    operations = [
        migrations.AddField(
            model_name='publish',
            name='Userid',
            # default=1 back-fills existing rows; preserve_default=False
            # removes the default afterwards (standard AddField pattern).
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='reg_app.Userdata'),
            preserve_default=False,
        ),
    ]
| [
"nayaki12112000@gmail.com"
] | nayaki12112000@gmail.com |
2b08130501783bf2507f62e8914005d763912e39 | 5b56cab15932afae603b6530b29ab2f587a69247 | /apps/address/views.py | 825101db76e808b367dfe8c0fd62b8a2270aa20f | [] | no_license | PeterChain/simpleorgs | 409bdc625a1a34339597837af4ff39f6217d5c0c | 0ad694880c867fdda1048497f34784ce5afcdb89 | refs/heads/master | 2021-07-22T08:08:17.345988 | 2017-10-27T17:13:14 | 2017-10-27T17:13:14 | 103,066,898 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | from django.urls import reverse
from django.views.generic import ListView
from .models import Address
class AddressBookListView(ListView):
    """
    List view rendering the address book (all Address records).
    """
    # Template used to render the list.
    template_name = 'address/book.html'
    # ListView derives the queryset (all Address objects) from the model.
    model = Address
"pedroc25@msn.com"
] | pedroc25@msn.com |
f633f4a460c8e9563aa1a63e4baa49c7d1cef828 | 151349407fbf68ad184dac80f1928453e68c3841 | /PyEngine2/canvas_classes.py | 92ed4030a642fb8b4265b7e32d8e4319da5d2f9c | [] | no_license | klucly/PyEngine2 | 8fc5b59fcc7f12c24be25547cc29d8508cf17130 | 00ec995760fe73a36b63699581492e72f2159868 | refs/heads/master | 2021-02-27T17:32:12.248163 | 2020-08-18T09:39:21 | 2020-08-18T09:39:21 | 245,623,047 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,793 | py | try:
from PyEngine2.libs import BaseLibrary, math
from PyEngine2.default_classes import DEFAULT, DefaultCanvasObject
except ModuleNotFoundError:
from libs import BaseLibrary, math
from default_classes import DEFAULT, DefaultCanvasObject
class Sprite(DefaultCanvasObject):
    """An image loaded from *directory*, drawn on *win* with its top-left
    corner at *coords*."""

    def __init__(self, win, directory, coords=None):
        # Avoid a shared mutable default argument.
        self.coords = [100, 100] if coords is None else coords
        self.directory = directory
        self.__image__ = BaseLibrary.PhotoImage(file = self.directory)
        # Bounding box [x1, y1, x2, y2] derived from the image size.
        self.__coords__ = [self.coords[0], self.coords[1],
                           self.coords[0] + self.__image__.width(),
                           self.coords[1] + self.__image__.height()]
        # Register only once: value_change() re-runs __init__ on an object
        # that is already registered (consistent with Rectangle/OvalSector).
        if self not in win.__objectList__:
            win.__objectList__.append(self)
        self.__win__ = win

    def value_change(self, directory = DEFAULT, coords = DEFAULT):
        """Re-initialize the sprite, keeping any argument left at DEFAULT."""
        if directory == DEFAULT:
            directory = self.directory
        if coords == DEFAULT:
            coords = self.coords
        # BUG FIX: the window argument was missing, so *directory* was being
        # passed as *win* and *coords* as *directory*.
        self.__init__(self.__win__, directory, coords)

    def zoom(self, times = int(1)):
        """Enlarge the image by an integer factor and refresh the bounding box."""
        self.__image__ = self.__image__.zoom(times)
        self.__coords__ = [self.__coords__[0], self.__coords__[1],
                           self.__coords__[0] + self.__image__.width(),
                           self.__coords__[1] + self.__image__.height()]
class Rectangle(DefaultCanvasObject):
    """A (possibly rotated) rectangle drawn on *win*.

    mode: 'xy2'  -> coords is a list of 4 corner points [[x, y], ...];
          'xywh' -> coords is [x, y, width, height].
    """

    def __init__(self, win, mode = "xywh", coords = [10, 10, 50, 70], bg = "#ffffff", border = 0, outline = "#000000", angle = 0):
        # NOTE(review): mutable default *coords* is shared across calls —
        # confirm callers never mutate it in place.
        self.mode = mode
        self.angle = angle
        self.coords = coords
        self.bg = bg
        self.border = border
        self.outline = outline
        # Corner points, stored as [[x, y], ...]; filled in below.
        self.__coords__ = [[0, 0], [0, 0], [0, 0], [0, 0]]
        self.__canvas__ = BaseLibrary.Canvas()
        if mode == "xy2":
            self.__coords__ = coords
        elif mode == "xywh":
            # Corner order: top-right, top-left, bottom-left, bottom-right.
            x1 = coords[0]
            x2 = coords[2]+coords[0]
            y1 = coords[1]
            y2 = coords[1]+coords[3]
            self.__coords__ = [[x2, y1], [x1, y1], [x1, y2], [x2, y2]]
        else:
            # WrongModeError is presumably defined elsewhere in this module.
            raise WrongModeError(f"{self}, use 'xy2' or 'xywh' mode")
        if self not in win.__objectList__:
            win.__objectList__.append(self)
        # Center = midpoint of the diagonal between opposite corners.
        self.center = [(self.__coords__[1][0]+self.__coords__[3][0])/2, (self.__coords__[1][1]+self.__coords__[3][1])/2]
        self.__coords__ = self.rotate(self.__coords__, self.angle, self.center)
        self.__win__ = win

    def rotate(self, points, angle, center):
        """Rotate *points* ([[x, y], ...]) by *angle* degrees around *center*.

        Returns a new list of points; the input is not modified.
        """
        angle = math.radians(angle)
        cos_val = math.cos(angle)
        sin_val = math.sin(angle)
        cx, cy = center
        new_points = []
        for x_old, y_old in points:
            # Translate to the origin, apply the 2D rotation matrix,
            # then translate back.
            x_old -= cx
            y_old -= cy
            x_new = x_old * cos_val - y_old * sin_val
            y_new = x_old * sin_val + y_old * cos_val
            new_points.append([x_new + cx, y_new + cy])
        return new_points
class OvalSector(DefaultCanvasObject):
    """A pie-slice of an oval spanning *angle* degrees from *start_angle*.

    mode: 'xy2'  -> coords is the bounding box [x1, y1, x2, y2];
          'xywh' -> coords is [x, y, width, height].
    """

    def __init__(self, win, mode="xywh", coords=None, angle=45, start_angle=0, bg="white", border=1, outline="black"):
        # Avoid a shared mutable default argument.
        if coords is None:
            coords = [50, 50, 100, 100]
        self.mode = mode
        self.coords = coords
        self.angle = angle
        self.start_angle = start_angle
        self.bg = bg
        self.border = border
        self.outline = outline
        # Normalize to an [x1, y1, x2, y2] bounding box.
        if mode == "xy2":
            self.__coords__ = coords
        elif mode == "xywh":
            self.__coords__ = [coords[0], coords[1], coords[2]+coords[0], coords[1]+coords[3]]
        else:
            raise WrongModeError(f"{self}, use 'xy2' or 'xywh' mode")
        # Register only once: value_change() re-runs __init__ on an object
        # that is already registered.
        if self not in win.__objectList__:
            win.__objectList__.append(self)
        self.__win__ = win

    def value_change(self, mode = DEFAULT, coords = DEFAULT, angle = DEFAULT, start_angle = DEFAULT, bg = DEFAULT, border = DEFAULT, outline = DEFAULT):
        """Re-initialize the sector, keeping every argument left at DEFAULT."""
        if mode == DEFAULT:
            mode = self.mode
        if coords == DEFAULT:
            coords = self.coords
        if angle == DEFAULT:
            angle = self.angle
        if start_angle == DEFAULT:
            start_angle = self.start_angle
        if bg == DEFAULT:
            bg = self.bg
        if border == DEFAULT:
            border = self.border
        if outline == DEFAULT:
            outline = self.outline
        # BUG FIX: the window argument was missing, so *mode* was being
        # passed as *win*, shifting every other argument by one position.
        self.__init__(self.__win__, mode, coords, angle, start_angle, bg, border, outline)
class OvalSegment(DefaultCanvasObject):
    """A filled chord segment of an oval."""

    def __init__(self, win, mode = "xywh", coords = None, angle = 45, start_angle = 0, bg = "white", border = 1, outline = "black"):
        """
        :param win: owning window; the object registers itself in
            ``win.__objectList__``
        :param mode: "xy2" (bounding-box corner points) or "xywh"
            ([x, y, width, height])
        :param coords: geometry in the chosen mode
            (defaults to [50, 50, 100, 100])
        :param angle: angular extent of the segment, in degrees
        :param start_angle: starting angle of the segment, in degrees
        """
        # BUGFIX: avoid a shared mutable default list.
        if coords is None:
            coords = [50, 50, 100, 100]
        self.mode = mode
        self.coords = coords
        self.angle = angle
        self.start_angle = start_angle
        self.bg = bg
        self.border = border
        self.outline = outline
        if mode == "xy2":
            self.__coords__ = coords
        elif mode == "xywh":
            self.__coords__ = [coords[0], coords[1], coords[2]+coords[0], coords[1]+coords[3]]
        else:
            raise WrongModeError(f"{self}, use 'xy2' or 'xywh' mode")
        if self not in win.__objectList__:
            win.__objectList__.append(self)
        self.__win__ = win

    def value_change(self, mode = DEFAULT, coords = DEFAULT, angle = DEFAULT, start_angle = DEFAULT, bg = DEFAULT, border = DEFAULT, outline = DEFAULT):
        """Re-create the segment; arguments left at DEFAULT keep their value."""
        if mode == DEFAULT:
            mode = self.mode
        if coords == DEFAULT:
            coords = self.coords
        if angle == DEFAULT:
            angle = self.angle
        if start_angle == DEFAULT:
            start_angle = self.start_angle
        if bg == DEFAULT:
            bg = self.bg
        if border == DEFAULT:
            border = self.border
        if outline == DEFAULT:
            outline = self.outline
        # BUGFIX: __init__ takes the owning window first (cf. Line.value_change);
        # previously `mode` was passed as `win`, shifting every argument.
        self.__init__(self.__win__, mode, coords, angle, start_angle, bg, border, outline)
class Arc(DefaultCanvasObject):
    """An (unfilled) arc along the outline of an oval."""

    def __init__(self, win, mode = "xywh", coords = None, angle = 45, start_angle = 0, color = "white", border = 1):
        """
        :param win: owning window; the object registers itself in
            ``win.__objectList__``
        :param mode: "xy2" (bounding-box corner points) or "xywh"
            ([x, y, width, height])
        :param coords: geometry in the chosen mode
            (defaults to [50, 50, 100, 100])
        :param angle: angular extent of the arc, in degrees
        :param start_angle: starting angle of the arc, in degrees
        """
        # BUGFIX: avoid a shared mutable default list.
        if coords is None:
            coords = [50, 50, 100, 100]
        self.mode = mode
        self.angle = angle
        self.coords = coords
        self.start_angle = start_angle
        self.color = color
        self.border = border
        if mode == "xy2":
            self.__coords__ = coords
        elif mode == "xywh":
            self.__coords__ = [coords[0], coords[1], coords[2]+coords[0], coords[1]+coords[3]]
        else:
            raise WrongModeError(f"{self}, use 'xy2' or 'xywh' mode")
        if self not in win.__objectList__:
            win.__objectList__.append(self)
        self.__win__ = win

    def value_change(self, mode = DEFAULT, coords = DEFAULT, angle = DEFAULT, start_angle = DEFAULT, color = DEFAULT, border = DEFAULT):
        """Re-create the arc; arguments left at DEFAULT keep their value."""
        if mode == DEFAULT:
            mode = self.mode
        if coords == DEFAULT:
            coords = self.coords
        if angle == DEFAULT:
            angle = self.angle
        if start_angle == DEFAULT:
            start_angle = self.start_angle
        if color == DEFAULT:
            color = self.color
        if border == DEFAULT:
            border = self.border
        # BUGFIX: __init__ takes the owning window first (cf. Line.value_change);
        # previously `mode` was passed as `win`, shifting every argument.
        self.__init__(self.__win__, mode, coords, angle, start_angle, color, border)
class Line(DefaultCanvasObject):
    """A straight line from (x1, y1) to (x2, y2)."""

    def __init__(self, win, coords = [10, 10, 100, 100], border = 0, color = "#000000"):
        # Lines are always described by their two endpoints ("xy2").
        self.coords = coords
        self.__coords__ = coords
        self.border = border
        self.color = color
        self.mode = "xy2"
        if self not in win.__objectList__:
            win.__objectList__.append(self)
        self.__win__ = win

    def value_change(self, coords = DEFAULT, border = DEFAULT, color = DEFAULT):
        """Re-create the line; arguments left at DEFAULT keep their value."""
        coords = self.coords if coords == DEFAULT else coords
        border = self.border if border == DEFAULT else border
        color = self.color if color == DEFAULT else color
        self.__init__(self.__win__, coords, border, color)
class Oval(DefaultCanvasObject):
    """An axis-aligned oval inside a bounding box."""

    def __init__(self, win, mode = "xywh", coords = [10, 10, 50, 70], bg = "#ffffff", border = 0, outline = "#000000"):
        self.mode = mode
        self.coords = coords
        self.bg = bg
        self.border = border
        self.outline = outline
        self.__coords__ = coords
        if mode == "xy2":
            # coords already hold the bounding box corners
            self.__coords__ = coords
        elif mode == "xywh":
            # convert [x, y, width, height] into [x1, y1, x2, y2]
            x0, y0, width, height = coords[0], coords[1], coords[2], coords[3]
            self.__coords__ = [x0, y0, width + x0, y0 + height]
        else:
            raise WrongModeError(f"{self}, use 'xy2' or 'xywh' mode")
        if self not in win.__objectList__:
            win.__objectList__.append(self)
        self.__win__ = win
class Circle(DefaultCanvasObject):
    """A circle given by its center point and radius."""

    def __init__(self, win, coords = [100, 100], radius = 100, bg = "white", border = 0, outline = "black"):
        # bounding box (x1, y1, x2, y2) derived from center and radius
        self.__coords__ = [coords[0]-radius, coords[1]-radius, coords[0]+radius, coords[1]+radius]
        self.coords = coords
        self.radius = radius
        self.bg = bg
        self.border = border
        self.outline = outline
        self.__win__ = win
        self.angle = 0
        if self not in win.__objectList__:
            win.__objectList__.append(self)

    def value_change(self, win = DEFAULT, coords = DEFAULT, radius = DEFAULT, bg = DEFAULT, border = DEFAULT, outline = DEFAULT, angle = 0):
        # NOTE(review): the `win` and `angle` parameters are accepted but
        # never used; the circle is always re-created on its original window.
        if radius == DEFAULT:
            radius = self.radius
        if coords == DEFAULT:
            coords = self.coords
        if bg == DEFAULT:
            bg = self.bg
        if border == DEFAULT:
            border = self.border
        if outline == DEFAULT:
            outline = self.outline
        self.__init__(self.__win__, coords, radius, bg, border, outline)

    def set_position(self, coords): self.value_change(coords = coords)
class Polygon(DefaultCanvasObject):
    """An arbitrary polygon given as a flat list of x/y coordinates."""

    def __init__(self, win, coords = [10, 20, 50, 100, 150, 30, 200, 200], bg = "#ffffff", border = 1, outline = "#000000"):
        # Polygons keep a fixed orientation after creation.
        self.coords = coords
        self.angle = 0
        self.bg = bg
        self.border = border
        self.outline = outline
        self.__coords__ = coords
        if self not in win.__objectList__:
            win.__objectList__.append(self)
        self.__win__ = win

    def value_change(self, coords = DEFAULT, bg = DEFAULT, border = DEFAULT, outline = DEFAULT):
        """Re-create the polygon; arguments left at DEFAULT keep their value."""
        coords = self.coords if coords == DEFAULT else coords
        bg = self.bg if bg == DEFAULT else bg
        border = self.border if border == DEFAULT else border
        outline = self.outline if outline == DEFAULT else outline
        self.__init__(self.__win__, coords, bg, border, outline)
| [
"noreply@github.com"
] | noreply@github.com |
7fd118190b92da15e2430f153afa61e73362b00f | ff49399bf418e7d3612155b0eb0139eb1c138d58 | /알고리즘/럭키스트레이트.py | fb28f24f11364d45f4b251a39f9ca5993e990922 | [] | no_license | KOHJeongHwan/TIL | c277efcf03545867aa2b4a7a68cc479839303ec3 | 83a23b97a3b5bc016820ff938f0c5669d26ede24 | refs/heads/master | 2023-03-01T04:31:44.464310 | 2021-02-11T17:25:15 | 2021-02-11T17:25:15 | 295,469,018 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 246 | py | """
2021-01-27
123402
LUCKY
"""
# "Lucky straight": a ticket number is LUCKY when the digit sum of its
# left half equals the digit sum of its right half, READY otherwise.
n = input()
# integer division avoids the float round-trip of int(len(n)/2)
mid = len(n) // 2
left_sum = sum(int(digit) for digit in n[:mid])
right_sum = sum(int(digit) for digit in n[mid:])
if left_sum == right_sum:
    print("LUCKY")
else:
    print("READY")
"kohjohn96@naver.com"
] | kohjohn96@naver.com |
2a1e29bb5786850365a0cf5fca0e7f577085fec3 | 36e593943be060ca5ea74a3d45923aba422ad2c9 | /ThinkBayes/code/dungeons.py | 0df9ed07edd9dfe1089ba8c63d598987c192d448 | [] | no_license | xjr7670/book_practice | a73f79437262bb5e3b299933b7b1f7f662a157b5 | 5a562d76830faf78feec81bc11190b71eae3a799 | refs/heads/master | 2023-08-28T19:08:52.329127 | 2023-08-24T09:06:00 | 2023-08-24T09:06:00 | 101,477,574 | 3 | 1 | null | 2021-06-10T18:38:54 | 2017-08-26T09:56:02 | Python | UTF-8 | Python | false | false | 2,831 | py | """This file contains code for use with "Think Bayes",
by Allen B. Downey, available from greenteapress.com
Copyright 2012 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
import random
import thinkbayes
import thinkplot
FORMATS = ['pdf', 'eps', 'png']
class Die(thinkbayes.Pmf):
    """Represents the PMF of outcomes for a fair die."""

    def __init__(self, sides, name=''):
        """Initializes the die.

        sides: int number of sides
        name: string label forwarded to the underlying Pmf
        """
        thinkbayes.Pmf.__init__(self, name=name)
        # every face 1..sides gets equal weight, then normalize to sum to 1
        for x in xrange(1, sides+1):
            self.Set(x, 1)
        self.Normalize()
def PmfMax(pmf1, pmf2):
    """Computes the distribution of the max of values drawn from two Pmfs.

    pmf1, pmf2: Pmf objects

    returns: new Pmf
    """
    res = thinkbayes.Pmf()
    # enumerate every pair of outcomes; the max of the pair accumulates
    # the product of the two probabilities
    for v1, p1 in pmf1.Items():
        for v2, p2 in pmf2.Items():
            res.Incr(max(v1, v2), p1*p2)
    return res
def main():
    """Run the Dungeons & Dragons dice examples and save the figures."""
    # --- mixture of dice: a bag with 5 d4, 4 d6, 3 d8, 2 d12, 1 d20 ---
    pmf_dice = thinkbayes.Pmf()
    pmf_dice.Set(Die(4), 5)
    pmf_dice.Set(Die(6), 4)
    pmf_dice.Set(Die(8), 3)
    pmf_dice.Set(Die(12), 2)
    pmf_dice.Set(Die(20), 1)
    pmf_dice.Normalize()

    # build the mixture by hand ...
    mix = thinkbayes.Pmf()
    for die, weight in pmf_dice.Items():
        for outcome, prob in die.Items():
            mix.Incr(outcome, weight*prob)

    # ... and overwrite it with the equivalent library call
    mix = thinkbayes.MakeMixture(pmf_dice)

    thinkplot.Hist(mix, width=0.9)
    thinkplot.Save(root='dungeons3',
                   xlabel='Outcome',
                   ylabel='Probability',
                   formats=FORMATS)

    random.seed(17)

    # --- sum of three d6: sampled vs exact distribution ---
    d6 = Die(6, 'd6')
    dice = [d6] * 3
    three = thinkbayes.SampleSum(dice, 1000)
    three.name = 'sample'
    three.Print()

    three_exact = d6 + d6 + d6
    three_exact.name = 'exact'
    three_exact.Print()

    thinkplot.PrePlot(num=2)
    thinkplot.Pmf(three)
    thinkplot.Pmf(three_exact, linestyle='dashed')
    thinkplot.Save(root='dungeons1',
                   xlabel='Sum of three d6',
                   ylabel='Probability',
                   axis=[2, 19, 0, 0.15],
                   formats=FORMATS)

    thinkplot.Clf()
    thinkplot.PrePlot(num=1)

    # compute the distribution of the best attribute the hard way
    # (max of six rolls via repeated pairwise maxima: 2, 4, then 4+2=6)
    best_attr2 = PmfMax(three_exact, three_exact)
    best_attr4 = PmfMax(best_attr2, best_attr2)
    best_attr6 = PmfMax(best_attr4, best_attr2)
    # thinkplot.Pmf(best_attr6)

    # and the easy way
    best_attr_cdf = three_exact.Max(6)
    best_attr_cdf.name = ''
    best_attr_pmf = thinkbayes.MakePmfFromCdf(best_attr_cdf)
    best_attr_pmf.Print()

    thinkplot.Pmf(best_attr_pmf)
    thinkplot.Save(root='dungeons2',
                   xlabel='Best of three d6',
                   ylabel='Probability',
                   axis=[2, 19, 0, 0.23],
                   formats=FORMATS)
| [
"xjr30226@126.com"
] | xjr30226@126.com |
9508085aae598fdc4bd2c1dcb0bb97913ef321f9 | 132c4b0643476a5cd1aa78874249cf6c7fde7314 | /tests.py | 05dd98bafc7b277fe35102a9ffe846dd13b886b6 | [] | no_license | fladi/ldif3 | 6aaedbd3b2578917a0d0832c10ee6cc642434f92 | 7b50593d4abd7da45687131ca0233af505d0b07d | refs/heads/master | 2021-01-21T20:22:35.307390 | 2015-05-22T21:31:15 | 2015-05-22T21:31:15 | 37,085,665 | 0 | 0 | null | 2015-06-08T18:42:42 | 2015-06-08T18:42:42 | null | UTF-8 | Python | false | false | 9,855 | py | from __future__ import unicode_literals
import unittest
try:
from unittest import mock
except ImportError:
import mock
from io import BytesIO
import ldif3
BYTES = b"""version: 1
dn: cn=Alice Alison,
mail=alicealison@example.com
objectclass: top
objectclass: person
objectclass: organizationalPerson
cn: Alison Alison
mail: alicealison@example.com
modifytimestamp: 4a463e9a
# another person
dn: mail=foobar@example.org
objectclass: top
objectclass: person
mail: foobar@example.org
modifytimestamp: 4a463e9a
"""
BYTES_OUT = b"""dn: cn=Alice Alison,mail=alicealison@example.com
cn: Alison Alison
mail: alicealison@example.com
modifytimestamp: 4a463e9a
objectclass: top
objectclass: person
objectclass: organizationalPerson
dn: mail=foobar@example.org
mail: foobar@example.org
modifytimestamp: 4a463e9a
objectclass: top
objectclass: person
"""
LINES = [
b'version: 1',
b'dn: cn=Alice Alison,mail=alicealison@example.com',
b'objectclass: top',
b'objectclass: person',
b'objectclass: organizationalPerson',
b'cn: Alison Alison',
b'mail: alicealison@example.com',
b'modifytimestamp: 4a463e9a',
b'',
b'dn: mail=foobar@example.org',
b'objectclass: top',
b'objectclass: person',
b'mail: foobar@example.org',
b'modifytimestamp: 4a463e9a',
]
BLOCKS = [[
b'version: 1',
b'dn: cn=Alice Alison,mail=alicealison@example.com',
b'objectclass: top',
b'objectclass: person',
b'objectclass: organizationalPerson',
b'cn: Alison Alison',
b'mail: alicealison@example.com',
b'modifytimestamp: 4a463e9a',
], [
b'dn: mail=foobar@example.org',
b'objectclass: top',
b'objectclass: person',
b'mail: foobar@example.org',
b'modifytimestamp: 4a463e9a',
]]
DNS = [
'cn=Alice Alison,mail=alicealison@example.com',
'mail=foobar@example.org'
]
CHANGETYPES = [None, None]
RECORDS = [{
'cn': ['Alison Alison'],
'mail': ['alicealison@example.com'],
'modifytimestamp': ['4a463e9a'],
'objectclass': ['top', 'person', 'organizationalPerson'],
}, {
'mail': ['foobar@example.org'],
'modifytimestamp': ['4a463e9a'],
'objectclass': ['top', 'person'],
}]
URL = b'https://tools.ietf.org/rfc/rfc2849.txt'
URL_CONTENT = 'The LDAP Data Interchange Format (LDIF)'
class TestUnsafeString(unittest.TestCase):
    """Tests for ldif3.UNSAFE_STRING_RE (presumably the RFC 2849
    SAFE-STRING / SAFE-INIT-CHAR rules)."""

    # characters never allowed anywhere in a safe string
    unsafe_chars = ['\0', '\n', '\r']
    # additionally disallowed as the *first* character
    unsafe_chars_init = unsafe_chars + [' ', ':', '<']

    def _test_all(self, unsafes, fn):
        # fn maps a code point to a probe string; the regex must match
        # exactly when the character is unsafe in that position.
        for i in range(128):  # TODO: test range(255)
            try:
                match = ldif3.UNSAFE_STRING_RE.search(fn(i))
                if i <= 127 and chr(i) not in unsafes:
                    self.assertIsNone(match)
                else:
                    self.assertIsNotNone(match)
            except AssertionError:
                # report the offending code point before re-raising
                print(i)
                raise

    def test_unsafe_chars(self):
        # non-initial position: prefix the probe char with 'a'
        self._test_all(self.unsafe_chars, lambda i: 'a%s' % chr(i))

    def test_unsafe_chars_init(self):
        # initial position: the probe char starts the string
        self._test_all(self.unsafe_chars_init, lambda i: '%s' % chr(i))

    def test_example(self):
        s = 'cn=Alice, Alison,mail=Alice.Alison@example.com'
        self.assertIsNone(ldif3.UNSAFE_STRING_RE.search(s))

    def test_trailing_newline(self):
        self.assertIsNotNone(ldif3.UNSAFE_STRING_RE.search('asd\n'))
class TestLower(unittest.TestCase):
    """Tests for ldif3.lower: lower-case every element of an iterable."""

    def test_happy(self):
        self.assertEqual(ldif3.lower(['ASD', 'HuHu']), ['asd', 'huhu'])

    def test_falsy(self):
        # falsy input (None here) yields an empty list
        self.assertEqual(ldif3.lower(None), [])

    def test_dict(self):
        # iterating a dict lowers its keys, not its values
        self.assertEqual(ldif3.lower({'Foo': 'bar'}), ['foo'])

    def test_set(self):
        self.assertEqual(ldif3.lower(set(['FOo'])), ['foo'])
class TestIsDn(unittest.TestCase):
    """Placeholder for DN-validation tests (not yet implemented)."""

    def test_happy(self):
        pass  # TODO
class TestLDIFParser(unittest.TestCase):
    """Unit tests for ldif3.LDIFParser against the module-level fixtures."""

    def setUp(self):
        # a fresh parser over the fixture document for every test
        self.stream = BytesIO(BYTES)
        self.p = ldif3.LDIFParser(self.stream)

    def test_strip_line_sep(self):
        # only a trailing \n or \r\n is stripped; other whitespace stays
        self.assertEqual(self.p._strip_line_sep(b'asd \n'), b'asd ')
        self.assertEqual(self.p._strip_line_sep(b'asd\t\n'), b'asd\t')
        self.assertEqual(self.p._strip_line_sep(b'asd\r\n'), b'asd')
        self.assertEqual(self.p._strip_line_sep(b'asd\r\t\n'), b'asd\r\t')
        self.assertEqual(self.p._strip_line_sep(b'asd\n\r'), b'asd\n\r')
        self.assertEqual(self.p._strip_line_sep(b'asd'), b'asd')
        self.assertEqual(self.p._strip_line_sep(b' asd '), b' asd ')

    def test_iter_unfolded_lines(self):
        self.assertEqual(list(self.p._iter_unfolded_lines()), LINES)

    def test_iter_blocks(self):
        self.assertEqual(list(self.p._iter_blocks()), BLOCKS)

    def _test_error(self, fn):
        # strict mode must raise; non-strict mode must only log a warning
        self.p._strict = True
        with self.assertRaises(ValueError):
            fn()
        with mock.patch('ldif3.log.warning') as warning:
            self.p._strict = False
            fn()
            warning.assert_called()

    def test_check_dn_not_none(self):
        # a second dn while one is already set is an error
        self._test_error(lambda: self.p._check_dn('some dn', 'mail=alicealison@example.com'))

    def test_check_dn_invalid(self):
        self._test_error(lambda: self.p._check_dn(None, 'invalid'))

    def test_check_dn_happy(self):
        self.p._check_dn(None, 'mail=alicealison@example.com')

    def test_check_changetype_dn_none(self):
        # changetype before any dn is an error
        self._test_error(lambda: self.p._check_changetype(None, None, 'add'))

    def test_check_changetype_not_none(self):
        # a second changetype is an error
        self._test_error(lambda: self.p._check_changetype('some dn', 'some changetype', 'add'))

    def test_check_changetype_invalid(self):
        self._test_error(lambda: self.p._check_changetype('some dn', None, 'invalid'))

    def test_check_changetype_happy(self):
        self.p._check_changetype('some dn', None, 'add')

    def test_parse_attr_base64(self):
        # 'YQpiCmM=' is base64 for b'a\nb\nc'
        attr_type, attr_value = self.p._parse_attr(b'foo:: YQpiCmM=\n')
        self.assertEqual(attr_type, 'foo')
        self.assertEqual(attr_value, 'a\nb\nc')

    def test_parse_attr_url(self):
        # NOTE(review): this test fetches URL over the network
        self.p._process_url_schemes = [b'https']
        attr_type, attr_value = self.p._parse_attr(b'foo:< ' + URL + b'\n')
        self.assertIn(URL_CONTENT, attr_value)

    def test_parse_attr_url_all_ignored(self):
        # no scheme whitelisted: URL attributes resolve to an empty value
        attr_type, attr_value = self.p._parse_attr(b'foo:< ' + URL + b'\n')
        self.assertEqual(attr_value, '')

    def test_parse_attr_url_this_ignored(self):
        # https not in the whitelist: value stays empty
        self.p._process_url_schemes = [b'file']
        attr_type, attr_value = self.p._parse_attr(b'foo:< ' + URL + b'\n')
        self.assertEqual(attr_value, '')

    def test_parse(self):
        items = list(self.p.parse())
        for i, item in enumerate(items):
            dn, changetype, record = item
            self.assertEqual(dn, DNS[i])
            self.assertEqual(changetype, CHANGETYPES[i])
            self.assertEqual(record, RECORDS[i])
class TestLDIFWriter(unittest.TestCase):
    """Unit tests for ldif3.LDIFWriter, writing into an in-memory stream."""

    def setUp(self):
        self.stream = BytesIO()
        self.w = ldif3.LDIFWriter(self.stream)

    def test_fold_line_10_n(self):
        self.w._cols = 10
        self.w._line_sep = b'\n'
        self.w._fold_line(b'abcdefghijklmnopqrstuvwxyz')
        # continuation lines start with one space, so they hold cols-1 chars
        folded = b'abcdefghij\n klmnopqrs\n tuvwxyz\n'
        self.assertEqual(self.stream.getvalue(), folded)

    def test_fold_line_12_underscore(self):
        # the line separator is arbitrary bytes, not necessarily a newline
        self.w._cols = 12
        self.w._line_sep = b'__'
        self.w._fold_line(b'abcdefghijklmnopqrstuvwxyz')
        folded = b'abcdefghijkl__ mnopqrstuvw__ xyz__'
        self.assertEqual(self.stream.getvalue(), folded)

    def test_fold_line_oneline(self):
        # fits within _cols: no folding, just the separator appended
        self.w._cols = 100
        self.w._line_sep = b'\n'
        self.w._fold_line(b'abcdefghijklmnopqrstuvwxyz')
        folded = b'abcdefghijklmnopqrstuvwxyz\n'
        self.assertEqual(self.stream.getvalue(), folded)

    def test_needs_base64_encoding_forced(self):
        # attributes listed in _base64_attrs are always encoded
        self.w._base64_attrs = ['attr_type']
        result = self.w._needs_base64_encoding('attr_type', 'attr_value')
        self.assertTrue(result)

    def test_needs_base64_encoding_not_safe(self):
        result = self.w._needs_base64_encoding('attr_type', '\r')
        self.assertTrue(result)

    def test_needs_base64_encoding_safe(self):
        result = self.w._needs_base64_encoding('attr_type', 'abcABC123_+')
        self.assertFalse(result)

    def test_unparse_attr_base64(self):
        # values with unsafe characters are written as '::' base64 lines
        self.w._unparse_attr('foo', 'a\nb\nc')
        value = self.stream.getvalue()
        self.assertEqual(value, b'foo:: YQpiCmM=\n')

    def test_unparse_entry_record(self):
        self.w._unparse_entry_record(RECORDS[0])
        value = self.stream.getvalue()
        self.assertEqual(value, (
            b'cn: Alison Alison\n'
            b'mail: alicealison@example.com\n'
            b'modifytimestamp: 4a463e9a\n'
            b'objectclass: top\n'
            b'objectclass: person\n'
            b'objectclass: organizationalPerson\n'))

    def test_unparse_changetype_add(self):
        self.w._unparse_changetype(2)
        value = self.stream.getvalue()
        self.assertEqual(value, b'changetype: add\n')

    def test_unparse_changetype_modify(self):
        self.w._unparse_changetype(3)
        value = self.stream.getvalue()
        self.assertEqual(value, b'changetype: modify\n')

    def test_unparse_changetype_other(self):
        # only record types 2 (add) and 3 (modify) are supported
        with self.assertRaises(ValueError):
            self.w._unparse_changetype(4)
        with self.assertRaises(ValueError):
            self.w._unparse_changetype(1)

    def test_unparse(self):
        for i, record in enumerate(RECORDS):
            self.w.unparse(DNS[i], record)
        value = self.stream.getvalue()
        self.assertEqual(value, BYTES_OUT)

    def test_unparse_fail(self):
        # a record that is neither a dict nor a change list is rejected
        with self.assertRaises(ValueError):
            self.w.unparse(DNS[0], 'foo')
| [
"tobias.bengfort@gmx.net"
] | tobias.bengfort@gmx.net |
e443cbc8cd162c6d41caeb459c2a65e30a9bd7ed | a276798900d6f99d0dc79b8e4e66c23973ede4a9 | /blog/migrations/0001_initial.py | aa37c65ebd521970f620498b0c36f5e75b07553b | [] | no_license | Mgarchuk/MyBlog | ad73a06e008f80644b33fba795eb601dead21754 | a2890a347ab4b56f1e703053840c1cdd1dddb5ba | refs/heads/main | 2023-03-07T07:03:32.991927 | 2021-02-24T22:14:07 | 2021-02-24T22:14:07 | 342,049,298 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,233 | py | # Generated by Django 3.0.6 on 2020-05-27 16:08
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial schema for the blog app: Post, Photo, Comment.

    NOTE: Django migrations are applied by file order/name; edit with care.
    """

    initial = True

    dependencies = [
        # needs the user model for the author foreign keys below
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('published_date', models.DateTimeField(blank=True, null=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Photo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(blank=True, null=True, upload_to='images/')),
                ('published_date', models.DateTimeField(blank=True, null=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField()),
                ('published_date', models.DateTimeField(blank=True, null=True)),
                # status flags comment visibility/approval; defaults to visible
                ('status', models.BooleanField(default=True)),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comments', to=settings.AUTH_USER_MODEL)),
                ('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='blog.Post')),
            ],
        ),
    ]
| [
"mgarchuk6@gmail.com"
] | mgarchuk6@gmail.com |
9cdb76e81612b5b87a3078f6f2c985f285dbbe6e | be7a79f3c590f0923f1e793c6a36cfebd9ca4d01 | /brocolli/converter/onnx_layers/concat_func.py | 32b54e65699b614aff8c4d4dff1d7f195fd89e83 | [
"MIT"
] | permissive | inisis/brocolli | f255d44dc9148fd2b3bc82f6a21e429a579399b4 | 46a3d8c5e19e481746a9c8a85c5e9a71a49b846c | refs/heads/master | 2023-07-22T09:37:19.480983 | 2023-07-17T14:25:35 | 2023-07-17T14:25:35 | 168,733,444 | 326 | 72 | MIT | 2023-06-04T17:03:43 | 2019-02-01T17:17:22 | Python | UTF-8 | Python | false | false | 905 | py | from loguru import logger
from onnx import helper
from onnx import TensorProto as tp
from .base_layer import BaseLayer
class ConcatFunc(BaseLayer):
    """Converter layer that emits an ONNX ``Concat`` node."""

    def __init__(self, source_node, module=None, auto_gen=True):
        super(ConcatFunc, self).__init__(source_node, module, auto_gen)

    def get_concat_attr(self):
        # Build the ONNX attribute dict: `axis` is taken from the source
        # node's "dim" argument (keyword, or positional index 1, falling
        # back to 0) — presumably torch.cat's `dim`; verify in BaseLayer.
        attr_dict = {"axis": []}
        dim = self.get_value_by_key_or_index("dim", 1, 0)
        attr_dict["axis"] = dim
        return attr_dict

    def generate_node(self, name=None, params=None, attr_dict=None):
        """Create the Concat node and append it to ``self._node``.

        :param name: optional override for the node name
        :param params: unused here; kept for interface parity with siblings
        :param attr_dict: pre-built attributes; derived from the source
            node via get_concat_attr() when omitted
        """
        if name is not None:
            self._name = name
        if attr_dict is None:
            attr_dict = self.get_concat_attr()
        node = helper.make_node(
            "Concat", self._in_names, self._out_names, self._name, **attr_dict
        )
        logger.info(f"{self.__class__.__name__}: {self._name} created")
        self._node.append(node)
| [
"desmond.yao@buaa.edu.cn"
] | desmond.yao@buaa.edu.cn |
6cc7f69539c01aef0778bf4e6cddfff154fb5d17 | a2e84ba873c640a8cb8825429e22f86c5236afb4 | /sktps/ps/sample_freeze_restore.py | 82beca23c5e127c423044ef0834aaad8bc1ddca3 | [
"Apache-2.0"
] | permissive | jclee81/sktacc | 6ad336c79824b8197a7e56e2826485018de9e633 | 6f601ce8f61b4e361b17773060ee2544bf35dbe4 | refs/heads/master | 2022-12-09T21:11:23.004557 | 2017-08-11T04:23:43 | 2017-08-11T04:23:43 | 99,192,008 | 2 | 0 | Apache-2.0 | 2022-12-07T23:56:29 | 2017-08-03T04:52:48 | Python | UTF-8 | Python | false | false | 2,126 | py | from __future__ import print_function
import tensorflow as tf
from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
# from tensorflow.python.tools import freeze_graph
from util.log import log
def print_nodes(graph_def, msg=''):
    """Print the names of every node in *graph_def*, prefixed by *msg*."""
    names = [node.name for node in graph_def.node]
    print('nodes in graph', msg, names)
def restore_graph(s):
    """Deserialize *s* (a serialized GraphDef bytestring) and import it
    into the default graph under the name scope 'restore'.

    Returns the parsed GraphDef (not the graph it was imported into).
    """
    log.info('restore_graph')
    # NOTE(review): `g` is only used by the commented-out debug lines below
    g = ops.get_default_graph()
    graph_def = graph_pb2.GraphDef()
    graph_def.ParseFromString(s)
    # print_nodes(graph_def)
    # print ('before', len(g.as_graph_def().node))
    importer.import_graph_def(graph_def, name='restore')
    # print ('after', len(g.as_graph_def().node))
    # print_nodes(g.as_graph_def())
    # t = g.get_tensor_by_name('restore/y1:0')
    return graph_def
def _get_node(restored, name):
for n in restored.node:
if n.name == name:
return n
return None
def run():
    """Demo: freeze a tf.Variable into a constant GraphDef, re-import it,
    and use the restored constant tensor to update the live variable."""
    log.info('Run freeze restore')
    y = tf.Variable([float(88.8), float(5)], name='y1')
    # print(y.op.node_def)
    init_op = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init_op)
        # sess.run(y)
        g = sess.graph
        g_def = g.as_graph_def()
        # print node names
        # print([n.name for n in g_def.node])

        # constants: snapshot the variable's current value as a Const node
        constants = graph_util.convert_variables_to_constants(
            sess, g_def, ['y1'])
        # serialize
        s = constants.SerializeToString()
        # print(len(g_def.node))
        print_nodes(g.as_graph_def(), 'before restore:')
        _ = restore_graph(s)
        print_nodes(g.as_graph_def(), 'after restore:')
        # the imported snapshot lives under the 'restore/' name scope
        t = g.get_tensor_by_name('restore/y1:0')
        # add the frozen snapshot back onto the live variable, then show it
        sess.run(y.assign(y + t))
        print(sess.run(y))
# print(len(g_def.node))
# print(sess.run(y.assign([float(99.9)])))
# print(n)
# print(sess.run(y.assign(n)))
# g2 = tf.Graph()
# g2_def = g2.as_graph_def()
# print([n.name for n in g2_def.node])
# run()
| [
"sipqko@gmail.com"
] | sipqko@gmail.com |
a8abae70078496d721a757b3549329c99ce74a5b | 87dd09b1e96ab844d9c23f2cada966f6a642eed9 | /IFT6285-A2018-Charles-Ashby-Teo-Orthlieb/evaluation/model_eval.py | c742fb67ea4f7a388f71bb8cced0b89ec07894ec | [] | no_license | charlesashby/nlp_dev_1 | 6a42140660eae6f18f09f3b65fc17a6bb82d1927 | 61c0e13ab207f61a4ce2ab5b195168575dcc9241 | refs/heads/master | 2020-03-29T14:31:03.355269 | 2018-11-15T19:16:21 | 2018-11-15T19:16:21 | 150,021,568 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,488 | py | import os
import sys
from evaluation.eval import evaluate, evaluatecrash, test
from evaluation.generate import generate
from models.ngram.ngram_tree_simple import NG_Tree_Simple
from models.ngram.ngram_tree_backoff import NG_Tree_Backoff
from models.ngram.ngram_regular import NG_Regular
# Holds the correction of the test
words = {"en": {}, "fi": {}}
train = "../train/"
def load_correction(file):
    """Parse one correction file and append its hidden words to `words`.

    The file name is expected to end in ``...-<test_type>.<lang>``; every
    ``<unk w="..."`` tag in the (UTF-16) file contributes one lowercased,
    interned word to ``words[lang][test_type]``. Interning keeps the many
    duplicate words memory-cheap and makes later comparisons fast.
    """
    (test_type, lang) = file.split("-")[-1].split(".")
    print(test_type, end=" ")
    words[lang][test_type] = []
    strtofind = '<unk w="'
    # start of the word is one char before index+nextstart (see slice below)
    nextstart = len(strtofind) + 1
    with open(file, "r", encoding="utf-16") as correction:
        fdata = correction.read()
        start = 0
        index = fdata.find(strtofind, start)
        while index != -1:
            start = index + nextstart
            word_end = fdata.find('"', start)
            # slice from start-1 (first char after 'w="') to the closing quote
            words[lang][test_type].append(sys.intern(fdata[start-1:word_end].lower()))
            index = fdata.find(strtofind, start)
def load_corrections(lang):
    """Load every correction file for *lang* found in the test directory."""
    files = os.listdir(test)
    print(f"loading {lang} correction: ", end="")
    for file in files:
        # only files NOT starting with "t-" and matching the language suffix
        if not file.startswith("t-") and file.endswith(lang):
            load_correction(test+file)
    print()
def get_little_training_file(language):
    """Path of the small training corpus for *language*."""
    return "{}little.{}".format(train, language)
def get_training_file(language):
    """Path of the full Europarl training corpus for *language*."""
    return "{}train-europarl-v7.fi-en.{}".format(train, language)
def evaluate_NGM(modelclass, pre_wordscount, post_wordscount, lang, max_pred=1, crash=False, trace=False):
    """Train an n-gram model on the full corpus and evaluate it on the
    previously loaded corrections.

    :param modelclass: model class to instantiate (e.g. NG_Tree_Backoff)
    :param pre_wordscount: number of context words before the blank
    :param post_wordscount: number of context words after the blank
    :param lang: language code ("en"/"fi"), selects corpus and corrections
    :param max_pred: number of predictions allowed per blank
    :param crash: when True, run the crash-evaluation variant instead
    :param trace: verbosity flag forwarded to evaluate()
    """
    print(f"{modelclass.__name__} ({pre_wordscount}c{post_wordscount}):")
    model = modelclass(pre_wordscount, post_wordscount)
    model.train(get_training_file(lang))
    if crash:
        evaluatecrash(model, pre_wordscount, post_wordscount, lang, words[lang])
    else:
        evaluate(model, pre_wordscount, post_wordscount, lang, words[lang], trace, crash, max_pred)
    print()
def generate_NGM(modelclass, pre_wordscount, lang):
    """Train *modelclass* on the full corpus and print one generated
    sentence for each of a fixed set of seed phrases."""
    model = modelclass(pre_wordscount, 0)
    model.train(get_training_file(lang))
    for seed in ("The european", "I would", "This year", "However", "The euro is"):
        pre_words = seed.strip()
        print(f'Generating with {modelclass.__name__} ("{pre_words}"):')
        print(generate(model, pre_words.lower().split()))
if __name__ == '__main__':
    # run a single backoff-tree configuration on the English corrections
    lang = "en"
    mclass = NG_Tree_Backoff
    load_corrections(lang)
    # (pre-context, post-context) word counts to evaluate
    contexts = [(2, 1)]
    for (pre, post) in contexts:
        # NOTE(review): evaluate_NGM prints this exact header itself, so
        # each configuration is announced twice.
        print(f"{mclass.__name__} ({pre}c{post}):")
        evaluate_NGM(mclass, pre, post, lang, trace=True, crash=False, max_pred=1)
| [
"charlesa.l@hotmail.com"
] | charlesa.l@hotmail.com |
20942c535dfee4650fc842453a3aeb4bd7d24a59 | 792b6242765b11ae8e6d94e8861ff81ea08cd871 | /Customer/admin.py | 601ede3a21901b6e92a9bccd088e717edd28e3d7 | [] | no_license | vivek-fulldev/spice | 33853763768f0d5aac47074cd054aa48cfc576a2 | 6dc1a3d393ec45ab661548537626af3152f8af14 | refs/heads/main | 2023-05-05T13:08:22.856494 | 2021-05-30T10:32:08 | 2021-05-30T10:32:08 | 372,184,977 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | from django.contrib import admin
from datetime import datetime
from django.forms.utils import ErrorList
from Configuration.models import *
from Customer.models import CustomerProfile
from import_export.admin import ImportExportModelAdmin
def make_active(modeladmin, request, queryset):
    # Django admin bulk action: mark rows active and stamp the update time.
    queryset.update(active_status="1", updated_at=datetime.now())
make_active.short_description = "Move Items to Active"
def make_deactive(modeladmin, request, queryset):
    # Django admin bulk action: mark rows inactive and stamp the update time.
    queryset.update(active_status="0", updated_at=datetime.now())
make_deactive.short_description = "Move Items to Deactive"
@admin.register(CustomerProfile)
class CustomerProfileAdmin(ImportExportModelAdmin):
    """Admin for CustomerProfile with CSV import/export and bulk
    activate/deactivate actions."""

    # these fields are managed automatically (save_model / bulk actions)
    exclude = [
        'active_status',
        'created_at',
        'updated_at',
    ]
    search_fields = [
        'user_type',
    ]
    list_display = [
        'user_type',
        'area',
        'emp_code',
        'password',
        'created_at',
        'updated_at',
    ]
    actions = [make_active, make_deactive]
    list_per_page = 10

    def has_delete_permission(self, request, obj=None):
        # deletion explicitly allowed (matches Django's default behaviour)
        return True

    def save_model(self, request, obj, form, change):
        # stamp created_at on first save, updated_at on subsequent saves
        if not change:
            obj.created_at = datetime.now()
        else:
            obj.updated_at = datetime.now()
        obj.save()
# admin.site.register(CustomerProfile,CustomerProfileAdmin)
| [
"vivek@ebetainnovations.com"
] | vivek@ebetainnovations.com |
83d88f0242e4c09fd74e8582094347562219cc8d | 1892324f7bcb2273543a5b08d5dbf8a101d67803 | /examples/blending_colors_2.py | be6181beeceee2d281f9d3d7ca3323c7cdb4a543 | [
"MIT"
] | permissive | byewokko/penrose | b5b5266585969d5528e2f04b0a985c150f492e2b | f339a6f0a2c9b0486f0b46a517bb9b8907c4be07 | refs/heads/master | 2023-02-21T11:51:29.732515 | 2020-12-24T23:33:11 | 2020-12-24T23:33:11 | 302,706,319 | 12 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | import colorsys
import numpy as np
import multigrid
import ntiling
from drawing.pil_draw_simple import Draw
draw = Draw(scale=30, width=3*1280, height=3*1280, bg_color="#ffffff", color_mode="RGB")
draw.line_color = None
index_range = (-18, 18)

grid = multigrid.Pentagrid()
tiling_builder = ntiling.TilingBuilder(grid)
tiling_builder.prepare_grid(index_range)
tiling_builder.generate_rhomb_list()

# assign a color to each ribbon
base_colors = [colorsys.hsv_to_rgb(1/12*i, 1, 1) for i in range(8, 13)]

# compute colors for each ribbon intersection type
palette = {}
for i, j in multigrid.triangle_iterator(5):
    # blend the two ribbon colors "in gamma space": average the square
    # roots of the channels, square the result, then render as 2-digit hex.
    # NOTE(review): a channel of exactly 1.0 maps to int 256 -> hex "100",
    # which the [-2:] slice truncates to "00"; *255 (or clamping) may have
    # been intended — confirm against the expected output image.
    c = [
        ("0" + hex(int(a))[2:])[-2:]
        for a
        in (((np.sqrt(base_colors[i]) + np.sqrt(base_colors[j]))/2) ** 2) * 256
    ]
    palette[(i, j)] = "#" + "".join(c)

for rhomb in tiling_builder._rhombs.values():
    # palette key is the pair of grid indices stored on the rhomb
    c = tuple(rhomb.node[:2])
    if rhomb.type() in (1, 4):
        # thin rhomb types are skipped (left as background)
        pass
        # draw.polygon(rhomb.xy(), color="#ffffff", outline=None)
    else:
        draw.polygon(rhomb.xy(form="xy1"), color=palette[c], outline=None)

draw.show()
| [
"byewokko@seznam.cz"
] | byewokko@seznam.cz |
10eb3ff34e111d759aa4124b34648aabc0a84e52 | 5febc7f6386fbe18e8788041c0f8f6be4aabe2af | /lenstronomy/LensModel/Profiles/shapelet_pot_polar.py | 3b22b7118cdf8bb86a63a8d3173136ac9a95197c | [
"MIT"
] | permissive | guoxiaowhu/lenstronomy | 2b3aa10d6c72061edfd179d773948a40d9c2014a | dcdfc61ce5351ac94565228c822f1c94392c1ad6 | refs/heads/master | 2020-04-05T08:10:34.836586 | 2018-11-06T18:34:52 | 2018-11-06T18:34:52 | 156,704,299 | 1 | 0 | MIT | 2018-11-08T12:33:00 | 2018-11-08T12:33:00 | null | UTF-8 | Python | false | false | 9,446 | py | __author__ = 'sibirrer'
# description of the polar shapelets in potential space
import numpy as np
import scipy.special
import math
import lenstronomy.Util.param_util as param_util
class PolarShapelets(object):
    """
    Polar shapelet decomposition of the lensing potential, built from
    generalized Laguerre polynomials (see __init__). Deflection and Hessian
    are obtained by transforming the coefficient array (see derivatives()
    and hessian()).

    NOTE(review): the previous docstring mentioned the Singular Isothermal
    Sphere, which appears to be a copy-paste remnant from another profile.
    """
    # parameter names and default prior bounds used by the lenstronomy API
    param_names = ['coeffs', 'beta', 'center_x', 'center_y']
    lower_limit_default = {'coeffs': [0], 'beta': 0, 'center_x': -100, 'center_y': -100}
    upper_limit_default = {'coeffs': [100], 'beta': 100, 'center_x': 100, 'center_y': 100}
def __init__(self):
n = 10
self.poly = [[[] for i in range(n)] for i in range(n)]
for i in range(0,n):
for j in range(0,n):
self.poly[i][j] = scipy.special.genlaguerre(i, j)
def function(self, x, y, coeffs, beta, center_x=0, center_y=0):
shapelets = self._createShapelet(coeffs)
r, phi = param_util.cart2polar(x, y, center=np.array([center_x, center_y]))
f_ = self._shapeletOutput(r, phi, beta, shapelets)
return f_
def derivatives(self, x, y, coeffs, beta, center_x=0, center_y=0):
"""
returns df/dx and df/dy of the function
"""
shapelets = self._createShapelet(coeffs)
r, phi = param_util.cart2polar(x, y, center=np.array([center_x, center_y]))
alpha1_shapelets, alpha2_shapelets = self._alphaShapelets(shapelets, beta)
f_x = self._shapeletOutput(r, phi, beta, alpha1_shapelets)
f_y = self._shapeletOutput(r, phi, beta, alpha2_shapelets)
return f_x, f_y
def hessian(self, x, y, coeffs, beta, center_x=0, center_y=0):
"""
returns Hessian matrix of function d^2f/dx^2, d^f/dy^2, d^2/dxdy
"""
shapelets = self._createShapelet(coeffs)
r, phi = param_util.cart2polar(x, y, center=np.array([center_x, center_y]))
kappa_shapelets=self._kappaShapelets(shapelets, beta)
gamma1_shapelets, gamma2_shapelets=self._gammaShapelets(shapelets, beta)
kappa_value=self._shapeletOutput(r, phi, beta, kappa_shapelets)
gamma1_value=self._shapeletOutput(r, phi, beta, gamma1_shapelets)
gamma2_value=self._shapeletOutput(r, phi, beta, gamma2_shapelets)
f_xx = kappa_value + gamma1_value
f_xy = gamma2_value
f_yy = kappa_value - gamma1_value
return f_xx, f_yy, f_xy
def _createShapelet(self,coeff):
"""
returns a shapelet array out of the coefficients *a, up to order l
:param num_l: order of shapelets
:type num_l: int.
:param coeff: shapelet coefficients
:type coeff: floats
:returns: complex array
:raises: AttributeError, KeyError
"""
n_coeffs = len(coeff)
num_l = self._get_num_l(n_coeffs)
shapelets=np.zeros((num_l+1,num_l+1),'complex')
nl=0
k=0
i=0
while i < len(coeff):
if i%2==0:
shapelets[nl][k]+=coeff[i]/2.
shapelets[k][nl]+=coeff[i]/2.
if k==nl:
nl+=1
k=0
i+=1
continue
else:
k+=1
i+=1
continue
else:
shapelets[nl][k] += 1j*coeff[i]/2.
shapelets[k][nl] -= 1j*coeff[i]/2.
i+=1
return shapelets
def _shapeletOutput(self, r, phi, beta, shapelets):
"""
returns the the numerical values of a set of shapelets at polar coordinates
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:param coordPolar: set of coordinates in polar units
:type coordPolar: array of size (n,2)
:returns: array of same size with coords [r,phi]
:raises: AttributeError, KeyError
"""
if type(r) == float or type(r) == int or type(r) == type(np.float64(1)) or len(r) <= 1:
values = 0.
else:
values = np.zeros(len(r), 'complex')
for nl in range(0,len(shapelets)): #sum over different shapelets
for nr in range(0,len(shapelets)):
value = shapelets[nl][nr]*self._chi_lr(r, phi, nl, nr, beta)
values += value
return values.real
def _chi_lr(self,r, phi, nl,nr,beta):
"""
computes the generalized polar basis function in the convention of Massey&Refregier eqn 8
:param nl: left basis
:type nl: int
:param nr: right basis
:type nr: int
:param beta: beta --the characteristic scale typically choosen to be close to the size of the object.
:type beta: float.
:param coord: coordinates [r,phi]
:type coord: array(n,2)
:returns: values at positions of coordinates.
:raises: AttributeError, KeyError
"""
m=int((nr-nl).real)
n=int((nr+nl).real)
p=int((n-abs(m))/2)
p2=int((n+abs(m))/2)
q=int(abs(m))
if p % 2==0: #if p is even
prefac=1
else:
prefac=-1
prefactor=prefac/beta**(abs(m)+1)*np.sqrt(math.factorial(p)/(np.pi*math.factorial(p2)))
poly=self.poly[p][q]
return prefactor*r**q*poly((r/beta)**2)*np.exp(-(r/beta)**2/2)*np.exp(-1j*m*phi)
def _kappaShapelets(self, shapelets, beta):
"""
calculates the convergence kappa given lensing potential shapelet coefficients (laplacian/2)
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:returns: set of kappa shapelets.
:raises: AttributeError, KeyError
"""
output=np.zeros((len(shapelets)+1,len(shapelets)+1),'complex')
for nl in range(0,len(shapelets)):
for nr in range(0,len(shapelets)):
a_lr=shapelets[nl][nr]
if nl>0:
output[nl-1][nr+1]+=a_lr*np.sqrt(nl*(nr+1))/2
if nr>0:
output[nl-1][nr-1]+=a_lr*np.sqrt(nl*nr)/2
output[nl+1][nr+1]+=a_lr*np.sqrt((nl+1)*(nr+1))/2
if nr>0:
output[nl+1][nr-1]+=a_lr*np.sqrt((nl+1)*nr)/2
return output/beta**2
def _alphaShapelets(self,shapelets, beta):
"""
calculates the deflection angles given lensing potential shapelet coefficients (laplacian/2)
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:returns: set of alpha shapelets.
:raises: AttributeError, KeyError
"""
output_x = np.zeros((len(shapelets)+1, len(shapelets)+1), 'complex')
output_y = np.zeros((len(shapelets)+1, len(shapelets)+1), 'complex')
for nl in range(0,len(shapelets)):
for nr in range(0,len(shapelets)):
a_lr=shapelets[nl][nr]
output_x[nl][nr+1]-=a_lr*np.sqrt(nr+1)/2
output_y[nl][nr+1]-=a_lr*np.sqrt(nr+1)/2*1j
output_x[nl+1][nr]-=a_lr*np.sqrt(nl+1)/2
output_y[nl+1][nr]+=a_lr*np.sqrt(nl+1)/2*1j
if nl>0:
output_x[nl-1][nr]+=a_lr*np.sqrt(nl)/2
output_y[nl-1][nr]-=a_lr*np.sqrt(nl)/2*1j
if nr>0:
output_x[nl][nr-1]+=a_lr*np.sqrt(nr)/2
output_y[nl][nr-1]+=a_lr*np.sqrt(nr)/2*1j
return output_x/beta,output_y/beta #attention complex numbers!!!!
def _gammaShapelets(self,shapelets, beta):
"""
calculates the shear gamma given lensing potential shapelet coefficients
:param shapelets: set of shapelets [l=,r=,a_lr=]
:type shapelets: array of size (n,3)
:returns: set of alpha shapelets.
:raises: AttributeError, KeyError
"""
output_x = np.zeros((len(shapelets)+2,len(shapelets)+2),'complex')
output_y = np.zeros((len(shapelets)+2,len(shapelets)+2),'complex')
for nl in range(0, len(shapelets)):
for nr in range(0, len(shapelets)):
a_lr = shapelets[nl][nr]
output_x[nl+2][nr] += a_lr*np.sqrt((nl+1)*(nl+2))/2
output_x[nl][nr+2] += a_lr*np.sqrt((nr+1)*(nr+2))/2
output_x[nl][nr] += a_lr*(1-(nr+1)-(nl+1))
if nl>1:
output_x[nl-2][nr] += a_lr*np.sqrt((nl)*(nl-1))/2
if nr>1:
output_x[nl][nr-2] += a_lr*np.sqrt((nr)*(nr-1))/2
output_y[nl+2][nr] += a_lr*np.sqrt((nl+1)*(nl+2))*1j/4
output_y[nl][nr+2] -= a_lr*np.sqrt((nr+1)*(nr+2))*1j/4
if nl>0:
output_y[nl-1][nr+1] += a_lr*np.sqrt((nl)*(nr+1))*1j/2
if nr>0:
output_y[nl+1][nr-1] -= a_lr*np.sqrt((nr)*(nl+1))*1j/2
if nl>1:
output_y[nl-2][nr] -= a_lr*np.sqrt((nl)*(nl-1))*1j/4
if nr>1:
output_y[nl][nr-2] += a_lr*np.sqrt((nr)*(nr-1))*1j/4
return output_x/beta**2, output_y/beta**2 #attention complex numbers!!!!
def _get_num_l(self, n_coeffs):
"""
:param n_coeffs: number of coeffs
:return: number of n_l of order of the shapelets
"""
num_l = int(round((math.sqrt(8*n_coeffs + 9)-3)/2 +0.499))
return num_l
| [
"simon.birrer@pyhs.ethz.ch"
] | simon.birrer@pyhs.ethz.ch |
1893e38548a1fab8e0f39e6fce7e091246db8d68 | 9cc032fae9ef7c067fb2641da6cd8c2c7b5279d1 | /alien2.py | 94ad747054a59f022a89fb31bd65cbcd7750ac2a | [] | no_license | ursstaud/PCC-Basics | 5be4efa324f07d0f64a0c9bb39b57fb1b5ac83c3 | 036ec4976efb2d23945564cdd7ec61cd3ae0f373 | refs/heads/master | 2022-08-07T03:25:56.639987 | 2020-05-23T06:11:26 | 2020-05-23T06:11:26 | 266,267,426 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 518 | py | alien_0 = {'x_position': 0, 'y_position': 25, 'speed': 'medium'}
print(f"Original position: {alien_0['x_position']}")
# Move the alien to the right; its speed determines how far it travels.
speed_to_increment = {'slow': 1, 'medium': 2}
x_increment = speed_to_increment.get(alien_0['speed'], 3)  # 'fast' (or anything else) moves 3
# The new position is the old position plus the increment.
alien_0['x_position'] += x_increment
print(f"New position: {alien_0['x_position']}")
"noreply@github.com"
] | noreply@github.com |
a0a83028a3a6053fbf17d8665c12eeb4ad4e51ef | c47340ae6bcac6002961cc2c6d2fecb353c1e502 | /test/test_passwords_object.py | f302c422a7a0449e34de00837f7cdeffa116807f | [
"MIT"
] | permissive | rafaeldelrey/controlm_py | 6d9f56b8b6e72750f329d85b932ace6c41002cbd | ed1eb648d1d23e587321227217cbfcc5065535ab | refs/heads/main | 2023-04-23T09:01:32.024725 | 2021-05-19T00:25:53 | 2021-05-19T00:25:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 911 | py | # coding: utf-8
"""
Control-M Services
Provides access to BMC Control-M Services # noqa: E501
OpenAPI spec version: 9.20.115
Contact: customer_support@bmc.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import controlm_py
from controlm_py.models.passwords_object import PasswordsObject # noqa: E501
from controlm_py.rest import ApiException
class TestPasswordsObject(unittest.TestCase):
    """PasswordsObject unit test stubs.

    Auto-generated by swagger-codegen; the test body is a placeholder until
    the model can be constructed with real example values (see FIXME below).
    """

    def setUp(self):
        # no fixtures needed yet
        pass

    def tearDown(self):
        # nothing to clean up
        pass

    def testPasswordsObject(self):
        """Test PasswordsObject"""
        # FIXME: construct object with mandatory attributes with example values
        # model = controlm_py.models.passwords_object.PasswordsObject()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| [
"dcompane@gmail.com"
] | dcompane@gmail.com |
9d4a67d6299821b89898ef186710b151e47a3692 | 89c0a9ca66aa1c7e5cdf2bf4bc1fb453d5b7832a | /Retrieval.py | fbdc5625e0464a5896be533296df01929ea5ba53 | [] | no_license | ARenomX/EuroFinance | 9d0a5d56ecc01dc52fbc840b30ae85d413b47fcd | c9abf9c2fd857af327d0b5e8e79942f62f8f0ee1 | refs/heads/master | 2021-01-05T19:54:45.642523 | 2020-02-25T10:41:46 | 2020-02-25T10:41:46 | 241,122,005 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 751 | py | # Data retrieval
import pandas_datareader.data as web
import yfinance as yf
def stock(stock,starts,ends):
#Retrieves stock closing values between start and end dates
return web.DataReader(stock, start = starts, end = ends ,
data_source='yahoo')['Close'].values
def day(stock , day):
#Retrieves the close of a stock on a specific day
try:
return (True, web.DataReader(stock, start=day, end=day ,
data_source='yahoo')['Close'].values[0])
except:
return (False, 0)
def intraday(stock, starts, ends):
tick=yf.Ticker(stock)
return tick.history(start=starts, end = ends,
interval = '2m')['Open'].values | [
"56108524+ARenomX@users.noreply.github.com"
] | 56108524+ARenomX@users.noreply.github.com |
08f4778c0cbe11f6f0bdbfad4d498ba643ef669b | c88617c762f4620dde62fcefe6875dc247dbe3c3 | /solved/CheckIo2.py | 62b9888832860e2054c13e7a6329344f890392e2 | [] | no_license | alexe1ka/CheckIoSolutions | fd755f1431e56612cc5e0d2c5fdf39914254e3c9 | 36f26217265d95778e6a7aa1ee785480e911765f | refs/heads/master | 2021-03-30T20:43:31.632360 | 2018-05-14T22:05:10 | 2018-05-14T22:05:10 | 124,941,827 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 994 | py | import re
def say_hi(name, age):
return "Hi. My name is " + name + " and I'm " + str(age) + " years old"
#####################################################
def correct_sentence(text: str) -> str:
"""
returns a corrected sentence which starts with a capital letter
and ends with a dot.
"""
# your code here
text = text[0].upper() + text[1:]
if not text.endswith('.'):
text += '.'
return text
#######################################################
def first_word(text: str) -> str:
"""
returns the first word in a given text
"""
word = re.search("[A-Z]*[a-z]*[']*[a-z]", text)
return word.group(0)
################################################################
def second_index(text: str, symbol: str):
"""
returns the second index of a symbol in a given text
"""
second_ind = text.replace(symbol, "", 1).find(symbol) + 1
if second_ind == 0:
return
return second_ind
| [
"leshka_09@bk.ru"
] | leshka_09@bk.ru |
969ba7ae2cf7f49718fd98a7c15ee047084e9def | 710d04753e75bfcb3b011b45fe6cfebac73497ea | /timer.py | 696e8d63b0419364893ec97ab0c18dac7e49ab04 | [] | no_license | AlliAmbrose/ArtemisBots | c9e3b1d9d544a43513243334ee09c740651f2ef6 | 0082a416d4f248d5275f70b6070811ad5e680a89 | refs/heads/master | 2021-05-06T11:32:38.778755 | 2017-12-16T04:59:22 | 2017-12-16T04:59:22 | 114,296,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 705 | py | from Myro import*
import time
init("com4")
setForwardness("scribbler-forward")
def timer(t):
    # Drive the scribbler forward for t+1 steps (presumably forward(speed,
    # seconds) -- confirm against the Myro API), then turn 90 degrees and
    # creep forward at half speed on the final step.
    # NOTE(review): returns None, so callers cannot use timer(...) in a
    # boolean condition (followLine does exactly that).
    while t >= 0:
        ##print(t)
        forward(1,1)
        t = t-1
        if t == 0:
            turnBy(90)
            forward(.5,1)
            break
def followLine():
    # Simple line follower: poll the two line sensors forever and steer back
    # toward the line. l and r read 1 when the corresponding sensor sees it.
    while True:
        [l,r]=getLine()
        if l==1 and r==1:
            # both sensors on the line (e.g. a crossing): nudge right
            turnRight(1,.5)
            # NOTE(review): timer() returns None, so this condition is always
            # falsy and turnBy(90) is unreachable -- confirm intended behavior.
            if timer(2) and l==1 and r==1:
                turnBy(90)
        elif l==1 and r==0:
            # drifted off to one side: back up and correct right
            lastTurn=1  # NOTE(review): lastTurn is written but never read
            backward(1,1)
            turnRight(1,.5)
        elif l==0 and r==1:
            # drifted off to the other side: back up and correct left
            lastTurn=2
            backward(1,1)
            turnLeft(1,.5)
        else:
            # line lost on both sensors: keep driving straight
            forward(1)
timer(2)
followLine()
| [
"noreply@github.com"
] | noreply@github.com |
c8993adf076834d6a94ce21bf48e6a96c2f6d088 | c452914f5ee176ec0306d6b1cd8300f27f251d90 | /a_Python3.6Essentials/0.9 - working with numbers/working-with-numbers-02.py | 97b03895db29a55e1d66865f3fde5cefe1891160 | [] | no_license | cgmoganedi/PythonBasics | 201cccc40122cdda58cbe2a0b157768fe0f759df | e881f72514b6ed2c3427eaa79945f9d5e5073c0f | refs/heads/master | 2020-03-26T22:12:35.850508 | 2020-01-22T00:29:00 | 2020-01-22T00:29:00 | 145,440,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 55 | py | #floats
# Two ways to write float literals.
a = 1.0  # plain decimal notation
b = 2e2  # scientific notation: 2 * 10**2 == 200.0
print("a = ", a, "b = ", b)
| [
"cgmoganedi@gmail.com"
] | cgmoganedi@gmail.com |
7bb7a1b5c5952f038f77e6f02148dddc3d85693b | 6323dcaac51889d4a490efbe04c348f993dab89a | /generator/views.py | bcfdc61213db7979d9c8a06801a7fe97f7acec8a | [] | no_license | Kingghost0519/PassGen | 6d9bffdbf319dc2d82c7fa8a0a4213e31f511041 | 0a5b2095256684031ac220b5b034c436ed772295 | refs/heads/master | 2022-06-17T22:08:07.918912 | 2020-05-08T19:11:19 | 2020-05-08T19:11:19 | 262,291,775 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 819 | py | from django.shortcuts import render
from django.http import HttpResponse
import random
# Create your views here.
def home(request):
    """Render the password-generator landing page."""
    return render(request, 'generator/home.html')
def about(request):
    """Render the static about page."""
    return render(request, 'generator/about.html')
def password(request):
    """Generate a random password and render it.

    GET parameters:
        length    -- desired password length (defaults to 12 on missing or
                     non-numeric input)
        uppercase -- include A-Z when present
        special   -- include !@#$%^&*() when present
        numbers   -- include 0-9 when present
    """
    # secrets provides a CSPRNG; random.choice is predictable and therefore
    # unsuitable for generating credentials.
    import secrets
    characters = list('abcdefghijklmnopqrstuvwxyz')
    try:
        length = int(request.GET.get('length', 12))
    except ValueError:
        # a non-numeric ?length= used to raise a 500; fall back to the default
        length = 12
    if request.GET.get('uppercase'):
        characters.extend('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
    if request.GET.get('special'):
        characters.extend('!@#$%^&*()')
    if request.GET.get('numbers'):
        characters.extend('1234567890')
    thepassword = ''.join(secrets.choice(characters) for _ in range(length))
    return render(request, 'generator/password.html', {'password': thepassword})
| [
"kingghostsh@gmail.com"
] | kingghostsh@gmail.com |
11a15bc4a6c19b4e324aab79401dcb1d0b247012 | cf904ac377911e2f826455d6a2a26e3c954fe191 | /chat2seq.py | 37ffaf4c64c123b35184f46ec8f5ad7e8814ba36 | [
"Apache-2.0"
] | permissive | arhumzafar/seq2chat | 07362ad390e338b7e30bbaf7fadbb169ab12a895 | 1ce793691dab5b400b1b1b5cf7db51a180f9d2b3 | refs/heads/main | 2023-07-13T07:57:27.699905 | 2021-08-22T16:35:53 | 2021-08-22T16:35:53 | 398,099,895 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,309 | py | # Arhum Zafar
"""
import watermark
watermark -a -t -d
"""
from tensorflow.keras.models import load_model, Model
from tensorflow.keras.layers import Input, Concatenate
import tensorflow as tf
import os
from tensorflow.python.keras.layers import Layer
from tensorflow.python.keras import backend as K
import pickle
import numpy as np
import re
from AttentionLayer import AttentionLayer
# Load the tokenizer vocabularies saved at training time -- presumably
# word->index (dic.pkl) and index->word (inv.pkl); verify against training code.
# NOTE(review): pickle.load executes arbitrary code -- only load trusted files.
with open('dic.pkl', 'rb') as f:
    vocab = pickle.load(f)
with open('inv.pkl', 'rb') as f:
    inv_vocab = pickle.load(f)
def clean_text(txt):
    """Normalize a chat line: lowercase, expand common English contractions,
    then strip everything that is not a word character or whitespace."""
    txt = txt.lower()
    # Applied in order; the final pattern removes leftover punctuation.
    substitutions = [
        (r"i'm", "i am"),
        (r"he's", "he is"),
        (r"she's", "she is"),
        (r"that's", "that is"),
        (r"what's", "what is"),
        (r"where's", "where is"),
        (r"\'ll", " will"),
        (r"\'ve", " have"),
        (r"\'re", " are"),
        (r"\'d", " would"),
        (r"won't", "will not"),
        (r"can't", "can not"),
        (r"[^\w\s]", ""),
    ]
    for pattern, replacement in substitutions:
        txt = re.sub(pattern, replacement, txt)
    return txt
# ---- rebuild encoder/decoder inference models from the trained seq2seq ----
attn_layer = AttentionLayer()
model = load_model('chatbot.h5', custom_objects={'AttentionLayer': attn_layer})
# Encoder side: embedding -> bidirectional LSTM; concatenate fwd/bwd states.
encoder_inputs = model.layers[0].input
embed = model.layers[2]
enc_embed = embed(encoder_inputs)
enocoder_layer = model.layers[3]
encoder_outputs, fstate_h, fstate_c, bstate_h, bstate_c = enocoder_layer(enc_embed)
h = Concatenate()([fstate_h, bstate_h])
c = Concatenate()([fstate_c, bstate_c])
encoder_states = [h, c]
enc_model = Model(encoder_inputs,
                  [encoder_outputs,
                   encoder_states])
# Decoder side: one-step model fed with the previous token and LSTM state.
latent_dim = 800
decoder_inputs = model.layers[1].input
decoder_lstm = model.layers[6]
decoder_dense = model.layers[9]
decoder_state_input_h = Input(shape=(latent_dim,), name='input_3')
decoder_state_input_c = Input(shape=(latent_dim,), name='input_4')
decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]
dec_embed = embed(decoder_inputs)
decoder_outputs, state_h, state_c = decoder_lstm(dec_embed, initial_state=decoder_states_inputs)
decoder_states = [state_h, state_c]
dec_model = Model([decoder_inputs, decoder_states_inputs], [decoder_outputs] + decoder_states)
dec_dense = model.layers[-1]
attn_layer = model.layers[7]

from keras.preprocessing.sequence import pad_sequences

print("##########################################")
print("#       start chatting ver. 1.0          #")
print("##########################################")

# ---- interactive loop: greedy decoding with attention, 'q' quits ----
prepro1 = ""
while prepro1 != 'q':
    prepro1 = input("you : ")
    try:
        prepro1 = clean_text(prepro1)
        prepro = [prepro1]
        # tokenize, mapping out-of-vocabulary words to the <OUT> index
        txt = []
        for x in prepro:
            lst = []
            for y in x.split():
                try:
                    lst.append(vocab[y])
                except:
                    lst.append(vocab['<OUT>'])
            txt.append(lst)
        txt = pad_sequences(txt, 13, padding='post')
        # encode once, then decode token by token starting from <SOS>
        enc_op, stat = enc_model.predict(txt)
        empty_target_seq = np.zeros((1, 1))
        empty_target_seq[0, 0] = vocab['<SOS>']
        stop_condition = False
        decoded_translation = ''
        while not stop_condition:
            dec_outputs, h, c = dec_model.predict([empty_target_seq] + stat)
            # attention over the encoder outputs, then final softmax layer
            attn_op, attn_state = attn_layer([enc_op, dec_outputs])
            decoder_concat_input = Concatenate(axis=-1)([dec_outputs, attn_op])
            decoder_concat_input = dec_dense(decoder_concat_input)
            # greedy choice: most probable next token
            sampled_word_index = np.argmax(decoder_concat_input[0, -1, :])
            sampled_word = inv_vocab[sampled_word_index] + ' '
            if sampled_word != '<EOS> ':
                decoded_translation += sampled_word
            # stop on end-of-sequence or after 13 decoded words
            if sampled_word == '<EOS> ' or len(decoded_translation.split()) > 13:
                stop_condition = True
            # feed the sampled token and the new state back into the decoder
            empty_target_seq = np.zeros((1, 1))
            empty_target_seq[0, 0] = sampled_word_index
            stat = [h, c]
        print("chatbot attention : ", decoded_translation)
        print("==============================================")
    except:
        # NOTE(review): bare except hides the real failure (e.g. model/shape
        # errors), every input then looks like a parse problem
        print("sorry didn't got you , please type again :( ")
| [
"arhum10@yahoo.com"
] | arhum10@yahoo.com |
cf73b2cdd146109296beefc05d7bab706ede4929 | d8b8ff78310d874434d58dc36b452149c0ffcf09 | /dictionary5.py | b377b6e36711e52e15e302a314a4219f13e49830 | [] | no_license | Manas2909/Python-Stuff | 6133a14cc1539feac6e5f6bb67d2184ad2c0f0e8 | c222c76b977e4499d9d906e15e56143a1b967574 | refs/heads/master | 2020-09-15T04:01:17.208827 | 2019-12-03T07:27:25 | 2019-12-03T07:27:25 | 223,343,508 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 361 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Jun 26 13:35:10 2019
@author: Manas
"""
# Read a user-specified number of key/value pairs into a dict, then print it.
num_key=int(input("enter total number of keys"))
d=dict()
for i in range(1,num_key+1) :
    key=int(input("enter key "))
    value=input("enter values ")
    d[key]=value
print(d)
# NOTE(review): this loop prints 0 once per key and never updates `largest`;
# it looks like an unfinished attempt to find the largest key (nothing is
# ever compared against `largest`).
largest=0
for i in d.keys():
    print(largest)
| [
"noreply@github.com"
] | noreply@github.com |
f510be9f877cf397ceb2bf6817365f456d8d5106 | 6490638f15a2dfbe0cec9725186f9784d57c92f0 | /SCOS/__init__.py | a6e179ad6b3b0a273223cde1aa960d4a7e93d834 | [
"MIT"
] | permissive | khawatkom/SpacePyLibrary | af9c490ef796b9d37a13298c41df1fb5bf6b3cee | c94415e9d85519f345fc56938198ac2537c0c6d0 | refs/heads/master | 2020-05-14T21:52:39.388979 | 2019-04-17T17:06:04 | 2019-04-17T17:06:04 | 181,970,668 | 1 | 0 | null | 2019-04-17T21:26:44 | 2019-04-17T21:26:44 | null | UTF-8 | Python | false | false | 1,145 | py | #******************************************************************************
# (C) 2018, Stefan Korner, Austria *
# *
# The Space Python Library is free software; you can redistribute it and/or *
# modify it under under the terms of the MIT License as published by the *
# Massachusetts Institute of Technology. *
# *
# The Space Python Library is distributed in the hope that it will be useful, *
# but WITHOUT ANY WARRANTY; without even the implied warranty of *
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the MIT License *
# for more details. *
#******************************************************************************
# SCOS-2000 Functionality *
#******************************************************************************
__all__ = ["ENV", "MIB"]
| [
"korner-hajek@gmx.at"
] | korner-hajek@gmx.at |
7db291f72352b9ac3132ffa7526c7298011977a9 | aeb98195376b048127cab333398124da5981f07e | /pavement.py | 6daaec062384625c6bbe8b350e6836d8fc302942 | [] | no_license | tchenuag/csppython | 928baad5890d174053392663ad7ac2193953a32b | aae18129d41709085d51951a67bb13809723a044 | refs/heads/master | 2021-01-18T03:16:58.628518 | 2016-04-30T20:28:46 | 2016-04-30T20:28:46 | 68,535,412 | 0 | 0 | null | 2016-09-18T17:09:23 | 2016-09-18T17:09:23 | null | UTF-8 | Python | false | false | 1,075 | py | import paver
from paver.easy import *
import paver.setuputils
paver.setuputils.install_distutils_tasks()
import os, sys
from sphinxcontrib import paverutils
sys.path.append(os.getcwd())
home_dir = os.getcwd()
master_url = 'http://127.0.0.1:8000'
master_app = 'runestone'
serving_dir = "./build/csppython"
dest = "../../static"
options(
sphinx = Bunch(docroot=".",),
build = Bunch(
builddir="./build/csppython",
sourcedir="_sources",
outdir="./build/csppython",
confdir=".",
project_name = "csppython",
template_args={'course_id': 'csppython',
'login_required':'false',
'appname':master_app,
'loglevel': 0,
'course_url':master_url,
'use_services': 'false',
'python3': 'false',
'dburl': '',
'basecourse': 'csppython'
}
)
)
from runestone import build # build is called implicitly by the paver driver.
| [
"timmy.i.chen@gmail.com"
] | timmy.i.chen@gmail.com |
2526646d885e61e524121648c742613bef3af8ee | 29d0a088e1bd92c4f383d0515e14249892798261 | /diemthpt2021/middlewares.py | b5bea5aaebbe85968407f1935f8522998868b244 | [] | no_license | bang98ptit/thpt2021 | 7413aa33852f3e692afb7c28b0940b1604984c97 | 5cf1bcd0471f612c1286d6655167f3fb777737a9 | refs/heads/master | 2023-08-21T11:04:22.875350 | 2021-11-01T09:27:52 | 2021-11-01T09:27:52 | 422,858,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,593 | py | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class Diemthpt2021SpiderMiddleware:
    """Default Scrapy spider-middleware scaffold: everything passes through
    unchanged. Only the methods you modify have any effect."""

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy calls this factory when assembling the middleware chain.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        # Responses enter the spider untouched; None continues processing.
        return None

    def process_spider_output(self, response, result, spider):
        # Forward every item/request produced by the spider unchanged.
        yield from result

    def process_spider_exception(self, response, exception, spider):
        # No special handling: let other middlewares / the engine deal with it.
        return None

    def process_start_requests(self, start_requests, spider):
        # Start requests pass through unchanged (must yield requests only).
        yield from start_requests

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class Diemthpt2021DownloaderMiddleware:
    """Default Scrapy downloader-middleware scaffold: requests and responses
    pass through unchanged."""

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy calls this factory when assembling the middleware chain.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_request(self, request, spider):
        # Returning None lets the request continue to the downloader.
        return None

    def process_response(self, request, response, spider):
        # Responses are forwarded unchanged.
        return response

    def process_exception(self, request, exception, spider):
        # No recovery attempted; other middlewares may handle the exception.
        return None

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
| [
"bang98ptit@gmail.com"
] | bang98ptit@gmail.com |
8bdf85ae22c647c63942ba7386de93b7ac56216a | 41a0e73f70c763396afb9448ff599dc5daefbc04 | /Main.py | a8b87da2fccc7685dd3c45c44f7b829d453232fc | [] | no_license | astroboi-SH-KWON/get_guide_fr_R1_barcode_fr_R2_then_check_Guide_Barcode_pairs | caf0bea9e24f6e6919fcad5e6d31f22d2fc40c67 | a71703c21fc819abfc7c2e3f3d90501292eea650 | refs/heads/master | 2022-11-30T16:39:56.148002 | 2020-08-03T08:35:33 | 2020-08-03T08:35:33 | 284,590,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,433 | py | import time
import os
import Util
import LogicPrep
import Logic
############### start to set env ###############
# WORK_DIR = os.getcwd() + "/"
WORK_DIR = "D:/000_WORK/KimNahye/20200803/WORK_DIR/"
PROJECT_NAME = WORK_DIR.split("/")[-2]
INPUT = "input/"
OUTPUT = "output/"
FASTQ = "NGS_READ/"
Guide_barcode = "Guide_barcode.txt"
FASTQ_pair = "FASTQ_list.txt"
BEFORE_GUIDE_R1 = "CCAGCAGGTCCCATGGTGTAATGGTtAGCACTCTGGACTTTGAATCCAGCGaTCCGAGTTCAAATCTCGGTGGGACCT"
LEN_GUIDE_R1 = 20
RP_SEQ_R2 = "CAGAAGACGGCATACGA"
BLANK_BP_R2 = 33
LEN_BRCD_R2 = 15
INIT = [BEFORE_GUIDE_R1, LEN_GUIDE_R1, RP_SEQ_R2, BLANK_BP_R2, LEN_BRCD_R2]
############### end setting env ################
def main():
util = Util.Utils()
logic_prep = LogicPrep.LogicPreps()
logic = Logic.Logics(INIT)
idx_g_b_list = util.csv_to_list_ignr_header(WORK_DIR + INPUT + Guide_barcode, "\t")
fastq_pairs = util.csv_to_list_ignr_header(WORK_DIR + INPUT + FASTQ_pair, "\t")
fastq_r1_dict, fastq_r2_dict = logic_prep.get_r1_r2_pairs(WORK_DIR + FASTQ, "fastq", fastq_pairs)
result_dict, err_dict = logic.analyze_(fastq_r1_dict, fastq_r2_dict, idx_g_b_list)
util.make_excel(WORK_DIR + OUTPUT + "result", result_dict)
if __name__ == '__main__':
start_time = time.perf_counter()
print("start [ " + PROJECT_NAME + " ]>>>>>>>>>>>>>>>>>>")
main()
print("::::::::::: %.2f seconds ::::::::::::::" % (time.perf_counter() - start_time)) | [
"terry007x@korea.ac.kr"
] | terry007x@korea.ac.kr |
a00977ae7b9fc224137b8000972d35669de9010e | 15bf2e6a831bfe9215a81d25def26187c49bf2ca | /AESCipher.py | 8f6ab9bc7940304e4c02b50261e5e597561b0d89 | [] | no_license | dvopsway/csv_encrypt | a832f5484a073ce9598a3660f80c1e599dfe406b | aa8676f593bd108b9c692e9b482d5cd7fa3a64ba | refs/heads/master | 2021-01-17T18:16:03.475242 | 2018-08-28T08:38:23 | 2018-08-28T08:38:23 | 37,268,660 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 849 | py | import base64
import hashlib
from Crypto.Cipher import AES
from Crypto import Random
class AESCipher(object):
    """AES-256-CBC helper with PKCS#7-style padding and base64 transport encoding.

    NOTE(review): CBC without a MAC does not authenticate ciphertexts; prefer
    an AEAD mode (e.g. GCM) if integrity matters.
    """

    def __init__(self, key):
        # Pad/block granularity and a 256-bit key derived from the passphrase.
        self.bs = 32
        self.key = hashlib.sha256(key.encode()).digest()

    def encrypt(self, raw):
        """Pad *raw*, encrypt under a fresh random IV, return base64(iv + ciphertext)."""
        padded = self._pad(raw)
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return base64.b64encode(iv + cipher.encrypt(padded))

    def decrypt(self, enc):
        """Reverse encrypt(): base64-decode, split off the IV, decrypt and unpad."""
        blob = base64.b64decode(enc)
        iv, ciphertext = blob[:AES.block_size], blob[AES.block_size:]
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return self._unpad(cipher.decrypt(ciphertext)).decode('utf-8')

    def _pad(self, s):
        # Append pad_len copies of chr(pad_len) so len(result) is a multiple
        # of self.bs (a full extra block when already aligned).
        pad_len = self.bs - len(s) % self.bs
        return s + chr(pad_len) * pad_len

    @staticmethod
    def _unpad(s):
        # The final character encodes how many padding characters to strip.
        return s[:-ord(s[len(s) - 1:])]
"padmakarojha@gmail.com"
] | padmakarojha@gmail.com |
37e3d7c64cf6e4b0c8868b0f9292d97c967c4294 | 6036b23309c2b4c74c8bac64b05ba1ed3d052bff | /app.py | 48c4e91031b3a178573482b0aa0dff61bbf27cdf | [] | no_license | juvanthomas/whatsap | f1e1e5e2979fc7562c56f0e844c7dddb87d36c98 | 56c6d40923d153beb6235467a564b2ace29a227f | refs/heads/main | 2023-08-24T04:24:37.127743 | 2021-10-18T11:50:02 | 2021-10-18T11:50:02 | 417,138,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,792 | py | from flask import Flask, request
import os
import uuid

import requests
from flask import send_file
from twilio.twiml.messaging_response import Body, Media, Message, MessagingResponse
# data of the url of views
dashboard_urls = {
'promotion': "https://ask.beinex.com//api/3.9/sites/908f81e2-cb30-43a2-a7d0-461879aa9016/views/1829f8ea-00a7-490f-b0cd-97fd55a6e710/image",
'ticket_sales': "https://ask.beinex.com//api/3.9/sites/908f81e2-cb30-43a2-a7d0-461879aa9016/views/250d968b-51ca-461d-a1a7-662e8f0aefb1/image",
'ticket_group': "https://ask.beinex.com//api/3.9/sites/908f81e2-cb30-43a2-a7d0-461879aa9016/views/8d28d450-e92a-4894-8707-2f57a73d3038/image"
}
maping = {"1": "promotion",
"2": "ticket_sales",
"3": "ticket_group"
}
maping_for_display = {"promotion": "Promotion Performance",
"ticket_sales": "Ticket Sales Overview",
"ticket_group": "Website Sales"}
# ~ logic of the program starts here
app = Flask(__name__)
@app.route("/")
def hello():
    """Health-check endpoint: returns a static placeholder string."""
    return "#########"
@app.route("/get-image/<image_name>")
def get_image(image_name):
    """Serve a previously generated dashboard image from the documents folder.

    *image_name* comes straight from the URL, so treat it as untrusted:
    reduce it to its basename to block path-traversal attempts such as
    "../../etc/passwd" before touching the filesystem.
    """
    print(image_name)
    safe_name = os.path.basename(image_name)
    path = '/home/ubuntu/sreejith_whatsaap/documents/'
    file = path + safe_name
    return send_file(path_or_file=file, as_attachment=False)
@app.route("/sms", methods=['POST'])
def sms_reply():
    """Twilio WhatsApp webhook: map the incoming digit to a Tableau view,
    export it as a PNG and reply with the image; on any failure reply with
    the menu of available dashboards."""
    # Fetch the message
    msg = request.form.get('Body')  # whatsap
    # mobile = request.form.get('From')
    query = str(msg)
    try:
        dashboard_name = maping[query]
        keyword = dashboard_urls[maping[query]]
        print(maping[query])
        print('keyword = ', keyword)
        # NOTE(review): if this guard were ever false, `response` would be
        # unbound at the final return (all current dashboard_urls values are
        # strings, so it always holds today).
        if type(keyword) == str:
            # Export the view image via the Tableau REST API.
            # NOTE(review): verify=False disables TLS certificate checking.
            token = log_in()
            HEADERS = {'X-Tableau-Auth': token}
            r = requests.get(url=keyword, headers=HEADERS, verify=False)
            # Save under a random name so concurrent requests don't collide.
            object_name = str(uuid.uuid4())
            folder = "documents/"
            file_to_save = folder + object_name + '.png'
            with open(file_to_save, 'wb') as f:
                f.write(r.content)
            print("Succesfully generated and saved the datasource in Local")
            #print("http://0.0.0.0/:8081/get-image/" + object_name+'.png')
            print("http://3.6.141.16/:8080/get-image/" + object_name+'.png')
            #local_url = "http://0.0.0.0:8081/get-image/" + object_name+'.png'
            # Public URL served back by this app's /get-image route.
            local_url = "http://3.6.141.16:8080/get-image/" + object_name+'.png'
            print(local_url)
            response = MessagingResponse()
            response.message("Your dashboard is loading ")
            # print(url)
            # Create 1st attachment
            message1 = Message()
            message1.body(maping_for_display[dashboard_name])
            message1.media(local_url)
            response.append(message1)
    except Exception as e:
        # Unknown selection (or any export failure): reply with the menu.
        print(e)
        response = MessagingResponse()
        response.message(
            "\nBelow are the dashboards available. \nReply with the corresponding number to view it. \n\n 1. Promotion Performance \n 2. Ticket Sales Overview \n 3. Website Sales")
    return str(response)
def log_in():
    """Sign in to the Tableau REST API and return an auth token.

    SECURITY NOTE(review): credentials are hardcoded in source and TLS
    verification is disabled (verify=False) — both should be moved to
    configuration/secrets and verification re-enabled.
    """
    URL = "https://ask.beinex.com/api/3.6/auth/signin"
    xml = """<tsRequest>
    <credentials name="juvan" password="Juvan@123">
    <site contentUrl="Expo" />
    </credentials>
    </tsRequest>"""
    head = {"Accept": "application/json"}
    r = requests.post(url=URL, data=xml, headers=head, verify=False)
    jsonfile = r.json()
    # Token lives under credentials.token in the JSON response.
    token = jsonfile["credentials"]["token"]
    return token
if __name__ == "__main__":
    # NOTE(review): debug=True on a publicly bound host (0.0.0.0) exposes the
    # Werkzeug interactive debugger — disable it outside development.
    app.run(host="0.0.0.0", port=8080, debug=True)
| [
"noreply@github.com"
] | noreply@github.com |
987de59ca525a3d629142cda7edaf824dcbc5083 | d238ec65e3e008cc269a92d0c0bc9388248bce58 | /creature_spawning/spawn_creature_test.py | dfdd2b6e0d75f13131bffce3173f3eb955c305b3 | [] | no_license | rwill128/evo-sim-cuda-python | e9b5dc26d701b1a893350f089490fde69bd5ad0e | c83ef18149f392986b9ae3a2a08e232e59cdb83f | refs/heads/master | 2023-01-21T11:27:11.341990 | 2020-11-25T20:24:20 | 2020-11-25T20:24:20 | 304,350,049 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | import unittest
class SpawnCreatureTest(unittest.TestCase):
    def test_something(self):
        # Placeholder scaffold test: intentionally asserts True == False,
        # so it always fails until a real test is written.
        self.assertEqual(True, False)
if __name__ == '__main__':
    unittest.main()
| [
"rwilliams@casenetllc.com"
] | rwilliams@casenetllc.com |
4223e6af16f292854ac2a9c6c4fcd62bc5aaab47 | 90bb39887282a5e7d9c3c8f47f2c4e3d2c56604b | /graphapi/utils.py | 4e048d7fb65542d714a05fa41eacceb29c5f547c | [] | no_license | ashoksiri/facebookapi | d4f85b6e9cb1369c6f0ab83da7f3db9d66dfc5f3 | 641aa407abe63e15fece1f7ba91391f4a33371bf | refs/heads/master | 2022-12-14T16:27:15.909345 | 2017-11-03T15:05:49 | 2017-11-03T15:05:49 | 109,138,857 | 0 | 0 | null | 2022-12-08T00:38:30 | 2017-11-01T14:04:57 | Python | UTF-8 | Python | false | false | 2,127 | py | import six
import datetime
from decimal import Decimal
# Types preserved as-is by force_text(strings_only=True): numbers, None,
# and the datetime family (they have sensible lazy/str conversions later).
_PROTECTED_TYPES = six.integer_types + (
    type(None), float, Decimal, datetime.datetime, datetime.date, datetime.time
)
def is_protected_type(obj):
    """Tell whether *obj* belongs to one of the protected types.

    Instances of protected types are passed through unchanged when calling
    force_text(strings_only=True).
    """
    protected = isinstance(obj, _PROTECTED_TYPES)
    return protected
def force_text(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Similar to smart_text, except that lazy instances are resolved to
    strings, rather than kept as lazy objects.
    If strings_only is True, don't convert (some) non-string-like objects.
    """
    # Handle the common case first for performance reasons.
    if issubclass(type(s), six.text_type):
        return s
    if strings_only and is_protected_type(s):
        return s
    try:
        if not issubclass(type(s), six.string_types):
            # Not a str/bytes at all: coerce via the text constructor.
            if six.PY3:
                if isinstance(s, bytes):
                    s = six.text_type(s, encoding, errors)
                else:
                    s = six.text_type(s)
            elif hasattr(s, '__unicode__'):
                # Python 2 object with an explicit unicode conversion.
                s = six.text_type(s)
            else:
                s = six.text_type(bytes(s), encoding, errors)
        else:
            # Note: We use .decode() here, instead of six.text_type(s, encoding,
            # errors), so that if s is a SafeBytes, it ends up being a
            # SafeText at the end.
            s = s.decode(encoding, errors)
    except UnicodeDecodeError as e:
        if not isinstance(s, Exception):
            raise ValueError(s, *e.args)
        else:
            # If we get to here, the caller has passed in an Exception
            # subclass populated with non-ASCII bytestring data without a
            # working unicode method. Try to handle this without raising a
            # further exception by individually forcing the exception args
            # to unicode.
            s = ' '.join(force_text(arg, encoding, strings_only, errors)
                         for arg in s)
    return s
"ashok@msr-it.com"
] | ashok@msr-it.com |
5231b1176e669d8aa95fff862a57be460421e78e | ae2695f60480aa9dbe2acf68309d7918b67c6954 | /alembic/versions/aca6937e73_committee_summary_te.py | d71879fcec60efa04167e13acbb5c7c1a6ddbb46 | [
"MIT"
] | permissive | mgax/mptracker | 0853dd11a7b15bce8d535eb86f65c1e37596a4e6 | e8d3c489aed36c70f81e89626f02e735e5890435 | refs/heads/master | 2023-02-22T03:53:26.481927 | 2020-08-19T09:25:58 | 2020-08-19T09:25:58 | 11,983,896 | 4 | 6 | MIT | 2023-02-02T07:16:26 | 2013-08-08T18:52:45 | Python | UTF-8 | Python | false | false | 278 | py | revision = 'aca6937e73'
down_revision = '58f2cb9046f'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the nullable `text` column to committee_summary."""
    op.add_column('committee_summary',
        sa.Column('text', sa.Text(), nullable=True))
def downgrade():
    """Drop the `text` column added by this revision."""
    op.drop_column('committee_summary', 'text')
| [
"alex@grep.ro"
] | alex@grep.ro |
e4ce7e967120ec413c360cfb38e9419d4965a57c | 5abf069ff84cb7ea465069c258c144460649da35 | /desktop/toolkit/qscintilla2/actions.py | 813c39c4e18bf77a3179ae66180e0b5080e6f9a0 | [] | no_license | poyraz76/Packages-Systemd | 7628cf6f6a8808f8766735551956e3dd8da9a2a9 | a515ea0275dc0d8ec38fb6eaacc85904dde9f286 | refs/heads/master | 2021-01-09T05:51:48.542336 | 2017-02-04T10:25:22 | 2017-02-04T10:25:22 | 80,849,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,161 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/licenses/gpl.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
from pisi.actionsapi import pythonmodules
from pisi.actionsapi import get
from pisi.actionsapi import qt5
# The tarball unpacks into QScintilla-gpl-<version>; keep docs unstripped.
WorkDir = "QScintilla-gpl-%s" % get.srcVERSION()
NoStrip = ["/usr/share/doc"]
def setup():
    """Run qmake for the Qt library and the designer plugin, patching flags.

    qmake generates Makefiles with its own flags; the dosed() calls rewrite
    the CFLAGS/CXXFLAGS lines to use the distro build flags plus -fPIC.
    """
    shelltools.cd("Qt4Qt5")
    shelltools.system("qmake qscintilla.pro")
    # Change C/XXFLAGS
    pisitools.dosed("Makefile", "^CFLAGS.*\\$\\(DEFINES\\)", "CFLAGS = %s -fPIC $(DEFINES)" % get.CFLAGS())
    pisitools.dosed("Makefile", "^CXXFLAGS.*\\$\\(DEFINES\\)", "CXXFLAGS = %s -fPIC $(DEFINES)" % get.CXXFLAGS())
    # Get designer plugin's Makefile
    shelltools.cd("../designer-Qt4Qt5/")
    shelltools.system("qmake designer.pro INCLUDEPATH+=../Qt4Qt5 QMAKE_LIBDIR+=../Qt4Qt5")
    # Change C/XXFLAGS of designer plugin's makefile
    pisitools.dosed("Makefile", "^CFLAGS.*\\$\\(DEFINES\\)", "CFLAGS = %s -fPIC $(DEFINES)" % get.CFLAGS())
    pisitools.dosed("Makefile", "^CXXFLAGS.*\\$\\(DEFINES\\)", "CXXFLAGS = %s -fPIC $(DEFINES)" % get.CXXFLAGS())
def build():
    """Build the Qt library, the designer plugin, and both Python bindings.

    The Python source tree is copied to Python3 first so the py2 and py3
    bindings can be configured and built independently.
    """
    shelltools.system("cp -rf Python Python3")
    shelltools.cd("Qt4Qt5")
    qt5.make()
    shelltools.cd("../designer-Qt4Qt5/")
    qt5.make()
    # Get Makefile of qscintilla-python via sip
    shelltools.cd("../Python")
    pythonmodules.run("configure.py -n ../Qt4Qt5 -o ../Qt4Qt5 -c --pyqt=PyQt5 --pyqt-sipdir=/usr/share/sip/Py2Qt5 --qsci-sipdir=/usr/share/sip/Py2Qt5 --sip-incdir=/usr/lib/python2.7/site-packages --qmake /usr/bin/qmake")
    # sip emits include paths under /usr/include/qt; the headers actually
    # live under /usr/include/qt5 on this distro.
    pisitools.dosed("Makefile", "/usr/include/qt/QtPrintSupport", "/usr/include/qt5/QtPrintSupport")
    pisitools.dosed("Makefile", "/usr/include/qt/QtWidgets", "/usr/include/qt5/QtWidgets")
    autotools.make()
    shelltools.cd("../Python3")
    pythonmodules.run("configure.py -n ../Qt4Qt5 -o ../Qt4Qt5 -c --pyqt=PyQt5 --qmake /usr/bin/qmake", pyVer = "3")
    pisitools.dosed("Makefile", "/usr/include/qt/QtPrintSupport", "/usr/include/qt5/QtPrintSupport")
    pisitools.dosed("Makefile", "/usr/include/qt/QtWidgets", "/usr/include/qt5/QtWidgets")
    autotools.make()
def install():
    """Install the library, designer plugin, both Python bindings, and docs."""
    shelltools.cd("Qt4Qt5")
    qt5.install("INSTALL_ROOT=%s" % get.installDIR())
    shelltools.cd("../designer-Qt4Qt5/")
    qt5.install("INSTALL_ROOT=%s" % get.installDIR())
    #build and install qscintilla-python
    shelltools.cd("../Python3")
    #autotools.rawInstall("DESTDIR=%s" % get.installDIR())
    qt5.install("INSTALL_ROOT=%s" % get.installDIR())
    pisitools.insinto("/usr/lib/python3.4/site-packages/PyQt5", "Qsci.so")
    shelltools.cd("../Python")
    #autotools.rawInstall("DESTDIR=%s" % get.installDIR())
    qt5.install("INSTALL_ROOT=%s" % get.installDIR())
    pisitools.insinto("/usr/lib/python2.7/site-packages/PyQt5", "Qsci.so")
    shelltools.cd("..")
    # API docs and the Scintilla reference, plus top-level docs/license.
    pisitools.dohtml("doc/html-Qt4Qt5/")
    pisitools.insinto("/usr/share/doc/%s/Scintilla" % get.srcNAME(), "doc/Scintilla/*")
    pisitools.dodoc("LICENSE*", "NEWS", "README")
| [
"ergunsalman@hotmail.com"
] | ergunsalman@hotmail.com |
47c23f65420d56807a4e91bb39696a8fc37c5d88 | 703fc4b6cce01eee98c0604a275574d2925f92ef | /cpe101/homework4/data.py | 5ce2acc0610ee763b203d6a406dee7e2ecd41633 | [] | no_license | aleksvetushko/C-l-P-ly | f020c4e65cd05fd3d57c133828a2916ab5778f99 | 879a3bd2905613abea4773a371e783a6c35fce91 | refs/heads/master | 2021-01-25T06:24:25.981493 | 2017-06-12T23:26:49 | 2017-06-12T23:26:49 | 93,557,206 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,075 | py | import utility
class Point:
    """A point in 3-D space, compared component-wise with a tolerance."""

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def __eq__(self, other):
        # Bug fix: the original returned the literal True on a match and fell
        # off the end (implicit None) otherwise; return the boolean itself so
        # inequality yields False.
        return (utility.epsilon_equal(self.x, other.x, epsilon=0.00001)
                and utility.epsilon_equal(self.y, other.y, epsilon=0.00001)
                and utility.epsilon_equal(self.z, other.z, epsilon=0.00001))
class Vector:
    """A 3-D vector, compared component-wise with a tolerance."""

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def __eq__(self, other):
        # Bug fix: return the boolean directly instead of True/implicit None,
        # so inequality yields False rather than None.
        return (utility.epsilon_equal(self.x, other.x, epsilon=0.00001)
                and utility.epsilon_equal(self.y, other.y, epsilon=0.00001)
                and utility.epsilon_equal(self.z, other.z, epsilon=0.00001))
class Ray:
    """A ray: an origin point `pt` plus a direction `dir`."""

    def __init__(self, pt, dir):
        self.pt = pt
        self.dir = dir

    def __eq__(self, other):
        # Bug fix: return the combined boolean instead of True/implicit None,
        # so inequality yields False rather than None.
        return (utility.epsilon_equal(self.pt.x, other.pt.x, epsilon=0.00001)
                and utility.epsilon_equal(self.pt.y, other.pt.y, epsilon=0.00001)
                and utility.epsilon_equal(self.pt.z, other.pt.z, epsilon=0.00001)
                and utility.epsilon_equal(self.dir.x, other.dir.x, epsilon=0.00001)
                and utility.epsilon_equal(self.dir.y, other.dir.y, epsilon=0.00001)
                and utility.epsilon_equal(self.dir.z, other.dir.z, epsilon=0.00001))
class Sphere:
    """A sphere with a surface colour and finish, used by the ray tracer."""

    def __init__(self, center, radius, color, finish):
        self.center = center
        self.radius = radius
        self.color = color
        self.finish = finish
    def __eq__(self, other):
        # NOTE(review): epsilon_equal is called here with Point/Color/Finish
        # objects, not scalars — verify utility.epsilon_equal supports
        # non-numeric arguments (elsewhere it is only used on floats).
        center = utility.epsilon_equal(self.center, other.center, epsilon=0.00001)
        radius = utility.epsilon_equal(self.radius, other.radius, epsilon=0.00001)
        color = utility.epsilon_equal(self.color, other.color, epsilon=0.00001)
        finish = utility.epsilon_equal(self.finish, other.finish, epsilon=0.00001)
        return center and radius and color and finish
class Color:
    """An RGB colour triple."""

    def __init__(self, r, g, b):
        self.r = r
        self.g = g
        self.b = b

    def __eq__(self, other):
        """Colours are equal when every channel matches within epsilon."""
        same_r = utility.epsilon_equal(self.r, other.r, epsilon=0.00001)
        same_g = utility.epsilon_equal(self.g, other.g, epsilon=0.00001)
        same_b = utility.epsilon_equal(self.b, other.b, epsilon=0.00001)
        return same_r and same_g and same_b
class Finish:
    """Surface-finish parameters used for shading."""

    def __init__(self, ambient, diffuse, specular, roughness):
        self.ambient = ambient
        self.diffuse = diffuse
        self.specular = specular
        self.roughness = roughness

    def __eq__(self, other):
        """Finishes are equal when every component matches within epsilon."""
        ambient_ok = utility.epsilon_equal(self.ambient, other.ambient, epsilon=0.00001)
        diffuse_ok = utility.epsilon_equal(self.diffuse, other.diffuse, epsilon=0.00001)
        specular_ok = utility.epsilon_equal(self.specular, other.specular, epsilon=0.00001)
        roughness_ok = utility.epsilon_equal(self.roughness, other.roughness, epsilon=0.00001)
        return ambient_ok and diffuse_ok and specular_ok and roughness_ok
class Light:
    """A point light source with a position and a colour."""

    def __init__(self, pt, color):
        self.pt = pt
        self.color = color

    def __eq__(self, other):
        # NOTE(review): epsilon_equal receives Point/Color objects here —
        # confirm utility.epsilon_equal supports non-scalar arguments.
        point = utility.epsilon_equal(self.pt, other.pt, epsilon=0.00001)
        color = utility.epsilon_equal(self.color, other.color, epsilon=0.00001)
        # Bug fix: the comparison results were computed but never returned,
        # so __eq__ always evaluated to None regardless of the values.
        return point and color
| [
"alexvetushko@icloud.com"
] | alexvetushko@icloud.com |
bcc0cf6b29a4efd04c84ec9f456335e0199607db | d580371108f0b60e675ab61d1b7cea856eab7540 | /MaMomeDashboard/venv/bin/jupyter-trust | 61652ad4053080b2fd18ce9af2b164b704828693 | [] | no_license | lynetteshaw/MaMome_summarize | a2b2419a5b9f6fbbc5d04ff1a221877b15c03964 | dc24b4416bc0b494ce4ad1dcc5dcdbd8dbfb9e76 | refs/heads/master | 2023-01-01T23:02:29.556193 | 2020-10-27T05:07:41 | 2020-10-27T05:07:41 | 300,387,668 | 0 | 2 | null | 2022-12-20T22:39:57 | 2020-10-01T18:43:50 | Python | UTF-8 | Python | false | false | 314 | #!/home/shawla/Documents/GitHub/MaMome_summarize/MaMomeDashboard/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from nbformat.sign import TrustNotebookApp
if __name__ == '__main__':
    # Strip the setuptools "-script.pyw" / ".exe" suffix from argv[0] so the
    # app reports a clean program name, then launch jupyter-trust.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(TrustNotebookApp.launch_instance())
| [
"alt.shawla@gmail.com"
] | alt.shawla@gmail.com | |
d7c281e7d4a49c4396ef1c0d09009eb8a3e8b47f | 5cccd576070ec78f254bd3d1dff774a96d21fce5 | /app/user/tests/test_user_api.py | 7db5dff838423e456fc7dfe3a5efdb9d81a1fd3b | [
"MIT"
] | permissive | astepe/recipe-app-api | 14a2461bebf22dd0594668a33369ce6696537765 | 0f277fe7d7c06039faf7838dc24b646b07fbf708 | refs/heads/master | 2020-05-15T14:24:44.275854 | 2019-05-28T16:32:20 | 2019-05-28T16:32:20 | 182,335,673 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,317 | py | from django.test import TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
# test framework for making requests to our api and check responses
from rest_framework.test import APIClient
# contains status codes in human-readable format
from rest_framework import status
# constant variable for test url
# Resolved endpoints of the user API under test.
CREATE_USER_URL = reverse('user:create')
TOKEN_URL = reverse('user:token')
ME_URL = reverse('user:me')
# helper function to get the user model
def create_user(**params):
    """Test helper: create and persist a user with the active user model."""
    user_model = get_user_model()
    return user_model.objects.create_user(**params)
# public APIs are just apis that aren't authenticated
class PublicUsersAPITests(TestCase):
    """Test the users API (public, i.e. unauthenticated, endpoints)."""

    def setUp(self):
        self.client = APIClient()

    # all tests will use a new and refreshed instance of the database so
    # we don't need to worry about not using the same data in each test
    def test_create_valid_user_success(self):
        """Test creating user with valid payload is successful"""
        # payload is the object that you pass to the api when you make
        # the request
        payload = {
            'email': 'test@test.com',
            'password': 'testpass',
            'name': 'Test name',
        }
        # user should get created and saved to database when request is made
        response = self.client.post(CREATE_USER_URL, payload)
        # check that the request was successful
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        # check that the user was saved to the database by trying to get it
        user = get_user_model().objects.get(**response.data)
        # check that the password is correct
        self.assertTrue(user.check_password(payload['password']))
        # check that the password was not sent with the response. This could be
        # a potential security vulnerability
        self.assertNotIn('password', response.data)

    def test_user_exists(self):
        """Test creating a user that already exists FAILS"""
        payload = {'email': 'test@test.com', 'password': 'testpass'}
        # create and save user to db
        create_user(**payload)
        # then when the post request is made it must fail
        response = self.client.post(CREATE_USER_URL, payload)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_password_too_short(self):
        """Test that the password must be more than 5 characters"""
        payload = {'email': 'test@test.com', 'password': 'pw'}
        response = self.client.post(CREATE_USER_URL, payload)
        # not only do we check that the server returned a 400, but that the
        # user was ALSO NOT saved to database
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        user_exists = get_user_model().objects.filter(
            email=payload['email']
        ).exists()
        self.assertFalse(user_exists)

    def test_create_token_for_user(self):
        """Test that a token is created for the user"""
        payload = {'email': 'ari@test.com', 'password': 'testpass'}
        create_user(**payload)
        response = self.client.post(TOKEN_URL, payload)
        self.assertIn('token', response.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_create_token_invalid_credentials(self):
        """Test that token is not created if invalid credentials are given"""
        create_user(email='test@test.com', password='testpass')
        # Bug fix: the email literal was garbled by an accidental paste
        # ('test@if invalid credenttest.com'); use the existing user's email
        # with a wrong password so this actually tests bad credentials
        # rather than a nonexistent account.
        payload = {'email': 'test@test.com',
                   'password': 'wrong'}
        response = self.client.post(TOKEN_URL, payload)
        self.assertNotIn('token', response.data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_create_token_no_user(self):
        """Test that token is not created if user doesn't exist"""
        payload = {'email': 'test@test.com', 'password': 'testpass'}
        response = self.client.post(TOKEN_URL, payload)
        self.assertNotIn('token', response.data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_create_token_missing_field(self):
        """Test that email and password are required"""
        response = self.client.post(TOKEN_URL,
                                    {'email': 'one', 'password': ''})
        self.assertNotIn('token', response.data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_retrieve_user_unauthorized(self):
        """Test that authentication is requried for users"""
        response = self.client.get(ME_URL)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateUsersAPITests(TestCase):
    """Test API requests that require authentication"""
    def setUp(self):
        # Create a user and force-authenticate the client as that user so
        # each test starts already logged in.
        payload = {'email': 'ari@test.com', 'password': 'testpass'}
        self.user = create_user(**payload)
        self.client = APIClient()
        self.client.force_authenticate(user=self.user)
    def test_retrieve_profile_success(self):
        """Test retrieving profile for logged in user"""
        response = self.client.get(ME_URL)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # The profile must expose name and email only (no password).
        self.assertEqual(response.data, {
            'name': self.user.name,
            'email': self.user.email,
        })
    def test_post_me_not_allowed(self):
        """Test that post is not allowed on the me url"""
        response = self.client.post(ME_URL, {})
        self.assertEqual(response.status_code,
                         status.HTTP_405_METHOD_NOT_ALLOWED)
    def test_update_user_profile(self):
        """Test that the user can update profile for auth user"""
        payload = {'name': 'new name', 'password': 'newpass123'}
        response = self.client.patch(ME_URL, payload)
        # updated user with latest values from database. Basically get the
        # updated information from the database and set it to the user object
        self.user.refresh_from_db()
        # verify updates were made by comparing to payload
        self.assertEqual(self.user.name, payload['name'])
        self.assertTrue(self.user.check_password(payload['password']))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
| [
"arisstepe@gmail.com"
] | arisstepe@gmail.com |
bb0064bac911eeca385697eb7bb59e527553579f | 14522f0116c5b74c115a4193f92976f4bb5c5c09 | /Python/Combinatorial Search/Programming Selection/prog.py | cc2b3e49202bbb0d56feffb27160bae07df6dbd8 | [] | no_license | Lykaos/Programming-Under-Pressure | c7099b0113ee2d9b9cc543eaad6b3bb547a3757b | a205c236567a181dc2e833073afc48c8e826d7aa | refs/heads/master | 2020-06-03T20:55:46.644753 | 2019-06-13T09:22:12 | 2019-06-13T09:22:12 | 191,727,960 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,699 | py | ############################################################
# #
# DD2458 Problem Solving and Programming Under Pressure #
# Hw 9 - Programming Selection #
# Eduardo Rodes Pastor (9406031931) #
# #
############################################################
import sys
from itertools import combinations
def solve(grouped):
    """Backtracking search: can everyone be split into compatible triples?

    Uses the module-global `dic` mapping each person to the list of people
    they are willing to group with.  `grouped` is the set of people already
    placed in a triple.  Returns the string "possible" or "impossible".
    """
    if len(dic) == len(grouped):
        return "possible"
    # Pick the ungrouped person with the fewest remaining partners (most
    # constrained first).  15 is an upper bound on partner-list length here.
    mini = 15
    min_idx = -1
    for key in dic.keys():
        if key in grouped:
            continue
        cnt = 0
        for value in dic[key]:
            if value not in grouped:
                cnt += 1
        if cnt < 2:
            # Fewer than two free partners: this person can never complete
            # a triple, so prune immediately.
            return "impossible"
        else:
            if (cnt < mini):
                mini = cnt
                min_idx = key
    # Try every pair of the chosen person's partners; the pair must be free
    # and mutually compatible (w[0] in dic[w[1]]).
    ways = combinations(dic[min_idx], 2)
    for w in ways:
        if w[0] in grouped or w[1] in grouped or w[0] not in dic[w[1]]:
            continue
        new_grouped = set(grouped)
        for j in [min_idx, w[0], w[1]]:
            new_grouped.add(j)
        if solve(new_grouped) == "possible":
            return "possible"
    return "impossible"
# Input loop (Python 2 syntax): each test case starts with the number of
# compatibility pairs; a count of 0 terminates the input.
while (True):
    dic = dict()
    n = int(sys.stdin.readline())
    if (n == 0):
        break
    # Build an undirected compatibility graph as adjacency lists.
    for i in range(n):
        line = sys.stdin.readline().split()
        for name in line:
            if name not in dic:
                dic[name] = []
        dic[line[0]].append(line[1])
        dic[line[1]].append(line[0])
    # A perfect partition into triples requires a multiple of three people.
    if (len(dic) % 3 != 0):
        print "impossible"
    else:
        print solve(set())
"dragon11_r@hotmail.com"
] | dragon11_r@hotmail.com |
457511baa39c93fcb58cbf7a167deb248fbf97f0 | 8deef5778d0104682d9e1c25d5ef8fc9a2e63feb | /PyInstaller/hooks/hook-h5py.py | 449c848a3ec3624f6dc9d192bc3d65a5ea3ac1ff | [
"MIT",
"GPL-1.0-or-later",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Weeeendi/Picture2Text | a2715a9c0f2d4749eab4768dea16f9720567557e | 1919d99327b4360291b111fc8c122fffdce7ccc5 | refs/heads/master | 2022-07-03T06:30:55.664995 | 2022-06-03T13:01:35 | 2022-06-03T13:01:35 | 195,062,567 | 0 | 3 | MIT | 2022-06-03T13:01:36 | 2019-07-03T13:47:27 | Python | UTF-8 | Python | false | false | 535 | py | #-----------------------------------------------------------------------------
# Copyright (c) 2013-2018, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
"""
Hook for http://pypi.python.org/pypi/h5py/
"""
# h5py loads these extension modules dynamically, so PyInstaller's static
# analysis misses them; list them explicitly for bundling.
hiddenimports = ['h5py._proxy', 'h5py.utils', 'h5py.defs', 'h5py.h5ac']
| [
"wendi1078771091@gmail.com"
] | wendi1078771091@gmail.com |
8379d136bb7ffb1733dc7ce6e2af8e4a941d0241 | a30591ba0a14c905f28c10a7d64aa2e05ff7ed25 | /howdoi/practice.py | b57e435df9a5e4760ce4eaf4d71bcbd0da0ff507 | [] | no_license | oceanumeric/Reading_Great_Code | 21f97f9a7038349efe80c334a2393e4e369a1a08 | 2e879888ca3d0bcfe46cafea34e3777a34385068 | refs/heads/main | 2023-08-07T20:22:12.204386 | 2021-09-22T16:42:53 | 2021-09-22T16:42:53 | 403,096,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,256 | py | import logging
import os
import requests
import sys
from urllib.request import getproxies
from urllib.parse import quote as url_quote
# ANSI terminal escape codes used for coloured console output.
BOLD = '\033[1m'
GREEN = '\033[92m'
RED = '\033[91m'
UNDERLINE = '\033[4m'
END_FORMAT = '\033[0m' # append to string to end text formatting.
# Pool of browser User-Agent strings; one is picked at random per request
# to look less like a bot.
USER_AGENTS = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:11.0) \
    Gecko/20100101 Firefox/11.0',
               'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:22.0) \
                Gecko/20100 101 Firefox/22.0',
               'Mozilla/5.0 (Windows NT 6.1; rv:11.0) \
                   Gecko/20100101 Firefox/11.0',
               ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) \
                   AppleWebKit/536.5 (KHTML, like Gecko) '
                'Chrome/19.0.1084.46 Safari/536.5'),
               ('Mozilla/5.0 (Windows; Windows NT 6.1) AppleWebKit/536.5 \
                   (KHTML, like Gecko) Chrome/19.0.1084.46'
                'Safari/536.5'),)
# When SSL is disabled we fall back to plain http and skip certificate
# verification on every request.
DISABLE_SSL = True
if DISABLE_SSL:  # Set http instead of https
    SCHEME = 'http://'
    VERIFY_SSL_CERTIFICATE = False
else:
    SCHEME = 'https://'
    VERIFY_SSL_CERTIFICATE = True
# Search-URL templates: {0} is the site filter, {1} the url-quoted query.
SEARCH_URLS = {
    'bing': SCHEME + 'www.bing.com/search?q=site:{0}%20{1}&hl=en',
    'google': SCHEME + 'www.google.com/search?q=site:{0}%20{1}&hl=en',
    'duckduckgo': SCHEME + 'duckduckgo.com/html?q=site:{0}%20{1}&t=hj&ia=web'
}
# One shared session so connections (and cookies) are reused.
search_session = requests.session()
def _random_int(width):
# width: length of bytes 1bytes = 8 bit = 2^8 = 256
bres = os.urandom(width)
if sys.version < '3':
ires = int(bres.encode('hex'), 16)
else:
ires = int.from_bytes(bres, 'little')
return ires
def _random_choice(seq):
    """Pick a pseudo-random element of `seq` using one byte of OS entropy."""
    index = _random_int(1) % len(seq)
    return seq[index]
def get_proxies():
    """Return the system's HTTP(S) proxies with a guaranteed URL scheme.

    Only keys starting with "http" (i.e. http/https) are kept; any proxy
    value lacking a scheme is prefixed with "http://".
    """
    return {
        key: value if value.startswith('http') else f'http://{value}'
        for key, value in getproxies().items()
        if key.startswith('http')
    }
def _get_search_url(search_engine):
    """Return the URL template for `search_engine`, defaulting to Google."""
    try:
        return SEARCH_URLS[search_engine]
    except KeyError:
        return SEARCH_URLS['google']
def _get_result(url):
    """GET `url` through the shared session and return the response body.

    Uses a random User-Agent, the system proxies, and a Google consent
    cookie.  Raises on HTTP errors; on SSL errors logs a hint about
    disabling SSL verification and re-raises.
    """
    try:
        resp = search_session.get(url,
                                  headers={
                                      'User-Agent': _random_choice(USER_AGENTS)
                                  },
                                  proxies=get_proxies(),
                                  verify=VERIFY_SSL_CERTIFICATE,
                                  cookies={'CONSENT': 'YES+US.en+20170717-00-0'}
                                  )
        resp.raise_for_status()
        return resp.text
    except requests.exceptions.SSLError as error:
        logging.error(
            '%sEncountered an SSL Error. Try using HTTP instead of '
            'HTTPS by disabling SSL verification".\n%s', RED, END_FORMAT
        )
        raise error
if __name__ == '__main__':
    # Quick manual check: build and print a site-restricted search URL.
    URL = 'stackoverflow.com'
    query = 'python list comprehension'
    search_engine = 'google'
    search_url = _get_search_url(search_engine).format(URL, url_quote(query))
    print(search_url)
    print(url_quote(query))
"numerical.ocean@gmail.com"
] | numerical.ocean@gmail.com |
a46e60ebdf24c5dc1a7a082a563e503deea9c428 | 9977e4a5cb94760b380bd0de0faab9c04a3d94db | /examples/plot_simulation2d.py | 84f7ae9dcd7f6be0d65082772180cb538387b9ec | [] | no_license | vishalbelsare/mtw | 82e76826f1382b9602eadad835a9b6355923505c | e15e918774bb5b1e020c5b87572004a552eb571e | refs/heads/master | 2022-11-28T18:57:55.045921 | 2019-08-07T21:55:16 | 2019-08-07T21:55:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,779 | py | """
====================
MTW synthetic images
====================
This example generates 3 synthetic sparse images (as regression coefficients)
which are fed to random gaussian matrices X. Increasing the Wasserstein
hyperparameter increases consistency across regression coefficients.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from mtw import MTW, utils
from mtw.examples_utils import (generate_dirac_images, gaussian_design,
contour_coefs)
print(__doc__)
print("Generating data...")
# Experiment configuration: 4 tasks of 32x32 sparse images.
seed = 42
width, n_tasks = 32, 4
nnz = 3  # number of non zero elements per image
overlap = 0.
positive = True
n_features = width ** 2
n_samples = n_features // 2
"""Generate Coefs and X, Y data..."""
coefs = generate_dirac_images(width, n_tasks, nnz=nnz, positive=positive,
                              seed=seed, overlap=overlap)
coefs_flat = coefs.reshape(-1, n_tasks)
std = 0.25
X, Y = gaussian_design(n_samples, coefs_flat, corr=0.95, sigma=std,
                       scaled=True, seed=seed)
###############################################################################
# set ot params
epsilon = 2.5 / n_features
M = utils.groundmetric2d(n_features, p=2, normed=True)
gamma = utils.compute_gamma(0.8, M)
###############################################################################
# set hyperparameters and fit MTW
# betamax is the smallest l1 penalty that zeroes all coefficients; beta is
# taken as a fraction of it.
betamax = np.array([x.T.dot(y) for x, y in zip(X, Y)]).max() / n_samples
alpha = 10. / n_samples
beta_fr = 0.35
beta = beta_fr * betamax
callback_options = {'callback': True,
                    'x_real': coefs.reshape(- 1, n_tasks),
                    'verbose': True, 'rate': 1}
print("Fitting MTW model...")
mtw = MTW(M=M, alpha=alpha, beta=beta, sigma0=0., positive=positive,
          epsilon=epsilon, gamma=gamma, stable=False, tol_ot=1e-6, tol=1e-4,
          maxiter_ot=10, maxiter=2000, n_jobs=n_tasks,
          gpu=False, **callback_options)
mtw.fit(X, Y)
###############################################################################
# Now we plot the 3 images on top of each other (True), the MTW fitted
# coefficients and their latent Wasserstein barycenter"""
f, axes = plt.subplots(1, 3, figsize=(12, 4))
coefs = coefs.reshape(width, width, -1)
coefs_mtw = mtw.coefs_.reshape(width, width, -1)
thetabar = mtw.barycenter_.reshape(width, width)[:, :, None]
contours = [coefs, coefs_mtw, thetabar]
titles = ["True", "Recovered", "Barycenter"]
cmaps = [cm.Reds, cm.Blues, cm.Greens, cm.Oranges, cm.Greys, cm.Purples]
for ax, data_, t in zip(axes.ravel(), contours, titles):
    contour_coefs(data_, ax, cmaps=cmaps, title=t)
# Re-draw the last panel with the barycenter alone.
axes[-1].clear()
contour_coefs(thetabar, ax=axes[-1], cmaps=cmaps,
              title="barycenter Contours")
plt.tight_layout()
plt.show()
| [
"hicham.janati@inria.fr"
] | hicham.janati@inria.fr |
8e75b88201a1a9c29a76c8dbb9c96749e65847cc | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/response/AlipayOpenAppOpenidBatchqueryResponse.py | 852a2cc617b5cca9294234c0928fbc32c01da61e | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 1,574 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.OpenIdValue import OpenIdValue
class AlipayOpenAppOpenidBatchqueryResponse(AlipayResponse):
    """Response of alipay.open.app.openid.batchquery (generated SDK class).

    Exposes the list of user ids the query rejected and the list of
    resolved open-id values.
    """
    def __init__(self):
        super(AlipayOpenAppOpenidBatchqueryResponse, self).__init__()
        self._illegal_user_id_list = None
        self._open_id_list = None
    @property
    def illegal_user_id_list(self):
        return self._illegal_user_id_list
    @illegal_user_id_list.setter
    def illegal_user_id_list(self, value):
        # Copy the incoming list element by element.
        if isinstance(value, list):
            self._illegal_user_id_list = list()
            for i in value:
                self._illegal_user_id_list.append(i)
    @property
    def open_id_list(self):
        return self._open_id_list
    @open_id_list.setter
    def open_id_list(self, value):
        # Accept either OpenIdValue instances or raw dicts to convert.
        if isinstance(value, list):
            self._open_id_list = list()
            for i in value:
                if isinstance(i, OpenIdValue):
                    self._open_id_list.append(i)
                else:
                    self._open_id_list.append(OpenIdValue.from_alipay_dict(i))
    def parse_response_content(self, response_content):
        """Populate this object from the decoded API response dict."""
        response = super(AlipayOpenAppOpenidBatchqueryResponse, self).parse_response_content(response_content)
        if 'illegal_user_id_list' in response:
            self.illegal_user_id_list = response['illegal_user_id_list']
        if 'open_id_list' in response:
            self.open_id_list = response['open_id_list']
| [
"jishupei.jsp@alibaba-inc.com"
] | jishupei.jsp@alibaba-inc.com |
0bd8731be622f07deac0afac701ca77b88318204 | 53d85f09655e2e9ebc8df1efe5db3854eb3826c8 | /epcnn/epcnn_alerts.py | e9fe176514a4588dd75ffed720503817e2b982b0 | [] | no_license | LinZou2018/reptile | 403353fec6be1fb2d8063f45dc0061d7a22df170 | 1ca08ef2db164af1413fd1d9f72bba87851c28ef | refs/heads/master | 2020-03-25T06:35:19.665704 | 2018-09-18T08:34:16 | 2018-09-18T08:34:16 | 143,511,803 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,077 | py | import re
import headers
import requests
from lxml import etree
from error_document import mistake
from mongodb_news import storageDatabase, rechecking
def storage(number, title, author, timeout, source, texts, label, classify, statement, img):
    """Persist one scraped article into MongoDB via storageDatabase.

    NOTE(review): the "ing" key looks like a typo for "img", but downstream
    readers may rely on it, so it is kept as-is.
    """
    record = {
        "_id": number,
        "title": title,
        "author": author,
        "release_time": timeout,
        "source": source,
        "main": texts,
        "label": label,
        "classify": classify,
        "statement": statement,
        "ing": img,
    }
    storageDatabase(record, come_from="epcnn_alerts")
def download(html, author, number, url):
    """Extract one article page's fields and store them; log failures."""
    try:
        print("epcnn_alerts")
        # Get the article title
        title = html.xpath('/html/body/section/div[1]/div/header/h1/a/text()')[0]
        # Get the release time
        timeout = html.xpath('/html/body/section/div[1]/div/header/div/span[1]/text()')[0]
        # The article's category
        classify = html.xpath('/html/body/section/div[1]/div/header/div/span[2]/a/text()')[0]
        # The article's tags
        label = html.xpath('/html/body/section/div[1]/div/div[5]/a/text()')
        # This site's statement for the article
        statement = html.xpath('/html/body/section/div[1]/div/div[3]/text()')[0]
        # Images referenced by the article
        img = html.xpath('/html/body/section/div[1]/div/article/p/img/@src')
        # Breadcrumb of the article's source location
        source_location = html.xpath('/html/body/div[2]/div/a/text()')
        source = "e能链财经"
        for i in source_location:
            source += "-" + i
        # The article's body text
        texts = html.xpath('/html/body/section/div[1]/div/article/p/text()')
        storage(number, title, author, timeout, source, texts, label, classify, statement, img)
    except Exception as err:
        mistake(url, err)
def getUrl(html):
    """Walk an index page: fetch and store every listed article.

    Returns True as soon as an already-stored article id is seen (signals
    the caller to stop paginating); otherwise returns None.
    """
    urls = html.xpath('/html/body/section/div[1]/div/article/header/h2/a/@href')
    # The author shown alongside each article on the index page.
    authors = html.xpath('/html/body/section/div[1]/div/article/p[1]/span[1]/text()')
    # Hoisted out of the loop; raw string fixes the invalid "\d" escape.
    pattern = re.compile(r'\d+')
    for url, author in zip(urls, authors):
        # The article's numeric id is embedded in its URL; check for
        # duplicates BEFORE fetching so known articles cost no HTTP request.
        number = re.findall(pattern, url)[0]
        if rechecking(number, come_from="epcnn_alerts"):
            return True
        reponse = requests.get(url, headers=headers.header())
        reponse.encoding = "utf-8"
        if reponse.status_code == 200:
            html = etree.HTML(reponse.text)
            download(html, author, number, url)
        else:
            err = reponse.status_code
            mistake(url, err)
def starts():
    """Crawl listing pages in order until a duplicate or an HTTP error."""
    n = 1
    while True:
        url = 'https://www.epcnn.com/sg/page/%s' % n
        reponse = requests.get(url, headers=headers.header())
        reponse.encoding = "utf-8"
        if reponse.status_code == 200:
            html = etree.HTML(reponse.text)
            # getUrl returns True when it hits an already-stored article,
            # meaning everything newer has been processed — stop paginating.
            data = getUrl(html)
            if data:
                break
            n += 1
        else:
            err = reponse.status_code
            mistake(url, err)
            break
if __name__ == '__main__':
    starts()
] | Zou_Lin_1341@163.com |
5e39eb8955dc1dd950f8be5ec30e52c4a34ae3a7 | b5e253dfaf85ce5c8d449976785d33dafe379e14 | /python/triangle/triangle.py | a68bbcc33a9a08398467e68cc1c3ec109e5e39f2 | [] | no_license | JaydevSR/exercism.io | c18d41c0bb7b5877a42840c427667023050fcd9c | 5d319d4cdac2265c0423f1170887058dfdd0422a | refs/heads/master | 2023-05-29T08:00:17.420386 | 2021-06-06T06:26:34 | 2021-06-06T06:26:34 | 274,627,632 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | def equilateral(sides):
return congruity(sides) == 1
def isosceles(sides):
return congruity(sides) in (1,2)
def scalene(sides):
return congruity(sides) == 3
# Helper
def congruity(sides:list):
sides.sort()
side_set = set(sides)
is_valid = sides[0] + sides[1] > sides[2]
if len(side_set) in (1,2,3) and is_valid:
return len(side_set)
else:
return 0
| [
"jaydevsr7701@gmail.com"
] | jaydevsr7701@gmail.com |
3cab6450b02dc38ade17099a750607d0f894bdac | f856ad2e96263a38a6c717eca995f8a3f66b3f2f | /tools/blade/src/blade/configparse.py | 990831306a2c5afb20c271405663fa07f004250c | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSL-1.0",
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] | permissive | xiake-1024/Pebble | befaee5868905fb804c50d895e80d3489d464200 | 283310f67f5b30adaed5a21df97f706560b3617f | refs/heads/master | 2022-12-09T14:24:51.785830 | 2020-09-26T03:05:05 | 2020-09-26T03:05:05 | 296,327,254 | 0 | 0 | NOASSERTION | 2020-09-17T13:00:05 | 2020-09-17T13:00:04 | null | UTF-8 | Python | false | false | 8,253 | py | """
This is the configuration parse module which parses
the BLADE_ROOT as a configuration file.
"""
import os
import sys
import traceback
import console
from blade_util import var_to_list
from cc_targets import HEAP_CHECK_VALUES
# Global config object
blade_config = None
def config_items(**kwargs):
"""Used in config functions for config file, to construct a appended
items dict, and then make syntax more pretty
"""
return kwargs
class BladeConfig(object):
"""BladeConfig. A configuration parser class. """
def __init__(self, current_source_dir):
self.current_source_dir = current_source_dir
self.current_file_name = ''
self.configs = {
'global_config' : {
'build_path_template': 'build${m}_${profile}',
'duplicated_source_action': 'warning', # Can be 'warning', 'error', 'none'
},
'cc_test_config': {
'dynamic_link': False,
'heap_check': '',
'gperftools_libs': [],
'gperftools_debug_libs': [],
'gtest_libs': [],
'gtest_main_libs': []
},
'cc_binary_config': {
'extra_libs': []
},
'distcc_config': {
'enabled': False
},
'link_config': {
'link_on_tmp': False,
'enable_dccc': False
},
'java_config': {
'source_version': '',
'target_version': ''
},
'thrift_config': {
'thrift': 'thrift',
'thrift_libs': [],
'thrift_incs': [],
},
'fbthrift_config': {
'fbthrift1': 'thrift1',
'fbthrift2': 'thrift2',
'fbthrift_libs': [],
'fbthrift_incs': [],
},
'proto_library_config': {
'protoc': 'thirdparty/protobuf/bin/protoc',
'protobuf_libs': [],
'protobuf_path': '',
'protobuf_incs': [],
'protobuf_php_path': '',
'protoc_php_plugin': '',
},
'cc_config': {
'extra_incs': [],
'cppflags': [],
'cflags': [],
'cxxflags': [],
'linkflags': [],
'c_warnings': [],
'cxx_warnings': [],
'warnings': [],
'cpplint': 'cpplint.py',
'optimize': [],
'benchmark_libs': [],
'benchmark_main_libs': [],
}
}
def _try_parse_file(self, filename):
"""load the configuration file and parse. """
try:
self.current_file_name = filename
if os.path.exists(filename):
execfile(filename)
except SystemExit:
console.error_exit('Parse error in config file %s, exit...' % filename)
def parse(self):
"""load the configuration file and parse. """
self._try_parse_file(os.path.join(os.path.dirname(sys.argv[0]), 'blade.conf'))
self._try_parse_file(os.path.expanduser('~/.bladerc'))
self._try_parse_file(os.path.join(self.current_source_dir, 'BLADE_ROOT'))
def update_config(self, section_name, append, user_config):
"""update config section by name. """
config = self.configs.get(section_name, {})
if config:
if append:
self._append_config(section_name, config, append)
self._replace_config(section_name, config, user_config)
else:
console.error('%s: %s: unknown config section name' % (
self.current_file_name, section_name))
def _append_config(self, section_name, config, append):
"""Append config section items"""
if not isinstance(append, dict):
console.error('%s: %s: append must be a dict' %
(self.current_file_name, section_name))
else:
for k in append:
if k in config:
if isinstance(config[k], list):
config[k] += var_to_list(append[k])
else:
console.warning('%s: %s: config item %s is not a list' %
(self.current_file_name, section_name, k))
else:
console.warning('%s: %s: unknown config item name: %s' %
(self.current_file_name, section_name, k))
def _replace_config(self, section_name, config, user_config):
"""Replace config section items"""
for k in user_config:
if k in config:
if isinstance(config[k], list):
user_config[k] = var_to_list(user_config[k])
else:
user_config[k] = user_config[k]
else:
console.warning('%s: %s: unknown config item name: %s' %
(self.current_file_name, section_name, k))
del user_config[k]
config.update(user_config)
def get_config(self, section_name):
"""get config section, returns default values if not set """
return self.configs.get(section_name, {})
def cc_test_config(append=None, **kwargs):
"""cc_test_config section. """
heap_check = kwargs.get('heap_check')
if heap_check is not None and heap_check not in HEAP_CHECK_VALUES:
console.error_exit('cc_test_config: heap_check can only be in %s' %
HEAP_CHECK_VALUES)
blade_config.update_config('cc_test_config', append, kwargs)
def cc_binary_config(append=None, **kwargs):
"""cc_binary_config section. """
blade_config.update_config('cc_binary_config', append, kwargs)
__DUPLICATED_SOURCE_ACTION_VALUES = set(['warning', 'error', 'none', None])
def global_config(append=None, **kwargs):
"""global_config section. """
duplicated_source_action = kwargs.get('duplicated_source_action')
if duplicated_source_action not in __DUPLICATED_SOURCE_ACTION_VALUES:
console.error_exit('Invalid global_config.duplicated_source_action '
'value, can only be in %s' % __DUPLICATED_SOURCE_ACTION_VALUES)
blade_config.update_config('global_config', append, kwargs)
def distcc_config(append=None, **kwargs):
"""distcc_config. """
blade_config.update_config('distcc_config', append, kwargs)
def link_config(append=None, **kwargs):
"""link_config. """
blade_config.update_config('link_config', append, kwargs)
def java_config(append=None, **kwargs):
"""java_config. """
blade_config.update_config('java_config', append, kwargs)
def proto_library_config(append=None, **kwargs):
"""protoc config. """
path = kwargs.get('protobuf_include_path')
if path:
console.warning(('%s: proto_library_config: protobuf_include_path has '
'been renamed to protobuf_incs, and become a list') %
blade_config.current_file_name)
del kwargs['protobuf_include_path']
if isinstance(path, basestring) and ' ' in path:
kwargs['protobuf_incs'] = path.split()
else:
kwargs['protobuf_incs'] = [path]
blade_config.update_config('proto_library_config', append, kwargs)
def thrift_library_config(append=None, **kwargs):
"""thrift config. """
blade_config.update_config('thrift_config', append, kwargs)
def fbthrift_library_config(append=None, **kwargs):
"""fbthrift config. """
blade_config.update_config('fbthrift_config', append, kwargs)
def cc_config(append=None, **kwargs):
"""extra cc config, like extra cpp include path splited by space. """
if 'extra_incs' in kwargs:
extra_incs = kwargs['extra_incs']
if isinstance(extra_incs, basestring) and ' ' in extra_incs:
console.warning('%s: cc_config: extra_incs has been changed to list' %
blade_config.current_file_name)
kwargs['extra_incs'] = extra_incs.split()
blade_config.update_config('cc_config', append, kwargs)
| [
"chexiongsheng@qq.com"
] | chexiongsheng@qq.com |
578c603c700507c4432334a4b884cf285d91ef14 | c956a50ecbfa9afa4ece21384c499bef0f95d503 | /VideoInfo/asgi.py | f50a42af787471e61d9829cd5d6020adf62928bc | [] | no_license | RenZihou/VideoInfo | 4bf3460f9f91676e9cdf1cab8924ae27c6d0faed | 6a3272108f459554b2c7c514506ed26d43c4d04b | refs/heads/main | 2023-07-25T18:21:31.841003 | 2021-09-07T08:02:21 | 2021-09-07T08:02:21 | 401,346,688 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
ASGI config for VideoInfo project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'VideoInfo.settings')
application = get_asgi_application()
| [
"renzihou.bj@gmail.com"
] | renzihou.bj@gmail.com |
e88e9f76f48b5d5cb02126cb89512c89157825a7 | c515560b515019d984dca3dfb65e292d0741369e | /apprest/migrations/0001_auto_20190123_1156.py | 6aef802cfedc1e494c05f4c301d5d6a1297fbf80 | [
"MIT"
] | permissive | Calipsoplus/calipsoplus-backend | 37ec443b9947b6e496434efca7ecbb29bfc43b53 | 3f6b034f16668bc154b0f4b759ed62b055f41647 | refs/heads/develop | 2022-12-11T23:29:53.143566 | 2020-09-09T09:13:09 | 2020-09-09T09:13:09 | 153,811,662 | 4 | 10 | MIT | 2022-12-08T02:26:16 | 2018-10-19T16:27:31 | Python | UTF-8 | Python | false | false | 546 | py | # Generated by Django 2.0.2 on 2019-01-23 11:56
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('apprest', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='calipsoexperiment',
old_name='serial_number',
new_name='proposal_id',
),
migrations.RenameField(
model_name='historicalcalipsoexperiment',
old_name='serial_number',
new_name='proposal_id',
),
]
| [
"acampsm@cells.es"
] | acampsm@cells.es |
18e94dc2803689c51398a27a1ee6c5b57c7d566c | 9ee54d71d4648b10c9c66d53a29a891c4a035390 | /src/portfolio/blog/apps.py | 3f45c6e25f55ab7deb7424494a70d91f32cc364a | [] | no_license | saurav389/portfolio | 4913625ea734b01fc6cbc15ccc6c482a0750ed9d | 5d9e2484155335519af1d8c85734354d4530f15a | refs/heads/master | 2023-06-20T20:55:42.905580 | 2021-07-21T16:41:11 | 2021-07-21T16:41:11 | 385,015,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | from django.apps import AppConfig
class BlogConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'blog'
# def ready(self):
# import users.signals
| [
"sauravkumar5star@gmail.com"
] | sauravkumar5star@gmail.com |
f5a4640726561393b9f539c6d0b0824e4756d89f | 2e152e7b1da721ffb9c001db805455f78bcdbd3e | /code/删除链表的倒数第N个节点.py | 7a584c840a028df016aba31a946ccf3c3852cf59 | [] | no_license | fxyan/data-structure | ecb092f17464e4e272035358c8016528f4430374 | aa2088aac635625cd61b7250387dedf584826257 | refs/heads/master | 2020-03-27T03:20:32.571375 | 2019-05-06T07:14:35 | 2019-05-06T07:14:35 | 145,855,158 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 922 | py | """
给定一个链表,删除链表的倒数第 n 个节点,并且返回链表的头结点。
解法 设置一个key,遍历链表每次k-1如果k>0那么说明没有要删除的节点
如果k <= 0 那么从新遍历链表每次k+1找到k=0的节点就是要删除的节点
"""
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
def removeNthFromEnd(self, head, n: int):
if head is None:
return head
k = n
head_1 = head
res = head
while head_1:
head_1 = head_1.next
k -= 1
if k > 0:
return res
if k == 0:
return res.next
while head:
k += 1
if k == 0:
head.next = head.next.next
break
head = head.next
return res | [
"1350821504@qq.com"
] | 1350821504@qq.com |
1a310d12e5d6aca3b58eccea94976393c70dcc33 | 836d5f7190f6b4503e758c87c71598f18fdfce14 | /2-Veri-Tipleri-ve-Değişkenler/Float-Veri-Tipi.py | e16df4bfbc907194105af1f6be9ca54d54325f37 | [] | no_license | S-Oktay-Bicici/PYTHON-PROGRAMMING | cf452723fd3e7e8ec2aadc7980208d747c502e9a | 22e864f89544249d6309d6f4570a4104bf47346b | refs/heads/main | 2021-11-30T00:19:21.158084 | 2021-11-16T15:44:29 | 2021-11-16T15:44:29 | 316,716,147 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | a = 3.5
print(a)
print(type(a))
b = 2.1
print(b)
print(type(b))
d = 3
print(d)
print(type(d))
z = 3.0
print(z)
print(type(z))
t = 10/5
print(t)
print(type(t))
# // işlemi ile float çıkacak sonucu integer hale getiriyoruz
t = 10//5
print(t)
print(type(t))
#işlem gören sayılardan biri float ise sonuç da float olur
t =10.2//5
print(t)
print(type(t))
#işlem gören sayılardan biri float ise sonuç da float olur
t =10//5.2
print(t)
print(type(t))
| [
"noreply@github.com"
] | noreply@github.com |
db5be71bb1edca1717adeff59eaa6b2e222facf3 | 0588f17dc28db5a32d5b6a502693d732bb2f962d | /Recursion/turtles_first.py | 36d2bc40f975da2ceed13c54d648a5d647b7167e | [] | no_license | drsjb80/CS2inPython | 012a9e6f939480b0670c7be54d6c7efcad0539b7 | 8240d81b8b6c3e6517006545b544e2e42d52ec76 | refs/heads/master | 2020-04-06T21:57:13.872523 | 2018-06-30T00:10:30 | 2018-06-30T00:10:30 | 50,789,214 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | from __future__ import print_function
def how_many_turtles(level):
if 1 == level: return 4
hmt = how_many_turtles(level-1)
return hmt * 4
print (how_many_turtles(1))
print (how_many_turtles(2))
print (how_many_turtles(10))
| [
"drsjb80@gmail.com"
] | drsjb80@gmail.com |
dcba46ce4d521117daba7969c01cc4d44dbe4026 | b7a0ae80f296f0a337014e6f5e345807315fe4fc | /auction/migrations/0004_auto_20180315_1715.py | cf3e4d0c5f211bac5c123cc105b5329ae988a8c0 | [] | no_license | RODJER200586/auction | 3f3b698c6e166c0d92e49af0add62f892482c44f | 1c838eaac21e6412f33881f43a5416d75bd4a9f1 | refs/heads/master | 2022-12-13T01:29:48.173803 | 2018-03-18T06:54:47 | 2018-03-18T06:54:47 | 123,966,908 | 0 | 0 | null | 2022-12-08T00:55:25 | 2018-03-05T19:18:22 | HTML | UTF-8 | Python | false | false | 2,329 | py | # Generated by Django 2.0.2 on 2018-03-15 17:15
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('auction', '0003_auto_20180314_1053'),
]
operations = [
migrations.AlterModelOptions(
name='lot',
options={'verbose_name': 'Лот', 'verbose_name_plural': 'Лоты'},
),
migrations.AlterField(
model_name='lot',
name='description',
field=models.TextField(verbose_name='Описание'),
),
migrations.AlterField(
model_name='lot',
name='finish',
field=models.DateTimeField(verbose_name='Окончание торгов в'),
),
migrations.AlterField(
model_name='lot',
name='image',
field=models.ImageField(upload_to='lots/%Y/%m/%d/', verbose_name='Изображение'),
),
migrations.AlterField(
model_name='lot',
name='inserted_at',
field=models.DateTimeField(auto_now_add=True, verbose_name='Добавлено в'),
),
migrations.AlterField(
model_name='lot',
name='name',
field=models.CharField(max_length=500, verbose_name='Название'),
),
migrations.AlterField(
model_name='lot',
name='start',
field=models.DateTimeField(verbose_name='Начало торгов в'),
),
migrations.AlterField(
model_name='lot',
name='status',
field=models.CharField(choices=[('opened', 'Opened'), ('closed', 'Closed'), ('canceled', 'Canceled')], default='opened', max_length=8, verbose_name='Статус'),
),
migrations.AlterField(
model_name='lot',
name='updated_at',
field=models.DateTimeField(auto_now=True, verbose_name='Обновлено в'),
),
migrations.AlterField(
model_name='lot',
name='winner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Победитель'),
),
]
| [
"rodjer200586@gmail.com"
] | rodjer200586@gmail.com |
c7719ed1ad900b90c84ac39a07805bc5a3df9dd5 | f89ae5f9815ef65e10231a22b15594e50ba4a68f | /venv/bin/pip3.7 | e5e2534a0743af0a91725cbf1dac3c7ea7e6a458 | [] | no_license | shilpahosur25/Weather_App | 92acfd9e11042241516e7b14b8cc835ecb183b64 | 15a8ca5a77a3b52ca4afe7c2aad3191faaeb0f54 | refs/heads/master | 2022-12-17T16:56:09.560363 | 2019-06-16T20:39:22 | 2019-06-16T20:39:22 | 192,234,462 | 0 | 0 | null | 2022-12-08T05:16:27 | 2019-06-16T20:34:49 | Python | UTF-8 | Python | false | false | 410 | 7 | #!/Users/shima/PycharmProjects/sh_weather/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.7'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')()
)
| [
"shilpahosur25@gmail.com"
] | shilpahosur25@gmail.com |
6fa613cc5ab6c61f64fdbdea15ce0918990aa03f | 20007cbd0280251d21f70bf1f0edbb776a81b438 | /starbucks/starbucks/settings.py | 2e3bd9b0c24748d00307ca3437d6b77b82030260 | [
"MIT"
] | permissive | Zhaominxin/MyProject | c2af53449efdac9343850635780a7cf4731b0d13 | cb0156a3379724a8d3ed8d6d742fd6969250bf39 | refs/heads/master | 2021-01-01T19:05:54.307040 | 2018-07-17T14:14:25 | 2018-07-17T14:14:25 | 98,508,443 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,158 | py | # -*- coding: utf-8 -*-
# Scrapy settings for starbucks project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'starbucks'
SPIDER_MODULES = ['starbucks.spiders']
NEWSPIDER_MODULE = 'starbucks.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'starbucks (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'starbucks.middlewares.StarbucksSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'starbucks.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'starbucks.pipelines.StarbucksPipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"343428523@qq.com"
] | 343428523@qq.com |
f67d4b03a9a181a346b2a8fdaaed22e022001e21 | 107b555b0021b300975053eb630458763ae463cc | /Dietmanager/settings.py | ca89016b7da73883f3a0254ba852fd983f7302eb | [] | no_license | GTytoniak/dietmanager | 096f11322160484ea5c7b235b7176f92675329d2 | a09a1916c1a8f0b248dbee86153d9f6ba5a502d1 | refs/heads/master | 2021-01-15T19:59:53.716794 | 2017-08-09T17:50:28 | 2017-08-09T17:50:28 | 99,837,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,280 | py | """
Django settings for Dietmanager project.
Generated by 'django-admin startproject' using Django 1.11.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '6xns8tqum^@wf-$9x-&h2=ko_ywf0ezan5bc81ujdeqyy=be_w'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
#my apps
'dietmanager'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Dietmanager.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Dietmanager.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'HOST': '127.0.0.1',
'NAME': 'dietmanager',
'ENGINE': 'django.db.backends.mysql',
'USER': 'root',
'PASSWORD': 'coderslab',
'OPTIONS': {
'autocommit': True,
},
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Warsaw'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"tyto@poczta.onet.pl"
] | tyto@poczta.onet.pl |
058fc6c307680b8132797732d1c2935f208e2cff | 71e43068e82c91acbb3849169d1723f1375ac27f | /test/test_login_params.py | c5ace7940a02f5cf97942e6a18680162679dbcb8 | [
"MIT"
] | permissive | talon-one/talon_one.py | aa08a1dbddd8ea324846ae022e43d441c57028f6 | 917dffb010e3d3e2f841be9cccba5bba1ea6c5c3 | refs/heads/master | 2023-05-11T18:50:00.041890 | 2023-05-03T20:17:39 | 2023-05-03T20:17:39 | 79,575,913 | 1 | 7 | MIT | 2023-05-03T15:10:14 | 2017-01-20T16:29:46 | Python | UTF-8 | Python | false | false | 2,115 | py | # coding: utf-8
"""
Talon.One API
Use the Talon.One API to integrate with your application and to manage applications and campaigns: - Use the operations in the [Integration API section](#integration-api) are used to integrate with our platform - Use the operation in the [Management API section](#management-api) to manage applications and campaigns. ## Determining the base URL of the endpoints The API is available at the same hostname as your Campaign Manager deployment. For example, if you access the Campaign Manager at `https://yourbaseurl.talon.one/`, the URL for the [updateCustomerSessionV2](https://docs.talon.one/integration-api#operation/updateCustomerSessionV2) endpoint is `https://yourbaseurl.talon.one/v2/customer_sessions/{Id}` # noqa: E501
The version of the OpenAPI document:
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import talon_one
from talon_one.models.login_params import LoginParams # noqa: E501
from talon_one.rest import ApiException
class TestLoginParams(unittest.TestCase):
"""LoginParams unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test LoginParams
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = talon_one.models.login_params.LoginParams() # noqa: E501
if include_optional :
return LoginParams(
email = 'john.doe@example.com',
password = 'admin123456'
)
else :
return LoginParams(
email = 'john.doe@example.com',
password = 'admin123456',
)
def testLoginParams(self):
"""Test LoginParams"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"noreply@github.com"
] | noreply@github.com |
d3f234039f84b458010466dbe11c5863b26dc687 | f307f05c0c96222073942a7ea97698d933a68677 | /app/utils/conditions/operation_mode.py | a0626a0ab70ff4a17a2ce2c45d146dfbdc0a31e0 | [
"MIT"
] | permissive | travelteker/flask_api | 758996710f7eb39e41f4efdc8f9f4d99bbf24bce | 7bd91e62937aee3c555993491aad96a619ef1bbb | refs/heads/master | 2023-04-23T18:02:54.173129 | 2021-05-10T15:14:06 | 2021-05-10T15:14:06 | 365,323,782 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | from os import getenv
from sys import exit
class OperationMode:
@staticmethod
def trigger_action():
"""Method to validate exits value MODE_DEFAULT before start application Flask
:return: None
"""
mode = getenv('ENV')
if mode not in getenv("WHITE_LIST_MODE").split(','):
exit('Aborting execution, please contact with support showing this code <0x00001>')
| [
"devjpozo@gmail.com"
] | devjpozo@gmail.com |
2ac98ecb6790ac1efcbd27dfc11b18c994694e7c | ee58ff8dd90b17a044d5d4d60550bd688942df0f | /webapp/controllers/rest/parsers.py | 0270c1f2a9445f924eb39ed45e9fa09cc1a3c12e | [] | no_license | assqingt/Flaskblog | 49294fbcb34eb01dc927ec9a5199750a74fa83ac | 7dda5dc0ffc9fdac52f5f2ca8826ababec7763bb | refs/heads/master | 2021-06-14T02:30:15.186678 | 2017-02-18T01:02:15 | 2017-02-18T01:02:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,428 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask_restful import reqparse
post_get_parser = reqparse.RequestParser()
post_get_parser.add_argument(
'page',
type=int,
location=['json','args','headers'],
required = False
)
post_get_parser.add_argument(
'user',
type=str,
location=['json','args','headers']
)
post_post_parser = reqparse.RequestParser()
post_post_parser.add_argument(
'title',
type=str,
required=True,
help='title is required'
)
post_post_parser.add_argument(
'text',
type=str,
required=True,
help='body text is required'
)
post_post_parser.add_argument(
'tags',
type=str,
action='append'
)
post_post_parser.add_argument(
'token',
type=str,
required=True,
help='Auth token is required to create posts'
)
user_post_parser = reqparse.RequestParser()
user_post_parser.add_argument(
'username',
type=str,
required=True
)
user_post_parser.add_argument(
'password',
type=str,
required=True
)
post_put_parser = reqparse.RequestParser()
post_put_parser.add_argument(
'token',
type=str,
required=True,
help='Auth token is required to edit post'
)
post_put_parser.add_argument(
'title',
type=str
)
post_put_parser.add_argument(
'text',
type=str
)
post_put_parser.add_argument(
'tags',
type=str,
action='append'
)
post_delete_parser = reqparse.RequestParser()
post_delete_parser.add_argument('token',type=str,required=True,help='Auth token is required to delete post') | [
"assqingt@gmail.com"
] | assqingt@gmail.com |
b16e2e9a66be9969b417d12be51b37b00ed3b38c | 6cc50a15672155f7d66e88830ad1baec6a061077 | /processing/legacy/icetop_llhratio/python/globals.py | 515a24a58ef19c6d8f525f718693f26293ddc978 | [
"MIT"
] | permissive | jrbourbeau/cr-composition | 16b29c672b2d1c8d75c1c45e35fe6bb60b53ffe2 | e9efb4b713492aaf544b5dd8bb67280d4f108056 | refs/heads/master | 2020-06-24T21:48:21.784277 | 2018-11-01T21:30:56 | 2018-11-01T21:30:56 | 74,618,907 | 0 | 1 | MIT | 2018-08-23T21:01:03 | 2016-11-23T22:31:01 | Jupyter Notebook | UTF-8 | Python | false | false | 3,420 | py |
# -*- coding: utf-8 -*-
#
## copyright (C) 2018
# The Icecube Collaboration
#
# $Id$
#
# @version $Revision$
# @date $LastChangedDate$
# @author Hershal Pandya <hershal@udel.edu> Last changed by: $LastChangedBy$
#
import numpy as np
logEnergyBins = np.linspace(3,8,26)
logEnergyBins=np.array([logEnergyBins[i] for i in range(len(logEnergyBins)) if i%2==0],dtype=float)
cosZenBin0 = 0.86
cosZenBins = np.linspace(cosZenBin0, 1.0+ np.finfo(float).eps , (1-cosZenBin0)/0.01+1)
cosZenBins=np.array([cosZenBins[i] for i in range(len(cosZenBins)) if i%2==0],dtype=float)
logChargeBins = np.linspace(-3,4,71)
deltaCharge = 0.1
unhitCharge = logChargeBins[0]-0.5*deltaCharge
logChargeBins = np.hstack([unhitCharge-0.5*deltaCharge, logChargeBins])
excludedCharge = logChargeBins[0]-0.5*deltaCharge
logChargeBins = np.hstack([excludedCharge-0.5*deltaCharge, logChargeBins])
deltaT = 0.1
nBins = 5.0/deltaT
tBinsUp = np.linspace(0,5,nBins+1)
tBinsDown = -1.0*tBinsUp
tBinsDown.sort()
logTBins = np.hstack([tBinsDown[0:-1],tBinsUp])
unhitTime = logTBins[0]-0.5*deltaT
logTBins = np.hstack([unhitTime-0.5*deltaT, logTBins])
excludedTime = logTBins[0]-0.5*deltaT
logTBins = np.hstack([excludedTime-0.5*deltaT, logTBins])
logDBins = np.linspace(0,3.5,36)
pulses1='Shield_HLCSLCTimeCorrectedTankMerged_SplineMPEfast_SRT_Split_InIcePulses_singleHits'
pulses2='Shield_HLCSLCTimeCorrectedTankMerged_SplineMPEfast_SRT_Split_InIcePulses_singleHits_UnHit'
pulses3='IceTopExcludedTanks'
reco_track2='SplineMPEfast_SRT_Split_InIcePulses'
reco_track1='MuEx_mie_SplineMPEfast_SRT_Split_InIcePulses'
def rotate_to_shower_cs(x,y,z,phi,theta,core_x,core_y,core_z):
"""
Rotate to shower CS takes a fit (assumes is set) and returns a rotation matrix.
Requires np.
"""
# counter-clockwise (pi + phi) rotation
d_phi = np.matrix([ [ -np.cos(phi), -np.sin(phi), 0],
[ np.sin(phi), -np.cos(phi), 0],
[ 0, 0, 1] ])
# clock-wise (pi - theta) rotation
d_theta = np.matrix([ [ -np.cos(theta), 0, -np.sin(theta)],
[ 0, 1, 0, ],
[ np.sin(theta), 0, -np.cos(theta)] ])
rotation=d_theta*d_phi
origin = np.array([[core_x], [core_y], [core_z]])
det_cs_position = np.array([[x],
[y],
[z]])
shower_cs_position = rotation*(det_cs_position - origin)
shower_cs_radius = np.sqrt(shower_cs_position[0]**2 + shower_cs_position[1]**2)
return np.float(shower_cs_radius)
def to_shower_cs(fit):
"""
Rotate to shower CS takes a fit (assumes fit.dir is set) and returns a rotation matrix.
Requires numpy.
"""
import numpy
from math import cos, sin
# counter-clockwise (pi + phi) rotation
d_phi = numpy.matrix([ [ -cos(fit.dir.phi), -sin(fit.dir.phi), 0],
[ sin(fit.dir.phi), -cos(fit.dir.phi), 0],
[ 0, 0, 1] ])
# clock-wise (pi - theta) rotation
d_theta = numpy.matrix([ [ -cos(fit.dir.theta), 0, -sin(fit.dir.theta)],
[ 0, 1, 0, ],
[ sin(fit.dir.theta), 0, -cos(fit.dir.theta)] ])
return d_theta*d_phi
| [
"jrbourbeau@gmail.com"
] | jrbourbeau@gmail.com |
d8ff75f7f2b6d46085e4237bee2ae770867c47ee | dc02e1d3bc3794ee03b2ce916b6e8134b2541d07 | /1061/1063.py | d1d4519f7f7fb05efbe2c89ca4e2eb51dbfedc34 | [] | no_license | dddooo9/CodeUp | 2e2a342f4aba98e828af7294898241dc000cfe82 | 3390c41b8e7e5ceec0ab549faa62038fda1929da | refs/heads/master | 2023-08-05T08:29:14.932202 | 2021-09-22T10:02:19 | 2021-09-22T10:02:19 | 325,982,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py | a, b = input().split()
print(int(a) if int(a) > int(b) else int(b)) | [
"dlaehdus99@naver.com"
] | dlaehdus99@naver.com |
9bbcc0c16ede6b46e150e8675a790033d34e559b | d73de5a4d84e892ef885d4fe7a27b0409253bb12 | /pyspider/libs/mycommon/selenium_spider.py | 2051414ec251408ea8a9d3a6612a57c260db7277 | [
"Apache-2.0"
] | permissive | mannuan/mypyspider | 957de1b4f3b74b39db848e7d264eee1ac44c263f | 6f8cbdc725052a10fd8b365e8b78c21f6df4b3ff | refs/heads/master | 2021-09-16T03:04:47.654265 | 2018-06-15T09:27:39 | 2018-06-15T09:27:39 | 108,707,367 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 54,565 | py | # -*- coding:utf-8 -*-
import sys,logging,os,time,random,re,json,inspect,traceback,StringIO
reload(sys)
sys.setdefaultencoding('utf-8')
from selenium import webdriver
from pymongo import MongoClient
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from logging.handlers import TimedRotatingFileHandler
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from pyvirtualdisplay import Display
class SeleniumSpider(object):
    # NOTE: isvirtualdisplay takes precedence over isheadless (see get_driver).
    def __init__(self,isheadless=False,ismobile=False,isvirtualdisplay=False):
        """Create the spider: configure a logger and start a Chrome webdriver.

        isheadless       -- run Chrome headless (ignored when isvirtualdisplay)
        ismobile         -- use a mobile (iPhone) user-agent string
        isvirtualdisplay -- start a pyvirtualdisplay Xvfb display instead of
                            headless mode
        """
        self.isheadless = isheadless
        self.ismobile = ismobile
        self.isvirtualdisplay = isvirtualdisplay
        self.logger = self.get_logger()
        self.driver = self.get_driver()
    def __del__(self):
        # Best-effort cleanup: close the browser when the spider is collected.
        self.driver.quit()
    # 4 is the stack depth of the currently running frame; it is generic for
    # all the *_log helpers below (they all call at the same depth).
    def __get_current_function_name__(self):
        # Returns (outer fn name, outer line no, caller fn name, caller line no)
        # from the live call stack.  Only meaningful when reached through the
        # logging helpers at the expected call depth.
        return (inspect.stack()[4][3],inspect.stack()[4][2],inspect.stack()[3][3],inspect.stack()[3][2])
    def __get_running_func__(self):
        # Format "<Class>.<outer fn>[line].<caller fn>[line]" used as the
        # location prefix by the *_log helpers.
        return "%s.%s[%s].%s[%s]" % (self.__class__.__name__, self.__get_current_function_name__()[0],
                                     self.__get_current_function_name__()[1],
                                     self.__get_current_function_name__()[2],
                                     self.__get_current_function_name__()[3])
def get_logger(self):
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.DEBUG)
# handler = TimedRotatingFileHandler(filename="%s/logs/log_%s.txt"%(os.getcwd(),self.id),
# when='d',interval=1,backupCount=7)
# handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# handler.setFormatter(formatter)
console = logging.StreamHandler()
console.setLevel(logging.DEBUG)
console.setFormatter(formatter)
# logger.addHandler(handler)
logger.addHandler(console)
logger.info("Start print log")
return logger
    def get_driver(self):
        """Build and return a Chrome webdriver configured from the ctor flags."""
        options = webdriver.ChromeOptions()
        if self.ismobile:
            # iPhone UA so sites serve their mobile layout
            options.add_argument(
                'user-agent="Mozilla/5.0 (iPhone; CPU iPhone OS 9_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13C75 Safari/601.1"')
        else:
            # desktop Linux/Chromium UA
            options.add_argument(
                'user-agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.10 Chromium/27.0.1453.93 Chrome/27.0.1453.93 Safari/537.36"')
        options.add_argument('lang=zh_CN.UTF-8')
        if self.isvirtualdisplay:
            # NOTE(review): the Display object is never stored or stopped, so
            # the Xvfb process outlives the driver — confirm intended.
            self.logger.debug('virtualdisplay is running')
            display = Display(visible=0, size=(1440, 900))
            display.start()
        # headless only when no virtual display was requested (precedence rule)
        if self.isvirtualdisplay == False and self.isheadless == True:
            self.logger.debug('headless is running')
            options.add_argument('--headless')
        driver = webdriver.Chrome(chrome_options=options)
        return driver
def new_window(self, url):
newwindow = 'window.open("{}");'.format(url)
self.driver.execute_script(newwindow)
'''
下拉滚动加载
times : 表示倍数
'''
def vertical_scroll_to(self,min_offset=1000,max_offset=5000):
self.driver.execute_script('window.scrollTo(0, document.body.scrollHeight + %s)' % random.randint(min_offset,max_offset))
def vertical_scroll_by(self,offset=100):
self.driver.execute_script('window.scrollBy(0,%s)'%offset)
def merge_dict(self,data1,data2):
return dict(data1, **data2)
    def error_log(self, name='', e=None):
        # Log an ERROR tagged with the calling location, with the current
        # stack trace appended to the message.
        # NOTE(review): uses the Python 2 StringIO module; under Python 3
        # this needs io.StringIO.
        if not e:
            e = ''
        fp = StringIO.StringIO()
        traceback.print_stack(file=fp)
        message = fp.getvalue()+str(e)
        self.logger.error('@%s %s: %s' % (self.__get_running_func__(),name, message))
def warning_log(self, name='', e=None):
if not e:
e = ''
self.logger.warning('@%s %s: %s' % (self.__get_running_func__(),name, e))
def info_log(self, name='', data=None):
if not data:
data = ''
self.logger.info('@%s %s: %s' % (self.__get_running_func__(),name, data))
def debug_log(self, name='', data=None):
if not data:
data = ''
self.logger.debug('@%s %s: %s' % (self.__get_running_func__(),name, data))
def scroll_into_view(self, ele=None):
if not ele:
self.error_log(e='ele不可以为空')
return None
self.driver.execute_script("arguments[0].scrollIntoView(false);", ele)
def focus_on_element(self, ele=None):
if not ele:
self.error_log(e='ele不可以为空')
return None
self.driver.execute_script("arguments[0].focus();", ele)
def until_scroll_into_view_by_css_selector(self, ele=None, css_selector=None):
if not css_selector:
self.error_log(e='css_selector不可以为空')
return None
if not ele:
ele = self.driver
ele = self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector)
self.driver.execute_script("arguments[0].scrollIntoView(false);", ele)
def until_scroll_into_view_by_partial_link_text(self, ele=None, link_text=None):
if not link_text:
self.error_log(e='link_text不可以为空')
return None
if not ele:
ele = self.driver
ele = self.until_presence_of_element_located_by_partial_link_text(ele=ele, link_text=link_text)
self.driver.execute_script("arguments[0].scrollIntoView(false);", ele)
def until_scroll_into_view_by_link_text(self, ele=None, link_text=None):
if not link_text:
self.error_log(e='link_text不可以为空')
return None
if not ele:
ele = self.driver
ele = self.until_presence_of_element_located_by_link_text(ele=ele,link_text=u'%s'%link_text)
self.driver.execute_script("arguments[0].scrollIntoView(false);", ele)
def until_click_by_css_selector(self, ele=None, timeout=10, css_selector=None):
if not css_selector:
self.error_log(e='css_selector不可以为None')
return None
if not ele:
ele = self.driver
return WebDriverWait(driver=ele, timeout=timeout)\
.until(EC.element_to_be_clickable((By.CSS_SELECTOR, css_selector))).click()
def until_click_by_partial_link_text(self, ele=None, timeout=10, link_text=None):
if not link_text:
self.error_log(e='link_text不可以为None')
return None
if not ele:
ele = self.driver
return WebDriverWait(driver=ele, timeout=timeout)\
.until(EC.element_to_be_clickable((By.PARTIAL_LINK_TEXT, u'%s'%link_text))).click()
def until_click_by_link_text(self, ele=None, timeout=10, link_text=None):
if not link_text:
self.error_log(e='link_text不可以为None')
return None
if not ele:
ele = self.driver
return WebDriverWait(driver=ele, timeout=timeout)\
.until(EC.element_to_be_clickable((By.LINK_TEXT, u'%s'%link_text))).click()
def move_to_element(self, ele=None,xoffset=0,yoffset=0):
if not ele:
self.error_log(e='ele不可以为空')
return None
ActionChains(self.driver).move_to_element(ele).move_by_offset(xoffset=xoffset,yoffset=yoffset).perform()
def until_move_to_element_by_css_selector(self, ele=None, css_selector=None):
if not css_selector:
self.error_log(e='css_selector不可以为None')
return None
if not ele:
ele = self.driver
ActionChains(self.driver).move_to_element(
self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector)).perform()
def until_move_to_element_by_partial_link_text(self, ele=None, link_text=None):
if not link_text:
self.error_log(e='link_text不可以为None')
return None
if not ele:
ele = self.driver
ActionChains(self.driver).move_to_element(
self.until_presence_of_element_located_by_partial_link_text(ele=ele,link_text=link_text)).perform()
def until_move_to_element_by_link_text(self, ele=None, link_text=None):
if not link_text:
self.error_log(e='link_text不可以为None')
return None
if not ele:
ele = self.driver
ActionChains(self.driver).move_to_element(
self.until_presence_of_element_located_by_link_text(ele=ele,link_text=link_text)).perform()
def filter_integer(self, str):
return int(re.sub(r'[^\d]*',r'',str))
def filter_float(self, str):
return float(re.sub(r'[^\d.]*',r'',str))
def filter_str(self, str):
return re.sub(r'[\n]*',r'',str).strip()
'''
:param a:区间起点
:param b:区间终点
:param d:除数
:return:
'''
def get_random_time(self, a, b, d):
if d <= 0:
d = 1
c = b
if a > b:
b = a
a = c
pause_time = (lambda x: 10 if x < 10 else x)(random.random() * random.randint(a, b) / d)
self.logger.info('...随机暂停{}秒...'.format(pause_time))
return pause_time
def until_send_enter_by_css_selector(self, ele=None,css_selector=None):
if not css_selector:
self.error_log(e='css_selector不可以为空')
return None
if not ele:
ele = self.driver
self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector).send_keys(Keys.ENTER)
def until_send_enter_by_link_text(self, ele=None,link_text=None):
if not link_text:
self.error_log(e='link_text不可以为空')
return None
if not ele:
ele = self.driver
self.until_presence_of_element_located_by_link_text(ele=ele,link_text=link_text).send_keys(Keys.ENTER)
def until_send_enter_by_particl_link_text(self, ele=None,link_text=None):
if not link_text:
self.error_log(e='link_text不可以为空')
return None
if not ele:
ele = self.driver
self.until_presence_of_element_located_by_partial_link_text(ele=ele,link_text=link_text).send_keys(Keys.ENTER)
def until_send_text_by_css_selector(self, ele=None,css_selector=None, text=None):
if not css_selector or not text:
self.error_log(e='css_selector和text都不可以为空')
return None
if not ele:
ele = self.driver
self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector).send_keys(u'%s'%text)
def until_get_elements_len_by_css_selector(self, ele=None, css_selector=None,timeout=1):
if not css_selector:
self.error_log(e='css_selector不可以为空')
return None
if not ele:
ele = self.driver
return len(self.until_presence_of_all_elements_located_by_css_selector(ele=ele,css_selector=css_selector,
timeout=timeout))
def until_send_key_arrow_down_by_css_selector(self, ele=None,css_selector=None, min_frequency=100,max_frequency=300,
timeout=1):
if not css_selector:
self.error_log(e='css_selector不可以为None')
return None
if not ele:
ele = self.driver
for i in range(random.randint(min_frequency,max_frequency)):
ActionChains(self.driver).move_to_element(
self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector,timeout=timeout))\
.send_keys(Keys.ARROW_DOWN).perform()
def until_send_key_arrow_down_by_partial_link_text(self, ele=None,link_text=None, frequency=100):
if not link_text:
self.error_log(e='link_text不可以为None')
return None
if not ele:
ele = self.driver
for i in range(frequency):
ActionChains(self.driver).move_to_element(
self.until_presence_of_element_located_by_partial_link_text(ele=ele,link_text=link_text))\
.send_keys(Keys.ARROW_DOWN).perform()
def until_send_key_arrow_down_by_link_text(self, ele=None,link_text=None, frequency=100):
if not link_text:
self.error_log(e='link_text不可以为None')
return None
if not ele:
ele = self.driver
for i in range(frequency):
ActionChains(self.driver).move_to_element(
self.until_presence_of_element_located_by_link_text(ele=ele,link_text=link_text))\
.send_keys(Keys.ARROW_DOWN).perform()
'''
判断title,返回布尔值
'''
def until_title_is(self, ele=None, timeout=10, title=None):
if not ele:
ele = self.driver
if not title:
self.error_log(e='标题为空!!!')
return False
return WebDriverWait(ele, timeout).until(EC.title_is(u"%s"%title))
'''
判断title,返回布尔值
'''
def until_title_contains(self, ele=None, timeout=10, title=None):
if not ele:
ele = self.driver
if not title:
self.error_log(e='标题为空!!!')
return False
return WebDriverWait(ele,timeout).until(EC.title_contains(u"%s"%title))
'''
判断某个元素是否被加到了dom树里,并不代表该元素一定可见,如果定位到就返回WebElement
'''
def until_presence_of_element_located_by_id(self, ele=None, timeout=10, id=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_element_located((By.ID, id)))
'''
判断某个元素是否被加到了dom树里,并不代表该元素一定可见,如果定位到就返回WebElement
'''
def until_presence_of_element_located_by_css_selector(self, ele=None, timeout=float(10), css_selector=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_element_located((By.CSS_SELECTOR, css_selector)))
'''
判断某个元素是否被加到了dom树里,并不代表该元素一定可见,如果定位到就返回WebElement
'''
def until_presence_of_element_located_by_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_element_located((By.LINK_TEXT, link_text)))
'''
判断某个元素是否被加到了dom树里,并不代表该元素一定可见,如果定位到就返回WebElement
'''
def until_presence_of_element_located_by_partial_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_element_located((By.PARTIAL_LINK_TEXT, link_text)))
'''
判断元素是否可见,如果可见就返回这个元素
'''
def until_visibility_of_by_id(self, ele=None, timeout=10, id=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.visibility_of((By.ID, id)))
'''
判断元素是否可见,如果可见就返回这个元素
'''
def until_visibility_of_by_css_selector(self, ele=None, timeout=10, css_selector=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.visibility_of((By.CSS_SELECTOR, css_selector)))
'''
判断元素是否可见,如果可见就返回这个元素
'''
def until_visibility_of_by_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.visibility_of((By.LINK_TEXT, link_text)))
'''
判断元素是否可见,如果可见就返回这个元素
'''
def until_visibility_of_by_particl_link_text(self, ele=None, timeout=10, partial_link_text=None):
if not ele:
ele = self.driver
if not partial_link_text:
self.error_log(e='partial_link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.visibility_of((By.PARTIAL_LINK_TEXT, partial_link_text)))
'''
判断是否至少有1个元素存在于dom树中,如果定位到就返回列表
'''
def until_presence_of_all_elements_located_by_id(self, ele=None, timeout=10, id=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.ID, id)))
'''
判断是否至少有1个元素存在于dom树中,如果定位到就返回列表
'''
def until_presence_of_all_elements_located_by_css_selector(self, ele=None, timeout=10, css_selector=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, css_selector)))
'''
判断是否至少有1个元素存在于dom树中,如果定位到就返回列表
'''
def until_presence_of_all_elements_located_by_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.LINK_TEXT, link_text)))
'''
判断是否至少有1个元素存在于dom树中,如果定位到就返回列表
'''
def until_presence_of_all_elements_located_by_partial_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.PARTIAL_LINK_TEXT, u'%s'%link_text)))
'''
判断是否至少有一个元素在页面中可见,如果定位到就返回列表
'''
def until_visibility_of_any_elements_located_by_id(self, ele=None, timeout=10, id=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.ID, id)))
'''
判断是否至少有一个元素在页面中可见,如果定位到就返回列表
'''
def until_visibility_of_any_elements_located_by_css_selector(self, ele=None, timeout=10, css_selector=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, css_selector)))
'''
判断是否至少有一个元素在页面中可见,如果定位到就返回列表
'''
def until_visibility_of_any_elements_located_by_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.LINK_TEXT, link_text)))
'''
判断是否至少有一个元素在页面中可见,如果定位到就返回列表
'''
def until_visibility_of_any_elements_located_by_partial_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.PARTIAL_LINK_TEXT, link_text)))
'''
判断指定的元素中是否包含了预期的字符串,返回布尔值
'''
def until_text_to_be_present_in_element_located_by_id(self, ele=None, timeout=10, id=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.ID, id)))
'''
判断指定的元素中是否包含了预期的字符串,返回布尔值
'''
def until_text_to_be_present_in_element_located_by_css_selector(self, ele=None, timeout=10, css_selector=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, css_selector)))
'''
判断指定的元素中是否包含了预期的字符串,返回布尔值
'''
def until_text_to_be_present_in_element_located_by_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.LINK_TEXT, link_text)))
'''
判断指定的元素中是否包含了预期的字符串,返回布尔值
'''
def until_text_to_be_present_in_element_located_by_partial_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.presence_of_all_elements_located((By.PARTIAL_LINK_TEXT, link_text)))
'''
判断指定元素的属性值中是否包含了预期的字符串,返回布尔值
'''
def until_text_to_be_present_in_element_value_by_id(self, ele=None, timeout=10, id=None, _text=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.text_to_be_present_in_element_value((By.ID, id),u'%s'%_text))
'''
判断指定元素的属性值中是否包含了预期的字符串,返回布尔值
'''
def until_text_to_be_present_in_element_value_by_css_selector(self, ele=None, timeout=10, css_selector=None, _text=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.text_to_be_present_in_element_value((By.CSS_SELECTOR, css_selector),
u'%s'%_text))
'''
判断指定元素的属性值中是否包含了预期的字符串,返回布尔值
'''
def until_text_to_be_present_in_element_value_by_link_text(self, ele=None, timeout=10, link_text=None, _text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.text_to_be_present_in_element_value((By.LINK_TEXT, link_text),
u'%s'%_text))
'''
判断指定元素的属性值中是否包含了预期的字符串,返回布尔值
'''
def until_text_to_be_present_in_element_value_by_partial_link_text(self, ele=None, timeout=10, link_text=None, _text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.text_to_be_present_in_element_value((By.PARTIAL_LINK_TEXT, link_text),
u'%s' % _text))
'''
判断该frame是否可以switch进去,如果可以的话,返回True并且switch进去,否则返回False
'''
def until_frame_to_be_available_and_switch_to_it(self, ele=None, timeout=10):
if not ele:
ele = self.driver
return WebDriverWait(ele, timeout).until(EC.frame_to_be_available_and_switch_to_it(ele))
'''
判断某个元素在是否存在于dom或不可见,如果可见返回False,不可见返回这个元素
'''
def until_invisibility_of_element_located_by_id(self, ele=None, timeout=10, id=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.invisibility_of_element_located((By.ID, id)))
'''
判断某个元素在是否存在于dom或不可见,如果可见返回False,不可见返回这个元素
'''
def until_invisibility_of_element_located_by_css_selector(self, ele=None, timeout=10, css_selector=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, css_selector)))
'''
判断某个元素在是否存在于dom或不可见,如果可见返回False,不可见返回这个元素
'''
def until_invisibility_of_element_located_by_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.invisibility_of_element_located((By.LINK_TEXT, link_text)))
'''
判断某个元素在是否存在于dom或不可见,如果可见返回False,不可见返回这个元素
'''
def until_invisibility_of_element_located_by_partial_link_text(self, ele=None, timeout=10, partial_link_text=None):
if not ele:
ele = self.driver
if not partial_link_text:
self.error_log(e='partial_link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.invisibility_of_element_located((By.LINK_TEXT, partial_link_text)))
'''
判断某个元素中是否可见并且是enable的,代表可点击
'''
def until_element_to_be_clickable_by_id(self, ele=None, timeout=10, id=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_to_be_clickable((By.ID, id)))
'''
判断某个元素中是否可见并且是enable的,代表可点击
'''
def until_element_to_be_clickable_by_css_selector(self, ele=None, timeout=10, css_selector=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_to_be_clickable((By.CSS_SELECTOR, css_selector)))
'''
判断某个元素中是否可见并且是enable的,代表可点击
'''
def until_element_to_be_clickable_by_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_to_be_clickable((By.LINK_TEXT, link_text)))
'''
判断某个元素中是否可见并且是enable的,代表可点击
'''
def until_element_to_be_clickable_by_partial_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_to_be_clickable((By.PARTIAL_LINK_TEXT, link_text)))
'''
等待某个元素从dom树中移除
'''
def until_staleness_of_by_id(self, ele=None, timeout=10, id=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.staleness_of((By.ID, id)))
'''
等待某个元素从dom树中移除
'''
def until_staleness_of_by_css_selector(self, ele=None, timeout=10, css_selector=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.staleness_of((By.CSS_SELECTOR, css_selector)))
'''
等待某个元素从dom树中移除
'''
def until_staleness_of_by_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.staleness_of((By.LINK_TEXT, link_text)))
'''
等待某个元素从dom树中移除
'''
def until_staleness_of_by_partial_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.staleness_of((By.PARTIAL_LINK_TEXT, link_text)))
'''
判断某个元素是否被选中了,一般用在下拉列表
'''
def until_element_to_be_selected_by_id(self, ele=None, timeout=10, id=None):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_to_be_selected((By.ID, id)))
'''
判断某个元素是否被选中了,一般用在下拉列表
'''
def until_element_to_be_selected_by_css_selector(self, ele=None, timeout=10, css_selector=None):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_to_be_selected((By.CSS_SELECTOR, css_selector)))
'''
判断某个元素是否被选中了,一般用在下拉列表
'''
def until_element_to_be_selected_by_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_to_be_selected((By.LINK_TEXT, link_text)))
'''
判断某个元素是否被选中了,一般用在下拉列表
'''
def until_element_to_be_selected_by_partial_link_text(self, ele=None, timeout=10, link_text=None):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_to_be_selected((By.PARTIAL_LINK_TEXT, link_text)))
'''
判断某个元素的选中状态是否符合预期
'''
def until_element_selection_state_to_be_by_id(self, ele=None, timeout=10, id=None, status=True):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_selection_state_to_be((By.ID, id),status))
'''
判断某个元素的选中状态是否符合预期
'''
def until_element_selection_state_to_be_by_css_selector(self, ele=None, timeout=10, css_selector=None, status=True):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_selection_state_to_be((By.CSS_SELECTOR, css_selector),status))
'''
判断某个元素的选中状态是否符合预期
'''
def until_element_selection_state_to_be_by_link_text(self, ele=None, timeout=10, link_text=None, status=True):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_selection_state_to_be((By.LINK_TEXT, link_text),status))
'''
判断某个元素的选中状态是否符合预期
'''
def until_element_selection_state_to_be_by_partial_link_text(self, ele=None, timeout=10, link_text=None, status=True):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_selection_state_to_be((By.PARTIAL_LINK_TEXT, link_text),status))
'''
判断某个元素的选中状态是否符合预期
'''
def until_element_located_selection_state_to_be_by_id(self, ele=None, timeout=10, id=None, status=True):
if not ele:
ele = self.driver
if not id:
self.error_log(e='id为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_located_selection_state_to_be((By.ID, id),status))
'''
判断某个元素的选中状态是否符合预期
'''
def until_element_located_selection_state_to_be_by_css_selector(self, ele=None, timeout=10, css_selector=None, status=True):
if not ele:
ele = self.driver
if not css_selector:
self.error_log(e='css_selector为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_located_selection_state_to_be((By.CSS_SELECTOR, css_selector),status))
'''
判断某个元素的选中状态是否符合预期
'''
def until_element_located_selection_state_to_be_by_link_text(self, ele=None, timeout=10, link_text=None, status=True):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_located_selection_state_to_be((By.LINK_TEXT, link_text),status))
'''
判断某个元素的选中状态是否符合预期
'''
def until_element_located_selection_state_to_be_by_partial_link_text(self, ele=None, timeout=10, link_text=None, status=True):
if not ele:
ele = self.driver
if not link_text:
self.error_log(e='link_text为空!!!')
return None
return WebDriverWait(ele, timeout).until(EC.element_located_selection_state_to_be((By.PARTIAL_LINK_TEXT, link_text),
status))
    '''
    Wait until a JS alert is present; switches to it and returns its handle.
    '''
    def until_alert_is_present(self, ele=None, timeout=10):
        # Returns the Alert object on success; raises TimeoutException if no
        # alert appears within *timeout* seconds.
        if not ele:
            ele = self.driver
        return WebDriverWait(ele, timeout).until(EC.alert_is_present())
'''
获得店铺的关键字段
field : 字段的名字
ele : webelement
css_selector : str
attr : str
regex : str 正则表达式用来去掉不需要的
'''
def get_key_str_field_by_css_selector(self, name='new', ele=None, css_selector=None, attr=None, regex='', repl='', timeout=1,
offset=20,try_times=20):
regex = u'%s'%regex
repl = u'%s'%repl
try:
if ele and css_selector:
self.until_presence_by_vertical_scroll_page_down_by_css_selector(
ele=ele,css_selector=css_selector,offset=offset,timeout=timeout,try_times=try_times)
ele = self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector,timeout=timeout)
elif ele and not css_selector:
ele = ele
elif not ele and css_selector:
self.until_presence_by_vertical_scroll_page_down_by_css_selector(
css_selector=css_selector,offset=offset,timeout=timeout,try_times=try_times)
ele = self.until_presence_of_element_located_by_css_selector(css_selector=css_selector,timeout=timeout)
else:
self.error_log(name=name,e='未指定样式选择器和目标元素,无法取得该字段内容!!!')
return
ActionChains(self.driver).move_to_element(ele).perform()
self.vertical_scroll_by()
if attr:
_str = ele.get_attribute(attr)
else:
_str = ele.text
_str = self.filter_str(_str)
_str = re.sub('%s' % regex, '%s' % repl, _str)
except Exception as e:
self.error_log(name=name, e=e)
_str = None
self.info_log(name=name, data=_str)
return _str
def get_str_field_by_css_selector(self, name='new', ele=None, css_selector=None, attr=None, regex='',repl='', timeout=1,
offset=100,try_times=20):
regex = u'%s'%regex
repl = u'%s'%repl
try:
if ele and css_selector:
self.until_presence_by_vertical_scroll_page_down_by_css_selector(
ele=ele,css_selector=css_selector,offset=offset,timeout=timeout,try_times=try_times)
ele = self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector,timeout=timeout)
elif ele and not css_selector:
ele = ele
elif not ele and css_selector:
self.until_presence_by_vertical_scroll_page_down_by_css_selector(
css_selector=css_selector,offset=offset,timeout=timeout,try_times=try_times)
ele = self.until_presence_of_element_located_by_css_selector(css_selector=css_selector,timeout=timeout)
else:
self.error_log(name=name, e='未指定样式选择器和目标元素,无法取得该字段内容!!!')
return
ActionChains(self.driver).move_to_element(ele).perform()
self.vertical_scroll_by()
if attr:
_str = ele.get_attribute(attr)
else:
_str = ele.text
_str = self.filter_str(_str)
_str = re.sub('%s' % regex, '%s' % repl, _str)
except Exception as e:
self.error_log(name=name, e=e)
_str = None
self.info_log(name=name, data=_str)
return _str
def get_str_list_field_by_css_selector(self, name='new', ele=None, list_css_selector=None,
item_css_selector=None, attr=None, regex='', repl='',timeout=1):
regex = u'%s'%regex
repl = u'%s'%repl
_list = []
try:
list_ele = self.until_presence_of_element_located_by_css_selector(
ele=ele,css_selector=list_css_selector,timeout=timeout)
if not list_ele:
self.warning_log(name=name,e='该字段为空')
return None
for item in self.until_presence_of_all_elements_located_by_css_selector(
ele=list_ele,css_selector=item_css_selector,timeout=timeout):
_str = self.get_str_field_by_css_selector(name=name,ele=item,attr=attr,regex=regex,repl=repl,timeout=timeout)
if _str:
_list.append(_str)
except Exception as e:
self.error_log(name=name,e=e)
self.info_log(name=name, data=_list)
return _list
def get_int_field_by_css_selector(self, name='new', ele=None, css_selector=None, attr=None, regex='',repl='', timeout=1,
offset=20,try_times=20):
regex = u'%s'%regex
repl = u'%s'%repl
try:
if ele and css_selector:
self.until_presence_by_vertical_scroll_page_down_by_css_selector(
ele=ele,css_selector=css_selector,offset=offset,timeout=timeout,try_times=try_times)
ele = self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector,timeout=timeout)
elif ele and not css_selector:
ele = ele
elif not ele and css_selector:
self.until_presence_by_vertical_scroll_page_down_by_css_selector(
css_selector=css_selector,offset=offset,timeout=timeout,try_times=try_times)
ele = self.until_presence_of_element_located_by_css_selector(css_selector=css_selector,timeout=timeout)
else:
self.error_log(name=name, e='未指定样式选择器和目标元素,无法取得该字段内容!!!')
return
ActionChains(self.driver).move_to_element(ele).perform()
self.vertical_scroll_by()
if attr:
_str = ele.get_attribute(attr)
else:
_str = ele.text
_str = self.filter_str(_str)
_str = re.sub('%s' % regex, '%s' % repl, _str)
_int = self.filter_integer(_str)
except Exception as e:
self.error_log(name=name, e=e)
_int = 0
self.info_log(name=name, data=str(_int))
return _int
def get_float_field_by_css_selector(self,name='new', ele=None, css_selector=None, attr=None, regex='',repl='', timeout=1,
                                    offset=20,try_times=20):
    """Extract a float field from the page.

    Same flow as ``get_int_field_by_css_selector`` but converts the cleaned
    text with ``filter_float``.  Returns 0.0 on any error.
    """
    regex = u'%s'%regex
    repl = u'%s'%repl
    try:
        # Resolve the target element from the (ele, css_selector) combination.
        if ele and css_selector:
            self.until_presence_by_vertical_scroll_page_down_by_css_selector(
                ele=ele,css_selector=css_selector,offset=offset,timeout=timeout,try_times=try_times)
            ele = self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector, timeout=timeout)
        elif ele and not css_selector:
            ele = ele
        elif not ele and css_selector:
            self.until_presence_by_vertical_scroll_page_down_by_css_selector(
                css_selector=css_selector,offset=offset,timeout=timeout,try_times=try_times)
            ele = self.until_presence_of_element_located_by_css_selector(css_selector=css_selector, timeout=timeout)
        else:
            # Log message: "no selector and no element given; cannot read field".
            self.error_log(name=name, e='未指定样式选择器和目标元素,无法取得该字段内容!!!')
            return
        ActionChains(self.driver).move_to_element(ele).perform()
        self.vertical_scroll_by()
        if attr:
            _str = ele.get_attribute(attr)
        else:
            _str = ele.text
        _str = self.filter_str(_str)
        _str = re.sub('%s' % regex, '%s' % repl, _str)
        _float = self.filter_float(_str)
    except Exception as e:
        self.error_log(name=name, e=e)
        _float = 0.0
    self.info_log(name=name, data=str(_float))
    return _float
'''
运行一个新建标签页的任务(默认根据url打开标签页)
'''
def run_new_tab_task(self, click_css_selector=None, name='', func=None, url=None,pause_time=1, **args):
    """Open *url* in a new browser tab, run ``func(**args)`` there, close the tab.

    Switches the driver back to the remaining last window afterwards and
    returns whatever *func* returned (``None`` when *func* or *url* is missing).
    """
    if not func:
        # Log message: "tab task has nothing to execute".
        self.warning_log(name=name, e='标签页任务里面没有具体要执行的内容!!!')
        return None
    if not url:
        # Log message: "tab task has no url to open".
        self.error_log(name=name,e='标签页任务里面没有具体要打开的url!!!')
        return None
    self.new_window(url)
    self.driver.switch_to_window(self.driver.window_handles[-1])
    time.sleep(pause_time)
    data = func(**args)
    self.driver.close()
    self.driver.switch_to_window(self.driver.window_handles[-1])
    return data
'''
运行一个新标签页的任务(通过按钮点击打开标签页)
'''
def run_tab_task(self, click_ele=None, name='', func=None,pause_time=1,offset=8,try_times=20, **args):
    """Click *click_ele* to open a new tab, run ``func(**args)`` in it, close it.

    Returns whatever *func* returned, or ``None`` when *func* or *click_ele*
    is missing.
    """
    if not func:
        # Log message: "tab task has nothing to execute".
        self.warning_log(name=name, e='标签页任务里面没有具体要执行的内容!!!')
        return None
    if not click_ele:
        # Log message: "click_ele must not be empty".
        self.error_log(name=name,e='click_ele不可以为空')
        return None
    self.until_click_by_vertical_scroll_page_down(click_ele=click_ele,offset=offset,try_times=try_times)
    self.driver.switch_to_window(self.driver.window_handles[-1])
    time.sleep(pause_time)
    data = func(**args)
    self.driver.close()
    self.driver.switch_to_window(self.driver.window_handles[-1])
    return data
def run_spider(self):
    """Hook for subclasses: implement the actual crawl here."""
    pass
'''
通过长度判断页面是否有更多
'''
def ismore_by_scroll_page_judge_by_len(self, css_selector,min_offset=1000,max_offset=5000,comment_len=None):
    """Scroll down until the count of *css_selector* matches stops growing.

    When *comment_len* is given, stop as soon as at least that many items
    are loaded; otherwise retry once (after a 2s pause) before deciding
    the page has no more content.
    """
    # Log: "start scrolling the page".
    self.info_log(data='...开始下拉页面...')
    while (True):
        list_len = self.until_get_elements_len_by_css_selector(
            css_selector=css_selector,timeout=1)
        self.vertical_scroll_to(min_offset=min_offset,max_offset=max_offset)
        list_len2 = self.until_get_elements_len_by_css_selector(css_selector=css_selector,timeout=1)
        # Log: "current count: %s".
        self.info_log(data='当前数量%s:' % list_len2)
        if list_len == list_len2:
            if comment_len:
                if list_len2 >= comment_len:
                    break
            # Give slow pages a second chance before concluding we are done.
            time.sleep(2)
            self.vertical_scroll_to(min_offset=min_offset,max_offset=max_offset)
            list_len2 = self.until_get_elements_len_by_css_selector(css_selector=css_selector,timeout=1)
            if list_len == list_len2:
                break
    # Log: "finished scrolling the page".
    self.logger.info('...结束下拉页面...')
'''
通过长度判断页面是否有更多
list_css_selector : 列表的css样式
ele_css_selector : 发送向下指令的元素的css样式
frequency : 表示按向下键的次数
'''
def until_ismore_by_send_key_arrow_down_judge_by_len(self, list_css_selector=None,ele_css_selector=None,
                                                     min_frequency=100,max_frequency=300,comment_len=None,timeout=1):
    """Load more list items by sending ARROW_DOWN key presses until the count stalls.

    *list_css_selector* selects the items being counted; *ele_css_selector*
    selects the element that receives the key presses.  Stops early once
    *comment_len* items are present (when given).
    """
    if not list_css_selector:
        # Log message: "list_css_selector must not be empty".
        self.error_log(e='list_css_selector不可以为空!!!')
        return None
    if not ele_css_selector:
        # Log message: "ele_css_selector must not be empty".
        self.error_log(e='ele_css_selector不可以为空!!!')
        return None
    # Log: "start scrolling the page".
    self.info_log(data='...开始下拉页面...')
    while (True):
        list_len = self.until_get_elements_len_by_css_selector(css_selector=list_css_selector,timeout=timeout)
        self.until_send_key_arrow_down_by_css_selector(css_selector=ele_css_selector,
                                                       min_frequency=min_frequency,max_frequency=max_frequency,timeout=timeout)
        list_len2 = self.until_get_elements_len_by_css_selector(css_selector=list_css_selector)
        # Log: "current count: %s".
        self.info_log(data='当前数量%s:' % list_len2)
        if list_len == list_len2:
            if comment_len:
                if list_len2 >= comment_len:
                    break
            # Give slow pages a second chance before concluding we are done.
            time.sleep(2)
            self.until_send_key_arrow_down_by_css_selector(css_selector=ele_css_selector,
                                                           min_frequency=min_frequency,max_frequency=max_frequency,timeout=timeout)
            list_len2 = self.until_get_elements_len_by_css_selector(css_selector=list_css_selector,timeout=timeout)
            if list_len == list_len2:
                break
    # Log: "finished scrolling the page".
    self.logger.info('...结束下拉页面...')
def until_click_by_vertical_scroll_page_down(self,click_ele=None,offset=8,try_times=20):
    """Scroll *click_ele* into view and click it, retrying up to *try_times* times.

    Each failed click scrolls the page down by *offset* before retrying;
    failures are logged rather than raised.
    """
    failed_times = 0
    while(True):
        self.scroll_into_view(ele=click_ele)
        if failed_times > try_times:
            break
        try:
            click_ele.click()
            # Log: "click succeeded".
            self.info_log(data='点击成功')
            break
        except Exception:
            failed_times += 1
            # Log: "attempting click number %s".
            self.warning_log(e='...正在尝试第%s次点击...'%failed_times)
            self.vertical_scroll_by(offset=offset)
def until_presence_by_vertical_scroll_page_down_by_css_selector(self,ele=None,css_selector=None,offset=8,try_times=20,timeout=1):
    """Scroll down repeatedly until an element matching *css_selector* is present.

    Gives up silently after *try_times* attempts; each probe uses a tenth
    of *timeout* so the retry loop stays responsive.
    """
    if not css_selector:
        # Log message: "css_selector must not be empty".
        self.error_log(e='css_selector不允许为空!!!')
        return None
    if not ele:
        ele = self.driver
    failed_times = 0
    while(True):
        if failed_times > try_times:
            break
        try:
            self.until_presence_of_element_located_by_css_selector(ele=ele,css_selector=css_selector,timeout=float(timeout)/10)
            # Log: "element present, accessible".
            self.info_log(data='元素存在,可以访问')
            break
        except Exception:
            failed_times += 1
            # Log: "attempting scroll number %s".
            self.warning_log(e='...正在尝试第%s次下拉...'%failed_times)
            self.vertical_scroll_by(offset=offset)
def until_refresh_by_css_selector(self,css_selector,try_times=10):
    """Refresh the page (up to *try_times* probes) until *css_selector* appears.

    Each probe waits one second; only a failed probe triggers a refresh.
    """
    count = 0
    if not css_selector:
        # Log message: "css_selector must not be empty".
        self.error_log(e='css_selector不可以为空!!!')
        return None
    for i in range(try_times):
        try:
            self.until_presence_of_element_located_by_css_selector(css_selector=css_selector,timeout=1)
        except Exception as e:
            count += 1
            # Log: "refresh number %s".
            self.info_log(data='第%s次刷新!!!'%count)
            self.driver.refresh()
'''
根据css样式点击直到没有下一页
'''
def until_click_no_next_page_by_css_selector(self,func=None,css_selector=None,timeout=1,pause_time=1,**kwargs):
    """Run ``func(**kwargs)`` on every page, clicking the *css_selector*
    "next page" control after each run, until that control can no longer
    be clicked."""
    if not css_selector:
        # Log message: "css_selector must not be empty".
        self.error_log(e='css_selector不可以为空!!!')
        return None
    count = 0
    self.vertical_scroll_to(min_offset=0, max_offset=0)
    while(True):
        self.until_scroll_into_view_by_css_selector(css_selector=css_selector)
        func(**kwargs)
        time.sleep(pause_time)
        try:
            self.until_click_by_css_selector(css_selector=css_selector,timeout=timeout)
            count += 1
            # Log: "clicked page %s".
            self.info_log(data='点击第%s页...'%count)
        except Exception as e:
            # Log: "no next page".
            self.error_log(e='没有下一页了!!!')
            break
'''
根据链接文字点击直到没有下一页
'''
def until_click_no_next_page_by_link_text(self,func=None,link_text=None,timeout=1,pause_time=1,**kwargs):
    """Run ``func(**kwargs)`` on every page, clicking the link whose exact
    text is *link_text* after each run, until that link disappears."""
    if not link_text:
        # Log message: "link_text must not be empty".
        self.error_log(e='link_text不可以为空!!!')
        return None
    count = 0
    self.vertical_scroll_to(min_offset=0, max_offset=0)
    while(True):
        self.until_scroll_into_view_by_link_text(link_text=link_text)
        func(**kwargs)
        time.sleep(pause_time)
        try:
            self.until_click_by_link_text(link_text=link_text,timeout=timeout)
            count += 1
            # Log: "clicked page %s".
            self.info_log(data='点击第%s页...'%count)
        except Exception as e:
            # Log: "no next page".
            self.error_log(e='没有下一页了!!!')
            break
'''
根据部分链接文字点击直到没有下一页
'''
def until_click_no_next_page_by_partical_link_text(self,func=None,link_text=None,timeout=1,pause_time=1,**kwargs):
    """Run ``func(**kwargs)`` on every page, clicking the link whose text
    *contains* *link_text* after each run, until that link disappears.

    NOTE: "partical" in the name is a historical typo for "partial"; it is
    kept because callers depend on it.
    """
    if not link_text:
        # Log message: "link_text must not be empty".
        self.error_log(e='link_text不可以为空!!!')
        return None
    count = 0
    self.vertical_scroll_to(min_offset=0,max_offset=0)
    while(True):
        self.until_scroll_into_view_by_partial_link_text(link_text=link_text)
        func(**kwargs)
        time.sleep(pause_time)
        try:
            self.until_click_by_partial_link_text(link_text=link_text,timeout=timeout)
            count += 1
            # Log: "clicked page %s".
            self.info_log(data='点击第%s页...'%count)
        except Exception as e:
            # Log: "no next page".
            self.error_log(e='没有下一页了!!!')
            break
'''
关闭先前的页面
'''
def close_pre_page(self):
    """Close the oldest browser window and switch to the (new) first window."""
    self.driver.switch_to_window(self.driver.window_handles[0])
    self.driver.close()
    self.driver.switch_to_window(self.driver.window_handles[0])
'''
根据索引切换浏览器窗口
'''
def switch_window_by_index(self,index=None):
    """Switch the driver to the window handle at position *index*.

    Bug fix: the old guard ``if not index`` also rejected the perfectly
    valid index 0 (the first window); only ``None`` should be rejected.
    """
    if index is None:
        # Log message: "index must not be empty".
        self.error_log(e='index不可以为空!!!')
        return None
    self.driver.switch_to_window(self.driver.window_handles[index])
def isneed_to_update_comment_data(self):
    """Hook for subclasses: decide whether comment data needs refreshing."""
    pass
"1271990125@qq.com"
] | 1271990125@qq.com |
0557fa2ffa1cf6df52973e1d2ce00043da40e3a7 | 970c539c033f3f6e9c993c0cbbe3e84614185e6b | /Rule_modification/RAM/Wildcard Increasing Priority/auto_sh.py | 790dd033a90e6bed7e82d4953ccdf0d2b19dbed5 | [] | no_license | fmmazz/SDN-table-update | cce9663e896fd1f5fbc16e5714204fac1a5dce95 | 30bf0d867ed9841a18b0333a2674f64977dc02d4 | refs/heads/master | 2020-03-07T23:25:52.023362 | 2018-04-12T18:06:55 | 2018-04-12T18:06:55 | 127,781,274 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 853 | py | import os
import time
import sys
import subprocess
# Experiment driver (Python 2): runs the RYU controller script repeatedly
# and then aggregates the timing results.
packets = sys.argv[1]             # number of packets to generate per run
initial_rules = sys.argv[2]       # initial flow-table size under test
number_repetitions = sys.argv[3]  # how many runs to average over
ipv4_src = "192.168.10.1"
#cmd = 'sudo python pktgen-v3.py ' + str(packets) + ' ' + str (ipv4_src) + ' 45678 &'
os.system('sudo python file_changer.py modify_increasing_priorityv3_sh.py ' + str(initial_rules))
print "BEGIN ----- Initial Rules: " + str(initial_rules) + " -----"
for i in xrange(1,int(number_repetitions)+1):
    print "------ " + str(i) + " -------"
    #call the RYU script
    os.system('sudo bash call_ryu_sh.sh ' + str(packets) + ' &')
    # Fixed grace period for the backgrounded run to finish.
    time.sleep(95)
#call the script that calculates mean and organize results files
os.system('sudo python parse_controlv2.py ' + str(initial_rules) + ' ' + str(number_repetitions))
print "END ----- Initial Rules: " + str(initial_rules) + " -----"
sys.exit()
| [
"fmmazzola@inf.ufrgs.br"
] | fmmazzola@inf.ufrgs.br |
477e9b6444672e436d32d11dc46f7b782779e802 | 1a5c29faa360851cb3a6f3efade56b6b8d482004 | /read_old_data.py | 7228694134eba4e9d2fb6f3a5e4511fe5273cf8b | [] | no_license | 294486709/Youtube_Analyzer | 8d3b84159a602dee1ffce2652a2810fa428504d5 | 66ac2307455f2eea56617af2fedcf43e0a5213de | refs/heads/master | 2020-04-04T08:59:32.890931 | 2018-12-13T02:31:31 | 2018-12-13T02:31:31 | 155,802,294 | 4 | 3 | null | 2018-12-11T22:01:34 | 2018-11-02T02:17:28 | Python | UTF-8 | Python | false | false | 690 | py | import nltk
from vocab import Vocab
import os
import codecs
import numpy as np
import random
import pandas as pd
import multiprocessing as mp
from gensim.models import Word2Vec
from gensim.utils import simple_preprocess
# Corpus / model constants (only some are used by this particular script).
LENGTH_TRAINING = 80
TESTING_PERSENTATGE = 0.10  # [sic] fraction of data held out for testing
num_features = 100
MAX_LENGTH=19
def read_from_file():
    """Load video titles (3rd column) and category labels (5th column) from the CSV."""
    frame = pd.read_csv('USvideos_sorted.csv')
    titles = list(frame[frame.columns[2]])
    categories = list(frame[frame.columns[4]])
    return titles, categories
def main():
    """Bucket video titles into '<label>.txt' files by their category label.

    Only labels '0'..'5' are kept; each matching title is appended on its
    own line to the file named after the label.
    """
    current, label = read_from_file()
    for i in range(len(label)):
        if label[i] in ('0', '1', '2', '3', '4', '5'):
            # 'with' guarantees the handle is closed even if a write fails
            # (the old code leaked handles on error and ended in a dead 'pass').
            with open(str(label[i]) + '.txt', 'a') as f:
                f.write(current[i])
                f.write('\n')
if __name__ == '__main__':
    # Script entry point.
    main()
"jedzhang@bu.edu"
] | jedzhang@bu.edu |
7b0597275393a4e60df88ff6dabff13ca0bfa6f1 | 61bc53ec90d92aece91753ec5ec9d25e0879a1e2 | /content/pythia/pythia/legacy/top_down_bottom_up/unittests.py | d1f24d7a35974cc3ea42778088b753406f23a637 | [
"BSD-3-Clause"
] | permissive | aluka1994/textvqa | 08a16c9b21ea9c5eca05f5d4d1763c190d2d7275 | 694cb2be08def519ba73be78e34664afa2c607b5 | refs/heads/master | 2021-05-26T23:44:21.973827 | 2020-04-08T22:05:58 | 2020-04-08T22:05:58 | 254,190,630 | 0 | 0 | MIT | 2020-04-08T20:14:11 | 2020-04-08T20:14:10 | null | UTF-8 | Python | false | false | 4,467 | py | # Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import unittest
import numpy as np
import torch
from torch.autograd import Variable
from global_variables.global_variables import use_cuda
from top_down_bottom_up.classifier import logit_classifier
from top_down_bottom_up.image_embedding import image_embedding
from top_down_bottom_up.question_embeding import QuestionEmbeding
from top_down_bottom_up.top_down_bottom_up_model import \
top_down_bottom_up_model
class Test_top_down_bottom_up_model(unittest.TestCase):
    """Shape-level smoke tests for the top-down/bottom-up VQA components.

    Each test instantiates a module with small made-up dimensions, pushes a
    random batch through it and asserts only on the output tensor shape.
    """
    def test_classifier(self):
        # Classifier should map (batch, joint_dim) -> (batch, num_answers).
        batch_size = 12
        joint_embedding_dim = 10
        num_ans_candidates = 20
        text_embeding_dim = 64
        image_embedding_dim = 32
        my_classifier = logit_classifier(
            joint_embedding_dim,
            num_ans_candidates,
            image_embedding_dim,
            text_embeding_dim,
        )
        joint_embedding = Variable(torch.randn(batch_size, joint_embedding_dim))
        res = my_classifier(joint_embedding)
        self.assertEqual((12, 20), res.shape)
    def test_classifier_batch_size_1(self):
        # Same as above but guards the degenerate batch-of-one case.
        batch_size = 1
        joint_embedding_dim = 10
        num_ans_candidates = 20
        text_embeding_dim = 64
        image_embedding_dim = 32
        my_classifier = logit_classifier(
            joint_embedding_dim,
            num_ans_candidates,
            image_embedding_dim,
            text_embeding_dim,
        )
        joint_embedding = Variable(torch.randn(batch_size, joint_embedding_dim))
        res = my_classifier(joint_embedding)
        self.assertEqual((1, 20), res.shape)
    def test_question_embedding(self):
        # Question embedder: token ids (batch, seq) -> (batch, lstm_dim).
        num_vocab = 20
        embedding_dim = 300
        lstm_dim = 512
        lstm_layer = 1
        dropout = 0.1
        batch_first = True
        batch_size = 32
        question_len = 10
        my_word_embedding_model = QuestionEmbeding(
            num_vocab, embedding_dim, lstm_dim, lstm_layer, dropout, batch_first
        )
        my_word_embedding_model = (
            my_word_embedding_model.cuda() if use_cuda else my_word_embedding_model
        )
        # Random token ids kept in-vocabulary via the modulo.
        input_txt = Variable(
            torch.rand(batch_size, question_len).type(torch.LongTensor) % num_vocab
        )
        input_txt = input_txt.cuda() if use_cuda else input_txt
        embedding = my_word_embedding_model(input_txt, batch_first=True)
        self.assertEqual((32, 512), embedding.shape)
    def test_image_embedding(self):
        # Attention-weighted image embedding keeps the feature dimension.
        image_feat_dim = 40
        txt_embedding_dim = 50
        hidden_size = 30
        num_of_loc = 5
        batch_size = 16
        my_image_embeding = image_embedding(
            image_feat_dim, txt_embedding_dim, hidden_size
        )
        image_feat = Variable(torch.randn(batch_size, num_of_loc, image_feat_dim))
        txt = Variable(torch.randn(batch_size, txt_embedding_dim))
        res = my_image_embeding(image_feat, txt)
        self.assertEqual((batch_size, image_feat_dim), res.shape)
    def test_model(self):
        # End-to-end: image features + question -> answer logits.
        image_feat_dim = 40
        txt_embedding_dim = 300
        lstm_dim = 512
        hidden_size = 30
        num_of_loc = 5
        batch_size = 16
        num_vocab = 60
        num_ans_candidates = 35
        joint_embedding_dim = 500
        question_len = 13
        batch_first = True
        image_embedding_model = image_embedding(image_feat_dim, lstm_dim, hidden_size)
        question_embedding_model = QuestionEmbeding(
            num_vocab,
            txt_embedding_dim,
            lstm_dim,
            lstm_layer=2,
            dropout=0.1,
            batch_first=batch_first,
        )
        my_classifier = logit_classifier(
            joint_embedding_dim, num_ans_candidates, image_feat_dim, txt_embedding_dim
        )
        loss = torch.nn.CrossEntropyLoss()
        my_model = top_down_bottom_up_model(
            image_embedding_model, question_embedding_model, my_classifier, loss
        )
        # NOTE(review): image_feat is a NumPy array here while the other
        # tests use torch tensors -- presumably the model converts it;
        # confirm against top_down_bottom_up_model.
        image_feat = np.random.rand(batch_size, num_of_loc, image_feat_dim)
        input_txt = Variable(
            torch.rand(batch_size, question_len).type(torch.LongTensor) % num_vocab
        )
        res = my_model(image_feat, input_txt, batch_first)
        self.assertEqual((batch_size, num_ans_candidates), res.shape)
if __name__ == "__main__":
    # Run the test suite when executed directly.
    unittest.main()
| [
"anandkumar@instance-1.us-central1-a.c.andromanit.internal"
] | anandkumar@instance-1.us-central1-a.c.andromanit.internal |
bdc14ed51f22263f092152c8eb153cafeab0a246 | 25089e415a3975a94dafb8fe8281cf1ba5576324 | /src/xmpp_transport.py | 63dbb36526a27d83944180d05b01fe69d834ade7 | [
"MIT"
] | permissive | hogrewe/uniblab | 9211e444fbf203c4db7daed51a39d20221d2f693 | 5286644582efeff89f379945f3f553c72d71e1b6 | refs/heads/master | 2021-01-01T15:41:22.112361 | 2012-08-11T16:07:12 | 2012-08-11T16:07:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,090 | py | from twisted.words.xish import domish
from twisted.words.protocols.jabber import jid
from twisted.internet import reactor, protocol
from twisted.application import service
from wokkel.xmppim import MessageProtocol, PresenceClientProtocol, RosterClientProtocol
from wokkel.xmppim import AvailablePresence
from wokkel.client import XMPPClient
import uniblab_message
class UniblabXMPPProtocol(MessageProtocol, PresenceClientProtocol,RosterClientProtocol):
transport_type = 'gtalk'
def connectionInitialized(self):
print 'Initializing connection'
MessageProtocol.connectionInitialized(self)
PresenceClientProtocol.connectionInitialized(self)
RosterClientProtocol.connectionInitialized(self)
self.getRoster().addCallback(self.processRoster)
def connectionMade(self):
print "Connected!"
self.available(None, None, {None: 'Being a bot'})
def processRoster(self, roster):
print 'Processing roster'
for name,item in roster.items():
print 'Processing item', name
if not item.subscriptionTo:
'Subscribing to', item.jid
self.subscribe(item.jid)
def connectionLost(self, reason):
print "Disconnected!"
def typing_notification(self, jid):
"""Send a typing notification to the given jid."""
msg = domish.Element((None, "message"))
msg["to"] = jid
msg["from"] = self.transport.xmpp_user
msg.addElement(('jabber:x:event', 'x')).addElement("composing")
self.send(msg)
def send_plain(self, user, content):
msg = domish.Element((None, "message"))
msg["to"] = user
msg["from"] = self.parent.jid.full()
msg["type"] = 'chat'
msg.addElement("body", content=content)
self.send(msg)
def onMessage(self, msg):
if msg["type"] == 'chat' and hasattr(msg, "body") and msg.body:
self.typing_notification(msg['from'])
user = msg['from'].split('/')[0]
username = self.transport.uniblab.username_from_gtalk(user.strip().lower())
message = uniblab_message.uniblab_message(msg['from'], self.transport.xmpp_user, str(msg.body), None, self.transport_type, username)
self.transport.uniblab.message(message,self.transport )
# presence stuff
def availableReceived(self, entity, show=None, statuses=None, priority=0):
user = entity.full().split('/')[0]
username = self.transport.uniblab.username_from_gtalk(user.strip().lower())
new_status = None
if statuses:
new_status = statuses[None]
message = uniblab_message.uniblab_message(entity.full(), self.transport.xmpp_user, new_status, None, self.transport_type, username)
self.transport.uniblab.status(message,self.transport )
print "Available from %s (%s, %s)" % (entity.full(), show, statuses)
def unavailableReceived(self, entity, statuses=None):
print "Unavailable from %s" % entity.userhost()
def subscribedReceived(self, entity):
print "Subscribed received from %s" % (entity.userhost())
self.subscribe(entity)
self.subscribed(entity)
self.send_plain(entity.full(), "Yo, I'm a bot. Ask me: 'uniblab: what can you do?'")
def unsubscribedReceived(self, entity):
print "Unsubscribed received from %s" % (entity.userhost())
self.unsubscribe(entity)
self.unsubscribed(entity)
def subscribeReceived(self, entity):
print "Subscribe received from %s" % (entity.userhost())
self.subscribe(entity)
self.subscribed(entity)
def unsubscribeReceived(self, entity):
print "Unsubscribe received from %s" % (entity.userhost())
self.unsubscribe(entity)
self.unsubscribed(entity)
def onRosterSet(self, item):
if not item.subscriptionTo:
self.subscribe(item.jid)
print 'Roster set', item.jid.full()
def onRosterRemove(self, entity):
print 'Roster removed', entity.full()
class XMPPTransport:
    """Owns the XMPP client lifecycle and routes uniblab responses to it."""
    def __init__(self, uniblab):
        self.uniblab = uniblab
        # Bug fix: ``respond`` checks ``self.client`` but the attribute was
        # only created inside ``connect``; a respond() before connect()
        # raised AttributeError.  Initialise it explicitly.
        self.client = None
    def config(self, config):
        """Read connection parameters from the [xmpp] section of *config*."""
        self.xmpp_host = config.get('xmpp', 'xmpp_host')
        self.xmpp_port = config.get('xmpp', 'xmpp_port')
        self.xmpp_user = config.get('xmpp', 'xmpp_user')
        self.xmpp_pass = config.get('xmpp', 'xmpp_pass')
    def connect(self):
        """Create the twisted XMPP client service and start it."""
        application = service.Application('UniblabXMPP')
        xmppclient = XMPPClient(jid.internJID(self.xmpp_user), self.xmpp_pass)
        self.client=UniblabXMPPProtocol()
        self.client.transport = self
        self.client.setHandlerParent(xmppclient)
        xmppclient.setServiceParent(application)
        xmppclient.startService()
    def respond(self, m, response):
        """Send *response.text* back over XMPP (silent no-op before connect())."""
        if self.client is not None:
            # respondall replies to the room/receiver unless we are it.
            if response.respondall and m.receiver != self.xmpp_user:
                user = m.receiver
            else:
                user = m.sender
            self.client.send_plain(user, response.text)
| [
"jhogrewe@channelinsight.com"
] | jhogrewe@channelinsight.com |
d579dd284d9927a5d3d431bf5e5f440c71ef6bdf | 61a3d8eab8eee2eccdadeab0c0804144c8aeca82 | /outPlan/data/__init__.py | 64793bbc91c007e52ea4d2127fb44d16bd384a02 | [] | no_license | gurongrong0329/Python3_project | f9f06b9e1502c4d6a1f4854254131fddf77a8bd3 | 2a34c7a3db040d1edf64bdd391ac30d20cfb359f | refs/heads/master | 2020-03-22T19:09:51.909862 | 2018-12-17T03:48:49 | 2018-12-17T03:48:49 | 140,510,090 | 0 | 1 | null | 2018-12-17T03:48:50 | 2018-07-11T02:18:56 | Python | UTF-8 | Python | false | false | 91 | py | # -*- coding: utf-8 -*-
# 作者: admin
# 时间: 2018/7/27 11:06
# 文件: __init__.py.py
| [
"gurr@kalamodo.com"
] | gurr@kalamodo.com |
0b5a319eaa0d9131a0c7a6bb4a2ed8fe8b6499ec | 7064bcec6fc7a8370be99921212998da481d9c0b | /tweetontime/tweet_on_time.py | f02b3f969572f46392674d8c882abd1048dcabd6 | [] | no_license | zachdtaylor/tweet-on-time-flask | e5bdce1e78ed2fce4d6a0ce6e58424212988dae2 | 3ace1f8c59b35fef05319794a740b5aef81311cc | refs/heads/master | 2022-12-16T08:42:58.515467 | 2020-09-18T01:59:57 | 2020-09-18T01:59:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,214 | py | import json
import requests
import urllib
from apscheduler.schedulers.background import BackgroundScheduler
from datetime import datetime
from requests_oauthlib import OAuth1
from .settings import API_KEY, API_SECRET_KEY, ACCESS_TOKEN, ACCESS_TOKEN_SECRET
# Module-level singletons: OAuth1 credentials for the Twitter v1.1 API and
# a background scheduler used to post tweets at a later time.
auth = OAuth1(API_KEY, API_SECRET_KEY, ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
tweet_scheduler = BackgroundScheduler()
def get_profile_info():
    """Fetch the authenticated user's profile from the Twitter v1.1 API.

    Rewrites the avatar URL from the small 'normal' variant to the
    '400x400' variant before returning the decoded JSON dict.
    """
    url = 'https://api.twitter.com/1.1/account/verify_credentials.json'
    res = requests.get(url, auth=auth)
    data = json.loads(res.text)
    data['profile_image_url_https'] = data['profile_image_url_https'].replace(
        'normal', '400x400')
    return data
def post_tweet(status):
    """Post *status* immediately; returns the decoded API response dict."""
    # URL-encode the status so it survives being put in the query string.
    encoded = urllib.parse.quote(status)
    url = f'https://api.twitter.com/1.1/statuses/update.json?status={encoded}'
    res = requests.post(url, auth=auth)
    return json.loads(res.text)
def schedule_tweet(status, datetime_str):
    """Queue *status* to be posted at *datetime_str* (ISO-8601, naive/local)."""
    tweet_scheduler.add_job(
        lambda: post_tweet(status),
        'date',
        run_date=datetime.fromisoformat(datetime_str)
    )
if __name__ == '__main__':
    # Manual smoke test.  Bug fix: the previous body referenced undefined
    # names (url/headers/payload) and always crashed with NameError;
    # exercise the credential-verification helper instead.
    print(get_profile_info())
| [
"zachtylr04@gmail.com"
] | zachtylr04@gmail.com |
a5ea4acde43bd227278705b853ff815836a465f3 | 6e3dca7de6921ba90d7f5b798664d2fa5d62b0f8 | /api/scholarship/migrations/0012_auto_20210114_1429.py | bbba735451cec78bd0404081f0fd51c985f5ab73 | [] | no_license | Zasugh/dsch | 2d41bf4b33d1d3ee197d4be72516187dfa0f43d0 | e946542edced015560bf905ac57ee1e45def9210 | refs/heads/main | 2023-03-27T07:41:41.874638 | 2021-03-31T15:01:18 | 2021-03-31T15:01:18 | 353,392,934 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | # Generated by Django 2.2.12 on 2021-01-14 20:29
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations: relabels the ``file_reply``
    # CharField on the Scholarship model.  Do not edit by hand.
    dependencies = [
        ('scholarship', '0011_auto_20210114_1428'),
    ]
    operations = [
        migrations.AlterField(
            model_name='scholarship',
            name='file_reply',
            field=models.CharField(blank=True, max_length=100, null=True, verbose_name='Archivo de Respuesta de Comisión Dictaminadora'),
        ),
    ]
| [
"joselealcasique@gmail.com"
] | joselealcasique@gmail.com |
bbe03901b46f486d869c324fceb2ecf87ab9fb6e | 8a1665f8e534e5903c454605f0105aaf62a04fe0 | /weather/urls.py | 8b6ed775076990a52b9497da48cb0593b131732e | [] | no_license | ShadePy/weather_app | 3125e24e7db47415ce5696c029ace28a8de73cd5 | 349ce8af46567464a4bdaf59996c96857879537e | refs/heads/master | 2022-04-06T03:43:32.344395 | 2020-02-09T00:00:29 | 2020-02-09T00:00:29 | 239,221,817 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 216 | py | from django.contrib import admin
from django.urls import path, include
from . import views
urlpatterns = [
    # Route map for the weather app: the homepage and a per-city delete action.
    path("", views.index, name="home"),
    path("delete/<city_name>", views.delete_city, name="delete"),
]
| [
"serezhasoroka@hotmail.com"
] | serezhasoroka@hotmail.com |
c50c0e9005ec170abfa46abc1f26c3c35a8a774c | a99a44aee5cfc5e080f6d83d2bcc1c3d273a3426 | /htdocs/plotting/auto/scripts/p98.py | fce3feefb062bd6cee36902c4ba30cf7d0d5e9f0 | [
"MIT"
] | permissive | ragesah/iem | 1513929c8bc7f254048271d61b4c4cf27a5731d7 | 8ed970d426bddeaa3e7ded593665d22f0f9f6e87 | refs/heads/main | 2023-08-20T20:01:15.480833 | 2021-10-12T15:44:52 | 2021-10-12T15:44:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,728 | py | """Day of month frequency."""
import calendar
import numpy as np
from pandas.io.sql import read_sql
from pyiem import network
from pyiem.plot import figure_axes
from pyiem.util import get_autoplot_context, get_dbconn
from pyiem.exceptions import NoDataFound
# Daily variables the user may plot, and the two threshold directions.
PDICT = {
    "precip": "Daily Precipitation",
    "snow": "Daily Snowfall",
    "snowd": "Daily Snow Depth",
    "high": "High Temperature",
    "low": "Low Temperature",
}
PDICT2 = {"above": "At or Above Threshold", "below": "Below Threshold"}


def get_description():
    """Return a dict describing how to call this plotter"""
    description = """This plot produces the daily frequency of
    a given criterion being meet for a station and month of your choice. The
    number labeled above each bar is the actual number of years.
    """
    arguments = [
        {
            "type": "station",
            "name": "station",
            "default": "IATDSM",
            "label": "Select Station",
            "network": "IACLIMATE",
        },
        {"type": "month", "name": "month", "default": 9, "label": "Which Month:"},
        {
            "type": "select",
            "name": "var",
            "default": "high",
            "label": "Which Variable:",
            "options": PDICT,
        },
        {
            "type": "text",
            "name": "thres",
            "default": "90",
            "label": "Threshold (F or inch):",
        },
        {
            "type": "select",
            "name": "dir",
            "default": "above",
            "label": "Threshold Direction:",
            "options": PDICT2,
        },
    ]
    return {"data": True, "description": description, "arguments": arguments}
def plotter(fdict):
    """Build the day-of-month frequency bar chart for a station/month.

    Returns ``(matplotlib Figure, pandas DataFrame)`` per the autoplot
    contract; raises ``NoDataFound`` when the station has no rows for the
    requested month.
    """
    pgconn = get_dbconn("coop")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["station"]
    varname = ctx["var"]
    month = ctx["month"]
    threshold = float(ctx["thres"])
    # Silently bail on unknown variable / direction keys.
    if PDICT.get(varname) is None:
        return
    drct = ctx["dir"]
    if PDICT2.get(drct) is None:
        return
    operator = ">=" if drct == "above" else "<"
    # Per-state climate table, e.g. 'alldata_ia' for Iowa stations.
    table = "alldata_%s" % (station[:2],)
    nt = network.Table("%sCLIMATE" % (station[:2],))
    # Count, per calendar day, how often the criterion was met and how many
    # observations exist.  varname/operator/table are interpolated (from
    # whitelisted values above); user data goes through bound params.
    df = read_sql(
        f"""
        SELECT sday,
        sum(case when {varname} {operator} %s then 1 else 0 end)
        as hit,
        count(*) as total
        from {table} WHERE station = %s and month = %s
        GROUP by sday ORDER by sday ASC
        """,
        pgconn,
        params=(threshold, station, month),
        index_col="sday",
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    df["freq"] = df["hit"] / df["total"] * 100.0
    title = ("[%s] %s %s %s %s\nduring %s (Avg: %.2f days/year)") % (
        station,
        nt.sts[station]["name"],
        PDICT.get(varname),
        PDICT2.get(drct),
        threshold,
        calendar.month_name[month],
        df["hit"].sum() / float(df["total"].sum()) * len(df.index),
    )
    fig, ax = figure_axes(title=title)
    bars = ax.bar(np.arange(1, len(df.index) + 1), df["freq"])
    # Label each bar with the raw hit count for that day of month.
    for i, mybar in enumerate(bars):
        ax.text(
            i + 1,
            mybar.get_height() + 0.3,
            "%s" % (df["hit"][i],),
            ha="center",
        )
    ax.set_ylabel("Frequency (%)")
    ax.set_xlabel(
        ("Day of %s, number of years (out of %s) meeting criteria labelled")
        % (calendar.month_name[month], np.max(df["total"]))
    )
    ax.grid(True)
    ax.set_xlim(0.5, 31.5)
    ax.set_ylim(0, df["freq"].max() + 5)
    return fig, df
if __name__ == "__main__":
    # Ad-hoc invocation for local testing of the autoplot.
    plotter(
        dict(
            month=9,
            dir="below",
            thres=65,
            station="IA2724",
            network="IACLIMATE",
        )
    )
| [
"akrherz@iastate.edu"
] | akrherz@iastate.edu |
a900aa027ffb014e1aba09385256e017ad548f62 | 9a5f3d1bacc497b0f4d90b4f9d99564479a94261 | /comments/views.py | d9639a8548955b370e04c8b699f0e103045799de | [] | no_license | cerikzhan/tancho-blog | d9f6546f4ba4f085fd205a2a31cfd0cf165a3f59 | 7c7e4baeaf00da32188c93fe2e7f6b4c401ff7ae | refs/heads/main | 2023-08-23T11:54:20.655057 | 2020-11-17T10:57:48 | 2020-11-17T10:57:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,183 | py | from django.contrib import messages
from django.shortcuts import render, redirect
from .models import Comment
def inactive_comments(request):
    """List inactive comments; a POSTed ``comment_id`` activates that comment.

    Redirects back to the 'inactive' view after an (attempted) activation;
    otherwise renders the list template.
    """
    comments = Comment.objects.inactive()
    comment_id = request.POST.get('comment_id')
    if comment_id is not None:
        try:
            comment = Comment.objects.get(id=comment_id)
        except Comment.DoesNotExist:
            # Bug fix: django.contrib.messages has no ``danger`` level --
            # the old call raised AttributeError.  ``error`` is the
            # equivalent built-in level.
            messages.error(request, 'Comment not found')
            return redirect('inactive')
        comment.is_active = True
        comment.save()
        messages.success(request, 'Comment was activated')
        return redirect('inactive')
    context = {
        'comments': comments
    }
    return render(request, 'comments/inactive_comments.html', context)
def delete_comment(request):
    """Delete the comment named by the POSTed ``comment_id``, then redirect."""
    comment_id = request.POST.get('comment_id')
    if comment_id is not None:
        try:
            comment = Comment.objects.get(id=comment_id)
        except Comment.DoesNotExist:
            # Bug fix: ``messages.danger`` does not exist; use ``error``.
            messages.error(request, 'Comment not found')
            return redirect('inactive')
        comment.delete()
        messages.success(request, 'Comment was deleted')
        return redirect('inactive')
    # Robustness fix: always return an HttpResponse -- the old code fell
    # through and returned None (a 500) when no comment_id was posted.
    return redirect('inactive')
| [
"serj_navii@mail.ru"
] | serj_navii@mail.ru |
908aafa15644f2c0975c55354fc11158d00ceb4f | 18ada29f4730e8d7718422044c465f5e5c6ea7fe | /Parcial 2/Sub_ventana - copia.py | af6abacc4a4e35a5c3d4435a52e9e05ea95fecea | [] | no_license | horacio2307/Programacion-Avanzada | 3bb0eef2ce076a66bf1ed0e678500cde0f721286 | af2cf57f47c738b45409d6b670ea5d494533f192 | refs/heads/main | 2023-07-29T07:44:32.158189 | 2021-09-10T18:29:20 | 2021-09-10T18:29:20 | 336,617,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | from tkinter import*
global sv  # no-op at module level; kept as-is (the functions below re-declare it)
def sub_ventana():
    # Open a Toplevel child window holding a single "Cerrar" (close) button.
    global sv
    sv=Toplevel()
    b2=Button(sv,text="Cerrar",command=cerrar)
    b2.grid(row=0,column=0)
def cerrar():
    # Hide the child window and add a "Reabrir" (reopen) button to the root.
    global sv
    b3=Button(vp,text="Reabrir",command=abrir)
    b3.grid(row=0,column=1)
    sv.withdraw()
def abrir():
    # Re-show the previously hidden child window.
    global sv
    sv.deiconify()
vp=Tk()  # root window: one button that spawns the child window
b1=Button(vp,text="Abre ventana",command=sub_ventana)
b1.grid(row=0,column=0)
vp.mainloop()
"horaciopena07@outlook.com"
] | horaciopena07@outlook.com |
53cf228cb14fcd7ded5594923581c4fea8dd0d31 | a7c26b903d5ddc0b3fe71c641b6fa3413f68868b | /class_html.py | b8f8bf1ffac5f590f56bb0ef9afac5306234c128 | [] | no_license | trickre/site-maker | 191d1407fce9839055e06982afb66099e828ca2a | e0e9955580ba424678641afbb4b8bad8bc3b75dd | refs/heads/master | 2020-03-22T13:09:01.274276 | 2018-11-28T12:08:21 | 2018-11-28T12:08:21 | 140,087,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,364 | py | #coding:utf-8
import sys
"""
Usage :>python main_module.py [target file] [option...]
Output :"target file.html"
"""
class HTML_Object(object):
    """Base class for a tiny HTML element tree.

    ``content`` holds a mix of plain strings and nested HTML_Object
    children; ``print_html`` serialises the element (start tag + content +
    end tag) and returns the string.
    """
    def __init__(self):
        self.content = []
        self.tag_start = ""
        self.tag_end = ""
    def print_tag_start(self):
        return (self.tag_start)
    def print_tag_end(self):
        return (self.tag_end)
    def print_content(self):
        """Return the serialised inner content, recursing into children."""
        content = ""
        for c in self.content:
            if issubclass(type(c),HTML_Object):
                # Bug fix: the child's HTML used to be computed and thrown
                # away (plus a stray debug print), so nested elements never
                # appeared in the output.  Accumulate it instead.
                content += c.print_html()
            else:
                content+=str(c)
        return(content)
    def add_content(self,c):
        self.content.append(c)
    def print_html(self):
        """Return the full serialisation: start tag + content + end tag."""
        html = self.print_tag_start()
        html += self.print_content()
        html += self.print_tag_end()
        return(html)
class p(HTML_Object):
    def __init__(self):
        super().__init__()
        self.tag_start = "<p>"
        self.tag_end = "</p>"
class Html(HTML_Object):
    def __init__(self):
        super().__init__()
        self.tag_start = "<html>"
        self.tag_end = "</html>"
        # Pre-populate the document head (charset + stylesheet link).
        self.add_content("<head>\n\
        <meta charset=\"utf-8\"/>\n\
        <link rel=\"stylesheet\" href=\"style.css\">")
        self.add_content("\n</head>")
class div(HTML_Object):
    def __init__(self):
        super().__init__()
        self.tag_start = "<div>"
        self.tag_end = "</div>"
#Usage: var = h(1) #h1; var = h(2) #h2
class h(HTML_Object):
    def __init__(self,num):
        super().__init__()
        try:
            h_num = int(num)
        except ValueError:
            print("input must be int. @h.__init__()")
            # Bug fix: falling through left h_num undefined and crashed
            # with NameError two lines later; re-raise the real error.
            raise
        self.tag_start = "<h"+str(h_num)+">"
        self.tag_end = "</h"+str(h_num)+">"
class li(HTML_Object):
    def __init__(self):
        super().__init__()
        self.tag_start = "<li>"
        self.tag_end = "</li>"
class Article():
    title = ""
    def __init__(self,title_txt):
        # Bug fix: the value was assigned to a local variable, leaving the
        # instance attribute forever "".
        self.title = title_txt
#Usage
if __name__ =="__main__":
    # Ad-hoc demo of the HTML classes.
    print("TEST html classes")
    tp = p()
    html = Html()
    tp.add_content("this is p content")
    h1 = h(1)
    h1.add_content("これは第一タイトルです")
    html.add_content(h1)
    html.add_content(tp)
    # NOTE(review): the rendered string is discarded here; presumably this
    # was meant to be printed or written to a file -- confirm.
    html.print_content()
"ogino.ktai@gmail.com"
] | ogino.ktai@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.