blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M โ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e92b6a0a8f15c772f6a3f238232ce0d47afa3a9f | ee87e89befa0d4bf353dcf682b6467f9daaf657e | /src/foo_ext/setup_foo.py | 00cab0b82444aae83ea486fa9f58bec6a8b7de40 | [
"BSD-3-Clause",
"MIT"
] | permissive | umedoblock/fugou | 43046056ce5f20b81d76e3c8e3149717b63708ed | 45d95f20bba6f85764fb686081098d92fc8cdb20 | refs/heads/master | 2021-07-15T15:26:30.856753 | 2018-11-26T23:44:18 | 2018-11-26T23:44:18 | 152,105,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 639 | py | from distutils.core import setup, Extension
# module_camellia = Extension('camellia', sources = ['camellia/pycamellia.c'])
module_foo = \
Extension('_foo',
sources = ['foo/pyfoo.c'],
extra_link_args=['-Wl,-soname,build/lib.linux-i686-3.2-pydebug/_foo.cpython-32dm.so'])
# build/lib.linux-i686-3.2-pydebug/_foo.cpython-32dm.so
# extra_compile_args=[''])
setup( name = 'sample',
version = '0.0',
author = 'ๆข
ๆฟ้
(umedoblock)',
author_email = 'umedoblock@gmail.com',
url = 'empty',
description = 'This is a foo object package',
ext_modules = [module_foo])
| [
"devnull@localhost"
] | devnull@localhost |
7e046e16115c4a31cd8ee22f84097cb167323a1e | 9171beea7de1dd602422b3d6e3e38a0b0b259ff9 | /tests/test_foobar.py | 122cf40291a6720980f208869055ce8d0ac13490 | [] | no_license | macobo/testrepo | 8003b026837eaf866491946d953c2fe61b869729 | c1c121b051543522b69ca49249a689442a737973 | refs/heads/master | 2021-01-01T18:29:50.571302 | 2015-05-09T17:54:18 | 2015-05-09T17:54:18 | 35,281,790 | 0 | 0 | null | 2015-05-09T17:54:18 | 2015-05-08T13:48:04 | Python | UTF-8 | Python | false | false | 111 | py | from time import sleep
def test_this():
    # NOTE(review): the 30 s sleep dominates this test's runtime --
    # presumably deliberate in this test repo (CI-behaviour exercise);
    # confirm before removing.
    sleep(30)
    assert 1+2 == 3
# def test_failing(): assert 1 == 2
| [
"oxymaccy@gmail.com"
] | oxymaccy@gmail.com |
189e918e786df3ff66786de2365ae2f313200b19 | 475aa827d073c9e6a2745eb32698812160b9b7b5 | /Semana5/metodoslistas.py | b80b49f03f1205e00d65720b1535b3d39ee5d84a | [
"MIT"
] | permissive | jgualdr73793/Algoritmos_y_ProgramacionC3-C2 | b489db488326b4100f97e693cece566034bb34b4 | 02e12d765c34bd5391883fb15b5310dd7415e92d | refs/heads/main | 2023-08-11T10:33:58.773453 | 2021-09-15T16:33:54 | 2021-09-15T16:33:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 654 | py | #Mรฉtodos de las listas
#append()
#clear()
#extend() une una lista con otra
#count()
#index()
#insert(posicion,elemento)
#pop()
#remove()
#reverse()
#sort()
#.sort(reverse=True)
#lista
#tuplas
# Worked examples of Python list/tuple methods (classroom exercise).
# The commented-out calls are alternative methods left for students to try.
lista=["mango","limon","Coco","manzana","melon","mango"]# list
lista2=[3,2,3,4,5,61,7,1,9,10]
#tamaño=5
fruta="Banano"
lista.append(fruta)
tamano=len(lista)# 7 after the append
#lista.clear()
#lista3=lista.extend(lista2)
contador=lista.count("mango")
#p=lista.index("Melon")
lista.insert(0,"Pera")
#lista.pop(0)
#lista.remove("Coco")
#lista.reverse()
#lista2.sort(reverse=True)
#tamaño=
tuplas=("Laura","Camila","Gabriel","Lorena","Camila")# tuple
print(tuplas.count("Camila"))
| [
"noreply@github.com"
] | jgualdr73793.noreply@github.com |
31d6264158df83c60628ba128a22a79302ea9b27 | 04ca0a6ecf4a9c57454e4eaa2224a6204a356f78 | /python/codewars/sum_of_numbers_from_0_to_N.py | 41cd99c48310bcd46832f58506d997d15dd23e2c | [] | no_license | mkt-Do/codewars | 29cc74546dee24bdb5c97a0986fec6b69ce12774 | e145227187191aa70391b2ce58d21a744a781f26 | refs/heads/master | 2021-12-12T21:37:38.351140 | 2021-12-01T00:33:31 | 2021-12-01T00:33:31 | 169,721,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | def show_sequence(n):
return '{} = {}'.format('+'.join([str(i) for i in range(0, n + 1)]), sum(range(0, n + 1))) if n > 0 else '{}<0'.format(n) if n < 0 else '0=0'
| [
"mkt.koro.mikuru@gmail.com"
] | mkt.koro.mikuru@gmail.com |
76cd68ca722ef484c2c99e2d85befd4ed929e370 | 4072888be7d6b5584875f955649792f4094c46ce | /cartola_mn/settings.py | 87517e3bfb8e0341bf748cdfb0c6e0f38bc52f83 | [] | no_license | ikaromn/cartola-valorizacao | 8adfc96eae684d2620754ecea1d4dfc001e15303 | f85b58aa9e5449ecab3472b41911012a56fa6972 | refs/heads/master | 2022-12-09T01:53:21.338624 | 2020-02-12T00:36:22 | 2020-02-12T00:36:22 | 91,812,706 | 2 | 0 | null | 2022-04-22T21:18:07 | 2017-05-19T14:11:41 | Python | UTF-8 | Python | false | false | 3,150 | py | import os
import django_heroku
import dj_database_url
from prettyconf import config
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_DIR = os.path.join(BASE_DIR, "templates")
STATIC_DIR = os.path.join(BASE_DIR, "static")
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): an empty default means Django silently starts with an
# insecure SECRET_KEY when the env var is missing -- consider failing fast.
SECRET_KEY = config('SECRET_KEY', default='')
CARTOLA_TOKEN = config('CARTOLA_TOKEN')
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG is hard-coded True and ALLOWED_HOSTS is empty;
# both should come from the environment before deploying.
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.humanize',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cartola_mn.apps.player',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'cartola_mn.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
TEMPLATE_DIR,
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cartola_mn.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config('DATABASE_URL')
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
django_heroku.settings(locals())
| [
"ikaropinheiro6@gmail.com"
] | ikaropinheiro6@gmail.com |
ff10aab701873a6743c66ff43a452b141e61b2e3 | d153c170a4839deb4d8606009be15198418aea69 | /์๊ณ ๋ฆฌ์ฆํ์ด/21.07.09/๋ฒฝ๋ถ์๊ณ ์ด๋.py | 9a0c96664f8cbc835b7ed167735d13703b0e7b60 | [] | no_license | rlatmd0829/algorithm | 669085907e2243b4c3a663feab87cd83ff50cc49 | 116bebf16afa6e20d9e968aa312b99b8eea447a5 | refs/heads/master | 2023-08-21T02:27:36.944919 | 2021-09-26T09:39:52 | 2021-09-26T09:39:52 | 345,480,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,533 | py | # ์๊ฐ์ด๊ณผ
import collections
n, m = map(int, input().split())
graph = [list(map(int,input())) for _ in range(n)]
dx, dy = [-1,1,0,0], [0,0,-1,1]
def bfs(grid=None, rows=None, cols=None):
    """Shortest-path BFS on a 0/1 grid from (0, 0) to (rows-1, cols-1).

    Cells equal to 1 are walls; movement is 4-directional.  Falls back to
    the module-level ``graph``/``n``/``m`` when called with no arguments,
    so the existing ``bfs()`` call sites keep working unchanged.

    Returns the number of steps to the bottom-right cell, or 0 when it is
    unreachable (callers treat a 0 result as "no path").
    """
    if grid is None:
        grid, rows, cols = graph, n, m
    queue = collections.deque([(0, 0)])
    # distance[r][c] == 0 doubles as the "not visited yet" marker.
    distance = [[0] * cols for _ in range(rows)]
    while queue:
        x, y = queue.popleft()
        # Up, down, left, right -- same visit order as the original dx/dy.
        for step_x, step_y in ((-1, 0), (1, 0), (0, -1), (0, 1)):
            nx, ny = x + step_x, y + step_y
            if 0 <= nx < rows and 0 <= ny < cols:
                if grid[nx][ny] == 0 and distance[nx][ny] == 0:
                    distance[nx][ny] = distance[x][y] + 1
                    queue.append((nx, ny))
    return distance[rows - 1][cols - 1]
# Brute force: knock down each wall in turn and re-run a full BFS.
# O(n*m) BFS passes -- this is why the header marks the submission as
# time-limit-exceeded.
result = []
for i in range(n):
    for j in range(m):
        if graph[i][j] == 1:
            graph[i][j] = 0          # temporarily remove this wall
            demo = bfs()
            if demo != 0:            # 0 means the exit was unreachable
                result.append(demo)
            graph[i][j] = 1          # restore the wall
# +1 presumably converts a step count into a visited-cell count --
# TODO confirm against the problem statement.
if result:
    print(min(result)+1)
else:
    print(-1)
##################
from sys import stdin
from collections import deque
N,M = map(int, stdin.readline().split(" "))
map = [list(map(int, stdin.readline().strip())) for _ in range(N)]
# ์ขํ๊ณ์ฐ ์ํ ๋ฐฐ์ด
dx = [-1,1,0,0]
dy = [0,0,1,-1]
curMin = 1000000
def bfs():
    """State-BFS for the 'break at most one wall' maze problem.

    Each queue entry is (row, col, broken), where ``broken`` records
    whether the single allowed wall has already been knocked down.
    Reads the module-level ``N``/``M``/``map`` grid and dx/dy arrays,
    and folds the best distance to cell (N-1, M-1) into the global
    ``curMin``.  Distances count cells, starting from 1 at the entrance.
    """
    global curMin
    # Shortest-distance array, one layer per "broken" state; -1 = unvisited.
    distances = [[[-1]*2 for _ in range(M)] for _ in range(N)]
    # BFS queue of (row, col, broken) states.
    queue = deque()
    queue.append((0,0,0))
    distances[0][0][0] = 1
    while queue:
        x, y, broken = queue.popleft()
        for i in range(4):
            nx = x + dx[i]
            ny = y + dy[i]
            if 0 <= nx < N and 0 <= ny <M:
                # Open cell: move without breaking anything.
                if map[nx][ny] == 0 and distances[nx][ny][broken] == -1:
                    distances[nx][ny][broken] = distances[x][y][broken]+1
                    queue.append((nx,ny,broken))
                # Wall cell: enter only by breaking it, which requires
                # - no wall broken so far,
                # - the cell actually holds a wall,
                # - the "broken" layer not visited here yet.
                elif broken == 0 and map[nx][ny] == 1 and distances[nx][ny][1] == -1:
                    distances[nx][ny][1] = distances[x][y][0]+1
                    queue.append((nx,ny,1))
    if distances[N-1][M-1][0] != -1:
        curMin = min(curMin, distances[N-1][M-1][0])
    if distances[N-1][M-1][1] != -1:
        curMin = min(curMin, distances[N-1][M-1][1])
# Run the search once; curMin keeps its 1000000 sentinel when no route
# (with at most one broken wall) reaches the bottom-right cell.
bfs()
if curMin == 1000000:
    print(-1)
else:
    print(curMin)
"rlatmd0829@naver.com"
] | rlatmd0829@naver.com |
d09337dd6751a5bd7ead2f1cb07d835579b407f9 | 9781b61e35fce6c219f5359d8336ed87eac7a7d6 | /puppy_models.py | 05f0677a6ae0737ce7ce21dfc509006f1a825675 | [] | no_license | johnnyhperkins/OAuthPython | 68651f2b744fd33c60a90cc526f8a62db07cb0fb | 07b510ff7693638da7023a365165d3efa3c1f9d2 | refs/heads/master | 2020-04-12T00:51:43.016422 | 2018-12-18T01:46:27 | 2018-12-18T01:53:08 | 162,214,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 645 | py | from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
Base = declarative_base()
class Puppy(Base):
    """SQLAlchemy declarative model for one row of the ``puppy`` table."""
    __tablename__ = 'puppy'
    name =Column(String(80), nullable = False)
    id = Column(Integer, primary_key = True)
    description = Column(String(250))
    # Expose a read-only property that serializes a row for API responses.
    @property
    def serialize(self):
        """Return a JSON-serializable dict of this row's columns."""
        return {
            'id' : self.id,
            'name' : self.name,
            'description' : self.description
        }
# Create the SQLite database/tables declared above if they do not exist.
engine = create_engine('sqlite:///puppies.db')
Base.metadata.create_all(engine)
"Johnny@Johnnys-MacBook-Air.local"
] | Johnny@Johnnys-MacBook-Air.local |
6053712f6528d72f50dd12642f249150218a7d4c | 651a296c8f45b5799781fd78a6b5329effe702a0 | /bvec/bvec_print.py | a927d2db4dfdd041e9b0fa3dbdc83056ccf7b51a | [] | no_license | pdhhiep/Computation_using_Python | 095d14370fe1a01a192d7e44fcc81a52655f652b | 407ed29fddc267950e9860b8bbd1e038f0387c97 | refs/heads/master | 2021-05-29T12:35:12.630232 | 2015-06-27T01:05:17 | 2015-06-27T01:05:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,944 | py | #!/usr/bin/env python
def bvec_print ( n, bvec, title ) :

#*****************************************************************************80
#
## BVEC_PRINT prints a binary integer vector, with an optional title.
#
#  Discussion:
#
#    A BVEC is an integer vector of binary digits, intended to
#    represent an integer.  BVEC(1) is the units digit, BVEC(N-1)
#    is the coefficient of 2^(N-2), and BVEC(N) contains sign
#    information.  It is 0 if the number is positive, and 1 if
#    the number is negative.
#
#    The vector is printed "backwards", that is, the first entry
#    printed is BVEC(N).  Long vectors are wrapped at 70 digits per row.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Author:
#
#    John Burkardt
#
#  Parameters:
#
#    Input, integer N, the number of components of the vector.
#
#    Input, integer BVEC(N), the vector to be printed.
#
#    Input, string TITLE, a title to be printed first.
#    TITLE may be blank.
#
  if ( 0 < len ( title ) ):
    print ( '' )
    print ( title )

  for ihi in range ( n - 1, -1, -70 ):
    ilo = max ( ihi - 70, -1 )
#
#  Bug fix: the inner range must step by -1 down to ILO (exclusive).
#  The original passed ILO as the *step*, which produced an empty row
#  for every chunk when N > 70.  Prints are parenthesized single-arg
#  calls so the routine runs under both Python 2 and Python 3.
#
    row = ' '
    for i in range ( ihi, ilo, -1 ):
      row = row + ' %1d' % ( bvec[i] )
    print ( row )

  return
def bvec_print_test ( ):

#*****************************************************************************80
#
## BVEC_PRINT_TEST tests BVEC_PRINT.
#
#  Licensing:
#
#    This code is distributed under the GNU LGPL license.
#
#  Author:
#
#    John Burkardt
#
#  Fix: the original used Python-2-only print statements; the prints
#  below are parenthesized single-argument calls, which behave the same
#  under Python 2 and also run under Python 3.
#
  import numpy as np

  n = 10
  bvec = np.array ( [ 1, 0, 0, 1, 0, 1, 1, 1, 0, 0 ] )

  print ( '' )
  print ( 'BVEC_PRINT_TEST' )
  print ( '  BVEC_PRINT prints a binary vector.' )

  bvec_print ( n, bvec, '  BVEC:' )

  print ( '' )
  print ( 'BVEC_PRINT_TEST' )
  print ( '  Normal end of execution.' )

  return
if ( __name__ == '__main__' ):
from timestamp import timestamp
timestamp ( )
bvec_print_test ( )
timestamp ( )
| [
"siplukabir@gmail.com"
] | siplukabir@gmail.com |
1539d348092bab286434a5b073c5490382d7dffe | 9f4b1884273f995806c1e755665a92b785cc52a8 | /onnx/test/parser_test.py | 46604593e0c848bd177032dfeda4264980d26494 | [
"Apache-2.0"
] | permissive | zhijl/onnx | 340f7c5794a9aca96d2a9e76c3336aeebe798776 | ac0afea916f989c714692dd8551eff762a639cd5 | refs/heads/main | 2023-03-31T02:30:50.151799 | 2023-03-20T23:09:55 | 2023-03-20T23:09:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,295 | py | # SPDX-License-Identifier: Apache-2.0
import unittest
from parameterized import parameterized
import onnx
from onnx import GraphProto, OperatorSetIdProto, checker
class TestBasicFunctions(unittest.TestCase):
    """Round-trip tests for the ONNX textual-syntax parser
    (onnx.parser.parse_graph / parse_model / parse_function)."""
    def check_graph(self, graph: GraphProto) -> None:
        """Shared assertion: graph is the 3-node MatMul -> Add -> Softmax chain."""
        self.assertEqual(len(graph.node), 3)
        self.assertEqual(graph.node[0].op_type, "MatMul")
        self.assertEqual(graph.node[1].op_type, "Add")
        self.assertEqual(graph.node[2].op_type, "Softmax")
    def test_parse_graph(self) -> None:
        """A bare graph in textual syntax parses into the expected nodes."""
        # NOTE(review): 'input' shadows the builtin throughout this class;
        # harmless here, kept for byte-identical behavior.
        input = """
           agraph (float[N, 128] X, float[128,10] W, float[10] B) => (float[N] C)
           {
              T = MatMul(X, W)
              S = Add(T, B)
              C = Softmax(S)
           }
           """
        graph = onnx.parser.parse_graph(input)
        self.check_graph(graph)
    def test_parse_model(self) -> None:
        """A model header (ir_version, opset_import) plus graph parses fully."""
        input = """
           <
             ir_version: 7,
             opset_import: [ "" : 10, "com.microsoft": 1]
           >
           agraph (float[N, 128] X, float[128,10] W, float[10] B) => (float[N] C)
           {
              T = MatMul(X, W)
              S = Add(T, B)
              C = Softmax(S)
           }
           """
        model = onnx.parser.parse_model(input)
        self.assertEqual(model.ir_version, 7)
        self.assertEqual(len(model.opset_import), 2)
        self.check_graph(model.graph)
    def test_parse_graph_error(self) -> None:
        """Square brackets in place of a call ('MatMul[X, W]') must raise ParseError."""
        input = """
           agraph (float[N, 128] X, float[128,10] W, float[10] B) => (float[N] C)
           {
              T = MatMul[X, W]
              S = Add(T, B)
              C = Softmax(S)
           }
           """
        self.assertRaises(
            onnx.parser.ParseError, lambda: onnx.parser.parse_graph(input)
        )
    def test_parse_model_error(self) -> None:
        """A missing comma in opset_import must raise ParseError."""
        input = """
           <
             ir_version: 7,
             opset_import: [ "" : 10 "com.microsoft": 1]
           >
           agraph (float[N, 128] X, float[128,10] W, float[10] B) => (float[N] C)
           {
              T = MatMul(X, W)
              S = Add(T, B)
              C = Softmax(S)
           }
           """
        self.assertRaises(
            onnx.parser.ParseError, lambda: onnx.parser.parse_model(input)
        )
    def test_parse_function_with_attributes(self) -> None:
        """A model with a model-local function (Selu) and attribute refs checks out."""
        input = """
         <
           ir_version: 9,
           opset_import: [ "" : 15, "custom_domain" : 1],
           producer_name: "FunctionProtoTest",
           producer_version: "1.0",
           model_version: 1,
           doc_string: "A test model for model local functions."
         >
         agraph (float[N] x) => (float[N] out)
         {
            out = custom_domain.Selu<alpha=2.0, gamma=3.0>(x)
         }
         <
          domain: "custom_domain",
          opset_import: [ "" : 15],
          doc_string: "Test function proto"
         >
         Selu
         <alpha: float=1.67326319217681884765625, gamma: float=1.05070102214813232421875>
         (X) => (C)
         {
            constant_alpha = Constant<value_float: float=@alpha>()
            constant_gamma = Constant<value_float: float=@gamma>()
            alpha_x = CastLike(constant_alpha, X)
            gamma_x = CastLike(constant_gamma, X)
            exp_x = Exp(X)
            alpha_x_exp_x = Mul(alpha_x, exp_x)
            alpha_x_exp_x_ = Sub(alpha_x_exp_x, alpha_x)
            neg = Mul(gamma_x, alpha_x_exp_x_)
            pos = Mul(gamma_x, X)
            _zero = Constant<value_float=0.0>()
            zero = CastLike(_zero, X)
            less_eq = LessOrEqual(X, zero)
            C = Where(less_eq, neg, pos)
         }
         """
        model = onnx.parser.parse_model(input)
        checker.check_model(model)
    @parameterized.expand(
        [
            (
                "agraph (float[N] x) => (float[N] out) { out = custom_domain.Selu(x) }",
                {},
            ),
            (
                "agraph (float[N] x) => (float[N] out) { out = custom_domain.Selu<alpha=2.0>(x) }",
                {"alpha": 2.0},
            ),
            (
                "agraph (float[N] x) => (float[N] out) { out = custom_domain.Selu<gamma=3.0>(x) }",
                {"gamma": 3.0},
            ),
            (
                "agraph (float[N] x) => (float[N] out) { out = custom_domain.Selu<alpha=2.0, gamma=3.0>(x) }",
                {"alpha": 2.0, "gamma": 3.0},
            ),
        ]
    )
    def test_composite_parse_function_with_attributes(
        self, graph_text: str, expected_attribute: dict
    ) -> None:
        """Build a model from a parsed function + parsed graph; check node
        attributes override (or fall back to) the function defaults."""
        default_alpha = 1.67326319217681884765625
        default_gamma = 1.05070102214813232421875
        # Helper: each attribute named in `attributes` appears exactly once
        # on the node with the expected float value.
        def expect_custom_node_attribute(node, attributes):
            for key in attributes:
                match_attr = [attr for attr in node.attribute if attr.name == key]
                assert len(match_attr) == 1
                assert match_attr[0].f == attributes[key]
        # Helper: the parsed function carries both default attribute protos.
        def expect_model_function_attribute(model):
            assert len(model.functions[0].attribute_proto) == 2
            attr_proto_alpha = [
                attr_proto
                for attr_proto in model.functions[0].attribute_proto
                if attr_proto.name == "alpha"
            ]
            assert len(attr_proto_alpha) == 1 and attr_proto_alpha[0].f == default_alpha
            attr_proto_gamma = [
                attr_proto
                for attr_proto in model.functions[0].attribute_proto
                if attr_proto.name == "gamma"
            ]
            assert len(attr_proto_gamma) == 1 and attr_proto_gamma[0].f == default_gamma
        function_text = f"""
         <
         domain: "custom_domain",
         opset_import: [ "" : 15],
         doc_string: "Test function proto"
         >
         Selu
         <alpha: float={default_alpha}, gamma: float={default_gamma}>
         (X) => (C)
         {{
            constant_alpha = Constant<value_float: float=@alpha>()
            constant_gamma = Constant<value_float: float=@gamma>()
            alpha_x = CastLike(constant_alpha, X)
            gamma_x = CastLike(constant_gamma, X)
            exp_x = Exp(X)
            alpha_x_exp_x = Mul(alpha_x, exp_x)
            alpha_x_exp_x_ = Sub(alpha_x_exp_x, alpha_x)
            neg = Mul(gamma_x, alpha_x_exp_x_)
            pos = Mul(gamma_x, X)
            _zero = Constant<value_float=0.0>()
            zero = CastLike(_zero, X)
            less_eq = LessOrEqual(X, zero)
            C = Where(less_eq, neg, pos)
         }}
         """
        functions = [onnx.parser.parse_function(function_text)]
        graph = onnx.parser.parse_graph(graph_text)
        opset_imports = [
            OperatorSetIdProto(domain="", version=15),
            OperatorSetIdProto(domain="custom_domain", version=1),
        ]
        model = onnx.helper.make_model(
            graph, functions=functions, opset_imports=opset_imports
        )
        checker.check_model(model)
        expect_model_function_attribute(model)
        expect_custom_node_attribute(model.graph.node[0], expected_attribute)
if __name__ == "__main__":
unittest.main()
| [
"noreply@github.com"
] | zhijl.noreply@github.com |
0865f7c0f6bac11488dd9d842e0967fab198dc76 | 90094f652d0235307a445301ce6168697781736c | /tools/pfToolsTest.py | ce3bf84e01660e1b17ac7627ac2e3cdc51af3eae | [] | no_license | decosa/usercode | a1a7fe19e5645fa68518923bec73a2aed5fa983d | d1eb83e8649d49fad067f412a39031120c0f3a51 | refs/heads/master | 2020-05-16T21:10:35.196037 | 2012-11-23T17:33:54 | 2012-11-23T17:33:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47,950 | py | from FWCore.GuiBrowsers.ConfigToolBase import *
from PhysicsTools.PatAlgos.tools.coreTools import *
from PhysicsTools.PatAlgos.tools.jetTools import *
from PhysicsTools.PatAlgos.tools.tauToolsTest import *
from FWCore.ParameterSet.Modules import EDProducer
from RecoTauTag.RecoTau.TauDiscriminatorTools import adaptTauDiscriminator, producerIsTauTypeMapper
def warningIsolation():
    """Print a reminder that particle-based isolation still needs study."""
    # Parenthesized single-arg print behaves identically under Python 2
    # and also runs under Python 3 (the original was a py2-only statement).
    print("WARNING: particle based isolation must be studied")
class AdaptPFMuons(ConfigToolBase):
    """ Switch a patMuons producer to particle-flow inputs: PF-based
    isoDeposits / isolation value maps, and MC matching against the
    PF muon source instead of the standard muons.
    """
    _label='AdaptPFMuons'
    _defaultParameters={}
    def __init__(self):
        ConfigToolBase.__init__(self)
        # 'module': the patMuons producer to be adapted.
        self.addParameter(self._defaultParameters,'module',self._defaultValue, '',EDProducer)
        self._parameters=copy.deepcopy(self._defaultParameters)
        self._comment = ""
    def getDefaultParameters(self):
        return self._defaultParameters
    def dumpPython(self):
        """Return (import snippet, call snippet) reproducing this tool call."""
        dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
        dumpPython=''
        if self._comment!="":
            dumpPython = '#'+self._comment
        dumpPython += "\nadaptPFMuons(process, "
        dumpPython += str(self.getvalue('module'))+')'+'\n'
        return (dumpPythonImport,dumpPython)
    def __call__(self,process,
                 module=None) :
        if module is None:
            module=self._defaultParameters['module'].value
        self.setParameter('module',module)
        self.apply(process)
    def toolCode(self, process):
        module=self._parameters['module'].value
        print "Adapting PF Muons "
        print "***************** "
        warningIsolation()
        print
        module.useParticleFlow = True
        # Drop the reco-style isolation and plug in the PF-based maps.
        module.userIsolation = cms.PSet()
        module.isoDeposits = cms.PSet(
            pfChargedHadrons = cms.InputTag("isoDepMuonWithCharged"),
            pfNeutralHadrons = cms.InputTag("isoDepMuonWithNeutral"),
            pfPhotons = cms.InputTag("isoDepMuonWithPhotons")
            )
        module.isolationValues = cms.PSet(
            pfChargedHadrons = cms.InputTag("isoValMuonWithCharged"),
            pfNeutralHadrons = cms.InputTag("isoValMuonWithNeutral"),
            pfPhotons = cms.InputTag("isoValMuonWithPhotons")
            )
        # matching the pfMuons, not the standard muons.
        mcMuons = getattr(process,module.genParticleMatch.moduleLabel)
        mcMuons.src = module.pfMuonSource
        print " muon source:", module.pfMuonSource
        print " isolation :",
        print module.isolationValues
        print " isodeposits: "
        print module.isoDeposits
        print
adaptPFMuons=AdaptPFMuons()
class AdaptPFElectrons(ConfigToolBase):
    """ Switch a patElectrons producer to particle-flow inputs (PF-based
    isoDeposits / isolation value maps) and remove the traditional
    patElectronIsolation step from the default sequence.
    """
    _label='AdaptPFElectrons'
    _defaultParameters={}
    def __init__(self):
        ConfigToolBase.__init__(self)
        # 'module': the patElectrons producer to be adapted;
        # 'l1Collection': label used to decide which sequence to clean up.
        self.addParameter(self._defaultParameters,'module',self._defaultValue, '',EDProducer)
        self.addParameter(self._defaultParameters,'l1Collection',cms.InputTag("patElectrons"), '')
        self._parameters=copy.deepcopy(self._defaultParameters)
        self._comment = ""
    def getDefaultParameters(self):
        return self._defaultParameters
    def dumpPython(self):
        """Return (import snippet, call snippet) reproducing this tool call."""
        dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
        dumpPython=''
        if self._comment!="":
            dumpPython = '#'+self._comment
        dumpPython += "\nadaptPFElectrons(process, "
        dumpPython += str(self.getvalue('module'))+', '
        dumpPython += str(self.getvalue('l1Collection'))+')'+'\n'
        return (dumpPythonImport,dumpPython)
    def __call__(self,process,
                 module = None,
                 l1Collection = None) :
        if module is None:
            module=self._defaultParameters['module'].value
        if l1Collection is None:
            l1Collection = self._defaultParameters['l1Collection'].value
        self.setParameter('module',module)
        self.setParameter('l1Collection',l1Collection)
        self.apply(process)
    def toolCode(self, process):
        module=self._parameters['module'].value
        l1Collection=self._parameters['l1Collection'].value
        # module.useParticleFlow = True
        print "Adapting PF Electrons "
        print "********************* "
        warningIsolation()
        print
        module.useParticleFlow = True
        # Drop the reco-style isolation and plug in the PF-based maps.
        module.userIsolation = cms.PSet()
        module.isoDeposits = cms.PSet(
            pfChargedHadrons = cms.InputTag("isoDepElectronWithCharged"),
            pfNeutralHadrons = cms.InputTag("isoDepElectronWithNeutral"),
            pfPhotons = cms.InputTag("isoDepElectronWithPhotons")
            )
        module.isolationValues = cms.PSet(
            pfChargedHadrons = cms.InputTag("isoValElectronWithCharged"),
            pfNeutralHadrons = cms.InputTag("isoValElectronWithNeutral"),
            pfPhotons = cms.InputTag("isoValElectronWithPhotons")
            )
        # COLIN: since we take the egamma momentum for pat Electrons, we must
        # match the egamma electron to the gen electrons, and not the PFElectron.
        # -> do not uncomment the line below.
        # process.electronMatch.src = module.pfElectronSource
        # COLIN: how do we depend on this matching choice?
        print " PF electron source:", module.pfElectronSource
        print " isolation :"
        print module.isolationValues
        print " isodeposits: "
        print module.isoDeposits
        print
        print "removing traditional isolation"
        if (l1Collection.moduleLabel=="patElectrons"):
            process.patDefaultSequence.remove(getattr(process, 'patElectronIsolation'))
##         print "Temporarily switching off isolation & isoDeposits for PF Electrons"
##         module.isolation = cms.PSet()
##         module.isoDeposits = cms.PSet()
##         print "Temporarily switching off electron ID for PF Electrons"
##         module.isolation = cms.PSet()
##         module.addElectronID = False
##         if module.embedTrack.value():
##             module.embedTrack = False
##             print "Temporarily switching off electron track embedding"
##         if module.embedGsfTrack.value():
##             module.embedGsfTrack = False
##             print "Temporarily switching off electron gsf track embedding"
##         if module.embedSuperCluster.value():
##             module.embedSuperCluster = False
##             print "Temporarily switching off electron supercluster embedding"
adaptPFElectrons=AdaptPFElectrons()
class AdaptPFPhotons(ConfigToolBase):
    """ Placeholder tool for adapting pat::Photons to particle flow;
    toolCode always raises, since PF photons are not supported here.
    """
    _label='AdaptPFPhotons'
    _defaultParameters={}
    def __init__(self):
        ConfigToolBase.__init__(self)
        # 'module': the patPhotons producer (unused -- see toolCode).
        self.addParameter(self._defaultParameters,'module',self._defaultValue,'',EDProducer)
        self._parameters=copy.deepcopy(self._defaultParameters)
        self._comment = ""
    def getDefaultParameters(self):
        return self._defaultParameters
    def dumpPython(self):
        """Return (import snippet, call snippet) reproducing this tool call."""
        dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
        dumpPython=''
        if self._comment!="":
            dumpPython = '#'+self._comment
        dumpPython += "\nadaptPFPhotons(process, "
        dumpPython += str(self.getvalue('module'))+')'+'\n'
        return (dumpPythonImport,dumpPython)
    def __call__(self,process,module=None) :
        if module is None:
            module=self._defaultParameters['module'].value
        self.setParameter('module',module)
        self.apply(process)
    def toolCode(self, process):
        module=self._parameters['module'].value
        # Python-2-only raise syntax, kept as-is.
        raise RuntimeError, "Photons are not supported yet"
adaptPFPhotons=AdaptPFPhotons()
class ReconfigureLayer0Taus(ConfigToolBase):
"""
"""
_label='ReconfigureLayer0Taus'
_defaultParameters={}
def __init__(self):
ConfigToolBase.__init__(self)
self.addParameter(self._defaultParameters,'moduleL0',self._defaultValue, "", Type=cms.EDProducer)
self.addParameter(self._defaultParameters,'tauType','shrinkingConePFTau', "")
self.addParameter(self._defaultParameters,'layer0Selection',["DiscriminationByIsolation", "DiscriminationByLeadingPionPtCut"], "")
self.addParameter(self._defaultParameters,'selectionDependsOn',["DiscriminationByLeadingTrackFinding"], "")
self.addParameter(self._defaultParameters,'producerFromType',lambda producer: producer+"Producer", "")
self._parameters=copy.deepcopy(self._defaultParameters)
self._comment = ""
def getDefaultParameters(self):
return self._defaultParameters
def dumpPython(self):
dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
dumpPython=''
if self._comment!="":
dumpPython = '#'+self._comment
dumpPython = "\nreconfigureLayer0Taus(process, "
dumpPython += str(self.getvalue('moduleL0'))+")"+", "
dumpPython += "'"+str(self.getvalue('tauType'))+"'"+", "
dumpPython += str(self.getvalue('layer0Selection'))+", "
dumpPython += str(self.getvalue('selectionDependsOn'))+", "
dumpPython += "'"+str(self.getvalue('producerFromType'))+"'"+")"+'\n'
return (dumpPythonImport,dumpPython)
def __call__(self,process,
moduleL0 = None,
tauType = None,
layer0Selection = None,
selectionDependsOn = None,
producerFromType = None):
if moduleL0 is None:
moduleL0=self._defaultParameters['moduleL0'].value
if tauType is None:
tauType=self._defaultParameters['tauType'].value
if layer0Selection is None:
layer0Selection=self._defaultParameters['layer0Selection'].value
if selectionDependsOn is None:
selectionDependsOn=self._defaultParameters['selectionDependsOn'].value
if producerFromType is None:
producerFromType=self._defaultParameters['producerFromType'].value
self.setParameter('moduleL0',moduleL0)
self.setParameter('tauType',tauType)
self.setParameter('layer0Selection',layer0Selection)
self.setParameter('selectionDependsOn',selectionDependsOn)
self.setParameter('producerFromType',producerFromType)
self.apply(process)
def toolCode(self, process):
moduleL0=self._parameters['moduleL0'].value
tauType=self._parameters['tauType'].value
layer0Selection=self._parameters['layer0Selection'].value
selectionDependsOn=self._parameters['selectionDependsOn'].value
producerFromType=self._parameters['producerFromType'].value
print "patTaus will be produced from taus of type: %s that pass %s" \
% (tauType, layer0Selection)
# Get the prototype of tau producer to make, i.e. fixedConePFTauProducer
producerName = producerFromType(tauType)
# Set as the source for the all layer0 taus selector
moduleL0.src = producerName
# Start our layer0 base sequence
process.allLayer0TausBaseSequence = cms.Sequence(getattr(process,
producerName))
# Get our prediscriminants
for predisc in selectionDependsOn:
# Get the prototype
originalName = tauType+predisc # i.e. fixedConePFTauProducerDiscriminationByLeadingTrackFinding
clonedName = "allLayer0TausBase"+predisc
clonedDisc = getattr(process, originalName).clone()
# Register in our process
setattr(process, clonedName, clonedDisc)
process.allLayer0TausBaseSequence += getattr(process, clonedName)
# Adapt this discriminator for the cloned prediscriminators
adaptTauDiscriminator(clonedDisc, newTauProducer="allLayer0TausBase",
newTauTypeMapper=producerIsTauTypeMapper,
preservePFTauProducer=True)
# Reconfigure the layer0 PFTau selector discrimination sources
moduleL0.discriminators = cms.VPSet()
for selection in layer0Selection:
# Get our discriminator that will be used to select layer0Taus
originalName = tauType+selection
clonedName = "allLayer0TausBase"+selection
clonedDisc = getattr(process, originalName).clone()
# Register in our process
setattr(process, clonedName, clonedDisc)
# Adapt our cloned discriminator to the new prediscriminants
adaptTauDiscriminator(clonedDisc, newTauProducer="allLayer0TausBase",
newTauTypeMapper=producerIsTauTypeMapper, preservePFTauProducer=True)
process.allLayer0TausBaseSequence += clonedDisc
# Add this selection to our layer0Tau selectors
moduleL0.discriminators.append(cms.PSet(
discriminator=cms.InputTag(clonedName), selectionCut=cms.double(0.5)))
reconfigureLayer0Taus=ReconfigureLayer0Taus()
class AdaptPFTaus(ConfigToolBase):
    """ Re-wire patTaus onto a chosen PFTau type via the layer-0 tau
    selector, re-running the discriminants on the selected collection.
    """
    _label='AdaptPFTaus'
    _defaultParameters={}
    def __init__(self):
        ConfigToolBase.__init__(self)
        self.addParameter(self._defaultParameters,'tauType',"shrinkingConePFTau", '')
        self.addParameter(self._defaultParameters,'l0tauColl',cms.InputTag("allLayer0Taus"), '')
        self._parameters=copy.deepcopy(self._defaultParameters)
        self._comment = ""
    def getDefaultParameters(self):
        return self._defaultParameters
    def dumpPython(self):
        dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
        dumpPython=''
        if self._comment!="":
            dumpPython = '#'+self._comment
        dumpPython += "\nadaptPFTaus(process, "
        # FIXME(review): unbalanced quoting -- tauType gets an opening '"'
        # but no closing one, so the dumped call is not valid Python
        # (compare dumpPython in the sibling tool classes).
        dumpPython += '"'+str(self.getvalue('tauType'))+','
        dumpPython += '"'+str(self.getvalue('l0tauColl'))+'"'+')'+'\n'
        return (dumpPythonImport,dumpPython)
    def __call__(self,process,
                 tauType = None,
                 l0tauColl = None) :
        if tauType is None:
            tauType=self._defaultParameters['tauType'].value
        if l0tauColl is None:
            l0tauColl=self._defaultParameters['l0tauColl'].value
        self.setParameter('tauType',tauType)
        self.setParameter('l0tauColl',l0tauColl)
        self.apply(process)
    def toolCode(self, process):
        tauType=self._parameters['tauType'].value
        l0tauColl=self._parameters['l0tauColl'].value
        moduleL0 = getattr(process,l0tauColl.moduleLabel)
        oldTaus = moduleL0.src
        # Set up the collection used as a preselection to use this tau type
        reconfigureLayer0Taus(process,moduleL0, tauType)
        # FIXME(review): 'module' is never defined in this method (this tool
        # has no 'module' parameter, unlike AdaptPFMuons/AdaptPFElectrons),
        # so the lines below raise NameError when toolCode runs.
        module.tauSource = l0tauColl
        redoPFTauDiscriminators(process,
                                cms.InputTag(tauType+'Producer'),
                                module.tauSource,
                                tauType,
                                l0tauCollection=l0tauColl)
        #switchToAnyPFTau(process, oldTaus, process.patTaus.tauSource, tauType)
        switchToPFTauByType(process,module, pfTauType=tauType,
                            pfTauLabelNew=module.tauSource,
                            pfTauLabelOld=oldTaus)
        # NOTE(review): drops patPFCandidateIsoDepositSelection from the
        # relevant sequence; presumably redundant once taus come from the
        # chosen layer-0 collection -- confirm.
        if (l0tauColl.moduleLabel=="allLayer0Taus"):
            process.makePatTaus.remove(process.patPFCandidateIsoDepositSelection)
        if (l0tauColl.moduleLabel=="pfLayer0Taus"):
            process.PF2PAT.remove(process.patPFCandidateIsoDepositSelection)
adaptPFTaus=AdaptPFTaus()
class TauTypeInPF2PAT(ConfigToolBase):
    """ Helper for PAT on PF2PAT sample

    Loads the pfTaus configuration and points the layer-0 tau selection
    at the producer of the requested PFTau type.
    """
    _label='TauTypeInPF2PAT'
    _defaultParameters={}
    def __init__(self):
        ConfigToolBase.__init__(self)
        self.addParameter(self._defaultParameters,'tauType','shrinkingConePFTau', "")
        self._parameters=copy.deepcopy(self._defaultParameters)
        self._comment = ""
    def getDefaultParameters(self):
        return self._defaultParameters
    def dumpPython(self):
        dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
        dumpPython=''
        if self._comment!="":
            dumpPython = '#'+self._comment
        # BUG FIX: use '+=' so a preceding comment line is not overwritten
        # (every other tool in this file appends here).
        dumpPython += "\ntauTypeInPF2PAT(process, "
        dumpPython += "'"+str(self.getvalue('tauType'))+"'"+")"+'\n'
        return (dumpPythonImport,dumpPython)
    def __call__(self,process,
                 tauType = None):
        if tauType is None:
            tauType=self._defaultParameters['tauType'].value
        self.setParameter('tauType',tauType)
        self.apply(process)
    def toolCode(self, process):
        tauType=self._parameters['tauType'].value
        process.load("PhysicsTools.PFCandProducer.pfTaus_cff")
        process.allLayer0Taus.src = cms.InputTag(tauType+'Producer')
tauTypeInPF2PAT=TauTypeInPF2PAT()
class AddPFCandidates(ConfigToolBase):
    """ Add a collection of PF candidates as pat::PFParticles.

    Creates a producer/selector/counter triplet named
    <layer|selected|counted>+<patLabel> and wires it into either the
    standard PAT sequences (layer == 'pat') or the PF2PAT ones
    (layer == 'pfPat').
    """
    _label='AddPFCandidates'
    _defaultParameters={}
    def __init__(self):
        ConfigToolBase.__init__(self)
        self.addParameter(self._defaultParameters,'src',self._defaultValue, '', cms.InputTag)
        self.addParameter(self._defaultParameters,'patLabel','PFParticles', '')
        self.addParameter(self._defaultParameters,'cut',"", '')
        self.addParameter(self._defaultParameters,'layer',"pat", '')
        self.addParameter(self._defaultParameters,'selected',"selectedPat", '')
        self.addParameter(self._defaultParameters,'counted',"countPat", '')
        self._parameters=copy.deepcopy(self._defaultParameters)
        self._comment = ""
    def getDefaultParameters(self):
        return self._defaultParameters
    def dumpPython(self):
        dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
        dumpPython=''
        if self._comment!="":
            dumpPython = '#'+self._comment
        dumpPython += "\naddPFCandidates(process, "
        dumpPython += str(self.getvalue('src'))+ ", "
        dumpPython += '"'+str(self.getvalue('patLabel'))+'"'+', '
        dumpPython += '"'+str(self.getvalue('cut'))+'"'+', '
        dumpPython += '"'+str(self.getvalue('layer'))+'"'+', '
        dumpPython += '"'+str(self.getvalue('selected'))+'"'+', '
        dumpPython += '"'+str(self.getvalue('counted'))+'"'+')'+'\n'
        return (dumpPythonImport,dumpPython)
    def __call__(self,process,
                 src      = None,
                 patLabel = None,
                 cut      = None,
                 layer    = None,
                 selected = None,
                 counted  = None) :
        if src is None:
            src=self._defaultParameters['src'].value
        if patLabel is None:
            patLabel=self._defaultParameters['patLabel'].value
        if cut is None:
            cut=self._defaultParameters['cut'].value
        if layer is None:
            layer=self._defaultParameters['layer'].value
        if selected is None:
            selected=self._defaultParameters['selected'].value
        if counted is None:
            counted=self._defaultParameters['counted'].value
        self.setParameter('src',src)
        self.setParameter('patLabel',patLabel)
        self.setParameter('cut',cut)
        self.setParameter('layer',layer)
        self.setParameter('selected',selected)
        self.setParameter('counted',counted)
        self.apply(process)
    def toolCode(self, process):
        src=self._parameters['src'].value
        patLabel =self._parameters['patLabel'].value
        cut=self._parameters['cut'].value
        layer=self._parameters['layer'].value
        selected=self._parameters['selected'].value
        counted=self._parameters['counted'].value
        from PhysicsTools.PatAlgos.producersLayer1.pfParticleProducer_cfi import patPFParticles
        # make modules ('selector' was previously named 'filter', shadowing
        # the Python builtin)
        producer = patPFParticles.clone(pfCandidateSource = src)
        selector = cms.EDFilter("PATPFParticleSelector",
                                src = cms.InputTag(layer + patLabel),
                                cut = cms.string(cut))
        counter = cms.EDFilter("PATCandViewCountFilter",
                               minNumber = cms.uint32(0),
                               maxNumber = cms.uint32(999999),
                               src = cms.InputTag(selected + patLabel))
        # add modules to process
        setattr(process, layer + patLabel, producer)
        setattr(process, selected + patLabel, selector)
        setattr(process, counted + patLabel, counter)
        # insert into sequence
        if (layer=='pat'):
            process.patCandidates.replace(process.patCandidateSummary, producer+process.patCandidateSummary)
            process.selectedPatCandidates.replace(process.selectedPatCandidateSummary, selector + process.selectedPatCandidateSummary)
            process.countPatCandidates += counter
            # summary tables
            process.patCandidateSummary.candidates.append(cms.InputTag('pat' + patLabel))
            process.selectedPatCandidateSummary.candidates.append(cms.InputTag('selectedPat' + patLabel))
        if (layer=='pfPat'):
            process.pfPatCandidates.replace(process.pfPatCandidateSummary, producer + process.pfPatCandidateSummary)
            process.pfSelectedPatCandidates.replace(process.pfSelectedPatCandidateSummary, selector + process.pfSelectedPatCandidateSummary)
            process.pfCountPatCandidates += counter
addPFCandidates= AddPFCandidates()
class SwitchToPFMET(ConfigToolBase):
"""
"""
_label='SwitchToPFMET'
_defaultParameters={}
def __init__(self):
ConfigToolBase.__init__(self)
self.addParameter(self._defaultParameters,'input',cms.InputTag('pfMET'), '')
self.addParameter(self._defaultParameters,'metColl',cms.InputTag('patAK5CaloMETs'), '')
self._parameters=copy.deepcopy(self._defaultParameters)
self._comment = ""
def getDefaultParameters(self):
return self._defaultParameters
def dumpPython(self):
dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
dumpPython=''
if self._comment!="":
dumpPython = '#'+self._comment
dumpPython += "\nswitchToPFMET(process, "
dumpPython += str(self.getvalue('input'))+','
dumpPython += str(self.getvalue('metColl'))+')'+'\n'
return (dumpPythonImport,dumpPython)
def __call__(self,process,input=None) :
if input is None:
input=self._defaultParameters['input'].value
if metColl is None:
metColl = self._defaultParameters['metColl'].value
self.setParameter('input',input)
self.setParameter('metColl',metColl)
self.apply(process)
def toolCode(self, process):
input=self._parameters['input'].value
metColl=self._parameters['metColl'].value
print 'MET: using ', input
module = getattr(process,metColl.moduleLabel)
oldMETSource = module.metSource
module.metSource = input
module.addMuonCorrections = False
if (metColl.moduleLabel=='patAK5CaloMETs'):
process.patDefaultSequence.remove(process.patMETCorrections)
switchToPFMET=SwitchToPFMET()
class SwitchToPFJets(ConfigToolBase):
"""
"""
_label='SwitchToPFJets'
_defaultParameters={}
def __init__(self):
ConfigToolBase.__init__(self)
self.addParameter(self._defaultParameters,'input',cms.InputTag('pfNoTau'), '')
self.addParameter(self._defaultParameters,'algo','IC5', '')
self.addParameter(self._defaultParameters,'l1jetColl',cms.InputTag(jetCollectionString()), '')
self._parameters=copy.deepcopy(self._defaultParameters)
self._comment = ""
def getDefaultParameters(self):
return self._defaultParameters
def dumpPython(self):
dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
dumpPython=''
if self._comment!="":
dumpPython = '#'+self._comment
dumpPython += "\nswitchToPFJets(process, "
dumpPython += str(self.getvalue('input'))+','
dumpPython += str(self.getvalue('algo'))+','
dumpPython += str(self.getvalue('l1jetColl'))+')'+'\n'
return (dumpPythonImport,dumpPython)
def __call__(self,process,input=None) :
if input is None:
input=self._defaultParameters['input'].value
if algo is None:
algo=self._defaultParameters['algo'].value
if l1jetColl is None:
l1jetColl=self._defaultParameters['l1jetColl'].value
self.setParameter('input',input)
self.setParameter('algo',algo)
self.setParameter('l1jetColl',l1jetColl)
self.apply(process)
def toolCode(self, process):
input=self._parameters['input'].value
algo=self._parameters['algo'].value
l1jetColl=self._parameters['l1jetColl'].value
print "Switching to PFJets, ", algo
print "************************ "
if( algo == 'IC5' ):
genJetCollectionName = 'iterativeCone5GenJetsNoNu'
elif algo == 'AK5':
genJetCollectionName = 'ak5GenJetsNoNu'
else:
print 'bad jet algorithm:', algo, '! for now, only IC5 and AK5 are allowed. If you need other algorithms, please contact Colin'
sys.exit(1)
# changing the jet collection in PF2PAT:
from PhysicsTools.PFCandProducer.Tools.jetTools import jetAlgo
process.allPfJets = jetAlgo( algo );
switchJetCollection(process,
input,
algo,
'PFlow',
doJTA=True,
doBTagging=True,
jetCorrLabel=( algo, 'PF' ),
doType1MET=False,
doL1Cleaning = False,
doL1Counters = False,
genJetCollection = genJetCollectionName,
doJetID =True
)
l1jets = getattr(process,l1jetColl.moduleLabel)
l1jets.embedCaloTowers = False
# l1jets.embedPFCandidates = True
switchToPFJets=SwitchToPFJets()
class UsePF2PAT(ConfigToolBase):
    # PLEASE DO NOT CLOBBER THIS FUNCTION WITH CODE SPECIFIC TO A GIVEN PHYSICS OBJECT.
    # CREATE ADDITIONAL FUNCTIONS IF NEEDED.
    """Switch PAT to use PF2PAT instead of AOD sources. if 'runPF2PAT' is true, we'll also add PF2PAT in front of the PAT sequence
    """
    _label='UsePF2PAT'
    _defaultParameters={}
    def __init__(self):
        ConfigToolBase.__init__(self)
        self.addParameter(self._defaultParameters,'runPF2PAT',True, '')
        self.addParameter(self._defaultParameters,'jetAlgo','IC5', '')
        self._parameters=copy.deepcopy(self._defaultParameters)
        self._comment = ""
    def getDefaultParameters(self):
        return self._defaultParameters
    def dumpPython(self):
        # Return (import statement, call statement) reproducing this tool
        # invocation in a dumped configuration.
        dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
        dumpPython=''
        if self._comment!="":
            dumpPython = '#'+self._comment
        dumpPython += "\nusePF2PAT(process, "
        dumpPython += str(self.getvalue('runPF2PAT'))+','
        dumpPython += '"'+str(self.getvalue('jetAlgo'))+ '"'+')'+'\n'
        return (dumpPythonImport,dumpPython)
    def __call__(self,process,
                 runPF2PAT = None,
                 jetAlgo = None) :
        if runPF2PAT is None:
            runPF2PAT=self._defaultParameters['runPF2PAT'].value
        if jetAlgo is None:
            jetAlgo=self._defaultParameters['jetAlgo'].value
        self.setParameter('runPF2PAT',runPF2PAT)
        self.setParameter('jetAlgo',jetAlgo)
        self.apply(process)
    def toolCode(self, process):
        # Mutates 'process' in place: prepends the PF2PAT sequence (if
        # requested) and repoints every PAT object producer at PF sources.
        # The statement order below is significant.
        runPF2PAT=self._parameters['runPF2PAT'].value
        jetAlgo=self._parameters['jetAlgo'].value
        # -------- CORE ---------------
        if runPF2PAT:
            process.load("PhysicsTools.PFCandProducer.PF2PAT_cff")
            #process.dump = cms.EDAnalyzer("EventContentAnalyzer")
            process.patDefaultSequence.replace(process.patCandidates, process.PF2PAT+process.patCandidates)
        removeCleaning(process)
        # -------- OBJECTS ------------
        # Muons
        adaptPFMuons(process,process.patMuons)
        # Electrons
        adaptPFElectrons(process,process.patElectrons)
        # Photons
        print "Temporarily switching off photons completely"
        removeSpecificPATObjects(process,['Photons'])
        process.patDefaultSequence.remove(process.patPhotonIsolation)
        # Jets
        switchToPFJets( process, cms.InputTag('pfNoTau'), jetAlgo )
        # Taus
        #adaptPFTaus( process ) #default (i.e. shrinkingConePFTau)
        adaptPFTaus( process,process.patTaus, tauType='fixedConePFTau' )
        # MET
        switchToPFMET(process, cms.InputTag('pfMET'))
        # Unmasked PFCandidates
        addPFCandidates(process,cms.InputTag('pfNoJet'),patLabel='PFParticles',cut="")
usePF2PAT=UsePF2PAT()
class UsePATandPF2PAT(ConfigToolBase):
    # PLEASE DO NOT CLOBBER THIS FUNCTION WITH CODE SPECIFIC TO A GIVEN PHYSICS OBJECT.
    # CREATE ADDITIONAL FUNCTIONS IF NEEDED.
    """Run the standard AOD-based PAT and a parallel PF2PAT-based chain in
    the same process: clones the layer-1 producers, selectors and count
    filters with a 'pf' prefix and assembles them into
    process.PFPATafterPAT.
    """
    _label= 'UsePATandPF2PAT'
    _defaultParameters={}
    def __init__(self):
        ConfigToolBase.__init__(self)
        self.addParameter(self._defaultParameters,'runPATandPF2PAT',True, '')
        self.addParameter(self._defaultParameters,'jetAlgo','IC5', '')
        self._parameters=copy.deepcopy(self._defaultParameters)
        self._comment = ""
    def getDefaultParameters(self):
        return self._defaultParameters
    def dumpPython(self):
        dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
        dumpPython=''
        if self._comment!="":
            dumpPython = '#'+self._comment
        dumpPython += "\nusePATandPF2PAT(process, "
        dumpPython += str(self.getvalue('runPATandPF2PAT'))+','
        dumpPython += '"'+str(self.getvalue('jetAlgo'))+'"'+')'+'\n'
        return (dumpPythonImport,dumpPython)
    def __call__(self,process,
                 runPATandPF2PAT = None,
                 jetAlgo = None) :
        if runPATandPF2PAT is None:
            runPATandPF2PAT=self._defaultParameters['runPATandPF2PAT'].value
        if jetAlgo is None:
            jetAlgo=self._defaultParameters['jetAlgo'].value
        self.setParameter('runPATandPF2PAT',runPATandPF2PAT)
        self.setParameter('jetAlgo',jetAlgo)
        self.apply(process)
    def toolCode(self, process):
        # Builds the whole 'pf*' clone chain in place. The statement order
        # below is significant: producers must exist before the selectors,
        # sequences and tool calls that reference them.
        runPATandPF2PAT=self._parameters['runPATandPF2PAT'].value
        jetAlgo=self._parameters['jetAlgo'].value
        if runPATandPF2PAT:
            process.load("PhysicsTools.PFCandProducer.PF2PAT_cff")
        #LAYER1
        # #ELECTRONS
        import PhysicsTools.PatAlgos.producersLayer1.electronProducer_cfi
        process.pfPatElectrons=PhysicsTools.PatAlgos.producersLayer1.electronProducer_cfi.patElectrons.clone()
        adaptPFElectrons(process,process.pfPatElectrons, cms.InputTag("pfPatElectrons"))
        # #MUONS
        import PhysicsTools.PatAlgos.producersLayer1.muonProducer_cfi
        process.pfPatMuons=PhysicsTools.PatAlgos.producersLayer1.muonProducer_cfi.patMuons.clone()
        import PhysicsTools.PatAlgos.mcMatchLayer0.muonMatch_cfi
        process.pfMuonMatch=PhysicsTools.PatAlgos.mcMatchLayer0.muonMatch_cfi.muonMatch.clone()
        process.pfPatMuons.genParticleMatch=cms.InputTag("pfMuonMatch")
        adaptPFMuons(process,process.pfPatMuons)
        # #TAUS
        from PhysicsTools.PFCandProducer.pfTaus_cff import allLayer0Taus
        process.pfLayer0Taus=allLayer0Taus.clone()
        process.pfTauSequence.replace(process.allLayer0Taus,
                                      process.pfLayer0Taus)
        process.pfNoTau.topCollection=cms.InputTag("pfLayer0Taus")
        import PhysicsTools.PatAlgos.producersLayer1.tauProducer_cfi
        process.pfPatTaus=PhysicsTools.PatAlgos.producersLayer1.tauProducer_cfi.patTaus.clone()
        import PhysicsTools.PatAlgos.mcMatchLayer0.tauMatch_cfi
        process.pfTauMatch =PhysicsTools.PatAlgos.mcMatchLayer0.tauMatch_cfi.tauMatch.clone()
        import PhysicsTools.PatAlgos.mcMatchLayer0.tauMatch_cfi
        process.pfTauGenJetMatch =PhysicsTools.PatAlgos.mcMatchLayer0.tauMatch_cfi.tauGenJetMatch.clone()
        # NOTE(review): this wildcard import only works because the dotted
        # 'PhysicsTools...' references below rely on the package imports above.
        from PhysicsTools.PatAlgos.recoLayer0.tauIsolation_cff import *
        process.pfTauIsoDepositPFCandidates=PhysicsTools.PatAlgos.recoLayer0.tauIsolation_cff.tauIsoDepositPFCandidates.clone()
        process.pfTauIsoDepositPFChargedHadrons = PhysicsTools.PatAlgos.recoLayer0.tauIsolation_cff.tauIsoDepositPFChargedHadrons.clone()
        process.pfTauIsoDepositPFNeutralHadrons = PhysicsTools.PatAlgos.recoLayer0.tauIsolation_cff.tauIsoDepositPFNeutralHadrons.clone()
        process.pfTauIsoDepositPFGammas = PhysicsTools.PatAlgos.recoLayer0.tauIsolation_cff.tauIsoDepositPFGammas.clone()
        process.pfPatTaus.isoDeposits.pfAllParticles = cms.InputTag("pfTauIsoDepositPFCandidates")
        process.pfPatTaus.isoDeposits.pfChargedHadron = cms.InputTag("pfTauIsoDepositPFChargedHadrons")
        process.pfPatTaus.isoDeposits.pfNeutralHadron = cms.InputTag("pfTauIsoDepositPFNeutralHadrons")
        process.pfPatTaus.isoDeposits.pfGamma = cms.InputTag("pfTauIsoDepositPFGammas")
        process.pfPatTaus.userIsolation.pfAllParticles.src = cms.InputTag("pfTauIsoDepositPFCandidates")
        process.pfPatTaus.userIsolation.pfChargedHadron.src = cms.InputTag("pfTauIsoDepositPFChargedHadrons")
        process.pfPatTaus.userIsolation.pfNeutralHadron.src = cms.InputTag("pfTauIsoDepositPFNeutralHadrons")
        process.pfPatTaus.userIsolation.pfGamma.src = cms.InputTag("pfTauIsoDepositPFGammas")
        process.pfPatTaus.genParticleMatch = cms.InputTag("pfTauMatch")
        process.pfPatTaus.genJetMatch = cms.InputTag("pfTauGenJetMatch")
        adaptPFTaus( process,process.pfPatTaus,tauType='fixedConePFTau', l0tauColl=cms.InputTag("pfLayer0Taus"))
        # #JETS
        import PhysicsTools.PatAlgos.producersLayer1.jetProducer_cfi
        process.pfPatJets=PhysicsTools.PatAlgos.producersLayer1.jetProducer_cfi.patAK5CaloJets.clone()
        import PhysicsTools.PatAlgos.mcMatchLayer0.jetMatch_cfi
        process.pfJetPartonMatch = PhysicsTools.PatAlgos.mcMatchLayer0.jetMatch_cfi.jetPartonMatch.clone()
        process.pfJetGenJetMatch = PhysicsTools.PatAlgos.mcMatchLayer0.jetMatch_cfi.jetGenJetMatch.clone()
        process.pfPatJets.genPartonMatch = cms.InputTag("pfJetPartonMatch")
        process.pfPatJets.genJetMatch = cms.InputTag("pfJetGenJetMatch")
        import PhysicsTools.PatAlgos.mcMatchLayer0.jetFlavourId_cff
        process.pfJetPartonAssociation = PhysicsTools.PatAlgos.mcMatchLayer0.jetFlavourId_cff.jetPartonAssociation.clone()
        process.pfJetFlavourAssociation = PhysicsTools.PatAlgos.mcMatchLayer0.jetFlavourId_cff.jetFlavourAssociation.clone()
        process.pfJetFlavourAssociation.srcByReference = cms.InputTag("pfJetPartonAssociation")
        process.pfPatJets.JetPartonMapSource = cms.InputTag("pfJetFlavourAssociation")
        import PhysicsTools.PatAlgos.recoLayer0.jetCorrFactors_cfi
        process.pfJetCorrFactors=PhysicsTools.PatAlgos.recoLayer0.jetCorrFactors_cfi.jetCorrFactors.clone()
        process.pfPatJets.jetCorrFactorsSource = cms.VInputTag(cms.InputTag("pfJetCorrFactors") )
        import RecoJets.JetAssociationProducers.ak5JTA_cff
        process.jetTracksAssociatorAtVertexPF = RecoJets.JetAssociationProducers.ak5JTA_cff.ak5JetTracksAssociatorAtVertex.clone()
        import PhysicsTools.PatAlgos.recoLayer0.jetTracksCharge_cff
        process.pfJetCharge = PhysicsTools.PatAlgos.recoLayer0.jetTracksCharge_cff.patJetCharge.clone()
        process.pfJetCharge.src = cms.InputTag("jetTracksAssociatorAtVertexPF")
        process.pfPatJets.trackAssociationSource = cms.InputTag("jetTracksAssociatorAtVertexPF")
        process.pfPatJets.jetChargeSource = cms.InputTag("pfJetCharge")
        # #MET
        import PhysicsTools.PatAlgos.producersLayer1.metProducer_cfi
        process.pfPatMETs = PhysicsTools.PatAlgos.producersLayer1.metProducer_cfi.patMETs.clone()
        switchToPFMET(process, cms.InputTag('pfMET'),metColl=cms.InputTag('pfPatMETs'))
        # #SUMMARY
        process.pfPatCandidateSummary = cms.EDAnalyzer("CandidateSummaryTable",
                                                       logName = cms.untracked.string("pfPatCandidates|PATSummaryTables"),
                                                       candidates = cms.VInputTag(cms.InputTag("pfPatElectrons"),
                                                                                  cms.InputTag("pfPatMuons"),
                                                                                  cms.InputTag("pfPatTaus"),
                                                                                  cms.InputTag("pfPatPhotons"),
                                                                                  cms.InputTag("pfPatJets"),
                                                                                  cms.InputTag("pfPatMETs")
                                                                                  )
                                                       )
        # #SEQUENCE
        process.makepflayer1Muons=cms.Sequence(process.pfMuonMatch+
                                               process.pfPatMuons)
        process.makepflayer1Taus=cms.Sequence(process.pfTauMatch+
                                              process.pfTauGenJetMatch+
                                              process.pfTauIsoDepositPFCandidates+
                                              process.pfTauIsoDepositPFChargedHadrons+
                                              process.pfTauIsoDepositPFNeutralHadrons+
                                              process.pfTauIsoDepositPFGammas+
                                              process.pfPatTaus)
        process.makepflayer1Jets=cms.Sequence(process.pfJetPartonMatch+
                                              process.pfJetGenJetMatch+
                                              process.pfJetPartonAssociation+
                                              process.pfJetFlavourAssociation+
                                              process.pfJetCorrFactors+
                                              process.jetTracksAssociatorAtVertexPF+
                                              process.pfJetCharge+
                                              process.pfPatJets)
        switchToPFJets( process,cms.InputTag('pfNoTau'), jetAlgo, l1jetColl = cms.InputTag("pfPatJets") )
        process.pfPatCandidates=cms.Sequence(process.pfPatElectrons+
                                             process.makepflayer1Muons+
                                             process.makepflayer1Taus+
                                             process.makepflayer1Jets+
                                             process.pfPatMETs+
                                             process.pfPatCandidateSummary)
        #SELECTED
        # #ELECTRONS
        import PhysicsTools.PatAlgos.selectionLayer1.electronSelector_cfi
        process.pfSelectedPatElectrons = PhysicsTools.PatAlgos.selectionLayer1.electronSelector_cfi.selectedPatElectrons.clone()
        process.pfSelectedPatElectrons.src=cms.InputTag("pfPatElectrons")
        # #MUONS
        import PhysicsTools.PatAlgos.selectionLayer1.muonSelector_cfi
        process.pfSelectedPatMuons = PhysicsTools.PatAlgos.selectionLayer1.muonSelector_cfi.selectedPatMuons.clone()
        process.pfSelectedPatMuons.src=cms.InputTag("pfPatMuons")
        # #TAUS
        import PhysicsTools.PatAlgos.selectionLayer1.tauSelector_cfi
        process.pfSelectedPatTaus = PhysicsTools.PatAlgos.selectionLayer1.tauSelector_cfi.selectedPatTaus.clone()
        process.pfSelectedPatTaus.src=cms.InputTag("pfPatTaus")
        # #JETS
        import PhysicsTools.PatAlgos.selectionLayer1.jetSelector_cfi
        process.pfSelectedPatJets = PhysicsTools.PatAlgos.selectionLayer1.jetSelector_cfi.selectedPatJets.clone()
        process.pfSelectedPatJets.src=cms.InputTag("pfPatJets")
        # #SUMMARY
        process.pfSelectedPatCandidateSummary = cms.EDAnalyzer("CandidateSummaryTable",
                                                               logName = cms.untracked.string("pfSelectedPatCandidates|PATSummaryTables"),
                                                               candidates = cms.VInputTag(cms.InputTag("pfSelectedPatElectrons"),
                                                                                          cms.InputTag("pfSelectedPatMuons"),
                                                                                          cms.InputTag("pfSelectedPatTaus"),
                                                                                          cms.InputTag("pfSelectedPatPhotons"),
                                                                                          cms.InputTag("pfSelectedPatJets"),
                                                                                          cms.InputTag("pfPatMETs")
                                                                                          )
                                                               )
        # #SEQUENCE
        process.pfSelectedPatCandidates=cms.Sequence(process.pfSelectedPatElectrons+
                                                     process.pfSelectedPatMuons+
                                                     process.pfSelectedPatTaus+
                                                     process.pfSelectedPatJets+
                                                     process.pfSelectedPatCandidateSummary
                                                     )
        #COUNT
        # #ELECTRONS
        import PhysicsTools.PatAlgos.selectionLayer1.electronCountFilter_cfi
        process.pfCountPatElectrons = PhysicsTools.PatAlgos.selectionLayer1.electronCountFilter_cfi.countPatElectrons.clone()
        process.pfCountPatElectrons.src=cms.InputTag("pfSelectedPatElectrons")
        # #MUONS
        import PhysicsTools.PatAlgos.selectionLayer1.muonCountFilter_cfi
        process.pfCountPatMuons = PhysicsTools.PatAlgos.selectionLayer1.muonCountFilter_cfi.countPatMuons.clone()
        process.pfCountPatMuons.src=cms.InputTag("pfSelectedPatMuons")
        # #TAUS
        import PhysicsTools.PatAlgos.selectionLayer1.tauCountFilter_cfi
        process.pfCountPatTaus = PhysicsTools.PatAlgos.selectionLayer1.tauCountFilter_cfi.countPatTaus.clone()
        process.pfCountPatTaus.src=cms.InputTag("pfSelectedPatTaus")
        # #JETS
        import PhysicsTools.PatAlgos.selectionLayer1.jetCountFilter_cfi
        process.pfCountPatJets = PhysicsTools.PatAlgos.selectionLayer1.jetCountFilter_cfi.countPatJets.clone()
        process.pfCountPatJets.src=cms.InputTag("pfSelectedPatJets")
        # #SEQUENCE
        process.pfCountPatCandidates=cms.Sequence(process.pfCountPatElectrons+
                                                  process.pfCountPatMuons+
                                                  process.pfCountPatTaus+
                                                  process.pfCountPatJets
                                                  )
        #FINAL SEQUENCE
        # NOTE(review): 'pfselectedPat'/'pfcountPat' (lower case) differ from
        # the 'pfSelectedPat*'/'pfCountPat*' names created above - confirm
        # the intended module naming.
        addPFCandidates(process,cms.InputTag('pfNoJet'),patLabel='PFParticles',cut="",
                        layer='pfPat',selected='pfselectedPat',
                        counted='pfcountPat')
        process.PFPATafterPAT =cms.Sequence(process.PF2PAT+
                                            process.pfPatCandidates+
                                            process.pfSelectedPatCandidates+
                                            process.pfCountPatCandidates
                                            )
usePATandPF2PAT=UsePATandPF2PAT()
class RemoveMCDependencedorPF(ConfigToolBase):
    """ Remove MC dependence

    Strips the gen-level producers and MC matching from the PAT default
    sequence so the configuration can run on data.
    """
    _label='RemoveMCDependencedorPF'
    _defaultParameters={}
    def __init__(self):
        ConfigToolBase.__init__(self)
        self._parameters=copy.deepcopy(self._defaultParameters)
        self._comment = ""
    def getDefaultParameters(self):
        return self._defaultParameters
    def dumpPython(self):
        dumpPythonImport = "\nfrom PhysicsTools.PatAlgos.tools.pfTools import *\n"
        dumpPython=''
        if self._comment!="":
            dumpPython = '#'+self._comment
        # BUG FIX: use '+=' so a preceding comment line is not overwritten.
        dumpPython += "\nremoveMCDependencedorPF(process)\n "
        return (dumpPythonImport,dumpPython)
    def __call__(self,process):
        self.apply(process)
    def toolCode(self, process):
        #-- Remove MC dependence ------------------------------------------------------
        from PhysicsTools.PatAlgos.tools.coreTools import removeMCMatching
        process.patDefaultSequence.remove(process.genParticlesForMETAllVisible)
        process.patDefaultSequence.remove(process.genMetTrue)
        process.patDefaultSequence.remove(process.genParticlesForJets)
        process.patDefaultSequence.remove(process.ak5GenJetsNoNu)
        process.patDefaultSequence.remove(process.iterativeCone5GenJetsNoNu)
        removeMCMatching(process, ['PFAll'])
removeMCDependencedorPF=RemoveMCDependencedorPF()
| [
""
] | |
acdde6bea347782546092d5b1f26a80f59365c61 | aaa2981048906526ca701513a5e8fccfa6a27e35 | /ACM-ICPC_6494_MissingPages.py | b8dbb03bacb16e8ce91f6560304ad044ff4a0ace | [
"MIT"
] | permissive | shogo54/uva-online-judge | 980f4b2b56171ef68b44f3acf51eded87697a6e2 | c7209ac66bbb5a9624ba44cc6053b8712f20f455 | refs/heads/master | 2020-03-11T06:42:45.701457 | 2020-01-16T20:28:33 | 2020-01-16T20:28:33 | 129,837,477 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | '''
ACM-ICPC 6494 Missing Pages
Accepted by OJ
'''
import sys
# ACM-ICPC 6494 "Missing Pages": for each input line "<paper> <num>"
# (total page count and one page on the torn-out sheet), print the other
# three page numbers printed on the same physical sheet, sorted.
# A line containing just '0' terminates the input.
for line in sys.stdin:
    line = line.strip()
    if line == '0':
        break
    nums = line.split()
    paper = int(nums[0])  # total number of pages
    num = int(nums[1])    # the known page number on the torn sheet
    pages = []
    if num % 2 == 0:
        # Even page: companions are num-1 and, on the sheet's other leaf,
        # paper-num+1 and paper-num+2.
        pages.append(num - 1)
        pages.append(paper - num + 1)
        pages.append(paper - num + 2)
    else:
        # Odd page: companions are num+1 and, on the sheet's other leaf,
        # paper-num and paper-num+1.
        pages.append(num + 1)
        pages.append(paper - num)
        pages.append(paper - num + 1)
    pages = sorted(pages)
    result = ''
    for i in range(len(pages)):
        result += str(pages[i]) + ' '
    print(result.strip())
| [
"sakiyama@knox.edu"
] | sakiyama@knox.edu |
2650068ef90d42f1538af59e3287f65ee6debe61 | 48d331090e0d51419d0e358d74916dd080e02ef8 | /bandit/lib/LinPHE.py | d0ffe17cba2adc33bbce2ef8b237524f3659dfd7 | [] | no_license | JCK-1096/Bandit-and-Reinforcement-Learning | e20e00981eec9ef23651f5f7f38333755429554c | d70ec825f50c1706a663164ab2be41a3ec0c0c4c | refs/heads/main | 2023-08-14T10:40:17.228745 | 2021-10-02T22:53:06 | 2021-10-02T22:53:06 | 469,805,716 | 2 | 0 | null | 2022-03-14T16:01:45 | 2022-03-14T16:01:44 | null | UTF-8 | Python | false | false | 2,898 | py | """
Created on Oct 13, 2020
Author: Jiayi Chen
"""
import numpy as np
import sys
class LinPHE_Struct:
    """Per-user state for Linear Perturbed-History Exploration (LinPHE).

    For the first d rounds only the regularized Gram matrix is accumulated
    and arms are chosen deterministically; afterwards theta is re-estimated
    each round from the real rewards plus `a` Gaussian pseudo-rewards per
    past round:  theta_t = G_t^{-1} * sum_l x_l * (Y_l + sum_j Z_{l,j}).
    """
    def __init__(self, featureDimension, lambda_, noiseScale):
        self.d = featureDimension
        self.a = 2  # integer tunable scale a > 0
        self.lambda_ = lambda_  # regularization
        self.NoiseScale = noiseScale  # std-dev of pseudo rewards
        self.Gt = self.lambda_ * (self.a + 1) * np.identity(n=self.d)  # G_0
        self.UserTheta_t = np.zeros(self.d)
        self.history = {
            'X': [],
            'Y': []
        }
        self.time = 0

    def updateParameters(self, x_At, Yt):
        self.history['X'].append(x_At)
        self.history['Y'].append(Yt)
        self.time += 1
        # Accumulate the Gram matrix every round (both branches of the
        # original did exactly this, so it is hoisted here).
        self.Gt = self.Gt + (self.a + 1) * np.outer(x_At, x_At)
        if self.time > self.d:
            # BUG FIX: the accumulator must hold one d-dimensional vector
            # per round, not scalars; the old code assigned vectors into a
            # 1-D zeros(time) array and then summed over *all* entries.
            weighted = np.zeros((self.time, self.d))
            # generate pseudo rewards for every past round
            PseudoRewards = np.random.normal(0, self.NoiseScale, int(self.time * self.a))
            for l in range(self.time):
                sum_PseudoRewards_l = np.sum(PseudoRewards[l * self.a:(l + 1) * self.a])  # sum_j Z_lj
                weighted[l] = np.asarray(self.history['X'][l]) * (self.history['Y'][l] + sum_PseudoRewards_l)
            # theta_t = G_t^{-1} * sum_l x_l (Y_l + sum_j Z_lj)
            self.UserTheta_t = np.dot(np.linalg.inv(self.Gt), weighted.sum(axis=0))

    def getTheta(self):
        return self.UserTheta_t

    def getA(self):
        # NOTE(review): self.A is never assigned anywhere in this class, so
        # this accessor has always raised AttributeError; kept as-is for
        # interface parity - confirm intended meaning before fixing.
        return self.A

    def decide(self, pool_articles):
        if self.time > self.d:
            # Greedy w.r.t. the current perturbed-history estimate.
            bestScore = float('-inf')
            articlePicked = None
            for article in pool_articles:
                estimation = np.dot(self.UserTheta_t, article.featureVector)
                if bestScore < estimation:
                    articlePicked = article
                    bestScore = estimation
            return articlePicked
        else:
            return pool_articles[self.d - self.time]  # warm-up: select arm (K-t+1)
class LinPHE:
    """Multi-user LinPHE wrapper: lazily keeps one LinPHE_Struct per user."""

    def __init__(self, dimension, lambda_, noise):
        self.users = {}
        self.dimension = dimension
        self.lambda_ = lambda_
        self.noiseScale = noise
        self.CanEstimateUserPreference = True

    def _user(self, userID):
        # Create the per-user bandit state on first sight of this user.
        state = self.users.get(userID)
        if state is None:
            state = LinPHE_Struct(self.dimension, self.lambda_, self.noiseScale)
            self.users[userID] = state
        return state

    def decide(self, pool_articles, userID):
        return self._user(userID).decide(pool_articles)

    def updateParameters(self, articlePicked, click, userID):
        truncated = articlePicked.featureVector[:self.dimension]
        self.users[userID].updateParameters(truncated, click)

    def getTheta(self, userID):
        return self.users[userID].UserTheta_t
| [
"chenjiayi@JiayideMBP.local"
] | chenjiayi@JiayideMBP.local |
d5340e2f4d9bc41d172ffd092e624ec156c598ed | 3c54a766a8e69041c29519aee490490f7342fd99 | /blacklistcheck.py | 456a861b32a1b454cede9936377cefe00d2d5c02 | [] | no_license | rahul-rajm/Email-blacklist-mangement-app | 319e77c6f2f928ce7cc9a374b753d4e96f39acce | e7a8da7edb243f4587ef8cd90e3297319ee7fa7f | refs/heads/master | 2020-04-05T00:12:35.940132 | 2018-11-06T13:40:13 | 2018-11-06T13:40:13 | 156,387,757 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,506 | py | #/usr/bin/env python
#Individual func to check blacklist
import requests
WORKFLOW_INFO = 'XXX'
MAILGUN_INFO_1 = 'XXX'
MAILGUN_INFO_2 = 'XXX'
WORKFLOW_HEADERS = {'Authorization': 'Basic XXX'}
WORKFLOW_URL_1 = 'XXX'
MAILGUN_URL_1 = 'XXX'
MAILGUN_URL_2 = 'XXX'
MAILGUN_HEADERS = {'Authorization': 'Basic XXX'}
def provider_check(INFO):
    """Map a provider identifier string to its numeric code (first match wins)."""
    ordered = ((WORKFLOW_INFO, 1), (MAILGUN_INFO_1, 2), (MAILGUN_INFO_2, 3))
    for known, code in ordered:
        if INFO == known:
            return code
    return None  # same as the original's implicit fall-through
def email_blacklist_check(INFO, STATUS_CODE):
    # Interpret the HTTP status of a blacklist lookup against provider INFO:
    # 200 -> listed (return the provider's numeric code),
    # 404 -> not listed (return 0),
    # anything else -> just report it (returns None implicitly).
    print "Checking in %s." % INFO
    if STATUS_CODE == 200:
        print "Blacklisted on %s" % (INFO)
        return provider_check(INFO)
    elif STATUS_CODE == 404:
        print "Not blacklisted"
        return 0
    else:
        print "Response received is: ", STATUS_CODE
def http_check(INFO, URL, HEADERS):
    # Perform the blacklist lookup over HTTP and classify the response.
    # NOTE(review): on a network failure this returns an error *string*,
    # while the success path returns an int/None - callers must cope with
    # both types.
    try:
        response = requests.get(URL, headers=HEADERS, timeout=15)
        STATUS_CODE = response.status_code
        return email_blacklist_check(INFO, STATUS_CODE)
    except Exception as error:
        return "Unable to resolve the URL, got the error %s" % error
def workflow_b(email):
    # Check the address against the workflow provider's blacklist endpoint.
    CHECK_URL = WORKFLOW_URL_1 + email
    return http_check(WORKFLOW_INFO, CHECK_URL, WORKFLOW_HEADERS )
def mailgun_b(email):
    # Check the address against the first Mailgun blacklist endpoint.
    CHECK_URL = MAILGUN_URL_1 + email
    return http_check(MAILGUN_INFO_1, CHECK_URL, MAILGUN_HEADERS )
def mailgun_c(email):
    # Check the address against the second Mailgun blacklist endpoint.
    CHECK_URL = MAILGUN_URL_2 + email
    return http_check(MAILGUN_INFO_2, CHECK_URL, MAILGUN_HEADERS )
| [
"noreply@github.com"
] | rahul-rajm.noreply@github.com |
ac733cf1f4551f7017adcfd6914d84fd884293f4 | 9ce0413314ebc5a75252b3d35933f8caeb4a695e | /๊ตฌํ/์คํ์ฑํ
๋ฐฉ.py | 2d7971f85d90c66cf2d87e0661f9a9cc900017e1 | [] | no_license | didwns7347/algotest | 32ed9e0e06be8a7450ab02daf4bddaee44def4f6 | 67d763dd2d48093d9b98d350ff98fbc9ab5ef8df | refs/heads/master | 2023-04-06T05:59:26.111374 | 2021-04-21T05:11:30 | 2021-04-21T05:11:30 | 263,583,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 719 | py | def solution(record):
answer = []
id_dic={}
for s in record:
s=s.split()
if s[0]=="Enter":
id_dic[s[1]]=s[2]
elif s[0]=="Leave":
continue
else:
id_dic[s[1]]=s[2]
for s in record:
s=s.split()
if s[0]=="Enter":
text=id_dic[s[1]]+"๋์ด ๋ค์ด์์ต๋๋ค."
answer.append(text)
elif s[0]=="Leave":
text=id_dic[s[1]]+"๋์ด ๋๊ฐ์ต๋๋ค."
answer.append(text)
else:
continue
return answer
# Demo run against the sample input from the problem statement.
r=["Enter uid1234 Muzi", "Enter uid4567 Prodo","Leave uid1234","Enter uid1234 Prodo","Change uid4567 Ryan"]
a= solution(r)
for s in a:
    print(s)
| [
"didwns7347@gmail.com"
] | didwns7347@gmail.com |
428a7443f7d54eee384ad95d174dee298614bf00 | fc35e7bb1c954c38ae8cc2f295d61b2c81ca7c69 | /code 015 11/study/study/spiders/study02_spider.py | f9810daf8c65a0ca55a9a32f9380d377a90c57cd | [] | no_license | zeopean/pycode | 364eb3208c04a9760b745fe155dd90d3b51a7e0e | 32aa2e55d5c6492955136085c698be1e50f2948d | refs/heads/master | 2021-01-10T01:46:14.858359 | 2015-12-09T14:41:05 | 2015-12-09T14:41:05 | 47,268,741 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 832 | py | # coding=utf8
# !/usr/bin/python
import scrapy
from study.items import StudyItem
class Study02Spider(scrapy.Spider):
    """Crawl the DMOZ Python directory listing and scrape each entry into a StudyItem."""
    name = 'study02'
    # NOTE(review): scrapy's attribute is 'allowed_domains' (plural); as
    # written this one is ignored, which is what lets the dmoz.org requests
    # through - renaming it would start filtering them out.
    allowed_domain = ['study.02.demo']
    start_urls = ["http://www.dmoz.org/Computers/Programming/Languages/Python/"]

    def parse(self, response):
        for href in response.css("ul.directory.dir-col > li > a::attr('href')"):
            # BUG FIX: Response.urljoin() takes only the (possibly relative)
            # url; the base is the response's own url.
            url = response.urljoin(href.extract())
            yield scrapy.Request(url, callback=self.parse_dir_contents)

    def parse_dir_contents(self, response):
        for sel in response.xpath('//ul/li'):
            # BUG FIX: DmozItem was undefined here; the item class imported
            # at the top of this file is StudyItem.
            item = StudyItem()
            item['title'] = sel.xpath('a/text()').extract()
            item['link'] = sel.xpath('a/@href').extract()
            item['desc'] = sel.xpath('text()').extract()
            yield item
| [
"1412512785@qq.com"
] | 1412512785@qq.com |
a6ba11a6934043c5f7f7920a47fd1bb500f2a7ff | 4f0dabb907d3eda9f83bea54ae979bfafe0a6ffe | /find_process.py | f07408eefd00c97447e0bf6935099ceb5c3a627d | [] | no_license | IvanVizcaino/PeiroStream | 9699648bef4325abe14da696c094aa652e4bb9ba | d8b718a082d51399006477e975f15ac7a2acf8a2 | refs/heads/master | 2023-03-05T18:52:08.951051 | 2021-02-17T19:19:15 | 2021-02-17T19:19:15 | 330,394,883 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | import psutil
def findProcessIdByName(processName):
    """Return the pid/name/create_time dicts of every running process whose
    name contains *processName* (case-insensitive substring match)."""
    matches = []
    needle = processName.lower()
    for proc in psutil.process_iter():
        try:
            info = proc.as_dict(attrs=['pid', 'name', 'create_time'])
            if needle in info['name'].lower():
                matches.append(info)
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            # Process vanished or is inaccessible between listing and query.
            pass
    return matches
"ivan@yeboyebo.es"
] | ivan@yeboyebo.es |
5db4ca59d2dcaad517794713fea207b9c28965ba | b38066e71cc8328e383e707ab4eefc8c3d322792 | /server/src/gameserver/GM.py | afba76eeb40b355e3fbf7c730597722d6039e498 | [] | no_license | kongyt/duel | 57750cba4f0ccdc0dbfb9cd223e3806408c51284 | b9ae0c3ba30a5f8feff8af886983307ac7f8fa5c | refs/heads/master | 2021-01-21T11:11:08.961987 | 2017-03-16T08:37:30 | 2017-03-16T08:37:30 | 83,532,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 828 | py | #coding: utf-8
from Singleton import *
from Room import *
from Player import *
from Log import *
from RedisDB import *
from Timer import *
@singleton
class GM:
    """Global manager singleton: wires together the game-server subsystems
    (players, rooms, Redis, timers, logging) and exposes them via getters."""
    def __init__(self):
        # Construct every subsystem up front; PlayerMgr receives a
        # back-reference to this GM instance.
        self.playerMgr = PlayerMgr(self)
        self.roomMgr = RoomMgr()
        self.redisDB = RedisDB("127.0.0.1", 6379)  # local Redis on the default port
        self.timerMgr = TimerMgr()
        self.logMgr = LogMgr("duel", LogMgr.DEBUG)
        # The network server object is attached later via setServer().
        self.server = None
    def setServer(self, server):
        self.server = server
    def getServer(self):
        return self.server
    def getPlayerMgr(self):
        return self.playerMgr
    def getRoomMgr(self):
        return self.roomMgr
    def getRedisDB(self):
        return self.redisDB
    def getTimerMgr(self):
        return self.timerMgr
    def getLogMgr(self):
        return self.logMgr
| [
"839339849.qq.com"
] | 839339849.qq.com |
f4944d1fae34901da120145102384d71e22fa5d6 | d2d5d402062d98fce4cf6dbaa848c9e2b60c1838 | /controlvariables.py | 84a5eef8c2c1f4882205d834d5a0c42d321aa7b5 | [] | no_license | emanuelalava/SailboatControlSystem | 1b29ce2e1ef9cc537794c932fb7c3d96a142cbb7 | c1969cf9f5e40e010342453d0b4293c7a7612317 | refs/heads/master | 2022-11-05T10:41:04.394741 | 2020-06-25T17:23:33 | 2020-06-25T17:23:33 | 274,975,041 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | class ControlVariable:
def setControlVariable(rigangle,rudderangle):
self.rigangle = rigangle
self.rudderangle = rudderangle
def getControlVariable():
return (self.rigangle,self.rudderangle)
| [
"walava@espol.edu.ec"
] | walava@espol.edu.ec |
0e507cd7a87fd7f046fbdebfcc99ba300d06ff2a | 2a5a5d094c10bd7ccb2c7bb59ee30054d56a8305 | /myblog/migrations/0002_post_title_tag.py | 6c2b3cf286a3126bbf559aed1bcd069512d1752d | [] | no_license | SHUJA12/projectblog | 8b36a3cf3e24e3f2b0a637a53600dee5db87d46d | d5bcb48116a55b552962abc62942d73474ada2ed | refs/heads/main | 2023-02-04T15:26:48.838872 | 2020-12-23T20:09:19 | 2020-12-23T20:09:19 | 323,990,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | # Generated by Django 2.2.2 on 2020-06-03 07:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('myblog', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='post',
name='title_tag',
field=models.CharField(max_length=200, null=True),
),
]
| [
"shuzaahmad7@gmail.com"
] | shuzaahmad7@gmail.com |
78c274ea9242eb1b2aed3b23a1b935b101bedba9 | 086b24ee80b9ee943e709cfb38bdd9be216f416c | /resnet.py | 223c4ec22fcf62afc224ab73683ec53781744485 | [] | no_license | a84227321a/yyzz_ocr | 1fe49dbc1ada295cd313245dd8c351d870668850 | 5dea7f1dd105331e5be4ef3cf50f3f278286e348 | refs/heads/master | 2022-11-06T02:28:24.066208 | 2020-06-12T09:54:39 | 2020-06-12T09:54:39 | 271,237,631 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,866 | py | from keras import layers
from keras.layers import Conv2D, BatchNormalization, Activation, Permute, TimeDistributed, Dense, Dropout
from keras.layers import Convolution2D, Flatten
import keras.backend as K
def identity_block(input_tensor, kernel_size, filters, stage, block, dilation_rate=(1, 1)):
"""The identity block is the block that has no conv layer at shortcut.
# Arguments
input_tensor: input tensor
kernel_size: default 3, the kernel size of middle conv layer at main path
filters: list of integers, the filters of 3 conv layer at main path
stage: integer, current stage label, used for generating layer names
block: 'a','b'..., current block label, used for generating layer names
# Returns
Output tensor for the block.
"""
filters1, filters2, filters3 = filters
if K.image_data_format() == 'channels_last':
bn_axis = 3
else:
bn_axis = 1
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
x = Conv2D(filters1, (1, 1), name=conv_name_base + '2a')(input_tensor)
x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a')(x)
x = Activation('relu')(x)
x = Conv2D(filters2, kernel_size, dilation_rate=dilation_rate,
padding='same', name=conv_name_base + '2b')(x)
x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b')(x)
x = Activation('relu')(x)
x = Conv2D(filters3, (1, 1), name=conv_name_base + '2c')(x)
x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x)
x = layers.add([x, input_tensor])
x = Activation('relu')(x)
return x
def conv_block(input_tensor, kernel_size, filters, stage, block, strides=(2, 2)):
"""A block that has a conv layer at shortcut.
# Arguments
input_tensor: input tensor
kernel_size: default 3, the kernel size of middle conv layer at main path
filters: list of integers, the filters of 3 conv layer at main path
stage: integer, current stage label, used for generating layer names
block: 'a','b'..., current block label, used for generating layer names
strides: Strides for the first conv layer in the block.
# Returns
Output tensor for the block.
Note that from stage 3,
the first conv layer at main path is with strides=(2, 2)
And the shortcut should have strides=(2, 2) as well
"""
filters1, filters2, filters3 = filters
if K.image_data_format() == 'channels_last':
bn_axis = 3
else:
bn_axis = 1
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
x = Conv2D(filters1, (1, 1), strides=strides, padding='same',
name=conv_name_base + '2a')(input_tensor)
x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a')(x)
x = Activation('relu')(x)
x = Conv2D(filters2, kernel_size, padding='same',
name=conv_name_base + '2b')(x)
x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b')(x)
x = Activation('relu')(x)
x = Conv2D(filters3, (1, 1), padding='same', name=conv_name_base + '2c')(x)
x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x)
shortcut = Conv2D(filters3, (1, 1), strides=strides, padding='same',
name=conv_name_base + '1')(input_tensor)
shortcut = BatchNormalization(axis=bn_axis, name=bn_name_base + '1')(shortcut)
x = layers.add([x, shortcut])
x = Activation('relu')(x)
return x
def res_block(input_tensor, kernel_size, filters, stage, block, dilation_rate=(1, 1), strides=(1, 1), is_cut=False):
if K.image_data_format() == 'channels_last':
bn_axis = 3
else:
bn_axis = 1
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
x = Conv2D(filters[0], kernel_size, strides=strides, padding='same', name=conv_name_base + '2a')(input_tensor)
x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a')(x)
x = Activation('relu')(x)
x = Conv2D(filters[1], kernel_size, dilation_rate=dilation_rate,
padding='same', name=conv_name_base + '2b')(x)
x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b')(x)
if len(filters) > 2:
x = Activation('relu')(x)
x = Conv2D(filters[2], kernel_size, padding='same', name=conv_name_base + '2c')(x)
x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x)
if is_cut:
shortcut = Conv2D(filters[-1], (1, 1), strides=strides, padding='same',
name=conv_name_base + '1')(input_tensor)
shortcut = BatchNormalization(axis=bn_axis, name=bn_name_base + '1')(shortcut)
else:
shortcut = input_tensor
x = layers.add([x, shortcut])
x = Activation('relu')(x)
return x
def ResNet50(input_data, n_class):
if K.image_data_format() == 'channels_last':
bn_axis = 3
else:
bn_axis = 1
x = Convolution2D(64, (3, 3), strides=(1, 1), kernel_initializer='he_normal', padding='same',
activation='relu')(input_data)
x = Convolution2D(64, (3, 3), strides=(2, 2), kernel_initializer='he_normal', padding='same')(x)
x = BatchNormalization(axis=bn_axis)(x)
x = Activation('relu')(x)
x = conv_block(x, 3, [64, 64, 256], stage=2, block='a', strides=(1, 1))
x = identity_block(x, 3, [64, 64, 256], stage=2, block='b')
x = identity_block(x, 3, [64, 64, 256], stage=2, block='c')
x = conv_block(x, 3, [128, 128, 512], stage=3, block='a', strides =(2, 2))
x = identity_block(x, 3, [128, 128, 512], stage=3, block='b')
x = identity_block(x, 3, [128, 128, 512], stage=3, block='c')
x = identity_block(x, 3, [128, 128, 512], stage=3, block='d')
x = conv_block(x, 3, [256, 256, 1024], stage=4, block='a', strides=(2, 1))
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='b')
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='c')
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='d')
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='e')
x = identity_block(x, 3, [256, 256, 1024], stage=4, block='f')
x = conv_block(x, 3, [512, 512, 2048], stage=5, block='a', strides=(2, 1))
x = identity_block(x, 3, [512, 512, 2048], stage=5, block='b')
x = identity_block(x, 3, [512, 512, 2048], stage=5, block='c')
x = Activation('relu')(x)
x = BatchNormalization(axis=-1, epsilon=1.1e-5)(x)
x = Permute((2, 1, 3), name='permute')(x)
x = TimeDistributed(Flatten(), name='flatten')(x)
x = TimeDistributed(Dense(n_class))(Dropout(rate=0.2)(x))
y_pred = Activation(activation='softmax')(x)
return y_pred
if __name__ == '__main__':
pass
| [
"865046239@qq.com"
] | 865046239@qq.com |
0d472e70636ddeee8273010b4e839638fc3fab46 | 12c33015ca071fe6bee7131328706fcc8af6e098 | /bing_webscrapper.py | 90e2bdd49b5457713a40fdb01da9f37ea8761fcc | [] | no_license | Nextafari/web_crawler-google-bing- | 7e62eedd393d9b3f9cf7f57c722a148bcaf78f2d | 3843e5f40324b9054a92174cd7efc9f10ecfbf2b | refs/heads/master | 2021-04-23T03:33:25.868889 | 2020-08-16T21:29:31 | 2020-08-16T21:29:31 | 249,894,812 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,243 | py | from bs4 import BeautifulSoup
import requests
# Ask the user for a query at import time and fetch the Bing results page.
search = input("Enter your search here: ")
params = {"q": search}  # becomes ?q=<search> on the request URL
url = ("https://www.bing.com/search")
# Browser-like User-Agent sent with the request.
USER_AGENT ="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36"
headers = {"user-agent": USER_AGENT}
#create a variable for the requests
r = requests.get(url, params=params, headers=headers)
def bing_webcrawler():
    """Parse the Bing results page fetched above (module-level response ``r``)
    and print the title and URL of every organic search result."""
    soup = BeautifulSoup(r.text, "html.parser")
    # The <ol id="b_results"> list holds the organic results.
    results = soup.find("ol", {"id": "b_results"})
    if results is None:
        # Unexpected page (layout change, captcha, ...): report instead of
        # crashing with AttributeError on None.
        print("Could not find the results container on the page.")
        return
    links = results.find_all("li", {"class": "b_algo"})
    for entry in links:
        anchor = entry.find("a")
        if anchor is None:
            continue  # some result blocks carry no direct link
        item_text = anchor.text
        item_link = anchor.attrs.get("href")
        if item_text and item_link:
            print(item_text)
            print(item_link)
bing_webcrawler()
#Steps to make a web crawler using BeautifulSoup
#1) Import BeautifulSoup from bs4, and also import requests.
#2) Create a "search" variable to hold the input to be searched for.
#3) Create a "params" dictionary with the arguments on line 5 above to send with the GET request:
#it maps "q" to what we search for. This basically tells the URL to append "q" and the search keyword.
#4) Create a requests variable with a .get request, passing in the website you want to crawl along with params and headers.
#5) Create a variable like the one on line 8 and assign it the parsed HTML document returned as text.
#6) Create a "results" variable and pass in the HTML element you want information from, plus the attributes you are looking for
# (that is, get the element containing the whole search result and its id or other class), as seen on line 9.
#7) Create another variable called "links" that searches "results" to return what you need from inside the tag housing
#the various elements you want information from, e.g. all the elements in a container.
#8) Use a for loop to iterate over the data from the "links" variable above and put the values into variables.
#
# | [
"chinedue16@gmail.com"
] | chinedue16@gmail.com |
9ab33ff79ca55b1491450b69895fe6e1b812f44e | 9de84e92189ac64ec63df1953b73bcaec5eae07e | /src/jsoncfg/__init__.py | 032617b40ecd0f1a471f964c2f429be464cdd967 | [
"MIT"
] | permissive | pasztorpisti/json-cfg | ce5985e0d5995cde16250487780e6d2c3b008fd3 | 0ccde013987910f2bb37a46788c0b8d3053a9ac6 | refs/heads/master | 2022-02-22T07:54:32.274364 | 2019-10-19T16:45:07 | 2019-10-19T16:45:07 | 34,360,374 | 35 | 8 | MIT | 2022-02-02T23:16:23 | 2015-04-22T00:57:59 | Python | UTF-8 | Python | false | false | 2,198 | py | # -*- coding: utf-8 -*-
from .exceptions import JSONConfigException
from .parser import JSONConfigParserException
from .parser_listener import ObjectBuilderParams
from .config_classes import (
JSONConfigQueryError, JSONConfigValueMapperError, JSONConfigValueNotFoundError, JSONConfigNodeTypeError,
JSONValueMapper,
node_location, node_exists, node_is_object, node_is_array, node_is_scalar,
ensure_exists, expect_object, expect_array, expect_scalar,
)
from .functions import (
loads, load, loads_config, load_config, JSONParserParams,
)
from .tree_python import (
PythonObjectBuilderParams, DefaultObjectCreator, DefaultArrayCreator, default_number_converter,
DefaultStringToScalarConverter,
)
# Explicit public API of the jsoncfg package: the names imported above that
# are meant for external use.
__all__ = [
    'JSONConfigException',
    'JSONConfigParserException',
    'JSONConfigQueryError', 'JSONConfigValueMapperError',
    'JSONConfigValueNotFoundError', 'JSONConfigNodeTypeError',
    'JSONValueMapper',
    'node_location', 'node_exists', 'node_is_object', 'node_is_array', 'node_is_scalar',
    'ensure_exists', 'expect_object', 'expect_array', 'expect_scalar',
    'loads', 'load', 'loads_config', 'load_config',
    'JSONParserParams',
    'ObjectBuilderParams', 'PythonObjectBuilderParams',
    'DefaultObjectCreator', 'DefaultArrayCreator', 'default_number_converter', 'DefaultStringToScalarConverter',
]
# version_info[0]: Increase in case of large milestones/releases.
# version_info[1]: Increase this and zero out version_info[2] if you have explicitly modified
#                  a previously existing behavior/interface.
#                  If the behavior of an existing feature changes as a result of a bugfix
#                  and the new (bugfixed) behavior is that meets the expectations of the
#                  previous interface documentation then you shouldn't increase this, in that
#                  case increase only version_info[2].
# version_info[2]: Increase in case of bugfixes. Also use this if you added new features
#                  without modifying the behavior of the previously existing ones.
version_info = (0, 4, 2)
__version__ = '.'.join(str(n) for n in version_info)
# NOTE(review): the author name appears mojibake-encoded in this copy of the file.
__author__ = 'Istvรกn Pรกsztor'
__license__ = 'MIT'
| [
"pasztorpisti@gmail.com"
] | pasztorpisti@gmail.com |
73bede51d5735756525347d3dd9f95c50fa375a6 | f766b084d893fde9f96cd10e12c1269d418719ed | /submissions/dummy/feature_extractor.py | dde02fbd3bc7d441ded782a2d5283c623874dcd9 | [] | no_license | youssef-brachmi/Recommender-System-for-beers | 8442ec966368d4dd822f5bafbba123915a33110f | 48ad1396509554e13cd2d53072bbe97730a8de43 | refs/heads/master | 2021-10-12T02:49:42.782742 | 2019-01-31T21:12:32 | 2019-01-31T21:12:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | from scipy import constants
import pandas as pd
class FeatureExtractor(object):
    """Stateless transformer that keeps only the feature columns used downstream."""
    # Columns handed to the model; everything else in X_df is dropped.
    _COLUMNS = ['authorId', 'beerId', 'styleId', 'brewerId', 'abv', 'mean_rating']
    def __init__(self):
        pass
    def fit(self, X_df, y):
        """No-op fit; returns self per the scikit-learn transformer convention.

        The original returned the tuple ``(self, X_df)``, which breaks the
        usual ``FeatureExtractor().fit(X, y).transform(X)`` chaining.
        """
        return self
    def transform(self, X_df):
        """Return X_df restricted to the model's feature columns."""
        return X_df[self._COLUMNS]
| [
"noreply@github.com"
] | youssef-brachmi.noreply@github.com |
cf549f57214bbbadb76d22785366c5b2adfc840f | 9bf35ac000395206d28f0b1a2b1a86a731b3435c | /main.py | e111c61d21ae33abd16589a9913f264d4ece1609 | [] | no_license | Daveloper20/Python_Tetris | a76f973108f78188a5670e1aef9e644ce8cdfc87 | 6b827216f13ff533b7ea1aded9deeb31243bd82c | refs/heads/main | 2023-04-03T01:25:44.171428 | 2021-04-19T14:11:24 | 2021-04-19T14:11:24 | 359,258,695 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,070 | py | import pygame
import random
import time
# RGB palette for the tetrominoes. Figure.__init__ picks colors[type + 1] and
# frozen field cells store type + 1, so index 0 is never used for a piece.
colors = [
    (0, 0, 0),
    (0, 240, 240),
    (0, 0, 240),
    (240, 160, 0),
    (240, 240, 0),
    (0, 240, 0),
    (160, 0, 240),
    (240, 0, 0)
]
class Figure:
    """A single tetromino: a grid position plus a shape/rotation lookup."""
    # Default grid position; instances overwrite these in __init__.
    x = 0
    y = 0
    # Each shape is a list of rotation states. A state names the filled cells
    # inside a 4x4 template, where cell index = row * 4 + column.
    Figures = [
        [[1, 5, 9, 13], [4, 5, 6, 7]],
        [[1, 2, 5, 9], [0, 4, 5, 6], [1, 5, 9, 8], [4, 5, 6, 10]],
        [[1, 2, 6, 10], [5, 6, 7, 9], [2, 6, 10, 11], [3, 5, 6, 7]],
        [[1, 2, 5, 6]],
        [[6, 7, 9, 10], [1, 5, 6, 10]],
        [[1, 4, 5, 6], [1, 4, 5, 9], [4, 5, 6, 9], [1, 5, 6, 9]],
        [[4, 5, 9, 10], [2, 6, 5, 9]]
    ]
    def __init__(self, x_coord, y_coord):
        """Spawn at the given grid position with a randomly chosen shape."""
        self.x = x_coord
        self.y = y_coord
        self.type = random.randint(0, len(self.Figures) - 1)
        self.color = colors[self.type + 1]
        self.rotation = 0
    def image(self):
        """Return the filled-cell indices of the current rotation state."""
        variants = self.Figures[self.type]
        return variants[self.rotation]
    def rotate(self):
        """Advance to the next rotation state, wrapping around."""
        variants = self.Figures[self.type]
        self.rotation = (self.rotation + 1) % len(variants)
class Tetris:
    """State and rules for one Tetris session: the playing field, scoring, and
    the active / shadow / next / held pieces (instances of Figure)."""
    # Class-level defaults; __init__ re-assigns the real per-instance values.
    height = 0
    width = 0
    field = []
    score = 0
    state = "start"
    # NOTE: these attribute names shadow the Figure class itself; instances
    # overwrite them with Figure objects via new_figure().
    Figure = None
    Shadow = None
    def __init__(self, _height, _width):
        """Build an empty _height x _width field and spawn the first piece."""
        self.new_best_lines = False
        self.new_best_score = False
        self.width = _width
        self.height = _height
        self.field = []
        self.score = 0
        self.broken_lines = 0
        self.state = "start"  # "start" while playing, "gameover" once lost
        self.hold_Figure = None
        self.primary_Figure = None
        self.next_Figure = None
        # field[row][col]: 0 = empty, otherwise Figure.type + 1 (a colors index).
        for i in range(_height):
            new_line = []
            for j in range(_width):
                new_line.append(0)
            self.field.append(new_line)
        self.new_figure()
    def new_figure(self):
        """Promote the queued piece to active (creating the queue on first use)
        and refresh the landing shadow."""
        if self.next_Figure is None:
            self.Figure = Figure(5, 0)
            self.Shadow = Figure(5, 0)
            self.next_Figure = Figure(5, 0)
        else:
            self.Figure = self.next_Figure
            # Only the shadow's coordinates matter: the draw loop renders it
            # using the active piece's image.
            self.Shadow = Figure(5, 0)
            self.next_Figure = Figure(5, 0)
        self.position_shadow()
    def position_shadow(self):
        """Drop the shadow straight down from the active piece to the row just
        above the first collision (the landing preview)."""
        grounded = False
        self.Shadow.x = self.Figure.x
        self.Shadow.y = self.Figure.y
        while not grounded:
            self.Shadow.y += 1
            for i in range(4):
                for j in range(4):
                    p = i * 4 + j
                    if p in self.Figure.image():
                        if i + self.Shadow.y > self.height - 1 or \
                                i + self.Shadow.y < 0 or \
                                self.field[i + self.Shadow.y][j + self.Shadow.x] > 0:
                            grounded = True
        self.Shadow.y -= 1
    def go_down(self):
        """Gravity step: move the active piece one row down, freezing it in
        place if that move collides."""
        self.Figure.y += 1
        if self.intersects():
            self.Figure.y -= 1
            self.freeze()
    def side(self, dx):
        """Shift the active piece horizontally by dx columns if it stays inside
        the side walls; revert when the new spot collides with frozen cells."""
        old_x = self.Figure.x
        edge = False
        for i in range(4):
            for j in range(4):
                p = i * 4 + j
                if p in self.Figure.image():
                    if j + self.Figure.x + dx > self.width - 1 or \
                            j + self.Figure.x + dx < 0:
                        edge = True
        if not edge:
            self.Figure.x += dx
        if self.intersects():
            self.Figure.x = old_x
        self.position_shadow()
    def left(self):
        self.side(-1)
    def right(self):
        self.side(1)
    def down(self):
        """Hard drop: move straight down to the first collision and freeze."""
        while not self.intersects():
            self.Figure.y += 1
        self.Figure.y -= 1
        self.freeze()
    def rotate(self):
        """Rotate the active piece, reverting when the rotation collides."""
        old_rotation = self.Figure.rotation
        self.Figure.rotate()
        if self.intersects():
            self.Figure.rotation = old_rotation
        else:
            self.position_shadow()
    def hold(self):
        """Swap the active piece with the held one (the first press just
        stores it). NOTE(review): there is no once-per-drop lock, so repeated
        presses keep swapping the same two pieces."""
        self.Figure.x = 5
        self.Figure.y = 0
        if self.hold_Figure is None:
            self.hold_Figure = self.Figure
            self.Figure = None
            self.new_figure()
        else:
            self.primary_Figure = self.Figure
            self.Figure = self.hold_Figure
            self.hold_Figure = self.primary_Figure
            self.position_shadow()
    def intersects(self):
        """Return True when the active piece overlaps the floor, pokes outside
        the field, or touches an already-frozen cell."""
        intersection = False
        for i in range(4):
            for j in range(4):
                p = i * 4 + j
                if p in self.Figure.image():
                    try:
                        if i + self.Figure.y > self.height - 1 or \
                                i + self.Figure.y < 0 or \
                                self.field[i + self.Figure.y][j + self.Figure.x] > 0 or \
                                self.Figure.x + j < 0:
                            intersection = True
                    except IndexError:
                        # Indexing past the right wall also counts as a hit.
                        intersection = True
        return intersection
    def freeze(self):
        """Stamp the active piece into the field, clear any full rows, spawn
        the next piece, and flag game over if the spawn already collides."""
        for i in range(4):
            for j in range(4):
                p = i * 4 + j
                if p in self.Figure.image():
                    self.field[i + self.Figure.y][j + self.Figure.x] = self.Figure.type + 1
        self.break_lines()
        self.new_figure()
        if self.intersects():
            self.state = "gameover"
    def break_lines(self):
        """Clear every full row (rows 1..height-1 are scanned) and pull the
        rows above it down one step. Each full row adds 5 to `lines`, and
        lines ** 2 is added to the score, so multi-row clears score more."""
        lines = 0
        for i in range(1, self.height):
            zeros = 0
            for j in range(self.width):
                if self.field[i][j] == 0:
                    zeros += 1
            if zeros == 0:
                lines += 5
                self.broken_lines += 1
                # Shift rows i..2 down by copying each from the row above.
                for i2 in range(i, 1, -1):
                    for j in range(self.width):
                        self.field[i2][j] = self.field[i2 - 1][j]
        self.score += lines ** 2
# --- window / theme set-up ---------------------------------------------------
pygame.init()
screen = pygame.display.set_mode((650, 670))
pygame.display.set_caption('Tetris')
BACKGROUND = (0, 0, 0)
FOREGROUND = (255, 255, 255)
GRAY = (128, 128, 128)
# Light theme during the day (08:00-16:59 local time), dark theme otherwise.
if 7 < time.localtime().tm_hour < 17:
    BACKGROUND = (255, 255, 255)
    FOREGROUND = (0, 0, 0)
done = False
fps = 45
frame = 0  # counts frames to time the automatic gravity step
clock = pygame.time.Clock()
counter = 0  # NOTE(review): assigned but never used afterwards
zoom = 30  # pixel size of one board cell
game = Tetris(20, 15)
# --- main loop -----------------------------------------------------------------
while not done:
    frame += 1
    # Gravity: while playing, drop the piece one row every 30 frames.
    if game.state == "start" and frame == 30:
        frame = 0
        game.go_down()
    # --- input handling ---
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_UP:
                game.rotate()
            if event.key == pygame.K_DOWN:
                game.go_down()
            # 't' toggles between the dark and light colour themes.
            if event.key == pygame.K_t:
                if BACKGROUND == (0, 0, 0):
                    BACKGROUND = (255, 255, 255)
                    FOREGROUND = (0, 0, 0)
                else:
                    BACKGROUND = (0, 0, 0)
                    FOREGROUND = (255, 255, 255)
            if event.key == pygame.K_LEFT:
                game.left()
            if event.key == pygame.K_RIGHT:
                game.right()
            if event.key == pygame.K_c:
                game.hold()
            # Space restarts after a game over; otherwise it hard-drops.
            if event.key == pygame.K_SPACE and game.state == "gameover":
                game.Figure = None
                game.Shadow = None
                game.field = []
                game.__init__(20, 15)
                game.broken_lines = 0
                game.score = 0
                frame = 0
                screen.fill(color=BACKGROUND)
                game.new_figure()
                game.state = "start"
            elif event.key == pygame.K_SPACE:
                game.down()
        if event.type == pygame.QUIT:
            done = True
    # --- drawing ---
    screen.fill(color=BACKGROUND)
    # Board: empty cells as grey outlines, frozen cells filled with their colour.
    for i in range(game.height):
        for j in range(game.width):
            if game.field[i][j] == 0:
                color = GRAY
                just_border = 1
            else:
                color = colors[game.field[i][j]]
                just_border = 0
            pygame.draw.rect(screen, color, [38 + j * zoom, 50 + i * zoom, zoom, zoom], just_border)
    # Active piece plus its grey landing shadow; hold/next previews at the side.
    if game.Figure is not None:
        for i in range(4):
            for j in range(4):
                p = i * 4 + j
                if p in game.Figure.image():
                    pygame.draw.rect(screen, GRAY, [38 + (j + game.Shadow.x) * zoom, 50 + (i + game.Shadow.y) * zoom,
                                                    zoom, zoom])
                    pygame.draw.rect(screen, game.Figure.color,
                                     [38 + (j + game.Figure.x) * zoom, 50 + (i + game.Figure.y) * zoom, zoom, zoom])
                if game.hold_Figure is not None:
                    if p in game.hold_Figure.image():
                        pygame.draw.rect(screen, game.hold_Figure.color, [410 + (j + game.hold_Figure.x) * zoom,
                                                                          550 + (i + game.hold_Figure.y) * zoom, zoom,
                                                                          zoom])
                if game.next_Figure is not None:
                    if p in game.next_Figure.image():
                        pygame.draw.rect(screen, game.next_Figure.color, [350 + (j + game.next_Figure.x) * zoom,
                                                                          200 + (i + game.next_Figure.y) * zoom, zoom,
                                                                          zoom])
    # Text surfaces. NOTE(review): fonts/surfaces are re-created every frame;
    # hoisting them out of the loop would be cheaper, but behaviour is the same.
    gameover_font = pygame.font.SysFont('Calibri', 55, True, False)
    text_gameover = gameover_font.render("Game Over!", True, (255, 0, 0))
    text_message = gameover_font.render("Press Space to Restart", True, (255, 0, 0))
    standard_font = pygame.font.SysFont('Arial', 55)
    little_font = pygame.font.SysFont('Arial', 35)
    next_Figure_text = little_font.render('Next Shape:', True, FOREGROUND)
    holding_text = little_font.render('Holding', True, FOREGROUND)
    Figure_text = little_font.render('Shape:', True, FOREGROUND)
    text_best_score = standard_font.render('New Highscore for best score!', True, (0, 255, 0))
    text_best_lines = standard_font.render('New Highscore for most lines!', True, (0, 255, 0))
    screen.blit(next_Figure_text, [490, 100])
    if game.hold_Figure is not None:
        screen.blit(holding_text, [510, 450])
        screen.blit(Figure_text, [510, 500])
    # Game over: compare against "Data File.txt" (line 1 = best score,
    # line 2 = most lines) and rewrite the file whenever a record is beaten.
    if game.state == "gameover":
        best_scores_file = open("Data File.txt", 'r')
        scores_with_new_lines = best_scores_file.readlines()
        best_score = scores_with_new_lines[0].split('\n')
        best_lines = scores_with_new_lines[1].split('\n')
        if game.score > int(best_score[0]):
            best_scores_file.close()
            game.new_best_score = True
            write_file = open("Data File.txt", 'w')
            write_file.writelines(str(game.score) + "\n" + str(best_lines[0]))
            write_file.close()
        if game.broken_lines > int(best_lines[0]):
            best_scores_file.close()
            game.new_best_lines = True
            write_file = open("Data File.txt", 'w')
            write_file.writelines(str(best_score[0]) + "\n" + str(game.broken_lines))
            write_file.close()
        if not best_scores_file.closed:
            best_scores_file.close()
        if game.new_best_score:
            screen.blit(text_best_score, [25, 400])
        if game.new_best_lines:
            screen.blit(text_best_lines, [25, 500])
        screen.blit(text_gameover, [25, 250])
        screen.blit(text_message, [25, 350])
    # Score / lines HUD along the top edge.
    score_font = pygame.font.SysFont('Calibri', 50, True, False)
    text_score = score_font.render("Score: " + str(game.score), True, FOREGROUND)
    text_lines = score_font.render("Lines: " + str(game.broken_lines), True, FOREGROUND)
    screen.blit(text_score, [20, 0])
    screen.blit(text_lines, [screen.get_width() - 200, 0])
    pygame.display.flip()
    clock.tick(fps)
pygame.quit()
| [
"noreply@github.com"
] | Daveloper20.noreply@github.com |
f4a8e3c81ba011c641b4218d7ed3cca00179f752 | e0c8662a56d89730043146ddc340e9e0b9f7de72 | /plugin/14e55cec-1596.py | 7b13f9266669dc060f05fe19bfca14b9054da31c | [] | no_license | izj007/bugscan_poc | f2ef5903b30b15c230b292a1ff2dc6cea6836940 | 4490f3c36d4033bdef380577333722deed7bc758 | refs/heads/master | 2020-09-22T17:20:50.408078 | 2019-01-18T09:42:47 | 2019-01-18T09:42:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 720 | py | #coding:utf-8
from lib.curl import *
# -*- coding: utf-8 -*-
"""
POC Name : OGNL console
Author : a
mail : a@lcx.cc
Referer: http://wooyun.org/bugs/wooyun-2010-080076
"""
import urlparse
def assign(service, arg):
    """Normalise a web target to its scheme://host/ root.

    Returns (True, root_url) for 'www' services; None for anything else.
    """
    if service != 'www':
        return None
    parts = urlparse.urlparse(arg)
    return True, '%s://%s/' % (parts.scheme, parts.netloc)
def audit(arg):
    """Probe the target root URL for an exposed Struts OGNL web console."""
    target = arg + '/struts/webconsole.html'
    code, head, res, errcode, _ = curl.curl('"%s"' % target)
    # A 200 page containing the console banner means the console is reachable.
    if code == 200 and "Welcome to the OGNL console" in res:
        security_info('find ognl console:' + target)
# Manual smoke test: run the PoC against a sample target.
if __name__ == '__main__':
    from dummy import *
    audit(assign('www', 'http://www.homilychart.com/')[1])
| [
"yudekui@wsmtec.com"
] | yudekui@wsmtec.com |
8787aeb0950cc8d74bb12753045c0ae4d10b16e6 | 17c280ade4159d4d8d5a48d16ba3989470eb3f46 | /18/mc/ExoDiBosonResonances/EDBRTreeMaker/test/crab3_analysisWprime1800.py | 9e802f49450f00b24370cdff361d92b3565fac2c | [] | no_license | chengchen1993/run2_ntuple | 798ff18489ff5185dadf3d1456a4462e1dbff429 | c16c2b203c05a3eb77c769f63a0bcdf8b583708d | refs/heads/master | 2021-06-25T18:27:08.534795 | 2021-03-15T06:08:01 | 2021-03-15T06:08:01 | 212,079,804 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,082 | py | from WMCore.Configuration import Configuration
# CRAB3 job configuration for the M=1800 GeV W' -> WZ signal sample.
config = Configuration()
config.section_("General")
config.General.requestName = 'Wprime_1800_weight_v2'
config.General.transferLogs = True
config.section_("JobType")
config.JobType.pluginName='Analysis'
config.JobType.sendExternalFolder=True# = 'Analysis'
# Jet-energy-correction text files shipped to the worker nodes
# (Autumn18 V19 MC: AK4/AK8, CHS and Puppi variants).
config.JobType.inputFiles = ['Autumn18_V19_MC_L1FastJet_AK4PFchs.txt','Autumn18_V19_MC_L2Relative_AK4PFchs.txt','Autumn18_V19_MC_L3Absolute_AK4PFchs.txt','Autumn18_V19_MC_L1FastJet_AK8PFchs.txt','Autumn18_V19_MC_L2Relative_AK8PFchs.txt','Autumn18_V19_MC_L3Absolute_AK8PFchs.txt','Autumn18_V19_MC_L1FastJet_AK8PFPuppi.txt','Autumn18_V19_MC_L2Relative_AK8PFPuppi.txt','Autumn18_V19_MC_L3Absolute_AK8PFPuppi.txt','Autumn18_V19_MC_L1FastJet_AK4PFPuppi.txt','Autumn18_V19_MC_L2Relative_AK4PFPuppi.txt','Autumn18_V19_MC_L3Absolute_AK4PFPuppi.txt']
#config.JobType.inputFiles = ['PHYS14_25_V2_All_L1FastJet_AK4PFchs.txt','PHYS14_25_V2_All_L2Relative_AK4PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK4PFchs.txt','PHYS14_25_V2_All_L1FastJet_AK8PFchs.txt','PHYS14_25_V2_All_L2Relative_AK8PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK8PFchs.txt']
# Name of the CMSSW configuration file
#config.JobType.psetName = 'bkg_ana.py'
config.JobType.psetName = 'analysis.py'
#config.JobType.allowUndistributedCMSSW = True
config.JobType.allowUndistributedCMSSW = True
config.section_("Data")
#config.Data.inputDataset = '/WJetsToLNu_13TeV-madgraph-pythia8-tauola/Phys14DR-PU20bx25_PHYS14_25_V1-v1/MINIAODSIM'
config.Data.inputDataset = '/WprimeToWZToWlepZhad_narrow_M-1800_13TeV-madgraph/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM'
config.Data.inputDBS = 'global'
#config.Data.inputDBS = 'phys03'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob =10
config.Data.totalUnits = -1  # -1 = process every file in the dataset
config.Data.publication = False
# This string is used to construct the output dataset name
config.Data.outputDatasetTag = 'Wprime_1800_weight_v2'
config.section_("Site")
# Where the output files will be transmitted to
config.Site.storageSite = 'T2_CH_CERN'
| [
"c.chen@cern.ch"
] | c.chen@cern.ch |
1ad22bd26da42377f4ca937904276d100a42ed17 | 864d98cb2b88b0aa703c1caaaae5bade0036b6ba | /nginx_blackout/utils.py | 15b3c31e8bde7735d8e1f5ef6838d8797e03cf65 | [
"MIT"
] | permissive | Vitalyudin/nginx-blackout-python | d70e043a5b843ef39f8ce04fa7198e83e818ac30 | 536038924ac990af10352c8a1a17f9c80f8931e9 | refs/heads/master | 2020-11-24T17:09:09.969810 | 2019-12-14T19:30:00 | 2019-12-14T19:31:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,509 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import time
from email.utils import formatdate
from typing import Any, Tuple, List
from aiohttp.web import Request, Response
from nginx_blackout.app import NginxBlackout
def get_app(request: Request) -> NginxBlackout:
app = request.app["nginx_blackout"] # type: NginxBlackout
return app
def norm_lang(s: str) -> str:
return s.strip().lower().replace("_", "-")
def parse_accept_language(s: str) -> List[str]:
raw_result = [] # type: List[Tuple[float, str]]
if not s:
return []
for lang_full in s.split(","):
lang_full = lang_full.strip()
if not lang_full:
continue
if ";" not in lang_full:
raw_result.append((1.0, norm_lang(lang_full)))
continue
lang, opts = lang_full.split(";", 1)
qvalue = 1.0
if opts.startswith("q="):
try:
qvalue = float(opts[2:])
except ValueError:
pass
raw_result.append((qvalue, norm_lang(lang)))
return [x[1] for x in sorted(raw_result, reverse=True)]
def get_request_locale(request: Request) -> str:
candidates = [] # type: List[str]
if request.query.get("locale"):
candidates.append(norm_lang(request.query["locale"]))
if request.cookies.get("locale"):
candidates.append(norm_lang(request.cookies["locale"]))
if request.headers.get("Accept-Language"):
candidates.extend(parse_accept_language(request.headers["Accept-Language"]))
known_locales = get_app(request).locales
for lang in candidates:
if lang in known_locales:
return lang
if "-" in lang:
lang_short = lang.split("-", 1)[0]
if lang_short in known_locales:
return lang_short
return known_locales[0]
def set_cache_headers(response: Response, max_age: int = 0) -> None:
epoch_seconds = int(time.time()) + max_age
response.headers["Expires"] = formatdate(epoch_seconds, usegmt=True)
response.headers["Cache-Control"] = "max-age={max_age}".format(max_age=max(0, max_age))
def get_localized_config(locale: str, value: Any, default: Any = None) -> Any:
if value is None:
return default
if not isinstance(value, dict):
return value
if "default" not in value:
raise ValueError("Localized config must have the 'default' key")
if locale in value:
return value[locale]
return value["default"]
| [
"andriyano-31@mail.ru"
] | andriyano-31@mail.ru |
d9463f6427fc42138a954db54cfe4ff9144205f9 | 668905b3a5caeb7ba3f9a575b996a2a32a48f5d7 | /crimemap.py | 0e53aceac454559b02dfe1f2e7484d008dd5c9fb | [] | no_license | Fola09/crimemap | 90a560a14bf28966e462b726bffec672fc30e222 | d36d212e97d9f368570b270c7905b1e612b07f6e | refs/heads/master | 2021-05-21T22:54:15.602513 | 2020-04-10T22:37:26 | 2020-04-10T22:37:26 | 252,843,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 793 | py | from flask import Flask, render_template, request
import dbconfig
if dbconfig.test:
from mockdbhelper import MockDBHelper as DBHelper
else:
from dbhelper import DBHelper
app = Flask(__name__)
DB = DBHelper()
@app.route("/")
def home():
try:
data = DB.get_all_inputs()
except Exception as e:
print(e)
data = None
return render_template("home.html", data=data)
@app.route("/add", methods=['POST'])
def add():
try:
data = request.form.get("userinput")
DB.add_input(data)
except Exception as e:
print(e)
return home()
@app.route('/clear')
def clear():
    """Delete every stored input, then re-render the home page."""
    try:
        DB.clear_all()
    except Exception as e:
        # Best-effort: report the failure and still return a page.
        print(e)
    return home()
# Start Flask's development server only when executed directly.
if __name__ == '__main__':
    app.run(port=5000, debug=True)
| [
"folaolayiwola.fo@gmail.com"
] | folaolayiwola.fo@gmail.com |
3e466dffd0b79a8e26b47596233aa19edadc61ce | 47b4d76e9c87e6c45bab38e348ae12a60a60f94c | /Mutation_Modules/GLN_ASN.py | efbed28aa20e0ca0a3e8faeedc094ac0a4d66aac | [] | no_license | PietroAronica/Parasol.py | 9bc17fd8e177e432bbc5ce4e7ee2d721341b2707 | 238abcdc2caee7bbfea6cfcdda1ca705766db204 | refs/heads/master | 2021-01-10T23:57:40.225140 | 2020-10-14T02:21:15 | 2020-10-14T02:21:15 | 70,791,648 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,045 | py | # GLN to ASN Mutation
import Frcmod_creator
import PDBHandler
import Leapy
from parmed.tools.actions import *
from parmed.amber.readparm import *
def parmed_command(vxi='VXI', lipid='No'):
    """Write charge-interpolated topologies for the GLN -> ASN mutation.

    For each of the 11 lambda windows (0/100, 10/90, ..., 100/0) this loads
    ``Solv_<a>_<100-a>.prmtop``, linearly interpolates the charges of
    residue *vxi* between the GLN values (window 0) and the ASN values
    (window 10), and writes the file back in place.

    The gamma carbon (renamed CG1 in the hybrid residue) and its hydrogens
    HG2/HG3 exist only in GLN, so their charges are scaled down to zero.
    *lipid* is accepted for interface compatibility with the other mutation
    modules but is not used here.
    """
    # bc: beginning-state (GLN) charges; fc: final-state (ASN) charges.
    bc = {}
    with open('Param_files/AminoAcid/GLN.param', 'r') as b:
        for line in b.readlines()[1:]:
            key, value = line.split()
            bc[key] = float(value)
    fc = {}
    with open('Param_files/AminoAcid/ASN.param', 'r') as b:
        for line in b.readlines()[1:]:
            key, value = line.split()
            fc[key] = float(value)
    for i in range(11):
        a = i*10
        i = float(i)
        parm = AmberParm('Solv_{}_{}.prmtop'.format(a, 100-a))
        change(parm, 'charge', ':{}@N'.format(vxi), bc['N']+((fc['N']-bc['N'])/10)*i).execute()
        change(parm, 'charge', ':{}@H'.format(vxi), bc['H']+((fc['H']-bc['H'])/10)*i).execute()
        change(parm, 'charge', ':{}@CA'.format(vxi), bc['CA']+((fc['CA']-bc['CA'])/10)*i).execute()
        change(parm, 'charge', ':{}@HA'.format(vxi), bc['HA']+((fc['HA']-bc['HA'])/10)*i).execute()
        change(parm, 'charge', ':{}@CB'.format(vxi), bc['CB']+((fc['CB']-bc['CB'])/10)*i).execute()
        change(parm, 'charge', ':{}@HB2'.format(vxi), bc['HB2']+((fc['HB2']-bc['HB2'])/10)*i).execute()
        change(parm, 'charge', ':{}@HB3'.format(vxi), bc['HB3']+((fc['HB3']-bc['HB3'])/10)*i).execute()
        # GLN-only atoms fade to zero charge as lambda approaches ASN.
        change(parm, 'charge', ':{}@CG1'.format(vxi), bc['CG']-((bc['CG'])/10)*i).execute()
        # Bug fix: these two lines previously re-assigned HB2/HB3 (silently
        # overriding the interpolation above) and never touched the gamma
        # hydrogens HG2/HG3, which lib_make names on atoms 9 and 10.
        change(parm, 'charge', ':{}@HG2'.format(vxi), bc['HG2']-((bc['HG2'])/10)*i).execute()
        change(parm, 'charge', ':{}@HG3'.format(vxi), bc['HG3']-((bc['HG3'])/10)*i).execute()
        change(parm, 'charge', ':{}@CG'.format(vxi), bc['CD']+((fc['CG']-bc['CD'])/10)*i).execute()
        change(parm, 'charge', ':{}@OD1'.format(vxi), bc['OE1']+((fc['OD1']-bc['OE1'])/10)*i).execute()
        change(parm, 'charge', ':{}@ND2'.format(vxi), bc['NE2']+((fc['ND2']-bc['NE2'])/10)*i).execute()
        change(parm, 'charge', ':{}@HD21'.format(vxi), bc['HE21']+((fc['HD21']-bc['HE21'])/10)*i).execute()
        change(parm, 'charge', ':{}@HD22'.format(vxi), bc['HE22']+((fc['HD22']-bc['HE22'])/10)*i).execute()
        change(parm, 'charge', ':{}@C'.format(vxi), bc['C']+((fc['C']-bc['C'])/10)*i).execute()
        change(parm, 'charge', ':{}@O'.format(vxi), bc['O']+((fc['O']-bc['O'])/10)*i).execute()
        #print printDetails(parm, ':VXI')
        setOverwrite(parm).execute()
        parmout(parm, 'Solv_{}_{}.prmtop'.format(a, 100-a)).execute()
def makevxi(struct, out, aa, vxi='VXI'):
    """Write *struct* to PDB file *out* with residue *aa* renamed to *vxi*.

    Atoms of the mutated residue are renamed from their GLN names to the
    hybrid GLN/ASN names (CG->CG1, CD->CG, OE1->OD1, NE2->ND2, HE21->HD21,
    HE22->HD22); every other atom is written unchanged.  CRYST1, TER and
    CONECT records from the original structure are preserved.
    """
    struct.residue_dict[aa].set_resname(vxi)
    # GLN -> hybrid atom-name mapping for the mutated residue.
    rename = {'CG': 'CG1', 'CD': 'CG', 'OE1': 'OD1', 'NE2': 'ND2',
              'HE21': 'HD21', 'HE22': 'HD22'}
    # `with` closes the file even on error (the original leaked the handle).
    with open(out, 'w') as pdb:
        try:
            pdb.write(struct.other_dict['Cryst1'].formatted())
        except KeyError:
            pass
        for res in struct.residue_list:
            for atom in res.atom_list:
                if res.get_resname() == vxi and atom.get_name() in rename:
                    pdb.write(atom.change_name(rename[atom.get_name()]))
                else:
                    pdb.write(atom.formatted())
                try:
                    # TER records are keyed by atom number; most atoms
                    # have none.
                    pdb.write(struct.other_dict[atom.get_number()].ter())
                except (KeyError, AttributeError):
                    pass
        for oth in struct.other_dict:
            try:
                # other_dict mixes string keys with atom-number keys; only
                # string keys support startswith (narrowed from bare except).
                if oth.startswith('Conect'):
                    pdb.write(struct.other_dict[oth].formatted())
            except AttributeError:
                pass
        pdb.write('END\n')
def variablemake(sym='^'):
    """Return the 15 hybrid-atom-type placeholder names.

    Each name is *sym* followed by one character of ``1``-``9``, ``0``,
    then ``a``-``e``, in that order; callers index into the tuple to pick
    the placeholder types for the disappearing atoms.
    """
    return tuple(sym + suffix for suffix in '1234567890abcde')
def lib_make(ff, outputfile, vxi='VXI', var=variablemake()):
    """Build the tleap library (.lib) for the hybrid GLN/ASN residue *vxi*.

    Writes a tleap control script ('lyp.in') that loads the hybrid PDB,
    assigns element/name/type to each of the residue's 17 atoms, declares
    the bond topology and the backbone connect points, and saves
    ``<vxi>.lib``.  *ff* is the force-field source file to load first.
    *var* supplies the placeholder atom types; the default is evaluated
    once at import time, which is safe because variablemake() returns a
    constant tuple.
    """
    # Hybrid atom types: intcar for the vanishing gamma carbon (atom 8),
    # inthyd for its hydrogens (atoms 9-10).
    intcar = var[0]
    inthyd = var[1]
    ctrl = open('lyp.in', 'w')
    ctrl.write("source %s\n"%ff)
    ctrl.write("%s=loadpdb Param_files/LibPDB/ASN-GLN.pdb\n"%vxi)
    # Chemical element of atoms 1-17 of the hybrid residue.
    ctrl.write('set %s.1.1 element "N"\n'%vxi)
    ctrl.write('set %s.1.2 element "H"\n'%vxi)
    ctrl.write('set %s.1.3 element "C"\n'%vxi)
    ctrl.write('set %s.1.4 element "H"\n'%vxi)
    ctrl.write('set %s.1.5 element "C"\n'%vxi)
    ctrl.write('set %s.1.6 element "H"\n'%vxi)
    ctrl.write('set %s.1.7 element "H"\n'%vxi)
    ctrl.write('set %s.1.8 element "C"\n'%vxi)
    ctrl.write('set %s.1.9 element "H"\n'%vxi)
    ctrl.write('set %s.1.10 element "H"\n'%vxi)
    ctrl.write('set %s.1.11 element "C"\n'%vxi)
    ctrl.write('set %s.1.12 element "O"\n'%vxi)
    ctrl.write('set %s.1.13 element "N"\n'%vxi)
    ctrl.write('set %s.1.14 element "H"\n'%vxi)
    ctrl.write('set %s.1.15 element "H"\n'%vxi)
    ctrl.write('set %s.1.16 element "C"\n'%vxi)
    ctrl.write('set %s.1.17 element "O"\n'%vxi)
    # PDB atom names: backbone (N/H/CA/HA/C/O) plus the hybrid side chain
    # (CG1/HG2/HG3 are the GLN-only gamma group; CG/OD1/ND2/... is the
    # ASN-style amide head).
    ctrl.write('set %s.1.1 name "N"\n'%vxi)
    ctrl.write('set %s.1.2 name "H"\n'%vxi)
    ctrl.write('set %s.1.3 name "CA"\n'%vxi)
    ctrl.write('set %s.1.4 name "HA"\n'%vxi)
    ctrl.write('set %s.1.5 name "CB"\n'%vxi)
    ctrl.write('set %s.1.6 name "HB2"\n'%vxi)
    ctrl.write('set %s.1.7 name "HB3"\n'%vxi)
    ctrl.write('set %s.1.8 name "CG1"\n'%vxi)
    ctrl.write('set %s.1.9 name "HG2"\n'%vxi)
    ctrl.write('set %s.1.10 name "HG3"\n'%vxi)
    ctrl.write('set %s.1.11 name "CG"\n'%vxi)
    ctrl.write('set %s.1.12 name "OD1"\n'%vxi)
    ctrl.write('set %s.1.13 name "ND2"\n'%vxi)
    ctrl.write('set %s.1.14 name "HD21"\n'%vxi)
    ctrl.write('set %s.1.15 name "HD22"\n'%vxi)
    ctrl.write('set %s.1.16 name "C"\n'%vxi)
    ctrl.write('set %s.1.17 name "O"\n'%vxi)
    # Amber atom types; atoms 8-10 get the interpolated placeholder types.
    ctrl.write('set %s.1.1 type "N"\n'%vxi)
    ctrl.write('set %s.1.2 type "H"\n'%vxi)
    ctrl.write('set %s.1.3 type "CT"\n'%vxi)
    ctrl.write('set %s.1.4 type "H1"\n'%vxi)
    ctrl.write('set %s.1.5 type "CT"\n'%vxi)
    ctrl.write('set %s.1.6 type "HC"\n'%vxi)
    ctrl.write('set %s.1.7 type "HC"\n'%vxi)
    ctrl.write('set %s.1.8 type "%s"\n'%(vxi, intcar))
    ctrl.write('set %s.1.9 type "%s"\n'%(vxi, inthyd))
    ctrl.write('set %s.1.10 type "%s"\n'%(vxi, inthyd))
    ctrl.write('set %s.1.11 type "C"\n'%vxi)
    ctrl.write('set %s.1.12 type "O"\n'%vxi)
    ctrl.write('set %s.1.13 type "N"\n'%vxi)
    ctrl.write('set %s.1.14 type "H"\n'%vxi)
    ctrl.write('set %s.1.15 type "H"\n'%vxi)
    ctrl.write('set %s.1.16 type "C"\n'%vxi)
    ctrl.write('set %s.1.17 type "O"\n'%vxi)
    # Bond topology of the hybrid residue.
    ctrl.write('bond %s.1.1 %s.1.2\n'%(vxi, vxi))
    ctrl.write('bond %s.1.1 %s.1.3\n'%(vxi, vxi))
    ctrl.write('bond %s.1.3 %s.1.4\n'%(vxi, vxi))
    ctrl.write('bond %s.1.3 %s.1.5\n'%(vxi, vxi))
    ctrl.write('bond %s.1.3 %s.1.16\n'%(vxi, vxi))
    ctrl.write('bond %s.1.5 %s.1.6\n'%(vxi, vxi))
    ctrl.write('bond %s.1.5 %s.1.7\n'%(vxi, vxi))
    ctrl.write('bond %s.1.5 %s.1.8\n'%(vxi, vxi))
    ctrl.write('bond %s.1.8 %s.1.9\n'%(vxi, vxi))
    ctrl.write('bond %s.1.8 %s.1.10\n'%(vxi, vxi))
    ctrl.write('bond %s.1.8 %s.1.11\n'%(vxi, vxi))
    ctrl.write('bond %s.1.11 %s.1.12\n'%(vxi, vxi))
    ctrl.write('bond %s.1.11 %s.1.13\n'%(vxi, vxi))
    ctrl.write('bond %s.1.13 %s.1.14\n'%(vxi, vxi))
    ctrl.write('bond %s.1.13 %s.1.15\n'%(vxi, vxi))
    ctrl.write('bond %s.1.16 %s.1.17\n'%(vxi, vxi))
    # Backbone connect points, unit naming, and library output.
    ctrl.write('set %s.1 connect0 %s.1.N\n'%(vxi, vxi))
    ctrl.write('set %s.1 connect1 %s.1.C\n'%(vxi, vxi))
    ctrl.write('set %s name "%s"\n'%(vxi, vxi))
    ctrl.write('set %s.1 name "%s"\n'%(vxi, vxi))
    ctrl.write('set %s head %s.1.N\n'%(vxi, vxi))
    ctrl.write('set %s tail %s.1.C\n'%(vxi, vxi))
    ctrl.write('saveoff %s %s.lib\n'%(vxi, vxi))
    ctrl.write("quit\n")
    ctrl.close()
    # Run tleap on the generated script, logging to *outputfile*.
    Leapy.run('lyp.in', outputfile)
def all_make():
    """Create a blank frcmod file for each of the 11 lambda windows."""
    for weight in range(0, 110, 10):
        Frcmod_creator.make('{}_{}.frcmod'.format(weight, 100 - weight))
def cal(x, y, i):
    """Linearly interpolate from *x* (at i == 0) to *y* (at i == 10)."""
    step = (y - x) / 10
    return x + step * i
def lac(y, x, i):
    """Linearly interpolate from *x* (at i == 0) to *y* (at i == 10).

    Same formula as cal() with the endpoint arguments swapped.
    """
    step = (y - x) / 10
    return x + step * i
def stock_add_to_all(var=variablemake()):
    """Populate every lambda-window frcmod file with interpolated terms.

    Declares the two placeholder atom types (carbon *intcar*, hydrogen
    *inthyd*), reads the stock force-field parameters, and for each 10%
    window writes MASS/BOND/ANGLE/DIHEDRAL/NONBON entries whose values are
    linearly interpolated (via lac()) between the real GLN-side parameters
    and the zeroed dummy-state parameters.
    """
    intcar = var[0]
    inthyd = var[1]
    Frcmod_creator.make_hyb()
    Frcmod_creator.TYPE_insert(intcar, 'C', 'sp3')
    Frcmod_creator.TYPE_insert(inthyd, 'H', 'sp3')
    # p maps each parameter label from Stock.param to its numeric columns.
    p = {}
    with open('Param_files/Stock/Stock.param', 'r') as b:
        data = b.readlines()[1:]
        for line in data:
            p[line.split()[0]] = []
            for point in line.split()[1:]:
                p[line.split()[0]].append(float(point))
        b.close()
    # One frcmod per window; at i=0 the real parameters apply, at i=10 the
    # dummy ('0_*') parameters apply.
    for i in range(11):
        a = i*10
        Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), intcar, lac(p['0_C'][0], p['CT'][0], i), lac(p['0_C'][1], p['CT'][1], i))
        Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), inthyd, lac(p['0_H'][0], p['HC'][0], i), lac(p['0_H'][1], p['HC'][1], i))
        Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', intcar), lac(p['CT_mC'][0], p['CT_CT'][0], i), lac(p['CT_mC'][1], p['CT_CT'][1], i))
        Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format(intcar, 'C '), lac(p['C_mC'][0], p['CT_C'][0], i), lac(p['C_mC'][1], p['CT_C'][1], i))
        Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format(intcar, inthyd), lac(p['HC_mC'][0], p['CT_HC'][0], i), lac(p['HC_mC'][1], p['CT_HC'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(inthyd, intcar, inthyd), lac(p['Close'][0], p['H_C_H'][0], i), lac(p['Close'][1], p['H_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', intcar, inthyd), lac(p['Dritt'][0], p['C_C_H'][0], i), lac(p['Dritt'][1], p['C_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(inthyd, intcar, 'C '), lac(p['Close'][0], p['C_C_H'][0], i), lac(p['Close'][1], p['C_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', intcar, 'C '), lac(p['Dritt'][0], p['CT_CT_C'][0], i), lac(p['Dritt'][1], p['CT_CT_C'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', intcar), lac(p['C_C_C'][0], p['C_C_C'][0], i), lac(p['C_C_C'][1], p['C_C_C'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('HC', 'CT', intcar), lac(p['C_C_H'][0], p['C_C_H'][0], i), lac(p['C_C_H'][1], p['C_C_H'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(intcar, 'C ', 'O '), lac(p['C_C_O'][0], p['C_C_O'][0], i), lac(p['C_C_O'][1], p['C_C_O'][1], i))
        Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(intcar, 'C ', 'N '), lac(p['C_C_N'][0], p['C_C_N'][0], i), lac(p['C_C_N'][1], p['C_C_N'][1], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(inthyd, intcar, 'C ', 'O '), lac(p['0_10'][0], p['H_C_C_O_1'][0], i), lac(p['0_10'][1], p['H_C_C_O_1'][1], i), lac(p['0_10'][2], p['H_C_C_O_1'][2], i), lac(p['0_10'][3], p['H_C_C_O_1'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(inthyd, intcar, 'C ', 'O '), lac(p['0_8'][0], p['H_C_C_O_2'][0], i), lac(p['0_8'][1], p['H_C_C_O_2'][1], i), lac(p['0_8'][2], p['H_C_C_O_2'][2], i), lac(p['0_8'][3], p['H_C_C_O_2'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(inthyd, intcar, 'C ', 'O '), lac(p['0_9'][0], p['H_C_C_O_3'][0], i), lac(p['0_9'][1], p['H_C_C_O_3'][1], i), lac(p['0_9'][2], p['H_C_C_O_3'][2], i), lac(p['0_9'][3], p['H_C_C_O_3'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('HC', 'CT', intcar, inthyd), lac(p['0_1'][0], p['H_C_C_H'][0], i), lac(p['0_1'][1], p['H_C_C_H'][1], i), lac(p['0_1'][2], p['H_C_C_H'][2], i), lac(p['0_1'][3], p['H_C_C_H'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', 'CT', intcar, inthyd), lac(p['0_1'][0], p['C_C_C_H'][0], i), lac(p['0_1'][1], p['C_C_C_H'][1], i), lac(p['0_1'][2], p['C_C_C_H'][2], i), lac(p['0_1'][3], p['C_C_C_H'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('HC', 'CT', intcar, 'C '), lac(p['0_4'][0], p['X_C_C_X'][0], i), lac(p['0_4'][1], p['X_C_C_X'][1], i), lac(p['0_4'][2], p['X_C_C_X'][2], i), lac(p['0_4'][3], p['X_C_C_X'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', 'CT', intcar, 'C '), lac(p['0_4'][0], p['X_C_C_X'][0], i), lac(p['0_4'][1], p['X_C_C_X'][1], i), lac(p['0_4'][2], p['X_C_C_X'][2], i), lac(p['0_4'][3], p['X_C_C_X'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'O '), lac(p['Ring_Dihe_2'][0], p['Ring_Dihe_2'][0], i), lac(p['Ring_Dihe_2'][1], p['Ring_Dihe_2'][1], i), lac(p['Ring_Dihe_2'][2], p['Ring_Dihe_2'][2], i), lac(p['Ring_Dihe_2'][3], p['Ring_Dihe_2'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'N '), lac(p['0_3'][0], p['C_C_C_N_1'][0], i), lac(p['0_3'][1], p['C_C_C_N_1'][1], i), lac(p['0_3'][2], p['C_C_C_N_1'][2], i), lac(p['0_3'][3], p['C_C_C_N_1'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'N '), lac(p['0_8'][0], p['C_C_C_N_2'][0], i), lac(p['0_8'][1], p['C_C_C_N_2'][1], i), lac(p['0_8'][2], p['C_C_C_N_2'][2], i), lac(p['0_8'][3], p['C_C_C_N_2'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'N '), lac(p['0_2'][0], p['C_C_C_N_3'][0], i), lac(p['0_2'][1], p['C_C_C_N_3'][1], i), lac(p['0_2'][2], p['C_C_C_N_3'][2], i), lac(p['0_2'][3], p['C_C_C_N_3'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', intcar, 'C ', 'N '), lac(p['0_7'][0], p['C_C_C_N_4'][0], i), lac(p['0_7'][1], p['C_C_C_N_4'][1], i), lac(p['0_7'][2], p['C_C_C_N_4'][2], i), lac(p['0_7'][3], p['C_C_C_N_4'][3], i))
        Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(inthyd, intcar, 'C ', 'N '), lac(p['Ring_Dihe_2'][0], p['Ring_Dihe_2'][0], i), lac(p['Ring_Dihe_2'][1], p['Ring_Dihe_2'][1], i), lac(p['Ring_Dihe_2'][2], p['Ring_Dihe_2'][2], i), lac(p['Ring_Dihe_2'][3], p['Ring_Dihe_2'][3], i))
        Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), intcar, lac(p['0_C'][2], p['CT'][2], i), lac(p['0_C'][3], p['CT'][3], i))
        Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), inthyd, lac(p['0_H'][2], p['HC'][2], i), lac(p['0_H'][3], p['HC'][3], i))
| [
"pietro.ga.aronica@gmail.com"
] | pietro.ga.aronica@gmail.com |
9237c6352187b386e2dc6a1d16bfa2974937c6a3 | 927b9a4394626f8fcba83eb8944c17c5b61197e8 | /test.py | 6007dcea9364a9576b3ae4163dd2086e733ba025 | [] | no_license | Natsuyu/First | e9e66074f6883667852d191b480a1a15152fdecc | 346c4823d317db9538670e9ff55b9692423d701e | refs/heads/master | 2020-03-27T21:24:43.732674 | 2018-09-03T02:18:40 | 2018-09-03T02:18:40 | 147,141,239 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,971 | py | ###################################################
# Nicolo Savioli, 2017 -- Conv-GRU pytorch v 1.0 #
###################################################
import torch
from torch import nn
import torch.nn.functional as f
from torch.autograd import Variable
class ConvGRUCell(nn.Module):
    """A single convolutional GRU cell.

    Gates are computed with 2D convolutions instead of matrix products, so
    the hidden state keeps the spatial layout of the input
    (batch, hidden_size, H, W).  "Same" padding of kernel_size // 2 keeps
    the spatial size unchanged; use an odd kernel_size.
    """

    def __init__(self, input_size, hidden_size, kernel_size, cuda_flag):
        super(ConvGRUCell, self).__init__()
        self.input_size = input_size
        self.cuda_flag = cuda_flag
        self.hidden_size = hidden_size
        self.kernel_size = kernel_size
        # One convolution produces both gates (reset + update => 2*hidden).
        # Bug fix: the kernel size was hard-coded to 3 while the padding
        # used kernel_size // 2, which broke "same" padding for any
        # kernel_size != 3.
        self.ConvGates = nn.Conv2d(self.input_size + self.hidden_size,
                                   2 * self.hidden_size, self.kernel_size,
                                   padding=self.kernel_size // 2)
        # Candidate-state convolution.
        self.Conv_ct = nn.Conv2d(self.input_size + self.hidden_size,
                                 self.hidden_size, self.kernel_size,
                                 padding=self.kernel_size // 2)

    def forward(self, input, hidden):
        """Advance the cell one step; a None *hidden* starts from zeros."""
        if hidden is None:
            size_h = [input.data.size()[0], self.hidden_size] + list(input.data.size()[2:])
            if self.cuda_flag == True:
                hidden = Variable(torch.zeros(size_h)).cuda()
            else:
                hidden = Variable(torch.zeros(size_h))
        c1 = self.ConvGates(torch.cat((input, hidden), 1))
        (rt, ut) = c1.chunk(2, 1)
        # torch.sigmoid/torch.tanh replace the deprecated
        # torch.nn.functional variants (same math).
        reset_gate = torch.sigmoid(rt)
        update_gate = torch.sigmoid(ut)
        gated_hidden = torch.mul(reset_gate, hidden)
        p1 = self.Conv_ct(torch.cat((input, gated_hidden), 1))
        ct = torch.tanh(p1)
        # Convex combination of the previous state and the candidate state.
        next_h = torch.mul(update_gate, hidden) + (1 - update_gate) * ct
        return next_h
def test(num_seqs, channels_img, size_image, max_epoch, model, cuda_test):
    """Smoke-test *model* on random sequences, printing the running MSE.

    Builds a random input/target sequence of *num_seqs* frames of shape
    (1, channels_img, size_image, size_image), feeds every frame to *model*
    (always with a fresh None hidden state) for *max_epoch* epochs, and
    prints the cumulative MSE after each epoch.
    """
    input_image = torch.rand(num_seqs, 1, channels_img, size_image, size_image)
    target_image = torch.rand(num_seqs, 1, channels_img, size_image, size_image)
    print('\n\n ==> Create Autograd Variables:')
    input_gru = Variable(input_image)
    target_gru = Variable(target_image)
    if cuda_test == True:
        input_gru = input_gru.cuda()
        target_gru = target_gru.cuda()
    print('\n\n ==> Create a MSE criterion:')
    MSE_criterion = nn.MSELoss()
    if cuda_test == True:
        print("==> test on the GPU active")
        MSE_criterion = MSE_criterion.cuda()
    err = 0
    # Bug fix: the original used Python 2's xrange(), which is a NameError
    # on the Python 3 this file otherwise targets (print() calls above).
    for e in range(max_epoch):
        for time in range(num_seqs):
            h_next = model(input_gru[time], None)
            err += MSE_criterion(h_next[0], target_gru[time])
        # err is a 0-dim tensor on modern PyTorch; .item() extracts the
        # float (the original err.data[0] raises IndexError there).
        print(err.item())
def main():
    """Build a ConvGRUCell with demo hyper-parameters and smoke-test it."""
    num_seqs = 10
    hidden_size = 3
    channels_img = 3
    size_image = 256
    max_epoch = 10
    cuda_flag = False
    kernel_size = 3
    print('Init Conv GRUs model:')
    model = ConvGRUCell(channels_img, hidden_size, kernel_size, cuda_flag)
    if cuda_flag:
        model = model.cuda()
    print(repr(model))
    test(num_seqs, channels_img, size_image, max_epoch, model, cuda_flag)


if __name__ == '__main__':
    main()
| [
"15674984531@163.com"
] | 15674984531@163.com |
00a2b0a5e7dd0cf127743255beda0c97d6f09bc5 | 5c5a4a88f29c1e78a5ba2e41758a4ee59d9d91f2 | /main.py | ae70bbc33d87df2ad09fac8872fcd50df2c5b0ee | [] | no_license | prathamesh43/FFT | dd11057136ad45344849c4d97732d708e01ff907 | 464cd98d5cfa251995c7bc13b54b66b44a148174 | refs/heads/main | 2023-03-30T18:48:01.041259 | 2021-03-31T05:54:11 | 2021-03-31T05:54:11 | 352,614,480 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,992 | py | import smbus
import time
from time import sleep
import sys
# I2C bus number 1 -- assumes a host (e.g. Raspberry Pi) where the sensor
# sits on /dev/i2c-1; confirm for other boards.
bus = smbus.SMBus(1)
# Standard gravity, used to convert g readings to m/s^2 in get_axes().
EARTH_GRAVITY_MS2 = 9.80665
# Raw LSB -> g conversion factor (presumably 4 mg/LSB full-resolution mode
# per the ADXL345 datasheet -- confirm).
SCALE_MULTIPLIER = 0.004
# ADXL345 register addresses (names follow the datasheet conventions).
DATA_FORMAT = 0x31
BW_RATE = 0x2C
POWER_CTL = 0x2D
# Output-data-rate codes written to BW_RATE.
BW_RATE_1600HZ = 0x0F
BW_RATE_800HZ = 0x0E
BW_RATE_400HZ = 0x0D
BW_RATE_200HZ = 0x0C
BW_RATE_100HZ = 0x0B
BW_RATE_50HZ = 0x0A
BW_RATE_25HZ = 0x09
# Measurement-range codes written into DATA_FORMAT.
RANGE_2G = 0x00
RANGE_4G = 0x01
RANGE_8G = 0x02
RANGE_16G = 0x03
# POWER_CTL measurement-enable bit.
MEASURE = 0x08
# First of the six axis-data registers (x low byte).
AXES_DATA = 0x32
class ADXL345:
    """Minimal driver for the ADXL345 accelerometer over I2C/SMBus."""

    address = None

    def __init__(self, address=0x53):
        self.address = address
        self.set_bandwidth_rate(BW_RATE_1600HZ)
        self.set_range(RANGE_16G)
        self.enable_measurement()

    def enable_measurement(self):
        """Switch the chip from standby into measurement mode."""
        bus.write_byte_data(self.address, POWER_CTL, MEASURE)

    def set_bandwidth_rate(self, rate_flag):
        """Set the output data rate (one of the BW_RATE_* codes)."""
        bus.write_byte_data(self.address, BW_RATE, rate_flag)

    def set_range(self, range_flag):
        """Select the measurement range via the DATA_FORMAT register."""
        value = bus.read_byte_data(self.address, DATA_FORMAT)
        value &= ~0x0F      # clear the low configuration bits
        value |= range_flag
        value |= 0x08       # set bit 3 -- full-resolution mode, presumably; confirm against datasheet
        bus.write_byte_data(self.address, DATA_FORMAT, value)

    def get_axes(self, gforce=False):
        """Read one (x, y, z) sample.

        Returns a dict of values in g when *gforce* is true, otherwise in
        m/s^2; each value is rounded to 4 decimal places.
        """
        # Renamed from `bytes`, which shadowed the builtin.
        raw = bus.read_i2c_block_data(self.address, AXES_DATA, 6)

        def to_signed(lo, hi):
            # Each axis is a 16-bit little-endian two's-complement value.
            value = lo | (hi << 8)
            if value & (1 << 15):
                value -= 1 << 16
            return value

        x = to_signed(raw[0], raw[1]) * SCALE_MULTIPLIER
        y = to_signed(raw[2], raw[3]) * SCALE_MULTIPLIER
        z = to_signed(raw[4], raw[5]) * SCALE_MULTIPLIER
        if not gforce:
            x *= EARTH_GRAVITY_MS2
            y *= EARTH_GRAVITY_MS2
            z *= EARTH_GRAVITY_MS2
        return {"x": round(x, 4), "y": round(y, 4), "z": round(z, 4)}
acc = ADXL345()
try:
    while True:
        # Bug fix: the class defines get_axes(); the original called the
        # nonexistent acc.getAxes(), raising AttributeError immediately.
        print(acc.get_axes())
        time.sleep(1)
except KeyboardInterrupt:
    sys.exit()
| [
"noreply@github.com"
] | prathamesh43.noreply@github.com |
47cd266a23e4bba7b55e0fddd37c91c1827f8bbd | 78d9746ca970eae570de1660c2eb2aeea79bc77b | /Week2/2-fileExercises/index.py | 5e11bad2b61f4c9d211ecd98494a6192be91f13e | [] | no_license | egarcia410/digitalCraft | ca942c7ed7676c19c324ba99f4d4bbfed9767438 | e12199876818415072a20e6e506864f778e1d9a2 | refs/heads/master | 2021-05-16T05:46:12.486790 | 2017-11-08T17:03:54 | 2017-11-08T17:03:54 | 103,176,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,150 | py | """File I/O Exercies"""
# def exercise1():
# """Exercise 1: User inputs file name and read contents of file"""
# response = input('Filename: ')
# fname = response + '.txt'
# with open(fname, 'w') as f:
# f.write('This is the first line of the file.\n')
# f.write('Second line of the file\n')
# with open(fname, 'r') as f:
# for line in f:
# print(line, end='')
# exercise1()
# ############################################################################
# def exercise2():
# """Exercise 2: User inpute file and contents, reads contents of file"""
# response = input('Filename: ')
# fname = response + '.txt'
# with open(fname, 'w+') as f:
# f.write('This is the first line of the file.\n')
# f.write('Second line of the file\n')
# with open(fname, 'r') as f:
# for line in f:
# print(line, end='')
# exercise2
# ############################################################################
# def exercise3():
# """Exercise 3: Prints letter/word histogram of file contents"""
# wordDict = {}
# letterDict = {}
# response = input('Filename: ')
# fname = response + '.txt'
# with open(fname, 'w+') as f:
# f.write('This is the first line of the file.\n')
# f.write('Second line of the file\n')
# with open(fname, 'r') as f:
# for line in f:
# words = line.split(" ")
# for word in words:
# word = word.lower()
# if word in wordDict:
# wordDict[word] += 1
# else:
# wordDict[word] = 1
# for letter in line:
# letter = letter.lower()
# if letter in letterDict:
# letterDict[letter] += 1
# else:
# letterDict[letter] = 1
# print('Word Histogram: ', wordDict)
# print('Letter Histogram: ', letterDict)
# exercise3()
# ############################################################################
# import json
# import matplotlib.pyplot as plot
# def exercise4():
# """Exercise 4: Takes a JSON file name as input and plots the X,Y data"""
# data = { 'data': [
# [1, 1],
# [2, 2],
# [3, 3],
# [4, 4] ]
# }
# xCord = []
# yCord = []
# with open('data.json', 'w') as f:
# json.dump(data, f)
# with open('data.json', 'r') as f:
# data = json.load(f)
# for cord in data['data']:
# xCord.append(cord[0])
# yCord.append(cord[1])
# plot.plot(xCord, yCord, 'ro')
# plot.show()
# exercise4()
# ############################################################################
# import sys
# def crashTest():
# """Bonus Exercise: Write a program that writes to an in memory file until your program dies"""
# text = ""
# fh = open('hello.txt', 'w+')
# while True:
# text += 'a\n'
# fh.write(text)
# content = fh.read()
# print(sys.getsizeof(content))
# crashTest()
| [
"egarcia410@gmail.com"
] | egarcia410@gmail.com |
1b2c1db63a3bce1dfc0fe2ab7537df8b210b0ef0 | b3c83f15ae2a77d7af5262e3a9bba5a3efba48b6 | /hello.py | 0f64b95b29101a969ec284f0a838ba1468c958e6 | [] | no_license | ujjawalgupta29/Stack_Overflow_Search | 0c9c79625187839a839f3eef1406add6ff4b2798 | 6bca90a58da401ba95345886931e24792c48a19f | refs/heads/master | 2023-02-02T01:50:05.166647 | 2020-12-22T19:46:36 | 2020-12-22T19:46:36 | 323,717,675 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27 | py | print("Hello")
print(3+'u')  # NOTE(review): raises TypeError (int + str); looks like a deliberate error demo -- confirm intent
"gujjawal29@gmail.com"
] | gujjawal29@gmail.com |
c4af510cd5d3874ffba5624683153abbd49b06d4 | 6b07203afd7128cc82aac98d5748feaedb198623 | /Project636/network.py | 33164888d8779152c8bf1aca2eb4a400da57f688 | [] | no_license | tiandi111/road-to-dl | 82199cf05bce32c2406aa59c0bc6548a34ba7189 | 2591c6bb4b304e3a71f62e85c9d0f1a6d7fc6f4c | refs/heads/master | 2023-01-20T03:19:29.834614 | 2020-11-21T23:57:26 | 2020-11-21T23:57:26 | 291,029,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,186 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
class ResBlock(nn.Module):
    """Pre-activation bottleneck residual block.

    Main path: BN-ReLU-Conv(1x1) -> BN-ReLU-Conv(3x3) -> BN-ReLU-Conv(1x1),
    narrowing to *bottleC* channels in the middle and producing *outC*
    channels.  When *downSampling* is True the first 1x1 conv uses stride 2.
    A 1x1 projection shortcut is used whenever the identity cannot be added
    directly, i.e. when the channel count changes or the block downsamples.
    """

    def __init__(self, inC, bottleC, outC: int, downSampling=False):
        super(ResBlock, self).__init__()
        stride = 2 if downSampling else 1
        self.Block = nn.Sequential(
            nn.BatchNorm2d(inC),
            nn.ReLU(),
            nn.Conv2d(inC, bottleC, (1, 1), stride, 0),
            nn.BatchNorm2d(bottleC),
            nn.ReLU(),
            nn.Conv2d(bottleC, bottleC, (3, 3), 1, 1),
            nn.BatchNorm2d(bottleC),
            nn.ReLU(),
            nn.Conv2d(bottleC, outC, (1, 1), 1, 0),
        )
        # Bug fix: the original projected only when inC != outC, so a
        # downsampling block with inC == outC added tensors of different
        # spatial sizes and crashed; project whenever the shape changes.
        if inC != outC or downSampling:
            self.Shortcut = nn.Conv2d(inC, outC, (1, 1), stride, 0)
        else:
            self.Shortcut = None

    def forward(self, X: torch.Tensor):
        if self.Shortcut is not None:
            return self.Block(X) + self.Shortcut(X)
        return self.Block(X) + X
class ResNet(nn.Module):
    """Bottleneck ResNet: stem conv, stacked ResBlocks, global average
    pooling, and a 10-way linear classifier.

    stackSize[i] gives the number of residual blocks in stage i.  Stage i
    outputs 4 * firstNumFilter * 2**i channels, and every stage after the
    first halves the spatial resolution in its first block.
    """

    def __init__(self, firstNumFilter: int, stackSize = (2, 2, 2, 2)):
        super(ResNet, self).__init__()
        self.InConv = nn.Conv2d(3, firstNumFilter, (3, 3), 1, 1)
        self.ResPart = nn.Sequential()
        self.lastOutC = firstNumFilter
        for stage, depth in enumerate(stackSize):
            stage_out = 4 * firstNumFilter * (2 ** stage)
            bottleneck = int(stage_out / 4)
            for block in range(depth):
                # Only the first block of stages > 0 downsamples.
                self.ResPart.add_module(
                    "stack{i}_{j}".format(i=stage, j=block),
                    ResBlock(self.lastOutC, bottleneck, stage_out,
                             downSampling=(stage > 0 and block == 0)))
                self.lastOutC = stage_out
        self.FC = nn.Linear(self.lastOutC, 10)

    def forward(self, x: torch.Tensor):
        """Map x of shape [batch, 3, H, W] to class logits [batch, 10]."""
        x = self.InConv(x)
        x = self.ResPart(x)
        x = F.adaptive_avg_pool2d(x, (1, 1))
        batch, channels = int(x.size()[0]), int(x.size()[1])
        return self.FC(x.view(batch, channels))
| [
"tiandi03@kuaishou.com"
] | tiandi03@kuaishou.com |
90671ee6e4387190edadaad51537ca557f763924 | 9ead5fcc5efaf7a73c4c585d813c1cddcb89666d | /m5/src/mem/slicc/ast/FormalParamAST.py | c64731196d5ef5004fa09540cf47bf11b12ed267 | [
"BSD-3-Clause"
] | permissive | x10an14/tdt4260Group | b539b6271c8f01f80a9f75249779fb277fa521a4 | 1c4dc24acac3fe6df749e0f41f4d7ab69f443514 | refs/heads/master | 2016-09-06T02:48:04.929661 | 2014-04-08T10:40:22 | 2014-04-08T10:40:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,396 | py | # Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.ast.AST import AST
from slicc.symbols import Var
class FormalParamAST(AST):
    """AST node for one formal parameter of a SLICC function or action."""

    def __init__(self, slicc, type_ast, ident, default = None, pointer = False):
        super(FormalParamAST, self).__init__(slicc)
        self.type_ast = type_ast
        self.ident = ident
        self.default = default
        self.pointer = pointer

    def __repr__(self):
        return "[FormalParamAST: %s]" % self.ident

    @property
    def name(self):
        """The parameter's identifier as written in the SLICC source."""
        return self.ident

    def generate(self):
        """Register the parameter in the symbol table and emit its C++ form.

        Returns a (type, declaration) pair such as (Type, "int param_x").
        """
        # Renamed from `type`, which shadowed the builtin.
        param_type = self.type_ast.type
        code_name = "param_%s" % self.ident
        # Add to symbol table so the generated body can reference it.
        symbol = Var(self.symtab, self.ident, self.location, param_type,
                     code_name, self.pairs)
        self.symtab.newSymbol(symbol)
        return param_type, "%s %s" % (param_type.c_ident, code_name)
| [
"chrischa@stud.ntnu.no"
] | chrischa@stud.ntnu.no |
062c3c2ce797601c2db2451622590172ca277679 | 526dde1a4ec06adc2ab97833871934f6bce626fc | /simplesocial/groups/urls.py | 1290aaa7f2d4d2cb787b4000190a9c73d45edaab | [] | no_license | CristianCondruz/SocialSiteClone | 99dcfc6a6c7e85220cdd893ff3cfdc7537b07d28 | 886ac792c5112c1ffb928d5a52bbf88dd77064e3 | refs/heads/master | 2020-03-30T17:02:41.961616 | 2018-10-05T14:45:43 | 2018-10-05T14:45:43 | 151,439,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | from django.urls import path
from groups import views
app_name = 'groups'
urlpatterns = [
path('',views.ListGroups.as_view(),name='all'),
path('new/',views.CreateGroup.as_view(),name='create'),
re_path('posts/in/(?P<slug>[-\w]+)/$',views.SingleGroup.as_view(),name='single'),
re_path('join/(?P<slug>[-\w]+)/$',views.JoinGroup.as_view(),name='join'),
re_path('leave/(?P<slug>[-\w]+)/$',views.LeaveGroup.as_view(),name='leave'),
]
| [
"cruz@enea.se"
] | cruz@enea.se |
76c6d426ea19c82ba2d57cfb8810ec4fedfbf1d8 | f03f7f4cad663f4687b8b87ea9a001cd7a0c6b31 | /rule_engine/asgi.py | 626b087bf951a5d79ee0f8275ef1dc902482b7ec | [] | no_license | amarbabuk/rule-engine | 79f05a2338539a8791aaea3a0432e4b8a1a7d1d3 | 9b7a504501d2db02178e4bbeac0409dfd0ba4833 | refs/heads/master | 2023-05-03T20:40:01.259232 | 2021-05-15T21:24:18 | 2021-05-15T21:24:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | """
ASGI config for rule_engine project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project settings before building the application;
# setdefault lets an externally-set DJANGO_SETTINGS_MODULE take precedence.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'rule_engine.settings')
# Module-level ASGI callable that servers (uvicorn/daphne/...) import.
application = get_asgi_application()
| [
"kohlivishrut@gmail.com"
] | kohlivishrut@gmail.com |
c0a4a20a1dec5c8a8f05d500bd1ef929d52fcfbe | 3f25a47dfaa9fb46ba1af7b4b5390c84fb6c2f24 | /CollatzSequence .py | b1d1b4c560bb9c8a16397688e33d3bec8464c198 | [] | no_license | folger/ATBSWP | 937590c52efc7fd385dc00bd33876807530a30b0 | 650300839caf26f425ab41308fc184f7d9171499 | refs/heads/master | 2021-01-05T01:38:55.270341 | 2020-02-16T04:46:36 | 2020-02-16T04:46:36 | 240,832,601 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 88 | py | while (1):
try:
n = int(input())
print(n)
except:
break
| [
"folger@originlab.com.cn"
] | folger@originlab.com.cn |
e7a02d735c47a7acf5c428993b534c9e81cc7de3 | 9bbe3bd5cb5c0373f4b7f7129f641194a2f83551 | /Chapter10-Object Oriented Programing/04_employee.py | d3cd124567d5a1b1a2acc563e53738b669b99af9 | [] | no_license | ParitoshBarman/Python-Practice | bbcb7dd0b35fc0ebd4854e7c502f0d9e56ec242e | 4d094148c876c45be5d4051860ae492b3de5ef18 | refs/heads/master | 2023-03-05T08:47:20.564854 | 2021-02-21T11:03:12 | 2021-02-21T11:03:12 | 340,874,365 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 289 | py | class Employee:
company = "Google"
sallary = 100
pari = Employee()
rajni = Employee()
pari.sallary = 300
rajni.sallary = 400
print(pari.company)
print(rajni.company)
Employee.company = "YouTube"
print(pari.company)
print(rajni.company)
print(pari.sallary)
print(rajni.sallary) | [
"barmanpari163@gmail.com"
] | barmanpari163@gmail.com |
669563710a76da0b0965af59920ba5fa960381db | a1f009fbc7700cd17fffcd97518bda1593064e33 | /source_code/python/python_advanced/strings/bytes.py | b6ee415eaff9935b7df255dd1b656f9772eacbb5 | [] | no_license | Alrin12/ComputerScienceSchool | 2db06f9d198f67ad587535b3cab0dabd8a4b8e5c | 7543ae686394fc573f80bf680ae4371a2871dede | refs/heads/master | 2021-01-23T15:04:22.672139 | 2017-07-17T15:32:31 | 2017-07-17T15:32:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | b = b"abcde"
#print(b)
#print(b.upper())
#print(b.startswith(b"ab"))
#bytes -> string
string = b.decode('UTF-8')
print(string)
| [
"ythwork83@gmail.com"
] | ythwork83@gmail.com |
958bdbd617eda01eb001718f9cb42ed2b5e25f47 | 444a9480bce2035565332d4d4654244c0b5cd47b | /research/cv/REDNet30/train.py | 28c5e009e67749e074c17f9960038bf0f668f8c1 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] | permissive | mindspore-ai/models | 7ede9c6454e77e995e674628204e1c6e76bd7b27 | eab643f51336dbf7d711f02d27e6516e5affee59 | refs/heads/master | 2023-07-20T01:49:34.614616 | 2023-07-17T11:43:18 | 2023-07-17T11:43:18 | 417,393,380 | 301 | 92 | Apache-2.0 | 2023-05-17T11:22:28 | 2021-10-15T06:38:37 | Python | UTF-8 | Python | false | false | 4,602 | py | # Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""train rednet30."""
import argparse
import os
import time
import mindspore.nn as nn
from mindspore import context
from mindspore import dataset as ds
from mindspore.context import ParallelMode
from mindspore.communication.management import init, get_rank
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
from mindspore.common import set_seed
from mindspore.train.model import Model
from mindspore.train.loss_scale_manager import DynamicLossScaleManager
from src.dataset import Dataset
from src.model import REDNet30
def train_net(opt):
    """Build and run the REDNet30 training pipeline from parsed options.

    Configures the execution context (GPU or Ascend, optionally
    data-parallel), shards and batches the patch dataset, wraps the network
    in a mixed-precision Model with dynamic loss scaling, attaches
    time/loss/checkpoint callbacks and trains for ``opt.num_epochs`` epochs.
    """
    dev_id = int(os.getenv('DEVICE_ID', '0'))
    rank = int(os.getenv('RANK_ID', '0'))
    group_size = int(os.getenv('DEVICE_NUM', '1'))

    # Execution context: GPU needs no explicit device id, Ascend does.
    if opt.platform == "GPU":
        context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="GPU")
    else:
        context.set_context(mode=context.GRAPH_MODE, save_graphs=False,
                            device_target="Ascend", device_id=dev_id)

    if opt.is_distributed:
        # Data-parallel training: each device consumes its own dataset shard.
        init()
        rank = get_rank()
        context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL,
                                          device_num=group_size, gradients_mean=True)

    print("============== Loading Data ==============")
    patches = Dataset(opt.dataset_path, opt.patch_size)
    train_loader = ds.GeneratorDataset(patches, ["input", "label"], num_shards=group_size,
                                       shard_id=rank, shuffle=True)
    train_loader = train_loader.batch(opt.batch_size, drop_remainder=True)
    steps_per_epoch = train_loader.get_dataset_size()

    print("============== Loading Model ==============")
    net = REDNet30()
    optimizer = nn.Adam(net.trainable_params(), learning_rate=opt.lr)
    loss = nn.MSELoss()
    scale_manager = DynamicLossScaleManager(init_loss_scale=opt.init_loss_scale, scale_window=1000)
    # Mixed-precision training (amp_level "O3") with dynamic loss scaling.
    trainer = Model(net, loss_fn=loss, optimizer=optimizer,
                    loss_scale_manager=scale_manager, amp_level="O3")

    # Callbacks: per-step timing, loss printing, periodic checkpointing.
    callbacks = [TimeMonitor(data_size=steps_per_epoch), LossMonitor()]
    ckpt_config = CheckpointConfig(keep_checkpoint_max=opt.ckpt_save_max)
    callbacks.append(ModelCheckpoint(prefix='RedNet30_{}'.format(rank),
                                     directory=os.path.join("ckpt", 'ckpt_' + str(rank) + '/'),
                                     config=ckpt_config))

    print("============== Starting Training ==============")
    trainer.train(opt.num_epochs, train_loader, callbacks=callbacks, dataset_sink_mode=True)
    print("================== Finished ==================")
if __name__ == "__main__":
    # Command-line configuration for a single training run.
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset_path', type=str, default='./data/BSD300', help='training image path')
    parser.add_argument('--platform', type=str, default='GPU', choices=('Ascend', 'GPU'), help='run platform')
    # NOTE(review): argparse's type=bool treats ANY non-empty string
    # (including "False") as True; only an empty value parses as False.
    # Confirm callers pass this flag as intended.
    parser.add_argument('--is_distributed', type=bool, default=False, help='distributed training')
    parser.add_argument('--patch_size', type=int, default=50, help='training patch size')
    parser.add_argument('--batch_size', type=int, default=16, help='training batch size')
    parser.add_argument('--num_epochs', type=int, default=1000, help='epoch number')
    parser.add_argument('--lr', type=float, default=1e-4, help='learning rate')
    parser.add_argument('--seed', type=int, default=1, help='random seed')
    parser.add_argument('--ckpt_save_max', type=int, default=5, help='maximum number of checkpoint files can be saved')
    parser.add_argument('--init_loss_scale', type=float, default=65536., help='initialize loss scale')
    option = parser.parse_args()
    # Fix the global RNG seed for reproducibility.
    set_seed(option.seed)
    # Time the whole training run end to end.
    time_start = time.time()
    train_net(option)
    time_end = time.time()
    print('train time: %f' % (time_end - time_start))
| [
"17863107261@163.com"
] | 17863107261@163.com |
b8b4375854b3f06c943f5c05c9c5dad65c548f2b | cb74e628b4a1a10952c42d16563957ee0b77da14 | /webapps/wsgi.py | 00baffa2e3cc71e5ac1bc4816e538030e93b25bd | [] | no_license | aysfzaicmu/aukshop | 972ef78886a0b954c203a42c3ede1ac667855540 | 81a9852426642c4e4066aed7d901cdee1bea8e56 | refs/heads/master | 2021-01-12T12:16:27.534308 | 2016-10-31T05:37:16 | 2016-10-31T05:37:16 | 72,404,837 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 517 | py | """
WSGI config for webapps project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
# Must be set before anything from django.core is imported.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webapps.settings")
# from django.core.wsgi import get_wsgi_application
# application = get_wsgi_application()
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
# Cling (dj-static) wraps the WSGI app so the application process also
# serves static files, without a separate static-file server.
application = Cling(get_wsgi_application())
| [
"aliyousafzai@Alis-MacBook-Pro.local"
] | aliyousafzai@Alis-MacBook-Pro.local |
489e85928fed2d4d5c809ef233de0472f3a737d9 | c30c762feea6c5413ba4f8d483fdc984f09b5b51 | /celeba/perf_tests.py | 6d21eb3fb303b975dadd21b1695186ee5a5d08fb | [] | no_license | avudzor/FormEstDistRisks | 3d451054ef0fb75a183011023f5ff4638d24975d | ac4c4a1e5673e34b4633f0c46187c772479ecb51 | refs/heads/main | 2023-07-28T18:21:03.177498 | 2021-08-27T15:20:45 | 2021-08-27T15:20:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,901 | py | from model_utils import get_model, BASE_MODELS_DIR
from data_utils import CelebaWrapper, SUPPORTED_PROPERTIES
import torch.nn as nn
import numpy as np
import utils
from tqdm import tqdm
import os
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.rcParams['figure.dpi'] = 200
def get_models(folder_path, n_models=1000):
    """Load up to ``n_models`` models from ``folder_path`` in random order."""
    chosen = np.random.permutation(os.listdir(folder_path))[:n_models]
    return [get_model(os.path.join(folder_path, fname)) for fname in tqdm(chosen)]
def get_accs(val_loader, models):
    """Return each model's validation accuracy on ``val_loader`` as an array."""
    criterion = nn.BCEWithLogitsLoss().cuda()
    accuracies = []
    for net in tqdm(models):
        net = net.cuda()
        _, acc = utils.validate_epoch(val_loader, net, criterion, verbose=False)
        accuracies.append(acc)
    return np.array(accuracies)
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--batch_size', type=int, default=512)
    parser.add_argument('--filter', help='alter ratio for this attribute',
                        required=True, choices=SUPPORTED_PROPERTIES)
    parser.add_argument('--task', default="Smiling",
                        choices=SUPPORTED_PROPERTIES,
                        help='task to focus on')
    parser.add_argument('--ratio_1', help="ratio for D_1", default="0.5")
    parser.add_argument('--ratio_2', help="ratio for D_2")
    parser.add_argument('--total_models', type=int, default=100)
    args = parser.parse_args()
    utils.flash_utils(args)

    # Get data with ratio
    print("Preparing data")
    ds_1 = CelebaWrapper(args.filter, float(
        args.ratio_1), "adv", cwise_samples=(int(1e6), int(1e6)),
        classify=args.task)
    ds_2 = CelebaWrapper(args.filter, float(
        args.ratio_2), "adv", cwise_samples=(int(1e6), int(1e6)),
        classify=args.task)

    # Get loaders.  Index [1] selects the second loader returned by
    # get_loaders — presumably the validation/test split; confirm against
    # CelebaWrapper.get_loaders.
    loaders = [
        ds_1.get_loaders(args.batch_size, shuffle=False)[1],
        ds_2.get_loaders(args.batch_size, shuffle=False)[1]
    ]

    # Load victim models (get_models' default n_models=1000 applies here).
    print("Loading models")
    models_victim_1 = get_models(os.path.join(
        BASE_MODELS_DIR, "victim", args.filter, args.ratio_1))
    models_victim_2 = get_models(os.path.join(
        BASE_MODELS_DIR, "victim", args.filter, args.ratio_2))

    # Load adv models: half the budget per distribution.
    total_models = args.total_models
    models_1 = get_models(os.path.join(
        BASE_MODELS_DIR, "adv", args.filter, args.ratio_1), total_models // 2)
    models_2 = get_models(os.path.join(
        BASE_MODELS_DIR, "adv", args.filter, args.ratio_2), total_models // 2)

    allaccs_1, allaccs_2 = [], []
    vic_accs, adv_accs = [], []
    # One iteration per test distribution (D_1 then D_2).
    for loader in loaders:
        accs_1 = get_accs(loader, models_1)
        accs_2 = get_accs(loader, models_2)

        # Look at [0, 100]
        accs_1 *= 100
        accs_2 *= 100

        print("Number of samples: %d" % total_models)

        # Pick the accuracy threshold that best separates the two adv model sets.
        tracc, threshold, rule = utils.find_threshold_acc(accs_1, accs_2)
        print("[Adversary] Threshold based accuracy: %.2f at threshold %.2f" %
              (100 * tracc, threshold))
        adv_accs.append(tracc)

        # Compute accuracies on this data for victim
        accs_victim_1 = get_accs(loader, models_victim_1)
        accs_victim_2 = get_accs(loader, models_victim_2)

        # Look at [0, 100]
        accs_victim_1 *= 100
        accs_victim_2 *= 100

        # Threshold based on adv models, evaluated on victim models.
        combined = np.concatenate((accs_victim_1, accs_victim_2))
        classes = np.concatenate(
            (np.zeros_like(accs_victim_1), np.ones_like(accs_victim_2)))
        specific_acc = utils.get_threshold_acc(
            combined, classes, threshold, rule)
        print("[Victim] Accuracy at specified threshold: %.2f" %
              (100 * specific_acc))
        vic_accs.append(specific_acc)

        # Collect all accuracies for basic baseline
        allaccs_1.append(accs_victim_1)
        allaccs_2.append(accs_victim_2)

    adv_accs = np.array(adv_accs)
    vic_accs = np.array(vic_accs)

    # Basic baseline: look at model performance on test sets from both G_b
    # Predict b for whichever b it is higher
    # (column 0 = accuracy on D_1's loader, column 1 = on D_2's loader).
    allaccs_1 = np.array(allaccs_1).T
    allaccs_2 = np.array(allaccs_2).T

    preds_1 = (allaccs_1[:, 0] > allaccs_1[:, 1])
    preds_2 = (allaccs_2[:, 0] < allaccs_2[:, 1])
    basic_baseline_acc = (np.mean(preds_1) + np.mean(preds_2)) / 2
    print("Loss-test accuracy: %.3f" % (100 * basic_baseline_acc))

    # Threshold baseline: look at model performance on test sets from both G_b
    # and pick the better one
    print("Threshold-test baseline accuracy: %.3f" %
          (100 * vic_accs[np.argmax(adv_accs)]))

    # NOTE(review): this plots only the accuracies from the LAST loader
    # iterated above (accs_1/accs_2 are overwritten each loop pass).
    plt.plot(np.arange(len(accs_1)), np.sort(accs_1))
    plt.plot(np.arange(len(accs_2)), np.sort(accs_2))
    plt.savefig("./quick_see.png")
| [
"anshuman@email.virginia.edu"
] | anshuman@email.virginia.edu |
143e5ef8c7e9e44d20e390e8e765dd78b23e84f6 | 92227347d4a6778bce1abc198a07835d0ce2c628 | /minio/lifecycleconfig.py | 904a0a0caba62db1ffa68fdbdc9c851ebe333238 | [
"Apache-2.0"
] | permissive | boiler/minio-py | 4a39df16c0429d97d2db3c7ec66f333d2c25c7bc | cfd711d0594b5885d5cdd6da94527a69b3d5b646 | refs/heads/master | 2023-03-26T21:45:01.565707 | 2021-03-20T10:05:19 | 2021-03-20T10:05:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,205 | py | # -*- coding: utf-8 -*-
# MinIO Python Library for Amazon S3 Compatible Cloud Storage, (C)
# 2015, 2016, 2017, 2018, 2019 MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Request/response of PutBucketLifecycleConfiguration and
GetBucketLifecycleConfiguration APIs.
"""
# pylint: disable=invalid-name
from __future__ import absolute_import
from abc import ABCMeta
from .commonconfig import BaseRule, check_status
from .time import from_iso8601utc, to_iso8601utc
from .xml import Element, SubElement, find, findall, findtext
class DateDays(metaclass=ABCMeta):
    """Abstract base holding the Date/Days pair shared by Transition and
    Expiration lifecycle elements.

    FIX: the original used the Python-2 idiom ``__metaclass__ = ABCMeta``
    inside the class body, which is inert on Python 3; the metaclass is now
    declared in the class header.  The class defines no abstract methods,
    so it remains instantiable exactly as before.
    """

    def __init__(self, date=None, days=None):
        self._date = date
        self._days = days

    @property
    def date(self):
        """Get date."""
        return self._date

    @property
    def days(self):
        """Get days."""
        return self._days

    @staticmethod
    def parsexml(element):
        """Parse XML to date and days."""
        date = from_iso8601utc(findtext(element, "Date"))
        days = findtext(element, "Days")
        if days is not None:
            days = int(days)
        return date, days

    def toxml(self, element):
        """Serialize the Date (ISO-8601 UTC) and Days children into *element*."""
        if self._date is not None:
            SubElement(
                element, "Date", to_iso8601utc(self._date),
            )
        if self._days:
            SubElement(element, "Days", str(self._days))
        return element
class Transition(DateDays):
    """Transition element of a lifecycle rule (current object versions)."""

    def __init__(self, date=None, days=None, storage_class=None):
        super().__init__(date, days)
        self._storage_class = storage_class

    @property
    def storage_class(self):
        """Get storage class."""
        return self._storage_class

    @classmethod
    def fromxml(cls, element):
        """Create new object with values from XML element."""
        element = find(element, "Transition")
        date, days = cls.parsexml(element)
        return cls(date, days, findtext(element, "StorageClass"))

    def toxml(self, element):
        """Convert to XML.

        BUG FIX: the element tag was "NoncurrentVersionTransition", which
        was asymmetric with fromxml() (which reads "Transition") and emitted
        invalid lifecycle configuration XML for current-version transitions.
        """
        element = SubElement(element, "Transition")
        super().toxml(element)
        if self._storage_class:
            SubElement(element, "StorageClass", self._storage_class)
        return element
class NoncurrentVersionTransition:
    """NoncurrentVersionTransition element of a lifecycle rule."""

    def __init__(self, noncurrent_days=None, storage_class=None):
        self._noncurrent_days = noncurrent_days
        self._storage_class = storage_class

    @property
    def noncurrent_days(self):
        """Get Noncurrent days."""
        return self._noncurrent_days

    @property
    def storage_class(self):
        """Get storage class."""
        return self._storage_class

    @classmethod
    def fromxml(cls, element):
        """Create new object with values from XML element."""
        element = find(element, "NoncurrentVersionTransition")
        days_text = findtext(element, "NoncurrentDays")
        days = int(days_text) if days_text is not None else None
        return cls(days, findtext(element, "StorageClass"))

    def toxml(self, element):
        """Serialize this element as a child of *element*."""
        element = SubElement(element, "NoncurrentVersionTransition")
        if self._noncurrent_days:
            SubElement(element, "NoncurrentDays", str(self._noncurrent_days))
        if self._storage_class:
            SubElement(element, "StorageClass", self._storage_class)
        return element
class NoncurrentVersionExpiration:
    """NoncurrentVersionExpiration element of a lifecycle rule."""

    def __init__(self, noncurrent_days=None):
        self._noncurrent_days = noncurrent_days

    @property
    def noncurrent_days(self):
        """Get Noncurrent days."""
        return self._noncurrent_days

    @classmethod
    def fromxml(cls, element):
        """Create new object with values from XML element."""
        element = find(element, "NoncurrentVersionExpiration")
        days_text = findtext(element, "NoncurrentDays")
        days = None if days_text is None else int(days_text)
        return cls(days)

    def toxml(self, element):
        """Serialize this element as a child of *element*."""
        element = SubElement(element, "NoncurrentVersionExpiration")
        if self._noncurrent_days:
            SubElement(element, "NoncurrentDays", str(self._noncurrent_days))
        return element
class Expiration(DateDays):
    """Expiration element of a lifecycle rule."""

    def __init__(self, date=None, days=None,
                 expired_object_delete_marker=None):
        super().__init__(date, days)
        self._expired_object_delete_marker = expired_object_delete_marker

    @property
    def expired_object_delete_marker(self):
        """Get expired object delete marker."""
        return self._expired_object_delete_marker

    @classmethod
    def fromxml(cls, element):
        """Create new object with values from XML element."""
        element = find(element, "Expiration")
        date, days = cls.parsexml(element)
        marker_text = findtext(element, "ExpiredObjectDeleteMarker")
        if marker_text is None:
            marker = None
        else:
            # Accept any capitalization of true/false; reject other values.
            if marker_text.title() not in ["False", "True"]:
                raise ValueError(
                    "value of ExpiredObjectDeleteMarker must be "
                    "'True' or 'False'",
                )
            marker = marker_text.title() == "True"
        return cls(date, days, marker)

    def toxml(self, element):
        """Serialize this element as a child of *element*."""
        element = SubElement(element, "Expiration")
        super().toxml(element)
        if self._expired_object_delete_marker is not None:
            SubElement(
                element,
                "ExpiredObjectDeleteMarker",
                str(self._expired_object_delete_marker),
            )
        return element
class AbortIncompleteMultipartUpload:
    """AbortIncompleteMultipartUpload element of a lifecycle rule."""

    def __init__(self, days_after_initiation=None):
        self._days_after_initiation = days_after_initiation

    @property
    def days_after_initiation(self):
        """Get days after initiation."""
        return self._days_after_initiation

    @classmethod
    def fromxml(cls, element):
        """Create new object with values from XML element."""
        element = find(element, "AbortIncompleteMultipartUpload")
        days_text = findtext(element, "DaysAfterInitiation")
        days = int(days_text) if days_text is not None else None
        return cls(days)

    def toxml(self, element):
        """Serialize this element as a child of *element*."""
        element = SubElement(element, "AbortIncompleteMultipartUpload")
        if self._days_after_initiation:
            SubElement(
                element,
                "DaysAfterInitiation",
                str(self._days_after_initiation),
            )
        return element
class Rule(BaseRule):
    """Single rule of a bucket lifecycle configuration."""

    def __init__(self, status, abort_incomplete_multipart_upload=None,
                 expiration=None, rule_filter=None, rule_id=None,
                 noncurrent_version_expiration=None,
                 noncurrent_version_transition=None,
                 transition=None):
        check_status(status)
        super().__init__(rule_filter, rule_id)
        self._status = status
        self._abort_incomplete_multipart_upload = abort_incomplete_multipart_upload
        self._expiration = expiration
        self._noncurrent_version_expiration = noncurrent_version_expiration
        self._noncurrent_version_transition = noncurrent_version_transition
        self._transition = transition

    @property
    def status(self):
        """Get status."""
        return self._status

    @property
    def abort_incomplete_multipart_upload(self):
        """Get abort incomplete multipart upload."""
        return self._abort_incomplete_multipart_upload

    @property
    def expiration(self):
        """Get expiration."""
        return self._expiration

    @property
    def noncurrent_version_expiration(self):
        """Get noncurrent version expiration."""
        return self._noncurrent_version_expiration

    @property
    def noncurrent_version_transition(self):
        """Get noncurrent version transition."""
        return self._noncurrent_version_transition

    @property
    def transition(self):
        """Get transition."""
        return self._transition

    @classmethod
    def fromxml(cls, element):
        """Create new object with values from XML element."""
        def parse_child(tag, klass):
            # Delegate to the child class only when the element is present.
            if find(element, tag) is None:
                return None
            return klass.fromxml(element)

        status = findtext(element, "Status", True)
        abort_mpu = parse_child(
            "AbortIncompleteMultipartUpload", AbortIncompleteMultipartUpload)
        expiration = parse_child("Expiration", Expiration)
        rule_filter, rule_id = cls.parsexml(element)
        nc_expiration = parse_child(
            "NoncurrentVersionExpiration", NoncurrentVersionExpiration)
        nc_transition = parse_child(
            "NoncurrentVersionTransition", NoncurrentVersionTransition)
        transition = parse_child("Transition", Transition)
        return cls(
            status,
            abort_incomplete_multipart_upload=abort_mpu,
            expiration=expiration,
            rule_filter=rule_filter,
            rule_id=rule_id,
            noncurrent_version_expiration=nc_expiration,
            noncurrent_version_transition=nc_transition,
            transition=transition,
        )

    def toxml(self, element):
        """Serialize this rule as a <Rule> child of *element*."""
        element = SubElement(element, "Rule")
        SubElement(element, "Status", self._status)
        if self._abort_incomplete_multipart_upload:
            self._abort_incomplete_multipart_upload.toxml(element)
        if self._expiration:
            self._expiration.toxml(element)
        super().toxml(element)
        for child in (self._noncurrent_version_expiration,
                      self._noncurrent_version_transition,
                      self._transition):
            if child:
                child.toxml(element)
        return element
class LifecycleConfig:
    """Lifecycle configuration: a non-empty collection of Rule objects."""

    def __init__(self, rules):
        if not rules:
            raise ValueError("rules must be provided")
        self._rules = rules

    @property
    def rules(self):
        """Get rules."""
        return self._rules

    @classmethod
    def fromxml(cls, element):
        """Create new object with values from XML element."""
        # An XML document without any <Rule> yields an empty list, which the
        # constructor rejects.
        return cls([Rule.fromxml(tag) for tag in findall(element, "Rule")])

    def toxml(self, element):
        """Convert to XML.

        The incoming *element* is ignored; a fresh <LifecycleConfiguration>
        root is always built.
        """
        element = Element("LifecycleConfiguration")
        for rule in self._rules:
            rule.toxml(element)
        return element
| [
"noreply@github.com"
] | boiler.noreply@github.com |
9618de4743eed95e145f302cc80a2547716ee168 | 55e2d07829ce6ff451c068606f6ca5934dcad0f9 | /django/env_var/comment/app_comments/apps.py | 432b62880fb02c76931ecc7f4cf217a5cc8facc1 | [] | no_license | tulbadex/zuriPythonClassAssignments | ec3fd55ea24d18604cc041bd0ad083acf23f6e47 | affedcd204e1fd67633747a0b7fbebaf2ad08718 | refs/heads/main | 2023-04-16T01:44:44.967433 | 2021-04-27T05:44:38 | 2021-04-27T05:44:38 | 354,865,365 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | from django.apps import AppConfig
class AppCommentsConfig(AppConfig):
    """Django application configuration for the app_comments app."""
    # 64-bit auto-incrementing primary keys by default for this app's models.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'app_comments'
| [
"ibrahimadedayo@rocketmail.com"
] | ibrahimadedayo@rocketmail.com |
500a9013cb9cfe027410a13198b612b9d6ab8fb4 | facfc296400ab5f10152c2f7014902be20c2eed6 | /Amazon/spiders/amazon.py | 1cee8b0ba7bd2ebaac5379b6f3c9535d6f83344c | [] | no_license | ayushikorde111/AmazonScraper | 2c7455343d468f008b3765595589b5196f3d3e06 | 3570ed8d38457b322f5d773c88cc4664aabe2cba | refs/heads/master | 2020-11-25T09:40:20.559380 | 2019-12-16T17:55:12 | 2019-12-16T17:55:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,753 | py | # -*- coding: utf-8 -*-
import scrapy
from ..items import AmazonItem
from ..userAgents import Get_Headers
import pymongo
import re
import time
class AmazonSpider(scrapy.Spider):
    """Crawl amazon.in book categories and track per-format prices in MongoDB.

    Each book is keyed by its canonical product URL.  The stored document
    keeps a price history (list of ``{"Price", "Time"}`` entries) for the
    Paperback, Kindle Edition and Hardcover formats; a new entry is appended
    only when the scraped price differs from the last recorded one.
    """
    name = 'amazon'
    allowed_domains = ['amazon.in']
    items = AmazonItem()
    headers = {'User-Agent': Get_Headers()}
    start_urls = ['https://www.amazon.in/Books/b?ie=UTF8&node=976389031&ref_=sd_allcat_sbc_books_all']

    # Sponsored results redirect through this URL and have no stable product page.
    SPONSORED_URL = 'https://www.amazon.in/gp/slredirect/picassoRedirect.html/'

    def __init__(self):
        super().__init__()
        # One client per spider process; documents live in Amazon.books.
        self.conn = pymongo.MongoClient('localhost', 27017)
        self.collection = self.conn['Amazon']['books']

    def parse(self, response):
        """Follow every book-category link in the left navigation sidebar."""
        categories = response.xpath('.//div[@id="leftNav"]//span/a/@href').extract()
        for category in categories:
            # '/gp/...' entries are utility/promo links, not category listings.
            if category[:3] == '/gp':
                continue
            yield scrapy.Request(url=response.urljoin(category), callback=self.get_ASIN)

    def _store_book(self, url, title, img, paperback, kindle_edition, hardcover):
        """Insert a new book document, or append any price that changed.

        BUG FIX: the original gated the insert on ``self.collection.find(...)``,
        but a pymongo cursor is always truthy, so the insert branch never ran
        and new books were silently dropped.  ``find_one`` returns None for an
        unknown URL, restoring the intended insert-or-update behaviour.
        """
        if url == self.SPONSORED_URL:
            return
        existing = self.collection.find_one({'url': url})
        if existing is None:
            self.collection.insert_one({
                'url': url, 'title': title, 'img': img,
                'paperback': [{"Price": paperback, "Time": time.asctime()}],
                'kindle_edition': [{"Price": kindle_edition, "Time": time.asctime()}],
                'hardcover': [{"Price": hardcover, "Time": time.asctime()}],
            })
            return
        for field, price in (('paperback', paperback),
                             ('kindle_edition', kindle_edition),
                             ('hardcover', hardcover)):
            # Append only when the price moved since the last recorded entry.
            if price != existing[field][-1]['Price']:
                self.collection.update_one(
                    {'url': url},
                    {'$push': {field: {"Price": price, "Time": time.asctime()}}})

    def get_Details_of_Primary_Page(self, asin, response):
        """Scrape one result item on the classic (li-based) listing layout."""
        base_query = './/li[@data-asin="{}"]'.format(str(asin))
        title = response.xpath(
            base_query + '//h2[@class = "a-size-medium s-inline s-access-title a-text-normal"]/text()').extract_first()
        link = response.xpath(
            base_query + '//a[@class="a-link-normal s-access-detail-page s-color-twister-title-link a-text-normal"]/@href').extract_first()
        if link[:5] != 'https':
            link = response.urljoin(link)
        img = response.xpath(
            base_query + '//div[@class="a-column a-span12 a-text-center"]//img/@src').extract_first()
        # Identical price selector for all three formats; only @title differs.
        price_query = (base_query +
                       '//div[@class="a-column a-span7"]//div/a[@title = "{}"]/../following-sibling::div[1]'
                       '//span[@class="a-size-base a-color-price s-price a-text-bold" or @class="a-size-base a-color-price a-text-bold"]/text()')
        paperback = response.xpath(price_query.format("Paperback")).extract_first()
        kindle_edition = response.xpath(price_query.format("Kindle Edition")).extract_first()
        hardcover = response.xpath(price_query.format("Hardcover")).extract_first()
        # Reduce the link to the canonical product URL used as the Mongo key.
        url = re.findall(r'https://.*/.*/[0-9]*', link)[0]
        self._store_book(url, title, img, paperback, kindle_edition, hardcover)

    def get_Details_of_Secondary_Page(self, asin, response):
        """Scrape one result item on the newer (div/sg-col) listing layout."""
        base_query = './/div[@data-asin="{}"]'.format(str(asin))
        title = response.xpath(
            base_query + '//span[@class="a-size-medium a-color-base a-text-normal"]/text()').extract_first()
        link = response.xpath(
            base_query + '//h2[@class="a-size-mini a-spacing-none a-color-base s-line-clamp-2"]/a/@href').extract_first()
        if link[:5] != 'https':
            link = response.urljoin(link)
        img = response.xpath(
            base_query + '//div[@class="a-section aok-relative s-image-fixed-height"]/img/@src').extract_first()
        price_query = (base_query +
                       '//div[@class="a-row a-size-base a-color-base"]/a[contains(text(),"{}")]'
                       '/../following-sibling::div[1]//span[@class="a-price-whole" or @class="a-color-price"]/text()')
        paperback = response.xpath(price_query.format("Paperback")).extract_first()
        kindle_edition = response.xpath(price_query.format("Kindle Edition")).extract_first()
        hardcover = response.xpath(price_query.format("Hardcover")).extract_first()
        url = re.findall(r'https://.*/.*/[0-9]*', link)[0]
        self._store_book(url, title, img, paperback, kindle_edition, hardcover)

    def get_ASIN(self, response):
        """Harvest every ASIN on a listing page, store its prices, paginate.

        Amazon serves two different listing layouts.  The classic layout is
        tried first; when its selectors come up empty (e.g. IndexError on the
        missing pagination link) the newer layout is attempted instead.
        Bare ``except:`` clauses were narrowed to ``except Exception`` so
        KeyboardInterrupt/SystemExit still propagate.
        """
        try:
            for asin in response.xpath(
                    './/li[@class = "s-result-item celwidget "]/@data-asin').extract():
                if asin not in asins:
                    self.get_Details_of_Primary_Page(asin, response)
                    asins.add(asin)
            next_page = response.xpath(
                './/div[@class="pagnHy"]/span[@class="pagnRA"]/a/@href').extract()
            yield scrapy.Request(url=response.urljoin(next_page[0]),
                                 callback=self.get_ASIN, headers=self.headers)
        except Exception:
            # Classic layout failed -- fall back to the sg-col based layout.
            try:
                for asin in response.xpath(
                        './/div[@class="sg-col-20-of-24 s-result-item sg-col-0-of-12 sg-col-28-of-32 sg-col-16-of-20 sg-col sg-col-32-of-36 sg-col-12-of-16 sg-col-24-of-28"]/@data-asin').extract():
                    if asin not in asins:
                        self.get_Details_of_Secondary_Page(asin, response)
                        asins.add(asin)
                next_page_link = response.xpath('.//li[@class="a-last"]/a/@href').extract()
                yield scrapy.Request(url=response.urljoin(next_page_link[0]),
                                     callback=self.get_ASIN, headers=self.headers)
            except Exception:
                # Neither layout matched (captcha, empty page, ...): skip it.
                pass
asins = set() | [
"shivam.dongre14@gmail.com"
] | shivam.dongre14@gmail.com |
325e30673dc9f03db17dbfa84afe21c002c2bc80 | ac8df11d241f87cbaf61b8e509eff8053b62a32c | /train.py | fb34cdcfb895c545fe464207cfb95f57867a4e7a | [
"MIT"
] | permissive | antoniosequeira/trainer_mobilenet_v2 | faca6d06c38628520afb091c139a33fb63089807 | 3764d1f8653b4c0bda650821e1280ac56363a062 | refs/heads/main | 2022-12-30T23:11:10.334269 | 2020-10-17T15:31:49 | 2020-10-17T15:31:49 | 304,073,715 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,381 | py | # Head Notes
# Tensorflow doesn't like numpy 1.17 and gives a lot of warnings, to remove them use the following command:
# pip install "numpy<1.17"
# Python API
import os
import random
import time
import warnings
# 3rd party API
import pickle
import numpy as np
import cv2
from PIL import Image
from skimage.transform import resize
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelBinarizer, StandardScaler, Normalizer
from keras import Model
from keras.layers.core import Dense
from keras.layers import GlobalAveragePooling2D
from keras.preprocessing.image import img_to_array, ImageDataGenerator
from keras.applications.mobilenet_v2 import MobileNetV2
from keras.callbacks import ModelCheckpoint, EarlyStopping, CSVLogger, ReduceLROnPlateau
from keras.utils import plot_model
from matplotlib import pyplot as plt
#To remove warnings from the system
warnings.simplefilter(action='ignore', category=FutureWarning)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
# Variables
path = 'Trashnet'
pathTrainResult = 'TrainResult'
batch_size = 12
epochs = 500
WIDTH = 192 # (224, 192, 160, 128, and 96).
HEIGHT = WIDTH
lista_imagens, x, y = [], [], []
lb = LabelBinarizer()
for category in os.listdir(path):
path_category = path + '/' + category
folder = os.path.join(path_category)
images = os.listdir(folder)
print('{0} - {1}'.format(category, len(images)))
for j in images:
imagePath = os.path.join(folder + '/' + j)
lista_imagens.append((imagePath, category))
print('Total: {0} imagens'.format(len(lista_imagens)))
start = time.time()
# loop over the input images
for imagePath, category in lista_imagens:
# load the image, pre-process it, and store it in the data list
image = cv2.imread(imagePath)
image = cv2.resize(image, (WIDTH, HEIGHT))
image = img_to_array(image)
x.append(image)
y.append(category)
end = time.time()
print('Resized images in {0} seconds'.format(round(end-start,0)))
# transform multi-class labels to binary labels
y = np.array(y)
y = lb.fit_transform(y)
# Hold out 20% of the data as the final test set.
X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2)
# Carve the validation set out of the REMAINING training data.
# (Previously this re-split the full x/y, so validation samples could also
# appear in the test set — data leakage between validation and test.)
X_train, X_val, y_train, y_val = train_test_split(X_train, y_train, test_size=0.2)
x_l = []
for i in X_train:
x_l.append(i.reshape(-1))
X_train = np.array(x_l)
X_train.reshape(-1)
x_l = []
for i in X_val:
x_l.append(i.reshape(-1))
X_val = np.array(x_l)
x_l = []
for i in X_test:
x_l.append(i.reshape(-1))
X_test = np.array(x_l)
# Image Standardization
scaler = StandardScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
X_val = scaler.transform(X_val)
X_test = scaler.transform(X_test)
print('Standardized images')
# Image Normalization
scaler = Normalizer()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
X_val = scaler.transform(X_val)
X_test = scaler.transform(X_test)
print('Normalized Images')
X_trein = []
for i in X_train:
X_trein.append(i.reshape((WIDTH,HEIGHT, 3)))
X_train = np.array(X_trein)
X_vali = []
for i in X_val:
X_vali.append(i.reshape((WIDTH,HEIGHT, 3)))
X_val = np.array(X_vali)
X_teste = []
for i in X_test:
X_teste.append(i.reshape((WIDTH,HEIGHT, 3)))
X_test = np.array(X_teste)
print('Defining classifier')
# Transfer learning: ImageNet-pretrained MobileNetV2 backbone without its
# classification head; a new head is attached below.
mobilenet = MobileNetV2(input_shape=(WIDTH, HEIGHT, 3), include_top=False, weights='imagenet')
# NOTE(review): `x` here shadows the raw-image list `x` built earlier in
# this script; rename one of them to avoid confusion.
x = mobilenet.output
x = GlobalAveragePooling2D()(x)
x=Dense(1024,activation='relu')(x) #we add dense layers so that the model can learn more complex functions and classify for better results.
x=Dense(1024,activation='relu')(x) #dense layer 2
x=Dense(512,activation='relu')(x) #dense layer 3
# NOTE(review): 6 output classes is hard-coded — must match the number of
# category folders found under `path` (Trashnet); confirm.
predictions = Dense(6, activation='softmax')(x)
classifier = Model(inputs= mobilenet.input, outputs=predictions)
classifier.compile(optimizer = 'adam', loss = 'categorical_crossentropy', metrics = ['accuracy'])
classifier.summary()
print('Finished defining classifier')
if not os.path.exists(pathTrainResult):
os.makedirs(pathTrainResult)
# construct the training image generator for data augmentation
aug = ImageDataGenerator(rotation_range=20, zoom_range=0.15,
width_shift_range=0.2, height_shift_range=0.2, shear_range=0.15,
horizontal_flip=True, fill_mode="nearest")
file_name = 'batch_{0}_shape_{1}'.format(batch_size, WIDTH)
# to save best model
bestcheckpoint = ModelCheckpoint(pathTrainResult + '/batch_'+ str(batch_size) +'_epochs_'+ str(epochs) +'_shape_'+ str(WIDTH) +'.h5', save_best_only=True, monitor='val_loss', mode='min')
callback = EarlyStopping(monitor='val_loss', min_delta=0, patience=30, mode='auto')
csv_logger = CSVLogger('PlotResults/batch_'+ str(batch_size) +'_epochs_'+ str(epochs) +'_shape_'+ str(WIDTH) +'_training.log')
reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=5, min_lr=0.001)
steps = int( np.ceil(X_train.shape[0] / batch_size) )
# fit() should be used for small datasets, loads everything into memory
# fit_generator() should be used for larger datasets, which loads into memory only small batches of data.
H = classifier.fit_generator(aug.flow(X_train, y_train, batch_size=batch_size), validation_data = (X_val, y_val),steps_per_epoch=steps, epochs = epochs, verbose = 1, callbacks=[bestcheckpoint, csv_logger, reduce_lr])
# Plot training & validation accuracy values
plt.figure()
plt.plot(H.history['acc'])
plt.plot(H.history['val_acc'])
plt.title('Model accuracy')
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(['Train', 'Val'], loc='upper left')
plt.savefig('PlotResults/{0}_accplot.png'.format(file_name))
# Plot training & validation loss values
plt.figure()
plt.plot(H.history['loss'])
plt.plot(H.history['val_loss'])
plt.title('Model loss')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend(['Train', 'Val'], loc='upper left')
plt.savefig('PlotResults/{0}_lossplot.png'.format(file_name))
# save One Hot Encoding
f = open('{0}/{1}_one_hot_encoding.txt'.format(pathTrainResult, file_name), "wb")
f.write(pickle.dumps(lb))
f.close()
print("Saved One Hot Encoding to disk")
# save X_test
f = open('{0}/{1}_X_test.txt'.format(pathTrainResult, file_name), "wb")
f.write(pickle.dumps(X_test))
f.close()
print("Saved X_test to disk")
# save y_test
f = open('{0}/{1}_y_test.txt'.format(pathTrainResult, file_name), "wb")
f.write(pickle.dumps(y_test))
f.close()
print("Saved y_test to disk")
| [
"antonioserolsequeira@gmail.com"
] | antonioserolsequeira@gmail.com |
cbabaab8f53d23cfaa2ecbf319388276b6172f67 | 433d8d457ed431b9ad38e3ed8ed6e441b7caa334 | /bin/generate_zippylog_message_classes | 92c4f25b4a7ff5fa92a47278254795a8f91aaf8f | [
"Apache-2.0"
] | permissive | indygreg/zippylog | 365f4f95dd2c9f8743180178fa90d66b0611cc71 | 5efc10b28a3e9d5f4df6c2014e7121d689291a70 | refs/heads/master | 2020-05-09T17:15:23.063121 | 2012-09-06T23:53:19 | 2012-09-06T23:53:19 | 795,523 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,674 | #!/usr/bin/python
# Copyright 2011 Gregory Szorc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# this script is meant to be used by zippylog developers only.
# it generates the autogenerated protocol buffer classes for the messages
# defined by zippylog itself
# it is assumed this script is executed from within a source distribution
from json import dump, load
from os import walk
from os.path import exists, join, dirname, splitext
from shutil import copy2, rmtree
from subprocess import Popen
from sys import path
from tempfile import mkdtemp
# Resolve paths relative to the source-distribution root (parent of bin/).
base_dir = dirname(dirname(__file__))
path.insert(0, join(base_dir, 'lib', 'py'))
proto_dir = join(base_dir, 'proto')
zippylog_compile = join(base_dir, 'bin', 'zippylog_compile')
state_file = join(proto_dir, 'zippylog-state.json')
# Compile into a scratch directory first so a failed run cannot clobber
# the checked-in generated sources.
out_dir = mkdtemp()
print 'temp output directory: %s' % out_dir
# Seed the scratch directory with the previous compiler state, if any.
if exists(state_file):
    copy2(state_file, join(out_dir, 'zippylog-state.json'))
compile_args = [ zippylog_compile, '--cpp-namespace', 'zippylog' ]
compile_args.append(proto_dir)
compile_args.append(out_dir)
p = Popen(compile_args)
if p.wait() != 0:
    print 'zippylog_compile did not execute successfully'
    exit(1)
# Persist the updated compiler state back next to the .proto sources.
copy2(join(out_dir, 'zippylog-state.json'), state_file)
for root, dirs, files in walk(join(out_dir, 'py', 'zippylog')):
for f in filter(lambda x: x[-3:] == '.py', files):
src = join(root, f)
dst = src[len(out_dir)+1:]
copy2(src, join(base_dir, 'lib', dst))
for root, dirs, files in walk(join(out_dir, 'cpp', 'zippylog')):
for f in filter(lambda x: splitext(x)[1] in ['.h', '.hpp', '.cc', '.cpp'], files):
src = join(root, f)
dst = src[len(out_dir)+5:]
copy2(src, join(base_dir, 'src', dst))
for root, dirs, files in walk(join(out_dir, 'lua', 'zippylog')):
for f in filter(lambda x: splitext(x)[1] in ['.h', '.cc'], files):
src = join(root, f)
dst = src[len(out_dir)+5:]
copy2(src, join(base_dir, 'src', dst))
copy2(join(out_dir, 'lua', 'lua-protobuf.h'), join(base_dir, 'src', 'lua-protobuf.h'))
copy2(join(out_dir, 'lua', 'lua-protobuf.cc'), join(base_dir, 'src', 'lua-protobuf.cc'))
rmtree(out_dir)
| [
"gregory.szorc@gmail.com"
] | gregory.szorc@gmail.com | |
7afe6149aecbf87a30620698846e0958ce168fe8 | 1149dfd7bb5e7b7b35d839ed38abd10a4bec313e | /library/serializers.py | 9b2260c232c0dd6d1fe85af3c654a0ebd9fc46e4 | [] | no_license | kill-rill1996/GB_DRF | 8a941176111047a17dfe1f029a4e09459be0854f | 714edea1785864841f6cd5bb363c7031f902eb88 | refs/heads/main | 2023-05-28T11:11:19.244098 | 2021-06-14T15:17:13 | 2021-06-14T15:17:13 | 363,373,847 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | from rest_framework.serializers import HyperlinkedModelSerializer, ModelSerializer
from .models import Author, Book
class AuthorModelSerializer(ModelSerializer):
    """DRF serializer exposing every field of the Author model."""

    class Meta:
        model = Author
        fields = "__all__"
class BookModelSerializer(ModelSerializer):
    """DRF serializer for Book; the related author is rendered as a
    nested object instead of a bare primary key."""

    # Nested representation via the author's own serializer.
    author = AuthorModelSerializer()

    class Meta:
        model = Book
        fields = '__all__'
| [
"hizenberg228@mail.ru"
] | hizenberg228@mail.ru |
309a43c41e15b1283dda3abcb6b9413a325b5afd | 6f77ce7b93ff8edded44bc4c46cc610d5ef2a6e0 | /src/third/deep_q_network.py | eea744ea379ae0264bdc3a449796a8f853d1837b | [
"MIT"
] | permissive | wwbin2017/reinforcement-learning | 20ce500a1677605999345eb1ee7672bab999fe8e | a7f8316a31a81a99c54224f853a2d06ea0b00234 | refs/heads/master | 2020-07-22T20:46:51.605954 | 2019-11-10T03:11:57 | 2019-11-10T03:11:57 | 207,322,387 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,537 | py | # -*- coding:utf-8 -*-
#
# Author : ๅฏๆฑ้ช
# E-mail :
# Date : 19/10/10 00:03:44
# Desc :
#
"""
Dependencies:
tensorflow r1.14
pygame 1.9.4
"""
from __future__ import print_function
import tensorflow as tf
import cv2
import sys
sys.path.append("game/")
import wrapped_flappy_bird as game
import random
import numpy as np
from collections import deque
import os
class Config(object):
    """Hyper-parameters for the Flappy Bird DQN agent."""

    GAME = 'bird'            # name used for checkpoint files
    ACTIONS = 2              # do nothing / flap
    GAMMA = 0.99             # discount factor for future rewards
    OBSERVE = 100000.0       # steps of pure experience collection before training
    EXPLORE = 2000000.0      # steps over which epsilon is annealed
    FINAL_EPSILON = 0.0001   # terminal exploration rate
    INITIAL_EPSILON = 0.06   # starting exploration rate
    REPLAY_MEMORY = 50000    # max transitions kept in the replay buffer
    BATCH = 32               # minibatch size
    FRAME_PER_ACTION = 1     # frames between action selections
    SAVE_MODEL_EVERY = 10000 # checkpoint interval (steps)
    # Added: train_dqn() reads this attribute to decide how often to sync
    # the target network; it was missing and raised AttributeError as soon
    # as the training phase started (t > OBSERVE).
    UPDATE_TARGET_ESTIMATOR_EVERY = 10000
class QValueEvaluation(object):
    """Convolutional Q-network for the Flappy Bird DQN agent.

    Maps a stack of 4 preprocessed 80x80 frames to one Q-value per
    action, and owns its training op and TensorBoard summaries.
    """

    def __init__(self, scope='estimator', log_dir=None, config=Config):
        self.scope = scope
        self.summary_writer = None
        self.config = config
        # Build all variables under this scope so the online and target
        # networks can be told apart by variable-name prefix.
        with tf.variable_scope(scope):
            self.build_graph()
        if log_dir:
            summary_dir = os.path.join(log_dir, scope)
            # exist_ok: re-running with the same log_dir used to crash
            # with FileExistsError from os.makedirs().
            os.makedirs(summary_dir, exist_ok=True)
            self.summary_writer = tf.summary.FileWriter(summary_dir)

    def weight_variable(self, shape):
        """Weight tensor initialised from a truncated normal (std 0.01)."""
        initial = tf.truncated_normal(shape, stddev = 0.01)
        return tf.Variable(initial)

    def bias_variable(self, shape):
        """Bias tensor initialised to the small constant 0.01."""
        initial = tf.constant(0.01, shape = shape)
        return tf.Variable(initial)

    def conv2d(self, x, W, stride):
        """2-D convolution with SAME padding.

        (Fixed: the first parameter was misspelled ``slef``; it worked
        only because it is bound positionally.)
        """
        return tf.nn.conv2d(x, W, strides = [1, stride, stride, 1], padding = "SAME")

    def max_pool_2x2(self, x):
        """2x2 max pooling with stride 2 and SAME padding."""
        return tf.nn.max_pool(x, ksize = [1, 2, 2, 1], strides = [1, 2, 2, 1], padding = "SAME")

    def build_graph(self):
        """Construct the conv net, loss, optimiser and summary ops."""
        # network weights
        W_conv1 = self.weight_variable([8, 8, 4, 32])
        b_conv1 = self.bias_variable([32])
        W_conv2 = self.weight_variable([4, 4, 32, 64])
        b_conv2 = self.bias_variable([64])
        W_conv3 = self.weight_variable([3, 3, 64, 64])
        b_conv3 = self.bias_variable([64])
        W_fc1 = self.weight_variable([1600, 512])
        b_fc1 = self.bias_variable([512])
        W_fc2 = self.weight_variable([512, self.config.ACTIONS])
        b_fc2 = self.bias_variable([self.config.ACTIONS])
        # input: stack of 4 preprocessed 80x80 frames
        self.s = tf.placeholder("float", [None, 80, 80, 4])
        h_conv1 = tf.nn.relu(self.conv2d(self.s, W_conv1, 4) + b_conv1)
        h_pool1 = self.max_pool_2x2(h_conv1)
        h_conv2 = tf.nn.relu(self.conv2d(h_pool1, W_conv2, 2) + b_conv2)
        #h_pool2 = max_pool_2x2(h_conv2)
        h_conv3 = tf.nn.relu(self.conv2d(h_conv2, W_conv3, 1) + b_conv3)
        #h_pool3 = max_pool_2x2(h_conv3)
        #h_pool3_flat = tf.reshape(h_pool3, [-1, 256])
        h_conv3_flat = tf.reshape(h_conv3, [-1, 1600])
        h_fc1 = tf.nn.relu(tf.matmul(h_conv3_flat, W_fc1) + b_fc1)
        # readout layer: one Q-value per action
        self.readout = tf.matmul(h_fc1, W_fc2) + b_fc2
        # define the cost function: squared TD error on the chosen action
        self.a = tf.placeholder("float", [None, self.config.ACTIONS])
        self.y = tf.placeholder("float", [None])
        readout_action = tf.reduce_sum(tf.multiply(self.readout, self.a), reduction_indices=1)
        self.cost = tf.reduce_mean(tf.square(self.y - readout_action))
        self.train_step = tf.train.AdamOptimizer(1e-6).minimize(self.cost)
        self.summaries = tf.summary.merge([
            tf.summary.scalar('loss', self.cost),
            tf.summary.scalar('max_q', tf.reduce_max(self.readout))
        ])

    def predict(self, sess, s_t):
        """Return Q-values for a batch of states ``s_t``."""
        q_values = sess.run(self.readout, feed_dict={self.s: s_t})
        return q_values

    def train(self, sess, s_j_batch, a_batch, y_batch):
        """Run one optimisation step on a minibatch and log summaries."""
        _, summaries, global_step = sess.run(
            [self.train_step, self.summaries, tf.train.get_global_step()],
            feed_dict={
                self.y: y_batch,
                self.a: a_batch,
                self.s: s_j_batch}
        )
        if self.summary_writer:
            self.summary_writer.add_summary(summaries, global_step)
            self.summary_writer.flush()
def n_step_copy_model_parameters(q_value_evaluation, target_q_value_evaluation):
    """Return the ops that copy the online network's weights onto the
    target network; run them periodically to keep the two in sync."""

    def scoped_variables(estimator):
        # Match trainable variables by scope prefix and sort by name so
        # the two lists line up pairwise.
        prefix = estimator.scope
        return sorted(
            (v for v in tf.trainable_variables() if v.name.startswith(prefix)),
            key=lambda v: v.name,
        )

    source_vars = scoped_variables(q_value_evaluation)
    dest_vars = scoped_variables(target_q_value_evaluation)
    return [tf.assign(ref=dst, value=src) for src, dst in zip(source_vars, dest_vars)]
def preprocess_state(x_t):
    """Convert a raw game frame to an 80x80 single-channel binary image."""
    gray = cv2.cvtColor(cv2.resize(x_t, (80, 80)), cv2.COLOR_BGR2GRAY)
    _, binary = cv2.threshold(gray, 1, 255, cv2.THRESH_BINARY)
    return binary
def train_dqn(sess, q_value_evaluation, target_q_value_evaluation, model_dir=None,
              fix_target=True, pretrained_model_dir=None, assign_ops=None):
    """Main DQN training loop on the Flappy Bird environment.

    :param sess: live tf.Session with variables already initialised.
    :param q_value_evaluation: online Q-network (acts and is trained).
    :param target_q_value_evaluation: target Q-network.
        NOTE(review): TD targets below are computed from the ONLINE
        network (`q_value_evaluation.predict`), so the target network is
        synced but never actually used — confirm whether that is intended.
    :param model_dir: unused — presumably a checkpoint dir; TODO confirm.
    :param fix_target: when True, periodically run ``assign_ops`` to sync
        the target network (requires Config.UPDATE_TARGET_ESTIMATOR_EVERY
        to be defined on Config).
    :param pretrained_model_dir: unused — TODO confirm.
    :param assign_ops: ops returned by n_step_copy_model_parameters().
    """
    # start the game
    game_state = game.GameState()
    # replay memory of (s, a, r, s', terminal) transitions
    D = deque()
    do_nothing = np.zeros(Config.ACTIONS)
    do_nothing[0] = 1
    x_t, r_0, terminal = game_state.frame_step(do_nothing)
    x_t = preprocess_state(x_t)
    # Initial state: the first frame repeated 4 times along the channel axis.
    s_t = np.stack((x_t, x_t, x_t, x_t), axis=2)
    # saving and loading networks
    saver = tf.train.Saver()
    sess.run(tf.initialize_all_variables())
    checkpoint = tf.train.get_checkpoint_state("saved_networks")
    # NOTE(review): checkpoint restore is disabled by the hard-coded
    # `False` below — confirm whether this is intentional.
    if False and checkpoint and checkpoint.model_checkpoint_path:
        saver.restore(sess, checkpoint.model_checkpoint_path)
        print("Successfully loaded:", checkpoint.model_checkpoint_path)
    else:
        print("Could not find old network weights")
    # start training
    epsilon = Config.INITIAL_EPSILON
    t = 0
    while "flappy bird" != "angry bird":
        readout_t = q_value_evaluation.predict(sess, [s_t])[0]
        # one-hot action: [1,0] = do nothing, [0,1] = flap the bird
        a_t = np.zeros([Config.ACTIONS])
        action_index = 0
        if t % Config.FRAME_PER_ACTION == 0:
            # Epsilon-greedy action selection.
            if random.random() <= epsilon:
                print("----------Random Action----------")
                # NOTE(review): randrange is called twice, so the printed
                # action_index may differ from the action actually taken.
                action_index = random.randrange(Config.ACTIONS)
                a_t[random.randrange(Config.ACTIONS)] = 1
            else:
                action_index = np.argmax(readout_t)
                a_t[action_index] = 1
        else:
            a_t[0] = 1 # do nothing
        # scale down epsilon
        if epsilon > Config.FINAL_EPSILON and t > Config.OBSERVE:
            epsilon -= (Config.INITIAL_EPSILON - Config.FINAL_EPSILON) / Config.EXPLORE
        # run the selected action and observe next state and reward
        # frame_step returns the raw frame, the reward and the terminal flag
        x_t1_colored, r_t, terminal = game_state.frame_step(a_t)
        x_t1 = preprocess_state(x_t1_colored)
        x_t1 = np.reshape(x_t1, (80, 80, 1))
        # Slide the 4-frame window: newest frame in front, oldest dropped.
        s_t1 = np.append(x_t1, s_t[:, :, :3], axis=2)
        # store the transition in D
        D.append((s_t, a_t, r_t, s_t1, terminal))
        if len(D) > Config.REPLAY_MEMORY:
            D.popleft()
        # only train if done observing
        if t > Config.OBSERVE:
            # sample a minibatch to train on
            minibatch = random.sample(D, Config.BATCH)
            # get the batch variables
            s_j_batch = [d[0] for d in minibatch]
            a_batch = [d[1] for d in minibatch]
            r_batch = [d[2] for d in minibatch]
            s_j1_batch = [d[3] for d in minibatch]
            y_batch = []
            # NOTE(review): next-state Q-values come from the online net,
            # not target_q_value_evaluation — see docstring.
            readout_j1_batch = q_value_evaluation.predict(sess, s_j1_batch)
            for i in range(0, len(minibatch)):
                terminal = minibatch[i][4]
                # Terminal transitions get the bare reward; otherwise
                # bootstrap with the discounted max next-state Q-value.
                if terminal:
                    y_batch.append(r_batch[i])
                else:
                    y_batch.append(r_batch[i] + Config.GAMMA * np.max(readout_j1_batch[i]))
            q_value_evaluation.train(sess, s_j_batch, a_batch, y_batch)
        s_t = s_t1
        t += 1
        # train q_estimator
        if t > Config.OBSERVE:
            # Periodically copy online weights onto the target network.
            if t % Config.UPDATE_TARGET_ESTIMATOR_EVERY == 0 and fix_target:
                sess.run(assign_ops)
        # save the model every 10000 iterations
        if t % 10000 == 0:
            saver.save(sess, 'saved_networks/' + Config.GAME + '-dqn', global_step = t)
        if t <= Config.OBSERVE:
            state = "observe"
        elif t > Config.OBSERVE and t <= Config.OBSERVE + Config.EXPLORE:
            state = "explore"
        else:
            state = "train"
        print("TIMESTEP", t, "/ STATE", state, \
            "/ EPSILON", epsilon, "/ ACTION", action_index, "/ REWARD", r_t, \
            "/ Q_MAX %e" % np.max(readout_t))
        # write info to files
        '''
        if t % 10000 <= 100:
            a_file.write(",".join([str(x) for x in readout_t]) + '\n')
            h_file.write(",".join([str(x) for x in h_fc1.eval(feed_dict={s:[s_t]})[0]]) + '\n')
            cv2.imwrite("logs_tetris/frame" + str(t) + ".png", x_t1)
        '''
def main():
    """Wire up the online/target networks and run the training loop."""
    log_dir = "log_dir/"
    online_net = QValueEvaluation(scope='estimator', log_dir=log_dir)
    target_net = QValueEvaluation(scope='target_estimator', log_dir=log_dir)
    sync_ops = n_step_copy_model_parameters(online_net, target_net)
    with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        train_dqn(session, online_net, target_net, None, True, None, sync_ops)
if __name__ == "__main__":
main()
| [
"wangwenbin001@ke.com"
] | wangwenbin001@ke.com |
676316a4635e1ab12355ea038c5b1cb00ca3726b | b15c47a45207e854fb002d69f7e33f8943a5e2b3 | /chatbot/botbuilder/bot_response_list_info.py | 835f981eaa0c71dc47024e9df8c21d3c1358a36e | [
"Apache-2.0"
] | permissive | yurimkoo/tensormsa | e1af71c00a6b2ec3b3ed35d5adad7bafc34c6fbe | 6ad2fbc7384e4dbe7e3e63bdb44c8ce0387f4b7f | refs/heads/master | 2021-07-22T13:41:45.110348 | 2017-11-02T07:13:31 | 2017-11-02T07:13:31 | 109,469,204 | 1 | 0 | null | 2017-11-04T05:19:51 | 2017-11-04T05:19:50 | null | UTF-8 | Python | false | false | 587 | py | from chatbot import serializers
class BotResponseListInfo:
    """Persists chatbot response-list records via the DRF serializer."""

    def run_response_builder(self, data):
        """
        Validate and save one CB_RESPONSE_LIST_INFO record.

        :param data: dict payload for CB_RESPONSE_LIST_INFO_Serializer;
            must contain a "story_id" key.
        :return: the record's story_id on success.
        :raises Exception: any validation or save failure, re-wrapped.
        """
        try:
            serializer_response = serializers.CB_RESPONSE_LIST_INFO_Serializer(data=data)
            if serializer_response.is_valid():
                serializer_response.save()
            else:
                # NOTE(review): this re-runs validation purely so that
                # raise_exception=True raises ValidationError; the `return`
                # is never reached.
                return serializer_response.is_valid(raise_exception=True)
            return data["story_id"]
        except Exception as e:
            # NOTE(review): re-wrapping as bare Exception discards the
            # original exception type and traceback; a plain `raise` would
            # preserve both.
            raise Exception(e)
| [
"healess1@gmail.com"
] | healess1@gmail.com |
ac35039ea461f5d9e8be9bc31b8f9e8c1007918a | 972206f2e150a4127b162a0cd37acd75771bb0ed | /tests/test_postman_collection.py | 8bc5a834a5c27c921a19e45e0383005223a77262 | [] | no_license | cristianowa/courierman | d3158c935944283bafeef0a13d6c77ef56e972bc | 4a356ef38b7a4226e7c7706e24647545618d12d3 | refs/heads/master | 2023-01-06T18:21:08.262851 | 2020-11-09T17:54:20 | 2020-11-09T18:08:03 | 309,841,978 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 589 | py | import os
from courierman.collection import Collection
expected_results = [
401,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
200,
404,
404,
]
def test_collection():
    """Replay every request in the sample Postman collection and check each
    response status against expected_results (same order)."""
    sample_path = os.path.join(
        "../", "samples", "Postman Echo.postman_collection.json")
    collection = Collection(sample_path)
    for expected_result, request in zip(expected_results, collection.requests):
        ans = request.execute()
        assert ans.status_code == expected_result, f"Failure in {request.name}"
"cristianowerneraraujo@gmail.com"
] | cristianowerneraraujo@gmail.com |
d99ebe78755008ee7505ad47b3d080a49b4e52f8 | 5b088bec719fb33d717db30e1a9c6f495515b69c | /visitor.py | b07ed357e5b3a3341187067461e293454442b18c | [] | no_license | PLUSAlo/AplicacionesPython | 72390d5980c048430cbe0c77619bf81a5c2b073c | 237bd85e2ab3daf90c3e2d0a33f0384090fc1909 | refs/heads/master | 2020-04-06T19:22:09.425311 | 2018-11-15T18:48:35 | 2018-11-15T18:48:35 | 157,734,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,686 | py | #visitor.py
class House(object):
    """Element of the visitor pattern: the object that visitors operate on."""

    def accept(self, visitor):
        """Dispatch to the visitor, handing it a reference to this house."""
        visitor.visit(self)

    def work_on_hvac(self, hvac_specialist):
        """Report that an HVAC specialist serviced this house."""
        print(self, "worked on by", hvac_specialist)

    def work_on_electricity(self, electrician):
        """Report that an electrician serviced this house."""
        print(self, "worked on by", electrician)

    def __str__(self):
        """Display just the class name when the object is printed."""
        return type(self).__name__
class Visitor(object):
    """Abstract visitor; concrete visitors implement visit()."""

    def __str__(self):
        """Display just the class name (works for subclasses too)."""
        return type(self).__name__
class HvacSpecialist(Visitor): #Inherits from the parent class, Visitor
    """Concrete visitor: HVAC specialist"""
    def visit(self, house):
        # Double dispatch: pass self so the house calls back into
        # work_on_hvac() with a reference to this specialist.
        house.work_on_hvac(self) #Note that the visitor now has a reference to the house object
class Electrician(Visitor): #Inherits from the parent class, Visitor
    """Concrete visitor: electrician"""
    def visit(self, house):
        # Double dispatch: pass self so the house calls back into
        # work_on_electricity() with a reference to this electrician.
        house.work_on_electricity(self) #Note that the visitor now has a reference to the house object
# Demo: the same House accepts two different visitors; each visit()
# double-dispatches back into the matching work_on_* method.
#Create an HVAC specialist
hv = HvacSpecialist()
#Create an electrician
e = Electrician()
#Create a house
home = House()
#Let the house accept the HVAC specialist and work on the house by invoking the visit() method
home.accept(hv)
#Let the house accept the electrician and work on the house by invoking the visit() method
home.accept(e)
| [
"noreply@github.com"
] | PLUSAlo.noreply@github.com |
f8781c6e5d3267a5c285a92e7ec0fb25b8282d05 | aed66d0c5c914b97fbbdc4752bec5d4df427d31f | /Week08/Pset6/sentiments/application.py | a8d143dac53bf8196018db77cdd802143f5dbc1e | [] | no_license | chiuchiuuu/CS50 | 73d05c1f6433dba636e6eb44ef4f48d3b50c37fc | 1c6b111086221ad0d71fe0621a94ea22c5a08045 | refs/heads/master | 2021-05-16T09:34:03.272090 | 2018-04-19T12:30:38 | 2018-04-19T12:30:38 | 104,468,210 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,204 | py | from flask import Flask, redirect, render_template, request, url_for
import helpers
import sys
import os
from analyzer import Analyzer
app = Flask(__name__)
@app.route("/")
def index():
    """Render the landing page."""
    return render_template("index.html")
@app.route("/search")
def search():
    """Classify a Twitter user's recent tweets by sentiment and render a
    pie chart of positive/negative/neutral proportions."""
    # validate screen_name
    screen_name = request.args.get("screen_name", "")
    if not screen_name:
        return redirect(url_for("index"))
    # get screen_name's tweets
    # NOTE(review): presumably get_user_timeline returns None when the
    # user does not exist or is protected — len(tweets) below would then
    # raise TypeError; confirm against helpers.py.
    tweets = helpers.get_user_timeline(screen_name, count=100)
    # TODO
    # Every tweet starts as neutral; the loop below reclassifies them.
    positive, negative, neutral = 0.0, 0.0, len(tweets)
    # absolute paths to lists
    positives = os.path.join(sys.path[0], "positive-words.txt")
    negatives = os.path.join(sys.path[0], "negative-words.txt")
    # instantiate analyzer
    analyzer = Analyzer(positives, negatives)
    for tweet in tweets:
        score = analyzer.analyze(tweet)
        if score > 0:
            positive += 1
            neutral -= 1
        elif score < 0:
            negative += 1
            neutral -= 1
    # generate chart
    chart = helpers.chart(positive, negative, neutral)
    # render results
    return render_template("search.html", chart=chart, screen_name=screen_name)
| [
"30572942+chiuchiuuu@users.noreply.github.com"
] | 30572942+chiuchiuuu@users.noreply.github.com |
5c02cbf0ed194ab4d97ac98ceb7af761adf2a322 | 15a970d7358be3fdd971f71f8ee2d8e865abadaf | /rockyfire/0000.py | 99baecd1037ba604327c02ae75e112120eea7bce | [] | no_license | rockyfire/python | 9050f3d0fb3cff90470e7d7c63eea3654c2da96a | cf28dd2f99c0fae38ff0c8fd8a42dffe182ab396 | refs/heads/master | 2021-08-09T02:59:22.636555 | 2017-11-12T01:36:11 | 2017-11-12T01:36:11 | 110,394,742 | 0 | 0 | null | 2017-11-12T01:29:18 | 2017-11-12T01:29:18 | null | UTF-8 | Python | false | false | 1,790 | py | #-*- coding:utf8 -*-
'''
sudo apt-get update
sudo apt-get install python-dev
sudo apt-get install libtiff5-dev
libjpeg8-dev zlib1g-dev
libfreetype6-dev liblcms2-dev libwebp-dev
tcl8.6-dev tk8.6-dev python-tk
sudo pip3 install pillow
'''
# ๆๅณไธ่งๅพๅๆๅฐๅพ
infile="you.jpg"
outfile="you"
size=(80,80)
small_img=Image.open(infile)
small_img.thumbnail(size)
small_img.save(outfile,"JPEG")
region=Image.open(outfile).copy()
boxs=(img.size[0]-80,0,img.size[0],80)
img.paste(region,boxs)
img.save("you&me","JPEG")
# ๅ่ http://www.cnblogs.com/apexchu/p/4231041.html
# ๆฐดๅฐ
# http://www.cnblogs.com/apexchu/p/4231032.html
def text2img(text,font_color="Blue",font_size=25):
    """Render *text* into an RGBA image for use as a watermark."""
    # List installed Linux fonts with `fc-list` (font dirs under
    # /usr/share/fonts/truetype/*); `fc-list :lang=zh` shows Chinese fonts.
    # Adding fonts: http://bbs.chinaunix.net/thread-2025374-1-1.html
    font=ImageFont.truetype('SIMSUN.TTC',font_size)
    mark_width=0
    (width,height) = font.getsize(text)
    # NOTE(review): mark_width starts at 0, so this comparison is always
    # true and just assigns width — leftover from a multi-line variant?
    if mark_width < width:
        mark_width = width
    mark_height = height
    # Image.new(mode, size, color=None); color defaults to black.
    # NOTE(review): an RGB 3-tuple (opaque red) is passed for an RGBA
    # image — confirm an opaque background is intended for a watermark.
    mark=Image.new('RGBA',(mark_width,mark_height),(255, 0, 0))
    draw=ImageDraw.ImageDraw(mark,"RGBA")
    # draw.setfont(font)
    draw.text((0,0),text,fill=font_color,font=font)
    return mark
def set_opacity(im, opacity):
    """Return a copy of *im* with its alpha channel scaled by *opacity*
    (0 <= opacity < 1)."""
    assert opacity >=0 and opacity < 1
    if im.mode != "RGBA":
        im = im.convert('RGBA')
    else:
        im = im.copy()
    # Channels of the RGBA colour space: Red, Green, Blue and Alpha;
    # index 3 is the alpha band.
    alpha = im.split()[3]
    # Scale the alpha band's brightness by the requested factor.
    alpha = ImageEnhance.Brightness(alpha).enhance(opacity)
    im.putalpha(alpha)
    return im
| [
"rockyfire@126.com"
] | rockyfire@126.com |
04f8d6bc87a258cf4e9b029554c18b19cef3151c | f50f1aa1f8f139d546db3230a1cb1f53043fd9e6 | /hardware/firewire/libffado/actions.py | 12ac7a51a5ba427f73271873cdf391c720e8dc69 | [] | no_license | pars-linux/corporate2 | 7887961d1552d39bc3b0bef4a60fd3413d9b82bb | 14d1eacfc824fb8d0bff8173e7ac06b36b88d10d | refs/heads/master | 2020-05-26T15:02:12.005654 | 2017-02-27T03:07:14 | 2017-02-27T03:07:14 | 82,476,084 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 747 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2010 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import pisitools
from pisi.actionsapi import scons
from pisi.actionsapi import get
WorkDir = "%s" % get.srcDIR().replace("_", "-")
def build():
    """Rewrite the scons install prefix from /usr/local to /usr, then build."""
    pisitools.dosed("SConstruct", "usr/local", "usr")
    scons.make()
def install():
    """Install via scons into the package image and lay out desktop/doc files."""
    scons.install("install DESTDIR=%s" % get.installDIR())
    # NOTE(review): the directory is created but no .desktop file is
    # installed into it here — confirm another step provides one.
    pisitools.dodir("/usr/share/applications")
    pisitools.dosym("/usr/share/libffado/icons/hi64-apps-ffado.png", "/usr/share/pixmaps/ffado-mixer.png")
    pisitools.dodoc("AUTHORS", "ChangeLog", "LICENSE*", "TODO", "README")
| [
"igungor@users.noreply.github.com"
] | igungor@users.noreply.github.com |
c8b391a7931266d0fcb31994e9fd56daba3697fe | 93e9bbcdd981a6ec08644e76ee914e42709579af | /co_fb/155_Min_Stack.py | 9c2ae407af25c37ef4a4a635ed5e52ff2d13b37f | [] | no_license | vsdrun/lc_public | 57aa418a8349629494782f1a009c1a8751ffe81d | 6350568d16b0f8c49a020f055bb6d72e2705ea56 | refs/heads/master | 2020-05-31T11:23:28.448602 | 2019-10-02T21:00:57 | 2019-10-02T21:00:57 | 190,259,739 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,684 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
https://leetcode.com/problems/min-stack/description/
easy.
Design a stack that supports push, pop, top,
and retrieving the minimum element in constant time.
push(x) -- Push element x onto stack.
pop() -- Removes the element on top of the stack.
top() -- Get the top element.
getMin() -- Retrieve the minimum element in the stack.
Example:
MinStack minStack = new MinStack();
minStack.push(-2);
minStack.push(0);
minStack.push(-3);
minStack.getMin(); --> Returns -3.
minStack.pop();
minStack.top(); --> Returns 0.
minStack.getMin(); --> Returns -2.
"""
class MinStack(object):
    """Stack supporting O(1) push, pop, top and getMin.

    Each entry is a (value, minimum-so-far) pair, so the running minimum
    is always available at the top of the stack.
    """

    def __init__(self):
        """
        initialize your data structure here.
        """
        self.stack = []

    def push(self, x):
        """
        :type x: int
        :rtype: void
        """
        if self.stack:
            current_min = min(x, self.stack[-1][1])
        else:
            current_min = x
        self.stack.append((x, current_min))

    def pop(self):
        """
        :rtype: void
        """
        value, _ = self.stack.pop()
        return value

    def top(self):
        """
        :rtype: int
        """
        return self.stack[-1][0]

    def getMin(self):
        """
        :rtype: int
        """
        return self.stack[-1][1]
# Your MinStack object will be instantiated and called as such:
# obj = MinStack()
# obj.push(x)
# obj.pop()
# param_3 = obj.top()
# param_4 = obj.getMin()
if __name__ == "__main__":
    # Smoke test: pop() returns the last value pushed (7), and the minimum
    # of the remaining stack [-10, 3, -27] is -27.
    m = MinStack()
    m.push(-10)
    m.push(3)
    m.push(-27)
    m.push(7)
    v = m.pop()
    print(v)
    print(m.getMin())
| [
"shchang@linkedin.com"
] | shchang@linkedin.com |
992b9fcf1f9245559736c39f1ff5f2a4fad0a1a8 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/59/usersdata/233/44940/submittedfiles/testes.py | c32c53695e128c23eb21837b58d93558de20eeb8 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
s='Um elefante incomoda muita gente'
print(a.capitalize(s)) | [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
8c13285e2c7e562979a58da4c3efd37610225b5b | 5abf3b5b42acce1b0006511eb28e1dd39d564559 | /analyze_altmetric.py | 826c95133298e03ea07eccf00af8acce1688280a | [
"MIT"
] | permissive | abitofalchemy/ScientificImpactPrediction | ac8e5d61acfab76c585e6cc21cf7a1a2111df591 | fa4f429726f8b2070ccc147b38eca004a3760428 | refs/heads/master | 2021-01-21T04:39:58.707266 | 2016-06-28T09:50:41 | 2016-06-28T09:50:41 | 53,091,217 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,802 | py | # -*- coding: utf-8 -*-
__author__ = "Sal Aguinaga"
__copyright__ = "Copyright 2015, The Phoenix Project"
__credits__ = ["Sal Aguinaga", "Rodrigo Palacios", "Tim Weninger"]
__license__ = "GPL"
__version__ = "0.1.0"
__maintainer__ = "Sal Aguinaga"
__email__ = "saguinag (at) nd dot edu"
import json
import argparse,traceback
from pprint import pprint
def parse_input_args():
    """Parse command-line arguments and return them as a plain dict."""
    arg_parser = argparse.ArgumentParser(description='query twitter and output to file')
    arg_parser.add_argument('injson', metavar='INJSON', help='Input JSON path')
    arg_parser.add_argument('--version', action='version', version=__version__)
    return vars(arg_parser.parse_args())
# << Begin >>
args = parse_input_args()
in_json_path = args['injson']
# reads in the JSON file, one Altmetric record per line (JSON Lines)
json_data = []
# NOTE(review): docsd is never used below — dead variable?
docsd = {}
with open(in_json_path) as f:
    for line in f:
        jobj=json.loads(line)
        json_data.append(jobj)
# for jl in json_data:
# print jl['details_url']
# for alt_k in jl.keys():
# #if 'count_inprint '{}\t: {}\n'.format(alt_k, jl[alt_k])
# if 'count' in alt_k:
# print '\t',alt_k, jl[alt_k]
for jl in json_data:
packet_ar = []
for ak in jl.keys():
# print [x for x in str(ak)]
if '_id' in ak: packet_ar.append(jl[ak])
if 'count' in ak: packet_ar.append(jl[ak])
print packet_ar
print '-'*80
alt_pub = json_data[-1]
pprint(alt_pub.keys())
pub_title = 'Publication title: {}'.format(alt_pub['title'])
print pub_title
print alt_pub['cited_by_tweeters_count']
## counts
'''
for ak in jl.keys():
# print [x for x in str(ak)]
if '_id' in ak: packet_ar.append(jl[ak])
if 'count' in ak: packet_ar.append(jl[ak])count_ar =
'''
cnt_d = {}
for k,v in alt_pub.items():
if 'count' in k:
cnt_d[k] = v
pprint(cnt_d)
print '-'*80
pprint(alt_pub['tq'])
####
| [
"saguinag@nd.edu"
] | saguinag@nd.edu |
09d8e28411bec6737f2bab2c0280f4aef947f48b | a5aaf046991af00ea164d370bf883dd65dec8a9e | /publishconf.py | 3e5a463576696aab11284685b16ba9fba5dc39bc | [
"MIT"
] | permissive | natla/thesims-storytelling | 902cb0cae1bef142f01b1e37ab04f602f24fa855 | cfa38b092288f564bee4c12480e2ce1ad0f493b3 | refs/heads/master | 2022-10-22T22:42:39.525372 | 2022-09-29T22:43:17 | 2022-09-29T22:44:17 | 163,457,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | # This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
from pelicanconf import * # pylint: disable=wildcard-import
sys.path.append(os.curdir)
# If your site is available via HTTPS, make sure SITEURL begins with https://
SITEURL = 'https://symphony-garden.cf'
# Publish build emits absolute URLs (dev config typically uses relative ones).
RELATIVE_URLS = False
# Atom feeds are generated only for the publish build.
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/{slug}.atom.xml'
# Wipe the output directory before each publish so stale files never linger.
DELETE_OUTPUT_DIRECTORY = True
#GOOGLE_ANALYTICS = ""
| [
"natalia.malkovska@gmail.com"
] | natalia.malkovska@gmail.com |
235816fa216a389792fdfe911577639149faa8fa | 1be659ea30f82bbfbb4e0f04b0c56df53d361a73 | /venv/bin/easy_install | c98477883ea2506a885ad59c121baefdca22e6db | [] | no_license | jocelimjr2000/DBackup-Python | ed4625d0c7cbafcd84227bc4813e943ef8a69f31 | 74852dd4520b07b3f90d0ae906bfb711d2460ecc | refs/heads/master | 2023-03-17T13:52:50.455900 | 2021-03-24T21:51:06 | 2021-03-24T21:51:06 | 351,210,587 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | #!/Users/jocelim/PycharmProjects/DBackup/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Strip the '-script.py(w)' / '.exe' wrapper suffix that setuptools adds
    # to argv[0] on some platforms, then delegate to easy_install's main().
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"jocelimjr2000@hotmail.com"
] | jocelimjr2000@hotmail.com | |
eb38bc55a97113a4ff5245a636b1639df9dc6782 | 79064292be46828f98b9f0666b7cf83de54749d7 | /Classroom/migrations/0001_initial.py | 19b48c891975f5f1195e9dc6b3285f39d7badbe8 | [] | no_license | prantacse4/Django_React | 711ba5efd4f8ec1e3526aaf8b4ee258465b81d4d | 0f59716f650c602dca398091660c99d8fb3452cf | refs/heads/master | 2023-06-11T05:03:06.105140 | 2021-07-01T05:21:37 | 2021-07-01T05:21:37 | 381,023,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,395 | py | # Generated by Django 3.2.4 on 2021-07-01 04:12
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations (see header above): creates
    # the MyPost and MyComment models for the Classroom app.  Applied
    # migrations should not be hand-edited; add a new migration instead.

    initial = True

    dependencies = [
        ('Accounts', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # A post made by a user inside a classroom (FK to Accounts.MyClassroom).
        migrations.CreateModel(
            name='MyPost',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('post', models.TextField()),
                ('classroom', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Accounts.myclassroom')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # A comment made by a user on a MyPost.
        migrations.CreateModel(
            name='MyComment',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comment', models.TextField()),
                ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Classroom.mypost')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"pranta.cse4.bu@gmail.com"
] | pranta.cse4.bu@gmail.com |
cfa23c0f36e9717c33dffbcf6434e5f23170cdee | df1b1050f65d27bb9cf3e5705dc21d4af4a34fc3 | /player/choices.py | fb816da05d91c31955e2cc570950dc632ccffd83 | [] | no_license | Aisha-Atique/criclysis | 494c185b18b7ab50a339ee58ea3f3764cdb922cf | 07951a397565ef7d82a016db33f4168cd9245c11 | refs/heads/master | 2020-03-23T19:37:49.892555 | 2018-11-07T11:50:52 | 2018-11-07T11:50:52 | 141,992,019 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | ROLE_CHOICES = (
('Playing role Bowler', 'Playing role Bowler'),
('Playing role Allrounder', 'Playing role Allrounder'),
('Playing role Bowling allrounder', 'Playing role Bowling allrounder'),
('Playing role Batting allrounder', 'Playing role Batting allrounder'),
)
| [
"aishaatique1@yahoo.com"
] | aishaatique1@yahoo.com |
e724a3c48e595bf7c48a83851d2e887104b40271 | 605d63d23bc2e07eb054979a14557d469787877e | /atest/testdata/core/resources_and_variables/variables_imported_by_resource.py | 73662bdefa9dd586742fe3ebc59da8b64bfb1dc2 | [
"Apache-2.0",
"CC-BY-3.0"
] | permissive | robotframework/robotframework | 407b0cdbe0d3bb088f9bfcf9ea7d16e22eee1ddf | cf896995f822f571c33dc5651d51365778b1cf40 | refs/heads/master | 2023-08-29T03:19:00.734810 | 2023-08-27T18:14:48 | 2023-08-28T18:14:11 | 21,273,155 | 8,635 | 2,623 | Apache-2.0 | 2023-09-05T04:58:08 | 2014-06-27T11:10:38 | Python | UTF-8 | Python | false | false | 82 | py | variables_imported_by_resource = 'Variable from variables_imported_by_resource.py' | [
"peke@iki.fi"
] | peke@iki.fi |
feaff7485bb58d31649aaba97aec806c7bb47198 | 934a0da35723bd7312167ee1c94bce48b851fd50 | /work/run_parameter_sweep_missing_data.py | 509f22b67b64041d301a217ce0c3a6dd82dbcd16 | [
"Apache-2.0"
] | permissive | byooooo/dispersant_screening_PAL | 46c8afd31b3add3aad4d519da2c6e2c1d76db810 | e25acc82c18db209fbf29046780ca31835f587d0 | refs/heads/master | 2023-04-12T01:28:13.674545 | 2021-01-05T16:30:43 | 2021-01-05T16:30:43 | 252,852,504 | 8 | 5 | Apache-2.0 | 2023-01-25T04:01:26 | 2020-04-03T22:06:43 | Jupyter Notebook | UTF-8 | Python | false | false | 2,634 | py | # -*- coding: utf-8 -*-
# Copyright 2020 PyPAL authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import time
import click
import pandas as pd
N_SAMPLES = [60, 100]
EPSILON = [0.01, 0.05, 0.1]
BETA_SCALE = [1 / 9, 1 / 20]
DELTA = [0.05]
SLURM_TEMPLATE = '''#!/bin/bash -l
#SBATCH --chdir ./
#SBATCH --mem 32GB
#SBATCH --ntasks 1
#SBATCH --cpus-per-task 1
#SBATCH --job-name {name}
#SBATCH --time 48:00:00
#SBATCH --partition serial
source /home/kjablonk/anaconda3/bin/activate
conda activate pypal
python missing_data_test.py {epsilon} {delta} {beta_scale} 1 . {n_samples}
'''
THIS_DIR = os.path.dirname(__file__)
def write_submission_script(counter, epsilon, delta, beta_scale, n_samples):
    """Render SLURM_TEMPLATE with the given parameters, write it to
    'ePALMISSING_<counter>.slurm' and return that filename."""
    job_name = 'ePALMISSING_{}'.format(counter)
    rendered = SLURM_TEMPLATE.format(name=job_name,
                                     epsilon=epsilon,
                                     delta=delta,
                                     beta_scale=beta_scale,
                                     n_samples=n_samples)
    slurm_file = job_name + '.slurm'
    with open(slurm_file, 'w') as fh:
        fh.write(rendered)
    return slurm_file
@click.command('cli')
@click.option('--submit', is_flag=True)
def main(submit):
    """Enumerate the full parameter grid (N_SAMPLES x EPSILON x BETA_SCALE x
    DELTA), write one SLURM submission script per combination, optionally
    sbatch each one (--submit), and dump the grid to all_experiments.csv."""
    experiments = []
    counter = 0
    for n_samples in N_SAMPLES:
        for epsilon in EPSILON:
            for beta_scale in BETA_SCALE:
                for delta in DELTA:
                    experiment = {
                        'counter': counter,
                        'n_samples': n_samples,
                        'epsilon': epsilon,
                        'beta_scale': beta_scale,
                        'delta': delta
                    }
                    experiments.append(experiment)
                    SUBMISSIONSCRIPTNAME = write_submission_script(counter, epsilon, delta, beta_scale, n_samples)
                    if submit:
                        # throttle scheduler submissions: one sbatch every 10 s
                        subprocess.call('sbatch {}'.format(SUBMISSIONSCRIPTNAME), shell=True, cwd='.')
                        time.sleep(10)
                    counter += 1
    # Record every generated parameter combination for bookkeeping.
    df = pd.DataFrame(experiments)
    df.to_csv('all_experiments.csv', index=False)
if __name__ == '__main__':
main()
| [
"noreply@github.com"
] | byooooo.noreply@github.com |
61df62e8287d7e1f28d3326b2933f82d53142f42 | 3b1d17fbecdf29e9bb1b3cf5e9ccf7476e7697dc | /script-programming/Protocol_programming/Exercises/toiminnallisuus.py | 662068b534dd89dadfc593127ee99f0cfa9e9f89 | [] | no_license | ILiterallyCannot/script-programming | 647859e4b18efdf694887b1fde5eb8401e6a8fde | dc08d66992dd70f9edf7148933c86af05a817e57 | refs/heads/master | 2020-04-27T07:29:01.152793 | 2019-03-06T12:06:07 | 2019-03-06T12:06:07 | 174,137,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 947 | py |
def send_data(socket, data):
total = 0
while total < len(data):
lahetetty = socket.send(data[total:])
total += lahetetty
print "lahetettiin %d tavua" % total
def laheta_viesti(socket, data):
data_len = len(data)
send_data(socket, str(data_len)+"\n")
send_data(socket, data)
def lue_pituus(socket):
bufferi =""
while True:
apu = socket.recv(1)
if apu =="\n":
break
bufferi += apu
return int(bufferi)
def lue_viesti(socket):
bufferi = ""
pituus = lue_pituus(socket)
print "tavuja tulossa", pituus
vastaanotettu = 0
while vastaantotettu < pituus:
data = socket.recv(1024)
bufferi += data
vastaanotettu += len(data)
return bufferi
| [
"noreply@github.com"
] | ILiterallyCannot.noreply@github.com |
d4a8f90d3cad125d91e6c5d84b22f6fbe0c3ff75 | 53a3f72012ee69183600b26ed027d2dd35427030 | /database.py | e2b6a906c0cad7d8f743f0e1649059633a55e61f | [] | no_license | clevs1363/SFHSChess | f1d7a9a17249c0186f2b0492d82ff9e908f156e0 | 696893db6a2db3f5896dfe3a68fa6534fa086cd8 | refs/heads/master | 2020-04-20T22:27:35.249904 | 2019-02-27T18:15:27 | 2019-02-27T18:15:27 | 169,132,847 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py | import sqlite3
conn = sqlite3.connect(':memory:')
c = conn.cursor()
c.execute("""CREATE TABLE user_login (
email text,
username text,
password text
)""")
def create_user(userObj):
with conn:
c.execute("INSERT INTO user_login VALUES (:username, :password)", {'username': userObj.username, 'password': userObj.password})
conn.close() | [
"cleversleym13@gmail.com"
] | cleversleym13@gmail.com |
14fcaeb305d053f5521da45fd3ee2dd1a9697fba | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startCirq155.py | 094f1a6799a76414621d8cdf570c3e79f509ea54 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,673 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=3
# total number=29
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.rx(-0.09738937226128368).on(input_qubit[2])) # number=2
c.append(cirq.H.on(input_qubit[1])) # number=3
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=4
c.append(cirq.Y.on(input_qubit[1])) # number=15
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=10
c.append(cirq.H.on(input_qubit[1])) # number=19
c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=20
c.append(cirq.H.on(input_qubit[1])) # number=21
c.append(cirq.H.on(input_qubit[1])) # number=26
c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=27
c.append(cirq.H.on(input_qubit[1])) # number=28
c.append(cirq.X.on(input_qubit[1])) # number=23
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=24
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=18
c.append(cirq.Z.on(input_qubit[1])) # number=11
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=12
c.append(cirq.Y.on(input_qubit[1])) # number=14
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=5
c.append(cirq.X.on(input_qubit[1])) # number=6
c.append(cirq.Z.on(input_qubit[1])) # number=8
c.append(cirq.X.on(input_qubit[1])) # number=7
c.append(cirq.rx(-2.42845112122491).on(input_qubit[1])) # number=25
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq155.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close() | [
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
627cf8253da28f9a0b598a5ce5132606b0f3c62b | a1431c25ebd62daead742e0120a16253c4cf67ca | /django/movie/migrations/0002_auto_20190910_2053.py | 212f7307d9b37543ceb71c884a998090b3067fed | [] | no_license | KonradMarzec1991/my_MDB | f840cbf495c23272b3e39db68c241219a60d63bd | d77339a4c37a3d7ae21b6d28bd9644ce15130f10 | refs/heads/master | 2022-04-29T10:15:37.109422 | 2019-11-03T20:13:57 | 2019-11-03T20:13:57 | 207,375,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,230 | py | # Generated by Django 2.2.5 on 2019-09-10 20:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('movie', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=140)),
('last_name', models.CharField(max_length=140)),
('born', models.DateField()),
('died', models.DateField(blank=True, null=True)),
],
options={
'ordering': ('last_name', 'first_name'),
},
),
migrations.AlterModelOptions(
name='movie',
options={'ordering': ('-year', 'title')},
),
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=140)),
('movie', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='movie.Movie')),
('person', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='movie.Person')),
],
options={
'unique_together': {('movie', 'person', 'name')},
},
),
migrations.AddField(
model_name='movie',
name='actors',
field=models.ManyToManyField(blank=True, related_name='acting_credits', through='movie.Role', to='movie.Person'),
),
migrations.AddField(
model_name='movie',
name='director',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='directed', to='movie.Person'),
),
migrations.AddField(
model_name='movie',
name='writers',
field=models.ManyToManyField(blank=True, related_name='writing_credits', to='movie.Person'),
),
]
| [
"konrimarzec@gmail.com"
] | konrimarzec@gmail.com |
8a7e9cb212dececbf6cc856be93fa7b78077b778 | 846852ff2e6e5dd0ccd24f443f2825bf809be310 | /rds/aliyunsdkcms/request/v20170301/EnableAlarmRequest.py | 34bd31dad5b17bf8f2f5499c503c43ef4d00d6d8 | [] | no_license | iceflame999/aliyunmonitor | e8069725fb4679c97618c8b6216445950ed5e486 | 6de9950c6083d9ca7a861f1b7ba2a03a38f52153 | refs/heads/master | 2020-03-24T20:22:03.850691 | 2018-07-31T06:53:50 | 2018-07-31T06:53:50 | 142,974,346 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class EnableAlarmRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cms', '2017-03-01', 'EnableAlarm','cms')
def get_callby_cms_owner(self):
return self.get_query_params().get('callby_cms_owner')
def set_callby_cms_owner(self,callby_cms_owner):
self.add_query_param('callby_cms_owner',callby_cms_owner)
def get_Id(self):
return self.get_query_params().get('Id')
def set_Id(self,Id):
self.add_query_param('Id',Id) | [
"hanyu@roobo.com"
] | hanyu@roobo.com |
d37c487f4b744517ed61737964d516663ef158fa | af435d574e0de422f321c906a4e5e514ed2fcf4b | /get_all_projects_form_gitlab.py | bedd93d571f708848b65c3f2866aa9dee00474a0 | [
"MIT"
] | permissive | liguobao/git-cloneall | 63c5a4a3095accfd822978cdbca641a53317c72a | 5767ec1c9e4ff7f074a5cd61497b47c2cda3f418 | refs/heads/main | 2023-04-19T17:02:38.021229 | 2021-05-08T05:53:31 | 2021-05-08T05:53:31 | 365,417,745 | 0 | 0 | MIT | 2021-05-08T04:24:02 | 2021-05-08T04:24:02 | null | UTF-8 | Python | false | false | 1,531 | py | import requests
from loguru import logger
## copy all gitlab headers
default_headers = {
"":""
}
gitlab_host = "https://git.sample.com"
def get_group_projects(group_name, page):
    """Fetch one page of a GitLab group's children (sub-groups and projects).

    Uses the module-level ``gitlab_host`` and ``default_headers`` (the latter
    must hold valid session headers copied from the browser).  Returns the
    decoded JSON list on HTTP 200, otherwise an empty list.
    """
    url = f"{gitlab_host}/groups/{group_name}/-/children.json?page={page}"
    payload = {}
    headers = default_headers
    response = requests.request("GET", url, headers=headers, data=payload)
    if response.status_code == 200:
        logger.info(
            f"get_group_projects successfully,group_name:{group_name},page:{page}")
        return response.json()
    # Non-200 response: log it and degrade to an empty page so callers stop paging.
    logger.info(
        f"get_group_projects fail,group_name:{group_name},page:{page},response.text:{response.text}")
    return []
def get_all_group_projects(group_name):
    """Collect every child of a GitLab group by paging until an empty
    page is returned."""
    collected = []
    page = 1
    while True:
        batch = get_group_projects(group_name, page)
        if not batch:
            break
        collected.extend(batch)
        page += 1
    return collected
group_name = "xxxx"
all_projects = get_all_group_projects(group_name)
sub_group_names = [p["relative_path"]
for p in all_projects if p["type"] == "group"]
for sub_group in sub_group_names:
sub_group_projects = get_all_group_projects(sub_group)
all_projects = all_projects + sub_group_projects
project_urls = [
f"{gitlab_host}{p['relative_path']}" for p in all_projects if p["type"] == "project"]
with open("cloneall.txt", "w+") as fp:
for project_url in project_urls:
fp.write(project_url + "\r\n")
| [
"codelover@qq.com"
] | codelover@qq.com |
66f7467d7a2e896090798e85e65aed2155385b2d | b896fe5f0dc33a5f1270f2ccbf55678f7cd94658 | /BN_SGCC/Electrical+system.py | 388a8975d0e4b4226b46973f50d18122c24d655c | [] | no_license | qiwr007/SGCC | 6546b5305c5b2f2b166353022219b6455e8d4de2 | 82f918e1e19faca0a3c942193798e4c7f43375e6 | refs/heads/main | 2023-06-04T22:36:50.863096 | 2021-06-28T11:55:16 | 2021-06-28T11:55:16 | 381,013,708 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,233 | py | import pandas as pd
import os
import numpy as np
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
# ๅฏผๅ
ฅๆด็ด ่ดๅถๆฏๅ็ฑปๅจ
from sklearn.naive_bayes import MultinomialNB
import matplotlib.pyplot as plt
# ๅฏ่งๅๆททๆท็ฉ้ต
def cm_plot(y, yp):
    """Render the confusion matrix of true labels *y* vs predictions *yp*.

    Each cell is annotated with its count.  Returns the matplotlib.pyplot
    module so the caller can show()/save the figure.
    """
    cm = confusion_matrix(y, yp)
    plt.matshow(cm, cmap=plt.cm.Blues)
    plt.colorbar()
    size = len(cm)
    for row in range(size):
        for col in range(size):
            # matshow puts the row index on the y axis, hence xy=(col, row)
            plt.annotate(cm[row, col], xy=(col, row),
                         horizontalalignment='center',
                         verticalalignment='center')
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    return plt
if __name__ == '__main__':
    # --- Data preprocessing ---
    file_path = os.getcwd()
    data = pd.read_csv(file_path+"/Elimination record.csv", encoding='gbk', engine='python')
    # The first row already holds real data; the default header handling is used.
    data = data.drop('่ฎพๅคๅๅท', axis=1)  # drop the equipment-model column
    # NOTE(review): the next statement's string literal is split across two
    # lines by a lost mojibake character (byte 0x85 rendered as a newline);
    # restore the original column name before running.
    data = data.drop('ๅฎ่ฃ
ๅฐ็น', axis=1)  # drop the installation-location column
    data.head()  # NOTE(review): return value unused — this line has no effect
    # Split the raw data into features and target.
    X_whole = data.drop('ๆๅๆง่ดจ', axis=1)
    y_whole = data['ๆๅๆง่ดจ']
    # (Translated author note): the weather text is used directly as a label;
    # the numeric replacement was reportedly done in Excel rather than here.
    # The lines below were the code originally intended for that step:
    # data['ๆๅๆถ็ๅคฉๆฐ']
    # data['ๆๅๆถ็ๅคฉๆฐ'].unique()
    # Numeric-encode the weather column via a mapping dict (map()).
    label_mapping = {'ๆด': 1, '้ท้ต้จ': 2, '้ด': 3, 'ๅคไบ': 4, 'ๅฐ้น': 5, 'ๆด้จ': 6, 'ๅฐ้จ': 7, '้พ': 8, '้จๅคน้ช': 9, 'ๅคง้จ': 10, 'ๅคง้ช': 11,'ๆฒๅฐๆด': 12}
    data['ๆๅๆถ็ๅคฉๆฐ'] = data['ๆๅๆถ็ๅคฉๆฐ'].map(label_mapping)
    X_whole['ๆๅๆถ็ๅคฉๆฐ'] = data['ๆๅๆถ็ๅคฉๆฐ']
    """
    ๅๅๆฐๆฎ้
    """
    # Train/test split: 80% train, 20% test.
    x_train_w, x_test_w, y_train_w, y_test_w = train_test_split(X_whole, y_whole, test_size = 0.2, random_state = 0)
    # Instantiate the multinomial naive-Bayes classifier.
    classifier = MultinomialNB(alpha=1)
    # alpha is the Laplace-smoothing strength (default 1); tunable.
    # Fit on the training data.
    classifier.fit(x_train_w, y_train_w)  # naive-Bayes training happens here
    """
    ่ฎญ็ป้้ขๆต๏ผไนๅฐฑๆฏๅฏน่ฎญ็ป้ๆต่ฏ
    """
    # Plot the training-set confusion matrix.
    train_pred = classifier.predict(x_train_w)
    cm_plot(y_train_w, train_pred)  # confusion matrix
    """
    ๆต่ฏ้้ขๆต
    """
    test_pred = classifier.predict(x_test_w)
    cm_plot(y_test_w, test_pred)  # confusion matrix
    '''
    ๅฏนๅคๅ็ฑป้ฎ้ข็่ฏไปทๆๆ ใ
    ่ฟ่พน็จไบไธคไธช๏ผไธไธชๆฏkappa็ณปๆฐ๏ผๅฆไธไธชๆฏๆตทๆ่ท็ฆปใ
    '''
    # Kappa coefficient: ranges from -1 to 1; higher means better agreement
    # between predictions and ground truth.
    from sklearn.metrics import cohen_kappa_score
    kappa = cohen_kappa_score(y_test_w, test_pred)
    print(kappa)
    # -0.20300751879699241
    # Hamming distance: also for multi-class problems; measures the distance
    # between predicted and true labels, in 0..1 (0 = identical predictions,
    # 1 = completely different).
    from sklearn.metrics import hamming_loss
    ham_distance = hamming_loss(y_test_w, test_pred)
    print(ham_distance)
    # 0.625
    '''
    ไธ้ข่ๆไบไธไบๆฐๆฎ๏ผๆไปถpredict.csvๆไปถ๏ผ่ฟ่ก้ขๆต
    '''
    data_predict = pd.read_csv(file_path+"/predict.csv",encoding='gbk', engine='python')
    data_predict = data_predict.drop('่ฎพๅคๅๅท', axis=1)  # drop the ID column
    # NOTE(review): mojibake-broken literal again — same column as above.
    data_predict = data_predict.drop('ๅฎ่ฃ
ๅฐ็น', axis=1)
    X_predict_1 = data_predict.drop('ๆๅๆง่ดจ', axis=1)
    # Numeric-encode the weather column of the prediction set.
    data_predict['ๆๅๆถ็ๅคฉๆฐ'] = data_predict['ๆๅๆถ็ๅคฉๆฐ'].map(label_mapping)
    X_predict_1['ๆๅๆถ็ๅคฉๆฐ']= data_predict['ๆๅๆถ็ๅคฉๆฐ']
    y_predict_1 = data_predict['ๆๅๆง่ดจ']
    test_pred_1 = classifier.predict_proba(X_predict_1)  # predict class probabilities
    # Human-readable class labels indexed by classifier class id.
    # NOTE(review): one entry below is split by a lost mojibake character.
    label=[
        '',
        '่ๅ',
        '็ญ่ทฏ',
        '้ท็ตๅป็ฉฟ',
        '่ฏๆฃๆญ่ฃ',
        'ๅๆฝฎ',
        '็ป็ผๅญๆญ่ฃ',
        '็บฟๅๆๅ',
        'ๅฅ็ฎก็ ด่ฃ',
        '่ถ
่ด่ท่ฟ่ก',
        'ๅทฅไฝ็ฏๅข่ฟ้ซ',
        'ๆฅ่งฆ้ขๆฐงๅ่ฟ้',
        'ไบๆฌกๅผ่ทฏ',
        '็ตๅ่ฟ้ซ๏ผๅป็ฉฟ',
        '็ต่งฃๆถฒๆผๆ',
        '่ฟ็ตๅๅป็ฉฟ',
        '็ป็ปๅป็ฉฟ',
        'ๅจ่ฝๅผน็ฐงๅๅฝข',
    ]
    # Print, per row, every class label sorted by descending probability.
    i = 0
    for row in test_pred_1:
        i = i+1
        print("็ฌฌ"+str(i)+"่ก",end='\t')
        for index in np.argsort(-row):  # argsort of the negated row = descending order
            print(label[classifier.classes_[index]]+":"+str(row[index]*100)[:4]+ '%', end='\t')
        print("\n", end="")
    #print(test_pred_1)
| [
"noreply@github.com"
] | qiwr007.noreply@github.com |
ca059aa8c32a39ed214dc0199c72e92922850c57 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02677/s955369222.py | 6cf51b7ad9b75384f56164aff5faa203ac653ac3 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | """AtCoder."""
import math
a, b, h, m = [int(v) for v in input().split(' ')]
class Point:
    """Tip of a clock hand: radius *r*, one full revolution every *v* minutes."""

    def __init__(self, r, v):
        self.r = r
        # angular velocity in radians per minute
        self.w = (2 * math.pi) / v

    def get_pos(self, t):
        """Return the (x, y) position of the tip after *t* minutes."""
        angle = self.w * t
        return self.r * math.cos(angle), self.r * math.sin(angle)
p1 = Point(a, 12 * 60)  # hour hand: radius a, full turn in 12 hours
p2 = Point(b, 60)       # minute hand: radius b, full turn in 60 minutes
minute = (h * 60) + m   # elapsed minutes since 0:00
x1, y1 = p1.get_pos(minute)
x2, y2 = p2.get_pos(minute)
# Euclidean distance between the two hand tips.
print(math.sqrt(pow(x1 - x2, 2) + pow(y1 - y2, 2)))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
5a6df1e4a35b657a5b17fe0e968eb6b40bd67f03 | 35fb3ffea70c5c34e2eefd08d99e3eb64ee19230 | /Autocad1.py | 5e84d04593f1d72477ec2947ee018bdc14a77bbe | [] | no_license | aleksandrovfa/example | 910da8fd7d0156f7f8d8b83526d825cad5ad8374 | 6bd4d65ba21077689a0d5af803955d45df0cd131 | refs/heads/master | 2023-01-05T21:15:11.008584 | 2020-10-27T19:15:34 | 2020-10-27T19:15:34 | 307,803,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41,033 | py | import math
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
import openpyxl
import pandas as pd
from openpyxl.styles import PatternFill, Border, Side, Alignment, Font
from termcolor import colored
# Support node from which the calculation starts (the power source).
Support_node = 'ะะ-ะะ2'
# Voltage drop and impedance up to the source (transformer) side.
dUfromTP = 0.019
ZfromTP = 0.005
# Cable catalogue: layer name, cable type, phase/neutral cross-sections,
# a current coefficient and phase/neutral resistances.
# NOTE(review): one literal below is split across two lines by a lost
# mojibake character (byte 0x85 rendered as a newline) — restore before running.
cable_df = pd.DataFrame(np.array([['ะญะ_ะะพะทะดััะฝะฐั ะปะธะฝะธั', 'ะกะะ-2ะ-3x25+1x35', 25, 35, 13.1, 1.47, 1.05],
                                  ['ะญะ_ะะฐะฑะตะปัะฝะฐั ะปะธะฝะธั', 'ะะฒะะะฝะณ(ะ)-4ั
16', 16, 16, 21.9, 1.37, 1.37],
                                  ]),
                        columns=['Type', 'NameCable', 'Sf', 'So', 'alfa', 'Rf', 'Ro'])
# Bracket catalogue: bracket type, number of lamp heads, mounting angle.
bracket_df = pd.DataFrame(np.array([['ะ20-0,5-0,5-0-1', 1, 0],
                                    ['ะ21(90)-0,5-0,5-0-1', 2, 90],
                                    ['ะ21(135)-0,5-0,5-0-1', 2, 135],
                                    ['ะ21-0,5-0,5-0-1', 2, 180],
                                    ['ะ22-0,5-0,5-0-1', 3, 90]
                                    ]),
                          columns=['Type', 'Count', 'Angle'], dtype=str)
# np.array coerces everything to str; cast the numeric columns back to float.
bracket_df.loc[:, ['Count', 'Angle']] = bracket_df.loc[:, ['Count', 'Angle']].astype('float')
# Pole catalogue: pole type, foundation type, load range [min_F, max_F), height.
Support_df = pd.DataFrame(np.array([['ะะะะะ-7,5 ะก3', 'ะคะ-0,325-3,0', 17000, 40000, 7.5],
                                    ['ะะะะะ-7,5 ะก1', 'ะคะ-0,325-3,0', 7500, 17000, 7.5],
                                    ['ะะะะะ-7,5 ะฃ1', 'ะคะ-0,219-2,2', 1, 7500, 7.5],
                                    ['ะะะะก-7,5', 'ะคะ-0,219-2,2', 0, 1, 7.5],
                                    ['ะะะะก-4', 'ะคะ-0,159-2,0', 0, 1, 4]
                                    ]),
                          columns=['Support_Type', 'Found_Type', 'min_F', 'max_F', 'height'], )
Support_df.loc[:, ['min_F', 'max_F', 'height']] = Support_df.loc[:, ['min_F', 'max_F', 'height']].astype('float')
# Helper functions for the step-by-step processing below; the names are
# largely self-explanatory.
# (Author's note, translated): the formatting is admittedly rough — the whole
# script is just separate functions applied stage by stage.
def of_TXT_in_df(file):
    """Read a tab-separated text stream and return it as a DataFrame.

    Trailing whitespace/newlines are stripped before splitting, so a final
    blank line does not produce an empty row.
    """
    rows = [raw_line.split('\t') for raw_line in file.read().rstrip().split('\n')]
    return pd.DataFrame(rows)
def points_line(df):
    """Parse every coordinate string of every polyline vertex column.

    Takes the columns from 'COORDS' onward (each cell like '(z x y)' or
    None), and for each source column g adds 'POINTx{g}'/'POINTy{g}' with
    the x/y values rounded to the nearest thousand (drawing units).
    Assumes a default RangeIndex.  Returns the augmented frame.
    """
    df = df.loc[:, 'COORDS':]
    df.columns = range(df.shape[1])
    n_cols = df.shape[1]          # fixed before the new POINT* columns appear
    for col in range(n_cols):
        for row in range(df.shape[0]):
            cell = df.loc[row, col]
            if cell is None:
                df.loc[row, f'POINTx{col}'] = None
                df.loc[row, f'POINTy{col}'] = None
            else:
                parts = cell.strip('(').strip(')').split()
                df.loc[row, f'POINTx{col}'] = round(float(parts[1]), -3)
                df.loc[row, f'POINTy{col}'] = round(float(parts[2]), -3)
    return df
def points_line1(line):
    """Extract the two endpoints of every polyline row.

    The first non-null coordinate cell (from column 'COORDS' onward) gives
    (POINTx1, POINTy1), the last non-null one gives (POINTx2, POINTy2);
    coordinates are rounded to the nearest thousand.  Mutates and returns
    *line*.
    """
    coords = line.loc[:, 'COORDS':]
    coords.columns = range(coords.shape[1])
    for idx, row in coords.iterrows():
        start = coords.loc[idx, 0].strip('(').strip(')').split()
        last_col = len(row.dropna()) - 1
        end = coords.loc[idx, last_col].strip('(').strip(')').split()
        line.loc[idx, 'POINTx1'] = round(float(start[1]), -3)
        line.loc[idx, 'POINTy1'] = round(float(start[2]), -3)
        line.loc[idx, 'POINTx2'] = round(float(end[1]), -3)
        line.loc[idx, 'POINTy2'] = round(float(end[2]), -3)
    return line
def points(df):
    """Parse the 'POINT' column ('(x y)' strings) into numeric 'POINTx' /
    'POINTy' columns, rounded to the nearest thousand.  Assumes a default
    RangeIndex.  Mutates and returns *df*.
    """
    for row in range(len(df)):
        xy = df.loc[row, 'POINT'].strip('(').strip(')').split()
        df.loc[row, 'POINTx'] = round(float(xy[0]), -3)
        df.loc[row, 'POINTy'] = round(float(xy[1]), -3)
    return df
def get_distance(unit1, unit2):
    """Return the absolute angular distance between two bearings, in degrees.

    The result is the smaller arc between *unit1* and *unit2* (0..180),
    snapped to the nearest multiple of 5 degrees.
    """
    phi = abs(unit2 - unit1) % 360
    # take the smaller of the two arcs around the circle
    arc = 360 - phi if phi > 180 else phi
    # The original code also derived a direction sign here, but the final
    # abs() always discarded it (round-half-even is sign-symmetric), so that
    # dead computation has been removed.
    return abs(int(round(arc / 5.0) * 5.0))
def light_in_support(Support, light):
    """Attach luminaires to poles by coordinate matching.

    For every pole row in *Support*, finds the luminaires in *light* standing
    at the same (POINTx, POINTy); stores their count, total power, per-head
    powers (light1..lightN), the mounting angle between the strongest heads,
    and a human-readable description in 'LIGHT'.  Mutates *Support* (and
    marks consumed luminaires in *light* via 'POWER1'); returns *Support*.
    """
    print('____________________________________________________________________________________')
    print(colored(' ะะฐัะฐะปะพ ะฟัะธะฒัะทะบะธ ัะฒะตัะธะปัะฝะธะบะพะฒ ะบ ะพะฟะพัะฐะผ!', 'yellow'))
    number_light = 0
    power_light = 0
    for index, row in Support.iterrows():
        # luminaires located exactly at this pole's (rounded) coordinates,
        # strongest first
        ss = light[(light.POINTx == row.POINTx) & (light.POINTy == row.POINTy)]
        power = ss.loc[:, ['POWER', 'Angle1']].sort_values(by=['POWER'], ascending=False)
        Support.loc[index, 'Count'] = int(len(power))
        Support.loc[index, 'Power'] = power.POWER.sum()
        # mounting angle: 0 for a single head, otherwise the 5°-snapped angle
        # between the strongest head and its nearest listed neighbour
        if len(power) < 2:
            Support.loc[index, 'Angle'] = 0
        elif len(power) == 2:
            Support.loc[index, 'Angle'] = get_distance(power.Angle1[power.index[0]], power.Angle1[power.index[1]])
        elif len(power) >= 3:
            angle1 = get_distance(power.Angle1[power.index[0]], power.Angle1[power.index[1]])
            angle2 = get_distance(power.Angle1[power.index[0]], power.Angle1[power.index[2]])
            Support.loc[index, 'Angle'] = min(angle1, angle2)
        # build the textual luminaire description, e.g. "... 60Wt,40Wt"
        light_lst = str('ะะพะปะฝะฐ ะะธะฝะธ LED ')
        for p in range(len(power)):
            Support.loc[index, f'light{p + 1}'] = power.POWER[power.index[p]]
            light.loc[power.index[p], 'POWER1'] = power.POWER[power.index[p]]
            power_light = power_light + power.POWER[power.index[p]]
            number_light = number_light + 1
            if len(light_lst) > 15:
                light_lst = light_lst +str(',')
            light_lst = light_lst + str(int(power.POWER[power.index[p]]))
            light_lst = light_lst + 'ะั'
        Support.loc[index, 'LIGHT'] = light_lst
    # luminaires never matched to any pole (no 'POWER1' ever written)
    light1 = light.isnull()
    light1 = light1[light1.POWER1 == True]
    light = light.loc[light1.index, ['POINTx', 'POINTy', 'POWER']]
    print(light)
    print(colored(f'ะะพะปะธัะตััะฒะพ ัะฒะตัะธะปัะฝะธะบะพะฒ {number_light}', 'yellow'))
    # NOTE(review): the next f-string literal was split by a lost mojibake
    # character (byte 0x85 rendered as a newline) — restore before running.
    print(colored(f'ะะพัะฝะพััั ะฒัะตั
ัะฒะตัะธะปัะฝะธะบะพะฒ {power_light}', 'yellow'))
    print(colored(' ะะพะฝะตั ะฟัะธะฒัะทะบะธ ัะฒะตัะธะปัะฝะธะบะพะฒ ะบ ะพะฟะพัะฐะผ!', 'yellow'))
    return Support
def cable_in_support(line, Support):
    """Attach cable segments to the poles they run between.

    For each row of *line*, looks up the pole whose coordinates match each
    endpoint and writes its number into 'N_PYLON1'/'N_PYLON2'.  Also casts
    'LENGTH' to float.  Mutates and returns *line*.
    """
    print('____________________________________________________________________________________')
    print(colored(' ะะฐัะฐะปะพ ะฟัะธะฒัะทะบะธ ะบะฐะฑะตะปะตะน ะบ ะพะฟะพัะฐะผ ', 'magenta'))
    for index, row in line.iterrows():
        try:
            a = Support[(Support.POINTx == row.POINTx1) & (Support.POINTy == row.POINTy1)].N_PYLON
            b = Support[(Support.POINTx == row.POINTx2) & (Support.POINTy == row.POINTy2)].N_PYLON
            line.loc[index, 'N_PYLON1'] = str(a[a.index[0]])
            line.loc[index, 'N_PYLON2'] = str(b[b.index[0]])
        # NOTE(review): bare except — intended to catch the IndexError when no
        # pole matches an endpoint, but it will also hide any other failure.
        except:
            print('ะะต ะฝะฐะนะดะตะฝะฐ ะพะฟะพัะฐ ะฒ ะปะธะฝะธะธ ะผะตะถะดั ', line.loc[index, 'N_PYLON1'], line.loc[index, 'N_PYLON2'])
        # print('ะัะธะฒัะทะบะฐ ะปะธะฝะธะธ ะดะปะธะฝะพะน', line.loc[index, 'LENGTH'], 'ัะปะพั', line.loc[index, 'LAYER'], 'ะผะตะถะดั',
        #       line.loc[index, 'N_PYLON1'], line.loc[index, 'N_PYLON2'])
    line['LENGTH'] = line['LENGTH'].astype('float')
    print(colored(' ะะพะฝะตั ะฟัะธะฒัะทะบะธ ะบะฐะฑะตะปะตะน ะบ ะพะฟะพัะฐะผ ', 'magenta'))
    return line
def draw_graph(G):
    """Draw the network multigraph with a Kamada-Kawai layout.

    Edge colour is keyed on the 'LAYER' attribute: cable lines green,
    overhead lines red.  Shows the figure without blocking.
    """
    plt.figure()
    pos = nx.kamada_kawai_layout(G)
    edges = G.edges()
    # NOTE(review): only the edge with key 0 is inspected for each node pair,
    # so parallel edges all inherit the first edge's layer colour.
    colors = [G[u][v][0]['LAYER'] for u, v in edges]
    color = []
    for i in colors:
        if i == 'ะญะ_ะะฐะฑะตะปัะฝะฐั ะปะธะฝะธั':
            color.append('g')
        if i == 'ะญะ_ะะพะทะดััะฝะฐั ะปะธะฝะธั':
            color.append('r')
    nx.draw(G, pos, edge_color=color, with_labels=True)
    plt.show(block=False)
    # print(nx.dfs_successors(G,"N"))
    # print(nx.dfs_predecessors(G,"N"))
    # print(G["N"])
def draw_graph1(Ga, Support):
    """Draw the directed multigraph *Ga* with curved arrows per parallel edge.

    Nodes are labelled with their pole number; each node is additionally
    annotated with its 'GROUP' value from *Support* (indexed by 'N_PYLON').
    Arrow colour is keyed on the edge 'LAYER' attribute (cable green,
    overhead red).  Blocks on plt.show().
    """
    Support = Support.set_index('N_PYLON')
    plt.figure()
    pos = nx.kamada_kawai_layout(Ga)
    nx.draw_networkx_nodes(Ga, pos, node_color='b', node_size=150, alpha=0.2, label=True)
    nx.draw_networkx_labels(Ga, pos, font_size=10)
    ax = plt.gca()
    for e in Ga.edges:
        # e is (u, v, key); the key picks the edge among parallel edges
        color = nx.get_edge_attributes(Ga, 'LAYER')
        if color[e] == 'ะญะ_ะะฐะฑะตะปัะฝะฐั ะปะธะฝะธั':
            color = 'g'
        elif color[e] == 'ะญะ_ะะพะทะดััะฝะฐั ะปะธะฝะธั':
            color = 'r'
        ax.annotate("",
                    xy=pos[e[0]], xycoords='data',
                    xytext=pos[e[1]], textcoords='data',
                    arrowprops=dict(arrowstyle="->", color=color,
                                    shrinkA=5, shrinkB=5,
                                    patchA=None, patchB=None,
                                    # bend each parallel edge by a different
                                    # radius (0.2 * key) so they don't overlap
                                    connectionstyle="arc3,rad=rrr".replace('rrr', str(0.2 * e[2])
                                                                           ),
                                    ),
                    )
        ax.text(pos[e[1]][0] - 0.03, pos[e[1]][1] - 0.04, Support.loc[e[1], 'GROUP'],
                fontsize=7)
    plt.axis('off')
    plt.show()
    # NOTE(review): duplicated plt.show() — the second call is a no-op after
    # the first window is closed; probably left over from editing.
    plt.show()
def support_selection(G, Support):
print('____________________________________________________________________________________')
print(colored(' ะะฐัะฐะปะพ ะฟะพะดะฑะพัะฐ ะพะฟะพั ะบัะพะฝััะตะนะฝะพะฒ ะธ ััะฝะดะฐะผะตะฝัะพะฒ', 'blue'))
'''ะ ะดะฐะฝะฝะพะน ััะฝะบัะธะธ ะฟัะพะธัั
ะพะดะธั ะฒะตะบัะพัะฝะพะต ัะปะพะถะตะฝะธะต ะฒัะตั
ะฒะพะทะดััะฝัั
ะปะธะฝะธะน ะฟะพะบะปััะตะฝะฝัั
ะบ ะพะฟะพัะต,
ะะท ะณัะฐัะฐ ะฒัะณััะถะฐัััั ะฒัะต ะฒะพะทะดััะฝัะต ะปะธะฝะธะธ ะพะฟะพัั ะธ ะฟะพัะปะต ะฟัะพะธัั
ะพะดะธั ัะปะพะถะตะฝะธะต ะธั
ะฟะพะปะพะฒะธะฝะพะบ.
ะัะธ ััะพะผ ะพะฑัะฐะทัะตััั coefficient ะบะพัะพััะน ั
ะฐัะฐะบัะธัะธะทัะตั ะฝะฐััะถะตะฝะธะต ะพะฟะพัั.
ะกะดะตะปะฐะป ัะฐะบ ะฟัะพััะพ ะฟะพัะพะผั ััะพ ัะปะธัะบะพะผ ะดะพะปะณะพ ะธะทััะฐัั ะบะฐะบ ะฟัะฐะฒะธะปัะฝะพ ััะธัะฐะตััั ะฝะฐะณััะทะบะฐ,
ะธ ะฝะฐะดะพ ะฟะพะฑััััะตะต ะดะพะดะตะปะฐัั ะฟัะพัะพัะธะฟ'''
Support = Support.set_index('N_PYLON')
for N_PYLON in Support.index:
weight_x, weight_y = 0, 0
coefficient = 0
x1 = int(Support[Support.index == N_PYLON].POINTx)
y1 = int(Support[Support.index == N_PYLON].POINTy)
in_out = []
for CN_PYLON in G[N_PYLON]:
for num in G[N_PYLON][CN_PYLON]:
in_out.append(G[N_PYLON][CN_PYLON][num]['LAYER'])
if G[N_PYLON][CN_PYLON][num]['LAYER'] == 'ะญะ_ะะพะทะดััะฝะฐั ะปะธะฝะธั':
x2 = int(Support[Support.index == CN_PYLON].POINTx)
y2 = int(Support[Support.index == CN_PYLON].POINTy)
delt_x = (x2 - x1) / 2
delt_y = (y2 - y1) / 2
weight_x = weight_x + delt_x
weight_y = weight_y + delt_y
coefficient = math.sqrt((weight_x ** 2) + (weight_y ** 2))
'''ะกะปะตะดัััะตะต ััะปะพะฒะธะต ะฝะตะพะฑั
ะพะดะธะผะพ ะดะปั ัะพะณะพ ััะพะฑั ะฒะพะทะดััะฝะฐั ะพะฟะพัะฐ ั ะฝะฐััะถะตะฝะธะตะผ 0
ะฝะต ััะธััะฒะฐะปะฐัั ะบะฐะบ ะบะฐะฑะตะปัะฝะฐั.ะะพััะพะผั ะตะผั ะฟัะพััะพ ะฟัะธัะฒะฐะธะฒะฐะตััั 7.
ะะฝะฐั ััะพ ััะฟะพ ะฝะพ ะทะฐัะพ ะพัะตะฝั ะฟัะพััะพ ะธ ะปะธัะฐะตั ะผะฝะพะณะธั
ะฟัะพะฑะปะตะผ.
ะัะผะฐะป ัะพะทะดะฐัั ะบะฐะบะพะน ะฝะธะฑัะดั ะฟะฐัะฐะผะตัั ะบะพัะพััะน ะทะฐ ััะพ ะพัะฒะตัะฐะตั,
ะฝะพ ัะฐะบ ะผะฝะต ะบะฐะถะตััั ะฟัะพัะต'''
if coefficient == 0 and ('ะญะ_ะะพะทะดััะฝะฐั ะปะธะฝะธั' in in_out):
coefficient = 7
Support.loc[N_PYLON, 'PYLON'] = coefficient
'''ะกะพะฑััะฒะตะฝะฝะพ ัะฐะผะธ ััะปะพะฒะธั ะฒัะฑะพัะฐ ัะธะฟะฐ ะพะฟะพัั, ะพะฝะธ ะพัะตะฝั ััะปะพะฒะฝัะต ะธ ะฝะธ ะฝะฐ ััะพ ะฝะต ะฒะปะธััั.
ะกะดะตะปะฐะฝะพ ัะบะพัะตะต ะดะปั ะฝะฐะณะปัะดะฝะพััะธ ะธ ะฟัะพะฒะตัะบะธ'''
if coefficient == 0:
Support.loc[N_PYLON, 'PYLON_Type'] = "ะะฑััะฝะฐั"
elif 0 < coefficient < 7500:
Support.loc[N_PYLON, 'PYLON_Type'] = "ะัะพั
ะพะดะฝะฐั"
elif 7500 <= coefficient < 17000:
Support.loc[N_PYLON, 'PYLON_Type'] = "ะะพะฝัะตะฒะฐั"
elif 17000 <= coefficient:
Support.loc[N_PYLON, 'PYLON_Type'] = "ะฃะณะปะพะฒะฐั"
Support = Support.reset_index()
'''ะัะพัะฐั ัะฐััั ะฒ ะบะพัะพัะพะน ัะถะต ะบะพะฝะบัะตัะฝะพ ะฟะพะดะฑะธัะฐัััั ะบัะพะฝััะตะนะฝั ะพะฟะพัั ะธ ััะฝะดะฐะผะตะฝัั.
ะะฐะดะพ ะฑั ะตะต ัะพะตะดะธะฝะธัั ั ะฒะตัั
ะฝะตะน ัะฐัััั ะบะพะดะฐ, ะฝะพ ะบะฐะบ ะฝะธะฑัะดั ะฟะพัะพะผ'''
for index, row in Support.iterrows():
try:
Support.loc[index, 'BRACKET'] = bracket_df[(bracket_df.Count == Support.loc[index, 'Count']) &
(bracket_df.Angle == Support.loc[index, 'Angle'])].Type.values
except:
print('ะฝะต ะฝะฐะนะดะตะฝ ะบัะพะฝััะตะนะฝ ะดะปั ะพะฟะพัั', Support.loc[index, 'N_PYLON'], 'c', Support.loc[index, 'Count'],
'ัะฒะตัะธะปัะฝะธะบะพะผ(ะฐะผะธ) ะธ ัะณะปะพะผ', Support.loc[index, 'Angle'])
try:
a = Support_df[(Support_df.min_F <= row.PYLON) &
(Support_df.max_F > row.PYLON) & (row.HEIGHT == Support_df.height)].loc[:,
['Support_Type', 'Found_Type']]
Support.loc[index, 'SUPPORT_TYPE'] = a.Support_Type.values
Support.loc[index, 'FOUND_TYPE'] = a.Found_Type.values
except:
print('ะฝะต ะฝะฐะนะดะตะฝะฐ ะพะฟะพัะฐ ะดะปั ', Support.loc[index, 'N_PYLON'])
print(colored(' ะะพะฝะตั ะฟะพะดะฑะพัะฐ ะพะฟะพั ะบัะพะฝััะตะนะฝะพะฒ ะธ ััะฝะดะฐะผะตะฝัะพะฒ', 'blue'))
return Support
def support_set_power(G, Support):
    """Aggregate per-group power onto every support, bottom-up.

    For each distinct GROUP a column named after the group is created,
    seeded with the support's own ``Power`` (0 where absent), and then the
    power of all downstream supports is folded into their parents by
    walking the DFS successor tree of ``G`` from the module-level
    ``Support_node`` in reverse (children before parents).

    NOTE(review): depends on the global ``Support_node`` — confirm it is
    set before this function is called.
    """
    Support = Support.set_index('N_PYLON')
    successors = nx.dfs_successors(G, Support_node)
    # Process parents after their children so sums propagate toward the feed.
    bottom_up = list(reversed(list(successors.keys())))
    for group in Support['GROUP'].unique():
        col = f'{group}'
        Support[col] = Support[Support.GROUP == group].Power
        Support[col] = Support[col].fillna(0)
        for parent in bottom_up:
            for child in successors[parent]:
                Support.loc[parent, col] = Support.loc[parent, col] + Support.loc[child, col]
    return Support.reset_index()
def cable_list(G, Support, Support_node):
    """Build the cable journal: one row per graph edge, in DFS order.

    Walks ``G`` depth-first from ``Support_node`` so every edge is oriented
    away from the feed point, copies LENGTH/LAYER/handle from the
    module-level ``Line`` frame (matching either edge direction), carries
    the per-group power of the downstream pylon onto each row, adds riser
    allowances where a buried cable meets a pylon that also carries another
    layer, and finally splits the journal into one sub-frame per GROUP
    (that group's power column renamed to POWER) concatenated back together.

    Fixes over the previous revision: the local DataFrame no longer shadows
    the function's own name, and the dead bare-string "comments" (mojibake
    Russian) are real comments now.

    NOTE(review): reads the module-level ``Line`` DataFrame; the layer
    literal below is a mojibake-damaged Russian name ("cable line") —
    confirm it matches the source DXF layer names.
    """
    Support = Support.set_index('N_PYLON')
    # DFS edge list gives each cable a consistent "away from feed" direction.
    journal = pd.DataFrame(data=list(nx.dfs_edges(G, Support_node)))
    journal = journal.rename(columns={0: 'N_PYLON1', 1: 'N_PYLON2'})
    # Attach line attributes and the per-group power of the far pylon.
    for index, row in journal.iterrows():
        Line_row = Line[((Line.N_PYLON1 == row['N_PYLON1']) & (Line.N_PYLON2 == row['N_PYLON2'])) |
                        ((Line.N_PYLON1 == row['N_PYLON2']) & (Line.N_PYLON2 == row['N_PYLON1']))]
        journal.loc[index, 'LENGTH'] = Line_row.LENGTH.values
        journal.loc[index, 'LAYER'] = Line_row.LAYER.values
        journal.loc[index, '//HANDLE'] = Line_row['//HANDLE'].values
        for GROUP in Support['GROUP'].unique():
            journal.loc[index, f'{GROUP}'] = Support.loc[row['N_PYLON2'], GROUP]
    # Base allowance of 1 m per cable.  Where a buried cable ends on a pylon
    # whose other lines are of a different (overhead) layer, add that pylon's
    # height so the ground-to-air transition run is included.
    journal['ADD'] = float(1)
    for index, row in journal.iterrows():
        if row.LAYER == 'ะญะ_ะะฐะฑะตะปัะฝะฐั ะปะธะฝะธั':
            incoming = list(journal[journal.N_PYLON2 == row.N_PYLON1].LAYER.unique())
            outgoing = list(journal[journal.N_PYLON1 == row.N_PYLON2].LAYER.unique())
            if incoming != [row.LAYER] and len(incoming) > 0:
                journal.loc[index, 'ADD'] = journal.loc[index, 'ADD'] + float(
                    Support.loc[row['N_PYLON1'], 'HEIGHT'])
            if outgoing != [row.LAYER] and len(outgoing) > 0:
                journal.loc[index, 'ADD'] = journal.loc[index, 'ADD'] + float(
                    Support.loc[row['N_PYLON2'], 'HEIGHT'])
    # One sub-journal per group (rows with non-zero power for that group),
    # merged back into a single frame with a POWER and a GROUP column.
    cable_list_new = pd.DataFrame()
    for GROUP in Support['GROUP'].unique():
        df = journal[journal[f'{GROUP}'] != 0].loc[:,
             ('//HANDLE', 'N_PYLON1', 'N_PYLON2', 'LENGTH', 'LAYER', 'ADD', f'{GROUP}')]
        df = df.rename(columns={f'{GROUP}': 'POWER'})
        df['GROUP'] = GROUP
        cable_list_new = pd.concat([cable_list_new, df])
    return cable_list_new
def calculation_of_voltage_drop_and_short_circuit_currents(dU_Tkz):
    """Compute voltage drop (dU) and single-phase short-circuit currents (Ikz).

    Per row of the cable journal the section drop ``dU_area`` and loop
    impedance ``Z_area`` are derived from the cable catalogue ``cable_df``;
    the externally computed feed-side offsets ``dUfromTP``/``ZfromTP`` are
    added to the sections leaving ``Support_node`` and the totals are then
    accumulated along the DFS edge order of the module-level graph ``G``
    (per group), so each section inherits the totals of the section feeding
    it.  Ikz is 0.22 kV / Z in amperes.  Returns ``(cable_list, dU_Tkz)``
    where ``cable_list`` is the formatted journal with buried/overhead/extra
    length columns.

    Fixes over the previous revision: the dead bare-string "comments"
    (mojibake Russian) are real comments, and the contract is documented.

    NOTE(review): depends on the module-level globals ``G``, ``Support_node``,
    ``dUfromTP``, ``ZfromTP`` and ``cable_df``; the two layer literals below
    are mojibake-damaged Russian names — do not alter their bytes.
    """
    dU_Tkz = dU_Tkz.reset_index(drop=True)
    # Scale lengths by 1/1000 (drawing units -> metres, presumably).
    dU_Tkz.LENGTH = dU_Tkz.LENGTH / 1000
    # Per-section drop and loop impedance from the cable catalogue.
    for index, row in dU_Tkz.iterrows():
        dU_Tkz.loc[index, 'dU_area'] = float(cable_df[cable_df['Type'] == row['LAYER']].alfa) * \
                                       row['POWER'] / 1000 * row['LENGTH'] / (10 ** 3) / \
                                       float(cable_df[cable_df['Type'] == row['LAYER']].Sf)
        dU_Tkz.loc[index, 'Z_area'] = (float(cable_df[cable_df['Type'] == row['LAYER']].Ro) +
                                       float(cable_df[cable_df['Type'] == row['LAYER']].Rf)) * \
                                      row['LENGTH'] / 1000
    # Running totals start from the per-section values...
    dU_Tkz.loc[:, 'dU'] = dU_Tkz.loc[:, 'dU_area']
    dU_Tkz.loc[:, 'Z'] = dU_Tkz.loc[:, 'Z_area']
    # ...plus the offsets from the transformer up to the first node.
    for index, row in dU_Tkz[(dU_Tkz.N_PYLON1 == Support_node)].iterrows():
        dU_Tkz.loc[index, 'dU'] = dU_Tkz.loc[index, 'dU'] + dUfromTP
        dU_Tkz.loc[index, 'Z'] = dU_Tkz.loc[index, 'Z'] + ZfromTP
    # Accumulate along the DFS edge order, per group: every section starting
    # at a node inherits the totals of the section that feeds that node.
    tree = list(nx.dfs_edges(G, Support_node))
    for gr in dU_Tkz['GROUP'].unique():
        for su in tree:
            cable_next = dU_Tkz[(dU_Tkz.N_PYLON1 == su[1]) &
                                (dU_Tkz.GROUP == gr)]
            cable_now = dU_Tkz[(dU_Tkz.N_PYLON1 == su[0]) &
                               (dU_Tkz.N_PYLON2 == su[1]) &
                               (dU_Tkz.GROUP == gr)]
            if len(cable_next.index.values) > 0:
                for index, row in cable_next.iterrows():
                    dU_Tkz.loc[index, 'dU'] = dU_Tkz.loc[index, 'dU'] + cable_now.dU.values
                    dU_Tkz.loc[index, 'Z'] = dU_Tkz.loc[index, 'Z'] + cable_now.Z.values
    # Single-phase short-circuit current, A (phase voltage 0.22 kV).
    dU_Tkz.loc[:, 'Ikz'] = 0.22 / dU_Tkz.loc[:, 'Z'] * 1000
    # Replace the drawing layer with the actual cable designation.
    for index, row in dU_Tkz.iterrows():
        dU_Tkz.loc[index, 'LAYER'] = cable_df[cable_df['Type'] == row['LAYER']].NameCable.values
    # Sort by group while preserving DFS order inside each group (via the
    # saved positional index), then drop the helper column.
    dU_Tkz = dU_Tkz.reset_index()
    dU_Tkz = dU_Tkz.sort_values(by=['GROUP', 'index'])
    dU_Tkz = dU_Tkz.reset_index(drop=True)
    dU_Tkz = dU_Tkz.drop(columns='index')
    # Rounding for the report.
    dU_Tkz.loc[:, 'dU_area':'Ikz'] = dU_Tkz.loc[:, 'dU_area':'Ikz'].round(4)
    dU_Tkz.loc[:, 'LENGTH'] = dU_Tkz.loc[:, 'LENGTH'].round(1)
    # Cable journal: split total length into buried / overhead / allowance.
    cable_list = dU_Tkz.loc[:, ['//HANDLE', 'GROUP', 'N_PYLON1', 'N_PYLON2', 'LAYER', 'LENGTH', 'ADD']]
    cable_ground = cable_df[cable_df.Type == 'ะญะ_ะะฐะฑะตะปัะฝะฐั ะปะธะฝะธั'].NameCable.iloc[0]
    cable_air = cable_df[cable_df.Type == 'ะญะ_ะะพะทะดััะฝะฐั ะปะธะฝะธั'].NameCable.iloc[0]
    cable_list['GROUND'] = cable_list[cable_list.LAYER == cable_ground].LENGTH
    cable_list['AIR'] = cable_list[cable_list.LAYER == cable_air].LENGTH
    cable_list['GROUND'] = cable_list['GROUND'].fillna(0)
    cable_list['AIR'] = cable_list['AIR'].fillna(0)
    cable_list['SUMM'] = cable_list['GROUND'] + cable_list['AIR'] + cable_list['ADD']
    return cable_list, dU_Tkz
def crossing(line, Tube, cable_list):
print('____________________________________________________________________________________')
print(colored(' ะะฐัะฐะปะพ ะฟัะธะฒัะทะบะธ ะฟะตัะตัะตัะตะบ ะบ ะปะธะฝะธัะผ', 'grey'))
line = line.set_index('//HANDLE')
line = line.loc[:, 'COORDS':]
line.columns = range(line.shape[1])
print(' ะขะฐะฑะปะธัะฐ ะดัะฑะปะธะบะฐัะพะฒ ัะฝะธะบะฐะปัะฝัั
ะฝะพะผะตัะพะฒ ะฟะตัะตัะตัะตะบ')
print(Tube[Tube.duplicated(subset=['N_CROSSING'], keep=False)])
cable_list.loc[:, 'CROSSING'] = ''
cable_list['CROSSING'] = cable_list['CROSSING'].astype('str')
unique_cross = list(Tube.N_CROSSING.sort_values().unique())
print('ะะฐะทะฒะฐะฝะธะต ัะฝะธะบะฐะปัะฝัั
ะฟะตัะตัะตัะตะบ:', unique_cross)
duplicate_cross = []
for unique_tube in Tube.WIDTH.unique():
cable_list[f'TUBE{unique_tube}'] = 0
for itube, rowtube in Tube.iterrows():
xy = rowtube.POINT.strip('(').strip(')').split()
x = float(xy[0])
y = float(xy[1])
for iline, rowline in line.iterrows():
for i in range(len(rowline.dropna()) - 1):
x1y1 = rowline[i].strip('(').strip(')').split()
x1 = float(x1y1[1])
y1 = float(x1y1[2])
x2y2 = rowline[i + 1].strip('(').strip(')').split()
x2 = float(x2y2[1])
y2 = float(x2y2[2])
col = round(float(((x - x1) * (y2 - y1)) - ((y - y1) * (x2 - x1))), -7)
if (col == 0) & ((x1 <= x <= x2) | (x2 <= x <= x1)) \
& ((y1 <= y <= y2) | (y2 <= y <= y1)):
N_PYLON1 = cable_list[cable_list['//HANDLE'] == iline].N_PYLON1.values[0]
N_PYLON2 = cable_list[cable_list['//HANDLE'] == iline].N_PYLON2.values[0]
number = len(cable_list[cable_list['//HANDLE'] == iline].loc[:, ['N_PYLON1', 'N_PYLON2']])
print('ะะปั ะฟะตัะตัะตัะบะธ ะฝะพะผะตั ', rowtube.N_CROSSING,
'ะฝะฐะนะดะตะฝะพ', number, 'ะบะฐะฑะตะปั ะผะตะถะดั', N_PYLON1, N_PYLON2)
index = cable_list[cable_list['//HANDLE'] == iline].index
cable_list.loc[index, 'CROSSING'] = \
cable_list.loc[index, 'CROSSING'] + '(' + rowtube.N_CROSSING + ')'
cable_list.loc[index, f'TUBE{rowtube.WIDTH}'] = \
cable_list.loc[index, f'TUBE{rowtube.WIDTH}'] + float(rowtube.LENGTH) / 1000
unique_cross.remove(rowtube.N_CROSSING)
print('ะะตัะตัะตัะบะธ ะฝะต ะฟัะธะฒัะทะฐะฝะฝัะต ะบ ะบะฐะฑะตะปัะผ', unique_cross)
print(colored(' ะะพะฝะตั ะฟัะธะฒัะทะบะธ ะฟะตัะตัะตัะตะบ ะบ ะปะธะฝะธัะผ', 'grey'))
return cable_list
def dU_Tkz_decor(dU_Tkz):
dU_Tkz_of = dU_Tkz.loc[:,
['GROUP', 'N_PYLON1', 'N_PYLON2', 'LENGTH', 'POWER', 'LAYER', 'dU_area', 'dU', 'Z_area', 'Z', 'Ikz']]
dU_Tkz_of = dU_Tkz_of.rename(columns={'N_PYLON1': 'ะะฐัะฐะปะพ\n ััะฐััะบะฐ',
'N_PYLON2': 'ะะพะฝะตั\n ััะฐััะบะฐ',
'LENGTH': 'ะะปะธะฝะฐ\nััะฐััะบะฐ,ะผ',
'LAYER': 'ะะฐัะบะฐ ะฟัะพะฒะพะดะฐ',
'POWER': 'ะ ั ะฝะฐ\nััะฐััะบะต,\n ะั',
'GROUP': 'ะััะฟะฟะฐ',
'dU_area': 'โU% ะฝะฐ\nัั.,ะ',
'Z_area': 'Z ะฟะตัะปะธ\nััะฐััะบะฐ, ะะผ',
'dU': 'โU%,\nะพั ะขะ,ะ',
'Z': 'Z ะฟะตัะปะธ\nะดะพ ะขะ, ะะผ',
'Ikz': 'Iะบะท 1,\n ะ', })
dU_Tkz_of.to_excel('dU ะธ ะขะะ.xlsx', sheet_name='0')
wb = openpyxl.load_workbook('dU ะธ ะขะะ.xlsx')
ws = wb.active
ws.column_dimensions['B'].width = 10
ws.column_dimensions['C'].width = 10
ws.column_dimensions['D'].width = 10
ws.column_dimensions['E'].width = 10
ws.column_dimensions['F'].width = 10
ws.column_dimensions['G'].width = 20
ws.column_dimensions['H'].width = 9
ws.column_dimensions['I'].width = 9
ws.column_dimensions['j'].width = 9
ws.column_dimensions['K'].width = 9
ws.column_dimensions['L'].width = 9
# ะััะฐะฒะฝะธะฒะฐะฝะธะต
alignment = Alignment(horizontal='center',
vertical='center',
wrap_text=True)
# ะจัะธัั
font = Font(name='Calibri', italic=True, )
# ะะฐะปะธะฒะบะฐ ัะตัะฝัั
ะณััะฟะฟ
patternfill1 = PatternFill(fill_type='solid',
start_color='00CCFFFF',
end_color='00CC99FF')
# ะะฐะปะธะฒะบะฐ ะฝะตัะตัะฝัั
ะณััะฟะฟ
patternfill2 = PatternFill(fill_type='solid',
start_color='00FFCC99',
end_color='00FFCC99')
# ะะฐะปะธะฒะบะฐ ะบัะฐัะฝัะผ
patternfill3 = PatternFill(fill_type='solid',
start_color='00FF7171',
end_color='00FF7171')
# ะัะฐะฝะธัะฐ โ1 ะดะปั ะพะฑััะฝัั
ัััะพะบ
border1 = Border(left=Side(border_style='thin', color='FF000000'),
right=Side(border_style='thin', color='FF000000'),
top=Side(border_style='thin', color='FF000000'),
bottom=Side(border_style='thin', color='FF000000'))
# ะัะฐะฝะธัะฐ โ2 ะดะปั ะบะพะฝะตัะฝัั
ัััะพะบ
border2 = Border(left=Side(border_style='thin', color='FF000000'),
right=Side(border_style='thin', color='FF000000'),
top=Side(border_style='thin', color='FF000000'),
bottom=Side(border_style='thick', color='FF000000'))
col = ws['B1':'L1']
for cells in col:
for cell in cells:
cell.alignment = alignment
cell.font = font
gr = str()
score = 0
for i in range(2, ws.max_row + 1):
if gr != ws.cell(row=i, column=2).value:
gr = ws.cell(row=i, column=2).value
score += 1
if ws.cell(row=i, column=12).value == dU_Tkz.Ikz.min():
patternfill = patternfill3
elif score % 2 == 1:
patternfill = patternfill1
elif score % 2 != 1:
patternfill = patternfill2
col = ws[f'B{i}':f'L{i}']
if ws.cell(row=i, column=4).value == ws.cell(row=i + 1, column=3).value:
for cells in col:
for cell in cells:
cell.alignment = alignment
cell.font = font
cell.fill = patternfill
cell.border = border1
else:
for cells in col:
for cell in cells:
cell.alignment = alignment
cell.font = font
cell.fill = patternfill
cell.border = border2
wb.save("dU ะธ ะขะะ.xlsx")
def cable_list_decor(cable_list):
list_name = ['GROUP', 'N_PYLON1', 'N_PYLON2', 'LAYER', 'LENGTH', 'AIR', 'GROUND', 'ADD', 'SUMM']
dict_name = {'GROUP': 'ะััะฟะฟะฐ',
'N_PYLON1': 'ะะฐัะฐะปะพ',
'N_PYLON2': 'ะะพะฝะตั',
'LAYER': 'ะะฐัะบะฐ ะฟัะพะฒะพะดะฐ',
'LENGTH': 'ะะปะธะฝะฐ ,ะผ',
'POWER': 'ะ ั ะฝะฐ\nััะฐััะบะต,\n ะั',
'AIR': 'ะะพ ะฒะพะทะดัั
ั',
'GROUND': 'ะ ััะฐะฝัะตะธ\n (ะฒ ะณะพัั. ัั.D50ะผะผ)',
'ADD': 'ะ ัะธัะต/ะพะฟะพัะต.\nะะฐ ัะฐะทะดะตะปะบั/ะฟัะพะฒะธั.',
'SUMM': 'ะัะพะณะพ ะบะฐะฑะตะปั'}
if blocks[(blocks.loc[:, 1] == 'ะขััะฑะฐ ั ัะฐะทะผะตัะพะผ')].shape[0] > 0:
list_name.append('CROSSING')
dict_name.update({'CROSSING': 'ะะพะผะตั\nะฟะตัะตัะตัะตะฝะธั'})
for i in tube.WIDTH.unique():
list_name.append(f'TUBE{i}')
dict_name.update({f'TUBE{i}': f'ะ ััะฐะฝัะตะธ\n(ะฒ ะะะ D{i}ะผะผ)'})
cable_list_of = cable_list.loc[:, list_name]
cable_list_of = cable_list_of.rename(columns=dict_name)
cable_list_of.to_excel('ะะฐะฑะตะปัะฝัะน ะถััะฝะฐะป.xlsx', sheet_name='0')
wb = openpyxl.load_workbook('ะะฐะฑะตะปัะฝัะน ะถััะฝะฐะป.xlsx')
ws = wb.active
# ะััะฐะฒะฝะธะฒะฐะฝะธะต ะธ ัะฐะทะฒะพัะฐัะธะฒะฐะฝะธะต ะฟะตัะฒะพะน ัััะพะบะธ
alignment = Alignment(horizontal='center',
vertical='center',
text_rotation=90,
wrap_text=True)
# ะััะฐะฒะฝะธะฒะฐะฝะธะต
alignment1 = Alignment(horizontal='center',
vertical='center',
wrap_text=True)
# ะจัะธัั
font1 = Font(name='Calibri', italic=True, )
# ะจัะธัั ะถะธัะฝัะน
font2 = Font(name='Calibri', italic=True, bold=True, )
# ะะฐะปะธะฒะบะฐ ัะตัะฝัั
ะณััะฟะฟ ะบะฐะฑะตะปัะฝะฐั ะปะธะฝะธั
patternfill_1gr = PatternFill(fill_type='solid',
start_color='00CCFFFF',
end_color='00CCFFFF')
# ะะฐะปะธะฒะบะฐ ัะตัะฝัั
ะณััะฟะฟ ะฒะพะทะดััะฝะฐั ะปะธะฝะธั
patternfill_1air = PatternFill(fill_type='solid',
start_color='00E6FFFF',
end_color='00E6FFFF')
# ะะฐะปะธะฒะบะฐ ะฝะตัะตัะฝัั
ะณััะฟะฟ ะบะฐะฑะตะปัะฝะฐั ะปะธะฝะธั
patternfill_2gr = PatternFill(fill_type='solid',
start_color='00FFCC99',
end_color='00FFCC99')
# ะะฐะปะธะฒะบะฐ ะฝะตัะตัะฝัั
ะณััะฟะฟ ะฒะพะทะดััะฝะฐั ะปะธะฝะธั
patternfill_2air = PatternFill(fill_type='solid',
start_color='00FFE6CD',
end_color='00FFE6CD')
border = Border(left=Side(border_style='thin', color='FF000000'),
right=Side(border_style='thin', color='FF000000'),
top=Side(border_style='thin', color='FF000000'),
bottom=Side(border_style='thin', color='FF000000'))
dims = {}
for row in ws.rows:
for cell in row:
if cell.value and cell.row > 1:
dims[cell.column_letter] = max((dims.get(cell.column_letter, 0), len(str(cell.value))))
for col, value in dims.items():
ws.column_dimensions[col].width = value + 4
for row in ws.rows:
for cell in row:
if cell.row == 1:
cell.alignment = alignment
cell.font = font1
cell.border = border
gr = str()
score = 0
for i in range(2, ws.max_row + 1):
if gr != ws.cell(row=i, column=2).value:
gr = ws.cell(row=i, column=2).value
score += 1
if ws.cell(row=i, column=5).value == cable_df[cable_df.Type == 'ะญะ_ะะพะทะดััะฝะฐั ะปะธะฝะธั'].NameCable.values:
if score % 2 == 1:
patternfill = patternfill_1air
elif score % 2 != 1:
patternfill = patternfill_2air
elif ws.cell(row=i, column=5).value == cable_df[cable_df.Type == 'ะญะ_ะะฐะฑะตะปัะฝะฐั ะปะธะฝะธั'].NameCable.values:
if score % 2 == 1:
patternfill = patternfill_1gr
elif score % 2 != 1:
patternfill = patternfill_2gr
row = ws[i]
for cell in row:
if cell.column > 1:
if cell.column == 10:
cell.font = font2
else:
cell.font = font1
while cell.column < 11:
cell.alignment = alignment1
break
cell.fill = patternfill
cell.border = border
if cell.value == 0:
cell.value = ''
wb.save("ะะฐะฑะตะปัะฝัะน ะถััะฝะฐะป.xlsx")
def Support_decor(Support):
Support = Support.drop(Support[Support.N_PYLON == Support_node].index)
Support = Support.sort_values(by=['N_PYLON'])
Support = Support.reset_index(drop=True)
list_name = ['N_PYLON', 'GROUP', 'SUPPORT_TYPE', 'BRACKET', 'FOUND_TYPE', 'LIGHT']
dict_name = {'GROUP': 'ะััะฟะฟะฐ',
'N_PYLON': 'โ ะะฟะพัั',
'SUPPORT_TYPE': 'ะขะธะฟ ะพะฟะพัั',
'BRACKET': 'ะัะพะฝััะตะนะฝ',
'FOUND_TYPE': 'ะะฐะบะปะฐะดะฝะฐั\nะดะตัะฐะปั',
'LIGHT': 'ะกะฒะตัะธะปัะฝะธะบะธ',}
Support = Support.loc[:, list_name]
Support = Support.rename(columns=dict_name)
Support.to_excel('ะะตะดะพะผะพััั.xlsx', sheet_name='0')
wb = openpyxl.load_workbook('ะะตะดะพะผะพััั.xlsx')
ws = wb.active
# ะััะฐะฒะฝะธะฒะฐะฝะธะต
alignment1 = Alignment(horizontal='center',
vertical='center',
wrap_text=True)
alignment2 = Alignment(horizontal='left',
vertical='center',
wrap_text=True)
# ะจัะธัั
font1 = Font(name='Calibri', italic=True, )
# ะจัะธัั ะถะธัะฝัะน
font2 = Font(name='Calibri', italic=True, bold=True, )
# ะะฐะปะธะฒะบะฐ ัะตัะฝัั
ะณััะฟะฟ ะบะฐะฑะตะปัะฝะฐั ะปะธะฝะธั
patternfill_1gr = PatternFill(fill_type='solid',
start_color='00CCFFFF',
end_color='00CCFFFF')
# ะะฐะปะธะฒะบะฐ ัะตัะฝัั
ะณััะฟะฟ ะฒะพะทะดััะฝะฐั ะปะธะฝะธั
patternfill_1air = PatternFill(fill_type='solid',
start_color='00E6FFFF',
end_color='00E6FFFF')
# ะะฐะปะธะฒะบะฐ ะฝะตัะตัะฝัั
ะณััะฟะฟ ะบะฐะฑะตะปัะฝะฐั ะปะธะฝะธั
patternfill_2gr = PatternFill(fill_type='solid',
start_color='00FFCC99',
end_color='00FFCC99')
# ะะฐะปะธะฒะบะฐ ะฝะตัะตัะฝัั
ะณััะฟะฟ ะฒะพะทะดััะฝะฐั ะปะธะฝะธั
patternfill_2air = PatternFill(fill_type='solid',
start_color='00FFE6CD',
end_color='00FFE6CD')
border = Border(left=Side(border_style='thin', color='FF000000'),
right=Side(border_style='thin', color='FF000000'),
top=Side(border_style='thin', color='FF000000'),
bottom=Side(border_style='thin', color='FF000000'))
dims = {}
for row in ws.rows:
for cell in row:
if cell.value and cell.row > 1:
dims[cell.column_letter] = max((dims.get(cell.column_letter, 0), len(str(cell.value))))
for col, value in dims.items():
ws.column_dimensions[col].width = value + 4
for row in ws.rows:
for cell in row:
if cell.row == 1:
cell.alignment = alignment1
cell.font = font2
cell.border = border
if cell.row > 1:
if cell.column > 1:
cell.alignment = alignment1
cell.font = font1
cell.border = border
if cell.column == 7:
cell.alignment = alignment2
wb.save('ะะตะดะพะผะพััั.xlsx')
# ะกะพะทะดะฐะฝะธะต ะดะฒัั
df ะฟะพ ะฒัะณััะถะตะฝะฝัะผ ัั
ั ัะฐะนะปะฐะผ
blocks = open('OUT_block.txt')
blocks = of_TXT_in_df(blocks)
line = open('OUT_line.txt')
line = of_TXT_in_df(line)
# ะะฑัะฐะฑะพัะบะฐ df, ัะฐะทะดะตะปะตะฝะธะต ะฝะฐ df Support,light,Line
# Line - ะบะฐะฑะตะปัะฝัะน ะถััะฝะฐะป
# Support - ะะตะดะพะผะพััั ะพะฟะพั
INDEXblocks = blocks[(blocks[0] == '//HANDLE')].index
Support = blocks[(blocks.loc[:, 1] == 'ะพะฟะพัะฐ_ะฟัะพะผะตะถััะพัะฝะฐั 0.4')].reset_index(drop=True)
Support.columns = blocks.loc[INDEXblocks[0]]
Support = Support.loc[:, ['NAME_BLOCK', 'POINT', 'N_PYLON', 'GROUP', 'HEIGHT']]
Support['HEIGHT'] = Support['HEIGHT'].astype('float')
# ัะพะทะดะฐะฝะธะต df ะฟะพ ัะฒะตัะธะปัะฝะธะบะฐะผ
light = blocks[(blocks.loc[:, 1] == 'ะญะ_ะะฟะพัะฐ_1_ัะฒะตั')].reset_index(drop=True)
light.columns = blocks.loc[INDEXblocks[1]]
light = light.loc[:, ['NAME_BLOCK', 'POINT', 'POWER', 'Angle1']]
# ัะพะทะดะฐะฝะธะต df ะฟะพ ะฟะตัะตัะตัะบะฐะผ
if blocks[(blocks.loc[:, 1] == 'ะขััะฑะฐ ั ัะฐะทะผะตัะพะผ')].shape[0] > 0:
tube = blocks[(blocks.loc[:, 1] == 'ะขััะฑะฐ ั ัะฐะทะผะตัะพะผ')].reset_index(drop=True)
tube.columns = blocks.loc[INDEXblocks[2]]
tube = tube.loc[:, ['NAME_BLOCK', 'POINT', 'WIDTH', 'LENGTH', 'N_CROSSING']]
# ัะพะทะดะฐะฝะธะต df ะฟะพ ะปะธะฝะธัะผ
INDEXline = line[(line[0] == '//HANDLE')].index
line.columns = line.loc[INDEXline[0]]
line = line.loc[1:, :]
Line = line[(line['LAYER'] == 'ะญะ_ะะพะทะดััะฝะฐั ะปะธะฝะธั') | (line['LAYER'] == 'ะญะ_ะะฐะฑะตะปัะฝะฐั ะปะธะฝะธั')].reset_index(drop=True)
light = points(light)
light = light.assign(POWER=light.POWER.str.replace(r'ะั$', ''))
light.loc[:, ['POWER', 'Angle1']] = light.loc[:, ['POWER', 'Angle1']].astype('float')
light.Angle1 = light.Angle1 / math.pi * 180
# ะะฐัะฐะปะฐัั ะพะฑัะฐะฑะพัะบะฐ(ัะพัะผะฐัะธัะพะฒะฐะฝะธะต ะบะพะพัะดะธะฝะฐั)
Support = points(Support)
Line = points_line1(Line)
Support.reset_index.__doc__
Support = light_in_support(Support, light)
Line = cable_in_support(Line, Support)
# ะคะพัะผะธัะพะฒะฐะฝะธะต ะฟะตัะฒะพะณะพ ะณัะฐัะฐ
G = nx.from_pandas_edgelist(Line, 'N_PYLON1', 'N_PYLON2', ['LENGTH', 'LAYER'], create_using=nx.MultiGraph)
# ะ ะฐะทะดะตะปะตะฝะธะต ะผะพัะฝะพััะธ ะฝะฐ ะพะฟะพัะฐั
ะฟะพ ะณััะฟะฟะฐะผ
Support = support_set_power(G, Support)
# ะคะพัะผะธัะพะฒะฐะฝะธะต ะบะฐะฑะตะปัะฝะพะณะพ ะปะธััะฐ ะฝะฐ ะพัะฝะพะฒะต ัะฟะธัะบะฐ ัะตะฑะตั ะฐะปะณะพัะธัะผะฐ
# ะฟะพะธัะบะฐ ะฒ ะณััะฑะธะฝั.
cable_list = cable_list(G, Support, Support_node)
# ะคะพัะผะธัะพะฒะฐะฝะธะต ะฒัะพัะพะณะพ ะผัะปััะธะณัะฐัะฐ ะฒ ะบะพัะพัะพะผ ัััะตะฝั ะณััะฟะฟั
G = nx.from_pandas_edgelist(cable_list, 'N_PYLON1', 'N_PYLON2', ['LENGTH', 'LAYER'], create_using=nx.MultiGraph)
# ะ ะฐััะตั ะฒะตะบัะพัะฝะพะณะพ ะฝะฐััะถะตะฝะธั ะพะฟะพัั ั ััะตัะพะผ ะดัะฑะปะธัะพะฒะฐะฝะธะต ะบะฐะฑะตะปะตะน ัะฐะทะฝัั
ะณััะฟะฟ
Support = support_selection(G, Support)
cable_list, dU_Tkz = calculation_of_voltage_drop_and_short_circuit_currents(cable_list)
if blocks[(blocks.loc[:, 1] == 'ะขััะฑะฐ ั ัะฐะทะผะตัะพะผ')].shape[0] > 0:
cable_list = crossing(line, tube, cable_list)
draw_graph1(G, Support)
dU_Tkz_decor(dU_Tkz)
cable_list_decor(cable_list)
Support_decor(Support)
print(colored('ะะะะะ ะะะะฏะฎ ะะซะะ ะกะชะญะะะะะะะะะ ะะะะะะะ ะะะะะ, ะฟะพัะปะต ัะฐะบะพะน ัะฐะฑะพัั ะผะพะถะฝะพ ะธ ะพัะดะพั
ะฝััั', 'green'))
| [
"aleksandrov.fa95@gmail.com"
] | aleksandrov.fa95@gmail.com |
1db004b2684733f7fc4fd823f8e0082adda8bd8b | e6502a8ffc6582fb3a3cc8e17b677a98b0e090a5 | /pages/Page_wk_template.py | b576b8e5eda10676bd2145366056136f3a651db3 | [] | no_license | jianviper/SJY_PO | fcf04151323ef23b5873da9f79a1959107241256 | 05960bdfde00023f39c2d6a58b5eac4c524c9cb9 | refs/heads/master | 2021-05-19T18:19:42.521383 | 2021-03-05T09:36:44 | 2021-03-05T09:36:44 | 252,061,673 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,229 | py | #!/usr/bin/env python
#coding:utf-8
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from common.BasePage import BasePage
from parts.tool_page import wait_tips
from time import sleep
'''
Create on 2020-3-18
author:linjian
summary:ๆจก็็ๅ
็ด ๅฏน่ฑก
'''
class TemplatePage(BasePage):
    """Page object for the template picker of the online editor.

    Locator attributes are ``(By, value)`` tuples unpacked straight into
    ``find_element``; the methods wrap the user flows of opening the
    template panel, searching, applying a template and submitting one of
    the user's own.
    """

    # ---- locators -------------------------------------------------------
    header_loc = (By.CSS_SELECTOR, '.header.ant-layout-header')
    tool_loc = (By.CLASS_NAME, 'work_tool')
    tool_temp_loc = (By.CSS_SELECTOR, '.work_tool>div:nth-child(11)')
    el_tempImg_loc = (By.CSS_SELECTOR, '.content.flex_bteween>div:first-child>div:first-child')
    el_divs_loc = (By.CSS_SELECTOR, '.work_element')
    el_searchInput_loc = (By.CSS_SELECTOR, '.searchInput.form-line')
    el_resultName_loc = (By.CLASS_NAME, 'name')
    el_secondtext_loc = (By.CLASS_NAME, 'secondtext')
    el_tempName_loc = (By.CSS_SELECTOR, '.no-input.data-name>.searchInput.form-line')
    el_job_loc = (By.CSS_SELECTOR, '.no-input.data-job>.searchInput.form-line')
    el_notfind_loc = (By.CSS_SELECTOR, '.helpOut.pointer_cursor.flex_bteween')
    el_warnTitle_loc = (By.CLASS_NAME, 'warn_title')
    el_tempMenu_loc = (By.CSS_SELECTOR, '.workTempalte.default_cursor.flex_bteween.tpl_menu')
    btn_useTemp_loc = (By.CSS_SELECTOR, '.content.flex_bteween>div:first-child>.sure-btn.is-plain.use-tpl')
    btn_search_loc = (By.CLASS_NAME, 'iconsearch')
    btn_submit_loc = (By.CSS_SELECTOR, '.sure-btn.is-plain.submit-info')
    btn_submit2_loc = (By.CSS_SELECTOR, '.sure-btn.submit-info')
    lastProjectName_loc = (By.CSS_SELECTOR, '.home_content>:last-child>.item_text>.item_title')

    # ---- actions --------------------------------------------------------
    def open(self):
        """Navigate to the page under test (overrides the BasePage hook)."""
        self._open(self.baseurl)

    def choose_template(self):
        """Switch the toolbox to the template panel."""
        self.find_element(*self.tool_temp_loc).click()
        sleep(1)

    def add_temp(self):
        """Search for the SWOT template, apply it, then click an empty spot."""
        self.choose_template()
        self.do_search('SWOT')
        # Hover the first result so its "use template" button appears.
        ActionChains(self.driver).move_to_element(
            self.find_element(*self.el_tempImg_loc)).perform()
        sleep(1)
        self.find_element(*self.btn_useTemp_loc).click()
        sleep(1.5)
        # Click a blank area of the canvas to dismiss the selection.
        ActionChains(self.driver).move_to_element_with_offset(
            self.find_element(*self.header_loc), 200, 200).click().perform()
        sleep(1)

    def do_search(self, text):
        """Type *text* into the search box and trigger the search."""
        sleep(1)
        self.find_element(*self.el_searchInput_loc).send_keys(text)
        ActionChains(self.driver).click(self.find_element(*self.btn_search_loc)).perform()
        sleep(3)

    def submit_myTemp(self, name, job, sb=1):
        """Fill in the template name and job fields, then submit.

        ``sb`` selects the submit button: 2 presses the secondary button,
        anything else presses the primary one.
        """
        self.find_element(*self.el_tempName_loc).send_keys(name)
        self.find_element(*self.el_job_loc).send_keys(job)
        target = self.btn_submit2_loc if sb == 2 else self.btn_submit_loc
        self.find_element(*target).click()
        wait_tips(self)

    def click_notfind(self):
        """Open the "template not found" help entry."""
        self.find_element(*self.el_notfind_loc).click()
        sleep(2)

    def click_submit(self):
        """Press the primary submit button."""
        self.find_element(*self.btn_submit_loc).click()
        sleep(1)
| [
"linjianviper@vip.qq.com"
] | linjianviper@vip.qq.com |
02530b5f72eb0863a34493fd9ecb943619e65782 | 8d19f798ae6a623e2f4741c1ab51d8ea46872af3 | /geekshop/authapp/forms.py | 7167d74293d53c4483cf1397f7e431c42a09e577 | [] | no_license | dmitry-vokhmin/geekshop | 6a8b347b546513850c67927934a06455d55f46d9 | 05b8df8083091664fc0cc3cb01ebe9de805c0f40 | refs/heads/main | 2023-07-09T10:50:47.636986 | 2021-08-17T17:06:04 | 2021-08-17T17:06:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,533 | py | import random
import hashlib
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm, UserChangeForm
from django import forms
from .models import ShopUser, ShopUserProfile
class ShopUserLoginForm(AuthenticationForm):
    """Login form that renders every field with the Bootstrap css class."""

    class Meta:
        model = ShopUser
        fields = ("username", "password")

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Tag each widget so the templates pick up Bootstrap styling.
        for field in self.fields.values():
            field.widget.attrs["class"] = "form-control"
class ShopUserRegisterForm(UserCreationForm):
    """Registration form: creates an inactive user with an activation key.

    The user stays inactive until the key (sent by e-mail elsewhere) is
    confirmed.
    """

    class Meta:
        model = ShopUser
        fields = ("username", "first_name", "password1", "password2", "email", "age", "avatar")

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Bootstrap styling and no default help texts on any field.
        for field_name, field in self.fields.items():
            field.widget.attrs["class"] = "form-control"
            field.help_text = ""

    def clean_age(self):
        """Reject registrations of users under 18."""
        data = self.cleaned_data["age"]
        if data < 18:
            raise forms.ValidationError("You are young!")
        return data

    def save(self, commit=True):
        """Create the user inactive and attach a random activation key.

        Bug fix: the previous revision ignored ``commit`` — it called
        ``super().save(commit=True)`` and then ``user.save()`` again,
        writing the row twice and making ``save(commit=False)`` impossible.
        Now the instance is built unsaved, mutated, and persisted at most
        once, honouring the standard ModelForm ``commit`` contract.
        """
        user = super().save(commit=False)
        user.is_active = False
        salt = hashlib.sha1(str(random.random()).encode("utf8")).hexdigest()[:6]
        user.activation_key = hashlib.sha1((user.email + salt).encode("utf8")).hexdigest()
        if commit:
            user.save()
        return user
class ShopUserEditForm(UserChangeForm):
    """Account-editing form with Bootstrap styling and a hidden password field."""

    class Meta:
        model = ShopUser
        fields = ("username", "first_name", "email", "age", "avatar", "password")

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for name, field in self.fields.items():
            field.widget.attrs["class"] = "form-control"
            field.help_text = ""
            if name == "password":
                # The password hash is managed elsewhere; keep it off the page.
                field.widget = forms.HiddenInput()

    def clean_age(self):
        """Reject ages below 18."""
        age = self.cleaned_data["age"]
        if age < 18:
            raise forms.ValidationError("You are young!")
        return age
class ShopUserProfileEditForm(UserChangeForm):
    """Edit form for the extra profile record (tag line, bio, gender)."""

    class Meta:
        model = ShopUserProfile
        fields = ("tag_line", "about_me", "gender")

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for name, field in self.fields.items():
            field.widget.attrs["class"] = "form-control"
            field.help_text = ""
            if name == "password":
                # UserChangeForm injects a password field; keep it out of the UI.
                field.widget = forms.HiddenInput()
| [
"boxdima1@gmail.com"
] | boxdima1@gmail.com |
2f5a0fdf8f81ef767fc19d5a34d2bbaeb635d01d | 646f2a135dc8ba97b2fc7436194dcab2a8f0ae8c | /autocomplete_light/channel/base.py | 8ba3f984df5a1c0a22922c1c42937c3567e22822 | [
"MIT"
] | permissive | pix0r/django-autocomplete-light | 9f55252d4aa4fb8a28471772a98e793b171cdb0c | f1026dfe49934065206ca1fdae46289c68e8c231 | refs/heads/master | 2020-12-30T18:50:36.304623 | 2012-05-30T09:39:24 | 2012-05-30T09:39:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,932 | py | """
The channel.base module provides a channel class which you can extend to make
your own channel. It also serves as default channel class.
"""
from django.core import urlresolvers
from django.template import loader
from django.utils.translation import ugettext_lazy as _
__all__ = ('ChannelBase',)
class ChannelBase(object):
    """
    A basic implementation of a channel, which should fit most use cases.

    Attributes:

    model
        The model class this channel serves. If None, a new class will be
        created in registry.register, and the model attribute will be set in
        that subclass. So you probably don't need to worry about it, just know
        that it's there for you to use.

    result_template
        The template to use in result_as_html method, to render a single
        autocomplete suggestion. By default, it is
        autocomplete_light/channelname/result.html or
        autocomplete_light/result.html.

    autocomplete_template
        The template to use in render_autocomplete method, to render the
        autocomplete box. By default, it is
        autocomplete_light/channelname/autocomplete.html or
        autocomplete_light/autocomplete.html.

    search_field
        The name of the field that the default implementation of query_filter
        uses. Default is 'name'.

    limit_results
        The number of results that this channel should return. For example, if
        query_filter returns 50 results and that limit_results is 20, then the
        first 20 of 50 results will be rendered. Default is 20.

    bootstrap
        The name of the bootstrap kind. By default, deck.js will only
        initialize decks for wrappers that have data-bootstrap="normal". If
        you want to implement your own bootstrapping logic in javascript,
        then you set bootstrap to anything that is not "normal". Default is
        'normal'.

    placeholder
        The initial text in the autocomplete text input.
    """
    model = None
    search_field = 'name'
    limit_results = 20
    bootstrap = 'normal'
    placeholder = _(u'type some text to search in this autocomplete')
    result_template = None
    autocomplete_template = None

    def __init__(self):
        """
        Set result_template and autocomplete_template if necessary.

        Both fall back to a list of candidate template names so that a
        per-channel template overrides the generic one when it exists.
        """
        name = self.__class__.__name__.lower()
        if not self.result_template:
            self.result_template = [
                'autocomplete_light/%s/result.html' % name,
                'autocomplete_light/result.html',
            ]
        if not self.autocomplete_template:
            self.autocomplete_template = [
                'autocomplete_light/%s/autocomplete.html' % name,
                'autocomplete_light/autocomplete.html',
            ]
        # Populated by init_for_request() when serving an autocomplete view.
        self.request = None

    def get_absolute_url(self):
        """
        Return the absolute url for this channel, using
        autocomplete_light_channel url
        """
        return urlresolvers.reverse('autocomplete_light_channel', args=(
            self.__class__.__name__,))

    def as_dict(self):
        """
        Return a dict of variables for this channel, it is used by javascript.
        """
        return {
            'url': self.get_absolute_url(),
            'name': self.__class__.__name__
        }

    def init_for_request(self, request, *args, **kwargs):
        """
        Set self.request, self.args and self.kwargs, useful in query_filter.

        Note: self.args / self.kwargs only exist after this call.
        """
        self.request = request
        self.args = args
        self.kwargs = kwargs

    def query_filter(self, results):
        """
        Filter results using the request.

        By default this will expect results to be a queryset, and will filter
        it with self.search_field + '__icontains' = self.request.GET['q']
        (no filtering happens when 'q' is absent or empty).
        """
        q = self.request.GET.get('q', None)

        if q:
            kwargs = {"%s__icontains" % self.search_field: q}
            results = results.filter(**kwargs)

        return results

    def values_filter(self, results, values):
        """
        Filter results based on a list of values.

        By default this will expect values to be an iterable of model ids, and
        results to be a queryset. Thus, it will return a queryset where pks are
        in values.
        """
        results = results.filter(pk__in=values)
        return results

    def get_queryset(self):
        """
        Return a queryset for the channel model.
        """
        return self.model.objects.all()

    def get_results(self, values=None):
        """
        Return an iterable of result to display in the autocomplete box.

        By default, it will:

        - call self.get_queryset(),
        - call values_filter() if values is not None,
        - call query_filter() if self.request is set,
        - call order_results(),
        - return a slice from offset 0 to self.limit_results.
        """
        results = self.get_queryset()

        if values is not None:
            # used by the widget to prerender existing values
            results = self.values_filter(results, values)

        elif self.request:
            # used by the autocomplete
            results = self.query_filter(results)

        return self.order_results(results)[0:self.limit_results]

    def order_results(self, results):
        """
        Return the result list after ordering.

        By default, it expects results to be a queryset and order it by
        search_field.
        """
        return results.order_by(self.search_field).distinct()

    def are_valid(self, values):
        """
        Return True if the values are valid.

        By default, expect values to be a list of object ids, return True if
        all the ids are found in the queryset.

        NOTE(review): this compares counts, so duplicate ids in *values*
        would make a valid selection look invalid — confirm callers
        de-duplicate.
        """
        return self.get_queryset().filter(pk__in=values).count() == len(values)

    def result_as_html(self, result, extra_context=None):
        """
        Return the html representation of a result for display in the deck
        and autocomplete box.

        By default, render result_template with channel and result in the
        context.
        """
        context = {
            'channel': self,
            'result': result,
            'value': self.result_as_value(result),
        }
        context.update(extra_context or {})
        return loader.render_to_string(self.result_template, context)

    def result_as_value(self, result):
        """
        Return the value that should be set to the widget field for a result.

        By default, return result.pk.
        """
        return result.pk

    def render_autocomplete(self):
        """
        Render the autocomplete suggestion box.

        By default, render self.autocomplete_template with the channel in the
        context.
        """
        return loader.render_to_string(self.autocomplete_template, {
            'channel': self,
        })
| [
"jamespic@gmail.com"
] | jamespic@gmail.com |
070eb0eb248d00b0725d085b1937cb7a5da23da2 | 4351c4eed4c5b4ab0d477a989c96c0a0cfeda1e5 | /omnicanvas/canvas.py | 1dc679a7bd73f1dd22fd0d3b27ab18dc75d8b334 | [
"MIT"
] | permissive | samirelanduk/omnicanvas | b601eb5bbeb868211cdf195ad4168ea8d0ea3c25 | edc22ec802da6188759fbbbb30f0dd44aabb3a7a | refs/heads/master | 2020-12-29T02:37:48.896323 | 2017-01-22T21:40:43 | 2017-01-22T21:40:43 | 53,693,336 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,171 | py | """This module contains the main Canvas class."""
from .color import process_color
from . import graphics
from . import svg
class Canvas:
    """A backdrop on which other :py:class:`.Graphic` objects are painted.

    :param width: The canvas's width in pixels.
    :param height: The canvas's height in pixels.
    :param background_color: The canvas's background colour - the default is\
    white"""

    def __init__(self, width, height, background_color=None):
        if isinstance(width, float):
            width = round(width)
        if not isinstance(width, int):
            raise TypeError("Width must be numeric, not '%s'" % width)
        self._width = width

        if isinstance(height, float):
            height = round(height)
        if not isinstance(height, int):
            raise TypeError("Height must be numeric, not '%s'" % height)
        self._height = height

        if background_color is None:
            self._background_color = None
        else:
            self._background_color = process_color(background_color)

        self._graphics = []

    def __repr__(self):
        return "<Canvas %iร%i (%i Graphics)>" % (
            self._width, self._height, len(self._graphics)
        )

    def width(self, width=None):
        """The canvas's width in pixels. Passing a value will update the width
        property.

        :param width: If given, the canvas's width will be set to this.
        :rtype: ``int``"""

        if width is None:
            return self._width
        else:
            if isinstance(width, float):
                width = round(width)
            if not isinstance(width, int):
                raise TypeError("Width must be numeric, not '%s'" % width)
            self._width = width

    def height(self, height=None):
        """The canvas's height in pixels. Passing a value will update the height
        property.

        :param height: If given, the canvas's height will be set to this.
        :rtype: ``int``"""

        if height is None:
            return self._height
        else:
            if isinstance(height, float):
                height = round(height)
            if not isinstance(height, int):
                raise TypeError("Height must be numeric, not '%s'" % height)
            self._height = height

    def background_color(self, background_color=None):
        """The canvas's background colour, as a hex string. Passing a value will
        update the background_color property (as a hex string).

        :param str background_color: If given, the canvas's background_color \
        will be set to this.
        :rtype: ``str``"""

        if background_color is None:
            return self._background_color
        else:
            self._background_color = process_color(background_color)

    def graphics(self):
        """A list of all the :py:class:`.Graphic` objects on this canvas.

        :rtype: ``list``"""

        return list(self._graphics)

    def get_graphic_by_name(self, name):
        """Searches the canvas's :py:class:`.Graphic` objects and returns the
        first one with a matching name. Returns ``None`` if there are no
        matches.

        :param str name: The name to search by.
        :rtype: :py:class:`.Graphic` or ``None``"""

        if not isinstance(name, str):
            raise TypeError(
                "Can only search for str name, not '%s'" % str(name)
            )
        for graphic in self.graphics():
            if graphic.name() == name:
                return graphic

    def get_graphics_by_name(self, name):
        """Searches the canvas's :py:class:`.Graphic` objects and returns all
        the ones with a matching name. Returns an empty list if there are no
        matches.

        :param str name: The name to search by.
        :returns: ``list`` of :py:class:`.Graphic`"""

        if not isinstance(name, str):
            raise TypeError(
                "Can only search for str name, not '%s'" % str(name)
            )
        return [g for g in self.graphics() if g.name() == name]

    def move_graphic_forward(self, graphic):
        """Moves a :py:class:`.Graphic` forward - that is, closer to the viewer.
        This method will make the :py:class:`.Graphic` more visible if it was
        occluded.

        :param Graphic graphic: The :py:class:`.Graphic` to move forward."""

        if not isinstance(graphic, graphics.Graphic):
            raise TypeError("%s is not a Graphic" % str(graphic))
        if graphic is not self.graphics()[-1]:
            # list.index raises ValueError if the graphic is not on this
            # canvas, so no explicit membership check is needed.
            index = self.graphics().index(graphic)
            self._graphics[index], self._graphics[index + 1] = (
                self._graphics[index + 1], self._graphics[index]
            )

    def move_graphic_backward(self, graphic):
        """Shifts a :py:class:`.Graphic` backward - away from the viewer. This
        method will hide the :py:class:`.Graphic` behind others.

        :param Graphic graphic: The :py:class:`.Graphic` to move backward."""

        if not isinstance(graphic, graphics.Graphic):
            raise TypeError("%s is not a Graphic" % str(graphic))
        if graphic is not self.graphics()[0]:
            # BUGFIX: the previous 'if index == -1: raise ValueError' branch
            # was unreachable - list.index() raises ValueError itself when
            # the graphic is absent, it never returns -1.
            index = self.graphics().index(graphic)
            self._graphics[index], self._graphics[index - 1] = (
                self._graphics[index - 1], self._graphics[index]
            )

    def add_rectangle(self, *args, **kwargs):
        """Adds a :py:class:`.Rectangle` to the canvas.

        :param x: The x-coordinate of the Rectangle's upper left corner.
        :param y: The y-coordinate of the Rectangle's upper left corner.
        :param width: The Rectangle's width.
        :param height: The Rectangle's height.
        :param str fill_color: The Rectangle's interior colour.
        :param opacity: The degree of transparency, from 0 to 1 (0 being\
        invisible).
        :param line_width: The width of the edge of the Rectangle in pixels.
        :param str line_style: The pattern of the edges. Acceptable values are\
        ``-`` (default), ``..`` (dotted) or ``--`` (dashed).
        :param str line_color: The colour of the edge.
        :param tuple rotation: Any rotation to be applied, in the format\
        (x of rotation point, y of rotation point, angle).
        :param dict data: Any data to be associated with the Rectangle.
        :rtype: :py:class:`.Rectangle`"""

        self._graphics.append(graphics.Rectangle(*args, **kwargs))
        return self._graphics[-1]

    def add_line(self, *args, **kwargs):
        """Adds a :py:class:`.Line` to the canvas.

        :param x1: The x-coordinate of the Line's start point.
        :param y1: The y-coordinate of the Line's start point.
        :param x2: The x-coordinate of the Line's end point.
        :param y2: The y-coordinate of the Line's end point.
        :param line_width: The width of the Line in pixels.
        :param str line_style: The pattern of the Line. Acceptable values are\
        ``-`` (default), ``..`` (dotted) or ``--`` (dashed).
        :param str line_color: The colour of the Line.
        :param tuple rotation: Any rotation to be applied, in the format\
        (x of rotation point, y of rotation point, angle).
        :param dict data: Any data to be associated with the Line.
        :rtype: :py:class:`.Line`"""

        self._graphics.append(graphics.Line(*args, **kwargs))
        return self._graphics[-1]

    def add_oval(self, *args, **kwargs):
        """Adds a :py:class:`.Oval` to the canvas.

        :param x: The x-coordinate of the Oval's bounding rectangle upper left corner.
        :param y: The y-coordinate of the Oval's bounding rectangle upper left corner.
        :param width: The bounding rectangle's width.
        :param height: The bounding rectangle's height.
        :param str fill_color: The Oval's interior colour.
        :param opacity: The degree of transparency, from 0 to 1 (0 being\
        invisible).
        :param line_width: The width of the edge of the Oval in pixels.
        :param str line_style: The pattern of the edges. Acceptable values are\
        ``-`` (default), ``..`` (dotted) or ``--`` (dashed).
        :param str line_color: The colour of the edge.
        :param tuple rotation: Any rotation to be applied, in the format\
        (x of rotation point, y of rotation point, angle).
        :param dict data: Any data to be associated with the Oval.
        :rtype: :py:class:`.Oval`"""

        self._graphics.append(graphics.Oval(*args, **kwargs))
        return self._graphics[-1]

    def add_polygon(self, *args, **kwargs):
        """Adds a :py:class:`.Polygon` to the canvas.

        :param \*points: The alternating x and y values of the Polygon's\
        corners.
        :param str fill_color: The Polygon's interior colour.
        :param opacity: The degree of transparency, from 0 to 1 (0 being\
        invisible).
        :param line_width: The width of the edge of the Polygon in pixels.
        :param str line_style: The pattern of the edges. Acceptable values are\
        ``-`` (default), ``..`` (dotted) or ``--`` (dashed).
        :param str line_color: The colour of the edge.
        :param tuple rotation: Any rotation to be applied, in the format\
        (x of rotation point, y of rotation point, angle).
        :param dict data: Any data to be associated with the Polygon.
        :rtype: :py:class:`.Polygon`"""

        self._graphics.append(graphics.Polygon(*args, **kwargs))
        return self._graphics[-1]

    def add_text(self, *args, **kwargs):
        """Adds a :py:class:`.Text` to the canvas.

        :param x: The Text's x location.
        :param y: The Text's y location.
        :param str text: The text to display.
        :param font_size: The font size of the Text when displayed.
        :param horizontal_align: The horizontal alignment of the Text. Acceptable\
        values are ``left``, ``center`` (default) and ``right``.
        :param vertical_align: The vertical alignment of the Text. Acceptable\
        values are ``top``, ``middle`` (default) and ``bottom``.
        :param str fill_color: Defaults to '#FFFFFF'.
        :param opacity: The degree of transparency, from 0 to 1 (0 being\
        invisible).
        :param line_width: Defaults to 0.
        :param str line_style: The line pattern. Acceptable values are\
        ``-`` (default), ``..`` (dotted) or ``--`` (dashed).
        :param str line_color: Defaults to '#000000'.
        :param tuple rotation: Any rotation to be applied, in the format\
        (x of rotation point, y of rotation point, angle), in degrees.
        :param dict data: Any data to be associated with the Text.
        :rtype: :py:class:`.Text`"""

        self._graphics.append(graphics.Text(*args, **kwargs))
        return self._graphics[-1]

    def add_polyline(self, *args, **kwargs):
        """Adds a :py:class:`.Polyline` to the canvas.

        :param \*points: The alternating x and y values of the Polyline's\
        corners.
        :param line_width: The width of the edge of the Polyline in pixels.
        :param str line_style: The pattern of the edges. Acceptable values are\
        ``-`` (default), ``..`` (dotted) or ``--`` (dashed).
        :param str line_color: The colour of the edge.
        :param tuple rotation: Any rotation to be applied, in the format\
        (x of rotation point, y of rotation point, angle).
        :param dict data: Any data to be associated with the Polyline.
        :rtype: :py:class:`.Polyline`"""

        self._graphics.append(graphics.Polyline(*args, **kwargs))
        return self._graphics[-1]

    def save(self, path):
        """Saves the canvas to file as an SVG file.

        :param str path: The location and filename to save to."""

        # SVG defaults to UTF-8; relying on the platform default encoding
        # would crash on non-ASCII text graphics on some systems.
        with open(path, "w", encoding="utf-8") as f:
            f.write(self.to_svg())

    to_svg = svg.generate_canvas_svg
    """Returns the SVG text of the canvas.

    Any ``data`` attributes of the Graphics contained will be rendered as SVG
    attributes.

    :rtype: ``str``"""
| [
"sam.ireland.uk@gmail.com"
] | sam.ireland.uk@gmail.com |
21f5f6578d42a4cff0361b1514e27e689c8da7af | bc3e12c93dba2d05e7d60fd4090556caa99996ce | /Gerar_Mapas_Matemรกtica.py | 2026a3e749521f1e08a75bca47196d2e6a91a02c | [] | no_license | Zombrooc/Python-Scripts | 29421a109a2051d124a635c92ba171befb6dfb12 | 852dab8f9f378b3c6ee06dbbbe48adf776a2a88c | refs/heads/main | 2023-07-08T16:40:50.582717 | 2021-08-10T11:41:55 | 2021-08-10T11:41:55 | 322,958,204 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,265 | py | from random import randrange
# The four available colours (Verde, Preto, Azul, Branco).
CORES = ('V', 'P', 'A', 'B')


def gerar_quadrados():
    """Return every valid 2x2 map colouring, in random order.

    The map layout is::

        c0 | c1
        -------
        c2 | c3

    and the original constraints require horizontally / vertically
    adjacent cells to differ: c1 != c0, c2 != c0, c3 != c1, c3 != c2.

    BUGFIX: the original drew colours at random inside ``while j < 999``,
    advancing ``j`` only when a *new* combination appeared, and compared
    loop counters with ``is`` (identity) instead of ``==``. Only 84 valid
    combinations exist, so the loop could never reach 999 and would spin
    forever. Enumerating the finite set and shuffling it keeps the random
    ordering while guaranteeing termination.

    :returns: list of 4-element colour lists [c0, c1, c2, c3].
    """
    from itertools import product
    from random import shuffle

    quadrados = [
        [c0, c1, c2, c3]
        for c0, c1, c2, c3 in product(CORES, repeat=4)
        if c1 != c0 and c2 != c0 and c3 != c1 and c3 != c2
    ]
    shuffle(quadrados)
    return quadrados


quadrados = gerar_quadrados()

print(len(quadrados))
for el in quadrados:
    print('''
    ############################
    #             #            #
    #      {}      #     {}      #
    #             #            #
    ############################
    #             #            #
    #      {}      #     {}      #
    #             #            #
    ############################
    '''.format(el[0], el[1], el[2], el[3]))
"noreply@github.com"
] | Zombrooc.noreply@github.com |
da9c168a75cce38ed9ba39dc0533271d349b9866 | e8b21b149f1ce8a9be0b5e52049aa48e30fc0e27 | /webtier/cgi-bin/test2.py | a3aae3f33684d6ae13af4a3b146b2a46e9b71327 | [] | no_license | EricB2745/BlueFlame | b52fb3fc292da01401a4e8dac863a89e223fd262 | 1d5c0efafade5711d49d97fe063b04e455081767 | refs/heads/master | 2021-08-10T20:26:20.092171 | 2017-11-12T23:42:39 | 2017-11-12T23:42:39 | 110,448,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py | #!/usr/bin/env python
# enable debugging
import cgitb
cgitb.enable()

from Db.OrderStatus import OrderStatus

# CGI responses must emit headers before any body content.
# (Python 2 print statements: this script targets a Python 2 CGI host.)
print "Content-Type: text/plain\r\n\r\n"

# Smoke test: load order #4 (empty user/session args) and echo it as JSON.
orderStatus = OrderStatus(4, '', '')
orderStatus.load()
print orderStatus.toJson()
| [
"ebuhring@yahoo.com"
] | ebuhring@yahoo.com |
afd75311add388724a7e600527831e6721354fd8 | 3dcd062546df547abd3a22ee34dd4b7cc8554f2d | /simple.py | 3d29978acec169342a1f69cf76aca854efd70109 | [] | no_license | shixiangheng/qidian | cbb0296f4783c412c479dc066f5a2a5be746be09 | 77a330b6e4c5065c0e2a85106d07bc86fff86bfb | refs/heads/master | 2020-06-08T02:51:54.553993 | 2019-08-12T02:04:27 | 2019-08-12T02:04:27 | 193,145,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | import multiprocessing as mp
import cv2
import numpy as np
import os

# Edge-detect spl.jpg with Canny (low/high thresholds 200/300) and save the
# result to c.jpg. NOTE: the mp/np/os imports above are currently unused.
img = cv2.imread("spl.jpg")
img=cv2.Canny(img, 200, 300)
cv2.imwrite("c.jpg", img)
| [
"noreply@github.com"
] | shixiangheng.noreply@github.com |
50173552aa4041ba8d7633c9dbd2b6a83f10766d | 2067efb839a6bf442200f8b4a2150f85df2f8eb2 | /vrtic_budva/urls.py | 51cfa0e2e4338f01c4af1541e7565604af6f8db0 | [] | no_license | radomir-mijovic/preschool_managing_system | 0b91dc386e308d5389f041f1d9665f7cd975d1ef | cd16aa2ddaa8e18873b60d944a7f5c538bb56847 | refs/heads/master | 2023-04-17T02:51:16.966802 | 2021-05-10T20:43:30 | 2021-05-10T20:43:30 | 363,444,841 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | from django.contrib import admin
from django.urls import path, include

# Root URL configuration: site app first, then company pages, then admin.
urlpatterns = [
    path('', include('vrtic.urls')),
    path('', include('companies.urls')),
    path('admin/', admin.site.urls),
]
| [
"batomijovic4@gmail.com"
] | batomijovic4@gmail.com |
a585d4489cb8b4295cdbaa734255fddff64656b5 | 416f598c62277659f787a37d06f3ebc633a79d53 | /every_election/apps/organisations/migrations/0036_auto_20180606_1035.py | bfa7659c15a88e941da82db96db06e8575c0edfb | [] | no_license | chris48s/EveryElection | 53b6d807e97b2a8b9a943dedcc5ff6ecc65d20fc | 38192a075ae359b91e2aa352fb3886c6c93d3337 | refs/heads/master | 2021-01-22T19:49:15.898338 | 2018-08-17T09:11:42 | 2018-08-17T09:11:42 | 85,244,907 | 0 | 0 | null | 2017-03-16T21:53:29 | 2017-03-16T21:53:28 | null | UTF-8 | Python | false | false | 484 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-06 10:35
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: update Organisation Meta options only
    (default ordering and get_latest_by); no schema change."""

    dependencies = [
        ('organisations', '0035_rename_divset_constraint'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='organisation',
            options={'get_latest_by': 'start_date', 'ordering': ('official_name', '-start_date')},
        ),
    ]
| [
"chris.shaw480@gmail.com"
] | chris.shaw480@gmail.com |
76326cd1f0f3627467abd0da88748f56f3ca7401 | b5c30e1d8796e6219afcf3dd221e54d8345eff39 | /contacts/urls.py | c5a4b3120e07d04655da3eaee9092511c591665f | [] | no_license | rraihansaputra/mitreka-vue-api | 887e9d7d246b2af4c6b33c9f25614952e3aaecec | 60f5b31ffdf56d485973eafdc7c89c82f0b6f2e0 | refs/heads/master | 2022-05-03T13:43:57.149551 | 2019-10-08T10:15:57 | 2019-10-08T10:15:57 | 213,610,717 | 0 | 0 | null | 2022-04-22T22:31:25 | 2019-10-08T10:13:38 | Python | UTF-8 | Python | false | false | 1,200 | py | """contacts URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.urls import include,path
from rest_framework import routers
from core import views

# The DRF router auto-generates list/detail routes for each viewset.
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
router.register(r'groups', views.GroupViewSet)
router.register(r'contacts', views.ContactViewSet)
router.register(r'interactions', views.InteractionViewSet)

urlpatterns = [
    path('', include(router.urls)),
    # Session login/logout for the browsable API.
    path('api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    path('admin/', admin.site.urls),
]
| [
"rraihansaputra@gmail.com"
] | rraihansaputra@gmail.com |
f829e7c48e92abe5ad75a0a74cac07b79e02ee0f | 907a5b5399099b5e3eb25b54553fdb8dc712db37 | /mimer/mimer/wsgi.py | a0eca67bb54b705e4c7a4429e28971a48d041f16 | [
"MIT"
] | permissive | jollescott/mimer | b175f5ff2b84331c0e777e9580218cffa271ce1a | f32847a1275ea31347abf4790973dbb014d7de8d | refs/heads/master | 2023-07-24T21:47:29.399807 | 2023-02-09T20:53:38 | 2023-02-09T20:53:38 | 226,271,720 | 0 | 0 | MIT | 2023-07-15T00:52:57 | 2019-12-06T07:32:42 | Python | UTF-8 | Python | false | false | 387 | py | """
WSGI config for mimer project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

# Settings must be configured before the application object is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mimer.settings')

application = get_wsgi_application()
| [
"jollescott.carl@hotmail.com"
] | jollescott.carl@hotmail.com |
92a080835d4d17b956c1404b4cd6c7af24ef44e7 | be2994ebe2361a3cd3670af5bc06872c19e27697 | /utils/LoadDataAiSky.py | 220b369bdb1824a5610e1cbafcf220063056611c | [
"MIT"
] | permissive | shawn3298317/SPG | 541df4c35dd838e73d424f0a7da66b0e5cb5aa90 | 36beb51570e41e159de56b780cd60e822a85ffff | refs/heads/master | 2021-01-07T13:34:12.659957 | 2020-05-01T02:00:11 | 2020-05-01T02:00:11 | 241,711,565 | 0 | 0 | MIT | 2020-02-19T19:53:31 | 2020-02-19T19:53:30 | null | UTF-8 | Python | false | false | 3,164 | py | import re
import os
import copy
from collections import Counter
import torch
from torch.utils.data.dataset import Dataset
from torch.utils.data import DataLoader
import torchvision.transforms as transforms
from torchvision.datasets import ImageFolder
from bbox import BBox2D, XYXY, XYWH
import ast
import cv2
import pandas as pd
import numpy as np
from tqdm import tqdm
import matplotlib.image as mpimg
def collate_fn(seq_list):
    """Batch a list of ImageFolder samples of shape ((image, target), class_idx).

    Returns (stacked image tensor, list of target dicts); the ImageFolder
    class index is discarded because the label already lives in the target.
    """
    images = []
    targets = []
    for sample in seq_list:
        image, target = sample[0]
        images.append(image)
        targets.append(target)
    return torch.stack(images), targets
# Shared pipeline: HWC uint8 image -> CHW float tensor scaled to [0, 1].
trans_fn = transforms.Compose([transforms.ToTensor()])
def loader_fn(path):
    """Load one 224x224 RGB image tensor and its has-car/no-car target.

    The parent directory name ("has_car" / "no_car") encodes the label;
    no bounding boxes are attached by this loader.
    """
    # Extract the class folder from paths like ".../has_car/0123.png".
    img_class = re.sub(r".*?/(has_car|no_car)/\d+\.png", "\\1", path)
    image = cv2.resize(cv2.imread(path, cv2.IMREAD_COLOR), (224, 224))
    target = {
        "bboxs": [],
        "label": 1.0 if img_class == "has_car" else 0.0,
    }
    return trans_fn(image), target
def loader_bbox_fn(path):
    """Load one 224x224 RGB image tensor, its label, and ground-truth boxes.

    Positive ("has_car") samples must have an entry in the module-level
    ``bbox_map``.

    BUGFIX: a missing entry now raises ``KeyError`` explicitly instead of
    relying on ``assert``, which is silently stripped when Python runs
    with ``-O`` and would have produced a KeyError at a less obvious spot.
    """
    # Extract the class folder from paths like ".../has_car/0123.png".
    img_class = re.sub(r".*?/(has_car|no_car)/\d+\.png", "\\1", path)
    img = cv2.imread(path, cv2.IMREAD_COLOR)
    img = cv2.resize(img, (224, 224))
    imgT = trans_fn(img)
    label = 1.0 if img_class == "has_car" else 0.0
    target = {"label": label, "bboxs": []}
    if label == 1.0:
        if path not in bbox_map:
            raise KeyError("no ground-truth bboxes recorded for %s" % path)
        target["bboxs"] = bbox_map[path][0]
    return imgT, target
def get_bbox_mapping(path):
    """Read ``bbox.csv`` under *path* and build {image path: [bbox list]}.

    Each value is a one-element list holding the parsed BBox2D objects,
    which is why callers index it with ``[0]``.
    """
    frame = pd.read_csv(os.path.join(path, "bbox.csv"), sep=",")
    frame["bbox"] = frame["bbox"].map(parse_bboxs_from_str)
    return frame.set_index("has_car_img_paths").T.to_dict("list")
def parse_bboxs_from_str(s):
    """Parse a bbox list literal into rescaled BBox2D objects.

    Example input: "[(23, 0, 154, 87), (75, 213, 195, 256)]".
    Each (x1, y1, x2, y2) tuple is converted via :func:`to_bbox2d`,
    which rescales coordinates from 256-space to 224-space through
    :func:`norm_coords`.

    BUGFIX: removed the unused local ``K = 224/256`` — the scaling factor
    actually lives in ``norm_coords``'s default argument.
    """
    bbox_raw_list = ast.literal_eval(s)
    return [to_bbox2d(bbox, norm_coords) for bbox in bbox_raw_list]
def norm_coords(v, K=224/256):
    """Rescale a 256-space coordinate into 224-space, truncating to int."""
    scaled = v * K
    return int(scaled)
def to_bbox2d(bbox, f):
    """Build an XYWH BBox2D from an (x1, y1, x2, y2) tuple, mapping every
    component through *f* (normally :func:`norm_coords`)."""
    x = f(bbox[0])
    y = f(bbox[1])
    width = f(bbox[2] - bbox[0])
    height = f(bbox[3] - bbox[1])
    return BBox2D((x, y, width, height), mode=XYWH)
# Ground-truth boxes are only recorded for the *test* split (see data_loader);
# loaded once at import time.
bbox_map = get_bbox_mapping("/projectnb/saenkog/awong1/dataset/aiskyeye/processed_iou0.1_256x256_2/testing/")
def data_loader(args, test_path=False):
    """Build train / validation / test DataLoaders for the AI-Skyeye car set.

    :param args: namespace providing ``batch_size``.
    :param test_path: unused; kept for interface compatibility.
    :returns: tuple (train_loader, val_loader, test_loader).

    BUGFIX: ``val_dataset`` was previously built from ``tr_dir`` (the
    training split) while ``val_dir`` was defined but never used, so
    "validation" silently ran on training images. It now uses ``val_dir``.
    """
    tr_dir = "/projectnb/saenkog/awong1/dataset/aiskyeye/processed_iou0.1_256x256_old/training"
    val_dir = "/projectnb/saenkog/awong1/dataset/aiskyeye/processed_iou0.1_256x256_old/validation"
    tst_dir = "/projectnb/saenkog/awong1/dataset/aiskyeye/processed_iou0.1_256x256_2/testing"

    tr_dataset = ImageFolder(root=tr_dir, loader=loader_fn)
    val_dataset = ImageFolder(root=val_dir, loader=loader_fn)
    # Only the test split attaches ground-truth boxes (loader_bbox_fn).
    tst_dataset = ImageFolder(root=tst_dir, loader=loader_bbox_fn)

    tr_loader = DataLoader(tr_dataset, batch_size=args.batch_size, shuffle=True, num_workers=2, collate_fn=collate_fn)
    val_loader = DataLoader(val_dataset, batch_size=args.batch_size, shuffle=True, num_workers=2, collate_fn=collate_fn)
    tst_loader = DataLoader(tst_dataset, batch_size=args.batch_size, shuffle=True, num_workers=2, collate_fn=collate_fn)
    return tr_loader, val_loader, tst_loader
| [
"shawnlin@bu.edu"
] | shawnlin@bu.edu |
251d4806904d1ee835a57474bb35cd757da5a8a2 | 6125b244cd727da18aa80f663df73a36970eaa95 | /SG_combine.py | 8bb4a32ac9fae9624c401c1206921286de0a709c | [] | no_license | HELL-TO-HEAVEN/stroke | e2863780addf90167534be574f2db981dc0ccee4 | 73c3466bfbd4be939bc9a07c773098739ca2f51e | refs/heads/master | 2020-12-13T16:56:52.176073 | 2019-07-02T22:48:17 | 2019-07-02T22:48:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,139 | py | import os
import cv2
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
from scipy.spatial.distance import cosine
# Root directory holding per-character stroke-graph results.
SAVE_PATH = "result/SG"
def load_data(img_path):
    """Read *img_path* and return it as a grayscale numpy array."""
    grayscale = Image.open(img_path).convert('L')
    # binarization() is deliberately not applied here, as in the original.
    return np.array(grayscale)
def binarization(img, thres=128):
    """Threshold *img* in place: pixels below *thres* become 0, others 1.

    Improvements over the original:
    - vectorized with a numpy boolean mask instead of a hard-coded
      256x256 double Python loop (one pass instead of 65k iterations);
    - generalized to arrays of any shape;
    - the threshold is now a parameter (default 128, as before).

    The array is modified in place and also returned, matching the
    original contract.

    :param img: numpy integer array of pixel intensities.
    :param thres: cut-off intensity; values < thres map to 0, >= to 1.
    :returns: the same array object, binarized.
    """
    dark = img < thres
    img[dark] = 0
    img[~dark] = 1
    return img
def cos(v_a, v_b):
    """Cosine *distance* (1 - cosine similarity) between two vectors."""
    distance = cosine(v_a, v_b)
    return distance
def is_near(paa, pab, pba, pbb):
    """True if any endpoint of stroke A is within 4px of an endpoint of B.

    Each point stores its (row, col) coordinates at indices [1] and [2];
    index [0] carries the stroke id and is ignored here.
    """
    for pa in (paa, pab):
        for pb in (pba, pbb):
            gap = ((pa[1] - pb[1]) ** 2 + (pa[2] - pb[2]) ** 2) ** 0.5
            if gap <= 4:
                return True
    return False
def combine(p):
    """Merge collinear, nearby stroke fragments of one character into strokes.

    *p* is the character's result directory; its last path character is the
    character name. Reads the stroke start/end table
    ``<SAVE_PATH>/<name>/<name>_start_end.txt`` (3 rows per stroke: start,
    mid, end), groups fragments whose direction vectors are almost parallel
    (cosine distance < 0.1) and whose endpoints are near each other, then
    writes each group as one combined image.
    """
    fs = os.listdir(p)
    name = p[-1]
    v = []
    txt = np.loadtxt(SAVE_PATH + '/%s' % name + '/%s_start_end.txt' % name, delimiter=',')
    # Build one direction vector per stroke, oriented so row-delta >= 0.
    for i in range(int(txt[-1][0])):
        a = txt[(3 * i)]
        b = txt[(2 + 3 * i)]
        if a[1] > b[1]:
            v_ = (a[2] - b[2]), (a[1] - b[1])
            v.append(v_)
        else:
            v_ = (b[2] - a[2]) , (b[1] - a[1])
            v.append(v_)
    # t[i] holds the group label of stroke i (0 = ungrouped so far).
    t = np.zeros(int(txt[-1][0]))
    count = 1
    for i in range(int(txt[-1][0])):
        for j in range(i+1, int(txt[-1][0])):
            cos_dis = cos(v[i], v[j])
            if cos_dis < 0.1 and is_near(txt[(3 * i)], txt[(2 + 3 * i)], txt[(3 * j)], txt[(2 + 3 * j)]):
                # Propagate an existing label, or open a new group.
                if t[i] != 0:
                    t[j] = t[i]
                elif t[j] != 0:
                    t[i] = t[j]
                else:
                    # NOTE(review): indentation was lost in the source dump;
                    # count is assumed to advance only when a new group is
                    # opened — confirm against the original file.
                    t[i] = count
                    t[j] = count
                    count = count + 1
    print(name)
    print(t)
    order = 1
    for i in range(int(max(t) + 1)):
        stroke_img = np.zeros((256, 256), dtype=np.int16)
        for j in range(len(t)):
            if t[j] == 0:
                # Ungrouped fragment: copy it out as its own stroke and mark
                # it consumed (-1) so later group passes skip it.
                img = load_data(p + '/SG_%s_%02d.jpg' % (name, j+1))
                save_name = 'SG_combine_%s_%02d.jpg' % (name, order)
                cv2.imencode('.jpg', img)[1].tofile(SAVE_PATH + '/' + name + '/' + save_name)
                order = order + 1
                t[j] = -1
                continue
            if t[j] == i:
                # Accumulate every fragment of group i into one canvas.
                img = load_data(p + '/SG_%s_%02d.jpg' % (name, j+1))
                stroke_img = stroke_img + img
        # Clamp summed overlaps back into valid 8-bit range before saving.
        # NOTE(review): on the i == 0 pass the composite is empty (all
        # singles were handled above), so this may emit a blank image —
        # verify against the original's indentation.
        stroke_img[np.where(stroke_img > 255)] = 255
        save_name = 'SG_combine_%s_%02d.jpg' % (name, order)
        cv2.imencode('.jpg', stroke_img)[1].tofile(SAVE_PATH + '/' + name + '/' + save_name)
        order = order + 1
if __name__ == '__main__':
    # Run stroke combination for every character directory under SAVE_PATH.
    dirs = os.listdir(SAVE_PATH)
    # print(dirs)
    for d in dirs:
        p = SAVE_PATH + '/' + d
        combine(p)
| [
"qq85726@gmail.com"
] | qq85726@gmail.com |
c24446138c5306616924ff004223854bde50c197 | d481d28740e3c2e43285c4281aba7e1504233504 | /mp_manager/process_helpers.py | bfc25a7b089e8681fd0cfc7394fb9284e7bd830b | [
"BSD-3-Clause"
] | permissive | dmiwell/mp_manager | c043c29d52584b3901b75e395c53ae02195ff139 | 006fd2018a6dfdd7b4f867f01c865227e89cbbf2 | refs/heads/master | 2023-01-10T14:20:47.484399 | 2022-12-27T05:08:10 | 2022-12-27T05:08:10 | 187,804,671 | 2 | 0 | NOASSERTION | 2022-12-27T05:08:11 | 2019-05-21T09:23:05 | Python | UTF-8 | Python | false | false | 5,433 | py | # -*- coding: utf-8 -*-
import time
import traceback
import signal
from multiprocessing.managers import SyncManager
import pickle
import datetime
class NS:
    """Shared-state container backed by a multiprocessing SyncManager.

    Holds the namespaces, queue, events and locks that worker processes
    use to coordinate with the parent process.
    """

    def __init__(self):
        mgr = SyncManager()
        # Make the manager process ignore SIGINT so Ctrl+C is handled
        # only by the parent, not by the manager.
        mgr.start(signal.signal, (signal.SIGINT, signal.SIG_IGN))
        # General-purpose shared namespace; ``error`` carries the (pickled)
        # failure status, if any.
        self.ns_default = mgr.Namespace()
        self.ns_default.error = None
        # Separate namespace reserved for Stats counters.
        self.ns_stats = mgr.Namespace()
        self.input_queue = mgr.Queue(maxsize=100)
        self.error_occurred = mgr.Event()
        self.error_processed = mgr.Event()
        self.batch_done = mgr.Event()
        self.mgr = mgr
        self.stats_lock = mgr.Lock()
        self.main_lock = mgr.Lock()

    def set_shared_value(self, name, value, no_lock=False):
        """Set attribute *name* on the shared namespace, locking unless
        *no_lock* is true."""
        if no_lock:
            setattr(self.ns_default, name, value)
            return
        with self.main_lock:
            setattr(self.ns_default, name, value)

    def get_shared_value(self, name, no_lock=False):
        """Read attribute *name* from the shared namespace, locking unless
        *no_lock* is true."""
        if no_lock:
            return getattr(self.ns_default, name)
        with self.main_lock:
            return getattr(self.ns_default, name)
# Module-level singleton: every process shares this one manager-backed state.
ns_container = NS()
class Stats:
    """Process-shared statistics helper.

    All counters live on the manager-backed ``ns_stats`` namespace so any
    worker process can update them; ``stats_lock`` serializes access.

    BUGFIX: the original defined ``time_spent`` twice in the class body;
    the first definition (returning a timedelta) was immediately shadowed
    by the later one (returning a str) and has been removed as dead code.
    """
    stats = ns_container.ns_stats
    start_time = datetime.datetime.utcnow()
    _lock = ns_container.stats_lock
    _locked = False

    @classmethod
    def lock(cls):
        cls._lock.acquire()
        cls._locked = True

    @classmethod
    def unlock(cls):
        cls._lock.release()
        cls._locked = False

    @classmethod
    def set_stats_val(cls, name, value):
        """Set counter *name* to *value* under the lock; return *value*."""
        cls.lock()
        cls.add_key(name)
        setattr(cls.stats, name, value)
        cls.unlock()
        return value

    @classmethod
    def inc_stats_val(cls, name, value=1):
        """Increment counter *name* by *value* (creating it at 0) and
        return the new total."""
        cls.lock()
        cls.add_key(name)
        if not hasattr(cls.stats, name):
            setattr(cls.stats, name, 0)
        setattr(cls.stats, name, getattr(cls.stats, name) + value)
        cls.unlock()
        return getattr(cls.stats, name)

    @classmethod
    def get_stats_val(cls, name, default=None):
        """Read counter *name* under the lock, or *default* if unset."""
        cls.lock()
        value = getattr(cls.stats, name, default)
        cls.unlock()
        return value

    @classmethod
    def inc_stats_vals(cls, vals, return_stats=False, **kwargs):
        """Atomically increment several counters from the *vals* mapping;
        optionally return a snapshot taken inside the same lock."""
        cls.lock()
        for name, value in vals.items():
            cls.add_key(name)
            if not hasattr(cls.stats, name):
                setattr(cls.stats, name, 0)
            setattr(cls.stats, name, getattr(cls.stats, name) + value)
        if return_stats:
            stats = cls.get_stats(**kwargs)
        cls.unlock()
        if return_stats:
            return stats

    @classmethod
    def get_stats(cls, ignore_keys=(), **kwargs):
        """Snapshot all counters except *ignore_keys*. Does not lock by
        itself; callers needing a consistent view must hold the lock
        (``inc_stats_vals`` does)."""
        keys = (k for k in cls.stat_keys() if k not in ignore_keys)
        return {k: getattr(cls.stats, k, None) for k in keys}

    @classmethod
    def add_key(cls, name):
        """Record *name* in the shared key registry (re-assignment is
        required for the manager proxy to see the mutation)."""
        stat_keys = getattr(cls.stats, 'stat_keys', set())
        stat_keys.add(name)
        cls.stats.stat_keys = stat_keys

    @classmethod
    def stat_keys(cls):
        """Return the set of counter names seen so far."""
        return getattr(cls.stats, 'stat_keys', set())

    @classmethod
    def time_spent(cls):
        """Elapsed wall-clock time since class creation, as a string."""
        return str(datetime.datetime.utcnow() - cls.start_time)
class ExitStatus:
_listeners = set()
seconds = 0
time_start = time.time()
def __init__(self, message='Success', code=0, target=None, **kwargs):
self.time_start = kwargs.get('time_start') or self.time_start
if traceback.sys.exc_info()[0]:
message = traceback.format_exc()
code = code or 1
self.code = code
self.message = message
self.target_error = target
@property
def seconds(self):
time_now = time.time()
return time_now - self.time_start
def __repr__(self):
data = 'Finished with code {c} in {s} with message: [{m}]'.format(
c=self.code,
s=self.seconds,
m=self.message
)
if self.target_error:
data += '\nError target: {}'.format(self.target_error)
return data
def __iter__(self):
for f in ['code', 'seconds', 'message', 'target_error']:
val = getattr(self, f, None)
if val is not None:
yield f, val
def get_error(self):
if self.target_error:
return f'target: {self.target_error}\nmessage: {self.message}'
@classmethod
def get_status(cls, wait=True):
if ns_container.error_occurred.is_set():
wait and ns_container.error_processed.wait()
result = ns_container.ns_default.error
status = pickle.loads(result) if isinstance(
result,
bytes) else result
else:
status = ExitStatus()
for l in cls._listeners:
l(status)
return status
@classmethod
def add_exit_listener(cls, listener):
cls._listeners.add(listener)
def handle_exception_occurred(**kwargs):
if ns_container.error_processed.is_set():
return
result = ExitStatus(**kwargs)
print(result.message)
ns_container.ns_default.error = pickle.dumps(result)
ns_container.error_occurred.set()
def _sigint_default(_signal, frame):
handle_exception_occurred(code=_signal, message='Keyboard interrupt')
signal.signal(signal.SIGINT, _sigint_default)
| [
"dmitriy.pom0@gmail.com"
] | dmitriy.pom0@gmail.com |
fcddffa8abe2694b934a5dac72502b422e0187d8 | 47673d255e86d35e063f2ca5da712f6679b65503 | /mysite/settings.py | 13f3abfcd2b2036cb3cdce42c86c8392668220cb | [] | no_license | nurcanyesil/ilk-blogum | 022969a30cd68338b5a97dcd67eab2719d37df91 | e7bb74ddf31a4801bfd89cf69810d0693efe6732 | refs/heads/master | 2021-01-12T06:02:52.783803 | 2016-12-24T15:19:03 | 2016-12-24T15:19:03 | 77,282,364 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,235 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '9ivn+kktyj(&02ei65d3&60!5w877-2()%5-^+r&o_90n84)te'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Istanbul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
| [
"westnurcan@hotmail.com"
] | westnurcan@hotmail.com |
0216d00c4a0280404201ed358bfc7c240952ec5a | 0202d8faff21f24e468654b3da56ca16457ff5b3 | /entrant/abc133/abc133-c.py | a93b0c17761a620084c2519ce520de7d390fcc5d | [] | no_license | ryogoOkura/atcoder | a3d8d052c6424db26994444eca1ebaa3efbd3e21 | 2865b42bbdb50d83bf129fd868083c2363e92024 | refs/heads/master | 2021-06-24T06:07:32.290393 | 2021-01-02T13:39:24 | 2021-01-02T13:39:24 | 187,552,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | l,r=map(int,input().split())
if (l//2019) == (r//2019):
l,r=l%2019,r%2019
ans=2018
for i in range(l,r):
for j in range(i+1,r+1):
tmp=(i*j)%2019
if tmp<ans:
ans=tmp
print(ans)
else:
print(0)
| [
"nem1442so@gmail.com"
] | nem1442so@gmail.com |
fcab7e57e5a9878635ff29c93ac02d85b67e4e24 | b1d17d9391398fd7acd64d9d8be38085dc58678f | /appuim_app_01/test_appium_web_0628.py | 4d86f0c86a40ce5efe94d3bbbb638c819a0cff8d | [] | no_license | TestingWu/pythonProject | ccaf88953e0f99e8b1c08b33b1a4309a3f1068f5 | 32d6465eb92b6b3f2e90d3899204cdbe8ef10dfa | refs/heads/main | 2023-07-17T05:11:48.990160 | 2021-09-02T07:16:58 | 2021-09-02T07:16:58 | 368,195,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | from time import sleep
from appium import webdriver
class TestAppiumWeb:
'''ๆต่ฏๆๆบ่ชๅธฆ็ๆต่งๅจไธญ็็ฝ้กต'''
def setup(self):
desired_caps = {
'platformName': 'Android',
'platformVersion': '6.0.1',
'browserName': 'Browser',
'noReset': True,
'deviceName': '127.0.0.1:7555',
}
self.driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', desired_caps)
self.driver.implicitly_wait(15)
def teardown(self):
self.driver.quit()
def test_web(self):
self.driver.get('http://m.baidu.com')
sleep(5) | [
"543223938@qqใcom"
] | 543223938@qqใcom |
e9114bcfcb85fbaadd2c41533b2f464f2ec7e9d8 | 38de3e53f84dad52157b164d926324dc58e109ca | /project/urls.py | 2bf909a7badce8d8f73e6039e2c3b0d125bf16e4 | [] | no_license | UniqueWA/backend-service | 0cf45db8408ef144e8d644af4e28deb4891c267d | cf3f8501af5921a7f172ea09fb8754ef378f6a27 | refs/heads/master | 2021-09-07T19:12:21.742252 | 2018-02-27T18:49:38 | 2018-02-27T18:49:38 | 119,415,916 | 0 | 2 | null | 2018-02-27T18:49:39 | 2018-01-29T17:25:44 | JavaScript | UTF-8 | Python | false | false | 490 | py | from django.conf.urls import url
from django.contrib import admin
from django.urls import path, include, re_path
from rest_framework import serializers, viewsets, routers
from rest_framework.urlpatterns import format_suffix_patterns
from project import views
router = routers.DefaultRouter()
router.register(r'projects', views.ProjectViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
| [
"breakroger104@gmail.com"
] | breakroger104@gmail.com |
39ceb9d36775a75edf35014ee07e0ae39affc16f | f8f2536fa873afa43dafe0217faa9134e57c8a1e | /aliyun-python-sdk-hbr/aliyunsdkhbr/request/v20170908/DescribeHanaRestoresRequest.py | 29b991a2c5cbecc928a581fe3e4ae75d2966a997 | [
"Apache-2.0"
] | permissive | Sunnywillow/aliyun-openapi-python-sdk | 40b1b17ca39467e9f8405cb2ca08a85b9befd533 | 6855864a1d46f818d73f5870da0efec2b820baf5 | refs/heads/master | 2022-12-04T02:22:27.550198 | 2020-08-20T04:11:34 | 2020-08-20T04:11:34 | 288,944,896 | 1 | 0 | NOASSERTION | 2020-08-20T08:04:01 | 2020-08-20T08:04:01 | null | UTF-8 | Python | false | false | 2,536 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeHanaRestoresRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'hbr', '2017-09-08', 'DescribeHanaRestores','hbr')
self.set_protocol_type('https')
def get_VaultId(self):
return self.get_query_params().get('VaultId')
def set_VaultId(self,VaultId):
self.add_query_param('VaultId',VaultId)
def get_DatabaseName(self):
return self.get_query_params().get('DatabaseName')
def set_DatabaseName(self,DatabaseName):
self.add_query_param('DatabaseName',DatabaseName)
def get_BackupId(self):
return self.get_query_params().get('BackupId')
def set_BackupId(self,BackupId):
self.add_query_param('BackupId',BackupId)
def get_PageSize(self):
return self.get_query_params().get('PageSize')
def set_PageSize(self,PageSize):
self.add_query_param('PageSize',PageSize)
def get_RestoreStatus(self):
return self.get_query_params().get('RestoreStatus')
def set_RestoreStatus(self,RestoreStatus):
self.add_query_param('RestoreStatus',RestoreStatus)
def get_RestoreId(self):
return self.get_query_params().get('RestoreId')
def set_RestoreId(self,RestoreId):
self.add_query_param('RestoreId',RestoreId)
def get_ClusterId(self):
return self.get_query_params().get('ClusterId')
def set_ClusterId(self,ClusterId):
self.add_query_param('ClusterId',ClusterId)
def get_PageNumber(self):
return self.get_query_params().get('PageNumber')
def set_PageNumber(self,PageNumber):
self.add_query_param('PageNumber',PageNumber)
def get_Token(self):
return self.get_query_params().get('Token')
def set_Token(self,Token):
self.add_query_param('Token',Token) | [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
9d18894bc6f3d5ded7a164214f5b64ba5203e62f | 7018e330f93cf5a9329172c283e20ac3d8862fdd | /test/learnselenium.py | 94131cda2edf764d3b8a49d6163c1e32c9f70514 | [] | no_license | mwshang/qian_ka | 49654325a679958639cb81e52378f0ccaabe0c1e | e2d217c67894b7e0c5b199827ed1e6d28f3404ca | refs/heads/master | 2020-05-02T17:57:57.194820 | 2019-04-05T13:24:45 | 2019-04-05T13:24:45 | 178,114,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,696 | py | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver import ActionChains
from selenium.common.exceptions import NoSuchElementException
import time
if False:
browser = webdriver.Chrome()
try:
browser.get("https://www.baidu.com")
input = browser.find_element_by_id("kw")
input.send_keys("Python")
input.send_keys(Keys.ENTER)
wait = WebDriverWait(browser,10)
wait.until(EC.presence_of_all_elements_located((By.ID,"content_left")))
print(browser.current_url)
print(browser.get_cookies())
print("---------------")
print(browser.page_source)
finally:
browser.close()
if False:
browser = webdriver.Chrome()
browser.get("http://www.runoob.com/try/try.php?filename=jqueryui-api-droppable")
browser.switch_to.frame("iframeResult")
source = browser.find_element_by_css_selector("#draggable")
target = browser.find_element_by_css_selector("#droppable")
actions = ActionChains(browser)
actions.drag_and_drop(source,target)
actions.perform()
time.sleep(2)
browser.close()
if False:
browser = webdriver.Chrome()
browser.get("https://www.zhihui.com/explore")
time.sleep(10)
browser.execute_script('window.scrollTo(0,document.body.scrollHeight)')
time.sleep(2)
browser.execute_script("alert('To Bottom')")
if False:
browser = webdriver.Chrome()
browser.get("http://www.runoob.com/try/try.php?filename=jqueryui-api-droppable")
browser.switch_to.frame("iframeResult")
try:
logo = browser.find_element_by_class_name("logo")
except NoSuchElementException as e:
print("NO LOGO")
browser.switch_to.parent_frame()
logo = browser.find_element_by_class_name("logo")
print(logo)
print(logo.text)
if False:
browser = webdriver.Chrome()
browser.get("https://wwww.baidu.com")
print(browser.get_cookies())
print(browser.add_cookie({"name":'smw',"pwd":'123','domain':'www.baidu.com','value':'000'}))
print(browser.get_cookies())
browser.delete_all_cookies()
print(browser.get_cookies())
if True:
browser = webdriver.Chrome()
browser.get("https://wwww.baidu.com")
browser.execute_script('window.open()')
print(browser.window_handles)
browser.switch_to.window(browser.window_handles[1])
browser.get("https://www.taobao.com")
time.sleep(1)
browser.switch_to.window(browser.window_handles[0])
browser.get("https://python.org")
| [
"shangmw@163.com"
] | shangmw@163.com |
9cd6d2a4b8c7f93156b9edcdad455b0823b17bb2 | 9d8a490b8db1d6be0c2d741d4a01f3a46e8ee072 | /tests/test_schema.py | 299e7cfc849e15250951baed8e54ae079e5579fc | [
"MIT"
] | permissive | AxlTom/validater | a44d8d8a2d6a502192780cda01d367aebe18eeca | d3054ac7b808e16ccb2e5b4b939ff717e51f1270 | refs/heads/master | 2021-01-24T15:05:57.897231 | 2016-03-31T07:40:05 | 2016-03-31T07:40:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,689 | py | #!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals, absolute_import, print_function
from validater import validate, parse, parse_snippet, Schema, SchemaError
import pytest
import copy
def test_parse_snippet():
assert parse_snippet('int') == {
'validater': 'int',
}
assert parse_snippet('int(1,10)') == {
'validater': 'int',
'args': (1, 10),
}
assert parse_snippet('int&required&default=5') == {
'validater': 'int',
'required': True,
'default': 5
}
assert parse_snippet('int(1,10)&required&default=5') == {
'validater': 'int',
'args': (1, 10),
'required': True,
'default': 5
}
assert parse_snippet(('int(1,10)&required&default=5', 'desc')) == {
'validater': 'int',
'args': (1, 10),
'required': True,
'default': 5,
'desc': 'desc'
}
def test_parse():
snippet = parse_snippet("int&required")
sche = parse({'userid': "int&required"})
assert sche['userid'].data == snippet
sche = parse({'userid': ["int&required"]})
assert sche['userid'][0].data == snippet
sche = parse({'userid': {'userid': "int&required"}})
assert sche['userid']['userid'].data == snippet
sche = parse({'userid': [{'userid': "int&required"}]})
assert sche['userid'][0]['userid'].data == snippet
sche = parse({'email': ("email&required", "้ฎ็ฎฑๅฐๅ")})
assert sche['email'].data == parse_snippet(("email&required", "้ฎ็ฎฑๅฐๅ"))
sche = parse([{'userid': "int&required"}])
assert sche[0]['userid'].data == snippet
sche = parse("int&required")
assert sche.data == snippet
sche = parse(["int&required"])
assert sche[0].data == snippet
sche = parse([[["int&required"]]])
assert sche[0][0][0].data == snippet
sche = parse({'validater': 'int'})
assert sche.data == {'validater': 'int'}
sche = parse([{'validater': 'int'}])
assert sche[0].data == {'validater': 'int'}
sche = parse({'userid': {'validater': 'int'}})
assert sche['userid'].data == {'validater': 'int'}
def test_parse_error():
with pytest.raises(SchemaError):
parse({'vali': 'int', 'required': True})
with pytest.raises(SchemaError):
parse("&required")
with pytest.raises(SchemaError):
parse("int&")
with pytest.raises(SchemaError):
parse("_unknown_validater")
with pytest.raises(SchemaError):
parse("int(abc)")
with pytest.raises(SchemaError):
parse("int&default=abc")
with pytest.raises(SchemaError):
parse("int&required=true")
# note: sche is a tuple
sche = {"userid": "int&required=true"},
with pytest.raises(SchemaError):
parse(sche)
def test_schema_will_not_modified():
sche = {'data': ("int&required", "input a number")}
origin = copy.deepcopy(sche)
parsed = parse(sche)
assert sche == origin
assert parsed != origin
sche = [("int&required", "input a number")]
origin = copy.deepcopy(sche)
parsed = parse(sche)
assert sche == origin
assert parsed != origin
def test_schema():
data = {
'validater': 'int',
'args': (0, 5),
'required': True,
'default': 0,
'desc': 'desc',
'somekey': 'somevalue'
}
sche = Schema(data)
assert sche.validater('123') == (True, 123)
assert sche.required
assert sche.default == 0
assert sche.args == (0, 5)
assert sche.kwargs == {'somekey': 'somevalue'}
assert 'desc' in sche.error
assert 'int' in sche.error
# test eq and ne
assert Schema(data) == Schema(data)
assert not (Schema(data) != Schema(data))
assert Schema(data) != object()
def test_reuse_schema_snippet():
snippet = {"name": "str"}
schema = {
"user1": snippet,
"user2": snippet,
}
sche = parse(schema)
assert sche['user1']['name'] == parse("str")
assert sche['user2']['name'] == parse("str")
# parse a parsed schema shouldn't cause exception
assert parse(parse(schema)) == parse(schema)
def test_validate_single_schema():
sche = parse('int&required')
err, val = validate('123', sche)
assert not err
assert val == 123
err, val = validate(None, sche)
assert err and 'required' in dict(err)['']
assert val is None
err, val = validate('abc', sche)
assert err and 'int' in dict(err)['']
assert val is None
def test_validate_simple_schema():
sche = parse({'userid': 'int&required'})
err, val = validate({'userid': '123'}, sche)
assert not err
assert val['userid'] == 123
err, val = validate({'userid': None}, sche)
assert err and 'required' in dict(err)['userid']
assert val == {'userid': None}
err, val = validate({}, sche)
assert err and 'required' in dict(err)['userid']
assert val == {'userid': None}
err, val = validate(None, sche)
assert err and 'must be dict' in dict(err)['']
assert val == {}
def test_validate_deep_schema():
sche = parse({"user": {'userid': 'int&required'}})
err, val = validate({"user": {"userid": "123"}}, sche)
assert not err
assert val == {"user": {"userid": 123}}
err, val = validate({"user": {"userid": None}}, sche)
assert err and "required" in dict(err)["user.userid"]
assert val == {"user": {"userid": None}}
def test_validate_simple_schema_has_default_value():
sche = parse({'userid': 'int&required&default=0'})
err, val = validate({'userid': None}, sche)
assert not err
assert val == {'userid': 0}
err, val = validate({}, sche)
assert not err
assert val == {'userid': 0}
sche = parse({'userid': 'int&default=0'})
err, val = validate({'userid': None}, sche)
assert not err
assert val == {'userid': 0}
err, val = validate({}, sche)
assert not err
assert val == {'userid': 0}
sche = parse({'userid': 'int&default=None'})
err, val = validate({'userid': None}, sche)
assert not err
assert val == {'userid': None}
err, val = validate({}, sche)
assert not err
assert val == {'userid': None}
def test_validate_schema_callable_default():
import random
num = random.randint(0, 1)
sche = parse({'validater': 'int', 'args': (0, 1), 'default': lambda: num})
err, val = validate(None, sche)
assert not err
assert val == num
def test_validate_list_schema():
sche = parse({'userid': ['int&required']})
err, val = validate({'userid': []}, sche)
assert not err
assert val == {'userid': []}
err, val = validate({'userid': ['123', '456']}, sche)
assert not err
assert val == {'userid': [123, 456]}
err, val = validate({'userid': ['x123', 'x456']}, sche)
assert err and set(['userid[0]', 'userid[1]']) == set(dict(err))
assert val == {'userid': [None, None]}
err, val = validate({}, sche)
assert err and 'must be list' in dict(err)['userid']
assert val == {'userid': []}
err, val = validate({'userid': '123'}, sche)
assert err and 'must be list' in dict(err)['userid']
assert val == {'userid': []}
def test_validate_deep_list_schema_error():
sche = parse([{'nums': ['int']}])
err, val = validate([{'nums': ['x123', 'x456']}, {'nums': 'x'}, 'x'], sche)
expect = set(['[0].nums[0]', '[0].nums[1]', '[1].nums', '[2]'])
assert expect == set(dict(err))
def test_validate_custom_types():
class User(object):
def __init__(self, userid):
self.userid = userid
sche = parse({
'userid': "int&required",
'friends': [{'userid': "int&required"}]})
jack, f1, f2 = User(0), User(1), User(2)
jack.friends = [f1, f2]
err, val = validate(jack, sche, proxy_types=[User])
assert not err
assert val == {'userid': 0,
'friends': [{'userid': 1}, {'userid': 2}]}
def test_validate_empty_value():
# empty value is not always None, depends on validater
# string type's empty value is ""
err, val = validate(None, parse('str'))
assert not err
assert val == str("")
err, val = validate(None, parse('unicode'))
assert not err
assert val == ""
err, val = validate(None, parse('url'))
assert not err
assert val == ""
# treat empty string as None
err, val = validate(str(''), parse('str&required'))
assert err and err == [('', 'required')]
assert val == str("")
err, val = validate('', parse('unicode&required'))
assert err and err == [('', 'required')]
assert val == ""
err, val = validate('', parse('url&required'))
assert err and err == [('', 'required')]
assert val == ""
| [
"1316792450@qq.com"
] | 1316792450@qq.com |
9746ce1f5cccb769036e8e989585ed927696d9af | 4eafc9dd445b1f292a0a4cb94dea06f813458520 | /mysite_demo/mysite_demo/urls.py | 1783c981a4e25d085cf208dfd3269a338dedb01e | [] | no_license | kele5215/PycharmProjects | 0fdbf584e55774ba643264b1700960862802f9af | 67bbfa6ffc240ddb838c4b56971a006ea0586cfa | refs/heads/master | 2023-01-05T11:24:32.405810 | 2019-10-21T08:26:49 | 2019-10-21T08:26:49 | 191,890,830 | 0 | 0 | null | 2023-01-04T23:08:54 | 2019-06-14T06:49:14 | Jupyter Notebook | UTF-8 | Python | false | false | 768 | py | """mysite_demo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| [
"77523916@qq.com"
] | 77523916@qq.com |
2109d760a8de819025898ddea45a4375454ef1c9 | 04db754364a08a365d73e53f732e5c2c2c607521 | /sample1.py | e668cb7c8698eaf3b26e498c7ba2c77f9a430471 | [] | no_license | jun-take/test1 | b0a8f66700605c21a04d5ec7af90ea216f3c9fd8 | cc972eebac538bb8a3d7f915510e4fa70cbc75c1 | refs/heads/master | 2020-09-22T09:22:49.804072 | 2019-12-01T12:35:34 | 2019-12-01T12:35:34 | 225,136,315 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34 | py | wkout = "hello take"
print(wkout)
| [
"jun-take@mbg.ocn.ne.jp"
] | jun-take@mbg.ocn.ne.jp |
79ee743f199f8c5d0810ee36a96b1e9ef33c9dcf | 474f8c72ad5cc99794cfdd025a6939d621ccc2f1 | /manage.py | 24709d91dc802715cf15af4825b17ed1aa842332 | [] | no_license | moodydev/traffic-analyzer | 29dac0d0891d44085b446031d92855dda2f988fd | 1cddb0b4151a1eea6d696d4ac7a861cabfe113bc | refs/heads/master | 2021-12-27T10:45:33.277834 | 2021-12-10T11:33:21 | 2021-12-10T11:33:21 | 151,363,124 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 899 | py | import argparse
from app import ENVIRONMENTS, scripts
def _parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'command',
help='Command to be executed',
choices=['runserver', 'load_analyzer', 'shell']
)
parser.add_argument(
'-e', '--environment',
help='Choose config environment that will be used',
default='development',
choices=ENVIRONMENTS.keys(),
required=False,
dest='environment'
)
return parser.parse_args()
if __name__ == '__main__':
args = _parse_args()
# TODO set global env variable, so it't not injected in scripts
if args.command == 'runserver':
scripts.runserver(args.environment)
if args.command == 'load_analyzer':
scripts.load_analyzer(args.environment)
if args.command == 'shell':
scripts.shell(args.environment)
| [
"kovach.matej@gmail.com"
] | kovach.matej@gmail.com |
796734084a7cafb3a327ff60188177363716463b | e1a846a215ca119f47216d1d2121ac03fff4d493 | /All_Contents/Linear_Model/ridge.py | c9406522ec8669f48bdbe2ace8e625704a9fde87 | [] | no_license | wqainlpdl/ML_with_Python3 | f5af1437f62a7e853c23ac0f22da6c8b702f61a7 | 5e1987e36d6a58faf7b653e9aafa2ff2724b2580 | refs/heads/master | 2020-04-05T21:42:20.609733 | 2019-01-02T16:15:04 | 2019-01-02T16:15:04 | 157,230,062 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,504 | py | import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn import linear_model
from sklearn import model_selection
def load_data():
diabetes = datasets.load_diabetes()
return model_selection.train_test_split(diabetes.data,\
diabetes.target, test_size = 0.25, random_state = 0)
def test_Ridge(*data):
train_X, test_X, train_y, test_y = data
model = linear_model.Ridge()
model.fit(train_X, train_y)
print("Cofficients: %s, intercept: %.2f" % (model.coef_,\
model.intercept_))
test_y_hat = model.predict(test_X)
print("Residual sum of square:%.2f" % np.mean((test_y_hat - test_y) ** 2))
print("Score: %.2f" % model.score(test_X, test_y))
def test_Ridge_alpha(*data):
train_X, test_X, train_y, test_y = data
alphas = [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50, 100,\
200, 500, 1000]
scores = []
for i, alpha in enumerate(alphas):
model = linear_model.Ridge(alpha)
model.fit(train_X, train_y)
scores.append(model.score(test_X, test_y))
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.plot(alphas, scores)
ax.set_xlabel(r"$\alpha$")
ax.set_ylabel(r"score")
ax.set_xscale("log")
ax.set_title("Ridge")
plt.show()
def main():
train_X, test_X, train_y, test_y = load_data()
# test_Ridge(train_X, test_X, train_y, test_y)
test_Ridge_alpha(train_X, test_X, train_y, test_y)
if __name__ == "__main__":
main()
| [
"wqainlp@outlook.com"
] | wqainlp@outlook.com |
88dd48273dd8a6333efc2ba2c6b64715187334aa | 25098d32bb5257984c8ac13e67f839d544d66076 | /src/bench_paper_solo_all.py | b12bf431de4f36a66477d80df0a6bfe42dcdd9df | [
"MIT"
] | permissive | dpukhkaiev/compositional-system-for-hyperparameter-tuning | 74439b285c5e42ffcee0a5131bc46e52672a6513 | 8bd9306dd7c7c1d70cda0df9267f88949c2938eb | refs/heads/master | 2023-06-27T03:27:31.461192 | 2020-06-19T10:38:20 | 2020-06-19T10:38:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,719 | py | import warnings
from hypothesis.custom_gp_kernel import KERNEL_MAUNA, KERNEL_SIMPLE, KERNEL_GPML
from search import MOEActr
from generator import SamplesGenerator
from composite import ModelsUnion
from joblib import Parallel, delayed
from sklearn.neural_network import MLPRegressor
from sklearn.svm import SVR
from sklearn.dummy import DummyRegressor
from sklearn.linear_model import LinearRegression, RANSACRegressor
from sklearn.ensemble import GradientBoostingRegressor
import sklearn.gaussian_process as gp
from sklearn.model_selection import ParameterGrid
from sklearn.model_selection import train_test_split
from sklearn import clone
import numpy as np
import pandas as pd
import pygmo as pg
from pprint import pformat
import datetime
import time
import random
import string
import json
import os
import logging
import sobol_seq
logging.basicConfig(filename='./paper_surr_solo.log', level=logging.INFO)
warnings.filterwarnings('ignore')
def sobol_sample(bounds, n=1):
""" Sobol sampling
Args:
bounds (Tuple): Tuple with lower and higher bound for each feature in objective space.
Example: (([0., 0.]), ([2., 4.]))
n (int, optional): Sample count. Defaults to 1.
Returns:
List: Point from search space
"""
n_dim = len(bounds[0])
sb = sobol_seq.i4_sobol_generate(n_dim, n, 1)
diff = [r-l for l, r in zip(*bounds)]
left = [l for l, _ in zip(*bounds)]
return sb*diff+left
def make_nd_pop(pro, x, y):
nd_front = pg.fast_non_dominated_sorting(y)[0][0]
nd_x = np.array(x)[nd_front]
nd_y = np.array(y)[nd_front]
t_pop = pg.population(pro)
for i, p_vector in enumerate(nd_x):
t_pop.push_back(x=p_vector, f=nd_y[i])
return t_pop
def tuning_loop(pro, udp,
                X_init, y_init,
                surr_portfolio,
                eval_budget,
                n_pred,
                ):
    """Surrogate-assisted optimization loop for a single problem.

    Repeatedly: fit the first surrogate of the portfolio on all evaluated
    samples, optimize the surrogate with MOEActr, evaluate ``n_pred``
    proposed points on the true problem, and log per-iteration metrics
    (hypervolume, non-dominated front, spacing). Results are appended to
    CSV/pickle files under ``benchmark_results/``.

    Args:
        pro: pygmo problem (true fitness oracle; counts evaluations).
        udp: Underlying user-defined problem (used for ``p_distance`` if
            available).
        X_init: Initial decision vectors (may be empty).
        y_init: Fitness values matching ``X_init``.
        surr_portfolio: List of candidate surrogate estimators; only the
            first one is fitted each iteration.
        eval_budget: Total true-evaluation budget for the loop.
        n_pred: Number of points proposed (and evaluated) per iteration.

    Returns:
        Tuple[np.ndarray, np.ndarray]: All evaluated (X, y) samples
        accumulated in the generator.
    """
    loop_start = time.time()
    iter_solution = []
    gen = SamplesGenerator(pro)
    # Evaluate initial set
    if np.array(X_init).size > 0:
        gen.update(X_init, y_init)
    # Record iteration-0 statistics for the initial sample plan.
    pred = {}
    pred['iteration'] = 0
    pred['problem'] = pro.get_name()
    pred['objectives'] = pro.get_nobj()
    pred['feature_dim'] = pro.get_nx()
    pred['eval_budget'] = eval_budget
    try:
        # ref_point = pg.nadir(np.array(y_init))
        # Reference point for hypervolume: component-wise max of observed y.
        ref_point = np.amax(np.array(y_init), axis=0).tolist()
        pred['ref_point'] = ref_point
        nd_pop = make_nd_pop(pro, np.array(X_init), np.array(y_init))
        hypervolume = pg.hypervolume(nd_pop.get_f()
                                     ).compute(ref_point)
        pred['hypervolume'] = hypervolume or None
        pred["ndf_size"] = len(nd_pop.get_f())
        pred["i_fevals"] = pro.get_fevals()
        pred["pop_ndf_x"] = nd_pop.get_x().tolist()
        pred["pop_ndf_y"] = nd_pop.get_f().tolist()
        # p_distance exists only for some suites (e.g. ZDT/DTLZ) — guard it.
        score = udp.p_distance(nd_pop) if hasattr(
            udp, 'p_distance') else None
        pred["p_distance"] = score
        iter_solution.append(pred)
    except Exception as err:
        # An empty X_init lands here (np.amax on empty array raises).
        pred['error'] = "Init stat: {}".format(err)
        iter_solution.append(pred)
    i = 0
    n_iter = int(eval_budget/n_pred)
    while i < n_iter:
        i = i+1
        logging.info(
            "\n--- {} {}: {}".format(i, pro.get_name(), [type(model).__name__.lower() for model in surr_portfolio]))
        X, y = gen.return_X_y()
        # equalize the number of samples for your portfolio and static models
        X_train, X_test, y_train, y_test = train_test_split(
            X, y, test_size=0.25)
        # NOTE(review): only the FIRST portfolio member is fitted; X_test /
        # y_test are discarded — presumably kept for sample-count parity only.
        estimator = surr_portfolio[0].fit(X_train, y_train)
        if isinstance(estimator, ModelsUnion):
            models = estimator.models
        else:
            models = [estimator]
        # Optimize the fitted surrogate(s) with an evolutionary solver.
        solver = MOEActr(bounds=pro.get_bounds(),
                         pop_size=100, gen=100).fit(models)
        appr = solver.predict()
        # Randomly pick n_pred of the solver's approximation points to
        # evaluate on the true problem.
        propos = appr[np.random.choice(
            appr.shape[0], n_pred, replace=False), :]
        logging.info(propos)
        pred = {}
        pred['iteration'] = i
        pred['problem'] = pro.get_name()
        pred['objectives'] = pro.get_nobj()
        pred['feature_dim'] = pro.get_nx()
        pred['eval_budget'] = eval_budget
        pred['model name'] = [type(m).__name__.lower() for m in models]
        pred['n_pred'] = n_pred
        if np.array(X_init).size > 0:
            pred['x_init'] = X_init
            pred['y_init'] = y_init
            pred['init_dataset_size'] = np.array(X_init).size
        else:
            pred['x_init'] = None
            pred['y_init'] = None
        # Placeholders so every record has the same columns even on failure.
        pred['pred_x'] = propos
        pred['pred_fitness_y'] = ''
        pred["i_fevals"] = ''
        pred["pop_ndf_x"] = ''
        pred["pop_ndf_y"] = ''
        pred['p_distance'] = ''
        pred["i_time"] = ''
        # Update dataset
        pred_y = [pro.fitness(p).tolist() for p in propos]
        pred['pred_fitness_y'] = pred_y
        gen.update(list(propos), pred_y)
        # ---------------------- Hypervolume
        samples_x, samples_y = gen.return_X_y()
        if 0 in (np.array(samples_x).size, np.array(samples_y).size):
            continue
        try:
            # ref_point = pg.nadir(np.array(samples_y))
            ref_point = np.amax(np.array(samples_y), axis=0).tolist()
            pred['ref_point'] = ref_point
            nd_pop = make_nd_pop(pro, np.array(samples_x), np.array(samples_y))
            hypervolume = pg.hypervolume(nd_pop.get_f()
                                         ).compute(ref_point)
            pred['hypervolume'] = hypervolume or None
            pred["ndf_size"] = len(nd_pop.get_f())
            pred["i_fevals"] = pro.get_fevals()
            pred["pop_ndf_x"] = nd_pop.get_x().tolist()
            pred["pop_ndf_y"] = nd_pop.get_f().tolist()
            score = udp.p_distance(nd_pop) if hasattr(
                udp, 'p_distance') else None
            pred["p_distance"] = score
        except Exception as err:
            pred['error'] = "Hypervolume: {}".format(err)
            iter_solution.append(pred)
            continue
        # ---------------------- Spacing metric
        try:
            # Sample standard deviation of finite crowding distances
            # (boundary points have infinite distance and are excluded).
            dist = pg.crowding_distance(points=nd_pop.get_f())
            not_inf_dist = dist[np.isfinite(dist)]
            mean_dist = np.mean(not_inf_dist)
            space_m = (sum([(mean_dist - d)**2 for d in not_inf_dist]
                           )/(len(not_inf_dist)-1))**(1/2)
            pred["ndf_space"] = space_m
        except Exception as err:
            pred['error'] = "Spacing metric: {}".format(err)
            iter_solution.append(pred)
            continue
        # Cumulative wall-clock time since loop start (not per-iteration).
        pred["i_time"] = time.time() - loop_start
        iter_solution.append(pred)
    loop = pd.DataFrame(iter_solution)
    loop = loop.drop(['estimator'], axis=1, errors='ignore')
    loop = loop.assign(solver_id=id(solver))
    # File and path to folder
    loop_prefix = id(solver)
    rel_path = '/benchmark_results/{}{}_{}_paper_solo_loop.{}'.format(
        pro.get_name(), pro.get_nobj(), n_pred, loop_prefix)
    path = os.path.dirname(os.path.abspath(__file__))
    # Write results
    print(" Write loop csv file. Path:{}".format(path + rel_path + '.csv'))
    loop.to_csv(path + rel_path + '.csv', mode='a+', index=False)
    print(" Write loop pkl file. Path:{}".format(path + rel_path + '.pkl'))
    loop.to_pickle(path + rel_path + '.pkl')
    X, y = gen.return_X_y()
    return np.array(X), np.array(y)
def experiment(problem_name: str,
               prob_id: int,
               prob_dim: int,
               obj: int,
               pred_count: int,
               eval_budget: int,
               surr_port,
               # solver: str,
               # train_test_sp: float,
               # cv_split: int,
               # cv_threshold: str,
               # test_threshold: str,
               # solution_comb: str,
               start_set: float,
               seed=None):
    """Run one surrogate-assisted optimization scenario end to end.

    Builds a pygmo benchmark problem (WFG/ZDT/DTLZ), optionally draws an
    initial Sobol sample plan, executes the solo-surrogate tuning loop, and
    computes final quality metrics (hypervolume, spacing, p-distance) on the
    resulting non-dominated population.

    Args:
        problem_name: Benchmark suite: 'wfg', 'zdt' or 'dtlz'.
        prob_id: Problem index inside the suite.
        prob_dim: Decision-space dimensionality.
        obj: Number of objectives.
        pred_count: Points proposed per tuning-loop iteration.
        eval_budget: Total true-evaluation budget (initial plan included).
        surr_port: Surrogate portfolio (list of estimators).
        start_set: Initial Sobol sampling-plan size; 0 disables it.
        seed: Random seed recorded in the result (not applied here).

    Returns:
        dict: Scenario description plus metrics. On any failure the 'error'
        key names the failed stage and 'final' remains False.
    """
    result = {
        "problem_name": problem_name,
        "seed": seed,
        "problem_id": prob_id,
        "objectives": obj,
        "feature_dim": prob_dim,
        'surr_portfolio': surr_port,
        'eval_budget': eval_budget,
        "pred_count": pred_count,
        'start_set': start_set,
        "pop_ndf_x": '',
        "pop_ndf_f": '',
        "fevals": '',
        "evolve_time": '',
        "date": '',
        "p_distance": '',
        "hypervolume": '',
        "ndf_space": '',
        "ndf_size": '',
        'error': '',
        'final': False
    }
    # ---------------------- Initialize problem
    try:
        # Use '==' for string comparison: 'is' tests object identity and only
        # worked by CPython interning accident (SyntaxWarning on 3.8+).
        if problem_name == 'wfg':
            udp = pg.wfg(prob_id=prob_id, dim_dvs=prob_dim,
                         dim_obj=obj, dim_k=obj-1)
        elif problem_name == 'zdt':
            udp = pg.zdt(prob_id=prob_id, param=prob_dim)
        elif problem_name == 'dtlz':
            udp = pg.dtlz(prob_id=prob_id, dim=prob_dim, fdim=obj)
        else:
            # Previously an unknown name fell through to a cryptic NameError;
            # fail explicitly instead (still caught and recorded below).
            raise ValueError("Unknown problem suite: {}".format(problem_name))
        prob = pg.problem(udp)
    except Exception as err:
        result['error'] = "Init problem: {}".format(err)
        return result
    # ---------------------- Initial sample plan
    try:
        start_x = []
        start_f = []
        if start_set > 0:
            # The initial plan consumes part of the evaluation budget.
            eval_budget = eval_budget - start_set
            start_x = sobol_sample(prob.get_bounds(), start_set)
            start_f = [prob.fitness(x).tolist() for x in start_x]
        else:
            pass
    except Exception as err:
        result['error'] = "Init sample plan: {}".format(err)
        return result
    # ---------------------- Solo model loop
    evolve_start = time.time()
    try:
        x_loop, y_loop = tuning_loop(prob, udp,
                                     start_x, start_f,
                                     surr_port,
                                     eval_budget,
                                     pred_count,
                                     )
        result["fevals"] = prob.get_fevals()
        nd_pop = make_nd_pop(prob, x_loop, y_loop)
        # p_distance exists only for some suites (e.g. ZDT/DTLZ) — guard it.
        score = udp.p_distance(nd_pop) if hasattr(udp, 'p_distance') else None
        result["p_distance"] = score or None
    except Exception as err:
        result['error'] = "Solo loop: {}".format(err)
        return result
    # ---------------------- Hypervolume
    try:
        # ref_point = pg.nadir(y_loop)
        # Reference point: component-wise max over all evaluated fitnesses.
        ref_point = np.amax(y_loop, axis=0).tolist()
        hypervolume = pg.hypervolume(nd_pop.get_f()
                                     ).compute(ref_point)
        result['hypervolume'] = hypervolume or None
    except Exception as err:
        result['error'] = "Hypervolume: {}".format(err)
        return result
    # ---------------------- Spacing metric
    # The spacing metric aims at assessing the spread (distribution)
    # of vectors throughout the set of nondominated solutions.
    try:
        dist = pg.crowding_distance(points=nd_pop.get_f())
        not_inf_dist = dist[np.isfinite(dist)]
        mean_dist = np.mean(not_inf_dist)
        space_m = (sum([(mean_dist - d)**2 for d in not_inf_dist]
                       )/(len(not_inf_dist)-1))**(1/2)
        result["ndf_space"] = space_m
    except Exception as err:
        result['error'] = "Spacing metric: {}".format(err)
        return result
    # ---------------------- Write results
    try:
        t_end = time.time()
        result["pop_ndf_x"] = nd_pop.get_x().tolist()
        result["pop_ndf_f"] = nd_pop.get_f().tolist()
        result["ndf_size"] = len(nd_pop.get_f())
        result["evolve_time"] = t_end - evolve_start
        result["date"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        result["final"] = True
    except Exception as err:
        result['error'] = "Write results: {}".format(err)
    return result
if __name__ == "__main__":
logging.info(
"----\n Start paper benchamrk Solo \n ---- ")
print("Start paper benchamrk Solo ")
# 1
# tea_pot = TpotWrp(generations=2, population_size=10)
# 2
gp_mauna = gp.GaussianProcessRegressor(
kernel=KERNEL_MAUNA, n_restarts_optimizer=20)
# 3
grad_uni = ModelsUnion(
models=[GradientBoostingRegressor(n_estimators=500)],
split_y=True)
# 4
lin_uni = ModelsUnion(models=[LinearRegression()], split_y=True)
# 5
svr_rbf = SVR(kernel='rbf', C=100, gamma=0.1, epsilon=.1)
svr_uni = ModelsUnion(models=[svr_rbf], split_y=True)
# 6
mlp_reg = MLPRegressor(hidden_layer_sizes=(
20, 60, 20), activation='relu', solver='lbfgs')
mlp_uni = ModelsUnion(models=[mlp_reg], split_y=True)
custom_lin_gp = ModelsUnion(models=[SVR(kernel='linear', C=100, gamma='auto'), gp.GaussianProcessRegressor(
kernel=KERNEL_MAUNA, n_restarts_optimizer=20)], split_y=True)
custom_lin_grad = ModelsUnion(models=[SVR(
kernel='linear', C=100, gamma='auto'), GradientBoostingRegressor(n_estimators=500)], split_y=True)
SEED = random.randint(1, 1000)
test_set = [
# ============================================================================================= WFG
# 4. ndf : 100SamplingSize Sobol: CV threshold -Inf [LINEAR,LINEAR]
# 5. ndf: 100SamplingSize Sobol: CV threshold - Inf[GPR, GPR]
{
'problem_name': ['wfg'],
'prob_id': [2, 3],
'prob_dim': [3],
'obj': [2],
'eval_budget': [1000],
'pred_count': [10],
'surr_port':[
[svr_uni],
[LinearRegression()],
[ModelsUnion(models=[LinearRegression()],
split_y=True)],
[RANSACRegressor()],
[ModelsUnion(models=[RANSACRegressor()],
split_y=True)],
[ModelsUnion(models=[gp.GaussianProcessRegressor(
kernel=KERNEL_MAUNA, n_restarts_optimizer=20)], split_y=True)],
[gp.GaussianProcessRegressor(
kernel=KERNEL_MAUNA, n_restarts_optimizer=20)]
],
'start_set': [100],
'seed': [SEED]
},
{
'problem_name': ['wfg'],
'prob_id': [4, 5, 6, 7, 8, 9],
'prob_dim': [2],
'obj': [2],
'eval_budget': [1000],
'pred_count': [10],
'surr_port':[
[svr_uni],
[LinearRegression()],
[ModelsUnion(models=[LinearRegression()], split_y=True)],
[RANSACRegressor()],
[ModelsUnion(models=[RANSACRegressor()], split_y=True)],
[ModelsUnion(models=[gp.GaussianProcessRegressor(
kernel=KERNEL_MAUNA, n_restarts_optimizer=20)], split_y=True)],
[gp.GaussianProcessRegressor(
kernel=KERNEL_MAUNA, n_restarts_optimizer=20)]
],
'start_set': [100],
'seed': [SEED]
},
# ============================================================================================= ZDT
# 4. ndf : 100SamplingSize Sobol: CV threshold -Inf [LINEAR,LINEAR]
# 5. ndf: 100SamplingSize Sobol: CV threshold - Inf[GPR, GPR]
{
'problem_name': ['zdt'],
'prob_id': [1, 2, 3, 4, 6],
'prob_dim': [2],
'obj': [2],
'eval_budget': [1000],
'pred_count': [10],
'surr_port':[
[svr_uni],
[LinearRegression()],
[ModelsUnion(models=[LinearRegression()], split_y=True)],
[RANSACRegressor()],
[ModelsUnion(models=[RANSACRegressor()], split_y=True)],
[ModelsUnion(models=[gp.GaussianProcessRegressor(
kernel=KERNEL_MAUNA, n_restarts_optimizer=20)], split_y=True)],
[gp.GaussianProcessRegressor(
kernel=KERNEL_MAUNA, n_restarts_optimizer=20)]
],
'start_set': [100],
'seed': [SEED]
},
# ============================================================================================= DTLZ
# 4. ndf : 100SamplingSize Sobol: CV threshold -Inf [LINEAR,LINEAR]
# 5. ndf: 100SamplingSize Sobol: CV threshold - Inf[GPR, GPR]
{
'problem_name': ['dtlz'],
'prob_id': [1, 2, 3, 4, 5, 6, 7],
'prob_dim': [3],
'obj': [2],
'eval_budget': [1000],
'pred_count': [10],
'surr_port':[
[svr_uni],
[LinearRegression()],
[ModelsUnion(models=[LinearRegression()], split_y=True)],
[RANSACRegressor()],
[ModelsUnion(models=[RANSACRegressor()], split_y=True)],
[ModelsUnion(models=[gp_mauna], split_y=True)],
[gp_mauna]
],
'start_set': [100],
'seed': [SEED]
},
]
logging.info(pformat(test_set))
i_total = 0
with Parallel(prefer='threads') as parallel:
for param_grid in test_set:
grid = ParameterGrid(param_grid)
total_comb = len(grid)
logging.info(
"\n Total combinations in round: {}".format(total_comb))
res = parallel(delayed(experiment)(**p) for p in grid)
# File and path to folder
prefix = ''.join(random.choices(
string.ascii_lowercase + string.digits, k=5))
file_name = '/benchmark_results/paper_solo_on_{}_{}.{}'.format(
param_grid['problem_name'][0], SEED, prefix)
path = os.path.dirname(os.path.abspath(__file__)) + file_name
# Write results
# logging.info("\n Total evaluations: {}".format(i_total))
logging.info(" Write solo sumary. Path:{} \n".format(path+'.csv'))
res_table = pd.DataFrame(res)
res_table.to_csv(path + '.csv', mode='a+', index=False)
print(" Write solo sumary. Path:{}".format(path + '.pkl'))
res_table.to_pickle(path + '.pkl')
print("---- Finish ----")
| [
"valavanca@gmail.com"
] | valavanca@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.