blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
616647cdf2be27e724c65198518eae2d86e18a24 | c8da7907833ed9640c7f5c8b53421e260d854563 | /day5/day5.py | c37f748236b988e82895f68e28cb990f6f9e9aba | [] | no_license | alsonfx/AoC2020 | eb302214634ea0f9567aa4112808dc48f468d996 | a971eabce7465a412451daeb973c6fca11b5ed75 | refs/heads/main | 2023-05-14T15:33:16.670563 | 2020-12-08T03:13:54 | 2020-12-08T03:13:54 | 319,510,696 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 945 | py | zones = open('zones.txt', 'r')
max_id = 0
my_seat = []
# Each boarding pass line is a binary partition of 128 rows x 8 columns:
# F/B keep the lower/upper half of the rows, L/R the lower/upper columns.
for line in zones:
    seat_id = 0
    rows = [*range(128)]
    columns = [*range(8)]
    for letter in line:
        if letter == 'F':
            # front half of the remaining rows
            mid_row = len(rows) // 2
            rows = rows[:mid_row]
        elif letter == 'B':
            # back half of the remaining rows
            mid_row = len(rows) // 2
            rows = rows[mid_row:]
        elif letter == 'L':
            # left half of the remaining columns
            mid_col = len(columns) // 2
            columns = columns[:mid_col]
        elif letter == 'R':
            # right half of the remaining columns
            mid_col = len(columns) // 2
            columns = columns[mid_col:]
        # Recomputed every letter; the value after the last letter uses the
        # fully narrowed row/column lists (other characters, e.g. '\n', are
        # ignored by the branches above).
        seat_id = rows[0] * 8 + columns[0]
    if seat_id > max_id:
        max_id = seat_id
    my_seat.append(seat_id)
zones.close()
my_seat.sort()
my_id = 0
# My seat is the single gap in the sorted list of occupied seat ids.
for x in range(len(my_seat) - 1):
    if my_seat[x + 1] - my_seat[x] != 1:
        my_id = my_seat[x] + 1
        break
print(f"The highest Seat ID is: {max_id}")
print(f"My seat on the plane is: {my_id}")
| [
"49161570+alsonfx@users.noreply.github.com"
] | 49161570+alsonfx@users.noreply.github.com |
38912cc3fa092bccfa3bccf43f060efc5ae72b50 | 5d3e77854877be580529e3cb5aaff1da3d19958a | /0x0F-python-object_relational_mapping/100-relationship_states_cities.py | 4805077b20c4592541c20fd017d77050841a8b63 | [] | no_license | afinesami/holbertonschool-higher_level_programming | a504a06cdb485a98a929bed4b13dda5d5286f0d6 | 13c9cec136665d68fb44157206b43a26e5243bd3 | refs/heads/master | 2020-09-28T22:42:33.081237 | 2020-06-22T12:50:01 | 2020-06-22T12:50:01 | 226,883,016 | 11 | 26 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | #!/usr/bin/python3
"""
All states via SQLAlchemy
"""
from sys import argv
from relationship_state import Base, State
from relationship_city import City
from sqlalchemy import (create_engine)
from sqlalchemy.orm import Session
if __name__ == "__main__":
    # Connection credentials come from the command line:
    # argv[1] = user, argv[2] = password, argv[3] = database name.
    engine = create_engine('mysql+mysqldb://{}:{}@localhost/{}'.
                           format(argv[1], argv[2], argv[3]),
                           pool_pre_ping=True)
    # Create the tables declared on Base's metadata if they do not exist yet.
    Base.metadata.create_all(engine)
    session = Session(engine)
    # Build one State with one related City; appending to the relationship
    # lets SQLAlchemy cascade the insert of both rows on commit.
    new_state = State(name='California')
    new_city = City(name='San Francisco')
    new_state.cities.append(new_city)
    session.add(new_state)
    session.commit()
    session.close()
| [
"samibenomar84@gmail.com"
] | samibenomar84@gmail.com |
569e5135fac1555cf0fb518269b99b2c71661cc5 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_9477.py | fa57d72004d0e3842548a0d58e2499f639d33ab5 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | # Django Testing: Using a login decorator for test cases
class SimpleTest(TestCase):
    """Test case whose HTTP client is logged in before every test method."""
    def setUp(self):
        # NOTE(review): assumes a 'foo'/'bar' user exists in the test
        # database; login()'s boolean result is ignored, so a failed login
        # would pass silently -- confirm fixtures create this user.
        self.client.login(username='foo', password='bar')
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
4597d83584788cf695d87c1e2a496c54a0755578 | b3f125b5a3039fdda872365d5ddeed2947a0a0aa | /data_process/save_shstock_list.py | e21659f85d192fe8bf00d7f61c9c8c82888c72a5 | [
"MIT"
] | permissive | dxcv/vnpyStockEngine | f7faa77aebfc813f7a82fecddd4822dadcd0e112 | 7543c7ed5bae70b5b87342d9317517a12444b673 | refs/heads/master | 2020-06-21T21:26:31.405461 | 2019-03-06T07:16:00 | 2019-03-06T07:16:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 890 | py | import pandas as pd
import pickle
import os
from data_process.base_define import shfile_address_base
def save_data(date):
    """Filter the day's SH market files down to tradable SSE constituents.

    For every file under ``shfile_address_base.format(date)``: read it,
    keep tradable Shanghai stocks, and write the filtered frame to
    ./sh_temp/.  Finally pickle the stock list to ./stock_list.pkl.

    :param date: date token substituted into ``shfile_address_base``.
    :return: list of stock codes from the *last* file processed.
    """
    count = 0
    shfile_list = os.listdir(shfile_address_base.format(date))
    for file_name in shfile_list:
        count += 1
        file_address = shfile_address_base.format(date) + os.sep + file_name
        df = pd.read_csv(file_address)
        # Keep tradable SSE-index constituents: trade status 'T111' and
        # security codes in the (SH600000, SH700000] range.
        df_copy = df[(df['交易状态'] == 'T111') & (df['证券代码'] > 'SH600000 ') & (df['证券代码'] <= 'SH700000 ')]
        stock_list = list(set(df_copy['证券代码']))
        df_copy.to_csv('./sh_temp/trade_data{}.csv'.format(count))
    # NOTE(review): only the last iteration's stock_list is pickled, and an
    # empty directory raises NameError here -- confirm this is intended.
    with open('./stock_list.pkl', 'wb') as f:
        pickle.dump(stock_list, f)
    return stock_list
#
# if __name__ == '__main__':
# from data_process.base_define import date_list
# save_data(date_list[0])
| [
"18665304480@163.com"
] | 18665304480@163.com |
dc669df5d5ea84b97b7364cf7fe66b5c289a477b | 6fa7045a2b9a14fc371e65ea5a843571f81c643e | /driver.py | 4bbce18f7489e1c5ada2057ab46e0f58e6c3df3a | [
"MIT"
] | permissive | MileyCao/ToyLanguage | a406451d9181f1d5d48081f0a8da12e23c109e04 | b565d10fbdb922c27a61f5f75e414bc669bfbbd6 | refs/heads/master | 2020-08-13T01:58:20.438939 | 2018-08-20T00:10:57 | 2018-08-20T00:10:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,832 | py | """
This file is the driver file, it provides a command-line frontend
Usage: python3 driver.py <file> [Option]
Option:
--dump-ast: dumps the AST
--dump-assembly: dumps the assembly code for the internal virtual machine
--help: print the help message
If no option specified, it will run the program by default
"""
import argparse
from codegen import *
if __name__ == '__main__':
    # Command-line front end: parse options, build the AST, run semantic
    # checks, then either dump diagnostics or generate VM code and execute.
    parser = argparse.ArgumentParser(
        description='Toy language interpreter',
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('code_file',
                        type=argparse.FileType('r'))
    parser.add_argument('--dump-ast',
                        help='dump the AST',
                        action='store_true',
                        dest='ast_dump')
    parser.add_argument('--dump-assembly',
                        help='dump assembly for the internal virtual machine',
                        action='store_true',
                        dest='assembly_dump')
    args = parser.parse_args()
    lex = Lexer(args.code_file)
    # NOTE(review): 'parser' is rebound here from the ArgumentParser to the
    # language Parser; argparse is no longer needed at this point.
    parser = Parser(lex)
    try:
        ast = parser.parse_program()
        # Semantic checks must pass before any code generation.
        check_function_definition(ast)
        check_symbol_definition(ast, symbol_table)
        # --dump-ast alone: print the tree and stop without generating code.
        if args.ast_dump and not args.assembly_dump:
            print(ast)
            exit(0)
        push_instruction('main') # the entry point is the main function
        push_instruction(gen_iexit()) # when the main function returns, then the program will exit
        generate_code(ast, symbol_table)
        link_function()
        if args.ast_dump:
            print(ast)
        if args.assembly_dump:
            print_text()
        # No dump flags: actually run the program on the internal VM.
        if not args.ast_dump and not args.assembly_dump:
            run_vm()
    except ValueError as err:
        # Front-end errors (lexing/parsing/semantic) are reported, not raised.
        print(err)
        exit(0)
    exit(0)
| [
"1024842937@qq.com"
] | 1024842937@qq.com |
39ec26d2bdaef2cebe504d4ad940fe7e5fc8bea9 | dedcb694d8d8f53fdbf047334db1ed23146be4ee | /todo/kurz.py | 7e29fd9ab27c7212ed4c4807240a0beafc0fdbbb | [] | no_license | jozo/pythonbrno | a436a7942ddd124845bdc0ddb2b4e9462d32805a | 88c77b6f48931d001de16b611ccec4bb3ebb29fa | refs/heads/master | 2021-06-15T14:49:22.961764 | 2017-03-24T08:48:02 | 2017-03-24T08:48:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,073 | py | from flask import request, url_for
from flask.ext.api import FlaskAPI, status, exceptions
app = FlaskAPI(__name__)
import sqlite3
# notes = {
# 0: 'do the shopping',
# 1: 'build the codez',
# 2: 'paint the door',
# }
def note_repr(key):
    """Return the dict representation (url + text) of the note with id *key*.

    NOTE(review): raises TypeError if no row with that id exists; callers
    must pass a valid key.
    """
    conn = sqlite3.connect('test.db')
    try:
        cur = conn.cursor()
        # Parameterized query instead of string concatenation: SQL-injection
        # hygiene, even though the routes only pass ints here.
        cur.execute("SELECT * FROM todos WHERE Id = ?", (key,))
        row = cur.fetchone()
        return {
            'url': request.host_url.rstrip('/') + url_for('notes_detail', key=key),
            'text': row[1]
        }
    finally:
        # The original never closed the connection (one leak per request).
        conn.close()
@app.route("/", methods=['GET', 'POST'])
def notes_list():
    """
    List or create notes.

    POST: insert ``request.data['text']`` as a new todo and return its
    representation with 201 Created.
    GET: return representations of every stored todo, ordered by id.
    """
    conn = sqlite3.connect('test.db')
    try:
        cur = conn.cursor()
        if request.method == 'POST':
            note = str(request.data.get('text', ''))
            cur.execute("INSERT INTO todos VALUES (?, ?)", (None, note,))
            # lastrowid is the id SQLite assigned to the row just inserted;
            # the original "SELECT ... ORDER BY Id DESC LIMIT 1" was racy
            # under concurrent inserts.
            idx = cur.lastrowid
            conn.commit()
            return note_repr(idx), status.HTTP_201_CREATED
        # request.method == 'GET'
        cur.execute("SELECT Id FROM todos")
        keys = [r[0] for r in cur.fetchall()]
        return [note_repr(idx) for idx in sorted(keys)]
    finally:
        # The original leaked the connection on the POST path; always close.
        conn.close()
@app.route("/<int:key>/", methods=['GET', 'PUT', 'DELETE'])
def notes_detail(key):
    """
    Retrieve, update or delete note instances.

    BUGFIX(review): the original body used a module-level ``notes`` dict
    that is commented out above, so every request raised NameError.
    Reimplemented against the same sqlite table the other views use.
    """
    conn = sqlite3.connect('test.db')
    try:
        cur = conn.cursor()
        if request.method == 'PUT':
            note = str(request.data.get('text', ''))
            cur.execute("UPDATE todos SET todo_text = ? WHERE Id = ?", (note, key))
            conn.commit()
            return note_repr(key)
        elif request.method == 'DELETE':
            cur.execute("DELETE FROM todos WHERE Id = ?", (key,))
            conn.commit()
            return '', status.HTTP_204_NO_CONTENT
        # request.method == 'GET'
        cur.execute("SELECT Id FROM todos WHERE Id = ?", (key,))
        if cur.fetchone() is None:
            raise exceptions.NotFound()
        return note_repr(key)
    finally:
        conn.close()
if __name__ == "__main__":
    # Create the backing table up front so request handlers can rely on it.
    conn = sqlite3.connect('test.db')
    cur = conn.cursor()
    cur.execute("CREATE TABLE IF NOT EXISTS todos (Id INTEGER PRIMARY KEY AUTOINCREMENT, todo_text TEXT)")
    # app.run() blocks until the server stops, so the cleanup below only
    # executes on shutdown.
    app.run(debug=True)
    cur.close()
    conn.close()
| [
"fadawar@gmail.com"
] | fadawar@gmail.com |
52b1286ab48d460abebb87719f7d65cef1e7009d | c62a07c8051d6106717863651004c8186a0e3027 | /logic.py | 84c2678f81cea7f404f2c5b6faddd8b4b1335110 | [] | no_license | isakura313/third_22 | bf47bef5914ac5debeb33a36dad39566181ed6fb | c43911d6b73f638894d14f757a0ec5462e9e8005 | refs/heads/master | 2022-04-18T14:27:00.231299 | 2020-04-22T17:01:23 | 2020-04-22T17:01:23 | 257,967,272 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | role = input("Введите вашу роль в проекте: ")
age = input("Введите ваш возраст: ")
# input() returns a string; convert for the numeric comparisons below
# (raises ValueError on non-integer input).
age = int(age)
# Admins strictly older than 18 get full rights.
if role == "admin" and age > 18:
    print("У вас есть все права")
# Regular users strictly older than 16 get partial rights.
elif role == "user" and age> 16:
    print("У вас на этом проекте есть некоторые права")
else:
    print(" этот сервис закрыт на карантин")
| [
"isakura313@gmail.com"
] | isakura313@gmail.com |
b9c6ad69d505a67cc6c868578c045ddeed34fda6 | fe108bfc4ac5206949d8ae9f787acf2e46b78bed | /code/32.3 从上到下按行打印二叉树.py | ca141d6c5c6d8f62df1011020b73c430182bddca | [] | no_license | Lmyxxn/JZoffer | 88dc8ea71a72e4f1b6806de4e7af3323d6e12f7e | 12b8889eda79a0af72b9326982c597e807d1ccea | refs/heads/master | 2022-04-18T06:05:19.209089 | 2020-04-13T16:00:53 | 2020-04-13T16:00:53 | 255,370,848 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,362 | py | # Definition for a binary tree node.
class TreeNode:
    """Plain binary-tree node: a value plus left/right child links."""
    def __init__(self, x):
        # Children start out absent; callers wire them up explicitly.
        self.val, self.left, self.right = x, None, None
###### Print the binary tree level by level, top to bottom
class Solution:
    """Variant 1: collect node values level by level, top to bottom."""
    def levelOrderBottom(self, root: TreeNode) -> List[List[int]]:
        """BFS the tree and return one list of values per level (top-down).

        NOTE(review): ``TreeNode`` and ``List`` must already be in scope
        when this is defined (the judge supplies them); they are not
        imported here.
        """
        if not root: return []
        queue = [root]
        res = [[root.val]]
        while queue:
            temp = []
            for node in queue:
                if node.left:
                    temp.append(node.left)
                if node.right:
                    temp.append(node.right)
            # Record the next level's values (empty on the final iteration).
            res.append([node.val for node in temp])
            queue = temp
        # Drop the trailing empty level appended on the last pass.
        return res[:-1]
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
###### Print the binary tree level by level, bottom to top
class Solution:
    """Variant 2: collect node values level by level, bottom to top."""
    def levelOrderBottom(self, root: TreeNode) -> List[List[int]]:
        """BFS the tree and return one list of values per level (bottom-up).

        NOTE(review): ``TreeNode`` and ``List`` must already be in scope
        when this is defined (the judge supplies them); they are not
        imported here.
        """
        if not root: return []
        queue = [root]
        res = [[root.val]]
        while queue:
            temp = []
            for node in queue:
                if node.left:
                    temp.append(node.left)
                if node.right:
                    temp.append(node.right)
            # Record the next level's values (empty on the final iteration).
            res.append([node.val for node in temp])
            queue = temp
        # Reverse the levels; [1:] then drops the empty level that was
        # appended on the last pass (now at the front).
        return res[::-1][1:]
| [
"noreply@github.com"
] | Lmyxxn.noreply@github.com |
e9c1e88cba3c232330a1faccfd36c0eabcd038bf | 7757c2a7d49562e381a2aff5819e590fa77eee84 | /TiendaOnline/settings.py | c132e6ac19fae5d2b1c75076e6e2786935bf23b1 | [] | no_license | eduardobrunner/TestAppDjango | 22737c3427d7c4e7901c27f9bc8e3904b57c775c | de4fd29e5afebdcbd1e4b0282e64ad3b68e464f4 | refs/heads/main | 2023-07-07T03:11:57.831023 | 2021-08-16T15:15:16 | 2021-08-16T15:15:16 | 396,839,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,279 | py | """
Django settings for TiendaOnline project.
Generated by 'django-admin startproject' using Django 3.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-agy#e7v(^#r^m+f4_3$0!ayu$z*@zdxe1o8j#c-lg5-j4o4hxl'
# NOTE(review): this key is committed to the repository; rotate it and load
# it from the environment before any production deployment.
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty is acceptable while DEBUG is True; production must list served hosts.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'gestionPedidos',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'TiendaOnline.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'TiendaOnline.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"eduardobrunner94@gmail.com"
] | eduardobrunner94@gmail.com |
4f59061967f5c5c68703686d4d4f94c877768bfb | 275341122022985c9e5b4020b1d0541b0da9a1b5 | /simplifyPath.py | f1d4cab781ddfb2190d4fa66b5d135c128312dad | [] | no_license | wxy325/leetCodePython | 54135677591a05e9342d983935f58a53bcde7750 | 1ce2e24a0065f83b5e5e2ddbdef138445b357b8f | refs/heads/master | 2021-01-18T23:31:22.854424 | 2016-06-10T17:04:17 | 2016-06-10T17:04:17 | 18,469,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 896 | py | __author__ = 'wxy325'
class Solution:
    """LeetCode 71: canonicalize a Unix-style absolute path."""
    def simplifyPath(self, path):
        """Collapse repeated slashes, '.' and '..' components of *path*.

        :param path: a string such as '/a/./b//../c'
        :return: the simplified absolute path, '/' at minimum
        """
        stack = []
        for part in path.split('/'):
            if part == '..':
                # Go up one level, but never above the root.
                if stack:
                    stack.pop()
            elif part and part != '.':
                stack.append(part)
        # An empty stack yields '/' because '/'.join([]) is ''.
        return '/' + '/'.join(stack)
if __name__ == '__main__':
    # Smoke tests covering duplicate slashes, '..' above root, '.' removal
    # and parent-directory collapsing.
    s = Solution()
    assert s.simplifyPath('//') == '/'
    assert s.simplifyPath('/../') == '/'
    assert s.simplifyPath('/a/..') == '/'
    assert s.simplifyPath('/a/.') == '/a'
    assert s.simplifyPath('/a/../c') == '/c'
"wxy325@qq.com"
] | wxy325@qq.com |
f2b3d2056573b279c7c6d398884534fc2b8b9c75 | 83b24f0895a2db3c335753f43a5b1bc8fac8440a | /Baekjoon1000.py | 0abf028a96827d9fa3a3817c829f0e27a74c37ac | [] | no_license | Sorune/BaekJoon | 78ac2d317abc17b1ac842181fc83fadb3f237a39 | 0c264132eea66eb46143f166d3fb776a0a774d41 | refs/heads/master | 2023-02-14T14:08:34.345087 | 2021-01-13T04:30:24 | 2021-01-13T04:30:24 | 327,553,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | IntA = input()
# Parse "A B" and print A+B.  Splitting on whitespace generalizes beyond the
# single-digit operands the original character-index version assumed.
a, b = IntA.split()
result = int(a) + int(b)
print(result)
"zksktldl125@naver.com"
] | zksktldl125@naver.com |
1df1aee8ad792efd4d9263211983fde1e9a21686 | 7a0810569ea1bf5da242747051e6abb271667c0f | /djangofirstexample/manage.py | 9552d57fba9f7e3b5d0c1694553b151e23cfa6e8 | [] | no_license | saifu-tech/django_crud_ | 1e8020e9e6ea878354ab96782b9cd25ffdebf97b | 7de5c9d8b10aeba030e425ab4cbeed14b1d27b89 | refs/heads/master | 2020-05-18T23:17:53.777832 | 2019-05-03T06:40:02 | 2019-05-03T06:40:02 | 184,708,771 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 638 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django administrative tasks from the command line."""
    # Fall back to this project's settings unless the caller already set one.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djangofirstexample.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the management command named on the command line.
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| [
"saifudeen2638@gmail.com"
] | saifudeen2638@gmail.com |
7953194e08d87e2cc8bd5e2a743dc383d4d6458b | fc3c9d2143aecedce191bb91dbd01babe7f6d40b | /tensorpack/callbacks/dump.py | ef62833b31118c6a9f00e80eb5e6c9216d57a65e | [
"Apache-2.0"
] | permissive | rahulbprakash/tensorpack | 0ee10de245f486d17a252354833c98dd713fd6e6 | b2ec42a8d152760498aa911818d50b01e408bb43 | refs/heads/master | 2020-12-30T19:12:08.800662 | 2016-06-09T23:03:37 | 2016-06-09T23:03:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,077 | py | # -*- coding: UTF-8 -*-
# File: dump.py
# Author: Yuxin Wu <ppwwyyxx@gmail.com>
import os
import scipy.misc
from scipy.misc import imsave
import numpy as np
from .base import Callback
from ..utils import logger
from ..tfutils import get_op_var_name
__all__ = ['DumpParamAsImage']
class DumpParamAsImage(Callback):
    """
    Dump a variable to image(s) after every epoch.
    """
    def __init__(self, var_name, prefix=None, map_func=None, scale=255, clip=False):
        """
        :param var_name: the name of the variable.
        :param prefix: the filename prefix for saved images. Default is the op name.
        :param map_func: map the value of the variable to an image or list of
            images of shape [h, w] or [h, w, c]. If None, will use identity
        :param scale: a multiplier on pixel values, applied after map_func. default to 255
        :param clip: whether to clip the result to [0, 255]
        """
        op_name, self.var_name = get_op_var_name(var_name)
        self.func = map_func
        if prefix is None:
            self.prefix = op_name
        else:
            self.prefix = prefix
        self.log_dir = logger.LOG_DIR
        self.scale = scale
        self.clip = clip

    def _before_train(self):
        # Resolve the tensor once the graph exists.
        # TODO might not work for multiGPU?
        self.var = self.graph.get_tensor_by_name(self.var_name)

    def _trigger_epoch(self):
        # Fetch the current value, optionally map it, and dump each image.
        val = self.trainer.sess.run(self.var)
        if self.func is not None:
            val = self.func(val)
        if isinstance(val, list):
            for idx, im in enumerate(val):
                self._dump_image(im, idx)
        else:
            self._dump_image(val)

    def _dump_image(self, im, idx=None):
        """Write one [h, w] or [h, w, c] image; *idx* tags images from a list."""
        assert im.ndim in [2, 3], str(im.ndim)
        fname = os.path.join(
            self.log_dir,
            self.prefix + '-ep{:03d}{}.png'.format(
                # BUGFIX: 'if idx' dropped the suffix for idx == 0, making the
                # first image of a list collide with the non-list filename.
                # Test explicitly against None instead of truthiness.
                self.epoch_num, '-' + str(idx) if idx is not None else ''))
        res = im * self.scale
        if self.clip:
            res = np.clip(res, 0, 255)
        imsave(fname, res.astype('uint8'))
| [
"ppwwyyxxc@gmail.com"
] | ppwwyyxxc@gmail.com |
b13a8e027de105c667d522cc548f74f489d5db8a | ed62feadf4e8735854e3d3d55f3a06ece56994ed | /Chapter 1/Practice exercise 1/2 Number of seconds in a year.py | af64871617419deccd56896a802b253200ab7b70 | [] | no_license | kitsmart/pythonbooklet | e903ce82a2aa7c03e19a273c3969d238aeb8fe79 | 9eba2df0174a03551837a2be294269cfbbfa3ef4 | refs/heads/master | 2023-02-09T10:32:23.134185 | 2017-11-19T20:23:39 | 2017-11-19T20:23:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | seconds = 1
# BUGFIX: the original computed seconds * 60 * 24 * 365, which is the number
# of MINUTES in a year.  Seconds/year = 60 s/min * 60 min/h * 24 h/day * 365.
noOfin1year = seconds * 60 * 60 * 24 * 365
print("Number of seconds in a year:", noOfin1year)
| [
"atomlizard@gmail.com"
] | atomlizard@gmail.com |
47c54f8ce55b1827413426a314d5845705c18bbf | 391b3db590a1a8e991c5d3bfe1911bd43a8aeb2a | /debug-scripts/python_listener.py | a6f1ddc3fa008156048ebdf00d37979e3d04981b | [] | no_license | GRIDAPPSD/gridappsd-docker | 4b356a32043209097bcb274a34eff8ff9e3e7d59 | 226d03c8aa7a0e986099feea3dd080f3d6a1b836 | refs/heads/main | 2023-09-01T06:33:43.204690 | 2023-08-04T23:41:20 | 2023-08-04T23:41:20 | 109,312,823 | 9 | 11 | null | 2023-07-17T21:55:05 | 2017-11-02T20:00:42 | Shell | UTF-8 | Python | false | false | 3,646 | py | import argparse
import json
from fncs import fncs
class PythonListener(object):
def __init__(self, broker_port, sim_id, sim_length):
self.broker_location = "tcp://localhost:{}".format(broker_port)
self.subscription_topic = "{}/fncs_output".format(sim_id)
self.sim_length = int(sim_length)
self.sim_id = str(sim_id)
def register_with_fncs(self):
fncs_configuration = {
"name" : "PythonListener{}".format(self.sim_id),
"time_delta" : "1s",
"broker" : self.broker_location,
"values" : {
"{}".format(self.sim_id) : {
"topic" : self.subscription_topic,
"default" : "{}",
"type" : "JSON",
"list" : "false"
}
}
}
configuration_zpl = ('name = {0}\n'.format(fncs_configuration['name'])
+ 'time_delta = {0}\n'.format(fncs_configuration['time_delta'])
+ 'broker = {0}\nvalues'.format(fncs_configuration['broker']))
for x in fncs_configuration['values'].keys():
configuration_zpl += '\n {0}'.format(x)
configuration_zpl += '\n topic = {0}'.format(
fncs_configuration['values'][x]['topic'])
configuration_zpl += '\n default = {0}'.format(
fncs_configuration['values'][x]['default'])
configuration_zpl += '\n type = {0}'.format(
fncs_configuration['values'][x]['type'])
configuration_zpl += '\n list = {0}'.format(
fncs_configuration['values'][x]['list'])
try:
fncs.initialize(configuration_zpl)
if not fncs.is_initialized():
raise RuntimeError("fncs.initialize(configuration_zpl) failed!\nconfiguration_zpl = {}".format(configuration_zpl))
except Exception as e:
if fncs.is_initialized():
fncs.die()
raise
def run_simulation(self):
try:
current_time = 0
while current_time <= self.sim_length:
sim_message_topics = fncs.get_events()
if self.sim_id in sim_message_topics:
message = fncs.get_value(self.sim_id)
time_request = current_time + 1
if time_request > self.sim_length:
fncs.finalize()
break
time_approved = fncs.time_request(time_request)
if time_approved != time_request:
raise RuntimeError("The time approved from the fncs broker is not the time requested.\ntime_request = {}.\ntime_approved = {}".format(time_request, time_approved))
current_time += 1
except Exception as e:
if fncs.is_initialized():
fncs.die()
raise
def get_opts():
parser = argparse.ArgumentParser()
parser.add_argument("broker_port", help="The port location for the FNCS broker.")
parser.add_argument("simulation_id", help="The simulation id.")
parser.add_argument("simulation_duration", help="The simulation runtime lenght.")
opts = parser.parse_args()
return opts
def main(broker_port, simulation_id, simulation_duration):
listener = PythonListener(broker_port, simulation_id, simulation_duration)
listener.register_with_fncs()
listener.run_simulation()
if __name__ == "__main__":
opts = get_opts()
port = opts.broker_port
sim_id = opts.simulation_id
duration = opts.simulation_duration
main(port, sim_id, duration)
| [
"andrew.fisher@pnnl.gov"
] | andrew.fisher@pnnl.gov |
9d3ebb55f1314362a215d95a4aadf6a840bf824d | 1b9075ffea7d4b846d42981b41be44238c371202 | /2009/stable/hardware/firmware/flashrom/actions.py | 870e4cb6e12699846f65a6f0b2a8ad85380f45fd | [] | no_license | pars-linux/contrib | bf630d4be77f4e484b8c6c8b0698a5b34b3371f4 | 908210110796ef9461a1f9b080b6171fa022e56a | refs/heads/master | 2020-05-26T20:35:58.697670 | 2011-07-11T11:16:38 | 2011-07-11T11:16:38 | 82,484,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2007-2010 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
def build():
    """Compile flashrom with the default make target."""
    autotools.make()
def install():
    """Install the flashrom binary, its man page and the doc files."""
    pisitools.dosbin("flashrom")
    pisitools.doman("flashrom.8")
    pisitools.dodoc("ChangeLog", "COPYING", "README")
| [
"necdetyucel@gmail.com"
] | necdetyucel@gmail.com |
2fc890324dbea582d0d5a3fe622be21a02df6bbc | 11372b8fb4df2b8ccb68589fc84bb716a7ea10a2 | /Bindings/Python/tests/test_DataTable.py | ef9952122c4fa0ea26195ac9e8e21f97c8a259cb | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | shyamalschandra/opensim-core | 4e8adce1da4ff964b03a2b7f94c4a05a539ec37f | eb6cc66e3fae995e50ee054bce5d039a0f7d9ef5 | refs/heads/master | 2020-05-29T08:43:06.596505 | 2016-09-30T00:04:14 | 2016-09-30T00:04:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,456 | py | """
Test DataTable interface.
"""
import os, unittest
import opensim as osim
class TestDataTable(unittest.TestCase):
    """Exercise the OpenSim DataTable bindings: scalar and Vec3 tables,
    plus the time-ordering constraint of their TimeSeries variants."""
    def test_DataTable(self):
        """Labels, row append/retrieval and column access on a scalar table."""
        table = osim.DataTable()
        # Set column labels.
        table.setColumnLabels(['0', '1', '2', '3'])
        assert table.getColumnLabels() == ('0', '1', '2', '3')
        assert table.hasColumn('0')
        assert table.hasColumn('2')
        table.setColumnLabel(0, 'zero')
        table.setColumnLabel(2, 'two')
        assert table.getColumnLabel(0) == 'zero'
        assert table.getColumnLabel(2) == 'two'
        assert table.getColumnIndex('zero') == 0
        assert table.getColumnIndex('two') == 2
        # Append a row to the table.
        row = osim.RowVector([1, 2, 3, 4])
        table.appendRow(0.1, row)
        assert table.getNumRows() == 1
        assert table.getNumColumns() == 4
        row0 = table.getRowAtIndex(0)
        assert (row0[0] == row[0] and
                row0[1] == row[1] and
                row0[2] == row[2] and
                row0[3] == row[3])
        # Append another row to the table.
        row[0] *= 2
        row[1] *= 2
        row[2] *= 2
        row[3] *= 2
        table.appendRow(0.2, row)
        assert table.getNumRows() == 2
        assert table.getNumColumns() == 4
        row1 = table.getRow(0.2)
        assert (row1[0] == row[0] and
                row1[1] == row[1] and
                row1[2] == row[2] and
                row1[3] == row[3])
        # Append another row to the table.
        row[0] *= 2
        row[1] *= 2
        row[2] *= 2
        row[3] *= 2
        table.appendRow(0.3, row)
        assert table.getNumRows() == 3
        assert table.getNumColumns() == 4
        row2 = table.getRow(0.3)
        assert (row2[0] == row[0] and
                row2[1] == row[1] and
                row2[2] == row[2] and
                row2[3] == row[3])
        # Retrieve independent column.
        assert table.getIndependentColumn() == (0.1, 0.2, 0.3)
        # Retrieve dependent columns.
        col1 = table.getDependentColumnAtIndex(1)
        assert (col1[0] == 2 and
                col1[1] == 4 and
                col1[2] == 8)
        col3 = table.getDependentColumn('3')
        assert (col3[0] == 4 and
                col3[1] == 8 and
                col3[2] == 16)
        assert table.hasColumn(0)
        assert table.hasColumn(2)
    def test_TimeSeriesTable(self):
        """Rows of a TimeSeriesTable must have strictly increasing timestamps."""
        table = osim.TimeSeriesTable()
        table.setColumnLabels(('col1', 'col2', 'col3', 'col4'))
        assert(table.getColumnLabels() == ('col1', 'col2', 'col3', 'col4'))
        # Append a row to the table.
        row = osim.RowVector([1, 2, 3, 4])
        table.appendRow(0.1, row)
        assert table.getNumRows() == 1
        assert table.getNumColumns() == 4
        row0 = table.getRowAtIndex(0)
        assert (row0[0] == row[0] and
                row0[1] == row[1] and
                row0[2] == row[2] and
                row0[3] == row[3])
        # Append another row to the table.
        row[0] *= 2
        row[1] *= 2
        row[2] *= 2
        row[3] *= 2
        table.appendRow(0.2, row)
        assert table.getNumRows() == 2
        assert table.getNumColumns() == 4
        row1 = table.getRow(0.2)
        assert (row1[0] == row[0] and
                row1[1] == row[1] and
                row1[2] == row[2] and
                row1[3] == row[3])
        # Append another row to the table with a timestamp
        # less than the previous one. Exception expected.
        try:
            table.appendRow(0.15, row)
            assert False
        except RuntimeError:
            pass
    def test_DataTableVec3(self):
        """Same round-trip checks as test_DataTable, with Vec3 elements."""
        table = osim.DataTableVec3()
        # Set columns labels.
        table.setColumnLabels(['0', '1', '2'])
        assert table.getColumnLabels() == ('0', '1', '2')
        # Append a row to the table.
        row = osim.RowVectorOfVec3([osim.Vec3(1, 2, 3),
                                    osim.Vec3(4, 5, 6),
                                    osim.Vec3(7, 8, 9)])
        table.appendRow(0.1, row)
        assert table.getNumRows() == 1
        assert table.getNumColumns() == 3
        row0 = table.getRowAtIndex(0)
        # Vec3 equality is checked via string form (no == on the binding).
        assert (str(row0[0]) == str(row[0]) and
                str(row0[1]) == str(row[1]) and
                str(row0[2]) == str(row[2]))
        # Append another row to the table.
        row = osim.RowVectorOfVec3([osim.Vec3( 2,  4,  6),
                                    osim.Vec3( 8, 10, 12),
                                    osim.Vec3(14, 16, 18)])
        table.appendRow(0.2, row)
        assert table.getNumRows() == 2
        assert table.getNumColumns() == 3
        row1 = table.getRow(0.2)
        assert (str(row1[0]) == str(row[0]) and
                str(row1[1]) == str(row[1]) and
                str(row1[2]) == str(row[2]))
        # Append another row to the table.
        row = osim.RowVectorOfVec3([osim.Vec3( 4,  8, 12),
                                    osim.Vec3(16, 20, 24),
                                    osim.Vec3(28, 32, 36)])
        table.appendRow(0.3, row)
        assert table.getNumRows() == 3
        assert table.getNumColumns() == 3
        row2 = table.getRow(0.3)
        assert (str(row2[0]) == str(row[0]) and
                str(row2[1]) == str(row[1]) and
                str(row2[2]) == str(row[2]))
        # Retrieve independent column.
        assert table.getIndependentColumn() == (0.1, 0.2, 0.3)
        # Retrieve dependent columns.
        col1 = table.getDependentColumnAtIndex(1)
        assert (str(col1[0]) == str(osim.Vec3( 4,  5,  6)) and
                str(col1[1]) == str(osim.Vec3( 8, 10, 12)) and
                str(col1[2]) == str(osim.Vec3(16, 20, 24)))
        col2 = table.getDependentColumn('2')
        assert (str(col2[0]) == str(osim.Vec3( 7,  8,  9)) and
                str(col2[1]) == str(osim.Vec3(14, 16, 18)) and
                str(col2[2]) == str(osim.Vec3(28, 32, 36)))
    def test_TimeSeriesTableVec3(self):
        """Time-ordering constraint also holds for the Vec3 time-series table."""
        table = osim.TimeSeriesTableVec3()
        # Set columns labels.
        table.setColumnLabels(['0', '1', '2'])
        assert table.getColumnLabels() == ('0', '1', '2')
        # Append a row to the table.
        row = osim.RowVectorOfVec3([osim.Vec3(1, 2, 3),
                                    osim.Vec3(4, 5, 6),
                                    osim.Vec3(7, 8, 9)])
        table.appendRow(0.1, row)
        assert table.getNumRows() == 1
        assert table.getNumColumns() == 3
        row0 = table.getRowAtIndex(0)
        assert (str(row0[0]) == str(row[0]) and
                str(row0[1]) == str(row[1]) and
                str(row0[2]) == str(row[2]))
        # Append another row to the table.
        row = osim.RowVectorOfVec3([osim.Vec3( 2,  4,  6),
                                    osim.Vec3( 8, 10, 12),
                                    osim.Vec3(14, 16, 18)])
        table.appendRow(0.2, row)
        assert table.getNumRows() == 2
        assert table.getNumColumns() == 3
        row1 = table.getRow(0.2)
        assert (str(row1[0]) == str(row[0]) and
                str(row1[1]) == str(row[1]) and
                str(row1[2]) == str(row[2]))
        # Append another row to the table with a timestamp
        # less than the previous one. Exception expected.
        try:
            table.appendRow(0.15, row)
            assert False
        except RuntimeError:
            pass
| [
"klshrinidhi@gmail.com"
] | klshrinidhi@gmail.com |
e8a722224a3af0bbf9f0bb156957c59e20ba719a | f32cd42623b5ae25895a2a1da9f748d592b457e0 | /airline/flight/migrations/0003_passanger.py | 165e8fae53aa8b2a1de06b8c5ff58d646b79b8ed | [] | no_license | drkrthnblk/Django | 790dac28961ca5f706a57385f7f8affa70e0a08f | 2bb0d678bcbc90b35a98bd1e57aa69b32a2b6d35 | refs/heads/master | 2020-03-28T01:41:35.831566 | 2018-09-08T07:05:48 | 2018-09-08T07:05:48 | 147,523,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 753 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-08 04:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('flight', '0002_auto_20180906_1341'),
]
operations = [
migrations.CreateModel(
name='Passanger',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first', models.CharField(max_length=64)),
('last', models.CharField(max_length=64)),
('flights', models.ManyToManyField(blank=True, related_name='passengers', to='flight.Flight')),
],
),
]
| [
"rarc111@gmail.com"
] | rarc111@gmail.com |
51150a3dae6b91bae2baf4002c8e95d9079059ef | ffcb28129a7ff97c585bd3dd0eeace40c363c978 | /mglearn/plot_kneighbors_regularization.py | 2dc0c238953ae98a0902bff483e6e4411f6fa153 | [
"BSD-2-Clause"
] | permissive | data-ml/advanced_training | 6b090482d99a24e821f6c419135eba659734da69 | 84d78b60161d08b0f212a14f10f80c6bdc346998 | refs/heads/master | 2020-04-01T20:27:10.907126 | 2018-10-18T11:46:21 | 2018-10-18T11:46:21 | 153,604,614 | 0 | 0 | BSD-2-Clause | 2018-10-18T10:14:25 | 2018-10-18T10:14:24 | null | UTF-8 | Python | false | false | 1,159 | py | import numpy as np
import matplotlib.pyplot as plt
from sklearn.neighbors import KNeighborsRegressor
def plot_regression_datasets():
fig, axes = plt.subplots(1, 3, figsize=(15, 5))
for n_samples, ax in zip([10, 100, 1000], axes):
x, y = make_dataset(n_samples)
ax.plot(x, y, 'o', alpha=.6)
def plot_kneighbors_regularization():
rnd = np.random.RandomState(42)
x = np.linspace(-3, 3, 100)
y_no_noise = np.sin(4 * x) + x
y = y_no_noise + rnd.normal(size=len(x))
X = x[:, np.newaxis]
fig, axes = plt.subplots(1, 3, figsize=(15, 5))
x_test = np.linspace(-3, 3, 1000)
for n_neighbors, ax in zip([2, 5, 20], axes.ravel()):
kneighbor_regression = KNeighborsRegressor(n_neighbors=n_neighbors)
kneighbor_regression.fit(X, y)
ax.plot(x, y_no_noise, label="true function")
ax.plot(x, y, "o", label="data")
ax.plot(x_test, kneighbor_regression.predict(x_test[:, np.newaxis]),
label="prediction")
ax.legend()
ax.set_title("n_neighbors = %d" % n_neighbors)
if __name__ == "__main__":
plot_kneighbors_regularization()
plt.show()
| [
"amueller@nyu.edu"
] | amueller@nyu.edu |
22af9c136349ee70da4d000c5eef00cb1baf0109 | 8ecd899a8558ad0a644ecefa28faf93e0710f6fb | /ABC007/ABC007_A.py | 679623e243c760e493ba9bd56ca1c2569cd69a61 | [] | no_license | yut-inoue/AtCoder_ABC | b93885547049788d452e86b442a4a9f5ee191b0e | 3d2c4b2b2f8871c75f86040ad07ccd7736ad3dbe | refs/heads/master | 2021-07-03T09:09:20.478613 | 2021-02-21T13:20:31 | 2021-02-21T13:20:31 | 227,140,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | n = int(input())
#a, b = map(int,input().split())
#l = list(map(int,input().split()))
#l = [list(map(int,input().split())) for i in range(n)]
print(n-1) | [
"yinoue.1996787@gmail.com"
] | yinoue.1996787@gmail.com |
d332802e56fe91375933bf622ca63bde19817eb3 | 386b836548bd64369e9192b7b540fadd5c1e0338 | /DGGAN/code/utils.py | eac66edcf20a489af2d4adada14c5f4670c14ca9 | [] | no_license | RingBDStack/AGE | e8efef8dc4ce958bf19ce03bac93e5f8ef6c7520 | 392ed8ca4b84d8e713719c6c7b93e092f8931bf9 | refs/heads/main | 2023-04-03T23:48:24.061213 | 2021-04-12T15:56:18 | 2021-04-12T15:56:18 | 357,246,136 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,315 | py | import numpy as np
def read_graph(train_filename):
nodes = set()
nodes_s = set()
egs = []
graph = [{}, {}]
with open(train_filename) as infile:
for line in infile.readlines():
source_node, target_node = line.strip().split(' ')
source_node = int(source_node)
target_node = int(target_node)
nodes.add(source_node)
nodes.add(target_node)
nodes_s.add(source_node)
egs.append([source_node, target_node])
if source_node not in graph[0]:
graph[0][source_node] = []
if target_node not in graph[1]:
graph[1][target_node] = []
graph[0][source_node].append(target_node)
graph[1][target_node].append(source_node)
n_node = len(nodes)
return graph, n_node, list(nodes), list(nodes_s), egs
def str_list_to_float(str_list):
return [float(item) for item in str_list]
def read_embeddings(filename, n_node, n_embed):
embedding_matrix = np.random.rand(n_node, n_embed)
i = -1
with open(filename) as infile:
for line in infile.readlines()[1:]:
i += 1
emd = line.strip().split()
embedding_matrix[int(emd[0]), :] = str_list_to_float(emd[1:])
return embedding_matrix
| [
"fuxc@act.buaa.edu.cn"
] | fuxc@act.buaa.edu.cn |
9c512e0241560b6fe71836a85588014ff24d8cfd | 4496b6864f51f1716d8221514ec43f648d1cd7d1 | /task3.py | 9c9a2ab2f90f9a4659213cb1d1518b3b0201c41d | [] | no_license | inwk6312winter2019/modelopenbook-1-khyathinalluri | 0d19bc33c0761c756b1f21ddbf7e56cd347a242d | 8da42272aaf0563ddb22a785dfa57f10a5c906f7 | refs/heads/master | 2020-04-21T08:47:35.807738 | 2019-02-06T19:11:26 | 2019-02-06T19:11:26 | 169,429,755 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,141 | py | def get_access_list(fout):
fout.seek(0)
transit_access_in=[]
global_access=[]
fw_management_access_in=[]
for line in fout:
line=line.strip()
if 'access-list' in line:
if 'transit_access_in' in line:
transit_access_in.append(line)
elif 'global_access' in line:
global_access.append(line)
elif 'fw-management_access_in' in line:
fw_management_access_in.append(line)
print('access list for transit_access_in::\n',transit_access_in)
print('access list for global_access::\n',global_access)
print('access list for fw_management_access_in::\n',fw_management_access_in)
try:
fout=open('running-config.cfg','r')
fin=open('new-running-config.cfg','a+')
print("The dictionary of ip addresses::",list_ifname_ip(fout))
if new_config_file(fout,fin):
print('New File Created Successfully')
else:
print('Not Able to Create New File File')
get_access_list(fout)
except:
print('Something Went wrong While working with Files.Please check files have proper permissions')
| [
"Khyathi.Nalluri@dal.ca"
] | Khyathi.Nalluri@dal.ca |
91be3f6ba80e16dd77e7a909e79b0c4743203ba1 | 226b3aefd1877a51e26cd9c00ffadb94249bc7c0 | /Image-OCR-and-Translator/Auto_Translate.py | 40610a7c6fe19862f63a057f141a7f01d4a7a7e2 | [] | no_license | CMWorks/Python | f8872f7f87f30fbe0072ddf262ba8abda7fa34d3 | 1852a4cfbca099cd153fc46676b7cee8ec2d9b6c | refs/heads/master | 2022-11-19T18:05:29.630181 | 2020-07-19T16:50:07 | 2020-07-19T16:50:07 | 280,886,772 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,320 | py | import sys
import time
from os import listdir
from os.path import exists
import threading
import cv2
import imutils
import numpy as np
import pyautogui as auto
import pytesseract
from PIL import ImageGrab
from PyQt5.QtCore import Qt
from PyQt5 import QtCore
from PyQt5.QtGui import QCloseEvent
from PyQt5.QtWidgets import QMainWindow, QLabel, QComboBox, QSlider, QPushButton, QCheckBox, QApplication, QWidget, \
QPlainTextEdit, QAction, QMessageBox
from googletrans import Translator
RUN = True # The main bool, true for all the treads to run, false to close all threads
if not exists("C:\\Program Files\\Tesseract-OCR\\tesseract.exe"):
auto.alert(
"Tesseract is not installed\nPlease install tesseract https://github.com/tesseract-ocr/tesseract/releases/tag/5.0.0-alpha")
sys.exit()
translator = Translator()
pytesseract.pytesseract.tesseract_cmd = r'C:\\Program Files\\Tesseract-OCR\\tesseract.exe'
# asm.traineddata
f = listdir("C:\\Program Files\\Tesseract-OCR\\tessdata")
languages = []
for file in f:
sp = file.split(".")
if len(sp) == 2 and sp[1] == "traineddata":
languages.append(sp[0])
lang = "eng"
x0 = 100
y0 = 100
x1 = 500
y1 = 500
gray_cutoff = 150
rotation_angle_deg = 0
gauss_constant = 3
invert = False
do_gauss = False
auto_up = False
print_to_con = False
new_img = auto.screenshot()
def kill_program():
sys.exit()
class Main(QWidget):
update_qt = QtCore.pyqtSignal(str)
def __init__(self):
super().__init__()
def display_image(self):
global x0, y0, x1, y1, new_img
def process_img(original_image):
gray = np.array(original_image.convert('L'))
if do_gauss:
if invert:
gray = cv2.bitwise_not(gray)
threshed_image = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 7,
gauss_constant)
else:
if invert:
ret, threshed_image = cv2.threshold(gray, gray_cutoff, 255, cv2.THRESH_BINARY_INV)
else:
ret, threshed_image = cv2.threshold(gray, gray_cutoff, 255, cv2.THRESH_BINARY)
rotated_image = imutils.rotate(threshed_image, rotation_angle_deg)
return rotated_image
while RUN:
if x0 >= x1 or y0 >= y1:
x0 = 100
y0 = 100
x1 = 500
y1 = 500
img = ImageGrab.grab(bbox=(x0, y0, x1, y1))
new_img = process_img(img)
cv2.imshow('Window', new_img)
if cv2.waitKey(25) & 0xFF == ord('q'):
cv2.destroyAllWindows()
break
def run(self):
while RUN:
if auto_up:
self.get_text()
else:
time.sleep(1)
def get_text(self):
try:
text = pytesseract.image_to_string(new_img, lang)
except SystemError:
text = "Error"
if lang != "eng" and lang != "equ" and lang != "osd" and text != "":
result = translator.translate(text).text
elif text != "":
result = text
else:
result = "-Empty-"
self.update_qt.emit(result)
class MainGUI(QMainWindow):
def __init__(self):
super().__init__()
label0 = QLabel("Language:", self)
label0.setGeometry(30, 10, 100, 20)
self.lan = QComboBox(self)
self.lan.addItems(languages)
self.lan.setGeometry(130, 10, 100, 20)
self.lan.setCurrentText("eng")
self.lan.currentIndexChanged.connect(self.set_lang)
label = QLabel("Grayscale Cutoff", self)
label.setGeometry(30, 40, 200, 20)
gslider = QSlider(Qt.Horizontal, self)
gslider.setGeometry(30, 60, 200, 20)
gslider.setMinimum(0)
gslider.setMaximum(255)
gslider.setSingleStep(1)
gslider.setValue(150)
gslider.valueChanged[int].connect(self.set_gray_cutoff)
label2 = QLabel("Rotate", self)
label2.setGeometry(30, 80, 200, 20)
self.rslider = QSlider(Qt.Horizontal, self)
self.rslider.setGeometry(30, 100, 200, 20)
self.rslider.setMinimum(-90)
self.rslider.setMaximum(90)
self.rslider.setValue(0)
self.rslider.valueChanged[int].connect(self.set_rotation)
reset_button = QPushButton("Reset Rotation", self)
reset_button.setGeometry(30, 125, 100, 30)
reset_button.pressed.connect(self.reset)
checkbox = QCheckBox("Invert", self)
checkbox.setGeometry(140, 130, 100, 20)
checkbox.stateChanged.connect(self.set_invert)
gauss_check = QCheckBox("Gaussian", self)
gauss_check.setGeometry(200, 130, 100, 20)
gauss_check.stateChanged.connect(self.set_gauss)
label3 = QLabel("Gaussian Constant", self)
label3.setGeometry(30, 165, 200, 20)
gaussslider = QSlider(Qt.Horizontal, self)
gaussslider.setGeometry(30, 190, 200, 20)
gaussslider.setMinimum(0)
gaussslider.setMaximum(30)
gaussslider.setSingleStep(1)
gaussslider.setValue(3)
gaussslider.valueChanged[int].connect(self.set_gauss_constant)
bounds = QPushButton("Set Bounds", self)
bounds.setGeometry(30, 230, 100, 30)
bounds.pressed.connect(self.set_bounds)
up = QPushButton("Get Text", self)
up.setGeometry(150, 230, 100, 30)
up.pressed.connect(self.calc)
self.audit = QCheckBox("Auto Update", self)
self.audit.setGeometry(330, 240, 100, 30)
self.audit.stateChanged.connect(self.set_auto_up)
label4 = QLabel("Out Text", self)
label4.setGeometry(345, 10, 200, 30)
# self.out = QLabel("Out:", self)
self.out = QPlainTextEdit(self)
self.out.setGeometry(270, 40, 200, 200)
quit = QAction("Quit", self)
quit.triggered.connect(self.closeEvent)
self.app = Main()
self.app.update_qt.connect(self.set_out)
self.clock = ClockTicker()
self.clock.update_qt.connect(self.set_out)
self.x = threading.Thread(target=self.app.display_image)
self.x.start()
self.y = threading.Thread(target=self.app.run)
self.y.start()
self.z = threading.Thread(target=self.clock.run)
self.z.start()
self.setGeometry(50, 100, 480, 280)
self.setWindowTitle("Auto Translate")
self.show()
def set_lang(self):
global lang
lang = self.lan.currentText()
def set_gray_cutoff(self, value):
global gray_cutoff
gray_cutoff = value
def set_rotation(self, value):
global rotation_angle_deg
rotation_angle_deg = value
def set_invert(self):
global invert
invert = not invert
def reset(self):
global rotation_angle_deg
rotation_angle_deg = 0
self.rslider.setValue(0)
def set_gauss(self):
global do_gauss
do_gauss = not do_gauss
def set_gauss_constant(self, value):
global gauss_constant
gauss_constant = value
def set_bounds(self):
self.audit.setChecked(False)
self.out.clear()
self.clock.do_run = True
def calc(self):
self.app.get_text()
def set_auto_up(self):
global auto_up
auto_up = not auto_up
def set_out(self, value):
self.out.setPlainText(value)
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Quit?',
'Are you sure you want to quit?',
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
if not type(event) == bool:
global RUN
RUN = False
event.accept()
else:
sys.exit()
else:
if not type(event) == bool:
event.ignore()
class ClockTicker(QWidget):
update_qt = QtCore.pyqtSignal(str)
do_run = False
def __init__(self):
super().__init__()
def run(self):
global x1, y1, x0, y0
while RUN:
if self.do_run:
self.update_qt.emit("Top Left...3")
time.sleep(.25)
self.update_qt.emit("Top Left...3.")
time.sleep(.25)
self.update_qt.emit("Top Left...3..")
time.sleep(.25)
self.update_qt.emit("Top Left...3...")
time.sleep(.25)
self.update_qt.emit("Top Left...3...2")
time.sleep(.25)
self.update_qt.emit("Top Left...3...2.")
time.sleep(.25)
self.update_qt.emit("Top Left...3...2..")
time.sleep(.25)
self.update_qt.emit("Top Left...3...2...")
time.sleep(.25)
self.update_qt.emit("Top Left...3...2...1")
time.sleep(.25)
self.update_qt.emit("Top Left...3...2...1.")
time.sleep(.25)
self.update_qt.emit("Top Left...3...2...1..")
time.sleep(.25)
self.update_qt.emit("Top Left...3...2...1...")
time.sleep(.25)
x0_temp = auto.position().x
y0_temp = auto.position().y
self.update_qt.emit("Bottom Right...3")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3.")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3..")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3...")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3...2")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3...2.")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3...2..")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3...2...")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3...2...1")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3...2...1.")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3...2...1..")
time.sleep(.25)
self.update_qt.emit("Bottom Right...3...2...1...")
time.sleep(.25)
x1 = auto.position().x
y1 = auto.position().y
x0 = x0_temp
y0 = y0_temp
self.update_qt.emit("")
self.do_run = False
else:
time.sleep(1)
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = MainGUI()
sys.exit(app.exec_())
| [
"noreply@github.com"
] | CMWorks.noreply@github.com |
22cbfa85a5db8d187adefa8c0db904d9217a9a83 | 9c912c4150f49b098d747bd1eb4dc4a20581b0a1 | /IMU/pi to pc/data_to_3d.py | 3c398262c4fa396baa53d279e62e719319e72c00 | [] | no_license | Pratyush-S/LOCALISATION | a2dd40bdb8a9dab17378b9efec6701855cf5554e | 02e12122f2d24fe996f5f543661ec9f4b0f7021d | refs/heads/master | 2021-01-14T18:06:57.999736 | 2020-04-04T08:18:22 | 2020-04-04T08:18:22 | 242,706,700 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | from mpl_toolkits import mplot3d
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(10,10))
ax = plt.axes(projection='3d')
from matplotlib.pyplot import figure
figure(num=None, figsize=(1, 1), dpi=80, facecolor='w', edgecolor='k')
# Data for a three-dimensional line
zline = []
xline = []
yline = []
append_val(1000,1000,1000)
def append_val(x,y,z):
global zline
global yline
global xline
zline.append(z)
xline.append(x)
yline.append(y)
fig = plt.figure(figsize=(10,10))
ax = plt.axes(projection='3d')
ax.plot3D(xline, yline, zline, 'gray')
| [
"shukla1024@gmail.com"
] | shukla1024@gmail.com |
e2273f82a362b586f2ca3bba2a5a25a7d369649a | a68e51cdd60da58880487dffb14083014a2337fb | /pixiv_v2.py | 61c688eb07156e214789efed649859608de8f959 | [] | no_license | monburan/pythongit | 96cf57a8ad8449711443acdbe996db9e4729b5f1 | f1ab12e6a654b4a82c626eb117120f48ae74dcce | refs/heads/master | 2021-01-17T18:00:56.334659 | 2016-07-18T09:36:55 | 2016-07-18T09:36:55 | 56,758,578 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,291 | py | #coding:UTF-8
__author__ = 'monburan'
__version__ = '0.10 only_international'
import os
import re
import urllib
import urllib2
import cookielib
from urllib2 import urlopen
from bs4 import BeautifulSoup
class Tools:
remove = re.compile('amp;')
rmbig = re.compile('_big')
make_m = re.compile('mode=medium')
def removebig(self,x):
x = re.sub(self.rmbig,"",x)
return x.strip()
def removesomething(self,x):
x = re.sub(self.remove,"",x)
return x.strip()
def make_big_url(self,x):
x = re.sub(self.make_m,"mode=manga_big",x)
return x.strip()
def Pic_Type(self,real_url): #区分图片分辨率
p_type = re.search(re.compile('png',re.S),real_url)
if p_type == None:
self.pic_type = 'jpg'
return self.pic_type
else:
self.pic_type = 'png'
return self.pic_type
class Pixiv_Spider:
def __init__(self):
self.tool = Tools()
self.p_id = ''
self.p_pw = ''
self.p_choice = ''
self.dl_dir = ''
self.pic_type = ''
self.p_international_url = 'http://www.pixiv.net/ranking_area.php?type=detail&no=6' #国际排行榜url
def Login(self): #处理登录所需要的请求信息
p_login_url = 'https://www.pixiv.net/login.php'
data = { #登录所要post的信息
'mode':'login',
'skip':1
}
data['pixiv_id'] = self.p_id #传入登录id以及password
data['pass'] = self.p_pw
p_login_data = urllib.urlencode(data)
p_login_header = { #头信息
'accept-language':'zh-cn,zh;q=0.8',
'referer':'https://www.pixiv.net/login.php?return_to=0',
'user-agent':'mozilla/5.0 (windows nt 10.0; win64; x64; rv:45.0) gecko/20100101 firefox/45.0'
}
request = urllib2.Request(
url = p_login_url,
data = p_login_data,
headers = p_login_header
)
try:
cookie_file = 'cookie.txt' #生成cookie
cookie = cookielib.MozillaCookieJar(cookie_file)
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
response = opener.open(request) #登录
cookie.save(ignore_discard = True,ignore_expires = True)
except urllib2.URLError,e:
if hasattr(e,"reason"):
print "登录失败???",e.reason
def Download_Request(self,opener,make_url,real_url):
p_download_header = { #头信息
'Accept-Language':'zh-CN,zh;q=0.8',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:45.0) Gecko/20100101 Firefox/45.0'
}
p_download_header['Referer'] = self.tool.removebig(make_url) #将处理过的referer加入header,没有referer会显示403
download_request = urllib2.Request(
url = real_url.group(1),
headers = p_download_header
)
decode_url = opener.open(download_request)
return decode_url.read()
def Cookie_Login(self): #读取之前登陆生成的cookie
cookie_login = cookielib.MozillaCookieJar()
cookie_login.load('cookie.txt',ignore_discard = True,ignore_expires = True)
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_login))
return opener
def Choice_Pixiv(self,opener): #选择要跳转到的页面
if (self.p_choice == '1'):
try:
p_page = opener.open(self.p_international_url)
p_international = p_page.read().decode('utf-8')
dl_dir = 'international'
self.Pixiv_International(opener,p_international,dl_dir)
except urllib2.URLError,e:
if hasattr(e,"reason"):
print "连接错误:",e.reason
def Pixiv_International(self,opener,p_international,dl_dir):
soup = BeautifulSoup(p_international)
os.mkdir(r'E:/pixivdata/' + dl_dir + '/') #生成文件夹
print "生成"+dl_dir+"目录成功!"
for i in range(1,101): #已知pixiv国际榜的排名为100名,用for循环来完成
get_information = str(soup.find(id=i)) #通过bs处理html将我们所需要的信息大体提取出来
result_url = re.search(re.compile('<.*?work\s_work\s".*?href="(.*?)">',re.S),get_information)
result_multiple = re.search(re.compile('<a.*?work\s_work\smultiple\s.*?href="(.*?)">',re.S),get_information)
result_video = re.search(re.compile('<a.*?work\s_work\sugoku-illust\s.*?href="(.*?)">',re.S),get_information)
result_manga_multiple = re.search(re.compile('<a.*?work\s_work\smanga\smultiple\s.*?href="(.*?)">',re.S),get_information)
if result_video == None:
if result_manga_multiple == None: #判断是否为manga
if result_multiple == None: #判断是否为多图
p_num = '1'
p_url = self.tool.removesomething('http://www.pixiv.net/' + result_url.group(1))
print "报告!前方发现单张图片..."
p_id = self.Download_Data(i,get_information,p_url,opener,dl_dir)
self.Download_Pic(p_num,i,opener,p_url,p_id,dl_dir)
else:
p_num = 'more'
p_url = self.tool.removesomething('http://www.pixiv.net/' + result_multiple.group(1))
print "报告!前方发现多张图片..."
p_id = self.Download_Data(i,get_information,p_url,opener,dl_dir)
self.Download_Pic(p_num,i,opener,p_url,p_id,dl_dir)
else:
p_num = 'more'
p_url = self.tool.removesomething('http://www.pixiv.net/' + result_manga_multiple.group(1))
print "报告!前方发现多张漫画..."
p_id = self.Download_Data(i,get_information,p_url,opener,dl_dir)
self.Download_Pic(p_num,i,opener,p_url,p_id,dl_dir)
else:
print "报告!前方这是张动图...无能为力啊...╮(╯▽╰)╭"
def Download_Data(self,i,get_information,p_url,opener,dl_dir):
#通过使用正则表达式再处理一遍经过bs处理的html代码,找到需要的信息(url,title,user)
result_title = re.search(re.compile('<a href=".*?>(.*?)</a>',re.S),get_information)
result_id = re.search(re.compile('<a class.*?illust_id=(.*?)">',re.S),get_information)
result_user = re.search(re.compile('<span class.*?>(.*?)</span>',re.S),get_information)
p_rank = str(i)
p_id = result_id.group(1)
p_title = result_title.group(1)
p_user = result_user.group(1)
print "RANK #" + p_rank + "\nPixiv ID:" + p_id + "\nTitle:" + p_title +"\nUser:" + p_user
file_data = open('E:/pixivdata/' + dl_dir + '/pixiv_' + p_id + '.txt','w') #创建信息文件
massage = [ #保存信息
'rank:' + p_rank +'\n',
'id:' + p_id + '\n',
'title:' + p_title + '\n',
'user:' + p_user + '\n',
'url:' + p_url
]
file_data.writelines(massage)
file_data.close()
print "报告!pixiv信息保存成功..." #将信息以txt格式保存下来
return p_id
def Download_Pic(self,p_num,i,opener,p_url,p_id,dl_dir):
if p_num == '1':
soup = BeautifulSoup(opener.open(p_url))
real_url = re.search(re.compile('.*?data-src="(.*?)"',re.S),str(soup.find_all("img",class_="original-image")))
print '成功找到大图链接(ˉ﹃ˉ)...\n' + real_url.group(1)
p_type = self.tool.Pic_Type(real_url.group(1))
file_pic = open('E:/pixivdata/' + dl_dir + '/pixiv_' + p_id + '.' + p_type,'wb')
file_pic.write(self.Download_Request(opener,p_url,real_url))
file_pic.close()
print '成功下载到本地(/≧▽≦)/...'
if p_num == 'more':
soup = BeautifulSoup(opener.open(p_url))
result_pic_more = re.search(re.compile('</li><li>.*?\s(.*?)P</li>',re.S),str(soup.find_all("ul",class_="meta")))
print "发现图片" + result_pic_more.group(1) + "张...⊙▽⊙"
for j in range(0,int(result_pic_more.group(1))):
make_url = self.tool.make_big_url(p_url)+'&page='+str(j) #生成多张的url
m_soup = BeautifulSoup(opener.open(make_url))
real_url = re.search(re.compile('<img.*?src="(.*?)"/>',re.S),str(m_soup.find_all("img")))
p_type = self.tool.Pic_Type(real_url.group(1))
print '成功找到大图链接(ˉ﹃ˉ)...\n' + real_url.group(1) #下载图片并保存
file_pic = open('E:/pixivdata/' + dl_dir + '/pixiv_' + p_id + '_' + str(j) + '.' + p_type,'wb')
file_pic.write(self.Download_Request(opener,make_url,real_url))
file_pic.close()
print '成功下载到本地(/≧▽≦)/...'
def Program_Start(self):
self.Login()
opener = self.Cookie_Login()
self.Choice_Pixiv(opener)
ps = Pixiv_Spider()
ps.p_id = raw_input('请输入你的pixiv id:')
ps.p_pw = raw_input('请输入你的pixiv密码:')
print ('1.进入国际排行榜)
ps.p_choice = raw_input()
ps.Program_Start()
def User_Data(self,opener,f_url):
soup = BeautifulSoup(opener.open(f_url))
uname_list = re.findall(re.compile('data-user_name="(.*?)"',re.S),str(soup.find_all(class_="userdata")))
uid_list = re.findall(re.compile('data-user_id="(.*?)"',re.S),str(soup.find_all(class_="userdata")))
for h in range(0,len(uid_list)):
user_name = uname_list[h]
user_id = uid_list[h]
user_page = 'http://www.pixiv.net/member_illust.php?id=' + user_id
os.mkdir(r'E:/pixivdata/'+user_id+'/')
user_info = BeautifulSoup(opener.open(user_page))
pic_num = re.search(re.compile('(\d+)',re.S),str(user_info.find(class_="count-badge")))
print '画师:' + user_name + '共有' + pic_num.group(1) + '幅作品'
if (int(pic_num.group(1))%20)!=0:
p = (int(pic_num.group(1))/20) + 1
else :
p = int(pic_num.group(1))/20
massage1 = [] #用来存放爬取下来的单图url
massage2 = [] #用来存放爬取下来的多图url
massage3 = [] #用来存放爬取下来的漫画url
for i in range(1,p+1):
pic_s = self.tool.Pic_Style_M(str(BeautifulSoup(opener.open(user_page + '&type=all&p=' + str(i)))))
single = pic_s[0]
multiple = pic_s[1]
manga = pic_s[2]
video = pic_s[3]
print '第'+str(i)+'页共有' + str(len(single)) + '张单图'
for j in range(0,len(single)):
p_num = '1'
p_url = self.tool.removesomething('http://www.pixiv.net/' + single[j])
p_id = re.search(re.compile('(\d+)',re.S),p_url)
massage1.append(p_url+'\n')
self.Download_Pic(p_num,opener,p_url,p_id.group(1),user_id)
print '第'+str(i)+'页共有' + str(len(multiple)) + '套多图'
for k in range(0,len(multiple)):
p_num = 'more'
p_url = self.tool.removesomething('http://www.pixiv.net/' + multiple[k])
p_id = re.search(re.compile('(\d+)',re.S),p_url)
massage2.append(p_url+'\n')
self.Download_Pic(p_num,opener,p_url,p_id.group(1),user_id)
print '第'+str(i)+'页共有' + str(len(manga)) + '套漫画'
for l in range(0,len(manga)):
p_num = 'more'
p_url = self.tool.removesomething('http://www.pixiv.net/' + manga[l])
p_id = re.search(re.compile('(\d+)',re.S),p_url)
massage3.append(p_url +'\n')
self.Download_Pic(p_num,opener,p_url,p_id.group(1),user_id)
if len(video)== 0 :
print '没有动图...'
else:
print'第'+str(i)+'页共有' + str(len(video)) + '张动图,主动放弃...'
singledata = open('E:/pixivdata/'+user_id+'/single.txt','w') #将信息保存下来
singledata1.writelines(massage1)
singledata1.close()
multipledata = open('E:/pixivdata/'+user_id+'/multiple.txt','w')
multipledata.writelines(massage2)
multipledata.close()
mangadata3 = open('E:/pixivdata/'+user_id+'/manga.txt','w')
mangadata3.writelines(massage2)
mangadata3.close()
| [
"mengbulang012@gmail.com"
] | mengbulang012@gmail.com |
0c436c65853fb5972935b4ecbfa85d46d0946c27 | 5c078ed010ca8a085f69669fa2590d7afb780e3d | /venv/Scripts/easy_install-script.py | a484963ffdfdf50f719b2b070aaf7a15e1713cc6 | [] | no_license | klaartjebloks/Selenium-Robot-Automation | baee9f4ff70f30d9beb3c18b8dbdbe8f94624e27 | d95a0c52b568846629ce6bc4ea6181ec4549c587 | refs/heads/master | 2020-05-30T20:57:51.018591 | 2019-06-03T08:17:34 | 2019-06-03T08:17:34 | 189,961,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | #!C:\Users\Klaar\PycharmProjects\Automation\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
)
| [
"klaartje.bloks@closesure.nl"
] | klaartje.bloks@closesure.nl |
2859050f5f4926044ceeb1a9937dfdf2a9332f07 | 3ec84a6e34f9bc709cb203f8b3f668f2b6697e2a | /python20200322-master/class_Python기초/py12패키지/mylib/operation/test.py | a8124c496932fed9a4168d433ceb4a82eeb59f3b | [] | no_license | eopr12/pythonclass | 52079bd99358ac73664beed236659b97c8b63d40 | 2526fe255969a799f6c534c9db6bff9e4eccd877 | refs/heads/master | 2022-07-10T11:17:31.692754 | 2020-05-16T08:43:00 | 2020-05-16T08:43:00 | 263,377,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py |
def test_operation():
result = None
try:
result = "test_operation"
except Exception as ex:
print(ex)
return result | [
"kye9565@gmail.com"
] | kye9565@gmail.com |
17e7c8be515f80ea9dee0d5f118fd8e35ac8d642 | 652a145a1bdedd9be6ed21b7c47fb8e4bd44f263 | /model.py | 7b0955bbd3f9552190014ad7bd37bce102bfbb1a | [] | no_license | vaibhavkakodiya/rock-paper-scissors | f9e62d174241d4c0b1b02d48160fe4a2983df3db | 06c6e6b54e02e29d8187cf7c7c81b50150b0b7f1 | refs/heads/master | 2022-12-02T23:03:40.876420 | 2020-08-22T20:21:09 | 2020-08-22T20:21:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | from pygame import image, draw, font
import game_confi as gc
def scoreboard(your,bot):
your_score = font.SysFont('freesansbold.ttf',100)
bot_score = font.SysFont('freesansbold.ttf',100)
your_score = your_score.render('your score : ' + str(your),True, (0,255,0))
bot_score = bot_score.render("computer's score : " + str(bot), True, (255,0,0))
return your_score,bot_score
def block():
return
| [
"vaibhavkakodiya12345@gmail.com"
] | vaibhavkakodiya12345@gmail.com |
b189c9f79a4040f7fdc65f0d0acd2de225abbcc1 | e3a50f0c2c4aab5340c6d1cb6acb078475d89455 | /wordcount/views.py | 443780481e57e0022ac76c3e4d3773bb37def313 | [] | no_license | thecplusplus/wordcount-project | e3a3259c01b732dccd1642f0e41bef63fe159727 | abdfb7245e00e2a45bb1479792e2ce2816202508 | refs/heads/master | 2022-11-28T02:20:16.857336 | 2020-07-31T06:26:11 | 2020-07-31T06:26:11 | 283,953,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 688 | py | from django.http import HttpResponse
from django.shortcuts import render
def homepage(request):
return render(request, 'homepage.html')
def hola(request):
return HttpResponse("Hola Amigo")
def about(request):
return render(request, 'about.html')
def count(request):
text = request.GET['fulltext']
wordlist = text.split()
worddict = {}
for x in wordlist:
if x in worddict:
# increment
worddict[x]+=1
else:
#add
worddict[x]=1
sortwords = sorted(worddict.items())
return render(request, 'count.html',{'text': text, 'words': len(wordlist),'worddict':worddict,'sortwords':sortwords}) | [
"cplusplus.2611@gmail.com"
] | cplusplus.2611@gmail.com |
a227df16f5ae47666110b93b5b1169d8690aa7b7 | 6189f34eff2831e3e727cd7c5e43bc5b591adffc | /WebMirror/management/rss_parser_funcs/feed_parse_extractEllionoratranslationsCom.py | 0f48d87eec935ae8a6cf120a0f95327cacb1e09f | [
"BSD-3-Clause"
] | permissive | fake-name/ReadableWebProxy | 24603660b204a9e7965cfdd4a942ff62d7711e27 | ca2e086818433abc08c014dd06bfd22d4985ea2a | refs/heads/master | 2023-09-04T03:54:50.043051 | 2023-08-26T16:08:46 | 2023-08-26T16:08:46 | 39,611,770 | 207 | 20 | BSD-3-Clause | 2023-09-11T15:48:15 | 2015-07-24T04:30:43 | Python | UTF-8 | Python | false | false | 563 | py |
def extractEllionoratranslationsCom(item):
'''
Parser for 'ellionoratranslations.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
| [
"something@fake-url.com"
] | something@fake-url.com |
47eb73bba21e3a85c8dfe3a57c32769f1bf528c4 | 02d278bad0bba99efca54b00e8f66e8d6b0497e0 | /ultrafinance/pyTaLib/indicator.py | bc6a5828c22a3f886ce5fba52bb004dd187d5c37 | [] | no_license | afcarl/ultra-finance | 857528407e4f9b9fe3288dad57dd9aab20a6185e | fd16aebf42ddcf420990c41f475c7b13358ac9a7 | refs/heads/master | 2020-09-03T04:18:51.544354 | 2014-03-03T08:43:29 | 2014-03-03T08:43:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,155 | py | '''
Created on May 26, 2012
@author: ppa
'''
import numpy
from math import sqrt
from collections import deque
def mean(array):
''' average '''
return numpy.mean(array, axis = 0)
def stddev(array):
''' Standard Deviation '''
return numpy.std(array, axis = 0)
def sharpeRatio(array, n = 252):
''' calculate sharpe ratio '''
#precheck
if (array is None or len(array) < 2 or n < 1):
return -1
returns = []
pre = array[0]
for post in array[1:]:
returns.append((float(post) - float(pre)) / pre)
pre = post
return sqrt(n) * mean(returns) / stddev(returns)
''' refer to http://rosettacode.org/wiki/Averages/Simple_moving_average#Python '''
class Sma(object):
    ''' Simple moving average over a sliding window of the most recent samples. '''
    def __init__(self, period):
        assert period == int(period) and period > 0, "Period must be an integer > 0"
        # A maxlen-bounded deque evicts the oldest sample automatically,
        # replacing the manual popleft() bookkeeping.
        self.__window = deque(maxlen=int(period))

    def __call__(self, n):
        ''' Feed one sample, return the average of the current window. '''
        self.__window.append(n)
        return sum(self.__window) / float(len(self.__window))
"panpandas@gmail.com@4d73ce26-f01c-a35a-b3e1-28a64a014117"
] | panpandas@gmail.com@4d73ce26-f01c-a35a-b3e1-28a64a014117 |
8275af2db00836647300750763b190f0e3727163 | 3c99871ed1923632cbd7d0068f6ed50acca8163e | /Homework/session_1_ex_1.py | 4402f8bd922a21f1327dde1bcbf22de7fad078d7 | [] | no_license | mindt102/C4T_A03 | f7828919ffedc9629329de71fd4bc593699a9b00 | 845ddf3a0646c1496a812e285e946e2b8cf8ddfd | refs/heads/master | 2022-08-14T20:45:34.901714 | 2020-05-22T14:26:33 | 2020-05-22T14:26:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | loop = True
while loop:
n = input("Enter a number: ")
if not n.isdigit:
continue
n = int(n)
if n < 0:
continue
loop = False
factorial = 1
for i in range (1,n+1):
factorial *= i
print("{}! is {}" .format(n,factorial))
| [
"minhdt24082002@gmail.com"
] | minhdt24082002@gmail.com |
7c5f13bb16f8b2cc9880f0b652a33abc44f9b229 | 275aaf4eba0badd745a17a42a1a43c822f5e4416 | /app/controllers/user.py | 1ebbe549383968658f2e640f4902d32bccd16065 | [
"MIT"
] | permissive | leonardowolf/bookfree | e99c26ffcb457110efd228c927ea7bb33dc2646d | 3ddfe970f87207ff8b536e994fac9daec773ac1c | refs/heads/master | 2020-12-26T19:59:06.873321 | 2016-07-22T14:54:55 | 2016-07-22T14:54:55 | 63,426,474 | 0 | 0 | null | 2016-07-15T14:07:24 | 2016-07-15T14:07:24 | null | UTF-8 | Python | false | false | 159 | py | from app import lm
from flask import render_template
from app.models.tables import User
@lm.user_loader
def load_user(id):
    # Flask-Login callback: reload the User for the id stored in the session.
    # The id arrives as a string, hence the int() conversion before lookup.
    return User.query.get(int(id))
| [
"contato@juliarizza.com"
] | contato@juliarizza.com |
d90f9239f6914f951f2e982845309fcde51fb5c6 | c0a49fa2d42df21eac90d8e94825edd8dadd655b | /shoogie/views.py | c535d1bcfec86d7c96398c508d9c10571c0f22a9 | [
"MIT"
] | permissive | hermanTenuki/django-shoogie | 55241d3d0450c8094262386035b88b44e0c7cf68 | 96f0b1178a7f08b1a980274d610b131ca1fc0f89 | refs/heads/master | 2021-10-07T20:52:23.365300 | 2018-12-05T10:48:16 | 2018-12-05T10:48:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 322 | py | from django.http import HttpResponse
from django.views.generic import detail
from shoogie import models
class TechnicalResponseView(detail.BaseDetailView):
    # Detail view that serves the stored technical debug page of a
    # recorded ServerError instead of rendering a template.
    queryset = models.ServerError.objects.all()
    def render_to_response(self, context):
        # Return the captured technical_response HTML verbatim.
        return HttpResponse(context['servererror'].technical_response)
| [
"git@aryehleib.com"
] | git@aryehleib.com |
815d3f3a34ea7f77be96600c31e2cb95753d4113 | ff018db3042b3b06076fab02f49b8c17b8887d3b | /car/autohome/pipelines.py | 7a7b0ec42ded1e4896d8f53ff324c52b40a59efa | [] | no_license | chenzhenpin/my_scrapy | bef05c9a843dad4697b431023e7f094fa8c570b9 | 77310d9c9082dcd54f175a13c7a3919384bbc309 | refs/heads/master | 2020-04-08T12:49:03.973291 | 2019-07-17T08:17:30 | 2019-07-17T08:17:30 | 159,362,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,601 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from datetime import datetime
import pymysql
import requests
from scrapy import Selector
from bs4 import BeautifulSoup
import re
import os
import traceback
import random
class AutohomePipeline(object):
    # Scrapy pipeline that, for each brand item, scrapes autohome.com.cn
    # for the brand's models and year-models, and can download images.
    def __init__(self):
        # Open the MySQL connection used by the pipeline.
        # NOTE(review): credentials are hard-coded here — consider moving
        # them to Scrapy settings.
        try:
            self.conn = pymysql.connect(host="127.0.0.1", user="root", passwd="333333", db="car", charset="utf8")
            self.myTotal=0
            self.startTime = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        except Exception as e:
            print(r'error connect--------------------------------------------------')
            # NOTE(review): traceback.print_exc() takes a `limit`, not an
            # exception object — passing `e` here looks wrong; confirm.
            traceback.print_exc(e)
    def process_item(self, item, spider):
        # Fetch the brand page and walk the brand's model tree.
        link='https://car.autohome.com.cn' + item['brandUrl']
        r = requests.get(link)
        soup = BeautifulSoup(r.text)
        brandImgUrl=soup.find('div',class_='uibox-con contbox').find('div',class_='carbradn-pic').img["src"]
        carTreeSrc=soup.find('div',id="cartree").script["src"]
        # The car-tree script wraps its HTML in a JS call; strip the
        # 18-char prefix and 3-char suffix to get the raw markup.
        carTreeText = requests.get('https://car.autohome.com.cn'+carTreeSrc).text[18:-3]
        carTreeElem = BeautifulSoup(carTreeText)
        ddElems=carTreeElem.find('li',id=item['brandId']).find_all('dd')
        for ddElem in ddElems:
            modelText=ddElem.get_text()
            # Model name is the text before the opening parenthesis.
            modelName=modelText[0:modelText.find("(")]
            modelLink=ddElem.a['href']
            yearModelLinkList=[]
            for yearModelLink in self.crawYearModelList(item,modelLink,yearModelLinkList,True):
                self.crawYearModelInfo(item,yearModelLink)
        return item
    def crawYearModelList(self,item,link,yearModelLinkList,flag):
        # Collect year-model links from a model page; when `flag` is True,
        # also recurse once into the page's other click-tabs.
        link = 'https://car.autohome.com.cn' + link
        r = requests.get(link)
        soup=BeautifulSoup(r.text)
        ulElems=soup.find_all('ul',class_='interval01-list')
        for ulElem in ulElems:
            liElems=ulElem.find_all('li')
            for liElem in liElems:
                yearModelLink=liElem.find('div', class_='interval01-list-cars-infor').find('p',id=re.compile("p*")).a['href']
                yearModelLinkList.append(yearModelLink)
        if flag==True:
            # Gather the other tab links, skipping current/disabled tabs.
            liClickElems = soup.find('div',class_='tab-nav border-t-no').find('ul', attrs={"data-trigger":"click"}).find_all('li')
            otherMoldeLinkList=[]
            for liClickElem in liClickElems:
                try:
                    if liClickElem['class']==['current'] or liClickElem['class']==['disabled']:
                        continue
                except Exception as e:
                    # Tabs without a class attribute raise KeyError — keep them.
                    pass
                otherMoldeLinkList.append(liClickElem.a['href'])
            for otherMoldeLink in otherMoldeLinkList:
                self.crawYearModelList(item,otherMoldeLink,yearModelLinkList,False)
        return yearModelLinkList
    def crawYearModelInfo(self,item,yearModelList):
        # Fetch a single year-model page (the parsed soup is currently unused).
        r = requests.get("https:"+yearModelList)
        soup = BeautifulSoup(r.text)
        return item
    def download(self, link):
        # Download one image to d:\carimg, prefixing the filename with a
        # running counter; returns (filename, path) on HTTP 200.
        filename = str(self.myTotal)+re.findall(r'.*/(.+)', link)[0]
        try:
            pic = requests.get(link)
            imgPath="d:\\carimg" + os.sep +filename
            if pic.status_code == 200:
                with open(imgPath, 'wb') as fp:
                    fp.write(pic.content)
                    fp.close()
                return filename,imgPath
        except Exception as e:
            print(e)
            print("保存失败>>"+filename)
"1120135449@qq.com"
] | 1120135449@qq.com |
08b472221630c4565953cc952679cc1626a408d6 | c130eeec1da4547b0bd0a11f0048dab4db424435 | /test-yolov3-shuffleunit-9-tiny2-caltech-416.py | ec863fc6a838cc564cc380fcebd7f0f4ecae5214 | [
"MIT"
] | permissive | zwangZJU/RSYNet | 7627dca6cb4281534f7a9dae99f4e44cbc86057f | d042a0b6aab0b0e38020dca6c1fe9a95a083240d | refs/heads/master | 2020-11-29T06:25:25.850797 | 2019-12-25T05:36:04 | 2019-12-25T05:36:04 | 230,046,048 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,000 | py | import argparse
import json
import time
from pathlib import Path
from models import *
from utils.datasets import *
from utils.utils import *
def test(
        cfg,
        data_cfg,
        weights,
        batch_size=16,
        img_size=416,
        iou_thres=0.5,
        conf_thres=0.3,
        nms_thres=0.45,
        save_json=False
):
    """Evaluate a YOLOv3 model on the validation set of `data_cfg`.

    Loads the Darknet model from `cfg`/`weights`, runs inference over the
    validation images, writes raw detections to ../results/caltech416.txt,
    accumulates per-class AP and mean IoU statistics, and prints timing.

    Returns:
        (mean_mAP, mean_R, mean_P) averaged over all images seen.
    """
    device = torch_utils.select_device()
    # Configure run
    data_cfg_dict = parse_data_cfg(data_cfg)
    nC = int(data_cfg_dict['classes'])  # number of classes (80 for COCO)
    test_path = data_cfg_dict['valid']
    # Initialize model
    model = Darknet(cfg, img_size)
    # # model = Darknet('E:/workspace_python/PyTorch-YOLOv3-Mobilev2/config/yolov3_mobilev2.cfg', img_size)
    # # model.load_state_dict(torch.load('E:/workspace_python/PyTorch-YOLOv3-Mobilev2/checkpoints/dict_76.pth', map_location='cpu'))
    # model = torch.load('E:/workspace_python/PyTorch-YOLOv3-Mobilev2/checkpoints/76.pth')
    # Load weights (checkpoint stores the state dict under the 'model' key)
    model.load_state_dict(torch.load(weights, map_location='cpu')['model'])
    #model.load_state_dict(torch.load(weights))
    # if weights.endswith('.pt'):  # pytorch format
    #     model.load_state_dict(torch.load(weights, map_location='cpu')['model'])
    #
    # else:  # darknet format
    #     load_darknet_weights(model, weights)
    model.to(device).eval()
    # Get dataloader
    # dataloader = torch.utils.data.DataLoader(LoadImagesAndLabels(test_path), batch_size=batch_size)
    dataloader = LoadImagesAndLabels(test_path, batch_size=batch_size, img_size=img_size)
    print(len(dataloader))
    mean_mAP, mean_R, mean_P, seen = 0.0, 0.0, 0.0, 0
    print('%11s' * 5 % ('Image', 'Total', 'P', 'R', 'mAP'))
    outputs, mAPs, mR, mP, TP, confidence, pred_class, target_class, jdict = \
        [], [], [], [], [], [], [], [], []
    AP_accum, AP_accum_count = np.zeros(nC), np.zeros(nC)
    coco91class = coco80_to_coco91_class()
    total_time_model = 0
    total_iou = face_iou = person_iou = 0
    total_object = face = person = 0
    total_time_network = 0
    for batch_i, (imgs, targets, paths, shapes) in enumerate(dataloader):
        t = time.time()
        output = model(imgs.to(device))
        # Network-only time (forward pass, before NMS).
        total_time_network += time.time() - t
        output = non_max_suppression(output, conf_thres=conf_thres, nms_thres=nms_thres)
        # Full-model time (forward pass + NMS).
        total_time_model += time.time() - t
        # Dump raw detections per image path for offline evaluation.
        with open('../results/caltech416.txt', 'a') as f:
            for i in range(batch_size):
                if i < len(paths):
                    path = paths[i]
                    if output[i] is not None:
                        for oup in output[i]:
                            oup = oup.cpu().numpy().tolist()
                            oup = ','.join(list(map(str, oup)))
                            f.write(path+','+oup+'\n')
        # Compute average precision for each sample
        for si, (labels, detections) in enumerate(zip(targets, output)):
            seen += 1
            if detections is None:
                # If there are labels but no detections mark as zero AP
                if labels.size(0) != 0:
                    mAPs.append(0), mR.append(0), mP.append(0)
                continue
            # Get detections sorted by decreasing confidence scores
            detections = detections.cpu().numpy()
            detections = detections[np.argsort(-detections[:, 4])]
            if save_json:
                # [{"image_id": 42, "category_id": 18, "bbox": [258.15, 41.29, 348.26, 243.78], "score": 0.236}, ...
                box = torch.from_numpy(detections[:, :4]).clone()  # xyxy
                scale_coords(img_size, box, shapes[si])  # to original shape
                box = xyxy2xywh(box)  # xywh
                box[:, :2] -= box[:, 2:] / 2  # xy center to top-left corner
                # add to json dictionary
                for di, d in enumerate(detections):
                    jdict.append({
                        'image_id': int(Path(paths[si]).stem.split('_')[-1]),
                        'category_id': coco91class[int(d[6])],
                        'bbox': [float3(x) for x in box[di]],
                        'score': float3(d[4] * d[5])
                    })
            # If no labels add number of detections as incorrect
            correct = []
            if labels.size(0) == 0:
                # correct.extend([0 for _ in range(len(detections))])
                mAPs.append(0), mR.append(0), mP.append(0)
                continue
            else:
                target_cls = labels[:, 0]
                # Extract target boxes as (x1, y1, x2, y2)
                target_boxes = xywh2xyxy(labels[:, 1:5]) * img_size
                detected = []
                for *pred_bbox, conf, obj_conf, obj_pred in detections:
                    pred_bbox = torch.FloatTensor(pred_bbox).view(1, -1)
                    # Compute iou with target boxes
                    iou = bbox_iou(pred_bbox, target_boxes)
                    # Extract index of largest overlap
                    best_i = np.argmax(iou)
                    # If overlap exceeds threshold and classification is correct mark as correct
                    if iou[best_i] > iou_thres and obj_pred == labels[best_i, 0] and best_i not in detected:
                        # Track IoU totals split by class (0 is treated as
                        # 'person', anything else as 'face' here).
                        total_iou += iou[best_i]
                        total_object += 1
                        if obj_pred == 0:
                            person_iou += iou[best_i]
                            person += 1
                        else:
                            face_iou += iou[best_i]
                            face += 1
                        correct.append(1)
                        detected.append(best_i)
                    else:
                        correct.append(0)
            # Compute Average Precision (AP) per class
            AP, AP_class, R, P = ap_per_class(tp=correct,
                                              conf=detections[:, 4],
                                              pred_cls=detections[:, 6],
                                              target_cls=target_cls)
            # Accumulate AP per class
            AP_accum_count += np.bincount(AP_class, minlength=nC)
            AP_accum += np.bincount(AP_class, minlength=nC, weights=AP)
            # Compute mean AP across all classes in this image, and append to image list
            mAPs.append(AP.mean())
            mR.append(R.mean())
            mP.append(P.mean())
        # Means of all images
        mean_mAP = np.mean(mAPs)
        mean_R = np.mean(mR)
        mean_P = np.mean(mP)
        # Print image mAP and running mean mAP
        print(('%11s%11s' + '%11.3g' * 4 + 's') %
              (seen, dataloader.nF, mean_P, mean_R, mean_mAP, time.time() - t))
    # Print mAP per class
    print('%11s' * 5 % ('Image', 'Total', 'P', 'R', 'mAP') + '\n\nmAP Per Class:')
    for i, c in enumerate(load_classes(data_cfg_dict['names'])):
        print('%15s: %-.4f' % (c, AP_accum[i] / (AP_accum_count[i] + 1E-16)))
    print('平均每秒{}张图-network'.format(len(dataloader) * batch_size / total_time_network))
    print('平均每秒{}张图-整个模型'.format(len(dataloader) * batch_size / total_time_model))
    print('平均IoU{}'.format(total_iou / total_object))
    print('平均person_IoU{}'.format(person_iou / person))
    # print('平均face_IoU{}'.format(face_iou / face))
    # Save JSON
    # if save_json:
    #     imgIds = [int(Path(x).stem.split('_')[-1]) for x in dataloader.img_files]
    #     with open('results.json', 'w') as file:
    #         json.dump(jdict, file)
    #
    #     from pycocotools.coco import COCO
    #     from pycocotools.cocoeval import COCOeval
    #
    #     # https://github.com/cocodataset/cocoapi/blob/master/PythonAPI/pycocoEvalDemo.ipynb
    #     cocoGt = COCO('../coco/annotations/instances_val2014.json')  # initialize COCO ground truth api
    #     cocoDt = cocoGt.loadRes('results.json')  # initialize COCO detections api
    #
    #     cocoEval = COCOeval(cocoGt, cocoDt, 'bbox')
    #     cocoEval.params.imgIds = imgIds  # [:32] # only evaluate these images
    #     cocoEval.evaluate()
    #     cocoEval.accumulate()
    #     cocoEval.summarize()
    # Return mAP
    return mean_mAP, mean_R, mean_P
if __name__ == '__main__':
    # CLI entry point: parse evaluation options and run test() without
    # gradient tracking.
    parser = argparse.ArgumentParser(prog='test.py')
    parser.add_argument('--batch-size', type=int, default=16, help='size of each image batch')
    parser.add_argument('--cfg', type=str, default='cfg/yolov3-shuffleunit-9-tiny2-caltech-416.cfg', help='cfg file path')
    parser.add_argument('--data-cfg', type=str, default='cfg/caltech.data', help='coco.data file path')
    parser.add_argument('--weights', type=str, default='weights/yolov3-shuffleunit-9-tiny2-caltech-416.pt', help='path to weights file')
    parser.add_argument('--iou-thres', type=float, default=0.5, help='iou threshold required to qualify as detected')
    parser.add_argument('--conf-thres', type=float, default=0.3, help='object confidence threshold')
    parser.add_argument('--nms-thres', type=float, default=0.45, help='iou threshold for non-maximum suppression')
    parser.add_argument('--save-json', action='store_true', help='save a cocoapi-compatible JSON results file')
    parser.add_argument('--img-size', type=int, default=416, help='size of each image dimension')
    opt = parser.parse_args()
    print(opt, end='\n\n')
    with torch.no_grad():
        mAP = test(
            opt.cfg,
            opt.data_cfg,
            opt.weights,
            opt.batch_size,
            opt.img_size,
            opt.iou_thres,
            opt.conf_thres,
            opt.nms_thres,
            opt.save_json
        )
# Image Total P R mAP # YOLOv3 320
# 32 5000 0.66 0.597 0.591
# 64 5000 0.664 0.62 0.604
# 96 5000 0.653 0.627 0.614
# 128 5000 0.639 0.623 0.607
# 160 5000 0.642 0.63 0.616
# 192 5000 0.651 0.636 0.621
# Image Total P R mAP # YOLOv3 416
# 32 5000 0.635 0.581 0.57
# 64 5000 0.63 0.591 0.578
# 96 5000 0.661 0.632 0.622
# 128 5000 0.659 0.632 0.623
# 160 5000 0.665 0.64 0.633
# 192 5000 0.66 0.637 0.63
# Image Total P R mAP # YOLOv3 608
# 32 5000 0.653 0.606 0.591
# 64 5000 0.653 0.635 0.625
# 96 5000 0.655 0.642 0.633
# 128 5000 0.667 0.651 0.642
# 160 5000 0.663 0.645 0.637
# 192 5000 0.663 0.643 0.634
| [
"zju.zwang@gmail.com"
] | zju.zwang@gmail.com |
947a607c11baaa1bf4c629a3a5122e7985c37f8a | c3f171c3c05ca95c354e594598fb396a17a5c72d | /Project_Management/Project_Management/wsgi.py | 385a28c034eb7813623e08a2447faee57ce03e45 | [] | no_license | VikNim/Project-Management | 8919fe6a32431596e601aae5683840856cd2d0d0 | c10e09ac4625f233e13b10628f8c8bbc24b677bf | refs/heads/master | 2022-12-19T18:03:02.357715 | 2020-09-30T21:05:29 | 2020-09-30T21:05:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 413 | py | """
WSGI config for Project_Management project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the WSGI callable.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Project_Management.settings")
application = get_wsgi_application()
| [
"noreply@github.com"
] | VikNim.noreply@github.com |
0554687d5d1f80d813f8dbe86b7380c3f0309fae | 6550c2da7abe8942ab26aceac57fd14eec29095f | /db.py | 1f68b5a93af43162f43d344ccea9d8d1c6ef62b3 | [] | no_license | katelk/oligopoly-bot | 09e6f6c2ff5b9e398bcf932896d8b67a30f5166a | 6febcea57a5df408c3742b9480e091b3eba6cf11 | refs/heads/main | 2023-02-02T23:08:42.136787 | 2020-12-23T09:34:11 | 2020-12-23T09:34:11 | 323,705,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,158 | py | import sqlite3
from market import set_params
class Tbgames:
    # SQLite persistence layer for the oligopoly game bot: tracks rooms
    # (table `games`), user-to-room membership (`users_rooms`), and one
    # per-room table of players named after the room.
    # NOTE(review): several methods interpolate `room_name` directly into
    # SQL (table names cannot be parameterized) — room names must be
    # validated upstream or this is an SQL-injection vector.
    def __init__(self):
        # Open (or create) the database and ensure the shared tables exist.
        self.connection = sqlite3.connect('rooms.db', check_same_thread=False)
        cursor = self.connection.cursor()
        cursor.execute(''' CREATE TABLE IF NOT EXISTS games
                                (creator INTEGER,
                                room_name VARCHAR(20),
                                password VARCHAR(20),
                                n_players INTEGER,
                                step INTEGER,
                                steps INTEGER,
                                a INTEGER,
                                b INTEGER,
                                mc Integer
                                )''')
        cursor.execute(''' CREATE TABLE IF NOT EXISTS users_rooms
                                        (user_id INTEGER,
                                        chat_id INTEGER,
                                        room_name VARCHAR(20)
                                        )''')
        cursor.close()
        self.connection.commit()
    def new_user(self, user_id, chat_id):  # register a user new to the bot
        cursor = self.connection.cursor()
        # Only insert if this user has never been seen before.
        check = list(cursor.execute('''SELECT * FROM users_rooms WHERE user_id = ?''', (user_id,)))
        if not check:
            cursor.execute('''INSERT INTO users_rooms
                            (user_id, chat_id, room_name)
                            VALUES (?, ?, ?)''', (user_id, chat_id, ""))
        cursor.close()
        self.connection.commit()
    def try_connect_to_room(self, user_id, room_name):  # add a player to a room (from \joingame)
        cursor = self.connection.cursor()
        cursor.execute('''UPDATE users_rooms
                        SET room_name = ?
                        WHERE user_id = ?''', (room_name, user_id))
        cursor.close()
        self.connection.commit()
    def new_room(self, user_id, room_name):  # create a new game room
        cursor = self.connection.cursor()
        # Market parameters (a, b, mc) are rolled once per room.
        params = set_params()
        cursor.execute('''INSERT INTO games
                            (creator, room_name, password, n_players, step, steps, a, b, mc)
                            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)''', (user_id, room_name, "", 0, 0, 0, params[0], params[1], params[2]))
        # Per-room player table is named after the room itself.
        cursor.execute('''CREATE TABLE IF NOT EXISTS ''' + room_name +
                       ''' (user_id INTEGER,
                       name VARCHAR(20),
                       state VARCHAR(30),
                       step INTEGER,
                       q INTEGER,
                       money INTEGER
                       )''')
        self.try_connect_to_room(user_id, room_name)
        cursor.close()
        self.connection.commit()
    def set_room_steps(self, steps, user_id):
        # Set the total number of rounds for the room created by user_id.
        cursor = self.connection.cursor()
        cursor.execute('''UPDATE games
                                SET steps = ?
                                WHERE creator = ?''', (steps, user_id))
        cursor.close()
        self.connection.commit()
    def check_password(self, password, room_name):
        # True when the given password matches the room's stored one.
        cursor = self.connection.cursor()
        if password == list((cursor.execute('''SELECT password FROM games WHERE room_name = ?''', (room_name,))))[0][0]:
            return True
        return False
    def check_existence(self, room_name):  # check whether a room with this name exists
        cursor = self.connection.cursor()
        check = list(cursor.execute('''SELECT * FROM games WHERE room_name = ?''', (room_name,)))
        if not check:
            return False
        return True
    def set_password(self, user_id, password):  # set the room's password
        cursor = self.connection.cursor()
        cursor.execute('''UPDATE games
                        SET password = ?
                        WHERE creator = ?''', (password, user_id))
        cursor.close()
        self.connection.commit()
    def get_room(self, user_id):  # return the name of the room the user with user_id is currently playing in
        cursor = self.connection.cursor()
        return list((cursor.execute('''SELECT room_name FROM users_rooms WHERE user_id = ?''', (user_id,))))[0][0]
    def get_room_steps(self, room_name):
        # Total number of rounds configured for the room.
        cursor = self.connection.cursor()
        return list(cursor.execute('''SELECT steps FROM games WHERE room_name = ?''', (room_name,)))[0][0]
    def get_params(self, room_name):
        # Market parameters (a, b, mc) of the room.
        cursor = self.connection.cursor()
        params = list(cursor.execute('''SELECT a, b, mc FROM games WHERE room_name = ?''', (room_name,)))
        print(params)
        a = params[0][0]
        b = params[0][1]
        mc = params[0][2]
        return (a, b, mc)
    def name_existence(self, name, room_name):
        # True when a player with this display name is already in the room.
        cursor = self.connection.cursor()
        check = list(cursor.execute('''SELECT * FROM ''' + room_name + ''' WHERE name = ?''', (name,)))
        if not check:
            return False
        return True
    def how_much_players(self, room_name):
        # Current player count stored for the room.
        cursor = self.connection.cursor()
        return list(cursor.execute('''SELECT n_players FROM games WHERE room_name = ?''', (room_name,)))[0][0]
    def set_name(self, room_name, user_id, name):
        # Register the player in the room's table (starting money 1000)
        # and bump the room's player counter.
        cursor = self.connection.cursor()
        cursor.execute('''INSERT INTO ''' + room_name +
                       ''' (user_id, name, state, step, money)
                       VALUES (?, ?, ?, ?, ?)''', (user_id, name, "pre-playing", 0, 1000))
        n = self.how_much_players(room_name)
        cursor.execute('''UPDATE games
                                SET n_players = ?
                                WHERE room_name = ?''', (n+1, room_name))
        cursor.close()
        self.connection.commit()
    def get_players(self, room_name):
        # All player ids in the room, as a list of 1-tuples.
        cursor = self.connection.cursor()
        return list(cursor.execute('''SELECT user_id FROM ''' + room_name))
    def get_chat_id(self, user_id):
        cursor = self.connection.cursor()
        return list(cursor.execute('''SELECT chat_id FROM users_rooms WHERE user_id = ?''', (user_id,)))[0][0]
    def delete_user(self, user_id, room_name):
        # Detach the user from the room and decrement the player counter.
        cursor = self.connection.cursor()
        n = self.how_much_players(room_name)
        cursor.execute('''UPDATE games
                                        SET n_players = ?
                                        WHERE room_name = ?''', (n - 1, room_name))
        cursor.execute('''UPDATE users_rooms
                                SET room_name = ?
                                WHERE user_id = ?''', ("", user_id))
        cursor.close()
        self.connection.commit()
    def check_all_states(self, state, room_name):
        # True when every player is in `state` at the room's current step.
        cursor = self.connection.cursor()
        step = list(cursor.execute('''SELECT step FROM games WHERE room_name = ?''', (room_name,)))[0][0]
        states_steps = list(cursor.execute('''SELECT state, step FROM ''' + room_name))
        for status_step in states_steps:
            if status_step[0] != state or status_step[1] != step:
                return False
        return True
    def set_quantity(self, user_id, q):
        # Store the quantity the player chose for this round.
        cursor = self.connection.cursor()
        cursor.execute('''UPDATE ''' + self.get_room(user_id) +
                       ''' SET q = ?
                       WHERE user_id = ?''', (q, user_id))
        cursor.close()
        self.connection.commit()
    def set_state(self, user_id, state):
        cursor = self.connection.cursor()
        cursor.execute('''UPDATE ''' + self.get_room(user_id) +
                       ''' SET state = ?
                       WHERE user_id = ?''', (state, user_id))
        cursor.close()
        self.connection.commit()
    def get_step(self, room_name):
        cursor = self.connection.cursor()
        return list(cursor.execute('''SELECT step FROM games WHERE room_name = ?''', (room_name,)))[0][0]
    def next_step_user(self, user_id):
        # Advance this player's personal round counter by one.
        cursor = self.connection.cursor()
        room_name = self.get_room(user_id)
        step = list(cursor.execute('''SELECT step FROM ''' + room_name + ''' WHERE user_id = ?''', (user_id,)))[0][0]
        cursor.execute('''UPDATE ''' + room_name +
                       ''' SET step = ?
                       WHERE user_id = ?''', (step + 1, user_id))
        cursor.close()
        self.connection.commit()
    def next_step(self, room_name):
        # Advance the room's shared round counter by one.
        cursor = self.connection.cursor()
        step = self.get_step(room_name)
        cursor.execute('''UPDATE games
                                SET step = ?
                                WHERE room_name = ?''', (step + 1, room_name))
        cursor.close()
        self.connection.commit()
    def get_values(self, room_name):
        values = {}  # values = {"user_id1": Q1, "user_id2": Q2, "user_id3": Q3)}
        cursor = self.connection.cursor()
        for user_id in self.get_players(room_name):
            values[user_id[0]] = list(cursor.execute('''SELECT q FROM ''' + room_name +
                                                     ''' WHERE user_id = ?''', (user_id[0],)))[0][0]
        return values
    def update_income(self, room_name, result: dict):
        # Add each player's round profit (result maps user_id -> income).
        cursor = self.connection.cursor()
        for user_id in result.keys():
            money = list(cursor.execute('''SELECT money FROM ''' + room_name +
                                        ''' WHERE user_id = ?''', (user_id,)))[0][0]
            cursor.execute('''UPDATE ''' + room_name +
                           ''' SET money = ?
                           WHERE user_id = ?''', (round(money + result[user_id], 1), user_id))
        cursor.close()
        self.connection.commit()
    def get_rate(self, room_name):
        # Player names ordered by money, richest first.
        cursor = self.connection.cursor()
        players = self.get_players(room_name)
        rating = []
        for user_id in players:
            rating.append(list(cursor.execute('''SELECT name FROM ''' + room_name +
                                              ''' WHERE user_id = ?''', (user_id[0],)))[0][0])
        rating.sort(key=lambda x: list(cursor.execute('''SELECT money FROM ''' + room_name +
                                                      ''' WHERE name = ?''', (x,)))[0][0], reverse=True)
        return rating
    def get_money(self, room_name, name):
        cursor = self.connection.cursor()
        money = list(cursor.execute('''SELECT money FROM ''' + room_name +
                                    ''' WHERE name = ?''', (name,)))[0][0]
        return money
    def delete_room(self, room_name):
        # Drop the per-room player table and remove the room record.
        cursor = self.connection.cursor()
        cursor.execute('''DROP TABLE ''' + room_name)
        cursor.execute('''DELETE FROM games WHERE room_name = ?''', (room_name,))
        cursor.close()
        self.connection.commit()
| [
"noreply@github.com"
] | katelk.noreply@github.com |
7665eb4664516e82e89dc0b78f3cc8b550ddd050 | c0bb77593f805adffa90bc585f755888305ce2a4 | /WriteILIinput.py | 2f4696efe71ccfa7bf4811cb04897a8843e26771 | [
"Apache-2.0"
] | permissive | chloejjjk/SpaceM | 398b91f2d32f1764e9052a48e09cfda1065f4a11 | cf3da7e4a81b6b928c87d2b40471b480ea06d7f9 | refs/heads/master | 2023-07-29T10:15:47.019758 | 2020-09-30T20:45:56 | 2020-09-30T20:45:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,360 | py | # from pyimzml.ImzMLParser import ImzMLParser, getionimage
# from sm_analytics_python.sm_annotation_utils import sm_annotation_utils as smau
from metaspace import sm_annotation_utils as smau
import numpy as np
import csv
import glob, os, tqdm
import pandas as pd
def preCSVdatagen(xy_p, radius, nbin, PlainFirst):
    """Format the data before generating the csv input for ili'.

    Args:
        xy_p (str): path to the X and Y coordinates of ablation marks .npy file.
        radius (int): displayed radius of the marks in ili'.
        nbin (int): bin factor used to bin the image for ili'.
        PlainFirst (bool): intensity values of each datapoints are equal to 1. Used to visualize the ablation mark
            coordinates on the postMALDI brighfield in ili'.

    Returns:
        data (list): formatted data (2D), one list per ili' column.
    """
    X, Y = np.load(xy_p)
    # Scale the coordinates to the binned image and shift them to start at 0.
    Xs = X / nbin  # todo check relevance of Y <-> X
    Ys = Y / nbin
    Xs = Xs - np.min(Xs)
    Ys = Ys - np.min(Ys)
    flat_x = Xs.ravel()
    flat_y = Ys.ravel()
    npoints = np.shape(flat_x)[0]
    # Each column starts with its header; note the X column is fed with
    # the Y coordinates and vice versa (see the todo above).
    columns = [
        ('Num', list(range(npoints))),
        ('X', flat_y),
        ('Y', flat_x),
        ('Z', np.zeros(npoints)),
        ('R', np.ones(npoints) * radius),
    ]
    if PlainFirst:
        columns.append(('Flat', np.ones(npoints)))
    return [list(np.append(header, values)) for header, values in columns]
def writeCSV(path, data):
    """Write the formatted column data to a csv file, transposed to rows.

    Args:
        path (str): path of the csv file to write.
        data (list): data to write (2D) — a list of columns, each starting
            with its header; the columns are zipped so each output row
            holds one value per column.
    """
    rows = zip(*data)
    # newline='' lets the csv module control line endings (fixes blank
    # rows on Windows); writerows replaces the manual index loop.
    with open(path, 'w', newline='') as csv_file:
        csv.writer(csv_file).writerows(rows)
def annotationSM2CSV(MFA, MFI, fdr, nbin, radius, tf_obj, db='HMDB-v4'):
    """Fetches annotation images from METASPACE (http://metaspace2020.eu/#/about) and writes intensity values of
    each ablation marks in a csv input for ili' (https://ili.embl.de/). Used to visualize the ion signal on the
    preMALDI microsocpy after registration and validate the geometric transform to apply to the ion image.

    Args:
        MFA (str): path to Main Folder Analysis.
        MFI (str): path to Main Folder Input.
        fdr (float): fdr threshold value can only be 0.05, 0.1, 0.2 and 0.5.
        nbin (int): bin factor used to bin the image for ili'.
        radius (int): displayed radius of the marks in ili'.
        tf_obj (function): Image transformation to apply on ion image for registration.
        db (str): METASPACE molecular database to pull annotations from.
    """
    def CSVdatagen(data, results, d):
        """Writes intensity values of each ablation marks in a csv input for ili'.
        TODO finish documentation
        Args:
            data (list): data to populate with ion intensities (2D).
            results (pandas.DataFrame, presumably): annotation table from sm.msm_scores — TODO confirm.
            d (metaspace dataset handle): source of the isotope images — TODO confirm.

        Returns:
            data (list): data to write in csv input for ili.

        """
        ind = 0
        for i, row in enumerate(results.reset_index().itertuples()):
            # One ili' column per annotation: the registered, raveled ion image.
            images = d.isotope_images(row.formula, row.adduct)
            print(row.formula)
            data.append(list(np.append(row[1], tf_obj(images[0]).ravel())))
            ind += 1
        return data
    # config = {
    #     'graphql_url': 'http://staging.metaspace2020.eu/graphql',
    #     'moldb_url': 'http://staging.metaspace2020.eu/mol_db/v1',
    #     'jwt': None}
    sm = smau.SMInstance()
    # NOTE(review): hard-coded account credentials — these should be moved
    # to a config file / environment variables and rotated.
    sm.login(email='luca.rappez@embl.de', password='Zeppar12')
    # The dataset name is derived from the .imzML file in the MALDI input folder.
    os.chdir(MFI + 'MALDI/')
    ds_name = glob.glob('*.imzML')[0].replace('.imzML', '')
    d = sm.dataset(ds_name)
    results = sm.msm_scores([d], d.annotations(database=db, fdr=fdr), db_name=db).T
    # Start from the coordinate columns, then append one column per annotation.
    predata = preCSVdatagen(MFA + 'Fiducials/transformedMarks.npy', radius, nbin, PlainFirst=False)
    data_csv = CSVdatagen(predata, results, d)
    writeCSV(path = MFA + '/ili/sm_annotation_detections.csv', data = data_csv)
def annotationSM2CSV_offline(MF,
                             tf_obj,
                             hdf5_path=r'F:\Google Drive\A-Team\projects\1c\hepatocytes_40samples, DKFZ\datasets/',
                             on_sample_list_path=r"F:\Google Drive\A-Team\projects\1c\hepatocytes_40samples, DKFZ\KATJAnMANUAL_ON_sample_annotations.csv"):
    """Build an ili' csv input from locally stored (offline) annotation images.

    Args:
        MF (str): path to the dataset Main Folder; must contain
            'Input/MALDI/' (with the .imzML file), 'Analysis/Fiducials/'
            and 'Analysis/ili/'.
        tf_obj (function): image transformation applied to each ion image
            for registration before raveling it into a csv column.
        hdf5_path (str): directory of .hdf5 files (or a single .hdf5 file)
            holding annotation images for all datasets.
        on_sample_list_path (str): csv of manually curated on-sample annotations.
    """
    # Bug fix: the original immediately reassigned MF to a hard-coded debug
    # path, silently ignoring the caller's argument.
    os.chdir(MF + 'Input/MALDI/')
    imzml_name = glob.glob('*.imzML')[0]
    ds_name = imzml_name.replace('.imzML', '')
    if os.path.isdir(hdf5_path):
        df_im0 = pd.concat([pd.read_hdf(p) for p in glob.glob(hdf5_path + '*.hdf5')])
    else:
        df_im0 = pd.read_hdf(hdf5_path)
    # Keep only the annotation images belonging to this dataset.
    df_im = df_im0[df_im0['ds_name'] == ds_name].reset_index()
    # Read but currently unused; kept so a missing curation file still fails loudly.
    on_mol_df = pd.read_csv(on_sample_list_path)
    Xs, Ys = np.load(MF + 'Analysis/Fiducials/transformedMarks.npy')
    Ys = Ys - np.min(Ys)
    Xs = Xs - np.min(Xs)
    ili_df = pd.DataFrame()
    ili_df['Num'] = list(range(len(Xs)))
    ili_df['X'] = Ys
    ili_df['Y'] = Xs
    ili_df['Z'] = np.ones(len(Xs)) * 0
    ili_df['R'] = np.ones(len(Xs)) * 20
    # One ili' column per annotation: the registered, raveled ion image.
    for i in tqdm.tqdm(df_im.index):
        mol_name = '{}, {}'.format(df_im.loc[i, 'mol_formula'], df_im.loc[i, 'adduct'])
        ili_df[mol_name] = tf_obj(df_im.loc[i, 'image']).ravel()
    ili_df.to_csv(MF + 'Analysis/ili/offline_on_sample.csv', index=False)
| [
"luca.rappez@embl.de"
] | luca.rappez@embl.de |
b581b228bef7e76b72d1825eaeb5b75ab0df67b6 | 6f87861d1ed440b09521bb2df7f8bc1d35e7087e | /manual/(4) Migration/Template/master/import_for_master.py | 5ad780b5bf9a5305acdc60efdad65aa3358a1534 | [] | no_license | AllianceWebcoop/webcoop_other | 0654122fffd29ad6562bbc2b1b7218adc7ea38f8 | 3d4ac702d58489906e11f1dd52c1ab0497b0f291 | refs/heads/master | 2023-01-09T07:11:11.553430 | 2020-11-12T08:03:06 | 2020-11-12T08:03:06 | 274,335,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,988 | py | import xlrd
import xmlrpclib
import csv
import traceback
import logging
import ConfigParser
CHUNK_SIZE = 50
THREADS = 4
class Struct(object):
    ''' Lightweight record: exposes the given keyword arguments as attributes. '''
    def __init__(self, **entries):
        for key, value in entries.items():
            setattr(self, key, value)
def read_csv(filename):
    """Read an import CSV and return (header, data_rows).

    The first row is treated as the header; all remaining rows are data.
    Raises ValueError when the file holds more than 1000 data records
    (1 header line + 1000 records = 1001 lines max).
    """
    # record count check, throw error if over 1000 records.
    # Fix: the original opened the file twice and never closed either
    # handle; context managers close both.
    with open(filename, 'r') as line_file:
        if len(line_file.readlines()) > 1001:
            raise ValueError('Too much record for import, please re-make the file by within 1000 records')
    # Fix: header defaults to [] so an empty file no longer raises
    # NameError on return.
    header = []
    data = []
    with open(filename, 'r') as csv_file:
        f = csv.reader(csv_file, delimiter=",", doublequote=True, lineterminator="\r\n", quotechar='"', skipinitialspace=True)
        for i, row in enumerate(f):
            if i == 0:
                header = row
            else:
                data.append(row)
    return header, data
def load_file(conn, table, filename, logger):
    # Read one CSV and bulk-load it into the given Odoo model over
    # XML-RPC (Python 2 module `xmlrpclib`). Logs and re-raises on any
    # failure; raises ValueError when the server reports no created ids.
    # read csv file
    try :
        header, data = read_csv(filename)
    except Exception ,e:
        logger.exception('CSV read Failed')
        raise
    # push the rows through the model's `load` method
    try :
        models = xmlrpclib.ServerProxy('{}/xmlrpc/2/object'.format(conn.url))
        res = models.execute_kw(conn.db, conn.uid, conn.password,
            table, 'load', [header, data]
        );
    except Exception ,e:
        logger.exception('Import Failed')
        raise
    print "res:", res
    # NOTE(review): res['ids'] is presumably a list of created ids —
    # the `> 0` comparison relies on Python 2 mixed-type ordering; confirm.
    if res['ids']>0:
        print filename
        logmessage = "Import successfully. table = %s ,records id = %s " % (table, res['ids'])
        logger.info(logmessage)
    else:
        logger.exception('Data Validation Error:%s' % res)
        raise ValueError
    #print
def delete_all_record(conn, table, logger):
    # Delete every record of the given Odoo model over XML-RPC:
    # search with an empty domain (matches all), then unlink the ids.
    try :
        models = xmlrpclib.ServerProxy('{}/xmlrpc/2/object'.format(conn.url))
        search_ids = models.execute_kw(conn.db, conn.uid, conn.password, table, 'search', [[]])
        models.execute_kw(conn.db, conn.uid, conn.password, table, 'unlink', [search_ids])
    except Exception ,e:
        logger.exception('Cannot delete current record')
        raise
    logmessage = "delete successfully.table = %s , records id = %s" % (table, [search_ids])
    logger.info(logmessage)
if __name__=="__main__":
    # log file handler: errors go to error.log and to the console
    logger = logging.getLogger('LoggingTest')
    logger.setLevel(10)
    fh = logging.FileHandler('error.log')
    logger.addHandler(fh)
    sh = logging.StreamHandler()
    logger.addHandler(sh)
    formatter = logging.Formatter('%(asctime)s:%(lineno)d:%(levelname)s:%(message)s')
    fh.setFormatter(formatter)
    sh.setFormatter(formatter)
    # set config file
    config = ConfigParser.ConfigParser()
    config.read('config.ini')
    #*************connection info(get from config.ini)**************
    url = config.get('connection_info','url')
    admin = config.get('connection_info','admin')
    admin_password = config.get('connection_info','admin_password')
    db = config.get('connection_info','db')
    #*************connection info**************
    # connect and authenticate against the Odoo XML-RPC endpoint
    try :
        common = xmlrpclib.ServerProxy('{}/xmlrpc/2/common'.format(url))
    except Exception ,e:
        logger.exception('Connection Failed')
        raise
    print "VERSION", common.version()
    try :
        uid = common.authenticate(db, admin, admin_password, {})
    except Exception ,e:
        logger.exception('Authenticate Failed')
        raise
    print "UID:", uid
    # bundle the session details into a simple attribute record
    p = {
        'uid': uid,
        'password': admin_password,
        'db': db,
        'url': url,
    }
    conn = Struct(**p)
    #***************update database table******************
    # import journal info, one CSV per master-data model
    logger.info("start import")
    try :
        load_file(conn, "account.account", "coa.csv", logger)
        load_file(conn, "wc.account.type", "account.type.csv", logger)
        load_file(conn, "wc.loan.type", "loan.type.csv", logger)
        load_file(conn, "wc.loan.type.deduction", "loan.type.deduction.csv", logger)
        load_file(conn, "wc.center", "center.csv", logger)
    except Exception:
        raise
    #***************update database table******************
| [
"noreply@github.com"
] | AllianceWebcoop.noreply@github.com |
dcb2eab57b35e0b1daf8da8703b54e63f51b7cae | 49e987133ad2146a3d265f4acdf61a31d1cb57fe | /logic.py | 946ef853e4a3a21de2c63ac797700d428a6c18ee | [] | no_license | Ashleybishop37/Python | 6f636aab8dcb28ae8db1129ba45939199002ecea | b5b3882c2b6803608e63d941c806c8afaf9aa4d5 | refs/heads/master | 2022-12-11T14:55:26.059802 | 2020-09-07T00:11:59 | 2020-09-07T00:11:59 | 292,190,255 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,433 | py | from Functions import temperature_calc,area_calc,volume_calc,weight_calc,log,read_file
def menu_selection():
    """Show the top-level conversion menu and run the chosen conversion.

    Prompts for a category (1-4), then a direction (1-2), delegates to the
    matching *_calc helper and finally shows the end prompt.  The escape
    sequences (\\033[...m) are ANSI colour codes.  Invalid input falls
    through and the function returns without doing anything.
    """
    print("\033[0;34m Welcome! What kind of conversion are you looking to do?")
    print("\033[1;37m Here are your options: \n 1. Temperature \n 2. Area \n 3. Volume \n 4. Weight")
    conversionType = input("\033[0;31m Enter the appropriate number(1-4): \033[0m")
    if conversionType == "1":
        print("\033[0;34m \n You chose a TEMPERATURE conversion")
        print("\033[1;37m Please specify the direction of the conversion: \n 1. Celsius to Fahrenheit \n 2. Fahrenheit to Celsius")
        tempType = input("\033[0;31m Enter the appropriate number(1-2): \033[1;37m")
        if tempType == "1" or tempType == "2":
            temperature_calc(tempType)
            end_prompt()
    if conversionType == "2":
        print("\033[0;34m\n You chose a AREA conversion")
        print("\033[1;37mPlease specify the direction of the conversion:")
        print("1. Square Foot to Square Meter \n2. Square Meter to Square Foot")
        areaType = input(" \033[0;31mEnter the appropriate number(1-2): \033[1;37m")
        if areaType == "1" or areaType == "2":
            area_calc(areaType)
            end_prompt()
    if conversionType == "3":
        print("\033[0;34m\n You chose a VOLUME conversion")
        print("\033[1;37mPlease specify the direction of the conversion: \n 1. Liter to Gallon \n 2. Gallon to Liter")
        volumeType = input(" \033[0;31mEnter the appropriate number(1-2): \033[1;37m")
        if volumeType == "1" or volumeType == "2":
            volume_calc(volumeType)
            end_prompt()
    if conversionType == "4":
        print("\033[0;34m\n You chose a WEIGHT conversion")
        print("\033[1;37mPlease specify the direction of the conversion: \n 1. Pound to Kilogram \n 2. Kilogram to Pound")
        weightType = input(" \033[0;31mEnter the appropriate number(1-2): \033[1;37m")
        if weightType == "1" or weightType == "2":
            weight_calc(weightType)
            end_prompt()
# Prompt shown after every conversion: print history, restart, or exit.
def end_prompt():
    """Show the post-conversion menu and dispatch on the user's choice.

    "1" prints the last 10 logged runs, "2" restarts the main menu (by
    recursion into menu_selection), "3" exits the interpreter; any other
    input simply returns to the caller.
    """
    print("\n 1. Print last 10 runs \n 2. Start Over \n 3. Exit")
    response = input("\033[0;31m Please enter appropriate number(1-3): \033[0m")
    if response == "1":
        read_file()
    if response == "2":
        menu_selection()
    if response == "3":
        exit()
menu_selection()  # module entry point: the menu starts as soon as the script runs
| [
"noreply@github.com"
] | Ashleybishop37.noreply@github.com |
37e7f87129a8f909f5d37aa564038f78fc589e64 | cd69495691e5e84634e64618287407520508ff25 | /model.py | 76bc86f98466352d990ad7a00faaafd34dcde32d | [] | no_license | MahlerTom/SiameseNeuralNetworks | 6ba9fbef18fec06383aa725974ba911f7c3f6d5d | da1529e5961c0aa1ab60c1fd792d73b6555c3cb0 | refs/heads/master | 2020-05-24T09:28:33.355329 | 2019-06-09T19:08:43 | 2019-06-09T19:08:43 | 187,203,803 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,351 | py | from .distance import absolute_distance
import time
import numpy as np
from tensorflow.keras.layers import Conv2D, Input, MaxPooling2D, Lambda, Flatten, Dense
# from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.models import Sequential, Model, load_model
from tensorflow.keras.regularizers import l2
from tensorflow.keras import backend as K
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.metrics import Precision, Recall
def initialize_bias(shape, name=None, dtype=None):
    """Sample initial bias values from N(0.5, 0.01) for the given shape.

    ``name`` and ``dtype`` are accepted (and ignored) so the function can
    be passed as a Keras initializer.
    """
    return np.random.normal(0.5, 1e-2, shape)
def initialize_weights(shape, name=None, dtype=None):
    """Sample initial convolution weights from N(0, 0.01) for the given shape.

    ``name`` and ``dtype`` are accepted (and ignored) so the function can
    be passed as a Keras initializer.
    """
    return np.random.normal(0.0, 1e-2, shape)
def initialize_weights_dense(shape, name=None, dtype=None):
    """Sample initial dense-layer weights from N(0, 0.04) for the given shape.

    ``name`` and ``dtype`` are accepted (and ignored) so the function can
    be passed as a Keras initializer.
    """
    return np.random.normal(0.0, 0.2, shape)
def siamese_model(
        input_shape=(250, 250, 1),
        filters=64,
        kernel_initializer=initialize_weights,
        kernel_initializer_d=initialize_weights_dense,
        kernel_regularizer=None,
        kernel_regularizer_d=None,
        bias_initializer=initialize_bias,
        kernel_size_list=[(10, 10), (7, 7), (4, 4), (4, 4)],
        units=64*64,  # filters*64
        optimizer=None,
        loss='binary_crossentropy',
        metrics=None,
        pretrained_weights=None,
        model_path=None,
        distance=absolute_distance,
        distance_output_shape=None,
        prediction_activation='sigmoid',
        ):
    """Build and compile a siamese CNN, or load one from ``model_path``.

    Two input images are encoded by a shared 4-block convolutional tower
    followed by a sigmoid dense embedding; the element-wise ``distance``
    of the two embeddings is the model output.

    Fix: ``kernel_regularizer``, ``kernel_regularizer_d``, ``optimizer``
    and the ``metrics`` list (containing *stateful* Precision/Recall
    objects) used to be constructed once at import time as default
    argument values, so every call shared the same instances.  They now
    default to ``None`` and are created fresh per call; passing explicit
    values behaves exactly as before.

    ``prediction_activation`` is currently unused because the final dense
    scoring layer is disabled and the raw distance is returned.
    """
    if model_path is not None:
        # Load a previously saved model verbatim; all other args are ignored.
        return load_model(model_path)
    # Instantiate per-call defaults (formerly shared module-level objects).
    if kernel_regularizer is None:
        kernel_regularizer = l2(2e-4)
    if kernel_regularizer_d is None:
        kernel_regularizer_d = l2(1e-3)
    if optimizer is None:
        optimizer = Adam(lr=1e-3)
    if metrics is None:
        metrics = ['accuracy', Precision(name='Precision'), Recall(name='Recall')]

    # Define the tensors for the two input images.
    left_input = Input(input_shape)
    right_input = Input(input_shape)

    # Shared encoder tower.
    model = Sequential()
    # Convolutional block 1 (no bias initializer here, matching the
    # original configuration).
    model.add(
        Conv2D(
            filters=filters,
            kernel_size=kernel_size_list[0],
            activation='relu',
            input_shape=input_shape,
            kernel_initializer=kernel_initializer,
            kernel_regularizer=kernel_regularizer
        )
    )
    model.add(MaxPooling2D())
    # Convolutional block 2
    model.add(
        Conv2D(
            filters=filters*2,
            kernel_size=kernel_size_list[1],
            activation='relu',
            kernel_initializer=kernel_initializer,
            bias_initializer=bias_initializer,
            kernel_regularizer=kernel_regularizer
        )
    )
    model.add(MaxPooling2D())
    # Convolutional block 3
    model.add(
        Conv2D(
            filters=filters*4,
            kernel_size=kernel_size_list[2],
            activation='relu',
            kernel_initializer=kernel_initializer,
            bias_initializer=bias_initializer,
            kernel_regularizer=kernel_regularizer
        )
    )
    model.add(MaxPooling2D())
    # Convolutional block 4 (no pooling before flattening).
    model.add(
        Conv2D(
            filters=filters*8,
            kernel_size=kernel_size_list[3],
            activation='relu',
            kernel_initializer=kernel_initializer,
            bias_initializer=bias_initializer,
            kernel_regularizer=kernel_regularizer
        )
    )
    # Embedding head: flatten and project to a sigmoid feature vector.
    model.add(Flatten())
    model.add(
        Dense(
            units=units,
            activation='sigmoid',
            kernel_regularizer=kernel_regularizer_d,
            kernel_initializer=kernel_initializer_d,
            bias_initializer=bias_initializer
        )
    )

    # Generate the encodings (feature vectors) for the two images with
    # the shared tower.
    encoded_l = model(left_input)
    encoded_r = model(right_input)

    # Element-wise distance between the two encodings.
    L1_distance = Lambda(distance, distance_output_shape)([encoded_l, encoded_r])
    # The sigmoid scoring layer is intentionally disabled; the distance
    # itself is the model output.
    # prediction = Dense(1, activation=prediction_activation, bias_initializer=bias_initializer)(L1_distance)
    prediction = L1_distance

    # Connect the inputs with the outputs and compile.
    siamese_net = Model(inputs=[left_input, right_input], outputs=prediction)
    siamese_net.compile(optimizer=optimizer, loss=loss, metrics=metrics)
    if pretrained_weights:
        siamese_net.load_weights(pretrained_weights)
    return siamese_net
"tom@mahler.tech"
] | tom@mahler.tech |
acfe67012e03a874f34951b890be688b24c038bb | b672a86db3e802c596ae94803c89f7a1802c0ad7 | /tools/check_totals.py | a98eb56c45d500f50b44e0bba073788012854f70 | [
"Unlicense"
] | permissive | couby-debog-dev-fini/Kaios | 44ce3e02e0e0d98de13b0d116aa1fa291d5c1a8f | cf9a4a6f3266eda9e4fe7c95bc5d10a648a0ca04 | refs/heads/master | 2023-01-28T20:54:46.688952 | 2020-12-10T13:34:46 | 2020-12-10T13:34:46 | 314,804,454 | 0 | 0 | Unlicense | 2020-12-06T05:08:23 | 2020-11-21T12:14:48 | HTML | UTF-8 | Python | false | false | 1,351 | py | #!/usr/bin/env python
# Check that the area case numbers add up to the totals.
import pandas as pd
def check_latest(totals_csv_file, cases_csv_file, country):
    """Check the latest national total against the summed per-area cases.

    Reads the last row of *totals_csv_file* (Date, ConfirmedCases), sums
    the TotalCases of *cases_csv_file* for the same date and *country*,
    and prints whether the two figures agree.
    """
    totals = pd.read_csv(totals_csv_file)
    latest = totals.iloc[-1]
    date = latest["Date"]
    reported_total = latest["ConfirmedCases"]

    area_cases = pd.read_csv(cases_csv_file)
    same_day = (area_cases["Date"] == date) & (area_cases["Country"] == country)
    selected = area_cases[same_day].astype({"TotalCases": "int64"})
    summed_total = selected["TotalCases"].sum()

    if reported_total == summed_total:
        print(
            "Total cases for {} on {} checks out at {}".format(
                country, date, reported_total
            )
        )
    else:
        print(
            "Mismatch. Total cases for {} on {} is {} from {} and {} from {}".format(
                country,
                date,
                reported_total,
                totals_csv_file,
                summed_total,
                cases_csv_file,
            )
        )
if __name__ == "__main__":
    # Cross-check the two nations whose totals files exist in data/.
    check_latest(
        "data/covid-19-totals-scotland.csv", "data/covid-19-cases-uk.csv", "Scotland"
    )
    check_latest(
        "data/covid-19-totals-wales.csv", "data/covid-19-cases-uk.csv", "Wales"
    )
"67220682+couby-debog-dev-fini@users.noreply.github.com"
] | 67220682+couby-debog-dev-fini@users.noreply.github.com |
41e8e74f21cffd487f6b0b9d67b13155c65d9abe | 9a928434baded09706e33841a8b6696fa7ac145f | /Dev/trydjango1-11/src/restuarants/views.py | 48f53200c54c1ae8421b6a4498a2d7a916734b96 | [] | no_license | rtamir/Django | 912e36dc8b682c9dc09faadd971e703260a3c98b | 39efc83fb3bd685e89b8b89b980b185b2ef0e5c6 | refs/heads/master | 2022-12-21T04:48:00.259828 | 2019-03-06T11:25:42 | 2019-03-06T11:25:42 | 161,774,693 | 0 | 1 | null | 2022-12-12T10:30:03 | 2018-12-14T11:19:33 | Python | UTF-8 | Python | false | false | 2,411 | py | import random
from django.http import HttpResponse
from django.shortcuts import render
from django.views import View
from django.views.generic import TemplateView
from .models import RestuarantLocation
# Create your views here.
#fun based view
def restuarants_listview(request):
    """Render the restaurant list page with every RestuarantLocation row."""
    locations = RestuarantLocation.objects.all()
    return render(
        request,
        'restuarants/restuarants_list.html',
        {"object_list": locations},
    )
# def home_old(request):
# html_var = 'f strings'
# html_ = f"""<!DOCTYPE html>
# <html lang=en>
# <head>
# </head>
# <body>
# <h1>Hello</h1>
# <p>This is {html_var} page</p>
# </body>
# </html>
# """
# return HttpResponse(html_)
# #return render(request,"home.html",{})#response
# def home(request):
# num = None
# some_list = [random.randint(0,1000000),random.randint(0,1000000), random.randint(0,1000000) ]
# cond_bool_itm = False
# if cond_bool_itm:
# num = random.randint(0,1000000)
# context = {
# "some_list": some_list,
# "num" : num}
# return render(request,"home.html",context)#response
# def about(request):
# context = {}
# return render(request,"about.html",context)#response
# def contact(request):
# context = {}
# return render(request,"contact.html",context)#response
# class ContactView(View):
# """docstring for ContactView"""
# def get(self,request,*args,**kwargs):
# #print(kwargs)
# context={}
# return render(request,"contact.html",context)
# def post(self,request,*args,**kwargs):
# #print(kwargs)
# context={}
# return render(request,"contact.html",context)
# def put(self,request,*args,**kwargs):
# #print(kwargs)
# context={}
# return render(request,"contact.html",context)
# class ContactTemplateView(TemplateView):
# template_name = 'contact.html'
# class HomeView(TemplateView):
# template_name = 'home.html'
# def get_context_data(self,*args,**kwargs):
# context = super(HomeView,self).get_context_data(*args,**kwargs)
# num = None
# some_list = [random.randint(0,1000000),random.randint(0,1000000), random.randint(0,1000000) ]
# cond_bool_itm = True
# if cond_bool_itm:
# num = random.randint(0,1000000)
# context = {
# "some_list": some_list,
# "num" : num}
# return(context)
# class AboutView(TemplateView):
# template_name = 'about.html'
# class ContactView(TemplateView):
# template_name = 'contact.html'
# | [
"rk4ravikiran@gmail.com"
] | rk4ravikiran@gmail.com |
b6946f3709bb792e94e5f84e8b521c1e4e7eb13e | cbce70a7882bc872fd63c1a8438a52bc299494bf | /src/cmp.py | b2fdcf603eb05959437516542c324e797dc3f8e6 | [] | no_license | lxd99/2020_BDCI | 00b83ae6c3eb189c3010e2a00b86976595d6cc87 | 505d5d7156b9083e9584fa60646cd0bd2cd12a2f | refs/heads/master | 2023-07-01T08:31:04.933865 | 2021-08-11T03:30:13 | 2021-08-11T03:30:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,511 | py | import numpy as np
import pandas as pd
from tqdm import tqdm
import math
import numpy as np
import matplotlib.pyplot as plt
tx = pd.DataFrame([[1, 2], [3, 4]])  # scratch frame; not referenced anywhere else in this script
def duck():
    """Drop the noisiest 10 % of rows per queue and write the result to CSV.

    For every QUEUE_ID, rows of ../data/train.csv are ranked by the
    standard deviation of their CPU columns and the top 10 % (most
    volatile) rows are discarded; the rest go to ../data/drop_tran.csv.

    Fixes: the per-row std previously used the default ``axis=0`` and was
    assigned as a column, mis-aligning the column-indexed result (all-NaN
    'mean' column); ``pd.np`` and ``DataFrame.append`` are removed in
    current pandas and are replaced by ``np``-free code and ``pd.concat``.
    The old empty float seed frame also silently upcast integer columns.
    """
    data = pd.read_csv('../data/train.csv')
    cpu_cols = [f'{j}_{i}' for i in range(1, 6) for j in ['CPU_USAGE', 'cpu']]
    # Per-row spread (axis=1) of the CPU columns, used as a noise score.
    data['mean'] = data[cpu_cols].std(axis=1)
    kept = []
    for queue_id in tqdm(data['QUEUE_ID'].unique().tolist()):
        mdf = data[data['QUEUE_ID'] == queue_id]
        mdf = mdf.sort_values(by=['mean']).reset_index(drop=True)
        # Keep the 90 % of rows with the smallest spread.
        kept.append(mdf.iloc[:int(0.9 * mdf.shape[0])])
    fdata = pd.concat(kept) if kept else data.iloc[0:0]
    fdata = fdata.drop('mean', axis=1)
    fdata.to_csv('../data/drop_tran.csv')
def DTWDistance(s1, s2):
    """Dynamic-time-warping distance between two numeric sequences.

    Each cell's cost is the squared difference of the aligned elements;
    the return value is the square root of the accumulated cost along the
    optimal warping path.
    """
    n, m = len(s1), len(s2)
    inf = float('inf')
    # dp[i][j] = best cost of aligning s1[:i] with s2[:j].
    dp = [[inf] * (m + 1) for _ in range(n + 1)]
    dp[0][0] = 0.0
    for i in range(1, n + 1):
        for j in range(1, m + 1):
            cost = (s1[i - 1] - s2[j - 1]) ** 2
            dp[i][j] = cost + min(dp[i - 1][j], dp[i][j - 1], dp[i - 1][j - 1])
    return math.sqrt(dp[n][m])
# Compare two prediction files against the shared test history, rank the
# rows whose LAUNCHING_JOB_NUMS predictions diverge most (by DTW), and
# plot a 4x4 page of the CPU_USAGE curves for one page of the ranking.
fx,fy = 'baseline.csv','baseline_lda.csv'
# lx/ly: job-number predictions of the two files; lz: test history.
lx, ly, lz = pd.read_csv(fx)[[f'LAUNCHING_JOB_NUMS_{i}' for i in range(1, 6)]].values, \
             pd.read_csv(fy)[[f'LAUNCHING_JOB_NUMS_{i}' for i in range(1, 6)]].values, \
             pd.read_csv('../data/comb_test.csv')[[f'LAUNCHING_JOB_NUMS_{i}' for i in range(1, 6)]].values
# x/y: CPU usage predictions of the two files; z: test history.
x, y, z = pd.read_csv(fx)[[f'CPU_USAGE_{i}' for i in range(1, 6)]].values, \
          pd.read_csv(fy)[[f'CPU_USAGE_{i}' for i in range(1, 6)]].values, \
          pd.read_csv('../data/comb_test.csv')[[f'CPU_USAGE_{i}' for i in range(1, 6)]].values
# Prepend the 5 history steps so each row holds 10 time steps.
x, y = np.append(z, x, axis=1), np.append(z, y, axis=1)
lx, ly = np.append(lz, lx, axis=1), np.append(lz, ly, axis=1)
# x = x[[f'CPU_USAGE_{i}'for i in range(1,6)]].values
# y =
# NOTE(review): np.float is removed in NumPy >= 1.24; this needs `float`
# or np.float64 on current NumPy.
x = x.astype(np.float)
y = y.astype(np.float)
print(x.dtype)
# Rank rows by DTW distance between the two job-number predictions
# (largest disagreement first); keep the row index alongside.
cmp = []
for i in range(x.shape[0]):
    cmp.append((DTWDistance(lx[i, -5:], ly[i, -5:]), i))
cmp.sort(reverse=True)
fig = plt.figure()
cnt = 1
pos = 5  # which page of 16 ranked rows to display
need_out = [cmp[i][1]+1 for i in range(16*pos,16*(pos+1))]  # 1-based row ids of this page
for err, i in cmp[16*pos:16*(pos+1)]:
    plt.subplot(4, 4, cnt)
    plt.plot(range(10), x[i], 'r', label='merge')
    plt.plot(range(10), y[i], 'b', label='merege2')
    plt.title(f'error={int(err)}')
    # plt.legend()
    cnt += 1
plt.show()
print(need_out) | [
"l12x19d4@buaa.edu.cn"
] | l12x19d4@buaa.edu.cn |
9248bd8244dbde9dc739d3c6ccb0ab3d3844b09d | 4e3ebb0704bba0cb12757ced36018e39829c8f6d | /ejemplos/network/telnet_startWar.py | c78f1f6c7017c282c4ef081402d76e974ebe48ab | [] | no_license | moisesStevend/micropython_nodemcu2 | 9c62c0ff397ef8018528219c5c175257e97ad248 | 6922dc8dc0db16e0d7a4a5f3b7cf5444618e4814 | refs/heads/master | 2021-01-19T14:45:24.935187 | 2017-10-24T18:00:03 | 2017-10-24T18:00:03 | 100,919,680 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | import socket
# Connect to a public telnet MUD server and stream everything it sends
# to stdout (MicroPython-style socket usage; loops forever).
#addr_info = socket.getaddrinfo("furrymuck.com", 8888)
addr_info = socket.getaddrinfo("aardmud.org",23)
addr = addr_info[0][-1]  # (host, port) tuple of the first resolved address
s = socket.socket()
s.connect(addr)
while True:
    # Read up to 500 bytes per iteration and echo them decoded as UTF-8.
    data = s.recv(500)
    print(str(data, 'utf8'), end='')
| [
"noreply@github.com"
] | moisesStevend.noreply@github.com |
f103a915f720597b01a67d9961f1606c3d21dd2d | 23f3519e7610253c3affc469cfdd7d7e87efc057 | /cogs/role.py | fbac146790c53e09fa7b78a41dd3e4e3203d7c84 | [] | no_license | roott-x/discord-bot | 5c923f9bf73cb5807b78f2c9b5cf0741f2c56ee7 | 0e266965e80bc654432f89fd3083e8ee62d86583 | refs/heads/main | 2023-02-10T06:04:00.691896 | 2020-12-14T17:21:29 | 2020-12-14T17:21:29 | 321,418,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,567 | py | import discord
from discord.ext import commands
from discord.ext.commands import Bot, has_permissions, MissingPermissions
class Role(commands.Cog):
    """Cog bundling guild role management commands.

    Self-assignable roles are persisted per guild in
    ``docs/selfRoleDict.txt`` and the default member-join role in
    ``docs/memberJoinRoleDict.txt``; both files hold a ``str()``-rendered
    dict that is re-read with ``eval`` on every command.

    Fixes: four write paths referenced ``file.close`` without calling it,
    so the handle was never explicitly closed -- the missing ``()`` was
    added; the bare ``except:`` clauses were narrowed to ``KeyError``
    (the only expected failure: the guild has no entry yet).
    """

    def __init__(self, client):
        self.client = client

    @commands.command(pass_context = True)
    @commands.cooldown(1, 5, commands.BucketType.user)
    async def join(self, ctx, *args):
        """Give the invoking member a self-assignable role by name."""
        arg_str = ' '.join(args)
        # NOTE: eval() of file contents is unsafe if the file can be
        # tampered with; kept because every command shares this format.
        file = open("docs/selfRoleDict.txt","r")
        lines = file.readlines()
        dict = eval(lines[0])
        file.close()
        role = discord.utils.get(ctx.guild.roles, name = arg_str)
        if arg_str in dict[str(ctx.guild.id)]:
            if role in ctx.author.roles:
                embed = discord.Embed(title="You have already joined this role.", color=0xf03950)
                await ctx.send(embed=embed)
            else:
                await ctx.message.author.add_roles(role)
                embed = discord.Embed(title="You have joined the " + arg_str + " role.", color=0xf03950)
                await ctx.send(embed=embed)
        else:
            # Role exists but is not self-assignable vs. role not found.
            if role in ctx.guild.roles:
                embed = discord.Embed(title="You do not have permission to join this role.", color=0xf03950)
                await ctx.send(embed=embed)
            else:
                embed = discord.Embed(title="That role does not exist.", color=0xf03950)
                await ctx.send(embed=embed)

    @commands.command(pass_context = True)
    @commands.cooldown(1, 5, commands.BucketType.user)
    async def leave(self, ctx, *args):
        """Remove a role from the invoking member, unless it is the default role."""
        arg_str = ' '.join(args)
        role = discord.utils.get(ctx.message.guild.roles, name=arg_str)
        if role in ctx.message.guild.roles:
            if role in ctx.author.roles:
                file = open("docs/memberJoinRoleDict.txt","r")
                lines = file.readlines()
                dict = eval(lines[0])
                file.close()
                try:
                    # The guild may have no default-role entry at all.
                    dict_role = dict[str(ctx.message.guild.id)]
                    if arg_str == dict_role:
                        embed = discord.Embed(title="This is a default role. Contact an administrator if you want this role removed.", color=0xf03950)
                        await ctx.send(embed=embed)
                    else:
                        await ctx.author.remove_roles(discord.utils.get(ctx.guild.roles, name = arg_str))
                        embed = discord.Embed(title="You have left the " + arg_str + " role.", color=0xf03950)
                        await ctx.send(embed=embed)
                except KeyError:
                    # No default role configured for this guild: just remove.
                    await ctx.author.remove_roles(discord.utils.get(ctx.guild.roles, name = arg_str))
                    embed = discord.Embed(title="You have left the " + arg_str + " role.", color=0xf03950)
                    await ctx.send(embed=embed)
            else:
                embed = discord.Embed(title="You do not have this role.", color=0xf03950)
                await ctx.send(embed=embed)
        else:
            embed = discord.Embed(title="That role does not exist.", color=0xf03950)
            await ctx.send(embed=embed)

    @commands.command(pass_context = True)
    @has_permissions(administrator=True)
    async def setjoinrole(self, ctx, *args):
        """Admin: set the role automatically given to new members of this guild."""
        arg_str = ' '.join(args)
        if discord.utils.get(ctx.message.guild.roles, name=arg_str) in ctx.message.guild.roles:
            file = open("docs/memberJoinRoleDict.txt","r")
            lines = file.readlines()
            dict = eval(lines[0])
            file.close()
            dict[str(ctx.message.guild.id)] = arg_str
            file = open("docs/memberJoinRoleDict.txt","w")
            file.truncate(0)
            file.write(str(dict))
            file.close()  # fix: was `file.close` (attribute access, never called)
            await ctx.send(arg_str + " set as default role. When new members join, they will get this role.")
        else:
            await ctx.send("That role does not exist.")

    @commands.command(pass_context = True)
    @has_permissions(administrator=True)
    async def deljoinrole(self, ctx, *args):
        """Admin: clear the default member-join role of this guild."""
        arg_str = ' '.join(args)
        if discord.utils.get(ctx.message.guild.roles, name=arg_str) in ctx.message.guild.roles:
            file = open("docs/memberJoinRoleDict.txt","r")
            lines = file.readlines()
            dict = eval(lines[0])
            file.close()
            if dict.get(str(ctx.message.guild.id), 0) != 0:
                dict.pop(str(ctx.message.guild.id))
                file = open("docs/memberJoinRoleDict.txt","w")
                file.truncate(0)
                file.write(str(dict))
                file.close()  # fix: was `file.close` (attribute access, never called)
                await ctx.send(arg_str + " no longer set as default role. When new members join, they will no longer get this role.")
            else:
                await ctx.send("That role does not exist, or was not set as the default role.")
        else:
            await ctx.send("That role does not exist, or was not set as the default role.")

    @commands.command(pass_context = True, aliases = ['asr'])
    @has_permissions(administrator=True)
    async def addselfrole(self, ctx, *args):
        """Admin: mark an existing role as self-assignable via -join."""
        arg_str = ' '.join(args)
        role = discord.utils.get(ctx.message.guild.roles, name=arg_str)
        if role in ctx.guild.roles:
            file = open("docs/selfRoleDict.txt","r")
            lines = file.readlines()
            dict = eval(lines[0])
            file.close()
            if dict.get(str(ctx.message.guild.id), 0) != 0:
                list = dict[str(ctx.guild.id)]
                if arg_str in list:
                    embed = discord.Embed(title="This role is already a self assignable role.", color=0xf03950)
                    await ctx.send(embed=embed)
                else:
                    dict[str(ctx.guild.id)].append(arg_str)
                    embed = discord.Embed(title=""+arg_str+" role can now be obtained through -join.", color=0xf03950)
                    await ctx.send(embed=embed)
            else:
                # First self-assignable role for this guild.
                dict[str(ctx.guild.id)] = [arg_str]
                embed = discord.Embed(title=""+arg_str+" role can now be obtained through -join.", color=0xf03950)
                await ctx.send(embed=embed)
            file = open("docs/selfRoleDict.txt","w")
            file.truncate(0)
            file.write(str(dict))
            file.close()  # fix: was `file.close` (attribute access, never called)
        else:
            embed = discord.Embed(title="That role does not exist.", color=0xf03950)
            await ctx.send(embed=embed)

    @commands.command(pass_context = True, aliases = ['dsr'])
    @has_permissions(administrator=True)
    async def delselfrole(self, ctx, *args):
        """Admin: remove a role from this guild's self-assignable list."""
        arg_str = ' '.join(args)
        file = open("docs/selfRoleDict.txt","r")
        lines = file.readlines()
        dict = eval(lines[0])
        file.close()
        list = dict[str(ctx.guild.id)]
        if arg_str in list:
            list.remove(arg_str)
            dict[str(ctx.guild.id)] = list
            embed = discord.Embed(title=""+arg_str+" role is no longer a self assignable role.", color=0xf03950)
            await ctx.send(embed=embed)
        else:
            embed = discord.Embed(title="This role is currently not a self assignable role.", color=0xf03950)
            await ctx.send(embed=embed)
        file = open("docs/selfRoleDict.txt","w")
        file.truncate(0)
        file.write(str(dict))
        file.close()  # fix: was `file.close` (attribute access, never called)

    @commands.command(pass_context = True)
    @commands.cooldown(1, 5, commands.BucketType.user)
    async def rolelist(self, ctx):
        """List this guild's self-assignable roles in an embed."""
        file = open("docs/selfRoleDict.txt","r")
        lines = file.readlines()
        dict = eval(lines[0])
        file.close()
        try:
            list = dict[str(ctx.guild.id)]
            if (len(list) == 0):
                embed = discord.Embed(title="There are currently no self-assignable roles.", color=0x23272A)
                await ctx.send(embed=embed)
            else:
                desc = ""
                for i in list:
                    desc = desc + "\n" + i
                embed = discord.Embed(title="Self-assignable roles:", description=desc, color=0x23272A)
                await ctx.send(embed=embed)
        except KeyError:
            # Guild has no entry in the file yet.
            embed = discord.Embed(title="There are currently no self-assignable roles.", color=0x23272A)
            await ctx.send(embed=embed)
def setup(client):
    # discord.py extension entry point: register the Role cog on the bot.
    client.add_cog(Role(client))
# Old, superseded implementation of `join`, kept for reference only.  The
# triple-quoted block below is a bare string literal, so it is evaluated
# and discarded at import time (dead code).
'''
    guild = ctx.message.author.guild
    member = discord.utils.get(guild.roles, name="Member")
    if member in ctx.message.author.roles:
        if discord.utils.get(guild.roles, name = arg_str) in guild.roles:
            role = discord.utils.get(guild.roles, name = arg_str)
            if role == discord.utils.get(guild.roles, name = "NSFW Permission"):
                embed = discord.Embed(title="Please ask a moderator for this role.", color=0xf03950)
                await ctx.send(embed=embed)
            else:
                if role.position >= member.position:
                    embed = discord.Embed(title="You do not have permission to join this role.", color=0xf03950)
                    await ctx.send(embed=embed)
                else:
                    await ctx.message.author.add_roles(discord.utils.get(guild.roles, name = arg_str))
                    embed = discord.Embed(title="You have joined the " + arg_str + " role.", color=0xf03950)
                    await ctx.send(embed=embed)
        else:
            embed = discord.Embed(title="Error: Role not found.", color=0xf03950)
            await ctx.send(embed=embed)
    else:
        embed = discord.Embed(title="You do not have permission to join this role.", color=0xf03950)
        await ctx.send(embed=embed)
'''
| [
"noreply@github.com"
] | roott-x.noreply@github.com |
be81f104decc5d52a29adf9cf12a2b4e18b608ee | afe201d25bdd4c838821b15cb2406bf393add74f | /server.py | f9686b0b53c71fb6495e01da138d22c060d6495a | [] | no_license | kyle-deleon/job_search_tracker | c239da9ef93551a268153f2fc58d9cb0d436949b | 8058e0445e650d3cf546c932994b55513ac8e428 | refs/heads/master | 2022-09-21T16:52:23.130225 | 2020-06-05T03:05:05 | 2020-06-05T03:05:05 | 267,954,477 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,796 | py | from flask import Flask, render_template, request, session, flash, redirect
from flask_bcrypt import Bcrypt
from mysqlconnection import connectToMySQL
import re
EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
app = Flask(__name__)
app.secret_key = "keep it secret"
bcrypt = Bcrypt(app)
schema = "job_search_tracker"
@app.route('/')
def log_reg_landing():
    """Landing page: render the login form."""
    return render_template("login.html")
@app.route('/register')
def register():
    """Render the registration form."""
    return render_template("registration.html")
@app.route('/on_register', methods=['POST'])
def on_register():
    """Validate the registration form and create the user.

    Validates email (non-empty, format, uniqueness), first/last name
    length, password length and confirmation; on success stores the
    bcrypt-hashed password, logs the user in via the session and
    redirects to '/'.  On any failure the errors are flashed and the
    user is redirected back to '/'.
    """
    is_valid = True
    # Email: required, well-formed, and not already registered.
    if len(request.form['em']) < 1:
        is_valid = False
        flash("Please enter an email")
    elif not EMAIL_REGEX.match(request.form['em']):
        is_valid = False
        flash("Please enter a valid email")
    else:
        mysql = connectToMySQL(schema)
        query = 'SELECT * FROM users WHERE email = %(em)s;'
        data = {
            'em':request.form['em']
        }
        user = mysql.query_db(query,data)
        if user:
            is_valid = False
            flash("email already in use")
    # NOTE(review): "Fist"/"atleast" typos below are in user-facing flash
    # strings; left unchanged here because they are runtime text.
    if len(request.form['fn']) < 2:
        is_valid=False
        flash("Fist name must be atleast 2 characters long.")
    if len(request.form['ln']) < 2:
        is_valid=False
        flash("last name must be atleast 2 characters long.")
    if len(request.form['pw']) < 8:
        is_valid=False
        flash("password must be atleast 8 characters long.")
    if request.form['pw'] != request.form['cpw']:
        is_valid=False
        flash("Passwords must match")
    if is_valid:
        # Insert the new user with a bcrypt password hash.
        query = "INSERT INTO users (first_name, last_name, email, password, created_at, updated_at) VALUES ( %(fn)s, %(ln)s, %(em)s, %(pw)s, NOW(), NOW())"
        data = {
            "fn": request.form['fn'],
            "ln": request.form['ln'],
            "em": request.form['em'],
            "pw": bcrypt.generate_password_hash(request.form['pw'])
        }
        mysql = connectToMySQL(schema)
        user_id = mysql.query_db(query,data)
        if user_id:
            # Log the new user in by putting id and name in the session.
            session['user_id'] = user_id
            session['name'] = request.form['fn']
            return redirect ('/')
    return redirect('/')
@app.route("/on_login", methods=["POST"])
def on_login():
    """Validate login credentials and start a session.

    Looks the user up by email and checks the bcrypt hash; on success
    stores id and first name in the session and redirects to /account,
    otherwise flashes an error and redirects back to '/'.
    """
    is_valid = True
    if not EMAIL_REGEX.match(request.form['em']):
        is_valid = False
        flash("email is not valid")
    if is_valid:
        query = "SELECT users.id, users.first_name, users.password FROM users WHERE users.email = %(em)s"
        data = {
            'em': request.form['em']
        }
        mysql = connectToMySQL(schema)
        result = mysql.query_db(query, data)
        if result:
            # Compare the submitted password against the stored hash.
            if not bcrypt.check_password_hash(result[0]['password'], request.form['pw']):
                flash("incorrect password and/or email")
                return redirect('/')
            else:
                session['user_id'] = result[0]['id']
                session['name'] = result[0]['first_name']
                return redirect('/account')
        else:
            flash("incorrect email and/or password")
    return redirect ('/')
@app.route('/on_logout')
def on_logout():
    """Clear the whole session and return to the landing page."""
    session.clear()
    return redirect('/')
@app.route('/account')
def account():
    """Show the logged-in user's job applications; redirect guests to '/'."""
    if "user_id" not in session:
        return redirect('/')
    query = "SELECT * FROM jobs where user_id = %(sid)s"
    data = {'sid':session['user_id']}
    mysql = connectToMySQL(schema)
    jobs = mysql.query_db(query, data)
    return render_template('account.html', jobs = jobs)
@app.route('/on_applied', methods=['POST'])
def on_applied():
    """Validate a job-application form and insert it for the session user.

    Checks minimum lengths of company name, position, platform and link;
    on success inserts the job row tied to session['user_id'].  Always
    redirects back to /account, with validation errors flashed.
    """
    is_valid = True
    if len(request.form['company_name']) < 3:
        is_valid = False
        flash("Company Name must be 3 characters")
    # NOTE(review): "Postion"/"atleast" typos below are in user-facing
    # flash strings; left unchanged here because they are runtime text.
    if len(request.form['position']) < 6:
        is_valid=False
        flash("Postion must be atleast 6 characters long")
    if len(request.form['platform']) < 6:
        is_valid=False
        flash("Platform must be atleast 6 characters long")
    if len(request.form['link']) < 8:
        is_valid=False
        flash("Please enter Link")
    if is_valid:
        query = "INSERT INTO jobs (user_id, company_name, position, platform, company_link, created_at, updated_at) VALUES ( %(sid)s, %(company_name)s, %(position)s, %(platform)s, %(link)s, NOW(), NOW())"
        data = {
            "sid":session['user_id'],
            "company_name": request.form['company_name'],
            "position": request.form['position'],
            "platform": request.form['platform'],
            "link": request.form['link']
        }
        mysql = connectToMySQL(schema)
        mysql.query_db(query,data)
    return redirect('/account')
if __name__ == "__main__":
    # Development server only; debug=True must not be used in production.
    app.run(debug=True)
"kydeleon10@gmail.com"
] | kydeleon10@gmail.com |
6f6ff961181868786b67946078300a383acc9695 | b0ae1a052ec28a681d4201066c5e25af622831c8 | /PatRecTutorials/patrec-tutorials/blatt1/aufg01.py | f68aadb06ac0dec684e53fb942dd7762b8ff5987 | [] | no_license | morpheus176/Mustererkennung | 8bda7d2c6a8f811fe92c977ff06ad8f91f518c2e | 3b9107306993b8af263ffde21a00ae60d638441b | refs/heads/master | 2022-04-12T08:27:35.927135 | 2020-02-13T11:44:28 | 2020-02-13T11:44:28 | 238,529,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,716 | py | from common.data_provider import DataProvider
import matplotlib.pyplot as plt
from common.classification import KNNClassifier
from common.classification import ClassificationEvaluator
from common.classification import CrossValidation
def aufg01():
    """Exercise 1: k-nearest-neighbour classification on the 2-D toy data.

    Visualises the three training classes, classifies the test set with a
    k-NN classifier (k=70, cityblock metric), prints the error rate, and
    finally sweeps k over 65..75 with 5-fold cross validation.
    """
    # As an introduction, classifiers are implemented on an example data
    # set.  The data set data2d contains two-dimensional training features
    # for three pattern classes.  For this data a nearest-neighbour
    # classification is to be realised.
    # Note: structure your implementation so that the classifiers can be
    # reused in future exercises.

    # Below, the example data set is loaded through the DataProvider class
    # and then visualised.  Familiarise yourself with both the loading and
    # the visualisation, since you will reuse these aspects in the coming
    # exercises.
    # http://matplotlib.org/users/pyplot_tutorial.html
    #
    # Useful functions: plt.scatter
    # http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.scatter
    #
    # Tip: for a better visualisation, all scatter plots in Matplotlib
    # should be called with the argument "edgecolor=(0, 0, 0)".
    train_data_provider = DataProvider(DataProvider.DATA2DROOT_TRAIN)
    test_data_provider = DataProvider(DataProvider.DATA2DROOT_TEST)
    fig = plt.figure()
    ax = fig.add_subplot(111)
    # fig, (ax, ax2) = plt.subplots(1, 2)
    # One scatter plot per class: red / green / blue.
    data = train_data_provider.get_class_arr(0)
    ax.scatter(data[:, 0], data[:, 1], c='#FF0000', edgecolor=(0, 0, 0))
    data = train_data_provider.get_class_arr(1)
    ax.scatter(data[:, 0], data[:, 1], c='#00FF00', edgecolor=(0, 0, 0))
    data = train_data_provider.get_class_arr(2)
    ax.scatter(data[:, 0], data[:, 1], c='#0000FF', edgecolor=(0, 0, 0))
    # plt.show()

    #
    # Implement a nearest-neighbour classifier.
    # Complete the class KNNClassifier in the module common.classifiers.
    # Try different distance measures.  Which ones do you consider
    # particularly sensible?
    train_data, train_labels = train_data_provider.get_dataset_and_labels()
    test_data, test_labels_gt = test_data_provider.get_dataset_and_labels()
    test_labels_gt = test_labels_gt.astype('float64')
    train_labels = train_labels.astype('float64')
    knn_classifier = KNNClassifier(k_neighbors=70, metric='cityblock') # euclidean, cityblock, chebyshev
    knn_classifier.estimate(train_data, train_labels)
    estimated_labels = knn_classifier.classify(test_data)
    # print(len(estimated_labels==0))
    # print(test_labels_gt.shape, estimated_labels.shape)
    # print(test_labels_gt.dtype, estimated_labels.dtype)
    #
    #data_x = test_data[mask, 0]
    #data_y = test_data[mask, 1]
    #ax2.scatter(data_x, data_y, c='#FF0000', edgecolor=(0, 0, 0))
    #data = test_data[estimated_labels == 1]
    #ax2.scatter(data[:, 0], data[:, 1], c='#00FF00', edgecolor=(0, 0, 0))
    #data = test_data[estimated_labels == 2]
    #ax2.scatter(data[:, 0], data[:, 1], c='#0000FF', edgecolor=(0, 0, 0))
    #fig.tight_layout()
    #plt.show()
    #
    # Use the ClassificationEvaluator class in the module
    # common.classifiers to evaluate the results.
    evals = ClassificationEvaluator(estimated_labels, test_labels_gt)
    error_rate, n_wrong, n_samples = evals.error_rate()
    print(error_rate, n_wrong, n_samples)
    # raise NotImplementedError('Implement me')

    # A plain NN classifier alone is usually not sufficient.  Extend the
    # classifier to a k-NN classifier.
    # For the majority vote, defaultdict is useful (see intro).
    # https://docs.python.org/3/library/collections.html#collections.defaultdict

    # Training parameters should always be optimised by cross validation
    # on the training data.  With the best parameters a classifier is then
    # built and evaluated on the test data.
    # Use the CrossValidation class in the classification module to
    # optimise the parameter k.
    # In the following exercises you are free to use cross validation or
    # to optimise directly on the test data.
    cross = CrossValidation(train_data, train_labels, 5)
    # Sweep the neighbour count; the print below is a German status line
    # ("number of neighbours = ...") and is runtime text, left unchanged.
    for i in range(65, 76):
        knn = KNNClassifier(i, 'cityblock')
        crossval_overall_result, crossval_class_results = cross.validate(knn)
        print('Anzahl der Nachbarn = ', i, ' : ', crossval_overall_result)
    ''' Optimum bei k_neighbours = 70 '''
    # raise NotImplementedError('Implement me')
if __name__ == '__main__':
aufg01()
| [
"julia.sobolewski@tu-dortmund.de"
] | julia.sobolewski@tu-dortmund.de |
8c7bb7ade1dfce6ac565e2f47ddb593c0c45c890 | 6155d98c696f012eacdab9f0817f3d7a909e8c5c | /chap7/MP-HW7/graph_node.py | 3136046596d5dbd8c1303624f4c125db65ac18d0 | [
"MIT"
] | permissive | Forrest-Z/Motion-Planning-Course | 496c9f3689abb67904f9db02a17b9684769567a7 | d2ff6c96bbe3944c21a08af65e82fc44e882d506 | refs/heads/master | 2020-09-27T05:32:15.580994 | 2020-01-02T08:28:26 | 2020-01-02T08:28:26 | 226,441,879 | 0 | 0 | MIT | 2020-01-02T08:28:28 | 2019-12-07T02:05:48 | null | UTF-8 | Python | false | false | 3,389 | py | from racetracks import *
class Node:
def __init__(self, px, py, vx, vy):
# state
self.px = px
self.py = py
self.vx = vx
self.vy = vy
# value
self.g_value = 0.0
# successor
self.next_prob_9 = []
self.next_prob_1 = []
# key
self.key = self.get_key()
self.is_goal = False
@staticmethod
def generate_key(px, py, vx, vy):
return "%02d" % px + "%02d" % py + "%02d" % vx + "%02d" % vy
def get_key(self):
return self.generate_key(self.px, self.py, self.vx, self.vy)
def connect_to_graph(self, grid):
for u in ACTION_SPACE:
self.next_prob_9.append(self.control(u[0], u[1], grid, success=True))
self.next_prob_1.append(self.control(u[0], u[1], grid, success=False))
@staticmethod
def velocity_constraints(vx, vy):
return np.sign(vx) * min(abs(vx), 4), np.sign(vy) * min(abs(vy), 4)
def safety_constraints(self, px2, py2, grid):
assert 0 <= self.px < grid.shape[0]
assert 0 <= self.py < grid.shape[1]
x_dist = np.abs(px2 - self.px)
y_dist = np.abs(py2 - self.py)
step = max(x_dist, y_dist)
x_way_points = np.linspace(self.px, px2, step + 1, endpoint=True)
y_way_points = np.linspace(self.py, py2, step + 1, endpoint=True)
way_points = np.stack([np.ceil(x_way_points), np.ceil(y_way_points)], axis=1).astype(np.int)
for idx in range(way_points.shape[0]):
point = way_points[idx]
if (0 <= point[0] < grid.shape[0]) and (0 <= point[1] < grid.shape[1]):
if grid[point[0], point[1]] == FINISH:
return FINISH, point
elif grid[point[0], point[1]] == OCCUPIED:
return OCCUPIED, point
# else:
# free and start: continue
else:
return OUTBOUND, point
if grid[way_points[-1][0], way_points[-1][1]] == START:
return START, way_points[-1]
else:
return FREE, way_points[-1]
# end definition
def control(self, ux, uy, grid, success):
assert ux in action_assert_list
assert uy in action_assert_list
# success with probability of 0.9
if not success:
ux = 0
uy = 0
# dynamic model
vx = self.vx + ux
vy = self.vy + uy
vx, vy = self.velocity_constraints(vx, vy)
px = self.px + vx
py = self.py + vy
# check collision
status, point = self.safety_constraints(px, py, grid)
if status == FREE:
assert px == point[0] and py == point[1]
return self.generate_key(px, py, vx, vy)
elif status == START:
assert grid[point[0], point[1]] == START
assert px == point[0] and py == point[1]
return self.generate_key(point[0], point[1], 0, 0)
elif status == FINISH:
assert grid[point[0], point[1]] == FINISH
return self.generate_key(point[0], point[1], 0, 0)
else: # out of bound or occupied
assert status == OUTBOUND or status == OCCUPIED
rand_start = START_LINE[np.random.randint(low=0, high=3, size=1)[0]]
return self.generate_key(rand_start[0], rand_start[1], 0, 0)
| [
"boyang.li@outlook.com"
] | boyang.li@outlook.com |
922ad653e03b85705765df9053e41ed4a995fcc9 | 7e3c7e9bf8e8410b688787bbf41f93e0bce30ef8 | /misc/fix_keras_optimizer.py | 4c1e72705ec6e77de0e31f5dd426bd7ffed1acef | [] | no_license | directorscut82/msthesis-experiments | bb8233d4e54da0b294b3a43f219bc424626e8ad5 | f86e344c972f2b61c3fa16eae523fd20303e8842 | refs/heads/master | 2020-03-23T08:24:19.535200 | 2017-07-27T06:23:18 | 2017-07-27T06:23:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 475 | py | #!/usr/bin/env python
"""
Make keras 1.x models usable in keras 2.x.
Run this when you get the following error:
ValueError: Optimizer weight shape (512,) not compatible with provided weight shape (32,)
"""
import glob
import h5py
model_files = sorted(glob.glob('*.h5'))
for model_file in model_files:
print("Update '{}'".format(model_file))
with h5py.File(model_file, 'a') as f:
if 'optimizer_weights' in f.keys():
del f['optimizer_weights']
| [
"info@martin-thoma.de"
] | info@martin-thoma.de |
214e49ddfe976444c8056d7219d2103c35998f7b | 47d504eba70ce8fcf0d71fb2e3c5895c55897bcf | /auto/migrations/0014_auto_20201210_2149.py | 1814600924fd9487c14c2e1f31f801256bb3da65 | [] | no_license | mzKaNgPae/PruebaDesarrollo3FINAL | aed4d51da6e042f02e0b041d13abae7dbd2c55b4 | c58f200b6b8483d6000406cbd7ad18b021dc1353 | refs/heads/main | 2023-02-03T20:45:42.765639 | 2020-12-18T16:11:37 | 2020-12-18T16:11:37 | 322,665,921 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,034 | py | # Generated by Django 3.1.2 on 2020-12-11 00:49
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('auto', '0013_auto_20201210_2044'),
]
operations = [
migrations.AddField(
model_name='marca',
name='pais',
field=models.PositiveSmallIntegerField(choices=[(0, 'No definido'), (1, 'Alemania'), (2, 'Italia'), (3, 'Reino Unido'), (4, 'Estados Unidos'), (5, 'Japon'), (6, 'Francia')], default=0),
),
migrations.CreateModel(
name='Competencia',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=70)),
('anno', models.IntegerField(blank=True, max_length=4)),
('marca_campion', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='auto.marca')),
],
),
]
| [
"diegoquezadapavez@gmail.com"
] | diegoquezadapavez@gmail.com |
236efac96ff09fffad954d9f0b67a036e1924fb6 | 9c04dfec0c7e6a5e82fc5bf0ddf2e06c2308cdef | /packages/auto-nlp-deployment/src/models/__init__.py | 43798c3e912b4453892d8b1f4235b4ed1ef41932 | [
"MIT"
] | permissive | fhswf/tagflip-autonlp | 8d678c780476d20d4d870a23320e5908a4e8972f | f94abb35ed06198567e5d9cbb7abb7e112149d6c | refs/heads/main | 2023-04-07T10:19:01.108884 | 2022-04-10T19:56:48 | 2022-04-10T19:56:48 | 410,777,896 | 5 | 2 | MIT | 2022-04-10T12:19:35 | 2021-09-27T07:07:28 | TypeScript | UTF-8 | Python | false | false | 39 | py | from .model_service import ModelService | [
"timo@n.euhaus.net"
] | timo@n.euhaus.net |
e903a4ab704f34dc65dc8c166c839d8b82cf06b5 | 60db7dd5aab51a53e6b39e41a32dea226d25f082 | /flask_example/venv/bin/pip3.6 | 75f1e711ab79e19a9f9948f1d482a79aa241de67 | [] | no_license | VladBeglik/homeworks | 2e0f55d4a3cd1b6317be8c8499bbb3faa8e1f97f | fdefb210c75586de93b9f51ea5e0a8850936d8f9 | refs/heads/master | 2022-04-15T21:27:12.655346 | 2020-04-01T14:45:17 | 2020-04-01T14:45:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | 6 | #!/home/vlad/Documents/python/example/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.6'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.6')()
)
| [
"vladbeglik@gmail.com"
] | vladbeglik@gmail.com |
a515fc587646476cc8878bb50f72120b4e6aa5ba | bad85cd8d547a071baf4b6590f7e81d13ef1ec0d | /assistant/core/views.py | 2732966b89290e6982d4e90149bce48ffa294e63 | [
"MIT"
] | permissive | kapiak/ware_prod | 92e11671059642e14219d5aa8334e0564403db77 | ae61256890834c434d2e38cc2ccacf00b638665a | refs/heads/master | 2023-01-06T04:36:43.173093 | 2020-09-21T04:06:51 | 2020-09-21T04:06:51 | 310,320,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 871 | py | from typing import List
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView
from assistant.orders.models import Order
from assistant.products.models import Product
class DashboardViewMixin(LoginRequiredMixin):
title: str = None
breadcrumbs: List = []
def get_title(self):
return self.title
def get_context_data(self):
context = super().get_context_data()
context.update({'title': self.get_title()})
return context
class DashboardTemplateView(LoginRequiredMixin, TemplateView):
template_name = "core/dashboard.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({
'orders': Order.objects.all(),
'products': Product.objects.all()
})
return context
| [
"hamadeh.basel@gmail.com"
] | hamadeh.basel@gmail.com |
6de9de2c26cd97ae4bf4fcf3e8775dfda68e8817 | b95f9592df987386ff60d8ab10dd71b566995ee5 | /create_map.py | 5540cd7246c81808d9fb729c365e93adea5c760d | [] | no_license | ernie7334066/ATXHackTheTraffic | 35bc78cae5dfd54c93b34f992015e2eb24eb698b | aee6552607fa887ce86f99c5446471f9a39b2992 | refs/heads/master | 2020-05-22T16:08:01.731225 | 2017-03-12T04:55:50 | 2017-03-12T04:55:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 810 | py | from database_setup import Base, User, TravelSensor, Summary, db_session
import pickle
class Map:
READER_IDs = []
ADJ_INTERSECTIONS = {}
def get_map_from_database(self):
travelSensors = db_session.query(TravelSensor).all()
self.READER_IDs = [x.READER_ID for x in travelSensors]
for sensor in self.READER_IDs:
adjs = db_session.query(Summary.Destination).filter(Summary.Origin==sensor).distinct(Summary.Destination).all()
self.ADJ_INTERSECTIONS[sensor] = [adj[0] for adj in adjs]
#print sensor
#print self.ADJ_INTERSECTIONS[sensor]
def get_map_from_file():
infile = open('map.dump','rb')
return pickle.load(infile)
if __name__ == '__main__':
m = Map()
m.get_map_from_database()
print m.ADJ_INTERSECTIONS
output = open('map.dump', 'wb')
pickle.dump(m, output)
output.close()
| [
"tiger.yang.g@gmail.com"
] | tiger.yang.g@gmail.com |
358e081b2f6c8b3bf299043f9bcaf84f6c843033 | 410049acf0ce29853f4a41cdbeb148d0610103cc | /PM500gui.py | 6c994f45f4db56377604c97045939ee540307a68 | [] | no_license | ari1127/PM500 | 59f1a99757ee0781eb81fcb0566a0afecba6349a | f40f0bfc35a58e59a3e643e772b8f4a0ae5400a8 | refs/heads/master | 2021-01-02T22:49:55.774700 | 2015-02-25T23:04:08 | 2015-02-25T23:04:08 | 31,338,681 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29,866 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'PM500.ui'
#
# Created: Fri Oct 10 13:37:38 2014
# by: PyQt4 UI code generator 4.9.6
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(462, 347)
self.curposLCDum = QtGui.QLCDNumber(Dialog)
self.curposLCDum.setGeometry(QtCore.QRect(50, 30, 151, 51))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.curposLCDum.setPalette(palette)
self.curposLCDum.setNumDigits(8)
self.curposLCDum.setObjectName(_fromUtf8("curposLCDum"))
self.label_4 = QtGui.QLabel(Dialog)
self.label_4.setGeometry(QtCore.QRect(180, 0, 141, 31))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.groupBox = QtGui.QGroupBox(Dialog)
self.groupBox.setGeometry(QtCore.QRect(10, 90, 161, 81))
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.verticalLayoutWidget = QtGui.QWidget(self.groupBox)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 20, 91, 51))
self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label_2 = QtGui.QLabel(self.verticalLayoutWidget)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.verticalLayout.addWidget(self.label_2)
self.label_3 = QtGui.QLabel(self.verticalLayoutWidget)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.verticalLayout.addWidget(self.label_3)
self.verticalLayoutWidget_2 = QtGui.QWidget(self.groupBox)
self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(100, 20, 52, 51))
self.verticalLayoutWidget_2.setObjectName(_fromUtf8("verticalLayoutWidget_2"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)
self.verticalLayout_2.setMargin(0)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.nomirrors = QtGui.QLineEdit(self.verticalLayoutWidget_2)
self.nomirrors.setMaximumSize(QtCore.QSize(50, 16777215))
self.nomirrors.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.nomirrors.setObjectName(_fromUtf8("nomirrors"))
self.verticalLayout_2.addWidget(self.nomirrors)
self.maxtrav = QtGui.QLineEdit(self.verticalLayoutWidget_2)
self.maxtrav.setMaximumSize(QtCore.QSize(50, 16777215))
self.maxtrav.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.maxtrav.setObjectName(_fromUtf8("maxtrav"))
self.verticalLayout_2.addWidget(self.maxtrav)
self.goabsbutton = QtGui.QPushButton(Dialog)
self.goabsbutton.setGeometry(QtCore.QRect(290, 150, 91, 31))
self.goabsbutton.setObjectName(_fromUtf8("goabsbutton"))
self.gorelbutton = QtGui.QPushButton(Dialog)
self.gorelbutton.setGeometry(QtCore.QRect(290, 190, 91, 31))
self.gorelbutton.setObjectName(_fromUtf8("gorelbutton"))
self.gohomebutton = QtGui.QPushButton(Dialog)
self.gohomebutton.setGeometry(QtCore.QRect(290, 230, 91, 31))
self.gohomebutton.setObjectName(_fromUtf8("gohomebutton"))
self.rebootbutton = QtGui.QPushButton(Dialog)
self.rebootbutton.setGeometry(QtCore.QRect(10, 280, 75, 23))
self.rebootbutton.setObjectName(_fromUtf8("rebootbutton"))
self.groupBox_2 = QtGui.QGroupBox(Dialog)
self.groupBox_2.setGeometry(QtCore.QRect(10, 170, 161, 81))
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.verticalLayoutWidget_3 = QtGui.QWidget(self.groupBox_2)
self.verticalLayoutWidget_3.setGeometry(QtCore.QRect(10, 20, 91, 51))
self.verticalLayoutWidget_3.setObjectName(_fromUtf8("verticalLayoutWidget_3"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.verticalLayoutWidget_3)
self.verticalLayout_3.setMargin(0)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.label_5 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.verticalLayout_3.addWidget(self.label_5)
self.label_6 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.verticalLayout_3.addWidget(self.label_6)
self.verticalLayoutWidget_4 = QtGui.QWidget(self.groupBox_2)
self.verticalLayoutWidget_4.setGeometry(QtCore.QRect(100, 20, 52, 51))
self.verticalLayoutWidget_4.setObjectName(_fromUtf8("verticalLayoutWidget_4"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.verticalLayoutWidget_4)
self.verticalLayout_4.setMargin(0)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.accelset = QtGui.QLineEdit(self.verticalLayoutWidget_4)
self.accelset.setMaximumSize(QtCore.QSize(50, 16777215))
self.accelset.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.accelset.setObjectName(_fromUtf8("accelset"))
self.verticalLayout_4.addWidget(self.accelset)
self.veloset = QtGui.QLineEdit(self.verticalLayoutWidget_4)
self.veloset.setMaximumSize(QtCore.QSize(50, 16777215))
self.veloset.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.veloset.setObjectName(_fromUtf8("veloset"))
self.verticalLayout_4.addWidget(self.veloset)
self.statusedit = QtGui.QLineEdit(Dialog)
self.statusedit.setGeometry(QtCore.QRect(210, 290, 171, 20))
self.statusedit.setObjectName(_fromUtf8("statusedit"))
self.label_9 = QtGui.QLabel(Dialog)
self.label_9.setGeometry(QtCore.QRect(145, 290, 51, 21))
self.label_9.setObjectName(_fromUtf8("label_9"))
self.label_10 = QtGui.QLabel(Dialog)
self.label_10.setGeometry(QtCore.QRect(230, 100, 61, 31))
self.label_10.setObjectName(_fromUtf8("label_10"))
self.goedit = QtGui.QLineEdit(Dialog)
self.goedit.setGeometry(QtCore.QRect(290, 100, 91, 31))
self.goedit.setObjectName(_fromUtf8("goedit"))
self.unitcombo = QtGui.QComboBox(Dialog)
self.unitcombo.setGeometry(QtCore.QRect(390, 100, 51, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.unitcombo.setFont(font)
self.unitcombo.setObjectName(_fromUtf8("unitcombo"))
self.unitcombo.addItem(_fromUtf8(""))
self.unitcombo.addItem(_fromUtf8(""))
self.axiscombo = QtGui.QComboBox(Dialog)
self.axiscombo.setGeometry(QtCore.QRect(180, 100, 41, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.axiscombo.setFont(font)
self.axiscombo.setObjectName(_fromUtf8("axiscombo"))
self.axiscombo.addItem(_fromUtf8(""))
self.verticalLayoutWidget_5 = QtGui.QWidget(Dialog)
self.verticalLayoutWidget_5.setGeometry(QtCore.QRect(190, 170, 41, 111))
self.verticalLayoutWidget_5.setObjectName(_fromUtf8("verticalLayoutWidget_5"))
self.verticalLayout_5 = QtGui.QVBoxLayout(self.verticalLayoutWidget_5)
self.verticalLayout_5.setMargin(0)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.Xcb = QtGui.QCheckBox(self.verticalLayoutWidget_5)
self.Xcb.setChecked(True)
self.Xcb.setObjectName(_fromUtf8("Xcb"))
self.verticalLayout_5.addWidget(self.Xcb)
self.Ycb = QtGui.QCheckBox(self.verticalLayoutWidget_5)
self.Ycb.setObjectName(_fromUtf8("Ycb"))
self.verticalLayout_5.addWidget(self.Ycb)
self.Zcb = QtGui.QCheckBox(self.verticalLayoutWidget_5)
self.Zcb.setObjectName(_fromUtf8("Zcb"))
self.verticalLayout_5.addWidget(self.Zcb)
self.Acb = QtGui.QCheckBox(self.verticalLayoutWidget_5)
self.Acb.setObjectName(_fromUtf8("Acb"))
self.verticalLayout_5.addWidget(self.Acb)
self.Bcb = QtGui.QCheckBox(self.verticalLayoutWidget_5)
self.Bcb.setObjectName(_fromUtf8("Bcb"))
self.verticalLayout_5.addWidget(self.Bcb)
self.label_11 = QtGui.QLabel(Dialog)
self.label_11.setGeometry(QtCore.QRect(180, 150, 61, 16))
self.label_11.setObjectName(_fromUtf8("label_11"))
self.curposLCDps = QtGui.QLCDNumber(Dialog)
self.curposLCDps.setGeometry(QtCore.QRect(260, 30, 151, 51))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.curposLCDps.setPalette(palette)
self.curposLCDps.setNumDigits(8)
self.curposLCDps.setObjectName(_fromUtf8("curposLCDps"))
self.label_12 = QtGui.QLabel(Dialog)
self.label_12.setGeometry(QtCore.QRect(210, 50, 31, 31))
self.label_12.setObjectName(_fromUtf8("label_12"))
self.label_13 = QtGui.QLabel(Dialog)
self.label_13.setGeometry(QtCore.QRect(420, 50, 31, 31))
self.label_13.setObjectName(_fromUtf8("label_13"))
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.goabsbutton, QtCore.SIGNAL(_fromUtf8("clicked()")), Dialog.goabs)
QtCore.QObject.connect(self.gorelbutton, QtCore.SIGNAL(_fromUtf8("clicked()")), Dialog.gorel)
QtCore.QObject.connect(self.gohomebutton, QtCore.SIGNAL(_fromUtf8("clicked()")), Dialog.gohome)
QtCore.QObject.connect(self.rebootbutton, QtCore.SIGNAL(_fromUtf8("clicked()")), Dialog.reboot)
QtCore.QObject.connect(self.nomirrors, QtCore.SIGNAL(_fromUtf8("textChanged(QString)")), Dialog.chgmirrors)
QtCore.QObject.connect(self.maxtrav, QtCore.SIGNAL(_fromUtf8("textChanged(QString)")), Dialog.chgmaxtrav)
QtCore.QObject.connect(self.accelset, QtCore.SIGNAL(_fromUtf8("textChanged(QString)")), Dialog.chgaccel)
QtCore.QObject.connect(self.veloset, QtCore.SIGNAL(_fromUtf8("textChanged(QString)")), Dialog.chgvel)
QtCore.QObject.connect(self.Xcb, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), Dialog.chgAxisenabled)
QtCore.QObject.connect(self.Ycb, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), Dialog.chgAxisenabled)
QtCore.QObject.connect(self.Zcb, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), Dialog.chgAxisenabled)
QtCore.QObject.connect(self.Acb, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), Dialog.chgAxisenabled)
QtCore.QObject.connect(self.Bcb, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), Dialog.chgAxisenabled)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
    """Apply all user-visible strings to the dialog's widgets.

    Auto-generated by pyuic from the Qt Designer .ui file; it is rewritten
    whenever the .ui changes, so do not hand-edit the strings here.
    """
    Dialog.setWindowTitle(_translate("Dialog", "Dialog", None))
    # Headline / group-box captions
    self.label_4.setText(_translate("Dialog", "<html><head/><body><p><span style=\" font-size:14pt;\">Current Position</span></p></body></html>", None))
    self.groupBox.setTitle(_translate("Dialog", "Stage Setup", None))
    self.label_2.setText(_translate("Dialog", "No. Mirrors:", None))
    self.label_3.setText(_translate("Dialog", "Max Travel (cm):", None))
    # Default values for the stage-setup line edits
    self.nomirrors.setText(_translate("Dialog", "2", None))
    self.maxtrav.setText(_translate("Dialog", "10", None))
    # Motion buttons
    self.goabsbutton.setText(_translate("Dialog", "Move Absolute", None))
    self.gorelbutton.setText(_translate("Dialog", "Move Relative", None))
    self.gohomebutton.setText(_translate("Dialog", "Home", None))
    self.rebootbutton.setText(_translate("Dialog", "Reboot", None))
    self.groupBox_2.setTitle(_translate("Dialog", "Movement Setup", None))
    self.label_5.setText(_translate("Dialog", "Accel/Decel", None))
    self.label_6.setText(_translate("Dialog", "Velocity", None))
    self.accelset.setText(_translate("Dialog", "25", None))
    self.veloset.setText(_translate("Dialog", "25", None))
    self.label_9.setText(_translate("Dialog", "Status:", None))
    self.label_10.setText(_translate("Dialog", "<html><head/><body><p><span style=\" font-size:14pt;\">Go To:</span></p></body></html>", None))
    # Unit and axis combo-box entries
    self.unitcombo.setItemText(0, _translate("Dialog", "um", None))
    self.unitcombo.setItemText(1, _translate("Dialog", "ps", None))
    self.axiscombo.setItemText(0, _translate("Dialog", "X", None))
    # Per-axis enable check boxes
    self.Xcb.setText(_translate("Dialog", "X", None))
    self.Ycb.setText(_translate("Dialog", "Y", None))
    self.Zcb.setText(_translate("Dialog", "Z", None))
    self.Acb.setText(_translate("Dialog", "A", None))
    self.Bcb.setText(_translate("Dialog", "B", None))
    self.label_11.setText(_translate("Dialog", "Axis Enabled", None))
    self.label_12.setText(_translate("Dialog", "<html><head/><body><p><span style=\" font-size:14pt;\">um</span></p></body></html>", None))
    self.label_13.setText(_translate("Dialog", "<html><head/><body><p><span style=\" font-size:14pt;\">ps</span></p></body></html>", None))
| [
"ari1127@gmail.com"
] | ari1127@gmail.com |
0411474f0c3bcd60e5acc7806209416068b0b003 | 1242dfc7dfd8f7573fb0d7c8bf6a0490418c825a | /examples/test_predict.py | 935354b789dd0dc0781e70c0e40d687a48cfe407 | [] | no_license | vvagias/HDP-3.0-classifying-melanoma | 5ad3f532228b8e85a735ba28cb283df119e22a4d | 3d5f4015647fd39ab86c8dac6aa718022a21fd04 | refs/heads/master | 2020-04-01T21:10:04.066085 | 2018-10-20T11:15:39 | 2018-10-20T11:15:39 | 153,640,454 | 0 | 0 | null | 2018-10-18T14:46:08 | 2018-10-18T14:46:02 | Python | UTF-8 | Python | false | false | 646 | py | from keras.models import load_model
import cv2
import numpy as np
from keras.models import model_from_json

# Load the model architecture from JSON; a context manager guarantees the
# file handle is closed even if parsing fails (original leaked it on error).
with open('model.json', 'r') as json_file:
    loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights("weights.h5")
print("Loaded model from disk")
# BUG FIX: the original called compile()/predict_classes() on an undefined
# name `model`; the network was loaded as `loaded_model`.
loaded_model.compile(loss='binary_crossentropy',
                     optimizer='rmsprop',
                     metrics=['accuracy'])
# Read one test image and shape it into a single-sample batch.
# NOTE(review): cv2.resize(img, (320, 240)) yields a (240, 320, 3) array that
# is then reshaped to [1, 320, 240, 3] — sizes match but axes are transposed;
# confirm against the training pipeline.
img = cv2.imread('test.jpg')
img = cv2.resize(img, (320, 240))
img = np.reshape(img, [1, 320, 240, 3])
classes = loaded_model.predict_classes(img)
# print() form works on both Python 2 and 3 (original used the py2-only statement)
print(classes)
| [
"vasilis.n.vagias@gmail.com"
] | vasilis.n.vagias@gmail.com |
b9a942907211d6ab1c304c195019967ec722f6e4 | 77cc6f40bbff0285d64dd4864cedf7d594b346e6 | /openstack/common/test.py | 7f400e5aa9fdc39c90a76d21ff380d4a70d0aa04 | [
"Apache-2.0",
"BSD-2-Clause"
] | permissive | thomasem/oslo-incubator | 22d8aad763f6d43c2a9a3bca50c9c8dd324862d6 | 9bfa4ec4240e0f33f854d37e10dbc65a5f3b2789 | refs/heads/master | 2021-01-20T21:35:14.812836 | 2013-09-05T07:03:23 | 2013-09-05T07:03:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,937 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Common utilities used in testing"""
import os
import fixtures
import testtools
class BaseTestCase(testtools.TestCase):
    """Base test case wiring up timeout, output-capture and logging fixtures."""

    def setUp(self):
        super(BaseTestCase, self).setUp()
        self._set_timeout()
        self._fake_output()
        self.useFixture(fixtures.FakeLogger('openstack.common'))

    def _set_timeout(self):
        # OS_TEST_TIMEOUT of 0 — or any non-integer value — disables the
        # per-test timeout entirely.
        raw_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
        try:
            seconds = int(raw_timeout)
        except ValueError:
            seconds = 0
        if seconds > 0:
            self.useFixture(fixtures.Timeout(seconds, gentle=True))

    def _fake_output(self):
        # Capture stdout/stderr into in-memory streams when requested via
        # the corresponding environment variables ('True' or '1').
        for env_var, stream_name in (('OS_STDOUT_CAPTURE', 'stdout'),
                                     ('OS_STDERR_CAPTURE', 'stderr')):
            if os.environ.get(env_var) in ('True', '1'):
                stream = self.useFixture(
                    fixtures.StringStream(stream_name)).stream
                self.useFixture(
                    fixtures.MonkeyPatch('sys.' + stream_name, stream))
| [
"review@openstack.org"
] | review@openstack.org |
6037f6bbf5ef3b3a143e54da96b9cb295f8855e7 | 0ac185a7f50a630b713cc11902c3fd3a5436ece4 | /leet872.py | ae3b8ea87c1dc59b48543afddf16e82a27eab59e | [] | no_license | thebestKaKa/Leet-Python | 25156d75c3e0782c839ed7bfe48bb66666bcc73f | dc9e3dc62f96f2b071b4d51bd8692ca41502bc8c | refs/heads/master | 2023-05-05T12:20:37.370835 | 2021-05-23T08:48:42 | 2021-05-23T08:48:42 | 366,065,447 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,225 | py | # Definition for a binary tree node.
class TreeNode(object):
    """Binary tree node holding a value and optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val = val      # node payload
        self.left = left    # left child (TreeNode or None)
        self.right = right  # right child (TreeNode or None)
class Solution(object):
    """LeetCode 872 — Leaf-Similar Trees."""

    def leafSimilar(self, root1, root2):
        """
        :type root1: TreeNode
        :type root2: TreeNode
        :rtype: bool
        """
        # Two trees are leaf-similar when their left-to-right leaf value
        # sequences are equal.
        return self.getSeq(root1) == self.getSeq(root2)

    def getSeq(self, root):
        """Return the left-to-right leaf value sequence of the tree.

        Robustness fix: an empty tree (root is None) now yields [] instead
        of raising AttributeError inside the traversal.
        """
        res = []

        def dfs(node):
            if node is None:  # guard: empty tree / absent child
                return
            if node.left is None and node.right is None:
                res.append(node.val)  # leaf: record and stop descending
                return
            if node.left:
                dfs(node.left)
            if node.right:
                dfs(node.right)

        dfs(root)
        return res
if __name__ == '__main__':
    # Build the sample tree bottom-up; leaves left-to-right are 6, 7, 4, 9, 8.
    left_subtree = TreeNode(5,
                            left=TreeNode(6),
                            right=TreeNode(2, left=TreeNode(7), right=TreeNode(4)))
    right_subtree = TreeNode(1, left=TreeNode(9), right=TreeNode(8))
    root = TreeNode(3, left=left_subtree, right=right_subtree)

    solver = Solution()
    leaves = solver.getSeq(root)
    print(leaves)
    print(solver.leafSimilar(root, root))
| [
"2429642242@qq.com"
] | 2429642242@qq.com |
56a8a84a92ce89ddbed0b0db7c66cebb8b539849 | ea8e2a8c0166d0bb0ca618a3b6628f060170e802 | /dnc/model/memory.py | 19c6c376b4b26bc3147eca912a65bc4e5013eabe | [] | no_license | yejiming/ml_papers | 051c7f86adeb032b0d2dd625f6d235f852dea491 | b3e27e1311a29e9ec68e5f660f91581c385328e0 | refs/heads/master | 2020-03-25T02:11:07.925614 | 2018-08-02T11:44:59 | 2018-08-02T11:44:59 | 143,277,568 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,153 | py | import numpy as np
import tensorflow as tf
from dnc.model import utils
class Memory:
    """DNC external memory (Graves et al., Nature 2016).

    Holds the word matrix plus the usage, temporal-link, precedence and
    weighting state needed for content/allocation-based writes and
    content/temporal-based reads.
    """

    def __init__(self, words_num=256, word_size=64, read_heads=4, batch_size=1):
        """
        Parameters
        ----------
        words_num: int
            maximum number of words the memory can hold at once
        word_size: int
            size of an individual memory word
        read_heads: int
            number of read heads that can read simultaneously
        batch_size: int
            size of the input data batch
        """
        self.words_num = words_num
        self.word_size = word_size
        self.read_heads = read_heads
        self.batch_size = batch_size

        # words_num x words_num identity matrix, used to zero out self-links
        self.I = tf.constant(np.identity(words_num, dtype=np.float32))

        # maps indices from the per-batch 2D free list to their positions in
        # the flat 1D ordered_allocation_weighting array
        self.index_mapper = tf.constant(
            np.cumsum([0] + [words_num] * (batch_size - 1), dtype=np.int32)[:, np.newaxis]
        )

    def init_memory(self):
        """Return the initial memory state as a tuple (see inline labels)."""
        return (
            tf.fill([self.batch_size, self.words_num, self.word_size], 1e-6),  # initial memory matrix
            tf.zeros([self.batch_size, self.words_num, ]),  # initial usage vector
            tf.zeros([self.batch_size, self.words_num, ]),  # initial precedence vector
            tf.zeros([self.batch_size, self.words_num, self.words_num]),  # initial link matrix
            tf.fill([self.batch_size, self.words_num, ], 1e-6),  # initial write weighting
            tf.fill([self.batch_size, self.words_num, self.read_heads], 1e-6),  # initial read weightings
            tf.fill([self.batch_size, self.word_size, self.read_heads], 1e-6),  # initial read vectors
        )

    def get_lookup_weighting(self, memory_matrix, keys, strengths):
        """Content-based addressing: softmax of strength-scaled cosine similarity.

        memory_matrix: (batch, words_num, word_size)
        keys:          (batch, word_size, number_of_keys)
        strengths:     (batch, number_of_keys)
        returns:       (batch, words_num, number_of_keys) lookup weightings
        """
        normalized_memory = tf.nn.l2_normalize(memory_matrix, 2)
        normalized_keys = tf.nn.l2_normalize(keys, 1)
        similiarity = tf.matmul(normalized_memory, normalized_keys)
        strengths = tf.expand_dims(strengths, 1)
        return tf.nn.softmax(similiarity * strengths, 1)

    def update_usage_vector(self, usage_vector, read_weightings, write_weighting, free_gates):
        """Update the usage vector from the free gates and last step's state.

        usage_vector:    (batch, words_num)
        read_weightings: (batch, words_num, read_heads)
        write_weighting: (batch, words_num)
        free_gates:      (batch, read_heads)
        returns:         (batch, words_num) updated usage vector
        """
        free_gates = tf.expand_dims(free_gates, 1)
        # memory retention: locations read by heads with open free gates decay
        retention_vector = tf.reduce_prod(1 - read_weightings * free_gates, 2)
        updated_usage = (usage_vector + write_weighting - usage_vector * write_weighting) * retention_vector
        return updated_usage

    def get_allocation_weighting(self, sorted_usage, free_list):
        """Writing allocation weighting from the ascending-sorted usage.

        sorted_usage: (batch, words_num) usage sorted ascendingly
        free_list:    (batch, words_num) original indices of the sorted usage
        returns:      (batch, words_num) allocation weighting per word
        """
        shifted_cumprod = tf.cumprod(sorted_usage, axis=1, exclusive=True)
        unordered_allocation_weighting = (1 - sorted_usage) * shifted_cumprod

        # scatter the weightings back to their original (unsorted) positions
        mapped_free_list = free_list + self.index_mapper
        flat_unordered_allocation_weighting = tf.reshape(unordered_allocation_weighting, (-1,))
        flat_mapped_free_list = tf.reshape(mapped_free_list, (-1,))
        flat_container = tf.TensorArray(tf.float32, self.batch_size * self.words_num)

        flat_ordered_weightings = flat_container.scatter(
            flat_mapped_free_list,
            flat_unordered_allocation_weighting
        )

        packed_wightings = flat_ordered_weightings.stack()
        return tf.reshape(packed_wightings, (self.batch_size, self.words_num))

    def update_write_weighting(self, lookup_weighting, allocation_weighting, write_gate, allocation_gate):
        """Blend lookup- and allocation-based write weightings.

        lookup_weighting:     (batch, words_num, 1)
        allocation_weighting: (batch, words_num)
        write_gate:           (batch, 1) fraction of writing to be done
        allocation_gate:      (batch, 1) fraction of allocation to be done
        returns:              (batch, words_num) updated write weighting
        """
        # ROBUSTNESS FIX: drop only the trailing keys dimension (the write
        # head has exactly one key). The original axis-less tf.squeeze also
        # removed the batch dimension when batch_size == 1, producing a
        # (words_num,) tensor that was only correct by accidental broadcasting.
        lookup_weighting = tf.squeeze(lookup_weighting, axis=[2])

        updated_write_weighting = write_gate * (allocation_gate * allocation_weighting + (1 - allocation_gate) * lookup_weighting)
        return updated_write_weighting

    def update_memory(self, memory_matrix, write_weighting, write_vector, erase_vector):
        """Erase then write into the memory matrix.

        memory_matrix:   (batch, words_num, word_size)
        write_weighting: (batch, words_num)
        write_vector:    (batch, word_size) what to write
        erase_vector:    (batch, word_size) what to erase
        returns:         (batch, words_num, word_size) updated memory
        """
        # expand with a dimension of 1 at the multiplication-adjacent axis so
        # matmul behaves as an outer product
        write_weighting = tf.expand_dims(write_weighting, 2)
        write_vector = tf.expand_dims(write_vector, 1)
        erase_vector = tf.expand_dims(erase_vector, 1)

        erasing = memory_matrix * (1 - tf.matmul(write_weighting, erase_vector))
        writing = tf.matmul(write_weighting, write_vector)
        updated_memory = erasing + writing
        return updated_memory

    def update_precedence_vector(self, precedence_vector, write_weighting):
        """Update the precedence vector after the latest write.

        precedence_vector: (batch, words_num) from the last time step
        write_weighting:   (batch, words_num) latest write weighting
        returns:           (batch, words_num) updated precedence vector
        """
        reset_factor = 1 - tf.reduce_sum(write_weighting, 1, keepdims=True)
        updated_precedence_vector = reset_factor * precedence_vector + write_weighting
        return updated_precedence_vector

    def update_link_matrix(self, precedence_vector, link_matrix, write_weighting):
        """Update the temporal link matrix for the latest write.

        precedence_vector: (batch, words_num)
        link_matrix:       (batch, words_num, words_num)
        write_weighting:   (batch, words_num)
        returns:           (batch, words_num, words_num) updated link matrix
        """
        write_weighting = tf.expand_dims(write_weighting, 2)
        precedence_vector = tf.expand_dims(precedence_vector, 1)

        reset_factor = 1 - utils.pairwise_add(write_weighting, is_batch=True)
        updated_link_matrix = reset_factor * link_matrix + tf.matmul(write_weighting, precedence_vector)
        updated_link_matrix = (1 - self.I) * updated_link_matrix  # eliminates self-links
        return updated_link_matrix

    def get_directional_weightings(self, read_weightings, link_matrix):
        """Forward/backward temporal read weightings.

        read_weightings: (batch, words_num, read_heads)
        link_matrix:     (batch, words_num, words_num)
        returns:         tuple of forward and backward weightings, each
                         (batch, words_num, read_heads)
        """
        forward_weighting = tf.matmul(link_matrix, read_weightings)
        backward_weighting = tf.matmul(link_matrix, read_weightings, adjoint_a=True)
        return forward_weighting, backward_weighting

    def update_read_weightings(self, lookup_weightings, forward_weighting, backward_weighting, read_mode):
        """Mix content, forward and backward weightings by the read modes.

        lookup_weightings:  (batch, words_num, read_heads)
        forward_weighting:  (batch, words_num, read_heads)
        backward_weighting: (batch, words_num, read_heads)
        read_mode:          (batch, 3, read_heads) softmax over the modes
        returns:            (batch, words_num, read_heads)
        """
        backward_mode = tf.expand_dims(read_mode[:, 0, :], 1) * backward_weighting
        lookup_mode = tf.expand_dims(read_mode[:, 1, :], 1) * lookup_weightings
        forward_mode = tf.expand_dims(read_mode[:, 2, :], 1) * forward_weighting
        updated_read_weightings = backward_mode + lookup_mode + forward_mode
        return updated_read_weightings

    def update_read_vectors(self, memory_matrix, read_weightings):
        """Read from the updated memory.

        memory_matrix:   (batch, words_num, word_size)
        read_weightings: (batch, words_num, read_heads)
        returns:         (batch, word_size, read_heads) read vectors
        """
        updated_read_vectors = tf.matmul(memory_matrix, read_weightings, adjoint_a=True)
        return updated_read_vectors

    def write(self, memory_matrix, usage_vector, read_weightings, write_weighting,
              precedence_vector, link_matrix, key, strength, free_gates,
              allocation_gate, write_gate, write_vector, erase_vector):
        """Complete write pipeline for one time step.

        Inputs are last step's state plus the controller's write interface
        (key/strength for lookup, gates, write/erase vectors).

        returns tuple:
            usage vector      (batch, words_num)
            write weighting   (batch, words_num)
            memory matrix     (batch, words_num, word_size)
            link matrix       (batch, words_num, words_num)
            precedence vector (batch, words_num)
        """
        lookup_weighting = self.get_lookup_weighting(memory_matrix, key, strength)
        new_usage_vector = self.update_usage_vector(usage_vector, read_weightings, write_weighting, free_gates)

        # sort usage ascendingly (top_k on the negated vector) to find the
        # least-used locations for allocation
        sorted_usage, free_list = tf.nn.top_k(-1 * new_usage_vector, self.words_num)
        sorted_usage = -1 * sorted_usage

        allocation_weighting = self.get_allocation_weighting(sorted_usage, free_list)
        new_write_weighting = self.update_write_weighting(lookup_weighting, allocation_weighting, write_gate, allocation_gate)
        new_memory_matrix = self.update_memory(memory_matrix, new_write_weighting, write_vector, erase_vector)
        new_link_matrix = self.update_link_matrix(precedence_vector, link_matrix, new_write_weighting)
        new_precedence_vector = self.update_precedence_vector(precedence_vector, new_write_weighting)

        return new_usage_vector, new_write_weighting, new_memory_matrix, new_link_matrix, new_precedence_vector

    def read(self, memory_matrix, read_weightings, keys, strengths, link_matrix, read_modes):
        """Complete read pipeline for one time step.

        memory_matrix:   (batch, words_num, word_size) post-write memory
        read_weightings: (batch, words_num, read_heads) from the last step
        keys:            (batch, word_size, read_heads)
        strengths:       (batch, read_heads)
        link_matrix:     (batch, words_num, words_num) post-write link matrix
        read_modes:      (batch, 3, read_heads)

        returns tuple:
            read weightings (batch, words_num, read_heads)
            read vectors    (batch, word_size, read_heads)
        """
        lookup_weighting = self.get_lookup_weighting(memory_matrix, keys, strengths)
        forward_weighting, backward_weighting = self.get_directional_weightings(read_weightings, link_matrix)
        new_read_weightings = self.update_read_weightings(lookup_weighting, forward_weighting, backward_weighting, read_modes)
        new_read_vectors = self.update_read_vectors(memory_matrix, new_read_weightings)
        return new_read_weightings, new_read_vectors
| [
"jiming_ye@163.com"
] | jiming_ye@163.com |
f7526d46e57dacaf54913613ea92feeddb67cffd | e34cbf5fce48f661d08221c095750240dbd88caf | /python/homework/day10_ansibleLike/core/verify.py | 70613c6ced9cebe0e42908774b56c4de14604d30 | [] | no_license | willianflasky/growup | 2f994b815b636e2582594375e90dbcb2aa37288e | 1db031a901e25bbe13f2d0db767cd28c76ac47f5 | refs/heads/master | 2023-01-04T13:13:14.191504 | 2020-01-12T08:11:41 | 2020-01-12T08:11:41 | 48,899,304 | 2 | 0 | null | 2022-12-26T19:46:22 | 2016-01-02T05:04:39 | C | UTF-8 | Python | false | false | 790 | py | #!/usr/bin/env python
# -*-coding:utf8-*-
# __author__ = "willian"
import getpass
from lib import mysql_helper
from conf.settings import *
def verify():
    """Prompt for credentials and check them against the users table.

    Returns (True, user_row) on a match, or False after three failed tries.
    """
    conn = mysql_helper.MySQLHandler(db_host, db_port, db_user, db_pass, db_name)
    users = conn.select('select * from {0}', 'users')
    for _ in range(3):
        name_input = input("请输入用户名:").strip()
        # getpass does not behave well inside the PyCharm debug console
        pass_input = getpass.getpass("请输入密码:").strip()
        for row in users:
            if name_input == row['username'] and pass_input == row['password']:
                print("\033[32;1m验证成功!\033[0m")
                return True, row
    print("\033[31;1m超过3次!\033[0m")
    return False
| [
"284607860@qq.com"
] | 284607860@qq.com |
47349284a779673df3b266dddcf38cbc1c86ae82 | 68676effc4e5d177fe5411e968098ab55e39f49c | /sim_viz_alpha.py | 03dfbb32feec8b8c89dc90da509a0d3041694ca9 | [] | no_license | barboc/sim_viz_alpha | c3144accde2b1c2a3a012f9259290092a6c47740 | c953e350a99ec7c9653da1d4003752d3d1ebf5b8 | refs/heads/master | 2022-12-28T07:35:46.397355 | 2020-10-11T12:37:07 | 2020-10-11T12:37:07 | 299,478,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,145 | py | # _____ _
# |_ _| | |
# | | _ __ ___ _ __ ___ _ __| |_ ___
# | | | '_ ` _ \| '_ \ / _ \| '__| __/ __|
# _| |_| | | | | | |_) | (_) | | | |_\__ \
# |_____|_| |_| |_| .__/ \___/|_| \__|___/
# | |
# |_|
import pygame
import simpy
import random
import sys
from collections import deque, namedtuple
import math
import pprint
# _____ _
# / ____| | |
# | (___ ___| |_ _ _ _ __
# \___ \ / _ \ __| | | | '_ \
# ____) | __/ |_| |_| | |_) |
# |_____/ \___|\__|\__,_| .__/
# | |
# |_|
# initialize pygame (all modules; must run before any display/font calls below)
pygame.init()
# initialize the simpy discrete-event simulation environment
env = simpy.Environment()
# Timer used to cap the render loop at FPS frames per second
clock = pygame.time.Clock()
# _____ _ _
# / ____| | | | |
# | | ___ _ __ ___| |_ __ _ _ __ | |_ ___
# | | / _ \| '_ \/ __| __/ _` | '_ \| __/ __|
# | |___| (_) | | | \__ \ || (_| | | | | |_\__ \
# \_____\___/|_| |_|___/\__\__,_|_| |_|\__|___/
#
#
# Window
SCREEN_WIDTH = 1920
SCREEN_HEIGHT = 1080
SIZE = (SCREEN_WIDTH, SCREEN_HEIGHT)
TITLE = "Simulation: ALPHA"
# Initialize the game screen
SCREEN = pygame.display.set_mode(SIZE)
pygame.display.set_caption(TITLE)
# Clock frame rate (upper bound for the render loop, in frames per second)
FPS = 60
# Colors as (R, G, B) tuples
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
CHOCOLATE = (210, 105, 30)
LIGHT_GREEN = (000, 200, 100)
LIGHT_BLUE = (000, 000, 200)
LIGHT_GRAY = (200, 200, 200)
# SIM parameters (seeded for reproducible runs)
RANDOM_SEED = 42
random.seed(RANDOM_SEED)
NEW_OPPS = 10  # Total number of opportunities
INTERVAL_OPPS = 10.0  # Generate new opportunities roughly every x seconds
MIN_PATIENCE = 1  # Min. customer patience
MAX_PATIENCE = 3  # Max. customer patience
RES_A_CAPACITY = 1  # Capacity for Resource A
RES_A_TIME = 10  # Time expect to use resource
# VIZ
SIM_SPEED = 200  # Scales sim seconds to pygame milliseconds (1000 = real time)
# ____ ____ _ ______ _____ _______ _____
# / __ \| _ \ | | ____/ ____|__ __/ ____|
# | | | | |_) | | | |__ | | | | | (___
# | | | | _ < _ | | __|| | | | \___ \
# | |__| | |_) | |__| | |___| |____ | | ____) |
# \____/|____/ \____/|______\_____| |_| |_____/
#
#
# Immutable record of one simulation event for deque playback:
# time   - sim time rounded to 2 decimals
# name   - entity name (e.g. "Opp3", "RESOURCE_A")
# type   - entity kind: "SOURCE" / "RESOURCE" / "OPP"
# action - what happened: "CREATE", "RA_USE", "RA_FINISH", "RA_RENEGE", ...
Log_Event = namedtuple('Log_Event', ['time', 'name', 'type', 'action'])
# Class for recording SIM events to the deque for later playback.
class SIMObserver:
    """FIFO recorder: the SIM appends Log_Events, the VIZ pops them back."""

    def __init__(self):
        # Playback queue; SIMScene consumes it with popleft().
        self.sim_queue = deque()

    def add_sim_event(self, sim_event):
        """Append one Log_Event to the playback queue."""
        self.sim_queue.append(sim_event)

    def print_deque(self):
        """Debug helper: pretty-print the whole recorded queue."""
        pprint.pprint(self.sim_queue)
# _____ _____ __ __
# / ____|_ _| \/ |
# | (___ | | | \ / |
# \___ \ | | | |\/| |
# ____) |_| |_| | | |
# |_____/|_____|_| |_|
#
class Source:
    """Creates the shared RESOURCE_A and spawns the stream of Opportunities."""

    def __init__(self, env):
        self.env = env
        self.res_div = simpy.Resource(self.env, capacity=RES_A_CAPACITY)

        # LOG FIRST RESOURCE CREATE***********
        record.add_sim_event(
            Log_Event(round(self.env.now, 2), "RESOURCE_A", "RESOURCE", "CREATE"))

        # Add first sim process and pass the resource.
        self.env.process(self.source(env, NEW_OPPS, INTERVAL_OPPS, self.res_div))

        # LOG FIRST SOURCE CREATE***********
        record.add_sim_event(
            Log_Event(round(self.env.now, 2), "SOURCE_A", "SOURCE", "CREATE"))

    def source(self, env, number, interval, res_div):
        """Source generates opportunities randomly"""
        for seq in range(number):
            # Each Opportunity registers its own simpy process on creation.
            new_opp = Opportunity(env, f'Opp{seq}', res_div, time_in_queue=RES_A_TIME)
            # Exponential inter-arrival gap before the next opportunity.
            yield env.timeout(random.expovariate(1.0 / interval))
class Opportunity:
    """A single opportunity: logs its creation, queues for RESOURCE_A and
    either completes service or reneges once its patience runs out.

    Every state change is appended to the global `record` observer so the
    VIZ layer can replay it later.
    """

    def __init__(self, env, name, res_div, time_in_queue):
        self.env = env                      # shared simpy environment
        self.name = name                    # e.g. "Opp3"; used in the event log
        self.res_div = res_div              # the simpy.Resource to queue for
        self.time_in_queue = time_in_queue  # mean service time (exponential)
        self.env.process(self.create_opp(self.env, self.name, self.res_div, self.time_in_queue))

    def create_opp(self, env, name, res_div, time_in_queue):
        """Opportunity arrives, is served or abandon."""
        # LOG OPP CREATE***********
        event_log = Log_Event(round(self.env.now, 2), name, "OPP", "CREATE")
        record.add_sim_event(event_log)
        yield env.timeout(10)  # ***temp delay before resource req for debug...remove later
        with res_div.request() as req:
            patience = random.uniform(MIN_PATIENCE, MAX_PATIENCE)
            # Wait for the resource or abort at the end
            results = yield req | env.timeout(patience)
            # CLEANUP: removed the dead `arrive`/`wait` bookkeeping — it was
            # computed but never used (the original itself flagged it with
            # "Is this used???").
            if req in results:
                # LOG OPP GET RESOURCE***********
                event_log = Log_Event(round(self.env.now, 2), name, "OPP", "RA_USE")
                record.add_sim_event(event_log)
                tib = random.expovariate(1.0 / time_in_queue)
                yield env.timeout(tib)
                # LOG OPP FINISH***********
                event_log = Log_Event(round(self.env.now, 2), name, "OPP", "RA_FINISH")
                record.add_sim_event(event_log)
            else:
                # LOG OPP RENEGE***********
                event_log = Log_Event(round(self.env.now, 2), name, "OPP", "RA_RENEGE")
                record.add_sim_event(event_log)
#
# _____ _ _
# | __ \(_) | |
# | | | |_ _ __ ___ ___| |_ ___ _ __
# | | | | | '__/ _ \/ __| __/ _ \| '__|
# | |__| | | | | __/ (__| || (_) | |
# |_____/|_|_| \___|\___|\__\___/|_|
#
#
class Director:
    """Owns the active scene and drives the event/update/render main loop."""

    def __init__(self, start_scene):
        self.active_scene = start_scene

    def is_quit_event(self, event, pressed_keys):
        """Return True for a window close or a Ctrl+Q key chord."""
        if event.type == pygame.QUIT:
            return True
        ctrl_down = pressed_keys[pygame.K_LCTRL] or pressed_keys[pygame.K_RCTRL]
        return ctrl_down and pressed_keys[pygame.K_q]

    def action(self):
        """Run until the active scene sets its next_scene to None."""
        while self.active_scene is not None:
            # event handling: quit chords terminate, everything else is
            # forwarded to the scene
            pressed_keys = pygame.key.get_pressed()
            scene_events = []
            for event in pygame.event.get():
                if self.is_quit_event(event, pressed_keys):
                    self.active_scene.terminate()
                else:
                    scene_events.append(event)

            # scene logic and drawing, then advance to whatever scene the
            # current one selected
            self.active_scene.process_input(scene_events, pressed_keys)
            self.active_scene.update()
            self.active_scene.render()
            self.active_scene = self.active_scene.next_scene

            # present the frame and cap the loop at FPS
            pygame.display.flip()
            clock.tick(FPS)
# _____
# / ____|
# | (___ ___ ___ _ __ ___ ___
# \___ \ / __/ _ \ '_ \ / _ \/ __|
# ____) | (_| __/ | | | __/\__ \
# |_____/ \___\___|_| |_|\___||___/
#
#
class Scene:
    """Abstract base for a game scene; subclasses implement the three hooks."""

    def __init__(self):
        # A scene keeps itself active until next_scene is reassigned.
        self.next_scene = self

    def process_input(self, events, pressed_keys):
        raise NotImplementedError

    def update(self):
        raise NotImplementedError

    def render(self):
        raise NotImplementedError

    def terminate(self):
        # Signals the Director loop to stop.
        self.next_scene = None
class OppEntity(pygame.sprite.Sprite):
    """Hexagonal sprite representing one simulation Opportunity."""

    def __init__(self, opp_info, location):
        super(OppEntity, self).__init__()
        # Keep the spawning Log_Event and unpack its fields for easy access.
        self.info = opp_info
        self.time = opp_info.time
        self.name = opp_info.name
        self.type = opp_info.type
        self.action = opp_info.action
        self.location = location
        self.color = LIGHT_GREEN
        # Per-sprite surface with alpha so the hexagon corners stay transparent.
        self.surf = pygame.Surface((50, 50), pygame.SRCALPHA)
        self.rect = self.surf.get_rect(center=self.location)
        pygame.draw.polygon(self.surf, self.color, [(12, 0), (36, 0), (50, 25), (36, 50), (12, 50), (0, 25)])
        # Name label blitted after the polygon so it renders on top.
        self.text = pygame.font.Font(None, 24).render(self.name, True, BLACK)
        self.text_rect = self.text.get_rect()
        self.text_rect.centerx = 50 // 2
        self.text_rect.bottom = 35
        self.surf.blit(self.text, self.text_rect)
        # Fixed random drift heading (radians) consumed by update().
        self.default_motion_rads = math.radians(random.randint(0, 360))
        self.motion_speed = 6

    def update(self, *args):
        # Drift along the fixed random heading every frame.
        cos_rads = math.cos(self.default_motion_rads)
        sin_rads = math.sin(self.default_motion_rads)
        self.rect.y += (self.motion_speed * sin_rads)
        self.rect.x += (self.motion_speed * cos_rads)

    def draw(self, color):
        # Repaint the hexagon in a new color, then re-blit the label on top.
        pygame.draw.polygon(self.surf, color, [(12, 0), (36, 0), (50, 25), (36, 50), (12, 50), (0, 25)])
        self.surf.blit(self.text, self.text_rect)
class SourceEntity(pygame.sprite.Sprite):
    """Static triangular sprite marking the opportunity source location."""

    def __init__(self, source_info, location):
        super(SourceEntity, self).__init__()
        # Keep the spawning Log_Event and unpack its fields for easy access.
        self.info = source_info
        self.time = source_info.time
        self.name = source_info.name
        self.type = source_info.type
        self.action = source_info.action
        self.location = location
        self.color = LIGHT_GRAY
        # Downward-pointing triangle on a transparent surface.
        self.surf = pygame.Surface((100, 100), pygame.SRCALPHA)
        self.rect = self.surf.get_rect(center=self.location)
        pygame.draw.polygon(self.surf, self.color, [(0, 0), (100, 0), (50, 100)])
        # Name label blitted after the polygon so it renders on top.
        self.text = pygame.font.Font(None, 18).render(self.name, True, BLACK)
        self.text_rect = self.text.get_rect()
        self.text_rect.centerx = 100 // 2
        self.text_rect.top = 5
        self.surf.blit(self.text, self.text_rect)
class ResourceEntity(pygame.sprite.Sprite):
    """Static elliptical sprite marking a simulation resource location."""

    def __init__(self, resource_info, location):
        super(ResourceEntity, self).__init__()
        # Keep the spawning Log_Event and unpack its fields for easy access.
        self.info = resource_info
        self.time = resource_info.time
        self.name = resource_info.name
        self.type = resource_info.type
        self.action = resource_info.action
        self.location = location
        self.color = LIGHT_BLUE
        # Full-surface ellipse on a transparent background.
        self.surf = pygame.Surface((100, 100), pygame.SRCALPHA)
        self.rect = self.surf.get_rect(center=self.location)
        pygame.draw.ellipse(self.surf, self.color, self.surf.get_rect())
        # Name label centered over the ellipse, blitted last so it is on top.
        self.text = pygame.font.Font(None, 18).render(self.name, True, BLACK)
        self.text_rect = self.text.get_rect()
        self.text_rect.centerx = 100 // 2
        self.text_rect.centery = 100 // 2
        self.surf.blit(self.text, self.text_rect)
class SIMScene(Scene):
    """Visualization scene that replays recorded SIM events with pygame.

    Events are consumed from the global ``record.sim_queue`` deque and
    turned into sprite creations/moves: sources (triangles), resources
    (ellipses), and moving opportunity sprites.
    """
    def __init__(self):
        super().__init__()
        print("VIZ Scene")
        # Obj Properties
        # Becomes True once the replay queue has been fully drained.
        self.empty_deque = False
        # Locations for scene items
        self.scene_setup = {"SOURCE_A": (200, 200),
                            "RESOURCE_A": (1500, 300),
                            "END_LOC": (1500, 900)}
        # Dict of all scene sim entities by name
        self.scene_entity = {}
        # Sprite groups to control movement and collisions around static entities
        # LOST_GROUP holds sprites currently moving; PLACED_GROUP holds
        # sprites that have come to rest (plus the static entities).
        self.LOST_GROUP = pygame.sprite.Group()
        self.PLACED_GROUP = pygame.sprite.Group()
        # Scene Common Text
        self.title_text = pygame.font.Font(None, 64).render("SIM ALPHA", 1, WHITE)
        self.over_text = pygame.font.Font(None, 64).render("Queue Empty...OVER!!!", 1, RED)
        self.title_rect = self.title_text.get_rect()
        self.over_rect = self.over_text.get_rect()
        self.title_rect.centerx = SCREEN_WIDTH // 2
        self.title_rect.bottom = SCREEN_HEIGHT // 2
        self.over_rect.centerx = SCREEN_WIDTH // 2
        self.over_rect.bottom = (SCREEN_HEIGHT // 2) + 300
    def sim_create_resource(self, create_resource_event):
        """Place the static RESOURCE_A sprite when its CREATE event arrives."""
        if create_resource_event.name == "RESOURCE_A":
            new_resource = ResourceEntity(create_resource_event, self.scene_setup["RESOURCE_A"])
            self.PLACED_GROUP.add(new_resource)
            self.scene_entity[new_resource.name] = new_resource
    def sim_create_source(self, create_source_event):
        """Place the static SOURCE_A sprite when its CREATE event arrives."""
        if create_source_event.name == "SOURCE_A":
            new_source = SourceEntity(create_source_event, self.scene_setup["SOURCE_A"])
            self.PLACED_GROUP.add(new_source)
            self.scene_entity[new_source.name] = new_source
    def process_sim_event(self):
        """Pop one event from the global record queue and apply it to the scene."""
        # Pop next event off queue
        next_sim_event = record.sim_queue.popleft()
        print(" ")
        pprint.pprint(next_sim_event)
        pprint.pprint(f'Game Time {pygame.time.get_ticks()} with SIM Time {next_sim_event.time * SIM_SPEED}')
        # Process each log event
        if (next_sim_event.type == "SOURCE") and (next_sim_event.action == "CREATE"):
            self.sim_create_source(next_sim_event)
        if (next_sim_event.type == "RESOURCE") and (next_sim_event.action == "CREATE"):
            self.sim_create_resource(next_sim_event)
        if (next_sim_event.type == "OPP") and (next_sim_event.action == "CREATE"):
            # New opportunity starts moving from the source location.
            new_opp = OppEntity(next_sim_event, self.scene_setup["SOURCE_A"])
            self.LOST_GROUP.add(new_opp)
            self.scene_entity[new_opp.name] = new_opp
        if (next_sim_event.type == "OPP") and (next_sim_event.action == "RA_USE"):
            # Opportunity started using the resource: re-launch it from RESOURCE_A.
            opp_obj = self.scene_entity.get(next_sim_event.name)
            self.LOST_GROUP.add(opp_obj)
            self.PLACED_GROUP.remove(opp_obj)
            opp_obj.rect.center = self.scene_setup.get("RESOURCE_A")
        if (next_sim_event.type == "OPP") and (next_sim_event.action == "RA_FINISH"):
            # Opportunity finished: re-launch it from END_LOC, recolored white.
            opp_obj = self.scene_entity.get(next_sim_event.name)
            self.LOST_GROUP.add(opp_obj)
            self.PLACED_GROUP.remove(opp_obj)
            opp_obj.rect.center = self.scene_setup.get("END_LOC")
            opp_obj.draw(WHITE)
        if (next_sim_event.type == "OPP") and (next_sim_event.action == "RA_RENEGE"):
            # Opportunity gave up waiting: recolor it in place.
            opp_obj = self.scene_entity.get(next_sim_event.name)
            opp_obj.draw(CHOCOLATE)
    def process_input(self, events, pressed_keys):
        """Handle keyboard input, then replay every sim event that is now due."""
        for event in events:
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_SPACE:
                    self.terminate()
        # Drain events whose scaled sim time has been reached.
        # sim_queue entries are indexable; [0][0] is presumably the event
        # time (matches next_sim_event.time usage above) -- TODO confirm.
        queue_peek = len(record.sim_queue) > 0
        while queue_peek:
            if (record.sim_queue[0][0] * SIM_SPEED) <= pygame.time.get_ticks():
                self.process_sim_event()
                queue_peek = len(record.sim_queue) > 0
            else:
                queue_peek = False
        # Event queue empty, end viz
        if len(record.sim_queue) == 0:
            self.empty_deque = True
    def update(self):
        """Step moving sprites; park any sprite no longer touching a placed one.

        Note: a sprite keeps moving while it overlaps something in
        PLACED_GROUP and is parked (moved to PLACED_GROUP) once it is clear.
        .sprites() returns a copy, so group membership may change mid-loop.
        """
        for opp_sprite in self.LOST_GROUP.sprites():
            if pygame.sprite.spritecollideany(opp_sprite, self.PLACED_GROUP):
                opp_sprite.update()
            else:
                self.LOST_GROUP.remove(opp_sprite)
                self.PLACED_GROUP.add(opp_sprite)
    def render(self):
        """Draw the background, status text, and every sprite to the screen."""
        # Set background color
        SCREEN.fill(BLACK)
        # Render Text
        SCREEN.blit(self.title_text, self.title_rect)
        if self.empty_deque:
            SCREEN.blit(self.over_text, self.over_rect)
        # Render SIM Objects
        for entity in self.PLACED_GROUP:
            SCREEN.blit(entity.surf, entity.rect)
        for entity in self.LOST_GROUP:
            SCREEN.blit(entity.surf, entity.rect)
    def terminate(self):
        """Dump final group/entity state to stdout and signal the scene to end."""
        print("\nQueue Empty...OVER!!!")
        print("LOST: ", self.LOST_GROUP)
        print("PLACED: ", self.PLACED_GROUP)
        pprint.pprint(self.scene_entity)
        self.next_scene = None
# __ __ _
# | \/ | (_)
# | \ / | __ _ _ _ __
# | |\/| |/ _` | | '_ \
# | | | | (_| | | | | |
# |_| |_|\__,_|_|_| |_|
#
#
# START THE SIM
if __name__ == "__main__":
    # Setup and start the simulation
    print('START VIZ SIM ALPHA......................')
    # RUN SIMULATION FIRST
    # record collects the timed sim events later replayed by SIMScene.
    record = SIMObserver()
    # Source drives event generation; env is presumably a simpy-style
    # environment defined earlier in this file -- TODO confirm.
    opportunities = Source(env)
    print('SIM START')
    env.run()
    print('SIM OVER')
    record.print_deque()
    # RUN SIM VIZ
    print('VIZ START')
    first_scene = SIMScene()
    game_dir = Director(first_scene)
    game_dir.action()
    print('VIZ OVER')
    pygame.quit()
    print('END VIZ SIM ALPHA.........................')
    sys.exit()
| [
"chris.barbosky@yahoo.com"
] | chris.barbosky@yahoo.com |
460f26e4ddbda79353b5efdb773a8dc6b7355b37 | 0f3606394c731e06360a0ae4c0590769306f2d9c | /app/lobby.py | e7ebaddf585b07769d3a3267f8d51343125a301d | [] | no_license | Oneoeigh/Flask-test | 63de921b9b96917eab5141cf25143ee625b92f56 | cc5bdb29a44133e220e5530dc11afcceccd78a9f | refs/heads/master | 2020-04-11T12:29:06.972027 | 2018-12-15T15:33:11 | 2018-12-15T15:33:11 | 161,781,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | import threading
import json
from flask_socketio import join_room, leave_room
from flask import render_template, request, redirect, url_for
from flask_login import current_user
from app import app, socketio
| [
"jqy108@gmail.com"
] | jqy108@gmail.com |
7bd3553f72aa8efcac87122cf902fc094470fc29 | 381bf9a429b1b0bcfdb52faa3e0f8e553264fde4 | /ingest/qtl/pQTL_sun2018/fix_mac_column.py | 98f768ace1c79c0c5a7e73329d7f0df3b378c3a3 | [
"Apache-2.0"
] | permissive | thehyve/genetics-sumstat-data | a99f327c68ba5524006c12d7262e7e9aad5916ef | 9abe1dfba744266f4321fcfa58ef0c764747c183 | refs/heads/master | 2022-11-18T06:07:44.523537 | 2022-06-09T15:36:39 | 2022-06-09T15:36:39 | 224,444,420 | 0 | 0 | Apache-2.0 | 2022-07-29T08:11:28 | 2019-11-27T14:05:14 | Python | UTF-8 | Python | false | false | 1,681 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Jeremy Schwartzentruber
#
# The MAC column for two datasets (eQTLGen and SUN2018) was saved as a double,
# but should have been an int, for compatibility with other QTL datasets
# ingested. This fixes that.
import sys
import os
from time import time
import pyspark.sql
from pyspark.sql.types import *
from pyspark.sql import DataFrame
from pyspark.sql.functions import *
def main():
    """Re-ingest the SUN2018 dataset with the `mac` column cast to int.

    Reads the broken parquet (where `mac` was stored as a double), casts
    `mac` to IntegerType once, and writes the corrected data to both the
    main molecular_trait directory and the artificially partitioned
    layout used to read all mol_trait studies at once.

    Returns:
        0 on success.
    """
    global spark
    spark = (
        pyspark.sql.SparkSession.builder
        .config("parquet.enable.summary-metadata", "true")
        .getOrCreate()
    )
    print('Spark version: ', spark.version)
    df = spark.read.parquet('gs://genetics-portal-dev-sumstats/unfiltered/molecular_trait_tofix/SUN2018.parquet')
    # The fix itself: mac (minor allele count) must be an integer, not a
    # double. Cast once; the same corrected frame is written twice below
    # (the original code redundantly re-cast the already-int column).
    df = df.withColumn('mac', col('mac').cast(IntegerType()))
    # Write back to main molecular_trait directory
    (
        df
        .write
        .partitionBy('bio_feature', 'chrom')
        .parquet(
            'gs://genetics-portal-dev-sumstats/unfiltered/molecular_trait/SUN2018.parquet',
            mode='overwrite',
            compression='snappy'
        )
    )
    # Write to artificially partitioned molecular trait directory,
    # so that we can read all mol_trait studies at once
    (
        df
        .write
        .partitionBy('bio_feature', 'chrom')
        .parquet(
            'gs://genetics-portal-dev-sumstats/unfiltered/molecular_trait_partitioned/col_study_id=SUN2018/',
            mode='overwrite',
            compression='snappy'
        )
    )
    return 0
if __name__ == '__main__':
    # Propagate main()'s return value (0 on success) as the process exit
    # status instead of silently discarding it.
    sys.exit(main())
| [
"jeremy37@gmail.com"
] | jeremy37@gmail.com |
e9b643923ea8f3ea277bfd5d97c1270a1ed32532 | d9c0dad2ddc262ac6316f5eab49c5873fa26cc00 | /docker/resources/sdnctrl/rest_qos.py | c5bac29a3057e08588e28cd480542dc0d1c07e3b | [
"MIT"
] | permissive | RENCI-NRIG/Mobius | c45b8e1de956f7b81fbb7c87eeda8a372a214079 | 96aac6dd6dd89bf8565d953cee1e4d1e8a95f70a | refs/heads/master | 2023-05-05T10:44:20.089459 | 2022-07-18T16:05:04 | 2022-07-18T16:05:04 | 81,361,221 | 4 | 2 | MIT | 2023-03-01T20:48:12 | 2017-02-08T18:21:06 | Java | UTF-8 | Python | false | false | 40,028 | py | # Copyright (C) 2014 Kiyonari Harigae <lakshmi at cloudysunny14 org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import json
import re
from ryu.app import conf_switch_key as cs_key
from ryu.app.wsgi import ControllerBase
from ryu.app.wsgi import Response
from ryu.app.wsgi import route
from ryu.app.wsgi import WSGIApplication
from ryu.base import app_manager
from ryu.controller import conf_switch
from ryu.controller import ofp_event
from ryu.controller import dpset
from ryu.controller.handler import set_ev_cls
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.exception import OFPUnknownVersion
from ryu.lib import dpid as dpid_lib
from ryu.lib import mac
from ryu.lib import ofctl_v1_0
from ryu.lib import ofctl_v1_2
from ryu.lib import ofctl_v1_3
from ryu.lib.ovs import bridge
from ryu.ofproto import ofproto_v1_0
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ofproto_v1_3
from ryu.ofproto import ofproto_v1_3_parser
from ryu.ofproto import ether
from ryu.ofproto import inet
# =============================
# REST API
# =============================
#
# Note: specify switch and vlan group, as follows.
# {switch-id} : 'all' or switchID
# {vlan-id} : 'all' or vlanID
#
# about queue status
#
# get status of queue
# GET /qos/queue/status/{switch-id}
#
# about queues
# get a queue configurations
# GET /qos/queue/{switch-id}
#
# set a queue to the switches
# POST /qos/queue/{switch-id}
#
# request body format:
# {"port_name":"<name of port>",
# "type": "<linux-htb or linux-other>",
#   "max_rate": "<int>",
# "queues":[{"max_rate": "<int>", "min_rate": "<int>"},...]}
#
#   Note: This operation overrides
#         previous configurations.
# Note: Queue configurations are available for
# OpenvSwitch.
# Note: port_name is optional argument.
# If does not pass the port_name argument,
# all ports are target for configuration.
#
# delete queue
#   DELETE /qos/queue/{switch-id}
#
#   Note: This operation deletes the relation of the qos record from
#         the qos column in the Port table. Therefore,
#         QoS records and Queue records will remain.
#
# about qos rules
#
# get rules of qos
# * for no vlan
# GET /qos/rules/{switch-id}
#
# * for specific vlan group
# GET /qos/rules/{switch-id}/{vlan-id}
#
# set a qos rules
#
# QoS rules will do the processing pipeline,
# which entries are register the first table (by default table id 0)
# and process will apply and go to next table.
#
# * for no vlan
# POST /qos/{switch-id}
#
# * for specific vlan group
# POST /qos/{switch-id}/{vlan-id}
#
# request body format:
# {"priority": "<value>",
# "match": {"<field1>": "<value1>", "<field2>": "<value2>",...},
# "actions": {"<action1>": "<value1>", "<action2>": "<value2>",...}
# }
#
# Description
# * priority field
# <value>
# "0 to 65533"
#
# Note: When "priority" has not been set up,
# "priority: 1" is set to "priority".
#
# * match field
# <field> : <value>
# "in_port" : "<int>"
# "dl_src" : "<xx:xx:xx:xx:xx:xx>"
# "dl_dst" : "<xx:xx:xx:xx:xx:xx>"
# "dl_type" : "<ARP or IPv4 or IPv6>"
# "nw_src" : "<A.B.C.D/M>"
# "nw_dst" : "<A.B.C.D/M>"
# "ipv6_src": "<xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx/M>"
# "ipv6_dst": "<xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx/M>"
# "nw_proto": "<TCP or UDP or ICMP or ICMPv6>"
# "tp_src" : "<int>"
# "tp_dst" : "<int>"
# "ip_dscp" : "<int>"
#
# * actions field
# <field> : <value>
# "mark": <dscp-value>
# sets the IPv4 ToS/DSCP field to tos.
# "meter": <meter-id>
# apply meter entry
# "queue": <queue-id>
# register queue specified by queue-id
#
# Note: When "actions" has not been set up,
# "queue: 0" is set to "actions".
#
# delete a qos rules
# * for no vlan
# DELETE /qos/rule/{switch-id}
#
# * for specific vlan group
# DELETE /qos/{switch-id}/{vlan-id}
#
# request body format:
# {"<field>":"<value>"}
#
# <field> : <value>
# "qos_id" : "<int>" or "all"
#
# about meter entries
#
# set a meter entry
# POST /qos/meter/{switch-id}
#
# request body format:
# {"meter_id": <int>,
# "bands":[{"action": "<DROP or DSCP_REMARK>",
# "flag": "<KBPS or PKTPS or BURST or STATS"
# "burst_size": <int>,
# "rate": <int>,
# "prec_level": <int>},...]}
#
# delete a meter entry
# DELETE /qos/meter/{switch-id}
#
# request body format:
# {"<field>":"<value>"}
#
# <field> : <value>
# "meter_id" : "<int>"
#
# URL path patterns: a switch/VLAN id or the literal 'all'.
SWITCHID_PATTERN = dpid_lib.DPID_PATTERN + r'|all'
VLANID_PATTERN = r'[0-9]{1,4}|all'
# Flow table where QoS classification rules are installed.
QOS_TABLE_ID = 0
REST_ALL = 'all'
REST_SWITCHID = 'switch_id'
REST_COMMAND_RESULT = 'command_result'
REST_PRIORITY = 'priority'
REST_VLANID = 'vlan_id'
# Keys used in queue-configuration request/response bodies.
REST_PORT_NAME = 'port_name'
REST_QUEUE_TYPE = 'type'
REST_QUEUE_MAX_RATE = 'max_rate'
REST_QUEUE_MIN_RATE = 'min_rate'
REST_QUEUES = 'queues'
REST_QOS = 'qos'
REST_QOS_ID = 'qos_id'
REST_COOKIE = 'cookie'
# Match-field keys accepted in QoS rule bodies.
REST_MATCH = 'match'
REST_IN_PORT = 'in_port'
REST_SRC_MAC = 'dl_src'
REST_DST_MAC = 'dl_dst'
REST_DL_TYPE = 'dl_type'
REST_DL_TYPE_ARP = 'ARP'
REST_DL_TYPE_IPV4 = 'IPv4'
REST_DL_TYPE_IPV6 = 'IPv6'
REST_DL_VLAN = 'dl_vlan'
REST_SRC_IP = 'nw_src'
REST_DST_IP = 'nw_dst'
REST_SRC_IPV6 = 'ipv6_src'
REST_DST_IPV6 = 'ipv6_dst'
REST_NW_PROTO = 'nw_proto'
REST_NW_PROTO_TCP = 'TCP'
REST_NW_PROTO_UDP = 'UDP'
REST_NW_PROTO_ICMP = 'ICMP'
REST_NW_PROTO_ICMPV6 = 'ICMPv6'
REST_TP_SRC = 'tp_src'
REST_TP_DST = 'tp_dst'
REST_DSCP = 'ip_dscp'
# Action keys accepted in QoS rule bodies.
REST_ACTION = 'actions'
REST_ACTION_QUEUE = 'queue'
REST_ACTION_MARK = 'mark'
REST_ACTION_METER = 'meter'
# Keys used in meter-entry bodies.
REST_METER_ID = 'meter_id'
REST_METER_BURST_SIZE = 'burst_size'
REST_METER_RATE = 'rate'
REST_METER_PREC_LEVEL = 'prec_level'
REST_METER_BANDS = 'bands'
REST_METER_ACTION_DROP = 'drop'
REST_METER_ACTION_REMARK = 'remark'
# Priority/VLAN/cookie layout constants.
DEFAULT_FLOW_PRIORITY = 0
QOS_PRIORITY_MAX = ofproto_v1_3_parser.UINT16_MAX - 1
QOS_PRIORITY_MIN = 1
VLANID_NONE = 0
VLANID_MIN = 2
VLANID_MAX = 4094
# Cookie encodes the VLAN id in its upper 32 bits.
COOKIE_SHIFT_VLANID = 32
BASE_URL = '/qos'
REQUIREMENTS = {'switchid': SWITCHID_PATTERN,
                'vlanid': VLANID_PATTERN}
LOG = logging.getLogger(__name__)
class RestQoSAPI(app_manager.RyuApp):
    """Ryu application wiring the QoS REST controller into the framework.

    Registers QoSController with the WSGI layer and forwards switch
    join/leave events, conf_switch OVSDB-address changes, and OpenFlow
    stats replies to it.
    """
    OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION,
                    ofproto_v1_2.OFP_VERSION,
                    ofproto_v1_3.OFP_VERSION]
    _CONTEXTS = {
        'dpset': dpset.DPSet,
        'conf_switch': conf_switch.ConfSwitchSet,
        'wsgi': WSGIApplication}
    def __init__(self, *args, **kwargs):
        super(RestQoSAPI, self).__init__(*args, **kwargs)
        # logger configure
        QoSController.set_logger(self.logger)
        self.cs = kwargs['conf_switch']
        self.dpset = kwargs['dpset']
        wsgi = kwargs['wsgi']
        # waiters maps dpid -> {xid: (lock, collected_msgs)} for REST
        # threads blocked on stats replies.
        self.waiters = {}
        self.data = {}
        self.data['dpset'] = self.dpset
        self.data['waiters'] = self.waiters
        wsgi.registory['QoSController'] = self.data
        wsgi.register(QoSController, self.data)
    def stats_reply_handler(self, ev):
        """Collect a stats reply and release the waiting REST thread.

        Replies are matched by (datapath id, xid); the lock is only set
        after the final part of a multipart reply arrives.
        """
        msg = ev.msg
        dp = msg.datapath
        if dp.id not in self.waiters:
            return
        if msg.xid not in self.waiters[dp.id]:
            return
        lock, msgs = self.waiters[dp.id][msg.xid]
        msgs.append(msg)
        flags = 0
        if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION or \
           dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION:
            flags = dp.ofproto.OFPSF_REPLY_MORE
        elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
            flags = dp.ofproto.OFPMPF_REPLY_MORE
        # More parts of this multipart reply are still in flight.
        if msg.flags & flags:
            return
        del self.waiters[dp.id][msg.xid]
        lock.set()
    @set_ev_cls(conf_switch.EventConfSwitchSet)
    def conf_switch_set_handler(self, ev):
        """Propagate an OVSDB address setting to the matching QoS switch."""
        if ev.key == cs_key.OVSDB_ADDR:
            QoSController.set_ovsdb_addr(ev.dpid, ev.value)
        else:
            QoSController._LOGGER.debug("unknown event: %s", ev)
    @set_ev_cls(conf_switch.EventConfSwitchDel)
    def conf_switch_del_handler(self, ev):
        """Clear the OVSDB address when its config entry is removed."""
        if ev.key == cs_key.OVSDB_ADDR:
            QoSController.delete_ovsdb_addr(ev.dpid)
        else:
            QoSController._LOGGER.debug("unknown event: %s", ev)
    @set_ev_cls(dpset.EventDP, dpset.DPSET_EV_DISPATCHER)
    def handler_datapath(self, ev):
        """Register or unregister a switch as it joins or leaves."""
        if ev.enter:
            QoSController.regist_ofs(ev.dp, self.CONF)
        else:
            QoSController.unregist_ofs(ev.dp)
    # for OpenFlow version1.0
    @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER)
    def stats_reply_handler_v1_0(self, ev):
        self.stats_reply_handler(ev)
    # for OpenFlow version1.2 or later
    @set_ev_cls(ofp_event.EventOFPStatsReply, MAIN_DISPATCHER)
    def stats_reply_handler_v1_2(self, ev):
        self.stats_reply_handler(ev)
    # for OpenFlow version1.2 or later
    @set_ev_cls(ofp_event.EventOFPQueueStatsReply, MAIN_DISPATCHER)
    def queue_stats_reply_handler_v1_2(self, ev):
        self.stats_reply_handler(ev)
    # for OpenFlow version1.2 or later
    @set_ev_cls(ofp_event.EventOFPMeterStatsReply, MAIN_DISPATCHER)
    def meter_stats_reply_handler_v1_2(self, ev):
        self.stats_reply_handler(ev)
class QoSOfsList(dict):
    """Registry of connected QoS switches, keyed by integer datapath id."""

    def __init__(self):
        super(QoSOfsList, self).__init__()

    def get_ofs(self, dp_id):
        """Return a {dpid: QoS} mapping for *dp_id* ('all' or a switch id).

        Raises:
            ValueError: if no switch is connected, the id string is
                malformed, or the requested switch is not registered.
        """
        if len(self) == 0:
            raise ValueError('qos sw is not connected.')
        dps = {}
        if dp_id == REST_ALL:
            dps = self
        else:
            try:
                dpid = dpid_lib.str_to_dpid(dp_id)
            # Was a bare `except:`, which would also swallow
            # SystemExit/KeyboardInterrupt.
            except Exception:
                raise ValueError('Invalid switchID.')
            if dpid in self:
                dps = {dpid: self[dpid]}
            else:
                msg = 'qos sw is not connected. : switchID=%s' % dp_id
                raise ValueError(msg)
        return dps
class QoSController(ControllerBase):
    """WSGI controller translating REST requests into per-switch QoS calls.

    Each route resolves the target switch(es) from the URL, then invokes
    the named method on the corresponding QoS object(s) and returns the
    aggregated JSON result.
    """
    # Class-level registry shared with RestQoSAPI's event handlers.
    _OFS_LIST = QoSOfsList()
    _LOGGER = None
    def __init__(self, req, link, data, **config):
        super(QoSController, self).__init__(req, link, data, **config)
        self.dpset = data['dpset']
        self.waiters = data['waiters']
    @classmethod
    def set_logger(cls, logger):
        """Attach a stream handler with the '[QoS]' prefix to *logger*."""
        cls._LOGGER = logger
        cls._LOGGER.propagate = False
        hdlr = logging.StreamHandler()
        fmt_str = '[QoS][%(levelname)s] %(message)s'
        hdlr.setFormatter(logging.Formatter(fmt_str))
        cls._LOGGER.addHandler(hdlr)
    @staticmethod
    def regist_ofs(dp, CONF):
        """Wrap a newly joined datapath in a QoS object and register it."""
        if dp.id in QoSController._OFS_LIST:
            return
        dpid_str = dpid_lib.dpid_to_str(dp.id)
        try:
            f_ofs = QoS(dp, CONF)
            # Install the table-0 goto rule so unmatched traffic passes on.
            f_ofs.set_default_flow()
        except OFPUnknownVersion as message:
            QoSController._LOGGER.info('dpid=%s: %s',
                                       dpid_str, message)
            return
        QoSController._OFS_LIST.setdefault(dp.id, f_ofs)
        QoSController._LOGGER.info('dpid=%s: Join qos switch.',
                                   dpid_str)
    @staticmethod
    def unregist_ofs(dp):
        """Drop a departed datapath from the registry."""
        if dp.id in QoSController._OFS_LIST:
            del QoSController._OFS_LIST[dp.id]
            QoSController._LOGGER.info('dpid=%s: Leave qos switch.',
                                       dpid_lib.dpid_to_str(dp.id))
    @staticmethod
    def set_ovsdb_addr(dpid, value):
        ofs = QoSController._OFS_LIST.get(dpid, None)
        if ofs is not None:
            ofs.set_ovsdb_addr(dpid, value)
    @staticmethod
    def delete_ovsdb_addr(dpid):
        # Passing None clears the stored OVSDB connection.
        ofs = QoSController._OFS_LIST.get(dpid, None)
        if ofs is not None:
            ofs.set_ovsdb_addr(dpid, None)
    @route('qos_switch', BASE_URL + '/queue/{switchid}',
           methods=['GET'], requirements=REQUIREMENTS)
    def get_queue(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'get_queue', None)
    @route('qos_switch', BASE_URL + '/queue/{switchid}',
           methods=['POST'], requirements=REQUIREMENTS)
    def set_queue(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'set_queue', None)
    @route('qos_switch', BASE_URL + '/queue/{switchid}',
           methods=['DELETE'], requirements=REQUIREMENTS)
    def delete_queue(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'delete_queue', None)
    @route('qos_switch', BASE_URL + '/queue/status/{switchid}',
           methods=['GET'], requirements=REQUIREMENTS)
    def get_status(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'get_status', self.waiters)
    @route('qos_switch', BASE_URL + '/rules/{switchid}',
           methods=['GET'], requirements=REQUIREMENTS)
    def get_qos(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'get_qos', self.waiters)
    @route('qos_switch', BASE_URL + '/rules/{switchid}/{vlanid}',
           methods=['GET'], requirements=REQUIREMENTS)
    def get_vlan_qos(self, req, switchid, vlanid, **_kwargs):
        return self._access_switch(req, switchid, vlanid,
                                   'get_qos', self.waiters)
    @route('qos_switch', BASE_URL + '/rules/{switchid}',
           methods=['POST'], requirements=REQUIREMENTS)
    def set_qos(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'set_qos', self.waiters)
    @route('qos_switch', BASE_URL + '/rules/{switchid}/{vlanid}',
           methods=['POST'], requirements=REQUIREMENTS)
    def set_vlan_qos(self, req, switchid, vlanid, **_kwargs):
        return self._access_switch(req, switchid, vlanid,
                                   'set_qos', self.waiters)
    @route('qos_switch', BASE_URL + '/rules/{switchid}',
           methods=['DELETE'], requirements=REQUIREMENTS)
    def delete_qos(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'delete_qos', self.waiters)
    @route('qos_switch', BASE_URL + '/rules/{switchid}/{vlanid}',
           methods=['DELETE'], requirements=REQUIREMENTS)
    def delete_vlan_qos(self, req, switchid, vlanid, **_kwargs):
        return self._access_switch(req, switchid, vlanid,
                                   'delete_qos', self.waiters)
    @route('qos_switch', BASE_URL + '/meter/{switchid}',
           methods=['GET'], requirements=REQUIREMENTS)
    def get_meter(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'get_meter', self.waiters)
    @route('qos_switch', BASE_URL + '/meter/{switchid}',
           methods=['POST'], requirements=REQUIREMENTS)
    def set_meter(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'set_meter', self.waiters)
    @route('qos_switch', BASE_URL + '/meter/{switchid}',
           methods=['DELETE'], requirements=REQUIREMENTS)
    def delete_meter(self, req, switchid, **_kwargs):
        return self._access_switch(req, switchid, VLANID_NONE,
                                   'delete_meter', self.waiters)
    def _access_switch(self, req, switchid, vlan_id, func, waiters):
        """Dispatch *func* on every switch matched by *switchid*.

        Parses the JSON request body, resolves the switch set and VLAN id,
        calls the QoS method (with waiters when a stats round-trip is
        needed), and returns the collected results as a JSON response.
        Returns HTTP 400 on bad JSON, bad ids, or method-raised ValueError.
        """
        try:
            rest = req.json if req.body else {}
        except ValueError:
            QoSController._LOGGER.debug('invalid syntax %s', req.body)
            return Response(status=400)
        try:
            dps = self._OFS_LIST.get_ofs(switchid)
            vid = QoSController._conv_toint_vlanid(vlan_id)
        except ValueError as message:
            return Response(status=400, body=str(message))
        msgs = []
        for f_ofs in dps.values():
            function = getattr(f_ofs, func)
            try:
                if waiters is not None:
                    msg = function(rest, vid, waiters)
                else:
                    msg = function(rest, vid)
            except ValueError as message:
                return Response(status=400, body=str(message))
            msgs.append(msg)
        body = json.dumps(msgs)
        return Response(content_type='application/json', body=body)
    @staticmethod
    def _conv_toint_vlanid(vlan_id):
        """Convert a VLAN id string to int, validating its range ('all' passes)."""
        if vlan_id != REST_ALL:
            vlan_id = int(vlan_id)
            if (vlan_id != VLANID_NONE and
                    (vlan_id < VLANID_MIN or VLANID_MAX < vlan_id)):
                msg = 'Invalid {vlan_id} value. Set [%d-%d]' % (VLANID_MIN,
                                                                VLANID_MAX)
                raise ValueError(msg)
        return vlan_id
class QoS(object):
    """Per-switch QoS manager: queues (via OVSDB), flow rules, and meters.

    Fixes over the original: all bare `except:` clauses narrowed to
    `except Exception:` (a bare except also traps SystemExit and
    KeyboardInterrupt), and `_update_vlan_list` no longer deletes from a
    dict while iterating its live view (RuntimeError on Python 3).
    """
    # Per-OF-version helper module for flow/meter/stats operations.
    _OFCTL = {ofproto_v1_0.OFP_VERSION: ofctl_v1_0,
              ofproto_v1_2.OFP_VERSION: ofctl_v1_2,
              ofproto_v1_3.OFP_VERSION: ofctl_v1_3}
    def __init__(self, dp, CONF):
        super(QoS, self).__init__()
        # vlan_list maps vlan_id -> last rule sequence number (cookie low bits).
        self.vlan_list = {}
        self.vlan_list[VLANID_NONE] = 0  # for VLAN=None
        self.dp = dp
        self.version = dp.ofproto.OFP_VERSION
        # Dictionary of port name to Queue config.
        # e.g.)
        # self.queue_list = {
        #     "s1-eth1": {
        #         "0": {
        #             "config": {
        #                 "max-rate": "600000"
        #             }
        #         },
        #         "1": {
        #             "config": {
        #                 "min-rate": "900000"
        #             }
        #         }
        #     }
        # }
        self.queue_list = {}
        self.CONF = CONF
        self.ovsdb_addr = None
        self.ovs_bridge = None
        if self.version not in self._OFCTL:
            raise OFPUnknownVersion(version=self.version)
        self.ofctl = self._OFCTL[self.version]
    def set_default_flow(self):
        """Install a lowest-priority goto-next-table rule in the QoS table."""
        if self.version == ofproto_v1_0.OFP_VERSION:
            return
        cookie = 0
        priority = DEFAULT_FLOW_PRIORITY
        actions = [{'type': 'GOTO_TABLE',
                    'table_id': QOS_TABLE_ID + 1}]
        flow = self._to_of_flow(cookie=cookie,
                                priority=priority,
                                match={},
                                actions=actions)
        cmd = self.dp.ofproto.OFPFC_ADD
        self.ofctl.mod_flow_entry(self.dp, flow, cmd)
    def set_ovsdb_addr(self, dpid, ovsdb_addr):
        """Connect to (or, with None, disconnect from) the switch's OVSDB."""
        old_address = self.ovsdb_addr
        if old_address == ovsdb_addr:
            return
        elif ovsdb_addr is None:
            # Determine deleting OVSDB address was requested.
            if self.ovs_bridge:
                self.ovs_bridge = None
            return
        ovs_bridge = bridge.OVSBridge(self.CONF, dpid, ovsdb_addr)
        try:
            ovs_bridge.init()
        except Exception:
            raise ValueError('ovsdb addr is not available.')
        self.ovsdb_addr = ovsdb_addr
        self.ovs_bridge = ovs_bridge
    def _update_vlan_list(self, vlan_list):
        """Drop tracked VLAN ids that no longer have any installed rule."""
        # Iterate over a snapshot: deleting from a dict while iterating its
        # live keys view raises RuntimeError on Python 3. Also compare by
        # value (!=), not identity (is not), against the int constant.
        for vlan_id in list(self.vlan_list.keys()):
            if vlan_id != VLANID_NONE and vlan_id not in vlan_list:
                del self.vlan_list[vlan_id]
    def _get_cookie(self, vlan_id):
        """Allocate [cookie, vlan_id] pairs encoding the VLAN and a sequence number."""
        if vlan_id == REST_ALL:
            # Snapshot the keys; the loop mutates counter values.
            vlan_ids = list(self.vlan_list.keys())
        else:
            vlan_ids = [vlan_id]
        cookie_list = []
        for vlan_id in vlan_ids:
            self.vlan_list.setdefault(vlan_id, 0)
            self.vlan_list[vlan_id] += 1
            self.vlan_list[vlan_id] &= ofproto_v1_3_parser.UINT32_MAX
            cookie = (vlan_id << COOKIE_SHIFT_VLANID) + \
                self.vlan_list[vlan_id]
            cookie_list.append([cookie, vlan_id])
        return cookie_list
    @staticmethod
    def _cookie_to_qosid(cookie):
        """Extract the rule sequence number (low 32 bits) from a cookie."""
        return cookie & ofproto_v1_3_parser.UINT32_MAX
    # REST command template: wraps the result with the switch id.
    def rest_command(func):
        def _rest_command(*args, **kwargs):
            key, value = func(*args, **kwargs)
            switch_id = dpid_lib.dpid_to_str(args[0].dp.id)
            return {REST_SWITCHID: switch_id,
                    key: value}
        return _rest_command
    @rest_command
    def get_status(self, req, vlan_id, waiters):
        """Return per-queue traffic statistics from the switch."""
        if self.version == ofproto_v1_0.OFP_VERSION:
            raise ValueError('get_status operation is not supported')
        msgs = self.ofctl.get_queue_stats(self.dp, waiters)
        return REST_COMMAND_RESULT, msgs
    @rest_command
    def get_queue(self, rest, vlan_id):
        """Return the locally cached queue configuration."""
        if len(self.queue_list):
            msg = {'result': 'success',
                   'details': self.queue_list}
        else:
            msg = {'result': 'failure',
                   'details': 'Queue is not exists.'}
        return REST_COMMAND_RESULT, msg
    @rest_command
    def set_queue(self, rest, vlan_id):
        """Configure OVS queues (via OVSDB) on one or all ports.

        Note: this overrides any previous queue configuration on the
        targeted ports.
        """
        if self.ovs_bridge is None:
            msg = {'result': 'failure',
                   'details': 'ovs_bridge is not exists'}
            return REST_COMMAND_RESULT, msg
        port_name = rest.get(REST_PORT_NAME, None)
        vif_ports = self.ovs_bridge.get_port_name_list()
        if port_name is not None:
            if port_name not in vif_ports:
                raise ValueError('%s port is not exists' % port_name)
            vif_ports = [port_name]
        queue_list = {}
        queue_type = rest.get(REST_QUEUE_TYPE, 'linux-htb')
        parent_max_rate = rest.get(REST_QUEUE_MAX_RATE, None)
        queues = rest.get(REST_QUEUES, [])
        queue_id = 0
        queue_config = []
        for queue in queues:
            max_rate = queue.get(REST_QUEUE_MAX_RATE, None)
            min_rate = queue.get(REST_QUEUE_MIN_RATE, None)
            if max_rate is None and min_rate is None:
                raise ValueError('Required to specify max_rate or min_rate')
            config = {}
            if max_rate is not None:
                config['max-rate'] = max_rate
            if min_rate is not None:
                config['min-rate'] = min_rate
            if len(config):
                queue_config.append(config)
            queue_list[queue_id] = {'config': config}
            queue_id += 1
        for port_name in vif_ports:
            try:
                self.ovs_bridge.set_qos(port_name, type=queue_type,
                                        max_rate=parent_max_rate,
                                        queues=queue_config)
            except Exception as msg:
                raise ValueError(msg)
            self.queue_list[port_name] = queue_list
        msg = {'result': 'success',
               'details': queue_list}
        return REST_COMMAND_RESULT, msg
    def _delete_queue(self):
        """Detach QoS from all external ports; return False without a bridge."""
        if self.ovs_bridge is None:
            return False
        vif_ports = self.ovs_bridge.get_external_ports()
        for port in vif_ports:
            self.ovs_bridge.del_qos(port.port_name)
        return True
    @rest_command
    def delete_queue(self, rest, vlan_id):
        """Remove the QoS relation from ports and clear the local cache."""
        if self._delete_queue():
            msg = 'success'
            self.queue_list.clear()
        else:
            msg = 'failure'
        return REST_COMMAND_RESULT, msg
    @rest_command
    def set_qos(self, rest, vlan_id, waiters):
        """Install one QoS flow rule per targeted VLAN."""
        msgs = []
        cookie_list = self._get_cookie(vlan_id)
        for cookie, vid in cookie_list:
            msg = self._set_qos(cookie, rest, waiters, vid)
            msgs.append(msg)
        return REST_COMMAND_RESULT, msgs
    def _set_qos(self, cookie, rest, waiters, vlan_id):
        """Build and install a single classification rule in the QoS table."""
        match_value = rest[REST_MATCH]
        if vlan_id:
            match_value[REST_DL_VLAN] = vlan_id
        priority = int(rest.get(REST_PRIORITY, QOS_PRIORITY_MIN))
        if (QOS_PRIORITY_MAX < priority):
            raise ValueError('Invalid priority value. Set [%d-%d]'
                             % (QOS_PRIORITY_MIN, QOS_PRIORITY_MAX))
        match = Match.to_openflow(match_value)
        actions = []
        action = rest.get(REST_ACTION, None)
        if action is not None:
            if REST_ACTION_MARK in action:
                actions.append({'type': 'SET_FIELD',
                                'field': REST_DSCP,
                                'value': int(action[REST_ACTION_MARK])})
            if REST_ACTION_METER in action:
                actions.append({'type': 'METER',
                                'meter_id': action[REST_ACTION_METER]})
            if REST_ACTION_QUEUE in action:
                actions.append({'type': 'SET_QUEUE',
                                'queue_id': action[REST_ACTION_QUEUE]})
        else:
            # Default action: send matching traffic to queue 0.
            actions.append({'type': 'SET_QUEUE',
                            'queue_id': 0})
        actions.append({'type': 'GOTO_TABLE',
                        'table_id': QOS_TABLE_ID + 1})
        flow = self._to_of_flow(cookie=cookie, priority=priority,
                                match=match, actions=actions)
        cmd = self.dp.ofproto.OFPFC_ADD
        try:
            self.ofctl.mod_flow_entry(self.dp, flow, cmd)
        except Exception:
            raise ValueError('Invalid rule parameter.')
        qos_id = QoS._cookie_to_qosid(cookie)
        msg = {'result': 'success',
               'details': 'QoS added. : qos_id=%d' % qos_id}
        if vlan_id != VLANID_NONE:
            msg.setdefault(REST_VLANID, vlan_id)
        return msg
    @rest_command
    def get_qos(self, rest, vlan_id, waiters):
        """List installed QoS rules, grouped by VLAN id."""
        rules = {}
        msgs = self.ofctl.get_flow_stats(self.dp, waiters)
        if str(self.dp.id) in msgs:
            flow_stats = msgs[str(self.dp.id)]
            for flow_stat in flow_stats:
                if flow_stat['table_id'] != QOS_TABLE_ID:
                    continue
                priority = flow_stat[REST_PRIORITY]
                # Skip the default goto rule installed by set_default_flow.
                if priority != DEFAULT_FLOW_PRIORITY:
                    vid = flow_stat[REST_MATCH].get(REST_DL_VLAN, VLANID_NONE)
                    if vlan_id == REST_ALL or vlan_id == vid:
                        rule = self._to_rest_rule(flow_stat)
                        rules.setdefault(vid, [])
                        rules[vid].append(rule)
        get_data = []
        for vid, rule in rules.items():
            if vid == VLANID_NONE:
                vid_data = {REST_QOS: rule}
            else:
                vid_data = {REST_VLANID: vid, REST_QOS: rule}
            get_data.append(vid_data)
        return REST_COMMAND_RESULT, get_data
    @rest_command
    def delete_qos(self, rest, vlan_id, waiters):
        """Delete QoS rules by qos_id ('all' or an int), scoped by VLAN."""
        try:
            if rest[REST_QOS_ID] == REST_ALL:
                qos_id = REST_ALL
            else:
                qos_id = int(rest[REST_QOS_ID])
        except Exception:
            raise ValueError('Invalid qos id.')
        vlan_list = []
        delete_list = []
        msgs = self.ofctl.get_flow_stats(self.dp, waiters)
        if str(self.dp.id) in msgs:
            flow_stats = msgs[str(self.dp.id)]
            for flow_stat in flow_stats:
                cookie = flow_stat[REST_COOKIE]
                ruleid = QoS._cookie_to_qosid(cookie)
                priority = flow_stat[REST_PRIORITY]
                dl_vlan = flow_stat[REST_MATCH].get(REST_DL_VLAN, VLANID_NONE)
                if priority != DEFAULT_FLOW_PRIORITY:
                    if ((qos_id == REST_ALL or qos_id == ruleid) and
                            (vlan_id == dl_vlan or vlan_id == REST_ALL)):
                        match = Match.to_mod_openflow(flow_stat[REST_MATCH])
                        delete_list.append([cookie, priority, match])
                    else:
                        # Rule survives: remember its VLAN as still in use.
                        if dl_vlan not in vlan_list:
                            vlan_list.append(dl_vlan)
        self._update_vlan_list(vlan_list)
        if len(delete_list) == 0:
            msg_details = 'QoS rule is not exist.'
            if qos_id != REST_ALL:
                msg_details += ' : QoS ID=%d' % qos_id
            msg = {'result': 'failure',
                   'details': msg_details}
        else:
            cmd = self.dp.ofproto.OFPFC_DELETE_STRICT
            actions = []
            delete_ids = {}
            for cookie, priority, match in delete_list:
                flow = self._to_of_flow(cookie=cookie, priority=priority,
                                        match=match, actions=actions)
                self.ofctl.mod_flow_entry(self.dp, flow, cmd)
                vid = match.get(REST_DL_VLAN, VLANID_NONE)
                rule_id = QoS._cookie_to_qosid(cookie)
                delete_ids.setdefault(vid, '')
                delete_ids[vid] += (('%d' if delete_ids[vid] == ''
                                     else ',%d') % rule_id)
            msg = []
            for vid, rule_ids in delete_ids.items():
                del_msg = {'result': 'success',
                           'details': ' deleted. : QoS ID=%s' % rule_ids}
                if vid != VLANID_NONE:
                    del_msg.setdefault(REST_VLANID, vid)
                msg.append(del_msg)
        return REST_COMMAND_RESULT, msg
    @rest_command
    def set_meter(self, rest, vlan_id, waiters):
        """Add a meter entry (OpenFlow 1.2+ only)."""
        if self.version == ofproto_v1_0.OFP_VERSION:
            raise ValueError('set_meter operation is not supported')
        msgs = []
        msg = self._set_meter(rest, waiters)
        msgs.append(msg)
        return REST_COMMAND_RESULT, msgs
    def _set_meter(self, rest, waiters):
        cmd = self.dp.ofproto.OFPMC_ADD
        try:
            self.ofctl.mod_meter_entry(self.dp, rest, cmd)
        except Exception:
            raise ValueError('Invalid meter parameter.')
        msg = {'result': 'success',
               'details': 'Meter added. : Meter ID=%s' %
               rest[REST_METER_ID]}
        return msg
    @rest_command
    def get_meter(self, rest, vlan_id, waiters):
        """Return meter statistics (OpenFlow 1.3+ only)."""
        if (self.version == ofproto_v1_0.OFP_VERSION or
                self.version == ofproto_v1_2.OFP_VERSION):
            raise ValueError('get_meter operation is not supported')
        msgs = self.ofctl.get_meter_stats(self.dp, waiters)
        return REST_COMMAND_RESULT, msgs
    @rest_command
    def delete_meter(self, rest, vlan_id, waiters):
        """Delete a meter entry (OpenFlow 1.3+ only)."""
        if (self.version == ofproto_v1_0.OFP_VERSION or
                self.version == ofproto_v1_2.OFP_VERSION):
            raise ValueError('delete_meter operation is not supported')
        cmd = self.dp.ofproto.OFPMC_DELETE
        try:
            self.ofctl.mod_meter_entry(self.dp, rest, cmd)
        except Exception:
            raise ValueError('Invalid meter parameter.')
        msg = {'result': 'success',
               'details': 'Meter deleted. : Meter ID=%s' %
               rest[REST_METER_ID]}
        return REST_COMMAND_RESULT, msg
    def _to_of_flow(self, cookie, priority, match, actions):
        """Assemble the ofctl-style flow dict."""
        flow = {'cookie': cookie,
                'priority': priority,
                'flags': 0,
                'idle_timeout': 0,
                'hard_timeout': 0,
                'match': match,
                'actions': actions}
        return flow
    def _to_rest_rule(self, flow):
        """Convert a flow-stats entry back into the REST rule representation."""
        ruleid = QoS._cookie_to_qosid(flow[REST_COOKIE])
        rule = {REST_QOS_ID: ruleid}
        rule.update({REST_PRIORITY: flow[REST_PRIORITY]})
        rule.update(Match.to_rest(flow))
        rule.update(Action.to_rest(flow))
        return rule
class Match(object):
    """Translate QoS rule match fields between REST and OpenFlow form."""

    # REST keyword values -> numeric OpenFlow constants.
    _CONVERT = {REST_DL_TYPE:
                {REST_DL_TYPE_ARP: ether.ETH_TYPE_ARP,
                 REST_DL_TYPE_IPV4: ether.ETH_TYPE_IP,
                 REST_DL_TYPE_IPV6: ether.ETH_TYPE_IPV6},
                REST_NW_PROTO:
                {REST_NW_PROTO_TCP: inet.IPPROTO_TCP,
                 REST_NW_PROTO_UDP: inet.IPPROTO_UDP,
                 REST_NW_PROTO_ICMP: inet.IPPROTO_ICMP,
                 REST_NW_PROTO_ICMPV6: inet.IPPROTO_ICMPV6}}

    @staticmethod
    def to_openflow(rest):
        """Validate the REST match dict and convert it to OpenFlow fields.

        Raises ValueError on contradictory combinations (e.g. ARP with an
        IPv6 address).  NOTE(review): mutates *rest* in place when an
        implied dl_type can be deduced from the other fields.
        """
        # Small helpers that raise ValueError naming the bad combination.
        def __inv_combi(msg):
            raise ValueError('Invalid combination: [%s]' % msg)

        def __inv_2and1(*args):
            __inv_combi('%s=%s and %s' % (args[0], args[1], args[2]))

        def __inv_2and2(*args):
            __inv_combi('%s=%s and %s=%s' % (
                args[0], args[1], args[2], args[3]))

        def __inv_1and1(*args):
            __inv_combi('%s and %s' % (args[0], args[1]))

        def __inv_1and2(*args):
            __inv_combi('%s and %s=%s' % (args[0], args[1], args[2]))

        match = {}

        # error check
        dl_type = rest.get(REST_DL_TYPE)
        nw_proto = rest.get(REST_NW_PROTO)
        if dl_type is not None:
            # Explicit dl_type: reject fields from the other address family.
            if dl_type == REST_DL_TYPE_ARP:
                if REST_SRC_IPV6 in rest:
                    __inv_2and1(
                        REST_DL_TYPE, REST_DL_TYPE_ARP, REST_SRC_IPV6)
                if REST_DST_IPV6 in rest:
                    __inv_2and1(
                        REST_DL_TYPE, REST_DL_TYPE_ARP, REST_DST_IPV6)
                if REST_DSCP in rest:
                    __inv_2and1(
                        REST_DL_TYPE, REST_DL_TYPE_ARP, REST_DSCP)
                if nw_proto:
                    __inv_2and1(
                        REST_DL_TYPE, REST_DL_TYPE_ARP, REST_NW_PROTO)
            elif dl_type == REST_DL_TYPE_IPV4:
                if REST_SRC_IPV6 in rest:
                    __inv_2and1(
                        REST_DL_TYPE, REST_DL_TYPE_IPV4, REST_SRC_IPV6)
                if REST_DST_IPV6 in rest:
                    __inv_2and1(
                        REST_DL_TYPE, REST_DL_TYPE_IPV4, REST_DST_IPV6)
                if nw_proto == REST_NW_PROTO_ICMPV6:
                    __inv_2and2(
                        REST_DL_TYPE, REST_DL_TYPE_IPV4,
                        REST_NW_PROTO, REST_NW_PROTO_ICMPV6)
            elif dl_type == REST_DL_TYPE_IPV6:
                if REST_SRC_IP in rest:
                    __inv_2and1(
                        REST_DL_TYPE, REST_DL_TYPE_IPV6, REST_SRC_IP)
                if REST_DST_IP in rest:
                    __inv_2and1(
                        REST_DL_TYPE, REST_DL_TYPE_IPV6, REST_DST_IP)
                if nw_proto == REST_NW_PROTO_ICMP:
                    __inv_2and2(
                        REST_DL_TYPE, REST_DL_TYPE_IPV6,
                        REST_NW_PROTO, REST_NW_PROTO_ICMP)
            else:
                raise ValueError('Unknown dl_type : %s' % dl_type)
        else:
            # No dl_type given: infer it from address fields / nw_proto,
            # rejecting mixed IPv4/IPv6 combinations along the way.
            if REST_SRC_IP in rest:
                if REST_SRC_IPV6 in rest:
                    __inv_1and1(REST_SRC_IP, REST_SRC_IPV6)
                if REST_DST_IPV6 in rest:
                    __inv_1and1(REST_SRC_IP, REST_DST_IPV6)
                if nw_proto == REST_NW_PROTO_ICMPV6:
                    __inv_1and2(
                        REST_SRC_IP, REST_NW_PROTO, REST_NW_PROTO_ICMPV6)
                rest[REST_DL_TYPE] = REST_DL_TYPE_IPV4
            elif REST_DST_IP in rest:
                if REST_SRC_IPV6 in rest:
                    __inv_1and1(REST_DST_IP, REST_SRC_IPV6)
                if REST_DST_IPV6 in rest:
                    __inv_1and1(REST_DST_IP, REST_DST_IPV6)
                if nw_proto == REST_NW_PROTO_ICMPV6:
                    __inv_1and2(
                        REST_DST_IP, REST_NW_PROTO, REST_NW_PROTO_ICMPV6)
                rest[REST_DL_TYPE] = REST_DL_TYPE_IPV4
            elif REST_SRC_IPV6 in rest:
                if nw_proto == REST_NW_PROTO_ICMP:
                    __inv_1and2(
                        REST_SRC_IPV6, REST_NW_PROTO, REST_NW_PROTO_ICMP)
                rest[REST_DL_TYPE] = REST_DL_TYPE_IPV6
            elif REST_DST_IPV6 in rest:
                if nw_proto == REST_NW_PROTO_ICMP:
                    __inv_1and2(
                        REST_DST_IPV6, REST_NW_PROTO, REST_NW_PROTO_ICMP)
                rest[REST_DL_TYPE] = REST_DL_TYPE_IPV6
            elif REST_DSCP in rest:
                # Apply dl_type ipv4, if doesn't specify dl_type
                rest[REST_DL_TYPE] = REST_DL_TYPE_IPV4
            else:
                if nw_proto == REST_NW_PROTO_ICMP:
                    rest[REST_DL_TYPE] = REST_DL_TYPE_IPV4
                elif nw_proto == REST_NW_PROTO_ICMPV6:
                    rest[REST_DL_TYPE] = REST_DL_TYPE_IPV6
                elif nw_proto == REST_NW_PROTO_TCP or \
                        nw_proto == REST_NW_PROTO_UDP:
                    raise ValueError('no dl_type was specified')
                else:
                    raise ValueError('Unknown nw_proto: %s' % nw_proto)

        # Translate keyword values to OpenFlow constants; pass other
        # fields through unchanged.
        for key, value in rest.items():
            if key in Match._CONVERT:
                if value in Match._CONVERT[key]:
                    match.setdefault(key, Match._CONVERT[key][value])
                else:
                    raise ValueError('Invalid rule parameter. : key=%s' % key)
            else:
                match.setdefault(key, value)

        return match

    @staticmethod
    def to_rest(openflow):
        """Convert OpenFlow match fields to REST form, dropping wildcards."""
        of_match = openflow[REST_MATCH]

        # "Don't care" values are omitted from the REST representation.
        mac_dontcare = mac.haddr_to_str(mac.DONTCARE)
        ip_dontcare = '0.0.0.0'
        ipv6_dontcare = '::'

        match = {}
        for key, value in of_match.items():
            if key == REST_SRC_MAC or key == REST_DST_MAC:
                if value == mac_dontcare:
                    continue
            elif key == REST_SRC_IP or key == REST_DST_IP:
                if value == ip_dontcare:
                    continue
            elif key == REST_SRC_IPV6 or key == REST_DST_IPV6:
                if value == ipv6_dontcare:
                    continue
            elif value == 0:
                continue

            if key in Match._CONVERT:
                # Invert the constant table to map numbers back to keywords.
                conv = Match._CONVERT[key]
                conv = dict((value, key) for key, value in conv.items())
                match.setdefault(key, conv[value])
            else:
                match.setdefault(key, value)

        return match

    @staticmethod
    def to_mod_openflow(of_match):
        """Strip wildcard ("don't care") fields from an OpenFlow match."""
        mac_dontcare = mac.haddr_to_str(mac.DONTCARE)
        ip_dontcare = '0.0.0.0'
        ipv6_dontcare = '::'

        match = {}
        for key, value in of_match.items():
            if key == REST_SRC_MAC or key == REST_DST_MAC:
                if value == mac_dontcare:
                    continue
            elif key == REST_SRC_IP or key == REST_DST_IP:
                if value == ip_dontcare:
                    continue
            elif key == REST_SRC_IPV6 or key == REST_DST_IPV6:
                if value == ipv6_dontcare:
                    continue
            elif value == 0:
                continue

            match.setdefault(key, value)

        return match
class Action(object):
    """Convert flow actions between ofctl string form and REST form."""

    @staticmethod
    def to_rest(flow):
        """Extract mark/meter/queue actions from *flow* for the REST reply."""
        if REST_ACTION in flow:
            actions = []
            for act in flow[REST_ACTION]:
                # Bug fix: the patterns are now raw strings.  '\{' and '\d'
                # inside a normal string literal are invalid escape
                # sequences (DeprecationWarning since Python 3.6, slated to
                # become a SyntaxError).
                field_value = re.search(r'SET_FIELD: \{ip_dscp:(\d+)', act)
                if field_value:
                    actions.append({REST_ACTION_MARK: field_value.group(1)})
                meter_value = re.search(r'METER:(\d+)', act)
                if meter_value:
                    actions.append({REST_ACTION_METER: meter_value.group(1)})
                queue_value = re.search(r'SET_QUEUE:(\d+)', act)
                if queue_value:
                    actions.append({REST_ACTION_QUEUE: queue_value.group(1)})
            action = {REST_ACTION: actions}
        else:
            action = {REST_ACTION: 'Unknown action type.'}
        return action
| [
"kthar10@renci.org"
] | kthar10@renci.org |
efdec7b3b3fca645a6647896a60d9df62a9902ca | ab26555301ac1196086c6c9329df44e6eefa15df | /python/coppeliasimapi2.py | 2dd57a130d3fe034e5ad0e716a0d7bae091c8d3b | [] | no_license | ljmanso/sonata | dd050a7efd4a02e06077488eb2dca3e6129ad6cf | 559165e7a7868aaf61d46f7c682464b6e40b8982 | refs/heads/master | 2022-11-22T03:17:11.639336 | 2020-07-17T17:21:50 | 2020-07-17T17:21:50 | 279,068,834 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,751 | py | #!/usr/bin/env python3
import os
import sys
import numpy as np
from math import cos, sin, atan2
from os import path
import time
from typing import List, Tuple, Sequence
import threading
from pyrep import PyRep
from pyrep.objects.shape import Shape
from pyrep.objects.object import Object
from pyrep.const import PrimitiveShape
# from pyrep.robots.mobiles.youbot import YouBot
from youbot import YouBot
from shapely.geometry import Polygon
#
# Wall
#
class Wall(object):
    """A static cuboid wall spanning the segment between two 2-D points."""

    def __init__(self, p1: Sequence[float], p2: Sequence[float]):
        super(Wall, self).__init__()
        self.p1, self.p2 = p1, p2
        # pre
        # Midpoint and heading of the p1->p2 segment.
        x, y = 0.5*(p1[0] + p2[0]), 0.5*(p1[1] + p2[1])
        angle = atan2(p2[1]-p1[1], p2[0]-p1[0])
        self.length = np.linalg.norm(np.array(p2)-np.array(p1))
        ss = Shape.create(type=PrimitiveShape.CUBOID,
                          color=[1,1,1], size=[self.length, 0.1, 1.],
                          position=[x, y, 0.5])
        # NOTE(review): color/position are already passed to create() above;
        # these setter calls look redundant but are kept as-is.
        ss.set_color([1,1,1])
        ss.set_position([x, y, 0.5])
        ss.set_dynamic(False)
        ss.rotate([0., 0., angle])
        self.handle = ss
        self.handle.set_model(True)

    def get_length(self):
        """Euclidean length of the wall segment."""
        return self.length

    def get_position(self, relative_to=None):
        return self.handle.get_position(relative_to=relative_to)

    def get_orientation(self, relative_to=None):
        return self.handle.get_orientation(relative_to=relative_to)

    def get_handle(self):
        """Raw CoppeliaSim object handle (int) of the wall shape."""
        return self.handle._handle

    def remove(self):
        """Delete the wall shape from the scene."""
        self.handle.remove()

    def check_collision(self, obj):
        return self.handle.check_collision(obj)

    def get_model_bounding_box(self):
        return self.handle.get_model_bounding_box()
class Goal(object):
    """Goal marker: a red cone floating above a flat green disc at (x, y)."""

    def __init__(self, x, y):
        super(Goal, self).__init__()
        # Red cone (visual beacon); created high, then lowered to z=1.5.
        ss1 = Shape.create(type=PrimitiveShape.CONE,
                           color=[1,0,0], size=[0.4, 0.4, 0.75],
                           position=[x, y, 2.5],orientation=[3.14,0,3.14])
        ss1.set_color([1, 0, 0])
        ss1.set_position([x, y, 1.5])
        ss1.set_orientation([3.14,0,3.14])
        ss1.set_dynamic(False)
        self.handle_add = ss1
        # Flat green disc on the floor marking the goal position.
        ss2 = Shape.create(type=PrimitiveShape.CONE,
                           color=[0,1,0], size=[0.75, 0.75, 0.0015],
                           position=[x, y, 0.000],orientation=[3.14,0,3.14])
        ss2.set_color([0, 1, 0])
        ss2.set_position([x, y, 0.000])
        ss2.set_orientation([3.14,0,3.14])
        ss2.set_dynamic(False)
        self.handle = ss2
        self.handle.set_model(True)

    def get_position(self, relative_to=None):
        return self.handle.get_position(relative_to=relative_to)

    def get_orientation(self, relative_to=None):
        return self.handle.get_orientation(relative_to=relative_to)

    def get_handle(self):
        """Raw CoppeliaSim object handle (int) of the floor disc."""
        return self.handle._handle

    def remove(self):
        """Delete both shapes (disc and cone) from the scene."""
        self.handle.remove()
        self.handle_add.remove()

    def check_collision(self, obj):
        return self.handle.check_collision(obj)

    def get_model_bounding_box(self):
        return self.handle.get_model_bounding_box()
class Human(object):
    """Wrapper around a path-planning Bill model loaded in the scene."""

    def __init__(self, handle: Object):
        super(Human, self).__init__()
        self.handle = handle
        handle.value = 0
        # Locate the goal-position dummy inside the Bill model tree; the
        # model walks toward wherever this dummy is placed.
        children = handle.get_objects_in_tree(handle)
        for child in children:
            name = child.get_name()
            if 'Bill_goalPosCylinder' in name:
                self.dummy_handle = child.get_parent()
        # NOTE(review): if no matching child exists, ``dummy_handle`` is
        # never set and later calls will raise AttributeError.
        # self.dummy_handle._set_property(prop_type: int, value: bool) -> None:

    def set_position(self, position, relative_to=None):
        """Teleport the human and move its walk target to the same spot."""
        self.handle.set_position(position, relative_to)
        self.move(position, relative_to)

    def set_orientation(self, position, relative_to=None):
        self.handle.set_orientation(position, relative_to)

    def move(self, position, relative_to=None ):
        """Set the walk target; the model then walks there on its own."""
        self.dummy_handle.set_position(position, relative_to)

    def get_position(self, relative_to=None):
        return self.handle.get_position(relative_to=relative_to)

    def get_orientation(self, relative_to=None):
        return self.handle.get_orientation(relative_to=relative_to)

    def get_handle(self):
        return self.handle._handle

    def remove(self):
        """Delete both the walk-target dummy and the model itself."""
        self.dummy_handle.remove()
        self.handle.remove()

    def check_collision(self, obj):
        # Collisions against another Human are tested handle-to-handle.
        if type(obj)==type(self):
            return self.handle.check_collision(obj.handle)
        else:
            return self.handle.check_collision(obj)

    def get_model_bounding_box(self):
        return self.dummy_handle.get_model_bounding_box()
class HumanOnPath(object):
    """Wrapper around a Bill model that follows a pre-defined path."""

    def __init__(self, handle: Object):
        super(HumanOnPath, self).__init__()
        self.handle = handle
        handle.value = 0
        # Find the actual Bill body inside the model tree; position queries
        # are answered from it rather than from the path root.
        children = self.handle.get_objects_in_tree()
        for child in children:
            name = child.get_name()
            if 'Bill_base' in name:
                self.human_handle = child

    def set_position(self, position):
        self.handle.set_position(position)

    def set_orientation(self, position):
        self.handle.set_orientation(position)

    def get_position(self, relative_to=None):
        return self.human_handle.get_position(relative_to=relative_to)

    def get_orientation(self, relative_to=None):
        return self.human_handle.get_orientation(relative_to=relative_to)
# CoppeliaSimAPI
class CoppeliaSimAPI(PyRep):
    """Convenience wrapper around PyRep for CoppeliaSim scenes.

    Resolves scene/model paths against *paths* plus ``./`` and
    ``$COPPELIASIM_ROOT``, and provides factory helpers for walls, goals,
    humans and the YouBot robot.
    """

    def __init__(self, paths: Sequence[str]=[]):
        super(CoppeliaSimAPI, self).__init__()
        # NOTE(review): the mutable default is never mutated (only
        # concatenated), so it is harmless here.
        self.coppelia_paths = paths + ['./', os.environ['COPPELIASIM_ROOT']+'/']

    def load_scene(self, scene_path: str, headless: bool=False):
        """Launch the first matching scene found on the search paths.

        Returns None when no search path contains *scene_path*.
        """
        for source in self.coppelia_paths:
            full_path = source + '/' + scene_path
            if path.exists(full_path):
                return self.launch(os.path.abspath(full_path), headless)

    def close(self):
        """Shut down the simulator."""
        self.shutdown()

    def create_wall(self, p1: Sequence[float], p2: Sequence[float]):
        """Create and return a Wall between the two 2-D points."""
        return Wall(p1, p2)

    def create_goal(self, p1: Sequence[float], p2: Sequence[float]):
        """Create and return a Goal marker at (p1, p2) = (x, y)."""
        return Goal(p1, p2)

    def get_object(self, name: str):
        """Look up a scene object by name."""
        return Object.get_object(name)

    def set_object_parent(self, obj, parent, keep_in_place=True):
        """Re-parent *obj* under *parent* via an embedded Lua script call."""
        obj = self.convert_to_valid_handle(obj)
        parent = self.convert_to_valid_handle(parent)
        code = f'sim.setObjectParent({obj}, {parent}, {keep_in_place})'
        ret = self.run_script(code)
        return ret

    def create_human(self):
        """Load a path-planning Bill model and wrap it as a Human."""
        model = 'models/people/path planning Bill.ttm'
        human_handle = self.load_model(model)
        return Human(human_handle)

    def create_human2(self):
        """Load a Bill-on-path model and wrap it as a HumanOnPath."""
        model = 'Bill_on_simple_path.ttm'
        human_handle = self.load_model(model)
        return HumanOnPath(human_handle)

    def load_model(self, model):
        """Import the first matching model file found on the search paths.

        Returns None when no search path contains *model*.
        """
        for source in self.coppelia_paths:
            full_path = source + '/' + model
            if path.exists(full_path):
                ret = self.import_model(os.path.abspath(full_path))
                return ret

    def remove_objects(self, humans_list,tables_list,laptops_list,plants_list,goal,walls_list):
        """Remove every object of a generated episode from the scene."""
        for wall in walls_list:
            wall.remove()
        for human in humans_list:
            human.remove()
        for table in tables_list:
            table.remove()
        for laptop in laptops_list:
            laptop.remove()
        for plant in plants_list:
            plant.remove()
        if goal is not None:
            goal.remove()
        # robot.remove()

    def remove_object(self, object_):
        object_.remove()

    # NOT INCLUDED IN THE DOCUMENTATION YET
    def get_youbot(self) -> YouBot:
        """Return the YouBot already present in the scene, if any."""
        children = self.get_objects_children('sim.handle_scene', children_type='sim.object_shape_type', filter_children=1+2)
        for h in children:
            name = self.get_object_name(h)
            if name == 'youBot':
                return YouBot(self, h)

    def create_youbot(self, x: float, y: float, z: float) -> YouBot:
        """Spawn a YouBot model at (x, y, z), applying its model offsets."""
        ix, iy, iz = YouBot.get_position_offsets()
        ret = self.create_model('models/robots/mobile/KUKA YouBot.ttm', x+ix, y+iy, z+iz, 0.)
        self.set_object_orientation(ret, *YouBot.get_orientation_offsets())
        return YouBot(self, ret)

    def set_joint_target_velocity(self, handle, target, asynch=False):
        # NOTE(review): ``get_call_object``/``self.client`` look like
        # leftovers from the legacy remote-API client; confirm they exist
        # on this class hierarchy before relying on these methods.
        call = self.get_call_object(asynch)
        return self.client.simxSetJointTargetVelocity(handle, target, call.get())

    def pause(self, asynch=False):
        # Bug fix: the original referenced an undefined name ``asynch``
        # and raised NameError on every call; it is now an explicit keyword
        # with the same default used by set_joint_target_velocity.
        call = self.get_call_object(asynch)
        self.client.simxPauseSimulation(call.get())

    def check_collision(self, obj1, obj2, asynch=False):
        """2-D bounding-polygon intersection test between two objects."""
        poly1 = self.getobject_polygon(obj1)
        poly2 = self.getobject_polygon(obj2)
        return poly1.intersects(poly2)

    def getobject_polygon(self, obj):
        """Axis-aligned footprint of *obj* in the world XY plane."""
        bb = obj.get_model_bounding_box()
        pos = obj.get_position()
        poly = []
        poly.append((bb[0]+pos[0], bb[2]+pos[1]))
        poly.append((bb[0]+pos[0], bb[3]+pos[1]))
        poly.append((bb[1]+pos[0], bb[3]+pos[1]))
        poly.append((bb[1]+pos[0], bb[2]+pos[1]))
        return Polygon(poly)

    def set_collidable(self, obj, asynch=False):
        """Mark *obj* collidable/measurable/detectable/renderable."""
        handle = self.convert_to_valid_handle(obj)
        return self.run_script(f'sim.setObjectSpecialProperty({handle},sim.objectspecialproperty_collidable+'
                               f'sim.objectspecialproperty_measurable+sim.objectspecialproperty_detectable_all'
                               f'+sim.objectspecialproperty_renderable)', asynch)

    @staticmethod
    def get_transform_matrix(x: float, y: float, z: float, angle: float):
        """Flattened 4x4 translate(x,y,z) @ rotateZ(angle) matrix (row-major list)."""
        rotate_matrix = np.matrix([[cos(angle), -sin(angle), 0., 0.],
                                   [sin(angle),  cos(angle), 0., 0.],
                                   [        0.,          0., 1., 0.],
                                   [        0.,          0., 0., 1.]])
        translate_matrix = np.matrix([[ 1., 0., 0., x ],
                                      [ 0., 1., 0., y ],
                                      [ 0., 0., 1., z ],
                                      [ 0., 0., 0., 1.]])
        return (translate_matrix @ rotate_matrix).flatten().tolist()[0]

    @staticmethod
    def get_transformation_matrix(x: float, y: float, angle: float):
        """3x3 homogeneous 2-D transform: rotation by *angle*, then (x, y)."""
        M = np.zeros( (3,3) )
        M[0][0], M[0][1], M[0][2] = +cos(angle), -sin(angle), x
        M[1][0], M[1][1], M[1][2] = +sin(angle), +cos(angle), y
        M[2][0], M[2][1], M[2][2] = 0., 0., 1.
        return M
| [
"pilar.bachiller@gmail.com"
] | pilar.bachiller@gmail.com |
143e7fb5eb3ea2d1e3afc369d304ee334bde63fc | 7b6c0318585f1639529002e8c69f23f3603775a9 | /detectron2/modeling/box_regression.py | 1488ad748288cefe9b4b2e9d28f00774362e203f | [
"Apache-2.0"
] | permissive | av777x/detectron2 | 41f2dda1198c21ef999da1cd0e28b980f68065ee | c1794881d6d2fac6af0b3206937d32628677469c | refs/heads/master | 2023-03-03T19:40:45.820084 | 2021-02-19T00:02:03 | 2021-02-19T00:02:03 | 340,200,661 | 0 | 0 | Apache-2.0 | 2021-02-19T00:02:04 | 2021-02-18T23:04:22 | null | UTF-8 | Python | false | false | 11,122 | py | # Copyright (c) Facebook, Inc. and its affiliates.
import math
from typing import List, Tuple
import torch
from fvcore.nn import giou_loss, smooth_l1_loss
from detectron2.layers import cat
from detectron2.structures import Boxes
# Value for clamping large dw and dh predictions. The heuristic is that we clamp
# such that dw and dh are no larger than what would transform a 16px box into a
# 1000px box (based on a small anchor, 16px, and a typical image size, 1000px).
_DEFAULT_SCALE_CLAMP = math.log(1000.0 / 16)
__all__ = ["Box2BoxTransform", "Box2BoxTransformRotated"]
@torch.jit.script
class Box2BoxTransform(object):
    """
    The box-to-box transform defined in R-CNN. The transformation is parameterized
    by 4 deltas: (dx, dy, dw, dh). The transformation scales the box's width and height
    by exp(dw), exp(dh) and shifts a box's center by the offset (dx * width, dy * height).
    """

    def __init__(
        self, weights: Tuple[float, float, float, float], scale_clamp: float = _DEFAULT_SCALE_CLAMP
    ):
        """
        Args:
            weights (4-element tuple): Scaling factors that are applied to the
                (dx, dy, dw, dh) deltas. In Fast R-CNN, these were originally set
                such that the deltas have unit variance; now they are treated as
                hyperparameters of the system.
            scale_clamp (float): When predicting deltas, the predicted box scaling
                factors (dw and dh) are clamped such that they are <= scale_clamp.
        """
        self.weights = weights
        self.scale_clamp = scale_clamp

    def get_deltas(self, src_boxes, target_boxes):
        """
        Get box regression transformation deltas (dx, dy, dw, dh) that can be used
        to transform the `src_boxes` into the `target_boxes`. That is, the relation
        ``target_boxes == self.apply_deltas(deltas, src_boxes)`` is true (unless
        any delta is too large and is clamped).

        Args:
            src_boxes (Tensor): source boxes, e.g., object proposals
            target_boxes (Tensor): target of the transformation, e.g., ground-truth
                boxes.
        """
        assert isinstance(src_boxes, torch.Tensor), type(src_boxes)
        assert isinstance(target_boxes, torch.Tensor), type(target_boxes)

        # Decompose both box sets (x1, y1, x2, y2) into center/size form.
        src_widths = src_boxes[:, 2] - src_boxes[:, 0]
        src_heights = src_boxes[:, 3] - src_boxes[:, 1]
        src_ctr_x = src_boxes[:, 0] + 0.5 * src_widths
        src_ctr_y = src_boxes[:, 1] + 0.5 * src_heights

        target_widths = target_boxes[:, 2] - target_boxes[:, 0]
        target_heights = target_boxes[:, 3] - target_boxes[:, 1]
        target_ctr_x = target_boxes[:, 0] + 0.5 * target_widths
        target_ctr_y = target_boxes[:, 1] + 0.5 * target_heights

        # Center offsets are normalized by the source size; size changes are
        # encoded in log space, each scaled by the configured weights.
        wx, wy, ww, wh = self.weights
        dx = wx * (target_ctr_x - src_ctr_x) / src_widths
        dy = wy * (target_ctr_y - src_ctr_y) / src_heights
        dw = ww * torch.log(target_widths / src_widths)
        dh = wh * torch.log(target_heights / src_heights)

        deltas = torch.stack((dx, dy, dw, dh), dim=1)
        assert (src_widths > 0).all().item(), "Input boxes to Box2BoxTransform are not valid!"
        return deltas

    def apply_deltas(self, deltas, boxes):
        """
        Apply transformation `deltas` (dx, dy, dw, dh) to `boxes`.

        Args:
            deltas (Tensor): transformation deltas of shape (N, k*4), where k >= 1.
                deltas[i] represents k potentially different class-specific
                box transformations for the single box boxes[i].
            boxes (Tensor): boxes to transform, of shape (N, 4)
        """
        deltas = deltas.float()  # ensure fp32 for decoding precision
        boxes = boxes.to(deltas.dtype)

        widths = boxes[:, 2] - boxes[:, 0]
        heights = boxes[:, 3] - boxes[:, 1]
        ctr_x = boxes[:, 0] + 0.5 * widths
        ctr_y = boxes[:, 1] + 0.5 * heights

        # Strided slicing (0::4, 1::4, ...) picks the same delta component
        # for each of the k class-specific transformations at once.
        wx, wy, ww, wh = self.weights
        dx = deltas[:, 0::4] / wx
        dy = deltas[:, 1::4] / wy
        dw = deltas[:, 2::4] / ww
        dh = deltas[:, 3::4] / wh

        # Prevent sending too large values into torch.exp()
        dw = torch.clamp(dw, max=self.scale_clamp)
        dh = torch.clamp(dh, max=self.scale_clamp)

        pred_ctr_x = dx * widths[:, None] + ctr_x[:, None]
        pred_ctr_y = dy * heights[:, None] + ctr_y[:, None]
        pred_w = torch.exp(dw) * widths[:, None]
        pred_h = torch.exp(dh) * heights[:, None]

        # Convert back from center/size to corner (x1, y1, x2, y2) form.
        pred_boxes = torch.zeros_like(deltas)
        pred_boxes[:, 0::4] = pred_ctr_x - 0.5 * pred_w  # x1
        pred_boxes[:, 1::4] = pred_ctr_y - 0.5 * pred_h  # y1
        pred_boxes[:, 2::4] = pred_ctr_x + 0.5 * pred_w  # x2
        pred_boxes[:, 3::4] = pred_ctr_y + 0.5 * pred_h  # y2
        return pred_boxes
@torch.jit.script
class Box2BoxTransformRotated(object):
    """
    The box-to-box transform defined in Rotated R-CNN. The transformation is parameterized
    by 5 deltas: (dx, dy, dw, dh, da). The transformation scales the box's width and height
    by exp(dw), exp(dh), shifts a box's center by the offset (dx * width, dy * height),
    and rotate a box's angle by da (radians).
    Note: angles of deltas are in radians while angles of boxes are in degrees.
    """

    def __init__(
        self,
        weights: Tuple[float, float, float, float, float],
        scale_clamp: float = _DEFAULT_SCALE_CLAMP,
    ):
        """
        Args:
            weights (5-element tuple): Scaling factors that are applied to the
                (dx, dy, dw, dh, da) deltas. These are treated as
                hyperparameters of the system.
            scale_clamp (float): When predicting deltas, the predicted box scaling
                factors (dw and dh) are clamped such that they are <= scale_clamp.
        """
        self.weights = weights
        self.scale_clamp = scale_clamp

    def get_deltas(self, src_boxes, target_boxes):
        """
        Get box regression transformation deltas (dx, dy, dw, dh, da) that can be used
        to transform the `src_boxes` into the `target_boxes`. That is, the relation
        ``target_boxes == self.apply_deltas(deltas, src_boxes)`` is true (unless
        any delta is too large and is clamped).

        Args:
            src_boxes (Tensor): Nx5 source boxes, e.g., object proposals
            target_boxes (Tensor): Nx5 target of the transformation, e.g., ground-truth
                boxes.
        """
        assert isinstance(src_boxes, torch.Tensor), type(src_boxes)
        assert isinstance(target_boxes, torch.Tensor), type(target_boxes)

        # Boxes are already in (ctr_x, ctr_y, w, h, angle) form.
        src_ctr_x, src_ctr_y, src_widths, src_heights, src_angles = torch.unbind(src_boxes, dim=1)

        target_ctr_x, target_ctr_y, target_widths, target_heights, target_angles = torch.unbind(
            target_boxes, dim=1
        )

        wx, wy, ww, wh, wa = self.weights
        dx = wx * (target_ctr_x - src_ctr_x) / src_widths
        dy = wy * (target_ctr_y - src_ctr_y) / src_heights
        dw = ww * torch.log(target_widths / src_widths)
        dh = wh * torch.log(target_heights / src_heights)
        # Angles of deltas are in radians while angles of boxes are in degrees.
        # the conversion to radians serve as a way to normalize the values
        da = target_angles - src_angles
        da = (da + 180.0) % 360.0 - 180.0  # make it in [-180, 180)
        da *= wa * math.pi / 180.0

        deltas = torch.stack((dx, dy, dw, dh, da), dim=1)
        assert (
            (src_widths > 0).all().item()
        ), "Input boxes to Box2BoxTransformRotated are not valid!"
        return deltas

    def apply_deltas(self, deltas, boxes):
        """
        Apply transformation `deltas` (dx, dy, dw, dh, da) to `boxes`.

        Args:
            deltas (Tensor): transformation deltas of shape (N, k*5).
                deltas[i] represents box transformation for the single box boxes[i].
            boxes (Tensor): boxes to transform, of shape (N, 5)
        """
        assert deltas.shape[1] % 5 == 0 and boxes.shape[1] == 5

        # Unsqueeze so each (N, 1) column broadcasts against the (N, k)
        # strided delta slices below.
        boxes = boxes.to(deltas.dtype).unsqueeze(2)

        ctr_x = boxes[:, 0]
        ctr_y = boxes[:, 1]
        widths = boxes[:, 2]
        heights = boxes[:, 3]
        angles = boxes[:, 4]

        wx, wy, ww, wh, wa = self.weights
        dx = deltas[:, 0::5] / wx
        dy = deltas[:, 1::5] / wy
        dw = deltas[:, 2::5] / ww
        dh = deltas[:, 3::5] / wh
        da = deltas[:, 4::5] / wa

        # Prevent sending too large values into torch.exp()
        dw = torch.clamp(dw, max=self.scale_clamp)
        dh = torch.clamp(dh, max=self.scale_clamp)

        pred_boxes = torch.zeros_like(deltas)
        pred_boxes[:, 0::5] = dx * widths + ctr_x  # x_ctr
        pred_boxes[:, 1::5] = dy * heights + ctr_y  # y_ctr
        pred_boxes[:, 2::5] = torch.exp(dw) * widths  # width
        pred_boxes[:, 3::5] = torch.exp(dh) * heights  # height

        # Following original RRPN implementation,
        # angles of deltas are in radians while angles of boxes are in degrees.
        pred_angle = da * 180.0 / math.pi + angles
        pred_angle = (pred_angle + 180.0) % 360.0 - 180.0  # make it in [-180, 180)

        pred_boxes[:, 4::5] = pred_angle

        return pred_boxes
def _dense_box_regression_loss(
    anchors: List[Boxes],
    box2box_transform: Box2BoxTransform,
    pred_anchor_deltas: List[torch.Tensor],
    gt_boxes: List[torch.Tensor],
    fg_mask: torch.Tensor,
    box_reg_loss_type="smooth_l1",
    smooth_l1_beta=0.0,
):
    """
    Compute loss for dense multi-level box regression.
    Loss is accumulated over ``fg_mask``.

    Args:
        anchors: #lvl anchor boxes, each is (HixWixA, 4)
        pred_anchor_deltas: #lvl predictions, each is (N, HixWixA, 4)
        gt_boxes: N ground truth boxes, each has shape (R, 4) (R = sum(Hi * Wi * A))
        fg_mask: the foreground boolean mask of shape (N, R) to compute loss on
        box_reg_loss_type (str): Loss type to use. Supported losses: "smooth_l1", "giou".
        smooth_l1_beta (float): beta parameter for the smooth L1 regression loss. Default to
            use L1 loss. Only used when `box_reg_loss_type` is "smooth_l1"

    Returns:
        Tensor: the scalar (sum-reduced) regression loss over foreground anchors.

    Raises:
        ValueError: if ``box_reg_loss_type`` is neither "smooth_l1" nor "giou".
    """
    # Concatenate per-level anchors into a single (R, 4) tensor.
    anchors = type(anchors[0]).cat(anchors).tensor  # (R, 4)
    if box_reg_loss_type == "smooth_l1":
        # Regress in delta space: encode GT boxes against the anchors.
        gt_anchor_deltas = [box2box_transform.get_deltas(anchors, k) for k in gt_boxes]
        gt_anchor_deltas = torch.stack(gt_anchor_deltas)  # (N, R, 4)
        loss_box_reg = smooth_l1_loss(
            cat(pred_anchor_deltas, dim=1)[fg_mask],
            gt_anchor_deltas[fg_mask],
            beta=smooth_l1_beta,
            reduction="sum",
        )
    elif box_reg_loss_type == "giou":
        # GIoU compares decoded boxes directly, so decode predictions first.
        pred_boxes = [
            box2box_transform.apply_deltas(k, anchors) for k in cat(pred_anchor_deltas, dim=1)
        ]
        loss_box_reg = giou_loss(
            torch.stack(pred_boxes)[fg_mask], torch.stack(gt_boxes)[fg_mask], reduction="sum"
        )
    else:
        raise ValueError(f"Invalid dense box regression loss type '{box_reg_loss_type}'")
    return loss_box_reg
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
a78ee24a96832a0173b264d9e6d62ade8714107c | d964f0a98f5bfa5f71d23de2df2d32a07219cc1f | /students/Volodzko/Task_12/tests/test_tsk_12_1.py | 9e41d9c8d9158eac0937bcc343d19da08f95018d | [] | no_license | AlexandrSech/Z49-TMS | cc8e3308e54c8e9f7ed03c6d7891da074e5c7c45 | d83200ff04c06772ef643b31569b37006420cd6b | refs/heads/main | 2023-07-28T02:40:17.355675 | 2021-08-15T16:54:27 | 2021-08-15T16:54:27 | 366,099,594 | 3 | 4 | null | 2021-09-08T16:52:11 | 2021-05-10T16:00:24 | Python | UTF-8 | Python | false | false | 1,490 | py | import unittest
from Task_12_1.task_12_1 import *
class TestTask_12_1(unittest.TestCase):
    """Unit tests for MyTime field normalisation in Task_12_1."""

    def my_object(self, h, m, s, h_r, m_r, s_r):
        """Build MyTime(h, m, s) and assert the resulting h/m/s fields."""
        time = MyTime(h, m, s)
        self.assertEqual(time.hours, h_r)
        self.assertEqual(time.minutes, m_r)
        self.assertEqual(time.seconds, s_r)

    def test_Mytime(self):
        # In-range values pass through unchanged.
        self.my_object(10, 15, 17, 10, 15, 17)
        # Overflowing fields carry over (80 s -> +1 min 20 s, 24 h wraps).
        self.my_object(50, 70, 80, 3, 11, 20)
        # Negative values normalise backwards from midnight.
        self.my_object(-10, -20, -30, 13, 39, 30)
        # Numeric strings are accepted and converted...
        self.my_object("10", "20", "30", 10, 20, 30)
        # ...while non-numeric strings yield None fields.
        self.my_object("aa", "bb", "cc", None, None, None)
        # (A duplicated, commented-out copy of these assertions that was
        # left behind as a dangling string literal has been removed.)
# Allow running this test module directly: ``python test_tsk_12_1.py``.
if __name__ == '__main__':
    unittest.main()
| [
"volodko_ura@mail.ru"
] | volodko_ura@mail.ru |
ad4aad0835ca0dcd62db019c4780755f1c0304e6 | 0325048809df9b0b5f26a22ab790ff94947a1728 | /depend.py | a89dbc9c66b9e36e263cc44d1d4b5c4dc9a8056f | [] | no_license | madisong/aiudirog | 78a2b83ed2bc8dc76a4a878cc5353c22de794d4f | 9285fe020b0b96e0e729f6a62fd2c01f485ed898 | refs/heads/master | 2021-01-01T16:34:22.421712 | 2014-08-18T01:40:59 | 2014-08-18T01:40:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | #Install Linux (Debian) dependencies.
import os
try:
import wx
except:
os.system("sudo apt-get install python-wxgtk2.8 python-wxtools wx2.8-i18n libwxgtk2.8-dev libgtk2.0-dev")
try:
import keyring
except:
os.system("sudo apt-get install python-keyring")
try:
import BeautifulSoup
except:
try:
import pip
except:
os.system("sudo apt-get install python-pip")
os.system("pip install BeautifulSoup")
try:
import googlevoice
except:
os.system("python ./pygooglevoice-0.5/setup.py build")
os.system("python ./pygooglevoice-0.5/setup.py install")
| [
"aiudirog@gilbertschool.org"
] | aiudirog@gilbertschool.org |
1f274d45c819c75e5909ef811396617f68af6e41 | 32271508e449e8842f38186e5e4528696b41d1f9 | /tabby/tab/migrations/0025_remove_race_win_market.py | 16142941a5e3532c0ac93d4c68b1aef608b2742a | [] | no_license | Tjorriemorrie/tabby | d623ad5be3ae53b9370fd400f362d940e7191ac3 | 09c697bd48fdc4de548c911f1fd81b2a7e4b511b | refs/heads/master | 2022-12-10T10:01:40.317751 | 2019-12-13T04:31:12 | 2019-12-13T04:31:12 | 100,076,546 | 4 | 2 | null | 2022-12-08T06:51:55 | 2017-08-11T23:26:00 | Jupyter Notebook | UTF-8 | Python | false | false | 326 | py | # Generated by Django 2.0.1 on 2018-01-30 01:00
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('tab', '0024_auto_20180128_1356'),
]
operations = [
migrations.RemoveField(
model_name='race',
name='win_market',
),
]
| [
"jacoj82@gmail.com"
] | jacoj82@gmail.com |
21dae073458e0bac5899c85d1f117f88958119dc | fb78fd824e904705fb1ee09db8b3c20cc3902805 | /django-myshop/myshop/settings.py | e93a63838401dfb03886299b9b686dadf4dae54b | [] | no_license | Roderich25/mac | 8469833821ac49c539a744db29db5a41d755ad55 | 4f7fe281c88f0199b85d0ac99ce41ffb643d6e82 | refs/heads/master | 2023-01-12T05:55:12.753209 | 2021-11-26T01:16:24 | 2021-11-26T01:16:24 | 207,029,750 | 0 | 0 | null | 2023-01-07T11:49:23 | 2019-09-07T21:51:53 | Jupyter Notebook | UTF-8 | Python | false | false | 3,011 | py | import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ea=e^w3s$qfrb9_+5oq962$u(e7xq&me_b%ez7^c!6&6hm-q0d'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'shop.apps.ShopConfig',
'cart.apps.CartConfig',
'orders.apps.OrdersConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'myshop.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'cart.context_processors.cart',
],
},
},
]
WSGI_APPLICATION = 'myshop.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
CART_SESSION_ID = 'cart'
| [
"rodrigoavilasolis@gmail.com"
] | rodrigoavilasolis@gmail.com |
e350c375091476a2506c1e698410dc3a6adfbfb8 | 30f8afce1ba484183d8e1e14aae76cabb2d92354 | /pbase/day29/old.py | 741ae415c047f7297fc6ca49c5aab16131a342b0 | [] | no_license | brooot/Python_Base_Codes | d83e8c3b8a37b86672412c812fdb0d47deb67836 | a864685e160b5df4162a6f9fb910627eda702aaf | refs/heads/master | 2023-04-10T20:08:39.161289 | 2021-03-25T12:59:23 | 2021-03-25T12:59:23 | 200,570,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 790 | py | from socket import *
import sys
class FtpClient(object):
    """Minimal FTP-style client that remembers the server address."""

    def __init__(self, serveraddr):
        # (host, port) tuple of the server to contact.
        self.serveraddr = serveraddr

    def do_list(self):
        # Open a fresh TCP connection for the LIST request.
        # Bug fix: the original connected to ``self.ser``, an attribute that
        # is never set (AttributeError at runtime); the stored address is
        # ``self.serveraddr``.
        sockfd = socket()
        sockfd.connect(self.serveraddr)
def main():
    """Interactive command loop: read a command and dispatch it to FtpClient."""
    if len(sys.argv) < 3:
        print('argv is error')
        # Bug fix: the original only printed and then crashed on
        # ``sys.argv[1]``; abort with a non-zero status instead.
        sys.exit(1)
    host = sys.argv[1]
    port = int(sys.argv[2])
    addr = (host, port)
    # One client object is enough; the original rebuilt it on every loop pass.
    ftp = FtpClient(addr)
    while True:
        print('**command **')
        print('**list **')
        print('**get **')
        print('**put filename**')
        print('**quit **')
        data = input('shuru')
        if data[:4] == 'list':
            ftp.do_list()
        elif data[:3] == 'get':
            ftp.do_get()
        elif data == 'put':
            pass
        else:
            # Bug fix: ``sys.quit`` does not exist; ``sys.exit`` is intended.
            sys.exit(0)

if __name__ == '__main__':
    main()
| [
"1442704671@qq.com"
] | 1442704671@qq.com |
b9cb41b35cc969196c8ec6633fcff5fa349c91a6 | c5abb3fd23687c9200b055f5762ee01e52ce1a76 | /project_euler/multiplesOf3and5.py | 09dcddd7f2eb33d56936732e4dbbba55918dc1f9 | [] | no_license | hxperl/hackerrank | c0fc8f99c38380d56458cd57e16f7198b8e0037c | 57e6984e8dced08caaf14742465668de6ba23b4c | refs/heads/master | 2021-05-15T17:17:23.372158 | 2020-04-14T06:22:11 | 2020-04-14T06:22:11 | 107,495,176 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | #!/bin/python3
import sys
# Project Euler #1 (HackerRank variant): for each query n, sum all multiples
# of 3 or 5 strictly below n using the closed-form arithmetic-series formula
# with inclusion-exclusion (add 3s and 5s, subtract the double-counted 15s).
sums = list()
t = int(input().strip())  # number of test cases
for a0 in range(t):
    n = int(input().strip())
    n -= 1  # "below n" => largest candidate is n-1
    # 3 + 6 + ... = 3 * k*(k+1)/2 with k = n//3, and likewise for 5 and 15.
    tmp = 3 * (n//3 * ((n//3)+1)) // 2
    tmp += 5 * (n//5 * ((n//5)+1)) // 2
    tmp -= 15 * (n//15 * ((n//15)+1)) // 2
    sums.append(tmp)
print(*sums, sep="\n")
"hxperl@gmail.com"
] | hxperl@gmail.com |
cd9ce63f65b34f85c2d8bf78f234cf30286802fa | e6eb5c33be238a9281b457630e32e1ae181ecea4 | /PDB_RMSD_Calculate | a10d0fe66d6d187c4c28f1e8ce2d86174ebfb6e8 | [] | no_license | Power-dog/21.7.4 | 55f7e3c2fcdf4b6b4f5381eb2727ddaae61a1fc0 | 1524e7d936377b17ff5d2374126ae8d2d5d40dc4 | refs/heads/master | 2023-06-29T17:28:39.135427 | 2021-07-26T08:35:50 | 2021-07-26T08:35:50 | 389,521,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,681 | # 한 파일 내에서의 MODEL n과 n2의 RMSD를 비교하는 프로그램
def pdb(n, n2, path="C:/Users/SML/Desktop/우원/7월/3주/1l2y.pdb"):
    """Print and return the RMSD between MODEL *n* and MODEL *n2* of a PDB file.

    The file is split on "\\nMODEL"; inside each model only the text between
    'ATOM' and 'TER' is parsed, taking whitespace-separated columns 7-9 as
    the x, y, z coordinates of each atom.

    ``path`` defaults to the original hard-coded file so the existing call
    ``pdb(1, 4)`` keeps working, but any PDB file can now be analysed.
    Fixes over the original: the file handle is closed, the duplicated
    coordinate-extraction code is factored into one helper, and the RMSD is
    returned (the original only printed it).
    """
    import math

    def _coords(model_text):
        # ATOM records run up to (but not including) the newline before 'TER'.
        body = model_text[model_text.index('ATOM'):model_text.index('TER') - 1]
        xs, ys, zs = [], [], []
        for line in body.split('\n'):
            fields = line.split()
            xs.append(float(fields[6]))
            ys.append(float(fields[7]))
            zs.append(float(fields[8]))
        return xs, ys, zs

    with open(path, 'r') as handle:
        models = handle.read().split('\nMODEL')
    x1, y1, z1 = _coords(models[n])
    x2, y2, z2 = _coords(models[n2])
    # Squared per-atom displacement, then root of the mean.
    squared = [
        (x2[i] - x1[i]) ** 2 + (y2[i] - y1[i]) ** 2 + (z2[i] - z1[i]) ** 2
        for i in range(len(x1))
    ]
    rmsd = math.sqrt(sum(squared) / len(squared))
    print(rmsd)
    return rmsd
pdb(1, 4)
| [
"Ekdzhdwhdk@catholic.ac.kr"
] | Ekdzhdwhdk@catholic.ac.kr | |
a296b492f907bd7c9b0172fb9bddb55b6caf2e3b | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_15314.py | d42d7ec70ac3476ead6abba82a8293d71aa4e166 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | # Different results when using sklearn RandomizedPCA with sparse and dense matrices
RandomizedPCA
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
c51c27f98dfdd33ed8055495236836b200efc808 | 93652e0f73558ffa24059647324f79ba043ba241 | /topi/tests/python/test_topi_bitserial_conv2d.py | 6df18483a45f9263f685e4ade3c425b75b29eb76 | [
"Apache-2.0"
] | permissive | souptc/tvm | 830b1444435b6bda267df305538a783eb687d473 | a8574e7bb814997cb3920a72035071899635b753 | refs/heads/master | 2020-03-25T12:42:20.686770 | 2018-08-06T21:07:38 | 2018-08-06T21:07:38 | 143,789,191 | 1 | 0 | Apache-2.0 | 2018-08-06T22:18:20 | 2018-08-06T22:18:19 | null | UTF-8 | Python | false | false | 4,821 | py | import os
import numpy as np
import tvm
import topi
import topi.testing
from tvm.contrib.pickle_memoize import memoize
from topi.util import get_const_tuple
from tvm.contrib import util
from tvm.contrib.pickle_memoize import memoize
def generate_quantized_np(shape, bits, out_dtype):
    """Uniform random integers representable in *bits* bits, cast to *out_dtype*."""
    upper = 1 << bits  # exclusive upper bound: values lie in [0, 2**bits)
    return np.random.randint(0, upper, size=shape).astype(out_dtype)
def verify_bitserial_conv2d_nchw(batch, in_size, in_channel, num_filter, kernel, stride, padding,
                                 activation_bits, weight_bits, dorefa):
    """Check the NCHW bit-serial conv2d against the numpy reference implementation."""
    # Square spatial input.
    in_height = in_width = in_size
    input_type='uint32'
    out_dtype='int32'
    # Build placeholders, the compute, and the schedule under the llvm target.
    with tvm.target.create('llvm'):
        A = tvm.placeholder((batch, in_channel, in_height, in_width), dtype=input_type, name='A')
        W = tvm.placeholder((num_filter, in_channel, kernel, kernel), dtype=input_type, name='W')
        B = topi.nn.bitserial_conv2d(A, W, stride, padding, activation_bits, weight_bits,
                                     out_dtype=out_dtype, layout="NCHW", dorefa=dorefa)
        s = topi.generic.schedule_bitserial_conv2d_nchw([B])
    a_shape = get_const_tuple(A.shape)
    w_shape = get_const_tuple(W.shape)
    dtype = A.dtype

    def get_ref_data():
        # Random quantized activations/weights plus a numpy golden output.
        a_np = generate_quantized_np(get_const_tuple(A.shape), activation_bits, input_type)
        w_np = generate_quantized_np(get_const_tuple(W.shape), weight_bits, input_type)
        if dorefa:
            # DoReFa-style binarization: map weight bit 1 -> +1, else -> -1.
            w_ = np.copy(w_np).astype(out_dtype)
            for x in np.nditer(w_, op_flags=['readwrite']):
                x[...] = 1 if x == 1 else -1
            b_np = topi.testing.conv2d_nchw_python(a_np.astype(out_dtype), w_, stride, padding)
        else:
            b_np = topi.testing.conv2d_nchw_python(a_np, w_np, stride, padding)
        return a_np, w_np, b_np
    a_np, w_np, b_np = get_ref_data()

    # Run the compiled kernel on CPU and compare against the reference.
    ctx = tvm.cpu(0)
    a = tvm.nd.array(a_np, ctx)
    w = tvm.nd.array(w_np, ctx)
    b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx)
    func = tvm.build(s, [A, W, B], "llvm")
    func(a, w, b)
    np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)
def verify_bitserial_conv2d_nhwc(batch, in_size, in_channel, num_filter, kernel, stride, padding,
                                 activation_bits, weight_bits, dorefa):
    """Check the NHWC bit-serial conv2d against the numpy reference implementation."""
    in_height = in_width = in_size
    input_type='uint32'
    out_dtype='int32'
    # NHWC layout: weights are HWIO, activations are NHWC.
    with tvm.target.create('llvm'):
        A = tvm.placeholder((batch, in_height, in_width, in_channel), dtype=input_type, name='A')
        W = tvm.placeholder((kernel, kernel, in_channel, num_filter), dtype=input_type, name='W')
        B = topi.nn.bitserial_conv2d(A, W, stride, padding, activation_bits, weight_bits, out_dtype=out_dtype,
                                     layout="NHWC", dorefa=dorefa)
        s = topi.generic.schedule_bitserial_conv2d_nhwc([B])
    a_shape = get_const_tuple(A.shape)
    w_shape = get_const_tuple(W.shape)
    dtype = A.dtype

    def get_ref_data():
        a_np = generate_quantized_np(get_const_tuple(A.shape), activation_bits, input_type)
        w_np = generate_quantized_np(get_const_tuple(W.shape), weight_bits, input_type)
        if dorefa:
            # DoReFa-style binarization: map weight bit 1 -> +1, else -> -1.
            w_ = np.copy(w_np).astype(out_dtype)
            for x in np.nditer(w_, op_flags=['readwrite']):
                x[...] = 1 if x == 1 else -1
            b_np = topi.testing.conv2d_nhwc_python(a_np, w_, stride, padding).astype(out_dtype)
        else:
            b_np = topi.testing.conv2d_nhwc_python(a_np, w_np, stride, padding).astype(out_dtype)
        return a_np, w_np, b_np
    a_np, w_np, b_np = get_ref_data()

    ctx = tvm.cpu(0)
    a = tvm.nd.array(a_np, ctx)
    w = tvm.nd.array(w_np, ctx)
    b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx)
    func = tvm.build(s, [A, W, B], 'llvm')
    func(a, w, b)
    np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)
def test_bitserial_conv2d():
    """Sweep both layouts over the same activation/weight bit-width combinations."""
    in_size = 56
    ic, oc = 64, 64
    k = 3
    stride = 1
    pad = 1
    # (activation_bits, weight_bits, dorefa) combinations, in the original order.
    combos = [(1, 1, True), (2, 1, True), (1, 1, False), (2, 1, False), (2, 2, False)]
    for abits, wbits, dorefa in combos:
        verify_bitserial_conv2d_nchw(1, in_size, ic, oc, k, stride, pad, abits, wbits, dorefa)
    for abits, wbits, dorefa in combos:
        verify_bitserial_conv2d_nhwc(1, in_size, ic, oc, k, stride, pad, abits, wbits, dorefa)

if __name__ == "__main__":
    test_bitserial_conv2d()
"tqchen@users.noreply.github.com"
] | tqchen@users.noreply.github.com |
3d1f1756528afaa87544ba2b6c62e67f3b6572f7 | b2c24abff86b28ca8a495b3a3c3227f070737aa2 | /parlai/agents/AdaND/utils.py | 2467ef5a277ddf9fdc24d14669efbf63843aff07 | [
"MIT"
] | permissive | hengyicai/AdaND | d5dda1b2fcd2abd17be6603de632f0515382b37b | 5e3fefb1cf40c42215a37246efc64958ae6db005 | refs/heads/master | 2023-09-01T07:38:49.076947 | 2020-10-19T04:58:00 | 2020-10-19T04:58:00 | 204,633,631 | 10 | 2 | MIT | 2023-08-11T19:52:23 | 2019-08-27T06:20:39 | Python | UTF-8 | Python | false | false | 947 | py | import torch.nn as nn
def reverse(lst):
    """Return a reversed copy of *lst*, preserving its sequence type."""
    return lst[slice(None, None, -1)]
class FeedForward(nn.Module):
    """Plain MLP: each hidden layer is Linear -> Dropout -> activation; the
    final Linear output is returned untouched.

    ``activation`` names any ``torch.nn`` activation class (e.g. "Tanh").
    """

    def __init__(self, input_dim, out_dim, hidden_sizes=(512,),
                 activation="Tanh", bias=True, dropout=0.1):
        super(FeedForward, self).__init__()
        # Resolve the activation class by name and instantiate it once.
        self.activation = getattr(nn, activation)()
        dims = [input_dim, *hidden_sizes, out_dim]
        self.linears = nn.ModuleList(
            nn.Linear(d_in, d_out, bias=bias)
            for d_in, d_out in zip(dims[:-1], dims[1:])
        )
        self.num_layer = len(self.linears)
        self.dropout_layer = nn.Dropout(dropout)

    def forward(self, input_):
        out = input_
        for idx, layer in enumerate(self.linears):
            out = layer(out)
            # Dropout + non-linearity on every layer except the last.
            if idx < self.num_layer - 1:
                out = self.activation(self.dropout_layer(out))
        return out
| [
"caihengyi@ict.ac.cn"
] | caihengyi@ict.ac.cn |
19a193a3410c3f1777a2e80692d94b6d8f38c74a | cd4dbdab77668dbe09298d46879373c331001f06 | /pythonDjango/notes.py | 7dd0b66436f40cb963526724c694e449b6e20033 | [] | no_license | robertkoszegi/ga-classwork | add2bd00fa8d677dea5af1656505b6b1e16f5214 | 78678b8efc991564fbc957f9ed877549e2518dc5 | refs/heads/main | 2023-08-11T20:39:04.570050 | 2021-09-18T18:36:47 | 2021-09-18T18:36:47 | 389,796,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | # three-step guide to making model changes:
# Change your models (in models.py).
# Run python manage.py makemigrations to create migrations for those changes
# Run python manage.py migrate to apply those changes to the database.
| [
"robkoszegi@gmail.com"
] | robkoszegi@gmail.com |
fdef0e55fea15ec9925ee84443a708abafdfecc5 | 4a8775eac5a5f39400848b4c81476c49ddfbd871 | /apps/api-test/urls.py | a999262fcf5944dbfda9b7441dd035df4a93df14 | [] | no_license | wdudek82/quotarium-backend | ec6d73c13ed06a201066442f108cdbcc4777da5e | b37cbbe1a136f89fe10ed6d6418a69d585bec8ff | refs/heads/master | 2022-12-10T18:32:37.564838 | 2018-07-08T20:40:28 | 2018-07-08T20:40:28 | 140,035,629 | 0 | 0 | null | 2022-12-08T02:17:15 | 2018-07-06T22:39:13 | Python | UTF-8 | Python | false | false | 142 | py | from django.conf.urls import url
from .views import UserViewSet
# URL routes for the api-test app: GET/POST on /users/ are handled by UserViewSet.
# NOTE(review): plain ``as_view()`` with no action map suggests the class is an
# APIView despite the "ViewSet" name — confirm against views.py.
urlpatterns = [
    url(r'^users/$', UserViewSet.as_view(), name='users'),
]
"wdudek82@gmail.com"
] | wdudek82@gmail.com |
1bbf5ffc9207b2158d368fcd96d197f1026c6620 | 22e6960251e43746656a2b42f7ac1c1ae66a8ba9 | /Scale/data.py | 972dda0c6c6e727bc33d4437e5226c7087853ecd | [] | no_license | Zziwei/Fairness-in-Cold-Start-Recommendation | c48f26c456fe0915f3d64e0edd30d26853371d7d | a13f0bdf36a5e626c5c0d2e0b32f1b17aea59a29 | refs/heads/main | 2023-07-01T05:54:14.701777 | 2021-07-31T22:58:42 | 2021-07-31T22:58:42 | 363,529,120 | 11 | 4 | null | null | null | null | UTF-8 | Python | false | false | 4,535 | py | import numpy as np
import tensorflow as tf
import scipy.sparse
import utils
import pandas as pd
"""
This module contains class and methods related to data used in DropoutNet
"""
def load_eval_data(test_data, name, train_data, warm_test):
    """Wrap *test_data* triplets in an EvalData object, logging timing via utils.timer.

    When *warm_test* is given, its rows are appended to the cold test triplets
    so warm and cold interactions are evaluated together.  *train_data* is
    used by EvalData to mask already-seen (user, item) pairs during inference.
    """
    timer = utils.timer()
    if warm_test is not None:
        test_data = np.concatenate([test_data, warm_test], axis=0)
    timer.toc('read %s triplets %s' % (name, test_data.shape)).tic()
    eval_data = EvalData(
        test_data,
        train=train_data
    )
    timer.toc('loaded %s' % name).tic()
    print(eval_data.get_stats_string())
    return eval_data
class EvalData:
    # Re-indexes the test interactions into a compact (local) id space that
    # covers only the users and items appearing in the test triplets, and
    # builds the sparse matrices needed for batched evaluation.
    # NOTE(review): ``test_triplets`` is indexed with ['uid']/['iid'] and also
    # iterated positionally (_t[0], _t[1]) — assumes a numpy structured array
    # whose first two fields are uid, iid; confirm against the loader.

    def __init__(self, test_triplets, train):
        # build map both-ways between compact and original indices
        # compact indices only contains:
        # 1) items in test set
        # 2) users who interacted with such test items

        # item ids in test set
        self.test_item_ids = np.unique(test_triplets['iid'])
        # item global id to test local id mapping, and local id to global id mapping
        self.test_item_ids_map = {iid: i for i, iid in enumerate(self.test_item_ids)}
        self.test_item_new2old_list = np.zeros(len(self.test_item_ids_map)).astype(int)
        for old in self.test_item_ids_map:
            self.test_item_new2old_list[self.test_item_ids_map[old]] = old
        # user ids in test set
        self.test_user_ids = np.unique(test_triplets['uid'])
        # user global id to test local id mapping
        self.test_user_ids_map = {user_id: i for i, user_id in enumerate(self.test_user_ids)}
        self.test_user_new2old_list = np.zeros(len(self.test_user_ids_map)).astype(int)
        for old in self.test_user_ids_map:
            self.test_user_new2old_list[self.test_user_ids_map[old]] = old
        # generate a sparse user-item matrix by local ids for the test set
        _test_i_for_inf = [self.test_user_ids_map[_t[0]] for _t in test_triplets]
        _test_j_for_inf = [self.test_item_ids_map[_t[1]] for _t in test_triplets]
        self.R_test_inf = scipy.sparse.coo_matrix(
            (np.ones(len(_test_i_for_inf)),
             (_test_i_for_inf, _test_j_for_inf)),
            shape=[len(self.test_user_ids), len(self.test_item_ids)]
        ).tolil(copy=False)
        if train is not None:
            # Keep only training pairs whose user AND item also occur in the
            # test set; they will be masked out at inference time.
            train_ij_for_inf = [(self.test_user_ids_map[_t[0]], self.test_item_ids_map[_t[1]]) for _t
                                in train
                                if _t[1] in self.test_item_ids_map and _t[0] in self.test_user_ids_map]
            self.R_train_inf = scipy.sparse.coo_matrix((
                np.ones(len(train_ij_for_inf)),
                zip(*train_ij_for_inf)), shape=self.R_test_inf.shape).tolil(copy=False)
        else:
            self.R_train_inf = None
        # allocate fields
        self.tf_eval_train = None  # per-batch tf.SparseTensorValue masks, filled by init_tf
        self.eval_batch = None  # batch windows for evaluation, which is for batches of users

    def init_tf(self, eval_batch_size):
        # Pre-compute user-batch windows and, if training data is present, a
        # sparse mask per batch that pushes seen items to a large negative
        # score so they cannot be recommended again.
        eval_l = self.R_test_inf.shape[0]  # number of users in the test set
        # generate batch windows for evaluation, which is for batches of users
        self.eval_batch = [(x, min(x + eval_batch_size, eval_l)) for x in range(0, eval_l, eval_batch_size)]
        if self.R_train_inf is not None:
            self.tf_eval_train = []
            for (eval_start, eval_finish) in self.eval_batch:
                _ui = self.R_train_inf[eval_start:eval_finish, :].tocoo()
                _ui = np.concatenate([_ui.row.reshape((-1, 1)), _ui.col.reshape((-1, 1))], axis=1)
                self.tf_eval_train.append(
                    tf.SparseTensorValue(
                        indices=_ui,
                        values=np.full(_ui.shape[0], -100000, dtype=np.float32),
                        dense_shape=[eval_finish - eval_start, self.R_train_inf.shape[1]]
                    )
                )
        else:
            self.tf_eval_train = None

    def get_stats_string(self):
        # Human-readable summary of matrix shapes and non-zero counts.
        return ('\tn_test_users:[%d]\n\tn_test_items:[%d]' % (len(self.test_user_ids), len(self.test_item_ids))
                + '\n\tR_train_inf: %s' % (
                    'no R_train_inf for cold' if self.R_train_inf is None else 'shape=%s nnz=[%d]' % (
                        str(self.R_train_inf.shape), len(self.R_train_inf.nonzero()[0])
                    )
                )
                + '\n\tR_test_inf: shape=%s nnz=[%d]' % (
                    str(self.R_test_inf.shape), len(self.R_test_inf.nonzero()[0])
                ))
| [
"zhuziwei@tamu.edu"
] | zhuziwei@tamu.edu |
e6a2fb17e898a3dedb7ffb531fb3c9dcd46ee0a7 | 6189f34eff2831e3e727cd7c5e43bc5b591adffc | /WebMirror/management/rss_parser_funcs/feed_parse_extractRandnovelstlsamatchateaWordpressCom.py | d0cebc880f2171d48c773fdaf78dc3e6e389d55b | [
"BSD-3-Clause"
] | permissive | fake-name/ReadableWebProxy | 24603660b204a9e7965cfdd4a942ff62d7711e27 | ca2e086818433abc08c014dd06bfd22d4985ea2a | refs/heads/master | 2023-09-04T03:54:50.043051 | 2023-08-26T16:08:46 | 2023-08-26T16:08:46 | 39,611,770 | 207 | 20 | BSD-3-Clause | 2023-09-11T15:48:15 | 2015-07-24T04:30:43 | Python | UTF-8 | Python | false | false | 586 | py |
def extractRandnovelstlsamatchateaWordpressCom(item):
    '''
    Parser for 'randnovelstlsamatchatea.wordpress.com'
    '''
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip preview posts and posts without any chapter/volume number.
    if not (chp or vol) or "preview" in title.lower():
        return None
    # (tag to match, release-group name, translation type)
    for tagname, name, tl_type in (
            ('PRC', 'PRC', 'translated'),
            ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
| [
"something@fake-url.com"
] | something@fake-url.com |
16106a80a06c2b6a9745ba4cca55bd6be164dc48 | 5c55d83a16628cf2115f5276fe13566334b48843 | /Codeforces/Problem-1475F.py | 9bea59ddfb6250c88f1ddff90d8473b9852e7c53 | [] | no_license | MahirJhaveri/CompetitiveProgramming | a07a8dd5fd27351b83d4d0b422cc0ae118e6458c | 064b87d1ac786f404213d4fa0da18332b278845f | refs/heads/master | 2021-09-17T23:23:54.605010 | 2021-08-02T02:52:40 | 2021-08-02T02:52:40 | 230,447,015 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,231 | py | # Problem 1475F - Unusual Matrix
def solve():
    """One test case of Codeforces 1475F: can matrix a be turned into b by
    flipping whole rows/columns (xor)?  Tries two column normalizations and
    then checks every remaining row is either equal or fully inverted."""
    n = int(input())
    a1 = []
    a2 = []
    for i in range(n):
        arr = input().rstrip()
        # Two working copies of matrix a, one per normalization attempt.
        a1.append([int(arr[x]) for x in range(n)])
        a2.append([int(arr[x]) for x in range(n)])
    input()  # blank separator line between the two matrices
    b = []
    for i in range(n):
        arr = input().rstrip()
        b.append([int(arr[x]) for x in range(n)])
    for j in range(n):
        # NOTE(review): ``==`` makes this line a no-op comparison; an
        # assignment (``=``) flipping a2's first-row entry was probably
        # intended — confirm against accepted submissions.
        a2[0][j] == 1-a2[0][j]
        if a1[0][j] == b[0][j]:
            # Flip column j in a2 so its first row disagrees with b everywhere.
            for i in range(n):
                a2[i][j] = 1-a2[i][j]
        else:
            # Flip column j in a1 so its first row matches b everywhere.
            for i in range(n):
                a1[i][j] = 1-a1[i][j]
    r1 = r2 = True
    # A candidate works iff each remaining row is identical to b's row
    # (s1 == 0) or its exact complement (s2 == 0).
    for i in range(1,n):
        s1 = s2 = 0
        for j in range(n):
            s1 += abs(a1[i][j] - b[i][j])
            s2 += abs(a1[i][j] + b[i][j] - 1)
        if s1 != 0 and s2 != 0:
            r1 = False
    for i in range(1,n):
        s1 = s2 = 0
        for j in range(n):
            s1 += abs(a2[i][j] - b[i][j])
            s2 += abs(a2[i][j] + b[i][j] - 1)
        if s1 != 0 and s2 != 0:
            r2 = False
    if r1 or r2:
        print("YES")
    else:
        print("NO")
def main():
    # Standard Codeforces multi-test driver: first line is the case count.
    T = int(input())
    for c in range(T):
        solve()

main()
| [
"noreply@github.com"
] | MahirJhaveri.noreply@github.com |
fc671086c736037ad538ae6e90677684639d03d8 | 8a025c88c554c782885ccf780faf7af51f8d2583 | /stu.py | 345b718b7d85569d755d248bf840883f8a5233e2 | [] | no_license | LIGHT1213/PythonStudy | cc97a6372d5347229d014fa7fa499ac6af1abd2c | b3ac29777e395dee1282d9d422f1f6556cc2b3f6 | refs/heads/master | 2020-04-08T14:17:08.773627 | 2018-12-19T10:32:17 | 2018-12-19T10:32:17 | 159,430,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | Tempin=input("请输入一个带符号的数组")
# Bug fix: the original tested the whole numeric prefix for membership in a
# list of single-digit characters, so any temperature with more than one
# digit (e.g. "32F") was rejected as malformed.  Check that every character
# before the unit suffix is a digit instead, and convert with float() rather
# than eval() — no need to evaluate arbitrary user input.
if Tempin[0:-1].isdigit():
    if Tempin[-1] in ['f', 'F']:
        C = (float(Tempin[:-1]) - 32) / 1.8
        print("转化后的温度为{:.2f}C".format(C))
    elif Tempin[-1] in ['c', 'C']:
        F = 1.8 * float(Tempin[:-1]) + 32
        print("转化后的温度为{:.2f}F".format(F))
    else:
        print("输入格式错误")
else:
    print("输入格式错误")
"pch19980807@gmail.com"
] | pch19980807@gmail.com |
0511fb43057a60f889f53874c33afcd98ff8ddd8 | 01bb92319db579fc60087cfc4b5c2edd924809d9 | /part_b.py | 269e2f26af02c1b213f06c2538aef80e316adce6 | [] | no_license | UIUC-public/MP5_py | 81282ae9a0884c5d3f5e6710d36a8049a59af2cd | 3664b5ec1f0c37b44363f6c5dbbcd479cf66736c | refs/heads/master | 2020-03-12T13:30:03.527768 | 2018-04-23T05:19:32 | 2018-04-23T05:19:32 | 130,643,680 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | from pyspark import SparkContext
from numpy import array
from pyspark.mllib.clustering import KMeans, KMeansModel
# Spark driver context for the k-means assignment scaffold.
sc =SparkContext()
# do NOT change this line
data=sc.textFile("dataset/cars.data")
# TODO
# Output file the grader reads; the clustering results belong here.
f=open('py_part_b.txt','w+')
# TODO
f.close()
| [
"noreply@github.com"
] | UIUC-public.noreply@github.com |
4aa8272025f036b52ea729420003ccaed04615fc | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02955/s157641213.py | 4c49cb084d398b88a82c7768e464c3400e1d3697 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | from collections import deque
def isok(x):
    """Minimum number of single-unit moves needed to make every value in the
    module-level list ``a`` divisible by *x*.

    Works on the sorted remainders: repeatedly discard zero remainders at
    either end, otherwise shift units from the smallest remainder onto the
    largest until one of them wraps to zero.
    """
    rems = deque(sorted(v % x for v in a))
    moves = 0
    while rems:
        if rems[0] == 0:
            rems.popleft()
        elif rems[-1] == 0:
            rems.pop()
        else:
            # Move enough to zero out one end: either drain the left
            # remainder or top the right one up to a multiple of x.
            step = min(rems[0], x - rems[-1])
            rems[0] -= step
            rems[-1] = (rems[-1] + step) % x
            moves += step
    return moves
# AtCoder ABC 136? (problem p02955): find the largest divisor x of sum(a)
# such that at most k unit moves make every pile divisible by x.  Since the
# total is preserved by moves, any achievable common divisor must divide it.
n,k=map(int,input().split())
a=list(map(int,input().split()))
sum_=sum(a)
fac=set()
# Collect all divisors of sum_ by trial division up to sqrt(sum_).
for i in range(1,sum_+1):
    if i*i>sum_:
        break
    if sum_%i==0:
        fac.add(i)
        fac.add(sum_//i)
fac=sorted(fac,reverse=True)
ans=1
# Largest divisor first: the first feasible one (cost <= k) is the answer.
for x in fac:
    c=isok(x)
    if c<=k:
        ans=x
        break
print(ans)
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
25c76f936b1e618ae4f59f11a453aeb716d710ca | 4c0062f3b45afe6a087f0e8b0b9292448ce8680e | /inwike/wsgi.py | b62adb52993addcae9133236c57a9f24c5e90cd2 | [] | no_license | mitshel/inwike | 89846286824d4dd322edb4d51836af8d86da00d2 | e89bd4ccb9c3a71d17692d14def6e1041596d0f9 | refs/heads/master | 2020-06-20T21:04:00.623930 | 2019-07-19T20:14:03 | 2019-07-19T20:14:03 | 197,248,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | """
WSGI config for inwike project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless the environment already does.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'inwike.settings')
# Module-level WSGI callable that application servers import.
application = get_wsgi_application()
| [
"mitshel@mail.ru"
] | mitshel@mail.ru |
d6ecb87852168421b3635fb5dffa7be3e972afcf | 163e2435668e07f6ba0b4e44653a0cc95e612d88 | /easy/073_diamond/diamond.py | 6985e06fa1a86f49007ca236313e514defdf4c1e | [] | no_license | mrifqy-abdallah/python-exercises | d202ff4204269db5974fb494a813de5494c8286e | df6637a40a853e4876cdd74eea0deebadbce688d | refs/heads/main | 2023-05-30T16:35:50.790242 | 2021-06-19T12:01:59 | 2021-06-19T12:01:59 | 361,814,985 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,131 | py | from string import ascii_uppercase
def rows(letter: str) -> 'list[str]':
    """Build the classic "diamond" kata pattern for *letter*.

    Each row holds one alphabet letter twice (once for the apex 'A'),
    centred in a square of width ``2 * index + 1``; the bottom half mirrors
    the top.  Raises ValueError for non-letters or multi-character input.
    """
    # Bug fix: validate length BEFORE the alphabet test.  The original used a
    # substring check ("AB" in ascii_uppercase is True), so contiguous runs
    # slipped past the alphabet check and reversed runs like "BA" raised the
    # wrong error message.
    if len(letter) != 1:
        raise ValueError("You must insert exactly one alphabet.")
    letter = letter.upper()
    if letter not in ascii_uppercase:
        raise ValueError("Input must be alphabet.")
    # Quick return
    if letter == "A":
        return ["A"]
    index_of_letter = ascii_uppercase.index(letter)
    total_spaces = index_of_letter * 2 + 1
    top_half = []
    for i in range(index_of_letter):
        char = ascii_uppercase[i]
        # 'A' appears once; every other letter twice around interior spaces.
        shape = char if i == 0 else f"{char}{' ' * (i * 2 - 1)}{char}"
        top_half.append(f"{shape:^{total_spaces}}")
    widest = f"{letter}{' ' * (index_of_letter * 2 - 1)}{letter}"
    # Mirror the top half below the widest row.
    return top_half + [widest] + top_half[::-1]
| [
"mrifqyabdallah@gmail.com"
] | mrifqyabdallah@gmail.com |
daf4ddaea769085c50cf8f4f15f0287de9a5ab16 | 7256596fc6437c7f3cd1947f9f88bc556df6ba56 | /programs_in_python/programming_excercise/1.py | 7aa1303ea03548fa583c4aa0857c6f32292d692b | [] | no_license | panu2306/Python-Articles | fd02cf70635e4a63eae8b691597b6858c40832b8 | 7585dbdca92264a8f52cfb3c1b918b29814d3bd1 | refs/heads/master | 2020-12-27T17:33:28.576776 | 2020-05-06T14:55:10 | 2020-05-06T14:55:10 | 237,990,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 645 | py | '''
Write a program which will find all such numbers which are divisible by 7 but are not a multiple of 5,
between 2000 and 3200 (both included).
The numbers obtained should be printed in a comma-separated sequence on a single line.
'''
# Using List in Python:
def multiple_of_seven(start, end):
    """Numbers in [start, end] divisible by 7 but not by 5, as strings."""
    return [str(value) for value in range(start, end + 1)
            if value % 7 == 0 and value % 5 != 0]
print(','.join(multiple_of_seven(2000, 3200)))
# Using yield in Python:
def multiple_seven(start, end):
    """Lazily yield (as strings) numbers in [start, end] divisible by 7 but not 5."""
    for value in range(start, end + 1):
        if value % 7 or value % 5 == 0:
            continue
        yield str(value)
# Same output as the list version, streamed one value at a time.
for i in multiple_seven(2000, 3200):
    print(i, end=',')
| [
"pranavbhendawade@gmail.com"
] | pranavbhendawade@gmail.com |
95efb04d5889db91c927e9ef5d60e9eba7e04bdb | 65ae9d8ca2f3d5657ab92bd998eec7d4a9ed9b6a | /benchmark/django_ab/django_ab/wsgi.py | 6dc073640d5af46ee9fe68dba15a364d663e30a9 | [
"MIT"
] | permissive | JieweiWei/lnet | cbb54ab0ad7dc37a1814e8b34e21a7d2cab9df05 | 94188045fba29824591cc92e6447d4807cba831b | refs/heads/master | 2016-08-12T20:08:54.559226 | 2016-05-09T03:28:53 | 2016-05-09T03:28:53 | 53,181,697 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
WSGI config for django_ab project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless the environment already does.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_ab.settings")
# Module-level WSGI callable that application servers import.
application = get_wsgi_application()
| [
"weijieweijerry@163.com"
] | weijieweijerry@163.com |
813e7f76f75b88baa4938c22d880b668d437d16a | 503c001c0e1ad76d2ee2fd0ece71dbdbdb0c0465 | /sprites/block.py | 957b3cb2256bf95ba6132f5382da1b6f0e61b15e | [] | no_license | king1600/DragonTakeOver | 607a0e65abe20b5167e647234e282335e9feb5c8 | de5979b5d0d122966073112b0c745674c10159f7 | refs/heads/master | 2020-05-22T08:11:16.993950 | 2016-10-05T23:56:30 | 2016-10-05T23:56:30 | 64,617,280 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 868 | py | import pygame
import colors
# Shared 64x64 placeholder surface used as every Block's image, filled with a
# solid debug colour so hit-boxes are visible on screen.
hitbox_img = pygame.Surface((64, 64))
hitbox_img.fill(colors.BLUE)
""" Direction Facing """
class DirectionEnum:
LEFT = 0x00
RIGHT = 0x01
""" Basic Sprite Block (interactive object) """
class Block(pygame.sprite.Sprite):
WIDTH = 64
HEIGHT = 64
SIZE = (WIDTH, HEIGHT)
COORDS = (0, 0)
COLOR = colors.BLUE
block_id = None
IS_ALIVE = False
def __init__(self, size=None, coords=(0,0)):
super(Block, self).__init__()
if size is not None:
self.WIDTH, self.HEIGHT = size
self.SIZE = (self.WIDTH, self.HEIGHT)
self.COORDS = coords
self.image = hitbox_img
self.rect = pygame.Rect(self.COORDS, self.SIZE)
def set_pos(self, x, y):
self.rect.x = x
self.rect.y = y
| [
"noreply@github.com"
] | king1600.noreply@github.com |
5b4b604359261863dda10a1df806583c9f595799 | 556290802d8cbcfea6634e26d26e80567efb59bf | /2048 Player/tempCodeRunnerFile.py | 785cfe5b5a83017fae748da9e7188eeec14e2ea1 | [] | no_license | leognon/2048Bots | 7319dff041deae87aaa494460f3d0b5a92a17266 | c6668a5f6b398f51e3329cf897793355b9e555a7 | refs/heads/master | 2023-08-09T09:10:36.674126 | 2019-07-14T15:50:43 | 2019-07-14T15:50:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | ][-2] > old_board[-1][-2] and # 2nd to Right
# new_board[-1][-1] >= old_board[-1][-1]):
# fit += .6 | [
"noreply@github.com"
] | leognon.noreply@github.com |
0bce4dd101c8d39822754429170351618117265b | 75c787f39802888b6c752688c68af439201a7894 | /回归分析散点图绘制.py | afd148f7272f4e534a89bc1d1aa007c4b3cd8e39 | [] | no_license | llqqyy/data_ans | 48478df92bba3e3ce6975a30746a9edaae0a80dc | d65e497f1011b802918eea973846021fd1235d11 | refs/heads/master | 2020-03-18T17:11:11.821009 | 2018-05-27T03:17:35 | 2018-05-27T03:17:35 | 135,011,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,363 | py | # -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
from pylab import *
mpl.rcParams['font.sans-serif'] = ['SimHei']#标签显示中文
mpl.rcParams['axes.unicode_minus'] = False#正常显示正负号
data=np.loadtxt('C:/Users/刘青源/Desktop/回归分析数据.txt')#读取数据
data=np.transpose(data)#将数据进行转置,第一行是月份
#print(data)
#进行库存金额-销售额散点图绘制
fig=plt.figure
ax1=plt.subplot(221)
plt.title('库存金额关系')#设置标题
plt.xlabel('库存金额')#设置x轴标签
plt.ylabel('销售额')#设置y轴标签
plt.legend('库存金额')#设置数据标签
x1=data[1,:]
y1=data[4,:]#数据
plt.scatter(x1,y1,c='r',marker='o')#画图
#进行广告投入-销售额散点图绘制
ax1=plt.subplot(222)
plt.title('广告投入关系')#设置标题
plt.xlabel('广告投入')#设置x轴标签
plt.ylabel('销售额')#设置y轴标签
plt.legend('广告投入')#设置数据标签
x1=data[2,:]
y1=data[4,:]#数据
plt.scatter(x1,y1,c='r',marker='o')#画图
#进行员工-销售额散点图绘制
ax1=plt.subplot(223)
plt.title('员工薪酬关系')#设置标题
plt.xlabel('员工薪酬')#设置x轴标签
plt.ylabel('销售额')#设置y轴标签
plt.legend('员工薪酬')#设置数据标签
x1=data[3,:]
y1=data[4,:]#数据
plt.scatter(x1,y1,c='r',marker='o')#画图
plt.show()
| [
"383870587@qq.com"
] | 383870587@qq.com |
c7e086c6ea45c41cf28e897e3b175a4f462aca19 | ccf94dcb6b1500fcbbd56964ae8c4832a496b8b3 | /python/baiduads-sdk-auto/test/test_app_info_item.py | deaf0a9f6d02f3a631c0a04ff600f2afd04a818c | [
"Apache-2.0"
] | permissive | baidu/baiduads-sdk | 24c36b5cf3da9362ec5c8ecd417ff280421198ff | 176363de5e8a4e98aaca039e4300703c3964c1c7 | refs/heads/main | 2023-06-08T15:40:24.787863 | 2023-05-20T03:40:51 | 2023-05-20T03:40:51 | 446,718,177 | 16 | 11 | Apache-2.0 | 2023-06-02T05:19:40 | 2022-01-11T07:23:17 | Python | UTF-8 | Python | false | false | 637 | py | """
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import baiduads
from baiduads.appprocess.model.app_info_item import AppInfoItem
class TestAppInfoItem(unittest.TestCase):
    """AppInfoItem unit test stubs"""
    # OpenAPI-generator scaffold: the actual construction/assertions are left
    # to be filled in (see the FIXME below).

    def setUp(self):
        # No fixtures required by the generated stub.
        pass

    def tearDown(self):
        pass

    def testAppInfoItem(self):
        """Test AppInfoItem"""
        # FIXME: construct object with mandatory attributes with example values
        # model = AppInfoItem()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
| [
"jiangyuan04@baidu.com"
] | jiangyuan04@baidu.com |
ab3a482230c4698e7f9d49b9254e260e3bddd324 | 80195b74663c39334f0586c170e0a98a25050135 | /modules/analyzer/wimea_analyzer_python/eraseMaillog.py | 08c6f0e1c6c8823056600c6f365c3d493da5267f | [] | no_license | wimea-ict/AWS-monitor | e929bc876373f8c979d835cb5febecf43ffa40ab | 993a7b23d45923acb5394914844e2545e4d9fb65 | refs/heads/master | 2022-07-29T15:40:11.217388 | 2021-07-05T17:04:34 | 2021-07-05T17:04:34 | 120,739,881 | 0 | 3 | null | 2022-07-07T20:44:53 | 2018-02-08T09:27:46 | Blade | UTF-8 | Python | false | false | 165 | py | from paths.directories import emailPath
# Reset the alert e-mail history, keeping only the CSV header row.
messageLog = emailPath + '/mailHistory.txt'
# Bug fix: use a context manager so the handle is closed even if truncate()
# or write() raises; the original leaked the handle on any error between
# open() and close().
with open(messageLog, 'r+') as f:
    f.truncate(0)  # drop all previously recorded messages
    f.write('userId,problem,date')
"piuskk71@gmail.com"
] | piuskk71@gmail.com |
37c1bcd5aee686df823ee3f0be5cd04f64936ce0 | 80de2ec1cba6bb1742640b9da93fc001dad17c15 | /alarmpi/get_greeting.py | 01e9eafafea4914f9c65b44eeb3ed94df9b51540 | [] | no_license | baobin25/cid4smartalarm | ebc362f361cedb42093c628eb64680a5cb857234 | 362f68e5e4ed9c39d18743781b1d9b64b310bff4 | refs/heads/master | 2020-07-04T11:19:26.027571 | 2016-08-24T14:44:53 | 2016-08-24T14:44:53 | 66,471,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 768 | py | #!/bin/python
# -*- coding: utf-8 -*-
import time
import better_spoken_numbers as bsn
from apcontent import alarmpi_content
class greeting(alarmpi_content):
    # Builds the spoken greeting line ("Good <period>, <name>, it is ...")
    # for the alarm's text-to-speech output.  Python 2 module (note the
    # print statement below).
    # NOTE(review): assumes the alarmpi_content base class provides
    # ``self.sconfig`` (with a 'name' key) and ``self.debug`` — confirm.

    def build(self):
        # Day of month spelled out in words for natural-sounding speech.
        day_of_month=str(bsn.d2w(int(time.strftime("%d"))))
        now = time.strftime("%A %B ") + day_of_month + ',' + time.strftime(" %I %M %p")
        # Later checks overwrite earlier ones, so 18:00 ends up 'evening'.
        if int(time.strftime("%H")) < 12:
            period = 'morning'
        if int(time.strftime("%H")) >= 12:
            period = 'afternoon'
        if int(time.strftime("%H")) >= 17:
            period = 'evening'
        # reads out good morning + my name
        gmt = 'Good ' + period + ', '
        # reads date and time
        day = ' it is ' + now + '. '
        greeting = gmt + self.sconfig['name'] + day
        if self.debug:
            print greeting
        self.content = greeting
| [
"noreply@github.com"
] | baobin25.noreply@github.com |
ed9fac599e6466da1021f90cfb4d5e0684fad02f | fd3b0bc4bb7f49a32b1f016926ffaa584e1471de | /blog/applevel/migrations/0006_post.py | 4587ce980ac500277e9a30faedb5a029493a6c16 | [] | no_license | prema1432/blog | 9dc153eae8f062f5c0e33ae1e64932d56c83a005 | 1b9bcce3ff27bacf61c0804cd294f1893cc695f9 | refs/heads/master | 2021-01-09T13:33:33.680983 | 2020-02-22T10:10:51 | 2020-02-22T10:10:51 | 242,320,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,406 | py | # Generated by Django 2.2 on 2020-02-18 06:09
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('applevel', '0005_delete_post'),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250)),
('slug', models.SlugField(max_length=250, unique_for_date='publish')),
('body', models.TextField()),
('publish', models.DateTimeField(default=django.utils.timezone.now)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('status', models.CharField(choices=[('draft', 'Draft'), ('published', 'Published')], default='draft', max_length=10)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blog_posts', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-publish',),
},
),
]
| [
"talamarlapremanath@gmail.com"
] | talamarlapremanath@gmail.com |
543ef8c8c0b3586eacae8fba47408cda829a38bb | 697345ad4337a29767de0c20edaad331b3798202 | /biofeedback/views.py | cee97e77e731f38de078a74a9850d384ab1b622b | [] | no_license | Yursksf1/D_Biofeedback | 0bb055a40c35b7c1b5a98b7c2b91ba4a7d7a2824 | abcfb5f5aaa81197d8be678fda03ad6fb4f911ef | refs/heads/master | 2020-03-10T17:16:44.770226 | 2018-04-14T08:11:30 | 2018-04-14T08:11:30 | 129,496,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,358 | py | from django.shortcuts import render
import os
from django.http import JsonResponse
import _thread as thread
#from subprocess import run
#from .models import Question
def index(request):
thread.start_new_thread(plotters, ())
#plotters()
print("yes")
context = {'1': 1}
return render(request, 'bio/index.html', context)
def plotters():
cmd = 'python luz3.py'
os.system(cmd)
import serial
ser = serial.Serial('/dev/ttyUSB0', 9600)
temp_ant = 0
def get_seriales(request):
global temp_ant
try:
temp = ser.readline()
try:
temp = int(temp)
except ValueError as Ve:
temp = 'none'
except AttributeError as Ae:
temp = 'none'
except TypeError as Te:
temp = 'none'
except Exception as e:
temp = 'none'
change = True
if (temp == 'none'):
temp = temp_ant
change = False
if (temp_ant != temp):
temp_ant = temp
data = {
'serial': temp,
'serial_1': temp,
'serial_2': temp,
'serial_3': temp,
'change': change
}
return JsonResponse(data)
def index_2(request):
context = {'1': 1}
return render(request, 'bio/index_2.html', context) | [
"root@yurley.sanchez"
] | root@yurley.sanchez |
0609f510d3ee1eb27d449292f8f96ac3cbfa6912 | 5f6f78eade0734f1e95cacb39b3a320a18dfc4e5 | /poembot.py | b1a4a337b81dfc9999a6a3fe693ffda7d63e07bd | [] | no_license | mplitnikas/poetrybot | 1f772aed67143ce83b88cfc1990f64af20c6f867 | 1bc6f6b7768904990700b0d05f4e10a2dc8a183c | refs/heads/master | 2021-01-01T05:10:56.265847 | 2016-05-04T00:16:08 | 2016-05-04T00:16:08 | 58,009,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,389 | py | import pprint
import random
import rhymer, chainer
#with open('bernie.txt', 'r') as fo:
with open('trump.txt', 'r') as fo:
content = fo.read()
#content = content.decode('utf-8')
r = rhymer.Rhymer(content)
lexicon = r.vocab # set of all words (no duplicates)
clean_text = r.cleaned_text # all words in order, lowercase and no punctuation
c = chainer.Markov(None, clean_text)
poem = ""
poem_length = 8 # pairs of lines
line_length = 10 # words in a line
while True:
for l in range(poem_length):
while True:
seed_pos = random.randint(line_length, len(clean_text))
seed_word = clean_text[seed_pos]
print("Trying seed word " + seed_word)
try:
poss_rhymes = r.rhyme(seed_word)
next_rhyme = random.choice(poss_rhymes)
if (seed_word == next_rhyme): # can't rhyme a word with itself, jeez
continue
print("Rhymes with: " + next_rhyme)
(w1, w2) = seed_word, clean_text[seed_pos-1]
next_rhyme_pos = c.find(clean_text, next_rhyme) # c.find takes a dump?
(r1, r2) = next_rhyme, clean_text[next_rhyme_pos-1]
line_1 = c.constuct_chain(length=line_length, seed_choice=(w1,w2)) + '\n'
line_2 = c.constuct_chain(length=line_length, seed_choice=(r1,r2)) + '\n'
poem += line_1 + line_2
break
except:
print("no rhyme for " + seed_word)
continue
print('\n')
print(poem)
raw_input("Another poem?> ")
poem = '' | [
"mplitnikas@gmail.com"
] | mplitnikas@gmail.com |
a829de9080f12b849296a6ffe1ccf39566113196 | 39a9e65aab78fd01b2d0722a2b4227468f317209 | /venv/bin/pip | d56e258cbc6545d7e6bd6bdbd8c225569f0ee525 | [] | no_license | Nikhildevadas/flask-intro | 2549531cc0ddaabf9ec4cf90da635c22d666d8d5 | ddf5dae7c4fadaa717e75c1213c3b2ec1871faf1 | refs/heads/master | 2020-04-05T23:40:24.357879 | 2014-07-14T06:05:48 | 2014-07-14T06:05:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 306 | #!/home/nikhil/flask-intro/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==1.3.1','console_scripts','pip'
__requires__ = 'pip==1.3.1'
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.exit(
load_entry_point('pip==1.3.1', 'console_scripts', 'pip')()
)
| [
"nikforu91@gmail.com"
] | nikforu91@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.