hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4277a02207aa92145ad6b49dab6def4f85950ba0 | 1,882 | py | Python | botfw/etc/util.py | lzpel/btc_bot_framework | abb585e1cd8b70297ef29d529e2da75046409f4b | [
"MIT"
] | 115 | 2019-11-29T23:48:37.000Z | 2022-03-25T11:07:21.000Z | botfw/etc/util.py | lzpel/btc_bot_framework | abb585e1cd8b70297ef29d529e2da75046409f4b | [
"MIT"
] | 15 | 2019-12-05T07:31:05.000Z | 2022-02-07T02:34:47.000Z | botfw/etc/util.py | lzpel/btc_bot_framework | abb585e1cd8b70297ef29d529e2da75046409f4b | [
"MIT"
] | 38 | 2019-11-30T00:06:04.000Z | 2022-03-22T08:40:03.000Z | import time
import datetime
import decimal
import logging
import threading
import traceback
import hmac
import hashlib
import ccxt
# Exception classes that are logged as a one-line message instead of a
# full traceback (transient network errors are expected in normal operation).
# NOTE: the trailing comma makes this a real tuple, matching the plural name;
# without it, `(ccxt.NetworkError)` is just the bare class, which happens to
# work in an `except` clause but breaks tuple-style extension/concatenation.
no_traceback_exceptions = (ccxt.NetworkError,)
def unix_time_from_ISO8601Z(date):
    """Convert an ISO 8601 UTC string like '2020-01-01T00:00:00.123Z'
    to a unix timestamp (float seconds since the epoch).

    The whole-second part is parsed with strptime and interpreted as UTC;
    the fractional digits (between the '.' and the trailing 'Z') are
    added separately, so any precision is accepted (including none).
    """
    whole = datetime.datetime.strptime(date[:19], '%Y-%m-%dT%H:%M:%S')
    whole = whole.replace(tzinfo=datetime.timezone.utc)
    fraction = float('0.' + date[20:-1])  # '' -> float('0.') == 0.0
    return whole.timestamp() + fraction
def decimal_add(x0, x1):
    """Add two numbers exactly (as printed) and return the result as float.

    Going through Decimal-of-str avoids binary floating-point artifacts,
    e.g. decimal_add(0.1, 0.2) == 0.3 (whereas 0.1 + 0.2 != 0.3).
    """
    total = decimal.Decimal(str(x0)) + decimal.Decimal(str(x1))
    return float(total)
def hmac_sha256(key, msg):
    """Return the hex-encoded HMAC-SHA256 digest of ``msg`` signed with
    ``key``.  Both arguments are str and are UTF-8 encoded before hashing.
    """
    mac = hmac.new(key.encode(), msg.encode(), digestmod=hashlib.sha256)
    return mac.hexdigest()
def setup_logger(level=logging.INFO):
    """Configure the root logger with a concise single-line format.

    :param level: logging level applied to the root logger
                  (default: logging.INFO).

    Note: each call adds another StreamHandler to the root logger,
    so this is intended to be called once at program start-up.
    """
    fmt = '[%(asctime)s.%(msecs)03d %(levelname).1s %(name)s] %(message)s'
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(fmt=fmt, datefmt='%H:%M:%S'))
    root = logging.getLogger()
    root.setLevel(level)
    root.addHandler(handler)
def run_forever(cb, log, sleep=0, exception_sleep=5):
    """Call ``cb`` in an endless loop until it raises StopRunForever.

    :param cb: zero-argument callable invoked on every iteration.
    :param log: logger used to report errors raised by ``cb``.
    :param sleep: seconds to pause between iterations.
    :param exception_sleep: extra pause after an unexpected exception.

    Exceptions listed in ``no_traceback_exceptions`` are logged as a
    one-liner; any other Exception is logged with a full traceback and
    followed by the extra ``exception_sleep`` back-off.
    """
    while True:
        try:
            cb()
        except StopRunForever:
            return
        except no_traceback_exceptions as e:
            # expected/transient errors: keep the log compact
            log.error(f'{type(e).__name__}: {e}')
        except Exception:
            log.error(traceback.format_exc())
            time.sleep(exception_sleep)
        time.sleep(sleep)
def run_forever_nonblocking(cb, log, sleep, exception_sleep=5):
    """Run ``run_forever(cb, log, sleep, exception_sleep)`` in a daemon
    thread (named after the logger) and return the started Thread.
    """
    worker = threading.Thread(
        target=run_forever, name=log.name,
        args=(cb, log, sleep, exception_sleep), daemon=True)
    worker.start()
    return worker
class StopRunForever(BaseException):
    """Raise from a run_forever callback to terminate its loop cleanly.

    Derives from BaseException rather than Exception, so the broad
    ``except Exception`` inside run_forever cannot swallow it; run_forever
    catches it explicitly and breaks out of the loop.
    """
    pass
class Timer:
    """Tracks whether more than ``interval`` seconds have elapsed since
    the last time the interval fired (or since construction)."""

    def __init__(self, interval):
        # interval: threshold in seconds; ts: time of the last firing
        self.interval = interval
        self.ts = time.time()

    def is_interval(self):
        """Return True and restart the timer if the interval has elapsed,
        otherwise return False."""
        now = time.time()
        if now - self.ts <= self.interval:
            return False
        self.ts = now
        return True
| 23.822785 | 77 | 0.639214 |
5c0273ed43c40c359d2c0275aca7f3eaffcd0e7d | 22,602 | py | Python | src/m1_pizza.py | brackinPatsy/12-MoreSequences | 388fbed2b2d5d47e40c56778ab13b99a5b25d88e | [
"MIT"
] | null | null | null | src/m1_pizza.py | brackinPatsy/12-MoreSequences | 388fbed2b2d5d47e40c56778ab13b99a5b25d88e | [
"MIT"
] | null | null | null | src/m1_pizza.py | brackinPatsy/12-MoreSequences | 388fbed2b2d5d47e40c56778ab13b99a5b25d88e | [
"MIT"
] | null | null | null | """
This module lets you practice:
-- ITERATING (i.e. LOOPING) through a SEQUENCE
-- Using OBJECTS
-- DEFINING functions
-- CALLING functions
Authors: David Mutchler, Vibha Alangar, Matt Boutell, Dave Fisher,
Mark Hays, Amanda Stouder, Aaron Wilkin, their colleagues,
and Patsy.
""" # done: 1. PUT YOUR NAME IN THE ABOVE LINE.
import rosegraphics as rg
import math
def main():
    """ Calls the TEST functions in this module (one at a time). """
    # -------------------------------------------------------------------------
    # STUDENTS: Do the work in this module as follows.
    # Otherwise, you will be overwhelmed by the output.
    #
    # For each function that you implement:
    #   1. Locate the statements just below this comment
    #      that call TEST functions.
    #   2. UN-comment only one test at a time.
    #   3. Implement that function per its _TODO_.
    #   4. When satisfied with your work, move onto the next test,
    #      RE-commenting out the previous test to reduce the output.
    # -------------------------------------------------------------------------
    # Only the first test is currently enabled; the rest stay commented out
    # until their corresponding functions are implemented.
    run_test_generate_points_on_circle()
    # run_test_draw_points_on_circle()
    # run_test_pizza()
    # run_test_polygon()
    # run_test_fancy_polygon()
def run_test_generate_points_on_circle():
    """ Tests the generate_points_on_circle function. """
    print()
    print('--------------------------------------------------')
    print('Testing the generate_points_on_circle function:')
    print('--------------------------------------------------')

    # Test 1: six points on a circle centered at (100, 50), radius 25.
    test_circle = rg.Circle(rg.Point(100, 50), 25)
    expected = [rg.Point(125.0, 50.0),  # All numbers are approximate.
                rg.Point(112.5, 71.7),
                rg.Point(87.5, 71.7),
                rg.Point(75.0, 50.0),
                rg.Point(87.5, 28.3),
                rg.Point(112.5, 28.3)]
    actual = generate_points_on_circle(test_circle, 6)
    print('Expected:', expected)
    print('Actual: ', actual)

    # Test 2: four points on a circle centered at (100, 300), radius 50.
    test_circle = rg.Circle(rg.Point(100, 300), 50)
    expected = [rg.Point(150., 300.),
                rg.Point(100., 350.),
                rg.Point(50., 300.),
                rg.Point(100., 250.)]
    actual = generate_points_on_circle(test_circle, 4)
    print('Expected:', expected)
    print('Actual: ', actual)

    # -------------------------------------------------------------------------
    # An additional student-written test belongs here (assignment step).
    # -------------------------------------------------------------------------
def generate_points_on_circle(circle_for_points, number_of_points_to_generate):
    """
    What comes in:
      -- an rg.Circle
      -- a positive integer: how many rg.Points to generate
    What goes out: a list containing that many rg.Points, where the points:
      -- all lie on the circumference of the given rg.Circle,
      -- are equally spaced from each other, and
      -- proceed clockwise around the circle, starting at its
         rightmost point.
    Side effects: None.
    Examples: see the 'draw_points_on_circle' pictures in pizza.pdf.
    Type hints:
      :type circle_for_points: rg.Circle
      :type number_of_points_to_generate: int
      :rtype: list of rg.Points
    """
    # -------------------------------------------------------------------------
    # Students: do NOT modify this function; CALL it from the problems below.
    # -------------------------------------------------------------------------
    center = circle_for_points.center
    r = circle_for_points.radius

    # Successive points are this many degrees apart along the circumference.
    step = 360 / number_of_points_to_generate

    points = []
    degrees = 0
    for _ in range(number_of_points_to_generate):
        # Polar-to-Cartesian conversion for the current angle.
        theta = math.radians(degrees)
        points.append(rg.Point(r * math.cos(theta) + center.x,
                               r * math.sin(theta) + center.y))
        # Advance to the next equally-spaced point.
        degrees = degrees + step

    return points
def run_test_draw_points_on_circle():
    """ Tests the draw_points_on_circle function. """
    print()
    print('--------------------------------------------------')
    print('Testing the draw_points_on_circle function:')
    print('See the windows that pop up.')
    print('--------------------------------------------------')

    # Test 1: seven yellow dots on a large centered circle.
    win = rg.RoseWindow(400, 400,
                        'DRAW_POINTS_ON_CIRCLE, test 1: 7 yellow dots.')
    draw_points_on_circle(win, rg.Circle(rg.Point(200, 200), 150), 7, 'yellow')
    win.close_on_mouse_click()

    # Tests 2 and 3 share one window.
    win = rg.RoseWindow(
        440, 400,
        'Tests 2 and 3: 6 blue on deep pink; 10 green1 on unfilled.')
    pink_circle = rg.Circle(rg.Point(135, 135), 50)
    pink_circle.fill_color = 'deep pink'
    draw_points_on_circle(win, pink_circle, 6, 'blue')
    win.continue_on_mouse_click()

    draw_points_on_circle(win, rg.Circle(rg.Point(210, 210), 100), 10,
                          'green1')
    win.close_on_mouse_click()

    # -------------------------------------------------------------------------
    # An additional student-written test belongs here (assignment step).
    # -------------------------------------------------------------------------
def draw_points_on_circle(window, circle, number_of_points, color):
    """
    Draws the given rg.Circle on the given rg.RoseWindow, then marks the
    given number of equally-spaced points on its circumference: for each
    point, a radius-10 circle filled with the given color is attached,
    then the rg.Point itself is attached (so the point is drawn on top
    of its colored dot).  Finally renders the window.

    Uses generate_points_on_circle to compute the points.

    Type hints:
      :type window: rg.RoseWindow
      :type circle: rg.Circle
      :type number_of_points: int
      :type color: str
    """
    circle.attach_to(window)

    for point in generate_points_on_circle(circle, number_of_points):
        dot = rg.Circle(point, 10)
        dot.attach_to(window)
        dot.fill_color = color
        point.attach_to(window)  # attached after the dot, so drawn on top

    window.render()
def run_test_pizza():
    """ Tests the pizza function. """
    print()
    print('--------------------------------------------------')
    print('Testing the pizza function:')
    print('See the windows that pop up.')
    print('--------------------------------------------------')

    # Test 1: five slices, thin blue cut lines.
    win = rg.RoseWindow(
        400, 400, 'PIZZA test 1: 5 slices, thin (thickness=3) blue lines.')
    pie = rg.Circle(rg.Point(200, 200), 150)
    pie.outline_thickness = 3
    pizza(win, pie, 5, 'blue', 3)
    win.close_on_mouse_click()

    # Tests 2 and 3 share one window.
    win = rg.RoseWindow(520, 400,
                        'PIZZA tests 2 and 3: 8 white slices on purple circle;'
                        + ' 20 green slices on blue circle.')
    pie = rg.Circle(rg.Point(125, 125), 50)
    pie.fill_color = 'purple'
    pizza(win, pie, 8, 'white', 5)
    win.continue_on_mouse_click()

    pie = rg.Circle(rg.Point(350, 200), 125)
    pie.fill_color = 'blue'
    pizza(win, pie, 20, 'green1', 3)
    win.close_on_mouse_click()

    # -------------------------------------------------------------------------
    # An additional student-written test belongs here (assignment step).
    # Suggestion: a large number of thin black lines on a yellow-filled circle.
    # -------------------------------------------------------------------------
def pizza(window, circle, number_of_slices, color, thickness):
    """
    What comes in:
      -- an rg.RoseWindow
      -- an rg.Circle
      -- an integer >= 2 that specifies how many rg.Lines to draw
      -- a string that can be used as a RoseGraphics color
      -- a positive integer: the thickness (width) of each rg.Line
    What goes out: Nothing (i.e., None).
    Side effects:
      1. Draws the given rg.Circle in the given rg.RoseWindow.
      2. Draws rg.Lines from the circle's center to equally-spaced points
         on its circumference, so the picture looks like a 'pizza pie'
         cut into the given number of 'slices'.  Each rg.Line has the
         given color and thickness.
      3. Renders the given rg.RoseWindow.
    Examples: see the 'pizza' pictures in the pizza.pdf file
      in this project.
    Type hints:
      :type window: rg.RoseWindow
      :type circle: rg.Circle
      :type number_of_slices: int
      :type color: str
      :type thickness: int
    """
    circle.attach_to(window)

    # One 'cut' per slice: a line from the center to an equally-spaced
    # point on the rim (computed by generate_points_on_circle, as required).
    for rim_point in generate_points_on_circle(circle, number_of_slices):
        cut = rg.Line(circle.center, rim_point)
        cut.color = color          # rg.Line's line color
        cut.thickness = thickness  # rg.Line's line width
        cut.attach_to(window)

    window.render()
def run_test_polygon():
    """ Tests the polygon function. """
    print()
    print('--------------------------------------------------')
    print('Testing the polygon function:')
    print('See the windows that pop up.')
    print('--------------------------------------------------')

    # Tests 1 and 2 share one window.
    win = rg.RoseWindow(550, 400,
                        'POLYGON tests 1 and 2: 3 segments with thick blue lines;'
                        + ' 6 with medium red lines.')
    rim = rg.Circle(rg.Point(100, 100), 80)
    rim.outline_thickness = 3
    polygon(win, rim, 3, 'blue', 10)
    win.continue_on_mouse_click()

    rim = rg.Circle(rg.Point(350, 200), 150)
    rim.outline_thickness = 3
    polygon(win, rim, 6, 'red', 5)
    win.close_on_mouse_click()

    # -------------------------------------------------------------------------
    # An additional student-written test belongs here (assignment step).
    # -------------------------------------------------------------------------
def polygon(window, circle, number_of_segments, color, thickness):
    """
    What comes in:
      -- an rg.RoseWindow
      -- an rg.Circle
      -- an integer >= 2 that specifies how many rg.Lines to draw
      -- a string that can be used as a RoseGraphics color
      -- a positive integer: the thickness (width) of each rg.Line
    What goes out: Nothing (i.e., None).
    Side effects:
      1. Draws the given rg.Circle in the given rg.RoseWindow.
      2. Draws rg.Lines that form a regular polygon with the given number
         of segments, inscribed in the given rg.Circle.  Each rg.Line has
         the given color and thickness.
      3. Renders the given rg.RoseWindow.
    Examples: see the 'polygon' pictures in the pizza.pdf file
      in this project.
    Type hints:
      :type window: rg.RoseWindow
      :type circle: rg.Circle
      :type number_of_segments: int
      :type color: str
      :type thickness: int
    """
    circle.attach_to(window)

    # Corners of the inscribed polygon, per the required helper.
    corners = generate_points_on_circle(circle, number_of_segments)

    for k in range(number_of_segments):
        # Connect each corner to the next; % wraps the last back to corner 0.
        side = rg.Line(corners[k], corners[(k + 1) % number_of_segments])
        side.color = color
        side.thickness = thickness
        side.attach_to(window)

    window.render()
def run_test_fancy_polygon():
    """ Tests the fancy_polygon function. """
    print()
    print('--------------------------------------------------')
    print('Testing the fancy_polygon function:')
    print('See the windows that pop up.')
    print('--------------------------------------------------')

    # Tests 1 and 2 share one window.
    win = rg.RoseWindow(520, 400,
                        'FANCY POLYGON tests 1 and 2: 7 blue lines, hops = 2;'
                        + ' 5 red lines, hops = 3.')
    fancy_polygon(win, rg.Circle(rg.Point(100, 100), 80), 7, 2, 'blue', 3)
    win.continue_on_mouse_click()

    fancy_polygon(win, rg.Circle(rg.Point(330, 200), 150), 5, 3, 'red', 3)
    win.close_on_mouse_click()

    # Test 3 gets its own window.
    win = rg.RoseWindow(480, 350,
                        'FANCY POLYGON test 3: 20 lime green lines on blue circle,'
                        + ' hops = 7.')
    rim = rg.Circle(rg.Point(240, 165), 150)
    rim.fill_color = 'blue'
    fancy_polygon(win, rim, 20, 7, 'lime green', 5)
    win.close_on_mouse_click()

    # -------------------------------------------------------------------------
    # An additional student-written test belongs here (assignment step).
    # Especially pretty combinations (on filled circles with contrasting
    # line colors): 20 segments / hops 5; 51 / 25; 300 / 61.
    # -------------------------------------------------------------------------
def fancy_polygon(window, circle, number_of_lines, hops_to_next_point, color,
                  thickness):
    """
    What comes in:
      -- an rg.RoseWindow
      -- an rg.Circle
      -- an integer >= 2 that specifies how many rg.Lines to draw
      -- a positive integer: how many points each line "hops" over
      -- a string that can be used as a RoseGraphics color
      -- a positive integer: the thickness (width) of each rg.Line
    What goes out: Nothing (i.e., None).
    Side effects:
      1. Draws the given rg.Circle in the given rg.RoseWindow.
      2. Draws rg.Lines that look like an inscribed regular polygon,
         except that each rg.Line goes from point k to the point
         hops_to_next_point further along the circle, wrapping around
         as needed.  Each rg.Line has the given color and thickness and
         is drawn as an arrow (its ``arrow`` instance variable is set
         to the string 'last', per the specification).
      3. Renders the given rg.RoseWindow.
    For example, if hops_to_next_point is 1 the picture is a regular
    polygon; if it is 3 and number_of_lines is 5, the lines go
    0->3, 1->4, 2->0, 3->1, 4->2 (note the 'wrap' effect).
    Examples: see the 'fancy_polygon' pictures in the pizza.pdf
      file in this project.
    Type hints:
      :type window: rg.RoseWindow
      :type circle: rg.Circle
      :type number_of_lines: int
      :type hops_to_next_point: int
      :type color: str
      :type thickness: int
    """
    circle.attach_to(window)

    # Equally-spaced points on the rim, per the required helper.
    points = generate_points_on_circle(circle, number_of_lines)

    for k in range(number_of_lines):
        start = points[k]
        # The % operator implements the 'wrap' back to the first points.
        end = points[(k + hops_to_next_point) % number_of_lines]
        line = rg.Line(start, end)
        line.color = color
        line.thickness = thickness
        line.arrow = 'last'  # draw an arrowhead at the line's end point
        line.attach_to(window)

    window.render()
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# NOTE(review): this runs on import as well as when executed directly; an
# `if __name__ == '__main__':` guard would be the conventional form, but the
# unguarded call is kept to preserve the course scaffold's behavior.
# -----------------------------------------------------------------------------
main()
| 41.624309 | 79 | 0.520883 |
df468edb88e4ab02aa7bece881979f86549562e1 | 3,583 | py | Python | midterm2/s4108056005/s4108056005/settings.py | AndyChiangSH/1102_Web-based-systems | 2ae1ff82190a060755e60ed442afda1cf28a21fb | [
"MIT"
] | null | null | null | midterm2/s4108056005/s4108056005/settings.py | AndyChiangSH/1102_Web-based-systems | 2ae1ff82190a060755e60ed442afda1cf28a21fb | [
"MIT"
] | null | null | null | midterm2/s4108056005/s4108056005/settings.py | AndyChiangSH/1102_Web-based-systems | 2ae1ff82190a060755e60ed442afda1cf28a21fb | [
"MIT"
] | null | null | null | """
Django settings for s4108056005 project.
Generated by 'django-admin startproject' using Django 3.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
from .router import MydbRouter
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; before any
# production deployment it should be rotated and loaded from an
# environment variable rather than hard-coded here.
SECRET_KEY = 'tu-mo%+1)4)%*q8231ygx&20%t0crj=3i)l8ne!y#l2dx#^i(s'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Empty list is fine while DEBUG is True; must list real hosts in production.
ALLOWED_HOSTS = []


# Application definition
# The two project apps ('dining_brief', 'food_recommendation') are listed
# after Django's built-in apps.
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'dining_brief',
    'food_recommendation',
]

# Django's default middleware stack (order matters).
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 's4108056005.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Project-level template directory, in addition to per-app dirs.
        'DIRS': [os.path.join(BASE_DIR, "templates")],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 's4108056005.wsgi.application'


# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# Two databases: the default SQLite file, plus a MySQL database 'mydb'.
# Which models go to which database is decided by MydbRouter (see
# DATABASE_ROUTERS below).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    },
    'mydb': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'mydb',
        'USER': 'user',
        # NOTE(review): credentials are committed here; move them to
        # environment variables / a secrets store for production use.
        'PASSWORD': 'user',
        'HOST': 'localhost',
        'PORT': '',
        'OPTIONS': {
            # Reject out-of-range/invalid values instead of truncating.
            'init_command': "SET sql_mode='STRICT_TRANS_TABLES'"
        },
    }
}

# Routes reads/writes between 'default' and 'mydb' (defined in .router).
DATABASE_ROUTERS = [MydbRouter, ]

# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/

STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
] | 25.776978 | 91 | 0.677086 |
bb51da1d5024b36d0b579a704f14c72f08466a6c | 16,843 | py | Python | qiskit/aqua/algorithms/factorizers/shor.py | v-r0/qiskit-aqua_duplicate20210324 | 03ac8575edd98f7bf7e3ed0a98b6348981bc4951 | [
"Apache-2.0"
] | null | null | null | qiskit/aqua/algorithms/factorizers/shor.py | v-r0/qiskit-aqua_duplicate20210324 | 03ac8575edd98f7bf7e3ed0a98b6348981bc4951 | [
"Apache-2.0"
] | null | null | null | qiskit/aqua/algorithms/factorizers/shor.py | v-r0/qiskit-aqua_duplicate20210324 | 03ac8575edd98f7bf7e3ed0a98b6348981bc4951 | [
"Apache-2.0"
] | null | null | null | # This code is part of Qiskit.
#
# (C) Copyright IBM 2019, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Shor's factoring algorithm."""
from typing import Optional, Union, Tuple, List
import math
import array
import fractions
import logging
import numpy as np
from qiskit import ClassicalRegister, QuantumCircuit, QuantumRegister
from qiskit.circuit import Gate, Instruction, ParameterVector
from qiskit.circuit.library import QFT
from qiskit.providers import BaseBackend
from qiskit.providers import Backend
from qiskit.aqua import QuantumInstance
from qiskit.aqua.algorithms import AlgorithmResult, QuantumAlgorithm
from qiskit.aqua.utils import get_subsystem_density_matrix, summarize_circuits
from qiskit.aqua.utils.arithmetic import is_power
from qiskit.aqua.utils.validation import validate_min
logger = logging.getLogger(__name__)
# pylint: disable=invalid-name
class Shor(QuantumAlgorithm):
"""Shor's factoring algorithm.
Shor's Factoring algorithm is one of the most well-known quantum algorithms and finds the
prime factors for input integer :math:`N` in polynomial time.
The input integer :math:`N` to be factored is expected to be odd and greater than 2.
Even though this implementation is general, its capability will be limited by the
capacity of the simulator/hardware. Another input integer :math:`a` can also be supplied,
which needs to be a co-prime smaller than :math:`N` .
Adapted from https://github.com/ttlion/ShorAlgQiskit
See also https://arxiv.org/abs/quant-ph/0205095
"""
    def __init__(self,
                 N: int = 15,
                 a: int = 2,
                 quantum_instance: Optional[
                     Union[QuantumInstance, BaseBackend, Backend]] = None) -> None:
        """
        Args:
            N: The integer to be factored, has a min. value of 3.
            a: Any integer that satisfies 1 < a < N and gcd(a, N) = 1.
            quantum_instance: Quantum Instance or Backend

        Raises:
            ValueError: Invalid input
        """
        # validate_min raises for N < 3 / a < 2 before the checks below run.
        validate_min('N', N, 3)
        validate_min('a', a, 2)
        super().__init__(quantum_instance)
        # Register placeholders; populated later when the circuit is built
        # (outside this method).
        self._n = None  # type: Optional[int]
        self._up_qreg = None
        self._down_qreg = None  # type: Optional[QuantumRegister]
        self._aux_qreg = None  # type: Optional[QuantumRegister]

        # check the input integer: must be odd (evenness would make 2 a
        # trivial factor) and > 1
        if N < 1 or N % 2 == 0:
            raise ValueError('The input needs to be an odd integer greater than 1.')

        self._N = N

        # a must be a co-prime of N strictly smaller than N
        if a >= N or math.gcd(a, self._N) != 1:
            raise ValueError('The integer a needs to satisfy a < N and gcd(a, N) = 1.')

        self._a = a

        # Result accumulator, filled in during execution.
        self._ret = AlgorithmResult({"factors": [], "total_counts": 0, "successful_counts": 0})

        # check if the input integer is a power: if N == b^p, then b is a
        # factor and is recorded immediately without running the circuit
        tf, b, p = is_power(N, return_decomposition=True)
        if tf:
            logger.info('The input integer is a power: %s=%s^%s.', N, b, p)
            self._ret['factors'].append(b)

        # QFT without final swaps (the circuit construction accounts for
        # the resulting qubit ordering), plus its inverse.
        self._qft = QFT(do_swaps=False).to_instruction()
        self._iqft = self._qft.inverse()

        # Gates for (+N) / (-N) in Fourier space; created during circuit
        # construction (outside this view).
        self._phi_add_N = None  # type: Optional[Gate]
        self._iphi_add_N = None
def _get_angles(self, a: int) -> np.ndarray:
"""Calculates the array of angles to be used in the addition in Fourier Space."""
s = bin(int(a))[2:].zfill(self._n + 1)
angles = np.zeros([self._n + 1])
for i in range(0, self._n + 1):
for j in range(i, self._n + 1):
if s[j] == '1':
angles[self._n - i] += math.pow(2, -(j - i))
angles[self._n - i] *= np.pi
return angles[::-1]
@staticmethod
def _phi_add_gate(size: int, angles: Union[np.ndarray, ParameterVector]) -> Gate:
"""Gate that performs addition by a in Fourier Space."""
circuit = QuantumCircuit(size, name="phi_add")
for i, angle in enumerate(angles):
circuit.p(angle, i)
return circuit.to_gate()
    def _double_controlled_phi_add_mod_N(self,
                                         num_qubits: int,
                                         angles: Union[np.ndarray, ParameterVector]
                                         ) -> QuantumCircuit:
        """Creates a circuit which implements double-controlled modular addition by a.

        Qubits 0 and 1 are the two controls and qubit 2 is the ancilla used to
        detect overflow; the remaining qubits hold the Fourier-encoded sum
        (construction per Beauregard, arXiv:quant-ph/0205095).
        """
        circuit = QuantumCircuit(num_qubits, name="phi_add")
        ctl_up = 0
        ctl_down = 1
        ctl_aux = 2
        # get qubits from aux register, omitting the control qubit
        qubits = range(3, num_qubits)
        # store the gates representing addition/subtraction by a in Fourier Space
        phi_add_a = self._phi_add_gate(len(qubits), angles)
        iphi_add_a = phi_add_a.inverse()
        # doubly-controlled add a, then subtract N: register holds (b + a) - N
        circuit.append(phi_add_a.control(2), [ctl_up, ctl_down, *qubits])
        circuit.append(self._iphi_add_N, qubits)
        # leave Fourier space to copy the sign/overflow qubit into the ancilla
        # NOTE(review): the reference construction reads the MOST significant
        # qubit and re-adds N *controlled* on the ancilla; here qubits[0] is
        # used and phi_add_N is appended uncontrolled -- confirm against the
        # Beauregard paper before relying on this circuit.
        circuit.append(self._iqft, qubits)
        circuit.cx(qubits[0], ctl_aux)
        circuit.append(self._qft, qubits)
        circuit.append(self._phi_add_N, qubits)
        # uncompute the ancilla: subtract a, test the (inverted) sign qubit,
        # then restore by re-adding a
        circuit.append(iphi_add_a.control(2), [ctl_up, ctl_down, *qubits])
        circuit.append(self._iqft, qubits)
        circuit.x(qubits[0])
        circuit.cx(qubits[0], ctl_aux)
        circuit.x(qubits[0])
        circuit.append(self._qft, qubits)
        circuit.append(phi_add_a.control(2), [ctl_up, ctl_down, *qubits])
        return circuit
    def _controlled_multiple_mod_N(self, num_qubits: int, a: int) -> Instruction:
        """Implements modular multiplication by a as an instruction.

        Qubit 0 is the single control; the next ``self._n`` qubits form the
        'down' (multiplicand) register and the rest the auxiliary register,
        whose last qubit serves as the ancilla of the modular adders.
        """
        circuit = QuantumCircuit(
            num_qubits, name="multiply_by_{}_mod_{}".format(a % self._N, self._N)
        )
        down = circuit.qubits[1: self._n + 1]
        aux = circuit.qubits[self._n + 1:]
        # the adders act on the aux qubits in reversed order
        qubits = [aux[i] for i in reversed(range(self._n + 1))]
        ctl_up = 0
        ctl_aux = aux[-1]
        # build the doubly-controlled adder once with symbolic angles and bind
        # concrete angle values per summand in the loops below
        angle_params = ParameterVector("angles", length=len(aux) - 1)
        double_controlled_phi_add = self._double_controlled_phi_add_mod_N(
            len(aux) + 2, angle_params
        )
        idouble_controlled_phi_add = double_controlled_phi_add.inverse()
        circuit.append(self._qft, qubits)
        # perform controlled addition by a on the aux register in Fourier space:
        # bit i of the down register contributes (2^i * a) mod N
        for i, ctl_down in enumerate(down):
            a_exp = (2 ** i) * a % self._N
            angles = self._get_angles(a_exp)
            bound = double_controlled_phi_add.assign_parameters({angle_params: angles})
            circuit.append(bound, [ctl_up, ctl_down, ctl_aux, *qubits])
        circuit.append(self._iqft, qubits)
        # controlled swap: move the product from aux into the down register
        for j in range(self._n):
            circuit.cswap(ctl_up, down[j], aux[j])
        circuit.append(self._qft, qubits)
        # uncompute the aux register by subtracting multiples of a^-1 mod N
        a_inv = self.modinv(a, self._N)
        for i in reversed(range(len(down))):
            a_exp = (2 ** i) * a_inv % self._N
            angles = self._get_angles(a_exp)
            bound = idouble_controlled_phi_add.assign_parameters({angle_params: angles})
            circuit.append(bound, [ctl_up, down[i], ctl_aux, *qubits])
        circuit.append(self._iqft, qubits)
        return circuit.to_instruction()
    def construct_circuit(self, measurement: bool = False) -> QuantumCircuit:
        """Construct circuit.
        Args:
            measurement: Boolean flag to indicate if measurement should be included in the circuit.
        Returns:
            Quantum circuit.
        """
        # Get n value used in Shor's algorithm, to know how many qubits are used
        self._n = math.ceil(math.log(self._N, 2))
        # resize the shared QFT/IQFT instructions to the adder width (n + 1)
        self._qft.num_qubits = self._n + 1
        self._iqft.num_qubits = self._n + 1
        # quantum register where the sequential QFT is performed (2n qubits)
        self._up_qreg = QuantumRegister(2 * self._n, name='up')
        # quantum register where the multiplications are made
        self._down_qreg = QuantumRegister(self._n, name='down')
        # auxiliary quantum register used in addition and multiplication
        self._aux_qreg = QuantumRegister(self._n + 2, name='aux')
        # Create Quantum Circuit
        circuit = QuantumCircuit(self._up_qreg,
                                 self._down_qreg,
                                 self._aux_qreg,
                                 name="Shor(N={}, a={})".format(self._N, self._a))
        # Create gates to perform addition/subtraction by N in Fourier Space
        self._phi_add_N = self._phi_add_gate(self._aux_qreg.size - 1, self._get_angles(self._N))
        self._iphi_add_N = self._phi_add_N.inverse()
        # Create maximal superposition in top register
        circuit.h(self._up_qreg)
        # Initialize down register to 1
        circuit.x(self._down_qreg[0])
        # Apply the multiplication gates as showed in
        # the report in order to create the exponentiation:
        # qubit i of the up register controls multiplication by a^(2^i) mod N
        for i, ctl_up in enumerate(self._up_qreg):  # type: ignore
            # NOTE(review): a^(2^i) is computed as an exact (potentially huge)
            # integer rather than pow(self._a, 2 ** i, self._N); correct but
            # costly for large i -- confirm this is intended.
            a = int(pow(self._a, pow(2, i)))
            controlled_multiple_mod_N = self._controlled_multiple_mod_N(
                len(self._down_qreg) + len(self._aux_qreg) + 1, a,
            )
            circuit.append(
                controlled_multiple_mod_N, [ctl_up, *self._down_qreg, *self._aux_qreg]
            )
        # Apply inverse QFT to the up register to expose the phase/period
        iqft = QFT(len(self._up_qreg)).inverse().reverse_bits().to_instruction()
        circuit.append(iqft, self._up_qreg)
        if measurement:
            up_cqreg = ClassicalRegister(2 * self._n, name='m')
            circuit.add_register(up_cqreg)
            circuit.measure(self._up_qreg, up_cqreg)
        logger.info(summarize_circuits(circuit))
        return circuit
@staticmethod
def modinv(a: int, m: int) -> int:
"""Returns the modular multiplicative inverse of a with respect to the modulus m."""
def egcd(a: int, b: int) -> Tuple[int, int, int]:
if a == 0:
return b, 0, 1
else:
g, y, x = egcd(b % a, a)
return g, x - (b // a) * y, y
g, x, _ = egcd(a, m)
if g != 1:
raise ValueError("The greatest common divisor of {} and {} is {}, so the "
"modular inverse does not exist.".format(a, m, g))
return x % m
    def _get_factors(self, measurement: str) -> Optional[List[int]]:
        """Apply the continued fractions to find r and the gcd to find the desired factors.

        Args:
            measurement: bitstring measured on the 2n-qubit 'up' register.
        Returns:
            The two non-trivial factors of N (sorted), or None when this
            measurement does not yield factors.
        """
        x_final = int(measurement, 2)
        logger.info('In decimal, x_final value for this result is: %s.', x_final)
        if x_final <= 0:
            fail_reason = 'x_final value is <= 0, there are no continued fractions.'
        else:
            fail_reason = None
            logger.debug('Running continued fractions for this case.')
        # Calculate T and x/T
        T_upper = len(measurement)
        T = pow(2, T_upper)
        x_over_T = x_final / T
        # Cycle in which each iteration corresponds to putting one more term in the
        # calculation of the Continued Fraction (CF) of x/T
        # Initialize the first values according to CF rule
        i = 0
        b = array.array('i')
        t = array.array('f')
        b.append(math.floor(x_over_T))
        t.append(x_over_T - b[i])
        exponential = 0.0
        # iterate at most N times; stop early once a fail_reason is set
        while i < self._N and fail_reason is None:
            # From the 2nd iteration onwards, calculate the new terms of the CF based
            # on the previous terms as the rule suggests
            if i > 0:
                b.append(math.floor(1 / t[i - 1]))
                t.append((1 / t[i - 1]) - b[i])  # type: ignore
            # Calculate the denominator of the CF using the known terms
            denominator = self._calculate_continued_fraction(b)
            # Increment i for next iteration
            i += 1
            if denominator % 2 == 1:
                # only even denominators can give the exponent a^(r/2)
                logger.debug('Odd denominator, will try next iteration of continued fractions.')
                continue
            # Denominator is even, try to get factors of N
            # Get the exponential a^(r/2)
            # NOTE(review): when denominator >= 1000 `exponential` silently keeps
            # its value from the previous iteration (0.0 initially) and the size
            # check below decides the outcome -- matches the upstream code, but
            # confirm this is intentional.
            if denominator < 1000:
                exponential = pow(self._a, denominator / 2)
            # Check if the value is too big or not
            if exponential > 1000000000:
                fail_reason = 'denominator of continued fraction is too big.'
            else:
                # The value is not too big,
                # get the right values and do the proper gcd()
                putting_plus = int(exponential + 1)
                putting_minus = int(exponential - 1)
                one_factor = math.gcd(putting_plus, self._N)
                other_factor = math.gcd(putting_minus, self._N)
                # Check if the factors found are trivial factors or are the desired factors
                if any(factor in {1, self._N} for factor in (one_factor, other_factor)):
                    logger.debug('Found just trivial factors, not good enough.')
                    # Check if the number has already been found,
                    # (use i - 1 because i was already incremented)
                    if t[i - 1] == 0:
                        fail_reason = 'the continued fractions found exactly x_final/(2^(2n)).'
                else:
                    # Successfully factorized N
                    return sorted((one_factor, other_factor))
        # Search for factors failed, write the reason for failure to the debug logs
        logger.debug(
            'Cannot find factors from measurement %s because %s',
            measurement, fail_reason or 'it took too many attempts.'
        )
        return None
@staticmethod
def _calculate_continued_fraction(b: array.array) -> int:
"""Calculate the continued fraction of x/T from the current terms of expansion b."""
x_over_T = 0
for i in reversed(range(len(b) - 1)):
x_over_T = 1 / (b[i + 1] + x_over_T)
x_over_T += b[0]
# Get the denominator from the value obtained
frac = fractions.Fraction(x_over_T).limit_denominator()
logger.debug('Approximation number %s of continued fractions:', len(b))
logger.debug("Numerator:%s \t\t Denominator: %s.", frac.numerator, frac.denominator)
return frac.denominator
    def _run(self) -> AlgorithmResult:
        """Execute the quantum subroutine (unless N was factored classically in
        __init__) and post-process every measurement into candidate factors."""
        # 'factors' is pre-filled when N is a perfect power; skip the circuit then.
        if not self._ret['factors']:
            logger.debug('Running with N=%s and a=%s.', self._N, self._a)
            if self._quantum_instance.is_statevector:
                circuit = self.construct_circuit(measurement=False)
                logger.warning('The statevector_simulator might lead to '
                               'subsequent computation using too much memory.')
                result = self._quantum_instance.execute(circuit)
                complete_state_vec = result.get_statevector(circuit)
                # TODO: this uses too much memory
                # reduce to the 'up' register; the indices 2n .. 4n+1 cover the
                # down and aux registers (presumably the systems traced out --
                # confirm against get_subsystem_density_matrix's signature)
                up_qreg_density_mat = get_subsystem_density_matrix(
                    complete_state_vec,
                    range(2 * self._n, 4 * self._n + 2)
                )
                up_qreg_density_mat_diag = np.diag(up_qreg_density_mat)
                counts = dict()
                for i, v in enumerate(up_qreg_density_mat_diag):
                    if not v == 0:
                        # NOTE(review): the diagonal of a density matrix already
                        # holds probabilities; squaring it (v ** 2) looks
                        # suspicious -- matches upstream code, confirm intent.
                        counts[bin(int(i))[2:].zfill(2 * self._n)] = v ** 2
            else:
                circuit = self.construct_circuit(measurement=True)
                counts = self._quantum_instance.execute(circuit).get_counts(circuit)
            self._ret.data["total_counts"] = len(counts)
            # For each simulation result, print proper info to user
            # and try to calculate the factors of N
            for measurement in list(counts.keys()):
                # Get the x_final value from the final state qubits
                logger.info("------> Analyzing result %s.", measurement)
                factors = self._get_factors(measurement)
                if factors:
                    logger.info(
                        'Found factors %s from measurement %s.',
                        factors, measurement
                    )
                    self._ret.data["successful_counts"] += 1
                    if factors not in self._ret['factors']:
                        self._ret['factors'].append(factors)
        return self._ret
| 40.102381 | 99 | 0.596746 |
8b9f321ab5dcfcc1f9d4414f22c38b906f546ab5 | 7,455 | py | Python | main.py | Parskatt/DeepLabv3.pytorch | 5fcbd6bf347296a842f44aaef4c396a949ed5130 | [
"BSD-3-Clause"
] | null | null | null | main.py | Parskatt/DeepLabv3.pytorch | 5fcbd6bf347296a842f44aaef4c396a949ed5130 | [
"BSD-3-Clause"
] | null | null | null | main.py | Parskatt/DeepLabv3.pytorch | 5fcbd6bf347296a842f44aaef4c396a949ed5130 | [
"BSD-3-Clause"
] | null | null | null | import argparse
import os
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import pdb
from PIL import Image
from scipy.io import loadmat
from torch.autograd import Variable
from torchvision import transforms
import deeplab
from pascal import VOCSegmentation
from cityscapes import Cityscapes
from utils import AverageMeter, inter_and_union
parser = argparse.ArgumentParser()
parser.add_argument('--train', action='store_true', default=False,
help='training mode')
parser.add_argument('--exp', type=str, required=True,
help='name of experiment')
parser.add_argument('--gpu', type=int, default=0,
help='test time gpu device id')
parser.add_argument('--backbone', type=str, default='resnet101',
help='resnet101')
parser.add_argument('--dataset', type=str, default='pascal',
help='pascal or cityscapes')
parser.add_argument('--groups', type=int, default=None,
help='num of groups for group normalization')
parser.add_argument('--epochs', type=int, default=30,
help='num of training epochs')
parser.add_argument('--batch_size', type=int, default=16,
help='batch size')
parser.add_argument('--base_lr', type=float, default=0.00025,
help='base learning rate')
parser.add_argument('--last_mult', type=float, default=1.0,
help='learning rate multiplier for last layers')
parser.add_argument('--scratch', action='store_true', default=False,
help='train from scratch')
parser.add_argument('--freeze_bn', action='store_true', default=False,
help='freeze batch normalization parameters')
parser.add_argument('--weight_std', action='store_true', default=False,
help='weight standardization')
parser.add_argument('--beta', action='store_true', default=False,
help='resnet101 beta')
parser.add_argument('--crop_size', type=int, default=513,
help='image crop size')
parser.add_argument('--resume', type=str, default=None,
help='path to checkpoint to resume from')
parser.add_argument('--workers', type=int, default=4,
help='number of data loading workers')
args = parser.parse_args()
def main():
  """Train or evaluate DeepLab v3 according to the parsed command-line args."""
  assert torch.cuda.is_available()
  torch.backends.cudnn.benchmark = True
  # checkpoint path template; the trailing %d is filled with the epoch number
  model_fname = 'data/deeplab_{0}_{1}_v3_{2}_epoch%d.pth'.format(
      args.backbone, args.dataset, args.exp)
  if args.dataset == 'pascal':
    dataset = VOCSegmentation('data/VOCdevkit',
                              train=args.train, crop_size=args.crop_size)
  elif args.dataset == 'cityscapes':
    dataset = Cityscapes('data/cityscapes',
                         train=args.train, crop_size=args.crop_size)
  else:
    raise ValueError('Unknown dataset: {}'.format(args.dataset))
  if args.backbone == 'resnet101':
    model = getattr(deeplab, 'resnet101')(
        pretrained=(not args.scratch),
        num_classes=len(dataset.CLASSES),
        num_groups=args.groups,
        weight_std=args.weight_std,
        beta=args.beta)
  else:
    raise ValueError('Unknown backbone: {}'.format(args.backbone))
  if args.train:
    # 255 marks ignored/unlabeled pixels in the segmentation masks
    criterion = nn.CrossEntropyLoss(ignore_index=255)
    model = nn.DataParallel(model).cuda()
    model.train()
    if args.freeze_bn:
      # keep BatchNorm running statistics and affine parameters fixed
      for m in model.modules():
        if isinstance(m, nn.BatchNorm2d):
          m.eval()
          m.weight.requires_grad = False
          m.bias.requires_grad = False
    # two parameter groups so the ASPP head can use a different learning rate
    backbone_params = (
        list(model.module.conv1.parameters()) +
        list(model.module.bn1.parameters()) +
        list(model.module.layer1.parameters()) +
        list(model.module.layer2.parameters()) +
        list(model.module.layer3.parameters()) +
        list(model.module.layer4.parameters()))
    last_params = list(model.module.aspp.parameters())
    optimizer = optim.SGD([
      {'params': filter(lambda p: p.requires_grad, backbone_params)},
      {'params': filter(lambda p: p.requires_grad, last_params)}],
      lr=args.base_lr, momentum=0.9, weight_decay=0.0001)
    dataset_loader = torch.utils.data.DataLoader(
        dataset, batch_size=args.batch_size, shuffle=args.train,
        pin_memory=True, num_workers=args.workers)
    max_iter = args.epochs * len(dataset_loader)
    losses = AverageMeter()
    start_epoch = 0
    if args.resume:
      if os.path.isfile(args.resume):
        print('=> loading checkpoint {0}'.format(args.resume))
        checkpoint = torch.load(args.resume)
        start_epoch = checkpoint['epoch']
        model.load_state_dict(checkpoint['state_dict'])
        optimizer.load_state_dict(checkpoint['optimizer'])
        print('=> loaded checkpoint {0} (epoch {1})'.format(
          args.resume, checkpoint['epoch']))
      else:
        print('=> no checkpoint found at {0}'.format(args.resume))
    for epoch in range(start_epoch, args.epochs):
      for i, (inputs, target) in enumerate(dataset_loader):
        cur_iter = epoch * len(dataset_loader) + i
        # 'poly' learning rate schedule decayed over the whole training run
        lr = args.base_lr * (1 - float(cur_iter) / max_iter) ** 0.9
        optimizer.param_groups[0]['lr'] = lr
        optimizer.param_groups[1]['lr'] = lr * args.last_mult
        inputs = Variable(inputs.cuda())
        target = Variable(target.cuda())
        outputs = model(inputs)
        loss = criterion(outputs, target)
        if np.isnan(loss.item()) or np.isinf(loss.item()):
          # drop into the debugger when the loss diverges
          pdb.set_trace()
        losses.update(loss.item(), args.batch_size)
        loss.backward()
        optimizer.step()
        optimizer.zero_grad()
        print('epoch: {0}\t'
              'iter: {1}/{2}\t'
              'lr: {3:.6f}\t'
              'loss: {loss.val:.4f} ({loss.ema:.4f})'.format(
              epoch + 1, i + 1, len(dataset_loader), lr, loss=losses))
      # save a checkpoint every 10 epochs
      if epoch % 10 == 9:
        torch.save({
          'epoch': epoch + 1,
          'state_dict': model.state_dict(),
          'optimizer': optimizer.state_dict(),
        }, model_fname % (epoch + 1))
  else:
    torch.cuda.set_device(args.gpu)
    model = model.cuda()
    model.eval()
    checkpoint = torch.load(model_fname % args.epochs)
    # strip the 'module.' prefix added by DataParallel and drop BN counters
    state_dict = {k[7:]: v for k, v in checkpoint['state_dict'].items() if 'tracked' not in k}
    model.load_state_dict(state_dict)
    cmap = loadmat('data/pascal_seg_colormap.mat')['colormap']
    cmap = (cmap * 255).astype(np.uint8).flatten().tolist()
    inter_meter = AverageMeter()
    union_meter = AverageMeter()
    for i in range(len(dataset)):
      inputs, target = dataset[i]
      inputs = Variable(inputs.cuda())
      with torch.no_grad():
        outputs = model(inputs.unsqueeze(0))
      _, pred = torch.max(outputs, 1)
      pred = pred.data.cpu().numpy().squeeze().astype(np.uint8)
      mask = target.numpy().astype(np.uint8)
      imname = dataset.masks[i].split('/')[-1]
      # save the colorized prediction for visual inspection
      mask_pred = Image.fromarray(pred)
      mask_pred.putpalette(cmap)
      mask_pred.save(os.path.join('data/val', imname))
      print('eval: {0}/{1}'.format(i + 1, len(dataset)))
      inter, union = inter_and_union(pred, mask, len(dataset.CLASSES))
      inter_meter.update(inter)
      union_meter.update(union)
    # per-class IoU = intersection / union accumulated over the whole set
    iou = inter_meter.sum / (union_meter.sum + 1e-10)
    for i, val in enumerate(iou):
      print('IoU {0}: {1:.2f}'.format(dataset.CLASSES[i], val * 100))
    print('Mean IoU: {0:.2f}'.format(iou.mean() * 100))


if __name__ == "__main__":
  main()
| 39.236842 | 94 | 0.634608 |
9c2e4dc2803537993439bb75d287acae571dcc0e | 4,587 | py | Python | example_code/GPS_example.py | M2I-HABET/ahac2020workshop | 7ebc6e2ba3bf10ef18881e96523adc5d54f8d62e | [
"MIT"
] | null | null | null | example_code/GPS_example.py | M2I-HABET/ahac2020workshop | 7ebc6e2ba3bf10ef18881e96523adc5d54f8d62e | [
"MIT"
] | null | null | null | example_code/GPS_example.py | M2I-HABET/ahac2020workshop | 7ebc6e2ba3bf10ef18881e96523adc5d54f8d62e | [
"MIT"
] | null | null | null | # Simple GPS module demonstration.
# Will wait for a fix and print a message every second with the current location
# and other details.
import time
import board
import busio
import adafruit_gps

# Create a serial connection for the GPS connection using default speed and
# a slightly higher timeout (GPS modules typically update once a second).
# These are the defaults you should use for the GPS FeatherWing.
# For other boards set RX = GPS module TX, and TX = GPS module RX pins.
uart = busio.UART(board.TX, board.RX, baudrate=9600, timeout=10)
# for a computer, use the pyserial library for uart access
# import serial
# uart = serial.Serial("/dev/ttyUSB0", baudrate=9600, timeout=10)
# If using I2C, we'll create an I2C interface to talk to using default pins
# i2c = board.I2C()
# Create a GPS module instance.
gps = adafruit_gps.GPS(uart, debug=False)  # Use UART/pyserial
# gps = adafruit_gps.GPS_GtopI2C(i2c, debug=False)  # Use I2C interface
# Initialize the GPS module by changing what data it sends and at what rate.
# These are NMEA extensions for PMTK_314_SET_NMEA_OUTPUT and
# PMTK_220_SET_NMEA_UPDATERATE but you can send anything from here to adjust
# the GPS module behavior:
# https://cdn-shop.adafruit.com/datasheets/PMTK_A11.pdf
# Turn on the basic GGA and RMC info (what you typically want)
gps.send_command(b"PMTK314,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0")
# Turn on just minimum info (RMC only, location):
# gps.send_command(b'PMTK314,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0')
# Turn off everything:
# gps.send_command(b'PMTK314,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0')
# Turn on everything (not all of it is parsed!)
# gps.send_command(b'PMTK314,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0')
# Set update rate to once a second (1hz) which is what you typically want.
gps.send_command(b"PMTK220,1000")
# Or decrease to once every two seconds by doubling the millisecond value.
# Be sure to also increase your UART timeout above!
# gps.send_command(b'PMTK220,2000')
# You can also speed up the rate, but don't go too fast or else you can lose
# data during parsing. This would be twice a second (2hz, 500ms delay):
# gps.send_command(b'PMTK220,500')
# Main loop runs forever printing the location, etc. every second.
last_print = time.monotonic()
while True:
    # Make sure to call gps.update() every loop iteration and at least twice
    # as fast as data comes from the GPS unit (usually every second).
    # This returns a bool that's true if it parsed new data (you can ignore it
    # though if you don't care and instead look at the has_fix property).
    gps.update()
    # Every second print out current location details if there's a fix.
    current = time.monotonic()
    if current - last_print >= 1.0:
        last_print = current
        if not gps.has_fix:
            # Try again if we don't have a fix yet.
            print("Waiting for fix...")
            continue
        # We have a fix! (gps.has_fix is true)
        # Print out details about the fix like location, date, etc.
        print("=" * 40)  # Print a separator line.
        print(
            "Fix timestamp: {}/{}/{} {:02}:{:02}:{:02}".format(
                gps.timestamp_utc.tm_mon,  # Grab parts of the time from the
                gps.timestamp_utc.tm_mday,  # struct_time object that holds
                gps.timestamp_utc.tm_year,  # the fix time.  Note you might
                gps.timestamp_utc.tm_hour,  # not get all data like year, day,
                gps.timestamp_utc.tm_min,  # month!
                gps.timestamp_utc.tm_sec,
            )
        )
        print("Latitude: {0:.6f} degrees".format(gps.latitude))
        print("Longitude: {0:.6f} degrees".format(gps.longitude))
        print("Fix quality: {}".format(gps.fix_quality))
        # Some attributes beyond latitude, longitude and timestamp are optional
        # and might not be present.  Check if they're None before trying to use!
        if gps.satellites is not None:
            print("# satellites: {}".format(gps.satellites))
        if gps.altitude_m is not None:
            print("Altitude: {} meters".format(gps.altitude_m))
        if gps.speed_knots is not None:
            print("Speed: {} knots".format(gps.speed_knots))
        if gps.track_angle_deg is not None:
            print("Track angle: {} degrees".format(gps.track_angle_deg))
        if gps.horizontal_dilution is not None:
            print("Horizontal dilution: {}".format(gps.horizontal_dilution))
        if gps.height_geoid is not None:
            print("Height geo ID: {} meters".format(gps.height_geoid))
| 47.28866 | 80 | 0.676041 |
8beaf7c14e08311fc884bc653cc89f7b581c5a2a | 8,577 | py | Python | mbrl/util/logger.py | pecey/mbrl-lib | ebca518b35a1370dbaede2a1c96fcde714bc5489 | [
"MIT"
] | 592 | 2021-04-20T04:30:18.000Z | 2022-03-30T13:34:54.000Z | mbrl/util/logger.py | pecey/mbrl-lib | ebca518b35a1370dbaede2a1c96fcde714bc5489 | [
"MIT"
] | 57 | 2021-04-21T17:20:05.000Z | 2022-03-28T15:31:45.000Z | mbrl/util/logger.py | pecey/mbrl-lib | ebca518b35a1370dbaede2a1c96fcde714bc5489 | [
"MIT"
] | 76 | 2021-04-20T15:50:14.000Z | 2022-03-25T19:05:25.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import collections
import csv
import pathlib
from typing import Counter, Dict, List, Mapping, Tuple, Union
import termcolor
import torch
# A log format is a list of (csv_column_name, console_shortcut, value_type)
# triples; value_type is one of "int", "float", "time" (see MetersGroup._format).
LogFormatType = List[Tuple[str, str, str]]
# Value types accepted by Logger.log_data; tensors are converted via .item().
LogTypes = Union[int, float, torch.Tensor]
# Default format for evaluation episodes (back-compat "eval" group).
EVAL_LOG_FORMAT = [
    ("episode", "E", "int"),
    ("model_reward", "MR", "float"),
]
# Default format for SAC training iterations (back-compat "train" group).
SAC_TRAIN_LOG_FORMAT = [
    ("step", "S", "int"),
    ("batch_reward", "BR", "float"),
    ("actor_loss", "ALOSS", "float"),
    ("critic_loss", "CLOSS", "float"),
    ("alpha_loss", "TLOSS", "float"),
    ("alpha_value", "TVAL", "float"),
    ("actor_entropy", "AENT", "float"),
]
class AverageMeter(object):
    """Accumulates a running sum and a sample count, reporting their mean."""

    def __init__(self):
        self._sum = 0.0  # running total of all logged values
        self._count = 0  # number of samples seen so far

    def update(self, value: float, n: int = 1):
        """Fold ``value`` into the running total, counting it as ``n`` samples."""
        self._sum = self._sum + value
        self._count = self._count + n

    def value(self) -> float:
        """Mean of everything logged so far; 0.0 before any update."""
        return self._sum / max(1, self._count)
class MetersGroup(object):
    """A named collection of AverageMeters that dumps to a CSV file and console."""

    def __init__(self, file_name: Union[str, pathlib.Path], formatting: LogFormatType):
        self._csv_file_path = self._prepare_file(file_name, ".csv")
        self._formatting = formatting
        self._meters: Dict[str, AverageMeter] = collections.defaultdict(AverageMeter)
        self._csv_file = open(self._csv_file_path, "w")
        self._csv_writer = None  # created lazily on the first CSV dump

    @staticmethod
    def _prepare_file(prefix: Union[str, pathlib.Path], suffix: str) -> pathlib.Path:
        """Return ``prefix`` with ``suffix``, removing any stale existing file."""
        path = pathlib.Path(prefix).with_suffix(suffix)
        if path.exists():
            path.unlink()
        return path

    def log(self, key: str, value: float):
        """Fold ``value`` into the meter registered under ``key``."""
        self._meters[key].update(value)

    def _dump_to_csv(self, data):
        # The writer (and header row) is created on first use so the column
        # set can be derived from the first data dictionary.
        if self._csv_writer is None:
            self._csv_writer = csv.DictWriter(
                self._csv_file, fieldnames=sorted(data.keys()), restval=0.0
            )
            self._csv_writer.writeheader()
        self._csv_writer.writerow(data)
        self._csv_file.flush()

    @staticmethod
    def _format(key: str, value: float, format_type: str):
        """Render one key/value pair for console output."""
        if format_type == "int":
            value = int(value)
            return f"{key}: {value}"
        if format_type == "float":
            return f"{key}: {value:.04f}"
        if format_type == "time":
            return f"{key}: {value:04.1f} s"
        raise ValueError(f"Invalid format type: {format_type}")

    def _dump_to_console(self, data, prefix: str, color: str = "yellow"):
        # The colored prefix is padded after coloring, matching the original layout.
        pieces = [f"| {termcolor.colored(prefix, color): <14}"]
        for key, disp_key, ty in self._formatting:
            pieces.append(self._format(disp_key, data.get(key, 0), ty))
        print(" | ".join(pieces))

    def dump(self, step: int, prefix: str, save: bool = True, color: str = "yellow"):
        """Write (when ``save``) and reset the averaged values collected so far."""
        if not self._meters:
            return
        if save:
            data = {key: meter.value() for key, meter in self._meters.items()}
            data["step"] = step
            self._dump_to_csv(data)
            self._dump_to_console(data, prefix, color)
        self._meters.clear()
class Logger(object):
    """Light-weight csv logger.
    This logger is based on pytorch_sac's
    `logger <https://github.com/denisyarats/pytorch_sac/blob/master/logger.py>`_
    with some modifications and some of its features removed.
    To use this logger you must register logging groups using :meth:`register_group`. Each
    group will save data to a separate csv file, at `log_dir/<group_name>.csv`, and will
    output to console using its own dedicated tabular format.
    Args:
        log_dir (str or pathlib.Path): the directory where to save the logs.
        enable_back_compatible (bool, optional): if ``True``, this logger can be used in the
            methods in the `pytorch_sac` library. Defaults to ``False``.
    """
    def __init__(
        self, log_dir: Union[str, pathlib.Path], enable_back_compatible: bool = False
    ):
        self._log_dir = pathlib.Path(log_dir)
        # group name -> (meters group, dump frequency, console color)
        self._groups: Dict[str, Tuple[MetersGroup, int, str]] = {}
        # number of log_data calls per group; used for dump frequency and 'step'
        self._group_steps: Counter[str] = collections.Counter()
        if enable_back_compatible:
            # pre-register the two groups the pytorch_sac API expects
            self.register_group("train", SAC_TRAIN_LOG_FORMAT)
            self.register_group("eval", EVAL_LOG_FORMAT, color="green")
    def register_group(
        self,
        group_name: str,
        log_format: LogFormatType,
        dump_frequency: int = 1,
        color: str = "yellow",
    ):
        """Register a logging group.
        Args:
            group_name (str): the name assigned to the logging group.
            log_format (list of 3-tuples): each tuple contains 3 strings, representing
                (variable_name, shortcut, type), for a variable that the logger should keep
                track of in this group. The variable name will be used as a header in the csv file
                for the entries of this variable. The shortcut will be used as a header for
                the console output tabular format. The type should be one of
                "int", "float", "time".
            dump_frequency (int): how often (measured in calls to :meth:`log_data`)
                should the logger dump the data collected since the last call. If
                ``dump_frequency > 1``, then the data collected between calls is averaged.
            color (str): a color to use for this group in the console.
        """
        if group_name in self._groups:
            # registering twice is tolerated (warn-and-ignore, no exception)
            print(f"Group {group_name} has already been registered.")
            return
        new_group = MetersGroup(self._log_dir / group_name, formatting=log_format)
        self._groups[group_name] = (new_group, dump_frequency, color)
        self._group_steps[group_name] = 0
    def log_histogram(self, *_args):
        # No-op: kept for interface compatibility with pytorch_sac's logger.
        pass
    def log_param(self, *_args):
        # No-op: kept for interface compatibility with pytorch_sac's logger.
        pass
    def log_data(self, group_name: str, data: Mapping[str, LogTypes]):
        """Logs the data contained in a given dictionary to the given logging group.
        Args:
            group_name (str): the name of the logging group to use. It must have been registered
                already, otherwise an exception will be thrown.
            data (mapping str->(int/float/torch.Tensor)): the dictionary with the data. Each
                keyword must be a variable name in the log format passed when creating this group.
        """
        if group_name not in self._groups:
            raise ValueError(f"Group {group_name} has not been registered.")
        meter_group, dump_frequency, color = self._groups[group_name]
        for key, value in data.items():
            if isinstance(value, torch.Tensor):
                value = value.item()  # type: ignore
            meter_group.log(key, value)
        self._group_steps[group_name] += 1
        # dump (and reset) the averaged values every `dump_frequency` calls
        if self._group_steps[group_name] % dump_frequency == 0:
            self._dump(group_name)
    def _dump(self, group_name: str, save: bool = True):
        """Flush the given group's accumulated values (to csv/console when ``save``)."""
        if group_name not in self._groups:
            raise ValueError(f"Group {group_name} has not been registered.")
        meter_group, dump_frequency, color = self._groups[group_name]
        meter_group.dump(self._group_steps[group_name], group_name, save, color=color)
    # ----------------------------------------------------------- #
    # These methods are here for backward compatibility with pytorch_sac
    @staticmethod
    def _split_group_and_key(group_and_key: str) -> Tuple[str, str]:
        # Accepts pytorch_sac-style keys like "train/actor_loss" or
        # "eval/episode_reward" and splits them into (group, key).
        assert group_and_key.startswith("train") or group_and_key.startswith("eval")
        if group_and_key.startswith("train"):
            key = f"{group_and_key[len('train') + 1:]}"
            group_name = "train"
        else:
            key = f"{group_and_key[len('eval') + 1:]}"
            group_name = "eval"
        key = key.replace("/", "_")
        return group_name, key
    def log(self, group_and_key: str, value: LogTypes, _step: int):
        # pytorch_sac-style logging: the group is encoded in the key prefix;
        # _step is accepted for signature compatibility but unused here.
        group_name, key = self._split_group_and_key(group_and_key)
        if isinstance(value, torch.Tensor):
            value = value.item()  # type: ignore
        meter_group, *_ = self._groups[group_name]
        meter_group.log(key, value)
    def dump(self, step, save=True):
        # pytorch_sac-style dump: flush both back-compatible groups.
        for group_name in ["train", "eval"]:
            meter_group, _, color = self._groups[group_name]
            meter_group.dump(step, group_name, save, color=color)
| 38.809955 | 98 | 0.62003 |
24ad7f30f93aa24129b1cf88fa6cd9f7377fecc0 | 3,123 | py | Python | bytecash/wallet/puzzles/load_clvm.py | konarshankar07/bytecash-blockchain | 1c1fab19664e7777b75be1a60facb8a454341443 | [
"Apache-2.0"
] | null | null | null | bytecash/wallet/puzzles/load_clvm.py | konarshankar07/bytecash-blockchain | 1c1fab19664e7777b75be1a60facb8a454341443 | [
"Apache-2.0"
] | null | null | null | bytecash/wallet/puzzles/load_clvm.py | konarshankar07/bytecash-blockchain | 1c1fab19664e7777b75be1a60facb8a454341443 | [
"Apache-2.0"
] | null | null | null | import importlib
import inspect
import os
import pathlib
import pkg_resources
from clvm_tools.clvmc import compile_clvm as compile_clvm_py
from bytecash.types.blockchain_format.program import Program, SerializedProgram
compile_clvm = compile_clvm_py
# Handle optional use of clvm_tools_rs if available and requested
if "CLVM_TOOLS_RS" in os.environ:
    try:

        def sha256file(f):
            """Return the hex SHA-256 digest of a text file's contents."""
            import hashlib

            m = hashlib.sha256()
            # Use a context manager so the file handle is closed promptly
            # instead of being leaked until garbage collection.
            with open(f) as file_obj:
                m.update(file_obj.read().encode("utf8"))
            return m.hexdigest()

        from clvm_tools_rs import compile_clvm as compile_clvm_rs

        def translate_path(p_):
            """Resolve ``p_`` to a directory: either a literal directory path
            or the directory containing the importable module of that name."""
            p = str(p_)
            if os.path.isdir(p):
                return p
            else:
                module_object = importlib.import_module(p)
                return os.path.dirname(inspect.getfile(module_object))

        def rust_compile_clvm(full_path, output, search_paths=None):
            """Compile with the Rust compiler; when CLVM_TOOLS_RS == "check",
            also compile with the Python compiler and assert both outputs match."""
            # Avoid a mutable default argument; None means "no include paths".
            if search_paths is None:
                search_paths = []
            treated_include_paths = list(map(translate_path, search_paths))
            print("compile_clvm_rs", full_path, output, treated_include_paths)
            compile_clvm_rs(str(full_path), str(output), treated_include_paths)
            if os.environ["CLVM_TOOLS_RS"] == "check":
                orig = str(output) + ".orig"
                compile_clvm_py(full_path, orig, search_paths=search_paths)
                orig256 = sha256file(orig)
                rs256 = sha256file(output)
                if orig256 != rs256:
                    print("Compiled %s: %s vs %s\n" % (full_path, orig256, rs256))
                    print("Aborting compilation due to mismatch with rust")
                    assert orig256 == rs256
        compile_clvm = rust_compile_clvm
    # NOTE(review): `finally: pass` does NOT swallow an ImportError from
    # clvm_tools_rs -- a missing package crashes the import when CLVM_TOOLS_RS
    # is set. Presumably intentional (fail loudly when the rust path was
    # explicitly requested); confirm, otherwise use `except ImportError: pass`.
    finally:
        pass
def load_serialized_clvm(clvm_filename, package_or_requirement=__name__) -> SerializedProgram:
    """
    This function takes a .clvm file in the given package and compiles it to a
    .clvm.hex file if the .hex file is missing or older than the .clvm file, then
    returns the contents of the .hex file as a `Program`.
    clvm_filename: file name
    package_or_requirement: usually `__name__` if the clvm file is in the same package
    """
    hex_filename = f"{clvm_filename}.hex"
    try:
        if pkg_resources.resource_exists(package_or_requirement, clvm_filename):
            # Compile next to the source so the .hex stays in sync with the .clvm.
            # NOTE(review): compilation runs whenever the source exists; the
            # staleness check mentioned in the docstring is presumably inside
            # compile_clvm itself -- confirm.
            full_path = pathlib.Path(pkg_resources.resource_filename(package_or_requirement, clvm_filename))
            output = full_path.parent / hex_filename
            compile_clvm(full_path, output, search_paths=[full_path.parent])
    except NotImplementedError:
        # pyinstaller doesn't support `pkg_resources.resource_exists`
        # so we just fall through to loading the hex clvm
        pass
    clvm_hex = pkg_resources.resource_string(package_or_requirement, hex_filename).decode("utf8")
    clvm_blob = bytes.fromhex(clvm_hex)
    return SerializedProgram.from_bytes(clvm_blob)
def load_clvm(clvm_filename, package_or_requirement=__name__) -> Program:
    """Like load_serialized_clvm, but returns a fully parsed Program."""
    serialized = load_serialized_clvm(clvm_filename, package_or_requirement=package_or_requirement)
    return Program.from_bytes(bytes(serialized))
| 37.626506 | 120 | 0.677874 |
afcc50bc4bf1c81e9c45cdc5db1a8e350b081493 | 573 | py | Python | abnum.py | liguangyulgy/mytest1 | 08133d04881ec94df49093aba94baa31e30ffb9b | [
"BSD-2-Clause"
] | null | null | null | abnum.py | liguangyulgy/mytest1 | 08133d04881ec94df49093aba94baa31e30ffb9b | [
"BSD-2-Clause"
] | null | null | null | abnum.py | liguangyulgy/mytest1 | 08133d04881ec94df49093aba94baa31e30ffb9b | [
"BSD-2-Clause"
] | null | null | null | class AbNum:
def __init__(self, state):
self.a, self.right = state.split('==', 2)
self.left = self.a.split('+')
self.right = self.right.strip()
self.left = [a.strip() for a in self.left]
if len(self.left) < 1 or len(self.right) < 1:
raise Exception
def show(self):
print('left', self.left, 'right', self.right)
def check(self, left, right):
sum = 0
for n in left:
sum += int(n)
if sum == int(right):
return True
else:
return False
| 27.285714 | 53 | 0.504363 |
48da5729e47f8fe9893fa9387e21ce7c03000304 | 1,225 | py | Python | util/chplenv/chpl_re2.py | strikeraryu/chapel | fe002d61b56d9584d8205c06af6a3162434ae136 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2022-03-30T18:06:23.000Z | 2022-03-30T18:06:23.000Z | util/chplenv/chpl_re2.py | strikeraryu/chapel | fe002d61b56d9584d8205c06af6a3162434ae136 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | util/chplenv/chpl_re2.py | strikeraryu/chapel | fe002d61b56d9584d8205c06af6a3162434ae136 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import os
import sys
import chpl_compiler, chpl_platform, overrides, third_party_utils
from chpl_home_utils import get_chpl_third_party
from utils import memoize
@memoize
def get():
    """Return the CHPL_RE2 setting: 'bundled' or 'none'.

    Precedence: explicit CHPL_RE2 override, then the deprecated CHPL_REGEXP
    override, then autodetection based on whether the bundled re2 header is
    installed under third-party.
    """
    re2 = overrides.get('CHPL_RE2')
    regexp = overrides.get('CHPL_REGEXP')
    if regexp:
        # Deprecated variable: translate it (unless CHPL_RE2 already wins)
        # and point the user at the new spelling.
        re2 = re2 or ('bundled' if regexp == 're2' else 'none')
        sys.stderr.write("Warning: CHPL_REGEXP is deprecated.\n"
                         "Set CHPL_RE2 to 'bundled' or 'none'.\n"
                         "Assuming CHPL_RE2={}.\n".format(re2))
        return re2
    if re2:
        return re2
    # No overrides at all: probe for the bundled header on disk.
    re2_header = os.path.join(get_chpl_third_party(), 're2',
                              'install', get_uniq_cfg_path(),
                              'include', 're2', 're2.h')
    return 'bundled' if os.path.exists(re2_header) else 'none'
@memoize
def get_uniq_cfg_path():
    """Return the configuration-specific subdirectory used for re2 builds."""
    cfg_path = third_party_utils.default_uniq_cfg_path()
    return cfg_path
@memoize
def get_link_args():
    """Return linker arguments needed when linking against bundled re2."""
    re2_libs = ['-lre2', '-lpthread']
    return third_party_utils.default_get_link_args('re2', libs=re2_libs)
def _main():
    """Print the computed CHPL_RE2 value (command-line entry point)."""
    sys.stdout.write(f"{get()}\n")
# Allow the build scripts to run this module directly to print the value.
if __name__ == '__main__':
    _main()
| 27.222222 | 79 | 0.588571 |
33b1a8485775448fcac138360bdcc1e6ab7e2dd0 | 280 | py | Python | plusGitRepo/__init__.py | edina/plusGitRepo | 78d33dfb85f5c665445485659cc36915b70e8371 | [
"BSD-3-Clause"
] | null | null | null | plusGitRepo/__init__.py | edina/plusGitRepo | 78d33dfb85f5c665445485659cc36915b70e8371 | [
"BSD-3-Clause"
] | 2 | 2022-01-24T11:48:10.000Z | 2022-01-24T14:24:06.000Z | plusGitRepo/__init__.py | edina/plusGitRepo | 78d33dfb85f5c665445485659cc36915b70e8371 | [
"BSD-3-Clause"
] | null | null | null | # This is called by the notebook
def _jupyter_nbextension_paths():
return [{
'section': 'tree',
'src': 'main',
'dest': 'plusGitRepo',
'require': 'plusGitRepo/index'
}]
def load_jupyter_server_extension(nbapp):
    """Entry point called by the notebook server when this extension loads."""
    # NOTE(review): only grabs the web app and registers nothing — looks like
    # a stub, or the handler-registration code was lost; confirm intent.
    web_app = nbapp.web_app
| 21.538462 | 41 | 0.614286 |
e16122ba46cdf6c0d38ffe4b84dc35ef3a69540c | 7,573 | py | Python | visualizing_data/orientation_and_rotation_plots.py | eliweissler/Colin_Eli_Kai_Math_178_Final | fbb10a6a424927dccc94ea219903d15dcca0ecf8 | [
"Unlicense"
] | null | null | null | visualizing_data/orientation_and_rotation_plots.py | eliweissler/Colin_Eli_Kai_Math_178_Final | fbb10a6a424927dccc94ea219903d15dcca0ecf8 | [
"Unlicense"
] | null | null | null | visualizing_data/orientation_and_rotation_plots.py | eliweissler/Colin_Eli_Kai_Math_178_Final | fbb10a6a424927dccc94ea219903d15dcca0ecf8 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun May 31 13:43:41 2020
@author: Eli
"""
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
from pyquaternion import Quaternion
from scipy.ndimage import gaussian_filter
from scipy.spatial.transform import Rotation as R
import numpy as np
import scipy
import matplotlib
from classifiers.quaternions import PCA_rotate_data,rotate_to_zero,rotate_quat
from classifiers.rotateByGyro import rotateFeatMats
from featMatHelpers import getAcc, getYPR
import os
matplotlib.rc('font', size=12)
def normalize_rows(xyz_data):
    """Return a copy of an n-by-3 array with each row rescaled to unit length.

    A zero-length row divides by zero (inf/nan with a runtime warning), the
    same as the original row-by-row implementation.
    """
    row_norms = np.linalg.norm(xyz_data, axis=1, keepdims=True)
    return xyz_data / row_norms
def plot_on_sphere(xyz_data, normalize=False):
    """Scatter each row of an n-by-3 array on (or around) a unit sphere.

    xyz_data: n-by-3 numpy array of points.
    normalize: if True, rescale each row to unit length first so every
        point lies exactly on the sphere's surface.

    Returns the matplotlib (figure, 3d-axes) pair for further styling.
    """
    #make 3d figure
    fig = plt.figure()
    ax = plt.axes(projection='3d')
    #build up a sphere
    # complex step counts (200j/100j) make mgrid sample the azimuth/polar
    # ranges with 200 and 100 evenly spaced points respectively
    u, v = np.mgrid[0:2*np.pi:200j, 0:np.pi:100j]
    x = np.cos(u)*np.sin(v)
    y = np.sin(u)*np.sin(v)
    z = np.cos(v)
    #plot the sphere
    ax.plot_surface(x, y, z, color="k", alpha = .1)
    #normalize data then plot it
    if normalize:
        xyz_normalized = normalize_rows(xyz_data)
        ax.scatter3D(xyz_normalized[:,0], xyz_normalized[:,1], xyz_normalized[:,2],s=0.1)
    else:
        ax.scatter3D(xyz_data[:,0], xyz_data[:,1], xyz_data[:,2],s=0.1)
    return fig, ax
def plot_adjusted_vs_unadjusted(featVec, savePath, n_entries=256,freq=50,nonlinear=False):
    """Plot raw vs. gyro-rotated vs. rotated-and-aligned acceleration.

    featVec: single-row DataFrame (e.g. ``FeatMat.iloc[[i]]``) holding one
        feature vector; its 'label' column names the activity.
    savePath: directory the two PNG figures are written into.
    n_entries: number of samples in the window.
    freq: sample rate in Hz, used to build the time axis.
    nonlinear: unused in this body — presumably kept for API symmetry; confirm.

    Side effects only (saves '<act>_all.png' and '<act>_rotatedVectors.png');
    returns None.
    """
    #create the time axis
    times = np.arange(n_entries)*(1/freq)
    #rotate and align
    aligned, rotated, new_coords = rotateFeatMats(featVec, savePath=None, fname=None, featLen = 256)
    new_coords = new_coords[0]
    acc = getAcc(featVec)
    acc_rotated = rotated.reshape(n_entries, 3)
    acc_align = getAcc(aligned)
    # obtain save info:
    act = featVec.label.values[0]
    # three stacked panels: raw, rotated, rotated-and-aligned
    f, ax = plt.subplots(nrows=3,ncols=1, figsize=(25,15) )
    ax[0].plot(times,acc)
    ax[0].legend(['a_x', 'a_y', 'a_z'])
    ax[0].set_xlabel('time (s)')
    ax[0].set_ylabel('acceleration (m/s^2)')
    ax[0].set_title('un-adjusted')
    ax[1].plot(times,acc_rotated)
    ax[1].legend(['a_x', 'a_y', 'a_z'])
    ax[1].set_xlabel('time (s)')
    ax[1].set_ylabel('acceleration (m/s^2)')
    ax[1].set_title('rotated')
    ax[2].plot(times,acc_align)
    ax[2].legend(['a_x', 'a_y', 'a_z'])
    ax[2].set_xlabel('time (s)')
    ax[2].set_ylabel('acceleration (m/s^2)')
    ax[2].set_title('rotated and aligned')
    f.tight_layout(pad=2)
    plt.savefig(os.path.join(savePath, act + '_all.png'))
    # 3D scatter of the rotated samples with the new basis drawn at the mean
    plt.figure()
    ax = plt.axes(projection='3d')
    ax.scatter3D(acc_rotated[:,0], acc_rotated[:,1], acc_rotated[:,2],s=0.5)
    ax.set_xlabel('x')
    ax.set_ylabel('y')
    ax.set_zlabel('z')
    mean_a = np.mean(acc_rotated, axis=0)
    ax.quiver(mean_a[0],mean_a[1],mean_a[2],new_coords[0][0],new_coords[0][1],new_coords[0][2], color = 'r')
    ax.quiver(mean_a[0],mean_a[1],mean_a[2],new_coords[1][0],new_coords[1][1],new_coords[1][2], color = 'b')
    ax.quiver(mean_a[0],mean_a[1],mean_a[2],new_coords[2][0],new_coords[2][1],new_coords[2][2], color = 'g')
    ax.set_title('rotated')
    plt.savefig(os.path.join(savePath, act + '_rotatedVectors.png'))
    return
def plot_activity_heading(featMat,activity, savePath, n_entries=256, downsample = True):
    """Visualize phone heading/orientation on a unit sphere for one activity.

    For each observation the basis vectors x, y, z are rotated by the
    observed roll/pitch/yaw quaternion; the combined heading vector
    (x+y+z)/sqrt(3) and the rotated z axis are each scattered on a unit
    sphere and saved as '<activity>.png' and '<activity>_z.png' in savePath.

    featMat: feature matrix with a 'label' column plus yaw/pitch/roll data.
    activity: label key ('wlk', 'sit', 'std', 'ups', 'dws', 'jog').
    downsample: keep every 50th row so plotting stays tractable.

    Returns
    -------
    (phi, theta): per-observation spherical angles (degrees) of the rotated
    z axis.  (An earlier docstring said None, but the code returns these.)
    """
    # dictionary to translate from activity
    d = {'wlk': 'Walking', 'sit': 'Sitting', 'std': 'Standing', 'ups': 'Walking Up Stairs',
         'dws': 'Walking Down Stairs', 'jog': 'Jogging'}
    # grab the activity of interest
    featMat = featMat[featMat['label']==activity]
    # downsample the data so it doesn't take forever to run
    if downsample:
        featMat = featMat.loc[::50,:]
    gyro = getYPR(featMat)
    trans = R.from_euler('zxy',gyro).as_quat() #returns the scalar last
    trans = [Quaternion(imaginary = x[:-1], real = x[-1]) for x in trans]
    # rotate each basis vector by every observation's quaternion
    x = np.array([rotate_quat(Quaternion(imaginary = [1,0,0]), rot).imaginary for rot in trans])
    y = np.array([rotate_quat(Quaternion(imaginary = [0,1,0]), rot).imaginary for rot in trans])
    z = np.array([rotate_quat(Quaternion(imaginary = [0,0,1]), rot).imaginary for rot in trans])
    # calculate mean / stdev of angles
    # we know that r = 1, as the data is normalized.
    phi = np.rad2deg(np.arctan2(z[:,1], z[:,0]))
    theta = np.rad2deg(np.arccos(z[:,2]))
    heading_vec = (x+y+z)/np.sqrt(3)
    fig, ax = plot_on_sphere(heading_vec, normalize=False)
    ax.set_title('Activity: ' + d[activity])
    ax.set_xlabel('X', labelpad=10)
    ax.set_ylabel('Y', labelpad=10)
    ax.set_zlabel('Z', labelpad=10)
    plt.savefig(os.path.join(savePath, activity + '.png'))
    fig, ax = plot_on_sphere(z, normalize=False)
    ax.set_title('Activity: ' + d[activity])
    ax.set_xlabel('X', labelpad=10)
    ax.set_ylabel('Y', labelpad=10)
    ax.set_zlabel('Z', labelpad=10)
    plt.savefig(os.path.join(savePath, activity + '_z.png'))
    return phi, theta
if __name__ == '__main__':
    # NOTE(review): hard-coded local paths — adjust before running elsewhere.
    motionSense = '/Users/kaikaneshina/Documents/MATH178/project_data/featMat256/MotionSense_FeatMat_256.csv'
    mobiAct = '/Users/kaikaneshina/Documents/MATH178/project_data/featMat256/mobiAct_FeatMat_256.csv'
    savePath = '/Users/kaikaneshina/Documents/MATH178/project_data/results/256/motionSense'
    df = pd.read_csv(motionSense)
    # Earlier per-user heading/angle analysis, kept disabled for reference:
    # users = df.user.unique()[:5]
    # for u in users:
    #     dfSub = df[df['user']==u]
    #     savePathSub = os.path.join(savePath,u)
    #     try: os.mkdir(savePathSub)
    #     except: pass
    #     phiList = []
    #     thetaList = []
    #     activities = dfSub.label.unique()
    #     for act in activities:
    #         phi, theta = plot_activity_heading(dfSub,act, savePathSub)
    #         phiList.append( [np.mean(phi), np.std(phi)])
    #         thetaList.append( [np.mean(theta), np.std(theta)])
    #     theta = np.array(thetaList)
    #     phi = np.array(phiList)
    #     angles = pd.DataFrame()
    #     angles['activity'] = activities
    #     angles['theta mean'] = theta[:,0]
    #     angles['theta stdev'] = theta[:,1]
    #     angles['phi mean'] = phi[:,0]
    #     angles['phi stdev'] = phi[:,1]
    #     angles.to_csv(os.path.join(savePathSub, 'angles.csv'), index = False)
    activities = df.label.unique()
    savePath = os.path.join(savePath, 'rotation')
    # best-effort mkdir: the bare except swallows "directory already exists"
    try: os.mkdir(savePath)
    except: pass
    for act in activities:
        subDf = df[df['label']==act]
        featVect= subDf.iloc[0,:].to_frame().T
        plot_adjusted_vs_unadjusted(featVect, savePath)
| 30.413655 | 109 | 0.622475 |
85789f78c2f86fc4a1a8edf99e954cf2b24f6ba0 | 5,080 | py | Python | flex_bt_turtlebot_demo_bringup/launch/nav2.launch.py | FlexBE/flex_bt_turtlebot_demo | 215b24e3c2c2b80ef990676dca88781ad3f12178 | [
"BSD-3-Clause"
] | null | null | null | flex_bt_turtlebot_demo_bringup/launch/nav2.launch.py | FlexBE/flex_bt_turtlebot_demo | 215b24e3c2c2b80ef990676dca88781ad3f12178 | [
"BSD-3-Clause"
] | null | null | null | flex_bt_turtlebot_demo_bringup/launch/nav2.launch.py | FlexBE/flex_bt_turtlebot_demo | 215b24e3c2c2b80ef990676dca88781ad3f12178 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument, SetEnvironmentVariable
from launch.substitutions import LaunchConfiguration
from launch_ros.actions import Node
from nav2_common.launch import RewrittenYaml
def generate_launch_description():
    """Build the Nav2 bringup launch description.

    Declares the common launch arguments, rewrites the shared parameter YAML
    with launch-time substitutions, and starts the navigation servers plus a
    lifecycle manager that brings them up.
    """
    bringup_dir = get_package_share_directory('nav2_bringup')
    # Launch-time substitutions, resolved when the launch file executes.
    namespace = LaunchConfiguration('namespace')
    use_sim_time = LaunchConfiguration('use_sim_time')
    autostart = LaunchConfiguration('autostart')
    params_file = LaunchConfiguration('params_file')
    default_bt_xml_filename = LaunchConfiguration("default_bt_xml_filename")
    map_subscribe_transient_local = LaunchConfiguration('map_subscribe_transient_local')
    # Nodes the lifecycle manager is responsible for bringing up, in order.
    lifecycle_nodes = ['controller_server',
                       'planner_server',
                       'recoveries_server',
                       'behavior_tree_server',
                       'waypoint_follower']
    # Remap the absolute tf topics into the node namespace.
    remappings = [('/tf', 'tf'),
                  ('/tf_static', 'tf_static')]
    # Values substituted into the parameter YAML before it reaches the nodes.
    param_substitutions = {
        'use_sim_time': use_sim_time,
        'default_bt_xml_filename': default_bt_xml_filename,
        'autostart': autostart,
        'map_subscribe_transient_local': map_subscribe_transient_local}
    configured_params = RewrittenYaml(
        source_file=params_file,
        root_key=namespace,
        param_rewrites=param_substitutions,
        convert_types=True)
    return LaunchDescription([
        # Set env var to print messages to stdout immediately
        SetEnvironmentVariable('RCUTILS_LOGGING_BUFFERED_STREAM', '1'),
        # Launch arguments exposed to the caller.
        DeclareLaunchArgument(
            'namespace', default_value='',
            description='Top-level namespace'),
        DeclareLaunchArgument(
            'use_sim_time', default_value='false',
            description='Use simulation (Gazebo) clock if true'),
        DeclareLaunchArgument(
            'autostart', default_value='true',
            description='Automatically startup the nav2 stack'),
        DeclareLaunchArgument(
            'params_file',
            default_value=os.path.join(bringup_dir, 'params', 'nav2_params.yaml'),
            description='Full path to the ROS2 parameters file to use'),
        DeclareLaunchArgument(
            'default_bt_xml_filename',
            default_value=os.path.join(
                get_package_share_directory('flex_bt_turtlebot_demo_bringup'),
                'behavior_trees', 'nav2_navigate_to_pose.xml'),
            description='Full path to the behavior tree xml file to use'),
        DeclareLaunchArgument(
            'map_subscribe_transient_local', default_value='false',
            description='Whether to set the map subscriber QoS to transient local'),
        # Navigation servers.
        Node(
            package='nav2_controller',
            executable='controller_server',
            output='screen',
            parameters=[configured_params],
            remappings=remappings),
        Node(
            package='nav2_planner',
            executable='planner_server',
            name='planner_server',
            output='screen',
            parameters=[configured_params],
            remappings=remappings),
        Node(
            package='nav2_recoveries',
            executable='recoveries_server',
            name='recoveries_server',
            output='screen',
            parameters=[configured_params],
            remappings=remappings),
        # Flexible Behavior Tree server executing the configured BT xml.
        Node(
            package='flex_bt_server',
            executable='flex_bt_server_bt_server_executor_node',
            name='behavior_tree_server',
            output='screen',
            parameters=[configured_params,
                        {'default_bt_xml_filename': LaunchConfiguration('default_bt_xml_filename')}],
            remappings=remappings),
        Node(
            package='nav2_waypoint_follower',
            executable='waypoint_follower',
            name='waypoint_follower',
            output='screen',
            parameters=[configured_params],
            remappings=remappings),
        # Lifecycle manager transitions the nodes above to active.
        Node(
            package='nav2_lifecycle_manager',
            executable='lifecycle_manager',
            name='lifecycle_manager_navigation',
            output='screen',
            parameters=[{'use_sim_time': use_sim_time},
                        {'autostart': autostart},
                        {'node_names': lifecycle_nodes}]),
    ])
| 36.546763 | 101 | 0.646654 |
43a2f2142a91023b870ffc68bbe9eab62bb27431 | 3,070 | py | Python | map_label_tool/py_proto/modules/perception/lidar/lib/scene_manager/roi_service/proto/roi_service_pb2.py | freeclouds/OpenHDMap | b61c159fbdf4f50ae1d1650421596b28863f39be | [
"Apache-2.0"
] | null | null | null | map_label_tool/py_proto/modules/perception/lidar/lib/scene_manager/roi_service/proto/roi_service_pb2.py | freeclouds/OpenHDMap | b61c159fbdf4f50ae1d1650421596b28863f39be | [
"Apache-2.0"
] | null | null | null | map_label_tool/py_proto/modules/perception/lidar/lib/scene_manager/roi_service/proto/roi_service_pb2.py | freeclouds/OpenHDMap | b61c159fbdf4f50ae1d1650421596b28863f39be | [
"Apache-2.0"
] | 1 | 2021-05-26T08:42:11.000Z | 2021-05-26T08:42:11.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: modules/perception/lidar/lib/scene_manager/roi_service/proto/roi_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='modules/perception/lidar/lib/scene_manager/roi_service/proto/roi_service.proto',
package='apollo.perception.lidar',
syntax='proto2',
serialized_pb=_b('\nNmodules/perception/lidar/lib/scene_manager/roi_service/proto/roi_service.proto\x12\x17\x61pollo.perception.lidar\"?\n\x10ROIServiceConfig\x12\x12\n\x05range\x18\x01 \x01(\x01:\x03\x31\x32\x30\x12\x17\n\tcell_size\x18\x02 \x01(\x01:\x04\x30.25')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_ROISERVICECONFIG = _descriptor.Descriptor(
name='ROIServiceConfig',
full_name='apollo.perception.lidar.ROIServiceConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='range', full_name='apollo.perception.lidar.ROIServiceConfig.range', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(120),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='cell_size', full_name='apollo.perception.lidar.ROIServiceConfig.cell_size', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(0.25),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=107,
serialized_end=170,
)
DESCRIPTOR.message_types_by_name['ROIServiceConfig'] = _ROISERVICECONFIG
ROIServiceConfig = _reflection.GeneratedProtocolMessageType('ROIServiceConfig', (_message.Message,), dict(
DESCRIPTOR = _ROISERVICECONFIG,
__module__ = 'modules.perception.lidar.lib.scene_manager.roi_service.proto.roi_service_pb2'
# @@protoc_insertion_point(class_scope:apollo.perception.lidar.ROIServiceConfig)
))
_sym_db.RegisterMessage(ROIServiceConfig)
try:
# THESE ELEMENTS WILL BE DEPRECATED.
# Please use the generated *_pb2_grpc.py files instead.
import grpc
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
except ImportError:
pass
# @@protoc_insertion_point(module_scope)
| 35.287356 | 267 | 0.77785 |
d7e9491b8b8d473b3f65235794e42ba33fbb076b | 17,580 | py | Python | tests/contrib/hooks/test_bigquery_hook.py | robinedwards/incubator-airflow | f2738b085e1aa38e1795ddc21516fbe4c9d3dddc | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 2 | 2018-06-06T22:31:09.000Z | 2018-08-13T23:12:22.000Z | tests/contrib/hooks/test_bigquery_hook.py | robinedwards/incubator-airflow | f2738b085e1aa38e1795ddc21516fbe4c9d3dddc | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 13 | 2018-07-11T10:45:30.000Z | 2018-08-18T00:43:30.000Z | tests/contrib/hooks/test_bigquery_hook.py | robinedwards/incubator-airflow | f2738b085e1aa38e1795ddc21516fbe4c9d3dddc | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 5 | 2020-05-12T13:38:14.000Z | 2022-03-17T17:17:50.000Z | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
import warnings
from google.auth.exceptions import GoogleAuthError
import mock
from airflow.contrib.hooks import bigquery_hook as hook
from airflow.contrib.hooks.bigquery_hook import _cleanse_time_partitioning
bq_available = True
try:
hook.BigQueryHook().get_service()
except GoogleAuthError:
bq_available = False
class TestBigQueryDataframeResults(unittest.TestCase):
    """Integration tests for BigQueryHook.get_pandas_df.

    These hit the live BigQuery API, so each test is skipped when no
    credentials are available (see the module-level ``bq_available`` probe).
    """
    def setUp(self):
        self.instance = hook.BigQueryHook()
    @unittest.skipIf(not bq_available, 'BQ is not available to run tests')
    def test_output_is_dataframe_with_valid_query(self):
        import pandas as pd
        df = self.instance.get_pandas_df('select 1')
        self.assertIsInstance(df, pd.DataFrame)
    @unittest.skipIf(not bq_available, 'BQ is not available to run tests')
    def test_throws_exception_with_invalid_query(self):
        with self.assertRaises(Exception) as context:
            self.instance.get_pandas_df('from `1`')
        self.assertIn('Reason: ', str(context.exception), "")
    @unittest.skipIf(not bq_available, 'BQ is not available to run tests')
    def test_succeeds_with_explicit_legacy_query(self):
        df = self.instance.get_pandas_df('select 1', dialect='legacy')
        self.assertEqual(df.iloc(0)[0][0], 1)
    @unittest.skipIf(not bq_available, 'BQ is not available to run tests')
    def test_succeeds_with_explicit_std_query(self):
        # `except(...)` is standard-SQL-only syntax, so this proves the
        # dialect flag reached BigQuery.
        df = self.instance.get_pandas_df(
            'select * except(b) from (select 1 a, 2 b)', dialect='standard')
        self.assertEqual(df.iloc(0)[0][0], 1)
    @unittest.skipIf(not bq_available, 'BQ is not available to run tests')
    def test_throws_exception_with_incompatible_syntax(self):
        with self.assertRaises(Exception) as context:
            self.instance.get_pandas_df(
                'select * except(b) from (select 1 a, 2 b)', dialect='legacy')
        self.assertIn('Reason: ', str(context.exception), "")
class TestBigQueryTableSplitter(unittest.TestCase):
    """Unit tests for hook._split_tablename.

    Covers the accepted identifier spellings (``dataset.table``,
    ``project:dataset.table``, ``project.dataset.table``, projects that
    themselves contain a colon) and the error messages for malformed input,
    with and without the optional variable-name argument used in errors.
    """
    def test_internal_need_default_project(self):
        with self.assertRaises(Exception) as context:
            hook._split_tablename('dataset.table', None)
        self.assertIn('INTERNAL: No default project is specified',
                      str(context.exception), "")
    def test_split_dataset_table(self):
        project, dataset, table = hook._split_tablename('dataset.table',
                                                        'project')
        self.assertEqual("project", project)
        self.assertEqual("dataset", dataset)
        self.assertEqual("table", table)
    def test_split_project_dataset_table(self):
        project, dataset, table = hook._split_tablename('alternative:dataset.table',
                                                        'project')
        self.assertEqual("alternative", project)
        self.assertEqual("dataset", dataset)
        self.assertEqual("table", table)
    def test_sql_split_project_dataset_table(self):
        project, dataset, table = hook._split_tablename('alternative.dataset.table',
                                                        'project')
        self.assertEqual("alternative", project)
        self.assertEqual("dataset", dataset)
        self.assertEqual("table", table)
    def test_colon_in_project(self):
        project, dataset, table = hook._split_tablename('alt1:alt.dataset.table',
                                                        'project')
        self.assertEqual('alt1:alt', project)
        self.assertEqual("dataset", dataset)
        self.assertEqual("table", table)
    def test_valid_double_column(self):
        project, dataset, table = hook._split_tablename('alt1:alt:dataset.table',
                                                        'project')
        self.assertEqual('alt1:alt', project)
        self.assertEqual("dataset", dataset)
        self.assertEqual("table", table)
    def test_invalid_syntax_triple_colon(self):
        with self.assertRaises(Exception) as context:
            hook._split_tablename('alt1:alt2:alt3:dataset.table',
                                  'project')
        self.assertIn('Use either : or . to specify project',
                      str(context.exception), "")
        self.assertFalse('Format exception for' in str(context.exception))
    def test_invalid_syntax_triple_dot(self):
        with self.assertRaises(Exception) as context:
            hook._split_tablename('alt1.alt.dataset.table',
                                  'project')
        self.assertIn('Expect format of (<project.|<project:)<dataset>.<table>',
                      str(context.exception), "")
        self.assertFalse('Format exception for' in str(context.exception))
    def test_invalid_syntax_column_double_project_var(self):
        with self.assertRaises(Exception) as context:
            hook._split_tablename('alt1:alt2:alt.dataset.table',
                                  'project', 'var_x')
        self.assertIn('Use either : or . to specify project',
                      str(context.exception), "")
        self.assertIn('Format exception for var_x:',
                      str(context.exception), "")
    def test_invalid_syntax_triple_colon_project_var(self):
        with self.assertRaises(Exception) as context:
            hook._split_tablename('alt1:alt2:alt:dataset.table',
                                  'project', 'var_x')
        self.assertIn('Use either : or . to specify project',
                      str(context.exception), "")
        self.assertIn('Format exception for var_x:',
                      str(context.exception), "")
    def test_invalid_syntax_triple_dot_var(self):
        with self.assertRaises(Exception) as context:
            hook._split_tablename('alt1.alt.dataset.table',
                                  'project', 'var_x')
        self.assertIn('Expect format of (<project.|<project:)<dataset>.<table>',
                      str(context.exception), "")
        self.assertIn('Format exception for var_x:',
                      str(context.exception), "")
class TestBigQueryHookSourceFormat(unittest.TestCase):
    """run_load must reject source_format values outside the allowed set."""
    def test_invalid_source_format(self):
        with self.assertRaises(Exception) as context:
            hook.BigQueryBaseCursor("test", "test").run_load(
                "test.test", "test_schema.json", ["test_data.json"], source_format="json"
            )
        # since we passed 'json' in, and it's not valid, make sure it's present in the
        # error string.
        self.assertIn("JSON", str(context.exception))
class TestBigQueryExternalTableSourceFormat(unittest.TestCase):
    """create_external_table must reject invalid source_format values."""
    def test_invalid_source_format(self):
        with self.assertRaises(Exception) as context:
            hook.BigQueryBaseCursor("test", "test").create_external_table(
                external_project_dataset_table='test.test',
                schema_fields='test_schema.json',
                source_uris=['test_data.json'],
                source_format='json'
            )
        # since we passed 'json' in, and it's not valid, make sure it's present in the
        # error string.  (An earlier copy of this comment said 'csv'.)
        self.assertIn("JSON", str(context.exception))
# Helpers to test_cancel_queries that have mock_poll_job_complete returning false,
# unless mock_job_cancel was called with the same job_id
mock_canceled_jobs = []
def mock_poll_job_complete(job_id):
    # A job counts as "complete" only once its id was passed to mock_job_cancel.
    return job_id in mock_canceled_jobs
def mock_job_cancel(projectId, jobId):
    # Record the cancellation and hand back a stub API response object.
    mock_canceled_jobs.append(jobId)
    return mock.Mock()
class TestBigQueryBaseCursor(unittest.TestCase):
    """Unit tests for BigQueryBaseCursor argument validation, the `bql`
    deprecation path, query cancellation, and SQL-dialect handling."""
    def test_invalid_schema_update_options(self):
        with self.assertRaises(Exception) as context:
            hook.BigQueryBaseCursor("test", "test").run_load(
                "test.test",
                "test_schema.json",
                ["test_data.json"],
                schema_update_options=["THIS IS NOT VALID"]
            )
        self.assertIn("THIS IS NOT VALID", str(context.exception))
    @mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
    def test_bql_deprecation_warning(self, mock_rwc):
        # The deprecated `bql` kwarg must still work but emit a warning.
        with warnings.catch_warnings(record=True) as w:
            hook.BigQueryBaseCursor("test", "test").run_query(
                bql='select * from test_table'
            )
        self.assertIn(
            'Deprecated parameter `bql`',
            w[0].message.args[0])
    def test_nobql_nosql_param_error(self):
        with self.assertRaises(TypeError) as context:
            hook.BigQueryBaseCursor("test", "test").run_query(
                sql=None,
                bql=None
            )
        self.assertIn(
            'missing 1 required positional',
            str(context.exception))
    def test_invalid_schema_update_and_write_disposition(self):
        with self.assertRaises(Exception) as context:
            hook.BigQueryBaseCursor("test", "test").run_load(
                "test.test",
                "test_schema.json",
                ["test_data.json"],
                schema_update_options=['ALLOW_FIELD_ADDITION'],
                write_disposition='WRITE_EMPTY'
            )
        self.assertIn("schema_update_options is only", str(context.exception))
    @mock.patch("airflow.contrib.hooks.bigquery_hook.LoggingMixin")
    @mock.patch("airflow.contrib.hooks.bigquery_hook.time")
    def test_cancel_queries(self, mocked_time, mocked_logging):
        # Uses the module-level mock_job_cancel/mock_poll_job_complete helpers
        # so cancel_query() observes the job as complete only after cancelling.
        project_id = 12345
        running_job_id = 3
        mock_jobs = mock.Mock()
        mock_jobs.cancel = mock.Mock(side_effect=mock_job_cancel)
        mock_service = mock.Mock()
        mock_service.jobs = mock.Mock(return_value=mock_jobs)
        bq_hook = hook.BigQueryBaseCursor(mock_service, project_id)
        bq_hook.running_job_id = running_job_id
        bq_hook.poll_job_complete = mock.Mock(side_effect=mock_poll_job_complete)
        bq_hook.cancel_query()
        mock_jobs.cancel.assert_called_with(projectId=project_id, jobId=running_job_id)
    @mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
    def test_run_query_sql_dialect_default(self, run_with_config):
        cursor = hook.BigQueryBaseCursor(mock.Mock(), "project_id")
        cursor.run_query('query')
        args, kwargs = run_with_config.call_args
        self.assertIs(args[0]['query']['useLegacySql'], True)
    @mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
    def test_run_query_sql_dialect_override(self, run_with_config):
        for bool_val in [True, False]:
            cursor = hook.BigQueryBaseCursor(mock.Mock(), "project_id")
            cursor.run_query('query', use_legacy_sql=bool_val)
            args, kwargs = run_with_config.call_args
            self.assertIs(args[0]['query']['useLegacySql'], bool_val)
class TestLabelsInRunJob(unittest.TestCase):
    """run_query must forward the `labels` argument into the job config."""
    @mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
    def test_run_query_with_arg(self, mocked_rwc):
        project_id = 12345
        # side_effect inspects the configuration dict at call time.
        def run_with_config(config):
            self.assertEqual(
                config['labels'], {'label1': 'test1', 'label2': 'test2'}
            )
        mocked_rwc.side_effect = run_with_config
        bq_hook = hook.BigQueryBaseCursor(mock.Mock(), project_id)
        bq_hook.run_query(
            sql='select 1',
            destination_dataset_table='my_dataset.my_table',
            labels={'label1': 'test1', 'label2': 'test2'}
        )
        mocked_rwc.assert_called_once()
class TestTimePartitioningInRunJob(unittest.TestCase):
@mock.patch("airflow.contrib.hooks.bigquery_hook.LoggingMixin")
@mock.patch("airflow.contrib.hooks.bigquery_hook.time")
@mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
def test_run_load_default(self, mocked_rwc, mocked_time, mocked_logging):
project_id = 12345
def run_with_config(config):
self.assertIsNone(config['load'].get('timePartitioning'))
mocked_rwc.side_effect = run_with_config
bq_hook = hook.BigQueryBaseCursor(mock.Mock(), project_id)
bq_hook.run_load(
destination_project_dataset_table='my_dataset.my_table',
schema_fields=[],
source_uris=[],
)
mocked_rwc.assert_called_once()
@mock.patch("airflow.contrib.hooks.bigquery_hook.LoggingMixin")
@mock.patch("airflow.contrib.hooks.bigquery_hook.time")
@mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
def test_run_load_with_arg(self, mocked_rwc, mocked_time, mocked_logging):
project_id = 12345
def run_with_config(config):
self.assertEqual(
config['load']['timePartitioning'],
{
'field': 'test_field',
'type': 'DAY',
'expirationMs': 1000
}
)
mocked_rwc.side_effect = run_with_config
bq_hook = hook.BigQueryBaseCursor(mock.Mock(), project_id)
bq_hook.run_load(
destination_project_dataset_table='my_dataset.my_table',
schema_fields=[],
source_uris=[],
time_partitioning={'type': 'DAY', 'field': 'test_field', 'expirationMs': 1000}
)
mocked_rwc.assert_called_once()
@mock.patch("airflow.contrib.hooks.bigquery_hook.LoggingMixin")
@mock.patch("airflow.contrib.hooks.bigquery_hook.time")
@mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
def test_run_query_default(self, mocked_rwc, mocked_time, mocked_logging):
project_id = 12345
def run_with_config(config):
self.assertIsNone(config['query'].get('timePartitioning'))
mocked_rwc.side_effect = run_with_config
bq_hook = hook.BigQueryBaseCursor(mock.Mock(), project_id)
bq_hook.run_query(sql='select 1')
mocked_rwc.assert_called_once()
@mock.patch("airflow.contrib.hooks.bigquery_hook.LoggingMixin")
@mock.patch("airflow.contrib.hooks.bigquery_hook.time")
@mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
def test_run_query_with_arg(self, mocked_rwc, mocked_time, mocked_logging):
    """An explicit time_partitioning arg must be forwarded into the query config."""
    expected_partitioning = {
        'field': 'test_field',
        'type': 'DAY',
        'expirationMs': 1000,
    }

    def check_config(config):
        self.assertEqual(config['query']['timePartitioning'], expected_partitioning)

    mocked_rwc.side_effect = check_config
    cursor = hook.BigQueryBaseCursor(mock.Mock(), 12345)
    cursor.run_query(
        sql='select 1',
        destination_dataset_table='my_dataset.my_table',
        time_partitioning={'type': 'DAY',
                           'field': 'test_field', 'expirationMs': 1000}
    )
    mocked_rwc.assert_called_once()
def test_dollar_makes_partition(self):
    """A '$YYYYMMDD' suffix on the table name implies DAY partitioning."""
    tp_out = _cleanse_time_partitioning('test.teast$20170101', {})
    expect = {
        'type': 'DAY'
    }
    self.assertEqual(tp_out, expect)

def test_extra_time_partitioning_options(self):
    """Caller-supplied partitioning options pass through unchanged."""
    tp_out = _cleanse_time_partitioning(
        'test.teast',
        {'type': 'DAY', 'field': 'test_field', 'expirationMs': 1000}
    )
    expect = {
        'type': 'DAY',
        'field': 'test_field',
        'expirationMs': 1000
    }
    self.assertEqual(tp_out, expect)

def test_cant_add_dollar_and_field_name(self):
    """Combining a '$' partition suffix with a 'field' option must raise."""
    with self.assertRaises(ValueError):
        _cleanse_time_partitioning(
            'test.teast$20170101',
            {'type': 'DAY', 'field': 'test_field', 'expirationMs': 1000}
        )
class TestBigQueryHookLegacySql(unittest.TestCase):
    """Ensure `use_legacy_sql` param in `BigQueryHook` propagates properly."""

    @mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
    def test_hook_uses_legacy_sql_by_default(self, run_with_config):
        # With no argument, the hook must default to legacy SQL.
        with mock.patch.object(hook.BigQueryHook, 'get_service'):
            bq_hook = hook.BigQueryHook()
            bq_hook.get_first('query')
            args, kwargs = run_with_config.call_args
            self.assertIs(args[0]['query']['useLegacySql'], True)

    @mock.patch.object(hook.BigQueryBaseCursor, 'run_with_configuration')
    def test_legacy_sql_override_propagates_properly(self, run_with_config):
        # use_legacy_sql=False must reach the query configuration as-is.
        with mock.patch.object(hook.BigQueryHook, 'get_service'):
            bq_hook = hook.BigQueryHook(use_legacy_sql=False)
            bq_hook.get_first('query')
            args, kwargs = run_with_config.call_args
            self.assertIs(args[0]['query']['useLegacySql'], False)


if __name__ == '__main__':
    unittest.main()
| 39.41704 | 90 | 0.641354 |
cda2b75987295f94112d22222f1d69c1c6e0daae | 1,153 | py | Python | 25/25a.py | jamOne-/adventofcode2018 | d51c01578ae7e4f30824c4f6ace66958491c1ed4 | [
"MIT"
] | null | null | null | 25/25a.py | jamOne-/adventofcode2018 | d51c01578ae7e4f30824c4f6ace66958491c1ed4 | [
"MIT"
] | null | null | null | 25/25a.py | jamOne-/adventofcode2018 | d51c01578ae7e4f30824c4f6ace66958491c1ed4 | [
"MIT"
] | null | null | null | import re
import sys
def get_all_numbers(line):
    """Extract every (possibly negative) integer in *line* as a tuple of ints.

    Fix: the pattern is now a raw string — '\\d' in a plain string is an
    invalid escape sequence (a SyntaxWarning on Python 3.12+).
    """
    return tuple(map(int, re.findall(r'-?\d+', line)))
def distance(point1, point2):
    """Manhattan distance between two equal-length coordinate tuples.

    Generalized from the original hard-coded 4-dimensional sum; behaviour is
    identical for the 4-tuples this puzzle produces, and it now also works
    for points of any dimension.
    """
    return sum(abs(a - b) for a, b in zip(point1, point2))
def get_connections(points):
    """Build adjacency lists joining points within Manhattan distance 3."""
    neighbours = [[] for _ in points]
    for first in range(len(points)):
        for second in range(first + 1, len(points)):
            if distance(points[first], points[second]) <= 3:
                neighbours[first].append(second)
                neighbours[second].append(first)
    return neighbours
def get_number_of_constelations(connections):
    """Count connected components of the graph given as adjacency lists."""
    seen = set()
    components = 0
    for start in range(len(connections)):
        if start in seen:
            continue
        # Unvisited node: flood-fill its whole component with an iterative DFS.
        components += 1
        seen.add(start)
        pending = [start]
        while pending:
            node = pending.pop()
            for neighbour in connections[node]:
                if neighbour not in seen:
                    seen.add(neighbour)
                    pending.append(neighbour)
    return components
def solve(puzzle_input):
    """Parse one 4-D point per input line and count the constellations."""
    points = [get_all_numbers(line) for line in puzzle_input]
    return get_number_of_constelations(get_connections(points))


print(solve(sys.stdin))
| 19.542373 | 122 | 0.6366 |
247d125a3c73acf59c9561c699fb7f2fefc099ce | 14,548 | py | Python | Libraries/Vulkan/VulkanSDK/1.1.126.0/shaderc/glslc/test/parameter_tests.py | IanWigle/SlitherEngine2 | 5364d8ad5a52cd7c651ff5bb3c1db59ba15441f5 | [
"MIT"
] | 1 | 2019-11-21T17:40:18.000Z | 2019-11-21T17:40:18.000Z | Libraries/Vulkan/VulkanSDK/1.1.126.0/shaderc/glslc/test/parameter_tests.py | Paldamar/SlitherEngine2 | 5364d8ad5a52cd7c651ff5bb3c1db59ba15441f5 | [
"MIT"
] | 2 | 2021-05-10T21:48:41.000Z | 2021-05-11T07:00:49.000Z | Libraries/Vulkan/VulkanSDK/1.1.126.0/shaderc/glslc/test/parameter_tests.py | Paldamar/SlitherEngine2 | 5364d8ad5a52cd7c651ff5bb3c1db59ba15441f5 | [
"MIT"
] | null | null | null | # Copyright 2015 The Shaderc Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import expect
import os.path
from glslc_test_framework import inside_glslc_testsuite
from placeholder import FileShader, StdinShader, TempFileName
@inside_glslc_testsuite('File')
class SimpleFileCompiled(expect.ValidObjectFile):
    """Tests whether or not a simple glsl file compiles."""

    shader = FileShader('#version 310 es\nvoid main() {}', '.frag')
    glslc_args = ['-c', shader]


@inside_glslc_testsuite('File')
class NotSpecifyingOutputName(expect.SuccessfulReturn,
                              expect.CorrectObjectFilePreamble):
    """Tests that when there is no -o and -E/-S/-c specified, output as a.spv."""

    shader = FileShader('#version 140\nvoid main() {}', '.frag')
    glslc_args = [shader]

    def check_output_a_spv(self, status):
        # glslc should have written its default output file 'a.spv' into
        # the test's working directory; verify its SPIR-V preamble.
        output_name = os.path.join(status.directory, 'a.spv')
        return self.verify_object_file_preamble(output_name)
@inside_glslc_testsuite('Parameters')
class HelpParameters(
        expect.ReturnCodeIsZero, expect.StdoutMatch, expect.StderrMatch):
    """Tests that the --help flag outputs correctly and does not produce an error."""

    glslc_args = ['--help']

    # NOTE(review): the column alignment inside this expected-output literal
    # appears to have been stripped by text extraction — verify the spacing
    # against the upstream shaderc source before relying on an exact match.
    expected_stdout = '''glslc - Compile shaders into SPIR-V
Usage: glslc [options] file...
An input file of - represents standard input.
Options:
-c Only run preprocess, compile, and assemble steps.
-Dmacro[=defn] Add an implicit macro definition.
-E Outputs only the results of the preprocessing step.
Output defaults to standard output.
-fauto-bind-uniforms
Automatically assign bindings to uniform variables that
don't have an explicit 'binding' layout in the shader
source.
-fauto-map-locations
Automatically assign locations to uniform variables that
don't have an explicit 'location' layout in the shader
source.
-fentry-point=<name>
Specify the entry point name for HLSL compilation, for
all subsequent source files. Default is "main".
-fhlsl_functionality1, -fhlsl-functionality1
Enable extension SPV_GOOGLE_hlsl_functionality1 for HLSL
compilation.
-finvert-y Invert position.Y output in vertex shader.
-fhlsl-iomap Use HLSL IO mappings for bindings.
-fhlsl-offsets Use HLSL offset rules for packing members of blocks.
Affects only GLSL. HLSL rules are always used for HLSL.
-flimit=<settings>
Specify resource limits. Each limit is specified by a limit
name followed by an integer value. Tokens should be
separated by whitespace. If the same limit is specified
several times, only the last setting takes effect.
-flimit-file <file>
Set limits as specified in the given file.
-fnan-clamp Generate code for max and min builtins so that, when given
a NaN operand, the other operand is returned. Similarly,
the clamp builtin will favour the non-NaN operands, as if
clamp were implemented as a composition of max and min.
-fresource-set-binding [stage] <reg0> <set0> <binding0>
[<reg1> <set1> <binding1>...]
Explicitly sets the descriptor set and binding for
HLSL resources, by register name. Optionally restrict
it to a single stage.
-fcbuffer-binding-base [stage] <value>
Same as -fubo-binding-base.
-fimage-binding-base [stage] <value>
Sets the lowest automatically assigned binding number for
images. Optionally only set it for a single shader stage.
For HLSL, the resource register number is added to this
base.
-fsampler-binding-base [stage] <value>
Sets the lowest automatically assigned binding number for
samplers Optionally only set it for a single shader stage.
For HLSL, the resource register number is added to this
base.
-fssbo-binding-base [stage] <value>
Sets the lowest automatically assigned binding number for
shader storage buffer objects (SSBO). Optionally only set
it for a single shader stage. Only affects GLSL.
-ftexture-binding-base [stage] <value>
Sets the lowest automatically assigned binding number for
textures. Optionally only set it for a single shader stage.
For HLSL, the resource register number is added to this
base.
-fuav-binding-base [stage] <value>
For automatically assigned bindings for unordered access
views (UAV), the register number is added to this base to
determine the binding number. Optionally only set it for
a single shader stage. Only affects HLSL.
-fubo-binding-base [stage] <value>
Sets the lowest automatically assigned binding number for
uniform buffer objects (UBO). Optionally only set it for
a single shader stage.
For HLSL, the resource register number is added to this
base.
-fshader-stage=<stage>
Treat subsequent input files as having stage <stage>.
Valid stages are vertex, vert, fragment, frag, tesscontrol,
tesc, tesseval, tese, geometry, geom, compute, and comp.
-g Generate source-level debug information.
Currently this option has no effect.
--help Display available options.
-I <value> Add directory to include search path.
-mfmt=<format> Output SPIR-V binary code using the selected format. This
option may be specified only when the compilation output is
in SPIR-V binary code form. Available options include bin, c
and num. By default the binary output format is bin.
-M Generate make dependencies. Implies -E and -w.
-MM An alias for -M.
-MD Generate make dependencies and compile.
-MF <file> Write dependency output to the given file.
-MT <target> Specify the target of the rule emitted by dependency
generation.
-O Optimize the generated SPIR-V code for better performance.
-Os Optimize the generated SPIR-V code for smaller size.
-O0 Disable optimization.
-o <file> Write output to <file>.
A file name of '-' represents standard output.
-std=<value> Version and profile for GLSL input files. Possible values
are concatenations of version and profile, e.g. 310es,
450core, etc. Ignored for HLSL files.
-S Only run preprocess and compilation steps.
--show-limits Display available limit names and their default values.
--target-env=<environment>
Set the target client environment, and the semantics
of warnings and errors. An optional suffix can specify
the client version. Values are:
vulkan1.0 # The default
vulkan1.1
vulkan # Same as vulkan1.0
opengl4.5
opengl # Same as opengl4.5
--target-spv=<spirv-version>
Set the SPIR-V version to be used for the generated SPIR-V
module. The default is the highest version of SPIR-V
required to be supported for the target environment.
For example, default for vulkan1.0 is spv1.0, and
the default for vulkan1.1 is spv1.3.
Values are:
spv1.0, spv1.1, spv1.2, spv1.3, spv1.4, spv1.5
--version Display compiler version information.
-w Suppresses all warning messages.
-Werror Treat all warnings as errors.
-x <language> Treat subsequent input files as having type <language>.
Valid languages are: glsl, hlsl.
For files ending in .hlsl the default is hlsl.
Otherwise the default is glsl.
'''
    expected_stderr = ''
@inside_glslc_testsuite('Parameters')
class HelpIsNotTooWide(expect.StdoutNoWiderThan80Columns):
    """Tests that --help output is not too wide."""

    glslc_args = ['--help']


@inside_glslc_testsuite('Parameters')
class UnknownSingleLetterArgument(expect.ErrorMessage):
    """Tests that an unknown argument triggers an error message."""

    glslc_args = ['-a']
    expected_error = ["glslc: error: unknown argument: '-a'\n"]


@inside_glslc_testsuite('Parameters')
class UnknownMultiLetterArgument(expect.ErrorMessage):
    """Tests that an unknown argument triggers an error message."""

    glslc_args = ['-zzz']
    expected_error = ["glslc: error: unknown argument: '-zzz'\n"]


@inside_glslc_testsuite('Parameters')
class UnsupportedOption(expect.ErrorMessage):
    """Tests that an unsupported option triggers an error message."""

    glslc_args = ['--unsupported-option']
    expected_error = [
        "glslc: error: unsupported option: '--unsupported-option'\n"]


@inside_glslc_testsuite('File')
class FileNotFound(expect.ErrorMessage):
    """Tests the error message if a file cannot be found."""

    # The placeholder resolves to a path inside the test directory at run
    # time, so the expected message is assembled from three fragments.
    blabla_file = TempFileName('blabla.frag')
    glslc_args = [blabla_file]
    expected_error = [
        "glslc: error: cannot open input file: '", blabla_file,
        "': No such file or directory\n"]


@inside_glslc_testsuite('Unsupported')
class LinkingNotSupported(expect.ErrorMessage):
    """Tests the error message generated by linking not supported yet."""

    shader1 = FileShader('#version 140\nvoid main() {}', '.vert')
    shader2 = FileShader('#version 140\nvoid main() {}', '.frag')
    glslc_args = [shader1, shader2]
    expected_error = [
        'glslc: error: linking multiple files is not supported yet. ',
        'Use -c to compile files individually.\n']


@inside_glslc_testsuite('Unsupported')
class MultipleStdinUnsupported(expect.ErrorMessage):
    """Tests the error message generated by having more than one - input."""

    glslc_args = ['-c', '-fshader-stage=vertex', '-', '-']
    expected_error = [
        'glslc: error: specifying standard input "-" as input more'
        ' than once is not allowed.\n']


@inside_glslc_testsuite('Parameters')
class StdinWithoutShaderStage(expect.StdoutMatch, expect.StderrMatch):
    """Tests that you must use -fshader-stage when specifying - as input."""

    # NOTE(review): the shader literal's internal indentation was stripped
    # by extraction; the GLSL is still valid but confirm the original layout.
    shader = StdinShader(
        """#version 140
int a() {
}
void main() {
int x = a();
}
""")
    glslc_args = [shader]

    expected_stdout = ''
    expected_stderr = [
        "glslc: error: '-': -fshader-stage required when input is from "
        'standard input "-"\n']
@inside_glslc_testsuite('Parameters')
class LimitsHelp(expect.StdoutMatch, expect.StderrMatch):
    """Tests --show-limits shows correct output."""

    glslc_args = ['--show-limits']

    expected_stderr = ''
    # NOTE(review): the name/value column alignment in this table appears to
    # have been collapsed to single spaces by extraction — verify against
    # the upstream shaderc source before relying on an exact match.
    expected_stdout = """MaxLights 8
MaxClipPlanes 6
MaxTextureUnits 2
MaxTextureCoords 8
MaxVertexAttribs 16
MaxVertexUniformComponents 4096
MaxVaryingFloats 60
MaxVertexTextureImageUnits 16
MaxCombinedTextureImageUnits 80
MaxTextureImageUnits 16
MaxFragmentUniformComponents 1024
MaxDrawBuffers 8
MaxVertexUniformVectors 256
MaxVaryingVectors 15
MaxFragmentUniformVectors 256
MaxVertexOutputVectors 16
MaxFragmentInputVectors 15
MinProgramTexelOffset -8
MaxProgramTexelOffset 7
MaxClipDistances 8
MaxComputeWorkGroupCountX 65535
MaxComputeWorkGroupCountY 65535
MaxComputeWorkGroupCountZ 65535
MaxComputeWorkGroupSizeX 1024
MaxComputeWorkGroupSizeY 1024
MaxComputeWorkGroupSizeZ 64
MaxComputeUniformComponents 512
MaxComputeTextureImageUnits 16
MaxComputeImageUniforms 8
MaxComputeAtomicCounters 8
MaxComputeAtomicCounterBuffers 1
MaxVaryingComponents 60
MaxVertexOutputComponents 64
MaxGeometryInputComponents 64
MaxGeometryOutputComponents 128
MaxFragmentInputComponents 128
MaxImageUnits 8
MaxCombinedImageUnitsAndFragmentOutputs 8
MaxCombinedShaderOutputResources 8
MaxImageSamples 0
MaxVertexImageUniforms 0
MaxTessControlImageUniforms 0
MaxTessEvaluationImageUniforms 0
MaxGeometryImageUniforms 0
MaxFragmentImageUniforms 8
MaxCombinedImageUniforms 8
MaxGeometryTextureImageUnits 16
MaxGeometryOutputVertices 256
MaxGeometryTotalOutputComponents 1024
MaxGeometryUniformComponents 512
MaxGeometryVaryingComponents 60
MaxTessControlInputComponents 128
MaxTessControlOutputComponents 128
MaxTessControlTextureImageUnits 16
MaxTessControlUniformComponents 1024
MaxTessControlTotalOutputComponents 4096
MaxTessEvaluationInputComponents 128
MaxTessEvaluationOutputComponents 128
MaxTessEvaluationTextureImageUnits 16
MaxTessEvaluationUniformComponents 1024
MaxTessPatchComponents 120
MaxPatchVertices 32
MaxTessGenLevel 64
MaxViewports 16
MaxVertexAtomicCounters 0
MaxTessControlAtomicCounters 0
MaxTessEvaluationAtomicCounters 0
MaxGeometryAtomicCounters 0
MaxFragmentAtomicCounters 8
MaxCombinedAtomicCounters 8
MaxAtomicCounterBindings 1
MaxVertexAtomicCounterBuffers 0
MaxTessControlAtomicCounterBuffers 0
MaxTessEvaluationAtomicCounterBuffers 0
MaxGeometryAtomicCounterBuffers 0
MaxFragmentAtomicCounterBuffers 0
MaxCombinedAtomicCounterBuffers 1
MaxAtomicCounterBufferSize 32
MaxTransformFeedbackBuffers 4
MaxTransformFeedbackInterleavedComponents 64
MaxCullDistances 8
MaxCombinedClipAndCullDistances 8
MaxSamples 4
"""
| 40.636872 | 81 | 0.675213 |
6cd14fdc3e33e8835c499dce6a5682ad6a4238fa | 3,556 | py | Python | turtle/moth-light/turtle-moth-light.py | whitmans-max/python-examples | 881a8f23f0eebc76816a0078e19951893f0daaaa | [
"MIT"
] | 140 | 2017-02-21T22:49:04.000Z | 2022-03-22T17:51:58.000Z | turtle/moth-light/turtle-moth-light.py | whitmans-max/python-examples | 881a8f23f0eebc76816a0078e19951893f0daaaa | [
"MIT"
] | 5 | 2017-12-02T19:55:00.000Z | 2021-09-22T23:18:39.000Z | turtle/moth-light/turtle-moth-light.py | whitmans-max/python-examples | 881a8f23f0eebc76816a0078e19951893f0daaaa | [
"MIT"
] | 79 | 2017-01-25T10:53:33.000Z | 2022-03-11T16:13:57.000Z | import turtle
import random
# --- classes ---
class Moth:
def __init__(self, pos, dest=None):
'''create and initiate moth'''
# define turtle
self.t = turtle.Turtle()
self.t.speed(0)
# it is needed to execute `ontimer`
self.screen = turtle.Screen()
# remember destination
self.dest = dest
# at start it is not fly
self.is_flying = False
# move to start position
#(it will use self.dest so it has to be after `self.dest = dest`)
self.move(pos)
# if destination is set then fly to it
# (currently it is in `move()`)
#if self.dest is not None:
# self.move_to_light(self.dest)
def move(self, pos):
'''move to start position immediately (without steps)'''
# move to new position
self.t.up()
self.t.setposition(pos)
self.t.down()
# start flying
if self.dest is not None:
self.move_to_light(self.dest)
def move_to_random_place(self):
x = random.randint(-800, 800)
y = random.randint(-300, 300)
self.move((x,y))
def move_to_light(self, dest):
'''start flying to light'''
# set new destination position
self.dest = dest
# turn head to destination position
self.t.setheading(self.t.towards(self.dest))
# if not flying yet then start flying
if not self.is_flying:
self.is_flying = True
self._fly()
def _fly(self):
'''fly to light with 10px steps (using ontimer)'''
STEP = 10
# get distance to light
distance = self.t.distance(self.dest)
# make one step or stop
if distance > STEP:
self.t.forward(STEP)
elif distance > 0: # you can use ie. > 3 to stop 3px before light
self.t.forward(distance)
# you can stop flying
#self.is_flying = False
# or you move to new random place
self.move_to_random_place()
#else do nothing
# if still flying then repeat function after 100ms
if self.is_flying:
self.screen.ontimer(self._fly, 100)
def get_position():
'''get current position'''
return self.t.position()
# --- functions ---
def move_light(x, y):
    """Mouse handler: move the light to the click point and redirect all moths."""
    global light
    # the click position becomes the new light
    light = (x, y)
    # every moth now heads for the new light
    for moth in moths:
        moth.move_to_light(light)
def move_moths(x, y):
    """Mouse handler: scatter every moth to a random position (x, y unused)."""
    for moth in moths:
        moth.move_to_random_place()
# --- main ---

# list to keep all moths
moths = []

# light position at start
# (later you can change it by clicking on the screen)
# (there is no object which shows the light on screen)
light = (0, 0)

# create some moths in random places
for _ in range(5):
    # put moth in random places
    x = random.randint(-800, 800)
    y = random.randint(-300, 300)
    m = Moth((x, y), dest=light)
    # or use `move_to_random_place()`
    # m = Moth((0, 0), dest=light)
    # m.move_to_random_place()

    # remember moth on list
    moths.append(m)

# mouse LEFT button will move light
turtle.onscreenclick(move_light)

# mouse RIGHT button will move moths to random places
turtle.onscreenclick(move_moths, 3)

# start "the engine"
turtle.done()
| 24.694444 | 73 | 0.567492 |
7b56559d0de42e365dd4c852243894acda65b38e | 9,909 | py | Python | characterize.py | cccs-dawson/assemblyline-service-characterize | aa919d0f065ed78208482f86f678d0e423dcdae7 | [
"MIT"
] | 1 | 2021-06-18T14:56:28.000Z | 2021-06-18T14:56:28.000Z | characterize.py | cccs-dawson/assemblyline-service-characterize | aa919d0f065ed78208482f86f678d0e423dcdae7 | [
"MIT"
] | 8 | 2020-12-02T20:10:22.000Z | 2022-03-02T18:16:04.000Z | characterize.py | cccs-dawson/assemblyline-service-characterize | aa919d0f065ed78208482f86f678d0e423dcdae7 | [
"MIT"
] | 4 | 2021-01-19T18:56:41.000Z | 2021-07-14T18:38:21.000Z | from __future__ import absolute_import
import json
import re
import subprocess
import traceback
from typing import Dict, List, Optional, Tuple, Union
import hachoir.core.config as hachoir_config
from hachoir.core.log import log as hachoir_logger, Log, Logger
from hachoir.metadata import extractMetadata
from hachoir.parser.guess import createParser
from assemblyline.common.dict_utils import flatten
from assemblyline.common.entropy import calculate_partition_entropy
from assemblyline_v4_service.common.base import ServiceBase
from assemblyline_v4_service.common.request import ServiceRequest
from assemblyline_v4_service.common.result import Result, ResultSection, BODY_FORMAT
from parse_lnk import decode_lnk
# Maps a metadata source (hachoir parser id / ExifTool file-type extension /
# "LNK") to {snake_cased metadata field -> Assemblyline tag type}. The `None`
# key holds fallbacks consulted for every source when no specific entry hits.
TAG_MAP = {
    'ole2': {
        'author': 'file.ole.summary.author',
        'last_modification': 'file.date.last_modified',
        'subject': 'file.ole.summary.subject',
        'title': 'file.ole.summary.title'
    },
    'LNK': {
        'target_file_dosname': 'file.name.extracted'
    },
    'ZIP': {
        'zip_modify_date': 'file.date.last_modified'
    },
    'EXE': {
        'file_description': 'file.pe.versions.description',
        'time_stamp': 'file.pe.linker.timestamp'
    },
    'DLL': {
        'file_description': 'file.pe.versions.description',
        'time_stamp': 'file.pe.linker.timestamp'
    },
    'DOC': {
        'author': 'file.ole.summary.author',
        'code_page': 'file.ole.summary.codepage',
        'comment': 'file.ole.summary.comment',
        'company': 'file.ole.summary.company',
        'create_date': 'file.date.creation',
        'last_modified_by': 'file.ole.summary.last_saved_by',
        'manager': 'file.ole.summary.manager',
        'modify_date': 'file.date.last_modified',
        'subject': 'file.ole.summary.subject',
        'title': 'file.ole.summary.title'
    },
    None: {
        'image_size': 'file.img.size',
        'megapixels': 'file.img.mega_pixels',
        'create_date': 'file.date.creation',
        'creation_date': 'file.date.creation',
        'modify_date': 'file.date.last_modified',
        'original_file_name': 'file.name.extracted'
    }
}

# Heuristic marker for suspicious shortcut command lines: URLs or commonly
# abused script interpreters (matched case-insensitively by the caller).
BAD_LINK_RE = re.compile("http[s]?://|powershell|cscript|wscript|mshta|<script")
def build_key(input_string: str) -> str:
    """Convert a CamelCase metadata name to a snake_case key.

    An underscore is inserted before an uppercase letter only when the
    previous character was lowercase (so runs of capitals stay fused,
    e.g. 'MIMEType' -> 'mimetype'); '.' and '_' pass through unchanged.
    """
    pieces = []
    # True right after an uppercase letter or a '.'/'_' separator
    prev_breaks = False
    for pos, ch in enumerate(input_string):
        if ch.isupper():
            if pos and not prev_breaks:
                pieces.append("_")
            pieces.append(ch.lower())
            prev_breaks = True
        elif ch in (".", "_"):
            pieces.append(ch)
            prev_breaks = True
        else:
            pieces.append(ch)
            prev_breaks = False
    return "".join(pieces)
def get_type_val(data: str, src_name: str) -> Tuple[str, str]:
    """Split a 'key: value' or 'key=value' string into (snake_key, value).

    Falls back to (src_name, data) when neither separator is present; the
    key is snake_cased via build_key and the value is whitespace-stripped.
    """
    if ":" in data:
        raw_key, raw_val = data.split(":", 1)
    elif "=" in data:
        raw_key, raw_val = data.split("=", 1)
    else:
        raw_key, raw_val = src_name, data
    return build_key(raw_key), raw_val.strip()
#########################################################
# Scan Execution Class #
#########################################################
class Characterize(ServiceBase):
    """Assemblyline service: entropy map plus metadata extraction.

    Uses hachoir-metadata and the external `exiftool` binary for generic
    files, and a custom parser for Windows shortcut (.lnk) files.
    """

    def hachoir_logger_callback(self, level: int, prefix: str, _text: str,
                                ctxt: Optional[Logger]) -> None:
        # Route hachoir's internal log messages into the service logger.
        # Show where in hachoir the log comes from using ctxt if it exists.
        log = f"hachoir {ctxt.__class__} [{ctxt._logger()}]: {_text}" \
            if ctxt else f"hachoir: {_text}\n"
        # NOTE(review): format_exc() outside an active exception yields
        # "NoneType: None" — presumably intentional best-effort context.
        log += traceback.format_exc(limit=2)
        if Log.LOG_INFO == level:
            self.log.info(log)
        elif Log.LOG_WARN == level:
            self.log.warning(log)
        elif Log.LOG_ERROR == level:
            self.log.error(log)

    def start(self) -> None:
        """Silence hachoir's stdout and redirect its logging to ours."""
        hachoir_config.quiet = True
        # Don't print to stdout, use our logger via callback
        hachoir_logger.use_print = False
        hachoir_logger.on_new_message = self.hachoir_logger_callback

    def execute(self, request: ServiceRequest) -> None:
        """Produce the analysis result for one submitted file."""
        request.result = Result()

        # 1. Calculate entropy map
        with open(request.file_path, 'rb') as fin:
            (entropy, part_entropies) = calculate_partition_entropy(fin)

        entropy_graph_data = {
            'type': 'colormap',
            'data': {
                'domain': [0, 8],  # entropy of a byte stream is 0..8 bits
                'values': part_entropies
            }
        }
        ResultSection(f"File entropy: {round(entropy, 3)}", parent=request.result,
                      body_format=BODY_FORMAT.GRAPH_DATA,
                      body=json.dumps(entropy_graph_data))

        if request.file_type == "meta/shortcut/windows":
            # 2. Parse windows shortcuts
            self.parse_link(request.result, request.file_path)
        else:
            # 3. Get hachoir metadata
            parser = createParser(request.file_path)
            if parser is not None:
                with parser:
                    parser_tags = parser.getParserTags()
                    parser_id = parser_tags.get('id', 'unknown')

                    # Do basic metadata extraction
                    metadata = extractMetadata(parser, 1)

                    if metadata:
                        kv_body: Dict[str, Union[str, List[str]]] = {}
                        tags: List[Tuple[str, str]] = []
                        for m in metadata:
                            if m.key == "comment":
                                # Comments often pack 'key: value' pairs.
                                for v in m.values:
                                    key, val = get_type_val(v.text, "comment")
                                    if not val:
                                        continue
                                    kv_body[key] = val
                                    tag_type = TAG_MAP.get(parser_id, {}).get(key, None) or \
                                        TAG_MAP.get(None, {}).get(key, None)
                                    if tag_type is not None:
                                        tags.append((tag_type, val))
                            elif m.key in ["mime_type"]:
                                # Already known to Assemblyline; skip.
                                pass
                            else:
                                values = [v.text for v in m.values]
                                if len(values) == 1 and values[0]:
                                    kv_body[m.key] = values[0]
                                elif values:
                                    kv_body[m.key] = values
                                # Tags are collected for single- and
                                # multi-valued entries alike.
                                # NOTE(review): source indentation was lost
                                # in extraction — confirm this loop's level.
                                for v in values:
                                    tag_type = TAG_MAP.get(parser_id, {}).get(m.key, None) or \
                                        TAG_MAP.get(None, {}).get(m.key, None)
                                    if tag_type is not None:
                                        tags.append((tag_type, v))

                        if kv_body:
                            res = ResultSection(
                                f"Metadata extracted by hachoir-metadata [Parser: {parser_id}]",
                                body=json.dumps(kv_body), body_format=BODY_FORMAT.KEY_VALUE,
                                parent=request.result)
                            for t_type, t_val in tags:
                                res.add_tag(t_type, t_val)

            # 4. Get Exiftool Metadata
            exif = subprocess.run(["exiftool", "-j", request.file_path],
                                  capture_output=True, check=False)
            if exif.stdout:
                exif_data = json.loads(exif.stdout.decode('utf-8', errors="ignore"))
                res_data = exif_data[0]
                if "Error" not in res_data:
                    # Drop filesystem/bookkeeping fields; keep real metadata.
                    exif_body = {build_key(k): v for k, v in res_data.items()
                                 if v and k not in ["SourceFile", "ExifToolVersion", "FileName",
                                                    "Directory", "FileSize", "FileModifyDate",
                                                    "FileAccessDate", "FileInodeChangeDate",
                                                    "FilePermissions", "FileType",
                                                    "FileTypeExtension", "MIMEType"]}

                    if exif_body:
                        e_res = ResultSection("Metadata extracted by ExifTool",
                                              body=json.dumps(exif_body),
                                              body_format=BODY_FORMAT.KEY_VALUE,
                                              parent=request.result)
                        for k, v in exif_body.items():
                            tag_type = TAG_MAP.get(res_data.get("FileTypeExtension", "UNK").upper(),
                                                   {}).get(k, None) or \
                                TAG_MAP.get(None, {}).get(k, None)
                            if tag_type:
                                e_res.add_tag(tag_type, v)

    def parse_link(self, parent_res: Result, path: str) -> bool:
        """Parse a Windows .lnk file; return False when it cannot be decoded."""
        with open(path, "rb") as fh:
            metadata = decode_lnk(fh.read())

        if metadata is None:
            return False

        body_output = {build_key(k): v for k, v in flatten(metadata).items() if v}
        res = ResultSection("Metadata extracted by parse_lnk",
                            body_format=BODY_FORMAT.KEY_VALUE,
                            body=json.dumps(body_output), parent=parent_res)

        # Candidate target paths, in decreasing preference for display.
        bp = metadata.get("BasePath", "").strip()
        rp = metadata.get("RELATIVE_PATH", "").strip()
        nn = metadata.get("NetName", "").strip()
        cla = metadata.get("COMMAND_LINE_ARGUMENTS", "").strip()

        # Flag shortcuts whose arguments contain URLs or script interpreters.
        s = BAD_LINK_RE.search(cla.lower())
        if s:
            res.set_heuristic(1)

        res.add_tag(tag_type="file.name.extracted",
                    value=(bp or rp or nn).rsplit("\\")[-1])
        res.add_tag(tag_type="dynamic.process.command_line",
                    value=f"{(rp or bp or nn)} {cla}".strip())

        for k, v in body_output.items():
            tag_type = TAG_MAP.get("LNK", {}).get(k, None) or \
                TAG_MAP.get(None, {}).get(k, None)
            if tag_type:
                res.add_tag(tag_type, v)

        return True
| 40.280488 | 118 | 0.521445 |
376bb6c792ba47d0c7c427267344dbc1c1cfec3a | 19,958 | py | Python | tests/room_test.py | phildenhoff/matrix-nio | 5642d5a980d87c363e3b8c55d82224a4a353f3d3 | [
"Apache-2.0"
] | 1 | 2021-06-28T02:45:13.000Z | 2021-06-28T02:45:13.000Z | tests/room_test.py | vSLG/matrix-nio | e408334c5db061e01fe89bcf57e34fa573cee573 | [
"Apache-2.0"
] | null | null | null | tests/room_test.py | vSLG/matrix-nio | e408334c5db061e01fe89bcf57e34fa573cee573 | [
"Apache-2.0"
] | null | null | null | import pytest
from helpers import faker
from nio.events import (InviteAliasEvent, InviteMemberEvent, InviteNameEvent,
RoomAvatarEvent,
RoomCreateEvent, RoomGuestAccessEvent,
RoomHistoryVisibilityEvent, RoomJoinRulesEvent,
RoomMemberEvent, RoomNameEvent, TypingNoticeEvent,
Receipt, ReceiptEvent)
from nio.responses import RoomSummary
from nio.rooms import MatrixInvitedRoom, MatrixRoom
TEST_ROOM = "!test:example.org"
BOB_ID = "@bob:example.org"
ALICE_ID = "@alice:example.org"
class TestClass:
def _create_test_data(self):
    # No shared fixtures needed; kept for framework symmetry.
    pass

@property
def new_user(self):
    """Return a fresh random (mx_id, display_name, avatar_url) triple."""
    return faker.mx_id(), faker.name(), faker.avatar_url()

@property
def test_room(self):
    """Return a fresh MatrixRoom with an empty, zeroed summary."""
    room = MatrixRoom(TEST_ROOM, BOB_ID)
    room.update_summary(RoomSummary(0, 0, []))
    return room

def test_room_creation(self):
    room = self.test_room
    assert room

def test_adding_members(self):
    """Members are tracked and member_count survives losing the summary."""
    room = self.test_room
    assert not room.users
    mx_id, name, avatar = self.new_user
    room.add_member(mx_id, name, avatar)
    room.summary.heroes.append(mx_id)
    room.summary.joined_member_count += 1
    assert room.users
    assert room.member_count == 1
    # Without a summary, the count falls back to the local member list.
    room.summary = None
    assert room.member_count == 1
    member = list(room.users.values())[0]
    assert member.user_id == mx_id
    assert member.display_name == name
    assert member.avatar_url == avatar

def test_summary_details(self):
    """_summary_details raises unless both member counts are present."""
    room = self.test_room
    room.summary = None
    with pytest.raises(ValueError):
        assert room._summary_details()
    room.summary = RoomSummary(None, None, [])
    with pytest.raises(ValueError):
        assert room._summary_details()
    room.summary = RoomSummary(0, None, [])
    with pytest.raises(ValueError):
        assert room._summary_details()
    room.summary = RoomSummary(None, 0, [])
    with pytest.raises(ValueError):
        assert room._summary_details()
    room.summary = RoomSummary(0, 0, [])
    assert room._summary_details() == ([], 0, 0)

def test_named_checks(self):
    """A room is a 'group' until it gets an explicit name."""
    room = self.test_room
    assert not room.is_named
    assert room.is_group
    room.name = "Test room"
    assert room.is_named
    assert not room.is_group
def test_name_calculation_when_unnamed(self):
room = self.test_room
assert room.named_room_name() is None
assert room.display_name == "Empty Room"
# Members join
room.add_member(BOB_ID, "Bob", None) # us
room.summary.joined_member_count += 1
assert room.display_name == "Empty Room"
room.add_member("@alice:example.org", "Alice", None)
room.summary.heroes.append("@alice:example.org")
room.summary.joined_member_count += 1
assert room.display_name == "Alice"
room.add_member("@malory:example.org", "Alice", None)
room.summary.heroes.append("@malory:example.org")
room.summary.joined_member_count += 1
assert (room.display_name ==
"Alice (@alice:example.org) and Alice (@malory:example.org)")
room.add_member("@steve:example.org", "Steve", None)
room.summary.heroes.append("@steve:example.org")
room.summary.joined_member_count += 1
assert (room.display_name ==
"Alice (@alice:example.org), Alice (@malory:example.org) "
"and Steve")
room.add_member("@carol:example.org", "Carol", None)
room.summary.joined_member_count += 1
assert (room.display_name ==
"Alice (@alice:example.org), Alice (@malory:example.org), "
"Steve and 1 other")
room.add_member("@dave:example.org", "Dave", None)
room.summary.joined_member_count += 1
assert (room.display_name ==
"Alice (@alice:example.org), Alice (@malory:example.org), "
"Steve and 2 others")
room.add_member("@erin:example.org", "Eirin", None)
room.summary.invited_member_count += 1
assert (room.display_name ==
"Alice (@alice:example.org), Alice (@malory:example.org), "
"Steve and 3 others")
# Members leave
room.summary.joined_member_count = 1
room.summary.invited_member_count = 0
assert (room.display_name ==
"Empty Room (had Alice (@alice:example.org), "
"Alice (@malory:example.org) and Steve)")
room.remove_member("@steve:example.org")
room.summary.heroes.remove("@steve:example.org")
assert (room.display_name ==
"Empty Room (had Alice (@alice:example.org) and "
"Alice (@malory:example.org))")
room.remove_member("@malory:example.org")
room.summary.heroes.remove("@malory:example.org")
assert room.display_name == "Empty Room (had Alice)"
room.remove_member("@alice:example.org")
room.summary.heroes.remove("@alice:example.org")
assert room.display_name == "Empty Room"
room.remove_member("@bob:example.org") # us
assert not room.summary.heroes
assert room.display_name == "Empty Room"
def test_name_calculation_when_unnamed_no_summary(self):
    """Display-name calculation for an unnamed room with an empty summary.

    With no hero list / member counts in the summary, the name must be
    derived purely from the locally tracked membership.
    """
    room = self.test_room
    room.summary = RoomSummary()
    assert room.named_room_name() is None
    assert room.display_name == "Empty Room"
    # Members join
    room.add_member(BOB_ID, "Bob", None)  # us; our own user is excluded
    assert room.display_name == "Empty Room"
    room.add_member("@alice:example.org", "Alice", None)
    assert room.display_name == "Alice"
    # A duplicate display name is disambiguated by appending the Matrix IDs.
    room.add_member("@malory:example.org", "Alice", None)
    assert (room.display_name ==
            "Alice (@alice:example.org) and Alice (@malory:example.org)")
    room.add_member("@steve:example.org", "Steve", None)
    room.add_member("@carol:example.org", "Carol", None)
    room.add_member("@dave:example.org", "Dave", None)
    # Up to five members are listed by name; the rest become "and N other(s)".
    assert (room.display_name ==
            "Alice (@alice:example.org), Alice (@malory:example.org), "
            "Carol, Dave and Steve")
    room.add_member("@erin:example.org", "Eirin", None)
    assert (room.display_name ==
            "Alice (@alice:example.org), Alice (@malory:example.org), "
            "Carol, Dave, Eirin and 1 other")
    room.add_member("@frank:example.org", "Frank", None)
    assert (room.display_name ==
            "Alice (@alice:example.org), Alice (@malory:example.org), "
            "Carol, Dave, Eirin and 2 others")
    room.add_member("@gregor:example.org", "Gregor", None)
    assert (room.display_name ==
            "Alice (@alice:example.org), Alice (@malory:example.org), "
            "Carol, Dave, Eirin and 3 others")
    # Members leave
    for member in room.users.copy():
        room.remove_member(member)
    assert room.display_name == "Empty Room"
def test_name_calculation_with_canonical_alias(self):
    # Without an explicit name, the canonical alias becomes the display name.
    room = self.test_room
    room.canonical_alias = "#test:termina.org.uk"
    assert room.display_name == "#test:termina.org.uk"

def test_name_calculation_prefer_name_over_alias(self):
    # An explicit room name takes precedence over the canonical alias.
    room = self.test_room
    room.canonical_alias = "#test:termina.org.uk"
    room.name = "Test room"
    assert room.display_name == "Test room"

def test_name_calculation_when_hash_already_prefixed(self):
    # A name that already starts with "#" is used verbatim (no re-prefixing).
    room = self.test_room
    room.name = "#test"
    assert room.display_name == "#test"

def test_set_room_avatar(self):
    # An explicitly set room avatar is returned as-is by gen_avatar_url.
    room = self.test_room
    room.room_avatar_url = "mxc://foo"
    assert room.gen_avatar_url == "mxc://foo"
def test_room_avatar_calculation_when_no_set_avatar(self):
    """Avatar fallback for an unnamed (group) room, driven by summary heroes."""
    room = self.test_room
    assert room.room_avatar_url is None
    assert room.summary
    assert room.is_group
    room.add_member("@bob:example.org", "Bob", "mxc://abc", True)  # us
    room.summary.joined_member_count += 1
    assert room.gen_avatar_url is None
    room.add_member("@carol:example.org", "Carol", "mxc://bar", True)
    room.summary.invited_member_count += 1
    # The other member's avatar is only borrowed once they appear as a hero.
    assert room.gen_avatar_url is None
    room.summary.heroes.append("@carol:example.org")
    assert room.gen_avatar_url == "mxc://bar"
    # Named rooms never fall back to a member avatar.
    room.name = "Test"
    assert not room.is_group
    assert room.gen_avatar_url is None
    room.name = None
    assert room.is_group
    assert room.gen_avatar_url == "mxc://bar"
    # With more than two members there is no single counterpart to borrow from.
    room.add_member("@alice:example.org", "Alice", "mxc://baz")
    # NOTE(review): the hero id below uses "matrix.org" while the member added
    # above is on "example.org" — possibly a typo in the fixture, though the
    # assertion holds either way since the room now has over two members.
    room.summary.heroes.append("@alice:matrix.org")
    room.summary.joined_member_count += 1
    assert room.gen_avatar_url is None

def test_room_avatar_calculation_when_no_set_avatar_no_summary(self):
    """Same avatar fallback, computed from plain membership when no summary exists."""
    room = self.test_room
    room.summary = None
    assert room.room_avatar_url is None
    assert room.is_group
    room.add_member("@bob:example.org", "Bob", "mxc://abc", True)  # us
    assert room.gen_avatar_url is None
    room.add_member("@carol:example.org", "Carol", "mxc://bar", True)
    assert room.gen_avatar_url == "mxc://bar"
    room.name = "Test"
    assert not room.is_group
    assert room.gen_avatar_url is None
    room.name = None
    assert room.is_group
    assert room.gen_avatar_url == "mxc://bar"
    # A third member again disables the single-counterpart fallback.
    room.add_member("@alice:example.org", "Alice", "mxc://baz")
    assert room.gen_avatar_url is None
def test_user_name_calculation(self):
    """user_name() disambiguates clashing display names with Matrix IDs."""
    room = self.test_room
    assert room.user_name("@not_in_the_room:example.org") is None
    room.add_member("@alice:example.org", "Alice", None)
    assert room.user_name("@alice:example.org") == "Alice"
    assert room.user_name_clashes("Alice") == ["@alice:example.org"]
    # A member without a display name falls back to their Matrix ID.
    room.add_member("@bob:example.org", None, None)
    assert room.user_name("@bob:example.org") == "@bob:example.org"
    # Two members sharing "Alice": both names get the Matrix ID appended.
    room.add_member("@malory:example.org", "Alice", None)
    assert room.user_name("@alice:example.org") == "Alice (@alice:example.org)"
    assert room.user_name("@malory:example.org") == "Alice (@malory:example.org)"
    assert room.user_name_clashes("Alice") == ["@alice:example.org", "@malory:example.org"]
    # Once the clash is gone, the plain display name is used again.
    room.remove_member("@alice:example.org")
    assert room.user_name("@malory:example.org") == "Alice"
    room.remove_member("@malory:example.org")
    # A display name equal to another user's Matrix ID also counts as a clash.
    room.add_member("@alice:example.org", None, None)
    assert room.user_name("@alice:example.org") == "@alice:example.org"
    assert room.user_name_clashes("@alice:example.org") == ["@alice:example.org"]
    room.add_member("@malory:example.org", "@alice:example.org", None)
    assert room.user_name("@alice:example.org") == "@alice:example.org"
    assert room.user_name("@malory:example.org") == "@alice:example.org (@malory:example.org)"
    assert room.user_name_clashes("@alice:example.org") == ["@alice:example.org", "@malory:example.org"]

def test_avatar_url(self):
    # avatar_url() returns the member's avatar, or None for unknown users.
    room = self.test_room
    assert room.user_name("@not_in_the_room:example.org") is None
    assert room.avatar_url("@not_in_the_room:example.org") is None
    room.add_member("@alice:example.org", "Alice", "mxc://foo")
    assert room.avatar_url("@alice:example.org") == "mxc://foo"
def test_machine_name(self):
    # machine_name prefers the canonical alias over the raw room ID.
    room = self.test_room
    assert room.machine_name == TEST_ROOM
    room.canonical_alias = "Alias room"
    assert room.machine_name == "Alias room"

def test_typing_notice_event(self):
    # A typing-notice ephemeral event replaces the list of typing users.
    room = self.test_room
    assert not room.typing_users
    room.handle_ephemeral_event(TypingNoticeEvent([BOB_ID]))
    assert room.typing_users == [BOB_ID]

def test_read_receipt_event(self):
    """Verify that m.read ReceiptEvents update a room's read_receipt dict.

    Successive m.read receipts should replace the first receipt with the
    second.
    """
    room = self.test_room
    assert room.read_receipts == {}
    r1 = Receipt("event_id", "m.read", BOB_ID, 10)
    r2 = Receipt("event_id2", "m.read", BOB_ID, 15)
    r1_event = ReceiptEvent([r1])
    r2_event = ReceiptEvent([r2])
    room.handle_ephemeral_event(r1_event)
    assert room.read_receipts == {
        BOB_ID: r1
    }
    # The newer receipt for the same user overwrites the older one.
    room.handle_ephemeral_event(r2_event)
    assert room.read_receipts == {
        BOB_ID: r2
    }

def test_non_read_receipt_event(self):
    """Verify that non-m.read receipts don't leak into a room's read_receipt
    dict.
    """
    room = self.test_room
    room.handle_ephemeral_event(
        ReceiptEvent([
            Receipt("event_id", "m.downvoted", BOB_ID, 0)
        ])
    )
    assert room.read_receipts == {}
def test_create_event(self):
    # m.room.create sets the creator and the federation flag; room_version
    # ends up "1" for this event.
    room = self.test_room
    assert not room.creator
    room.handle_event(
        RoomCreateEvent(
            {
                "event_id": "event_id",
                "sender": BOB_ID,
                "origin_server_ts": 0
            },
            BOB_ID, False
        )
    )
    assert room.creator == BOB_ID
    assert room.federate is False
    assert room.room_version == "1"

def test_guest_access_event(self):
    # m.room.guest_access overrides the "forbidden" default.
    room = self.test_room
    assert room.guest_access == "forbidden"
    room.handle_event(
        RoomGuestAccessEvent(
            {
                "event_id": "event_id",
                "sender": BOB_ID,
                "origin_server_ts": 0
            },
            "can_join"
        )
    )
    assert room.guest_access == "can_join"

def test_history_visibility_event(self):
    # m.room.history_visibility overrides the "shared" default.
    room = self.test_room
    assert room.history_visibility == "shared"
    room.handle_event(
        RoomHistoryVisibilityEvent(
            {
                "event_id": "event_id",
                "sender": BOB_ID,
                "origin_server_ts": 0
            },
            "invited"
        )
    )
    assert room.history_visibility == "invited"

def test_join_rules_event(self):
    # m.room.join_rules overrides the "invite" default.
    room = self.test_room
    assert room.join_rule == "invite"
    room.handle_event(
        RoomJoinRulesEvent(
            {
                "event_id": "event_id",
                "sender": BOB_ID,
                "origin_server_ts": 0
            },
            "public"
        )
    )
    assert room.join_rule == "public"

def test_name_event(self):
    # m.room.name sets the room's explicit name.
    room = self.test_room
    assert not room.name
    room.handle_event(
        RoomNameEvent(
            {
                "event_id": "event_id",
                "sender": BOB_ID,
                "origin_server_ts": 0
            },
            "test name"
        )
    )
    assert room.name == "test name"

def test_room_avatar_event(self):
    # m.room.avatar sets the room's explicit avatar URL.
    room = self.test_room
    assert not room.gen_avatar_url
    room.handle_event(
        RoomAvatarEvent(
            {
                "event_id": "event_id",
                "sender": BOB_ID,
                "origin_server_ts": 0
            },
            "mxc://foo"
        )
    )
    assert room.gen_avatar_url == "mxc://foo"
def test_summary_update(self):
    # update_summary replaces counts/heroes; member_count is the sum of
    # invited and joined counts (1+2 -> 3, 1+3 -> 4 below).
    room = self.test_room
    room.summary = None
    room.update_summary(RoomSummary(1, 2, []))
    assert room.member_count == 3
    assert room.summary
    room.update_summary(RoomSummary(1, 3, ["@alice:example.org"]))
    assert room.member_count == 4
    assert room.summary.heroes == ["@alice:example.org"]

def test_invited_room(self):
    # An invited room records the inviter from the invite membership event
    # and applies invite-state name/alias events.
    room = MatrixInvitedRoom(TEST_ROOM, BOB_ID)
    room.handle_event(InviteMemberEvent(
        {},
        "@alice:example.org",
        BOB_ID,
        "invite",
        None,
        {
            "membership": "invite"
        }
    ))
    assert room.inviter == "@alice:example.org"
    assert not room.name
    room.handle_event(InviteNameEvent({}, BOB_ID, "test name"))
    assert room.name == "test name"
    assert not room.canonical_alias
    room.handle_event(InviteAliasEvent({}, BOB_ID, "test alias"))
    assert room.canonical_alias == "test alias"
def test_handle_member_return_value(self):
    # add_member/remove_member return whether they actually changed anything.
    room = self.test_room
    assert not room.users
    mx_id, name, avatar = self.new_user
    assert room.add_member(mx_id, name, avatar)
    assert not room.add_member(mx_id, name, avatar)  # already present
    assert room.remove_member(mx_id)
    assert not room.remove_member(mx_id)             # already gone

def test_user_membership_changes(self):
    """handle_membership() across invite -> join -> leave transitions."""
    invited_event = RoomMemberEvent(
        {"event_id": "event1", "sender": BOB_ID, "origin_server_ts": 1},
        ALICE_ID,
        "invite",
        None,
        {"membership": "invite", "displayname": "Alice Margarine"},
    )
    joins_event = RoomMemberEvent(
        {"event_id": "event2", "sender": ALICE_ID, "origin_server_ts": 2},
        ALICE_ID,
        "join",
        None,
        {
            "membership": "join",
            "displayname": "Alice Margatroid",
            "avatar_url": "mxc://new",
        },
    )
    leaves_event = RoomMemberEvent(
        {"event_id": "event3", "sender": ALICE_ID, "origin_server_ts": 3},
        ALICE_ID,
        "leave",
        None,
        {"membership": "leave"},
    )
    unknown_event = RoomMemberEvent(
        {"event_id": "event4", "sender": ALICE_ID, "origin_server_ts": 4},
        ALICE_ID,
        "bad_membership",
        None,
        {"membership": "bad_membership"},
    )
    room = self.test_room
    assert not room.users
    assert not room.invited_users
    # Alice is invited, accepts (her name and avatar changed) then leaves
    room.handle_membership(invited_event)
    assert set(room.users) == {ALICE_ID}
    assert set(room.invited_users) == {ALICE_ID}
    room.handle_membership(joins_event)
    assert set(room.users) == {ALICE_ID}
    assert not room.invited_users
    # Joining updates the cached profile (display name and avatar) as well.
    assert room.names["Alice Margatroid"] == [ALICE_ID]
    assert room.users[ALICE_ID].display_name == "Alice Margatroid"
    assert room.users[ALICE_ID].avatar_url == "mxc://new"
    room.handle_membership(leaves_event)
    assert not room.users
    assert not room.invited_users
    # Alice is invited and declines
    room.handle_membership(invited_event)
    assert set(room.users) == {ALICE_ID}
    assert set(room.invited_users) == {ALICE_ID}
    room.handle_membership(leaves_event)
    assert not room.users
    assert not room.invited_users
    # Alice joins without invite then leaves
    room.handle_membership(joins_event)
    assert set(room.users) == {ALICE_ID}
    assert not room.invited_users
    room.handle_membership(leaves_event)
    assert not room.users
    assert not room.invited_users
    # Ensure we get False if we handle an event that changes nothing or
    # has an unknown new membership
    assert not room.handle_membership(leaves_event)
    assert not room.handle_membership(unknown_event)
| 34.116239 | 108 | 0.587133 |
74e712632d164dfcff5edf7a2dd99ac636337f61 | 1,084 | py | Python | ch15/rw_visual.py | yiyidhuang/PythonCrashCrouse2nd | 3512f9ab8fcf32c6145604a37e2a62feddf174d1 | [
"MIT"
] | null | null | null | ch15/rw_visual.py | yiyidhuang/PythonCrashCrouse2nd | 3512f9ab8fcf32c6145604a37e2a62feddf174d1 | [
"MIT"
] | null | null | null | ch15/rw_visual.py | yiyidhuang/PythonCrashCrouse2nd | 3512f9ab8fcf32c6145604a37e2a62feddf174d1 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
from random_walk import RandomWalk
# As long as the program is active, it constantly simulates random walking.
while True:
    # Create a RandomWalk instance with 50,000 steps and generate the walk.
    rw = RandomWalk(50_000)
    rw.fill_walk()

    # Draw all points, colored by step order so the walk's direction is visible.
    plt.style.use('classic')
    fig, ax = plt.subplots(figsize=(15, 9))
    point_numbers = range(rw.num_points)
    ax.scatter(rw.x_values, rw.y_values, c=point_numbers, cmap=plt.cm.Blues,
               edgecolors='none', s=1)

    # Highlight the start point (green) and the end point (red).
    ax.scatter(0, 0, c='green', edgecolors='none', s=100)
    ax.scatter(rw.x_values[-1], rw.y_values[-1], c='red', edgecolors='none', s=100)

    # Hide both axes. Bug fix: the original called get_yaxis() twice (never
    # hiding the x axis) and did so after plt.show(), where it had no effect.
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    plt.show()
    # Release the figure so repeated walks don't accumulate open figures.
    plt.close(fig)

    keep_running = input("Make another walk? (y/n): ")
    if keep_running == 'n':
        break
7c1066a8be8448e719fb7ba84c328229762f3a68 | 1,790 | py | Python | project/common.py | drdtr/udacity-data-analyst-project-2-introduction-to-data-analysis | cf598dc90ec6bfab4582471e3fa0aac05c6ef6c1 | [
"MIT"
] | null | null | null | project/common.py | drdtr/udacity-data-analyst-project-2-introduction-to-data-analysis | cf598dc90ec6bfab4582471e3fa0aac05c6ef6c1 | [
"MIT"
] | null | null | null | project/common.py | drdtr/udacity-data-analyst-project-2-introduction-to-data-analysis | cf598dc90ec6bfab4582471e3fa0aac05c6ef6c1 | [
"MIT"
] | null | null | null |
# Shared project configuration: input/output locations and the analysis year.
resources_folder = "../resources/project"  # input data sets are read from here
output_folder = "../output/project"  # generated artifacts are written here
the_year = 2018
"""We are going to analyze data sets for the year 2018."""
def get_declared_class_attributes(clazz):
    """Return a dict of *clazz*'s own attributes, excluding dunder names."""
    return {name: value
            for name, value in clazz.__dict__.items()
            if not name.startswith("__")}
def print_declared_class_attributes(clazz):
    """Print *clazz*'s own non-dunder attributes in class-declaration form."""
    print(f"class {clazz.__name__}:")
    attributes = get_declared_class_attributes(clazz)
    for name in attributes:
        print(f'\t{name} = "{attributes[name]}"')
class ColGdp:
    """Column names for the GDP data sets"""
    country_code = "Country code"
    country = "Country"
    year = "Year"
    gdp_per_capita = "GDP per capita"
    gdp_per_capita_ppp = "GDP per capita (PPP dollars)"
    # Additional columns (derived during the analysis, not in the raw data)
    above_avg_gdp_per_capita = "Above-average GDP per capita"
    above_avg_gdp_per_capita_ppp = "Above-average GDP per capita (PPP dollars)"
    gdp_level = "GDP level"
    gdp_total = "GDP total"
    gdp_total_ppp = "GDP total (PPP dollars)"
class ColPop:
    """Column names for the population data set"""
    country_code = "Country code"
    country = "Country"
    population = "Population"
class ColFuel:
    """Column names for the fuel export data set"""
    country_code = "Country code"
    country = "Country"
    fuel_exports = "Fuel exports (% of merchandise exports)"
class ColDem:
    """Column names for the democracy index data set"""
    country_code = "Country code"
    country = "Country"
    year = "Year"
    democracy_index = "Democracy index"
    # Additional columns (derived during the analysis, not in the raw data)
    regime_type = "Regime type"
class RegimeType:
    """Regime types for the democracy data set"""
    democracy = "Democracy"
    hybrid = "Hybrid"
    authoritarian = "Authoritarian"
    # All regime categories, in order from most to least democratic.
    values = [democracy, hybrid, authoritarian]
| 27.121212 | 84 | 0.685475 |
bbadcdd93fc6f31d48f4629d4336c74353c44853 | 461 | py | Python | remote/labml_remote/util/__init__.py | elgalu/labml | 511f0bbfcbeb4bc34bc6966a3973ff4e7e48eeee | [
"MIT"
] | 463 | 2021-05-28T03:21:14.000Z | 2022-03-28T06:28:21.000Z | remote/labml_remote/util/__init__.py | elgalu/labml | 511f0bbfcbeb4bc34bc6966a3973ff4e7e48eeee | [
"MIT"
] | 15 | 2021-06-22T10:02:36.000Z | 2021-12-20T06:14:12.000Z | remote/labml_remote/util/__init__.py | elgalu/labml | 511f0bbfcbeb4bc34bc6966a3973ff4e7e48eeee | [
"MIT"
] | 29 | 2020-06-03T07:13:31.000Z | 2021-05-23T18:20:34.000Z | from pathlib import Path
from typing import Dict
def template(file: Path, replace: Dict[str, str]):
    """Read *file* and substitute every ``%%KEY%%`` placeholder.

    Keys in *replace* are upper-cased before matching, so the key ``name``
    fills the ``%%NAME%%`` placeholder. Returns the substituted text.
    """
    text = Path(file).read_text()
    for key, value in replace.items():
        text = text.replace(f'%%{key.upper()}%%', value)
    return text
def get_env_vars(env_vars: Dict[str, str]):
    """Render *env_vars* as newline-separated shell ``export`` statements.

    Returns an empty string for an empty or missing mapping.
    """
    if env_vars:
        return '\n'.join(f'export {name}={value}'
                         for name, value in env_vars.items())
    return ''
557464e7fefa9ef6cb3b3dcd58a6d93031b5678b | 4,491 | py | Python | murano-7.0.0/murano/tests/unit/engine/system/test_net_explorer.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | null | null | null | murano-7.0.0/murano/tests/unit/engine/system/test_net_explorer.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | murano-7.0.0/murano/tests/unit/engine/system/test_net_explorer.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z | # Copyright (c) 2016 AT&T
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_config import cfg
from murano.dsl import murano_method
from murano.dsl import murano_type
from murano.engine.system import net_explorer
from murano.tests.unit import base
CONF = cfg.CONF
class TestNetExplorer(base.MuranoTestCase):
    """Tests for murano.engine.system.net_explorer.NetworkExplorer.

    The Neutron client, keystone auth utils and the execution session are
    all mocked out, so only the NetworkExplorer wrapper logic is exercised.
    """

    def setUp(self):
        super(TestNetExplorer, self).setUp()
        self.mock_class = mock.MagicMock(spec=murano_type.MuranoClass)
        self.mock_method = mock.MagicMock(spec=murano_method.MuranoMethod)
        self._this = mock.MagicMock()
        self.region_name = "test-region"
        # Undo any mock.patch patches still active when each test finishes.
        self.addCleanup(mock.patch.stopall)

    @mock.patch("murano.engine.system.net_explorer.nclient")
    @mock.patch("murano.engine.system.net_explorer.auth_utils")
    @mock.patch("murano.dsl.helpers.get_execution_session")
    def test_get_available_cidr(self, execution_session,
                                mock_authentication, mock_nclient):
        # With mocked backends, a CIDR is returned for a router/net pair and
        # the execution session is consulted.
        ne = net_explorer.NetworkExplorer(self._this, self.region_name)
        router_id = 12
        net_id = 144
        self.assertIsNotNone(ne.get_available_cidr(router_id, net_id))
        self.assertTrue(execution_session.called)

    @mock.patch("murano.engine.system.net_explorer.nclient")
    @mock.patch("murano.engine.system.net_explorer.auth_utils")
    @mock.patch("murano.dsl.helpers.get_execution_session")
    def test_list(self, execution_session, mock_authentication, mock_nclient):
        # Each list_* helper unwraps the matching key from the Neutron
        # client's response dict.
        ne = net_explorer.NetworkExplorer(self._this, self.region_name)
        self.assertEqual(ne.list_networks(),
                         ne._client.list_networks()['networks'])
        self.assertEqual(ne.list_subnetworks(),
                         ne._client.list_subnets()['subnets'])
        self.assertEqual(ne.list_ports(), ne._client.list_ports()['ports'])
        self.assertEqual(ne.list_neutron_extensions(),
                         ne._client.list_extensions()['extensions'])
        self.assertEqual(ne.get_default_dns(), ne._settings.default_dns)

    @mock.patch("murano.engine.system.net_explorer.nclient")
    @mock.patch("murano.engine.system.net_explorer.auth_utils")
    @mock.patch("murano.dsl.helpers.get_execution_session")
    def test_get_router_error(self, execution_session,
                              mock_authentication, mock_nclient):
        # get_default_router raises KeyError when no router is configured.
        ne = net_explorer.NetworkExplorer(self._this, self.region_name)
        self.assertRaises(KeyError, ne.get_default_router)

    @mock.patch("murano.engine.system.net_explorer.nclient")
    @mock.patch("murano.engine.system.net_explorer.auth_utils")
    @mock.patch("murano.dsl.helpers.get_execution_session")
    def test_get_ext_network_id_router(self, execution_session,
                                       mock_authentication, mock_nclient):
        # An unknown router yields no external network id.
        ne = net_explorer.NetworkExplorer(self._this, self.region_name)
        router_id = 12
        self.assertIsNone(ne.get_external_network_id_for_router(router_id))

    @mock.patch("murano.engine.system.net_explorer.nclient")
    @mock.patch("murano.engine.system.net_explorer.auth_utils")
    @mock.patch("murano.dsl.helpers.get_execution_session")
    def test_get_ext_network_id_network(self, execution_session,
                                        mock_authentication, mock_nclient):
        # For a plain network id, the same id is returned back.
        ne = net_explorer.NetworkExplorer(self._this, self.region_name)
        net_id = 144
        self.assertEqual(net_id,
                         ne.get_external_network_id_for_network(net_id))

    @mock.patch("murano.engine.system.net_explorer.nclient")
    @mock.patch("murano.engine.system.net_explorer.auth_utils")
    @mock.patch("murano.dsl.helpers.get_execution_session")
    def test_get_cidr_none_router(self, execution_session,
                                  mock_authentication, mock_nclient):
        # No router means no CIDRs are taken.
        ne = net_explorer.NetworkExplorer(self._this, self.region_name)
        router_id = None
        self.assertEqual([], ne._get_cidrs_taken_by_router(router_id))
| 45.363636 | 78 | 0.706524 |
d7cc0454a9b709ed98662b2484ce946a362fab8e | 9,275 | py | Python | netforce_mfg/netforce_mfg/models/production_plan.py | nfco/netforce | 35252eecd0a6633ab9d82162e9e3ff57d4da029a | [
"MIT"
] | 27 | 2015-09-30T23:53:30.000Z | 2021-06-07T04:56:25.000Z | netforce_mfg/netforce_mfg/models/production_plan.py | nfco/netforce | 35252eecd0a6633ab9d82162e9e3ff57d4da029a | [
"MIT"
] | 191 | 2015-10-08T11:46:30.000Z | 2019-11-14T02:24:36.000Z | netforce_mfg/netforce_mfg/models/production_plan.py | nfco/netforce | 35252eecd0a6633ab9d82162e9e3ff57d4da029a | [
"MIT"
] | 32 | 2015-10-01T03:59:43.000Z | 2022-01-13T07:31:05.000Z | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields, get_model
from netforce.database import get_connection
from netforce import access
class ProductionPlan(Model):
    """Production plan: planned output of one product over a date range.

    Planned vs. actual quantities are derived from production orders and
    from stock moves linked to this plan via ``related_id``.
    """
    _name = "production.plan"
    _string = "Production Plan"
    _name_field = "number"
    _fields = {
        "number": fields.Char("Number", required=True, search=True),
        "product_id": fields.Many2One("product", "Product", required=True, search=True),
        "customer_id": fields.Many2One("contact", "Customer", search=True),
        "date_from": fields.Date("From Date", required=True, search=True),
        "date_to": fields.Date("To Date", required=True, search=True),
        "plan_qty": fields.Decimal("Planned Production Qty", required=True),
        "uom_id": fields.Many2One("uom", "UoM", required=True),
        "location_id": fields.Many2One("stock.location", "To Warehouse"), # XXX: deprecated
        "bom_id": fields.Many2One("bom", "BoM", required=True, search=True),
        "priority": fields.Selection([["high", "High"], ["medium", "Medium"], ["low", "Low"]], "Priority", search=True),
        "state": fields.Selection([["open", "Open"], ["closed", "Closed"]], "Status", required=True, search=True),
        "description": fields.Text("Description"),
        "comments": fields.One2Many("message", "related_id", "Comments"),
        # Date-part helpers computed in SQL from due_date, for grouping.
        "year": fields.Char("Year", sql_function=["year", "due_date"]),
        "quarter": fields.Char("Quarter", sql_function=["quarter", "due_date"]),
        "month": fields.Char("Month", sql_function=["month", "due_date"]),
        "week": fields.Char("Week", sql_function=["week", "due_date"]),
        "agg_qty": fields.Decimal("Total Qty", agg_function=["sum", "qty"]),
        # NOTE(review): duplicate key — "comments" is already defined above;
        # in a dict literal the later entry silently wins.
        "comments": fields.One2Many("message", "related_id", "Comments"),
        "actual_qty": fields.Decimal("Actual Production Qty", function="get_actual_qty"),
        "stock_moves": fields.One2Many("stock.move", "related_id", "Stock Movements"),
        "plan_in_qty": fields.Decimal("Planned Receipt Qty", function="get_plan_in_qty"),
        "plan_remain_qty": fields.Decimal("Planned Remain Qty", function="get_plan_remain_qty"),
        # NOTE(review): duplicate key — "actual_qty" also appears above.
        "actual_qty": fields.Decimal("Actual Production Qty", function="get_actual_qty"),
    }
    _order = "date_to"

    def _get_number(self, context={}):
        """Return the next unused plan number from the sequence, or None.

        Loops until a number not taken by an existing plan is found; the
        uniqueness check runs as the admin user (id 1) so all existing
        plans are visible regardless of the caller's permissions.
        """
        seq_id = get_model("sequence").find_sequence(type="production_plan",context=context)
        if not seq_id:
            return None
        while 1:
            num = get_model("sequence").get_next_number(seq_id, context=context)
            if not num:
                return None
            user_id = access.get_active_user()
            access.set_active_user(1)
            res = self.search([["number", "=", num]])
            access.set_active_user(user_id)
            if not res:
                return num
            # Number already taken: burn it and try the next one.
            get_model("sequence").increment_number(seq_id, context=context)

    _defaults = {
        "number": _get_number,
        "state": "open",
    }

    def get_actual_qty(self, ids, context={}):
        """Sum received qty of *done* production orders due inside the plan
        window (optionally restricted to the plan's customer), converted to
        the plan's UoM."""
        vals = {}
        for obj in self.browse(ids):
            cond = [["state", "=", "done"], ["due_date", ">=", obj.date_from],
                    ["due_date", "<=", obj.date_to], ["product_id", "=", obj.product_id.id]]
            if obj.customer_id:
                cond.append(["customer_id", "=", obj.customer_id.id])
            total = 0
            for order in get_model("production.order").search_browse(cond):
                total += get_model("uom").convert(order.qty_received, order.uom_id.id, obj.uom_id.id)
            vals[obj.id] = total
        return vals

    def get_plan_in_qty(self, ids, context={}):
        """Sum pending/approved/done stock moves into the plan's warehouse
        during the plan window, converted to the plan's UoM."""
        settings = get_model("settings").browse(1)  # NOTE(review): unused here
        vals = {}
        for obj in self.browse(ids):
            cond = [["state", "in", ["pending", "approved", "done"]], ["date", ">=", obj.date_from + " 00:00:00"], ["date", "<=",
                    obj.date_to + " 23:59:59"], ["product_id", "=", obj.product_id.id], ["location_to_id", "=", obj.location_id.id]]
            if obj.customer_id:
                cond.append(["contact_id", "=", obj.customer_id.id])
            total = 0
            for move in get_model("stock.move").search_browse(cond):
                total += get_model("uom").convert(move.qty, move.uom_id.id, obj.uom_id.id)
            vals[obj.id] = total
        return vals

    def get_plan_remain_qty(self, ids, context={}):
        """Projected stock balance of the plan's product at its warehouse at
        the end of the plan window (inbound minus outbound moves up to
        date_to), converted to the plan's UoM."""
        db = get_connection()
        vals = {}
        for obj in self.browse(ids):
            bal_qty = 0
            # Inbound moves up to the end of the window.
            res = db.query("SELECT SUM(qty) AS qty,uom_id FROM stock_move WHERE product_id=%s AND location_to_id=%s AND date<=%s AND state IN ('pending','approved','done') GROUP BY uom_id",
                           obj.product_id.id, obj.location_id.id, obj.date_to + " 23:59:59")
            for r in res:
                bal_qty += get_model("uom").convert(r.qty, r.uom_id, obj.uom_id.id)
            # Outbound moves up to the end of the window.
            res = db.query("SELECT SUM(qty) AS qty,uom_id FROM stock_move WHERE product_id=%s AND location_from_id=%s AND date<=%s AND state IN ('pending','approved','done') GROUP BY uom_id",
                           obj.product_id.id, obj.location_id.id, obj.date_to + " 23:59:59")
            for r in res:
                bal_qty -= get_model("uom").convert(r.qty, r.uom_id, obj.uom_id.id)
            vals[obj.id] = bal_qty
        return vals

    def update_stock(self, ids, context={}):
        """Recreate this plan's pending stock moves.

        Deletes any moves previously linked to the plan, then, if planned
        qty still exceeds planned receipts, creates one receipt move for
        the shortfall (production -> BoM location) plus one issue move per
        BoM component (component location -> production), scaled by the
        shortfall-to-BoM ratio.
        """
        settings = get_model("settings").browse(1)
        res = get_model("stock.location").search([["type", "=", "production"]])
        if not res:
            raise Exception("Production location not found")
        mfg_loc_id = res[0]
        for obj in self.browse(ids):
            obj.stock_moves.delete()
            diff_qty = obj.plan_qty - obj.plan_in_qty
            if diff_qty <= 0:
                continue
            bom=obj.bom_id
            if not bom:
                raise Exception("Missing BoM")
            # Receipt of the finished product for the remaining shortfall.
            vals = {
                "date": obj.date_to + " 23:59:59",
                "journal_id": settings.pick_in_journal_id.id,
                "related_id": "production.plan,%s" % obj.id,
                "location_from_id": mfg_loc_id,
                "location_to_id": bom.location_id.id,
                "product_id": obj.product_id.id,
                "qty": diff_qty,
                "uom_id": obj.uom_id.id,
                "state": "pending",
            }
            get_model("stock.move").create(vals)
            # Issue each BoM component, scaled to the shortfall.
            ratio=diff_qty/bom.qty
            for line in bom.lines:
                line_qty=line.qty*ratio
                vals = {
                    "date": obj.date_to + " 23:59:59",
                    "journal_id": settings.pick_out_journal_id.id,
                    "related_id": "production.plan,%s" % obj.id,
                    "location_from_id": line.location_id.id,
                    "location_to_id": mfg_loc_id,
                    "product_id": line.product_id.id,
                    "qty": line_qty,
                    "uom_id": line.uom_id.id,
                    "state": "pending",
                }
                get_model("stock.move").create(vals)

    def close(self, ids, context={}):
        """Close the plans and drop their pending planned moves."""
        for obj in self.browse(ids):
            obj.stock_moves.delete()
            obj.write({"state": "closed"})

    def reopen(self, ids, context={}):
        """Reopen closed plans and regenerate their planned moves."""
        for obj in self.browse(ids):
            obj.write({"state": "open"})
            obj.update_stock()

    def copy(self, ids, context={}):
        """Duplicate the plans as new open plans.

        NOTE(review): the new record reuses the source's ``number`` instead
        of drawing a fresh one from the sequence — confirm this is intended.
        """
        for obj in self.browse(ids):
            vals = {
                "number": obj.number,
                "product_id": obj.product_id.id,
                "customer_id": obj.customer_id.id,
                "date_from": obj.date_from,
                "date_to": obj.date_to,
                "plan_qty": obj.plan_qty,
                "uom_id": obj.uom_id.id,
                "location_id": obj.location_id.id,
                "priority": obj.priority,
                "description": obj.description,
                "state": "open",
            }
            self.create(vals)

ProductionPlan.register()
| 48.307292 | 228 | 0.575633 |
a0a3fa75e1deb1c6ee5834ed180d040438ecae7d | 2,163 | py | Python | trackeddy_utils/access_om2/pandas_fields/pandas_model.py | Josue-Martinez-Moreno/phd_source | add2aa0ff3e8fc4596d4dc9504e2b80c3d42a3e5 | [
"MIT"
] | 2 | 2021-07-28T14:28:36.000Z | 2022-01-26T06:37:51.000Z | trackeddy_utils/access_om2/pandas_fields/pandas_model.py | josuemtzmo/phd_source | add2aa0ff3e8fc4596d4dc9504e2b80c3d42a3e5 | [
"MIT"
] | null | null | null | trackeddy_utils/access_om2/pandas_fields/pandas_model.py | josuemtzmo/phd_source | add2aa0ff3e8fc4596d4dc9504e2b80c3d42a3e5 | [
"MIT"
] | null | null | null | import matplotlib
matplotlib.use('agg')
import sys
from netCDF4 import Dataset
import os
os.environ["PROJ_LIB"] = "/g/data/v45/jm5970/env/track_env/share/proj"
import cmocean as cm
from trackeddy.tracking import *
from trackeddy.datastruct import *
from trackeddy.geometryfunc import *
from trackeddy.physics import *
from numpy import *
import xarray as xr
# Command-line arguments: which model output directory to process and how
# the time axis is partitioned.
outputfilenumber = int(sys.argv[1])  # index of the output%03d directory
division_number = int(sys.argv[2])   # which time slice of this file to process
file_division = int(sys.argv[3])     # number of slices the time axis is split into
file_count = int(sys.argv[4])        # NOTE(review): unused in this script — confirm
outfile = sys.argv[5]                # NOTE(review): unused; results go to outfolder
def split_list(alist, wanted_parts=1):
    """Split *alist* into *wanted_parts* roughly equal consecutive chunks.

    Returns a numpy array of the chunks (equal-length chunks when the
    length divides evenly).
    """
    n = len(alist)
    # Integer cut points 0 <= c_0 <= ... <= c_wanted_parts == n.
    cuts = [k * n // wanted_parts for k in range(wanted_parts + 1)]
    return np.array([alist[cuts[k]:cuts[k + 1]] for k in range(wanted_parts)])
# Where trackeddy's intermediate .npy output lives and where CSVs are written.
outfolder='/scratch/x77/jm5970/trackeddy_output/'
# Model output data path for the requested ACCESS-OM2 run segment.
outputpath='/g/data/cj50/access-om2/raw-output/access-om2-01/01deg_jra55v140_iaf/output%03d/' % outputfilenumber
# Import SSH values to python environment (lazy-loaded via xarray/dask).
ncfile=xr.open_mfdataset(outputpath+'ocean/ocean-2d-sea_level-1-daily-mean-ym_*.nc',combine='by_coords')
time=ncfile.time.values[:]
# Partition the time axis and take the start date of this job's slice.
time_division=split_list(range(0,len(time)), wanted_parts=file_division)
init_time = datetime.strptime(str(time[time_division[division_number][0]]), "%Y-%m-%d %H:%M:%S")
print(init_time)
#eta=ncfile.sea_level.values[time_division[division_number][0]:time_division[division_number][-1]+1,:,:]
#etashape=shape(eta)
# Load the previously identified eddies (positive polarity) for this slice
# and convert the nested dict to a pandas frame stamped with the slice start.
filepath = outfolder+'npy/ACCESS_01_{0:05}_{1:02}_pos.npy'.format(outputfilenumber,division_number)
analysedatap=np.load(filepath,allow_pickle=True)
dictanalysep=analysedatap.item()
pos_pd = dict2pd(dictanalysep,inittime=init_time.strftime("%d-%m-%Y"),n=0,polarity='pos',ensemble=None)
# Same for the negative-polarity eddies.
filepath = outfolder+'npy/ACCESS_01_{0:05}_{1:02}_neg.npy'.format(outputfilenumber,division_number)
analysedatan=np.load(filepath,allow_pickle=True)
dictanalysen=analysedatan.item()
neg_pd = dict2pd(dictanalysen,inittime=init_time.strftime("%d-%m-%Y"),n=0,polarity='neg',ensemble=None)
# Combine both polarities and write the per-slice CSV.
pd_result = pd.concat([pos_pd,neg_pd])
pd_result.to_csv(outfolder+'pandas/csv_identified_eddies_{0:05}_{1:02}.csv'.format(outputfilenumber,division_number))
11095c4b29353869e471d4443b6819010c70a1fb | 1,032 | py | Python | tests/rke_etcd_backup_restore/test.py | trilioData/tvk-interop-plugins | dab12a297f2dc728e5bdc91051b22e68019dac1b | [
"RSA-MD"
] | null | null | null | tests/rke_etcd_backup_restore/test.py | trilioData/tvk-interop-plugins | dab12a297f2dc728e5bdc91051b22e68019dac1b | [
"RSA-MD"
] | null | null | null | tests/rke_etcd_backup_restore/test.py | trilioData/tvk-interop-plugins | dab12a297f2dc728e5bdc91051b22e68019dac1b | [
"RSA-MD"
] | null | null | null | import pytest
import sys
import os
import io
sys.path.append(f"{os.getcwd()}/internal/utils")
import util as rke
sys.path.append(f"{os.getcwd()}/tools/rke_etcd_backup_plugin")
def test_backup(config_param):
    """Run the etcd-backup plugin CLI end-to-end and expect exit status 0.

    The command line is assembled from the ``config_param`` fixture's
    target/rancher settings and executed via the shared ``rke.run`` helper.
    """
    ret_val = rke.run(
        f"sudo python3 tools/rke_etcd_backup_plugin/rke-etcd-backup-restore.py -backup --target-name {config_param['target-name']} --target-namespace {config_param['target-namespace']} --rancher-url {config_param['rancher-url']} --bearer-token {config_param['bearer-token']} --cluster-name {config_param['cluster-name']}")
    assert ret_val == 0
# Interactive prompts are answered via shell `echo` piping rather than by
# patching builtins.input (the plugin runs in a subprocess with its own stdin).
#@patch('builtins.input', lambda restore_id: "")
def test_restore(config_param, monkeypatch):
    """Run the etcd-restore plugin CLI end-to-end and expect exit status 0.

    NOTE(review): *monkeypatch* is unused, and the inner ``format(...)``
    around the secret key is redundant — confirm before cleaning up.
    """
    ret_val = rke.run(
        "{{ echo ''; echo '{0}'; }} | sudo python3 tools/rke_etcd_backup_plugin/rke-etcd-backup-restore.py -restore --rancher-url {1} --bearer-token {2} --cluster-name {3}".format(format(config_param['target-secretkey']), config_param['rancher-url'], config_param['bearer-token'], config_param['cluster-name']))
    assert ret_val == 0
| 54.315789 | 322 | 0.723837 |
33e703bc9e1f85b9812b70cffb61e8f0dd56b4f5 | 47 | py | Python | truetool/command_line.py | truecharts/truetool | e487c84139b70f868499892cdf308e31d48a8be9 | [
"BSD-3-Clause"
] | 12 | 2022-01-20T03:37:56.000Z | 2022-03-17T21:51:06.000Z | truetool/command_line.py | truecharts/truetool | e487c84139b70f868499892cdf308e31d48a8be9 | [
"BSD-3-Clause"
] | 2 | 2022-02-03T10:06:37.000Z | 2022-02-18T08:58:11.000Z | truetool/command_line.py | truecharts/truetool | e487c84139b70f868499892cdf308e31d48a8be9 | [
"BSD-3-Clause"
] | null | null | null | import truetool
def main():
truetool.run() | 11.75 | 18 | 0.680851 |
b06c1f045862d26ced051802da48bd6a740baa59 | 533 | py | Python | test/functions/decl9.py | kylebarron/MagicPython | da6fa0793e2c85d3bf7709ff1d4f65ccf468db11 | [
"MIT"
] | 1,482 | 2015-10-16T21:59:32.000Z | 2022-03-30T11:44:40.000Z | test/functions/decl9.py | kylebarron/MagicPython | da6fa0793e2c85d3bf7709ff1d4f65ccf468db11 | [
"MIT"
] | 226 | 2015-10-15T15:53:44.000Z | 2022-03-25T03:08:27.000Z | test/functions/decl9.py | kylebarron/MagicPython | da6fa0793e2c85d3bf7709ff1d4f65ccf468db11 | [
"MIT"
] | 129 | 2015-10-20T02:41:49.000Z | 2022-03-22T01:44:36.000Z | cdef foo(): pass
cdef : source.python
: source.python
foo : meta.function-call.generic.python, meta.function-call.python, source.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
: : punctuation.separator.colon.python, source.python
: source.python
pass : keyword.control.flow.python, source.python
| 41 | 103 | 0.652908 |
d08a27a470dcd119d4c57af3f6fffa0144b9059c | 22,191 | py | Python | integration/combination/test_api_with_authorizers.py | huanjiayang-ibm/serverless-application-model | 153ad4393435b64d84803d805b40366f3172e459 | [
"Apache-2.0"
] | 1,279 | 2020-08-25T03:33:15.000Z | 2022-03-31T09:49:22.000Z | integration/combination/test_api_with_authorizers.py | huanjiayang-ibm/serverless-application-model | 153ad4393435b64d84803d805b40366f3172e459 | [
"Apache-2.0"
] | 797 | 2020-08-24T23:30:05.000Z | 2022-03-31T22:28:29.000Z | integration/combination/test_api_with_authorizers.py | huanjiayang-ibm/serverless-application-model | 153ad4393435b64d84803d805b40366f3172e459 | [
"Apache-2.0"
] | 431 | 2020-08-27T20:47:26.000Z | 2022-03-31T23:57:55.000Z | import requests
from integration.helpers.base_test import BaseTest
from integration.helpers.deployer.utils.retry import retry
from integration.helpers.exception import StatusCodeError
class TestApiWithAuthorizers(BaseTest):
    """Integration tests for API Gateway authorizers declared through SAM.

    Covers Lambda TOKEN, Lambda REQUEST, Cognito, and AWS_IAM authorizers.
    The ``..._max`` and ``..._max_openapi`` templates configure identical
    authorizers, so their assertions are shared in
    ``_verify_max_authorizers_stack`` instead of being duplicated.
    """

    def _lambda_authorizer_uri(self, stack_outputs):
        """Invocation URI API Gateway uses to call the Lambda authorizer function."""
        return (
            "arn:aws:apigateway:"
            + self.my_region
            + ":lambda:path/2015-03-31/functions/"
            + stack_outputs["AuthorizerFunctionArn"]
            + "/invocations"
        )

    def _verify_method_authorizers(
        self, resources, rest_api_id, apigw_client, token_authorizer, request_authorizer, cognito_authorizer
    ):
        """Assert each test resource's GET method is wired to the expected authorizer."""
        lambda_token_get_method_result = get_method(resources, "/lambda-token", rest_api_id, apigw_client)
        self.assertEqual(
            lambda_token_get_method_result["authorizerId"],
            token_authorizer["id"],
            "lambdaTokenAuthorizer: GET method must be configured to use the Lambda Token Authorizer",
        )

        lambda_request_get_method_result = get_method(resources, "/lambda-request", rest_api_id, apigw_client)
        self.assertEqual(
            lambda_request_get_method_result["authorizerId"],
            request_authorizer["id"],
            "lambdaRequestAuthorizer: GET method must be configured to use the Lambda Request Authorizer",
        )

        cognito_get_method_result = get_method(resources, "/cognito", rest_api_id, apigw_client)
        self.assertEqual(
            cognito_get_method_result["authorizerId"],
            cognito_authorizer["id"],
            "cognitoAuthorizer: GET method must be configured to use the Cognito Authorizer",
        )

        iam_get_method_result = get_method(resources, "/iam", rest_api_id, apigw_client)
        self.assertEqual(
            iam_get_method_result["authorizationType"],
            "AWS_IAM",
            "iamAuthorizer: GET method must be configured to use AWS_IAM",
        )

    def test_authorizers_min(self):
        """Authorizers declared with only their required properties (defaults apply)."""
        self.create_and_verify_stack("combination/api_with_authorizers_min")

        stack_outputs = self.get_stack_outputs()
        rest_api_id = self.get_physical_id_by_type("AWS::ApiGateway::RestApi")
        apigw_client = self.client_provider.api_client
        authorizers = apigw_client.get_authorizers(restApiId=rest_api_id)["items"]
        lambda_authorizer_uri = self._lambda_authorizer_uri(stack_outputs)

        # Lambda TOKEN authorizer: defaults -> Authorization header, no credentials/TTL
        lambda_token_authorizer = get_authorizer_by_name(authorizers, "MyLambdaTokenAuth")
        self.assertEqual(lambda_token_authorizer["type"], "TOKEN", "lambdaTokenAuthorizer: Type must be TOKEN")
        self.assertEqual(
            lambda_token_authorizer["identitySource"],
            "method.request.header.Authorization",
            "lambdaTokenAuthorizer: identity source must be method.request.header.Authorization",
        )
        self.assertIsNone(
            lambda_token_authorizer.get("authorizerCredentials"),
            "lambdaTokenAuthorizer: authorizer credentials must not be set",
        )
        self.assertIsNone(
            lambda_token_authorizer.get("identityValidationExpression"),
            "lambdaTokenAuthorizer: validation expression must not be set",
        )
        self.assertEqual(
            lambda_token_authorizer["authorizerUri"],
            lambda_authorizer_uri,
            "lambdaTokenAuthorizer: authorizer URI must be the Lambda Function Authorizer's URI",
        )
        self.assertIsNone(
            lambda_token_authorizer.get("authorizerResultTtlInSeconds"), "lambdaTokenAuthorizer: TTL must not be set"
        )

        # Lambda REQUEST authorizer keyed off a single query string parameter
        lambda_request_authorizer = get_authorizer_by_name(authorizers, "MyLambdaRequestAuth")
        self.assertEqual(lambda_request_authorizer["type"], "REQUEST", "lambdaRequestAuthorizer: Type must be REQUEST")
        self.assertEqual(
            lambda_request_authorizer["identitySource"],
            "method.request.querystring.authorization",
            "lambdaRequestAuthorizer: identity source must be method.request.querystring.authorization",
        )
        self.assertIsNone(
            lambda_request_authorizer.get("authorizerCredentials"),
            "lambdaRequestAuthorizer: authorizer credentials must not be set",
        )
        self.assertEqual(
            lambda_request_authorizer["authorizerUri"],
            lambda_authorizer_uri,
            "lambdaRequestAuthorizer: authorizer URI must be the Lambda Function Authorizer's URI",
        )
        self.assertIsNone(
            lambda_request_authorizer.get("authorizerResultTtlInSeconds"),
            "lambdaRequestAuthorizer: TTL must not be set",
        )

        # Cognito authorizer backed by a single user pool
        cognito_authorizer = get_authorizer_by_name(authorizers, "MyCognitoAuthorizer")
        cognito_user_pool_arn = stack_outputs["CognitoUserPoolArn"]
        self.assertEqual(
            cognito_authorizer["type"], "COGNITO_USER_POOLS", "cognitoAuthorizer: Type must be COGNITO_USER_POOLS"
        )
        self.assertEqual(
            cognito_authorizer["providerARNs"],
            [cognito_user_pool_arn],
            "cognitoAuthorizer: provider ARN must be the Cognito User Pool ARNs",
        )
        self.assertIsNone(
            cognito_authorizer.get("identityValidationExpression"),
            "cognitoAuthorizer: validation expression must not be set",
        )
        self.assertEqual(
            cognito_authorizer["identitySource"],
            "method.request.header.Authorization",
            "cognitoAuthorizer: identity source must be method.request.header.Authorization",
        )

        resources = apigw_client.get_resources(restApiId=rest_api_id)["items"]
        self._verify_method_authorizers(
            resources,
            rest_api_id,
            apigw_client,
            lambda_token_authorizer,
            lambda_request_authorizer,
            cognito_authorizer,
        )

        # End-to-end: unauthorized calls are rejected, authorized calls succeed
        base_url = stack_outputs["ApiUrl"]
        self.verify_authorized_request(base_url + "none", 200)
        self.verify_authorized_request(base_url + "lambda-token", 401)
        self.verify_authorized_request(base_url + "lambda-token", 200, "Authorization", "allow")
        self.verify_authorized_request(base_url + "lambda-request", 401)
        self.verify_authorized_request(base_url + "lambda-request?authorization=allow", 200)
        self.verify_authorized_request(base_url + "cognito", 401)
        self.verify_authorized_request(base_url + "iam", 403)

    def test_authorizers_max(self):
        """Authorizers declared with every supported property."""
        self.create_and_verify_stack("combination/api_with_authorizers_max")
        self._verify_max_authorizers_stack()

    def test_authorizers_max_openapi(self):
        """Same authorizer set as ``test_authorizers_max``, declared via an OpenAPI document."""
        self.create_and_verify_stack("combination/api_with_authorizers_max_openapi")
        apigw_client, stack_outputs, base_url = self._verify_max_authorizers_stack()

        # The OpenAPI template additionally provisions an API key
        api_key_id = stack_outputs["ApiKeyId"]
        key = apigw_client.get_api_key(apiKey=api_key_id, includeValue=True)
        self.verify_authorized_request(base_url + "apikey", 200, "x-api-key", key["value"])
        self.verify_authorized_request(base_url + "apikey", 403)

    def _verify_max_authorizers_stack(self):
        """Shared assertions for the two "max" templates.

        Returns ``(apigw_client, stack_outputs, base_url)`` so callers can run
        template-specific follow-up checks.
        """
        stack_outputs = self.get_stack_outputs()
        rest_api_id = self.get_physical_id_by_type("AWS::ApiGateway::RestApi")
        apigw_client = self.client_provider.api_client
        authorizers = apigw_client.get_authorizers(restApiId=rest_api_id)["items"]
        lambda_authorizer_uri = self._lambda_authorizer_uri(stack_outputs)

        # Lambda TOKEN authorizer: custom header, credentials, validation regex, 20s TTL
        lambda_token_authorizer = get_authorizer_by_name(authorizers, "MyLambdaTokenAuth")
        self.assertEqual(lambda_token_authorizer["type"], "TOKEN", "lambdaTokenAuthorizer: Type must be TOKEN")
        self.assertEqual(
            lambda_token_authorizer["identitySource"],
            "method.request.header.MyCustomAuthHeader",
            "lambdaTokenAuthorizer: identity source must be method.request.header.MyCustomAuthHeader",
        )
        self.assertEqual(
            lambda_token_authorizer["authorizerCredentials"],
            stack_outputs["LambdaAuthInvokeRoleArn"],
            "lambdaTokenAuthorizer: authorizer credentials must be set",
        )
        self.assertEqual(
            lambda_token_authorizer["identityValidationExpression"],
            "allow",
            "lambdaTokenAuthorizer: validation expression must be set to allow",
        )
        self.assertEqual(
            lambda_token_authorizer["authorizerUri"],
            lambda_authorizer_uri,
            "lambdaTokenAuthorizer: authorizer URI must be the Lambda Function Authorizer's URI",
        )
        self.assertEqual(
            lambda_token_authorizer["authorizerResultTtlInSeconds"], 20, "lambdaTokenAuthorizer: TTL must be 20"
        )

        # Lambda REQUEST authorizer: multiple identity sources, caching disabled (TTL 0)
        lambda_request_authorizer = get_authorizer_by_name(authorizers, "MyLambdaRequestAuth")
        self.assertEqual(lambda_request_authorizer["type"], "REQUEST", "lambdaRequestAuthorizer: Type must be REQUEST")
        self.assertEqual(
            lambda_request_authorizer["identitySource"],
            "method.request.header.authorizationHeader, method.request.querystring.authorization, method.request.querystring.authorizationQueryString1",
            "lambdaRequestAuthorizer: identity source must be method.request.header.authorizationHeader, method.request.querystring.authorization, method.request.querystring.authorizationQueryString1",
        )
        self.assertEqual(
            lambda_request_authorizer["authorizerCredentials"],
            stack_outputs["LambdaAuthInvokeRoleArn"],
            "lambdaRequestAuthorizer: authorizer credentials must be set",
        )
        self.assertEqual(
            lambda_request_authorizer["authorizerUri"],
            lambda_authorizer_uri,
            "lambdaRequestAuthorizer: authorizer URI must be the Lambda Function Authorizer's URI",
        )
        self.assertEqual(
            lambda_request_authorizer["authorizerResultTtlInSeconds"], 0, "lambdaRequestAuthorizer: TTL must be 0"
        )

        # Cognito authorizer backed by two user pools
        cognito_authorizer = get_authorizer_by_name(authorizers, "MyCognitoAuthorizer")
        cognito_user_pool_arn = stack_outputs["CognitoUserPoolArn"]
        cognito_user_pool2_arn = stack_outputs["CognitoUserPoolTwoArn"]
        self.assertEqual(
            cognito_authorizer["type"], "COGNITO_USER_POOLS", "cognitoAuthorizer: Type must be COGNITO_USER_POOLS"
        )
        self.assertEqual(
            cognito_authorizer["providerARNs"],
            [cognito_user_pool_arn, cognito_user_pool2_arn],
            "cognitoAuthorizer: provider ARN must be the Cognito User Pool ARNs",
        )
        self.assertEqual(
            cognito_authorizer["identityValidationExpression"],
            "myauthvalidationexpression",
            "cognitoAuthorizer: validation expression must be set to myauthvalidationexpression",
        )
        self.assertEqual(
            cognito_authorizer["identitySource"],
            "method.request.header.MyAuthorizationHeader",
            "cognitoAuthorizer: identity source must be method.request.header.MyAuthorizationHeader",
        )

        resources = apigw_client.get_resources(restApiId=rest_api_id)["items"]
        self._verify_method_authorizers(
            resources,
            rest_api_id,
            apigw_client,
            lambda_token_authorizer,
            lambda_request_authorizer,
            cognito_authorizer,
        )

        base_url = stack_outputs["ApiUrl"]
        self.verify_authorized_request(base_url + "none", 200)
        self.verify_authorized_request(base_url + "lambda-token", 401)
        self.verify_authorized_request(base_url + "lambda-token", 200, "MyCustomAuthHeader", "allow")
        self.verify_authorized_request(base_url + "lambda-request", 401)
        self.verify_authorized_request(
            base_url + "lambda-request?authorization=allow&authorizationQueryString1=x", 200, "authorizationHeader", "y"
        )
        self.verify_authorized_request(base_url + "cognito", 401)
        self.verify_authorized_request(base_url + "iam", 403)

        return apigw_client, stack_outputs, base_url

    def test_authorizers_with_invoke_function_set_none(self):
        """InvokeRole handling: explicit NONE must leave the integration without credentials."""
        self.create_and_verify_stack("combination/api_with_authorizers_invokefunction_set_none")

        rest_api_id = self.get_physical_id_by_type("AWS::ApiGateway::RestApi")
        apigw_client = self.client_provider.api_client
        resources = apigw_client.get_resources(restApiId=rest_api_id)["items"]

        # Default invoke role resolves to the caller-credentials wildcard ARN
        function_with_invoke_role_default = get_method(
            resources, "/MyFunctionDefaultInvokeRole", rest_api_id, apigw_client
        )
        credentials_for_invoke_role_default = function_with_invoke_role_default["methodIntegration"]["credentials"]
        self.assertEqual(credentials_for_invoke_role_default, "arn:aws:iam::*:user/*")

        # BUGFIX: the original read .get("methodIntegration").get("methodIntegration"),
        # which is always None and made this assertion vacuous; the intent is to
        # verify that no "credentials" are set on the integration.
        function_with_invoke_role_none = get_method(resources, "/MyFunctionNONEInvokeRole", rest_api_id, apigw_client)
        credentials_for_invoke_role_none = function_with_invoke_role_none.get("methodIntegration", {}).get(
            "credentials"
        )
        self.assertIsNone(credentials_for_invoke_role_none)

        # API events with and without explicit auth configuration
        api_event_with_auth = get_method(resources, "/api/with-auth", rest_api_id, apigw_client)
        self.assertEqual(api_event_with_auth["authorizationType"], "AWS_IAM")

        api_event_with_out_auth = get_method(resources, "/api/without-auth", rest_api_id, apigw_client)
        self.assertEqual(api_event_with_out_auth["authorizationType"], "NONE")

    @retry(StatusCodeError, 10)
    def verify_authorized_request(
        self,
        url,
        expected_status_code,
        header_key=None,
        header_value=None,
    ):
        """GET ``url`` (optionally with one auth header) and expect ``expected_status_code``.

        Retried on StatusCodeError to absorb the eventual consistency of
        freshly deployed authorizers.  The original trailing assertEqual calls
        were unreachable failures (status already verified) and were removed.
        """
        headers = {header_key: header_value} if header_key and header_value else None
        response = requests.get(url, headers=headers)
        status = response.status_code
        if status != expected_status_code:
            raise StatusCodeError(
                "Request to {} failed with status: {}, expected status: {}".format(url, status, expected_status_code)
            )
def get_authorizer_by_name(authorizers, name):
    """Return the first authorizer dict whose ``name`` matches, or None."""
    return next((auth for auth in authorizers if auth["name"] == name), None)
def get_resource_by_path(resources, path):
    """Return the first resource dict whose ``path`` matches, or None."""
    return next((res for res in resources if res["path"] == path), None)
def get_method(resources, path, rest_api_id, apigw_client):
    """Fetch the GET method configuration of the resource mounted at ``path``."""
    target = get_resource_by_path(resources, path)
    return apigw_client.get_method(restApiId=rest_api_id, resourceId=target["id"], httpMethod="GET")
| 46.23125 | 201 | 0.680186 |
a740c389f4de2cb982536b4444a6f5fdfc5248f2 | 10,017 | py | Python | nibabies/utils/bids.py | mgxd/nibabies | 4ae099af626b770142c9f2ced97c1436d17cae07 | [
"Apache-2.0"
] | 1 | 2022-02-09T18:42:56.000Z | 2022-02-09T18:42:56.000Z | nibabies/utils/bids.py | mgxd/nibabies | 4ae099af626b770142c9f2ced97c1436d17cae07 | [
"Apache-2.0"
] | null | null | null | nibabies/utils/bids.py | mgxd/nibabies | 4ae099af626b770142c9f2ced97c1436d17cae07 | [
"Apache-2.0"
] | null | null | null |
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Utilities to handle BIDS inputs."""
import os
import json
from pathlib import Path
import sys
def write_bidsignore(deriv_dir):
    """Create a ``.bidsignore`` file in *deriv_dir* listing non-BIDS outputs."""
    # TODO: Port to niworkflows
    ignored_patterns = [
        # Reports
        "*.html",
        "logs/",
        "figures/",
        # Unspecified transform files
        "*_xfm.*",
        # Unspecified structural outputs
        "*.surf.gii",
        # Unspecified functional outputs
        "*_boldref.nii.gz",
        "*_bold.func.gii",
        "*_mixing.tsv",
        "*_AROMAnoiseICs.csv",
        "*_timeseries.tsv",
    ]
    (Path(deriv_dir) / ".bidsignore").write_text("\n".join(ignored_patterns) + "\n")
def write_derivative_description(bids_dir, deriv_dir):
    """Write a BIDS-Derivatives ``dataset_description.json`` into *deriv_dir*.

    Container provenance is taken from the environment when available, and
    DOI/License are inherited from the source dataset's description file.
    """
    from ..__about__ import (
        __version__,
        __packagename__,
        DOWNLOAD_URL,
    )

    bids_dir = Path(bids_dir)
    deriv_dir = Path(deriv_dir)
    generator = {
        'Name': __packagename__,
        'Version': __version__,
        'CodeURL': DOWNLOAD_URL,
    }
    desc = {
        'Name': 'NiBabies: Neuroimaging preprocessing workflows for babies',
        'BIDSVersion': '1.4.0',
        'DatasetType': 'derivative',
        'GeneratedBy': [generator],
        'HowToAcknowledge': 'TODO',
    }

    # Keys that can only be set by environment
    docker_tag = os.getenv('NIBABIES_DOCKER_TAG')
    if docker_tag is not None:
        generator['Container'] = {
            "Type": "docker",
            "Tag": f"nipreps/fmriprep:{docker_tag}",
        }
    singularity_url = os.getenv('NIBABIES_SINGULARITY_URL')
    if singularity_url is not None:
        generator['Container'] = {
            "Type": "singularity",
            "URI": singularity_url,
        }

    # Keys deriving from source dataset
    orig_desc = {}
    fname = bids_dir / 'dataset_description.json'
    if fname.exists():
        orig_desc = json.loads(fname.read_text())

    if 'DatasetDOI' in orig_desc:
        desc['SourceDatasets'] = [{
            'URL': f'https://doi.org/{orig_desc["DatasetDOI"]}',
            'DOI': orig_desc['DatasetDOI'],
        }]
    if 'License' in orig_desc:
        desc['License'] = orig_desc['License']

    (deriv_dir / 'dataset_description.json').write_text(json.dumps(desc, indent=4))
def extract_entities(file_list):
    """
    Return a dictionary of common entities given a list of files.

    Examples
    --------
    >>> extract_entities('sub-01/anat/sub-01_T1w.nii.gz')
    {'subject': '01', 'suffix': 'T1w', 'datatype': 'anat', 'extension': '.nii.gz'}
    >>> extract_entities(['sub-01/anat/sub-01_T1w.nii.gz'] * 2)
    {'subject': '01', 'suffix': 'T1w', 'datatype': 'anat', 'extension': '.nii.gz'}
    >>> extract_entities(['sub-01/anat/sub-01_run-1_T1w.nii.gz',
    ...                   'sub-01/anat/sub-01_run-2_T1w.nii.gz'])
    {'subject': '01', 'run': [1, 2], 'suffix': 'T1w', 'datatype': 'anat', 'extension': '.nii.gz'}
    """
    from collections import defaultdict
    from bids.layout import parse_file_entities
    from niworkflows.utils.connections import listify

    # Collect every value observed for each entity across all files
    entities = defaultdict(list)
    for fname in listify(file_list):
        for key, value in parse_file_entities(fname).items():
            entities[key].append(value)

    def _unique(values):
        # Collapse duplicates; return a scalar when only one distinct value remains
        deduped = sorted(set(values))
        return deduped[0] if len(deduped) == 1 else deduped

    return {key: _unique(values) for key, values in entities.items()}
def group_bolds_ref(*, layout, subject):
    """
    Extracts BOLD files from a BIDS dataset and combines them into buckets.

    Files in a bucket share:
    1) Session
    2) Phase-encoding direction (PEdir)
    3) Total readout time (TRT)

    Any files with missing data for (2) or (3) are put in their own bucket.

    Parameters
    ----------
    layout : pybids.layout.BIDSLayout
        Initialized BIDSLayout
    subject : str
        The subject ID

    Outputs
    -------
    combinations : list of tuples
        Each tuple is composed of (session, PEdir, TRT)
    files : list of lists
        Files matching each combination.

    Limitations
    -----------
    Single-band reference (sbref) are excluded.
    Does not group multi-echo data.
    """
    from contextlib import suppress
    from itertools import product
    from sdcflows.utils.epimanip import get_trt

    base_entities = {
        "subject": subject,
        "extension": (".nii", ".nii.gz"),
        "scope": "raw",  # Ensure derivatives are not captured
    }
    # list of tuples with unique combinations
    combinations = []
    # list of lists containing filenames that apply per combination
    files = []
    # Sessions default to (None,) for single-session datasets; only the
    # 'bold' suffix is queried, which is what excludes sbref files.
    for ses, suffix in sorted(product(layout.get_sessions() or (None,), {'bold', })):
        # bold files same session
        bolds = layout.get(suffix=suffix, session=ses, **base_entities)
        for bold in bolds:
            # session, pe, ro
            meta = bold.get_metadata()
            pe_dir = meta.get("PhaseEncodingDirection")
            ro = None
            # get_trt raises ValueError when the readout time cannot be derived
            with suppress(ValueError):
                ro = get_trt(meta, bold.path)
            if ro is not None:
                meta.update({"TotalReadoutTime": ro})
            comb = (ses, pe_dir, ro)
            if any(v is None for v in (pe_dir, ro)):
                # cannot be certain so treat as unique
                combinations.append(comb)
                files.append([bold.path])
            elif comb in combinations:
                # do not add a new entry to the combinations
                # instead append the file to the existing bucket
                idx = combinations.index(comb)
                files[idx].append(bold.path)
            else:
                # add a new entry and start a file bucket
                combinations.append(comb)
                files.append([bold.path])
    assert len(combinations) == len(files), "Nonequal number of combinations and file buckets"
    # NOTE(review): 'bolds' only holds the *last* session's query result here;
    # for multi-session datasets this count check looks wrong — confirm intent.
    assert len(bolds) == sum([len(x) for x in files]), "Final BOLD images count is off"
    return combinations, files
def validate_input_dir(exec_env, bids_dir, participant_label):
    """Run ``bids-validator`` on *bids_dir*, restricted to the requested participants.

    Raises RuntimeError when a requested participant label has no data on disk;
    prints a warning to stderr when the validator binary is not installed.
    """
    # Ignore issues and warnings that should not influence NiBabies
    # NOTE(review): "TSV_IMPROPER_NA" and "ACQTIME_FMT" appear twice in this
    # list (harmless, but could be deduplicated upstream).
    import tempfile
    import subprocess
    validator_config_dict = {
        "ignore": [
            "EVENTS_COLUMN_ONSET",
            "EVENTS_COLUMN_DURATION",
            "TSV_EQUAL_ROWS",
            "TSV_EMPTY_CELL",
            "TSV_IMPROPER_NA",
            "VOLUME_COUNT_MISMATCH",
            "BVAL_MULTIPLE_ROWS",
            "BVEC_NUMBER_ROWS",
            "DWI_MISSING_BVAL",
            "INCONSISTENT_SUBJECTS",
            "INCONSISTENT_PARAMETERS",
            "BVEC_ROW_LENGTH",
            "B_FILE",
            "PARTICIPANT_ID_COLUMN",
            "PARTICIPANT_ID_MISMATCH",
            "TASK_NAME_MUST_DEFINE",
            "PHENOTYPE_SUBJECTS_MISSING",
            "STIMULUS_FILE_MISSING",
            "DWI_MISSING_BVEC",
            "EVENTS_TSV_MISSING",
            "TSV_IMPROPER_NA",
            "ACQTIME_FMT",
            "Participants age 89 or higher",
            "DATASET_DESCRIPTION_JSON_MISSING",
            "FILENAME_COLUMN",
            "WRONG_NEW_LINE",
            "MISSING_TSV_COLUMN_CHANNELS",
            "MISSING_TSV_COLUMN_IEEG_CHANNELS",
            "MISSING_TSV_COLUMN_IEEG_ELECTRODES",
            "UNUSED_STIMULUS",
            "CHANNELS_COLUMN_SFREQ",
            "CHANNELS_COLUMN_LOWCUT",
            "CHANNELS_COLUMN_HIGHCUT",
            "CHANNELS_COLUMN_NOTCH",
            "CUSTOM_COLUMN_WITHOUT_DESCRIPTION",
            "ACQTIME_FMT",
            "SUSPICIOUSLY_LONG_EVENT_DESIGN",
            "SUSPICIOUSLY_SHORT_EVENT_DESIGN",
            "MALFORMED_BVEC",
            "MALFORMED_BVAL",
            "MISSING_TSV_COLUMN_EEG_ELECTRODES",
            "MISSING_SESSION"
        ],
        "error": ["NO_T1W"],
        "ignoredFiles": ['/dataset_description.json', '/participants.tsv']
    }
    # Limit validation only to data from requested participants
    if participant_label:
        # Labels on disk (with the 'sub-' prefix stripped); bids_dir is a Path
        all_subs = set([s.name[4:] for s in bids_dir.glob('sub-*')])
        # Requested labels, accepted with or without the 'sub-' prefix
        selected_subs = set([s[4:] if s.startswith('sub-') else s
                             for s in participant_label])
        bad_labels = selected_subs.difference(all_subs)
        if bad_labels:
            error_msg = 'Data for requested participant(s) label(s) not found. Could ' \
                        'not find data for participant(s): %s. Please verify the requested ' \
                        'participant labels.'
            # Container-specific hints: a missing mount is the usual culprit
            if exec_env == 'docker':
                error_msg += ' This error can be caused by the input data not being ' \
                             'accessible inside the docker container. Please make sure all ' \
                             'volumes are mounted properly (see https://docs.docker.com/' \
                             'engine/reference/commandline/run/#mount-volume--v---read-only)'
            if exec_env == 'singularity':
                error_msg += ' This error can be caused by the input data not being ' \
                             'accessible inside the singularity container. Please make sure ' \
                             'all paths are mapped properly (see https://www.sylabs.io/' \
                             'guides/3.0/user-guide/bind_paths_and_mounts.html)'
            raise RuntimeError(error_msg % ','.join(bad_labels))
        # Skip validation of subjects that were not requested
        ignored_subs = all_subs.difference(selected_subs)
        if ignored_subs:
            for sub in ignored_subs:
                validator_config_dict["ignoredFiles"].append("/sub-%s/**" % sub)
    # Write the config to a temp file and hand it to the validator CLI
    with tempfile.NamedTemporaryFile(mode='w+', suffix='.json') as temp:
        temp.write(json.dumps(validator_config_dict))
        temp.flush()
        try:
            subprocess.check_call(['bids-validator', str(bids_dir), '-c', temp.name])
        except FileNotFoundError:
            print("bids-validator does not appear to be installed", file=sys.stderr)
| 36.162455 | 98 | 0.585505 |
b562638674708fded17ed0525230c7cf90180abb | 948 | py | Python | 2020/day13.py | tobeannouncd/AdventOfCode | b4fe6e9b10a0dc191429a90ab351806df03bca10 | [
"MIT"
] | null | null | null | 2020/day13.py | tobeannouncd/AdventOfCode | b4fe6e9b10a0dc191429a90ab351806df03bca10 | [
"MIT"
] | null | null | null | 2020/day13.py | tobeannouncd/AdventOfCode | b4fe6e9b10a0dc191429a90ab351806df03bca10 | [
"MIT"
] | null | null | null | from math import ceil, lcm
import advent
def main():
    """Fetch the day-13 puzzle input and print the answers to both parts."""
    lines = advent.get_input(2020, 13).splitlines()
    for solve in (part_one, part_two):
        print(solve(lines))
def parse(data):
first_departure = int(data[0])
buses = {}
for i, x in enumerate(data[1].split(',')):
if x.isnumeric():
buses[i] = int(x)
return first_departure, buses
def part_one(data):
    """Return (id of the first catchable bus) * (minutes waited for it).

    FIX: the original loop variable shadowed the builtin ``id``; renamed and
    the manual loop replaced with a dict comprehension.
    """
    first_departure, buses = parse(data)
    # Earliest departure at or after `first_departure` for each bus id
    next_departures = {
        bus_id: ceil(first_departure / bus_id) * bus_id
        for bus_id in buses.values()
    }
    best_bus = min(next_departures, key=next_departures.get)
    return best_bus * (next_departures[best_bus] - first_departure)
def part_two(data):
    """Earliest timestamp where every bus departs at its list offset.

    Chinese-remainder-style sieve: satisfy one bus at a time, stepping by the
    lcm of the periods already satisfied so earlier constraints stay valid.

    FIX: the original shadowed the builtin ``id`` and used the ambiguous
    single-letter name ``l``; both renamed, logic unchanged.
    """
    _, buses = parse(data)
    constraints = list(buses.items())
    first_offset, first_bus = constraints[0]
    step = first_bus
    timestamp = (first_bus - first_offset) % step
    for offset, bus_id in constraints[1:]:
        # Advance by the current step until this bus's offset constraint holds
        while timestamp % bus_id != (bus_id - offset) % bus_id:
            timestamp += step
        # Fold this bus's period into the step for the next constraint
        step = lcm(step, bus_id)
    return timestamp
# Script entry point: solve only when executed directly, not on import.
if __name__ == '__main__':
    main()
| 19.75 | 54 | 0.583333 |
7a95de7b7515066ca2411ef8cb15369cc3e3303a | 574 | py | Python | StreamPlayer.py | sbamamoto/raspiradio | f0ff608554c30c18e67fe79031328f26cf6fd010 | [
"MIT"
] | null | null | null | StreamPlayer.py | sbamamoto/raspiradio | f0ff608554c30c18e67fe79031328f26cf6fd010 | [
"MIT"
] | null | null | null | StreamPlayer.py | sbamamoto/raspiradio | f0ff608554c30c18e67fe79031328f26cf6fd010 | [
"MIT"
] | null | null | null | #Using a process in a subclass Chapter 3: Process Based #Parallelism
import multiprocessing
import subprocess
import os
class StreamPlayer():
    """Plays an audio stream or playlist by spawning an mpg123 subprocess."""

    def __init__(self, station):
        """Start playback for ``station``, a ``(type, url)`` tuple.

        A type of "STREAM" plays the URL directly; anything else is treated
        as a playlist URL (mpg123's ``-@`` option).

        FIX: the tuple element previously shadowed the builtin ``type``
        (renamed), and the Python-2-only ``print`` statement is replaced by a
        single-argument ``print(...)`` call, which behaves identically on
        both Python 2 and 3.
        """
        (stream_type, url) = station
        print("************** Now Playing: %s" % url)
        if stream_type == "STREAM":
            self.processHandle = subprocess.Popen(["/usr/bin/mpg123", url])
        else:
            # -@ tells mpg123 to read the URL as a playlist
            self.processHandle = subprocess.Popen(["/usr/bin/mpg123", "-@", url])

    def stop(self):
        """Terminate the player process."""
        self.processHandle.terminate()

    def wait(self):
        """Block until the player process exits."""
        self.processHandle.wait()
| 26.090909 | 81 | 0.611498 |
4c8b02312ff9e0e70808b4412bd603964b233d6c | 12,709 | py | Python | code/python/FundsAPIforDigitalPortals/v2/fds/sdk/FundsAPIforDigitalPortals/model/inline_response2005_data_share_class_fee_management.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | 6 | 2022-02-07T16:34:18.000Z | 2022-03-30T08:04:57.000Z | code/python/FundsAPIforDigitalPortals/v2/fds/sdk/FundsAPIforDigitalPortals/model/inline_response2005_data_share_class_fee_management.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | 2 | 2022-02-07T05:25:57.000Z | 2022-03-07T14:18:04.000Z | code/python/FundsAPIforDigitalPortals/v2/fds/sdk/FundsAPIforDigitalPortals/model/inline_response2005_data_share_class_fee_management.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | null | null | null | """
Funds API For Digital Portals
Search for mutual funds and ETFs using one single consolidated API, including a criteria-based screener. The API provides also base data, key figures, and holdings. A separate endpoint returns the possible values and value range for the parameters that the endpoint /fund/notation/screener/search accepts: Application developers can request the values and value range only for a restricted set of notations that match predefined parameters. This functionality may be used to pre-fill the values and value ranges of the parameters of the /fund/notation/screener/search endpoint so that performing a search always leads to a non-empty set of notations. This API is fully integrated with the corresponding Quotes API, allowing access to detailed price and performance information of instruments, as well as basic security identifier cross-reference. For direct access to price histories, please refer to the Time Series API for Digital Portals. Similar criteria based screener APIs exist for equity instruments and securitized derivatives: See the Stocks API and the Securitized Derivatives API for details. # noqa: E501
The version of the OpenAPI document: 2
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.FundsAPIforDigitalPortals.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.FundsAPIforDigitalPortals.exceptions import ApiAttributeError
def lazy_import():
    # Deferred import, published through globals(), to avoid circular
    # imports between the generated model modules (this model references
    # ...Current, which may in turn reference other models).
    from fds.sdk.FundsAPIforDigitalPortals.model.inline_response2005_data_share_class_fee_management_current import InlineResponse2005DataShareClassFeeManagementCurrent
    globals()['InlineResponse2005DataShareClassFeeManagementCurrent'] = InlineResponse2005DataShareClassFeeManagementCurrent
class InlineResponse2005DataShareClassFeeManagement(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model declares no enum-constrained properties.
    allowed_values = {
    }

    # No length/range/regex validations are generated for this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'current': (InlineResponse2005DataShareClassFeeManagementCurrent,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No discriminator: this schema is not polymorphic.
        return None

    # Python attribute name -> JSON key (identical for this model).
    attribute_map = {
        'current': 'current',  # noqa: E501
    }

    # No read-only properties are declared for this model.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    # Alternate constructor used when deserializing server responses; unlike
    # __init__ it does not reject assignments to read-only attributes.
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """InlineResponse2005DataShareClassFeeManagement - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            current (InlineResponse2005DataShareClassFeeManagementCurrent): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal machinery attributes (set in both constructors), as opposed
    # to OpenAPI data attributes.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """InlineResponse2005DataShareClassFeeManagement - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            current (InlineResponse2005DataShareClassFeeManagementCurrent): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
| 48.507634 | 1,125 | 0.607522 |
169bf3fed14ff0f475b7b024c20fa4eecef8a45f | 4,138 | py | Python | designs/search.py | amindelavar/kimia-dict-pygtk | 1b68bd48c6b55f7561f02e50ae6069f289566837 | [
"MIT"
] | 2 | 2019-06-27T05:00:07.000Z | 2019-06-27T05:00:07.000Z | designs/search.py | amindelavar/kimia-dict-pygtk | 1b68bd48c6b55f7561f02e50ae6069f289566837 | [
"MIT"
] | null | null | null | designs/search.py | amindelavar/kimia-dict-pygtk | 1b68bd48c6b55f7561f02e50ae6069f289566837 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from gi.repository import Gtk, Gdk
from modules import _global as glob
import gi
gi.require_version('Gtk', '3.0')
from modules import models
# -----------------------------------------
# =>global vars
SearchTxt = Gtk.Entry()  # shared search entry widget, created at import time
SearchBtn = None  # NOTE(review): SearchBox.__init__ binds a *local* SearchBtn; this global is never rebound
content = None  # placeholder; rebound to a MainContent instance at the bottom of this file
# =>a class for handling input and button
# class SearchBox_handler():
# -----------------------------------------
# =>main search box class
class SearchBox(Gtk.Box):
    """GTK box holding the search entry plus its Search button."""
    # =>public vars
    SearchValue = ''  # most recently typed text (class-level default)
    # ---------------------------------------------------
    def __init__(self):
        Gtk.Box.__init__(self)
        # Configure the module-level entry widget.
        SearchTxt.set_activates_default(True)
        SearchTxt.set_placeholder_text("Search any Word/Term...")
        # SearchTxt.set_active(True)
        # Handle the 'key_release' event of the entry.
        SearchTxt.connect('key_release_event',
                          self.searchtxt__KeyRelease__event)
        # NOTE(review): local assignment; the module-level SearchBtn stays None.
        SearchBtn = Gtk.Button("Search")
        # Handle the 'clicked' event of the button.
        SearchBtn.connect('clicked', self.searchbtn__clicked__event)
        # Add the button to the end of the box layout.
        self.pack_end(SearchBtn, False, False, 2)
        # Add the entry to the start of the box layout.
        self.pack_start(SearchTxt, True, True, 2)
    # ---------------------------------------------------
    # =>KeyRelease event for searchtxt
    def searchtxt__KeyRelease__event(self, widget, event):
        """Remember the typed text; run the search when ENTER is released."""
        # print('search entry...'+widget.get_text() +'...'+str(event.hardware_keycode))
        self.SearchValue = widget.get_text()
        if event.hardware_keycode == 36:  # ENTER press key
            self.search(widget.get_text())
    # ---------------------------------------------------
    # =>clicked event for searchbtn
    def searchbtn__clicked__event(self, event):
        """Run the search with the most recently typed text."""
        print('search button...')
        self.search(self.SearchValue)
    # ---------------------------------------------------
    # =>main search function
    @staticmethod
    def search(text: str, isnew: bool = True):
        """Look up `text` in every configured dictionary and render results.

        Uses the module-level `content` panel and `search` module, both
        bound at the bottom of this file (circular-import workaround).
        When `isnew` is true the term is appended to the word history.
        """
        # =>define vars
        results = []  # list of per-dictionary result objects
        dict_count = 0  # number of dictionaries that produced results
        # Normalize: trim whitespace and lowercase.
        text = text.strip().lower()
        print('search:'+text+'$')
        # Append the term to the word history (WORDB).
        if isnew:
            glob.WORDB, glob.WORDBPOS = models.wordb.append(glob.WORDB, text, glob.WORDBPOS)
        # Empty query: nothing to do.
        if len(text) == 0:
            return
        # Remove all previously rendered result segments from the panel.
        for con in content.get_children():
            content.remove(con)
        # Search every configured dictionary database.
        for dic in glob.DICTIONARIES:
            res = search.SearchDB().search_in_databases(dic.name, text, glob.LANG, dic.count)
            # print('res('+dic.name+')::'+str(res))
            # Empty dict_type means the database was not found/usable.
            if res.dict_type == '':
                print('not exist db...')
                continue
            print('searched in database:'+dic.name)
            # Keep the result object (currently only used for rendering).
            results.append(res)
            # Render only dictionaries that actually matched something.
            if len(res.result_values) > 0:
                # Build a frame widget from the result...
                frame = content.create_frame(res)
                # ...drop the reference to the consumed result object...
                del(res)
                # ...and append the frame to the panel layout.
                content.pack_start(frame, True, True, 0)
                dict_count += 1
            # No matches in this dictionary: skip it.
            else: continue
        # Nothing matched anywhere: show the "not found" widget instead.
        if dict_count == 0:
            content.pack_start(content.create_notfound(), True, True, 0)
        # Refresh the panel so new children become visible.
        content.show_all()
# -----------------------------------------
# =>main content panel class
# Imported at the bottom of the file (after SearchBox is defined) to break
# the circular dependency between this module and designs.content.
from designs import content
# -----------------------------------------
# Rebind the module-level `content` name from the imported module to a
# MainContent instance; SearchBox.search uses this instance directly.
content = content.MainContent()
# -----------------------------------------
#=> search in database class (also bottom-imported; used by SearchBox.search)
from modules import search
| 37.618182 | 93 | 0.536491 |
80eb0dfb990d27ccbfeb352fd30b04bdb16230cf | 7,877 | py | Python | heat_dashboard/test/test_data/neutron_data.py | efenfauzi/heat-dashboard | e1e9fc853b72c2770ba041c0ac1151b4c9484f36 | [
"Apache-2.0"
] | 13 | 2017-10-31T10:35:14.000Z | 2020-10-28T03:23:04.000Z | heat_dashboard/test/test_data/neutron_data.py | efenfauzi/heat-dashboard | e1e9fc853b72c2770ba041c0ac1151b4c9484f36 | [
"Apache-2.0"
] | 3 | 2019-11-01T13:03:10.000Z | 2019-12-06T17:33:03.000Z | heat_dashboard/test/test_data/neutron_data.py | openstack/heat-dashboard | 8704da6f4a6d8f6f64be2d72af49f0ea44d7b900 | [
"Apache-2.0"
] | 13 | 2017-12-21T08:33:23.000Z | 2021-01-18T14:26:22.000Z | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from openstack_dashboard.api import neutron
from heat_dashboard.test.test_data import utils
def data(TEST):
    """Populate ``TEST`` with Neutron fixture data for the test suite.

    Fills the containers with two networks (one tenant network with v4/v6
    subnets and a DHCP port, one external network), a router, a floating
    IP associated with the port, three security groups and two QoS
    policies — mirroring what the neutron API wrapper would return.
    """
    # Data returned by openstack_dashboard.api.neutron wrapper.
    TEST.networks = utils.TestDataContainer()
    TEST.subnets = utils.TestDataContainer()
    TEST.ports = utils.TestDataContainer()
    TEST.routers = utils.TestDataContainer()
    TEST.floating_ips = utils.TestDataContainer()
    TEST.security_groups = utils.TestDataContainer()
    TEST.qos_policies = utils.TestDataContainer()
    # Data return by neutronclient.
    TEST.api_networks = utils.TestDataContainer()
    TEST.api_subnets = utils.TestDataContainer()
    # 1st network.
    network_dict = {'admin_state_up': True,
                    'id': '82288d84-e0a5-42ac-95be-e6af08727e42',
                    'name': 'net1',
                    'status': 'ACTIVE',
                    'subnets': ['e8abc972-eb0c-41f1-9edd-4bc6e3bcd8c9',
                                '41e53a49-442b-4307-9e9a-88967a6b6657'],
                    'tenant_id': '1',
                    'router:external': False,
                    'shared': False}
    subnet_dict = {'allocation_pools': [{'end': '10.0.0.254',
                                         'start': '10.0.0.2'}],
                   'dns_nameservers': [],
                   'host_routes': [],
                   'cidr': '10.0.0.0/24',
                   'enable_dhcp': True,
                   'gateway_ip': '10.0.0.1',
                   'id': network_dict['subnets'][0],
                   'ip_version': 4,
                   'name': 'mysubnet1',
                   'network_id': network_dict['id'],
                   'tenant_id': network_dict['tenant_id']}
    subnetv6_dict = {
        'allocation_pools': [{'start': 'fdb6:b88a:488e::2',
                              'end': 'fdb6:b88a:488e:0:ffff:ffff:ffff:ffff'}],
        'dns_nameservers': [],
        'host_routes': [],
        'cidr': 'fdb6:b88a:488e::/64',
        'enable_dhcp': True,
        'gateway_ip': 'fdb6:b88a:488e::1',
        'id': network_dict['subnets'][1],
        'ip_version': 6,
        'name': 'myv6subnet',
        'network_id': network_dict['id'],
        'tenant_id': network_dict['tenant_id'],
        'ipv6_ra_mode': 'slaac',
        'ipv6_address_mode': 'slaac'
    }
    TEST.api_networks.add(network_dict)
    TEST.api_subnets.add(subnet_dict)
    TEST.api_subnets.add(subnetv6_dict)
    # Wrapped (dashboard-API) versions of the same raw dicts.
    network = copy.deepcopy(network_dict)
    subnet = neutron.Subnet(subnet_dict)
    subnetv6 = neutron.Subnet(subnetv6_dict)
    network['subnets'] = [subnet, subnetv6]
    TEST.networks.add(neutron.Network(network))
    TEST.subnets.add(subnet)
    TEST.subnets.add(subnetv6)
    # Ports on 1st network.
    port_dict = {
        'admin_state_up': True,
        'device_id': 'af75c8e5-a1cc-4567-8d04-44fcd6922890',
        'device_owner': 'network:dhcp',
        'fixed_ips': [{'ip_address': '10.0.0.3',
                       'subnet_id': subnet_dict['id']}],
        'id': '063cf7f3-ded1-4297-bc4c-31eae876cc91',
        'mac_address': 'fa:16:3e:9c:d5:7e',
        'name': '',
        'network_id': network_dict['id'],
        'status': 'ACTIVE',
        'tenant_id': network_dict['tenant_id'],
        'binding:vnic_type': 'normal',
        'binding:host_id': 'host',
        'allowed_address_pairs': [
            {'ip_address': '174.0.0.201',
             'mac_address': 'fa:16:3e:7a:7b:18'}
        ],
        'port_security_enabled': True,
        'security_groups': [],
    }
    TEST.ports.add(neutron.Port(port_dict))
    # External network.
    network_dict = {'admin_state_up': True,
                    'id': '9b466b94-213a-4cda-badf-72c102a874da',
                    'name': 'ext_net',
                    'status': 'ACTIVE',
                    'subnets': ['d6bdc71c-7566-4d32-b3ff-36441ce746e8'],
                    'tenant_id': '3',
                    'router:external': True,
                    'shared': False}
    # NOTE(review): trailing '.' in the pool start address below looks like
    # a typo ('172.24.4.226.') — confirm whether any test depends on it.
    subnet_dict = {'allocation_pools': [{'start': '172.24.4.226.',
                                         'end': '172.24.4.238'}],
                   'dns_nameservers': [],
                   'host_routes': [],
                   'cidr': '172.24.4.0/28',
                   'enable_dhcp': False,
                   'gateway_ip': '172.24.4.225',
                   'id': 'd6bdc71c-7566-4d32-b3ff-36441ce746e8',
                   'ip_version': 4,
                   'name': 'ext_subnet',
                   'network_id': network_dict['id'],
                   'tenant_id': network_dict['tenant_id']}
    ext_net = network_dict
    network = copy.deepcopy(network_dict)
    subnet = neutron.Subnet(subnet_dict)
    network['subnets'] = [subnet]
    TEST.networks.add(neutron.Network(network))
    TEST.subnets.add(subnet)
    # Router with a gateway on the external network.
    assoc_port = port_dict
    router_dict = {'id': '279989f7-54bb-41d9-ba42-0d61f12fda61',
                   'name': 'router1',
                   'status': 'ACTIVE',
                   'admin_state_up': True,
                   'distributed': True,
                   'external_gateway_info':
                       {'network_id': ext_net['id']},
                   'tenant_id': '1',
                   'availability_zone_hints': ['nova']}
    TEST.routers.add(neutron.Router(router_dict))
    # Associated (with compute port on 1st network).
    fip_dict = {'tenant_id': '1',
                'floating_ip_address': '172.16.88.228',
                'floating_network_id': ext_net['id'],
                'id': 'a97af8f2-3149-4b97-abbd-e49ad19510f7',
                'fixed_ip_address': assoc_port['fixed_ips'][0]['ip_address'],
                'port_id': assoc_port['id'],
                'router_id': router_dict['id']}
    fip_with_instance = copy.deepcopy(fip_dict)
    fip_with_instance.update({'instance_id': '1',
                              'instance_type': 'compute'})
    TEST.floating_ips.add(neutron.FloatingIp(fip_with_instance))
    # Security group.
    sec_group_1 = {'tenant_id': '1',
                   'description': 'default',
                   'id': 'faad7c80-3b62-4440-967c-13808c37131d',
                   'name': 'default'}
    sec_group_2 = {'tenant_id': '1',
                   'description': 'NotDefault',
                   'id': '27a5c9a1-bdbb-48ac-833a-2e4b5f54b31d',
                   'name': 'other_group'}
    sec_group_3 = {'tenant_id': '1',
                   'description': 'NotDefault',
                   'id': '443a4d7a-4bd2-4474-9a77-02b35c9f8c95',
                   'name': 'another_group'}
    groups = [sec_group_1, sec_group_2, sec_group_3]
    sg_name_dict = dict([(sg['id'], sg['name']) for sg in groups])
    for sg in groups:
        sg['security_group_rules'] = []
        # OpenStack Dashboard internaly API.
        TEST.security_groups.add(
            neutron.SecurityGroup(copy.deepcopy(sg), sg_name_dict))
    # qos policies
    policy_dict = {'id': 'a21dcd22-7189-cccc-aa32-22adafaf16a7',
                   'name': 'policy 1',
                   'tenant_id': '1'}
    TEST.qos_policies.add(neutron.QoSPolicy(policy_dict))
    policy_dict1 = {'id': 'a21dcd22-7189-ssss-aa32-22adafaf16a7',
                    'name': 'policy 2',
                    'tenant_id': '1'}
    TEST.qos_policies.add(neutron.QoSPolicy(policy_dict1))
| 39.984772 | 78 | 0.548813 |
bba7b2c99c732d273ee5b58749307b07babb32c7 | 3,575 | py | Python | sdk/python/pulumi_azure_native/sql/v20200801preview/get_server_dns_alias.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/sql/v20200801preview/get_server_dns_alias.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/sql/v20200801preview/get_server_dns_alias.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
# Public names re-exported by this generated module.
__all__ = [
    'GetServerDnsAliasResult',
    'AwaitableGetServerDnsAliasResult',
    'get_server_dns_alias',
]
# Generated Pulumi output type; `id` and `type` intentionally mirror the
# API's property names even though they shadow builtins.
@pulumi.output_type
class GetServerDnsAliasResult:
    """
    A server DNS alias.
    """
    def __init__(__self__, azure_dns_record=None, id=None, name=None, type=None):
        if azure_dns_record and not isinstance(azure_dns_record, str):
            raise TypeError("Expected argument 'azure_dns_record' to be a str")
        pulumi.set(__self__, "azure_dns_record", azure_dns_record)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="azureDnsRecord")
    def azure_dns_record(self) -> str:
        """
        The fully qualified DNS record for alias
        """
        return pulumi.get(self, "azure_dns_record")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
class AwaitableGetServerDnsAliasResult(GetServerDnsAliasResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` turns __await__ into a generator function,
        # so instances can be awaited while resolving immediately with the
        # already-available result.
        if False:
            yield self
        return GetServerDnsAliasResult(
            azure_dns_record=self.azure_dns_record,
            id=self.id,
            name=self.name,
            type=self.type)
def get_server_dns_alias(dns_alias_name: Optional[str] = None,
                         resource_group_name: Optional[str] = None,
                         server_name: Optional[str] = None,
                         opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetServerDnsAliasResult:
    """
    A server DNS alias.
    :param str dns_alias_name: The name of the server dns alias.
    :param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
    :param str server_name: The name of the server that the alias is pointing to.
    """
    # Arguments are keyed by the provider's camelCase names.
    __args__ = dict()
    __args__['dnsAliasName'] = dns_alias_name
    __args__['resourceGroupName'] = resource_group_name
    __args__['serverName'] = server_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Fall back to the SDK's pinned provider version.
        opts.version = _utilities.get_version()
    # Invoke the provider function and unwrap the typed result.
    __ret__ = pulumi.runtime.invoke('azure-native:sql/v20200801preview:getServerDnsAlias', __args__, opts=opts, typ=GetServerDnsAliasResult).value

    return AwaitableGetServerDnsAliasResult(
        azure_dns_record=__ret__.azure_dns_record,
        id=__ret__.id,
        name=__ret__.name,
        type=__ret__.type)
| 33.101852 | 171 | 0.645315 |
877c170273c6363d9c725f85aafe848195fd8df6 | 928 | py | Python | aries_cloudagent/indy/sdk/models/schema.py | SmartLoan/smartloan-cloud-agent | 3d1e4b506f02e25bdece194240110c71b1786911 | [
"Apache-2.0"
] | null | null | null | aries_cloudagent/indy/sdk/models/schema.py | SmartLoan/smartloan-cloud-agent | 3d1e4b506f02e25bdece194240110c71b1786911 | [
"Apache-2.0"
] | null | null | null | aries_cloudagent/indy/sdk/models/schema.py | SmartLoan/smartloan-cloud-agent | 3d1e4b506f02e25bdece194240110c71b1786911 | [
"Apache-2.0"
] | 1 | 2022-02-02T17:05:27.000Z | 2022-02-02T17:05:27.000Z | """Schema artifacts."""
from marshmallow import fields
from ....messaging.models.openapi import OpenAPISchema
from ....messaging.valid import INDY_SCHEMA_ID, INDY_VERSION, NATURAL_NUM
class SchemaSchema(OpenAPISchema):
    """Marshmallow schema for indy schema."""
    # Node protocol version string.
    ver = fields.Str(description="Node protocol version", **INDY_VERSION)
    # Schema identifier; serialized under the JSON key "id".
    ident = fields.Str(data_key="id", description="Schema identifier", **INDY_SCHEMA_ID)
    # Human-readable schema name (example taken from the id's name segment).
    name = fields.Str(
        description="Schema name",
        example=INDY_SCHEMA_ID["example"].split(":")[2],
    )
    # Schema version string.
    version = fields.Str(description="Schema version", **INDY_VERSION)
    # Attribute names; serialized under the JSON key "attrNames".
    attr_names = fields.List(
        fields.Str(
            description="Attribute name",
            example="score",
        ),
        description="Schema attribute names",
        data_key="attrNames",
    )
    # Ledger sequence number; strict=True rejects non-integer input.
    seqNo = fields.Int(description="Schema sequence number", strict=True, **NATURAL_NUM)
| 33.142857 | 88 | 0.674569 |
c398ffe1c5c3c4965a040517e6ce03a5d021f495 | 943 | py | Python | code/Site/routes.py | jbassen/oars | 7b560bf2ce9f2c4026ebf77a1df35da6b1f28ea4 | [
"MIT"
] | 3 | 2018-06-28T15:56:29.000Z | 2021-04-14T07:21:30.000Z | code/Site/routes.py | jbassen/oars | 7b560bf2ce9f2c4026ebf77a1df35da6b1f28ea4 | [
"MIT"
] | null | null | null | code/Site/routes.py | jbassen/oars | 7b560bf2ce9f2c4026ebf77a1df35da6b1f28ea4 | [
"MIT"
] | null | null | null | # Copyright (c) 2017 Jonathan Bassen, Stanford University
from activity_text import ActivityTextHandler
from course import CourseHandler
from home import HomeHandler
from login import LagunitaLoginHandler
from login import SUClassLoginHandler
from login import LogoutHandler
from mapping import MappingHandler
from module import ModuleHandler
from objective import ObjectiveHandler
from landing import LandingHandler
from skill import SkillHandler
def get_routes():
    """Build the application's URL routing table.

    Returns a list of (url-pattern, handler) pairs; nested resource
    handlers are matched by repeating a single path-segment capture group
    once per level of depth.
    """
    # One capture group matching a single path segment.
    seg = r"/([0-9a-zA-Z\%\:\+\_\-\.]+)"
    # Handlers ordered by how many path segments deep they live (1..6).
    nested = [
        LandingHandler,
        CourseHandler,
        MappingHandler,
        ModuleHandler,
        ObjectiveHandler,
        SkillHandler,
    ]
    routes = [
        (r"/", HomeHandler),
        (r"/edx/login", LagunitaLoginHandler),
        (r"/logout", LogoutHandler),
        (r"/lagunita/dump", ActivityTextHandler),
    ]
    routes.extend((seg * depth, handler) for depth, handler in enumerate(nested, start=1))
    return routes
| 32.517241 | 57 | 0.676564 |
8616f4947af3fd73b13efdd3b945b5001c7d22f4 | 96 | py | Python | venv/lib/python3.8/site-packages/future/builtins/newsuper.py | Retraces/UkraineBot | 3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/future/builtins/newsuper.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/future/builtins/newsuper.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/2e/68/94/43f7fa3570c8cfabfab033e4a135a5fb665c732ecfa597d0298b6c71a7 | 96 | 96 | 0.895833 |
824153370074be0fb611d66019e19b92fe45d51f | 5,392 | py | Python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/operations/_usage_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/operations/_usage_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/operations/_usage_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class UsageOperations(object):
    """UsageOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.compute.v2019_03_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    # `list` shadows the builtin, but it is the generated operation name and
    # part of the public interface of this operation group.
    def list(
        self,
        location,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.ListUsagesResult"]
        """Gets, for the specified location, the current compute resource usage information as well as the
        limits for compute resources under the subscription.

        :param location: The location for which resource usage is queried.
        :type location: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_03_01.models.ListUsagesResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ListUsagesResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-03-01"
        accept = "application/json"

        # Builds the HTTP request: the templated URL on the first page, the
        # server-supplied next_link on subsequent pages.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        # Deserializes one page into (next_link, iterator of elements).
        def extract_data(pipeline_response):
            deserialized = self._deserialize('ListUsagesResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        # Executes the request for one page and maps error status codes.
        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    # URL template consumed by prepare_request above.
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/usages'}  # type: ignore
| 45.310924 | 134 | 0.652448 |
3104f4cdf24766a61e021fd8c9b4915848c690c0 | 2,648 | py | Python | cogs/misc.py | LoganHaug/reminder-bot | 1bb1853b79e0299240a214e947e8bc29ed34e46e | [
"MIT"
] | 2 | 2021-01-02T04:30:54.000Z | 2021-01-02T04:30:54.000Z | cogs/misc.py | LoganHaug/reminder-bot | 1bb1853b79e0299240a214e947e8bc29ed34e46e | [
"MIT"
] | 8 | 2021-01-02T02:06:04.000Z | 2021-03-15T06:05:50.000Z | cogs/misc.py | LoganHaug/reminder-bot | 1bb1853b79e0299240a214e947e8bc29ed34e46e | [
"MIT"
] | 2 | 2021-01-02T01:50:06.000Z | 2021-01-02T20:02:58.000Z | import discord
from discord.ext import commands
from PIL import Image
import database
import utils
class Misc(commands.Cog):
    """Miscellaneous commands: latency check, graphing, pats and Radia art."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(aliases=["p"])
    async def ping(self, ctx):
        """Reports the bot's websocket latency in milliseconds."""
        await ctx.send(
            embed=utils.generate_embed("Pong", f"{self.bot.latency * 1000:2.3f} ms")
        )

    @commands.command(aliases=["g"])
    async def graph(self, ctx):
        """Sends the graph"""
        utils.generate_graph()
        await ctx.send(
            "", file=discord.File("img/image.svg", filename="img/da_graph.svg")
        )

    @commands.command(aliases=["pet"])
    async def pat(self, ctx):
        """Pats the reminder bot, or a user"""
        # Pat the last mentioned user if any, otherwise the bot itself.
        if len(ctx.message.mentions) >= 1:
            pats = database.increment_pat(ctx.guild.id, ctx.message.mentions[-1].id)
            user = ctx.message.mentions[-1].name
        else:
            pats = database.increment_pat(ctx.guild.id, self.bot.user.id)
            user = self.bot.user.name
        if pats == 1:
            await ctx.send(
                embed=utils.generate_embed("💜", f"{user} has received {pats} pat")
            )
        else:
            await ctx.send(
                embed=utils.generate_embed("💜", f"{user} has received {pats} pats")
            )

    @commands.command(aliases=["gr", "g_radia"])
    async def gen_radia(self, ctx, date):
        """Generates the World of Radia given a date
        Enter a date with format Month-Day-Year or Month/Day/Year
        ex. June 1st 2000 -> 06/01/2000 or 06-01-2000"""
        date = utils.split_date(date)
        if date is None:
            # Bug fix: ``ctx.send`` is a coroutine — it was previously called
            # without ``await`` (so the error embed was never sent) and
            # execution fell through to the date arithmetic below, raising a
            # TypeError on ``None``. Now we report the error and stop.
            await ctx.send(
                embed=utils.generate_embed("Error", "Please enter a valid date")
            )
            return
        center = Image.open("img/background.png")
        ringsFiles = [
            "img/rings/ring6.png",
            "img/rings/ring5.png",
            "img/rings/ring4.png",
            "img/rings/ring3.png",
            "img/rings/ring2.png",
            "img/rings/ring1.png",
            "img/rings/ring0.png",
        ]
        ringSpeeds = [0.25, 1, -2, 1.5, 1, -2, 0]  # num rotations per year
        # Radia's calendar has 12 months of 30 days, i.e. 360 days per year.
        dayOfYear = 360 * date["year"] + 30 * (date["month"] - 1) + date["day"] - 1
        # zip() pairs each ring with its speed directly, replacing the
        # previous O(n) ``ringsFiles.index(ring)`` lookup per iteration.
        for ring, speed in zip(ringsFiles, ringSpeeds):
            temp = Image.open(ring)
            temp = temp.rotate(angle=-speed * 0.6 * dayOfYear)  # 360 days per year
            center.paste(temp, (0, 0), temp)
        center.save("img/out.png")
        await ctx.send(file=discord.File("img/out.png", filename="img/out.png"))
def setup(bot):
    """Entry point used by discord.py's extension loader to register the cog."""
    cog = Misc(bot)
    bot.add_cog(cog)
| 32.292683 | 86 | 0.556269 |
af8228dcc4cd0d61b4b5a8f93314cc24cff58d28 | 1,298 | py | Python | backend/api/routes/lent.py | Mozzo1000/loan-tracker | 7d2c197c83caf93abc9481606a38c860f6ff633a | [
"Apache-2.0"
] | null | null | null | backend/api/routes/lent.py | Mozzo1000/loan-tracker | 7d2c197c83caf93abc9481606a38c860f6ff633a | [
"Apache-2.0"
] | 2 | 2021-07-24T12:31:49.000Z | 2021-08-01T14:11:31.000Z | backend/api/routes/lent.py | Mozzo1000/loan-tracker | 7d2c197c83caf93abc9481606a38c860f6ff633a | [
"Apache-2.0"
] | null | null | null | from flask import Blueprint, request, jsonify, abort
from flask_jwt_extended import (jwt_required, get_jwt_identity, get_jwt)
from models import Lent, LentSchema, User
lent = Blueprint('lent', __name__)
@lent.route('/lent', methods=['GET'])
@jwt_required()
def get_all_lents():
    """Return every lent record owned by the authenticated user as JSON."""
    owner = User.find_by_email(get_jwt_identity())
    records = Lent.query.filter_by(account_id=owner.id).all()
    return jsonify(LentSchema(many=True).dump(records))
@lent.route('/lent/<id>', methods=['GET'])
@jwt_required()
def get_lent(id):
    """Return a single lent record (by id) belonging to the caller."""
    owner = User.find_by_email(get_jwt_identity())
    record = Lent.query.filter_by(account_id=owner.id, id=id).first()
    return LentSchema().dump(record)
@lent.route('/lent', methods=['POST'])
@jwt_required()
def add_lent():
    """Create a new lent record for the authenticated user.

    Expects a JSON body with keys: to, description, amount, currency, due_date.
    Returns a success message on 200, or a generic message with status 500
    if persisting to the database fails.
    """
    new_lent = Lent(account_id=User.find_by_email(get_jwt_identity()).id,
                    to=request.json['to'],
                    description=request.json['description'],
                    amount=request.json['amount'],
                    currency=request.json['currency'],
                    due_date=request.json['due_date'])
    try:
        new_lent.save_to_db()
        return jsonify({'message': 'New lent added to database'})
    except Exception:
        # Was a bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt; catch only real errors and keep the original
        # 500 response the client expects.
        return jsonify({'message': 'Something went wrong'}), 500
7955e6f7f5f0f71a29b8b421612179b90055e71d | 4,847 | py | Python | tests/pipelines/test_optuna.py | gr33n-made/catalyst | bd413abc908ef7cbdeab42b0e805277a791e3ddb | [
"Apache-2.0"
] | 1 | 2021-09-29T20:30:50.000Z | 2021-09-29T20:30:50.000Z | tests/pipelines/test_optuna.py | gr33n-made/catalyst | bd413abc908ef7cbdeab42b0e805277a791e3ddb | [
"Apache-2.0"
] | null | null | null | tests/pipelines/test_optuna.py | gr33n-made/catalyst | bd413abc908ef7cbdeab42b0e805277a791e3ddb | [
"Apache-2.0"
] | null | null | null | # flake8: noqa
import os
from tempfile import TemporaryDirectory
from pytest import mark
import torch
from torch import nn
from torch.utils.data import DataLoader
from catalyst import dl
from catalyst.contrib.datasets import MNIST
from catalyst.data import ToTensor
from catalyst.settings import IS_CUDA_AVAILABLE, NUM_CUDA_DEVICES, SETTINGS
if SETTINGS.optuna_required:
import optuna
def train_experiment(device, engine=None):
    """Run a tiny Optuna search (lr, hidden size) over an MNIST classifier.

    :param device: torch device string (e.g. "cpu", "cuda:0"); used to build
        a ``dl.DeviceEngine`` when no explicit ``engine`` is given.
    :param engine: optional catalyst engine that overrides the device-based one.
    """
    # NOTE(review): ``logdir`` is created but never passed to ``runner.train``
    # — presumably left over; confirm whether logging into it was intended.
    with TemporaryDirectory() as logdir:

        def objective(trial):
            # Sample the two tuned hyper-parameters on a log scale.
            lr = trial.suggest_loguniform("lr", 1e-3, 1e-1)
            num_hidden = int(trial.suggest_loguniform("num_hidden", 32, 128))
            # NOTE(review): both loaders use ``train=False`` (the small MNIST
            # test split), presumably to keep this CI test fast — confirm.
            loaders = {
                "train": DataLoader(
                    MNIST(os.getcwd(), train=False, download=True, transform=ToTensor()),
                    batch_size=32,
                ),
                "valid": DataLoader(
                    MNIST(os.getcwd(), train=False, download=True, transform=ToTensor()),
                    batch_size=32,
                ),
            }
            # Single-hidden-layer MLP over flattened 28x28 images, 10 classes.
            model = nn.Sequential(
                nn.Flatten(), nn.Linear(784, num_hidden), nn.ReLU(), nn.Linear(num_hidden, 10)
            )
            optimizer = torch.optim.Adam(model.parameters(), lr=lr)
            criterion = nn.CrossEntropyLoss()
            runner = dl.SupervisedRunner(
                input_key="features", output_key="logits", target_key="targets"
            )
            runner.train(
                engine=engine or dl.DeviceEngine(device),
                model=model,
                criterion=criterion,
                optimizer=optimizer,
                loaders=loaders,
                callbacks={
                    # Reports valid accuracy to Optuna and prunes bad trials early.
                    "optuna": dl.OptunaPruningCallback(
                        loader_key="valid", metric_key="accuracy01", minimize=False, trial=trial
                    ),
                    "accuracy": dl.AccuracyCallback(
                        input_key="logits", target_key="targets", num_classes=10
                    ),
                },
                num_epochs=2,
            )
            score = trial.best_score
            return score

        study = optuna.create_study(
            direction="maximize",
            pruner=optuna.pruners.MedianPruner(
                n_startup_trials=1, n_warmup_steps=0, interval_steps=1
            ),
        )
        study.optimize(objective, n_trials=3, timeout=300)
        print(study.best_value, study.best_params)
# Torch
# Each test below runs the same experiment under a different engine and is
# skipped when the required hardware / extras are missing.
@mark.skipif(not SETTINGS.optuna_required, reason="catalyst[optuna] is not required")
def test_on_cpu():
    """Plain CPU run; only needs the catalyst[optuna] extra."""
    # Fixed typo in the skip reason above ("in not required" -> "is not required").
    train_experiment("cpu")


@mark.skipif(
    not (IS_CUDA_AVAILABLE and SETTINGS.optuna_required), reason="CUDA device is not available"
)
def test_on_torch_cuda0():
    """Single-GPU run on the first CUDA device."""
    train_experiment("cuda:0")


@mark.skipif(
    not (SETTINGS.optuna_required and IS_CUDA_AVAILABLE and NUM_CUDA_DEVICES >= 2),
    reason="No CUDA>=2 found",
)
def test_on_torch_cuda1():
    """Single-GPU run on the second CUDA device (requires >= 2 GPUs)."""
    train_experiment("cuda:1")


@mark.skipif(
    not (SETTINGS.optuna_required and IS_CUDA_AVAILABLE and NUM_CUDA_DEVICES >= 2),
    reason="No CUDA>=2 found",
)
def test_on_torch_dp():
    """Multi-GPU DataParallel run."""
    train_experiment(None, dl.DataParallelEngine())


# @mark.skipif(
#     not (IS_CUDA_AVAILABLE and NUM_CUDA_DEVICES >=2),
#     reason="No CUDA>=2 found",
# )
# def test_on_ddp():
#     train_experiment(None, dl.DistributedDataParallelEngine())
# AMP
# Automatic mixed precision variants (torch.cuda.amp via catalyst engines).
@mark.skipif(
    not (SETTINGS.optuna_required and IS_CUDA_AVAILABLE and SETTINGS.amp_required),
    reason="No CUDA or AMP found",
)
def test_on_amp():
    # Single-GPU mixed-precision run.
    train_experiment(None, dl.AMPEngine())


@mark.skipif(
    not (
        SETTINGS.optuna_required
        and IS_CUDA_AVAILABLE
        and NUM_CUDA_DEVICES >= 2
        and SETTINGS.amp_required
    ),
    reason="No CUDA>=2 or AMP found",
)
def test_on_amp_dp():
    # DataParallel mixed-precision run (requires >= 2 GPUs).
    train_experiment(None, dl.DataParallelAMPEngine())


# @mark.skipif(
#     not (IS_CUDA_AVAILABLE and NUM_CUDA_DEVICES >= 2 and SETTINGS.amp_required),
#     reason="No CUDA>=2 or AMP found",
# )
# def test_on_amp_ddp():
#     train_experiment(None, dl.DistributedDataParallelAMPEngine())
# APEX
# NVIDIA Apex mixed-precision variants.
@mark.skipif(
    not (SETTINGS.optuna_required and IS_CUDA_AVAILABLE and SETTINGS.apex_required),
    reason="No CUDA or Apex found",
)
def test_on_apex():
    # Single-GPU Apex run.
    train_experiment(None, dl.APEXEngine())


@mark.skipif(
    not (
        SETTINGS.optuna_required
        and IS_CUDA_AVAILABLE
        and NUM_CUDA_DEVICES >= 2
        and SETTINGS.apex_required
    ),
    reason="No CUDA>=2 or Apex found",
)
def test_on_apex_dp():
    # DataParallel Apex run (requires >= 2 GPUs).
    train_experiment(None, dl.DataParallelAPEXEngine())


# @mark.skipif(
#     not (IS_CUDA_AVAILABLE and NUM_CUDA_DEVICES >= 2 and SETTINGS.apex_required),
#     reason="No CUDA>=2 or Apex found",
# )
# def test_on_apex_ddp():
#     train_experiment(None, dl.DistributedDataParallelApexEngine())
| 28.85119 | 96 | 0.63008 |
d886b895cc9f258cf5151dbc124a51aaaa2ae04c | 6,077 | py | Python | WaffenPicker.py | JoergRue/Sephrasto | a4fa3c2c1b095b674a9e71416ca448e3be3de225 | [
"MIT"
] | null | null | null | WaffenPicker.py | JoergRue/Sephrasto | a4fa3c2c1b095b674a9e71416ca448e3be3de225 | [
"MIT"
] | null | null | null | WaffenPicker.py | JoergRue/Sephrasto | a4fa3c2c1b095b674a9e71416ca448e3be3de225 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sun Mar 5 16:45:34 2017
@author: Aeolitus
"""
import CharakterWaffen
from PyQt5 import QtCore, QtWidgets, QtGui
from Wolke import Wolke
import Objekte
import Definitionen
import logging
class WaffenPicker(object):
    """Modal dialog for picking a weapon from the Wolke database.

    Construction runs the dialog synchronously (``exec_``); afterwards
    ``self.waffe`` holds the selected weapon object, or ``None`` if the
    dialog was cancelled or nothing was selected.
    """
    def __init__(self,waffe=None):
        """Builds and shows the dialog.

        :param waffe: optional key of a weapon to pre-select in the tree.
        """
        super().__init__()
        logging.debug("Initializing WaffenPicker...")
        self.waffe = None
        # Pre-select the given weapon only if it actually exists in the DB.
        if waffe is not None and waffe in Wolke.DB.waffen:
            self.current = waffe
        else:
            self.current = ""
        self.Form = QtWidgets.QDialog()
        self.ui = CharakterWaffen.Ui_Dialog()
        self.ui.setupUi(self.Form)
        self.Form.setWindowFlags(
                QtCore.Qt.Window |
                QtCore.Qt.CustomizeWindowHint |
                QtCore.Qt.WindowTitleHint |
                QtCore.Qt.WindowCloseButtonHint)
        logging.debug("Ui is Setup...")
        self.populateTree()
        logging.debug("Tree Filled...")
        self.ui.treeWeapons.itemSelectionChanged.connect(self.changeHandler)
        # Double-clicking an entry confirms the dialog (clicks the OK button).
        self.ui.treeWeapons.itemDoubleClicked.connect(lambda item, column: self.ui.buttonBox.buttons()[0].click())
        self.ui.treeWeapons.header().setSectionResizeMode(0,1)
        self.ui.treeWeapons.setFocus()
        self.updateInfo()
        logging.debug("Info Updated...")
        # Re-filter the tree whenever the name filter text changes.
        self.ui.nameFilterEdit.textChanged.connect(self.populateTree)
        self.Form.setWindowModality(QtCore.Qt.ApplicationModal)
        self.Form.show()
        self.ret = self.Form.exec_()
        if self.ret == QtWidgets.QDialog.Accepted and self.current != '':
            self.waffe = Wolke.DB.waffen[self.current]
        else:
            self.waffe = None
    def populateTree(self):
        """(Re)fills the weapon tree, grouped by combat skill and filtered
        by the current name-filter text, then restores the selection."""
        currSet = self.current != ""
        self.ui.treeWeapons.clear();
        kampfferts = self.findKampfFertigkeiten()
        for kind in kampfferts:
            wafs = []
            for waf in Wolke.DB.waffen:
                # Skip weapons whose display name does not match the filter.
                if self.ui.nameFilterEdit.text() and not self.ui.nameFilterEdit.text().lower() in Wolke.DB.waffen[waf].anzeigename.lower():
                    continue
                if Wolke.DB.waffen[waf].fertigkeit == kind.name:
                    wafs.append(waf)
            wafs.sort()
            if len(wafs) == 0:
                continue
            # One top-level node per combat skill, weapons as children.
            parent = QtWidgets.QTreeWidgetItem(self.ui.treeWeapons)
            parent.setText(0,kind.name)
            parent.setText(1,"")
            parent.setExpanded(True)
            for el in wafs:
                # Default the selection to the first listed weapon if none set.
                if not currSet:
                    self.current = el
                    currSet = True
                child = QtWidgets.QTreeWidgetItem(parent)
                child.setText(0,Wolke.DB.waffen[el].anzeigename or el)
                child.setText(1,Wolke.DB.waffen[el].talent)
                child.setData(0, QtCore.Qt.UserRole, el) # store key of weapon in user data
        self.ui.treeWeapons.sortItems(1,QtCore.Qt.AscendingOrder)
        if self.current in Wolke.DB.waffen:
            # NOTE(review): lookup is by ``anzeigename`` while the item text
            # may fall back to the key when ``anzeigename`` is empty — the
            # re-selection presumably fails in that case; confirm.
            found = self.ui.treeWeapons.findItems(Wolke.DB.waffen[self.current].anzeigename, QtCore.Qt.MatchExactly | QtCore.Qt.MatchRecursive)
            if len(found) > 0:
                self.ui.treeWeapons.setCurrentItem(found[0], 0, QtCore.QItemSelectionModel.Select)
        elif self.ui.treeWeapons.topLevelItemCount() > 0 and self.ui.treeWeapons.topLevelItem(0).childCount() > 0:
            self.ui.treeWeapons.setCurrentItem(self.ui.treeWeapons.topLevelItem(0).child(0), 0, QtCore.QItemSelectionModel.Select)
        self.changeHandler()
    def findKampfFertigkeiten(self):
        """Returns all skills from the database flagged as combat skills."""
        return [el for el in Wolke.DB.fertigkeiten.values() if el.kampffertigkeit > 0]
    def changeHandler(self):
        """Updates ``self.current`` from the tree selection (ignoring the
        skill group nodes) and refreshes the detail panel."""
        kampfferts = []
        for fert in self.findKampfFertigkeiten():
            kampfferts.append(fert.name)
        self.current = ""
        for el in self.ui.treeWeapons.selectedItems():
            # Group nodes carry the skill name in column 0 — not selectable weapons.
            if el.text(0) in kampfferts:
                continue
            self.current = el.data(0, QtCore.Qt.UserRole) # contains key of weapon
            break
        self.updateInfo()
    def updateInfo(self):
        """Fills the detail labels for the selected weapon, or clears them
        (and disables OK) when nothing is selected."""
        self.ui.buttonBox.buttons()[0].setEnabled(self.current != "")
        if self.current == "":
            self.ui.labelName.setText("Keine Waffe selektiert")
            self.ui.labelTyp.setText("")
            self.ui.labelFert.setText("")
            self.ui.plainStile.setPlainText("")
            self.ui.labelTP.setText("")
            self.ui.labelRW.setText("")
            self.ui.labelWMLZ_Text.setText("Waffenmodifikator")
            self.ui.labelWMLZ.setText("")
            self.ui.labelH.setText("")
            self.ui.plainEigenschaften.setPlainText("")
        else:
            w = Wolke.DB.waffen[self.current]
            self.ui.labelName.setText(w.name)
            if type(w) == Objekte.Nahkampfwaffe:
                self.ui.labelTyp.setText("Nah")
            else:
                self.ui.labelTyp.setText("Fern")
            self.ui.labelFert.setText(w.fertigkeit + " (" + w.talent + ")")
            stile = Definitionen.KeinKampfstil
            if len(w.kampfstile) > 0:
                stile = ", ".join(w.kampfstile)
            self.ui.plainStile.setPlainText(stile)
            # Damage is "<n> W6 +/- <mod>".
            tp = str(w.W6) + " W6"
            if w.plus < 0:
                tp += " " + str(w.plus)
            else:
                tp += " +" + str(w.plus)
            self.ui.labelTP.setText(tp)
            self.ui.labelRW.setText(str(w.rw))
            # Melee weapons show a weapon modifier; ranged ones a reload time.
            if type(w) == Objekte.Nahkampfwaffe:
                self.ui.labelWMLZ_Text.setText("Waffenmodifikator")
                if w.wm<0:
                    self.ui.labelWMLZ.setText(str(w.wm))
                else:
                    self.ui.labelWMLZ.setText("+" + str(w.wm))
            else:
                self.ui.labelWMLZ_Text.setText("Ladezeit")
                self.ui.labelWMLZ.setText(str(w.lz))
            self.ui.labelH.setText(str(w.haerte))
            self.ui.plainEigenschaften.setPlainText(", ".join(w.eigenschaften))
| 40.245033 | 143 | 0.577423 |
91b74d9566be61fdf4b1da3de442cfc6cd0a203b | 25,786 | py | Python | challenges/movi/movi_b.py | ziyuw/kubric | 5957d8113cfb00aa2097352a893f1d0d906d600a | [
"Apache-2.0"
] | null | null | null | challenges/movi/movi_b.py | ziyuw/kubric | 5957d8113cfb00aa2097352a893f1d0d906d600a | [
"Apache-2.0"
] | null | null | null | challenges/movi/movi_b.py | ziyuw/kubric | 5957d8113cfb00aa2097352a893f1d0d906d600a | [
"Apache-2.0"
] | null | null | null | # Copyright 2022 The Kubric Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=line-too-long, unexpected-keyword-arg
import dataclasses
import json
import logging
import imageio
import numpy as np
import png
import tensorflow as tf
import tensorflow_datasets.public_api as tfds
from typing import List, Dict, Union
_DESCRIPTION = """
A simple rigid-body simulation based on the CLEVR dataset.
The scene consists of a randomly colored floor, four light sources, a camera, and between
3 and 10 random objects.
The camera position is sampled randomly in a half-sphere shell around the scene
and always points at the origin.
The objects are randomly chosen from:
- one of eleven shapes ("cube", "cylinder", "sphere", "cone", "torus", "gear",
"torus_knot", "sponge", "spot", "teapot", "suzanne")
- scaled uniformly between 0.7 and 1.4,
- have one of two materials [rubber, metal],
- a uniformly random hue
They are spawned without overlap in the region [(-5, -5, 1), (5, 5, 5)], and
initialized with a random velocity from the range [(-4, -4, 0), (4, 4, 0)]
minus the position of the object to bias their trajectory towards the center of
the scene.
The scene is simulated for 2 seconds, with the physical properties of the
objects depending on the material:
- metal: friction=0.4, restitution=0.3, density=2.7
- rubber: friction=0.8, restitution=0.7, density=1.1
The dataset contains approx 10k videos rendered at 256x256 pixels and 12fps.
Each sample contains the following video-format data:
(s: sequence length, h: height, w: width)
- "video": (s, h, w, 3) [uint8]
The RGB frames.
- "segmentations": (s, h, w, 1) [uint8]
Instance segmentation as per-pixel object-id with background=0.
Note: because of this the instance IDs used here are one higher than their
corresponding index in sample["instances"].
- "depth": (s, h, w, 1) [uint16]
Distance of each pixel from the center of the camera.
(Note this is different from the z-value sometimes used, which measures the
distance to the camera *plane*.)
The values are stored as uint16 and span the range specified in
sample["metadata"]["depth_range"]. To convert them back to world-units
use:
minv, maxv = sample["metadata"]["depth_range"]
depth = sample["depth"] / 65535 * (maxv - minv) + minv
- "forward_flow": (s, h, w, 2) [uint16]
Forward optical flow in the form (delta_row, delta_column).
The values are stored as uint16 and span the range specified in
sample["metadata"]["forward_flow_range"]. To convert them back to pixels use:
minv, maxv = sample["metadata"]["forward_flow_range"]
depth = sample["forward_flow"] / 65535 * (maxv - minv) + minv
- "backward_flow": (s, h, w, 2) [uint16]
Backward optical flow in the form (delta_row, delta_column).
The values are stored as uint16 and span the range specified in
sample["metadata"]["backward_flow_range"]. To convert them back to pixels use:
minv, maxv = sample["metadata"]["backward_flow_range"]
depth = sample["backward_flow"] / 65535 * (maxv - minv) + minv
- "normal": (s, h, w, 3) [uint16]
Surface normals for each pixel in world coordinates.
- "object_coordinates": (s, h, w, 3) [uint16]
Object coordinates encode the position of each point relative to the objects
bounding box (i.e. back-left-top (X=Y=Z=1) corner is white,
while front-right-bottom (X=Y=Z=0) corner is black.)
Additionally there is rich instance-level information in sample["instances"]:
- "mass": [float32]
Mass of the object used for simulation.
- "friction": [float32]
Friction coefficient used for simulation.
- "restitution": [float32]
Restitution coefficient (bounciness) used for simulation.
- "positions": (s, 3) [float32]
Position of the object for each frame in world-coordinates.
- "quaternions": (s, 4) [float32]
Rotation of the object for each frame as quaternions.
- "velocities": (s, 3) [float32]
Velocity of the object for each frame.
- "angular_velocities": (s, 3) [float32]
Angular velocity of the object for each frame.
- "bboxes_3d": (s, 8, 3) [float32]
World-space corners of the 3D bounding box around the object.
- "image_positions": (s, 2) [float32]
Normalized (0, 1) image-space (2D) coordinates of the center of mass of the
object for each frame.
- "bboxes": (None, 4) [float32]
The normalized image-space (2D) coordinates of the bounding box
[ymin, xmin, ymax, xmax] for all the frames in which the object is visible
(as specified in bbox_frames).
- "bbox_frames": (None,) [int]
A list of all the frames the object is visible.
- "visibility": (s,) [uint16]
Visibility of the object in number of pixels for each frame (can be 0).
- "shape_label": ["cube", "cylinder", "sphere", "cone", "torus", "gear",
"torus_knot", "sponge", "spot", "teapot", "suzanne"]
- "scale": float between 0.7 and 1.4
- "color": (3,) [float32]
Color of the object in RGB.
- "material_label": ["metal", "rubber"]
Information about the camera in sample["camera"]
(given for each frame eventhough the camera is static, so as to stay
consistent with other variants of the dataset):
- "focal_length": [float32]
- "sensor_width": [float32]
- "field_of_view": [float32]
- "positions": (s, 3) [float32]
- "quaternions": (s, 4) [float32]
A single entry about the background:
- "background_color": (3,) [float32]
And finally information about collision events in sample["events"]["collisions"]:
- "instances": (2,)[uint16]
Indices of the two instance between which the collision happened.
Note that collisions with the floor/background objects are marked with 65535
- "frame": tf.int32,
Frame in which the collision happenend.
- "force": tf.float32,
The force (strength) of the collision.
- "position": tfds.features.Tensor(shape=(3,), dtype=tf.float32),
Position of the collision event in 3D world coordinates.
- "image_position": tfds.features.Tensor(shape=(2,), dtype=tf.float32),
Position of the collision event projected onto normalized 2D image coordinates.
- "contact_normal": tfds.features.Tensor(shape=(3,), dtype=tf.float32),
The normal-vector of the contact (direction of the force).
"""
_CITATION = """\
@inproceedings{greff2022kubric,
title = {Kubric: a scalable dataset generator},
author = {Klaus Greff and Francois Belletti and Lucas Beyer and Carl Doersch and
Yilun Du and Daniel Duckworth and David J Fleet and Dan Gnanapragasam and
Florian Golemo and Charles Herrmann and Thomas Kipf and Abhijit Kundu and
Dmitry Lagun and Issam Laradji and Hsueh-Ti (Derek) Liu and Henning Meyer and
Yishu Miao and Derek Nowrouzezahrai and Cengiz Oztireli and Etienne Pot and
Noha Radwan and Daniel Rebain and Sara Sabour and Mehdi S. M. Sajjadi and Matan Sela and
Vincent Sitzmann and Austin Stone and Deqing Sun and Suhani Vora and Ziyu Wang and
Tianhao Wu and Kwang Moo Yi and Fangcheng Zhong and Andrea Tagliasacchi},
booktitle = {{IEEE} Conference on Computer Vision and Pattern Recognition, {CVPR}},
year = {2022},
publisher = {Computer Vision Foundation / {IEEE}},
}"""
@dataclasses.dataclass
class MoviBConfig(tfds.core.BuilderConfig):
  """Configuration for Multi-Object Video (MOVi) dataset."""
  # Output resolution of the rendered frames.
  height: int = 256
  width: int = 256
  # Number of frames per video sample.
  num_frames: int = 24
  # Fraction of train_val_path sub-folders held out for validation.
  validation_ratio: float = 0.1
  # Root directory containing one sub-folder per rendered scene.
  train_val_path: str = None
  # Optional mapping from split name to directory for extra test splits.
  test_split_paths: Dict[str, str] = dataclasses.field(default_factory=dict)
class MoviB(tfds.core.BeamBasedBuilder):
  """DatasetBuilder for Movi-B dataset.

  Reads pre-rendered kubric scene directories (one per video) via Apache
  Beam and packages them into TFDS examples.
  """
  VERSION = tfds.core.Version("1.0.0")
  RELEASE_NOTES = {
      "1.0.0": "initial release",
  }
  BUILDER_CONFIGS = [
      MoviBConfig(
          name="256x256",
          description="Full resolution of 256x256",
          height=256,
          width=256,
          validation_ratio=0.025,
          # train_val_path="/usr/local/google/home/klausg/movi_tmp",
          train_val_path="gs://research-brain-kubric-xgcp/jobs/movi_b_regen_10k/",
          test_split_paths={
          }
      ),
      MoviBConfig(
          name="128x128",
          description="Downscaled to 128x128",
          height=128,
          width=128,
          validation_ratio=0.025,
          # train_val_path="/usr/local/google/home/klausg/movi_tmp",
          train_val_path="gs://research-brain-kubric-xgcp/jobs/movi_b_regen_10k/",
          test_split_paths={
          }
      ),
  ]
  def _info(self) -> tfds.core.DatasetInfo:
    """Returns the dataset metadata."""
    h = self.builder_config.height
    w = self.builder_config.width
    s = self.builder_config.num_frames
    def get_movi_b_instance_features(seq_length: int):
      # Extend the shared per-instance features with MOVi-B specifics:
      # shape, color and material of each object.
      features = get_instance_features(seq_length)
      features.update({
          "shape_label": tfds.features.ClassLabel(
              names=["cube", "cylinder", "sphere", "cone", "torus", "gear",
                     "torus_knot", "sponge", "spot", "teapot", "suzanne"]),
          "color": tfds.features.Tensor(shape=(3,), dtype=tf.float32),
          "material_label": tfds.features.ClassLabel(
              names=["metal", "rubber"]),
      })
      return features
    return tfds.core.DatasetInfo(
        builder=self,
        description=_DESCRIPTION,
        features=tfds.features.FeaturesDict({
            "metadata": {
                "video_name": tfds.features.Text(),
                "width": tf.int32,
                "height": tf.int32,
                "num_frames": tf.int32,
                "num_instances": tf.uint16,
                # Value ranges needed to de-quantize the uint16-encoded
                # depth and flow layers back into world/pixel units.
                "depth_range": tfds.features.Tensor(shape=(2,),
                                                    dtype=tf.float32),
                "forward_flow_range": tfds.features.Tensor(shape=(2,),
                                                           dtype=tf.float32),
                "backward_flow_range": tfds.features.Tensor(shape=(2,),
                                                            dtype=tf.float32),
            },
            "background_color": tfds.features.Tensor(shape=(3,),
                                                     dtype=tf.float32),
            "instances": tfds.features.Sequence(
                feature=get_movi_b_instance_features(seq_length=s)),
            "camera": get_camera_features(s),
            "events": get_events_features(),
            # ----- per-frame image layers below
            "video": tfds.features.Video(shape=(s, h, w, 3)),
            "segmentations": tfds.features.Sequence(
                tfds.features.Image(shape=(h, w, 1), dtype=tf.uint8),
                length=s),
            "forward_flow": tfds.features.Sequence(
                tfds.features.Tensor(shape=(h, w, 2), dtype=tf.uint16),
                length=s),
            "backward_flow": tfds.features.Sequence(
                tfds.features.Tensor(shape=(h, w, 2), dtype=tf.uint16),
                length=s),
            "depth": tfds.features.Sequence(
                tfds.features.Image(shape=(h, w, 1), dtype=tf.uint16),
                length=s),
            "normal": tfds.features.Video(shape=(s, h, w, 3), dtype=tf.uint16),
            "object_coordinates": tfds.features.Video(shape=(s, h, w, 3),
                                                      dtype=tf.uint16),
        }),
        supervised_keys=None,
        homepage="https://github.com/google-research/kubric",
        citation=_CITATION)
  def _split_generators(self, unused_dl_manager: tfds.download.DownloadManager):
    """Returns SplitGenerators."""
    del unused_dl_manager
    path = tfds.core.as_path(self.builder_config.train_val_path)
    all_subdirs = [str(d) for d in path.iterdir()]
    logging.info("Found %d sub-folders in master path: %s",
                 len(all_subdirs), path)
    # shuffle deterministically so the train/val partition is reproducible
    rng = np.random.RandomState(seed=42)
    rng.shuffle(all_subdirs)
    validation_ratio = self.builder_config.validation_ratio
    # Keep at least one example for validation.
    validation_examples = max(1, round(len(all_subdirs) * validation_ratio))
    training_examples = len(all_subdirs) - validation_examples
    logging.info("Using %f of examples for validation for a total of %d",
                 validation_ratio, validation_examples)
    logging.info("Using the other %d examples for training", training_examples)
    splits = {
        tfds.Split.TRAIN: self._generate_examples(all_subdirs[:training_examples]),
        tfds.Split.VALIDATION: self._generate_examples(all_subdirs[training_examples:]),
    }
    # Additional, explicitly configured test splits (kept in numeric order).
    for key, path in self.builder_config.test_split_paths.items():
      path = tfds.core.as_path(path)
      split_dirs = [d for d in path.iterdir()]
      # sort the directories by their integer number
      split_dirs = sorted(split_dirs, key=lambda x: int(x.name))
      logging.info("Found %d sub-folders in '%s' path: %s",
                   len(split_dirs), key, path)
      splits[key] = self._generate_examples([str(d) for d in split_dirs])
    return splits
  def _generate_examples(self, directories: List[str]):
    """Yields examples."""
    target_size = (self.builder_config.height, self.builder_config.width)
    def _process_example(video_dir):
      key, result, metadata = load_scene_directory(video_dir, target_size)
      # add Movi-B specific instance information:
      for i, obj in enumerate(result["instances"]):
        obj["shape_label"] = metadata["instances"][i]["shape"]
        obj["material_label"] = metadata["instances"][i]["material"]
        obj["color"] = np.array(metadata["instances"][i]["color"],
                                dtype=np.float32)
      return key, result
    # Build the Beam pipeline: skip incomplete scene dirs, then convert each.
    beam = tfds.core.lazy_imports.apache_beam
    return (beam.Create(directories) |
            beam.Filter(is_complete_dir) |
            beam.Map(_process_example))
# Image layers read from a scene directory by default.
DEFAULT_LAYERS = ("rgba", "segmentation", "forward_flow", "backward_flow",
                  "depth", "normal", "object_coordinates")
def load_scene_directory(scene_dir, target_size, layers=DEFAULT_LAYERS):
  """Loads one rendered kubric scene directory into an example dict.

  :param scene_dir: path of the scene folder (metadata.json, events.json,
      data_ranges.json plus per-frame image files).
  :param target_size: (height, width) to which all layers are downscaled;
      must divide the source resolution evenly.
  :param layers: subset of DEFAULT_LAYERS to load.
  :returns: (example_key, result_dict, raw_metadata_dict).
  """
  scene_dir = tfds.core.as_path(scene_dir)
  example_key = f"{scene_dir.name}"
  with tf.io.gfile.GFile(str(scene_dir / "data_ranges.json"), "r") as fp:
    data_ranges = json.load(fp)
  with tf.io.gfile.GFile(str(scene_dir / "metadata.json"), "r") as fp:
    metadata = json.load(fp)
  with tf.io.gfile.GFile(str(scene_dir / "events.json"), "r") as fp:
    events = json.load(fp)
  num_frames = metadata["metadata"]["num_frames"]
  result = {
      "metadata": {
          "video_name": example_key,
          "width": target_size[1],
          "height": target_size[0],
          "num_frames": num_frames,
          "num_instances": metadata["metadata"]["num_instances"],
      },
      "background_color": rgb_from_hexstr(metadata["metadata"]["background"]),
      "instances": [format_instance_information(obj)
                    for obj in metadata["instances"]],
      "camera": format_camera_information(metadata),
      "events": format_events_information(events),
  }
  resolution = metadata["metadata"]["resolution"]
  # Only integer, aspect-ratio-preserving downscaling is supported.
  assert resolution[0] / target_size[0] == resolution[1] / target_size[1]
  scale = resolution[0] / target_size[0]
  assert scale == resolution[0] // target_size[0]
  paths = {
      key: [scene_dir / f"{key}_{f:05d}.png" for f in range(num_frames)]
      for key in layers if key != "depth"
  }
  if "depth" in layers:
    # Depth is stored as float tiff; re-quantize to uint16 over its range.
    depth_paths = [scene_dir / f"depth_{f:05d}.tiff" for f in range(num_frames)]
    depth_frames = np.array([
        subsample_nearest_neighbor(read_tiff(frame_path), target_size)
        for frame_path in depth_paths])
    depth_min, depth_max = np.min(depth_frames), np.max(depth_frames)
    result["depth"] = convert_float_to_uint16(depth_frames, depth_min, depth_max)
    result["metadata"]["depth_range"] = [depth_min, depth_max]
  if "forward_flow" in layers:
    # Flow magnitudes are in source pixels; divide range by the scale factor.
    result["metadata"]["forward_flow_range"] = [
        data_ranges["forward_flow"]["min"] / scale,
        data_ranges["forward_flow"]["max"] / scale]
    result["forward_flow"] = [
        subsample_nearest_neighbor(read_png(frame_path)[..., :2],
                                   target_size)
        for frame_path in paths["forward_flow"]]
  if "backward_flow" in layers:
    result["metadata"]["backward_flow_range"] = [
        data_ranges["backward_flow"]["min"] / scale,
        data_ranges["backward_flow"]["max"] / scale]
    result["backward_flow"] = [
        subsample_nearest_neighbor(read_png(frame_path)[..., :2],
                                   target_size)
        for frame_path in paths["backward_flow"]]
  for key in ["normal", "object_coordinates", "uv"]:
    if key in layers:
      result[key] = [
          subsample_nearest_neighbor(read_png(frame_path),
                                     target_size)
          for frame_path in paths[key]]
  if "segmentation" in layers:
    # somehow we ended up calling this "segmentations" in TFDS and
    # "segmentation" in kubric. So we have to treat it separately.
    result["segmentations"] = [
        subsample_nearest_neighbor(read_png(frame_path),
                                   target_size)
        for frame_path in paths["segmentation"]]
  if "rgba" in layers:
    # RGB uses averaging (not nearest) to avoid aliasing; alpha is dropped.
    result["video"] = [
        subsample_avg(read_png(frame_path), target_size)[..., :3]
        for frame_path in paths["rgba"]]
  return example_key, result, metadata
def get_camera_features(seq_length):
  """Feature spec for the camera (stored per-frame even when static)."""
  tensor = tfds.features.Tensor
  return {
      "focal_length": tf.float32,
      "sensor_width": tf.float32,
      "field_of_view": tf.float32,
      "positions": tensor(shape=(seq_length, 3), dtype=tf.float32),
      "quaternions": tensor(shape=(seq_length, 4), dtype=tf.float32),
  }
def format_camera_information(metadata):
  """Converts the camera entry of a metadata dict into numpy-backed features."""
  cam = metadata["camera"]
  return {
      "focal_length": cam["focal_length"],
      "sensor_width": cam["sensor_width"],
      "field_of_view": cam["field_of_view"],
      "positions": np.array(cam["positions"], np.float32),
      "quaternions": np.array(cam["quaternions"], np.float32),
  }
def get_events_features():
  """Return the TFDS feature spec for the per-scene collision events."""
  def vec(length):
    # Fixed-length float32 vector (3D point/normal or 2D image position).
    return tfds.features.Tensor(shape=(length,), dtype=tf.float32)
  return {
      "collisions": tfds.features.Sequence({
          "instances": tfds.features.Tensor(shape=(2,), dtype=tf.uint16),
          "frame": tf.int32,
          "force": tf.float32,
          "position": vec(3),
          "image_position": vec(2),
          "contact_normal": vec(3),
      })
  }
def format_events_information(events):
  """Convert the collision-event metadata into TFDS-encodable values."""
  def _format_collision(collision):
    # Vectors become typed numpy arrays; scalar fields pass through.
    return {
        "instances": np.array(collision["instances"], dtype=np.uint16),
        "frame": collision["frame"],
        "force": collision["force"],
        "position": np.array(collision["position"], dtype=np.float32),
        "image_position": np.array(collision["image_position"],
                                   dtype=np.float32),
        "contact_normal": np.array(collision["contact_normal"],
                                   dtype=np.float32),
    }
  return {"collisions": [_format_collision(c)
                         for c in events["collisions"]]}
def get_instance_features(seq_length: int):
  """Return the TFDS feature spec for one object instance in a scene."""
  def per_frame(*trailing):
    # float32 track with one entry per frame plus the given trailing dims.
    return tfds.features.Tensor(shape=(seq_length,) + trailing,
                                dtype=tf.float32)
  return {
      "mass": tf.float32,
      "friction": tf.float32,
      "restitution": tf.float32,
      "positions": per_frame(3),
      "quaternions": per_frame(4),
      "velocities": per_frame(3),
      "angular_velocities": per_frame(3),
      "bboxes_3d": per_frame(8, 3),
      "image_positions": per_frame(2),
      "bboxes": tfds.features.Sequence(tfds.features.BBoxFeature()),
      "bbox_frames": tfds.features.Sequence(
          tfds.features.Tensor(shape=(), dtype=tf.int32)),
      "visibility": tfds.features.Tensor(shape=(seq_length,),
                                         dtype=tf.uint16),
  }
def format_instance_information(obj):
  """Convert one instance's metadata dict into TFDS-encodable values.

  Mirrors the spec declared in `get_instance_features`: float tracks become
  float32 arrays, `bboxes` become tfds BBox tuples, and the integer fields
  use the dtypes the feature spec declares.
  """
  return {
      "mass": obj["mass"],
      "friction": obj["friction"],
      "restitution": obj["restitution"],
      "positions": np.array(obj["positions"], np.float32),
      "quaternions": np.array(obj["quaternions"], np.float32),
      "velocities": np.array(obj["velocities"], np.float32),
      "angular_velocities": np.array(obj["angular_velocities"], np.float32),
      "bboxes_3d": np.array(obj["bboxes_3d"], np.float32),
      "image_positions": np.array(obj["image_positions"], np.float32),
      "bboxes": [tfds.features.BBox(*bbox) for bbox in obj["bboxes"]],
      # int32 (was uint16): get_instance_features declares bbox_frames as
      # tf.int32, and a mismatched numpy dtype breaks TFDS encoding.
      "bbox_frames": np.array(obj["bbox_frames"], dtype=np.int32),
      "visibility": np.array(obj["visibility"], dtype=np.uint16),
  }
def subsample_nearest_neighbor(arr, size):
  """Shrink an HxWxC array to `size` by keeping one source pixel per cell.

  The source dimensions must be integer multiples of the target ones; the
  kept pixel is the (near-)central one of each cell.
  """
  in_h, in_w, _ = arr.shape
  out_h, out_w = size
  step_h, step_w = in_h // out_h, in_w // out_w
  # Only exact integer subsampling factors are supported.
  assert step_h * out_h == in_h
  assert step_w * out_w == in_w
  off_h = (step_h - 1) // 2
  off_w = (step_w - 1) // 2
  return arr[off_h::step_h, off_w::step_w, :]
def convert_float_to_uint16(array, min_val, max_val):
  """Linearly map floats in [min_val, max_val] onto the full uint16 range."""
  normalized = (array - min_val) / (max_val - min_val)
  return np.round(normalized * 65535).astype(np.uint16)
def subsample_avg(arr, size):
  """Downscale an HxWxC uint8 array to `size` by averaging each cell."""
  in_h, in_w, channels = arr.shape
  out_h, out_w = size
  bin_h, bin_w = in_h // out_h, in_w // out_w
  # Group pixels into (bin_h x bin_w) cells, then average within each cell.
  binned = arr.reshape(out_h, bin_h, out_w, bin_w, channels)
  return np.round(binned.mean(axis=(1, 3))).astype(np.uint8)
def is_complete_dir(video_dir, layers=DEFAULT_LAYERS):
  """Check whether a rendered-scene directory contains a full set of outputs.

  A directory is complete when the three JSON sidecar files are present and
  every requested layer has the same, non-zero number of frame files as the
  "rgba" layer.
  """
  video_dir = tfds.core.as_path(video_dir)
  names = [entry.name for entry in video_dir.iterdir()]
  required = ("data_ranges.json", "metadata.json", "events.json")
  if any(sidecar not in names for sidecar in required):
    return False
  frames_per_layer = {
      layer: sum(1 for name in names if name.startswith(layer))
      for layer in layers}
  expected = frames_per_layer["rgba"]
  if expected == 0:
    return False
  return all(count == expected for count in frames_per_layer.values())
# Accepted anywhere a filesystem path is expected: a plain string or a TFDS
# ReadWritePath (which also handles remote buckets such as gs:// URLs).
PathLike = Union[str, tfds.core.ReadWritePath]
def as_path(path: PathLike) -> tfds.core.ReadWritePath:
  """Convert a str or pathlike object to tfds.core.ReadWritePath.

  Instead of pathlib.Paths, we use the TFDS path because they transparently
  support paths to GCS buckets such as "gs://kubric-public/GSO".
  """
  return tfds.core.as_path(path)
def read_png(filename, rescale_range=None) -> np.ndarray:
  """Load a PNG file into an (height, width, planes) numpy array.

  Args:
    filename: path (str or TFDS path) of the PNG to read.
    rescale_range: optional (minv, maxv) pair; when given, the integer pixel
      values are linearly mapped into that float range.

  Returns:
    uint8 or uint16 array (depending on the file's bit depth), or float
    array when `rescale_range` is given.

  Raises:
    NotImplementedError: for bit depths other than 8 or 16.
  """
  filename = as_path(filename)
  png_reader = png.Reader(bytes=filename.read_bytes())
  width, height, pngdata, info = png_reader.read()
  # Free the reader (and its reference to the byte buffer) early.
  del png_reader
  bitdepth = info["bitdepth"]
  if bitdepth == 8:
    dtype = np.uint8
  elif bitdepth == 16:
    dtype = np.uint16
  else:
    raise NotImplementedError(f"Unsupported bitdepth: {bitdepth}")
  plane_count = info["planes"]
  # pngdata yields one row at a time; stack them into a (H, W*planes) array.
  pngdata = np.vstack(list(map(dtype, pngdata)))
  if rescale_range is not None:
    minv, maxv = rescale_range
    # NOTE(review): divides by 2**bitdepth (256/65536) rather than
    # 2**bitdepth - 1, so the maximum pixel maps slightly below maxv —
    # confirm this matches the writer's quantization convention.
    pngdata = pngdata / 2**bitdepth * (maxv - minv) + minv
  return pngdata.reshape((height, width, plane_count))
def write_tiff(data: np.ndarray, filename: PathLike):
  """Save data as as tif image (which natively supports float values)."""
  # Only HxWxC images with 1 (gray), 3 (RGB) or 4 (RGBA) channels.
  assert data.ndim == 3, data.shape
  assert data.shape[2] in (1, 3, 4), "Must be grayscale, RGB, or RGBA"
  encoded = imageio.imwrite("<bytes>", data, format="tiff")
  as_path(filename).write_bytes(encoded)
def read_tiff(filename: PathLike) -> np.ndarray:
  """Load a TIFF image as an HxWxC array (grayscale gains a channel axis)."""
  raw = as_path(filename).read_bytes()
  img = imageio.imread(raw, format="tiff")
  # Normalize 2-D grayscale images to the 3-D HxWx1 convention.
  return img if img.ndim != 2 else img[:, :, None]
def rgb_from_hexstr(hexstr: str):
  """Parse a hex color string like #ffaa22 or #11aa88ff into an (r, g, b) tuple.

  Supports both long and short form (i.e. #ffffff is the same as #fff) and an
  optional alpha component (e.g. #112233ff or #123f). The alpha digits are
  validated as hex but deliberately ignored — the return value is always a
  3-tuple of floats in [0, 1]. (The original code computed the alpha into an
  unused variable; this keeps the validation without the dead assignment.)

  Raises:
    ValueError: if the string (after an optional leading '#') is not 3, 4,
      6 or 8 hex digits long, or contains non-hex characters.
  """
  if hexstr.startswith("#"):  # get rid of leading #
    hexstr = hexstr[1:]
  if len(hexstr) in (3, 4):
    # Short form: one hex digit per channel, scaled by 15.
    r, g, b = (int(digit, 16) / 15. for digit in hexstr[:3])
    if len(hexstr) == 4:
      int(hexstr[3], 16)  # validate the alpha digit; value is ignored
    return r, g, b
  elif len(hexstr) in (6, 8):
    # Long form: two hex digits per channel, scaled by 255.
    r, g, b = (int(hexstr[i:i + 2], 16) / 255.0 for i in (0, 2, 4))
    if len(hexstr) == 8:
      int(hexstr[6:8], 16)  # validate the alpha byte; value is ignored
    return r, g, b
  else:
    raise ValueError("invalid color hex string")
280f1e56923aabf05fac023d1d9c4302137b5fee | 557 | bzl | Python | source/bazel/deps/bazel_gazelle/get.bzl | luxe/unilang | 6c8a431bf61755f4f0534c6299bd13aaeba4b69e | [
"MIT"
] | 33 | 2019-05-30T07:43:32.000Z | 2021-12-30T13:12:32.000Z | source/bazel/deps/bazel_gazelle/get.bzl | luxe/unilang | 6c8a431bf61755f4f0534c6299bd13aaeba4b69e | [
"MIT"
] | 371 | 2019-05-16T15:23:50.000Z | 2021-09-04T15:45:27.000Z | source/bazel/deps/bazel_gazelle/get.bzl | luxe/unilang | 6c8a431bf61755f4f0534c6299bd13aaeba4b69e | [
"MIT"
] | 6 | 2019-08-22T17:37:36.000Z | 2020-11-07T07:15:32.000Z | # Do not edit this file directly.
# It was auto-generated by: code/programs/reflexivity/reflexive_refresh
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def bazelGazelle():
    # Pin bazel-gazelle to one exact commit of the Unilang fork and verify
    # the downloaded archive against its sha256 checksum.
    http_archive(
        name = "bazel_gazelle",
        sha256 = "079e615ddbbcd53a0a730341dfe5d52dcc06771bcf782254eb6fd581ec20b102",
        strip_prefix = "bazel-gazelle-2f9261b27f1d2660e2952bc9a0e79e4e81691afe",
        urls = [
            "https://github.com/Unilang/bazel-gazelle/archive/2f9261b27f1d2660e2952bc9a0e79e4e81691afe.tar.gz",
        ],
    )
| 34.8125 | 111 | 0.72711 |
29f0e0ba0c22a253b1ab09e9c814815b0f771b30 | 4,484 | py | Python | PROJ/SABR/Helper_Functions/getNonUniformGrid.py | mattslezak-shell/PROJ_Option_Pricing_Matlab | 6105bd00ba3471802180c122fdf81e90833a91c4 | [
"MIT"
] | null | null | null | PROJ/SABR/Helper_Functions/getNonUniformGrid.py | mattslezak-shell/PROJ_Option_Pricing_Matlab | 6105bd00ba3471802180c122fdf81e90833a91c4 | [
"MIT"
] | null | null | null | PROJ/SABR/Helper_Functions/getNonUniformGrid.py | mattslezak-shell/PROJ_Option_Pricing_Matlab | 6105bd00ba3471802180c122fdf81e90833a91c4 | [
"MIT"
] | 1 | 2022-01-07T15:31:45.000Z | 2022-01-07T15:31:45.000Z | # Generated with SMOP 0.41-beta
try:
from smop.libsmop import *
except ImportError:
raise ImportError('File compiled with `smop3`, please install `smop3` to run it.') from None
# getNonUniformGrid.m
@function
def getNonUniformGrid(m_0=None,lx=None,ux=None,gridMethod=None,center=None,manualPoint=None,gridMultParam=None,*args,**kwargs):
varargin = getNonUniformGrid.varargin
nargin = getNonUniformGrid.nargin
#UNTITLED Summary of this function goes here
# Detailed explanation goes here
nx=copy(m_0)
# getNonUniformGrid.m:5
dx=(ux - lx) / nx
# getNonUniformGrid.m:5
if gridMethod == 1:
v=lx + dot((arange(1,nx)).T,dx)
# getNonUniformGrid.m:8
else:
if gridMethod == 2:
v=zeros(m_0,1)
# getNonUniformGrid.m:10
v[1]=lx
# getNonUniformGrid.m:11
v[m_0]=ux
# getNonUniformGrid.m:11
mid=floor(m_0 / 2)
# getNonUniformGrid.m:12
for k in arange(2,mid).reshape(-1):
v[k]=center + sinh(dot((1 - (k - 1) / (mid - 1)),asinh(v(1) - center)))
# getNonUniformGrid.m:14
for k in arange(mid + 1,m_0 - 1).reshape(-1):
v[k]=center + sinh(dot(((k - mid) / (mid)),asinh(v(m_0) - center)))
# getNonUniformGrid.m:17
else:
if gridMethod == 3:
x=arange(0,1,1 / (m_0 - 1))
# getNonUniformGrid.m:20
alpha=dot(0.8,(ux - lx))
# getNonUniformGrid.m:21
c1=asinh((lx - center) / alpha)
# getNonUniformGrid.m:22
c2=asinh((ux - center) / alpha)
# getNonUniformGrid.m:23
v=center + dot(alpha,(dot(c2,sinh(multiply(c2,x) + multiply(c1,(1 - x))))))
# getNonUniformGrid.m:24
else:
if gridMethod == 4:
v=zeros(m_0,1)
# getNonUniformGrid.m:26
v[1]=lx
# getNonUniformGrid.m:27
v[m_0]=ux
# getNonUniformGrid.m:28
alpha=dot(gridMultParam,(v(m_0) - v(1)))
# getNonUniformGrid.m:29
c1=asinh((v(1) - center) / alpha)
# getNonUniformGrid.m:30
c2=asinh((v(m_0) - center) / alpha)
# getNonUniformGrid.m:31
v[arange(2,m_0 - 1)]=center + dot(alpha,sinh(dot(c2 / m_0,(arange(2,m_0 - 1))) + dot(c1,(1 - (arange(2,m_0 - 1)) / m_0))))
# getNonUniformGrid.m:32
else:
if gridMethod == 5:
tol=1e-07
# getNonUniformGrid.m:34
v=zeros(m_0,1)
# getNonUniformGrid.m:35
v[1]=lx
# getNonUniformGrid.m:36
v[m_0]=ux
# getNonUniformGrid.m:37
alpha=dot(gridMultParam,(v(m_0) - v(1)))
# getNonUniformGrid.m:38
c1=asinh((v(1) - center) / alpha)
# getNonUniformGrid.m:39
c2=asinh((v(m_0) - center) / alpha)
# getNonUniformGrid.m:40
vtil=zeros(m_0 - 1,1)
# getNonUniformGrid.m:41
vtil[arange(2,m_0 - 2)]=center + dot(alpha,sinh(dot(c2 / (m_0 - 1),(arange(2,m_0 - 2))) + dot(c1,(1 - (arange(2,m_0 - 2)) / (m_0 - 1)))))
# getNonUniformGrid.m:42
nnot_til=1
# getNonUniformGrid.m:43
while vtil(nnot_til) < manualPoint:
nnot_til=nnot_til + 1
# getNonUniformGrid.m:45
nnot_til=nnot_til - 1
# getNonUniformGrid.m:47
v[arange(2,nnot_til)]=vtil(arange(2,nnot_til))
# getNonUniformGrid.m:48
v[arange(nnot_til + 2,m_0 - 1)]=vtil(arange(nnot_til + 1,m_0 - 2))
# getNonUniformGrid.m:49
if manualPoint - vtil(nnot_til) < tol:
v[nnot_til]=manualPoint
# getNonUniformGrid.m:51
v[nnot_til + 1]=(manualPoint + vtil(nnot_til + 1)) / 2
# getNonUniformGrid.m:52
else:
if vtil(nnot_til + 1) - manualPoint < tol:
v[nnot_til + 2]=manualPoint
# getNonUniformGrid.m:54
v[nnot_til + 1]=(v(nnot_til + 2) + v(nnot_til)) / 2
# getNonUniformGrid.m:55
else:
v[nnot_til + 1]=manualPoint
# getNonUniformGrid.m:57
return v
# SMOP emits this script-style guard; the module is import-only, so running
# it directly is intentionally a no-op.
if __name__ == '__main__':
    pass
| 37.366667 | 162 | 0.510259 |
eb12fb8102deaf438621d303589488b0e831d774 | 17,188 | py | Python | youtube_dl/extractor/npo.py | builder07/ytdl | 2c0a5d50af7ecc7302c813d649ee72dcd457a50a | [
"Unlicense"
] | null | null | null | youtube_dl/extractor/npo.py | builder07/ytdl | 2c0a5d50af7ecc7302c813d649ee72dcd457a50a | [
"Unlicense"
] | null | null | null | youtube_dl/extractor/npo.py | builder07/ytdl | 2c0a5d50af7ecc7302c813d649ee72dcd457a50a | [
"Unlicense"
] | null | null | null | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
fix_xml_ampersands,
parse_duration,
qualities,
strip_jsonp,
unified_strdate,
)
class NPOBaseIE(InfoExtractor):
def _get_token(self, video_id):
token_page = self._download_webpage(
'http://ida.omroep.nl/npoplayer/i.js',
video_id, note='Downloading token')
token = self._search_regex(
r'npoplayer\.token = "(.+?)"', token_page, 'token')
# Decryption algorithm extracted from http://npoplayer.omroep.nl/csjs/npoplayer-min.js
token_l = list(token)
first = second = None
for i in range(5, len(token_l) - 4):
if token_l[i].isdigit():
if first is None:
first = i
elif second is None:
second = i
if first is None or second is None:
first = 12
second = 13
token_l[first], token_l[second] = token_l[second], token_l[first]
return ''.join(token_l)
class NPOIE(NPOBaseIE):
    """Extractor for VOD pages on npo.nl / ntr.nl and omroepwnl.nl fragments.

    Also serves as the base class for VPROIE below.
    """
    IE_NAME = 'npo'
    IE_DESC = 'npo.nl and ntr.nl'
    _VALID_URL = r'''(?x)
                    (?:
                        npo:|
                        https?://
                            (?:www\.)?
                            (?:
                                npo\.nl/(?!live|radio)(?:[^/]+/){2}|
                                ntr\.nl/(?:[^/]+/){2,}|
                                omroepwnl\.nl/video/fragment/[^/]+__
                            )
                        )
                        (?P<id>[^/?#]+)
                '''
    _TESTS = [
        {
            'url': 'http://www.npo.nl/nieuwsuur/22-06-2014/VPWON_1220719',
            'md5': '4b3f9c429157ec4775f2c9cb7b911016',
            'info_dict': {
                'id': 'VPWON_1220719',
                'ext': 'm4v',
                'title': 'Nieuwsuur',
                'description': 'Dagelijks tussen tien en elf: nieuws, sport en achtergronden.',
                'upload_date': '20140622',
            },
        },
        {
            'url': 'http://www.npo.nl/de-mega-mike-mega-thomas-show/27-02-2009/VARA_101191800',
            'md5': 'da50a5787dbfc1603c4ad80f31c5120b',
            'info_dict': {
                'id': 'VARA_101191800',
                'ext': 'm4v',
                'title': 'De Mega Mike & Mega Thomas show: The best of.',
                'description': 'md5:3b74c97fc9d6901d5a665aac0e5400f4',
                'upload_date': '20090227',
                'duration': 2400,
            },
        },
        {
            'url': 'http://www.npo.nl/tegenlicht/25-02-2013/VPWON_1169289',
            'md5': 'f8065e4e5a7824068ed3c7e783178f2c',
            'info_dict': {
                'id': 'VPWON_1169289',
                'ext': 'm4v',
                'title': 'Tegenlicht: De toekomst komt uit Afrika',
                'description': 'md5:52cf4eefbc96fffcbdc06d024147abea',
                'upload_date': '20130225',
                'duration': 3000,
            },
        },
        {
            'url': 'http://www.npo.nl/de-nieuwe-mens-deel-1/21-07-2010/WO_VPRO_043706',
            'info_dict': {
                'id': 'WO_VPRO_043706',
                'ext': 'wmv',
                'title': 'De nieuwe mens - Deel 1',
                'description': 'md5:518ae51ba1293ffb80d8d8ce90b74e4b',
                'duration': 4680,
            },
            'params': {
                # mplayer mms download
                'skip_download': True,
            }
        },
        # non asf in streams
        {
            'url': 'http://www.npo.nl/hoe-gaat-europa-verder-na-parijs/10-01-2015/WO_NOS_762771',
            'md5': 'b3da13de374cbe2d5332a7e910bef97f',
            'info_dict': {
                'id': 'WO_NOS_762771',
                'ext': 'mp4',
                'title': 'Hoe gaat Europa verder na Parijs?',
            },
        },
        {
            'url': 'http://www.ntr.nl/Aap-Poot-Pies/27/detail/Aap-poot-pies/VPWON_1233944#content',
            'md5': '01c6a2841675995da1f0cf776f03a9c3',
            'info_dict': {
                'id': 'VPWON_1233944',
                'ext': 'm4v',
                'title': 'Aap, poot, pies',
                'description': 'md5:c9c8005d1869ae65b858e82c01a91fde',
                'upload_date': '20150508',
                'duration': 599,
            },
        },
        {
            'url': 'http://www.omroepwnl.nl/video/fragment/vandaag-de-dag-verkiezingen__POMS_WNL_853698',
            'md5': 'd30cd8417b8b9bca1fdff27428860d08',
            'info_dict': {
                'id': 'POW_00996502',
                'ext': 'm4v',
                'title': '''"Dit is wel een 'landslide'..."''',
                'description': 'md5:f8d66d537dfb641380226e31ca57b8e8',
                'upload_date': '20150508',
                'duration': 462,
            },
        }
    ]
    def _real_extract(self, url):
        video_id = self._match_id(url)
        return self._get_info(video_id)
    def _get_info(self, video_id):
        """Fetch metadata and downloadable formats for one video id."""
        metadata = self._download_json(
            'http://e.omroep.nl/metadata/%s' % video_id,
            video_id,
            # We have to remove the javascript callback
            transform_source=strip_jsonp,
        )
        # For some videos actual video id (prid) is different (e.g. for
        # http://www.omroepwnl.nl/video/fragment/vandaag-de-dag-verkiezingen__POMS_WNL_853698
        # video id is POMS_WNL_853698 but prid is POW_00996502)
        video_id = metadata.get('prid') or video_id
        # titel is too generic in some cases so utilize aflevering_titel as well
        # when available (e.g. http://tegenlicht.vpro.nl/afleveringen/2014-2015/access-to-africa.html)
        title = metadata['titel']
        sub_title = metadata.get('aflevering_titel')
        if sub_title and sub_title != title:
            title += ': %s' % sub_title
        # The descrambled token is required by the ida.omroep.nl stream API.
        token = self._get_token(video_id)
        formats = []
        pubopties = metadata.get('pubopties')
        if pubopties:
            quality = qualities(['adaptive', 'wmv_sb', 'h264_sb', 'wmv_bb', 'h264_bb', 'wvc1_std', 'h264_std'])
            for format_id in pubopties:
                format_info = self._download_json(
                    'http://ida.omroep.nl/odi/?prid=%s&puboptions=%s&adaptive=yes&token=%s'
                    % (video_id, format_id, token),
                    video_id, 'Downloading %s JSON' % format_id)
                if format_info.get('error_code', 0) or format_info.get('errorcode', 0):
                    continue
                streams = format_info.get('streams')
                if streams:
                    video_info = self._download_json(
                        streams[0] + '&type=json',
                        video_id, 'Downloading %s stream JSON' % format_id)
                else:
                    video_info = format_info
                video_url = video_info.get('url')
                if not video_url:
                    continue
                if format_id == 'adaptive':
                    formats.extend(self._extract_m3u8_formats(video_url, video_id))
                else:
                    formats.append({
                        'url': video_url,
                        'format_id': format_id,
                        'quality': quality(format_id),
                    })
        # Also collect direct streams listed in the metadata itself; '.asf'
        # entries are ASX playlists that must be resolved to a real URL first.
        streams = metadata.get('streams')
        if streams:
            for i, stream in enumerate(streams):
                stream_url = stream.get('url')
                if not stream_url:
                    continue
                if '.asf' not in stream_url:
                    formats.append({
                        'url': stream_url,
                        'quality': stream.get('kwaliteit'),
                    })
                    continue
                asx = self._download_xml(
                    stream_url, video_id,
                    'Downloading stream %d ASX playlist' % i,
                    transform_source=fix_xml_ampersands)
                ref = asx.find('./ENTRY/Ref')
                if ref is None:
                    continue
                video_url = ref.get('href')
                if not video_url:
                    continue
                formats.append({
                    'url': video_url,
                    'ext': stream.get('formaat', 'asf'),
                    'quality': stream.get('kwaliteit'),
                })
        self._sort_formats(formats)
        subtitles = {}
        # tt888 == 'ja' means subtitles exist on the tt888 endpoint; they are
        # registered under the 'nl' language key.
        if metadata.get('tt888') == 'ja':
            subtitles['nl'] = [{
                'ext': 'vtt',
                'url': 'http://e.omroep.nl/tt888/%s' % video_id,
            }]
        return {
            'id': video_id,
            'title': title,
            'description': metadata.get('info'),
            'thumbnail': metadata.get('images', [{'url': None}])[-1]['url'],
            'upload_date': unified_strdate(metadata.get('gidsdatum')),
            'duration': parse_duration(metadata.get('tijdsduur')),
            'formats': formats,
            'subtitles': subtitles,
        }
class NPOLiveIE(NPOBaseIE):
    """Extractor for live channel streams under npo.nl/live/."""
    IE_NAME = 'npo.nl:live'
    _VALID_URL = r'https?://(?:www\.)?npo\.nl/live/(?P<id>.+)'
    _TEST = {
        'url': 'http://www.npo.nl/live/npo-1',
        'info_dict': {
            'id': 'LI_NEDERLAND1_136692',
            'display_id': 'npo-1',
            'ext': 'mp4',
            'title': 're:^Nederland 1 [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
            'description': 'Livestream',
            'is_live': True,
        },
        'params': {
            'skip_download': True,
        }
    }
    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        # The real live id is embedded in the page as a data-prid attribute.
        live_id = self._search_regex(
            r'data-prid="([^"]+)"', webpage, 'live id')
        metadata = self._download_json(
            'http://e.omroep.nl/metadata/%s' % live_id,
            display_id, transform_source=strip_jsonp)
        token = self._get_token(display_id)
        formats = []
        streams = metadata.get('streams')
        if streams:
            for stream in streams:
                stream_type = stream.get('type').lower()
                # smooth streaming is not supported
                if stream_type in ['ss', 'ms']:
                    continue
                stream_info = self._download_json(
                    'http://ida.omroep.nl/aapi/?stream=%s&token=%s&type=jsonp'
                    % (stream.get('url'), token),
                    display_id, 'Downloading %s JSON' % stream_type)
                if stream_info.get('error_code', 0) or stream_info.get('errorcode', 0):
                    continue
                # The API returns another JSONP document holding the final URL;
                # failures here are non-fatal so other stream types still work.
                stream_url = self._download_json(
                    stream_info['stream'], display_id,
                    'Downloading %s URL' % stream_type,
                    'Unable to download %s URL' % stream_type,
                    transform_source=strip_jsonp, fatal=False)
                if not stream_url:
                    continue
                if stream_type == 'hds':
                    f4m_formats = self._extract_f4m_formats(stream_url, display_id)
                    # f4m downloader downloads only piece of live stream
                    for f4m_format in f4m_formats:
                        f4m_format['preference'] = -1
                    formats.extend(f4m_formats)
                elif stream_type == 'hls':
                    formats.extend(self._extract_m3u8_formats(stream_url, display_id, 'mp4'))
                else:
                    formats.append({
                        'url': stream_url,
                        'preference': -10,
                    })
        self._sort_formats(formats)
        return {
            'id': live_id,
            'display_id': display_id,
            'title': self._live_title(metadata['titel']),
            'description': metadata['info'],
            'thumbnail': metadata.get('images', [{'url': None}])[-1]['url'],
            'formats': formats,
            'is_live': True,
        }
class NPORadioIE(InfoExtractor):
    """Extractor for live radio channel pages under npo.nl/radio/."""
    IE_NAME = 'npo.nl:radio'
    _VALID_URL = r'https?://(?:www\.)?npo\.nl/radio/(?P<id>[^/]+)/?$'
    _TEST = {
        'url': 'http://www.npo.nl/radio/radio-1',
        'info_dict': {
            'id': 'radio-1',
            'ext': 'mp3',
            'title': 're:^NPO Radio 1 [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
            'is_live': True,
        },
        'params': {
            'skip_download': True,
        }
    }
    @staticmethod
    def _html_get_attribute_regex(attribute):
        """Build a regex matching attribute='value' and capturing the value."""
        return r'{0}\s*=\s*\'([^\']+)\''.format(attribute)
    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # Channel name and stream description live in HTML data-* attributes.
        title = self._html_search_regex(
            self._html_get_attribute_regex('data-channel'), webpage, 'title')
        stream = self._parse_json(
            self._html_search_regex(self._html_get_attribute_regex('data-streams'), webpage, 'data-streams'),
            video_id)
        codec = stream.get('codec')
        return {
            'id': video_id,
            'url': stream['url'],
            'title': self._live_title(title),
            'acodec': codec,
            # The codec name (e.g. mp3) doubles as the file extension here.
            'ext': codec,
            'is_live': True,
        }
class NPORadioFragmentIE(InfoExtractor):
    """Extractor for single recorded radio fragments on npo.nl."""
    IE_NAME = 'npo.nl:radio:fragment'
    _VALID_URL = r'https?://(?:www\.)?npo\.nl/radio/[^/]+/fragment/(?P<id>\d+)'
    _TEST = {
        'url': 'http://www.npo.nl/radio/radio-5/fragment/174356',
        'md5': 'dd8cc470dad764d0fdc70a9a1e2d18c2',
        'info_dict': {
            'id': '174356',
            'ext': 'mp3',
            'title': 'Jubileumconcert Willeke Alberti',
        },
    }
    def _real_extract(self, url):
        audio_id = self._match_id(url)
        webpage = self._download_webpage(url, audio_id)
        # The fragment's own anchor carries the human-readable title.
        title = self._html_search_regex(
            r'href="/radio/[^/]+/fragment/%s" title="([^"]+)"' % audio_id,
            webpage, 'title')
        audio_url = self._search_regex(
            r"data-streams='([^']+)'", webpage, 'audio url')
        return {
            'id': audio_id,
            'url': audio_url,
            'title': title,
        }
class VPROIE(NPOIE):
    """Extractor for vpro.nl / tegenlicht.vpro.nl pages.

    Collects every data-media-id on the page into a playlist; bare ids are
    wrapped as 'npo:<id>' URLs (handled by NPOIE via its _VALID_URL) while
    full http URLs (e.g. youtube embeds) are passed through unchanged.
    """
    IE_NAME = 'vpro'
    _VALID_URL = r'https?://(?:www\.)?(?:tegenlicht\.)?vpro\.nl/(?:[^/]+/){2,}(?P<id>[^/]+)\.html'
    _TESTS = [
        {
            'url': 'http://tegenlicht.vpro.nl/afleveringen/2012-2013/de-toekomst-komt-uit-afrika.html',
            'md5': 'f8065e4e5a7824068ed3c7e783178f2c',
            'info_dict': {
                'id': 'VPWON_1169289',
                'ext': 'm4v',
                'title': 'De toekomst komt uit Afrika',
                'description': 'md5:52cf4eefbc96fffcbdc06d024147abea',
                'upload_date': '20130225',
            },
        },
        {
            'url': 'http://www.vpro.nl/programmas/2doc/2015/sergio-herman.html',
            'info_dict': {
                'id': 'sergio-herman',
                'title': 'Sergio Herman: Fucking perfect',
            },
            'playlist_count': 2,
        },
        {
            # playlist with youtube embed
            'url': 'http://www.vpro.nl/programmas/2doc/2015/education-education.html',
            'info_dict': {
                'id': 'education-education',
                'title': '2Doc',
            },
            'playlist_count': 2,
        }
    ]
    def _real_extract(self, url):
        playlist_id = self._match_id(url)
        webpage = self._download_webpage(url, playlist_id)
        entries = [
            self.url_result('npo:%s' % video_id if not video_id.startswith('http') else video_id)
            for video_id in re.findall(r'data-media-id="([^"]+)"', webpage)
        ]
        # Prefer the cleaner <title> form; fall back to the og:title meta tag.
        playlist_title = self._search_regex(
            r'<title>\s*([^>]+?)\s*-\s*Teledoc\s*-\s*VPRO\s*</title>',
            webpage, 'playlist title', default=None) or self._og_search_title(webpage)
        return self.playlist_result(entries, playlist_id, playlist_title)
class WNLIE(InfoExtractor):
    """Playlist extractor for multi-part videos on omroepwnl.nl detail pages."""
    _VALID_URL = r'https?://(?:www\.)?omroepwnl\.nl/video/detail/(?P<id>[^/]+)__\d+'
    _TEST = {
        'url': 'http://www.omroepwnl.nl/video/detail/vandaag-de-dag-6-mei__060515',
        'info_dict': {
            'id': 'vandaag-de-dag-6-mei',
            'title': 'Vandaag de Dag 6 mei',
        },
        'playlist_count': 4,
    }
    def _real_extract(self, url):
        playlist_id = self._match_id(url)
        webpage = self._download_webpage(url, playlist_id)
        # One entry per 'Deel N' (part) link carrying the js-mid class.
        # NOTE(review): the first capture is the anchor's href, which is fed
        # into 'npo:%s' — presumably these hrefs are bare media ids rather
        # than paths; confirm against a live page before refactoring.
        entries = [
            self.url_result('npo:%s' % video_id, 'NPO')
            for video_id, part in re.findall(
                r'<a[^>]+href="([^"]+)"[^>]+class="js-mid"[^>]*>(Deel \d+)', webpage)
        ]
        playlist_title = self._html_search_regex(
            r'(?s)<h1[^>]+class="subject"[^>]*>(.+?)</h1>',
            webpage, 'playlist title')
        return self.playlist_result(entries, playlist_id, playlist_title)
| 35.149284 | 111 | 0.490226 |
bd2c32ace9dfa9717ebd3eb59134b94b14132f19 | 20 | py | Python | arboretum_project/arboretum/__init__.py | m-libbrecht/django-arboretum | 11b97254ed9885b03d25d99188157c3f79a14a4f | [
"MIT"
] | 3 | 2017-03-11T04:25:22.000Z | 2021-05-29T01:44:38.000Z | arboretum_project/arboretum/__init__.py | m-libbrecht/django-arboretum | 11b97254ed9885b03d25d99188157c3f79a14a4f | [
"MIT"
] | null | null | null | arboretum_project/arboretum/__init__.py | m-libbrecht/django-arboretum | 11b97254ed9885b03d25d99188157c3f79a14a4f | [
"MIT"
] | 1 | 2016-10-26T10:44:47.000Z | 2016-10-26T10:44:47.000Z | __author__ = 'mark'
| 10 | 19 | 0.7 |
ce4a14abfa2caa8dd427db1e8cdcf8efcb5a5b4f | 732 | py | Python | gamefixes/39210.py | 0xReki/protonfixes | 9aecb37eb82bb238b50b5a8667e03906fb70da99 | [
"BSD-2-Clause"
] | null | null | null | gamefixes/39210.py | 0xReki/protonfixes | 9aecb37eb82bb238b50b5a8667e03906fb70da99 | [
"BSD-2-Clause"
] | null | null | null | gamefixes/39210.py | 0xReki/protonfixes | 9aecb37eb82bb238b50b5a8667e03906fb70da99 | [
"BSD-2-Clause"
] | null | null | null | """ Game fix for FFXIV
"""
#pylint: disable=C0103
from protonfixes import util
import os
def main():
    """Apply FFXIV launch fixes: NOSTEAM flag and optional XIVLauncher swap."""
    env = os.environ
    # Drop Steam's '-issteam' argument so the game boots without the Steam
    # startup path.
    if 'NOSTEAM' in env:
        util.replace_command('-issteam', '')
    # When requested, run XIVLauncher instead of the stock launcher; it needs
    # a game drive plus the .NET/VC runtimes installed into the prefix.
    if 'XL_WINEONLINUX' in env:
        util.set_environment('PROTON_SET_GAME_DRIVE', '1')
        util.protontricks_proton_5('dotnet48')
        util.protontricks('vcrun2019')
        util.replace_command('common/FINAL FANTASY XIV Online/boot/ffxivboot.exe', 'compatdata/39210/pfx/drive_c/users/steamuser/AppData/Local/XIVLauncher/XIVLauncher.exe')
        util.replace_command('-issteam', '')
| 33.272727 | 173 | 0.668033 |
0a09d5482561bc297a625d45168ad8067db93e5a | 1,203 | py | Python | backend/backend/urls.py | Savage-Aim/app | ecb3b7635caba552ded17172c0aa3535b5f3b98b | [
"MIT"
] | null | null | null | backend/backend/urls.py | Savage-Aim/app | ecb3b7635caba552ded17172c0aa3535b5f3b98b | [
"MIT"
] | 1 | 2022-02-07T02:58:14.000Z | 2022-02-07T02:58:14.000Z | backend/backend/urls.py | Savage-Aim/app | ecb3b7635caba552ded17172c0aa3535b5f3b98b | [
"MIT"
] | null | null | null | """backend URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from allauth.socialaccount.providers.discord.urls import urlpatterns as discord_urls
from django.contrib import admin
from django.contrib.auth.views import LogoutView
from django.views.generic import TemplateView
from django.urls import path, include
# Concrete routes for the project; order matters for Django's URL resolution.
patterns = [
    path('admin/', admin.site.urls),
    # Project API, namespaced as 'api' via the (module, app_name) tuple form.
    path('api/', include(('api.urls', 'api'))),
    # Auth stuff (TODO - replace this because it's sorta workaroundy)
    path('accounts/', include(discord_urls)),
    path('logout/', LogoutView.as_view()),
]
# Root URLconf: every route above is exposed under the single 'backend/' prefix.
urlpatterns = [
    path('backend/', include(patterns)),
]
| 35.382353 | 84 | 0.715711 |
7fdf1c2aaaa10be467372facd148667e309e2613 | 567 | py | Python | Testing/TestInfra/dashboard.py | ezeasorekene/DevOps | f72478c9ca6295f236f0be87db3052b28d0f567e | [
"MIT"
] | 18 | 2015-03-19T12:50:32.000Z | 2021-03-03T18:35:56.000Z | Testing/TestInfra/dashboard.py | ezeasorekene/DevOps | f72478c9ca6295f236f0be87db3052b28d0f567e | [
"MIT"
] | 209 | 2015-01-05T14:28:48.000Z | 2020-08-19T18:13:57.000Z | Testing/TestInfra/dashboard.py | AAROC/DevOps | ae5e2edd9248c108fe4f89fe5527010b84956f08 | [
"MIT"
] | 36 | 2015-03-05T09:38:17.000Z | 2020-06-25T03:44:44.000Z | # Infrastructure tests on grafana dashboards provisioned with
# the grafana.yml playbook and grafana role.
def test_package(host):
    """The grafana package must be installed on the target host."""
    grafana_pkg = host.package('grafana')
    assert grafana_pkg.is_installed
def test_init(host):
    """grafana.ini must exist with root:grafana ownership and mode 0640."""
    ini = host.file('/etc/grafana/grafana.ini')
    assert ini.exists
    assert ini.user == 'root'
    assert ini.group == 'grafana'
    # Compare the numeric mode directly: on Python 3, oct() returns '0o640',
    # so the original comparison against the string '0640' could never pass.
    assert ini.mode == 0o640
def test_service(host):
    """grafana-server must be running, enabled, and listening on :3000."""
    svc = host.service('grafana-server')
    assert svc.is_running
    assert svc.is_enabled
    grafana_socket = host.socket('tcp://192.168.0.117:3000')
    assert grafana_socket.is_listening
fcb14a203ff747cfaf26e1413369445f3788b61a | 699 | py | Python | src/betamax/__init__.py | santosh653/betamax | 73e601d34d692255d826b2fceec6d9bc8b4c0420 | [
"Apache-2.0"
] | 226 | 2017-10-19T20:46:53.000Z | 2022-03-11T08:11:10.000Z | src/betamax/__init__.py | santosh653/betamax | 73e601d34d692255d826b2fceec6d9bc8b4c0420 | [
"Apache-2.0"
] | 93 | 2015-01-01T15:47:33.000Z | 2017-10-03T14:15:50.000Z | src/betamax/__init__.py | santosh653/betamax | 73e601d34d692255d826b2fceec6d9bc8b4c0420 | [
"Apache-2.0"
] | 44 | 2015-03-25T19:42:13.000Z | 2017-09-08T17:56:16.000Z | """
betamax
=======
See https://betamax.readthedocs.io/ for documentation.
:copyright: (c) 2013-2018 by Ian Stapleton Cordasco
:license: Apache 2.0, see LICENSE for more details
"""
from .decorator import use_cassette
from .exceptions import BetamaxError
from .matchers import BaseMatcher
from .recorder import Betamax
from .serializers import BaseSerializer
# Public API re-exported by `from betamax import ...`.
__all__ = ('BetamaxError', 'Betamax', 'BaseMatcher', 'BaseSerializer',
           'use_cassette')
# Package metadata.
__author__ = 'Ian Stapleton Cordasco'
__copyright__ = 'Copyright 2013-2018 Ian Stapleton Cordasco'
__license__ = 'Apache 2.0'
__title__ = 'betamax'
__version__ = '0.8.2'
# Numeric (major, minor, patch) form of __version__ for easy comparison.
__version_info__ = tuple(int(i) for i in __version__.split('.'))
| 25.888889 | 70 | 0.745351 |
e8427068f7b5d23ea9b411087739b5bd2b557538 | 9,972 | py | Python | src/windows.py | deepansh27/pylyrics | 53e087701c86f910fd309f479596c489ccecc7d6 | [
"Unlicense"
] | 219 | 2017-03-11T16:12:08.000Z | 2022-03-06T06:26:03.000Z | src/windows.py | adrianstaniec/Instant-Lyrics | 29e2a817fee3853a6a0f0bdc825145fd4004d1e1 | [
"Unlicense"
] | 18 | 2017-03-12T18:32:55.000Z | 2019-05-10T17:33:30.000Z | src/windows.py | adrianstaniec/Instant-Lyrics | 29e2a817fee3853a6a0f0bdc825145fd4004d1e1 | [
"Unlicense"
] | 47 | 2017-03-12T06:37:57.000Z | 2021-03-03T23:52:50.000Z | import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk
import dbus
import threading
try:
import configparser
except ImportError:
import ConfigParser as configparser
from src.lyrics import get_lyrics
from src.settings import CONFIG_PATH
from . import utils
class LyricsWindow(Gtk.Window):
    def __init__(self, type, app):
        """Build the lyrics window.

        `type` (shadows the builtin; kept for caller compatibility) selects
        the mode: "get" adds a manual song-entry row above the lyrics view.
        `app` supplies the persisted Config (window width/height).
        """
        Gtk.Window.__init__(self, title="Lyrics")
        self.set_icon_from_file(
            utils.get_icon_path('../icons/instant-lyrics-32.png'))
        self.set_border_width(20)
        # Initial size comes from the user's saved preferences.
        self.set_default_size(
            int(app.Config.get('Main', 'window width')),
            int(app.Config.get('Main', 'window height')))
        self.set_position(Gtk.WindowPosition.CENTER)
        self.main_box = Gtk.Box(
            orientation=Gtk.Orientation.VERTICAL, spacing=6)
        self.main_box.set_size_request(
            int(app.Config.get('Main', 'window width')),
            int(app.Config.get('Main', 'window height')))
        # "get" mode shows the query entry; other modes go straight to lyrics.
        if(type == "get"):
            entry_hbox = self.create_input_box()
            self.main_box.pack_start(entry_hbox, False, False, 10)
        lyrics_vbox = self.create_lyrics_box(app)
        self.main_box.pack_start(lyrics_vbox, True, True, 0)
        # Scroll vertically only; lyrics lines wrap instead of widening.
        scrolled = Gtk.ScrolledWindow()
        scrolled.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC)
        scrolled.add(self.main_box)
        self.add(scrolled)
        self.show_all()
    def on_key_release(self, widget, ev, data=None):
        """Signal handler: fetch lyrics when the Enter key is released."""
        if ev.keyval == Gdk.KEY_Return:
            self.fetch_lyrics()
    def create_input_box(self):
        """Build the horizontal row holding the query entry and submit button.

        Side effect: stores the entry widget on self.input so fetch_lyrics
        can read the query later.
        """
        entry_hbox = Gtk.Box(
            orientation=Gtk.Orientation.HORIZONTAL, spacing=50)
        entry_hbox.set_property("margin", 10)
        self.input = Gtk.Entry()
        self.input.set_text("song/artist")
        # Pressing Enter inside the entry triggers a fetch (on_key_release).
        self.input.connect("key-release-event", self.on_key_release)
        entry_hbox.pack_start(self.input, True, True, 0)
        submit = Gtk.Button.new_with_label("Get Lyrics")
        submit.connect("clicked", self.fetch_lyrics)
        entry_hbox.pack_start(submit, True, True, 0)
        return entry_hbox
def create_lyrics_box(self, app):
lyrics_vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self.title = Gtk.Label()
self.title.set_justify(Gtk.Justification.CENTER)
self.lyrics = Gtk.Label()
self.lyrics.set_justify(Gtk.Justification.CENTER)
self.lyrics.set_property("margin_left", 40)
self.lyrics.set_property("margin_right", 40)
self.lyrics.set_line_wrap(True)
self.spinner = Gtk.Spinner()
lyrics_vbox.pack_start(self.title, False, False, 5)
lyrics_vbox.pack_start(self.spinner, False, False, 5)
lyrics_vbox.pack_start(self.lyrics, False, False, 5)
lyrics_vbox.set_size_request(
int(app.Config.get('Main', 'window width')),
int(app.Config.get('Main', 'window height')))
return lyrics_vbox
def put_lyrics(self, song):
self.spinner.start()
self.lyrics.set_text("")
lyrics = get_lyrics(song)
self.lyrics.set_text(lyrics)
self.spinner.stop()
def fetch_lyrics(self, source=None):
input = self.input.get_text()
text = "<b><big>" + input + "</big></b>"
self.title.set_markup(text)
thread = threading.Thread(
target=self.put_lyrics, kwargs={'song': input})
thread.daemon = True
thread.start()
def get_spotify_song_data(self):
session_bus = dbus.SessionBus()
spotify_bus = session_bus.get_object(
"org.mpris.MediaPlayer2.spotify", "/org/mpris/MediaPlayer2")
spotify_properties = dbus.Interface(
spotify_bus, "org.freedesktop.DBus.Properties")
metadata = spotify_properties.Get(
"org.mpris.MediaPlayer2.Player", "Metadata")
title = metadata['xesam:title'].encode(
'utf-8').decode('utf-8').replace("&", "&")
artist = metadata['xesam:artist'][0].encode(
'utf-8').decode('utf-8').replace("&", "&")
return {'title': title, 'artist': artist}
def get_spotify(self):
try:
song_data = self.get_spotify_song_data()
song = song_data['title']
artist = song_data['artist']
except:
self.title.set_markup("<big><b>Error</b></big>")
message = ("Could not get current spotify song\n"
"Either spotify is not running or\n"
"no song is playing on spotify.\n\n"
"Else, report an issue <a href=\"https://"
"github.com/bhrigu123/Instant-Lyrics\" "
"title=\"Repo url\">here</a>")
self.lyrics.set_markup(message)
return
title = "<b><big>" + song + "</big>\n" + artist + "</b>"
self.title.set_markup(title)
self.put_lyrics(song + " " + artist)
class PreferenceWindow(Gtk.Window):
    """Preferences dialog for Instant-Lyrics.

    Lets the user edit the lyrics-window size, reset the configuration to
    defaults and create a desktop launcher entry.
    """

    def __init__(self, app):
        """Build the dialog; ``app.Config`` is the application's ConfigParser."""
        # NOTE(review): "Prefenreces" typo is in the original runtime string
        # and is preserved here on purpose.
        Gtk.Window.__init__(self, title="Instant-Lyrics Prefenreces")
        self.set_icon_from_file(utils.get_icon_path('../icons/instant-lyrics-32.png'))
        self.set_border_width(20)
        #self.set_default_size(350, 550)
        self.set_position(Gtk.WindowPosition.CENTER)
        self.main_box = Gtk.Box(
            orientation=Gtk.Orientation.VERTICAL, spacing=6)
        # Save starts disabled; entry_change() enables it on any edit.
        self.save = Gtk.Button.new_with_label("Save")
        self.save.set_sensitive(False)
        self.save.connect("clicked", self.save_config, app)
        pref_box = self.create_pref_box(app)
        self.main_box.pack_start(pref_box, True, True, 0)
        reset = Gtk.Button.new_with_label("Reset to default")
        reset.connect("clicked", self.reset_config, app)
        button_hbox = Gtk.Box(spacing=10)
        button_hbox.pack_start(reset, True, True, 0)
        button_hbox.pack_start(self.save, True, True, 0)
        desktop_entry = Gtk.Button.new_with_label("Create Desktop Entry")
        desktop_entry.connect("clicked", self.create_desktop_entry)
        # Label used for feedback messages (validation errors, confirmations).
        self.message = Gtk.Label()
        self.main_box.pack_start(button_hbox, False, False, 0)
        self.main_box.pack_start(desktop_entry, True, True, 0)
        self.main_box.pack_start(self.message, True, True, 0)
        self.add(self.main_box)
        self.show_all()

    def create_pref_box(self, app):
        """Build the list of preference rows (currently width and height)."""
        listbox = Gtk.ListBox()
        listbox.set_selection_mode(Gtk.SelectionMode.NONE)
        # Row: lyrics window width.
        row = Gtk.ListBoxRow()
        hbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=50)
        row.add(hbox)
        width = Gtk.Label("Lyrics window width", xalign=0)
        self.width_val = Gtk.Entry()
        self.width_val.set_text(app.Config.get('Main', 'window width'))
        self.width_val.connect("changed", self.entry_change)
        hbox.pack_start(width, True, True, 0)
        hbox.pack_start(self.width_val, False, True, 0)
        listbox.add(row)
        # Row: lyrics window height.
        row = Gtk.ListBoxRow()
        hbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=50)
        row.add(hbox)
        height = Gtk.Label("Lyrics window height", xalign=0)
        self.height_val = Gtk.Entry()
        self.height_val.set_text(app.Config.get('Main', 'window height'))
        self.height_val.connect("changed", self.entry_change)
        hbox.pack_start(height, True, True, 0)
        hbox.pack_start(self.height_val, False, True, 0)
        listbox.add(row)
        """ TODO: autostart
        row = Gtk.ListBoxRow()
        hbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=50)
        row.add(hbox)
        label = Gtk.Label("Auto start", xalign=0)
        self.switch = Gtk.Switch()
        self.switch.connect("state-set", self.entry_change)
        self.switch.props.valign = Gtk.Align.CENTER
        switch_val = app.Config.getboolean('Main', 'auto start')
        if(switch_val):
            self.switch.set_active(True)
        else:
            self.switch.set_active(False)
        hbox.pack_start(label, True, True, 0)
        hbox.pack_start(self.switch, False, True, 0)
        listbox.add(row)
        """
        return listbox

    def save_config(self, source, *arg):
        """Validate the entries and persist them to CONFIG_PATH.

        ``arg[0]`` is the application object (extra argument from connect()).
        """
        self.save.set_sensitive(False)
        self.message.set_markup("")
        app = arg[0]
        new_width = self.width_val.get_text()
        new_height = self.height_val.get_text()
        if(new_width.isdigit() and new_height.isdigit()):
            app.Config.set('Main', "window width", new_width)
            app.Config.set('Main', "window height", new_height)
            with open(CONFIG_PATH, 'w') as config_file:
                app.Config.write(config_file)
            return
        # Fall through: at least one entry was not a non-negative integer.
        msg = ("Invalid values of height and width\n"
               "Please add valid positive integers")
        self.show_message(msg)

    def entry_change(self, source):
        """Enable Save as soon as any preference widget changes."""
        self.save.set_sensitive(True)

    def reset_config(self, source, *arg):
        """Restore the default configuration and refresh the entry widgets."""
        utils.create_default_config()
        app = arg[0]
        app.Config = utils.get_config()
        self.width_val.set_text(app.Config.get('Main', 'window width'))
        self.height_val.set_text(app.Config.get('Main', 'window height'))
        self.save.set_sensitive(False)

    def create_desktop_entry(self, source):
        """Create a .desktop launcher entry and confirm to the user."""
        utils.create_desktop_entry()
        msg = ("Desktop entry created. You can now start the\n"
               "application from your Applications Launcher.\n\n"
               "<small>If you ever change the location "
               "of the Instant-Lyrics\nfolder, you will "
               "need to create the Desktop Entry\nfrom "
               "here again.</small>")
        self.show_message(msg)

    def show_message(self, msg):
        """Display ``msg`` (Pango markup) in the feedback label."""
        self.message.set_markup(msg)
| 34.745645 | 86 | 0.609206 |
1ab08164cdfbe01d6947c877830b567cd42b34ce | 2,651 | py | Python | Task 1 - Insurance Selling/Code/Task1_SVM.py | AndesPooh258/Data-Mining-Project | 5fc3fc7bf156a9bf7e3aaf7674dcdc2d97d9a882 | [
"FTL"
] | null | null | null | Task 1 - Insurance Selling/Code/Task1_SVM.py | AndesPooh258/Data-Mining-Project | 5fc3fc7bf156a9bf7e3aaf7674dcdc2d97d9a882 | [
"FTL"
] | null | null | null | Task 1 - Insurance Selling/Code/Task1_SVM.py | AndesPooh258/Data-Mining-Project | 5fc3fc7bf156a9bf7e3aaf7674dcdc2d97d9a882 | [
"FTL"
] | null | null | null | import pandas as pd
from sklearn import preprocessing
from sklearn.svm import SVC
import evaluateTask1
# ---------------------------------------------------------------------------
# Train an SVM on the insurance cross-sell data and write test predictions.
# ---------------------------------------------------------------------------

def _encode_categoricals(frame):
    """Replace the string categorical columns with integer codes, in place.

    Shared by the train and test frames so both use the same encoding.
    """
    frame.loc[frame["Gender"] == "Male", "Gender"] = 0
    frame.loc[frame["Gender"] == "Female", "Gender"] = 1
    frame.loc[frame["Vehicle_Age"] == "< 1 Year", "Vehicle_Age"] = 0
    frame.loc[frame["Vehicle_Age"] == "1-2 Year", "Vehicle_Age"] = 1
    frame.loc[frame["Vehicle_Age"] == "> 2 Years", "Vehicle_Age"] = 2
    frame.loc[frame["Vehicle_Damage"] == "No", "Vehicle_Damage"] = 0
    frame.loc[frame["Vehicle_Damage"] == "Yes", "Vehicle_Damage"] = 1

# import csv data
data = pd.read_csv('insurance-train.csv')
data_test = pd.read_csv('insurance-test.csv')
df = pd.DataFrame(data)

# translate data
_encode_categoricals(df)
df = df[:540]  # select a number of records to be used

# feature selection
columnsIncluded = ["Age", "Region_Code", "Previously_Insured", "Vehicle_Age",
                   "Vehicle_Damage", "Annual_Premium", "Vintage"]
df_dataArray = df[columnsIncluded].copy()  # df without "Response"
dataArray = df_dataArray.values
dataArray_scaled = preprocessing.scale(dataArray)
target = df["Response"].tolist()
target_names = ["0", "1"]
dataset = {
    "data": dataArray_scaled,
    "target": target,
    "feature_names": columnsIncluded,
    "target_names": target_names
}

# prepare the test features with the same encoding and scaling
df_test = pd.DataFrame(data_test)
_encode_categoricals(df_test)
df_test_Array = df_test[columnsIncluded].copy()
Array_scaled = preprocessing.scale(df_test_Array)

##########################################################
# Train the model.
# C can be any positive number; kernel: {'linear', 'poly', 'rbf', 'sigmoid',
# 'precomputed'} (default 'rbf'); gamma: {'scale', 'auto'} or float.
clf = SVC(C=10000, degree=3, kernel='poly', gamma='auto', probability=False, cache_size=1000)
clf.fit(dataset['data'], dataset['target'])

# predict and output the test result
predictedTestResult = clf.predict(Array_scaled)

# output to corresponding file
# .copy() avoids pandas' SettingWithCopyWarning when inserting below
df_testOutput = df_test[["id"]].copy()
df_testOutput.insert(1, "Response", predictedTestResult, True)
df_testOutput.to_csv("submission_1_SVM.csv", index=False)

# compute f1 score
f1_score_result = evaluateTask1.f1_score(predictedTestResult)
print("f1-score: " + str(f1_score_result))
eaeb9f0108ae40decbb65ba725a849e3308a52b3 | 1,463 | py | Python | util.py | kex5n/Vehicles-Dispatch-Simulator | d0cca03fbf56e4b0ceeef8dafc59de105c1d4507 | [
"MIT"
] | null | null | null | util.py | kex5n/Vehicles-Dispatch-Simulator | d0cca03fbf56e4b0ceeef8dafc59de105c1d4507 | [
"MIT"
] | null | null | null | util.py | kex5n/Vehicles-Dispatch-Simulator | d0cca03fbf56e4b0ceeef8dafc59de105c1d4507 | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
from math import asin, cos, radians, sin, sqrt
import numpy as np
from domain.demand_prediction_mode import DemandPredictionMode
def haversine(lon1, lat1, lon2, lat2) -> float:
    """
    Calculate the great circle distance between two points
    on the earth (specified in decimal degrees).

    :param lon1: longitude of the first point, decimal degrees
    :param lat1: latitude of the first point, decimal degrees
    :param lon2: longitude of the second point, decimal degrees
    :param lat2: latitude of the second point, decimal degrees
    :return: distance in kilometres
    """
    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
    # haversine formula; use math functions consistently (the original mixed
    # np.sin and math.sin on scalars, pulling in numpy for no benefit)
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    c = 2 * asin(sqrt(a))
    r = 6371  # mean Earth radius in km
    return c * r
class DataModule:
    """Walks day-by-day through the simulation period.

    Training mode starts at 2016-06-01 and runs until 2016-06-23;
    any other mode starts at 2016-06-24 and runs to the end of June.
    """

    def __init__(self, demand_prediction_mode: DemandPredictionMode):
        self.__demand_prediction_mode = demand_prediction_mode
        if demand_prediction_mode == DemandPredictionMode.TRAIN:
            self.__date = datetime(2016, 6, 1)
        else:
            self.__date = datetime(2016, 6, 24)

    @property
    def date(self) -> str:
        """Current date as a zero-padded 'MMDD' string."""
        return "{:02d}{:02d}".format(self.__date.month, self.__date.day)

    def next(self) -> bool:
        """Advance one day; return False once the period is exhausted."""
        self.__date += timedelta(days=1)
        if self.__demand_prediction_mode == DemandPredictionMode.TRAIN:
            return self.__date.day != 24
        return self.__date.month == 6

    def __str__(self) -> str:
        return self.date
| 29.857143 | 78 | 0.62201 |
e1ad132cb0a16089030d6195dfa234895682e29e | 13,372 | py | Python | train.py | hanskrupakar/MRI-tumor-segmentation-Brats | 5a1a51b159a556261cd485db45f4d705974c86f4 | [
"MIT"
] | null | null | null | train.py | hanskrupakar/MRI-tumor-segmentation-Brats | 5a1a51b159a556261cd485db45f4d705974c86f4 | [
"MIT"
] | null | null | null | train.py | hanskrupakar/MRI-tumor-segmentation-Brats | 5a1a51b159a556261cd485db45f4d705974c86f4 | [
"MIT"
] | null | null | null | import numpy as np
import tf_models
from sklearn.preprocessing import scale
import tensorflow as tf
from tensorflow.keras.backend import learning_phase
from tensorflow.keras.layers import concatenate, Conv3D
from nibabel import load as load_nii
import os
import argparse
import keras
import glob
import Queue
from threading import Thread # Thread OOMs out for some reason. Have to debug!!
import time
from partition_brats_dir import get_dataset_dirnames
def parse_inputs():
    """Parse command-line training options and return them as a plain dict."""
    ap = argparse.ArgumentParser(description='train the model')
    # Data location and checkpoint paths.
    ap.add_argument('-r', '--root-path', dest='root_path',
                    default='/media/lele/Data/spie/Brats17TrainingData/HGG')
    ap.add_argument('-sp', '--save-path', dest='save_path', default='dense24_correction')
    ap.add_argument('-lp', '--load-path', dest='load_path', default='dense24_correction')
    ap.add_argument('-ng', '--gpu-ids', dest='gpu_ids', default=[0], nargs='+', type=int)
    # Patch geometry (input patch sizes, offsets and predicted-patch size).
    ap.add_argument('-ow', '--offset-width', dest='offset_w', type=int, default=12)
    ap.add_argument('-oh', '--offset-height', dest='offset_h', type=int, default=12)
    ap.add_argument('-oc', '--offset-channel', dest='offset_c', nargs='+', type=int, default=12)
    ap.add_argument('-ws', '--width-size', dest='wsize', type=int, default=38)
    ap.add_argument('-hs', '--height-size', dest='hsize', type=int, default=38)
    ap.add_argument('-cs', '--channel-size', dest='csize', type=int, default=38)
    ap.add_argument('-ps', '--pred-size', dest='psize', type=int, default=12)
    # Optimisation and model selection.
    ap.add_argument('-bs', '--batch-size', dest='batch_size', type=int, default=2)
    ap.add_argument('-e', '--num-epochs', dest='num_epochs', type=int, default=5)
    ap.add_argument('-c', '--continue-training', dest='continue_training', type=bool, default=False)
    ap.add_argument('-mn', '--model_name', dest='model_name', type=str, default='dense24')
    ap.add_argument('-nc', '--n4correction', dest='correction', type=bool, default=False)
    return vars(ap.parse_args())
# Parse CLI options once at import time; the functions below read this global.
options = parse_inputs()
def acc_tf(y_pred, y_true):
    """Mean categorical accuracy (in percent) between two one-hot tensors."""
    pred_cls = tf.cast(tf.argmax(y_pred, -1), tf.int32)
    true_cls = tf.cast(tf.argmax(y_true, -1), tf.int32)
    hits = tf.cast(tf.equal(pred_cls, true_cls), tf.float32)
    return 100 * tf.reduce_mean(hits)
def get_patches_3d(data, labels, centers, hsize, wsize, csize, psize, preprocess=True):
    """Extract 3D input patches and their (smaller, centred) label patches.

    :param data: 4D array (H, W, C, modalities); H/W assumed 240, C assumed 155
    :param labels: 3D label volume aligned with ``data``
    :param centers: (3, N) array of patch-centre coordinates
    :param hsize: input patch height
    :param wsize: input patch width
    :param csize: input patch depth (channel/slice axis)
    :param psize: predicted (label) patch edge, centred inside the input patch
    :param preprocess: unused; kept for interface compatibility
    :return: (patches_x, patches_y) numpy arrays
    """
    patches_x, patches_y = [], []
    # Floor division keeps the offsets integral under Python 3 as well;
    # plain '/' yielded floats there and broke the slicing below.
    offset_p = (hsize - psize) // 2
    for i in range(len(centers[0])):
        h, w, c = centers[0, i], centers[1, i], centers[2, i]
        # Clamp so the patch lies fully inside the 240x240x155 volume.
        h_beg = min(max(0, h - hsize // 2), 240 - hsize)
        w_beg = min(max(0, w - wsize // 2), 240 - wsize)
        c_beg = min(max(0, c - csize // 2), 155 - csize)
        ph_beg = h_beg + offset_p
        pw_beg = w_beg + offset_p
        pc_beg = c_beg + offset_p
        vox = data[h_beg:h_beg + hsize, w_beg:w_beg + wsize, c_beg:c_beg + csize, :]
        vox_labels = labels[ph_beg:ph_beg + psize, pw_beg:pw_beg + psize, pc_beg:pc_beg + psize]
        patches_x.append(vox)
        patches_y.append(vox_labels)
    return np.array(patches_x), np.array(patches_y)
def positive_ratio(x):
    """Fraction of strictly positive entries in array ``x``."""
    return np.count_nonzero(np.greater(x, 0)) / float(np.prod(x.shape))
def norm(image):
    """Standardise an image using the mean/std of its nonzero voxels only."""
    image = np.squeeze(image)
    nonzero_vals = image[np.nonzero(image)]
    mu = nonzero_vals.mean()
    sigma = nonzero_vals.std()
    return (image - mu) / sigma
def segmentation_loss(y_true, y_pred, n_classes):
    """Mean softmax cross-entropy over all voxels, flattened to (N, classes)."""
    flat_true = tf.reshape(y_true, (-1, n_classes))
    flat_pred = tf.reshape(y_pred, (-1, n_classes))
    xent = tf.nn.softmax_cross_entropy_with_logits(labels=flat_true,
                                                   logits=flat_pred)
    return tf.reduce_mean(xent)
def vox_preprocess(vox):
    """Feature-wise standardise a voxel block, preserving its shape."""
    original_shape = vox.shape
    flattened = np.reshape(vox, (-1, original_shape[-1]))
    flattened = scale(flattened, axis=0)
    return np.reshape(flattened, original_shape)
def one_hot(y, num_classes):
    """Convert sparse labels to a (len(y), num_classes) one-hot float matrix."""
    return np.eye(num_classes)[y]
def dice_coef_np(y_true, y_pred, num_classes):
    """
    Per-class Dice coefficient between two sparse-label arrays.

    :param y_true: sparse labels
    :param y_pred: sparse labels
    :param num_classes: number of classes
    :return: length-``num_classes`` array with one Dice score per class
    """
    true_flat = y_true.astype(int).flatten()
    pred_flat = y_pred.astype(int).flatten()
    # One-hot encode both label vectors (rows of the identity matrix).
    identity = np.eye(num_classes)
    true_oh = identity[true_flat]
    pred_oh = identity[pred_flat]
    intersection = np.sum(true_oh * pred_oh, axis=0)
    return (2. * intersection) / (np.sum(true_oh, axis=0) + np.sum(pred_oh, axis=0))
def vox_generator(all_files, n_pos, n_neg):
    """Infinite generator over patients yielding
    (normalised 4-modality volume, label volume, shuffled patch centres).

    :param all_files: patient directory names under options['root_path']
    :param n_pos: max number of tumour-voxel centres sampled per patient
    :param n_neg: max number of background (brain, non-tumour) centres
    """
    path = options['root_path']
    while 1:
        for file in all_files:
            # Resolve the four modality files; names differ depending on
            # whether N4 bias-field correction was applied ('*_corrected').
            coll = glob.glob(os.path.join(path, file) + '/*')
            for c in coll:
                if 'flair.' in c or 'flair_corrected.' in c:
                    flair_path = c
                if 't1.' in c or 't1_corrected.' in c:
                    t1_path = c
                if 't2.' in c or 't2_corrected.' in c:
                    t2_path = c
                if 't1ce.' in c or 't1ce_corrected.' in c:
                    t1ce_path = c
            flair = load_nii(flair_path).get_data()
            t2 = load_nii(t2_path).get_data()
            t1 = load_nii(t1_path).get_data()
            t1ce = load_nii(t1ce_path).get_data()
            # Stack modalities last: (H, W, C, 4); each channel is
            # standardised over its nonzero voxels by norm().
            data_norm = np.array([norm(flair), norm(t2), norm(t1), norm(t1ce)])
            data_norm = np.transpose(data_norm, axes=[1, 2, 3, 0])
            labels = load_nii(os.path.join(path, file, file + '_seg.nii.gz')).get_data()
            # Positive centres: tumour voxels (label > 0); negatives:
            # zero-label voxels inside the brain mask (flair > 0).
            foreground = np.array(np.where(labels > 0))
            background = np.array(np.where((labels == 0) & (flair > 0)))
            # n_pos = int(foreground.shape[1] * discount)
            foreground = foreground[:, np.random.permutation(foreground.shape[1])[:n_pos]]
            background = background[:, np.random.permutation(background.shape[1])[:n_neg]]
            # NOTE(review): the final permutation indexes n_pos+n_neg columns,
            # so this assumes every patient has at least n_pos tumour voxels
            # and n_neg background voxels — confirm for small lesions.
            centers = np.concatenate((foreground, background), axis=1)
            centers = centers[:, np.random.permutation(n_neg + n_pos)]
            yield data_norm, labels, centers
def label_transform(y, nlabels):
    """Return [binary whole-tumour one-hot, full ``nlabels`` one-hot] for ``y``.

    ``y`` is a 4D batch of sparse label patches; both outputs append a
    class axis of size 2 and ``nlabels`` respectively.
    """
    patch_shape = (y.shape[0], y.shape[1], y.shape[2], y.shape[3])
    binary = keras.utils.to_categorical(
        np.copy(y).astype(dtype=np.bool), num_classes=2
    ).reshape(patch_shape + (2,))
    multi = keras.utils.to_categorical(
        y, num_classes=nlabels
    ).reshape(patch_shape + (nlabels,))
    return [binary, multi]
def train():
    """Build the two-path (flair/t2 + t1/t1ce) segmentation graph and train it.

    All hyper-parameters come from the module-level ``options`` dict; a
    checkpoint is written under ./chkpts/<run>/ after every patient.
    """
    NUM_EPOCHS = options['num_epochs']
    LOAD_PATH = options['load_path']
    SAVE_PATH = options['save_path']
    PSIZE = options['psize']
    HSIZE = options['hsize']
    WSIZE = options['wsize']
    CSIZE = options['csize']
    model_name = options['model_name']
    BATCH_SIZE = options['batch_size']
    continue_training = options['continue_training']
    # Learning rate is a graph variable so it can be decayed between epochs.
    lr = tf.Variable(5e-4, trainable=False)
    files = []
    num_labels = 5
    files = get_dataset_dirnames(options['root_path'])
    print '%d training samples' % len(files)
    # Two input streams: flair+t2 (whole tumour) and t1+t1ce (tumour classes),
    # with ground truth supervised only on the central PSIZE^3 voxels.
    flair_t2_node = tf.placeholder(dtype=tf.float32, shape=(None, HSIZE, WSIZE, CSIZE, 2))
    t1_t1ce_node = tf.placeholder(dtype=tf.float32, shape=(None, HSIZE, WSIZE, CSIZE, 2))
    flair_t2_gt_node = tf.placeholder(dtype=tf.int32, shape=(None, PSIZE, PSIZE, PSIZE, 2))
    t1_t1ce_gt_node = tf.placeholder(dtype=tf.int32, shape=(None, PSIZE, PSIZE, PSIZE, 5))
    # Backbone selection; each returns feature maps at two scales.
    if model_name == 'dense48':
        flair_t2_15, flair_t2_27 = tf_models.BraTS2ScaleDenseNetConcat_large(input=flair_t2_node, name='flair')
        t1_t1ce_15, t1_t1ce_27 = tf_models.BraTS2ScaleDenseNetConcat_large(input=t1_t1ce_node, name='t1')
    elif model_name == 'no_dense':
        flair_t2_15, flair_t2_27 = tf_models.PlainCounterpart(input=flair_t2_node, name='flair')
        t1_t1ce_15, t1_t1ce_27 = tf_models.PlainCounterpart(input=t1_t1ce_node, name='t1')
    elif model_name == 'dense24':
        flair_t2_15, flair_t2_27 = tf_models.BraTS2ScaleDenseNetConcat(input=flair_t2_node, name='flair')
        t1_t1ce_15, t1_t1ce_27 = tf_models.BraTS2ScaleDenseNetConcat(input=t1_t1ce_node, name='t1')
    else:
        print' No such model name '
    # The t1/t1ce path additionally sees the flair/t2 features.
    t1_t1ce_15 = concatenate([t1_t1ce_15, flair_t2_15])
    t1_t1ce_27 = concatenate([t1_t1ce_27, flair_t2_27])
    # 1x1x1 convolutions act as per-scale voxel classifiers.
    flair_t2_15 = Conv3D(2, kernel_size=1, strides=1, padding='same', name='flair_t2_15_cls')(flair_t2_15)
    flair_t2_27 = Conv3D(2, kernel_size=1, strides=1, padding='same', name='flair_t2_27_cls')(flair_t2_27)
    t1_t1ce_15 = Conv3D(num_labels, kernel_size=1, strides=1, padding='same', name='t1_t1ce_15_cls')(t1_t1ce_15)
    t1_t1ce_27 = Conv3D(num_labels, kernel_size=1, strides=1, padding='same', name='t1_t1ce_27_cls')(t1_t1ce_27)
    # Sum the two scales over the supervised central region (voxels 13..24).
    flair_t2_score = flair_t2_15[:, 13:25, 13:25, 13:25, :] + \
                     flair_t2_27[:, 13:25, 13:25, 13:25, :]
    t1_t1ce_score = t1_t1ce_15[:, 13:25, 13:25, 13:25, :] + \
                    t1_t1ce_27[:, 13:25, 13:25, 13:25, :]
    loss = segmentation_loss(flair_t2_gt_node, flair_t2_score, 2) + \
        segmentation_loss(t1_t1ce_gt_node, t1_t1ce_score, 5)
    acc_flair_t2 = acc_tf(y_pred=flair_t2_score, y_true=flair_t2_gt_node)
    acc_t1_t1ce = acc_tf(y_pred=t1_t1ce_score, y_true=t1_t1ce_gt_node)
    # Run batch-norm moving-average updates together with the optimiser step.
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
        optimizer = tf.train.AdamOptimizer(learning_rate=lr).minimize(loss)
    saver = tf.train.Saver(max_to_keep=15)
    data_gen_train = vox_generator(all_files=files, n_pos=200, n_neg=200)

    def single_gpu_fn(nb, gpuname='/device:GPU:0', q=None):  # q - result queue
        # Train on one mini-batch, optionally pinned to a given GPU.
        # Reads data/labels/centers from the enclosing training loop.
        with tf.device(gpuname):
            # Clamp the offset so the final batch is still full-sized.
            offset_batch = min(nb * BATCH_SIZE, centers.shape[1] - BATCH_SIZE)
            data_batch, label_batch = get_patches_3d(data, labels, centers[:, offset_batch:offset_batch + BATCH_SIZE], HSIZE, WSIZE, CSIZE, PSIZE, False)
            label_batch = label_transform(label_batch, 5)
            _, l, acc_ft, acc_t1c = sess.run(fetches=[optimizer, loss, acc_flair_t2, acc_t1_t1ce],
                                             feed_dict={flair_t2_node: data_batch[:, :, :, :, :2],
                                                        t1_t1ce_node: data_batch[:, :, :, :, 2:],
                                                        flair_t2_gt_node: label_batch[0],
                                                        t1_t1ce_gt_node: label_batch[1],
                                                        })
            # Count positive/negative whole-tumour voxels for logging.
            n_pos_sum = np.sum(np.reshape(label_batch[0], (-1, 2)), axis=0)
            return acc_ft, acc_t1c, l, n_pos_sum

    # Each run checkpoints into a fresh numbered directory under ./chkpts.
    if not os.path.isdir('chkpts'):
        os.mkdir('chkpts')
        os.mkdir('chkpts/0')
        save_point = 0
    else:
        save_point = sorted([int(x.split('/')[-1]) for x in glob.glob('chkpts/*')])[-1] + 1
        os.mkdir('chkpts/%d' % save_point)
    with tf.Session() as sess:
        if continue_training:
            saver.restore(sess, LOAD_PATH)
        else:
            sess.run(tf.global_variables_initializer())
        for ei in range(NUM_EPOCHS):
            for pi in range(len(files)):
                acc_pi, loss_pi = [], []
                data, labels, centers = data_gen_train.next()
                n_batches = int(np.ceil(float(centers.shape[1]) / BATCH_SIZE))
                threads = []
                for nb in range(0, n_batches, len(options['gpu_ids'])):
                    for gi, x in enumerate(options['gpu_ids']):
                        #t = time.time()
                        acc_ft, acc_t1c, l, n_pos_sum = single_gpu_fn(nb + gi)
                        acc_pi.append([acc_ft, acc_t1c])
                        loss_pi.append(l)
                        # Multi-threaded multi-GPU variant disabled — see the
                        # header note: "Thread OOMs out for some reason".
                        '''
                        q = [Queue.Queue() for _ in range(4)]
                        t = Thread(target=single_gpu_fn, args=(nb+gi,'/device:GPU:%d'%x, q))
                        threads.append(t)
                        for th in threads:
                            th.start()
                        for th in threads:
                            th.join()
                        threads = []
                        queue_avg = lambda x, i: np.average(list(x[i].queue))
                        acc_ft, acc_t1c, l, n_pos_sum = queue_avg(q, 0), queue_avg(q, 1), queue_avg(q, 2), np.mean(list(q[3].queue), axis=0)
                        '''
                        #print ('TIME: %.4f'%(time.time()-t))
                        print 'epoch-patient: %d, %d, iter: %d-%d, p%%: %.4f, loss: %.4f, acc_flair_t2: %.2f%%, acc_t1_t1ce: %.2f%%' % \
                            (ei + 1, pi + 1, nb + 1, n_batches, n_pos_sum[1] / float(np.sum(n_pos_sum)), l, acc_ft, acc_t1c)
                print 'patient loss: %.4f, patient acc: %.4f' % (np.mean(loss_pi), np.mean(acc_pi))
                saver.save(sess, 'chkpts/' + str(save_point) + '/' + SAVE_PATH + '.ckpt', global_step=ei)
                print 'model saved'
            # Decay the learning rate once per epoch.
            lr = tf.train.exponential_decay(lr, ei, 1, 0.25, staircase=True)
# Script entry point: build the graph and run training.
if __name__ == '__main__':
    train()
| 43.842623 | 153 | 0.593703 |
ef8b8fd9295666163502df96cd6cb6f1f43c6a39 | 1,256 | py | Python | src/mods/netdata.py | 8cylinder/boss | b57df6c1bb6064bfb5ad92313d88854281c0f18e | [
"MIT"
] | null | null | null | src/mods/netdata.py | 8cylinder/boss | b57df6c1bb6064bfb5ad92313d88854281c0f18e | [
"MIT"
] | null | null | null | src/mods/netdata.py | 8cylinder/boss | b57df6c1bb6064bfb5ad92313d88854281c0f18e | [
"MIT"
] | null | null | null | # run-shell-command :: ../../build.bash
from bash import Bash
from dist import Dist
from errors import *
class Netdata(Bash):
    """Netdata server monitoring"""
    # https://github.com/firehol/netdata
    # https://github.com/firehol/netdata/wiki/Running-behind-apache
    # https://www.digitalocean.com/community/tutorials/how-to-set-up-real-time-performance-monitoring-with-netdata-on-ubuntu-16-04
    # args: username (default:netdata), password (default:<random>)

    # Metadata consumed by the Bash base class' dependency resolver.
    provides = ['netdata']
    requires = ['apache2']
    title = 'Netdata'

    def __init__(self, *args, **kwargs):
        """Register the netdata apt package; only Ubuntu >= 18.04 is supported."""
        super().__init__(*args, **kwargs)
        if self.distro >= (Dist.UBUNTU, Dist.V18_04):
            self.apt_pkgs = ['netdata']
        else:
            raise PlatformError('Netdata only available on Ubuntu 18.04 or greater')
        # manual install: bash <(curl -Ss https://my-netdata.io/kickstart.sh) --non-interactive all

    def post_install(self):
        """Expose the netdata dashboard on all interfaces and restart it."""
        if self.distro >= (Dist.UBUNTU, Dist.V18_04):
            # Bind the dashboard to every interface instead of localhost only.
            self.sed('s/bind socket to IP = .*$/bind socket to IP = *.*.*.*/', '/etc/netdata/netdata.conf')
            self.run('sudo systemctl restart netdata')
            self.info('Netdata', 'http://{}:19999'.format(self.args.servername))
| 36.941176 | 130 | 0.640924 |
e5ec8ed4b86fc05342213514ee84cfff94490777 | 1,159 | py | Python | text-output.py | ruozhou/UConnGCMSPy | bb56639d097bf6b819a8979f014fb4a4c0c9cc31 | [
"MIT"
] | 1 | 2021-07-17T22:04:47.000Z | 2021-07-17T22:04:47.000Z | text-output.py | UCONN-CDL/UConnGCMSPy | bb56639d097bf6b819a8979f014fb4a4c0c9cc31 | [
"MIT"
] | null | null | null | text-output.py | UCONN-CDL/UConnGCMSPy | bb56639d097bf6b819a8979f014fb4a4c0c9cc31 | [
"MIT"
] | null | null | null | """
Simple script to convert netCDF (.CDF) GC-MS data files to CSV text output
Authors
-------
Bryan Webber, Nick Curtis
2017
"""
# coding: utf-8
from netCDF4 import Dataset
import os
import numpy as np
# Acquisition constants: sampling interval and start offset, in seconds.
delt = 0.17
t0 = 2.4 * 60

# find directories to check starting from cwd
dirs = next(os.walk('.'))[1]

for d in dirs:
    for fname in os.listdir(d):  # 'fname': avoid shadowing the builtin 'file'
        # find all CDF files in dir
        if fname.endswith('.CDF'):
            # Open via the full path; the bare name only worked from cwd.
            cdf_path = os.path.join(d, fname)
            print(cdf_path)
            rootgrp = Dataset(cdf_path, 'r')
            total_intensity = rootgrp.variables['total_intensity']
            num_points = len(total_intensity)
            # Time axis for the intensity values (vectorized).
            time = t0 + delt * np.arange(num_points)
            time = time / 60  # Convert to minutes
            output = np.transpose(np.vstack((time, total_intensity)))
            # save to csv
            # NOTE: was fname.rstrip('.CDF'), which strips *characters*
            # ('C', 'D', 'F', '.') and corrupts names ending in them.
            outfile = fname[:-len('.CDF')] + '.csv'
            outloc = os.path.join(d, outfile)
            np.savetxt(fname=outloc, X=output, delimiter=',')
            rootgrp.close()
| 27.595238 | 69 | 0.578947 |
f2cffaf612449d34dc4ed151bb9d7cae6c0a2976 | 26,727 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_06_01/aio/operations/_route_filter_rules_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_06_01/aio/operations/_route_filter_rules_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | null | null | null | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_06_01/aio/operations/_route_filter_rules_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class RouteFilterRulesOperations:
"""RouteFilterRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2017_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
    def __init__(self, client, config, serializer, deserializer) -> None:
        """Store the shared pipeline client, config and (de)serializers."""
        # Pipeline client used to send HTTP requests.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Service-client configuration (subscription id, polling interval, ...).
        self._config = config
    async def _delete_initial(
        self,
        resource_group_name: str,
        route_filter_name: str,
        rule_name: str,
        **kwargs
    ) -> None:
        """Send the initial DELETE request; the LRO itself is driven by
        :meth:`begin_delete`, which polls on the returned response."""
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-06-01"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202 = delete accepted (possibly asynchronous); 204 = already gone.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        route_filter_name: str,
        rule_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the specified rule from a route filter.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param rule_name: The name of the rule.
        :type rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Fresh start: fire the initial DELETE and keep the raw pipeline
        # response so the poller can read the LRO headers from it.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                route_filter_name=route_filter_name,
                rule_name=rule_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete returns no body; only invoke the user callback if given.
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)  # type: AsyncPollingMethod
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously saved poller instead of starting a new LRO.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'}  # type: ignore
async def get(
    self,
    resource_group_name: str,
    route_filter_name: str,
    rule_name: str,
    **kwargs
) -> "models.RouteFilterRule":
    """Gets the specified rule from a route filter.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param route_filter_name: The name of the route filter.
    :type route_filter_name: str
    :param rule_name: The name of the rule.
    :type rule_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: RouteFilterRule, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2017_06_01.models.RouteFilterRule
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.RouteFilterRule"]
    # Map well-known failure codes to typed exceptions; callers may extend
    # or override the mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2017-06-01"
    accept = "application/json"

    # Construct URL: expand the metadata template with URL-encoded path values.
    url = self.get.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
        'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only 200 is a documented success for this operation.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('RouteFilterRule', pipeline_response)

    if cls:
        # Hand the raw pipeline response and deserialized body to the custom callback.
        return cls(pipeline_response, deserialized, {})

    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'}  # type: ignore
async def _create_or_update_initial(
    self,
    resource_group_name: str,
    route_filter_name: str,
    rule_name: str,
    route_filter_rule_parameters: "models.RouteFilterRule",
    **kwargs
) -> "models.RouteFilterRule":
    """Send the initial PUT request of the create-or-update long-running
    operation and return the deserialized first response.

    Private helper used by :meth:`begin_create_or_update`; not part of the
    public surface.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.RouteFilterRule"]
    # Map well-known failure codes to typed exceptions; extendable via 'error_map'.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2017-06-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL from the operation's metadata template.
    url = self._create_or_update_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
        'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the rule model into the request body.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(route_filter_rule_parameters, 'RouteFilterRule')
    body_content_kwargs['content'] = body_content

    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Both success codes carry a RouteFilterRule body (200 update, 201 create).
    if response.status_code == 200:
        deserialized = self._deserialize('RouteFilterRule', pipeline_response)

    if response.status_code == 201:
        deserialized = self._deserialize('RouteFilterRule', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'}  # type: ignore
async def begin_create_or_update(
    self,
    resource_group_name: str,
    route_filter_name: str,
    rule_name: str,
    route_filter_rule_parameters: "models.RouteFilterRule",
    **kwargs
) -> AsyncLROPoller["models.RouteFilterRule"]:
    """Creates or updates a route in the specified route filter.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param route_filter_name: The name of the route filter.
    :type route_filter_name: str
    :param rule_name: The name of the route filter rule.
    :type rule_name: str
    :param route_filter_rule_parameters: Parameters supplied to the create or update route filter
     rule operation.
    :type route_filter_rule_parameters: ~azure.mgmt.network.v2017_06_01.models.RouteFilterRule
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either RouteFilterRule or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_06_01.models.RouteFilterRule]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["models.RouteFilterRule"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved state: issue the initial PUT.  'cls' is overridden so the
        # raw pipeline response is preserved for the poller to inspect.
        raw_result = await self._create_or_update_initial(
            resource_group_name=resource_group_name,
            route_filter_name=route_filter_name,
            rule_name=rule_name,
            route_filter_rule_parameters=route_filter_rule_parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs only apply to the initial request; drop them before polling.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final LRO response into the model type.
        deserialized = self._deserialize('RouteFilterRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Select the polling strategy: default ARM polling, no polling, or a
    # caller-supplied polling object.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Rehydrate a poller from a previously saved continuation token.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'}  # type: ignore
async def _update_initial(
    self,
    resource_group_name: str,
    route_filter_name: str,
    rule_name: str,
    route_filter_rule_parameters: "models.PatchRouteFilterRule",
    **kwargs
) -> "models.RouteFilterRule":
    """Issue the initial PATCH request of the update long-running operation
    and return the deserialized first response.

    Private helper used by :meth:`begin_update`.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.RouteFilterRule"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2017-06-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Expand the URL template with the encoded path parameters.
    path_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
        'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self._update_initial.metadata['url'], **path_args)  # type: ignore

    # Query string and headers.
    query_parameters = {
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the patch model and send the PATCH request.
    body_content = self._serialize.body(route_filter_rule_parameters, 'PatchRouteFilterRule')
    request = self._client.patch(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('RouteFilterRule', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'}  # type: ignore
async def begin_update(
    self,
    resource_group_name: str,
    route_filter_name: str,
    rule_name: str,
    route_filter_rule_parameters: "models.PatchRouteFilterRule",
    **kwargs
) -> AsyncLROPoller["models.RouteFilterRule"]:
    """Updates a route in the specified route filter.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param route_filter_name: The name of the route filter.
    :type route_filter_name: str
    :param rule_name: The name of the route filter rule.
    :type rule_name: str
    :param route_filter_rule_parameters: Parameters supplied to the update route filter rule
     operation.
    :type route_filter_rule_parameters: ~azure.mgmt.network.v2017_06_01.models.PatchRouteFilterRule
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either RouteFilterRule or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_06_01.models.RouteFilterRule]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["models.RouteFilterRule"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved state: issue the initial PATCH.  'cls' is overridden so the
        # raw pipeline response is preserved for the poller to inspect.
        raw_result = await self._update_initial(
            resource_group_name=resource_group_name,
            route_filter_name=route_filter_name,
            rule_name=rule_name,
            route_filter_rule_parameters=route_filter_rule_parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs only apply to the initial request; drop them before polling.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final LRO response into the model type.
        deserialized = self._deserialize('RouteFilterRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Select the polling strategy: default ARM polling, no polling, or a
    # caller-supplied polling object.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Rehydrate a poller from a previously saved continuation token.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'}  # type: ignore
def list_by_route_filter(
    self,
    resource_group_name: str,
    route_filter_name: str,
    **kwargs
) -> AsyncIterable["models.RouteFilterRuleListResult"]:
    """Gets all RouteFilterRules in a route filter.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param route_filter_name: The name of the route filter.
    :type route_filter_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either RouteFilterRuleListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2017_06_01.models.RouteFilterRuleListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.RouteFilterRuleListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2017-06-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request: from the URL template for the first page,
        # or from the service-supplied next_link for follow-up pages.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list_by_route_filter.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # next_link already carries every required query parameter.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page; return (link to next page or None, items).
        deserialized = self._deserialize('RouteFilterRuleListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            # NOTE: unlike the non-paged operations, the custom 'cls' is
            # invoked with only the page's item list (generated-code behavior).
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page, surfacing HTTP errors as typed exceptions.
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_by_route_filter.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules'}  # type: ignore
| 49.402957 | 221 | 0.671755 |
3af235152c3918354ae71d060e10f503673cdef2 | 2,773 | py | Python | 76_Russian_Roulette/python/russianroulette.py | uMetalooper/basic-computer-games | b31c624703e5d4476a39a215183b667df348ac2e | [
"Unlicense"
] | 1 | 2022-03-24T17:56:31.000Z | 2022-03-24T17:56:31.000Z | 76_Russian_Roulette/python/russianroulette.py | uMetalooper/basic-computer-games | b31c624703e5d4476a39a215183b667df348ac2e | [
"Unlicense"
] | 1 | 2022-03-24T20:16:26.000Z | 2022-03-24T20:16:26.000Z | 76_Russian_Roulette/python/russianroulette.py | uMetalooper/basic-computer-games | b31c624703e5d4476a39a215183b667df348ac2e | [
"Unlicense"
] | 1 | 2022-03-11T14:14:06.000Z | 2022-03-11T14:14:06.000Z | ########################################################
#
# Russian Roulette
#
# From Basic Computer Games (1978)
#
# In this game, you are given by the computer a
# revolver loaded with one bullet and five empty
# chambers. You spin the chamber and pull the trigger
# by inputting a "1", or, if you want to quit, input
# a "2". You win if you play ten times and are still
# alive.
# Tom Adametx wrote this program while a student at
# Curtis Jr. High School in Sudbury, Massachusetts.
#
########################################################
from random import random
# Number of survived trigger pulls required to win is NUMBER_OF_ROUNDS + 1.
NUMBER_OF_ROUNDS = 9


def initial_message() -> None:
    """Print the game banner and the introduction text."""
    banner_lines = [
        " " * 28 + "Russian Roulette",
        " " * 15 + "Creative Computing Morristown, New Jersey\n\n\n",
        "This is a game of >>>>>>>>>>Russian Roulette.\n",
        "Here is a Revolver.",
    ]
    for line in banner_lines:
        print(line)
def parse_input() -> int:
    """Prompt the user until an integer is entered; return that integer."""
    while True:
        raw = input("? ")
        try:
            return int(raw)
        except ValueError:
            print("Number expected...")
def main() -> None:
    """Run the Russian Roulette game loop.

    One bullet, six chambers: type 1 to spin and pull the trigger,
    2 to give up.  Surviving NUMBER_OF_ROUNDS + 1 pulls wins the round.
    Like the original BASIC program, the game restarts forever with a new
    "victim"; exit with Ctrl+C.
    """
    initial_message()
    while True:
        dead = False
        n = 0  # number of survived trigger pulls ("clicks") this round
        print("Type '1' to Spin chamber and pull trigger")
        print("Type '2' to Give up")
        print("Go")
        while not dead:
            i = parse_input()
            if i == 2:
                # Give up; any other number counts as pulling the trigger.
                break
            # One bullet in six chambers: 1-in-6 chance of firing.
            # (5 / 6 is the same float as the previous magic literal
            # 0.8333333333333334, so behavior is unchanged.)
            if random() > 5 / 6:
                dead = True
            else:
                print("- CLICK -\n")
                n += 1
                if n > NUMBER_OF_ROUNDS:
                    break
        if dead:
            print("BANG!!!!! You're Dead!")
            print("Condolences will be sent to your relatives.\n\n\n")
            print("...Next victim...")
        else:
            if n > NUMBER_OF_ROUNDS:
                print("You win!!!!!")
                print("Let someone else blow his brain out.\n")
            else:
                print(" Chicken!!!!!\n\n\n")
                print("...Next victim....")


if __name__ == "__main__":
    main()
########################################################
# Porting Notes
#
# Although the description says the game accepts "1" or "2",
# the original game accepts any number as input, and
# if it is different from "2" the program treats it
# as if the user had entered "1". That behavior was
# kept in this port.
# Also, in the original game you must "pull the trigger"
# 11 times instead of 10 in order to win,
# given that N=0 at the beginning and the condition to
# win is "IF N > 10 THEN 80". That was fixed in this
# port, asking the user to pull the trigger only ten
# times, though the number of rounds can be changed via
# the constant NUMBER_OF_ROUNDS.
#
########################################################
| 29.189474 | 72 | 0.51749 |
dc1e240e9950e678b53ebb4083252f7bfbdd5195 | 1,891 | py | Python | setup.py | dstansby/reproject | 4beeba733ca61930c1ecf339905589ec91176d4e | [
"BSD-3-Clause"
] | null | null | null | setup.py | dstansby/reproject | 4beeba733ca61930c1ecf339905589ec91176d4e | [
"BSD-3-Clause"
] | null | null | null | setup.py | dstansby/reproject | 4beeba733ca61930c1ecf339905589ec91176d4e | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import sys
import builtins
# Append current working directory for pip 19
# (must happen before importing ah_bootstrap from the project root).
sys.path.append(os.path.abspath("."))

import ah_bootstrap  # noqa
from setuptools import setup
from setuptools.config import read_configuration

from astropy_helpers.distutils_helpers import is_distutils_display_option
from astropy_helpers.setup_helpers import register_commands, get_package_info
from astropy_helpers.version_helpers import generate_version_py

# Store the package name in a built-in variable so it's easy
# to get from other parts of the setup infrastructure
builtins._ASTROPY_PACKAGE_NAME_ = read_configuration('setup.cfg')['metadata']['name']

# Create a dictionary with setup command overrides. Note that this gets
# information about the package (name and version) from the setup.cfg file.
cmdclass = register_commands()

# Freeze build information in version.py. Note that this gets information
# about the package (name and version) from the setup.cfg file.
version = generate_version_py()

# Get configuration information from all of the various subpackages.
# See the docstring for setup_helpers.update_package_files for more
# details.
package_info = get_package_info()

if is_distutils_display_option():
    # Avoid installing setup_requires dependencies if the user just
    # queries for information
    setup_requires = []
else:
    setup_requires = read_configuration('setup.cfg')['options']['setup_requires']

# Make sure we have the packages needed for building astropy, but do not
# require them when installing from an sdist as the c files are included.
# (PKG-INFO exists only inside an unpacked source distribution.)
if not os.path.exists(os.path.join(os.path.dirname(__file__), 'PKG-INFO')):
    setup_requires.extend(['cython>=0.29.13'])

setup(version=version, cmdclass=cmdclass, setup_requires=setup_requires, **package_info)
| 38.591837 | 88 | 0.787943 |
ff46a61db1c874e1cf050167b0bebeed24bea5f9 | 945 | py | Python | kubernetes/test/test_v1_storage_class.py | scele/kubernetes-client-python | 9e982cbdb5f19dc1a3935a75bdd92288f3b807fb | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_v1_storage_class.py | scele/kubernetes-client-python | 9e982cbdb5f19dc1a3935a75bdd92288f3b807fb | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_v1_storage_class.py | scele/kubernetes-client-python | 9e982cbdb5f19dc1a3935a75bdd92288f3b807fb | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_storage_class import V1StorageClass
class TestV1StorageClass(unittest.TestCase):
    """Unit-test stubs for the V1StorageClass model (generated scaffold)."""

    def setUp(self):
        """No fixtures are required for these stubs."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testV1StorageClass(self):
        """Smoke-test V1StorageClass construction (stub)."""
        # FIXME: construct object with mandatory attributes with example values
        # model = kubernetes.client.models.v1_storage_class.V1StorageClass()
        pass


if __name__ == '__main__':
    unittest.main()
| 21 | 105 | 0.703704 |
912d83ab7f67fd06d0a0f4d878eb67a0d6294459 | 3,021 | py | Python | webscrape.py | Ashton-Sidhu/webscraper | 44e25b0d51aa931863a4f2f4a298b8847cc20d8e | [
"BSD-3-Clause"
] | null | null | null | webscrape.py | Ashton-Sidhu/webscraper | 44e25b0d51aa931863a4f2f4a298b8847cc20d8e | [
"BSD-3-Clause"
] | null | null | null | webscrape.py | Ashton-Sidhu/webscraper | 44e25b0d51aa931863a4f2f4a298b8847cc20d8e | [
"BSD-3-Clause"
] | null | null | null | import asyncio
import re
from contextlib import closing
import click
from bs4 import BeautifulSoup, Comment, Tag
from requests import get
from requests.exceptions import RequestException
# Substrings that mark a <div class="..."> as navigation/boilerplate to strip.
MENU_CHECK = ['sidebar', 'menu', 'dropdown', 'mail', 'social']


@click.command()
@click.option('-o', '--output-file', help='Name and location of the file')
@click.argument('urls')
def scrape(urls, output_file=''):
    # CLI entry point: forwards the URLS argument to webscrape().
    # (click passes 'urls' as a single string.)
    webscrape(urls, output_file=output_file)
def webscrape(*urls, output_file=''):
    """Return the visible text content of the first URL in ``urls``.

    Headers, footers, scripts, styles, code, forms, HTML comments and
    menu-like ``<div>`` blocks are removed; the remaining text nodes are
    joined with single spaces.

    Parameters
    ----------
    urls : str
        Urls to scrape.  NOTE(review): the function returns after
        processing only the *first* url — confirm whether multi-url
        support was intended.
    output_file : str, optional
        Output file name/location.  NOTE(review): currently unused.

    Returns
    -------
    str
        The cleaned page text, or "" when fetching/parsing fails.
    """
    for url in urls:
        raw_html = _simple_get(url)
        try:
            soup = BeautifulSoup(raw_html, 'html.parser')
            soup = soup.body

            # Delete any comments
            for comments in soup.findAll(text=lambda text: isinstance(text, Comment)):
                comments.decompose()

            # kill all script and style elements
            for script in soup(["header", "footer", "script", "style", "code", "form"]):
                script.decompose()  # rip it out

            # Remove any menu-like <div> blocks from the html
            for div in soup.find_all('div'):
                if isinstance(div, Tag):
                    if div.attrs:
                        if 'class' in div.attrs:
                            for menu_item in MENU_CHECK:
                                if menu_item in " ".join(div.attrs['class']):
                                    div.decompose()
                                    break

            # Collapse whitespace runs and join the remaining text nodes.
            # (r'\s+' fixes the invalid-escape warning of the old '\s+'.)
            cleaned_content = list(map(lambda x: re.sub(r'\s+', ' ', x).strip(), soup.find_all(text=True)))
            return (" ".join(filter(lambda x: x != '', cleaned_content))).strip()
        except Exception:
            # Bug fix: was a bare 'except:', which also swallowed
            # KeyboardInterrupt/SystemExit.  Any parse failure (including
            # raw_html being None) still yields "".
            return ""
def _simple_get(url):
    """HTTP GET `url` and return the raw body.

    Returns the response content when the server replies with HTML-ish
    200 content, otherwise None (printing/logging the failure).
    """
    try:
        with closing(get(url)) as resp:
            if not _is_good_response(resp):
                print(f"Request failed for {url}")
                return None
            return resp.content
    except RequestException as e:
        _log_error('Error during requests to {0} : {1}'.format(url, str(e)))
        return None
def _is_good_response(resp):
"""
Returns True if the response seems to be HTML, False otherwise.
"""
content_type = resp.headers['Content-Type'].lower()
return (resp.status_code == 200
and content_type is not None
and content_type.find('html') > -1)
def _log_error(e):
"""
It is always a good idea to log errors.
This function just prints them, but you can
make it do anything.
"""
print(e)
| 30.21 | 106 | 0.558093 |
940e2518b313ddde39879fad0fdeb4ef54e3a096 | 1,446 | py | Python | knowledge_neurons/__init__.py | jmerizia/knowledge-neurons | be1a84adafd6e973ab0ee012527ac1988632a5e8 | [
"MIT"
] | 68 | 2021-08-01T22:12:34.000Z | 2022-02-16T19:57:58.000Z | knowledge_neurons/__init__.py | jmerizia/knowledge-neurons | be1a84adafd6e973ab0ee012527ac1988632a5e8 | [
"MIT"
] | 2 | 2021-11-14T16:35:56.000Z | 2021-12-03T05:06:37.000Z | knowledge_neurons/__init__.py | jmerizia/knowledge-neurons | be1a84adafd6e973ab0ee012527ac1988632a5e8 | [
"MIT"
] | 5 | 2021-08-06T04:04:40.000Z | 2022-02-09T20:16:49.000Z | from transformers import (
BertTokenizer,
BertLMHeadModel,
GPT2Tokenizer,
GPT2LMHeadModel,
GPTNeoForCausalLM,
)
from .knowledge_neurons import KnowledgeNeurons
from .data import pararel, pararel_expanded, PARAREL_RELATION_NAMES
# Hugging Face model identifiers, grouped by architecture family.
BERT_MODELS = ["bert-base-uncased", "bert-base-multilingual-uncased"]
GPT2_MODELS = ["gpt2"]
GPT_NEO_MODELS = [
    "EleutherAI/gpt-neo-125M",
    "EleutherAI/gpt-neo-1.3B",
    "EleutherAI/gpt-neo-2.7B",
]
# Every model name this package knows how to load.
ALL_MODELS = BERT_MODELS + GPT2_MODELS + GPT_NEO_MODELS
def initialize_model_and_tokenizer(model_name: str):
    """Load the pretrained model and matching tokenizer for ``model_name``.

    :param model_name: one of the identifiers listed in ``ALL_MODELS``
    :return: ``(model, tokenizer)`` tuple; the model is put in eval mode
    :raises ValueError: if ``model_name`` is not a supported model
    """
    if model_name in BERT_MODELS:
        tokenizer = BertTokenizer.from_pretrained(model_name)
        model = BertLMHeadModel.from_pretrained(model_name)
    elif model_name in GPT2_MODELS:
        tokenizer = GPT2Tokenizer.from_pretrained(model_name)
        model = GPT2LMHeadModel.from_pretrained(model_name)
    elif model_name in GPT_NEO_MODELS:
        tokenizer = GPT2Tokenizer.from_pretrained(model_name)
        model = GPTNeoForCausalLM.from_pretrained(model_name)
    else:
        # Bug fix: the message lacked the f-prefix, so the literal text
        # "{model_name}" was printed instead of the offending name.
        raise ValueError(f"Model {model_name} not supported")
    model.eval()  # inference mode (disables dropout etc.)
    return model, tokenizer
def model_type(model_name: str):
    """Return the architecture family of ``model_name``.

    :param model_name: one of the identifiers listed in ``ALL_MODELS``
    :return: ``"bert"``, ``"gpt2"``, or ``"gpt_neo"``
    :raises ValueError: if ``model_name`` is not a supported model
    """
    if model_name in BERT_MODELS:
        return "bert"
    elif model_name in GPT2_MODELS:
        return "gpt2"
    elif model_name in GPT_NEO_MODELS:
        return "gpt_neo"
    else:
        # Bug fix: the message lacked the f-prefix, so the literal text
        # "{model_name}" was printed instead of the offending name.
        raise ValueError(f"Model {model_name} not supported")
| 30.125 | 69 | 0.726833 |
bdefbe677275b319c6563425a04f8d56409273af | 15,140 | py | Python | python/sdk/client/models/version_endpoint.py | karzuo/merlin | bdbdac35071d81beb1b8b5b807697bf2eac69a40 | [
"Apache-2.0"
] | 1 | 2021-12-26T09:04:12.000Z | 2021-12-26T09:04:12.000Z | python/sdk/client/models/version_endpoint.py | ibnummuhammad/merlin | acf10a350bcacfdfe67f7020d535467b71ff1d89 | [
"Apache-2.0"
] | null | null | null | python/sdk/client/models/version_endpoint.py | ibnummuhammad/merlin | acf10a350bcacfdfe67f7020d535467b71ff1d89 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Merlin
API Guide for accessing Merlin's model management, deployment, and serving functionalities # noqa: E501
OpenAPI spec version: 0.14.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from client.configuration import Configuration
class VersionEndpoint(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
# Maps each python attribute name to its Swagger type string; used by the
# generated serializer/deserializer machinery.
swagger_types = {
    'id': 'str',
    'version_id': 'int',
    'status': 'EndpointStatus',
    'url': 'str',
    'service_name': 'str',
    'environment_name': 'str',
    'environment': 'Environment',
    'monitoring_url': 'str',
    'message': 'str',
    'resource_request': 'ResourceRequest',
    'env_vars': 'list[EnvVar]',
    'transformer': 'Transformer',
    'logger': 'Logger',
    'deployment_mode': 'DeploymentMode',
    'autoscaling_policy': 'AutoscalingPolicy',
    'created_at': 'datetime',
    'updated_at': 'datetime'
}

# Maps each python attribute name to its JSON key in the API payload.
attribute_map = {
    'id': 'id',
    'version_id': 'version_id',
    'status': 'status',
    'url': 'url',
    'service_name': 'service_name',
    'environment_name': 'environment_name',
    'environment': 'environment',
    'monitoring_url': 'monitoring_url',
    'message': 'message',
    'resource_request': 'resource_request',
    'env_vars': 'env_vars',
    'transformer': 'transformer',
    'logger': 'logger',
    'deployment_mode': 'deployment_mode',
    'autoscaling_policy': 'autoscaling_policy',
    'created_at': 'created_at',
    'updated_at': 'updated_at'
}
def __init__(self, id=None, version_id=None, status=None, url=None, service_name=None, environment_name=None, environment=None, monitoring_url=None, message=None, resource_request=None, env_vars=None, transformer=None, logger=None, deployment_mode=None, autoscaling_policy=None, created_at=None, updated_at=None, _configuration=None):  # noqa: E501
    """VersionEndpoint - a model defined in Swagger.

    All arguments are optional; arguments left as None are simply not set.
    """  # noqa: E501
    # Fall back to the default client configuration when none is supplied.
    if _configuration is None:
        _configuration = Configuration()
    self._configuration = _configuration

    # Backing fields for the generated properties below.
    self._id = None
    self._version_id = None
    self._status = None
    self._url = None
    self._service_name = None
    self._environment_name = None
    self._environment = None
    self._monitoring_url = None
    self._message = None
    self._resource_request = None
    self._env_vars = None
    self._transformer = None
    self._logger = None
    self._deployment_mode = None
    self._autoscaling_policy = None
    self._created_at = None
    self._updated_at = None
    self.discriminator = None

    # Route each provided constructor argument through its property setter.
    if id is not None:
        self.id = id
    if version_id is not None:
        self.version_id = version_id
    if status is not None:
        self.status = status
    if url is not None:
        self.url = url
    if service_name is not None:
        self.service_name = service_name
    if environment_name is not None:
        self.environment_name = environment_name
    if environment is not None:
        self.environment = environment
    if monitoring_url is not None:
        self.monitoring_url = monitoring_url
    if message is not None:
        self.message = message
    if resource_request is not None:
        self.resource_request = resource_request
    if env_vars is not None:
        self.env_vars = env_vars
    if transformer is not None:
        self.transformer = transformer
    if logger is not None:
        self.logger = logger
    if deployment_mode is not None:
        self.deployment_mode = deployment_mode
    if autoscaling_policy is not None:
        self.autoscaling_policy = autoscaling_policy
    if created_at is not None:
        self.created_at = created_at
    if updated_at is not None:
        self.updated_at = updated_at
@property
def id(self):
    """Gets the id of this VersionEndpoint.  # noqa: E501

    :return: The id of this VersionEndpoint.  # noqa: E501
    :rtype: str
    """
    return self._id

@id.setter
def id(self, id):
    """Sets the id of this VersionEndpoint.

    :param id: The id of this VersionEndpoint.  # noqa: E501
    :type: str
    """
    self._id = id

@property
def version_id(self):
    """Gets the version_id of this VersionEndpoint.  # noqa: E501

    :return: The version_id of this VersionEndpoint.  # noqa: E501
    :rtype: int
    """
    return self._version_id

@version_id.setter
def version_id(self, version_id):
    """Sets the version_id of this VersionEndpoint.

    :param version_id: The version_id of this VersionEndpoint.  # noqa: E501
    :type: int
    """
    self._version_id = version_id

@property
def status(self):
    """Gets the status of this VersionEndpoint.  # noqa: E501

    :return: The status of this VersionEndpoint.  # noqa: E501
    :rtype: EndpointStatus
    """
    return self._status

@status.setter
def status(self, status):
    """Sets the status of this VersionEndpoint.

    :param status: The status of this VersionEndpoint.  # noqa: E501
    :type: EndpointStatus
    """
    self._status = status

@property
def url(self):
    """Gets the url of this VersionEndpoint.  # noqa: E501

    :return: The url of this VersionEndpoint.  # noqa: E501
    :rtype: str
    """
    return self._url

@url.setter
def url(self, url):
    """Sets the url of this VersionEndpoint.

    :param url: The url of this VersionEndpoint.  # noqa: E501
    :type: str
    """
    self._url = url

@property
def service_name(self):
    """Gets the service_name of this VersionEndpoint.  # noqa: E501

    :return: The service_name of this VersionEndpoint.  # noqa: E501
    :rtype: str
    """
    return self._service_name

@service_name.setter
def service_name(self, service_name):
    """Sets the service_name of this VersionEndpoint.

    :param service_name: The service_name of this VersionEndpoint.  # noqa: E501
    :type: str
    """
    self._service_name = service_name

@property
def environment_name(self):
    """Gets the environment_name of this VersionEndpoint.  # noqa: E501

    :return: The environment_name of this VersionEndpoint.  # noqa: E501
    :rtype: str
    """
    return self._environment_name
@environment_name.setter
def environment_name(self, environment_name):
"""Sets the environment_name of this VersionEndpoint.
:param environment_name: The environment_name of this VersionEndpoint. # noqa: E501
:type: str
"""
self._environment_name = environment_name
@property
def environment(self):
"""Gets the environment of this VersionEndpoint. # noqa: E501
:return: The environment of this VersionEndpoint. # noqa: E501
:rtype: Environment
"""
return self._environment
@environment.setter
def environment(self, environment):
"""Sets the environment of this VersionEndpoint.
:param environment: The environment of this VersionEndpoint. # noqa: E501
:type: Environment
"""
self._environment = environment
@property
def monitoring_url(self):
"""Gets the monitoring_url of this VersionEndpoint. # noqa: E501
:return: The monitoring_url of this VersionEndpoint. # noqa: E501
:rtype: str
"""
return self._monitoring_url
@monitoring_url.setter
def monitoring_url(self, monitoring_url):
"""Sets the monitoring_url of this VersionEndpoint.
:param monitoring_url: The monitoring_url of this VersionEndpoint. # noqa: E501
:type: str
"""
self._monitoring_url = monitoring_url
@property
def message(self):
"""Gets the message of this VersionEndpoint. # noqa: E501
:return: The message of this VersionEndpoint. # noqa: E501
:rtype: str
"""
return self._message
@message.setter
def message(self, message):
"""Sets the message of this VersionEndpoint.
:param message: The message of this VersionEndpoint. # noqa: E501
:type: str
"""
self._message = message
    # Auto-generated accessors (continued): plain get/set wrappers with no
    # validation on assignment.
    @property
    def resource_request(self):
        """ResourceRequest: The resource_request of this VersionEndpoint."""
        return self._resource_request
    @resource_request.setter
    def resource_request(self, resource_request):
        """Set the resource_request of this VersionEndpoint."""
        self._resource_request = resource_request
    @property
    def env_vars(self):
        """list[EnvVar]: The env_vars of this VersionEndpoint."""
        return self._env_vars
    @env_vars.setter
    def env_vars(self, env_vars):
        """Set the env_vars of this VersionEndpoint."""
        self._env_vars = env_vars
    @property
    def transformer(self):
        """Transformer: The transformer of this VersionEndpoint."""
        return self._transformer
    @transformer.setter
    def transformer(self, transformer):
        """Set the transformer of this VersionEndpoint."""
        self._transformer = transformer
    @property
    def logger(self):
        """Logger: The logger of this VersionEndpoint."""
        return self._logger
    @logger.setter
    def logger(self, logger):
        """Set the logger of this VersionEndpoint."""
        self._logger = logger
    @property
    def deployment_mode(self):
        """DeploymentMode: The deployment_mode of this VersionEndpoint."""
        return self._deployment_mode
    @deployment_mode.setter
    def deployment_mode(self, deployment_mode):
        """Set the deployment_mode of this VersionEndpoint."""
        self._deployment_mode = deployment_mode
    @property
    def autoscaling_policy(self):
        """AutoscalingPolicy: The autoscaling_policy of this VersionEndpoint."""
        return self._autoscaling_policy
    @autoscaling_policy.setter
    def autoscaling_policy(self, autoscaling_policy):
        """Set the autoscaling_policy of this VersionEndpoint."""
        self._autoscaling_policy = autoscaling_policy
    @property
    def created_at(self):
        """datetime: The created_at of this VersionEndpoint."""
        return self._created_at
    @created_at.setter
    def created_at(self, created_at):
        """Set the created_at of this VersionEndpoint."""
        self._created_at = created_at
    @property
    def updated_at(self):
        """datetime: The updated_at of this VersionEndpoint."""
        return self._updated_at
    @updated_at.setter
    def updated_at(self, updated_at):
        """Set the updated_at of this VersionEndpoint."""
        self._updated_at = updated_at
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(VersionEndpoint, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`: delegate to to_str()."""
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, VersionEndpoint):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, VersionEndpoint):
return True
return self.to_dict() != other.to_dict()
| 28.037037 | 352 | 0.607067 |
70d770e7d43a8dd88c876b64fffeab1fc90bd7ad | 5,179 | py | Python | reader.py | joostkremers/pymal | 35831c985abea69f114b1ab3fa968d420b084618 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | reader.py | joostkremers/pymal | 35831c985abea69f114b1ab3fa968d420b084618 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | reader.py | joostkremers/pymal | 35831c985abea69f114b1ab3fa968d420b084618 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | # coding=utf-8
import re
import mal_types as mal
class Reader:
    """Wraps a token list with a cursor for sequential consumption."""

    def __init__(self, tokens):
        self.tokens = tokens
        self.position = 0

    def next(self):
        """Return the current token and advance the cursor.

        Past the end of the token list, return the empty string.
        """
        current = self.peek()
        self.position += 1
        return current

    def peek(self):
        """Return the current token without advancing the cursor.

        Past the end of the token list, return the empty string.
        """
        in_range = self.position < len(self.tokens)
        return self.tokens[self.position] if in_range else ''
# Single-token reader macros: read_form()/apply_reader_macro() expand
# "<token> FORM" into the list (<symbol> FORM).
reader_macros = {"'": "quote",
                 "`": "quasiquote",
                 "~": "unquote",
                 "~@": "splice-unquote",
                 "@": "deref"}
def read_str(input_str):
    """Parse INPUT_STR and return the corresponding Mal object."""
    return read_form(Reader(tokenize(input_str)))
def tokenize(input_str):
    """Split INPUT_STR into a list of Mal tokens, dropping comments."""
    pattern = (r'[\s,]*'
               r'(~@|'
               r'[\[\]{}()\'`~^@]|'
               r'"(?:\\.|[^\\"])*"'
               r'|;.*|'
               r'[^\s\[\]{}(\'"`,;)]*)')
    matches = re.findall(pattern, input_str)
    # findall() yields a trailing empty match; drop it along with any
    # comment tokens (those starting with ';').
    return [tok for tok in matches if tok and not tok.startswith(';')]
def read_form(form):
    """Read one form from the Reader FORM and return its Mal value."""
    token = form.next()
    if token in ('(', '[', '{'):
        return read_sequence(form, token)
    if token == '^':
        # with-meta reader macro
        return apply_with_meta_macro(form)
    if token in reader_macros:
        return apply_reader_macro(form, token)
    if token == '':
        return None
    return read_atom(token)
def read_sequence(form, token):
    """Read the body of a list, vector or hash map from FORM.

    TOKEN is the opening delimiter; reading stops at the matching
    closing delimiter, which is consumed.
    """
    end_token = {'(': ')', '[': ']', '{': '}'}[token]
    elements = []
    while form.peek() != end_token:
        if form.peek() == '':
            # Ran out of input before finding the closer.
            return mal.Error("ParenError", "Missing closing parenthesis")
        element = read_form(form)
        if type(element) is mal.Error:
            return element
        elements.append(element)
    # Consume the closing delimiter.
    form.next()
    if end_token == ')':
        return mal.List(elements)
    if end_token == '}':
        return create_hash(elements)
    return mal.Vector(elements)
def create_hash(items):
    """Build a mal.Hash from the flat key/value list ITEMS.

    Keys must be strings or mal.Keyword instances (both hashable); an
    odd number of items or an illegal key yields a mal.Error.
    """
    if len(items) % 2 != 0:
        return mal.Error("HashError", "Insufficient number of items")
    table = {}
    for key, value in zip(items[::2], items[1::2]):
        if not isinstance(key, (str, mal.Keyword)):
            return mal.Error("HashError",
                             "Cannot hash on {}".format(type(key)))
        table[key] = value
    return mal.Hash(table)
def apply_with_meta_macro(form):
    """Expand '^META OBJ' into the list (with-meta OBJ META)."""
    meta = read_form(form)
    if type(meta) is mal.Error:
        return meta
    target = read_form(form)
    if type(target) is mal.Error:
        return target
    return mal.List([mal.Symbol('with-meta'), target, meta])
def apply_reader_macro(form, token):
    """Wrap the next form in the symbol that TOKEN's macro expands to."""
    wrapped = read_form(form)
    if type(wrapped) is mal.Error:
        return wrapped
    return mal.List([mal.Symbol(reader_macros[token]), wrapped])
def read_atom(token):
    """Parse a single TOKEN into the corresponding Mal value.

    Recognizes, in order: integers, strings, keywords, booleans, nil and
    symbols.  The symbol regex matches any token (it allows the empty
    match), so the trailing ParseError is essentially unreachable.
    """
    # integers
    if re.match(r'\A-?[0-9]+\Z', token):
        return int(token)
    # strings
    if re.match(r'\A"(.*)"\Z', token):
        string = token[1:-1]
        # Unescape in a single left-to-right pass.  The previous chain of
        # str.replace() calls mis-handled overlapping escapes: e.g. the
        # three characters '\', '\', 'n' were turned into a real newline
        # instead of a literal backslash followed by 'n'.  Unknown escape
        # sequences are kept verbatim (backslash included), as before.
        escapes = {'"': '"', 'n': '\n', '\\': '\\'}
        return re.sub(r'\\(.)',
                      lambda m: escapes.get(m.group(1), m.group(0)),
                      string)
    # keywords
    if re.match(r'\A:.*\Z', token):
        return mal.Keyword(token)
    # boolean
    if token == "true":
        return mal.Boolean(True)
    if token == "false":
        return mal.Boolean(False)
    # nil
    if token == "nil":
        return mal.NIL
    # symbols
    if re.match(r"[^\s\[\]{}('\"`,;)]*", token):
        return mal.Symbol(token)
    # Found nothing parsable. (Shouldn't really happen, since symbols are a
    # catch-all already.)
    return mal.Error("ParseError", "Could not parse token: '{}'".
                     format(token))
def main():
    """Ad-hoc smoke test: parse and print a sample form."""
    source = '(def (fn a (b c)) (print (+ a b)))'
    print(read_str(source))


if __name__ == '__main__':
    main()
| 26.156566 | 80 | 0.556478 |
cbaeb4dc72bf61b5cb27bfdad598a1db4181d2c1 | 2,639 | py | Python | pymanopt/manifolds/complex_circle.py | NoemieJaquier/pymanopt | f3ae54b5ebc648a193e64acdb75c97885391c3d7 | [
"BSD-3-Clause"
] | 459 | 2016-02-18T18:53:30.000Z | 2022-03-30T07:28:53.000Z | pymanopt/manifolds/complex_circle.py | NoemieJaquier/pymanopt | f3ae54b5ebc648a193e64acdb75c97885391c3d7 | [
"BSD-3-Clause"
] | 131 | 2016-02-19T16:17:44.000Z | 2022-03-21T09:27:11.000Z | pymanopt/manifolds/complex_circle.py | NoemieJaquier/pymanopt | f3ae54b5ebc648a193e64acdb75c97885391c3d7 | [
"BSD-3-Clause"
] | 127 | 2016-03-11T18:47:15.000Z | 2022-03-11T06:07:37.000Z | import numpy as np
import numpy.linalg as la
import numpy.random as rnd
from pymanopt.manifolds.manifold import EuclideanEmbeddedSubmanifold
class ComplexCircle(EuclideanEmbeddedSubmanifold):
    """
    The manifold of complex numbers with unit-modulus.

    Description of vectors z in C^n (complex) such that each component z(i)
    has unit modulus. The manifold structure is the Riemannian submanifold
    structure from the embedding space R^2 x ... x R^2, i.e., the complex
    circle is identified with the unit circle in the real plane. This
    implementation is based on complexcirclefactory.m from the Manopt MATLAB
    package.
    """

    def __init__(self, dimension=1):
        """Create the product manifold (S^1)^dimension."""
        self._dimension = dimension
        if dimension == 1:
            name = "Complex circle S^1"
        else:
            name = "Complex circle (S^1)^{:d}".format(dimension)
        super().__init__(name, dimension)

    def inner(self, z, v, w):
        # Riemannian metric: real part of the Hermitian inner product.
        return v.conj().dot(w).real

    def norm(self, x, v):
        return la.norm(v)

    def dist(self, x, y):
        # Clip to [-1, 1] so floating-point round-off cannot push the
        # argument of arccos outside its domain (which yields NaN).
        return la.norm(np.arccos(np.clip((x.conj() * y).real, -1, 1)))

    @property
    def typicaldist(self):
        return np.pi * np.sqrt(self._dimension)

    def proj(self, z, u):
        # Orthogonal projection of the ambient vector u onto T_z.
        return u - (u.conj() * z).real * z

    tangent = proj

    def ehess2rhess(self, z, egrad, ehess, zdot):
        return self.proj(z, (z * egrad.conj()).real * zdot)

    def exp(self, z, v):
        # Bug fix: the result buffer must be complex.  A plain np.zeros(n)
        # is float64, and assigning complex values into it drops (or, in
        # recent NumPy, refuses) the imaginary parts.
        y = np.zeros(self._dimension, dtype=complex)
        abs_v = np.abs(v)
        mask = abs_v > 0
        not_mask = np.logical_not(mask)
        # Rotate along the circle where v is nonzero; stay put elsewhere.
        y[mask] = (z[mask] * np.cos(abs_v[mask]) +
                   v[mask] * (np.sin(abs_v[mask]) / abs_v[mask]))
        y[not_mask] = z[not_mask]
        return y

    def retr(self, z, v):
        # First-order retraction: renormalize z + v back onto the circle.
        return self._normalize(z + v)

    def log(self, x1, x2):
        v = self.proj(x1, x2 - x1)
        abs_v = np.abs(v)
        di = np.arccos(np.clip((x1.conj() * x2).real, -1, 1))
        # Entries with abs_v == 0 produce 0/0 here; they are overwritten
        # with factor 1 just below, so suppress the spurious warnings.
        with np.errstate(divide='ignore', invalid='ignore'):
            factors = di / abs_v
        factors[di <= 1e-6] = 1
        return v * factors

    def rand(self):
        # A point drawn uniformly at random on the manifold.
        dimension = self._dimension
        return self._normalize(
            rnd.randn(dimension) + 1j * rnd.randn(dimension))

    def randvec(self, z):
        # Random unit-norm tangent vector (the tangent direction is i*z).
        v = rnd.randn(self._dimension) * (1j * z)
        return v / self.norm(z, v)

    def transp(self, x1, x2, d):
        # Vector transport by projection onto the new tangent space.
        return self.proj(x2, d)

    def pairmean(self, z1, z2):
        return self._normalize(z1 + z2)

    def zerovec(self, x):
        # Complex dtype for consistency with all other tangent vectors.
        return np.zeros(self._dimension, dtype=complex)

    @staticmethod
    def _normalize(x):
        """Normalize the entries of x element-wise by their absolute values."""
        return x / np.abs(x)
| 28.684783 | 79 | 0.591891 |
715b0f19b84e255b22a6cd394870a0d73c8dbed0 | 10,790 | py | Python | bess_of_agent.py | krsna1729/twink | 6c2f1546bc2a8b1574e37e27676ef4d7a853b4bf | [
"Apache-2.0"
] | 1 | 2016-06-01T21:25:01.000Z | 2016-06-01T21:25:01.000Z | bess_of_agent.py | krsna1729/twink | 6c2f1546bc2a8b1574e37e27676ef4d7a853b4bf | [
"Apache-2.0"
] | null | null | null | bess_of_agent.py | krsna1729/twink | 6c2f1546bc2a8b1574e37e27676ef4d7a853b4bf | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
import twink
from twink.ofp4 import *
import twink.ofp4.build as b
import twink.ofp4.parse as p
import twink.ofp4.oxm as oxm
import threading
import binascii
import logging
import zerorpc
import signal
import socket
import errno
import time
import sys
import os
from collections import namedtuple
logging.basicConfig(level=logging.ERROR)
PHY_NAME = "dpif"
dp = 'bess_datapath_instance'
ofp_port_names = '''port_no hw_addr name config state
curr advertised supported peer curr_speed max_speed
pkt_inout_socket'''
of_port = namedtuple('of_port', ofp_port_names)
default_port = of_port('<port no>', '<mac address>', '<port name>', 0, 0,
0x802, 0, 0, 0, 0, 0,
None)
of_ports = {
OFPP_LOCAL: default_port._replace(port_no=OFPP_LOCAL, hw_addr=binascii.a2b_hex("0000deadbeef"), name='br-int', curr=0),
1: default_port._replace(port_no=1, hw_addr=binascii.a2b_hex("0000deaddead"), name='vxlan', curr=0),
2: default_port._replace(port_no=2, hw_addr=binascii.a2b_hex("000000000001"), name=PHY_NAME, curr=0),
}
flows = {}
channel = 0
# Locate the BESS Python bindings (path taken from $BESSDK, defaulting to
# /opt/bess) and import them; the agent cannot run without them.
try:
    BESS_PATH = os.getenv('BESSDK','/opt/bess')
    sys.path.insert(1, '%s/libbess-python' % BESS_PATH)
    from bess import *
except ImportError as e:
    print >> sys.stderr, 'Cannot import the API module (libbess-python)', e.message
    sys.exit()
def connect_bess():
    """Open a connection to the local BESS daemon, exiting on failure."""
    handle = BESS()
    try:
        handle.connect()
    except handle.APIError as err:
        print >> sys.stderr, err.message
        sys.exit()
    else:
        return handle
def init_phy_port(bess, name, port_id):
try:
result = bess.create_port('PMD', name, {'port_id': port_id})
bess.resume_all()
except (bess.APIError, bess.Error)as err:
print err.message
return {'name': None}
else:
return result
# Naming templates for the per-port packet-in/out UnixSocket port and its
# abstract-namespace socket path ('%s' is the OpenFlow port name).
PKTINOUT_NAME = 'pktinout_%s'
SOCKET_PATH = '/tmp/bess_unix_' + PKTINOUT_NAME
def init_pktinout_port(bess, name):
    """Create a packet-in/out path for OpenFlow port NAME.

    Creates a BESS UnixSocket port, connects a SEQPACKET socket to it and
    (for regular ports) wires PortInc(pktinout) -> PortOut(name) so data
    written on the socket is emitted on the datapath port.  Returns
    (port_descriptor, socket) on success, (None, None) for 'br-int', or
    ({'name': None}, None) on error.  Pauses/resumes the BESS pipeline
    around the reconfiguration.
    """
    # br-int alone or vxlan too?
    if name == 'br-int':
        return None, None
    try:
        bess.pause_all()
        result = bess.create_port('UnixSocket', PKTINOUT_NAME % name, {'path': '@' + SOCKET_PATH % name})
        # Leading NUL selects the Linux abstract socket namespace.
        s = socket.socket(socket.AF_UNIX, socket.SOCK_SEQPACKET)
        s.connect('\0' + SOCKET_PATH % name)
    except (bess.APIError, bess.Error, socket.error) as err:
        print err
        bess.resume_all()
        return {'name': None}, None
    else:
        # TODO: Handle VxLAN PacketOut if Correct/&Reqd. Create PI connect PI_pktinout_vxlan-->Encap()-->PO_dpif
        if name != 'vxlan':
            bess.create_module('PortInc', 'PI_' + PKTINOUT_NAME % name, {'port': PKTINOUT_NAME % name})
            bess.create_module('PortOut', 'PO_' + name, {'port': name})
            bess.connect_modules('PI_' + PKTINOUT_NAME % name, 'PO_' + name)
        bess.resume_all()
        return result, s
def deinit_pktinout_port(bess, name):
    """Tear down the packet-in/out path created by init_pktinout_port().

    Disconnects and destroys the PortInc/PortOut modules (except for the
    vxlan port, which has none) and the UnixSocket port.  The pipeline is
    paused during the teardown and resumed on both success and failure.
    """
    # br-int alone or vxlan too?
    if name == 'br-int':
        return
    try:
        bess.pause_all()
        # TODO: Handle VxLAN PacketOut if Correct/&Reqd. Create PI connect PI_pktinout_vxlan-->Encap()-->PO_dpif
        if name != 'vxlan':
            bess.disconnect_modules('PI_' + PKTINOUT_NAME % name, 0)
            bess.destroy_module('PI_' + PKTINOUT_NAME % name)
            bess.destroy_module('PO_' + name)
        bess.destroy_port(PKTINOUT_NAME % name)
        bess.resume_all()
        return
    except (bess.APIError, bess.Error)as err:
        bess.resume_all()
        print err.message
def switch_proc(message, ofchannel):
    """Handle one OpenFlow 1.3 message from the controller.

    Parses MESSAGE, replies to feature/config/role/multipart requests with
    static switch data, records FlowMods in the global `flows` table, and
    forwards PacketOut payloads onto the matching pkt_inout_socket.
    Replies are sent on the global `channel` (ofchannel is unused).
    """
    msg = p.parse(message)
    # TODO: Acquire lock
    if msg.header.type == OFPT_FEATURES_REQUEST:
        channel.send(b.ofp_switch_features(b.ofp_header(4, OFPT_FEATURES_REPLY, 0, msg.header.xid), 1, 2, 3, 0, 0xF))
    elif msg.header.type == OFPT_GET_CONFIG_REQUEST:
        channel.send(b.ofp_switch_config(b.ofp_header(4, OFPT_GET_CONFIG_REPLY, 0, msg.header.xid), 0, 0xffe5))
    elif msg.header.type == OFPT_ROLE_REQUEST:
        # Echo the requested role/generation back (always accept).
        channel.send(b.ofp_role_request(b.ofp_header(4, OFPT_ROLE_REPLY, 0, msg.header.xid), msg.role, msg.generation_id))
    elif msg.header.type == OFPT_FLOW_MOD:
        if msg.cookie not in flows:
            flows[msg.cookie] = msg
        else:
            print "I already have this FlowMod: Cookie", \
                msg.cookie, oxm.parse_list(flows[msg.cookie].match), (flows[msg.cookie].instructions)
    elif msg.header.type == OFPT_MULTIPART_REQUEST:
        if msg.type == OFPMP_FLOW:
            # Flow stats built from the recorded FlowMods (counters are stubbed).
            channel.send(b.ofp_multipart_reply(b.ofp_header(4, OFPT_MULTIPART_REPLY, 0, msg.header.xid),
                         msg.type, 0, ["".join(b.ofp_flow_stats(None, f.table_id, 1, 2, f.priority,
                                       f.idle_timeout, f.hard_timeout, f.flags, f.cookie, 0, 0,
                                       f.match, f.instructions)
                                       for f in flows.itervalues())]))
        elif msg.type == OFPMP_PORT_DESC:
            channel.send(b.ofp_multipart_reply(b.ofp_header(4, OFPT_MULTIPART_REPLY, 0, msg.header.xid),
                         msg.type, 0, ["".join(b.ofp_port(ofp.port_no, ofp.hw_addr, ofp.name, ofp.config, ofp.state,
                                       ofp.curr, ofp.advertised, ofp.supported, ofp.peer, ofp.curr_speed, ofp.max_speed)
                                       for ofp in of_ports.itervalues())]))
        elif msg.type == OFPMP_DESC:
            channel.send(b.ofp_multipart_reply(b.ofp_header(4, OFPT_MULTIPART_REPLY, 0, msg.header.xid),
                         msg.type, 0,
                         b.ofp_desc("UC Berkeley", "Intel Xeon", "BESS", "commit-6e343", None)))
    elif msg.header.type == OFPT_PACKET_OUT:
        # Forward the payload to the datapath through the port's socket.
        index = msg.actions[0].port
        print "Packet out OF Port %d, Len:%d" % (index, len(msg.data))
        sock = of_ports[index].pkt_inout_socket
        if sock is not None:
            sent = sock.send(msg.data)
            if sent != len(msg.data):
                print "Incomplete Transmission Sent:%d, Len:%d" % (sent, len(msg.data))
        else:
            print "Packet out OF Port %d, Len:%d. Failed - Null socket" % (index, len(msg.data))
    elif msg.header.type == OFPT_HELLO:
        pass
    elif msg.header.type == OFPT_SET_CONFIG:
        pass
    elif msg.header.type == OFPT_BARRIER_REQUEST:
        pass
    else:
        print msg
        assert 0
    # TODO: Release lock
def of_agent_start(ctl_ip='127.0.0.1', port=6653):
    """Connect to the OpenFlow controller and start the switch loop.

    Creates a twink channel (OF 1.3 only) that dispatches messages to
    switch_proc(), stores it in the global `channel`, and runs its loop in
    a daemon thread.  Returns errno.ECONNREFUSED when the controller is
    not reachable (callers retry); other socket errors are re-raised.
    """
    global channel
    # NOTE: deliberately shadows the stdlib socket module with twink's
    # cooperative-scheduling socket implementation.
    socket = twink.sched.socket
    try:
        s = socket.create_connection((ctl_ip, port),)
    except socket.error as err:
        if err.errno != errno.ECONNREFUSED:
            raise err
        print 'Is the controller running at %s:%d' % (ctl_ip, port)
        return errno.ECONNREFUSED
    # Build the channel class on the fly: auto-replies to echo requests,
    # logs traffic, and hands everything else to switch_proc().
    ch = type("Switch", (
        twink.AutoEchoChannel,
        twink.LoggingChannel,), {
        "accept_versions": [4, ],
        "handle": staticmethod(switch_proc)
    })()
    ch.attach(s)
    channel = ch
    t1 = threading.Thread(name="Switch Loop", target=ch.loop)
    t1.setDaemon(True)
    t1.start()
class PortManager(object):
    """ZeroRPC service that hot-plugs vhost-user ports into the datapath.

    Exposed methods return human-readable status strings for the caller
    (e.g. a Nova agent).  OpenFlow port numbers are assigned sequentially
    starting after the pre-populated ports.
    """
    def __init__(self):
        # Last assigned OpenFlow port number (ports 1-2 are pre-populated).
        self.of_port_num = 2
    def add_port(self, dev, mac):
        """Create vhost-user port DEV with MAC and announce it via OFPT_PORT_STATUS."""
        if (self.of_port_num + 1) == OFPP_LOCAL:
            return "Unable to add dev: %s. Reached max OF port_num" % dev
        self.of_port_num += 1
        # Create vhost-user port
        ret = dp.create_port('vhost_user', dev)
        # Create corresponding pkt_in_out port
        ret, sock = init_pktinout_port(dp, dev)
        of_ports[self.of_port_num] = default_port._replace(
            port_no=self.of_port_num, hw_addr=binascii.a2b_hex(mac[:12]), name=dev, pkt_inout_socket=sock)
        ofp = of_ports[self.of_port_num]
        # Tell the controller about the new port.
        channel.send(b.ofp_port_status(b.ofp_header(4, OFPT_PORT_STATUS, 0, 0), OFPPR_ADD,
                     b.ofp_port(ofp.port_no, ofp.hw_addr, ofp.name, ofp.config, ofp.state,
                     ofp.curr, ofp.advertised, ofp.supported, ofp.peer, ofp.curr_speed, ofp.max_speed
                     )))
        print 'Current OF ports:\n', of_ports
        return "Successfully added dev: %s with MAC: %s as ofport:%d" % (dev, mac, self.of_port_num)
    def del_port(self, dev):
        """Remove port DEV: notify the controller, tear down its pkt-in/out path."""
        for port_no, port_details in of_ports.iteritems():
            if port_details.name == dev:
                ofp = of_ports[port_no]
                channel.send(b.ofp_port_status(b.ofp_header(4, OFPT_PORT_STATUS, 0, 0), OFPPR_DELETE,
                             b.ofp_port(ofp.port_no, ofp.hw_addr, ofp.name, ofp.config, ofp.state,
                             ofp.curr, ofp.advertised, ofp.supported, ofp.peer,
                             ofp.curr_speed, ofp.max_speed
                             )))
                deinit_pktinout_port(dp, port_details.name)
                dp.destroy_port(dev)
                del of_ports[port_no]
                print 'Current OF ports:\n', of_ports
                return "Successfully deleted dev: %s which was ofport: %d" % (dev, port_no)
        return "Unable to locate dev: %s" % dev
def nova_agent_start():
s = zerorpc.Server(PortManager())
s.bind("tcp://0.0.0.0:10515")
print "Port Manager listening on 10515"
#blocks?
s.run()
def print_stupid():
while 1:
print "####################### Stupid #######################"
time.sleep(2)
pass
if __name__ == "__main__":
    # Attach to the BESS daemon and make the handle globally visible.
    dp = connect_bess()
    dp.resume_all()
    # On SIGINT/SIGTERM, quiesce and reset the datapath before exiting.
    def cleanup(*args):
        dp.pause_all()
        dp.reset_all()
        sys.exit()
    signal.signal(signal.SIGINT, cleanup)
    signal.signal(signal.SIGTERM, cleanup)
    if init_phy_port(dp, PHY_NAME, 0)['name'] == PHY_NAME:
        print "Successfully created PMD port : %s" % PHY_NAME
    else:
        print 'Failed to create PMD port. Check if it exists already'
    print 'Initial list of Openflow ports', of_ports
    # Wire a packet-in/out socket for each pre-populated OpenFlow port.
    for port_num, port in of_ports.iteritems():
        ret, sock = init_pktinout_port(dp, port.name)
        of_ports[port_num] = of_ports[port_num]._replace(pkt_inout_socket=sock)
        print ret, ' ', of_ports[port_num].pkt_inout_socket
    # Busy-retry until the OpenFlow controller accepts the connection.
    while of_agent_start() == errno.ECONNREFUSED:
        pass
    # TODO: Start a thread that will select poll on all of those UNIX sockets
    t2 = threading.Thread(name="Stupid Thread", target=print_stupid)
    t2.setDaemon(True)
    t2.start()
    # Blocks serving ZeroRPC port-management requests.
    nova_agent_start()
    signal.pause()
| 34.919094 | 139 | 0.590732 |
2b35182f7e8e2357b7ad572f469ba5a904a6c5e8 | 34,875 | py | Python | lithops/executors.py | gfinol/lithops | e165a78e0facbb70c852d7627a7407e8a8d1b946 | [
"Apache-2.0"
] | null | null | null | lithops/executors.py | gfinol/lithops | e165a78e0facbb70c852d7627a7407e8a8d1b946 | [
"Apache-2.0"
] | null | null | null | lithops/executors.py | gfinol/lithops | e165a78e0facbb70c852d7627a7407e8a8d1b946 | [
"Apache-2.0"
] | null | null | null | #
# (C) Copyright IBM Corp. 2020
# (C) Copyright Cloudlab URV 2020
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import copy
import signal
import logging
import atexit
import pickle
import tempfile
import numpy as np
import subprocess as sp
from datetime import datetime
from functools import partial
from lithops import constants
from lithops.invokers import ServerlessInvoker, StandaloneInvoker, CustomizedRuntimeInvoker
from lithops.storage import InternalStorage
from lithops.wait import wait_storage, wait_rabbitmq, ALL_COMPLETED
from lithops.job import create_map_job, create_reduce_job
from lithops.config import get_mode, default_config, \
extract_localhost_config, extract_standalone_config, \
extract_serverless_config, get_log_info, extract_storage_config
from lithops.constants import LOCALHOST, SERVERLESS, STANDALONE, CLEANER_DIR, \
CLEANER_LOG_FILE
from lithops.utils import timeout_handler, is_notebook, setup_lithops_logger, \
is_unix_system, is_lithops_worker, create_executor_id
from lithops.localhost.localhost import LocalhostHandler
from lithops.standalone.standalone import StandaloneHandler
from lithops.serverless.serverless import ServerlessHandler
from lithops.storage.utils import create_job_key
logger = logging.getLogger(__name__)
class FunctionExecutor:
"""
Executor abstract class that contains the common logic
for the Localhost, Serverless and Standalone executors
"""
    def __init__(self, mode=None, config=None, backend=None, storage=None,
                 runtime=None, runtime_memory=None, rabbitmq_monitor=None,
                 workers=None, remote_invoker=None, log_level=False):
        """Create a FunctionExecutor.

        Keyword arguments override the corresponding entries of the loaded
        Lithops configuration.  `mode` must be 'localhost', 'serverless'
        or 'standalone'; when omitted it is derived from `backend`/config.
        """
        if mode and mode not in [LOCALHOST, SERVERLESS, STANDALONE]:
            raise Exception("Function executor mode must be one of '{}', '{}' "
                            "or '{}'".format(LOCALHOST, SERVERLESS, STANDALONE))
        self.is_lithops_worker = is_lithops_worker()
        # setup lithops logging
        if not self.is_lithops_worker:
            # if is lithops worker, logging has been set up in entry_point.py
            if log_level:
                setup_lithops_logger(log_level)
            elif log_level is False and logger.getEffectiveLevel() == logging.WARNING:
                # Set default logging from config
                setup_lithops_logger(*get_log_info(config))
        self.setup_progressbar = (not self.is_lithops_worker and
                                  log_level is not None
                                  and logger.getEffectiveLevel() == logging.INFO)
        # load mode of execution
        mode = mode or get_mode(backend, config)
        config_ow = {'lithops': {'mode': mode}, mode: {}}
        # overwrite user-provided parameters
        if runtime is not None:
            config_ow[mode]['runtime'] = runtime
        if backend is not None:
            config_ow[mode]['backend'] = backend
        if runtime_memory is not None:
            config_ow[mode]['runtime_memory'] = int(runtime_memory)
        if remote_invoker is not None:
            config_ow[mode]['remote_invoker'] = remote_invoker
        if storage is not None:
            config_ow['lithops']['storage'] = storage
        if workers is not None:
            config_ow['lithops']['workers'] = workers
        if rabbitmq_monitor is not None:
            config_ow['lithops']['rabbitmq_monitor'] = rabbitmq_monitor
        self.config = default_config(copy.deepcopy(config), config_ow)
        self.executor_id = create_executor_id()
        # Register the cleanup hook once per process (only for the first
        # executor created, id suffix 0) unless data_cleaner is disabled.
        self.data_cleaner = self.config['lithops'].get('data_cleaner', True)
        if self.data_cleaner and not self.is_lithops_worker:
            spawn_cleaner = int(self.executor_id.split('-')[1]) == 0
            atexit.register(self.clean, spawn_cleaner=spawn_cleaner,
                            clean_cloudobjects=False)
        # RabbitMQ-based job monitoring requires an amqp_url in the config.
        self.rabbitmq_monitor = self.config['lithops'].get('rabbitmq_monitor', False)
        if self.rabbitmq_monitor:
            if 'rabbitmq' in self.config and 'amqp_url' in self.config['rabbitmq']:
                self.rabbit_amqp_url = self.config['rabbitmq'].get('amqp_url')
            else:
                raise Exception("You cannot use rabbitmq_mnonitor since "
                                "'amqp_url' is not present in configuration")
        # Storage client used for job/result data.
        storage_config = extract_storage_config(self.config)
        self.internal_storage = InternalStorage(storage_config)
        self.storage = self.internal_storage.storage
        # Per-executor bookkeeping.
        self.futures = []
        self.cleaned_jobs = set()
        self.total_jobs = 0
        self.last_call = None
        # Instantiate the compute handler and invoker for the chosen mode.
        if mode == LOCALHOST:
            localhost_config = extract_localhost_config(self.config)
            self.compute_handler = LocalhostHandler(localhost_config)
            self.invoker = StandaloneInvoker(self.config,
                                             self.executor_id,
                                             self.internal_storage,
                                             self.compute_handler)
        elif mode == SERVERLESS:
            serverless_config = extract_serverless_config(self.config)
            self.compute_handler = ServerlessHandler(serverless_config,
                                                     self.internal_storage)
            if self.config[mode].get('customized_runtime'):
                self.invoker = CustomizedRuntimeInvoker(self.config,
                                                        self.executor_id,
                                                        self.internal_storage,
                                                        self.compute_handler)
            else:
                self.invoker = ServerlessInvoker(self.config,
                                                 self.executor_id,
                                                 self.internal_storage,
                                                 self.compute_handler)
        elif mode == STANDALONE:
            standalone_config = extract_standalone_config(self.config)
            self.compute_handler = StandaloneHandler(standalone_config)
            self.invoker = StandaloneInvoker(self.config,
                                             self.executor_id,
                                             self.internal_storage,
                                             self.compute_handler)
        logger.info('{} Executor created with ID: {}'
                    .format(mode.capitalize(), self.executor_id))
        self.log_path = None
    def __enter__(self):
        """Context-manager entry: return the executor itself."""
        return self
def _create_job_id(self, call_type):
job_id = str(self.total_jobs).zfill(3)
self.total_jobs += 1
return '{}{}'.format(call_type, job_id)
def call_async(self, func, data, extra_env=None, runtime_memory=None,
timeout=None, include_modules=[], exclude_modules=[]):
"""
For running one function execution asynchronously
:param func: the function to map over the data
:param data: input data
:param extra_env: Additional env variables for action environment
:param runtime_memory: Memory to use to run the function
:param timeout: Time that the functions have to complete their
execution before raising a timeout
:param include_modules: Explicitly pickle these dependencies
:param exclude_modules: Explicitly keep these modules from pickled
dependencies
:return: future object.
"""
job_id = self._create_job_id('A')
self.last_call = 'call_async'
runtime_meta = self.invoker.select_runtime(job_id, runtime_memory)
job = create_map_job(self.config, self.internal_storage,
self.executor_id, job_id,
map_function=func,
iterdata=[data],
runtime_meta=runtime_meta,
runtime_memory=runtime_memory,
extra_env=extra_env,
include_modules=include_modules,
exclude_modules=exclude_modules,
execution_timeout=timeout)
futures = self.invoker.run(job)
self.futures.extend(futures)
return futures[0]
    def map(self, map_function, map_iterdata, chunksize=None, worker_processes=None,
            extra_args=None, extra_env=None, runtime_memory=None, chunk_size=None,
            chunk_n=None, obj_chunk_size=None, obj_chunk_number=None, timeout=None,
            invoke_pool_threads=None, include_modules=[], exclude_modules=[]):
        """
        For running multiple function executions asynchronously
        :param map_function: the function to map over the data
        :param map_iterdata: An iterable of input data
        :param chunksize: Split map_iteradata in chunks of this size.
                          Lithops spawns 1 worker per resulting chunk. Default 1
        :param worker_processes: Number of concurrent/parallel processes in each worker. Default 1
        :param extra_args: Additional args to pass to the function activations
        :param extra_env: Additional env variables for action environment
        :param runtime_memory: Memory to use to run the function
        :param chunk_size: presumably a legacy alias of obj_chunk_size -- TODO confirm
        :param chunk_n: presumably a legacy alias of obj_chunk_number -- TODO confirm
        :param obj_chunk_size: the size of the data chunks to split each object.
                               'None' for processing the whole file in one function
                               activation.
        :param obj_chunk_number: Number of chunks to split each object. 'None' for
                                 processing the whole file in one function activation
        :param timeout: Time that the functions have to complete their execution
                        before raising a timeout
        :param invoke_pool_threads: Number of threads to use to invoke
        :param include_modules: Explicitly pickle these dependencies
        :param exclude_modules: Explicitly keep these modules from pickled
                                dependencies
        :return: A list with size `len(iterdata)` of futures.
        """
        # 'M' prefix marks map jobs in the job id.
        job_id = self._create_job_id('M')
        self.last_call = 'map'
        runtime_meta = self.invoker.select_runtime(job_id, runtime_memory)
        job = create_map_job(self.config, self.internal_storage,
                             self.executor_id, job_id,
                             map_function=map_function,
                             iterdata=map_iterdata,
                             chunksize=chunksize,
                             worker_processes=worker_processes,
                             runtime_meta=runtime_meta,
                             runtime_memory=runtime_memory,
                             extra_env=extra_env,
                             include_modules=include_modules,
                             exclude_modules=exclude_modules,
                             execution_timeout=timeout,
                             extra_args=extra_args,
                             chunk_size=chunk_size,
                             chunk_n=chunk_n,
                             obj_chunk_size=obj_chunk_size,
                             obj_chunk_number=obj_chunk_number,
                             invoke_pool_threads=invoke_pool_threads)
        futures = self.invoker.run(job)
        self.futures.extend(futures)
        return futures
    def map_reduce(self, map_function, map_iterdata, reduce_function, chunksize=None,
                   worker_processes=None, extra_args=None, extra_env=None,
                   map_runtime_memory=None, obj_chunk_size=None, obj_chunk_number=None,
                   reduce_runtime_memory=None, chunk_size=None, chunk_n=None,
                   timeout=None, invoke_pool_threads=None, reducer_one_per_object=False,
                   reducer_wait_local=False, include_modules=[], exclude_modules=[]):
        """
        Map the map_function over the data and apply the reduce_function across all futures.
        This method is executed all within CF.
        :param map_function: the function to map over the data
        :param map_iterdata: An iterable of input data
        :param chunksize: Split map_iteradata in chunks of this size.
                          Lithops spawns 1 worker per resulting chunk. Default 1
        :param worker_processes: Number of concurrent/parallel processes in each worker Default 1
        :param reduce_function: the function to reduce over the futures
        :param extra_env: Additional environment variables for action environment. Default None.
        :param extra_args: Additional arguments to pass to function activation. Default None.
        :param map_runtime_memory: Memory to use to run the map function. Default None (loaded from config).
        :param reduce_runtime_memory: Memory to use to run the reduce function. Default None (loaded from config).
        :param obj_chunk_size: the size of the data chunks to split each object. 'None' for processing
                               the whole file in one function activation.
        :param obj_chunk_number: Number of chunks to split each object. 'None' for processing the whole
                                 file in one function activation.
        :param timeout: Time that the functions have to complete their execution before raising a timeout.
        :param reducer_one_per_object: Set one reducer per object after running the partitioner
        :param reducer_wait_local: Wait for results locally
        :param invoke_pool_threads: Number of threads to use to invoke.
        :param include_modules: Explicitly pickle these dependencies.
        :param exclude_modules: Explicitly keep these modules from pickled dependencies.
        :return: A list with size `len(map_iterdata)` of futures.
        """
        self.last_call = 'map_reduce'
        map_job_id = self._create_job_id('M')
        runtime_meta = self.invoker.select_runtime(map_job_id, map_runtime_memory)
        map_job = create_map_job(self.config, self.internal_storage,
                                 self.executor_id, map_job_id,
                                 map_function=map_function,
                                 iterdata=map_iterdata,
                                 chunksize=chunksize,
                                 worker_processes=worker_processes,
                                 runtime_meta=runtime_meta,
                                 runtime_memory=map_runtime_memory,
                                 extra_args=extra_args,
                                 extra_env=extra_env,
                                 chunk_size=chunk_size,
                                 chunk_n=chunk_n,
                                 obj_chunk_size=obj_chunk_size,
                                 obj_chunk_number=obj_chunk_number,
                                 include_modules=include_modules,
                                 exclude_modules=exclude_modules,
                                 execution_timeout=timeout,
                                 invoke_pool_threads=invoke_pool_threads)
        map_futures = self.invoker.run(map_job)
        self.futures.extend(map_futures)
        if reducer_wait_local:
            # Block here until all map activations finish before invoking reduce.
            self.wait(fs=map_futures)
        # Reduce job shares the numeric id of its map job, with an 'R' prefix.
        reduce_job_id = map_job_id.replace('M', 'R')
        runtime_meta = self.invoker.select_runtime(reduce_job_id, reduce_runtime_memory)
        reduce_job = create_reduce_job(self.config, self.internal_storage,
                                       self.executor_id, reduce_job_id,
                                       reduce_function, map_job, map_futures,
                                       runtime_meta=runtime_meta,
                                       runtime_memory=reduce_runtime_memory,
                                       reducer_one_per_object=reducer_one_per_object,
                                       extra_env=extra_env,
                                       include_modules=include_modules,
                                       exclude_modules=exclude_modules)
        reduce_futures = self.invoker.run(reduce_job)
        self.futures.extend(reduce_futures)
        for f in map_futures:
            # Suppress map outputs in get_result(): only reduce results are returned.
            f._produce_output = False
        return map_futures + reduce_futures
    def wait(self, fs=None, throw_except=True, return_when=ALL_COMPLETED,
             download_results=False, timeout=None, THREADPOOL_SIZE=128,
             WAIT_DUR_SEC=1):
        """
        Wait for the Future instances (possibly created by different Executor instances)
        given by fs to complete. Returns a named 2-tuple of sets. The first set, named done,
        contains the futures that completed (finished or cancelled futures) before the wait
        completed. The second set, named not_done, contains the futures that did not complete
        (pending or running futures). timeout can be used to control the maximum number of
        seconds to wait before returning.
        :param fs: Futures list. Default None
        :param throw_except: Re-raise exception if call raised. Default True.
        :param return_when: One of `ALL_COMPLETED`, `ANY_COMPLETED`, `ALWAYS`
        :param download_results: Download results. Default false (Only get statuses)
        :param timeout: Timeout of waiting for results.
        :param THREADPOOL_SIZE: Number of threads to use. Default 128
        :param WAIT_DUR_SEC: Time interval between each check.
        :return: `(fs_done, fs_notdone)`
            where `fs_done` is a list of futures that have completed
            and `fs_notdone` is a list of futures that have not completed.
        :rtype: 2-tuple of list
        """
        futures = fs or self.futures
        if type(futures) != list:
            futures = [futures]
        if not futures:
            raise Exception('You must run the call_async(), map() or map_reduce(), or provide'
                            ' a list of futures before calling the wait()/get_result() method')
        if download_results:
            msg = 'ExecutorID {} - Getting results'.format(self.executor_id)
            # 'done' means results already downloaded.
            fs_done = [f for f in futures if f.done]
            fs_not_done = [f for f in futures if not f.done]
            # fs_not_ready = [f for f in futures if not f.ready and not f.done]
        else:
            msg = 'ExecutorID {} - Waiting for functions to complete'.format(self.executor_id)
            # Status-only wait: a 'ready' future counts as finished.
            fs_done = [f for f in futures if f.ready or f.done]
            fs_not_done = [f for f in futures if not f.done]
            # fs_not_ready = [f for f in futures if not f.ready and not f.done]
        if not fs_not_done:
            return fs_done, fs_not_done
        logger.info(msg)
        # On Unix, enforce the overall timeout via SIGALRM.
        if is_unix_system() and timeout is not None:
            logger.debug('Setting waiting timeout to {} seconds'.format(timeout))
            error_msg = ('Timeout of {} seconds exceeded waiting for '
                         'function activations to finish'.format(timeout))
            signal.signal(signal.SIGALRM, partial(timeout_handler, error_msg))
            signal.alarm(timeout)
        # Setup progress bar
        pbar = None
        if not self.is_lithops_worker and self.setup_progressbar:
            from tqdm.auto import tqdm
            if not is_notebook():
                print()
            pbar = tqdm(bar_format='  {l_bar}{bar}| {n_fmt}/{total_fmt}  ',
                        total=len(fs_not_done), disable=None)
        # Start waiting for results
        error = False
        try:
            if self.rabbitmq_monitor:
                wait_rabbitmq(futures, self.internal_storage,
                              rabbit_amqp_url=self.rabbit_amqp_url,
                              download_results=download_results,
                              throw_except=throw_except,
                              pbar=pbar, return_when=return_when,
                              THREADPOOL_SIZE=THREADPOOL_SIZE)
            else:
                wait_storage(futures, self.internal_storage,
                             download_results=download_results,
                             throw_except=throw_except,
                             return_when=return_when, pbar=pbar,
                             THREADPOOL_SIZE=THREADPOOL_SIZE,
                             WAIT_DUR_SEC=WAIT_DUR_SEC)
        except KeyboardInterrupt as e:
            if download_results:
                not_dones_call_ids = [(f.job_id, f.call_id)
                                      for f in futures if not f.done]
            else:
                not_dones_call_ids = [(f.job_id, f.call_id)
                                      for f in futures if not f.ready and not f.done]
            msg = ('ExecutorID {} - Cancelled - Total Activations not done: {}'
                   .format(self.executor_id, len(not_dones_call_ids)))
            if pbar and not pbar.disable:
                pbar.close()
                print()
            logger.info(msg)
            error = True
            # force=True: clean even jobs that did not finish.
            if self.data_cleaner and not self.is_lithops_worker:
                self.clean(clean_cloudobjects=False, force=True)
            raise e
        except Exception as e:
            if pbar and not pbar.disable:
                pbar.close()
                print()
            error = True
            if self.data_cleaner and not self.is_lithops_worker:
                self.clean(clean_cloudobjects=False, force=True)
            raise e
        finally:
            self.invoker.stop()
            if is_unix_system():
                # Always disarm the alarm, even on the success path.
                signal.alarm(0)
            if pbar and not pbar.disable:
                pbar.close()
                if not is_notebook():
                    print()
            if self.data_cleaner and not self.is_lithops_worker:
                self.clean(clean_cloudobjects=False)
            if not fs and error and is_notebook():
                # Drop the failed futures so the notebook cell can be re-run.
                del self.futures[len(self.futures) - len(futures):]
        if download_results:
            fs_done = [f for f in futures if f.done]
            fs_notdone = [f for f in futures if not f.done]
        else:
            fs_done = [f for f in futures if f.ready or f.done]
            fs_notdone = [f for f in futures if not f.ready and not f.done]
        return fs_done, fs_notdone
    def get_result(self, fs=None, throw_except=True, timeout=None,
                   THREADPOOL_SIZE=128, WAIT_DUR_SEC=1):
        """
        For getting the results from all function activations
        :param fs: Futures list. Default None
        :param throw_except: Reraise exception if call raised. Default True.
        :param timeout: Timeout for waiting for results.
        :param THREADPOOL_SIZE: Number of threads to use. Default 128
        :param WAIT_DUR_SEC: Time interval between each check.
        :return: The result of the future/s
        """
        fs_done, _ = self.wait(fs=fs, throw_except=throw_except,
                               timeout=timeout, download_results=True,
                               THREADPOOL_SIZE=THREADPOOL_SIZE,
                               WAIT_DUR_SEC=WAIT_DUR_SEC)
        result = []
        # Skip futures that spawned sub-futures or had output suppressed
        # (map futures of a map_reduce set _produce_output = False).
        fs_done = [f for f in fs_done if not f.futures and f._produce_output]
        for f in fs_done:
            if fs:
                # Process futures provided by the user
                result.append(f.result(throw_except=throw_except,
                                       internal_storage=self.internal_storage))
            elif not fs and not f._read:
                # Process internally stored futures
                result.append(f.result(throw_except=throw_except,
                                       internal_storage=self.internal_storage))
                f._read = True
        logger.debug("ExecutorID {} Finished getting results"
                     .format(self.executor_id))
        # Unwrap a single result, except for map() which always returns a list.
        if len(result) == 1 and self.last_call != 'map':
            return result[0]
        return result
    def plot(self, fs=None, dst=None):
        """
        Creates timeline and histogram plots of the current execution.
        :param fs: list of futures to plot (defaults to the executor's futures).
        :param dst: destination path prefix for the generated .png plots.
        """
        ftrs = self.futures if not fs else fs
        if type(ftrs) != list:
            ftrs = [ftrs]
        # Only completed, non-errored futures carry the stats needed to plot.
        ftrs_to_plot = [f for f in ftrs if (f.ready or f.done) and not f.error]
        if not ftrs_to_plot:
            logger.debug('ExecutorID {} - No futures ready to plot'
                         .format(self.executor_id))
            return
        # Silence matplotlib's verbose DEBUG logging.
        logging.getLogger('matplotlib').setLevel(logging.WARNING)
        # Imported lazily: plotting deps are only needed here.
        from lithops.plots import create_timeline, create_histogram
        logger.info('ExecutorID {} - Creating execution plots'.format(self.executor_id))
        create_timeline(ftrs_to_plot, dst)
        create_histogram(ftrs_to_plot, dst)
    def clean(self, fs=None, cs=None, clean_cloudobjects=True, spawn_cleaner=True, force=False):
        """
        Deletes all the temp files from storage. These files include the function,
        the data serialization and the function invocation results. It can also clean
        cloudobjects.
        :param fs: list of futures to clean
        :param cs: list of cloudobjects to clean
        :param clean_cloudobjects: true/false
        :param spawn_cleaner: true/false, spawn the background cleaner process
        :param force: clean jobs even if their futures are not done
        """
        os.makedirs(CLEANER_DIR, exist_ok=True)

        def save_data_to_clean(data):
            # Drop a pickled work item for the background cleaner process.
            with tempfile.NamedTemporaryFile(dir=CLEANER_DIR, delete=False) as temp:
                pickle.dump(data, temp)

        if cs:
            data = {'cos_to_clean': list(cs),
                    'storage_config': self.internal_storage.get_storage_config()}
            save_data_to_clean(data)
            if not fs:
                return
        futures = fs or self.futures
        futures = [futures] if type(futures) != list else futures
        # NOTE(review): executor_id.count('-') == 1 presumably filters out
        # sub-executor ids -- confirm against executor id format.
        present_jobs = {create_job_key(f.executor_id, f.job_id) for f in futures
                        if (f.executor_id.count('-') == 1 and f.done) or force}
        jobs_to_clean = present_jobs - self.cleaned_jobs
        if jobs_to_clean:
            logger.info("ExecutorID {} - Cleaning temporary data"
                        .format(self.executor_id))
            data = {'jobs_to_clean': jobs_to_clean,
                    'clean_cloudobjects': clean_cloudobjects,
                    'storage_config': self.internal_storage.get_storage_config()}
            save_data_to_clean(data)
            self.cleaned_jobs.update(jobs_to_clean)
            self.compute_handler.clear()
        if (jobs_to_clean or cs) and spawn_cleaner:
            # The log file handle is inherited by the child cleaner process;
            # it is deliberately not closed here.
            log_file = open(CLEANER_LOG_FILE, 'a')
            cmdstr = '{} -m lithops.scripts.cleaner'.format(sys.executable)
            sp.Popen(cmdstr, shell=True, stdout=log_file, stderr=log_file)
    def dismantle(self):
        """Delegate teardown of backend resources to the compute handler."""
        self.compute_handler.dismantle()
    def __exit__(self, exc_type, exc_value, traceback):
        """Context-manager exit: stop the invoker regardless of exceptions."""
        self.invoker.stop()
def job_summary(self, cloud_objects_n=0):
"""
logs information of a job executed by the calling function executor.
currently supports: code_engine, ibm_vpc and ibm_cf.
on future commits, support will extend to code_engine and ibm_vpc :
:param cloud_objects_n: number of cloud object used in COS, declared by user.
"""
import pandas as pd
def init():
headers = ['Job_ID', 'Function', 'Invocations', 'Memory(MB)', 'AvgRuntime', 'Cost', 'CloudObjects']
pd.DataFrame([], columns=headers).to_csv(self.log_path, index=False)
def append(content):
""" appends job information to log file."""
pd.DataFrame(content).to_csv(self.log_path, mode='a', header=False, index=False)
def append_summary():
""" add a summary row to the log file"""
df = pd.read_csv(self.log_path)
total_average = sum(df.AvgRuntime * df.Invocations) / df.Invocations.sum()
total_row = pd.DataFrame([['Summary', ' ', df.Invocations.sum(), df['Memory(MB)'].sum(),
round(total_average, 10), df.Cost.sum(), cloud_objects_n]])
total_row.to_csv(self.log_path, mode='a', header=False, index=False)
def get_object_num():
"""returns cloud objects used up to this point, using this function executor. """
df = pd.read_csv(self.log_path)
return float(df.iloc[-1].iloc[-1])
# Avoid logging info unless chosen computational backend is supported.
if hasattr(self.compute_handler.backend, 'calc_cost'):
if self.log_path: # retrieve cloud_objects_n from last log file
cloud_objects_n += get_object_num()
else:
self.log_path = os.path.join(constants.LOGS_DIR, datetime.now().strftime("%Y-%m-%d_%H:%M:%S.csv"))
# override current logfile
init()
futures = self.futures
if type(futures) != list:
futures = [futures]
memory = []
runtimes = []
curr_job_id = futures[0].job_id
job_func = futures[0].function_name # each job is conducted on a single function
for future in futures:
if curr_job_id != future.job_id:
cost = self.compute_handler.backend.calc_cost(runtimes, memory)
append([[curr_job_id, job_func, len(runtimes), sum(memory),
np.round(np.average(runtimes), 10), cost, ' ']])
# updating next iteration's variables:
curr_job_id = future.job_id
job_func = future.function_name
memory.clear()
runtimes.clear()
memory.append(future.runtime_memory)
runtimes.append(future.stats['worker_exec_time'])
# appends last Job-ID
cost = self.compute_handler.backend.calc_cost(runtimes, memory)
append([[curr_job_id, job_func, len(runtimes), sum(memory),
np.round(np.average(runtimes), 10), cost, ' ']])
# append summary row to end of the dataframe
append_summary()
else: # calc_cost() doesn't exist for chosen computational backend.
logger.warning("Could not log job: {} backend isn't supported by this function."
.format(self.self.compute_handler.backend.name))
return
logger.info("View log file logs at {}".format(self.log_path))
class LocalhostExecutor(FunctionExecutor):

    def __init__(self, config=None, runtime=None, workers=None,
                 storage=None, rabbitmq_monitor=None, log_level=False):
        """
        A FunctionExecutor preconfigured for the localhost mode.
        :param config: Settings passed in here will override those in config file.
        :param runtime: Runtime name to use.
        :param workers: Max number of concurrent workers.
        :param storage: Name of the storage backend to use.
        :param rabbitmq_monitor: use rabbitmq as the monitoring system.
        :param log_level: log level to use during the execution.
        :return `LocalhostExecutor` object.
        """
        super().__init__(mode=LOCALHOST,
                         config=config,
                         runtime=runtime,
                         storage=storage,
                         workers=workers,
                         rabbitmq_monitor=rabbitmq_monitor,
                         log_level=log_level)
class ServerlessExecutor(FunctionExecutor):

    def __init__(self, config=None, runtime=None, runtime_memory=None,
                 backend=None, storage=None, workers=None, rabbitmq_monitor=None,
                 remote_invoker=None, log_level=False):
        """
        A FunctionExecutor preconfigured for the serverless mode.
        :param config: Settings passed in here will override those in config file.
        :param runtime: Runtime name to use.
        :param runtime_memory: memory to use in the runtime.
        :param backend: Name of the serverless compute backend to use.
        :param storage: Name of the storage backend to use.
        :param workers: Max number of concurrent workers.
        :param rabbitmq_monitor: use rabbitmq as the monitoring system.
        :param remote_invoker: enable the remote invoker mechanism.
        :param log_level: log level to use during the execution.
        :return `ServerlessExecutor` object.
        """
        super().__init__(mode=SERVERLESS,
                         config=config,
                         runtime=runtime,
                         runtime_memory=runtime_memory,
                         backend=backend,
                         storage=storage,
                         workers=workers,
                         remote_invoker=remote_invoker,
                         rabbitmq_monitor=rabbitmq_monitor,
                         log_level=log_level)
class StandaloneExecutor(FunctionExecutor):

    def __init__(self, config=None, backend=None, runtime=None, storage=None,
                 workers=None, rabbitmq_monitor=None, log_level=False):
        """
        A FunctionExecutor preconfigured for the standalone mode.
        :param config: Settings passed in here will override those in config file.
        :param backend: Name of the standalone compute backend to use.
        :param runtime: Runtime name to use.
        :param storage: Name of the storage backend to use.
        :param workers: Max number of concurrent workers.
        :param rabbitmq_monitor: use rabbitmq as the monitoring system.
        :param log_level: log level to use during the execution.
        :return `StandaloneExecutor` object.
        """
        super().__init__(mode=STANDALONE,
                         config=config,
                         backend=backend,
                         runtime=runtime,
                         storage=storage,
                         workers=workers,
                         rabbitmq_monitor=rabbitmq_monitor,
                         log_level=log_level)

    def create(self):
        """Create the standalone runtime and persist its metadata."""
        key, meta = self.compute_handler.create()
        self.internal_storage.put_runtime_meta(key, meta)
| 45.948617 | 114 | 0.601806 |
e168e85a11b5aca1b4a003e57e52d24a6a606827 | 39,515 | py | Python | scripts/automation/trex_control_plane/interactive/trex/stl/trex_stl_wlc.py | DhruvamSharma/trex-core | 4d1893d839f4c6da4adc3f69ff35fe1cadd81f52 | [
"Apache-2.0"
] | null | null | null | scripts/automation/trex_control_plane/interactive/trex/stl/trex_stl_wlc.py | DhruvamSharma/trex-core | 4d1893d839f4c6da4adc3f69ff35fe1cadd81f52 | [
"Apache-2.0"
] | 3 | 2018-04-25T20:28:49.000Z | 2018-07-16T13:45:40.000Z | scripts/automation/trex_control_plane/interactive/trex/stl/trex_stl_wlc.py | mcallaghan-sandvine/trex-core | 55fc1ed6d2e3d066e7895321eae233bb0339b722 | [
"Apache-2.0"
] | null | null | null | #!/bin/python
import base64
import copy
from trex.stl.api import *
from trex.common.services.trex_service_ap import *
from trex.utils import text_tables, parsing_opts
from trex.utils.parsing_opts import ArgumentPack, ArgumentGroup, is_valid_file, check_mac_addr, check_ipv4_addr, MUTEX
from scapy.contrib.capwap import *
from scapy.contrib.capwap import CAPWAP_PKTS # ensure capwap_internal is imported
from trex_openssl import *
from texttable import ansi_len
from collections import deque
import threading
import re
import os
import yaml
import ctypes
import struct
import binascii
def base64encode(buf):
    """Return *buf* encoded as a base64 text string.

    On failure the offending buffer is printed before re-raising.
    """
    try:
        result = base64.b64encode(buf).decode()
    except:
        print('Could not encode: %s' % buf)
        raise
    else:
        return result
def base64decode(buf):
    """Return the bytes decoded from base64 input *buf*.

    On failure the offending buffer is printed before re-raising.
    """
    try:
        result = base64.b64decode(buf)
    except:
        print('Could not decode: %s' % buf)
        raise
    else:
        return result
class SSL_Context:
    ''' Shared among all APs '''

    def __init__(self):
        # Native OpenSSL handles; kept as None until successfully created so
        # __del__ can run safely on a partially-initialized object.
        self.ctx = None
        self.evp = None
        bne = None
        rsa = None
        # DTLS 1.0 context shared by every AP's SSL object.
        self.ctx = libssl.SSL_CTX_new(libssl.DTLSv1_method())
        if self.ctx is None:
            raise Exception('Could not create SSL Context')
        try:
            # Generate a 1024-bit RSA key (public exponent RSA_F4 = 65537)
            # and install it into the context.
            bne = libcrypto.BN_new()
            libcrypto.BN_set_word(bne, SSL_CONST.RSA_F4)
            rsa = libcrypto.RSA_new()
            if libcrypto.RSA_generate_key_ex(rsa, 1024, bne, None) != 1:
                raise Exception('Could not generate RSA key in SSL Context')
            if libssl.SSL_CTX_use_RSAPrivateKey(self.ctx, rsa) != 1:
                raise Exception('Could not set RSA key into SSL Context')
            # Keep an EVP_PKEY wrapper of the same key for certificate signing.
            self.evp = libcrypto.EVP_PKEY_new()
            if libcrypto.EVP_PKEY_set1_RSA(self.evp, rsa) != 1:
                raise Exception('Could not create EVP_PKEY in SSL Context')
            libssl.SSL_CTX_set_options(self.ctx, SSL_CONST.SSL_OP_NO_TICKET) # optimization
        finally:
            # The context/EVP hold their own references; release the locals.
            if bne:
                libcrypto.BN_free(bne)
            if rsa:
                libcrypto.RSA_free(rsa)

    def __del__(self):
        # Guard against module teardown: libssl/libcrypto may already be
        # None at interpreter shutdown.
        if libssl and self.ctx:
            libssl.SSL_CTX_free(self.ctx)
        if libcrypto and self.evp:
            libcrypto.EVP_PKEY_free(self.evp)
class AP:
    """Simulated CAPWAP access point bound to a TRex port."""
    # Verbosity levels used by debug()/info()/warn()/err().
    VERB_QUIET = 0
    VERB_ERR = 1
    VERB_WARN = 2 # default
    VERB_INFO = 3
    VERB_DEBUG = 4
    # Pre-built packet fragments shared across all AP instances.
    _scapy_cache_static = {}
    def __init__(self, ssl_ctx, logger, trex_port, mac, ip, port, radio_mac, verbose_level = VERB_WARN, rsa_priv_file = None, rsa_cert_file = None):
        """Create an AP bound to a TRex port.

        :param ssl_ctx: shared SSL_Context instance
        :param logger: logger exposing urgent_async_log()
        :param trex_port: TRex port object this AP transmits on
        :param mac: AP MAC address, 'aa:bb:cc:dd:ee:ff'
        :param ip: AP IPv4 address, 'x.x.x.x'
        :param port: AP UDP source port for CAPWAP
        :param radio_mac: radio MAC address, 'aa:bb:cc:dd:ee:ff'
        :param verbose_level: one of the VERB_* constants
        :param rsa_priv_file: optional PEM private key for DTLS
        :param rsa_cert_file: optional PEM certificate for DTLS
        """
        self.ssl_ctx = ssl_ctx
        self.logger = logger
        self.trex_port = trex_port
        self.port_id = trex_port.port_id
        check_mac_addr(mac)
        check_ipv4_addr(ip)
        check_mac_addr(radio_mac)
        try:
            self.mac_bytes = mac2str(mac)
        except:
            raise Exception('Bad MAC format, expected aa:bb:cc:dd:ee:ff')
        self.mac = mac
        # AP name derived from the MAC, e.g. 'APaabb.ccdd.eeff'.
        self.name = 'AP%s%s.%s%s.%s%s' % (mac[:2], mac[3:5], mac[6:8], mac[9:11], mac[12:14], mac[15:17])
        self.name_bytes = self.name.encode('ascii')
        assert '.' in ip, 'Bad IP format, expected x.x.x.x'
        self.ip_src = is_valid_ipv4_ret(ip)
        self.ip_hum = ip
        self.udp_port = port
        self.udp_port_str = int2str(port, 2)
        try:
            self.radio_mac_bytes = mac2str(radio_mac)
        except:
            raise Exception('Bad radio MAC format, expected aa:bb:cc:dd:ee:ff')
        self.radio_mac = radio_mac
        self.ssl = None
        self.in_bio = None
        self.out_bio = None
        self.serial_number = 'FCZ1853QQQ'
        self.country = 'CH '
        # CAPWAP echo keep-alive interval in seconds.
        self.echo_req_interval = 60
        self.last_echo_req_ts = 0
        self.verbose_level = verbose_level
        self.clients = []
        self.rsa_priv_file = rsa_priv_file
        self.rsa_cert_file = rsa_cert_file
        self.capwap_MaxRetransmit = 5
        self.capwap_RetransmitInterval = 0.5
        # Serializes all libssl/libcrypto calls on this AP's SSL object.
        self.ssl_lock = threading.RLock()
        self._create_ssl()
        self.reset_vars()
    def reset_vars(self):
        """Reset all per-session state (called on init and reconnect)."""
        self.rx_buffer = deque(maxlen = 100)
        self.capwap_assemble = {}           # in-progress CAPWAP fragment reassembly
        self.wlc_name = ''
        self.wlc_sw_ver = []
        self.is_connected = False
        self.echo_resp_timer = None
        self.echo_resp_retry = 0
        self.echo_resp_timeout = 0
        self.SSID = {}
        self.session_id = None
        self.mac_dst = None                 # WLC MAC, learned during join
        self.mac_dst_bytes = None
        self.ip_dst = None                  # WLC IP, learned during join
        self.ip_dst_bytes = None
        self.dot11_seq = 0                  # 802.11 sequence counter (12 bit)
        self.__capwap_seq = 0
        self._scapy_cache = {}              # per-session pre-built packet bytes
        self.last_recv_ts = None
        self.is_handshake_done = False
        self.is_dtls_closed = False
        self.got_keep_alive = False
        self.rx_responses = {}
def debug(self, msg):
if self.is_debug:
self.logger.urgent_async_log(msg)
def info(self, msg):
if self.verbose_level >= self.VERB_INFO:
self.logger.urgent_async_log(msg)
def warn(self, msg):
if self.verbose_level >= self.VERB_WARN:
self.logger.urgent_async_log(msg)
def err(self, msg):
if self.verbose_level >= self.VERB_ERR:
self.logger.urgent_async_log(msg)
def fatal(self, msg):
raise Exception('%s: %s' % (self.name, msg))
@property
def is_debug(self):
return self.verbose_level >= self.VERB_DEBUG
    def __del__(self):
        # Free the native SSL object. getattr guards partially-initialized
        # instances; the libssl check guards interpreter shutdown, where the
        # module global may already have been cleared.
        if getattr(self, 'ssl', None) and libssl:
            libssl.SSL_free(self.ssl)
    def _create_ssl(self):
        """Create this AP's SSL object and attach a client certificate.

        Uses the provided PEM key/cert files when both are given; otherwise
        builds a self-signed certificate on the fly with the shared context
        key, mimicking a Cisco controller/AP certificate subject.
        """
        self.ssl = libssl.SSL_new(self.ssl_ctx.ctx)
        self.openssl_buf = c_buffer(9999)
        # Memory BIOs: DTLS records are moved to/from the wire manually.
        self.in_bio = libcrypto.BIO_new(libcrypto.BIO_s_mem())
        self.out_bio = libcrypto.BIO_new(libcrypto.BIO_s_mem())
        if self.rsa_priv_file and self.rsa_cert_file:
            self.debug('Using provided certificate')
            if libssl.SSL_use_certificate_file(self.ssl, c_buffer(self.rsa_cert_file), SSL_CONST.SSL_FILETYPE_PEM) != 1:
                self.fatal('Could not load given certificate file %s' % self.rsa_cert_file)
            if libssl.SSL_use_PrivateKey_file(self.ssl, c_buffer(self.rsa_priv_file), SSL_CONST.SSL_FILETYPE_PEM) != 1:
                self.fatal('Could not load given private key %s' % self.rsa_priv_file)
        else:
            x509_cert = None
            x509_name = None
            try:
                x509_cert = libcrypto.X509_new()
                x509_name = libcrypto.X509_NAME_new()
                '''
    Cheetah:
        Data:
            Version: 3 (0x2)
            Serial Number:
                1b:56:b2:2d:00:00:00:05:0f:d0
        Signature Algorithm: sha256WithRSAEncryption
            Issuer: O=Cisco, CN=Cisco Manufacturing CA SHA2
            Validity
                Not Before: Sep 16 19:17:57 2016 GMT
                Not After : Sep 16 19:27:57 2026 GMT
            Subject: C=US, ST=California, L=San Jose, O=Cisco Systems, CN=AP1G4-94D469F82DE8/emailAddress=support@cisco.com

    eWLC:
        Data:
            Version: 3 (0x2)
            Serial Number:
                1d:6e:0a:b4:00:00:00:2f:cc:64
        Signature Algorithm: sha1WithRSAEncryption
            Issuer: O=Cisco Systems, CN=Cisco Manufacturing CA
            Validity
                Not Before: May  6 10:12:56 2015 GMT
                Not After : May  6 10:22:56 2025 GMT
            Subject: C=US, ST=California, L=San Jose, O=Cisco Systems, CN=0050569C622D/emailAddress=support@cisco.com
'''
                if libcrypto.X509_set_version(x509_cert, 2) != 1:
                    self.fatal('Could not set version of certificate')
                if libcrypto.X509_set_pubkey(x509_cert, self.ssl_ctx.evp) != 1:
                    self.fatal('Could not assign public key to certificate')
                if libcrypto.X509_NAME_add_entry_by_txt(x509_name, b'C', SSL_CONST.MBSTRING_ASC, b'US', -1, -1, 0) != 1:
                    self.fatal('Could not assign C to certificate')
                if libcrypto.X509_NAME_add_entry_by_txt(x509_name, b'ST', SSL_CONST.MBSTRING_ASC, b'California', -1, -1, 0) != 1:
                    self.fatal('Could not assign ST to certificate')
                if libcrypto.X509_NAME_add_entry_by_txt(x509_name, b'L', SSL_CONST.MBSTRING_ASC, b'San Jose', -1, -1, 0) != 1:
                    self.fatal('Could not assign L to certificate')
                #if libcrypto.X509_NAME_add_entry_by_txt(x509_name, b'O', SSL_CONST.MBSTRING_ASC, b'Cisco Systems', -1, -1, 0) != 1:
                if libcrypto.X509_NAME_add_entry_by_txt(x509_name, b'O', SSL_CONST.MBSTRING_ASC, b'Cisco Virtual Wireless LAN Controller', -1, -1, 0) != 1:
                    self.fatal('Could not assign O to certificate')
                #if libcrypto.X509_NAME_add_entry_by_txt(x509_name, b'CN', SSL_CONST.MBSTRING_ASC, ('AP1G4-%s' % hex(self.mac_bytes, delimiter = '').upper()).encode('ascii'), -1, -1, 0) != 1:
                if libcrypto.X509_NAME_add_entry_by_txt(x509_name, b'CN', SSL_CONST.MBSTRING_ASC, b'CA-vWLC', -1, -1, 0) != 1:
                    self.fatal('Could not assign CN to certificate')
                if libcrypto.X509_set_subject_name(x509_cert, x509_name) != 1:
                    self.fatal('Could not set subject name to certificate')
                # Self-signed: issuer equals subject.
                if libcrypto.X509_set_issuer_name(x509_cert, x509_name) != 1:
                    self.fatal('Could not set issuer name to certificate')
                # Validity window of +/- 999 days around now.
                if not libcrypto.X509_time_adj_ex(libcrypto.X509_getm_notBefore(x509_cert), -999, 0, None):
                    self.fatal('Could not set "Not before" time to certificate"')
                if not libcrypto.X509_time_adj_ex(libcrypto.X509_getm_notAfter(x509_cert), 999, 0, None):
                    self.fatal('Could not set "Not after" time to certificate"')
                if not libcrypto.X509_sign(x509_cert, self.ssl_ctx.evp, libcrypto.EVP_sha256()):
                    self.fatal('Could not sign the certificate')
                libssl.SSL_use_certificate(self.ssl, x509_cert)
            finally:
                # SSL holds its own reference; release the locals.
                if x509_name:
                    libcrypto.X509_NAME_free(x509_name)
                if x509_cert:
                    libcrypto.X509_free(x509_cert)
        if libssl.SSL_check_private_key(self.ssl) != 1:
            self.fatal('Error: check of RSA private key failed.')
        libssl.SSL_set_bio(self.ssl, self.in_bio, self.out_bio)
        # AP acts as the DTLS client side of the handshake.
        libssl.SSL_set_connect_state(self.ssl)
    def get_config_update_capwap(self, seq):
        """Return a CAPWAP config-update packet with sequence *seq*.

        The packet is built once with Scapy and cached; later calls only
        patch the sequence byte (offset 20) in place for speed.
        """
        if 'config_update' in self._scapy_cache:
            self._scapy_cache['config_update'][20] = struct.pack('!B', seq)
        else:
            self._scapy_cache['config_update'] = CAPWAP_PKTS.config_update(self, seq)
        return self._scapy_cache['config_update']
    def get_echo_capwap(self):
        """Return a CAPWAP echo-request packet with the next sequence number.

        Built once and cached; later calls patch only the sequence byte
        (offset 20) in place.
        """
        if 'echo_pkt' in self._scapy_cache:
            self._scapy_cache['echo_pkt'][20] = struct.pack('!B', self.get_capwap_seq())
        else:
            self._scapy_cache['echo_pkt'] = CAPWAP_PKTS.echo(self)
        return self._scapy_cache['echo_pkt']
    def get_echo_wrap(self, encrypted):
        """Wrap an encrypted echo payload with cached Ether/IP/UDP/CAPWAP headers.

        The header prefix (everything before the payload) is computed once by
        wrapping a sample payload and stripping it off the tail.
        NOTE(review): assumes len(encrypted) > 0 -- [:-0] would keep nothing.
        """
        if 'echo_wrap' not in self._scapy_cache:
            self._scapy_cache['echo_wrap'] = bytes(self.wrap_capwap_pkt(b'\1\0\0\0' + encrypted))[:-len(encrypted)]
        return self._scapy_cache['echo_wrap'] + encrypted
    def wrap_capwap_pkt(self, capwap_bytes, is_discovery = False, dst_port = 5246):
        """Prepend Ethernet/IPv4/UDP headers to raw CAPWAP bytes.

        :param capwap_bytes: CAPWAP payload (bytes or a ctypes buffer)
        :param is_discovery: broadcast the frame (discovery phase, WLC unknown)
        :param dst_port: UDP destination port (5246 control, 5247 data)
        """
        if isinstance(capwap_bytes, ctypes.Array):
            capwap_bytes = capwap_bytes.raw
        assert isinstance(capwap_bytes, bytes)
        if is_discovery:
            # Discovery: broadcast MAC/IP, IPv4 header checksum patched in place.
            ip = b'\x45\x00' + struct.pack('!H', 28 + len(capwap_bytes)) + b'\x00\x01\x00\x00\x40\x11\0\0' + self.ip_src + b'\xff\xff\xff\xff'
            checksum = scapy.utils.checksum(ip)
            ip = ip[:10] + struct.pack('!H', checksum) + ip[12:]
            return (
                b'\xff\xff\xff\xff\xff\xff' + self.mac_bytes + b'\x08\x00' +
                ip +
                struct.pack('!H', self.udp_port) + struct.pack('!H', dst_port) + struct.pack('!H', 8 + len(capwap_bytes)) + b'\0\0' +
                capwap_bytes
                )
        # Unicast path: cache the constant header pieces per session.
        if 'capwap_wrap' not in self._scapy_cache:
            self._scapy_cache['capwap_wrap_ether'] = self.mac_dst_bytes + self.mac_bytes + b'\x08\x00'
            self._scapy_cache['capwap_wrap_ip1'] = b'\x45\x00' # 2 bytes of total length after this one
            self._scapy_cache['capwap_wrap_ip2'] = b'\x00\x01\x00\x00\x40\x11\0\0' + self.ip_src + self.ip_dst_bytes
            self._scapy_cache['capwap_wrap_udp_src'] = struct.pack('!H', self.udp_port)
            self._scapy_cache['capwap_wrap'] = True
        ip = self._scapy_cache['capwap_wrap_ip1'] + struct.pack('!H', 28 + len(capwap_bytes)) + self._scapy_cache['capwap_wrap_ip2']
        checksum = scapy.utils.checksum(ip)
        # Splice the computed checksum into bytes 10-11 of the IPv4 header.
        ip = ip[:10] + struct.pack('!H', checksum) + ip[12:]
        udp = self._scapy_cache['capwap_wrap_udp_src'] + struct.pack('!H', dst_port) + struct.pack('!H', 8 + len(capwap_bytes)) + b'\0\0'
        return self._scapy_cache['capwap_wrap_ether'] + ip + udp + capwap_bytes
    def wrap_pkt_by_wlan(self, client, pkt):
        """Wrap an Ethernet frame into CAPWAP-DATA/802.11 on behalf of *client*.

        The constant CAPWAP/Dot11/QoS/LLC/SNAP header template is built once
        with Scapy and cached class-wide; per-packet fields (radio MAC,
        client MAC, original dst MAC, ethertype+payload) are spliced in as
        raw bytes for speed.

        :param client: associated client whose MAC becomes the 802.11 source
        :param pkt: original Ethernet frame as bytes (>= 14 bytes)
        """
        assert type(pkt) is bytes, 'wrap_pkt_by_wlan() expects bytes, got: %s' % type(pkt)
        assert len(pkt) >= 14, 'Too small buffer to wrap'
        # 12-bit 802.11 sequence counter with wrap-around.
        self.dot11_seq += 1
        if self.dot11_seq > 0xfff:
            self.dot11_seq = 0
        verify = False
        if 'wlan_wrapping' not in AP._scapy_cache_static:
            verify = True
            p1 = bytes(
                CAPWAP_DATA(
                    header = CAPWAP_Header(
                        wbid = 1,
                        flags = 'WT',
                        wireless_info_802 = CAPWAP_Wireless_Specific_Information_IEEE802_11(
                            rssi = 216,
                            snr = 31,
                            data_rate = 0,
                            )
                        )
                    )/
                Dot11_swapped(
                    FCfield = 'to-DS',
                    subtype = 8,
                    type = 'Data',
                    ID = 0,
                    addr1 = self.radio_mac,
                    addr2 = client.mac,
                    addr3 = str2mac(pkt[:6]),
                    #SC = self.dot11_seq << 4
                    )/
                Dot11QoS()/
                LLC(dsap = 170, ssap = 170, ctrl = 3)/
                SNAP()[0])
            # Cache the bytes before addr1 (offset 20) and between addr3 and
            # the SNAP ethertype (offset 38 .. -2).
            AP._scapy_cache_static['wlan_wrapping_1'] = p1[:20]
            AP._scapy_cache_static['wlan_wrapping_2'] = p1[38:-2]
            AP._scapy_cache_static['wlan_wrapping'] = True
        p = (
            AP._scapy_cache_static['wlan_wrapping_1'] +
            self.radio_mac_bytes +
            client.mac_bytes +
            pkt[:6] +
            AP._scapy_cache_static['wlan_wrapping_2']
            )
        #CAPWAP_DATA(p).dump_offsets_tree()
        # need to update following:
        # Dot11_swapped.addr1 = self.radio_mac_bytes
        # Dot11_swapped.addr2 = client.mac_bytes
        # Dot11_swapped.addr3 = pkt.dst
        # Dot11_swapped.SC = self.dot11_seq << 4
        # SNAP.code = struct.unpack('!H', pkt[12:14]
        #p1 = (
        #    p[:9] + ap.radio_mac_bytes +
        #    p[15:20] + struct.pack('!B', capwap_seq) +
        #    p[21:]
        #    )
        #
        if verify and os.getenv('VERIFY_SCAPY_CACHE'):
            # Sanity check that the byte-splicing matches the Scapy build.
            print('verifying wlan_wrapping')
            assert p == p1[:-2], '\n%s\n%s\n\n%s\n%s' % (type(p), hexstr(p), type(p1), hexstr(p1))
        # pkt[12:] keeps the ethertype + payload of the original frame.
        return self.wrap_capwap_pkt(p + pkt[12:], dst_port = 5247)
    def patch_stream(self, client, stream):
        """Return a deep copy of *stream* whose packet is wrapped in
        WLAN/CAPWAP headers for *client*, with field-engine offsets shifted
        past the added headers. The original stream object is not modified.
        """
        assert type(stream) is STLStream, 'patch_stream() expects STLStream, got: %s' % type(stream)
        stream = copy.deepcopy(stream)
        patched_pkt = Ether(stream.pkt)
        if stream.fields['packet']['meta']:
            pkt_meta = '%s\nPatched stream: Added WLAN' % stream.fields['packet']['meta']
        else:
            pkt_meta = 'Patched stream: Added WLAN'
        port_layer = self.trex_port.get_layer_cfg()
        # Flag bits presumably mark user-overridden src/dst MACs; when clear,
        # the port's configured addresses are used -- TODO confirm bit layout.
        if stream.fields['flags'] & 1 == 0:
            pkt_meta += ', Changed source'
            patched_pkt.src = port_layer['ether']['src']
        if stream.fields['flags'] & 0x110 == 0:
            pkt_meta += ', Changed destination'
            patched_pkt.dst = port_layer['ether']['dst']
        stream.pkt = self.wrap_pkt_by_wlan(client, bytes(patched_pkt))
        stream.fields['packet'] = {'binary': base64encode(stream.pkt),
                                   'meta': pkt_meta}
        # Shift every VM instruction offset by the size of the added wrapping
        # layers (minus the removed Ethernet header).
        for inst in stream.fields['vm']['instructions']:
            if 'pkt_offset' in inst:
                inst['pkt_offset'] += 78 # Size of wrapping layers minus removed Ethernet
            elif 'offset' in inst:
                inst['offset'] += 78
        return stream
def is_handshake_done_libssl(self):
with self.ssl_lock:
return bool(libssl.SSL_is_init_finished(self.ssl))
def is_dtls_closed_libssl(self):
with self.ssl_lock:
return bool(libssl.SSL_get_shutdown(self.ssl))
    @property
    def is_dtls_established(self):
        """True once the handshake finished and the session is not shut down."""
        # NOTE(review): reads `self.is_handshake_done` / `self.is_dtls_closed`
        # attributes defined elsewhere in this class (not the *_libssl methods
        # above) -- confirm they are attributes/properties, not bound methods,
        # otherwise this expression would evaluate on truthy method objects.
        return self.is_handshake_done and not self.is_dtls_closed
    def ssl_read(self):
        """Drain pending ciphertext from the outgoing memory BIO.

        Returns bytes on success, '' when the BIO asks to retry, and
        implicitly None after flagging the session as disconnected.
        Note the mixed str/bytes/None return types -- callers must cope.
        """
        with self.ssl_lock:
            ret = libcrypto.BIO_read(self.out_bio, self.openssl_buf, 10000)
            if ret >= 0:
                return self.openssl_buf[:ret]
            # Negative read: distinguish "try again later" from a dead session.
            ret = libcrypto.BIO_test_flags(self.out_bio, SSL_CONST.BIO_FLAGS_SHOULD_RETRY)
            if ret:
                return ''
            self.is_connected = False
# without lock, careful
def __ssl_write(self, buf):
if isinstance(buf, ctypes.Array):
ret = libcrypto.BIO_write(self.in_bio, buf, len(buf))
else:
ret = libcrypto.BIO_write(self.in_bio, c_buffer(buf), len(buf) + 1)
if ret >= 0:
return ret
ret = libcrypto.BIO_test_flags(out_bio, SSL_CONST.BIO_FLAGS_SHOULD_RETRY)
if ret:
return ''
self.is_connected = False
    def encrypt(self, buf):
        """Encrypt plaintext *buf* through the DTLS session and return the
        ciphertext read back from the outgoing BIO via ssl_read().
        Scapy Packet objects are rejected -- convert to bytes first.
        """
        with self.ssl_lock:
            if isinstance(buf, Packet):
                raise Exception('Consider converting to buffer: %s' % buf.command())
            if isinstance(buf, ctypes.Array):
                ret = libssl.SSL_write(self.ssl, buf, len(buf))
            else:
                ret = libssl.SSL_write(self.ssl, c_buffer(buf), len(buf))
            #err = SSL_CONST.ssl_err.get(libcrypto.ERR_get_error(self.ssl, ret))
            #if err != 'SSL_ERROR_NONE':
            #    self.fatal('Got SSL error: %s (ret %s)' % (err, ret))
            return self.ssl_read()
    def decrypt(self, buf):
        """Decrypt ciphertext *buf*: push it into the input BIO, then read the
        resulting plaintext with SSL_read.

        NOTE(review): a negative SSL_read return value would be used directly
        as a slice bound (openssl_buf[:negative]) -- confirm upstream callers
        guarantee a successful read here.
        """
        with self.ssl_lock:
            self.__ssl_write(buf)
            ret = libssl.SSL_read(self.ssl, self.openssl_buf, 10000)
            #err = SSL_CONST.ssl_err.get(libcrypto.ERR_get_error(self.ssl, ret))
            #if err != 'SSL_ERROR_NONE':
            #    self.fatal('Got SSL error: %s' % (err, ret))
            return self.openssl_buf[:ret]
def get_arp_pkt(self, op, client):
if op == 'who-has':
arp_dst = b'\xff\xff\xff\xff\xff\xff' + self.ip_dst_bytes
elif op == 'is-at':
arp_dst = self.mac_dst_bytes + self.ip_dst_bytes
elif op == 'garp':
arp_dst = b'\0\0\0\0\0\0' + client.ip_bytes
else:
raise Exception('Bad op of ARP: %s' % op)
return (
(b'\xff\xff\xff\xff\xff\xff' if op in ('who-has', 'garp') else self.mac_dst_bytes) + client.mac_bytes + b'\x08\x06' + # Ethernet
b'\x00\x01\x08\x00\x06\x04' +
(b'\x00\x01' if op in ('who-has', 'garp') else b'\x00\x02') +
client.mac_bytes + client.ip_bytes + arp_dst # ARP
)
def get_capwap_seq(self):
seq = self.__capwap_seq
if self.__capwap_seq < 0xff:
self.__capwap_seq += 1
else:
self.__capwap_seq = 0
return seq
class APClient:
    """A simulated wireless client attached to a simulated AP.

    MAC and IP may be given in human-readable form ('aa:bb:..', 'x.x.x.x')
    or as raw bytes; both representations are kept on the instance.
    """
    def __init__(self, mac, ip, ap):
        # BUGFIX: a plain "':' in mac" / "'.' in ip" raises TypeError for the
        # documented raw-bytes inputs (str-in-bytes membership is illegal in
        # Python 3), so the type must be checked first.
        if isinstance(mac, str) and ':' in mac:
            self.mac_bytes = mac2str(mac)
            self.mac = mac
        else:
            self.mac_bytes = mac
            self.mac = str2mac(mac)
        if isinstance(ip, str) and '.' in ip:
            self.ip_bytes = is_valid_ipv4_ret(ip)
            self.ip = ip
        elif len(ip) == 4:
            self.ip_bytes = ip
            self.ip = str2ip(ip)
        else:
            raise Exception('Bad IP provided, should be x.x.x.x, got: %s' % ip)
        check_mac_addr(self.mac)
        check_ipv4_addr(self.ip)
        assert isinstance(ap, AP)
        self.ap = ap
        self.reset()

    def reset(self):
        """Clear association state (called on creation and on disconnect)."""
        self.got_disconnect = False
        self.is_associated = False
        self.seen_arp_reply = False

    def disconnect(self):
        """Mark the client as disconnected, clearing association flags."""
        self.reset()
        self.got_disconnect = True
class AP_Manager:
    """Manages simulated CAPWAP APs and their wireless clients on TRex ports."""
    def __init__(self, trex_client = None, server = None):
        """Exactly one of *trex_client* / *server* must be given; a dedicated
        background STLClient is created either way.
        """
        self.ssl_ctx = None
        # XOR: exactly one of server / trex_client must be supplied.
        if not (bool(server) ^ bool(trex_client)):
            raise STLError('Please specify either trex_client or server argument.')
        if not server:
            server = trex_client.get_connection_info()['server']
        self.bg_client = STLClient('AP Manager', server, verbose_level = 'none')
        self.trex_client = trex_client or self.bg_client
        self.aps = []
        self.clients = []
        # Lookup tables -- every AP is registered under all of its identifiers.
        self.ap_by_name = {}
        self.ap_by_mac = {}
        self.ap_by_ip = {}
        self.ap_by_udp_port = {}
        self.ap_by_radio_mac = {}
        # Clients are registered under both MAC and IP.
        self.client_by_id = {}
        self.bg_lock = threading.RLock()
        # Per-port service state: 'bg' maintenance service, 'fg' buffered ctx,
        # 'synced' flag tracking whether the bg service has a current AP list.
        self.service_ctx = {}
        self.base_file_path = '/tmp/trex/console/%s_%s.wlc_base' % (get_current_user(), server)
        base_file_dir = os.path.dirname(self.base_file_path)
        if not os.path.exists(base_file_dir):
            os.makedirs(base_file_dir, mode = 0o777)
        self._init_base_vals()
    def init(self, trex_port_ids):
        """Enable the AP service on the given TRex port(s): validates every
        port first, then switches each to service + promiscuous mode and
        starts the per-port background/foreground service contexts.
        """
        if type(trex_port_ids) is int:
            trex_port_ids = [trex_port_ids]
        if not self.bg_client.is_connected():
            self.bg_client.connect()
        # First pass: validate all ports before touching any of them.
        for port_id in trex_port_ids:
            if port_id in self.service_ctx:
                raise Exception('AP manager already initialized on port %s. Close it to proceed.' % port_id)
            if port_id >= len(self.trex_client.ports):
                raise Exception('TRex port %s does not exist!' % port_id)
            trex_port = self.trex_client.ports[port_id]
            if not trex_port.is_acquired():
                raise Exception('Port %s is not acquired' % port_id)
            if trex_port.get_vlan_cfg():
                raise Exception('Port %s has VLAN, plugin does not support it. Use trunk with native vlans.' % port_id)
        # Second pass: set up services; the ctx entry is rolled back if any
        # step fails so a retry starts clean.
        for port_id in trex_port_ids:
            success = False
            try:
                self.service_ctx[port_id] = {}
                if not self.ssl_ctx:
                    self.ssl_ctx = SSL_Context()
                self.trex_client.set_service_mode(port_id, True)
                if not self.trex_client.get_port_attr(port = port_id)['prom'] == 'on':
                    self.trex_client.set_port_attr(ports = port_id, promiscuous = True)
                self.service_ctx[port_id]['synced'] = True
                self.service_ctx[port_id]['bg'] = ServiceApBgMaintenance(self, port_id)
                self.service_ctx[port_id]['fg'] = ServiceBufferedCtx(self.trex_client, port_id)
                self.service_ctx[port_id]['bg'].run()
                success = True
            finally:
                if not success:
                    del self.service_ctx[port_id]
def _init_base_vals(self):
try:
self.set_base_values(load = True)
except:
self.next_ap_mac = '94:12:12:12:12:01'
self.next_ap_ip = '9.9.12.1'
self.next_ap_udp = 10001
self.next_ap_radio = '94:14:14:14:01:00'
self.next_client_mac = '94:13:13:13:13:01'
self.next_client_ip = '9.9.13.1'
def _get_ap_by_id(self, ap_id):
if isinstance(ap_id, AP):
return ap_id
if ap_id in self.ap_by_name:
return self.ap_by_name[ap_id]
elif ap_id in self.ap_by_mac:
return self.ap_by_mac[ap_id]
elif ap_id in self.ap_by_ip:
return self.ap_by_ip[ap_id]
elif ap_id in self.ap_by_udp_port:
return self.ap_by_udp_port[ap_id]
elif ap_id in self.ap_by_radio_mac:
return self.ap_by_radio_mac[ap_id]
else:
raise Exception('AP with id %s does not exist!' % ap_id)
def _get_client_by_id(self, client_id):
if isinstance(client_id, APClient):
return client_id
elif client_id in self.client_by_id:
return self.client_by_id[client_id]
else:
raise Exception('Client with id %s does not exist!' % client_id)
def create_ap(self, trex_port_id, mac, ip, udp_port, radio_mac, verbose_level = AP.VERB_WARN, rsa_priv_file = None, rsa_cert_file = None):
if trex_port_id not in self.service_ctx:
raise Exception('TRex port %s does not exist!' % trex_port_id)
if ':' not in mac:
mac = str2mac(mac)
if ':' not in radio_mac:
radio_mac = str2mac(radio_mac)
if mac in self.ap_by_mac:
raise Exception('AP with such MAC (%s) already exists!' % mac)
if ip in self.ap_by_ip:
raise Exception('AP with such IP (%s) already exists!' % ip)
if udp_port in self.ap_by_udp_port:
raise Exception('AP with such UDP port (%s) already exists!' % udp_port)
if radio_mac in self.ap_by_radio_mac:
raise Exception('AP with such radio MAC port (%s) already exists!' % radio_mac)
ap = AP(self.ssl_ctx, self.trex_client.logger, self.trex_client.ports[trex_port_id], mac, ip, udp_port, radio_mac, verbose_level, rsa_priv_file, rsa_cert_file)
self.ap_by_name[ap.name] = ap
self.ap_by_mac[mac] = ap
self.ap_by_ip[ip] = ap
self.ap_by_udp_port[udp_port] = ap
self.ap_by_radio_mac[radio_mac] = ap
with self.bg_lock:
self.aps.append(ap)
self.service_ctx[trex_port_id]['synced'] = False
    def remove_ap(self, ap_id):
        """Tear down an AP: shut down its DTLS session, detach all of its
        clients and remove it from every registry.
        """
        ap = self._get_ap_by_id(ap_id)
        if ap.is_dtls_established:
            self.service_ctx[ap.port_id]['fg'].run([ServiceApShutdownDTLS(ap)])
        with self.bg_lock:
            for client in ap.clients:
                # Iterate over a copy since entries are deleted while scanning.
                for key, val in dict(self.client_by_id).items():
                    if val == client:
                        del self.client_by_id[key]
                self.clients.remove(client)
            self.aps.remove(ap)
            self.service_ctx[ap.port_id]['synced'] = False
        del self.ap_by_name[ap.name]
        del self.ap_by_mac[ap.mac]
        del self.ap_by_ip[ap.ip_hum]
        del self.ap_by_udp_port[ap.udp_port]
        del self.ap_by_radio_mac[ap.radio_mac]
def remove_client(self, id):
client = self._get_client_by_id(id)
with self.bg_lock:
self.clients.remove(client)
client.ap.clients.remove(client)
self.service_ctx[client.ap.port_id]['synced'] = False
for key, val in dict(self.client_by_id).items():
if val == client:
del self.client_by_id[key]
@staticmethod
def _get_ap_per_port(aps):
ap_per_port = {}
for ap in aps:
if ap.port_id in ap_per_port:
ap_per_port[ap.port_id].append(ap)
else:
ap_per_port[ap.port_id] = [ap]
return ap_per_port
def _compare_aps(self, good_aps, aps, err, show_success = True):
if len(good_aps) != len(aps):
self.trex_client.logger.post_cmd(False)
bad_aps = set(aps) - set(good_aps)
raise Exception('Following AP(s) could not %s: %s' % (err, ', '.join(sorted([ap.name for ap in bad_aps], key = natural_sorted_key))))
if show_success:
self.trex_client.logger.post_cmd(True)
    '''
    ids is a list, each index can be either mac, ip, udp_port or name
    '''
    def join_aps(self, ids = None):
        """Join APs (default: all) to the WLC in three verified stages:
        WLC discovery, DTLS establishment, CAPWAP join + SSID retrieval.
        Each stage is checked with _compare_aps() before the next starts.
        """
        if not ids:
            aps = self.aps
        else:
            aps = [self._get_ap_by_id(id) for id in ids]
        if not aps:
            raise Exception('No APs to join!')
        MAX_JOINS = 512
        if len(aps) > MAX_JOINS:
            raise Exception('Can not join more than %s at once, please split the joins' % MAX_JOINS)
        # discover
        self.trex_client.logger.pre_cmd('Discovering WLC')
        for port_id, aps_of_port in self._get_ap_per_port(aps).items():
            self.service_ctx[port_id]['fg'].run([ServiceApDiscoverWLC(ap) for ap in aps_of_port])
        # check results -- discovery succeeded iff the WLC IP was learned
        good_aps = [ap for ap in aps if ap.ip_dst]
        self._compare_aps(good_aps, aps, 'discover WLC')
        # establish DTLS
        self.trex_client.logger.pre_cmd('Establishing DTLS connection')
        for port_id, aps_of_port in self._get_ap_per_port(aps).items():
            self.service_ctx[port_id]['fg'].run([ServiceApEstablishDTLS(ap) for ap in aps_of_port])
        # check results
        good_aps = [ap for ap in aps if ap.is_dtls_established]
        self._compare_aps(good_aps, aps, 'establish DTLS session')
        # join ap
        self.trex_client.logger.pre_cmd('Join WLC and get SSID')
        for port_id, aps_of_port in self._get_ap_per_port(aps).items():
            self.service_ctx[port_id]['fg'].run([ServiceApJoinWLC(ap) for ap in aps_of_port])
        # check results -- joined (got SSID) and keep-alive acknowledged
        good_aps = [ap for ap in aps if ap.SSID]
        self._compare_aps(good_aps, aps, 'join or get SSID', show_success = False)
        good_aps = [ap for ap in aps if ap.is_connected]
        self._compare_aps(good_aps, aps, 'get Keep-alive response')
def create_client(self, mac, ip, ap_id):
if ':' not in mac:
mac = str2mac(mac)
ap = self._get_ap_by_id(ap_id)
if mac in self.client_by_id:
raise Exception('Client with such MAC (%s) already exists!' % mac)
if ip in self.client_by_id:
raise Exception('Client with such IP (%s) already exists!' % ip)
client = APClient(mac = mac, ip = ip, ap = ap)
self.client_by_id[mac] = client
self.client_by_id[ip] = client
with self.bg_lock:
client.ap.clients.append(client)
self.clients.append(client)
self.service_ctx[ap.port_id]['synced'] = False
    def join_clients(self, ids = None):
        """Associate clients (default: all) with their APs in batches and
        verify both the association and ARP reachability through the WLC.
        """
        if not ids:
            clients = self.clients
        else:
            clients = set([self._get_client_by_id(id) for id in ids])
        if not clients:
            raise Exception('No Clients to join!')
        # Assoc clients
        batch_size = 1024
        self.trex_client.logger.pre_cmd('Associating clients')
        # Shape: {port_id: {ap: [clients]}}; flushed every batch_size clients.
        clients_per_ap_per_port = {}
        clients_count = 0
        for client in clients:
            clients_count += 1
            if client.ap.port_id not in clients_per_ap_per_port:
                clients_per_ap_per_port[client.ap.port_id] = {}
            if client.ap not in clients_per_ap_per_port[client.ap.port_id]:
                clients_per_ap_per_port[client.ap.port_id][client.ap] = [client]
            else:
                clients_per_ap_per_port[client.ap.port_id][client.ap].append(client)
            if clients_count >= batch_size:
                for port_id, clients_per_ap in clients_per_ap_per_port.items():
                    self.service_ctx[port_id]['fg'].run([ServiceApAddClients(ap, c) for ap, c in clients_per_ap.items()])
                clients_per_ap_per_port = {}
                clients_count = 0
        # Flush the last (partial) batch.
        for port_id, clients_per_ap in clients_per_ap_per_port.items():
            self.service_ctx[port_id]['fg'].run([ServiceApAddClients(ap, c) for ap, c in clients_per_ap.items()])
        # check results
        no_assoc_clients = [client.ip for client in clients if not client.is_associated]
        if no_assoc_clients:
            self.trex_client.logger.post_cmd(False)
            raise Exception('Following client(s) could not be associated: %s' % ', '.join(no_assoc_clients))
        no_resp_clients = [client.ip for client in clients if not client.seen_arp_reply]
        if no_resp_clients:
            self.trex_client.logger.post_cmd(False)
            raise Exception('Following client(s) did not receive ARP response from WLC: %s' % ', '.join(no_resp_clients))
        self.trex_client.logger.post_cmd(True)
def add_streams(self, client_id, streams):
if isinstance(streams, STLStream):
streams = [streams]
client = self._get_client_by_id(client_id)
streams = [client.ap.patch_stream(client, stream) for stream in streams]
self.trex_client.add_streams(streams, [client.ap.port_id])
    def add_profile(self, client_id, filename, **k):
        """Load an STL profile file and add its streams for *client_id*.

        NOTE(review): `basestring` is a Python 2 name; unless a py2/py3
        compatibility shim defines it elsewhere in this module, this line
        raises NameError on Python 3 -- verify.
        """
        validate_type('filename', filename, basestring)
        profile = STLProfile.load(filename, **k)
        self.add_streams(client_id, profile.get_streams())
def get_info(self):
info_per_port = {}
for ap in self.aps:
ssid = ap.SSID.get(0)
if type(ssid) is bytes:
ssid = ssid.decode('ascii')
if ap.port_id not in info_per_port:
info_per_port[ap.port_id] = {
'bg_thread_alive': bool(self.service_ctx[ap.port_id]['bg'].is_running()),
'aps': {},
}
info_per_port[ap.port_id]['aps'][ap.name] = {
'mac': ap.mac,
'ip': ap.ip_hum,
'dtls_established': ap.is_dtls_established,
'is_connected': ap.is_connected,
'ssid': ssid,
'clients': [],
}
for client in ap.clients:
info_per_port[ap.port_id]['aps'][ap.name]['clients'].append({
'mac': client.mac,
'ip': client.ip,
'is_associated': client.is_associated,
})
return info_per_port
def get_connected_aps(self):
return [ap for ap in self.aps if ap.is_connected]
    def close(self, ports = None):
        """Stop service threads, shut down DTLS sessions and remove every AP
        on the given ports (default: all initialized ports).
        """
        if ports is None:
            ports = list(self.service_ctx.keys())
        else:
            ports = listify(ports)
        ap_per_port = self._get_ap_per_port([ap for ap in self.aps if ap.port_id in ports])
        for port_id in ports:
            if port_id not in self.service_ctx:
                raise Exception('AP manager is not initialized on port %s!' % port_id)
            service = self.service_ctx[port_id]
            # Stop the background maintenance service before tearing down APs.
            service['bg'].stop()
            aps = ap_per_port.get(port_id, [])
            if aps:
                service['fg'].run([ServiceApShutdownDTLS(ap) for ap in aps])
            for ap in aps:
                self.remove_ap(ap)
            del self.service_ctx[port_id]
    def _gen_ap_params(self):
        """Return the next free (mac, ip, udp_port, radio_mac) tuple for a new
        AP, advancing each base value past identifiers already in use.
        """
        # mac
        while self.next_ap_mac in self.ap_by_mac:
            self.next_ap_mac = increase_mac(self.next_ap_mac)
        assert is_valid_mac(self.next_ap_mac)
        # ip
        while self.next_ap_ip in self.ap_by_ip:
            self.next_ap_ip = increase_ip(self.next_ap_ip)
        assert is_valid_ipv4(self.next_ap_ip)
        # udp
        while self.next_ap_udp in self.ap_by_udp_port:
            if self.next_ap_udp >= 65500:
                raise Exception('Can not increase base UDP any further: %s' % self.next_ap_udp)
            self.next_ap_udp += 1
        # radio -- advances in steps of 256 so the low byte stays 0x00
        while self.next_ap_radio in self.ap_by_radio_mac:
            self.next_ap_radio = increase_mac(self.next_ap_radio, 256)
        assert is_valid_mac(self.next_ap_radio)
        return self.next_ap_mac, self.next_ap_ip, self.next_ap_udp, self.next_ap_radio
    def _gen_client_params(self):
        """Return the next free (mac, ip) pair for a new client."""
        # mac
        while self.next_client_mac in self.client_by_id:
            self.next_client_mac = increase_mac(self.next_client_mac)
        assert is_valid_mac(self.next_client_mac)
        # ip
        while self.next_client_ip in self.client_by_id:
            self.next_client_ip = increase_ip(self.next_client_ip)
        assert is_valid_ipv4(self.next_client_ip)
        return self.next_client_mac, self.next_client_ip
def log(self, msg):
self.trex_client.logger.log(msg)
def set_base_values(self, mac = None, ip = None, udp = None, radio = None, client_mac = None, client_ip = None, save = None, load = None):
if load:
if any([mac, ip, udp, radio, client_mac, client_ip, save]):
raise Exception('Can not use --load with other arguments.')
if not os.path.exists(self.base_file_path):
raise Exception('No saved file.')
try:
self.trex_client.logger.pre_cmd('Loading base values')
with open(self.base_file_path) as f:
base_values = yaml.safe_load(f.read())
mac = base_values['ap_mac']
ip = base_values['ap_ip']
udp = base_values['ap_udp']
radio = base_values['ap_radio']
client_mac = base_values['client_mac']
client_ip = base_values['client_ip']
except Exception as e:
self.trex_client.logger.post_cmd(False)
raise Exception('Parsing of config file %s failed, error: %s' % (self.base_file_path, e))
self.trex_client.logger.post_cmd(True)
# first pass, check arguments
if mac:
check_mac_addr(mac)
if ip:
check_ipv4_addr(ip)
if udp:
if udp < 1023 and udp > 65000:
raise Exception('Base UDP port should be within range 1024-65000')
if radio:
check_mac_addr(radio)
if radio.split(':')[-1] != '00':
raise Exception('Radio MACs should end with zero, got: %s' % radio)
if client_mac:
check_mac_addr(client_mac)
if client_ip:
check_ipv4_addr(client_ip)
# second pass, assign arguments
if mac:
self.next_ap_mac = mac
if ip:
self.next_ap_ip = ip
if udp:
self.next_ap_udp = udp
if radio:
self.next_ap_radio = radio
if client_mac:
self.next_client_mac = client_mac
if client_ip:
self.next_client_ip = client_ip
if save:
self.trex_client.logger.pre_cmd('Saving base values')
try:
with open(self.base_file_path, 'w') as f:
f.write(yaml.dump({
'ap_mac': self.next_ap_mac,
'ap_ip': self.next_ap_ip,
'ap_udp': self.next_ap_udp,
'ap_radio': self.next_ap_radio,
'client_mac': self.next_client_mac,
'client_ip': self.next_client_ip,
}))
except Exception as e:
self.trex_client.logger.post_cmd(False)
raise Exception('Could not save config file %s, error: %s' % (self.base_file_path, e))
self.trex_client.logger.post_cmd(True)
    def __del__(self):
        # Best-effort teardown on garbage collection. NOTE(review): close()
        # may raise during interpreter shutdown once module globals are gone.
        self.close()
| 39.201389 | 191 | 0.583854 |
e7bef06efcfbf42673ab5fed207211953dae56a1 | 5,385 | py | Python | python/tvm/auto_tensorize/policy/transform_policy.py | QinHan-Erin/AMOS | 634bf48edf4015e4a69a8c32d49b96bce2b5f16f | [
"Apache-2.0"
] | 22 | 2022-03-18T07:29:31.000Z | 2022-03-23T14:54:32.000Z | python/tvm/auto_tensorize/policy/transform_policy.py | QinHan-Erin/AMOS | 634bf48edf4015e4a69a8c32d49b96bce2b5f16f | [
"Apache-2.0"
] | null | null | null | python/tvm/auto_tensorize/policy/transform_policy.py | QinHan-Erin/AMOS | 634bf48edf4015e4a69a8c32d49b96bce2b5f16f | [
"Apache-2.0"
] | 2 | 2022-03-18T08:26:34.000Z | 2022-03-20T06:02:48.000Z | from functools import reduce
from ..utils import bi_product
import numpy as np
from ..tensorization_phases import MappingGenerator, MappingApplier
def all_fit(match_results):
minimum_padding = -1
chosen_match = None
# choose match result according minimum padding
for match_result in match_results:
effective_volume = 1
target_volume = 1
for iv, tv_lst in match_result.axis_map.items():
intrin_extent = int(iv.dom.extent)
ext = reduce(lambda x, y: x * int(y.dom.extent), tv_lst, 1)
target_volume *= ext
iterations = (ext + intrin_extent) - 1 // intrin_extent
effective_volume *= iterations * intrin_extent
padding_volume = effective_volume - target_volume
if minimum_padding < 0:
minimum_padding = padding_volume
chosen_match = match_result
else:
if padding_volume < minimum_padding:
minimum_padding = padding_volume
chosen_match = match_result
gen = MappingGenerator(chosen_match)
record = gen.get(policy="random")
# here is transform policy
record.vmap_choice = ([1 for _ in record.vmap_choice[0]], record.vmap_choice[1])
# app = MappingApplier(match_result, verbose=False)
# new_state = app.apply(record)
return chosen_match, record
def first_fit(match_results):
for match_result in match_results:
if not len(match_result.axis_map.values()):
continue
choices = bi_product(len(list(match_result.axis_map.values())[0]))
# random permutation
np.random.shuffle(choices)
gen = MappingGenerator(match_result)
record = gen.get(policy="random")
for bit_vec in choices:
if reduce(lambda x, y: x + y, bit_vec, 0) == 0:
continue
tmp_set = {}
value_set = {}
for ind, v in enumerate(bit_vec):
if v:
for k, lst in match_result.axis_map.items():
if k not in tmp_set:
tmp_set[k] = set()
value_set[k] = 1
if hash(lst[ind]) not in tmp_set[k]:
tmp_set[k].add(hash(lst[ind]))
value_set[k] *= int(lst[ind].dom.extent)
found = True
for k, v in value_set.items():
if v < int(k.dom.extent):
found = False
break
if found:
record.vmap_choice = (bit_vec, record.vmap_choice[1])
return match_result, record
assert match_result is not None
assert record is not None
# return the last one searched
return match_result, record
def default_score_func(*args, **kwargs):
assert len(args) > 2
value_map = args[0]
intrin_op = args[1]
target_op = args[2]
total_volume = 1
org_volume = 1
for k, lst in value_map.items():
ext = reduce(lambda x, y: x * int(y.dom.extent), lst, 1)
intrin_extent = int(k.dom.extent)
tiles = (ext + intrin_extent - 1) / intrin_extent
total_volume *= tiles * intrin_extent
org_volume *= ext
return (total_volume - org_volume) / (org_volume + 1e-5)
def best_fit(match_results, score_func=default_score_func):
def helper2(args):
match_result, bit_vec = args
if reduce(lambda x, y: x + y, bit_vec, 0) == 0:
return 1e10
tmp_set = {}
value_set = {}
for ind, v in enumerate(bit_vec):
if v:
for k, lst in match_result.axis_map.items():
if k not in tmp_set:
tmp_set[k] = set()
value_set[k] = []
if hash(lst[ind]) not in tmp_set[k]:
tmp_set[k].add(hash(lst[ind]))
value_set[k].append(lst[ind])
for intrin_op, target_op in match_result.main_op_map.items():
pass
score = score_func(value_set, intrin_op, target_op)
return score
def helper1(idx):
match_result = match_results[idx]
choices = bi_product(len(list(match_result.axis_map.values())[0]))
args = [(match_result, choice) for choice in choices]
score_lst = list(map(helper2, args))
best_ind = np.argmin(score_lst)
return (match_result, choices[best_ind], score_lst[best_ind])
args = range(len(match_results))
results = list(map(helper1, args))
results = sorted(results, key=lambda x: x[2])
assert len(results) > 0
# choose the minimal one
match_result, choice, score = results[0]
gen = MappingGenerator(match_result)
record = gen.get(policy="random")
# here is transform policy
record.vmap_choice = (choice, record.vmap_choice[1])
return match_result, record
def choose_one(match_results, match_id, mapping_id):
assert match_id < len(match_results)
match_result = match_results[match_id]
gen = MappingGenerator(match_result)
mappings = gen.get_all()
print("List all possible mappings for this matching", flush=True)
for i, m in enumerate(mappings):
print(i, ":", str(m), flush=True)
assert mapping_id < len(mappings)
record = mappings[mapping_id]
return match_result, record
| 37.395833 | 84 | 0.594243 |
bed695ae4944e53b7412e61070e010cfcf8cc0d4 | 325 | py | Python | amy/workshops/signals/__init__.py | code-review-doctor/amy | 268c1a199510457891459f3ddd73fcce7fe2b974 | [
"MIT"
] | 42 | 2019-06-20T16:32:16.000Z | 2022-02-02T10:39:38.000Z | amy/workshops/signals/__init__.py | code-review-doctor/amy | 268c1a199510457891459f3ddd73fcce7fe2b974 | [
"MIT"
] | 484 | 2019-06-19T10:45:37.000Z | 2022-03-28T10:37:25.000Z | amy/workshops/signals/__init__.py | code-review-doctor/amy | 268c1a199510457891459f3ddd73fcce7fe2b974 | [
"MIT"
] | 14 | 2019-11-04T11:08:24.000Z | 2022-03-10T10:06:11.000Z | from django.dispatch import Signal
# signal generated when a comment regarding specific object should be saved
create_comment_signal = Signal(
providing_args=["content_object", "comment", "timestamp"]
)
# signal generated when a Person object has been archived
person_archived_signal = Signal(providing_args=["person"])
| 36.111111 | 75 | 0.796923 |
39225cb02af508af668015e6cae20382f81f73a2 | 2,557 | py | Python | owllook/spiders/qidian_ranking.py | zhouinfo/owllook | ed43fc1a63292b3e03806de4a3641ad82de99e99 | [
"Apache-2.0"
] | 4 | 2021-09-24T23:49:52.000Z | 2021-11-18T10:58:01.000Z | owllook/spiders/qidian_ranking.py | zhouinfo/owllook | ed43fc1a63292b3e03806de4a3641ad82de99e99 | [
"Apache-2.0"
] | null | null | null | owllook/spiders/qidian_ranking.py | zhouinfo/owllook | ed43fc1a63292b3e03806de4a3641ad82de99e99 | [
"Apache-2.0"
] | 1 | 2021-12-19T14:27:47.000Z | 2021-12-19T14:27:47.000Z | #!/usr/bin/env python
import time
from talonspider import Spider, Item, TextField, AttrField
from talonspider.utils import get_random_user_agent
from pprint import pprint
from owllook.database.mongodb import MotorBase
from owllook.utils.tools import async_callback
class RankingItem(Item):
target_item = TextField(css_select='.rank-list')
ranking_title = TextField(css_select='h3.wrap-title')
more = AttrField(css_select='h3>a.more', attr='href')
book_list = TextField(css_select='div.book-list>ul>li')
def tal_more(self, more):
return "http:" + more
class NameItem(Item):
top_name = TextField(css_select='h4>a')
other_name = TextField(css_select='a.name')
class QidianRankingSpider(Spider):
start_urls = ["http://r.qidian.com/?chn=" + str(url) for url in [-1, 21, 1, 2, 22, 4, 15, 6, 5, 7, 8, 9, 10, 12]]
headers = {
"User-Agent": get_random_user_agent()
}
set_mul = True
def parse(self, res):
items_data = RankingItem.get_items(html=res.html)
result = []
res_dic = {}
for item in items_data:
each_book_list = []
# 只取排名前十的书籍数据
for index, value in enumerate(item.book_list[:10]):
item_data = NameItem.get_item(html_etree=value)
name = item_data.get('top_name') or item_data.get('other_name')
each_book_list.append({
'num': index + 1,
'name': name
})
data = {
'title': item.ranking_title,
'more': item.more,
'book_list': each_book_list,
'updated_at': time.strftime("%Y-%m-%d %X", time.localtime()),
}
result.append(data)
res_dic['data'] = result
res_dic['target_url'] = res.url
res_dic['spider'] = "qidian"
async_callback(self.save, res_dic=res_dic)
async def save(self, **kwargs):
# 存进数据库
res_dic = kwargs.get('res_dic')
try:
motor_db = MotorBase().db
await motor_db.novels_ranking.update_one({
'target_url': res_dic['target_url']},
{'$set': {
'data': res_dic['data'],
'spider': res_dic['spider'],
'finished_at': time.strftime("%Y-%m-%d %X", time.localtime())
}},
upsert=True)
except Exception as e:
self.logger.info(e)
if __name__ == '__main__':
QidianRankingSpider().start()
| 32.782051 | 117 | 0.565115 |
9e5263f49fb9a68c97d0d59383186109d38f5807 | 2,898 | py | Python | examples/adspygoogle/dfp/v201308/activity_service/get_all_activities.py | cherry-wb/googleads-python-lib | 24a1ecb7c1cca5af3624a3b03ebaa7f5147b4a04 | [
"Apache-2.0"
] | null | null | null | examples/adspygoogle/dfp/v201308/activity_service/get_all_activities.py | cherry-wb/googleads-python-lib | 24a1ecb7c1cca5af3624a3b03ebaa7f5147b4a04 | [
"Apache-2.0"
] | null | null | null | examples/adspygoogle/dfp/v201308/activity_service/get_all_activities.py | cherry-wb/googleads-python-lib | 24a1ecb7c1cca5af3624a3b03ebaa7f5147b4a04 | [
"Apache-2.0"
] | 2 | 2020-04-02T19:00:31.000Z | 2020-08-06T03:28:38.000Z | #!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all activities.
To create activities, run create_activities.py.
Tags: ActivityService.getActivitiesByStatement
Tags: ActivityGroupService.getActivityGroupsByStatement
"""
__author__ = 'Vincent Tsao'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.dfp import DfpUtils
def GetAllActivityGroupIds(client):
"""Gets all activity group IDs."""
activity_group_ids = []
# Initialize appropriate service.
activity_group_service = client.GetService('ActivityGroupService',
version='v201308')
# Get activity groups by statement.
activity_groups = DfpUtils.GetAllEntitiesByStatementWithService(
activity_group_service)
# Display results.
for activity_group in activity_groups:
activity_group_ids.append(activity_group['id'])
return activity_group_ids
def main(client):
# Initialize appropriate service.
activity_service = client.GetService('ActivityService', version='v201308')
total_results_counter = 0
activity_group_ids = GetAllActivityGroupIds(client)
for activity_group_id in activity_group_ids:
# Set the activity group ID to select from.
values = [{
'key': 'activityGroupId',
'value': {
'xsi_type': 'NumberValue',
'value': activity_group_id
}
}]
query = 'WHERE activityGroupId = :activityGroupId'
# Get activities by statement.
activities = DfpUtils.GetAllEntitiesByStatementWithService(
activity_service, query=query, bind_vars=values)
total_results_counter += len(activities)
# Display results.
for activity in activities:
print ('Activity with ID \'%s\', name \'%s\', and type \'%s\' was '
'found.' % (activity['id'], activity['name'], activity['type']))
print
print 'Number of results found: %s' % total_results_counter
if __name__ == '__main__':
# Initialize client object.
dfp_client = DfpClient(path=os.path.join('..', '..', '..', '..', '..'))
main(dfp_client)
| 30.1875 | 80 | 0.706004 |
1cb8d22fd0d95ad0007ba4014a8fe706ff961ba5 | 8,026 | py | Python | webapp/library.py | JoshuaGud777/AppleQuest | 640578bf7ffcab35e25a8352b5a72921bf4bc9ae | [
"Apache-2.0"
] | null | null | null | webapp/library.py | JoshuaGud777/AppleQuest | 640578bf7ffcab35e25a8352b5a72921bf4bc9ae | [
"Apache-2.0"
] | 14 | 2015-02-14T01:01:46.000Z | 2015-03-02T19:46:52.000Z | webapp/library.py | JoshuaGud777/AppleQuest | 640578bf7ffcab35e25a8352b5a72921bf4bc9ae | [
"Apache-2.0"
] | null | null | null | '''This is the library files for all used code
that needs to be uused multi times'''
import binascii
import cgi
import hashlib
import http.cookies
import os
import sqlite3
import time
# Global Variables
HTML_DIR = 'html\\'
REDIRECT_DIR = 'redirect\\'
DB_DIR = 'db17b1a5c2b2f6d370af2c59c885d5db\\'
# COOKIE_MAX_AGE = 300
# COOKIE_DOMAIN = 'applequest.fallenofftheedge.com'
COOKIE_PATH = '/'
conn = None
c = None
def open_conn(database): # Function 1
'''Open SQL Connection to a given sqlite databsase'''
global conn
global c
conn = sqlite3.connect(database)
c = conn.cursor()
def save_conn(): # Function 2
'''Savesthe conn'''
conn.commit()
def save_close_conn(): # Function 3
'''Saves and closes the conn'''
conn.commit()
conn.close()
def close_conn(): # Function 4
'''Closes the database conn'''
conn.close()
def add_user(username, pword, email=None): # Function 5
    '''Create a new account row in the ``logon`` table.

    The password is salted with 64 random bytes and hashed with SHA-512
    before storage; the original mixed-case name is kept as the display
    name while the lookup key is stored lower-cased.

    Returns True on success, False when the insert fails (e.g. the
    username already exists).'''
    display = username[:]
    username = username.lower()
    salt = binascii.hexlify(os.urandom(64))  # 64 bytes = 512 bits
    utf8pword = pword.encode("utf-8")
    utf8pword_salt = utf8pword + salt
    hashed_salted_password = hashlib.sha512(utf8pword_salt)
    enchexpass = hashed_salted_password.hexdigest()
    try:
        c.execute("INSERT INTO logon VALUES (?, ?, ?, ?, ?)", (username,
                                                               display,
                                                               enchexpass,
                                                               salt, email))
    except sqlite3.Error:
        # Narrowed from a bare except: only database failures (such as
        # a UNIQUE constraint on username) should map to False; any
        # other bug now surfaces instead of being swallowed.
        return False
    return True
def issue_session_id(username, pword): # Function 6
    '''Authenticate the user and create a fresh session row.

    Returns a ``(sessionid, exp, username)`` tuple on success, the
    sentinel tuple ``('noauth', 'noauth', 'noauth')`` when the
    credentials are wrong, and ``('sqlerror', ...)`` after 10 failed
    insert attempts.'''
    username = username.lower()
    authuser = check_user(username, pword)
    if authuser is True:
        sqlretry = 0
        sqlgood = False
        while sqlgood is False:
            # c.execute("SELECT * FROM logon WHERE username = ?", [username])
            # dbdata = c.fetchone()
            # db_username = dbdata[0]
            # db_display = dbdata[1]
            exp = int(time.time()) + 300
            # seconds till this is expired | 300 = 5 min | 1 = 1 sec
            sessionid = binascii.hexlify(os.urandom(16)).decode("utf-8")
            try:
                # Only one live session per user: replace any old row.
                c.execute("DELETE FROM sessions WHERE username = ?",
                          [username])
                c.execute("INSERT INTO sessions VALUES (?, ?, ?)",
                          [sessionid, exp, username])
                sqlgood = True
            except:
                # NOTE(review): bare except retries on *any* error, not
                # just sqlite failures -- consider sqlite3.Error.
                sqlretry += 1
                if sqlretry == 10:
                    return ('sqlerror', 'sqlerror', 'sqlerror')
        save_conn()
        return (sessionid, exp, username)
    return ('noauth', 'noauth', 'noauth')
def renew_session_id(old_id, username): # Function 7
    '''Validate an existing session and rotate its id.

    Returns ``(sessionid, exp, username)`` with a fresh id when the old
    session is still valid, ``'expired'`` when it has timed out,
    ``'sqlerror'`` after 10 failed insert attempts, and ``False`` when
    no session row matches at all (a fourth return shape that callers
    must handle in addition to the ones the old docstring listed).'''
    username = username.lower()
    c.execute("SELECT * FROM sessions WHERE username = ? AND id = ?",
              [username, old_id])
    dbdata = c.fetchone()
    if dbdata is None:
        return False
    db_exp = int(dbdata[1])
    if int(time.time()) > db_exp:
        return 'expired'
    elif int(time.time()) <= db_exp:
        sqlgood = False
        sqlretry = 0
        while sqlgood is False:
            exp = int(time.time()) + 300
            # seconds till this is expired | 300 = 5 min | 1 = 1 sec
            sessionid = binascii.hexlify(os.urandom(16)).decode("utf-8")
            try:
                # Replace the old session row with the rotated id.
                c.execute("DELETE FROM sessions WHERE username = ?",
                          [username])
                c.execute("INSERT INTO sessions VALUES (?, ?, ?)",
                          [sessionid, exp, username])
                sqlgood = True
            except:
                # NOTE(review): bare except -- consider sqlite3.Error.
                sqlretry += 1
                if sqlretry == 10:
                    return 'sqlerror'
        save_conn()
        return (sessionid, exp, username)
def delete_session(sessionid, username): # Function 8
    '''Remove a stored session (client logoff).

    Returns True when a row matching either the session id or the
    username existed and was deleted, False otherwise.'''
    username = username.lower()
    params = [username, sessionid]
    c.execute("SELECT * FROM sessions WHERE username = ? OR id = ?", params)
    row = c.fetchone()
    if row is None:
        return False
    c.execute("DELETE FROM sessions WHERE username = ? OR id = ?", params)
    save_conn()
    return True
def check_user(username, pword):
    '''Validate a username/password pair against the ``logon`` table.

    Returns True when the password matches, False when it does not or
    when the user is unknown.  (Previously the unknown-user case leaked
    ``None``; callers compare with ``is True``, so returning False is
    backward compatible and keeps the contract a plain bool.)'''
    username = username.lower()
    c.execute("SELECT username, password, salt FROM logon WHERE username = ?",
              [username])
    dbdata = c.fetchone()
    if dbdata is None:
        return False
    enchexpassdb = dbdata[1]
    salt = dbdata[2]
    # Re-derive the salted SHA-512 hex digest and compare in
    # constant time against the stored one.
    utf8pword_salt = pword.encode('utf8') + salt
    enchexpass = hashlib.sha512(utf8pword_salt).hexdigest()
    return slow_equal(enchexpassdb, enchexpass)
def slow_equal(hexstrA, hexstrB): # Function 9
    '''Constant-time equality check for two hex-digest strings.

    The previous implementation concatenated ``format(ord(ch), 'b')``
    for every character *without zero padding*: digits encode to 6 bits
    while ``a``-``f`` encode to 7, so the per-character code was not
    prefix-free and two different strings could in principle XOR to
    zero errors.  This version XOR-accumulates the character codes
    directly, which is both correct and still free of early exits on
    the equal-length path.  The 1-second sleep on a length mismatch is
    kept from the original behaviour.'''
    if len(hexstrA) != len(hexstrB):
        time.sleep(1)
        return False
    errors = 0
    for cha, chb in zip(hexstrA, hexstrB):
        errors |= ord(cha) ^ ord(chb)
    return errors == 0
def cookie_wright(sessionid, exp, username): # Function 10
    '''Build the session cookie for the client.

    Returns a BaseCookie carrying the session id, its expiry time and
    the username, each scoped to COOKIE_PATH; pass the result to
    print_header() to emit it.'''
    cookie = http.cookies.BaseCookie()
    cookie['id'] = sessionid
    cookie['exp'] = exp
    cookie['username'] = username
    for name in cookie:
        morsel = cookie[name]
        # morsel['max-age'] = COOKIE_MAX_AGE
        # morsel['domain'] = COOKIE_DOMAIN
        morsel['path'] = COOKIE_PATH
    return cookie
def get_cookies(): # Function 11
    '''Parse the cookies the client sent with the request.

    Returns a BaseCookie built from the HTTP_COOKIE CGI environment
    variable, or None when the client sent no cookie header.'''
    raw = os.environ.get('HTTP_COOKIE')
    if raw is None:
        return None
    cookie = http.cookies.BaseCookie()
    cookie.load(raw)
    return cookie
def print_header(cookie=''): # Function 12
    '''Print the CGI response headers (plus optional Set-Cookie lines).

    The header block must end with exactly one blank line.  When
    ``cookie`` is the default empty string, ``print(cookie)`` itself
    emits that blank terminator; when a cookie object is passed, its
    Set-Cookie lines are printed first and the extra ``print()``
    supplies the terminator.  ``cookie`` should be the object returned
    by cookie_wright().'''
    print('Content-type: text/html')
    print('Status: 200 OK')
    print(cookie)
    if not cookie == '':
        print()
def get_html(filepath): # Function 13
    '''Return the full contents of *filepath* as one string,
    newlines included.

    The file handle is now closed deterministically via a context
    manager; the original left it open until garbage collection.'''
    with open(filepath) as file:
        return file.read()
def print_me(filename): # Function 14
    '''Print the file's contents to stdout (used for redirect pages).

    Uses a context manager so the file handle is closed promptly; the
    original leaked it until garbage collection.'''
    with open(filename) as file:
        print(file.read())
def get_cgi_data(): # Function 15
    '''Return a cgi.FieldStorage with the form data the client submitted.'''
    # NOTE(review): the cgi module is deprecated since Python 3.11 and
    # removed in 3.13 (PEP 594); a future cleanup should migrate this.
    cgidata = cgi.FieldStorage()
    return cgidata
| 30.286792 | 79 | 0.598804 |
68ca340a5d7084933c19233d894d6db471755718 | 9,294 | py | Python | pyjobs_web/pyjobsweb/lib/search_query.py | pyjobs/web | 183742ae571ee83d4fe4b34f1f0ce9f2204c449e | [
"MIT"
] | 8 | 2016-01-29T13:06:26.000Z | 2020-11-02T07:23:57.000Z | pyjobs_web/pyjobsweb/lib/search_query.py | pyjobs/web | 183742ae571ee83d4fe4b34f1f0ce9f2204c449e | [
"MIT"
] | 18 | 2016-02-11T08:17:13.000Z | 2022-03-02T14:53:38.000Z | pyjobs_web/pyjobsweb/lib/search_query.py | pyjobs/web | 183742ae571ee83d4fe4b34f1f0ce9f2204c449e | [
"MIT"
] | 5 | 2016-02-05T08:57:12.000Z | 2018-01-15T08:19:43.000Z | # -*- coding: utf-8 -*-
import abc
class Translatable(object):
    """Interface for nodes that render themselves through a translator
    (visitor pattern)."""
    # Python 2 style abstract base: ``__metaclass__`` is only honoured
    # by Python 2 (this module also relies on ``basestring``).
    __metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def translate(self, translator):
        """Dispatch to the translator method that handles this node."""
        pass
    @abc.abstractmethod
    def __str__(self):
        return str()
class QueryStatement(Translatable):
    """Marker base class for anything that may appear inside a Query."""
    __metaclass__ = abc.ABCMeta
class SortStatement(Translatable):
    """Base class for a single sort criterion over one field."""
    __metaclass__ = abc.ABCMeta
    # Field (or expression) the results should be ordered by.
    _to_sort = None
    def __init__(self, to_sort):
        self.to_sort = to_sort
    @property
    def to_sort(self):
        return self._to_sort
    @to_sort.setter
    def to_sort(self, to_sort):
        # NOTE(review): unlike the other setters in this module this one
        # performs no type validation; any value is accepted.
        self._to_sort = to_sort
class Filter(QueryStatement):
    """Marker base class for filtering statements."""
    __metaclass__ = abc.ABCMeta
class Sort(list, QueryStatement):
    """Ordered collection of SortStatement objects; the list order is
    the sort priority."""
    def __init__(self):
        # Fixed: was ``super(list, self).__init__()``, which looked up
        # the MRO *after* list and therefore skipped list's own
        # initializer; ``super(Sort, self)`` is the correct call.
        super(Sort, self).__init__()
        self._type = SortStatement
    def append(self, sort):
        """Append a SortStatement, rejecting any other type."""
        if not isinstance(sort, SortStatement):
            raise TypeError('sort should be of type %s.' % self._type)
        super(Sort, self).append(sort)
    def translate(self, translator):
        return translator.translate_sort(self)
    def __str__(self):
        res = 'Sort['
        for i, e in enumerate(self):
            if i > 0:
                res = '{}, '.format(res)
            res = '{}{}'.format(res, e)
        return '{}]'.format(res)
class AscSortStatement(SortStatement):
    """Sort ascending on the wrapped field."""
    def translate(self, translator):
        return translator.translate_asc_sort_statement(self)
    def __str__(self):
        return 'AscSortStatement[to_sort: {}]'.format(self.to_sort)
class DescSortStatement(SortStatement):
    """Sort descending on the wrapped field."""
    def translate(self, translator):
        return translator.translate_desc_sort_statement(self)
    def __str__(self):
        return 'DescSortStatement[to_sort: {}]'.format(self.to_sort)
class BooleanFilter(Filter):
    """Filter matching a boolean field against an exact value."""
    _field = None
    _value = None
    def __init__(self, field, value):
        self.field = field
        self.value = value
    def translate(self, translator):
        return translator.translate_boolean_filter(self)
    @property
    def field(self):
        return self._field
    @field.setter
    def field(self, field):
        if isinstance(field, basestring):
            self._field = field
        else:
            raise TypeError('field should be of type: %s.' % basestring)
    @property
    def value(self):
        return self._value
    @value.setter
    def value(self, value):
        if isinstance(value, bool):
            self._value = value
        else:
            raise TypeError('value should be of type: %s.' % bool)
    def __str__(self):
        return 'BooleanFilter: [{} == {}]'.format(self.field, self.value)
class KeywordFilter(Filter):
    """Filter searching the given keywords within the given fields."""
    _fields = None
    _keywords = None
    def __init__(self, fields, keywords):
        self.fields = fields
        self.keywords = keywords
    def translate(self, translator):
        return translator.translate_keyword_filter(self)
    @staticmethod
    def _is_string_list(candidate):
        """True when candidate is a list whose items are all strings."""
        return (isinstance(candidate, list)
                and all(isinstance(item, basestring) for item in candidate))
    @property
    def fields(self):
        return self._fields
    @fields.setter
    def fields(self, fields):
        if not self._is_string_list(fields):
            raise TypeError('fields should be a list of strings.')
        self._fields = fields
    @property
    def keywords(self):
        return self._keywords
    @keywords.setter
    def keywords(self, keywords):
        if not self._is_string_list(keywords):
            raise TypeError('keywords should be a list of strings.')
        self._keywords = keywords
    def __str__(self):
        return 'KeywordFilter: [{}, {}]'.format(self._fields, self._keywords)
class GeolocationFilter(Filter):
    """Filter keeping results within *radius* (in *unit*) of *center*."""
    _center = None
    _radius = None
    _unit = None
    from enum import Enum
    class UnitsEnum(Enum):
        # Distance units understood by the search backends.
        KM = 'km'
        M = 'm'
    class Center(object):
        """Latitude/longitude pair; both coordinates must be floats."""
        _latitude = None
        _longitude = None
        def __init__(self, latitude, longitude):
            self.latitude = latitude
            self.longitude = longitude
        @property
        def latitude(self):
            return self._latitude
        @latitude.setter
        def latitude(self, latitude):
            if not isinstance(latitude, float):
                raise TypeError('latitude should be of type %s.' % float)
            self._latitude = latitude
        @property
        def longitude(self):
            return self._longitude
        @longitude.setter
        def longitude(self, longitude):
            if not isinstance(longitude, float):
                raise TypeError('longitude should be of type %s.' % float)
            self._longitude = longitude
        def __str__(self):
            return '[{}, {}]'.format(self.latitude, self.longitude)
    def __init__(self, center, radius, unit=UnitsEnum.KM):
        self.center = center
        self.radius = radius
        self.unit = unit
    def translate(self, translator):
        return translator.translate_geolocation_filter(self)
    @property
    def center(self):
        return self._center
    @center.setter
    def center(self, center):
        center_type = GeolocationFilter.Center
        if not isinstance(center, center_type):
            # Fixed: the message was built with a comma instead of the
            # ``%`` operator, so the expected type was passed as a
            # second TypeError argument and never interpolated.
            raise TypeError('center should be of type %s.' % center_type)
        self._center = center
    @property
    def radius(self):
        return self._radius
    @radius.setter
    def radius(self, radius):
        if not isinstance(radius, float):
            raise TypeError('radius should be of type %s.' % float)
        self._radius = radius
    @property
    def unit(self):
        # Returns the raw string value ('km'/'m'), not the enum member.
        return self._unit.value
    @unit.setter
    def unit(self, unit):
        unit_enum = GeolocationFilter.UnitsEnum
        if unit not in unit_enum:
            raise TypeError('unit should be of type %s.' % unit_enum)
        self._unit = unit
    def __str__(self):
        return 'GeolocationFilter[Center: {}, Radius: {}, Unit: {}]'\
            .format(self._center, self._radius, self._unit)
class Query(list):
    """Ordered collection of QueryStatement objects forming one query."""
    def __init__(self):
        # Fixed: was ``super(list, self).__init__()``, which skipped
        # list's own initializer in the MRO; ``super(Query, self)`` is
        # the correct call.
        super(Query, self).__init__()
        self._type = QueryStatement
    def append(self, query_elem):
        """Append a QueryStatement, rejecting any other type."""
        if not isinstance(query_elem, QueryStatement):
            raise TypeError('search_filter should be of type %s.' % self._type)
        super(Query, self).append(query_elem)
    def __str__(self):
        res = 'Query['
        for i, e in enumerate(self):
            if i > 0:
                res = '{}, '.format(res)
            res = '{}{}'.format(res, e)
        return '{}]'.format(res)
class QueryTranslator(object):
    """Visitor that converts a Query into a backend-specific query.

    Subclasses implement one ``translate_*`` method per statement type;
    ``translate()`` folds every statement into ``query_object``."""
    __metaclass__ = abc.ABCMeta
    _query_object = None
    @abc.abstractmethod
    def __init__(self, query_object):
        self.query_object = query_object
    @property
    def query_object(self):
        return self._query_object
    @query_object.setter
    def query_object(self, query_object):
        # Rejects any falsy value (None, empty containers), not just None.
        if not query_object:
            raise ValueError('query_object should not be null.')
        self._query_object = query_object
    def translate(self, query):
        """Apply every statement in *query* to the backend query object."""
        if not isinstance(query, Query):
            raise TypeError('query should be of type %s.' % Query)
        for search_query in query:
            self.query_object = search_query.translate(self)
        return self.query_object
    @abc.abstractmethod
    def translate_sort(self, multi_sort):
        pass
    @abc.abstractmethod
    def translate_asc_sort_statement(self, asc_sort):
        pass
    @abc.abstractmethod
    def translate_desc_sort_statement(self, desc_sort):
        pass
    @abc.abstractmethod
    def translate_boolean_filter(self, search_filter):
        pass
    @abc.abstractmethod
    def translate_keyword_filter(self, search_filter):
        pass
    @abc.abstractmethod
    def translate_geolocation_filter(self, search_filter):
        pass
class QueryBuilder(object):
    """Accumulates query statements and renders them with a translator."""
    _translator = None
    def __init__(self, translator):
        self.translator = translator
        self._query = Query()
    @property
    def translator(self):
        return self._translator
    @translator.setter
    def translator(self, translator):
        expected = QueryTranslator
        if isinstance(translator, expected):
            self._translator = translator
        else:
            raise TypeError('translator should be of type %s.' % expected)
    def add_elem(self, elem):
        """Append a statement to the query under construction."""
        self._query.append(elem)
    def build(self):
        """Translate the accumulated query and return the result."""
        return self.translator.translate(self._query)
    def __str__(self):
        return str(self._query)
class BaseSearchQuery(object):
    """Abstract search query: collects statements in a QueryBuilder and
    executes the built query in a backend-specific way."""
    __metaclass__ = abc.ABCMeta
    _query_builder = None
    @abc.abstractmethod
    def __init__(self, query_builder):
        self.builder = query_builder
    @property
    def builder(self):
        return self._query_builder
    @builder.setter
    def builder(self, builder):
        build_type = QueryBuilder
        if not isinstance(builder, build_type):
            raise TypeError('query_builder should be of type %s.' % build_type)
        self._query_builder = builder
    def add_elem(self, elem):
        """Append a query statement to the underlying builder."""
        self.builder.add_elem(elem)
    @abc.abstractmethod
    def execute_query(self):
        pass
    def __str__(self):
        return self.builder.__str__()
46555e13ae509514ce93fcc326684a750c3d665b | 1,967 | py | Python | MainInventory.py | marshallbrain/CSC-450-Final-Project | 4ddcfbc98835cf03e0d26a672d68ccdb86419ffc | [
"MIT"
] | null | null | null | MainInventory.py | marshallbrain/CSC-450-Final-Project | 4ddcfbc98835cf03e0d26a672d68ccdb86419ffc | [
"MIT"
] | null | null | null | MainInventory.py | marshallbrain/CSC-450-Final-Project | 4ddcfbc98835cf03e0d26a672d68ccdb86419ffc | [
"MIT"
] | null | null | null | import tkinter as tk
import tkinter.ttk as ttk
import utility
class MainInventory:
def __init__(self, main_frame, db):
self.tab = ttk.Frame(main_frame)
self.col = "id"
self.ascending = False
self.db = db
self.search = ""
self.tab.bind("<Visibility>", self.update)
columns = ('id', 'name', 'amount')
table = ttk.Treeview(self.tab, columns=columns, show='headings')
self.table = table
frame = ttk.Frame(self.tab)
search_var = tk.StringVar()
search_input = tk.Entry(frame, textvariable=search_var, width=30)
search_input.pack(side=tk.LEFT)
search = ttk.Button(frame, text="Search", command=lambda: self.search_table(search_var.get()))
search.pack(side=tk.LEFT)
frame.pack()
table.heading('id', text='ID', anchor='w', command=lambda: self.sort_table("id", False))
table.column('id', anchor="w", width=50)
table.heading('name', text='Product Name', command=lambda: self.sort_table("name", False))
table.column('name', anchor='center', width=200)
table.heading('amount', text='Amount', command=lambda: self.sort_table("amount", False))
table.column('amount', anchor='center', width=100)
table.pack()
self.update_table()
def sort_table(self, col, ascending):
self.col = col
self.ascending = ascending
self.update_table()
self.table.heading(col, command=lambda: self.sort_table(col, not ascending))
def update_table(self):
rows = utility.update_table(self.db, "inventory", self.col, self.ascending, self.search)
for i in self.table.get_children():
self.table.delete(i)
for row in rows:
self.table.insert('', 'end', values=row)
def search_table(self, search):
self.search = search
self.update_table()
def update(self, event):
self.update_table()
| 32.783333 | 102 | 0.6182 |
02f47dadea79576a9f157ac8f16ff4cc069abf71 | 106 | py | Python | verilogparser/__init__.py | sepandhaghighi/verilogparser | 8983b8d74fa28605b6a6772c6a02eafa6e6ba213 | [
"MIT"
] | 13 | 2017-10-29T15:52:19.000Z | 2022-02-06T18:32:20.000Z | verilogparser/__init__.py | sepandhaghighi/verilogparser | 8983b8d74fa28605b6a6772c6a02eafa6e6ba213 | [
"MIT"
] | null | null | null | verilogparser/__init__.py | sepandhaghighi/verilogparser | 8983b8d74fa28605b6a6772c6a02eafa6e6ba213 | [
"MIT"
] | 4 | 2020-01-20T07:13:26.000Z | 2022-02-06T18:32:59.000Z | # -*- coding: utf-8 -*-
from .verilogparser import *
from .logics import *
from .deductivelogic import * | 17.666667 | 29 | 0.688679 |
f8032138ade8afcc9f22d57524345f4e8200d779 | 4,693 | py | Python | inference/klue-nli/dataset.py | OhDasom88/KoreanNLPTaskPractice | 57261d194b926eafe9c2215d9290a3a96d7f8b2f | [
"Apache-2.0"
] | 18 | 2021-12-22T09:41:24.000Z | 2022-03-19T12:54:30.000Z | inference/klue-nli/dataset.py | OhDasom88/KoreanNLPTaskPractice | 57261d194b926eafe9c2215d9290a3a96d7f8b2f | [
"Apache-2.0"
] | null | null | null | inference/klue-nli/dataset.py | OhDasom88/KoreanNLPTaskPractice | 57261d194b926eafe9c2215d9290a3a96d7f8b2f | [
"Apache-2.0"
] | 4 | 2021-12-26T11:31:46.000Z | 2022-03-28T07:55:45.000Z | import dataclasses
import json
import logging
from dataclasses import dataclass
from typing import List, Optional, Union
import torch
from transformers import PreTrainedTokenizer
logger = logging.getLogger(__name__)
@dataclass
class KlueNliInputExample:
    """One raw KLUE-NLI example (premise/hypothesis pair).

    Args:
        guid: Unique identifier of the example.
        text_a: Untokenized premise text.
        text_b: Untokenized hypothesis text.
        label: Gold label of the example.
    """
    guid: str
    text_a: str
    text_b: str
    label: float
    def to_dict(self):
        """Return the example as a plain dict of its fields."""
        return dataclasses.asdict(self)
    def to_json_string(self):
        """Serializes this instance to a pretty-printed JSON string."""
        serialized = json.dumps(self.to_dict(), indent=2)
        return serialized + "\n"
@dataclass(frozen=True)
class KlueNliInputFeatures:
    """Model-ready features for a single example.

    Property names mirror the keyword arguments of the model's forward
    pass: token ids, attention mask, segment ids and the (optional)
    label -- int for classification, float for regression.
    """
    input_ids: List[int]
    attention_mask: Optional[List[int]] = None
    token_type_ids: Optional[List[int]] = None
    label: Optional[Union[int, float]] = None
    def to_json_string(self):
        """Serializes this instance to a compact JSON string."""
        as_dict = dataclasses.asdict(self)
        return json.dumps(as_dict) + "\n"
class KlueNliDataset:
    """Torch-style dataset for KLUE natural language inference."""
    # Label vocabulary; the index in this list is the label id.
    labels = ["entailment", "contradiction", "neutral"]
    def __init__(self, data: list, tokenizer: PreTrainedTokenizer, max_seq_length: int):
        """Dataset for KLUE-NLI.
        Args:
            data: json-loaded list of example dicts
            tokenizer: tokenizer used to build the features
            max_seq_length: maximum tokenized sequence length
        """
        self.data = data
        self.tokenizer = tokenizer
        self.max_seq_length = max_seq_length
        # Features are computed eagerly for the whole dataset up front.
        self.features = self._convert_features(self._create_examples(self.data))
    def __len__(self):
        return len(self.data)
    def __getitem__(self, idx):
        feature = self.features[idx]
        input_ids = torch.tensor(feature.input_ids, dtype=torch.long)
        attn_mask = torch.tensor(feature.attention_mask, dtype=torch.long)
        # NOTE(review): when token_type_ids is None this produces a 0-d
        # tensor instead of a sequence-length one -- confirm the
        # collate/model path tolerates that shape.
        token_type_ids = torch.tensor(
            0 if feature.token_type_ids is None else feature.token_type_ids,
            dtype=torch.long,
        )
        # NOTE(review): labels are float here although NLI label ids
        # are integers -- confirm the loss function expects float.
        labels = torch.tensor(feature.label, dtype=torch.float)
        return (input_ids, attn_mask, token_type_ids, labels)
    def _create_examples(self, data):
        # Wrap the raw dicts into KlueNliInputExample objects.
        examples = [
            KlueNliInputExample(
                guid=d["guid"],
                text_a=d["premise"],
                text_b=d["hypothesis"],
                label=d["gold_label"],
            )
            for d in self.data
        ]
        return examples
    def _convert_features(
        self, examples: List[KlueNliInputExample]
    ) -> List[KlueNliInputFeatures]:
        # Delegate tokenization/packing to the module-level helper.
        return convert_examples_to_features(
            examples,
            self.tokenizer,
            max_length=self.max_seq_length,
            label_list=self.labels,
        )
def convert_examples_to_features(
    examples: List[KlueNliInputExample],
    tokenizer: PreTrainedTokenizer,
    max_length: Optional[int] = None,
    label_list=None,
):
    """Tokenize premise/hypothesis pairs and pack them into features.

    Args:
        examples: raw examples to convert.
        tokenizer: tokenizer producing input_ids/attention_mask/etc.
        max_length: pad/truncate length; defaults to the tokenizer's
            model maximum when None.
        label_list: ordered label names; the position of each label is
            used as its integer id.

    Returns:
        A list of KlueNliInputFeatures, one per example.
    """
    if max_length is None:
        max_length = tokenizer.model_max_length
    label_map = {label: i for i, label in enumerate(label_list)}
    labels = [label_map[example.label] for example in examples]
    batch_encoding = tokenizer(
        [(example.text_a, example.text_b) for example in examples],
        max_length=max_length,
        padding="max_length",
        truncation=True,
    )
    features = []
    for i in range(len(examples)):
        inputs = {k: batch_encoding[k][i] for k in batch_encoding}
        features.append(KlueNliInputFeatures(**inputs, label=labels[i]))
    # Log a small sample for debugging; lazy %-style arguments avoid
    # formatting cost when INFO logging is disabled (the original used
    # eager "%" interpolation).
    for i, example in enumerate(examples[:5]):
        logger.info("*** Example ***")
        logger.info("guid: %s", example.guid)
        logger.info("features: %s", features[i])
    return features
35589e3808d255ac5a1cec9fca740ffa97b3bbf2 | 331 | py | Python | global_finprint/core/management/commands/dropviews.py | GlobalFinPrint/global_finprint | 8a91ceaaed42aaa716d8c9f27518ba673ebf351c | [
"Apache-2.0"
] | null | null | null | global_finprint/core/management/commands/dropviews.py | GlobalFinPrint/global_finprint | 8a91ceaaed42aaa716d8c9f27518ba673ebf351c | [
"Apache-2.0"
] | 6 | 2020-06-05T18:42:32.000Z | 2022-01-13T00:48:57.000Z | global_finprint/core/management/commands/dropviews.py | GlobalFinPrint/global_finprint | 8a91ceaaed42aaa716d8c9f27518ba673ebf351c | [
"Apache-2.0"
] | null | null | null | from django.core.management.base import BaseCommand
from django.db import connection
class Command(BaseCommand):
help = 'Drops DB views'
def handle(self, *args, **options):
with connection.cursor() as cursor:
cursor.execute(open('database/drop_views.sql', 'r').read())
print('Views dropped')
| 27.583333 | 71 | 0.676737 |
018869ef8aa5116e2885ee71abb885c8e431968d | 1,141 | py | Python | src/VersionControlProvider/Topicer.py | flexiooss/flexio-flow | 47491c7e5b49a02dc859028de0d486edc0014b26 | [
"Apache-2.0"
] | null | null | null | src/VersionControlProvider/Topicer.py | flexiooss/flexio-flow | 47491c7e5b49a02dc859028de0d486edc0014b26 | [
"Apache-2.0"
] | 44 | 2019-04-05T06:08:15.000Z | 2021-09-13T19:37:49.000Z | src/VersionControlProvider/Topicer.py | flexiooss/flexio-flow | 47491c7e5b49a02dc859028de0d486edc0014b26 | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
import abc
from typing import List
from Core.ConfigHandler import ConfigHandler
from FlexioFlow.StateHandler import StateHandler
from VersionControlProvider.DefaultTopic import DefaultTopic
from VersionControlProvider.Issue import Issue
from VersionControlProvider.Topic import Topic
class Topicer(abc.ABC):
    """Abstract base for provider-specific topic management.

    Concrete subclasses implement topic creation and lookup against a
    version-control provider, using the shared state and config
    handlers stored on the instance."""
    state_handler: StateHandler
    config_handler: ConfigHandler
    def __init__(self, state_handler: StateHandler, config_handler: ConfigHandler):
        self.state_handler: StateHandler = state_handler
        self.config_handler: ConfigHandler = config_handler
    @abc.abstractmethod
    def create(self) -> Topic:
        """Create a new topic and return it."""
        pass
    @abc.abstractmethod
    def attach_or_create(self) -> List[Topic]:
        """Attach to existing topics or create new ones; return them."""
        pass
    @abc.abstractmethod
    def attach_issue(self, topic: Topic, issue: Issue) -> Topicer:
        """Link an issue to a topic; returns self for chaining."""
        pass
    @abc.abstractmethod
    def topic_builder(self) -> Topic:
        """Return a new, empty Topic builder object."""
        pass
    @abc.abstractmethod
    def from_default(self, topic: DefaultTopic) -> Topic:
        """Build a provider Topic from a DefaultTopic template."""
        pass
    @abc.abstractmethod
    def read_topic_by_number(self, number: int) -> Topic:
        """Fetch an existing topic by its provider-side number."""
        pass
9b760990282cc7392fd40ac53a1f391de0a577ea | 9,400 | py | Python | readthedocs/organizations/models.py | astrojuanlu/readthedocs.org | 939d50541c179476d523613943a286d9a2fdbbd0 | [
"MIT"
] | 1 | 2021-07-21T17:16:59.000Z | 2021-07-21T17:16:59.000Z | readthedocs/organizations/models.py | astrojuanlu/readthedocs.org | 939d50541c179476d523613943a286d9a2fdbbd0 | [
"MIT"
] | null | null | null | readthedocs/organizations/models.py | astrojuanlu/readthedocs.org | 939d50541c179476d523613943a286d9a2fdbbd0 | [
"MIT"
] | null | null | null | """Organizations models."""
from autoslug import AutoSlugField
from django.contrib.auth.models import User
from django.db import models
from django.urls import reverse
from django.utils.crypto import salted_hmac
from django.utils.translation import ugettext_lazy as _
from readthedocs.core.permissions import AdminPermission
from readthedocs.core.utils import slugify
from . import constants
from .managers import TeamManager, TeamMemberManager
from .querysets import OrganizationQuerySet
from .utils import send_team_add_email, send_team_invite_email
class Organization(models.Model):
    """
    Organization model.
    stripe_id: Customer id from Stripe API
    """
    # Auto fields -- timestamps maintained automatically by Django.
    pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
    modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
    # Foreign
    projects = models.ManyToManyField(
        'projects.Project',
        verbose_name=_('Projects'),
        related_name='organizations',
    )
    owners = models.ManyToManyField(
        User,
        verbose_name=_('Owners'),
        related_name='owner_organizations',
        through='OrganizationOwner',
    )
    # Local
    name = models.CharField(_('Name'), max_length=100)
    slug = models.SlugField(
        _('Slug'),
        max_length=255,
        unique=True,
        null=False,
        blank=False,
    )
    email = models.EmailField(
        _('E-mail'),
        help_text='How can we get in touch with you?',
        max_length=255,
        blank=True,
        null=True,
    )
    description = models.TextField(
        _('Description'),
        help_text='Tell us a little about yourself.',
        blank=True,
        null=True,
    )
    url = models.URLField(
        _('Home Page'),
        help_text='The main website for your Organization',
        max_length=255,
        blank=True,
        null=True,
    )
    disabled = models.BooleanField(
        _('Disabled'),
        help_text='Docs and builds are disabled for this organization',
        default=False,
    )
    max_concurrent_builds = models.IntegerField(
        _('Maximum concurrent builds allowed for this organization'),
        null=True,
        blank=True,
    )
    stripe_id = models.CharField(
        _('Stripe customer ID'),
        max_length=100,
        blank=True,
        null=True,
    )
    # Manager
    objects = OrganizationQuerySet.as_manager()
    class Meta:
        base_manager_name = 'objects'
        ordering = ['name']
        get_latest_by = ['-pub_date']
    def __str__(self):
        return self.name
    def get_absolute_url(self):
        return reverse('organization_detail', args=(self.slug,))
    @property
    def users(self):
        # Alias of ``members``; both delegate to AdminPermission.
        return AdminPermission.members(self)
    @property
    def members(self):
        return AdminPermission.members(self)
    def save(self, *args, **kwargs):  # pylint: disable=signature-differs
        # Derive the slug from the name on first save only.
        if not self.slug:
            self.slug = slugify(self.name)
        super().save(*args, **kwargs)
    # pylint: disable=no-self-use
    def add_member(self, user, team):
        """
        Add member to organization team.
        user
            User to add to organization team
        team
            Team instance to add user to
        """
        # Idempotent: skips the insert when the user is already on the team.
        if not team.members.filter(pk=user.pk).exists():
            TeamMember.objects.create(team=team, member=user)
class OrganizationOwner(models.Model):
    """Intermediate table for Organization <-> User relationships."""
    owner = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
    )
    organization = models.ForeignKey(
        Organization,
        on_delete=models.CASCADE,
    )
    def __str__(self):
        # Human-readable label used in the admin and in logs.
        return _('{org} owner {owner}').format(
            org=self.organization.name,
            owner=self.owner.username,
        )
class Team(models.Model):
    """Team model."""
    # Auto fields -- timestamps maintained automatically by Django.
    pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
    modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
    # Foreign
    organization = models.ForeignKey(
        Organization,
        related_name='teams',
        on_delete=models.CASCADE,
    )
    projects = models.ManyToManyField(
        'projects.Project',
        verbose_name=_('Projects'),
        related_name='teams',
        blank=True,
    )
    members = models.ManyToManyField(
        User,
        verbose_name=_('Users'),
        related_name='teams',
        blank=True,
        through='TeamMember',
    )
    # Local
    name = models.CharField(_('Name'), max_length=100)
    # Slug is auto-derived from the name, unique per organization.
    slug = AutoSlugField(
        populate_from='name',
        always_update=True,
        unique_with=['organization'],
    )
    access = models.CharField(
        _('Access'),
        max_length=100,
        choices=constants.ACCESS_LEVELS,
        default='readonly',
    )
    auto_join_email_users = models.BooleanField(
        default=False,
        help_text="Auto join users with an organization's email address to this team.",
    )
    # Manager
    objects = TeamManager()
    class Meta:
        base_manager_name = 'objects'
        unique_together = (
            ('slug', 'organization'),
            ('name', 'organization'),
        )
    def get_absolute_url(self):
        return reverse(
            'organization_team_detail',
            args=(self.organization.slug, self.slug),
        )
    def __str__(self):
        return '{organization}/{team}'.format(
            organization=self.organization.name,
            team=self.name,
        )
    def save(self, *args, **kwargs):  # pylint: disable=signature-differs
        # NOTE(review): AutoSlugField already populates slug on save;
        # this manual fallback looks redundant -- confirm before removal.
        if not self.slug:
            self.slug = slugify(self.name)
        super().save(*args, **kwargs)
class TeamInvite(models.Model):
    """Model to keep track of invitations to an organization."""
    # Auto fields -- timestamps maintained automatically by Django.
    pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
    modified_date = models.DateTimeField(_('Modified date'), auto_now=True)
    # Foreign
    organization = models.ForeignKey(
        Organization,
        related_name='invites',
        on_delete=models.CASCADE,
    )
    team = models.ForeignKey(
        Team,
        verbose_name=_('Team'),
        related_name='invites',
        on_delete=models.CASCADE,
    )
    email = models.EmailField(_('E-mail'))
    # Opaque token embedded in the invite link (see save() below).
    hash = models.CharField(_('Hash'), max_length=250)
    count = models.IntegerField(_('Count'), default=0)
    total = models.IntegerField(_('Total'), default=10)
    class Meta:
        unique_together = ('team', 'email')
    def __str__(self):
        return '{email} to {team}'.format(
            email=self.email,
            team=self.team,
        )
    def save(self, *args, **kwargs):  # pylint: disable=signature-differs
        # The hash is recomputed on *every* save; it is deterministic
        # for a given (team, email) pair, so repeated saves are stable.
        hash_ = salted_hmac(
            # HMAC key per applications
            '.'.join([self.__module__, self.__class__.__name__]),
            # HMAC message
            ''.join([str(self.team), str(self.email)]),
        )
        # Keep every other hex character to shorten the token.
        self.hash = hash_.hexdigest()[::2]
        super().save(*args, **kwargs)
class TeamMember(models.Model):
    """Intermediate table for Team <-> Member/Invite relationships."""
    class Meta:
        unique_together = (
            ('team', 'member', 'invite'),
            ('team', 'member'),
            ('team', 'invite'),
        )
    team = models.ForeignKey(
        Team,
        on_delete=models.CASCADE,
    )
    member = models.ForeignKey(
        User,
        blank=True,
        null=True,
        default=None,
        on_delete=models.CASCADE,
    )
    invite = models.ForeignKey(
        TeamInvite,
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    objects = TeamMemberManager()
    def __str__(self):
        state = ''
        if self.is_invite:
            state = ' (pending)'
        return '{username} to {team}{state}'.format(
            username=self.username,
            team=self.team,
            state=state,
        )
    @property
    def username(self):
        """Return member username or invite email as username."""
        if self.is_member:
            return self.member.username
        if self.invite is not None:
            return self.invite.email
        return 'Unknown'
    @property
    def full_name(self):
        """Return member or invite full name."""
        if self.is_member:
            return self.member.get_full_name()
        return ''
    @property
    def email(self):
        """Return member or invite email address."""
        # NOTE(review): raises AttributeError when both member and
        # invite are None (unlike username, which falls back to
        # 'Unknown') -- confirm that state cannot occur.
        if self.is_member:
            return self.member.email
        return self.invite.email
    @property
    def is_member(self):
        """Is this team member a user yet."""
        return self.member is not None
    @property
    def is_invite(self):
        """Is this team member pending invite accept."""
        return self.member is None and self.invite is not None
    def send_add_notification(self, request):
        """Notify member or invite of being added to a team."""
        if self.invite is None and self.member is not None:
            send_team_add_email(team_member=self, request=request)
        elif self.member is None and self.invite is not None:
            send_team_invite_email(invite=self.invite, request=request)
| 26.256983 | 87 | 0.602553 |
effa27fce1366dd633aa61da6a8ab355321a6dc4 | 8,827 | py | Python | src/server_manager/generate_dhcp_template.py | Juniper/contrail-server-manager | 61a586495b4819904887b5dccb9288b9cf3d2ad5 | [
"Apache-2.0"
] | 12 | 2015-07-28T15:31:51.000Z | 2019-03-03T23:39:10.000Z | src/server_manager/generate_dhcp_template.py | Juniper/contrail-server-manager | 61a586495b4819904887b5dccb9288b9cf3d2ad5 | [
"Apache-2.0"
] | 4 | 2017-01-25T05:24:17.000Z | 2019-04-03T00:25:13.000Z | src/server_manager/generate_dhcp_template.py | Juniper/contrail-server-manager | 61a586495b4819904887b5dccb9288b9cf3d2ad5 | [
"Apache-2.0"
] | 33 | 2015-01-07T10:01:28.000Z | 2020-07-26T08:22:53.000Z | #!/usr/bin/python
#
# Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
#
import string
import sys
import platform
import os
import pdb
import ast
import uuid
import subprocess
from netaddr import *
from server_mgr_err import *
from server_mgr_utils import *
from server_mgr_exception import ServerMgrException as ServerMgrException
from server_mgr_logger import ServerMgrlogger as ServerMgrlogger
from server_mgr_db import ServerMgrDb as db
# Template for the whole /etc/cobbler/dhcp.template file; the two
# placeholders below are filled in by DHCPTemplateGenerator, while the
# Cheetah-style #for/#if directives are expanded later by cobbler.
dhcp_template = string.Template("""
# ******************************************************************
# Cobbler managed dhcpd.conf file
#
# generated from cobbler dhcp.conf template ($date)
# Do NOT make changes to /etc/dhcpd.conf. Instead, make your changes
# in /etc/cobbler/dhcp.template, as /etc/dhcpd.conf will be
# overwritten.
#
# ******************************************************************
ddns-update-style interim;
allow booting;
allow bootp;
ignore client-updates;
set vendorclass = option vendor-class-identifier;
$__subnet_stanza__
$__host_stanza__
#for dhcp_tag in $dhcp_tags.keys():
## group could be subnet if your dhcp tags line up with your subnets
## or really any valid dhcpd.conf construct ... if you only use the
## default dhcp tag in cobbler, the group block can be deleted for a
## flat configuration
# group for Cobbler DHCP tag: $dhcp_tag
group {
#for mac in $dhcp_tags[$dhcp_tag].keys():
#set iface = $dhcp_tags[$dhcp_tag][$mac]
host $iface.name {
hardware ethernet $mac;
#if $iface.ip_address:
fixed-address $iface.ip_address;
#end if
#if $iface.hostname:
option host-name "$iface.hostname";
#end if
#if $iface.netmask:
option subnet-mask $iface.netmask;
#end if
#if $iface.gateway:
option routers $iface.gateway;
#end if
filename "$iface.filename";
## Cobbler defaults to $next_server, but some users
## may like to use $iface.system.server for proxied setups
next-server $next_server;
## next-server $iface.next_server;
}
#end for
}
#end for
""")
# Per-subnet stanza; the on commit/release/expiry hooks forward lease
# events to smgr_dhcp_event.py.
subnet_template = string.Template("""
subnet $__subnet_address__ netmask $__subnet_mask__ {
option routers $__subnet_gateway__;
option subnet-mask $__subnet_mask__;
option domain-name-servers $__dns_server_list__;
option domain-search $__search_domains_list__;
option domain-name $__subnet_domain__;
option ntp-servers $next_server;
$__range_dynamic_bootp_line__
default-lease-time $__default_lease_time__;
max-lease-time $__max_lease_time__;
next-server $next_server;
filename "/pxelinux.0";
on commit {
set clip = binary-to-ascii(10, 8, ".", leased-address);
set clhw = binary-to-ascii(16, 8, ":", substring(hardware, 1, 6));
execute("/opt/contrail/server_manager/smgr_dhcp_event.py", "commit", clip, clhw);
set ClientHost = pick-first-value(host-decl-name,
option fqdn.hostname,
option host-name,
"none");
execute("/opt/contrail/server_manager/smgr_dhcp_event.py", "commit", clip, clhw, ClientHost);
}
on release {
set clip = binary-to-ascii(10, 8, ".", leased-address);
set clhw = binary-to-ascii(16, 8, ":", substring(hardware, 1, 6));
execute("/opt/contrail/server_manager/smgr_dhcp_event.py", "release", clip, clhw);
set ClientHost = pick-first-value(host-decl-name,
option fqdn.hostname,
option host-name,
"none");
execute("/opt/contrail/server_manager/smgr_dhcp_event.py", "release", clip, clhw, ClientHost);
}
on expiry {
set clip = binary-to-ascii(10, 8, ".", leased-address);
set clhw = binary-to-ascii(16, 8, ":", substring(hardware, 1, 6));
execute("/opt/contrail/server_manager/smgr_dhcp_event.py", "expiry", clip, clhw);
set ClientHost = pick-first-value(host-decl-name,
option fqdn.hostname,
option host-name,
"none");
execute("/opt/contrail/server_manager/smgr_dhcp_event.py", "expiry", clip, clhw, ClientHost);
}
}
""")
# Static host entry stanza (one per DHCP host row).
host_template = string.Template("""
host $__host_fqdn__ {
hardware ethernet $__host_mac__;
fixed-address $__host_listen_ip__;
option host-name "$__host_name__";
filename "/pxelinux.0";
option ntp-servers $next_server;
next-server $next_server;
}
""")
# Placeholder tokens, substituted later by the provisioning scripts.
_DEF_SMGR_IP = '__$IPADDRESS__'
_DEF_SMGR_MAC = '__$MACADDRESS__'
_DEF_SMGR_FQDN = '__$HOSTFQDN__'
_DEF_SMGR_HOST_NAME = '__$HOSTNAME__'
_DEF_SMGR_SUBNET_ADDRESS = '__$SUBNETADDRESS__'
_DEF_SMGR_SUBNET_GATEWAY = '__$SUBNETGATEWAY__'
_DEF_SMGR_SUBNET_MASK = '__$SUBNETMASK__'
_DEF_SMGR_DOMAIN = '__$DOMAIN__'
# Default subnet row auto-registered for the server-manager host itself.
smgr_subnet_config = {
    "subnet_address": _DEF_SMGR_SUBNET_ADDRESS,
    "subnet_mask": _DEF_SMGR_SUBNET_MASK,
    "subnet_gateway": _DEF_SMGR_SUBNET_GATEWAY,
    "subnet_domain": _DEF_SMGR_DOMAIN,
    "dns_server_list": [_DEF_SMGR_IP],
    "search_domains_list": [_DEF_SMGR_DOMAIN],
    "default_lease_time": 21600,
    "max_lease_time": 43200
}
class DHCPTemplateGenerator:
    """Render /etc/cobbler/dhcp.template from the DHCP subnet and host
    rows stored in the server-manager database.

    The subnet that the server-manager IP belongs to is auto-registered
    so the generated template always covers it, even when the user never
    adds it explicitly.
    """

    def __init__(self, server_db, smgr_config=None):
        """Constructor.

        server_db   -- ServerMgrDb handle used to read/write DHCP rows
        smgr_config -- optional dict of host-entry fields for the
                       server-manager host itself
        """
        self._serverDb = server_db
        if smgr_config and isinstance(smgr_config, dict):
            # BUGFIX: the previous code updated an undefined module-level
            # name 'smgr_host_config', raising NameError whenever a config
            # was supplied.  Build the host entry locally from the
            # placeholder defaults and overlay the caller's values.
            smgr_host_config = {
                "host_fqdn": _DEF_SMGR_FQDN,
                "mac_address": _DEF_SMGR_MAC,
                "host_name": _DEF_SMGR_HOST_NAME,
                "ip_address": _DEF_SMGR_IP,
            }
            smgr_host_config.update(smgr_config)
            self._serverDb.add_dhcp_host(smgr_host_config)
        self._serverDb.add_dhcp_subnet(smgr_subnet_config)

    def get_subnet_stanza(self):
        """Return the concatenated 'subnet { ... }' stanzas for every DHCP
        subnet row in the database."""
        subnets_stanza = ""
        subnets = self._serverDb.get_dhcp_subnet()
        for dhcp_subnet in subnets:
            # Only emit a dynamic-bootp range when a valid two-element
            # [low, high] range is configured for the subnet.
            if "dhcp_range" in dhcp_subnet and dhcp_subnet["dhcp_range"] \
                    and len(dhcp_subnet["dhcp_range"]) == 2:
                range_dynamic_bootp_line = "range dynamic-bootp " + \
                    dhcp_subnet["dhcp_range"][0] + " " + \
                    dhcp_subnet["dhcp_range"][1] + ";"
            else:
                range_dynamic_bootp_line = ""
            # List-valued columns are stored as their Python repr; decode
            # them and re-quote entries for dhcpd.conf syntax.
            dhcp_subnet['search_domains_list'] = [
                str("\"" + str(x) + "\"")
                for x in ast.literal_eval(dhcp_subnet['search_domains_list'])]
            dhcp_subnet['subnet_domain'] = str(
                "\"" + dhcp_subnet['subnet_domain'] + "\"")
            subnet_stanza = subnet_template.safe_substitute({
                '__subnet_address__': dhcp_subnet['subnet_address'],
                '__subnet_mask__': dhcp_subnet['subnet_mask'],
                '__subnet_gateway__': dhcp_subnet['subnet_gateway'],
                '__subnet_domain__': dhcp_subnet['subnet_domain'],
                '__dns_server_list__': ", ".join(
                    ast.literal_eval(dhcp_subnet['dns_server_list'])),
                '__search_domains_list__': ", ".join(
                    dhcp_subnet['search_domains_list']),
                '__default_lease_time__': dhcp_subnet['default_lease_time'],
                '__max_lease_time__': dhcp_subnet['max_lease_time'],
                '__range_dynamic_bootp_line__': range_dynamic_bootp_line
            })
            subnets_stanza += subnet_stanza + "\n"
        return subnets_stanza

    def get_hosts_stanza(self):
        """Return the concatenated 'host { ... }' stanzas for every DHCP
        host row in the database."""
        hosts_stanza = ""
        hosts = self._serverDb.get_dhcp_host()
        for dhcp_host in hosts:
            host_stanza = host_template.safe_substitute({
                '__host_fqdn__': dhcp_host['host_fqdn'],
                '__host_mac__': dhcp_host['mac_address'],
                '__host_name__': dhcp_host['host_name'],
                '__host_listen_ip__': dhcp_host['ip_address']
            })
            hosts_stanza += host_stanza + "\n"
        return hosts_stanza

    def generate_dhcp_template(self):
        """Render all stanzas and (over)write /etc/cobbler/dhcp.template.

        Any exception (DB, rendering or I/O) propagates to the caller.
        """
        subnets_stanza = self.get_subnet_stanza()
        hosts_stanza = self.get_hosts_stanza()
        dhcp_template_config = dhcp_template.safe_substitute({
            '__subnet_stanza__': subnets_stanza,
            '__host_stanza__': hosts_stanza
        })
        # Context manager guarantees the handle is closed even when the
        # write fails (the old code leaked it on error).
        with open('/etc/cobbler/dhcp.template', 'w+') as dhcp_template_file:
            dhcp_template_file.write(dhcp_template_config)
| 38.545852 | 134 | 0.614478 |
aa72476b54bf6e692bbce7f7694d53aeeaa5618a | 9,164 | py | Python | rcnn/core/module.py | Xilinx/progressface | f421ba8a83e1a57f78c4eaba5601469bc1dc8771 | [
"Apache-2.0",
"MIT"
] | null | null | null | rcnn/core/module.py | Xilinx/progressface | f421ba8a83e1a57f78c4eaba5601469bc1dc8771 | [
"Apache-2.0",
"MIT"
] | null | null | null | rcnn/core/module.py | Xilinx/progressface | f421ba8a83e1a57f78c4eaba5601469bc1dc8771 | [
"Apache-2.0",
"MIT"
] | 1 | 2022-01-05T01:20:00.000Z | 2022-01-05T01:20:00.000Z | # Copyright 2021 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A `MutableModule` implement the `BaseModule` API, and allows input shape
varying with training iterations. If shapes vary, executors will rebind,
using shared arrays from the initial module binded with maximum shape.
"""
import logging
from mxnet import context as ctx
from mxnet.initializer import Uniform
from mxnet.module.base_module import BaseModule
from mxnet.module.module import Module
class MutableModule(BaseModule):
    """A mutable module is a module that supports variable input data.

    A maximum-shape inner ``Module`` is created at ``bind`` time; when a
    forward pass sees different shapes, a new inner module is rebound that
    shares memory with the current one.

    Parameters
    ----------
    symbol : Symbol
    data_names : list of str
    label_names : list of str
    logger : Logger
    context : Context or list of Context
    work_load_list : list of number
    max_data_shapes : list of (name, shape) tuple, designating inputs whose shape vary
    max_label_shapes : list of (name, shape) tuple, designating inputs whose shape vary
    fixed_param_prefix : list of str, indicating fixed parameters
    """
    def __init__(self, symbol, data_names, label_names,
                 logger=logging, context=ctx.cpu(), work_load_list=None,
                 max_data_shapes=None, max_label_shapes=None, fixed_param_prefix=None):
        super(MutableModule, self).__init__(logger=logger)
        self._symbol = symbol
        self._data_names = data_names
        self._label_names = label_names
        self._context = context
        self._work_load_list = work_load_list
        # Inner Module doing the real work; replaced on shape changes.
        self._curr_module = None
        self._max_data_shapes = max_data_shapes
        self._max_label_shapes = max_label_shapes
        self._fixed_param_prefix = fixed_param_prefix
        # Resolve the fixed-parameter prefixes into concrete argument
        # names so the inner Module can freeze them.
        fixed_param_names = list()
        if fixed_param_prefix is not None:
            for name in self._symbol.list_arguments():
                for prefix in self._fixed_param_prefix:
                    if prefix in name:
                        fixed_param_names.append(name)
        self._fixed_param_names = fixed_param_names

    def _reset_bind(self):
        # Drop binding state; the next bind() builds a fresh inner module.
        self.binded = False
        self._curr_module = None

    @property
    def data_names(self):
        return self._data_names

    @property
    def output_names(self):
        return self._symbol.list_outputs()

    @property
    def data_shapes(self):
        assert self.binded
        return self._curr_module.data_shapes

    @property
    def label_shapes(self):
        assert self.binded
        return self._curr_module.label_shapes

    @property
    def output_shapes(self):
        assert self.binded
        return self._curr_module.output_shapes

    def get_params(self):
        assert self.binded and self.params_initialized
        return self._curr_module.get_params()

    def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        # Delegate parameter initialization to the inner module.
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True

    def bind(self, data_shapes, label_shapes=None, for_training=True,
             inputs_need_grad=False, force_rebind=False, shared_module=None):
        """Bind an inner Module at the per-input maximum shapes so later
        rebinds for smaller shapes can share its allocated arrays."""
        # in case we already initialized params, keep it
        if self.params_initialized:
            arg_params, aux_params = self.get_params()
        # force rebinding is typically used when one want to switch from
        # training to prediction phase.
        if force_rebind:
            self._reset_bind()
        if self.binded:
            self.logger.warning('Already binded, ignoring bind()')
            return
        assert shared_module is None, 'shared_module for MutableModule is not supported'
        self.for_training = for_training
        self.inputs_need_grad = inputs_need_grad
        self.binded = True
        # Merge the user-declared maxima, then substitute them for the
        # provided shapes wherever an input is declared as varying.
        max_shapes_dict = dict()
        if self._max_data_shapes is not None:
            max_shapes_dict.update(dict(self._max_data_shapes))
        if self._max_label_shapes is not None:
            max_shapes_dict.update(dict(self._max_label_shapes))
        max_data_shapes = list()
        for name, shape in data_shapes:
            if name in max_shapes_dict:
                max_data_shapes.append((name, max_shapes_dict[name]))
            else:
                max_data_shapes.append((name, shape))
        max_label_shapes = list()
        if label_shapes is not None:
            for name, shape in label_shapes:
                if name in max_shapes_dict:
                    max_label_shapes.append((name, max_shapes_dict[name]))
                else:
                    max_label_shapes.append((name, shape))
        if len(max_label_shapes) == 0:
            max_label_shapes = None
        module = Module(self._symbol, self._data_names, self._label_names, logger=self.logger,
                        context=self._context, work_load_list=self._work_load_list,
                        fixed_param_names=self._fixed_param_names)
        module.bind(max_data_shapes, max_label_shapes, for_training, inputs_need_grad,
                    force_rebind=False, shared_module=None)
        self._curr_module = module
        # copy back saved params, if already initialized
        if self.params_initialized:
            self.set_params(arg_params, aux_params)

    def init_optimizer(self, kvstore='local', optimizer='sgd',
                       optimizer_params=(('learning_rate', 0.01),), force_init=False):
        assert self.binded and self.params_initialized
        if self.optimizer_initialized and not force_init:
            self.logger.warning('optimizer already initialized, ignoring.')
            return
        self._curr_module.init_optimizer(kvstore, optimizer, optimizer_params,
                                         force_init=force_init)
        self.optimizer_initialized = True

    def forward(self, data_batch, is_train=None):
        """Forward the batch, transparently rebinding (with shared arrays)
        when the batch shapes differ from the current binding."""
        assert self.binded and self.params_initialized
        # get current_shapes
        if self._curr_module.label_shapes is not None:
            current_shapes = dict(self._curr_module.data_shapes + self._curr_module.label_shapes)
        else:
            current_shapes = dict(self._curr_module.data_shapes)
        # get input_shapes
        if data_batch.provide_label is not None:
            input_shapes = dict(data_batch.provide_data + data_batch.provide_label)
        else:
            input_shapes = dict(data_batch.provide_data)
        # decide if shape changed
        shape_changed = False
        for k, v in current_shapes.items():
            if v != input_shapes[k]:
                shape_changed = True
        if shape_changed:
            # New module shares executor memory with the old one.
            module = Module(self._symbol, self._data_names, self._label_names,
                            logger=self.logger, context=self._context,
                            work_load_list=self._work_load_list,
                            fixed_param_names=self._fixed_param_names)
            module.bind(data_batch.provide_data, data_batch.provide_label, self._curr_module.for_training,
                        self._curr_module.inputs_need_grad, force_rebind=False,
                        shared_module=self._curr_module)
            self._curr_module = module
        self._curr_module.forward(data_batch, is_train=is_train)

    def backward(self, out_grads=None):
        assert self.binded and self.params_initialized
        self._curr_module.backward(out_grads=out_grads)

    def update(self):
        assert self.binded and self.params_initialized and self.optimizer_initialized
        self._curr_module.update()

    def get_outputs(self, merge_multi_context=True):
        assert self.binded and self.params_initialized
        return self._curr_module.get_outputs(merge_multi_context=merge_multi_context)

    def get_input_grads(self, merge_multi_context=True):
        assert self.binded and self.params_initialized and self.inputs_need_grad
        return self._curr_module.get_input_grads(merge_multi_context=merge_multi_context)

    def update_metric(self, eval_metric, labels):
        assert self.binded and self.params_initialized
        self._curr_module.update_metric(eval_metric, labels)

    def install_monitor(self, mon):
        """ Install monitor on all executors """
        assert self.binded
        self._curr_module.install_monitor(mon)
| 39.843478 | 106 | 0.670122 |
34b8d87fc31859a3202ff7466183eea6c60faeaf | 10,419 | py | Python | salt/modules/pdbedit.py | pass-by-value/salt | 2ede44fe54516242e10fe428629d5f5a18e5f7ea | [
"Apache-2.0",
"MIT"
] | 2 | 2015-09-21T14:13:30.000Z | 2016-02-12T11:33:46.000Z | salt/modules/pdbedit.py | pass-by-value/salt | 2ede44fe54516242e10fe428629d5f5a18e5f7ea | [
"Apache-2.0",
"MIT"
] | 1 | 2019-09-06T13:57:28.000Z | 2019-09-06T13:57:28.000Z | salt/modules/pdbedit.py | pass-by-value/salt | 2ede44fe54516242e10fe428629d5f5a18e5f7ea | [
"Apache-2.0",
"MIT"
] | 1 | 2020-09-30T16:09:48.000Z | 2020-09-30T16:09:48.000Z | # -*- coding: utf-8 -*-
'''
Manage accounts in Samba's passdb using pdbedit
:maintainer: Jorge Schrauwen <sjorge@blackdot.be>
:maturity: new
:platform: posix
.. versionadded:: nitrogen
'''
from __future__ import absolute_import
# Import Python libs
import logging
import hashlib
import binascii
try:
from shlex import quote as _quote_args # pylint: disable=e0611
except ImportError:
from pipes import quote as _quote_args
# Import Salt libs
import salt.utils
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'pdbedit'
# Function aliases
__func_alias__ = {
'list_users': 'list',
'get_user': 'get',
}
def __virtual__():
    '''
    Provides pdbedit if available
    '''
    # Only load this execution module when the pdbedit binary is on PATH.
    if salt.utils.which('pdbedit'):
        return __virtualname__
    return (
        False,
        '{0} module can only be loaded when pdbedit is available'.format(
            __virtualname__
        )
    )
def generate_nt_hash(password):
    '''
    Generate a NT HASH

    CLI Example:

    .. code-block:: bash

        salt '*' pdbedit.generate_nt_hash my_passwd
    '''
    # NT hash = uppercase hex of the MD4 digest of the UTF-16LE password.
    digest = hashlib.new('md4', password.encode('utf-16le')).digest()
    return binascii.hexlify(digest).upper()
def list_users(verbose=True, hashes=False):
    '''
    List user accounts

    verbose : boolean
        return all information
    hashes : boolean
        include NT HASH and LM HASH in verbose output

    CLI Example:

    .. code-block:: bash

        salt '*' pdbedit.list
    '''
    # verbose -> {username: {field: value}}, terse -> [username, ...]
    users = {} if verbose else []
    if verbose:
        ## parse detailed user data
        res = __salt__['cmd.run_all'](
            'pdbedit --list --verbose {hashes}'.format(hashes="--smbpasswd-style" if hashes else ""),
        )
        if res['retcode'] > 0:
            # NOTE(review): the verbose path only logs the failure and
            # returns an empty dict, while the terse path below returns
            # {'Error': ...} -- confirm this asymmetry is intended.
            log.error(res['stderr'] if 'stderr' in res else res['stdout'])
        else:
            # Records are separated by dashed lines; accumulate
            # 'label: value' pairs until the next separator.
            user_data = {}
            for user in res['stdout'].splitlines():
                if user.startswith('-'):
                    if len(user_data) > 0:
                        users[user_data['unix username']] = user_data
                    user_data = {}
                else:
                    label = user[:user.index(':')].strip().lower()
                    data = user[(user.index(':')+1):].strip()
                    user_data[label] = data
            # Flush the final record (no trailing separator after it).
            if len(user_data) > 0:
                users[user_data['unix username']] = user_data
    else:
        ## list users
        res = __salt__['cmd.run_all']('pdbedit --list')
        if res['retcode'] > 0:
            return {'Error': res['stderr'] if 'stderr' in res else res['stdout']}
        else:
            # Terse output is 'username:uid:...'; keep the username field.
            for user in res['stdout'].splitlines():
                users.append(user.split(':')[0])
    return users
def get_user(login, hashes=False):
    '''
    Get user account details

    login : string
        login name
    hashes : boolean
        include NTHASH and LMHASH in verbose output

    CLI Example:

    .. code-block:: bash

        salt '*' pdbedit.get kaylee
    '''
    # Fetch the verbose account map once and pick the requested login,
    # returning an empty dict for unknown accounts.
    account_map = list_users(verbose=True, hashes=hashes)
    return account_map.get(login, {})
def delete(login):
    '''
    Delete user account

    login : string
        login name

    CLI Example:

    .. code-block:: bash

        salt '*' pdbedit.delete wash
    '''
    if login in list_users(False):
        res = __salt__['cmd.run_all'](
            'pdbedit --delete {login}'.format(login=_quote_args(login)),
        )
        if res['retcode'] > 0:
            # Surface pdbedit's own error text to the caller.
            return {login: res['stderr'] if 'stderr' in res else res['stdout']}
        return {login: 'deleted'}
    # Account was not present to begin with -- nothing to do.
    return {login: 'absent'}
def create(login, password, password_hashed=False, machine_account=False):
    '''
    Create user account

    login : string
        login name
    password : string
        password
    password_hashed : boolean
        set if password is a nt hash instead of plain text
    machine_account : boolean
        set to create a machine trust account instead

    CLI Example:

    .. code-block:: bash

        salt '*' pdbedit.create zoe 9764951149F84E770889011E1DC4A927 nthash
        salt '*' pdbedit.create river 1sw4ll0w3d4bug
    '''
    ret = 'unchanged'
    ## generate nt hash if needed
    if password_hashed:
        password_hash = password.upper()
        password = "" # wipe password
    else:
        password_hash = generate_nt_hash(password)
    ## create user
    if login not in list_users(False):
        # NOTE: --create requires a password, even if blank
        res = __salt__['cmd.run_all'](
            cmd='pdbedit --create --user {login} -t {machine}'.format(
                login=_quote_args(login),
                machine="--machine" if machine_account else "",
            ),
            stdin="{password}\n{password}\n".format(password=password),
        )
        if res['retcode'] > 0:
            return {login: res['stderr'] if 'stderr' in res else res['stdout']}
        ret = 'created'
    ## update password if needed
    user = get_user(login, True)
    # NOTE(review): on Python 3 generate_nt_hash() returns bytes while the
    # parsed 'nt hash' field is str, so this comparison may never match --
    # confirm against the targeted Python/Salt versions.
    if user['nt hash'] != password_hash:
        res = __salt__['cmd.run_all'](
            'pdbedit --modify --user {login} --set-nt-hash={nthash}'.format(
                login=_quote_args(login),
                nthash=_quote_args(password_hash)
            ),
        )
        if res['retcode'] > 0:
            return {login: res['stderr'] if 'stderr' in res else res['stdout']}
        if ret != 'created':
            ret = 'updated'
    return {login: ret}
def modify(
        login, password=None, password_hashed=False,
        domain=None, profile=None, script=None,
        drive=None, homedir=None, fullname=None,
        account_desc=None, account_control=None,
        machine_sid=None, user_sid=None,
        reset_login_hours=False, reset_bad_password_count=False,
):
    '''
    Modify user account

    login : string
        login name
    password : string
        password
    password_hashed : boolean
        set if password is a nt hash instead of plain text
    domain : string
        users domain
    profile : string
        profile path
    script : string
        logon script
    drive : string
        home drive
    homedir : string
        home directory
    fullname : string
        full name
    account_desc : string
        account description
    machine_sid : string
        specify the machines new primary group SID or rid
    user_sid : string
        specify the users new primary group SID or rid
    account_control : string
        specify user account control properties

        .. note::
            Only the follwing can be set:
            - N: No password required
            - D: Account disabled
            - H: Home directory required
            - L: Automatic Locking
            - X: Password does not expire
    reset_login_hours : boolean
        reset the users allowed logon hours
    reset_bad_password_count : boolean
        reset the stored bad login counter

    .. note::
        if user is absent and password is provided, the user will be created

    CLI Example:

    .. code-block:: bash

        salt '*' pdbedit.modify inara fullname='Inara Serra'
        salt '*' pdbedit.modify simon password=r1v3r
        salt '*' pdbedit.modify jane drive='V:' homedir='\\\\serenity\\jane\\profile'
        salt '*' pdbedit.modify mal account_control=NX
    '''
    ret = 'unchanged'
    ## flag mapping
    # Maps each pdbedit --list --verbose field label to the pdbedit
    # command-line option that updates it.
    flags = {
        'domain': '--domain=',
        'full name': '--fullname=',
        'account desc': '--account-desc=',
        'home directory': '--homedir=',
        'homedir drive': '--drive=',
        'profile path': '--profile=',
        'logon script': '--script=',
        'account flags': '--account-control=',
        'user sid': '-U ',
        'machine sid': '-M ',
    }
    ## field mapping
    # Caller-supplied value for each of those fields (None = leave as-is).
    provided = {
        'domain': domain,
        'full name': fullname,
        'account desc': account_desc,
        'home directory': homedir,
        'homedir drive': drive,
        'profile path': profile,
        'logon script': script,
        'account flags': account_control,
        'user sid': user_sid,
        'machine sid': machine_sid,
    }
    ## update password
    # create() also creates the account when it is absent.
    if password:
        ret = create(login, password, password_hashed)[login]
        if ret not in ['updated', 'created', 'unchanged']:
            return {login: ret}
    elif login not in list_users(False):
        return {login: 'absent'}
    ## check for changes
    current = get_user(login, hashes=True)
    changes = {}
    for key, val in provided.items():
        if key in ['user sid', 'machine sid']:
            # SIDs are stored fully qualified; compare on the rid suffix.
            if val is not None and key in current and not current[key].endswith(str(val)):
                changes[key] = str(val)
        elif key in ['account flags']:
            if val is not None:
                # Accept '[NX]' and 'NX' alike; keep only supported flags.
                if val.startswith('['):
                    val = val[1:-1]
                new = []
                for f in val.upper():
                    if f not in ['N', 'D', 'H', 'L', 'X']:
                        log.warning(
                            'pdbedit.modify - unknown {f} flag for account_control, ignored'.format(f=f)
                        )
                    else:
                        new.append(f)
                changes[key] = "[{flags}]".format(flags="".join(new))
        else:
            if val is not None and key in current and current[key] != val:
                changes[key] = val
    ## apply changes
    if len(changes) > 0 or reset_login_hours or reset_bad_password_count:
        cmds = []
        for change in changes:
            cmds.append('{flag}{value}'.format(
                flag=flags[change],
                value=_quote_args(changes[change]),
            ))
        if reset_login_hours:
            cmds.append('--logon-hours-reset')
        if reset_bad_password_count:
            cmds.append('--bad-password-count-reset')
        res = __salt__['cmd.run_all'](
            'pdbedit --modify --user {login} {changes}'.format(
                login=_quote_args(login),
                changes=" ".join(cmds),
            ),
        )
        if res['retcode'] > 0:
            return {login: res['stderr'] if 'stderr' in res else res['stdout']}
        if ret != 'created':
            ret = 'updated'
    return {login: ret}
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
| 27.203655 | 104 | 0.559939 |
a12c5aa39d598f75cc3cf3df4635266ff86e7b60 | 1,312 | py | Python | econml/tests/test_notebooks.py | ashlili0401/econmic | e2137e20af0f90382c2fcf2db53823b8737124d0 | [
"MIT"
] | 3 | 2019-07-12T09:24:53.000Z | 2020-04-15T02:50:39.000Z | econml/tests/test_notebooks.py | bquistorff/EconML | 73a21bfe3470e7f0d1702a6db71efd0892cfee9d | [
"MIT"
] | null | null | null | econml/tests/test_notebooks.py | bquistorff/EconML | 73a21bfe3470e7f0d1702a6db71efd0892cfee9d | [
"MIT"
] | 2 | 2020-02-27T17:03:48.000Z | 2021-03-31T20:59:43.000Z | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import pytest
import html
import os
import nbformat
import nbconvert
import traitlets
# Repository notebooks directory, resolved relative to this test file.
_nbdir = os.path.join(os.path.dirname(__file__), '..', '..', 'notebooks')
# Every .ipynb in that directory becomes one parametrized test case.
_notebooks = [path
              for path in os.listdir(_nbdir)
              if path.endswith('.ipynb')]
@pytest.mark.parametrize("file", _notebooks)
@pytest.mark.notebook
def test_notebook(file):
    """Execute one example notebook end-to-end and fail on any cell error."""
    nb = nbformat.read(os.path.join(_nbdir, file), as_version=4)
    # require all cells to complete within 15 minutes, which will help prevent us from
    # creating notebooks that are annoying for our users to actually run themselves
    ep = nbconvert.preprocessors.ExecutePreprocessor(
        timeout=900, allow_errors=True, extra_arguments=["--HistoryManager.enabled=False"])
    ep.preprocess(nb, {'metadata': {'path': _nbdir}})
    # allow_errors=True lets execution continue past failures; collect all
    # error outputs afterwards so a single report covers every failing cell.
    errors = [nbconvert.preprocessors.CellExecutionError.from_cell_and_msg(cell, output)
              for cell in nb.cells if "outputs" in cell
              for output in cell["outputs"]
              if output.output_type == "error"]
    if errors:
        err_str = "\n".join(html.unescape(str(err)) for err in errors)
        raise AssertionError("Encountered {0} exception(s):\n{1}".format(len(errors), err_str))
| 38.588235 | 95 | 0.692835 |
62fa35d7731e79a72b916e960de2ccb39adfb738 | 1,952 | py | Python | hard-gists/4aed548e606f11971f5a/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 21 | 2019-07-08T08:26:45.000Z | 2022-01-24T23:53:25.000Z | hard-gists/4aed548e606f11971f5a/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 5 | 2019-06-15T14:47:47.000Z | 2022-02-26T05:02:56.000Z | hard-gists/4aed548e606f11971f5a/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 17 | 2019-05-16T03:50:34.000Z | 2021-01-14T14:35:12.000Z | from rest_framework import status
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.generics import ListAPIView
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework.settings import api_settings
from social.apps.django_app.utils import strategy
from social.backends.oauth import BaseOAuth1, BaseOAuth2
from api.serializers.social_login import ObtainSocialAuthTokenSerializer
@strategy()
def _register_by_access_token(request, backend):
    """
    Checks what OAuth protocol is being used for social authentication, backend corresponds to the allowed backend types
    and authenticates the user using the access token from the request.

    Returns whatever ``backend.do_auth`` yields (a user on success).
    Raises TypeError for backends that are neither OAuth1 nor OAuth2.
    """
    # The string `backend` argument is shadowed here on purpose: the
    # @strategy decorator resolves it to a backend instance on the request.
    backend = request.strategy.backend
    if isinstance(backend, BaseOAuth1):
        token = {
            'oauth_token': request.POST.get('access_token'),
            'oauth_token_secret': '<secret>'  # required by python-social-auth, but is not used
        }
    elif isinstance(backend, BaseOAuth2):
        token = request.POST.get('access_token')
    else:
        # BUGFIX: the old code did `raise Response(...)`; Response is not
        # an exception, so that line itself raised "TypeError: exceptions
        # must derive from BaseException". Raise a real exception; callers
        # wanting an HTTP 400 should catch it and build the Response.
        raise TypeError("Wrong backend type")
    return backend.do_auth(token)
class ObtainSocialAuthTokenView(ObtainAuthToken):
    """Exchange a social-provider access token for a DRF auth token."""
    serializer_class = ObtainSocialAuthTokenSerializer
    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
    parser_classes = api_settings.DEFAULT_PARSER_CLASSES

    class Meta():
        # NOTE(review): presumably consumed by a response-wrapping layer
        # elsewhere in the project -- confirm.
        list_wrapper = "tokens"
        instance_wrapper = "token"

    def post(self, request, backend):
        """Validate the payload, authenticate via ``backend`` and return
        {token, user_id, user_url}; otherwise respond 400 with the
        serializer errors."""
        serializer = self.serializer_class(data=request.DATA)
        if serializer.is_valid():
            user = _register_by_access_token(request, backend)
            if user:
                user_url = reverse('user-instance', args=[user.pk], request=request)
                # Reuse the existing token when one was already issued.
                token, created = Token.objects.get_or_create(user=user)
                return Response({'token': token.key, 'user_id': user.id, 'user_url': user_url})
        return Response(serializer.errors, status.HTTP_400_BAD_REQUEST)
61df3b019838b0ed03f95046251512707633772f | 1,365 | py | Python | arekit/contrib/networks/enum_name_types.py | nicolay-r/AREk | 19c39ec0dc9a17464cade03b9c4da0c6d1d21191 | [
"MIT"
] | 18 | 2019-12-14T18:43:11.000Z | 2022-03-21T05:55:36.000Z | arekit/contrib/networks/enum_name_types.py | nicolay-r/AREk | 19c39ec0dc9a17464cade03b9c4da0c6d1d21191 | [
"MIT"
] | 284 | 2020-08-08T20:52:44.000Z | 2022-03-31T05:26:20.000Z | arekit/contrib/networks/enum_name_types.py | nicolay-r/AREk | 19c39ec0dc9a17464cade03b9c4da0c6d1d21191 | [
"MIT"
] | 1 | 2021-08-07T13:17:43.000Z | 2021-08-07T13:17:43.000Z | from enum import Enum
class ModelNames(Enum):
    """Registry of supported neural architectures; each value is the short
    string name used in configs/CLI to select the model."""
    CNN = 'cnn'
    AttEndsCNN = 'att-cnn'
    AttEndsAndFramesCNN = 'att-ef-cnn'
    AttSynonymEndsCNN = 'att-se-cnn'
    AttSynonymEndsPCNN = 'att-se-pcnn'
    AttSynonymEndsBiLSTM = 'att-se-bilstm'
    AttSynonymEndsAndFramesCNN = 'att-sef-cnn'
    AttSynonymEndsAndFramesPCNN = 'att-sef-pcnn'
    AttSynonymEndsAndFramesBiLSTM = 'att-sef-bilstm'
    AttEndsAndFramesPCNN = 'att-ef-pcnn'
    AttEndsAndFramesBiLSTM = 'att-ef-bilstm'
    AttEndsPCNN = 'att-pcnn'
    AttFramesCNN = 'att-frames-cnn'
    AttFramesPCNN = 'att-frames-pcnn'
    SelfAttentionBiLSTM = 'self-att-bilstm'
    BiLSTM = 'bilstm'
    IANFrames = 'ian'
    IANEnds = 'ian-ends'
    IANEndsAndFrames = 'ian-ef'
    IANSynonymEnds = 'ian-se'
    IANSynonymEndsAndFrames = 'ian-sef'
    PCNN = 'pcnn'
    LSTM = 'rnn'
    RCNN = 'rcnn'
    RCNNAttPZhou = 'rcnn-att-p-zhou'
    RCNNAttZYang = 'rcnn-att-z-yang'
    AttFramesBiLSTM = 'att-frames-bilstm'
    AttSelfZYangBiLSTM = 'att-bilstm-z-yang'
    AttSelfPZhouBiLSTM = 'att-bilstm'
class ModelNamesService(object):
    """Lookup helper mapping model-name strings back to ModelNames members."""
    # Reverse index: value string -> enum member (name-mangled class attr).
    __names = dict([(item.value, item) for item in ModelNames])

    @staticmethod
    def get_type_by_name(name):
        # Raises KeyError for unsupported model names.
        return ModelNamesService.__names[name]

    @staticmethod
    def iter_supported_names():
        return iter(list(ModelNamesService.__names.keys()))
| 27.857143 | 63 | 0.679853 |
077e15058e046084c3683a881b8cc077ca738494 | 1,066 | py | Python | 1/version_number_compare.py | IronCore864/leetcode | a62a4cdde9814ae48997176debcaad537f7ad01f | [
"Apache-2.0"
] | 4 | 2018-03-07T02:56:03.000Z | 2021-06-15T05:43:31.000Z | 1/version_number_compare.py | IronCore864/leetcode | a62a4cdde9814ae48997176debcaad537f7ad01f | [
"Apache-2.0"
] | null | null | null | 1/version_number_compare.py | IronCore864/leetcode | a62a4cdde9814ae48997176debcaad537f7ad01f | [
"Apache-2.0"
] | 1 | 2021-09-02T12:05:15.000Z | 2021-09-02T12:05:15.000Z | class Solution(object):
def compareVersion(self, version1, version2):
"""
:type version1: str
:type version2: str
:rtype: int
"""
version1 = version1.split('.')
version2 = version2.split('.')
version1 = [int(n) for n in version1]
version2 = [int(n) for n in version2]
i, j = 0, 0
while i < len(version1) and j < len(version2):
if version1[i] > version2[j]:
return 1
elif version1[i] < version2[j]:
return -1
else:
i += 1
j += 1
if i < len(version1):
while i < len(version1):
if version1[i] != 0:
return 1
else:
i += 1
return 0
elif j < len(version2):
while j < len(version2):
if version2[j] != 0:
return -1
else:
j += 1
return 0
else:
return 0
| 27.333333 | 54 | 0.400563 |
c27a71315d8542d6db869616c1dab70b5bd316bf | 206 | py | Python | abc/abc190/abc190b.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | 1 | 2019-08-21T00:49:34.000Z | 2019-08-21T00:49:34.000Z | abc/abc190/abc190b.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | abc/abc190/abc190b.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | N, S, D = map(int, input().split())
for _ in range(N):
X, Y = map(int, input().split())
if X >= S:
continue
if Y <= D:
continue
print('Yes')
break
else:
print('No')
| 15.846154 | 36 | 0.470874 |
031354d3e3a71a8bc6f18c21a33b8b3e4702e8ac | 7,835 | py | Python | junk/torchscript_test.py | rtloftin/interactive_agents | f7d57d1421000b2e8a79a9dff179b8fe7c8d3fc0 | [
"MIT"
] | null | null | null | junk/torchscript_test.py | rtloftin/interactive_agents | f7d57d1421000b2e8a79a9dff179b8fe7c8d3fc0 | [
"MIT"
] | 5 | 2022-03-11T07:58:53.000Z | 2022-03-17T12:57:26.000Z | junk/torchscript_test.py | rtloftin/interactive_agents | f7d57d1421000b2e8a79a9dff179b8fe7c8d3fc0 | [
"MIT"
] | 1 | 2022-03-11T19:28:53.000Z | 2022-03-11T19:28:53.000Z | '''Test the us of TorchScript to export and import of models with graph structure'''
from collections import namedtuple
import gym
from gym.spaces import Discrete, Box
import numpy as np
import torch
import torch.nn as nn
from torch.optim import Adam
from typing import Union, Tuple, Optional
# One expert demonstration step. The typename matches the variable name so
# that repr() and pickling resolve to this module attribute (the original
# stale typename "Step" would break unpickling lookups).
Sample = namedtuple("Sample", ["obs", "action"])
class MemoryGame(gym.Env):
    '''The n-step memory game with noisy observations.

    On the first step the agent observes a one-hot cue plus a "start" flag;
    after `length` steps it observes an "end" flag and is rewarded 1 for
    reproducing the initial cue as its action.
    '''
    def __init__(self, length=5, num_cues=2, noise=0.1):
        # Observation layout: [cue one-hot (num_cues), start flag, end flag],
        # every entry perturbed by uniform noise drawn from [0, noise).
        self.observation_space = Box(0, 2, shape=(num_cues + 2,))
        self.action_space = Discrete(num_cues)
        self._length = length
        self._num_cues = num_cues
        self._noise = noise
        self._current_step = 0
        self._current_cue = 0
    def _obs(self):
        # Base observation is pure noise; informative bits are added on top.
        obs = np.random.uniform(0, self._noise, self.observation_space.shape)
        if 0 == self._current_step:
            obs[-2] += 1  # "start" flag; cue is shown only on this step
            obs[self._current_cue] += 1
        elif self._length == self._current_step:
            obs[-1] += 1  # "end" flag: time to recall the cue
        return obs
    def reset(self):
        # New episode: pick a fresh cue uniformly at random.
        self._current_step = 0
        self._current_cue = np.random.randint(self._num_cues)
        return self._obs()
    def step(self, action):
        if self._current_step < self._length:
            # Intermediate steps: no reward, episode continues.
            self._current_step += 1
            return self._obs(), 0, False, {}
        else:
            # Final step: reward 1 iff the remembered cue is reproduced.
            reward = (1 if action == self._current_cue else 0)
            return self._obs(), reward, True, {}
    def expert(self):
        # Oracle policy: act arbitrarily until the last step, then recall.
        if self._current_step < self._length:
            return self.action_space.sample()
        else:
            return self._current_cue
def generate_data(env, episodes):
    """Roll out the env's expert policy and collect (obs, action) trajectories.

    Returns a list of `episodes` trajectories, each a list of Sample tuples.
    """
    collected = []
    for _ in range(episodes):
        trajectory = []
        observation = env.reset()
        finished = False
        while not finished:
            act = env.expert()
            trajectory.append(Sample(observation, act))
            observation, _, finished, _ = env.step(act)
        collected.append(trajectory)
    return collected
def evaluate(env, model, episodes):
    """Run the greedy policy for `episodes` episodes.

    Returns (mean episode reward, fraction of episodes with positive reward).
    """
    reward_sum = 0
    success_count = 0
    for _ in range(episodes):
        obs = env.reset()
        episode_reward = 0
        done = False
        hidden = model.get_h0()
        while not done:
            # TODO: Switching to ONNX may change how the policy needs to be evaluated
            x = torch.as_tensor(obs, dtype=torch.float32).reshape(1, 1, -1)
            logits, hidden = model(x, hidden)
            action = np.argmax(logits.detach().numpy()[0, 0])
            obs, reward, done, _ = env.step(action)
            episode_reward += reward
        reward_sum += episode_reward
        if episode_reward > 0:
            success_count += 1
    return (reward_sum / episodes), (success_count / episodes)
class ReplayBuffer:
    """Fixed-capacity ring buffer of demonstration episodes.

    Episodes are stored as (observation tensor, one-hot action tensor)
    pairs; once `capacity` episodes are held, new episodes overwrite the
    oldest slots in insertion order.
    """

    def __init__(self, num_actions, capacity=128):
        self._num_actions = num_actions
        self._capacity = capacity
        self._index = 0  # next slot to overwrite once the buffer is full
        self._obs = []
        self._actions = []

    def add(self, episode):
        """Tensorize one episode (iterable of samples with .obs/.action) and store it."""
        obs = []
        actions = []
        for step in episode:
            obs.append(step.obs)
            actions.append(step.action)
        obs = torch.tensor(obs, dtype=torch.float32)
        actions = torch.tensor(actions, dtype=torch.int64)
        actions = nn.functional.one_hot(actions, self._num_actions)
        # Bug fix: the ring-buffer test must compare the number of stored
        # episodes (len(self._obs)) against the capacity, not the length of
        # the incoming episode (len(obs)); the old check crashed with an
        # IndexError whenever an episode had >= capacity steps and never
        # overwrote anything otherwise.
        if len(self._obs) < self._capacity:
            self._obs.append(obs)
            self._actions.append(actions)
        else:
            self._obs[self._index] = obs
            self._actions[self._index] = actions
            self._index = (self._index + 1) % self._capacity

    def sample(self, batch_size):
        """Sample `batch_size` episodes with replacement.

        Returns (obs, actions, mask), each padded along the time dimension
        to the longest sampled episode (time-major, as pad_sequence default).
        """
        indices = np.random.randint(len(self._obs), size=batch_size)
        obs_batch = [self._obs[idx] for idx in indices]
        action_batch = [self._actions[idx] for idx in indices]
        seq_mask = [torch.ones(len(seq), dtype=torch.float32) for seq in obs_batch]
        seq_mask = nn.utils.rnn.pad_sequence(seq_mask)
        obs_batch = nn.utils.rnn.pad_sequence(obs_batch)
        action_batch = nn.utils.rnn.pad_sequence(action_batch)
        return obs_batch, action_batch, seq_mask
class LSTMNet(nn.Module):
    '''A single-layer LSTM followed by a linear readout to logits.'''
    def __init__(self, input_size, output_size, lstm_size):
        super(LSTMNet, self).__init__()
        # Submodule registration order (_lstm then _linear) is preserved.
        self._lstm = nn.LSTM(input_size, lstm_size)
        self._linear = nn.Linear(lstm_size, output_size)
        self._lstm_size = lstm_size

    def forward(self, obs, hidden: Optional[Tuple[torch.Tensor, torch.Tensor]]):
        """Run the LSTM over `obs` and map its features to output logits."""
        features, next_hidden = self._lstm(obs, hidden)
        logits = self._linear(features)
        return logits, next_hidden

    @torch.jit.export
    def get_h0(self, batch_size: int = 1):
        """Return zero-initialised (hidden, cell) states for `batch_size`."""
        shape = (1, batch_size, self._lstm_size)
        hidden = torch.zeros(shape, dtype=torch.float32)
        cell = torch.zeros(shape, dtype=torch.float32)
        return hidden, cell
if __name__ == "__main__":
    # Dead experiment kept for reference: directly scripting an LSTM wrapper.
    '''
    seq = torch.tensor([[[1,2,3,4],[5,6,7,8]]], dtype=torch.float32)
    h0 = (torch.zeros((1,2,32)), torch.zeros((1,2,32)))
    class RNN(nn.Module):
        def __init__(self):
            super(RNN, self).__init__()
            self.rnn = nn.LSTM(4,32)
            # self.rnn = torch.jit.script(nn.LSTM(4,32)) # Doesn't fix anything
        def forward(self, input, hidden: Optional[Tuple[torch.Tensor, torch.Tensor]]): # Note: Cannot pass two arguments to LSTM, seems to throw off torchscript
            return self.rnn(input, hidden)
    lstm = torch.jit.script(RNN())
    # lstm = nn.LSTM(4,32)
    # lstm = torch.jit.script(lstm)
    print(lstm(seq, h0))
    exit()
    '''
    # Configuration
    env = MemoryGame(10, 4)
    num_demonstrations = 1024
    batch_size = 32
    hidden_size = 10
    training_epochs = 3000
    eval_interval = 100
    eval_episodes = 128
    # Generate expert demonstration data and fill the replay buffer
    data = generate_data(env, num_demonstrations)
    buffer = ReplayBuffer(env.action_space.n, capacity=num_demonstrations)
    for episode in data:
        buffer.add(episode)
    # Initialize model and compile it with TorchScript up front
    model = LSTMNet(env.observation_space.shape[0], env.action_space.n, hidden_size)
    model = torch.jit.script(model)
    # Train pytorch model by behavioral cloning (masked NLL of expert actions)
    print("\n===== Training Model =====")
    optimizer = Adam(model.parameters(), lr=0.001)
    initial_hidden = model.get_h0(batch_size)
    for epoch in range(training_epochs):
        obs_batch, action_batch, seq_mask = buffer.sample(batch_size)
        optimizer.zero_grad()
        logits, _ = model(obs_batch, initial_hidden)
        # likelihoods = nn.functional.softmax(logits, -1)
        likelihoods = nn.functional.log_softmax(logits, -1)
        # Select the log-likelihood of the expert action via the one-hot mask,
        # then average over non-padding timesteps only.
        likelihoods = torch.sum(action_batch * likelihoods, -1)
        loss = -torch.mean(seq_mask * likelihoods)
        loss.backward()
        optimizer.step()
        if 0 == (epoch + 1) % eval_interval:
            mean_reward, success_rate = evaluate(env, model, eval_episodes)
            print(f"\n----- Epoch {epoch + 1} -----")
            print(f" mean return: {mean_reward}")
            print(f" success rate: {success_rate * 100}%")
    # Export model to .pt file
    torch.jit.save(model, "torch_lstm.pt")
    # Import model from .pt file and verify it still evaluates correctly
    model = torch.jit.load("torch_lstm.pt")
    mean_reward, success_rate = evaluate(env, model, eval_episodes)
    print(f"\n----- Serialized Model -----")
    print(f" mean return: {mean_reward}")
    print(f" success rate: {success_rate * 100}%")
    # Freeze model (inlining parameters), preserving the exported get_h0
    model.eval()
    model = torch.jit.freeze(model, ["get_h0"])
    mean_reward, success_rate = evaluate(env, model, eval_episodes)
    print(f"\n----- Frozen Model -----")
    print(f" mean return: {mean_reward}")
    print(f" success rate: {success_rate * 100}%")
3324d5e85232c908b721db1e73dcbd20d2cb0a50 | 5,365 | py | Python | ward/expect.py | jayeshathila/ward | 966c3a61dc663b06aebc7d28a9acb9909d4f5810 | [
"MIT"
] | null | null | null | ward/expect.py | jayeshathila/ward | 966c3a61dc663b06aebc7d28a9acb9909d4f5810 | [
"MIT"
] | null | null | null | ward/expect.py | jayeshathila/ward | 966c3a61dc663b06aebc7d28a9acb9909d4f5810 | [
"MIT"
] | null | null | null | import inspect
import types
from enum import Enum
from typing import Type, Any, ContextManager, TypeVar, Generic, Optional, cast
_E = TypeVar("_E", bound=Exception)


class raises(Generic[_E], ContextManager["raises[_E]"]):
    """Context manager asserting that its body raises `expected_ex_type`.

    The caught exception is exposed as `.raised` for further assertions.
    Subclasses of the expected type are accepted, mirroring `except` /
    `isinstance` semantics.
    """

    raised: _E

    def __init__(self, expected_ex_type: Type[_E]):
        self.expected_ex_type = expected_ex_type

    def __enter__(self) -> "raises[_E]":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[types.TracebackType],
    ) -> bool:
        if exc_type is None:
            # Distinguish "nothing was raised" from "the wrong thing was
            # raised" (the old message claimed "None was raised instead").
            raise AssertionError(
                f"Expected exception {self.expected_ex_type}, but no exception was raised."
            )
        if not issubclass(exc_type, self.expected_ex_type):
            raise AssertionError(
                f"Expected exception {self.expected_ex_type}, but {exc_type} was raised instead."
            )
        self.raised: _E = cast(_E, exc_val)
        # Returning True suppresses the expected exception.
        return True
class Comparison(Enum):
    """Comparison operators the assert_* helpers can report on failure."""
    Equals = "=="
    NotEquals = "!="
    In = "in"
    NotIn = "not in"
    Is = "is"
    IsNot = "is not"
    LessThan = "<"
    LessThanEqualTo = "<="
    GreaterThan = ">"
    GreaterThanEqualTo = ">="


class TestFailure(Exception):
    """Raised by the assert_* helpers when an assertion does not hold.

    Carries both operands, the comparison operator, the failing line number
    in the caller, and the user-supplied assertion message so that test
    reporters can render rich diffs.
    """

    def __init__(
        self,
        message: str,
        lhs: Any,
        rhs: Any,
        error_line: int,
        operator: Comparison,
        assert_msg: str,
    ):
        # Forward the message to Exception so str(exc) and exc.args behave
        # normally (the original implementation left args empty).
        super().__init__(message)
        self.lhs = lhs
        self.rhs = rhs
        self.message = message
        self.error_line = error_line
        self.operator = operator
        self.assert_msg = assert_msg
def assert_equal(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure unless ``lhs_val != rhs_val`` is false."""
    if lhs_val != rhs_val:
        raise TestFailure(
            f"{lhs_val} does not equal {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.Equals,
            assert_msg=assert_msg,
        )


def assert_not_equal(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure if the two values compare equal."""
    if lhs_val == rhs_val:
        raise TestFailure(
            f"{lhs_val} does equal {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.NotEquals,
            assert_msg=assert_msg,
        )


def assert_in(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure unless ``lhs_val in rhs_val``."""
    if lhs_val not in rhs_val:
        raise TestFailure(
            f"{lhs_val} is not in {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.In,
            assert_msg=assert_msg,
        )


def assert_not_in(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure if ``lhs_val in rhs_val``."""
    if lhs_val in rhs_val:
        raise TestFailure(
            f"{lhs_val} is in {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.NotIn,
            assert_msg=assert_msg,
        )


def assert_is(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure unless both values are the same object."""
    if lhs_val is not rhs_val:
        raise TestFailure(
            f"{lhs_val} is not {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.Is,
            assert_msg=assert_msg,
        )


def assert_is_not(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure if both values are the same object."""
    if lhs_val is rhs_val:
        raise TestFailure(
            f"{lhs_val} is {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.IsNot,
            assert_msg=assert_msg,
        )


def assert_less_than(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure unless ``lhs_val < rhs_val``."""
    if lhs_val >= rhs_val:
        raise TestFailure(
            f"{lhs_val} >= {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.LessThan,
            assert_msg=assert_msg,
        )


def assert_less_than_equal_to(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure unless ``lhs_val <= rhs_val``."""
    if lhs_val > rhs_val:
        raise TestFailure(
            f"{lhs_val} > {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.LessThanEqualTo,
            assert_msg=assert_msg,
        )


def assert_greater_than(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure unless ``lhs_val > rhs_val``."""
    if lhs_val <= rhs_val:
        raise TestFailure(
            f"{lhs_val} <= {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.GreaterThan,
            assert_msg=assert_msg,
        )


def assert_greater_than_equal_to(lhs_val, rhs_val, assert_msg):
    """Raise TestFailure unless ``lhs_val >= rhs_val``."""
    if lhs_val < rhs_val:
        raise TestFailure(
            f"{lhs_val} < {rhs_val}",
            lhs=lhs_val,
            rhs=rhs_val,
            error_line=inspect.currentframe().f_back.f_lineno,
            operator=Comparison.GreaterThanEqualTo,
            assert_msg=assert_msg,
        )
| 28.089005 | 97 | 0.59329 |
ec545919fdb4cca0e0963df0e81f24becc55be9c | 823 | py | Python | doc/config_doc.py | omergertel/slash | 7dd5710a05822bbbaadc6c6517cefcbaa6397eab | [
"BSD-3-Clause"
] | null | null | null | doc/config_doc.py | omergertel/slash | 7dd5710a05822bbbaadc6c6517cefcbaa6397eab | [
"BSD-3-Clause"
] | null | null | null | doc/config_doc.py | omergertel/slash | 7dd5710a05822bbbaadc6c6517cefcbaa6397eab | [
"BSD-3-Clause"
] | null | null | null | from docutils.parsers.rst import directives, Directive
from docutils import nodes
from slash.conf import config
class ConfigDoc(Directive):
    """Sphinx directive rendering every slash config leaf as its own section."""
    required_arguments = 0
    optional_arguments = 0

    def run(self):
        sections = []
        for path, leaf in config.traverse_leaves():
            # One anchored section per config path, e.g. "conf.log.root".
            node = nodes.section(names=["conf." + path])
            self.state.document.note_explicit_target(node)
            node.append(nodes.title(text=path))
            node.append(nodes.strong(text="Default: {0}".format(leaf.get_value())))
            metadata = leaf.metadata
            if metadata and "doc" in metadata:
                node.append(nodes.paragraph(text=str(metadata["doc"])))
            sections.append(node)
        return sections
def setup(app):
    """Sphinx extension entry point: register the ``config_doc`` directive."""
    directives.register_directive('config_doc', ConfigDoc)
| 35.782609 | 86 | 0.663426 |
acb8a4b7f50f6c90b1ba1a69187f3fdc0c558155 | 355 | py | Python | dbt/adapters/firebolt/__init__.py | firebolt-db/dbt-firebolt | cb89b9917b42606232dfccc5c28515bd21f694a9 | [
"Apache-2.0"
] | 20 | 2021-11-02T23:42:18.000Z | 2022-03-22T22:59:03.000Z | dbt/adapters/firebolt/__init__.py | firebolt-db/dbt-firebolt | cb89b9917b42606232dfccc5c28515bd21f694a9 | [
"Apache-2.0"
] | 33 | 2021-11-02T21:15:40.000Z | 2022-03-29T20:30:19.000Z | dbt/adapters/firebolt/__init__.py | firebolt-db/dbt-firebolt | cb89b9917b42606232dfccc5c28515bd21f694a9 | [
"Apache-2.0"
] | 6 | 2021-11-02T18:32:40.000Z | 2022-03-26T15:24:43.000Z | from dbt.adapters.base import AdapterPlugin
from dbt.adapters.firebolt.connections import FireboltCredentials
from dbt.adapters.firebolt.impl import FireboltAdapter
from dbt.include import firebolt
__version__ = '1.0.4'

# dbt discovers the adapter through this module-level Plugin instance; the
# include_path points at the packaged macros/profile template directory.
Plugin = AdapterPlugin(
    adapter=FireboltAdapter,
    credentials=FireboltCredentials,
    include_path=firebolt.PACKAGE_PATH,
)
| 25.357143 | 65 | 0.816901 |
eb397966d2ca53e0fb6ea9217af6706d8c50bf44 | 805 | py | Python | src/lib/utils/post_process.py | MLDSAI/FairMOT | 728ce0f51bf168b18b8737e0b4ba4b080f7722d0 | [
"MIT"
] | 7,899 | 2019-12-14T20:39:16.000Z | 2022-03-31T12:13:27.000Z | src/lib/utils/post_process.py | MLDSAI/FairMOT | 728ce0f51bf168b18b8737e0b4ba4b080f7722d0 | [
"MIT"
] | 472 | 2020-04-07T15:33:15.000Z | 2022-03-31T02:31:10.000Z | src/lib/utils/post_process.py | MLDSAI/FairMOT | 728ce0f51bf168b18b8737e0b4ba4b080f7722d0 | [
"MIT"
] | 1,025 | 2019-12-18T06:30:48.000Z | 2022-03-24T06:55:04.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from .image import transform_preds
def ctdet_post_process(dets, c, s, h, w, num_classes):
    # dets: batch x max_dets x dim, columns [x1, y1, x2, y2, score, class].
    # Maps both box corners back to original-image coordinates in place
    # (via transform_preds) and groups detections into a 1-based class dict.
    ret = []
    for i in range(dets.shape[0]):
        dets[i, :, :2] = transform_preds(
            dets[i, :, 0:2], c[i], s[i], (w, h))
        dets[i, :, 2:4] = transform_preds(
            dets[i, :, 2:4], c[i], s[i], (w, h))
        classes = dets[i, :, -1]
        per_class = {}
        for cls_id in range(num_classes):
            mask = (classes == cls_id)
            per_class[cls_id + 1] = np.concatenate([
                dets[i, mask, :4].astype(np.float32),
                dets[i, mask, 4:5].astype(np.float32)], axis=1).tolist()
        ret.append(per_class)
    return ret
| 28.75 | 64 | 0.608696 |
4dec5ac09f75a6ad2df6e79f9a923bfc179e6545 | 5,262 | py | Python | tensorflow_datasets/structured/e2e_cleaned.py | stwind/datasets | 118d3d2472a3bf2703d1374e25c2223dc7942c13 | [
"Apache-2.0"
] | 1 | 2020-10-26T09:02:03.000Z | 2020-10-26T09:02:03.000Z | tensorflow_datasets/structured/e2e_cleaned.py | cbaront/datasets | b097e0985eaaadc6b0c1f4dfa3b3cf88d116c607 | [
"Apache-2.0"
] | 1 | 2021-02-23T20:16:05.000Z | 2021-02-23T20:16:05.000Z | tensorflow_datasets/structured/e2e_cleaned.py | cbaront/datasets | b097e0985eaaadc6b0c1f4dfa3b3cf88d116c607 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""e2e_cleaned dataset."""
import csv
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
_CITATION = r"""
@inproceedings{dusek-etal-2019-semantic,
title = "Semantic Noise Matters for Neural Natural Language Generation",
author = "Du{\v{s}}ek, Ond{\v{r}}ej and
Howcroft, David M. and
Rieser, Verena",
booktitle = "Proceedings of the 12th International Conference on Natural Language Generation",
month = oct # "{--}" # nov,
year = "2019",
address = "Tokyo, Japan",
publisher = "Association for Computational Linguistics",
url = "https://www.aclweb.org/anthology/W19-8652",
doi = "10.18653/v1/W19-8652",
pages = "421--426",
abstract = "Neural natural language generation (NNLG) systems are known for their pathological outputs, i.e. generating text which is unrelated to the input specification. In this paper, we show the impact of semantic noise on state-of-the-art NNLG models which implement different semantic control mechanisms. We find that cleaned data can improve semantic correctness by up to 97{\%}, while maintaining fluency. We also find that the most common error is omitting information, rather than hallucination.",
}
"""
_DESCRIPTION = """
An update release of E2E NLG Challenge data with cleaned MRs. The E2E data
contains dialogue act-based meaning representation (MR) in the restaurant domain
and up to 5 references in natural language, which is what one needs to predict.
"""
_HOMEPAGE_URL = 'https://github.com/tuetschek/e2e-cleaning'
_TRAIN_URL = 'https://github.com/tuetschek/e2e-cleaning/raw/master/cleaned-data/train-fixed.no-ol.csv'
_DEV_URL = 'https://github.com/tuetschek/e2e-cleaning/raw/master/cleaned-data/devel-fixed.no-ol.csv'
_TEST_URL = 'https://github.com/tuetschek/e2e-cleaning/raw/master/cleaned-data/test-fixed.csv'
def _get_table_from_mr(mr):
"""Converts a meaningful representation from e2e_cleaned dataset in a table."""
mr_as_table = []
for type_value in mr.split(', '):
type_value_delimiter = type_value.find('[')
type_ = type_value[0:type_value_delimiter]
value = type_value[type_value_delimiter + 1:-1]
mr_as_table.append({
'column_header': type_,
'row_number': 1,
'content': value,
})
return mr_as_table
class E2eCleaned(tfds.core.GeneratorBasedBuilder):
  """MR in the restaurant domain and target utterances describing it."""
  VERSION = tfds.core.Version('0.1.0')
  def _info(self):
    # Schema: each example pairs a table-encoded meaning representation
    # with one natural-language reference utterance.
    return tfds.core.DatasetInfo(
        builder=self,
        # This is the description that will appear on the datasets page.
        description=_DESCRIPTION,
        # tfds.features.FeatureConnectors
        features=tfds.features.FeaturesDict({
            'input_text': {
                'table':
                    tfds.features.Sequence({
                        'column_header': tf.string,
                        'row_number': tf.int16,
                        'content': tf.string,
                    })
            },
            'target_text': tf.string,
        }),
        # If there's a common (input, target) tuple from the features,
        # specify them here. They'll be used if as_supervised=True in
        # builder.as_dataset.
        supervised_keys=('input_text', 'target_text'),
        # Homepage of the dataset for documentation
        homepage=_HOMEPAGE_URL,
        citation=_CITATION,
    )
  def _split_generators(self, dl_manager):
    """Returns SplitGenerators."""
    # Downloads the three fixed CSVs and wires each one to
    # _generate_examples via its local path.
    extracted_path = dl_manager.download_and_extract({
        'train_path': _TRAIN_URL,
        'dev_path': _DEV_URL,
        'test_path': _TEST_URL
    })
    return [
        tfds.core.SplitGenerator(
            name=tfds.Split.TRAIN,
            # These kwargs will be passed to _generate_examples
            gen_kwargs={'csv_path': extracted_path['train_path']},
        ),
        tfds.core.SplitGenerator(
            name=tfds.Split.VALIDATION,
            # These kwargs will be passed to _generate_examples
            gen_kwargs={'csv_path': extracted_path['dev_path']},
        ),
        tfds.core.SplitGenerator(
            name=tfds.Split.TEST,
            # These kwargs will be passed to _generate_examples
            gen_kwargs={'csv_path': extracted_path['test_path']},
        ),
    ]
  def _generate_examples(self, csv_path):
    """Yields examples."""
    # Each CSV row holds an 'mr' (meaning representation) and a 'ref'
    # (reference utterance); the row index doubles as the example key.
    with tf.io.gfile.GFile(csv_path) as f:
      reader = csv.DictReader(f)
      for i, row in enumerate(reader):
        yield i, {
            'input_text': {
                'table': _get_table_from_mr(row['mr']),
            },
            'target_text': row['ref']
        }
| 38.691176 | 511 | 0.663816 |
7c7b32921c253f9ee5f5d504421c49cfa54c59de | 5,382 | py | Python | docs/conf.py | y-pleim/MonteCarlo | a3e328630c3e0866878f4d25749a562a8be56a37 | [
"MIT"
] | null | null | null | docs/conf.py | y-pleim/MonteCarlo | a3e328630c3e0866878f4d25749a562a8be56a37 | [
"MIT"
] | null | null | null | docs/conf.py | y-pleim/MonteCarlo | a3e328630c3e0866878f4d25749a562a8be56a37 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Incase the project was not installed
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import montecarlo
import sphinx_rtd_theme
# -- Project information -----------------------------------------------------
project = 'MonteCarlo'
copyright = ("2022, Yannick Pleimling. Project structure based on the "
"Computational Molecular Science Python Cookiecutter version 1.6")
author = 'Yannick Pleimling'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autosummary',
'sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
'sphinx.ext.intersphinx',
'sphinx.ext.extlinks',
'sphinx_rtd_theme'
]
autosummary_generate = True
napoleon_google_docstring = False
napoleon_use_param = False
napoleon_use_ivar = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'default'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'montecarlodoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'montecarlo.tex', 'MonteCarlo Documentation',
'montecarlo', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'montecarlo', 'MonteCarlo Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# Fixed typo in the package description ("Pyython" -> "Python").
texinfo_documents = [
    (master_doc, 'montecarlo', 'MonteCarlo Documentation',
     author, 'montecarlo', 'A Python package for installing an N-spin system.',
     'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
| 30.579545 | 80 | 0.659978 |
4d11163e1628ec3823c123e9352e1674a33be0cd | 4,736 | py | Python | src/trainer.py | dengshuo/chainer-slimming | f98aacb7f1687bc495481f5fce8f5490305737e2 | [
"MIT"
] | 9 | 2017-08-29T01:37:32.000Z | 2021-03-04T07:41:39.000Z | src/trainer.py | dsanno/chainer-fashion-mnist | 86957147bdaeb3a1888244ce06c31525cd938629 | [
"MIT"
] | null | null | null | src/trainer.py | dsanno/chainer-fashion-mnist | 86957147bdaeb3a1888244ce06c31525cd938629 | [
"MIT"
] | 2 | 2017-11-21T11:56:22.000Z | 2021-03-04T07:41:51.000Z | import math
import numpy as np
import six
from scipy.misc import imresize, imrotate
import time
import chainer
from chainer.dataset import convert
from chainer import functions as F
from chainer import cuda
class CifarTrainer(object):
    """Training loop for a chainer classifier with LR scheduling.

    Supports a 'multistep' schedule (decay the learning rate by 10x at the
    given epochs) or a 'cosine' schedule (annealed every iteration), and
    reports train/valid/test loss and accuracy after each epoch.
    """

    def __init__(self, net, optimizer, epoch_num=100, batch_size=100,
                 device_id=-1, lr_shape='multistep', lr_decay=None):
        """
        Args:
            net: chainer model (callable link) producing class scores.
            optimizer: chainer optimizer set up for `net`; must expose
                either `alpha` (Adam family) or `lr`.
            epoch_num: number of passes over the training data.
            batch_size: minibatch size.
            device_id: GPU id, or -1 for CPU.
            lr_shape: 'multistep' or 'cosine'.
            lr_decay: decay epochs for 'multistep'; defaults to [0],
                which disables decay.
        """
        if lr_decay is None:
            lr_decay = [0]  # avoid a shared mutable default argument
        self.net = net
        self.optimizer = optimizer
        self.epoch_num = epoch_num
        self.batch_size = batch_size
        self.device_id = device_id
        # Adam exposes its base rate as 'alpha'; other optimizers use 'lr'.
        if hasattr(optimizer, 'alpha'):
            self.initial_lr = optimizer.alpha
        else:
            self.initial_lr = optimizer.lr
        self.lr_shape = lr_shape
        self.lr_decay = lr_decay
        if device_id >= 0:
            self.xp = cuda.cupy
            self.net.to_gpu(device_id)
        else:
            self.xp = np

    def fit(self, train_data, valid_data=None, test_data=None, callback=None):
        """Train the network, binding the configured GPU device if any.

        `callback(epoch, net, optimizer, train_loss, train_acc, valid_loss,
        valid_acc, test_loss, test_acc, test_time)` is invoked after each
        epoch when provided.
        """
        if self.device_id >= 0:
            with cuda.cupy.cuda.Device(self.device_id):
                return self.__fit(train_data, valid_data, test_data, callback)
        else:
            return self.__fit(train_data, valid_data, test_data, callback)

    def __fit(self, train_data, valid_data, test_data, callback):
        train_iterator = chainer.iterators.SerialIterator(
            train_data, self.batch_size, repeat=True, shuffle=True)
        train_loss = 0
        train_acc = 0
        num = 0
        iteration = 0
        iteration_num = len(train_data) * self.epoch_num // self.batch_size
        while train_iterator.epoch < self.epoch_num:
            if self.lr_shape == 'cosine':
                # Cosine annealing from the initial rate down to 0 over
                # the whole run.
                lr = 0.5 * self.initial_lr * (
                    1 + math.cos(math.pi * iteration / iteration_num))
                if hasattr(self.optimizer, 'alpha'):
                    self.optimizer.alpha = lr
                else:
                    self.optimizer.lr = lr
            batch = train_iterator.next()
            # (A duplicate, unused `t = convert.concat_examples(...)` call
            # was removed here.)
            x_batch, y_batch = convert.concat_examples(batch, self.device_id)
            loss, acc = self.__forward(x_batch, y_batch)
            self.net.cleargrads()
            loss.backward()
            self.optimizer.update()
            train_loss += float(loss.data) * len(x_batch)
            train_acc += float(acc.data) * len(x_batch)
            num += len(x_batch)
            iteration += 1
            if not train_iterator.is_new_epoch:
                continue
            # --- end of epoch: evaluate, report, and maybe decay the LR ---
            train_loss /= num
            train_acc /= num
            valid_loss = None
            valid_acc = None
            if valid_data is not None:
                valid_loss, valid_acc = self.__evaluate(valid_data)
            test_loss = None
            test_acc = None
            test_time = 0
            if test_data is not None:
                # time.clock() was removed in Python 3.8;
                # time.perf_counter() is the documented replacement.
                start_clock = time.perf_counter()
                test_loss, test_acc = self.__evaluate(test_data)
                test_time = time.perf_counter() - start_clock
            epoch = train_iterator.epoch
            if callback is not None:
                callback(epoch, self.net, self.optimizer, train_loss,
                         train_acc, valid_loss, valid_acc, test_loss,
                         test_acc, test_time)
            train_loss = 0
            train_acc = 0
            num = 0
            if self.lr_shape == 'multistep':
                lr_decay = self.lr_decay
                # Single positive entry: decay every lr_decay[0] epochs.
                # Otherwise: decay at each listed epoch.
                if ((len(lr_decay) == 1 and lr_decay[0] > 0
                     and epoch % lr_decay[0] == 0) or epoch in lr_decay):
                    if hasattr(self.optimizer, 'alpha'):
                        self.optimizer.alpha *= 0.1
                    else:
                        self.optimizer.lr *= 0.1
        train_iterator.finalize()

    def __evaluate(self, data):
        """Return (mean loss, mean accuracy) over `data`, without backprop."""
        iterator = chainer.iterators.SerialIterator(
            data, self.batch_size, repeat=False, shuffle=False)
        total_loss = 0
        total_acc = 0
        num = 0
        with chainer.using_config('enable_backprop', False):
            with chainer.using_config('train', False):
                for batch in iterator:
                    x_batch, y_batch = convert.concat_examples(batch, self.device_id)
                    loss, acc = self.__forward(x_batch, y_batch)
                    total_loss += float(loss.data) * len(x_batch)
                    total_acc += float(acc.data) * len(x_batch)
                    num += len(x_batch)
        iterator.finalize()
        return total_loss / num, total_acc / num

    def __forward(self, batch_x, batch_t):
        """Compute softmax cross-entropy loss and accuracy for one batch."""
        xp = self.xp
        x = xp.asarray(batch_x)
        t = xp.asarray(batch_t)
        y = self.net(x)
        loss = F.softmax_cross_entropy(y, t)
        acc = F.accuracy(y, t)
        return loss, acc
| 40.135593 | 135 | 0.575169 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.