id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
20819 | import os
import fnmatch
from pathlib import Path
from jinja2 import Template
from .Metadata import Metadata
class JSGenerator:
    """Generates the jumpscale loader module.

    Walks over the installed jumpscale repositories, collects metadata for
    every python module (via Metadata) and renders the jumpscale entry
    module from a jinja2 template, then writes markdown reports.
    """

    def __init__(self, j):
        """
        :param j: the jumpscale core object; kept on ``self._j`` because the
            global ``j`` is not fully initialised at generation time.
        """
        self._j = j
        self._generated = False

    def _check_process_file(self, path):
        """Return True when *path* is a python file that should be processed.

        Files whose basename starts with ``_`` and files whose full path
        contains one of the ignored fragments are skipped.
        """
        bname = os.path.basename(path)
        if bname.startswith("_"):
            return False
        IGNORE = ["/template", "JSLoader.py", "SystemFSDecorators.py", "FixerReplace"]
        # substring match against the full path, not only the basename
        return not any(item in path for item in IGNORE)

    def lib_link(self, path):
        """
        look for ".jumpscalemodules" and link the parent directory to the JSX lib dir

        :param path: root directory which is scanned recursively
        :return:
        """
        j = self._j
        # can use j here because will never be used in first step
        for marker_path in j.sal.fs.listFilesInDir(path, True, filter=".jumpscalemodules"):
            dpath = j.sal.fs.getDirName(marker_path)
            target = j.core.tools.text_replace("{DIR_BASE}/lib/jumpscale/%s" % j.sal.fs.getBaseName(dpath))
            j.sal.fs.symlink(dpath, target, True)

    def generate(self, methods_find=False, action_method=None, action_args=None, path=None):
        """
        walk over all found jumpscale libraries
        look for the classes where there is a __jslocation__ inside; these are classes which need to be loaded

        :param methods_find: when True also collect per-method metadata
        :param action_method: optional callable applied to every processed module
        :param action_args: dict of arguments for action_method; a fresh dict is
            created per call when omitted (the previous mutable default ``{}``
            leaked state between calls)
        :param path: root dir to scan; defaults to the parent of the jumpscale dir
        :return: the (possibly updated) action_args dict
        """
        if action_args is None:
            action_args = {}
        self.md = Metadata(self._j)
        # find the directory in which we have all repo's of threefoldtech
        if path:
            rootDir = path
        else:
            rootDir = os.path.dirname(self._j.core.dir_jumpscale.rstrip("/"))
        p = Path(rootDir)
        for dpath in p.iterdir():
            if not dpath.is_dir():
                continue
            if dpath.name.startswith("."):
                continue
            for dpath2 in dpath.iterdir():
                jsmodpath = os.path.join(os.fspath(dpath2), ".jumpscalemodules")
                if not os.path.exists(jsmodpath):
                    continue
                js_lib_path = os.path.join(os.fspath(dpath2))
                # NOW WE HAVE FOUND A SET OF JUMPSCALE MODULES
                jumpscale_repo_name = os.path.basename(dpath2)
                for dirName, subdirList, fileList in os.walk(os.fspath(dpath2), followlinks=True):
                    if dirName.find("egg-info") != -1:
                        # NOTE(review): drops into an interactive shell; looks
                        # like leftover debugging -- confirm before removing.
                        self._j.shell()
                    # the original used "is not -1" which compares ints by
                    # identity (SyntaxWarning on modern python); use != instead
                    if dirName.find("Jumpscale/core") != -1:
                        continue
                    if dirName.find("notebooks/") != -1:
                        continue
                    # skip the core files, they don't need to be read
                    for item in fnmatch.filter(fileList, "*.py"):
                        module_path = os.path.join(dirName, item)
                        self._log("process", module_path)
                        if self._check_process_file(module_path):
                            self.md.jsmodule_get(
                                path=module_path,
                                jumpscale_repo_name=jumpscale_repo_name,
                                js_lib_path=js_lib_path,
                                methods_find=methods_find,
                                action_method=action_method,
                                action_args=action_args,
                            )
        self.md.groups_load()  # make sure we find all groups
        self._render()
        self.report()
        return action_args

    def _log(self, cat, msg=""):
        """Print an aligned progress line (no logger is usable this early)."""
        print("- %-15s %s" % (cat, msg))

    def _render(self):
        """Render the jumpscale entry module from its jinja2 template."""
        # create the jumpscale dir if it does not exist yet
        dpath = "%s/jumpscale/" % self._j.dirs.TMPDIR
        if not os.path.exists(dpath):
            os.makedirs(dpath)
        # write the __init__ file otherwise cannot include
        init_path = "%s/jumpscale/__init__.py" % self._j.dirs.TMPDIR
        with open(init_path, "w") as init_file:
            init_file.write("")
        if self._j.application._check_debug():
            template_name = "template_jumpscale_debug.py"
        else:
            template_name = "template_jumpscale.py"
        template_path = os.path.join(os.path.dirname(__file__), "templates", template_name)
        template = Path(template_path).read_text()
        t = Template(template)
        C = t.render(md=self.md)
        # with-blocks guarantee the generated file is flushed and closed
        with open(self._j.core.application._lib_generation_path, "w") as out_file:
            out_file.write(C)
        self._generated = True

    def report(self):
        """
        kosmos "j.core.jsgenerator.report()"
        write reports to /tmp/jumpscale/code_report.md
        :return:
        """
        for jsgroup in self.md.jsgroups.values():
            report_path = "%s/jumpscale/code_report_%s.md" % (self._j.dirs.TMPDIR, jsgroup.name)
            with open(report_path, "w") as report_file:
                report_file.write(jsgroup.markdown)
        self.report_errors()
        self.report_line_changes()

    def report_errors(self):
        """Write collected init errors to ERRORS_report.md; return their count."""
        out = ""
        for cat, obj, error, trace in self._j.application.errors_init:
            out += "## %s:%s\n\n" % (cat, obj)
            out += "%s\n\n" % error
            out += "%s\n\n" % trace
        path = "%s/jumpscale/ERRORS_report.md" % (self._j.dirs.TMPDIR)
        with open(path, "w") as errors_file:
            errors_file.write(out)
        return len(self._j.application.errors_init)

    def report_line_changes(self):
        """Write the collected line-change records to LINECHANGES_report.md."""
        out = "".join(str(item) for item in self.md.line_changes)
        path = "%s/jumpscale/LINECHANGES_report.md" % (self._j.dirs.TMPDIR)
        with open(path, "w") as changes_file:
            changes_file.write(out)
| StarcoderdataPython |
11293596 | """
Given a string, return whether it represents a number. Here are the different kinds of numbers:
"10", a positive integer
"-10", a negative integer
"10.1", a positive real number
"-10.1", a negative real number
"1e5", a number in scientific notation
And here are examples of non-numbers:
"a"
"x 1"
"a -2"
"-"
"""
def verify_beginning(text: str) -> bool:
    """The first character must be a sign or a digit."""
    first = text[0]
    return first.isdigit() or first in ("-", "+")
def verify_body(text: str) -> bool:
    """Validate the characters between the first and last one.

    Allows digits, at most one 'e', at most one '.', and at most one sign
    that must directly follow the 'e'.  The first and last characters are
    validated separately by verify_beginning / verify_end.
    """
    len_text = len(text)
    if len_text <= 2:
        # Nothing between the first and last character: those two are fully
        # covered by verify_beginning and verify_end.
        return True
    count_e = 0
    count_dot = 0
    count_sign = 0
    for index in range(1, len_text - 1):
        char = text[index]
        if char == "e":
            # only one exponent marker, and it must follow a digit or a dot
            # (previously "-e5" slipped through as a valid number)
            if count_e > 0 or not (text[index - 1].isdigit() or text[index - 1] == "."):
                return False
            count_e += 1
        elif char == ".":
            # the dot may not repeat and may not appear in the exponent part
            if count_e > 0 or count_dot > 0:
                return False
            count_dot += 1
        elif char in ("+", "-"):
            # a single sign is allowed, and only directly after the 'e'
            if text[index - 1] != "e" or count_sign > 0:
                return False
            count_sign += 1
        elif not char.isdigit():
            return False
    return True
def verify_end(text: str) -> bool:
    """The last character must be a digit."""
    last = text[len(text) - 1]
    return last.isdigit()
def string_is_number(text: str) -> bool:
    """Return True when *text* is a well-formed number literal."""
    if not text:
        return False
    checks = (verify_beginning, verify_body, verify_end)
    return all(check(text) for check in checks)
if __name__ == "__main__":
assert string_is_number("10.9") is True
assert string_is_number("-15.7") is True
assert string_is_number("1") is True
assert string_is_number("1e5") is True
assert string_is_number("1.5e+10") is True
assert string_is_number("5.4e-10") is True
assert string_is_number("4x3") is False
assert string_is_number("a") is False
assert string_is_number("x 1") is False
assert string_is_number("1 e5") is False
assert string_is_number("a - 2") is False
assert string_is_number("10-1") is False
assert string_is_number("10+1") is False
assert string_is_number("-") is False
assert string_is_number("-e") is False
assert string_is_number("+e") is False
| StarcoderdataPython |
9633916 | <gh_stars>0
# Flask application wiring: app object, CORS, config and the database handle.
from flask import Flask
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS
# static assets and templates live outside the package, under ../static
app = Flask(__name__, static_folder='../static/dist',
            template_folder='../static/src')
CORS(app)  # allow cross-origin requests (frontend is served separately)
app.config.from_object(Config)
db = SQLAlchemy(app)
# imported last so routes/models can import `app` and `db` without a cycle
from app import routes, models
| StarcoderdataPython |
4868193 | # -*- coding: utf-8 -*-
from django import VERSION
from django.template import Library
register = Library()

# Django >= 1.5 ships url semantics that let .absolute register its tags
# directly; on older versions we wrap the legacy `url` tag ourselves.
if VERSION >= (1, 5):
    from .absolute import absolute, site
    register.tag(absolute)
    register.tag(site)
else:
    from django.templatetags.future import url
    from .absolute import AbsoluteUrlNode, SiteUrlNode

    @register.tag
    def absolute(parser, token):
        '''
        Returns a full absolute URL based on the request host.
        This template tag takes exactly the same parameters as url template tag.
        '''
        node = url(parser, token)
        # url() keeps the quoted view name; strip surrounding quotes
        view_name = str(node.view_name).strip('"\'')
        return AbsoluteUrlNode(
            view_name=view_name,
            args=node.args,
            kwargs=node.kwargs,
            asvar=node.asvar
        )

    @register.tag
    def site(parser, token):
        '''
        Returns a full absolute URL based on the current site.
        This template tag takes exactly the same parameters as url template tag.
        '''
        node = url(parser, token)
        view_name = str(node.view_name).strip('"\'')
        return SiteUrlNode(
            view_name=view_name,
            args=node.args,
            kwargs=node.kwargs,
            asvar=node.asvar
        )
| StarcoderdataPython |
1906062 | <gh_stars>0
import toolz, pyrsistent, hypothesis, effect, pytest
class User(object):
    """A user record loaded from the database."""

    def __init__(self, id, name, organization):
        self.id, self.name, self.organization = id, name, organization
def load_from_db(store, id):
    """Fetch a user row from *store* and build a User instance from it."""
    name, organization = store.find(User, id=id)
    return User(id, name, organization)
def load_owned_products(store, user_id):
    # TODO: not implemented yet -- should query products owned by user_id
    pass
def get_email_adress(name, organization):
    """Build an e-mail address from a user name and organization.

    NOTE(review): the original body and assertion were mangled by dataset
    anonymization ("<EMAIL>"); reconstructed as "name@organization".
    """
    return "{}@{}".format(name, organization)
assert get_email_adress('name', 'organization') == "name@organization"
print("OK!")
class Person(object):
    """A named individual (can be stored as a member of a Group)."""
    def __init__(self, name):
        self.name = name
# Recursion
class Group(object):
    """Groups can contain Persons or more Groups."""
    def __init__(self, members, subgroups):
        self.members = members      # direct members of this group
        self.subgroups = subgroups  # nested Group instances
def get_all_members(group):
    """Return group's members plus those of all nested subgroups (depth-first)."""
    nested = [member
              for subgroup in group.subgroups
              for member in get_all_members(subgroup)]
    return group.members + nested
# quick demo: flatten a nested group structure
group = Group(['Sam', 'Jessie'], [Group(['Reese', 'Taylor'], [])])
print(get_all_members(group))
# ~~~~~~~~~~~~~~ #
def mymap(f, l):
    """Recursive map: return a new list with f applied to each element of l."""
    if l == []:
        return l
    transformed_head = [f(l[0])]
    return transformed_head + mymap(f, l[1:])
print(mymap(lambda n: n+2, [1, 2, 3]))
def biggest(nums, current_biggest=0):
    """Recursively find the maximum of nums, seeded with current_biggest."""
    if nums == []:
        return current_biggest
    head, rest = nums[0], nums[1:]
    return biggest(rest, max(head, current_biggest))
print(biggest([1, 2, 5, 3, -1]))
# Recursive version that isn't well optimized in Python
# (each step adds a stack frame; CPython has no tail-call elimination)
def myreduce(f, l, acc):
    if l == []:
        return acc
    else:
        new_acc = f(l[0], acc)
        return myreduce(f, l[1:], new_acc)
# Iterative version that optimizes functions that depend upon this function
# NOTE: this definition intentionally shadows the recursive one above.
def myreduce(f, l, acc):
    # f takes (element, accumulator) -- note the argument order
    for el in l:
        acc = f(el, acc)
    return acc
# All of the following are expressed in terms of myreduce (a left fold).
def biggest(l):
    return myreduce(lambda num, acc: num if num > acc else acc, l, 0)
def length(l):
    return myreduce(lambda _, acc: acc + 1, l, 0)
def mysum(l):
    return myreduce(lambda num, acc: num + acc, l, 0)
def mymap(f, l):
    # O(n^2): list concatenation copies the accumulator on every step
    return myreduce(lambda el, acc: acc + [f(el)], l, [])
def myfilter(f, l):
    return myreduce(lambda el, acc: acc + [el] if f(el) else acc, l, [])
print("Biggest", biggest([1, 2, 5, 3]))
print("Length", length([1, 2, 3, 5, 6]))
print("Sum", mysum([5, 5, 6]))
print("Add2", mymap(lambda n: n+2, [1, 2, 3]))
print("Evens", myfilter(lambda num: num % 2 == 0, [1, 2, 3, 4, 5]))
# ~~~~~~~~~~~~~~ #
def trace(name, x, f):
    """Log entry and exit of the call f(x) and return f's result.

    The original computed the result but never returned it, so every
    traced function appeared to return None.
    """
    print("<{}({})>".format(name, x), end='')
    result = f(x)
    print("</{}({}): {}>\n".format(name, x, result), end='')
    return result
def trace_decorator(f):
    """Decorator: route every call to *f* through trace() for logging."""
    def traced(x):
        return trace(f.__name__, x, f)
    return traced
# demo: each call below prints open/close markers via trace()
@trace_decorator
def doWork(x):
    return x * 2
@trace_decorator
def otherWork(x):
    return x - 5
doWork(50)
otherWork(10)
# ~~~~~~~~~~~~~~ #
class Point(object):
    """An immutable-style 2D point; translate() returns a new Point."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def translate(self, x_trans, y_trans):
        # return a shifted copy instead of mutating in place
        shifted = Point(self.x + x_trans, self.y + y_trans)
        return shifted
# ~~~~~~~~~~~~~~ #
# pyrsistent: persistent (immutable) collections; every "mutation" returns a
# new collection and leaves the original untouched.
from pyrsistent import pvector, v
vec = pvector([1, 2, 3])
assert vec == v(1, 2, 3)
print(vec, '\n', vec.append(4), vec.set(0, 'hello'), '\n', vec, '\n')
from pyrsistent import pmap, m
dic = pmap({'one': 'yes', 2: 0})
m(a=1, b=2)
print(dic, '\n', dic.set('one', 'yusss'), '\n', dic, '\n')
from pyrsistent import pset, s
animals = pset(['dog', 'cat', 'bird']) | s('bear', 'lion')
print(animals, '\n', animals.add('spider'), '\n', animals.remove('bird'), '\n', animals, '\n')
from pyrsistent import freeze, thaw
# freeze/thaw convert nested mutable structures to/from persistent ones
dic = {'sublist': [{'a', 'b', 'c'}, {'d', 'e', 'f'}]}
frozen = freeze(dic)
thawed = thaw(frozen)
print(dic, '\n', frozen, '\n', thawed, '\n')
from pyrsistent import PClass, field
class Point(PClass):
    # NOTE: shadows the hand-written Point class defined earlier
    x = field()
    y = field()
    def translate(self, x_trans, y_trans):
        return self.set(x=self.x + x_trans, y=self.y + y_trans)
p = Point(x=1, y=2)
print(p, '\n', p.translate(1, 1), '\n', p, '\n', p.serialize(), '\n')
# ~~~~~~~~~~~~~~ #
# structural sharing: 'second' reuses most of 'first' internally
first = pvector(range(8)) # pvector([0, 1, 2, 3, 4, 5, 6, 7])
second = first.set(5, 99) # pvector([0, 1, 2, 3, 4, 99, 6, 7])
# ~~~~~~~~~~~~~~ #
def g(x):
    return x + 10
def f(x):
    return x * 5
# toolz.functoolz: composition and currying helpers
from toolz.functoolz import compose
compose(f, g)(3) # 65  (rightmost function g is applied first)
compose(g, f)(3) # 25
map(compose(f, g), [1, 2, 3]) # [55, 60, 65]
map(lambda x: f(g(x)), [1, 2, 3]) # ^
from toolz.functoolz import curry
@curry
def plus(x, y):
    return x + y
plus(1, 2) # 3
plus(1)(2) # 3  (partial application)
map(plus(1), [1, 2, 3]) # [2, 3, 4]
from toolz.functoolz import identity, memoize
identity(3) # equivalent to 'lambda x: x'
@curry
@memoize # the return value for a given argument('s) is remembered
def minus(x, y):
    return x - y
minus(3, 4) # -1
minus(3)(4) # -1
from toolz.functoolz import thread_first
def inc(a): return a + 1
def double(a): return a * 2
thread_first(1, inc, double) # 4
def add(a, n): return a + n
def mul(a, n): return a * n
thread_first(1, (add, 5), (mul, 3)) # 18
# toolz.itertoolz: utilities over lazy sequences
from toolz.itertoolz import nth, last, drop, take, groupby, interpose, first
nth(1, iter([1, 2, 3])) # 2
last(iter([1, 2, 3])) # 3
list(drop(2, iter([1, 2, 3]))) # [3]
list(take(2, iter([1, 2, 3]))) # [1, 2]
groupby(first, ['ABC', 'ABA', 'BAB', 'BAA']) # {'A': ['ABC', 'ABA'], 'B': ['BAB', 'BAA']}
groupby(last, ['ABC', 'ABA', 'BAB', 'BAA']) # {'A': ['ABA', 'BAA'], 'B': ['BAB'], 'C': ['ABC']}
list(interpose('meow', ['bark', 'squeal', 'bulbulbul'])) # ['bark', 'meow', 'squeal', 'meow', 'bulbulbul']
first('ABC') # 'A'
first(iter('ABC')) # 'A'
d1 = {'cat': 'meow'}
d2 = {'dog': 'woof'}
# toolz.dicttoolz: non-mutating dict helpers
from toolz.dicttoolz import merge, assoc, dissoc, get_in
merge(d1, d2) # {'cat': 'meow', 'dog': 'woof'}
assoc(d1, 'fish', 'bulbulbul') # {'fish': 'bulbulbul', 'cat': 'meow'}
dissoc(d1, 'cat') # {}
struct = {'a': [{'c': 'hi'}]}
print(struct['a'][0]['c']) # 'hi'
get_in(['a', 0, 'c'], struct) # 'hi'  (safe nested lookup)
get_in(['b', 0, 'c'], struct, 'not found!') # 'not found!'
# ~~~~~~~~~~~~~~ #
def add2(num):
    return num + 2
# hypothesis: property-based testing -- generates many random inputs per test
from hypothesis import given, strategies as st
@given(st.integers())
def test_adds_2(num):
    assert add2(num) == num + 2
test_adds_2()
from operator import add
def mysum(nums):
    # NOTE: shadows the myreduce-based mysum defined earlier in this script
    from functools import reduce
    return reduce(add, nums, 0)
@given(st.lists(st.integers()))
def test_mysum(nums):
    assert mysum(nums) == sum(nums)
test_mysum()
class Herd(object):
    """A group of animals of a single species."""

    def __init__(self, animal, number):
        self.animal = animal
        self.number = number

    def __repr__(self):
        return "<Herd of animal={} number={}>".format(self.animal, self.number)

    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        return (self.animal, self.number) == (other.animal, other.number)
animals = st.sampled_from(['moose', 'duck', 'bear', 'lion'])
print(animals.example()) # 'bear' (or something else)
# build Herd instances from generated animal names and non-negative counts
herds = st.builds(Herd, animals, st.integers(min_value=0))
print(herds.example()) # '<Herd of animal=moose number=46373>' (or something else)
def parse_herd(s):
    # parse "name,count"; any malformed input is reported as SyntaxError
    try:
        name, num = s.split(',')
        num = int(num)
    except ValueError:
        raise SyntaxError()
    return Herd(name, num)
parse_herd('cat,10') # '<Herd of animal=cat number=10>'
def serialize_herd(herd):
    return '{},{}'.format(herd.animal, herd.number)
serialize_herd(Herd('dog', 10)) # 'dog,10'
# round-trip property: serialize then parse yields an equal Herd
@given(herds)
def test_herd_roundtrip(herd):
    assert parse_herd(serialize_herd(herd)) == herd
test_herd_roundtrip()
# fuzzing: on arbitrary text the parser may only ever raise SyntaxError
@given(st.text())
def fuzz_herd_parser(s):
    try:
        parse_herd(s)
    except SyntaxError:
        pass
fuzz_herd_parser()
def guess_number(num):
    "Returns a message and how far away you are"
    if num == answer:
        return "You got it right!", 0
    else:
        return "Nope", answer - num
answer = 4848
from hypothesis import example
# @example pins one specific input (the exact answer) besides random ones
@given(st.integers())
@example(answer)
def test_guess_number(num):
    assert guess_number(num)[1] == answer - num
# ~~~~~~~~~~~~~~ #
from effect import Effect, sync_perform, sync_performer
from effect import ComposedDispatcher, TypeDispatcher, base_dispatcher
from effect.do import do
def compliment(name):
    """Return a flattering sentence about *name*."""
    message = "Oh, {} is a lovely name~".format(name)
    return message
@do
def main():
    # effectful program: intents are declared here, performed by a dispatcher
    name = yield Effect(Input("Enter your name: "))
    yield Effect(Print(compliment(name)))
class Input(object):
    """Intent: read a line from the user, showing *prompt*."""

    def __init__(self, prompt):
        self.prompt = prompt

    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        return self.prompt == other.prompt
class Print(object):
    """Intent: show *message* to the user."""

    def __init__(self, message):
        self.message = message

    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        return self.message == other.message
# real-world interpreters for the intents above
@sync_performer
def perform_input(dispatcher, intent):
    return input(intent.prompt)
@sync_performer
def perform_print(dispatcher, intent):
    print(intent.message)
# Production wiring kept disabled so importing this module has no side effects.
# NOTE(review): the commented dispatcher maps Print to perform_input -- looks
# like a typo; confirm before re-enabling.
# io = TypeDispatcher({
#     Input: perform_input,
#     Print: perform_input
# })
#
# dispatcher = ComposedDispatcher([io, base_dispatcher])
#
# if __name__ == '__main__':
#     eff = main()
#     sync_perform(dispatcher, eff)
#     sync_perform(dispatcher, eff)
from effect.testing import SequenceDispatcher
def test_main():
    # scripted dispatcher: the intents must occur in exactly this order
    seq = SequenceDispatcher([
        (Input("Enter your name: "), lambda i: "woof"),
        (Print('Oh, woof is a lovely name~'), lambda i: None)
    ])
    with seq.consume():
        sync_perform(ComposedDispatcher([seq, base_dispatcher]), main())
test_main()
| StarcoderdataPython |
import asyncio
import random
import time
import traceback

from src import database, amino_async, configs
from src.utils import service_align, logger, file_logger
# Pool of alternative device ids (one per line) used to rotate identity when
# Amino rejects an action.  NOTE(review): the open() handle is never closed --
# presumably acceptable for a one-shot module-level read, but confirm.
DEVICES = open(configs.DEVICES_PATH, "r").readlines()
async def login(account: tuple):
    """Log in with (email, password); return a Client, or False on failure.

    Retries indefinitely on ActionNotAllowed by switching to a random
    device id; any other failure closes the session and returns False.
    """
    client = amino_async.Client()
    email = account[0]
    password = account[1]
    while True:
        try:
            await client.login(email, password)
            return client
        except amino_async.utils.exceptions.ActionNotAllowed:
            # device rejected: rotate to another device id and retry
            # NOTE(review): relies on `random` being imported at module level
            client.device_id = client.headers.device_id = random.choice(DEVICES).strip()
        except amino_async.utils.exceptions.VerificationRequired as verify:
            # captcha/verification needed -- cannot continue automatically
            logger.error("[" + email + "]: " + str(verify.args[0]["url"]))
            await client.session.close()
            return False
        except Exception as e:
            logger.error("[" + email + "]: " + e.args[0]["api:message"])
            file_logger.debug(traceback.format_exc())
            await client.session.close()
            return False
async def login_sid(account: tuple):
    """Log in with a stored session id; return a Client, or False on failure.

    *account* is (email, password, sid, is_valid).  Only accounts whose
    is_valid flag equals 1 are attempted; otherwise the coroutine returns
    None implicitly.
    """
    email = account[0]
    sid = account[2]
    is_valid = account[3]
    if is_valid == 1:
        client = amino_async.Client()
        while True:
            try:
                await client.login_sid(sid)
                return client
            except amino_async.utils.exceptions.ActionNotAllowed:
                # device rejected: rotate to another device id and retry
                # NOTE(review): relies on `random` being imported at module level
                client.device_id = client.headers.device_id = random.choice(DEVICES).strip()
            except amino_async.utils.exceptions.VerificationRequired as verify:
                service_align(email, verify.args[0]["url"], level="error")
                await client.session.close()
                return False
            except Exception as e:
                service_align(email, e.args[0]["api:message"], level="error")
                file_logger.debug(traceback.format_exc())
                await client.session.close()
                return False
async def check_accounts():
    """Refresh session ids for stored bot accounts whose SID is missing/expired.

    Splits accounts into permanently invalid ones (skipped, only counted)
    and stale ones, re-logs the stale ones concurrently and rewrites their
    records in the database.
    """
    accounts = database.get_bots()
    invalids = []
    bads = []
    for i in accounts:
        # row layout: (email, password, sid, is_valid, valid_time)
        sid = i[2]
        is_valid = i[3]
        valid_time = i[4]
        if is_valid == 0:
            invalids.append(i)
            continue
        if sid is None or valid_time is None or is_valid is None:
            # incomplete record -> needs a fresh login
            bads.append(i)
            continue
        if valid_time <= int(time.time()):
            # SID lifetime has elapsed
            bads.append(i)
            continue
    if invalids:
        logger.warning(f"{len(invalids)} нерабочих аккаунтов")
    if bads:
        logger.warning(f"{len(bads)} аккаунтов поставлено в очередь для обновления SID...")
        valid_list = await asyncio.gather(*[asyncio.create_task(update_sid(i)) for i in bads])
        # replace the old rows with the freshly produced records
        for i in valid_list:
            database.remove_bot(i.get("email"))
        database.set_bots(list(valid_list))
async def update_sid(account: tuple):
    """Re-login one (email, password, ...) account and build its DB record."""
    email, password = account[0], account[1]
    client = await login(account)
    if not client:
        return {"email": email, "password": password, "isValid": False}
    service_align(email, "SID обновлён")
    await client.session.close()
    return {
        "email": email,
        "password": password,
        "sid": client.sid,
        "isValid": True,
        "validTime": int(time.time()) + 43200,
    }
| StarcoderdataPython |
8117294 | import sqlite3
import datetime
from flask import Flask, render_template
from bokeh.plotting import figure
from bokeh.resources import INLINE
from bokeh.embed import components
app = Flask(__name__)
@app.route('/')
def index():
    """Trivial root endpoint, handy as a health check."""
    greeting = 'hello world'
    return greeting
@app.route('/chart')
def chart():
    """Render a bokeh time-series chart of all greenhouse readings."""
    # create chart
    p = figure(plot_width=1000, plot_height=400, x_axis_type='datetime')
    # add a line renderer
    x = []
    y = []
    all_data = get_data()
    # refactor
    for value in all_data:
        # each row is (unix_timestamp, reading); convert for the datetime axis
        date = datetime.datetime.fromtimestamp(value[0])
        x.append(date)
        y.append(value[1])
    p.line(x, y, line_width=2)
    # create static files (inline JS/CSS so no CDN is required)
    js_resources = INLINE.render_js()
    css_resources = INLINE.render_css()
    script, div = components(p)
    # render template
    return render_template(
        'chart.html',
        plot_script=script,
        plot_div=div,
        js_resources=js_resources,
        css_resources=css_resources
    )
def get_data():
    """Return every row of the greenhouse table as a list of tuples."""
    with sqlite3.connect('greenhouse.db') as connection:
        cursor = connection.cursor()
        cursor.execute('SELECT * FROM greenhouse')
        rows = cursor.fetchall()
    return rows
if __name__ == '__main__':
    # development server only; use a proper WSGI server in production
    app.run(debug=True)
| StarcoderdataPython |
6488960 | """Helper module for handling files on the os."""
import os
import os.path
def get_all_file_names(path, extension=None):
    """Returns all files in the given directory.

    Args:
        path(string): Path to an existing directory.
        extension(string, optional): Set this to get only a certain
            filetype, for example '.txt'.
            Default: None

    Returns:
        list: List of file names.

    Raises:
        NotADirectoryError: If path doesn't exist or is not a directory.
    """
    if not os.path.isdir(path):
        raise NotADirectoryError("Path doesn't exist or is not a directory")
    entries = os.listdir(path)
    if extension is None:
        return entries
    return _filter_files(entries, extension)
def _filter_files(files, extension):
filtered_files = []
for file in files:
if file.lower().endswith(extension):
filtered_files.append(file)
return filtered_files
| StarcoderdataPython |
3343728 | from __future__ import annotations
import aiosqlite
from bot.config import Config
from bot.data import channel_points_handler
from bot.data import command
from bot.data import esc
from bot.data import format_msg
from bot.message import Message
async def ensure_motd_table_exists(db: aiosqlite.Connection) -> None:
    """Create the motd table when missing (no-op otherwise) and commit."""
    await db.execute(
        'CREATE TABLE IF NOT EXISTS motd ('
        ' user TEXT NOT NULL,'
        ' msg TEXT NOT NULL,'
        ' points INT NOT NULL,'
        ' timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP'
        ')',
    )
    await db.commit()
async def set_motd(db: aiosqlite.Connection, user: str, msg: str) -> None:
    """Store *msg* as the newest message of the day for *user*."""
    await ensure_motd_table_exists(db)
    await db.execute(
        'INSERT INTO motd (user, msg, points) VALUES (?, ?, ?)',
        (user, msg, 250),
    )
    await db.commit()
async def get_motd(db: aiosqlite.Connection) -> str:
    """Return the most recent motd (escaped via esc), or a placeholder."""
    await ensure_motd_table_exists(db)
    query = 'SELECT msg FROM motd ORDER BY ROWID DESC LIMIT 1'
    async with db.execute(query) as cursor:
        row = await cursor.fetchone()
        if row is None:
            # table exists but no motd was ever set
            return 'nothing???'
        else:
            return esc(row[0])
async def msg_count(db: aiosqlite.Connection, msg: str) -> int:
    """Return how many times *msg* has been set as the motd."""
    await ensure_motd_table_exists(db)
    query = 'SELECT COUNT(1) FROM motd WHERE msg = ?'
    async with db.execute(query, (msg,)) as cursor:
        # COUNT(1) always yields exactly one row with one column
        ret, = await cursor.fetchone()
    return ret
@channel_points_handler('a2fa47a2-851e-40db-b909-df001801cade')
async def cmd_set_motd(config: Config, msg: Message) -> str:
    """Channel-points redemption: set the motd to the redeemed message."""
    async with aiosqlite.connect('db.db') as db:
        await set_motd(db, msg.name_key, msg.msg)
        s = 'motd updated! thanks for spending points!'
        if msg.msg == '!motd':
            # easter egg: setting the motd to "!motd" reports a counter
            motd_count = await msg_count(db, msg.msg)
            s = f'{s} it has been set to !motd {motd_count} times!'
        return format_msg(msg, s)
@command('!motd')
async def cmd_motd(config: Config, msg: Message) -> str:
    """Chat command: reply with the current message of the day."""
    async with aiosqlite.connect('db.db') as db:
        current = await get_motd(db)
    return format_msg(msg, current)
| StarcoderdataPython |
5160514 | <reponame>actris-cloudnet/model-evaluation
import os
import netCDF4
from typing import Union
from datetime import datetime
from cloudnetpy import utils, output
from model_evaluation import version
from model_evaluation.products.model_products import ModelManager
from model_evaluation.metadata import MetaData, MODEL_ATTRIBUTES, CYCLE_ATTRIBUTES, MODEL_L3_ATTRIBUTES, \
REGRID_PRODUCT_ATTRIBUTES
def update_attributes(model_downsample_variables: dict, attributes: dict):
    """Overrides existing Cloudnet-ME Array-attributes.

    Overrides existing attributes using hard-coded values.
    New attributes are added.

    Args:
        model_downsample_variables (dict): Array instances.
        attributes (dict): Product-specific attributes.
    """
    for key in model_downsample_variables:
        # split on every underscore: key_parts holds all name components
        x = len(key.split('_')) - 1
        key_parts = key.split('_', x)
        # attribute tables are tried from most to least specific;
        # later set_attributes calls override earlier ones
        if key in list(attributes.keys()):
            model_downsample_variables[key].set_attributes(attributes[key])
        if key in MODEL_ATTRIBUTES:
            model_downsample_variables[key].set_attributes(MODEL_ATTRIBUTES[key])
        elif '_'.join(key_parts[0:-1]) in REGRID_PRODUCT_ATTRIBUTES:
            model_downsample_variables[key].set_attributes(REGRID_PRODUCT_ATTRIBUTES['_'.join(key_parts[0:-1])])
        elif '_'.join(key_parts[0:-2]) in REGRID_PRODUCT_ATTRIBUTES:
            model_downsample_variables[key].set_attributes(REGRID_PRODUCT_ATTRIBUTES['_'.join(key_parts[0:-2])])
        elif '_'.join(key_parts[1:]) in MODEL_L3_ATTRIBUTES or '_'.join(key_parts[2:]) in MODEL_L3_ATTRIBUTES:
            try:
                model_downsample_variables[key].set_attributes(MODEL_L3_ATTRIBUTES['_'.join(key_parts[1:])])
            except KeyError:
                # the 'or' above matched on the second variant only
                model_downsample_variables[key].set_attributes(MODEL_L3_ATTRIBUTES['_'.join(key_parts[2:])])
        elif '_'.join(key_parts[1:]) in CYCLE_ATTRIBUTES:
            model_downsample_variables[key].set_attributes(CYCLE_ATTRIBUTES['_'.join(key_parts[1:])])
        elif '_'.join(key_parts[2:]) in CYCLE_ATTRIBUTES:
            model_downsample_variables[key].set_attributes(CYCLE_ATTRIBUTES['_'.join(key_parts[2:])])
def save_downsampled_file(id_mark: str,
                          file_name: str,
                          objects: tuple,
                          files: tuple,
                          keep_uuid: bool,
                          uuid: Union[str, None]):
    """Saves a standard downsampled day product file.

    Args:
        id_mark (str): File identifier, format "(product name)_(model name)"
        file_name (str): Name of the output file to be generated
        objects (tuple): Include two objects: The :class:'ModelManager' and
            The :class:'ObservationManager.
        files (tuple): Includes two sourcefile group: List of model file(s) used
            for processing output file and Cloudnet L2 product file
        keep_uuid (bool): If True, keeps the UUID of the old file, if that exists.
            Default is False when new UUID is generated.
        uuid (str): Set specific UUID for the file.

    Returns:
        str: UUID of the written file.
    """
    # NOTE(review): keep_uuid is not referenced in this body -- confirm intent.
    obj = objects[0]
    dimensions = {'time': len(obj.time),
                  'level': len(obj.data['level'][:])}
    root_group = output.init_file(file_name, dimensions, obj.data, uuid)
    _augment_global_attributes(root_group)
    uuid = root_group.file_uuid
    root_group.cloudnet_file_type = id_mark.split('-')[0]
    root_group.title = f"Downsampled {id_mark.capitalize().replace('_', ' of ')} from {obj.dataset.location}"
    _add_source(root_group, objects, files)
    output.copy_global(obj.dataset, root_group, ('location', 'day', 'month', 'year'))
    try:
        obj.dataset.day
    except AttributeError:
        # the source dataset lacks date attributes: fall back to obj.date
        root_group.year, root_group.month, root_group.day = obj.date
    output.merge_history(root_group, id_mark, {'l3': obj})
    root_group.close()
    return uuid
def add_var2ncfile(obj: ModelManager, file_name: str):
    """Append the variables of *obj.data* to an existing netCDF file."""
    root = netCDF4.Dataset(file_name, 'r+', format='NETCDF4_CLASSIC')
    _write_vars2nc(root, obj.data)
    root.close()
def _write_vars2nc(rootgrp: netCDF4.Dataset, cloudnet_variables: dict):
    """Iterates over Cloudnet-ME instances and write to given rootgrp."""
    def _get_dimensions(array):
        """Finds correct dimensions for a variable."""
        if utils.isscalar(array):
            return ()
        variable_size = ()
        file_dims = rootgrp.dimensions
        array_dims = array.shape
        for length in array_dims:
            # match each array axis to the first file dimension of equal size
            dim = [key for key in file_dims.keys()
                   if file_dims[key].size == length][0]
            variable_size = variable_size + (dim,)
        return variable_size
    for key in cloudnet_variables:
        obj = cloudnet_variables[key]
        size = _get_dimensions(obj.data)
        try:
            nc_variable = rootgrp.createVariable(obj.name, obj.data_type, size,
                                                 zlib=True)
            nc_variable[:] = obj.data
            for attr in obj.fetch_attributes():
                setattr(nc_variable, attr, getattr(obj, attr))
        except RuntimeError:
            # createVariable rejected the variable (e.g. it already exists):
            # skip it and continue with the rest
            continue
def _augment_global_attributes(root_group: netCDF4.Dataset):
    """Stamp CF-convention and cloudnetme version globals on the file."""
    root_group.Conventions = 'CF-1.8'
    root_group.cloudnetme_version = version.__version__
def _add_source(root_ground: netCDF4.Dataset, objects: tuple, files: tuple):
    """Generates source info for multiple files"""
    model, obs = objects
    model_files, obs_file = files
    # one observation file line, then the model files joined by newlines
    obs_line = f"Observation file: {os.path.basename(obs_file)}"
    model_names = "\n".join(os.path.basename(f) for f in model_files)
    root_ground.source = f"{obs_line}\n{model.model} file(s): {model_names}"
    root_ground.source_file_uuids = output.get_source_uuids(model, obs)
def add_time_attribute(date: datetime) -> dict:
    """Build an attribute dict whose 'time' entry carries date-based units.

    Args:
        date: Datetime of the processed day.

    Returns:
        dict: {'time': MetaData(...)} with units "hours since <date> 00:00:00".
    """
    day_string = date.strftime('%y.%m.%d')
    return {'time': MetaData(units=f'hours since {day_string} 00:00:00')}
| StarcoderdataPython |
11377310 | #!python
# -*- coding: utf-8 -*-
"""
VRT Widesky low-level Password mix-in.
"""
class PasswordOpsMixin(object):
    """
    The Password operations mix-in implements low-level support for
    modifying the current Widesky user's password.
    """

    def update_password(self, new_password, callback=None):
        """
        Change the current logged in user's password.

        If the update is unsuccessful then AsynchronousException is raised.

        :param new_password: Password value.
        :param callback: The function to call after this operation
            is complete.
        """
        operation = self._PASSWORD_CHANGE_OPERATION(self, new_password)
        if callback is not None:
            operation.done_sig.connect(callback)
        operation.go()
        return operation
| StarcoderdataPython |
4906160 | """This module implements a multilayer perceptron
Author: <NAME>"""
import torch.nn as nn
class RegressorMlp(nn.Module):
"""Class to construct fully connected neural networks with a
number of hidden layers"""
def __init__(self, layer_sizes, afunc=nn.ReLU):
"""Initialize a multilayer perceptron
:param layer_sizes: list of layer sizes
:param afunc: activation function
"""
super().__init__()
n_layers = len(layer_sizes)
self.net = nn.Sequential()
for i in range(n_layers - 1):
self.net.add_module('layer_{}'.format(i),
nn.Linear(layer_sizes[i], layer_sizes[i+1]))
# Add activation function, except for the last layer
if afunc and i != n_layers - 2:
self.net.add_module('afunc_{}'.format(i), afunc())
self.net.double() # We use doubles by default
def forward(self, X, **kwargs):
return self.net(X)
| StarcoderdataPython |
6509489 | <filename>tests/test_gs.py
# -*- coding: utf-8 -*-
"""
use Kreuz spikes from Fig 2B to test global_sync code
"""
from __future__ import print_function, division
import numpy as np
import numpy.testing as nptest
import pandas as pd
from burst_sync import global_sync as gs
def test_get_phase_spikes():
    """Compare get_phase_spikes against reference output from the Matlab code."""
    spikes = np.round(pd.read_csv('tests/test_data.csv').values[:, 1])
    # first spike time forced to 2 to match the Matlab fixture
    spikes[0] = 2
    phase = gs.get_phase_spikes(spikes, 4000)
    # kreuz_phase.csv from running Matlab code
    result = pd.read_csv('tests/kreuz_phase.csv', header=None)[0].values
    nptest.assert_allclose(phase[:-1], result[:-1])  # last value is random
def test_calc_global_sync():
    """Check calc_global_sync against the SI value from the Matlab code."""
    spikes = np.round(pd.read_csv('tests/test_data.csv').values[:, 1:])
    (M, N) = spikes.shape
    # Matlab code assumed unique integer spike times
    spikes[spikes == 0] = 2
    for i in range(M):
        for j in range(N-1):
            # bump duplicates so consecutive spike times stay unique
            if spikes[i, j] == spikes[i, j+1]:
                spikes[i, j+1] += 1
    spike_list = [list(spikes[:, i]) for i in range(N)]
    sync = gs.calc_global_sync(spike_list, 4000)
    SI = (20.632722 - 1)/(N - 1)  # 20.63 from running Matlab code
    nptest.assert_allclose(sync, SI, atol=.001)
| StarcoderdataPython |
11263500 | <reponame>dvdzhang/uncertainty-baselines
# coding=utf-8
# Copyright 2022 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Entry point for Uncertainty Baselines.
"""
import os.path
from typing import Optional
from absl import app
from absl import flags
from absl import logging
import numpy as np
import tensorflow.compat.v2 as tf
import uncertainty_baselines as ub
import eval as eval_lib # local file import from experimental.one_vs_all
import flags as flags_lib # local file import from experimental.one_vs_all
import losses as loss_lib # local file import from experimental.one_vs_all
import models as models_lib # local file import from experimental.one_vs_all
import train as train_lib # local file import from experimental.one_vs_all
FLAGS = flags.FLAGS
# TODO(znado): remove this and add padding to last batch.
def _check_batch_replica_divisible(
    batch_size: int,
    strategy: tf.distribute.Strategy):
  """Ensure the batch size is evenly divisible by the number of replicas."""
  num_replicas = strategy.num_replicas_in_sync
  if batch_size % num_replicas:
    message = (
        'Batch size must be evenly divisible by the number of replicas in the '
        'job. Batch size: {}, num replicas: {}'.format(batch_size, num_replicas))
    raise ValueError(message)
def _setup_trial_dir(trial_dir: str, flag_string: Optional[str]):
  """Create `trial_dir` if needed and record `flag_string` in flags.cfg."""
  if not trial_dir:
    return
  if not tf.io.gfile.exists(trial_dir):
    tf.io.gfile.makedirs(trial_dir)
  if not flag_string:
    return
  flags_path = os.path.join(trial_dir, 'flags.cfg')
  with tf.io.gfile.GFile(flags_path, 'w+') as out_file:
    out_file.write(flag_string)
def _maybe_setup_trial_dir(
    strategy,
    trial_dir: str,
    flag_string: Optional[str]):
  """Create `trial_dir` if it does not exist and save the flags if provided."""
  if not trial_dir:
    logging.warning('Not saving any experiment outputs!')
  else:
    logging.info('Saving to dir: %s', trial_dir)
  if flag_string:
    logging.info('Running with flags:\n%s', flag_string)
  # With multiple replicas only the first task writes the flags file;
  # concurrent writers can otherwise trip over each other.
  multi_replica = strategy.num_replicas_in_sync > 1
  if not multi_replica or strategy.cluster_resolver.task_id == 0:
    _setup_trial_dir(trial_dir, flag_string)
def run(trial_dir: str, flag_string: Optional[str]):
  """Run the experiment.
  Args:
    trial_dir: String to the dir to write checkpoints to and read them from.
    flag_string: Optional string used to record what flags the job was run with.
  """
  # Seed both TF and NumPy so runs with the same flags are reproducible.
  tf.random.set_seed(FLAGS.seed)
  np.random.seed(FLAGS.seed)
  # Default eval_frequency to log_frequency and require it to be a multiple,
  # so evaluation always lands on a logging step.
  if not FLAGS.eval_frequency:
    FLAGS.eval_frequency = FLAGS.log_frequency
  if FLAGS.eval_frequency % FLAGS.log_frequency != 0:
    raise ValueError(
        'log_frequency ({}) must evenly divide eval_frequency '
        '({}).'.format(FLAGS.log_frequency, FLAGS.eval_frequency))
  strategy = ub.strategy_utils.get_strategy(FLAGS.tpu, FLAGS.use_tpu)
  with strategy.scope():
    _maybe_setup_trial_dir(strategy, trial_dir, flag_string)
    # TODO(znado): pass all dataset and model kwargs.
    train_dataset_builder = ub.datasets.get(
        dataset_name=FLAGS.dataset_name,
        split='train',
        validation_percent=FLAGS.validation_percent,
        shuffle_buffer_size=FLAGS.shuffle_buffer_size)
    # A validation split only exists when a slice of train is held out.
    if FLAGS.validation_percent > 0:
      validation_dataset_builder = ub.datasets.get(
          dataset_name=FLAGS.dataset_name,
          split='validation',
          validation_percent=FLAGS.validation_percent)
    else:
      validation_dataset_builder = None
    test_dataset_builder = ub.datasets.get(
        dataset_name=FLAGS.dataset_name,
        split='test')
    model = models_lib.create_model(
        batch_size=FLAGS.batch_size,
        num_classes=10,
        distance_logits=FLAGS.distance_logits)
    loss_fn = loss_lib.get(
        FLAGS.loss_name, from_logits=True, dm_alpha=FLAGS.dm_alpha)
    # Eval-only mode: run the eval loop over saved checkpoints and return.
    if FLAGS.mode == 'eval':
      _check_batch_replica_divisible(FLAGS.eval_batch_size, strategy)
      eval_lib.run_eval_loop(
          validation_dataset_builder=validation_dataset_builder,
          test_dataset_builder=test_dataset_builder,
          batch_size=FLAGS.eval_batch_size,
          model=model,
          loss_fn=loss_fn,
          trial_dir=trial_dir,
          train_steps=FLAGS.train_steps,
          strategy=strategy,
          metric_names=['accuracy', 'loss'],
          checkpoint_step=FLAGS.checkpoint_step)
      return
    _check_batch_replica_divisible(FLAGS.batch_size, strategy)
    if FLAGS.mode == 'train_and_eval':
      _check_batch_replica_divisible(FLAGS.eval_batch_size, strategy)
    steps_per_epoch = train_dataset_builder.num_examples // FLAGS.batch_size
    # Collect optimizer hyperparameters from the optimizer_hparams_* flags.
    optimizer_kwargs = {
        k[len('optimizer_hparams_'):]: FLAGS[k].value for k in FLAGS
        if k.startswith('optimizer_hparams_')
    }
    optimizer = ub.optimizers.get(
        optimizer_name=FLAGS.optimizer,
        learning_rate_schedule=FLAGS.learning_rate_schedule,
        learning_rate=FLAGS.learning_rate,
        weight_decay=FLAGS.weight_decay,
        steps_per_epoch=steps_per_epoch,
        **optimizer_kwargs)
    # Train (and optionally interleave evaluation, depending on FLAGS.mode).
    train_lib.run_train_loop(
        train_dataset_builder=train_dataset_builder,
        validation_dataset_builder=validation_dataset_builder,
        test_dataset_builder=test_dataset_builder,
        batch_size=FLAGS.batch_size,
        eval_batch_size=FLAGS.eval_batch_size,
        model=model,
        optimizer=optimizer,
        loss_fn=loss_fn,
        eval_frequency=FLAGS.eval_frequency,
        log_frequency=FLAGS.log_frequency,
        trial_dir=trial_dir,
        train_steps=FLAGS.train_steps,
        mode=FLAGS.mode,
        strategy=strategy,
        metric_names=['accuracy', 'loss'])
def main(program_flag_names):
  """Resolve the trial directory, serialize the flags, and launch the run."""
  logging.info('Starting Uncertainty Baselines experiment!')
  logging.info(
      '\n\nRun the following command to view outputs in tensorboard.dev:\n\n'
      'tensorboard dev upload --logdir %s\n\n', FLAGS.output_dir)
  # TODO(znado): when open sourced tuning is supported, change this to include
  # the trial number.
  trial_dir = os.path.join(FLAGS.output_dir, '0')
  flag_values = {}
  for name in program_flag_names:
    flag_values[name] = FLAGS[name].value
  run(trial_dir, flags_lib.serialize_flags(flag_values))
if __name__ == '__main__':
  # Register the program's flags, then hand control to absl with a runner
  # that forwards the defined flag names to main().
  defined_flag_names = flags_lib.define_flags()
  app.run(lambda _: main(defined_flag_names))
| StarcoderdataPython |
1695121 | <filename>test/python/mock/test_new_qasm_backend.py
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test of qasm fake backends from qiskit.mock package."""
import unittest
from qiskit import QuantumCircuit, transpile
from qiskit.test import QiskitTestCase
from qiskit.providers.fake_provider import FakeBogota
from qiskit.utils import optionals as _optionals
class FakeQasmBackendsTest(QiskitTestCase):
    """Tests for FakeQasmBackend"""
    @unittest.skipUnless(_optionals.HAS_AER, "qiskit-aer is required to run this test")
    def test_fake_qasm_backend_configured(self):
        """Fake backends honor kwargs passed."""
        # FakeBogota is a FakePulseBackend implementation.
        backend = FakeBogota()
        circuit = QuantumCircuit(2)
        circuit.x(range(0, 2))
        circuit.measure_all()
        compiled = transpile(circuit, backend)
        counts = backend.run(compiled, shots=1000).result().get_counts()
        # Every shot lands in some bitstring, so the counts sum to the total.
        self.assertEqual(sum(counts.values()), 1000)
| StarcoderdataPython |
5040270 | import matplotlib.pyplot as plt
import numpy as np
# Genre names for the x axis, in the same order as the release counts below.
label = ['Adventure', 'Action', 'Drama', 'Comedy', 'Thriller/Suspense', 'Horror', 'Romantic Comedy', 'Musical',
         'Documentary', 'Black Comedy', 'Western', 'Concert/Performance', 'Multiple Genres', 'Reality']
# Number of movies released per genre.
no_movies = [941, 854, 4595, 2125, 942, 509, 548, 149, 1952, 161, 64, 61, 35, 5]
index = np.arange(len(label))
# Draw the bar chart, then label the axes and rotate the genre names so they fit.
plt.bar(index, no_movies)
plt.title('Movie type released during 2017-2019')
plt.xlabel('Movietype', fontsize=8)
plt.ylabel('No of Movies', fontsize=8)
plt.xticks(index, label, fontsize=8, rotation=90)
plt.show()
| StarcoderdataPython |
19760 | <gh_stars>10-100
import random as rnd
from itertools import combinations
from time import perf_counter as pfc
def seite_ermitteln(versuch):
    """Map a weighing attempt to a side vector: -1 left pan, +1 right pan, 0 unused."""
    # The first half of the attempt goes on the left pan, the rest on the right.
    links = set(versuch[:len(versuch) // 2])
    seite = [0] * anz_kugeln
    for nr in versuch:
        if nr in links:
            seite[nr] = -1
        else:
            seite[nr] = 1
    return seite
def wiegen(nr, gewicht, seite):
    """Return the scale reading for ball `nr` with weight deviation `gewicht`."""
    return seite[nr] * gewicht
def statusänderung(wiegung, seite):
    """Update each ball's status marker after a weighing result `wiegung`."""
    for nr, status in kugeln:
        weighed = seite[nr] != 0
        if wiegung == 0:
            # Balanced scale: any ball that was on it must be normal.
            if weighed:
                kugeln[nr][1] = '='
        elif not weighed:
            # Unbalanced scale: balls that were left off cannot be the odd one.
            kugeln[nr][1] = '='
        else:
            kugeln[nr][1] = stati[wiegung == seite[nr]].get(status, status)
def kugel2str(liste):
    """Format ball numbers together with their current status marker."""
    parts = [f'{nr}{kugeln[nr][1]}' for nr in liste]
    return ' '.join(parts)
def prüfung(v1, v2m, v2lr):
    """Simulate the two weighings for every possible odd ball; fail as soon as
    more than three candidate balls remain after a scenario."""
    text = ''
    for nr in range(anz_kugeln):
        # Reset all ball statuses to unknown for this scenario.
        for k in kugeln: k[1] = '?'
        # The odd ball is `nr`, randomly heavier (+1) or lighter (-1).
        gesucht = (nr, rnd.choice((-1, 1)))
        text += f'Gesucht wird {gesucht[0]}{"+" if gesucht[1] == 1 else "-"}\n'
        for n in range(2):
            # First weighing uses v1; the second depends on the first result
            # (only evaluated when n == 1, so `wiegung` is defined by then).
            v = v1 if n == 0 else v2m if wiegung == 0 else v2lr
            seite = seite_ermitteln(v)
            wiegung = wiegen(*gesucht, seite)
            statusänderung(wiegung, seite)
            text += f'{wiegung} {kugel2str(v)}\n'
        # Balls still not cleared ('=') after both weighings are candidates.
        kandidaten = [k[0] for k in kugeln if k[1] != '=']
        if len(kandidaten) > 3: return False, text
        text += f'Kandidaten = {kugel2str(kandidaten)}\n\n'
    return True, text
def prüfe_varianten(modus):
    """Enumerate second-weighing variants and count those that always succeed.
    modus: 0 = count only, >0 = print each solution, >1 = also print the
    trace, >2 = stop after the first solution.
    """
    anz_lösungen = 0
    vs = set()
    for anz in range(1, anz_kugeln//2+1):
        for v2l in combinations(range(anz_kugeln), anz):
            for v2r in combinations(range(anz_kugeln), anz):
                # The two pans must not share balls, and mirrored pairs
                # (left/right swapped) are duplicates.
                if set(v2l) & set(v2r): continue
                if (v2r,v2l) in vs: continue
                vs.add((v2l, v2r))
                e, text = prüfung(v1, v2m, v2l+v2r)
                if e:
                    anz_lösungen += 1
                    if modus > 0: print(f'Lösung Nr. {anz_lösungen} für V2lr {v2l} <-> {v2r}')
                    if modus > 1: print(text+'\n\n')
                    if modus > 2: return
    print(f'Anzahl Lösungen für V2lr: {anz_lösungen}')
start = pfc()
# Status transition tables: key True = the weighing matched the ball's side,
# key False = it did not.
stati = {True: {'?': '+', '-': '='},
         False: {'?': '-', '+': '='}}
anz_kugeln = 12
kugeln = [[nr, '?'] for nr in range(anz_kugeln)]
# v1 is the fixed first weighing; v2m is the second weighing used after a
# balanced result.  The unbalanced-case weighing (v2lr) is searched for.
v1 = [0, 1, 2, 3, 4, 5, 6, 7]
v2m = [8, 9, 10, 0, 1, 2]
prüfe_varianten(0)
print(f'{pfc()-start:.2f} Sek.')
| StarcoderdataPython |
290090 | <filename>backend/attention_keeper/controller/event.py
import multiprocessing
import time
import feedparser
import psutil as psutil
from flask import current_app
from flask_api import status
from attention_keeper.model.event import Event
from attention_keeper.model.item import Item
from attention_keeper.util import logger
from attention_keeper.util.question_generator import QuestionGenerator
from attention_keeper.view.api import app
from attention_keeper.view.api import db
LOGGER = logger.get_logger(__name__)
def decode_rss(polling_frequency: int, rss_feed: str, event_id: int):
    """Poll `rss_feed` forever, persisting entries not yet stored for the event."""
    while True:
        LOGGER.debug('polling rss feed: %s', rss_feed)
        feed = feedparser.parse(rss_feed)
        with app.app_context():
            known_count = len(Item.query.filter_by(event_id=event_id).all())
            # Entries are assumed to arrive in order; skip the ones already
            # persisted and process only the tail of the feed.
            for entry in feed['entries'][known_count:]:
                slug = entry['inception_slug']
                db.session.add(Item(event_id=event_id, title=slug,
                                    isBreak=entry['inception_break'] == 'true'))
                if slug:
                    QuestionGenerator(slug, event_id).start()
                db.session.commit()
        time.sleep(polling_frequency)
def create_rss_feed_process(polling_frequency: int, rss_feed: str, event_id: int) -> int:
    """Spawn the RSS polling loop in its own process and return its PID."""
    worker = multiprocessing.Process(
        target=decode_rss,
        kwargs={'polling_frequency': polling_frequency,
                'rss_feed': rss_feed,
                'event_id': event_id})
    worker.start()
    return worker.pid
def create_event(rss_feed: str, name: str):
    """Persist a new event, start its RSS polling process, and return its id."""
    event = Event(name=name, rss_feed=rss_feed)
    db.session.add(event)
    db.session.commit()
    # The event row must be committed first so the poller gets a valid event_id.
    polling_frequency = current_app.config['POLLING_FREQUENCY']
    event.pid = create_rss_feed_process(polling_frequency, rss_feed, event.event_id)
    db.session.commit()
    return {"event_id": event.event_id}
def delete_event(event_id: int):
    """Stop the polling process for `event_id` and delete the event row.

    Returns an error message with HTTP 400 when the event does not exist,
    otherwise HTTP 200 after the event has been removed.
    """
    event = Event.query.filter_by(event_id=event_id).first()
    if event is None:
        return "Bad event_id", status.HTTP_400_BAD_REQUEST
    try:
        # BUG FIX: the poller may have died already (or the host restarted);
        # previously a stale PID raised here and the event row was never
        # cleaned up.
        psutil.Process(event.pid).kill()
    except psutil.NoSuchProcess:
        pass
    db.session.delete(event)
    db.session.commit()
    return "Operation successful", status.HTTP_200_OK
def is_on_break(event_id: int):
    """Return whether the most recent item for `event_id` is a break."""
    item = Item.query.filter_by(event_id=event_id).order_by(Item.item_id.desc()).first()
    if item is None:
        return "No item for event_id", status.HTTP_400_BAD_REQUEST
    # BUG FIX: reuse the row fetched above; the original issued a second,
    # redundant query that could race with concurrent inserts.
    return {'is_on_break': item.isBreak}
| StarcoderdataPython |
3515930 | <reponame>muma7490/PyLightSupport
import os
import json
import logging.config
def setup_logging(default_path=f"{os.path.dirname(__file__)}/logsettings.json", default_level=logging.DEBUG,):
    '''Setup logging configuration.

    Loads the dict-config JSON at `default_path` when it exists; otherwise
    falls back to `logging.basicConfig(level=default_level)`.  Noisy
    third-party loggers (huey, requests) are always capped at INFO.
    '''
    # BUG FIX: removed stray debug print() calls that polluted stdout on
    # every startup; the repeated getLogger chains are collapsed into a loop.
    noisy_loggers = (
        "huey.contrib.djhuey.management.commands.run_huey",
        "huey.contrib.djhuey.management.commands",
        "huey.contrib.djhuey.management",
        "huey.contrib.djhuey",
        "huey.contrib",
        "huey",
        "requests",
    )
    for name in noisy_loggers:
        logging.getLogger(name).setLevel(logging.INFO)
    if os.path.exists(default_path):
        # Use a context manager so the settings file is always closed.
        with open(default_path, 'rt') as f:
            config = json.load(f)
        logging.config.dictConfig(config)
    else:
        logging.basicConfig(level=default_level)
| StarcoderdataPython |
5150278 | import time
# Selector keyword names accepted when locating an element; PageElement
# rejects any keyword argument whose name is not in this list.
LOCATOR_LIST = [
    "text",
    "textContains",
    "textMatches",
    "textStartsWith",
    "className",
    "classNameMatches",
    "description",
    "descriptionContains",
    "descriptionMatches",
    "descriptionStartsWith",
    "checkable",
    "checked",
    "clickable",
    "longClickable",
    "scrollable",
    "enabled",
    "focusable",
    "focused",
    "selected",
    "packageName",
    "packageNameMatches",
    "resourceId",
    "resourceIdMatches",
    "index",
    "instance",
]
class Page(object):
    """Base page object; simply stores the driver shared by its elements."""

    def __init__(self, dr):
        self.driver = dr
class PageElement(object):
    """Descriptor locating a UI element by a uiautomator-style selector.

    Accessing the attribute on a Page instance binds that page's driver into
    the module-level ``driver`` used by the action methods; ``click``,
    ``set_text`` and ``get_text`` then poll once per second (up to ``timeout``
    seconds) until the element exists before acting on it.
    """
    driver = None

    def __init__(self, timeout=10, describe=None, **kwargs):
        self.time_out = timeout
        # BUG FIX: `describe` was accepted but silently discarded; keep it so
        # callers can attach a human-readable label.
        self.describe = describe
        if not kwargs:
            raise ValueError("Please specify a locator")
        self.kwargs = kwargs
        self.k, self.v = next(iter(kwargs.items()))
        if self.k not in LOCATOR_LIST:
            raise KeyError("Element positioning of type '{}' is not supported.".format(self.k))

    def __get__(self, instance, owner):
        if instance is None:
            return None
        # Bind the owning page's driver into the module-level `driver`
        # consumed by the action methods below.
        global driver
        driver = instance.driver
        return self

    def _wait_until_exists(self):
        """Poll once per second until the element exists or the timeout expires.

        The identical retry loop was previously copy-pasted into click(),
        set_text() and get_text(); it now lives in one place.
        """
        for i in range(self.time_out):
            if driver(**self.kwargs).exists:
                break
            else:
                time.sleep(1)
        else:
            raise NameError("No corresponding element was found")

    def click(self):
        """
        click element
        """
        self._wait_until_exists()
        driver(**self.kwargs).click()

    def set_text(self, text):
        """
        input text
        :param text:
        """
        self._wait_until_exists()
        driver(**self.kwargs).set_text(text=text)

    def get_text(self):
        """
        get element text
        """
        self._wait_until_exists()
        return driver(**self.kwargs).get_text()
| StarcoderdataPython |
3376251 | # Generated by Django 2.2.7 on 2019-11-20 20:04
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Renames the Entry model to VisitorEntry and extends User with an
    # auto-updating timestamp plus a Host/Visitor role field.
    dependencies = [
        ('ems', '0003_auto_20191120_1910'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='Entry',
            new_name='VisitorEntry',
        ),
        migrations.AddField(
            model_name='user',
            name='datetime',
            # auto_now refreshes the timestamp on every save.
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name='user',
            name='role',
            field=models.CharField(choices=[('Host', 'Host'), ('Visitor', 'Visitor')], default='Host', max_length=30),
            # The default was only needed to back-fill existing rows.
            preserve_default=False,
        ),
    ]
| StarcoderdataPython |
1687360 | """Export annotated corpus data to format-preserved xml."""
import logging
import os
import xml.etree.ElementTree as etree
import sparv.util as util
from sparv import (AnnotationData, Config, Document, Export, ExportAnnotations, SourceAnnotations, Text, exporter)
from . import xml_utils
log = logging.getLogger(__name__)
@exporter("XML export preserving whitespaces from source file", config=[
    Config("xml_export.filename_formatted", default="{doc}_export.xml",
           description="Filename pattern for resulting XML files, with '{doc}' representing the source name.")
])
def preserved_format(doc: Document = Document(),
                     text: Text = Text(),
                     docid: AnnotationData = AnnotationData("<docid>"),
                     out: Export = Export("xml_preserved_format/[xml_export.filename_formatted]"),
                     annotations: ExportAnnotations = ExportAnnotations("xml_export.annotations"),
                     source_annotations: SourceAnnotations = SourceAnnotations("xml_export.source_annotations"),
                     header_annotations: SourceAnnotations = SourceAnnotations("xml_export.header_annotations"),
                     remove_namespaces: bool = Config("export.remove_module_namespaces", False),
                     sparv_namespace: str = Config("export.sparv_namespace"),
                     source_namespace: str = Config("export.source_namespace"),
                     include_empty_attributes: bool = Config("xml_export.include_empty_attributes")):
    """Export annotations to XML in export_dir and keep whitespaces and indentation from original file.
    Args:
        doc: Name of the original document.
        text: The corpus text.
        docid: Annotation with document IDs.
        out: Path and filename pattern for resulting file.
        annotations: List of elements:attributes (annotations) to include.
        source_annotations: List of elements:attributes from the original document
            to be kept. If not specified, everything will be kept.
        header_annotations: List of header elements from the original document to include
            in the export. If not specified, all headers will be kept.
        remove_namespaces: Whether to remove module "namespaces" from element and attribute names.
            Disabled by default.
        sparv_namespace: The namespace to be added to all Sparv annotations.
        source_namespace: The namespace to be added to all annotations present in the source.
        include_empty_attributes: Whether to include attributes even when they are empty. Disabled by default.
    """
    # Create export dir
    os.makedirs(os.path.dirname(out), exist_ok=True)
    # Read corpus text and document ID
    corpus_text = text.read()
    docid = docid.read()
    # Get annotation spans, annotations list etc.
    annotation_list, _, export_names = util.get_annotation_names(annotations, source_annotations, doc=doc,
                                                                remove_namespaces=remove_namespaces,
                                                                sparv_namespace=sparv_namespace,
                                                                source_namespace=source_namespace)
    h_annotations, h_export_names = util.get_header_names(header_annotations, doc=doc)
    export_names.update(h_export_names)
    span_positions, annotation_dict = util.gather_annotations(annotation_list, export_names, h_annotations, doc=doc,
                                                              flatten=False, split_overlaps=True)
    # Flatten the {position: [(instruction, span), ...]} mapping into a single
    # position-ordered list of open/close events.
    sorted_positions = [(pos, span[0], span[1]) for pos, spans in sorted(span_positions.items()) for span in spans]
    # Root tag sanity check
    if not xml_utils.valid_root(sorted_positions[0], sorted_positions[-1]):
        raise util.SparvErrorMessage("Root tag is missing! If you have manually specified which elements to include, "
                                     "make sure to include an element that encloses all other included elements and "
                                     "text content.")
    # Create root node
    root_span = sorted_positions[0][2]
    root_span.set_node()
    node_stack = []
    last_pos = 0  # Keeps track of the position of the processed text
    # Sweep the open/close events in order, building the element tree and
    # re-attaching the original corpus text between spans as .text/.tail so
    # whitespace and indentation are preserved exactly.
    for x, (_pos, instruction, span) in enumerate(sorted_positions):
        # Open node: Create child node under the top stack node
        if instruction == "open":
            # Set tail for previous node if necessary
            if last_pos < span.start:
                # Get last closing node in this position
                _, tail_span = [i for i in span_positions[last_pos] if i[0] == "close"][-1]
                tail_span.node.tail = corpus_text[last_pos:span.start]
                last_pos = span.start
            # Handle headers
            if span.is_header:
                header = annotation_dict[span.name][util.HEADER_CONTENTS][span.index]
                header_xml = etree.fromstring(header)
                header_xml.tag = span.export  # Rename element if needed
                span.node = header_xml
                node_stack[-1].node.append(header_xml)
            else:
                if node_stack:  # Don't create root node, it already exists
                    span.set_node(parent_node=node_stack[-1].node)
                xml_utils.add_attrs(span.node, span.name, annotation_dict, export_names, span.index,
                                    include_empty_attributes)
                if span.overlap_id:
                    # Overlapping spans were split; tag each part so the
                    # pieces can be re-associated via the document ID.
                    if sparv_namespace:
                        span.node.set(f"{sparv_namespace}.{util.OVERLAP_ATTR}", f"{docid}-{span.overlap_id}")
                    else:
                        span.node.set(f"{util.SPARV_DEFAULT_NAMESPACE}.{util.OVERLAP_ATTR}",
                                      f"{docid}-{span.overlap_id}")
                node_stack.append(span)
                # Set text if there should be any between this node and the next one
                next_item = sorted_positions[x + 1]
                if next_item[1] == "open" and next_item[2].start > span.start:
                    span.node.text = corpus_text[last_pos:next_item[2].start]
                    last_pos = next_item[2].start
        # Close node
        else:
            if span.is_header:
                continue
            if last_pos < span.end:
                # Set node text if necessary
                if span.start == last_pos:
                    span.node.text = corpus_text[last_pos:span.end]
                # Set tail for previous node if necessary
                else:
                    # Get last closing node in this position
                    _, tail_span = [i for i in span_positions[last_pos] if i[0] == "close"][-1]
                    tail_span.node.tail = corpus_text[last_pos:span.end]
                last_pos = span.end
            # Make sure closing node == top stack node
            assert span == node_stack[-1], "Overlapping elements found: {}".format(node_stack[-2:])
            # Pop stack and move on to next span
            node_stack.pop()
    # Write xml to file
    etree.ElementTree(root_span.node).write(out, encoding="unicode", method="xml", xml_declaration=True)
    log.info("Exported: %s", out)
| StarcoderdataPython |
8002802 | <reponame>GearPlug/predictivoAPI-python
class BaseError(Exception):
    """Root of this package's error hierarchy; catch this for any API failure."""
    pass
class BadRequest(BaseError):
    """Raised when the server rejects the request as malformed."""
    pass
class APIKeyMissing(BaseError):
    """Raised when no API key was provided for an authenticated call."""
    pass
class Forbidden(BaseError):
    """Raised when the credentials lack permission for the operation."""
    pass
class ResourceNotFound(BaseError):
    """Raised when the requested resource does not exist."""
    pass
class MethodNotAllowed(BaseError):
    """Raised when the HTTP method is not supported for the resource."""
    pass
class ResourceNestingTooDeep(BaseError):
    """Raised when a request nests resources beyond the allowed depth."""
    pass
class InvalidMethodOverride(BaseError):
    """Raised when an HTTP method override is not valid."""
    pass
class TooManyRequests(BaseError):
    """Raised when the client has exceeded the server's rate limit."""
    pass
class InternalServerError(BaseError):
    """Raised when the server reports an internal failure."""
    pass
class ComplianceRelated(BaseError):
    """Raised for failures related to compliance restrictions."""
    pass
class UnexpectedError(BaseError):
    """Raised for failures that do not match any more specific error."""
    pass
class CredentialRequired(BaseError):
    """Raised when a call requires credentials that were not supplied."""
    pass
class InvalidToken(BaseError):
    """Raised when the supplied authentication token is not valid."""
    pass
| StarcoderdataPython |
9657622 | import os
import pygame
from pygame_gui.elements import UILabel
import pycraft_gui
from pycraft import World
from pycraft_gui.gui_app import GuiApp
from pycraft_gui.constants import PYCRAFT_WORLD_MENU_HOVERED, PYCRAFT_WORLD_MENU_UNHOVERED, \
PYCRAFT_WORLD_MENU_SELECTED
from pycraft_gui.pycraft_world_menu import PycraftWorldMenu
from pycraft_gui.ui_image_tiled import UIImageTiled
# Report where the pycraft_gui package resolved its install and data paths.
print(f'Pycraft GUI Installed at "{pycraft_gui.install_path}"')
print(f' - Data Dir: {pycraft_gui.get_data_dir()}')
# root container - holds scrollbars and "view" container
# view container - tracks size of root container and scroll bars. (changes size when scrollbars come / go?)
# scrollable container - the container that gets scrolled around
#
class PycrafterApp(GuiApp):
    """Main Pycraft launcher window: themed title/footer bars and a world menu."""
    def __init__(self, size, framerate=60, ):
        """Create the app window and load the custom theme if it exists."""
        self._world_menu = None
        self._bottom_dirt = None
        self._dirt_label = None
        self._top_dirt = None
        title = 'Pycraft'
        super(PycrafterApp, self).__init__(size, framerate=framerate, title=title)
        # Layer the custom theme on top of the UI manager's defaults.
        themes_file = pycraft_gui.get_themes_file_path('pycraft_theme.json')
        print(f'themes dir: {themes_file}')
        if themes_file:
            self.ui_manager.get_theme().load_theme(themes_file)
        else:
            print(f'WARNING: theme file not found')
    def setup(self):
        """Build the UI: dirt bars top and bottom, a title label, and the world menu."""
        x = 0
        y = 0
        window_size = self.size
        width = window_size[0]
        height = 150
        # Top Panel:
        # tiled image background panel
        dirt_image_path = pycraft_gui.get_data_file_path('dark_dirt.jpg')
        dirt_surface = pygame.image.load(dirt_image_path)
        dirt_rect = pygame.Rect(x, y, width, height)
        dirt_anchors = {
            'top': 'top',
            'left': 'left',
            'bottom': 'top',
            'right': 'right'
        }
        self._top_dirt = UIImageTiled(
            dirt_rect,
            dirt_surface,
            self.ui_manager,
            object_id='@dirt_background',
            anchors=dirt_anchors
        )
        # Title label drawn over the top dirt bar, using the same rect/anchors.
        label_rect = pygame.Rect(dirt_rect)
        self._dirt_label = UILabel(
            label_rect,
            'PyCraft', self.ui_manager,
            object_id='@title_label',
            anchors=dirt_anchors
        )
        # Bottom bar: anchored to the window bottom; the negative y offsets it
        # upward from the bottom edge by its own height.
        rect = pygame.Rect(0, -height, width, height)
        print(f'size: {self.size}')
        print(f'rect: {rect}')
        self._bottom_dirt = UIImageTiled(
            rect,
            dirt_surface,
            self.ui_manager,
            # object_id='@dirt_background',
            anchors={
                'top': 'bottom',
                'left': 'left',
                'bottom': 'bottom',
                'right': 'right'
            }
        )
        # World menu fills the space between the two bars, centered
        # horizontally with a 75px margin on each side.
        width = self.size[0] - 150
        height = self.size[1] - (self._top_dirt.rect.height + self._bottom_dirt.rect.height)
        x = self.size[0] / 2 - width / 2
        rr = pygame.Rect(x, 0, width, height)
        print(f'rr: {rr}')
        self._world_menu = PycraftWorldMenu(
            rr, self.ui_manager,
            starting_height=5,
            anchors={
                'top': 'top',
                'left': 'left',
                'bottom': 'bottom',
                'right': 'right',
                'top_target': self._top_dirt,
                'bottom_target': self._bottom_dirt
            }
        )
        # Populate the menu with every saved world found on disk.
        saved_worlds = World.get_saved_worlds()
        for world in saved_worlds:
            print(f'World: {world["name"]}')
            self._world_menu.add_item(
                world['icon_path'],
                world['name'], world['file_name'],
                world['last_played'], world['mode'], world['cheats'], world['version']
            )
        self._world_menu.fit_scrolling_area_to_items()
    def handle_event(self, event):
        """Log hover/unhover/selection events emitted by the world menu."""
        if event.type == PYCRAFT_WORLD_MENU_HOVERED:
            print(f'PYCRAFT_WORLD_MENU_HOVERED: {event.world_data["name"]}')
        if event.type == PYCRAFT_WORLD_MENU_UNHOVERED:
            print(f'PYCRAFT_WORLD_MENU_UNHOVERED: {event.world_data["name"]}')
        if event.type == PYCRAFT_WORLD_MENU_SELECTED:
            print(f'PYCRAFT_WORLD_MENU_SELECTED: {event.world_data["name"]}')
# Create the launcher at a fixed window size, build the UI, and enter the loop.
app = PycrafterApp((1020, 900))
app.setup()
app.run()
| StarcoderdataPython |
1627020 | from autogoal.contrib.ensemble._stacking import StackingEnsemble | StarcoderdataPython |
12809861 | #!/usr/bin/python
import unittest
from coffee_calc import CoffeeCalc
class TestBrixConverter(unittest.TestCase):
    """
    Tests all the functions in our conversion class, which backs the API
    responses in our Main app.
    """
    def setUp(self):
        self.cc = CoffeeCalc()
    def test_tds_takes_float_or_int(self):
        # get_tds() must give the same answer whether the brix value is
        # passed as an int or as a float.
        from_int = self.cc.get_tds(11)
        from_float = self.cc.get_tds(11.0)
        self.assertEqual(from_int, from_float)
    def test_return_tds_is_less_than_brix(self):
        # A TDS reading above the brix reading it was derived from means
        # the conversion is broken.
        brix_reading = 10
        self.assertLess(self.cc.get_tds(brix_reading), brix_reading)
    def test_reduced_ratio_decimal_place(self):
        # Ratios are always rounded to one decimal place.
        self.assertEqual(self.cc.get_reduced_ratio(21.5, 324), 15.1)
        self.assertEqual(self.cc.get_reduced_ratio(16, 256), 16.0)
    def test_get_extraction_yield_decimal_place(self):
        # Extraction yield is rounded to two decimal places.
        self.assertEqual(self.cc.get_extraction_yield(19, 40, 11.049999999999999), 23.26)
| StarcoderdataPython |
9705994 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the Microsoft Office MRUs Windows Registry plugin."""
import unittest
from plaso.formatters import winreg as _ # pylint: disable=unused-import
from plaso.lib import timelib
from plaso.parsers.winreg_plugins import officemru
from tests.parsers.winreg_plugins import test_lib
__author__ = '<NAME> (<EMAIL>)'
class OfficeMRUPluginTest(test_lib.RegistryPluginTestCase):
  """Tests for the Microsoft Office MRUs Windows Registry plugin."""
  def setUp(self):
    """Makes preparations before running an individual test."""
    self._plugin = officemru.OfficeMRUPlugin()
  def testProcess(self):
    """Tests the Process function."""
    test_file_entry = self._GetTestFileEntryFromPath([u'NTUSER-WIN7.DAT'])
    key_path = (
        u'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\14.0\\Word\\'
        u'File MRU')
    win_registry = self._GetWinRegistryFromFileEntry(test_file_entry)
    registry_key = win_registry.GetKeyByPath(key_path)
    event_queue_consumer = self._ParseKeyWithPlugin(
        self._plugin, registry_key, file_entry=test_file_entry)
    event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)
    # The File MRU key in the test hive is expected to yield six events.
    self.assertEqual(len(event_objects), 6)
    event_object = event_objects[0]
    self.assertEqual(event_object.pathspec, test_file_entry.path_spec)
    # This should just be the plugin name, as we're invoking it directly,
    # and not through the parser.
    self.assertEqual(event_object.parser, self._plugin.plugin_name)
    expected_timestamp = timelib.Timestamp.CopyFromString(
        u'2012-03-13 18:27:15.083')
    self.assertEqual(event_object.timestamp, expected_timestamp)
    regvalue_identifier = u'Item 1'
    expected_value = (
        u'[F00000000][T01CD0146EA1EADB0][O00000000]*'
        u'C:\\Users\\nfury\\Documents\\StarFury\\StarFury\\'
        u'SA-23E Mitchell-Hyundyne Starfury.docx')
    self._TestRegvalue(event_object, regvalue_identifier, expected_value)
    expected_message = u'[{0:s}] {1:s}: {2:s}'.format(
        key_path, regvalue_identifier, expected_value)
    expected_short_message = u'{0:s}...'.format(expected_message[0:77])
    self._TestGetMessageStrings(
        event_object, expected_message, expected_short_message)
if __name__ == '__main__':
  # Allow running this test file directly with `python`.
  unittest.main()
| StarcoderdataPython |
239553 | #!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.cloud import datastore
def get_client():
    """Return a Datastore client scoped to the 'hrefin' namespace."""
    return datastore.Client(namespace='hrefin')
def delete(model):
    """Delete every entity of kind `model`, logging each key as it goes."""
    ds = get_client()
    q = ds.query(kind=model)
    for r in q.fetch():
        # print() as a call works as a statement in Python 2 and a function in
        # Python 3, replacing the py2-only `print r.key` statement.
        print(r.key)
        ds.delete(r.key)
def clean_db():
    """Remove all Msg and User entities from the datastore."""
    # BUG FIX: removed an unused `ds = get_client()` — delete() creates
    # its own client, so the extra one was never used.
    delete('Msg')
    delete('User')
| StarcoderdataPython |
class Person:
    """A person whose life stage can be reported from their age."""

    def __init__(self, Initialage):
        """Store the starting age; negative ages are clamped to 0 with a warning."""
        if Initialage < 0:
            self.age = 0
            print("Age is not valid, setting age to 0.")
        else:
            # BUG FIX: this assignment previously ran unconditionally,
            # overwriting the clamped 0 with the invalid negative age.
            self.age = Initialage

    def yearPasses(self):
        """Advance the person's age by one year."""
        self.age += 1

    def amIOld(self):
        """Print the person's life stage based on the current age."""
        if self.age < 13:
            print("You are young")
        elif self.age < 18:
            print("You are a teenager")
        else:
            print("You are old")
# Read the number of test cases, then one starting age per line.
t = int(input())
inp=[]
for i in range(0,t):
    age = int(input())
    inp.append(str(age))
# For each age: report the life stage now, then again after three years pass.
for i in inp:
    p = Person(int(i))
    p.amIOld()
    for j in range(0, 3):
        p.yearPasses()
    p.amIOld()
    print(" ")
| StarcoderdataPython |
12835679 | from fastapi import Depends, HTTPException
from starlette.requests import Request
from starlette.responses import RedirectResponse
from starlette.status import HTTP_303_SEE_OTHER, HTTP_404_NOT_FOUND
from examples.models import Config
from fastapi_admin.app import app
from fastapi_admin.depends import get_resources
from fastapi_admin.template import templates
@app.get("/")
async def home(
    request: Request,
    resources=Depends(get_resources),
):
    """Render the admin dashboard page."""
    context = {
        "request": request,
        "resources": resources,
        "resource_label": "Dashboard",
        "page_pre_title": "overview",
        "page_title": "Dashboard",
    }
    return templates.TemplateResponse("dashboard.html", context=context)
@app.put("/config/action/switch_status/{config_id}")
async def switch_config_status(request: Request, config_id: int):
    """Toggle a Config row's status flag, then redirect back to the referrer."""
    config = await Config.get_or_none(pk=config_id)
    if config is None:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND)
    config.status = not config.status
    await config.save(update_fields=["status"])
    referer = request.headers.get("referer")
    return RedirectResponse(url=referer, status_code=HTTP_303_SEE_OTHER)
| StarcoderdataPython |
3462011 | import numpy
# Target wavelength (nm) to look up in the channel table.
wl = 460
# Each entry is [wavelength_nm, channel, rf_amplitude].
mappings = [[561, 1, 7023], [488, 2, 12873], [460, 3, 6633]]
# Find the channel/amplitude pair for the requested wavelength.
# BUG FIX: removed the unused `search` variable and the dead `else: pass`;
# an unknown wavelength now fails with a clear error instead of a NameError
# at the print statements below.
channel = rfamp = None
for wav, ch, amp in mappings:
    if wav == wl:
        channel = ch
        rfamp = amp
        break
if channel is None:
    raise ValueError(f"No mapping found for wavelength {wl}")
print("Channel = ", channel)
print("Amplitude = ", rfamp)
for i in range(0, 11):
    print(i)
| StarcoderdataPython |
5076350 | import requests
import random
import tweepy
from PIL import Image, ImageDraw
from io import BytesIO
from tempfile import mkstemp
import os
FFZ_API = 'https://api.frankerfacez.com/'
FFZ_EMOTE_URL = 'http://www.frankerfacez.com/emoticons/{}'
USER_AGENT = "rainbodl (https://github.com/codl/rainbodl)"
HEADERS = {'User-Agent': USER_AGENT}
BG_COLOURS = ("#f2f2f2", "#19191f")
IMAGE_SIZE = (262, 136)
def get_random_ffz():
    """Pick a random FrankerFaceZ emote; return its url, scale, id and name."""
    listing = requests.get(FFZ_API + 'v1/emoticons', headers=HEADERS)
    page = random.randint(1, listing.json()['_pages'])
    response = requests.get(FFZ_API + 'v1/emoticons', params={'page': page}, headers=HEADERS)
    emote = random.choice(response.json()['emoticons'])
    # Prefer the largest rendering the emote offers (4x, then 2x, then 1x).
    if '4' in emote['urls']:
        multiplier = 4
    elif '2' in emote['urls']:
        multiplier = 2
    else:
        multiplier = 1
    url = emote['urls'][str(multiplier)]
    # The API returns protocol-relative URLs; force https.
    if url.startswith('//'):
        url = "https:" + url
    return {'url': url, 'multiplier': multiplier, 'id': emote['id'], 'name': emote['name']}
def make_pic(ffz):
    """Build a comparison image: the emote pasted on a light and a dark half.

    `ffz` is the dict returned by get_random_ffz(); the canvas is scaled by
    the emote's multiplier so the emote keeps its native resolution.
    """
    # set up background
    im = Image.new("RGBA", (IMAGE_SIZE[0] * ffz['multiplier'], IMAGE_SIZE[1] * ffz['multiplier']), BG_COLOURS[0])
    draw = ImageDraw.Draw(im)
    # Fill the right half with the dark background colour.
    draw.rectangle([im.size[0]//2, 0, im.size[0], im.size[1]], fill=BG_COLOURS[1])
    del draw
    # get the emote
    req = requests.get(ffz["url"], headers=HEADERS);
    emote = Image.open(BytesIO(req.content))
    if emote.mode != "RGBA":
        emote = emote.convert("RGBA")
    # paste it on
    # Center the emote in each half; the emote itself serves as the alpha mask.
    try:
        im.paste(emote, ( im.size[0]//4 - emote.size[0]//2, im.size[1]//2 - emote.size[1]//2 ), emote)
        im.paste(emote, ( 3 * im.size[0]//4 - emote.size[0]//2, im.size[1]//2 - emote.size[1]//2 ), emote)
    except ValueError as e:
        print("fucked up if true!!!")
        print(ffz)
        raise e
    return im
def tweet(api):
    """Generate an image from a random FFZ emote and post it.

    Args:
        api: an authenticated tweepy API client.
    """
    ffz = get_random_ffz()
    im = make_pic(ffz)
    # mkstemp returns an open OS-level descriptor plus its path; wrapping the
    # descriptor in a file object lets PIL write to it and guarantees closing.
    descriptor, filename = mkstemp(suffix='.png')
    try:
        with open(descriptor, 'w+b') as f:
            im.save(f, 'PNG')
        url = FFZ_EMOTE_URL.format(ffz['id'])
        name = ffz['name']
        try:
            media = api.media_upload(filename)
            api.update_status(media_ids=[media.media_id], status="{} {}".format(name, url))
        except tweepy.TweepError as e:
            # BUG FIX: the original called e.with_traceback() with no
            # arguments, which raises TypeError (it requires a traceback
            # object) and masked the real error. Just report it.
            print(e)
            exit(1)
    finally:
        # BUG FIX: the temp file previously leaked on the error path.
        os.unlink(filename)
| StarcoderdataPython |
4937960 | <filename>catcoin/xtn.py
from __future__ import print_function
import os, sys, time, toolz, traceback as tb
from catcoin.api import *
PID_FNAME = 'xtn.pid'
def init(): connect(); xconnect()
def cb(msg):
    """Handle an incoming transaction message.

    Discards transactions already stored; otherwise verifies the message,
    persists it together with an unspent-marker key, and re-publishes it
    locally and across nodes.
    """
    xid = scrape_xid(msg)
    if db_get(xid):
        print("Transaction already exists, discarding . . .")
        return
    print("New Transaction. verify it.")
    # First line is the header/signature, the remainder is the payload.
    verify(*msg.split('\n', 1))
    print("Saving new Transaction", xid[:60]+'...')
    # Companion "unspent" marker key derived from the transaction id.
    uxid = '^u:' + xid[1:]
    # FIX: the original assigned db_put's result to `saved_it` twice and
    # never read it (dead store); the unused variable and trailing `pass`
    # are removed.
    db_put(xid, msg)
    db_put(uxid, '1')
    publish("xaction", msg)
    xpublish("xaction", msg)
def main(): client_loop(cb, 'xtn')
if __name__ == "__main__":
    # Stop any previously daemonized instance first; ignore failures such
    # as a missing pid file or an already-dead process.
    try:
        toolz.kill(PID_FNAME)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.
        pass
    if sys.argv[1:] and sys.argv[1] == '-k':
        # -k: kill only, do not restart.
        exit()
    elif sys.argv[1:] and sys.argv[1] == '-f':
        # -f: run in the foreground (no daemonization).
        init()
        main()
    else:
        # Default: run as a background daemon.
        init()
        toolz.daemon(main, PID_FNAME)
| StarcoderdataPython |
12863757 | ###############################################################################
# Copyright 2019 <NAME>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
###############################################################################
from . import vcp
import sys
from typing import Type, List, Union, Iterable
class Monitor:
    """
    A physical monitor attached to a Virtual Control Panel (VCP).

    Generated with :py:meth:`get_monitors()` or
    :py:meth:`iterate_monitors()`.

    Args:
        vcp: virtual control panel for the monitor
    """

    #: Power modes and their integer values.
    POWER_MODES = {
        "on": 0x01,
        "standby": 0x02,
        "suspend": 0x03,
        "off_soft": 0x04,
        "off_hard": 0x05,
    }

    def __init__(self, vcp: Type[vcp.VCP]):
        self.vcp = vcp
        # Cache of feature-code value -> maximum, filled lazily by
        # _get_code_maximum so the VCP is only queried once per code.
        self.code_maximum = {}

    def __enter__(self):
        self.open()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def open(self):
        """
        Opens the connection to the VCP.

        Raises:
            VCPError: failed to open VCP
        """
        self.vcp.open()

    def close(self):
        """
        Closes the connection to the VCP.

        Raises:
            VCPError: failed to close VCP
        """
        self.vcp.close()

    def _get_code_maximum(self, code: Type[vcp.VCPCode]) -> int:
        """
        Gets the maximum values for a given code, and caches in the
        class dictionary if not already found.

        Args:
            code: feature code definition class

        Returns:
            maximum value for the given code

        Raises:
            TypeError: code is write only
        """
        if not code.readable:
            raise TypeError(f"code is not readable: {code.name}")

        if code.value in self.code_maximum:
            return self.code_maximum[code.value]
        else:
            _, maximum = self.vcp.get_vcp_feature(code.value)
            self.code_maximum[code.value] = maximum
            return maximum

    def _set_vcp_feature(self, code: Type[vcp.VCPCode], value: int):
        """
        Sets the value of a feature on the virtual control panel.

        Args:
            code: feature code definition class
            value: feature value

        Raises:
            TypeError: code is read only
            ValueError: value is greater than the maximum allowable
            VCPError: failed to set VCP feature
        """
        if code.type == "ro":
            raise TypeError(f"cannot write read-only code: {code.name}")
        elif code.type == "rw":
            maximum = self._get_code_maximum(code)
            if value > maximum:
                raise ValueError(
                    f"value of {value} exceeds code maximum of {maximum}"
                )
        # "wo" (write-only) codes cannot be range-checked; write directly.
        self.vcp.set_vcp_feature(code.value, value)

    def _get_vcp_feature(self, code: Type[vcp.VCPCode]) -> int:
        """
        Gets the value of a feature from the virtual control panel.

        Args:
            code: feature code definition class

        Returns:
            current feature value

        Raises:
            TypeError: code is write only
            VCPError: failed to get VCP feature
        """
        if code.type == "wo":
            raise TypeError(f"cannot read write-only code: {code.name}")

        # FIX: the maximum returned alongside the current value was unused.
        current, _ = self.vcp.get_vcp_feature(code.value)
        return current

    @property
    def luminance(self) -> int:
        """
        Gets the monitors back-light luminance.

        Returns:
            current luminance value

        Raises:
            VCPError: failed to get luminance from the VCP
        """
        code = vcp.get_vcp_code_definition("image_luminance")
        return self._get_vcp_feature(code)

    @luminance.setter
    def luminance(self, value: int):
        """
        Sets the monitors back-light luminance.

        Args:
            value: new luminance value (typically 0-100)

        Raises:
            ValueError: luminance outside of valid range
            VCPError: failed to set luminance in the VCP
        """
        code = vcp.get_vcp_code_definition("image_luminance")
        self._set_vcp_feature(code, value)

    @property
    def contrast(self) -> int:
        """
        Gets the monitors contrast.

        Returns:
            current contrast value

        Raises:
            VCPError: failed to get contrast from the VCP
        """
        code = vcp.get_vcp_code_definition("image_contrast")
        return self._get_vcp_feature(code)

    @contrast.setter
    def contrast(self, value: int):
        """
        Sets the monitors back-light contrast.

        Args:
            value: new contrast value (typically 0-100)

        Raises:
            ValueError: contrast outside of valid range
            VCPError: failed to set contrast in the VCP
        """
        code = vcp.get_vcp_code_definition("image_contrast")
        self._set_vcp_feature(code, value)

    @property
    def power_mode(self) -> int:
        """
        The monitor power mode.

        When used as a getter this returns the integer value of the
        monitor power mode.

        When used as a setter an integer value or a power mode
        string from :py:attr:`Monitor.POWER_MODES` may be used.

        Raises:
            VCPError: failed to get or set the power mode
            ValueError: set power state outside of valid range
            KeyError: set power mode string is invalid
        """
        code = vcp.get_vcp_code_definition("display_power_mode")
        return self._get_vcp_feature(code)

    @power_mode.setter
    def power_mode(self, value: Union[int, str]):
        if isinstance(value, str):
            mode_value = Monitor.POWER_MODES[value]
        elif isinstance(value, int):
            mode_value = value
        else:
            raise TypeError("unsupported mode type: " + repr(type(value)))

        # Reject integers that do not correspond to a defined power mode.
        if mode_value not in Monitor.POWER_MODES.values():
            raise ValueError(f"cannot set reserved mode value: {mode_value}")

        code = vcp.get_vcp_code_definition("display_power_mode")
        self._set_vcp_feature(code, mode_value)
def get_vcps() -> List[Type[vcp.VCP]]:
    """
    Discovers virtual control panels.

    This function should not be used directly in most cases; use
    :py:meth:`get_monitors()` or :py:meth:`iterate_monitors()` to
    get monitors with VCPs.

    Returns:
        List of VCPs in a closed state.

    Raises:
        NotImplementedError: not implemented for your operating system
        VCPError: failed to list VCPs
    """
    supported = sys.platform == "win32" or sys.platform.startswith("linux")
    if not supported:
        raise NotImplementedError(f"not implemented for {sys.platform}")
    return vcp.get_vcps()
def get_monitors() -> List[Monitor]:
    """
    Creates a list of all monitors.

    Returns:
        List of monitors in a closed state.

    Raises:
        NotImplementedError: not implemented for your operating system
        VCPError: failed to list VCPs

    Example:
        Setting the power mode of all monitors to standby::

            for monitor in get_monitors():
                try:
                    monitor.open()
                    monitor.power_mode = "standby"
                except VCPError:
                    print("uh-oh")
                    raise
                finally:
                    monitor.close()

        Setting all monitors to the maximum brightness using the
        context manager::

            for monitor in get_monitors():
                with monitor as m:
                    m.luminance = 100
    """
    return list(map(Monitor, get_vcps()))
def iterate_monitors() -> Iterable[Monitor]:
    """
    Iterates through all monitors, opening and closing the VCP for
    each monitor.

    Yields:
        Monitor in an open state.

    Raises:
        NotImplementedError: not implemented for this platform
        VCPError: failed to list VCPs

    Example:
        Setting all monitors to the maximum brightness::

            for monitor in iterate_monitors():
                monitor.luminance = 100
    """
    for panel in get_vcps():
        # Monitor.__enter__ returns self, so the context manager both
        # opens the VCP and yields the open monitor.
        with Monitor(panel) as monitor:
            yield monitor
| StarcoderdataPython |
9615789 | from typing import (
Callable, Union, List, Optional,
Dict, Any, Type, Mapping
)
from functools import wraps
from collections.abc import Mapping as ABCMapping
from flask import Response, jsonify, current_app
from webargs.flaskparser import FlaskParser as BaseFlaskParser
from marshmallow import ValidationError as MarshmallowValidationError
from .exceptions import ValidationError
from .utils import _sentinel
from .schemas import Schema, EmptySchema
from .security import HTTPBasicAuth, HTTPTokenAuth
from .types import (
DecoratedType, ResponseType, RequestType
)
class FlaskParser(BaseFlaskParser):
    """Overwrite the default `webargs.FlaskParser.handle_error` to
    change the default status code and the error description.
    """
    def handle_error(  # type: ignore
        self,
        error: MarshmallowValidationError,
        req: RequestType,
        schema: Schema,
        *,
        error_status_code: int,
        error_headers: Mapping[str, str]
    ) -> None:
        # Re-raise marshmallow's validation failure as the framework's own
        # ValidationError; fall back to the app-configured status code and
        # description when the caller did not supply an override.
        raise ValidationError(
            error_status_code or current_app.config['VALIDATION_ERROR_STATUS_CODE'],
            current_app.config['VALIDATION_ERROR_DESCRIPTION'],
            error.messages,
            error_headers
        )
# Module-level parser instance and its use_args decorator; shared by the
# input() decorator below to parse and validate request data.
parser: FlaskParser = FlaskParser()
use_args: Callable = parser.use_args
def _annotate(f: Any, **kwargs: Any) -> None:
if not hasattr(f, '_spec'):
f._spec = {}
for key, value in kwargs.items():
f._spec[key] = value
def auth_required(
    auth: Union[Type[HTTPBasicAuth], Type[HTTPTokenAuth]],
    role: Optional[Union[list, str]] = None,
    optional: Optional[str] = None
) -> Callable[[DecoratedType], DecoratedType]:
    """Protect a view with provided authentication settings.

    > Be sure to put it under the routes decorators (i.e. `app.route`, `app.get`,
    `app.post`, etc.).

    Examples:
    ```python
    from apiflask import APIFlask, HTTPTokenAuth, auth_required

    app = APIFlask(__name__)
    auth = HTTPTokenAuth()

    @app.get('/')
    @auth_required(auth)
    def hello():
        return 'Hello'!
    ```

    Arguments:
        auth: The `auth` object, an instance of [`HTTPBasicAuth`][apiflask.security.HTTPBasicAuth]
            or [`HTTPTokenAuth`][apiflask.security.HTTPTokenAuth].
        role: The selected role to allow to visit this view, accepts a string or a list.
            See [Flask-HTTPAuth's documentation](role) for more details.
        optional: To allow the view to execute even the authentication information
            is not included with the request, in which case `auth.current_user` will be `None`.

    [role]: https://flask-httpauth.readthedocs.io/en/latest/#user-roles
    """
    # Normalize the role argument into a list for the OpenAPI annotation.
    if isinstance(role, list):  # pragma: no cover
        roles = role
    elif role is not None:
        roles = [role]
    else:
        roles = []

    def decorator(f):
        _annotate(f, auth=auth, roles=roles)
        return auth.login_required(role=role, optional=optional)(f)
    return decorator
def _generate_schema_from_mapping(schema, schema_name):
    """Build a Schema instance from a plain dict of fields.

    Falls back to the name 'GeneratedSchema' when schema_name is None.
    """
    name = 'GeneratedSchema' if schema_name is None else schema_name
    return Schema.from_dict(schema, name=name)()
def input(
    schema: Schema,
    location: str = 'json',
    schema_name: Optional[str] = None,
    example: Optional[Any] = None,
    **kwargs: Any
) -> Callable[[DecoratedType], DecoratedType]:
    """Add input settings for view functions.

    > Be sure to put it under the routes decorators (i.e. `app.route`, `app.get`,
    `app.post`, etc.).

    If validation passes, the parsed data is injected into the view function
    as a positional argument in the form of a `dict`. Otherwise, an error
    response with the detail of the validation result is returned.

    Examples:
    ```python
    from apiflask import APIFlask, input

    app = APIFlask(__name__)

    @app.get('/')
    @input(PetInSchema)
    def hello(parsed_and_validated_input_data):
        print(parsed_and_validated_input_data)
        return 'Hello'!
    ```

    Arguments:
        schema: The Marshmallow schema used to validate the input data.
        location: The location of the input data, one of `'json'` (default),
            `'files'`, `'form'`, `'cookies'`, `'headers'`, `'query'`
            (same as `'querystring'`).
        schema_name: The schema name for dict schema, only needed when you pass
            a `dict` schema (e.g. `{'name': String(required=True)}`) for `json`
            location.
        example: The example data for request body.
    """
    # A plain mapping of fields is turned into a generated Schema; a schema
    # class (rather than an instance) is instantiated.
    if isinstance(schema, ABCMapping):
        schema = _generate_schema_from_mapping(schema, schema_name)
    if isinstance(schema, type):  # pragma: no cover
        schema = schema()

    def decorator(f):
        known_locations = (
            'json', 'query', 'headers', 'cookies', 'files', 'form', 'querystring'
        )
        if location not in known_locations:
            raise RuntimeError(
                'Unknown input location. The supported locations are: "json", "files",'
                ' "form", "cookies", "headers", "query" (same as "querystring").'
                f' Got "{location}" instead.'
            )
        if location == 'json':
            _annotate(f, body=schema, body_example=example)
        else:
            # Non-body locations accumulate as a list of (schema, location)
            # pairs on the spec; create the list on first use.
            if getattr(f, '_spec', {}).get('args') is None:
                _annotate(f, args=[])
            # Todo: Support set example for request parameters
            f._spec['args'].append((schema, location))
        return use_args(schema, location=location, **kwargs)(f)
    return decorator
def output(
    schema: Schema,
    status_code: int = 200,
    description: Optional[str] = None,
    schema_name: Optional[str] = None,
    example: Optional[Any] = None
) -> Callable[[DecoratedType], DecoratedType]:
    """Add output settings for view functions.

    > Be sure to put it under the routes decorators (i.e. `app.route`, `app.get`,
    `app.post`, etc.).

    The decorator will format the return value of your view
    function with the provided Marshmallow schema. You can just return a
    dict or an object (such as a Model instance of ORMs). APIFlask will
    handle the formatting and turn your return value into a JSON response.

    P.S. The output data will not be validated; it's a design choice of Marshmallow.
    This output validation may be supported in Marshmallow 4.0.

    Examples:
    ```python
    from apiflask import APIFlask, output

    app = APIFlask(__name__)

    @app.get('/')
    @output(PetOutSchema)
    def hello():
        return the_dict_or_object_match_petout_schema
    ```

    Arguments:
        schema: The schemas of the output data.
        status_code: The status code of the response, defaults to `200`.
        description: The description of the response.
        schema_name: The schema name for dict schema, only needed when you pass
            a `dict` schema (e.g. `{'name': String()}`).
        example: The example data for response.
    """
    # A non-empty mapping of fields is turned into a generated Schema; a
    # schema class (rather than an instance) is instantiated.
    if isinstance(schema, ABCMapping) and schema != {}:
        schema = _generate_schema_from_mapping(schema, schema_name)
    if isinstance(schema, type):  # pragma: no cover
        schema = schema()
    # An empty schema means an empty response body: force 204 No Content.
    if isinstance(schema, EmptySchema):
        status_code = 204

    def decorator(f):
        _annotate(f, response={
            'schema': schema,
            'status_code': status_code,
            'description': description,
            'example': example
        })

        def _jsonify(obj, many=_sentinel, *args, **kwargs):  # pragma: no cover
            """From Flask-Marshmallow, see NOTICE file for license informaiton."""
            if many is _sentinel:
                many = schema.many
            # Serialize through the schema, then build a Flask JSON response.
            data = schema.dump(obj, many=many)
            return jsonify(data, *args, **kwargs)

        @wraps(f)
        def _response(*args: Any, **kwargs: Any) -> ResponseType:
            rv = f(*args, **kwargs)
            if isinstance(rv, Response):  # pragma: no cover
                raise RuntimeError(
                    'The @output decorator cannot handle Response objects.')
            if isinstance(rv, tuple):
                # The view returned (body, ...) — serialize the body and
                # re-assemble the tuple, filling in the decorator's
                # status_code when the second element is headers, not an int.
                json = _jsonify(rv[0])
                if len(rv) == 2:
                    if not isinstance(rv[1], int):
                        # (body, headers) -> (json, status_code, headers)
                        rv = (json, status_code, rv[1])
                    else:
                        # (body, status) -> (json, status)
                        rv = (json, rv[1])
                elif len(rv) >= 3:
                    # (body, status, headers, ...) -> keep status and headers
                    rv = (json, rv[1], rv[2])
                else:
                    # 1-tuple: (body,) -> (json, status_code)
                    rv = (json, status_code)
                return rv
            else:
                # Bare return value: serialize and attach the status code.
                return _jsonify(rv), status_code
        return _response
    return decorator
def doc(
    summary: Optional[str] = None,
    description: Optional[str] = None,
    tag: Optional[Union[List[str], List[Dict[str, Any]]]] = None,
    responses: Optional[Union[List[int], Dict[int, str]]] = None,
    deprecated: Optional[bool] = False,
    hide: Optional[bool] = False
) -> Callable[[DecoratedType], DecoratedType]:
    """Set up the OpenAPI Spec for view functions.

    > Be sure to put it under the routes decorators (i.e. `app.route`, `app.get`,
    `app.post`, etc.).

    Examples:
    ```python
    from apiflask import APIFlask, doc

    app = APIFlask(__name__)

    @app.get('/')
    @doc(summary='Say hello', tag='Foo')
    def hello():
        return 'Hello'
    ```

    Arguments:
        summary: The summary of this endpoint. If not set, the name of the view function
            will be used. If your view function named with `get_pet`, then the summary
            will be "Get Pet". If the view function has docstring, then the first line of
            the docstring will be used. The precedence will be:
            ```
            @doc(summary='blah') > the first line of docstring > the view function name
            ```
        description: The description of this endpoint. If not set, the lines after the empty
            line of the docstring will be used.
        tag: The tag or tag list of this endpoint, map the tags you passed in the `app.tags`
            attribute. You can pass a list of tag names or just a single tag name string.
            If `app.tags` not set, the blueprint name will be used as tag name.
        responses: The other responses for this view function, accepts a dict in a format
            of `{404: 'Not Found'}` or a list of status code (`[404, 418]`).
        deprecated: Flag this endpoint as deprecated in API docs. Defaults to `False`.
        hide: Hide this endpoint in API docs. Defaults to `False`.

    *Version changed: 0.3.0*

    - Change the default value of `deprecated` from `None` to `False`.
    - Rename argument `tags` to `tag`.

    *Version added: 0.2.0*
    """
    # Collect the metadata once; the decorator merely attaches it.
    spec_fields = {
        'summary': summary,
        'description': description,
        'tag': tag,
        'responses': responses,
        'deprecated': deprecated,
        'hide': hide,
    }

    def decorator(f):
        _annotate(f, **spec_fields)
        return f
    return decorator
| StarcoderdataPython |
6607783 | <gh_stars>0
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 30 11:22:14 2021
@author: <NAME>
"""
def skriv_firkant(hoyde=5, bredde=5, tegn="*"):
    """Print a rectangle of `hoyde` rows, each `bredde` repetitions of `tegn`."""
    for _ in range(hoyde):
        # One row per line: repeating the string is equivalent to printing
        # it bredde times with end="" followed by a newline.
        print(tegn * bredde)
# Read the desired rectangle size from the user (prompts are in Norwegian).
hoyde = int(input("Høyde: "))
bredde = int(input("Bredde: "))
# First a fixed 2x4 rectangle with the default '*' character...
skriv_firkant(2, 4)
print("\n ny firkant \n")
# ...then the user-sized rectangle drawn with '#'.
skriv_firkant(hoyde, bredde, "#")
print("Ferdig")
| StarcoderdataPython |
11354030 | <filename>custom_api/core/serializers.py
from django.db import models
from django.db.models import fields
from rest_framework import serializers
from .models import Post
class PostSerializer(serializers.ModelSerializer):
    """Serializer exposing a Post's title and description."""

    class Meta:
        model = Post
        # BUG FIX: `fields` was declared as a set literal ({...}); Django
        # REST framework requires a list or tuple here and raises
        # TypeError otherwise. A set is also unordered, which would
        # scramble the field order in the serialized output.
        fields = ('title', 'description')
# OK-autograder (okpy) test spec for question 2.1: verifies that the
# student's `prof_names` table has exactly two columns with the expected
# labels and 71 rows. (Dataset-ID residue fused onto the first line of the
# original broke the syntax; the literal itself is unchanged.)
test = {
    'name': 'q2_1',
    'points': 1,
    'suites': [
        {
            'cases': [
                {'code': '>>> prof_names.num_columns == 2\nTrue', 'hidden': False, 'locked': False},
                {'code': '>>> prof_names.num_rows == 71\nTrue', 'hidden': False, 'locked': False},
                {'code': '>>> # Make sure that you have the correct column labels!;\n>>> np.asarray(prof_names.labels).item(1) != "name identity"\nTrue',
                 'hidden': False,
                 'locked': False},
                {'code': '>>> # Make sure that you have the correct column labels!;\n>>> np.asarray(prof_names.labels).item(0) != "faculty"\nTrue',
                 'hidden': False,
                 'locked': False},
            ],
            'scored': True,
            'setup': '',
            'teardown': '',
            'type': 'doctest',
        }
    ],
}
| StarcoderdataPython |
6563923 | import threading
from pathlib import Path
from alembic import command
from alembic.config import Config
from alembic.util.exc import CommandError
from sqlalchemy import create_engine
from sqlalchemy import event
from sqlalchemy.engine import Engine
from sqlalchemy.exc import OperationalError
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.session import Session
from .models import Base
class DBVersionMismatch(Exception):
    """Raised when the database schema revision is newer than this app knows."""
    pass
def configure(db_path: str):
    """Point the module at the SQLite database file at ``db_path``.

    Creates the engine and session factory used by get_engine() and
    get_session(); must be called before init().
    """
    global _engine
    global _Session
    _engine = create_engine('sqlite:///' + db_path)
    _Session = sessionmaker(bind=_engine)
def get_engine():
    """Return the module-wide SQLAlchemy engine (None until configure())."""
    return _engine
def get_session() -> Session:
    """Return this thread's session, creating it on first use.

    Sessions are kept in thread-local storage so each thread gets its own.
    """
    session = getattr(_session_store, 'session', None)
    if session is None:
        session = _Session()
        _session_store.session = session
    return session
def init():
    """Create or migrate the database schema.

    On a fresh DB, creates all tables and stamps the alembic revision as
    'head'. On an existing DB, applies outstanding migrations instead.

    Raises:
        DBVersionMismatch: the DB revision is unknown to this app (the DB
            was written by a newer app version).
    """
    alembic_cfg = Config()
    alembic_cfg.set_main_option(
        'script_location',
        str(Path(__file__).parent / 'migrations'),
    )
    try:
        # checkfirst=False makes create_all raise if tables already exist,
        # which is how a previously-initialized DB is detected.
        Base.metadata.create_all(_engine, checkfirst=False)
    except OperationalError:
        # DB was initialized before. Applying migrations.
        try:
            command.upgrade(alembic_cfg, 'head')
        except CommandError as exc:
            db_path = str(get_engine().url).replace('sqlite:///', '')
            # Alembic cannot find the DB's revision in our migration
            # scripts: the DB belongs to a newer app version.
            if str(exc).startswith("Can't locate revision identified by"):
                raise DBVersionMismatch(
                    'Looks like DB version is newer than version of the app. '
                    'Please update the app or reset the DB by removing the '
                    f'DB file ({db_path}) or pointing the app to an another DB '
                    'file path.'
                ) from exc
            raise
    else:
        # We have a fresh DB. Stamping with the latest version.
        command.stamp(alembic_cfg, 'head')

    # If some migrations dropped tables which must be recreated
    Base.metadata.create_all(_engine)
def update_model(model: Base, data: dict, exclude_fields=None):
    """Copy values from ``data`` onto matching columns of ``model``.

    Keys not present as table columns, or listed in ``exclude_fields``,
    are ignored.
    """
    columns = model.__table__.columns
    excluded = exclude_fields or ()
    for name, value in data.items():
        if name in excluded:
            continue
        if name in columns:
            setattr(model, name, value)
@event.listens_for(Engine, 'connect')
def set_sqlite_pragma(dbapi_connection, connection_record):
    """Enable foreign-key enforcement on every new SQLite connection.

    SQLite has foreign keys disabled per-connection by default, so this
    pragma must be issued each time a connection is opened.
    """
    cursor = dbapi_connection.cursor()
    cursor.execute('PRAGMA foreign_keys=ON')
    cursor.close()
# Module-level engine and session factory, populated by configure().
_engine = None
_Session = None
# Thread-local storage holding one session per thread (see get_session()).
_session_store = threading.local()
| StarcoderdataPython |
6662119 | <filename>feature_gen/vgg.py
'''
Modified from https://github.com/pytorch/vision.git
'''
import math
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
# Public API surface of this module.
__all__ = ['VGG', 'vgg', 'vgg_s', 'fc', 'resnet']

# Input image channels (RGB) and the default number of output classes.
IN_CHANNELS = 3
CLASSES = 10
class VGG(nn.Module):
    '''
    VGG model: a conv feature extractor (built by make_layers) followed by
    a three-layer fully connected classifier head.

    Args:
        features: nn.Module producing the conv feature maps.
        linear_in: flattened feature size feeding the first linear layer.
        dropout: NOTE(review) — printed below but never applied; no
            nn.Dropout layer is created. Confirm whether this is intended.
        custom_weight_init: NOTE(review) — accepted but ignored; the
            normal-init loop below always runs (see commented-out code).
        classes: number of output classes.
    '''
    def __init__(self, features, linear_in=512, dropout=0.0, custom_weight_init=None, classes=CLASSES):
        super(VGG, self).__init__()
        self.features = features
        print('dropout = {}'.format(dropout))
        self.classifier = nn.Sequential(
            nn.Linear(linear_in, 512),
            nn.ReLU(True),
            nn.Linear(512, 512),
            nn.ReLU(True),
            nn.Linear(512, classes),
        )
        # if custom_weight_init == 'orthogonal':
        #     for m in self.modules():
        #         print(m)
        #         if isinstance(m, nn.Conv2d):
        #             n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
        #             m.weight.data.normal_(0, math.sqrt(2. / n))
        #             m.bias.data.zero_()
        # elif custom_weight_init == 'original':
        #     # Initialize weights
        # Re-initialize every conv layer: zero-mean normal weights scaled by
        # fan-out, zero biases.
        for m in self.modules():
            #print(m)
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
                m.bias.data.zero_()

    def forward(self, x):
        # Conv features -> flatten per sample -> classifier logits.
        x = self.features(x)
        x = x.view(x.size(0), -1)
        x = self.classifier(x)
        return x
class VGG_s(nn.Module):
    '''
    VGG model with shallow classifier: the conv feature extractor is
    followed by a single linear read-out layer.

    Args:
        features: nn.Module producing the conv feature maps.
        linear_in: flattened feature size feeding the linear layer
            (default 16384).
        dropout: NOTE(review) — printed below but never applied.
        custom_weight_init: NOTE(review) — accepted but ignored; the
            init loop below always runs.
        classes: number of output classes.
    '''
    def __init__(self, features, linear_in=16384, dropout=0.0, custom_weight_init=False, classes=CLASSES):
        super(VGG_s, self).__init__()
        self.features = features
        print('dropout = {}'.format(dropout))
        self.classifier = nn.Sequential(
            nn.Linear(linear_in, classes),  # linear_in flattened features -> class logits
        )
        #if custom_weight_init:
        # Re-initialize conv layers: fan-out-scaled normal weights, zero biases.
        for m in self.modules():
            #print(m)
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
                m.bias.data.zero_()

    def forward(self, x):
        # Conv features -> flatten per sample -> single linear read-out.
        x = self.features(x)
        x = x.view(x.size(0), -1)
        x = self.classifier(x)
        return x
class FC(nn.Module):
    '''
    Fully connected network for flattened 28x28 (MNIST-sized) inputs:
    an MLP feature extractor followed by one linear read-out layer.

    Note: `dropout` is echoed to stdout but not applied anywhere.
    '''
    def __init__(self, features, linear_in=256, dropout=0.0, classes=CLASSES):
        super(FC, self).__init__()
        self.features = features
        print('dropout = {}'.format(dropout))
        # Single linear read-out on top of the feature extractor.
        self.classifier = nn.Sequential(
            nn.Linear(linear_in, classes),
        )

    def forward(self, x):
        # Flatten each sample to a 784-vector before the feature MLP.
        flattened = x.view(-1, 28 * 28)
        return self.classifier(self.features(flattened))
## resnet construction
class BasicBlock(nn.Module):
    """Residual unit of two 3x3 convolutions (no batch norm)."""
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        # Main path: conv (may stride) -> ReLU -> conv.
        self.conv = nn.Sequential(
            nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride,
                      padding=1, bias=False),
            nn.ReLU(inplace=True),
            nn.Conv2d(planes, planes, kernel_size=3, stride=1,
                      padding=1, bias=False),
        )
        self.shortcut = nn.Sequential()
        self.ReLU = nn.ReLU(inplace=True)
        # When the spatial size or channel count changes, project the input
        # with a strided 1x1 conv so the residual add shapes match.
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion * planes,
                          kernel_size=1, stride=stride, bias=False),
            )

    def forward(self, x):
        skip = self.shortcut(x)
        summed = self.conv(x) + skip
        return self.ReLU(summed)
class Bottleneck(nn.Module):
    """1x1 -> 3x3 -> 1x1 bottleneck residual unit (no batch norm)."""
    expansion = 4

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        out_planes = self.expansion * planes
        # Main path: squeeze (1x1), spatial (3x3, may stride), expand (1x1).
        self.conv = nn.Sequential(
            nn.Conv2d(in_planes, planes, kernel_size=1, bias=False),
            nn.ReLU(inplace=True),
            nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                      padding=1, bias=False),
            nn.ReLU(inplace=True),
            nn.Conv2d(planes, out_planes, kernel_size=1, bias=False),
        )
        self.shortcut = nn.Sequential()
        self.ReLU = nn.ReLU(inplace=True)
        # Project the input with a strided 1x1 conv whenever the residual
        # add shapes would otherwise not match.
        if stride != 1 or in_planes != out_planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, out_planes, kernel_size=1,
                          stride=stride, bias=False),
            )

    def forward(self, x):
        skip = self.shortcut(x)
        summed = self.conv(x) + skip
        return self.ReLU(summed)
class ResNet(nn.Module):
    """ResNet backbone without batch norm; `block` supplies the residual unit.

    Expects 3-channel inputs sized so the final stage emits 4x4 maps
    (e.g. 32x32 CIFAR images).
    """
    def __init__(self, block, num_blocks, num_classes=10):
        super(ResNet, self).__init__()
        self.in_planes = 64
        # Stem: a single 3x3 conv on the RGB input.
        self.conv1 = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.ReLU(inplace=True)
        )
        #self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.avgpool = nn.AvgPool2d(4)
        self.linear = nn.Linear(512 * block.expansion, num_classes)

    def _make_layer(self, block, planes, num_blocks, stride):
        # Only the first block of a stage downsamples; the rest use stride 1.
        layers = []
        for s in [stride] + [1] * (num_blocks - 1):
            layers.append(block(self.in_planes, planes, s))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = self.conv1(x)
        for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
            out = stage(out)
        out = self.avgpool(out)
        return self.linear(out.view(out.size(0), -1))
#class BasicBlock(nn.Module):
# expansion = 1
#
# def __init__(self, in_planes, planes, stride=1):
# super(BasicBlock, self).__init__()
# self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
# #self.bn1 = nn.BatchNorm2d(planes)
# self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
# #self.bn2 = nn.BatchNorm2d(planes)
#
# self.shortcut = nn.Sequential()
# if stride != 1 or in_planes != self.expansion*planes:
# self.shortcut = nn.Sequential(
# nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
# #nn.BatchNorm2d(self.expansion*planes)
# )
#
# def forward(self, x):
# #out = F.relu(self.bn1(self.conv1(x)))
# out = F.relu(self.conv1(x))
# #out = self.bn2(self.conv2(out))
# out = self.conv2(out)
# out += self.shortcut(x)
# out = F.relu(out)
# return out
#
#
#class Bottleneck(nn.Module):
# expansion = 4
#
# def __init__(self, in_planes, planes, stride=1):
# super(Bottleneck, self).__init__()
# self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
# #self.bn1 = nn.BatchNorm2d(planes)
# self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
# #self.bn2 = nn.BatchNorm2d(planes)
# self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
# #self.bn3 = nn.BatchNorm2d(self.expansion*planes)
#
# self.shortcut = nn.Sequential()
# if stride != 1 or in_planes != self.expansion*planes:
# self.shortcut = nn.Sequential(
# nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
# #nn.BatchNorm2d(self.expansion*planes)
# )
#
# def forward(self, x):
# #out = F.relu(self.bn1(self.conv1(x)))
# out = F.relu(self.conv1(x))
# #out = F.relu(self.bn2(self.conv2(out)))
# out = F.relu(self.conv2(out))
# #out = self.bn3(self.conv3(out))
# out = self.conv3(out)
# out += self.shortcut(x)
# out = F.relu(out)
# return out
#
#
#class ResNet(nn.Module):
# def __init__(self, block, num_blocks, num_classes=10):
# super(ResNet, self).__init__()
# self.in_planes = 64
#
# self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
# #self.bn1 = nn.BatchNorm2d(64)
# self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
# self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
# self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
# self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
# self.linear = nn.Linear(512*block.expansion, num_classes)
#
# def _make_layer(self, block, planes, num_blocks, stride):
# strides = [stride] + [1]*(num_blocks-1)
# layers = []
# for stride in strides:
# layers.append(block(self.in_planes, planes, stride))
# self.in_planes = planes * block.expansion
# return nn.Sequential(*layers)
#
# def forward(self, x):
# #out = F.relu(self.bn1(self.conv1(x)))
# out = F.relu(self.conv1(x))
# out = self.layer1(out)
# out = self.layer2(out)
# out = self.layer3(out)
# out = self.layer4(out)
# out = F.avg_pool2d(out, 4)
# out = out.view(out.size(0), -1)
# out = self.linear(out)
# return out
def make_layers(cfg, arch='vgg', batchnorm=False):
    """Build the feature extractor described by `cfg`.

    For arch='vgg', entries are conv output-channel counts with 'M' marking
    a 2x2 max pool, starting from IN_CHANNELS input channels. For arch='fc',
    entries are hidden-layer widths over flattened 28x28 (MNIST) inputs.
    """
    layers = []
    if arch == 'vgg':
        channels = IN_CHANNELS
        for v in cfg:
            if v == 'M':
                layers.append(nn.MaxPool2d(kernel_size=2, stride=2))
                continue
            layers.append(nn.Conv2d(channels, v, kernel_size=3, padding=1))
            if batchnorm:
                layers.append(nn.BatchNorm2d(v))
            layers.append(nn.ReLU(inplace=True))
            channels = v
    elif arch == 'fc':
        width = 784  # flattened 28x28 MNIST images (hard-coded upstream too)
        for v in cfg:
            layers.append(nn.Linear(width, v))
            layers.append(nn.ReLU(inplace=True))
            width = v
    return nn.Sequential(*layers)
# Layer configurations: numbers are conv output channels, 'M' is a 2x2 max
# pool. 'vggN' names count conv layers incrementally; 'vggmN' are narrower
# single-conv-per-stage variants; 'fc*' entries are fully-connected widths;
# 'resnetN' entries are residual blocks per stage.
#
# FIX: the original dict listed 'vgg11' twice — the first (incremental-series)
# entry was dead because the later standard-VGG11 entry silently overwrote
# it. The dead duplicates (here and in in_num below) are removed; the
# resulting dicts are identical to what Python previously produced.
cfg = {
    'vgg1' : [64, 'M'],
    'vgg2' : [64, 64, 'M'],
    'vgg3' : [64, 64, 'M', 128, 'M'],
    'vgg4' : [64, 64, 'M', 128, 128, 'M'],
    'vgg5' : [64, 64, 'M', 128, 128, 'M', 256, 'M'],
    'vgg6' : [64, 64, 'M', 128, 128, 'M', 256, 256, 'M'],
    'vgg7' : [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 'M'],
    'vgg8' : [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M'],
    'vgg9' : [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 'M'],
    'vgg10': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'vgg12': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 'M'],
    'vgg13': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
    'vggm2': [64, 'M', 128, 'M'],
    'vggm3': [64, 'M', 128, 'M', 256, 'M'],
    'vggm4': [64, 'M', 128, 'M', 256, 'M', 512, 'M'],
    'vggm5': [64, 'M', 128, 'M', 256, 'M', 512, 'M', 512, 'M'],
    # Standard VGG11/16/19 configurations.
    'vgg11': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'vgg16': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
    'vgg19': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
    'stuck': [16, 'M', 32, 'M', 32, 'M'],
    'fc5': [256, 256, 256, 256, 256],
    'fc7w': [512, 512, 512, 512, 512],
    'resnet18': [2, 2, 2, 2],
    'resnet34': [3, 4, 6, 3],
    'resnet50': [3, 4, 6, 3]
}

# Flattened feature size entering the classifier head for each config.
in_num = {
    'vgg1' : 16384,
    'vgg2' : 16384,
    'vgg3' : 8192,
    'vgg4' : 8192,
    'vgg5' : 4096,
    'vgg6' : 4096,
    'vgg7' : 2048,
    'vgg8' : 2048,
    'vgg9' : 512,
    'vgg10': 512,
    'vgg12': 512,
    'vgg13': 512,
    'vggm2': 8192,
    'vggm3': 4096,
    'vggm4': 2048,
    'vggm5': 512,
    'vgg11': 512,
    'vgg16': 512,
    'vgg19': 512,
    'stuck': 512,
    'fc5': 256,
    'fc7w': 512
}
def vgg(model, classes=CLASSES, batchnorm=False, dropout=0.0):
    """Build a VGG network for the named config (a key of ``cfg``)."""
    features = make_layers(cfg[model], batchnorm=batchnorm)
    return VGG(features, linear_in=in_num[model], dropout=dropout, classes=classes)
def vgg_s(model, classes=CLASSES, batchnorm=False, dropout=0.0):
    """Build the small (VGG_s) variant for the named config."""
    features = make_layers(cfg[model], batchnorm=batchnorm)
    return VGG_s(features, linear_in=in_num[model], dropout=dropout, classes=classes)
def fc(model, classes=CLASSES, batchnorm=False, dropout=0.0):
    """Build a fully-connected network for the named config."""
    features = make_layers(cfg[model], arch='fc', batchnorm=batchnorm)
    return FC(features, linear_in=in_num[model], dropout=dropout, classes=classes)
def resnet(model, classes=CLASSES, batchnorm=False, dropout=0.0):
    """Build a ResNet; depth-50 configs use Bottleneck blocks, the rest BasicBlock."""
    block = Bottleneck if '50' in model else BasicBlock
    return ResNet(block, cfg[model], num_classes=classes)
| StarcoderdataPython |
102109 |
"""
funcio def aplicar(valor1, valor2, operacio)
ef operacio ():
resultat = input( ' Quina operació vols fer?? (multiplicar sumar o restar) \n' )
return resultat
return varo1 operacio valor2
"""
#Definició
def operacionvalores(valor1, valor2, operacion):
    """Apply a named arithmetic operation to two values.

    Args:
        valor1, valor2: the two operands.
        operacion: one of "sumar" (add), "restar" (subtract) or
            "multiplicar" (multiply).

    Returns:
        The result of the requested operation.

    Raises:
        ValueError: if *operacion* is not a supported name. (Previously an
            unknown name fell through and raised UnboundLocalError on
            ``return resultat``.)
    """
    if operacion == "sumar":
        resultat = valor1 + valor2
    elif operacion == "restar":
        resultat = valor1 - valor2
    elif operacion == "multiplicar":
        resultat = valor1 * valor2
    else:
        raise ValueError("unsupported operation: {!r}".format(operacion))
    return resultat
# Execution: demo run -- multiply 1 by 2 and print the result.
if __name__ == "__main__":
    resultat = operacionvalores (1,2, "multiplicar")
    print(resultat)
6495669 | from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
from app import db
class Account(UserMixin, db.Model):
    """Flask-Login user model storing credentials and a stock list."""
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(10), unique=True, nullable=False)
    email = db.Column(db.String(30))
    # Werkzeug-generated hash -- never the raw password.
    password_hash = db.Column(db.String(128))
    # Presumably a delimited list of stock tickers -- TODO confirm the format.
    stocks = db.Column(db.String(32))
    def __init__(self, username, email, password_hash, stocks):
        # NOTE(review): callers must pass an ALREADY-HASHED password here;
        # use set_password() to hash a plaintext one after construction.
        self.username = username
        self.email = email
        self.password_hash = password_hash
        self.stocks = stocks
    def set_password(self, password):
        """Hash *password* and store the hash on this account."""
        self.password_hash = generate_password_hash(password)
    def check_password(self, password):
        """Return True iff *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)
    def __repr__(self):
        return '<Account info: {}>'.format(self.username)
3222921 | import voluptuous as vol
from esphome import pins
from esphome.components import light
from esphome.components.light import AddressableLight
from esphome.components.power_supply import PowerSupplyComponent
import esphome.config_validation as cv
from esphome.const import CONF_CLOCK_PIN, CONF_COLOR_CORRECT, CONF_DATA_PIN, \
CONF_DEFAULT_TRANSITION_LENGTH, CONF_EFFECTS, CONF_GAMMA_CORRECT, CONF_MAKE_ID, CONF_METHOD, \
CONF_NAME, CONF_NUM_LEDS, CONF_PIN, CONF_POWER_SUPPLY, CONF_TYPE, CONF_VARIANT
from esphome.core import CORE
from esphome.cpp_generator import TemplateArguments, add, get_variable, variable
from esphome.cpp_helpers import setup_component
from esphome.cpp_types import App, Application, Component, global_ns
# Codegen handle for the C++ base class of NeoPixelBus light outputs.
NeoPixelBusLightOutputBase = light.light_ns.class_('NeoPixelBusLightOutputBase', Component,
                                                   AddressableLight)
# Codegen namespace holding the ESPNeoPixelOrder enum values (GRB, RGBW, ...).
ESPNeoPixelOrder = light.light_ns.namespace('ESPNeoPixelOrder')
def validate_type(value):
    """Validate a channel-order string such as 'GRB' or 'GRBW'."""
    value = cv.string(value).upper()
    # All three primary channels are mandatory; checked in R, G, B order to
    # keep the original error-message ordering.
    for channel in 'RGB':
        if channel not in value:
            raise vol.Invalid("Must have {} in type".format(channel))
    unknown = set(value) - set('RGBW')
    if unknown:
        raise vol.Invalid("Type has invalid color: {}".format(', '.join(unknown)))
    if len(set(value)) != len(value):
        raise vol.Invalid("Type has duplicate color!")
    return value
def validate_variant(value):
    """Normalize chipset aliases, then validate against VARIANTS."""
    value = cv.string(value).upper()
    # Each alias maps to its canonical variant name; a value that is not an
    # alias passes through unchanged (same effect as the original if-chain).
    aliases = {
        'WS2813': 'WS2812X',
        'WS2812': '800KBPS',
        'LC8812': 'SK6812',
    }
    value = aliases.get(value, value)
    return cv.one_of(*VARIANTS)(value)
def validate_method(value):
    """Pick or validate the NeoPixelBus transmit method for this platform."""
    if CORE.is_esp32:
        methods, default = ESP32_METHODS, 'ESP32_I2S_1'
    elif CORE.is_esp8266:
        methods, default = ESP8266_METHODS, 'ESP8266_DMA'
    else:
        raise NotImplementedError
    if value is None:
        # No method configured: use the platform default.
        return default
    return cv.one_of(*methods, upper=True, space='_')(value)
def validate_method_pin(value):
    """Ensure every configured pin is usable by the chosen transmit method."""
    method = value[CONF_METHOD]
    # GPIOs each DMA/UART/I2S peripheral is physically wired to.
    method_pins = {
        'ESP8266_DMA': [3],
        'ESP8266_UART0': [1],
        'ESP8266_ASYNC_UART0': [1],
        'ESP8266_UART1': [2],
        'ESP8266_ASYNC_UART1': [2],
        'ESP32_I2S_0': list(range(0, 32)),
        'ESP32_I2S_1': list(range(0, 32)),
    }
    # Bit-banging can drive any output-capable GPIO on the platform.
    if CORE.is_esp8266:
        method_pins['BIT_BANG'] = list(range(0, 16))
    elif CORE.is_esp32:
        method_pins['BIT_BANG'] = list(range(0, 32))
    allowed = method_pins[method]
    for key in (CONF_PIN, CONF_CLOCK_PIN, CONF_DATA_PIN):
        if key in value and value[key] not in allowed:
            raise vol.Invalid("Method {} only supports pin(s) {}".format(
                method, ', '.join('GPIO{}'.format(x) for x in allowed)
            ), path=[CONF_METHOD])
    return value
# Config value -> NeoPixelBus class-name fragment for the chip variant.
VARIANTS = {
    'WS2812X': 'Ws2812x',
    'SK6812': 'Sk6812',
    '800KBPS': '800Kbps',
    '400KBPS': '400Kbps',
}
# Method name -> C++ method class template; '{}' is filled with the variant
# fragment above by format_method().
ESP8266_METHODS = {
    'ESP8266_DMA': 'NeoEsp8266Dma{}Method',
    'ESP8266_UART0': 'NeoEsp8266Uart0{}Method',
    'ESP8266_UART1': 'NeoEsp8266Uart1{}Method',
    'ESP8266_ASYNC_UART0': 'NeoEsp8266AsyncUart0{}Method',
    'ESP8266_ASYNC_UART1': 'NeoEsp8266AsyncUart1{}Method',
    'BIT_BANG': 'NeoEsp8266BitBang{}Method',
}
ESP32_METHODS = {
    'ESP32_I2S_0': 'NeoEsp32I2s0{}Method',
    'ESP32_I2S_1': 'NeoEsp32I2s1{}Method',
    'BIT_BANG': 'NeoEsp32BitBang{}Method',
}
def format_method(config):
    """Build the NeoPixelBus C++ method class name for this config."""
    variant = VARIANTS[config[CONF_VARIANT]]
    method = config[CONF_METHOD]
    if CORE.is_esp8266:
        template = ESP8266_METHODS[method]
    elif CORE.is_esp32:
        template = ESP32_METHODS[method]
    else:
        raise NotImplementedError
    return template.format(variant)
def validate(config):
    """Require exactly one wiring scheme: 'pin' OR 'clock_pin' + 'data_pin'."""
    has_pin = CONF_PIN in config
    has_clock = CONF_CLOCK_PIN in config
    has_data = CONF_DATA_PIN in config
    if has_pin:
        if has_clock or has_data:
            raise vol.Invalid("Cannot specify both 'pin' and 'clock_pin'+'data_pin'")
        return config
    if has_clock:
        if not has_data:
            raise vol.Invalid("If you give clock_pin, you must also specify data_pin")
        return config
    raise vol.Invalid("Must specify at least one of 'pin' or 'clock_pin'+'data_pin'")
# Codegen struct pairing the created light output with its state variable.
MakeNeoPixelBusLight = Application.struct('MakeNeoPixelBusLight')
# Platform schema. Wiring must be either 'pin' OR 'clock_pin'+'data_pin'
# (enforced by validate); pin/method compatibility by validate_method_pin.
PLATFORM_SCHEMA = cv.nameable(light.LIGHT_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(light.AddressableLightState),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeNeoPixelBusLight),
    vol.Optional(CONF_TYPE, default='GRB'): validate_type,
    vol.Optional(CONF_VARIANT, default='800KBPS'): validate_variant,
    vol.Optional(CONF_METHOD, default=None): validate_method,
    vol.Optional(CONF_PIN): pins.output_pin,
    vol.Optional(CONF_CLOCK_PIN): pins.output_pin,
    vol.Optional(CONF_DATA_PIN): pins.output_pin,
    vol.Required(CONF_NUM_LEDS): cv.positive_not_null_int,
    vol.Optional(CONF_GAMMA_CORRECT): cv.positive_float,
    vol.Optional(CONF_COLOR_CORRECT): vol.All([cv.percentage], vol.Length(min=3, max=4)),
    vol.Optional(CONF_DEFAULT_TRANSITION_LENGTH): cv.positive_time_period_milliseconds,
    vol.Optional(CONF_POWER_SUPPLY): cv.use_variable_id(PowerSupplyComponent),
    vol.Optional(CONF_EFFECTS): light.validate_effects(light.ADDRESSABLE_EFFECTS),
}).extend(cv.COMPONENT_SCHEMA.schema), validate, validate_method_pin)
def to_code(config):
    """Generate the C++ setup code for a NeoPixelBus light.

    This is an esphome codegen coroutine: the bare ``yield`` suspends until
    a referenced variable has been generated.
    """
    type_ = config[CONF_TYPE]
    has_white = 'W' in type_
    # Pick the RGBW vs RGB factory and the matching NeoPixelBus color feature.
    if has_white:
        func = App.make_neo_pixel_bus_rgbw_light
        color_feat = global_ns.NeoRgbwFeature
    else:
        func = App.make_neo_pixel_bus_rgb_light
        color_feat = global_ns.NeoRgbFeature
    template = TemplateArguments(getattr(global_ns, format_method(config)), color_feat)
    rhs = func(template, config[CONF_NAME])
    make = variable(config[CONF_MAKE_ID], rhs, type=MakeNeoPixelBusLight.template(template))
    output = make.Poutput
    # Single-wire chips take one pin; clocked chips take clock + data.
    if CONF_PIN in config:
        add(output.add_leds(config[CONF_NUM_LEDS], config[CONF_PIN]))
    else:
        add(output.add_leds(config[CONF_NUM_LEDS], config[CONF_CLOCK_PIN], config[CONF_DATA_PIN]))
    add(output.set_pixel_order(getattr(ESPNeoPixelOrder, type_)))
    if CONF_POWER_SUPPLY in config:
        # Suspend until the power-supply variable exists in generated code.
        for power_supply in get_variable(config[CONF_POWER_SUPPLY]):
            yield
        add(output.set_power_supply(power_supply))
    if CONF_COLOR_CORRECT in config:
        add(output.set_correction(*config[CONF_COLOR_CORRECT]))
    light.setup_light(make.Pstate, config)
    setup_component(output, config)
# Compile-time flag enabling the NeoPixelBus light implementation in firmware.
REQUIRED_BUILD_FLAGS = '-DUSE_NEO_PIXEL_BUS_LIGHT'
# Pinned PlatformIO library dependency for this platform.
LIB_DEPS = 'NeoPixelBus@2.4.1'
| StarcoderdataPython |
3285655 | from marmot.features.feature_extractor import FeatureExtractor
from marmot.util.ngram_window_extractor import left_context, right_context
class PairedFeatureExtractor(FeatureExtractor):
    '''
    Paired features:
        - target token + left context
        - target token + right context
        - target token + source token
        - target POS + source POS
    '''
    def get_features(self, context_obj):
        """Return the four paired features as '|'-joined strings."""
        token = context_obj['token']
        # One-word windows immediately left/right of the target token.
        left = ' '.join(left_context(context_obj['target'], token, context_size=1, idx=context_obj['index']))
        right = ' '.join(right_context(context_obj['target'], token, context_size=1, idx=context_obj['index']))
        tg_pos = context_obj['target_pos'][context_obj['index']] if context_obj['target_pos'] != [] else ''
        # NOTE(review): assumes alignments[i] is a single source index or None
        # (one-to-one alignment) -- confirm against the alignment preprocessing.
        align_idx = context_obj['alignments'][context_obj['index']]
        if align_idx is None:
            src_token = '__unaligned__'
            src_pos = '__unaligned__'
        else:
            src_token = context_obj['source'][align_idx]
            src_pos = context_obj['source_pos'][align_idx]
        return [token + '|' + left, token + '|' + right, token + '|' + src_token, tg_pos + '|' + src_pos]
    def get_feature_names(self):
        """Names aligned index-for-index with get_features() output."""
        return ['token+left', 'token+right', 'token+source', 'POS+sourcePOS']
9772431 | <filename>test/test_generation.py
# Note: This file was taken mostly as is from the svg.path module (v 2.0)
#------------------------------------------------------------------------------
from __future__ import division, absolute_import, print_function
import unittest
from svgpathtools import *
class TestGeneration(unittest.TestCase):
    """Round-trip tests: parse_path(...).d() should reproduce the input."""

    def test_path_parsing(self):
        """Examples from the SVG spec"""
        paths = [
            'M 100,100 L 300,100 L 200,300 Z',
            'M 0,0 L 50,20 M 100,100 L 300,100 L 200,300 Z',
            'M 100,100 L 200,200',
            'M 100,200 L 200,100 L -100,-200',
            'M 100,200 C 100,100 250,100 250,200 S 400,300 400,200',
            'M 100,200 C 100,100 400,100 400,200',
            'M 100,500 C 25,400 475,400 400,500',
            'M 100,800 C 175,700 325,700 400,800',
            'M 600,200 C 675,100 975,100 900,200',
            'M 600,500 C 600,350 900,650 900,500',
            'M 600,800 C 625,700 725,700 750,800 S 875,900 900,800',
            'M 200,300 Q 400,50 600,300 T 1000,300',
            'M -3.4E+38,3.4E+38 L -3.4E-38,3.4E-38',
            'M 0,0 L 50,20 M 50,20 L 200,100 Z',
            'M 600,350 L 650,325 A 25,25 -30 0,1 700,300 L 750,275',
        ]
        # Expected serializations: floats everywhere, shorthand commands
        # (S/T/Z) expanded to their explicit forms.
        float_paths = [
            'M 100.0,100.0 L 300.0,100.0 L 200.0,300.0 L 100.0,100.0',
            'M 0.0,0.0 L 50.0,20.0 M 100.0,100.0 L 300.0,100.0 L 200.0,300.0 L 100.0,100.0',
            'M 100.0,100.0 L 200.0,200.0',
            'M 100.0,200.0 L 200.0,100.0 L -100.0,-200.0',
            'M 100.0,200.0 C 100.0,100.0 250.0,100.0 250.0,200.0 C 250.0,300.0 400.0,300.0 400.0,200.0',
            'M 100.0,200.0 C 100.0,100.0 400.0,100.0 400.0,200.0',
            'M 100.0,500.0 C 25.0,400.0 475.0,400.0 400.0,500.0',
            'M 100.0,800.0 C 175.0,700.0 325.0,700.0 400.0,800.0',
            'M 600.0,200.0 C 675.0,100.0 975.0,100.0 900.0,200.0',
            'M 600.0,500.0 C 600.0,350.0 900.0,650.0 900.0,500.0',
            'M 600.0,800.0 C 625.0,700.0 725.0,700.0 750.0,800.0 C 775.0,900.0 875.0,900.0 900.0,800.0',
            'M 200.0,300.0 Q 400.0,50.0 600.0,300.0 Q 800.0,550.0 1000.0,300.0',
            'M -3.4e+38,3.4e+38 L -3.4e-38,3.4e-38',
            'M 0.0,0.0 L 50.0,20.0 L 200.0,100.0 L 50.0,20.0',
            ('M 600.0,350.0 L 650.0,325.0 A 27.9508497187,27.9508497187 -30.0 0,1 700.0,300.0 L 750.0,275.0',  # Python 2
             'M 600.0,350.0 L 650.0,325.0 A 27.95084971874737,27.95084971874737 -30.0 0,1 700.0,300.0 L 750.0,275.0')  # Python 3
        ]
        for path, flpath in zip(paths[::-1], float_paths[::-1]):
            # Note: Python 3 and Python 2 differ in the number of digits
            # truncated when returning a string representation of a float
            parsed_path = parse_path(path)
            res = parsed_path.d()
            # A tuple entry carries (py2, py3) variants of the expected string.
            if isinstance(flpath, tuple):
                option3 = res == flpath[1]  # Python 3
                flpath = flpath[0]
            else:
                option3 = False
            option1 = res == path
            option2 = res == flpath
            msg = ('\npath =\n {}\nflpath =\n {}\nparse_path(path).d() =\n {}'
                   ''.format(path, flpath, res))
            self.assertTrue(option1 or option2 or option3, msg=msg)
        # Float-form strings must round-trip exactly (arc entry excluded,
        # since its digits are version-dependent).
        for flpath in float_paths[:-1]:
            res = parse_path(flpath).d()
            msg = ('\nflpath =\n {}\nparse_path(path).d() =\n {}'
                   ''.format(flpath, res))
            self.assertTrue(res == flpath, msg=msg)
    def test_normalizing(self):
        # Relative paths will be made absolute, subpaths merged if they can,
        # and syntax will change.
        path = 'M0 0L3.4E2-10L100.0,100M100,100l100,-100'
        ps = 'M 0,0 L 340,-10 L 100,100 L 200,0'
        psf = 'M 0.0,0.0 L 340.0,-10.0 L 100.0,100.0 L 200.0,0.0'
        self.assertTrue(parse_path(path).d() in (ps, psf))
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
1624312 | <gh_stars>0
import requests
from bs4 import BeautifulSoup
import json
def getTitle(id=1):
    """Fetch the handspeak word page for *id*; return (lower-cased title word, url)."""
    url = f"https://www.handspeak.com/word/search/index.php?id={id}"
    response = requests.get(url)
    soup = BeautifulSoup(response.text, "html.parser")
    heading = soup.select('h1')[0].text.strip()
    # Keep only the last space-separated word of the heading.
    title = heading.split(" ")[-1].lower()
    return (title, url)
def generateWordsFile(starting=1, maximum=100, filename='words.json'):
    """Scrape word ids in [starting, maximum] and append entries to *filename*.

    *filename* must already contain a JSON list. Bug fix: the *filename*
    parameter was previously ignored and 'words.json' was hard-coded in the
    open() call.
    """
    for word_id in range(starting, maximum + 1):
        title, url = getTitle(word_id)
        entry = {
            "name": title,
            "link": url,
        }
        print(title, word_id)  # progress indicator
        # Re-read and rewrite after every word so progress survives a crash.
        with open(filename, "r+") as f:
            data = json.load(f)
            data.append(entry)
            f.seek(0)
            json.dump(data, f)
if __name__ == "__main__":
generateWordsFile(starting=1, maximum=10464)
| StarcoderdataPython |
6649554 | <reponame>Cal-CS-61A-Staff/examtool
from os import getenv
from examtool.api.server_delegate import server_only
from examtool.api.utils import as_list
if getenv("ENV") == "SERVER":
from google.cloud import firestore
from google.cloud.exceptions import NotFound
# Firestore caps a WriteBatch at 500 operations; stay safely below that.
BATCH_SIZE = 400
assert BATCH_SIZE < 500
def clear_collection(db: "firestore.Client", ref):
    """Delete every document in collection *ref*, committing in batches of BATCH_SIZE."""
    batch = db.batch()
    cnt = 0
    for document in ref.stream():
        batch.delete(document.reference)
        cnt += 1
        if cnt > BATCH_SIZE:
            batch.commit()
            batch = db.batch()
            cnt = 0
    # Flush the final (possibly empty) batch.
    batch.commit()
@server_only
def get_exam(*, exam):
    """Return the stored exam dict; raise KeyError if the exam does not exist."""
    try:
        db = firestore.Client()
        out = db.collection("exams").document(exam).get().to_dict()
        # Firestore may return the secret as bytes; normalize to str.
        if "secret" in out and isinstance(out["secret"], bytes):
            out["secret"] = out["secret"].decode("utf-8")
        return out
    except NotFound:
        raise KeyError
@server_only
def set_exam(*, exam, json):
    """Store the exam JSON and register *exam* in the global exam list."""
    db = firestore.Client()
    db.collection("exams").document(exam).set(json)
    # Maintain the "all" document's exam-list index.
    ref = db.collection("exams").document("all")
    data = ref.get().to_dict()
    if exam not in data["exam-list"]:
        data["exam-list"].append(exam)
        ref.set(data)
@server_only
@as_list
def get_roster(*, exam):
    """Yield (email, deadline) for every student on the exam roster."""
    db = firestore.Client()
    for student in db.collection("roster").document(exam).collection("deadline").stream():
        yield student.id, student.to_dict()["deadline"]
@server_only
def set_roster(*, exam, roster):
    """Replace the roster for *exam* with the given (email, deadline) pairs.

    Consistency fix: the old body duplicated the batched-delete loop inline
    with a hard-coded 400; it now reuses clear_collection() and BATCH_SIZE.
    """
    db = firestore.Client()
    ref = db.collection("roster").document(exam).collection("deadline")
    # Wipe the existing roster before writing the new one.
    clear_collection(db, ref)
    batch = db.batch()
    cnt = 0
    for email, deadline in roster:
        doc_ref = ref.document(email)
        batch.set(doc_ref, {"deadline": int(deadline)})
        cnt += 1
        if cnt > BATCH_SIZE:
            batch.commit()
            batch = db.batch()
            cnt = 0
    # Flush the final partial batch.
    batch.commit()
@server_only
@as_list
def get_submissions(*, exam):
    """Yield (email, submission dict) for every submission to *exam*."""
    db = firestore.Client()
    for ref in db.collection(exam).stream():
        yield ref.id, ref.to_dict()
@server_only
@as_list
def get_logs(*, exam, email):
    """Yield every log entry recorded for one student in *exam*."""
    db = firestore.Client()
    for ref in db.collection(exam).document(email).collection("log").stream():
        yield ref.to_dict()
@server_only
def process_ok_exam_upload(*, exam, data, clear=True):
    """
    data: {
        "students": [
            {
                "email": string,
                "questions": [
                    {
                        "student_question_name": string,
                        "canonical_question_name": string,
                        "start_time": int,
                        "end_time": int,
                    }
                ],
                "start_time": int,
                "end_time": int,
            }
        ]
        "questions": [
            {
                "canonical_question_name": string,
            }
        ],
    }
    """
    db = firestore.Client()
    # Store the canonical question list on the exam's alerts document.
    db.collection("exam-alerts").document(exam).set(
        {"questions": data["questions"]}
    )
    ref = (
        db.collection("exam-alerts").document(exam).collection("students")
    )
    # Optionally wipe previously-uploaded student records first.
    if clear:
        clear_collection(db, ref)
    # Write one document per student, in Firestore-safe batches.
    batch = db.batch()
    cnt = 0
    for student in data["students"]:
        doc_ref = ref.document(student["email"])
        batch.set(doc_ref, student)
        cnt += 1
        if cnt > BATCH_SIZE:
            batch.commit()
            batch = db.batch()
            cnt = 0
    batch.commit()
    # Register the exam in the alerts exam-list index.
    ref = db.collection("exam-alerts").document("all")
    exam_list_data = ref.get().to_dict()
    if exam not in exam_list_data["exam-list"]:
        exam_list_data["exam-list"].append(exam)
    ref.set(exam_list_data)
5104266 | <gh_stars>0
import numpy as np
from keras import Sequential
from keras.layers import Lambda, Conv2D, BatchNormalization, Activation, MaxPooling2D, Dropout, GlobalAveragePooling2D, \
Dense
from mydeep_api.dataset.numpy_column import NumpyColumn
from mydeep_keras.k_model import KModel, KFitConfig
def test_fit():
    """Smoke test: build a tiny CNN through KModel and fit it on random data."""
    input_shape = (10, 10, 3)
    output_class_number = 10
    model = KModel.from_keras(
        keras_model=Sequential([
            # Identity lambda pins the input shape and gives the layer a name.
            Lambda(lambda x: x, input_shape=input_shape, name="input_lambda"),
            Conv2D(16, (3, 3), padding='same'),
            BatchNormalization(),
            Activation(activation='relu'),
            MaxPooling2D(),
            Dropout(.5),
            GlobalAveragePooling2D(),
            Dropout(.5),
            Dense(output_class_number, activation='softmax')
        ]),
        compile_params={
            'loss': 'binary_crossentropy',
            'optimizer': 'Adam',
            'metrics': ['accuracy']
        })
    # 100 synthetic samples; labels are 10-wide rows (one per class).
    model.fit(
        x=NumpyColumn(np.arange(100 * 10 * 10 * 3).reshape((100, 10, 10, 3))),
        y=NumpyColumn(np.arange(100 * 10).reshape((100, 10))),
        config=KFitConfig(
            epochs=3,
        ))
| StarcoderdataPython |
3590362 | """Team prize test"""
import datetime
from django.contrib.auth.models import User
from django.test import TransactionTestCase
from apps.managers.team_mgr.models import Group, Team
from apps.utils import test_utils
from apps.managers.challenge_mgr.models import RoundSetting
class DormTeamPrizeTests(TransactionTestCase):
    """
    Tests awarding a prize to a dorm team points winner.
    """
    def setUp(self):
        """
        Sets up a test team prize for the rest of the tests.
        This prize is not saved, as the round field is not yet set.
        """
        self.prize = test_utils.setup_prize(award_to="team_group", competition_type="points")
        self.current_round = "Round 1"
        test_utils.set_competition_round()
        # Create test groups, teams, and users.
        self.groups = [Group(name="Test Group %d" % i) for i in range(0, 2)]
        _ = [d.save() for d in self.groups]
        # Teams alternate between the two groups (team i -> group i % 2).
        self.teams = [Team(name=str(i), group=self.groups[i % 2]) for i in range(0, 4)]
        _ = [f.save() for f in self.teams]
        self.users = [User.objects.create_user("test%d" % i, "<EMAIL>") for i in range(0, 4)]
        # Assign users to teams.
        for index, user in enumerate(self.users):
            user.get_profile().team = self.teams[index % 4]
            user.get_profile().save()
    def testNumAwarded(self):
        """Checks that the number of prizes to award for this prize is the same as the
        number of groups.
        """
        self.prize.round = RoundSetting.objects.get(name="Round 1")
        self.prize.save()
        self.assertEqual(self.prize.num_awarded(self.teams[0]), len(self.groups),
                         "One prize should be awarded to each of the groups in the competition.")
    def testRoundLeader(self):
        """
        Tests that we can retrieve the overall individual points leader for a round prize.
        """
        self.prize.round = RoundSetting.objects.get(name="Round 1")
        self.prize.save()
        # Test one user will go ahead in points.
        profile = self.users[0].get_profile()
        profile.add_points(10, datetime.datetime.today() + datetime.timedelta(minutes=1), "test")
        profile.save()
        self.assertEqual(self.prize.leader(profile.team), profile.team,
                         "The user's team is not leading in the prize.")
        # Test a user in a different group.
        profile1 = self.users[1].get_profile()
        profile1.add_points(profile.points() + 1,
                            datetime.datetime.today() + datetime.timedelta(minutes=1), "test")
        profile1.save()
        self.assertEqual(self.prize.leader(profile.team), profile.team,
                         "The leader for this prize in first users dorm should not change.")
        self.assertEqual(self.prize.leader(profile1.team), profile1.team,
                         "The leader in profile1's dorm is not profile1.")
        # Test that a user in a different team but same dorm changes the leader for the
        # original user.
        profile2 = self.users[2].get_profile()
        profile2.add_points(profile.points() + 1,
                            datetime.datetime.today() + datetime.timedelta(minutes=1), "test")
        profile2.save()
        self.assertEqual(self.prize.leader(profile.team), profile2.team,
                         "The leader for this prize did not change.")
        self.assertEqual(self.prize.leader(profile1.team), profile1.team,
                         "The leader in profile1's dorm is not profile1.")
    def tearDown(self):
        """
        Deletes the created image file in prizes.
        """
        self.prize.image.delete()
        self.prize.delete()
class OverallTeamPrizeTest(TransactionTestCase):
    """
    Tests awarding a prize to a dorm team points winner.
    """
    def setUp(self):
        """
        Sets up a test team overall prize for the rest of the tests.
        This prize is not saved, as the round field is not yet set.
        """
        self.prize = test_utils.setup_prize(award_to="team_overall", competition_type="points")
        self.current_round = "Round 1"
        test_utils.set_competition_round()
        # test_utils.create_teams populates self.users / self.teams fixtures.
        test_utils.create_teams(self)
    def testNumAwarded(self):
        """
        Simple test to check that the number of prizes to be awarded is one.
        """
        self.prize.round = RoundSetting.objects.get(name="Round 1")
        self.prize.save()
        self.assertEqual(self.prize.num_awarded(),
                         1,
                         "This prize should not be awarded to more than one user.")
    def testRoundLeader(self):
        """
        Tests that we can retrieve the overall individual points leader for a round prize.
        """
        self.prize.round = RoundSetting.objects.get(name="Round 1")
        self.prize.save()
        # Test one user will go ahead in points.
        profile = self.users[0].get_profile()
        profile.add_points(10, datetime.datetime.today() + datetime.timedelta(minutes=1), "test")
        profile.save()
        self.assertEqual(self.prize.leader(profile.team), profile.team,
                         "The user's team is not leading in the prize.")
        # Test that a user in a different team changes the leader for the original user.
        profile2 = self.users[2].get_profile()
        profile2.add_points(profile.points() + 1,
                            datetime.datetime.today() + datetime.timedelta(minutes=1), "test")
        profile2.save()
        self.assertEqual(self.prize.leader(profile.team), profile2.team,
                         "The leader for this prize did not change.")
    def tearDown(self):
        """
        Deletes the created image file in prizes.
        """
        self.prize.image.delete()
        self.prize.delete()
| StarcoderdataPython |
6484262 | #87671-<NAME> 87693-<NAME> Grupo 15
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 16 20:31:54 2017
@author: mlopes
"""
import numpy as np
import random
from tempfile import TemporaryFile
outfile = TemporaryFile()
class finiteMDP:
    """Finite Markov Decision Process with value iteration and batch Q-learning.

    Attributes:
        nS, nA: number of states / actions.
        gamma: discount factor.
        Q: state-action value estimates, shape (nS, nA).
        P: transition tensor, P[s, a, s'] = probability of s -> s' under a.
        R: reward matrix, R[s, a].
        absorv: per-state flags marking absorbing states.
    """

    def __init__(self, nS, nA, gamma, P=[], R=[], absorv=[]):
        self.nS = nS  # number of states
        self.nA = nA  # number of actions
        self.gamma = gamma  # discount factor
        self.Q = np.zeros((self.nS, self.nA))  # state-action values
        self.P = P
        self.R = R
        self.absorv = absorv

    def runPolicy(self, n, x0, poltype='greedy', polpar=[]):
        """Run the policy for n steps from x0; return (discounted return J, trajectory)."""
        # do not modify (original assignment constraint)
        traj = np.zeros((n, 4))  # one (s, a, s', r) row per step
        x = x0
        J = 0
        for ii in range(0, n):
            a = self.policy(x, poltype, polpar)  # action
            r = self.R[x, a]  # reward
            y = np.nonzero(np.random.multinomial(1, self.P[x, a, :]))[0][0]  # next state
            traj[ii, :] = np.array([x, a, y, r])
            J = J + r * self.gamma ** ii  # accumulate discounted return
            if self.absorv[x]:
                y = x0  # restart from x0 after an absorbing state
            x = y
        return J, traj

    def VI(self):
        """Value iteration to convergence; returns (Q, softmax policy)."""
        # do not modify (original assignment constraint)
        nQ = np.zeros((self.nS, self.nA))
        while True:
            self.V = np.max(self.Q, axis=1)
            for a in range(0, self.nA):
                nQ[:, a] = self.R[:, a] + self.gamma * np.dot(self.P[:, a, :], self.V)
            err = np.linalg.norm(self.Q - nQ)
            self.Q = np.copy(nQ)
            if err < 1e-7:
                break
        # update policy
        self.V = np.max(self.Q, axis=1)
        # correct for 2 equal actions
        self.Pol = np.argmax(self.Q, axis=1)
        return self.Q, self.Q2pol(self.Q)

    def traces2Q(self, trace):
        """Estimate Q from a batch of (s, a, s', r) rows.

        Performs repeated Q-learning sweeps over the whole trace until the
        per-sweep change drops below a small threshold.
        """
        self.Q = np.zeros((self.nS, self.nA))
        nQ = np.zeros((self.nS, self.nA))
        alpha = 0.1  # learning rate
        while True:
            for elem in trace:
                s = int(elem[0])
                a = int(elem[1])
                s_next = int(elem[2])
                r = elem[3]
                # Standard temporal-difference (Q-learning) update.
                nQ[s, a] += alpha * (r + self.gamma * np.max(nQ[s_next, :]) - nQ[s, a])
            err = np.linalg.norm(self.Q - nQ)
            self.Q = np.copy(nQ)
            if err < 1e-2:
                break
        return self.Q

    def policy(self, x, poltype='exploration', par=[]):
        """Choose an action for state x.

        'exploitation' (and 'greedy', runPolicy's default, which previously
        fell through and raised UnboundLocalError) act greedily w.r.t. par,
        falling back to the learned self.Q when par is empty; any other
        poltype explores uniformly at random.
        """
        if poltype in ('exploitation', 'greedy'):
            values = par if len(par) else self.Q
            a = np.argmax(values[x])
        else:
            a = np.random.randint(0, self.nA)
        return a

    def Q2pol(self, Q, eta=5):
        """Return the softmax (Boltzmann) policy induced by Q.

        Generalization fix: the original normalized with a hard-coded 2x2
        ones matrix (np.array([[1, 1], [1, 1]])) and was only correct for
        nA == 2; this row-sum normalization works for any number of actions
        and is identical for nA == 2.
        """
        e = np.exp(eta * Q)
        return e / e.sum(axis=1, keepdims=True)
def replace_project_name(project_name, files=None):
    """Substitute the %%PROJECT_NAME%% placeholder in each template file.

    Args:
        project_name: replacement text for the placeholder.
        files: iterable of file paths to rewrite in place; defaults to the
            generated app's HTML templates (backward compatible with the
            original hard-coded list).
    """
    if files is None:
        files = ['./app/templates/base.html', './app/templates/auth/login.html', './app/templates/main/index.html']
    for path in files:
        # Read in the file
        with open(path, 'r') as f:
            filedata = f.read()
        # Replace the target string
        filedata = filedata.replace('%%PROJECT_NAME%%', project_name)
        # Write the file out again
        with open(path, 'w') as f:
            f.write(filedata)
def post_hook():
    """Cookiecutter post-generation hook: stamp the project name into the
    templates and print/write follow-up setup instructions."""
    # These {{...}} markers are rendered by cookiecutter before this runs.
    project_slug = '{{cookiecutter.project_slug}}'
    project_name = '{{cookiecutter.project_name}}'
    print(project_name, project_slug)
    replace_project_name(project_name)
    print("**************************************************")
    print("\tPost Install Instructions")
    print("\tExecute the following")
    print(f"\tsee {project_slug}/post_instructions.txt")
    print("**************************************************")
    post_instructions = f"""
    cd {project_slug}
    python3 -m venv venv
    source venv/bin/activate
    pip install -r requirements.txt
    python main.py
    """
    print(post_instructions)
    # Persist the instructions so the user can re-read them later.
    with open('./post_instructions.txt', 'w') as f:
        f.write(post_instructions)
if __name__ == '__main__':
post_hook() | StarcoderdataPython |
8076694 | from datetime import datetime
from django.http import HttpRequest
from django.test import TestCase
from django.utils.crypto import get_random_string
from zentral.core.events.base import BaseEvent, EventMetadata
from zentral.core.incidents.events import (IncidentCreatedEvent, IncidentSeverityUpdatedEvent,
IncidentStatusUpdatedEvent, MachineIncidentCreatedEvent,
MachineIncidentStatusUpdatedEvent)
from zentral.core.incidents.models import Incident, IncidentUpdate, MachineIncident, Severity, Status
from zentral.core.incidents.utils import apply_incident_updates, update_incident_status, update_machine_incident_status
class TestEvent(BaseEvent):
    """Minimal concrete event type used by the incident-update tests."""
    event_type = "test_event"
    def get_linked_objects_keys(self):
        # Fixed fake linked-object key so tests can verify metadata merging.
        return {"yolo": [(17,)]}
class IncidentTestCase(TestCase):
def _create_event(self, severity=Severity.CRITICAL, serial_number=None):
incident_type = get_random_string(12)
key = {"key": get_random_string(12)}
incident_update = IncidentUpdate(incident_type, key, severity)
return TestEvent(
EventMetadata(
machine_serial_number=serial_number,
incident_updates=[incident_update],
), {}
), incident_type, key
def test_open_incident_no_existing_incident_create(self):
# no existing incident, create one, return one IncidentCreatedEvent
original_event, incident_type, key = self._create_event()
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 1)
event = events[0]
self.assertIsInstance(event, IncidentCreatedEvent)
incident = Incident.objects.get(pk=event.payload["pk"])
self.assertEqual(
event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(incident.pk,)]}
)
self.assertEqual(
original_event.metadata.objects,
{"yolo": [(17,)],
"incident": [(incident.pk,)]} # copied from the incident event
)
self.assertEqual(incident.incident_type, incident_type)
self.assertEqual(incident.key, key)
self.assertEqual(incident.severity, Severity.CRITICAL.value)
def test_open_incident_existing_open_incident_greater_severity_noop(self):
# existing open incident, with greater Severity, noop
original_event, incident_type, key = self._create_event(severity=Severity.MAJOR)
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.CRITICAL.value
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 0)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.status, Status.OPEN.value)
self.assertEqual(existing_incident.severity, Severity.CRITICAL.value)
def test_open_incident_existing_open_incident_lower_severity_update(self):
# existing open incident, with lower Severity, return one IncidentSeverityUpdatedEvent
original_event, incident_type, key = self._create_event(severity=Severity.CRITICAL)
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 1)
event = events[0]
self.assertIsInstance(event, IncidentSeverityUpdatedEvent)
self.assertEqual(
event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(existing_incident.pk,)]}
)
self.assertEqual(
original_event.metadata.objects,
{"yolo": [(17,)],
"incident": [(existing_incident.pk,)]} # copied from the incident event
)
self.assertEqual(event.payload["pk"], existing_incident.pk)
self.assertEqual(event.payload["severity"], Severity.CRITICAL.value)
self.assertEqual(event.payload["previous_severity"], Severity.MAJOR.value)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.severity, Severity.CRITICAL.value)
def test_close_open_incident_no_existing_incident_noop(self):
# no existing incident, noop
original_event, incident_type, key = self._create_event(severity=Severity.NONE)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 0)
self.assertEqual(Incident.objects.filter(incident_type=incident_type, key=key).count(), 0)
def test_close_open_incident_existing_open_incident(self):
# existing open incident, IncidentStatusUpdatedEvent
original_event, incident_type, key = self._create_event(severity=Severity.NONE)
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 1)
event = events[0]
self.assertIsInstance(event, IncidentStatusUpdatedEvent)
self.assertEqual(
event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(existing_incident.pk,)]}
)
self.assertEqual(
original_event.metadata.objects,
{"yolo": [(17,)],
"incident": [(existing_incident.pk,)]} # copied from the incident event
)
self.assertEqual(event.payload["pk"], existing_incident.pk)
self.assertEqual(event.payload["previous_status"]["status"], Status.OPEN.value)
self.assertEqual(event.payload["status"], Status.CLOSED.value)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.status, Status.CLOSED.value)
def test_close_open_incident_existing_in_progress_incident_noop(self):
# existing in progress incident, noop
original_event, incident_type, key = self._create_event(severity=Severity.NONE)
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.IN_PROGRESS.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 0)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.status, Status.IN_PROGRESS.value)
def test_close_open_incident_existing_open_incident_and_machine_incident_noop(self):
# existing open incident and machine incident, noop
original_event, incident_type, key = self._create_event(severity=Severity.NONE)
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
existing_machine_incident = MachineIncident.objects.create(
incident=existing_incident,
serial_number=get_random_string(12),
status=Status.OPEN.value,
status_time=datetime.utcnow()
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 0)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.status, Status.OPEN.value)
existing_machine_incident.refresh_from_db()
self.assertEqual(existing_machine_incident.status, Status.OPEN.value)
def test_close_open_machine_incident_no_machine_incident_noop(self):
# no existing machine incident, noop
original_event, incident_type, key = self._create_event(severity=Severity.NONE, serial_number="87654321")
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
existing_machine_incident = MachineIncident.objects.create(
incident=existing_incident,
serial_number="12345678",
status=Status.OPEN.value,
status_time=datetime.utcnow()
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 0)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.status, Status.OPEN.value)
existing_machine_incident.refresh_from_db()
self.assertEqual(existing_machine_incident.status, Status.OPEN.value)
def test_close_open_machine_incident_and_parent(self):
# existing machine incident, open incident without other machine incidents, two status updates
original_event, incident_type, key = self._create_event(severity=Severity.NONE, serial_number="12345678")
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
existing_machine_incident = MachineIncident.objects.create(
incident=existing_incident,
serial_number="12345678",
status=Status.OPEN.value,
status_time=datetime.utcnow()
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 2)
machine_incident_event = events[0]
self.assertIsInstance(machine_incident_event, MachineIncidentStatusUpdatedEvent)
self.assertEqual(
machine_incident_event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(existing_incident.pk,)],
"machine_incident": [(existing_machine_incident.pk,)]}
)
self.assertEqual(machine_incident_event.payload["machine_incident"]["pk"], existing_machine_incident.pk)
self.assertEqual(machine_incident_event.payload["machine_incident"]["status"], Status.CLOSED.value)
self.assertEqual(machine_incident_event.payload["machine_incident"]["previous_status"]["status"],
Status.OPEN.value)
existing_machine_incident.refresh_from_db()
self.assertEqual(existing_machine_incident.status, Status.CLOSED.value)
incident_event = events[1]
self.assertIsInstance(incident_event, IncidentStatusUpdatedEvent)
self.assertEqual(
incident_event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(existing_incident.pk,)]}
)
self.assertEqual(incident_event.payload["pk"], existing_incident.pk)
self.assertEqual(incident_event.payload["status"], Status.CLOSED.value)
self.assertEqual(incident_event.payload["previous_status"]["status"], Status.OPEN.value)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.status, Status.CLOSED.value)
self.assertEqual(
original_event.metadata.objects,
{"yolo": [(17,)],
"incident": [(existing_incident.pk,)],
"machine_incident": [(existing_machine_incident.pk,)]}
)
def test_close_open_machine_incident_not_parent(self):
# existing machine incident, open incident with other machine incidents, one status updates
original_event, incident_type, key = self._create_event(severity=Severity.NONE, serial_number="12345678")
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
existing_machine_incident = MachineIncident.objects.create(
incident=existing_incident,
serial_number="12345678",
status=Status.OPEN.value,
status_time=datetime.utcnow()
)
existing_machine_incident2 = MachineIncident.objects.create(
incident=existing_incident,
serial_number="87654321",
status=Status.OPEN.value,
status_time=datetime.utcnow()
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 1)
machine_incident_event = events[0]
self.assertIsInstance(machine_incident_event, MachineIncidentStatusUpdatedEvent)
self.assertEqual(
machine_incident_event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(existing_incident.pk,)],
"machine_incident": [(existing_machine_incident.pk,)]}
)
self.assertEqual(
original_event.metadata.objects,
{"yolo": [(17,)],
"incident": [(existing_incident.pk,)],
"machine_incident": [(existing_machine_incident.pk,)]}
)
self.assertEqual(machine_incident_event.payload["machine_incident"]["pk"], existing_machine_incident.pk)
self.assertEqual(machine_incident_event.payload["machine_incident"]["status"], Status.CLOSED.value)
self.assertEqual(machine_incident_event.payload["machine_incident"]["previous_status"]["status"],
Status.OPEN.value)
existing_machine_incident.refresh_from_db()
self.assertEqual(existing_machine_incident.status, Status.CLOSED.value)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.status, Status.OPEN.value)
existing_machine_incident2.refresh_from_db()
self.assertEqual(existing_machine_incident2.status, Status.OPEN.value)
def test_open_machine_incident_noop(self):
# incident and machine incident already exist, noop
original_event, incident_type, key = self._create_event(serial_number="12345678")
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.CRITICAL.value
)
MachineIncident.objects.create(
incident=existing_incident,
serial_number="12345678",
status=Status.OPEN.value,
status_time=datetime.utcnow()
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 0)
self.assertEqual(Incident.objects.filter(incident_type=incident_type, key=key).count(), 1)
self.assertEqual(MachineIncident.objects.filter(incident__incident_type=incident_type,
incident__key=key).count(), 1)
def test_open_machine_incident_one_machine_incident_created_event(self):
# incident already exist, no machine incident, one MachineIncidentCreatedEvent
original_event, incident_type, key = self._create_event(serial_number="12345678")
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.CRITICAL.value
)
MachineIncident.objects.create(
incident=existing_incident,
serial_number="87654321",
status=Status.OPEN.value,
status_time=datetime.utcnow()
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 1)
event = events[0]
self.assertIsInstance(event, MachineIncidentCreatedEvent)
machine_incident = MachineIncident.objects.get(incident__incident_type=incident_type,
incident__key=key,
serial_number="12345678")
self.assertEqual(event.payload["machine_incident"]["pk"], machine_incident.pk)
self.assertEqual(
event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(existing_incident.pk,)],
"machine_incident": [(machine_incident.pk,)]}
)
self.assertEqual(
original_event.metadata.objects,
{"yolo": [(17,)],
"incident": [(existing_incident.pk,)],
"machine_incident": [(machine_incident.pk,)]}
)
self.assertEqual(MachineIncident.objects.filter(incident__incident_type=incident_type,
incident__key=key).count(), 2)
def test_open_machine_incident_one_incident_severity_updated_one_machine_incident_created(self):
# incident already exist with different severity, no machine incident,
# one IncidentSeverityUpdatedEvent, one MachineIncidentCreatedEvent
original_event, incident_type, key = self._create_event(serial_number="12345678")
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
MachineIncident.objects.create(
incident=existing_incident,
serial_number="87654321",
status=Status.OPEN.value,
status_time=datetime.utcnow()
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 2)
incident_event = events[0]
self.assertIsInstance(incident_event, IncidentSeverityUpdatedEvent)
self.assertEqual(
incident_event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(existing_incident.pk,)]}
)
self.assertEqual(incident_event.payload["previous_severity"], Severity.MAJOR.value)
self.assertEqual(incident_event.payload["severity"], Severity.CRITICAL.value)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.severity, Severity.CRITICAL.value)
machine_incident_event = events[1]
self.assertIsInstance(machine_incident_event, MachineIncidentCreatedEvent)
machine_incident = MachineIncident.objects.get(incident__incident_type=incident_type,
incident__key=key,
serial_number="12345678")
self.assertEqual(machine_incident_event.payload["machine_incident"]["pk"],
machine_incident.pk)
self.assertEqual(
machine_incident_event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(existing_incident.pk,)],
"machine_incident": [(machine_incident.pk,)]}
)
self.assertEqual(MachineIncident.objects.filter(incident__incident_type=incident_type,
incident__key=key).count(), 2)
self.assertEqual(
original_event.metadata.objects,
{"yolo": [(17,)],
"incident": [(existing_incident.pk,)],
"machine_incident": [(machine_incident.pk,)]}
)
def test_open_incident_and_machine_incident(self):
# incident already exist, with machine incident, but not open
# one IncidentCreatedEvent, one MachineIncidentCreatedEvent
original_event, incident_type, key = self._create_event(serial_number="12345678")
existing_incident = Incident.objects.create(
incident_type=incident_type,
key=key,
status=Status.CLOSED.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
existing_machine_incident = MachineIncident.objects.create(
incident=existing_incident,
serial_number="12345678",
status=Status.CLOSED.value,
status_time=datetime.utcnow()
)
events = apply_incident_updates(original_event)
self.assertEqual(len(events), 2)
incident_event = events[0]
self.assertIsInstance(incident_event, IncidentCreatedEvent)
new_incident = Incident.objects.get(pk=incident_event.payload["pk"])
self.assertEqual(
incident_event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(new_incident.pk,)]}
)
self.assertEqual(new_incident.incident_type, incident_type)
self.assertEqual(new_incident.key, key)
self.assertEqual(new_incident.status, Status.OPEN.value)
self.assertNotEqual(new_incident.pk, existing_incident.pk)
existing_incident.refresh_from_db()
self.assertEqual(existing_incident.status, Status.CLOSED.value)
self.assertEqual(Incident.objects.filter(incident_type=incident_type, key=key).count(), 2)
machine_incident_event = events[1]
self.assertIsInstance(machine_incident_event, MachineIncidentCreatedEvent)
new_machine_incident = MachineIncident.objects.get(incident__incident_type=incident_type,
incident__key=key,
status=Status.OPEN.value,
serial_number="12345678")
self.assertEqual(
machine_incident_event.metadata.objects,
{"yolo": [(17,)], # copied from the original event
"incident": [(new_incident.pk,)],
"machine_incident": [(new_machine_incident.pk,)]}
)
self.assertEqual(machine_incident_event.payload["machine_incident"]["pk"],
new_machine_incident.pk)
existing_machine_incident.refresh_from_db()
self.assertEqual(existing_machine_incident.status, Status.CLOSED.value)
self.assertEqual(MachineIncident.objects.filter(incident__incident_type=incident_type,
incident__key=key).count(), 2)
self.assertEqual(
original_event.metadata.objects,
{"yolo": [(17,)],
"incident": [(new_incident.pk,)],
"machine_incident": [(new_machine_incident.pk,)]}
)
def test_update_incident_status_noop(self):
incident = Incident.objects.create(
incident_type=get_random_string(12),
key={"key": get_random_string(12)},
status=Status.CLOSED.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
request = HttpRequest()
request.user = None
updated_incident, event = update_incident_status(incident, Status.IN_PROGRESS, request)
self.assertEqual(updated_incident, incident)
self.assertEqual(updated_incident.status, Status.CLOSED.value)
self.assertIsNone(event)
def test_update_incident_status_ok(self):
incident = Incident.objects.create(
incident_type=get_random_string(12),
key={"key": get_random_string(12)},
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
request = HttpRequest()
request.user = None
updated_incident, event = update_incident_status(incident, Status.IN_PROGRESS, request)
self.assertEqual(updated_incident, incident)
self.assertEqual(updated_incident.status, Status.IN_PROGRESS.value)
self.assertIsInstance(event, IncidentStatusUpdatedEvent)
self.assertEqual(event.payload["status"], Status.IN_PROGRESS.value)
self.assertEqual(event.payload["previous_status"]["status"], Status.OPEN.value)
self.assertEqual(
event.metadata.objects,
{"incident": [(incident.pk,)]}
)
def test_update_machine_incident_status_noop(self):
incident = Incident.objects.create(
incident_type=get_random_string(12),
key={"key": get_random_string(12)},
status=Status.CLOSED.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
machine_incident = MachineIncident.objects.create(
incident=incident,
serial_number="12345678",
status=Status.CLOSED.value,
status_time=datetime.utcnow(),
)
request = HttpRequest()
request.user = None
updated_machine_incident, event = update_machine_incident_status(
machine_incident, Status.IN_PROGRESS, request
)
self.assertEqual(updated_machine_incident, machine_incident)
self.assertEqual(updated_machine_incident.status, Status.CLOSED.value)
self.assertIsNone(event)
def test_update_machine_incident_status_ok(self):
incident = Incident.objects.create(
incident_type=get_random_string(12),
key={"key": get_random_string(12)},
status=Status.OPEN.value,
status_time=datetime.utcnow(),
severity=Severity.MAJOR.value
)
machine_incident = MachineIncident.objects.create(
incident=incident,
serial_number="12345678",
status=Status.OPEN.value,
status_time=datetime.utcnow(),
)
request = HttpRequest()
request.user = None
updated_machine_incident, event = update_machine_incident_status(
machine_incident, Status.IN_PROGRESS, request
)
self.assertEqual(updated_machine_incident, machine_incident)
self.assertEqual(updated_machine_incident.status, Status.IN_PROGRESS.value)
self.assertIsInstance(event, MachineIncidentStatusUpdatedEvent)
self.assertEqual(event.payload["machine_incident"]["status"], Status.IN_PROGRESS.value)
self.assertEqual(event.payload["machine_incident"]["previous_status"]["status"], Status.OPEN.value)
self.assertEqual(
event.metadata.objects,
{"incident": [(incident.pk,)],
"machine_incident": [(machine_incident.pk,)]}
)
| StarcoderdataPython |
239517 | <filename>tests/test_mcpython/common/block/test_Barrel.py
"""
mcpython - a minecraft clone written in python licenced under the MIT-licence
(https://github.com/mcpython4-coding/core)
Contributors: uuk, xkcdjerry (inactive)
Based on the game of fogleman (https://github.com/fogleman/Minecraft), licenced under the MIT-licence
Original game "minecraft" by Mojang Studios (www.minecraft.net), licenced under the EULA
(https://account.mojang.com/documents/minecraft_eula)
Mod loader inspired by "Minecraft Forge" (https://github.com/MinecraftForge/MinecraftForge) and similar
This project is not official by mojang and does not relate to it.
"""
from unittest import IsolatedAsyncioTestCase
from unittest import TestCase

from mcpython import shared
from pyglet.window import key, mouse

from test_mcpython.fakeHelpers import (
    FakeCraftingHandler,
    FakeInventoryHandler,
    FakeWorld,
)
class TestBarrel(IsolatedAsyncioTestCase):
    """Tests for the minecraft:barrel block.

    BUG FIX: this class previously derived from plain ``TestCase``; its
    ``async def`` test methods then returned un-awaited coroutines and
    passed vacuously. ``IsolatedAsyncioTestCase`` actually awaits them
    while still running the synchronous tests unchanged.
    """

    def test_module_import(self):
        """The Barrel module imports and registers the expected block name."""
        shared.crafting_handler = FakeCraftingHandler()

        import mcpython.common.block.Barrel

        self.assertEqual(mcpython.common.block.Barrel.Barrel.NAME, "minecraft:barrel")

    async def test_on_player_interaction(self):
        """Right click opens the barrel inventory; shift + right click does not."""
        shared.crafting_handler = FakeCraftingHandler()

        import mcpython.common.block.Barrel

        shared.inventory_handler = FakeInventoryHandler
        FakeInventoryHandler.SHOWN = False

        instance = mcpython.common.block.Barrel.Barrel()
        await instance.on_player_interaction(None, mouse.RIGHT, 0, None, None)
        self.assertTrue(FakeInventoryHandler.SHOWN)

        FakeInventoryHandler.SHOWN = False
        await instance.on_player_interaction(
            None, mouse.RIGHT, key.MOD_SHIFT, None, None
        )
        self.assertFalse(FakeInventoryHandler.SHOWN)

    def test_model_state_serialization(self):
        """Model state round-trips through set_model_state/get_model_state."""
        shared.crafting_handler = FakeCraftingHandler()

        import mcpython.common.block.Barrel

        shared.inventory_handler = FakeInventoryHandler

        instance = mcpython.common.block.Barrel.Barrel()
        state = instance.get_model_state()
        instance.set_model_state({"facing": "north", "open": "true"})
        self.assertNotEqual(state, instance.get_model_state())
        instance.set_model_state(state)
        self.assertEqual(state, instance.get_model_state())

    async def test_serializer(self):
        """Barrel inventory contents survive a network-buffer round trip."""
        from mcpython import shared
        from mcpython.common.container.ResourceStack import ItemStack
        from mcpython.common.item.AbstractItem import AbstractItem
        from mcpython.engine.network.util import ReadBuffer, WriteBuffer

        shared.IS_CLIENT = False

        @shared.registry
        class TestItem(AbstractItem):
            NAME = "minecraft:test_item"

        shared.crafting_handler = FakeCraftingHandler()

        import mcpython.common.block.Barrel

        shared.inventory_handler = FakeInventoryHandler
        FakeInventoryHandler.SHOWN = False

        instance = mcpython.common.block.Barrel.Barrel()
        await instance.inventory.init()
        instance.inventory.slots[0].set_itemstack(ItemStack(TestItem()))

        buffer = WriteBuffer()
        await instance.write_to_network_buffer(buffer)

        instance2 = mcpython.common.block.Barrel.Barrel()
        await instance2.inventory.init()
        await instance2.read_from_network_buffer(ReadBuffer(buffer.get_data()))

        self.assertEqual(
            instance2.inventory.slots[0].get_itemstack().get_item_name(),
            "minecraft:test_item",
        )
| StarcoderdataPython |
6561377 | # Copyright 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import base64
from utility.hex_utils import is_valid_hex_str
import crypto.crypto as crypto
from error_code.error_status import ReceiptCreateStatus, SignatureStatus,\
JRPCErrorCodes
import utility.signature as signature
from jsonrpc.exceptions import JSONRPCDispatchException
logger = logging.getLogger(__name__)
class TCSWorkOrderReceiptHandler:
"""
TCSWorkOrderReceiptHandler processes Work Order Receipt Direct API
requests. It reads appropriate work order information from the
KV storage to create the response.
Work order receipts are created and placed in the KV storage by the
SGX Enclave Manager after the work order (successfully) completed.
"""
# -----------------------------------------------------------------------------
    def __init__(self, kv_helper):
        """
        Initialize the receipt handler.

        Parameters:
            kv_helper: LMDB-backed key/value store used to read and
                persist work order requests, receipts and receipt updates.
        """
        self.kv_helper = kv_helper
        self.__workorder_receipt_on_boot()
        # Sentinel index a client passes to fetch the latest update to a
        # receipt (see WorkOrderReceiptUpdateRetrieve).
        # NOTE(review): 1 << 32 is 0x100000000, not 0xFFFFFFFF as the old
        # comment claimed -- confirm the intended sentinel against the spec.
        self.LAST_RECEIPT_INDEX = 1 << 32
        # Only hashing/signing algorithm pair accepted in "signatureRules".
        self.SIGNING_ALGORITHM = "SECP256K1"
        self.HASHING_ALGORITHM = "SHA-256"
    def __workorder_receipt_on_boot(self):
        """
        Perform boot-time processing of work order receipts.

        Currently a stub: the boot-time flow is not implemented yet.
        """
        # TODO: Boot time flow need to be implemented.
        pass
# -----------------------------------------------------------------------------
def WorkOrderReceiptCreate(self, **params):
"""
Function to process work order request
Parameters:
- params is variable-length arugment list containing work request
as defined in EEA spec 7.2.2
Returns jrpc response as defined in 4.1
"""
wo_id = params["workOrderId"]
input_json_str = params["raw"]
input_value = json.loads(input_json_str)
wo_request = self.kv_helper.get("wo-requests", wo_id)
if wo_request is None:
raise JSONRPCDispatchException(
JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
"Work order id does not exist, "
"hence invalid request"
)
else:
wo_receipt = self.kv_helper.get("wo-receipts", wo_id)
if wo_receipt is None:
status, err_msg = \
self.__validate_work_order_receipt_create_req(
input_value, wo_request)
if status is True:
self.kv_helper.set("wo-receipts", wo_id, input_json_str)
raise JSONRPCDispatchException(
JRPCErrorCodes.SUCCESS,
"Receipt created successfully"
)
else:
raise JSONRPCDispatchException(
JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
err_msg
)
else:
raise JSONRPCDispatchException(
JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
"Work order receipt already exists. " +
"Hence invalid parameter"
)
# -----------------------------------------------------------------------------
def __validate_work_order_receipt_create_req(self, wo_receipt_req,
wo_request):
"""
Function to validate the work order receipt create request parameters
Parameters:
- wo_receipt_req is work order receipt request as dictionary
- wo_request is string containing jrpc work order request
Returns - tuple containing validation status(Boolean) and
error message(string)
"""
# Valid parameters list
valid_params = [
"workOrderId", "workerServiceId", "workerId",
"requesterId", "receiptCreateStatus", "workOrderRequestHash",
"requesterGeneratedNonce", "requesterSignature", "signatureRules",
"receiptVerificationKey"]
for key in wo_receipt_req["params"]:
if key not in valid_params:
return False, "Missing parameter " + key + " in the request"
else:
if key in ["workOrderId", "workerServiceId", "workerId",
"requesterId", "requesterGeneratedNonce"]:
if not is_valid_hex_str(wo_receipt_req["params"][key]):
return False, "invalid data parameter for " + key
elif key in ["workOrderRequestHash", "requesterSignature"]:
try:
base64.b64decode(wo_receipt_req["params"][key])
except Exception as e:
return False, \
"Invalid data format for " + key
receipt_type = wo_receipt_req["params"]["receiptCreateStatus"]
try:
receipt_enum_type = ReceiptCreateStatus(receipt_type)
except Exception as err:
return False, "Invalid receipt status type {}: {}".format(
receipt_enum_type, str(err))
# Validate signing rules
signing_rules = wo_receipt_req["params"]["signatureRules"]
rules = signing_rules.split("/")
if len(rules) == 2 and (rules[0] != self.HASHING_ALGORITHM or
rules[1] != self.SIGNING_ALGORITHM):
return False, "Unsupported the signing rules"
signature_obj = signature.ClientSignature()
# Verify work order request is calculated properly or not.
wo_req_hash = \
signature_obj.calculate_request_hash(json.loads(wo_request))
if wo_req_hash != wo_receipt_req["params"]["workOrderRequestHash"]:
return False, "Work order request hash does not match"
# Verify requester signature with signing key in the request
status = signature_obj.verify_create_receipt_signature(wo_receipt_req)
if status != SignatureStatus.PASSED:
return False, "Receipt create requester signature does not match"
# If all parameters are verified in the request
return True, ""
# -----------------------------------------------------------------------------
def WorkOrderReceiptUpdate(self, **params):
"""
Function to process update work order request
Parameters:
- params is variable-length arugment list containing work request
as defined in EEA spec 7.2.3
Returns jrpc response as defined in 4.1
"""
wo_id = params["workOrderId"]
input_json_str = params["raw"]
input_value = json.loads(input_json_str)
# Check if receipt for work order id is created or not
value = self.kv_helper.get("wo-receipts", wo_id)
if value:
# Receipt is created, validate update receipt request
status, err_msg = self.__validate_work_order_receipt_update_req(
input_value)
if status is True:
# Load previous updates to receipt
updates_to_receipt = \
self.kv_helper.get("wo-receipt-updates", wo_id)
# If it is first update to receipt
if updates_to_receipt is None:
updated_receipt = []
else:
updated_receipt = json.loads(updates_to_receipt)
# Get last update to receipt
last_update = updated_receipt[len(updated_receipt) - 1]
if last_update["updateType"] == \
ReceiptCreateStatus.COMPLETED.value:
raise JSONRPCDispatchException(
JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
"Receipt update status is set to completed, "
"No further update allowed"
)
# If last update to receipt is processed then below status
# are invalid
if last_update["updateType"] == \
ReceiptCreateStatus.PROCESSED.value:
if input_value["params"]["updateType"] in [
ReceiptCreateStatus.PENDING.value,
ReceiptCreateStatus.FAILED.value,
ReceiptCreateStatus.REJECTED.value
]:
raise JSONRPCDispatchException(
JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
"Current receipt status is set to processed, "
"setting it to status " +
str(input_value["params"]["updateType"]) +
" is not allowed"
)
updated_receipt.append(input_value)
self.kv_helper.set("wo-receipt-updates", wo_id,
json.dumps(updated_receipt))
raise JSONRPCDispatchException(
JRPCErrorCodes.SUCCESS,
"Receipt updated successfully"
)
else:
# Receipt update request validation failed
raise JSONRPCDispatchException(
JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
err_msg
)
else:
# Receipt for the work order is not created yet
# Throw an exception
raise JSONRPCDispatchException(
JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
"Work order receipt with id {} is not created yet, "
"hence invalid parameter".format(
wo_id
)
)
# -----------------------------------------------------------------------------
def __validate_work_order_receipt_update_req(self, wo_receipt_req):
"""
Function to validate the work order receipt create request parameters
Parameters:
- wo_receipt_req is work order receipt request as dictionary
Returns - tuple containing validation status(Boolean) and error
message(string)
"""
valid_params = ["workOrderId", "updaterId", "updateType", "updateData",
"updateSignature", "signatureRules",
"receiptVerificationKey"]
for key in wo_receipt_req["params"]:
if key not in valid_params:
return False, "Missing parameter " + key + " in the request"
else:
if key in ["workOrderId", "updaterId"]:
if not is_valid_hex_str(wo_receipt_req[key]):
return False, "invalid data parameter for " + key
elif key in ["updateData", "updateSignature"]:
try:
base64.b64decode(wo_receipt_req[key])
except Exception:
return False, "Invalid data format for " + key
update_type = wo_receipt_req["params"]["updateType"]
try:
update_enum_value = ReceiptCreateStatus(update_type)
except Exception as err:
return False, "Invalid receipt update type {}: {}".format(
update_enum_value, str(err))
# If update type is completed or processed,
# it is a hash value of the Work Order Response
if wo_receipt_req["params"]["updateType"] in [
ReceiptCreateStatus.PROCESSED.value,
ReceiptCreateStatus.COMPLETED.value
]:
wo_id = wo_receipt_req["params"]["workOrderId"]
# Load the work order response and calculate it's hash
wo_resp = self.kv_helper.get("wo-responses", wo_id)
wo_resp_bytes = bytes(wo_resp, "UTF-8")
wo_resp_hash = crypto.compute_message_hash(wo_resp_bytes)
wo_resp_hash_str = crypto.byte_array_to_hex(wo_resp_hash)
if wo_resp_hash_str != wo_receipt_req["params"]["updateData"]:
return False, "Invalid Update data in the request"
# If all validation is pass
return True, ""
# -----------------------------------------------------------------------------
def __lookup_basics(self, is_lookup_next, params):
receipt_pool = self.kv_helper.lookup("wo-receipts")
total_count = 0
ids = []
lookupTag = ""
for wo_id in receipt_pool:
if is_lookup_next:
is_lookup_next = (wo_id != params["lastLookUpTag"])
continue
value = self.kv_helper.get("wo-receipts", wo_id)
if not value:
continue
criteria = ["workerServiceId",
"workerId", "requesterId", "requestCreateStatus"]
wo = json.loads(value)
matched = True
for c in criteria:
if c not in params:
continue
matched = (wo["params"][c] == params[c])
if not matched:
break
if matched:
total_count = total_count + 1
ids.append(wo_id)
lookupTag = wo_id
result = {
"totalCount": total_count,
"lookupTag": lookupTag,
"ids": ids,
}
return result
# -----------------------------------------------------------------------------
    def WorkOrderReceiptLookUp(self, **params):
        """
        Look up work order receipts matching the given criteria
        (EEA spec 7.2.8).

        Parameters:
            params: keyword arguments with optional filter criteria
                (e.g. "workerServiceId", "workerId", "requesterId").

        Returns:
            JRPC lookup response dict as defined in EEA spec 7.2.9.
        """
        return self.__lookup_basics(False, params)
# -----------------------------------------------------------------------------
    def WorkOrderReceiptLookUpNext(self, **params):
        """
        Look up work order receipts added after params["lastLookUpTag"]
        (EEA spec 7.2.10).

        Parameters:
            params: keyword arguments with "lastLookUpTag" plus optional
                filter criteria.

        Returns:
            JRPC lookup response dict as defined in EEA spec 7.2.9.
        """
        return self.__lookup_basics(True, params)
# -----------------------------------------------------------------------------
def WorkOrderReceiptRetrieve(self, **params):
"""
Function to retrieve the details of worker
Parameters:
- params is variable-length arugment list containing work order
receipt request request as defined in EEA spec 7.2.4
Returns jrpc response as defined in 7.2.5
"""
wo_id = params["workOrderId"]
value = self.kv_helper.get("wo-receipts", wo_id)
if value:
receipt = json.loads(value)
receipt_updates = self.kv_helper.get("wo-receipt-updates", wo_id)
if receipt_updates is None:
receipt["params"]["receiptCurrentStatus"] = \
receipt["params"]["receiptCreateStatus"]
else:
receipt_updates_json = json.loads(receipt_updates)
# Get the recent update to receipt
last_receipt = receipt_updates_json[len(receipt_updates_json)
- 1]
receipt["params"]["receiptCurrentStatus"] = \
last_receipt["updateType"]
return receipt["params"]
else:
raise JSONRPCDispatchException(
JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
"Work order receipt for work order id {} not found in the "
"database. Hence invalid parameter".format(
wo_id
))
# -----------------------------------------------------------------------------
    def WorkOrderReceiptUpdateRetrieve(self, **params):
        """
        Retrieve one update made to a work order receipt.

        Parameters:
        - params: keyword arguments containing the work order receipt
          update retrieve request as defined in EEA spec 7.2.6; "raw"
          carries the original jrpc request string, which is re-parsed
          here for "updaterId" and the 1-based "updateIndex".
        Returns:
            Jrpc response as defined in EEA spec 7.2.7: the selected
            update record with "updateCount" set to the total number of
            updates recorded for the receipt.
        Raises:
            JSONRPCDispatchException: if the receipt has no updates, the
            index is out of range, or the updater id does not match.
        """
        wo_id = params["workOrderId"]
        # Re-parse the raw jrpc request to reach fields not passed as
        # top-level keyword arguments.
        input_json_str = params["raw"]
        input_json = json.loads(input_json_str)
        input_params = input_json["params"]
        updater_id = None
        if "updaterId" in input_params and input_params["updaterId"]:
            updater_id = input_params["updaterId"]
        # update_index is index to fetch the particular update
        # starts from 1
        update_index = input_params["updateIndex"]
        # Load list of updates to the receipt
        receipt_updates = self.kv_helper.get("wo-receipt-updates", wo_id)
        if receipt_updates:
            receipt_updates_json = json.loads(receipt_updates)
            total_updates = len(receipt_updates_json)
            if update_index <= 0:
                raise JSONRPCDispatchException(
                    JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
                    "Update index should be positive non-zero number."
                    " Hence invalid parameter")
            elif update_index > total_updates:
                # NOTE(review): self.LAST_RECEIPT_INDEX appears to be a
                # sentinel meaning "fetch the most recent update" --
                # its actual value is defined outside this view; confirm.
                if update_index == self.LAST_RECEIPT_INDEX:
                    # set to the index of last update to receipt
                    update_index = total_updates - 1
                else:
                    raise JSONRPCDispatchException(
                        JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
                        "Update index is larger than total update count."
                        " Hence invalid parameter")
            else:
                # If the index is less than total updates
                # then decrement by one since it is zero based array
                update_index = update_index - 1
            update_to_receipt = receipt_updates_json[update_index]
            # If updater id is present then check whether it matches
            if updater_id:
                if update_to_receipt["updaterId"] != updater_id:
                    raise JSONRPCDispatchException(
                        JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
                        "Update index and updater id doesn't match"
                        " Hence invalid parameter")
            update_to_receipt["updateCount"] = total_updates
            return update_to_receipt
        else:
            raise JSONRPCDispatchException(
                JRPCErrorCodes.INVALID_PARAMETER_FORMAT_OR_VALUE,
                "There is no updates available to this receipt"
                " Hence invalid parameter")
| StarcoderdataPython |
12810370 | from scipy.optimize import minimize
from .measure.measure_sample import dst,hst,dst_source
from .measure.measure_sim import sep_purity, fid_ref, c_entropy
def vcirc_test(
        x,
        statein,
        vcirc,
        test_func=sep_purity,
        ansatz_li=None,
        update=False,
        *args,
        **kwargs):
    """
    Apply the variational circuit (with parameters ``x``) to ``statein``
    and evaluate ``test_func`` on the resulting state.

    Extra positional/keyword arguments are forwarded to ``test_func``.
    Raises ValueError when ``statein`` is not a state on the circuit's
    qubit register.
    """
    vcirc.update_ansatzes(x, ansatz_li)
    n_qubits = vcirc.N
    if statein.dims[0] != [2] * n_qubits:
        raise ValueError("Invalid input state, must be state on %s qubits system." % n_qubits)
    stateout = vcirc.apply_to(statein, update)
    return test_func(stateout, *args, **kwargs)
def __vcirc_test_neg(
        x,
        statein,
        vcirc,
        test_func=sep_purity,
        ansatz_li=None,
        update=False,
        *args,
        **kwargs):
    """
    Negated version of :func:`vcirc_test`, used as the objective for
    maximization via ``scipy.optimize.minimize``.

    Previously this duplicated the whole body of ``vcirc_test``
    (parameter update, dimension check, circuit application); it now
    delegates so the two can never drift apart.
    """
    return -vcirc_test(x, statein, vcirc, test_func, ansatz_li, update,
                       *args, **kwargs)
def circ_minimize(
        x0,
        statein,
        vcirc,
        test_func=sep_purity,
        *args,
        ansatz_li=None,
        update=False,
        opt_method="BFGS",
        jac=None, hess=None, hessp=None, bounds=None,
        constraints=(), tol=None, callback=None, options=None):
    """
    Minimize ``test_func`` over the circuit parameters, starting at
    ``x0``, by driving :func:`vcirc_test` through scipy's ``minimize``.

    Extra positional ``args`` are forwarded to ``test_func``; the
    remaining keyword arguments mirror ``scipy.optimize.minimize``.
    Returns the scipy ``OptimizeResult``.
    """
    objective_args = (statein, vcirc, test_func, ansatz_li, update) + args
    return minimize(vcirc_test, x0, objective_args, opt_method,
                    jac, hess, hessp, bounds, constraints, tol,
                    callback, options)
def circ_maximize(
        x0,
        statein,
        vcirc,
        test_func=sep_purity,
        *args,
        ansatz_li=None,
        update=False,
        opt_method="BFGS",
        jac=None, hess=None, hessp=None, bounds=None,
        constraints=(), tol=None, callback=None, options=None):
    """
    Maximize ``test_func`` over the circuit parameters by minimizing its
    negation (:func:`__vcirc_test_neg`) with scipy's ``minimize``.

    The returned ``OptimizeResult`` has ``fun`` flipped back to the
    maximized (positive) objective value.
    """
    objective_args = (statein, vcirc, test_func, ansatz_li, update) + args
    result = minimize(__vcirc_test_neg, x0, objective_args, opt_method,
                      jac, hess, hessp, bounds, constraints, tol,
                      callback, options)
    # Undo the sign flip introduced by the negated objective.
    result.fun = -result.fun
    return result
11290678 | <reponame>boweyism/devops-for-data
from src.jobs.word_count import get_keyval, run
def test_get_keyval():
    """get_keyval maps every word to a [word, 1] pair, one per occurrence."""
    words = ['this', 'are', 'words', 'words']
    expected = [['this', 1], ['are', 1], ['words', 1], ['words', 1]]
    assert get_keyval(words) == expected
def test_word_count_run(spark_session):
    """End-to-end check: run() over the fixture CSV yields the expected counts."""
    job_conf = {
        'relative_path': 'test/jobs',
        'words_file_path': '/word_count/resources/word_count.csv'
    }
    actual = sorted(run(spark_session, job_conf))
    expected = sorted([('one', 1), ('two', 1), ('three', 2), ('four', 2), ('test', 1)])
    assert actual == expected
| StarcoderdataPython |
11284422 | <reponame>Nic30/libEveryPacketClassifier<filename>tests/benchmark_graph_gen.py
import json
from os import listdir
import os
from typing import List, Tuple
import matplotlib.pyplot as plt
from tests.constants import RULESET_ROOT
# @lru_cache(maxsize=512)
def get_real_rule_cnt(ruleset: str, rule_cnt: str):
    """
    Return the actual number of rules in a ruleset file.

    The nominal rule count embedded in the file name is only a label;
    this counts the lines of ``RULESET_ROOT/<ruleset>_<rule_cnt>``.

    :param ruleset: ruleset base name as used in result-file names
    :param rule_cnt: nominal rule count string from the file name
    :return: number of lines in the ruleset file
    """
    with open(os.path.join(RULESET_ROOT, f'{ruleset:s}_{rule_cnt:s}')) as f:
        # Stream the file instead of materializing every line in a list
        # just to take its length.
        return sum(1 for _ in f)
def split_result_name(name):
    """
    Decompose a result-file name ``<alg>_<ruleset>_<nominal_cnt>_t<threads>``.

    The ruleset part may itself contain underscores.  Returns the tuple
    ``(alg, ruleset, nominal_rule_cnt, thread_cnt)`` with ``thread_cnt``
    converted to int.
    """
    parts = name.split("_")
    alg = parts[0]
    ruleset = "_".join(parts[1:-2])
    nominal_rule_cnt = parts[-2]
    thread_token = parts[-1]
    # The last component must be of the form t<N>.
    assert thread_token.startswith('t'), thread_token
    return alg, ruleset, nominal_rule_cnt, int(thread_token[1:])
def load_data(data_dir, algs, ruleset_files, thread_cnts: List[int]):
    """
    Load JSON benchmark results from ``data_dir``.

    One result file is expected per (algorithm, ruleset file, thread
    count) combination, named ``<alg>_<ruleset-basename>_t<threads>``;
    missing files only produce a warning.  Returns a list of tuples
    ``(alg, ruleset, nominal_rule_cnt, rule_cnt, thread_cnt, results)``
    where ``results`` is the parsed JSON document.
    """
    expected_names = set()
    for ruleset_file in ruleset_files:
        base = os.path.basename(ruleset_file)
        for alg in algs:
            for threads in thread_cnts:
                expected_names.add(f"{alg}_{base}_t{threads:d}")
    present = listdir(data_dir)
    data = []
    for fname in expected_names:
        if fname not in present:
            print(f"[warning] Missing {fname}")
            continue
        alg, ruleset, nominal_rule_cnt, thread_cnt = split_result_name(fname)
        rule_cnt = get_real_rule_cnt(ruleset, nominal_rule_cnt)
        with open(os.path.join(data_dir, fname)) as fp:
            results = json.load(fp)
        data.append((alg, ruleset, nominal_rule_cnt, rule_cnt, thread_cnt, results))
    return data
def load_likwid_data(data_dir, algs: List[str], ruleset_files: List[str], counter_groups: List[str], thread_cnts:List[int]):
    """
    Load likwid performance-counter CSV exports from ``data_dir``.

    One CSV is expected per (algorithm, ruleset file, thread count,
    counter group), named ``<alg>_<ruleset>_t<threads>.<group>.csv``;
    missing files only produce a warning.  Returns a list of tuples
    ``(alg, ruleset, counter_group, nominal_rule_cnt, rule_cnt,
    thread_cnt, results)`` where ``results`` is the CSV content as a
    list of rows (each a list of comma-split, stripped strings).
    """
    # Build the set of file names we expect to find.
    result_files = set()
    for f in ruleset_files:
        f = os.path.basename(f)
        for a in algs:
            for cg in counter_groups:
                for thread_cnt in thread_cnts:
                    result_files.add(f"{a:s}_{f:s}_t{thread_cnt:d}.{cg:s}.csv")
    data = []
    existing_files = listdir(data_dir)
    for f in result_files:
        if f not in existing_files:
            print(f"[warning] missing {f:s}")
            continue
        f_path = os.path.join(data_dir, f)
        # if not isfile(f_path) or f_path.endswith(".png"):
        #     continue
        # File name has exactly three dot-separated parts:
        # <alg>_<ruleset>_<cnt>_t<threads> . <counter group> . csv
        f_split0 = f.split(".")
        assert len(f_split0) == 3 and f_split0[2] == "csv", f_split0
        f, counter_group, _ = f_split0
        alg, ruleset, nominal_rule_cnt, thread_cnt = split_result_name(f)
        rule_cnt = get_real_rule_cnt(ruleset, nominal_rule_cnt)
        # Parse the CSV by hand into a list of string rows.
        with open(f_path) as fp:
            results = []
            for line in fp:
                line = line.strip().split(",")
                results.append(line)
        data.append((alg, ruleset, counter_group, nominal_rule_cnt, rule_cnt, thread_cnt, results))
    return data
def get_rulset_name(ruleset_info):
    """
    Render a short ruleset label such as ``acl1_10K`` from a series item
    whose first element is ``(ruleset, nominal_rule_cnt, ...)``.

    A trailing ``000`` in the nominal count is abbreviated to ``K``.
    """
    key = ruleset_info[0]
    size = key[1]
    if size.endswith("000"):
        size = size[:-3] + "K"
    return "%s_%s" % (key[0], size)
def generate_graphs(result_dir, algs, ruleset_files, key,
                    title, filename, ylabel, xlabel, thread_cnts=(1,)):
    """
    Plot ``results[key]`` versus ruleset size, one figure per algorithm.

    :param result_dir: directory holding the result files; figures are
        saved there too
    :param key: key into each result JSON document; values are floats
    :param title: figure title template, formatted with ``alg=``
    :param filename: output file name template, formatted with ``alg=``
    :param thread_cnts: thread counts to load results for (new,
        defaulted parameter -- see note below)

    Bug fix: ``load_data`` requires a ``thread_cnts`` argument and yields
    6-tuples including the thread count; the previous code passed only
    three arguments and unpacked five values, so this function always
    raised at runtime.
    """
    # {alg_name: {(ruleset, nominal_cnt, real_cnt): [values]}}
    data = {}
    for alg, ruleset, nominal_rule_cnt, rule_cnt, _thread_cnt, results in load_data(
            result_dir, algs, ruleset_files, list(thread_cnts)):
        val = float(results[key])
        k = (ruleset, nominal_rule_cnt, rule_cnt)
        data.setdefault(alg, {}).setdefault(k, []).append(val)
    # for each alg plot dependency on rule count
    for alg, sizes in data.items():
        fig1, ax1 = plt.subplots()
        size_series = sorted(sizes.items(), key=lambda item: item[0][2])
        ax1.set_title(title.format(alg=alg))
        x = [get_rulset_name(s) for s in size_series]
        y = [s[1] for s in size_series]
        ax1.plot(x, y, 'o', label=alg, marker="X")
        plt.xlabel(xlabel)
        plt.ylabel(ylabel)
        plt.grid()
        fig1.savefig(os.path.join(result_dir, filename.format(alg=alg)))
        plt.close(fig1)
def generate_summary_graph(result_dir, algs, ruleset_files, key, title,
                           filename, ylabel, xlabel, y_map, y_log_scale, figsize,
                           thread_cnts=(1,)):
    """
    Plot ``results[key]`` versus ruleset size for all algorithms in a
    single summary figure.

    :param y_map: mapping applied to the first recorded value per series
        point before plotting
    :param y_log_scale: if True, use a logarithmic y axis
    :param figsize: matplotlib figure size in inches
    :param thread_cnts: thread counts to load results for (new,
        defaulted parameter -- see note below)

    Bug fix: ``load_data`` requires a ``thread_cnts`` argument and yields
    6-tuples including the thread count; the previous code passed only
    three arguments and unpacked five values, so this function always
    raised at runtime.
    """
    # {alg_name: {(ruleset, nominal_cnt, real_cnt): [values]}}
    data = {}
    for alg, ruleset, nominal_rule_cnt, rule_cnt, _thread_cnt, results in load_data(
            result_dir, algs, ruleset_files, list(thread_cnts)):
        val = float(results[key])
        k = (ruleset, nominal_rule_cnt, rule_cnt)
        data.setdefault(alg, {}).setdefault(k, []).append(val)
    # One shared figure: a point series per algorithm over ruleset sizes.
    fig1, ax1 = plt.subplots(figsize=figsize, dpi=80)
    if title is not None:
        ax1.set_title(title)
    if y_log_scale:
        ax1.set_yscale("log")
    for alg, sizes in data.items():
        size_series = sorted(sizes.items(), key=lambda item: item[0][2])
        x = [get_rulset_name(s) for s in size_series]
        y = [y_map(s[1][0]) for s in size_series]
        ax1.plot(x, y, 'o', label=alg, marker="X")
        plt.xticks(x, x, rotation='vertical')
    ax1.set_ylim(ymin=0)
    plt.margins(0.2)
    # Tweak spacing to prevent clipping of tick-labels
    plt.subplots_adjust(bottom=0.25)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    ax1.legend(loc='best', shadow=True, fancybox=True)
    plt.grid()
    fig1.savefig(os.path.join(result_dir, filename))
    plt.close(fig1)
class GraphGen():
    """
    Convenience wrapper binding a result directory, the ruleset file
    list and the algorithm list, and forwarding to the module-level
    plotting helpers with those values pre-filled.
    """

    def _add_alg_labels(self, ax1, algs):
        """Label boxplot positions 1..len(algs) with the algorithm names."""
        x = list(range(1, len(algs) + 1))
        ax1.xaxis.set_ticks(x)
        ax1.xaxis.set_ticklabels(algs)

    def __init__(self, RESULT_DIR, RULESET_FILES, ALGS):
        # Stored verbatim and used by every generate_* method below.
        self.RESULT_DIR = RESULT_DIR
        self.RULESET_FILES = RULESET_FILES
        self.ALGS = ALGS

    def generate_graphs(self, key: str, title: str, filename: str, ylabel: str, xlabel: str):
        """Per-algorithm scatter plots of ``key``; see module-level generate_graphs."""
        # NOTE(review): the module-level generate_graphs calls load_data,
        # which requires a thread_cnts argument and yields 6-tuples; the
        # signatures do not line up as written -- confirm against the
        # module-level helpers.
        generate_graphs(self.RESULT_DIR, self.ALGS, self.RULESET_FILES,
                        key, title, filename, ylabel, xlabel)

    def generate_summary_graph(self, key, title, filename, ylabel,
                               xlabel, y_map=lambda y: y, y_log_scale=False,
                               figsize=(8, 6)):
        """One summary figure of ``key`` for all algorithms; see module level."""
        generate_summary_graph(self.RESULT_DIR, self.ALGS, self.RULESET_FILES,
                               key, title, filename, ylabel, xlabel, y_map,
                               y_log_scale, figsize)

    def generate_summary_grap_box_plot(self, key, title, filename, ylabel,
                                       xlabel,
                                       quantization: List[Tuple[int, int, str]],
                                       thread_cnts:List[int],
                                       y_map=lambda y: y,
                                       y_log_scale=False, figsize=(8, 6)):
        """
        Box-plot ``results[key]`` per (algorithm, thread count), one
        figure per quantization bucket.

        :param quantization: list of ``(min_rule_cnt, max_rule_cnt,
            file-suffix)`` buckets; each bucket yields one figure named
            ``<filename>.<suffix>.png``
        :param y_map: mapping applied to the first value of each series
            point before plotting
        """
        result_dir = self.RESULT_DIR
        algs = self.ALGS
        # {alg_name: {number_of_rule: sizes}}
        data = {}
        for alg, ruleset, nominal_rule_cnt, rule_cnt, thread_cnt, results in load_data(
                result_dir, algs, self.RULESET_FILES, thread_cnts):
            try:
                alg_d = data[alg]
            except KeyError:
                alg_d = {}
                data[alg] = alg_d
            val = results[key]
            val = float(val)
            k = (ruleset, nominal_rule_cnt, rule_cnt, thread_cnt)
            try:
                vals = alg_d[k]
            except KeyError:
                vals = []
                alg_d[k] = vals
            vals.append(val)
        for quantum in quantization:
            # for each alg plot dependency on rule count
            fig1, ax1 = plt.subplots(figsize=figsize, dpi=80)
            if title is not None:
                ax1.set_title(title)
            if y_log_scale:
                ax1.set_yscale("log")
            algs = []
            _data = []
            for alg, sizes in data.items():
                for thread_cnt in thread_cnts:
                    # Keep only series for this thread count whose real
                    # rule count falls inside the current bucket.
                    size_series = list(sorted(((k, v) for k, v in sizes.items()
                                               if k[-1] == thread_cnt and
                                               k[2] >= quantum[0] and
                                               k[2] < quantum[1]), \
                                              key=lambda x: x[0][2]))
                    # x = [get_rulset_name(s) for s in size_series]
                    y = [y_map(s[1][0]) for s in size_series]
                    algs.append(f"{alg:s} t{thread_cnt:d}")
                    _data.append(y)
                    # ax1.plot(x, y, 'o', label=alg, marker="X")
            # plt.xticks(x, x, rotation='vertical')
            ax1.boxplot(_data)
            self._add_alg_labels(ax1, algs)
            # plt.margins(0.2)
            # Tweak spacing to prevent clipping of tick-labels
            # plt.subplots_adjust(bottom=0.25)
            # ax1.tick_params(axis='x', which='major', pad=15)
            plt.xlabel(xlabel)
            plt.ylabel(ylabel)
            # ax1.locator_params(axis="y", nbins=10)
            # plt.locator_params(axis='x', nbins=len(algs))
            #ax1.legend(loc='best', shadow=True, fancybox=True)
            plt.grid()
            fig1.savefig(os.path.join(result_dir, f"{filename:s}.{quantum[2]:s}.png"))
            plt.close(fig1)

    def generate_likwid_summary_grap_box_plot(self, sub_table, group, row_label, title, filename, ylabel,
                                              xlabel,
                                              quantization: List[Tuple[int, int, str]],
                                              thread_cnts,
                                              y_map=lambda y: y,
                                              y_log_scale=False, figsize=(8, 6)):
        """
        Box-plot a single likwid counter value per (algorithm, thread
        count), one figure per quantization bucket.

        :param sub_table: name of the likwid "TABLE" section to scan
        :param group: likwid counter group to keep (others are skipped)
        :param row_label: first-column label of the row whose second
            column is plotted
        :param quantization: list of ``(min_rule_cnt, max_rule_cnt,
            file-suffix)`` buckets, one output figure each
        """
        result_dir = self.RESULT_DIR
        algs = self.ALGS
        ruleset_files = self.RULESET_FILES
        # {alg_name: {number_of_rule: sizes}}
        data = {}
        for alg, ruleset, counter_group, nominal_rule_cnt, rule_cnt, thread_cnt, results in load_likwid_data(
                result_dir, algs, ruleset_files, ["CYCLE_STALLS", "MEM_DP"], thread_cnts):
            if group != counter_group:
                continue
            try:
                alg_d = data[alg]
            except KeyError:
                alg_d = {}
                data[alg] = alg_d
            if not results:
                print("[WARNING] no data for ", alg, ruleset, counter_group, nominal_rule_cnt)
                # no data available the test crashed
                continue
            # Scan for the requested sub-table, then within it for the
            # requested row; its second column is the plotted value.
            _results = iter(results)
            val = None
            for row in _results:
                if row[0] == "TABLE" and row[1] == sub_table:
                    for row in _results:
                        if row[0] == row_label:
                            val = row[1]
                            break
                    break
            assert val is not None, (alg, ruleset, counter_group, nominal_rule_cnt)
            if val == "-":
                # likwid prints "-" when the counter was not measured.
                print("[WARNING] unknown value for ", alg, ruleset, counter_group, nominal_rule_cnt)
                continue
            val = float(val)
            k = (ruleset, nominal_rule_cnt, rule_cnt, thread_cnt)
            try:
                vals = alg_d[k]
            except KeyError:
                vals = []
                alg_d[k] = vals
            vals.append(val)
        for quantum in quantization:
            # for each alg plot dependency on rule count
            fig1, ax1 = plt.subplots(figsize=figsize, dpi=80)
            if title is not None:
                ax1.set_title(title)
            if y_log_scale:
                ax1.set_yscale("log")
            algs = []
            _data = []
            for alg, sizes in data.items():
                for thread_cnt in thread_cnts:
                    # Same bucket filter as generate_summary_grap_box_plot.
                    size_series = list(sorted(((k, v) for k, v in sizes.items()
                                               if k[-1] == thread_cnt and
                                               k[2] >= quantum[0] and
                                               k[2] < quantum[1]), \
                                              key=lambda x: x[0][2]))
                    # x = [get_rulset_name(s) for s in size_series]
                    y = [y_map(s[1][0]) for s in size_series]
                    algs.append(f"{alg} t{thread_cnt:d}")
                    _data.append(y)
                    # ax1.plot(x, y, 'o', label=alg, marker="X")
            # plt.xticks(x, x, rotation='vertical')
            ax1.boxplot(_data)
            self._add_alg_labels(ax1, algs)
            # ax1.xticks(range(len(algs)), algs)
            # ax1.set_ylim(ymin=0)
            # plt.margins(0.2)
            # Tweak spacing to prevent clipping of tick-labels
            # plt.subplots_adjust(bottom=0.25)
            # ax1.tick_params(axis='x', which='major', pad=15)
            plt.xlabel(xlabel)
            plt.ylabel(ylabel)
            #ax1.legend(loc='best', shadow=True, fancybox=True)
            plt.grid()
            fig1.savefig(os.path.join(result_dir, f"{filename:s}.{quantum[2]:s}.png"))
            plt.close(fig1)
| StarcoderdataPython |
# 24. Swap Nodes in Pairs
# Given a linked list, swap every two adjacent nodes and return the head
# of the resulting list.
# You may not simply change the values inside the nodes; the nodes
# themselves must be swapped.
# Example:
# Given 1->2->3->4, you should return 2->1->4->3.
# Definition for singly-linked list.
class ListNode:
    """A singly-linked list node: a payload value plus a next pointer."""

    def __init__(self, x):
        self.val = x      # payload
        self.next = None  # successor, or None at the tail


class Solution:
    def swapPairs(self, head: ListNode) -> ListNode:
        """
        Swap every two adjacent nodes of a singly-linked list and return
        the new head (LeetCode 24).

        Nodes are relinked, never copied; a trailing odd node stays in
        place.  Returns None for an empty list.
        """
        # Dummy node in front of the list so the head pair needs no
        # special casing.
        dummy = ListNode(0)
        dummy.next = head
        prev = dummy
        # Invariant: prev is the last node of the already-swapped prefix.
        while prev.next is not None and prev.next.next is not None:
            first = prev.next
            second = first.next
            # Relink (prev -> first -> second -> rest) into
            # (prev -> second -> first -> rest).
            first.next = second.next
            second.next = first
            prev.next = second
            prev = first
        return dummy.next
| StarcoderdataPython |
220229 | from raw_preprocessing.preprocessor import run_parse
if __name__ == '__main__':
    # Script entry point: kick off the raw-data preprocessing.
    # NOTE(review): the meaning of the literal 10 is defined by
    # run_parse (not visible here) -- presumably a worker or batch
    # count; confirm before changing.
    run_parse(10)
6702647 | # coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from . import _utilities
import typing
# Export this package's modules as members:
from .kustomize import *
from .provider import *
from .yaml import *
# Make subpackages available:
if typing.TYPE_CHECKING:
import pulumi_kubernetes.admissionregistration as __admissionregistration
admissionregistration = __admissionregistration
import pulumi_kubernetes.apiextensions as __apiextensions
apiextensions = __apiextensions
import pulumi_kubernetes.apiregistration as __apiregistration
apiregistration = __apiregistration
import pulumi_kubernetes.apps as __apps
apps = __apps
import pulumi_kubernetes.auditregistration as __auditregistration
auditregistration = __auditregistration
import pulumi_kubernetes.authentication as __authentication
authentication = __authentication
import pulumi_kubernetes.authorization as __authorization
authorization = __authorization
import pulumi_kubernetes.autoscaling as __autoscaling
autoscaling = __autoscaling
import pulumi_kubernetes.batch as __batch
batch = __batch
import pulumi_kubernetes.certificates as __certificates
certificates = __certificates
import pulumi_kubernetes.coordination as __coordination
coordination = __coordination
import pulumi_kubernetes.core as __core
core = __core
import pulumi_kubernetes.discovery as __discovery
discovery = __discovery
import pulumi_kubernetes.events as __events
events = __events
import pulumi_kubernetes.extensions as __extensions
extensions = __extensions
import pulumi_kubernetes.flowcontrol as __flowcontrol
flowcontrol = __flowcontrol
import pulumi_kubernetes.helm as __helm
helm = __helm
import pulumi_kubernetes.meta as __meta
meta = __meta
import pulumi_kubernetes.networking as __networking
networking = __networking
import pulumi_kubernetes.node as __node
node = __node
import pulumi_kubernetes.policy as __policy
policy = __policy
import pulumi_kubernetes.rbac as __rbac
rbac = __rbac
import pulumi_kubernetes.scheduling as __scheduling
scheduling = __scheduling
import pulumi_kubernetes.settings as __settings
settings = __settings
import pulumi_kubernetes.storage as __storage
storage = __storage
else:
admissionregistration = _utilities.lazy_import('pulumi_kubernetes.admissionregistration')
apiextensions = _utilities.lazy_import('pulumi_kubernetes.apiextensions')
apiregistration = _utilities.lazy_import('pulumi_kubernetes.apiregistration')
apps = _utilities.lazy_import('pulumi_kubernetes.apps')
auditregistration = _utilities.lazy_import('pulumi_kubernetes.auditregistration')
authentication = _utilities.lazy_import('pulumi_kubernetes.authentication')
authorization = _utilities.lazy_import('pulumi_kubernetes.authorization')
autoscaling = _utilities.lazy_import('pulumi_kubernetes.autoscaling')
batch = _utilities.lazy_import('pulumi_kubernetes.batch')
certificates = _utilities.lazy_import('pulumi_kubernetes.certificates')
coordination = _utilities.lazy_import('pulumi_kubernetes.coordination')
core = _utilities.lazy_import('pulumi_kubernetes.core')
discovery = _utilities.lazy_import('pulumi_kubernetes.discovery')
events = _utilities.lazy_import('pulumi_kubernetes.events')
extensions = _utilities.lazy_import('pulumi_kubernetes.extensions')
flowcontrol = _utilities.lazy_import('pulumi_kubernetes.flowcontrol')
helm = _utilities.lazy_import('pulumi_kubernetes.helm')
meta = _utilities.lazy_import('pulumi_kubernetes.meta')
networking = _utilities.lazy_import('pulumi_kubernetes.networking')
node = _utilities.lazy_import('pulumi_kubernetes.node')
policy = _utilities.lazy_import('pulumi_kubernetes.policy')
rbac = _utilities.lazy_import('pulumi_kubernetes.rbac')
scheduling = _utilities.lazy_import('pulumi_kubernetes.scheduling')
settings = _utilities.lazy_import('pulumi_kubernetes.settings')
storage = _utilities.lazy_import('pulumi_kubernetes.storage')
_utilities.register(
resource_modules="""
[
{
"pkg": "kubernetes",
"mod": "admissionregistration.k8s.io/v1",
"fqn": "pulumi_kubernetes.admissionregistration.v1",
"classes": {
"kubernetes:admissionregistration.k8s.io/v1:MutatingWebhookConfiguration": "MutatingWebhookConfiguration",
"kubernetes:admissionregistration.k8s.io/v1:MutatingWebhookConfigurationList": "MutatingWebhookConfigurationList",
"kubernetes:admissionregistration.k8s.io/v1:ValidatingWebhookConfiguration": "ValidatingWebhookConfiguration",
"kubernetes:admissionregistration.k8s.io/v1:ValidatingWebhookConfigurationList": "ValidatingWebhookConfigurationList"
}
},
{
"pkg": "kubernetes",
"mod": "admissionregistration.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.admissionregistration.v1beta1",
"classes": {
"kubernetes:admissionregistration.k8s.io/v1beta1:MutatingWebhookConfiguration": "MutatingWebhookConfiguration",
"kubernetes:admissionregistration.k8s.io/v1beta1:MutatingWebhookConfigurationList": "MutatingWebhookConfigurationList",
"kubernetes:admissionregistration.k8s.io/v1beta1:ValidatingWebhookConfiguration": "ValidatingWebhookConfiguration",
"kubernetes:admissionregistration.k8s.io/v1beta1:ValidatingWebhookConfigurationList": "ValidatingWebhookConfigurationList"
}
},
{
"pkg": "kubernetes",
"mod": "apiextensions.k8s.io/v1",
"fqn": "pulumi_kubernetes.apiextensions.v1",
"classes": {
"kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinition": "CustomResourceDefinition",
"kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinitionList": "CustomResourceDefinitionList"
}
},
{
"pkg": "kubernetes",
"mod": "apiextensions.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.apiextensions.v1beta1",
"classes": {
"kubernetes:apiextensions.k8s.io/v1beta1:CustomResourceDefinition": "CustomResourceDefinition",
"kubernetes:apiextensions.k8s.io/v1beta1:CustomResourceDefinitionList": "CustomResourceDefinitionList"
}
},
{
"pkg": "kubernetes",
"mod": "apiregistration.k8s.io/v1",
"fqn": "pulumi_kubernetes.apiregistration.v1",
"classes": {
"kubernetes:apiregistration.k8s.io/v1:APIService": "APIService",
"kubernetes:apiregistration.k8s.io/v1:APIServiceList": "APIServiceList"
}
},
{
"pkg": "kubernetes",
"mod": "apiregistration.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.apiregistration.v1beta1",
"classes": {
"kubernetes:apiregistration.k8s.io/v1beta1:APIService": "APIService",
"kubernetes:apiregistration.k8s.io/v1beta1:APIServiceList": "APIServiceList"
}
},
{
"pkg": "kubernetes",
"mod": "apps/v1",
"fqn": "pulumi_kubernetes.apps.v1",
"classes": {
"kubernetes:apps/v1:ControllerRevision": "ControllerRevision",
"kubernetes:apps/v1:ControllerRevisionList": "ControllerRevisionList",
"kubernetes:apps/v1:DaemonSet": "DaemonSet",
"kubernetes:apps/v1:DaemonSetList": "DaemonSetList",
"kubernetes:apps/v1:Deployment": "Deployment",
"kubernetes:apps/v1:DeploymentList": "DeploymentList",
"kubernetes:apps/v1:ReplicaSet": "ReplicaSet",
"kubernetes:apps/v1:ReplicaSetList": "ReplicaSetList",
"kubernetes:apps/v1:StatefulSet": "StatefulSet",
"kubernetes:apps/v1:StatefulSetList": "StatefulSetList"
}
},
{
"pkg": "kubernetes",
"mod": "apps/v1beta1",
"fqn": "pulumi_kubernetes.apps.v1beta1",
"classes": {
"kubernetes:apps/v1beta1:ControllerRevision": "ControllerRevision",
"kubernetes:apps/v1beta1:ControllerRevisionList": "ControllerRevisionList",
"kubernetes:apps/v1beta1:Deployment": "Deployment",
"kubernetes:apps/v1beta1:DeploymentList": "DeploymentList",
"kubernetes:apps/v1beta1:StatefulSet": "StatefulSet",
"kubernetes:apps/v1beta1:StatefulSetList": "StatefulSetList"
}
},
{
"pkg": "kubernetes",
"mod": "apps/v1beta2",
"fqn": "pulumi_kubernetes.apps.v1beta2",
"classes": {
"kubernetes:apps/v1beta2:ControllerRevision": "ControllerRevision",
"kubernetes:apps/v1beta2:ControllerRevisionList": "ControllerRevisionList",
"kubernetes:apps/v1beta2:DaemonSet": "DaemonSet",
"kubernetes:apps/v1beta2:DaemonSetList": "DaemonSetList",
"kubernetes:apps/v1beta2:Deployment": "Deployment",
"kubernetes:apps/v1beta2:DeploymentList": "DeploymentList",
"kubernetes:apps/v1beta2:ReplicaSet": "ReplicaSet",
"kubernetes:apps/v1beta2:ReplicaSetList": "ReplicaSetList",
"kubernetes:apps/v1beta2:StatefulSet": "StatefulSet",
"kubernetes:apps/v1beta2:StatefulSetList": "StatefulSetList"
}
},
{
"pkg": "kubernetes",
"mod": "auditregistration.k8s.io/v1alpha1",
"fqn": "pulumi_kubernetes.auditregistration.v1alpha1",
"classes": {
"kubernetes:auditregistration.k8s.io/v1alpha1:AuditSink": "AuditSink",
"kubernetes:auditregistration.k8s.io/v1alpha1:AuditSinkList": "AuditSinkList"
}
},
{
"pkg": "kubernetes",
"mod": "authentication.k8s.io/v1",
"fqn": "pulumi_kubernetes.authentication.v1",
"classes": {
"kubernetes:authentication.k8s.io/v1:TokenRequest": "TokenRequest",
"kubernetes:authentication.k8s.io/v1:TokenReview": "TokenReview"
}
},
{
"pkg": "kubernetes",
"mod": "authentication.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.authentication.v1beta1",
"classes": {
"kubernetes:authentication.k8s.io/v1beta1:TokenReview": "TokenReview"
}
},
{
"pkg": "kubernetes",
"mod": "authorization.k8s.io/v1",
"fqn": "pulumi_kubernetes.authorization.v1",
"classes": {
"kubernetes:authorization.k8s.io/v1:LocalSubjectAccessReview": "LocalSubjectAccessReview",
"kubernetes:authorization.k8s.io/v1:SelfSubjectAccessReview": "SelfSubjectAccessReview",
"kubernetes:authorization.k8s.io/v1:SelfSubjectRulesReview": "SelfSubjectRulesReview",
"kubernetes:authorization.k8s.io/v1:SubjectAccessReview": "SubjectAccessReview"
}
},
{
"pkg": "kubernetes",
"mod": "authorization.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.authorization.v1beta1",
"classes": {
"kubernetes:authorization.k8s.io/v1beta1:LocalSubjectAccessReview": "LocalSubjectAccessReview",
"kubernetes:authorization.k8s.io/v1beta1:SelfSubjectAccessReview": "SelfSubjectAccessReview",
"kubernetes:authorization.k8s.io/v1beta1:SelfSubjectRulesReview": "SelfSubjectRulesReview",
"kubernetes:authorization.k8s.io/v1beta1:SubjectAccessReview": "SubjectAccessReview"
}
},
{
"pkg": "kubernetes",
"mod": "autoscaling/v1",
"fqn": "pulumi_kubernetes.autoscaling.v1",
"classes": {
"kubernetes:autoscaling/v1:HorizontalPodAutoscaler": "HorizontalPodAutoscaler",
"kubernetes:autoscaling/v1:HorizontalPodAutoscalerList": "HorizontalPodAutoscalerList"
}
},
{
"pkg": "kubernetes",
"mod": "autoscaling/v2beta1",
"fqn": "pulumi_kubernetes.autoscaling.v2beta1",
"classes": {
"kubernetes:autoscaling/v2beta1:HorizontalPodAutoscaler": "HorizontalPodAutoscaler",
"kubernetes:autoscaling/v2beta1:HorizontalPodAutoscalerList": "HorizontalPodAutoscalerList"
}
},
{
"pkg": "kubernetes",
"mod": "autoscaling/v2beta2",
"fqn": "pulumi_kubernetes.autoscaling.v2beta2",
"classes": {
"kubernetes:autoscaling/v2beta2:HorizontalPodAutoscaler": "HorizontalPodAutoscaler",
"kubernetes:autoscaling/v2beta2:HorizontalPodAutoscalerList": "HorizontalPodAutoscalerList"
}
},
{
"pkg": "kubernetes",
"mod": "batch/v1",
"fqn": "pulumi_kubernetes.batch.v1",
"classes": {
"kubernetes:batch/v1:CronJob": "CronJob",
"kubernetes:batch/v1:CronJobList": "CronJobList",
"kubernetes:batch/v1:Job": "Job",
"kubernetes:batch/v1:JobList": "JobList"
}
},
{
"pkg": "kubernetes",
"mod": "batch/v1beta1",
"fqn": "pulumi_kubernetes.batch.v1beta1",
"classes": {
"kubernetes:batch/v1beta1:CronJob": "CronJob",
"kubernetes:batch/v1beta1:CronJobList": "CronJobList"
}
},
{
"pkg": "kubernetes",
"mod": "batch/v2alpha1",
"fqn": "pulumi_kubernetes.batch.v2alpha1",
"classes": {
"kubernetes:batch/v2alpha1:CronJob": "CronJob",
"kubernetes:batch/v2alpha1:CronJobList": "CronJobList"
}
},
{
"pkg": "kubernetes",
"mod": "certificates.k8s.io/v1",
"fqn": "pulumi_kubernetes.certificates.v1",
"classes": {
"kubernetes:certificates.k8s.io/v1:CertificateSigningRequest": "CertificateSigningRequest",
"kubernetes:certificates.k8s.io/v1:CertificateSigningRequestList": "CertificateSigningRequestList"
}
},
{
"pkg": "kubernetes",
"mod": "certificates.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.certificates.v1beta1",
"classes": {
"kubernetes:certificates.k8s.io/v1beta1:CertificateSigningRequest": "CertificateSigningRequest",
"kubernetes:certificates.k8s.io/v1beta1:CertificateSigningRequestList": "CertificateSigningRequestList"
}
},
{
"pkg": "kubernetes",
"mod": "coordination.k8s.io/v1",
"fqn": "pulumi_kubernetes.coordination.v1",
"classes": {
"kubernetes:coordination.k8s.io/v1:Lease": "Lease",
"kubernetes:coordination.k8s.io/v1:LeaseList": "LeaseList"
}
},
{
"pkg": "kubernetes",
"mod": "coordination.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.coordination.v1beta1",
"classes": {
"kubernetes:coordination.k8s.io/v1beta1:Lease": "Lease",
"kubernetes:coordination.k8s.io/v1beta1:LeaseList": "LeaseList"
}
},
{
"pkg": "kubernetes",
"mod": "core/v1",
"fqn": "pulumi_kubernetes.core.v1",
"classes": {
"kubernetes:core/v1:Binding": "Binding",
"kubernetes:core/v1:ConfigMap": "ConfigMap",
"kubernetes:core/v1:ConfigMapList": "ConfigMapList",
"kubernetes:core/v1:Endpoints": "Endpoints",
"kubernetes:core/v1:EndpointsList": "EndpointsList",
"kubernetes:core/v1:Event": "Event",
"kubernetes:core/v1:EventList": "EventList",
"kubernetes:core/v1:LimitRange": "LimitRange",
"kubernetes:core/v1:LimitRangeList": "LimitRangeList",
"kubernetes:core/v1:Namespace": "Namespace",
"kubernetes:core/v1:NamespaceList": "NamespaceList",
"kubernetes:core/v1:Node": "Node",
"kubernetes:core/v1:NodeList": "NodeList",
"kubernetes:core/v1:PersistentVolume": "PersistentVolume",
"kubernetes:core/v1:PersistentVolumeClaim": "PersistentVolumeClaim",
"kubernetes:core/v1:PersistentVolumeClaimList": "PersistentVolumeClaimList",
"kubernetes:core/v1:PersistentVolumeList": "PersistentVolumeList",
"kubernetes:core/v1:Pod": "Pod",
"kubernetes:core/v1:PodList": "PodList",
"kubernetes:core/v1:PodTemplate": "PodTemplate",
"kubernetes:core/v1:PodTemplateList": "PodTemplateList",
"kubernetes:core/v1:ReplicationController": "ReplicationController",
"kubernetes:core/v1:ReplicationControllerList": "ReplicationControllerList",
"kubernetes:core/v1:ResourceQuota": "ResourceQuota",
"kubernetes:core/v1:ResourceQuotaList": "ResourceQuotaList",
"kubernetes:core/v1:Secret": "Secret",
"kubernetes:core/v1:SecretList": "SecretList",
"kubernetes:core/v1:Service": "Service",
"kubernetes:core/v1:ServiceAccount": "ServiceAccount",
"kubernetes:core/v1:ServiceAccountList": "ServiceAccountList",
"kubernetes:core/v1:ServiceList": "ServiceList"
}
},
{
"pkg": "kubernetes",
"mod": "discovery.k8s.io/v1",
"fqn": "pulumi_kubernetes.discovery.v1",
"classes": {
"kubernetes:discovery.k8s.io/v1:EndpointSlice": "EndpointSlice",
"kubernetes:discovery.k8s.io/v1:EndpointSliceList": "EndpointSliceList"
}
},
{
"pkg": "kubernetes",
"mod": "discovery.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.discovery.v1beta1",
"classes": {
"kubernetes:discovery.k8s.io/v1beta1:EndpointSlice": "EndpointSlice",
"kubernetes:discovery.k8s.io/v1beta1:EndpointSliceList": "EndpointSliceList"
}
},
{
"pkg": "kubernetes",
"mod": "events.k8s.io/v1",
"fqn": "pulumi_kubernetes.events.v1",
"classes": {
"kubernetes:events.k8s.io/v1:Event": "Event",
"kubernetes:events.k8s.io/v1:EventList": "EventList"
}
},
{
"pkg": "kubernetes",
"mod": "events.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.events.v1beta1",
"classes": {
"kubernetes:events.k8s.io/v1beta1:Event": "Event",
"kubernetes:events.k8s.io/v1beta1:EventList": "EventList"
}
},
{
"pkg": "kubernetes",
"mod": "extensions/v1beta1",
"fqn": "pulumi_kubernetes.extensions.v1beta1",
"classes": {
"kubernetes:extensions/v1beta1:DaemonSet": "DaemonSet",
"kubernetes:extensions/v1beta1:DaemonSetList": "DaemonSetList",
"kubernetes:extensions/v1beta1:Deployment": "Deployment",
"kubernetes:extensions/v1beta1:DeploymentList": "DeploymentList",
"kubernetes:extensions/v1beta1:Ingress": "Ingress",
"kubernetes:extensions/v1beta1:IngressList": "IngressList",
"kubernetes:extensions/v1beta1:NetworkPolicy": "NetworkPolicy",
"kubernetes:extensions/v1beta1:NetworkPolicyList": "NetworkPolicyList",
"kubernetes:extensions/v1beta1:PodSecurityPolicy": "PodSecurityPolicy",
"kubernetes:extensions/v1beta1:PodSecurityPolicyList": "PodSecurityPolicyList",
"kubernetes:extensions/v1beta1:ReplicaSet": "ReplicaSet",
"kubernetes:extensions/v1beta1:ReplicaSetList": "ReplicaSetList"
}
},
{
"pkg": "kubernetes",
"mod": "flowcontrol.apiserver.k8s.io/v1alpha1",
"fqn": "pulumi_kubernetes.flowcontrol.v1alpha1",
"classes": {
"kubernetes:flowcontrol.apiserver.k8s.io/v1alpha1:FlowSchema": "FlowSchema",
"kubernetes:flowcontrol.apiserver.k8s.io/v1alpha1:FlowSchemaList": "FlowSchemaList",
"kubernetes:flowcontrol.apiserver.k8s.io/v1alpha1:PriorityLevelConfiguration": "PriorityLevelConfiguration",
"kubernetes:flowcontrol.apiserver.k8s.io/v1alpha1:PriorityLevelConfigurationList": "PriorityLevelConfigurationList"
}
},
{
"pkg": "kubernetes",
"mod": "flowcontrol.apiserver.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.flowcontrol.v1beta1",
"classes": {
"kubernetes:flowcontrol.apiserver.k8s.io/v1beta1:FlowSchema": "FlowSchema",
"kubernetes:flowcontrol.apiserver.k8s.io/v1beta1:FlowSchemaList": "FlowSchemaList",
"kubernetes:flowcontrol.apiserver.k8s.io/v1beta1:PriorityLevelConfiguration": "PriorityLevelConfiguration",
"kubernetes:flowcontrol.apiserver.k8s.io/v1beta1:PriorityLevelConfigurationList": "PriorityLevelConfigurationList"
}
},
{
"pkg": "kubernetes",
"mod": "helm.sh/v3",
"fqn": "pulumi_kubernetes.helm.v3",
"classes": {
"kubernetes:helm.sh/v3:Release": "Release"
}
},
{
"pkg": "kubernetes",
"mod": "meta/v1",
"fqn": "pulumi_kubernetes.meta.v1",
"classes": {
"kubernetes:meta/v1:Status": "Status"
}
},
{
"pkg": "kubernetes",
"mod": "networking.k8s.io/v1",
"fqn": "pulumi_kubernetes.networking.v1",
"classes": {
"kubernetes:networking.k8s.io/v1:Ingress": "Ingress",
"kubernetes:networking.k8s.io/v1:IngressClass": "IngressClass",
"kubernetes:networking.k8s.io/v1:IngressClassList": "IngressClassList",
"kubernetes:networking.k8s.io/v1:IngressList": "IngressList",
"kubernetes:networking.k8s.io/v1:NetworkPolicy": "NetworkPolicy",
"kubernetes:networking.k8s.io/v1:NetworkPolicyList": "NetworkPolicyList"
}
},
{
"pkg": "kubernetes",
"mod": "networking.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.networking.v1beta1",
"classes": {
"kubernetes:networking.k8s.io/v1beta1:Ingress": "Ingress",
"kubernetes:networking.k8s.io/v1beta1:IngressClass": "IngressClass",
"kubernetes:networking.k8s.io/v1beta1:IngressClassList": "IngressClassList",
"kubernetes:networking.k8s.io/v1beta1:IngressList": "IngressList"
}
},
{
"pkg": "kubernetes",
"mod": "node.k8s.io/v1",
"fqn": "pulumi_kubernetes.node.v1",
"classes": {
"kubernetes:node.k8s.io/v1:RuntimeClass": "RuntimeClass",
"kubernetes:node.k8s.io/v1:RuntimeClassList": "RuntimeClassList"
}
},
{
"pkg": "kubernetes",
"mod": "node.k8s.io/v1alpha1",
"fqn": "pulumi_kubernetes.node.v1alpha1",
"classes": {
"kubernetes:node.k8s.io/v1alpha1:RuntimeClass": "RuntimeClass",
"kubernetes:node.k8s.io/v1alpha1:RuntimeClassList": "RuntimeClassList"
}
},
{
"pkg": "kubernetes",
"mod": "node.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.node.v1beta1",
"classes": {
"kubernetes:node.k8s.io/v1beta1:RuntimeClass": "RuntimeClass",
"kubernetes:node.k8s.io/v1beta1:RuntimeClassList": "RuntimeClassList"
}
},
{
"pkg": "kubernetes",
"mod": "policy/v1",
"fqn": "pulumi_kubernetes.policy.v1",
"classes": {
"kubernetes:policy/v1:PodDisruptionBudget": "PodDisruptionBudget",
"kubernetes:policy/v1:PodDisruptionBudgetList": "PodDisruptionBudgetList"
}
},
{
"pkg": "kubernetes",
"mod": "policy/v1beta1",
"fqn": "pulumi_kubernetes.policy.v1beta1",
"classes": {
"kubernetes:policy/v1beta1:PodDisruptionBudget": "PodDisruptionBudget",
"kubernetes:policy/v1beta1:PodDisruptionBudgetList": "PodDisruptionBudgetList",
"kubernetes:policy/v1beta1:PodSecurityPolicy": "PodSecurityPolicy",
"kubernetes:policy/v1beta1:PodSecurityPolicyList": "PodSecurityPolicyList"
}
},
{
"pkg": "kubernetes",
"mod": "rbac.authorization.k8s.io/v1",
"fqn": "pulumi_kubernetes.rbac.v1",
"classes": {
"kubernetes:rbac.authorization.k8s.io/v1:ClusterRole": "ClusterRole",
"kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleBinding": "ClusterRoleBinding",
"kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleBindingList": "ClusterRoleBindingList",
"kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleList": "ClusterRoleList",
"kubernetes:rbac.authorization.k8s.io/v1:Role": "Role",
"kubernetes:rbac.authorization.k8s.io/v1:RoleBinding": "RoleBinding",
"kubernetes:rbac.authorization.k8s.io/v1:RoleBindingList": "RoleBindingList",
"kubernetes:rbac.authorization.k8s.io/v1:RoleList": "RoleList"
}
},
{
"pkg": "kubernetes",
"mod": "rbac.authorization.k8s.io/v1alpha1",
"fqn": "pulumi_kubernetes.rbac.v1alpha1",
"classes": {
"kubernetes:rbac.authorization.k8s.io/v1alpha1:ClusterRole": "ClusterRole",
"kubernetes:rbac.authorization.k8s.io/v1alpha1:ClusterRoleBinding": "ClusterRoleBinding",
"kubernetes:rbac.authorization.k8s.io/v1alpha1:ClusterRoleBindingList": "ClusterRoleBindingList",
"kubernetes:rbac.authorization.k8s.io/v1alpha1:ClusterRoleList": "ClusterRoleList",
"kubernetes:rbac.authorization.k8s.io/v1alpha1:Role": "Role",
"kubernetes:rbac.authorization.k8s.io/v1alpha1:RoleBinding": "RoleBinding",
"kubernetes:rbac.authorization.k8s.io/v1alpha1:RoleBindingList": "RoleBindingList",
"kubernetes:rbac.authorization.k8s.io/v1alpha1:RoleList": "RoleList"
}
},
{
"pkg": "kubernetes",
"mod": "rbac.authorization.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.rbac.v1beta1",
"classes": {
"kubernetes:rbac.authorization.k8s.io/v1beta1:ClusterRole": "ClusterRole",
"kubernetes:rbac.authorization.k8s.io/v1beta1:ClusterRoleBinding": "ClusterRoleBinding",
"kubernetes:rbac.authorization.k8s.io/v1beta1:ClusterRoleBindingList": "ClusterRoleBindingList",
"kubernetes:rbac.authorization.k8s.io/v1beta1:ClusterRoleList": "ClusterRoleList",
"kubernetes:rbac.authorization.k8s.io/v1beta1:Role": "Role",
"kubernetes:rbac.authorization.k8s.io/v1beta1:RoleBinding": "RoleBinding",
"kubernetes:rbac.authorization.k8s.io/v1beta1:RoleBindingList": "RoleBindingList",
"kubernetes:rbac.authorization.k8s.io/v1beta1:RoleList": "RoleList"
}
},
{
"pkg": "kubernetes",
"mod": "scheduling.k8s.io/v1",
"fqn": "pulumi_kubernetes.scheduling.v1",
"classes": {
"kubernetes:scheduling.k8s.io/v1:PriorityClass": "PriorityClass",
"kubernetes:scheduling.k8s.io/v1:PriorityClassList": "PriorityClassList"
}
},
{
"pkg": "kubernetes",
"mod": "scheduling.k8s.io/v1alpha1",
"fqn": "pulumi_kubernetes.scheduling.v1alpha1",
"classes": {
"kubernetes:scheduling.k8s.io/v1alpha1:PriorityClass": "PriorityClass",
"kubernetes:scheduling.k8s.io/v1alpha1:PriorityClassList": "PriorityClassList"
}
},
{
"pkg": "kubernetes",
"mod": "scheduling.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.scheduling.v1beta1",
"classes": {
"kubernetes:scheduling.k8s.io/v1beta1:PriorityClass": "PriorityClass",
"kubernetes:scheduling.k8s.io/v1beta1:PriorityClassList": "PriorityClassList"
}
},
{
"pkg": "kubernetes",
"mod": "settings.k8s.io/v1alpha1",
"fqn": "pulumi_kubernetes.settings.v1alpha1",
"classes": {
"kubernetes:settings.k8s.io/v1alpha1:PodPreset": "PodPreset",
"kubernetes:settings.k8s.io/v1alpha1:PodPresetList": "PodPresetList"
}
},
{
"pkg": "kubernetes",
"mod": "storage.k8s.io/v1",
"fqn": "pulumi_kubernetes.storage.v1",
"classes": {
"kubernetes:storage.k8s.io/v1:CSIDriver": "CSIDriver",
"kubernetes:storage.k8s.io/v1:CSIDriverList": "CSIDriverList",
"kubernetes:storage.k8s.io/v1:CSINode": "CSINode",
"kubernetes:storage.k8s.io/v1:CSINodeList": "CSINodeList",
"kubernetes:storage.k8s.io/v1:StorageClass": "StorageClass",
"kubernetes:storage.k8s.io/v1:StorageClassList": "StorageClassList",
"kubernetes:storage.k8s.io/v1:VolumeAttachment": "VolumeAttachment",
"kubernetes:storage.k8s.io/v1:VolumeAttachmentList": "VolumeAttachmentList"
}
},
{
"pkg": "kubernetes",
"mod": "storage.k8s.io/v1alpha1",
"fqn": "pulumi_kubernetes.storage.v1alpha1",
"classes": {
"kubernetes:storage.k8s.io/v1alpha1:CSIStorageCapacity": "CSIStorageCapacity",
"kubernetes:storage.k8s.io/v1alpha1:CSIStorageCapacityList": "CSIStorageCapacityList",
"kubernetes:storage.k8s.io/v1alpha1:VolumeAttachment": "VolumeAttachment",
"kubernetes:storage.k8s.io/v1alpha1:VolumeAttachmentList": "VolumeAttachmentList"
}
},
{
"pkg": "kubernetes",
"mod": "storage.k8s.io/v1beta1",
"fqn": "pulumi_kubernetes.storage.v1beta1",
"classes": {
"kubernetes:storage.k8s.io/v1beta1:CSIDriver": "CSIDriver",
"kubernetes:storage.k8s.io/v1beta1:CSIDriverList": "CSIDriverList",
"kubernetes:storage.k8s.io/v1beta1:CSINode": "CSINode",
"kubernetes:storage.k8s.io/v1beta1:CSINodeList": "CSINodeList",
"kubernetes:storage.k8s.io/v1beta1:CSIStorageCapacity": "CSIStorageCapacity",
"kubernetes:storage.k8s.io/v1beta1:CSIStorageCapacityList": "CSIStorageCapacityList",
"kubernetes:storage.k8s.io/v1beta1:StorageClass": "StorageClass",
"kubernetes:storage.k8s.io/v1beta1:StorageClassList": "StorageClassList",
"kubernetes:storage.k8s.io/v1beta1:VolumeAttachment": "VolumeAttachment",
"kubernetes:storage.k8s.io/v1beta1:VolumeAttachmentList": "VolumeAttachmentList"
}
}
]
""",
resource_packages="""
[
{
"pkg": "kubernetes",
"token": "pulumi:providers:kubernetes",
"fqn": "pulumi_kubernetes",
"class": "Provider"
}
]
"""
)
| StarcoderdataPython |
4919645 | """Utility module for system checks"""
import subprocess
def check_fsl():
    """
    Check for a working FSL installation.

    Runs a couple of core FSL command-line tools (``flirt`` and ``bet``)
    and inspects their stderr streams; any error output is taken as a sign
    of a broken (or missing) installation.

    Returns:
        True if all test commands ran without writing to stderr.

    Raises:
        UserWarning: If any test command produced error output; the raised
            message includes the collected system error text.
    """
    error_msg = ""
    # Core FSL tools whose presence indicates a correct installation.
    test_commands = ["flirt", "bet"]
    for command in test_commands:
        # shell=True so that a missing executable surfaces as shell error
        # text on stderr instead of raising FileNotFoundError.
        check_stream = subprocess.Popen([command], shell=True,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
        # Read output
        _, error = check_stream.communicate()
        # End stream
        check_stream.terminate()
        # Accumulate error text from every failing command.
        if error:
            error_msg = error_msg + error.decode("utf-8")
    # If there was an error in any of the tests, raise a warning.
    if error_msg:
        raise UserWarning("FSL is not installed correctly.\nPlease check "
                          "https://fsl.fmrib.ox.ac.uk"
                          "/fsl/fslwiki/FslInstallation "
                          "for elaboration on the installation process."
                          f"\nSystem error message:\n{error_msg}")
    return True
def check_freesurfer():
    """
    Verify that FreeSurfer is installed correctly.

    Invokes the ``freesurfer`` command through a shell and treats any
    stderr output as evidence of a broken installation.

    Returns:
        True when the command produced no error output.

    Raises:
        UserWarning: When the command wrote anything to stderr; the system
            error text is appended to the warning message.
    """
    # Run the probe command; shell=True keeps a missing binary from
    # raising and instead routes the shell's complaint to stderr.
    probe = subprocess.Popen(['freesurfer'], shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    _, err_bytes = probe.communicate()
    probe.terminate()
    # Success path: silence on stderr means the tool is available.
    if not err_bytes:
        return True
    raise UserWarning("FreeSurfer is not installed correctly."
                      "\nPlease check "
                      "https://surfer.nmr.mgh.harvard.edu/fswiki/"
                      "LinuxInstall "
                      "for elaboration on the installation process."
                      f"\nSystem error message:\n{err_bytes.decode('utf-8')}")
| StarcoderdataPython |
179833 | <gh_stars>0
__author__ = '<NAME>'
import MySQLdb as mdb
def retrieve_connection(db):
    """
    Retrieves Database connection object for a given connection parameters.
    :param db: Json object containing connection parameters
    :type db: dict
    :return: MySQLDB.Connection object
    """
    # Maps the expected config keys onto MySQLdb's keyword arguments.
    # A KeyError propagates to the caller if any key is missing.
    con = mdb.connect(host=db['host'], user=db['username'], passwd=db['password'], port=db['port'], db=db['name'])
    return con
def execute_select(sql_stmt, db):
    """
    Executes select statement and returning list of results.
    :param sql_stmt: SQL statement to be executed.
    :type sql_stmt: str
    :param db: Json object containing connection string parameters
    :type db: dict
    :return: list of results from DB; empty list when the query failed
    """
    res = []
    con = None
    try:
        con = retrieve_connection(db)
        # DictCursor returns each row as a column-name -> value mapping.
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(sql_stmt)
        res = cur.fetchall()
    except mdb.Error, e:
        # NOTE(review): DB errors are only printed, not re-raised, so the
        # caller cannot distinguish "no rows" from "query failed".
        print "Error %d: %s" % (e.args[0], e.args[1])
    finally:
        # Always release the connection, even on failure.
        if con:
            con.close()
    return res
def execute_insert(sql_stmt, db):
    """
    Executes insert/update statement
    :param sql_stmt: SQL statement to be executed.
    :type sql_stmt: str
    :param db: Json object containing connection string parameters
    :type db: dict
    """
    con = None
    try:
        con = retrieve_connection(db)
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(sql_stmt)
        # Commit explicitly; MySQLdb connections do not autocommit.
        con.commit()
    except mdb.Error, e:
        # NOTE(review): errors are swallowed after printing; the caller
        # gets no signal that the write failed.
        print "Error %d: %s" % (e.args[0], e.args[1])
    finally:
        # Always release the connection, even on failure.
        if con:
            con.close()
| StarcoderdataPython |
3504464 | from .base import BrokerBase
from .context import TradingContext
from .broker import CryptoBroker
from .backtest import BacktestBroker
from .dryrun import DryrunBroker
| StarcoderdataPython |
3419524 | '/a/b/c'
| StarcoderdataPython |
6621204 | <gh_stars>0
# SPDX-FileCopyrightText: 2021 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
import glob
import os
import shutil
import subprocess
import xarray as xr
from termcolor import cprint
from trace_for_guess.skip import skip
def calculate_fsdscl(cldtot_file, fsds_file, fsdsc_file, out_file):
    """Re-construct the CCSM3 FSDSCL variable from CLDTOT, FSDS, and FSDSC.

    - FSDS: Downwelling solar flux at surface in W/m².
    - CLDTOT: Vertically-integrated total cloud fraction. This is equivalent to
      the cld variable in the CRU dataset.
    - FSDSC: Incoming radiation with a completely clear sky (zero cloud cover).
    - FSDSCL: Incoming radiation with a completely overcast sky (100% cloud
      cover).

    Args:
        cldtot_file: Path to the CLDTOT input file.
        fsds_file: Path to the FSDS input file.
        fsdsc_file: Path to the FSDSC input file.
        out_file: Path to the FSDSCL output file (to be created).

    Returns:
        The path to the output file (=`out_file`).

    Raises:
        FileNotFoundError: One of the 3 input files is missing.
        subprocess.CalledProcessError: An NCO command (ncks/ncap2) failed.
    """
    # Validate all three inputs up front with the same message format.
    for var_name, var_file in (("CLDTOT", cldtot_file),
                               ("FSDS", fsds_file),
                               ("FSDSC", fsdsc_file)):
        if not os.path.isfile(var_file):
            raise FileNotFoundError("Could not find %s file: '%s'" %
                                    (var_name, var_file))
    # TODO: check that the NCO commands (ncks, ncap2) are available.
    if skip([cldtot_file, fsds_file, fsdsc_file], out_file):
        return out_file
    cprint(f"Generating FSDSCL file: '{out_file}'", 'yellow')
    try:
        # Merge all variables (FSDS, FSDSC, CLDTOT) into one file, and then
        # perform the operation in it.
        subprocess.run(['ncks', '--append', fsds_file, out_file], check=True)
        subprocess.run(['ncks', '--append', fsdsc_file, out_file], check=True)
        subprocess.run(['ncks', '--append', cldtot_file, out_file], check=True)
        # Invert FSDS = CLDTOT*FSDSCL + (1-CLDTOT)*FSDSC to solve for FSDSCL.
        script = 'FSDSCL = (FSDS - FSDSC * (1 - CLDTOT)) / CLDTOT'
        subprocess.run(['ncap2', '--append', '--script', script, out_file],
                       check=True)
    except Exception:
        if os.path.isfile(out_file):
            cprint(f"Removing file '{out_file}'.", 'red')
            os.remove(out_file)
        # Remove temporary files created by ncks.
        # BUGFIX: the original called the `glob` *module* as a function
        # (`glob(...)`), which raised TypeError in this cleanup path;
        # `glob.glob(...)` is the correct call.
        for g in glob.glob(f'{out_file}.pid*.ncks.tmp'):
            cprint(f"Removing file '{g}'.", 'red')
            os.remove(g)
        raise
    assert os.path.isfile(out_file)
    cprint(f"Successfully created '{out_file}'.", 'green')
    return out_file
| StarcoderdataPython |
9625489 | <reponame>Claayton/ExerciciosPython
# Ex029.2
"""Write a program that reads the speed of car
If he exceeds the speed of 80km/h, show a message saying that he was fined
The fine will coast R$:7.00 for each km above the limit"""
# Banner uses ANSI escapes: \033[31m = red, \033[32m = green, \033[m = reset.
print('=-'*15)
print('\033[31mRADAR MY FINE, MY LIFE, SPEED LIMIT: 80Km/h\033[m')
print('=-'*15)
speed = float(input('What is de current speed of the car?: '))
if speed > 80:
    # Fine is R$ 7.00 for every km/h above the 80 km/h limit.
    print(f'\033[31mYOU WERE FINED! the amount of the fine is: R$: {(speed - 80) * 7:.2f}\033[m')
else:
    print('\033[32mYou are within the speed limit')
    print('\033[32mHave a nice day, and drive safely\033[m')
| StarcoderdataPython |
4970374 | # Copyright (C) 2019 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Integration test for Assessment object sync cron job."""
import ddt
import mock
from ggrc import settings
from ggrc.integrations.synchronization_jobs import assessment_sync_job
from ggrc.integrations.synchronization_jobs import sync_utils
from ggrc.integrations import constants
from integration import ggrc
from integration.ggrc.models import factories
@ddt.ddt
@mock.patch.object(settings, "ISSUE_TRACKER_ENABLED", True)
@mock.patch('ggrc.integrations.issues.Client.update_issue',
            return_value=mock.MagicMock())
class TestAsmtSyncJob(ggrc.TestCase):
    """Test cron job for sync Assessment object attributes."""

    @staticmethod
    def _create_asmt(people_sync_enabled):
        """Helper function creating assessment and audit.

        Both the audit and the assessment get an enabled issue-tracker
        issue; people_sync_enabled is stored only on the audit's issue.
        """
        with factories.single_commit():
            asmt = factories.AssessmentFactory()
            factories.IssueTrackerIssueFactory(
                enabled=True,
                issue_tracked_obj=asmt.audit,
                people_sync_enabled=people_sync_enabled,
                **TestAsmtSyncJob._issuetracker_data()
            )
            factories.IssueTrackerIssueFactory(
                enabled=True,
                issue_tracked_obj=asmt,
                **TestAsmtSyncJob._issuetracker_data()
            )
        return asmt

    @staticmethod
    def _issuetracker_data():
        """Helper function returning default issue tracker settings."""
        return dict(
            component_id=constants.DEFAULT_ISSUETRACKER_VALUES["component_id"],
            hotlist_id=constants.DEFAULT_ISSUETRACKER_VALUES["hotlist_id"],
            issue_type=constants.DEFAULT_ISSUETRACKER_VALUES["issue_type"],
            issue_priority=constants.DEFAULT_ISSUETRACKER_VALUES["issue_priority"],
            issue_severity=constants.DEFAULT_ISSUETRACKER_VALUES["issue_severity"],
        )

    @staticmethod
    def _to_issuetrakcer_repr(asmt):
        """Return issue tracker representation of assessment.

        NOTE(review): "issuetrakcer" is a typo kept as-is to avoid touching
        the method's public name.
        """
        # Note: verifier mirrors assignee; ccs falls back to an empty list.
        return {
            asmt.issuetracker_issue.issue_id: dict(
                component_id=int(asmt.issuetracker_issue.component_id),
                status=asmt.status,
                type=asmt.issuetracker_issue.issue_type,
                priority=asmt.issuetracker_issue.issue_priority,
                severity=asmt.issuetracker_issue.issue_severity,
                reporter=asmt.issuetracker_issue.reporter or "",
                assignee=asmt.issuetracker_issue.assignee or "",
                verifier=asmt.issuetracker_issue.assignee or "",
                ccs=asmt.issuetracker_issue.cc_list or [],
            ),
        }

    @staticmethod
    def _construct_expected_upd_call(current_repr, new_audit_captains=(),
                                     new_asmt_assignees=(),
                                     people_sync_enabled=False):
        """Return expected args for client update_issue call."""
        # current_repr holds exactly one issue; unpack its single key.
        issue_id, = current_repr.keys()
        body = dict(current_repr[issue_id])
        new_audit_captains = {a.email for a in new_audit_captains}
        new_asmt_assignees = {a.email for a in new_asmt_assignees}
        if people_sync_enabled:
            # min() picks the alphabetically smallest email, which is the
            # deterministic choice the sync job is expected to make.
            if new_audit_captains:
                body["reporter"] = min(new_audit_captains)
            if new_asmt_assignees:
                body["assignee"] = min(new_asmt_assignees)
                body["verifier"] = body["assignee"]
            # Everyone not already reporter/assignee goes on the CC list.
            body["ccs"] = list(
                (new_audit_captains | new_asmt_assignees) -
                {body["reporter"], body["assignee"]}
            )
        body["status"] = constants.STATUSES_MAPPING.get(body["status"])
        return str(issue_id), body

    @ddt.data(True, False)
    def test_assignee_people_sync(self, people_sync_enabled, update_issue_mock):
        """Test sync of Assignees when people_sync_enabled is on/off."""
        asmt = self._create_asmt(people_sync_enabled=people_sync_enabled)
        issuetracker_repr = self._to_issuetrakcer_repr(asmt)
        with factories.single_commit():
            assignee_1 = factories.PersonFactory()
            assignee_2 = factories.PersonFactory()
        # Build the expectation before mutating the assessment.
        expected_upd_args = self._construct_expected_upd_call(
            current_repr=issuetracker_repr,
            new_asmt_assignees=(assignee_1, assignee_2),
            people_sync_enabled=people_sync_enabled,
        )
        asmt.add_person_with_role_name(assignee_1, "Assignees")
        asmt.add_person_with_role_name(assignee_2, "Assignees")
        # Feed the job a canned tracker state instead of querying the API.
        with mock.patch.object(sync_utils, "iter_issue_batches",
                               return_value=[issuetracker_repr]):
            assessment_sync_job.sync_assessment_attributes()
        update_issue_mock.assert_called_once_with(*expected_upd_args)

    @ddt.data(True, False)
    def test_captains_people_sync_on(self, people_sync_enabled,
                                     update_issue_mock):
        """Test sync of Audit Captain when people_sync_enabled is on/off."""
        asmt = self._create_asmt(people_sync_enabled=people_sync_enabled)
        issuetracker_repr = self._to_issuetrakcer_repr(asmt)
        with factories.single_commit():
            audit_captain_1 = factories.PersonFactory()
            audit_captain_2 = factories.PersonFactory()
        expected_upd_args = self._construct_expected_upd_call(
            current_repr=issuetracker_repr,
            new_audit_captains=(audit_captain_1, audit_captain_2),
            people_sync_enabled=people_sync_enabled,
        )
        asmt.audit.add_person_with_role_name(audit_captain_1, "Audit Captains")
        asmt.audit.add_person_with_role_name(audit_captain_2, "Audit Captains")
        with mock.patch.object(sync_utils, "iter_issue_batches",
                               return_value=[issuetracker_repr]):
            assessment_sync_job.sync_assessment_attributes()
        update_issue_mock.assert_called_once_with(*expected_upd_args)
| StarcoderdataPython |
12862133 | <gh_stars>0
# This an autogenerated file
#
# Generated with CRSAxialFrictionModel
from typing import Dict,Sequence,List
from dmt.entity import Entity
from dmt.blueprint import Blueprint
from .blueprints.crsaxialfrictionmodel import CRSAxialFrictionModelBlueprint
from typing import Dict
from sima.sima.moao import MOAO
from sima.sima.scriptablevalue import ScriptableValue
class CRSAxialFrictionModel(MOAO):
    """
    Axial friction model for a cross-section (CRS).

    NOTE: this file is autogenerated (see header); prefer regenerating it
    over hand-editing.

    Keyword arguments
    -----------------
    name : str
        (default "")
    description : str
        (default "")
    _id : str
        (default "")
    scriptableValues : List[ScriptableValue]
    staticFriction : float
        Static friction force corresponding to elongation(default 0.0)
    staticElongation : float
        Relative elongation(default 0.0)
    dynamicFriction : float
        Dynamic friction force corresponding to elongation(default 0.0)
    dynamicElongation : float
        Relative elongation(default 0.0)
    axialFriction : bool
        Local axial friction model(default False)
    """

    def __init__(self , name="", description="", _id="", staticFriction=0.0, staticElongation=0.0, dynamicFriction=0.0, dynamicElongation=0.0, axialFriction=False, **kwargs):
        super().__init__(**kwargs)
        self.name = name
        self.description = description
        self._id = _id
        self.scriptableValues = list()
        self.staticFriction = staticFriction
        self.staticElongation = staticElongation
        self.dynamicFriction = dynamicFriction
        self.dynamicElongation = dynamicElongation
        self.axialFriction = axialFriction
        # Promote any extra scalar keyword arguments to attributes;
        # nested dict values are deliberately skipped.
        for key, value in kwargs.items():
            if not isinstance(value, Dict):
                setattr(self, key, value)

    @property
    def blueprint(self) -> Blueprint:
        """Return blueprint that this entity represents"""
        return CRSAxialFrictionModelBlueprint()

    @property
    def name(self) -> str:
        """"""
        return self.__name

    @name.setter
    def name(self, value: str):
        """Set name"""
        self.__name = str(value)

    @property
    def description(self) -> str:
        """"""
        return self.__description

    @description.setter
    def description(self, value: str):
        """Set description"""
        self.__description = str(value)

    @property
    def _id(self) -> str:
        """"""
        return self.___id

    @_id.setter
    def _id(self, value: str):
        """Set _id"""
        self.___id = str(value)

    @property
    def scriptableValues(self) -> List[ScriptableValue]:
        """"""
        return self.__scriptableValues

    @scriptableValues.setter
    def scriptableValues(self, value: List[ScriptableValue]):
        """Set scriptableValues"""
        # Reject non-sequence values early with the offending type.
        if not isinstance(value, Sequence):
            raise Exception("Expected sequense, but was " , type(value))
        self.__scriptableValues = value

    @property
    def staticFriction(self) -> float:
        """Static friction force corresponding to elongation"""
        return self.__staticFriction

    @staticFriction.setter
    def staticFriction(self, value: float):
        """Set staticFriction"""
        self.__staticFriction = float(value)

    @property
    def staticElongation(self) -> float:
        """Relative elongation"""
        return self.__staticElongation

    @staticElongation.setter
    def staticElongation(self, value: float):
        """Set staticElongation"""
        self.__staticElongation = float(value)

    @property
    def dynamicFriction(self) -> float:
        """Dynamic friction force corresponding to elongation"""
        return self.__dynamicFriction

    @dynamicFriction.setter
    def dynamicFriction(self, value: float):
        """Set dynamicFriction"""
        self.__dynamicFriction = float(value)

    @property
    def dynamicElongation(self) -> float:
        """Relative elongation"""
        return self.__dynamicElongation

    @dynamicElongation.setter
    def dynamicElongation(self, value: float):
        """Set dynamicElongation"""
        self.__dynamicElongation = float(value)

    @property
    def axialFriction(self) -> bool:
        """Local axial friction model"""
        return self.__axialFriction

    @axialFriction.setter
    def axialFriction(self, value: bool):
        """Set axialFriction"""
        self.__axialFriction = bool(value)
| StarcoderdataPython |
1796049 | <filename>src/the_tale/the_tale/game/abilities/tests/test_requests.py
import smart_imports
smart_imports.all()
class AbilityRequests(utils_testcase.TestCase):
    """Request-level tests for activating hero abilities over HTTP."""

    def setUp(self):
        super(AbilityRequests, self).setUp()
        game_logic.create_test_map()
        self.account = self.accounts_factory.create_account()

    def test_activate_ability_unlogined(self):
        # Anonymous users must be rejected with a login-required error.
        self.check_ajax_error(self.client.post(logic.use_ability_url(relations.ABILITY_TYPE.HELP)), 'common.login_required')

    def test_activate_ability(self):
        self.request_login(self.account.email)
        response = self.client.post(logic.use_ability_url(relations.ABILITY_TYPE.HELP))
        # Ability activation is asynchronous: the request enqueues a
        # postponed task and returns its status URL.
        task = PostponedTaskPrototype._db_get_object(0)
        self.check_ajax_processing(response, task.status_url)

    @mock.patch('the_tale.game.abilities.relations.ABILITY_TYPE.HELP.cost', 100500)
    def test_activate_ability__no_energy(self):
        # Inflate the ability cost so the account cannot afford it.
        self.request_login(self.account.email)
        self.check_ajax_error(self.client.post(logic.use_ability_url(relations.ABILITY_TYPE.HELP)), 'game.abilities.use.no_enough_energy')
        # No task may be created when activation was rejected.
        self.assertEqual(PostponedTaskPrototype._db_count(), 0)
| StarcoderdataPython |
6431506 | import numpy as np
import torch
from .cut_model import CUTModel
from . import networks
from .patchnce import PatchNCELoss
import util.util as util
from .modules import loss
import torch.nn.functional as F
from util.util import gaussian
from util.iter_calculator import IterCalculator
from util.network_group import NetworkGroup
class CUTSemanticModel(CUTModel):
""" This class implements CUT and FastCUT model, described in the paper
Contrastive Learning for Unpaired Image-to-Image Translation
<NAME>, <NAME>, <NAME>, <NAME>
ECCV, 2020
The code borrows heavily from the PyTorch implementation of CycleGAN
https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix
"""
@staticmethod
def modify_commandline_options(parser, is_train=True):
""" Configures options specific for CUT semantic model
"""
return parser
def __init__(self, opt,rank):
super().__init__(opt,rank)
# specify the training losses you want to print out.
# The training/test scripts will call <BaseModel.get_current_losses>
losses_G = ['sem']
losses_CLS = ['CLS']
self.loss_names_G += losses_G
self.loss_names_CLS = losses_CLS
self.loss_names = self.loss_names_G + self.loss_names_CLS + self.loss_names_D
# define networks (both generator and discriminator)
if self.isTrain:
self.netCLS = networks.define_C(opt.model_output_nc, opt.f_s_nf,opt.data_crop_size,
init_type=opt.model_init_type, init_gain=opt.model_init_gain,
gpu_ids=self.gpu_ids, nclasses=opt.f_s_semantic_nclasses)
self.model_names += ["CLS"]
# define loss functions
self.criterionCLS = torch.nn.modules.CrossEntropyLoss()
self.optimizer_CLS = torch.optim.Adam(self.netCLS.parameters(), lr=opt.train_sem_lr_f_s, betas=(opt.train_beta1, opt.train_beta2))
if opt.train_sem_regression:
if opt.train_sem_l1_regression:
self.criterionCLS = torch.nn.L1Loss()
else:
self.criterionCLS = torch.nn.modules.MSELoss()
else:
self.criterionCLS = torch.nn.modules.CrossEntropyLoss()
self.optimizers.append(self.optimizer_CLS)
if self.opt.train_iter_size > 1 :
self.iter_calculator = IterCalculator(self.loss_names)
for i,cur_loss in enumerate(self.loss_names):
self.loss_names[i] = cur_loss + '_avg'
setattr(self, "loss_" + self.loss_names[i], 0)
###Making groups
self.group_CLS = NetworkGroup(networks_to_optimize=["CLS"],forward_functions=None,backward_functions=["compute_CLS_loss"],loss_names_list=["loss_names_CLS"],optimizer=["optimizer_CLS"],loss_backward=["loss_CLS"])
self.networks_groups.append(self.group_CLS)
def set_input_first_gpu(self,data):
super().set_input_first_gpu(data)
self.input_A_label=self.input_A_label[:self.bs_per_gpu]
if hasattr(self,'input_B_label'):
self.input_B_label=self.input_B_label[:self.bs_per_gpu]
def set_input(self, input):
"""Unpack input data from the dataloader and perform necessary pre-processing steps.
Parameters:
input (dict): include the data itself and its metadata information.
The option 'direction' can be used to swap domain A and domain B.
"""
super().set_input(input)
if 'A_label' in input :
if not self.opt.train_sem_regression:
self.input_A_label = input['A_label'].to(self.device)
else:
self.input_A_label = input['A_label'].to(torch.float).to(device=self.device)
if self.opt.train_sem_cls_B and 'B_label' in input:
if not self.opt.train_sem_regression:
self.input_B_label = input['B_label'].to(self.device)
else:
self.input_B_label = input['B_label'].to(torch.float).to(device=self.device)
def forward(self):
"""Run forward pass; called by both functions <optimize_parameters> and <test>."""
super().forward()
d = 1
self.pred_real_A = self.netCLS(self.real_A)
if not self.opt.train_sem_regression:
_,self.gt_pred_A = self.pred_real_A.max(1)
self.pred_fake_B = self.netCLS(self.fake_B)
if not self.opt.train_sem_regression:
_,self.pfB = self.pred_fake_B.max(1)
def compute_G_loss(self):
"""Calculate GAN and NCE loss for the generator"""
super().compute_G_loss()
if not self.opt.train_sem_regression:
self.loss_sem = self.criterionCLS(self.pred_fake_B, self.input_A_label)
else:
self.loss_sem = self.criterionCLS(self.pred_fake_B.squeeze(1), self.input_A_label)
if not hasattr(self, 'loss_CLS') or self.loss_CLS > self.opt.f_s_semantic_threshold:
self.loss_sem = 0 * self.loss_sem
self.loss_G += self.loss_sem
def compute_CLS_loss(self):
label_A = self.input_A_label
# forward only real source image through semantic classifier
pred_A = self.netCLS(self.real_A)
if not self.opt.train_sem_regression:
self.loss_CLS = self.opt.train_sem_lambda * self.criterionCLS(pred_A, label_A)
else:
self.loss_CLS = self.opt.train_sem_lambda * self.criterionCLS(pred_A.squeeze(1), label_A)
if self.opt.train_sem_cls_B:
label_B = self.input_B_label
pred_B = self.netCLS(self.real_B)
if not self.opt.train_sem_regression:
self.loss_CLS += self.opt.train_sem_lambda * self.criterionCLS(pred_B, label_B)
else:
self.loss_CLS += self.opt.train_sem_lambda * self.criterionCLS(pred_B.squeeze(1), label_B)
| StarcoderdataPython |
6498011 | <reponame>royalbhati/fastai
import torch
import torch.nn as nn
from torch.autograd import Variable
from functools import reduce
class LambdaBase(nn.Sequential):
    """Sequential container that stores a function to combine the outputs
    of its child modules (used by the Lambda* subclasses below)."""

    def __init__(self, fn, *args):
        super(LambdaBase, self).__init__(*args)
        self.lambda_func = fn

    def forward_prepare(self, input):
        """Run every child module on *input* and return their outputs as a
        list; with no children, return the raw input unchanged."""
        outputs = [module(input) for module in self._modules.values()]
        return outputs if outputs else input
class Lambda(LambdaBase):
    """Apply the stored function directly to the prepared input."""

    def forward(self, input):
        prepared = self.forward_prepare(input)
        return self.lambda_func(prepared)
class LambdaMap(LambdaBase):
    """Map the stored function over each prepared output, returning a list."""

    def forward(self, input):
        prepared = self.forward_prepare(input)
        return [self.lambda_func(item) for item in prepared]
class LambdaReduce(LambdaBase):
    """Fold the stored binary function across the prepared outputs
    (e.g. element-wise addition for residual branches)."""

    def forward(self, input):
        prepared = self.forward_prepare(input)
        return reduce(self.lambda_func, prepared)
def resnext_101_32x4d(): return nn.Sequential( # Sequential,
nn.Conv2d(3,64,(7, 7),(2, 2),(3, 3),1,1,bias=False),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.MaxPool2d((3, 3),(2, 2),(1, 1)),
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(64,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
),
nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
),
nn.Sequential( # Sequential,
nn.Conv2d(64,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
),
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(256,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
),
nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(256,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
),
nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
),
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(256,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256,256,(3, 3),(2, 2),(1, 1),1,32,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
),
nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
nn.Sequential( # Sequential,
nn.Conv2d(256,512,(1, 1),(2, 2),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
),
nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
),
nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(256),
nn.ReLU(),
),
nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
),
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(512,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(2, 2),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
nn.Sequential( # Sequential,
nn.Conv2d(512,1024,(1, 1),(2, 2),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(512),
nn.ReLU(),
),
nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
),
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(1024,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
nn.Conv2d(1024,1024,(3, 3),(2, 2),(1, 1),1,32,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
),
nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(2048),
),
nn.Sequential( # Sequential,
nn.Conv2d(1024,2048,(1, 1),(2, 2),(0, 0),1,1,bias=False),
nn.BatchNorm2d(2048),
),
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(2048,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
nn.Conv2d(1024,1024,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
),
nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(2048),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
nn.Sequential( # Sequential,
LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential,
nn.Sequential( # Sequential,
nn.Conv2d(2048,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
nn.Conv2d(1024,1024,(3, 3),(1, 1),(1, 1),1,32,bias=False),
nn.BatchNorm2d(1024),
nn.ReLU(),
),
nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False),
nn.BatchNorm2d(2048),
),
Lambda(lambda x: x), # Identity,
),
LambdaReduce(lambda x,y: x+y), # CAddTable,
nn.ReLU(),
),
),
nn.AvgPool2d((7, 7),(1, 1)),
Lambda(lambda x: x.view(x.size(0),-1)), # View,
nn.Sequential(Lambda(lambda x: x.view(1,-1) if 1==len(x.size()) else x ),nn.Linear(2048,1000)), # Linear,
) | StarcoderdataPython |
349031 | <filename>src/tests/comments/test_comment.py<gh_stars>0
from django.test import TestCase
from comments.models import Comment
from articles.models import Article
from django.contrib.auth.models import User
class CommentTestCase(TestCase):
    """Tests for threaded comments attached to articles.

    Builds a small fixture: one article with two top-level comments, two
    replies on the second comment, and one third-level nested reply::

        a1 -- c1
           \- c2 -- c3
                 \- c4 -- c5

    Fix: the original mixed the deprecated ``assertEquals`` alias with
    ``assertEqual``; all assertions now use ``assertEqual``.
    """

    # Superuser that authors every fixture object; set by set_up_user().
    admin = None

    def set_up_user(self):
        """Create a staff superuser and remember it on the test case."""
        user = User.objects.create_user('saroj', password='<PASSWORD>@##')
        user.is_superuser = True
        user.is_staff = True
        user.save()
        self.admin = user

    def set_environ(self):
        """Create the article/comment fixture described in the class docstring."""
        self.set_up_user()
        a1 = Article.objects.create(user=self.admin, title='a1', content='article1', category='RN')
        article_content_type = a1.content_type
        # add comment to a1 : Article
        c1 = Comment.objects.create(user=self.admin, content='a1c1', object_id=a1.id, content_type=article_content_type)
        c2 = Comment.objects.create(user=self.admin, content='a1c2', object_id=a1.id, content_type=article_content_type)
        # add comments to c2 : Comment
        c3 = Comment.objects.create(user=self.admin, content='c2c3', object_id=a1.id, content_type=article_content_type, parent=c2)
        c4 = Comment.objects.create(user=self.admin, content='c2c4', object_id=a1.id, content_type=article_content_type, parent=c2)
        # add commnent to c4 : Comment
        c5 = Comment.objects.create(user=self.admin, content='c4c5', object_id=a1.id, content_type=article_content_type, parent=c4)

    def test_create_count(self):
        """Total vs. top-level comment counts, and reply count on c2."""
        self.set_environ()
        a1 = Article.objects.get(title='a1')
        a1_total_comments = a1.all_comments
        a1_comments = a1.comments.count()
        self.assertEqual(a1_total_comments, 5)
        self.assertEqual(a1_comments, 2)
        c2 = Comment.objects.get(content='a1c2')
        c2_replies = c2.children.count()
        self.assertEqual(c2_replies, 2)

    def test_comment_thread(self):
        """Parent/child links hold across three levels of nesting."""
        self.set_environ()
        c2 = Comment.objects.get(content='a1c2')
        c2_replies = c2.children
        first_reply = c2_replies[0]
        self.assertEqual(c2, first_reply.parent)
        second_reply = c2_replies[1]
        self.assertEqual(second_reply.content, 'c2c4')
        third_level_nested_reply = second_reply.children[0]
        expected_reply = Comment.objects.get(content='c4c5')
        self.assertEqual(third_level_nested_reply, expected_reply)

    def test_functions(self):
        """str(comment) renders the author's username."""
        self.set_environ()
        c1 = Comment.objects.get(content='a1c1')
        username = self.admin.username
        self.assertEqual(str(c1), username)
| StarcoderdataPython |
6605717 | import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../src')))
from api_key_loader import ApiKeyLoader
from env_loader import EnvLoader
from ingredients_input import IngredientsInput
from search_recipes_by_ingredients import SearchRecipesByIngredients
from recipe_view import RecipeView
from recipe_price_breakdown import RecipePriceBreakdown
from shopping_list import ShoppingList
| StarcoderdataPython |
4871694 | <gh_stars>1-10
import netifaces
# Enumerate this host's network interface names once at import time; the
# interactive prompt in interface() validates user input against this list.
print("Finding your Interface.....")
list_of_interfaces = netifaces.interfaces()
print(list_of_interfaces)
def interface():
    """Prompt until the user types the exact name of a local interface.

    Returns the validated interface name.  Uses a loop instead of the
    original recursive retry so repeated bad input cannot exhaust the call
    stack; also fixes the typos in the error message.
    """
    while True:
        interf = input("Enter your interface:")
        if interf in list_of_interfaces:
            return interf
        print("Interface not found; be sure to enter your selection exactly")
def find_gateway():
    """Print and return the default IPv4 gateway address as a string.

    Cleanup: removed the unused ``Addresses`` binding and the dead
    ``temp_list = []`` pre-assignment, and replaced the counter loop
    (which only ever used the first element) with direct indexing.
    ``gateways()['default'][AF_INET]`` is a tuple whose first element is
    the gateway IP.
    """
    inter = interface()
    # Kept for parity with the original: raises ValueError if the name is
    # somehow not a real interface, even though interface() already checks.
    netifaces.ifaddresses(inter)
    gws = netifaces.gateways()
    gateway = gws['default'][netifaces.AF_INET][0]
    print(gateway)
    return gateway
| StarcoderdataPython |
114963 | #!/usr/bin/env python
import os
import ssl
import sys
import json
import redis
import asyncio
import websockets
from datetime import datetime
LOG_FILE = "/var/log/honeybot.log"


def _fail(message):
    """Log *message* to LOG_FILE and abort the process.

    The module-level ``log()`` helper is defined further down the file, so
    it does not exist yet while these import-time checks run (the original
    code raised NameError here); write to the log file directly instead.
    """
    with open(LOG_FILE, "a") as f:
        f.write("{} [ERROR] {}\n".format(str(datetime.now()), message))
    sys.exit(message)


# Validate required configuration up front.  os.environ.get() is used
# because the original ``os.environ["X"]`` raised KeyError for a missing
# variable instead of producing the intended error message.
for _var in ("HB_ID", "HG_KEY", "HG_HOST"):
    if not os.environ.get(_var):
        _fail("{} environment variable is not set".format(_var))
def get_from_redis():
    """Drain parsed honeypot records from the local redis store.

    Scans every key, keeps entries that look like complete records
    (having "ip", "id" and "method"), deletes the consumed keys, and
    stops once just over 1000 records have been collected.
    """
    records = []
    client = redis.Redis(unix_socket_path='/var/run/redis/redis.sock')
    for key in client.scan_iter("*"):
        try:
            entry = json.loads(client.get(key).decode("utf8", "ignore"))
            if "ip" in entry and "id" in entry and "method" in entry:
                records.append(entry)
                client.delete(key)
        except Exception as err:
            # Best effort: a malformed value is reported and skipped.
            print(err)
        if len(records) > 1000:
            break
    return records
def log(data):
    """Append *data* as one line to the honeybot log file."""
    with open(LOG_FILE, "a") as log_file:
        log_file.write(f"{data}\n")
async def send_data(data):
    # Push one batch of collected records to the central gatherer host over
    # a TLS websocket and log whether it acknowledged the ingestion.
    # NOTE(review): a bare SSLContext(PROTOCOL_TLSv1_2) does not enable
    # certificate verification — presumably deliberate for self-signed
    # deployments; confirm before exposing to untrusted networks.
    async with websockets.connect("wss://{}:443".format(os.environ["HG_HOST"]), ssl=ssl.SSLContext(protocol=ssl.PROTOCOL_TLSv1_2)) as websocket:
        await websocket.send(json.dumps(data))
        response = await websocket.recv()
        response = json.loads(response)
        # The gatherer replies {"status": "ok"} when the batch was accepted.
        if "status" in response and response["status"] == "ok":
            log("{} [INFO] Sent {}".format(str(datetime.now()), len(data["data"])))
        else:
            log("{} [ERROR] {}".format(str(datetime.now()), str(response)))
if __name__ == '__main__':
    # Drain pending records from redis and try to ship them upstream.
    payload = {
        "id": os.environ["HB_ID"],
        "key": os.environ["HG_KEY"],
        "data": get_from_redis(),
    }
    if payload["data"]:
        try:
            asyncio.get_event_loop().run_until_complete(send_data(payload))
        except Exception as e:
            log("{} [ERROR] {}".format(str(datetime.now()), e))
            # Sending failed: push the drained records back into redis so
            # they are retried on the next run.
            r = redis.Redis(unix_socket_path='/var/run/redis/redis.sock')
            for item in payload["data"]:
                r.set(item["id"], json.dumps(item))
3302805 | <reponame>DongjunLee/kino-bot<filename>kino/notifier/scheduler.py<gh_stars>100-1000
# -*- coding: utf-8 -*-
import json
import random
from .skill_list import SkillList
from .between import Between
from ..functions import RegisteredFuctions
from ..slack.resource import MsgResource
from ..slack.slackbot import SlackerAdapter
from ..slack.template import MsgTemplate
from ..utils.arrow import ArrowUtil
from ..utils.data_handler import DataHandler
from ..utils.state import State
class Scheduler(object):
    """Manages alarm entries stored in ``schedule.json`` and talks to the
    user over Slack.

    Multi-step interactions (create/delete) are driven by a ``State`` flow
    machine: each local ``step_N`` function handles one user reply, and the
    entry methods dispatch to the right step via ``locals()``.
    """

    def __init__(self, text=None, slackbot=None):
        # text: raw user input (kept for reference, not parsed here).
        # slackbot: injectable SlackerAdapter, mainly for tests/reuse.
        self.input = text
        self.data_handler = DataHandler()
        self.fname = "schedule.json"
        if slackbot is None:
            self.slackbot = SlackerAdapter()
        else:
            self.slackbot = slackbot

    def create(self, step=0, params=None):
        """Interactive multi-step alarm creation; ``step`` selects the stage."""
        state = State()

        # Step 0: start alarm creation — allocate an empty alarm record and
        # ask for either a time slot ("between") or a specific time.
        def step_0(params):
            self.slackbot.send_message(text=MsgResource.SCHEDULER_CREATE_START)
            self.data_handler.read_json_then_add_data(self.fname, "alarm", {})
            state.flow_start("Scheduler", "create")
            if Between().read() == "success":
                self.slackbot.send_message(text=MsgResource.SCHEDULER_CREATE_STEP1)
            else:
                # No "between" slots configured: only a specific time works.
                self.slackbot.send_message(
                    text=MsgResource.SCHEDULER_CREATE_STEP1_ONLY_TIME
                )

        # Step 1: record either a between-slot id (input starts with "#")
        # or a specific time; a specific time skips the period step.
        def step_1(params):
            a_index, current_alarm_data = self.data_handler.get_current_data(
                self.fname, "alarm"
            )
            if params.startswith("#"):
                current_alarm_data["between_id"] = params
                state.flow_next_step()
                self.slackbot.send_message(text=MsgResource.SCHEDULER_CREATE_STEP2)
            else:
                current_alarm_data["time"] = params
                # Jump straight to the skill-selection step.
                state.flow_next_step(num=2)
                SkillList().read()
                self.slackbot.send_message(text=MsgResource.SCHEDULER_CREATE_STEP3)
            self.data_handler.read_json_then_edit_data(
                self.fname, "alarm", a_index, current_alarm_data
            )

        # Step 2: record the repetition period, then show available skills.
        def step_2(params):
            a_index, current_alarm_data = self.data_handler.get_current_data(
                self.fname, "alarm"
            )
            current_alarm_data["period"] = params
            self.data_handler.read_json_then_edit_data(
                self.fname, "alarm", a_index, current_alarm_data
            )
            state.flow_next_step()
            SkillList().read()
            self.slackbot.send_message(text=MsgResource.SCHEDULER_CREATE_STEP3)

        # Step 3: record the skill function to run, optionally with JSON
        # parameters after a comma; finish the flow.
        def step_3(params):
            a_index, current_alarm_data = self.data_handler.get_current_data(
                self.fname, "alarm"
            )
            if "," in params:
                f_name, f_params = params.split(",")
                current_alarm_data["f_name"] = f_name.strip()
                # Slack autocorrects quotes to curly ones; normalize them
                # back before JSON-decoding.
                current_alarm_data["params"] = json.loads(
                    f_params.strip().replace("”", '"').replace("“", '"')
                )
            else:
                current_alarm_data["f_name"] = params.strip()
            self.data_handler.read_json_then_edit_data(
                self.fname, "alarm", a_index, current_alarm_data
            )
            state.flow_complete()
            self.slackbot.send_message(text=MsgResource.CREATE)

        # Dispatch to the requested step function defined above.
        locals()["step_" + str(step)](params)

    def create_with_ner(
        self,
        time_of_day=None,
        day_of_week=None,
        time_unit=None,
        period=None,
        skills=None,
        params=None,
    ):
        """Create an alarm in one shot from NER-extracted entities.

        Defaults: time_of_day "all_day", day_of_week ["0"], and a
        randomized ~30-minute period unless real-time/interval is asked for.
        """
        if skills is None:
            self.slackbot.send_message(text=MsgResource.WORKER_FUNCTION_NOT_FOUND)
            return
        else:
            self.slackbot.send_message(text=MsgResource.WORKER_CREATE_START)

        if time_of_day is None:
            time_of_day = "all_day"

        if period == "real-time":
            period = "7 minutes"
        elif period == "interval":
            period = "interval"
        else:
            # Randomize to spread repeated jobs out a little.
            period = str(random.randint(25, 35)) + " minutes"

        if day_of_week is None:
            day_of_week = ["0"]

        if time_unit is None:
            time = None
        elif len(time_unit) == 1 and period == "interval":
            # A single unit with an interval period is the interval length
            # itself; insert spaces before the Korean minute/hour markers.
            period = time_unit[0]
            period = period.replace("분", " 분")
            period = period.replace("시", " 시")
            time = None
        else:
            # Explicit clock times win over slot/period scheduling.
            time_of_day = None
            period = None
            time = ":"
            for t in time_unit:
                minute = 0
                # Parse "H시" (hour) and optional "M분" (minute) tokens.
                if "시" in t:
                    hour = int(t[: t.index("시")])
                if "분" in t:
                    minute = int(t[: t.index("분")])
                time = "{0:02d}".format(hour) + time + "{0:02d}".format(minute)

        # Drop parameters the NER step could not fill.
        f_params = {}
        if params is not None:
            for k, v in params.items():
                if v is None:
                    continue
                f_params[k] = v

        alarm_data = {
            "between_id": time_of_day,
            "period": period,
            "time": time,
            "day_of_week": day_of_week,
            "f_name": skills,
            "f_params": f_params,
        }
        # Strip falsy fields so the stored record only has what applies.
        alarm_data = dict((k, v) for k, v in alarm_data.items() if v)

        self.data_handler.read_json_then_add_data(self.fname, "alarm", alarm_data)
        self.slackbot.send_message(text=MsgResource.CREATE)

    def read(self):
        """Send the alarm list to Slack; returns "success" or "empty".

        Alarms tied to a between-slot are grouped under that slot; alarms
        with an explicit time go under a synthetic "specific time" slot.
        """
        schedule_data = self.data_handler.read_file(self.fname)
        alarm_data = schedule_data.get("alarm", {})
        # A lone "index" entry means no real alarms exist yet.
        if alarm_data == {} or len(alarm_data) == 1:
            self.slackbot.send_message(text=MsgResource.EMPTY)
            return "empty"

        between_data = schedule_data.get("between", {})
        for k, v in alarm_data.items():
            if k == "index":
                continue
            if "between_id" in v:
                between = between_data[v["between_id"]]
                self.__alarm_in_between(between, k, v, repeat=True)
            elif "time" in v:
                specific = between_data.get("specific time", {})
                specific["time_interval"] = ""
                specific["description"] = "특정 시간"
                between_data["specific time"] = self.__alarm_in_between(specific, k, v)

        attachments = MsgTemplate.make_schedule_template("", between_data)
        self.slackbot.send_message(text=MsgResource.READ, attachments=attachments)
        return "success"

    def __alarm_in_between(self, between, a_index, alarm_data, repeat=False):
        """Attach one alarm's display entry to a between-slot dict.

        repeat=True labels the entry with its period, otherwise with its
        fixed time.  Returns the (mutated) slot dict.
        """
        f_name = alarm_data["f_name"]
        f_detail = RegisteredFuctions().list[f_name]
        if repeat:
            key = "Alarm " + a_index + " (repeat: " + alarm_data["period"] + ")"
        else:
            key = "Alarm " + a_index + " (time: " + alarm_data["time"] + ")"
        value = (
            f_detail["icon"]
            + f_name
            + ", "
            + str(alarm_data.get("f_params", ""))
            + " | "
            + ArrowUtil.format_day_of_week(alarm_data["day_of_week"])
        )
        registered_alarm = "registered_alarm"
        if registered_alarm in between:
            between[registered_alarm][key] = value
        else:
            between[registered_alarm] = {key: value}
        return between

    def update(self, step=0, params=None):
        """Overwrite one alarm from a "index + text + period + between" string."""
        a_index, input_text, input_period, input_between_id = params[0].split(" + ")
        input_alarm = {
            "text": input_text,
            "period": input_period,
            "between_id": input_between_id,
        }
        result = self.data_handler.read_json_then_edit_data(
            self.fname, "alarm", a_index, input_alarm
        )
        # NOTE(review): compares against the misspelled "sucess" — presumably
        # this matches DataHandler's return value; verify before "fixing".
        if result == "sucess":
            attachments = MsgTemplate.make_schedule_template(
                MsgResource.UPDATE, {a_index: input_alarm}
            )
            self.slackbot.send_message(attachments=attachments)
        else:
            self.slackbot.send_message(text=MsgResource.ERROR)

    def delete(self, step=0, params=None):
        """Interactive two-step alarm deletion; ``step`` selects the stage."""
        state = State()

        # Step 0: list the alarms, then wait for the index to delete.
        def step_0(params):
            self.slackbot.send_message(text=MsgResource.SCHEDULER_DELETE_START)
            if self.read() == "success":
                state.flow_start("Scheduler", "delete")

        # Step 1: delete the alarm the user named and finish the flow.
        def step_1(params):
            a_index = params
            self.data_handler.read_json_then_delete(self.fname, "alarm", a_index)
            state.flow_complete()
            self.slackbot.send_message(text=MsgResource.DELETE)

        # Dispatch to the requested step function defined above.
        locals()["step_" + str(step)](params)
| StarcoderdataPython |
9799184 | from dateparser import parse
from datetime import datetime, timedelta
import re
import iwant_bot.pipeline
class IwantRequest(object):
    """Valid iwant command create this object, which extracts information with /iwant requests.
    Other_words are expected words in the user message, which should be removed before dateparse.

    On construction, the Slack payload in *body* is enriched in place with
    the parsed activities, behests, invited user ids, deadline and duration.
    """
    def __init__(self, body: dict, possible_activities: tuple, possible_behests: tuple,
                 user_pattern: str, other_words: tuple = (), default_duration: float = 900.0,
                 max_duration: float = None):
        # body: Slack slash-command payload; must contain 'text' and 'user_id'.
        # user_pattern: regex matching user mentions in the message text.
        # default_duration / max_duration: seconds; max_duration caps the
        # requested duration when set.
        self.possible_activities = possible_activities
        self.possible_behests = possible_behests
        self.default_duration = default_duration
        self.max_duration = max_duration
        self.data = body
        self.data['activities'] = get_words_present_in_text(body['text'], possible_activities)
        self.data['behests'] = get_words_present_in_text(body['text'], possible_behests)
        self.data['invite_users_id'] = get_unique_strings_matching_pattern(
            body['text'], user_pattern)
        # Whatever is left after removing mentions and known words is tried
        # as a time expression; fall back to now + default_duration.
        date = parse_text_for_time(
            body['text'], user_pattern, possible_activities + possible_behests + other_words)
        if date is None:
            date = datetime.now() + timedelta(seconds=default_duration)
        self.data['deadline'] = date
        self.data['action_start'] = datetime.now()
        self.data['action_duration'] = (self.data['deadline']
                                        - self.data['action_start']).total_seconds()
        if max_duration is not None:
            self.data['action_duration'] = min(self.data['action_duration'], max_duration)
        if 'callback_id' not in self.data:
            self.data['callback_id'] = ''

    def return_list_of_parameters(self) -> dict:
        """Build the Slack message listing all activities and behests."""
        text = 'Available activities are:\n`'\
               + '`\n`'.join(self.possible_activities) \
               + '`\nAvailable behests are:\n`'\
               + '`\n`'.join(self.possible_behests) + '`'
        return {'text': text}

    def store_iwant_task(self, activity) -> str:
        """Store to the database and get id -> Slack callback_id."""
        storage_object = iwant_bot.pipeline.pipeline.add_activity_request(
            self.data['user_id'], activity, self.data['deadline'],
            self.data['action_start'], self.data['action_duration']
        )
        return storage_object.id

    def cancel_iwant_task(self):
        """Queue a cancellation for this request's callback_id."""
        print(f'INFO: Canceling request of user {self.data["user_id"]}'
              f' with uuid {self.data["callback_id"]}.')
        iwant_bot.pipeline.pipeline.add_cancellation_request(
            self.data['user_id'], self.data['callback_id'])

    def create_accepted_response(self) -> dict:
        """Create confirmation text and the cancel button."""
        text = (f"{', and '.join(self.data['activities'])}! "
                f"I am looking for someone for {round(self.data['action_duration'] / 60)} minutes.")
        # Slack interactive-message attachment; callback_id routes the
        # button press back to this stored request.
        attachment = [
            {
                'text': f"You can cancel {', '.join(self.data['activities'])}:",
                'callback_id': self.data['callback_id'],
                'fallback': 'This should be the cancel button.',
                'attachment_type': 'default',
                'actions': [
                    {
                        'name': 'Cancel',
                        'text': 'Cancel',
                        'type': 'button',
                        'value': '0'
                    }
                ]
            }
        ]
        return {'text': text, 'attachments': attachment}

    @staticmethod
    def create_help_message() -> dict:
        """Create help message for /iwant Slack command."""
        text = ('This is `/iwant help`.\n'
                'Use `/iwant activity` to let me know, what you want to do.'
                ' I will find someone, who will join you!\n'
                'You can get the list of available activities by `/iwant list`.\n'
                'Some examples:\n'
                ' `/iwant coffee`\n'
                ' `/iwant coffee in 35 min with @alex`'
                ' (I will notify @alex, but everyone can join.)\n'
                )
        return {'text': text}
def get_words_present_in_text(text: str, words: iter) -> list:
    """Return the subset of *words* that occur in *text* as whole words."""
    found = []
    for candidate in words:
        if re.search(r'\b' + candidate + r'\b', text):
            found.append(candidate)
    return found
def get_unique_strings_matching_pattern(text: str, pattern: str) -> list:
    """Return unique matches of *pattern* in *text*, in first-seen order.

    Fixes: ``list(set(...))`` returned the matches in arbitrary order,
    which made downstream behavior non-deterministic; ``dict.fromkeys``
    keeps the deduplication but preserves encounter order.
    """
    return list(dict.fromkeys(re.findall(pattern, text)))
def parse_text_for_time(text: str, user_pattern: str, words: 'Iterable[str]') -> 'datetime | None':
    """Extract a datetime from *text* by stripping every known token first.

    All expected whole words, user mentions (``user_pattern``) and listed
    punctuation are removed, and the remainder is handed to the date parser.
    Note: time can contain symbols +-:./

    Fixes: the trailing-dots fragment was a plain string with ``\\s``/``\\.``
    escapes, which emits DeprecationWarning (SyntaxError in future Python);
    it is now a raw string with identical value.  The annotations
    ``iter`` and ``datetime or None`` (which evaluates to just ``datetime``)
    are replaced by string annotations describing the real contract.
    """
    words_to_remove = r'\b{}\b'.format(r'\b|\b'.join(words))
    only_time_text = re.sub(rf'({user_pattern}|{words_to_remove}|'
                            r'''[][,;`*(){}"'!?\\]|'''
                            r'\s*\.*\s*$)',
                            '', text)
    return parse(only_time_text)
| StarcoderdataPython |
3398787 | <reponame>AI0702/Among-Us-clone<filename>settings.py<gh_stars>0
import pygame
# define some colors (R, G, B)
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
SKYBLUE = (135, 206, 235)
DARKGREY = (40, 40, 40)
LIGHTGREY = (100, 100, 100)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
YELLOW = (255, 255, 0)
# NOTE(review): Orange/Brown/Transparent_Black (and stepping_rate further
# down) break the UPPER_CASE constant convention, but renaming them would
# break every module that imports them by name.
Orange = (255, 165, 0)
Brown = (106, 55, 5)
Transparent_Black = (0, 0, 0, 1)  # RGBA; alpha 1 of 255 -> almost fully transparent
MENU_FONT_COLOR = (255, 255, 255)

# game settings
WIDTH = 1280 # 16 * 64 or 32 * 32 or 64 * 16
HEIGHT = 640 # 16 * 48 or 32 * 24 or 64 * 12
FPS = 60
TITLE = "Multi Player Game"
BGCOLOR = Brown
NO_OF_MISSIONS = 8
NO_OF_BOTS = 9
TILESIZE = 32
GRIDWIDTH = WIDTH / TILESIZE
GRIDHEIGHT = HEIGHT / TILESIZE
FONT = 'Assets/Fonts/Rubik-ExtraBold.TTF'

# Menu setting
INTRO_SPRITE_WIDTH = 40
INTRO_SPRITE_HEIGHT = 40
INTRO_SPRITE_POS_X = 0.37  # presumably a fraction of screen width — TODO confirm
OPTIONS_SPRITE_WIDTH = 45
OPTIONS_SPRITE_HEIGHT = 45
OPTIONS_SPRITE_POS_X = 0.3

# Player settings
PLAYER_SPEED = 400

# Walls setting
WALL_IMG = 'wall.png'

# Sprite Layers (higher numbers draw on top)
WALL_LAYER = 1
PLAYER_LAYER = 2
BOT_LAYER = 1
EFFECTS_LAYER = 3
ITEM_LAYER = 1

# Sound Effects
#BG_MUSIC1 = 'Background/background.wav'
#BG_MUSIC2 = 'Background/espionage.ogg'
BG_MUSIC3 = 'Ambience/AMB_Main.wav'

# Per-room distance within which the room's ambient loop is audible
# (presumably in pixels — TODO confirm against the audio code).
CAFETERIA_AMBIENT_DETECT_RADIUS = 750
MEDBAY_AMBIENT_DETECT_RADIUS = 450
SECURITY_ROOM_AMBIENT_DETECT_RADIUS = 350
REACTOR_ROOM_AMBIENT_DETECT_RADIUS = 450
ENGINE_ROOM_AMBIENT_DETECT_RADIUS = 400
ELECTRICAL_ROOM_AMBIENT_DETECT_RADIUS = 570
STORAGE_ROOM_AMBIENT_DETECT_RADIUS = 580
ADMIN_ROOM_AMBIENT_DETECT_RADIUS = 400
COMMUNICATION_ROOM_AMBIENT_DETECT_RADIUS = 370
OXYGEN_ROOM_AMBIENT_DETECT_RADIUS = 250
COCKPIT_ROOM_AMBIENT_DETECT_RADIUS = 300
WEAPON_ROOM_AMBIENT_DETECT_RADIUS = 400
stepping_rate = 230 # the time interval between each footstep sound played in milisecs
FOOTSTEP_SOUNDS = ['Footsteps/Footstep01.wav',
                   'Footsteps/Footstep02.wav',
                   'Footsteps/Footstep03.wav',
                   'Footsteps/Footstep04.wav',
                   'Footsteps/Footstep05.wav',
                   'Footsteps/Footstep06.wav',
                   'Footsteps/Footstep07.wav',
                   'Footsteps/Footstep08.wav'
                   ]
# One-shot UI / gameplay sound effects keyed by event name.
EFFECT_SOUNDS = {'main_menu_music': 'Background/main_menu_music.mp3',
                 'start_game': 'General/roundstart.wav',
                 'emergency_alarm': 'General/alarm_emergencymeeting.wav',
                 'dead_body_found': 'General/report_Bodyfound.wav',
                 'crises_alarm': 'General/crises.wav',
                 'invisible': 'General/swap.wav',
                 'vent': 'General/vent.wav',
                 'victory_crew': 'General/victory_crew.wav',
                 'victory_imposter': 'General/victory_impostor.wav',
                 'game_left': 'General/victory_disconnect.wav',
                 'fill_gas_can': 'General/gas_can_fill.wav',
                 'pick_gas_can': 'General/pick_up_gas_can.wav',
                 'menu_sel': 'UI/select.wav',
                 'go_back': 'UI/back2.wav',
                 'selected': 'UI/selected2.wav',
                 'pause': 'UI/pause.wav',
                 'backspace': 'UI/backspace.wav',
                 'keypress': 'UI/keypress.wav',
                 'map_click': 'UI/map_btn_click.wav',
                 'map_click2': 'UI/pause.wav',
                 'task_completed': 'General/task_complete.wav',
                 'imposter_kill_sound': 'Kill/imposter_kill.wav',
                 'imposter_kill_cooldown_sound': 'Kill/imposter_kill_cooldown.wav',
                 'imposter_kill_victim_sound': 'Kill/imposter_kill_victim.wav',
                 'vote_sound': 'UI/votescreen_locking.wav',
                 'fix_electric_wires_BG': 'Tasks Backgrounds/AMB_Electrical.wav',
                 'fixed_electric_wires_BG': 'Tasks Backgrounds/AMB_ElectricRoom.wav',
                 'stabilize_nav_BG': 'Tasks Backgrounds/AMB_Admin.wav',
                 # NOTE(review): the 'emtpy' typo in this key is load-bearing —
                 # callers look the key up by this exact spelling.
                 'emtpy_garbage_BG': 'Tasks Backgrounds/AMB_DecontaminationHall.wav',
                 'reboot_wifi_BG': 'Tasks Backgrounds/AMB_Laboratory.wav',
                 'rebooted_wifi_BG': 'Tasks Backgrounds/AMB_comms #16940.wav',
                 }
ELECTRIC_SHOCK_SOUNDS = ['Electric Shock/AMB_Electricshock1.wav',
                         'Electric Shock/AMB_Electricshock2.wav',
                         'Electric Shock/AMB_Electricshock3.wav',
                         'Electric Shock/AMB_Electricshock4.wav'
                         ]
COMMS_RADIO_SOUNDS = ['Comms Radio/AMB_comms #16940.wav',
                      'Comms Radio/AMB_Comms.wav',
                      'Comms Radio/AMB_CommsRoom.wav',
                      ]
# Looping room-ambience tracks keyed by room name.
AMBIENT_SOUNDS = {'admin_room': 'Ambience/AMB_Admin.wav',
                  'cafeteria': 'Ambience/AMB_Cafeteria.wav',
                  'cockpit': 'Ambience/AMB_Cockpit.wav',
                  'comms1': 'Ambience/AMB_comms #16940.wav',
                  'comms2': 'Ambience/AMB_Comms.wav',
                  'comms3': 'Ambience/AMB_CommsRoom.wav',
                  'electrical1': 'Ambience/AMB_Electrical.wav',
                  'medbay_room': 'Ambience/AMB_MedbayRoom.wav',
                  'electrical_room': 'Ambience/AMB_ElectricRoom.wav',
                  'u_engine_room': 'Ambience/AMB_EngineRoom.wav',
                  'l_engine_room': 'Ambience/AMB_EngineRoom.wav',
                  'reactor_room': 'Ambience/AMB_ReactorRoom.wav',
                  'security_room': 'Ambience/AMB_SecurityRoom.wav',
                  'storage_room': 'Ambience/AMB_Storage.wav',
                  'oxygen_room': 'Ambience/AMB_Oxygen.wav',
                  'launchpad': 'Ambience/AMB_Launchpad.wav',
                  'main': 'Ambience/AMB_Main.wav',
                  'weapons': 'Ambience/AMB_Weapons.wav',
                  }

# Visual Effects (darkness overlay / light masks)
LIGHT_MASK = 'light_350_med.png'
LIGHT_MASK_REACTOR = 'light_350_med_reactor.png'
NIGHT_COLOR = (20, 20, 20)
NIGHT_COLOR_REACTOR = (200, 20, 20)  # reddish tint during the reactor crisis
LIGHT_RADIUS = (500, 500)
LIGHT_RADIUS_REACTOR = (500, 500)

# Bots Position (world coordinates for spawning the bots)
BOT_POS = [(5401, 1530), (3686, 1857), (3733, 2626), (2325, 1814),
           (1718, 1282), (1288, 2418), (1249, 506), (2513, 1286)
           ]

# Mini Map
MAP_BUTTON = "UI/map_button.png"

# ITEMS-------------------
ITEM_IMAGES = {'health': 'health_pack.png',
               'weapon': 'shotgun.png',
               'vent': 'ventilation.png',
               'emerg_btn': 'emergency_icon_inv.png',
               'destroy_asteroids': 'destroy_asteroids.png',
               'nav': 'nav.png',
               'nav_highlight': 'nav_highlight.png'
               }
CLEAR_ASTEROIDS_IMAGES = ['Assets/Images/Tasks/Clear Asteroids/asteroid1.png',
                          'Assets/Images/Tasks/Clear Asteroids/asteroid2.png',
                          'Assets/Images/Tasks/Clear Asteroids/asteroid3.png',
                          'Assets/Images/Tasks/Clear Asteroids/asteroid4.png'
                          ]

# Tasks Setting — interaction distances (presumably pixels — TODO confirm)
DETECT_RADIUS = 250
DETECT_RADIUS_SABOTAGE_FIX = 50
STABILIZE_NAV_RADIUS = 140
EMPTY_GARBAGE_RADIUS = 70
REBOOT_WIFI_RADIUS = 50
FIX_ELECTRICITY_WIRES_RADIUS = 50
VIEW_ADMIN_MAP_CONTROL_RADIUS = 85
VIEW_SECURITY_MONITOR_RADIUS = 170
DIVERT_POWER_TOP_REACTOR_RADIUS = 50
ALIGN_ENGINE_OUTPUT = 50
PICK_STORAGE_GAS_CAN_RADIUS = 50
FUEL_ENGINE = 50

# Pygame Mouse Button Codes
LEFT_MOUSE_BUTTON = 1
MIDDLE_MOUSE_BUTTON = 2
RIGHT_MOUSE_BUTTON = 3
# PLAYER SPRITES MOVEMENTS ----------------------------
# The ~280 lines of copy-pasted per-color loading loops (whose "loops 1 to 16"
# comments did not even match the actual ranges) are replaced by two helpers.
# Every module-level name is preserved, so importers are unaffected.
_SPRITE_SIZE = (64, 86)          # in-game sprite size after smooth scaling
_PLAYER_IMG_ROOT = 'Assets/Images/Player/'


def _load_walk_frames(folder, prefix, direction, count):
    """Load and scale frames step1..step<count> of one walk cycle.

    folder/prefix reflect the on-disk layout, e.g.
    'Assets/Images/Player/<folder>/<prefix>_<direction>_walk/stepN.png'.
    """
    frames = []
    for step in range(1, count + 1):
        path = '{}{}/{}_{}_walk/step{}.png'.format(
            _PLAYER_IMG_ROOT, folder, prefix, direction, step)
        frames.append(
            pygame.transform.smoothscale(pygame.image.load(path), _SPRITE_SIZE))
    return frames


def _load_ghost_frame(folder, prefix, side):
    """Load and scale the single ghost frame facing *side* ('left'/'right')."""
    path = '{}{}/{}_ghost/step1_{}.png'.format(
        _PLAYER_IMG_ROOT, folder, prefix, side)
    return pygame.transform.smoothscale(pygame.image.load(path), _SPRITE_SIZE)


# Red Player Movements (down walk cycles have one extra frame; dead and alert
# images are used unscaled, exactly as before).
red_player_imgs_left = _load_walk_frames('Red', 'red', 'left', 17)
red_player_imgs_right = _load_walk_frames('Red', 'red', 'right', 17)
red_player_imgs_down = _load_walk_frames('Red', 'red', 'down', 18)
red_player_imgs_up = _load_walk_frames('Red', 'red', 'up', 17)
red_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/Deadred.png')
red_player_imgs_ghost_left = _load_ghost_frame('Red', 'red', 'left')
red_player_imgs_ghost_right = _load_ghost_frame('Red', 'red', 'right')
red_player_emergency_meeting = pygame.image.load('Assets/Images/Alerts/emergency_meeting_red.png')
red_player_emergency_meeting_report = pygame.image.load('Assets/Images/Alerts/report_dead_body_red.png')

# Blue Player Movements-----------------
blue_player_imgs_left = _load_walk_frames('Blue', 'blue', 'left', 17)
blue_player_imgs_right = _load_walk_frames('Blue', 'blue', 'right', 17)
blue_player_imgs_down = _load_walk_frames('Blue', 'blue', 'down', 18)
blue_player_imgs_up = _load_walk_frames('Blue', 'blue', 'up', 17)
blue_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/Deadblue.png')
blue_player_imgs_ghost_left = _load_ghost_frame('Blue', 'blue', 'left')
blue_player_imgs_ghost_right = _load_ghost_frame('Blue', 'blue', 'right')
blue_player_emergency_meeting = pygame.image.load('Assets/Images/Alerts/emergency_meeting_blue.png')
blue_player_emergency_meeting_report = pygame.image.load('Assets/Images/Alerts/report_dead_body_blue.png')

# Green Player Movements-----------------
green_player_imgs_left = _load_walk_frames('Green', 'green', 'left', 17)
green_player_imgs_right = _load_walk_frames('Green', 'green', 'right', 17)
green_player_imgs_down = _load_walk_frames('Green', 'green', 'down', 18)
green_player_imgs_up = _load_walk_frames('Green', 'green', 'up', 17)
green_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/Deadgreen.png')
green_player_imgs_ghost_left = _load_ghost_frame('Green', 'green', 'left')
green_player_imgs_ghost_right = _load_ghost_frame('Green', 'green', 'right')
green_player_emergency_meeting = pygame.image.load('Assets/Images/Alerts/emergency_meeting_green.png')
green_player_emergency_meeting_report = pygame.image.load('Assets/Images/Alerts/report_dead_body_green.png')

# Orange Player Movements-----------------
orange_player_imgs_left = _load_walk_frames('Orange', 'orange', 'left', 17)
orange_player_imgs_right = _load_walk_frames('Orange', 'orange', 'right', 17)
orange_player_imgs_down = _load_walk_frames('Orange', 'orange', 'down', 18)
orange_player_imgs_up = _load_walk_frames('Orange', 'orange', 'up', 17)
orange_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/Deadorange.png')
orange_player_imgs_ghost_left = _load_ghost_frame('Orange', 'orange', 'left')
orange_player_imgs_ghost_right = _load_ghost_frame('Orange', 'orange', 'right')
orange_player_emergency_meeting = pygame.image.load('Assets/Images/Alerts/emergency_meeting_orange.png')
orange_player_emergency_meeting_report = pygame.image.load('Assets/Images/Alerts/report_dead_body_orange.png')

# Yellow Player Movements-----------------
yellow_player_imgs_left = _load_walk_frames('Yellow', 'yellow', 'left', 17)
yellow_player_imgs_right = _load_walk_frames('Yellow', 'yellow', 'right', 17)
yellow_player_imgs_down = _load_walk_frames('Yellow', 'yellow', 'down', 18)
yellow_player_imgs_up = _load_walk_frames('Yellow', 'yellow', 'up', 17)
yellow_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/Deadyellow.png')
yellow_player_imgs_ghost_left = _load_ghost_frame('Yellow', 'yellow', 'left')
yellow_player_imgs_ghost_right = _load_ghost_frame('Yellow', 'yellow', 'right')
yellow_player_emergency_meeting = pygame.image.load('Assets/Images/Alerts/emergency_meeting_yellow.png')
yellow_player_emergency_meeting_report = pygame.image.load('Assets/Images/Alerts/report_dead_body_yellow.png')

# Black/Brown/Pink/Purple/White only ship a single frame per direction and
# have no ghost or alert artwork.  Note the capitalized file prefixes for
# Purple and White — they match the on-disk asset names.
black_player_imgs_left = _load_walk_frames('Black', 'black', 'left', 1)
black_player_imgs_right = _load_walk_frames('Black', 'black', 'right', 1)
black_player_imgs_down = _load_walk_frames('Black', 'black', 'down', 1)
black_player_imgs_up = _load_walk_frames('Black', 'black', 'up', 1)
black_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/Deadblack.png')

brown_player_imgs_left = _load_walk_frames('Brown', 'brown', 'left', 1)
brown_player_imgs_right = _load_walk_frames('Brown', 'brown', 'right', 1)
brown_player_imgs_down = _load_walk_frames('Brown', 'brown', 'down', 1)
brown_player_imgs_up = _load_walk_frames('Brown', 'brown', 'up', 1)
brown_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/Deadbrown.png')

pink_player_imgs_left = _load_walk_frames('Pink', 'pink', 'left', 1)
pink_player_imgs_right = _load_walk_frames('Pink', 'pink', 'right', 1)
pink_player_imgs_down = _load_walk_frames('Pink', 'pink', 'down', 1)
pink_player_imgs_up = _load_walk_frames('Pink', 'pink', 'up', 1)
pink_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/Deadpink.png')

purple_player_imgs_left = _load_walk_frames('Purple', 'Purple', 'left', 1)
purple_player_imgs_right = _load_walk_frames('Purple', 'Purple', 'right', 1)
purple_player_imgs_down = _load_walk_frames('Purple', 'Purple', 'down', 1)
purple_player_imgs_up = _load_walk_frames('Purple', 'Purple', 'up', 1)
purple_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/DeadPurple.png')

white_player_imgs_left = _load_walk_frames('White', 'White', 'left', 1)
white_player_imgs_right = _load_walk_frames('White', 'White', 'right', 1)
white_player_imgs_down = _load_walk_frames('White', 'White', 'down', 1)
white_player_imgs_up = _load_walk_frames('White', 'White', 'up', 1)
white_player_imgs_dead = pygame.image.load('Assets/Images/Player/Dead/DeadWhite.png')
5078369 | <gh_stars>0
# flake8: noqa
from .client import Client
from .compatpatch import ClientCompatPatch
from .endpoints.common import MediaTypes
from .endpoints.upload import MediaRatios
from .errors import (
ClientError, ClientLoginError, ClientLoginRequiredError,
ClientCookieExpiredError, ClientThrottledError, ClientConnectionError,
ClientCheckpointRequiredError, ClientChallengeRequiredError,
ClientSentryBlockError, ClientReqHeadersTooLargeError,
)
__version__ = '1.6.0'
| StarcoderdataPython |
1677334 | from unittest import TestCase
from unittest.mock import MagicMock
from giru.core.commands import Julien
from giru.core.commands import julien as JULIEN_PICTURES
from tests.mixins import CommandTestCaseMixin
class JulienCommandTestCase(CommandTestCaseMixin, TestCase):
    """The Julien command must post exactly one of the known Julien pictures."""

    def test_it_sends_a_photo(self):
        chat_id = 123
        update = self.create_mock_update_with_chat_id_and_message(chat_id)
        self.bot.send_photo = MagicMock()

        Julien(self.bot, update)

        call = self.bot.send_photo.call_args
        self.assertIsNotNone(call)
        _, kwargs = call
        self.assertIn(kwargs.get("photo"), JULIEN_PICTURES)
| StarcoderdataPython |
8100821 | <filename>block.py
from ursina import *
from ursina import curve
# Normal Block Class
class NormalBlock(Entity):
    """Static platform: a green textured cube with a box collider."""

    def __init__(self, position=(0, 0, 0), rotation=(0, 0, 0), scale=(5, 0.8, 5)):
        super().__init__(
            model="cube",
            texture="white_cube",
            color="#AFFF3C",
            collider="box",
            scale=scale,
            position=position,
            rotation=rotation,
        )
# Jump Block Class
class JumpBlock(Entity):
    """Bounce pad: an orange cube the player jumps off.

    The ``scale`` parameter generalizes the previously hard-coded
    ``Vec3(5, 0.8, 5)`` so the signature matches NormalBlock/SpeedBlock;
    the default keeps the original dimensions (ursina converts tuples to
    Vec3 internally), so existing callers are unaffected.
    """

    def __init__(self, position=(0, 0, 0), rotation=(0, 0, 0), scale=(5, 0.8, 5)):
        super().__init__(
            model="cube",
            scale=scale,
            color="#FF8B00",
            collider="box",
            texture="white_cube",
            position=position,
            rotation=rotation,
        )
# Speed Block Class
class SpeedBlock(Entity):
    """Speed strip: a cyan textured cube that boosts the player."""

    def __init__(self, position=(0, 0, 0), rotation=(0, 0, 0), scale=(5, 0.5, 10)):
        super().__init__(
            model="cube",
            texture="white_cube",
            color="#53FFF5",
            collider="box",
            scale=scale,
            position=position,
            rotation=rotation,
        )
class EndBlock(Entity):
    """Level goal: a blue cube marking the finish.

    The ``scale`` parameter generalizes the previously hard-coded
    ``(4, 4, 4)`` so the signature matches the other block classes; the
    default preserves the original size, so existing callers are unaffected.
    """

    def __init__(self, position=(0, 0, 0), rotation=(0, 0, 0), scale=(4, 4, 4)):
        super().__init__(
            model="cube",
            scale=scale,
            color="#2D49FB",
            collider="box",
            texture="white_cube",
            position=position,
            rotation=rotation,
        )
| StarcoderdataPython |
5004233 |
import login
from BioSimSpaceCloud import ObjectStore as objstore
import datetime
import sys

# Fetch an object from the store: argv[1] is the key; optional argv[2] is a
# destination filename (otherwise the object is printed to stdout).
bucket = login.login()

key = sys.argv[1]
# Previously a bare `except:` around sys.argv[2] swallowed every exception
# (including KeyboardInterrupt); an explicit length check is both safer and
# clearer about the intent.
filename = sys.argv[2] if len(sys.argv) > 2 else None

if filename:
    objstore.get_object_as_file(bucket, key, filename)
else:
    data = objstore.get_string_object(bucket, key)
    print(data)
| StarcoderdataPython |
179865 | <gh_stars>0
import os
from scope_plot.specification import Specification
# Absolute path of the shared test-fixture directory (../__fixtures relative
# to this test module).
FIXTURES_DIR = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), "..", "__fixtures")
def test_bokeh_errorbar_spec():
    """The bokeh errorbar fixture must load as an 'errorbar' figure."""
    spec_path = os.path.join(FIXTURES_DIR, "bokeh_errorbar.yml")
    figure_spec = Specification.load_yaml(spec_path)
    assert figure_spec.ty() == "errorbar"
def test_bokeh_bar_spec():
    """The bokeh bar fixture must load as a 'bar' figure."""
    spec_path = os.path.join(FIXTURES_DIR, "bokeh_bar.yml")
    figure_spec = Specification.load_yaml(spec_path)
    assert figure_spec.ty() == "bar"
def test_bokeh_subplots_spec():
    """The first subplot of the bokeh subplots fixture must be an 'errorbar'."""
    spec_path = os.path.join(FIXTURES_DIR, "bokeh_subplots.yml")
    figure_spec = Specification.load_yaml(spec_path)
    assert figure_spec.subplots[0].ty() == "errorbar"
def test_matplotlib_regplot_spec():
    """The matplotlib regplot fixture must load as a 'regplot' figure."""
    spec_path = os.path.join(FIXTURES_DIR, "matplotlib_regplot.yml")
    figure_spec = Specification.load_yaml(spec_path)
    assert figure_spec.ty() == "regplot"
| StarcoderdataPython |
1864927 | """
Officially supported builder workflows
"""
import aws_lambda_builders.workflows.python_pip
import aws_lambda_builders.workflows.nodejs_npm
import aws_lambda_builders.workflows.ruby_bundler
import aws_lambda_builders.workflows.go_dep
import aws_lambda_builders.workflows.go_modules
import aws_lambda_builders.workflows.java_gradle
import aws_lambda_builders.workflows.dotnet_clipackage
| StarcoderdataPython |
9722245 | import os
def tru(val):
    """Identity helper: pass *val* straight through unchanged."""
    return val
def notdefault(val):
    """Return *val*, mapping the literal string 'default' to None."""
    if val == 'default':
        return None
    return val
def is_subdir(path, directory):
    """True if *path* lies at or below *directory* after resolving symlinks.

    A path equal to *directory* itself counts as "inside" (relpath is '.').
    """
    resolved_path = os.path.realpath(path)
    resolved_dir = os.path.realpath(directory)
    rel = os.path.relpath(resolved_path, resolved_dir)
    outside = rel == os.pardir or rel.startswith(os.pardir + os.sep)
    return not outside
| StarcoderdataPython |
# Split one large test case ('8.in') into 12 smaller inputs p0.in .. p11.in:
# one record, then ten chunks of 50 records, then one record.
# (The first line previously carried a fused dataset artifact that made it a
# syntax error; the statement itself is unchanged.)
n = [1] + [50] * 10 + [1]

with open('8.in', 'r') as f:
    # Header: total record count, m extra header lines to skip, k, op.
    totn, m, k, op = [int(x) for x in f.readline().split()]
    for i in range(m):
        f.readline()
    for i, v in enumerate(n):
        with open('p%d.in' % i, 'w') as o:
            # Each chunk gets its own header: <count> 0 <k> 2
            o.write('%d 0 %d 2\n' % (v, k))
            for j in range(v):
                # NOTE(review): readline() keeps its trailing newline, so the
                # extra '\n' leaves a blank line after every record — confirm
                # the consumer expects that format before changing it.
                o.write(f.readline() + '\n')
| StarcoderdataPython |
5073955 | <reponame>lucaskup/VizFracSet
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 26 18:22:52 2020

@author: LKUPSSINSKU

Rasterize the COCO 'Fracture' annotations of one randomly chosen image into a
single label mask and save it as a PNG.
"""
# to install the library:
# pip install git+https://github.com/philferriere/cocoapi.git#subdirectory=PythonAPI
from pycocotools.coco import COCO
import numpy as np
import imageio as io

dataDir = '../dataset/jandaira/pixel/'
dataType = 'train2014'
# Fixed: this literal had a dead `.format(dataDir, dataType)` call with no
# placeholders (the arguments were silently ignored); the value is unchanged.
annFile = '../dataset/jandaira/pixel/jandaira.json'
coco = COCO(annFile)

# Pick a random image that carries at least one 'Fracture' annotation.
catIds = coco.getCatIds(catNms=['Fracture'])
imgIds = coco.getImgIds(catIds=catIds)
imgIds = coco.getImgIds(imgIds=imgIds[0])
img = coco.loadImgs(imgIds[np.random.randint(0, len(imgIds))])[0]
I = io.imread('../dataset/jandaira/pixel/' + img['file_name'])

# Merge every annotation of the image into one mask: each pixel holds the
# category id of the annotation covering it (0 = background).
cat_ids = coco.getCatIds()
anns_ids = coco.getAnnIds(imgIds=img['id'], catIds=cat_ids, iscrowd=None)
anns = coco.loadAnns(anns_ids)
anns_img = np.zeros((img['height'], img['width']))
for ann in anns:
    anns_img = np.maximum(anns_img, coco.annToMask(ann) * ann['category_id'])

# The fused dataset artifact on the last line is removed; the write itself is
# unchanged.
io.imwrite('../dataset/jandaira/mask/jandaira.png', anns_img)
8146340 |
# import python
from abc import ABC
# import genie
from genie.conf.base.config import CliConfig
from genie.conf.base.cli import CliConfigBuilder
from genie.conf.base.attributes import UnsupportedAttributeWarning, \
AttributesHelper
class Pim(ABC):
    class DeviceAttributes(ABC):
        # Device-level PIM configuration: emits/removes the top-level
        # 'feature pim' / 'feature pim6' CLI and delegates per-VRF
        # configuration to the vrf_attr sub-attributes.

        def build_config(self, apply=True, attributes=None, unconfig=False,
                         **kwargs):
            """Generate (and, when apply=True, push) PIM feature CLI.

            apply -- push configuration to self.device; when False a
                     CliConfig object is returned instead.
            attributes -- attribute-selection helper input (wildcard when None).
            unconfig -- emit removal ('no ...') CLI instead.
            """
            assert not kwargs, kwargs
            attributes = AttributesHelper(self, attributes)
            configurations = CliConfigBuilder(unconfig=unconfig)

            cfg_line = []
            unconfig_line = []

            # enabled
            if attributes.value('enabled'):
                if unconfig is False:
                    configurations.append_line(attributes.format('feature pim'))
                    configurations.append_line(attributes.format('feature pim6'))

                # Make sure that only enabled was provided in attributes
                # If wildcard, then delete everything
                elif unconfig is True and\
                    attributes.attributes == {'enabled': {True: None}} or \
                    attributes.iswildcard:
                    configurations.append_line('no feature pim', raw=True)
                    configurations.append_line('no feature pim6', raw=True)

                if apply:
                    if configurations:
                        self.device.configure(configurations)
                else:
                    return CliConfig(device=self.device, unconfig=unconfig,
                                     cli_config=configurations)

            # enable_pim
            elif attributes.value('enabled_pim'):
                cfg_line.append('feature pim')
                unconfig_line.append('no feature pim')

            # enable_pim6
            elif attributes.value('enabled_pim6'):
                cfg_line.append('feature pim6')
                unconfig_line.append('no feature pim6')

            if cfg_line:
                if unconfig is False:
                    configurations.append_line('\n'.join(cfg_line))
                elif unconfig is True:
                    configurations.append_line('\n'.join(unconfig_line), raw=True)

            if apply:
                if configurations:
                    self.device.configure(configurations)
            else:
                return CliConfig(device=self.device, unconfig=unconfig,
                                 cli_config=configurations)

            # VrfAttributes
            # NOTE(review): with apply=False the return just above fires
            # unconditionally, so this vrf_attr section looks unreachable in
            # that mode — confirm whether that is intended before relying on
            # per-VRF output from this method.
            for sub, attributes2 in attributes.mapping_values('vrf_attr',
                sort=True, keys=self.vrf_attr):
                configurations.append_block(
                    sub.build_config(apply=False,
                                     attributes=attributes2,
                                     unconfig=unconfig))

            if apply:
                if configurations:
                    self.device.configure(configurations)
            else:
                return CliConfig(device=self.device, unconfig=unconfig,
                                 cli_config=configurations)

        def build_unconfig(self, apply=True, attributes=None, **kwargs):
            """Remove configuration: delegate to build_config(unconfig=True)."""
            return self.build_config(apply=apply, attributes=attributes,
                                     unconfig=True, **kwargs)
class VrfAttributes(ABC):
        def build_config(self, apply=True, attributes=None, unconfig=False,
                         **kwargs):
            """Return per-VRF PIM CLI as a string.

            Concatenates the CLI blocks produced by each address-family
            sub-attribute.  Only callable with apply=False (asserted): the
            parent DeviceAttributes is responsible for pushing configuration.
            """
            assert not apply
            assert not kwargs, kwargs
            attributes = AttributesHelper(self, attributes)
            configurations = CliConfigBuilder(unconfig=unconfig)

            # address_family
            for address_family_sub, address_family_attributes in \
                attributes.mapping_values(
                    'address_family_attr', sort=True,
                    keys = self.address_family_attr):
                configurations.append_block(
                    address_family_sub.build_config(apply=False,
                        attributes=address_family_attributes,
                        unconfig=unconfig))

            return str(configurations)
def build_unconfig(self, apply=True, attributes=None, **kwargs):
return self.build_config(apply=apply, attributes=attributes,
unconfig=True, **kwargs)
class AddressFamilyAttributes(ABC):
"""Per address-family (ipv4/ipv6) PIM configuration for one VRF.

Renders every PIM CLI knob (auto-RP, send-RP-discovery, BSR
bsr-candidate / rp-candidate, register policies, sg-expiry timers,
SPT switchover, static RPs) and then delegates to the per-interface
sub-attributes.
"""
def build_config(self, apply=True, attributes=None,
unconfig=False, **kwargs):
"""Return the CLI lines (as a string) for this address family.

apply must be False here: the device push is done by the caller.
With unconfig=True the same structure renders removal ('no ...')
lines instead.
"""
assert not apply
assert not kwargs, kwargs
attributes = AttributesHelper(self, attributes)
configurations = CliConfigBuilder(unconfig=unconfig)
# assign ip version according to the address_family_attr
if hasattr(self, 'address_family'):
if 'ipv4' in self.address_family.value:
self.ip = 'ip'
if 'ipv6' in self.address_family.value:
self.ip = 'ipv6'
# NOTE(review): self.ip is only assigned when 'address_family' exists;
# every '{ip}' placeholder below relies on it -- confirm callers always
# provide the address family before building.
if unconfig:
attributes.attributes['ip'] = self.ip
# All lines are emitted inside 'vrf context <name>' unless this is the
# default VRF, in which case no submode is opened.
with configurations.submode_context(
None if self.vrf_name == 'default' else
attributes.format(
'vrf context {vrf_name}', force=True)):
# no configuration append if vrf is default
if self.vrf_name != 'default' and unconfig and \
attributes.iswildcard:
configurations.submode_unconfig()
# ==== PIM Auto-RP =======
if attributes.value('auto_rp') or \
attributes.value('send_rp'):
# auto-rp rp-candidate | send-rp-announce
if attributes.value('auto_rp'):
auto_rp_str = 'ip pim auto-rp rp-candidate'
elif attributes.value('send_rp'):
auto_rp_str = 'ip pim send-rp-announce'
# + {send_rp_announce_rp_group} |
# + {send_rp_announce_intf}
if attributes.value('send_rp_announce_rp_group'):
auto_rp_str += ' {send_rp_announce_rp_group}'
elif attributes.value('send_rp_announce_intf'):
auto_rp_str += ' {send_rp_announce_intf}'
else:
auto_rp_str = ''
# + group-list {send_rp_announce_group_list} |
# + route-map {send_rp_announce_route_map} |
# + prefix-list {send_rp_announce_prefix_list}
if auto_rp_str:
if attributes.value('send_rp_announce_group_list'):
auto_rp_str += ' group-list'\
' {send_rp_announce_group_list}'
elif attributes.value('send_rp_announce_route_map'):
auto_rp_str += ' route-map'\
' {send_rp_announce_route_map}'
elif attributes.value('send_rp_announce_prefix_list'):
auto_rp_str += ' prefix-list'\
' {send_rp_announce_prefix_list}'
else:
auto_rp_str = ''
if auto_rp_str:
# + interval {send_rp_announce_interval}
if attributes.value('send_rp_announce_interval'):
auto_rp_str += ' interval'\
' {send_rp_announce_interval}'
# + scope {send_rp_announce_scope}
if attributes.value('send_rp_announce_scope'):
auto_rp_str += ' scope {send_rp_announce_scope}'
# + bidir
if attributes.value('send_rp_announce_bidir'):
auto_rp_str += ' bidir'
configurations.append_line(
attributes.format(auto_rp_str))
# === PIM Send-RP-Discovery ===
# ip pim auto-rp mapping-agent|send-rp-discovery
# <send_rp_discovery_intf>
# [scope <send_rp_discovery_scope>]
if attributes.value('auto_rp_discovery') or \
attributes.value('send_rp_discovery'):
# set auto-rp method
if attributes.value('auto_rp_discovery'):
pre_str = 'ip pim auto-rp mapping-agent'
if attributes.value('send_rp_discovery'):
pre_str = 'ip pim send-rp-discovery'
# <send_rp_discovery_intf>
# [scope <send_rp_discovery_scope>]
if attributes.value('send_rp_discovery_intf') and \
attributes.value('send_rp_discovery_scope'):
pre_str +=' {send_rp_discovery_intf}'\
' scope {send_rp_discovery_scope}'
elif attributes.value('send_rp_discovery_intf') and \
not attributes.value('send_rp_discovery_scope'):
pre_str +=' {send_rp_discovery_intf}'
configurations.append_line(
attributes.format(pre_str))
# reset for the next use
pre_str = ''
# ip pim auto-rp forward listen
if attributes.value('autorp_listener'):
configurations.append_line(
attributes.format('ip pim auto-rp forward listen'))
# ==== PIM BSR =======
# == bsr bsr-candidate ==
# ip/ipv6 pim auto-rp forward listen
# ip/ipv6 pim [bsr] bsr-candidate <bsr_candidate_interface>
if attributes.value('bsr_candidate_interface'):
# ip/ipv6 pim bsr forward listen
configurations.append_line(
attributes.format('{ip} pim bsr forward listen'))
# ip/ipv6 pim bsr-candidate {bsr_candidate_interface}
bsr_str = '{ip} pim bsr-candidate '\
'{bsr_candidate_interface}'
# + hash-len <bsr_candidate_hash_mask_length>
if attributes.value('bsr_candidate_hash_mask_length'):
bsr_str += ' hash-len '\
'{bsr_candidate_hash_mask_length}'
# + interval <bsr_candidate_interval>
if attributes.value('bsr_candidate_interval'):
bsr_str += ' interval {bsr_candidate_interval}'
# + priority <bsr_candidate_priority>
if attributes.value('bsr_candidate_priority'):
bsr_str += ' priority {bsr_candidate_priority}'
configurations.append_line(
attributes.format(bsr_str))
# == bsr rp-candidate ==
# ip/ipv6 pim auto-rp forward listen
# ip/ipv6 pim [bsr] rp-candidate <bsr_rp_candidate_interface>
if attributes.value('bsr_rp_candidate_interface'):
# ip/ipv6 pim bsr forward listen
configurations.append_line(
attributes.format('{ip} pim bsr forward listen'))
# ip/ipv6 pim rp-candidate {bsr_rp_candidate_interface}
bsr_rp_str = '{ip} pim rp-candidate '\
'{bsr_rp_candidate_interface}'
# + group-list {bsr_rp_candidate_group_list} |
# + route-map {bsr_rp_candidate_route_map} |
# + prefix-list {bsr_rp_candidate_prefix_list}
if attributes.value('bsr_rp_candidate_group_list'):
bsr_rp_str += ' group-list'\
' {bsr_rp_candidate_group_list}'
elif attributes.value('bsr_rp_candidate_route_map'):
bsr_rp_str += ' route-map'\
' {bsr_rp_candidate_route_map}'
elif attributes.value('bsr_rp_candidate_prefix_list'):
bsr_rp_str += ' prefix-list'\
' {bsr_rp_candidate_prefix_list}'
else:
bsr_rp_str = ''
if bsr_rp_str:
# +priority <bsr_rp_candidate_priority>
if attributes.value('bsr_rp_candidate_priority'):
bsr_rp_str += ' priority '\
'{bsr_rp_candidate_priority}'
# +interval <bsr_rp_candidate_interval>
if attributes.value('bsr_rp_candidate_interval'):
bsr_rp_str += ' interval {bsr_rp_candidate_interval}'
# +bidir
if attributes.value('bsr_rp_candidate_bidir'):
bsr_rp_str += ' bidir'
configurations.append_line(
attributes.format(bsr_rp_str))
# ip/ipv6 pim register-policy <accept_register>
if attributes.value('accept_register'):
configurations.append_line(
attributes.format(
'{ip} pim register-policy '
'{accept_register}'))
# ip pim register-policy prefix-list
# <accept_register_prefix_list>
if attributes.value('accept_register_prefix_list') \
and self.ip == 'ip':
configurations.append_line(
attributes.format(
'ip pim register-policy prefix-list '
'{accept_register_prefix_list}'))
# ip/ipv6 pim log-neighbor-changes
if attributes.value('log_neighbor_changes'):
configurations.append_line(
attributes.format(
'{ip} pim log-neighbor-changes'))
# ip pim register-source <register_source>
if attributes.value('register_source') and \
self.ip == 'ip':
configurations.append_line(
attributes.format(
'ip pim register-source {register_source}'))
# ip pim sg-expiry-timer infinity
if attributes.value('sg_expiry_timer_infinity') and \
not attributes.value('sg_expiry_timer_prefix_list') \
and not attributes.value('sg_expiry_timer_sg_list'):
configurations.append_line(
attributes.format(
'ip pim sg-expiry-timer infinity'))
# ip pim sg-expiry-timer <sg_expiry_timer>
if attributes.value('sg_expiry_timer') and \
not attributes.value('sg_expiry_timer_prefix_list') \
and not attributes.value('sg_expiry_timer_sg_list'):
configurations.append_line(
attributes.format(
'ip pim sg-expiry-timer {sg_expiry_timer}'))
# ip pim sg-expiry-timer <sg_expiry_timer>
# prefix-list <sg_expiry_timer_prefix_list>
if attributes.value('sg_expiry_timer') and \
attributes.value('sg_expiry_timer_prefix_list'):
configurations.append_line(
attributes.format(
'ip pim sg-expiry-timer {sg_expiry_timer} '
'prefix-list {sg_expiry_timer_prefix_list}'))
# ip pim sg-expiry-timer <sg_expiry_timer>
# sg-list <sg_expiry_timer_sg_list>
if attributes.value('sg_expiry_timer') and \
attributes.value('sg_expiry_timer_sg_list'):
configurations.append_line(
attributes.format(
'ip pim sg-expiry-timer {sg_expiry_timer} '
'sg-list {sg_expiry_timer_sg_list}'))
# ip pim sg-expiry-timer infinity
# prefix-list <sg_expiry_timer_prefix_list>
if attributes.value('sg_expiry_timer_infinity') and \
attributes.value('sg_expiry_timer_prefix_list'):
configurations.append_line(
attributes.format(
'ip pim sg-expiry-timer infinity '
'prefix-list {sg_expiry_timer_prefix_list}'))
# ip pim sg-expiry-timer infinity
# sg-list <sg_expiry_timer_sg_list>
if attributes.value('sg_expiry_timer_infinity') and \
attributes.value('sg_expiry_timer_sg_list'):
configurations.append_line(
attributes.format(
'ip pim sg-expiry-timer infinity '
'sg-list {sg_expiry_timer_sg_list}'))
# ip/ipv6 pim spt-threshold infinity group-list
# <spt_switch_policy>
if attributes.value('spt_switch_infinity') and \
attributes.value('spt_switch_policy'):
configurations.append_line(
attributes.format(
'{ip} pim spt-threshold {spt_switch_infinity.value} '
'group-list {spt_switch_policy}'))
# ip/ipv6 pim use-shared-tree-only group-list <spt_switch_policy>
if not attributes.value('spt_switch_infinity') and \
attributes.value('spt_switch_policy'):
configurations.append_line(
attributes.format(
'{ip} pim use-shared-tree-only group-list'
' {spt_switch_policy}'))
# Static RP address Attributes under top level config
for groups, attributes2 in attributes.sequence_values(
'rp_addresses', sort=True):
kwargs = {'ip_type': self.ip}
if unconfig:
configurations.append_block(groups.build_unconfig(
apply=False, attributes=attributes2, **kwargs))
else:
configurations.append_block(groups.build_config(
apply=False, attributes=attributes2, **kwargs))
# InterfaceAttributes
for sub, attributes2 in attributes.mapping_values(
'interface_attr', keys=self.interface_attr,
sort=True):
configurations.append_block(
sub.build_config(apply=False,
attributes=attributes2,
unconfig=unconfig))
return str(configurations)
def build_unconfig(self, apply=True, attributes=None,
**kwargs):
"""Remove this address-family configuration (build_config with
unconfig=True)."""
return self.build_config(apply=apply,
attributes=attributes,
unconfig=True, **kwargs)
class InterfaceAttributes(ABC):
"""Per-interface PIM configuration (sparse-mode, jp-policy, border,
hello-interval, dr-priority, neighbor-policy) rendered inside the
'interface <name>' submode."""
def build_config(self, apply=True, attributes=None,
unconfig=False, **kwargs):
"""Return the CLI lines for one interface; apply must be False."""
assert not apply
assert not kwargs, kwargs
attributes = AttributesHelper(self, attributes)
configurations = CliConfigBuilder(unconfig=unconfig)
# if self.vrf_name != 'default':
# configurations.append_line('exit')
# the interface should have vrf(name = vrf_name) attached
with configurations.submode_context(
attributes.format('interface {interface_name}',
force=True)):
if unconfig and attributes.iswildcard:
configurations.submode_unconfig()
if unconfig:
attributes.attributes['ip'] = self.ip
# interface <intf_name>
# ip/ipv6 pim sparse-mode
if attributes.value('mode'):
configurations.append_line(
attributes.format('{ip} pim sparse-mode'))
# interface <intf_name>
# ip/ipv6 pim jp-policy <boundary> [in|out]
# Four mutually exclusive variants depending on which of the
# boundary_in / boundary_out direction flags are set.
if attributes.value('boundary') and \
attributes.value('boundary_in') and \
attributes.value('boundary_out'):
configurations.append_line(
attributes.format(
'{ip} pim jp-policy {boundary} in'))
configurations.append_line(
attributes.format(
'{ip} pim jp-policy {boundary} out'))
elif attributes.value('boundary') and \
attributes.value('boundary_in') and \
not attributes.value('boundary_out'):
configurations.append_line(
attributes.format(
'{ip} pim jp-policy {boundary} in'))
elif attributes.value('boundary') and \
not attributes.value('boundary_in') and \
attributes.value('boundary_out'):
configurations.append_line(
attributes.format(
'{ip} pim jp-policy {boundary} out'))
elif attributes.value('boundary') and \
not attributes.value('boundary_in') and \
not attributes.value('boundary_out'):
configurations.append_line(
attributes.format(
'{ip} pim jp-policy {boundary}'))
# interface <intf_name>
# ip/ipv6 pim border
if attributes.value('bsr_border'):
configurations.append_line(
attributes.format('{ip} pim border'))
# interface <intf_name>
# ip/ipv6 pim hello-interval {hello_interval}
if attributes.value('hello_interval'):
configurations.append_line(
attributes.format(
'{ip} pim hello-interval '
'{hello_interval}'))
# interface <intf_name>
# ip/ipv6 pim dr-priority {dr_priority}
if attributes.value('dr_priority'):
configurations.append_line(
attributes.format(
'{ip} pim dr-priority '
'{dr_priority}'))
# interface <intf_name>
# ip/ipv6 pim neighbor-policy {neighbor_filter}
if attributes.value('neighbor_filter'):
configurations.append_line(
attributes.format(
'{ip} pim neighbor-policy '
'{neighbor_filter}'))
# interface <intf_name>
# ip/ipv6 pim neighbor-policy prefix-list
# <neighbor_filter_prefix_list>
if attributes.value('neighbor_filter_prefix_list') \
and self.ip == 'ip':
configurations.append_line(
attributes.format(
'ip pim neighbor-policy prefix-list '
'{neighbor_filter_prefix_list}'))
return str(configurations)
def build_unconfig(self, apply=True, attributes=None,
**kwargs):
"""Remove this interface configuration (build_config with
unconfig=True)."""
return self.build_config(apply=apply,
attributes=attributes,
unconfig=True, **kwargs)
| StarcoderdataPython |
5064619 | import random
import numpy as np
from itertools import combinations
import torch
import torch.nn.functional as F
# Seed Python's global RNG once at import time so triplet sampling is
# reproducible across runs.
random.seed(0)
def get_random_triplets(embeddings) -> torch.Tensor:
    """Build random (anchor, positive, negative) triplets from per-class embeddings.

    Every embedding of every class serves as an anchor: the positive is
    sampled uniformly from the anchor's own class (it may be the anchor
    itself), and one negative is sampled uniformly from each of the n-1
    other classes.  With n classes of k embeddings each this yields
    n * k * (n - 1) triplets (e.g. 3 classes of 10 images -> 60 triplets).

    :param embeddings: list of tensors, one per class, each of shape (k_i, d)
    :return: tensor of shape (n_triplets, 3, d) stacking [anchor, positive, negative]
    """
    triplets = []
    for class_idx, class_embeddings in enumerate(embeddings):
        for anchor in class_embeddings:
            # Positive: a uniform sample from the anchor's own class.
            positive = random.choice(class_embeddings)
            # Negative: one uniform sample from every *other* class.
            # Skipping by index (instead of the previous pop()/insert()
            # dance) avoids mutating `embeddings` while iterating it, so
            # an exception mid-loop can no longer corrupt the caller's list.
            for other_idx, negatives in enumerate(embeddings):
                if other_idx == class_idx:
                    continue
                negative = random.choice(negatives)
                triplets.append(torch.stack([anchor, positive, negative], dim=0))
    return torch.stack(triplets, dim=0)
def triplet_loss(anchor, positive, negative, margin=1):
    """Mean triplet margin loss with *squared* Euclidean distances.

        loss = mean(relu(||a - p||^2 - ||a - n||^2 + margin))

    ref: https://github.com/adambielski/siamese-triplet/blob/master/losses.py#L24
    """
    dist_to_positive = torch.sum((anchor - positive) ** 2, dim=-1)
    dist_to_negative = torch.sum((anchor - negative) ** 2, dim=-1)
    hinge = F.relu(dist_to_positive - dist_to_negative + margin)
    return hinge.mean()
def count_parameters(model):
    """Return the number of trainable (requires_grad) scalar parameters in *model*."""
    total = 0
    for param in model.parameters():
        if param.requires_grad:
            total += param.numel()
    return total
#################################################
#### CODE BEYOND THIS LINE IS NO LONGER USED ####
#################################################
def __get_random_triplets(no_classes:int, images, target, no_triplets:int):
"""(unused legacy) Sample `no_triplets` random (anchor, positive, negative)
image triplets from `images` labelled by `target`.

Returns a stacked tensor of shape (no_triplets, 3, ...) of /255-scaled
images, plus the list of (pos_class, neg_class) pairs used.
"""
# Ref : https://github.com/tamerthamoqa/facenet-pytorch-vggface2/blob/master/datasets/TripletLossDataset.py#L76-L136
randomstate = np.random.RandomState(seed=None)
# no_class = len(lfw_people.target_names)
triplets = []
class_pairs = []
# progress_bar = tqdm(range(no_triplets), desc='fetching triplets')
for progress in range(no_triplets):
# Draw two distinct class labels.
pos_class = randomstate.choice(no_classes)
neg_class = randomstate.choice(no_classes)
while pos_class == neg_class:
neg_class = randomstate.choice(no_classes)
# pos_name = lfw_people.target_names[pos_class]
# neg_name = lfw_people.target_names[neg_class]
pos_imgs = images[target == pos_class]
neg_imgs = images[target == neg_class]
# Exactly two images: use both deterministically; otherwise draw two
# distinct indices at random.
# NOTE(review): if a class holds a single image, the while-loop below
# never terminates (randint(0, 1) always returns 0) -- confirm callers
# guarantee at least two images per class.
if pos_imgs.shape[0] == 2:
ianc, ipos = 0,1
else:
ianc = randomstate.randint(0, pos_imgs.shape[0])
ipos = randomstate.randint(0, pos_imgs.shape[0])
while ianc == ipos:
ipos = randomstate.randint(0, pos_imgs.shape[0])
ineg = randomstate.randint(0, neg_imgs.shape[0])
# NOTE(review): the /255 scaling presumably assumes 8-bit pixel
# values -- verify the dtype/range of `images` at the call site.
triplets.append(
torch.stack([
torch.from_numpy(pos_imgs[ianc] / 255),
torch.from_numpy(pos_imgs[ipos] / 255),
torch.from_numpy(neg_imgs[ineg] / 255)
]))
class_pairs.append((pos_class, neg_class))
return torch.stack(triplets) , class_pairs
def __get_all_tensor_triplets(embeddings:list, targets:list) -> torch.Tensor:
'''
(unused legacy) The reason for not consider all possible triplets:
1. Since we are considering all possible triplets, there might
chances of overfitting.
2. In larger dataset this is a bad practice i.e getting all possible
triplets isn't feasible and also model will memorize the data
resulting in overfitting.
'''
'''
Parameters
----------
embeddings : list of torch.tensor each of shape torch.Size([?, 128])
targets : list of ints
Returns
-------
triplets : torch.tensor of shape torch.Size([?, no(triplets), 3])
'''
# eg : no(targets) = 3
# eg : no(embeds) = 10
assert len(embeddings) == len(targets) , "Embeddings and Targets must have same lenght"
triplets = []
# NOTE(review): the pop()/insert() pair below temporarily mutates the input
# list while it is being iterated; it is restored each pass, but an
# exception mid-loop would leave `embeddings` missing one element.
for i, anchor in enumerate(embeddings):
positive_pairs = list(combinations(anchor,2)) # this will give of distinct pairs of elements
# no(pos_pairs) = no(targets)P2 / 2! # eg : 45
temp = embeddings.pop(i) # embeddings list except the anchor
for negative_embeddings in torch.cat(embeddings,dim=0): # loops runs for no(targets)-1 * no(embeds) times # eg : (3-1)*10=20
triple = [torch.stack([positive_pair[0], positive_pair[1], negative_embeddings], dim=0) for positive_pair in positive_pairs] # no(triple) = n(pos_pair) # eg: 45
triplets.extend(triple) # no. of triplets added = no(pos_pairs)*(no(targets) - 1)*(no(embeds)) # eg: 45*2*10=900
embeddings.insert(i, temp)
return torch.stack(triplets, dim=0) # no(triplets) = no(added_triples) * no(embeds) # eg: 900*3 = 2700
| StarcoderdataPython |
5129909 | #
# Generic classes for the sector implementation in subtraction schemes
#
import commons.generic_sectors as generic_sectors
import madgraph.various.misc as misc
class Sector(generic_sectors.GenericSector):
    """A single NLO sector, identified by the pair of final-state leg numbers
    whose collinear limit it isolates, with its evaluation as a callable."""

    def __init__(self, leg_numbers, **opts):
        super(Sector, self).__init__(**opts)
        self.leg_numbers = leg_numbers

    def __call__(self, PS_point, PDGs, counterterm_index=-1, input_mapping_index=-1):
        """Return the sector weight (float) for the kinematic configuration
        *PS_point* (a dict of momenta) and external-state flavours *PDGs*.

        This is also invoked on the reduced kinematics of counter-events;
        `counterterm_index` (-1 meaning none) identifies the counterterm that
        produced them, and `input_mapping_index` is only set for integrated
        counterterms.  Keep the logic here minimal -- selection logic belongs
        in the sector generator.
        """
        # Hard-coded for the real-emission process e+(1) e-(2) > g(3) d(4) d~(5):
        # the reduced NLO singly-unresolved kinematics (two final-state legs)
        # need no sectoring.
        if len(PDGs[1]) == 2:
            return 1.0
        # Partial fractioning: sector (3, i) is weighted by the invariant of
        # the *other* pair, s(3, j) / (s(3,4) + s(3,5)), so its weight tends
        # to 1 in its own collinear limit s(3, i) -> 0.
        s_34 = (PS_point[3] + PS_point[4]).square()
        s_35 = (PS_point[3] + PS_point[5]).square()
        numerator = s_35 if self.leg_numbers == (3, 4) else s_34
        return numerator / (s_34 + s_35)

    def __str__(self):
        """Human-readable identifier, e.g. "(3,4)"."""
        return "(%s)" % (','.join('%d' % ln for ln in self.leg_numbers))
class SectorGenerator(generic_sectors.GenericSectorGenerator):
""" Class responsible for generating the correct list of sectors to consider for specific processes."""
def __init__(self, *args, **opts):
# No state of its own; forward everything to the generic base.
super(SectorGenerator, self).__init__(*args, **opts)
def __call__(self, contrib_definition, defining_process, counterterms=None, integrated_counterterms=None):
""" Given a particular contribution definition, a particular defining process instance and the counterterms contained, this
function must build the list of sectors to consider (or None if the subtraction
does not require any) as well as the list of counterterms to consider for each of them.
The list returned has the following format:
all_sectors = [ sector_1, sector_2, ... ]
where each sector_<i> is a dictionary with the following format:
sector_<i> = {
'sector' : sector_instance_or_identifier (exact format is up to the subtraction_scheme but must be callable)
'counterterms' : [list of counterterm indices (as per the ordering in self.counterterms of the integrand) to be considered],
'integrated_counterterms' : [list of tuples (counterterm indices, input_mapping_index) for integrated CT to be considered]
}
"""
model = defining_process.get('model')
initial_state_PDGs, final_state_PDGs = defining_process.get_cached_initial_final_pdgs()
# Hardcoding to FKS-like sectors for e+(1) e-(2) > g(3) d(4) d~(5)
if initial_state_PDGs != (-11,11) or final_state_PDGs != (21, 1, -1):
return None
all_sectors = []
for sector_legs in [(3,4), (3,5)]:
a_sector = {
'sector' : None,
'counterterms' : None,
'integrated_counterterms' : None
}
a_sector['sector'] = Sector(leg_numbers=sector_legs)
if counterterms is not None:
a_sector['counterterms'] = []
# A local counterterm belongs to this sector iff it is the soft (S)
# counterterm of the sector's first leg (the gluon) or the collinear
# (C) counterterm of exactly this sector's leg pair.
for i_ct, ct in enumerate(counterterms):
current = ct.nodes[0].current
singular_structure = current.get('singular_structure').substructures[0]
all_legs = singular_structure.get_all_legs()
if singular_structure.name()=='S':
if all_legs[0].n == sector_legs[0]:
a_sector['counterterms'].append(i_ct)
if singular_structure.name()=='C':
if sorted([l.n for l in all_legs]) == sorted(sector_legs):
a_sector['counterterms'].append(i_ct)
# Uncomment below for enabling all counterterms
# a_sector['counterterms'] = range(len(counterterms))
# Irrelevant in this NLO example, but let me specify all of them explicitly so as to make the structure clear.
if integrated_counterterms is not None:
a_sector['integrated_counterterms'] = {}
for i_ct, ct in enumerate(integrated_counterterms):
# For now enable all integrated counterterms. Notice that the value None in this dictionary
# is interpreted as all input mappings contributing, but for the sake of example here
# we list explicitly each index.
a_sector['integrated_counterterms'][i_ct] = range(len(ct['input_mappings']))
all_sectors.append(a_sector)
return all_sectors
| StarcoderdataPython |
1927500 | <gh_stars>0
'''Example 09
Shows how to embed visvis figures in a wxPython GUI.
Visvis can also be embedded in Qt4 (PyQt), GTK, and FLTK.
Based on http://code.google.com/p/visvis/wiki/example_embeddingInWx
'''
import wx
from pycalfem import *
from pycalfem_utils import *
from pycalfem_mesh import *
import pycalfem_vis as pcv
import visvis as vv
# Create a visvis app instance, which wraps a wx application object.
# This needs to be done *before* instantiating the main window.
# NOTE: the backend must match the GUI toolkit used below (wx.Frame and
# wx sizers embedding self.fig._widget); with 'qt' visvis would hand back
# a Qt widget that cannot be added to a wx sizer.
app = vv.use('wx')
class MainWindow(wx.Frame):
"""Main application window: a button panel (Calc / Plot / Clear) on the
left and an embedded visvis figure on the right.  _Calc runs the FE
analysis, _Plot draws the displacement field, _Clear wipes the figure."""
def __init__(self):
"""Build the widget hierarchy, embed the visvis figure and hook up
the button callbacks."""
wx.Frame.__init__(self, None, -1, "Embedding in WX", size=(560, 420))
# Make a panel with buttons
self.panel = wx.Panel(self)
but1 = wx.Button(self.panel, -1, 'Calc')
but2 = wx.Button(self.panel, -1, 'Plot')
but3 = wx.Button(self.panel, -1, 'Clear')
#Make panel sizer and embed stuff
self.panelsizer = wx.BoxSizer(wx.VERTICAL)
self.panelsizer.Add(but1)
self.panelsizer.Add(but2)
self.panelsizer.Add(but3)
self.panel.SetSizer(self.panelsizer)
# Make figure using "self" as a parent
Figure = app.GetFigureClass()
self.fig = Figure(self)
# Make window sizer and embed stuff
self.sizer = wx.BoxSizer(wx.HORIZONTAL)
self.sizer.Add(self.panel, 1, wx.EXPAND)
self.sizer.Add(self.fig._widget, 2, wx.EXPAND)
# Make callback
but1.Bind(wx.EVT_BUTTON, self._Calc)
but2.Bind(wx.EVT_BUTTON, self._Plot)
but3.Bind(wx.EVT_BUTTON, self._Clear)
# Apply window sizers
self.SetSizer(self.sizer)
self.SetAutoLayout(True)
self.Layout()
# Finish
self.Show()
def _Calc(self, event):
"""Mesh the geometry, assemble and solve the plane-stress FE problem;
stores mesh data and the solution vector on self for _Plot."""
#Calculations are taken from example 08.
# Constants:
t = 0.2
v = 0.35
E = 2.1e9
ptype = 1
ep = [ptype,t]
D=hooke(ptype, E, v)
# Create Geometry:
# NOTE(review): not a raw string -- '\e' happens to survive as a literal
# backslash, but r"examplegeo\ex8.geo" would be safer; also confirm the
# backslash separator works on non-Windows platforms.
g = "examplegeo\ex8.geo"
self.elType = 3 #3 Quads
self.dofsPerNode = 2
mesher = GmshMesher(geoData = g,
gmshExecPath = None, #"gmsh\gmsh.exe"
elType = self.elType,
dofsPerNode= self.dofsPerNode)
self.coords, self.edof, self.dofs, self.bdofs, _ = mesher.create()
# Assem systems matrix:
nDofs = size(self.dofs)
ex, ey = coordxtr(self.edof, self.coords, self.dofs)
K = zeros([nDofs,nDofs])
for eltopo, elx, ely in zip(self.edof, ex, ey):
Ke = planqe(elx, ely, ep, D)
assem(eltopo, K, Ke)
# Solve:
f = zeros([nDofs,1])
bc = array([],'i')
bcVal = array([],'i')
# Fix boundary marker 5, load boundary marker 7.
bc, bcVal = applybc(self.bdofs, bc, bcVal, 5, 0.0, 0)
applyforce(self.bdofs, f, 7, 10e5, 1)
self.a, _ = solveq(K,f,bc,bcVal)
def _Plot(self, event):
"""Redraw the displacement field computed by _Calc into the figure."""
# Make sure our figure is the active one
# If only one figure, this is not necessary.
#vv.figure(self.fig.nr)
# Clear it:
vv.clf()
# Plot:
pcv.drawDisplacements(self.a, self.coords, self.edof, self.dofsPerNode, self.elType, doDrawUndisplacedMesh=True, title="Example 09")
def _Clear(self, event):
"""Wipe the embedded figure."""
vv.clf() #Clear current figure
# Two ways to create the application and start the main loop.
# (Stray dataset residue that made the last line a syntax error was removed.)
if True:
    # The visvis way. Will run in interactive mode when used in IEP or IPython.
    app.Create()
    m = MainWindow()
    app.Run()
else:
    # The native way.
    wxApp = wx.App()
    m = MainWindow()
    wxApp.MainLoop()
9648406 | <reponame>Sigmanificient/codewars<gh_stars>1-10
"""Kata url: https://www.codewars.com/kata/55d24f55d7dd296eb9000030."""
def summation(num: int) -> int:
    """Return the sum of the integers 1..num, or 0 when num < 1.

    Uses Gauss' closed form n*(n+1)//2 instead of iterating: O(1) vs O(n).
    The num < 1 guard preserves the original sum(range(...)) behaviour of
    returning 0 for zero and negative inputs.
    """
    if num < 1:
        return 0
    return num * (num + 1) // 2
| StarcoderdataPython |
6702496 | <filename>anno/managers.py<gh_stars>0
from django.db.models import Manager
from django.db.models import Q
class SearchManager(Manager):
    '''builds Q expression for `platform` annotation property.
    to build a custom search, extend this class and override the
    'search_expression' method.
    '''

    def search_expression(self, params):
        '''builds Q expression for `platform` according to params.

        Each supported request parameter maps to a JSON lookup under
        raw__platform; present (truthy) parameters are AND-combined.
        '''
        # (request param, ORM lookup path) pairs, in the original order.
        param_lookups = (
            ('platform', 'raw__platform__platform_name'),
            ('context_id', 'raw__platform__context_id'),
            ('collection_id', 'raw__platform__collection_id'),
            ('source_id', 'raw__platform__target_source_id'),
        )
        q = Q()
        for param_name, lookup in param_lookups:
            value = params.get(param_name, None)
            if value:
                q = q & Q(**{lookup: str(value)})
        return q
| StarcoderdataPython |
301614 | """
Position controller module.
"""
import math
from highlevel.logger import LOGGER
from highlevel.robot.entity.configuration import Configuration
from highlevel.util.filter.odometry import OdometryFunc
from highlevel.util.geometry.vector import Vector2
from highlevel.util.probe import Probe
from highlevel.util.type import Millimeter, Radian, MillimeterPerSec, RadianPerSec, \
tick_to_mm
# Attributes could be merged, but it is clearer this way
# pylint: disable=too-many-instance-attributes
class PositionController:
"""
Keeps track of the robot's position & angle and gives access to it.

Feeds raw encoder ticks through an odometry function and maintains the
derived pose (position, angle), travelled distance and velocities.
"""
def __init__(self, odometry_function: OdometryFunc,
configuration: Configuration, probe: Probe):
self.odometry = odometry_function
self.configuration = configuration
self.probe = probe
# Derived kinematic state, updated on every odometry sample.
self.distance_travelled: Millimeter = 0.0
self.speed: MillimeterPerSec = 0.0
self.angular_velocity: RadianPerSec = 0.0
self.position: Vector2 = configuration.initial_position
self.angle: Radian = configuration.initial_angle
# Per-wheel travelled distances (current and previous sample), used to
# compute deltas between updates.
self.position_left_last: Millimeter = 0.0
self.position_right_last: Millimeter = 0.0
self.position_left: Millimeter = 0.0
self.position_right: Millimeter = 0.0
# Mean wheel travel at the first sample; reference for distance_travelled.
self.distance_init: Millimeter = 0.0
self.initialized = False
def update_odometry(self, tick_left: int, tick_right: int) -> None:
"""
Updates current position with new samples.
The first call will initialize the previous encoder positions used for deltas.
The position/angle will not be updated on this first call.
"""
# Convert raw encoder ticks to per-wheel travelled distance (mm).
self.position_left = tick_to_mm(
tick_left, self.configuration.encoder_ticks_per_revolution,
self.configuration.wheel_radius)
self.position_right = tick_to_mm(
tick_right, self.configuration.encoder_ticks_per_revolution,
self.configuration.wheel_radius)
self.probe.emit("encoder_left", self.position_left)
self.probe.emit("encoder_right", self.position_right)
# First sample only establishes the reference values; no pose update.
if not self.initialized:
self.position_left_last = self.position_left
self.position_right_last = self.position_right
self.distance_init = (self.position_left + self.position_right) / 2
self.initialized = True
return
# Keep previous values to derive velocities after the odometry update.
distance_old = self.distance_travelled
angle_old = self.angle
self.position, self.angle = self.odometry(
self.position_left - self.position_left_last,
self.position_right - self.position_right_last, self.position,
self.angle, self.configuration)
# Cumulative distance = mean of the two wheel travels since the
# initial reference.
self.distance_travelled = ((self.position_left + self.position_right) /
2) - self.distance_init
# Angle is converted to degrees for logging only.
LOGGER.get().debug('position_controller_update_odometry',
left_tick=tick_left,
right_tick=tick_right,
new_position=self.position,
new_angle=self.angle / (2 * math.pi) * 360)
self.position_left_last = self.position_left
self.position_right_last = self.position_right
# Velocities: finite differences scaled by the encoder update rate (Hz).
self.speed = \
(self.distance_travelled - distance_old) * self.configuration.encoder_update_rate
self.angular_velocity = (
self.angle - angle_old) * self.configuration.encoder_update_rate
self.probe.emit("position", self.position)
self.probe.emit("angle", self.angle)
| StarcoderdataPython |
11302433 | <reponame>Henrique-Sc/cursoemvideo-python
# Tuple demo: (name, age, sex, weight).
pessoa = ('Henrique', 15, 'M', 45)
lanche = 'Hambúguer'
del pessoa # Deletes the selected variable from memory
# NOTE: this raises NameError on purpose -- `pessoa` no longer exists after del.
print(pessoa)
| StarcoderdataPython |
105721 | <reponame>MHelena45/feup-iope
import math
from gurobipy import *
# Pairwise leg distances between ports; math.inf marks a forbidden leg
# (Doce <-> Bom cannot be sailed directly).  The matrix is symmetric with a
# zero diagonal.  (The original code first bound d to an empty dict that was
# immediately overwritten; that redundant statement is removed.)
d = {
    ("Doce", "Doce"): 0,       ("Doce", "Bom"): math.inf, ("Doce", "Sky"): 6000, ("Doce", "Moon"): 5000, ("Doce", "Mars"): 5500,
    ("Bom", "Doce"): math.inf, ("Bom", "Bom"): 0,         ("Bom", "Sky"): 6000,  ("Bom", "Moon"): 5800,  ("Bom", "Mars"): 4800,
    ("Sky", "Doce"): 6000,     ("Sky", "Bom"): 6000,      ("Sky", "Sky"): 0,     ("Sky", "Moon"): 500,   ("Sky", "Mars"): 2000,
    ("Moon", "Doce"): 5000,    ("Moon", "Bom"): 5800,     ("Moon", "Sky"): 500,  ("Moon", "Moon"): 0,    ("Moon", "Mars"): 1000,
    ("Mars", "Doce"): 5500,    ("Mars", "Bom"): 4800,     ("Mars", "Sky"): 2000, ("Mars", "Moon"): 1000, ("Mars", "Mars"): 0,
}
'''
ID Route Vessel type Product
1 Doce – Moon – Doce 1 Corn e Copper
2 Doce – Moon - Mars - Doce 1 Corn e Iron
3 Doce – Moon - Sky - Doce 1 Corn e Copper
4 Doce – Moon - Sky - Doce 1 Corn e Iron
5 Doce – Mars – Moon – Doce 1 Corn e Copper
6 Doce – Mars – Doce 1 e 2 Corn e Iron
7 Doce – Mars – Sky – Doce 1 e 2 Corn e Copper
8 Doce – Mars – Sky – Doce 1 e 2 Corn e Iron
9 Bom – Sky – Bom 1 e 2 Wheat e Iron
10 Bom – Mars – Bom 1 e 2 Wheat e Iron
11 Bom – Sky - Mars – Bom 1 e 2 Wheat e Iron
12 Bom – Mars - Sky - Bom 1 e 2 Wheat e Iron
'''
# t[route_id, vessel_type] = duration (hours) of one full round trip.
# Loaded/empty legs use different speeds; type-1 vessels sail at 25 km/h
# loaded / 30 km/h empty, type-2 at 20 km/h loaded / 24 km/h empty.
t = {}
# Doce – Moon – Doce
t[1, 1] = d["Doce","Moon"]/25 * 2  # time needed to perform the trip
# Doce – Moon - Mars - Doce
t[2, 1] = d["Doce","Moon"]/25 + d["Moon","Mars"]/30 + d["Mars","Doce"]/25
# Doce – Moon - Sky - Doce (routes 3 and 4 share the same legs)
t[3, 1] = d["Doce","Moon"]/25 + d["Moon","Sky"]/30 + d["Sky","Doce"]/25
t[4, 1] = d["Doce","Moon"]/25 + d["Moon","Sky"]/30 + d["Sky","Doce"]/25
# Doce – Mars – Moon – Doce
t[5, 1] = d["Doce","Mars"]/25 + d["Mars","Moon"]/30 + d["Moon","Doce"]/25
# Doce – Mars – Doce
t[6, 1] = d["Doce","Mars"]/25 * 2
t[6, 2] = d["Doce","Mars"]/20 * 2
# Doce – Mars – Sky – Doce (routes 7 and 8 share the same legs)
t[7, 1] = d["Doce","Mars"]/25 + d["Mars","Sky"]/30 + d["Sky","Doce"]/25
t[7, 2] = d["Doce","Mars"]/20 + d["Mars","Sky"]/24 + d["Sky","Doce"]/20
t[8, 1] = d["Doce","Mars"]/25 + d["Mars","Sky"]/30 + d["Sky","Doce"]/25
t[8, 2] = d["Doce","Mars"]/20 + d["Mars","Sky"]/24 + d["Sky","Doce"]/20
# Bom – Sky – Bom
t[9, 1] = d["Bom","Sky"]/25 * 2
t[9, 2] = d["Bom","Sky"]/20 * 2
# Bom – Mars – Bom
t[10, 1] = d["Bom","Mars"]/25 * 2
t[10, 2] = d["Bom","Mars"]/20 * 2
# Bom – Sky - Mars – Bom
t[11, 1] = d["Bom","Sky"]/25 + d["Sky","Mars"]/30 + d["Mars","Bom"]/25
t[11, 2] = d["Bom","Sky"]/20 + d["Sky","Mars"]/24 + d["Mars","Bom"]/20
# Bom – Mars - Sky - Bom
t[12, 1] = d["Bom","Mars"]/25 + d["Mars","Sky"]/30 + d["Sky","Bom"]/25
t[12, 2] = d["Bom","Mars"]/20 + d["Mars","Sky"]/24 + d["Sky","Bom"]/20

type2Trips = list(range(6, 13))  # routes a type-2 vessel can serve: 6..12
type1Trips = list(range(1, 13))  # routes a type-1 vessel can serve: 1..12
M = 200  # strict upper bound on the candidate fleet size of each type
model = Model("P2")

# Number of ships of each type actually used.
vessel1 = model.addVar(vtype="I", name="vessel1")
vessel2 = model.addVar(vtype="I", name="vessel2")

x = {}  # x[vessel, vesselType] = 1 iff candidate vessel `vessel` of that type is used
a = {}  # a[vessel, tripType, vesselType] = how many times the vessel performs the trip
for vessel in range(1, M):
    x[vessel,1] = model.addVar(vtype="B", name="x(%s,%s)" % (vessel,1))
    x[vessel,2] = model.addVar(vtype="B", name="x(%s,%s)" % (vessel,2))
    for tripType in type1Trips:
        a[vessel,tripType,1] = model.addVar(vtype="I", name="a(%s,%s,%s)" % (vessel,tripType,1))
    # Loop variable renamed from `type`, which shadowed the builtin.
    for tripType in type2Trips:
        a[vessel,tripType,2] = model.addVar(vtype="I", name="a(%s,%s,%s)" % (vessel,tripType,2))

# Total distance travelled loaded, per vessel type.
dLoaded1 = model.addVar(vtype="I", name="dLoaded(%s)" % (1))
dLoaded2 = model.addVar(vtype="I", name="dLoaded(%s)" % (2))
# Total distance travelled empty, per vessel type.
dEmpty1 = model.addVar(vtype="I", name="dEmpty(%s)" % (1))
dEmpty2 = model.addVar(vtype="I", name="dEmpty(%s)" % (2))

trips = {}  # trips[tripType, vesselType] = total trips of that kind over the whole fleet
for tripType in type1Trips:
    trips[tripType,1] = model.addVar(vtype="I", name="trips(%s,%s)" % (tripType,1))
# Loop variable renamed from `type`, which shadowed the builtin.
for tripType in type2Trips:
    trips[tripType,2] = model.addVar(vtype="I", name="trips(%s,%s)" % (tripType,2))

model.update()
# Demand-covering constraints.  Each type-1 trip carries 35 units of cargo,
# each type-2 trip 70.  Trip ids 1-8 depart from Doce (corn outbound),
# 9-12 from Bom (wheat outbound); see the route table above.
# Wheat
model.addConstr(quicksum(a[vessel,trip,1] for trip in range(9,13) for vessel in range(1, M)) * 35 +
quicksum(a[vessel,trip,2] for trip in range(9,13) for vessel in range(1, M)) * 70 >= 50000, "c1")
# Corn
model.addConstr(quicksum(a[vessel,trip,1] for trip in range(1,9) for vessel in range(1, M)) * 35 +
quicksum(a[vessel,trip,2] for trip in range(6,9) for vessel in range(1, M)) * 70 >= 40000, "c2")
# Iron of BOM
# NOTE(review): c3 uses exactly the same trip range (9..12) and capacities as
# c1, so it is dominated by / duplicates c1 — confirm the intended trip set.
model.addConstr(quicksum(a[vessel,trip,1] for trip in range(9,13) for vessel in range(1, M)) * 35
+ ( quicksum(a[vessel,trip,2] for trip in range(9,13) for vessel in range(1, M)))* 70 >= 50000, "c3")
# Copper
model.addConstr(quicksum(a[vessel,1,1] + a[vessel,3,1] + a[vessel,5,1] + a[vessel,7,1] for vessel in range(1, M)) * 35 +
quicksum(a[vessel,7,2] for vessel in range(1, M))* 70 >= 20000, "c4")
# Iron
model.addConstr(quicksum(a[vessel,2,1] + a[vessel,4,1] + a[vessel,6,1] + a[vessel,8,1] for vessel in range(1, M)) * 35 +
quicksum(a[vessel,6,2] + a[vessel,8,2] for vessel in range(1, M))* 70 >= 20000, "c5")
# Iron - Mars
model.addConstr(quicksum(a[vessel,2,1] + a[vessel,6,1] + a[vessel,10,1] + a[vessel,11,1] for vessel in range(1, M)) * 35 +
quicksum(a[vessel,6,2] + a[vessel,10,2] + a[vessel,11,2] for vessel in range(1, M))* 70 >= 30000, "c6")
# Wheat - Mars
model.addConstr(quicksum(a[vessel,10,1] + a[vessel,12,1]for vessel in range(1, M)) * 35 +
quicksum(a[vessel,10,2] + a[vessel,12,2] for vessel in range(1, M))* 70 >= 20000, "c7")
# Corn - Mars
model.addConstr(quicksum(a[vessel,trip,1] for trip in range(5,9) for vessel in range(1, M)) * 35
+ ( quicksum(a[vessel,trip,2] for trip in range(6,9) for vessel in range(1, M)))* 70 >= 10000, "c8")
# Copper - Sky
model.addConstr(quicksum(a[vessel,3,1] + a[vessel,7,1] for vessel in range(1, M)) * 35 +
quicksum(a[vessel,7,2] for vessel in range(1, M))* 70 >= 10000, "c9")
# Iron - Sky
model.addConstr(quicksum(a[vessel,4,1] + a[vessel,8,1] + a[vessel,9,1] + a[vessel,12,1] for vessel in range(1, M)) * 35 +
quicksum(a[vessel,8,2] + a[vessel,9,2] + a[vessel,12,2] for vessel in range(1, M))* 70 >= 40000, "c10")
# Wheat - Sky
model.addConstr(quicksum(a[vessel,9,1] + a[vessel,11,1] for vessel in range(1, M)) * 35 +
quicksum(a[vessel,9,2] + a[vessel,11,2] for vessel in range(1, M))* 70 >= 30000, "c11")
# Copper - Moon (only type-1 vessels reach Moon, see route table)
model.addConstr(quicksum(a[vessel,1,1] + a[vessel,5,1] for vessel in range(1, M)) * 35 >= 10000, "c12")
# Corn - Moon
model.addConstr(quicksum(a[vessel,trip,1] for trip in range(1,5) for vessel in range(1, M)) * 35 >= 30000, "c13")
# Per-vessel operating-time budget: 345 days * 24 h per year.
# Constraint names now include the vessel index; the original reused the
# literal names "c14"/"c15" for every vessel in the loop.
for vessel in range(1, M):
    model.addConstr(quicksum(t[tripType,1] * a[vessel,tripType,1] for tripType in type1Trips)
                    <= 345 * 24, "c14(%s)" % vessel)
    model.addConstr(quicksum(t[tripType,2] * a[vessel,tripType,2] for tripType in type2Trips)
                    <= 345 * 24, "c15(%s)" % vessel)

# Link the usage indicator x[vessel, vesselType] to the trip assignments:
# x = 1 iff at least one trip is assigned to the vessel.  BIG_TRIPS is a
# valid big-M: the shortest round trip lasts several hundred hours, so far
# fewer than 1000 trips fit in the 345*24 h budget.  This replaces the
# original quadratic constraint  x * sum(a) >= sum(a)  with its standard
# linearisation  sum(a) <= BIG_TRIPS * x.
BIG_TRIPS = 1000
for vessel in range(1, M):
    model.addConstr(quicksum(a[vessel,tripType,1] for tripType in type1Trips)
                    >= x[vessel,1], "c20(%s,%s)" % (vessel,1))  # no trips => x may be 0
    model.addConstr(quicksum(a[vessel,tripType,1] for tripType in type1Trips)
                    <= BIG_TRIPS * x[vessel,1], "c21(%s,%s)" % (vessel,1))  # any trip => x = 1
    model.addConstr(quicksum(a[vessel,tripType,2] for tripType in type2Trips)
                    >= x[vessel,2], "c22(%s,%s)" % (vessel,2))
    model.addConstr(quicksum(a[vessel,tripType,2] for tripType in type2Trips)
                    <= BIG_TRIPS * x[vessel,2], "c23(%s,%s)" % (vessel,2))
    # Symmetry breaking: vessel k can only be used if vessel k-1 is used.
    if vessel >= 2:
        model.addConstr(x[vessel,1] <= x[vessel-1,1], "c24(%s,%s)" % (vessel,1))
        model.addConstr(x[vessel,2] <= x[vessel-1,2], "c25(%s,%s)" % (vessel,2))
# dLoaded1: total loaded distance sailed by type-1 vessels.
# BUG FIX: the original summed the Doce–Moon–Sky–Doce loaded distance as
# (a4 + a3) * dist  AND  a4 * dist, counting trip 4 twice; trips 3 and 4
# share the route and must each be counted exactly once.
model.addConstr(dLoaded1 == quicksum(
    a[vessel,1, 1] * d["Doce","Moon"] * 2 +                                      # Doce – Moon – Doce
    a[vessel,2, 1] * (d["Doce","Moon"] + d["Mars","Doce"]) +                     # Doce – Moon - Mars - Doce
    (a[vessel,3, 1] + a[vessel,4, 1]) * (d["Doce","Moon"] + d["Sky","Doce"]) +   # Doce – Moon - Sky - Doce
    a[vessel,5, 1] * (d["Doce","Mars"] + d["Moon","Doce"]) +                     # Doce – Mars – Moon – Doce
    a[vessel,6, 1] * d["Doce","Mars"] * 2 +                                      # Doce – Mars – Doce
    (a[vessel,7, 1] + a[vessel,8, 1]) * (d["Doce","Mars"] + d["Sky","Doce"]) +   # Doce – Mars – Sky – Doce
    a[vessel,9, 1] * d["Bom","Sky"] * 2 +                                        # Bom – Sky – Bom
    a[vessel,10, 1] * d["Bom","Mars"] * 2 +                                      # Bom – Mars – Bom
    a[vessel,11, 1] * (d["Bom","Sky"] + d["Mars","Bom"]) +                       # Bom – Sky - Mars – Bom
    a[vessel,12, 1] * (d["Bom","Mars"] + d["Sky","Bom"])                         # Bom – Mars - Sky - Bom
    for vessel in range(1, M)), "c16")
# dLoaded2: total loaded distance sailed by type-2 vessels (routes 6..12 only).
model.addConstr(dLoaded2 == quicksum(
    a[vessel,6, 2] * d["Doce","Mars"] * 2 +                                      # Doce – Mars – Doce
    (a[vessel,7, 2] + a[vessel,8, 2]) * (d["Doce","Mars"] + d["Sky","Doce"]) +   # Doce – Mars – Sky – Doce
    a[vessel,9, 2] * d["Bom","Sky"] * 2 +                                        # Bom – Sky – Bom
    a[vessel,10, 2] * d["Bom","Mars"] * 2 +                                      # Bom – Mars – Bom
    a[vessel,11, 2] * (d["Bom","Sky"] + d["Mars","Bom"]) +                       # Bom – Sky - Mars – Bom
    a[vessel,12, 2] * (d["Bom","Mars"] + d["Sky","Bom"])                         # Bom – Mars - Sky - Bom
    for vessel in range(1, M)), "c17")
# Empty (ballast) distance: the middle leg of each triangular route.
model.addConstr(dEmpty1 == quicksum(
    a[vessel,2, 1] * d["Moon","Mars"] +                                          # Doce – Moon - Mars - Doce
    (a[vessel,3, 1] + a[vessel,4, 1]) * d["Moon","Sky"] +                        # Doce – Moon - Sky - Doce
    a[vessel,5, 1] * d["Mars","Moon"] +                                          # Doce – Mars – Moon – Doce
    (a[vessel,7, 1] + a[vessel,8, 1]) * d["Mars","Sky"] +                        # Doce – Mars – Sky – Doce
    a[vessel,11, 1] * d["Sky","Mars"] +                                          # Bom – Sky - Mars – Bom
    a[vessel,12, 1] * d["Mars","Sky"]                                            # Bom – Mars - Sky - Bom
    for vessel in range(1, M)), "c18")
model.addConstr(dEmpty2 == quicksum(
    (a[vessel,7, 2] + a[vessel,8, 2] + a[vessel,12, 2]) * d["Mars","Sky"] +      # Doce/Bom – Mars – Sky – back
    a[vessel,11, 2] * d["Sky","Mars"]                                            # Bom – Sky - Mars – Bom
    for vessel in range(1, M)), "c19")
# Fleet-size counters.  Renamed from "c20"/"c21" to avoid clashing with the
# per-vessel linking constraint families above.
model.addConstr(vessel1 == quicksum(x[vessel,1] for vessel in range(1, M)), "c26")
model.addConstr(vessel2 == quicksum(x[vessel,2] for vessel in range(1, M)), "c27")
# Per-route trip totals; names now include tripType (the original reused the
# literal names "c22"/"c23" for every iteration).
for tripType in type1Trips:
    model.addConstr(trips[tripType,1] == quicksum(a[vessel,tripType, 1] for vessel in range(1, M)), "c28(%s)" % tripType)
for tripType in type2Trips:
    model.addConstr(trips[tripType,2] == quicksum(a[vessel,tripType, 2] for vessel in range(1, M)), "c29(%s)" % tripType)
# Annual cost objective (comments translated from Portuguese):
# 0.1 * (number of type-1 vessels * 1 000 000 + number of type-2 vessels * 1 500 000) +
# ((number of type-1 vessels * 1 000 000 + number of type-2 vessels * 1 500 000) / 25) +
# number of type-1 vessels * 70 000 +
# number of type-2 vessels * 75 000 +
# (loaded distance of type 1 / 1000 * 50 + empty distance of type 1 / 1000 * 42) * fuel cost +
# (loaded distance of type 2 / 1000 * 40 + empty distance of type 2 / 1000 * 30) * fuel cost
# Fuel cost is 0.8 per unit.
model.setObjective(0.1 * (vessel1 * 1000000 + vessel2 * 1500000) + (vessel1 * 1000000 + vessel2 * 1500000) / 25 + vessel1 * 70000 + vessel2 * 75000 +
((dLoaded1/1000) * 50 + (dEmpty1/1000) * 42 + (dLoaded2/1000) * 40 + (dEmpty2/1000) * 30) * 0.8, GRB.MINIMIZE)
model.update()
model.optimize()
# generates the file with the solution
model.write("../Solucoes/Solution-P2-Exact.sol")
# generates lp file
model.write("P2-Exact.lp")
6522444 | '''
Finite State Machine algorithm used to assess score of Python found in
statements of the form 'Python is ___'. The 2 possible scores are
'positive' and 'negative'.
Reference: http://www.python-course.eu/finite_state_machine.php
+--------------+-------------+--------------+
| From State | Input | To State |
+--------------+-------------+--------------+
| Start | 'Python' | Python_state |
| Start | Not handled | error_state |
| Python_state | 'is' | is_state |
| Python_state | Not handled | error_state |
| is_state | {positive} | pos_state |
| is_state | {negative} | neg_state |
| is_state | 'not' | not_state |
| is_state | Not handled | error_state |
| not_state | {positive} | neg_state |
| not_state | {negative} | pos_state |
| not_state | Not handled | error_state |
| pos_state | Any | End |
| neg_state | Any | End |
| error_state | Any | End |
+--------------+-------------+--------------+
Input:
------
The first line contains a single integer for the number of commands.
Each command then appears on its own line as ADD_POSITIVE(word),
ADD_NEGATIVE(word) or RUN(sentence).
+------------------------------------------------------------------+
| 3 |
| fsm_score('Python is great') |
| fsm_score('Python is difficult') |
| fsm_score('Perl is great') |
+------------------------------------------------------------------+
Output:
-------
For each test case, the result will displayed on a line.
+------------------------------------------------------------------+
| fsm_score('Python is great') = positive |
| fsm_score('Python is difficult') = negative |
| fsm_score('Perl is great') = error |
+------------------------------------------------------------------+
'''
from general import state_machine
# Adjective lexicons, populated at runtime via ADD_POSITIVE / ADD_NEGATIVE commands.
POSITIVE_ADJECTIVES = []
NEGATIVE_ADJECTIVES = []
# State identifiers for the finite state machine (see the transition table
# in the module docstring).
IS_STATE = 'is_state'
PYTHON_STATE = 'Python_state'
START_STATE = 'Start'
ERROR_STATE = 'error_state'
NOT_STATE = 'not_state'
POS_STATE = 'pos_state'
NEG_STATE = 'neg_state'
END_STATE = 'End'
# Sentiment of the most recent run ('positive' / 'negative' / 'error'),
# written by the transition handlers below.
SENTIMENT = ''
###############################################################################
def start_transitions(text):
    '''Consume the first word; only "Python" is a valid opening token.'''
    parts = text.split(None, 1)
    word, remainder = parts if len(parts) > 1 else (text, '')
    if word == "Python":
        return (PYTHON_STATE, remainder)
    # Per the transition table, anything else is an error; record it.
    global SENTIMENT
    SENTIMENT = process_sentiment(ERROR_STATE)
    return (ERROR_STATE, remainder)
###############################################################################
def python_state_transitions(text):
    '''After "Python", the only accepted continuation is the word "is".'''
    parts = text.split(None, 1)
    word, remainder = parts if len(parts) > 1 else (text, '')
    if word == "is":
        return (IS_STATE, remainder)
    # Any other word is unhandled; record the error sentiment.
    global SENTIMENT
    SENTIMENT = process_sentiment(ERROR_STATE)
    return (ERROR_STATE, remainder)
###############################################################################
def is_state_transitions(text):
    '''After "is": branch on negation, or score the adjective directly.'''
    parts = text.split(None, 1)
    word, remainder = parts if len(parts) > 1 else (text, '')
    # Flattened if/elif chain (the original nested the adjective checks
    # inside an else block); the outcomes are identical.
    if word == "not":
        newState = NOT_STATE
    elif word in POSITIVE_ADJECTIVES:
        newState = POS_STATE
    elif word in NEGATIVE_ADJECTIVES:
        newState = NEG_STATE
    else:
        newState = ERROR_STATE
    global SENTIMENT
    # NOT_STATE maps to 'error' here; not_state_transitions overwrites it.
    SENTIMENT = process_sentiment(newState)
    return (newState, remainder)
###############################################################################
def not_state_transitions(text):
    '''After "is not": score the adjective with inverted polarity.'''
    parts = text.split(None, 1)
    word, remainder = parts if len(parts) > 1 else (text, '')
    # Negation flips the adjective's sentiment.
    if word in POSITIVE_ADJECTIVES:
        newState = NEG_STATE
    elif word in NEGATIVE_ADJECTIVES:
        newState = POS_STATE
    else:
        newState = ERROR_STATE
    global SENTIMENT
    SENTIMENT = process_sentiment(newState)
    return (newState, remainder)
###############################################################################
def final_transitions(text):
    '''Terminal hop: any remaining input drives the machine to End.'''
    return (END_STATE, text)
###############################################################################
def process_sentiment(text):
    '''Translate a resolved state identifier into its sentiment label.'''
    # Table lookup replaces the original if/elif chain; unknown states
    # (including error_state) fall through to 'error'.
    labels = {POS_STATE: 'positive', NEG_STATE: 'negative'}
    return labels.get(text, 'error')
###############################################################################
# Wire the machine: register each state with its handler (terminal states
# use final_transitions; End has no handler), then mark start and end.
fsm = state_machine.StateMachine()
fsm.add_state(START_STATE, start_transitions)
fsm.add_state(PYTHON_STATE, python_state_transitions)
fsm.add_state(IS_STATE, is_state_transitions)
fsm.add_state(NOT_STATE, not_state_transitions)
fsm.add_state(NEG_STATE, final_transitions)
fsm.add_state(POS_STATE, final_transitions)
fsm.add_state(ERROR_STATE, final_transitions)
fsm.add_state(END_STATE, None)
fsm.set_start(START_STATE)
fsm.set_end(END_STATE)
if __name__ == "__main__":
    def parse_command(text, enclosures='()'):
        '''Split "CMD(arg)" into ("CMD", "arg") using the outermost parens.'''
        lparen = text.find(enclosures[0])
        rparen = text.rfind(enclosures[1])
        return text[:lparen], text[lparen + 1: rparen]

    import ast  # local import: only needed when run as a script

    for _ in range(int(input())):
        command, value = parse_command(input().strip())
        # ast.literal_eval safely parses the quoted-string argument; the
        # original eval() would execute arbitrary expressions from stdin.
        if command == 'ADD_POSITIVE':
            POSITIVE_ADJECTIVES.append(ast.literal_eval(value))
        elif command == 'ADD_NEGATIVE':
            NEGATIVE_ADJECTIVES.append(ast.literal_eval(value))
        elif command == 'RUN':
            fsm.run(ast.literal_eval(value))
            print(SENTIMENT)
| StarcoderdataPython |
5124082 | from zope.interface import Interface
from guillotina.fields.annotation import BucketListField
class IVersioning(Interface):
    """Schema interface holding the bucketed list of stored diffs."""
    # Read-only bucketed list stored under annotations prefixed "diffs-";
    # each bucket holds up to 5 entries.
    diffs = BucketListField(readonly=True, annotation_prefix="diffs-", bucket_len=5)
class IVersioningMarker(Interface):
    """Marker interface applied to objects that have versioning enabled."""
class IDiffCalculator(Interface):
    """Interface for an adapter that computes diffs from a payload."""
    async def __call__(payload):
        """Compute and return the diff for *payload* (adapter call signature)."""
        pass
| StarcoderdataPython |
8018331 | <filename>tests/test_hybrid.py
"""
test_hybrid: unit tests for Hybrid
<NAME> <<EMAIL>>
Copyright 2020-2021, <NAME>
License: Apache-2.0 (https://www.apache.org/licenses/LICENSE-2.0)
"""
import dataclasses
import sourdough
@dataclasses.dataclass
class AnElement(sourdough.quirks.Element):
    """Minimal Element subclass used as a test fixture."""
    pass
@dataclasses.dataclass
class AnotherElement(sourdough.quirks.Element):
    """Second minimal Element subclass, to exercise name-based keying."""
    pass
def test_hybrid():
    """Exercise the Hybrid API end to end: population, key/value views,
    subsetting, removal, defaults, item assignment, and operators.

    Elements without an explicit name appear to be keyed by a snake_case
    form of their class name (e.g. 'another_element') — confirm against
    sourdough's Element naming rules.
    """
    workflow = sourdough.Hybrid()
    workflow.setdefault('default value')
    a_element = AnElement(name = 'test_name')
    another_element = AnotherElement()
    some_element = AnotherElement(name = 'some_element')
    # Population: single adds, bulk extend, positional insert.
    workflow.add(a_element)
    workflow.add(another_element)
    workflow.extend([a_element, another_element])
    workflow.insert(3, some_element)
    # Keys follow insertion order and may repeat (list-like, not dict-like).
    assert workflow.keys() == [
        'test_name',
        'another_element',
        'test_name',
        'some_element',
        'another_element']
    assert workflow.values() == [
        a_element,
        another_element,
        a_element,
        some_element,
        another_element]
    # items() must be iterable as (key, value) pairs.
    for key, value in workflow.items():
        pass
    # Subsetting keeps every occurrence of the requested key(s).
    subset_workflow = workflow.subsetify(subset = ['test_name'])
    assert subset_workflow.keys() == [
        'test_name',
        'test_name']
    # pop by index returns the element; pop by key returns a Hybrid of all matches.
    assert workflow.pop(1) == another_element
    assert workflow.pop('test_name') == sourdough.Hybrid(
        contents = [a_element, a_element])
    workflow.update({'new_workflow': a_element})
    assert workflow.keys() == [
        'some_element',
        'another_element',
        'new_workflow']
    # get() falls back to the configured default; resetting it changes misses.
    assert workflow.get('nothing') == 'default value'
    workflow.setdefault(None)
    assert workflow.get('nothing') == None
    # Item assignment, length, clear, += operator, and index removal.
    workflow['crazy_element'] = AnotherElement(name = 'crazy')
    assert len(workflow) == 4
    workflow.clear()
    assert len(workflow) == 0
    workflow += another_element
    assert len(workflow) == 1
    workflow.remove(0)
    assert len(workflow) == 0
    return
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    test_hybrid()
| StarcoderdataPython |
1790682 | <gh_stars>1-10
from flask import Blueprint, request, jsonify
from flask import current_app as app
from flask_pymongo import PyMongo
import pymongo, ssl, sys, json
from bson.json_util import dumps
import uuid
user_roles_bp = Blueprint('user_roles', __name__)
@user_roles_bp.route('/roles', methods=['GET'])
def roles():
    """Return every user role as a JSON array (Mongo _id fields excluded)."""
    all_roles = [item for item in app.db.UserRoles.find({}, {'_id': False})]
    return jsonify(all_roles), 200
@user_roles_bp.route('/role/<role_id>', methods=['GET'])
def role(role_id):
    """Return the single role matching role_id, wrapped in a JSON array."""
    results = []
    # NOTE(review): find_one returns None when no role matches, so a miss
    # responds with [null] and HTTP 200 — confirm whether a 404 is wanted.
    role = app.db.UserRoles.find_one({'role_id': role_id},{'_id': False})
    results.append(role)
    return jsonify(results), 200
@user_roles_bp.route('/role', methods=['POST'])
def create_role():
    """Create a role from the JSON body and return its generated id.

    The route declares no URL variables, so the view must take no
    parameters: the original signature ``create_role(role_id)`` made Flask
    raise a TypeError on every POST (and the argument was immediately
    shadowed by the generated uuid anyway).  The unused ``result`` binding
    from the insert call is also dropped.
    """
    data = request.get_json(force=True)
    role_name = data.get('role_name')
    role_id = str(uuid.uuid1())
    app.db.UserRoles.insert({"role_id": role_id, "role_name": role_name})
    return role_id, 200
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.