id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
1937597 | <reponame>pguermo/pytest-ansible
import warnings
import ansible.constants
import ansible.utils
import ansible.errors
from ansible.plugins.callback import CallbackBase
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.playbook.play import Play
# from ansible.plugins.loader import module_loader
from ansible.cli import CLI
from pytest_ansible.logger import get_logger
from pytest_ansible.module_dispatcher import BaseModuleDispatcher
from pytest_ansible.results import AdHocResult
from pytest_ansible.errors import AnsibleConnectionFailure
from pytest_ansible.has_version import has_ansible_v2
# This dispatcher implementation only works against the ansible 2.x API.
if not has_ansible_v2:
    raise ImportError("Only supported with ansible-2.* and newer")

# Module-level logger shared by the dispatcher below.
log = get_logger(__name__)
class ResultAccumulator(CallbackBase):
    """Ansible callback plugin that records per-host task results in memory.

    Successful and failed hosts both land in ``contacted``; hosts that
    could not be reached land in ``unreachable``.
    """

    def __init__(self, *args, **kwargs):
        """Initialize empty result maps."""
        super(ResultAccumulator, self).__init__(*args, **kwargs)
        self.contacted = {}
        self.unreachable = {}

    def v2_runner_on_failed(self, result, *args, **kwargs):
        # A failed task still means the host was contacted.
        host = result._host.get_name()
        self.contacted[host] = result._result

    # Successful results are stored exactly the same way as failures.
    v2_runner_on_ok = v2_runner_on_failed

    def v2_runner_on_unreachable(self, result):
        host = result._host.get_name()
        self.unreachable[host] = result._result

    @property
    def results(self):
        """Snapshot of all collected results, keyed by outcome."""
        return {"contacted": self.contacted, "unreachable": self.unreachable}
class ModuleDispatcherV2(BaseModuleDispatcher):
    """Ansible 2.x module dispatcher.

    Runs a single ad-hoc module against the configured inventory by
    building a one-task pseudo-play and executing it through a
    TaskQueueManager, collecting per-host results via ResultAccumulator.
    """

    # Options that callers must supply before _run() can work.
    required_kwargs = ('inventory', 'inventory_manager', 'variable_manager', 'host_pattern', 'loader')

    def has_module(self, name):
        """Return True if ansible can locate a module called `name`.

        NOTE(review): relies on `ansible.plugins.module_loader` being
        reachable as an attribute of the imported `ansible` package; on
        newer ansible versions the commented-out
        `ansible.plugins.loader.module_loader` import is needed instead --
        confirm against the targeted ansible release.
        """
        return ansible.plugins.module_loader.has_plugin(name)
        # return module_loader.has_plugin(name)

    def _run(self, *module_args, **complex_args):
        """Execute an ansible adhoc command returning the result in a AdhocResult object."""
        # Assemble module argument string
        if module_args:
            complex_args.update(dict(_raw_params=' '.join(module_args)))

        # Assert hosts matching the provided pattern exist
        hosts = self.options['inventory_manager'].list_hosts()
        no_hosts = False
        if len(hosts) == 0:
            no_hosts = True
            warnings.warn("provided hosts list is empty, only localhost is available")

        # Apply the --limit style subset, then re-resolve the host pattern
        # against the restricted inventory.
        self.options['inventory_manager'].subset(self.options.get('subset'))
        hosts = self.options['inventory_manager'].list_hosts(self.options['host_pattern'])
        if len(hosts) == 0 and not no_hosts:
            raise ansible.errors.AnsibleError("Specified hosts and/or --limit does not match any hosts")

        # Log the module and parameters
        log.debug("[%s] %s: %s" % (self.options['host_pattern'], self.options['module_name'], complex_args))

        # Build a default ansible CLI option set, then override the pieces
        # that pytest-ansible exposes to the user.
        parser = CLI.base_parser(
            runas_opts=True,
            inventory_opts=True,
            async_opts=True,
            output_opts=True,
            connect_opts=True,
            check_opts=True,
            runtask_opts=True,
            vault_opts=True,
            fork_opts=True,
            module_opts=True,
        )
        (options, args) = parser.parse_args([])

        # Pass along cli options
        options.verbosity = 5
        options.connection = self.options.get('connection')
        options.remote_user = self.options.get('user')
        options.become = self.options.get('become')
        options.become_method = self.options.get('become_method')
        options.become_user = self.options.get('become_user')
        options.module_path = self.options.get('module_path')

        # Initialize callback to capture module JSON responses
        cb = ResultAccumulator()

        kwargs = dict(
            inventory=self.options['inventory_manager'],
            variable_manager=self.options['variable_manager'],
            loader=self.options['loader'],
            options=options,
            stdout_callback=cb,
            passwords=dict(conn_pass=None, become_pass=None),
        )

        # create a pseudo-play to execute the specified module via a single task
        play_ds = dict(
            name="pytest-ansible",
            hosts=self.options['host_pattern'],
            gather_facts='no',
            tasks=[
                dict(
                    action=dict(
                        module=self.options['module_name'], args=complex_args
                    ),
                ),
            ]
        )
        log.debug("Play(%s)", play_ds)
        play = Play().load(play_ds, variable_manager=self.options['variable_manager'], loader=self.options['loader'])

        # now create a task queue manager to execute the play
        tqm = None
        try:
            log.debug("TaskQueueManager(%s)", kwargs)
            tqm = TaskQueueManager(**kwargs)
            tqm.run(play)
        finally:
            # Always release workers/handles, even if run() raised.
            if tqm:
                tqm.cleanup()

        # Log the results
        log.debug(cb.results)

        # Raise exception if host(s) unreachable
        # FIXME - if multiple hosts were involved, should an exception be raised?
        if cb.unreachable:
            raise AnsibleConnectionFailure("Host unreachable", dark=cb.unreachable, contacted=cb.contacted)

        # Success!
        return AdHocResult(contacted=cb.contacted)
| StarcoderdataPython |
1788666 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: control_delegation.proto
# Generated protobuf bindings for control_delegation.proto -- normally not
# edited by hand; the comments below are review annotations only.
import sys
# Py2/Py3 shim: on Py3 the serialized descriptor string must be latin-1 encoded.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

# File descriptor for control_delegation.proto (package `protocol`).
DESCRIPTOR = _descriptor.FileDescriptor(
    name='control_delegation.proto',
    package='protocol',
    syntax='proto2',
    serialized_pb=_b('\n\x18\x63ontrol_delegation.proto\x12\x08protocol*<\n\x1bprp_control_delegation_type\x12\x1d\n\x19PRCDT_MAC_DL_UE_SCHEDULER\x10\x01')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Descriptor for the `prp_control_delegation_type` enum (single value).
_PRP_CONTROL_DELEGATION_TYPE = _descriptor.EnumDescriptor(
    name='prp_control_delegation_type',
    full_name='protocol.prp_control_delegation_type',
    filename=None,
    file=DESCRIPTOR,
    values=[
        _descriptor.EnumValueDescriptor(
            name='PRCDT_MAC_DL_UE_SCHEDULER', index=0, number=1,
            options=None,
            type=None),
    ],
    containing_type=None,
    options=None,
    serialized_start=38,
    serialized_end=98,
)
_sym_db.RegisterEnumDescriptor(_PRP_CONTROL_DELEGATION_TYPE)

# Python-friendly enum wrapper plus a module-level alias for the value.
prp_control_delegation_type = enum_type_wrapper.EnumTypeWrapper(_PRP_CONTROL_DELEGATION_TYPE)
PRCDT_MAC_DL_UE_SCHEDULER = 1

DESCRIPTOR.enum_types_by_name['prp_control_delegation_type'] = _PRP_CONTROL_DELEGATION_TYPE
# @@protoc_insertion_point(module_scope)
| StarcoderdataPython |
# Demo: string indexing/slicing, then sets and frozensets.
a_string = 'Hello World'
print(a_string)
print(a_string[0])  # first character
print(a_string[0:5]) # the first five characters

# Sets
basket = {'Apple', 'Orange', 'Apple', 'pear', 'orange', 'banana'}
print(basket) # Duplicates will be removed
a = set('abracadabra')  # unique characters of the string
print(a)
a.add('z')  # sets are mutable
print(a)

# Frozen sets
b = frozenset('asdadasa')  # immutable: no add()/remove()
print(b)
cities = frozenset(['Frankfurt', "Basel", "Freiburg"])
print(cities)
| StarcoderdataPython |
8039603 | <gh_stars>0
import RPi.GPIO as gpio
import time
class move:
    """Differential-drive robot control over four Raspberry Pi GPIO pins.

    BCM pins 17/22 drive motor 1 forward/reverse and pins 23/24 drive
    motor 2 forward/reverse.  Each movement energizes the pins for `sec`
    seconds, then releases them via gpio.cleanup().
    """

    # Motor pins in a fixed order: (M1 FWD, M1 REV, M2 FWD, M2 REV).
    _PINS = (17, 22, 23, 24)

    def __init__(self, name):
        # Friendly name used in log output.
        self.name = name

    def init(self):
        """Configure the four motor pins as outputs (BCM numbering)."""
        gpio.setmode(gpio.BCM)
        for pin in self._PINS:
            gpio.setup(pin, gpio.OUT)

    def _drive(self, levels, sec):
        """Energize the motor pins to `levels` for `sec` seconds, then release.

        `levels` is a 4-tuple of booleans matching _PINS.  The original
        class repeated this setup/output/sleep/cleanup sequence in all four
        direction methods; it is factored out here.
        """
        self.init()
        for pin, level in zip(self._PINS, levels):
            gpio.output(pin, level)
        time.sleep(sec)
        gpio.cleanup()

    def forward(self, sec):
        """Drive both motors forward for `sec` seconds."""
        self._drive((True, False, True, False), sec)

    def reverse(self, sec):
        """Drive both motors in reverse for `sec` seconds."""
        self._drive((False, True, False, True), sec)

    def left(self, sec):
        """Turn left (motor 1 reverse only) for `sec` seconds."""
        self._drive((False, True, False, False), sec)

    def right(self, sec):
        """Turn right (motor 2 reverse only) for `sec` seconds."""
        self._drive((False, False, False, True), sec)

    def init_test(self):
        """Briefly exercise all four directions to confirm the wiring."""
        self.forward(.05)
        time.sleep(.1)
        self.reverse(.05)
        time.sleep(.1)
        self.left(.05)
        time.sleep(.1)
        self.right(.05)
        print(f"Initialization Test Passed! {self.name} is ready to roll!")
# Perform Initialization Test
COVID_BOT = move("COVID Bot")
COVID_BOT.init_test()
time.sleep(3)
COVID_BOT.forward(1) | StarcoderdataPython |
3472358 | <filename>drawwithopencv.py
import numpy as np
import cv2
import keras
from PIL import ImageGrab, Image
# Global drawing state shared between the mouse callback and the main loop.
canvas = np.zeros([400,400,3],'uint8')  # black 400x400 BGR drawing surface
radius = 10  # brush radius in pixels
color = (255,255,255)  # brush color (BGR); right-click switches to red
pressed = False  # True while a mouse button is held down
#fourcc = cv2.VideoWriter_fourcc(*'XVID')
#out = cv2.VideoWriter('digitClassify.avi',fourcc, 20.0, (640,480))
def preprocess_image(img):
    """Extract the drawn digit from a BGR canvas image and classify it.

    Thresholds the image, takes the bounding box of the first contour,
    pads the crop to a square (preserving aspect ratio), resizes it to the
    28x28 MNIST input shape and runs the CNN.  Returns the predicted digit.
    """
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    blured = cv2.GaussianBlur(gray, (3, 3), 10)
    _, thresh = cv2.threshold(blured, 150, 255, cv2.THRESH_BINARY_INV)
    # NOTE(review): 3-value unpacking is the OpenCV 3.x findContours API;
    # OpenCV 4.x returns only (contours, hierarchy) -- confirm the cv2 version.
    img11, contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    if not contours:
        # Original indexed contours[0] unconditionally and raised a bare
        # IndexError on an empty canvas.
        raise ValueError("no digit found on the canvas")
    x1, y1, w1, h1 = cv2.boundingRect(contours[0])
    # Pad the crop to a square so the digit keeps its aspect ratio.
    t = max(w1, h1) + 10
    mask = np.zeros((t, t), dtype='uint8')
    x2 = int((t - w1) / 2)
    y2 = int((t - h1) / 2)
    mask[y2:y2 + h1, x2:x2 + w1] = blured[y1:y1 + h1, x1:x1 + w1]
    resize = cv2.resize(mask, (28, 28))
    tpred = resize.reshape(1, 28, 28, 1)
    # Load the model once and cache it on the function object -- the
    # original reloaded it from disk on every single mouse release, which
    # is very slow.  (Earlier model files are kept for reference:
    # "mnist_digit.h5", "mnist_digit_convolution.h5".)
    if not hasattr(preprocess_image, "_model"):
        preprocess_image._model = keras.models.load_model('mnist_digit_convolution_w5e.h5')
    return np.argmax(preprocess_image._model.predict(tpred))
#click function
def click(event, x, y, flag, param):
#print("Event: ", event)
#print("X: ", x, " Y: ", y)
#print("Flag: ", flag)
#print("Param: ", param)
global canvas, pressed, color
if event == cv2.EVENT_LBUTTONDOWN:
pressed = True
cv2.circle(canvas,(x,y),radius,color,-1)
elif event == cv2.EVENT_MOUSEMOVE and pressed:
cv2.circle(canvas,(x,y),radius,color,-1)
elif event == cv2.EVENT_LBUTTONUP:
pressed = False
cv2.imwrite("digit.png", canvas)
pred = preprocess_image(canvas)
print("Predicted : " + str(pred))
elif event == cv2.EVENT_RBUTTONDOWN:
pressed = True
color = (0, 0, 255)
cv2.circle(canvas,(x,y),radius,color,-1)
elif event == cv2.EVENT_RBUTTONUP:
pressed = False
#elif cv2.waitKey(0):
# print(
# print("pressed A")
# color = (0,0,255)
# Window + mouse hookup: route all mouse events to click().
cv2.namedWindow("canvas")
cv2.setMouseCallback("canvas", click)

# UI loop: 'q' quits, 'c' clears the canvas.
while True:
    cv2.imshow("canvas", canvas)
    #screen = np.array(ImageGrab.grab(bbox=(10,10,900,900)))
    #print(screen.shape)
    #resized_screen = cv2.resize(screen, (640,480), Image.ANTIALIAS)
    #cv2.imshow("Screen", resized_screen)
    #out.write(resized_screen)
    ch = cv2.waitKey(1)
    if ch & 0xFF == ord('q'):
        break
    if ch == ord('c'):
        canvas = canvas * 0  # reset every pixel to black

cv2.destroyAllWindows()
| StarcoderdataPython |
5064122 | # -*- coding: utf-8 -*-
"""Common Jinja2 filters for manipulating ansible vars."""
import itertools
import math
import operator
import os.path
def hostname(fqdn):
    """Return hostname part of FQDN."""
    # Everything before the first dot; the whole string if there is none.
    head, _, _ = fqdn.partition('.')
    return head
def domain(fqdn):
    """Return domain part of FQDN."""
    # Everything after the first dot; empty if there is none.
    _, _, tail = fqdn.partition('.')
    return tail
def split_filename(filename):
    """Return filename without its extension.

    (The original docstring claimed this returned the extension -- the
    docstrings of split_filename and split_extension were swapped.)
    """
    return os.path.splitext(filename)[0]
def split_extension(filename):
    """Return the extension of filename, including the leading dot.

    (The original docstring claimed this returned the name without its
    extension -- the docstrings of split_filename and split_extension
    were swapped.)
    """
    return os.path.splitext(filename)[1]
def rstrip_substring(name, substring):
    """Strip a single trailing occurrence of `substring` from `name`.

    Unlike str.rstrip, this removes the suffix as a whole string, not a
    character set.  BUG FIX: with an empty substring the original executed
    `name[:-0]`, which equals `name[:0]` and wiped the entire string; an
    empty substring now leaves `name` unchanged.
    """
    if substring and name.endswith(substring):
        return name[:-len(substring)]
    return name
def attrs(dict_list, key):
    """Iterate values of specified key in list of dicts.

    BUG FIX: the original used itertools.imap, which does not exist on
    Python 3.  A generator expression is lazy on both Python 2 and 3.
    """
    return (item[key] for item in dict_list)
def ceil2(number, min_value=0):
    """Round up number to the next highest power of 2, floored at min_value.

    Non-positive input rounds to 0 (before applying min_value).
    """
    if number <= 0:
        ceiled = 0
    elif isinstance(number, int):
        # Exact integer path: floating-point log() can round log2 of a
        # large exact power of two up, overshooting to the next power.
        # bit_length() is exact for any int.
        ceiled = 1 << (number - 1).bit_length()
    else:
        ceiled = 2 ** int(math.ceil(math.log(number, 2)))
    return max(ceiled, min_value)
class FilterModule(object):
    """Common Jinja2 filters for manipulating ansible vars."""

    def filters(self):
        """Map filter names to their implementations."""
        return dict(
            hostname=hostname,
            domain=domain,
            split_filename=split_filename,
            split_extension=split_extension,
            rstrip_substring=rstrip_substring,
            attrs=attrs,
            ceil2=ceil2,
        )
| StarcoderdataPython |
11224603 | <filename>adstxt/rabbitmq_test/receive.py
import pika

# Blocking connection to a RabbitMQ broker running on this machine.
connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
channel = connection.channel()

# Declare the queue (idempotent -- safe even if the sender declared it).
channel.queue_declare(queue="hello")


def callback(ch, method, properties, body):
    """Print each message body as it is delivered."""
    print(" [x] Received : {}".format(body))


# auto_ack=True: messages are acknowledged on delivery, before processing.
channel.basic_consume(queue="hello", on_message_callback=callback, auto_ack=True)
print(" [*] Waiting for messages.")
# Blocks forever, dispatching deliveries to callback().
channel.start_consuming()
| StarcoderdataPython |
6652836 | <gh_stars>0
# Written by <NAME> 07/17
import praw
import pickle
import time
from Structures.Queue import Queue
import RedditSilverRobot
from datetime import datetime

print("Starting up the bots!")

# Reddit API session.  NOTE(review): the username/password values below are
# redacted placeholders in this copy; real credentials should come from
# configuration or the environment, never from source control.
reddit = praw.Reddit(client_id='client_id',
                     client_secret='client_secret',
                     user_agent='raspberrypi:com.rudypikulik.redditsilverrobot:v1.1.1',
                     username=‘**********’,
                     password=‘**********’)

# This defines the domain from which to collect comments. "all" for all comments.
sub = reddit.subreddit("all")

# Bot modules scanned against every comment; each must expose
# validate_comment() and a `file` attribute naming its pickled queue.
bots = [RedditSilverRobot]
def start_stream():
    """Stream subreddit comments and enqueue matches for each bot.

    For every comment a bot's validate_comment() accepts, the comment id
    is appended to that bot's pickled Queue on disk and the file is
    rewritten.
    """
    comments = sub.stream.comments()
    for comment in comments:
        for bot in bots:
            if not bot.validate_comment(comment):
                continue
            # Load the bot's persistent queue.  FIX: the original passed
            # bare open() results to pickle.load/dump, leaking the file
            # handles; `with` guarantees they are closed.
            with open(bot.file, 'rb') as fh:
                queue = pickle.load(fh)
            if not queue:
                queue = Queue()
            queue.enqueue(comment.id)
            with open(bot.file, 'wb') as fh:
                pickle.dump(queue, fh)
            timestr = str(time.localtime()[3]) + ":" + str(time.localtime()[4])
            print("> %s - Added comment to queue! Queue length: %s" % (timestr, len(queue)))
# Run forever: restart the stream a few seconds after any failure
# (network drops, API hiccups, etc.).
while True:
    try:
        print('Starting comment stream at %s' % (datetime.now()))
        start_stream()
    except Exception as e:
        # NOTE(review): deliberately broad -- the bot must stay alive;
        # the error is printed before reconnecting.
        print("> %s - Connection lost. Restarting in 3 seconds... %s" % (datetime.now(), e))
        time.sleep(3)
        continue
| StarcoderdataPython |
97564 | <gh_stars>0
#__author__ = 'Gavin'
from django.conf.urls import patterns, include, url

# URL routes for the `test` app (legacy Django patterns() style).
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'mysite.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^$','test.views.index',name='index'),      # site root
    url(r'^2/$','test.views.index2',name='index2')   # secondary page
)
5130657 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 Ciel, http://ciel.im
# Distributed under terms of the MIT license.
# git ignore delete
import os
from config import USER_PATH
from file_operation import test_folder
from file_operation import test_file
from file_operation import find_all_files
from file_operation import delete_file
from file_operation import search_file
def git_ignore_delete(filenames):
    """Delete .gitignore-managed files.

    With no arguments, removes the .gitignore in the current working
    directory (if present).  Otherwise each named file is looked up under
    USER_PATH and deleted when found.
    """
    if not filenames:
        path = os.getcwd() + "/.gitignore"
        if test_file(path):
            delete_file(path)
        return

    test_folder(USER_PATH)
    collected = []
    find_all_files(USER_PATH, collected)

    # Resolve each requested name to a path; unknown names resolve to "".
    targets = [search_file(name, collected) for name in filenames]
    for target in targets:
        if target != "":
            delete_file(target)
8036778 | <filename>questions/45964913/mesh_lib/model.py
from __future__ import print_function # backwards compatibility
from __future__ import division
# heap queue data structure from standard python libraries
# used for the search algorithm
import heapq
import numpy as np
def dijkstra(vertexes_dict, start_i, target_i):
    """dijkstra function
    vertexes - vertex data structure
    start_i - index of starting vertex in data structure
    target_i - index of end vertex in data structure

    Returns a (distance, path) tuple where path is the list of vertex
    indexes from start_i to target_i inclusive.  Raises Exception when
    target_i is unreachable from start_i.
    """
    # forming a data structure for a search
    vertexes = {}
    for v in vertexes_dict:
        vertexes[v] = {
            "dist": float("inf"),
            "vertex": vertexes_dict[v]
        }
    frontier = []  # search frontier
    looked_at = set()  # indexes of vertexes that heve been observed
    # frontier is ordered by path length to start vertex i.e. Dijkstra
    # heap queue frontier (priority, index)
    heapq.heappush(frontier, (0, start_i))
    # in data structure distance to start is 0
    vertexes[start_i]["dist"] = 0
    # in data structure path to start is just it's index
    # (other entries get their "path" key when first relaxed below)
    vertexes[start_i]["path"] = [start_i]
    # iterative search from start to target
    while (frontier):
        # getting next vertex from heap queue
        # fisrt argument is priority, we don't need it anymore
        # so it's assigned to variable _ and ignored
        _, v_i = heapq.heappop(frontier)
        # local variable just for cenvenience
        vertex = vertexes[v_i]["vertex"]
        if v_i == target_i:  # if we have found the target
            # path length and path is returned
            return vertexes[v_i]["dist"], vertexes[v_i]["path"]
        # if vertex has already been observed we ignore it
        # (stale heap entries from earlier, longer relaxations)
        if vertex.index in looked_at:
            continue
        # adding vertex to observed set
        looked_at.add(vertex.index)
        # relax every unvisited neighbor through this vertex
        for n in vertex.get_neighbors():
            if n.index in looked_at:
                continue
            new_dist = vertexes[v_i]["dist"] + vertex.get_dist_to(n)
            if new_dist < vertexes[n.index]["dist"]:
                vertexes[n.index]["dist"] = new_dist
                vertexes[n.index]["path"] = vertexes[v_i]["path"] + [n.index]
                heapq.heappush(frontier, (new_dist, n.index))
    # if code gets to this place that means no path has been found
    # as this should not happen an error is raised with description
    raise Exception(
        "No path found between vertex: {0} and vertex: {1}".format(
            start_i,
            target_i
        )
    )
# Vertex class
class Vertex(object):
    """One vertex/node of an undirected mesh graph.

    Holds an integer index, cartesian coordinates, and the list of
    directly connected neighbor vertexes.
    """

    def __init__(self, index, x, y, z):
        """index - integer index of vertex
        x, y, z - cartesian coordinates (anything convertible to float)"""
        self.index = index
        self.coords = np.array((x, y, z), dtype=float)
        self.neighbors = []

    def add_neighbors(self, n):
        """Record `n` as a neighbor of this vertex.

        For an undirected graph the relation must be added on both ends of
        the edge.  Duplicate additions are ignored.

        Raises TypeError if `n` is not a Vertex.  (The original used
        `assert`, which is silently stripped under `python -O`.)
        """
        if not isinstance(n, Vertex):
            raise TypeError("expected a Vertex, got %s" % type(n).__name__)
        # Linear scan is fine for typical mesh valence (a handful of edges).
        if n not in self.neighbors:
            self.neighbors.append(n)

    def get_coords(self):
        """Return np.array([x, y, z]) coordinates of this vertex."""
        return self.coords

    def get_neighbors(self):
        """Yield all neighbor vertexes."""
        for n in self.neighbors:
            yield n

    def get_dist_to(self, b):
        """Return the straight-line (Euclidean) distance to Vertex `b`."""
        return np.linalg.norm(self.coords - b.coords)
class Model(object):
    """In-memory triangle mesh loaded from a Wavefront *.obj file.

    The mesh is a dict of Vertex objects keyed by their 1-based *.obj
    index; connectivity is stored as neighbor relations on the vertexes.
    Faces themselves are not stored -- only the edges they imply.
    """

    def __init__(self, file_path):
        """Parse `file_path` (*.obj) into vertexes and neighbor relations.

        Only `v` (vertex) and `f` (triangular face) records are handled;
        face records are assumed to use plain vertex indexes
        (`f i j k`, not the `i/j/k` texture/normal form).
        """
        self.vertexes = {}
        # *.obj vertex indexes are 1-based (matching tools such as Meshlab),
        # so numbering starts at 1 here as well.
        v_index = 1
        with open(file_path, "r") as fin:
            for line in fin:
                line_list = line.split()
                if len(line_list) == 0:
                    continue
                if line_list[0] == "v":  # vertex record
                    vertex = Vertex(
                        v_index,  # "telling vertex its index"
                        # strings; Vertex converts them to float
                        line_list[1],  # x
                        line_list[2],  # y
                        line_list[3]   # z
                    )
                    self.vertexes[v_index] = vertex
                    v_index += 1
                elif line_list[0] == "f":  # triangular face record
                    a_i = int(line_list[1])  # vertex index 1
                    b_i = int(line_list[2])  # vertex index 2
                    c_i = int(line_list[3])  # vertex index 3
                    a = self.vertexes[a_i]
                    b = self.vertexes[b_i]
                    c = self.vertexes[c_i]
                    # 6 neighbor relations in one triangular face
                    a.add_neighbors(b)
                    a.add_neighbors(c)
                    b.add_neighbors(a)
                    b.add_neighbors(c)
                    c.add_neighbors(a)
                    c.add_neighbors(b)

    def get_coords(self, vertex_list=None):
        """Return vertex coordinates as an (n, 3) float array.

        If vertex_list is given, only those vertex indexes are returned in
        that order; otherwise all vertexes in ascending index order:
        [
            [x_1, y_1, z_1],
            [x_2, y_2, z_2],
            ...
        ]
        """
        if vertex_list is not None:
            coords = np.empty((len(vertex_list), 3), dtype=float)
            for i, vert in enumerate(vertex_list):
                coords[i] = self.vertexes[vert].get_coords()
        else:
            coords = np.empty((len(self), 3), dtype=float)
            for i, vert in enumerate(sorted(self.vertexes)):
                coords[i] = self.vertexes[vert].get_coords()
        return coords

    def get_edges(self):
        """Return all unique edges as an (m, 2) int array of vertex indexes.

        BUG FIX: the original did `all_edges.add(sorted([...]))`; a list is
        unhashable, so every call raised TypeError.  Edges are now stored
        as (small, large) index tuples before conversion to an array.
        """
        all_edges = set()
        for vert in self.vertexes:
            v = self.vertexes[vert]
            for e in v.get_neighbors():
                all_edges.add(tuple(sorted((v.index, e.index))))
        return np.array(list(all_edges), dtype=int)

    def __len__(self):
        """Number of vertexes in the mesh."""
        return len(self.vertexes)

    def get_path(self, start_i, end_i):
        """
        returns the indexes of shortest path along the shortest path
        uses Dijkstra algorithm
        """
        _, path = dijkstra(self.vertexes, start_i, end_i)
        return path
| StarcoderdataPython |
8002415 | from abc import ABCMeta, abstractmethod
from threading import Lock
from _pyio import __metaclass__
class VirtualFile(object):
    """Abstract base class for read-only, random-access file-like objects.

    Usable as a context manager: leaving the `with` block closes any
    open handles via closeFileHandle().

    NOTE(review): the `__metaclass__` attribute is the Python 2 mechanism
    for setting a metaclass; on Python 3 it has no effect, so the
    @abstractmethod markers are not enforced there -- confirm the
    targeted Python version.
    """
    __metaclass__ = ABCMeta

    def __init__(self, absRootPath):
        # Absolute path of the underlying file.
        self.path = absRootPath

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        self.closeFileHandle()

    def getPath(self):
        """Return the absolute path of the file."""
        return self.path

    @abstractmethod
    def read(self, offset, size):
        '''
        Returns the read bytes from offset with the given size
        or less if EOF is reached.
        '''

    @abstractmethod
    def size(self):
        '''
        Returns the size of the file
        '''

    @abstractmethod
    def closeFileHandle(self):
        '''
        Closes possibly open file handles
        '''
class LazyFile(object):
    '''
    Wrapper for a file handle object.
    The wrapper uses lazy instantiation, so the file handle is not initialized
    before the first usage.
    The wrapper is thread-safe, so it can be used from within multiple threads.
    In such cases the requests to the wrapper are executed sequentially.
    '''

    def __init__(self, absPath):
        self.path = absPath
        self.file = None      # opened lazily on first read()
        self.lock = Lock()    # serializes read()/close() across threads

    def getPath(self):
        """Return the absolute path of the wrapped file."""
        return self.path

    def read(self, offset, length):
        """Read up to `length` bytes starting at byte `offset`."""
        with self.lock:
            f = self.__getFile()
            f.seek(offset)
            return f.read(length)

    def close(self):
        """Close the handle if open.

        FIX: the handle is reset to None so the wrapper stays usable --
        a later read() lazily reopens the file.  The original kept the
        closed handle around, so read() after close() raised ValueError.
        """
        with self.lock:
            if self.file is not None:
                self.file.close()
                self.file = None

    def __getFile(self):
        # Caller must hold self.lock.
        if self.file is None:
            self.file = open(self.path, "rb")
        return self.file
| StarcoderdataPython |
1800060 | <reponame>Razz21/Nuxt-Django-E-Commerce-Demo
# Generated by Django 2.2.9 on 2020-01-18 14:45
from decimal import Decimal
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the `core` app.

    Adds a plural verbose name for Category and enforces a positive-price
    MinValueValidator on Item price fields.
    """

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        # Admin display: "Categories" instead of the default "Categorys".
        migrations.AlterModelOptions(
            name='category',
            options={'verbose_name_plural': 'Categories'},
        ),
        # Prices must be at least 0.01.
        migrations.AlterField(
            model_name='item',
            name='discount_price',
            field=models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True, validators=[django.core.validators.MinValueValidator(Decimal('0.01'))]),
        ),
        migrations.AlterField(
            model_name='item',
            name='price',
            field=models.DecimalField(decimal_places=2, max_digits=10, validators=[django.core.validators.MinValueValidator(Decimal('0.01'))]),
        ),
    ]
| StarcoderdataPython |
8154949 | <filename>checkers/CheckersGame.py
import sys
from .Piece import Piece
from .Board import Board, _mirror_action
#sys.path.append('..')
from Game import Game
import numpy as np
import copy
# Packed checkers board dimensions: 4 playable columns per row, 8 rows.
W = 4
H = 8
class CheckersGame(Game):
    """Game-framework adapter for checkers on a 4x8 packed board."""

    def __init__(self):
        pass

    def getInitBoard(self):
        """Fresh board in the starting position."""
        return Board()

    def getBoardSize(self):
        return (W, H)

    def getActionSize(self):
        # Every square (W*H) x 4 diagonal directions x {normal, capture}.
        return W * H * 4 * 2

    def getNextState(self, board, player, action):
        # Actions are expressed in canonical orientation; mirror them back
        # when the underlying board is flipped.
        if board.flipped_board:
            action = _mirror_action(action)
        successor = copy.deepcopy(board)
        next_player = successor.play_move(player, action)
        return successor, next_player

    def getValidMoves(self, board, player):
        return board.get_valid_moves(player).flatten()

    def getGameEnded(self, board, player):
        return board.winner(player)

    def getCanonicalForm(self, board, player):
        # White wants the board unflipped, black wants it flipped; return
        # the board as-is when the orientation already matches, otherwise
        # return the flipped view.
        wants_unflipped = player == Piece.WHITE
        if wants_unflipped != board.flipped_board:
            return board
        return board.flipped()

    def getSymmetries(self, board, pi):
        # No symmetry augmentation for checkers.
        return [(board, pi)]

    def stringRepresentation(self, board):
        # Mid-capture state and orientation are part of the position key.
        parts = (str(board.tostring()), str(board.mid_capture), str(board.flipped_board))
        return "".join(parts)
| StarcoderdataPython |
class AdoptionCenter:
    """
    An adoption center: knows its name, (x, y) location and how many
    animals of each species it currently houses, and can adopt one out.
    """

    def __init__(self, name, species_types, location):
        self.name = name
        self.species_types = species_types
        self.x, self.y = location[0], location[1]

    def get_number_of_species(self, animal):
        # 0.0 when the species is not housed here.
        return self.species_types.get(animal, 0.0)

    def get_location(self):
        return (self.x, self.y)

    def get_species_count(self):
        # Return a copy so callers cannot mutate internal state.
        return dict(self.species_types)

    def get_name(self):
        return self.name

    def adopt_pet(self, species):
        """Adopt out one animal of `species`; remove the species at zero."""
        count = self.species_types.get(species)
        if count is None:
            return
        self.species_types[species] = count - 1
        if self.species_types.get(species, 0) == 0:
            del self.species_types[species]
class Adopter:
    """
    A person looking to adopt: has a name and one desired species.
    Their score for a center is how many animals of that species the
    center houses, as a float.
    """

    def __init__(self, name, desired_species):
        self.name = name
        self.desired_species = desired_species

    def get_name(self):
        return self.name

    def get_desired_species(self):
        return self.desired_species

    def get_score(self, adoption_center):
        count = adoption_center.get_number_of_species(self.get_desired_species())
        return float(count)
class FlexibleAdopter(Adopter):
    """
    Like Adopter, but also open to a list of other species: the desired
    species counts at full weight, each considered species at 0.3.
    """

    def __init__(self, name, desired_species, considered_species):
        Adopter.__init__(self, name, desired_species)
        self.considered_species = considered_species

    def get_score(self, adoption_center):
        base = Adopter.get_score(self, adoption_center)
        extra = sum(adoption_center.get_number_of_species(s)
                    for s in self.considered_species)
        return base + 0.3 * extra
class FearfulAdopter(Adopter):
    """
    An adopter afraid of one species: every feared animal at the center
    subtracts 0.3 from the score, which is floored at 0.0.
    """

    def __init__(self, name, desired_species, feared_species):
        Adopter.__init__(self, name, desired_species)
        self.feared_species = feared_species

    def get_score(self, adoption_center):
        base = Adopter.get_score(self, adoption_center)
        penalty = 0.3 * adoption_center.get_number_of_species(self.feared_species)
        return max(0.0, float(base - penalty))
class AllergicAdopter(Adopter):
    """
    An adopter severely allergic to one or more species: any presence of
    an allergen at the center forces the score to 0; otherwise the plain
    Adopter score applies.
    """

    def __init__(self, name, desired_species, allergic_species):
        Adopter.__init__(self, name, desired_species)
        self.allergic_species = allergic_species

    def get_score(self, adoption_center):
        if any(adoption_center.get_number_of_species(s) > 0
               for s in self.allergic_species):
            return 0.0
        return Adopter.get_score(self, adoption_center)
class MedicatedAllergicAdopter(AllergicAdopter):
    """
    An allergic adopter with medicine of per-species effectiveness.

    The base Adopter score is scaled by the *lowest* effectiveness among
    the allergic species the center actually houses (and for which an
    effectiveness entry exists); if none apply, the factor is 1.0.
    """

    def __init__(self, name, desired_species, allergic_species, medicine_effectiveness):
        AllergicAdopter.__init__(self, name, desired_species, allergic_species)
        self.medicine_effectiveness = medicine_effectiveness

    def get_score(self, adoption_center):
        present = adoption_center.get_species_count()
        applicable = [self.medicine_effectiveness[s]
                      for s in self.allergic_species
                      if s in present and s in self.medicine_effectiveness]
        factor = min(applicable) if applicable else 1.0
        return float(factor * Adopter.get_score(self, adoption_center))
class SluggishAdopter(Adopter):
    """
    An adopter who dislikes travelling: the further away the center, the
    larger the random discount applied to the base score.

    Score multiplier by distance d:
      d < 1: 1.0;  1 <= d < 3: U(0.7, 0.9);  3 <= d < 5: U(0.5, 0.7);
      d >= 5: U(0.1, 0.5).
    """

    def __init__(self, name, desired_species, location):
        Adopter.__init__(self, name, desired_species)
        self.location = location

    def get_linear_distance(self, to_location):
        """Euclidean distance from this adopter's location to `to_location`."""
        dx = to_location[0] - self.location[0]
        dy = to_location[1] - self.location[1]
        return (dx ** 2 + dy ** 2) ** 0.5

    def get_score(self, adoption_center):
        import random
        base = Adopter.get_score(self, adoption_center)
        dist = self.get_linear_distance(adoption_center.get_location())
        if dist < 1:
            return base
        if dist < 3:
            return random.uniform(0.7, 0.9) * base
        if dist < 5:
            return random.uniform(0.5, 0.7) * base
        return random.uniform(0.1, 0.5) * base
def get_ordered_adoption_center_list(adopter, list_of_adoption_centers):
    """
    Return the adoption centers ordered by the adopter's score, highest
    first; ties are broken alphabetically by center name.
    """
    def rank(center):
        # Negate the score so higher scores sort first.
        return (-adopter.get_score(center), center.get_name())

    return sorted(list_of_adoption_centers, key=rank)
def get_adopters_for_advertisement(adoption_center, list_of_adopters, n):
    """
    Return the top `n` adopters by score for the given center, highest
    first; ties are broken alphabetically by adopter name.
    """
    def rank(adopter):
        # Negate the score so higher scores sort first.
        return (-adopter.get_score(adoption_center), adopter.get_name())

    return sorted(list_of_adopters, key=rank)[:n]
| StarcoderdataPython |
11387591 | #!/usr/bin/env python
import math
import time

import rospy

import Adafruit_BBIO.GPIO as GPIO
import Adafruit_BBIO.PWM as PWM

from servo_controller import Servo
class Car:
    """RC-style car driver for a BeagleBone: PWM motor control plus a
    steering servo that is auto-centered between two limit switches.

    Fixes over the original: `debug` is stored on the instance (it was a
    bare undefined name in turn()); set_speed() writes to self.forward /
    self.reverse (the bare names raised NameError); center_steering takes
    `self` and is invoked as a method (the original called the bare name
    and defined the method without `self`); the undefined `servo` name is
    replaced with self.steer; `time` must be imported at module level.
    """

    def __init__(self, s_pin="P8_13", f_pin="P9_14", b_pin="P9_22", debug=False):
        # initialize servo for steering
        self.steer = Servo(s_pin)
        # save PWM pins for forward and reverse
        self.forward = f_pin
        self.reverse = b_pin
        # remember the debug flag for later logging
        self.debug = debug
        # start servos and motors
        self.start()
        # center the steering (sets self.center and self.span)
        self.center_steering()

    def turn(self, angle=0):
        """Set the "steering wheel" relative to the calibrated center.

        Negative is left, positive is right.
        """
        self.steer.set_angle(self.center + angle)
        if self.debug:
            print(angle)
        return

    def set_speed(self, speed):
        """Set the car speed in [-100, 100]; negative drives in reverse."""
        speed = clamp(speed, -100, 100)
        if speed > 0:
            PWM.set_duty_cycle(self.reverse, 0)
            PWM.set_duty_cycle(self.forward, speed)
        else:
            PWM.set_duty_cycle(self.forward, 0)
            PWM.set_duty_cycle(self.reverse, -speed)
        return

    def start(self):
        """Start PWM lines for forward and reverse and start steering servo."""
        PWM.start(self.forward, 0, 200)
        PWM.start(self.reverse, 0, 200)
        self.steer.start()
        return

    def stop(self):
        """Stop PWM lines for forward and reverse and stop steering servo."""
        PWM.stop(self.forward)
        PWM.stop(self.reverse)
        self.steer.stop()
        PWM.cleanup()
        return

    def center_steering(self):
        """Sweep the servo between the two limit switches and record the
        calibrated center angle (self.center) and usable span (self.span)."""
        pin1 = "P8_12"
        pin2 = "P8_14"
        GPIO.setup(pin1, GPIO.IN)
        GPIO.setup(pin2, GPIO.IN)
        # guessed center point
        center = 100
        # current angle
        angle = center
        # left and right limits
        limit1 = angle
        limit2 = angle
        # turning left until a limit switch closes
        while not GPIO.input(pin1) and not GPIO.input(pin2):
            angle += .5
            self.steer.set_angle(angle)
            time.sleep(.05)
        limit1 = angle
        time.sleep(.5)
        # resetting servo to guessed center before sweeping the other way
        angle = center
        self.steer.set_angle(angle)
        time.sleep(.2)
        # turning right until a limit switch closes
        while not GPIO.input(pin1) and not GPIO.input(pin2):
            angle -= .5
            self.steer.set_angle(angle)
            time.sleep(.05)
        limit2 = angle
        time.sleep(.5)
        # calculating center from left and right limits
        self.center = (limit1 + limit2) / 2
        # calculate total range (102% of limit differences)
        self.span = (limit1 - limit2) / 2 * 1.02  # allow for a little extra wiggle
        self.steer.set_limits(limit2, limit1)
        if self.debug:
            print("center = " + str(self.center))
            print("span = " + str(self.span))
        self.steer.set_angle(self.center)
        return
# clamp x value between specified min and max values
def clamp(x, min, max):
    """Clamp *x* into the inclusive range [min, max].

    Returns False when the range is invalid (min greater than max),
    preserving the original error convention.
    NOTE: the parameter names shadow the min/max builtins; kept unchanged
    for call compatibility.
    """
    if min > max:
        return False
    if x < min:
        return min
    if x > max:
        return max
    return x
| StarcoderdataPython |
4945688 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
class BasicConv(nn.Module):
    """Conv2d followed by optional BatchNorm, ReLU, and nearest upsampling.

    The stages run in the fixed order conv -> bn -> relu -> upsample; each
    optional stage is skipped when disabled (bn=False, relu=False, or
    scale_factor <= 1).
    """

    def __init__(
        self,
        in_planes,
        out_planes,
        kernel_size,
        stride=1,
        padding=0,
        dilation=1,
        groups=1,
        relu=True,
        bn=True,
        bias=True,
        scale_factor=1,
    ):
        super(BasicConv, self).__init__()
        self.conv = nn.Conv2d(
            in_planes,
            out_planes,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            dilation=dilation,
            groups=groups,
            bias=bias,
        )
        if bn:
            self.bn = nn.BatchNorm2d(out_planes, eps=1e-5, momentum=0.01, affine=True)
        else:
            self.bn = None
        self.relu = nn.ReLU(inplace=False) if relu else None
        if scale_factor > 1:
            self.upsample = nn.Upsample(scale_factor=scale_factor, mode='nearest')
        else:
            self.upsample = None

    def forward(self, x):
        out = self.conv(x)
        # Apply each optional stage in order, skipping the disabled ones.
        for stage in (self.bn, self.relu, self.upsample):
            if stage is not None:
                out = stage(out)
        return out
| StarcoderdataPython |
3404243 | #########################################
####### Rig On The Fly #######
####### Copyright © 2020 Dypsloom #######
####### https://dypsloom.com/ #######
#########################################
import bpy
from . PolygonShapesUtility import PolygonShapes
from . Utility import StateUtility, Channel
from . DypsloomBake import DypsloomBakeUtils
class RigOnSkeletonUtils:
    def RigOnSkeleton (self, context):
        """Build a control rig on top of the active armature.

        Duplicates every bone of the base armature into a ".rig" control
        bone on a separate armature layer, gives the controls circle
        shapes, bakes any existing animation onto the controls, then
        constrains the base bones to follow the controls.

        NOTE(review): heavily dependent on Blender context state (active
        object, selection, armature layers); also note that `obj` is
        rebound inside the bake loop below — confirm the post-bake layer
        code is meant to run against the loop's last object.
        """
        #add controller shapes to the scene
        PolygonShapes.AddControllerShapes()
        #set aside the armature as a variable
        obj = bpy.context.object
        armature = obj.data
        # remember which armature layers were visible so the rest can be
        # temporarily shown while deselecting
        originalLayers = list()
        layersToTurnOff = list()
        for layer in range(32):
            if armature.layers[layer] == True:
                originalLayers.append(layer)
            else:
                armature.layers[layer] = True
                layersToTurnOff.append(layer)
        # user-chosen layer indices for base bones and rig (control) bones
        baseLayer = obj.baseBonesLayer
        rigLayer = obj.rigBonesLayer
        #force pose mode
        bpy.ops.object.mode_set(mode='POSE')
        for boneP in bpy.context.selected_pose_bones:
            boneP.bone.select = False
        for layer in range(32):
            if layer in layersToTurnOff:
                armature.layers[layer] = False
        #select base armature
        bpy.ops.pose.select_all(action='SELECT')
        # record the names of every bone that will get a ".rig" control
        bonesToRigN = list()
        for pbone in bpy.context.selected_pose_bones:
            bonesToRigN.append(pbone.name)
        #create and assign bone groups to selected pose bones
        StateUtility.PoseBoneGroups()
        #force edit mode
        StateUtility.SetEditMode()
        #select base armature
        bpy.ops.armature.select_all(action='DESELECT')
        bpy.ops.armature.select_all(action='SELECT')
        #move base bones to the base layer.
        StateUtility.MoveBonesToLayer(baseLayer)
        #make base layer visible
        armature.layers[baseLayer] = True
        #hide originally visible layers
        for layer in range(32):
            if layer != baseLayer:
                armature.layers[layer] = False
        # disconnect bones so the duplicated controls can move freely
        for baseBoneE in bpy.context.selected_editable_bones:
            baseBoneE.use_connect = False
        # duplicate every base bone into a ".rig"-suffixed control bone
        StateUtility.DuplicateBones(obj,".rig")
        #move rig bones to the rig layer.
        StateUtility.MoveBonesToLayer(rigLayer)
        #make rig layer visible
        armature.layers[rigLayer] = True
        #armature is in pose mode
        bpy.ops.object.mode_set(mode='POSE')
        #change rig bones' display to circle, rotation mode to euler YZX and adds copy transform constraint to copy the base armature's animation.
        selectedPBones = bpy.context.selected_pose_bones.copy()
        # sort parents first so hierarchy roots come before children
        selectedPBones.sort(key = lambda x:len(x.parent_recursive))
        for i, rigBoneP in enumerate(selectedPBones):
            rigBoneP.custom_shape = bpy.data.objects["RotF_Circle"]
            armature.bones[rigBoneP.name].show_wire = True
            #rigBoneP.rotation_mode = 'YZX'
            #for the first two bones of the hierarchy have the controller size bigger
            if i < 2:
                # approximate object size (average dimension divided by
                # average world scale) to size the controller shape
                objDimensions = (obj.dimensions[0] + obj.dimensions[1] + obj.dimensions[2])/3
                objWorldScaleV = obj.matrix_world.to_scale()
                objWorldScale = (objWorldScaleV[0] + objWorldScaleV[1] + objWorldScaleV[2])/3
                objSize = objDimensions / objWorldScale
                sizeMultiplyer = objSize / rigBoneP.length
                rigBoneP.custom_shape_scale *= sizeMultiplyer/(2*(i+3))
        #for rigBoneP in bpy.context.selected_pose_bones:
        # each ".rig" control copies the transform of its base bone so the
        # bake below captures the base animation
        for boneN in bonesToRigN:
            rigBoneN = StateUtility.LeftRightSuffix(boneN) +".rig"
            rigBoneP = obj.pose.bones[rigBoneN]
            copyTransforms = rigBoneP.constraints.new('COPY_TRANSFORMS')
            copyTransforms.target = obj
            copyTransforms.subtarget = boneN #rigBoneP.name.replace(".rig","")
        #if object being rigged has animation data
        if obj.animation_data:
            #bake rig bones animation so that they have the same animation as the base armature.
            # -----------------------------------------------------------------------------------------------------------------------------------
            #BAKE SELECTED BONES
            objectActionsDictionary = StateUtility.FindActions() #find relevant action for each selected object
            ActionInitialState = StateUtility.ActionInitialState(objectActionsDictionary) #store objects' actions state to know if they were in tweak mode
            for obj in objectActionsDictionary:
                initialAction = obj.animation_data.action
                tracksStateDict, soloTrack, activeActionBlendMode = StateUtility.SoloRestPoseTrack(obj) #add an nla track to solo so that baking is done without other tracks influencing the result
                for action in objectActionsDictionary[obj]:
                    obj.animation_data.action = action #switch obj's current action
                    frames = list() #list of frames to key
                    bonePChannelsToBake = dict() #dictionary containing which channels to key on selected pose bones
                    if not bpy.context.scene.smartFrames:
                        # key every frame of the action's range
                        frameRange = action.frame_range
                        frames = [*range(int(frameRange.x), int(frameRange.y) + 1, 1)]
                    #locationXYZList = [Channel.locationX, Channel.locationY, Channel.locationZ]
                    #quaternionWXYZList = [Channel.quaternionW, Channel.quaternionX, Channel.quaternionY, Channel.quaternionZ]
                    eulerXYZList = [Channel.eulerX, Channel.eulerY, Channel.eulerZ]
                    #scaleXYZList = [Channel.scaleX, Channel.scaleY, Channel.scaleZ]
                    #for boneP in bpy.context.selected_pose_bones:
                    # collect, per control bone, which channels the base
                    # bone animates so only those get baked
                    for boneN in bonesToRigN:
                        rigBoneN = StateUtility.LeftRightSuffix(boneN) +".rig"
                        boneP = obj.pose.bones[rigBoneN]
                        channelsList = list()
                        targetBoneP = obj.pose.bones[boneN] #obj.pose.bones[boneP.name.replace(".rig","")]
                        targetBoneDataPath = targetBoneP.path_from_id()
                        #looking for translation channels
                        for i in range(3):
                            fcurve = action.fcurves.find(targetBoneDataPath + ".location",index=i)
                            if fcurve:
                                if i == 0: #if location X channel
                                    channelsList.append(Channel.locationX)
                                if i == 1: #if location Y channel
                                    channelsList.append(Channel.locationY)
                                if i == 2: #if location Z channel
                                    channelsList.append(Channel.locationZ)
                                StateUtility.GetFramePointFromFCurve(fcurve, frames)
                        if boneP.rotation_mode == targetBoneP.rotation_mode:
                            #looking for euler channels
                            for i in range(3):
                                fcurve = action.fcurves.find(targetBoneDataPath + ".rotation_euler",index=i)
                                if fcurve:
                                    if i == 0: #if euler X channel
                                        channelsList.append(Channel.eulerX)
                                    if i == 1: #if euler Y channel
                                        channelsList.append(Channel.eulerY)
                                    if i == 2: #if euler Z channel
                                        channelsList.append(Channel.eulerZ)
                                    StateUtility.GetFramePointFromFCurve(fcurve, frames)
                        else:
                            # rotation modes differ: bake any rotation
                            # animation into all three euler channels
                            #looking for quaternion channels
                            for i in range(4):
                                fcurve = action.fcurves.find(targetBoneDataPath + ".rotation_quaternion",index=i)
                                if fcurve:
                                    channelsList.extend(eulerXYZList)
                                    StateUtility.GetFramePointFromFCurve(fcurve, frames)
                            #looking for euler channels
                            for i in range(3):
                                fcurve = action.fcurves.find(targetBoneDataPath + ".rotation_euler",index=i)
                                if fcurve:
                                    channelsList.extend(eulerXYZList)
                                    StateUtility.GetFramePointFromFCurve(fcurve, frames)
                        #looking for scale channels
                        for i in range(3):
                            fcurve = action.fcurves.find(targetBoneDataPath + ".scale",index=i)
                            if fcurve:
                                if i == 0: #if scale X channel
                                    channelsList.append(Channel.scaleX)
                                if i == 1: #if scale Y channel
                                    channelsList.append(Channel.scaleY)
                                if i == 2: #if scale Z channel
                                    channelsList.append(Channel.scaleZ)
                                StateUtility.GetFramePointFromFCurve(fcurve, frames)
                        bonePChannelsToBake[boneP] = channelsList
                    DypsloomBakeUtils.DypsloomBake(obj, action, frames, bonePChannelsToBake)
                StateUtility.RestoreTracksState(obj, tracksStateDict, soloTrack, activeActionBlendMode) #remove the bakeTrack
                obj.animation_data.action = initialAction
            StateUtility.RestoreActionState(ActionInitialState, objectActionsDictionary) #return objects' actions to tweak mode if it was their initial state
        #------------------------------------------------------------------------------------------------------------------------------------
        # the bake captured the base motion; the controls no longer need to
        # follow the base bones
        StateUtility.RemoveConstraintsOfSelectedPoseBones()
        #hide first layer to show only rig bones.
        armature.layers[baseLayer] = False
        #deselect all rig bones
        bpy.ops.pose.select_all(action='TOGGLE')
        #display base armature layer and hide rig armature layer
        armature.layers[baseLayer] = True
        armature.layers[rigLayer] = False
        #select base armature
        bpy.ops.pose.select_all(action='SELECT')
        #base armature now follows rig armature
        for bone in bpy.context.selected_pose_bones:
            copyTransforms = bone.constraints.new('COPY_TRANSFORMS')
            copyTransforms.target = obj
            copyTransforms.subtarget = StateUtility.LeftRightSuffix(bone.name) + ".rig"
        if obj.animation_data:
            #clear all key frames of selected bones
            StateUtility.KeyframeClear()
        #deselect base armature
        bpy.ops.pose.select_all(action='DESELECT')
        #show rig armature
        armature.layers[rigLayer] = True
        armature.layers[baseLayer] = False
    def RestPoseTrack (self, context):
        """Store the armature's rest pose as an NLA strip at the bottom of the stack.

        Creates a "<object> Rest Pose" action keyed at frame 0 with all
        transforms cleared, wraps it in a REPLACE-blended NLA strip, moves
        that track to the bottom, then restores the previously active
        action, blend type, editor area, and frame.
        """
        obj = bpy.context.object
        initialFrame = bpy.context.scene.frame_current
        bpy.ops.object.mode_set(mode='POSE') #force pose mode
        bpy.ops.pose.select_all(action='SELECT') #select all available pose bones
        #if armature object does not have animation data, key current pose to create it
        if not obj.animation_data:
            bpy.ops.anim.keyframe_insert_menu(type='LocRotScale') #add keyframe to all selected bones, adding in the process a new action
        initialAction = obj.animation_data.action #store initial action to return to it once the script is done
        restPoseAction = bpy.data.actions.new(obj.name + " Rest Pose") #create new action used for storing the rest pose
        obj.animation_data.action = restPoseAction #assign rest pose action to store the rest pose of the armature
        bpy.context.scene.frame_current = 0 #go to frame 0
        initialBlendType = obj.animation_data.action_blend_type
        obj.animation_data.action_blend_type = 'REPLACE'
        bpy.ops.pose.transforms_clear()#put selected bones into rest pose
        bpy.ops.anim.keyframe_insert_menu(type='LocRotScale') #key rest pose
        initialAreaType = bpy.context.area.type #store initial area type
        bpy.context.area.type = 'NLA_EDITOR' #change area type to NLA_EDITOR to get the right context for the operator
        for track in obj.animation_data.nla_tracks:# deselect all tracks before adding the restPoseTrack
            track.select = False
        restPoseTrack = obj.animation_data.nla_tracks.new() #add new restPoseTrack, it gets selected by default
        restPoseTrack.name = "RotF Rest Pose " + obj.name #name it appropriately
        restPoseStrip = restPoseTrack.strips.new("RotF Rest Pose "+ obj.name, 0, restPoseAction) #add new restPoseStrip containing the restPoseAction
        restPoseStrip.blend_type = 'REPLACE'
        bpy.ops.anim.channels_move(direction='BOTTOM') #move selected tracks to the bottom of the nla
        obj.animation_data.action_blend_type = initialBlendType
        bpy.context.area.type = initialAreaType #return to initial area type
        obj.animation_data.action = initialAction #return to initial action
        bpy.context.scene.frame_current = initialFrame #return to initial frame
    def ArmatureMotionToBone(self, context):
        """Transfer the armature *object*'s own animation onto a new root bone.

        If any relevant action animates the armature object's transform, a
        "RotF_ArmatureMotion" bone is created and parented above every root
        bone, the object-level f-curves are moved onto that bone, and the
        object's own transform is zeroed.

        Returns True when object-level animation was found (and
        transferred), False otherwise.
        """
        obj = bpy.context.object
        armature = obj.data
        initialAction = obj.animation_data.action
        #tracksStateDict, soloTrack, activeActionBlendMode = StateUtility.SoloRestPoseTrack(obj) #add an nla track to solo so that baking is done without other tracks influencing the result
        wasInTweakMode = False
        if obj.animation_data.use_tweak_mode:
            wasInTweakMode = True
            obj.animation_data.use_tweak_mode = False #exit nla tweak mode
        # gather the active action plus every action referenced by NLA strips
        actionList = list()
        objHasAnimation = False
        if obj.animation_data:
            if obj.animation_data.action:
                currentAction = obj.animation_data.action
                actionList.append(currentAction) #add the current action to actionList
            for nlaTrack in obj.animation_data.nla_tracks: #go through object's nla tracks
                for actionStrip in nlaTrack.strips: #go through the strips in it's nla tracks
                    action = actionStrip.action
                    if action not in actionList: #if action used in strips of the nla tracks are not yet in actionList
                        actionList.append(action) #add the action to actionList
        #check all relevant actions to see if armature object has animation
        for action in actionList:
            obj.animation_data.action = action
            for i in range(3):
                location = action.fcurves.find("location",index=i)
                if location:
                    objHasAnimation = True
                rotationEuler = action.fcurves.find("rotation_euler",index=i)
                if rotationEuler:
                    objHasAnimation = True
                scale = action.fcurves.find("scale",index=i)
                if scale:
                    objHasAnimation = True
            for i in range(4):
                rotationQuaternion = action.fcurves.find("rotation_quaternion",index=i)
                if rotationQuaternion:
                    objHasAnimation = True
        if objHasAnimation:
            # reuse or create the bone group marking armature-motion bones
            if obj.pose.bone_groups.get('RigOnTheFly Armature Motion') is None:
                armatureMotionBoneGroup = obj.pose.bone_groups.new(name="RigOnTheFly Armature Motion")
                armatureMotionBoneGroup.color_set = 'THEME11'
            else:
                armatureMotionBoneGroup = obj.pose.bone_groups['RigOnTheFly Armature Motion']
            #force edit mode
            StateUtility.SetEditMode()
            #create new bone
            newBoneN = "RotF_ArmatureMotion"
            newEBone = armature.edit_bones.new(newBoneN)
            newEBone.use_deform = False
            newEBone.tail = (0,1,0) #tail position
            # size the new bone relative to the object's overall size
            # (average dimension divided by average world scale)
            objDimensions = (obj.dimensions[0] + obj.dimensions[1] + obj.dimensions[2])/3
            objWorldScaleV = obj.matrix_world.to_scale()
            objWorldScale = (objWorldScaleV[0] + objWorldScaleV[1] + objWorldScaleV[2])/3
            objSize = objDimensions / objWorldScale
            sizeMultiplyer = objSize / newEBone.length
            newEBone.length = sizeMultiplyer/3
            # parent every root bone under the new armature-motion bone
            for ebone in armature.edit_bones:
                if ebone.parent == None: #and ".rig" in ebone.name:
                    ebone.parent = newEBone
            #force pose mode
            bpy.ops.object.mode_set(mode='POSE')
            newPBone = obj.pose.bones[newBoneN]
            newPBone.rotation_mode = obj.rotation_mode
            newPBone.bone_group = armatureMotionBoneGroup
            boneDataPath = newPBone.path_from_id()
            for action in actionList:
                #copy the armature's object motion to the new bone
                for transformType in ["location","rotation_euler","rotation_quaternion","scale"]:
                    # quaternions have 4 channels; the others have 3
                    index = int()
                    if transformType == "rotation_quaternion":
                        index = 4
                    else:
                        index = 3
                    for i in range(index):
                        objFCurve = action.fcurves.find(transformType,index=i)
                        if not objFCurve:
                            continue
                        else:
                            # find or create the matching bone-level f-curve
                            data_path = boneDataPath+"."+transformType
                            fcurve = action.fcurves.find(data_path, index=i)
                            if fcurve == None:
                                fcurve = action.fcurves.new(data_path, index=i, action_group=newPBone.name)
                            num_keys = len(objFCurve.keyframe_points)
                            keys_to_add = num_keys - len(fcurve.keyframe_points) #find how many keyframe points need to be added
                            fcurve.keyframe_points.add(keys_to_add) #add the needed keyframe points
                            # copy each key's position and bezier handles
                            for key in range(num_keys):
                                fcurve.keyframe_points[key].co = objFCurve.keyframe_points[key].co
                                fcurve.keyframe_points[key].handle_left = objFCurve.keyframe_points[key].handle_left
                                fcurve.keyframe_points[key].handle_right = objFCurve.keyframe_points[key].handle_right
                            #remove fcurve on armature object
                            action.fcurves.remove(objFCurve)
            #zero armature's object transforms
            obj.location = (0,0,0)
            obj.rotation_euler = (0,0,0)
            obj.rotation_quaternion = (1,0,0,0)
            obj.scale = (1,1,1)
        #StateUtility.RestoreTracksState(obj, tracksStateDict, soloTrack, activeActionBlendMode) #remove the bakeTrack
        obj.animation_data.action = initialAction
        if wasInTweakMode:
            obj.animation_data.use_tweak_mode = True
        return objHasAnimation
def ArmatureMotionBoneShape(self, context):
obj = bpy.context.object
armature = obj.data
for pbone in obj.pose.bones:
if "RotF_ArmatureMotion" in pbone.name and ".rig" in pbone.name:
pbone.custom_shape = bpy.data.objects["RotF_Square"]
armature.bones[pbone.name].show_wire=True
| StarcoderdataPython |
8178549 | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import time
import os
from PIL import Image
import torch
import torch.nn as nn
import torchvision
from torch.utils.data import DataLoader, Dataset
% matplotlib inline
warnings.filterwarnings('ignore')
class TwinsDataloader(Dataset):
    """Dataset yielding paired twin face images.

    Args:
        dataroot: path prefix of the folder holding one sub-folder per id
        df: pandas DataFrame with columns view, id_a, id_b
        transform: torchvision transform applied to both images
    """

    def __init__(self, dataroot, df, transform):
        self.dataroot = dataroot
        self.df = df
        self.transform = transform

    def __getitem__(self, index):
        view, id_a, id_b = self.df.iloc[index].values
        # image layout: <dataroot><id>/<id>d<view>__face.jpg
        path_template = self.dataroot + '{0}/{0}d{1}__face.jpg'
        image_a = Image.open(path_template.format(id_a, view))
        image_b = Image.open(path_template.format(id_b, view))
        return {
            'img_a': self.transform(image_a),
            'img_b': self.transform(image_b),
            'class_a': id_a,
            'class_b': id_b,
        }

    def __len__(self):
        return self.df.shape[0]
1955825 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import SDKClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from .operations.autoscale_settings_operations import AutoscaleSettingsOperations
from .operations.operations import Operations
from .operations.alert_rule_incidents_operations import AlertRuleIncidentsOperations
from .operations.alert_rules_operations import AlertRulesOperations
from .operations.log_profiles_operations import LogProfilesOperations
from .operations.diagnostic_settings_operations import DiagnosticSettingsOperations
from .operations.diagnostic_settings_category_operations import DiagnosticSettingsCategoryOperations
from .operations.action_groups_operations import ActionGroupsOperations
from .operations.activity_log_alerts_operations import ActivityLogAlertsOperations
from .operations.activity_logs_operations import ActivityLogsOperations
from .operations.event_categories_operations import EventCategoriesOperations
from .operations.tenant_activity_logs_operations import TenantActivityLogsOperations
from .operations.metric_definitions_operations import MetricDefinitionsOperations
from .operations.metrics_operations import MetricsOperations
from .operations.metric_baseline_operations import MetricBaselineOperations
from .operations.metric_alerts_operations import MetricAlertsOperations
from .operations.metric_alerts_status_operations import MetricAlertsStatusOperations
from .operations.scheduled_query_rules_operations import ScheduledQueryRulesOperations
from .operations.metric_namespaces_operations import MetricNamespacesOperations
from .operations.vm_insights_operations import VMInsightsOperations
from . import models
class MonitorManagementClientConfiguration(AzureConfiguration):
    """Configuration for MonitorManagementClient.

    Every parameter used to create this instance is saved as an instance
    attribute.

    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: The Azure subscription Id.
    :type subscription_id: str
    :param str base_url: Service URL
    """

    def __init__(
            self, credentials, subscription_id, base_url=None):

        # Validate the required parameters before touching the base class.
        for name, value in (('credentials', credentials),
                            ('subscription_id', subscription_id)):
            if value is None:
                raise ValueError("Parameter '{}' must not be None.".format(name))

        # Fall back to the public Azure Resource Manager endpoint.
        super(MonitorManagementClientConfiguration, self).__init__(
            base_url or 'https://management.azure.com')

        self.add_user_agent('azure-mgmt-monitor/{}'.format(VERSION))
        self.add_user_agent('Azure-SDK-For-Python')

        self.credentials = credentials
        self.subscription_id = subscription_id
class MonitorManagementClient(SDKClient):
    """Monitor Management Client.

    Exposes one operation-group attribute per Azure Monitor API area —
    autoscale_settings, operations, alert_rule_incidents, alert_rules,
    log_profiles, diagnostic_settings, diagnostic_settings_category,
    action_groups, activity_log_alerts, activity_logs, event_categories,
    tenant_activity_logs, metric_definitions, metrics, metric_baseline,
    metric_alerts, metric_alerts_status, scheduled_query_rules,
    metric_namespaces, vm_insights — each an instance of the matching
    ``*Operations`` class wired to this client.

    :ivar config: Configuration for client.
    :vartype config: MonitorManagementClientConfiguration
    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: The Azure subscription Id.
    :type subscription_id: str
    :param str base_url: Service URL
    """

    def __init__(
            self, credentials, subscription_id, base_url=None):

        self.config = MonitorManagementClientConfiguration(credentials, subscription_id, base_url)
        super(MonitorManagementClient, self).__init__(self.config.credentials, self.config)

        # Serializer/deserializer share the generated model classes.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

        # (attribute name, operations class) for every API area; each group
        # is constructed with the same client plumbing.
        operation_groups = (
            ('autoscale_settings', AutoscaleSettingsOperations),
            ('operations', Operations),
            ('alert_rule_incidents', AlertRuleIncidentsOperations),
            ('alert_rules', AlertRulesOperations),
            ('log_profiles', LogProfilesOperations),
            ('diagnostic_settings', DiagnosticSettingsOperations),
            ('diagnostic_settings_category', DiagnosticSettingsCategoryOperations),
            ('action_groups', ActionGroupsOperations),
            ('activity_log_alerts', ActivityLogAlertsOperations),
            ('activity_logs', ActivityLogsOperations),
            ('event_categories', EventCategoriesOperations),
            ('tenant_activity_logs', TenantActivityLogsOperations),
            ('metric_definitions', MetricDefinitionsOperations),
            ('metrics', MetricsOperations),
            ('metric_baseline', MetricBaselineOperations),
            ('metric_alerts', MetricAlertsOperations),
            ('metric_alerts_status', MetricAlertsStatusOperations),
            ('scheduled_query_rules', ScheduledQueryRulesOperations),
            ('metric_namespaces', MetricNamespacesOperations),
            ('vm_insights', VMInsightsOperations),
        )
        for attribute, operations_class in operation_groups:
            setattr(self, attribute, operations_class(
                self._client, self.config, self._serialize, self._deserialize))
| StarcoderdataPython |
1985869 | <reponame>leVirve/ELD<gh_stars>0
import torch.nn as nn
from .Unet import UNetSeeInDark
def unet(in_channels, out_channels, **kwargs):
    """Factory returning a UNetSeeInDark model.

    :param in_channels: number of input channels passed to the model
    :param out_channels: number of output channels passed to the model
    :param kwargs: accepted for factory-interface compatibility but
        currently ignored — NOTE(review): extra options are silently
        dropped; confirm no caller relies on them reaching the model.
    """
    return UNetSeeInDark(in_channels, out_channels)
| StarcoderdataPython |
20348 | <reponame>h4ckfu/data<filename>bob-ross/cluster-paintings.py
"""
Clusters Bob Ross paintings by features.
By <NAME> <<EMAIL>>
See http://fivethirtyeight.com/features/a-statistical-analysis-of-the-work-of-bob-ross/
"""
import numpy as np
from scipy.cluster.vq import vq, kmeans, whiten
import math
import csv
def main():
    """Cluster the Bob Ross episode tag-vectors into 10 k-means groups.

    Reads elements-by-episode.csv (one 0/1 tag vector per episode), drops
    rarely-used tags, whitens the matrix, runs k-means with 10 centroids,
    and prints each episode's nearest centroid and distance to it.

    NOTE: Python 2 code (reader.next(), print statements); scipy's kmeans
    seeds randomly, so cluster assignments can vary between runs.
    """
    # load data into vectors of 1s and 0s for each tag
    with open('elements-by-episode.csv','r') as csvfile:
        reader = csv.reader(csvfile)
        reader.next() # skip header
        data = []
        for row in reader:
            data.append(map(lambda x: int(x), row[2:])) # exclude EPISODE and TITLE columns

    # convert to numpy matrix
    matrix = np.array(data)

    # remove colums that have been tagged less than 5 times
    columns_to_remove = []
    for col in range(np.shape(matrix)[1]):
        if sum(matrix[:,col]) <= 5:
            columns_to_remove.append(col)
    matrix = np.delete(matrix, columns_to_remove, axis=1)

    # normalize according to stddev
    whitened = whiten(matrix)
    # kmeans returns (centroids, mean distortion); output[0] is the
    # 10 x n_features centroid array used below
    output = kmeans(whitened, 10)

    print "episode", "distance", "cluster"

    # determine distance between each of 403 vectors and each centroid, find closest neighbor
    for i, v in enumerate(whitened):
        # distance between centroid 0 and feature vector
        distance = math.sqrt(sum((v - output[0][0]) ** 2))
        # group is the centroid it is closest to so far, set initally to centroid 0
        group = 0
        closest_match = (distance, group)
        # test the vector i against the 10 centroids, find nearest neighbor
        for x in range (0, 10):
            dist_x = math.sqrt(sum((v - output[0][x]) ** 2))
            if dist_x < closest_match[0]:
                closest_match = (dist_x, x)
        print i+1, closest_match[0], closest_match[1]

if __name__ == "__main__":
    main()
3519153 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import sorl.thumbnail.fields
import system.core.models
class Migration(migrations.Migration):
    """Initial schema migration for the ``core`` app.

    Creates the content models (Notice, Photogallery, Podcast, VideoLibrary,
    Event), the people models (Author, Member, Photographer) and the
    supporting lookup tables (Discipline, Editorial, CurricularPractice,
    Role, Contact, SocialNetwork, Address, About), then wires up the
    foreign-key / many-to-many relations between them.

    NOTE(review): migration files are frozen snapshots of model state —
    the anomalies flagged below should be fixed in the models and a new
    migration generated, not by editing this file after it has been applied.
    """

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='About',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('body', models.TextField(help_text='About body', max_length=5120, verbose_name='Body')),
            ],
            options={
                'verbose_name': 'About',
                'verbose_name_plural': 'Abouts',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('street', models.CharField(help_text='Address street', max_length=32, verbose_name='Street')),
                ('number', models.CharField(help_text='Address number', max_length=8, verbose_name='Number')),
                ('complement', models.CharField(help_text='Address complement', max_length=64, verbose_name='Complement')),
                ('district', models.CharField(help_text='Address district', max_length=32, verbose_name='District')),
                # NOTE(review): max_length has no effect on IntegerField — Django
                # ignores it; same applies to the IntegerFields further below.
                ('zip_code', models.IntegerField(help_text='Address zip code', max_length=8, verbose_name='Zip code')),
                ('city', models.CharField(help_text='Address city', max_length=32, verbose_name='City')),
                # NOTE(review): (b'AP', b'AP') appears twice and 'AM' (Amazonas)
                # is missing — verify the intended list of Brazilian states.
                ('state', models.CharField(help_text='Address state', max_length=2, verbose_name='State', choices=[(b'AC', b'AC'), (b'AL', b'AL'), (b'AP', b'AP'), (b'AP', b'AP'), (b'BA', b'BA'), (b'CE', b'CE'), (b'DF', b'DF'), (b'GO', b'GO'), (b'ES', b'ES'), (b'MA', b'MA'), (b'MT', b'MT'), (b'MS', b'MS'), (b'MG', b'MG'), (b'PA', b'PA'), (b'PB', b'PB'), (b'PR', b'PR'), (b'PE', b'PE'), (b'PI', b'PI'), (b'RJ', b'RJ'), (b'RN', b'RN'), (b'RS', b'RS'), (b'RO', b'RO'), (b'RR', b'RR'), (b'SP', b'SP'), (b'SC', b'SC'), (b'SE', b'SE'), (b'TO', b'TO')])),
            ],
            options={
                'verbose_name': 'Address',
                'verbose_name_plural': 'Addresses',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Author name', max_length=64, verbose_name='Name')),
                ('about', models.TextField(help_text='Author about', max_length=256, verbose_name='About', blank=True)),
                ('email', models.EmailField(help_text='Author email', max_length=32, verbose_name='Email', blank=True)),
                ('phone', models.CharField(help_text='Author phone', max_length=32, verbose_name='Phone', blank=True)),
                ('photo', sorl.thumbnail.fields.ImageField(blank=True, help_text='Author photo', max_length=256, upload_to=b'authors', validators=[system.core.models.validate_photo])),
            ],
            options={
                'verbose_name': 'Author',
                'verbose_name_plural': 'Authors',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('type', models.CharField(help_text='Contact type', max_length=16, verbose_name='Type', choices=[(b'email', b'E-mail'), (b'phone', b'Phone number'), (b'skype', b'Skype id')])),
                ('value', models.CharField(help_text='Contact value', max_length=32, verbose_name='Value')),
            ],
            options={
                'verbose_name': 'Contact',
                'verbose_name_plural': 'Contacts',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='CurricularPractice',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Curricular practice name', max_length=32, verbose_name='Name')),
            ],
            options={
                'verbose_name': 'Curricular practice',
                'verbose_name_plural': 'Curricular practices',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Discipline',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Discipline name', unique=True, max_length=32, verbose_name='Discipline')),
            ],
            options={
                'verbose_name': 'Discipline',
                'verbose_name_plural': 'Disciplines',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Editorial',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Editorial name', unique=True, max_length=32, verbose_name='Editorial')),
            ],
            options={
                'verbose_name': 'Editorial',
                'verbose_name_plural': 'Editorials',
            },
            bases=(models.Model,),
        ),
        # Event/Notice/Photogallery/Podcast/VideoLibrary all duplicate the
        # same base columns (type/views/comments/likes/active/featured/date/
        # date_modified/title/subtitle/body) — presumably flattened from a
        # shared abstract base model.
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('type', models.CharField(max_length=16, choices=[(b'notice', b'Notice'), (b'photogallery', b'Photogallery'), (b'video library', b'Video Library'), (b'podcast', b'Podcast')])),
                ('views', models.IntegerField(default=0, max_length=32)),
                ('comments', models.IntegerField(default=0, max_length=32)),
                ('likes', models.IntegerField(default=0, max_length=32)),
                ('active', models.BooleanField(default=True, help_text='Is this active?', verbose_name='Active')),
                ('featured', models.BooleanField(default=True, help_text='Is this in featured session?', verbose_name='Featured')),
                ('date', models.DateField(help_text='Date', verbose_name='Date')),
                ('date_modified', models.DateTimeField(auto_now=True, verbose_name='Last modified')),
                ('title', models.CharField(help_text='Title', max_length=64, verbose_name='Title')),
                ('subtitle', models.CharField(help_text='Subtitle', max_length=128, verbose_name='Subtitle', blank=True)),
                ('body', models.TextField(help_text='Body', max_length=10240, verbose_name='Body')),
            ],
            options={
                'verbose_name': 'Event',
                'verbose_name_plural': 'Events',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Member',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Member name', max_length=64, verbose_name='Name')),
                ('about', models.TextField(help_text='Member about', max_length=256, verbose_name='About', blank=True)),
                ('email', models.EmailField(help_text='Member email', max_length=32, verbose_name='Email', blank=True)),
                ('phone', models.CharField(help_text='Member phone', max_length=32, verbose_name='Phone', blank=True)),
                ('photo', sorl.thumbnail.fields.ImageField(help_text='Member photo', max_length=256, upload_to=b'members', validators=[system.core.models.validate_photo])),
            ],
            options={
                'verbose_name': 'Member',
                'verbose_name_plural': 'Members',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Notice',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('type', models.CharField(max_length=16, choices=[(b'notice', b'Notice'), (b'photogallery', b'Photogallery'), (b'video library', b'Video Library'), (b'podcast', b'Podcast')])),
                ('views', models.IntegerField(default=0, max_length=32)),
                ('comments', models.IntegerField(default=0, max_length=32)),
                ('likes', models.IntegerField(default=0, max_length=32)),
                ('active', models.BooleanField(default=True, help_text='Is this active?', verbose_name='Active')),
                ('featured', models.BooleanField(default=True, help_text='Is this in featured session?', verbose_name='Featured')),
                ('date', models.DateField(help_text='Date', verbose_name='Date')),
                ('date_modified', models.DateTimeField(auto_now=True, verbose_name='Last modified')),
                ('title', models.CharField(help_text='Title', max_length=64, verbose_name='Title')),
                ('subtitle', models.CharField(help_text='Subtitle', max_length=128, verbose_name='Subtitle', blank=True)),
                ('body', models.TextField(help_text='Body', max_length=10240, verbose_name='Body')),
                ('photo', sorl.thumbnail.fields.ImageField(help_text='Notice photo', upload_to=b'news', max_length=256, verbose_name='Photo', validators=[system.core.models.validate_photo])),
                ('author', models.ForeignKey(verbose_name='Author', to='core.Author', help_text='Author')),
                ('curricular_practice', models.ForeignKey(blank=True, to='core.CurricularPractice', help_text='Notice curricular practice', null=True, verbose_name='Curricular practice')),
                ('discipline', models.ForeignKey(blank=True, to='core.Discipline', help_text='Notice discipline', null=True, verbose_name='Discipline')),
                ('editorial', models.ForeignKey(verbose_name='Editorial', to='core.Editorial', help_text='Notice editorial')),
            ],
            options={
                'verbose_name': 'Notice',
                'verbose_name_plural': 'Notices',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Photo',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(help_text='Photo title', max_length=128, verbose_name='Title')),
                ('photo', sorl.thumbnail.fields.ImageField(help_text='Photo', max_length=256, upload_to=b'photogallery', validators=[system.core.models.validate_photo])),
            ],
            options={
                'verbose_name': 'Photo',
                'verbose_name_plural': 'Photos',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Photogallery',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('type', models.CharField(max_length=16, choices=[(b'notice', b'Notice'), (b'photogallery', b'Photogallery'), (b'video library', b'Video Library'), (b'podcast', b'Podcast')])),
                ('views', models.IntegerField(default=0, max_length=32)),
                ('comments', models.IntegerField(default=0, max_length=32)),
                ('likes', models.IntegerField(default=0, max_length=32)),
                ('active', models.BooleanField(default=True, help_text='Is this active?', verbose_name='Active')),
                ('featured', models.BooleanField(default=True, help_text='Is this in featured session?', verbose_name='Featured')),
                ('date', models.DateField(help_text='Date', verbose_name='Date')),
                ('date_modified', models.DateTimeField(auto_now=True, verbose_name='Last modified')),
                ('title', models.CharField(help_text='Title', max_length=64, verbose_name='Title')),
                ('subtitle', models.CharField(help_text='Subtitle', max_length=128, verbose_name='Subtitle', blank=True)),
                ('body', models.TextField(help_text='Body', max_length=10240, verbose_name='Body')),
                ('curricular_practice', models.ForeignKey(blank=True, to='core.CurricularPractice', help_text='Photogallery curricular practice', null=True, verbose_name='Curricular practice')),
                ('discipline', models.ForeignKey(blank=True, to='core.Discipline', help_text='Photogallery discipline', null=True, verbose_name='Discipline')),
                ('editorial', models.ForeignKey(verbose_name='Editorial', to='core.Editorial', help_text='Photogallery editorial')),
            ],
            options={
                'verbose_name': 'Photogallery',
                'verbose_name_plural': 'Photogalleries',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Photographer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Photographer name', max_length=64, verbose_name='Name')),
                ('about', models.TextField(help_text='Photographer about', max_length=256, verbose_name='About', blank=True)),
                ('email', models.EmailField(help_text='Photographer email', max_length=32, verbose_name='Email', blank=True)),
                ('phone', models.CharField(help_text='Photographer phone', max_length=32, verbose_name='Phone', blank=True)),
                ('photo', sorl.thumbnail.fields.ImageField(blank=True, help_text='Photographer photo', max_length=256, upload_to=b'photographers', validators=[system.core.models.validate_photo])),
            ],
            options={
                'verbose_name': 'Photographer',
                'verbose_name_plural': 'Photographers',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Podcast',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('type', models.CharField(max_length=16, choices=[(b'notice', b'Notice'), (b'photogallery', b'Photogallery'), (b'video library', b'Video Library'), (b'podcast', b'Podcast')])),
                ('views', models.IntegerField(default=0, max_length=32)),
                ('comments', models.IntegerField(default=0, max_length=32)),
                ('likes', models.IntegerField(default=0, max_length=32)),
                ('active', models.BooleanField(default=True, help_text='Is this active?', verbose_name='Active')),
                ('featured', models.BooleanField(default=True, help_text='Is this in featured session?', verbose_name='Featured')),
                ('date', models.DateField(help_text='Date', verbose_name='Date')),
                ('date_modified', models.DateTimeField(auto_now=True, verbose_name='Last modified')),
                ('title', models.CharField(help_text='Title', max_length=64, verbose_name='Title')),
                ('subtitle', models.CharField(help_text='Subtitle', max_length=128, verbose_name='Subtitle', blank=True)),
                ('body', models.TextField(help_text='Body', max_length=10240, verbose_name='Body')),
                ('download_url', models.URLField(help_text='Podcast download url', max_length=128, verbose_name='Download URL', blank=True)),
            ],
            options={
                'verbose_name': 'Podcast',
                'verbose_name_plural': 'Podcasts',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Role',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(help_text='Role name', max_length=64, verbose_name='Role')),
            ],
            options={
                'verbose_name': 'Role',
                'verbose_name_plural': 'Roles',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='SocialNetwork',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('type', models.CharField(help_text='Social network type', max_length=16, verbose_name='Type', choices=[(b'facebook', b'Facebook'), (b'twitter', b'Twitter'), (b'instagram', b'Instagram'), (b'googleplus', b'Google +')])),
                ('url', models.CharField(help_text='Social network url', max_length=32, verbose_name='URL')),
            ],
            options={
                'verbose_name': 'Social network',
                'verbose_name_plural': 'Social networks',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Video',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('youtube', models.CharField(help_text='Ex: umMIcZODm2k of http://www.youtube.com/embed/umMIcZODm2k', max_length=32, verbose_name='Youtube code', blank=True)),
                ('vimeo', models.CharField(help_text='Ex: 85228844 of http://player.vimeo.com/video/85228844', max_length=32, verbose_name='Vimeo code', blank=True)),
            ],
            options={
                'verbose_name': 'Video',
                'verbose_name_plural': 'Videos',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='VideoLibrary',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('type', models.CharField(max_length=16, choices=[(b'notice', b'Notice'), (b'photogallery', b'Photogallery'), (b'video library', b'Video Library'), (b'podcast', b'Podcast')])),
                ('views', models.IntegerField(default=0, max_length=32)),
                ('comments', models.IntegerField(default=0, max_length=32)),
                ('likes', models.IntegerField(default=0, max_length=32)),
                ('active', models.BooleanField(default=True, help_text='Is this active?', verbose_name='Active')),
                ('featured', models.BooleanField(default=True, help_text='Is this in featured session?', verbose_name='Featured')),
                ('date', models.DateField(help_text='Date', verbose_name='Date')),
                ('date_modified', models.DateTimeField(auto_now=True, verbose_name='Last modified')),
                ('title', models.CharField(help_text='Title', max_length=64, verbose_name='Title')),
                ('subtitle', models.CharField(help_text='Subtitle', max_length=128, verbose_name='Subtitle', blank=True)),
                ('body', models.TextField(help_text='Body', max_length=10240, verbose_name='Body')),
                ('curricular_practice', models.ForeignKey(blank=True, to='core.CurricularPractice', help_text='Video library curricular practice', null=True, verbose_name='Curricular practice')),
                ('discipline', models.ForeignKey(blank=True, to='core.Discipline', help_text='Video library discipline', null=True, verbose_name='Discipline')),
                ('editorial', models.ForeignKey(verbose_name='Editorial', to='core.Editorial', help_text='Video library editorial')),
            ],
            options={
                'verbose_name': 'Video library',
                'verbose_name_plural': 'Video libraries',
            },
            bases=(models.Model,),
        ),
        # Relations added after model creation (targets must already exist).
        migrations.AddField(
            model_name='video',
            name='video_library',
            field=models.ForeignKey(related_name=b'videos', to='core.VideoLibrary'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='photo',
            name='photogallery',
            field=models.ForeignKey(related_name=b'photos', to='core.Photogallery'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='photo',
            name='photographer',
            field=models.ForeignKey(related_name=b'photo_photographer', verbose_name='Photographer', to='core.Photographer', help_text='Notice photographer'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='notice',
            name='photographer',
            field=models.ForeignKey(verbose_name='Photographer', to='core.Photographer', help_text='Notice photographer'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='member',
            name='role',
            field=models.ForeignKey(verbose_name='Role', to='core.Role', help_text='Member role'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='event',
            name='notices',
            field=models.ManyToManyField(help_text='Event notices', to='core.Notice', verbose_name='Notice', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='event',
            name='photogalleries',
            field=models.ManyToManyField(help_text='Event photogalleries', to='core.Photogallery', verbose_name='Photogalleries', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='event',
            name='podcasts',
            field=models.ManyToManyField(help_text='Event podcasts', to='core.Podcast', verbose_name='Podcasts', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='event',
            name='video_libraries',
            field=models.ManyToManyField(help_text='Event video libraries', to='core.VideoLibrary', verbose_name='Video libraries', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='curricularpractice',
            name='discipline',
            field=models.ForeignKey(related_name=b'curricular_practices', verbose_name='Discipline', to='core.Discipline', help_text='Curricular practice discipline'),
            preserve_default=True,
        ),
        # A curricular practice name must be unique within its discipline.
        migrations.AlterUniqueTogether(
            name='curricularpractice',
            unique_together=set([('name', 'discipline')]),
        ),
    ]
| StarcoderdataPython |
1816403 | <filename>utils.py
from random import uniform, randint
from abc import abstractmethod, ABC
from time import time, perf_counter
import matplotlib.pyplot as plt
from seed_random import IsolatedBernoulliArm
from permutation import IsolatedPermutation
class Timer:
    """Accumulating wall-clock timer driven by ``with`` statements.

    Each ``with timer:`` block adds its elapsed time to a running total,
    so several non-nested blocks can be timed with one instance:

        timer = Timer()
        with timer:
            sleep(20)
        print("Execution time (s): {}".format(timer.execution_time_in_seconds()))

    Warning: nested blocks are not supported — re-entering resets the
    start timestamp, so only the innermost block would be measured.
    """

    def __init__(self):
        # Sum of all completed with-block durations, in seconds.
        self.__total_execution_time: float = 0
        # True while inside a with-block.
        self.__running: bool = False
        # perf_counter() value captured on __enter__.
        self.__start: float = 0

    def __enter__(self):
        # Bug fix: the original assigned a misspelled attribute
        # (__has_start), so the __running flag was never set.
        self.__running = True
        self.__start = perf_counter()
        # Bug fix: return self so "with Timer() as t" binds the timer,
        # not None.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        end = perf_counter()
        self.__total_execution_time += end - self.__start
        self.__running = False
        self.__start = 0

    def execution_time_in_seconds(self) -> float:
        """Return the total time accumulated so far, in seconds.

        Returns:
            The sum of the durations of every completed ``with`` block.
        """
        return self.__total_execution_time
def randint_if_none( value ):
    """Return *value* unchanged, or a random int in [10, 1000] when it is None."""
    return randint(10, 1000) if value is None else value
def parse_bench_log( filename : str ):
    """Parse a '#'-separated bench log of "node:cpu_time" records.

    Returns a dict mapping node name to CPU usage time (float).
    """
    with open(filename) as handle:
        # Flatten the whole file into one newline-free string, then split
        # on the record separator.
        entries = "".join(handle.readlines()).replace("\n", "").split("#")
    timings = {}
    for entry in entries:
        if not entry:
            continue
        node, cpu_usage_time = entry.split(":")
        timings[node] = float(cpu_usage_time)
    return timings
class BernoulliArm:
    """Bandit arm following a Bernoulli distribution."""

    def __init__(self, p):
        """Store the success probability.

        :param p: Probability of the arm yielding a 1.
        """
        self.p = p

    def pull(self):
        """Sample the arm: 1 with probability p, otherwise 0."""
        return 1 if uniform(0, 1) < self.p else 0
class BanditsAlgorithm:
    """Minimal interface for a multi-armed bandits algorithm.

    NOTE(review): @abstractmethod only takes effect on ABCMeta-based
    classes (e.g. ones inheriting abc.ABC); as written, subclasses are
    not actually forced to implement play(). Left unchanged because
    switching to ABC could break existing concrete subclasses.
    """
    @abstractmethod
    def play( self, budget ) -> int: pass
class DebugBanditsAlgorithm(BanditsAlgorithm):
    """Bandits algorithm that additionally exposes per-turn traces.

    Implementations record which arm was pulled and which reward was
    obtained at every turn, plus the true arm probabilities, so a run
    can be inspected and plotted after play().
    """
    # Arm object pulled at each turn, in order (length == budget played).
    @abstractmethod
    def pulled_arm_at_each_turn(self) -> [BernoulliArm] : pass
    # Reward obtained at each turn, in order (length == budget played).
    @abstractmethod
    def rewards_at_each_turn(self) -> [int]: pass
    # True success probability of every arm.
    @abstractmethod
    def get_probabilities(self) -> [float]: pass
class StandardBanditsAlgorithm(BanditsAlgorithm, ABC):
    """Abstract base holding K isolated Bernoulli arms built from probabilities."""

    def __init__(self, arms_probabilities: [float], reward_seed = 123):
        """Create one IsolatedBernoulliArm per probability, sharing reward_seed."""
        self.K = len(arms_probabilities)
        self.arms = []
        for probability in arms_probabilities:
            self.arms.append(IsolatedBernoulliArm(probability, reward_seed))

    def get_arm_by_index(self, arm_index) -> IsolatedBernoulliArm:
        """Return the arm stored at position *arm_index*."""
        return self.arms[arm_index]
def debug_algorithm( budget : int, algorithms : [DebugBanditsAlgorithm] ):
    """Play each algorithm for *budget* turns and plot how often it pulls the best arm.

    Accepts a single algorithm or a list. For each one: runs play(budget),
    sanity-checks the recorded traces, then plots the cumulative fraction
    of turns spent on the best arm. Displays the figure via plt.show().
    """
    # Allow passing a single algorithm instead of a list.
    if not isinstance(algorithms, list):
        algorithms = [algorithms]
    for algorithm in algorithms:
        print("Debugging ", type(algorithm))
        # NOTE(review): start/end are measured but never reported — dead
        # timing code, or a missing print of (end - start)?
        start = time()
        algorithm.play( budget )
        end = time()
        rewards_at_each_turn = algorithm.rewards_at_each_turn()
        arm_at_each_turn = algorithm.pulled_arm_at_each_turn()
        # The traces must contain exactly one entry per turn played.
        assert len(arm_at_each_turn) == budget, "Pulled arm at each turn has not the same length that budget: {} instead of {}".format(len(arm_at_each_turn), budget)
        assert len(rewards_at_each_turn) == budget, "Rewards at each turn has not the same length that budget: {} instread of {}".format(len(rewards_at_each_turn), budget)
        # Computing regret at each turn.
        # Starting by searching the best arm's probability.
        probs = algorithm.get_probabilities()
        prob_max = max(probs)
        # NOTE(review): total_regret and regret_at_each_turn are computed
        # but unused (their plot is commented out below).
        optimal_rewards = int(budget * prob_max)
        total_regret = optimal_rewards - sum(rewards_at_each_turn)
        regret_at_each_turn = []
        for i in range( budget ):
            regret_at_each_turn.append( i * prob_max - sum(rewards_at_each_turn[:(i+1)]) )
        #plt.plot(regret_at_each_turn, label=type(algorithm).__name__)
        # Cumulative fraction of turns where the pulled arm was a best arm
        # (an arm whose probability equals prob_max).
        best_arm_pulling_percentage_at_each_turn = []
        best_arm_pulling_number = 0
        for i in range(budget):
            pulled_arm_at_turn_i = arm_at_each_turn[i]
            if pulled_arm_at_turn_i.p == prob_max:
                best_arm_pulling_number += 1
            best_arm_pulling_percentage_at_each_turn.append(best_arm_pulling_number / (i + 1))
        plt.plot( best_arm_pulling_percentage_at_each_turn, label=type(algorithm).__name__ )
    plt.legend()
    plt.show()
def permute_and_max( l, perm_seed : int, turn : int, key = lambda x: x ):
    """Deterministically permute *l* (seeded by perm_seed and turn) and return
    the element maximizing key(); ties go to the earliest maximal element in
    the permuted order. Raises IndexError on an empty sequence."""
    shuffled = IsolatedPermutation.new(len(l), perm_seed, turn).permute(l)
    best = shuffled[0]
    best_key = key(best)
    for candidate in shuffled[1:]:
        candidate_key = key(candidate)
        if candidate_key > best_key:
            best, best_key = candidate, candidate_key
    return best
def read_arms_from_file( filename ):
    """Read arm probabilities from *filename*: one float per line, first
    (header) line skipped."""
    with open(filename) as handle:
        data_lines = handle.readlines()[1:]
    return [float(text) for text in data_lines]
11225186 | import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State
import pandas as pd
import dash_table
import json
import numpy as np
# from utils.plot_geojson import dart_plot
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
# app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# df_players = pd.read_csv("players_New.csv")
# app.layout = html.Div([
# dash_table.DataTable(
# id='List_Of_Existing_Players',
# columns=[{
# 'name': df_players.columns[i],
# 'id': df_players.columns[i],
# # 'renamable': True
# } for i in range(0,4)],
# data = df_players.to_dict('records'),
# page_action='none',
# style_table={'height': '300px', 'overflowY': 'auto'},
# style_cell={'textAlign': 'center'},
# #style_table={
# # 'maxHeight': '50ex',
# # 'width': '100%',
# # 'minWidth': '100%',
# # "horizontalAlign": "bottom"
# # },
# style_header={
# 'backgroundColor': 'rgb(230, 230, 230)',
# 'fontWeight': 'bold'
# },
# editable = False,
# selected_rows=[],
# row_selectable="multi",
# # filter_action="native", # allow filtering of data by user ('native') or not ('none')
# ),
# ### Add a player to the above table or launch the game!
# dcc.Input(
# id = 'new-player',
# placeholder = '<NAME>',
# type = 'text',
# value = ''
# ),
# html.Button('Add Player', id='editing-rows-button', n_clicks=0),
# html.Button('Lancer la partie', id='Start_Game', n_clicks=0)
# ])
# ### Callback to add a player to the list of players. It is called upon when you double click on 'add player', checks that the players name doesn't already exists, then creates a file for that player and adds him to the genreal available player file. Output brings up to data the list of player table.
# @app.callback(
# Output('List_Of_Existing_Players', 'data'),
# Input('editing-rows-button', 'n_clicks'),
# State('List_Of_Existing_Players', 'data'),
# State('List_Of_Existing_Players', 'columns'),
# State('new-player','value')
# )
# def add_row(n_clicks, rows, columns, New_Player_Name):
# Name_Exists = 0
# if n_clicks > 1 :
# n_clicks = 0 # need to double click
# for i in range (0, len(rows)):
# if New_Player_Name == rows[i]['name']:
# Name_Exists = 1
# if Name_Exists == 0:
# rows.append({'name': New_Player_Name, '# Partie': 0, '% Victoire': None, 'Touche / Tour': None})
# pd.DataFrame(rows).to_csv("ressources/players_New.csv",index = False)
# Player_file = pd.DataFrame(None,columns=['Tour','Fleche','Valeur', 'Coef', 'Degats','Touche'])
# Player_file.to_csv('ressources/Player_Info/{}.csv'.format(New_Player_Name),index = False)
# return rows
def create_ap(app, room_number):
    """Build the player-selection layout for a game room and register its callback.

    Returns the (app, layout) pair; *room_number* is used to namespace the
    component ids so several rooms can coexist in one Dash app.
    """
    # app = dash.Dash(__name__,
    #                 external_stylesheets=external_stylesheets,
    #                 url_base_pathname=url_base)
    # Existing players loaded from the shared CSV registry.
    df_players = pd.read_csv("ressources/players_New.csv")
    layout = html.Div([
        dash_table.DataTable(
            id=f'List_Of_Existing_Players_{room_number}',
            # Only the first four columns of the registry are shown.
            columns=[{
                'name': df_players.columns[i],
                'id': df_players.columns[i],
                # 'renamable': True
            } for i in range(0,4)],
            data = df_players.to_dict('records'),
            page_action='none',
            style_table={'height': '300px', 'overflowY': 'auto'},
            style_cell={'textAlign': 'center'},
            #style_table={
            #    'maxHeight': '50ex',
            #    'width': '100%',
            #    'minWidth': '100%',
            #    "horizontalAlign": "bottom"
            #    },
            style_header={
                'backgroundColor': 'rgb(230, 230, 230)',
                'fontWeight': 'bold'
            },
            editable = False,
            selected_rows=[],
            row_selectable="multi",
            # filter_action="native", # allow filtering of data by user ('native') or not ('none')
        ),
        ### Add a player to the above table or launch the game!
        dcc.Input(
            id = f'new-player_{room_number}',
            placeholder = '<NAME>',
            type = 'text',
            value = ''
        ),
        html.Button('Add Player', id=f'editing-rows-button_{room_number}', n_clicks=0),
        html.Button("start", id=f'Start_Game_{room_number}', n_clicks=0)
    ])
    ### Callback to add a player to the list of players. It is called upon when you double click on 'add player', checks that the players name doesn't already exists, then creates a file for that player and adds him to the genreal available player file. Output brings up to data the list of player table.
    @app.callback(
        Output(f'List_Of_Existing_Players_{room_number}', 'data'),
        Input(f'editing-rows-button_{room_number}', 'n_clicks'),
        State(f'List_Of_Existing_Players_{room_number}', 'data'),
        State(f'List_Of_Existing_Players_{room_number}', 'columns'),
        State(f'new-player_{room_number}','value')
    )
    def add_row(n_clicks, rows, columns, New_Player_Name):
        # Append New_Player_Name to the registry (CSV + per-player file)
        # unless the name already exists. Requires n_clicks > 1, i.e. a
        # "double click" on the Add Player button.
        #
        # NOTE(review): an empty input value ('' is the default) is also
        # accepted as a player name — confirm whether blank names should
        # be rejected here.
        Name_Exists = 0
        if n_clicks > 1 :
            n_clicks = 0 # need to double click
            for i in range(0, len(rows)):
                if New_Player_Name == rows[i]['name']:
                    Name_Exists = 1
            if Name_Exists == 0:
                # Persist the new player in the shared registry and create
                # an empty per-player history file.
                rows.append({'name': New_Player_Name, '# Partie': 0, '% Victoire': None, 'Touche / Tour': None})
                pd.DataFrame(rows).to_csv("ressources/players_New.csv",index = False)
                Player_file = pd.DataFrame(None, columns=['Tour','Fleche','Valeur', 'Coef', 'Degats','Touche'])
                Player_file.to_csv('ressources/Player_Info/{}.csv'.format(New_Player_Name), index = False)
        return rows
    return app, layout
import argparse

if __name__ == '__main__':
    # Command-line entry point: parse server options and launch the app.
    parser = argparse.ArgumentParser()
    parser.add_argument("--port", type=int,
                        help="port number")
    parser.add_argument("--url_base",
                        help="base")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="to print information")
    parser.add_argument("--param")
    args = parser.parse_args()

    # NOTE(review): create_ap(app, room_number) returns an (app, layout)
    # tuple, but it is called here with (param, url_base) and the tuple is
    # then used as an app — app.run_server will fail. This looks like a
    # leftover from an older create_ap(param, url_base) signature; confirm
    # the intended call.
    app = create_ap(args.param, args.url_base)
    app.run_server(debug=False, port = args.port)
4812396 | <reponame>vartagg/rempycs
from rempycs import *
| StarcoderdataPython |
1751040 | <reponame>JoanAzpeitia/lp_sg
# Copyright (c) 2013 Shotgun Software Inc.
#
# CONFIDENTIAL AND PROPRIETARY
#
# This work is provided "AS IS" and subject to the Shotgun Pipeline Toolkit
# Source Code License included in this distribution package. See LICENSE.
# By accessing, using, copying or modifying this work you indicate your
# agreement to the Shotgun Pipeline Toolkit Source Code License. All rights
# not expressly granted therein are reserved by Shotgun Software Inc.
import os
import sys
from tank.platform.qt import QtCore, QtGui
class ThumbnailLabel(QtGui.QLabel):
    """QLabel that displays pixmaps scaled and centred on a fixed 60x40 canvas."""

    def __init__(self, parent=None):
        QtGui.QLabel.__init__(self, parent)

    def setPixmap(self, pixmap):
        """Scale *pixmap* down to at most 60x40 (keeping aspect ratio),
        centre it on a 60x40 transparent canvas and display the result."""
        # scale the pixmap down to fit within 60x40
        if pixmap.height() > 40 or pixmap.width() > 60:
            pixmap = pixmap.scaled(QtCore.QSize(60, 40), QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)

        # now slap it on top of a 60x40 transparent canvas
        rendered_pixmap = QtGui.QPixmap(60, 40)
        rendered_pixmap.fill(QtCore.Qt.transparent)

        # Bug fix: use floor division — under Python 3 "/" yields floats,
        # and QPainter.drawPixmap requires integer offsets. (Identical
        # result under Python 2, where "/" on ints already truncated.)
        w_offset = (60 - pixmap.width()) // 2
        h_offset = (40 - pixmap.height()) // 2

        painter = QtGui.QPainter(rendered_pixmap)
        painter.drawPixmap(w_offset, h_offset, pixmap)
        painter.end()

        # and finally assign it
        QtGui.QLabel.setPixmap(self, rendered_pixmap)
| StarcoderdataPython |
1730697 | #!/usr/bin/env python3
"""
Author : <NAME> <<EMAIL>>
Date : 2021-10-18
Purpose: Translates IUPAC codes
"""
import argparse
import sys
# --------------------------------------------------
def get_args():
    """Parse and return the command-line arguments."""
    arg_parser = argparse.ArgumentParser(
        description='Rock the Casbah',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    # One or more sequences given as positional arguments.
    arg_parser.add_argument(
        'SEQ', metavar='str', nargs='+', type=str,
        help='Sequences to translate')

    # Optional output file; defaults to stdout.
    arg_parser.add_argument(
        '-o', '--outfile', metavar='FILE',
        type=argparse.FileType('wt'), default=sys.stdout,
        help='specificy output file')

    return arg_parser.parse_args()
# --------------------------------------------------
# --------------------------------------------------
def main():
    """Translate each sequence, expanding IUPAC codes into bracketed base sets."""
    args = get_args()

    # IUPAC ambiguity code -> set of concrete bases it stands for.
    codes = {
        "R": "AG",
        "Y": "CT",
        "S": "GC",
        "W": "AT",
        "K": "GT",
        "M": "AC",
        "B": "CGT",
        "D": "AGT",
        "H": "ACT",
        "V": "ACG",
        "N": "ACGT",
    }

    for sequence in args.SEQ:
        # Build the expansion piecewise and join once (avoids quadratic +=).
        pieces = []
        for base in sequence:
            if base in codes:
                pieces.append("[" + codes[base] + "]")
            else:
                pieces.append(base)
        print(sequence, "".join(pieces), file=args.outfile)

    if args.outfile is not sys.stdout:
        print("Done, see output in \"" + str(args.outfile.name) + "\"")


# --------------------------------------------------
if __name__ == '__main__':
    main()
| StarcoderdataPython |
def get_min_max(ints):
    """
    Return a tuple(min, max) out of list of unsorted integers.

    Args:
       ints(list): list of integers containing one or more integers

    Non-list input and empty lists both yield (None, None).
    """
    # Guard clauses: non-list input or empty list.
    if not isinstance(ints, list) or not ints:
        return None, None

    smallest = largest = ints[0]
    for value in ints[1:]:
        if value < smallest:
            smallest = value
        elif value > largest:
            largest = value
    return smallest, largest
# Example Test Case of Ten Integers
import random

# Test case 1: random int array
l = [i for i in range(0, 10)]  # a list containing 0 - 9
# NOTE(review): the list is printed before shuffling, so the "random list"
# shown is actually still sorted.
print(f"Test case 1 - random list of int: {l}")
random.shuffle(l)
# Should print "Pass" as the result should be (0, 9)
print ("Pass" if ((0, 9) == get_min_max(l)) else "Fail")

# Test case 2: empty array
print(f"Test case 2 - empty array")
# Should print "Pass" as the result should be (None, None)
print ("Pass" if ((None, None) == get_min_max([])) else "Fail")

# Test case 3: array with single item
print(f"Test case 3 - array with single item")
# Should print "Pass" as the result should be (1, 1)
print ("Pass" if ((1, 1) == get_min_max([1])) else "Fail")

# Test case 4: non array input
print(f"Test case 4 - non array input")
# Should print "Pass" as the result should be (None, None)
print ("Pass" if ((None, None) == get_min_max(10)) else "Fail")
| StarcoderdataPython |
229659 | <reponame>WilliamHoltam/Financial-Derivatives-Coursework
"""
Created on Wed Feb 21 10:37:33 2018
@author: <NAME>
"""
import numpy as np
import pandas as pd
import pylab as plt
from scipy.stats import norm, probplot
from matplotlib.ticker import FuncFormatter
# Column names/types describing the price-history CSV layout.
# NOTE(review): `headers` is defined but never passed to read_csv (header=0
# reuses the file's own first row) -- confirm whether it is still needed.
headers = ['Date', 'Open', 'High', 'Low', 'Close', 'Adj Close', 'Volume']
dtypes = {'Date': 'str', 'Open': 'float', 'High': 'float', 'Low': 'float', 'Close': 'float', 'Adj Close': 'float', 'Volume': 'int'}
parse_dates = ['Date']
# Load the DNL.L daily price history from a Yahoo-Finance-style CSV.
df = pd.read_csv('DNL.L.csv',
                 delimiter=',',
                 header=0,
                 index_col=None,
                 dtype=dtypes,
                 parse_dates=parse_dates)
print(df.info())
# Work with the adjusted close as a plain Python list.
adj_close = df.loc[:,"Adj Close"]
adj_close = adj_close.values.tolist()
# Simple daily returns, seeded with a leading 0 placeholder.
# NOTE(review): the range stops at len-2, so the final day's return is
# never computed -- confirm the off-by-one is intended.
daily_returns=[0]
for i in np.arange(1,len(adj_close)-1):
    returns = (adj_close[i]-adj_close[i-1])/adj_close[i-1]
    daily_returns.append(returns)
plt.hist(adj_close, bins=30)
plt.show()
# Fit a normal distribution to the daily returns (mu = mean, std = stdev).
mu, std = norm.fit(daily_returns)
print(norm.fit(daily_returns))
print(mu)
# --- Daily returns: histogram with fitted-normal overlay, then Q-Q plot ---
fig, axes = plt.subplots(ncols=1, sharey=True)
fig = plt.hist(daily_returns, bins=100, density=True)  # rebinds fig to the hist output
axes.xaxis.set_major_formatter(FuncFormatter(lambda x, _: '{:.0%}'.format(x)))
xmin,xmax = plt.xlim()
plt.xlim(xmin,xmax)
x = np.linspace(xmin,xmax,100)
p = norm.pdf(x,mu,std)
plt.plot(x,p,'k',linewidth=2)
title = "Fit results: mu = %.5f, std = %.3f" % (mu, std)
plt.title(title)
plt.show()
probplot(daily_returns, plot=plt)
plt.xlim(-4,4)
plt.ylim(-0.3,0.15)
plt.show()
# --- Five-day sampling ---
# NOTE(review): this computes the 1-day return sampled at every 5th row
# ((p[i]-p[i-1])/p[i-1]), not a true 5-day return (which would use i-5).
five_day_returns = [0]
for i in np.arange(4,len(adj_close)-1,5):
    returns = (adj_close[i]-adj_close[i-1]) / adj_close[i-1]
    five_day_returns.append(returns)
fig, axes = plt.subplots(ncols=1, sharey=True)
fig = plt.hist(five_day_returns, bins=100, density=True)
axes.xaxis.set_major_formatter(FuncFormatter(lambda x, _: '{:.0%}'.format(x)))
xmin,xmax = plt.xlim()
plt.xlim(xmin,xmax)
x = np.linspace(xmin,xmax,100)
# NOTE(review): mu/std here are still the *daily* fit -- the overlay is not
# refitted to the five-day sample.
p = norm.pdf(x,mu,std)
plt.plot(x,p,'k',linewidth=2)
title = "Fit results: mu = %.5f, std = %.3f" % (mu, std)
plt.title(title)
plt.show()
probplot(five_day_returns, plot=plt)
plt.xlim(-4,4)
plt.ylim(-0.3,0.15)
plt.show()
# --- Ten-day sampling (same caveats as the five-day section) ---
ten_day_returns = [0]
for i in np.arange(9,len(adj_close)-1,10):
    returns = (adj_close[i]-adj_close[i-1]) / adj_close[i-1]
    ten_day_returns.append(returns)
fig, axes = plt.subplots(ncols=1, sharey=True)
fig = plt.hist(ten_day_returns, bins=100, density=True)
axes.xaxis.set_major_formatter(FuncFormatter(lambda x, _: '{:.0%}'.format(x)))
xmin,xmax = plt.xlim()
plt.xlim(xmin,xmax)
x = np.linspace(xmin,xmax,100)
p = norm.pdf(x,mu,std)
plt.plot(x,p,'k',linewidth=2)
title = "Fit results: mu = %.5f, std = %.3f" % (mu, std)
plt.title(title)
plt.show()
probplot(ten_day_returns, plot=plt)
plt.xlim(-4,4)
plt.ylim(-0.3,0.15)
plt.show()
# --- Generalized loop over daily/5-day/10-day samplings ---
# NOTE(review): the loop variable j (a label string) is immediately rebound
# to a fresh list, so list_label only controls the number of iterations;
# k indexes the parallel parameter lists.
k = 0
number_of_days = [1,4,9]
increment = [1,5,10]
increment_label = ["Daily Returns", "Five Day Returns", "Ten Day Returns"]
list_label = ["daily_returns", "five_day_returns", "ten_day_returns"]
for j in list_label:
    j = [0]
    for i in np.arange(number_of_days[k],len(adj_close)-1,increment[k]):
        returns = (adj_close[i]-adj_close[i-1]) / adj_close[i-1]
        j.append(returns)
    # Unlike the hard-coded sections above, mu/std ARE refitted per sample.
    mu, std = norm.fit(j)
    fig, axes = plt.subplots(ncols=1, sharey=True)
    fig = plt.hist(j, bins=100, density=True)
    axes.xaxis.set_major_formatter(FuncFormatter(lambda x, _: '{:.0%}'.format(x)))
    xmin,xmax = plt.xlim()
    plt.xlim(xmin,xmax)
    x = np.linspace(xmin,xmax,100)
    p = norm.pdf(x,mu,std)
    plt.plot(x,p,'k',linewidth=2) # This isn't correct but it's a start
    title = "Fit results: mu = %.5f, std = %.3f" % (mu, std)
    plt.title(title)
    plt.show()
    probplot(j, plot=plt)
    plt.title("Probability Plot of " + increment_label[k])
    plt.xlim(-4,4)
    plt.ylim(-0.3,0.15)
    plt.show()
    k += 1
| StarcoderdataPython |
40953 | <reponame>loghmanb/daily-coding-problem<filename>google_gas_station.py
'''
Gas Station
Asked in: Bloomberg, Google, DE Shaw, Amazon, Flipkart
Given two integer arrays A and B of size N.
There are N gas stations along a circular route, where the amount of gas at station i is A[i].
You have a car with an unlimited gas tank and it costs B[i] of gas to travel from station i
to its next station (i+1). You begin the journey with an empty tank at one of the gas stations.
Return the minimum starting gas station’s index if you can travel around the circuit once, otherwise return -1.
You can only travel in one direction. i to i+1, i+2, … n-1, 0, 1, 2.. Completing the circuit means starting at i and
ending up at i again.
Input Format
The first argument given is the integer array A.
The second argument given is the integer array B.
Output Format
Return the minimum starting gas station's index if you can travel around the circuit once, otherwise return -1.
For Example
Input 1:
A = [1, 2]
B = [2, 1]
Output 1:
1
Explanation 1:
If you start from index 0, you can fill in A[0] = 1 amount of gas. Now your tank has 1 unit of gas. But you need B[0] = 2 gas to travel to station 1.
If you start from index 1, you can fill in A[1] = 2 amount of gas. Now your tank has 2 units of gas. You need B[1] = 1 gas to get to station 0. So, you travel to station 0 and still have 1 unit of gas left over. You fill in A[0] = 1 unit of additional gas, making your current gas = 2. It costs you B[0] = 2 to get to station 1, which you do and complete the circuit.
Solution by interviewbit.com
'''
# @param gas  : tuple of integers (fuel available at each station)
# @param cost : tuple of integers (fuel needed to reach the next station)
# @return an integer: smallest feasible start index, or -1
def canCompleteCircuit(gas, cost):
    """Return the minimum start index allowing a full circuit, else -1."""
    surplus_total = 0   # net gas over the whole loop; >= 0 means a tour exists
    tank = 0            # gas held since the current candidate start
    candidate = 0       # start index still considered feasible
    for station in range(len(gas)):
        gain = gas[station] - cost[station]
        surplus_total += gain
        tank += gain
        if tank < 0:
            # Running dry here rules out every start up to this station.
            tank = 0
            candidate = station + 1
    if surplus_total >= 0:
        return candidate % len(gas)
    return -1
if __name__ == "__main__":
data = [
] | StarcoderdataPython |
class Sort():
    """Stateless list-sorting helpers; both return a NEW ascending list."""

    @staticmethod
    def bubble_sort(arr):
        """Sort *arr* (any iterable) ascending via bubble sort."""
        items = list(arr)
        n = len(items)
        if n <= 1:
            return items
        # After each outer pass the largest unsorted element has bubbled to
        # position `limit`, so the inner sweep shrinks by one each time.
        for limit in range(n - 1, 0, -1):
            for j in range(limit):
                if items[j] > items[j + 1]:
                    items[j], items[j + 1] = items[j + 1], items[j]
        return items

    @staticmethod
    def quick_sort(arr):
        """Sort *arr* (any iterable) ascending via three-way quicksort."""
        items = list(arr)
        if len(items) <= 1:
            return items
        pivot = items[0]
        below = [x for x in items if x < pivot]
        equal = [x for x in items if x == pivot]
        above = [x for x in items if x > pivot]
        return Sort.quick_sort(below) + equal + Sort.quick_sort(above)
1878282 | <reponame>pg-irc/pathways-backend
from drf_yasg2 import openapi, views
from rest_framework import permissions
def build_schema_view():
    """Return the drf-yasg2 schema view describing the Pathways HSDA API."""
    api_info = openapi.Info(
        title='Pathways HSDA',
        default_version='v1',
        description='PeaceGeeks implementation of OpenReferral Human Services HSDA',
        #terms_of_service='https://www.google.com/policies/terms/',
        contact=openapi.Contact(email='<EMAIL>'),
        license=openapi.License(name='MIT License'),
    )
    # Publicly accessible schema; no authentication required to read it.
    return views.get_schema_view(
        api_info,
        #validators=['flex', 'ssv'],
        public=True,
        permission_classes=(permissions.AllowAny,),
    )
| StarcoderdataPython |
4965113 | from __future__ import absolute_import
from __future__ import print_function
import numpy as np
from scipy.stats import sigmaclip
from astropy.io import fits
import os
from . import focasifu as fi
def MkBiasTemplate(filename, nsigma=4.0, rawdatadir='', overwrite=False,
                   outputdir='.'):
    """Create a 1-D bias template FITS file from a raw bias frame.

    Each column of the science array is sigma-clipped at +/- nsigma and
    averaged, giving one template value per column.  The output file name
    encodes the X binning factor and detector id of the input frame.
    """
    path = os.path.join(rawdatadir, filename)
    hdulist = fits.open(path)
    binfac1 = hdulist[0].header['BIN-FCT1'] # X direction on DS9
    binfac2 = hdulist[0].header['BIN-FCT2'] # Y direction on DS9 (read but unused below)
    detid = hdulist[0].header['DET-ID']
    scidata = hdulist[0].data
    hdulist.close()
    # Column-wise sigma-clipped mean over all rows.
    average1d = np.zeros(scidata.shape[1])
    for i in range(len(average1d)):
        clipped, low, upp = sigmaclip(scidata[:,i], low=nsigma, high=nsigma)
        average1d[i] = np.mean(clipped)
    outfilename = os.path.join(outputdir, 'bias_template'+str(binfac1)+str(detid)+'.fits')
    # NOTE(review): this existence check runs after all the clipping work;
    # hoisting it before the loop would skip the needless computation.
    if os.path.isfile(outfilename) and not overwrite:
        print(('File exists. '+outfilename))
        return
    hdu = fits.PrimaryHDU(data=average1d)
    hdulist = fits.HDUList([hdu])
    hdulist = fi.put_version(hdulist)
    hdulist.writeto(outfilename, overwrite=overwrite)
    print(('Bias template file was created. '+outfilename))
    return
def MkTwoBiasTemplate(filename, rawdatadir='', overwrite=False,
                      outputdir='.'):
    """Create bias templates for *filename* and for the next frame id.

    The companion frame name is derived from the input's FRAMEID header
    (same FCSA prefix, number + 1) -- presumably the second detector of
    the exposure pair.
    """
    MkBiasTemplate(filename, rawdatadir=rawdatadir, overwrite=overwrite,
                   outputdir=outputdir)
    path = os.path.join(rawdatadir, filename)
    basename = fits.getval(path, 'FRAMEID')
    filename2 = str('FCSA%08d.fits'%(int(basename[4:])+1))
    # BUG FIX: this previously called MkTwoBiasTemplate recursively, which
    # recursed forever over successive frame numbers (no base case); the
    # second frame only needs a single template.
    MkBiasTemplate(filename2, rawdatadir=rawdatadir, overwrite=overwrite,
                   outputdir=outputdir)
    return
if __name__ == '__main__':
    # BUG FIX: argparse was used here without ever being imported anywhere
    # in the module, so the command-line entry point crashed with NameError.
    import argparse
    parser = argparse.ArgumentParser(description='This is the script for making bias template files..')
    parser.add_argument('filename', help='Bias FITS file')
    parser.add_argument('-o', help='Overwrite flag', dest='overwrite',
                        action='store_true', default=False)
    parser.add_argument('-d', help='Raw data directory',
                        dest='rawdatadir', action='store', default='')
    args = parser.parse_args()
    MkTwoBiasTemplate(args.filename, rawdatadir=args.rawdatadir,
                      overwrite=args.overwrite)
| StarcoderdataPython |
6555603 | <gh_stars>0
# 一个节点的数据类型,包含左子孩子节点指针 右孩子节点指针 和值
class Node(object):
    """One node of a binary tree: a stored value plus two child links."""

    def __init__(self, item):
        self.item = item    # value carried by this node
        self.left = None    # left child node, or None when absent
        self.right = None   # right child node, or None when absent
# 树的类
class Tree(object):
    """Binary tree filled in level order (top to bottom, left to right)."""

    def __init__(self):
        self.root = None  # topmost node; None while the tree is empty

    def add(self, item):
        """Insert *item* into the first vacant child slot in level order."""
        node = Node(item)
        if self.root is None:
            self.root = node
            return
        # Breadth-first scan for the first node missing a child.
        pending = [self.root]
        while pending:
            current = pending.pop(0)
            if current.left is None:
                current.left = node
                return
            pending.append(current.left)
            if current.right is None:
                current.right = node
                return
            pending.append(current.right)

    def breadth_travel(self):
        """Print every item in level order, space separated, then a newline."""
        pending = [self.root]
        while pending:
            current = pending.pop(0)
            print(current.item, end=" ")
            if current.left is not None:
                pending.append(current.left)
            if current.right is not None:
                pending.append(current.right)
        print()

    def preOder(self, node):
        """Pre-order traversal (root, left, right) printing each item."""
        if node is None:
            return
        print(node.item, end=" ")
        self.preOder(node.left)
        self.preOder(node.right)

    def inOrder(self, node):
        """In-order traversal (left, root, right) printing each item."""
        if node is None:
            return
        self.inOrder(node.left)
        print(node.item, end=" ")
        self.inOrder(node.right)

    def postOrder(self, node):
        """Post-order traversal (left, right, root) printing each item."""
        if node is None:
            return
        self.postOrder(node.left)
        self.postOrder(node.right)
        print(node.item, end=" ")

    # Convenience wrappers that start each traversal at the root.
    def preOrder_travel(self):
        self.preOder(self.root)

    def inOrder_travel(self):
        self.inOrder(self.root)

    def postOrder_travel(self):
        self.postOrder(self.root)
if __name__ == '__main__':
    # Build the complete tree 1..7 level by level, then run each traversal.
    tree = Tree()
    for value in (1, 2, 3, 4, 5, 6, 7):
        tree.add(value)
    tree.breadth_travel()    # 1 2 3 4 5 6 7
    tree.preOrder_travel()   # 1 2 4 5 3 6 7
    print()
    tree.inOrder_travel()    # 4 2 5 1 6 3 7
    print()
    tree.postOrder_travel()  # 4 5 2 6 7 3 1
| StarcoderdataPython |
# Read two space-separated integers and report whether one divides the other.
values = input().split()
first = int(values[0])
second = int(values[1])
if first == second:
    # Equal inputs are reported as non-multiples by this program.
    print("Nao sao Multiplos")
elif first % second == 0 or second % first == 0:
    print("Sao Multiplos")
else:
    print("Nao sao Multiplos")
| StarcoderdataPython |
9699896 | <gh_stars>1-10
from __future__ import absolute_import
from django import forms
from .exceptions import DocumentAlreadyCheckedOut
from .models import DocumentCheckout
from .widgets import SplitTimeDeltaField
class DocumentCheckoutForm(forms.ModelForm):
    """Form to check out a document for a period of time.

    expiration_datetime overrides the model field with a split time-delta
    widget so the user enters a duration rather than an absolute date.
    """
    expiration_datetime = SplitTimeDeltaField()

    class Meta:
        model = DocumentCheckout
        # These fields are filled in by the view/model, never by the user.
        exclude = ('checkout_datetime', 'user_content_type', 'user_object_id')
        widgets = {
            'document': forms.widgets.HiddenInput(),
        }

    def clean_document(self):
        """Reject a document that is already checked out."""
        document = self.cleaned_data['document']
        if document.is_checked_out():
            # NOTE(review): the exception class is raised directly; unless
            # DocumentAlreadyCheckedOut subclasses ValidationError, Django
            # will treat this as a server error, not a form error -- confirm.
            raise DocumentAlreadyCheckedOut
        return document
| StarcoderdataPython |
4921149 | #!/usr/bin/env python3
import sys
import socketserver
import logging
import json
from lib.MyTCPHandler import MyTCPHandler
# Load the server and TCP Handler configuration from file
def load_server_handler_config(config_file):
    """Build a TCPServer from the JSON config file (keys: 'host', 'port')."""
    logging.debug("Opening socketserver config: " + config_file)
    with open(config_file, 'r') as f:
        logging.debug("Reading socketserver config")
        config = json.load(f)
        HOST = config['host']
        PORT = config['port']
    logging.info("Loaded socketserver config")
    # Allow quick restarts without waiting for the TIME_WAIT socket state.
    socketserver.TCPServer.allow_reuse_address = True
    logging.info("Attempting to listen on {host} tcp port {port}"
                 .format(host=HOST, port=PORT))
    # NOTE(review): MyTCPHandler(config_file) is *called* here, so it
    # presumably returns a handler class (factory pattern); TCPServer
    # expects a class as its second argument -- verify.
    return socketserver.TCPServer((HOST, PORT), MyTCPHandler(config_file))
# Start the socket server
def run_updater_server(config_file="config.json"):
    """Build the TCP server from *config_file* and serve until interrupted."""
    server = load_server_handler_config(config_file)
    logging.info("Now serving connections (abort with crtl-c).")
    try:
        # Blocks forever, dispatching each incoming connection to the handler.
        server.serve_forever()
    except KeyboardInterrupt:
        logging.info("Exiting due to keyboard interrupt.")
        sys.exit(0)
| StarcoderdataPython |
4823606 | """
Copyright 2010 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import threading
import logging
import traceback
import sqlalchemy
from sqlalchemy import create_engine, Table, Column, Integer, String, MetaData, ForeignKey, DateTime #, UniqueConstraint
from sqlalchemy.orm import mapper, relation, sessionmaker, scoped_session, backref #, eagerload
_sessionmaker = None # should be initialized by bootstrap
_threadlocal = threading.local()
logger = logging.getLogger("persistence.transactional")
def init(sessionmaker):
    """Install the module-wide SQLAlchemy session factory.

    Must run during bootstrap, before any @transactional function executes
    (see the _sessionmaker comment above).
    """
    global _sessionmaker
    _sessionmaker = sessionmaker
def transactional(f):
    """Decorator that runs *f* inside a (possibly nested) session scope.

    The call is routed through SessionTemplate, so nested @transactional
    calls share one thread-local session and only the outermost scope
    commits or rolls back.  Requires init() to have been called first.
    """
    from functools import wraps

    @wraps(f)  # preserve f's name/docstring for logging and introspection
    def do(*args, **kwargs):
        def callback(session):
            # The session is managed by the template; f does not receive it
            # and must look it up through its own means if needed.
            return f(*args, **kwargs)
        return SessionTemplate(_sessionmaker).do_with_session(callback)
    return do
class SessionTemplate(object):
    """ Simple helper class akin to Spring-JDBC/Hibernate/ORM Template.
    It doesn't commit nor release resources while other do_with_session() calls are pending.
    See http://www.sqlalchemy.org/trac/ticket/1084#comment:3 for suggestions on how to improve this
    without using a custom threadlocal variable
    """
    def __init__(self, sessionmaker):
        # The template is a thin, stateless wrapper around the session factory.
        assert sessionmaker is not None
        self._sessionmaker = sessionmaker
    def do_with_session(self, session_callback):
        # Run the callback inside a (possibly nested) scope: begin_scope()
        # reuses the current thread's session when one is already open.
        try:
            session = begin_scope(self._sessionmaker)
            result = session_callback(session)
        except Exception as e1:
            # Flag the whole scope for rollback, then let the error propagate.
            _mark_for_rollback(self._sessionmaker)
            raise
        finally:
            # Always close the scope; commit/rollback only happen at the
            # outermost nesting level (see end_scope).
            end_scope(self._sessionmaker)
        # Only reachable on success -- the bare `raise` rethrows otherwise.
        return result
class BoundSession(object):
    """Reference-counted wrapper pairing a session with scope bookkeeping.

    `count` tracks nesting depth, `should_commit` flips to False once any
    nested scope fails, and `should_renew` asks the next begin_scope to
    hand out a fresh session.
    """

    def __init__(self, session, count=0):
        assert count >= 0
        self.session = session
        self.count = count
        self.should_commit = True
        self.should_renew = False

    def increment(self):
        self.count += 1

    def decrement(self):
        self.count -= 1

    def mark_for_rollback(self):
        self.should_commit = False

    def mark_for_renewal(self):
        self.should_renew = True
def begin_scope(session_maker):
    # Reuse the thread's existing scope if one is open; otherwise create a
    # fresh session wrapped in a BoundSession (starting at count 0).
    bound_session = _threadlocal.current_session if _session_exists() else BoundSession(session_maker())
    bound_session.increment()
    _threadlocal.current_session = bound_session
    if _threadlocal.current_session.should_renew:
        # A nested rollback invalidated the old session: swap in a new one.
        # NOTE(review): should_renew is never reset here and the replaced
        # session is not closed -- confirm this is intended.
        _threadlocal.current_session.session = session_maker()
    return bound_session.session
def end_scope(session_maker, force_rollback=False):
    if _current_count() == 1 : # top level, we either commit or rollback
        try:
            if _should_commit() and (not force_rollback):
                _session().commit()
            else:
                _rollback(session_maker)
        finally:
            # _cleanup drops the threadlocal scope entirely, so no
            # decrement is needed on this branch.
            _cleanup(session_maker)
    else:
        # Inner scope: never commit; on failure roll back now and force the
        # remaining outer scopes onto a renewed session.
        if not _should_commit() or force_rollback:
            _rollback_and_mark_for_renewal(session_maker)
        _threadlocal.current_session.decrement()
def _rollback_and_mark_for_renewal(session_maker):
    # Roll back immediately, then ask the next begin_scope for a new session.
    _rollback(session_maker)
    _threadlocal.current_session.mark_for_renewal()
def _rollback(session_maker):
    #if not _session_exists():
    #    return
    try:
        # Invalidate the underlying DBAPI connection; the value assigned to
        # `conn` (invalidate()'s return) is never used.
        conn = _session().connection().invalidate()
    except sqlalchemy.exc.InvalidRequestError:
        # ignore the following exception that happens on windows...
        # InvalidRequestError("The transaction is inactive
        # due to a rollback in a subtransaction and should be closed")
        #
        pass
    except Exception:
        # Deliberate best-effort: an invalidation failure must not mask the
        # rollback below.
        pass
    _session().rollback()
def _cleanup(session_maker):
    # Close the session, return it to the scoped registry, and always drop
    # the threadlocal marker -- even if close()/remove() raise.
    try:
        _session().close()
        session_maker.remove()
    finally:
        del _threadlocal.current_session
def _session():
    # Current thread's session, or None when no scope is open.
    return _threadlocal.current_session.session if _session_exists() else None
def _current_count():
    # Nesting depth of the current scope (0 when no scope is open).
    return _threadlocal.current_session.count if _session_exists() else 0
def _should_commit():
    # Assumes a scope is open; raises AttributeError otherwise.
    return _threadlocal.current_session.should_commit
def _session_exists():
    # True when this thread currently has an open session scope.
    return hasattr(_threadlocal, 'current_session')
def _mark_for_rollback(session_maker):
    # Flags the current scope for rollback; session_maker is accepted but unused.
    _threadlocal.current_session.mark_for_rollback()
| StarcoderdataPython |
6409960 | """
pluginName = TLShort
Senario Short Timelapse Project
-------------------------------
This setup will save images in number sequence in case date/time is not maintained
due to a reboot and no internet NTP server is available. It will Not create subfolders.
Depending on the full duration of the timelapse sequence it is advised saving files to
an attached hard drive or USB memory stick. Due to the short nature no subfolders
will be created.
Edit the settings below to suit your project needs.
if config.py variable pluginEnable=True and pluginName=TLshort
then these settings will override the config.py settings.
"""
# Customize settings below to suit your project needs
# ---------------------------------------------------
IMAGE_NAME_PREFIX = 'short-' # Default= 'cam1-' for all image file names. Eg garage-
IMAGE_WIDTH = 1920 # Default= 1024 Full Size Image Width in px
IMAGE_HEIGHT = 1080 # Default= 768 Full Size Image Height in px
IMAGE_FORMAT = ".jpg" # Default= ".jpg" image Formats .jpeg .png .gif .bmp
IMAGE_JPG_QUAL = 95 # Default= 95 jpg Encoder Quality Values 1(low)-100(high min compression) 0=85
TIMELAPSE_ON = True # Default= False True=Turn timelapse On, False=Off
TIMELAPSE_PREFIX = "tl-" # Default= "tl-" Prefix for All timelapse images with this prefix
TIMELAPSE_TIMER_SEC = 10 # Default= 120 (2 min) Seconds between timelapse images.
TIMELAPSE_DIR = "media/shortl" # Default= "media/timelapse" Storage Folder Path for Time Lapse Image Storage
TIMELAPSE_RECENT_DIR = "media/recent/shortl" # Default= "media/recent/timelapse" location of timelapseRecent files
TIMELAPSE_RECENT_MAX = 100 # Default= 0 off or specify number of most recent files to save in TIMELAPSE_RECENT_DIR
TIMELAPSE_NUM_ON = True # Default= True filenames Sequenced by Number False=filenames by date/time
TIMELAPSE_NUM_RECYCLE_ON = True # Default= True Restart Numbering at NumStart False= Surpress Timelapse at NumMax
TIMELAPSE_NUM_START = 10000 # Default= 1000 Start of timelapse number sequence
TIMELAPSE_NUM_MAX = 0 # Default= 2000 Max number of timelapse images desired. 0=Continuous
TIMELAPSE_EXIT_SEC = 0 # Default= 0 seconds Surpress Timelapse after specified Seconds 0=Continuous
TIMELAPSE_MAX_FILES = 0 # Default= 0 off or specify MaxFiles to maintain then oldest are deleted Default=0 (off)
TIMELAPSE_SUBDIR_MAX_FILES = 5000 # Default= 0 off or specify MaxFiles - Creates New dated sub-folder if MaxFiles exceeded
TIMELAPSE_SUBDIR_MAX_HOURS = 0 # Default= 0 off or specify MaxHours - Creates New dated sub-folder if MaxHours exceeded
TIMELAPSE_PANTILT_ON = False # True= Move pantilt to next TIMELAPSE_PANTILT_STOPS position for
# each timelapse triggered. Set PANTILT_ON = True below.
# Turn off other features
MOTION_TRACK_ON = False # Default= True True=Turns Motion Detect On, False=Off
MOTION_TRACK_QUICK_PIC_ON = False # Default= False True= Grab stream frame rather than stopping stream to take full size image
MOTION_VIDEO_ON = False # Default= False True=Take a video clip rather than image
MOTION_TRACK_MINI_TL_ON = False # Default= False True=Take a quick time lapse sequence rather than a single image (overrides MOTION_VIDEO_ON)
VIDEO_REPEAT_ON = False # Turn on Video Repeat Mode IMPORTANT Overrides timelapse and motion
PANTILT_ON = False # True= Enable Pan Tilt Hat hardware, False= Disable for TIMELAPSE_PANTILT_ON and PANO_ON
| StarcoderdataPython |
8079498 | <gh_stars>10-100
"""
Script that pulls prices and rates of specified currencies using forex python.
The data is then formatted and published via redis.
It would be cumbersome to query and reformat the query result with every api request,
especially since the requests are rarely dependant on external inputs.
this way the preformatted data can be accessed easily and quickly by redis every request.
"""
import redis, json, time, datetime, config
from forex_python.converter import CurrencyRates
from forex_python.converter import CurrencyCodes
from forex_python.bitcoin import BtcConverter
# Shared Redis connection used to publish the formatted payloads.
r = redis.from_url(config.REDIS_URL)
tic = 30.0  # publish interval in seconds
# Latest conversion rates, one entry per configured currency code.
latest_currencies = {
    'currencies': []
}
# Chart.js-style structure: rolling time labels plus one dataset per currency.
chart_data = {
    'labels': [],
    'datasets': []
}
"""
Hard coded list of colours instead to ensure colour diversity.
# Generate a unique colour based on unique currency code.
# Get the ASCII code values for the char's A-Y are 65-90.
def rgbChar(c):
    return str(int((((ord(c)-65)/25)*255)))
"""
# Align the first pull with the top of the next minute.
time.sleep(60 - datetime.datetime.now().second)
starttime = time.time()
def pullData():
    """Fetch current prices/rates, update the rolling chart data and the
    latest-rate list in place, and publish both structures through Redis.

    The first call populates the module-level dicts; later calls append,
    popping the oldest entry once 20 labels have accumulated so the chart
    stays a fixed-size window.
    """
    # Timestamp label shifted one hour ahead of local time.
    t = '{:%H:%M:%S}'.format(datetime.datetime.now() + datetime.timedelta(hours=1))
    #t = time.strftime("%H:%M:%S")
    print("Starting at number: " + str(datetime.datetime.utcnow()))
    # Using forex to get latest data: https://media.readthedocs.org/pdf/forex-python/latest/forex-python.pdf
    c = CurrencyRates()
    b = BtcConverter()
    rates = c.get_rates(config.LOCAL_CURR_CODE)
    pop = False
    # Adapted from: https://stackoverflow.com/questions/30071886/how-to-get-current-time-in-python-and-break-up-into-year-month-day-hour-minu
    chart_data['labels'].append(t)
    # If 20 dates are already currently in the list - pop.
    if len(chart_data['labels']) >= 20:
        chart_data['labels'].pop(0)
        pop = True
    # Loop through array of datasets to append or append and pop.
    if chart_data['datasets']:
        for i, code in enumerate(config.CURR_CODES):
            if code == 'BTC':
                price = round(b.get_latest_price(config.LOCAL_CURR_CODE),2)
                rate = round(b.convert_to_btc(1, config.LOCAL_CURR_CODE),5)
            else:
                price = round(c.get_rate(code, config.LOCAL_CURR_CODE),2)
                rate = round(rates[chart_data['datasets'][i]['label']],5)
            chart_data['datasets'][i]['data'].append(price)
            latest_currencies['currencies'][i]['data'] = rate
            if pop:
                chart_data['datasets'][i]['data'].pop(0)
    else:
        # First run: build one dataset + one rate entry per currency code.
        co = CurrencyCodes()
        # Prepare data objects and pull first prices.
        for i, code in enumerate(config.CURR_CODES):
            if code == 'BTC':
                symbol = b.get_symbol()
                name = 'Bitcoin'
                price = round(b.get_latest_price(config.LOCAL_CURR_CODE),2)
                rate = round(b.convert_to_btc(1, config.LOCAL_CURR_CODE),5)
            else:
                name = co.get_currency_name(code)
                symbol = co.get_symbol(code)
                price = round(c.get_rate(code, config.LOCAL_CURR_CODE),2)
                rate = round(rates[code], 5)
            chart_data['datasets'].append({
                'label': code,
                'backgroundColor': config.CURR_COLORS[i],
                'data': [price]
            })
            latest_currencies['currencies'].append({
                'code': code,
                'name': name,
                'symbol': symbol,
                'data': rate
            })
    # NOTE(review): r.set() is handed plain dicts; redis-py coerces them with
    # str(), not JSON -- confirm consumers parse that representation.
    r.set(config.REDIS_CHAN_LIST, latest_currencies)
    r.set(config.REDIS_CHAN_GRAPH, chart_data)
    print("Finishing at number: " + str(datetime.datetime.utcnow()))
while True:
    pullData()
    # Adapted from: https://stackoverflow.com/questions/474528/what-is-the-best-way-to-repeatedly-execute-a-function-every-x-seconds-in-python/38317060
    time.sleep(tic - ((time.time() - starttime) % tic))
11384864 | <reponame>chen940303/Diaosier_home
#-*-coding:utf-8-*-
from flask import render_template,request,jsonify
from . import main
@main.app_errorhandler(404)
def page_not_found(e):
    """Return JSON for API-only clients, the themed HTML page otherwise."""
    mimetypes = request.accept_mimetypes
    # A client that accepts JSON but not HTML is treated as an API consumer.
    if mimetypes.accept_json and not mimetypes.accept_html:
        payload = jsonify({'error': 'not found'})
        payload.status_code = 404
        return payload
    return render_template('404.html'), 404
@main.app_errorhandler(500)
def internal_server_error(e):
    """Render the generic error page for unhandled server-side failures."""
    return render_template('500.html'), 500
| StarcoderdataPython |
5059442 | from pathlib import Path
from collage.utils import extract_attrs, get_input_shape
import json
import pickle
from os import path
import logging
# @sunggg: [TODO] Need to check hash conflict
# configuration includes operator name, operator type (backend operators from different targets might have the same type),
# data shape of all free variables, and node attributes
class Config(object):
    """Hashable key identifying a backend-operator match.

    Identity is (op_name, pattern, data_shape, attrs): the operator name
    alone is not unique because backends from different targets can expose
    the same operator type.  When an expression is supplied, shape and
    attributes are derived from it; otherwise the explicit data_shape/attrs
    arguments (debugging path) are stored as-is.
    """

    # We have data_shape and attrs as arguments for debugging purpose
    def __init__(self, op_name, pattern, expr, data_shape=None, attrs=None):
        self._op_name = op_name
        self._pattern = pattern
        if expr != None:
            self._data_shape = get_input_shape(expr)
            self._attrs = extract_attrs(expr)
        else:
            self._data_shape = data_shape
            self._attrs = attrs

    def _key(self):
        """The tuple of the four identity components."""
        return (self._op_name, self._pattern, self._data_shape, self._attrs)

    def __hash__(self):
        return hash(self._key())

    def __eq__(self, other):
        return self._key() == other._key()

    def __repr__(self):
        return (f"op_name: {self._op_name}, pattern: {self._pattern}, "
                f"data_shape: {self._data_shape}, attrs: {self._attrs}")

    def __str__(self):
        return (f"pattern: {self._pattern}, data_shape: {self._data_shape}, "
                f"attrs: {self._attrs}, op_name: {self._op_name}")
# @sunggg: Do we need this per backend?
# class to save costs of already evaluated configurations so we do not need to reevaluate them
class OpCostLogger(object):
    def __init__(self, log_path = None, dump_readable = False):
        """Cost cache; optionally mirrors the log to a human-readable JSON file."""
        # maps configurations already measured to the measured cost (in ms)
        self.measured_configs = dict()
        self.log_path = "operator_cost.log" if log_path is None else log_path
        self.log_path_readable = "readable_" + self.log_path + ".json"
        self.dump_readable = dump_readable
    def get_cost(self, config):
        """Return the recorded cost for *config*, or None when unmeasured."""
        if config in self.measured_configs:
            return self.measured_configs[config]
        return None
    # cost is (mean(cost), std(cost))
    def save_cost(self, config, cost):
        """Record (or overwrite) the measured cost for *config*."""
        self.measured_configs[config] = cost
    def save_to_log(self):
        """Pickle the cache to log_path; optionally dump a readable JSON copy."""
        with open(self.log_path, 'wb+') as log:
            pickle.dump(self.measured_configs, log)
        if self.dump_readable:
            # JSON keys must be strings, so each Config key is stringified.
            str_configs = dict()
            for key, perf in self.measured_configs.items():
                str_configs[str(key)] = perf
            with open(self.log_path_readable, 'w+') as log:
                json.dump(str_configs, log, sort_keys=True, indent=4)
# If log doesn't exist, it uses default empty dictionary.
def load_from_log(self):
if path.exists(self.log_path):
with open(self.log_path, 'rb') as log:
logging.info(">> Start with previous op cost log")
self.measured_configs = pickle.load(log)
else:
logging.info(">> Start from scratch") | StarcoderdataPython |
5192569 | <gh_stars>1-10
# -*- coding: utf-8 -*-
import uuid
import requests
import hashlib
import time
class Translator:
    """Thin client for the Youdao openapi text-translation endpoint (v3 signing)."""

    YOUDAO_URL = 'https://openapi.youdao.com/api'
    APP_KEY = ''
    APP_SECRET = ''

    def encrypt(self, signStr):
        """Return the SHA-256 hex digest of *signStr* (the v3 signature)."""
        digest = hashlib.sha256()
        digest.update(signStr.encode('utf-8'))
        return digest.hexdigest()

    def truncate(self, q):
        """Shorten *q* to the 'input' form the v3 signature requires."""
        if q is None:
            return None
        size = len(q)
        if size <= 20:
            return q
        # Long text: first 10 chars + total length + last 10 chars.
        return q[0:10] + str(size) + q[size - 10:size]

    def do_request(self, data):
        """POST *data* form-encoded to the Youdao API; return the raw response."""
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        return requests.post(self.YOUDAO_URL, data=data, headers=headers)

    def translate(self, text, f='auto', t='auto'):
        """Translate *text* from language *f* to *t*.

        Returns the first translation on success, otherwise the API's
        error code string.
        """
        curtime = str(int(time.time()))
        salt = str(uuid.uuid1())
        sign = self.encrypt(
            self.APP_KEY + self.truncate(text) + salt + curtime + self.APP_SECRET)
        data = {
            'from': f,
            'to': t,
            'signType': 'v3',
            'curtime': curtime,
            'appKey': self.APP_KEY,
            'q': text,
            'salt': salt,
            'sign': sign,
        }
        response = self.do_request(data).json()
        if response['errorCode'] == '0':
            return response['translation'][0]
        return response['errorCode']
6502112 | import numpy as np
import time
def compute_roc_points(labels, scores, fprs, use_sklearn=True):
    """Compute TPR/threshold pairs at the requested FPR operating points.

    Args:
        labels: 1-D array of binary ground-truth labels (1 = positive).
        scores: 1-D array of classifier scores, same length as labels.
        fprs: iterable of target false-positive rates.
        use_sklearn: when True, delegate the curve to sklearn.metrics.roc_curve;
            otherwise compute it directly with numpy.

    Returns:
        (tpr_list, threshold_list, eer_tpr): the first two align with *fprs*;
        eer_tpr is the TPR at the curve point closest to tpr + fpr == 1.
    """
    tpr_k_score = []
    th_k_score = []
    print(labels.shape)
    print(scores.shape)
    if use_sklearn:
        from sklearn.metrics import roc_curve
        roc_fpr, roc_tpr, roc_thresholds = roc_curve(labels, scores, pos_label=1, drop_intermediate=False)
        sp_idx = np.argmin(np.abs(roc_tpr + roc_fpr - 1))
        sp_tpr = roc_tpr[sp_idx]
        for fpr_ratio in fprs:
            # Pick the curve point whose FPR is nearest the requested one.
            idx = np.argmin(np.abs(roc_fpr - fpr_ratio))
            tpr_k_score.append(roc_tpr[idx])
            th_k_score.append(roc_thresholds[idx])
        return tpr_k_score, th_k_score, sp_tpr
    # Manual ROC: sort by score and derive cumulative positive counts.
    sorted_idx = np.argsort(scores)
    sorted_scores = scores[sorted_idx]
    sorted_labels = labels[sorted_idx]
    cum_pos = np.cumsum(sorted_labels, dtype=float)
    total_pos = cum_pos[-1]
    n = labels.size
    # Treating each sorted score as a candidate threshold (>= threshold is
    # predicted positive): fn counts positives strictly below it.
    fn = cum_pos - sorted_labels
    tp = total_pos - fn
    fp = np.arange(n, 0, -1) - tp
    tpr = tp / total_pos
    fpr = fp / (n - total_pos)
    sp_idx = np.argmin(np.abs(tpr + fpr - 1))
    # NOTE: the loop variable previously shadowed the `fp` array above.
    for fp_target in fprs:
        idx = np.argmin(np.abs(fpr - fp_target))
        tpr_k_score.append(tpr[idx])
        th_k_score.append(sorted_scores[idx])
    return tpr_k_score, th_k_score, tpr[sp_idx]
def compute_roc_part(worker_id, feat1, feat2, meta1, meta2, delta, thres, tp, fp, total_pos_neg):
    """Accumulate TP/FP counts at the given thresholds for one worker's shard.

    Args:
        worker_id: key under which results are stored in the shared maps.
        feat1, feat2: 2-D feature matrices; similarity is their dot product.
        meta1, meta2: identity labels; a pair is positive when labels match.
        delta: if != -1, only the strict upper triangle of the first *delta*
            rows/cols is scored (self-comparison case); otherwise all pairs.
        thres: score thresholds, assumed ascending (the bisect search resumes
            from the previous hit via *start*).
        tp, fp, total_pos_neg: shared dict-like outputs, keyed by worker_id.
    """
    scores = feat1.dot(feat2.T)
    # Pairwise positive/negative labels from identity equality.
    # BUG FIX: np.int was removed in NumPy 1.24; the builtin int is the
    # supported equivalent and produces the same dtype.
    labels = (meta1.reshape(-1, 1) == meta2.reshape(1, -1)).astype(int)
    if delta != -1:
        indices = np.triu_indices(delta, k=1)
        scores = scores[indices]
        labels = labels[indices]
    else:
        scores = scores.reshape(-1)
        labels = labels.reshape(-1)
    sorted_idx = np.argsort(scores)
    sorted_scores = scores[sorted_idx]
    sorted_labels = labels[sorted_idx]
    cum_pos = np.cumsum(sorted_labels, dtype=float)
    total_pos = cum_pos[-1]
    n = labels.size
    # Thresholding at each sorted score: positives strictly below are misses.
    fn = cum_pos - sorted_labels
    tp_tmp = total_pos - fn
    fp_tmp = np.arange(n, 0, -1) - tp_tmp
    import bisect
    c_tp = [0] * len(thres)
    c_fp = [0] * len(thres)
    start = 0
    for i, th in enumerate(thres):
        # First index whose score is strictly greater than th.
        pos = bisect.bisect_right(sorted_scores, th, start)
        if pos != len(sorted_scores):
            c_tp[i] = tp_tmp[pos]
            c_fp[i] = fp_tmp[pos]
            start = pos
        else:
            # Threshold beyond the largest score.
            # NOTE(review): recording (total_pos, 0) here looks inverted
            # (nothing is predicted positive at such a threshold) -- confirm.
            c_tp[i] = total_pos
            c_fp[i] = 0
    total_pos_neg[worker_id] = np.array([total_pos, n - total_pos])
    tp[worker_id] = c_tp
    fp[worker_id] = c_fp
| StarcoderdataPython |
12858957 | import cocotb
from cocotb.clock import Clock
from cocotb.triggers import ClockCycles, RisingEdge, FallingEdge, NextTimeStep, ReadWrite
# Number of words pushed through the FIFO in the test.
N = 16
# Stimulus: integers 0..N-1, written in order and expected back in order.
test_input = list(range(N))
async def writer(dut):
    """Drive each stimulus word into the FIFO, stalling while it is busy."""
    for word in test_input:
        # Wait until the DUT can accept another word.
        while not dut.ready_for_input.value:
            await ClockCycles(dut.clk, 1)
        dut.input_valid <= 1
        dut.data_in <= word
        await ClockCycles(dut.clk, 1)
        dut.input_valid <= 0
        await ClockCycles(dut.clk, 1)
# FIXME add more unit tests here
async def reader(dut):
    """Drain N words from the FIFO, periodically de-asserting ready_for_output
    to exercise back-pressure; returns the words in arrival order."""
    dut.ready_for_output <=1
    data_out = []
    while (len(data_out) < N):
        await RisingEdge(dut.clk)
        # Wait for the read-write phase so signal values are settled.
        await ReadWrite()
        if dut.output_valid.value:
            data_out.append(int(dut.data_out.value))
            print(int(dut.data_out.value))
        # Introduce random read delay to show that the fifo will respect
        # ready for output signals
        # NOTE(review): this also fires while data_out is empty (0 % k == 0),
        # adding a 100-cycle stall before the first read — confirm intended.
        if (len(data_out) % (N//6)) == 0:
            dut.ready_for_output <= 0
            await ClockCycles(dut.clk, 100)
            dut.ready_for_output <= 1
    return data_out
@cocotb.test()
async def test_fifo(dut):
    """End-to-end FIFO check: write 0..N-1 concurrently, read back, compare."""
    clk = dut.clk
    # 10 ns clock driven for the whole test.
    cocotb.fork(Clock(clk, 10, units="ns").start())
    # Reset Started
    await NextTimeStep()
    dut.reset <= 1
    await ClockCycles(clk, 1)
    dut.reset <= 0
    await ClockCycles(clk, 1)
    # Reset Done
    # Writer runs in the background; the reader is awaited for the result.
    writer_process = cocotb.fork(writer(dut))
    fifo_readback = await reader(dut)
    assert(test_input == fifo_readback)
| StarcoderdataPython |
6662895 | <reponame>xWasp97x/Greenhouse<filename>greenhouse/Dashboard/observer_pattern.py
class Observer:
    """Abstract receiver side of the observer pattern."""

    def update(self, payload):
        """Handle a payload pushed by an Observable; subclasses must override."""
        raise NotImplementedError
class Observable:
    """Subject side of the observer pattern: keeps a set of observers and
    pushes payloads to each of them."""

    def __init__(self):
        # A set, so each observer is registered at most once; notification
        # order is therefore unspecified.
        self.observers = set()

    def add_observer(self, observer: Observer):
        """Register *observer*; re-adding the same observer is a no-op."""
        self.observers.add(observer)

    def remove_observer(self, observer: Observer):
        """Unregister *observer*; raises KeyError if it was never added."""
        self.observers.remove(observer)

    @staticmethod
    def notify_observer(observer: Observer, payload):
        """Deliver *payload* to a single observer."""
        observer.update(payload)

    def notify_observers(self, payload):
        """Deliver *payload* to every registered observer."""
        # Plain loop instead of a list comprehension used only for its side
        # effects (the comprehension built and discarded a throwaway list).
        for observer in self.observers:
            self.notify_observer(observer, payload)
| StarcoderdataPython |
# Interval between automatic updates — presumably seconds; TODO(review)
# confirm the unit against the consumer of this constant.
AUTO_UPDATE_TIME = 20
# Invite link for the bot's Discord support server.
SERVER_INVITE = "https://discord.gg/xP2UPUn"
# OAuth2 link to add the bot to a guild (permissions=0: no extra permissions).
BOT_INVITE = "https://discord.com/oauth2/authorize?client_id=669978762120790045&permissions=0&scope=bot"
# Public source repository.
GITHUB_LINK = "https://github.com/pseudocoder10/Lockout-Bot"
# Guild role names whose holders get admin-level bot commands.
ADMIN_PRIVILEGE_ROLES = ['Admin', 'Moderator', 'Lockout Manager']
# Discord user IDs with owner-level access to the bot.
OWNERS = [515920333623263252]
# Directory where backups are written.
BACKUP_DIR = "./data/backup/"
| StarcoderdataPython |
6580564 | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import IReadiTunes
# Package metadata; the version is read from the package itself so it is
# declared in exactly one place.
setup(
    name='IReadiTunes',
    version=IReadiTunes.__version__,
    packages=find_packages(),
    author="Mickael",
    author_email="<EMAIL>",
    description="Tool to get any information about iTunes tracks and playlists quickly and easily",
    long_description_content_type="text/markdown",
    # NOTE(review): the README handle is never closed — acceptable for a
    # short-lived setup script.
    long_description=open('README.md').read(),
    url='https://github.com/mickael2054/IReadiTunes',
    install_requires=[],
    classifiers=[
        "Programming Language :: Python :: 3.5",
        "Operating System :: OS Independent",
        'Topic :: Utilities',
    ],
    license="MIT",
)
1802178 | <reponame>sophy7074/FALCON
#import falcon_kit.mains.run as mod
'''
def test_help():
try:
mod.main(['prog', '--help'])
except SystemExit:
pass
'''
| StarcoderdataPython |
294385 | <filename>PYex/hexGame/hex.py
## <NAME> - franr.com.ar/hex |
## ------------------------------------/
# import os
import random
from threading import Thread
import pygame
# Global constants (colours are RGB tuples).
RUN = True  # main-loop flag; set False to quit
LONG = 20  # hexagon "radius" in pixels; all board geometry scales from this
AMARILLO = (255, 231, 0)  # yellow player colour
AMARILLO_C = (255, 255, 50)  # light yellow: hover highlight
AZUL = (0, 127, 245)  # blue player colour
AZUL_C = (50, 177, 255)  # light blue: hover highlight
BLANCO = (255,255,255)  # white: unclaimed cell
NEGRO = (0,0,0)  # black: outlines and text
# Mutable current-player state, toggled by cambiar_jugador().
color_jugador_claro = AZUL_C
jugador = AZUL
def cambiar_jugador():
    """Toggle the active player between blue and yellow (module globals),
    keeping the matching hover-highlight colour in sync."""
    global jugador
    global color_jugador_claro
    if jugador == AZUL:
        jugador, color_jugador_claro = AMARILLO, AMARILLO_C
    else:
        jugador, color_jugador_claro = AZUL, AZUL_C
class Fuente:
    """Thin wrapper around the game's pygame font (cubicfive10.ttf, size 20)."""

    def __init__(self):
        pygame.font.init()
        self.fuente = pygame.font.Font("cubicfive10.ttf", 20)

    def render(self, texto):
        """Render *texto* in black, no antialiasing; returns a pygame Surface."""
        return self.fuente.render(texto, False, NEGRO)
class Hexagono:
    """One board cell: drawing, hover highlighting and click handling.

    (x, y) is the cell centre; d (= LONG) is the "radius" used both for the
    polygon vertices and the approximate click-detection rectangle.
    """

    def __init__(self, pantalla, x, y, id, azul_p, azul_f, amarillo_p, amarillo_f):
        self.pantalla = pantalla
        self.d = LONG
        self.color = BLANCO  # white == unclaimed
        self.marcada = False  # True once a player has claimed the cell
        self.id = id
        # Border flags: whether the cell touches blue's start/finish edge or
        # yellow's start/finish edge (used by Tablero's win check).
        self.azul_p = azul_p
        self.azul_f = azul_f
        self.amarillo_p = amarillo_p
        self.amarillo_f = amarillo_f
        # coordinates of the centre
        self.x = x
        self.y = y
        # Rectangular hit box, slightly wider than the hexagon itself.
        self.rect = pygame.Rect(self.x - self.d/2 - 4, self.y - self.d, self.d + 8, self.d*2)

    def dibujar(self):
        """Draw the hexagon: current fill colour plus a grey outline."""
        pl = [(self.x - self.d, self.y),
              (self.x - self.d/2, self.y - self.d),
              (self.x + self.d/2, self.y - self.d),
              (self.x + self.d, self.y),
              (self.x + self.d/2, self.y + self.d),
              (self.x - self.d/2, self.y + self.d)]
        pygame.draw.polygon(self.pantalla, self.color, pl)
        pygame.draw.polygon(self.pantalla, (100,100,100), pl, 3)
        # pygame.draw.rect(self.pantalla, NEGRO, self.rect)

    def update(self, x, y, p):
        """Process the mouse at (x, y); *p* is True on a click.

        Returns 1 if the cell was just claimed (also flips the turn),
        2 if the mouse merely hovers over it, 0 otherwise.
        """
        c = self.rect.collidepoint(x, y)
        if c:
            # A click only claims the cell when it already shows the current
            # player's highlight colour (i.e. it was hovered and unclaimed).
            if p and self.color == color_jugador_claro:
                self.marcar()
                cambiar_jugador()
                return 1
            return 2
        return 0

    def marcar(self):
        """Claim the cell for the current player."""
        self.color = jugador
        self.marcada = True

    def enfocar(self):
        """Apply the hover highlight (only while unclaimed)."""
        if not self.marcada:
            self.color = color_jugador_claro

    def desenfocar(self):
        """Remove the hover highlight (only while unclaimed)."""
        if not self.marcada:
            self.color = BLANCO
class Tablero:
    """The 11x11 hex board plus win detection (a player wins by connecting
    their two opposite edges with a chain of same-coloured cells)."""

    def __init__(self, pantalla):
        self.pantalla = pantalla
        self.iniciar()

    def iniciar(self):
        """(Re)create all 121 cells; ids run 1..121, one diagonal row at a time."""
        self.hexas = {}
        self.foco = None  # currently hovered cell, if any
        self.id = 0
        dx = LONG
        dy = LONG*11
        # board layout
        for i in range(11):
            for e in range(11):
                x = dx + LONG*(e + i)*1.5
                y = dy + LONG*(i - e)
                self.id += 1
                azp, azf, amp, amf = self.borde(self.id)
                self.hexas[self.id] = Hexagono(self.pantalla, x, y, self.id, azp, azf, amp, amf)

    def borde(self, id):
        """Classify a cell id; returns four flags:
        (blue start, blue finish, yellow start, yellow finish)."""
        # corner <
        if id == 1:
            return True, False, True, False
        # corner ^
        elif id == 11:
            return False, True, True, False
        # corner V
        elif id == 111:
            return True, False, True, False
        # corner >
        elif id == 121:
            return False, True, False, True
        # edge <V: blue start
        elif id % 11 == 1:
            return True, False, False, False
        # edge <^: yellow start
        elif id > 1 and id < 11:
            return False, False, True, False
        # edge ^>: blue finish
        elif (id % 11) == 0:
            return False, True, False, False
        # edge V>: yellow finish
        elif (id - 110) > 1 and (id - 110) < 11:
            return False, False, False, True
        # interior cell
        else:
            return False, False, False, False

    def dibujar(self):
        """Draw background + all cells and process one mouse event.

        Returns the winning colour when the last click completed a chain,
        otherwise None. NOTE: pygame.event.wait() blocks until an event.
        """
        pygame.draw.rect(self.pantalla, AMARILLO, (0, 0, LONG*11*1.5, LONG*11))
        pygame.draw.rect(self.pantalla, AZUL, (LONG*11*1.5, 0, LONG*11*1.5*2, LONG*11))
        pygame.draw.rect(self.pantalla, AZUL, (0, LONG*11, LONG*11*1.5, LONG*11))
        pygame.draw.rect(self.pantalla, AMARILLO, (LONG*11*1.5, LONG*11, LONG*11*1.5, LONG*11))
        x, y = pygame.mouse.get_pos()
        click = pygame.event.wait().type == pygame.MOUSEBUTTONDOWN
        gano = None
        for h in self.hexas.values():
            r = h.update(x, y, click)
            if r:
                # cell was just claimed
                if r == 1:
                    self.foco = None
                    gano = self.resolver(h.id)
                # cell is hovered
                elif r == 2:
                    if self.foco and self.foco != h:
                        self.foco.desenfocar()
                    self.foco = h
            if self.foco:
                self.foco.enfocar()
            h.dibujar()
        return gano

    def resolver(self, id):
        """Return the winning colour if the chain through cell *id* connects
        that colour's start and finish edges; otherwise None."""
        vistos = []
        color = self.hexas[id].color
        cadena = [h for h in self.alrededor(id, color, vistos)]
        if self.principio(cadena, color) and self.fin(cadena, color):
            return color
        return None

    def alrededor(self, id, color, vistos):
        # Recursively collect the ids of same-coloured cells connected to *id*.
        # Border cells get a reduced neighbour set to avoid wrapping rows.
        if self.borde(id)[0] == True:
            pos = 0, -10, -11, 1, 11
        elif self.borde(id)[1] == True:
            pos = 0, -11, -1, 11, 10
        else:
            pos = 0, -10, -11, 1, -1, 11, 10
        # Fixed: dict.has_key() is Python 2 only (removed in Python 3);
        # the `in` operator is the equivalent membership test.
        alr = [self.hexas[id+i].id for i in pos if ((id+i) in self.hexas and (id+i not in vistos))]
        cadena = [self.hexas[h].id for h in alr if (self.hexas[h].color == color)]
        vistos.extend(cadena)
        for i in cadena:
            self.alrededor(i, color, vistos)
        return vistos

    def principio(self, cadena, color):
        """True if the chain touches *color*'s start edge."""
        if color == AZUL:
            for c in cadena:
                if self.hexas[c].azul_p:
                    return True
        else:
            for c in cadena:
                if self.hexas[c].amarillo_p:
                    return True
        return False

    def fin(self, cadena, color):
        """True if the chain touches *color*'s finish edge."""
        if color == AZUL:
            for c in cadena:
                if self.hexas[c].azul_f:
                    return True
        else:
            for c in cadena:
                if self.hexas[c].amarillo_f:
                    return True
        return False
class Pantalla:
    """Top-level window: owns the pygame display, the board and the main loop."""

    def __init__(self):
        pygame.init()
        pygame.display.set_caption("Hex")
        self.clock = pygame.time.Clock()
        # os.environ["SDL_VIDEO_CENTERED"] = "1"
        self.pantalla = pygame.display.set_mode((LONG*32, LONG*11*2))
        self.t = Tablero(self.pantalla)
        # Start on the menu/"game over" screen; pressing [i] begins a game.
        self.gano = True
        self.color = None
        self.fuente = Fuente()
        self.main()

    def main(self):
        """Main loop at 40 FPS: draw the board or the menu until quit."""
        global RUN
        while RUN:
            self.pantalla.fill(AZUL_C)
            # render
            pygame.event.pump()
            if not self.gano:
                color = self.t.dibujar()
                if color:
                    self.gano = True
                    self.color = color
            else:
                self.ganador()
            pygame.display.update()
            if not self.update():
                RUN = False
                break
            self.clock.tick(40)
        pygame.quit()

    def ganador(self):
        """Draw the menu / winner screen (winner line only after a game)."""
        if self.color == AZUL:
            color = "Azul"
        else:
            color = "Amarillo"
        if self.color:
            r1 = self.fuente.render("Gano el jugador " + color)
        r2 = self.fuente.render("[i] Iniciar")
        r3 = self.fuente.render("[Esc] Salir")
        r4 = self.fuente.render("franr.com.ar/hex")
        if self.color:
            self.pantalla.blit(r1, (200,50))
        self.pantalla.blit(r2, (200,200))
        self.pantalla.blit(r3, (200,250))
        self.pantalla.blit(r4, (370,410))

    def update(self):
        """Handle keyboard/quit input; returns False to leave the main loop."""
        k = pygame.key.get_pressed()
        if k[pygame.K_ESCAPE]:
            return False
        elif k[pygame.K_i]:
            # Start (or restart) a game.
            self.gano = False
            self.t.iniciar()
        for evento in pygame.event.get():
            if evento.type == pygame.QUIT:
                return False
        return True
# Launch the game immediately; the module has no __main__ guard.
# (Fixed: stray dataset artifact removed from this line.)
Pantalla()
1861333 | import chart_studio
import os
import json
import requests
from requests.auth import HTTPBasicAuth
def get_pages(username, page_size, auth, headers):
    """Yield successive folder-listing pages for *username* from the Plotly
    v2 API, following each page's `children.next` link.

    The generator ends on the first non-200 response or when there is no
    next link.
    """
    resource = ('https://api.plot.ly/v2/folders/all?user=' + username
                + '&page_size=' + str(page_size))
    while resource:
        response = requests.get(resource, auth=auth, headers=headers)
        if response.status_code != 200:
            return
        page = json.loads(response.content.decode('utf-8'))
        yield page
        resource = page['children']['next']
def permanently_delete_files(username, auth, headers, page_size=500, filetype_to_delete='plot'):
    """Trash and then permanently delete every file of *filetype_to_delete*
    owned by *username* on Plotly."""
    for page in get_pages(username, page_size, auth, headers):
        for x in range(0, len(page['children']['results'])):
            fid = page['children']['results'][x]['fid']
            res = requests.get('https://api.plot.ly/v2/files/' + fid, auth=auth, headers=headers)
            res.raise_for_status()
            # NOTE(review): raise_for_status() already rejects 4xx/5xx, so
            # this check only filters out other 2xx/3xx responses.
            if res.status_code == 200:
                json_res = json.loads(res.content.decode('utf-8'))
                if json_res['filetype'] == filetype_to_delete:
                    # move to trash
                    requests.post('https://api.plot.ly/v2/files/'+fid+'/trash', auth=auth, headers=headers)
                    # permanently delete
                    requests.delete('https://api.plot.ly/v2/files/'+fid+'/permanent_delete', auth=auth, headers=headers)
def delete_all_earlier_charts():
    """Wipe all 'plot' files for the hard-coded test account.

    Reads the API key from the API_KEY environment variable (raises KeyError
    when unset).
    """
    username = 'testblame'
    api_key = os.environ['API_KEY']
    auth = HTTPBasicAuth(username, api_key)
    headers = {'Plotly-Client-Platform': 'python'}
    chart_studio.tools.set_credentials_file(username=username, api_key=api_key)
    # NOTE(review): the sweep runs twice — presumably a deliberate second
    # pass to catch files skipped by pagination on the first run; confirm.
    permanently_delete_files(username, filetype_to_delete='plot', auth=auth, headers=headers)
    permanently_delete_files(username, filetype_to_delete='plot', auth=auth, headers=headers)
| StarcoderdataPython |
9747534 | # Databricks notebook source
# Instrument for unit tests. This is only executed in local unit tests, not in Databricks.
if 'dbutils' not in locals():
    import databricks_test
    # Injects Databricks globals (spark, dbutils, widgets, ...) for local runs.
    databricks_test.inject_variables()
# COMMAND ----------
# Write the integers 0..4 as a Delta table to the path given by the 'output' widget.
data = spark.range(0, 5)
data.write.format("delta").save(dbutils.widgets.get('output'))
| StarcoderdataPython |
4951659 | import unittest
from pyspark import SparkContext
class Base(unittest.TestCase):
    """Shared test base: provides a SparkContext with quiet logging."""

    def setUp(self):
        # getOrCreate() reuses any existing context, so repeated setUp calls
        # across tests never try to start a second SparkContext.
        self.sc = SparkContext.getOrCreate()
        self.sc.setLogLevel('ERROR')
| StarcoderdataPython |
11273860 | """
Defines Annalist built-in identifier values (URIs)
"""
__author__ = "<NAME> (<EMAIL>)"
__copyright__ = "Copyright 2014, <NAME>"
__license__ = "MIT (http://opensource.org/licenses/MIT)"
import logging
log = logging.getLogger(__name__)
class Curiespace(object):
    """Empty attribute holder; makeNamespace attaches CURIE strings onto it."""

    def __init__(self):
        pass
class Namespace(object):
    """A URI namespace with an associated CURIE prefix.

    Every identifier in the namespace can be rendered either as a full URI
    (base URI + local name) or as a CURIE (prefix:name).
    """

    def __init__(self, prefix, baseUri):
        """
        prefix   CURIE prefix associated with this namespace.
        baseUri  base URI prepended to every local name.
        """
        self._prefix = prefix
        self._baseUri = baseUri
        self.CURIE = Curiespace()

    def mk_curie(self, name):
        """Return the CURIE form (prefix:name) of a local name."""
        return "%s:%s" % (self._prefix, name)

    def mk_uri(self, name):
        """Return the absolute URI of a local name."""
        return "%s%s" % (self._baseUri, name)

    def to_uri(self, curie):
        """Expand *curie* to a URI when its prefix matches this namespace;
        any other value is returned unchanged."""
        head, sep, local = curie.partition(':')
        if sep and head == self._prefix:
            return self.mk_uri(local)
        return curie
def makeNamespace(prefix, baseUri, names):
    """
    Construct a Namespace populated with the given local names.

    The returned object exposes each name as an attribute whose value is the
    full URI (e.g. ns.Site), and the same name on ns.CURIE whose value is
    the CURIE form (e.g. ns.CURIE.Site).
    """
    ns = Namespace(prefix, baseUri)
    for local_name in names:
        setattr(ns, local_name, ns.mk_uri(local_name))
        setattr(ns.CURIE, local_name, ns.mk_curie(local_name))
    return ns
"""
Partial enumeration of RDF namespace - add others as needed
"""
RDF = makeNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
[ "Property", "Statement", "List"
, "type", "value"
, "first", "rest", "nil"
])
"""
Partial enumeration of RDFS namespace - add others as needed
"""
RDFS = makeNamespace("rdfs", "http://www.w3.org/2000/01/rdf-schema#",
[ "Resource", "Class", "Literal", "Container", "Datatype"
, "label", "comment", "member", "seeAlso"
])
"""
Partial enumeration of OWL namespace
"""
OWL = makeNamespace("owl", "http://www.w3.org/2002/07/owl#",
[ "Thing", "Nothing"
, "sameAs", "differentFrom", "equivalentClass"
])
"""
Annalist namespace terms
"""
ANNAL = makeNamespace("annal", "http://purl.org/annalist/2014/#",
[ "EntityRoot", "Entity"
, "Site", "SiteData", "Collection", "Entity", "EntityRoot"
, "Collection_Types", "Collection_Views", "Collection_Lists"
, "Type_Data", "EntityData", "Metadata"
# Entity types
, "User", "Type", "List", "View", "Field_group", "Field", "Enum"
, "Text", "Longtext", "Richtext", "Slug", "Identifier"
, "Placement", "Image", "Audio", "User", "Vocabulary"
, "Import", "Upload"
, "Default_type", "unknown_type"
# Properties
, "software_version", "comment", "inherit_from"
, "id", "type_id", "type"
, "label", "help", "url", "uri", "record_type"
, "supertype_uris", "supertype_uri"
, "display_type", "type_list", "type_view"
, "field_aliases", "alias_target", "alias_source"
, "user_uri", "user_permissions"
, "group_fields"
, "view_fields"
, "list_entity_selector", "open_view"
, "list_entities", "list_fields"
, "placeholder", "default_value", "property_uri", "options_valkey"
, "field_ref_type", "field_ref_restriction", "field_ref_field"
, "repeat", "repeat_id", "repeat_label", "repeat_label_add", "repeat_label_delete"
, "default_type", "default_view" , "default_list"
, "field_id", "field_name", "field_placement"
, "field_render_type", "field_value_mode", "field_entity_type"
, "field_value_type", "field_target_type"
, "group_ref", "repeat_label_add", "repeat_label_delete"
, "task_buttons", "button_id", "button_label"
# Deprecated properties - in migration tables
, "options_typeref", "restrict_values", "target_field"
])
# End.
| StarcoderdataPython |
6686401 | import pytest
from pytest_mock import MockerFixture
from dataclass_wizard.utils.lazy_loader import LazyLoader
@pytest.fixture
def mock_logging(mocker: MockerFixture):
    """Patch the lazy_loader module's `logging` so warnings can be asserted."""
    return mocker.patch('dataclass_wizard.utils.lazy_loader.logging')
def test_lazy_loader_when_module_not_found():
    """A missing optional dependency surfaces as an ImportError whose message
    names both `pip install` and the extra to install."""
    extra = 'my-extra'
    lazy_mod = LazyLoader(globals(), 'my_module', extra)
    with pytest.raises(ImportError) as exc_info:
        _ = lazy_mod.my_var
    message = exc_info.value.msg
    assert 'pip install' in message
    assert extra in message
def test_lazy_loader_with_warning(mock_logging):
    """Accessing a lazily-loaded attribute logs the configured warning once."""
    expected_warning = 'My test warning'
    lazy_mod = LazyLoader(globals(), 'pytimeparse', warning=expected_warning)
    _ = lazy_mod.parse
    # The warning must be emitted exactly once, with the exact message.
    mock_logging.warning.assert_called_once_with(expected_warning)
    # Exercised purely for coverage of __dir__.
    _ = dir(lazy_mod)
| StarcoderdataPython |
3598251 | <gh_stars>0
import logging
import threading
import time
import array
import mlperf_loadgen as lg
import numpy as np
from ..constants import QUERY_COUNT, NANO_SEC, MILLI_SEC
# Module-wide logging setup (runs on import).
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
class ServerRunner():
    """MLPerf loadgen "Server"-scenario harness.

    A pool of daemon worker threads drains a shared query queue, batches up
    to ``max_batchsize`` samples, runs them through the ONNX Runtime
    ``session`` and reports completions back to loadgen.
    """

    def __init__(self, session, ds, optimization_config, onnx_output_names):
        self.session = session
        self.threads = optimization_config.threads_num
        self.max_batchsize = optimization_config.dynamic_batching_size
        self.ds = ds
        self.onnx_output_names = onnx_output_names
        # Estimated seconds per single-sample inference; filled in by warmup().
        self.guess = None
        # Condition variable guarding the shared queue (q_idx / q_query_id).
        self.cv = threading.Condition()
        self.done = False
        self.q_idx = []       # queued sample indices
        self.q_query_id = []  # matching loadgen query ids
        self.workers = []
        self.settings = lg.TestSettings()
        self.settings.scenario = lg.TestScenario.Server
        self.settings.mode = lg.TestMode.FindPeakPerformance
        log_output_settings = lg.LogOutputSettings()
        log_output_settings.outdir = optimization_config.result_path
        log_output_settings.copy_summary_to_stdout = False
        self.log_settings = lg.LogSettings()
        self.log_settings.enable_trace = False
        self.log_settings.log_output = log_output_settings
        self.sut = lg.ConstructSUT(self.issue_queries, self.flush_queries, self.process_latencies)
        self.qsl = lg.ConstructQSL(QUERY_COUNT, QUERY_COUNT, ds.load_query_samples, ds.unload_query_samples)
        self.settings.server_coalesce_queries = True
        self.settings.server_target_latency_ns = int(optimization_config.max_latency_ms * NANO_SEC / MILLI_SEC)
        self.settings.server_target_latency_percentile = optimization_config.max_latency_percentile
        self.settings.min_duration_ms = optimization_config.min_duration_sec * MILLI_SEC
        # start all threads
        for _ in range(self.threads):
            worker = threading.Thread(target=self.handle_tasks, args=(self.cv,))
            worker.daemon = True
            self.workers.append(worker)
            worker.start()
        time.sleep(1)

    def issue_queries(self, query_samples):
        """Loadgen callback: enqueue incoming query samples."""
        self.enqueue(query_samples)

    def flush_queries(self):
        """Loadgen callback: nothing to flush."""
        pass

    def process_latencies(self, latencies_ms):
        """Loadgen callback: latencies are handled by loadgen's own reports."""
        pass

    def handle_tasks(self, cv):
        """Worker thread."""
        max_batchsize = self.max_batchsize
        stats = [0] * (max_batchsize + 1)  # histogram of served batch sizes
        while True:
            with cv:
                # wait for something to do
                while len(self.q_idx) == 0 and not self.done:
                    cv.wait()
                idx = self.q_idx
                query_id = self.q_query_id
                if len(idx) > max_batchsize:
                    # only take max_batchsize
                    self.q_idx = idx[max_batchsize:]
                    self.q_query_id = query_id[max_batchsize:]
                    idx = idx[:max_batchsize]
                    query_id = query_id[:max_batchsize]
                    # wake up somebody to take care of the remainder
                    cv.notify()
                else:
                    # swap the entire queue
                    self.q_idx = []
                    self.q_query_id = []
                if self.done:
                    # parent wants us to exit
                    break
            # run inference, lock is released
            feed = self.ds.make_batch(idx)
            self.run_one_item((query_id, idx, feed))
            # count stats
            stats[len(idx)] += 1

    def run_one_item(self, qitem):
        """Run one batch through the session and report completion to loadgen."""
        # run the prediction
        processed_results = []
        query_id, content_id, feed = qitem
        results = self.session.run(self.onnx_output_names, feed)
        # NOTE(review): `results` is discarded and an empty result is reported
        # per query — presumably deliberate for a latency-only run; confirm.
        processed_results = [[]] * len(query_id)
        response_array_refs = []
        response = []
        for idx, qid in enumerate(query_id):
            response_array = array.array("B", np.array(processed_results[idx], np.float32).tobytes())
            # keep a reference so the buffer stays alive until loadgen copies it
            response_array_refs.append(response_array)
            bi = response_array.buffer_info()
            response.append(lg.QuerySampleResponse(qid, bi[0], bi[1]))
        lg.QuerySamplesComplete(response)

    def enqueue(self, query_samples):
        """Append samples to the shared queue, waking a worker if it was empty."""
        idx = [q.index for q in query_samples]
        query_id = [q.id for q in query_samples]
        with self.cv:
            scheduled = len(self.q_idx)
            # add new items to the queue
            self.q_idx.extend(idx)
            self.q_query_id.extend(query_id)
            # notify only if queue was empty
            if scheduled == 0:
                self.cv.notify()

    def finish(self):
        """Signal shutdown and join all worker threads."""
        # exit all threads
        self.done = True
        for worker in self.workers:
            with self.cv:
                self.cv.notify()
        for worker in self.workers:
            worker.join()

    def start_run(self):
        """Kick off the loadgen test with the configured settings."""
        lg.StartTestWithLogSettings(self.sut, self.qsl, self.settings, self.log_settings)

    def warmup(self, warmup_num):
        """Time `warmup_num` single-sample inferences and derive a target QPS."""
        self.ds.load_query_samples([0])
        start = time.time()
        for _ in range(warmup_num):
            feed = self.ds.make_batch([0])
            _ = self.session.run(self.onnx_output_names, feed)
        self.guess = (time.time() - start) / warmup_num
        # Target a third of the measured single-stream throughput.
        self.settings.server_target_qps = int(1 / self.guess / 3)
        self.ds.unload_query_samples(None)
3577976 | <filename>core/entities/default_race_entity.py
from core.structs import AbilityScoreStruct
from core.structs import RaceStruct
class DefaultRaceEntity(object):
    """Wraps a RaceStruct and provides helpers for filling in its fields."""

    def __init__(self):
        self.race = RaceStruct()

    def get_struct(self):
        """Return the wrapped RaceStruct."""
        return self.race

    def set_ability_score(self, strength=0, constitution=0, dexterity=0, intelligence=0, wisdom=0, charisma=0):
        """Replace the race's ability scores; unspecified scores default to 0."""
        self.race.ability_score = AbilityScoreStruct(
            strength, constitution, dexterity, intelligence, wisdom, charisma
        )
| StarcoderdataPython |
1638487 | from rest_framework import serializers
from .models import WorkingHour
class WorkingHourSerializer(serializers.ModelSerializer):
    """DRF serializer exposing a WorkingHour's `id` and `hour` fields."""

    class Meta:
        model = WorkingHour
        fields = ('id', 'hour')
| StarcoderdataPython |
9742304 | <gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Add `contributors` (many-to-many) and `owner` (FK) fields to Assessment."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('assessments', '0007_behavior_trait_synset'),
    ]

    operations = [
        migrations.AddField(
            model_name='assessment',
            name='contributors',
            field=models.ManyToManyField(related_query_name=b'contributor', related_name='assessment_contributors', to=settings.AUTH_USER_MODEL, blank=True, help_text=b'Select other CogatPheno users to add as contributes to the assessment. Contributors can add, edit and delete questions in the assessment.', verbose_name=b'Contributors'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='assessment',
            name='owner',
            # Nullable so existing assessments migrate without an owner.
            field=models.ForeignKey(default=None, blank=True, to=settings.AUTH_USER_MODEL, null=True),
            preserve_default=True,
        ),
    ]
| StarcoderdataPython |
8181912 | <gh_stars>1-10
# -*- coding: utf-8 -*-
import pytest
from roswire.common import PackageDatabase
from roswire.ros1 import ROS1MsgFormat, ROS1Package, ROS1PackageDatabase, ROS1SrvFormat
def test_to_and_from_dict():
    """Round-trip: ROS1Package -> to_dict -> from_dict reproduces the package."""
    pkg = "tf"
    msg_tf = ROS1MsgFormat.from_dict(
        {
            "package": pkg,
            "name": "tfMessage",
            "definition": "geometry_msgs/TransformStamped[] transforms\n",
            "fields": [
                {
                    "type": "geometry_msgs/TransformStamped[]",
                    "name": "transforms",
                }
            ],
        }
    )
    srv_fg = ROS1SrvFormat.from_dict(
        {
            "package": pkg,
            "name": "FrameGraph",
            "definition": "---\nstring dot_graph\n",
            "response": {
                "definition": "string dot_graph\n",
                "fields": [{"type": "string", "name": "dot_graph"}],
            },
        }
    )
    p = ROS1Package(
        name=pkg,
        path="/ros_ws/src/geometry/tf",
        messages=[msg_tf],
        actions=[],
        services=[srv_fg],
    )
    assert p == ROS1Package.from_dict(p.to_dict())
@pytest.mark.parametrize("sut", ["fetch"], indirect=True)
def test_build(sut):
    """ROS1Package.build on the 'tf' share directory matches known contents."""
    path = "/opt/ros/melodic/share/tf"
    expected = ROS1Package.from_dict(
        {
            "path": path,
            "name": "tf",
            "messages": [
                {
                    "name": "tfMessage",
                    "definition": "geometry_msgs/TransformStamped[] transforms\n",
                    "fields": [
                        {
                            "type": "geometry_msgs/TransformStamped[]",
                            "name": "transforms",
                        }
                    ],
                }
            ],
            "services": [
                {
                    "name": "FrameGraph",
                    "definition": "---\nstring dot_graph\n",
                    "response": {
                        "definition": "string dot_graph",
                        "fields": [{"type": "string", "name": "dot_graph"}],
                    },
                }
            ],
        }
    )
    actual = ROS1Package.build(path, sut)
    assert actual == expected
@pytest.mark.parametrize("sut", ["fetch"], indirect=True)
def test_database_paths(sut):
    """_determine_paths on the fetch image finds exactly the expected set of
    package share/source directories."""
    expected = {
        "/opt/ros/melodic/share/moveit_ros_occupancy_map_monitor",
        "/opt/ros/melodic/share/common_msgs",
        "/opt/ros/melodic/share/nodelet_core",
        "/opt/ros/melodic/share/ros_comm",
        "/opt/ros/melodic/share/bond_core",
        "/opt/ros/melodic/share/ros_base",
        "/opt/ros/melodic/share/ros_core",
        "/opt/ros/melodic/share/roscpp_core",
        "/opt/ros/melodic/share/ros",
        "/opt/ros/melodic/share/actionlib",
        "/opt/ros/melodic/share/actionlib_msgs",
        "/opt/ros/melodic/share/amcl",
        "/opt/ros/melodic/share/angles",
        "/opt/ros/melodic/share/base_local_planner",
        "/opt/ros/melodic/share/bond",
        "/opt/ros/melodic/share/bondcpp",
        "/opt/ros/melodic/share/bondpy",
        "/opt/ros/melodic/share/camera_calibration_parsers",
        "/opt/ros/melodic/share/camera_info_manager",
        "/opt/ros/melodic/share/catkin",
        "/opt/ros/melodic/share/class_loader",
        "/opt/ros/melodic/share/clear_costmap_recovery",
        "/opt/ros/melodic/share/cmake_modules",
        "/opt/ros/melodic/share/control_msgs",
        "/opt/ros/melodic/share/control_toolbox",
        "/opt/ros/melodic/share/costmap_2d",
        "/opt/ros/melodic/share/cpp_common",
        "/opt/ros/melodic/share/cv_bridge",
        "/opt/ros/melodic/share/depth_image_proc",
        "/opt/ros/melodic/share/diagnostic_msgs",
        "/opt/ros/melodic/share/diagnostic_updater",
        "/opt/ros/melodic/share/dynamic_reconfigure",
        "/opt/ros/melodic/share/eigen_conversions",
        "/opt/ros/melodic/share/eigen_stl_containers",
        "/opt/ros/melodic/share/eigenpy",
        "/ros_ws/src/fetch_ros/fetch_depth_layer",
        "/ros_ws/src/fetch_ros/fetch_description",
        "/ros_ws/src/fetch_gazebo/fetch_gazebo",
        "/ros_ws/src/fetch_gazebo/fetch_gazebo_demo",
        "/ros_ws/src/fetch_ros/fetch_ikfast_plugin",
        "/ros_ws/src/fetch_ros/fetch_maps",
        "/ros_ws/src/fetch_ros/fetch_moveit_config",
        "/ros_ws/src/fetch_ros/fetch_navigation",
        "/opt/ros/melodic/share/gazebo_dev",
        "/opt/ros/melodic/share/gazebo_msgs",
        "/opt/ros/melodic/share/gazebo_plugins",
        "/opt/ros/melodic/share/gazebo_ros",
        "/opt/ros/melodic/share/gencpp",
        "/opt/ros/melodic/share/geneus",
        "/opt/ros/melodic/share/genlisp",
        "/opt/ros/melodic/share/genmsg",
        "/opt/ros/melodic/share/gennodejs",
        "/opt/ros/melodic/share/genpy",
        "/opt/ros/melodic/share/geometric_shapes",
        "/opt/ros/melodic/share/geometry_msgs",
        "/opt/ros/melodic/share/grasping_msgs",
        "/opt/ros/melodic/share/image_geometry",
        "/opt/ros/melodic/share/image_proc",
        "/opt/ros/melodic/share/image_transport",
        "/opt/ros/melodic/share/interactive_markers",
        "/opt/ros/melodic/share/joint_state_publisher",
        "/opt/ros/melodic/share/kdl_conversions",
        "/opt/ros/melodic/share/kdl_parser",
        "/opt/ros/melodic/share/laser_geometry",
        "/opt/ros/melodic/share/map_msgs",
        "/opt/ros/melodic/share/map_server",
        "/opt/ros/melodic/share/media_export",
        "/opt/ros/melodic/share/message_filters",
        "/opt/ros/melodic/share/message_generation",
        "/opt/ros/melodic/share/message_runtime",
        "/opt/ros/melodic/share/mk",
        "/opt/ros/melodic/share/move_base",
        "/opt/ros/melodic/share/move_base_msgs",
        "/opt/ros/melodic/share/moveit_commander",
        "/opt/ros/melodic/share/moveit_core",
        "/opt/ros/melodic/share/moveit_fake_controller_manager",
        "/opt/ros/melodic/share/moveit_kinematics",
        "/opt/ros/melodic/share/moveit_msgs",
        "/opt/ros/melodic/share/moveit_planners_ompl",
        "/opt/ros/melodic/share/moveit_python",
        "/opt/ros/melodic/share/moveit_ros_manipulation",
        "/opt/ros/melodic/share/moveit_ros_move_group",
        "/opt/ros/melodic/share/moveit_ros_perception",
        "/opt/ros/melodic/share/moveit_ros_planning",
        "/opt/ros/melodic/share/moveit_ros_planning_interface",
        "/opt/ros/melodic/share/moveit_ros_robot_interaction",
        "/opt/ros/melodic/share/moveit_ros_visualization",
        "/opt/ros/melodic/share/moveit_ros_warehouse",
        "/opt/ros/melodic/share/moveit_simple_controller_manager",
        "/opt/ros/melodic/share/nav_core",
        "/opt/ros/melodic/share/nav_msgs",
        "/opt/ros/melodic/share/navfn",
        "/opt/ros/melodic/share/nodelet",
        "/opt/ros/melodic/share/nodelet_topic_tools",
        "/opt/ros/melodic/share/object_recognition_msgs",
        "/opt/ros/melodic/share/octomap",
        "/opt/ros/melodic/share/octomap_msgs",
        "/opt/ros/melodic/share/ompl",
        "/opt/ros/melodic/share/open_karto",
        "/opt/ros/melodic/share/orocos_kdl",
        "/opt/ros/melodic/share/pcl_conversions",
        "/opt/ros/melodic/share/pcl_msgs",
        "/opt/ros/melodic/share/pcl_ros",
        "/opt/ros/melodic/share/pluginlib",
        "/opt/ros/melodic/share/polled_camera",
        "/opt/ros/melodic/share/python_orocos_kdl",
        "/opt/ros/melodic/share/python_qt_binding",
        "/opt/ros/melodic/share/random_numbers",
        "/opt/ros/melodic/share/realtime_tools",
        "/opt/ros/melodic/share/resource_retriever",
        "/opt/ros/melodic/share/rgbd_launch",
        "/opt/ros/melodic/share/robot_controllers",
        "/opt/ros/melodic/share/robot_controllers_interface",
        "/opt/ros/melodic/share/robot_controllers_msgs",
        "/opt/ros/melodic/share/robot_state_publisher",
        "/opt/ros/melodic/share/ros_environment",
        "/opt/ros/melodic/share/rosbag",
        "/opt/ros/melodic/share/rosbag_migration_rule",
        "/opt/ros/melodic/share/rosbag_storage",
        "/opt/ros/melodic/share/rosbash",
        "/opt/ros/melodic/share/rosboost_cfg",
        "/opt/ros/melodic/share/rosbuild",
        "/opt/ros/melodic/share/rosclean",
        "/opt/ros/melodic/share/rosconsole",
        "/opt/ros/melodic/share/rosconsole_bridge",
        "/opt/ros/melodic/share/roscpp",
        "/opt/ros/melodic/share/roscpp_serialization",
        "/opt/ros/melodic/share/roscpp_traits",
        "/opt/ros/melodic/share/roscreate",
        "/opt/ros/melodic/share/rosgraph",
        "/opt/ros/melodic/share/rosgraph_msgs",
        "/opt/ros/melodic/share/roslang",
        "/opt/ros/melodic/share/roslaunch",
        "/opt/ros/melodic/share/roslib",
        "/opt/ros/melodic/share/roslisp",
        "/opt/ros/melodic/share/roslz4",
        "/opt/ros/melodic/share/rosmake",
        "/opt/ros/melodic/share/rosmaster",
        "/opt/ros/melodic/share/rosmsg",
        "/opt/ros/melodic/share/rosnode",
        "/opt/ros/melodic/share/rosout",
        "/opt/ros/melodic/share/rospack",
        "/opt/ros/melodic/share/rosparam",
        "/opt/ros/melodic/share/rospy",
        "/opt/ros/melodic/share/rosservice",
        "/opt/ros/melodic/share/rostest",
        "/opt/ros/melodic/share/rostime",
        "/opt/ros/melodic/share/rostopic",
        "/opt/ros/melodic/share/rosunit",
        "/opt/ros/melodic/share/roswtf",
        "/opt/ros/melodic/share/rotate_recovery",
        "/opt/ros/melodic/share/rviz",
        "/opt/ros/melodic/share/sensor_msgs",
        "/opt/ros/melodic/share/shape_msgs",
        "/opt/ros/melodic/share/simple_grasping",
        "/opt/ros/melodic/share/slam_karto",
        "/opt/ros/melodic/share/smclib",
        "/opt/ros/melodic/share/sparse_bundle_adjustment",
        "/opt/ros/melodic/share/srdfdom",
        "/opt/ros/melodic/share/std_msgs",
        "/opt/ros/melodic/share/std_srvs",
        "/opt/ros/melodic/share/stereo_msgs",
        "/opt/ros/melodic/share/teleop_twist_keyboard",
        "/opt/ros/melodic/share/tf",
        "/opt/ros/melodic/share/tf2",
        "/opt/ros/melodic/share/tf2_eigen",
        "/opt/ros/melodic/share/tf2_geometry_msgs",
        "/opt/ros/melodic/share/tf2_kdl",
        "/opt/ros/melodic/share/tf2_msgs",
        "/opt/ros/melodic/share/tf2_py",
        "/opt/ros/melodic/share/tf2_ros",
        "/opt/ros/melodic/share/tf_conversions",
        "/opt/ros/melodic/share/topic_tools",
        "/opt/ros/melodic/share/trajectory_msgs",
        "/opt/ros/melodic/share/urdf",
        "/opt/ros/melodic/share/urdfdom_py",
        "/opt/ros/melodic/share/visualization_msgs",
        "/opt/ros/melodic/share/voxel_grid",
        "/opt/ros/melodic/share/warehouse_ros",
        "/opt/ros/melodic/share/xacro",
        "/opt/ros/melodic/share/xmlrpcpp",
    }
    actual = set(ROS1PackageDatabase._determine_paths(sut))
    assert actual == expected
@pytest.mark.parametrize("sut", ["fetch"], indirect=True)
def test_database_from_paths(sut):
    """Building a database from explicit share paths yields one package per path."""
    share_paths = [
        "/opt/ros/melodic/share/angles",
        "/opt/ros/melodic/share/tf2",
        "/opt/ros/melodic/share/tf2_msgs",
        "/opt/ros/melodic/share/tf2_py",
        "/opt/ros/melodic/share/tf2_ros",
    ]
    database = ROS1PackageDatabase.build(sut, share_paths)
    assert len(database) == len(share_paths)
    assert set(database) == {"angles", "tf2", "tf2_msgs", "tf2_py", "tf2_ros"}
@pytest.mark.skip(reason="ROS2 is not fully supported")
@pytest.mark.parametrize("sut", ["turtlebot3-ros2"], indirect=True)
def test_package_location_ros2(sut):
    """PackageDatabase.build should discover every package path of the
    turtlebot3 ROS2 image: all workspace-built packages plus the distro root."""
    # Workspace-built packages all live under /ros_ws/install/<name>.
    workspace_packages = (
        "pcl_conversions", "ament_pep257", "class_loader", "tf2",
        "rosidl_typesupport_introspection_c", "rviz2", "rmw_fastrtps_shared_cpp",
        "tf2_msgs", "rosidl_typesupport_opensplice_c", "rcl_logging_noop",
        "turtlebot3_teleop", "turtlebot3_fake_node", "turtlebot3_gazebo",
        "turtlebot3_simulations", "test_msgs", "dwb_plugins", "ament_copyright",
        "rclcpp_lifecycle", "rcl_action", "ament_cmake_export_libraries",
        "geometry_msgs", "rviz_common", "rosgraph_msgs", "rosidl_adapter",
        "rcutils", "nav2_voxel_grid", "rmw_fastrtps_cpp", "ament_lint",
        "test_interface_files", "ament_cmake_auto", "ament_cmake_uncrustify",
        "ament_cmake", "ament_index_cpp", "nav_msgs", "dwb_msgs",
        "rviz_rendering_tests", "libcurl_vendor", "rviz_ogre_vendor",
        "nav_2d_utils", "costmap_queue", "rcpputils", "map_msgs",
        "nav2_costmap_2d", "rcl_interfaces", "ament_cmake_flake8",
        "ament_cmake_xmllint", "ament_cmake_gtest", "rclcpp", "std_srvs", "rcl",
        "builtin_interfaces", "ament_lint_auto", "console_bridge_vendor",
        "tf2_ros", "sensor_msgs", "rmw_implementation", "visualization_msgs",
        "ament_cmake_target_dependencies", "unique_identifier_msgs",
        "ament_cmake_ros", "fastrtps_cmake_module", "turtlebot3_navigation2",
        "opensplice_cmake_module", "rcl_yaml_param_parser", "libyaml_vendor",
        "urdf", "ament_lint_cmake", "ament_cpplint", "nav2_util",
        "ament_cmake_cpplint", "nav2_map_server", "nav2_bt_navigator",
        "python_cmake_module", "nav2_bringup", "tf2_geometry_msgs", "dwb_core",
        "ament_package", "osrf_pycommon", "ament_cmake_pep257", "pluginlib",
        "action_msgs", "cartographer_ros_msgs", "message_filters",
        "turtlebot3_cartographer", "ament_flake8", "dwb_controller",
        "nav2_dwb_controller", "rmw", "rviz_assimp_vendor", "turtlebot3_msgs",
        "nav_2d_msgs", "rviz_default_plugins", "pcl_msgs", "rosidl_cmake",
        "ament_cmake_gmock", "nav2_lifecycle_manager",
        "rosidl_typesupport_introspection_cpp", "ament_cmake_export_definitions",
        "lifecycle_msgs", "dwb_critics", "rviz_rendering",
        "rosidl_typesupport_interface", "ament_cmake_libraries",
        "ament_lint_common", "rosidl_typesupport_fastrtps_c",
        "ament_cmake_python", "behaviortree_cpp", "rosidl_typesupport_cpp",
        "launch_testing", "ament_cmake_copyright", "rclcpp_action", "rclpy",
        "ament_cmake_pytest", "dynamixel_sdk",
        "ament_cmake_export_include_directories", "std_msgs",
        "resource_retriever", "nav2_world_model", "nav2_rviz_plugins",
        "rosidl_parser", "turtlebot3_description", "nav2_common",
        "ament_cmake_cppcheck", "ament_cmake_core",
        "rosidl_typesupport_opensplice_cpp", "robot_state_publisher",
        "tf2_eigen", "nav2_recoveries", "rosidl_default_runtime",
        "uncrustify_vendor", "tf2_sensor_msgs", "ament_cmake_export_interfaces",
        "navigation2", "rosidl_typesupport_c", "laser_geometry",
        "rosidl_generator_py", "rosidl_generator_cpp", "ament_cmake_test",
        "rviz_visual_testing_framework", "angles", "launch_testing_ament_cmake",
        "ament_cppcheck", "cartographer_ros", "rosidl_generator_dds_idl",
        "turtlebot3_node", "rosidl_generator_c", "kdl_parser", "rcl_lifecycle",
        "turtlebot3_bringup", "launch_ros", "rosidl_typesupport_fastrtps_cpp",
        "nav2_msgs", "composition_interfaces", "ament_xmllint",
        "hls_lfcd_lds_driver", "eigen3_cmake_module", "ament_index_python",
        "ament_cmake_lint_cmake", "rmw_implementation_cmake",
        "rmw_opensplice_cpp", "turtlebot3", "yaml_cpp_vendor", "nav2_amcl",
        "ament_cmake_export_dependencies", "nav2_behavior_tree",
        "nav2_navfn_planner", "ament_uncrustify", "rosidl_default_generators",
        "ament_cmake_include_directories", "launch",
        "ament_cmake_export_link_flags",
    )
    expected_paths = {f"/ros_ws/install/{name}" for name in workspace_packages}
    # The only path outside the workspace: the underlying ROS2 distribution.
    expected_paths.add("/opt/ros/dashing")
    db = PackageDatabase.build(sut)
    actual_paths = set(db.paths)
    assert actual_paths == expected_paths
| StarcoderdataPython |
171987 | #!/usr/bin/env python3
import sys
import numpy as np
from config import Config
from base import Connect4Base
from random_agent import RandomAgent
from simple_agent import SimpleAgent
from one_step_lookahead_agent import OneStepLookaheadAgent
from n_steps_lookahead_agent import NStepsLookaheadAgent
from cnn_agent import CNNAgent
from network_128x4_64_64 import Network1
class Tournament(Connect4Base):
    """Plays Connect-4 games between two agents on a shared board.

    Agent 1 always owns piece 1 (and moves first); agent 2 owns piece 2.
    """

    def __init__(self, config, agent1, agent2):
        """Set up both agents and announce the match-up.

        :param config: board configuration (rows, columns, win length)
        :param agent1: agent playing piece 1
        :param agent2: agent playing piece 2
        """
        super().__init__(config)
        self.agent1 = agent1
        self.agent2 = agent2
        self.agent1.setup(1)
        self.agent2.setup(2)
        print("Player 1 - {}".format(agent1.name()))
        print("Player 2 - {}".format(agent2.name()))

    def run(self):
        """Play one full game and return the winner: 1, 2, or 0 for a draw."""
        # BUG FIX: ``np.int`` was deprecated in NumPy 1.20 and removed in
        # 1.24; the builtin ``int`` is the documented replacement.
        board = np.full((self.config.rows, self.config.columns), 0, int)
        piece = 1  # player 1 starts first
        winner = 0
        while len(self.valid_moves(board)) > 0:
            agent = self.agent1 if piece == 1 else self.agent2
            col = agent.move(board)
            board = self.drop_piece(board, col, piece)
            if self.check_if_winning(board, piece):
                winner = piece
                break
            piece = piece % 2 + 1  # alternate turns: 1 <-> 2
        # Let both agents observe the outcome (e.g. for learning agents).
        self.agent1.game_over(winner)
        self.agent2.game_over(winner)
        return winner

    def end(self):
        """Give both agents a chance to clean up (e.g. persist models)."""
        self.agent1.teardown()
        self.agent2.teardown()
# run agents
# Number of games comes from the first CLI argument; defaults to 100.
nruns = 100 if len(sys.argv) < 2 else int(sys.argv[1])
print("Number of runs", nruns)
winners = list()
# Standard Connect-4: 6 rows, 7 columns, 4 in a row to win.
config = Config(6, 7, 4)
# Alternative match-ups kept for convenience; uncomment one to switch pairing.
#tournament = Tournament(config, RandomAgent(config), SimpleAgent(config))
#tournament = Tournament(config, SimpleAgent(config), NStepsLookaheadAgent(config, 1))
#tournament = Tournament(config, RandomAgent(config), CNNAgent(config, Network1(), 'rnd'))
#tournament = Tournament(config, OneStepLookaheadAgent(config), CNNAgent(config, Network1(), '1sla'))
tournament = Tournament(config, NStepsLookaheadAgent(config, 3), CNNAgent(config, Network1(), '3sla'))
#tournament = Tournament(config, CNNAgent(config, Network1(), 'cnn'), CNNAgent(config, Network1(), 'cnn'))
for n in range(nruns):
    winner = tournament.run()
    winners.append(winner)
    print("Game", n, ", player", winner, "wins")
tournament.end()
# Tally results: winner 0 means the board filled with no four-in-a-row.
draw = len([n for n in winners if n == 0])
won_1 = len([n for n in winners if n == 1])
won_2 = len([n for n in winners if n == 2])
print("player1:", won_1, ", player2:", won_2, ", draw:", draw)
| StarcoderdataPython |
9757810 | <reponame>Pavloid21/awx<filename>awx/api/urls/deploytemplate.py
from django.conf.urls import url
from awx.api.views.deploytemplate import (DeployTemplateList, DeployTemplateDetail)
# URL patterns for deploy templates: list view at the root, detail view by pk.
_PATTERNS = (
    (r'^$', DeployTemplateList, 'deploy_template_list'),
    (r'^(?P<pk>[0-9]+)/$', DeployTemplateDetail, 'deploy_template_detail'),
)
urls = [url(regex, view.as_view(), name=name) for regex, view, name in _PATTERNS]
__all__ = ['urls'] | StarcoderdataPython |
11340686 | #!/usr/bin/env python
'''
Created on Jul 29, 2015
@author: adrian
'''
import matplotlib
matplotlib.use('Agg')
import json
import os
import scipy.io as sio
import shutil
import sys
from oct2py import octave
from pprint import pprint
from pylab import * # @UnusedWildImport
PNGDIR = os.path.abspath('.') + '/png/'
MATDIR = os.path.abspath('.') + '/mat/'
label_on = False
VERMAGIC = datetime.datetime.now().strftime("data_%m%d")
JSON_NAME = 'json/' + VERMAGIC + '.json'
def jsonify_csi(csi_contents, rssi, pkt_index, xpos, ypos):
    """Convert one packet's CSI matrix into a JSON-serializable node and
    append it to the global ``csi_dict`` under the ``(xpos, ypos)`` key.

    Complex CSI values are stringified so they survive a json.dump round
    trip; RSSI per RX antenna is stored alongside.
    """
    rx_streams = csi_contents[0]  # single TX path: index the RX dimension only
    node = {
        'index': pkt_index,
        'csi_a': [str(c) for c in rx_streams[0]],
        'csi_b': [str(c) for c in rx_streams[1]],
        'csi_c': [str(c) for c in rx_streams[2]],
        'rssi_a': rssi[0],
        'rssi_b': rssi[1],
        'rssi_c': rssi[2],
    }
    csi_dict[str((xpos, ypos))].append(node)
def plot_csi(csi_contents, pkt_number):
    # Plot per-subcarrier SNR (dB) for all three RX antennas of one packet
    # and save the figure as <plot_dir>/<pkt_number>.png.
    # Uses module-level state: label_on (legend added once across calls)
    # and plot_dir (output directory chosen in __main__).
    global label_on, plot_dir
    Ntx, Nrx = csi_contents.shape[:2]
    if Ntx != 1:
        # Only single-TX captures are supported; skip multi-TX packets.
        print "We'll stick to a single TX path for now.", pkt_number
        return
    csi_contents = csi_contents[0] # we only have the RX dimension
    for antenna in range(Nrx):
        # amplitude
        csi_contents[antenna] = [abs(x) for x in csi_contents[antenna]]
        # power
        csi_contents[antenna] = [pow(x, 2) for x in csi_contents[antenna]]
        # get rid of RuntimeWarning: divide by zero encountered in log10
        csi_contents[antenna] = [0.1 if x == 0.0 else x for x in csi_contents[antenna]]
        # dB
        csi_contents[antenna] = 10. * np.log10(csi_contents[antenna])
    # We no longer have complex numbers, so remove +0j
    csi_contents = [abs(x) for x in csi_contents]
    # csi_contents = np.transpose(csi_contents)
    # Attach the legend labels only on the first call; subsequent packets
    # are drawn on the same axes without duplicating legend entries.
    if not label_on:
        plot(csi_contents[0], label='RX Antenna A')
        plot(csi_contents[1], label='RX Antenna B')
        plot(csi_contents[2], label='RX Antenna C')
        label_on = True
    else:
        plot(csi_contents[0])
        plot(csi_contents[1])
        plot(csi_contents[2])
    # Fixed axes: 30 subcarriers on x, 5-30 dB SNR on y.
    axis([0, 30, 5, 30])
    xlabel('Subcarrier index')
    ylabel('SNR [dB]')
    legend(loc='lower right')
    savefig(plot_dir + '%04d' % pkt_number + '.png', bbox_inches='tight')
    # close()
# close()
if __name__ == '__main__':
    # Usage: <script> <.dat trace file> <xpos> <ypos>
    # Passing '?' for both positions selects the online phase.
    if len(sys.argv) < 4:
        print 'Usage: $ %s <.dat file> <xpos> <ypos>' % sys.argv[0]
        sys.exit(1)
    # online phase
    if (sys.argv[2] == '?' and sys.argv[3] == '?'):
        # Online captures go to a dedicated JSON file and a fresh dict.
        JSON_NAME = 'json/online.json'
        plot_dir = PNGDIR + VERMAGIC + '_NA_NA/'
        csi_dict = {}
        xpos, ypos = '?', '?'
    # offline phase
    else:
        xpos, ypos = int(sys.argv[2]), int(sys.argv[3])
        plot_dir = PNGDIR + VERMAGIC + '_' + \
                   '%02d' % xpos + '_' + '%02d' % ypos + '/'
        # Offline data accumulates across runs: reload any existing JSON.
        if os.path.exists(JSON_NAME):
            with open(JSON_NAME, 'rt') as infile:
                csi_dict = json.load(infile)
        else:
            csi_dict = {}
    # Start with a clean plot directory for this (position, date) combination.
    if os.path.exists(plot_dir):
        shutil.rmtree(plot_dir)
    os.mkdir(plot_dir)
    dat_path = os.path.abspath(sys.argv[1])
    # Octave bridge: use the csitool supplementary scripts to parse the trace.
    octave.addpath('/home/adrian/csi/linux-80211n-csitool-supplementary/matlab')
    # FAQ #2
    octave.eval("csi_trace = read_bf_file('" + dat_path + "');")
    pkts = octave.eval("rows(csi_trace);")
    print 'Trace has', pkts, 'packets.'
    # overwrite is permitted
    csi_dict[str((xpos, ypos))] = []
    for index in range(1, int(pkts) + 1): # Octave indexes from 1
        octave.eval("csi_entry = csi_trace{" + str(index) + "};")
        rssi_a, rssi_b, rssi_c = octave.eval("csi_entry.rssi_a;"), \
            octave.eval("csi_entry.rssi_b;"), octave.eval("csi_entry.rssi_c;")
        # Scale CSI, round-trip it through a .mat file to cross the
        # Octave/Python boundary, then record and plot it.
        octave.eval("csi = get_scaled_csi(csi_entry);")
        octave.eval("save -6 " + MATDIR + "temp.mat csi;")
        mat_contents = sio.loadmat(MATDIR + 'temp.mat')['csi']
        jsonify_csi(mat_contents, [rssi_a, rssi_b, rssi_c], index, xpos, ypos)
        plot_csi(mat_contents, index)
    # Persist everything collected in this run.
    with open(JSON_NAME, 'w+') as outfile:
        json.dump(csi_dict, outfile, sort_keys=True, indent=4)
| StarcoderdataPython |
205567 | <filename>200Python/demo/01basic-hello/02basic/dict_set.py
# Dictionary basics: lookup, then mutate a value and read it back.
scores = dict(AA=10, BB=20, CC=30)
print("AA score:", scores['AA'])
print("Before BB score:", scores['BB'])
scores['BB'] = 100
print("After BB score:", scores['BB'])

# Set literal.
age = {1, 2, 3}
print(age)
| StarcoderdataPython |
1870149 | #!/usr/bin/python
'''
1. fasta fname
2. replace with?
'''
from sys import argv,exit
from Bio import SeqIO
# Parse command-line arguments; print the usage docstring on missing args.
try:
    fname = argv[1]
    repl = argv[2]
except IndexError:
    exit(__doc__)

# Canonical DNA bases; anything else is replaced with the given character.
VALID_BASES = frozenset('atcg')

# Use a context manager so the input file is always closed (the original
# leaked the handle), and join() instead of quadratic ``+=`` concatenation.
with open(fname, 'r') as f:
    for r in SeqIO.parse(f, 'fasta'):
        newSeq = ''.join(l if l.lower() in VALID_BASES else repl for l in r.seq)
        # Single-argument print form behaves identically on Python 2 and 3.
        print(">%s\n%s\n" % (r.description, newSeq))
| StarcoderdataPython |
31805 | import logging; module_logger = logging.getLogger(__name__)
from pathlib import Path
# ----------------------------------------------------------------------
def get_chart(virus_type, assay, lab, infix="", chart_dir=Path("merges")):
    """Locate the merged .ace chart for a lab/virus-type/assay combination.

    B lineages ("bvic"/"byam") are abbreviated to "bv"/"by" in file names.

    :raises RuntimeError: when the expected chart file does not exist.
    """
    vt = virus_type[:2] if virus_type in ("bvic", "byam") else virus_type
    chart_filename = chart_dir / f"{lab.lower()}-{vt}-{assay.lower()}{infix}.ace"
    if not chart_filename.exists():
        raise RuntimeError(f"{chart_filename} not found")
    # Deliberately not resolve()d: keeping symlinks avoids regenerating
    # .sh scripts when the underlying charts change.
    return chart_filename
# ======================================================================
### Local Variables:
### eval: (if (fboundp 'eu-rename-buffer) (eu-rename-buffer))
### End:
| StarcoderdataPython |
8139455 | from helga import settings
from helga.plugins import command
@command('showme', aliases=['whois', 'whothehellis'],
         help="Show a URL for the user's intranet page. Usage: helga (showme|whois|whothehellis) <nick>")
def wiki_whois(client, channel, nick, message, cmd, args):  # pragma: no cover
    """Reply with the intranet (wiki) URL for the requested nick.

    ``settings.WIKI_URL`` must contain a ``{user}`` format placeholder.
    """
    target = args[0]
    return settings.WIKI_URL.format(user=target)
| StarcoderdataPython |
4907520 | <reponame>Trondheim-kommune/Tilskuddsbasen
"""rapport purret dato
Revision ID: 4d76a1567fe8
Revises: <KEY>
Create Date: 2015-01-16 10:33:55.678888
"""
# revision identifiers, used by Alembic.
revision = '4d76a1567fe8'
down_revision = '<KEY>'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add a nullable ``rapport_purret_dato`` timestamp column to ``vedtak``
    (records when a report reminder was sent)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('vedtak', sa.Column('rapport_purret_dato', sa.DateTime(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert the migration by dropping ``rapport_purret_dato`` from ``vedtak``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('vedtak', 'rapport_purret_dato')
    ### end Alembic commands ###
| StarcoderdataPython |
5074488 | #!/usr/bin/env python
import numpy as np
from despyastro.coords import *
# Load the TIC reference data: id, equatorial (ra/dec), galactic (lon/lat)
# and ecliptic (lon/lat) coordinates. Columns are assumed to be in that
# order in data/tic_data.dat -- TODO confirm against the file header.
tic, t_ra, t_dec, t_g_lon, t_g_lat, t_ec_lon, t_ec_lat = np.loadtxt("data/tic_data.dat", unpack=True)
tic = tic.astype(int)
# Round-trip: galactic -> ecliptic -> equatorial, then compare each stage
# against the reference values from the file.
ec_lon, ec_lat = gal2ec(t_g_lon, t_g_lat)
ra, dec = ec2eq(ec_lon, ec_lat)
ec_comp = np.column_stack((t_ec_lon, ec_lon, t_ec_lat, ec_lat))
print(" true_ec_lon ec_lon true_ec_lat ec_lat")
print(ec_comp)
# NOTE: "diference" typo is preserved in the output messages below.
print("\nMaximum diference in ec_lat: {:}".format(max(abs(ec_lat-t_ec_lat))))
print("Maximum diference in ec_lon: {:}\n".format(max(abs(ec_lon-t_ec_lon))))
eq_comp = np.column_stack((t_ra, ra, t_dec, dec))
print(" true_ra ra true_dec dec")
print(eq_comp)
print("\nMaximum diference in ra: {:}".format(max(abs(ra-t_ra))))
print("Maximum diference in dec: {:}".format(max(abs(dec-t_dec)))) | StarcoderdataPython |
8139869 | # this segment tree will support two operations:
# 1. set segment [r, l) equal to v
# 2. For segment [r, l) find number of black parts and their length
# We will keep tuple with 4 elements for T:
# number of black parts, their total length, color of left and right ends
# For L we will keep one value 0 or 1: the lazy update for color
class SegmentTree:
    """Segment tree with lazy propagation over a 0/1 (white/black) strip.

    Each node of T holds a 4-tuple describing its segment:
        (number of black runs, total black length,
         color of the segment's left end, color of its right end).
    L holds the pending "paint the whole segment" color (0 or 1), or
    NO_OPERATION when nothing is pending.
    """
    def __init__(self, n):
        # Round the size up to a power of two so the tree is complete.
        self.size = 1
        while self.size < n:
            self.size *= 2
        self.NO_OPERATION = -float("inf") # it should be neutral with respect to op_modify
        self.ZERO = (0, 0, 0, 0)
        self.T = [self.ZERO] * (2 * self.size - 1)
        self.L = [self.NO_OPERATION] * (2 * self.size - 1)
    def op_sum(self, a, b):
        # Merge two children: runs add up, but if the left child's right end
        # and the right child's left end are both black, two runs fuse into one.
        return a[0] + b[0] - int(a[3] == 1 and b[2] == 1), a[1] + b[1], a[2], b[3]
    def propagate(self, x, lx, rx):
        # Push a pending paint at node x (segment [lx, rx)) to its children.
        # Leaves (rx - lx == 1) have no children, so nothing to push.
        if self.L[x] == self.NO_OPERATION or rx - lx == 1:
            return
        mx = (lx + rx)//2
        if self.L[x] == 0:
            # Paint both halves white: no black runs at all.
            self.L[2 * x + 1] = 0
            self.L[2 * x + 2] = 0
            self.T[2 * x + 1] = (0, 0, 0, 0)
            self.T[2 * x + 2] = (0, 0, 0, 0)
        else:
            # Paint both halves black: one full-length run each.
            self.L[2 * x + 1] = 1
            self.L[2 * x + 2] = 1
            self.T[2 * x + 1] = (1, mx - lx, 1, 1)
            self.T[2 * x + 2] = (1, rx - mx, 1, 1)
        self.L[x] = self.NO_OPERATION
    def _update(self, l, r, color, x, lx, rx):
        # Recursive worker: paint [l, r) with color, current node x covers [lx, rx).
        self.propagate(x, lx, rx)
        if l >= rx or lx >= r:
            # Disjoint from the query range.
            return
        if lx >= l and rx <= r:
            # Fully covered: set the node's summary and mark the lazy paint.
            if color == 0:
                self.T[x] = (0, 0, 0, 0)
                self.L[x] = 0
            else:
                self.T[x] = (1, rx - lx, 1, 1)
                self.L[x] = 1
            return
        # Partial overlap: recurse and recombine children.
        mx = (lx + rx)//2
        self._update(l, r, color, 2*x+1, lx, mx)
        self._update(l, r, color, 2*x+2, mx, rx)
        self.T[x] = self.op_sum(self.T[2*x+1], self.T[2*x+2])
    def update(self, l, r, color):
        # Public entry point: paint half-open interval [l, r) with color (0/1).
        return self._update(l, r, color, 0, 0, self.size)
# import sys
# input = sys.stdin.readline
if __name__ == '__main__':
    # Coordinates can be negative, so every position is shifted by n
    # to index into the tree; the tree covers [-n, n) of the axis.
    n = 500001
    m = int(input())
    STree = SegmentTree(2*n)
    for i in range(m):
        # Each query: "W x len" (paint white) or "B x len" (paint black),
        # covering the half-open interval [x, x + len).
        t = [i for i in input().split()]
        l, r = int(t[1]) + n, int(t[1]) + int(t[2]) + n
        if t[0] == "W":
            STree.update(l, r, 0)
        else:
            STree.update(l, r, 1)
        # Root summary: (black run count, total black length, ...).
        x = STree.T[0]
print(str(x[0]) + " " + str(x[1])) | StarcoderdataPython |
40776 | from pettingzoo import AECEnv
from pettingzoo.utils import agent_selector
from pettingzoo.utils import wrappers
from pettingzoo.utils.conversions import parallel_wrapper_fn
from gym_stag_hunt.envs.hunt import HuntEnv
from gym.spaces import Box
import cv2
import numpy as np
def env(grid_size=(5, 5), screen_size=(600, 600), obs_type='image', enable_multiagent=False, opponent_policy='random',
        load_renderer=False, episodes_per_game=1000, stag_follows=True, run_away_after_maul=False,
        forage_quantity=2, stag_reward=5, forage_reward=1, mauling_punishment=-5, max_time_steps=100,
        obs_shape=(42, 42)):
    """
    The env function wraps the environment in 3 wrappers by default. These
    wrappers contain logic that is common to many pettingzoo environments.
    We recommend you use at least the OrderEnforcingWrapper on your own environment
    to provide sane error messages. You can find full documentation for these methods
    elsewhere in the developer documentation.
    All parameters are forwarded unchanged to ZooHuntEnvironment.
    """
    env_init = ZooHuntEnvironment(grid_size, screen_size, obs_type, enable_multiagent, opponent_policy, load_renderer,
                                  episodes_per_game, stag_follows, run_away_after_maul, forage_quantity, stag_reward,
                                  forage_reward, mauling_punishment, max_time_steps, obs_shape)
    # Captures prints from the env so render modes that print still work.
    env_init = wrappers.CaptureStdoutWrapper(env_init)
    # Raises a clear error when an action is outside the action space.
    env_init = wrappers.AssertOutOfBoundsWrapper(env_init)
    # Enforces the reset/step/observe call order of the AEC API.
    env_init = wrappers.OrderEnforcingWrapper(env_init)
    return env_init
# Parallel-API variant derived from the AEC env above.
parallel_env = parallel_wrapper_fn(env)
class ZooHuntEnvironment(AECEnv):
    """PettingZoo AEC wrapper around the two-player Stag Hunt ``HuntEnv``.

    Actions are buffered one agent at a time; once the last agent of the
    round has acted, the accumulated joint action is applied to the wrapped
    environment in :meth:`accumulated_step`.
    """

    metadata = {'render.modes': ['human'], 'name': "pettingzoo_hunt"}

    def __init__(self, grid_size=(5, 5), screen_size=(600, 600), obs_type='image', enable_multiagent=False,
                 opponent_policy='random', load_renderer=False, episodes_per_game=1000, stag_follows=True,
                 run_away_after_maul=False, forage_quantity=2, stag_reward=5, forage_reward=1, mauling_punishment=-5,
                 max_time_steps=100, obs_shape=(42, 42)):
        """
        :param grid_size: A (W, H) tuple corresponding to the grid dimensions. Although W=H is expected, W!=H works also
        :param screen_size: A (W, H) tuple corresponding to the pixel dimensions of the game window
        :param obs_type: Can be 'image' for pixel-array based observations, or 'coords' for just the entity coordinates
        :param episodes_per_game: How many timesteps take place before we reset the entity positions.
        :param stag_follows: Should the stag seek out the nearest agent (true) or take a random move (false)
        :param run_away_after_maul: Does the stag stay on the same cell after mauling an agent (true) or respawn (false)
        :param forage_quantity: How many plants will be placed on the board.
        :param stag_reward: How much reinforcement the agents get for catching the stag
        :param forage_reward: How much reinforcement the agents get for harvesting a plant
        :param mauling_punishment: How much reinforcement the agents get for trying to catch a stag alone (MUST be neg.)
        :param max_time_steps: Hard episode length limit enforced by this wrapper.
        :param obs_shape: (H, W) pixel shape observations are resized to in observe().
        """
        super().__init__()
        self.hunt_env = HuntEnv(grid_size, screen_size, obs_type, enable_multiagent, opponent_policy, load_renderer,
                                episodes_per_game, stag_follows, run_away_after_maul, forage_quantity, stag_reward,
                                forage_reward, mauling_punishment)
        self.possible_agents = ["player_" + str(r) for r in range(2)]
        self.agents = self.possible_agents[:]
        self.shape = obs_shape
        # Observations are resized to obs_shape; keep any trailing channel
        # dims from the wrapped env's observation space.
        observation_space = Box(low=0, high=255, shape=self.shape + self.hunt_env.observation_space.shape[2:],
                                dtype=np.uint8)
        self.observation_spaces = {agent: observation_space for agent in self.possible_agents}
        self.action_spaces = {agent: self.hunt_env.action_space for agent in self.possible_agents}
        self.has_reset = True
        self.agent_name_mapping = dict(zip(self.possible_agents, list(range(len(self.possible_agents)))))
        self.agent_selection = None
        self._agent_selector = agent_selector(self.agents)
        self.done = False
        self.rewards = dict(zip(self.agents, [0 for _ in self.agents]))
        self._cumulative_rewards = dict(zip(self.agents, [0 for _ in self.agents]))
        self.dones = dict(zip(self.agents, [False for _ in self.agents]))
        self.infos = dict(zip(self.agents, [{} for _ in self.agents]))
        self.accumulated_actions = []
        # Placeholder observations until the first reset()/step().
        self.current_observation = {agent: self.observation_spaces[agent].sample() for agent in self.agents}
        self.t = 0
        self.last_rewards = [0, 0]
        self.max_time_steps = max_time_steps

    def observation_space(self, agent):
        """Return the (shared) observation space for *agent*."""
        return self.observation_spaces[agent]

    def action_space(self, agent):
        """Return the (shared) action space for *agent*."""
        return self.action_spaces[agent]

    def reset(self):
        """Reset the wrapped env and all per-agent AEC bookkeeping."""
        obs = self.hunt_env.reset()
        self.agents = self.possible_agents[:]
        self._agent_selector.reinit(self.agents)
        self.agent_selection = self._agent_selector.next()
        # Both agents start from the same joint observation.
        self.current_observation = {agent: obs for agent in self.agents}
        self.agent_name_mapping = dict(zip(self.possible_agents, list(range(len(self.possible_agents)))))
        self.rewards = dict(zip(self.agents, [0 for _ in self.agents]))
        self._cumulative_rewards = dict(zip(self.agents, [0 for _ in self.agents]))
        self.dones = dict(zip(self.agents, [False for _ in self.agents]))
        self.infos = dict(zip(self.agents, [{} for _ in self.agents]))
        self.accumulated_actions = []
        self.t = 0

    def step(self, action):
        """Record *action* for the agent whose turn it is; when the last
        agent in the round has acted, advance the underlying game."""
        acting_agent = self.agent_selection
        self.accumulated_actions.append(action)
        # Clear per-step rewards; accumulated_step() fills them back in.
        for any_agent in self.agents:
            self.rewards[any_agent] = 0
        if self._agent_selector.is_last():
            self.accumulated_step(self.accumulated_actions)
            self.accumulated_actions = []
        self.agent_selection = self._agent_selector.next()
        # BUG FIX: the original loop reused the name ``agent``, so this line
        # always cleared the *last* agent's cumulative reward. Per the
        # PettingZoo AEC convention it must clear the acting agent's.
        self._cumulative_rewards[acting_agent] = 0

    def accumulated_step(self, actions):
        """Apply the buffered joint *actions* to the wrapped HuntEnv and fan
        the results back out to the per-agent AEC bookkeeping."""
        # Track internal environment info.
        self.t += 1
        obs, rewards, done, info = self.hunt_env.step(actions)
        self.last_rewards = rewards
        # Enforce the wrapper-level episode time limit.
        if self.t >= self.max_time_steps:
            done = True
        info = {"t": self.t}
        for idx, agent in enumerate(self.agents):
            self.dones[agent] = done
            self.current_observation[agent] = obs[idx]
            self.rewards[agent] = rewards[idx]
            self.infos[agent] = info

    def observe(self, agent):
        """Return *agent*'s latest observation resized to ``self.shape``."""
        returned_observation = self.current_observation[agent]
        # cv2.resize expects (width, height); self.shape is (height, width).
        returned_observation = cv2.resize(returned_observation, self.shape[::-1], interpolation=cv2.INTER_AREA)
        return returned_observation

    def render(self, mode='human'):
        """Delegate rendering to the wrapped environment."""
        self.hunt_env.render(mode)

    def state(self):
        # Global state is not implemented for this environment.
        pass

    def close(self):
        """Close the wrapped environment (renderer, windows, ...)."""
        self.hunt_env.close()
| StarcoderdataPython |
1810565 | <reponame>pysga1996/python-basic-programming<gh_stars>0
import re
txt = 'The rain in Spain'

# Search for the first occurrence of "ai"; returns a Match object (or None).
pattern = re.compile('ai')
x = pattern.search(txt)
print(x)          # the Match object itself
print(x.span())   # (start, end) positions of the first match
print(x.string)   # the string the search was run against
print(x.group())  # the matched substring ("ai")
12801969 | import boto3
import logging
import os
from random import randrange
from urllib.request import urlopen
from random import randint
# It is not recommended to enable DEBUG logs in production,
# this is just to show an example of a recommendation
# by Amazon CodeGuru Profiler.
logging.getLogger('botocore').setLevel(logging.DEBUG)
# URL fetched on every invocation to generate network activity.
SITE = 'http://www.python.org/'
# CloudWatch namespace used by put_metric().
CW_NAMESPACE = 'ProfilerPythonDemo'
# Destination bucket; raises KeyError at import time if S3_BUCKET is unset.
S3_BUCKET = os.environ['S3_BUCKET']
def lambda_handler(event, context):
    """Sample Lambda function which mocks the operation of checking the current price
    of a stock.
    For demonstration purposes this Lambda function simply returns
    a random integer between 0 and 100 as the stock price.
    Parameters
    ----------
    event: dict, required
        Input event to the Lambda function
    context: object, required
        Lambda Context runtime methods and attributes
    Returns
    ------
    dict: Object containing the current price of the stock
    """
    # Check current price of the stock
    stock_price = randint(
        0, 100
    )  # Current stock price is mocked as a random integer between 0 and 100
    # Make some network calls using urllib and s3 client:
    # fetch SITE and archive the response body to S3.
    with urlopen(SITE) as response:
        s3_client = boto3.client('s3')
        s3_client.put_object(Body=response.read(),
                             Bucket=S3_BUCKET,
                             Key='response.txt')
        # Publish metrics: response size and a coarse HTTP status class
        # (e.g. "2xxStatus") with a count of 1.
        content_length = int(response.headers['Content-Length'])
        put_metric('ResponseContentLength', content_length)
        put_metric(str(response.status)[0] + 'xxStatus', 1)
    # Generate some CPU-intensive work (prime counting over random samples)
    # so the profiler has something to attribute time to.
    num = randrange(content_length)
    count = 0
    for _ in range(num):
        x = randrange(num)
        if check_prime(x):
            count += 1
    return {"stock_price": stock_price}
def put_metric(name, value):
    """Publish a single CloudWatch metric datum under CW_NAMESPACE."""
    boto3.client('cloudwatch').put_metric_data(
        Namespace=CW_NAMESPACE,
        MetricData=[{'MetricName': name, 'Value': value}],
    )
def check_prime(num):
    """Return True if ``num`` is prime, using trial division up to sqrt(num).

    Fix: the original only rejected 0 and 1, so every negative number was
    (wrongly) reported prime; ``num < 2`` covers all non-primes below 2.
    """
    if num < 2:
        return False
    divisor = 2
    while divisor * divisor <= num:
        if num % divisor == 0:
            return False
        divisor += 1
    return True
| StarcoderdataPython |
3575369 | <filename>tests/test_tie_nomove.py
import unittest
from .helpers import C, WHITE, BLACK, NONE
class TestTieNoMove(unittest.TestCase):
    # Tests for tie_nomove(): a position is a tie when the side to move has
    # no legal move. Boards are given as 64-char strings, 8 chars per rank;
    # 'p' is a black pawn, 'P' a white pawn, '.' an empty square.
    def get_board(self, *args, **kwargs):
        # Helper: build a Board from a config string.
        from chess.models import Board
        return Board(*args, **kwargs)
    def get_tie(self, *args, **kwargs):
        # Helper: call the function under test.
        from chess.models import tie_nomove
        return tie_nomove(*args, **kwargs)
    def test_whitenotie(self):
        # White pawn has an empty square ahead, so white can move: no tie.
        config = '........' + \
                 '........' + \
                 '........' + \
                 '.....p..' + \
                 '........' + \
                 '.....P..' + \
                 '........' + \
                 '........'
        board = self.get_board(config)
        result = self.get_tie(board)
        self.assertFalse(result)
    def test_blacknotie(self):
        # Black to move; the black pawn can advance (white pawn is on an
        # adjacent file, not blocking): no tie.
        config = '........' + \
                 '........' + \
                 '........' + \
                 '........' + \
                 '......p.' + \
                 '.......P' + \
                 '........' + \
                 '........'
        board = self.get_board(config)
        board.who_moves = BLACK
        result = self.get_tie(board)
        self.assertFalse(result)
    def test_whitetie(self):
        # Pawns face each other on the same file; white (to move) is
        # completely blocked: tie.
        config = '........' + \
                 '........' + \
                 '........' + \
                 '.....p..' + \
                 '.....P..' + \
                 '........' + \
                 '........' + \
                 '........'
        board = self.get_board(config)
        result = self.get_tie(board)
        self.assertTrue(result)
    def test_blacktie(self):
        # Same blocked position, but with black to move: also a tie.
        config = '........' + \
                 '........' + \
                 '........' + \
                 '.....p..' + \
                 '.....P..' + \
                 '........' + \
                 '........' + \
                 '........'
        board = self.get_board(config)
        board.who_moves = BLACK
        result = self.get_tie(board)
        self.assertTrue(result)
unittest.main() | StarcoderdataPython |
337134 | import click
from esque.cli.options import State, default_options
from .offsets import edit_offsets
from .topic import edit_topic
@click.group(help="Edit a resource.", no_args_is_help=True)
@default_options
def edit(state: State):
    """Root of the ``esque edit`` command group; subcommands do the work."""
    pass
# Register the concrete edit subcommands.
edit.add_command(edit_offsets)
edit.add_command(edit_topic)
| StarcoderdataPython |
11163 | import pytest
import cudf
import mock
from cuxfilter.charts.core.non_aggregate.core_non_aggregate import (
BaseNonAggregate,
)
from cuxfilter.dashboard import DashBoard
from cuxfilter import DataFrame
from cuxfilter.layouts import chart_view
class TestCoreNonAggregateChart:
def test_variables(self):
bnac = BaseNonAggregate()
# BaseChart variables
assert bnac.chart_type is None
assert bnac.x is None
assert bnac.y is None
assert bnac.aggregate_fn == "count"
assert bnac.color is None
assert bnac.height == 0
assert bnac.width == 0
assert bnac.add_interaction is True
assert bnac.chart is None
assert bnac.source is None
assert bnac.source_backup is None
assert bnac.data_points == 0
assert bnac._library_specific_params == {}
assert bnac.stride is None
assert bnac.stride_type == int
assert bnac.min_value == 0.0
assert bnac.max_value == 0.0
assert bnac.x_label_map == {}
assert bnac.y_label_map == {}
assert bnac.title == ""
# test chart name setter
bnac.x = "x"
bnac.y = "y"
bnac.chart_type = "test_chart_type"
assert bnac.name == "x_y_count_test_chart_type_"
# BaseNonAggregateChart variables
assert bnac.use_data_tiles is False
assert bnac.reset_event is None
assert bnac.x_range is None
assert bnac.y_range is None
assert bnac.aggregate_col is None
def test_label_mappers(self):
bnac = BaseNonAggregate()
library_specific_params = {
"x_label_map": {"a": 1, "b": 2},
"y_label_map": {"a": 1, "b": 2},
}
bnac.library_specific_params = library_specific_params
assert bnac.x_label_map == {"a": 1, "b": 2}
assert bnac.y_label_map == {"a": 1, "b": 2}
@pytest.mark.parametrize("chart, _chart", [(None, None), (1, 1)])
def test_view(self, chart, _chart):
bnac = BaseNonAggregate()
bnac.chart = chart
bnac.width = 400
bnac.title = "test_title"
assert str(bnac.view()) == str(
chart_view(_chart, width=bnac.width, title=bnac.title)
)
def test_get_selection_geometry_callback(self):
bnac = BaseNonAggregate()
df = cudf.DataFrame({"a": [1, 2, 2], "b": [3, 4, 5]})
dashboard = DashBoard(dataframe=DataFrame.from_dataframe(df))
assert (
bnac.get_selection_geometry_callback(dashboard).__name__
== "selection_callback"
)
assert callable(type(bnac.get_selection_geometry_callback(dashboard)))
def test_box_selection_callback(self):
bnac = BaseNonAggregate()
bnac.x = "a"
bnac.y = "b"
bnac.chart_type = "temp"
self.result = None
def t_function(data, patch_update=False):
self.result = data
bnac.reload_chart = t_function
df = cudf.DataFrame({"a": [1, 2, 2], "b": [3, 4, 5]})
dashboard = DashBoard(dataframe=DataFrame.from_dataframe(df))
dashboard._active_view = bnac
class evt:
geometry = dict(x0=1, x1=2, y0=3, y1=4, type="rect")
t = bnac.get_selection_geometry_callback(dashboard)
t(evt)
assert self.result.equals(df.query("1<=a<=2 and 3<=b<=4"))
def test_lasso_election_callback(self):
bnac = BaseNonAggregate()
bnac.x = "a"
bnac.y = "b"
bnac.chart_type = "temp"
def t_function(data, patch_update=False):
self.result = data
bnac.reload_chart = t_function
df = cudf.DataFrame({"a": [1, 2, 2], "b": [3, 4, 5]})
dashboard = DashBoard(dataframe=DataFrame.from_dataframe(df))
class evt:
geometry = dict(x=[1, 1, 2], y=[1, 2, 1], type="poly")
final = True
t = bnac.get_selection_geometry_callback(dashboard)
with mock.patch("cuspatial.point_in_polygon") as pip:
pip.return_value = cudf.DataFrame(
{"selection": [True, False, True]}
)
t(evt)
assert pip.called
    @pytest.mark.parametrize(
        "data, _data",
        [
            (cudf.DataFrame(), cudf.DataFrame()),
            (
                cudf.DataFrame({"a": [1, 2, 2], "b": [3, 4, 5]}),
                cudf.DataFrame({"a": [1, 2, 2], "b": [3, 4, 5]}),
            ),
        ],
    )
    def test_calculate_source(self, data, _data):
        """
        Calculate source just calls to the format_source_data function
        which is implemented by chart types inheriting this class.
        """
        bnac = BaseNonAggregate()
        self.result = None

        def t_function(data, patch_update=False):
            # Capture what calculate_source forwards to format_source_data.
            self.result = data

        bnac.format_source_data = t_function
        bnac.calculate_source(data)
        assert self.result.equals(_data)
    @pytest.mark.parametrize(
        "x_range, y_range, query, local_dict",
        [
            (
                (1, 2),
                (3, 4),
                "@x_min<=x<=@x_max and @y_min<=y<=@y_max",
                {"x_min": 1, "x_max": 2, "y_min": 3, "y_max": 4},
            ),
            (
                (0, 2),
                (3, 5),
                "@x_min<=x<=@x_max and @y_min<=y<=@y_max",
                {"x_min": 0, "x_max": 2, "y_min": 3, "y_max": 5},
            ),
        ],
    )
    def test_compute_query_dict(self, x_range, y_range, query, local_dict):
        """compute_query_dict must register the chart's range filter string
        and its local variables on the dashboard's query dictionaries."""
        bnac = BaseNonAggregate()
        bnac.chart_type = "test"
        bnac.x = "x"
        bnac.y = "y"
        bnac.x_range = x_range
        bnac.y_range = y_range
        df = cudf.DataFrame({"x": [1, 2, 2], "y": [3, 4, 5]})
        dashboard = DashBoard(dataframe=DataFrame.from_dataframe(df))
        bnac.compute_query_dict(
            dashboard._query_str_dict, dashboard._query_local_variables_dict
        )
        # The dashboard keys query strings by a composite chart identifier.
        bnac_key = (
            f"{bnac.x}_{bnac.y}"
            f"{'_' + bnac.aggregate_col if bnac.aggregate_col else ''}"
            f"_{bnac.aggregate_fn}_{bnac.chart_type}_{bnac.title}"
        )
        assert dashboard._query_str_dict[bnac_key] == query
        for key in local_dict:
            assert (
                dashboard._query_local_variables_dict[key] == local_dict[key]
            )
    @pytest.mark.parametrize(
        "add_interaction, reset_event, event_1, event_2",
        [
            (True, None, "selection_callback", None),
            (True, "test_event", "selection_callback", "reset_callback"),
            (False, "test_event", None, "reset_callback"),
        ],
    )
    def test_add_events(self, add_interaction, reset_event, event_1, event_2):
        """add_events must attach the selection callback only when interaction
        is enabled, and the reset callback only when a reset event is set."""
        bnac = BaseNonAggregate()
        bnac.add_interaction = add_interaction
        bnac.reset_event = reset_event
        df = cudf.DataFrame({"a": [1, 2, 2], "b": [3, 4, 5]})
        dashboard = DashBoard(dataframe=DataFrame.from_dataframe(df))
        self.event_1 = None
        self.event_2 = None

        def t_func(fn):
            # Records which selection-geometry callback got attached.
            self.event_1 = fn.__name__

        def t_func1(event, fn):
            # Records which event callback got attached.
            self.event_2 = fn.__name__

        bnac.add_selection_geometry_event = t_func
        bnac.add_event = t_func1
        bnac.add_events(dashboard)
        assert self.event_1 == event_1
        assert self.event_2 == event_2
def test_add_reset_event(self):
bnac = BaseNonAggregate()
bnac.chart_type = "test"
bnac.x = "a"
bnac.x_range = (0, 2)
bnac.y_range = (3, 5)
df = cudf.DataFrame({"a": [1, 2, 2], "b": [3, 4, 5]})
dashboard = DashBoard(dataframe=DataFrame.from_dataframe(df))
dashboard._active_view = bnac
def t_func1(event, fn):
fn("event")
bnac.add_event = t_func1
bnac.add_reset_event(dashboard)
assert bnac.x_range is None
assert bnac.y_range is None
    def test_query_chart_by_range(self):
        """query_chart_by_range must reload the chart with only the rows of
        ``source`` whose ``bnac_1.x`` column falls inside the query tuple."""
        bnac = BaseNonAggregate()
        bnac.chart_type = "test"
        bnac.x = "a"
        bnac_1 = BaseNonAggregate()
        bnac_1.chart_type = "test"
        bnac_1.x = "b"
        query_tuple = (4, 5)
        df = cudf.DataFrame({"a": [1, 2, 3, 4], "b": [3, 4, 5, 6]})
        bnac.source = df
        self.result = None
        self.patch_update = None

        def t_func(data, patch_update):
            # Capture both the filtered data and the patch_update flag.
            self.result = data
            self.patch_update = patch_update

        # creating a dummy reload chart fn as its not implemented in core
        # non aggregate chart class
        bnac.reload_chart = t_func
        bnac.query_chart_by_range(
            active_chart=bnac_1, query_tuple=query_tuple, datatile=None
        )
        assert self.result.to_string() == " a b\n1 2 4\n2 3 5"
        assert self.patch_update is False
@pytest.mark.parametrize(
"new_indices, result",
[
([4, 5], " a b\n1 2 4\n2 3 5"),
([], " a b\n0 1 3\n1 2 4\n2 3 5\n3 4 6"),
([3], " a b\n0 1 3"),
],
)
def test_query_chart_by_indices(self, new_indices, result):
bnac = BaseNonAggregate()
bnac.chart_type = "test"
bnac.x = "a"
bnac_1 = BaseNonAggregate()
bnac_1.chart_type = "test"
bnac_1.x = "b"
new_indices = new_indices
df = cudf.DataFrame({"a": [1, 2, 3, 4], "b": [3, 4, 5, 6]})
bnac.source = df
self.result = None
self.patch_update = None
def t_func(data, patch_update):
self.result = data
self.patch_update = patch_update
# creating a dummy reload chart fn as its not implemented in core
# non aggregate chart class
bnac.reload_chart = t_func
bnac.query_chart_by_indices(
active_chart=bnac_1,
old_indices=[],
new_indices=new_indices,
datatile=None,
)
assert self.result.to_string() == result
assert self.patch_update is False
| StarcoderdataPython |
3599780 | import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackView,
)
from django.conf import settings
from ditsso_internal.provider import DitSSOInternalProvider
class DitSSOInternalAdapter(OAuth2Adapter):
    """django-allauth OAuth2 adapter for the DIT internal staff SSO provider."""

    provider_id = DitSSOInternalProvider.id

    # Hostname is configurable via settings; defaults to the staging deployment.
    hostname = getattr(settings, 'DIT_SSO_INTERNAL_HOSTNAME', 'staff-sso-staging.herokuapp.com')

    access_token_url = 'https://{hostname}/o/token/'.format(hostname=hostname)
    authorize_url = 'https://{hostname}/o/authorize/'.format(hostname=hostname)
    profile_url = 'https://{hostname}/api/v1/user/me/'.format(hostname=hostname)

    def complete_login(self, request, app, token, **kwargs):
        """Fetch the user's profile with the access token and build the
        provider's SocialLogin from it.

        Raises requests.HTTPError if the profile endpoint rejects the token.
        """
        resp = requests.get(self.profile_url,
                            params={'access_token': token.token,
                                    'alt': 'json'})
        resp.raise_for_status()
        extra_data = resp.json()
        # NOTE: a previous revision also did ``extra_data.update(resp.json)``
        # inside a try/except TypeError.  ``resp.json`` is a bound method, so
        # dict.update() always raised TypeError and the block was a silent
        # no-op; it has been removed.
        login = self.get_provider().sociallogin_from_response(request, extra_data)
        return login
# Module-level view callables that django-allauth wires into the urlconf.
oauth2_login = OAuth2LoginView.adapter_view(DitSSOInternalAdapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DitSSOInternalAdapter)
5069368 | from ..model_tests_utils import (
status_codes,
DELETE,
PUT,
POST,
GET,
ERROR,
random_model_dict,
check_status_code,
compare_data
)
from core.models import (
UnitType,
)
# Shared mutable state for cross-step references made by the test runner
# (e.g. the 'unittype0__uuid' args below resolve against data captured here).
unittype_test_data = {}

# Declarative CRUD test script: each inner list is one scenario, each dict
# one HTTP step executed in order against the unittype endpoints.
unittype_tests = [
    ##----TEST 0----##
    # creates an unittype
    # gets the unittype
    # puts the unittype
    # gets the updated unittype
    # deletes the updated unittype
    # gets the unittype (should return error)
    [
        {
            'name': 'unittype0',
            'method': POST,
            'endpoint': 'unittype-list',
            # walrus keeps the generated payload for the response comparison
            'body': (request_body := random_model_dict(UnitType)),
            'args': [],
            'query_params': [],
            'is_valid_response': {
                'function': compare_data,
                'args': [],
                'kwargs': {
                    'status_code': POST,
                    'request_body': request_body
                }
            }
        },
        {
            'name': 'unittype0_get_0',
            'method': GET,
            'endpoint': 'unittype-detail',
            'body': {},
            'args': [
                'unittype0__uuid'
            ],
            'query_params': [],
            'is_valid_response': {
                'function': check_status_code,
                'args': [],
                'kwargs': {
                    'status_code': GET
                }
            }
        },
        {
            'name': 'unittype0_update_0',
            'method': PUT,
            'endpoint': 'unittype-detail',
            'body': (request_body := random_model_dict(UnitType)),
            'args': [
                'unittype0__uuid'
            ],
            'query_params': [],
            'is_valid_response': {
                'function': compare_data,
                'args': [],
                'kwargs': {
                    'status_code': PUT,
                    'request_body': request_body
                }
            }
        },
        {
            'name': 'unittype0_get_1',
            'method': GET,
            'endpoint': 'unittype-detail',
            'body': {},
            'args': [
                'unittype0__uuid'
            ],
            'query_params': [],
            'is_valid_response': {
                'function': check_status_code,
                'args': [],
                'kwargs': {
                    'status_code': GET
                }
            }
        },
        {
            'name': 'unittype0_delete_0',
            'method': DELETE,
            'endpoint': 'unittype-detail',
            'body': {},
            'args': [
                'unittype0__uuid'
            ],
            'query_params': [],
            'is_valid_response': {
                'function': check_status_code,
                'args': [],
                'kwargs': {
                    'status_code': DELETE
                }
            }
        },
        {
            # fetching a deleted object must fail
            'name': 'unittype0_get_2',
            'method': GET,
            'endpoint': 'unittype-detail',
            'body': {},
            'args': [
                'unittype0__uuid'
            ],
            'query_params': [],
            'is_valid_response': {
                'function': check_status_code,
                'args': [],
                'kwargs': {
                    'status_code': ERROR
                }
            }
        },
    ],
]
1799299 | <reponame>dschultz0/awslarry<gh_stars>1-10
import unittest
import larry as lry
# Environment names as reported by larry.mturk.environment().
ENVIRONMENT_PROD = 'production'
ENVIRONMENT_SANDBOX = 'sandbox'

# Fixture ids for each environment; set_environment() can infer the
# environment from them (exercised by the tests below).
SANDBOX_HIT = '39HYCOOPKNK26VOMWWPV050D1O9MD5'
SANDBOX_HIT_TYPE = '3W679PTMVMW4B1YPP05F1CL2SYKBXP'
SANDBOX_ASSIGNMENT = '3TEM0PF1Q5W8Q0F8XU7ZRSPG1ARD0O'
PROD_HIT = '30Y6N4AHYOVT3B1E15NSX07Z8YNRDS'
PROD_HIT_TYPE = '32CVJ4DS80UD0FXOVYK5MQJIWDSKV8'
PROD_ASSIGNMENT = '3N4BPTXIO8RWKSXYNI9LV8K4SNYUK5'

# Minimal crowd-html-elements questions/templates used when creating HITs.
SIMPLE_QUESTION = '<script src="https://assets.crowd.aws/crowd-html-elements.js"></script><crowd-form><p>What is the date today?</p><input name="date"></crowd-form>'
SIMPLE_TEMPLATE = '<script src="https://assets.crowd.aws/crowd-html-elements.js"></script><crowd-form><p>What day of the week was {{ date }}?</p><input name="date"></crowd-form>'
SIMPLE_TEMPLATE_URI = 's3://larry-testing/test-objects/mturk/simple_template.html'
BASIC_ANNOTATION_DICT = {'path': 'detail'}
BASIC_ANNOTATION_STRING = 'For easier data science use Larry'
EXTERNAL_URL = 'https://www.google.com'
class MTurkTests(unittest.TestCase):
    """Integration tests for larry.mturk environment switching and HIT
    creation.

    NOTE(review): the HIT-creation tests call the live MTurk sandbox and
    require AWS credentials; they are not isolated unit tests.
    """

    def test_use_production(self):
        """use_production() switches the module into the production environment."""
        lry.mturk.use_production()
        self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
        self.assertTrue(lry.mturk.production())
        self.assertFalse(lry.mturk.sandbox())

    def test_use_sandbox(self):
        """use_sandbox() switches the module into the sandbox environment."""
        lry.mturk.use_sandbox()
        self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
        self.assertTrue(lry.mturk.sandbox())
        self.assertFalse(lry.mturk.production())

    def test_set_environment_prod(self):
        """set_environment('prod') selects production."""
        lry.mturk.set_environment('prod')
        self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
        self.assertTrue(lry.mturk.production())
        self.assertFalse(lry.mturk.sandbox())

    def test_set_environment_sandbox(self):
        """set_environment('sandbox') selects the sandbox."""
        lry.mturk.set_environment('sandbox')
        self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
        self.assertTrue(lry.mturk.sandbox())
        self.assertFalse(lry.mturk.production())

    def test_set_environment_prod_hit(self):
        """A production HIT id must resolve to the production environment."""
        lry.mturk.set_environment(hit_id=PROD_HIT)
        self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
        self.assertTrue(lry.mturk.production())
        self.assertFalse(lry.mturk.sandbox())

    def test_set_environment_sandbox_hit(self):
        """A sandbox HIT id must resolve to the sandbox environment."""
        lry.mturk.set_environment(hit_id=SANDBOX_HIT)
        self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
        self.assertTrue(lry.mturk.sandbox())
        self.assertFalse(lry.mturk.production())

    def test_set_environment_prod_assignment(self):
        """A production assignment id must resolve to production."""
        lry.mturk.set_environment(assignment_id=PROD_ASSIGNMENT)
        self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
        self.assertTrue(lry.mturk.production())
        self.assertFalse(lry.mturk.sandbox())

    def test_set_environment_sandbox_assignment(self):
        """A sandbox assignment id must resolve to the sandbox."""
        lry.mturk.set_environment(assignment_id=SANDBOX_ASSIGNMENT)
        self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
        self.assertTrue(lry.mturk.sandbox())
        self.assertFalse(lry.mturk.production())

    def test_create_hit(self):
        """Create sandbox HITs via every supported question/annotation form."""
        lry.mturk.use_sandbox()
        # annotation round-trips as a dict
        hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward_cents=10, lifetime=60,
                                   assignment_duration=60, max_assignments=1, auto_approval_delay=600,
                                   html_question=SIMPLE_QUESTION, annotation=BASIC_ANNOTATION_DICT)
        self.assertFalse(hit.production)
        hit = lry.mturk.get_hit(hit.hit_id)
        self.assertEqual(hit.annotation, BASIC_ANNOTATION_DICT)
        # annotation round-trips as a plain string
        hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward_cents=10, lifetime=60,
                                   assignment_duration=60, max_assignments=1, auto_approval_delay=600,
                                   html_question=SIMPLE_QUESTION, annotation=BASIC_ANNOTATION_STRING)
        self.assertFalse(hit.production)
        hit = lry.mturk.get_hit(hit.hit_id)
        self.assertEqual(hit.annotation, BASIC_ANNOTATION_STRING)
        # pre-rendered HTMLQuestion payload
        hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
                                   assignment_duration=60, max_assignments=1, auto_approval_delay=600,
                                   question=lry.mturk.render_html_question(SIMPLE_QUESTION))
        self.assertFalse(hit.production)
        # pre-rendered ExternalQuestion payload
        hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
                                   assignment_duration=60, max_assignments=1, auto_approval_delay=600,
                                   question=lry.mturk.render_external_question(EXTERNAL_URL))
        self.assertFalse(hit.production)
        # external question given directly as a URL
        hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
                                   assignment_duration=60, max_assignments=1, auto_approval_delay=600,
                                   external_question=EXTERNAL_URL)
        self.assertFalse(hit.production)
        # inline template + context
        hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
                                   assignment_duration=60, max_assignments=1, auto_approval_delay=600,
                                   question_template=SIMPLE_TEMPLATE, template_context={'date': '2/13/2020'})
        self.assertFalse(hit.production)
        # template loaded from S3 + context
        hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
                                   assignment_duration=60, max_assignments=1, auto_approval_delay=600,
                                   question_template_uri=SIMPLE_TEMPLATE_URI, template_context={'date': '2/13/2020'})
        self.assertFalse(hit.production)

    def test_create_by_hit_type(self):
        """Create a HIT type first, then a HIT referencing it by id."""
        lry.mturk.use_sandbox()
        hit_type_id = lry.mturk.create_hit_type(title="Simple task", description="Answer a simple question",
                                                reward="0.10", assignment_duration=60)
        hit_type_id = lry.mturk.create_hit_type(title="Simple task", description="Answer a simple question",
                                                reward_cents=10, assignment_duration=60, auto_approval_delay=60,
                                                keywords='foo,bar')
        hit = lry.mturk.create_hit(hit_type_id=hit_type_id, lifetime=60, max_assignments=1,
                                   html_question=SIMPLE_QUESTION)
        self.assertFalse(hit.production)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
346717 | <gh_stars>0
# Coordinates of Annapolis, MD (approximate).
latitude = 38.9784
# longitude = -76.4922
# NOTE(review): 283.5078 == 360 - 76.4922, i.e. the west-negative longitude
# above re-expressed on an east-positive 0-360 scale - presumably what the
# consumer of these constants expects; confirm against the caller.
longitude = 283.5078
height = 13  # elevation; units not stated here - TODO confirm (likely metres)
5078216 | <gh_stars>1-10
from typing import Dict
from uuid import UUID, uuid4
import Pyro4
# Default port the server starts allocating from.
_START_PORT = 13337

# Pyro4 disallows pickle by default for security; these lines opt back in.
# NOTE(review): pickle over the wire is unsafe with untrusted peers.
Pyro4.config.SERIALIZERS_ACCEPTED = ['pickle']
Pyro4.config.SERIALIZER = 'pickle'
@Pyro4.expose
class Client(object):
    """Remote-exposed handle for a registered client, identified by a UUID."""

    def __init__(self, uuid: UUID):
        # Identifier assigned by Server.register_client at creation time.
        self._uuid: UUID = uuid

    def get_uuid(self) -> UUID:
        """Return the UUID this client was registered under."""
        return self._uuid
@Pyro4.expose
class Server(object):
    """Remote-exposed registry of Client objects keyed by their UUID."""

    def __init__(self, start_port: int = _START_PORT):
        self._start_port: int = start_port
        self._clients_by_uuid: Dict[UUID, Client] = {}

    def register_client(self):
        """Create a Client under a freshly generated UUID and return it."""
        new_uuid = uuid4()
        new_client = Client(uuid=new_uuid)
        self._clients_by_uuid[new_uuid] = new_client
        return new_client

    def unregister_client(self, uuid: UUID):
        """Drop the client registered under *uuid*; raise ValueError if unknown."""
        removed = self._clients_by_uuid.pop(uuid, None)
        if removed is None:
            raise ValueError('could not find Client for {}'.format(uuid))
6571369 | import operator
import platform
from abc import ABC, abstractmethod
from collections import namedtuple
from os import get_terminal_size
from typing import NoReturn, Optional
# Small value types shared by all window managers.
Position = namedtuple("Position", ["x", "y"])
Size = namedtuple("Size", ["width", "height"])
Rectangle = namedtuple("Rectangle", ["x1", "y1", "x2", "y2"])
# Fixed: the typename previously read "Corners", mismatching the bound name
# and producing confusing reprs like Corners(left=...).
Edges = namedtuple("Edges", ["left", "top", "right", "bottom"])
Color = namedtuple("Color", ["fg", "bg"])
Menu = namedtuple("Menu", ["text_lines", "options", "options_actions"])
# Import the windowing backend matching the current OS; fail fast otherwise.
current_platform = platform.system()

if current_platform == "Windows":
    import win32console
    import win32gui
elif current_platform == "Linux":
    from Xlib import X
    from Xlib.display import Display
    from Xlib.xobject.drawable import Window
elif current_platform == "Darwin":
    import applescript
else:
    raise RuntimeError("OS is not supported")
class Singleton(type):
    """Metaclass that turns any class into a singleton.

    The first call creates the instance and caches it on the class as
    ``instance``; every later call returns that cached object.
    """

    def __init__(cls, name, bases, namespace):  # noqa: ANN001 D101
        super().__init__(name, bases, namespace)
        cls.instance = None

    def __call__(cls, *args, **kwargs):  # noqa: D101 D102
        if cls.instance is None:
            cls.instance = super().__call__(*args, **kwargs)
        return cls.instance
class ABCSingleton(type(ABC), Singleton):
    """Metaclass that combines ABC and Singleton behavior.

    Fixed: the second base used to be ``type(Singleton)``, which is just
    ``type`` (``Singleton`` is itself a metaclass), so the caching
    ``__call__`` was never inherited and classes using this metaclass were
    not actually singletons.  Deriving from ``Singleton`` directly restores
    the singleton behavior while ``type(ABC)`` (ABCMeta) keeps the
    abstract-method enforcement.
    """

    pass
class AbstractWindowManager(metaclass=ABCSingleton):
    """Class to get and manage window position and size, as well as movement
    and resizing.

    Subclasses implement the two OS-specific hooks ``_get_window_rect`` and
    ``_set_window_rect``; everything else is shared frame-to-frame state
    tracking and constraint enforcement.
    """

    def __init__(self):
        # Window constraints attributes.
        # Set any of them to enable constraints (to disallow resizing or moving the window past a certain point)
        # Set any of them to None to disable constraints
        self.min_size: Optional[Size] = None
        self.max_size: Optional[Size] = None
        self.min_pos: Optional[Position] = None
        self.max_pos: Optional[Position] = None

        self.current_rect: Rectangle = None  # window rect coordinates on current frame
        self.previous_rect: Rectangle = None  # window rectangle coordinates on previous frame

    @staticmethod
    def get_position(rect: Rectangle) -> Position:
        """Extracts position (x, y) from rectangle coordinates (upper left corner)"""
        x1, y1, _, _ = rect
        return Position(x1, y1)

    @staticmethod
    def get_size(rect: Rectangle) -> Size:
        """Extracts size (width, height) from rectangle coordinates"""
        x1, y1, x2, y2 = rect
        width = x2 - x1
        height = y2 - y1
        return Size(width, height)

    def get_font_size(self, rect: Rectangle) -> Size:
        """Extracts size (width, height) of each character as pixels"""
        # Pixel size divided by the terminal's column/line count.
        width = int(self.get_size(rect).width / get_terminal_size().columns)
        height = int(self.get_size(rect).height / get_terminal_size().lines)
        return Size(width, height)

    @property
    def position(self) -> Position:
        """Position (x, y) of the window on current frame (upper left corner)"""
        return self.get_position(self.current_rect)

    @property
    def size(self) -> Size:
        """Size (width, height) of the window on current frame"""
        return self.get_size(self.current_rect)

    @property
    def font_size(self) -> Size:
        """Size (width, height) of each character as pixels"""
        return self.get_font_size(self.current_rect)

    @property
    def rect_diff(self) -> Rectangle:
        """
        Difference between current and previous rectangle coordinates of the window.
        Use translated_edges_by for clearer attributes.
        """
        return Rectangle(*map(operator.sub, self.current_rect, self.previous_rect))

    @property
    def translated_edges_by(self) -> Edges:
        """Same as rect_diff, but with clearer attributes in namedtuple"""
        return Edges(*self.rect_diff)

    @property
    def translated_by(self) -> Position:
        """Difference between current and previous position of the window (upper left corner)"""
        return self.get_position(self.rect_diff)

    @property
    def scaled_by(self) -> Size:
        """Difference between current and previous size of the window (in pixels)"""
        return self.get_size(self.rect_diff)

    @property
    def scaled_by_relative(self) -> Size:
        """Difference between current and previous size of the window (as ratios)"""
        # NOTE(review): raises ZeroDivisionError if the previous size is
        # zero in either dimension - confirm callers guard against that.
        width_current, height_current = self.get_size(self.current_rect)
        width_previous, height_previous = self.get_size(self.previous_rect)
        return Size(width_current / width_previous, height_current / height_previous)

    @property
    def was_moved(self) -> bool:
        """Whether the window was moved relative to previous frame"""
        return self.translated_by != (0, 0)

    @property
    def was_resized(self) -> bool:
        """Whether the window was resized relative to previous frame"""
        return self.scaled_by != (0, 0)

    @property
    def was_changed(self) -> bool:
        """Whether the window was moved or resized relative to previous frame"""
        return self.was_moved or self.was_resized

    def update(self) -> NoReturn:
        """Advance the per-frame window state.

        Updates current_rect and previous_rect attributes (!)
        Updates window size/position if user changed it past specified constrains
        This method should be called on every frame, and called only once
        This method should be called BEFORE any operations on the window in any given frame,
        including getting values from properties
        """
        self.previous_rect = self.current_rect
        self.current_rect = self._get_window_rect()

        # Resize window to fit constraints
        constrained_rect = self._fit_constraints(self.current_rect)
        if self.current_rect != constrained_rect:
            self._set_window_rect(constrained_rect)
            self.current_rect = constrained_rect

    def _fit_constraints(self, rect: Rectangle) -> Rectangle:
        # Clamp size into [min_size, max_size] element-wise.
        size = self.get_size(rect)
        if self.min_size and any(map(operator.lt, size, self.min_size)):
            size = tuple(map(max, size, self.min_size))
        if self.max_size and any(map(operator.gt, size, self.max_size)):
            size = tuple(map(min, size, self.max_size))

        # Clamp the upper-left corner into [min_pos, max_pos] element-wise.
        pos = self.get_position(rect)
        if self.min_pos and any(map(operator.lt, pos, self.min_pos)):
            pos = tuple(map(max, pos, self.min_pos))
        if self.max_pos and any(map(operator.gt, pos, self.max_pos)):
            pos = tuple(map(min, pos, self.max_pos))

        # Rebuild the rectangle from the clamped position and size.
        return Rectangle(*pos, *map(operator.add, pos, size))

    def set_window_rect(self, rect: Rectangle) -> NoReturn:
        """Sets window rectangle coordinates

        Sets window rectangle coordinates to passed rect, adhering to specified in attributes constraints
        For clarity it's better if this method is called no more than once per frame, after everything else
        """
        constrained_rect = self._fit_constraints(rect)
        self._set_window_rect(constrained_rect)
        self.current_rect = constrained_rect

    @abstractmethod
    def _get_window_rect(self) -> Rectangle:
        """Get window position and size as coordinates

        OS - specific implementation of calls
        """
        ...

    @abstractmethod
    def _set_window_rect(self, rect: Rectangle) -> NoReturn:
        """Set window position and size to coordinates

        OS - specific implementation of calls
        """
        ...
class Win32WindowManager(AbstractWindowManager):
    """Window manager backed by the Win32 console/GUI APIs."""

    def __init__(self):
        super().__init__()
        self.hwnd = win32console.GetConsoleWindow()

    def _get_window_rect(self) -> Rectangle:
        return Rectangle(*win32gui.GetWindowRect(self.hwnd))

    def _set_window_rect(self, rect: Rectangle) -> NoReturn:
        # Re-query the console handle, mirroring the original behavior.
        hwnd = win32console.GetConsoleWindow()
        position = self.get_position(rect)
        size = self.get_size(rect)
        win32gui.MoveWindow(hwnd, *position, *size, True)
class DarwinWindowManager(AbstractWindowManager):
    """Window manager driving Terminal.app through AppleScript."""

    def _get_window_rect(self) -> Rectangle:
        reply = applescript.run('tell application "Terminal" to get the bounds of the front window')
        coords = [int(part) for part in reply.out.split(", ")]
        return Rectangle(*coords)

    def _set_window_rect(self, rect: Rectangle) -> NoReturn:
        bounds = ", ".join(str(value) for value in (rect.x1, rect.y1, rect.x2, rect.y2))
        applescript.run('tell application "Terminal" to set the bounds of the front window to {' + bounds + "}")
class X11WindowManager(AbstractWindowManager):
    """Window manager class for X11"""

    def __init__(self):
        super().__init__()
        self.display = Display()
        self.root = self.display.screen().root
        # Active window id taken from the EWMH _NET_ACTIVE_WINDOW root property.
        self.window_id = self.root.get_full_property(
            self.display.intern_atom("_NET_ACTIVE_WINDOW"), X.AnyPropertyType
        ).value[0]
        self.window = self.display.create_resource_object("window", self.window_id)

    def _get_window_rect(self) -> Rectangle:
        # Geometry is read from the window's parent - presumably the window
        # manager's frame, so coordinates include decorations; confirm under
        # the target WM.
        geometry = Window.get_geometry(self.window.query_tree().parent)._data
        rect = (
            geometry.get("x"),
            geometry.get("y"),
            geometry.get("width") + geometry.get("x"),
            geometry.get("height") + geometry.get("y"),
        )
        return Rectangle(*rect)

    def _set_window_rect(self, rect: Rectangle) -> NoReturn:
        self.window.configure(x=rect.x1, y=rect.y1, width=(rect.x2 - rect.x1), height=(rect.y2 - rect.y1))
        # Flush the request so the move/resize takes effect immediately.
        self.display.sync()
# Maps platform.system() output to the matching manager implementation.
window_managers = {
    "Windows": Win32WindowManager,
    "Darwin": DarwinWindowManager,
    "Linux": X11WindowManager,
}

WindowManager = window_managers[current_platform]  # import this name to get window manager for current platform!
| StarcoderdataPython |
295099 | """Read in an Ortec SPE file."""
import datetime
import os
import warnings
import dateutil.parser
import numpy as np
from .spectrum_file import (
SpectrumFile,
SpectrumFileParsingError,
SpectrumFileParsingWarning,
)
# Force DeprecationWarning to always be shown for users of this module.
# NOTE(review): mutating the global warning filters at import time affects
# the whole process - confirm this is intentional.
warnings.simplefilter("always", DeprecationWarning)
class SpeFileParsingError(SpectrumFileParsingError):
    """Failed while parsing an SPE file."""

    pass
class SpeFileWritingError(SpectrumFileParsingError):
    """Failed while writing an SPE file.

    NOTE(review): inherits from the *parsing* error base - presumably for
    lack of a dedicated writing base class; confirm before callers start
    distinguishing the two.
    """

    pass
class SpeFile(SpectrumFile):
    """SPE ASCII file parser.

    Just instantiate a class with a filename:
        spec = SpeFile(filename)

    Then the data are in
        spec.data [counts]
        spec.channels
        spec.energies
        spec.bin_edges_kev
        spec.energy_bin_widths
        spec.energy_bin_edges (deprecated)

    ORTEC's SPE file format is given on page 73 of this document:
    http://www.ortec-online.com/download/ortec-software-file-structure-manual.pdf
    """

    def __init__(self, filename):
        """Initialize the SPE file: validate the extension, parse, calibrate."""
        super(SpeFile, self).__init__(filename)
        _, ext = os.path.splitext(self.filename)
        if ext.lower() != ".spe":
            raise SpeFileParsingError("File extension is incorrect: " + ext)
        # SPE-specific members
        self.first_channel = 0
        self.ROIs = []
        self.energy_cal = []
        self.shape_cal = []
        # read in the data
        self.read()
        self.apply_calibration()

    def read(self, verbose=False):
        """Parse the keyword-delimited ($...) records of the file.

        Raises SpeFileParsingError if timing or channel data are malformed.
        """
        print("SpeFile: Reading file " + self.filename)
        self.realtime = 0.0
        self.livetime = 0.0
        self.channels = np.array([], dtype=float)
        self.data = np.array([], dtype=float)
        self.cal_coeff = []
        with open(self.filename, "r") as f:
            # read & remove newlines from end of each line
            lines = [line.strip() for line in f.readlines()]
        i = 0
        while i < len(lines):
            # check whether we have reached a keyword and parse accordingly
            if lines[i] == "$SPEC_ID:":
                i += 1
                self.spectrum_id = lines[i]
                if verbose:
                    print(self.spectrum_id)
            elif lines[i] == "$SPEC_REM:":
                # free-form remarks: collect until the next $KEYWORD
                self.sample_description = ""
                i += 1
                while i < len(lines) and not lines[i].startswith("$"):
                    self.sample_description += lines[i] + "\n"
                    i += 1
                self.sample_description = self.sample_description[:-1]
                i -= 1
                if verbose:
                    print(self.sample_description)
            elif lines[i] == "$DATE_MEA:":
                i += 1
                self.collection_start = dateutil.parser.parse(lines[i])
                if verbose:
                    print(self.collection_start)
            elif lines[i] == "$MEAS_TIM:":
                i += 1
                self.livetime = float(lines[i].split(" ")[0])
                self.realtime = float(lines[i].split(" ")[1])
                if verbose:
                    print(self.livetime, self.realtime)
            elif lines[i] == "$DATA:":
                i += 1
                self.first_channel = int(lines[i].split(" ")[0])
                # I don't know why it would be nonzero
                if self.first_channel != 0:
                    raise SpeFileParsingError(
                        "First channel is not 0: {}".format(self.first_channel)
                    )
                # NOTE: the second field of $DATA is the LAST channel number
                # per the SPE spec, so num_channels + 1 values follow.
                self.num_channels = int(lines[i].split(" ")[1])
                if verbose:
                    print(self.first_channel, self.num_channels)
                j = self.first_channel
                while j <= self.num_channels + self.first_channel:
                    i += 1
                    self.data = np.append(self.data, int(lines[i]))
                    self.channels = np.append(self.channels, j)
                    j += 1
            elif lines[i] == "$ROI:":
                self.ROIs = []
                i += 1
                while i < len(lines) and not lines[i].startswith("$"):
                    self.ROIs.append(lines[i])
                    i += 1
                i -= 1
                if verbose:
                    print(self.ROIs)
            elif lines[i] == "$ENER_FIT:":
                i += 1
                self.energy_cal.append(float(lines[i].split(" ")[0]))
                self.energy_cal.append(float(lines[i].split(" ")[1]))
                if verbose:
                    print(self.energy_cal)
            elif lines[i] == "$MCA_CAL:":
                i += 1
                n_coeff = int(lines[i])
                i += 1
                for j in range(n_coeff):
                    self.cal_coeff.append(float(lines[i].split(" ")[j]))
                if verbose:
                    print(self.cal_coeff)
            elif lines[i] == "$SHAPE_CAL:":
                i += 1
                n_coeff = int(lines[i])
                i += 1
                for j in range(n_coeff):
                    self.shape_cal.append(float(lines[i].split(" ")[j]))
                if verbose:
                    print(self.shape_cal)
            elif lines[i].startswith("$"):
                # unrecognized keyword: stash its payload in metadata
                key = lines[i][1:].rstrip(":")
                i += 1
                values = []
                while i < len(lines) and not lines[i].startswith("$"):
                    values.append(lines[i])
                    i += 1
                if i < len(lines):
                    if lines[i].startswith("$"):
                        i -= 1
                self.metadata[key] = values
            else:
                warnings.warn(
                    "Line {} unknown: ".format(i + 1) + lines[i],
                    SpectrumFileParsingWarning,
                )
            i += 1
        if self.realtime <= 0.0:
            raise SpeFileParsingError(
                "Realtime not parsed correctly: {}".format(self.realtime)
            )
        if self.livetime <= 0.0:
            raise SpeFileParsingError(
                "Livetime not parsed correctly: {}".format(self.livetime)
            )
        if self.livetime > self.realtime:
            raise SpeFileParsingError(
                "Livetime > realtime: {} > {}".format(self.livetime, self.realtime)
            )
        self.collection_stop = self.collection_start + datetime.timedelta(
            seconds=self.realtime
        )

    def _spe_format(self):
        """Format of this spectrum for writing to file."""
        s = ""
        s += "$SPEC_ID:\n"
        s += self.spectrum_id + "\n"
        s += "$SPEC_REM:\n"
        s += self.sample_description + "\n"
        if self.collection_start is not None:
            s += "$DATE_MEA:\n"
            s += "{:%m/%d/%Y %H:%M:%S}\n".format(self.collection_start)
        s += "$MEAS_TIM:\n"
        s += "{:.0f} {:.0f}\n".format(self.livetime, self.realtime)
        s += "$DATA:\n"
        s += "{:.0f} {:d}\n".format(self.first_channel, self.num_channels)
        # Fixed off-by-one: the $DATA header lists first and LAST channel, so
        # there are num_channels + 1 data points (matching the read loop).
        # The previous code wrote only num_channels values, dropping the last
        # channel and breaking read/write round-trips.
        for j in range(self.num_channels + 1):
            s += " {:.0f}\n".format(self.data[j])
        s += "$ROI:\n"
        for line in self.ROIs:
            s += line + "\n"
        if len(self.energy_cal) > 0:
            s += "$ENER_FIT:\n"
            s += "{:f} {:f}\n".format(self.energy_cal[0], self.energy_cal[1])
        if len(self.cal_coeff) > 0:
            s += "$MCA_CAL:\n"
            n_coeff = len(self.cal_coeff)
            s += "{:d}\n".format(n_coeff)
            s += "{:E}".format(self.cal_coeff[0])
            for j in range(1, n_coeff):
                s += " {:E}".format(self.cal_coeff[j])
            s += "\n"
        if len(self.shape_cal) > 0:
            s += "$SHAPE_CAL:\n"
            n_coeff = len(self.shape_cal)
            s += "{:d}\n".format(n_coeff)
            s += "{:E}".format(self.shape_cal[0])
            for j in range(1, n_coeff):
                s += " {:E}".format(self.shape_cal[j])
            s += "\n"
        if len(self.metadata.keys()) > 0:
            for key, values in self.metadata.items():
                s += "$" + key + ":\n"
                for val in values:
                    s += str(val) + "\n"
        # drop the trailing newline; write() re-adds one via print()
        return s[:-1]

    def write(self, filename):
        """Write back to a file."""
        _, ext = os.path.splitext(filename)
        if ext.lower() != ".spe":
            raise SpeFileWritingError("File extension is incorrect: " + ext)
        with open(filename, "w") as outfile:
            print(self._spe_format(), file=outfile)
| StarcoderdataPython |
6574471 | <filename>env/Lib/site-packages/pip/req/req_file.py
from __future__ import absolute_import
import os
import re
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip.download import get_file_content
from pip.req.req_install import InstallRequirement
from pip.utils import normalize_name
# Matches an explicit URL scheme prefix (http/https/file), case-insensitively.
_scheme_re = re.compile(r'^(http|https|file):', re.I)
def _remove_prefixes(line, short_prefix, long_prefix):
    """Strip whichever of the two option prefixes *line* starts with."""
    if line.startswith(short_prefix):
        remainder = line[len(short_prefix):]
        return remainder.lstrip()
    return _remove_prefix(line, long_prefix)
def _remove_prefix(line, prefix):
"""Remove the prefix and eventually one '=' or spaces"""
return re.sub(r'\s*=?\s*', '', line[len(prefix):])
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    """Parse a requirements file, yielding one InstallRequirement per
    requirement line.

    Option lines (``-i``, ``-f``, ``--allow-external``, ...) mutate *finder*
    in place; ``-r``/``--requirement`` lines recurse into nested files.
    Raises TypeError when *session* is not provided.
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    skip_match = None
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    # filename may be a URL; get_file_content handles both cases
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines(), 1):
        line = line.strip()

        # Remove comments from file and all spaces before it
        line = re.sub(r"(^|\s)+#.*$", "", line)
        if not line:
            continue
        if skip_match and skip_match.search(line):
            continue
        if line.startswith(('-r', '--requirement')):
            # nested requirements file: resolve relative to this file/URL
            req_url = _remove_prefixes(line, '-r', '--requirement')
            if _scheme_re.search(filename):
                # Relative to a URL
                req_url = urllib_parse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            for item in parse_requirements(
                    req_url, finder,
                    comes_from=filename,
                    options=options,
                    session=session):
                yield item
        elif line.startswith(('-Z', '--always-unzip')):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif line.startswith(('-f', '--find-links')):
            find_links = _remove_prefixes(line, '-f', '--find-links')
            # FIXME: it would be nice to keep track of the source of
            # the find_links:
            # support a find-links local path relative to a requirements file
            relative_to_reqs_file = os.path.join(reqs_file_dir, find_links)
            if os.path.exists(relative_to_reqs_file):
                find_links = relative_to_reqs_file
            if finder:
                finder.find_links.append(find_links)
        elif line.startswith(('-i', '--index-url')):
            # -i replaces the index list; --extra-index-url appends below
            index_url = _remove_prefixes(line, '-i', '--index-url')
            if finder:
                finder.index_urls = [index_url]
        elif line.startswith('--extra-index-url'):
            line = _remove_prefix(line, '--extra-index-url')
            if finder:
                finder.index_urls.append(line)
        elif line.startswith('--use-wheel'):
            # Default in 1.5
            pass
        elif line.startswith('--no-use-wheel'):
            if finder:
                finder.use_wheel = False
        elif line.startswith('--no-index'):
            if finder:
                finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = _remove_prefix(line, '--allow-external')
            if finder:
                finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            if finder:
                finder.allow_all_external = True
        # Remove in 7.0
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 7.0
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 7.0
        elif line.startswith("--allow-insecure"):
            line = _remove_prefix(line, '--allow-insecure')
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-unverified"):
            line = _remove_prefix(line, '--allow-unverified')
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        else:
            # anything else is an actual requirement (editable or plain)
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith(('-e', '--editable')):
                editable = _remove_prefixes(line, '-e', '--editable')
                req = InstallRequirement.from_editable(
                    editable,
                    comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None,
                    isolated=options.isolated_mode if options else False,
                )
            else:
                req = InstallRequirement.from_line(
                    line,
                    comes_from,
                    isolated=options.isolated_mode if options else False,
                )
            yield req
| StarcoderdataPython |
1625345 | # Generated by Django 2.2.4 on 2019-10-02 18:43
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated (Django 2.2.4): drop the obsolete ``esi_scopes`` field from EveClient."""
    dependencies = [
        ('django_eveonline_connector', '0003_auto_20190903_2005'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='eveclient',
            name='esi_scopes',
        ),
    ]
| StarcoderdataPython |
6621556 | import json
import random
import sys
import time
import deck_stats.deck as deck
def analyze(deck_json):
    """Monte-Carlo simulate opening hands for the deck described by *deck_json*
    and print the most common land distributions, most frequent first.
    """
    my_deck = deck.Deck(deck_json)
    # don't show dozens/hundreds of hands with less than 1% chance of occuring
    num_hands_to_print = 9000
    total_runs = 10000
    # Maps a textual land-distribution key -> number of simulated hands with it.
    opening_hand_mana = {}
    for step in range(0, total_runs):
        if step < total_runs - 1:
            print("Running simulation... [%d] out of [%d]\r" % (step, total_runs,) , end="")
        else:
            print("Running simulation... [%d] out of [%d]" % (total_runs, total_runs,))
        # TODO do we want to clone? shuffle modifies original
        cards = my_deck.cards
        # TODO do we need true randomness? Does this match Magic Arena's algorithm for randomness?
        # use shuffle instead of sample so we can see what next turns will look like
        random.shuffle(cards)
        # NOTE(review): slice takes 6 cards, but an MTG opening hand is 7 —
        # suspected off-by-one (cards[0:7]); confirm intent before changing.
        opening_hand = cards[0:6]
        mana_counts = {}
        for card in opening_hand:
            # count mana in opening hand
            if isinstance(card, deck.LandCard):
                mana_key = card.get_mana_key()
                if mana_key not in mana_counts:
                    mana_counts[mana_key] = 0
                mana_counts[mana_key] += 1
        # now make an appropriate key based on the mana
        opening_hand_mana_keys = []
        for mana_count_key, count in sorted(mana_counts.items()):
            # count = mana_counts[mana_count_key]
            opening_hand_mana_keys.append(str(count) + ' ' + mana_count_key + ' lands')
        opening_hand_mana_key = ', '.join(opening_hand_mana_keys)
        if opening_hand_mana_key not in opening_hand_mana:
            opening_hand_mana[opening_hand_mana_key] = 0
        opening_hand_mana[opening_hand_mana_key] += 1
    print("Simulation was completed!!!")
    # Sort ascending by count, then iterate reversed to report most common first.
    sorted_opening_hands = sorted(opening_hand_mana.items(), key=lambda kv: kv[1])
    num_hands = 0
    for soh_tuple in reversed(sorted_opening_hands):
        key = soh_tuple[0]
        count = soh_tuple[1]
        if len(key) == 0:
            key = ' no lands'
        print(count, " hands with ", key)
        num_hands += count
        if num_hands >= num_hands_to_print:
            break
if __name__ == "__main__":
    # CLI entry point: sole argument is the path to a deck JSON file.
    full_file_path = sys.argv[1]
    # Context manager closes the file deterministically; the original left the
    # handle open until garbage collection.
    with open(full_file_path, 'r') as deck_json_file:
        deck_json = json.loads(deck_json_file.read())
    analyze(deck_json)
1642125 | <reponame>SMSajadi99/Python-Advance<gh_stars>0
from collections import defaultdict
# Read n input lines of whitespace-separated tokens into one flat list `name`.
n = int(input())
tran = {}  # NOTE(review): never used anywhere below — dead variable?
name = []
for i in range(n):
    x=input()
    x=x.split()
    name.extend(x)
# print(name)
def Convert(lst):
    """Group a flat list into records of 4: value, then a 3-tuple key.

    Element 4k is the mapped value; elements 4k+1..4k+3 form the tuple key.
    """
    mapping = {}
    for start in range(0, len(lst), 4):
        key = (lst[start + 1], lst[start + 2], lst[start + 3])
        mapping[key] = lst[start]
    return mapping
# Map each 3-token key tuple to its value token.
lst = Convert(name)
# print(lst)
# Read the query tokens to translate.
t = input()
t=t.split()
h = []
# dIndex maps each single token -> list of key-tuples that contain it.
dIndex = defaultdict(list)
# Side-effect comprehension: index every component of every key tuple.
[dIndex[k].append(t) for t in lst for k in t]
keysWith2 = []
newList = []
for i in t:
    keysWith2.append(dIndex[i])
    # Resolve every key tuple containing token i to its mapped value.
    valuesOf2 = [lst[j] for j in dIndex[i]]
    # Unknown token: fall back to the token itself (as a plain string).
    if (dIndex[i] == []):
        valuesOf2 = i
    newList.append(valuesOf2)
    # print(newList)
    # print(i)
    # print('$#%$%$%$%$%$')
    # print(valuesOf2)
    # print('$#%$%$%$%$%$')
    # print(dIndex[i])
    # print('$#%$%$%$%$%$')
    # print(newList)
# for i in t:
#     h.append(lst.get(i,i))
# print(' '.join(h))
# Flatten each entry (list of values, or a bare string) into one string.
res = [''.join(ele) for ele in newList]
def listToString(s):
    """Join the given sequence of strings with single spaces."""
    return " ".join(s)
# s = ['Geeks', 'for', 'Geeks']
print(listToString(res))
####################################################################
# Alternative (commented-out) solution using a plain dict keyed on a
# single token instead of a 3-token tuple:
# c=dict()
# x=int(input())
# for i in range(x):
#     y=input()
#     l=y.split()
#     c[l[0]]=l[1]
# m=[]
# t=input()
# t=t.split()
# for j in t:
#     m.append(c.get(j,j))
# print(' '.join(m))
3553085 | <reponame>mckinly/cms-django<gh_stars>0
"""
Form for creating a user object
"""
import logging
from django import forms
from django.utils.translation import ugettext_lazy as _
from ...models import UserProfile
from ...utils.translation_utils import ugettext_many_lazy as __
from ..custom_model_form import CustomModelForm
logger = logging.getLogger(__name__)
class UserProfileForm(CustomModelForm):
    """
    Form for creating and modifying user profile objects
    """
    # Form-only field (deliberately absent from Meta.fields): whether to send
    # an account-activation email instead of creating an active account.
    send_activation_link = forms.BooleanField(
        initial=True,
        required=False,
        label=_("Send activation link"),
        help_text=__(
            _(
                "Select this option to create an inactive user account and send an activation link per email to the user."
            ),
            _(
                "This link allows the user to choose a password and activates the account after confirmation."
            ),
        ),
    )
    class Meta:
        """
        This class contains additional meta configuration of the form class, see the :class:`django.forms.ModelForm`
        for more information.
        """
        #: The model of this :class:`django.forms.ModelForm`
        model = UserProfile
        #: The fields of the model which should be handled by this form
        fields = ["regions", "organization", "expert_mode"]
    # pylint: disable=signature-differs
    def save(self, *args, **kwargs):
        """
        This method extends the default ``save()``-method of the base :class:`~django.forms.ModelForm` to set attributes
        which are not directly determined by input fields.
        :param args: The supplied arguments
        :type args: list
        :param kwargs: The supplied keyword arguments
        :type kwargs: dict
        :return: The saved user profile object
        :rtype: ~cms.models.users.user_profile.UserProfile
        """
        # pop kwarg to make sure the super class does not get this param
        user = kwargs.pop("user", None)
        if not self.instance.id:
            # don't commit saving of ModelForm, because required user field is still missing
            kwargs["commit"] = False
        # save ModelForm
        user_profile = super().save(*args, **kwargs)
        if not self.instance.id:
            # attach the (newly created) auth user before the first real save
            user_profile.user = user
            user_profile.save()
        # check if called from UserProfileForm or RegionUserProfileForm
        if "regions" in self.cleaned_data:
            # regions can't be saved if commit=False on the ModelForm, so we have to save them explicitly
            user_profile.regions.set(self.cleaned_data["regions"])
        return user_profile
| StarcoderdataPython |
4959067 | import os
import textwrap
import uuid
from contextlib import contextmanager
import pytest
from dagster import asset, build_init_resource_context, build_input_context, build_output_context
from hacker_news_assets.resources.snowflake_io_manager import (
DB_SCHEMA,
SHARED_SNOWFLAKE_CONF,
connect_snowflake,
snowflake_io_manager,
spark_columns_to_markdown,
)
from pandas import DataFrame as PandasDataFrame
from pyspark.sql import Row, SparkSession
from pyspark.sql.types import IntegerType, StringType, StructField, StructType
def mock_output_context(table_name):
    """Build a dagster OutputContext whose op is a stub asset named *table_name*."""
    @asset(name=table_name)
    def my_asset():
        pass
    return build_output_context(op_def=my_asset.op, name="result")
def mock_input_context(upstream_output_context):
    """Build a dagster InputContext wired to the given upstream output context."""
    ctx = upstream_output_context
    return build_input_context(name=ctx.name, upstream_output=ctx)
@contextmanager
def temporary_snowflake_table(contents: PandasDataFrame):
    """Create a uniquely named table in TESTDB seeded with *contents*; drop it on exit."""
    snowflake_config = dict(database="TESTDB", **SHARED_SNOWFLAKE_CONF)
    # Leading letter keeps the generated identifier valid SQL.
    table_name = "a" + str(uuid.uuid4()).replace("-", "_")
    with connect_snowflake(snowflake_config) as con:
        contents.to_sql(name=table_name, con=con, index=False, schema=DB_SCHEMA)
    try:
        yield table_name
    finally:
        # Always drop, even if the test body raised.
        with connect_snowflake(snowflake_config) as conn:
            conn.execute(f"drop table {DB_SCHEMA}.{table_name}")
@pytest.mark.skipif(
    os.environ.get("TEST_SNOWFLAKE") != "true", reason="avoid dependency on snowflake for tests"
)
def test_handle_output_then_load_input_pandas():
    """Round-trip: write a pandas DataFrame through the IO manager, read it back, expect equality."""
    snowflake_manager = snowflake_io_manager(
        build_init_resource_context(config={"database": "TESTDB"})
    )
    contents1 = PandasDataFrame([{"col1": "a", "col2": 1}])  # just to get the types right
    contents2 = PandasDataFrame([{"col1": "b", "col2": 2}])  # contents we will insert
    with temporary_snowflake_table(contents1) as temp_table_name:
        output_context = mock_output_context(temp_table_name)
        list(snowflake_manager.handle_output(output_context, contents2))  # exhaust the iterator
        input_context = mock_input_context(output_context)
        input_value = snowflake_manager.load_input(input_context)
        assert input_value.equals(contents2), f"{input_value}\n\n{contents2}"
@pytest.mark.skipif(
    os.environ.get("TEST_SNOWFLAKE") != "true", reason="avoid dependency on snowflake for tests"
)
def test_handle_output_spark_then_load_input_pandas():
    """Write a Spark DataFrame through the IO manager, read it back as pandas, compare contents."""
    snowflake_manager = snowflake_io_manager(
        build_init_resource_context(config={"database": "TESTDB"})
    )
    # Snowflake connector jars must be on the Spark classpath for the write path.
    spark = SparkSession.builder.config(
        "spark.jars.packages",
        "net.snowflake:snowflake-jdbc:3.8.0,net.snowflake:spark-snowflake_2.12:2.8.2-spark_3.0",
    ).getOrCreate()
    schema = StructType([StructField("col1", StringType()), StructField("col2", IntegerType())])
    contents = spark.createDataFrame([Row(col1="Thom", col2=51)], schema)
    with temporary_snowflake_table(PandasDataFrame([{"col1": "a", "col2": 1}])) as temp_table_name:
        output_context = mock_output_context(temp_table_name)
        list(snowflake_manager.handle_output(output_context, contents))  # exhaust the iterator
        input_context = mock_input_context(output_context)
        input_value = snowflake_manager.load_input(input_context)
        contents_pandas = contents.toPandas()
        # Compare string renderings since dtypes may differ across the round trip.
        assert str(input_value) == str(contents_pandas), f"{input_value}\n\n{contents_pandas}"
def test_spark_columns_to_markdown():
    """A Spark schema renders as a two-column (Name/Type) markdown table."""
    schema = StructType([StructField("col1", StringType()), StructField("col2", IntegerType())])
    result = spark_columns_to_markdown(schema)
    expected = textwrap.dedent(
        """
        | Name | Type |
        | ---- | ---- |
        | col1 | string |
        | col2 | integer |"""
    )
    assert result == expected
| StarcoderdataPython |
11251088 | <gh_stars>0
# -*-coding:utf-8 -*-
'''
@File : oneho_model.py
@Author : <NAME>
@Date : 2020/5/24
@Desc :
'''
import time
from ServiceOrientedChatbot.reader.data_helper import load_corpus_file
from ServiceOrientedChatbot.utils.logger import logger
class OneHotModel(object):
    """Word-overlap retrieval model over a (contexts, responses) parallel corpus."""

    def __init__(self, corpus_file, word2index):
        """Load the corpus and log how long the build took."""
        started = time.time()
        self.contexts, self.responses = load_corpus_file(corpus_file, word2index)
        logger.debug("Time to build onehot model by %s : %2.f seconds." % (corpus_file, time.time() - started))

    def score(self, l1, l2):
        """Overlap similarity of two token lists: |l1 ∩ l2| / |larger set|.

        Returns 0 when either list is empty.
        """
        if not l1 or not l2:
            return 0
        larger = l1 if len(l1) > len(l2) else l2
        return len(set(l1) & set(l2)) / len(set(larger))

    def similarity(self, query, size=10):
        """Score *query* against every stored context.

        Returns the top *size* (context_index, score) pairs, best first;
        ties keep corpus order (stable sort).
        """
        ranked = [(idx, self.score(query, context)) for idx, context in enumerate(self.contexts)]
        ranked.sort(key=lambda pair: pair[1], reverse=True)
        return ranked[:size]

    def get_docs(self, simi_items):
        """Map (index, score) pairs back to their context and response texts."""
        indices = [idx for idx, _ in simi_items]
        docs = [self.contexts[i] for i in indices]
        answers = [self.responses[i] for i in indices]
        return docs, answers
| StarcoderdataPython |
1705547 | <gh_stars>10-100
import smtplib
import yaml  # was missing: `yaml.load` below raised NameError
from email.message import EmailMessage
# Load mail settings once at import time from the shared config file.
with open('global_config/config.yaml') as settings:
    # safe_load avoids arbitrary-object construction and works on PyYAML >= 6,
    # where yaml.load() without an explicit Loader is a TypeError.
    cfg = yaml.safe_load(settings)
from_address = (cfg['from_address'])
to_address = (cfg['to_address'])
password = (cfg['password'])
smtp_server = (cfg['smtp_server'])
smtp_port = (cfg['smtp_port'])
def send_exception_email(exchange_directory):
    """Email a notice that *exchange_directory* contained no EOD data files.

    Uses the module-level SMTP credentials loaded from config.
    """
    msg = EmailMessage()
    msg['From'] = from_address
    msg['To'] = to_address
    msg['Subject'] = 'Empty Directory In EOD Data'
    msg.set_content('There are no files in ' + exchange_directory)
    try:
        server = smtplib.SMTP_SSL(smtp_server, smtp_port)
        server.login(from_address, password)
        server.send_message(msg)
        server.quit()
    except TimeoutError as e:
        # NOTE(review): only timeouts are swallowed; SMTP auth/send errors
        # still propagate — confirm that is intended.
        print(str(e))
1861271 | <reponame>haichungcn/fs-projectmanager-api
"""empty message
Revision ID: 749aafa62aa6
Revises: <PASSWORD>
Create Date: 2019-12-15 00:47:34.859436
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
    """Apply: drop the ``boardOrder`` column from ``projects``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('projects', 'boardOrder')
    # ### end Alembic commands ###
def downgrade():
    """Revert: re-add the nullable ``boardOrder`` varchar column to ``projects``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('projects', sa.Column('boardOrder', sa.VARCHAR(), autoincrement=False, nullable=True))
    # ### end Alembic commands ###
| StarcoderdataPython |
8006447 | from Game import InvalidMove_Error
from Move import Move
def call_method_on_each(arr, method, *args):
    """Invoke the method named *method* (with *args) on every object in *arr*."""
    for element in arr:
        bound_method = getattr(element, method)
        bound_method(*args)
def str2move(move_str: str, board_size: int) -> Move:
    """Parse a two-character move string ('<col><row>', letters from 'a') into a Move.

    An empty string is interpreted as a pass move.
    """
    if not move_str:
        return Move(is_pass=True)
    col = _chr2ord(move_str[0], board_size)
    row = _chr2ord(move_str[1], board_size)
    return Move(col, row)
def _chr2ord(c: str, board_size: int) -> int:
idx = ord(c) - ord('a')
if idx < 0 or idx >= board_size:
raise InvalidMove_Error(
c + '=' + str(idx) +
' is an invalid row/column index, board size is ' +
str(board_size))
return idx
| StarcoderdataPython |
9700740 | import numpy as np
from stable_baselines.common.policies import MlpPolicy
from stable_baselines.common import make_vec_env
from stable_baselines import TRPO
import os
import gym
from stable_baselines.common.vec_env import DummyVecEnv, VecNormalize
envid = 'PointMazeLeft-v0'
savedir = "MazeTrainedPoliciesBD"
os.makedirs(savedir, exist_ok=True)
def gentraj(weights, ntrajectories, trial, algo, savefile=True):
    """Train a maze policy under *weights* (zero-padded) and return its sampled trajectories.

    Persists the weight vector under savedir/<algo>/weights<trial>.npy.
    """
    weights = np.append(weights,[0])
    algodir = os.path.join(savedir,algo)
    os.makedirs(algodir,exist_ok=True)
    np.save(os.path.join(algodir, "weights%d.npy") % trial, weights)
    allnll = []  # NOTE(review): never populated or returned — dead variable?
    # Single training attempt; loop kept for easy multi-attempt experiments.
    for t in range(1):
        model,trajs = train_maze(weights, ntrajectories, trial, savefile=savefile, attempt=t, algodir = algodir)
    return trajs
def train_maze(weights, ntrajectories, fid, algodir, savefile=True, attempt=1):
    """Train TRPO on the weighted PointMaze env and roll out fixed-length trajectories.

    Returns (model, trajectories) where each trajectory is a dict with
    "observations" and "actions" arrays of up to 100 steps.
    """
    #eargs = {"weights": weights}
    #env_temp = gym.make(envid, weights=weights)
    #env = make_vec_env(env_temp.__class__, env_kwargs=eargs, n_envs=8)
    #env = make_vec_env(envid, env_kwargs={"weights": weights}, n_envs=8)
    #env = DummyVecEnv([lambda: gym.make(envid)])
    env = gym.make(envid, weights=weights, randomstart=True)
    #model = PPO2(MlpPolicy, env, verbose=0, ent_coef=0, gamma=0.9)
    model = TRPO(MlpPolicy, env, verbose=0, gamma=0.9, n_cpu_tf_sess=8)
    model.learn(total_timesteps=300000)
    if savefile:
        model.save(os.path.join(algodir, "ppo2_pm_%d_%d" % (fid, attempt)))
    trajectories = []
    count = -1
    while len(trajectories) < ntrajectories:
        count += 1
        current_obs = None
        current_acts = None
        obs = env.reset()
        dones = False
        while not (dones):
            action = model.predict(obs)[0]  # model.predict(obs)[0]#env.action_space.sample()#
            current_obs = obs if current_obs is None else np.vstack((current_obs, np.array(obs)))
            current_acts = action if current_acts is None else np.vstack((current_acts, np.array(action)))
            obs, rewards, dones, info = env.step(action)
            # Cap every rollout at exactly 100 recorded steps (env done flag ignored).
            dones = len(current_obs) >= 100
        current_traj = {"observations": current_obs, "actions": current_acts}
        trajectories.append(current_traj)
    env.close()
    return model, trajectories
def single_likelihood(model, obs, action):
    """Mean negative log-likelihood of *action* under the model's policy.

    obs/action are indexed as [trajectory, timestep, ...]; one scalar per
    trajectory is accumulated, then averaged across trajectories.
    """
    neg_log_probs = []
    for traj_idx in range(obs.shape[0]):
        total_logp = 0.
        for step_idx in range(obs.shape[1]):
            total_logp += model.action_probability(
                observation=obs[traj_idx, step_idx],
                actions=action[traj_idx, step_idx],
                logp=True).item()
        neg_log_probs.append(-1 * total_logp)
    return np.mean(neg_log_probs)
def gen_traj_pol(algo,n,ntrajectories):
    """Load a previously saved TRPO policy and roll out fixed-length trajectories.

    Loads savedir/<algo>_good1/ppo2_pm_<n>_0.zip and returns (model, trajectories).
    """
    env = gym.make(envid, randomstart=True)
    fid = os.path.join(savedir,algo+"_good1","ppo2_pm_%d_0.zip")%n
    model = TRPO.load(fid,env=env)#(MlpPolicy, env, verbose=0, gamma=0.9, n_cpu_tf_sess=8)
    trajectories = []
    count = -1
    while len(trajectories) < ntrajectories:
        count += 1
        current_obs = None
        current_acts = None
        obs = env.reset()
        dones = False
        while not (dones):
            action = model.predict(obs)[0]  # model.predict(obs)[0]#env.action_space.sample()#
            current_obs = obs if current_obs is None else np.vstack((current_obs, np.array(obs)))
            current_acts = action if current_acts is None else np.vstack((current_acts, np.array(action)))
            obs, rewards, dones, info = env.step(action)
            # Cap every rollout at exactly 100 recorded steps (env done flag ignored).
            dones = len(current_obs) >= 100
        current_traj = {"observations": current_obs, "actions": current_acts}
        trajectories.append(current_traj)
    env.close()
    return model, trajectories
| StarcoderdataPython |
275233 | <reponame>Hyoshin-Park/Test
class MaxAlgorithm:
    """Linear scan for the largest value in a list and its first index."""
    def __init__(self, ns):
        self.nums = ns
        self.maxNum = 0
        self.maxNumIdx = 0
    def setMaxIdxAndNum(self):
        """Scan nums once; keep the first occurrence on ties."""
        self.maxNum = self.nums[0]
        self.maxNumIdx = 0
        for idx in range(1, len(self.nums)):
            if self.nums[idx] > self.maxNum:
                self.maxNum = self.nums[idx]
                self.maxNumIdx = idx
    def getMaxNum(self):
        return self.maxNum
    def getMaxNumIdx(self):
        return self.maxNumIdx
nums = [1, 3, 7, 6, 7, 7, 7, 12, 12, 17]
maxAlo = MaxAlgorithm(nums)  # find the maximum of nums
maxAlo.setMaxIdxAndNum()
maxNum = maxAlo.getMaxNum()  # retrieve the maximum (17 here)
print(maxNum)# 17
# Counting array: one slot per value from 0 up to maxNum inclusive.
indexes = [ 0 for i in range(maxNum + 1)]
print(indexes)
for n in nums:  # tally each value's occurrence count into indexes
    indexes[n] = indexes[n] + 1
print(indexes)
maxAlo = MaxAlgorithm(indexes)  # find the largest count in indexes
maxAlo.setMaxIdxAndNum()
maxNum = maxAlo.getMaxNum()
maxNumIdx = maxAlo.getMaxNumIdx()  # the value (index) holding that count
print(f"maxnum: {maxNum}")
print(f"maxnumIdx: {maxNumIdx}")
print(f"{maxNumIdx} happened {maxNum} times")
| StarcoderdataPython |
323911 | """This module defines a very basic store that's used by the CGI interface
to store session and one-time-key information.
Yes, it's called "sessions" - because originally it only defined a session
class. It's now also used for One Time Key handling too.
"""
__docformat__ = 'restructuredtext'
import os, marshal, time
from cgi import escape
from roundup import hyperdb
from roundup.i18n import _
from roundup.anypy.dbm_ import anydbm, whichdb
class BasicDatabase:
    ''' Provide a nice encapsulation of an anydbm store.
        Keys are id strings, values are automatically marshalled data.
    '''
    # Cached dbm backend name ('dbm', 'gdbm', ...), shared per subclass.
    _db_type = None
    # Filename of the store inside the tracker database dir; set by subclasses.
    name = None
    def __init__(self, db):
        self.config = db.config
        self.dir = db.config.DATABASE
        os.umask(db.config.UMASK)
    def exists(self, infoid):
        # Open for create so a missing store file doesn't raise.
        db = self.opendb('c')
        try:
            return infoid in db
        finally:
            db.close()
    def clear(self):
        path = os.path.join(self.dir, self.name)
        if os.path.exists(path):
            os.remove(path)
        elif os.path.exists(path+'.db'):    # dbm appends .db
            os.remove(path+'.db')
    def cache_db_type(self, path):
        ''' determine which DB wrote the class file, and cache it as an
            attribute of __class__ (to allow for subclassed DBs to be
            different sorts)
        '''
        db_type = ''
        if os.path.exists(path):
            db_type = whichdb(path)
            if not db_type:
                raise hyperdb.DatabaseError(
                    _("Couldn't identify database type"))
        elif os.path.exists(path+'.db'):
            # if the path ends in '.db', it's a dbm database, whether
            # anydbm says it's dbhash or not!
            db_type = 'dbm'
        self.__class__._db_type = db_type
    # Sentinel distinguishing "no default supplied" from a None default.
    _marker = []
    def get(self, infoid, value, default=_marker):
        # Return one field of the record, or *default* when the record is absent.
        db = self.opendb('c')
        try:
            if infoid in db:
                values = marshal.loads(db[infoid])
            else:
                if default != self._marker:
                    return default
                raise KeyError('No such %s "%s"'%(self.name, escape(infoid)))
            return values.get(value, None)
        finally:
            db.close()
    def getall(self, infoid):
        # Return the whole record minus the internal __timestamp bookkeeping key.
        db = self.opendb('c')
        try:
            try:
                d = marshal.loads(db[infoid])
                del d['__timestamp']
                return d
            except KeyError:
                raise KeyError('No such %s "%s"'%(self.name, escape(infoid)))
        finally:
            db.close()
    def set(self, infoid, **newvalues):
        # Merge *newvalues* into the record, creating it (with a timestamp) if new.
        db = self.opendb('c')
        try:
            if infoid in db:
                values = marshal.loads(db[infoid])
            else:
                values = {'__timestamp': time.time()}
            values.update(newvalues)
            db[infoid] = marshal.dumps(values)
        finally:
            db.close()
    def list(self):
        db = self.opendb('r')
        try:
            return list(db.keys())
        finally:
            db.close()
    def destroy(self, infoid):
        db = self.opendb('c')
        try:
            if infoid in db:
                del db[infoid]
        finally:
            db.close()
    def opendb(self, mode):
        '''Low-level database opener that gets around anydbm/dbm
           eccentricities.
        '''
        # figure the class db type
        path = os.path.join(os.getcwd(), self.dir, self.name)
        if self._db_type is None:
            self.cache_db_type(path)
        db_type = self._db_type
        # new database? let anydbm pick the best dbm
        if not db_type:
            return anydbm.open(path, 'c')
        # open the database with the correct module
        dbm = __import__(db_type)
        return dbm.open(path, mode)
    def commit(self):
        # No transactional layer; writes are flushed per-operation.
        pass
    def close(self):
        # Handles are opened and closed per-operation; nothing to release here.
        pass
    def updateTimestamp(self, sessid):
        ''' don't update every hit - once a minute should be OK '''
        sess = self.get(sessid, '__timestamp', None)
        now = time.time()
        if sess is None or now > sess + 60:
            self.set(sessid, __timestamp=now)
    def clean(self):
        ''' Remove session records that haven't been used for a week. '''
        now = time.time()
        week = 60*60*24*7
        for sessid in self.list():
            sess = self.get(sessid, '__timestamp', None)
            if sess is None:
                # Record predates timestamping; stamp it now instead of deleting.
                self.updateTimestamp(sessid)
                continue
            interval = now - sess
            if interval > week:
                self.destroy(sessid)
class Sessions(BasicDatabase):
    """Browser session store, persisted as file 'sessions' in the database dir."""
    name = 'sessions'
class OneTimeKeys(BasicDatabase):
    """One-time-key store, persisted as file 'otks' in the database dir."""
    name = 'otks'
# vim: set sts ts=4 sw=4 et si :
| StarcoderdataPython |
5114758 | <gh_stars>1-10
""" User Model """
from werkzeug.security import check_password_hash, generate_password_hash
from mongoengine import *
import datetime
import app.config
import jwt
class User(Document):
    """Mongo-backed account with password hashing and U2F/Yubikey helpers."""
    username = StringField(max_length=50, required=True, unique=True)
    password_hash = StringField(max_length=128, required=True)
    yubikey_id = StringField(max_length=20, required=True)
    # NOTE(review): mongoengine meta normally takes options like
    # {'indexes': [...]}; 'unique' here has no documented effect — confirm intent.
    meta = {'unique': True}
    @staticmethod
    def hash_password(password):
        """Return a salted hash suitable for storing in password_hash."""
        return generate_password_hash(password)
    def check_password(self, password):
        """Return True when *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)
    def get_u2f_devices(self):
        """Returns U2F devices"""
        # json was never imported at module level, so this raised NameError;
        # import locally to keep the fix self-contained.
        import json
        # NOTE(review): u2f_devices is not declared as a field on this
        # Document — confirm where it is set.
        return json.loads(self.u2f_devices)
    def set_u2f_devices(self, devices):
        """Saves U2F devices"""
        import json  # see get_u2f_devices: module-level import was missing
        self.u2f_devices = json.dumps(devices)
    def has_u2f_devices(self):
        """Checks if user has any enrolled u2f devices"""
        return len(self.get_u2f_devices()) > 0
    @classmethod
    def find_by_username(self, user_name):
        """Return the first user with *user_name*, or None when absent."""
        for user in User.objects(username = user_name):
            return user
""" Token Model """
from mongoengine import *
class RevokedToken(Document):
    """Blacklist entry recording the ``jti`` of a revoked JWT."""
    jti = StringField(max_length=120, required=True)
    @classmethod
    def is_jti_blacklisted(self, jtokeni):
        # Return True when *jtokeni* appears in the revoked-token collection.
        for token in RevokedToken.objects(jti = jtokeni):
            return True
return False | StarcoderdataPython |
1886903 | <filename>vPy27/Application.py
import GUI
import Settings
import Socket
import Initialize
from UserList import UserList
class Application():
    """Glue between the chat GUI, the IRC-style socket, and the user list."""
    # Class-level defaults, rebound per instance in __init__ / connectSocket.
    __gui = ''
    __connected = False
    __logNames = False
    __socket = ''
    __userList = UserList()
    __saveFile = ''
    def __init__(self):
        self.__gui = GUI.GUI(self, GUI.Tk())
        Settings.loadCredentials(self.__gui)
    def addToList(self, user, message):
        # Record a chat line against its user when logging is enabled.
        if not self.__logNames:
            return
        self.__userList.addToList(user, message, self.__gui.getChatBox(), self.__gui.getIngoreStr(), self.__gui.getSaveStr())
        if self.__userList.size() == 1 and Settings.getSaveFileFromKey() != self.__gui.getSaveStr():
            # NOTE(review): neither `saveFileInKey` nor `self.getSaveStr` is
            # defined anywhere visible — likely meant Settings.saveFileInKey
            # and self.__gui.getSaveStr(); confirm before relying on this path.
            saveFileInKey(self.getSaveStr())
    def connectSocket(self):
        # Open the chat socket only when all three credentials are filled in.
        if not self.isConnectionHealthy():
            if (self.__gui.getOauthStr() and self.__gui.getNameStr() and self.__gui.getChnlStr()):
                self.__socket = Socket.openSocket(str(self.__gui.getOauthStr()), str(self.__gui.getNameStr()), str(self.__gui.getChnlStr()))
                self.isConnected(Initialize.joinRoom(self.__socket), True)
    def isConnected(self, boolean=None, fromConnection=False):
        # Getter/setter combo: passing a boolean updates state and the GUI button.
        if boolean != None and boolean != self.__connected:
            self.__gui.setConnecButton(boolean, fromConnection)
            self.__connected = boolean
        return self.__connected
    def sendMessage(self, message=None):
        if not self.__socket:
            return
        if not message:
            Socket.sendMessage(self.__socket)
        else:
            # NOTE(review): self.getChnlStr is not defined on Application —
            # likely meant self.__gui.getChnlStr(); confirm.
            Socket.sendMessage(self.__socket, message, self.getChnlStr())
    def recvBuff(self):
        return Socket.recv_timeout(self.__socket)
    def isConnectionHealthy(self):
        # Connected flag AND the GUI's connect toggle still active.
        return self.isConnected() and self.__gui.isConnectActive()
    def isLoggingActive(self, boolean=None):
        # Logging can only stay on while the connection is healthy.
        if boolean != None:
            self.__logNames = boolean
        self.__logNames = self.__logNames and self.isConnectionHealthy()
        return self.__logNames
    def deleteList(self):
        self.__userList.deleteList()
    def setConnection(self, boolean):
        # True -> connect; False -> close socket and flag disconnected.
        if boolean:
            self.connectSocket()
        else:
            self.__socket.close()
            self.isConnected(False)
    def after(self, time, method):
        # Forward Tk's after() scheduling; `time` shadows the stdlib module name here.
        if self.__gui:
            self.__gui.after(time, method)
    def mainloop(self):
        if self.__gui:
            self.__gui.mainloop()
| StarcoderdataPython |
6423382 | import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
class ContentLayer:
    """GTK grid layer whose content box is sized via right/top margins."""
    def __init__(self, window_width, window_height, dimensions):
        """ Constructor """
        self.__window_width = window_width
        self.__window_height = window_height
        # (width, height) of the usable content area in pixels.
        self.__contentAreaDimensions = dimensions
        self.__layout_container = Gtk.Grid(column_homogeneous=False, column_spacing=0, row_spacing=0)
        self.__build_layer()
    def get_layout_container(self):
        """ Accessor function: returns Gtk layout container """
        return self.__layout_container
    def __build_layer(self):
        """ Initilization: composes layout of content area """
        self.__content_area = Gtk.Box(orientation=Gtk.Orientation.VERTICAL) # Add a box below the message bar
        self.__content_area.set_hexpand(True) # Set box height to 100% of remaining space
        self.__content_area.set_vexpand(True)
        # Margins shrink the expanding box down to the requested content size.
        self.__content_area.set_margin_right(self.__window_width - self.__contentAreaDimensions[0])
        self.__content_area.set_margin_top(self.__window_height - self.__contentAreaDimensions[1])
        self.__layout_container.attach(child=self.__content_area, left=0, top=0, width=1, height=1) # Attach this box to the layout below the message bar
    def addLayoutContainer(self, container):
        # Mount a child widget tree into the content area and make it visible.
        self.__content_area.add(container)
        container.show_all()
    def removeLayoutContainer(self, container):
        self.__content_area.remove(container)
    def updateContentAreaDimensions(self, window_width, window_height):
        # Recompute margins after a window resize; content size stays fixed.
        self.__window_width = window_width
        self.__window_height = window_height
        self.__content_area.set_margin_right(self.__window_width - self.__contentAreaDimensions[0])
        self.__content_area.set_margin_top(self.__window_height - self.__contentAreaDimensions[1])
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.