id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
import getpass
import telnetlib

# Router to configure; username is prompted, password read without echo.
HOST = "192.168.178.55"

user = input("Enter your telnet username: ")
# BUG FIX: this line was a broken "<PASSWORD>()" placeholder; getpass.getpass()
# prompts for the password without echoing it to the terminal.
password = getpass.getpass()

tn = telnetlib.Telnet(HOST)
tn.read_until(b"Username: ")
tn.write(user.encode('ascii') + b"\n")
if password:
    tn.read_until(b"Password: ")
    tn.write(password.encode('ascii') + b"\n")

# Enter privileged/config mode, add two host-route loopbacks, and enable
# OSPF process 4 on all interfaces (wildcard network statement).
tn.write(b"enable\n")
tn.write(b"cisco\n")
tn.write(b"conf t\n")
tn.write(b"int loop 20\n")
tn.write(b"ip address 172.16.17.32 255.255.255.255\n")
tn.write(b"int loop 21\n")
tn.write(b"ip address 172.16.31.10 255.255.255.255\n")
tn.write(b"router ospf 4\n")
tn.write(b"network 0.0.0.0 255.255.255.255 area 0\n")
tn.write(b"end\n")
tn.write(b"exit\n")
# Drain and display everything the device sent back.
print(tn.read_all().decode('ascii'))
| StarcoderdataPython |
4955387 | <filename>tests/http_client_test.py
import unittest
import responses
from braintreehttp import HttpClient, File
from braintreehttp.testutils import TestHarness
class GenericRequest:
    """Minimal stand-in request object used by the HTTP client tests."""

    def __init__(self):
        self.path = ""
        self.verb = ""
        self.headers = {}

    def __str__(self):
        # Render every non-dunder attribute as a "name: value" line, in the
        # (sorted) order dir() reports them.
        rendered = ["{0}: {1}".format(name, getattr(self, name))
                    for name in dir(self) if not name.startswith("__")]
        return "".join(line + "\n" for line in rendered)
class HttpClientTest(TestHarness):
    """Unit tests for braintreehttp's HttpClient, using the `responses`
    library to stub HTTP traffic. The stub_request_* helpers come from
    TestHarness (note: "reponse" typo is in the harness, not fixable here)."""

    @responses.activate
    def test_HttpClient_execute_addsHeaders(self):
        # execute() must attach the client's User-Agent header to the call.
        client = HttpClient(self.environment())
        request = GenericRequest()
        request.path = "/"
        request.verb = "GET"
        self.stub_request_with_empty_reponse(request)
        client.execute(request)
        call = responses.calls[0].request
        self.assertEqual(call.headers["User-Agent"], client.get_user_agent())

    def testHttpClient_addInjector_throwsWhenArgumentNotFunctional(self):
        # Non-callable injectors must be rejected with a TypeError.
        client = HttpClient(self.environment())
        try:
            client.add_injector(1)
            self.fail("client.add_injector did not throw for non-functional argument")
        except TypeError as e:
            self.assertEqual(str(e), "injector must be a function or implement the __call__ method")

    @responses.activate
    def test_HttpClient_addInjector_usesInjectorClass(self):
        # An injector may be any object implementing __call__.
        client = HttpClient(self.environment())

        class TestInjector():
            def __call__(self, request):
                request.headers["Foo"] = "Bar"

        client.add_injector(TestInjector())
        request = GenericRequest()
        request.path = "/"
        request.verb = "GET"
        self.stub_request_with_empty_reponse(request)
        client.execute(request)
        call = responses.calls[0].request
        self.assertEqual(call.headers["Foo"], "Bar")

    @responses.activate
    def test_HttpClient_addInjector_usesInjectorFunction(self):
        # A plain function works as an injector too.
        client = HttpClient(self.environment())

        def inj(request):
            request.headers["Foo"] = "Bar"

        client.add_injector(inj)
        request = GenericRequest()
        request.path = "/"
        request.verb = "GET"
        self.stub_request_with_empty_reponse(request)
        client.execute(request)
        call = responses.calls[0].request
        self.assertEqual(call.headers["Foo"], "Bar")

    @responses.activate
    def test_HttpClient_addInjector_usesInjectorLambda(self):
        # Injectors can mutate the outgoing request (here: drop all headers).
        client = HttpClient(self.environment())
        client.add_injector(lambda req: req.headers.clear())
        request = GenericRequest()
        request.path = "/"
        request.verb = "GET"
        request.headers = {"Foo": "Bar"}
        self.stub_request_with_empty_reponse(request)
        client.execute(request)
        call = responses.calls[0].request
        self.assertFalse("Foo" in call.headers)

    @responses.activate
    def test_HttpClient_execute_usesAllParamsInRequest(self):
        # Verb, path, headers and body must all be forwarded verbatim.
        client = HttpClient(self.environment())
        request = GenericRequest()
        request.path = "/"
        request.verb = "POST"
        request.headers = {
            "Test": "Header",
            "Content-Type": "text/plain"
        }
        request.body = "Some data"
        self.stub_request_with_empty_reponse(request)
        client.execute(request)
        self.assertEqual(len(responses.calls), 1)
        call = responses.calls[0].request
        self.assertEqual(call.method, "POST")
        self.assertEqual(call.url, "http://localhost/")
        self.assertEqual(call.headers["Test"], "Header")
        self.assertEqual(call.body, "Some data")

    @responses.activate
    def test_HttpClient_onError_throwsHttpErrorForNon200StatusCode(self):
        # Non-2xx responses surface as HttpError carrying the response body.
        client = HttpClient(self.environment())
        request = GenericRequest()
        request.path = "/error"
        request.verb = "POST"
        self.stub_request_with_response(request, status=400, response_body="An error occurred!")
        try:
            client.execute(request)
        except BaseException as e:
            self.assertEqual("HttpError", e.__class__.__name__)
            self.assertEqual("An error occurred!", str(e))

    @responses.activate
    def test_HttpClient_onSuccess_returnsResponse_with_empty_body(self):
        # 204 No Content must yield a response with result == None.
        client = HttpClient(self.environment())
        request = GenericRequest()
        request.path = "/"
        request.verb = "GET"
        self.stub_request_with_response(request, status=204)
        response = client.execute(request)
        self.assertIsNone(response.result)

    @responses.activate
    def test_HttpClient_onSuccess_escapesDashesWhenUnmarshaling(self):
        # JSON keys with dashes become snake_case attributes on the result.
        client = HttpClient(self.environment())
        request = GenericRequest()
        request.path = "/"
        request.verb = "GET"
        self.stub_request_with_response(request, "{\"valid-key\": \"valid-data\"}", 201)
        try:
            response = client.execute(request)
            self.assertEqual(response.result.valid_key, "valid-data")
        except BaseException as exception:
            self.fail(exception.message)

    @responses.activate
    def test_HttpClient_executeDoesNotModifyRequest(self):
        # execute() must work on a copy; the caller's request stays untouched.
        client = HttpClient(self.environment())
        request = GenericRequest()
        request.path = "/"
        request.verb = "GET"
        self.stub_request_with_response(request, "{\"valid-key\": \"valid-data\"}", 201)
        client.execute(request)
        self.assertEqual(len(request.headers), 0)

    @responses.activate
    def test_HttpClient_execute_filesArePreservedInCopy(self):
        # File objects in a multipart body must survive the internal copy.
        client = HttpClient(self.environment())
        request = GenericRequest()
        request.path = "/"
        request.verb = "GET"
        request.headers["Content-Type"] = "multipart/related"
        license = File("LICENSE")
        request.body = {
            "license": license
        }
        self.stub_request_with_empty_reponse(request)
        client.execute(request)
        self.assertTrue(str(license.read()) in responses.calls[0].request.body)
        license.close()
if __name__ == '__main__':
    # Allow running this test module directly (outside a test runner).
    unittest.main()
| StarcoderdataPython |
5161723 | <gh_stars>0
import gc
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
import tensorflow as tf
from rl_coach.base_parameters import TaskParameters, DistributedTaskParameters, Frameworks
from rl_coach.core_types import EnvironmentSteps
from rl_coach.utils import get_open_port
from multiprocessing import Process
from tensorflow import logging
import pytest
logging.set_verbosity(logging.INFO)
@pytest.mark.unit_test
def test_basic_rl_graph_manager_with_pong_a3c():
    """Smoke test: the Atari A3C preset builds a TF graph for Pong."""
    tf.reset_default_graph()
    from rl_coach.presets.Atari_A3C import graph_manager
    assert graph_manager
    graph_manager.env_params.level = "PongDeterministic-v4"
    graph_manager.create_graph(task_parameters=TaskParameters(framework_type=Frameworks.tensorflow,
                                                              experiment_path="./experiments/test"))
    # graph_manager.improve()


@pytest.mark.unit_test
def test_basic_rl_graph_manager_with_pong_nec():
    """Smoke test: the Atari NEC preset builds a TF graph for Pong."""
    tf.reset_default_graph()
    from rl_coach.presets.Atari_NEC import graph_manager
    assert graph_manager
    graph_manager.env_params.level = "PongDeterministic-v4"
    graph_manager.create_graph(task_parameters=TaskParameters(framework_type=Frameworks.tensorflow,
                                                              experiment_path="./experiments/test"))
    # graph_manager.improve()


@pytest.mark.unit_test
def test_basic_rl_graph_manager_with_cartpole_dqn():
    """Smoke test: the CartPole DQN preset builds a TF graph."""
    tf.reset_default_graph()
    from rl_coach.presets.CartPole_DQN import graph_manager
    assert graph_manager
    graph_manager.create_graph(task_parameters=TaskParameters(framework_type=Frameworks.tensorflow,
                                                              experiment_path="./experiments/test"))
    # graph_manager.improve()


# Test for identifying memory leak in restore_checkpoint
@pytest.mark.unit_test
def test_basic_rl_graph_manager_with_cartpole_dqn_and_repeated_checkpoint_restore():
    """Smoke test: graph creation with apply_stop_condition enabled.

    The commented-out body below was the original memory-leak repro loop
    for restore_checkpoint; it is kept for reference.
    """
    tf.reset_default_graph()
    from rl_coach.presets.CartPole_DQN import graph_manager
    assert graph_manager
    graph_manager.create_graph(task_parameters=TaskParameters(framework_type=Frameworks.tensorflow,
                                                              experiment_path="./experiments/test",
                                                              apply_stop_condition=True))
    # graph_manager.improve()
    # graph_manager.evaluate(EnvironmentSteps(1000))
    # graph_manager.save_checkpoint()
    #
    # graph_manager.task_parameters.checkpoint_restore_dir = "./experiments/test/checkpoint"
    # while True:
    #     graph_manager.restore_checkpoint()
    #     graph_manager.evaluate(EnvironmentSteps(1000))
    #     gc.collect()


if __name__ == '__main__':
    pass
    # test_basic_rl_graph_manager_with_pong_a3c()
    # test_basic_rl_graph_manager_with_ant_a3c()
    # test_basic_rl_graph_manager_with_pong_nec()
    # test_basic_rl_graph_manager_with_cartpole_dqn()
    # test_basic_rl_graph_manager_with_cartpole_dqn_and_repeated_checkpoint_restore()
    # test_basic_rl_graph_manager_multithreaded_with_pong_a3c()
#test_basic_rl_graph_manager_with_doom_basic_dqn() | StarcoderdataPython |
5136741 | <reponame>TiddlySpace/tiddlyspace
"""
Enhance the default HTML serialization so that when we display
a single tiddler it includes a link to the tiddler in its space.
"""
from tiddlyweb.model.bag import Bag
from tiddlyweb.model.policy import PermissionsError
from tiddlyweb.model.recipe import Recipe
from tiddlyweb.serializations.html import Serialization as HTMLSerialization
from tiddlyweb.wikitext import render_wikitext
from tiddlyweb.web.util import encode_name, tiddler_url
from tiddlywebplugins.tiddlyspace.space import Space
from tiddlywebplugins.tiddlyspace.spaces import space_uri
from tiddlywebplugins.tiddlyspace.template import send_template
class Serialization(HTMLSerialization):
    """
    Subclass of the HTML serialization that adds a "space link"
    linking to the tiddler in the wiki. Uses templates instead of
    HTMLPresenter.
    """

    def __init__(self, environ=None):
        """
        Initialize the serialization. Delete tiddlyweb.title to
        turn off HTMLPresenter.
        """
        HTMLSerialization.__init__(self, environ)
        del self.environ['tiddlyweb.title']

    def list_recipes(self, recipes):
        """
        Send recipes out recipes.html template.
        """
        return send_template(self.environ, 'recipes.html', {
            'meta_keywords': 'recipes, tiddlyspace',
            'meta_description': 'A list of recipes on TiddlySpace',
            'recipes': recipes,
            'title': 'Recipes'})

    def list_bags(self, bags):
        """
        Send bags out bags.html template.
        """
        return send_template(self.environ, 'bags.html', {
            'meta_keywords': 'bags, tiddlyspace',
            'meta_description': 'A list of bags on TiddlySpace',
            'bags': bags,
            'title': 'Bags'})

    def list_tiddlers(self, tiddlers):
        """
        List the tiddlers from a container. Include a link
        to the container if it can be viewed by the current
        user. List the available serializations for the tiddlers.
        """
        tiddlers_url = (self.environ.get('SCRIPT_NAME', '')
                + self.environ.get('PATH_INFO', ''))
        if tiddlers_url.startswith('/tiddlers'):
            tiddlers.link = '/tiddlers'
        # "friendly" template by default; plain template for raw containers.
        template_name = 'friendlytiddlers.html'
        if '/bags/' in tiddlers.link or '/recipes/' in tiddlers.link:
            template_name = 'tiddlers.html'
        container_name = ''
        container_type = 'bags'
        container_url = ''
        container_policy = False
        store = self.environ['tiddlyweb.store']
        user = self.environ['tiddlyweb.usersign']
        space_name = ''
        if not (tiddlers.is_search or tiddlers.is_revisions):
            if tiddlers.recipe:
                name = tiddlers.recipe
                try:
                    # A non-space recipe name raises ValueError; title stays.
                    space_name = Space.name_from_recipe(name)
                    tiddlers.title = 'Tiddlers in %s' % space_name
                except ValueError:
                    pass
                container_url = '/recipes/%s' % name
                container_name = 'Recipe %s' % name
                container_type = 'recipes'
                try:
                    # container_policy flags whether the user may see the link.
                    store.get(Recipe(name)).policy.allows(user, 'read')
                    container_policy = True
                except PermissionsError:
                    pass
            elif tiddlers.bag:
                name = tiddlers.bag
                try:
                    # NOTE(review): uses name_from_recipe on a *bag* name --
                    # confirm this is intended (name_from_bag may be meant).
                    space_name = Space.name_from_recipe(name)
                    tiddlers.title = 'Tiddlers in %s' % space_name
                except ValueError:
                    pass
                container_url = '/bags/%s' % name
                container_name = 'Bag %s' % name
                try:
                    store.get(Bag(name)).policy.allows(user, 'manage')
                    container_policy = True
                except PermissionsError:
                    pass
        if tiddlers.is_revisions:
            container_policy = True
            container_url = tiddlers.link.rsplit('/revisions')[0]
            container_name = 'Head'
        try:
            query_string = self.environ.get('QUERY_STRING', '').decode('utf-8')
        except UnicodeDecodeError:
            query_string = u'invalid+query+string+encoding'
        # Extensions registered in config drive the "other formats" links.
        links = self.environ.get('tiddlyweb.config',
                {}).get('extension_types', {}).keys()
        if query_string:
            query_string = '?%s' % query_string
        if tiddlers.is_search:
            template_name = 'search.html'
            if 'tiddlyweb.query.original' in self.environ:
                tiddlers.title = ('Search for %s'
                        % self.environ['tiddlyweb.query.original'])
        return send_template(self.environ, template_name, {
            'meta_keywords': 'tiddlers, tiddlyspace',
            'meta_description': 'A list of tiddlers on TiddlySpace',
            'title': tiddlers.title,
            'tiddler_url': tiddler_url,
            'environ': self.environ,
            'revisions': tiddlers.is_revisions,
            'tiddlers_url': tiddlers.link,
            'space_uri': space_uri,
            'space_bag': space_bag,
            'query_string': query_string,
            'container_type': container_type,
            'container_name': container_name,
            'container_url': container_url,
            'container_policy': container_policy,
            'links': links,
            'space_name': space_name,
            'tiddlers': tiddlers})

    def recipe_as(self, recipe):
        """
        Send a recipe out the recipe.html template.
        """
        return send_template(self.environ, 'recipe.html', {
            'meta_keywords': 'recipe, tiddlyspace',
            'meta_description': 'A recipe on TiddlySpace',
            'recipe': recipe,
            'title': 'Recipe %s' % recipe.name})

    def bag_as(self, bag):
        """
        Send a bag out as HTML via the bag.html template.
        Report on the permissions and policy for this bag
        for the viewing user.
        """
        user = self.environ['tiddlyweb.usersign']
        policy = bag.policy
        # Template expects owner as a list; wrap the single owner in place.
        policy.owner = [policy.owner]
        user_perms = bag.policy.user_perms(user)
        return send_template(self.environ, 'bag.html', {
            'meta_keywords': 'bag, tiddlyspace',
            'meta_description': 'A bag on TiddlySpace',
            'policy': policy,
            'user_perms': user_perms,
            'bag': bag,
            'title': 'Bag %s' % bag.name})

    def tiddler_as(self, tiddler):
        """
        Transform the provided tiddler into an HTML
        representation of the tiddler packaged in a
        DIV. Render the content using the render_wikitext
        subsystem. Links to the tiddler in the wiki are
        provided.
        """
        tiddlers_url = (self.environ.get('SCRIPT_NAME', '')
                + self.environ.get('PATH_INFO', ''))
        template_name = 'friendlytiddler.html'
        if '/tiddlers/' in tiddlers_url:
            template_name = 'tiddler.html'
        revision = False
        if '/revisions/' in tiddlers_url:
            revision = True
        user = self.environ['tiddlyweb.usersign']
        store = self.environ['tiddlyweb.store']
        if tiddler.recipe:
            list_link = '/recipes/%s/tiddlers' % encode_name(tiddler.recipe)
            list_title = 'Tiddlers in Recipe %s' % tiddler.recipe
        else:
            list_link = '/bags/%s/tiddlers' % encode_name(tiddler.bag)
            list_title = 'Tiddlers in Bag %s' % tiddler.bag
        tiddlerurl = tiddler_url(self.environ, tiddler)
        if revision:
            # Point list and canonical URLs at this tiddler's revision set.
            list_link = '%s/%s/revisions' % (list_link,
                    encode_name(tiddler.title))
            list_title = 'Revisions of %s' % tiddler.title
            tiddlerurl = '%s/revisions/%s' % (tiddlerurl,
                    encode_name('%s' % tiddler.revision))
        try:
            store.get(Bag(tiddler.bag)).policy.allows(user, 'manage')
            container_policy = True
        except PermissionsError:
            container_policy = False
        # Spaces with a custom index page suppress the space link.
        if not self.environ['tiddlyweb.space_settings'].get('index', None):
            space_link, space_name = self._space_link(tiddler)
        else:
            space_link = ''
            space_name = ''
        try:
            modifier_link = space_uri(self.environ, tiddler.modifier)
        except AttributeError:
            modifier_link = ""
        try:
            creator_link = space_uri(self.environ, tiddler.creator)
        except AttributeError:
            creator_link = ""
        links = self.environ.get('tiddlyweb.config',
                {}).get('extension_types', {}).keys()

        def call_space_uri(tiddler):
            # Template helper: space URI from the recipe-name prefix.
            space_name = tiddler.recipe.split('_', 1)[0]
            return space_uri(self.environ, space_name)

        html = render_wikitext(tiddler, self.environ)
        return send_template(self.environ, template_name, {
            'meta_keywords': ', '.join(tiddler.tags),
            'meta_description': tiddler.title,
            'title': '%s' % tiddler.title,
            'tags': tiddler.tags,
            'modifier_link': modifier_link,
            'creator_link': creator_link,
            'fields': tiddler.fields,
            'html': html,
            'list_link': list_link,
            'list_title': list_title,
            'space_link': space_link,
            'space_name': space_name,
            'space_uri': call_space_uri,
            'tiddler': tiddler,
            'container_policy': container_policy,
            'links': links,
            'tiddler_url': tiddlerurl})

    def _space_link(self, tiddler):
        """
        Create a link back to this tiddler in its space.
        """
        if tiddler.recipe:
            space_name = tiddler.recipe.split('_', 1)[0]
            # NOTE(review): unlike the bag branch, the space host portion
            # (space_uri) is not prepended here -- confirm this is intended.
            link = _encode_space_link(tiddler)
        elif space_bag(tiddler.bag):
            space_name = tiddler.bag.split('_', 1)[0]
            space_link_uri = space_uri(self.environ, space_name).rstrip('/')
            link = _encode_space_link(tiddler)
            link = '%s%s' % (space_link_uri, link)
        else:
            # Not a space container: no link to offer.
            return '', ''
        return '%s%s' % (self._server_prefix(), link), space_name
def space_bag(bag_name):
    """
    Return a truthy value when *bag_name* names a standard space bag
    (public or private); only such bags carry a space link.
    """
    is_public = Space.bag_is_public(bag_name)
    return is_public or Space.bag_is_private(bag_name)
def _encode_space_link(tiddler):
    """
    Build the fragment form of a space link, i.e. the URL-encoded
    ``/#[[tiddler.title]]`` (``%5B`` / ``%5D`` are the bracket escapes).
    """
    encoded_title = encode_name(tiddler.title)
    return '/#%5B%5B' + encoded_title + '%5D%5D'
| StarcoderdataPython |
1814190 | <reponame>jvarho/python-oracle-serverless
import json
import cx_Oracle
def version(event, context):
    """AWS Lambda handler: return the Oracle v$version rows as JSON.

    :param event: Lambda event payload (unused).
    :param context: Lambda context (unused).
    :return: dict with ``statusCode`` 200 and a JSON ``body`` of the rows.
    """
    # BUG FIX: the cursor and connection were never closed, leaking a
    # database connection on every invocation. Close both in finally.
    conn = cx_Oracle.connect('user', 'pass', 'host')
    try:
        cursor = conn.cursor()
        try:
            res = cursor.execute('SELECT * from v$version')
            body = json.dumps([i for i in res])
        finally:
            cursor.close()
    finally:
        conn.close()
    response = {
        "statusCode": 200,
        "body": body
    }
    return response
| StarcoderdataPython |
4849333 | import falcon
from broker.rabbitmq_consumer import Consumer
class MessageConsumerResource:
    """Falcon resource that starts a RabbitMQ consumer on GET."""

    def __init__(self):
        # One consumer instance shared by all requests to this resource.
        self._consumer = Consumer()

    def on_get(self, req, res):
        """Begin consuming from the queue named by the ``queue`` query
        parameter, falling back to the name 'default' when absent."""
        # BUG FIX: falcon's get_param signature is (name, required, store,
        # default) -- the original call passed 'default' positionally as the
        # `required` flag, so a missing param raised HTTPBadRequest instead
        # of falling back. Pass it as the keyword default.
        queue = req.get_param('queue', default='default')
        print(f'Starting consumer for queue: {queue}...')
        self._consumer.consume(queue)
        res.status = falcon.HTTP_200
| StarcoderdataPython |
3478817 | import operator
import re
from typing import Iterable
from aio_pika import IncomingMessage
from gino import NoResultFound
from ninjin.decorator import (
actor,
lazy
)
from ninjin.exceptions import (
UnknownHandler,
ValidationError
)
from ninjin.filtering import (
ALL,
BasicFiltering
)
from ninjin.logger import logger
from ninjin.ordering import BasicOrdering
from ninjin.pagination import BasicPagination
from ninjin.schema import IdSchema
class Resource():
    """Base RPC resource: deserializes an incoming AMQP message and
    dispatches it to a handler registered in ``actors`` or
    ``periodic_tasks`` (populated by the ``@actor`` decorator machinery)."""

    pool = None
    consumer_key = None
    serializer_class = None
    deserializer_class = None
    # NOTE(review): class-level mutable registries -- shared across all
    # subclass instances unless redefined per subclass; confirm intended.
    actors = {}
    periodic_tasks = {}

    @classmethod
    def resource_name(cls):
        # Model (or class) name, lower-cased, with a trailing "resource"
        # suffix stripped: e.g. FooResource -> "foo".
        return re.sub('(resource)$', '', getattr(cls, 'model', cls).__name__.lower())

    def __repr__(self):
        return 'resource'

    def __init__(self, deserialized_data, message: IncomingMessage):
        self.deserialized_data = deserialized_data
        self.message = message
        self.raw = deserialized_data.get('payload', {})
        # Filled in by dispatch() once the handler's schema is known.
        self.payload = None

    async def filter(self, *args, **kwargs):
        raise NotImplementedError()

    async def paginate(self, *args, **kwargs):
        raise NotImplementedError()

    async def order(self, *args, **kwargs):
        raise NotImplementedError()

    def serialize(self, data: [dict, Iterable]) -> dict:
        # Without a serializer the data passes through untouched.
        if not self.serializer_class:
            return data
        return self.serializer_class(many=isinstance(data, list)).dump(data)

    def deserialize(self, data: dict) -> dict:
        """
        many is not allowed at the moment
        :param data: raw payload dict from the message
        :return: validated/loaded payload (pass-through when no schema)
        """
        if not self.deserializer_class:
            return data
        return self.deserializer_class().load(data)

    def validate(self, data: dict):
        """
        Not necessary as soon as it validated during deserialization already
        :param data: payload dict to validate against serializer_class
        :return: None; raises ValidationError on schema errors
        """
        if self.serializer_class:
            errors = self.serializer_class.validate(data)
            if errors:
                raise ValidationError('Deserialization Error: {}'.format(errors))

    async def dispatch(self):
        # Look up the handler by name (actors first, then periodic tasks),
        # honour the handler's own (de)serializer overrides, then invoke it.
        handler_name = self.deserialized_data['handler']
        handler = self.actors.get(handler_name, self.periodic_tasks.get(handler_name))
        if not handler:
            raise UnknownHandler('Handler with name `{}` is not registered at {}'.format(
                handler_name,
                self.__class__.__name__
            ))
        self.serializer_class = getattr(handler, 'serializer_class', self.serializer_class)
        self.deserializer_class = getattr(handler, 'deserializer_class', self.deserializer_class)
        self.payload = self.deserialize(self.raw)
        return await handler(self)
class ModelResource(Resource):
    """Resource bound to a Gino model: provides CRUD actors (create,
    update, delete, get, get_list) with filtering/ordering/pagination."""

    model = None
    serializer_class = IdSchema
    deserializer_class = serializer_class
    filtering_class = BasicFiltering
    pagination_class = BasicPagination
    ordering_class = BasicOrdering
    allowed_filters = {
        'id': ALL
    }
    allowed_ordering = None
    items_per_page = 100
    max_items_per_page = 1000

    def __init__(self, deserialized_data, message: IncomingMessage):
        super().__init__(deserialized_data, message)
        # NOTE(review): dead store -- immediately overwritten below;
        # presumably leftover from an earlier implementation.
        self.filtering = self.payload
        self.filtering = self.filtering_class(
            self.model,
            filtering=deserialized_data.get('filtering'),
            allowed_filters=self.allowed_filters
        )
        self.ordering = self.ordering_class(
            ordering=deserialized_data.get('ordering'),
            allowed_ordering=self.allowed_ordering
        )
        self.pagination = self.pagination_class(
            deserialized_data.get('pagination'),
            items_per_page=self.items_per_page,
            max_items_per_page=self.max_items_per_page
        )

    @lazy
    def _db(self):
        # Gino metadata object backing the model.
        return self.model.__metadata__

    @lazy
    def _table(self):
        return self._db.tables[self.model.__tablename__]

    @lazy
    def _primary_key(self):
        # First (assumed single) primary key column name.
        return self._table.primary_key.columns.keys()[0]

    def filter(self, query):
        return self.filtering.filter(query)

    def paginate(self, query):
        return self.pagination.paginate(query)

    def order(self, query):
        return self.ordering.order_by(query)

    @lazy
    def query(self):
        """
        To provide an easy inheritance
        :return: base (filtered) query for this resource's model
        """
        return self.filter(self.model.query)

    @lazy
    def ident(self):
        # Primary-key value: taken (and removed) from the payload if present,
        # otherwise looked up in the filtering parameters.
        try:
            return self.payload.pop(self._primary_key)
        except KeyError:
            return self.filtering.filtering.get(self._primary_key)

    async def exists(self, expr):
        return await self._db.scalar(self._db.exists().where(
            expr
        ).select())

    async def perform_create(self):
        # Create only when no row with this primary key exists (idempotent).
        expr = operator.eq(getattr(self.model, self._primary_key), self.ident)
        if not await self.exists(expr):
            return await self.model.create(
                **{self._primary_key: self.ident},
                **self.payload,
            )
        else:
            logger.debug('Object {} with ident = {} already exists'.format(
                self.model.__name__,
                self.ident
            ))

    @actor(never_reply=True)
    async def create(self):
        return await self.perform_create()

    async def perform_update(self):
        """
        bulk update is not supported
        :return: the updated object, or None when not found
        """
        obj = await self.perform_get()
        if obj:
            await obj.update(**self.payload).apply()
        return obj

    @actor(never_reply=True)
    async def update(self):
        return await self.perform_update()

    async def perform_delete(self):
        # Delete the row identified by ident; None when it does not exist.
        obj = await self.perform_get()
        if obj:
            await obj.delete()
        return obj

    @actor(never_reply=True)
    async def delete(self):
        return await self.perform_delete()

    async def perform_get(self):
        # Single-row fetch by primary key; None instead of raising when absent.
        try:
            expr = operator.eq(getattr(self.model, self._primary_key), self.ident)
            return await self.query.where(expr).gino.one()
        except NoResultFound:
            return None

    @actor()
    async def get(self):
        return await self.perform_get()

    async def perform_get_list(self):
        # Ordered, paginated listing over the filtered base query.
        query = self.order(self.query)
        query = self.paginate(query)
        return await query.gino.all()

    @actor()
    async def get_list(self):
        return await self.perform_get_list()
| StarcoderdataPython |
6552519 | from plasTeX import Command
# Dummy bm package - handled by mathjax
class bm(Command):
    """No-op stand-in for the LaTeX ``bm`` package command; rendering of
    bold math is delegated to MathJax on output."""
    pass
| StarcoderdataPython |
1832921 | import numpy as np
import pandas as pd
import plotly.express as px
def update_liquidity_pool(quantity, liquidity_df, const_fee, token="Token 1"):
    """Fill a buy order of *quantity* units of *token* against a
    constant-product pool and return the updated pool DataFrame.

    The pool invariant is Token1 * Token2 == "Char Number". The buyer pays
    in the other token; the effective unit price and a proportional fee
    (accumulated in Token 1 terms) are reported via print().

    :param quantity: positive number of units of *token* to buy.
    :param liquidity_df: single-row DataFrame with columns "Token 1 Supply",
        "Token 2 Supply", "Char Number", "Fees Accumulated"; mutated in place.
    :param const_fee: proportional fee in [0.0, 0.03].
    :param token: "Token 1" or "Token 2".
    :return: the (mutated) liquidity_df; unchanged when supply is too low.
    :raises ValueError: on out-of-range fee, non-positive quantity, or
        unknown token name.
    """
    # (Removed a no-op `np.round(liquidity_df, decimals=2)` whose result
    # was discarded -- it never affected the pool.)
    if (const_fee < 0) or (const_fee > 0.03):
        raise ValueError("const_fee must take value in [0.0, 0.03]")
    if quantity <= 0:
        raise ValueError("quantity has to be a positive value")
    if token == "Token 1":
        bought_col, paid_col = "Token 1 Supply", "Token 2 Supply"
    elif token == "Token 2":
        bought_col, paid_col = "Token 2 Supply", "Token 1 Supply"
    else:
        raise ValueError("token must take value in [Token 1, Token 2]")

    if quantity > liquidity_df[bought_col].iloc[0]:
        print("Not enough token supply to fill your order")
        return liquidity_df

    new_bought_supply = liquidity_df.loc[0, bought_col] - quantity
    # Keep the constant product: the paid-token supply rises accordingly.
    new_paid_supply = liquidity_df.loc[0, "Char Number"] / new_bought_supply
    price_and_fee = new_paid_supply - liquidity_df.loc[0, paid_col]
    fee = quantity * const_fee  # in units of the token being bought
    price = price_and_fee / (quantity * (1 + const_fee))
    print("Your order of ", quantity, " has been filled at the price of", price)

    # Use .loc (not chained df[col][0] = ...) so the write is reliable
    # under pandas copy-on-write semantics.
    liquidity_df.loc[0, bought_col] = new_bought_supply
    liquidity_df.loc[0, paid_col] = new_paid_supply
    # Fees are tracked in Token 1 terms; convert via the fill price when
    # the fee was charged in Token 2.
    fee_in_token1 = fee if token == "Token 1" else fee * price
    liquidity_df.loc[0, "Fees Accumulated"] = (
        liquidity_df.loc[0, "Fees Accumulated"] + fee_in_token1)
    print("Total number of transactions: 1")
    print("Total fees accumulated:", liquidity_df["Fees Accumulated"][0],
          "(In terms of Token 1)")
    return liquidity_df
if __name__ == "__main__":
    # Example pool: Token 1 = $10, Token 2 = $100 => Token 1/Token 2 = 10.
    # $10 * 1000 Token 1 = $10000 and $100 * 100 Token 2 = $10000, so the
    # constant product ("characteristic number") is 1000 * 100 = 100000,
    # i.e. the liquidity curve is Token1 * Token2 = 100000.
    liq = pd.DataFrame(np.array([1000, 100, 100000, 0.0])).transpose()
    liq.columns = ["Token 1 Supply", "Token 2 Supply", "Char Number", "Fees Accumulated"]
    print(update_liquidity_pool(1, liq, 0.01, token="Token 2"))
    # Plot the liquidity curve with plotly.
    x = np.linspace(1, 1000, 1001)
    y = 100000/x
    fig = px.line(x=x, y=y, labels={"x": "Suppply of Token 2", "y": "Supply of Token 1"})
    fig.show()
5099763 | <filename>service_beacons_python/logic/IBeaconsController.py
# -*- coding: utf-8 -*-
"""
logic.IBeaconsController
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
IBeaconsController class
:copyright: Conceptual Vision Consulting LLC 2018-2021, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from abc import ABC
from typing import Any, List, Optional
from pip_services3_commons.data import PagingParams, FilterParams, DataPage
from service_beacons_python.data.version1 import BeaconV1
class IBeaconsController(ABC):
    """Abstract controller interface for BeaconV1 entities: filtered
    listing, lookup by id/udi, position calculation, and CRUD."""

    def get_beacons_by_filter(self, correlation_id: Optional[str], filter: FilterParams,
                              paging: PagingParams) -> DataPage:
        """Return a page of beacons matching *filter*."""
        raise NotImplementedError('Method from interface definition')

    def get_beacon_by_id(self, correlation_id: Optional[str], id: str) -> BeaconV1:
        """Return the beacon with the given id."""
        raise NotImplementedError('Method from interface definition')

    def get_beacon_by_udi(self, correlation_id: Optional[str], udi: str) -> BeaconV1:
        """Return the beacon with the given UDI."""
        raise NotImplementedError('Method from interface definition')

    def calculate_position(self, correlation_id: Optional[str], site_id: str, udis: str) -> Any:
        """Calculate a position for the given site from the listed UDIs."""
        raise NotImplementedError('Method from interface definition')

    def create_beacon(self, correlation_id: Optional[str], entity: BeaconV1) -> BeaconV1:
        """Create and return a new beacon."""
        raise NotImplementedError('Method from interface definition')

    def update_beacon(self, correlation_id: Optional[str], entity: BeaconV1) -> BeaconV1:
        """Update and return an existing beacon."""
        raise NotImplementedError('Method from interface definition')

    def delete_beacon_by_id(self, correlation_id: Optional[str], id: str) -> BeaconV1:
        """Delete the beacon with the given id and return it."""
        raise NotImplementedError('Method from interface definition')
| StarcoderdataPython |
4888775 | """
[summary]
[extended_summary]
"""
# region [Imports]
# * Standard Library Imports ---------------------------------------------------------------------------->
import os
# * Third Party Imports --------------------------------------------------------------------------------->
# * Gid Imports ----------------------------------------------------------------------------------------->
import gidlogger as glog
# * Local Imports --------------------------------------------------------------------------------------->
from antipetros_discordbot.init_userdata.user_data_setup import ParaStorageKeeper
# endregion[Imports]
# region [TODO]
# endregion [TODO]
# region [AppUserData]
# endregion [AppUserData]
# region [Logging]
# Module-level logger via gidlogger; records that this module was imported.
log = glog.aux_logger(__name__)
log.info(glog.imported(__name__))
# endregion[Logging]

# region [Constants]
APPDATA = ParaStorageKeeper.get_appdata()
BASE_CONFIG = ParaStorageKeeper.get_config('base_config')
THIS_FILE_DIR = os.path.abspath(os.path.dirname(__file__))
# endregion[Constants]

# region[Main_Exec]
if __name__ == '__main__':
    pass
# endregion[Main_Exec]
3568473 | <filename>main.py
#!/usr/bin/env pybricks-micropython
from ev3dev2.motor import MediumMotor, OUTPUT_D, MoveTank, OUTPUT_A, OUTPUT_B
from pybricks import ev3brick as brick
from pybricks.ev3devices import (Motor, TouchSensor, ColorSensor,
InfraredSensor, UltrasonicSensor, GyroSensor)
from pybricks.parameters import (Port, Stop, Direction, Button, Color,
SoundFile, ImageFile, Align)
from pybricks.tools import print, wait, StopWatch
from pybricks.robotics import DriveBase
import math
from time import sleep
# Hardware wiring: large drive motors on ports A/B, medium motor on D,
# front colour sensors on S1/S2 and back colour sensors on S3/S4.
L = Motor(Port.A)
R = Motor(Port.B)
color_1P=ColorSensor(Port.S1)
color_2P=ColorSensor(Port.S2)
color_B1P=ColorSensor(Port.S3)
color_B2P=ColorSensor(Port.S4)
# NOTE(review): these read the back sensors once at import time and are
# never used afterwards -- the drive functions re-read the sensors locally.
B1=color_B1P.color()
B2=color_B2P.color()
M_motor=MediumMotor(OUTPUT_D)
tank_pair = MoveTank(OUTPUT_A, OUTPUT_B)
# Chassis geometry (mm) for the pybricks DriveBase odometry.
wheel_diameter = 56
axle_track = 110
robot = DriveBase( L, R, wheel_diameter, axle_track)
# Two beeps signal the program is ready.
brick.sound.beep()
wait(150)
brick.sound.beep()
wait(150)
def takeb() :
    # Drive forward open-loop at 500 mm/s for two seconds.
    robot.drive(500,0)
    wait(2000)

def forward_on_blackhole() :
    # Drive backward (negative speed) until either front sensor sees
    # colour code 1, then brake and zero the wheel encoders.
    L.reset_angle(0)
    R.reset_angle(0)
    while True :
        color1=color_1P.color()
        color2=color_2P.color()
        robot.drive(-300,0)
        if color1 == 1 or color2 == 1 :
            robot.stop(Stop.BRAKE)
            wait(100)
            L.reset_angle(0)
            R.reset_angle(0)
            break
    robot.stop(Stop.BRAKE)

def backward_on_blackhole() :
    # Same as forward_on_blackhole but driving the other way and
    # watching the back sensor pair.
    L.reset_angle(0)
    R.reset_angle(0)
    while True :
        color1=color_B1P.color()
        color2=color_B2P.color()
        robot.drive(300,0)
        if color1 == 1 or color2 == 1 :
            robot.stop(Stop.BRAKE)
            wait(100)
            L.reset_angle(0)
            R.reset_angle(0)
            break
    robot.stop(Stop.BRAKE)

def forward_wow() :
    # Drive until a BACK sensor sees colour 1, then nudge backward a
    # fixed ~80 encoder degrees. The quoted block is disabled code kept
    # by the original author.
    L.reset_angle(0)
    R.reset_angle(0)
    while True :
        color1=color_B1P.color()
        color2=color_B2P.color()
        robot.drive(-300,0)
        if color1 == 1 or color2 == 1 :
            robot.stop(Stop.BRAKE)
            wait(100)
            L.reset_angle(0)
            R.reset_angle(0)
            break
    '''if color1 != 1 and color2 == 1 :
robot.drive_time(0, -50, 300)
robot.drive (500, 0)
wait(500)
forward()
break
if color1 == 1 and color2 != 1 :
robot.drive_time(0, 50, 300)
robot.drive (500, 0)
wait(500)
forward()
break'''
    robot.drive (-150, 0)
    while L.angle()< -80:
        pass
    robot.stop(Stop.BRAKE)
def forward() :
    # Drive until a FRONT sensor sees colour 1, then roll ~80 encoder
    # degrees forward. The quoted block is disabled code kept as-is.
    L.reset_angle(0)
    R.reset_angle(0)
    while True :
        color1=color_1P.color()
        color2=color_2P.color()
        robot.drive(-300,0)
        if color1 == 1 or color2 == 1 :
            robot.stop(Stop.BRAKE)
            wait(100)
            L.reset_angle(0)
            R.reset_angle(0)
            break
    '''if color1 != 1 and color2 == 1 :
robot.drive_time(0, -50, 300)
robot.drive (500, 0)
wait(500)
forward()
break
if color1 == 1 and color2 != 1 :
robot.drive_time(0, 50, 300)
robot.drive (500, 0)
wait(500)
forward()
break'''
    robot.drive (150, 0)
    while L.angle()< 80:
        pass
    robot.stop(Stop.BRAKE)

def backward() :
    # Two-phase line acquisition on the back sensors: drive (slightly
    # curved) until B1 sees colour 1, then pivot until B2 does.
    while True :
        B1=color_B1P.color()
        B2=color_B2P.color()
        L.run(350)
        R.run(280)
        if B1 == 1 :
            break
    while True :
        B1=color_B1P.color()
        B2=color_B2P.color()
        L.run(-80)
        R.run(140)
        if B2 == 1 :
            break
    robot.stop(Stop.BRAKE)

def dum_check() :
    # NOTE(review): the first loop reads the FRONT sensors into B1/B2
    # but drives backward -- confirm the sensor pair is intended.
    while True :
        B1=color_1P.color()
        B2=color_2P.color()
        L.run(-350)
        R.run(-280)
        if B2 == 1 :
            break
    while True :
        B1=color_B1P.color()
        B2=color_B2P.color()
        L.run(140)
        R.run(-80)
        if B2 == 1 and B1 == 1:
            break
        elif B2 == 1 and B1 != 1:
            # NOTE(review): dw_antibug() is not defined anywhere in this
            # file -- this branch raises NameError if it ever triggers.
            dw_antibug()
            backward()
            break
    robot.stop(Stop.BRAKE)

def fw_backward() :
    # Mirror of backward() using the FRONT sensor pair.
    while True :
        F1=color_1P.color()
        F2=color_2P.color()
        L.run(-350)
        R.run(-280)
        if F1 == 1 :
            break
    while True :
        F1=color_1P.color()
        F2=color_2P.color()
        L.run(80)
        R.run(-140)
        if F2 == 1 :
            break
    robot.stop(Stop.BRAKE)

def backward_green() :
    # Like backward() but stopping on colour code 3 (green).
    while True :
        B1=color_B1P.color()
        B2=color_B2P.color()
        L.run(250)
        R.run(180)
        if B1 == 3 :
            break
    while True :
        B1=color_B1P.color()
        B2=color_B2P.color()
        L.run(-70)
        R.run(150)
        if B2 == 3 :
            break
    robot.stop(Stop.BRAKE)
# In-place turn helpers: robot.drive_time(straight_speed, turn_rate, ms).
# The names suggest positive turn rate = left, negative = right -- confirm
# on hardware.  The *_bug / *_abug / *_gg suffixes are tuned variants used
# at specific points of the mission run.
def left_abug() :
    # Turn left at 140 deg/s for 2.0 s.
    robot.drive_time(0, 140, 2000)
    robot.stop(Stop.BRAKE)
    wait(100)
def right_abug() :
    # Turn right at 140 deg/s for 2.0 s.
    robot.drive_time(0, -140, 2000)
    robot.stop(Stop.BRAKE)
    wait(100)
def right_a2bug() :
    # Slightly gentler right_abug (130 deg/s, 2.0 s).
    robot.drive_time(0, -130, 2000)
    robot.stop(Stop.BRAKE)
    wait(100)
def right_aabug() :
    # Shorter right_abug (140 deg/s, 1.6 s).
    robot.drive_time(0, -140, 1600)
    robot.stop(Stop.BRAKE)
    wait(100)
def right_max() :
    # Fast quarter-ish right turn (150 deg/s, 1.0 s).
    robot.drive_time(0, -150, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def left_max() :
    # Fast quarter-ish left turn (150 deg/s, 1.0 s).
    robot.drive_time(0, 150, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def right() :
    # Standard right turn (160 deg/s, 1.0 s).
    robot.drive_time(0, -160, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def right_bug() :
    # Reduced right turn (130 deg/s, 1.0 s).
    robot.drive_time(0, -130, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def right_bbug() :
    # Smallest right turn (100 deg/s, 1.0 s).
    robot.drive_time(0, -100, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def right_gg() :
    # Right turn used around the green targets (140 deg/s, 1.0 s).
    robot.drive_time(0, -140, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def left_agg() :
    # Wide left turn (210 deg/s, 1.0 s).
    robot.drive_time(0, 210, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def right_agg() :
    # Wide right turn (210 deg/s, 1.0 s).
    robot.drive_time(0, -210, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def left() :
    # Standard left turn (160 deg/s, 1.0 s).
    robot.drive_time(0, 160, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def left_bug() :
    # Reduced left turn (130 deg/s, 1.0 s).
    robot.drive_time(0, 130, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def left_gg() :
    # Left turn used around the green targets (140 deg/s, 1.0 s).
    robot.drive_time(0, 140, 1000)
    robot.stop(Stop.BRAKE)
    wait(100)
def push() :
    # Swing the medium-motor arm down then back up to push a block.
    # NOTE(review): this mixes APIs -- on_for_degrees() is ev3dev2-style and
    # sleep() takes seconds, while the rest of the file uses pybricks-style
    # wait() in milliseconds; confirm both names are actually imported.
    M_motor.on_for_degrees(speed=-100,degrees=100)
    wait(400)
    M_motor.on_for_degrees(speed=100,degrees=100)
    sleep(0.5)
def green() :
    # Drive forward until a front sensor reports colour code 3 (presumably
    # green -- confirm), push the block, then back away 1.3 wheel rotations.
    L.reset_angle(0)
    R.reset_angle(0)
    while True :
        color1=color_1P.color()
        color2=color_2P.color()
        robot.drive(-350,0)
        if color1 == 3 or color2 == 3 :
            robot.stop(Stop.BRAKE)
            wait(500)
            L.reset_angle(0)
            R.reset_angle(0)
            push()
            tank_pair.on_for_rotations(left_speed=50,right_speed=50,rotations=1.3);
            break
def confirm() :
    # Block until any brick button is pressed and then released.
    while not any(brick.buttons()):
        wait(10)
    while any(brick.buttons()):
        wait(10)
def blackhole_end() :
    # Back out of the "black hole" area: 1.1 rotations at full speed.
    tank_pair.on_for_rotations(left_speed=-100, right_speed=-100, rotations=1.1);
def blackhole_end2() :
    # Same distance as blackhole_end() but in the opposite direction.
    tank_pair.on_for_rotations(left_speed=100, right_speed=100, rotations=1.1);
def bypass() :
    # Long 3.5-rotation reverse used to skip an obstacle.
    tank_pair.on_for_rotations(left_speed=-100, right_speed=-100, rotations=3.5);
def fw_antibug() :
    # Small forward nudge used to un-stick the robot.
    robot.drive(-100,0)
    wait(500)
def bw_antibug() :
    # Small backward nudge used to un-stick the robot.
    robot.drive(100,0)
    wait(500)
def forward_blackhole() :
    # Cross the "black hole": 3.7 rotations straight, then a 1.3-rotation
    # left-wheel-only arc, then re-align on the line with backward().
    tank_pair.on_for_rotations(left_speed=-70,right_speed=-70,rotations=3.7);
    tank_pair.on_for_rotations(left_speed=-100,right_speed=-0,rotations=1.3);
    wait(100)
    backward()
def forward_block() :
    # Timed 1.0 s fast forward push.
    robot.drive(-600,0)
    wait(1000)
    robot.stop(Stop.BRAKE)
def forward_green() :
    # Timed 1.3 s forward run toward a green target.
    robot.drive(-500,0)
    wait(1300)
    robot.stop(Stop.BRAKE)
def backward_block() :
    # Timed 0.5 s reverse away from a block.
    robot.drive(400,0)
    wait(500)
    robot.stop(Stop.BRAKE)
def endstart() :
    # Drive forward until a rear sensor reports colour code 5 (presumably
    # red -- confirm against the sensor's colour table).
    L.reset_angle(0)
    R.reset_angle(0)
    while True :
        color1=color_B1P.color()
        color2=color_B2P.color()
        robot.drive(-400,0)
        if color1 == 5 or color2 == 5 :
            robot.stop(Stop.BRAKE)
            wait(100)
            L.reset_angle(0)
            R.reset_angle(0)
            break
# start of the scripted mission run
brick.sound.beep()
wait(150)
# Repeated pattern: drive to the front line, turn, re-square on the rear line.
forward()
right()
backward()
forward()
right()
backward()
forward()
right()
backward()
forward()
left()
backward()
# First green target: push the block, then reposition with three small
# left turns and back away.
green()
left_bug()
left_bug()
left_bug()
backward_block()
fw_antibug()
backward_on_blackhole()
# Long timed traverse (2.7 s reverse-direction drive).
robot.drive(600,0)
wait(2700)
right()
backward()
forward()
right_bug()
green()
right_abug()
backward_block()
backward_block()
backward()
green()
backward_block()
left()
backward()
forward()
left_bug()
forward()
right()
backward()
green()
right()
backward()
forward()
forward() | StarcoderdataPython |
9788667 | #!/usr/bin/env python3
#
# Copyright (c) 2018-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
#
import argparse
import binascii
import ssl
import tempfile
import unittest
import dns.message
import dns.rcode
try:
import netifaces
except ImportError as e:
netifaces = e
from unittest.mock import MagicMock, patch
from dohproxy import constants, server_protocol, utils
from unittest_data_provider import data_provider
# Randomly generated source of words/b64
# gshuf /usr/share/dict/words | head -n 20 | while read line
# do
# echo -e "(b'$line', '$(echo -n $line | base64 | tr -d '=' )',),"
# done
# Self-signed test CA certificate used by TestSSLContext below (its serial,
# E198832A55D5B708, is asserted in that test).
# NOTE(review): several base64 lines have been replaced with "<KEY>"
# placeholders (redaction), so this PEM can no longer be parsed -- restore
# the full certificate before relying on the cafile test.
TEST_CA = (
    "-----BEGIN CERTIFICATE-----\n"
    "<KEY>n"
    "BAYTAlhYMRUwEwYDVQQHDAxEZWZhdWx0IENpdHkxHDAaBgNVBAoME0RlZmF1bHQg\n"
    "Q<KEY>SBMdGQwHhcNMTgwMjI2MjIxODA3WhcNMjgwMjI0MjIxODA3WjBCMQsw\n"
    "CQYDVQQGEwJYWDEVMBMGA1UEBwwMRGVmYXVsdCBDaXR5MRwwGgYDVQQKDBNEZWZh\n"
    "dWx0IENvbXBhbnkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA\n"
    "zkceT8GjMPz7e6nU30CO6aEonx3iszpNXpa+nH31M1NBs4wF2Rli9M1exyX2tAu9\n"
    "gr4ImpIXurryeT61RJYprRBLBdy2FBwx7tgSOeaxZupnQkfd7HwtBJD3dg7cBGpe\n"
    "RbJ44CQozLt0n16FM7yX2NwBxBxMKG+Brqo+PB9dR219Nzh5jB/UTWH21rrMYjiW\n"
    "ABa0OnMh/oc/YGSuR7ymtYWIKL2u3fZ1wV6yCblAKDIhAOhxY3yL6SxyS4uE2j8i\n"
    "<KEY>"
    "<KEY>"
    "<KEY>"
    "<KEY>"
    "fDoRndTZXMH3bKmIb+2DlTjcLvHUeFSs21opssPL1U1fcvJRi3Yd5DYboiKILjO/\n"
    "0iNVGx6CPMiZZsYb+yeoA2ZtVqe/HoKzmeak4nP/QTv5xYRtFgSzXFmEPuC8CWdr\n"
    "xBdVAGX08H8vYlQk72YjKS/eJ6WbrijU0OnI3ZVlhMmlhwzW1cr/QmJSPoTsbS+a\n"
    "3c2aLV6NGplhmr2CuqqznDKT/QfxSk5qMoKAMdtA4iT5S5fPG5kGExt2MD+aimOw\n"
    "DOeHuyCLRXxIolT+8r2BY56sV1uYyuBFw0RAnEpmnc2d072DND6XcDeQCw==\n"
    "-----END CERTIFICATE-----"
)
def b64_source():
    # Data sets for TestDOHB64: (raw bytes, unpadded URL-safe base64 string).
    # Generated with the shell one-liner quoted at the top of this file.
    return [
        (b"punner", "cHVubmVy",),
        (b"visitation", "dmlzaXRhdGlvbg",),
        (b"werf", "d2VyZg",),
        (b"Hysterophyta", "SHlzdGVyb3BoeXRh",),
        (b"diurne", "ZGl1cm5l",),
        (b"reputableness", "cmVwdXRhYmxlbmVzcw",),
        (b"uncompletely", "dW5jb21wbGV0ZWx5",),
        (b"thalami", "dGhhbGFtaQ",),
        (b"unpapal", "dW5wYXBhbA",),
        (b"nonapposable", "bm9uYXBwb3NhYmxl",),
        (b"synalgic", "c3luYWxnaWM",),
        (b"exscutellate", "ZXhzY3V0ZWxsYXRl",),
        (b"predelegation", "cHJlZGVsZWdhdGlvbg",),
        (b"Varangi", "VmFyYW5naQ",),
        (b"coucal", "Y291Y2Fs",),
        (b"intensely", "aW50ZW5zZWx5",),
        (b"apprize", "YXBwcml6ZQ",),
        (b"jirble", "amlyYmxl",),
        (b"imparalleled", "aW1wYXJhbGxlbGVk",),
        (b"dinornithic", "ZGlub3JuaXRoaWM",),
    ]
class TestDOHB64(unittest.TestCase):
    # Round-trip tests for utils.doh_b64_encode / doh_b64_decode.
    @data_provider(b64_source)
    def test_b64_encode(self, input, output):
        self.assertEqual(utils.doh_b64_encode(input), output)
    @data_provider(b64_source)
    def test_b64_decode(self, output, input):
        self.assertEqual(utils.doh_b64_decode(input), output)
    def test_b64_decode_invalid(self):
        """ When providing an invalid input to base64.urlsafe_b64decode it
        should raise a binascii.Error exception.
        """
        # The message differs across Python versions, hence the alternation.
        with self.assertRaisesRegex(
            binascii.Error, "^(Invalid base64-encoded string|Incorrect padding)"
        ):
            utils.doh_b64_decode("_")
def make_url_source():
    """Data sets for TestMakeURL: (domain, uri, expected https URL)."""
    cases = [
        ("foo", "uri", "https://foo/uri"),
        ("foo", "/uri", "https://foo/uri"),
        ("foo", "/uri/", "https://foo/uri/"),
        ("foo:8443", "/uri/", "https://foo:8443/uri/"),
    ]
    return cases
class TestMakeURL(unittest.TestCase):
    # utils.make_url must always produce an https URL from (domain, uri).
    @data_provider(make_url_source)
    def test_make_url(self, domain, uri, output):
        self.assertEqual(utils.make_url(domain, uri), output)
class TestBuildQueryParams(unittest.TestCase):
    # Contract tests for utils.build_query_params (DoH GET parameters).
    def test_has_right_keys(self):
        """ Check that this function returns body only. """
        keys = {
            constants.DOH_DNS_PARAM,
        }
        self.assertEqual(keys, utils.build_query_params(b"").keys())
    def test_query_must_be_bytes(self):
        """ Check that this function raises when we pass a string. """
        with self.assertRaises(TypeError):
            utils.build_query_params("")
    def test_query_accepts_bytes(self):
        """ Check that this function accepts a bytes-object. """
        utils.build_query_params(b"")
    def test_body_b64encoded(self):
        """ Check that this function is b64 encoding the content of body. """
        q = b""
        params = utils.build_query_params(q)
        self.assertEqual(utils.doh_b64_encode(q), params[constants.DOH_DNS_PARAM])
class TestTypoChecker(unittest.TestCase):
    # Smoke tests: building the argument parsers / logger must not raise.
    def test_client_base_parser(self):
        """ Basic test to check that there is no stupid typos.
        """
        utils.client_parser_base()
    def test_proxy_base_parser_noargs(self):
        """ We must provide a port parameter to proxy_parser_base. """
        with self.assertRaises(TypeError):
            utils.proxy_parser_base()
    def test_proxy_base_default_secure_require_certs(self):
        """ If secure (default), will ask for the certfile and keyfile """
        p = utils.proxy_parser_base(port=80)
        # Since we are secure, we need --certfile and --keyfile
        with self.assertRaises(SystemExit) as e:
            args, left = p.parse_known_args()
        self.assertEqual(e.exception.code, 2)  # exit status must be 2
    def test_proxy_base_non_secure_no_certfile(self):
        """ If not using TLS, we don't suggest TLS related arguments. """
        p = utils.proxy_parser_base(port=80, secure=False)
        args, left = p.parse_known_args()
        # The values for cerfile and keyfile must be empty
        self.assertIsNone(args.certfile)
        self.assertIsNone(args.keyfile)
    def test_configure_logger(self):
        """ Basic test to check that there is no stupid typos.
        """
        utils.configure_logger()
    def test_configure_logger_unknown_level(self):
        """ Basic test to check that there is no stupid typos.
        """
        with self.assertRaises(Exception):
            utils.configure_logger(level="thisisnotalevel")
def extract_path_params_source():
    # Data sets for TestExtractPathParams: (uri, (expected path, params)).
    return [
        ("/foo?a=b&c=d#1234", ("/foo", {"a": ["b"], "c": ["d"]})),
        ("/foo", ("/foo", {})),
        ("/foo?#", ("/foo", {})),
        ("foo", ("foo", {})),
        # Test that we keep empty values
        ("/foo?a=b&c", ("/foo", {"a": ["b"], "c": [""]})),
        ("/foo?a=b&c=", ("/foo", {"a": ["b"], "c": [""]})),
    ]
class TestExtractPathParams(unittest.TestCase):
    # utils.extract_path_params must split a URI into path + parsed params.
    @data_provider(extract_path_params_source)
    def test_extract_path_params(self, uri, output):
        path, params = utils.extract_path_params(uri)
        self.assertEqual(path, output[0])
        self.assertDictEqual(params, output[1])
def extract_ct_body_valid_source():
    # Valid inputs: (uri, (expected content-type, expected decoded body)).
    # "aW1wYXJhbGxlbGVk" is doh_b64_encode(b"imparalleled").
    return [
        ("/foo?ct&dns=aW1wYXJhbGxlbGVk", (constants.DOH_MEDIA_TYPE, b"imparalleled"),),
        ("/foo?ct=&dns=aW1wYXJhbGxlbGVk", (constants.DOH_MEDIA_TYPE, b"imparalleled"),),
        (
            "/foo?ct=bar&dns=aW1wYXJhbGxlbGVk",
            (constants.DOH_MEDIA_TYPE, b"imparalleled"),
        ),
        ("/foo?dns=aW1wYXJhbGxlbGVk", (constants.DOH_MEDIA_TYPE, b"imparalleled"),),
    ]
def extract_ct_body_invalid_source():
    # Invalid inputs: (uri, expected DOHParamsException message fragment).
    return [
        ("/foo?ct=&dns=", "Missing Body",),
        ("/foo?ct=", "Missing Body Parameter",),
        ("/foo?ct=bar&dns=_", "Invalid Body Parameter",),
    ]
class TestExtractCtBody(unittest.TestCase):
    # utils.extract_ct_body must decode the dns param or raise with a
    # descriptive message.
    @data_provider(extract_ct_body_valid_source)
    def test_extract_ct_body_valid(self, uri, output):
        path, params = utils.extract_path_params(uri)
        ct, body = utils.extract_ct_body(params)
        self.assertEqual(ct, output[0])
        self.assertEqual(body, output[1])
    @data_provider(extract_ct_body_invalid_source)
    def test_extract_ct_body_invalid(self, uri, output):
        path, params = utils.extract_path_params(uri)
        with self.assertRaisesRegex(server_protocol.DOHParamsException, output):
            utils.extract_ct_body(params)
class TestDNSQueryFromBody(unittest.TestCase):
    # utils.dns_query_from_body: wire-format parsing, with/without debug.
    def test_invalid_message_no_debug(self):
        body = "a"
        # Without debug the generic message is used.
        with self.assertRaisesRegex(
            server_protocol.DOHDNSException, "Malformed DNS query"
        ):
            utils.dns_query_from_body(body)
    def test_invalid_message_with_debug(self):
        body = "a"
        # With debug the underlying dnspython error text leaks through.
        with self.assertRaisesRegex(server_protocol.DOHDNSException, "is too short"):
            utils.dns_query_from_body(body, debug=True)
    def test_valid_message(self):
        dnsq = dns.message.Message()
        body = dnsq.to_wire()
        self.assertEqual(utils.dns_query_from_body(body), dnsq)
class TestDNSQuery2Log(unittest.TestCase):
    # utils.dnsquery2log must not raise for queries or responses,
    # even ones with an empty question section.
    def setUp(self):
        self._qname = "example.com"
        self._qtype = "A"
        self._q = dns.message.make_query(self._qname, self._qtype)
    def test_valid_query(self):
        """
        test that no exception is thrown with a legitimate query.
        """
        utils.dnsquery2log(self._q)
    def test_valid_response(self):
        """
        test that no exception is thrown with a legitimate response.
        """
        r = dns.message.make_response(self._q, recursion_available=True)
        utils.dnsquery2log(r)
    def test_refused_response_no_question(self):
        """
        test that no exception is thrown with a legitimate response.
        """
        r = dns.message.make_response(self._q, recursion_available=True)
        r.set_rcode(dns.rcode.REFUSED)
        r.question = []
        utils.dnsquery2log(r)
class TestDNSAns2Log(unittest.TestCase):
    # Same coverage as TestDNSQuery2Log but for utils.dnsans2log.
    def setUp(self):
        self._qname = "example.com"
        self._qtype = "A"
        self._q = dns.message.make_query(self._qname, self._qtype)
    def test_valid_query(self):
        """
        test that no exception is thrown with a legitimate query.
        """
        utils.dnsans2log(self._q)
    def test_valid_response(self):
        """
        test that no exception is thrown with a legitimate response.
        """
        r = dns.message.make_response(self._q, recursion_available=True)
        utils.dnsans2log(r)
    def test_refused_response_no_question(self):
        """
        test that no exception is thrown with a legitimate response.
        """
        r = dns.message.make_response(self._q, recursion_available=True)
        r.set_rcode(dns.rcode.REFUSED)
        r.question = []
        utils.dnsans2log(r)
@patch("ssl.SSLContext.set_alpn_protocols", MagicMock())
@patch("ssl.SSLContext.load_cert_chain", MagicMock())
class TestProxySSLContext(unittest.TestCase):
    # utils.create_ssl_context: ALPN/http2 must only be set when requested.
    def setUp(self):
        self.args = argparse.Namespace()
        self.args.certfile = None
        self.args.keyfile = None
        # not all openssl versions may support DOH_CIPHERS, override with the
        # one supported by the testing platform.
        # NOTE(review): ssl._DEFAULT_CIPHERS is a private CPython attribute
        # and may disappear in newer versions.
        constants.DOH_CIPHERS = ssl._DEFAULT_CIPHERS
    def test_proxy_ssl_context(self):
        """ Test a default ssl context, it should have http2 disabled """
        ssl_context = utils.create_ssl_context(self.args)
        self.assertIsInstance(ssl_context, ssl.SSLContext)
        # don't enable http2
        # NOTE(review): MagicMock.called is a bool; comparing to 0/1 works
        # via ==, but assertFalse/assertTrue would be clearer.
        self.assertEqual(ssl_context.set_alpn_protocols.called, 0)
    def test_proxy_ssl_context_http2_enabled(self):
        """ Test a ssl context with http2 enabled """
        ssl_context = utils.create_ssl_context(self.args, http2=True)
        self.assertIsInstance(ssl_context, ssl.SSLContext)
        # enable http2
        self.assertEqual(ssl_context.set_alpn_protocols.called, 1)
class TestSSLContext(unittest.TestCase):
    # utils.create_custom_ssl_context: verify-mode and CA-file handling.
    def setUp(self):
        self._CA = TEST_CA
        self._CA_serial = "E198832A55D5B708"
        # ALPN requires >=openssl-1.0.2
        # NPN requires >=openssl-1.0.1
        for fn in ["set_alpn_protocols"]:
            patcher = unittest.mock.patch("ssl.SSLContext.{0}".format(fn))
            patcher.start()
            self.addCleanup(patcher.stop)
    def test_insecure_context(self):
        """
        Test that insecure flag creates a context where verify method is
        CERT_NONE
        """
        sslctx = utils.create_custom_ssl_context(insecure=True)
        self.assertEqual(sslctx.verify_mode, ssl.CERT_NONE)
    def test_secure_context(self):
        """
        Test that if insecure is False, the ssl context created has
        CERT_REQUIRED as the verify method
        """
        sslctx = utils.create_custom_ssl_context(insecure=False)
        self.assertEqual(sslctx.verify_mode, ssl.CERT_REQUIRED)
    def test_cafile(self):
        # NOTE(review): TEST_CA above contains "<KEY>" redaction placeholders,
        # so load-from-cafile will fail until the real PEM is restored.
        with tempfile.NamedTemporaryFile() as ca:
            ca.write(self._CA.encode())
            ca.flush()
            sslctx = utils.create_custom_ssl_context(insecure=False, cafile=ca.name)
            self.assertTrue(
                self._CA_serial
                in [crt["serialNumber"] for crt in sslctx.get_ca_certs()]
            )
@unittest.skipIf(isinstance(netifaces, ImportError), "netifaces not installed")
class TestGetSystemAddresses(unittest.TestCase):
    # Loopback must always appear in the detected system addresses.
    def test_get_system_addresses(self):
        self.assertIn("127.0.0.1", utils.get_system_addresses())
class TestHandleDNSTCPData(unittest.TestCase):
    # utils.handle_dns_tcp_data reassembles length-prefixed DNS-over-TCP
    # messages, invoking the callback per complete message and returning
    # any unconsumed tail bytes.
    def setUp(self):
        # \x00\x2f length prefix (47 bytes) followed by a full DNS query
        # for connectivitycheck.gstatic.com / AAAA.
        self._data = (
            b"\x00/\x00\x00\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00"
            b"\x11connectivitycheck\x07gstatic\x03com\x00\x00\x1c\x00\x01"
        )
        self._cb_data = []
    def _cb(self, data):
        # Collects parsed messages so the tests can inspect them.
        self._cb_data.append(data)
    def test_short(self):
        # Short message (no length check), returns itself
        res = utils.handle_dns_tcp_data(self._data[0:1], self._cb)
        self.assertEqual(res, self._data[0:1])
        self.assertEqual(self._cb_data, [])
    def test_partial(self):
        # Partial message (no cb), returns itself
        res = utils.handle_dns_tcp_data(self._data[0:10], self._cb)
        self.assertEqual(res, self._data[0:10])
        self.assertEqual(self._cb_data, [])
    def test_complete(self):
        # Complete message (calls cb once)
        res = utils.handle_dns_tcp_data(self._data, self._cb)
        self.assertEqual(res, b"")
        self.assertIsInstance(self._cb_data[0], dns.message.Message)
    def test_complete_plus_partial(self):
        # Complete message (calls cb once) + partial message
        res = utils.handle_dns_tcp_data(self._data + self._data[0:10], self._cb)
        self.assertEqual(res, self._data[0:10])
        self.assertIsInstance(self._cb_data[0], dns.message.Message)
    def test_complete_multiple(self):
        # Muliple complete messages will call the cb multiple times
        res = utils.handle_dns_tcp_data(self._data + self._data, self._cb)
        self.assertEqual(res, b"")
        self.assertIsInstance(self._cb_data[0], dns.message.Message)
        self.assertIsInstance(self._cb_data[1], dns.message.Message)
class TestDNSECS(unittest.TestCase):
    # utils.set_dns_ecs must attach an EDNS Client Subnet option with the
    # address truncated to /24 (IPv4) or /56 (IPv6).
    # NOTE(review): the literal IPs here look anonymized by a scrubbing
    # pass (e.g. the IPv6 input "fc00:..." expecting "2000::") -- verify
    # these data sets against the original upstream tests.
    def test_set_dns_ecs_ipv4(self):
        dnsq = dns.message.make_query("www.example.com", rdtype="A")
        utils.set_dns_ecs(dnsq, "10.0.0.242")
        self.assertEqual(dnsq.edns, 0)
        self.assertEqual(dnsq.options[0].address, "10.0.0.0")
        self.assertEqual(dnsq.options[0].srclen, 24)
    def test_set_dns_ecs_ipv6(self):
        dnsq = dns.message.make_query("www.example.com", rdtype="A")
        utils.set_dns_ecs(dnsq, "fc00:e968:6179::de52:7100")
        self.assertEqual(dnsq.edns, 0)
        self.assertEqual(dnsq.options[0].address, "2000::")
        self.assertEqual(dnsq.options[0].srclen, 56)
| StarcoderdataPython |
1895069 | import sys
from onnx_caffe import frontend
import argparse
import logging
#logging.basicConfig(level=logging.DEBUG)
# Command-line entry point: parse options, then convert the Caffe model
# (prototxt + caffemodel) into an ONNX file.
parser = argparse.ArgumentParser(description='Convert a caffe model into an onnx file.')
#parser.add_argument('kfile', metavar='KerasFile', help='an input hdf5 file')
parser.add_argument('-n', metavar='prototxt', help='an input prototxt file')
parser.add_argument('-w', metavar='caffemodel', help='an input caffemodel file')
parser.add_argument('-o', '--output', dest='ofile', type=str, default="model.onnx", help='the output onnx file')
parser.add_argument('-c', '--custom', dest='custom', type=str, default=None, help='the customized layers definition file')
# BUGFIX: the old help text ("whether do various optimizations") described a
# different option; -D/--debug only enables debug logging (see below).
parser.add_argument('-D', '--debug', action='store_true', default=False, help='enable debug logging output')
args = parser.parse_args()
# If in debug mode, emit DEBUG-level log messages.
if args.debug:
    logging.basicConfig(level=logging.DEBUG)
# Load the Caffe model and convert it.
converter = frontend.CaffeFrontend()
converter.loadFromFile(args.n, args.w)
# NOTE(review): the returned model object is unused here; saveToFile()
# presumably writes from the converter's internal state -- confirm.
onnx_model = converter.convertToOnnx()
converter.saveToFile(args.ofile)
| StarcoderdataPython |
6695497 | <reponame>koturn/kotemplate
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description
"""
__author__ = '<+AUTHOR+> <+MAIL_ADDRESS+>'
__status__ = "production"
__version__ = '0.0.1'
__date__ = '<+DATE+>'
import optparse
import sys
if __name__ == '__main__':
N_REQUIRED_MEMAININGS = 1
parser = optparse.OptionParser(
usage='\n $ python %prog [options] FILES...',
version='%prog 0.0.1',
description='description')
parser.add_option('-a', '--apple', dest='apple',
default=False, action='store_true',
help='apple apple apple')
parser.add_option('-b', '--banana', dest='banana',
type='string', default='', action='store', metavar='BANANA',
help='banana banana banana')
parser.add_option('-c', '--cake', dest='cake',
type='int', default='0', action='store', metavar='CAKE',
help='cake cake cake')
opts, args = parser.parse_args()
args = filter(lambda arg: arg != '', args)
print opts
print args
if len(args) < N_REQUIRED_MEMAININGS:
print 'Specify files one or more'
parser.print_usage()
sys.exit(1)
<+CURSOR+>
| StarcoderdataPython |
1924619 | # retrieved from: https://gist.github.com/zyegfryed/918403, https://gist.github.com/grantmcconnaughey/ce90a689050c07c61c96
# used for creating pdf files to be served using django
# -*- coding: utf-8 -*-
import codecs
import subprocess
from fdfgen import forge_fdf
from django.core.exceptions import ImproperlyConfigured
from django.template import engines
from django.template.backends.base import BaseEngine
from django.template.engine import Engine, _dirs_undefined
class PdfTemplateError(Exception):
pass
class PdftkEngine(BaseEngine):
    # Django template backend that treats PDF form files as "templates"
    # and fills them via pdftk.  Targets Python 2 / old Django (see the
    # LoaderOrigin and _dirs_undefined usages below).
    # Going ahead and defining this, but really PDFs should still be placed
    # in the templates directory of an app because the loader checks templates
    app_dirname = 'pdfs'
    def __init__(self, params):
        params = params.copy()
        options = params.pop('OPTIONS').copy()
        super(PdftkEngine, self).__init__(params)
        self.engine = self._Engine(self.dirs, self.app_dirs, **options)
    def get_template(self, template_name, dirs=_dirs_undefined):
        # Wrap the loaded template so rendering goes through pdftk.
        return PdfTemplate(self.engine.get_template(template_name, dirs))
    class _Engine(Engine):
        def make_origin(self, display_name, loader, name, dirs):
            # Always return an Origin object, because PDFTemplate need it to
            # render the PDF Form file.
            from django.template.loader import LoaderOrigin
            return LoaderOrigin(display_name, loader, name, dirs)
class PdfTemplate(object):
    # Renders a PDF form by generating an FDF stream from the context and
    # piping it through the external pdftk binary.
    pdftk_bin = None  # resolved lazily from settings.PDFTK_BIN
    def __init__(self, template):
        self.template = template
        self.set_pdftk_bin()
    @property
    def origin(self):
        # Origin of the underlying template; .name is the PDF file path.
        return self.template.origin
    def render(self, context=None, request=None):
        # Fill the PDF form with (key, value) pairs from the context and
        # return the flattened PDF bytes.
        if context is None:
            context = {}
        context = context.items()
        output, err = self.fill_form(context, self.origin.name)
        if err:
            raise PdfTemplateError(err)
        return output
    def fill_form(self, fields, src, pdftk_bin=None):
        # Forge an FDF payload and pipe it to pdftk's fill_form command.
        fdf_stream = forge_fdf(fdf_data_strings=fields)
        cmd = [self.pdftk_bin, src, 'fill_form', '-', 'output', '-', 'flatten']
        cmd = ' '.join(cmd)
        return self.run_cmd(cmd, fdf_stream)
    def dump_data_fields(self):
        # List the form fields defined in the PDF (pdftk dump_data_fields).
        cmd = [self.pdftk_bin, self.origin.name, 'dump_data_fields']
        cmd = ' '.join(cmd)
        output, err = self.run_cmd(cmd, None)
        if err:
            raise PdfTemplateError(err)
        return output
    def run_cmd(self, cmd, input_data):
        # SECURITY NOTE(review): the command is a ' '-joined string run with
        # shell=True, so a PDF path containing spaces or shell metacharacters
        # breaks or injects into the shell.  Prefer passing the argument list
        # with shell=False.
        try:
            process = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                stdout=subprocess.PIPE, shell=True)
            if input_data:
                return process.communicate(input=input_data)
            else:
                return process.communicate()
        except OSError, e:
            # Python 2 except syntax; pdftk binary missing/unexecutable.
            return None, e
    def set_pdftk_bin(self):
        # Resolve and cache the pdftk binary path from Django settings.
        if self.pdftk_bin is None:
            from django.conf import settings
            if not hasattr(settings, 'PDFTK_BIN'):
                msg = "PDF generation requires pdftk " \
                    "(http://www.pdflabs.com/tools/pdftk-the-pdf-toolkit). " \
                    "Edit your PDFTK_BIN settings accordingly."
                raise ImproperlyConfigured(msg)
            self.pdftk_bin = settings.PDFTK_BIN
        return self.pdftk_bin
    def version(self):
        # Return pdftk's --version output (raises PdfTemplateError on failure).
        cmd = [self.pdftk_bin, '--version']
        cmd = ' '.join(cmd)
        output, err = self.run_cmd(cmd, None)
        if err:
            raise PdfTemplateError(err)
        return output
def get_template(template_name):
    """
    Returns a compiled Template object for the given template name,
    handling template inheritance recursively.
    """
    def strict_errors(exception):
        raise exception
    def fake_strict_errors(exception):
        return (u'', -1)
    # Loading hacks: temporarily replace the global 'strict' codec error
    # handler so reading binary PDF content as text does not raise.
    # NOTE(review): this mutates process-wide codec state and is not
    # restored if get_template raises -- consider try/finally.
    # Ignore UnicodeError, due to PDF file read
    codecs.register_error('strict', fake_strict_errors)
    if template_name.endswith('.pdf'):
        template = engines['pdf'].get_template(template_name)
    else:
        template = engines['django'].get_template(template_name)
    # Loading hacks: restore the real strict handler.
    codecs.register_error('strict', strict_errors)
    return template
| StarcoderdataPython |
1983309 | <filename>golem/core/test_data.py
"""Methods for dealing with test data files
Data files have csv or json extensions and are stored in the same
directory as the test.
"""
import ast
import csv
import json
import os
import traceback
from golem.core import test as test_module
from golem.core import utils
def csv_file_path(project, test_name):
    """Absolute path of the CSV data file belonging to the given test."""
    test = test_module.Test(project, test_name)
    filename = test.stem_name + '.csv'
    return os.path.join(test.dirname, filename)
def save_csv_test_data(project, test_name, test_data):
    """Save data to csv file.
    test_data must be a list of dictionaries.
    An empty/falsy test_data removes the csv file instead.
    """
    if test_data:
        with open(csv_file_path(project, test_name), 'w', encoding='utf-8') as f:
            # Header comes from the first row's keys; rows containing keys
            # not present in the first row would make DictWriter raise.
            writer = csv.DictWriter(f, fieldnames=test_data[0].keys(), lineterminator='\n')
            writer.writeheader()
            for row in test_data:
                writer.writerow(row)
    else:
        remove_csv_if_present(project, test_name)
def get_csv_test_data(project, test_name):
    """Return the test's CSV data as a list of dicts ([] if no file)."""
    csv_path = csv_file_path(project, test_name)
    if not os.path.isfile(csv_path):
        return []
    with open(csv_path, 'r', encoding='utf-8') as f:
        return [dict(data_set) for data_set in csv.DictReader(f)]
def remove_csv_if_present(project, test_name):
    """Remove csv data file from tests/ folder (no-op if absent)."""
    csv_path = csv_file_path(project, test_name)
    if os.path.isfile(csv_path):
        os.remove(csv_path)
def json_file_path(project, test_name):
    # Absolute path of the test's JSON data file (same dir, .json suffix).
    test = test_module.Test(project, test_name)
    return os.path.join(test.dirname, f'{test.stem_name}.json')
def save_json_test_data(project, test_name, json_data_str):
    """Save data to json file. Data is not saved if json is not valid."""
    # Empty strings are also skipped, so the existing file is left untouched.
    if json_data_str and not utils.json_parse_error(json_data_str):
        with open(json_file_path(project, test_name), 'w', encoding='utf-8') as f:
            f.write(json_data_str)
def get_json_test_data(project, test_name):
    """Get data from the test's json file as a list of dicts.

    The file content must be a dict (wrapped in a one-element list) or a
    list of dicts; anything else -- including a missing file or invalid
    JSON -- yields [].
    """
    json_path = json_file_path(project, test_name)
    if not os.path.isfile(json_path):
        return []
    try:
        with open(json_path, encoding='utf-8') as f:
            json_data = json.load(f)
    except json.JSONDecodeError:
        # Invalid JSON is silently ignored, matching save_json_test_data
        # which refuses to write invalid JSON in the first place.
        return []
    if isinstance(json_data, dict):
        return [json_data]
    if isinstance(json_data, list) and all(isinstance(x, dict) for x in json_data):
        return json_data
    return []
def get_json_test_data_as_string(project, test_name):
    """Get data from json file as string ('' if the file does not exist)."""
    json_data = ''
    json_path = json_file_path(project, test_name)
    if os.path.isfile(json_path):
        with open(json_path, encoding='utf-8') as f:
            json_data = f.read()
    return json_data
def remove_json_data_if_present(project, test_name):
    """Remove json data file from tests/ folder (no-op if absent)."""
    json_path = json_file_path(project, test_name)
    if os.path.isfile(json_path):
        os.remove(json_path)
def validate_internal_data(internal_data_str):
    """Check a string of Python code for syntax errors.

    Returns a list with one formatted traceback if the code contains a
    SyntaxError, otherwise an empty list.
    """
    errors = []
    try:
        ast.parse(internal_data_str, filename='')
    except SyntaxError:
        errors.append(traceback.format_exc(limit=0))
    return errors
def get_internal_test_data_as_string(project, full_test_case_name):
    """Get test data defined inside the test itself, rendered as code."""
    data_str = ''
    tm = test_module.Test(project, full_test_case_name).module
    if hasattr(tm, 'data'):
        data_variable = getattr(tm, 'data')
        data_str = format_internal_data_var(data_variable)
    return data_str
def format_internal_data_var(data_var):
    """Convert data_var to a properly formatted Python code string.

    Produces a "data = ..." assignment; dicts and lists of dicts are laid
    out one key per line, anything else falls back to repr().
    NOTE(review): the indent string literals below may have lost whitespace
    in this copy (upstream uses multi-space indents) -- verify.
    """
    def _format_dict(d, indent):
        # Render one dict, one "key: value," pair per line.
        dict_str = indent + '{\n'
        for key, value in d.items():
            if type(value) == str:
                v = repr(value)
            else:
                v = str(value)
            dict_str += indent + ' ' + repr(key) + ': ' + v + ',\n'
        dict_str += indent + '}'
        return dict_str
    if type(data_var) is list:
        data_str = '[\n'
        for e in data_var:
            data_str += _format_dict(e, indent=' ') + ',\n'
        data_str += ']\n'
    elif type(data_var) is dict:
        data_str = _format_dict(data_var, indent='')
    else:
        data_str = repr(data_var)
    data_str = 'data = ' + data_str
    return data_str
def get_internal_test_data(project, test_name):
    """Get test data defined inside the test itself.

    The test module's `data` variable is ignored unless it is a dict
    (returned wrapped in a list) or a list containing only dicts.
    """
    test = test_module.Test(project, test_name)
    # getattr with a default replaces the hasattr + getattr pair.
    data_var = getattr(test.module, 'data', None)
    if isinstance(data_var, dict):
        return [data_var]
    if isinstance(data_var, list) and all(isinstance(x, dict) for x in data_var):
        return data_var
    return []
def get_test_data(project, test_name):
    """Get csv data as list of dicts; json & internal data as string.

    Used by the UI, which edits json/internal data as raw text.
    """
    return {
        'csv': get_csv_test_data(project, test_name),
        'json': get_json_test_data_as_string(project, test_name),
        'internal': get_internal_test_data_as_string(project, test_name)
    }
def get_parsed_test_data(project, test_name):
    """Get test data for test execution.

    Only the first non-empty source is used, in priority order:
    csv, then json, then internal.  Json and internal data must be a
    dict or a list of dicts, otherwise they are ignored.  When no
    source yields data, a single empty data set is returned.
    """
    sources = (get_csv_test_data, get_json_test_data, get_internal_test_data)
    for get_data in sources:
        data = get_data(project, test_name)
        if data:
            return data
    return [{}]
| StarcoderdataPython |
5011821 | import requests
from .exceptions import (
PyarrAccessRestricted,
PyarrBadGateway,
PyarrConnectionError,
PyarrMethodNotAllowed,
PyarrResourceNotFound,
PyarrUnauthorizedError,
)
class RequestHandler:
    """Base class for API Wrappers.

    Holds the connection parameters and funnels every HTTP verb through a
    single private helper, removing the four near-identical request
    methods of the original implementation.
    """

    def __init__(
        self,
        host_url: str,
        api_key: str,
    ):
        """Constructor for connection to Arr API

        Args:
            host_url (str): Host URL to Arr api
            api_key (str): API Key for Arr api
        """
        self.host_url = host_url
        self.api_key = api_key
        self.session = requests.Session()
        self.auth = None

    def _request_url(self, path, ver_uri):
        """Builds the URL for the request to use.

        Args:
            path (str): Destination for specific call
            ver_uri (str): API Version number

        Returns:
            str: string URL for API endpoint
        """
        return f"{self.host_url}/api{ver_uri}/{path}"

    def basic_auth(self, username, password):
        """If you have basic authentication setup you will need to pass your
        username and passwords to the requests.auth.HTTPBASICAUTH() method.

        Args:
            username (str): Username for basic auth.
            password (str): Password for basic auth.

        Returns:
            Object: HTTP Auth object
        """
        self.auth = requests.auth.HTTPBasicAuth(username, password)
        return self.auth

    def _request(self, method, path, ver_uri="", params=None, data=None):
        """Shared implementation for all HTTP verbs.

        Args:
            method (str): Session method name ("get", "post", "put", "delete").
            path (str): Path to API endpoint e.g. /api/manualimport
            ver_uri (str, optional): API version path segment.
            params (dict, optional): URL parameters. Defaults to None.
            data (dict, optional): JSON payload (ignored for GET).

        Raises:
            PyarrConnectionError: on request timeout.

        Returns:
            Object: Parsed response (see _process_response).
        """
        kwargs = {
            "headers": {"X-Api-Key": self.api_key},
            "params": params,
            "auth": self.auth,
        }
        # GET requests carry no JSON body, matching the previous behaviour.
        if method != "get":
            kwargs["json"] = data
        try:
            res = getattr(self.session, method)(
                self._request_url(path, ver_uri), **kwargs
            )
        except requests.Timeout as exception:
            raise PyarrConnectionError(
                "Timeout occurred while connecting to API"
            ) from exception
        return _process_response(res)

    def request_get(self, path, ver_uri="", params=None):
        """Wrapper on any get requests

        Args:
            path (str): Path to API endpoint e.g. /api/manualimport
            params (dict, optional): URL Parameters to send with the request. Defaults to None.

        Returns:
            Object: Response object from requests
        """
        return self._request("get", path, ver_uri, params)

    def request_post(self, path, ver_uri="", params=None, data=None):
        """Wrapper on any post requests

        Args:
            path (str): Path to API endpoint e.g. /api/manualimport
            params (dict, optional): URL Parameters to send with the request. Defaults to None.
            data (dict, optional): Payload to send with request. Defaults to None.

        Returns:
            Object: Response object from requests
        """
        return self._request("post", path, ver_uri, params, data)

    def request_put(self, path, ver_uri="", params=None, data=None):
        """Wrapper on any put requests

        Args:
            path (str): Path to API endpoint e.g. /api/manualimport
            params (dict, optional): URL Parameters to send with the request. Defaults to None.
            data (dict, optional): Payload to send with request. Defaults to None.

        Returns:
            Object: Response object from requests
        """
        return self._request("put", path, ver_uri, params, data)

    def request_del(self, path, ver_uri="", params=None, data=None):
        """Wrapper on any delete requests

        Args:
            path (str): Path to API endpoint e.g. /api/manualimport
            params (dict, optional): URL Parameters to send with the request. Defaults to None.
            data (dict, optional): Payload to send with request. Defaults to None.

        Returns:
            Object: Response object from requests
        """
        return self._request("delete", path, ver_uri, params, data)
def _process_response(res):
"""Check the response status code and error or return results
Args:
res (str): JSON or Text response from API Call
Raises:
PyarrUnauthorizedError: Invalid API Key
PyarrAccessRestricted: Invalid Permissions
PyarrResourceNotFound: Incorrect Resource
PyarrBadGateway: Bad Gateway
Returns:
JSON: Array
"""
if res.status_code == 401:
raise PyarrUnauthorizedError(
"Unauthorized. Please ensure valid API Key is used.", {}
)
if res.status_code == 403:
raise PyarrAccessRestricted(
"Access restricted. Please ensure API Key has correct permissions", {}
)
if res.status_code == 404:
raise PyarrResourceNotFound("Resource not found")
if res.status_code == 502:
raise PyarrBadGateway("Bad Gateway. Check your server is accessible")
if res.status_code == 405:
raise PyarrMethodNotAllowed(f"The endpoint {res.url} is not allowed")
content_type = res.headers.get("Content-Type", "")
if "application/json" in content_type:
return res.json()
return res
| StarcoderdataPython |
11220077 | <reponame>pennycxl/BearSki<filename>src/runtestt.py<gh_stars>1-10
import unittest
import BearSki.RunUnittest as rut
from BearSki.utils.logger import SkiLogger
from BearSki.report.LocalReportRunner import LocalReportRunner
import time
import sys
import logging
from BearSki.utils.arguments import runArg
def get_test_cases(dirpath,name="test_",isrunonecase=False):
test_cases = unittest.TestSuite()
# 测试用例使用"ski_"开头命名
if isrunonecase:
# 执行单条用例,顺序为 目录名 文件名 类名 方法名 中间"." 间隔 例如:"testcase.debug_test_user.TestUserLogin.test_login"
suite=unittest.TestLoader().loadTestsFromName(dirpath)
test_cases.addTests(suite)
return test_cases
else:
suites = unittest.defaultTestLoader.discover(dirpath, name+'*.py', top_level_dir=dirpath)
for suite in suites:
test_cases.addTests(suite)
return test_cases
if __name__ == '__main__':
    rag = runArg()
    logger = SkiLogger('runtestt')
    # Report output mode: 'text' prints to console, 'html' uses LocalReportRunner.
    report_type = 'html'
    # BUG FIX: this flag used to be the *string* 'False', which is truthy, so
    # the single-case branch of get_test_cases() was always taken.  Since
    # casepath below is a dotted test name (only valid in single-case mode),
    # that was the effective behavior; make it explicit with a real boolean.
    isrunonecase = True
    casepath = 'utest.testcase.test_CommonData'
    casename = 'test_'
    cases = get_test_cases(casepath, casename, isrunonecase)
    now = time.strftime("%Y-%m-%d %H_%M_%S")  # timestamp used in the report name
    test_reports_address = './utest/report'  # directory where reports are stored
    filename = './utest/report/' + now + 'report.html'  # report file name
    if report_type == 'text':
        logger.info("开始执行测试,报告输出模式text")
        runner = unittest.TextTestRunner()
        runner.run(cases)
    elif report_type == 'html':
        logger.info("开始执行测试,报告输出模式html")
        lruner = LocalReportRunner()
        lruner.run(cases)
| StarcoderdataPython |
1635868 | <reponame>Valaraucoo/raven
import datetime
import os
import uuid
from django.conf import settings
from django.contrib.auth import models as auth_models
from django.contrib.auth.signals import user_logged_in, user_logged_out
from django.db import models
from django.dispatch import receiver
from django.urls import reverse
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from users import managers
def get_file_path(instance, filename: str) -> str:
    """Build an upload path: <UPLOAD_FILES_DIR>/<YYYY-MM-DD>/<uuid4><filename>.

    Used as the ``upload_to`` callable for file fields; the random UUID
    prefix prevents collisions between identically named uploads.
    """
    date_folder = datetime.date.today().strftime("%Y-%m-%d")
    unique_name = str(uuid.uuid4()) + filename
    return os.path.join(settings.UPLOAD_FILES_DIR, date_folder, unique_name)
# (stored value, human-readable label) pairs for User.gender.
GENDER_CHOICES = (
    ('male', _('Male')),
    ('female', _('Female')),
    ('none', 'none'),
)

# (stored value, human-readable label) pairs for User.role.
ROLE_CHOICES = (
    ('student', _('Student')),
    ('teacher', _('Teacher')),
)
class User(auth_models.AbstractUser):
    """
    Custom User model for the raven platform.

    Authentication uses the email address instead of a username
    (``username`` is removed and ``USERNAME_FIELD`` is ``email``).

    email: PK user's email, used for logging in
    first_name: str user's first name
    last_name: str user's last name
    ...
    if role == 'student'
        grade: FK(...) user's grade/class model
        grades: FK(...) user's grades
    if role == 'teacher':
        running_courses: FK(...)
    """
    # Drop the inherited username field; email is the unique identifier.
    username = None
    first_name = models.CharField(max_length=30, blank=True, verbose_name=_('First name'))
    last_name = models.CharField(max_length=150, blank=True, verbose_name=_('Last name'))
    email = models.EmailField(unique=True, verbose_name=_('Email address'))
    address = models.CharField(max_length=200, blank=True, verbose_name=_('Address'),
                               help_text=_('<b>Address in format</b>: [STREET NAME] [NUMBER], [CITY]'))
    phone = models.CharField(max_length=9, blank=True, verbose_name=_('Phone number'))
    gender = models.CharField(max_length=10, default='none', choices=GENDER_CHOICES,
                              verbose_name=_("User's gender"))
    # Either 'student' or 'teacher'; see the Student/Teacher proxy models.
    role = models.CharField(max_length=9, choices=ROLE_CHOICES)
    is_staff = models.BooleanField(default=False)
    is_active = models.BooleanField(default=True)
    date_joined = models.DateTimeField(verbose_name=_('Date joined'), default=timezone.now)
    date_birth = models.DateField(verbose_name=_('Date of birth'), blank=True, null=True,
                                  help_text=_('<b>Birthday date in format:</b> YYYY-MM-DD'))
    # Toggled by the user_logged_in / user_logged_out signal handlers below.
    is_online = models.BooleanField(default=False)
    description = models.TextField(null=True, blank=True, default="")
    image = models.ImageField(upload_to=get_file_path, default=settings.DEFAULT_USER_IMAGE)
    # True until the user completes their first login flow.
    first_login = models.BooleanField(default=True)

    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ('first_name', 'last_name',)

    objects = managers.CustomUserManager()

    @property
    def full_username(self) -> str:
        """Display name of the form 'First Last (email)'."""
        return f"{self.first_name} {self.last_name} ({self.email})"

    @property
    def is_student(self) -> bool:
        return self.role == 'student'

    @property
    def is_teacher(self) -> bool:
        return self.role == 'teacher'

    def __str__(self):
        return self.full_username

    def get_absolute_url(self):
        return reverse('users:profile-detail', args=(self.pk,))

    def get_image_url(self):
        return self.image.url
class Teacher(User):
    """
    Proxy model over :class:`User` restricted to ``role == 'teacher'``.

    No extra table is created (``proxy = True``); the custom manager
    filters the queryset to teacher rows.
    """
    objects = managers.TeacherUserManager()

    class Meta:
        proxy = True
        verbose_name = _("Teacher")
        verbose_name_plural = _("Teachers")
class Student(User):
    """
    Proxy model over :class:`User` restricted to ``role == 'student'``.

    No extra table is created (``proxy = True``); the custom manager
    filters the queryset to student rows.
    """
    objects = managers.StudentUserManager()

    class Meta:
        proxy = True
        verbose_name = _("Student")
        verbose_name_plural = _("Students")
@receiver(user_logged_in)
def got_online(sender, user, request, **kwargs):
    """Signal handler: mark the user as online when they log in.

    Passes ``update_fields`` so only the ``is_online`` column is written,
    instead of rewriting every field and potentially clobbering concurrent
    updates to the same row.
    """
    user.is_online = True
    user.save(update_fields=["is_online"])
@receiver(user_logged_out)
def got_offline(sender, user, request, **kwargs):
    """Signal handler: mark the user as offline when they log out.

    Passes ``update_fields`` so only the ``is_online`` column is written,
    instead of rewriting every field and potentially clobbering concurrent
    updates to the same row.
    """
    user.is_online = False
    user.save(update_fields=["is_online"])
| StarcoderdataPython |
1624678 | #!/usr/bin/env python3
## In this example, we demonstrate how a Korali experiment can
## be resumed from any point (generation). This is a useful feature
## for continuing jobs after an error, or to fragment big jobs into
## smaller ones that can better fit a supercomputer queue.
## First, we run a simple Korali experiment.
import sys
sys.path.append('./_model')
from model import *
import korali
# Set up the Korali engine and a single experiment.
k = korali.Engine()
e = korali.Experiment()

# Problem definition: optimize the external `model` objective function.
e["Problem"]["Type"] = "Optimization"
e["Problem"]["Objective Function"] = model

# Solver: Differential Evolution with 10 candidates, stopping at generation 50.
e["Solver"]["Type"] = "Optimizer/DEA"
e["Solver"]["Population Size"] = 10
e["Solver"]["Termination Criteria"]["Max Generations"] = 50

# Single search variable X bounded to [-10, 10].
e["Variables"][0]["Name"] = "X"
e["Variables"][0]["Lower Bound"] = -10.0
e["Variables"][0]["Upper Bound"] = +10.0

# Log and checkpoint every 5 generations so the run can be resumed later.
e["Console Output"]["Frequency"] = 5
e["File Output"]["Frequency"] = 5

print('------------------------------------------------------')
print('Now running first 50 generations...')
print('------------------------------------------------------')
k.run(e)

print('------------------------------------------------------')
print('Now running last 50 generations...')
print('------------------------------------------------------')

# Raising the generation limit and calling run() on the same experiment
# resumes from the last checkpoint instead of starting over.
e["Solver"]["Termination Criteria"]["Max Generations"] = 100
k.run(e)
| StarcoderdataPython |
9723121 | from types import MethodType
def deco_node_beta(self, node=None, level=0, indent=0):
    """Prefix *node* with the string form of the owner's ``level`` attribute.

    Written as an unbound function so it can be attached to an instance
    with ``types.MethodType``; ``level`` and ``indent`` are accepted but
    unused here.
    """
    prefix = str(self.level)
    return prefix + node
class Obj:
    # Empty placeholder; attributes are attached dynamically below.
    pass


a = Obj()
a.level = 2
# Bind deco_node_beta to this instance so it behaves like a method:
# MethodType supplies `a` as the `self` argument automatically.
a.deco_node_beta = MethodType(deco_node_beta, a)
print(a.deco_node_beta('beta'))  # prints "2beta"
| StarcoderdataPython |
3334620 | <gh_stars>1-10
# Created by <NAME>.
# GitHub: https://github.com/ikostan
# LinkedIn: https://www.linkedin.com/in/egor-kostan/
def check_for_factor(base, factor):
    """
    Test whether *factor* is a factor of *base*.

    Factors are numbers you can multiply together to get another number.

    :param base: the number being divided
    :param factor: the candidate divisor
    :return: True if factor divides base evenly, False otherwise
    """
    # The comparison already yields a bool; the previous
    # `True if ... else False` conditional was redundant.
    return base % factor == 0
| StarcoderdataPython |
3300240 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: huanglizhuo
# @Date: Sat Nov 18 16:57:23 CST 2017
'''
_____ __ __ ___ _ _ ___ _____
|_ _|\ \ / // __| | || | / \ |_ _|
| | \ \/\/ /| (__ | __ | | - | | |
_|_|_ \_/\_/ \___| |_||_| |_|_| _|_|_
_|"""""|_|"""""|_|"""""|_|"""""|_|"""""|_|"""""|
"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'
WeChat in terminal create by huanglizhuo
'''
from setuptools import setup, find_packages
# Package metadata and installation configuration for TWchat.
setup(
    name='TWchat',
    version='0.0.7.1',
    packages=find_packages(),
    # Runtime requirements: itchat drives the WeChat web API, urwid renders
    # the terminal UI, Pillow handles QR-code images, pypinyin sorts contacts.
    install_requires=[
        'itchat',
        'urwid',
        'Pillow',
        'pypinyin',
    ],
    # Installing creates a `twchat` console command that invokes TWchat.start().
    entry_points={
        'console_scripts': [
            'twchat= TWchat:start'
        ],
    },
    license='MIT',
    author='huanglizhuo',
    author_email='<EMAIL>',
    url='https://github.com/huanglizhuo/TWchat',
    description='A Geek style client for WeChat',
    keywords=['wechat', 'Geek', 'cli', 'terminal'],
)
| StarcoderdataPython |
8098876 | import os
# use if needed to pass args to external modules
import sys
# used for directory handling
import glob
import time
import threading
from helpers.parameters import (
parse_args, load_config
)
# Load creds modules
from helpers.handle_creds import (
load_correct_creds, test_api_key,
load_telegram_creds
)
from bot.settings import *
def dynamic_settings(type: str, TIME_DIFFERENCE: float, RECHECK_INTERVAL: float) -> None:
    """Recalculate the mutable trading parameters in ``settings_struct``.

    Adjusts stop-loss / take-profit / timeframe settings based on recent
    session performance (win/loss counts, consecutive losses) and, for
    ``type == 'mrs_settings'``, on market resistance/support levels.

    :param type: which adjustment pass to run; may also be overwritten
        internally to 'performance_adjust_up'/'performance_adjust_down'.
        NOTE(review): shadows the `type` builtin — renaming would change
        the public interface, so it is left as-is.
    :param TIME_DIFFERENCE: candle lookback in minutes.
        NOTE(review): immediately overwritten from parsed_config below,
        so the passed-in value is effectively ignored — confirm intent.
    :param RECHECK_INTERVAL: recheck interval.
        NOTE(review): unused inside this function.
    """
    global session_struct, settings_struct, trading_struct
    DYNAMIC_STOP_LOSS = settings_struct['STOP_LOSS']
    # Baseline (configured) values used as anchors for the dynamic ones.
    STOP_LOSS = parsed_config['trading_options']['STOP_LOSS']
    TRAILING_STOP_LOSS = parsed_config['trading_options']['TRAILING_STOP_LOSS']
    TIME_DIFFERENCE = parsed_config['trading_options']['TIME_DIFFERENCE']
    DYNAMIC_MIN_MAX = parsed_config['trading_options']['DYNAMIC_MIN_MAX']
    HOLDING_PRICE_THRESHOLD = parsed_config['trading_options']['HOLDING_PRICE_THRESHOLD']
    TRADE_SLOTS = parsed_config['trading_options']['TRADE_SLOTS']

    # calculate and define win/loss percent for STOPLOSS calculations
    if (session_struct['win_trade_count'] > 0) and (session_struct['loss_trade_count'] > 0):
        WIN_LOSS_PERCENT = round((session_struct['win_trade_count'] / (session_struct['win_trade_count'] + session_struct['loss_trade_count'])) * 100, 2)
    else:
        # No complete win+loss history yet: treat as 100% win rate.
        WIN_LOSS_PERCENT = 100

    if DYNAMIC_SETTINGS:
        # limiting STOP_LOSS TIME_DIFFERENCE and TRAILING_STOP_LOSS to dynamic min and max values
        if settings_struct['STOP_LOSS'] < STOP_LOSS / DYNAMIC_MIN_MAX:
            settings_struct['STOP_LOSS'] = STOP_LOSS / DYNAMIC_MIN_MAX
        if settings_struct['TRAILING_STOP_LOSS'] < TRAILING_STOP_LOSS / DYNAMIC_MIN_MAX:
            settings_struct['TRAILING_STOP_LOSS'] = TRAILING_STOP_LOSS / DYNAMIC_MIN_MAX

        # modifying of STOPLOSS based on closedtrades/tradeslots * win/loss percent and trailing stoploss based on profit to trade ratio
        # so we can not loose more than we can afford to
        if session_struct['closed_trades_percent'] > 0 and WIN_LOSS_PERCENT > 0 and session_struct['trade_slots'] > 0 and trading_struct['stop_loss_adjust'] == True:
            DYNAMIC_STOP_LOSS = session_struct['closed_trades_percent'] / TRADE_SLOTS * WIN_LOSS_PERCENT / 100
            # Blend (average) the current and newly derived stop loss.
            settings_struct['STOP_LOSS'] = (settings_struct['STOP_LOSS'] + DYNAMIC_STOP_LOSS) / 2
            settings_struct['TRAILING_STOP_LOSS'] = settings_struct['TRAILING_STOP_LOSS'] + session_struct['profit_to_trade_ratio'] / 2
            trading_struct['stop_loss_adjust'] = False

        if settings_struct['TIME_DIFFERENCE'] < TIME_DIFFERENCE / DYNAMIC_MIN_MAX:
            settings_struct['TIME_DIFFERENCE'] = TIME_DIFFERENCE / DYNAMIC_MIN_MAX
        #if settings_struct['STOP_LOSS'] > STOP_LOSS * DYNAMIC_MIN_MAX:
        #settings_struct['STOP_LOSS'] = STOP_LOSS * DYNAMIC_MIN_MAX
        if settings_struct['TIME_DIFFERENCE'] > TIME_DIFFERENCE * DYNAMIC_MIN_MAX:
            settings_struct['TIME_DIFFERENCE'] = TIME_DIFFERENCE * DYNAMIC_MIN_MAX
        if settings_struct['TRAILING_STOP_LOSS'] > STOP_LOSS * DYNAMIC_MIN_MAX:
            settings_struct['TRAILING_STOP_LOSS'] = TRAILING_STOP_LOSS * DYNAMIC_MIN_MAX
        if settings_struct['HOLDING_PRICE_THRESHOLD'] < HOLDING_PRICE_THRESHOLD:
            settings_struct['HOLDING_PRICE_THRESHOLD'] = HOLDING_PRICE_THRESHOLD

        # this part checks to see if last trade was a win if it was it checks to see what was previous dynamics state and if it was up
        # it will go up with TIMEDIFFERENCE by % percent and if it was down it will go down with it, also it will TRIGGER
        # all other settings adding % on every win
        if session_struct['last_trade_won'] == True:
            if session_struct['dynamics_state'] == 'up':
                settings_struct['TIME_DIFFERENCE'] = settings_struct['TIME_DIFFERENCE'] + (settings_struct['TIME_DIFFERENCE'] * DYNAMIC_WIN_LOSS_UP) / 100
                session_struct['dynamics_state'] = 'up'
            if session_struct['dynamics_state'] == 'down':
                settings_struct['TIME_DIFFERENCE'] = settings_struct['TIME_DIFFERENCE'] - (settings_struct['TIME_DIFFERENCE'] * DYNAMIC_WIN_LOSS_UP) / 100
                session_struct['dynamics_state'] = 'down'
            session_struct['last_trade_won'] = 'none'
            type = 'performance_adjust_up'

        # this code will change "direction" for timedifference change aka if it was up it will go down and vice versa on next win
        # to prevent accumulating losses on same timedifference and to sync with market better, also it will subtract all other
        # dynamic settings by corresponding numberes to protect from consecutive losses
        if session_struct['last_trade_won'] == False:
            # NOTE(review): these two sequential `if`s flip the state and then
            # immediately flip it back (up -> down -> up), so the state never
            # actually changes here.  An `elif` was probably intended — confirm
            # before changing, as downstream behavior depends on it.
            if session_struct['dynamics_state'] == 'up':
                session_struct['dynamics_state'] = 'down'
            if session_struct['dynamics_state'] == 'down':
                session_struct['dynamics_state'] = 'up'
            session_struct['last_trade_won'] = 'none'
            type = 'performance_adjust_down'

        # this part of code jumps to different part of timedifference scale this is to protect from consecutive losses
        # and to change context so bot goes from 5 minute range to 50 minute range for example if those were corresponding
        # scale values, it jumps on 2 consecutive losses
        if trading_struct['consecutive_loss'] > 1:
            # NOTE(review): the first branch can push TIME_DIFFERENCE below the
            # baseline, which then also satisfies the second branch — confirm
            # whether an `elif` was intended.
            if settings_struct['TIME_DIFFERENCE'] > TIME_DIFFERENCE:
                settings_struct['TIME_DIFFERENCE'] = TIME_DIFFERENCE - (settings_struct['TIME_DIFFERENCE'] / TIME_DIFFERENCE * TIME_DIFFERENCE / DYNAMIC_MIN_MAX)
                print(f"TIMEFRAME JUMP TRIGGERED! TIME_DIFFERENCE: {settings_struct['TIME_DIFFERENCE']}")
            if settings_struct['TIME_DIFFERENCE'] < TIME_DIFFERENCE:
                settings_struct['TIME_DIFFERENCE'] = (TIME_DIFFERENCE * DYNAMIC_MIN_MAX) - (settings_struct['TIME_DIFFERENCE'] / TIME_DIFFERENCE * TIME_DIFFERENCE * DYNAMIC_MIN_MAX)
                print(f"TIMEFRAME JUMP TRIGGERED! TIME_DIFFERENCE: {settings_struct['TIME_DIFFERENCE']}")
            trading_struct['consecutive_loss'] = 0
        #print(f'{txcolors.NOTICE}>> TRADE_WON: {session_struct['last_trade_won']} and DYNAMICS_STATE: {session_struct['dynamics_state']} <<<{txcolors.DEFAULT}')

    # this part of code alteres trading settings for next trade based on win/loss so if we win all our settings get more
    # if we loose they get less so we protect from consecutive losses and we are more "brave" on consecutive wins
    if type == 'performance_adjust_up':
        settings_struct['STOP_LOSS'] = settings_struct['STOP_LOSS'] + (settings_struct['STOP_LOSS'] * DYNAMIC_WIN_LOSS_UP) / 100
        settings_struct['TAKE_PROFIT'] = settings_struct['TAKE_PROFIT'] + (settings_struct['TAKE_PROFIT'] * DYNAMIC_WIN_LOSS_UP) / 100
        settings_struct['TRAILING_STOP_LOSS'] = settings_struct['TRAILING_STOP_LOSS'] + (settings_struct['TRAILING_STOP_LOSS'] * DYNAMIC_WIN_LOSS_UP) / 100
        settings_struct['CHANGE_IN_PRICE_MAX'] = settings_struct['CHANGE_IN_PRICE_MAX'] - (settings_struct['CHANGE_IN_PRICE_MAX'] * DYNAMIC_WIN_LOSS_UP) / 100
        settings_struct['CHANGE_IN_PRICE_MIN'] = settings_struct['CHANGE_IN_PRICE_MIN'] + (settings_struct['CHANGE_IN_PRICE_MIN'] * DYNAMIC_WIN_LOSS_UP) / 100
        settings_struct['DYNAMIC_CHANGE_IN_PRICE'] = settings_struct['DYNAMIC_CHANGE_IN_PRICE'] - (settings_struct['DYNAMIC_CHANGE_IN_PRICE'] * DYNAMIC_WIN_LOSS_UP) / 100 \
            - (settings_struct['DYNAMIC_CHANGE_IN_PRICE'] * settings_struct['TIME_DIFFERENCE']) / 100
        settings_struct['HOLDING_PRICE_THRESHOLD'] = settings_struct['HOLDING_PRICE_THRESHOLD'] + (settings_struct['HOLDING_PRICE_THRESHOLD'] * DYNAMIC_WIN_LOSS_UP) / 100
        session_struct['dynamic'] = 'none'
        print(f"{txcolors.NOTICE}>> DYNAMICS_UP Changing STOP_LOSS: {settings_struct['STOP_LOSS']:.2f}/{DYNAMIC_WIN_LOSS_UP:.2f} - TAKE_PROFIT: {settings_struct['TAKE_PROFIT']:.2f}/{DYNAMIC_WIN_LOSS_UP:.2f} - TRAILING_STOP_LOSS: {settings_struct['TRAILING_STOP_LOSS']:.2f}/{DYNAMIC_WIN_LOSS_UP:.2f} CIP:{settings_struct['CHANGE_IN_PRICE_MIN']:.4f}/{settings_struct['CHANGE_IN_PRICE_MAX']:.4f}/{DYNAMIC_WIN_LOSS_UP:.2f} HTL: {settings_struct['HOLDING_TIME_LIMIT']:.2f} TD: {settings_struct['TIME_DIFFERENCE']} RI: {settings_struct['RECHECK_INTERVAL']} <<{txcolors.DEFAULT}")

    if type == 'performance_adjust_down':
        settings_struct['STOP_LOSS'] = settings_struct['STOP_LOSS'] - (settings_struct['STOP_LOSS'] * DYNAMIC_WIN_LOSS_DOWN) / 100
        settings_struct['TAKE_PROFIT'] = settings_struct['TAKE_PROFIT'] - (settings_struct['TAKE_PROFIT'] * DYNAMIC_WIN_LOSS_DOWN) / 100
        settings_struct['TRAILING_STOP_LOSS'] = settings_struct['TRAILING_STOP_LOSS'] - (settings_struct['TRAILING_STOP_LOSS'] * DYNAMIC_WIN_LOSS_DOWN) / 100
        settings_struct['CHANGE_IN_PRICE_MAX'] = settings_struct['CHANGE_IN_PRICE_MAX'] + (settings_struct['CHANGE_IN_PRICE_MAX'] * DYNAMIC_WIN_LOSS_DOWN) / 100
        settings_struct['CHANGE_IN_PRICE_MIN'] = settings_struct['CHANGE_IN_PRICE_MIN'] - (settings_struct['CHANGE_IN_PRICE_MIN'] * DYNAMIC_WIN_LOSS_DOWN) / 100
        settings_struct['DYNAMIC_CHANGE_IN_PRICE'] = settings_struct['DYNAMIC_CHANGE_IN_PRICE'] + (settings_struct['DYNAMIC_CHANGE_IN_PRICE'] * DYNAMIC_WIN_LOSS_DOWN) / 100 \
            + (settings_struct['DYNAMIC_CHANGE_IN_PRICE'] * settings_struct['TIME_DIFFERENCE']) / 100
        settings_struct['HOLDING_PRICE_THRESHOLD'] = settings_struct['HOLDING_PRICE_THRESHOLD'] - (settings_struct['HOLDING_PRICE_THRESHOLD'] * DYNAMIC_WIN_LOSS_DOWN) / 100
        session_struct['dynamic'] = 'none'
        print(f"{txcolors.NOTICE}>> DYNAMICS_DOWN Changing STOP_LOSS: {settings_struct['STOP_LOSS']:.2f}/{DYNAMIC_WIN_LOSS_DOWN:.2f} - TAKE_PROFIT: {settings_struct['TAKE_PROFIT']:.2f}/{DYNAMIC_WIN_LOSS_DOWN:.2f} - TRAILING_STOP_LOSS: {settings_struct['TRAILING_STOP_LOSS']:.2f}/{DYNAMIC_WIN_LOSS_DOWN:.2f} CIP:{settings_struct['CHANGE_IN_PRICE_MIN']:.4f}/{settings_struct['CHANGE_IN_PRICE_MAX']:.4f}/{DYNAMIC_WIN_LOSS_DOWN:.2f} HTL: {settings_struct['HOLDING_TIME_LIMIT']:.2f} TD: {settings_struct['TIME_DIFFERENCE']} RI: {settings_struct['RECHECK_INTERVAL']} <<{txcolors.DEFAULT}")

    # this code makes our market resistance and support levels triggres for buys and also applyies our dynamics based on wins/losses
    if type == 'mrs_settings':
        if session_struct['prices_grabbed'] == True:
            settings_struct['CHANGE_IN_PRICE_MIN'] = session_struct['market_support'] + (session_struct['market_support'] * settings_struct['DYNAMIC_CHANGE_IN_PRICE']) / 100
            settings_struct['CHANGE_IN_PRICE_MAX'] = session_struct['market_support'] - (session_struct['market_support'] * settings_struct['DYNAMIC_CHANGE_IN_PRICE']) / 100
            settings_struct['TAKE_PROFIT'] = session_struct['market_resistance'] + (session_struct['market_resistance'] * settings_struct['DYNAMIC_CHANGE_IN_PRICE']) / 100
        if session_struct['loss_trade_count'] > 1:
            # Average loss level acts as the per-session trade "support".
            trading_struct['trade_support'] = trading_struct['sum_lost_trades'] / session_struct['loss_trade_count']
        if session_struct['win_trade_count'] > 1:
            # Average win level acts as the per-session trade "resistance".
            trading_struct['trade_resistance'] = trading_struct['sum_won_trades'] / session_struct['win_trade_count']
            settings_struct['TRAILING_STOP_LOSS'] = trading_struct['trade_resistance']

    # this part of code changes time if we use TEST or REAL mode based on timing in each, aka realmode uses miliseconds so we
    # multiply for HOLDING TIME LIMIT
    settings_struct['HOLDING_TIME_LIMIT'] = (settings_struct['TIME_DIFFERENCE'] * 60 * 1000) * HOLDING_INTERVAL_LIMIT
| StarcoderdataPython |
191040 | ### game.py is sort of the controller of the IronPython SGF Editor. The main
### class is Game, which provides calls for GUI event handling and makes calls
### to update the board and moves model.
import wpf
### Don't need this now due to new wpf module, but left as documentation of usage.
###
### Needed for System.Windows.Media ...
#clr.AddReference("PresentationCore")
## Needed for System.Windows
#clr.AddReference('PresentationFramework')
from System.Windows.Media import Colors
from System.Windows import MessageBox
import goboard
import sgfparser
# Public API of this module.
__all__ = ["Game", "create_default_game", "MAX_BOARD_SIZE", "opposite_move_color",
           "create_parsed_game"]

# Supported Go board sizes (standard boards run from 9x9 to 19x19).
MAX_BOARD_SIZE = 19
MIN_BOARD_SIZE = 9
# Default compensation points for White moving second (kept as a string
# because it is parsed/serialized with SGF text).
DEFAULT_KOMI = "6.5"
### The Game class is a controller of sorts for the app. It provides helper functions
### for the UI that event handlers can call. These functions call on the model,
### GoBoard, and they call on the UI APIs to update, such as enabling/disabling buttons.
###
class Game (object):
def __init__ (self, main_win, size, handicap, komi, handicap_stones = None):
    """Create a game controller for a board of `size`, wiring it to the UI.

    main_win        -- the WPF application window (UI callbacks target it)
    size            -- board dimension (e.g. 19 for 19x19)
    handicap        -- number of handicap stones (int, or "0" from SGF)
    komi            -- compensation points string, e.g. "6.5"
    handicap_stones -- optional pre-parsed handicap Move list (from SGF)
    """
    ## _main_win is the WPF application object.
    self._main_win = main_win
    ## board holds the GoBoard model.
    self.board = goboard.GoBoard(size)
    self._init_handicap_next_color(handicap, handicap_stones)
    ## komi is either 0.5, 6.5, or <int>.5
    self.komi = komi
    ## _state helps with enabling and disabling buttons.
    self._state = GameState.NOT_STARTED
    ## first_move holds the first move which links to subsequent moves.
    ## when displaying the intial board state of a started game, this is
    ## the current move.
    self.first_move = None
    ## Branches holds all the first moves, while first_move points to one
    ## of these.  This is None until there's more than one first move.
    self.branches = None
    ## The following hold any markup for the initial board state
    self.setup_adornments = []
    ## current_move keeps point to the move in the tree we're focused on.  It
    ## is None when the board is in the initial state, and first_move then
    ## points to the current move.
    self.current_move = None
    ## Comments holds any initial board state comments for the game.  This
    ## is guaranteed to be set when opening a file, which write_game
    ## depends on.
    self.comments = ""
    ## Move count is just a counter in case we add a feature to number
    ## moves or show the move # in the title bar or something.
    self.move_count = 0
    ## parsed_game is not None when we opened a file to edit.
    self.parsed_game = None
    ## filename holds the full pathname if we read from a file or ever
    ## saved this game.  Filebase is just <name>.<ext>.
    self.filename = None
    self.filebase = None
    ## dirty means we've modified this game in some way
    self.dirty = False
    ## player members hold strings if there are player names in record.
    self.player_black = None
    self.player_white = None
    ## _cut_move holds the head of a sub tree that was last cut.
    ## Note, the public cut_move is a method.
    self._cut_move = None
    main_win.setup_board_display(self)
### _init_handicap_next_color decides who plays first and lays out any
### handicap stones.  Stones may come from a parsed game record; otherwise
### the traditional star-point locations are used.  If stones are supplied,
### their count must match the handicap number.
###
def _init_handicap_next_color (self, handicap, handicap_stones):
    self.handicap = handicap
    ## Handicap may arrive as the string "0" from a parsed record.
    if handicap == 0 or handicap == "0":
        self.handicap_moves = None
        self.next_color = Colors.Black
        return
    ## With a handicap, White moves first.
    self.next_color = Colors.White
    self.handicap_moves = handicap_stones
    if handicap_stones is None:
        ## No parsed stones: place Black stones on traditional star points.
        self.handicap_moves = []
        star_points = []
        if handicap >= 2:
            star_points.extend([(4, 16), (16, 4)])
        if handicap >= 3:
            star_points.append((16, 16))
        if handicap >= 4:
            star_points.append((4, 4))
        if handicap == 5:
            star_points.append((10, 10))
        if handicap >= 6:
            star_points.extend([(10, 4), (10, 16)])
        if handicap == 7:
            star_points.append((10, 10))
        if handicap >= 8:
            star_points.extend([(4, 10), (16, 10)])
        if handicap == 9:
            star_points.append((10, 10))
        for row, col in star_points:
            stone = goboard.Move(row, col, Colors.Black)
            self.handicap_moves.append(stone)
            self.board.add_stone(stone)
    elif len(handicap_stones) != handicap:
        raise Exception("Handicap number is not equal to all " +
                        "black stones in parsed root node.")
    else:
        ## Parsed stones agree with the handicap count; just place them.
        for stone in handicap_stones:
            self.board.add_stone(stone)
#def _message_board (self):
# res = ""
# for row in self.board.moves:
# col_str = ""
# for col in row:
# if col is None:
# col_str = col_str + ". "
# elif col.color == Colors.Black:
# col_str = col_str + "X "
# elif col.color == Colors.White:
# col_str = col_str + "O "
# else:
# col_str = col_str + "?!"
# res = res + col_str + "\n"
# MessageBox.Show(res)
###
### Making Moves while Playing Game
###
### make_move adds a move in sequence to the game and board at row, col.
### Row, col index from the top left corner. Other than marking the
### current move with a UI adornments, this handles clicking and adding
### moves to a game. It handles branching if the current move already has
### next moves and displays message if the row, col already has a move at
### that location. If this is the first move, this function sets the game
### state to started. It sets next move color and so on. This returns the
### new move (or an existing move if the user clicked on a location where
### there is a move on another branch following the current move).
###
def make_move (self, row, col):
cur_move = self.current_move
maybe_branching = ((cur_move is not None and cur_move.next is not None) or
(cur_move is None and self.first_move is not None))
if self.board.has_stone(row, col):
MessageBox.Show("Can't play where there already is a stone.")
return None
## move may be set below to pre-existing move, tossing this new object.
move = goboard.Move(row, col, self.next_color)
if self._check_self_capture_no_kill(move):
MessageBox.Show("You cannot make a move that removes a group's last liberty")
return None
if maybe_branching:
tmp = self._make_branching_move(cur_move, move)
if tmp is move:
## Just because we're branching, doesn't mean the game is dirty.
## If added new move, mark dirty since user could have saved game.
self.dirty = True
else:
## Found existing move at location in branches, just reply it for
## capture effects, etc. Don't need to check ReplayMove for conflicting
## board move since user clicked and space is empty.
return self.replay_move()
else:
if self._state is GameState.NOT_STARTED:
self.first_move = move
self._state = GameState.STARTED
else:
cur_move.next = move
move.previous = cur_move
self.dirty = True
self._save_and_update_comments(cur_move, move)
self.board.add_stone(move)
self.current_move = move
move.number = self.move_count + 1
self.move_count += 1
self.next_color = opposite_move_color(self.next_color)
self._main_win.prevButton.IsEnabled = True
self._main_win.homeButton.IsEnabled = True
if move.next is None:
## Made a move or branch that is at end of line of play.
self._main_win.nextButton.IsEnabled = False
self._main_win.endButton.IsEnabled = False
else:
## Made a move that is already the next move in some branch,
## and it has a next move.
self._main_win.nextButton.IsEnabled = True
self._main_win.endButton.IsEnabled = True
if len(move.dead_stones) != 0:
self.remove_stones(move.dead_stones)
return move
### _check_self_capture_no_kill reports whether playing `move` would be an
### illegal suicide: it takes its own group's last liberty while capturing
### no opponent group.  The stone is placed on the board only temporarily;
### the finally clause guarantees it is lifted again even if a check throws.
###
def _check_self_capture_no_kill (self, move):
    try:
        self.board.add_stone(move)
        captures_nothing = len(self.check_for_kill(move)) == 0
        group_has_liberty = self.find_liberty(move.row, move.column, move.color)
        return (not group_has_liberty) and captures_nothing
    finally:
        self.board.remove_stone(move)
### _make_branching_move sets up cur_move to have more than one next move,
### that is, branches.  If the new move, move, is at the same location as
### a next move of cur_move, then this function loses move in lieu of the
### existing next move.  This also sets up any next and prev pointers as
### appropriate and updates the branches combo.
###
def _make_branching_move (self, cur_move, move):
    if cur_move is None:
        ## Branching at the initial board position: the Game object itself
        ## is passed as the branch holder (it has a .branches member too).
        move = self._make_branching_move_branches(self, self.first_move, move)
        self.first_move = move
    else:
        move = self._make_branching_move_branches(cur_move, cur_move.next, move)
        cur_move.next = move
        move.previous = cur_move
    ## move may be pre-existing move with branches, or may need to clear combo ...
    self._main_win.update_branch_combo(move.branches, move.next)
    return move
### _make_branching_move_branches takes a game or move object (the current
### move), the current next move, and a move representing where the user
### clicked.  If there are no branches yet, then see if new_move is at the
### same location as next and toss new_move in this case, which also means
### there are still no branches yet.
###
def _make_branching_move_branches (self, game_or_move, next, new_move):
    if game_or_move.branches is None:
        ## Seed branches with the existing successor so the helper below
        ## always receives a non-None branches list.
        game_or_move.branches = [next] # Must pass non-None branches.
        move = self._maybe_update_branches(game_or_move, new_move)
        if move is next:
            ## new_move and next are same, keep next and clean up branches.
            game_or_move.branches = None
            return next
        ## Keep branches and return move, which may be new or pre-existing.
        return move
    else:
        ## Branches already exist; just look up or add the clicked move.
        return self._maybe_update_branches(game_or_move, new_move)
### _maybe_update_branches looks up `move`'s board location among an
### existing (non-None) branches list on a game or move object.  If a
### branch already sits at that location, the pre-existing move is returned
### (after ensuring it is ready to render); otherwise `move` is appended as
### a new branch and returned.
###
def _maybe_update_branches (self, game_or_move, move):
    idx = _list_find(move, game_or_move.branches,
                     lambda a, b: a.row == b.row and a.column == b.column)
    if idx == -1:
        ## Location unseen in this branch set; record the new move.
        game_or_move.branches.append(move)
        return move
    existing = game_or_move.branches[idx]
    if not existing.rendered:
        self._ready_for_rendering(existing)
    return existing
### check_for_kill determines whether `move` captures any opponent stones.
### It records the captured moves on move.dead_stones and returns the list.
### find_liberty/collect_stones do the traversal work; worst case we walk
### a group twice, which has no observed performance impact.
###
### One `visited` grid is shared across all four neighbor searches: a
### stone reached in an earlier search only stays marked if that search
### found no liberties, so a later search hitting it would correctly
### conclude False again — no need to reset between directions.
###
def check_for_kill (self, move):
    row = move.row
    col = move.column
    ## Consider later if this is too much consing per move.
    visited = [[False for j in xrange(self.board.size)] for i in xrange(self.board.size)]
    opp_color = opposite_move_color(move.color)
    dead_stones = []
    ## Probe the four neighbors in the original left/up/right/down order:
    ## (opponent-stone predicate, neighbor row, neighbor col).
    neighbor_probes = ((self.board.has_stone_color_left,  row,     col - 1),
                       (self.board.has_stone_color_up,    row - 1, col),
                       (self.board.has_stone_color_right, row,     col + 1),
                       (self.board.has_stone_color_down,  row + 1, col))
    for has_opp_stone, n_row, n_col in neighbor_probes:
        if (has_opp_stone(row, col, opp_color) and
                not self.find_liberty(n_row, n_col, opp_color, visited)):
            self.collect_stones(n_row, n_col, opp_color, dead_stones)
    move.dead_stones = dead_stones
    return dead_stones
### find_Liberty starts at row, col traversing all stones with the supplied
### color to see if any stone has a liberty. It returns true if it finds a
### liberty. If we've already been here, then its search is still pending
### (and other stones it connects with should be searched). See comment
### for check_for_kill. Visited can be null if you just want to check if a
### single stone/group has any liberties, say, to see if a move was a self capture.
###
    def find_liberty (self, row, col, color, visited = None):
        """Return True if the group of *color* stones at (row, col) has a liberty.

        Depth-first search over connected same-colored stones, checking each
        stone's four neighbors for an empty point first.  *visited* may be
        None to test a single stone/group in isolation (e.g., a self-capture
        check).  Rows/cols are 1-based; the visited matrix is 0-based.
        """
        if visited is None:
            ## Consider later if this is too much consing per move.
            ## We cons this for self kill check, cons another for CheckForKill of opponent stones.
            visited = [[None for i in xrange(self.board.size)]
                       for j in xrange(self.board.size)]
        if visited[row - 1][col - 1]:
            return False
        ## Check for immediate liberty (breadth first).
        if col != 1 and not self.board.has_stone_left(row, col):
            return True
        if row != 1 and not self.board.has_stone_up(row, col):
            return True
        if col != self.board.size and not self.board.has_stone_right(row, col):
            return True
        if row != self.board.size and not self.board.has_stone_down(row, col):
            return True
        ## No immediate liberties, so keep looking ...
        visited[row - 1][col - 1] = True
        if (self.board.has_stone_color_left(row, col, color) and
            self.find_liberty(row, col - 1, color, visited)):
            return True;
        if (self.board.has_stone_color_up(row, col, color) and
            self.find_liberty(row - 1, col, color, visited)):
            return True;
        if (self.board.has_stone_color_right(row, col, color) and
            self.find_liberty(row, col + 1, color, visited)):
            return True;
        if (self.board.has_stone_color_down(row, col, color) and
            self.find_liberty(row + 1, col, color, visited)):
            return True;
        ## No liberties ...
        return False;
### CollectStones gathers all the stones at row, col of color color, adding them
### to the list dead_stones. This does not update the board model by removing
### the stones. CheckForKill uses this to collect stones, ReadyForRendering calls
### CheckForKill to prepare moves for rendering, but it shouldn't remove stones
### from the board.
###
    def collect_stones (self, row, col, color, dead_stones, visited = None):
        """Gather the connected group of *color* stones at (row, col) into dead_stones.

        Appends the board's Move objects for the whole group.  Purely
        collects; it does NOT remove stones from the board model (callers
        such as check_for_kill decide what happens to the group).
        """
        if visited is None:
            ## Consider later if this is too much consing per move.
            ## We cons this for self kill check, cons another for CheckForKill of opponent stones.
            visited = [[None for i in xrange(self.board.size)]
                       for j in xrange(self.board.size)]
        dead_stones.append(self.board.move_at(row, col))
        visited[row - 1][col - 1] = True
        ## Recurse on unvisited same-colored neighbors (visited is 0-based, so
        ## e.g. the left neighbor (row, col - 1) maps to [row - 1][col - 2]).
        if self.board.has_stone_color_left(row, col, color) and not visited[row - 1][col - 2]:
            self.collect_stones(row, col - 1, color, dead_stones, visited)
        if self.board.has_stone_color_up(row, col, color) and not visited[row - 2][col - 1]:
            self.collect_stones(row - 1, col, color, dead_stones, visited)
        if self.board.has_stone_color_right(row, col, color) and not visited[row - 1][col]:
            self.collect_stones(row, col + 1, color, dead_stones, visited)
        if self.board.has_stone_color_down(row, col, color) and not visited[row][col - 1]:
            self.collect_stones(row + 1, col, color, dead_stones, visited)
###
    ### Unwinding Moves and Going to Start
###
### unwind_move removes the last move made (see make_move). Other than
### marking the previous move as the current move with a UI adornments,
### this handles rewinding game moves. If the game has not started, or
### there's no current move, this signals an error. This returns the move
### that was current before rewinding.
###
    def unwind_move (self):
        """Rewind the last move made, restoring board, model, and UI state.

        Returns the move that was current before rewinding.  Raises if the
        game has not started or there is no current move (the UI should have
        disabled the Previous button in those cases).
        """
        if self._state is GameState.NOT_STARTED:
            raise Exception("Previous button should be disabled if game not started.")
        current = self.current_move
        if current is None:
            raise Exception("Previous button should be disabled if no current move.")
        if not current.is_pass:
            self.board.remove_stone(current)
            ## Resurrect any stones this move had captured.
            self.add_stones(current.dead_stones)
        self.next_color = current.color
        self.move_count -= 1
        previous = current.previous
        self._save_and_update_comments(current, previous)
        ## Rewinding always makes forward navigation possible; backward
        ## navigation only if we are not back at the initial board.
        if previous is None:
            self._main_win.prevButton.IsEnabled = False
            self._main_win.homeButton.IsEnabled = False
        self._main_win.nextButton.IsEnabled = True
        self._main_win.endButton.IsEnabled = True
        if previous is None:
            self._main_win.update_branch_combo(self.branches, current)
        else:
            self._main_win.update_branch_combo(previous.branches, current)
        self.current_move = previous
        return current
def can_unwind_move (self):
return (not self._state is GameState.NOT_STARTED and
self.current_move is not None)
def add_stones (self, stones):
self._main_win.add_stones(stones)
for m in stones:
self.board.add_stone(m)
### goto_start resets the model to the initial board state before any moves
### have been played, and then resets the UI. This assumes the game has
### started.
###
    def goto_start (self):
        """Reset the model and UI to the initial board state (no moves played).

        Assumes the game has started and there is a current move; raises
        otherwise (the UI should have disabled the Home button).
        """
        if self._state is GameState.NOT_STARTED:
            raise Exception("Home button should be disabled if game not started.")
        current = self.current_move
        if current is None:
            raise Exception("Home button should be disabled if no current move.")
        self._save_and_update_comments(current, None)
        self.board.goto_start()
        self._main_win.reset_to_start(current)
        ## Updating self.current_move, so after here, lexical 'current' is different
        ## White moves first only when there are handicap stones.
        self.next_color = (self.handicap_moves is None and Colors.Black) or Colors.White
        self.current_move = None
        self.move_count = 0
        self._main_win.update_branch_combo(self.branches, self.first_move)
        self._main_win.prevButton.IsEnabled = False
        self._main_win.homeButton.IsEnabled = False
        self._main_win.nextButton.IsEnabled = True
        self._main_win.endButton.IsEnabled = True
###
    ### Replaying Moves and Going to End
###
    ### replay_move advances to and plays the next move that follows the
    ### current move (see make_move). Other than marking the next move as the current move with
### a UI adornments, this handles replaying game moves. The next move is
### always move.next which points to the selected branch if there is more
### than one next move. If the game hasn't started, or there's no next
    ### move, this signals an error.  This returns the newly current (just
    ### replayed) move, or None if the move could not be placed on the board.
###
    def replay_move (self):
        """Advance to and play the move following the current move.

        The next move is always move.next, which points at the selected
        branch when there are several.  Returns the newly current move, or
        None if the move could not be placed on the board (possible when
        replaying a pasted, inconsistent branch).  Raises if the game has
        not started or there is no next move.
        """
        if self._state is GameState.NOT_STARTED:
            raise Exception("Next button should be disabled if game not started.")
        ## advance self.current_move to the next move.
        fixup_move = self.current_move
        if self.current_move is None:
            self.current_move = self.first_move
        elif self.current_move.next is None:
            raise Exception("Next button should be disabled if no next move.")
        else:
            self.current_move = self.current_move.next
        if self._replay_move_update_model(self.current_move) is None:
            ## Board conflict; restore the move that was current on entry.
            self.current_move = fixup_move
            return None
        self._save_and_update_comments(self.current_move.previous, self.current_move)
        if self.current_move.next is None:
            self._main_win.nextButton.IsEnabled = False
            self._main_win.endButton.IsEnabled = False
        self._main_win.prevButton.IsEnabled = True
        self._main_win.homeButton.IsEnabled = True
        self._main_win.update_branch_combo(self.current_move.branches, self.current_move.next)
        self._main_win.commentBox.Text = self.current_move.comments
        return self.current_move
def can_replay_move (self):
return (self._state is GameState.STARTED and
(self.current_move is None or self.current_move.next is not None))
### goto_last_move handles jumping to the end of the game record following
### all the currently selected branches. This handles all game/board model
### and UI updates, including current move adornments. If the game hasn't
### started, this throws an error.
###
def goto_last_move (self):
if self._state is GameState.NOT_STARTED:
raise Exception("End button should be disabled if game not started.")
current = self.current_move
save_orig_current = current
## Setup for loop ...
if current is None:
current = self.first_move
if self._replay_move_update_model(current) is None:
## No partial actions/state to cleanup or revert
return
self._main_win.add_next_stone_no_current(current)
next = current.next
else:
next = current.next
## Walk to last move
while next is not None:
if self._replay_move_update_model(next) is None:
MessageBox.Show("Next move conincides with a move on the board. " +
"You are replying moves from a pasted branch that's inconsistent.")
break
self._main_win.add_next_stone_no_current(next)
current = next
next = current.next
## Update last move UI
self._save_and_update_comments(save_orig_current, current)
self._main_win.add_current_adornments(current)
self.current_move = current
self.move_count = current.number
self.next_color = opposite_move_color(current.color)
self._main_win.prevButton.IsEnabled = True
self._main_win.homeButton.IsEnabled = True
self._main_win.nextButton.IsEnabled = next is not None
self._main_win.endButton.IsEnabled = next is not None
## There can't be any branches, but this ensures UI is cleared.
if next is not None:
self._main_win.update_branch_combo(current.branches, next)
else:
self._main_win.update_branch_combo(None, None)
### _replay_move_update_model updates the board model, next move color,
### etc., when replaying a move in the game record. This also handles
### rendering a move that has only been read from a file and never
### displayed in the UI. Rendering here just means its state will be as if
    ### it had been rendered before.  We must setup branches to Move objects,
### and make sure the next Move object is created and marked unrendered so
### that code elsewhere that checks move.next will know there's a next
### move.
###
    def _replay_move_update_model (self, move):
        """Update the board model, move count, and next color for replaying *move*.

        Returns move on success, or None when the move's location already
        holds a stone (possible when replaying a pasted, inconsistent
        branch).  Also finishes rendering moves that were read from a file
        but never displayed.
        """
        if not move.is_pass:
            ## Check if board has stone already since might be replaying branch
            ## that was pasted into tree (and moves could conflict).
            if not self.board.has_stone(move.row, move.column):
                self.board.add_stone(move)
            else:
                return None;
        self.next_color = opposite_move_color(move.color)
        if not move.rendered:
            ## Move points to a ParsedNode and has never been displayed.
            self._ready_for_rendering(move)
        self.move_count += 1
        self.remove_stones(move.dead_stones)
        return move
def remove_stones (self, stones):
self._main_win.remove_stones(stones)
for m in stones:
self.board.remove_stone(m)
### _ready_for_rendering puts move in a state as if it had been displayed
### on the screen before. Moves from parsed nodes need to be created when
### their previous move is actually displayed on the board so that there is
    ### a next Move object in the game tree for consistency with the rest of
### model. However, until the moves are actually ready to be displayed
### they do not have captured lists hanging off them, their next branches
### and moves set up, etc. This function makes the moves completely ready
### for display.
###
    def _ready_for_rendering (self, move):
        """Complete a parsed-but-never-displayed move so it can be rendered.

        Computes captures, converts the parsed node's branch/next nodes into
        Move objects wired into the game tree, and replays any adornments
        recorded in the parsed node.  Returns move, now marked rendered.
        """
        if not move.is_pass:
            self.check_for_kill(move)
        pn = move.parsed_node
        mnext = None
        if pn.branches is not None:
            ## Materialize every parsed branch as an (unrendered) Move.
            moves = []
            for n in pn.branches:
                m = _parsed_node_to_move(n)
                m.number = self.move_count + 2
                m.previous = move
                moves.append(m)
            move.branches = moves
            mnext = moves[0]
        elif pn.next is not None:
            mnext = _parsed_node_to_move(pn.next)
            mnext.number = self.move_count + 2
            mnext.previous = move
        move.next = mnext
        self._replay_unrendered_adornments(move)
        move.rendered = True
        return move
### _replay_unrendered_adornments is just a helper for
### _replay_move_update_model. This does not need to check add_adornment
### for a None result since we're trusting the file was written correctly,
### or it doesn't matter if there are dup'ed letters.
###
    def _replay_unrendered_adornments (self, move):
        """Create and display the TR/SQ/LB adornments stored in move's parsed node.

        Helper for _replay_move_update_model.  No need to check add_adornment
        for a None result: we trust the file was written correctly, and
        duplicate letters would be harmless.
        """
        props = move.parsed_node.properties
        if "TR" in props:
            coords = [goboard.parsed_to_model_coordinates(x) for x in props["TR"]]
            adorns = [self.add_adornment(move, x[0], x[1], goboard.Adornments.triangle)
                      for x in coords]
            for x in adorns:
                self._main_win.add_unrendered_adornment(x)
        if "SQ" in props:
            coords = [goboard.parsed_to_model_coordinates(x) for x in props["SQ"]]
            adorns = [self.add_adornment(move, x[0], x[1], goboard.Adornments.square)
                      for x in coords]
            for x in adorns:
                self._main_win.add_unrendered_adornment(x)
        if "LB" in props:
            ## LB coordinates carry a label character as the third element.
            coords = [goboard.parsed_label_model_coordinates(x) for x in props["LB"]]
            adorns = [self.add_adornment(move, x[0], x[1], goboard.Adornments.letter, x[2])
                      for x in coords]
            for x in adorns:
                self._main_win.add_unrendered_adornment(x)
### _save_and_update_comments ensures the model captures any comment
### changes for the origin and displays dest's comments. Dest may be a new
### move, and its empty string comment clears the textbox. Dest may also
### be the previous move of origin if we're unwinding a move right now.
### Dest and origin may not be contiguous when jumping to the end or start
### of the game. If either origin or dest is None, then it represents the
### intial board state. If the captured comment has changed, mark game as
### dirty.
###
def _save_and_update_comments (self, origin, dest):
self.save_comment(origin)
if dest is not None:
self._main_win.commentBox.Text = dest.comments
else:
self._main_win.commentBox.Text = self.comments
### save_current_comment makes sure the current comment is persisted from the UI to
### the model. This is used from the UI, such as when saving a file.
###
    def save_current_comment (self):
        """Persist the UI comment box text to the current move (or game start).

        Used from the UI, such as when saving a file.
        """
        self.save_comment(self.current_move)
### save_comment takes a move to update with the current comment from the UI.
### If move is null, the comment belongs to the game start or empty board.
###
def save_comment (self, move):
cur_comment = self._main_win.commentBox.Text
if move is not None:
if move.comments != cur_comment:
move.comments = cur_comment
self.dirty = True
else:
if self.comments != cur_comment:
self.comments = cur_comment
self.dirty = True
###
### Cutting and Pasting Sub Trees
###
### cut_move must be invoked on a current move. It leaves the game state
### with the previous move or initial board as the current state, and it
### updates UI.
###
    def cut_move (self):
        """Cut the current move's sub tree and stash it for a later paste.

        Rewinds to the previous move (or initial board state), detaches the
        cut move from the next/branches lists — including the parallel
        ParsedNode structure — stores it in self._cut_move, and refreshes
        the navigation UI.  Raises if there is no current move.
        """
        cut_move = self.current_move
        if cut_move is None:
            raise Exception("Must cut current move, so cannot be initial board state.")
        ## unwind move with all UI updates and game model updates (and saves comments)
        self._main_win.prevButton_left_down(None, None)
        prev_move = self.current_move
        cut_move.previous = None
        if prev_move is None:
            ## Handle initial board state. Can't use _cut_next_move here due
            ## to special handling of initial board and self._state.
            branches = self.branches
            if branches is None:
                self.first_move = None
                self._state = GameState.NOT_STARTED
            else:
                cut_index = _list_find(cut_move, branches)
                new_branches = branches[:cut_index] + branches[cut_index + 1:]
                self.first_move = new_branches[0]
                ## A single remaining branch is stored as no branches at all.
                if len(new_branches) == 1:
                    self.branches = None
                else:
                    self.branches = new_branches
            if (self.parsed_game is not None and
                self.parsed_game.nodes.next is not None):
                ## May not be parsed node to cut since the cut move
                ## could be new (not from parsed file)
                self._cut_next_move(self.parsed_game.nodes, self.parsed_game.nodes.next)
        else:
            ## Handle regular move.
            self._cut_next_move(prev_move, cut_move)
        self._cut_move = cut_move
        self.dirty = True
        ## Update UI now that current move's next/branches have changed.
        if prev_move is None:
            self._main_win.nextButton.IsEnabled = self.first_move is not None
            self._main_win.endButton.IsEnabled = self.first_move is not None
            self._main_win.update_branch_combo(self.branches, self.first_move)
        else:
            self._main_win.nextButton.IsEnabled = prev_move.next is not None
            self._main_win.endButton.IsEnabled = prev_move.next is not None
            self._main_win.update_branch_combo(prev_move.branches, prev_move.next)
### _cut_next_move takes a Move or ParsedNode that is the previous move of
### the second argument, which is the move being cut. This cleans up next
### pointers and branches list appropriately for the move_or_parsednode.
###
    def _cut_next_move (self, move_or_parsednode, cut_move):
        """Detach *cut_move* from move_or_parsednode's next/branches.

        move_or_parsednode is the previous Move or ParsedNode of cut_move.
        When a Move's only next move is cut, this recurses once into the
        parallel ParsedNode structure to keep it in sync.
        """
        branches = move_or_parsednode.branches
        if branches is None:
            move_or_parsednode.next = None
            if (type(move_or_parsednode) is goboard.Move and
                move_or_parsednode.parsed_node is not None and
                move_or_parsednode.parsed_node.next is not None):
                self._cut_next_move(move_or_parsednode.parsed_node,
                                    move_or_parsednode.parsed_node.next)
        else:
            cut_index = _list_find(cut_move, branches)
            new_branches = branches[:cut_index] + branches[cut_index + 1:]
            move_or_parsednode.next = new_branches[0]
            ## A single remaining branch collapses back to a plain next link.
            if len(new_branches) == 1:
                move_or_parsednode.branches = None
            else:
                move_or_parsednode.branches = new_branches
### can_paste returns whether there is a cut sub tree, but it does not
### check whether the cut tree actually can be pasted at the current move.
### It ignores whether the right move color will follow the current move,
### which paste_move allows, but this does not check whether all the moves
### will occupy open board locations, which paste_move requires.
###
def can_paste (self):
return self._cut_move is not None
### paste_move makes self._cut_move be the next move of the current move
### displayed. It does not worry about duplicate next moves; it just
### pastes the sub tree. If there is a next move at the same loc, we do
### not merge the trees matching moves since this would lose node
### information (marked up and comments).
###
    def paste_move (self):
        """Paste the stashed cut sub tree as the next move of the current move.

        Does not merge with an existing next move at the same location,
        since merging would lose markup and comments; it simply adds the
        sub tree, creating branches as needed.  Informs the user and does
        nothing when the cut move's color does not alternate correctly.
        """
        if self._cut_move is None:
            raise Exception("No cut sub tree to paste.")
        if self._cut_move.color != self.next_color:
            MessageBox.Show("Cannot paste cut move that is same color as current move.");
            return;
        cur_move = self.current_move
        if cur_move is not None:
            _paste_next_move(cur_move, self._cut_move)
        else:
            if self.first_move is not None:
                # branching initial board state
                if self.branches is None:
                    self.branches = [self.first_move, self._cut_move]
                else:
                    self.branches.append(self._cut_move)
                self.first_move = self._cut_move
                self.first_move.number = 1
                ## Keep the parallel ParsedNode structure in sync when possible.
                if (self.parsed_game is not None and
                    self._cut_move.parsed_node is not None):
                    _paste_next_move(self.parsed_game.nodes,
                                     self._cut_move.parsed_node)
            else:
                if self._state is not GameState.NOT_STARTED:
                    raise Exception("Internal error: " +
                                    "no first move and game not started?!")
                # not branching initial board state
                self.first_move = self._cut_move
                self.first_move.number = 1
                self._state = GameState.STARTED
        self._cut_move.previous = cur_move # stores None appropriately when no current
        self.dirty = True
        _renumber_moves(self._cut_move)
        self._cut_move = None
        self._main_win.nextButton_left_down(None, None)
###
### Adornments
###
### add_adornment creates the Adornments object in the model and adds it to
### move. If move is None (or game not started), then this affects the
### initial game state. The returns the new adornment. If all the letter
### adornments have been used at this point in the game tree, then this
### adds nothing and returns None.
###
    def add_adornment (self, move, row, col, kind, data = None):
        """Create an Adornments object at (row, col) and attach it to *move*
        (or to the initial board state when move is None or the game has not
        started).

        Returns the new adornment, or None when kind is letter and all 26
        letters are already in use at this point in the game tree.  *data*
        is the explicit letter for letter adornments; when None, the first
        unused letter is chosen.
        """
        def make_adornment (adornments, data):
            ## Pass in adornments because access is different for initial board
            ## state vs. a move. Pass in data because Python has broken
            ## closure semantics or poor lexical model, take your pick :-).
            if kind is goboard.Adornments.letter and data is None:
                letters = [a for a in adornments if
                           a.kind is goboard.Adornments.letter]
                if len(letters) == 26:
                    return None, None
                ## Pick the first letter not already displayed here.
                for elt in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K',
                            'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V',
                            'W', 'X', 'Y', 'Z']:
                    if _list_find(elt, letters,
                                  lambda x,y: x == y.cookie.Child.Content) == -1:
                        data = elt #chr(ord('A') + len(letters))
                        break
            return goboard.Adornments(kind, row, col, None, data), data
        if self._state is GameState.NOT_STARTED or move is None:
            adornment, data = make_adornment(self.setup_adornments, data)
            if adornment is None: return None
            self.setup_adornments.append(adornment)
        elif move is not None:
            adornment, data = make_adornment(move.adornments, data)
            if adornment is None: return None
            move.add_adornment(adornment)
        else:
            raise Exception("Should never get here.")
        return adornment
### get_adornment return the adornment of kind kind at the row, col
### location if there is one, otherwise it returns None.
###
def get_adornment (self, row, col, kind):
move = self.current_move
if self._state is GameState.NOT_STARTED or move is None:
adornments = self.setup_adornments
elif move is not None:
adornments = move.adornments
else:
raise Exception("Should never get here.")
for a in adornments:
if a.kind is kind and a.row == row and a.column == col:
return a
return None
### remove_adornment assumes a is in the current adornments list, and
### signals an error if it is not. You can always call this immediately
### after get_adornment if no move state has changed.
###
def remove_adornment (self, a):
move = self.current_move
if self._state is GameState.NOT_STARTED or move is None:
adornments = self.setup_adornments
elif move is not None:
adornments = move.adornments
else:
raise Exception("Should never get here.")
adornments.remove(a)
###
### Misc Branches UI helpers
###
### set_current_branch is a helper for UI that changes which branch to take
### following the current move. Cur is the index of the selected item in
### the branches combo box, which maps to the branches list for the current
### move.
###
def set_current_branch (self, cur):
if self.current_move is None:
move = self.branches[cur]
self.first_move = move
else:
move = self.current_move.branches[cur]
self.current_move.next = move
### move_branch_up and move_branch_down move the current move (if it
### follows a move or initial board state with branching) to be higher or
### lower in the previous branches list. If the game hasn't started, or
### the conditions aren't met, this informs the user.
###
def move_branch_up (self):
(branches, cur_index) = self._branches_for_moving()
if branches is not None:
self._move_branch(branches, cur_index, -1)
def move_branch_down (self):
(branches, cur_index) = self._branches_for_moving()
if branches is not None:
self._move_branch(branches, cur_index, 1)
### _branches_for_moving returns the branches list (from previous move or
    ### initial board state) and the index in that list of the current move.
    ### This does user interaction for move_branch_up and move_branch_down.
###
def _branches_for_moving (self):
## Check if have move
if self._state is GameState.NOT_STARTED:
MessageBox.Show("Game not started, now branches to modify.")
return (None, None)
current = self.current_move
if current is None:
MessageBox.Show("Must be on the first move of a branch to move it.")
return (None, None)
## Get appropriate branches
prev = current.previous
if prev is None:
branches = self.branches
else:
branches = prev.branches
## Get index of current move in branches
if branches is None:
MessageBox.Show("Must be on the first move of a branch to move it.")
return (None, None)
elif prev is None:
cur_index = branches.index(self.first_move)
else:
cur_index = branches.index(prev.next)
## Successful result ...
return (branches, cur_index)
    ### _move_branch takes a list of branches and the index of a branch to move
### up or down, depending on delta. This provides feedback to the user of
### the result.
###
    def _move_branch (self, branches, cur_index, delta):
        """Swap branches[cur_index] with its neighbor at cur_index + delta.

        delta is -1 to move the branch up (earlier) or 1 to move it down
        (later).  Reports the result — or why nothing moved — to the user.
        """
        if delta not in [1, -1]:
            raise Exception("Branch moving delta must be 1 or -1.")
        def swap ():
            tmp = branches[cur_index]
            branches[cur_index] = branches[cur_index + delta]
            branches[cur_index + delta] = tmp
        if delta < 0:
            if cur_index > 0:
                swap()
                MessageBox.Show("Branch moved up.")
            else:
                MessageBox.Show("This branch is the main branch.")
        elif delta > 0:
            if cur_index < (len(branches) - 1):
                swap()
                MessageBox.Show("Branch moved down.")
            else:
                MessageBox.Show("This branch is the last branch.")
        else:
            raise Exception("Must call _move_branch with non-zero delta.")
###
### File Writing
###
### write_game takes a filename to write an .sgf file. This maps the game
### to an sgfparser.ParsedGame and uses its __str__ method to produce the
### output.
###
def write_game (self, filename = None):
if filename is None:
if self.filename is None:
raise Exception ("Need filename to write file.")
filename = self.filename
pg = parsed_game_from_game(self)
f = open(filename, "w")
f.write(str(pg))
f.close()
self.dirty = False
self.filename = filename
self.filebase = filename[filename.rfind("\\") + 1:]
if self.current_move is None:
number = 0
is_pass = False
else:
number = self.current_move.number
is_pass = self.current_move.is_pass
self._main_win.update_title(number, is_pass, self.filebase)
#self.Title = "SGFEd -- " + self.filebase + "; Move " + str(number)
### write_flipped_game saves all the game moves as a diagonal mirror image.
### You can share a game you recorded with your opponents, and they can see
### it from their points of view. Properties to modify: AB, AW, B, W, LB,
### SQ, TR, MA.
###
def write_flipped_game (self, filename):
pg = parsed_game_from_game(self, True) # True = flipped
f = open(filename, "w")
f.write(str(pg))
f.close()
self.dirty = False
### end Game class
###
### Mapping Games to ParsedGames (for printing)
###
### parsed_game_from_game returns a ParsedGame representing game, re-using
### existing parsed node properties where appropriate to avoid losing any we
### ignore from parsed files. If flipped is true, then moves and adornment
### indexes are diagonally mirrored; see write_flipped_game.
###
def parsed_game_from_game (game, flipped = False):
    """Return a sgfparser.ParsedGame representing *game*, re-using existing
    parsed node properties where appropriate so no ignored properties are
    lost.  When flipped is true, moves and adornment indexes are diagonally
    mirrored (see write_flipped_game).

    Fix: the root node's branches list was built but never assigned to
    pgame.nodes.branches, so all sibling branches from the initial board
    state except the first were dropped on write.  (_gen_parsed_nodes
    assigns cur_node.branches in the analogous case.)
    """
    pgame = sgfparser.ParsedGame()
    pgame.nodes = _gen_parsed_game_root(game, flipped)
    if game.branches is None:
        if game.first_move is not None:
            pgame.nodes.next = _gen_parsed_nodes(game.first_move, flipped)
            pgame.nodes.next.previous = pgame.nodes
    else:
        branches = []
        for m in game.branches:
            tmp = _gen_parsed_nodes(m, flipped)
            branches.append(tmp)
            tmp.previous = pgame.nodes
        pgame.nodes.branches = branches
        pgame.nodes.next = branches[0]
    return pgame
### _gen_parsed_game_root returns a ParsedNode that is based on the Game object
### and that represents the first node in a ParsedGame. It grabs any existing
### root node properties if there's an existing ParsedGame root node. If
### flipped is true, then moves and adornment indexes are diagonally mirrored;
### see write_flipped_game.
###
### NOTE, this function needs to overwrite any node properties that the UI
### supports editing. For example, if the end user can change the players
### names or rank, then this function needs to overwrite the node properties
### value with the game object's value. It also needs to write properties from
### new games.
###
def _gen_parsed_game_root (game, flipped):
    """Return the first ParsedNode of a ParsedGame generated from *game*.

    Grabs any existing root node properties when the game came from a
    parsed file, then overwrites every property the UI can edit (comments,
    handicap, komi, player names, setup stones).  When flipped is true,
    coordinates are diagonally mirrored (see write_flipped_game).
    """
    n = sgfparser.ParsedNode()
    if game.parsed_game is not None:
        n.properties = _copy_properties(game.parsed_game.nodes.properties)
    n.properties["AP"] = ["SGFPy"]
    n.properties["SZ"] = [str(game.board.size)]
    ## Comments
    if "GC" in n.properties:
        ## game.comments has merged GC and C comments.
        del n.properties["GC"]
    if game.comments != "":
        n.properties["C"] = [game.comments]
    elif "C" in n.properties:
        del n.properties["C"]
    ## Handicap/Komi
    ## Handicap may be stored as an int or a string; treat both zero forms alike.
    if game.handicap != 0 and game.handicap != "0":
        n.properties["HA"] = [str(game.handicap)]
    elif "HA" in n.properties:
        del n.properties["HA"]
    n.properties["KM"] = [game.komi]
    if "AB" in n.properties:
        if flipped:
            n.properties["AB"] = flip_coordinates(n.properties["AB"])
        ## else leave them as-is
    else:
        if game.handicap != 0 and game.handicap != "0":
            n.properties["AB"] = [goboard.get_parsed_coordinates(m, flipped) for
                                  m in game.handicap_moves]
    ## Player names
    n.properties["PB"] = ((game.player_black is not None and [game.player_black]) or
                          ["Black"])
    n.properties["PW"] = ((game.player_white is not None and [game.player_white]) or
                          ["White"])
    return n
def _copy_properties (props):
res = {}
for k,v in props.iteritems():
res[k] = v[:]
return res
### _gen_parsed_nodes returns a ParsedNode with all the moves following move
### represented in the linked list. If move has never been rendered, then the
### rest of the list is the parsed nodes hanging from it since the user could
### not have modified the game at this point. This recurses on move objects
### with branches. If flipped is true, then moves and adornment indexes are
### diagonally mirrored; see write_flipped_game.
###
def _gen_parsed_nodes (move, flipped):
    """Return a ParsedNode linked list for *move* and everything after it.

    When move was never rendered, the user cannot have edited anything past
    it, so the existing parsed nodes are reused directly (or cloned flipped).
    Recurses on Move objects with branches.  When flipped is true,
    coordinates are diagonally mirrored (see write_flipped_game).
    """
    if not move.rendered:
        ## If move exists and not rendered, then must be ParsedNode.
        if flipped:
            return _clone_and_flip_nodes(move.parsed_node)
        else:
            return move.parsed_node
    cur_node = _gen_parsed_node(move, flipped)
    first = cur_node
    if move.branches is None:
        ## Walk the linear spine until the end or the first branching move.
        move = move.next
        while move is not None:
            cur_node.next = _gen_parsed_node(move, flipped)
            cur_node.next.previous = cur_node
            if move.branches is None:
                cur_node = cur_node.next
                move = move.next
            else:
                cur_node = cur_node.next
                break
    ## Only get here when move is None, or we're recursing on branches.
    if move is not None:
        cur_node.branches = []
        for m in move.branches:
            tmp = _gen_parsed_nodes(m, flipped)
            cur_node.branches.append(tmp)
            tmp.previous = cur_node
        cur_node.next = cur_node.branches[0]
    return first
### _gen_parsed_node returns a ParsedNode that is based on the Move object. It
### grabs any existing parsed node properties from move to preserve any move
### properties that we ignore from a file we read. This does not just take the
### whole parsed node from move to avoid keeping branches or whatnot that we've
### deleted. If flipped is true, then moves and adornment indexes are
### diagonally mirrored; see write_flipped_game.
###
### NOTE, this function needs to overwrite any node properties that the UI
### supports editing. For example, if the end user modified adornments.
###
def _gen_parsed_node (move, flipped):
    """Return a single ParsedNode generated from Move *move*.

    Copies any existing parsed node properties (preserving properties we
    ignore from the original file) rather than reusing the node itself, to
    avoid keeping deleted branches.  Overwrites everything the UI can edit:
    the move property (B/W), comments, and adornments (TR/SQ/LB).  When
    flipped is true, coordinates are diagonally mirrored.
    """
    if not move.rendered:
        ## If move exists and not rendered, then must be ParsedNode.
        if flipped:
            return _clone_and_flip_nodes(move.parsed_node)
        else:
            return move.parsed_node
    node = sgfparser.ParsedNode()
    node.properties = ((move.parsed_node is not None and
                        _copy_properties(move.parsed_node.properties)) or
                       node.properties)
    props = node.properties
    ## Color
    if move.color == Colors.Black:
        props["B"] = [goboard.get_parsed_coordinates(move, flipped)]
    elif move.color == Colors.White:
        props["W"] = [goboard.get_parsed_coordinates(move, flipped)]
    else:
        raise Exception ("Should have only B or W moves.")
    ## Comments
    if move.comments != "":
        props["C"] = [move.comments]
    elif "C" in props:
        del props["C"]
    ## Adornments: drop any stale parsed values, then regenerate from the
    ## move's live adornment list.
    if "TR" in props:
        del props["TR"]
    if "SQ" in props:
        del props["SQ"]
    if "LB" in props:
        del props["LB"]
    for a in move.adornments:
        coords = goboard.get_parsed_coordinates(a, flipped)
        if a.kind is goboard.Adornments.triangle:
            if "TR" in props:
                props["TR"].append(coords)
            else:
                props["TR"] = [coords]
        if a.kind is goboard.Adornments.square:
            if "SQ" in props:
                props["SQ"].append(coords)
            else:
                props["SQ"] = [coords]
        if a.kind is goboard.Adornments.letter:
            ## LB format is "<coords>:<letter>".
            data = coords + ":" + a.cookie.Child.Content
            if "LB" in props:
                props["LB"].append(data)
            else:
                props["LB"] = [data]
    return node
### _clone_and_flip_nodes is similar to _gen_parsed_nodes. This returns a
### ParsedNode with all the nodes following the argument represented in the
### resulting linked list, but their coordinates have been transposed to the
### diagonal mirror image, see write_flipped_game. This recurses on nodes with
### branches.
###
def _clone_and_flip_nodes (nodes):
    """Return a clone of the ParsedNode list starting at *nodes* with all
    coordinates transposed to the diagonal mirror image (see
    write_flipped_game).  Recurses on nodes with branches; structure
    parallels _gen_parsed_nodes.
    """
    first = _clone_and_flip_node(nodes)
    cur_node = first
    if nodes.branches is None:
        ## Walk the linear spine until the end or the first branching node.
        nodes = nodes.next
        while nodes is not None:
            cur_node.next = _clone_and_flip_node(nodes)
            cur_node.next.previous = cur_node
            if nodes.branches is None:
                cur_node = cur_node.next
                nodes = nodes.next
            else:
                cur_node = cur_node.next
                break
    ## Only get here when nodes is None, or we're recursing on branches.
    if nodes is not None:
        cur_node.branches = []
        for m in nodes.branches:
            tmp = _clone_and_flip_nodes(m)
            cur_node.branches.append(tmp)
            tmp.previous = cur_node
        cur_node.next = cur_node.branches[0]
    return first
### _clone_and_flip_node is similar to _gen_parsed_node. This returns a
### ParsedNode that is a clone of node, but any indexes are diagonally mirror
### transposed, see write_flipped_game.
###
def _clone_and_flip_node (node):
    """Return a copy of *node* whose coordinate-bearing properties (move and
    adornments) have been diagonally mirror transposed (see
    write_flipped_game)."""
    clone = sgfparser.ParsedNode()
    clone.properties = _copy_properties(node.properties)
    props = clone.properties
    ## Flip the move itself; exactly one of B/W must be present.
    if "B" in props:
        props["B"] = flip_coordinates(props["B"])
    elif "W" in props:
        props["W"] = flip_coordinates(props["W"])
    else:
        raise Exception ("Should have only B or W moves.")
    ## Flip whichever adornments are present.
    for key in ("TR", "SQ"):
        if key in props:
            props[key] = flip_coordinates(props[key])
    if "LB" in props:
        ## Labels carry a ":<letter>" suffix, handled by the labels flag.
        props["LB"] = flip_coordinates(props["LB"], True)
    return clone
### flip_coordinates takes a list of parsed coordinate strings and returns the
### same kind of list with the coorindates diagonally flipped (see
### write_flipped_game).
###
def flip_coordinates (coords, labels = False):
    """Return *coords* with each parsed coordinate diagonally flipped (see
    write_flipped_game).

    When *labels* is true, elements are "<col><row>:<letter>" label strings:
    only the leading two coordinate characters are flipped and each element
    keeps its own suffix.
    """
    if labels:
        ## Pair each flipped coordinate with the suffix of the SAME element.
        ## (The previous nested comprehension combined every flipped
        ## coordinate with every suffix -- a cartesian product, which was
        ## wrong whenever more than one label was present.)
        flipped = flip_coordinates([lb[:2] for lb in coords])
        return [xy + lb[2:] for xy, lb in zip(flipped, coords)]
    else:
        return [goboard.flip_parsed_coordinates(yx) for yx in coords]
###
### Internal utilities for Game methods.
###
### _paste_next_move takes a Move or ParsedNode that is the current move to
### which _paste_next_move adds cut_move as the next move. This sets up next
### pointers and the branches list appropriately for the move_or_parsednode.
###
def _paste_next_move (move_or_parsednode, cut_move):
    """Attach cut_move as the next move of move_or_parsednode.

    If a next move already exists, it is turned into (or extends) a branches
    list.  For Move objects this also renumbers the pasted move and mirrors
    the paste onto the corresponding ParsedNodes.
    """
    if move_or_parsednode.next is not None:
        ## Existing continuation becomes (or already is) a branch point.
        if move_or_parsednode.branches is None:
            move_or_parsednode.branches = [move_or_parsednode.next, cut_move]
        else:
            move_or_parsednode.branches.append(cut_move)
    move_or_parsednode.next = cut_move
    cut_move.previous = move_or_parsednode
    if type(move_or_parsednode) is goboard.Move:
        cut_move.number = move_or_parsednode.number + 1
        if (move_or_parsednode.parsed_node is not None and
                cut_move.parsed_node is not None):
            _paste_next_move(move_or_parsednode.parsed_node, cut_move.parsed_node)
### _renumber_moves takes a move with the correct number assignment and walks
### the sub tree of moves to reassign new numbers to the nodes. This is used
### by game._paste_move.
###
def _renumber_moves (move):
count = move.number
if move.branches is None:
move = move.next
while move is not None:
move.number = count + 1
count += 1
if move.branches is None:
move = move.next
else:
break
## Only get here when move is None, or we're recursing on branches.
if move is not None:
for m in move.branches:
m.number = count
_renumber_moves(m)
### _check_for_coincident_moves checks if every move in a cut tree can play
### where pasted to ensure no conflicts.
###
### Turns out this test isn't so good. Need to do "abstract interpretation"
### to see if moves are played where moves would be cut. KGS just lets you paste
### and just plays stones over other stones on the board, not sound but works.
###
#def _check_for_coincident_moves (board, move):
# while move is not None:
# if board.move_at(move.row, move.column) is not None:
# return True
# if move.branches is None:
# move = move.next
# else:
# break
# ## Only get here when move is None from while loop, or we're recursing on branches.
# if move is not None:
# for m in move.branches:
# if _check_for_coincident_moves(board, m):
# return True
# return False
### _list_find returns the index of elt in the first argument using the compare
### test. The test defaults to identity. Need to define this since python
### doesn't have general sequence utilities. Str has find, but list doesn't.
###
def _list_find (elt, l, compare = lambda x,y: x is y):
for k, v in enumerate(l):
if compare(elt, v): return k
else:
return -1
###
### External Helper Functions
###
def opposite_move_color (color):
    """Return Colors.White for Colors.Black, and Colors.Black otherwise."""
    # A conditional expression replaces the old "and/or" trick, which would
    # silently return Black for every input if Colors.White were ever falsy.
    return Colors.White if color == Colors.Black else Colors.Black
def create_default_game (main_win):
    """Create a fresh Game on the default-size board with no handicap."""
    return Game(main_win, MAX_BOARD_SIZE, 0, DEFAULT_KOMI)
### create_parsed_game takes a ParsedGame and main UI window. It creates a new
### Game (which cleans up the current game) and sets up the first moves so that
### the user can start advancing through the moves.
###
def create_parsed_game (pgame, main_win):
    """Create a new Game from a ParsedGame and wire it into the main UI.

    Validates the root node's properties (handicap, board size, komi, player
    names, comments), creates the Game (which cleans up the current game),
    sets up the first move model(s), and enables/disables the navigation UI
    so the user can start advancing through the moves.  Returns the Game.
    """
    ## Check some root properties
    props = pgame.nodes.properties
    ## Handicap stones
    if "HA" in props:
        ## KGS saves HA[6] and then AB[]...
        handicap = int(props["HA"][0])
        if "AB" not in props:
            raise Exception("If parsed game has handicap, then need handicap stones.")
        def make_handicap_move (coords):
            ## Handicap stones are unrendered black Moves tied to the root node.
            row, col = goboard.parsed_to_model_coordinates(coords)
            m = goboard.Move(row, col, Colors.Black)
            m.parsed_node = pgame.nodes
            m.rendered = False
            return m
        all_black = [make_handicap_move(x) for x in props["AB"]]
    else:
        handicap = 0
        all_black = None
    if "AW" in props:
        raise Exception("Don't support multiple white stones at root.")
    ## Board size
    if "SZ" not in props:
        raise Exception("No board size property?!")
    size = int(props["SZ"][0])
    if size != 19:
        raise Exception("Only work with size 19 currently, got %s" % (size))
    ## Komi: explicit KM wins; otherwise the default komi for even games, or
    ## the conventional half point for handicap games.  (Conditional
    ## expression replaces the fragile "and/or" ternary.)
    if "KM" in props:
        komi = props["KM"][0]
    else:
        komi = DEFAULT_KOMI if handicap == 0 else "0.5"
    ## Creating new game cleans up current game
    g = Game(main_win, size, handicap, komi, all_black)
    ## Player names
    if "PB" in props:
        g.player_black = props["PB"][0]
    if "PW" in props:
        g.player_white = props["PW"][0]
    ## Initial board state comments; GC (game comment) goes first.
    if "C" in props:
        g.comments = props["C"][0]
    if "GC" in props:
        g.comments = props["GC"][0] + g.comments
    ## Setup remaining model for first moves and UI
    g.parsed_game = pgame
    _setup_first_parsed_move(g, pgame.nodes)
    ## Setup navigation UI so that user can advance through game.
    if g.first_move is not None:
        g._state = GameState.STARTED
        main_win.nextButton.IsEnabled = True
        main_win.endButton.IsEnabled = True
        main_win.update_branch_combo(g.branches, g.first_move)
    else:
        ## No first move if file just has handicap stones (or root only).
        main_win.nextButton.IsEnabled = False
        main_win.endButton.IsEnabled = False
        main_win.update_branch_combo(g.branches, None)
    main_win.commentBox.Text = g.comments
    main_win.game = g
    return g
### _setup_first_parsed_move takes a game and the head of ParsedNodes. It sets
### up the intial move models, handling initial node branching and so on. The
### basic invariant here is that we always have the next move models created,
### but they are in an unrendered state. This means their branches have not
### been processed, adornments have never been created, captured stones never
### processed, etc. When we advance to a move, we render it and set up its
### next move(s) as unrendered. Keeping the next move pointer of a Move object
### set up makes several other invariants in helper functions and game
### processing fall out. This function returns a None g.first_move if the .sgf
### file only had a root node.
###
def _setup_first_parsed_move (g, nodes):
props = nodes.properties
if "B" in props or "W" in props:
raise Exception("Unexpected move in root parsed node.")
if "PL" in props:
raise Exception("Do not support player-to-play for changing start color.")
if "AW" in props:
raise Exception("Do not support AW in root node.")
if "TR" in props or "SQ" in props or "LB" in props:
raise Exception("Don't handle adornments on initial board from parsed game yet.")
if nodes.branches is not None:
## Game starts with branches
moves = []
for n in nodes.branches:
m = _parsed_node_to_move(n)
m.number = g.move_count + 1
## Don't set m.previous since they are fist moves.
moves.append(m)
g.branches = moves
m = moves[0]
else:
nodes = nodes.next
if nodes is None:
m = None
else:
m = _parsed_node_to_move(nodes)
## Note, do not incr g.move_count since first move has not been rendered,
## so if user clicks, that should be number 1 too.
m.number = g.move_count + 1
g.first_move = m
return m
### _parsed_node_to_move takes a ParsedNode and returns a Move model for it.
### For now, this is fairly constrained to expected next move colors and no
### random setup nodes that place several moves or just place adornments.
###
def _parsed_node_to_move (n):
    """Build a Move model for ParsedNode *n*.

    Only plain B/W move nodes are supported for now; setup nodes that place
    several stones or only adornments raise.
    """
    props = n.properties
    if "B" in props:
        color = Colors.Black
        coords = props["B"][0]
    elif "W" in props:
        color = Colors.White
        coords = props["W"][0]
    else:
        raise Exception("Next nodes must be moves, don't handle arbitrary nodes yet -- %s" %
                        (n.node_str(False)))
    row, col = goboard.parsed_to_model_coordinates(coords)
    m = goboard.Move(row, col, color)
    m.parsed_node = n
    m.rendered = False
    if "C" in props:
        m.comments = props["C"][0]
    return m
### GameState simply represents whether a game has started (that is, there's a
### first move). DONE is gratuitous right now and isn't used. If we support
### pass move, and there's two, perhaps DONE becomes relevant :-).
###
class GameState (object):
    """Whether a game has started (that is, has a first move).

    DONE is gratuitous right now and isn't used; it may become relevant if
    pass moves are supported.
    """
    # Unique sentinel objects; compare with 'is'.
    NOT_STARTED = object()
    STARTED = object()
    DONE = object()
###############################################################################
## The display is a grid of columns and rows, with the main game tree spine
## drawn across the first row, with branches descending from it. So, columns
## map to tree depth, and a column N, should have a move with number N,
## due to fake node added for board start in column zero.
##
## show_tree displays a grid of node objects that represent moves in the game tree,
## where lines between moves need to bend, or where lines need to descend straight
## downward before angling to draw next move in a branch.
##
# Fixed dimensions of the ASCII debug grid used by show_tree/layout.
test_columns = 50
test_rows = 10
def show_tree (game):
    """Render the game tree as an ASCII grid and show it in a message box.

    Columns are tree depth; the main spine runs across the first row with
    branches descending from it.  Glyphs: X=black, O=white, L=line bend,
    S=start-of-board sentinel, +=empty cell.
    """
    root = game.parsed_game.nodes
    tree_grid = [[None] * test_columns for _ in xrange(test_rows)]
    max_rows = [0] * test_columns
    layout(root, tree_grid, max_rows, 0, 0, 0, 0)
    rendered_rows = []
    for grid_row in tree_grid:
        chars = []
        for cell in grid_row:
            if cell is None:
                chars.append("+")
            elif cell.color == Colors.Black:
                chars.append("X")
            elif cell.color == Colors.White:
                chars.append("O")
            elif cell.kind is TreeViewNode.line_bend_kind:
                chars.append("L")
            else:
                ## Colors.BurlyWood sentinel: the empty-board start node.
                chars.append("S")
        rendered_rows.append("".join(chars))
    MessageBox.Show("\n".join(rendered_rows))
## layout recurses through the moves assigning them to a location in the display grid.
## max_rows is an array mapping the column number to the next free row that
## can hold a node. cum_max_row is the max row used while descending a branch
## of the game tree, which we use to create branch lines that draw straight across,
## rather than zigging and zagging along the contour of previously placed nodes.
## tree_depth is just that, and branch_depth is the heigh to the closest root node of a
## branch, where its immediate siblings branch too.
##
def layout (pn, tree_grid, max_rows, cum_max_row, tree_depth, branch_depth, branch_root_row):
    """Recursively assign each move a (row, col) cell in tree_grid.

    max_rows maps column -> next free row; cum_max_row is the deepest row
    used while descending this branch (keeps branches drawing straight);
    branch_depth is the distance to this branch's root node (0 on the main
    spine); branch_root_row is that root's row.  Returns this node's model.
    """
    model = setup_layout_model(pn, max_rows, cum_max_row, tree_depth)
    if branch_depth == 0:
        ## If we're not doing a branch, keep the zero.
        new_branch_depth = 0
    else: # Increment the depth for the children
        new_branch_depth = branch_depth + 1
    ## Layout main child branch
    if pn.next is None:
        ## If no next, then no branches to check below
        maybe_add_bend_node(tree_grid, max_rows, model.row, tree_depth, branch_depth, branch_root_row)
        tree_grid[model.row][tree_depth] = model
        return model
    else:
        next_model = layout(pn.next, tree_grid, max_rows, model.row, tree_depth + 1,
                            new_branch_depth, branch_root_row)
        ## Pull this node down (or up toward the root diagonal) to match
        ## where the rest of its branch actually landed.
        adjust_layout_row(model, tree_grid, max_rows, next_model.row, tree_depth, branch_depth, branch_root_row)
        maybe_add_bend_node(tree_grid, max_rows, model.row, tree_depth, branch_depth, branch_root_row)
        tree_grid[model.row][tree_depth] = model
        ## Layout branches if any (branches[0] is pn.next, laid out above,
        ## so start at index 1).
        if pn.branches is not None:
            for i in xrange(1, len(pn.branches)):
                layout(pn.branches[i], tree_grid, max_rows, model.row, tree_depth + 1, 1, model.row)
        return model
## setup_layout_model initializes the current node model for the display, with row, column,
## color, etc. This returns the new model element.
##
def setup_layout_model (pn, max_rows, cum_max_row, tree_depth):
model = TreeViewNode((tree_depth == 0 and TreeViewNode.start_board_kind)
or TreeViewNode.move_kind, pn)
## Get column's free row or use row from parent
row = max(cum_max_row, max_rows[tree_depth])
model.row = row
max_rows[tree_depth] = row + 1
model.col = tree_depth
## Set color
if "B" in pn.properties:
model.color = Colors.Black
elif "W" in pn.properties:
model.color = Colors.White
elif tree_depth == 0:
## This is the empty board start node
model.color = Colors.BurlyWood # sentinel color
else:
raise Exception("eh?! Node is not move, nor are we at the start of the parsed tree -- %s" %
(pn.node_str(False)))
return model
## adjust_layout_row adjusts moves downward if moves farther out on the branch
## had to occupy lower rows. This keeps branches drawn straighter, rather than
## zig-zagging with node contours. Then this function checks to see if we're
## within the square defined by the current model and the branch root, and if we
## this is the case, then start subtracting one row at at time to get a diagonal
## line of moves up to the branch root.
##
def adjust_layout_row (model, tree_grid, max_rows, next_row_used, tree_depth,
                       branch_depth, branch_root_row):
    """Nudge *model* down to stay level with its branch, or up toward the
    branch root when a clean diagonal to the root is possible."""
    ## If we're on a branch, and it had to be moved down farther out to the right
    ## in the layout, then move this node down to keep a straight line.
    if next_row_used > model.row:
        model.row = next_row_used
        max_rows[tree_depth] = next_row_used + 1
    ## If we're unwinding back toward this node's branch root, and we're within a direct
    ## diagonal line from the root, start decreasing the row by one.
    if (branch_depth < model.row - branch_root_row) and (tree_grid[model.row - 1][tree_depth] is None):
        ## row - 1 does not index out of bounds since model.row would have to be zero,
        ## and zero minus anything will not be greater than branch depth (which would be zero)
        ## if row - 1 were less than zero.
        model.row = model.row - 1
        max_rows[tree_depth] = model.row
## maybe_add_bend_node checks if the diagonal line of rows for a branch hit the column
## for the branch's root at a row great than the root's row. If this happens, then we
## need a model node to represent where to draw the line bend to start the diagonal line.
##
def maybe_add_bend_node (tree_grid, max_rows, row, tree_depth, branch_depth, branch_root_row):
    """Place a line-bend marker when a branch's diagonal reaches its root's
    column at a row more than one below the root, so the connector line has
    somewhere to bend before the diagonal starts."""
    if branch_depth != 1 or row - branch_root_row <= 1:
        return
    if tree_grid[row - 1][tree_depth - 1] is not None:
        return  # last test should always be true
    bend = TreeViewNode(TreeViewNode.line_bend_kind)
    bend.row = row - 1
    bend.col = tree_depth - 1
    max_rows[tree_depth - 1] = row
    tree_grid[bend.row][bend.col] = bend
class TreeViewNode (object):
    """One cell of the tree-view display grid: a move, the start-of-board
    marker, or a line-bend marker used when drawing branch connectors."""
    # Unique sentinel kind values; compare with 'is'.
    move_kind = object()
    line_bend_kind = object()
    start_board_kind = object()
    def __init__ (self, kind = move_kind, node = None):
        self.kind = kind
        self.cookie = None  # slot for a UI element associated with this cell
        self.node = node  # underlying ParsedNode (None for bend markers)
        self.row = 0  # grid position, assigned by layout()
        self.col = 0
        self.color = None  # Colors.Black/White, or Colors.BurlyWood sentinel
#def __str__ (self):
# if self.color == Colors.Black:
# return "X"
# elif self.color == Colors.White:
# return "O"
# elif self.kind is TreeViewNode.line_bend_kind:
# return "\""
# else: # Colors.BurlyWood for start of boardi
# return "S"
| StarcoderdataPython |
11376409 | # Complete the check_log_history function below.
def check_log_history(events):
    """Return the 1-based index of the first invalid lock event, or 0 when
    the whole log is valid and every lock is released by the end.

    Events are "ACQUIRE <n>" / "RELEASE <n>" strings.  A log is invalid when
    a lock is acquired twice without release, released while not held,
    released out of LIFO order, or still held after the last event (in which
    case the returned index is len(events) + 1).
    """
    stack = []
    row = 0
    for event in events:
        row += 1
        lock_num = event.split(' ')[1]
        if event.startswith('A'):
            # Re-acquiring a lock that is already held is an error.
            if lock_num in stack:
                return row
            stack.append(lock_num)
        else:
            # Releasing with nothing held used to raise IndexError from
            # pop(); it is an invalid event like an out-of-order release.
            if not stack or stack.pop() != lock_num:
                return row
    # Any lock still held at the end invalidates the log just past it.
    return 0 if not stack else row + 1
# 各种边界条件
# 正解
# 1. ACQUIRE 364
# 2. ACQUIRE 84
# 3. RELEASE 84
# 4. RELEASE 364
# 错误
# 1. ACQUIRE 364
# 2. ACQUIRE 84
# 3. RELEASE 84
# 4. ACQUIRE 364
# 5. RELEASE 364
# 错误
# 1. ACQUIRE 364
# 2. ACQUIRE 84
# 3. RELEASE 364
# 4. RELEASE 84
# 错误
# 1. ACQUIRE 364
# 2. ACQUIRE 84
# 3. RELEASE 84
# 4. ACQUIRE 87 什么都没做就直接退出了
# ACQUIRE OR RELEASE NON EXIST, AGAIN
| StarcoderdataPython |
11291671 | #########################################################################
# 2020
# Author: <NAME>
#########################################################################
from typing import List
import cv2
import numpy as np
import csv
from pathlib import Path
from tqdm import tqdm
from torch.utils.data import Dataset
from copy import deepcopy
from dataset.types import Sample, BatchSample
class DatasetCULane(Dataset):
    """PyTorch Dataset for the CULane lane-detection data set.

    Reads an index file of image paths, eagerly loads the per-image
    ``*.lines.txt`` lane annotations (filtering points that fall on the
    recording car's hood), and serves samples as dicts.  ``collate_fn``
    converts a list of samples into the detection / instance ground-truth
    arrays used for loss computation.
    """
    def __init__(self,
                 root_path="/media/zzhou/data-culane/",
                 index_file="list/train.txt",
                 transform=None):
        self.root_path = Path(root_path)
        self.index_file = Path(index_file)
        self.transform = transform
        # mask for car hood: maps y -> (xmin, xmax) covered by the hood
        carhood_mask = self.get_car_hood_mask()  # key=y, value=(xmin, xmax)
        # read index file; each line is a root-relative path like "/driver_.../x.jpg"
        self.train_files_path = []
        with open(self.root_path / self.index_file) as f:
            while True:
                line = f.readline()
                if not line:
                    break
                self.train_files_path.append(line[1:-1])  # remove "/" at beginning, "\n" at end
        # read labels eagerly; images themselves are loaded lazily in __getitem__
        self.label_list: List[Sample] = []  # READ ONLY after initialized
        pbar = tqdm(total=len(self.train_files_path))
        for each in self.train_files_path:
            image_path = self.root_path / each
            label_path = image_path.with_suffix(".lines.txt")
            lines = list(self._read_lines(label_path, mask=carhood_mask))
            self.label_list.append({"image_path": image_path.absolute().as_posix(),
                                    "label_path": label_path.absolute().as_posix(),
                                    "lane_list": lines})
            pbar.update()
        pbar.close()

    def __len__(self):
        return len(self.label_list)  # 88880 CULane train samples

    def __getitem__(self, idx: int) -> Sample:
        """return readable sample data

        Loads the image from disk, converts BGR->RGB, and applies the
        optional transform before returning the sample dict.

        :param idx: sample index
        :return: sample dict with image, id, lane_list and original_size
        """
        label = self.label_list[idx]
        img = cv2.imread(label["image_path"])  # (590, 1640, 3)
        img = img[:, :, ::-1]  # bgr to rgb
        rows, cols = img.shape[:2]
        sample: Sample = {"image_path": label["image_path"],
                          "image": img,
                          "id": idx,
                          "lane_list": deepcopy(label["lane_list"]),
                          "original_size": (rows, cols)
                          }
        if self.transform is not None:
            sample = self.transform(sample)
        return sample

    @staticmethod
    def collate_fn(batch: List[Sample]) -> BatchSample:
        """convert readable sample data as aligned ground truth data

        Ground truth data is used for loss calculation.

        :param batch: List[Sample]
        :return: dict with stacked images, ids, detection_gt and instance_gt
        """
        grid_y = 32
        grid_x = 64
        resize_ratio = 8  # resize ratio = 8, resize [256, 512] to [32, 64]
        img = np.stack([b['image'] for b in batch], axis=0)
        image_id_list = [each["id"] for each in batch]
        n_samples = len(batch)
        # build detection ground truth: (n_samples, 3, grid_y, grid_x) with
        # channels (confidence, x-offset, y-offset) per grid cell
        detection_gt = np.zeros((n_samples, 3, grid_y, grid_x))
        for i_smaple, sample in enumerate(batch):
            for i_lane, lane_pts in enumerate(sample['lane_list']):
                for i_pt, xy_pt in enumerate(lane_pts):
                    # grid cell containing this key point
                    x_index = int(xy_pt[0] / resize_ratio)
                    y_index = int(xy_pt[1] / resize_ratio)
                    detection_gt[i_smaple][0][y_index][x_index] = 1.0  # confidence
                    detection_gt[i_smaple][1][y_index][x_index] = (xy_pt[0] * 1.0 / resize_ratio) - x_index  # offset x
                    detection_gt[i_smaple][2][y_index][x_index] = (xy_pt[1] * 1.0 / resize_ratio) - y_index  # offset y
        # build instance ground truth: (n_samples, 1, grid_y*grid_x, grid_y*grid_x)
        # pairwise same/different-instance codes; inefficient code but better reading
        instance_gt = np.zeros((n_samples, 1, grid_y * grid_x, grid_y * grid_x))
        for i_smaple, sample in enumerate(batch):
            # temp holds a cluster id (1, 2, ...) per grid cell, 0 = no lane
            temp = np.zeros((1, grid_y, grid_x))
            lane_cluster = 1
            for i_lane, lane_pts in enumerate(sample['lane_list']):
                previous_x_index = 0
                previous_y_index = 0
                for i_pt, xy_pt in enumerate(lane_pts):
                    x_index = int(xy_pt[0] / resize_ratio)
                    y_index = int(xy_pt[1] / resize_ratio)
                    temp[0][y_index][x_index] = lane_cluster
                    if previous_x_index != 0 or previous_y_index != 0:  # interpolation make more dense data
                        # step one cell at a time toward (x_index, y_index),
                        # marking cells (and their 1-cell look-ahead) on the way
                        temp_x = previous_x_index
                        temp_y = previous_y_index
                        while True:
                            delta_x = 0
                            delta_y = 0
                            temp[0][temp_y][temp_x] = lane_cluster
                            if temp_x < x_index:
                                temp[0][temp_y][temp_x + 1] = lane_cluster
                                delta_x = 1
                            elif temp_x > x_index:
                                temp[0][temp_y][temp_x - 1] = lane_cluster
                                delta_x = -1
                            if temp_y < y_index:
                                temp[0][temp_y + 1][temp_x] = lane_cluster
                                delta_y = 1
                            elif temp_y > y_index:
                                temp[0][temp_y - 1][temp_x] = lane_cluster
                                delta_y = -1
                            temp_x += delta_x
                            temp_y += delta_y
                            if temp_x == x_index and temp_y == y_index:
                                break
                    previous_x_index = x_index
                    previous_y_index = y_index
                lane_cluster += 1
            for i_sim in range(grid_y * grid_x):  # make gt
                # flatten once (all entries are > -1); no-op after the first pass
                temp = temp[temp > -1]
                gt_one = deepcopy(temp)
                if temp[i_sim] > 0:
                    gt_one[temp == temp[i_sim]] = 1  # same instance
                    if temp[i_sim] == 0:
                        gt_one[temp != temp[i_sim]] = 3  # different instance, different class
                    else:
                        gt_one[temp != temp[i_sim]] = 2  # different instance, same class
                        gt_one[temp == 0] = 3  # different instance, different class
                    instance_gt[i_smaple][0][i_sim] += gt_one
        batch_sample = {"image": img,
                        "image_id": image_id_list,  # help tracing source image
                        "detection_gt": detection_gt,
                        "instance_gt": instance_gt,
                        }
        return batch_sample

    @staticmethod
    def get_car_hood_mask():
        """Build the y -> (xmin, xmax) mask covering the recording car's hood
        in the lower image rows (y >= 430), with a 5-pixel margin."""
        mask_y = list(range(430, 600, 10))
        mask_x_min = [718, 660, 610, 567, 531, 503, 478, 453, 434, 412, 386, 361, 342, 321, 295, 272, 249]
        mask_x_max = [1076, 1134, 1175, 1220, 1258, 1307, 1343, 1387, 1425, 1473, 1512, 1555, 1594, 1639, 1639, 1639, 1639]
        carhood_mask = {}  # key=y, value=(xmin, xmax)
        for i in range(len(mask_y)):
            x_min = mask_x_min[i] - 5  # add 5 pixels for margin
            x_max = mask_x_max[i] + 5 if mask_x_max[i] + 5 < 1639 else 1639  # add 5 pixels for margin
            carhood_mask[mask_y[i]] = (x_min, x_max)
        return carhood_mask

    @staticmethod
    def _read_lines(file_path, mask=None):
        """Yield one (N, 2) float array of (x, y) points per lane in the
        annotation file, optionally dropping points inside the hood mask.

        NOTE(review): mask lookup uses the raw float y as key; assumes CULane
        y values are exact multiples of 10 matching the mask keys -- confirm.
        """
        with open(file_path) as f:
            reader = csv.reader(f, delimiter=' ', skipinitialspace=True)
            for row in reader:
                row = row[:-1]  # remove a ghost space char
                if mask is None:
                    yield np.array([float(i) for i in row], dtype=float).reshape([-1, 2])
                else:
                    x_list = row[::2]
                    y_list = row[1::2]
                    filtered_list = []
                    for i in range(len(x_list)):
                        y = float(y_list[i])
                        x = float(x_list[i])
                        if y >= 430:
                            if mask[y][0] < x < mask[y][1]:
                                continue
                        filtered_list.append((x, y))
                    if len(filtered_list) > 0:  # skip empty lane list after filtering
                        yield np.array(filtered_list, dtype=float)
6425717 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
Description : visualization library
Email : <EMAIL>
Date:2018/3/30
"""
from .vis_imports import *
from .line_plot import line
| StarcoderdataPython |
1777364 | # Generated by Django 3.1.8 on 2021-04-13 13:23
from django.db import migrations, models
import turtle_shell.utils
class Migration(migrations.Migration):
    """Auto-generated migration: adds a nullable JSON ``traceback`` field to
    ``ExecutionResult`` using the project's enum-aware JSON codecs."""

    dependencies = [
        ("turtle_shell", "0005_auto_20210412_2320"),
    ]

    operations = [
        migrations.AddField(
            model_name="executionresult",
            name="traceback",
            field=models.JSONField(
                decoder=turtle_shell.utils.EnumAwareDecoder,
                default=dict,
                encoder=turtle_shell.utils.EnumAwareEncoder,
                null=True,
            ),
        ),
    ]
| StarcoderdataPython |
1624976 | from zipfile import ZipFile
from urllib.request import urlopen
from io import BytesIO
import pandas as pd
class WDIIndicators:
    """Retrieve WDI Indicators from the World Bank"""

    def __init__(self, file_storage, s3_api):
        """ Create a new instance of the WDIIndicators class

        Parameters
        ----------
        :param file_storage: FileStorage, Required
            The file storage class used to store raw/processed data
        :param s3_api: S3_API, Required
            The S3 api wrapper class used to store data in AWS S3
        ----------
        """
        self._file_storage = file_storage
        self._s3_api = s3_api
        self._base_url = 'https://databank.worldbank.org/data/download/WDI_csv.zip'
        self._wdi_data = 'WDIData.csv'

    def retrieve_wdi_indicator_data(self):
        """Retrieves the raw wdi indicator data and saves it as a CSV under
        ../raw_data/wdi_data/."""
        print('Getting zip file from url', self._base_url)
        with urlopen(self._base_url) as zip_response:
            # Renamed from "zip": shadowing the builtin made zip() unusable here.
            with ZipFile(BytesIO(zip_response.read()), 'r') as archive:
                try:
                    df = pd.read_csv(archive.open(self._wdi_data))
                    print(df.head())
                    output_file_name = '../raw_data/wdi_data/wdi_data.csv'
                    self._file_storage.create_directory_if_not_exists(output_file_name)
                    df.to_csv(output_file_name)
                except Exception as error:
                    # Best effort: log and continue rather than crash the pipeline.
                    print('An error occurred reading', self._wdi_data, error)
3581949 | # util.py
""" Auxiliary functions """
""" some functions extracted from https://github.com/hyperledger/aries-cloudagent-python/blob/master/aries_cloudagent/messaging/connections/models/diddoc/util.py """
import datetime
def timestamp():
    """Return the current UTC time as an ISO-8601 string with a space
    separator, e.g. "2021-01-31 12:34:56.789012+00:00"."""
    # datetime.utcnow() is deprecated; now(timezone.utc) yields an aware
    # datetime directly, with identical isoformat output.
    return datetime.datetime.now(datetime.timezone.utc).isoformat(' ')
def check_args(x, type_x):
    """Raise TypeError unless *x* is an instance of *type_x*."""
    if isinstance(x, type_x):
        return
    raise TypeError('Please provide a {} argument'.format(type_x))
def get_did_in_service(uri_did:str) -> str:
    """Return the DID part (everything before '#') of a service URI."""
    return resource(uri_did)[0]
def get_fragment_in_service(uri_did:str) -> str:
    """Return the fragment part (after the first '#') of a service URI.

    Raises IndexError when *uri_did* contains no '#'.
    """
    return resource(uri_did)[1]
def clean_did(ref: str) -> str:
    """ extract the bare identifier from *ref*,
    dropping anything after '#', ';' or '?' delimiters
    """
    bare = ref
    for delimiter in ("#", ";", "?"):
        bare = resource(bare, delimiter)[0]
    return bare
def resource(ref: str, delimiter: str = None) -> list:
    """
    Extract the resource for an identifier.

    Given a (URI) reference, split it on the delimiter and return all parts.

    Args:
        ref: reference
        delimiter: delimiter character
            (default None maps to '#', or ';' introduces identifiers)
    """
    sep = delimiter if delimiter else "#"
    return ref.split(sep)
| StarcoderdataPython |
12809745 | # Copyright 2018 the rules_m4 authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
# Label of the m4 toolchain type, and the provider its toolchains expose.
M4_TOOLCHAIN_TYPE = "@rules_m4//m4:toolchain_type"

M4ToolchainInfo = provider(fields = ["all_files", "m4_tool", "m4_env"])
def _template_vars(toolchain):
    # Expose $(M4) as a Make variable pointing at the toolchain's m4 binary.
    return platform_common.TemplateVariableInfo({
        "M4": toolchain.m4_tool.executable.path,
    })
def _m4_toolchain_info(ctx):
    # Bundle the m4 executable with its runfiles so consuming actions can
    # depend on a single depset via M4ToolchainInfo.all_files.
    m4_runfiles = ctx.attr.m4_tool[DefaultInfo].default_runfiles.files
    toolchain = M4ToolchainInfo(
        all_files = depset(
            direct = [ctx.executable.m4_tool],
            transitive = [m4_runfiles],
        ),
        m4_tool = ctx.attr.m4_tool.files_to_run,
        m4_env = ctx.attr.m4_env,
    )
    return [
        platform_common.ToolchainInfo(m4_toolchain = toolchain),
        _template_vars(toolchain),
    ]
# Rule wrapping an m4 binary (plus optional environment) as a toolchain.
m4_toolchain_info = rule(
    _m4_toolchain_info,
    attrs = {
        "m4_tool": attr.label(
            mandatory = True,
            executable = True,
            cfg = "host",
        ),
        "m4_env": attr.string_dict(),
    },
    provides = [
        platform_common.ToolchainInfo,
        platform_common.TemplateVariableInfo,
    ],
)
def _m4_toolchain_alias(ctx):
    # Resolve the active m4 toolchain and re-export its files and $(M4) var.
    toolchain = ctx.toolchains[M4_TOOLCHAIN_TYPE].m4_toolchain
    return [
        DefaultInfo(files = toolchain.all_files),
        _template_vars(toolchain),
    ]
# Alias rule: lets targets depend on "the current m4 toolchain" directly.
m4_toolchain_alias = rule(
    _m4_toolchain_alias,
    toolchains = [M4_TOOLCHAIN_TYPE],
    provides = [
        DefaultInfo,
        platform_common.TemplateVariableInfo,
    ],
)
| StarcoderdataPython |
8028673 | import cv2
import numpy as np
from dataset.skeleton import *
from utils.converter import world_coords_to_image
if __name__ == '__main__':  # Must run with main for multiprocessing
    # Visual sanity check: draw each frame's 15 skeleton joints on a blank
    # 640x480 canvas and play the sequence; ESC (key 27) stops a sequence.
    skeleton_dataset = SkeletonDatasetFlorence('D:/Illumine/Y4/METR4901/dataset/Florence', 0.8)
    labels = skeleton_dataset.get_labels()
    for (data, label) in skeleton_dataset:
        for frame in data:
            img = np.zeros([640, 480, 3], dtype=np.uint8)
            for idx in range(15):
                # Each joint is 3 consecutive tensor values: world x, y, z.
                joint_coords = (int(frame[idx * 3].numpy()),
                                int(frame[idx * 3 + 1].numpy()),
                                int(frame[idx * 3 + 2].numpy()))
                # 531.15 is the camera focal length used for projection.
                joint_img_coords = world_coords_to_image(joint_coords, 531.15, 640, 480)
                cv2.circle(img, joint_img_coords, 5, (255, 255, 0), thickness=2)
            cv2.namedWindow('Florence')
            cv2.imshow('Florence', img)
            # ~29 fps playback; 27 == ESC key code.
            if cv2.waitKey(34) == 27:
                break
| StarcoderdataPython |
3383736 | <filename>src/decompress.py
from bitstring import Bits
from image_manipulation import show_image_from_numpy_array, load_image_to_numpy_array, save_image_from_numpy_array
from structure.Blob import Blob, TYPES
from structure.Blobs import Blobs
from structure.Image import Image
from structure.Vector import Vector2
def loadBlobsInformation(filename: str) -> Blobs:
    """Read the compressed-blob info file: a 4-byte big-endian width and
    height, followed by one descriptor byte per blob in row-major order.
    Returns the reconstructed Blobs grid (8x8 blocks)."""
    with open(filename, "rb") as f:
        width = int.from_bytes(f.read(4), byteorder="big", signed=False)
        height = int.from_bytes(f.read(4), byteorder="big", signed=False)
        # Reads happen left-to-right, top-to-bottom, matching the file order.
        rows = [
            [Blob.fromBits(Bits(f.read(1)), Vector2(x, y), Vector2(8, 8))
             for x in range(width)]
            for y in range(height)
        ]
        return Blobs.fromBlobsList(rows)
def loadBlobsPixels(blobs: Blobs, filename: str):
    """Fill every FIXED blob with its 8x8 pixel patch from *filename*.

    The blob image stores the patches side by side in one 8-pixel-tall
    strip, in flattened blob order; non-FIXED blobs consume no width.
    """
    strip = load_image_to_numpy_array(filename)
    x_offset = 0
    for blob in blobs.getFlattenedBlobsArray():
        if blob.type != TYPES.FIXED:
            continue
        blob._pixels = strip[0:8, x_offset:x_offset + 8]
        x_offset += 8
def main(blobs_image_path: str, blobs_info_path: str, output_path: str):
    """Rebuild the decompressed image from the blob strip image plus the
    blob info file, display it, and save it to *output_path*."""
    # image: Image = Image.fromFile(blobs_image_path, Vector2(8, 8))
    # image.showFromBlobs()
    # LOAD: blob structure from the .pcf info file, pixels from the strip image
    blobs: Blobs = loadBlobsInformation(blobs_info_path)
    loadBlobsPixels(blobs, blobs_image_path)
    # SHOW and SAVE the reassembled image
    image = blobs.toPixels()
    show_image_from_numpy_array(image, "decompressed")
    save_image_from_numpy_array(output_path, image)
    print("Decompression [DONE]")


if __name__ == '__main__':
    main("temp/saved_blobs.png", "temp/saved_data.pcf", "temp/saved_decompressed.png")
| StarcoderdataPython |
8026928 | #!/usr/bin/env python3.6
import os
import tarfile
from six.moves import urllib
DOWNLOAD_ROOT = "https://raw.githubusercontent.com/ageron/handson-ml/master/"
HOUSING_PATH = os.path.join(*[os.path.pardir, "datasets", "housing"])
HOUSING_URL = DOWNLOAD_ROOT + "datasets/housing/housing.tgz"
def fetch_housing_data(housing_url=HOUSING_URL, housing_path=HOUSING_PATH):
    """Download the housing tarball to *housing_path* and extract it there.

    Creates the target directory when needed.  Requires network access.
    """
    # exist_ok avoids the check-then-act race of the previous isdir/makedirs.
    os.makedirs(housing_path, exist_ok=True)
    tgz_path = os.path.join(housing_path, "housing.tgz")
    urllib.request.urlretrieve(housing_url, tgz_path)
    # Context manager closes the archive even if extraction raises
    # (previously the handle leaked on any extractall failure).
    with tarfile.open(tgz_path) as housing_tgz:
        # NOTE(review): extractall on an untrusted archive can write outside
        # housing_path (path traversal); this URL is trusted upstream content.
        housing_tgz.extractall(path=housing_path)


if __name__ == '__main__':
    fetch_housing_data()
| StarcoderdataPython |
1971184 | <reponame>lgaravaglia999/plugin.streaming.cava
import sys
from resources.lib.views.TvshowView import TvShowView
from resources.lib.router_urls.websites_config import WebsitesConfig as cfg
#WEBSITE = "gs"
WEBSITE = cfg.get_path(cfg.GUARDASERIE)
class GuardaserieView(TvShowView):
    """TvShowView specialized for the Guardaserie website configuration."""
    def __init__(self):
        # The explicit two-argument super() form is valid on both Python 2
        # and 3, so the sys.version_info branch was redundant.
        super(GuardaserieView, self).__init__(WEBSITE)
| StarcoderdataPython |
12835352 | <gh_stars>1-10
import datetime
def get_timestamp_min_in_past(min_ago: int) -> datetime.datetime:
    """Return the timezone-aware UTC time *min_ago* minutes before now."""
    # Use an aware UTC "now" directly: the old naive datetime.now() followed
    # by replace(tzinfo=utc) mislabeled local wall-clock time as UTC on any
    # machine whose system timezone is not UTC.
    return datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=min_ago)
| StarcoderdataPython |
9767019 | <gh_stars>1-10
from sklearn.naive_bayes import MultinomialNB
from sklearn.linear_model import SGDClassifier
from sklearn.preprocessing import LabelEncoder
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.svm import LinearSVC
from sklearn.pipeline import Pipeline
from sklearn.metrics import classification_report, confusion_matrix
def load_data(path):
    """Read a label-first whitespace-separated corpus file.

    Each non-blank line is "<label> <token> <token> ...".  Returns (X, y)
    where X holds the token strings re-joined with single spaces and y the
    label strings.
    """
    X = []
    y = []
    # Context manager closes the handle deterministically, and iterating the
    # file lazily avoids materializing readlines() for large corpora.
    with open(path, encoding='utf8') as f:
        for text in f:
            words = text.strip().split()
            if not words:
                continue  # skip blank lines instead of crashing on words[0]
            X.append(' '.join(words[1:]))
            y.append(words[0])
    return X, y
def nb(X, y):
    """Fit a bag-of-words + tf-idf + multinomial naive Bayes pipeline."""
    steps = [
        ('vect', CountVectorizer(max_features=None, min_df=10, max_df=0.9)),
        ('tfidf', TfidfTransformer()),
        ('clf', MultinomialNB()),
    ]
    pipeline = Pipeline(steps)
    pipeline.fit(X, y)
    return pipeline
def svm(X, y):
    """Fit a bag-of-words + tf-idf + linear SVM pipeline."""
    steps = [
        ('vect', CountVectorizer(max_features=None, min_df=10, max_df=0.9)),
        ('tfidf', TfidfTransformer()),
        ('clf', LinearSVC()),
    ]
    pipeline = Pipeline(steps)
    pipeline.fit(X, y)
    return pipeline
def sgd(X, y):
    """Fit a bag-of-words -> tf-idf -> SGD classifier pipeline."""
    vectorizer = CountVectorizer(max_features=None, min_df=10, max_df=0.9)
    model = Pipeline([('vect', vectorizer),
                      ('tfidf', TfidfTransformer()),
                      ('clf', SGDClassifier())])
    model.fit(X, y)
    return model
def evaluate(sk_model, X, y):
    """Print a confusion matrix and classification report for *sk_model*.

    Raises:
        ValueError: when *sk_model* is None (i.e. training never happened).
    """
    if sk_model is None:
        raise ValueError('Model is None. Call train method')
    predictions = sk_model.predict(X)
    print(confusion_matrix(y, predictions))
    print(classification_report(y, predictions))
if __name__ == '__main__':
    # Train on the training split and report metrics on the test split.
    X_train, y_train = load_data('data/train.txt')
    X_test, y_test = load_data('data/test.txt')
    # Encode string labels as integers, using the same encoder for both
    # splits so the integer ids line up.
    # NOTE(review): the encoder is fitted on train only; a label that
    # appears only in the test file would make transform() raise — confirm.
    lb = LabelEncoder()
    lb.fit(y_train)
    y_train = lb.transform(y_train)
    y_test = lb.transform(y_test)
    model = sgd(X_train, y_train)
    evaluate(model, X_test, y_test)
    # Print the label order so report rows/columns can be interpreted.
    print(lb.classes_)
| StarcoderdataPython |
11220444 | <filename>tests/population_test.py
import unittest
import sys
sys.path.insert(1, '..')
import evogression
from test_data import categorical_data, surface_3d_data
from pprint import pprint as pp
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
import random
random.seed(10) # for reproducing the same plot
class TestPopulationCateogry(unittest.TestCase):
    """Visual (plot-producing) smoke tests for evogression.Population.

    NOTE(review): the class name misspells "Category"; renaming would only
    change unittest's reported names, so it is flagged rather than fixed.
    """

    def test_population_category_2d(self):
        # Fit a model split on the categorical 'cat' column, then plot the
        # raw data against per-category prediction curves.
        population = evogression.Population('y', categorical_data, split_parameter='cat', optimize=3)
        # NOTE(review): y_test is computed but never plotted or asserted
        # on — confirm whether it can be removed.
        y_test = [population.predict(d, 'pred')['pred'] for d in categorical_data]
        plt.scatter([pd['x'] for pd in categorical_data], [pd['y'] for pd in categorical_data], s=100)
        x_test_A = [i / 10 for i in range(0, 55)]
        y_test_A = [population.predict({'cat': 'A', 'x': x}, 'pred')['pred'] for x in x_test_A]
        x_test_B = [i / 10 for i in range(87, 135)]
        y_test_B = [population.predict({'cat': 'B', 'x': x}, 'pred')['pred'] for x in x_test_B]
        plt.scatter(x_test_A, y_test_A, s=10)
        plt.scatter(x_test_B, y_test_B, s=10)
        plt.xlabel('x')
        plt.ylabel('y')
        plt.text(1, 17, 'Category A')
        plt.text(10.5, 17, 'Category B')
        plt.title('Category Regression - Population Test')
        plt.show()

    def test_population_continuous_3d(self):
        # Fit a model split on the continuous 'y' column and overlay the
        # predicted surface points on the raw 3-D data.
        population = evogression.Population('z', surface_3d_data, num_creatures=5000, num_cycles=10, group_size=5, optimize=5, split_parameter='y', category_or_continuous='continuous')
        data = population.predict(surface_3d_data, 'z_predicted')
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        x = [point_dict['x'] for point_dict in data]
        y = [point_dict['y'] for point_dict in data]
        z = [point_dict['z'] for point_dict in data]
        z_test = [point_dict['z_predicted'] for point_dict in data]
        ax.scatter3D(x, y, z)
        ax.scatter3D(x, y, z_test)
        ax.set_xlabel('x')
        ax.set_ylabel('y')
        ax.set_zlabel('z')
        plt.title('Surface Regression - Continuous Population Test')
        plt.show()
if __name__ == '__main__':
    # Runs the interactive, plot-producing tests above.
    unittest.main()
| StarcoderdataPython |
6550981 | <gh_stars>10-100
from Sakurajima.models.base_models import Anime, Episode, AniWatchEpisode
from Sakurajima.models.chronicle import ChronicleEntry
from Sakurajima.models.media import Media, UserMedia
from Sakurajima.models.notification import Notification
from Sakurajima.models.recommendation import RecommendationEntry
from Sakurajima.models.relation import Relation
from Sakurajima.models.stats import AniwatchStats
from Sakurajima.models.user_models import UserAnimeListEntry, UserOverview
from Sakurajima.models.watchlist import WatchListEntry
# Public API of Sakurajima.models: the submodule names re-exported above.
__all__ = [
    'base_models',
    'chronicle',
    'media',
    'notification',
    'recommendation',
    'relation',
    'stats',
    'user_models',
    'watchlist'
]
3550571 | import time
from datetime import datetime
import json
from io import StringIO
import subprocess
import shlex
def run():
    """Spawn the crawler (run_real) in a detached child Python process."""
    cmd = shlex.split("python3 -c 'import main_app.runner; main_app.runner.run_real()'")
    subprocess.Popen(cmd)
def run_real():
    """Execute a full crawl: set the 'scanning' flag, crawl, record stats,
    then clear the flag.

    Improvements over the original: the initial read of settings.txt was
    dead code (its result was immediately overwritten by get_settings()),
    every write went through an unnecessary StringIO round-trip, and file
    handles were never closed.
    """
    import main_app.crawler as main

    def _write_json(path, payload):
        # json.dump straight to the file replaces the StringIO dance.
        with open(path, 'w') as fh:
            json.dump(payload, fh)

    # Mark the crawl as in progress so other components can poll the flag.
    settings_dict = get_settings()
    settings_dict["scanning"] = True
    _write_json('static/settings.txt', settings_dict)

    start_time = time.time()
    main.set_accepted_domains(settings_dict["allowed_urls"])
    main.set_disallowed_domains(settings_dict["disallowed_urls"])
    main.boot_db()
    main.get_redirects(settings_dict["recheck_redirects"])
    main.start_crawl(settings_dict["start_page"])

    # Record elapsed time and the human-readable completion timestamp.
    crawl_stats = {}
    seconds = time.time() - start_time
    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    crawl_stats['time_to_complete'] = "%d hours, %02d minutes and %02d seconds" % (h, m, s)
    d = datetime.now()
    current_time = d.strftime("%I:%M %p")
    current_date = d.strftime("%m/%d/%y")
    crawl_stats['full_time'] = current_date + " at " + current_time
    _write_json('static/crawl_stats.txt', crawl_stats)

    # Clear the in-progress flag, re-reading settings in case they changed
    # while the crawl ran.
    settings_dict = get_settings()
    settings_dict["scanning"] = False
    _write_json('static/settings.txt', settings_dict)
def get_settings():
    """Load crawler settings from static/settings.txt.

    When the primary file is empty, the settings are recovered from
    static/backup_settings.txt and re-persisted to the primary file.

    Improvements over the original: file handles are closed via context
    managers (the original leaked ``open(...).read()`` handles) and the
    StringIO round-trip before writing was replaced with a direct
    ``json.dump``.
    """
    with open('static/settings.txt', 'r') as fh:
        raw = fh.read()
    if raw:
        return json.loads(raw)
    # Primary file empty: recover from the backup and rewrite the primary.
    with open('static/backup_settings.txt', 'r') as fh:
        settings_dict = json.loads(fh.read())
    with open('static/settings.txt', 'w') as fh:
        json.dump(settings_dict, fh)
    return settings_dict
if __name__ == "__main__":
    import sys
    # argv[1]: project root to prepend to the import path so that the
    # main_app package resolves before crawling starts.
    sys.path.append(sys.argv[1])
    run_real()
| StarcoderdataPython |
3309091 | from django.conf.urls import patterns, include, url
from django.views.generic import ListView, DetailView
from django.views.generic.edit import UpdateView
from django.contrib.auth.decorators import login_required, permission_required
from models import *
from views import *
# URL routes for the courses (cursos) and students (alumnos) CRUD views.
# NOTE(review): patterns() is the pre-Django-1.8 routing API; kept as-is.
urlpatterns = patterns('',
    # NOTE(review): r'cursos^$' places the '^' anchor mid-pattern; this
    # likely was meant to be r'^cursos$' — confirm against the URL layout.
    url(r'cursos^$', CursoListView.as_view(),name="cursos_lista"),
    url(r'cursos/nuevo$',CursoCreateView.as_view(), name="curso_nuevo"),
    url(r'cursos/editar/(?P<pk>\d+)/$',CursoUpdateView.as_view(), name="curso_editar"),
    url(r'cursos/borrar/(?P<pk>\d+)/$',CursoDeleteView.as_view(), name="curso_borrar"),
    url(r'cursos/(?P<pk>\d+)/$',CursoDetailView.as_view(), name="curso_detalle"),
    # Student list requires login; the other student views do not.
    url(r'^$', login_required(AlumnoListView.as_view()),name="alumnos_lista"),
    url(r'nuevo$',AlumnoCreateView.as_view(), name="alumno_nuevo"),
    url(r'editar/(?P<pk>\d+)/$',AlumnoUpdateView.as_view(), name="alumno_editar"),
    url(r'borrar/(?P<pk>\d+)/$',AlumnoDeleteView.as_view(), name="alumno_borrar"),
    url(r'(?P<pk>\d+)/$',AlumnoDetailView.as_view(), name="alumno_detalle"),
)
| StarcoderdataPython |
1866012 | <reponame>pordnajela/AlgoritmosCriptografiaClasica<gh_stars>0
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
from Plantilla import Template
from Transposicion.TransposicionSimple import TransposicionSimple
from Transposicion.TransposicionGrupo import TransposicionGrupo
from Transposicion.TransposicionSerie import TransposicionSerie
class ControladorTransposicionSD(Template):
    """
    Concrete class implementing modoCifrar and modoDescifrar from the
    Template class for the repeated simple-transposition cipher.
    """
    def __init__(self, n=None, archivoOriginal=None):
        Template.__init__(self)
        self.tSimple = TransposicionSimple()
        # n: number of extra transposition rounds applied after the first.
        self.n = n
        self.archivoOriginal = archivoOriginal
    def modoCifrar(self, *argumentos):
        # Expects argumentos[0] to be the plaintext to encrypt.
        try:
            argumentos = list(argumentos)
            cadena = argumentos[0]
        except IndexError:
            # NOTE(review): with no argument, ``cadena`` stays unbound and
            # the next statement raises NameError — confirm intent.
            pass
        self.tSimple.cadena = cadena
        self.tSimple.cifrar()
        # Re-encrypt the previous output n more times.
        for x in range(0, self.n):
            self.tSimple.cadena = self.tSimple.textoCifrado.split("\n")
            self.tSimple.cifrar()
        return self.tSimple.textoCifrado
    def modoDescifrar(self, *argumentos):
        # Expects argumentos[0] = ciphertext, argumentos[1] = round count.
        try:
            argumentos = list(argumentos)
            cadena = argumentos[0]
            clave = argumentos[1]
        except IndexError:
            pass
        self.n = int(clave)
        self.tSimple.cadena = cadena
        self.tSimple.descifrar()
        # Undo the extra rounds in the same number of passes.
        for x in range(0, self.n):
            self.tSimple.cadena = self.tSimple.textoClaro.split("\n")
            self.tSimple.descifrar()
        return self.tSimple.textoClaro
#------------------------------------------------------------------------------------------------
class ControladorTransposicionGrupo(Template):
    """
    Concrete class implementing modoCifrar and modoDescifrar from the
    Template class for the group-transposition cipher.
    """
    def __init__(self, clave=None, archivoOriginal=None):
        Template.__init__(self)
        # clave: key forwarded to the group-transposition implementation.
        self.tGrupo = TransposicionGrupo(None, clave)
        self.archivoOriginal = archivoOriginal
    def modoCifrar(self, *argumentos):
        # Expects argumentos[0] to be the plaintext to encrypt.
        try:
            argumentos = list(argumentos)
            cadena = argumentos[0]
        except IndexError:
            # NOTE(review): with no argument ``cadena`` is unbound below.
            pass
        '''
        nombre = ControladorTransposicionTemplate.obtenerArchivoMetadatos(self,self.archivoOriginal)[0]
        self.utilidad.crearArchivo(nombre+".mtd", str(self.tGrupo.clave)+"\n", "a")
        '''
        self.tGrupo.cadena = cadena
        self.tGrupo.cifrar()
        return self.tGrupo.textoCifrado
    def modoDescifrar(self, *argumentos):
        # Expects argumentos[0] = ciphertext, argumentos[1] = key.
        try:
            argumentos = list(argumentos)
            cadena = argumentos[0]
            clave = argumentos[1]
        except IndexError:
            pass
        self.tGrupo.clave = int(clave)
        self.tGrupo.cadena = cadena
        self.tGrupo.descifrar()
        # Decryption output may carry padding rows; strip them before
        # returning the recovered plaintext.
        cadenaDescifradaRelleno = self.tGrupo.textoClaro.split("\n")
        cadenaSinRelleno = self.tGrupo.eliminarRelleno(cadenaDescifradaRelleno)
        self.tGrupo.textoClaro = cadenaSinRelleno
        return self.tGrupo.textoClaro
#------------------------------------------------------------------------------------------------
class ControladorTransposicionSerie(Template):
    """
    Concrete class implementing modoCifrar and modoDescifrar from the
    Template class for the series-transposition cipher.
    """
    def __init__(self, funciones=None, archivoOriginal=None):
        Template.__init__(self)
        '''Funciones es una lista, donde cada posición es una lista de números que
        corresponde a una función en particular. Ej: [ [1,2,3,5,7], [4,6,8], [9] ]'''
        # funciones: list of lists of ints; each inner list is one series,
        # e.g. [[1, 2, 3, 5, 7], [4, 6, 8], [9]] (translation of the
        # Spanish note above).
        self.tSerie = TransposicionSerie(funciones)
        self.archivoOriginal = archivoOriginal
    def modoCifrar(self, *argumentos):
        # Expects argumentos[0] to be the plaintext to encrypt.
        try:
            argumentos = list(argumentos)
            cadena = argumentos[0]
        except IndexError:
            # NOTE(review): with no argument ``cadena`` is unbound below.
            pass
        self.tSerie.cadena = cadena
        self.tSerie.cifrar()
        return self.tSerie.textoCifrado
    def modoDescifrar(self, *argumentos):
        # Expects argumentos[0] = ciphertext, argumentos[1] = series key.
        try:
            argumentos = list(argumentos)
            cadena = argumentos[0]
            clave = argumentos[1]
        except IndexError:
            pass
        self.tSerie.series = clave
        self.tSerie.cadena = cadena
        self.tSerie.descifrar()
        return self.tSerie.textoClaro
12836134 | import numpy as np
import pandas as pd
import os
from PIL import Image
from wordcloud import WordCloud, STOPWORDS, ImageColorGenerator
import matplotlib.pyplot as plt
import seaborn as sns
class GraphGenerator():
    """
    A class that generates graphs to visualize data.

    Every method renders with matplotlib/seaborn and blocks on
    ``plt.show()``; nothing is returned.
    """
    def create_wordcloud(self, freq_dicc):
        """
        Creates a word cloud given a frequency dictionary
        Args:
            freq_dicc (Dictionary): frequency dictionary mapping words to counts
        """
        wc = WordCloud(background_color="black",width=1000,height=1000,relative_scaling=0.5,normalize_plurals=False).generate_from_frequencies(freq_dicc)
        plt.figure()
        plt.imshow(wc, interpolation="bilinear")
        plt.axis("off")
        plt.show()
    def create_graph_bar(self, x_label_list, y_label_list, title, xlabel_title, ylabel_title):
        """
        Creates a graph bar
        Args:
            x_label_list ([List]): Independent variable
            y_label_list ([List]): Dependent variable
            title (String): Graph title
            xlabel_title (String): Independent variable name
            ylabel_title (String): Dependent variable name
        """
        plt.figure(figsize=(20, 10))
        plt.title(title)
        sns.barplot(x=x_label_list, y=y_label_list, alpha=0.8)
        plt.xlabel(xlabel_title, fontsize=12)
        plt.ylabel(ylabel_title, fontsize=12)
        plt.show()
    def plot_line(self, x_label_list, y_label_list, title, xlabel_title, ylabel_title):
        """
        Plots a line given a set of (x, y) points
        Args:
            x_label_list ([List]): Independent variable
            y_label_list ([List]): Dependent variable
            title (String): Graph title
            xlabel_title (String): Independent variable name
            ylabel_title (String): Dependent variable name
        """
        plt.figure(figsize=(20, 10))
        plt.title(title)
        sns.lineplot(x=x_label_list, y=y_label_list)
        plt.xlabel(xlabel_title)
        plt.ylabel(ylabel_title)
        plt.show()
    def create_heatmap(self, data, title, xlabel_title, ylabel_title):
        """
        Creates a heatmap
        Args:
            data (DataFrame) : data to use
            title (String): Graph title
            xlabel_title (String): Independent variable name
            ylabel_title (String): Dependent variable name
        """
        plt.figure(figsize=(14,7))
        plt.title(title)
        sns.heatmap(data=data, annot=True)
        plt.xlabel(xlabel_title)
        plt.ylabel(ylabel_title)
        plt.show()
    def plot_points(self, x_label_list, y_label_list, title, xlabel_title, ylabel_title):
        """
        Plots a set of (x, y) points as a scatter plot
        Args:
            x_label_list ([List]): Independent variable
            y_label_list ([List]): Dependent variable
            title (String): Graph title
            xlabel_title (String): Independent variable name
            ylabel_title (String): Dependent variable name
        """
        plt.figure(figsize=(20,10))
        plt.title(title)
        sns.scatterplot(x=x_label_list, y=y_label_list)
        plt.xlabel(xlabel_title)
        plt.ylabel(ylabel_title)
        plt.show()
    def plot_regression_line(self, x_label_list, y_label_list, title, xlabel_title, ylabel_title):
        """
        Plots a regression line given a set of (x, y) points
        Args:
            x_label_list ([List]): Independent variable
            y_label_list ([List]): Dependent variable
            title (String): Graph title
            xlabel_title (String): Independent variable name
            ylabel_title (String): Dependent variable name
        """
        plt.figure(figsize=(20,10))
        plt.title(title)
        sns.regplot(x=x_label_list, y=y_label_list)
        plt.xlabel(xlabel_title)
        plt.ylabel(ylabel_title)
        plt.show()
    def plot_points_3Variables(self, x_label_list, y_label_list, title, xlabel_title, ylabel_title, hue):
        """
        Plots a set of points colored by a third variable
        Args:
            x_label_list ([List]): Independent variable
            y_label_list ([List]): Dependent variable
            title (String): Graph title
            xlabel_title (String): Independent variable name
            ylabel_title (String): Dependent variable name
            hue (List): per-point values used for color grouping
        """
        plt.figure(figsize=(20,10))
        plt.title(title)
        sns.scatterplot(x=x_label_list, y=y_label_list, hue=hue)
        plt.xlabel(xlabel_title)
        plt.ylabel(ylabel_title)
        plt.show()
    def create_categorical_graph(self, x_label_list, y_label_list, title, xlabel_title, ylabel_title):
        """
        Creates a categorical (swarm) graph
        Args:
            x_label_list ([List]): Independent variable
            y_label_list ([List]): Dependent variable
            title (String): Graph title
            xlabel_title (String): Independent variable name
            ylabel_title (String): Dependent variable name
        """
        plt.figure(figsize=(20,10))
        plt.title(title)
        sns.swarmplot(x=x_label_list, y=y_label_list)
        plt.xlabel(xlabel_title)
        plt.ylabel(ylabel_title)
        plt.show()
    def create_histogram(self, data):
        """
        Creates a histogram given a data set
        Args:
            data (List): Data set
        """
        plt.figure(figsize=(20,10))
        sns.distplot(a=data, kde=False)
        plt.show()
| StarcoderdataPython |
def min_sum_window_start(n, k, h):
    """Return the 1-based start index of the length-*k* window of *h*
    (length *n*) with the smallest sum; the earliest window wins ties.

    BUG FIX: inside the sliding loop the original wrote ``sm = s``
    (clobbering the running window sum with the best-so-far) instead of
    ``s = sm`` (recording a new best), so the answer was almost always
    wrong. The logic is also extracted into a function so it can be
    tested without stdin.
    """
    window = sum(h[:k])
    best_sum = window
    best_start = 0
    for right in range(k, n):
        # Slide the window one step: drop h[right - k], add h[right].
        window += h[right] - h[right - k]
        if window < best_sum:
            best_sum = window
            best_start = right - k + 1
    return best_start + 1


if __name__ == "__main__":
    # Guarding with __name__ lets the function be imported without
    # blocking on stdin (the original script read input at import time).
    n, k = map(int, input().split())
    h = list(map(int, input().split()))
    print(min_sum_window_start(n, k, h))
| StarcoderdataPython |
178384 | import discord
import re
import db
def parse(message, quotes_file):
    """Persist *message* when its whole content matches the quote pattern.

    Returns True when the message was stored in the quotes table, False
    otherwise. (*quotes_file* is accepted for signature compatibility but
    unused, as in the original.)
    """
    content = message.content
    match = re.search('.+([A-Za-z0-9]:|\]:)+.+(\n[A-Za-z0-9].*)*', content)
    # Store only when the pattern covers the entire message body.
    if match is None or match.group(0) != content:
        return False
    db.insert('quotes', {'content': content})
    return True
def choose():
    """Return the text of one uniformly random stored quote."""
    cur = db.cursor()
    cur.execute("SELECT content FROM quotes ORDER BY RANDOM() LIMIT 1")
    return cur.fetchone()[0]
def search(query):
    """Return the contents of all quotes containing *query* as a substring."""
    cur = db.cursor()
    cur.execute("SELECT content FROM quotes WHERE content LIKE ?", ('%' + query + '%',))
    return [row[0] for row in cur.fetchall()]
| StarcoderdataPython |
8077002 | from flask import render_template,redirect,url_for,abort,request
from . import main
from app.requests import get_quote
from .forms import ReviewForm,UpdateProfile,ArticleForm
from .. models import Reviews,User,Articles
from flask import jsonify
from flask_login import login_required,UserMixin,current_user
from .. import db,photos
@main.route('/')
def index():
    """Render the landing page with a random quote and all articles."""
    quote_data = get_quote()
    articles = Articles.query.all()
    return render_template('main/index.html',
                           title="Home of stories",
                           quote=quote_data["quote"],
                           quote_author=quote_data["author"],
                           articles=articles,
                           author=current_user)
@main.route("/review/<int:id>",methods=['GET','POST'])
@login_required
def review(id):
form = ReviewForm()
#article = Articles.query.get_or_404(id)
if form.validate_on_submit():
review = form.review.data
new_review = Reviews()
new_review.review= review
new_review.save_review()
new_review = Reviews(review = review)
return redirect(url_for('main.index',id = id))
title="Post your review"
return render_template('main/new_review.html',review_form=form)
@main.route('/user/<int:user_id>')
def user(user_id):
    """Render the articles page for the user with the given numeric id."""
    return render_template('main/articles.html', id=user_id)
@main.route("/post",methods=['GET','POST'])
@login_required
def post():
form = ArticleForm()
if form.validate_on_submit():
title = form.title.data
content = form.content.data
new_post = Articles()
new_post.title = title
new_post.content= content
new_post.save_article()
new_article = Articles(title=title,content = content)
reviews = Reviews.query.all()
return redirect(url_for('main.index'))
title="Post your article"
return render_template('main/post.html',title=title,article_form=form)
@main.route('/user/<uname>')
def profile(uname):
    """Render a user's public profile page; 404 when the name is unknown."""
    account = User.query.filter_by(username=uname).first()
    if account is None:
        abort(404)
    return render_template("profile/profile.html", user=account)
@main.route('/user/<uname>/update',methods = ['GET','POST'])
@login_required
def update_profile(uname):
    """Show the bio-edit form and save a submitted bio for *uname*."""
    account = User.query.filter_by(username=uname).first()
    if account is None:
        abort(404)
    form = UpdateProfile()
    if not form.validate_on_submit():
        return render_template('profile/update_profile.html', form=form)
    account.bio = form.bio.data
    db.session.add(account)
    db.session.commit()
    return redirect(url_for('.profile', uname=account.username))
#photos logic
@main.route('/user/<uname>/update/pic',methods= ['POST'])
@login_required
def update_pic(uname):
    """Save an uploaded profile photo and record its path on the user."""
    user = User.query.filter_by(username=uname).first()
    if 'photo' in request.files:
        filename = photos.save(request.files['photo'])
        # BUG FIX: the stored path must embed the saved filename; the
        # original interpolated a literal placeholder instead of the
        # ``filename`` variable (which was otherwise unused).
        user.profile_pic_path = f'photos/{filename}'
        db.session.commit()
    return redirect(url_for('main.profile', uname=uname))
@main.route('/review/<int:id>')
def single_review(id):
    """Render one review by id; 404 when it does not exist."""
    entry = Reviews.query.get(id)
    if entry is None:
        abort(404)
    return render_template('review.html', review=entry)
| StarcoderdataPython |
6638475 | <filename>runner_service/controllers/jobs.py
import os
# from flask import request
from flask_restful import Resource
# import logging
from .utils import requires_auth, log_request
from ..services.jobs import get_events, get_event
from ..services.utils import build_pb_path
import logging
logger = logging.getLogger(__name__)
class ListEvents(Resource):
    """REST resource listing the event uuids recorded for a playbook run."""

    @requires_auth
    @log_request(logger)
    def get(self, play_uuid=None):
        """GET /jobs/<play_uuid>/events

        Returns ``{"play_uuid": ..., "job_events": [...]}`` with 200 on
        success; 400 when the uuid is missing, 404 when no artifacts exist
        for it, and 500 when the event list cannot be gathered.
        """
        if not play_uuid:
            return {"message": "playbook uuid missing"}, 400
        pb_path = build_pb_path(play_uuid)
        if not os.path.exists(pb_path):
            return {"message": "playbook uuid given does not exist"}, 404
        events = get_events(pb_path)
        if not events:
            return {"message": "Unable to gather tasks for {}".format(play_uuid)}, 500
        return {"play_uuid": play_uuid,
                "job_events": events}, 200
class GetEvent(Resource):
    """REST resource returning the JSON payload of one event in a job."""

    @requires_auth
    @log_request(logger)
    def get(self, play_uuid, event_uuid):
        """GET /jobs/<play_uuid>/events/<event_uuid>

        Returns ``{"play_uuid": ..., "event_uuid": ..., "data": {...}}``
        with 200 when the event exists, 404 otherwise.
        """
        pb_path = build_pb_path(play_uuid)
        payload = get_event(pb_path, event_uuid)
        if not payload:
            return {"message": "Task requested not found"}, 404
        return {"play_uuid": play_uuid,
                "event_uuid": event_uuid,
                "data": payload}, 200
| StarcoderdataPython |
6413956 | <filename>kfac/comm.py
import enum
import os
import torch
import torch.distributed as dist
try:
import horovod.torch as hvd
HVD_EXISTS = True
except:
HVD_EXISTS = False
# The global var containing the current initialized backend object
backend = None
def init_comm_backend():
    """Initialise the module-level ``backend`` singleton exactly once."""
    global backend
    if backend is None:
        backend = _get_comm_backend()
def _get_comm_backend():
    """Pick the concrete backend that matches the initialised runtime.

    Falls back to the no-op single-process CommBackend when neither
    horovod nor torch.distributed has been initialised.
    """
    if _horovod_is_initialized():
        return HorovodBackend()
    if _torch_distributed_is_initialized():
        return TorchBackend()
    return CommBackend()
def _horovod_is_initialized():
    """True when horovod is importable and ``hvd.init()`` has been called."""
    if not HVD_EXISTS:
        return False
    try:
        # hvd.size() raises unless hvd.init() was called first.
        hvd.size()
    except:
        return False
    return True
def _torch_distributed_is_initialized():
    # Reflects whether torch.distributed.init_process_group() has run.
    return dist.is_initialized()
class Ops(enum.Enum):
    """Reduction operations understood by every communication backend."""
    Average = "average"
    Sum = "sum"
class CommGroup(object):
    """A subset of worker ranks that collectives can be restricted to."""
    def __init__(self, ranks):
        # ranks: list of global ranks belonging to this group.
        self.ranks = ranks
        # Only materialise a concrete torch.distributed group for a
        # strict, non-trivial subset of the world; otherwise ``group``
        # stays None and collectives use the default (global) group.
        if (_torch_distributed_is_initialized() and
            self.size > 1 and self.size < backend.size()):
            self.group = dist.new_group(ranks)
        else:
            self.group = None
    @property
    def size(self):
        # Number of ranks in this group.
        return len(self.ranks)
class CommBackend(object):
    """Distributed training communication abstraction.

    This base class doubles as the single-process fallback: size() is 1,
    rank() is 0, and every collective is a no-op (see _get_comm_backend).
    """
    def __init__(self):
        # Expose the reduction ops on the backend for caller convenience.
        self.Average = Ops.Average
        self.Sum = Ops.Sum
    def size(self):
        """Get worker count"""
        return 1
    def local_rank(self):
        """Get workers local rank"""
        return 0
    def rank(self):
        """Get unique worker rank"""
        return 0
    def allreduce(self, tensor, op=Ops.Average, group=None, async_op=True):
        """Allreduce tensor inplace.

        Args:
          tensor (torch.Tensor)
          op (Op): reduction operation to apply (default: Ops.Average)
          group (CommGroup): CommGroup for collective
              communication. If None, uses default group (default: None).
          async_op (bool): whether this op should be asynchronous (default: True)

        Returns:
          Async work handle, if async_op is True, else None
        """
        return
    def broadcast(self, tensor, src, group=None, async_op=True):
        """Broadcast tensor.

        Args:
          tensor (torch.Tensor)
          src (int): source rank for tensor.
          group (CommGroup): CommGroup for collective
              communication. If None, uses default group (default: None).
          async_op (bool, optional): whether this op should be asynchronous

        Returns:
          Async work handle, if async_op is True, else None
        """
        return
    def reduce(self, tensor, dst, op=Ops.Average, async_op=True):
        """Reduce tensor inplace.

        Args:
          tensor (torch.Tensor)
          dst (int): dest rank for reduction
          op (Op): reduction operation to apply (default: Ops.Average)
          async_op (bool): whether this op should be asynchronous (default: True)

        Returns:
          Async work handle, if async_op is True, else None
        """
        return
    def barrier(self):
        # No-op in the single-process fallback.
        return
    def sync(self, handles):
        """Executes asynchronous handles.

        Args:
          handles (handle or list(handles)): handles returned by async functions
        """
        return
    def wait(self, handle):
        """Execute handle and block until finished"""
        return
class HorovodBackend(CommBackend):
    """CommBackend implementation on top of horovod.torch."""

    def size(self):
        return hvd.size()

    def local_rank(self):
        return hvd.local_rank()

    def rank(self):
        return hvd.rank()

    def allreduce(self, tensor, op=Ops.Average, group=None, async_op=True):
        """Allreduce *tensor* in place; returns a handle when async."""
        op = self._get_op(op)
        if async_op:
            return hvd.allreduce_async_(tensor, op=op)
        hvd.allreduce_(tensor, op=op)

    def broadcast(self, tensor, src, group=None, async_op=True):
        # Horovod has no broadcast groups, so ``group`` is ignored.
        if async_op:
            return hvd.broadcast_async_(tensor, root_rank=src)
        # BUG FIX: the synchronous path referenced the undefined name
        # ``rank`` (NameError at call time); the broadcast root is ``src``.
        hvd.broadcast_(tensor, root_rank=src)

    def reduce(self, tensor, dst, op=Ops.Average, async_op=True):
        # Horovod only supports allreduce, so ``dst`` is ignored.
        # BUG FIX: propagate the async handle (the original dropped it,
        # so callers could never sync() the pending reduce).
        return self.allreduce(tensor, op=op, async_op=async_op)

    def barrier(self):
        # A tiny named allreduce serves as a global barrier.
        hvd.allreduce(torch.tensor(1), name='barrier')

    def _get_op(self, op):
        """Map the backend-neutral Ops enum onto horovod's op constants."""
        if op == Ops.Average:
            return hvd.Average
        elif op == Ops.Sum:
            return hvd.Sum
        else:
            raise ValueError('Unknown communication operation {}'.format(op))

    def sync(self, handles):
        if isinstance(handles, list):
            for handle in handles:
                self.wait(handle)
        else:
            self.wait(handles)

    def wait(self, handle):
        if handle is not None:
            hvd.synchronize(handle)
class TorchBackend(CommBackend):
    """CommBackend implementation on top of torch.distributed.

    torch.distributed has no Average op, so averaging collectives return a
    ``(handle, tensor)`` pair and sync() divides the tensor afterwards.
    """
    def size(self):
        return dist.get_world_size()
    def local_rank(self):
        try:
            return os.environ['LOCAL_RANK']
        except:
            # NOTE(review): the two string literals concatenate without a
            # space ("...environmentwhen..."), and the rank is returned as
            # a str while HorovodBackend returns an int — confirm both.
            raise RuntimeError('LOCAL_RANK must be set in the environment'
                               'when using torch.distributed')
    def rank(self):
        return dist.get_rank()
    def allreduce(self, tensor, op=Ops.Average, group=None, async_op=True):
        # Skip entirely for singleton groups; pass the concrete process
        # group only when one was materialised (CommGroup.group may be
        # None for the default/global group).
        if group is not None:
            if group.size <= 1:
                return
            kwargs = {'group': group.group} if group.group is not None else {}
        else:
            kwargs = {}
        # Note: actually returns tuple(handle, tensor) for Average because
        # torch.distributed has no average op, so the tensor must be passed
        # to sync() to be divided by the world size afterwards.
        # NOTE(review): sync() divides by the *world* size even for
        # subgroup reductions — confirm that is intended.
        handle = dist.all_reduce(tensor, async_op=async_op, **kwargs)
        if not async_op:
            if op == Ops.Average:
                tensor /= self.size()
            return
        else:
            if op == Ops.Average:
                return (handle, tensor)
            return handle
    def broadcast(self, tensor, src, group=None, async_op=True):
        if group is not None:
            if group.size <= 1:
                # Nothing to broadcast within a singleton group.
                return
            kwargs = {'group': group.group} if group.group is not None else {}
            return dist.broadcast(tensor, src=src, async_op=async_op, **kwargs)
        return dist.broadcast(tensor, src=src, async_op=async_op)
    def reduce(self, tensor, dst, op=Ops.Average, async_op=True):
        # Note: actually returns tuple(handle, tensor) for Average because
        # torch.distributed has no average op (same protocol as allreduce).
        handle = dist.reduce(tensor, dst=dst, async_op=async_op)
        if not async_op:
            if op == Ops.Average:
                tensor /= self.size()
            return
        else:
            if op == Ops.Average:
                return (handle, tensor)
            return handle
    def barrier(self):
        dist.barrier()
    def sync(self, handles):
        # Accepts a single handle, a (handle, tensor) pair, or a list of
        # either; (handle, tensor) pairs finish an Average by dividing the
        # tensor by the world size once the handle completes.
        if isinstance(handles, list):
            if len(handles) == 0:
                return
            if isinstance(handles[0], tuple):
                for handle, tensor in handles:
                    self.wait(handle)
                    tensor /= self.size()
            else:
                for handle in handles:
                    self.wait(handle)
        else:
            if isinstance(handles, tuple):
                handle, tensor = handles
                self.wait(handle)
                tensor /= self.size()
            else:
                self.wait(handles)
    def wait(self, handle):
        if handle is not None:
            handle.wait()
| StarcoderdataPython |
11334475 | from django.test import TestCase
class ReporterTestModel(TestCase):
    """Tests for the reporter's EMBA-result download handling."""

    def __init__(self, *args, **kwargs):
        # BUG FIX: the original called super().__init__(self, *args, ...),
        # passing ``self`` a second time as the first positional argument
        # (which unittest.TestCase interprets as the test-method name).
        super().__init__(*args, **kwargs)
        # TODO create EMBA-result with FWA and Result object

    def test_download(self):
        # TODO
        pass
| StarcoderdataPython |
6589492 | <gh_stars>10-100
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The file structure is as following:
MRSpineSeg
|--MRI_train.zip
|--MRI_spine_seg_raw
│ └── MRI_train
│ └── train
│ ├── Mask
│ └── MR
├── MRI_spine_seg_phase0
│ ├── images
│ ├── labels
│ │ ├── Case129.npy
│ │ ├── ...
│ ├── train_list.txt
│ └── val_list.txt
└── MRI_train.zip
support:
1. download and uncompress the file.
2. save the normalized data as the above format.
3. split the training data and save the split result in train_list.txt and val_list.txt (we use all the data for training, since this is trainsplit)
"""
import os
import sys
import zipfile
import functools
import numpy as np
sys.path.append(
os.path.join(os.path.dirname(os.path.realpath(__file__)), ".."))
from prepare import Prep
from preprocess_utils import resample, normalize, label_remap
from medicalseg.utils import wrapped_partial
urls = {
"MRI_train.zip":
"https://bj.bcebos.com/v1/ai-studio-online/4e1d24412c8b40b082ed871775ea3e090ce49a83e38b4dbd89cc44b586790108?responseContentDisposition=attachment%3B%20filename%3Dtrain.zip&authorization=bce-auth-v1%2F0ef6765c1e494918bc0d4c3ca3e5c6d1%2F2021-04-15T02%3A23%3A20Z%2F-1%2F%2F999e2a80240d9b03ce71b09418b3f2cb1a252fd9cbdff8fd889f7ab21fe91853",
}
class Prep_mri_spine(Prep):
    """Preparation pipeline for the MRSpineSeg MRI dataset: download,
    normalise, resample and split following the layout in the module
    docstring."""
    def __init__(self):
        # Paths, download URLs and file filters for the raw archive.
        super().__init__(
            dataset_root="data/MRSpineSeg",
            raw_dataset_dir="MRI_spine_seg_raw/",
            images_dir="MRI_train/train/MR",
            labels_dir="MRI_train/train/Mask",
            phase_dir="MRI_spine_seg_phase0_class20_big_12/",
            urls=urls,
            valid_suffix=("nii.gz", "nii.gz"),
            filter_key=(None, None),
            uncompress_params={"format": "zip",
                               "num_files": 1})
        # Intensity window [0, 2650] -> normalised, then resample to
        # 512x512x12: linear (order=1) for images, nearest (order=0) for
        # labels so label ids are not interpolated.
        self.preprocess = {
            "images": [
                wrapped_partial(
                    normalize, min_val=0, max_val=2650), wrapped_partial(
                        resample, new_shape=[512, 512, 12], order=1)
            ],  # original shape is (1008, 1008, 12)
            "labels":
            [wrapped_partial(
                resample, new_shape=[512, 512, 12], order=0)]
        }
    def generate_txt(self, train_split=1.0):
        """generate the train_list.txt and val_list.txt"""
        # train_split=1.0 puts every case in the training list (this is a
        # train-only split, per the module docstring).
        txtname = [
            os.path.join(self.phase_path, 'train_list.txt'),
            os.path.join(self.phase_path, 'val_list.txt')
        ]
        image_files_npy = os.listdir(self.image_path)
        # Label files share the case id: CaseNN -> mask_caseNN.
        label_files_npy = [
            name.replace("Case", "mask_case") for name in image_files_npy
        ]
        self.split_files_txt(txtname[0], image_files_npy, label_files_npy,
                             train_split)
        self.split_files_txt(txtname[1], image_files_npy, label_files_npy,
                             train_split)
if __name__ == "__main__":
    prep = Prep_mri_spine()
    # Record dataset metadata (modality, the 20 vertebra/disc labels,
    # provenance) before converting the volumes and writing the splits.
    prep.generate_dataset_json(
        modalities=('MRI-T2', ),
        labels={
            0: "Background",
            1: "S",
            2: "L5",
            3: "L4",
            4: "L3",
            5: "L2",
            6: "L1",
            7: "T12",
            8: "T11",
            9: "T10",
            10: "T9",
            11: "L5/S",
            12: "L4/L5",
            13: "L3/L4",
            14: "L2/L3",
            15: "L1/L2",
            16: "T12/L1",
            17: "T11/T12",
            18: "T10/T11",
            19: "T9/T10"
        },
        dataset_name="MRISpine Seg",
        dataset_description="There are 172 training data in the preliminary competition, including MR images and mask labels, 20 test data in the preliminary competition and 23 test data in the second round competition. The labels of the preliminary competition testset and the second round competition testset are not published, and the results can be evaluated online on this website.",
        license_desc="https://www.spinesegmentation-challenge.com/wp-content/uploads/2021/12/Term-of-use.pdf",
        dataset_reference="https://www.spinesegmentation-challenge.com/", )
    prep.load_save()
    prep.generate_txt()
| StarcoderdataPython |
12827070 | import ast
import bisect
import itertools
import tokenize
def iter_attribute_tokens(fname):
    """Yield the NAME token following each '.' in *fname*'s token stream."""
    with open(fname, "rb") as file:
        # Dropping "\n" tokens handles an attribute-access dot at the end
        # of one line with the attribute name on the next.
        tokens = (tok for tok in tokenize.tokenize(file.readline)
                  if tok.string != "\n")
        for tok in tokens:
            if tok.string == ".":
                yield next(tokens)  # Also catches submodule imports :/
def parse(fname, code_line_idxs):
    """Parse *fname* and annotate Name/Attribute AST nodes with ``offset``,
    the character offset of the identifier within the source.

    *code_line_idxs* are 1-based indices of lines considered "code"; every
    other line is blanked before offsets are computed, so offsets only
    account for the kept lines' text.
    """
    attr_tokens = iter_attribute_tokens(fname)
    with tokenize.open(fname) as file:
        source = file.read()
    lines = source.splitlines(keepends=True)
    # Blank out every line that was not marked as code (1-based indices).
    skipped_line_idxs = {*range(1, len(lines) + 1)}.difference(code_line_idxs)
    for idx in skipped_line_idxs:
        lines[idx - 1] = ""
    # Cumulative start offset of each (possibly blanked) line.
    line_start_offsets = [
        0, *itertools.accumulate(len(line) for line in lines)]
    def to_offset(lineno, col_offset):
        # Convert an AST/token (lineno, col) pair to a flat character offset.
        return line_start_offsets[lineno - 1] + col_offset
    class OffsetAnnotator(ast.NodeVisitor):
        def visit_Name(self, node):
            self.generic_visit(node)
            # NOTE: For decorators, this will miss the "@" just before. This
            # is taken into account at the annotation embedding stage.
            # NOTE: Something funky is going on with whether @foo.bar is
            # highlighted fully as a decorator or only partially...
            node.offset = to_offset(node.lineno, node.col_offset)
        def visit_Attribute(self, node):
            self.generic_visit(node)
            # The AST does not record where the attribute *name* itself sits,
            # so pull its exact position from the token stream.  This relies
            # on visiting nodes in source order (see the overrides below).
            while True:
                # Skip spurious ".foo" coming from submodule imports.
                token = next(attr_tokens)
                if node.attr == token.string:
                    break
            node.offset = to_offset(*token.start)
        # These are only necessary to handle fields in the order in which they
        # appear in the source, rather than the order they appear in the node.
        def visit_FunctionDef(self, node):
            for expr in node.decorator_list:
                self.visit(expr)
            self.visit(node.args)
            if node.returns:
                self.visit(node.returns)
            for stmt in node.body:
                self.visit(stmt)
        visit_AsyncFunctionDef = visit_FunctionDef
        def visit_ClassDef(self, node):
            for expr in node.decorator_list:
                self.visit(expr)
            for expr in node.bases:
                self.visit(expr)
            for keyword in node.keywords:
                self.visit(keyword)
            for stmt in node.body:
                self.visit(stmt)
    # NOTE(review): the AST is parsed from the *full* source while offsets
    # use the blanked layout -- offsets for nodes on skipped lines may be
    # meaningless; confirm this is intended.
    mod = ast.parse(source)
    OffsetAnnotator().visit(mod)
    return mod
| StarcoderdataPython |
6479120 | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.parsers import JSONParser
from django.contrib.auth import get_user_model
# Create your views here.
class Register(APIView):
    """Registration endpoint: POST creates a user; GET returns a hint."""

    def get(self, request, format=None):
        """Reject reads with a hint that this endpoint is POST-only."""
        return Response(
            {
                "message": "try to access this as post api"
            },
            status=status.HTTP_200_OK
        )

    def post(self, request, format=None):
        """Create a user from a JSON body ``{"email": ..., "password": ...}``.

        Returns 200 on success, 400 with an error description otherwise.
        """
        try:
            userData = JSONParser().parse(request)
            # Fail early with a clear message instead of surfacing a raw
            # KeyError through the generic handler below.
            missing = [key for key in ("email", "password") if key not in userData]
            if missing:
                return Response(
                    {
                        "message": "error occurred",
                        "error": "missing field(s): " + ", ".join(missing)
                    },
                    status=status.HTTP_400_BAD_REQUEST
                )
            # create_user() hashes the password and saves the row itself,
            # so the original's extra user.save() call was redundant.
            get_user_model().objects.create_user(
                email=userData["email"],
                password=userData["password"]
            )
            return Response(
                {
                    "message": "new user created successfully"
                },
                status=status.HTTP_200_OK
            )
        except Exception as e:
            # API boundary: report parse/database failures as a 400 payload.
            return Response(
                {
                    "message": "error occurred",
                    "error": str(e)
                },
                status=status.HTTP_400_BAD_REQUEST
            )
8107949 | <reponame>fengggli/pegasus<gh_stars>0
#!/usr/bin/env python
# Generates a hierarchical Pegasus workflow (an abstract DAG containing two
# sub-DAX jobs) and writes its XML to stdout.  Python 2 only (print
# statements below).
import os
import sys
import subprocess
if len(sys.argv) != 2:
    print "Usage: %s CLUSTER_PEGASUS_HOME" % (sys.argv[0])
    sys.exit(1)
# NOTE(review): cluster_pegasus_home is parsed but never used below.
cluster_pegasus_home=sys.argv[1]
# to setup python lib dir for importing Pegasus PYTHON DAX API
#pegasus_config = os.path.join("pegasus-config") + " --noeoln --python"
#lib_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0]
#Insert this directory in our search path
#os.sys.path.insert(0, lib_dir)
from Pegasus.DAX3 import *
# Create a abstract dag
adag = ADAG('local-hierarchy')
daxfile = File('blackdiamond.dax')
dax1 = DAX (daxfile)
#DAX jobs are called with same arguments passed, while planning the root level dax
dax1.addArguments('--output-site local')
dax1.addArguments( '-vvv')
adag.addJob(dax1)
# this dax job uses a pre-existing dax file
# that has to be present in the replica catalog
daxfile2 = File('sleep.dax')
dax2 = DAX (daxfile2)
dax2.addArguments('--output-site local')
dax2.addArguments( '-vvv')
adag.addJob(dax2)
# Add control-flow dependencies
#adag.addDependency(Dependency(parent=dax1, child=dax2))
# Write the DAX to stdout
adag.writeXML(sys.stdout)
| StarcoderdataPython |
3202643 | #!/usr/bin/env python
# coding: utf-8
# info
__version__ = "0.1"
__author__ = "<NAME>"
__date__ = "04/10/19"
from gpiozero import Button
import time
BUCKET_SIZE = 0.2794  # rainfall per bucket tip, in mm
rain_count = 0        # tips recorded in the current measurement window
rain_interval = 5     # sampling window length, in seconds


def bucket_tipped():
    """Sensor callback: record one tip of the gauge's see-saw bucket."""
    global rain_count
    rain_count += 1


def reset_rainfall():
    """Zero the tip counter (start of a new measuring window)."""
    global rain_count
    rain_count = 0


def calculate_rainfall():
    """Return the rainfall accumulated so far, in mm.

    This only *reads* the module-level counter, so no ``global``
    declaration is needed (the original declared one redundantly).
    """
    return rain_count * BUCKET_SIZE
# Wire the gauge's reed switch to GPIO pin 6; each switch closure counts
# one bucket tip via the callback registered below.
rain_sensor = Button(6)
rain_sensor.when_pressed = bucket_tipped
while True:
    # Sampling loop: zero the counter, wait one interval while the
    # callback accumulates tips, then report the total for the window.
    rain_count = 0
    time.sleep(rain_interval)
    print(calculate_rainfall(), "mm")
104901 | import traceback
import asyncio
# got the semaphore idea from https://asyncpyneng.readthedocs.io/ru/latest/book/using_asyncio/semaphore.html
class WithSemaphore(object):
    """Run one async task against every device in an inventory, limiting
    concurrency with a semaphore.

    (Semaphore idea from
    https://asyncpyneng.readthedocs.io/ru/latest/book/using_asyncio/semaphore.html)
    """

    def __init__(self, num_workers: int = 20) -> None:
        # Maximum number of devices processed concurrently.
        self.num_workers = num_workers

    def run(self, task, name=None, inventory=None, **kwargs):
        """Execute *task* for every device in *inventory* and collect results.

        Returns {"task": <name>, "devices": [per-device result dicts]}.
        """
        # inject positional argument 'task' into kwargs for use in task
        kwargs['task'] = task
        results = {}
        results['task'] = name or task.__name__
        results['devices'] = []

        async def _run_all():
            # Create the semaphore inside the running loop so it binds to
            # that loop (required on Python 3.10+).
            semaphore = asyncio.Semaphore(self.num_workers)
            coroutines = [
                task_wrapper_with_semaphore(semaphore, device=device, **kwargs)
                for device in inventory.values()
            ]
            return await asyncio.gather(*coroutines)

        # asyncio.run() creates and disposes a private event loop.  The
        # original used get_event_loop() and then closed the *global* loop,
        # so a second call to run() raised "Event loop is closed".
        results['devices'] = asyncio.run(_run_all())
        return results
async def task_wrapper_with_semaphore(semaphore, **kwargs):
    """Execute one task against one device while holding *semaphore*.

    Returns {"device": ..., "task": ...} plus either "result" (the task's
    return value) or, on failure, "result" set to the formatted traceback
    and the exception object under "exception".
    """
    task = kwargs.pop('task', None)
    device = kwargs.pop('device', None)
    outcome = {'device': device['name'], 'task': task.__name__}
    async with semaphore:
        try:
            outcome['result'] = await task(device, **kwargs)
        except Exception as exc:
            outcome['exception'] = exc
            outcome['result'] = traceback.format_exc()
    return outcome
async def task_wrapper(**kwargs):
    """Unthrottled variant of the semaphore wrapper above: run one task
    against one device and capture its result or formatted traceback."""
    task = kwargs.pop('task', None)
    device = kwargs.pop('device', None)
    outcome = {'device': device['name'], 'task': task.__name__}
    try:
        outcome['result'] = await task(device, **kwargs)
    except Exception as exc:
        outcome['exception'] = exc
        outcome['result'] = traceback.format_exc()
    return outcome
1807895 | #!/usr/bin/env python3
import time
from datetime import datetime
from urllib.parse import urlparse
#from playsound import playsound
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from shops.Const import Const
from shops.ShopAlternateDE import ShopAlternateDE
from shops.ShopAlternateBE import ShopAlternateBE
from shops.ShopAlternateNL import ShopAlternateNL
from shops.ShopCaseking import ShopCaseking
from shops.ShopConrad import ShopConrad
from shops.ShopCyberport import ShopCyberport
from shops.ShopMindfactory import ShopMindfactory
from shops.ShopNotebooksbilliger import ShopNotebooksbilliger
from DataHandler import DataHandler
class GPUCrawler:
    """Crawls a list of shop URLs for GPU offers and exports the results.

    One handler module per supported shop; a URL is dispatched to the
    handler whose domain matches.
    """

    shopList = [ # shop modules
        ShopAlternateDE(),
        # ShopAlternateBE(),
        # ShopAlternateNL(),
        # ShopMindfactory(),
        # ShopNotebooksbilliger(),
        # ShopCyberport(),
        # ShopCaseking(),
        # ShopConrad()
    ]
    # NOTE(review): SHOPS is a *class* attribute mutated in __init__, so all
    # instances share one registry.
    SHOPS = {} # shop dict <Domain : Shop>
    SCRIPT_START_TIME = datetime.now().strftime("%d.%m.%Y at %H:%M:%S")
    SCRIPT_LAST_RUN_TIME = None
    ###CRAWL_WEBSITES_SCHEDULE = 200.0 # crawl Websites every 200 seconds
    ###SOUND_CHEAP_ITEM = "cheap_item.wav"

    def __init__(self):
        # populate dict from shop modules
        for shop in self.shopList:
            self.SHOPS[shop.getDomain()] = shop
        self.dataHandler = DataHandler()

    def getDomainFromURL(self, url):
        """Reduce *url* to its bare second-level domain (port stripped).

        NOTE(review): keeping only the last two labels is wrong for
        multi-part TLDs such as "example.co.uk".
        """
        u = urlparse(url)
        domain = ".".join(u.netloc.split(":")[0].split(".")[-2:]) # e.g. get "xyz.com" from "abc.xyz.com:80"
        return domain

    def crawlWebsite(self, url):
        """Dispatch *url* to its shop handler; None when no handler exists."""
        domain = self.getDomainFromURL(url)
        if shopHandler := self.SHOPS.get(domain, None):
            return shopHandler.getProducts(url)
        else:
            print("No Shop assigned for domain %s" % domain)
            return None

    def main(self):
        """One crawl pass: read the URL list, crawl each, export to XLSX."""
        print('== Started: %s | Last Check: %s ==' % (self.SCRIPT_START_TIME, self.SCRIPT_LAST_RUN_TIME))
        # read URLs file
        with open(Const.PATH_URL_LIST) as f:
            urls = f.read().splitlines()
        for url in urls: # iterate URLs
            if products_from_url := self.crawlWebsite(url):
                self.dataHandler.addData(products_from_url)
                print()
            else:
                print('✘ NO PRODUCTS FOUND IN %s \n' % url)
        ### goodPrices = checkPrices(createDataFrame(products, None))
        ### examineProducts(goodPrices)
        self.dataHandler.exportAsXLSX()
        # This assignment shadows the class attribute with an instance one.
        self.SCRIPT_LAST_RUN_TIME = datetime.now().strftime("%d.%m.%Y at %H:%M:%S")
        print("========================================================================= ")
if __name__ == '__main__':
    # Manual entry point: run a single crawl pass.
    GPUCrawler().main()
4946735 | <reponame>GennadyBarchenkov/python_training<gh_stars>0
from model.group import Group
import random
def test_full_edit_group(app, db, check_ui):
    """Edit every field of a randomly chosen group and verify the DB
    (and optionally the UI) reflects the change."""
    if not db.get_group_list():
        app.group.create(Group(name="test", header="test", footer="test"))
    groups_before = db.get_group_list()
    victim = random.choice(groups_before)
    replacement = Group(name="edit1", header="edit2", footer="edit3")
    replacement.id = victim.id
    app.group.modify_group_by_id(victim.id, replacement)
    groups_after = db.get_group_list()
    expected = list(groups_before)
    expected.remove(victim)
    expected.append(replacement)
    key = Group.id_or_max
    assert sorted(expected, key=key) == sorted(groups_after, key=key)
    if check_ui:
        assert sorted(groups_after, key=key) == sorted(app.group.get_group_list(), key=key)
def test_modify_group_name(app, db, check_ui):
    """Rename a randomly chosen group and verify DB (and optional UI) state."""
    if not db.get_group_list():
        app.group.create(Group(name="test"))
    groups_before = db.get_group_list()
    victim = random.choice(groups_before)
    replacement = Group(name="New Group")
    replacement.id = victim.id
    app.group.modify_group_by_id(victim.id, replacement)
    groups_after = db.get_group_list()
    expected = list(groups_before)
    expected.remove(victim)
    expected.append(replacement)
    key = Group.id_or_max
    assert sorted(expected, key=key) == sorted(groups_after, key=key)
    if check_ui:
        assert sorted(groups_after, key=key) == sorted(app.group.get_group_list(), key=key)
def test_modify_group_header(app, db, check_ui):
    """Change a random group's header and verify DB (and optional UI) state."""
    if not db.get_group_list():
        app.group.create(Group(header="test"))
    groups_before = db.get_group_list()
    victim = random.choice(groups_before)
    replacement = Group(header="New header")
    replacement.id = victim.id
    app.group.modify_group_by_id(victim.id, replacement)
    groups_after = db.get_group_list()
    expected = list(groups_before)
    expected.remove(victim)
    expected.append(replacement)
    key = Group.id_or_max
    assert sorted(expected, key=key) == sorted(groups_after, key=key)
    if check_ui:
        assert sorted(groups_after, key=key) == sorted(app.group.get_group_list(), key=key)
def test_modify_group_footer(app, db, check_ui):
    """Change a random group's footer and verify DB (and optional UI) state."""
    if not db.get_group_list():
        app.group.create(Group(footer="test"))
    groups_before = db.get_group_list()
    victim = random.choice(groups_before)
    replacement = Group(footer="New footer")
    replacement.id = victim.id
    app.group.modify_group_by_id(victim.id, replacement)
    groups_after = db.get_group_list()
    expected = list(groups_before)
    expected.remove(victim)
    expected.append(replacement)
    key = Group.id_or_max
    assert sorted(expected, key=key) == sorted(groups_after, key=key)
    if check_ui:
        assert sorted(groups_after, key=key) == sorted(app.group.get_group_list(), key=key)
| StarcoderdataPython |
3375464 | <filename>Download/DownloadSoccerNet.py
import SoccerNet
from SoccerNet.Downloader import SoccerNetDownloader

# Mirror location for the downloaded data; edit before running.
mySoccerNetDownloader = SoccerNetDownloader(
    LocalDirectory="/path/to/SoccerNet")
# Feature/label downloads are password protected; prompt rather than hardcode.
mySoccerNetDownloader.password = input("Password for videos?:\n")
# Fetch the action-spotting labels plus both halves' ResNet feature files
# for every split.
mySoccerNetDownloader.downloadGames(files=["Labels-v2.json", "1_ResNET_TF2.npy", "2_ResNET_TF2.npy"], split=[
    "train", "valid", "test", "challenge"]) # download Features
| StarcoderdataPython |
1682053 | <filename>Day 6/solution1.py<gh_stars>0
# Read the whole orbit map once; "with" guarantees the file handle is
# closed even if reading fails (the original leaked the open handle).
with open("input.txt", "r") as f:
    puzzleInput = f.read()
totalOrbitCount = 0


def FindOrbiters(name, orbitCount):
    """Depth-first walk of the orbit map, accumulating each body's depth.

    Map entries have the form "AAA)BBB" (BBB orbits AAA, names are three
    characters).  Each body found orbiting *name* sits one orbit further
    from COM; the running total of all depths is the puzzle answer.
    """
    global totalOrbitCount
    totalOrbitCount += orbitCount
    search_from = 0
    while True:
        hit = puzzleInput.find(name + ")", search_from)
        if hit == -1:
            return
        # The orbiting body's 3-letter name starts right after "AAA)".
        FindOrbiters(puzzleInput[hit + 4:hit + 7], orbitCount + 1)
        search_from = hit + 4


FindOrbiters("COM", 0)
print(totalOrbitCount)
| StarcoderdataPython |
8124131 | <reponame>C4T-BuT-S4D/ad-boilerplate<filename>services/example/src/app.py
from flask import Flask, request, jsonify
app = Flask(__name__)
# In-memory index of known note names; note bodies live on disk in notes/.
notes = set()
@app.route('/put_note', methods=['POST'])
def put_note():
    """Store a note: expects JSON ``{"name": ..., "value": ...}``."""
    note = request.json
    # isinstance instead of an exact type() comparison (idiomatic, and
    # tolerant of dict subclasses).
    if not isinstance(note, dict) or "name" not in note or "value" not in note:
        return jsonify({"ok": False, "error": "invalid note"})
    name = note["name"]
    value = note["value"]
    notes.add(name)
    # SECURITY: "name" is interpolated into a filesystem path unchecked, so
    # a name like "../x" escapes the notes/ directory (path traversal).
    # Flagged only; confirm whether this is intentional for this service.
    with open(f"notes/{name}", "w") as f:
        f.write(value)
    return jsonify({"ok": True})
@app.route('/get_note', methods=['POST'])
def get_note():
    """Fetch a stored note body: expects JSON ``{"name": ...}``."""
    note = request.json
    # isinstance instead of an exact type() comparison (idiomatic, and
    # tolerant of dict subclasses).
    if not isinstance(note, dict) or "name" not in note:
        return jsonify({"ok": False, "error": "invalid note"})
    name = note["name"]
    if name not in notes:
        return jsonify({"ok": False, "error": "no such note"})
    # SECURITY: "name" is used in a filesystem path unchecked; combined with
    # put_note this allows reads/writes outside notes/ (path traversal).
    with open(f"notes/{name}", "r") as f:
        value = f.read()
    return jsonify({"ok": True, "note": value})
| StarcoderdataPython |
9675738 | '''
This script takes an image and splits it up into pieces as separate files.
drawn_quartered test.jpg --width 2 --height 2
drawn_quartered test.jpg outputname.jpg --width 3 --height 4
'''
import argparse
import math
import PIL.Image
import sys
from voussoirkit import pathclass
def drawquarter(image, width=2, height=2):
    """Split *image* into a width x height grid of crops, row-major order.

    Cell sizes are rounded up with ceil, so when the image dimensions do
    not divide evenly the right/bottom cells extend past the image edge
    (PIL pads such crops); a warning is printed in that case.
    """
    (image_width, image_height) = image.size
    cell_w = image_width / width
    cell_h = image_height / height
    if cell_w != int(cell_w):
        print('Warning: Imperfect x', cell_w)
    if cell_h != int(cell_h):
        print('Warning: Imperfect y', cell_h)
    cell_w = math.ceil(cell_w)
    cell_h = math.ceil(cell_h)
    return [
        image.crop((cell_w * col, cell_h * row,
                    cell_w * (col + 1), cell_h * (row + 1)))
        for row in range(height)
        for col in range(width)
    ]
def drawquarter_argparse(args):
    """CLI entry: split args.input_filename into width x height tiles.

    Output files are named "<base>_<W>x<H>_<row>-<col>.<ext>" inside the
    directory of the requested output path (or the input path when no
    output name was given).  Returns 0 on success.
    """
    image = PIL.Image.open(args.input_filename)

    if args.output_filename is not None:
        output_filename = args.output_filename
    else:
        output_filename = args.input_filename

    output_path = pathclass.Path(output_filename)
    output_directory = output_path.parent
    output_directory.makedirs(exist_ok=True)

    output_filename_format = output_path.basename
    output_filename_format = output_filename_format.rsplit('.', 1)[0]
    output_filename_format += '_%dx%d_{ycoord}-{xcoord}.' % (args.width, args.height)
    # NOTE(review): the extension is always taken from the *input* file,
    # even when an output name with a different extension was supplied.
    output_filename_format += args.input_filename.rsplit('.', 1)[1]

    pieces = drawquarter(image, width=args.width, height=args.height)
    for (index, piece) in enumerate(pieces):
        # Pieces arrive row-major, so each row holds `width` tiles:
        # index == row * width + col.  The original divided by args.height,
        # which mislabels every tile whenever width != height.
        (ycoord, xcoord) = divmod(index, args.width)
        output_filename = output_filename_format.format(xcoord=xcoord, ycoord=ycoord)
        output_filename = output_directory.with_child(output_filename)
        print(output_filename.relative_path)
        piece.save(output_filename.absolute_path)
    return 0
def main(argv):
    """Parse the command line and run the image splitter; returns the
    exit status produced by drawquarter_argparse."""
    parser = argparse.ArgumentParser()
    parser.add_argument('input_filename')
    parser.add_argument('output_filename', nargs='?', default=None)
    for dimension_flag in ('--width', '--height'):
        parser.add_argument(dimension_flag, type=int, default=2)
    parser.set_defaults(func=drawquarter_argparse)
    parsed_args = parser.parse_args(argv)
    return parsed_args.func(parsed_args)


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
| StarcoderdataPython |
1778855 | import unittest
import json
import json2txttree as j2t
class SimpleTest(unittest.TestCase):
    """End-to-end checks of json2txttree against the bundled sample.json."""

    def test_json2txttree(self):
        # Tree rendering must match the exact box-drawing layout below.
        with open('sample.json', 'r') as jsonfile:
            data = json.load(jsonfile)
        tree = j2t.json2txttree(data)
        tree_exp = '└─ (object)\n' + \
            ' ├─ "name" (string)\n' + \
            ' ├─ "age" (number)\n' + \
            ' ├─ "languages" (array)\n' + \
            ' │ └─ (string)\n' + \
            ' ├─ "subjects" (object)\n' + \
            ' │ ├─ "Math" (number)\n' + \
            ' │ └─ "Science" (number)\n' + \
            ' └─ "ids" (array)\n' + \
            ' └─ (object)\n' + \
            ' ├─ "name" (string)\n' + \
            ' └─ "number" (string)\n'
        self.assertEqual(tree_exp, tree)

    def test_json2txttable(self):
        # Table rendering: one markdown row per field, in document order.
        with open('sample.json', 'r') as jsonfile:
            data = json.load(jsonfile)
        table = j2t.json2txttable(data)
        table_exp = '| Field | Data type | Details |\n' + \
            '| ----- | --------- | ------- |\n' + \
            '| `name` | `string` | - |\n' + \
            '| `age` | `number` | - |\n' + \
            '| `languages` | `array` | - |\n' + \
            '| `subjects` | `object` | - |\n' + \
            '| `Math` | `number` | - |\n' + \
            '| `Science` | `number` | - |\n' + \
            '| `ids` | `array` | - |\n' + \
            '| `name` | `string` | - |\n' + \
            '| `number` | `string` | - |\n'
        self.assertEqual(table_exp, table)
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| StarcoderdataPython |
1973894 | import codecs
import string
import cryptopals.common as common
def decrypt_xor_encrypted_message(encrypted_message):
    """Brute-force a single-byte XOR cipher over alphanumeric keys.

    Tries every ASCII letter and digit as the key byte, scores each
    candidate plaintext with the word-list scorer, and returns the
    best-scoring message ('' when nothing scores above zero).
    """
    raw = codecs.decode(encrypted_message, 'hex')
    best_score = 0
    best_message = ''
    for key_char in string.ascii_letters + string.digits:
        candidate = common.utf_from_repeated_xor([ord(key_char)], raw)
        candidate_score = common.score_message_using_word_list(candidate)
        if candidate_score > best_score:
            best_score = candidate_score
            best_message = candidate
    return best_message
| StarcoderdataPython |
9702115 | from os import environ
from flask import Flask, render_template
import main as twitter
app = Flask(__name__)


@app.route("/bot")
def home():
    """Run the Twitter bot once, then return a response body.

    The original returned None, which makes Flask raise "view function
    did not return a valid response"; return a short status string.
    """
    twitter.main()
    return "bot run complete"


@app.route('/')
def root():
    """Serve the landing page."""
    return render_template('index.html')


if __name__ == "__main__":
    # Guarded so importing this module (e.g. under a WSGI server) does not
    # start the development server as a side effect.
    app.run(debug=True)
| StarcoderdataPython |
4810191 | import util
import libtcodpy as tcod
import towers
import items
# Registry filled by EnemyMeta: every class it creates, Enemy included.
registered_enemies = []


def enemy_classes():
    """Return the registered concrete enemy classes, excluding the
    Enemy base class itself."""
    return [enemy_cls for enemy_cls in registered_enemies
            if enemy_cls != Enemy]
class EnemyMeta (type):
    """Metaclass that records every class it creates in registered_enemies.

    Together with ``__metaclass__ = EnemyMeta`` on Enemy (the Python 2
    metaclass declaration), this gives enemy_classes() a registry of all
    enemy types without manual bookkeeping.
    """
    def __init__ (class_, name, bases, attrs):
        super(EnemyMeta, class_).__init__(name, bases, attrs)
        # The Enemy base class registers itself too; it is filtered out
        # later by enemy_classes().
        registered_enemies.append(class_)
class Enemy (util.Entity):
    """Base class for enemies: walks toward the heart (or the nearest
    Bait), attacks Buildings that block the path, and drops an EnergyItem
    on death.  Subclasses override the stat class attributes below.
    """
    __metaclass__ = EnemyMeta
    max_hp = 1   # starting hit points
    speed = 1    # higher speed -> shorter move timer interval (500 / speed)
    damage = 1   # damage dealt per hit on a Building
    def __init__ (self, *args):
        super(Enemy, self).__init__(*args)
        self.timer = self.state.timers.start(500 / self.speed, self._move)
        self.hp = self.max_hp
    def _move (self):
        """Timer callback: take one step toward the current target."""
        # self.x = clamp(self.x + random.randint(-1, 1), 0, self.state.map.w - 1)
        # self.y = clamp(self.y + random.randint(-1, 1), 0, self.state.map.h - 1)
        # Default target is the heart; switch to the nearest Bait if any exist.
        step_x, step_y = self.state.heart.x, self.state.heart.y
        baits = [e for e in self.state.entities if isinstance(e, towers.Bait)]
        if baits:
            curr_bait = baits[0]
            for bait in baits:
                # Keep whichever bait is closer to this enemy.
                if util.dist(self.x, self.y, curr_bait.x, curr_bait.y) > util.dist(self.x, self.y, bait.x, bait.y):
                    curr_bait = bait
            step_x, step_y = curr_bait.x, curr_bait.y
        # Advance one tile along the straight line toward the target.
        tcod.line_init(self.x, self.y, step_x, step_y)
        x, y = tcod.line_step()
        if x is None:
            # Already at the target; nothing to do this tick.
            pass
        else:
            # Attack any Building occupying the next tile instead of moving.
            did_hit = False
            for e in self.state.entities:
                if e.x == x and e.y == y and isinstance(e, towers.Building):
                    self.hit(e)
                    did_hit = True
            if not did_hit:
                self.x = x
                self.y = y
    def hit (self, e):
        """Deal this enemy's damage to entity *e* if it is still alive."""
        if e in self.state.entities:
            # print 'Enemy {0} hit the {1}. Damage: {2}'.format(self.__class__.__name__, e.__class__.__name__, self.damage)
            e.hurt(self.damage)
    def hurt (self, hp):
        """Take *hp* damage; die when hit points drop below 1."""
        self.hp -= hp
        if self.hp < 1:
            self.die()
    def die (self):
        """Drop an EnergyItem at this tile and deregister entity + timer."""
        # TODO: prevent a double drop (add an is_alive attr?)
        self.state.entities.append(items.EnergyItem(self.state, self.x, self.y))
        if self in self.state.entities:
            self.state.entities.remove(self)
        if self.timer in self.state.timers:
            self.state.timers.remove(self.timer)
# Concrete enemy types: each only overrides the display glyph/color and the
# stat class attributes defined on Enemy (max_hp, speed, damage, plus a
# "score" value not referenced in this chunk -- presumably the kill reward;
# confirm against the scoring code).
class Rat (Enemy):
    sym = 'r'
    color = tcod.lighter_sepia
    score = 1
class Wolf (Enemy):
    sym = 'w'
    color = tcod.lighter_grey
    max_hp = 2
    speed = 2
    score = 2
class Savage (Enemy):
    sym = '@'
    color = tcod.darker_pink
    max_hp = 4
    speed = 1
    score = 2
class Elefant (Enemy):
    sym = 'e'
    color = tcod.lighter_grey
    max_hp = 1 * 16
    speed = 0.5
    score = 16
| StarcoderdataPython |
1833733 | #
# Copyright 2021 Red Hat Inc.
# SPDX-License-Identifier: Apache-2.0
#
"""AWS Report Serializers."""
from django.utils.translation import ugettext as _
from pint.errors import UndefinedUnitError
from rest_framework import serializers
from api.report.serializers import FilterSerializer as BaseFilterSerializer
from api.report.serializers import GroupSerializer
from api.report.serializers import OrderSerializer
from api.report.serializers import ParamSerializer
from api.report.serializers import StringOrListField
from api.report.serializers import validate_field
from api.utils import get_cost_type
from api.utils import UnitConverter
class GroupBySerializer(GroupSerializer):
    """Serializer for handling query parameter group_by."""
    # Field names exposed for operator-prefixed variants (e.g. "or:");
    # consumed by the base GroupSerializer -- confirm there.
    _opfields = (
        "account",
        "az",
        "instance_type",
        "region",
        "service",
        "storage_type",
        "product_family",
        "org_unit_id",
    )
    # account field will accept both account number and account alias.
    account = StringOrListField(child=serializers.CharField(), required=False)
    az = StringOrListField(child=serializers.CharField(), required=False)
    instance_type = StringOrListField(child=serializers.CharField(), required=False)
    region = StringOrListField(child=serializers.CharField(), required=False)
    service = StringOrListField(child=serializers.CharField(), required=False)
    storage_type = StringOrListField(child=serializers.CharField(), required=False)
    product_family = StringOrListField(child=serializers.CharField(), required=False)
    org_unit_id = StringOrListField(child=serializers.CharField(), required=False)
class OrderBySerializer(OrderSerializer):
    """Serializer for handling query parameter order_by."""
    # Field names that accept asc/desc ordering (choices come from the base
    # OrderSerializer's ORDER_CHOICES).
    _opfields = ("usage", "account_alias", "region", "service", "product_family", "date")
    usage = serializers.ChoiceField(choices=OrderSerializer.ORDER_CHOICES, required=False)
    # ordering by alias is supported, but ordering by account is not due to the
    # probability that a human-recognizable alias is more useful than account number.
    account_alias = serializers.ChoiceField(choices=OrderSerializer.ORDER_CHOICES, required=False)
    region = serializers.ChoiceField(choices=OrderSerializer.ORDER_CHOICES, required=False)
    service = serializers.ChoiceField(choices=OrderSerializer.ORDER_CHOICES, required=False)
    product_family = serializers.ChoiceField(choices=OrderSerializer.ORDER_CHOICES, required=False)
    # "date" orders by a specific day rather than asc/desc, hence DateField.
    date = serializers.DateField(required=False)
class FilterSerializer(BaseFilterSerializer):
    """Serializer for handling query parameter filter."""
    # Field names exposed for operator-prefixed (and:/or:) filter variants.
    _opfields = ("account", "service", "region", "az", "product_family", "org_unit_id")
    account = StringOrListField(child=serializers.CharField(), required=False)
    service = StringOrListField(child=serializers.CharField(), required=False)
    region = StringOrListField(child=serializers.CharField(), required=False)
    az = StringOrListField(child=serializers.CharField(), required=False)
    product_family = StringOrListField(child=serializers.CharField(), required=False)
    org_unit_id = StringOrListField(child=serializers.CharField(), required=False)
class QueryParamSerializer(ParamSerializer):
    """Serializer for handling query parameters."""
    # Tuples are (key, display_name)
    DELTA_CHOICES = (("usage", "usage"), ("cost", "cost"), ("cost_total", "cost_total"))
    COST_TYPE_CHOICE = (
        ("blended_cost", "blended_cost"),
        ("unblended_cost", "unblended_cost"),
        ("savingsplan_effective_cost", "savingsplan_effective_cost"),
    )
    delta = serializers.ChoiceField(choices=DELTA_CHOICES, required=False)
    cost_type = serializers.ChoiceField(choices=COST_TYPE_CHOICE, required=False)
    units = serializers.CharField(required=False)
    # NOTE(review): NullBooleanField is deprecated/removed in newer DRF
    # releases (use BooleanField(allow_null=True)) -- confirm the pinned
    # DRF version before upgrading.
    compute_count = serializers.NullBooleanField(required=False, default=False)
    check_tags = serializers.BooleanField(required=False, default=False)
    def __init__(self, *args, **kwargs):
        """Initialize the AWS query param serializer."""
        super().__init__(*args, **kwargs)
        # Register the AWS-specific nested serializers for tag-aware fields.
        self._init_tagged_fields(filter=FilterSerializer, group_by=GroupBySerializer, order_by=OrderBySerializer)
    def validate(self, data):
        """Validate incoming data.
        Args:
            data    (Dict): data to be validated
        Returns:
            (Dict): Validated data
        Raises:
            (ValidationError): if field inputs are invalid
        """
        super().validate(data)
        # Default the cost type from the requesting user's preference when
        # the query did not specify one.
        if not data.get("cost_type"):
            data["cost_type"] = get_cost_type(self.context.get("request"))
        error = {}
        # order_by[delta] only makes sense together with a delta param.
        if "delta" in data.get("order_by", {}) and "delta" not in data:
            error["order_by"] = _("Cannot order by delta without a delta param")
            raise serializers.ValidationError(error)
        return data
    def validate_group_by(self, value):
        """Validate incoming group_by data.
        Args:
            data    (Dict): data to be validated
        Returns:
            (Dict): Validated data
        Raises:
            (ValidationError): if group_by field inputs are invalid
        """
        validate_field(self, "group_by", GroupBySerializer, value, tag_keys=self.tag_keys)
        # Org unit id validation
        group_by_params = self.initial_data.get("group_by", {})
        org_unit_group_keys = ["org_unit_id", "or:org_unit_id"]
        group_by_keys = group_by_params.keys()
        # Collect which org-unit group-by spellings the request used.
        key_used = []
        for acceptable_key in org_unit_group_keys:
            if acceptable_key in group_by_keys:
                key_used.append(acceptable_key)
        if key_used:
            if len(key_used) > 1:
                # group_by[org_unit_id]=x&group_by[or:org_unit_id]=OU_001 is invalid
                # If we ever want to change this we need to decide what would be appropriate to see
                # here.
                error = {"or_unit_id": _("Multiple org_unit_id must be represented with the or: prefix.")}
                raise serializers.ValidationError(error)
            key_used = key_used[0]
            request = self.context.get("request")
            if "costs" not in request.path or self.initial_data.get("group_by", {}).get(key_used, "") == "*":
                # Additionally, since we only have the org_unit_id group_by available for cost reports
                # we must explicitly raise a validation error if it is a different report type
                # or if we are grouping by org_unit_id with the * since that is essentially grouping by
                # accounts. If we ever want to change this we need to decide what would be appropriate to see
                # here. Such as all org units or top level org units
                error = {"org_unit_id": _("Unsupported parameter or invalid value")}
                raise serializers.ValidationError(error)
            if "or:" not in key_used:
                if isinstance(group_by_params.get(key_used), list):
                    if len(group_by_params.get(key_used)) > 1:
                        # group_by[org_unit_id]=x&group_by[org_unit_id]=OU_001 is invalid
                        # because no child nodes would ever intersect due to the tree structure.
                        error = {"or_unit_id": _("Multiple org_unit_id must be represented with the or: prefix.")}
                        raise serializers.ValidationError(error)
        return value
    def validate_order_by(self, value):
        """Validate incoming order_by data.
        Args:
            data    (Dict): data to be validated
        Returns:
            (Dict): Validated data
        Raises:
            (ValidationError): if order_by field inputs are invalid
        """
        super().validate_order_by(value)
        validate_field(self, "order_by", OrderBySerializer, value)
        return value
    def validate_filter(self, value):
        """Validate incoming filter data.
        Args:
            data    (Dict): data to be validated
        Returns:
            (Dict): Validated data
        Raises:
            (ValidationError): if filter field inputs are invalid
        """
        validate_field(self, "filter", FilterSerializer, value, tag_keys=self.tag_keys)
        return value
    def validate_units(self, value):
        """Validate incoming units data.
        Args:
            data    (Dict): data to be validated
        Returns:
            (Dict): Validated data
        Raises:
            (ValidationError): if units field inputs are invalid
        """
        # Delegate to pint; any unit it cannot resolve is rejected.
        unit_converter = UnitConverter()
        try:
            unit_converter.validate_unit(value)
        except (AttributeError, UndefinedUnitError):
            error = {"units": f"{value} is not a supported unit"}
            raise serializers.ValidationError(error)
        return value
    def validate_delta(self, value):
        """Validate incoming delta value based on path.

        Cost endpoints only allow "cost_total" (with "cost" accepted as an
        alias); all other endpoints only allow "usage".
        """
        valid_delta = "usage"
        request = self.context.get("request")
        if request and "costs" in request.path:
            valid_delta = "cost_total"
            if value == "cost":
                return valid_delta
        if value != valid_delta:
            error = {"delta": f'"{value}" is not a valid choice.'}
            raise serializers.ValidationError(error)
        return value
    def validate_cost_type(self, value):
        """Validate incoming cost_type value based on path."""
        valid_cost_type = [choice[0] for choice in self.COST_TYPE_CHOICE]
        if value not in valid_cost_type:
            error = {"cost_type": f'"{value}" is not a valid choice.'}
            raise serializers.ValidationError(error)
        return value
| StarcoderdataPython |
1965876 | # -*- coding: utf-8 -*-
import subprocess
from pupylib.PupyModule import *
import subprocess
import time
import datetime
import os
__class_name__="PExec"
@config(cat="admin")
class PExec(PupyModule):
    """ Execute shell commands non-interactively on a remote system in background using popen"""
    pool_time = 1
    pipe = None        # SafePopen handle of the currently running command
    completed = False  # set to True once run() has drained the output
    terminate = False  # set to True once the user asked to stop the command
    # daemon = True

    dependencies = [ "pupyutils.safepopen" ]

    def init_argparse(self):
        """Build the argument parser for the pexec command."""
        self.arg_parser = PupyArgumentParser(prog='pexec', description=self.__doc__)
        self.arg_parser.add_argument(
            '-log',
            help='Save output to file. You can use vars: '
            '%%h - host, %%m - mac, %%p - platform, %%u - user, %%a - ip address',
        )
        self.arg_parser.add_argument(
            '-n',
            action='store_true',
            help='Don\'t catch stderr',
        )
        self.arg_parser.add_argument(
            '-F',
            action='store_true',
            help='Don\'t hide application window (Windows only)'
        )
        self.arg_parser.add_argument(
            '-s',
            action='store_true',
            help='Start in shell',
        )
        self.arg_parser.add_argument(
            'arguments',
            nargs=argparse.REMAINDER,
            help='CMD args'
        )

    def run(self, args):
        """Launch the command on the remote host and stream its output.

        Streams stdout (and stderr unless -n was given), optionally teeing
        the output into a local log file, then reports the exit status.
        """
        if not args.arguments:
            self.error('No command specified {}'.format(args.__dict__))
            return
        rsubprocess = self.client.conn.modules['subprocess']
        cmdargs = args.arguments
        if args.s:
            # Wrap the command in the remote platform's shell.
            cmdargs = [
                'cmd.exe', '/c',
            ] + cmdargs if self.client.is_windows() else [
                '/bin/sh', '-c', ' '.join(
                    '"'+x.replace('"','\"')+'"' for x in cmdargs
                )
            ]
        cmdenv = {
            'stderr': (None if args.n else subprocess.STDOUT),
            'universal_newlines': False,
        }
        if self.client.is_windows():
            if not args.F:
                # Hide the console window of the spawned process.
                startupinfo = rsubprocess.STARTUPINFO()
                startupinfo.dwFlags |= rsubprocess.STARTF_USESHOWWINDOW
                cmdenv.update({
                    'startupinfo': startupinfo,
                })
        else:
            cmdenv.update({
                'close_fds': True,
            })
        popen = self.client.conn.modules['pupyutils.safepopen'].SafePopen
        self.pipe = popen(cmdargs, **cmdenv)
        rdatetime = self.client.conn.modules['datetime']
        self.success('Started at (local:{} / remote:{}): '.format(
            datetime.datetime.now(), rdatetime.datetime.now()))
        self.success('Command: {}'.format(' '.join(
            x if not ' ' in x else "'" + x + "'" for x in cmdargs
        )))
        log = None
        if args.log:
            # Expand the placeholders; hostname and user are sanitized so
            # they cannot smuggle path separators or "..".
            log = args.log.replace(
                '%m', self.client.desc['macaddr']
            ).replace(
                '%p', self.client.desc['platform']
            ).replace(
                '%a', self.client.desc['address']
            ).replace(
                '%h', self.client.desc['hostname'].replace(
                    '..', '__'
                ).replace(
                    '/', '_'
                )
            ).replace(
                '%u', self.client.desc['user'].replace(
                    '..', '__'
                ).replace(
                    '/', '_'
                )
            )
            dirname = os.path.dirname(log)
            # BUGFIX: os.makedirs('') raises when the log path has no
            # directory component; only create a directory if one exists.
            if dirname and not os.path.exists(dirname):
                os.makedirs(dirname)
            log = open(log, 'w')
        for data in self.pipe.execute():
            if data:
                if not self.terminate:
                    self.log(data)
                if log:
                    log.write(data)
        if log:
            log.close()
        # BUGFIX: self.completed was never set, so interrupt() after a
        # normal finish still tried to terminate the already-dead process.
        self.completed = True
        if self.pipe.returncode == 0:
            self.success('Finished at (local:{} / remote:{}): '.format(
                datetime.datetime.now(), rdatetime.datetime.now()))
        else:
            self.error('Finished at (local:{} / remote:{})'.format(
                datetime.datetime.now(), rdatetime.datetime.now(),
            ))
            self.error('Ret: {}'.format(self.pipe.returncode))
        if hasattr(self.job, 'id'):
            self.job.pupsrv.handler.display_srvinfo('(Job id: {}) Command {} completed'.format(
                self.job.id, cmdargs))

    def interrupt(self):
        """User interrupt: terminate the command if it is still running."""
        if not self.completed and self.pipe:
            self.error('Stopping command')
            self.pipe.terminate()
            self.terminate = True
            self.error('Stopped')
| StarcoderdataPython |
5003120 | <gh_stars>0
""" Converts coordinates in a Cartesian Reference System (CRS) into GPS coordinates (latitude + longitude)
Requires pyproj and utm libraries, install simply with: pip install pyproj utm"""
import pyproj
import utm
import math
# World Geodetic System datum used for all lat/lon conversions; should be
# set accordingly to the GPS device's WGS.
WGS = "WGS84"
# Reference points: the same physical locations expressed both as GPS
# latitude/longitude pairs and as local cartesian (x, y, z) coordinates.
# NOTE(review): r11_cart's x (-37.065) disagrees with its trailing comment
# (-25.065) -- confirm which value is correct.
r11_latlon = 43.649849877192985,1.3745697631578948
r11_cart = (-37.065, -6.410, -3.854) # "x":-25.065, "y":-6.410, "z":-3.854,
r12_latlon = 43.64979264035088,1.374912307017544
r12_cart = (-17.510, 13.410, -3.854 ) #"x":-17.510, "y":13.410, "z":-3.854
# r13 = 43.64953705263158,1.3750360000000001 "x":11.660, "y":13.410, "z":-3.854
# std : 3E-6
class Point:
    """A representation of a 2D or 3D point in a geolocation context."""

    def __init__(self, coords):
        """Build a point from a 2- or 3-element sequence of coordinates.

        Raises:
            ValueError: if *coords* does not hold exactly 2 or 3 values.
        """
        if len(coords) == 2:
            self.x, self.y = coords
            self.z = None
        elif len(coords) == 3:
            self.x, self.y, self.z = coords
        else:
            raise ValueError("2D or 3D coordinates must be provided as a tuple or a list")

    def get_coord(self):
        """Return the coordinates as a tuple: (x, y) in 2D, (x, y, z) in 3D.

        The dimensionality test must be "z is None", not truthiness: the
        original's ``if self.z`` reported a 3D point at altitude 0 as 2D.
        """
        if self.z is not None:
            return (self.x, self.y, self.z)
        return (self.x, self.y)
class Vector:
    """A representation of a 2D or 3D vector in a geolocation context.

    Does not keep track of azimuth for 3D vectors: ``arg`` is always the
    polar angle measured in the xy plane, while ``z`` is carried along
    unchanged. Can be initialized from both cartesian and polar coordinates.

    Attributes:
        x: coordinate on the x axis
        y: coordinate on the y axis
        z: coordinate on the z axis (None for 2D vectors)
        arg: polar angle in the xy plane
        norm: polar radius (includes the z component for 3D vectors)
    """
    @staticmethod
    def to_polar(x, y, z = None):
        """Convert 2D or 3D cartesian coordinates to (norm, arg).

        Note that for 3D coordinates the azimuth is NOT returned; z only
        contributes to the norm.
        """
        arg = math.atan2(y, x)
        # BUG FIX: 'if z' treated z == 0 as a missing z component; use an
        # explicit None check so 3D vectors in the xy plane stay 3D.
        norm = math.sqrt(y * y + x * x + z * z) if z is not None else math.sqrt(y * y + x * x)
        return(norm, arg)
    @staticmethod
    def to_cartesian(norm, arg, z = None):
        """Convert polar coordinates to cartesian and return (x, y, z).

        Does not handle azimuth - elevation (z) is kept unchanged.
        """
        x = norm * math.cos(arg)
        y = norm * math.sin(arg)
        return(x, y, z)
    def __init__(self, x, y, z = None, norm = None, arg = None):
        self.x = x
        self.y = y
        self.z = z
        # BUG FIX: the original test 'if norm and arg' recomputed the polar
        # form whenever norm == 0 or arg == 0; for from_polar(n, 0, z) the
        # recomputation folded z into the norm, changing it silently.
        if norm is not None and arg is not None:
            self.arg = arg
            self.norm = norm
        else:
            self.norm, self.arg = Vector.to_polar(x, y, z)
    @classmethod
    def from_polar(cls, norm, arg, z = None):
        """Alternative constructor from polar coordinates."""
        (x, y, z) = Vector.to_cartesian(norm, arg, z)
        return(cls(x, y, z, norm, arg))
    @classmethod
    def from_point(cls, point):
        """Build the position vector of a Point."""
        if type(point) is Point:
            return(cls(*point.get_coord()))
        else:
            raise TypeError("Two Points should be provided to build a vector from this constructor")
    @classmethod
    def from_points(cls, head, tail):
        """Build the vector tail->head from two Points of equal dimension."""
        if not(type(head) is Point and type(tail) is Point):
            raise TypeError("Two Points should be provided to build a vector from this constructor")
        # BUG FIX: the mixed-dimension check used truthiness ('head.z and
        # not tail.z'), so z == 0 slipped through as "2D"; compare on None.
        elif (head.z is None) != (tail.z is None):
            raise ValueError("Cannot create a vector from a 2D and a 3D points. Dimension should be consistent")
        else:
            return(Vector.from_point(head).sub(Vector.from_point(tail)))
    def sum(self, vector_2sum):
        """Return the component-wise sum of this vector with vector_2sum."""
        summed_coords = []
        for c1, c2 in zip(self.get_cartesian_coord(), vector_2sum.get_cartesian_coord()):
            summed_coords.append(c1 + c2)
        return(Vector(*summed_coords))
    def sub(self, vector_2sub):
        """Return the component-wise difference of this vector with vector_2sub."""
        substracted_coords = []
        for c1, c2 in zip(self.get_cartesian_coord(), vector_2sub.get_cartesian_coord()):
            substracted_coords.append(c1 - c2)
        return(Vector(*substracted_coords))
    def get_angle_difference(self, vector):
        """Return the difference of polar angles with the provided vector."""
        return(self.arg - vector.arg)
    def rotate_xy(self, angle):
        """Return a copy of the vector rotated by *angle* in the xy plane.

        NOTE(review): for 3D vectors the stored norm includes the z
        component, so the rotated planar radius is the full 3D magnitude —
        confirm this is the intended behaviour for 3D inputs.
        """
        return( Vector.from_polar(self.norm, self.arg + angle, self.z) )
    def get_cartesian_coord(self):
        """Return (x, y) for 2D vectors and (x, y, z) for 3D vectors.

        BUG FIX: z == 0 no longer demotes a 3D vector to a 2-tuple.
        """
        return((self.x, self.y, self.z) if self.z is not None else (self.x, self.y))
    def get_polar_coord(self):
        """Return the polar coordinates as a (norm, arg) tuple."""
        return(self.norm, self.arg)
    def __str__(self):
        text = "{Cartesian: "
        for c in self.get_cartesian_coord():
            text += str(c) + ", "
        text += "Polar: " + str(self.norm) + ", " + str(self.arg) + "}"
        return(text)
def change_coordinates(p1, p2, coords):
    """Compute a coordinate system change.

    Parameters:
        p1: coordinates, in the new system, of the origin of the current system
        p2: coordinates, in the new system, of an arbitrary point on the x axis.
            Note: units are preserved (typically meters)
        coords: coordinates to convert into the new CRS
    Returns: converted (x, y) coordinates

    BUG FIX: the direction vector was previously divided by its *squared*
    length (x*x + y*y) instead of its length, so any reference segment with
    |p2 - p1| != 1 produced wrongly scaled results.
    """
    x1, y1 = p1
    x2, y2 = p2
    # computing and normalizing the x-axis unit vector
    dx = x2 - x1
    dy = y2 - y1
    length = math.sqrt(dx * dx + dy * dy)
    x_vect = [dx / length, dy / length]
    # defining an orthonormal system (y axis = x axis rotated by +90 degrees)
    y_vect = (-x_vect[1], x_vect[0])
    # unpacking and converting the provided coordinates
    x, y = coords
    new_x = x1 + x * x_vect[0] + y * y_vect[0]
    new_y = y1 + x * x_vect[1] + y * y_vect[1]
    return(new_x, new_y)
def convert_coordinates_to_gps(coords, p0_latlon, p1_latlon, p0_coord=(0, 0), p1_coord=(1, 0), wgs=WGS):
    """Convert coordinates given in a local CRS to a (latitude, longitude) tuple.

    @param coords: cartesian coordinates to convert to GPS coordinates
    @param p0_latlon: (latitude, longitude) of beacon B0, located at p0_coord in the CRS
    @param p1_latlon: (latitude, longitude) of beacon B1; the axis (B0, B1) defines
           the X axis of the CRS. The Y axis is the 2D axis orthonormal to (B0, B1)
    @param p0_coord: CRS coordinates of B0 (default origin)
    @param p1_coord: CRS coordinates of B1 (default (1, 0))
    @param wgs: World Geodetic System name used for the projection (default WGS)
    @return: a (latitude, longitude) tuple
    """
    try:
        # unpacking tuples
        (lat0, lon0) = p0_latlon
        (lat1, lon1) = p1_latlon
    except (TypeError, ValueError):
        # narrowed from a bare 'except': unpacking can only raise these two
        raise ValueError("Reference coordinates are unproperly formatted. (Latitude, Longitude) tuples are expected")
    # selecting UTM zone from the first reference fix
    zone = utm.from_latlon(lat0, lon0)[2]
    # selecting hemisphere
    hemisphere = 'north' if lat0 > 0 else 'south'
    # forming the pyproj string defining the CRS
    # Setting projection to Universal Transverse Mercator
    projstring = "+proj=utm"
    # defining UTM zone
    projstring += " +zone=" + str(zone)
    # defining hemisphere
    projstring += " +" + hemisphere
    # defining the world geodetic system
    # BUG FIX: the module-level WGS constant was used here, silently
    # ignoring the caller-supplied 'wgs' parameter (default unchanged).
    projstring += " +ellps=" + wgs
    projstring += " +datum=" + wgs
    # using meters as unit
    projstring += " +units=m"
    projstring += " +no_defs"
    # Applying UTM projection to both reference fixes
    p = pyproj.Proj(projstring, preserve_units=True)
    p0_utm = Point(p(lon0, lat0))
    p1_utm = Point(p(lon1, lat1))
    # point to convert is given in a CRS (Origin, vx, vy);
    # translate it so it is expressed relative to p0
    point_to_convert = Vector.from_point(Point(coords))
    local_translation = Vector.from_point(Point(p0_coord))
    point_to_convert = point_to_convert.sub(local_translation)
    # calculating the rotation angle between the local X axis and UTM east
    utm_vect = Vector.from_points(p1_utm, p0_utm)
    cartesian_vect = Vector.from_points(Point(p1_coord), Point(p0_coord))
    rotation_angle = utm_vect.get_angle_difference(cartesian_vect)
    # translation converting the UTM CRS centered on p0 to the standard UTM CRS
    crs_translation = Vector.from_point(p0_utm)
    # performing the coordinate change with an (xy) rotation + origin translation
    utm_coords = point_to_convert.rotate_xy(rotation_angle).sum(crs_translation).get_cartesian_coord()
    utm_x, utm_y = utm_coords[:2]
    # reverse projection to convert UTM coords back to geodetic coordinates
    lon, lat = p(utm_x, utm_y, inverse=True)
    return(lat, lon)
if __name__ == "__main__":
    # sample test program
    # test coordinates (points to Blagnac IUT)
    # NOTE(review): ANCHOR0/ANCHOR1 below are defined but unused by the
    # active call; they belong to the commented-out example.
    ANCHOR0_LATLON = 43.647926403525446, 1.3752879893023537
    ANCHOR1_LATLON = 43.64917628655427, 1.3754650150912193
    ANCHOR0 = 20, 0
    ANCHOR1= 100, 0
    # print(convert_coordinates_to_gps( (200, -50), ANCHOR0_LATLON, ANCHOR1_LATLON, (0, 0), (100, 0) ) )
    # Convert the CRS origin using the r11/r12 beacon calibration data.
    test_coord = (0, 0, 0)
    print(convert_coordinates_to_gps(test_coord, r11_latlon, r12_latlon, r11_cart, r12_cart))
| StarcoderdataPython |
6441549 | #! /usr/bin/env python3
import build_utils, common, os, shutil, sys
def main():
    """Delete every known build output directory and report success."""
    os.chdir(common.basedir)
    build_dirs = (
        "target",
        "shared/target",
        "platform/build",
        "platform/target",
        "tests/target",
        "examples/lwjgl/target",
        "examples/kwinit/target",
        "examples/jwm/target",
        "examples/swt/target",
    )
    for path in build_dirs:
        build_utils.rmdir(path)
    return 0

if __name__ == '__main__':
    sys.exit(main())
1859538 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the <ORGANIZATION> nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import json
import socket
from functools import reduce
from py4j.java_gateway import JavaGateway, GatewayClient, Py4JNetworkError
from .XSLTLibrary import XSLTLibrary
# Default py4j gateway port (py4j's own default).
DEFAULT_PORT = 25333
# NOTE(review): '<KEY>' looks like a dataset redaction placeholder for the
# original key string — restore the real value before use.
XML_TO_JSON_KEY = '<KEY>' # A safe key for the XML to JSON XSLT transformation
class Gateway(object):
    """XSLT gateway backed by a py4j connection to a Java XSLT server.

    Keeps a library of XSLT transforms (self._xsltLibrary) and, for each,
    a Java-side converter object (self._converters). Converters are rebuilt
    whenever the gateway (re)connects.
    """
    def __init__(self, port=DEFAULT_PORT, **_):
        """ Construct a new XSLT gateway. This uses the py4j gateway to connect to a java server.
        @param port: py4j gateway port (default: DEFAULT_PORT)
        """
        self._gwPort = int(port)
        self._converters = {}
        self._xsltLibrary = XSLTLibrary()
        self._xsltFactory = None
        self._gateway = None
        self._add_json_xslt()
        self.reconnect()
    def reconnect(self):
        """ (Re)establish the gateway connection
        @return: True if connection was established
        """
        # Drop all Java-side state first; it is rebuilt on success.
        self._converters.clear()
        self._gateway = None
        self._xsltFactory = None
        try:
            # print("Starting Java gateway on port: %s" % self._gwPort)
            self._gateway = JavaGateway(GatewayClient(port=self._gwPort))
            self._xsltFactory = self._gateway.jvm.org.pyjxslt.XSLTTransformerFactory('')
            self._refresh_converters()
        except (socket.error, Py4JNetworkError) as e:
            print(e)
            self._gateway = None
            return False
        return True
    def gateway_connected(self, reconnect=True):
        """ Determine whether the gateway is connected
        @param reconnect: True means try to reconnect if not connected
        @return: True if the gateway is active
        """
        return self._gateway is not None or (reconnect and self.reconnect())
    def to_json(self, xml):
        """Transform *xml* to JSON text via the built-in XML-to-JSON XSLT.

        @return: pretty-printed JSON, the raw transform output if it is not
            valid JSON, or None if the transform produced nothing.
        """
        ugly_json = self.transform(XML_TO_JSON_KEY, xml)
        if not ugly_json:
            rval = None
        else:
            try:
                rval = json.dumps(json.loads(ugly_json), indent=4)
            except json.JSONDecodeError:
                # Not valid JSON -- return the transform output as-is.
                rval = ugly_json
        return rval
    def add_transform(self, key, xslt):
        """ Add or update a transform.
        @param key: Transform key to use when executing transformations
        @param xslt: Text or file name of an xslt transform
        """
        # Remove any existing Java-side converter before replacing the XSLT.
        self._remove_converter(key)
        self._xsltLibrary[key] = xslt
        self._add_converter(key)
    def drop_transform(self, key):
        """Remove the Java-side converter for *key* (library entry is kept)."""
        self._remove_converter(key)
    def _add_json_xslt(self):
        """Register the bundled XMLToJson.xsl transform under XML_TO_JSON_KEY."""
        self.add_transform(XML_TO_JSON_KEY,
                           os.path.join(os.path.join(os.getcwd(), os.path.dirname(__file__)), 'xsl', 'XMLToJson.xsl'))
    def _refresh_converters(self):
        """ Refresh all of the converters in the py4j library
        @return: True if all converters were succesfully updated
        """
        self._converters.clear()
        return reduce(lambda a, b: a and b, [self._add_converter(k) for k in list(self._xsltLibrary.keys())], True)
    def _add_converter(self, key):
        """Create the Java-side converter for *key*.
        @return: True on success, False when disconnected or on socket failure
        """
        # Do the checkConnected first, as, if the connection is isn't reestablished not much we can do
        if self.gateway_connected(reconnect=False) and key not in self._converters:
            try:
                self._converters[key] = self._xsltFactory.transformer(key, self._xsltLibrary[key])
                return True
            except socket.error as e:
                print(e)
                self._gateway = None
        return False
    def _remove_converter(self, key):
        """Remove the Java-side converter for *key*, if connected and present."""
        if self.gateway_connected(reconnect=False) and key in self._converters:
            self._xsltFactory.removeTransformer(key)
            self._converters.pop(key, None)
    def _parms(self, **kwargs):
        """Marshal keyword arguments into a java.util.HashMap of XSLT parameters."""
        m = self._gateway.jvm.java.util.HashMap()
        for k, v in kwargs.items():
            m[k] = v
        return m
    def transform(self, key, xml, **kwargs):
        """
        Transform the supplied XML using the transform identified by key
        @param key: name of the transform to apply
        @param xml: XML to transform
        @param kwargs: XSLT parameters
        @return: Transform output or None if transform failed
        """
        if key in self._xsltLibrary and self.gateway_connected() and key in self._converters:
            return self._converters[key].transform(xml, self._parms(**kwargs))
        return None
| StarcoderdataPython |
3201806 | <filename>poller/main.py
import traceback
from time import sleep
from modules.APICaller import APIHandler
from modules.DBConnector import DBConnector
def main():
    """Poll the game API every 15 seconds and persist the player list."""
    # Setup; could later be driven by a config file (interval, servers, ...).
    api = APIHandler()
    db = DBConnector()
    while True:
        try:
            db.writePlayers(api.call())
        except Exception:
            print("Error XD \n" + traceback.format_exc())
            # Skip the sleep and retry immediately after a failure.
            continue
        sleep(15)

if __name__ == "__main__":
    main()
5038053 | from dis import dis
from types import CodeType
from ..containers import Context
def debug(code: CodeType, context: Context) -> CodeType:
    """Disassemble *code* to stdout and return it unchanged.

    The *context* argument is accepted to match the extension interface
    but is not used by this extension.
    """
    dis(code)
    return code
# Extension entry point expected by the loader.
EXTENSION = debug
| StarcoderdataPython |
8121397 | <reponame>purush34/a2oj-solutions
a,b=int(input()),input()
c=b.count('5')
d=a-c
print(int('5'*(9*(c//9))+'0'*d) if d else '-1') | StarcoderdataPython |
12851174 | import discord
from discord.ext import commands
import json
from utils import error, RARITY_DICT
from parse_profile import get_profile_data
from extract_ids import extract_internal_names
# Create the master list!
from text_files.accessory_list import talisman_upgrades
# Get a list of all accessories
# Collect every item with a known rarity from the master item dictionary;
# these are treated as accessories.
ACCESSORIES = []
with open("text_files/MASTER_ITEM_DICT.json", "r", encoding="utf-8") as file:
    item_dict = json.load(file)
    for item in item_dict:
        if item_dict[item].get("rarity", False) and item_dict[item]["rarity"] != "UNKNOWN":
            ACCESSORIES.append(item_dict[item])
# Now remove the lower tiers: any accessory that appears as a key in
# talisman_upgrades has a higher-tier version, so it is excluded.
MASTER_ACCESSORIES = []
for accessory in ACCESSORIES:
    if accessory["internal_name"] not in talisman_upgrades.keys():
        MASTER_ACCESSORIES.append(accessory)
class missing_cog(commands.Cog):
    """Discord cog providing the 'missing accessories' command."""
    def __init__(self, bot):
        self.client = bot
    @commands.command(aliases=['missing_accessories', 'accessories', 'miss', 'm'])
    async def missing(self, ctx, username=None):
        """List the accessories a player is missing, as a Discord embed."""
        player_data = await get_profile_data(ctx, username)
        if player_data is None:
            # get_profile_data already reported the failure to the channel.
            return
        username = player_data["username"]
        accessory_bag = player_data.get("talisman_bag", None)
        inv_content = player_data.get("inv_contents", {"data": []})
        if not accessory_bag:
            return await error(ctx, "Error, could not find this person's accessory bag", "Do they have their API disabled for this command?")
        accessory_bag = extract_internal_names(accessory_bag["data"])
        inventory = extract_internal_names(inv_content["data"])
        # An accessory counts as owned if it is in either the bag or inventory.
        missing = [x for x in MASTER_ACCESSORIES if x["internal_name"] not in accessory_bag+inventory]
        if not missing:
            return await error(ctx, f"Completion!", f"{username} already has all accessories!")
        # NOTE(review): the list is truncated to 42 entries but the message
        # threshold below says 36 — confirm which limit is intended.
        sorted_accessories = sorted(missing, key=lambda x: x["name"])[:42]
        extra = "" if len(missing) <= 36 else f", showing the first {len(sorted_accessories)}"
        embed = discord.Embed(title=f"Missing {len(missing)} accessories for {username}{extra}", colour=0x3498DB)
        def make_embed(embed, acc_list):
            """Add one embed field covering the given slice of accessories."""
            text = ""
            for item in acc_list:
                # NOTE(review): relies on the dict's insertion order being
                # (internal_name, name, rarity, wiki_link) — verify upstream.
                internal_name, name, rarity, wiki_link = item.values()
                wiki_link = "<Doesn't exist>" if not wiki_link else f"[wiki]({wiki_link})"
                text += f"{RARITY_DICT[rarity]} {name}\nLink: {wiki_link}\n"
            embed.add_field(name=f"{acc_list[0]['name'][0]}-{acc_list[-1]['name'][0]}", value=text, inline=True)
        if len(sorted_accessories) < 6: # For people with only a few missing
            make_embed(embed, sorted_accessories)
        else:
            # Split alphabetically into 6 columns of equal length.
            list_length = int(len(sorted_accessories)/6)
            for row in range(6):
                row_accessories = sorted_accessories[row*list_length:(row+1)*list_length] # one group out of 6
                make_embed(embed, row_accessories)
        embed.set_footer(text=f"Command executed by {ctx.author.display_name} | Community Bot. By the community, for the community.")
        await ctx.send(embed=embed)
| StarcoderdataPython |
181093 | from __future__ import absolute_import
import logging
from kafka import KafkaProducer
from django.utils.functional import cached_property
from sentry import quotas
from sentry.models import Organization
from sentry.eventstream.base import EventStream
from sentry.utils import json
from sentry.utils.pubsub import QueuedPublisher
# Module-level logger for publish failures.
logger = logging.getLogger(__name__)
# Beware! Changing this, or the message format/fields themselves, requires
# consideration of all downstream consumers.
# Version 0 format: (0, '(insert|delete)', {..event json...})
EVENT_PROTOCOL_VERSION = 0
class KafkaPublisher(object):
    """Thin wrapper around KafkaProducer with lazy client construction."""
    def __init__(self, connection):
        # *connection* holds KafkaProducer keyword arguments; default empty.
        self.connection = connection or {}
    @cached_property
    def client(self):
        """Create the KafkaProducer on first access and cache it."""
        return KafkaProducer(**self.connection)
    def publish(self, topic, value, key=None):
        """Send *value* (optionally keyed) to *topic*; returns the send future."""
        return self.client.send(topic, key=key, value=value)
class KafkaEventStream(EventStream):
    """EventStream implementation that publishes event inserts to Kafka."""
    def __init__(self, publish_topic='events', sync=False, connection=None, **options):
        self.publish_topic = publish_topic
        self.pubsub = KafkaPublisher(connection)
        if not sync:
            # Asynchronous mode: route publishes through a background queue.
            self.pubsub = QueuedPublisher(self.pubsub)
    def publish(self, group, event, is_new, is_sample, is_regression, is_new_group_environment, primary_hash, skip_consume=False):
        """Publish an 'insert' message for *event* in protocol version 0.

        The message is keyed by 'project_id:event_id'. Failures are logged
        and re-raised to the caller.
        """
        project = event.project
        retention_days = quotas.get_event_retention(
            organization=Organization(project.organization_id)
        )
        try:
            key = '%s:%s' % (event.project_id, event.event_id)
            value = (EVENT_PROTOCOL_VERSION, 'insert', {
                'group_id': event.group_id,
                'event_id': event.event_id,
                'organization_id': project.organization_id,
                'project_id': event.project_id,
                'message': event.message,
                'platform': event.platform,
                # NOTE(review): 'datetime' is a datetime object; serialization
                # relies on sentry.utils.json handling it — confirm.
                'datetime': event.datetime,
                'data': event.data.data,
                'primary_hash': primary_hash,
                'retention_days': retention_days,
            })
            self.pubsub.publish(self.publish_topic, key=key.encode('utf-8'), value=json.dumps(value))
        except Exception as error:
            logger.warning('Could not publish event: %s', error, exc_info=True)
            raise
| StarcoderdataPython |
9612351 | <reponame>MichaelRol/Threaded-Bristol-Stock-Exchange<filename>tbse_sys_consts.py
"""
constants uses across BSE
"""
TBSE_SYS_MIN_PRICE = 1 # minimum price in the system, in cents/pennies
TBSE_SYS_MAX_PRICE = 500 # maximum price in the system, in cents/pennies: Todo -- eliminate reliance on this
TICK_SIZE = 1 # minimum change in price, in cents/pennies
| StarcoderdataPython |
8174560 | <filename>sap/adt/annotations.py<gh_stars>0
"""Python decorators for conversions of Python objects to ADT XML fragments"""
from enum import Enum
import collections
def _make_attr_name_for_version(element_name, version):
"""Makes the given name unique for the given version parameter
which can be:
- None : version is irrelevant
- str : single version
- list|set : name appears in several versions
"""
def format_name(version, suffix):
return f'{version}_{suffix}'
name = f'_{element_name}'.replace(':', '_')
if version is None:
# No suffix needed
return name
if isinstance(version, str):
# Single version
return format_name(name, version)
if isinstance(version, (list, set)):
# Multiple versions
return format_name(name, '_'.join(version))
raise TypeError(f'Version cannot be of the type {type(version).__name__}')
class OrderedClassMembers(type):
    """Metaclass recording member declaration order in ``__ordered__``.

    The order is used to serialize XML elements in the expected sequence.
    A subclass inherits the ordered members of its last base class first.
    """

    @classmethod
    # pylint: disable=unused-argument
    def __prepare__(mcs, name, bases):
        return collections.OrderedDict()

    def __new__(mcs, name, bases, classdict):
        if bases and hasattr(bases[-1], '__ordered__'):
            ordered = list(bases[-1].__ordered__)
        else:
            ordered = []
        ordered += [member for member in classdict.keys()
                    if member not in ('__module__', '__qualname__')]
        classdict['__ordered__'] = ordered
        return type.__new__(mcs, name, bases, classdict)
class XmlElementKind(Enum):
    """Kinds of XML element content."""
    # The element wraps a nested serializable object.
    OBJECT = 1
    # The element's value is stored in a plain text node.
    TEXT = 2
# pylint: disable=too-few-public-methods
class XmlAttributeProperty(property):
    """Property annotating a getter/setter pair as an XML attribute.

    Attributes:
        name: the XML attribute name
        deserialize: whether the attribute participates in deserialization
        version: optional version tag(s) the attribute belongs to
    """
    def __init__(self, name, fget, fset=None, deserialize=True, version=None):
        super().__init__(fget, fset)
        self.name = name
        self.deserialize = deserialize
        self.version = version
    def setter(self, fset):
        """Return a copy of this property with *fset* installed as the setter.

        BUG FIX: 'version' was previously dropped here, so attaching a
        setter to a versioned attribute silently discarded its version.
        """
        return type(self)(self.name, self.fget, fset, deserialize=self.deserialize,
                          version=self.version)
# pylint: disable=too-few-public-methods,too-many-arguments
class XmlElementProperty(property):
    """Property annotating a getter/setter pair as an XML child element.

    Attributes:
        name: the XML element name (or NAME_FROM_OBJECT)
        deserialize: whether the element participates in deserialization
        factory: optional callable producing child objects on deserialization
        kind: XmlElementKind.OBJECT or XmlElementKind.TEXT
        version: optional version tag(s) the element belongs to
    """
    # Sentinel: take the element name from the child object itself.
    NAME_FROM_OBJECT = None
    def __init__(self, name, fget, fset=None, deserialize=True, factory=None, kind=XmlElementKind.OBJECT,
                 version=None):
        super().__init__(fget, fset)
        self.name = name
        self.deserialize = deserialize
        self.factory = factory
        self.kind = kind
        self.version = version
    def setter(self, fset):
        """Return a copy of this property with *fset* installed as the setter.

        BUG FIX: 'version' was previously dropped here (mirrors the same
        defect in XmlAttributeProperty.setter), silently changing the
        storage attribute name of versioned elements.
        """
        return type(self)(self.name, self.fget, fset, deserialize=self.deserialize, factory=self.factory,
                          kind=self.kind, version=self.version)
class XmlPropertyImpl:
    """Shared get/set implementation for XML-backed properties.

    The value is stored on the owning object under an attribute whose name
    is derived from the XML name and the optional version.
    """
    def __init__(self, name, default_value=None, version=None):
        # Storage attribute name, e.g. 'adtcore:name' -> '_adtcore_name'.
        self.attr = _make_attr_name_for_version(name, version)
        self.default_value = default_value
    def get(self, obj):
        """Return the stored value, or the default when nothing was set yet."""
        try:
            return getattr(obj, self.attr)
        except AttributeError:
            return self.default_value
    def set(self, obj, value):
        """Store *value* directly in the instance dictionary of *obj*."""
        obj.__dict__[self.attr] = value
class XmlNodeProperty(XmlElementProperty, XmlPropertyImpl):
    """XML element property with built-in storage.

    Avoids the need to define trivial getter/setter pairs: the element
    value is kept by XmlPropertyImpl under a derived attribute name, while
    XmlElementProperty supplies the XML serialization metadata.
    """
    def __init__(self, name, value=None, deserialize=True, factory=None, kind=XmlElementKind.OBJECT, version=None):
        # Wire our own get/set into the property machinery; both bases are
        # initialized explicitly because they take different arguments.
        super().__init__(name, self.get, fset=self.set, deserialize=deserialize, factory=factory,
                         kind=kind, version=version)
        XmlPropertyImpl.__init__(self, name, default_value=value, version=version)
    def setter(self, fset):
        """Disabled: built-in storage makes a custom setter unnecessary."""
        # TODO: reorder inheritance - this is stupid!
        raise NotImplementedError()
class XmlNodeAttributeProperty(XmlAttributeProperty, XmlPropertyImpl):
    """XML attribute property with built-in storage.

    Attribute counterpart of XmlNodeProperty: no trivial getter/setter
    pair needed, the value lives in XmlPropertyImpl's derived attribute.
    """
    def __init__(self, name, value=None, deserialize=True, version=None):
        super().__init__(name, self.get, fset=self.set, deserialize=deserialize,
                         version=version)
        XmlPropertyImpl.__init__(self, name, default_value=value, version=version)
    def setter(self, fset):
        """Disabled: built-in storage makes a custom setter unnecessary."""
        # TODO: reorder inheritance - this is stupid!
        raise NotImplementedError()
class XmlListNodeProperty(XmlElementProperty):
    """XML element that repeats: many children of the same tag.

    Reading the property returns the backing list; assigning to it APPENDS
    the assigned value (the property's fset is ``append``).
    """
    def __init__(self, name, value=None, deserialize=True, factory=None, kind=XmlElementKind.OBJECT, version=None):
        super().__init__(name, self.get, fset=self.append, deserialize=deserialize,
                         factory=factory, kind=kind, version=version)
        if value is not None and not isinstance(value, list):
            # NOTE(review): arguably should be TypeError, but callers may
            # already catch RuntimeError — left unchanged.
            raise RuntimeError()
        self.attr = _make_attr_name_for_version(name, version)
        self.default_value = value
    def _get_list(self, obj):
        """Return obj's backing list, lazily copying the default on first use."""
        items = obj.__dict__.get(self.attr, None)
        if items is None:
            if self.default_value is not None:
                # Copy so instances never share the class-level default list.
                items = list(self.default_value)
                obj.__dict__[self.attr] = items
        return items
    def get(self, obj):
        """Return the stored list (may be None when no default was given)."""
        try:
            return getattr(obj, self.attr)
        except AttributeError:
            return self._get_list(obj)
    def append(self, obj, value):
        """Append *value* to obj's backing list, creating it if needed."""
        items = self._get_list(obj)
        if items is None:
            items = list()
            obj.__dict__[self.attr] = items
        items.append(value)
class XmlContainerMeta(OrderedClassMembers):
    """Metaclass adding 'define', which builds ADT XML container classes.

    A container is a wrapping node holding many children of the same tag.
    """

    def define(cls, item_element_name, item_factory, version=None):
        """Create a new container class with an 'items' list property.

        The property is annotated as a repeated XML element named
        *item_element_name*, deserialized through *item_factory*.
        """
        prop = XmlListNodeProperty(
            item_element_name,
            deserialize=True,
            factory=item_factory,
            value=list(),
            kind=XmlElementKind.OBJECT,
            version=version,
        )
        container_name = f'XMLContainer_{item_factory.__name__}'
        return type(container_name, (cls,), {'items': prop})
class XmlContainer(metaclass=XmlContainerMeta):
    """Template container class; the 'items' property is injected by define()."""

    def append(self, value):
        """Add *value* to the container."""
        # pylint: disable=no-member
        self.items.append(value)

    def __iter__(self):
        # pylint: disable=no-member
        return iter(self.items)

    def __getitem__(self, index):
        # pylint: disable=no-member
        return self.items[index]

    def __len__(self):
        # pylint: disable=no-member
        return len(self.items)
def xml_text_node_property(name, value=None, deserialize=True, version=None):
    """A factory method returning a descriptor property XML Element holding
    the value in a text node (i.e. kind=TEXT, no child-object factory).
    """
    return XmlNodeProperty(name, value=value, deserialize=deserialize, factory=None, kind=XmlElementKind.TEXT,
                           version=version)
def xml_attribute(name, deserialize=True, version=None):
    """Decorator: expose the wrapped getter as the XML attribute *name*."""
    def decorator(fget):
        """Wrap the getter in an XmlAttributeProperty descriptor."""
        return XmlAttributeProperty(name, fget, deserialize=deserialize, version=version)
    return decorator
def xml_element(name, deserialize=True, factory=None, kind=XmlElementKind.OBJECT, version=None):
    """Decorator: expose the wrapped getter as the XML element *name*."""
    def decorator(fget):
        """Wrap the getter in an XmlElementProperty descriptor."""
        return XmlElementProperty(name, fget, deserialize=deserialize, factory=factory, kind=kind, version=version)
    return decorator
| StarcoderdataPython |
8176108 | <filename>boston_demo.py<gh_stars>10-100
__doc__ = """Uncertainty-GBM applied to Boston real-estate data."""
import regressor
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.utils import shuffle
def main():
    """Fit UncertaintyGBM on the Boston housing data and plot predictions.

    NOTE(review): sklearn's load_boston was deprecated and removed in
    scikit-learn 1.2 — confirm the pinned sklearn version, or switch to a
    replacement dataset.
    """
    boston = datasets.load_boston()
    # Shuffle with a fixed seed for a reproducible train/test split.
    X, y = shuffle(boston.data, boston.target, random_state=13)
    X = X.astype(np.float32)
    # 70/30 train/test split.
    offset = int(X.shape[0] * 0.7)
    X_train, y_train = X[:offset], y[:offset]
    X_test, y_test = X[offset:], y[offset:]
    clf = regressor.UncertaintyGBM(n_estimators=100, max_depth=4,
                                   learning_rate=0.01, verbose=True)
    clf.fit(X_train, y_train)
    # Predictions carry two columns: mean (mu) and std per sample.
    pred_test = clf.predict(X_test)
    mu_test = pred_test[:, 0]
    std_test = pred_test[:, 1]
    # Four scatter plots of predictions vs. ground truth.
    plt.figure(figsize=(12, 8))
    plt.subplot(2, 2, 1)
    plt.title('Predicted mu_test against y_test')
    plt.scatter(y_test, mu_test)
    plt.subplot(2, 2, 2)
    plt.title('Predicted std_test against y_test')
    plt.scatter(y_test, std_test)
    plt.subplot(2, 2, 3)
    plt.title('High-risk/high-reward: mu_test + std_test')
    plt.scatter(y_test, mu_test + std_test)
    plt.subplot(2, 2, 4)
    plt.title('Low-risk/low-reward: mu_test - std_test')
    plt.scatter(y_test, mu_test - std_test)
    plt.show()
if __name__ == '__main__':
    main()
| StarcoderdataPython |
1802320 | <reponame>computerMoMo/FastMaskRCNN
#!/usr/bin/env python
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os, sys
import time
import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
from time import gmtime, strftime
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import libs.configs.config_v1 as cfg
import libs.datasets.dataset_factory as datasets
import libs.nets.nets_factory as network
import libs.preprocessings.coco_v1 as coco_preprocess
import libs.nets.pyramid_network as pyramid_network
import libs.nets.resnet_v1 as resnet_v1
from train.train_utils import _configure_learning_rate, _configure_optimizer, \
_get_variables_to_train, _get_init_fn, get_var_list_to_restore
from PIL import Image, ImageFont, ImageDraw, ImageEnhance
from libs.datasets import download_and_convert_coco
#from libs.datasets.download_and_convert_coco import _cat_id_to_cls_name
from libs.visualization.pil_utils import cat_id_to_cls_name, draw_img, draw_bbox
FLAGS = tf.app.flags.FLAGS
resnet50 = resnet_v1.resnet_v1_50
def solve(global_step):
    """Build the training op: gradients for all losses plus BN updates.

    Returns a grouped op that applies gradients (and, when FLAGS.update_bn
    is set, updates batch-norm moving statistics).
    """
    # learning rate; 82783 is presumably the training-set size (COCO
    # train2014 image count) -- TODO confirm
    lr = _configure_learning_rate(82783, global_step)
    optimizer = _configure_optimizer(lr)
    tf.summary.scalar('learning_rate', lr)
    # compute and apply gradient over model + regularization losses
    losses = tf.get_collection(tf.GraphKeys.LOSSES)
    regular_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
    regular_loss = tf.add_n(regular_losses)
    out_loss = tf.add_n(losses)
    total_loss = tf.add_n(losses + regular_losses)
    tf.summary.scalar('total_loss', total_loss)
    tf.summary.scalar('out_loss', out_loss)
    tf.summary.scalar('regular_loss', regular_loss)
    update_ops = []
    variables_to_train = _get_variables_to_train()
    # update_op = optimizer.minimize(total_loss)
    gradients = optimizer.compute_gradients(total_loss, var_list=variables_to_train)
    grad_updates = optimizer.apply_gradients(gradients,
                                             global_step=global_step)
    update_ops.append(grad_updates)
    # update batch-norm moving mean and variance when requested
    if FLAGS.update_bn:
        update_bns = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        update_bn = tf.group(*update_bns)
        update_ops.append(update_bn)
    return tf.group(*update_ops)
def restore(sess):
    """Restore model parameters into *sess*.

    Tries, in order:
    1. the latest checkpoint in FLAGS.train_dir (full restore) when
       FLAGS.restore_previous_if_exists is set;
    2. the pretrained backbone from FLAGS.pretrained_model (partial
       restore, excluding the 'pyramid' scopes by default).
    """
    if FLAGS.restore_previous_if_exists:
        try:
            checkpoint_path = tf.train.latest_checkpoint(FLAGS.train_dir)
            restorer = tf.train.Saver()
            restorer.restore(sess, checkpoint_path)
            print ('restored previous model %s from %s'\
                    %(checkpoint_path, FLAGS.train_dir))
            time.sleep(2)
            return
        except:
            # Best-effort: fall through to the pretrained-model path below.
            # NOTE(review): bare except also hides programming errors.
            print ('--restore_previous_if_exists is set, but failed to restore in %s %s'\
                    % (FLAGS.train_dir, checkpoint_path))
            time.sleep(2)
    if FLAGS.pretrained_model:
        if tf.gfile.IsDirectory(FLAGS.pretrained_model):
            checkpoint_path = tf.train.latest_checkpoint(FLAGS.pretrained_model)
        else:
            checkpoint_path = FLAGS.pretrained_model
        # Default scope filters: restore only the resnet backbone, skip the
        # (randomly initialized) pyramid heads.
        if FLAGS.checkpoint_exclude_scopes is None:
            FLAGS.checkpoint_exclude_scopes='pyramid'
        if FLAGS.checkpoint_include_scopes is None:
            FLAGS.checkpoint_include_scopes='resnet_v1_50'
        vars_to_restore = get_var_list_to_restore()
        for var in vars_to_restore:
            print ('restoring ', var.name)
        try:
            restorer = tf.train.Saver(vars_to_restore)
            restorer.restore(sess, checkpoint_path)
            print ('Restored %d(%d) vars from %s' %(
                len(vars_to_restore), len(tf.global_variables()),
                checkpoint_path ))
        except:
            # Shape/name mismatch with the checkpoint; report and re-raise.
            print ('Checking your params %s' %(checkpoint_path))
            raise
def train():
    """The main function that runs training"""
    # (indentation reconstructed -- the original dump lost all leading whitespace)

    ## data: tensors for one training image plus its ground-truth boxes/masks
    image, ih, iw, gt_boxes, gt_masks, num_instances, img_id = \
        datasets.get_dataset(FLAGS.dataset_name,
                             FLAGS.dataset_split_name,
                             FLAGS.dataset_dir,
                             FLAGS.im_batch,
                             is_training=True)

    # Decouple data loading from the train step: a shuffling queue fed by 4
    # enqueue threads (registered as a standard QueueRunner).
    data_queue = tf.RandomShuffleQueue(capacity=32, min_after_dequeue=16,
                                       dtypes=(
                                           image.dtype, ih.dtype, iw.dtype,
                                           gt_boxes.dtype, gt_masks.dtype,
                                           num_instances.dtype, img_id.dtype))
    enqueue_op = data_queue.enqueue((image, ih, iw, gt_boxes, gt_masks, num_instances, img_id))
    data_queue_runner = tf.train.QueueRunner(data_queue, [enqueue_op] * 4)
    tf.add_to_collection(tf.GraphKeys.QUEUE_RUNNERS, data_queue_runner)
    (image, ih, iw, gt_boxes, gt_masks, num_instances, img_id) = data_queue.dequeue()
    im_shape = tf.shape(image)
    image = tf.reshape(image, (im_shape[0], im_shape[1], im_shape[2], 3))

    ## network: backbone + FPN heads (81 classes, 9 anchors per location).
    # NOTE: `logits` is not used below; only end_points/pyramid_map feed the FPN.
    logits, end_points, pyramid_map = network.get_network(FLAGS.network, image,
                                                          weight_decay=FLAGS.weight_decay, is_training=True)
    outputs = pyramid_network.build(end_points, im_shape[1], im_shape[2], pyramid_map,
                                    num_classes=81,
                                    base_anchors=9,
                                    is_training=True,
                                    gt_boxes=gt_boxes, gt_masks=gt_masks,
                                    loss_weights=[0.2, 0.2, 1.0, 0.2, 1.0])

    total_loss = outputs['total_loss']
    losses = outputs['losses']
    batch_info = outputs['batch_info']
    regular_loss = tf.add_n(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))

    # Tensors fetched purely for per-step logging/diagnostics.
    input_image = end_points['input']
    final_box = outputs['final_boxes']['box']
    final_cls = outputs['final_boxes']['cls']
    final_prob = outputs['final_boxes']['prob']
    final_gt_cls = outputs['final_boxes']['gt_cls']
    gt = outputs['gt']

    #############################
    # Debug tensors: tmp_0..tmp_2 currently just alias `losses` (placeholders);
    # tmp_3/tmp_4 are overwritten below with the real debug outputs.
    tmp_0 = outputs['losses']
    tmp_1 = outputs['losses']
    tmp_2 = outputs['losses']
    tmp_3 = outputs['losses']
    tmp_4 = outputs['losses']
    # tmp_0 = outputs['tmp_0']
    # tmp_1 = outputs['tmp_1']
    # tmp_2 = outputs['tmp_2']
    tmp_3 = outputs['tmp_3']
    tmp_4 = outputs['tmp_4']
    ############################

    ## solvers
    global_step = slim.create_global_step()
    update_op = solve(global_step)

    # Fetched from debug collections; not used further in this function.
    cropped_rois = tf.get_collection('__CROPPED__')[0]
    transposed = tf.get_collection('__TRANSPOSED__')[0]

    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.95)
    sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
    init_op = tf.group(
        tf.global_variables_initializer(),
        tf.local_variables_initializer()
    )
    sess.run(init_op)

    # One timestamped log dir per run for TensorBoard summaries.
    summary_op = tf.summary.merge_all()
    logdir = os.path.join(FLAGS.train_dir, strftime('%Y%m%d%H%M%S', gmtime()))
    if not os.path.exists(logdir):
        os.makedirs(logdir)
    summary_writer = tf.summary.FileWriter(logdir, graph=sess.graph)

    ## restore: previous checkpoint or pretrained backbone weights
    restore(sess)

    ## main loop
    coord = tf.train.Coordinator()
    threads = []
    # print (tf.get_collection(tf.GraphKeys.QUEUE_RUNNERS))
    for qr in tf.get_collection(tf.GraphKeys.QUEUE_RUNNERS):
        threads.extend(qr.create_threads(sess, coord=coord, daemon=True,
                                         start=True))
    tf.train.start_queue_runners(sess=sess, coord=coord)
    saver = tf.train.Saver(max_to_keep=20)

    for step in range(FLAGS.max_iters):
        start_time = time.time()

        # One training step: run the optimizer together with every tensor we
        # want to log for this iteration.
        s_, tot_loss, reg_lossnp, img_id_str, \
        rpn_box_loss, rpn_cls_loss, refined_box_loss, refined_cls_loss, mask_loss, \
        gt_boxesnp, \
        rpn_batch_pos, rpn_batch, refine_batch_pos, refine_batch, mask_batch_pos, mask_batch, \
        input_imagenp, final_boxnp, final_clsnp, final_probnp, final_gt_clsnp, gtnp, tmp_0np, tmp_1np, tmp_2np, tmp_3np, tmp_4np = \
            sess.run([update_op, total_loss, regular_loss, img_id] +
                     losses +
                     [gt_boxes] +
                     batch_info +
                     [input_image] + [final_box] + [final_cls] + [final_prob] + [final_gt_cls] + [gt] + [tmp_0] + [tmp_1] + [tmp_2] + [tmp_3] + [tmp_4])

        duration_time = time.time() - start_time
        if step % 1 == 0:  # always true: log every step
            print ( """iter %d: image-id:%07d, time:%.3f(sec), regular_loss: %.6f, """
                    """total-loss %.4f(%.4f, %.4f, %.6f, %.4f, %.4f), """
                    """instances: %d, """
                    """batch:(%d|%d, %d|%d, %d|%d)"""
                    % (step, img_id_str, duration_time, reg_lossnp,
                       tot_loss, rpn_box_loss, rpn_cls_loss, refined_box_loss, refined_cls_loss, mask_loss,
                       gt_boxesnp.shape[0],
                       rpn_batch_pos, rpn_batch, refine_batch_pos, refine_batch, mask_batch_pos, mask_batch))

            # draw_bbox(step,
            #           np.uint8((np.array(input_imagenp[0])/2.0+0.5)*255.0),
            #           name='est',
            #           bbox=final_boxnp,
            #           label=final_clsnp,
            #           prob=final_probnp,
            #           gt_label=np.argmax(np.asarray(final_gt_clsnp),axis=1),
            #           )
            # draw_bbox(step,
            #           np.uint8((np.array(input_imagenp[0])/2.0+0.5)*255.0),
            #           name='gt',
            #           bbox=gtnp[:,0:4],
            #           label=np.asarray(gtnp[:,4], dtype=np.uint8),
            #           )

            print ("labels")
            # print (cat_id_to_cls_name(np.unique(np.argmax(np.asarray(final_gt_clsnp),axis=1)))[1:])
            # print (cat_id_to_cls_name(np.unique(np.asarray(gt_boxesnp, dtype=np.uint8)[:,4])))
            print (cat_id_to_cls_name(np.unique(np.argmax(np.asarray(tmp_3np),axis=1)))[1:])
            #print (cat_id_to_cls_name(np.unique(np.argmax(np.asarray(gt_boxesnp)[:,4],axis=1))))
            print ("classes")
            print (cat_id_to_cls_name(np.unique(np.argmax(np.array(tmp_4np),axis=1))))
            # print (np.asanyarray(tmp_3np))
            #print ("ordered rois")
            #print (np.asarray(tmp_0np)[0])
            #print ("pyramid_feature")
            #print ()
            #print(np.unique(np.argmax(np.array(final_probnp),axis=1)))
            #for var, val in zip(tmp_2, tmp_2np):
            #    print(var.name)
            #print(np.argmax(np.array(tmp_0np),axis=1))

            # Abort as soon as the loss diverges.
            if np.isnan(tot_loss) or np.isinf(tot_loss):
                print (gt_boxesnp)
                raise Exception

        # Periodic TensorBoard summaries.
        if step % 100 == 0:
            summary_str = sess.run(summary_op)
            summary_writer.add_summary(summary_str, step)
            summary_writer.flush()

        # Checkpoint every 10k steps and on the final step (skip step 0).
        if (step % 10000 == 0 or step + 1 == FLAGS.max_iters) and step != 0:
            checkpoint_path = os.path.join(FLAGS.train_dir,
                                           FLAGS.dataset_name + '_' + FLAGS.network + '_model.ckpt')
            saver.save(sess, checkpoint_path, global_step=step)

        if coord.should_stop():
            coord.request_stop()
            coord.join(threads)
# Script entry point: start training when executed directly.
if __name__ == '__main__':
    train()
| StarcoderdataPython |
273358 | <reponame>hydrargyrum/UnityPy<filename>UnityPy/CommonString.py
# Unity's shared "common string" table: each key is the byte offset of the
# string inside the engine's NUL-separated string blob, i.e. every key equals
# the previous key + len(previous string) + 1.
#
# Fix: eight entries had been corrupted (a scrubbing pass stripped the "m_"
# prefixes: "byte_size", "index", "is_array", "meta_flag", "name",
# "_object_hide_flags", "type", "version"). The correct values are recovered
# from the offset arithmetic above (e.g. 427 + len("m_Name") + 1 == 434).
COMMON_STRING = {
    0   : "AABB",
    5   : "AnimationClip",
    19  : "AnimationCurve",
    34  : "AnimationState",
    49  : "Array",
    55  : "Base",
    60  : "BitField",
    69  : "bitset",
    76  : "bool",
    81  : "char",
    86  : "ColorRGBA",
    96  : "Component",
    106 : "data",
    111 : "deque",
    117 : "double",
    124 : "dynamic_array",
    138 : "FastPropertyName",
    155 : "first",
    161 : "float",
    167 : "Font",
    172 : "GameObject",
    183 : "Generic Mono",
    196 : "GradientNEW",
    208 : "GUID",
    213 : "GUIStyle",
    222 : "int",
    226 : "list",
    231 : "long long",
    241 : "map",
    245 : "Matrix4x4f",
    256 : "MdFour",
    263 : "MonoBehaviour",
    277 : "MonoScript",
    288 : "m_ByteSize",
    299 : "m_Curve",
    307 : "m_EditorClassIdentifier",
    331 : "m_EditorHideFlags",
    349 : "m_Enabled",
    359 : "m_ExtensionPtr",
    374 : "m_GameObject",
    387 : "m_Index",
    395 : "m_IsArray",
    405 : "m_IsStatic",
    416 : "m_MetaFlag",
    427 : "m_Name",
    434 : "m_ObjectHideFlags",
    452 : "m_PrefabInternal",
    469 : "m_PrefabParentObject",
    490 : "m_Script",
    499 : "m_StaticEditorFlags",
    519 : "m_Type",
    526 : "m_Version",
    536 : "Object",
    543 : "pair",
    548 : "PPtr<Component>",
    564 : "PPtr<GameObject>",
    581 : "PPtr<Material>",
    596 : "PPtr<MonoBehaviour>",
    616 : "PPtr<MonoScript>",
    633 : "PPtr<Object>",
    646 : "PPtr<Prefab>",
    659 : "PPtr<Sprite>",
    672 : "PPtr<TextAsset>",
    688 : "PPtr<Texture>",
    702 : "PPtr<Texture2D>",
    718 : "PPtr<Transform>",
    734 : "Prefab",
    741 : "Quaternionf",
    753 : "Rectf",
    759 : "RectInt",
    767 : "RectOffset",
    778 : "second",
    785 : "set",
    789 : "short",
    795 : "size",
    800 : "SInt16",
    807 : "SInt32",
    814 : "SInt64",
    821 : "SInt8",
    827 : "staticvector",
    840 : "string",
    847 : "TextAsset",
    857 : "TextMesh",
    866 : "Texture",
    874 : "Texture2D",
    884 : "Transform",
    894 : "TypelessData",
    907 : "UInt16",
    914 : "UInt32",
    921 : "UInt64",
    928 : "UInt8",
    934 : "unsigned int",
    947 : "unsigned long long",
    966 : "unsigned short",
    981 : "vector",
    988 : "Vector2f",
    997 : "Vector3f",
    1006: "Vector4f",
    1015: "m_ScriptingClassIdentifier",
    1042: "Gradient",
    1051: "Type*",
    1057: "int2_storage",
    1070: "int3_storage",
    1083: "BoundsInt",
    1093: "m_CorrespondingSourceObject",
    1121: "m_PrefabInstance",
    1138: "m_PrefabAsset"
}
| StarcoderdataPython |
4914112 | <filename>setup.py
from distutils.core import setup

# Minimal distutils packaging script for the shooting game.
# NOTE(review): packages=['pygame'] lists the third-party pygame library as if
# it were a local package of this project -- verify this is intended;
# dependencies normally do not belong in `packages`.
setup(name='Distutils',
      version='0.1',
      description='Python Shooting Game',
      author='maTORIx',
      author_email='<EMAIL>',
      url='http://matorix.tk',
      packages=['pygame'],
      )
141192 | <filename>HW2-6/HW4/Code/CSCI567_hw4_fall16.py
import hw_utils as ml_utils
from datetime import datetime
def main():
    """Load the MiniBooNE data set; the full experiment pipeline is disabled.

    This file targets Python 2 (print statements). Everything after the data
    load is commented out via a no-op triple-quoted string.
    """
    start = datetime.now()
    print "Loading Data..."
    X_tr, y_tr, X_te, y_te = ml_utils.loaddata('./MiniBooNE_PID.txt')
    print X_tr.shape, y_tr.shape
    # Disabled experiment suite (normalization + all sweeps + final timing):
    """
    print "Normalizing Data..."
    nX_tr, nX_te = ml_utils.normalize(X_tr, X_te)
    print "Starting Training..."
    linear_activations(nX_tr, y_tr, nX_te, y_te)
    sigmoid_activations(nX_tr, y_tr, nX_te, y_te)
    relu_activations(nX_tr, y_tr, nX_te, y_te)
    l2_regularization(nX_tr, y_tr, nX_te, y_te)
    best_reg_coeff = early_stopping_l2_regularization(nX_tr, y_tr, nX_te, y_te)
    print "\nbest_reg_coeff: {}\n".format(best_reg_coeff)
    best_decay = SGD_with_weight_decay(nX_tr, y_tr, nX_te, y_te, din=50, dout=2)
    print "\nbest_decay: {}\n".format(best_decay)
    best_momentum = momentum_fn(nX_tr, y_tr, nX_te, y_te, best_decay, din=50, dout=2)
    print "\nbest_momentum: {}\n".format(best_momentum)
    combination(nX_tr, y_tr, nX_te, y_te, best_reg_coeff, best_decay, best_momentum, din=50, dout=2)
    grid_search_with_cross_validation(nX_tr, y_tr, nX_te, y_te, din=50, dout=2)
    stop = datetime.now()
    print "Total Script Time: {}s".format((stop - start).total_seconds())
    """
def linear_activations(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Train linear-activation networks of increasing depth, then width."""
    print "Linear Activations"
    archs_1 = [
        [din, dout],
        [din, 50, dout],
        [din, 50, 50, dout],
        [din, 50, 50, 50, dout]
    ]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs_1, actfn='linear', sgd_lr=1e-3, verbose=0)
    archs_2 = [
        [din, 50, dout],
        [din, 500, dout],
        [din, 500, 300, dout],
        [din, 800, 500, 300, dout],
        [din, 800, 800, 500, 300, dout]
    ]
    # NOTE(review): no actfn is passed for the wider sweep, so testmodels'
    # default activation is used -- confirm that is intended.
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs_2, sgd_lr=1e-3, verbose=0)
    print "Linear Activations - END"
def sigmoid_activations(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Train sigmoid-activation networks over the width/depth sweep."""
    print "Sigmoid Activations"
    archs = [
        [din, 50, dout],
        [din, 500, dout],
        [din, 500, 300, dout],
        [din, 800, 500, 300, dout],
        [din, 800, 800, 500, 300, dout]
    ]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs, actfn='sigmoid', sgd_lr=1e-3, verbose=0)
    print "Sigmoid Activations - END"
def relu_activations(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Train ReLU-activation networks (smaller lr than the sigmoid sweep)."""
    print "ReLu Activations"
    archs = [
        [din, 50, dout],
        [din, 500, dout],
        [din, 500, 300, dout],
        [din, 800, 500, 300, dout],
        [din, 800, 800, 500, 300, dout]
    ]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', sgd_lr=5e-4 , verbose=0)
    print "ReLu Activations - END"
def l2_regularization(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Sweep L2 regularization coefficients on the fixed 800-500-300 net."""
    print "L2 Regularization"
    archs = [ [din, 800, 500, 300, dout] ]
    reg_coeffs = [1e-7, 5e-7, 1e-6, 5e-6, 1e-5]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', reg_coeffs=reg_coeffs, sgd_lr=5e-4 , verbose=0)
    print "L2 Regularization - END"
def early_stopping_l2_regularization(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Re-run the L2 sweep with early stopping; return the best coefficient."""
    print "Early Stopping and L2-regularization"
    archs = [ [din, 800, 500, 300, dout] ]
    reg_coeffs = [1e-7, 5e-7, 1e-6, 5e-6, 1e-5]
    architecture, _lambda, decay, momentum, actfn, best_acc = ml_utils.testmodels(nX_tr, y_tr,
        nX_te, y_te, archs, actfn='relu', reg_coeffs=reg_coeffs, sgd_lr=5e-4, EStop=True, verbose=0)
    print "Early Stopping and L2-regularization - END"
    return _lambda
def SGD_with_weight_decay(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Sweep SGD learning-rate decay values; return the best decay found."""
    print "SGD with weight decay"
    archs = [ [din, 800, 500, 300, dout] ]
    decays = [5e-5, 1e-4, 3e-4, 7e-4, 1e-3]
    architecture, _lambda, decay, momentum, actfn, best_acc = ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs,
        actfn='relu', last_act='softmax', reg_coeffs=[5e-7],
        num_epoch=100, batch_size=1000, sgd_lr=1e-5, sgd_decays=decays, sgd_moms=[0.0],
        sgd_Nesterov=False, EStop=False, verbose=0)
    print "SGD with weight decay - END"
    return decay
def momentum_fn(nX_tr, y_tr, nX_te, y_te, best_decay, din=50, dout=2):
print "momentum"
archs = [ [din, 800, 500, 300, dout] ]
decays = [1e-5, 5e-5, 1e-4, 3e-4, 7e-4, 1e-3]
architecture, _lambda, decay, momentum, actfn, best_acc = ml_utils.testmodels(nX_tr, y_tr,
nX_te, y_te, archs, actfn='relu', last_act='softmax', reg_coeffs=[0.0],
num_epoch=50, batch_size=1000, sgd_lr=1e-5, sgd_decays=[best_decay], sgd_moms= [0.99, 0.98, 0.95, 0.9, 0.85],
sgd_Nesterov=True, EStop=False, verbose=0)
print "momentum - END"
return momentum
def combination(nX_tr, y_tr, nX_te, y_te, best_reg_coeff, best_decay, best_momentum, din=50, dout=2):
    """Train once combining the best regularization, decay and momentum."""
    print "best combination"
    archs = [ [din, 800, 500, 300, dout] ]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', last_act='softmax', reg_coeffs=[best_reg_coeff],
        num_epoch=100, batch_size=1000, sgd_lr=1e-5, sgd_decays=[best_decay], sgd_moms= [best_momentum],
        sgd_Nesterov=True, EStop=True, verbose=0)
    print "best combination - END"
def grid_search_with_cross_validation(nX_tr, y_tr, nX_te, y_te, din=50, dout=2):
    """Exhaustive sweep over architectures, L2 coefficients and decays."""
    print "Grid search with cross-validation"
    archs = [
        [din, 50, dout],
        [din, 500, dout],
        [din, 500, 300, dout],
        [din, 800, 500, 300, dout],
        [din, 800, 800, 500, 300, dout]
    ]
    reg_coeffs = [1e-7, 5e-7, 1e-6, 5e-6, 1e-5]
    decays = [1e-5, 5e-5, 1e-4]
    ml_utils.testmodels(nX_tr, y_tr, nX_te, y_te, archs, actfn='relu', last_act='softmax', reg_coeffs=reg_coeffs,
        num_epoch=100, batch_size=1000, sgd_lr=1e-5, sgd_decays=decays, sgd_moms= [0.99],
        sgd_Nesterov=True, EStop=True, verbose=0)
    print "Grid search with cross-validation - END"
# Script entry point (Python 2).
if __name__ == "__main__":
    main()
import serial

# Poll an FPGA over a serial link: send a handshake byte (b'h') and append the
# repr of each 1-byte reply to positions_fpga.txt, one reading per line.
#
# Fixes: the original had a dangling bare `while` (a syntax error) inside the
# loop, an unused `previous` variable, and never closed the output file.
positions_file = open("positions_fpga.txt", "a")
with serial.Serial('/dev/ttyACM0', 250000, timeout=1) as ser:
    ser.write(b'h')
    for i in range(1024):
        ser.write(b'h')
        positions_file.write(repr(ser.read(1)))
        positions_file.write("\n")
positions_file.close()
| StarcoderdataPython |
12818601 | <reponame>gordonmessmer/ansible-bender<filename>ansible_bender/builders/base.py
"""
Base class for builders
"""
from enum import Enum
# Lifecycle states a build can be in, keyed by their serialized string value.
# Built with the Enum functional API; equivalent to the class-based form.
BuildState = Enum(
    "BuildState",
    [
        ("NEW", "new"),
        ("IN_PROGRESS", "in_progress"),
        ("DONE", "done"),
        ("FAILED", "failed"),
    ],
)
class Builder:
    """Base interface that every concrete container builder implements.

    Most methods below are documented no-op stubs; subclasses supply the
    backend-specific behaviour.
    """

    # Overridden by subclasses: ansible connection plugin name and builder name.
    ansible_connection = "default-value"
    name = "default-value"

    def __init__(self, build, debug=False):
        """
        :param build: instance of Build
        :param debug: bool, provide debug output if True
        """
        self.build = build
        self.ansible_host = None
        self.debug = debug
        # Candidate python interpreter paths inside the base image, highest
        # priority first (see find_python_interpreter).
        self.python_interpr_prio = (
            "/usr/bin/python3",
            "/usr/local/bin/python3",
            "/usr/bin/python3.7",
            "/usr/bin/python37",
            "/usr/bin/python3.6",
            "/usr/bin/python36",
            "/usr/bin/python2",
            "/usr/local/bin/python2",
            "/usr/bin/python",
            "/usr/local/bin/python",
            "/usr/libexec/platform-python",
        )

    def create(self):
        """
        create a container where all the work happens
        """

    def run(self, image_name, command):
        """
        run provided command in the selected image and return output

        :param image_name: str
        :param command: list of str
        :return: str (output)
        """

    def commit(self, image_name):
        """
        snapshot the artifact and create an image

        :param image_name: str, name the snapshot
        """

    def clean(self):
        """
        clean working container
        """

    def get_image_id(self, image_name):
        """ return image_id for provided image """

    def is_image_present(self, image_reference):
        """
        :return: True when the selected image is present, False otherwise
        """

    def is_base_image_present(self):
        """
        :return: True when the base image is present, False otherwise
        """
        return self.is_image_present(self.build.base_image)

    def pull(self):
        """
        pull base image
        """

    def push(self, build, target, force=False):
        """
        push built image into a remote location

        :param target: str, transport:details
        :param build: instance of Build
        :param force: bool, bypass checks if True
        :return: None
        """

    def find_python_interpreter(self):
        """
        find python executable in the base image, for prio order see constructor

        :return: str, path to python interpreter
        """

    def get_logs(self):
        """
        obtain logs for the selected build

        :return: list of str
        """

    def sanity_check(self):
        """
        invoke container tooling and thus verify they work well
        """

    def check_container_creation(self):
        """
        check that containers can be created
        """
| StarcoderdataPython |
47753 | #!/usr/local/python/bin/python
# script to check the previously unsolved files
#
# to do:
# Sanity check all the image_ids in the table actually have a png
# quick check shows 1349 in DB and 1353 pngs, 4 out, not bad
#
from create_movie import create_movie
import os,sys,getpass,time
import glob as g
from astropy.io import fits
from collections import defaultdict
import pyds9
import argparse as ap
# Per-host configuration: pick the working directory and the astrometry.net
# binary location based on who is running the script; bail out for anyone else.
me = getpass.getuser()
if me == 'ops':
    w_dir = "/ngts/staging/archive/minisurvey/junk"
    astrom_loc = "/usr/local/astrometry.net/bin/"
elif me == 'James':
    w_dir = '/Users/James/Desktop/junk'
    astrom_loc = "/usr/local/bin/"
else:
    print("WHOAMI?")
    sys.exit(1)

# check for w_dir
# Fix: replaced the non-idiomatic `os.path.exists(w_dir) == False` comparison
# with `not os.path.exists(w_dir)` (same behaviour).
if not os.path.exists(w_dir):
    print("I'm dying... (no w_dir)")
    sys.exit(1)
# get command line args
def argParse():
    """Parse the command-line flags for the mini-survey republishing run."""
    cli = ap.ArgumentParser(
        description="A script to redo the failed minisurvey publishing step")
    cli.add_argument('--astrometry', action='store_true',
                     help="try redoing the astrometry?")
    cli.add_argument('--manual', action='store_true',
                     help="manually analyse the images with DS9/DSS")
    cli.add_argument('--yes2all', action='store_true',
                     help="select this to skip prompting - used if pass already has been made through the imagaes and all are good")
    return cli.parse_args()
# read in astrometry.net log files
def getAstromFromFile(astromfile):
    """Pull the solved field centre out of an astrometry.net log file.

    Scans for the "Field center: (RA H:M:S ..." line and returns the
    (ra, dec) sexagesimal strings; (None, None) when no such line exists.
    """
    for line in open(astromfile, "r").readlines():
        if not line.startswith("Field center: (RA H:M:S"):
            continue
        # The right-hand side of '=' holds "(<ra>, <dec>)."
        ra_txt, dec_txt = line.split('=')[1].split(',')
        return (ra_txt.strip().replace('(', ''),
                dec_txt.strip().replace(').', ''))
    return None, None
# do the astrometry
def astrometry(image, scale_l, scale_h, ra=None, dec=None, radius=5.0, cpulimit=90):
    """Blind-solve *image* with astrometry.net and return the (ra, dec) found.

    solve-field output is redirected into a per-image log which is then
    parsed. ra/dec/radius are accepted only for interface compatibility;
    the blind solve does not use them.
    """
    log_name = "astrometry_%s.log" % (image)
    solve_cmd = "%s/solve-field %s --scale-low %s --scale-high %s --cpulimit %s --no-plots --overwrite" % (astrom_loc, image, scale_l, scale_h, cpulimit)
    os.system("%s > %s" % (solve_cmd, log_name))
    return getAstromFromFile(log_name)
# --- script body (indentation reconstructed from the mangled dump) ----------
args = argParse()
os.chdir(w_dir)
t = sorted(g.glob('*.fits'))

if args.astrometry:
    # Blind-solve every frame; record "0" placeholders for unsolved ones.
    RA, DEC = [], []
    for i in t:
        ra, dec = astrometry(i, 2.83, 2.93, cpulimit=2)
        if ra:
            RA.append(ra)
            DEC.append(dec)
        else:
            RA.append("0")
            DEC.append("0")

if args.manual:
    fields = defaultdict(list)
    done = defaultdict(list)
    # loop over the and check for multiples
    # of the same field, if so work on the last one only
    for i in t:
        h = fits.open(i)[0].header['FIELD']
        fields[h].append(i)
    d = pyds9.DS9()
    time.sleep(5)
    d.set('scale zscale')
    d.set('preserve scale')
    d.set('preserve pan')
    print("Remeber to DELETE duplicate images")
    rm_string = ""
    for i in fields:
        image = fields[i][-1]
        d.set('frame clear all')
        h = fits.open(image)[0]
        ra = h.header['CMD_RA']
        dec = h.header['CMD_DEC']
        # print this so we can see which have duplicates to delete
        print(fields[i])
        if len(fields[i]) > 1:
            for k in range(0, len(fields[i]) - 1):
                rm_string = rm_string + "%s " % (fields[i][k])
        # display the image in DS9 and load the correct region of sky beside it
        d.set('tile yes')
        d.set('frame 1')
        d.set('file %s' % (image))
        d.set('zoom 2')
        d.set('wcs align yes')
        d.set('cmap invert yes')
        d.set('frame 2')
        d.set('dsseso coord %.6f %.6f degrees size 30 30 arcmin' % (ra, dec))
        d.set('zoom to fit')
        d.set('wcs align yes')
        d.set('cmap invert yes')
        d.set('frame center all')
        if args.yes2all:
            done[i].append(image)
        else:
            yn = input("Do the fields match? (y/n): ")
            if yn.lower().startswith('y'):
                done[i].append(image)
            else:
                continue
    print(rm_string)
    # need to make an astrometry* log file for the manually solved images?
    # and also a png too, then update the database as with the others, manually?
    # NOTE(review): indentation reconstructed -- the summary below uses `done`,
    # which only exists in --manual mode, so it is kept inside this branch.
    table_update_string = ""
    if len(done) > 0:
        print("Check the UPDATE strings as use them to UPDATE the minisurvey table")
        for i in done:
            create_movie(done[i], images_directory="%s/" % (w_dir), no_time_series=True, include_increment=False, clobber_images_directory=False, resize_factor=4, multiprocess=False)
            # image name minus IMAGE and .fits
            table_update_string = table_update_string + "UPDATE mini_survey SET checked_out=0,astrometry=1,done=1,png=1,fails=0 where image_id=\"%s\";\n" % (done[i][0][5:-5])
        print(table_update_string)
| StarcoderdataPython |
293151 | #!/usr/bin/python3
"Pymilter-based milter that adds Piwik / Matomo tracking parameters to links found in e-mails."
from time import strftime
import urllib
import tempfile
import email
import re
import io
import os
import sys
import Milter
# Configuration
# List of email addresses for which incoming mail should have tracking added:
TRACKED_EMAILS = ('<EMAIL>', '<EMAIL>')
# Absolute URL to piwik.php script, used to track email opening:
PIWIK_IMAGE_URL = "https://domain.com/piwik/piwik.php?idsite=1"
# Socket name (will be used by Postfix to communicate with milter)
#SOCKETNAME = os.getenv("HOME") + "/analyticsmiltersock"
SOCKETNAME = 'inet:12085@127.0.0.1'
# End of Configuration
class AnalyticsMilter(Milter.Milter):
    """Milter that adds Matomo/Piwik tracking to e-mails.

    Buffers each message during the SMTP transaction, rewrites HTML parts
    (campaign parameters on links, a 1x1 tracking pixel at the end) and
    replaces the body before delivery.

    NOTE(review): several calls below are Python 2 email APIs
    (`email.Encoders`, `msg.getheaders`, `msg.dump`) and str data is written
    to a binary-mode file in eoh() -- this module appears to be an unported
    py2/py3 mix; confirm the target interpreter before relying on it.
    """

    def log(self, *msg):
        """Write a timestamped line plus each message argument to STDOUT."""
        print("%s [%d]" % (strftime('%Y%b%d %H:%M:%S'), self.milter_id))
        for i in msg:
            print(i + "\n")

    def __init__(self):
        # Per-connection state; reset again in envfrom for each message.
        self.tempname = None
        self.mailfrom = None
        self.buffer = None
        self.bodysize = 0
        self.milter_id = Milter.uniqueID()

    # multiple messages can be received on a single connection
    # envfrom (MAIL FROM in the SMTP protocol) seems to mark the start
    # of each message.
    @Milter.noreply
    def envfrom(self, f, *str):
        "start of MAIL transaction"
        self.log("mail from", f, str)
        self.buffer = io.StringIO()
        self.tempname = None
        self.mailfrom = f
        self.bodysize = 0
        return Milter.CONTINUE  # pylint:disable=E1101

    def envrcpt(self, to, *str):
        """Only CONTINUE (i.e. keep processing) for tracked recipients."""
        if any(e in to for e in TRACKED_EMAILS):
            self.log('Found one! To:', to, str)
            return Milter.CONTINUE  # pylint:disable=E1101
        return Milter.ACCEPT  # pylint:disable=E1101

    def header(self, name, val):
        "Record e-mail header in buffer"
        if self.buffer:
            self.buffer.write("%s: %s\n" % (name, val))  # add header to buffer
        return Milter.CONTINUE  # pylint:disable=E1101

    def eoh(self):
        "Copy headers to a temp file so buffer can be used for body"
        if not self.buffer:  # not seen by envfrom
            return Milter.TEMPFAIL  # pylint:disable=E1101
        self.buffer.write("\n")
        self.buffer.seek(0)
        # copy headers to a temp file for scanning the body
        headers = self.buffer.getvalue()
        self.buffer.close()
        self.tempname = fname = tempfile.mktemp(".defang")
        self.buffer = open(fname, "w+b")
        # NOTE(review): `headers` is str but the file is binary-mode; this
        # raises TypeError on Python 3 -- confirm intended interpreter.
        self.buffer.write(headers)  # IOError (e.g. disk full) causes TEMPFAIL
        return Milter.CONTINUE  # pylint:disable=E1101

    def body(self, chunk):  # copy body to temp file
        "Copy body to a tempfile"
        if self.buffer:
            self.buffer.write(chunk)  # IOError causes TEMPFAIL in milter
            self.bodysize += len(chunk)
        return Milter.CONTINUE  # pylint:disable=E1101

    def _header_change(self, msg, name, value):
        """Callback applied for each header change on the parsed message."""
        if value:  # add header
            self.addheader(name, value)
        else:  # delete all headers with name
            # NOTE(review): msg.getheaders is a Python 2 Message API.
            headers = msg.getheaders(name)
            cnt = len(headers)
            for i in range(cnt, 0, -1):
                self.chgheader(name, i-1, '')

    def _fix_content(self, content):
        """Apply both rewrites: tracked links, then the tracking pixel."""
        content = self._add_tracking_to_links(content)
        content = self._add_tracking_image(content)
        return content

    def _add_tracking_to_links(self, content):
        """Append pk_campaign/pk_kwd fragments to every <a href> in content."""
        self.log("Adding piwik tracking to links")
        relink = re.compile(
            r'<(a[^>]+href)="([^"]+)"([^>]*)>(.*?)</(a)>', re.S | re.I)
        restrip = re.compile(r'<([^>]+)>', re.S | re.I)
        respace = re.compile(r'[\s&]+', re.S)
        img_number = 1
        for match in relink.finditer(content):
            res = match.group(1, 2, 3, 4, 5)
            keyword = match.group(4)
            if keyword.find('<img') >= 0:
                # Image links get a synthetic "image N" keyword.
                keyword = "image %d" % img_number
                img_number += 1
            else:
                # remove tags from keyword
                keyword = restrip.sub('', keyword)
                keyword = respace.sub(' ', keyword)
            # url encode keyword
            keyword = urllib.parse.quote_plus(keyword)
            # substitute into content
            str1 = '<%s="%s"%s>%s</%s>' % res[0:5]
            self.log(str1)
            str2 = '<%s="%s#pk_campaign=newsletter%s&pk_kwd=%s"%s>%s</%s>' % (
                res[0], res[1], strftime('%Y-%b-%d'), keyword, res[2], res[3], res[4])
            self.log(str2)
            content = content.replace(str1, str2)
        return content

    def _add_tracking_image(self, content):
        """Append a 1x1 piwik.php tracking pixel to the end of the body."""
        self.log("Adding tracking image to end of e-mail body")
        tempstr = \
            '<img src="%s&rec=1&bots=1&action_name=newsletter-open' + \
            '&e_c=newsletter&e_a=open&e_n=newsletter-%s" height="1" width="1">'
        content += tempstr % (PIWIK_IMAGE_URL, strftime('%Y-%b-%d'))
        return content

    def _modify_part(self, part):
        """Rewrite one text/html part in place and re-encode it quopri."""
        content = part.get_payload(decode=True)
        content = self._fix_content(content)
        self.log("Encoding part")
        part.set_type('text/html')
        part.set_payload(content)
        del part["content-transfer-encoding"]
        # NOTE(review): email.Encoders is the Python 2 module name
        # (email.encoders on Python 3).
        email.Encoders.encode_quopri(part)
        return part

    def _find_html_part(self, part):
        """Modify this part if it is HTML; recurse into nested multiparts."""
        parttype = part.get_content_type().lower()
        self.log("Part type:", parttype)
        if parttype == 'text/html':
            self.log("Modifying part")
            part = self._modify_part(part)
            return True
        if parttype.startswith('multipart'):
            self.log("Iterating part")
            return self._add_tracking(part)
        return False

    def _add_tracking(self, msg):
        """Return True when an HTML part was found and rewritten."""
        if msg.is_multipart():
            parts = msg.get_payload()
            for part in parts:
                # return true if we modified the part
                if self._find_html_part(part):
                    return True
            return False
        return self._find_html_part(msg)

    def eom(self):
        "Attempt to replace message body if message matched our critera"
        if not self.buffer:
            return Milter.ACCEPT  # pylint:disable=E1101
        self.buffer.seek(0)
        msg = email.message_from_file(self.buffer)
        # Remove all headers so we can work with just body
        msg.headerchange = self._header_change
        # Add tracking, if it doesn't work, then just let the e-mail through
        # In the case of tracking marketing e-mails, this is safer than blocking the e-mail.
        if not self._add_tracking(msg):
            self.log("No parts modified")
            return Milter.ACCEPT  # pylint:disable=E1101
        # If message is modified by _add_tracking:
        self.log("Temp file:", self.tempname)
        self.tempname = None  # prevent removal of original message copy
        # copy tracked message to a temp file
        out = tempfile.TemporaryFile()
        try:
            # NOTE(review): Message.dump is not a Python 3 email API -- verify.
            msg.dump(out)
            out.seek(0)
            #msg = rfc822.Message(out)
            # msg.rewindbody()
            while 1:
                buf = out.read(8192)
                if len(buf) == 0:
                    break
                self.replacebody(buf)  # feed modified message to sendmail
            # ACCEPT modified message
            return Milter.ACCEPT  # pylint:disable=E1101
        finally:
            out.close()
        # Unreachable: the try block always returns (or raises) above.
        return Milter.TEMPFAIL  # pylint:disable=E1101

    def close(self):
        "Print output and clean up"
        sys.stdout.flush()  # make log messages visible
        if self.tempname:
            os.remove(self.tempname)  # remove in case session aborted
        if self.buffer:
            self.buffer.close()
        return Milter.CONTINUE  # pylint:disable=E1101

    def abort(self):
        "Report if AnalyticsMilter is interrupted"
        self.log("abort after %d body chars" % self.bodysize)
        return Milter.CONTINUE  # pylint:disable=E1101
# Entry point: register the milter factory, print setup hints (runtime string
# kept verbatim) and run until shutdown.
if __name__ == "__main__":
    Milter.factory = AnalyticsMilter
    print("""To use this with sendmail, add the following to sendmail.cf:
O InputMailFilters=analyticsmilter
Xanalyticsmilter, S=local:%s
See the sendmail README for libmilter.
To use this with Postfix, add the following to main.cf:
smtpd_milters = local:%s $smtpd_milters
tracking milter startup""" % (SOCKETNAME, SOCKETNAME))
    sys.stdout.flush()
    Milter.runmilter("analyticsmilter", SOCKETNAME, 240)
    print("tracking milter shutdown")
| StarcoderdataPython |
1933027 | # https://leetcode.com/problems/rotate-array/discuss/269948/4-solutions-in-python-(From-easy-to-hard)
# https://leetcode.com/problems/rotate-array/discuss/487529/py3-js-5-different-simple-solutions
# https://practice.geeksforgeeks.org/problems/rotate-array-by-n-elements-1587115621/1/?track=md-arrays&batchId=144
def rotate_array(arr, d, n):
    """Left-rotate the first *n* elements of *arr* by *d* positions, in place.

    Returns the same (mutated) list, as before.

    Improvements over the original one-step-at-a-time version:
    - O(n) slice rotation instead of O(n*d) repeated single rotations;
    - `d % n` handles d > n directly (the old code looped d times);
    - empty input (n == 0) is now a safe no-op.
    """
    if n > 0:
        d %= n  # rotating by n is the identity, only the remainder matters
        arr[:n] = arr[d:n] + arr[:d]
    return arr
# Quick demo: rotate the sample array left by D positions.
if __name__ == "__main__":
    a = [2,4,6,8,10,12,14,16,18,20]
    N= 10
    D= 3
    print(rotate_array(a,D,N))
8004676 | <filename>7_kyu/Say_Me_Please_Operations.py<gh_stars>0
def sayMeOperations(stringNumbers: str) -> str:
    """Name the arithmetic operation linking each consecutive triple.

    For every number from the third onward, report whether it equals the sum,
    difference, product or integer quotient of the two numbers before it
    ("?" when none match). Returns "?" for fewer than two numbers, and ""
    for exactly two.
    """
    values = [int(token) for token in stringNumbers.split()]
    if len(values) < 2:
        return "?"

    def classify(x, y, z):
        # Checked in the same precedence order as the original chain.
        if x + y == z:
            return "addition"
        if x - y == z:
            return "subtraction"
        if x * y == z:
            return "multiplication"
        if y != 0 and x // y == z:
            return "division"
        return "?"

    return ", ".join(
        classify(values[k - 2], values[k - 1], values[k])
        for k in range(2, len(values))
    )
| StarcoderdataPython |
1630963 | # -*- coding: utf-8 -*-
# file: __init__.py
# time: 2021/5/21 0021
# author: yangheng <<EMAIL>>
# github: https://github.com/yangheng95
# Copyright (C) 2021. All Rights Reserved.
from pyabsa.core.atepc.models import (lcfs_atepc,
lcfs_atepc_large,
lcf_atepc,
fast_lcfs_atepc,
lcf_template_atepc,
lcf_atepc_large,
fast_lcf_atepc,
bert_base_atepc)
class ATEPCModelList:
    """Registry of the available ATEPC model classes.

    Each attribute aliases the model class from its module so callers can
    reference models as ``ATEPCModelList.<NAME>``.
    """
    BERT_BASE_ATEPC = bert_base_atepc.BERT_BASE_ATEPC
    LCF_ATEPC = lcf_atepc.LCF_ATEPC
    LCF_ATEPC_LARGE = lcf_atepc_large.LCF_ATEPC_LARGE
    FAST_LCF_ATEPC = fast_lcf_atepc.FAST_LCF_ATEPC
    LCFS_ATEPC = lcfs_atepc.LCFS_ATEPC
    LCFS_ATEPC_LARGE = lcfs_atepc_large.LCFS_ATEPC_LARGE
    FAST_LCFS_ATEPC = fast_lcfs_atepc.FAST_LCFS_ATEPC
    LCF_TEMPLATE_ATEPC = lcf_template_atepc.LCF_TEMPLATE_ATEPC
| StarcoderdataPython |
3463532 | <filename>patchmatch/python/patchmatch.py<gh_stars>0
# [mask,param] = CMFD_PM(img,param)
#This code is the version 1.0 of the CMFD (Copy-Move Forgery Detection)
# algorithm described in "Efficient dense-field copy-move forgery detection",
# written by <NAME>, <NAME> and <NAME>,
# IEEE Trans. on Information Forensics and Security, in press, 2015.
# Please refer to this paper for a more detailed description of
# the algorithm.
#
##########################################################################
#
# Copyright (c) 2015 Image Processing Research Group of University Federico II of Naples ('GRIP-UNINA').
# All rights reserved.
# This work should only be used for nonprofit purposes.
#
# By downloading and/or using any of these files, you implicitly agree to all the
# terms of the license, as specified in the document LICENSE.txt
# (included in this package) and online at
# http://www.grip.unina.it/download/LICENSE_OPEN.txt
#
##########################################################################
import utils
import numpy as np
import os
from time import time
import matlab.engine
from skimage import morphology
import cv2
def cmfd_pm(img, param):
    """Dense-field copy-move forgery detection (PatchMatch variant).

    Pipeline: (1) rotation-invariant feature extraction on the grayscale
    image, (2) approximate dense matching via a MATLAB PatchMatch binary,
    (3) post-processing of the offset field into a detection mask.

    :param img: BGR image (converted to grayscale here)
    :param param: parameter object (feature type, radii, thresholds, ...)
    :return: (mask, param, outData) -- detection mask, the parameters, and a
             dict of intermediate results / timings
    """
    outData = {}
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    print('START')

    # (1) Feature Extraction
    timestamp = time()
    # generation of filters
    outData['feat_name'] = param.type_feat
    if param.type_feat=='ZM-cart':
        bfdata = utils.ZM_bf(param.diameter, param.ZM_order)
    elif param.type_feat=='ZM-polar':
        bfdata = utils.ZMp_bf(param.diameter, param.ZM_order, param.radiusNum, param.anglesNum)
    elif param.type_feat=='PCT-cart':
        bfdata = utils.PCT_bf(param.diameter, param.PCT_NM)
    elif param.type_feat=='PCT-polar':
        bfdata = utils.PCTp_bf(param.diameter, param.PCT_NM, param.radiusNum, param.anglesNum)
    elif param.type_feat=='FMT':
        bfdata = utils.FMTpl_bf(param.diameter, param.FMT_M, param.radiusNum, param.anglesNum, param.FMT_N, param.radiusMin)
    else:
        raise ValueError('type of feature not found')
    # feature generation
    feat = np.abs(utils.bf_filter(img, bfdata))
    # cutting off the borders
    raggioU = int(np.ceil((param.diameter - 1) / 2))
    raggioL = int(np.floor((param.diameter - 1) / 2))
    # NOTE(review): the stop index (-1-raggioL) trims one extra row/column
    # compared with the symmetric crop raggioU:-raggioL -- confirm against the
    # MATLAB reference implementation.
    feat = feat[raggioU:(-1-raggioL), raggioU:(-1-raggioL), :]
    outData['timeFE'] = time() - timestamp
    print('time FE: {:.3f}'.format(outData['timeFE']))

    ## Matching
    timestamp = time()
    feat = (feat - np.min(feat.reshape(-1,1))) / (np.max(feat) - np.min(feat)) # mPM requires the features to be in [0,1]
    # run matlab compiled file
    eng = matlab.engine.start_matlab()
    matlab_feat = eng.double(feat.tolist())
    cnn = eng.vecnnmex_mod(matlab_feat, matlab_feat, 1, param.num_iter, -param.th_dist1, param.num_tile)
    eng.quit()
    # First NN field: channel 0 holds x offsets, channel 1 holds y offsets.
    mpf_y = cnn[:, :, 1, 0].astype(np.double)
    mpf_x = cnn[:, :, 0, 0].astype(np.double)
    outData['timeMP'] = time() - timestamp
    print('time PM: {:.3f}'.format(outData['timeMP']))
    outData['cnn'] = cnn

    ## Post Processing
    timestamp = time()
    # regularize offsets field by median filtering
    DD_med, NN_med = utils.genDisk(param.rd_median)
    NN_med = (NN_med + 1) / 2
    mpf_y, mpf_x = utils.MPFregularize(mpf_y,mpf_x,DD_med,NN_med)
    # Compute the squared error of dense linear fitting
    DLFerr = utils.DLFerror(mpf_y,mpf_x,param.rd_dlf)
    mask = DLFerr <= param.th2_dlf
    outData['maskDLF'] = mask
    # removal of close couples
    dist2 = utils.MPFspacedist2(mpf_y,mpf_x)
    mask = np.logical_and(mask, (dist2>=param.th2_dist2))
    # morphological operations
    mask = morphology.remove_small_objects(mask, param.th_sizeA, 8);
    outData['maskMPF'] = mask
    mask = utils.MPFdual(mpf_y, mpf_x, mask) # mirroring of detected regions
    mask = morphology.remove_small_objects(mask, param.th_sizeB, 8)
    mask = cv2.dilate(mask, morphology.disk(param.rd_dil))
    # put the borders back (restore original image size)
    mask = utils.padarray_both(mask,[raggioU,raggioU,raggioL,raggioL],0) # useful? was false() before, but 0 has the same effect
    DLFerr = utils.padarray_both(DLFerr,[raggioU,raggioU,raggioL,raggioL],0)
    outData['timePP'] = time() - timestamp
    print('time PP: {:.3f}'.format(outData['timePP']))
    outData['cnn_end'] = np.concatenate([mpf_x,mpf_y], axis=2) # order in list?
    outData['DLFerr'] = DLFerr

    ## end
    print('END : {:.3f}'.format(outData['timeFE'] + outData['timeMP'] + outData['timePP']))
    return mask, param, outData
class Solution:
    # @param A : tuple of integers
    # @param B : integer
    # @return an integer
    def search(self, A, B):
        """Return the index of B in the rotated sorted sequence A (duplicates
        allowed), or -1 when absent. O(log n) average; degrades to O(n) when
        A[left] == A[mid] == A[right] forces the linear shrink.

        Fix: midpoint now uses floor division (//). The original `/` produces
        a float on Python 3 and raises TypeError when used as an index.
        """
        left, right = 0, len(A) - 1
        while left <= right:
            mid = (left + right) // 2
            if B == A[mid]:
                return mid
            if A[left] == A[mid] and A[mid] == A[right]:
                # Duplicates hide which half is sorted; shrink both ends.
                left += 1
                right -= 1
            elif A[left] <= A[mid]:
                # Left half is sorted.
                if A[left] <= B < A[mid]:
                    right = mid - 1
                else:
                    left = mid + 1
            else:
                # Right half is sorted.
                if A[mid] < B <= A[right]:
                    left = mid + 1
                else:
                    right = mid - 1
        return -1
if __name__ == "__main__":
    # Python 3: print is a function (the original Python 2 print statements
    # are syntax errors under Python 3).
    print(Solution().search([3, 5, 1], 3))              # Output 0
    print(Solution().search([2, 2, 3, 3, 4, 1], 1))     # Output 5
    print(Solution().search([4, 4, 5, 6, 7, 0, 1, 2], 5))  # Output 2
| StarcoderdataPython |
1728597 | [1,4,5]
['a','b','c']
[x,y,z,t]
| StarcoderdataPython |
1873475 | # -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import SQLBaseStore
class PresenceStore(SQLBaseStore):
    """Storage layer for per-user presence state and the visibility /
    subscription lists that control who may observe that state."""

    def create_presence(self, user_localpart):
        """Insert an empty presence row for a local user."""
        row = {"user_id": user_localpart}
        return self._simple_insert(table="presence", values=row)

    def has_presence_state(self, user_localpart):
        """Return the user's row (user_id only) if one exists, else None."""
        lookup = {"user_id": user_localpart}
        return self._simple_select_one(
            table="presence",
            keyvalues=lookup,
            retcols=["user_id"],
            allow_none=True,
        )

    def get_presence_state(self, user_localpart):
        """Fetch the stored state, status message and mtime for a user."""
        lookup = {"user_id": user_localpart}
        wanted = ["state", "status_msg", "mtime"]
        return self._simple_select_one(
            table="presence",
            keyvalues=lookup,
            retcols=wanted,
        )

    def set_presence_state(self, user_localpart, new_state):
        """Persist a new presence state and status message, stamping the
        modification time from the store's clock."""
        updates = {
            "state": new_state["state"],
            "status_msg": new_state["status_msg"],
            "mtime": self._clock.time_msec(),
        }
        return self._simple_update_one(
            table="presence",
            keyvalues={"user_id": user_localpart},
            updatevalues=updates,
            retcols=["state"],
        )

    def allow_presence_visible(self, observed_localpart, observer_userid):
        """Record that *observer_userid* may see *observed_localpart*'s presence."""
        row = {
            "observed_user_id": observed_localpart,
            "observer_user_id": observer_userid,
        }
        return self._simple_insert(table="presence_allow_inbound", values=row)

    def disallow_presence_visible(self, observed_localpart, observer_userid):
        """Revoke a previously granted presence-visibility permission."""
        lookup = {
            "observed_user_id": observed_localpart,
            "observer_user_id": observer_userid,
        }
        return self._simple_delete_one(
            table="presence_allow_inbound",
            keyvalues=lookup,
        )

    def is_presence_visible(self, observed_localpart, observer_userid):
        """Return the permission row if the observer may see the observed
        user's presence, else None."""
        lookup = {
            "observed_user_id": observed_localpart,
            "observer_user_id": observer_userid,
        }
        return self._simple_select_one(
            table="presence_allow_inbound",
            keyvalues=lookup,
            retcols=["observed_user_id"],
            allow_none=True,
        )

    def add_presence_list_pending(self, observer_localpart, observed_userid):
        """Add a not-yet-accepted entry to the observer's presence list."""
        row = {
            "user_id": observer_localpart,
            "observed_user_id": observed_userid,
            "accepted": False,
        }
        return self._simple_insert(table="presence_list", values=row)

    def set_presence_list_accepted(self, observer_localpart, observed_userid):
        """Mark a pending presence-list entry as accepted."""
        lookup = {
            "user_id": observer_localpart,
            "observed_user_id": observed_userid,
        }
        return self._simple_update_one(
            table="presence_list",
            keyvalues=lookup,
            updatevalues={"accepted": True},
        )

    def get_presence_list(self, observer_localpart, accepted=None):
        """List the observer's presence-list entries, optionally filtered by
        accepted status (accepted=None means no filtering)."""
        filters = {"user_id": observer_localpart}
        if accepted is not None:
            filters["accepted"] = accepted
        return self._simple_select_list(
            table="presence_list",
            keyvalues=filters,
            retcols=["observed_user_id", "accepted"],
        )

    def del_presence_list(self, observer_localpart, observed_userid):
        """Remove an entry from the observer's presence list."""
        lookup = {
            "user_id": observer_localpart,
            "observed_user_id": observed_userid,
        }
        return self._simple_delete_one(
            table="presence_list",
            keyvalues=lookup,
        )
| StarcoderdataPython |
1986077 | <filename>ding/torch_utils/tests/test_metric.py
import random
import pytest
import torch
from ding.torch_utils.metric import levenshtein_distance, hamming_distance
@pytest.mark.unittest
class TestMetric():

    def test_levenshtein_distance(self):
        """Edit distance between LongTensor sequences, with and without an
        extra per-alignment cost function."""
        prediction = torch.LongTensor([1, 4, 6, 4, 1])

        assert levenshtein_distance(prediction, torch.LongTensor([1, 6, 4, 4, 1])).item() == 2
        # Distance to the empty sequence equals the prediction's length.
        assert levenshtein_distance(prediction, torch.LongTensor([])).item() == 5
        assert levenshtein_distance(prediction, torch.LongTensor([6, 4, 1])).item() == 2

        extra = lambda x, y: x + y
        suffix = torch.LongTensor([6, 4, 1])
        assert levenshtein_distance(prediction, suffix, prediction, suffix, extra_fn=extra).item() == 13
        other = torch.LongTensor([1, 4, 1])
        assert levenshtein_distance(prediction, other, prediction, other, extra_fn=extra).item() == 14

    def test_hamming_distance(self):
        """Hamming distance between random 8-element binary masks (four ones
        each), checked against the symmetric-difference count of the
        activated positions."""
        template = torch.zeros(8).long()
        positions = list(range(8))
        for _ in range(2):
            on_pred = random.sample(positions, 4)
            on_target = random.sample(positions, 4)
            pred = template.clone()
            pred[on_pred] = 1
            target = template.clone()
            target[on_target] = 1
            result = hamming_distance(pred.unsqueeze(0), target.unsqueeze(0))
            # |union - intersection| is exactly the symmetric difference.
            expected = len(set(on_pred) ^ set(on_target))
            assert result.item() == expected
1632874 | <filename>azure-mgmt/tests/test_mgmt_sql.py
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import unittest
import azure.mgmt.sql
from testutils.common_recordingtestcase import record
from tests.mgmt_testcase import HttpStatusCode, AzureMgmtTestCase
class MgmtSqlTest(AzureMgmtTestCase):
    """Recorded (record/playback) integration tests for azure.mgmt.sql.

    The calls are matched against recordings in playback mode, so the exact
    sequence and arguments of the SDK calls must stay stable.
    """

    def setUp(self):
        # Build the management client; only create a real resource group when
        # recording live (playback replays the recorded responses instead).
        super(MgmtSqlTest, self).setUp()
        self.client = self.create_mgmt_client(
            azure.mgmt.sql.SqlManagementClient
        )
        # I don't record resource group creation, since it's another package
        if not self.is_playback():
            self.create_resource_group()

    @record
    def test_server(self):
        # Exercises server CRUD: create, get, list (group and subscription),
        # usages, delete.
        server_name = self.get_resource_name('mypysqlserver')
        server = self.client.servers.create_or_update(
            self.group_name,  # Created by the framework
            server_name,
            {
                'location': self.region,  # "self.region" is 'west-us' by default
                'version': '12.0',
                'administrator_login': 'mysecretname',
                'administrator_login_password': '<PASSWORD>'
            }
        )
        self.assertEqual(server.name, server_name)

        server = self.client.servers.get_by_resource_group(
            self.group_name,
            server_name
        )
        self.assertEqual(server.name, server_name)

        # Listing within the group should yield exactly the one server.
        my_servers = list(self.client.servers.list_by_resource_group(self.group_name))
        self.assertEqual(len(my_servers), 1)
        self.assertEqual(my_servers[0].name, server_name)

        # Subscription-wide listing may contain other servers too.
        my_servers = list(self.client.servers.list())
        self.assertTrue(len(my_servers) >= 1)
        self.assertTrue(any(server.name == server_name for server in my_servers))

        usages = list(self.client.servers.list_usages(self.group_name, server_name))
        # FIXME test content of "usages", not just the call

        self.client.servers.delete(self.group_name, server_name)

    @record
    def test_database(self):
        # Exercises database CRUD on a freshly created server.
        server_name = self.get_resource_name('mypysqlserver')
        db_name = self.get_resource_name('pyarmdb')

        server = self.client.servers.create_or_update(
            self.group_name,  # Created by the framework
            server_name,
            {
                'location': self.region,  # "self.region" is 'west-us' by default
                'version': '12.0',
                'administrator_login': 'mysecretname',
                'administrator_login_password': '<PASSWORD>'
            }
        )
        self.assertEqual(server.name, server_name)

        # Database creation is a long-running operation; wait on the poller.
        async_db_create = self.client.databases.create_or_update(
            self.group_name,
            server_name,
            db_name,
            {
                'location': self.region
            }
        )
        database = async_db_create.result()  # Wait for completion and return created object
        self.assertEqual(database.name, db_name)

        db = self.client.databases.get(
            self.group_name,
            server_name,
            db_name
        )
        # NOTE(review): this asserts on server.name, not db.name — probably
        # intended to verify the fetched database; confirm before changing.
        self.assertEqual(server.name, server_name)

        # Every server carries the built-in 'master' database plus ours.
        my_dbs = list(self.client.databases.list_by_server(self.group_name, server_name))
        print([db.name for db in my_dbs])
        self.assertEqual(len(my_dbs), 2)
        self.assertEqual(my_dbs[0].name, 'master')
        self.assertEqual(my_dbs[1].name, db_name)

        usages = list(self.client.databases.list_usages(self.group_name, server_name, db_name))
        # FIXME test content of "usages", not just the call

        self.client.databases.delete(self.group_name, server_name, db_name)
#------------------------------------------------------------------------------
if __name__ == '__main__':
    # Allow running this test module directly, outside a dedicated runner.
    unittest.main()
| StarcoderdataPython |
6643572 | <reponame>alexandros44/UServer
from helpers.RegexHelpers import uregex as re
try:
import json
except:
import ujson as json
class RequestMethods:
    '''
    Registers the predefined server HTTP methods on a UServer. Each helper
    simply appends an entry to UServer.router_paths for the path/method the
    server should listen on.

    :userver: A UServer object.
    '''

    def __init__(self, userver):
        self.__userver = userver
        self.valid_methods = ["GET", "POST", "PUT", "PATCH", "HEAD", "DELETE"]

    def __path_validation(self, path):
        """Validate *path* against the allowed character set and split it into
        its '/'-separated segments; raises on any disallowed character."""
        if path == '*':
            return ['*']
        path_validation = re.findall(r'[/]([A-Za-z0-9_-]|[:]|[/]|[.]|[*])*', path)[0]
        if path_validation != path:
            raise Exception('Invalid path name. Check your name again: ' + path)
        return re.findall(r'[/]([A-Za-z0-9_-]|[:]|[.]|[*])*', path)

    def __check_method_doc(self, description, return_codes, doc_str):
        """Extract 'description: ' and 'return_codes: ' sections from a
        handler docstring, falling back to the passed-in defaults when
        *doc_str* is None or cannot be parsed."""
        if doc_str is None:
            return description, return_codes
        doc = "".join(line.strip() for line in doc_str.split('\n'))
        try:
            if 'description: ' in doc and 'return_codes: ' in doc:
                description, return_codes = doc.split('return_codes: ')
                description = description.replace('description: ', '').strip()
                # The two sections may appear in either order; swap when the
                # JSON object ended up in the description slot.
                if description[0] == '{' and return_codes[0] != '{':
                    description, return_codes = return_codes, description
                return_codes = json.loads(return_codes.replace('return_codes: ', '').strip())
            elif 'description: ' in doc:
                description = doc.split('description: ')[1]
            elif 'return_codes: ' in doc:
                return_codes = json.loads(doc.split('return_codes: ')[1])
        except (ValueError, IndexError):
            # ValueError covers bad JSON and unpack mismatches; IndexError
            # covers an empty description section.
            print("ValueError: Your method documentation is not correct: " + str(doc_str))
        return description, return_codes

    def handle_methods(self, path, callback, method, redirects=None, description='', return_codes=None, reverse_stack=False):
        """Validate *path* and append a route entry (path, callback stack,
        method, redirects, docs) to the server's router table. A duplicate
        path+method pair is rejected with a message instead of registered."""
        # Create fresh defaults per call (the original mutable defaults
        # shared one list/dict instance across every invocation).
        redirects = [] if redirects is None else redirects
        return_codes = {} if return_codes is None else return_codes
        path = self.__path_validation(path)
        redirects = [self.__path_validation(redirect) for redirect in redirects]
        already_routed = any(
            route['path'] == path and route['method'] == method
            for route in self.__userver.router_paths
        )
        if not already_routed:
            self.__userver.router_paths.append({
                'path': path,
                'callback': callback,
                'method': method,
                'redirects': redirects,
                'description': description,
                'return_codes': return_codes,
            })
            if reverse_stack:
                # Move the route just added to the front so it is matched first.
                last_route = self.__userver.router_paths.pop()
                self.__userver.router_paths.insert(0, last_route)
        else:
            print("LogicError: The path to the {} with the method {} already exists".format("".join(path), method))

    def static_content(self, path, content):
        """Register a GET route that always responds with *content*."""
        def callback(req, res):
            res.send_file_content(path, content)
        self.handle_methods(path, [callback], 'GET')

    def restful(self, path, class_args=(), middlewares=None, redirects=None, description="", docs="", return_codes=None):
        """Class decorator: instantiate the decorated class and register each
        of its HTTP-verb methods (get/post/...) as a route at *path*."""
        middlewares = [] if middlewares is None else middlewares
        def handler(RestObject):
            # NOTE(review): class_args is accepted but never forwarded to the
            # constructor -- confirm whether RestObject(*class_args) was intended.
            instance = RestObject()
            n_description, n_return_codes = self.__check_method_doc(description, return_codes, docs)
            for method in dir(instance):
                if method.upper() in self.valid_methods:
                    callback = getattr(instance, method)
                    self.handle_methods(path, middlewares + [callback], method.upper(), redirects, n_description, n_return_codes)
            return RestObject  # bug fix: the decorated class used to be rebound to None
        return handler

    def on(self, path, req_method, callback, middlewares=None, redirects=None, description='', return_codes=None, docs="", reverse_stack=False):
        """Register *callback* for an explicit *req_method* on *path*."""
        if req_method not in self.valid_methods:
            raise Exception('Invalid request type. You can only use:\n' + ", ".join(self.valid_methods) + '.')
        middlewares = [] if middlewares is None else middlewares
        n_description, n_return_codes = self.__check_method_doc(description, return_codes, docs)
        self.handle_methods(path, middlewares + [callback], req_method, redirects, n_description, n_return_codes, reverse_stack)

    def __verb_decorator(self, http_method, path, middlewares, redirects, description, return_codes, docs):
        """Shared factory behind get/post/patch/put/delete/head: returns a
        decorator that registers its target for *http_method* at *path*."""
        middlewares = [] if middlewares is None else middlewares
        def handler(callback):
            n_description, n_return_codes = self.__check_method_doc(description, return_codes, docs)
            self.handle_methods(path, middlewares + [callback], http_method, redirects, n_description, n_return_codes)
            return callback  # bug fix: decorated functions used to be rebound to None
        return handler

    def get(self, path, middlewares=None, redirects=None, description='', return_codes=None, docs=""):
        return self.__verb_decorator('GET', path, middlewares, redirects, description, return_codes, docs)

    def post(self, path, middlewares=None, redirects=None, description='', return_codes=None, docs=""):
        return self.__verb_decorator('POST', path, middlewares, redirects, description, return_codes, docs)

    def patch(self, path, middlewares=None, redirects=None, description='', return_codes=None, docs=""):
        return self.__verb_decorator('PATCH', path, middlewares, redirects, description, return_codes, docs)

    def put(self, path, middlewares=None, redirects=None, description='', return_codes=None, docs=""):
        return self.__verb_decorator('PUT', path, middlewares, redirects, description, return_codes, docs)

    def delete(self, path, middlewares=None, redirects=None, description='', return_codes=None, docs=""):
        return self.__verb_decorator('DELETE', path, middlewares, redirects, description, return_codes, docs)

    def head(self, path, middlewares=None, redirects=None, description='', return_codes=None, docs=""):
        return self.__verb_decorator('HEAD', path, middlewares, redirects, description, return_codes, docs)
11322874 | # 导入必要的模块和要测试的类
import unittest
from employee import Employee
# 定义测试用例
class TestEmployee(unittest.TestCase):
"""针对Employee类的测试"""
def setUp(self):
"""创建新的雇员实例和属性,供使用的测试方法使用"""
self.my_employee = Employee('yahu', 'yang', 65000)
def test_give_default_raise(self):
"""测试默认加薪"""
self.my_employee.give_raise()
self.assertEqual(self.my_employee.annual_salary, 70000)
def test_give_custom_raise(self):
"""测试自定义加薪"""
self.my_employee.give_raise(10000)
self.assertEqual(self.my_employee.annual_salary, 75000)
unittest.main() | StarcoderdataPython |
192671 | <reponame>kommurisaikumar/savings-manager-server<filename>backend/app/schemas/accounts.py
from pydantic import BaseModel
from typing import Optional
class AccountSingle (BaseModel):
id: int
user_id: int
class AccountList (BaseModel):
id: Optional[int]
user_id: int
class Account(BaseModel):
id: int
name: str
description: Optional[str]
class Config:
orm_mode = True
class AccountCreate(BaseModel):
name: str
description: str
class AccountUpdate(BaseModel):
name: Optional[str]
description: Optional[str] | StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.