id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
137893 | import numpy as np
from mapper_0000 import Mapper_0000
class Cartridge:
    """An iNES (.nes) ROM cartridge.

    Parses the 16-byte iNES header, loads PRG (program) and CHR (character)
    memory, and instantiates the appropriate mapper so the CPU/PPU buses can
    be routed through it.
    """
    def __init__(self, name: str):
        # Variables for values about the cartridge
        self.bImageValid = False
        self.nMapperID = np.uint8(0)
        self.nPRGBanks = np.uint8(0)
        self.nCHRBanks = np.uint8(0)
        self.mirror = "horizontal"
        # Variable for holding the Mapper class
        self.mapper = None
        # Arrays holding the cartridge memories
        self.vPRGMemory = []
        self.vCHRMemory = []
        # Call function for reading cartridge
        self.readCartridge(name)

    def readCartridge(self, name: str):
        """Read the .nes file *name*: parse the iNES header, skip the
        optional trainer, load PRG/CHR memory and create the mapper."""
        loaded = np.fromfile(name, dtype='uint8')
        read_from = 0
        self.name = loaded[read_from:4]           # bytes 0-3: "NES\x1a" signature
        read_from = 4
        self.prg_rom_chunks = loaded[read_from]   # number of 16 KB PRG-ROM banks
        read_from += 1
        self.chr_rom_chunks = loaded[read_from]   # number of 8 KB CHR-ROM banks
        read_from += 1
        self.mapper1 = loaded[read_from]          # flags 6: mirroring/trainer/mapper lo-nibble
        read_from += 1
        self.mapper2 = loaded[read_from]          # flags 7: mapper hi-nibble
        read_from += 1
        self.prg_ram_size = loaded[read_from]
        read_from += 1
        self.tv_system1 = loaded[read_from]
        read_from += 1
        self.tv_system2 = loaded[read_from]
        read_from += 1
        read_from += 5                            # skip unused padding: header is 16 bytes total
        # If there is a 512-byte trainer block (flags 6, bit 2), skip it
        if self.mapper1 & 0x04:
            read_from += 512
        # Mapper id: high nibble of flags 7 is the upper nibble,
        # high nibble of flags 6 is the lower nibble
        self.nMapperID = ((self.mapper2 >> 4) << 4) | (self.mapper1 >> 4)
        self.mirror = "vertical" if (self.mapper1 & 0x01) else "horizontal"
        nFileType = 1
        if nFileType == 0:
            pass
        if nFileType == 1:
            # BUGFIX: PRG bank count comes from the PRG header field
            # (previously copied from chr_rom_chunks), and both memories are
            # sized by their bank counts instead of a single hard-coded bank.
            self.nPRGBanks = self.prg_rom_chunks
            prg_size = int(self.nPRGBanks) * 16384
            self.vPRGMemory = loaded[read_from:read_from + prg_size]
            read_from += prg_size
            self.nCHRBanks = self.chr_rom_chunks
            # zero CHR banks means the cart uses 8 KB of CHR RAM
            chr_size = int(self.nCHRBanks) * 8192 if self.nCHRBanks else 8192
            self.vCHRMemory = loaded[read_from:read_from + chr_size]
            read_from += chr_size                 # BUGFIX: was the typo 8129
        if nFileType == 2:
            pass
        if self.nMapperID == 0:
            self.mapper = Mapper_0000(self.nPRGBanks, self.nCHRBanks)
            self.bImageValid = True

    def imageValid(self) -> bool:
        """Return True once a cartridge image was loaded and a mapper exists."""
        return self.bImageValid

    def cpuRead(self, addr: np.uint16) -> [bool, np.uint8]:
        """Read one byte from PRG memory if the mapper claims *addr*.
        Returns (handled, data)."""
        mapped_addr = 0
        is_mapped, mapped_addr = self.mapper.cpuMapRead(addr, mapped_addr)
        if is_mapped:
            data = self.vPRGMemory[mapped_addr]
            return True, data
        else:
            return False, 0

    def cpuWrite(self, addr: np.uint16, data: np.uint8) -> bool:
        """Write one byte to PRG memory if the mapper claims *addr*."""
        mapped_addr = 0
        is_mapped, mapped_addr = self.mapper.cpuMapWrite(addr, mapped_addr)
        if is_mapped:
            self.vPRGMemory[mapped_addr] = data
            return True
        else:
            return False

    def ppuRead(self, addr: np.uint16) -> [bool, np.uint8]:
        """Read one byte from CHR memory if the mapper claims *addr*.
        Returns (handled, data)."""
        mapped_addr = 0
        is_mapped, mapped_addr = self.mapper.ppuMapRead(addr, mapped_addr)
        if is_mapped:
            data = self.vCHRMemory[mapped_addr]
            return True, data
        else:
            return False, 0

    def ppuWrite(self, addr: np.uint16, data: np.uint8) -> bool:
        """Write one byte to CHR memory if the mapper claims *addr*."""
        mapped_addr = 0
        is_mapped, mapped_addr = self.mapper.ppuMapWrite(addr, mapped_addr)
        if is_mapped:
            self.vCHRMemory[mapped_addr] = data
            return True
        else:
            return False
if __name__ == "__main__":
    # Manual smoke test: load a known test ROM when run as a script.
    cart = Cartridge("../branch_timing_tests/1.Branch_Basics.nes")
| StarcoderdataPython |
5138917 | <reponame>two/megumegu.py
# -*- coding: utf-8 -*-
from __future__ import print_function
import warnings
import MySQLdb
class QueryMixin(object):
    """Query layer shared by concrete DB backends.

    Subclasses must implement sql() (and get_last_insert_id());
    everything else is built on top of it.
    """
    def sql(self):
        # Abstract: concrete backends (e.g. Mysql below) must override.
        raise NotImplementedError

    def get_sites(self):
        """Return every enabled site joined with its options and its most
        recently recorded update (hash/title), one row per site."""
        return self.sql("""SELECT mm_site.id as id, name, mm_site.url as url, url2, schedule, notification, model, query_entry, query_id, query_title, query_link, query_content, start_tag, end_tag, options, mm_updates.hash as latest_hash, mm_updates.title as latest_title
            FROM mm_site
            LEFT JOIN mm_option ON mm_site.id = mm_option.site_id
            LEFT JOIN (
                SELECT max(id) as id, site_id
                FROM mm_updates
                GROUP BY site_id
            ) last_update ON last_update.site_id = mm_site.id
            LEFT JOIN mm_updates ON mm_site.id = last_update.site_id AND mm_updates.id = last_update.id
            WHERE mm_site.enable is True
            """)

    def insert_update(self, site_id, url, title, content, update_hash):
        """Insert a new update row and return its auto-increment id."""
        param = {}
        param['site_id'] = site_id
        param['url'] = url
        param['title'] = title
        param['content'] = content
        param['hash'] = update_hash
        # The two %s are filled with the column list and the placeholder
        # list by build_query() (dict values are bound as parameters).
        self.sql("""INSERT INTO mm_updates(%s) VALUES(%s)""", param)
        return self.get_last_insert_id()

    def has_hash(self, site_id, update_hash):
        """Return stored hash row(s) matching (site_id, hash); an empty
        result means this update has not been seen before."""
        return self.sql("""SELECT hash
            FROM mm_updates
            WHERE site_id = %s AND hash = %s""", (site_id, update_hash))
class Mysql(QueryMixin):
    """MySQLdb-backed implementation of QueryMixin.

    Owns a single connection and cursor; every sql() call commits after
    executing and returns the fetched rows.
    """
    def __init__(self, host, user, passwd, db, port=3306, charset='utf8mb4'):
        warnings.filterwarnings('ignore', category=MySQLdb.Warning)
        # BUGFIX: pre-set the attributes __del__ inspects so that a failed
        # connect does not cause an AttributeError during finalization.
        self._connect = None
        self._cursor = None
        try:
            self._connect = MySQLdb.connect(host=host, user=user, passwd=passwd, db=db, port=port, charset=charset)
            self.set_dict_cursor()
        except MySQLdb.Error as e:
            # Keep the historical Exception type for callers, but chain the
            # original error so the traceback stays informative.
            raise Exception(e) from e

    def __del__(self):
        # BUGFIX: guard with getattr so a partially constructed instance
        # (e.g. connect() raised) does not blow up in the destructor.
        cursor = getattr(self, '_cursor', None)
        if cursor:
            cursor.close()
        connect = getattr(self, '_connect', None)
        if connect:
            connect.close()

    def set_cursor(self):
        """Switch to a plain tuple cursor."""
        self._cursor = self._connect.cursor(MySQLdb.cursors.Cursor)

    def set_dict_cursor(self):
        """Switch to a dict cursor (rows come back as column->value dicts)."""
        self._cursor = self._connect.cursor(MySQLdb.cursors.DictCursor)

    def get_last_insert_id(self):
        """Return the auto-increment id produced by the last INSERT."""
        return self._cursor.lastrowid

    def sql(self, query, values=()):
        """Execute *query* with *values*, commit, and return all rows.

        A dict value is expanded via build_query() (columns/placeholders)
        and bound positionally; lists and scalars are normalized to tuples.
        """
        try:
            if values != ():
                if isinstance(values, dict):
                    query = self.build_query(query, values)
                    values = tuple(values.values())
                elif isinstance(values, list):
                    values = tuple(values)
                elif not isinstance(values, tuple):
                    values = (values,)
                self._cursor.execute(query, values)
            else:
                self._cursor.execute(query)
        except Exception:
            raise
        self._connect.commit()
        return self._cursor.fetchall()

    def build_query(self, query, values):
        """Fill the query's two %s slots with the dict's column names and a
        matching number of %s parameter placeholders."""
        # BUGFIX/cleanup: the old map(lambda x: x, ...) was a no-op copy.
        col = ', '.join(values)
        val = ', '.join(['%s'] * len(values))
        return query % (col, val)
| StarcoderdataPython |
8057755 | <gh_stars>10-100
def sum1(a, b):
    """Return the sum of *a* and *b*."""
    result = a + b
    return result
def mul1(a, b):
    """Return the product of *a* and *b*."""
    result = a * b
    return result
| StarcoderdataPython |
1779718 | <filename>backend/migrations/versions/f58846daf788_.py<gh_stars>0
"""empty message
Revision ID: f<PASSWORD>
Revises: None
Create Date: 2016-02-01 13:11:53.606417
"""
# revision identifiers, used by Alembic.
# Alembic revision identifiers (down_revision None => this is the base
# migration). NOTE(review): the revision string below contains the dataset's
# '<PASSWORD>' scrubbing placeholder and does not match the filename's
# f58846daf788 — confirm against the real migration file before use.
revision = 'f58846daf<PASSWORD>'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the Pages table (id, title, created, modified, text)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('Pages',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=120), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=True),
        sa.Column('modified', sa.DateTime(), nullable=True),
        sa.Column('text', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Drop the Pages table (reverses upgrade())."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('Pages')
    ### end Alembic commands ###
| StarcoderdataPython |
12821576 | <filename>arekit/contrib/networks/context/architectures/base/att_pcnn_base.py
import tensorflow as tf
from arekit.contrib.networks.attention import common
from arekit.contrib.networks.attention.helpers import embedding
from arekit.contrib.networks.context.architectures.pcnn import PiecewiseCNN
class AttentionPCNNBase(PiecewiseCNN):
    """
    Represents a base (abstract) class with attention scope.
    Usage:
        implement `get_att_input` method in nested class.
        configuration should include AttentionModel.
    """
    def __init__(self):
        super(AttentionPCNNBase, self).__init__()
        # MLP attention weights tensor; filled in during graph construction
        # (init_context_embedding) and exposed for logging.
        self.__att_weights = None

    # region properties

    @property
    def ContextEmbeddingSize(self):
        # The context embedding grows by the attention embedding size; the
        # attention model needs its term embedding size initialized first.
        if self.Config.AttentionModel.TermEmbeddingSize is None:
            self.__init_aspect_term_embedding_size()
        return super(AttentionPCNNBase, self).ContextEmbeddingSize + \
            self.Config.AttentionModel.AttentionEmbeddingSize

    # endregion

    def set_att_weights(self, weights):
        # Store the attention weights tensor for later logging.
        self.__att_weights = weights

    def get_att_input(self):
        """
        This is an abstract method which is considered to be implemented in nested class.
        Should return the keys tensor fed into the MLP attention.
        """
        raise NotImplementedError

    # region public `init` methods

    def init_body_dependent_hidden_states(self):
        """Initialize base PCNN hidden states plus the attention model's
        hidden variables inside the shared attention scope."""
        super(AttentionPCNNBase, self).init_body_dependent_hidden_states()
        with tf.variable_scope(common.ATTENTION_SCOPE_NAME):
            self.__init_aspect_term_embedding_size()
            self.Config.AttentionModel.init_hidden()

    def init_context_embedding(self, embedded_terms):
        """Concatenate the base PCNN context embedding with the MLP
        attention embedding computed over get_att_input() keys."""
        g = super(AttentionPCNNBase, self).init_context_embedding(embedded_terms)
        att_e, att_weights = embedding.init_mlp_attention_embedding(
            ctx_network=self,
            mlp_att=self.Config.AttentionModel,
            keys=self.get_att_input())
        self.set_att_weights(att_weights)
        return tf.concat([g, att_e], axis=-1)

    # endregion

    # region public 'iter' methods

    def iter_input_dependent_hidden_parameters(self):
        """Yield base parameters, then the attention weights for logging."""
        for name, value in super(AttentionPCNNBase, self).iter_input_dependent_hidden_parameters():
            yield name, value
        yield common.ATTENTION_WEIGHTS_LOG_PARAMETER, self.__att_weights

    # endregion

    def __init_aspect_term_embedding_size(self):
        # Derive the attention term embedding size from the context network.
        with tf.variable_scope(common.ATTENTION_SCOPE_NAME):
            self.Config.AttentionModel.init_term_embedding_size(p_names_with_sizes=embedding.get_ns(self))
| StarcoderdataPython |
393944 | <reponame>denkasyanov/education-backend
import pytest
from freezegun import freeze_time
from a12n.utils import get_jwt
# Applied to every test in this module: allow DB access and freeze "now"
# two days after the initial token's issue date (see initial_token).
pytestmark = [
    pytest.mark.django_db,
    pytest.mark.freeze_time('2049-01-05'),
]
@pytest.fixture
def refresh_token(api):
    """Factory fixture: POST *token* to the JWT refresh endpoint and return
    the parsed response (201 expected by default)."""
    def _refresh_token(token, expected_status_code=201):
        return api.post('/api/v2/auth/token/refresh/', {
            'token': token,
        }, format='json', expected_status_code=expected_status_code)
    return _refresh_token
@pytest.fixture
def initial_token(api):
    """A JWT issued two days before the module-wide frozen 'now'."""
    with freeze_time('2049-01-03'):
        return get_jwt(api.user)
def test_refresh_token_ok(initial_token, refresh_token):
    """A valid, unexpired token refreshes successfully."""
    got = refresh_token(initial_token)
    assert 'token' in got
def test_refreshed_token_is_a_token(initial_token, refresh_token):
    """The refreshed value looks like a real JWT (non-trivial length)."""
    got = refresh_token(initial_token)
    assert len(got['token']) > 32
def test_refreshed_token_is_new_one(initial_token, refresh_token):
    """Refreshing returns a different token, not the one submitted."""
    got = refresh_token(initial_token)
    assert got['token'] != initial_token
def test_refresh_token_fails_with_incorrect_previous_token(refresh_token):
    """Garbage input is rejected with a 400 and a serializer-level error."""
    got = refresh_token('some-invalid-previous-token', expected_status_code=400)
    assert 'non_field_errors' in got
def test_token_is_not_allowed_to_refresh_if_expired(initial_token, refresh_token):
    """A month after issue the token is expired and refresh is denied."""
    with freeze_time('2049-02-05'):
        got = refresh_token(initial_token, expected_status_code=400)
        assert 'expired' in got['non_field_errors'][0]
def test_received_token_works(anon, refresh_token, initial_token):
    """A refreshed token authenticates an otherwise anonymous client."""
    token = refresh_token(initial_token)['token']
    anon.credentials(HTTP_AUTHORIZATION=f'Bearer {token}')
    got = anon.get('/api/v2/users/me/')
    assert got is not None
| StarcoderdataPython |
60061 | # -*- coding: utf-8 -*-
"""
Created on Mon Jan 15 15:41:38 2018
@author: steve
"""
import re,types
from HeroLabStatBase import VERBOSITY,Character
OPERATORS = ["<",">","==",">=","<=","<>","!=","is","not","in","and","or"]
class Matcher(object):
    """
    Container for attributes and methods related to finding replacement values
    from the character and returning those values
    Methods:
        getMatch: returns result of evaluating the match value
        getKeys: returns the list of possible keywords for templates
    Attributes:
        name = (str) derived from character name
        type = (str) 'text' or 'image'
        matcherDictionary: (dict) provided matcher dictionary
    """
    """
    ********
    Matching
    ********
    The matching method uses keywords surrounded by double wavy brackets and
    modified with prefix and suffix elements.
    TEXTMATCH
    ==========
    The text match dictionary is for replacing keyworded and bracketed text with
    the result of evaluation of the value for each item. Examples of using the
    format in a template document are as follows::
        {{keyword}}}
        {{(keyword)}}
        {{head|_keyword}}
        {{(head|_keyword)}}
        {{head|keyword..}}
        {{head|?keyword..}}
        {{head|.c.keyword}}
        {{(head|_keyword..)}}
        {{head|_keyword_..}}
        {{head|_keyword__..}}
    ``keyword``
        must have a match in the dictionary to give a value which will be
        evaluated. It must also not have a ``..`` (double period) as part of it.
    ``()``
        must be the outer most element, but inside the double brackets. If
        the value evaluation results in something parenthesis are placed around it
    ``head|``
        This is replaced with the *head* text if the value evaluation results
        in something. The ``|`` (vertical bar) may not be used anywhere else.
    ``head|?``
        This is replaced with the *head* text if the value evaluation results
        in something, however only the head text is returned.
    ``_``
        This is used before a keyword to indicate that instead of the evaluated
        value the parent Feature's abbreviate method should be called with the
        final attribute as the argument.
        If it is used after the keyword, the parent Feature's describe method
        is called and the result returned.
        If two underscores follow the keyword, the name is prepended colon
        separated from the description.
    ``.c.``
        This before the keyword is used for tracking item lists. The value
        should evaluate to an integer value. The ``c`` can be any single character
        that character will be repeated integer times based on the evaluated value
    ``..``
        This after a keyword is used to indicate a possible list. The value
        should evaluate to an attribute from the first element in the list. The
        list should be one element up from the attribute. The result will be the
        same attribute from all the elements in the list. Any text following
        the ``..`` will be used as separators between the items in the list.
    The value for each item in the text match dictionary should evaluate to the
    text which will replace the keyword in the template document, or as mentioned
    above the text for the first attribute in a list.
    There are also some simple operations which can be done as part of the value
    evaluation. These include multiple attribute evaluation, keyword nesting,
    and simple conditionals.
    ``\x1f``
        This is used to indicate that there are multiple attributes references
        in the keyword item's value. Each attribute is listed with this
        character as the separator and will evaluate as a space separated list
    ``\x1e``
        This is used to nest keywords in the values. The double brackets
        are not used. However, all the modifiers can be used. Each is
        separated with this character and will be result in a list of values
    ``\x1d``
        This is used to separate a conditional from the following replacement
        The conditional can only be very simple with operators as in the global
        OPERATORS, all numbers will be treated as floats, string comparisons
        should be listed without any quotes and any attribute replacements
        must be very simple.
    """
    TEXTMATCH = {
    }
    """
    IMAGEMATCH
    ==========
    The image match dictionary is for replacing shape placeholders containing
    just the keyword as the placeholder. Examples of using the
    format in a template document are as follows::
        {{keyword}}
        {{h_keyword..}}
        {{l_keyword..}}
    ``keyword``
        must have a match in the dictionary to give a value which will be
        evaluated. It must also not have a ``..`` (double period) as part of it.
    ``h_ or l_``
        This is used to indicate the resolution for the image. The value has to
        evaluate to an object with imageHigh and/or imageLow (default) attribute.
    ``..``
        This is used to indicate a possible list. The value should evaluate to
        an attribute from the first element in the list. The list should be
        one element up from the attribute. The result will be the same attribute
        from all the elements in the list.
    The value for each item in the image match dictionary should evaluate to an
    object with an imageHigh and/or imageLow attribute (default imageHigh). The
    value of this attribute is a tuple containing the filename for the image
    and the absolute path filename for the image. If the value is the first in
    a list and the ``..`` modifier is used, imageHigh and/or imageLow is evaluated
    for each item in the list and returned as a list of tuples
    """
    IMAGEMATCH = {
    }
    """
    BOOLEANMATCH
    ==========
    The boolean match dictionary is for replacing a keyword with a returned boolean:
    True or False based on the value(s) from the evaluated character attribute.
    Examples of using the format in a template document are as follows::
        {{keyword}}
        {{keyword..}}
    ``keyword``
        must have a match in the dictionary to give a value which will be
        evaluated. It must also not have a ``..`` (double period) as part of it.
    ``..``
        This is used to indicate a possible list. The value should evaluate to
        an attribute from the first element in the list. The list should be
        one element up from the attribute. The result will be derived from the
        same attribute from all the elements in the list.
    The value for each item in the boolean match dictionary should evaluate to a
    case insensitive string of yes, no, true, false, on, or off. These are
    then interpreted as a boolean and returned either as a single result of a list.
    """
    BOOLEANMATCH = {
    }
    # Mapping of the accepted (lower-cased) truth words to Python booleans.
    _booleanDict = {
        'yes':True,
        'no':False,
        'true':True,
        'false':False,
        'on':True,
        'off':False,
    }

    def __init__(self,character,matcherDictionary=TEXTMATCH,matcherType='text',**kwargs):
        """create a matcher give the character and the match dictionary
        Args:
            character: (Character) instance from which the data is drawn
            matcherDictionary: (dict) dictionary of keywords which are matched
                and the values are replacement character subobjects which when
                evaluated return a string, a boolean, or None for 'text' type
                matchers. Alternatively these values may return a tuple of
                (image filename, image absolute path filename) for 'image'
                type matchers
            matcherType: (string) either 'text' or 'image' or 'boolean'
        """
        if 'verbosity' not in kwargs: kwargs['verbosity'] = VERBOSITY
        self.verbosity = kwargs['verbosity']
        assert type(character) == Character, "First argument must be a Character instance: %s" % character
        assert type(matcherDictionary) == dict, "Second argument must be dictionary: %s" % matcherDictionary
        assert matcherType == 'text' or matcherType == 'image' or matcherType == 'boolean',"matcherType must be either 'text', 'image', or 'boolean': %s"% matcherType
        self._character = character
        self.name = "%s.%s %s" % (character.myIndex,character.characterIndex,character.name)
        self.type = matcherType
        self.matcherDictionary = matcherDictionary

    def _exists(self,toTest,*args,**kwargs):
        """check if the attribute exists within the character attribute tree
        Args:
            toTest: (str) dotted attribute path rooted at the character,
                where a segment may carry a [index]/(args) suffix for list,
                dict or method access
        Returns: a six member tuple
            isAttr: (boolean) this attribute exists
            testObj: value returned from final test object's attribute
            lastTestObj: (Feature) final test object
            testAttr: (string) final attribute of the feature being tested
            testList: (Feature list) if lastTestObj is a member
            testAttrIdx (int or str)
        """
        toTestList = toTest.split(".")
        testObj = self._character
        lastTestObj = testObj
        testList = []
        testAttrIdx = None
        attrCount = 0
        isAttr = True
        testAttr = ''
        # loop through each potential object and attribute from the provided
        # object test string. Starting with testObj = self._character
        for (attrCount,myAttr) in enumerate(toTestList):
            # save the last successful object test
            lastTestObj = testObj
            # match the attribute string to identify list element attributes
            # or methods of the object. Also match the list index or
            # method arguments
            attrMatch = re.match(r'([^\[\]\(\)]+)([\[\(](.+)?[\)\]])?',myAttr)
            if attrMatch:
                # next attribute to test without index or arguments
                testAttr = attrMatch.group(1)
                testAttrIdx = None
                # did we match an index/arguments ?
                if len(attrMatch.groups()) == 3:
                    testAttrIdx = attrMatch.group(3)
                # first test, does the testObj have the current attribute
                isAttr = hasattr(testObj,testAttr)
                if not isAttr:
                    #print(attrMatch.groups(),testObj,testAttr,dir(testObj))
                    break
                # second test, is the attribute a list element or method
                if testAttrIdx != None:
                    testList = getattr(testObj,testAttr)
                    if type(testList) == list:
                        if int(testAttrIdx) >= len(testList):
                            isAttr = False
                            break
                        testObj = testList[int(testAttrIdx)]
                    elif type(testList) == dict:
                        testObj = testList[testAttrIdx]
                    elif type(testList) == types.MethodType:
                        # a method: call it with the comma-split arguments;
                        # generators are expanded into a list
                        if type(testList(testAttrIdx.split(","))) == types.GeneratorType:
                            testObj = testList
                            testList = [i for i in testObj(testAttrIdx.split(","))]
                        else:
                            testObj = [i for i in testObj(testAttrIdx.split(","))]
                        break
                    else:
                        isAttr = False
                        break
                else:
                    testObj = getattr(testObj,testAttr)
            else:
                isAttr = False
        #if self.type == 'image':
        #    for testAttr in ['imageLow','imageHigh']:
        #        isAttr = hasattr(testObj,testAttr)
        #        if isAttr:
        #            lastTestObj = testObj
        #            testObj = getattr(testObj,testAttr)
        #            break
        # only report a surrounding list when the final object came from it
        if testList and lastTestObj not in testList: testList = []
        if not isAttr: testObj = toTest
        return (isAttr,testObj,lastTestObj,testAttr,testList,testAttrIdx)

    def getMatch(self,keyText,*args,**kwargs):
        """Return the match from the included character based on keyText
        Args:
            keyText: (str) keyword from matcherDictionary possibly with modifiers
                for head, parenthesis, lists, image resolution, and/or abbreviation
                \x1d separate conditional from replacement
                \x1e separate replacement values when using multiple (which will be joined with a space)
                \x1f separate keywords in replacement when nesting keywords
        """
        # just in case the keyText is passed with the brackets
        myKey = re.sub('^\{\{(.*)\}\}$',r'\1',keyText)
        # identify any brackets and strip them off the myKey
        (pStart,pEnd) = ('','')
        pMatch = re.search(r'^\{(.*)\}$',myKey)
        if pMatch: (myKey,pStart,pEnd) = (pMatch.group(1),'{','}')
        pMatch = re.search(r'^\[(.*)\]$',myKey)
        if pMatch: (myKey,pStart,pEnd) = (pMatch.group(1),'[',']')
        pMatch = re.search(r'^\((.*)\)$',myKey)
        if pMatch: (myKey,pStart,pEnd) = (pMatch.group(1),'(',')')
        # identify any header and strip it off the myKey
        headText = ''
        hMatch = re.search(r'^([^|]+)\|([^|]+)$',myKey)
        if hMatch: (headText,myKey) = hMatch.groups()
        hOnlyMatch = re.search(r'^[?]',myKey)
        if hOnlyMatch: myKey = re.sub(r'^\?','',myKey)
        # identify any repeating characters and strip it off the myKey
        repeatText = ''
        rMatch = re.search(r'^\.(.)\.(.+)$',myKey)
        if rMatch: (repeatText,myKey) = rMatch.groups()
        # assign flag for abbreviation
        abbreviate = False
        if re.match(r'^_',myKey) and self.type != 'image': abbreviate = True
        # add in image resolution
        imageRes = ''
        if self.type == 'image':
            imageRes = 'imageLow'
            if re.match(r'^h_',myKey):
                imageRes = 'imageHigh'
        # match for the list option and separator based on flag
        listMatch = re.search(r'\.\.(.*)$',myKey)
        joiner = ''
        if listMatch:
            joiner = listMatch.group(1)
        # strip off repeat, resolution, and abbreviate flags down to the key
        myKey = re.sub(r'\.\..*$','',re.sub(r'^(h_|l_|_)','',myKey))
        # match for the description option and strip the flag
        nameDescribe = re.search(r'__$',myKey)
        describe = re.search(r'_$',myKey)
        myKey = re.sub(r'__?$','',myKey)
        # some matchers use the stripped key, some use the full key
        keyWord = myKey in self.matcherDictionary and myKey or keyText
        if keyWord not in self.matcherDictionary:
            if self.verbosity >= 2:
                print("Warning: key is not in Matcher, %s returned" % keyWord)
            return keyWord
        rtnList = []
        myValue = self.matcherDictionary[keyWord]
        testedValue = (False,None,None,str(),list(),None)
        # if the value is also keys split them up and get the values
        if re.search("\x1f",myValue):
            for kw in re.split("\x1f",myValue):
                rtnList.append(self.getMatch(kw))
        else:
            # a special type of text match where two values are separated by a group separator
            # in this case the first is evaluated as a boolean which determines if the second is
            # displayed.
            conditional = False
            conditionalResult = []
            itemCount = 1
            if re.search("\x1d",self.matcherDictionary[keyWord]):
                conditional = True
                (myConditional,myValue) = re.split("\x1d",myValue)
                conditionalList = re.split(r' ',myConditional)
                # evaluate each part of the conditional which is a feature to its attribute
                # each part of the conditional is also then expanded to a list
                for (condIdx,condItem) in enumerate(conditionalList):
                    testedItem = self._exists(condItem)
                    # if the keyword asks for a list, the attribute exists, and the attribute comes from a list member
                    if listMatch and testedItem[0] and testedItem[4]:
                        # go through each feature in the list and get the relevant attribute value
                        conditionalList[condIdx] = [hasattr(lf,testedItem[3]) and getattr(lf,testedItem[3]) for lf in testedItem[4]]
                        itemCount = len(conditionalList[condIdx]) > itemCount and len(conditionalList[condIdx]) or itemCount
                    else:
                        conditionalList[condIdx] = [testedItem[1]]
                # duplicate the last element in the conditional list part until all are the same length
                for (condIdx,condItem) in enumerate(conditionalList):
                    while len(condItem) < itemCount:
                        condItem.append(condItem[len(condItem)-1])
                    conditionalList[condIdx] = condItem
                # evaluate set of conditionals for each possible list item
                for itemIdx in range(itemCount):
                    tempConditionalList = []
                    for condIdx in range(len(conditionalList)):
                        # all numbers are evaluated as floats
                        try:
                            float(conditionalList[condIdx][itemIdx])
                            if type(conditionalList[condIdx][itemIdx]) != types.BooleanType:
                                tempConditionalList.append("float(%s)" % conditionalList[condIdx][itemIdx])
                            else:
                                if conditionalList[condIdx][itemIdx]:
                                    tempConditionalList.append("True")
                                else:
                                    tempConditionalList.append("False")
                        except(ValueError):
                            # non-numeric: quote operands, pass operators through
                            if conditionalList[condIdx][itemIdx] not in OPERATORS:
                                tempConditionalList.append('"'+conditionalList[condIdx][itemIdx]+'"')
                            else:
                                tempConditionalList.append(conditionalList[condIdx][itemIdx])
                    # NOTE(review): eval() of template-supplied conditionals —
                    # safe only as long as matcher dictionaries are trusted.
                    try:
                        conditionalResult.append(eval(" ".join(tempConditionalList)))
                    except:
                        print(tempConditionalList)
                        raise
            # I now have a list of boolean stored in conditionalResult, one for each
            # attribute in the list, or a list of one for non-list attributes
            # Now lets go through all the values.
            valueList = []
            maxCount = 0
            # loop through each of the \x1e separated values
            # these will be interleaved as space separated
            # values for each one in a list (if it is a list)
            for (valCount,myValue) in enumerate(re.split("\x1e",myValue)):
                valueList.append(list())
                # append imageRes for images or '' for all else
                if self.type == 'image':
                    myValue = re.sub(r'.image(High|Low)','',myValue)
                    myValue += "." + imageRes
                testedValue = self._exists(myValue)
                # if it does not exist append empty result to the list
                if not testedValue[0]:
                    if self.verbosity >= 2: print("Warning: key:%s -> %s is not in Character %s, empty text returned" % (keyWord,myValue,self.name))
                    #if self.type == 'boolean': return False
                    #if self.type == 'boolean':
                    #    valueList[valCount].append('false')
                    #elif self.type == 'image':
                    #    valueList[valCount].append(('',''))
                    #else:
                    #    valueList[valCount].append('')
                    valueList[valCount].append(None)
                    continue
                # if we have the value add it/them to the list
                feature = testedValue[2]
                attr = testedValue[3]
                featureList = []
                if listMatch and testedValue[4]:
                    featureList = testedValue[4]
                else:
                    featureList = [feature]
                for f in featureList:
                    if listMatch and hasattr(f,attr) or not listMatch:
                        if abbreviate:
                            myVal = f.abbreviate(attr)
                        else:
                            myVal = getattr(f,attr)
                        if describe:
                            if nameDescribe:
                                myVal = f.describe(attr,myVal)
                            else:
                                myVal = f.describe(attr)
                        valueList[valCount] += [myVal]
                # keep track of max values per valCount
                maxCount = len(valueList[valCount]) > maxCount and len(valueList[valCount]) or maxCount
            for cntr in range(maxCount):
                if conditional:
                    # use the cntr to find the relevant conditional or if they are mismatched
                    # just use the last conditional
                    if (cntr >= len(conditionalResult)):
                        idx = len(conditionalResult)-1
                    else:
                        idx = cntr
                    if not conditionalResult[idx]:
                        continue
                toJoinList = []
                for vIdx in range(len(valueList)):
                    if cntr < len(valueList[vIdx]):
                        if (valueList[vIdx][cntr]):
                            if type(valueList[vIdx][cntr]) == types.MethodType:
                                toJoinList.append(joiner.join([i for i in valueList[vIdx][cntr]()]))
                            else:
                                toJoinList.append(valueList[vIdx][cntr])
                if self.type == 'text':
                    rtnList.append(" ".join(toJoinList))
                # multiple value separated by \x1e are ignored for boolean and images
                else:
                    rtnList.append(valueList[0][cntr])
        # Now we have a return list of strings or tuples
        if rMatch:
            newList = []
            for i in rtnList:
                try:
                    newList.append(repeatText * int(i))
                except ValueError:
                    if self.verbosity >= 2: print("Warning: key:%s -> %s attribute %s was not an integer for Character %s, 1 repeat used" % (keyWord,testedValue[2],testedValue[3],self.name))
                    newList.append(repeatText)
            rtnList = newList[:]
        # if this is a boolean, change the list to boolean list
        if self.type == 'boolean':
            rtnList = [self._booleanDict[b.lower()] for b in rtnList]
        # return the result(s)
        # NOTE(review): list-returning filter()/len() below implies this module
        # targets Python 2 (see also types.BooleanType above) — confirm before
        # running under Python 3.
        rtnList = filter(lambda i:i,rtnList)
        if len(rtnList) == 0:
            if self.verbosity >= 2: print("Warning: key:%s -> nothing stored in %s attribute %s for Character %s, empty text returned" % (keyWord,testedValue[2],testedValue[3],self.name))
            if self.type == 'boolean': return False
            if self.type == 'image': return ('','')
            return ''
        if self.type != 'text':
            if len(rtnList) == 1:
                return rtnList[0]
            return rtnList
        if hOnlyMatch: rtnList = []
        return ''.join([pStart,headText,joiner.join(rtnList),pEnd])

    def getKeys(self,*args,**kwargs):
        """return the list of possible keys for this matcher"""
        return self.matcherDictionary.keys()
11253363 | # -*- coding: utf8 -*
from time import sleep
import logging
from main_data import MainData
from toolbox import exit_prog, log_record
from read_options import read_opt
from webdriver import (
get_webdriver,
open_url,
wait_window,
get_webdriver_quit,
find_one_element_by_id,
get_info_from_element
)
from keyboard_and_mouse_tools import (
get_foreground_window,
full_screen,
push_button_on_keyboard,
return_wscript_shell,
simple_click,
simple_enter_text,
get_window_rect,
search_hwnd)
'''
Module for launching the browser and logging in to the 1C base.
'''
def init_prog(tuple_main_data:tuple):
    '''
    Build the MainData container for the run.

    Takes an (initially empty) tuple, appends the Selenium webdriver, the
    WScript.Shell object and the settings read from opt.txt, then wraps
    everything in a MainData object which holds the other objects, tables
    and settings as it is passed through all the functions.
    '''
    tuple_main_data += (get_webdriver(),)
    tuple_main_data += (return_wscript_shell(),)
    tuple_main_data += (read_opt(),)
    main_data = MainData(tuple_main_data)
    return main_data
def start_browser(main_data:object):
    '''
    Open the base URL, maximize the browser window and wait for the
    authorization form. On success store the window handle and rectangle
    in main_data and return it; otherwise quit the webdriver and exit.
    '''
    open_url(
        main_data.driver(),
        main_data.base_url()
    )
    hwnd = get_foreground_window()
    full_screen(hwnd)
    win_rect = get_window_rect(hwnd)
    if wait_window(
            main_data.driver(),
            main_data.auth_win(),
            1000):
        # give the page a moment to settle after the auth form appears
        sleep(5)
        main_data.set_hwnd(hwnd)
        main_data.set_win_rect(win_rect)
        return main_data
    else:
        get_webdriver_quit(main_data.driver())
        logging.shutdown()
        exit_prog()
def login_base(main_data:object):
    '''
    Log in to the base: enter the login and password, press the "OK"
    button, wait for the application to load, then wait for and try to
    close the 1C support window (with Esc).
    '''
    login_elem = get_info_from_element(
        find_one_element_by_id(
            main_data.driver(),
            main_data.login_opt()[0]
        )
    )
    password_elem = get_info_from_element(
        find_one_element_by_id(
            main_data.driver(),
            main_data.passw_opt()[0]
        )
    )
    ok_elem = get_info_from_element(
        find_one_element_by_id(
            main_data.driver(),
            main_data.ok_button_opt()
        )
    )
    # bail out if any of the three form elements could not be located
    if login_elem == None or password_elem == None or ok_elem == None:
        get_webdriver_quit(main_data.driver())
        logging.shutdown()
        exit_prog()
    simple_enter_text(
        main_data,
        login_elem,
        main_data.login_opt()[1]
    )
    simple_enter_text(
        main_data,
        password_elem,
        main_data.passw_opt()[1]
    )
    simple_click(main_data, ok_elem)
    # wait for the main application frame to appear
    if wait_window(
            main_data.driver(),
            'themesCellLimiter',
            1000
    ) == True:
        sleep(5)
        # if the 1C support window popped up, dismiss it with Esc
        if wait_window(main_data.driver(), 'ps0formHeaderTitle', 10) == True:
            sleep(5)
            push_button_on_keyboard(
                main_data.hwnd(),
                main_data.wscript_shell(),
                'esc',
                1
            )
        log_record('выполнен вход в базу')
        return main_data
    else:
        get_webdriver_quit(main_data.driver())
        logging.shutdown()
        exit_prog()
def reload(main_data:object):
    '''
    Shut down the webdriver, flush logging and exit the program when the
    stored 1C browser window handle can no longer be found.
    '''
    if search_hwnd(main_data.hwnd()) == False:
        get_webdriver_quit(main_data.driver())
        logging.shutdown()
        exit_prog()
| StarcoderdataPython |
9688457 | # vim: filetype=python
## load our own python modules
import system
import os, string, platform, subprocess, shutil
import re
## create a top level alias so that the help system will know about it
# Registry of every alias created through top_level_alias(), kept as a single
# space-separated string so the help system can report them all.
ALIASES = ''

def top_level_alias(env, name, targets):
    """Register *targets* under the SCons alias *name* and remember the
    alias name so the help system will know about it."""
    global ALIASES
    ALIASES = ALIASES + ' ' + name
    env.Alias(name, targets)

def get_all_aliases():
    """Return the space-separated string of aliases registered so far."""
    return ALIASES
## Searches for file in the system path. Returns a list of directories containing file
## Searches for file in the system path. Returns a list of directories containing file
def find_in_path(file):
    """Return the list of full paths to *file* that exist on disk, one
    candidate per entry of the PATH environment variable."""
    # BUGFIX: str.split and a list comprehension instead of the Python-2-only
    # string.split()/filter()/map() combination (string.split was removed in
    # Python 3 and a lazy filter object would no longer be a list).
    dirs = os.environ['PATH'].split(os.pathsep)
    candidates = [os.path.join(d, file) for d in dirs]
    return [c for c in candidates if os.path.exists(c)]
## Returns a list of all files with an extension from the 'valid_extensions' list
def find_files_recursive(path, valid_extensions):
    """Walk *path* and return the paths (relative to *path*) of every file
    whose extension is in *valid_extensions*; '.svn' directories are skipped.

    Improvement: no longer shadows the builtins ``list`` and ``file``, and
    uses ``os.path.relpath`` instead of manual prefix slicing.
    """
    path = os.path.normpath(path)
    found = []
    for root, dirs, files in os.walk(path):
        if '.svn' in dirs:
            dirs.remove('.svn')  # prune: os.walk will not descend into it
        for name in files:
            if os.path.splitext(name)[1] in valid_extensions:
                # path of the file relative to the walk root
                found.append(os.path.relpath(os.path.join(root, name), path))
    return found
## Copy directories recursively, ignoring .svn dirs
## <dest> is created when missing; existing files in it are overwritten
def copy_dir_recursive(src, dest):
    """Recursively copy the contents of *src* into *dest*, skipping '.svn'.

    Bug fix: the old code never created *dest* itself, so copying a tree
    whose top level contained a plain file failed when *dest* did not exist
    (the old docstring claimed dest "must not exist", which was wrong).
    """
    if not os.path.exists(dest):
        os.makedirs(dest)
    for f in os.listdir(src):
        src_path = os.path.join(src, f)
        dest_path = os.path.join(dest, f)
        if os.path.isdir(src_path):
            if f != '.svn':
                if not os.path.exists(dest_path):
                    os.makedirs(dest_path)
                copy_dir_recursive(src_path, dest_path)
        else:
            # copy2 preserves file metadata (mtime, permission bits)
            shutil.copy2(src_path, dest_path)
## handy function to remove files only if they exist
def saferemove(path):
    """Delete *path* when present; silently do nothing otherwise."""
    present = os.path.exists(path)
    if present:
        os.remove(path)
## translates a process return code (as obtained by os.system or subprocess) into a status string
def process_return_code(retcode):
    """Map a child-process return code to 'OK', 'FAILED' or 'CRASHED'.

    On Windows a crash shows up as a negative code; on POSIX systems the
    shell reports signal deaths as codes above 128.
    """
    if retcode == 0:
        return 'OK'
    if system.os() == 'windows':
        crashed = retcode < 0
    else:
        crashed = retcode > 128
    return 'CRASHED' if crashed else 'FAILED'
# returns the softimage version
def get_softimage_version(sdk_path):
    """Parse XSISDK_VERSION out of <sdk_path>/include/xsi_version.h.

    Returns 'unknown' when the header is missing or unreadable (deliberate
    best effort).  Fixes: the file handle is now closed, the regex is a raw
    string, and the bare ``except`` is narrowed to I/O errors.
    """
    softimage_version = 'unknown'
    rx = re.compile(r'#\s*define\s+XSISDK_VERSION\s+(\S+)')
    header = os.path.join(sdk_path, 'include', 'xsi_version.h')
    try:
        with open(header) as f:
            for line in f:
                result = rx.search(line)
                if result:
                    softimage_version = result.group(1)
                    break
    except EnvironmentError:
        # header missing/unreadable -> report 'unknown'
        pass
    return softimage_version
## Obtains SItoA version by parsing 'Version.cpp'
def get_sitoa_version(path, components = 3):
    """Return the SItoA version string parsed from *path* (a Version.cpp file).

    *components* selects how many of major.minor.fix to include (1-3).
    Fix: the file is now opened with ``with`` so the handle is closed even
    if parsing raises.
    """
    MAJOR_VERSION = ''
    MINOR_VERSION = ''
    FIX_VERSION = ''
    with open(path, 'r') as f:
        for line in f:
            line = line.lstrip(' \t')
            if not line.startswith('#define'):
                continue
            tokens = line.split()
            if tokens[1] == 'SITOA_MAJOR_VERSION_NUM':
                MAJOR_VERSION = tokens[2]
            elif tokens[1] == 'SITOA_MINOR_VERSION_NUM':
                MINOR_VERSION = tokens[2]
            elif tokens[1] == 'SITOA_FIX_VERSION':
                # value looks like '"x"' -> drop the leading quote char, then
                # strip any remaining double quotes (kept from the original)
                FIX_VERSION = tokens[2][1:].strip('"')
    version = ''
    if (components > 0):
        version += MAJOR_VERSION
    if (components > 1):
        version += '.' + MINOR_VERSION
    if (components > 2):
        version += '.' + FIX_VERSION
    return version
## Obtains Arnold library version by parsing 'ai_version.h'
def get_arnold_version(path, components = 4):
    """Return the Arnold version string parsed from *path* (an ai_version.h).

    *components* selects how many of arch.major.minor.fix to include.
    Bug fix: ``version`` is now initialised, so ``components <= 0`` returns
    '' instead of raising UnboundLocalError; the file handle is also closed
    via ``with``.
    """
    ARCH_VERSION = ''
    MAJOR_VERSION = ''
    MINOR_VERSION = ''
    FIX_VERSION = ''
    with open(path, 'r') as f:
        for line in f:
            line = line.lstrip(' \t')
            if not line.startswith('#define'):
                continue
            tokens = line.split()
            if tokens[1] == 'AI_VERSION_ARCH_NUM':
                ARCH_VERSION = tokens[2]
            elif tokens[1] == 'AI_VERSION_MAJOR_NUM':
                MAJOR_VERSION = tokens[2]
            elif tokens[1] == 'AI_VERSION_MINOR_NUM':
                MINOR_VERSION = tokens[2]
            elif tokens[1] == 'AI_VERSION_FIX':
                FIX_VERSION = tokens[2].strip('"')
    version = ''
    if (components > 0):
        version += ARCH_VERSION
    if (components > 1):
        version += '.' + MAJOR_VERSION
    if (components > 2):
        version += '.' + MINOR_VERSION
    if (components > 3):
        version += '.' + FIX_VERSION
    return version
## This function will give us the information we need about the latest git commit
def get_latest_revision():
    """Return (commit hash, web URL of that commit) for the current checkout.

    Shells out to git; both values remain 'not found' when the repository
    or the 'origin' remote cannot be queried.
    NOTE(review): under Python 3, ``p.stdout`` yields bytes, so the
    ``startswith(str)`` tests would never match -- this code assumes
    Python 2 text pipes (confirm before porting).
    """
    revision = 'not found'
    url = 'not found'
    # the 'branch.oid' line of porcelain-v2 status carries the current commit hash
    p = subprocess.Popen('git status -b --porcelain=2', shell=True, stdout = subprocess.PIPE)
    retcode = p.wait()
    for line in p.stdout:
        if line.startswith('# branch.oid '):
            revision = line.split()[-1]
    p = subprocess.Popen('git remote get-url origin', shell=True, stdout = subprocess.PIPE)
    retcode = p.wait()
    for line in p.stdout:
        if line.startswith('https://'):
            url = line.strip()
            url = url[:-4]  # drop the trailing '.git'
            url += '/commit/' + revision
    return (revision, url)
def add_to_library_path(env, new_path):
    """Prepend *new_path* to the platform's dynamic-library search variable
    inside ``env['ENV']`` (PATH / DYLD_LIBRARY_PATH / LD_LIBRARY_PATH).
    """
    if system.os() == 'windows':
        var_name = 'PATH'
    elif system.os() == 'darwin':
        var_name = 'DYLD_LIBRARY_PATH'
    else:
        var_name = 'LD_LIBRARY_PATH'
    # dict.has_key() was removed in Python 3; 'in' works on both versions
    if var_name in env['ENV']:
        env['ENV'][var_name] = '%s%s%s' % (new_path, os.pathsep, env['ENV'][var_name])
    else:
        env['ENV'][var_name] = new_path
def set_library_path(env):
    """Export env['ENV']'s library-path variable to the real environment,
    remembering the previous value so reset_library_path() can restore it.
    """
    if system.os() == 'windows':
        var_name = 'PATH'
    elif system.os() == 'darwin':
        var_name = 'DYLD_LIBRARY_PATH'
    else:
        var_name = 'LD_LIBRARY_PATH'
    env['PREVIOUS_LIBRARY_PATH'] = ''
    # os.environ.has_key() was removed in Python 3; 'in' works on both versions
    if var_name in os.environ:
        env['PREVIOUS_LIBRARY_PATH'] = os.environ[var_name]
    os.environ[var_name] = env['ENV'][var_name]
def reset_library_path(env):
    """Restore the library-path variable saved by set_library_path()."""
    # has_key() was removed in Python 3; SCons environments support 'in'
    if 'PREVIOUS_LIBRARY_PATH' in env:
        if system.os() == 'windows':
            var_name = 'PATH'
        elif system.os() == 'darwin':
            var_name = 'DYLD_LIBRARY_PATH'
        else:
            var_name = 'LD_LIBRARY_PATH'
        os.environ[var_name] = env['PREVIOUS_LIBRARY_PATH']
def add_to_program_path(env, new_path):
    """Prepend *new_path* to PATH inside ``env['ENV']``."""
    # dict.has_key() was removed in Python 3; 'in' works on both versions
    if 'PATH' in env['ENV']:
        env['ENV']['PATH'] = '%s%s%s' % (new_path, os.pathsep, env['ENV']['PATH'])
    else:
        env['ENV']['PATH'] = new_path
def set_program_path(env):
    """Export env['ENV']['PATH'] to the real environment, saving the old value
    in env['PREVIOUS_PROGRAM_PATH'] for reset_program_path().
    """
    env['PREVIOUS_PROGRAM_PATH'] = ''
    # os.environ.has_key() was removed in Python 3; 'in' works on both versions
    if 'PATH' in os.environ:
        env['PREVIOUS_PROGRAM_PATH'] = os.environ['PATH']
    os.environ['PATH'] = env['ENV']['PATH']
def reset_program_path(env):
    """Restore the PATH saved by set_program_path(), if any."""
    # has_key() was removed in Python 3; 'in' works on both versions
    if 'PREVIOUS_PROGRAM_PATH' in env:
        os.environ['PATH'] = env['PREVIOUS_PROGRAM_PATH']
def get_default_path(var, default):
    """Value of the environment variable *var*, or *default* when unset."""
    return os.environ.get(var, default)
def get_escaped_path(path):
    """Return *path* with backslashes doubled on Windows (so the path
    survives later string interpolation); unchanged elsewhere."""
    if system.os() != 'windows':
        return path
    return path.replace("\\", "\\\\")
## Hacky replacement for the string partition method which is only available from Python 2.5
def strpartition(string, sep):
    """Split *string* at the first occurrence of *sep*, like str.partition.

    Returns (head, sep, tail) or (string, '', '') when *sep* is absent.
    Bug fix: the tail previously started at ``index + 1``, which dropped
    characters whenever *sep* was longer than one character.
    """
    index = string.find(sep)
    if index == -1:
        return (string, '', '')
    return (string[:index], sep, string[index + len(sep):])
def get_library_extension():
    """Shared-library filename suffix for the current platform ('' when unknown)."""
    extensions = {'windows': '.dll', 'linux': '.so', 'darwin': '.dylib'}
    return extensions.get(system.os(), '')
def get_executable_extension():
    """Executable filename suffix: '.exe' on Windows, '' elsewhere."""
    return '.exe' if system.os() == 'windows' else ''
5087681 | <gh_stars>0
""""
This is a parser for the header section of KAF/NAF
"""
from lxml import etree
import time
import platform
class CfileDesc:
    """
    This class encapsulates the fileDesc element in the KAF/NAF header
    """
    def __init__(self, node=None):
        """
        Constructor of the object
        @type node: xml Element or None (to create an empty one)
        @param node: node of the element; when None a fresh <fileDesc> is created
        """
        self.type = 'KAF/NAF'
        self.node = etree.Element('fileDesc') if node is None else node
class Cpublic:
    """
    This class encapsulates the public element in the KAF/NAF header
    """
    def __init__(self, node=None):
        """
        Constructor of the object
        @type node: xml Element or None (to create an empty one)
        @param node: node of the element; when None a fresh <public> is created
        """
        self.type = 'KAF/NAF'
        self.node = etree.Element('public') if node is None else node
class Clp:
    """
    This class encapsulates the linguistic processor (lp) element in the header
    """
    def __init__(self, node=None, name="", version="", timestamp=None, btimestamp=None, etimestamp=None):
        """
        Constructor of the object
        @type node: xml Element or None (to create an empty one)
        @param node: this is the node of the element. If it is None it will create a new object
        @type name: string
        @param name: the name of the linguistic processor
        @type version: string
        @param version: the version of the linguistic processor
        @type timestamp: string
        @param timestamp: the timestamp, or None to set it to the current time
        @param btimestamp: the begin timestamp, or None to set it to the current time (NOTE: only use None if header created at beginning of process!)
        @param etimestamp: the end timestamp, or None to set it (NOTE: only use None if header created at the end of the process!)
        """
        self.type = 'KAF/NAF'
        if node is None:
            self.node = etree.Element('lp')
            self.set_name(name)
            self.set_version(version)
            self.set_timestamp(timestamp)
            self.set_beginTimestamp(btimestamp)
            self.set_endTimestamp(etimestamp)
            # record which machine produced the annotation
            self.node.set('hostname', platform.node())
        else:
            self.node = node

    @staticmethod
    def _current_time():
        """Current local time in the '%Y-%m-%dT%H:%M:%S%Z' format used by all
        timestamp attributes.  Consolidates logic that was duplicated (with a
        redundant shadowing ``import time``) across three setters."""
        return time.strftime('%Y-%m-%dT%H:%M:%S%Z')

    def set_name(self, name):
        """
        Set the name of the linguistic processor
        @type name: string
        @param name: name of the linguistic processor
        """
        self.node.set('name', name)

    def set_version(self, version):
        """
        Set the version of the linguistic processor
        @type version: string
        @param version: version of the linguistic processor
        """
        self.node.set('version', version)

    def set_timestamp(self, timestamp=None):
        """
        Set the timestamp of the linguistic processor, set to None for the current time
        @type timestamp: string
        @param timestamp: timestamp of the linguistic processor
        """
        if timestamp is None:
            timestamp = self._current_time()
        self.node.set('timestamp', timestamp)

    def set_beginTimestamp(self, btimestamp=None):
        """
        Set the begin timestamp of the linguistic processor, set to None for the current time
        @type btimestamp: string
        @param btimestamp: begin timestamp of the linguistic processor
        """
        if btimestamp is None:
            btimestamp = self._current_time()
        self.node.set('beginTimestamp', btimestamp)

    def set_endTimestamp(self, etimestamp=None):
        """
        Set the end timestamp of the linguistic processor, set to None for the current time
        @type etimestamp: string
        @param etimestamp: end timestamp of the linguistic processor
        """
        if etimestamp is None:
            etimestamp = self._current_time()
        self.node.set('endTimestamp', etimestamp)

    def get_node(self):
        """
        Returns the node of the element
        @rtype: xml Element
        @return: the node of the element
        """
        return self.node
class ClinguisticProcessors:
    """
    This class encapsulates one linguisticProcessors element in the header
    (the group of <lp> entries attached to a single layer).
    """
    def __init__(self, node=None):
        """
        Constructor of the object
        @type node: xml Element or None (to create an empty one)
        @param node: node of the element; when None a fresh element is created
        """
        self.type = 'KAF/NAF'
        self.node = node if node is not None else etree.Element('linguisticProcessors')

    def get_layer(self):
        """
        Returns the layer of the element
        @rtype: string
        @return: the value of the 'layer' attribute
        """
        return self.node.get('layer')

    def set_layer(self, layer):
        """
        Set the layer of the element
        @type layer: string
        @param layer: layer name
        """
        self.node.set('layer', layer)

    def add_linguistic_processor(self, my_lp):
        """
        Add a linguistic processor object to the layer
        @type my_lp: L{Clp}
        @param my_lp: linguistic processor object
        """
        self.node.append(my_lp.get_node())

    def get_node(self):
        """
        Returns the node of the element
        @rtype: xml Element
        @return: the node of the element
        """
        return self.node
class CHeader:
    """
    This class encapsulates the whole KAF/NAF header element
    """
    def __init__(self, node=None, type='NAF'):
        """
        Constructor of the object
        @type node: xml Element or None (to create an empty one)
        @param node: node of the element; when None an empty header is created
        @type type: string
        @param type: the type of the object (KAF or NAF)
        """
        self.type = type
        if node is not None:
            self.node = node
        elif self.type == 'NAF':
            self.node = etree.Element('nafHeader')
        elif self.type == 'KAF':
            self.node = etree.Element('kafHeader')

    def get_node(self):
        """
        Returns the node of the element
        @rtype: xml Element
        @return: the node of the element
        """
        return self.node

    def to_kaf(self):
        """
        Converts the header element to KAF (no-op when already KAF)
        """
        if self.type == 'NAF':
            self.node.tag = 'kafHeader'
            self.type = 'KAF'

    def to_naf(self):
        """
        Converts the header element to NAF (no-op when already NAF)
        """
        if self.type == 'KAF':
            self.node.tag = 'nafHeader'
            self.type = 'NAF'

    def add_linguistic_processors(self, linpro):
        """Adds a linguistic processors element
        @type linpro: ClinguisticProcessors
        @param linpro: linguistic processors element
        """
        self.node.append(linpro.get_node())

    def remove_lp(self, layer):
        """
        Removes the linguistic processors element for a given layer
        @type layer: string
        @param layer: the name of the layer
        """
        for candidate in self.node.findall('linguisticProcessors'):
            if candidate.get('layer') == layer:
                self.node.remove(candidate)
                break

    def add_linguistic_processor(self, layer, my_lp):
        """
        Adds a linguistic processor to a certain layer, creating the
        enclosing linguisticProcessors element when it does not exist yet
        @type layer: string
        @param layer: the name of the layer
        @type my_lp: L{Clp}
        @param my_lp: the linguistic processor
        """
        target = None
        for this_lp in self.node.findall('linguisticProcessors'):
            wrapper = ClinguisticProcessors(this_lp)
            if wrapper.get_layer() == layer:
                target = wrapper
                break
        if target is None:
            # layer not present yet: create and attach it
            target = ClinguisticProcessors()
            target.set_layer(layer)
            self.add_linguistic_processors(target)
        target.add_linguistic_processor(my_lp)
| StarcoderdataPython |
8181088 | from dataclasses import dataclass
from typing import Any, Tuple
from omegaconf import MISSING
@dataclass
class OptimizerConfig:
    """Base structured-config schema shared by all optimizers.

    Fields are MISSING, so concrete values must be supplied at config
    composition time (presumably via Hydra/OmegaConf -- confirm).
    """
    params: Any = MISSING  # parameters to optimize (filled at instantiation)
    lr: float = MISSING  # learning rate
@dataclass
class AdamConfig(OptimizerConfig):
    """Schema mirroring the torch.optim.Adam constructor arguments."""
    _target_: str = "torch.optim.Adam"  # dotted path of the class to instantiate
    betas: Tuple[float, float] = MISSING  # coefficients for running averages
    eps: float = MISSING  # numerical-stability term
    weight_decay: float = MISSING
    amsgrad: bool = MISSING
@dataclass
class SgdConfig(OptimizerConfig):
    """Schema mirroring the torch.optim.SGD constructor arguments."""
    _target_: str = "torch.optim.SGD"  # dotted path of the class to instantiate
    momentum: float = MISSING
    weight_decay: float = MISSING
    dampening: float = MISSING
    nesterov: bool = MISSING
| StarcoderdataPython |
1893420 | <filename>submissions/abc061/c.py<gh_stars>1-10
import sys
# Fast buffered-stdin helpers (standard competitive-programming boilerplate).
read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
readlines = sys.stdin.buffer.readlines
sys.setrecursionlimit(10 ** 7)
# n pairs (a, b): the multiset contains b copies of the integer a.
n, k = map(int, readline().split())
ab = [tuple(map(int, readline().split())) for _ in range(n)]
# Sort by value and consume the counts until k is exhausted:
# prints the k-th smallest element of the multiset.
ab.sort()
for a, b in ab:
    k -= b
    if k <= 0:
        print(a)
        break
| StarcoderdataPython |
3592225 | <gh_stars>0
#! /Users/nsanthony/miniconda3/bin/python
import rooms.room_class as rc
# Build the 'cottage' room by populating a fresh room instance.
blank = rc.room()
blank.name = 'cottage'
blank.descript = 'This is a plane old cottage....'
blank.size = 'small'
blank.occupied = 1  # occupancy flag -- room_class defines the exact meaning
blank.people = '<NAME>buff sharpening an axe'
blank.coord = [1,2,-1]  # map coordinates (presumably x, y, z -- confirm in room_class)
blank.seen = 0  # not yet visited (presumably)
cottage = blank
| StarcoderdataPython |
5045940 | <reponame>daniele-mc/HacktoberFest2020-4
def orangesRotting(grid):
    """Return the number of minutes until no fresh orange remains, or -1.

    *grid* cells: 0 = empty, 1 = fresh, 2 = rotten; each minute every rotten
    orange rots its 4-directional fresh neighbours (grid is mutated in place).
    Fixes: removed leftover debug ``print`` calls and replaced the O(n)
    ``list.pop(0)`` queue with a deque (O(1) popleft).
    """
    from collections import deque
    rows, cols = len(grid), len(grid[0])
    fresh = 0
    frontier = deque()
    # Seed the BFS with every initially-rotten orange; count the fresh ones.
    for i in range(rows):
        for j in range(cols):
            if grid[i][j] == 2:
                frontier.append((i, j))
            elif grid[i][j] == 1:
                fresh += 1
    minutes = 0
    while frontier:
        # Process one whole "minute" level of the BFS at a time.
        for _ in range(len(frontier)):
            x, y = frontier.popleft()
            for nx, ny in ((x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)):
                if 0 <= nx < rows and 0 <= ny < cols and grid[nx][ny] == 1:
                    grid[nx][ny] = 2
                    fresh -= 1
                    frontier.append((nx, ny))
        if frontier:
            minutes += 1
    return minutes if fresh == 0 else -1
# Sample grid (LeetCode 994 example); expected output: 4
print(orangesRotting([[2,1,1],[1,1,0],[0,1,1]]))
| StarcoderdataPython |
4841030 | """
The SRP definition for CPHD 0.3.
"""
from typing import Union
import numpy
from sarpy.compliance import integer_types
from sarpy.io.phase_history.cphd1_elements.base import DEFAULT_STRICT
# noinspection PyProtectedMember
from sarpy.io.complex.sicd_elements.base import Serializable, _SerializableDescriptor, \
_IntegerEnumDescriptor
__classification__ = "UNCLASSIFIED"
__author__ = "<NAME>"
class FxParametersType(Serializable):
    """
    The FX vector parameters: per-vector byte sizes of the Fx0, Fx_SS, Fx1
    and Fx2 fields (each stored as a big-endian 8-byte float).
    """
    _fields = ('Fx0', 'Fx_SS', 'Fx1', 'Fx2')
    _required = _fields
    # descriptors - 8 (bytes) is the only permitted size for each field
    Fx0 = _IntegerEnumDescriptor(
        'Fx0', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the Fx0 field')  # type: int
    Fx_SS = _IntegerEnumDescriptor(
        'Fx_SS', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the Fx_SS field')  # type: int
    Fx1 = _IntegerEnumDescriptor(
        'Fx1', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the Fx1 field')  # type: int
    Fx2 = _IntegerEnumDescriptor(
        'Fx2', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the Fx2 field')  # type: int

    def __init__(self, Fx0=8, Fx_SS=8, Fx1=8, Fx2=8, **kwargs):
        """
        Parameters
        ----------
        Fx0 : int
        Fx_SS : int
        Fx1 : int
        Fx2 : int
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.Fx0 = Fx0
        self.Fx1 = Fx1
        self.Fx2 = Fx2
        self.Fx_SS = Fx_SS
        super(FxParametersType, self).__init__(**kwargs)

    @staticmethod
    def get_size():
        """
        The size in bytes of this component of the vector (4 fields x 8 bytes).

        Returns
        -------
        int
        """
        return 32

    def get_position_offset_and_size(self, field):
        """
        Get the offset and size of the given field from the beginning of the vector.

        Parameters
        ----------
        field : str

        Returns
        -------
        None|(int, int)
        """
        if field not in self._fields:
            return None
        out = 0
        for fld in self._fields:
            val = getattr(self, fld)
            if fld == field:
                return out, val
            else:
                # accumulate the sizes of the preceding fields
                out += val
        return None

    def get_dtype_components(self):
        """
        Gets the dtype components (each field is a big-endian 8-byte float).

        Returns
        -------
        List[Tuple]
        """
        return [(entry, '>f8') for entry in self._fields]
class TOAParametersType(Serializable):
    """
    The TOA vector parameters: per-vector byte sizes of the DeltaTOA0 and
    TOA_SS fields (each stored as a big-endian 8-byte float).
    """
    _fields = ('DeltaTOA0', 'TOA_SS')
    _required = _fields
    # descriptors - 8 (bytes) is the only permitted size for each field
    DeltaTOA0 = _IntegerEnumDescriptor(
        'DeltaTOA0', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the DeltaTOA0 field')  # type: int
    TOA_SS = _IntegerEnumDescriptor(
        'TOA_SS', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the TOA_SS field')  # type: int

    def __init__(self, DeltaTOA0=8, TOA_SS=8, **kwargs):
        """
        Parameters
        ----------
        DeltaTOA0 : int
        TOA_SS : int
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.DeltaTOA0 = DeltaTOA0
        self.TOA_SS = TOA_SS
        super(TOAParametersType, self).__init__(**kwargs)

    @staticmethod
    def get_size():
        """
        The size in bytes of this component of the vector (2 fields x 8 bytes).

        Returns
        -------
        int
        """
        return 16

    def get_position_offset_and_size(self, field):
        """
        Get the offset and size of the given field from the beginning of the vector.

        Parameters
        ----------
        field : str

        Returns
        -------
        None|(int, int)
        """
        if field not in self._fields:
            return None
        out = 0
        for fld in self._fields:
            val = getattr(self, fld)
            if fld == field:
                return out, val
            else:
                # accumulate the sizes of the preceding fields
                out += val
        return None

    def get_dtype_components(self):
        """
        Gets the dtype components (each field is a big-endian 8-byte float).

        Returns
        -------
        List[Tuple]
        """
        return [(entry, '>f8') for entry in self._fields]
class VectorParametersType(Serializable):
    """
    The vector parameters sizes object.
    """
    _fields = (
        'TxTime', 'TxPos', 'RcvTime', 'RcvPos', 'SRPTime', 'SRPPos', 'AmpSF', 'TropoSRP',
        'FxParameters', 'TOAParameters')
    _required = (
        'TxTime', 'TxPos', 'RcvTime', 'RcvPos', 'SRPPos')
    _choice = ({'required': False, 'collection': ('FxParameters', 'TOAParameters')}, )
    # descriptors
    # NOTE(review): the 24-byte position fields below declare default_value=8
    # even though 24 is the only permitted value -- confirm the descriptor
    # ignores an out-of-enum default before relying on it.
    TxTime = _IntegerEnumDescriptor(
        'TxTime', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the TxTime field')  # type: int
    TxPos = _IntegerEnumDescriptor(
        'TxPos', (24, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the TxPos field')  # type: int
    RcvTime = _IntegerEnumDescriptor(
        'RcvTime', (8, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the RcvTime field')  # type: int
    RcvPos = _IntegerEnumDescriptor(
        'RcvPos', (24, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the RcvPos field')  # type: int
    SRPTime = _IntegerEnumDescriptor(
        'SRPTime', (8, ), _required, strict=DEFAULT_STRICT, default_value=None,
        docstring='The size of the SRPTime field')  # type: int
    SRPPos = _IntegerEnumDescriptor(
        'SRPPos', (24, ), _required, strict=DEFAULT_STRICT, default_value=8,
        docstring='The size of the SRPPos field')  # type: int
    AmpSF = _IntegerEnumDescriptor(
        'AmpSF', (8, ), _required, strict=DEFAULT_STRICT, default_value=None,
        docstring='The size of the AmpSF field')  # type: int
    TropoSRP = _IntegerEnumDescriptor(
        'TropoSRP', (8, ), _required, strict=DEFAULT_STRICT, default_value=None,
        docstring='The size of the TropoSRP field')  # type: int
    FxParameters = _SerializableDescriptor(
        'FxParameters', FxParametersType, _required, strict=DEFAULT_STRICT,
        docstring='The frequency parameters, only present when DomainType is '
                  'FX.')  # type: Union[None, FxParametersType]
    # Bug fix: this descriptor was previously constructed with FxParametersType,
    # contradicting both the declared annotation and the TOA semantics.
    TOAParameters = _SerializableDescriptor(
        'TOAParameters', TOAParametersType, _required, strict=DEFAULT_STRICT,
        docstring='The TOA parameters, only present when DomainType is '
                  'TOA.')  # type: Union[None, TOAParametersType]

    def __init__(self, TxTime=8, TxPos=24, RcvTime=8, RcvPos=24, SRPTime=None, SRPPos=24,
                 AmpSF=None, TropoSRP=None, FxParameters=None, TOAParameters=None, **kwargs):
        """
        Parameters
        ----------
        TxTime : int
        TxPos : int
        RcvTime : int
        RcvPos : int
        SRPTime : None|int
        SRPPos : int
        AmpSF : None|int
        TropoSRP : None|int
        FxParameters : None|FxParametersType
        TOAParameters : None|TOAParametersType
        kwargs
        """
        if '_xml_ns' in kwargs:
            self._xml_ns = kwargs['_xml_ns']
        if '_xml_ns_key' in kwargs:
            self._xml_ns_key = kwargs['_xml_ns_key']
        self.TxTime = TxTime
        self.TxPos = TxPos
        self.RcvTime = RcvTime
        self.RcvPos = RcvPos
        self.SRPTime = SRPTime
        self.SRPPos = SRPPos
        self.AmpSF = AmpSF
        self.TropoSRP = TropoSRP
        self.FxParameters = FxParameters
        self.TOAParameters = TOAParameters
        super(VectorParametersType, self).__init__(**kwargs)

    def get_size(self):
        """
        The size in bytes of this component of the vector (None fields are absent
        and contribute nothing).

        Returns
        -------
        int
        """
        out = 0
        for fld in self._fields:
            val = getattr(self, fld)
            if val is None:
                pass
            elif isinstance(val, integer_types):
                out += val
            elif isinstance(val, (FxParametersType, TOAParametersType)):
                out += val.get_size()
            else:
                raise TypeError('Got unhandled type {}'.format(type(val)))
        return out

    def get_position_offset_and_size(self, field):
        """
        Get the offset and size of the given field from the beginning of the vector.

        Parameters
        ----------
        field : str

        Returns
        -------
        None|(int, int)
        """
        out = 0
        for fld in self._fields:
            val = getattr(self, fld)
            if fld == field:
                if val is not None:
                    return out, val
                else:
                    return None
            if val is None:
                pass
            elif isinstance(val, integer_types):
                out += val
            elif isinstance(val, (FxParametersType, TOAParametersType)):
                # the requested field may live inside the domain sub-structure
                res = val.get_position_offset_and_size(field)
                if res is not None:
                    return out + res[0], res[1]
                else:
                    out += val.get_size()
            else:
                raise TypeError('Got unhandled type {}'.format(type(val)))
        return None

    def get_vector_dtype(self):
        """
        Gets the dtype for the corresponding structured array for the full PVP array.

        Returns
        -------
        numpy.dtype
            This will be a compound dtype for a structured array.
        """
        the_type_info = []
        for fld in self._fields:
            val = getattr(self, fld)
            if val is None:
                continue
            if fld in ['FxParameters', 'TOAParameters']:
                the_type_info.extend(val.get_dtype_components())
            else:
                assert isinstance(val, integer_types), 'CPHD 0.3 PVP field {} should be an integer, got {}'.format(fld, val)
                if val == 8:
                    the_type_info.append((fld, '>f8'))
                elif val == 24:
                    # 24 bytes = three big-endian doubles (an (x, y, z) triple)
                    the_type_info.append((fld, '>f8', (3, )))
                else:
                    raise ValueError('Got unhandled value {} for CPHD 0.3 PVP field {}'.format(val, fld))
        return numpy.dtype(the_type_info)
1725214 | import numpy as np
from scipy import ndimage
from sHAM import nu_CWS
import gc
def find_index_first_dense(list_weights):
    """Index of the first 2-D (dense-layer) array in *list_weights*; None if absent."""
    for position, weights in enumerate(list_weights):
        if len(weights.shape) == 2:
            return position
def idx_matrix_to_matrix(idx_matrix, centers):
    """Expand a matrix of centroid indices back into a matrix of centroid values."""
    flat_indices = idx_matrix.reshape(-1, 1)
    return centers[flat_indices].reshape(idx_matrix.shape)
def centroid_gradient_matrix(idx_matrix, gradient, cluster):
    """Sum *gradient* entries per centroid label; result has shape (cluster, 1)."""
    per_label = ndimage.sum(gradient, idx_matrix, index=range(cluster))
    return per_label.reshape(cluster, 1)
#STOCHASTIC COMPRESSION FUNCTIONS
def generate_intervals(W, n_intervals):
    """Partition the value range of W into n_intervals quantile intervals.

    Returns (values_dict, intervals): values_dict maps each endpoint index
    to its value; intervals is the list of (low, high) pairs.
    """
    intervals = []
    values_dict = {}
    for k in range(n_intervals):
        low = np.quantile(W, k / n_intervals)
        high = np.quantile(W, (k + 1) / n_intervals)
        intervals.append((low, high))
        values_dict[k] = low
    # the final upper endpoint is a representative value as well
    values_dict[len(values_dict)] = intervals[-1][1]
    return values_dict, intervals

def get_interval(w, intervals):
    """Return the (low, high) interval containing w (last interval as fallback)."""
    for candidate in intervals:
        if candidate[0] <= w < candidate[1]:
            return candidate
    return intervals[-1]

def binarize(w, intervals, indices_dict):
    """Stochastically round w to the index of one of its interval's endpoints."""
    v, V = get_interval(w, intervals)
    # round up with probability equal to w's relative position in the interval
    return indices_dict[V] if np.random.uniform() <= (w - v) / (V - v) else indices_dict[v]

def stochastic_compression(W, b, dtype=np.uint8):
    """Compress W to b-bit endpoint indices over (2**b)-1 quantile intervals.

    Returns (values_dict, index_matrix).
    """
    n_intervals = (2 ** b) - 1
    values_dict, intervals = generate_intervals(W, n_intervals)
    indices_dict = {value: index for index, value in values_dict.items()}
    vectorized = np.vectorize(binarize)
    vectorized.excluded.add(1)
    vectorized.excluded.add(2)
    return values_dict, vectorized(W, intervals, indices_dict).astype(dtype)
#END STOCHASTIC COMPRESSION FUNCTIONS
class nu_PWS(nu_CWS.nu_CWS):
    """Probabilistic weight sharing: quantizes each dense layer of a model to a
    per-layer codebook of 2**bits values via stochastic compression."""
    def __init__(self, model, bits_for_dense_layers, index_first_dense, apply_compression_bias=False, div=None):
        self.model = model
        self.bits = bits_for_dense_layers
        # one codebook with 2**b entries per compressed layer
        self.clusters = [ 2**i for i in bits_for_dense_layers]
        self.index_first_dense = index_first_dense
        if div:
            self.div=div
        else:
            # step 2 skips the bias vectors interleaved with kernels;
            # step 1 compresses them as well
            self.div = 1 if apply_compression_bias else 2
    def apply_stochastic(self, list_trainable=None, untrainable_per_layers=None):
        """Stochastically compress every selected layer and write the quantized
        weights back into the model; fills self.centers and self.idx_layers."""
        if not list_trainable:
            list_weights = self.model.get_weights()
        else:
            # trainable variables come in as tensors; convert to numpy arrays
            list_weights=[]
            for w in (list_trainable):
                list_weights.append(w.numpy())
        d = self.index_first_dense
        # index matrices fit in uint8 for <=8 bits, otherwise uint16
        dtypelist = [ "uint8" if i <= 8 else "uint16" for i in self.bits]
        result = [stochastic_compression(list_weights[i], self.bits[(i-d)//self.div], dtypelist[(i-d)//self.div]) for i in range (d, len(list_weights), self.div)]
        values = [ v for (v , _) in result]
        self.centers = []
        i = 0
        # NOTE(review): this loop variable shadows the earlier 'd' (index of
        # the first dense layer), which is not used again afterwards.
        for d in values:
            # column vector of codebook values, positioned by endpoint index
            vect = np.zeros(shape=(self.clusters[i], 1), dtype="float32")
            for key, v in d.items():
                vect[key] = v
            self.centers.append(vect)
            i = i+1
        self.idx_layers = [ m for (_ , m) in result]
        if not list_trainable:
            self.untrainable_per_layers = 0
            self.model.set_weights(self.recompose_weight(list_weights))
        else:
            self.untrainable_per_layers = untrainable_per_layers
            self.model.set_weights(self.recompose_weight(list_weights, True, untrainable_per_layers))
        gc.collect()
    def recompose_weight(self, list_weights, trainable_vars=False, untrainable_per_layers=None):
        """Rebuild the full weight list, replacing each compressed layer's
        weights with values looked up from its codebook via the index matrix."""
        if not trainable_vars:
            d = self.index_first_dense
            return list_weights[:d]+[(idx_matrix_to_matrix(self.idx_layers[(i-d)//self.div], self.centers[(i-d)//self.div])) if i%self.div==0 else (list_weights[i]) for i in range(d,len(list_weights))]
        else:
            # account for the untrainable entries interleaved in each layer
            div = self.div + untrainable_per_layers
            list_weights = self.trainable_to_weights(self.model.get_weights(), list_weights, untrainable_per_layers)
            d = find_index_first_dense(list_weights)
            return list_weights[:d]+[(idx_matrix_to_matrix(self.idx_layers[(i-d)//div], self.centers[(i-d)//div])) if i%div==0 else (list_weights[i]) for i in range(d,len(list_weights))]
| StarcoderdataPython |
6562731 | from datetime import datetime
import os
import os.path
from django.db import models
from django.contrib.auth.models import User, Group
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.utils.hashcompat import sha_constructor
from django.db.models.signals import post_save
from djangobb_forum.fields import AutoOneToOneField, ExtendedImageField, JSONField
from djangobb_forum.util import smiles, convert_text_to_html
from djangobb_forum import settings as forum_settings
# Teach South's migration introspector about the custom model fields.
if 'south' in settings.INSTALLED_APPS:
    from south.modelsinspector import add_introspection_rules
    add_introspection_rules([], ['^djangobb_forum\.fields\.AutoOneToOneField',
                                 '^djangobb_forum\.fields\.JSONField',
                                 '^djangobb_forum\.fields\.ExtendedImageField'])

# UTC-offset choices for user profiles: value is the offset in hours (float).
TZ_CHOICES = [(float(x[0]), x[1]) for x in (
    (-12, '-12'), (-11, '-11'), (-10, '-10'), (-9.5, '-09.5'), (-9, '-09'),
    (-8.5, '-08.5'), (-8, '-08 PST'), (-7, '-07 MST'), (-6, '-06 CST'),
    (-5, '-05 EST'), (-4, '-04 AST'), (-3.5, '-03.5'), (-3, '-03 ADT'),
    (-2, '-02'), (-1, '-01'), (0, '00 GMT'), (1, '+01 CET'), (2, '+02'),
    (3, '+03'), (3.5, '+03.5'), (4, '+04'), (4.5, '+04.5'), (5, '+05'),
    (5.5, '+05.5'), (6, '+06'), (6.5, '+06.5'), (7, '+07'), (8, '+08'),
    (9, '+09'), (9.5, '+09.5'), (10, '+10'), (10.5, '+10.5'), (11, '+11'),
    (11.5, '+11.5'), (12, '+12'), (13, '+13'), (14, '+14'),
)]

# Reputation sign choices (1 = plus, -1 = minus).
SIGN_CHOICES = (
    (1, 'PLUS'),
    (-1, 'MINUS'),
)

# E-mail privacy levels for user profiles.
PRIVACY_CHOICES = (
    (0, _(u'Display your e-mail address.')),
    (1, _(u'Hide your e-mail address but allow form e-mail.')),
    (2, _(u'Hide your e-mail address and disallow form e-mail.')),
)

# bbcode is always available; markdown only when the library is installed.
MARKUP_CHOICES = [('bbcode', 'bbcode')]
try:
    import markdown
    MARKUP_CHOICES.append(("markdown", "markdown"))
except ImportError:
    pass

# Scan MEDIA_ROOT/forum/themes at import time; each subdirectory is a theme.
path = os.path.join(settings.MEDIA_ROOT, 'forum', 'themes')
THEME_CHOICES = [(theme, theme) for theme in os.listdir(path)
                 if os.path.isdir(os.path.join(path, theme))]
class Category(models.Model):
    """Top-level grouping of forums; visibility can be restricted to groups."""
    name = models.CharField(_('Name'), max_length=80)
    groups = models.ManyToManyField(Group,blank=True, null=True, verbose_name=_('Groups'), help_text=_('Only users from these groups can see this category'))
    position = models.IntegerField(_('Position'), blank=True, default=0)

    class Meta:
        ordering = ['position']
        verbose_name = _('Category')
        verbose_name_plural = _('Categories')

    def __unicode__(self):
        return self.name

    def forum_count(self):
        """Number of forums in this category."""
        return self.forums.all().count()

    @property
    def topics(self):
        """All topics in any forum of this category."""
        return Topic.objects.filter(forum__category__id=self.id).select_related()

    @property
    def posts(self):
        """All posts in any topic of this category."""
        return Post.objects.filter(topic__forum__category__id=self.id).select_related()

    def has_access(self, user):
        """Return True when *user* may see this category.

        Categories without group restrictions are public; otherwise the user
        must be authenticated and belong to one of the allowed groups.
        """
        if self.groups.exists():
            if user.is_authenticated():
                if not self.groups.filter(user__pk=user.id).exists():
                    return False
            else:
                return False
        return True
class Forum(models.Model):
    """A forum inside a Category; caches post/topic counts and the last post
    for cheap listing queries."""
    category = models.ForeignKey(Category, related_name='forums', verbose_name=_('Category'))
    name = models.CharField(_('Name'), max_length=80)
    position = models.IntegerField(_('Position'), blank=True, default=0)
    description = models.TextField(_('Description'), blank=True, default='')
    moderators = models.ManyToManyField(User, blank=True, null=True, verbose_name=_('Moderators'))
    updated = models.DateTimeField(_('Updated'), auto_now=True)
    post_count = models.IntegerField(_('Post count'), blank=True, default=0)
    topic_count = models.IntegerField(_('Topic count'), blank=True, default=0)
    last_post = models.ForeignKey('Post', related_name='last_forum_post', blank=True, null=True)

    class Meta:
        ordering = ['position']
        verbose_name = _('Forum')
        verbose_name_plural = _('Forums')

    def __unicode__(self):
        return self.name

    @models.permalink
    def get_absolute_url(self):
        return ('djangobb:forum', [self.id])

    @property
    def posts(self):
        """All posts across every topic of this forum."""
        return Post.objects.filter(topic__forum__id=self.id).select_related()
class Topic(models.Model):
    """A discussion thread inside a Forum; denormalises its own post count and
    last post for cheap listing queries."""
    forum = models.ForeignKey(Forum, related_name='topics', verbose_name=_('Forum'))
    name = models.CharField(_('Subject'), max_length=255)
    created = models.DateTimeField(_('Created'), auto_now_add=True)
    updated = models.DateTimeField(_('Updated'), null=True)
    user = models.ForeignKey(User, verbose_name=_('User'))
    views = models.IntegerField(_('Views count'), blank=True, default=0)
    sticky = models.BooleanField(_('Sticky'), blank=True, default=False)
    closed = models.BooleanField(_('Closed'), blank=True, default=False)
    subscribers = models.ManyToManyField(User, related_name='subscriptions', verbose_name=_('Subscribers'), blank=True)
    post_count = models.IntegerField(_('Post count'), blank=True, default=0)
    last_post = models.ForeignKey('Post', related_name='last_topic_post', blank=True, null=True)

    class Meta:
        ordering = ['-updated']
        get_latest_by = 'updated'
        verbose_name = _('Topic')
        verbose_name_plural = _('Topics')

    def __unicode__(self):
        return self.name

    def delete(self, *args, **kwargs):
        """Delete the topic and repair the parent forum's denormalised fields.

        Bug fix: ``last_forum_post.clear()`` was previously called twice
        (once inside the try block and once in the else clause).
        """
        # Unlink the forum's last_post reference before deleting so the FK
        # does not point at a removed post.
        try:
            last_post = self.posts.latest()
        except Post.DoesNotExist:
            pass
        else:
            last_post.last_forum_post.clear()
        forum = self.forum
        super(Topic, self).delete(*args, **kwargs)
        # Recompute the forum's cached last_post and counters.
        try:
            forum.last_post = Topic.objects.filter(forum__id=forum.id).latest().last_post
        except Topic.DoesNotExist:
            forum.last_post = None
        forum.topic_count = Topic.objects.filter(forum__id=forum.id).count()
        forum.post_count = Post.objects.filter(topic__forum__id=forum.id).count()
        forum.save()

    @property
    def head(self):
        """The first (oldest) post of the topic, or None when empty."""
        try:
            return self.posts.select_related().order_by('created')[0]
        except IndexError:
            return None

    @property
    def reply_count(self):
        """Number of replies (posts excluding the head post)."""
        return self.post_count - 1

    @models.permalink
    def get_absolute_url(self):
        return ('djangobb:topic', [self.id])

    def update_read(self, user):
        """Record in the user's post tracking that this topic is read up to
        its last post."""
        tracking = user.posttracking
        # Nothing to record when the user already read past the last post.
        if tracking.last_read and (tracking.last_read > self.last_post.created):
            return
        if isinstance(tracking.topics, dict):
            # Clear the per-topic dict once it grows past ~5Kb and fall back
            # to the coarse last_read timestamp.  Bug fix: the old code fell
            # through and called .get() on the now-None dict, raising
            # AttributeError.
            if len(tracking.topics) > 5120:
                tracking.topics = None
                tracking.last_read = datetime.now()
                tracking.save()
                return
            # Update the entry when there is a newer post (or no entry yet).
            if self.last_post.id > tracking.topics.get(str(self.id), 0):
                tracking.topics[str(self.id)] = self.last_post.id
                tracking.save()
        else:
            # First tracked topic for this user: initialise the dict.
            # NOTE(review): the key is an int here but str(...) above --
            # confirm whether that asymmetry is intended.
            tracking.topics = {self.id: self.last_post.id}
            tracking.save()
class Post(models.Model):
    """A single message inside a topic.

    Saving renders the raw markup body to HTML; deleting keeps the
    denormalized counters on the topic, forum, and author profile in sync.
    """
    topic = models.ForeignKey(Topic, related_name='posts', verbose_name=_('Topic'))
    user = models.ForeignKey(User, related_name='posts', verbose_name=_('User'))
    created = models.DateTimeField(_('Created'), auto_now_add=True)
    updated = models.DateTimeField(_('Updated'), blank=True, null=True)
    updated_by = models.ForeignKey(User, verbose_name=_('Updated by'), blank=True, null=True)
    markup = models.CharField(_('Markup'), max_length=15, default=forum_settings.DEFAULT_MARKUP, choices=MARKUP_CHOICES)
    body = models.TextField(_('Message'))
    body_html = models.TextField(_('HTML version'))
    user_ip = models.IPAddressField(_('User IP'), blank=True, null=True)

    class Meta:
        ordering = ['created']
        get_latest_by = 'created'
        verbose_name = _('Post')
        verbose_name_plural = _('Posts')

    def save(self, *args, **kwargs):
        # Re-render the HTML version on every save so edits stay in sync
        # with the raw body.
        self.body_html = convert_text_to_html(self.body, self.markup)
        if forum_settings.SMILES_SUPPORT and self.user.forum_profile.show_smilies:
            self.body_html = smiles(self.body_html)
        super(Post, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """Delete the post and repair the cached last_post pointers and
        counts on the enclosing topic, forum, and author profile."""
        self_id = self.id
        head_post_id = self.topic.posts.order_by('created')[0].id
        forum = self.topic.forum
        topic = self.topic
        profile = self.user.forum_profile
        # Detach any topic/forum whose cached last_post points at this post.
        self.last_topic_post.clear()
        self.last_forum_post.clear()
        super(Post, self).delete(*args, **kwargs)
        # Deleting the head post removes the whole topic.
        if self_id == head_post_id:
            topic.delete()
        else:
            try:
                topic.last_post = Post.objects.filter(topic__id=topic.id).latest()
            except Post.DoesNotExist:
                topic.last_post = None
            topic.post_count = Post.objects.filter(topic__id=topic.id).count()
            topic.save()
        try:
            forum.last_post = Post.objects.filter(topic__forum__id=forum.id).latest()
        except Post.DoesNotExist:
            forum.last_post = None
        # TODO: for speedup - save/update only changed fields
        forum.post_count = Post.objects.filter(topic__forum__id=forum.id).count()
        forum.topic_count = Topic.objects.filter(forum__id=forum.id).count()
        forum.save()
        profile.post_count = Post.objects.filter(user__id=self.user_id).count()
        profile.save()

    @models.permalink
    def get_absolute_url(self):
        return ('djangobb:post', [self.id])

    def summary(self):
        """Return the first 50 characters of the body, with an ellipsis
        appended when the body was truncated."""
        LIMIT = 50
        # Conditional expression instead of the legacy `and/or` idiom.
        tail = '...' if len(self.body) > LIMIT else ''
        return self.body[:LIMIT] + tail

    __unicode__ = summary
class Reputation(models.Model):
    """A signed reputation vote from one user to another, tied to a post."""
    from_user = models.ForeignKey(User, related_name='reputations_from', verbose_name=_('From'))
    to_user = models.ForeignKey(User, related_name='reputations_to', verbose_name=_('To'))
    # NOTE(review): related_name='post' is an odd reverse name for a set of
    # Reputation rows — presumably historical; renaming requires a migration.
    post = models.ForeignKey(Post, related_name='post', verbose_name=_('Post'))
    time = models.DateTimeField(_('Time'), auto_now_add=True)
    # +1 / -1 vote (see SIGN_CHOICES); 0 means no sign given.
    sign = models.IntegerField(_('Sign'), choices=SIGN_CHOICES, default=0)
    reason = models.TextField(_('Reason'), max_length=1000)

    class Meta:
        verbose_name = _('Reputation')
        verbose_name_plural = _('Reputations')
        # A user may rate a given post at most once.
        unique_together = (('from_user', 'post'),)

    def __unicode__(self):
        return u'T[%d], FU[%d], TU[%d]: %s' % (self.post.id, self.from_user.id, self.to_user.id, unicode(self.time))
class Profile(models.Model):
    """Forum-specific profile data attached 1:1 to a Django auth User.

    AutoOneToOneField creates the profile lazily on first access.
    """
    user = AutoOneToOneField(User, related_name='forum_profile', verbose_name=_('User'))
    status = models.CharField(_('Status'), max_length=30, blank=True)
    site = models.URLField(_('Site'), verify_exists=False, blank=True)
    jabber = models.CharField(_('Jabber'), max_length=80, blank=True)
    icq = models.CharField(_('ICQ'), max_length=12, blank=True)
    msn = models.CharField(_('MSN'), max_length=80, blank=True)
    aim = models.CharField(_('AIM'), max_length=80, blank=True)
    yahoo = models.CharField(_('Yahoo'), max_length=80, blank=True)
    location = models.CharField(_('Location'), max_length=30, blank=True)
    signature = models.TextField(_('Signature'), blank=True, default='', max_length=forum_settings.SIGNATURE_MAX_LENGTH)
    time_zone = models.FloatField(_('Time zone'), choices=TZ_CHOICES, default=float(forum_settings.DEFAULT_TIME_ZONE))
    language = models.CharField(_('Language'), max_length=5, default='', choices=settings.LANGUAGES)
    avatar = ExtendedImageField(_('Avatar'), blank=True, default='', upload_to=forum_settings.AVATARS_UPLOAD_TO, width=forum_settings.AVATAR_WIDTH, height=forum_settings.AVATAR_HEIGHT)
    theme = models.CharField(_('Theme'), choices=THEME_CHOICES, max_length=80, default='default')
    show_avatar = models.BooleanField(_('Show avatar'), blank=True, default=True)
    show_signatures = models.BooleanField(_('Show signatures'), blank=True, default=True)
    show_smilies = models.BooleanField(_('Show smilies'), blank=True, default=True)
    privacy_permission = models.IntegerField(_('Privacy permission'), choices=PRIVACY_CHOICES, default=1)
    markup = models.CharField(_('Default markup'), max_length=15, default=forum_settings.DEFAULT_MARKUP, choices=MARKUP_CHOICES)
    post_count = models.IntegerField(_('Post count'), blank=True, default=0)

    class Meta:
        verbose_name = _('Profile')
        verbose_name_plural = _('Profiles')

    def last_post(self):
        # Timestamp of the user's most recent post, or None if they have
        # never posted.
        posts = Post.objects.filter(user__id=self.user_id).order_by('-created')
        if posts:
            return posts[0].created
        else:
            return None

    def reply_count_minus(self):
        # Number of negative reputation votes received by this user.
        return Reputation.objects.filter(to_user__id=self.user_id, sign=-1).count()

    def reply_count_plus(self):
        # Number of positive reputation votes received by this user.
        return Reputation.objects.filter(to_user__id=self.user_id, sign=1).count()
class PostTracking(models.Model):
    """
    Model for tracking read/unread posts.
    In topics stored ids of topics and last_posts as dict.
    """
    user = AutoOneToOneField(User)
    # Maps topic id -> id of the last post read in that topic.
    topics = JSONField(null=True)
    # Everything created before this timestamp counts as read, regardless
    # of per-topic entries.
    last_read = models.DateTimeField(null=True)

    class Meta:
        verbose_name = _('Post tracking')
        verbose_name_plural = _('Post tracking')

    def __unicode__(self):
        return self.user.username
class Report(models.Model):
    """A user-filed report flagging a post for moderator attention.

    `zapped` marks the report as handled by `zapped_by`.
    """
    reported_by = models.ForeignKey(User, related_name='reported_by', verbose_name=_('Reported by'))
    post = models.ForeignKey(Post, verbose_name=_('Post'))
    zapped = models.BooleanField(_('Zapped'), blank=True, default=False)
    zapped_by = models.ForeignKey(User, related_name='zapped_by', blank=True, null=True, verbose_name=_('Zapped by'))
    created = models.DateTimeField(_('Created'), blank=True)
    # Fix: max_length must be an int, not the string '1000'.
    reason = models.TextField(_('Reason'), blank=True, default='', max_length=1000)

    class Meta:
        verbose_name = _('Report')
        verbose_name_plural = _('Reports')

    def __unicode__(self):
        return u'%s %s' % (self.reported_by, self.zapped)
class Ban(models.Model):
    """A ban record: saving deactivates the user, deleting reactivates them."""
    user = models.OneToOneField(User, verbose_name=_('Banned user'), related_name='ban_users')
    ban_start = models.DateTimeField(_('Ban start'), default=datetime.now)
    ban_end = models.DateTimeField(_('Ban end'), blank=True, null=True)
    reason = models.TextField(_('Reason'))

    class Meta:
        verbose_name = _('Ban')
        verbose_name_plural = _('Bans')

    def __unicode__(self):
        return self.user.username

    def save(self, *args, **kwargs):
        # Banning is implemented by deactivating the auth account.
        self.user.is_active = False
        self.user.save()
        super(Ban, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        # Removing the ban record re-enables the account.
        self.user.is_active = True
        self.user.save()
        super(Ban, self).delete(*args, **kwargs)
class Attachment(models.Model):
    """A file attached to a post, addressed externally by an opaque hash."""
    post = models.ForeignKey(Post, verbose_name=_('Post'), related_name='attachments')
    size = models.IntegerField(_('Size'))
    content_type = models.CharField(_('Content type'), max_length=255)
    path = models.CharField(_('Path'), max_length=255)
    name = models.TextField(_('Name'))
    hash = models.CharField(_('Hash'), max_length=40, blank=True, default='', db_index=True)

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        # The first save assigns an id; the hash is derived from that id
        # (salted with SECRET_KEY), so a second save is needed the first
        # time through.
        super(Attachment, self).save(*args, **kwargs)
        if not self.hash:
            self.hash = sha_constructor(str(self.id) + settings.SECRET_KEY).hexdigest()
            super(Attachment, self).save(*args, **kwargs)

    @models.permalink
    def get_absolute_url(self):
        return ('djangobb:forum_attachment', [self.hash])

    def get_absolute_path(self):
        # Absolute filesystem location of the stored attachment.
        return os.path.join(settings.MEDIA_ROOT, forum_settings.ATTACHMENT_UPLOAD_TO,
                            self.path)
# Wire up the denormalization handlers so the cached counters and
# last_post pointers are refreshed whenever a Post or Topic is saved.
from .signals import post_saved, topic_saved
post_save.connect(post_saved, sender=Post, dispatch_uid='djangobb_post_save')
post_save.connect(topic_saved, sender=Topic, dispatch_uid='djangobb_topic_save')
| StarcoderdataPython |
6671765 | <reponame>Ernestyj/PyStudy<gh_stars>1-10
# -*- coding: utf-8 -*-
import unittest
import os
import pickle
import pandas as pd
import numpy as np
from td_query import ROOT_PATH
from td_query.data_manipulate_cc import data_manipulate_cc_instance as instance
from teradata import UdaExec
class TestDataManipulateCC(unittest.TestCase):
    """Manually-driven integration checks against a live Teradata warehouse.

    Most check methods are prefixed with '_' so unittest does NOT collect
    them automatically; rename one to 'test_*' to run it against the
    warehouse. setUpClass initializes the shared `instance` connection once
    for the whole class.
    """
    @classmethod
    def setUpClass(cls):
        print("**************************************** setUpClass ****************************************")
        instance.init()
        print(instance.teradata)

    @classmethod
    def tearDownClass(cls):
        print("************************************** tearDownClass ***************************************")

    def setUp(self):
        print("****** setUp *******")

    def tearDown(self):
        print("***** tearDown *****")

    def _example(self):
        # Smoke check: run the canned sample query and print the frame.
        df = instance.query_sample()
        # with open(ROOT_PATH + '/external/df_dispatch_bna.pickle', 'wb') as f: # save
        #     pickle.dump(df, f)
        print(df)

    def _query(self):
        # Ad-hoc SQL round-trip through the query helper.
        query = '''select top 10 * from pp_scratch_risk.ms_auto_trend_us_bad;'''
        df = instance.query(query)
        print(df)

    def _query_table_schema(self):
        dest_db = "pp_scratch_risk"
        dest_table = "ms_auto_trend_us2_1_3_100_100_1_1_1"
        result_cursor = instance.teradata.execute("show select * from {}.{};".format(dest_db, dest_table))
        last_row = result_cursor.fetchall()
        print(last_row)

    def _query_table_top_rows(self):
        table = "pp_scratch_risk.ms_auto_trend_us_bad"
        df = instance.query_table_top_rows(table)
        print(df)

    def _drop_table(self):
        dest_db = "pp_scratch_risk"
        dest_table = "ms_auto_trend_us2_1_3_100_100_1_1_1"
        instance.drop_table(dest_db, dest_table)

    # def _transalte_100_63_22_14_1(self):
    #     rules = [
    #         "(SELLER_CONSUMER_SEG != 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string != '<missing>') & (amt2 == 'a-1k') & (SELLER_CONSUMER_SEG == 'C')",
    #         "(SELLER_CONSUMER_SEG == 'Y') & (IS_ULP_TRANS_T_F >= 0.5) & (dc_string == '10008') & (amt2 != 'c-1h') & (amt2 != 'e-<50')",
    #     ]
    #     result = instance.translate_hyperloop_rules_to_sql(rules)
    #     print(result)

    def _get_all_bad_and_sample_from_good_into_new_table(self):
        # Commented alternatives below are previously-used source/dest
        # databases and tables, kept for quick switching between runs.
        # src_db = "pp_scratch_risk"
        # src_db = "PP_OAP_ROM_T"
        src_db = "PP_OAP_SING_JYANG2_T"
        # src_table = 'cc_hl_base_case1_train'
        src_table = 'cc_hyperloop_base_cg_sel_train'
        # dest_db = "pp_scratch_risk"
        dest_db = "PP_OAP_SING_JYANG2_T"
        bad_scale = 1
        good_scale = 3
        # dest_table = "cc_hl_case1_train_{}_{}".format(bad_scale, good_scale,)
        dest_table = "cc_hyperloop_base_cg_sel_train_{}_{}".format(bad_scale, good_scale,)
        instance.get_all_bad_and_sample_from_good_into_new_table(src_db, src_table, dest_db, dest_table, bad_scale, good_scale,)

    def _get_all_bad_and_sample_from_good_into_new_table_reverse(self):
        # Same sampling as above but using the reversed bad/good selection.
        # src_db = "pp_scratch_risk"
        # src_db = "PP_OAP_ROM_T"
        src_db = "PP_OAP_SING_JYANG2_T"
        # src_table = 'cc_hl_base_case1_train'
        src_table = 'cc_hyperloop_base_cg_sel_train'
        # dest_db = "pp_scratch_risk"
        dest_db = "PP_OAP_SING_JYANG2_T"
        bad_scale = 1
        good_scale = 3
        # dest_table = "cc_hl_case1_train_{}_{}".format(bad_scale, good_scale,)
        dest_table = "cc_hyperloop_base_cg_sel_train_{}_{}".format(bad_scale, good_scale,)
        instance.get_all_bad_and_sample_from_good_into_new_table_reverse(src_db, src_table, dest_db, dest_table, bad_scale, good_scale,)

    # def _generate_hl_job_json(self):
    #     training_table = "ms_auto_trend_us2_1_3"
    #     testing_table = "ms_auto_trend_us_t"
    #     instance.generate_hl_job_json(training_table, testing_table, template_name='hl_job_template_na.json')

    # def _update_weight_col_in_table(self):
    #     src_db = "pp_scratch_risk"
    #     src_table = 'ms_auto_trend_us2_1_3'
    #     src_col = 'PMT_USD_AMT'
    #     instance.update_weight_col_in_table(src_db, src_table, src_col)

    def _update_custom_weight_col_in_table(self):
        src_db = "pp_scratch_risk"
        src_table = 'ms_auto_trend_us2_1_3'
        src_col = 'PMT_USD_AMT'
        instance.update_custom_weight_col_in_table(src_db, src_table, src_col)

    def _add_is_cc_bad_col_in_table(self):
        src_db = "PP_OAP_ROM_T"
        src_table = 'cc_hl_base_case2_test'
        instance.add_is_cc_bad_col_in_table(src_db, src_table)

    def _add_is_cc_bad_reverse_col_in_table(self):
        src_db = "PP_OAP_SING_JYANG2_T"
        src_table = 'cc_hyperloop_base_cg_sel_test'
        instance.add_is_cc_bad_reverse_col_in_table(src_db, src_table)

    def _get_all_bad_and_sample_from_good_into_new_table_and_append_custom_weight_col(self):
        # End-to-end: sample into a new table, then append the custom
        # weight and label columns to it.
        # src_db = "PP_OAP_ROM_T"
        src_db = "PP_OAP_SING_JYANG2_T"
        # src_table = 'cc_hl_base_case2_train'
        src_table = 'cc_hyperloop_base_cg_sel_test'
        # dest_db = "PP_OAP_ROM_T"
        dest_db = "PP_OAP_SING_JYANG2_T"
        bad_scale = 1
        good_scale = 3
        dest_table = "cc_hl_case2_train_{}_{}".format(bad_scale, good_scale,)
        instance.get_all_bad_and_sample_from_good_into_new_table(src_db, src_table, dest_db, dest_table,
                                                                 bad_scale, good_scale,)
        src_col = 'usd_amt'
        instance.update_custom_weight_col_in_table(src_db, src_table=dest_table, src_col=src_col)
        instance.add_is_cc_bad_col_in_table(src_db, src_table=dest_table)
| StarcoderdataPython |
4805633 | from typing import Dict, List, Set
import docker
import os
import yaml
from docker import DockerClient
from docker.errors import ImageNotFound
from dbuild.config.config import Config
from dbuild.denvironment import BuildHandler, BuildContainer
def getOrCreateImage(client: DockerClient, config: Config):
    """
    Resolve the container image for the build environment.

    Prefers a named image ("container.image"): it is looked up locally
    first and pulled on a cache miss. Otherwise, when "container.build"
    is configured, an image is built from the configured Dockerfile.

    :param client: docker client used to get/pull/build images.
    :param config: configuration holding the image name or build settings.
    :return: the resolved image, the result of ``client.images.build``,
        or None when neither key is configured.
    """
    if "container.image" in config:
        # Fix: only read the key inside this branch. The original read
        # config["container.image"] unconditionally, which fails before
        # the "container.build" fallback can ever run when the key is
        # absent.
        image_name = config["container.image"]
        try:
            return client.images.get(image_name)
        except ImageNotFound:
            print("Image '" + image_name + "' not found locally!")
        print("Pulling '" + image_name + "'")
        return client.images.pull(image_name)
    elif "container.build" in config:
        # Build from Dockerfile
        image_name = "devenv_" + config["container.name"]
        print(
            "Building from Dockerfile "
            + "'" + config["container.build.dockerfile"] + "'"
            + " in path '" + config["container.build.path"] + "'"
            + " the image '" + image_name + "'"
        )
        return client.images.build(
            path=config["container.build.path"],
            dockerfile=config["container.build.dockerfile"],
            network_mode=config["container.build.network_mode"],
            tag=image_name
        )
    # Neither key configured: fall through and return None implicitly,
    # matching the original behavior.
class DContext:
    """Build-environment context.

    Owns the docker client, the configuration, and the set of live
    BuildHandlers so they can all be torn down from a signal handler.
    """
    def __init__(self, config: Config, default_workdir='/workdir/'):
        self.config = config
        self.client = None
        self.default_workdir = default_workdir
        self.reloadClient()
        # Handlers currently executing, tracked so exitGracefully can
        # stop every one of them.
        self.handlers = set()  # type: Set[BuildHandler]

    def reloadClient(self):
        """(Re)create the docker client from the 'docker' config section,
        falling back to an environment-configured client."""
        self.client = None
        if "docker" in self.config:
            section = self.config["docker"]
            assert isinstance(section, dict)
            if len(section) > 0:
                self.client = docker.DockerClient(**section)
        if self.client is None:
            self.client = docker.DockerClient.from_env()

    def exitGracefully(self, signum, frame):
        # Signal handler: drain the set, forwarding the signal to each
        # live handler.
        while self.handlers:
            self.handlers.pop().exitGracefully(signum, frame)

    def getDefaultWorkdir(self) -> str:
        return self.default_workdir

    def setDefaultWorkdir(self, workdir: str):
        self.default_workdir = workdir

    def getCurrentUser(self) -> str:
        # "uid:gid" string, the format docker expects for its user option.
        return str(os.getuid()) + ":" + str(os.getgid())

    def clean(self):
        # Remove any leftover build container for this configuration.
        container = BuildContainer(client=self.client, name=self.config["container.name"],
                                   workdir=self.getDefaultWorkdir(), image=None, volumes=None)
        container.clean()

    def execute(self, cmd: List[str], workdir=None, user='', environment: Dict[str, str] = None):
        """Run a single command in a build container.

        The handler is registered in self.handlers for the duration of
        the run so a signal can interrupt it.
        """
        handler = BuildHandler(
            client=self.client,
            # Lazy: the image is only resolved/built when the handler starts.
            image=lambda: getOrCreateImage(client=self.client, config=self.config),
            name=self.config["container.name"],
            volumes=self.config.get(["container", "volumes"], {}),
            workdir=workdir if workdir is not None else self.getDefaultWorkdir()
        )
        self.handlers.add(handler)
        handler.start()
        handler.runCommand(cmd=cmd, environment=environment, privileged=False, user=user)
        handler.stop()
        self.handlers.remove(handler)
| StarcoderdataPython |
1626084 | """IRC transport implementation."""
import re
import asyncio
import functools
from abc import ABCMeta, abstractmethod
from typing import *
from ..util import LogMixin
from .. import common
from . import response
__all__ = ["ConnectInfo", "ClientProtocol", "Message", "response"]
class User:
    """A single IRC user, identified by nick, username, and host."""

    # Matches the canonical "nick!user@host" form of an IRC user prefix.
    RE = re.compile("(?P<nick>[^!]+)!(?P<user>[^@]+)@(?P<host>.+)")

    def __init__(self, nick, username, host):
        self.nick = nick
        self.username = username
        self.host = host

    @staticmethod
    def parse(s: str):
        """
        Parse a ``nick!user@host`` string into a :class:`User`.

        :param s: the user string to parse.
        :raises ValueError: when the string does not match the expected form.
        :return: the parsed user.
        """
        match = User.RE.match(s)
        if match is None:
            raise ValueError("Invalid user pattern")
        return User(match.group('nick'),
                    match.group('user'),
                    match.group('host'))
class ConnectInfo(common.ConnectInfo):
    """
    IRC-specific connect info.
    """

    def __init__(self, server: str, nicks: Sequence[str], user: str,
                 port: Optional[int] = None, server_pass: Optional[str] = None,
                 ssl: Optional[bool] = None, **_):
        """
        Creates a new ConnectInfo object for IRC connections.

        :param server: IRC the server to connect to.
        :param nicks: nicknames to attempt, in order; must not be empty.
        :param user: the username to use on the IRC server.
        :param port: port to connect on; defaults to 6667.
        :param server_pass: password for the IRC server; defaults to None.
        :param ssl: whether to use SSL; defaults to True when the port is
            6697, otherwise False.
        :raises ValueError: when no nicknames are supplied.
        """
        if not nicks:
            raise ValueError("number of nicks specified in ConnectInfo "
                             "must contain at least one nick")
        # Fall back to the conventional plaintext IRC port.
        if port is None:
            port = 6667
        super().__init__(server, port)
        self.nicks = nicks
        self.user = user
        self.server_pass = server_pass
        # Infer SSL from the conventional TLS port when not given explicitly.
        self.ssl = (port == 6697) if ssl is None else ssl
class Message(object):
    """
    An IRC message with an optional prefix, a command, and optional parameters.
    """

    def __init__(self, prefix: Optional[str], command: str, params: Sequence[str]):
        """
        Creates an IRC message. This constructor does no validation of parameters beforehand.

        :param prefix: the prefix for the message.
        :param command: the command to pass, as a string.
        :param params: the parameters to pass.
        """
        self.prefix = prefix
        # A user-shaped prefix ("nick!user@host") is parsed eagerly into
        # self.user; any other prefix (e.g. a bare server name) leaves
        # self.user as None.
        if prefix is not None:
            try:
                self.user = User.parse(self.prefix)
            except ValueError:
                self.user = None
        else:
            self.user = None
        self.command = command
        self.params = params

    def __str__(self):
        """
        Formats the IRC message for sending, excluding CRLF.

        :return: the wire-format string for this message.
        """
        message = ''
        if self.prefix:
            message += ':{} '.format(self.prefix)
        message += self.command.upper()
        if self.params:
            # NOTE(review): params are space-joined with no ':' marker, so
            # a trailing param containing spaces would not round-trip
            # through parse() — confirm callers only pass space-free params.
            message += " {}".format(' '.join(self.params))
        return message

    # Optional ":prefix ", a word or 3-digit command, middle params, and an
    # optional " :trailing" parameter.
    MESSAGE_RE = re.compile('^(:(?P<prefix>[^ ]+) )?'
                            '(?P<command>[a-zA-Z]+|[0-9]{3})'
                            '(?P<params>( [^: \r\n]+)*)'
                            '(?P<trailing> :[^\r\n]+)?$',
                            re.MULTILINE)

    @staticmethod
    def parse(line: str):
        """
        Parses an IRC message.

        :param line: the message to parse
        :raises ValueError: if the line is malformed.
        :return: the parsed message
        """
        match = Message.MESSAGE_RE.match(line)
        if not match:
            raise ValueError("invalid IRC message: {}".format(line))
        prefix = match.group('prefix')
        command = match.group('command')
        # Translate 3-digit numeric replies into their symbolic names.
        try:
            command = response.CODE_TO_NAME[int(command)]
        except (KeyError, ValueError):
            # it's okay if we can't translate this code - it just means we won't have a legit
            # translation of what it means
            pass
        params = list(filter(len, match.group('params').split(' ')))
        trailing = match.group('trailing')
        if trailing:
            # Strip the leading " :" so the trailing text becomes a
            # normal final parameter.
            params += [trailing[2:]]
        return Message(prefix, command, params)
class ClientProtocol(asyncio.Protocol, LogMixin, metaclass=ABCMeta):
    """
    The IRC asyncio protocol implementation.
    """

    def __init__(self, connect_info: ConnectInfo):
        """
        Creates a new IRC client protocol object, using the given connection info.
        """
        self.connect_info = connect_info
        # Set while connected, None otherwise (see connection_made/_lost).
        self.transport = None
        LogMixin.__init__(self, "{}@{}".format(connect_info.user, connect_info.server))

    def connection_made(self, transport):
        assert self.transport is None, "transport must be unset after disconnection"
        self.info("connected")
        self.transport = transport

    def data_received(self, data):
        # Split the raw buffer on CRLF and log each non-empty line.
        # NOTE(review): this assumes complete lines per data_received call —
        # a message split across TCP segments would be handled in pieces;
        # confirm whether buffering is needed.
        for line in filter(len, data.decode().split('\r\n')):
            self.debug("%s", line)

    def connection_lost(self, exc):
        # exc is either an exception or None
        # see: https://docs.python.org/3/library/asyncio-protocol.html#asyncio.BaseProtocol.connection_lost
        self.info("connection lost")
        self.transport = None
        # TODO : auto-reconnect module
        # TODO : auto-reconnect module would require coupling between the protocol and the chatbot?
        # chatbot is not aware of its connection to any server.

    @staticmethod
    def _make_message(command: str, *params: str):
        """
        Constructs an IRC message from a command and its params.
        """
        return Message(prefix=None, command=command, params=params)

    def _send_message(self, msg: Message):
        """
        Sends a message to the IRC server.

        :param msg: the message structured to send.
        """
        # use a variable here because otherwise it gets calculated twice; in the log and when it's
        # used
        msg_str = str(msg)
        self.debug("%s", msg_str)
        self.transport.write("{}\r\n".format(msg_str).encode())

    def _send_command(self, command: str, *params: str):
        """
        Constructs an IRC message from a command and its params, and sends it.

        :param command: the command to construct
        :param params: any parameters the command expects
        """
        self._send_message(ClientProtocol._make_message(command, *params))

    def _schedule_command(self, timeout, command: str, *params: str):
        """
        Schedules a command to be sent in a given number of seconds.
        """
        loop = asyncio.get_event_loop()
        send_command = functools.partial(self._send_command, command, *params)
        loop.call_later(timeout, send_command)
        # TODO : store the callback from loop.call_later() somewhere, so we can cancel messages
| StarcoderdataPython |
1693434 | import sys
import os
def add_rel_path(*args):
    """Append a path, given relative to this file's directory, to sys.path."""
    target = os.path.join(os.path.dirname(__file__), *args)
    sys.path.append(os.path.normpath(target))


# Make the project's ``code`` directory importable and point Waldo at its
# default settings module unless the environment already overrides it.
add_rel_path('..', 'code')
os.environ.setdefault('WALDO_SETTINGS', 'settings.waldo')
| StarcoderdataPython |
5155659 | <gh_stars>1-10
import numpy as np
import matplotlib.pyplot as plt
class ModelParameters:
    """Encapsulates the parameters shared by the stochastic process models.

    Groups the generic simulation settings (start value, horizon, time
    step, volatility) with model-specific parameters for geometric
    Brownian motion, jump diffusion, Cox-Ingersoll-Ross,
    Ornstein-Uhlenbeck, and Heston.
    """

    def __init__(self,
                 all_s0, all_time, all_delta, all_sigma, gbm_mu,
                 jumps_lamda=0.0, jumps_sigma=0.0, jumps_mu=0.0,
                 cir_a=0.0, cir_mu=0.0, all_r0=0.0, cir_rho=0.0,
                 ou_a=0.0, ou_mu=0.0,
                 heston_a=0.0, heston_mu=0.0, heston_vol0=0.0):
        # Generic settings: starting asset value, total simulation time,
        # step size (e.g. 1/252 = daily, 1/12 = monthly), and the
        # volatility of the stochastic processes.
        self.all_s0 = all_s0
        self.all_time = all_time
        self.all_delta = all_delta
        self.all_sigma = all_sigma
        # Geometric Brownian motion: annual drift factor.
        self.gbm_mu = gbm_mu
        # Jump diffusion: per-step jump probability, jump-size volatility,
        # and average jump size.
        self.lamda = jumps_lamda
        self.jumps_sigma = jumps_sigma
        self.jumps_mu = jumps_mu
        # Cox-Ingersoll-Ross: mean-reversion rate, long-run average rate,
        # starting interest rate, and the correlation between the Heston
        # model's wiener processes.
        self.cir_a = cir_a
        self.cir_mu = cir_mu
        self.all_r0 = all_r0
        self.cir_rho = cir_rho
        # Ornstein-Uhlenbeck: mean-reversion rate and long-run average rate.
        self.ou_a = ou_a
        self.ou_mu = ou_mu
        # Heston: volatility mean-reversion rate, long-run average
        # volatility, and starting volatility.
        self.heston_a = heston_a
        self.heston_mu = heston_mu
        self.heston_vol0 = heston_vol0
def convert_to_returns(log_returns):
    """
    Exponentiate a sequence of log returns to recover simple daily returns.

    :param log_returns: the log returns to exponentiate
    :return: the exponentiated returns
    """
    return np.exp(log_returns)


def convert_to_prices(param, log_returns):
    """
    Turn log returns into simple returns and accumulate them into a price
    path that starts at ``param.all_s0``.

    :param param: the model parameters object (only ``all_s0`` is read)
    :param log_returns: the log returns to exponentiate
    :return: numpy array of prices, one per log return, beginning at all_s0
    """
    returns = convert_to_returns(log_returns)
    # The path starts at the configured initial asset value.
    prices = [param.all_s0]
    for step in range(1, len(returns)):
        # Each price is the previous price scaled by the previous return.
        # NOTE(review): the final entry of `returns` is never consumed —
        # confirm this off-by-one is intended.
        prices.append(prices[step - 1] * returns[step - 1])
    return np.array(prices)
def plot_stochastic_processes(processes, title):
    """
    Draw every stochastic process in ``processes`` on one shared axis.

    :param processes: sequence of equal-length simulated price paths.
    :param title: figure title.
    :return: None; displays the plot window.
    """
    plt.style.use(['bmh'])
    figure, axis = plt.subplots(1)
    figure.suptitle(title, fontsize=16)
    axis.set_xlabel('Time, t')
    axis.set_ylabel('Simulated Asset Price')
    # One shared x axis, sized by the first path.
    time_axis = np.arange(0, len(processes[0]), 1)
    for path in processes:
        plt.plot(time_axis, path)
    plt.show()
| StarcoderdataPython |
11362102 | <filename>marseille/pdtb_fields.py
# Author: <NAME> <<EMAIL>>
# License: BSD 3-clause
# interpretation of fields from Penn Discourse Treebank file format
PDTB_FIELDS = [
    # Relation identification and source file location.
    'reltype',
    'section',
    'file',
    # Connective: character span, Gorn tree address, raw text, and sense info.
    'conn_span',
    'conn_gorn_addr',
    'conn_raw',
    'position',
    'sent_no',
    'head',
    'conn1',
    'conn2',
    # Up to two semantic classes for each of the two connective senses.
    'sem_cls_1_1',
    'sem_cls_1_2',
    'sem_cls_2_1',
    'sem_cls_2_2',
    # Relation-level attribution and its span.
    'rel_source',
    'rel_type',
    'rel_polarity',
    'rel_determinacy',
    'rel_span',
    'rel_gorn_addr',
    'rel_raw',
    # Argument 1: span, attribution, and attribution span.
    'arg1_span',
    'arg1_gorn_addr',
    'arg1_raw',
    'arg1_source',
    'arg1_type',
    'arg1_polarity',
    'arg1_determinacy',
    'arg1_attr_span',
    'arg1_attr_gorn_addr',
    'arg1_attr_raw',
    # Argument 2: span, attribution, and attribution span.
    'arg2_span',
    'arg2_gorn_addr',
    'arg2_raw',
    'arg2_source',
    'arg2_type',
    'arg2_polarity',
    'arg2_determinacy',
    'arg2_attr_span',
    'arg2_attr_gorn_addr',
    'arg2_attr_raw',
    # Optional supplementary spans for each argument.
    'sup1_span',
    'sup1_gorn_addr',
    'sup1_raw',
    'sup2_span',
    'sup2_gorn_addr',
    'sup2_raw',
]
| StarcoderdataPython |
6689338 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import in_generator
from name_utilities import lower_first
class CSSProperties(in_generator.Writer):
    """Reads the CSS properties .in file and assigns each property a
    stable enum value.

    Real properties are numbered from ``_first_enum_value``; each alias
    gets the aliased property's value offset by 512 so the low bits
    identify the underlying property.
    """
    # Default attribute values applied to every property entry.
    defaults = {
        'alias_for': None,
        'runtime_flag': None,
        'longhands': '',
        'animatable': False,
        'inherited': False,
        'font': False,
        'svg': False,
        'name_for_methods': None,
        'use_handlers_for': None,
        'getter': None,
        'setter': None,
        'initial': None,
        'type_name': None,
        'converter': None,
        'custom_all': False,
        'custom_initial': False,
        'custom_inherit': False,
        'custom_value': False,
        'builder_skip': False,
        'direction_aware': False,
    }

    # Restricts the boolean flags to literal True/False in the .in file.
    valid_values = {
        'animatable': (True, False),
        'inherited': (True, False),
        'font': (True, False),
        'svg': (True, False),
        'custom_all': (True, False),
        'custom_initial': (True, False),
        'custom_inherit': (True, False),
        'custom_value': (True, False),
        'builder_skip': (True, False),
        'direction_aware': (True, False),
    }

    def __init__(self, file_paths):
        in_generator.Writer.__init__(self, file_paths)

        properties = self.in_file.name_dictionaries
        # Split aliases from real properties; aliases are numbered after
        # the real properties below and then appended back.
        self._aliases = [property for property in properties if property['alias_for']]
        properties = [property for property in properties if not property['alias_for']]

        # StylePropertyMetadata additionally assumes there are under 1024 properties.
        assert len(properties) < 512, 'Property aliasing expects there are under 512 properties.'

        # We currently assign 0 to CSSPropertyInvalid
        self._first_enum_value = 1
        for offset, property in enumerate(properties):
            property['property_id'] = css_name_to_enum(property['name'])
            property['upper_camel_name'] = camelcase_css_name(property['name'])
            property['lower_camel_name'] = lower_first(property['upper_camel_name'])
            property['enum_value'] = self._first_enum_value + offset
            property['is_internal'] = property['name'].startswith('-internal-')

        self._properties_including_aliases = properties
        self._properties = {property['property_id']: property for property in properties}

        # The generated code will only work with at most one alias per property
        assert len({property['alias_for'] for property in self._aliases}) == len(self._aliases)
        for property in self._aliases:
            property['property_id'] = css_alias_name_to_enum(property['name'])
            aliased_property = self._properties[css_name_to_enum(property['alias_for'])]
            # Aliases share the aliased property's value plus a 512 offset.
            property['enum_value'] = aliased_property['enum_value'] + 512
        self._properties_including_aliases += self._aliases
def camelcase_css_name(css_name):
    """Convert hyphen-separated-name to UpperCamelCase.

    E.g., '-foo-bar' becomes 'FooBar'.
    """
    return ''.join(map(str.capitalize, css_name.split('-')))


def css_name_to_enum(css_name):
    """Map a CSS property name to its CSSPropertyID enum identifier."""
    return 'CSSProperty' + camelcase_css_name(css_name)


def css_alias_name_to_enum(css_name):
    """Map an aliased CSS property name to its alias enum identifier."""
    return 'CSSPropertyAlias' + camelcase_css_name(css_name)
| StarcoderdataPython |
6617252 | <reponame>localmed/django-assetfiles
from __future__ import unicode_literals
from nose.tools import *
from assetfiles import settings
from assetfiles.filters.coffee import CoffeeScriptFilterError
from tests.base import AssetfilesTestCase, filter
class TestCoffeeScriptFilter(AssetfilesTestCase):
    """Integration tests for the CoffeeScript asset filter.

    COFFEE_SCRIPT_OPTIONS is snapshotted in setUp and restored in
    tearDown so tests that mutate the option dict do not leak settings
    into each other.
    """
    def setUp(self):
        super(TestCoffeeScriptFilter, self).setUp()
        self.original_coffee_options = settings.COFFEE_SCRIPT_OPTIONS

    def tearDown(self):
        super(TestCoffeeScriptFilter, self).tearDown()
        settings.COFFEE_SCRIPT_OPTIONS = self.original_coffee_options

    def test_processes_coffee_files(self):
        self.mkfile('static/js/simple.coffee', 'a = foo: "1#{2}3"')
        # String interpolation compiles to JS concatenation.
        assert_in(b'foo: "1" + 2 + "3"', filter('js/simple.js'))

    def test_uses_coffee_script_options(self):
        settings.COFFEE_SCRIPT_OPTIONS = {'bare': True}
        self.mkfile('static/js/simple.coffee', 'a = foo: "1#{2}3"')
        # 'bare' compiles without the top-level safety wrapper function.
        assert_not_in(b'(function() {', filter('js/simple.js'))

    def test_raises_syntax_error(self):
        with assert_raises(CoffeeScriptFilterError):
            # Unterminated string literal must surface as a filter error.
            self.mkfile('static/js/simple.coffee', '\n\n\n\na = foo: "1#{2}3')
            filter('js/simple.js')
| StarcoderdataPython |
12856963 | ###########################
# 6.00.2x Problem Set 1: Space Cows
from ps1_partition import get_partitions
import time
#================================
# Part A: Transporting Space Cows
#================================
def load_cows(filename):
    """
    Read the contents of the given file. Assumes the file contents contain
    data in the form of comma-separated cow name, weight pairs, and return a
    dictionary containing cow names as keys and corresponding weights as values.

    Parameters:
    filename - the name of the data file as a string

    Returns:
    a dictionary of cow name (string), weight (int) pairs
    """
    cow_dict = dict()
    # Context manager guarantees the handle is closed even on error
    # (the original opened the file and never closed it).
    with open(filename, 'r') as f:
        for line in f:
            # Tolerate blank or whitespace-only lines (e.g. a trailing
            # newline at end of file), which previously raised IndexError.
            if not line.strip():
                continue
            line_data = line.split(',')
            cow_dict[line_data[0]] = int(line_data[1])
    return cow_dict
# Problem 1
def greedy_cow_transport(cows,limit=10):
    """
    Uses a greedy heuristic to determine an allocation of cows that attempts to
    minimize the number of spaceship trips needed to transport all the cows. The
    returned allocation of cows may or may not be optimal.
    The greedy heuristic should follow the following method:

    1. As long as the current trip can fit another cow, add the largest cow that will fit
        to the trip
    2. Once the trip is full, begin a new trip to transport the remaining cows

    Does not mutate the given dictionary of cows.

    Parameters:
    cows - a dictionary of name (string), weight (int) pairs
    limit - weight limit of the spaceship (an int)

    Returns:
    A list of lists, with each inner list containing the names of cows
    transported on a particular trip and the overall list containing all the
    trips
    """
    # Cows heavier than the limit can never be shipped; the original
    # implementation looped forever on such input, so drop them up front.
    # sorted() is stable, so cows of equal weight keep dictionary
    # (insertion) order, matching the original's tie-breaking.
    remaining = [name for name in sorted(cows, key=cows.get, reverse=True)
                 if cows[name] <= limit]
    trips = []
    while remaining:
        space = limit
        trip = []
        # Greedily take the heaviest cow that still fits on this trip.
        # Iterate over a copy so we can remove from ``remaining`` safely.
        for name in list(remaining):
            if cows[name] <= space:
                trip.append(name)
                space -= cows[name]
                remaining.remove(name)
        trips.append(trip)
    return trips
# Problem 2
def brute_force_cow_transport(cows,limit=10):
    """
    Finds the allocation of cows that minimizes the number of spaceship trips
    via brute force. The brute force algorithm should follow the following method:

    1. Enumerate all possible ways that the cows can be divided into separate trips
    2. Select the allocation that minimizes the number of trips without making any trip
        that does not obey the weight limitation

    Does not mutate the given dictionary of cows.

    Parameters:
    cows - a dictionary of name (string), weight (int) pairs
    limit - weight limit of the spaceship (an int)

    Returns:
    A list of lists, with each inner list containing the names of cows
    transported on a particular trip and the overall list containing all the
    trips, or None if no legal allocation exists (the original raised
    ValueError in that case)
    """
    def trip_is_legal(trip):
        # A trip is legal when its cows' combined weight is within the limit.
        # (The original had an unreachable ``break`` after ``return``.)
        return sum(cows[name] for name in trip) <= limit

    # Track the best allocation instead of materialising every legal
    # partition; ties keep the first one in generation order, exactly as
    # the original's ``min`` + first-match loop did.
    best_allocation = None
    for partition in get_partitions(list(cows.keys())):
        if all(trip_is_legal(trip) for trip in partition):
            if best_allocation is None or len(partition) < len(best_allocation):
                best_allocation = partition
    return best_allocation
# Problem 3
def compare_cow_transport_algorithms():
    """
    Using the data from ps1_cow_data.txt and the specified weight limit, run your
    greedy_cow_transport and brute_force_cow_transport functions here. Use the
    default weight limits of 10 for both greedy_cow_transport and
    brute_force_cow_transport.

    Print out the number of trips returned by each method, and how long each
    method takes to run in seconds.

    Returns:
    Does not return anything.
    """
    cows = load_cows("ps1_cow_data.txt")
    limit = 10

    # Time the greedy heuristic.
    start = time.time()
    greedy_trips = greedy_cow_transport(cows, limit)
    elapsed = time.time() - start
    # The original printed only the elapsed time; the docstring requires the
    # number of trips as well.
    print('greedy_cow_transport:', len(greedy_trips), 'trips in', elapsed, 'seconds')

    # Time the exhaustive search.
    start = time.time()
    brute_trips = brute_force_cow_transport(cows, limit)
    elapsed = time.time() - start
    print('brute_force_cow_transport:', len(brute_trips), 'trips in', elapsed, 'seconds')
    return
"""
Here is some test data for you to see the results of your algorithms with.
Do not submit this along with any of your answers. Uncomment the last two
lines to print the result of your problem.
"""
# Ad-hoc smoke test: load the sample data and time both algorithms.
# (compare_cow_transport_algorithms returns None, so the final print shows None.)
cows = load_cows("ps1_cow_data.txt")
limit=10
#print(cows)
#
#print(greedy_cow_transport(cows, limit))
#print(brute_force_cow_transport(cows, limit))
print(compare_cow_transport_algorithms())
| StarcoderdataPython |
3263633 | <gh_stars>0
from collections import deque
import numpy as np
import torch
class RolloutBuffer:
    """Fixed-size storage for one on-policy rollout.

    ``states`` and ``dones`` hold one extra slot (index -1) so the final
    observation/done flag of a rollout can seed the next rollout.
    """

    def __init__(self, buffer_size, state_shape, action_shape, device):
        # Next write index; wraps around modulo ``buffer_size``.
        self._p = 0
        self.buffer_size = buffer_size

        self.states = torch.empty(
            (buffer_size + 1, *state_shape), dtype=torch.float, device=device)
        self.actions = torch.empty(
            (buffer_size, *action_shape), dtype=torch.float, device=device)
        self.rewards = torch.empty(
            (buffer_size, 1), dtype=torch.float, device=device)
        self.dones = torch.empty(
            (buffer_size + 1, 1), dtype=torch.float, device=device)
        self.log_pis = torch.empty(
            (buffer_size, 1), dtype=torch.float, device=device)

    def reset(self, state):
        """Stash the initial observation of a new rollout in the spare slot."""
        self.states[-1].copy_(torch.from_numpy(state))
        self.dones[-1] = 0

    def append(self, next_state, action, reward, done, log_pi):
        """Record one environment step.

        On the first write of a rollout, the previous rollout's final
        state/done (kept in the spare last slot) are copied to index 0.
        """
        if self._p == 0:
            self.states[0].copy_(self.states[-1])
            self.dones[0].copy_(self.dones[-1])

        self.states[self._p + 1].copy_(torch.from_numpy(next_state))
        self.actions[self._p].copy_(torch.from_numpy(action))
        self.rewards[self._p] = float(reward)
        self.dones[self._p + 1] = float(done)
        self.log_pis[self._p] = float(log_pi)

        self._p = (self._p + 1) % self.buffer_size
class NStepBuffer:
    """Sliding window of transitions used to build n-step discounted returns."""

    def __init__(self, gamma=0.99, nstep=3):
        # Pre-compute gamma^0 .. gamma^(nstep-1) once.
        self.discounts = [gamma ** k for k in range(nstep)]
        self.nstep = nstep
        self.states = deque(maxlen=self.nstep)
        self.actions = deque(maxlen=self.nstep)
        self.rewards = deque(maxlen=self.nstep)

    def append(self, state, action, reward):
        """Push one transition onto the window."""
        for window, item in (
            (self.states, state),
            (self.actions, action),
            (self.rewards, reward),
        ):
            window.append(item)

    def get(self):
        """Pop the oldest (state, action) together with its n-step reward."""
        assert len(self.rewards) > 0
        return (
            self.states.popleft(),
            self.actions.popleft(),
            self.nstep_reward(),
        )

    def nstep_reward(self):
        """Discounted sum over buffered rewards; drops the oldest reward."""
        total = np.sum([d * r for d, r in zip(self.discounts, self.rewards)])
        self.rewards.popleft()
        return total

    def is_empty(self):
        return len(self.rewards) == 0

    def is_full(self):
        return len(self.rewards) == self.nstep

    def __len__(self):
        return len(self.rewards)
class _ReplayBuffer:
    """Base off-policy ring buffer storing actions/rewards/dones.

    State storage is left to subclasses. With ``nstep > 1``, rewards are
    folded into n-step returns via ``NStepBuffer`` before being committed.
    """

    def __init__(self, buffer_size, state_shape, action_shape, device,
                 gamma, nstep):
        # _p: next write index (wraps); _n: number of valid entries so far.
        self._p = 0
        self._n = 0
        self.buffer_size = buffer_size
        self.state_shape = state_shape
        self.action_shape = action_shape
        self.device = device
        self.gamma = gamma
        self.nstep = nstep

        self.actions = torch.empty(
            (buffer_size, *action_shape), dtype=torch.float, device=device)
        self.rewards = torch.empty(
            (buffer_size, 1), dtype=torch.float, device=device)
        self.dones = torch.empty(
            (buffer_size, 1), dtype=torch.float, device=device)

        if nstep != 1:
            self.nstep_buffer = NStepBuffer(gamma, nstep)

    def append(self, state, action, reward, done, next_state,
               episode_done=None):
        """Add a transition, routing it through the n-step window if enabled."""
        if self.nstep != 1:
            self.nstep_buffer.append(state, action, reward)

            if self.nstep_buffer.is_full():
                state, action, reward = self.nstep_buffer.get()
                self._append(state, action, reward, done, next_state)

            # Flush the partial window at episode end so nothing is lost.
            if done or episode_done:
                while not self.nstep_buffer.is_empty():
                    state, action, reward = self.nstep_buffer.get()
                    self._append(state, action, reward, done, next_state)
        else:
            self._append(state, action, reward, done, next_state)

    def _append(self, state, action, reward, done, next_state):
        # Subclasses store state/next_state first, then delegate here; this
        # method advances the ring pointer, so it must run last.
        self.actions[self._p].copy_(torch.from_numpy(action))
        self.rewards[self._p] = float(reward)
        self.dones[self._p] = float(done)

        self._p = (self._p + 1) % self.buffer_size
        self._n = min(self._n + 1, self.buffer_size)
class StateReplayBuffer(_ReplayBuffer):
    """Replay buffer that keeps dense state tensors on ``device``."""

    def __init__(self, buffer_size, state_shape, action_shape, device,
                 gamma, nstep):
        super().__init__(
            buffer_size, state_shape, action_shape, device, gamma, nstep)

        self.states = torch.empty(
            (buffer_size, *state_shape), dtype=torch.float, device=device)
        self.next_states = torch.empty(
            (buffer_size, *state_shape), dtype=torch.float, device=device)

    def _append(self, state, action, reward, done, next_state):
        # Write states before the base class advances the ring pointer.
        self.states[self._p].copy_(torch.from_numpy(state))
        self.next_states[self._p].copy_(torch.from_numpy(next_state))
        super()._append(None, action, reward, done, None)

    def sample(self, batch_size):
        """Uniformly sample a batch of transitions (with replacement)."""
        idxes = np.random.randint(low=0, high=self._n, size=batch_size)
        return (
            self.states[idxes],
            self.actions[idxes],
            self.rewards[idxes],
            self.dones[idxes],
            self.next_states[idxes]
        )
class PixelReplayBuffer(_ReplayBuffer):
    """Replay buffer for image observations stored as LazyFrames in lists.

    Keeping Python lists (not tensors) defers frame decoding until
    :meth:`sample`, where frames are converted to uint8 tensors.
    """

    def __init__(self, buffer_size, state_shape, action_shape, device,
                 gamma, nstep):
        super().__init__(
            buffer_size, state_shape, action_shape, device, gamma, nstep)

        self.states = []
        self.next_states = []

    def _append(self, state, action, reward, done, next_state):
        self.states.append(state)
        self.next_states.append(next_state)

        # Trim the lists so they never exceed the ring-buffer capacity.
        num_excess = len(self.states) - self.buffer_size
        if num_excess > 0:
            del self.states[:num_excess]
            del self.next_states[:num_excess]

        super()._append(None, action, reward, done, None)

    def sample(self, batch_size):
        """Uniformly sample a batch; decodes LazyFrames into uint8 tensors."""
        idxes = np.random.randint(low=0, high=self._n, size=batch_size)

        states = np.empty((batch_size, *self.state_shape), dtype=np.uint8)
        next_states = np.empty((batch_size, *self.state_shape), dtype=np.uint8)

        # Correct indices for lists of states and next_states: once the
        # buffer has wrapped, list position 0 corresponds to ring index _p.
        bias = -self._p if self._n == self.buffer_size else 0
        state_idxes = np.mod(idxes+bias, self.buffer_size)

        # Convert LazyFrames into np.array.
        for i, idx in enumerate(state_idxes):
            states[i, ...] = self.states[idx]
            next_states[i, ...] = self.next_states[idx]

        return (
            torch.tensor(states, dtype=torch.uint8, device=self.device),
            self.actions[idxes],
            self.rewards[idxes],
            self.dones[idxes],
            torch.tensor(next_states, dtype=torch.uint8, device=self.device)
        )
| StarcoderdataPython |
6581848 | <gh_stars>1-10
import os
import cv2
import tensorflow as tf
from tensorflow.keras import layers
# Directory of training images to inspect.
imgpath='./tr-vf/'
imgnames=os.listdir(imgpath)
# Sanity check: print the dimensions of (at most) the first ~22 images.
for i in range(0,len(imgnames)):
    img=cv2.imread(imgpath+imgnames[i])
    # NOTE(review): cv2.imread returns None for unreadable files, which would
    # raise AttributeError below -- assumes the directory holds only images.
    height=img.shape[0]
    width=img.shape[1]
    depth=img.shape[2]
    print(imgnames[i],height,width,depth)
    if i>20 :
        break
# inputs=tf.keras.Input(shape=(height,width,depth),name="vfp")
# cov1=layers.Conv2D(8,(10,10),padding='same',activation='relu')(Input)
# max1=layers.MaxPool2D(2)(cov1)
# cov2=layers.Conv2D(4,(5,5),padding='same',activation='relu')(max1)
# max2=layers.MaxPool2D(5)(cov2)
# cov3=layers.Conv2D(2,(3,3),padding='same',activation='relu')(max2)
# max3=layers.MaxPool2D((8,5),strides=4,padding='valid')(cov3)
# resh=max3.reshape(4,192)
# drop=resh.dropout(0.9)
# outputs=layers.Dense(192,36,activation='softmax')(resh)
# model = tf.keras.Model(inputs=inputs, outputs=outputs, name='VGG16+')
# model.compile(optimizer=tf.keras.optimizers.RMSprop(),
# loss='sparse_categorical_crossentropy', # 直接填api,后面会报错
# metrics=['accuracy'])
# history = model.fit(x_train, y_train, batch_size=64, epochs=5, validation_split=0.2)
# test_scores = model.evaluate(x_test, y_test, verbose=0)
# print('test loss:', test_scores[0])
# print('test acc:', test_scores[1])
| StarcoderdataPython |
11377114 | from sys import stdout,platform
from os import path,geteuid
from time import sleep
from subprocess import Popen, PIPE, STDOUT, run , check_output ,CalledProcessError
from collections import Counter
from re import *
from platform import *
class GeneralGui:
    """Console UI for "FMS WAF": a log-watching firewall helper.

    Tails an Apache domlog with ``tail -f`` and bans an IP via ``ufw`` when
    it repeats more than a configured count while a countdown is at zero.
    NOTE(review): targets CentOS/RHEL, must run as root; the constructor
    drives the entire (blocking) UI flow.
    """

    def __init__(self,):
        # ANSI colour codes used throughout the UI.
        self.CRED = '\33[31m' # red: error text style
        self.CEND = '\33[0m' # reset to default text style
        self.GREEN = '\033[0;32m' # green: success text style
        self.CURL = '\33[4m' # underline: emphasis text style
        self.screen_clean=run(['clear','-','x']) # clear the terminal
        # Call order of the whole UI flow.
        self.system_required()
        self.progress_bar()
        self.usr_intput()
        self.main_progress()

    # System requirements check
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********
    def system_required(self,):
        """Verify root privileges and a supported distro; install ufw if absent.

        Exits the process when not running as root or on an unsupported OS.
        """
        try:
            if geteuid() == 0:
                stdout.write(self.GREEN)
                print(' Sistem Gereksinimleri Kontrol Ediliyor\n')
                sleep(1)
                self.output = ''
                # Probe ufw; a CalledProcessError below means it is missing.
                if linux_distribution()[0].lower() == 'centos':
                    check_output( "systemctl status ufw",stderr=STDOUT,shell=True)
                    self.output = 'centos'
                elif linux_distribution()[0].lower() == 'centos linux':
                    check_output( "systemctl status ufw",stderr=STDOUT,shell=True)
                    self.output = 'centos linux'
                elif linux_distribution()[0].lower() == 'red hat enterprise linux server':
                    check_output( "systemctl status ufw",stderr=STDOUT,shell=True)
                    self.output = 'red hat enterprise linux serve'
                else:
                    print('Kullanmış Olduğunuz İşletim Sistemi :'+linux_distribution()[0]+' Desteklenmemektedir...\nÇıkış Yapılıyor...')
                    exit()
            else:
                stdout.write(self.CRED)
                print('Fms Waf Root Yetkisi ile çalışmadıktadır lütfen root yetkisi ile çalıştırın. !/n')
                sleep(0.5)
                print('Çıkış yapılıyor...')
                sleep(0.5)
                exit()
        except CalledProcessError as exc:
            # NOTE(review): this condition is always truthy -- the ``or``
            # operands are non-empty string literals. Confirm intent.
            if self.output in 'centos' or 'centos linux' or 'red hat enterprise linux server':
                print(self.CRED,exc.output.decode("utf-8"))
                print(self.GREEN,"Ufw Firewall Yükleniyor...")
                sleep(1)
                ufw_install = run(['yum','install','ufw','-y'],stderr=stdout)
                ufw_enabled = run(['systemctl','enable','ufw'],stderr=stdout)
                ufw_start = run(['systemctl','start','ufw'],stderr=stdout)
                # New feature: listening TCP ports are allowed through the
                # firewall automatically.
                # More updates to come :)
                open_port = check_output(" netstat -tnl | awk -F: {'print $4'}" ,shell=True,universal_newlines=True)
                current_port = 0
                if current_port < len(open_port.split()):
                    for i in open_port.split():
                        current_port_added = run(['ufw','allow',i])
                clear_scr = run(['clear','-x'],stderr=stdout)
                stdout.write(self.GREEN)
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********

    # User input
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********
    def usr_intput(self,domain=...,time=...,delay=...):
        """Main interactive menu loop: configure protection or manage bans.

        Populates ``self.domain``, ``self.counts``, ``self.time``,
        ``self.flush_time``, ``self.status`` and ``self.inp``, then hands
        off to :meth:`main_progress`.
        """
        while(True):
            stdout.write(self.GREEN)
            print('  ____            _      __          __     __  __ __      __ ')
            print(' |  _ \          (_)     \ \        / /    / _| \ \      / /_ |')
            print(' | |_) | __ _ ___ _  ___  \ \  /\  / /_ _ | |_   \ \    / / | |')
            print(' |  _ < / _` / __| |/ __|  \ \/  \/ / _` ||  _|   \ \  / /  | |')
            print(' | |_) | (_| \__ \ | (__    \  /\  / (_| || |      \  /     | |')
            print(' |____/ \__,_|___/_|\___|    \/  \/ \__,_||_|       \/      |_|')
            print('')
            sleep(0.01)
            # Menu of options...
            try:
                self.secim_sonuc = (int(input(" Lütfen Yapmak İstediğiniz Seçeneklerden Birisini Seçiniz Rakam İle; \n 1) Basif Waf Koruma Prosedürü \n 2) İp Adresi Engel Kaldırma \n 3) Çıkış \n Seçiminiz :")))
                # dict used as a switch/case :)
                self.secim = {
                    1:'koruma',
                    2:'engel',
                    3:'cikis',
                }
                if self.secim.get(self.secim_sonuc) in "koruma":
                    self.domain = (str(input(" Lütfen Domain Adresi Giriniz : ")))
                    self.pattern = '^([A-Za-z0-9]\.|[A-Za-z0-9][A-Za-z0-9-]{0,61}[A-Za-z0-9]\.){1,3}[A-Za-z]{2,6}$'
                    self.path = "/usr/local/apache/domlogs/"
                    self.status = {}
                    self.http_path = self.path+self.domain
                    self.https_path = self.path+self.domain+'-ssl_log'
                    # Accept the domain only if it matches the pattern AND
                    # its access log actually exists on disk.
                    if match(self.pattern,self.domain) and (path.exists(self.http_path)):
                        try:
                            self.counts = (int(input(" Bir İp Kaç Kez Tekrarlanınca Banlansın ")))
                            self.time = (int(input(" Kaç Saniye İçersinde Tekrarlanan İp Adresi Banlansın")))
                            self.flush_time = self.time
                            self.status = {
                                '1':'Http',
                                '2':'Https',
                            }
                            self.inp = input(' Lütfen Websitenizin ziyaret edilen protokolü Seçiniz Rakam İle;\n 1)Http 2)Https \n Seçiminiz: ')
                            # Screen clearing to come...
                            # screen_clean=run(['clear','-','x'])
                            self.main_progress()
                        except ValueError :
                            print('Boş Bırakılamaz veya Sadece Rakam Girmelisin...')
                        except :
                            print(self.CRED,"\n Hatalı Değer Girildi.. Lütfen Tekrar Deneyiniz...")
                    else:
                        print(self.CRED,"Böyle bir alan adı yok veya yanlış domain sytax girildi...")
                elif self.secim.get(self.secim_sonuc) in "engel":
                    print(" Fms Waf Tarafından Engellenen İp Adresleri Gösteriliyor...")
                    sleep(1)
                    print("")
                    self.progress_bar()
                    # List only the rules this tool created (tagged "FMS").
                    deny_list = check_output('ufw status numbered | grep FMS',shell=True, universal_newlines=True)
                    print(self.CRED,deny_list)
                    stdout.write(self.GREEN)
                    kaldir = (str(input(" İp Adresi Engeli Kaldırmak İstermisiniz ? \n 1)Evet \n 2)Hayır \n Seçiminiz : ")))
                    kaldir_secim = {
                        '1':'evet',
                        '2':'hayir'
                    }
                    if kaldir_secim.get(kaldir) in "evet":
                        numara = (str(input("Kaldırılmasını İstediğiniz İp adresini Numara İle Seçiniz :")))
                        run(["ufw","delete",numara])
                else:
                    print(self.CRED,"<NAME>ıkış Yapılıyor...")
                    print(self.CEND)
                    exit()
            except TypeError:
                print(self.CRED,"Hatalı Seçim Yaptınız...")
                self.GREEN
            except ValueError:
                print(self.CRED,"Hatalı Deger Girdiniz...")
                self.GREEN
            except KeyboardInterrupt:
                print(self.CRED,"CTRL + C ile Çıkış Yapıldı...")
                print(self.CEND)
                exit()
            except EOFError:
                print(self.CRED,"CTRL + D ile Çıkış Yapıldı...")
                print(self.CEND)
                exit()
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********

    # Loading bar...
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********
    def progress_bar(self):
        """Draw a cosmetic 0-100% progress bar (takes about one second)."""
        # NOTE(review): the loop variable is stored on the instance
        # (``self.i``) rather than a local -- preserved as-is.
        for self.i in range(100+1):
            sleep(0.01)
            stdout.write(('#'*self.i)+(''*(100-self.i))+("\r [%d"%self.i+"%]"+("Tamamlandı:")))
            stdout.flush()
        print('\n')
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********

    # Main status bar...
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********
    '''
    1) Sağ ve Sol Olmak üzere 2'ye bölünecekler ana kısım.
    2) Sağ kısımda sunucu top kısmından bilgiler.
    3) Sol kısımda Engellenen ip adres adet sayısı.
    4) Ortalama 1dk içersinde gelen toplam istek sayısı
    5) Dahası gelecek..
    '''
    #**********	 ********* ********* ********* ********* ********* ********* ********* ********

    # HTTP main process loop
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********
    def main_progress(self):
        """Tail the chosen access log forever, banning repeat-offender IPs.

        Requires ``self.status``/``self.inp``/``self.counts``/``self.time``/
        ``self.flush_time`` to have been set by :meth:`usr_intput`.
        An IP is banned when its hit counter exceeds ``counts`` while the
        countdown ``self.time`` has reached zero.
        """
        while(True):
            mylist = list()
            if self.status.get(self.inp).lower() in 'http':
                # Follow the plain-HTTP access log.
                deneme = Popen (['tail','-f',self.http_path], universal_newlines=True, stdout=PIPE)
                cnc = Counter()
                for line in deneme.stdout:
                    line = line[::1]
                    print(self.GREEN,line)
                    # First whitespace-separated token is the client IP.
                    seperated = line.split()
                    seperated.append(line)
                    mylist = list()
                    mylist.append(seperated[0])
                    sleep(1)
                    for i in mylist:
                        i=(str(i))
                        cnc[i] += 1
                        back = self.time
                        print(self.time,'Zaman Sayacı ')
                        print(cnc[i],'Tekrar Sayacı')
                        if (int(cnc[i])) > self.counts and self.time == 0:
                            print(self.CRED,i,"- - İp adresi Engellenmiştir..")
                            engelle = run(['sudo','ufw','insert','1','deny','from',i,'to','any','port' ,'comment','FMS WAF Blocked'],stdout=PIPE)
                            print("")
                            # Restart the countdown after issuing a ban.
                            self.time = self.flush_time
                        self.time -= 1
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********

    # HTTPS main process loop (mirror of the HTTP branch, other log file).
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********
            else:
                deneme = Popen (['tail','-f',self.https_path], universal_newlines=True, stdout=PIPE)
                cnc = Counter()
                for line in deneme.stdout:
                    line = line[::1]
                    print(self.GREEN,line)
                    seperated = line.split()
                    seperated.append(line)
                    mylist = list()
                    mylist.append(seperated[0])
                    sleep(1)
                    for i in mylist:
                        i=(str(i))
                        cnc[i] += 1
                        back = self.time
                        print(self.time,'Zaman Sayacı ')
                        print(cnc[i],'Tekrar Sayacı')
                        if (int(cnc[i])) > self.counts and self.time == 0:
                            print(self.CRED,i,"- - İp adresi Engellenmiştir..")
                            engelle = run(['sudo','ufw','insert','1','deny','from',i,'to','any','port','80','comment','FMS WAF Blocked'],stdout=PIPE)
                            print("")
                            self.time = self.flush_time
                        self.time -= 1
    #**********	 ********* ********* ********* ********* ********* ********* ********* *********
if __name__ == "__main__":
    # BUG FIX: ``self`` does not exist at module scope, so the original
    # ``self.general = GeneralGui()`` raised NameError; use a plain name.
    general = GeneralGui()   # NOTE: __init__ already runs the full UI flow.
    general.usr_intput()     # Re-enter the menu loop if the constructor returns.
    general.progress_bar()
    general.main_progress()
| StarcoderdataPython |
9637085 | from colorama import Fore, init
from discord.ext import commands, tasks
import threading, os, random, pyfade
from colorfull import init; init()
# Put "user_id" -> mention user
__MESSAGE__ = '''
'''
__TOKEN__ = '<KEY>'
class Worker(threading.Thread):
def __init__(self, token: str):
threading.Thread.__init__(self)
self.client = commands.Bot(command_prefix= 'zap.', self_bot= True)
self.token = token
self.black_list = []
self.wait_list = []
self.error = 0
init()
@tasks.loop(seconds= 1)
async def update_gui():
print(f'&> Waitlist: {Fore.CYAN}{len(self.wait_list)}{Fore.RESET} DM: {Fore.GREEN}{len(self.black_list) - len(self.wait_list) - self.error}{Fore.RESET} Blocked: {Fore.RED}{self.error}{Fore.RESET} ', end= '\r')
@tasks.loop(minutes= 1)
async def dm_one():
if len(self.wait_list) == 0:
return
ppl = random.choice(self.wait_list)
self.wait_list.remove(ppl)
try:
user = await self.client.fetch_user(ppl)
await user.send(__MESSAGE__.replace('user_id', f'<@{ppl}>'))
except:
self.error += 1
@self.client.event
async def on_ready():
update_gui.start()
dm_one.start()
@self.client.event
async def on_message(ctx):
if not ctx.author.bot:
self.add_user(ctx.author.id)
@self.client.event
async def on_raw_reaction_add(payload):
if not payload.member.bot:
self.add_user(payload.member.id)
@self.client.event
async def on_member_join(member):
if not member.bot:
self.add_user(member.id)
# AttributeError: 'Member' object has no attribute 'member'
@self.client.event
async def on_member_update(before, after):
if not after.bot:
self.add_user(after.id)
@self.client.event
async def on_voice_state_update(member, before, after):
if not member.bot:
self.add_user(member.id)
def add_user(self, user_id: str):
if user_id != self.client.user.id and user_id not in self.black_list:
self.black_list.append(user_id)
self.wait_list.append(user_id)
def run(self):
self.client.run(self.token, bot= False)
if __name__ == '__main__':
os.system('cls && title !ZAPP ~ github.com/its_vichy' if os.name == 'nt' else 'clear')
print(pyfade.Fade.Horizontal(pyfade.Colors.red_to_purple, '''
┬ ╔═╗╔═╗╔═╗╔═╗
│ ╔═╝╠═╣╠═╝╠═╝
o ╚═╝╩ ╩╩ ╩
'''))
Worker(__TOKEN__).start()
| StarcoderdataPython |
1938926 | <reponame>xu-kai-xu/OpenPNM
r"""
Collection of pre-defined algorithms
====================================
The ``algorithms`` module contains classes for conducting transport
simulations on pore networks.
"""
from ._mixins import *
from ._generic_algorithm import *
from ._generic_transport import *
from ._reactive_transport import *
from ._transient_reactive_transport import *
from ._stokes_flow import *
from ._fickian_diffusion import *
from ._transient_fickian_diffusion import *
from ._advection_diffusion import *
from ._transient_advection_diffusion import *
from ._fourier_conduction import *
from ._ohmic_conduction import *
from ._ordinary_percolation import *
from ._invasion_percolation import *
from ._mixed_ip import *
from ._mixed_ip_coop import *
from ._ionic_conduction import *
from ._transient_ionic_conduction import *
| StarcoderdataPython |
4873638 | import logging
from behave import given, then, when
from structlog import wrap_logger
from acceptance_tests import browser
from acceptance_tests.features.pages import create_survey_form, survey
from common.respondent_utilities import create_ru_reference
from common.string_utilities import substitute_context_values
logger = wrap_logger(logging.getLogger(__name__))
@given('the internal user has entered the create survey URL')
@when('the internal user has entered the create survey URL')
def check_user_on_survey_create_page(_):
    """Navigate to the create-survey page and verify its title."""
    survey.go_to_create()
    expected_title = "Create survey"
    assert expected_title in browser.title, f"Unexpected page title {browser.title} ({expected_title} expected)"
@given("they enter new survey details with legal basis of '{legal_basis}'")
@when("they enter new survey details with legal basis of '{legal_basis}'")
def create_new_survey_details(context, legal_basis):
    """Fill in the create-survey form from the scenario context and save.

    Stores ``legal_basis`` on the context for later verification steps.
    """
    create_survey_form.edit_survey_ref(context.survey_ref)
    create_survey_form.edit_short_name(context.short_name)
    create_survey_form.edit_long_name(context.long_name)
    create_survey_form.edit_legal_basis(legal_basis)
    context.legal_basis = legal_basis
    create_survey_form.click_save()
@given("they enter new survey details with legal basis of '{legal_basis}' and new short name")
@when("they enter new survey details with legal basis of '{legal_basis}' and new short name")
def create_new_survey_details_with_new_short_name(context, legal_basis):
    """Like create_new_survey_details, but with a freshly generated short name."""
    context.short_name = create_ru_reference()
    create_new_survey_details(context, legal_basis)
@then('they are taken to survey list page')
def they_are_taken_to_survey_list_page(context):
    """Verify the browser landed on the survey list page after saving."""
    expected_title = "Surveys | Survey Data Collection"
    assert expected_title in browser.title, \
        (f"Unexpected page title {browser.title} ({expected_title} expected) "
         f"- possible error: {create_survey_form.save_error()}")
@then('the new survey information is on the page')
def the_new_survey_information_is_on_the_page(context):
    """Verify the survey created earlier is listed with the expected details."""
    surveys = survey.get_surveys()
    matching = [s for s in surveys if s['id'] == context.survey_ref]
    # BUG FIX: the original message referenced non-existent ``context.survey_id``,
    # raising AttributeError instead of a useful assertion message.
    assert len(matching) > 0, f"Failed to find survey with ref {context.survey_ref}"

    # We've checked it's length is greater than zero so this is safe
    matching_survey = matching[0]

    assert matching_survey['id'] == context.survey_ref, \
        f"Unexpected survey id {matching_survey['id']} ({context.survey_ref} expected)"
    # BUG FIX: the next two messages previously printed the wrong field
    # (``id``) instead of the value actually being compared.
    assert matching_survey['name'] == context.long_name, \
        f"Unexpected survey name {matching_survey['name']} ({context.long_name} expected)"
    assert matching_survey['short_name'] == context.short_name, \
        f"Unexpected survey short name {matching_survey['short_name']} ({context.short_name} expected)"
    assert matching_survey['legal_basis'] == context.legal_basis, \
        f"Unexpected survey legal basis {matching_survey['legal_basis']} ({context.legal_basis} expected)"
@then("they get an error message of '{match_string}'")
def check_error_message_matches(context, match_string):
    """Compare the on-page save error with the context-substituted expectation."""
    actual_error_message = create_survey_form.save_error()
    expected_error_message = substitute_context_values(context, match_string)
    assert expected_error_message == actual_error_message, \
        f"expected:{expected_error_message} does not match {actual_error_message}"
| StarcoderdataPython |
3424895 | <reponame>vgalaktionov/snaql-migration
try:
import unittest2 as unittest
except ImportError:
import unittest
from io import StringIO
from click import ClickException
from click.testing import CliRunner
from snaql_migration.snaql_migration import snaql_migration, _parse_config, _collect_migrations
class TestConfig(unittest.TestCase):
    """Tests for snaql-migration config parsing and migration discovery."""

    def setUp(self):
        self.runner = CliRunner()

    def test_collect_migrations(self):
        self.assertEqual(_collect_migrations('snaql_migration/tests/users/migrations'),
                         ['001-create-users',
                          '002-update-users',
                          '003-create-index'
                          ])

    def test_parse_config(self):
        # NOTE: the local below was named ``input`` originally, shadowing
        # the builtin; renamed to ``config_stream``.
        # invalid db uri
        config_stream = StringIO(u'db_urii: "postgres://test:@localhost/test"')
        self.assertRaises(ClickException, _parse_config, config_stream)

        # no migrations defined
        config_stream = StringIO(u'db_uri: "postgres://test:@localhost/test"\r\n'
                                 u'migrations: \r\n')
        self.assertRaises(ClickException, _parse_config, config_stream)

        config_stream = StringIO(u'db_uri: "{0}"\r\n'
                                 u'migrations:\r\n'
                                 u'    users_app: "snaql_migration/tests/users/migrations"\r\n'
                                 u'    countries_app: "snaql_migration/tests/countries/migrations"')

        # valid config
        config = _parse_config(config_stream)
        self.assertIn('db_uri', config)
        self.assertEqual(config['apps'], {
            'users_app': {
                'migrations': ['001-create-users', '002-update-users', '003-create-index'],
                'path': 'snaql_migration/tests/users/migrations'
            },
            'countries_app': {
                'migrations': ['001-create-countries'],
                'path': 'snaql_migration/tests/countries/migrations'
            }})

    def test_invalid_config(self):
        result = self.runner.invoke(snaql_migration, ['--config', 'invalid.yml'])
        self.assertEqual(result.exit_code, 2)
| StarcoderdataPython |
6686024 | <gh_stars>1-10
"""
File storage routines for openedx_export_plugins Django app.
"""
import logging
import boto
from boto.s3.key import Key
from .app_settings import AWS_ID, AWS_KEY, COURSE_EXPORT_PLUGIN_BUCKET, COURSE_EXPORT_PLUGIN_STORAGE_PREFIX
logger = logging.getLogger(__name__)
def do_store_s3(tmp_fn, storage_path):
    """Upload a locally generated export file to the configured S3 bucket.

    tmp_fn -- path of the temporary local file to upload.
    storage_path -- destination key, relative to the configured prefix.
    Credentials and bucket/prefix come from this app's settings module.
    """
    local_path = tmp_fn
    bucketname = COURSE_EXPORT_PLUGIN_BUCKET
    dest_path = COURSE_EXPORT_PLUGIN_STORAGE_PREFIX + "/" + storage_path
    s3_conn = boto.connect_s3(AWS_ID, AWS_KEY)
    bucket = s3_conn.get_bucket(bucketname)
    key = Key(bucket, name=dest_path)
    key.set_contents_from_filename(local_path)
    logger.info("uploaded {local} to S3 bucket {bucketname}/{s3path}".format(
        local=local_path, bucketname=bucketname, s3path=dest_path
    ))
| StarcoderdataPython |
8076952 | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()  # Register ModelAdmins from each installed app.

# Route table (old-style ``patterns()`` API, Django < 1.10).
urlpatterns = patterns('',
    url(r'^chant/', include('chant.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^', include('common.urls')),
    url(r'social/', include('social.apps.django_app.urls', namespace='social'))
)
| StarcoderdataPython |
11264595 | """
MIT License
Copyright (c) 2021 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import discord
from typing import List
def create_embed() -> discord.Embed:
    """
    Create an empty discord embed with color.

    :return: (discord.Embed) embed with the "blurple" brand color and no fields
    """
    return discord.Embed(color=discord.Color.blurple())
def create_no_argument_embed(arg_name: str = 'argument') -> discord.Embed:
    """
    Create an embed which alerts the user they need to supply an argument.

    :param arg_name: (str) The type of argument needed (e.g. channel)
    :return: (discord.Embed) embed with a single "Failed!" field
    """
    embed = create_embed()
    # 'Failed!' needed no f-prefix; PEP 8 spacing fixed on the default above.
    embed.add_field(name='Failed!', value=f"You need to supply a {arg_name}!")
    return embed
def populate_embed(names: list, values: list, inline: bool = False) -> discord.Embed:
    """Populate an embed from parallel lists of field names and values.

    Note: ``zip`` stops at the shorter list; the original indexed ``values``
    by ``names``'s positions and raised IndexError when ``values`` was
    shorter. Callers are expected to pass equal-length lists.
    """
    embed = discord.Embed(color=discord.Color.blurple())
    for name, value in zip(names, values):
        embed.add_field(name=name,
                        value=value,
                        inline=inline)
    return embed
def find_channel(bot, channels, channel_name):
    """Resolve a channel from a literal name or a ``<#id>`` mention string.

    Falls back to parsing ``channel_name`` as a mention when no channel with
    that exact name exists.
    NOTE(review): the fallback assumes the ``<#digits>`` mention format; a
    plain non-existent name would raise ValueError at ``int()`` -- confirm
    callers only pass names or mentions.
    """
    channel = discord.utils.get(channels, name=channel_name)
    if channel is None:
        channel_id = int(channel_name.replace('>', '').replace('<#', ''))
        channel = bot.get_channel(channel_id)
    return channel
def category_is_full(category: discord.CategoryChannel) -> bool:
    """Determines whether a category is full (has 50 channels)"""
    # 50 is Discord's per-category channel cap.
    return len(category.channels) >= 50
async def createchannelgeneric(guild, category, name) -> discord.TextChannel:
    """Command to create channel in same category with given name

    Arguments:
        - guild (discord.Guild): the guild the channel is being created in
        - category (discord.CategoryChannel): the category the channel is being created in
        - name (str): the name for the channel

    Returns:
        - channel (discord.TextChannel): The created channel, or none if the bot does not have sufficient perms.
    """
    try:
        # create channel
        channel = await guild.create_text_channel(name, category=category)
    except discord.Forbidden:
        # Missing "Manage Channels" permission: signal failure with None.
        return None

    return channel
# TODO: I'm going to need to rewriter this at some point...
def split_embed(embed: discord.Embed) -> List[discord.Embed]:
    """Splits embeds that are too long (discord character limit)
    Arguments:
    - embed (discord.Embed): the (possibly oversized) embed to split
    Returns
    - embed_list (List[discord.Embed]): embeds that jointly carry all of the
      original title/description/field content, each under the limits
    """
    # Normalise an unset title to "" so the len() calls below are safe.
    if embed.title == discord.Embed.Empty:
        embed.title = ""
    # NOTE(review): 2000 is Discord's *message* limit; embeds allow 4096
    # description / 6000 total characters — confirm the intended limit.
    EMBED_CHARACTER_LIMIT = 2000
    FIELD_CHARACTER_LIMIT = 1024
    embed_list = []
    character_count = len(embed.title) + len(embed.description)
    # If the title + description exceeds the character limit, we must break up the description into smaller parts.
    if character_count > EMBED_CHARACTER_LIMIT:
        print(f"Title and description are too long with {character_count} characters")
        # NOTE(review): characters_remaining is decremented but never read
        # after this loop — it appears to be leftover bookkeeping.
        characters_remaining = character_count
        description = embed.description
        while description != "":
            # Continuation embeds repeat the title with a " (continued)" suffix.
            embed_list.append(discord.Embed(title=embed.title + " (continued)" if len(embed_list) > 0 else embed.title,
                                            color=embed.color))
            # Find the point that is closest to the cutoff but with a space.
            cutoff_point = description[:(EMBED_CHARACTER_LIMIT - len(embed.title))].rfind(' ')
            if cutoff_point == -1:
                # No space found: hard-cut at the limit instead.
                cutoff_point = EMBED_CHARACTER_LIMIT - len(embed.title) - 1
            embed_list[-1].description = description[:cutoff_point+1]
            description = description[cutoff_point+1:]
            characters_remaining -= cutoff_point
    # If the title + description are small, we can just copy them over
    else:
        embed_list.append(discord.Embed(title=embed.title,
                                        description=embed.description,
                                        color=embed.color))
    # Running character tally for the embed currently being filled.
    character_count = len(embed_list[-1].title) + len(embed_list[-1].description)
    # Iterate over all the proposed fields in the embed
    for field in embed.fields:
        field_description = field.value
        field_character_count = len(field_description)
        # Cut down the proposed fields to the appropriate size
        while field_character_count > FIELD_CHARACTER_LIMIT:
            # If we can add a full-sized field to the embed, do it
            if character_count + len(field.name) + FIELD_CHARACTER_LIMIT <= EMBED_CHARACTER_LIMIT:
                cutoff_point = field_description[:FIELD_CHARACTER_LIMIT].rfind(' ')
                if cutoff_point == -1:
                    cutoff_point = FIELD_CHARACTER_LIMIT-1
                embed_list[-1].add_field(name=field.name,
                                         value=field_description[:cutoff_point+1],
                                         inline=False)
                field_character_count -= cutoff_point
                field_description = field_description[cutoff_point+1:]
            # If we can't add a full field to the embed, add a chopped field and then create a new embed
            else:
                cutoff_point = field_description[:EMBED_CHARACTER_LIMIT - character_count - len(field.name)].rfind(' ')
                if cutoff_point == -1:
                    cutoff_point = EMBED_CHARACTER_LIMIT - character_count - len(field.name) - 1
                embed_list[-1].add_field(name=field.name,
                                         value=field_description[:cutoff_point+1],
                                         inline=False)
                field_character_count -= cutoff_point
                field_description = field_description[cutoff_point+1:]
                # We just filled the entire embed up, so now we need to make a new one
                embed_list.append(discord.Embed(title=embed.title + " (continued)",
                                                color=embed.color))
                character_count = len(embed_list[-1].title)
        # Once we've gotten to here, we know that the remaining field character count is able to fit in one field.
        # Since the field character limit is smaller than the embed character limit, we know we'd only need one split.
        if field_character_count + len(field.name) + character_count > EMBED_CHARACTER_LIMIT:
            cutoff_point = field_description[:EMBED_CHARACTER_LIMIT - character_count - len(field.name)].rfind(' ')
            if cutoff_point == -1:
                cutoff_point = EMBED_CHARACTER_LIMIT - character_count - len(field.name) - 1
            embed_list[-1].add_field(name=field.name,
                                     value=field_description[:cutoff_point+1],
                                     inline=False)
            embed_list.append(discord.Embed(title=embed.title + " (continued)",
                                            color=embed.color))
            field_description = field_description[cutoff_point+1:]
            character_count = len(embed_list[-1].title) + len(field.name)
            embed_list[-1].add_field(name=field.name,
                                     value=field_description,
                                     inline=False)
        # I believe if we run here then we just don't need to split anything.
        else:
            embed_list[-1].add_field(name=field.name,
                                     value=field_description,
                                     inline=field.inline)
    return embed_list
3312323 | <gh_stars>1-10
# Source code reference: Microsoft Azure Machine Learning.
import subprocess
def az_login(sp_user: str, sp_password: str, sp_tenant_id: str):
    """
    Log the azure cli in with the provided service principal credentials.
    Run this before executing any other az cli command.
    """
    login_cmd = "az login --service-principal --username {} --password {} --tenant {}".format(
        sp_user, sp_password, sp_tenant_id)
    return run_cmd(login_cmd)
def run_cmd(cmd: str):
    """
    Runs an arbitrary command line command. Works for Linux or Windows.

    :param cmd: the shell command line to execute
    :return: tuple of (stdout text, stderr) — stderr is None because it is
        not captured and flows straight to the parent's stderr
    :raises Exception: if the command exits with a non-zero status
    """
    # subprocess.run waits for completion and reliably cleans up the child
    # process object, unlike the bare Popen/communicate pair it replaces.
    proc = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE,
                          universal_newlines=True)
    if proc.returncode != 0:
        print('Following command execution failed: {}'.format(cmd))
        raise Exception('Operation Failed. Look at console logs for error info')
    # Second element is always None (stderr is not piped), matching the
    # original (output, error) contract.
    return proc.stdout, None
def az_account_set(subscription_id: str):
    """
    Select the azure subscription to operate on.
    Run this right after az_login.
    """
    set_cmd = "az account set -s {}".format(subscription_id)
    return run_cmd(set_cmd)
def az_acr_create(resource_group: str, acr_name: str):
    """Create a Basic-SKU Azure Container Registry in *resource_group*."""
    create_cmd = "az acr create --resource-group {} --name {} --sku Basic".format(
        resource_group, acr_name)
    return run_cmd(create_cmd)
def az_acr_login(acr_name: str):
    """Authenticate the local docker client against the named registry."""
    login_cmd = "az acr login --name {}".format(acr_name)
    return run_cmd(login_cmd)
4857216 | <gh_stars>0
import validators
import logging
import random
import requests
import requests.utils
import json
class Downloader:
    """HTTP downloader built on ``requests`` with retry, proxy rotation,
    session handling and cookie-persistence helpers."""

    # Immutable per-class defaults. Mutable state (proxies, proxy lists,
    # the requests session) now lives on each instance — the original kept
    # lists/dicts here, silently sharing them between every Downloader.
    user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36'
    attempts = 3
    default_timeout = 45

    def __init__(self, use_session=True, proxy_string_list=None, change_proxies_manually=False):
        """
        Initiation of the class
        :param use_session=True: Are we going to keep session between requests?
        :param proxy_string_list=None: Proxies list. (For example: '192.168.127.12:3128', by default proxies will change after each request)
        :param change_proxies_manually=False: Are we going to change proxies only manually?
        """
        # Per-instance mutable state (previously shared class attributes).
        self.http_requests = None
        self.proxies = {}
        self.use_proxy = False
        self.change_proxies_manually = False
        self.proxy_string_list = []
        self.failed_proxy_string_list = []

        self.use_session = use_session
        # None (not []) is the default to avoid the shared mutable-default
        # argument trap; an explicit list keeps the original semantics.
        if proxy_string_list:
            self.change_proxies_manually = change_proxies_manually
            if change_proxies_manually:
                self.use_session = True
            else:
                self.use_session = False
            self.use_proxy = True
            self.attempts += 3
            self.proxy_string_list = proxy_string_list
        self.load_requests()

    def create_request(self, type, url, cookies=None, data=None, cookies_text='', headers=None, timeout=None, full_response=False, render_page=False, specific_attempts_count=None):
        """
        Create request
        :param type: Type of the request (post/get)
        :param url: Url
        :param cookies=None: Cookies
        :param data=None: Payload data
        :param cookies_text='': Cookies in text mode. (It overwrites other cookies!)
        :param headers=None: Headers of the request. (It overwrites cookies and user agent!)
        :param timeout=None: Timeout for the request. (If None it will use default_timeout)
        :param full_response=False: Do we need to return full response or only text?
        :param render_page=False: Do we need to render page?
        :param specific_attempts_count=None: Set some int value to use custom attempts count
        """
        # Normalise the None sentinels to the empty containers the rest of
        # the method expects (avoids mutable default arguments).
        cookies = {} if cookies is None else cookies
        data = {} if data is None else data
        headers = {} if headers is None else headers
        if not validators.url(url):
            logging.error('Url: {url} is not valid. Cannot create {type} request'.format(url=url, type=type))
            if full_response == False:
                return ''
            return None
        if headers == {}:
            headers = {'User-Agent': self.user_agent}
        if cookies_text != '':
            headers['Cookie'] = cookies_text
        if not specific_attempts_count:
            attempts = self.attempts
        else:
            attempts = specific_attempts_count
        if not timeout:
            timeout = self.default_timeout
        while attempts > 0:
            if self.use_proxy:
                if self.change_proxies_manually:
                    if self.proxies == {}:
                        self.proxies = self.get_random_proxies()
                else:
                    self.proxies = self.get_random_proxies()
                # Last attempt: give up on proxies entirely.
                if attempts == 1:
                    self.proxies = {}
                    logging.warning('Cannot create {type} request to {url} with proxies. Will create request without it'.format(type=type, url=url))
            try:
                if type == 'post':
                    r = self.http_requests.post(url, cookies=cookies, data=data, headers=headers, proxies=self.proxies, timeout=timeout)
                else:
                    r = self.http_requests.get(url, cookies=cookies, params=data, headers=headers, proxies=self.proxies, timeout=timeout)
                if full_response:
                    return r
                else:
                    if render_page:
                        return self.render_html_page(r.text)
                    return r.text
            except Exception:
                # Request failed; burn the proxy (unless rotation is manual)
                # and retry. `except Exception` replaces the original bare
                # except, which also swallowed KeyboardInterrupt/SystemExit.
                if self.use_proxy:
                    if self.change_proxies_manually is False:
                        self.mark_proxies_as_failed()
            attempts = attempts - 1
            logging.error('Cannot create {type} request to {url}'.format(type=type, url=url))
        if full_response == False:
            return ''

    def get_page(self, url, cookies=None, data=None, cookies_text='', headers=None, timeout=None, full_response=False, render_page=False, specific_attempts_count=None):
        """
        Create get request
        :param url: Url
        :param cookies=None: Cookies
        :param data=None: Payload data
        :param cookies_text='': Cookies in text mode. (It overwrites other cookies!)
        :param headers=None: Headers of the request. (It overwrites cookies and user agent!)
        :param timeout=None: Timeout for the request. (If None it will use default_timeout)
        :param full_response=False: Do we need to return full response?
        :param render_page=False: Do we need to render page?
        :param specific_attempts_count=None: Set some int value to use custom attempts count
        """
        return self.create_request('get', url, cookies, data, cookies_text, headers, timeout, full_response, render_page, specific_attempts_count)

    def post_page(self, url, cookies=None, data=None, cookies_text='', headers=None, timeout=None, full_response=False, specific_attempts_count=None):
        """
        Create post request
        :param url: Url
        :param cookies=None: Cookies
        :param data=None: Payload data
        :param cookies_text='': Cookies in text mode. (It overwrites other cookies!)
        :param headers=None: Headers of the request. (It overwrites cookies and user agent!)
        :param timeout=None: Timeout for the request. (If None it will use default_timeout)
        :param full_response=False: Do we need to return full response?
        :param specific_attempts_count=None: Set some int value to use custom attempts count
        """
        return self.create_request('post', url, cookies, data, cookies_text, headers, timeout, full_response, False, specific_attempts_count)

    # Others
    def load_requests(self):
        """
        Initiate http_requests (a shared Session, or the requests module
        itself when sessions are disabled).
        """
        if self.use_session:
            self.http_requests = requests.Session()
        else:
            self.http_requests = requests

    def reload_requests(self):
        """
        Reload http_requests (drops any accumulated session state).
        """
        self.load_requests()

    def render_html_page(self, page_content):
        """
        Render html page (JavaScript execution via requests_html).
        :param page_content: Content of the page to render
        """
        try:
            from requests_html import HTML
            html = HTML(html=page_content)
            html.render(reload=False)
            return html.text
        except Exception:
            # Rendering is best-effort: fall back to the raw content.
            logging.error('Cannot render the page', exc_info=True)
            return page_content

    # Proxies Logic
    def change_proxies(self, proxy_string=None):
        """
        Change proxies manually
        :param proxy_string=None: Set new proxies; None picks a random one
        """
        self.reload_requests()
        if not proxy_string:
            self.proxies = self.get_random_proxies()
        else:
            self.proxies = self.get_proxies_from_string(proxy_string)

    def get_proxies_from_string(self, proxy_string):
        """
        Convert a 'host:port' string into a requests proxies dict.
        :param proxy_string: e.g. '192.168.127.12:3128'
        """
        return {'http': 'http://{0}'.format(proxy_string), 'https': 'https://{0}'.format(proxy_string)}

    def get_random_proxies(self):
        """
        Get random proxies from the remaining (non-failed) list.
        """
        if self.proxy_string_list:
            try:
                proxy_string = random.choice(self.proxy_string_list)
                return self.get_proxies_from_string(proxy_string)
            except Exception:
                logging.warning('Cannot get random proxies. Using without proxies.', exc_info=True)
                return {}
        # Log the *number* of failed proxies (the original logged the list).
        logging.warning('No proxies left. Failed proxies count: {}. Using without proxies.'.format(len(self.failed_proxy_string_list)))
        return {}

    def mark_proxies_as_failed(self):
        """
        Mark the currently-used proxies as failed and retire them.
        """
        if self.proxies != {}:
            proxy_string = self.proxies['http'].split('http://')[-1]
            if self.proxy_string_list and proxy_string in self.proxy_string_list:
                self.failed_proxy_string_list.append(proxy_string)
                self.proxy_string_list.remove(proxy_string)
            else:
                logging.warning('No proxies left. Failed proxies count: {}'.format(len(self.failed_proxy_string_list)))
        else:
            logging.warning('No proxies in use')

    # CookiesLogic
    def save_cookies_to_file(self, cookies=None, name='cookies', current_cookies=False):
        """
        Save cookies dict to the file
        :param cookies=None: Dict of cookies (or use current_cookies)
        :param name='cookies': File name to save
        :param current_cookies=False: Save cookies from current session?
        """
        try:
            if current_cookies:
                cookies = self.get_session_cookies()
            cookies = json.dumps(cookies if cookies is not None else {})
            with open('{}.cookies'.format(name), 'w') as the_file:
                the_file.write(cookies)
        except Exception:
            logging.error('Cannot save cookies to file', exc_info=True)

    def get_session_cookies(self):
        """
        Get cookies from the current session as a plain dict.
        """
        try:
            return requests.utils.dict_from_cookiejar(self.http_requests.cookies)
        except Exception:
            logging.warning('Cannot get cookies')
            return {}

    def set_session_cookies(self, cookies):
        """
        Set cookies on the current session.
        :param cookies: dict, or a raw cookie-header string
        """
        if isinstance(cookies, str):
            cookies = self.get_dict_cookies_from_text(cookies)
        if self.use_session:
            try:
                for key in cookies.keys():
                    self.http_requests.cookies.set(key, cookies[key])
            except Exception:
                logging.warning('Cannot set cookies')
        else:
            logging.error('Session mode is not activated')

    def get_cookies_from_file(self, name='cookies'):
        """
        Return cookies dict from the file
        :param name='cookies': File name to load
        """
        try:
            with open('{}.cookies'.format(name)) as the_file:
                cookies_text = the_file.read()
            # Prefer JSON (our own save format); fall back to raw
            # cookie-header parsing for hand-written files.
            try:
                cookies = json.loads(cookies_text)
            except Exception:
                cookies = self.get_dict_cookies_from_text(cookies_text)
            return cookies
        except Exception:
            logging.error('Cannot get cookies from file', exc_info=True)
            return {}

    def get_dict_cookies_from_text(self, cookies_text):
        """
        Return a dict parsed from raw cookie-header text.
        :param cookies_text: Raw cookies text, example: CONSENT=YES+UK.en+; SID=wgdombwvMd;
        """
        try:
            # Import the class directly so the result dict no longer shadows
            # the http.cookies module name (as the original did).
            from http.cookies import SimpleCookie
            cookie = SimpleCookie()
            cookie.load(cookies_text)
            parsed = {key: morsel.value for key, morsel in cookie.items()}
        except Exception:
            logging.error('Cannot get cookies from raw text', exc_info=True)
            parsed = {}
        return parsed

    def __del__(self):
        """
        Close all connections. getattr() guards against partially
        constructed instances (e.g. when __init__ raised early).
        """
        http_requests = getattr(self, 'http_requests', None)
        if http_requests is not None:
            try:
                http_requests.close()
            except Exception:
                try:
                    http_requests.session().close()
                except Exception:
                    pass
| StarcoderdataPython |
3386796 | <reponame>jawaidm/moorings
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-11-16 06:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the integer column
    # 'vessel_draft_limit' (default 0) to the mooringarea model.

    dependencies = [
        ('mooring', '0041_booking_send_invoice'),
    ]
    operations = [
        migrations.AddField(
            model_name='mooringarea',
            name='vessel_draft_limit',
            field=models.IntegerField(default=0),
        ),
    ]
| StarcoderdataPython |
1964779 | <reponame>GunshipPenguin/stockings<gh_stars>1-10
#!/usr/bin/env python3
import socket
import threading
import select
import sys
import struct
import ipaddress
import argparse
import const
def build_socks_reply(cd, dst_port=0x0000, dst_ip='0.0.0.0'):
    '''
    Assemble a SOCKS4 reply packet carrying the given reply code,
    destination port and destination IP address.
    '''
    # Turn the dotted-quad address into a single big-endian 32-bit integer.
    packed_ip = ipaddress.IPv4Address(dst_ip).packed
    (ip_as_int,) = struct.unpack('>L', packed_ip)
    return struct.pack('>BBHL', const.SERVER_VN, cd, dst_port, ip_as_int)
class ClientRequest:
    '''Represents a client SOCKS4/SOCKS4a request'''

    def __init__(self, data):
        '''Construct a new ClientRequest from the given raw SOCKS request.

        Sets self.invalid = True if the request cannot be parsed.
        '''
        self.invalid = False
        # Client requests must be at least 9 bytes to hold all necessary data
        if len(data) < 9:
            self.invalid = True
            return
        # Version number (VN)
        self.parse_vn(data)
        # SOCKS command code (CD)
        self.parse_cd(data)
        # Destination port
        self.parse_dst_port(data)
        # Destination IP / Domain name (if specified)
        self.parse_ip(data)
        # Userid
        self.parse_userid(data)

    @classmethod
    def parse_fixed(cls, data):
        '''Parse and return the fixed-length part of a SOCKS request

        Returns a tuple containing (vn, cd, dst_port, dst_ip) given the raw
        socks request
        '''
        return struct.unpack('>BBHL', data[:8])

    def parse_vn(self, data):
        '''Parse and validate the version number given the raw SOCKS request'''
        vn, _, _, _ = ClientRequest.parse_fixed(data)
        if (vn != const.CLIENT_VN):
            self.invalid = True

    def parse_dst_port(self, data):
        '''Parse and store the destination port given the raw SOCKS request'''
        _, _, dst_port, _ = ClientRequest.parse_fixed(data)
        self.dst_port = dst_port

    def parse_cd(self, data):
        '''Parse and store the request code given the raw SOCKS request'''
        _, cd, _, _ = ClientRequest.parse_fixed(data)
        if (cd == const.REQUEST_CD_CONNECT or cd == const.REQUEST_CD_BIND):
            self.cd = cd
        else:
            self.invalid = True

    def parse_ip(self, data):
        '''Parse and store the destination ip given the raw SOCKS request

        If the IP is of the form 0.0.0.(1-255), attempt to resolve the domain
        name specified, then store the resolved ip as the destination ip.
        '''
        _, _, _, dst_ip = ClientRequest.parse_fixed(data)
        ip = ipaddress.IPv4Address(dst_ip)
        o1, o2, o3, o4 = ip.packed
        # Invalid ip address specifying that we must resolve the domain
        # specified in data (As specified in SOCKS4a)
        if (o1, o2, o3) == (0, 0, 0) and o4 != 0:
            try:
                # Variable length part of the request containing the userid
                # and domain (8th byte onwards)
                userid_and_domain = data[8:]
                # Extract the domain to resolve
                _, domain, _ = userid_and_domain.split(b'\x00')
            except ValueError:
                # Error parsing request
                self.invalid = True
                return
            try:
                resolved_ip = socket.gethostbyname(domain)
            except socket.gaierror:
                # Domain name not found
                self.invalid = True
                return
            self.dst_ip = resolved_ip
        else:
            self.dst_ip = ip.exploded

    def parse_userid(self, data):
        '''Parse and store the userid given the raw SOCKS request'''
        try:
            # BUG FIX: search for the NUL terminator *after* the 8-byte fixed
            # header. The port/IP fields may legitimately contain 0x00 bytes
            # (guaranteed for the SOCKS4a 0.0.0.x address), and the original
            # data.index(b'\x00') could match one of those, truncating the
            # userid to an empty/garbage slice.
            index = data.index(b'\x00', 8)
            self.userid = data[8:index]
        except ValueError:
            # No NUL terminator present: malformed request
            self.invalid = True

    def isInvalid(self):
        '''Returns true if this request is invalid, false otherwise'''
        return self.invalid
class RelayThread(threading.Thread):
    '''Thread object that relays traffic between two hosts'''

    def __init__(self, s1, s2):
        '''Construct a new RelayThread to relay traffic between the 2 sockets
        specified'''
        self._s1 = s1
        self._s2 = s2
        threading.Thread.__init__(self)

    def _close_sockets(self):
        '''Close both sockets in this Relay Thread'''
        self._s1.close()
        self._s2.close()

    def run(self):
        '''Start relaying traffic between the two sockets specified

        Traffic is relayed until one socket is closed or encounters an error,
        at which point both sockets will be closed and the thread will
        terminate.
        '''
        while True:
            # Block until at least one side is readable or errored.
            ready, _, err = select.select(
                [self._s1, self._s2], [], [self._s1, self._s2])
            # Handle socket errors
            if err:
                self._close_sockets()
                return
            for s in ready:
                try:
                    data = s.recv(const.BUFSIZE)
                except ConnectionResetError:
                    # Connection reset by either s1 or s2, close sockets and
                    # return
                    self._close_sockets()
                    return
                if not data:
                    # Connection gracefully closed, close sockets and return
                    self._close_sockets()
                    return
                # Forward the received chunk to the opposite peer.
                # NOTE(review): sendall() errors (e.g. peer reset mid-write)
                # are not caught here and would propagate out of the thread.
                if s is self._s1:
                    self._s2.sendall(data)
                else:
                    self._s1.sendall(data)
class BindThread(threading.Thread):
    '''Thread object that sets up a SOCKS BIND request'''

    def __init__(self, client_request, client_conn):
        '''Create a new BIND thread for the given ClientRequest and connection
        to the client'''
        self._client_request = client_request
        self._client_conn = client_conn
        threading.Thread.__init__(self)

    def run(self):
        '''Attempt a SOCKS bind operation and relay traffic if successful'''
        try:
            # Open a listening socket on an open port (port 0 = OS-assigned)
            server_s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            server_s.bind(('0.0.0.0', 0))
            server_s.settimeout(const.SOCKS_TIMEOUT)
            ip, port = server_s.getsockname()
            server_s.listen(1)
            # Inform client of open socket (first BIND reply carries the
            # address/port the application server should connect to)
            self._client_conn.sendall(build_socks_reply(
                const.RESPONSE_CD_REQUEST_GRANTED, port, ip))
            # Wait for the application server to accept the connection
            server_conn, addr = server_s.accept()
        except:
            # Something went wrong (including accept() timeout), inform the
            # client and return.
            # NOTE(review): bare except also swallows SystemExit and
            # KeyboardInterrupt raised in this thread — confirm intent.
            self._client_conn.sendall(
                build_socks_reply(
                    const.RESPONSE_CD_REQUEST_REJECTED))
            self._client_conn.close()
            return
        # Application server connected, inform client (second BIND reply)
        self._client_conn.sendall(
            build_socks_reply(
                const.RESPONSE_CD_REQUEST_GRANTED))
        # Relay traffic between client_conn and server_conn
        relay_thread = RelayThread(self._client_conn, server_conn)
        relay_thread.daemon = True
        relay_thread.start()
class SocksProxy:
    '''A SOCKS4a Proxy'''

    def __init__(self, port):
        '''Create a new SOCKS4 proxy on the specified port'''
        self._host = '0.0.0.0'
        self._port = port
        self._bufsize = const.BUFSIZE
        self._backlog = const.BACKLOG

    def start(self):
        '''Start listening for SOCKS connections, blocking'''
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind((self._host, self._port))
        s.listen(self._backlog)
        while True:
            try:
                conn, addr = s.accept()
                data = conn.recv(self._bufsize)
                # Got a connection, handle it with process_request()
                self._process_request(data, conn)
            except KeyboardInterrupt:
                s.close()
                print('Caught KeyboardInterrupt, exiting')
                sys.exit(0)

    def _process_connect_request(self, client_request, client_conn):
        '''Process a SOCKS CONNECT request: dial the destination and, on
        success, start relaying traffic between client and server.'''
        server_conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server_conn.settimeout(const.SOCKS_TIMEOUT)
        try:
            server_conn.connect(
                (client_request.dst_ip, client_request.dst_port))
        except OSError:
            # Connection failed (timeout, refused, unreachable, ...) —
            # socket.timeout is an OSError subclass, so the original
            # timeout-only case is still covered. Reject the SOCKS request.
            server_conn.close()
            client_conn.send(
                build_socks_reply(
                    const.RESPONSE_CD_REQUEST_REJECTED))
            client_conn.close()
            # BUG FIX: the original fell through here and went on to send a
            # "request granted" reply and start a relay on the just-closed
            # client socket.
            return
        client_conn.send(build_socks_reply(const.RESPONSE_CD_REQUEST_GRANTED))
        relay_thread = RelayThread(client_conn, server_conn)
        relay_thread.daemon = True
        relay_thread.start()

    def _process_bind_request(self, client_request, client_conn):
        '''Process a SOCKS BIND request on a dedicated thread'''
        bind_thread = BindThread(client_request, client_conn)
        bind_thread.daemon = True
        bind_thread.start()

    def _process_request(self, data, client_conn):
        '''Process a general SOCKS request: validate, then dispatch to the
        CONNECT or BIND handler.'''
        client_request = ClientRequest(data)
        # Handle invalid requests
        if client_request.isInvalid():
            client_conn.send(
                build_socks_reply(
                    const.RESPONSE_CD_REQUEST_REJECTED))
            client_conn.close()
            return
        if client_request.cd == const.REQUEST_CD_CONNECT:
            self._process_connect_request(client_request, client_conn)
        else:
            self._process_bind_request(client_request, client_conn)
if __name__ == '__main__':
    # Command-line entry point: parse the listening port, then run the
    # (blocking) proxy loop.
    parser = argparse.ArgumentParser(
        description='SOCKS4a Proxy Implementation',
        epilog='Homepage: https://github.com/GunshipPenguin/sock-snake',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '--port',
        type=int,
        help='port to listen for incoming SOCKS requests on',
        action='store',
        default=const.PORT)
    args = parser.parse_args()
    print('Listening on port', str(args.port))
    proxy = SocksProxy(args.port)
    proxy.start()
| StarcoderdataPython |
349852 | # -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from NodeSite import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
    # Account settings, system settings
    url(r'^accounts/', include('NodeSite.accounts.urls')),
    url(r'^mushroom/', include('NodeSite.mushroom.urls')),
)
urlpatterns += patterns('NodeSite.views',
    # Home page
    url(r'^$', 'home', name='home'),
    # User login / logout
    url(r'^login/$', 'login', name='login'),
    url(r'^login-test/$', 'login_test', name="login_test"),
    url(r'^logout/$', 'logout', name='logout'),
    # Update an object's name
    url(r'^([\w]+)/(\d+)/name/$', 'update_name',),
    # Room information
    url(r'^room/list/$', 'get_rooms', name="get_rooms"),
    # url(r'^room/(\d+)/controller/list/$', 'get_room_controller_list',),
    # url(r'^room/(\d+)/controller/(\d+)/$', 'get_room_controller',),
    # Fetch data
    url(r'^data/room/(\d+)/$', 'get_data'),
    # Search
    url(r'^search/$', 'search', name="search"),
    # Breeding/farming policies
    url(r'^policy/now/room/(\d+)/$', 'get_now_policy_by_room_id',),
    url(r'^policy/now/room/(\d+)/timepoint/$', 'get_now_time_point',),
    url(r'^policy/list/$', 'policy_list',),
    url(r'^policy/(\d+)/$', 'policy_view',),
    url(r'^policy/$', 'policy_view'),
    # Controllers
    url(r'^controller/list/room/(\d+)/$', 'controller_list_view'),
    url(r'^controller/(\d+)/$', 'controller_view'),
    # Configuration file settings
    url(r'^config/log/(\w+)$', 'config_log'),
)
if settings.DEBUG:
    # Test
    # url(r'^NodeSite/', include('NodeSite.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # NOTE(review): the two bare url(...) expressions below are evaluated
    # and discarded — they never register any route. They look like they
    # were meant to be inside a patterns(...) call; confirm intent.
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
    # test message
    urlpatterns += patterns('NodeSite.views',
        (r'^playlist/create/$', 'create_playlist'),
    )
| StarcoderdataPython |
8186081 | import random
def strategy(history, memory):
    """
    Author: zelosos

    Probabilistic tit-for-tat: open with cooperation, then defect with a
    probability equal to the fraction of past rounds in which the opponent
    defected against us. Assumptions behind the design:
    - In a group most of the players will not be exploited.
    - Players will test, if they can exploit others.
    - Both cooperating for the whole time is best for both.
    Improvement idea kept from the original author: later into the game it
    may pay off to abuse the built-up trust by raising the cheat chance.

    Returns a (move, memory) tuple: move is 1 (cooperate) or 0 (defect),
    memory is always None.
    """
    opponent_moves = history[1]
    # First round: no information yet, so cooperate.
    if not len(opponent_moves):
        return 1, None
    rounds_played = len(opponent_moves)
    times_cheated = rounds_played - sum(opponent_moves)
    cheat_probability = times_cheated / rounds_played
    # Mirror the opponent's defection frequency probabilistically.
    if random.uniform(0, 1) < cheat_probability:
        return 0, None
    return 1, None
| StarcoderdataPython |
329590 | # Neat trick to make simple namespaces:
# http://stackoverflow.com/questions/4984647/accessing-dict-keys-like-an-attribute-in-python
class Namespace(dict):
    """A dict whose keys are also reachable as attributes (ns.key)."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # Point the attribute store at the mapping itself, so attribute
        # access and item access share one underlying data structure.
        self.__dict__ = self
1930660 | <reponame>TeddyTeddy/robot-fw-browser-library-tests
import unittest
from mockito import unstub, verify, verifyNoUnwantedInteractions, expect, mock
from LibraryLoader import LibraryLoader
import LibraryLoaderStub
from Locators import locator, number_of_add_buttons, number_of_change_buttons
from AddGroupPage import AddGroupPage # class under test (CUT)
from ExpectedLinks import expected_add_group_page_url, links
from ExpectedTexts import expected
from ExpectedAttributeValues import eav
from Browser import ElementState, AssertionOperator, MouseButton
from ControlOption import ControlOption
class AddGroupPageUT(unittest.TestCase):
    def setUp(self) -> None:
        """Runs before each test case: install the mockito stubs."""
        # instantiate a mock LibraryLoader, which returns a mock browser lib
        LibraryLoaderStub.configure_mock_library_loader()
    def tearDown(self) -> None:
        """Runs after each test case: remove every mockito stub/patch."""
        unstub()
    @staticmethod
    def do_test_verify_add_group_page(chosen_permissions_dropdown_text):
        """Drive AddGroupPage.verify_add_group_page() against a fully mocked
        Browser library and check that every expected interaction occurs.

        :param chosen_permissions_dropdown_text: text the mocked
            'chosen permissions' dropdown reports; an empty string simulates
            a group with no chosen permissions ('Remove all' link inactive).
        """
        # configure the mock browser library for verify_add_group_page()'s calls
        # wait until title element is visible on add_group_page
        expect(LibraryLoader.get_instance().bl).wait_for_elements_state(
            selector=locator['add_group_page']['title'],
            state=ElementState.visible).thenReturn(None)
        # check the validity of the url on the add_group_page
        expect(LibraryLoader.get_instance().bl).get_url(
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected_add_group_page_url).thenReturn(None)
        # configure mock_calls in verify_texts_on_add_group_page()
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['breadcrumbs'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['breadcrumbs_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['add_group'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['add_group_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['name'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['name_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['permissions'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['permissions_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['available_permissions_title'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['available_permissions_title_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['available_permissions_dropdown'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['available_permissions_dropdown_content']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_attribute(
            selector=locator['add_group_page']['available_permissions_tooltip'], attribute='title',
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['available_permissions_tooltip_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['choose_all_permissions'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['choose_all_permissions_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['help_to_select_multiple_permissions'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['help_to_select_multiple_permissions_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['chosen_permissions_title'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['chosen_permissions_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_attribute(
            selector=locator['add_group_page']['chosen_permissions_tooltip'], attribute='title',
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['chosen_permissions_tooltip_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['chosen_permissions_dropdown'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['chosen_permissions_dropdown_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['remove_all_permissions_option'],
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['remove_all_permissions_text']).thenReturn(None)
        # configure mock calls in _verify_links_on_add_group_page()
        expect(LibraryLoader.get_instance().bl).get_attribute(
            selector=locator['add_group_page']['home_link'], attribute='href',
            assertion_operator=AssertionOperator.equal,
            assertion_expected=links['add_group_page']['home_link']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_attribute(
            selector=locator['add_group_page']['authentication_and_authorization_link'],
            attribute='href',
            assertion_operator=AssertionOperator.equal,
            assertion_expected=links['add_group_page']['authentication_and_authorization_link']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_attribute(
            selector=locator['add_group_page']['groups_link'],
            attribute='href',
            assertion_operator=AssertionOperator.equal,
            assertion_expected=links['add_group_page']['groups_link']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_attribute(
            selector=locator['add_group_page']['choose_all_permissions_option'], attribute='href',
            assertion_operator=AssertionOperator.equal,
            assertion_expected=links['add_group_page']['choose_all_permissions_link']).thenReturn(None)
        # configure_mock_calls_in _verify_the_buttons_on_add_group_page()
        expect(LibraryLoader.get_instance().bl).get_attribute(
            selector=locator['add_group_page']['save_and_add_another_button'],
            attribute='value',
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['save_and_add_another_button_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_attribute(
            selector=locator['add_group_page']['save_and_continue_editing_button'],
            attribute='value',
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['save_and_continue_editing_button_text']).thenReturn(None)
        expect(LibraryLoader.get_instance().bl).get_attribute(
            selector=locator['add_group_page']['save_button'],
            attribute='value',
            assertion_operator=AssertionOperator.equal,
            assertion_expected=expected['add_group_page']['save_button_text']).thenReturn(None)
        # configure mock calls in _verify_remove_all_permission_link():
        # the link is visible (and styled 'active') only when the chosen
        # permissions dropdown is non-empty.
        expect(LibraryLoader.get_instance().bl).get_element_state(
            selector=locator['add_group_page']['remove_all_permissions_option'],
            state=ElementState.visible).thenReturn( bool(chosen_permissions_dropdown_text) )
        expect(LibraryLoader.get_instance().bl).get_text(
            selector=locator['add_group_page']['chosen_permissions_dropdown']).thenReturn(chosen_permissions_dropdown_text)
        if chosen_permissions_dropdown_text:
            expect(LibraryLoader.get_instance().bl).get_attribute(
                selector=locator['add_group_page']['remove_all_permissions_option'],
                attribute='class',
                assertion_operator=AssertionOperator.equal,
                assertion_expected=eav['add_group_page']['remove_all_permissions_active']).thenReturn(None)
        else:
            # there are no permissions added to chosen_permissions_dropdown element
            expect(LibraryLoader.get_instance().bl).get_attribute(
                selector=locator['add_group_page']['remove_all_permissions_option'],
                attribute='class',
                assertion_operator=AssertionOperator.equal,
                assertion_expected=eav['add_group_page']['remove_all_permissions_inactive']).thenReturn(None)
        # CUT gets magically the mock instances (i.e. _loader & sl)
        add_group_page = AddGroupPage()
        # method under test gets called
        add_group_page.verify_add_group_page()
        # Verifies that expectations set via expect are met
        # all registered objects will be checked.
        verifyNoUnwantedInteractions()
def test_verify_add_group_page(self):
    """Exercise verify_add_group_page() for both "Remove all" button states."""
    # with permissions_drowdown_text is non empty string, "Remove all" button should be active
    AddGroupPageUT.do_test_verify_add_group_page(
        chosen_permissions_dropdown_text=expected['add_group_page']['auth_group_can_add_group'])
    # Re-arm the mocks so the second scenario starts from a clean expectation set.
    self.tearDown()
    self.setUp()
    # with permissions_drowdown_text is an empty string, "Remove all" button should be inactive
    AddGroupPageUT.do_test_verify_add_group_page(chosen_permissions_dropdown_text='')
def test_enter_name_for_new_group(self):
    """enter_name_for_new_group() must fill the name input with the given text."""
    # configure the mock browser library for enter_name_for_new_group()'s calls
    group_name = 'blog_editors'
    expect(LibraryLoader.get_instance().bl).fill_text(selector=locator['add_group_page']['input_name_field'], txt=group_name).thenReturn(None)
    # CUT gets magically the mock instances (i.e. _loader & sl)
    add_group_page = AddGroupPage()
    # method under test gets called
    add_group_page.enter_name_for_new_group(group_name=group_name)
    # Verifies that expectations set via expect are met
    # all registered objects will be checked.
    verifyNoUnwantedInteractions()
def test_enter_search_term_in_available_permissions_filter(self):
    """Run the filter scenario once with a matching and once with a non-matching term."""
    self.do_test_enter_search_term_in_available_permissions_filter(permission_search_term='blog')
    # Reset mock state between the two scenarios.
    self.tearDown()
    self.setUp()
    self.do_test_enter_search_term_in_available_permissions_filter(permission_search_term='yielding no permissions')
def do_test_enter_search_term_in_available_permissions_filter(self, permission_search_term):
    """Drive enter_search_term_in_available_permissions_filter() with one term.

    The method under test must report whether the available-permissions
    dropdown lists any permission after the term has been typed.
    """
    expect(LibraryLoader.get_instance().bl).type_text(
        selector=locator['add_group_page']['input_permission_field'],
        txt=permission_search_term).thenReturn(None)
    # The stubbed dropdown text determines the expected boolean result below.
    dropdown_text = AddGroupPageUT.do_get_text_for_available_permissions_dropdown(permission_search_term)
    expect(LibraryLoader.get_instance().bl).get_text(
        selector=locator['add_group_page']['available_permissions_dropdown']).thenReturn(dropdown_text)
    # CUT gets magically the mock instances (i.e. _loader & sl)
    add_group_page = AddGroupPage()
    # method under test gets called
    permissions_found = add_group_page.enter_search_term_in_available_permissions_filter(
        permission_search_term=permission_search_term
    )
    # A non-empty dropdown text means the search found at least one permission.
    self.assertEqual(bool(dropdown_text), permissions_found)
    # Verifies that expectations set via expect are met
    # all registered objects will be checked.
    verifyNoUnwantedInteractions()
@staticmethod
def get_filtered_permission_elements():
    """Build three mock DOM elements carrying the expected user-permission texts."""
    element_one = mock()
    element_one.text = expected['add_group_page']['auth_user_can_add_user']
    element_two = mock()
    element_two.text = expected['add_group_page']['auth_user_can_change_user']
    element_three = mock()
    element_three.text = expected['add_group_page']['auth_user_can_delete_user']
    return [element_one, element_two, element_three]
def test_choose_all_filtered_permissions(self):
    """choose_all_filtered_permissions() must select and verify every filtered permission."""
    # configure mock calls in choose_all_filtered_permissions()
    filtered_permission_elements = AddGroupPageUT.get_filtered_permission_elements()
    expect(LibraryLoader.get_instance().bl).wait_for_elements_state(
        selector=locator['add_group_page']['generic_filtered_permission'], state=ElementState.visible).thenReturn(None)
    expect(LibraryLoader.get_instance().bl).get_elements(
        selector=locator['add_group_page']['generic_filtered_permission']).thenReturn(filtered_permission_elements)
    # NOTE(review): control_option is built but not passed to any expectation
    # below -- confirm whether it is still needed.
    control_option = ControlOption()
    setattr(control_option, 'name', 'Control')
    for element in filtered_permission_elements:
        expect(LibraryLoader.get_instance().bl).get_text(element).thenReturn(element.text)
        expect(LibraryLoader.get_instance().bl).click( element, MouseButton.left, 1, None, None, None, False, False, ... ).thenReturn(None)
    expect(LibraryLoader.get_instance().bl).click( locator['add_group_page']['choose_all_permissions_option'], MouseButton.left).thenReturn(None)
    # configure mock calls in _verify_permissions_added()
    chosen_permission_elements = AddGroupPageUT.get_filtered_permission_elements()
    expect(LibraryLoader.get_instance().bl).get_elements(
        selector=locator['add_group_page']['generic_chosen_permission']).thenReturn(chosen_permission_elements)
    for element in chosen_permission_elements:
        expect(LibraryLoader.get_instance().bl).get_text(element).thenReturn(element.text)
    # CUT gets magically the mock instances (i.e. _loader & sl)
    add_group_page = AddGroupPage()
    # method under test gets called
    add_group_page.choose_all_filtered_permissions()
    # Verifies that expectations set via expect are met
    # all registered objects will be checked.
    verifyNoUnwantedInteractions()
def test_clear_available_permissions_filter(self):
    """clear_available_permissions_filter() must clear the filter input field."""
    # configure the mock browser library for clear_available_permissions_filter()'s calls
    expect(LibraryLoader.get_instance().bl).clear_text(selector=locator['add_group_page']['input_permission_field']).thenReturn(None)
    # CUT gets magically the mock instances (i.e. _loader & sl)
    add_group_page = AddGroupPage()
    # method under test gets called
    add_group_page.clear_available_permissions_filter()
    # Verifies that expectations set via expect are met
    # all registered objects will be checked.
    verifyNoUnwantedInteractions()
def test_click_on_save_button(self):
    """click_on_save_button() must click the save button exactly once."""
    # configure the mock browser library for click_on_save_button()'s calls
    expect(LibraryLoader.get_instance().bl).click(selector=locator['add_group_page']['save_button']).thenReturn(None)
    # CUT gets magically the mock instances (i.e. _loader & sl)
    add_group_page = AddGroupPage()
    # method under test gets called
    add_group_page.click_on_save_button()
    # Verifies that expectations set via expect are met
    # all registered objects will be checked.
    verifyNoUnwantedInteractions()
@staticmethod
def do_get_text_for_available_permissions_dropdown(permission_search_term):
    """Return the canned dropdown text the mock should yield for a search term.

    Raises:
        AssertionError: for a term this helper has no canned text for.
    """
    if permission_search_term == 'blog':
        # NOTE(review): the same expected key is concatenated three times --
        # confirm whether three distinct blog-post permissions were intended.
        return expected['add_group_page']['postings_blog_post_can_add_blog_post'] + '\n' + \
            expected['add_group_page']['postings_blog_post_can_add_blog_post'] + '\n' + \
            expected['add_group_page']['postings_blog_post_can_add_blog_post']
    elif permission_search_term == 'yielding no permissions':
        return ''
    else:
        raise AssertionError(f'What to yield in get_text for "{permission_search_term}"? Implement it here')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| StarcoderdataPython |
391657 | <filename>tools/cygprofile/mergetraces.py
#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Use: ../mergetraces.py `ls cyglog.* -Sr` > merged_cyglog
""""Merge multiple logs files from different processes into a single log.
Given two log files of execution traces, merge the traces into a single trace.
Merging will use timestamps (i.e. the first two columns of logged calls) to
create a single log that is an ordered trace of calls by both processes.
"""
import optparse
import os
import string
import subprocess
import sys
def ParseLogLines(lines):
    """Parse profiler log lines into call records and the library VM range.

    Args:
      lines: raw text lines from a cyglog file.  The first line is a
        /proc-maps-style mapping ("<start>-<end> r-xp ... lib.so"), the second
        is a column header, and every following line is one logged call.

    Returns:
      (calls, vm_start, vm_end): calls is a list of per-call field lists
      (secs, msecs, pid:tid, callee address); vm_start/vm_end delimit the
      mapped virtual address range of the shared library.
    """
    mapping = lines[0]
    dash = mapping.find('-')
    space = mapping.find(' ')
    # The mapping line looks like "5086e000-52e92000 r-xp ...": hex start
    # before the dash, hex end between the dash and the first space.
    vm_start = int(mapping[:dash], 16)
    vm_end = int(mapping[dash + 1:space], 16)
    # Skip the mapping line and the column header; keep each remaining
    # line's whitespace-separated fields verbatim.
    call_lines = [entry.strip().split() for entry in lines[2:]]
    return (call_lines, vm_start, vm_end)
def HasDuplicates(calls):
    """Sanity-check that each call address is logged at most once.

    Args:
      calls: list of logged call records; field 3 is the callee address.

    Returns:
      True if any callee address occurs more than once, False otherwise.
    """
    # Bug fix: the original returned the undefined name `true` (NameError on
    # the duplicate path) and fell off the end (None) otherwise.  A set also
    # replaces the O(n) list membership test.
    seen = set()
    for call in calls:
        if call[3] in seen:
            return True
        seen.add(call[3])
    return False
def CheckTimestamps(calls):
    """Print a warning to stderr for every out-of-order call timestamp.

    Args:
      calls: list of logged call records; fields 0 and 1 are the seconds and
        microseconds of the call.
    """
    prev_secs = -1
    prev_usecs = -1
    for fields in calls:
        secs = int(fields[0])
        usecs = int(fields[1])
        current = (secs * 1000000) + usecs
        previous = (prev_secs * 1000000) + prev_usecs
        if current < previous:
            sys.stderr.write("WARNING: last_timestamp: " + str(prev_secs)
                             + " " + str(prev_usecs) + " timestamp: "
                             + str(secs) + " " + str(usecs) + "\n")
        prev_secs = secs
        prev_usecs = usecs
def Convert(call_lines, startAddr, endAddr):
    """Convert logged virtual call addresses into static library offsets.

    Calls outside [startAddr, endAddr) are dropped, and only the first
    occurrence of each callee address is kept.

    Returns:
      list of calls as tuples (sec, msec, pid:tid, callee)
    """
    converted_calls = []
    seen_callees = set()
    for fields in call_lines:
        secs = int(fields[0])
        msecs = int(fields[1])
        callee = int(fields[3], 16)
        outside_library = callee < startAddr or callee >= endAddr
        if outside_library or callee in seen_callees:
            continue
        seen_callees.add(callee)
        converted_calls.append((secs, msecs, fields[2], callee - startAddr))
    return converted_calls
def Timestamp(trace_entry):
    """Fold the entry's (secs, usecs) fields into a single microsecond count."""
    secs, usecs = int(trace_entry[0]), int(trace_entry[1])
    return secs * 1000000 + usecs
def AddTrace(tracemap, trace):
    """Merge a trace into the tracemap, keeping the earliest entry per call.

    Every call not yet present in the tracemap is added; a call already
    present is replaced only when the new trace saw it strictly earlier.

    Args:
      tracemap: dict mapping callee -> trace entry, mutated in place
      trace: iterable of trace entries (sec, msec, pid:tid, callee)
    """
    def _when(entry):
        # Inlined Timestamp(): the entry's time in microseconds.
        return int(entry[0]) * 1000000 + int(entry[1])

    for entry in trace:
        callee = entry[3]
        if callee not in tracemap or _when(entry) < _when(tracemap[callee]):
            tracemap[callee] = entry
def main():
    """Merge two traces for code in specified library and write to stdout.

    Merges the two traces and coverts the virtual addresses to the offsets in the
    library. First line of merged trace has dummy virtual address of 0-ffffffff
    so that symbolizing the addresses uses the addresses in the log, since the
    addresses have already been converted to static offsets.
    """
    # NOTE(review): this function uses Python 2 print statements and the
    # `string` module helpers, so the script requires a Python 2 interpreter.
    parser = optparse.OptionParser('usage: %prog trace1 ... traceN')
    (_, args) = parser.parse_args()
    if len(args) <= 1:
        parser.error('expected at least the following args: trace1 trace2')
    step = 0
    # tracemap maps each callee offset to the earliest trace entry seen for it.
    tracemap = dict()
    for trace_file in args:
        step += 1
        # Progress and statistics go to stderr so stdout stays a clean log.
        sys.stderr.write(" " + str(step) + "/" + str(len(args)) +
                         ": " + trace_file + ":\n")
        trace_lines = map(string.rstrip, open(trace_file).readlines())
        (trace_calls, trace_start, trace_end) = ParseLogLines(trace_lines)
        CheckTimestamps(trace_calls)
        sys.stderr.write("Len: " + str(len(trace_calls)) +
                         ". Start: " + hex(trace_start) +
                         ", end: " + hex(trace_end) + '\n')
        trace_calls = Convert(trace_calls, trace_start, trace_end)
        sys.stderr.write("Converted len: " + str(len(trace_calls)) + "\n")
        AddTrace(tracemap, trace_calls)
        sys.stderr.write("Merged len: " + str(len(tracemap)) + "\n")
    # Extract the resulting trace from the tracemap
    merged_trace = []
    for call in tracemap:
        merged_trace.append(tracemap[call])
    merged_trace.sort(key=Timestamp)
    # Dummy full-range mapping line so later symbolization keeps the already
    # converted static offsets instead of re-translating them.
    print "0-ffffffff r-xp 00000000 xx:00 00000 ./"
    print "secs\tmsecs\tpid:threadid\tfunc"
    for call in merged_trace:
        print (str(call[0]) + "\t" + str(call[1]) + "\t" + call[2] + "\t" +
               hex(call[3]))
if __name__ == '__main__':
    # Script entry point.
    main()
| StarcoderdataPython |
1881769 | <reponame>NunoEdgarGFlowHub/studio
import unittest
import yaml
import uuid
import os
import time
import tempfile
import shutil
import requests
import subprocess
import boto3
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
from studio import model
from studio.auth import remove_all_keys
from studio.gcloud_artifact_store import GCloudArtifactStore
from studio.s3_artifact_store import S3ArtifactStore
from studio.util import has_aws_credentials
class ArtifactStoreTest(object):
    """Backend-agnostic artifact-store test scenarios.

    Subclasses mix this into unittest.TestCase, override get_store() for
    their backend, and supply get_qualified_location_prefix().
    """
    _multiprocess_shared_ = True

    def get_store(self, config_name='test_config.yaml'):
        """Build the artifact store described by a config file in this test dir."""
        config_file = os.path.join(
            os.path.dirname(
                os.path.realpath(__file__)),
            config_name)
        with open(config_file) as f:
            # safe_load: the test config needs no custom tags, and plain
            # yaml.load() without a Loader is deprecated and unsafe.
            config = yaml.safe_load(f)
        return model.get_db_provider(config).store

    def test_get_put_artifact(self):
        """A stored directory artifact round-trips with identical content."""
        fb = self.get_store()
        tmp_dir = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
        random_str = str(uuid.uuid4())
        os.makedirs(os.path.join(tmp_dir, 'test_dir'))
        tmp_filename = os.path.join(
            tmp_dir, 'test_dir', str(uuid.uuid4()) + '.txt')
        with open(tmp_filename, 'w') as f:
            f.write(random_str)
        artifact = {'key': 'tests/' + str(uuid.uuid4()) + '.tar'}
        fb.put_artifact(artifact, tmp_dir, cache=False)
        # Remove the local copy so get_artifact must fetch from the store.
        shutil.rmtree(tmp_dir)
        fb.get_artifact(artifact, tmp_dir)
        with open(tmp_filename, 'r') as f:
            line = f.read()
        shutil.rmtree(tmp_dir)
        self.assertTrue(line == random_str)
        fb.delete_artifact(artifact)

    @unittest.skip('sometimes fails in travis at time assertion' +
                   ', peterz figure out')
    def test_get_put_cache(self):
        """The second fetch of an artifact should be faster via the local cache."""
        fb = self.get_store()
        tmp_dir = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
        strlen = 10000000
        random_str = str(os.urandom(strlen))
        os.makedirs(os.path.join(tmp_dir, 'test_dir'))
        tmp_filename = os.path.join(
            tmp_dir, 'test_dir', str(uuid.uuid4()) + '.txt')
        with open(tmp_filename, 'w') as f:
            f.write(random_str)
        artifact = {'key': 'tests/' + str(uuid.uuid4()) + '.tgz'}
        fb.put_artifact(artifact, tmp_dir, cache=False)
        shutil.rmtree(tmp_dir)
        # perf_counter(): time.clock() was removed in Python 3.8.
        tic1 = time.perf_counter()
        fb.get_artifact(artifact, tmp_dir)
        os.utime(tmp_dir, None)
        tic2 = time.perf_counter()
        fb.get_artifact(artifact, tmp_dir)
        tic3 = time.perf_counter()
        with open(tmp_filename, 'r') as f:
            line = f.read()
        shutil.rmtree(tmp_dir)
        self.assertTrue(line == random_str)
        # The cached fetch must be faster than the initial download.
        self.assertTrue(tic3 - tic2 < (tic2 - tic1))
        fb.delete_artifact(artifact)

    def test_get_artifact_url(self):
        """A generated artifact URL serves a tarball containing the stored file."""
        remove_all_keys()
        fb = self.get_store('test_config.yaml')
        tmp_filename = os.path.join(
            tempfile.gettempdir(),
            str(uuid.uuid4()) + '.txt')
        random_str = str(uuid.uuid4())
        with open(tmp_filename, 'w') as f:
            f.write(random_str)
        artifact_key = 'tests/test_' + str(uuid.uuid4())
        artifact = {'key': artifact_key}
        fb.put_artifact(artifact, tmp_filename, cache=False)
        url = fb.get_artifact_url(artifact)
        os.remove(tmp_filename)
        response = requests.get(url)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(response.status_code, 200)
        tar_filename = os.path.join(tempfile.gettempdir(),
                                    str(uuid.uuid4()) + '.tgz')
        with open(tar_filename, 'wb') as f:
            f.write(response.content)
        ptar = subprocess.Popen(['tar', '-xf', tar_filename],
                                cwd=tempfile.gettempdir())
        tarout, _ = ptar.communicate()
        with open(tmp_filename, 'r') as f:
            self.assertEqual(f.read(), random_str)
        os.remove(tmp_filename)
        os.remove(tar_filename)
        fb.delete_artifact(artifact)

    def test_delete_file(self):
        """After delete_artifact(), fetching must not recreate the file."""
        fb = self.get_store()
        tmp_dir = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
        random_str = str(uuid.uuid4())
        os.makedirs(os.path.join(tmp_dir, 'test_dir'))
        tmp_filename = os.path.join(
            tmp_dir, 'test_dir', str(uuid.uuid4()) + '.txt')
        with open(tmp_filename, 'w') as f:
            f.write(random_str)
        artifact_key = 'tests/test_' + str(uuid.uuid4())
        artifact = {'key': artifact_key}
        fb.put_artifact(artifact, tmp_filename, cache=False)
        shutil.rmtree(tmp_dir)
        fb.delete_artifact(artifact)
        fb.get_artifact(artifact, tmp_dir)
        exception_raised = False
        try:
            with open(tmp_filename, 'r') as f:
                f.read()
        except IOError:
            exception_raised = True
        self.assertTrue(exception_raised)

    def test_get_qualified_location(self):
        """get_qualified_location() must prepend the backend prefix to the key."""
        fb = self.get_store()
        # The original source carried a redacted placeholder (<KEY>) here,
        # which is a syntax error; any key works because the expectation
        # below is derived from the same value.
        key = 'tests/test_' + str(uuid.uuid4())
        qualified_location = fb.get_qualified_location(key)
        expected_qualified_location = self.get_qualified_location_prefix() + \
            key
        self.assertEqual(qualified_location, expected_qualified_location)
class FirebaseArtifactStoreTest(ArtifactStoreTest, unittest.TestCase):
    """Runs the shared artifact-store scenarios against the Firebase backend.

    Also exercises the backend's private upload/download/delete helpers,
    covering authenticated, unauthenticated and misconfigured access paths.
    """

    # Tests of private methods
    def get_qualified_location_prefix(self):
        """Return the gs:// prefix expected for this backend's keys."""
        return "gs://studio-ed756.appspot.com/"

    def test_get_file_url(self):
        """A URL for an uploaded file must serve back the exact content."""
        remove_all_keys()
        fb = self.get_store('test_config.yaml')
        tmp_filename = os.path.join(
            tempfile.gettempdir(),
            str(uuid.uuid4()) + '.txt')
        random_str = str(uuid.uuid4())
        with open(tmp_filename, 'wt') as f:
            f.write(random_str)
        key = 'tests/' + str(uuid.uuid4()) + '.txt'
        fb._upload_file(key, tmp_filename)
        url = fb._get_file_url(key)
        response = requests.get(url)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content.decode('utf-8'),
                         random_str)
        fb._delete_file(key)
        os.remove(tmp_filename)

    def test_get_file_url_auth(self):
        """An authenticated client can fetch a file from the authtest area."""
        remove_all_keys()
        fb = self.get_store('test_config_auth.yaml')
        tmp_filename = os.path.join(
            tempfile.gettempdir(),
            str(uuid.uuid4()) + '.txt')
        random_str = str(uuid.uuid4())
        with open(tmp_filename, 'w') as f:
            f.write(random_str)
        key = 'authtest/' + str(uuid.uuid4()) + '.txt'
        fb._upload_file(key, tmp_filename)
        url = fb._get_file_url(key)
        response = requests.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content.decode('utf-8'),
                         random_str)
        fb._delete_file(key)
        os.remove(tmp_filename)

    def test_upload_download_file_noauth(self):
        """Without auth, uploads to the authtest area must silently fail."""
        remove_all_keys()
        fb = self.get_store('test_config.yaml')
        tmp_filename = os.path.join(
            tempfile.gettempdir(),
            str(uuid.uuid4()) + '.txt')
        random_str = str(uuid.uuid4())
        with open(tmp_filename, 'w') as f:
            f.write(random_str)
        key = 'authtest/' + str(uuid.uuid4()) + '.txt'
        fb._upload_file(key, tmp_filename)
        os.remove(tmp_filename)
        fb._download_file(key, tmp_filename)
        # The unauthorized upload never happened, so nothing comes back.
        self.assertTrue(not os.path.exists(tmp_filename))

    def test_upload_download_file_bad(self):
        # smoke test to make sure attempt to access a wrong file
        # in the database is wrapped and does not crash the system
        fb = self.get_store('test_bad_config.yaml')
        tmp_filename = os.path.join(
            tempfile.gettempdir(),
            'test_upload_download.txt')
        random_str = str(uuid.uuid4())
        with open(tmp_filename, 'w') as f:
            f.write(random_str)
        fb._upload_file('tests/test_upload_download.txt', tmp_filename)
        fb._download_file('tests/test_upload_download.txt', tmp_filename)
        os.remove(tmp_filename)

    def test_upload_download_file_auth(self):
        """Authenticated files survive unauthorized deletes, then delete cleanly."""
        remove_all_keys()
        fb = self.get_store('test_config_auth.yaml')
        tmp_filename = os.path.join(
            tempfile.gettempdir(),
            str(uuid.uuid4()) + '.txt')
        random_str = str(uuid.uuid4())
        with open(tmp_filename, 'w') as f:
            f.write(random_str)
        # Reconstructed from a redacted placeholder (<KEY>) in the original
        # source; mirrors the key pattern of the other auth tests.
        key = 'authtest/' + str(uuid.uuid4()) + '.txt'
        fb._upload_file(key, tmp_filename)
        os.remove(tmp_filename)
        # test an authorized attempt to delete file
        remove_all_keys()
        fb = self.get_store('test_config.yaml')
        fb._delete_file(key)
        remove_all_keys()
        fb = self.get_store('test_config_auth.yaml')
        # to make sure file is intact and the same as we uploaded
        fb._download_file(key, tmp_filename)
        with open(tmp_filename, 'r') as f:
            line = f.read()
        os.remove(tmp_filename)
        self.assertTrue(line == random_str)
        fb._delete_file(key)
        fb._download_file(key, tmp_filename)
        self.assertTrue(not os.path.exists(tmp_filename))

    def test_upload_download_file(self):
        """A private-API upload/download round-trip preserves the content."""
        fb = self.get_store()
        tmp_filename = os.path.join(
            tempfile.gettempdir(),
            str(uuid.uuid4()) + '.txt')
        random_str = str(uuid.uuid4())
        with open(tmp_filename, 'w') as f:
            f.write(random_str)
        key = 'tests/' + str(uuid.uuid4()) + '.txt'
        fb._upload_file(key, tmp_filename)
        os.remove(tmp_filename)
        fb._download_file(key, tmp_filename)
        with open(tmp_filename, 'r') as f:
            line = f.read()
        os.remove(tmp_filename)
        self.assertTrue(line == random_str)
        fb._delete_file(key)
        fb._download_file(key, tmp_filename)
        self.assertTrue(not os.path.exists(tmp_filename))
@unittest.skipIf(
    'GOOGLE_APPLICATION_CREDENTIALS' not in os.environ.keys(),
    'GOOGLE_APPLICATION_CREDENTIALS environment '
    # Typo fix: the original adjacent strings "won'' be" rendered as "won be".
    "variable not set, won't be able to use google cloud")
class GCloudArtifactStoreTest(ArtifactStoreTest, unittest.TestCase):
    """Runs the shared artifact-store scenarios against Google Cloud Storage."""

    def get_store(self, config_name=None):
        """Always load the GCS config, regardless of the requested name."""
        store = ArtifactStoreTest.get_store(
            self, 'test_config_gcloud_storage.yaml')
        self.assertTrue(isinstance(store, GCloudArtifactStore))
        return store

    def get_qualified_location_prefix(self):
        """Return the gs:// prefix expected for this backend's keys."""
        store = self.get_store()
        return "gs://" + store.get_bucket() + "/"
@unittest.skipIf(
    not has_aws_credentials(),
    # Typo fix: the original adjacent strings "won'' be" rendered as "won be".
    "AWS credentials not found, won't be able to use S3")
class S3ArtifactStoreTest(ArtifactStoreTest, unittest.TestCase):
    """Runs the shared artifact-store scenarios against the S3 backend."""

    def get_store(self, config_name=None):
        """Always load the S3 config, regardless of the requested name."""
        store = ArtifactStoreTest.get_store(
            self, 'test_config_s3_storage.yaml')
        self.assertTrue(isinstance(store, S3ArtifactStore))
        return store

    def get_qualified_location_prefix(self):
        """Return the s3://<endpoint>/<bucket>/ prefix for this backend."""
        store = self.get_store()
        endpoint = urlparse(boto3.client('s3')._endpoint.host)
        return "s3://" + endpoint.netloc + "/" + store.bucket + "/"
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| StarcoderdataPython |
321244 | import os
from fppy import __version__
def docs():
    """Build the Sphinx HTML docs into a versioned and a 'main' output dir."""
    source_dir = 'docs/source'
    # One build per destination: the current package version first, then the
    # rolling "main" copy.
    for build_dir in ('docs/build/' + __version__, 'docs/build/main'):
        os.system(f'sphinx-build -b html {source_dir} {build_dir}')
def test_with_badge():
    """Run the pytest suite with coverage reports and refresh the coverage badge.

    Examples:
        Invoke from the command line via the poetry script entry::

            poetry run test
    """
    pytest_command = (
        'pytest -v '
        '--html=dist/report.html '
        '--cov-report html:cov_html '
        '--cov-report xml:cov.xml '
        '--cov-report annotate:cov_annotate '
        '--cov-report= '
        '--cov=fppy '
        'tests/')
    os.system(pytest_command)
    # Regenerate the badge from scratch so a stale SVG never survives.
    badge_path = 'badge/cov-badge.svg'
    if os.path.isfile(badge_path):
        os.remove(badge_path)
    os.system('coverage-badge -o ' + badge_path)
if __name__ == "__main__":
    # Manual invocation: build the docs, then run the coverage-badged tests.
    docs()
    test_with_badge()
6446691 | import random
class BaseReplacementPolicy:
    """Common interface for cache replacement policies.

    Maintains a monotonically increasing clock and exposes the hooks a cache
    uses: a display name, a default metadata value for new blocks, a touch
    hook applied on access, and an eviction hook applied when a set is full.
    """

    def __init__(self):
        """Start the policy clock at zero.

        Assuming the policy is instantiated at startup and is not changing
        throughout execution.
        """
        self._clock = 0

    @staticmethod
    def name():
        """Return the display name of this policy.

        :return: str
        """
        return "Default"

    def default(self):
        """Return the metadata value assigned to a freshly inserted block.

        :return: The current clock cycle
        """
        return self._clock

    def touch(self, block):
        """Return the block's replacement metadata, unchanged, on access.

        :param block: The block to update
        :return: The new data that should be stored in the block's metadata
        """
        return block.get_policy_data()

    def evict(self, cache_set):
        """Pick a block to evict; the base policy never picks one.

        :param cache_set: The set on which to evict a block
        :return: None
        """
        return None

    def step(self):
        """Advance the policy clock by one cycle.

        :return: None
        """
        self._clock = self._clock + 1
class LRUReplacementPolicy(BaseReplacementPolicy):
    """Least Recently Used: evict the block whose last access is oldest.

    Every access stamps the block with the current clock value, so the block
    carrying the smallest stamp in a set is the one untouched the longest.
    """

    @staticmethod
    def name():
        """Return the display name of this policy.

        :return: str
        """
        return 'LRU'

    def touch(self, block):
        """Stamp an accessed block with the current clock value.

        :param block: The block to update
        :return: The new data that should be stored in the block's metadata
        """
        return self._clock

    def evict(self, cache_set):
        """Return the block in cache_set with the oldest access stamp.

        :param cache_set: The set on which to evict a block
        :return: the evicted block, assuming there is something to evict
        """
        def last_access(candidate):
            return candidate.get_policy_data()

        return min(cache_set, key=last_access)
class RandomReplacementPolicy(BaseReplacementPolicy):
    """RAND: evict a uniformly random block from the set."""

    @staticmethod
    def name():
        """Return the display name of this policy.

        :return: str
        """
        return 'RAND'

    def evict(self, cache_set):
        """Return a block chosen uniformly at random from cache_set.

        :param cache_set: The set on which to evict a block
        :return: the evicted block, assuming there is something to evict
        """
        return random.choice(cache_set)
class LFUReplacementPolicy(BaseReplacementPolicy):
    """Least Frequently Used: evict the block with the fewest accesses.

    Metadata starts at zero and grows by one per touch, regardless of when
    the block was inserted.
    """

    @staticmethod
    def name():
        """Return the display name of this policy.

        :return: str
        """
        return 'LFU'

    def default(self):
        """Return the starting access count (zero) for a new block.

        :return: 0
        """
        return 0

    def touch(self, block):
        """Return the block's access count incremented by one.

        :param block: The block to update
        :return: The new data that should be stored in the block's metadata
        """
        return 1 + block.get_policy_data()

    def evict(self, cache_set):
        """Return the block in cache_set with the lowest access count.

        :param cache_set: The set on which to evict a block
        :return: the evicted block, assuming there is something to evict
        """
        def access_count(candidate):
            return candidate.get_policy_data()

        return min(cache_set, key=access_count)
class NMFUReplacementPolicy(BaseReplacementPolicy):
    """Not Most Frequently Used: evict a random block that is not the MFU.

    Access counts start at zero and grow by one per touch; eviction picks
    uniformly among all blocks except the single most-used one.
    """

    @staticmethod
    def name():
        """Return the display name of this policy.

        :return: str
        """
        return 'NMFU'

    def default(self):
        """Return the starting access count (zero) for a new block.

        :return: 0
        """
        return 0

    def touch(self, block):
        """Return the block's access count incremented by one.

        :param block: The block to update
        :return: The new data that should be stored in the block's metadata
        """
        return block.get_policy_data() + 1

    def evict(self, cache_set):
        """Evict a random block from cache_set that is not the most used.

        :param cache_set: The set on which to evict a block
        :return: the evicted block; for a single-block set the MFU itself is
            returned (the original raised IndexError via random.choice([])
            in that case, but an eviction request must always yield a victim)
        """
        mfu = max(cache_set, key=lambda block: block.get_policy_data())
        candidates = [block for block in cache_set if block != mfu]
        if not candidates:
            # Single-candidate set: the MFU is the only possible victim.
            return mfu
        return random.choice(candidates)
class NMRUReplacementPolicy(BaseReplacementPolicy):
    """Not Most Recently Used: evict a random block other than the MRU.

    Accesses stamp blocks with the current clock; eviction picks uniformly
    among all blocks except the most recently stamped one.
    """

    @staticmethod
    def name():
        """Return the display name of this policy.

        :return: str
        """
        return 'NMRU'

    def touch(self, block):
        """Stamp an accessed block with the current clock value.

        :param block: The block to update
        :return: The new data that should be stored in the block's metadata
        """
        return self._clock

    def evict(self, cache_set):
        """Evict a random block from cache_set that is not the most recent.

        :param cache_set: The set on which to evict a block
        :return: the evicted block; for a single-block set the MRU itself is
            returned (the original raised IndexError via random.choice([])
            in that case, but an eviction request must always yield a victim)
        """
        mru = max(cache_set, key=lambda block: block.get_policy_data())
        candidates = [block for block in cache_set if block != mru]
        if not candidates:
            # Single-candidate set: the MRU is the only possible victim.
            return mru
        return random.choice(candidates)
| StarcoderdataPython |
1703691 | <reponame>engjoaofaro/irrigation-service-mqtt
import boto3
import base64
from botocore.exceptions import ClientError
import AWSIoTPythonSDK.MQTTLib as awsIot
class AwsConfig:
    """Bundles AWS service-client construction and AWS IoT MQTT client setup."""

    def __init__(self, name_device, endpoint, ca_file_path, key_path, certificate_path):
        """Store the device identity and TLS credential paths for MQTT.

        Args:
            name_device: AWS IoT thing/client identifier.
            endpoint: AWS IoT Core endpoint host name.
            ca_file_path: path to the root CA certificate file.
            key_path: path to the device private key file.
            certificate_path: path to the device certificate file.
        """
        self.__name_device = name_device
        self.__endpoint = endpoint
        self.__ca_file_path = ca_file_path
        self.__key_path = key_path
        self.__certificate_path = certificate_path

    @staticmethod
    def client(aws_service, region):
        """Return a boto3 client for the given service in the given region."""
        return boto3.client(aws_service, region)

    def __build_iot_client(self):
        """Create an MQTT client configured for AWS IoT over TLS (port 8883)."""
        client = awsIot.AWSIoTMQTTClient(self.__name_device)
        client.configureEndpoint(self.__endpoint, 8883)
        client.configureCredentials(self.__ca_file_path, self.__key_path, self.__certificate_path)
        return client

    def iot_client(self):
        """Return a freshly configured AWS IoT MQTT client."""
        return self.__build_iot_client()

    @staticmethod
    def get_secret_endpoint():
        """Fetch the IoT endpoint secret from AWS Secrets Manager.

        Returns:
            The secret string, or the base64-decoded binary payload when the
            secret has no string form.  Returns None for ClientError codes
            other than the documented ones (preserved original behavior).

        Raises:
            ClientError: re-raised for the documented Secrets Manager
                failure modes.
        """
        secret_name = "app/iot/endpoint"
        region_name = "sa-east-1"
        session = boto3.session.Session()
        client = session.client(
            'secretsmanager',
            region_name
        )
        try:
            get_secret_value_response = client.get_secret_value(
                SecretId=secret_name
            )
        except ClientError as e:
            # Surface the documented Secrets Manager error codes explicitly
            # (mirrors the AWS sample); the repeated if/elif chain that all
            # re-raised the same exception is collapsed to a membership test.
            if e.response['Error']['Code'] in (
                    'DecryptionFailureException',
                    'InternalServiceErrorException',
                    'InvalidParameterException',
                    'InvalidRequestException',
                    'ResourceNotFoundException'):
                raise e
        else:
            # Success path: the secret is either a string or binary payload.
            # (The flattened original made this `else` ambiguous; it must be
            # the try-else, since get_secret_value_response is unbound when
            # the call above raised.)
            if 'SecretString' in get_secret_value_response:
                secret = get_secret_value_response['SecretString']
                return secret
            decoded_binary_secret = base64.b64decode(
                get_secret_value_response['SecretBinary'])
            return decoded_binary_secret
| StarcoderdataPython |
33924 | <gh_stars>10-100
import random
from pandac.PandaModules import Point3
from direct.gui.DirectGui import DirectFrame, DirectLabel
from direct.fsm import FSM
from direct.interval.IntervalGlobal import *
from pirates.audio import SoundGlobals
from pirates.audio.SoundGlobals import loadSfx
import RepairGlobals
# Visual scale of a barnacle at zero HP (see RepairBarnacle.setHP).
MIN_SCALE = 1.5
# Extra scale added in proportion to remaining HP (full HP = MIN_SCALE + this).
MAX_SCALE_ADD = 1.0
# Upper bound on HP removed by a single scrub push (see RepairBarnacle.reduceHP).
MAX_SCRUB_AMT = 20.0
class RepairBarnacle(DirectFrame, FSM.FSM):
barnacleFallSounds = None
def __init__(self, name, barnacleGeom):
self.config = RepairGlobals.Careening
DirectFrame.__init__(self, parent=None, relief=None)
self.barnacleGeom = barnacleGeom
FSM.FSM.__init__(self, 'Barnacle_%sFSM' % name)
self._initAudio()
self._initVars()
self._initGUI()
return
def _initVars(self):
self.heat = 0.0
self.hp = 100
self.maxHP = 100
self.currentShake = None
self.fallingAnim = None
return
def _initAudio(self):
if not self.barnacleFallSounds:
RepairBarnacle.barnacleFallSounds = (
loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE1), loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE2), loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE3), loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE4), loadSfx(SoundGlobals.SFX_MINIGAME_REPAIR_CAREEN_COMPLETE5))
def _initGUI(self):
self.barnacleGeom.reparentTo(self)
self.barnacleGeom.setScale(0.6)
self.barnacleGeom.setR(random.random() * 360)
if self.config.showBarnacleHP:
self.hpLabel = DirectLabel(text='', scale=(0.025, 0.025, 0.025), pos=(0.0, 0.0, -0.01), textMayChange=1, parent=self)
def destroy(self):
if self.currentShake is not None:
self.currentShake.clearToInitial()
self.currentShake = None
del self.currentShake
if self.fallingAnim is not None:
self.fallingAnim.clearToInitial()
self.fallingAnim = None
del self.fallingAnim
self.cleanup()
if self.config.showBarnacleHP:
self.hpLabel.destroy()
del self.hpLabel
DirectFrame.destroy(self)
self.barnacleGeom.removeNode()
del self.barnacleGeom
return
def setMaxHP(self, newMaxHP, globalMaxHP):
self.maxHP = newMaxHP
self.globalMaxHP = globalMaxHP
def setHP(self, newHP):
self.hp = newHP
if self.config.showBarnacleHP:
self.hpLabel['text'] = '%i' % self.hp
self.hpLabel.setText()
if self.hp <= 0.0:
self.hp = 0.0
self.request('Falling')
self.setScale(self.hp * MAX_SCALE_ADD / self.globalMaxHP + MIN_SCALE)
def reduceHP(self, pushDir, powerScale):
amount = pushDir.length()
pushDir.normalize()
self.heat = min(1.0, self.heat + amount)
amount *= 50
if amount > MAX_SCRUB_AMT:
amount = MAX_SCRUB_AMT
amount *= powerScale
newHP = self.hp - amount
self.setHP(newHP)
if self.currentShake is None:
self.currentShake = Sequence(LerpPosInterval(self, duration=0.03, pos=(self.getX() - pushDir[0] * (0.01 + amount / 1000.0), self.getY(), self.getZ() - pushDir[1] * (0.01 + amount / 1000.0)), blendType='easeIn'), LerpPosInterval(self, duration=0.06, pos=(self.getX(), self.getY(), self.getZ()), blendType='easeOut'), LerpPosInterval(self, duration=0.04, pos=(self.getX() + pushDir[0] * (0.0075 + amount / 2000.0), self.getY(), self.getZ() + pushDir[1] * (0.005 + amount / 2000.0)), blendType='easeIn'), LerpPosInterval(self, duration=0.08, pos=(self.getX(), self.getY(), self.getZ()), blendType='easeOut'), Func(self.clearCurrentShake))
self.currentShake.start()
return
def checkCollision(self, mousePosition):
    """Return True if the render2d-space mouse point hits this barnacle while it is Idle."""
    # Offset from this node's render2d position to the mouse point.
    sld = Point3(mousePosition.getX(), 0.0, mousePosition.getY()) - self.getPos(render2d)
    # Hit radius scales with the barnacle's current visual scale.
    return self.getCurrentOrNextState() == 'Idle' and sld.length() < self.config.barnacleRadius * self.getScale().getX()
def clearCurrentShake(self):
    """Forget the finished shake interval so a new one may be started."""
    self.currentShake = None
def enterIdle(self):
    """FSM entry: reset HP/heat, reveal geometry and show the barnacle."""
    # NOTE(review): visibleIndex is computed but never used, and the loop
    # below skips the last child (range stops at getNumChildren() - 1).
    # Possibly this was meant to unstash only one randomly chosen variant;
    # confirm intent before changing.
    visibleIndex = random.uniform(0, self.barnacleGeom.getNumChildren() - 1)
    for i in range(self.barnacleGeom.getNumChildren() - 1):
        self.barnacleGeom.getChild(i).unstash()
    newHP = self.maxHP
    self.heat = 0.0
    self.setHP(newHP)
    self.unstash()
def exitIdle(self):
    # Nothing to undo when leaving the Idle state.
    pass
def enterFalling(self):
    """FSM entry: play a fall sound and animate the barnacle dropping off."""
    if self.currentShake is not None:
        # Jump the shake to its end state so it doesn't fight the fall.
        self.currentShake.finish()
    sound = random.choice(self.barnacleFallSounds)
    sound.play()
    # Drop two units downward, then transition into the Clean state.
    self.fallingAnim = Sequence(LerpPosInterval(self, duration=2.0, pos=(self.getX(), self.getY(), self.getZ() - 2.0), blendType='easeIn'), Func(self.request, 'Clean'))
    self.fallingAnim.start()
    return
def exitFalling(self):
    # Hide the barnacle once its fall has finished.
    self.stash()
def enterClean(self):
    # Terminal state: barnacle has been scrubbed off; nothing to do.
    pass
def exitClean(self):
    pass
322009 | """Testing for TransitionGraph"""
import numpy as np
import pytest
from scipy.sparse import csr_matrix
from sklearn.exceptions import NotFittedError
from giotto.graphs import TransitionGraph
# Two toy state sequences; each row is a series of 2-dimensional states
# from which a transition graph is built.
X_tg = np.array([[[1, 0], [2, 3], [5, 4]],
                 [[0, 1], [3, 2], [4, 5]]])
# Expected output: one sparse boolean adjacency matrix per input row,
# each a symmetric 2x2 with the off-diagonal entries set.
X_tg_res = np.array([
    csr_matrix((np.array([True] * 2),
                (np.array([0, 1]),
                 np.array([1, 0]))), shape=(2, 2)),
    csr_matrix((np.array([True] * 2),
                (np.array([0, 1]),
                 np.array([1, 0]))), shape=(2, 2)),
])
def test_transition_graph_not_fitted():
    """Calling transform() before fit() must raise NotFittedError."""
    graph = TransitionGraph()
    with pytest.raises(NotFittedError):
        graph.transform(X_tg)
def test_transition_graph_transform():
    """fit_transform output matches the precomputed adjacency matrices."""
    transformed = TransitionGraph().fit_transform(X_tg)
    for got, expected in zip(transformed, X_tg_res):
        assert np.array_equal(got.toarray(), expected.toarray())
def test_parallel_transition_graph_transform():
    """Parallel (n_jobs=2) results must agree with serial (n_jobs=1) results."""
    serial = TransitionGraph(n_jobs=1).fit_transform(X_tg)
    parallel = TransitionGraph(n_jobs=2).fit_transform(X_tg)
    for left, right in zip(serial, parallel):
        assert np.array_equal(left.toarray(), right.toarray())
| StarcoderdataPython |
8007655 | <reponame>tinesife94/projecteuler.net-solutions
"""Python code to solve problem 6 on the projecteuler.net website, available
at:
https://projecteuler.net/problem=6
For your convenience:
The sum of the squares of the first ten natural numbers is,
1^2 + 2^2 + ... + 10^2 = 385
The square of the sum of the first ten natural numbers is,
(1 + 2 + ... + 10)^2 = 55^2 = 3025
Hence the difference between the sum of the squares of the first ten natural
numbers and the square of the sum is
3025 - 385 = 2640
Find the difference between the sum of the squares of the first one hundred
natural numbers and the square of the sum.
"""
def sum_AP(a1, an, n):
    """Sum of an arithmetic progression of n terms with first term a1 and last term an."""
    return n * (a1 + an) // 2
def sum_of_natural_numbers(n):
    """Sum of the first n natural numbers (Gauss closed form n(n+1)/2)."""
    return n * (n + 1) // 2
def sum_of_odd_numbers(n):
    """Sum of the first n odd numbers, which is exactly n squared."""
    return n * n
def sum_of_the_squares(n):
    """Sum of the squares of the first n natural numbers: n(n+1)(2n+1)/6."""
    return n * (n + 1) * (2 * n + 1) // 6
# Answer for n = 100: square of the sum minus the sum of the squares.
n = 100
square_of_sum = sum_of_natural_numbers(n) ** 2
result = square_of_sum - sum_of_the_squares(n)
print(result)
| StarcoderdataPython |
9775309 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-08-19 19:51
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Introduce the JobRunningOnServer through-model.

    Moves the runtime columns (pid, run_time/start_time, status) off Job
    and the job_running link off Server onto a dedicated through table,
    exposed on Job as the server_running many-to-many relation.
    """

    dependencies = [
        ('jobs', '0005_server_job_running'),
    ]

    operations = [
        # New through-model carrying the runtime state of a job on a server.
        migrations.CreateModel(
            name='JobRunningOnServer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('start_time', models.DateTimeField(verbose_name='Time When the Job Starts')),
                ('status', models.CharField(max_length=6000, null=True)),
                ('pid', models.CharField(max_length=2000)),
            ],
        ),
        # Drop the state columns replaced by the through table.
        migrations.RemoveField(
            model_name='job',
            name='pid',
        ),
        migrations.RemoveField(
            model_name='job',
            name='run_time',
        ),
        migrations.RemoveField(
            model_name='job',
            name='status',
        ),
        migrations.RemoveField(
            model_name='server',
            name='job_running',
        ),
        # Wire the through-model to Job and Server.
        migrations.AddField(
            model_name='jobrunningonserver',
            name='job',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jobs.Job'),
        ),
        migrations.AddField(
            model_name='jobrunningonserver',
            name='server',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jobs.Server'),
        ),
        migrations.AddField(
            model_name='job',
            name='server_running',
            field=models.ManyToManyField(through='jobs.JobRunningOnServer', to='jobs.Server'),
        ),
    ]
| StarcoderdataPython |
4945571 | # -*- coding: utf-8 -*-
"""Holds APIs used by the front end"""
from flask import Response, request
from flask_login import current_user
from sqlalchemy import asc
from app import wjl_app
from app.errors import NotFoundException
from app.model import Session, Match, Team, Field, DB
from app.logging import LOGGER
from app.views.types import ScheduleRecord
from app.authentication import api_admin_required, api_player_required
from sqlalchemy.exc import IntegrityError
import json
@wjl_app.route("/api/field/save", methods=["POST", "PUT"])
@api_admin_required
def save_field():
    """Create or update a Field from the JSON request body (admin only).

    Returns 200 with the saved field as JSON, 404 if a referenced entity
    is missing, or 400 on a database integrity error.
    """
    field = None
    try:
        field = request.get_json(silent=True)
        LOGGER.debug(f"Save/Update field {field}")
        saved_field = Field.from_json(field)
        # Only new records (no id yet) need an explicit add; updates are
        # already tracked by the ORM session.
        if field.get("id", None) is None:
            DB.session.add(saved_field)
        DB.session.commit()
        LOGGER.info(
            f"{current_user} saved field {field}")
        return Response(json.dumps(saved_field.json()),
                        status=200, mimetype="application/json")
    except NotFoundException as error:
        msg = str(error)
        LOGGER.warning(
            f"{current_user} tried saving field but issue {msg}")
        return Response(json.dumps(msg),
                        status=404, mimetype="application/json")
    except IntegrityError as error:
        # Roll back so the session stays usable for subsequent requests.
        DB.session.rollback()
        msg = str(error)
        LOGGER.warning(
            f"{current_user} tried saving field but issue {msg}")
        return Response(json.dumps(msg),
                        status=400, mimetype="application/json")
@wjl_app.route("/api/team/save", methods=["POST", "PUT"])
@api_admin_required
def save_team():
    """Create or update a Team from the JSON request body (admin only).

    Returns 200 with the saved team as JSON, 404 if a referenced entity
    is missing, or 400 on a database integrity error.
    """
    team = None
    try:
        team = request.get_json(silent=True)
        LOGGER.debug(f"Save/Update team {team}")
        saved_team = Team.from_json(team)
        # Only new records (no id yet) need an explicit add.
        if team.get("id", None) is None:
            DB.session.add(saved_team)
        DB.session.commit()
        LOGGER.info(
            f"{current_user} saved team {team}")
        return Response(json.dumps(saved_team.json()),
                        status=200, mimetype="application/json")
    except NotFoundException as error:
        msg = str(error)
        LOGGER.warning(
            f"{current_user} tried saving team but issue {msg}")
        return Response(json.dumps(msg),
                        status=404, mimetype="application/json")
    except IntegrityError as error:
        # Roll back so the session stays usable for subsequent requests.
        DB.session.rollback()
        msg = str(error)
        LOGGER.warning(
            f"{current_user} tried saving team but issue {msg}")
        return Response(json.dumps(msg),
                        status=400, mimetype="application/json")
@wjl_app.route("/api/session/save", methods=["POST", "PUT"])
@api_admin_required
def save_session():
    """Create or update a Session from the JSON request body (admin only).

    Returns 200 with the saved session as JSON, 404 if a referenced
    entity is missing, or 400 on a database integrity error.
    """
    sesh = None
    try:
        sesh = request.get_json(silent=True)
        LOGGER.debug(f"Save/Update session {sesh}")
        saved_session = Session.from_json(sesh)
        # Only new records (no id yet) need an explicit add.
        if sesh.get("id", None) is None:
            DB.session.add(saved_session)
        DB.session.commit()
        LOGGER.info(
            f"{current_user} saved session {saved_session}")
        return Response(json.dumps(saved_session.json()),
                        status=200, mimetype="application/json")
    except NotFoundException as error:
        msg = str(error)
        LOGGER.warning(
            f"{current_user} tried saving session but issue {msg}")
        return Response(json.dumps(msg),
                        status=404, mimetype="application/json")
    except IntegrityError as error:
        # Roll back so the session stays usable for subsequent requests.
        DB.session.rollback()
        msg = str(error)
        LOGGER.warning(
            f"{current_user} tried saving session but issue {msg}")
        return Response(json.dumps(msg),
                        status=400, mimetype="application/json")
@wjl_app.route("/api/match/save", methods=["POST", "PUT"])
@api_admin_required
def save_match():
    """Create or update a Match from the JSON request body (admin only).

    Returns 200 with the saved match as JSON, 404 if a referenced entity
    is missing, or 400 on a database integrity error.
    """
    match = None
    try:
        match = request.get_json(silent=True)
        LOGGER.debug(f"Save/Update match {match}")
        saved_match = Match.from_json(match)
        # Only new records (no id yet) need an explicit add.
        if match.get("id", None) is None:
            DB.session.add(saved_match)
        DB.session.commit()
        LOGGER.info(
            f"{current_user} saved match {match}")
        return Response(json.dumps(saved_match.json()),
                        status=200, mimetype="application/json")
    except NotFoundException as error:
        msg = str(error)
        LOGGER.warning(
            f"{current_user} tried saving match but issue {msg}")
        return Response(json.dumps(msg),
                        status=404, mimetype="application/json")
    except IntegrityError as error:
        # Roll back so the session stays usable for subsequent requests.
        DB.session.rollback()
        msg = str(error)
        LOGGER.warning(
            f"{current_user} tried saving match but issue {msg}")
        return Response(json.dumps(msg),
                        status=400, mimetype="application/json")
@wjl_app.route("/api/team/<int:team_id>")
@api_player_required
def get_team(team_id):
    """Return a single team as JSON, or a 404 echoing the unknown id."""
    team = Team.query.get(team_id)
    if team is not None:
        return Response(json.dumps(team.json()), status=200,
                        mimetype="application/json")
    return Response(json.dumps(team_id), status=404,
                    mimetype="application/json")
@wjl_app.route("/api/teams")
@api_player_required
def get_all_teams():
    """Return every team as a JSON list."""
    serialized = [entry.json() for entry in Team.query.all()]
    return Response(json.dumps(serialized), status=200,
                    mimetype="application/json")
@wjl_app.route("/api/fields")
@api_player_required
def get_all_fields():
    """Return every field as a JSON list."""
    serialized = [entry.json() for entry in Field.query.all()]
    return Response(json.dumps(serialized), status=200,
                    mimetype="application/json")
@wjl_app.route("/api/session/<int:session_id>/matches")
@api_player_required
def get_matches_in_session(session_id):
    """Return the session's matches, ordered by date ascending, as schedule records.

    Responds 404 with a JSON ``null`` body if the session does not exist.
    """
    sesh = Session.query.get(session_id)
    if sesh is None:
        return Response(json.dumps(None), status=404,
                        mimetype="application/json")
    matches = (Match.query
               .filter(Match.session_id == session_id)
               .order_by(asc(Match.date)).all())
    matches_data = [ScheduleRecord.create_schedule_record(match)
                    for match in matches]
    return Response(json.dumps(matches_data), status=200,
                    mimetype="application/json")
@wjl_app.route("/api/session")
def get_all_sessions():
    """Return every session as a JSON list (no authentication required)."""
    payload = [entry.json() for entry in Session.query.all()]
    return Response(json.dumps(payload), status=200,
                    mimetype="application/json")
@wjl_app.route("/api/match/<int:match_id>")
@api_player_required
def get_match(match_id):
    """Return a match and its score sheets (sorted by id) as JSON.

    Responds 404 with a JSON ``null`` body when the match does not exist,
    matching the error shape of the other endpoints in this module.
    """
    match = Match.query.get(match_id)
    if match is None:
        LOGGER.warning(
            f"{current_user} tried accessing non-existent match {match_id}")
        # Fix: previously returned Response(None, ...) which produced an
        # empty, non-JSON 404 body, unlike every sibling endpoint.
        return Response(json.dumps(None), status=404,
                        mimetype="application/json")
    match_data = match.json()
    # Serialize the sheets in a stable, id-ascending order for clients.
    match_data["sheets"] = sorted((sheet.json() for sheet in match.sheets),
                                  key=lambda sheet: sheet['id'])
    return Response(json.dumps(match_data),
                    status=200, mimetype="application/json")
| StarcoderdataPython |
1932714 | #!/usr/bin/env python3
from . import command_codes as cc
import asyncio
from collections import namedtuple
import logging
import re
from typing import List, Callable, Union, Sequence, Any
from types import coroutine
class LoggerMetaClass(type):
    """Metaclass that attaches a ``bot.<ClassName>`` logger to each class as ``_log``."""

    def __new__(mcs, name, bases, namespace):
        cls = super().__new__(mcs, name, bases, namespace)
        cls._log = logging.getLogger("bot." + name)
        cls._log.debug("Attached logger to " + name)
        return cls
RegEx = type(re.compile(""))
class User(metaclass=LoggerMetaClass):
    """A known IRC user: a nick plus an optional ``user@host`` hostmask."""

    def __init__(self, nick: str, client: 'Client', hostmask: str=None):
        self.name = nick
        self.hostmask = hostmask
        self.client = client
        self._log.debug("Created {}".format(self))

    async def message(self, text: str, notice: bool=False) -> None:
        """Send *text* to this user as PRIVMSG (or NOTICE if requested)."""
        await self.client.message(self.name, text, notice=notice)

    def __eq__(self, other: 'User') -> bool:
        # Users are identified by nick only; hostmask is ignored.
        return self.name == other.name

    def __hash__(self):
        return hash(self.name)

    def __repr__(self):
        return "<User {self.name}!{self.hostmask}>".format(self=self)
class Channel(metaclass=LoggerMetaClass):
    """An IRC channel the client knows about, tracking its current users."""

    def __init__(self, name: str, client: 'Client'):
        self.name = name
        self.client = client
        self.users = set()
        self._log.debug("Created {}".format(self))

    def on_message(self, *args, accept_query=False, matcher=None, **kwargs):
        """
        Convenience wrapper of `Client.on_message` pre-bound with `channel=self.name`.

        With ``accept_query=True`` the channel filter is replaced by a custom
        matcher so direct queries can reach the handler as well.
        """
        if accept_query:
            def new_matcher(msg: Message):
                ret = True
                if matcher:
                    ret = matcher(msg)
                    if ret is None or ret is False:
                        return ret
                # NOTE(review): rejects only messages that are neither addressed
                # to this channel nor sent by a User; messages to *other*
                # channels from a User still pass -- confirm this is intended.
                if msg.recipient is not self and not isinstance(msg.sender, User):
                    return False
                return ret
        else:
            kwargs.setdefault("channel", self.name)
            new_matcher = matcher
        return self.client.on_message(*args, matcher=new_matcher, **kwargs)

    async def message(self, text: str, notice: bool=False) -> None:
        """Send *text* to this channel as PRIVMSG (or NOTICE if requested)."""
        await self.client.message(self.name, text, notice=notice)

    async def part(self, reason: str=None, block: bool=False) -> None:
        """Leave this channel, optionally blocking until the PART is confirmed."""
        await self.client.part(self.name, reason=reason, block=block)

    def __contains__(self, other: User) -> bool:
        return other in self.users

    def __eq__(self, other: 'Channel') -> bool:
        return self.name == other.name

    def __hash__(self):
        return hash(self.name)

    def __repr__(self):
        return "<Channel {self.name} users={num_users}>" \
            .format(self=self, num_users=len(self.users))
class Message(metaclass=LoggerMetaClass):
    """A received PRIVMSG/NOTICE with resolved sender and recipient objects."""

    def __init__(self, sender: Union[User, Channel],
                 recipient: Union[User, Channel],
                 text: str, notice: bool=False):
        self.sender = sender
        self.recipient = recipient
        self.text = text
        self.notice = notice

    async def reply(self, text: str, notice: bool=None) -> None:
        """Reply to the channel the message came from, or to the sender for queries.

        ``notice`` defaults to mirroring the kind of the original message.
        """
        if notice is None:
            notice = self.notice
        recipient = self.recipient if isinstance(self.recipient, Channel) else self.sender
        await recipient.message(text, notice=notice)

    def __repr__(self):
        return "<Message sender={self.sender} recipient={self.recipient}>".format(self=self)
class Client(metaclass=LoggerMetaClass):
    """Asynchronous IRC client.

    Handlers are registered through the ``on_*`` decorator factories and
    dispatched from the read loop in :meth:`run`.  Command handlers run
    blocking inside the loop; message and join handlers run as background
    tasks.
    """

    def __init__(self, host: str, port: int, nick: str="TheBot", user: str="bot",
                 realname: str="The Bot", secure: bool=False, encoding: str="utf-8",
                 password: str=<PASSWORD>):
        self.host = host
        self.port = port
        # NOTE(review): `secure` is stored but not passed to open_connection
        # in run() -- TLS appears unimplemented; confirm.
        self.secure = secure
        self.nick = nick
        self.user = user
        self.realname = realname
        self.encoding = encoding
        self.password = password
        self._on_connected_handlers = []
        self._on_message_handlers = []
        # Caches of known users/channels keyed by name.
        self._users = {}
        self._channels = {}
        self._on_command_handlers = []
        self._on_join_handlers = []
        # default chan types, can be overridden by `cc.RPL_ISUPPORT` CHANTYPES
        self._channel_types = "#&"
        # default user mode prefixes, can be overridden by `cc.RPL_ISUPPORT` PREFIX
        self._prefix_map = {"@": "o", "+": "v"}
        self._connected = False
        self._modules = []
        # Register JOIN, QUIT, PART, NICK handlers
        self.on_command(cc.JOIN)(self._on_join)
        self.on_command(cc.QUIT)(self._on_quit)
        self.on_command(cc.PART)(self._on_part)
        self.on_command(cc.NICK)(self._on_nick)

    def on_connected(self) -> Callable[[Callable], Callable]:
        """Register a coroutine to run once the connection handshake completes."""
        def decorator(fn: Callable[[], None]):
            self._on_connected_handlers.append(fn)
            return fn
        return decorator

    MessageHandler = namedtuple("MessageHandler", ("matcher", "handler"))

    def on_message(self, message: Union[str, RegEx]=None, channel: Union[str, RegEx]=None,
                   sender: Union[str, RegEx]=None, matcher: Callable[[Message], None]=None,
                   notice: bool=None) -> Callable[[Callable], Callable]:
        """
        Register a handler that's called after a message is received (PRIVMSG, NOTICE).
        The handler is called with the `Message` as argument, must be a coroutine
        and is run non-blocking. All filters must match for a message to be accepted.
        :param message: message filter, string (exact match) or compiled regex object
        :param channel: channel filter, string (exact match) or compiled regex object
        :param sender: sender filter, string (exact match) or compiled regex object
        :param matcher: test function, return true to accept the message.
            Gets the `Message` as parameter
        """
        matchers = []
        if notice is not None:
            def notice_matcher(msg: Message) -> bool:
                return msg.notice == notice
            matchers.append(notice_matcher)
        if matcher:
            matchers.append(matcher)
        # message
        if message is None:
            pass
        elif isinstance(message, str):
            def matcher(msg: Message) -> bool:
                return msg.text == message
            matchers.append(matcher)
        elif hasattr(message, "search"):
            # regex or so; named groups are forwarded to the handler as kwargs
            def matcher(msg: Message) -> bool:
                m = message.search(msg.text)
                if m is not None:
                    return m.groupdict()
            matchers.append(matcher)
        else:
            raise ValueError("Don't know what to do with message={}".format(message))
        # sender
        if sender is None:
            pass
        elif isinstance(sender, User):
            def matcher(msg: Message) -> bool:
                return msg.sender == sender
            matchers.append(matcher)
        elif isinstance(sender, str):
            def matcher(msg: Message) -> bool:
                return msg.sender.name == sender
            matchers.append(matcher)
        elif hasattr(sender, "search"):
            # regex or so
            def matcher(msg: Message) -> bool:
                m = sender.search(msg.sender.name)
                if m is not None:
                    return m.groupdict()
            matchers.append(matcher)
        else:
            raise ValueError("Don't know what to do with sender={}".format(sender))
        # channel
        if channel is None:
            pass
        elif isinstance(channel, Channel):
            def matcher(msg: Message) -> bool:
                return isinstance(msg.recipient, Channel) \
                    and msg.recipient == channel
            matchers.append(matcher)
        elif isinstance(channel, str):
            def matcher(msg: Message) -> bool:
                return isinstance(msg.recipient, Channel) \
                    and msg.recipient.name == channel
            matchers.append(matcher)
        elif hasattr(channel, "search"):
            # regex or so
            def matcher(msg: Message) -> bool:
                if not isinstance(msg.recipient, Channel):
                    return
                m = channel.search(msg.recipient.name)
                if m is not None:
                    return m.groupdict()
            matchers.append(matcher)
        else:
            raise ValueError("Don't know what to do with channel={}".format(channel))

        def message_matcher(msg: Message) -> bool:
            # All matchers must accept; dict results are merged into the
            # kwargs passed to the handler (e.g. regex named groups).
            fn_kwargs = {}
            for m in matchers:
                ret = m(msg)
                # Internal matchers may return False or None to fail
                if ret is None or ret is False:
                    return
                # If one returns a dict the values in it will be passed to the handler
                if isinstance(ret, dict):
                    fn_kwargs.update(ret)
            return fn_kwargs

        def decorator(fn: Callable[[Message], None]) -> Callable[[Message], None]:
            mh = self.MessageHandler(message_matcher, fn)
            self._on_message_handlers.append(mh)
            self._log.debug("Added message handler {} with matchers {}".format(mh, matchers))
            return fn
        return decorator

    def remove_message_handler(self, handler: Callable[[Message], None]) -> None:
        """Unregister a previously added message handler."""
        for mh in self._on_message_handlers:
            if mh.handler == handler:
                self._log.debug("Removing message handler {}".format(mh))
                self._on_message_handlers.remove(mh)

    def await_message(self, *args, **kwargs) -> 'asyncio.Future[Message]':
        """
        Block until a message matches. See `on_message`
        """
        fut = asyncio.Future()

        @self.on_message(*args, **kwargs)
        async def handler(message):
            fut.set_result(message)
        # remove handler when done or cancelled
        fut.add_done_callback(lambda _: self.remove_message_handler(handler))
        return fut

    IrcMessage = namedtuple("IrcMessage", ("prefix", "args"))
    JoinHandler = namedtuple("JoinHandler", ("channel", "handler"))

    def on_join(self, channel: str=None) -> Callable[[Callable], Callable]:
        """
        Register a handler that's called after a channel is joined.
        The handler is called with the `Channel` as argument, must be a coroutine
        and is run non-blocking.
        :param channel: channel to look out for or `None` for all channels
        """
        def decorator(fn: Callable[[self.IrcMessage], None]):
            jh = self.JoinHandler(channel, fn)
            self._on_join_handlers.append(jh)
            self._log.debug("Added join handler {}".format(jh))
            return fn
        return decorator

    def remove_join_handler(self, handler: Callable[[Channel], None]) -> None:
        """Unregister a previously added join handler."""
        for jh in self._on_join_handlers:
            if jh.handler == handler:
                self._log.debug("Removing join handler {}".format(jh))
                self._on_join_handlers.remove(jh)

    CommandHandler = namedtuple("CommandHandler", ("args", "handler"))

    def on_command(self, *args: Sequence[str]) -> Callable[[Callable], Callable]:
        """
        Register a handler that's called when (the beginning of) a `IrcMessage` matches.
        The handler is called with the `IrcMessage` as argument, must be a coroutine
        and is run blocking, i.e. you cannot use `await_command` in it!
        :param args: commands args that must match (the actual command is the first arg)
        """
        def decorator(fn: Callable[[self.IrcMessage], None]):
            ch = self.CommandHandler(args, fn)
            self._on_command_handlers.append(ch)
            self._log.debug("Added command handler {}".format(ch))
            return fn
        return decorator

    def remove_command_handler(self, handler: Callable[[IrcMessage], None]) -> None:
        """Unregister a previously added command handler."""
        for ch in self._on_command_handlers:
            if ch.handler == handler:
                self._log.debug("Removing command handler {}".format(ch))
                self._on_command_handlers.remove(ch)

    def await_command(self, *args, **kwargs) -> 'asyncio.Future[IrcMessage]':
        """
        Block until a command matches. See `on_command`
        """
        fut = asyncio.Future()

        @self.on_command(*args, **kwargs)
        async def handler(msg):
            fut.set_result(msg)
        # remove handler when done or cancelled
        fut.add_done_callback(lambda _: self.remove_command_handler(handler))
        return fut

    def _parsemsg(self, msg: str) -> IrcMessage:
        """Parse a raw IRC line into (prefix, args); trailing arg keeps spaces."""
        # adopted from twisted/words/protocols/irc.py
        if not msg:
            return
        prefix = None
        if msg[0] == ":":
            prefix, msg = msg[1:].split(" ", 1)
        if " :" in msg:
            msg, rest = msg.split(" :", 1)
            args = msg.split() + [rest]
        else:
            args = msg.split()
        return self.IrcMessage(prefix, tuple(args))

    def _buildmsg(self, *args: List[str], prefix: str=None) -> str:
        """Serialize args into a raw IRC line; only the final arg may contain spaces."""
        msg = ""
        if prefix:
            msg += ":{} ".format(prefix)

        def fmtarg(i, arg):
            arg = str(arg)
            if i == len(args) - 1 and (" " in arg or arg.startswith(":")):
                return ":" + arg
            elif i != len(args) - 1 and (" " in arg or arg.startswith(":")):
                raise ValueError(f"non-final argument contains space or begins with colon: {args}")
            else:
                return arg
        msg += " ".join((fmtarg(i, arg) for i, arg in enumerate(args)))
        return msg

    async def _send(self, *args: List[Any], prefix: str=None) -> None:
        """Encode and write one IRC command line to the server."""
        msg = self._buildmsg(*args, prefix=prefix)
        self._log.debug("<- {}".format(msg))
        self._writer.write(msg.encode(self.encoding) + b"\r\n")

    async def message(self, recipient: str, text: str, notice: bool=False) -> None:
        """
        Lower level messaging function used by User and Channel
        """
        await self._send(cc.PRIVMSG if not notice else cc.NOTICE, recipient, text)

    async def _get_message(self) -> IrcMessage:
        """Read and parse one line; returns None on EOF or fully-handled pings."""
        line = await self._reader.readline()
        line = line.decode(self.encoding).strip("\r\n")
        if not line and self._reader.at_eof():
            return
        self._log.debug("-> {}".format(line))
        msg = self._parsemsg(line)
        if msg and await self._handle_special(msg):
            return
        return msg

    async def run(self) -> None:
        """Connect and run the main read/dispatch loop until the server closes."""
        self._reader, self._writer = await asyncio.open_connection(self.host, self.port)
        # start connect procedure in the background.
        # messages processed in it actually already go through the main loop below.
        self._bg(self._connect())
        while not self._reader.at_eof():
            try:
                msg = await self._get_message()
            except:
                self._log.exception("Error during receiving")
                raise
            if not msg:
                continue
            # Command handlers match on a prefix of the args tuple.
            for ch in self._on_command_handlers:
                args = msg.args[:len(ch.args)]
                if ch.args == args:
                    self._log.debug("Calling command handler {} with input {}".format(ch, msg))
                    await ch.handler(msg)
            if not self._connected:
                continue
            if msg.args[0] in (cc.PRIVMSG, cc.NOTICE):
                sender = self._resolve_sender(msg.prefix)
                recipient = self._resolve_recipient(msg.args[1])
                message = Message(sender, recipient, msg.args[2], (msg.args[0] == cc.NOTICE))
                await self._handle_on_message(message)
                continue
            # self._log.info("Unhandled command: {} {}".format(command, kwargs))
        self._writer.close()
        self._log.info("Connection closed, exiting")

    def _bg(self, coro: coroutine) -> asyncio.Task:
        """Run coro in background, log errors"""
        async def runner():
            try:
                await coro
            except:
                self._log.exception("async: Coroutine raised exception")
        return asyncio.ensure_future(runner())

    async def _handle_special(self, msg: IrcMessage) -> bool:
        """Answer protocol-level messages (PING) directly; True when consumed."""
        if msg.args[0] == cc.PING:
            await self._send(cc.PONG, *msg.args[1:])
            return True
        return False

    async def _handle_on_message(self, message: Message) -> None:
        """Launch every matching message handler as a background task."""
        for mh in self._on_message_handlers:
            match = mh.matcher(message)
            if match is not None:
                self._bg(mh.handler(message, **match))

    async def _connect(self) -> None:
        """Perform PASS/NICK/USER registration and wait for the end of the MOTD."""
        if self.password:
            await self._send(cc.PASS, self.password)
        # Coroutines created now, awaited after the temporary handlers are up.
        nick = self._send(cc.NICK, self.nick)
        user = self._send(cc.USER, self.user, 0, "*", self.realname)

        @self.on_command(cc.ERR_NICKNAMEINUSE)
        async def nick_in_use(msg):
            # Keep appending underscores until the server accepts a nick.
            self.nick += "_"
            await self._send(cc.NICK, self.nick)

        @self.on_command(cc.RPL_ISUPPORT)
        async def feature_list(msg):
            for feature, _, value in map(lambda arg: arg.partition("="), msg.args):
                if feature == "CHANTYPES":  # CHANTYPES=#&
                    self._channel_types = value
                if feature == "PREFIX":  # PREFIX=(ov)@+
                    modes, _, prefixes = value[1:].partition(")")
                    self._prefix_map = dict(zip(prefixes, modes))

        def await_motd():
            # End of registration is signalled by either the MOTD ending
            # or the server reporting it has none.
            fut = asyncio.Future()

            @self.on_command(cc.RPL_ENDOFMOTD)
            async def endofmotd(msg):
                fut.set_result(msg)

            @self.on_command(cc.ERR_NOMOTD)
            async def errnomotd(msg):
                fut.set_result(msg)
            # remove handler when done or cancelled
            fut.add_done_callback(lambda _: self.remove_command_handler(endofmotd))
            fut.add_done_callback(lambda _: self.remove_command_handler(errnomotd))
            return fut
        end_motd = await_motd()
        await nick
        await user
        self._log.debug("Waiting for the end of the MOTD")
        await end_motd
        self._log.debug("End of the MOTD found, running handlers")
        # `cc.RPL_ISUPPORT` is either done or not available
        self.remove_command_handler(feature_list)
        # Nick chosen by now
        self.remove_command_handler(nick_in_use)
        self._connected = True
        for handler in self._on_connected_handlers:
            try:
                await handler()
            except:
                self._log.exception("Connect handler {} raised exception".format(handler))

    def _resolve_sender(self, prefix: str) -> User:
        """Map a message prefix to a User, or None for server-originated messages."""
        if "!" in prefix and "@" in prefix:
            return self.get_user(prefix)
        # message probably sent by the server
        return None

    def get_user(self, nick: str) -> User:
        """
        :param nick: nick or prefix
        """
        hostmask = None
        if "!" in nick:
            nick, _, hostmask = nick.partition("!")
        user = self._users.get(nick)
        if not user:
            self._users[nick] = user = User(nick, self, hostmask=hostmask)
        elif not user.hostmask:
            # Fill in the hostmask lazily once a prefix reveals it.
            user.hostmask = hostmask
        return user

    def get_channel(self, name: str) -> Channel:
        """Return the cached Channel for *name*, creating it on first use."""
        ch = self._channels.get(name)
        if not ch:
            self._channels[name] = ch = Channel(name, self)
        return ch

    def _resolve_recipient(self, recipient: str) -> Union[User, Channel]:
        """Classify a message target as Channel (by prefix char) or User."""
        if recipient[0] in self._channel_types:
            return self.get_channel(recipient)
        return self.get_user(recipient)

    async def join(self, channel: str, block: bool=False) -> Channel:
        """Join *channel*; with ``block=True``, wait for and return the Channel."""
        if block:
            fut = asyncio.Future()

            @self.on_join(channel)
            async def waiter(channel_obj):
                self.remove_join_handler(waiter)
                fut.set_result(channel_obj)
        self._log.debug("Joining channel {}".format(channel))
        await self._send(cc.JOIN, channel)
        if block:
            return await fut

    async def _on_join(self, msg: IrcMessage) -> None:
        """Handle JOIN: track other users, or gather the name list for our own join."""
        channel = self.get_channel(msg.args[1])
        user = self.get_user(msg.prefix)
        if user.name != self.nick:
            channel.users.add(user)
            self._log.info("{} joined channel {}".format(user, channel))
            return
        # Our own join: collect RPL_NAMREPLY name lists until RPL_ENDOFNAMES.
        # TODO: make less ugly
        @self.on_command(cc.RPL_NAMREPLY, self.nick, "=", channel.name)
        @self.on_command(cc.RPL_NAMREPLY, self.nick, "*", channel.name)
        @self.on_command(cc.RPL_NAMREPLY, self.nick, "@", channel.name)
        async def gather_nicks(msg):
            for nick in msg.args[-1].strip().split(" "):
                # Strip a leading mode prefix (e.g. @, +) if present.
                mode = self._prefix_map.get(nick[0], None)
                if mode:
                    nick = nick[1:]
                user = self.get_user(nick)
                # TODO: channel_user = ChannelUser(user, mode, channel)
                channel.users.add(user)
        # register a handler for waiting because we can't block in a command handler
        @self.on_command(cc.RPL_ENDOFNAMES, self.nick, channel.name)
        async def join_finished(msg):
            self.remove_command_handler(gather_nicks)
            self.remove_command_handler(join_finished)
            self._log.info("Joined channel {}".format(channel))
            for jh in self._on_join_handlers:
                if not jh.channel or jh.channel == channel.name:
                    self._bg(jh.handler(channel))

    async def part(self, channel: str, reason: str=None, block: bool=None) -> None:
        """Leave *channel*; with ``block=True``, wait until the PART is echoed."""
        if block:
            part_done = self.await_command(cc.PART, channel)
        await self._send(cc.PART, channel, reason)
        if block:
            await part_done

    async def quit(self, reason: str=None) -> Channel:
        """Send QUIT with an optional reason."""
        await self._send(cc.QUIT, reason)

    def add_module(self, module: 'Module'):
        """Attach a Module, replaying any registrations it buffered."""
        self._modules.append(module)
        module._populate(self)

    async def _on_quit(self, msg: IrcMessage) -> None:
        """Handle QUIT: drop the user from every channel and the user cache."""
        user = self.get_user(msg.prefix)
        for channel in self._channels.values():
            channel.users.discard(user)
        del self._users[user.name]
        self._log.info("{} has quit: {}".format(user, msg.args[-1]))

    async def _on_part(self, msg: IrcMessage) -> None:
        """Handle PART: drop the user from that channel's user set."""
        user = self.get_user(msg.prefix)
        channel = self.get_channel(msg.args[1])
        channel.users.remove(user)
        self._log.info("{} has left {}: {}".format(user, channel, msg.args[-1]))

    async def _on_nick(self, msg: IrcMessage) -> None:
        """
        Nick change
        """
        user = self.get_user(msg.prefix)
        old_nick = user.name
        del self._users[old_nick]
        user.name = msg.args[1]
        if old_nick == self.nick:
            # (Forced?) Nick change for ourself
            self.nick = user.name
        self._users[user.name] = user
        self._log.info("{} changed their nick from {} to {}".format(user, old_nick, user.name))
class Module(metaclass=LoggerMetaClass):
    """A pluggable bot component.

    A Module can be created before its Client exists; registration calls
    (``on_message`` & co.) and channel lookups made in that window are
    buffered and replayed once ``Client.add_module`` populates it.
    """

    class ChannelProxy(metaclass=LoggerMetaClass):
        """Stand-in for a Channel that is not available yet.

        Buffers ``on_*`` decorator registrations until the real channel is
        populated; afterwards transparently forwards attribute access.
        """

        def __init__(self, name: str, module: 'Module'):
            self.name = name
            self._module = module
            self._channel = None
            self._buffered_calls = []

        def _populate(self, channel):
            """Populate proxy with the real channel when available."""
            self._channel = channel
            for fn in self._buffered_calls:
                self._log.debug("Executing buffered call {}".format(fn))
                fn()

        def _buffer_call(self, callable):
            self._buffered_calls.append(callable)

        def __getattr__(self, method):
            if self._channel:
                # Real channel present: forward everything to it.
                return getattr(self._channel, method)
            if not method.startswith("on_"):
                raise AttributeError(method)

            def on_anything(*args, **kwargs):
                def decorator(fn):
                    self._log.debug("Cannot execute method {}(*{}, **{}) now, buffering".format(method, args, kwargs))
                    self._buffer_call(lambda: getattr(self._channel, method)(*args, **kwargs)(fn))
                    return fn
                return decorator
            return on_anything

    def __init__(self, name: str):
        self.module_name = name
        # set by the Client via add_module()
        self.client = None
        """:type: Client"""
        self._buffered_calls = []

    def _populate(self, client):
        """Populate module with the client when available."""
        self.client = client
        for fn in self._buffered_calls:
            self._log.debug("Executing buffered call {}".format(fn))
            fn()

    def _buffer_call(self, callable):
        self._buffered_calls.append(callable)

    def get_channel(self, name: str) -> Union[Channel, 'Module.ChannelProxy']:
        """Return the named Channel, or a buffering ChannelProxy if no client yet."""
        if self.client:
            return self.client.get_channel(name)
        self._log.debug("Cannot get channel {} now, returning proxy".format(name))
        proxy = self.ChannelProxy(name, self)
        self._buffer_call(lambda: proxy._populate(self.client.get_channel(name)))
        return proxy

    def __getattr__(self, method):
        if method not in ("on_connected", "on_message", "on_command", "on_join"):
            raise AttributeError(method)
        if self.client:
            # Bug fix: this used to call the decorator factory with undefined
            # *args/**kwargs (a guaranteed NameError once a client attached);
            # the factory itself must be returned so the caller applies it.
            return getattr(self.client, method)

        def on_anything(*args, **kwargs):
            def decorator(fn):
                self._log.debug("Cannot execute method {}(*{}, **{}) now, buffering".format(method, args, kwargs))
                self._buffer_call(lambda: getattr(self.client, method)(*args, **kwargs)(fn))
                return fn
            return decorator
        return on_anything
| StarcoderdataPython |
12820167 | <reponame>sandeepb2003/neural
# coding: utf-8
import random
import sys
from werkzeug.datastructures import FileStorage
from flask import current_app
from flask.ext.admin import form
from flask.ext.admin.form.upload import ImageUploadInput
from flask.ext.admin._compat import urljoin
from quokka.core.models import SubContent, SubContentPurpose
from quokka.modules.media.models import Image
# Python 3 has no `unicode` builtin; emulate it so the py2-era isinstance
# checks below keep working.
if sys.version_info.major == 3:
    unicode = lambda x: u'{}'.format(x)  # noqa
class ThumbWidget(ImageUploadInput):
    """Admin form widget that renders only the image thumbnail, no file input."""
    empty_template = ""
    data_template = ('<div class="image-thumbnail">'
                     ' <img %(image)s>'
                     '</div>')

    def get_url(self, field):
        """Return the URL used for the widget's <img> tag."""
        # NOTE(review): `filename` (thumbnail name joined with the relative
        # URL path) is computed but the raw `field.data` is returned instead;
        # the computation is effectively dead code. Confirm whether the
        # intent was `return filename`.
        if field.thumbnail_size:
            filename = field.thumbnail_fn(field.data)
        else:
            filename = field.data
        if field.url_relative_path:
            filename = urljoin(field.url_relative_path, filename)
        return field.data
class ThumbField(form.ImageUploadField):
    """Image upload field displayed with the thumbnail-only widget."""
    widget = ThumbWidget()
class ImageUploadField(form.ImageUploadField):
    """Upload field whose allowed extensions may come from the app config."""

    def is_file_allowed(self, filename):
        """Check the extension, expanding config-driven extension lists first.

        If ``allowed_extensions`` is an UPPERCASE string it is treated as a
        Flask config key; the configured items are expanded into both lower-
        and upper-case variants before delegating to the parent check.
        """
        extensions = self.allowed_extensions  # noqa
        if isinstance(extensions, (str, unicode)) and extensions.isupper():
            items = current_app.config.get(extensions, extensions)
            merged_items = [
                item.lower() for item in items
            ] + [item.upper() for item in items]
            self.allowed_extensions = merged_items
        return super(ImageUploadField, self).is_file_allowed(filename)
class ContentImageField(ImageUploadField):
    """Upload field that stores the image as a standalone Image document
    and attaches it to the owning content as a SubContent entry."""
    def populate_obj(self, obj, name):
        # Intentionally a no-op: the image is persisted separately in
        # save_contents() rather than written onto ``obj`` directly.
        pass
    def save_contents(self, obj):
        # field = getattr(obj, name, None)
        # if field:
        #     # If field should be deleted, clean it up
        #     if self._should_delete:
        #         self._delete_file(field)
        #         setattr(obj, name, None)
        #         return
        # Create a new Image document named and slugged after the owner.
        new_image = Image(
            title=u"Image: {0}".format(obj.title),
            slug=u"{0}-{1}".format(obj.slug, random.getrandbits(8)),
            channel=Image.get_default_channel(),
            published=True
        )
        if self.data and isinstance(self.data, FileStorage):
            # if field:
            #     self._delete_file(field)
            filename = self.generate_name(new_image, self.data)
            filename = self._save_file(self.data, filename)
            setattr(new_image, 'path', filename)
            new_image.save()
        # First image becomes 'mainimage'; subsequent ones plain 'image'.
        if obj.contents.filter(identifier='mainimage'):
            purpose = SubContentPurpose.objects.get(identifier='image')
        else:
            purpose = SubContentPurpose.objects.get(
                identifier='mainimage'
            )
        # NOTE(review): the SubContent is appended even when no file was
        # uploaded (new_image then never saved) -- confirm this is the
        # intended behaviour for empty submissions.
        subcontent = SubContent(
            content=new_image,
            purpose=purpose,
        )
        obj.contents.append(subcontent)
        obj.save()
| StarcoderdataPython |
5189572 | <filename>tests/modules/utils.py
import re
import os
import json
from zomato.zomato import Zomato
def do_init(instance="common"):
    """Build a Zomato client and return the requested sub-API handle.

    ``instance`` selects one of ``common``, ``location`` or (by default
    for anything else) ``restaurant``.
    """
    # NOTE(review): API key is hard-coded in the test suite; consider
    # moving it to an environment variable.
    z = Zomato(API_KEY="e74778cd3728858df3578092ecea02cf")
    which = instance.lower()
    if which == "common":
        return z.common
    if which == "location":
        return z.location
    return z.restaurant
def get_test_data(filename="testingdata/categories.json"):
    """Load and return JSON fixture data.

    ``filename`` is resolved relative to this module's directory, so the
    tests work regardless of the current working directory.
    """
    directory = os.path.dirname(__file__)
    filename = os.path.join(directory, filename)
    # ``with`` closes the file automatically; the explicit close() in the
    # original body was redundant.
    with open(filename) as jsonfile:
        data = json.load(jsonfile)
    return data
def convert_to_snake_case(name):
    """Convert a CamelCase identifier to snake_case."""
    # First split an uppercase run from the capitalised word that follows,
    # then split any lower/digit-to-upper boundary, and lowercase the lot.
    partially_split = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', partially_split).lower()
1841847 | #!/bin/sh
""":" .
exec python "$0" "$@"
"""
# -*- coding: utf-8 -*-
"""
Copyright (c) 2018 beyond-blockchain.org.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
import argparse
from bbc1.core import bbclib
# File names used to persist the generated key pair in the current directory.
PRIVATE_KEY = ".private_key"
PUBLIC_KEY = ".public_key"
def create_keypair():
    """Generate a fresh key pair and write both halves to disk."""
    pair = bbclib.KeyPair()
    pair.generate()
    # Persist each key as raw bytes: private first, then public.
    for path, material in ((PRIVATE_KEY, pair.private_key),
                           (PUBLIC_KEY, pair.public_key)):
        with open(path, "wb") as fout:
            fout.write(material)
    print("created private_key and public_key : %s, %s" % (PRIVATE_KEY, PUBLIC_KEY))
def argument_parser():
    """Parse the command line; only option is -u/--username."""
    parser = argparse.ArgumentParser(description='Generate an user_id')
    parser.add_argument('-u', '--username', action='store', help='username')
    return parser.parse_args()
if __name__ == '__main__':
    # Refuse to clobber an existing key pair.
    if os.path.exists(PRIVATE_KEY):
        print("Private key file already exists.")
        sys.exit(1)
    parsed_args = argument_parser()
    if parsed_args.username is None:
        print("Usage: $0 -u username")
        sys.exit(1)
    create_keypair()
    # Derive a deterministic user id from the user name (no timestamp, so
    # the same name always maps to the same id).
    user_id = bbclib.get_new_id(parsed_args.username, include_timestamp=False)
    # Record the name/id mapping as a small JSON-like file.
    with open("ID_FILE", "w") as f:
        f.write('{\n "name": "%s",\n "id": "%s"\n}\n' % (parsed_args.username, user_id.hex()))
| StarcoderdataPython |
9729383 | from django.db import models, migrations
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
    """Collapse the polymorphic TypeDeParente hierarchy.

    Drops the old polymorphic base/child models, repoints the Parente
    foreign keys at the temporary ``*2`` replacement models, and then
    renames those replacements back to the canonical names.  The
    operation order is significant and must not be changed.
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('libretto', '0017_migrate_types_de_parente'),
    ]
    operations = [
        # Tear down the old polymorphic models.
        migrations.AlterUniqueTogether(
            name='typedeparente',
            unique_together=None),
        migrations.RemoveField(
            model_name='typedeparente',
            name='owner'),
        migrations.RemoveField(
            model_name='typedeparente',
            name='polymorphic_ctype'),
        migrations.RemoveField(
            model_name='typedeparentedindividus',
            name='typedeparente_ptr'),
        migrations.RemoveField(
            model_name='typedeparentedoeuvres',
            name='typedeparente_ptr'),
        # Repoint the Parente FKs at the temporary replacement models.
        migrations.AlterField(
            'ParenteDIndividus', 'type',
            models.ForeignKey(
                'TypeDeParenteDIndividus2', related_name='parentes',
                verbose_name='type', on_delete=models.PROTECT)),
        migrations.AlterField(
            'ParenteDOeuvres', 'type',
            models.ForeignKey(
                'TypeDeParenteDOeuvres2', related_name='parentes',
                verbose_name='type', on_delete=models.PROTECT)),
        # Delete the old models, then rename the replacements into place.
        migrations.DeleteModel(name='TypeDeParenteDIndividus'),
        migrations.DeleteModel(name='TypeDeParenteDOeuvres'),
        migrations.DeleteModel(name='TypeDeParente'),
        migrations.RenameModel(old_name='TypeDeParenteDIndividus2',
                               new_name='TypeDeParenteDIndividus'),
        migrations.RenameModel(old_name='TypeDeParenteDOeuvres2',
                               new_name='TypeDeParenteDOeuvres'),
        #
        # Automatic update (auto-generated operations below)
        #
        migrations.AlterField(
            model_name='typedeparentedindividus',
            name='owner',
            field=models.ForeignKey(related_name='typedeparentedindividus', on_delete=django.db.models.deletion.PROTECT, verbose_name='propri\xe9taire', blank=True, to=settings.AUTH_USER_MODEL, null=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='typedeparentedoeuvres',
            name='owner',
            field=models.ForeignKey(related_name='typedeparentedoeuvres', on_delete=django.db.models.deletion.PROTECT, verbose_name='propri\xe9taire', blank=True, to=settings.AUTH_USER_MODEL, null=True),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='typedeparentedindividus',
            unique_together=set([('nom', 'nom_relatif')]),
        ),
        migrations.AlterUniqueTogether(
            name='typedeparentedoeuvres',
            unique_together=set([('nom', 'nom_relatif')]),
        ),
    ]
| StarcoderdataPython |
8086194 | <filename>config/measurement_config.py
"""The measurement configuration file consists of the parameters required when inferring with point cloud data obtained from measurement systems
:param ms_parameters['measurement_files']: List of measurement files obtained from the measurement system; currently the model is configured to process data (tab-delimited output files with features and surface points) from the WLS400 Hexagon 3D Optical scanner. Refer to https://www.hexagonmi.com/products/white-light-scanner-systems/hexagon-metrology-wls400a for more details
:type ms_parameters['measurement_files']: list (required)
"""
ms_parameters = {
'measurement_files':['MC1.txt','MC2.txt']
} | StarcoderdataPython |
84957 | <reponame>isabella232/nnabla
# Copyright 2021 Sony Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
import pytest
import copy
import numpy as np
import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF
import refs
from nbla_test_utils import list_context
from nbla_test_utils import function_tester
ctxs = list_context('DeformableConvolution')
def ref_deformable_convolution_2d(x, w, offset, mask, b, base_axis, pad, stride,
                                  dilation, group, deformable_group, channel_last):
    """NumPy reference for batched 2D deformable convolution.

    Flattens all leading batch dimensions (everything before ``base_axis``),
    runs the per-sample reference from ``refs`` on each sample, and stacks
    the results back into the original batch shape.  ``mask`` may be None,
    in which case an all-ones modulation mask is used.
    """
    if channel_last:
        assert False, "channel_last=True is not supported in ref_deformable_convolution_2d."
    assert x.shape[0:base_axis] == offset.shape[0:base_axis], "Batch sizes do not match."
    # Compute deformable convolution for each batch.
    y = []
    # Flatten the batch dimensions to pass it the reference function.
    ext_x = x.reshape((-1,) + x.shape[base_axis:])
    ext_offset = offset.reshape((-1,) + offset.shape[base_axis:])
    if mask is None:
        # No modulation: use a mask of ones with one entry per
        # deformable-group kernel tap (kernel taken from w's last two axes).
        mask_shape = x.shape[0:base_axis] + \
            (deformable_group * w.shape[2] *
             w.shape[3],) + x.shape[base_axis + 1:]
        mask = np.ones(mask_shape).astype(np.float32)
    ext_mask = mask.reshape((-1,) + mask.shape[base_axis:])
    for xx, oo, mm in zip(ext_x, ext_offset, ext_mask):
        # [np.newaxis] adds the sample axis back so vstack re-batches.
        y += [refs.deformable_convolution_2d(xx, w, oo, mm, b, pad, stride, dilation,
                                             group, deformable_group, channel_last)[np.newaxis]]
    y = np.vstack(y)
    return y.reshape(x.shape[:base_axis] + y.shape[1:])
@pytest.mark.parametrize("ctx, func_name", ctxs)
@pytest.mark.parametrize("seed", [313])
@pytest.mark.parametrize("inshape, kernel, out_channels, pad, stride, dilation, group, deformable_group, with_bias", [
    # ((2, 4, 10, 10), (3, 2), 4, (0, 0), (1, 1), (1, 1), 1, 2, False),  # To reduce test time
    ((2, 4, 6, 6), (3, 2), 4, (0, 0), (1, 1), (1, 1), 2, 2, True),
    ((2, 2, 5, 7), (3, 3), 2, (1, 1), (1, 2), (2, 1), 1, 1, True),
    ((2, 2, 5, 7), (3, 3), 2, (1, 1), (1, 2), (2, 1), 1, 2, False),
    ((2, 2, 5, 7), (3, 3), 2, (1, 1), (1, 2), (2, 1), 2, 1, False),
])
@pytest.mark.parametrize("with_mask", [True, False])
@pytest.mark.parametrize("channel_last", [True, False])
@pytest.mark.parametrize("base_axis", [1, -3])
def test_forward_backward_2d(inshape, kernel, out_channels, pad, stride, dilation, group,
                             deformable_group, with_mask, channel_last, with_bias, base_axis, seed, ctx, func_name):
    """Check F.deformable_convolution forward/backward against the NumPy
    reference implementation via function_tester."""
    if channel_last:
        pytest.skip(
            'channel_last=True is not supported in any backends so far.')
    import platform
    if platform.machine().startswith("arm"):
        pytest.skip('Skip the arm platform temporarily.')
    rng = np.random.RandomState(seed)
    # Create arguments
    func_args = [base_axis, pad, stride, dilation, group,
                 deformable_group, channel_last]
    # Compute shapes
    in_channels = inshape[base_axis]
    kshape = (out_channels, in_channels // group) + kernel
    offset_channels = 2 * deformable_group * kernel[0] * kernel[1]
    offset_shape = inshape[0:base_axis] + \
        (offset_channels,) + inshape[base_axis + 1:]
    mask_shape = inshape[0:base_axis] + \
        (deformable_group * kernel[0] * kernel[1],) + inshape[base_axis + 1:]
    if channel_last:
        t = refs.ChannelLastToFirstTranspose(len(inshape), len(kernel))
        inshape = tuple(inshape[i] for i in t.inv_axes)
        t = refs.ChannelLastToFirstTranspose(len(offset_shape), len(kernel))
        offset_shape = tuple(offset_shape[i] for i in t.inv_axes)
        t = refs.ChannelLastToFirstTranspose(len(kshape), len(kernel))
        kshape = tuple(kshape[i] for i in t.inv_axes)
    # Create inputs
    x = rng.randn(*inshape).astype(np.float32)
    w = rng.randn(*kshape).astype(np.float32)
    b = rng.randn(out_channels).astype(np.float32) if with_bias else None
    # Because numerical gradient cannot be calculated correctly
    # near the input boundary, offsets are generated to avoid this case.
    # 1. Generate offsets in [-1.9, 1.9].
    offsets = (3.8 * rng.rand(*offset_shape).astype(np.float32)) - 1.9
    # 2. Adhoc remove the values dstep-neighborhood of {-1, 0, 1}; selecting bad
    # values as 0.1-neighborhood (large enough dstep-neighborhood) and shifting
    # them +0.5 (must larger than 2 * dstep).
    # BUG FIX: ``np.int`` was removed in NumPy 1.24; use the builtin ``int``
    # (same semantics) so this runs on modern NumPy.
    offsets += np.logical_or(np.abs(offsets - np.floor(offsets)) < 0.1,
                             np.abs(offsets - np.ceil(offsets)) < 0.1).astype(int)*0.5
    mask = rng.rand(*mask_shape).astype(np.float32) if with_mask else None
    inputs = [x, w, offsets, mask, b]
    # Test
    atol_half = 1.0 if in_channels > 64 else 1.5e-1
    function_tester(rng, F.deformable_convolution,
                    ref_deformable_convolution_2d, inputs, func_args,
                    atol_f=1e-4, atol_b=1e-2, atol_accum=1e-5, dstep=1e-2,
                    ctx=ctx, func_name=func_name, atol_half=atol_half)
| StarcoderdataPython |
9681106 | #!/usr/bin/python
import base
import string
import re
import sidetrack
from string import upper
import sys
if sys.platform != "win32":
import readline
import os
# Module-level interpreter state shared across the command-loop helpers.
targ = None     # currently selected target record
imp = None      # currently selected implant object
running = 0     # main-loop flag; set to 1 once connected to STTUNCTL
#-----------------------------------------------------------------------------
# Name : ReadCommand
# Purpose: Prompt the user for some data and return the result
# Receive: prompt - The prompt to display asking the user for data
# Return : The input the user provided
#-----------------------------------------------------------------------------
def ReadCommand(prompt):
    """Prompt the user and return the entered line, or None on any error.

    An input failure (EOF, interrupt, ...) also clears the module-level
    ``running`` flag so the main loop can shut down cleanly.
    """
    # BUG FIX: ``running = 0`` previously created a dead local variable;
    # declare the module-level flag so the assignment takes effect.
    global running
    try:
        return raw_input(prompt)
    except:
        running = 0
        return None
#-----------------------------------------------------------------------------
# Name : ListTargets
# Purpose: Print a listing of all the targets from the registered in targets.py
# Receive: << nothing >>
# Return : << nothing >>
#-----------------------------------------------------------------------------
def ListTargets(pattern):
    # Print every registered target; ``pattern`` is an optional regex
    # matched against the target key (None lists everything).
    print "Targets:\n------------------------------------------"
    for targ in base.targetDict.keys():
        if not pattern or re.search(pattern,targ) != None:
            print "%15s - %15s (%s)" % \
                (targ, base.targetDict[targ].host, base.targetDict[targ].ip)
#-----------------------------------------------------------------------------
# Name : GetTarget
# Purpose: Asks the user to select a target, then looks up the record for it
# Receive: << nothing >>
# Return : The target object
#-----------------------------------------------------------------------------
def GetTarget():
    # Loop until the user picks a valid target key or aborts input
    # (None return).  '?' (optionally followed by a regex) lists targets.
    while 1:
        projName = ReadCommand("Select a target (? for list): ")
        if projName == None:
            return None
        projName = base.SplitCommandString(projName)
        if not len(projName):
            return None
        elif projName[0] == '?' and len(projName) > 1:
            # '? pattern' -- filtered listing (pattern upper-cased to
            # match the upper-case target keys).
            ListTargets(upper(projName[1]))
            continue
        elif projName[0] == '?':
            ListTargets(None)
            continue
        else:
            try:
                return base.targetDict[upper(projName[0])]
            except:
                print "Invalid target press ? for list"
                continue
#-----------------------------------------------------------------------------
# Name : ListImplants
# Purpose: Output a listing of all registered implants for a target
# Receive: targ - The target containing the list
# Return : << nothing >>
#-----------------------------------------------------------------------------
def ListImplants(targ):
    # Print the names of all implants registered on the given target.
    print "Implants:\n---------"
    for imp in targ.implantList.keys():
        print imp
#-----------------------------------------------------------------------------
# Name : GetImplant
# Purpose: Prompt the user to select an implant, then return the implant obj.
# Receive: targ - The target with a list of implants
# Return : The implant object for the implant the user selected
#-----------------------------------------------------------------------------
def GetImplant(targ):
    # Loop until the user names a valid implant ('?' lists the target's
    # implants); returns None if input is aborted.
    while 1:
        impName = ReadCommand("Select an implant (? for list): ")
        if impName == None:
            return None
        elif impName[:1] == '?':
            ListImplants(targ)
            continue
        else:
            try:
                return base.GetImplant(impName)
            except:
                print "Invalid implant press ? for list"
                continue
#-----------------------------------------------------------------------------
# Name : ListCommands
# Purpose: List all command avaliable to the user from within the interface
# Receive: session - A session object containing the target and implant
# Return : << nothing >>
#-----------------------------------------------------------------------------
def ListCommands(session):
    # Print every command the session's implant exposes, followed by the
    # interface's own built-in commands.
    print "Commands:\n---------"
    for cmd in session.implant.commands.keys():
        cmd_ = session.GetCommand(cmd)
        print "%10s - %s" % (cmd_.name, cmd_.info)
    cmd_ = None
    # Don't forget to print our commands
    print "%10s - %s" % ("exit", "exit the LP")
    print "%10s - %s" % ("new", "select a new target")
#-----------------------------------------------------------------------------
# Name : ProcessArg
# Purpose: Tests to see if the argument is a string or number
# Receive: arg - The argument to test
# Return : The original string if a number, or a quoted string if not
#-----------------------------------------------------------------------------
def ProcessArg(arg):
    """Render a CLI token as Python-literal source text.

    Numeric tokens (decimal or hex) pass through unchanged so the later
    exec() in ProcessCommand re-parses them as real numbers.  ``$name``
    tokens are replaced by the stored variable's value (quoted if it is a
    string, rendered with %d otherwise); everything else is wrapped in
    double quotes.
    """
    # BUG FIX: the hex pattern was unanchored at the end, so junk such as
    # "0x1Fzz" leaked through unquoted; anchor it with '$'.  Patterns are
    # raw strings to avoid the invalid '\.' escape warning.
    # NOTE(review): the decimal pattern also matches "" and "-"; callers
    # never pass empty tokens today, but worth tightening.
    if (re.match(r'^-?[0-9]*(\.[0-9]+)?$', arg) != None or \
        re.match(r'^0x[0-9a-fA-F]+L?$', arg) != None):
        return arg
    else:
        if arg[0:1] == '$':
            arg = base.variableDict[arg[1:]]
        if type(arg) != type(""):
            return "%d" % (arg)
        return '"' + arg + '"'
#-----------------------------------------------------------------------------
# Name : ProcessCommand
# Purpose: Process the command line args and execute the command
# Receive: cmd - The command object
# args - The command line arguments for the command
# Return : The result of processing the command
#-----------------------------------------------------------------------------
def ProcessCommand(cmd, args, assignName):
    # Build the source text "myVar = cmd.run(<arg>, <arg>, ...)" from the
    # raw CLI tokens (args[0] is the target, args[1] the command name).
    # ProcessArg renders numbers bare and strings quoted, so the exec()
    # below is load-bearing: it re-parses the tokens into real Python
    # values before invoking the command.
    argString = 'myVar = cmd.run('
    for i in range(2,len(args)):
        if i == 2:
            argString = argString + ProcessArg(args[i])
        else:
            argString = argString + ", " + ProcessArg(args[i])
    argString = argString + ')'
    base.db(3, argString)
    exec(argString)
    # "name = target cmd ..." stores the first element of the result under
    # the given variable name for later '$name' expansion.
    if assignName != None:
        base.variableDict[assignName] = myVar[0]
        base.db(3, "Assigned %d to %s" % (myVar[0], assignName))
    return myVar
def newSession(targname):
    """Interactively pick a target and build a SIDETRACK session for it.

    Returns None when target selection is aborted.
    """
    chosen = GetTarget()
    if chosen is None:
        return None
    implant = base.GetImplant("SIDETRACK")
    new_session = base.Session(chosen, implant)
    new_session.name = targname
    return new_session
def SetCommand(args):
    # 'set' with no arguments shows the current settings; 'set debug N'
    # changes the global debug level.
    if len(args) == 1:
        print "Current settings"
        print "-------------------------"
        print " debug = %d/5" % (base.dblevel)
        return
    if args[1] == 'debug':
        base.dblevel = eval(args[2])
        return
    return
#-----------------------------------------------------------------------------
# Name : GetCommand
# Purpose: Prompt the user to enter a command, then lookup the command object
# Receive: session - The structure containing both the implant and target objs.
# Return : The result of executing the command
#-----------------------------------------------------------------------------
def GetCommand():
    # Main command dispatcher: reads one line, handles built-ins (shell
    # escape, help, set, targets, variable display), then routes
    # "<target> <command> [args...]" to the matching session/command.
    # Returns the command result, or None when the user wants to exit.
    global more_commands
    while 1:
        assignName = None
        cmdLine = ReadCommand("SIDETRACK# ")
        if cmdLine == None:
            return None
        # Parse the command line into the args
        args = base.SplitCommandString(cmdLine)
        # See what command to run
        if len(args) == 0:
            continue
        # Anything starting with E/Q (exit/quit) terminates the loop.
        if upper(args[0])[:1] == 'E' or upper(args[0])[:1] == 'Q':
            return None
        # '!' shell escape: '!' alone spawns $SHELL, '!cmd ...' runs cmd
        # ('cd' is handled in-process so it affects this process's cwd).
        if args[0][:1] == '!':
            cmd = ""
            if len(args[0]) > 1:
                cmd = " " + args[0][1:]
            elif len(args) == 1:
                os.system(os.environ["SHELL"])
                continue
            for i in range(1,len(args)):
                cmd = cmd + " " + args[i]
            cmd = cmd[1:]
            try:
                base.db(2, cmd)
                if string.split(cmd)[0] == "cd":
                    os.chdir(cmd[3:])
                else:
                    os.system(cmd)
            except Exception, message:
                print message
            continue
        # '#' comment lines are ignored.
        if args[0][:1] == "#":
            continue
        if upper(args[0])[:1] == 'H' or args[0] == '?':
            print "Base Commands"
            print "----------------------"
            print "  help    - This display"
            print "  targets - List the current targets"
            print "  set     - Set various global options (type 'set' to see the options)"
            print "  exit    - Terminates the application"
            print "  quit    - Same as exit"
            print ""
            print "For help with target Command and Control enter: t# help   (where # is the\n target number)"
            print "For help connecting to a SIDETRACK target enter: t# help connect"
            continue
        # '$name' prints a stored variable.
        if args[0][0:1] == '$':
            try:
                print base.variableDict[args[0][1:]]
            except:
                print "Variable %s undefined" % (args[0][1:])
        if upper(args[0]) == 'SET':
            SetCommand(args)
            continue
        # 'ta...' (targets) lists the open sessions.
        if upper(args[0])[:2] == 'TA':
            keys = base.sessionDict.keys()
            for i in range(len(keys)):
                if keys[i] != 'me' and base.sessionDict[keys[i]] != None:
                    print " %-6s - %s(%s)" % (keys[i], base.sessionDict[keys[i]].target.ip, base.sessionDict[keys[i]].target.name)
            continue
        elif len(args) == 1:
            continue
        elif len(args) >= 3 and args[1] == "=":
            # "name = target cmd ...": remember the variable name and
            # shift the remaining tokens down so args[0] is the target.
            base.db(2, "Assignment statement")
            assignName = args[0]
            args2 = args
            args = {}
            for i in range(len(args2)-2):
                args[i] = args2[i+2]
        # Look up (or lazily create) the session for the named target.
        try:
            exec("session = base.sessionDict['%s']" % args[0])
            if session == None:
                session = newSession(args[0])
                if session != None:
                    base.sessionDict[args[0]] = session
        except:
            session = newSession(args[0])
            if session != None:
                base.sessionDict[args[0]] = session
        if session == None:
            continue
        # Add in the commands for this interface
        if args[1] == '?' or upper(args[1][:1]) == 'H':
            if len(args) == 3:
                try:
                    cmd = session.GetCommand(args[2])
                    print args[2], "usage:"
                    print
                    print cmd.usage
                    print
                    cmd = None
                except:
                    print "Invalid command press ? for list"
                    continue
            else:
                ListCommands(session)
            continue
        # Find the command object
        cmd = session.GetCommand(args[1])
        if cmd == None:
            print "Invalid command press ? for list"
            continue
        # Run the command and return the result
        # (at debug level 5 exceptions propagate for a full traceback).
        if base.dblevel < 5:
            try:
                return ProcessCommand(cmd,args,assignName)
            except Exception,message:
                print message
                print "Invalid usage...try:"
                print cmd.usage
                continue
        else:
            return ProcessCommand(cmd,args,assignName)
#-----------------------------------------------------------------------------
# MAIN loop
#-----------------------------------------------------------------------------
# connect
# Default STTUNCTL endpoint; overridable from the command line.
host = "localhost"
port = 912
try:
    if len(sys.argv) == 2:
        host = sys.argv[1]
    elif len(sys.argv) == 3:
        # NOTE(review): with two arguments only the port is taken from
        # argv[2]; argv[1] is ignored (host stays "localhost") -- confirm
        # whether host should also be set here.  Also, eval() of a command
        # line value accepts arbitrary expressions; int() would be safer.
        port = eval(sys.argv[2])
except:
    print "Usage: %s [host [port]]\n" %(sys.argv[0])
    sys.exit()
# Keep prompting for an endpoint until the control connection succeeds.
while not running:
    try:
        base.redir.connect(host,port)
        running = 1
    except:
        print "Could not connect to STTUNCTL at %s:%d" % (host,port)
        host = ReadCommand("STTUNCTL Host: ")
        if host == None or len(host) == 0:
            sys.exit()
        port = ReadCommand("STTUNCTL Port: ")
        if port == None or len(port) == 0:
            sys.exit()
        port = eval(port)
# Command loop: run until the user exits and no sessions remain open
# (or the user confirms exiting with sessions still open).
while running:
    # Get commands
    more_commands = 1
    while more_commands:
        res = GetCommand()
        if res == None:
            running = 0
            more_commands = 0
            # Any remaining open session keeps the loop alive.
            for targ in base.sessionDict.keys():
                base.db(4,targ)
                if base.sessionDict[targ] != None and targ != "me":
                    running = 1
                    more_commands = 1
                    break
            if running == 1:
                redo = ReadCommand("\nThere are still open targets. \nAre you sure you want to exit? (y/N): " )
                if redo != None:
                    redo = upper(redo)
                    if redo[0:1] == "Y":
                        more_commands = 0
                        running = 0
            continue
        elif res[0]:
            print res[1]
        else:
            print "Failed:",res[1]
        print
4993534 | <filename>musicapp-test/musicapp/models.py
from datetime import datetime
from musicapp import db, login_manager
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
listened = db.Table('listened',
db.Column('user_id' ,db.Integer, db.ForeignKey('user.id')),
db.Column('sond_id', db.Integer, db.ForeignKey('song.id'))
)
includes = db.Table('includes',
db.Column('playlist_id', db.Integer, db.ForeignKey('user_song_playlist.id')),
db.Column('song_id', db.Integer, db.ForeignKey('song.id'))
)
subscribed = db.Table('subscribed',
db.Column('user_id', db.Integer, db.ForeignKey('user.id')),
db.Column('podcast_id', db.Integer, db.ForeignKey('podcast.id'))
)
contains = db.Table('contains',
db.Column('song_id', db.Integer, db.ForeignKey('song.id')),
db.Column('playlist_id', db.Integer, db.ForeignKey('playlist.id'))
)
composed = db.Table('composed',
db.Column('artist_id', db.Integer, db.ForeignKey('artist.id')),
db.Column('song_id', db.Integer, db.ForeignKey('song.id'))
)
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
fname = db.Column(db.String(75), nullable=False)
lname = db.Column(db.String(75), nullable=False)
username = db.Column(db.String(32), unique=True, nullable=False)
email = db.Column(db.String(128), unique=True, nullable=False)
image_file = db.Column(db.String(32), nullable=False, default='default.jpg')
#preflang = db.Column(db.String(20), nullable=False)
IsAdmin = db.Column(db.Boolean, default=False)
password = db.Column(db.String(60), nullable=False)
searches = db.relationship('user_search_history', backref=db.backref('user'))
playlists = db.relationship('user_song_playlist', backref=db.backref('user'))
songs = db.relationship('Song', secondary=listened)
posts = db.relationship('Post', backref='author', lazy=True)
podcasts = db.relationship('Podcast', secondary=subscribed)
def __repr__(self):
return f"User('{self.username}', '{self.fname}', '{self.lname}', '{self.email}', '{self.image_file}')"
class Podcast(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String, nullable=True)
title = db.Column(db.String(75), nullable=False)
description = db.Column(db.String, default='No description')
category = db.Column(db.String, nullable=False)
release_date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
file_location = db.Column(db.String(20), nullable=True)
rating = db.Column(db.Integer, default=2)
likes = db.Column(db.Integer, default=0)
dislikes = db.Column(db.Integer, default=0)
artists = db.relationship('Artist', backref=db.backref('podcast'))
def __repr__(self):
return f"Podcast('{self.name}', '{self.category}', '{self.release_date}''{self.rating}')"
class Song(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(75), nullable=False)
title = db.Column(db.String(75), nullable=False)
description = db.Column(db.String, default='No description')
#duration = db.Column(db.Float, nullable=False)
release_date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
rating = db.Column(db.Integer, default=2)
likes = db.Column(db.Integer, default=0)
dislikes = db.Column(db.Integer, default=0)
file_location = db.Column(db.String(20), nullable=False)
album = db.Column(db.String(75), nullable=False)
genre = db.Column(db.String(75), nullable=False)
def __repr__(self):
return f"Song('{self.name}', '{self.release_date}', '{self.rating}')"
class Post(db.Model):
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(100), nullable=False)
date_posted = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
content = db.Column(db.Text, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
def __repr__(self):
return f"Post('{self.title}', '{self.date_posted}')"
class Admin(db.Model):
id = db.Column(db.Integer, primary_key=True)
fname = db.Column(db.String(75), nullable=False)
lname = db.Column(db.String(75), nullable=False)
def __repr__(self):
return f"Admin('{self.fname}', '{self.lname}')"
class Artist(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(32), unique=True, nullable=False)
email = db.Column(db.String(128), unique=True, nullable=False)
podcast_id = db.Column(db.Integer, db.ForeignKey('podcast.id'))
compose = db.relationship('Song', secondary=composed, backref=db.backref('composer', lazy='dynamic'))
def __repr__(self):
return f"Artist('{self.name}', '{self.email}', '{self.id}')"
class Playlist(db.Model):
id = db.Column(db.Integer, primary_key=True)
likes = db.Column(db.Integer)
dislikes = db.Column(db.Integer)
rating = db.Column(db.Integer, default=2)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
songs = db.relationship('Song', secondary=contains)
def __repr__(self):
return f"Playlist('{self.id}', '{self.rating}')"
class user_song_playlist(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
songs = db.relationship('Song', secondary=includes)
class user_search_history(db.Model):
id = db.Column(db.Integer, primary_key=True)
datetime = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
searchno = db.Column(db.Integer, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
| StarcoderdataPython |
4962966 | <filename>UserKnox/ApiKnox/apps.py
from django.apps import AppConfig
class ApiknoxConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'ApiKnox'
| StarcoderdataPython |
391355 | """General tests to demonstrate the parametric suite. Possible arguments are given below (defaults).
To test more than one option, pass in an Iterable of requested options.
All Parametric Groups
---------------------
'group_type': Controls which type of ParametricGroups to test. Will test all groups if not specified
'local_vector_class': One of ['default', 'petsc'], which local vector class to use for the problem. ('default')
'assembled_jac': bool. If an assembled jacobian should be used. (True)
'jacobian_type': One of ['matvec', 'dense', 'sparse-csc']. How the Jacobians are used.
Controls the type of AssembledJacobian. ('matvec')
- 'matvec': Uses compute_jacvec_product.
- 'dense': Uses an ndarray.
- 'sparse-csc': Uses a Compressed Sparse Col sparse format.
CycleGroup ('group_type': 'cycle')
----------------------------------
'connection_type': One of ['implicit', 'explicit']. If connections are done explicitly or through
promotions ('implicit').
'partial_type': One of ['array', 'sparse', 'aij']. How the component partial derivatives are
specified ('array').
- 'array': Uses an ndarray.
- 'sparse': Uses the Scipy CSR sparse format.
- 'aij': Uses the [values, rows, cols] format.
'partial_method': str. How derivatives should be solved.
Approximated with finite differences, (fd, cs) OR
solved for analytically, (exact).
'num_comp': int. Number of components to use. Must be at least 2. (2)
'num_var': int. Number of variables to use per component. Must be at least 1. (3)
'var_shape': tuple(int). Shape to use for each variable. (2, 3).
"""
import unittest
from openmdao.test_suite.parametric_suite import parametric_suite
from openmdao.utils.assert_utils import assert_near_equal
class ParameterizedTestCases(unittest.TestCase):
"""Demonstration of parametric testing using the full test suite."""
@parametric_suite('*')
def test_openmdao(self, test):
test.setup()
problem = test.problem
root = problem.model
expected_values = root.expected_values
if expected_values:
actual = {key: problem[key] for key in expected_values}
assert_near_equal(actual, expected_values, 1e-8)
error_bound = 1e-4 if root.options['partial_method'] != 'exact' else 1e-8
expected_totals = root.expected_totals
if expected_totals:
# Forward Derivatives Check
totals = test.compute_totals('fwd')
assert_near_equal(totals, expected_totals, error_bound)
# Reverse Derivatives Check
totals = test.compute_totals('rev')
assert_near_equal(totals, expected_totals, error_bound)
class ParameterizedTestCasesSubset(unittest.TestCase):
"""Duplicating some testing to demonstrate filters and default running."""
@parametric_suite(jacobian_type='*',
num_comp=[2, 5, 10],
partial_type='aij',
run_by_default=True)
def test_subset(self, param_instance):
param_instance.setup()
problem = param_instance.problem
model = problem.model
expected_values = model.expected_values
if expected_values:
actual = {key: problem[key] for key in expected_values}
assert_near_equal(actual, expected_values, 1e-8)
expected_totals = model.expected_totals
if expected_totals:
# Reverse Derivatives Check
totals = param_instance.compute_totals('rev')
assert_near_equal(totals, expected_totals, 1e-8)
# Forward Derivatives Check
totals = param_instance.compute_totals('fwd')
assert_near_equal(totals, expected_totals, 1e-8)
# Reverse Derivatives Check
totals = param_instance.compute_totals('rev')
assert_near_equal(totals, expected_totals, 1e-8)
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
1662930 | <reponame>milos-simic/neural-normality
import os
from bisect import bisect
import numpy as np
import pandas as pd
import scipy.stats
import random
import datetime
import pickle
import os
import pathlib
import traceback
from kernelgofs import kernel_normality_test, kernel_normality_test_statistic
import rpy2.robjects as robjects
from rpy2.robjects.packages import importr
from rpy2.robjects.vectors import IntVector, FloatVector
from sklearn.model_selection import train_test_split
import sklearn.metrics
def save_to_file(data, filename):
f = open(filename, "w")
for row in data:
line = ",".join([str(x) for x in row])
f.write(line + "\n")
f.close()
def load_from_file(filename):
data = []
f = open(filename, "r")
lines = f.readlines()
for line in lines:
xs = line.split(',')
xs = [float(x) for x in xs]
data.append(xs)
f.close()
return data
def ecdf(x, sample):
sample.sort() # sample HAS TO BE SORTED in the ASCENDING (NON-DESCENDING) order
i = bisect(sample, x)
return float(i) / len(sample)
def pseudoInvEcdf(sample, p):
    """Pseudo-inverse of the ECDF: the smallest element of `sample` whose
    empirical CDF value reaches `p`.

    `sample` must already be sorted in ascending order.
    """
    n = len(sample)
    step = 1.0 / n
    cumulative = step
    idx = 0
    while cumulative < p and idx < n:
        cumulative += step
        idx += 1
    # Clamp to the last element when p pushes past the end of the sample.
    if idx == n:
        idx = n - 1
    return sample[idx]
def createDescriptor(sample, q):
    """Build a feature descriptor of a numeric sample.

    Returns m = int(1/q) standardized pseudo-quantiles followed by
    [n, mean, sd, min, max, median, kurtosis, skewness].

    Works on a sorted copy: the original sorted `sample` in place, silently
    reordering the caller's list.
    """
    data = sorted(sample)  # pseudoInvEcdf requires ascending order
    m = int(1 / q)
    n = len(data)
    maximum = data[-1]
    minimum = data[0]
    mean = np.mean(data)
    median = np.median(data)
    sd = np.std(data)
    kurtosis = scipy.stats.kurtosis(data)
    skewness = scipy.stats.skew(data)
    # NOTE(review): a constant sample (sd == 0) divides by zero here,
    # as in the original.
    standardized_sample = [(x - mean) / sd for x in data]
    descriptor = [pseudoInvEcdf(standardized_sample, j * q) for j in range(1, m + 1)]
    return descriptor + [n, mean, sd, minimum, maximum, median, kurtosis, skewness]
def onlyFinite(xs):
    """Return True iff no element of xs is +/-infinity or NaN."""
    return not any(np.isinf(x) or np.isnan(x) for x in xs)
def names(v):
    """Column names matching the descriptor layout produced by
    createDescriptor with q = 0.1 (ten deciles plus summary statistics).

    Fixes the original list, which ordered max before min (createDescriptor
    appends minimum, then maximum) and omitted the trailing kurtosis and
    skewness columns.
    """
    q = 10
    c = 10
    quantile_names = ["p{}%".format(q * i) for i in range(1, c + 1)]
    return quantile_names + ["n", "mean", "sd", "min", "max", "med", "kurt", "skew"]
# R (nortest) wrappers built via rpy2. Each resulting callable takes a
# numeric vector and returns the test's p-value, or 0.0 if the R call
# errors out (e.g. the sample is too small for the test).
rstring = """
function(sample){
library(nortest)
tryCatch({
res <- lillie.test(sample)
p <- res[[2]]
return(p)
}, error = function(e){
return(0.0)
})
}
"""
lf = robjects.r(rstring)  # Lilliefors test -> p-value

sf_string = """
function(sample){
library(nortest)
tryCatch({
res <- sf.test(sample)
p <- res[[2]]
return(p)
}, error = function(e){
return(0.0)
})
}
"""
sf = robjects.r(sf_string)  # Shapiro-Francia test -> p-value

cvm_string = """
function(sample){
library(nortest)
tryCatch({
res <- cvm.test(sample)
p <- res[[2]]
return(p)
}, error = function(e){
return(0.0)
})
}
"""
cvm = robjects.r(cvm_string)  # Cramer-von Mises test -> p-value
def lilliefors(sample, alpha):
    """Lilliefors test (R nortest): True if normality is not rejected at alpha."""
    p_value = lf(FloatVector(sample))[0]
    return p_value >= alpha
def shapiro_wilk(sample, alpha):
    """Shapiro-Wilk test: True if normality is not rejected at level alpha."""
    _, p_value = scipy.stats.shapiro(sample)
    return p_value >= alpha
def anderson_darling(sample, alpha):
    """Anderson-Darling normality test at significance level alpha.

    Only alpha in {0.01, 0.05, 0.1} is supported (indices into scipy's
    critical-value table, whose significance levels are
    [15, 10, 5, 2.5, 1] percent). Raises ValueError for any other alpha —
    the original crashed with UnboundLocalError instead.
    """
    index_for_alpha = {0.01: 4, 0.05: 2, 0.1: 1}
    try:
        c = index_for_alpha[alpha]
    except KeyError:
        raise ValueError(
            "alpha must be one of 0.01, 0.05, 0.1; got {}".format(alpha))
    result = scipy.stats.anderson(sample, dist='norm')
    statistic = result[0]
    critical_value = result[1][c]
    return statistic <= critical_value
def jarque_bera(sample, alpha):
    """Jarque-Bera test: True if normality is not rejected at level alpha."""
    p_value = scipy.stats.jarque_bera(sample)[1]
    return p_value >= alpha
def shapiro_francia(sample, alpha):
    """Shapiro-Francia test (R nortest): True if normality is not rejected at alpha."""
    p_value = sf(FloatVector(sample))[0]
    return p_value >= alpha
def cramer_von_mises(sample, alpha):
    """Cramer-von Mises test (R nortest): True if normality is not rejected at alpha."""
    # Pure-scipy alternative, kept from the original for reference:
    # return scipy.stats.cramervonmises(sample, 'norm').pvalue >= alpha
    p_value = cvm(FloatVector(sample))[0]
    return p_value >= alpha
def dp_test(sample, alpha):
    """D'Agostino-Pearson test: True if normality is not rejected at alpha."""
    p_value = scipy.stats.normaltest(sample)[1]
    return p_value >= alpha
def get_test(code):
    """Map a short test code to its (decision_function, statistic_function) pair.

    Codes: SW, LF, AD, JB, SF, DP, CVM, FSSD.
    Raises ValueError for an unknown code — the original silently fell
    through and returned None.
    """
    tests = {
        'SW': (shapiro_wilk, shapiro_wilk_statistic),
        'LF': (lilliefors, lilliefors_statistic),
        'AD': (anderson_darling, anderson_darling_statistic),
        'JB': (jarque_bera, jarque_bera_statistic),
        'SF': (shapiro_francia, shapiro_francia_statistic),
        'DP': (dp_test, dp_test_statistic),
        'CVM': (cramer_von_mises, cramer_von_mises_statistic),
        'FSSD': (kernel_normality_test, kernel_normality_test_statistic),
    }
    try:
        return tests[code]
    except KeyError:
        raise ValueError("Unknown normality test code: {!r}".format(code))
class TestClassifier(object):
    """Wraps a statistical normality test as a scikit-learn-style classifier.

    predict() labels each sample 1 (test accepts normality at `alpha`),
    0 (test rejects) or 2 (the test raised an exception).
    """

    def __init__(self, test, statistic, alpha, class_label=1, opposite_label=0):
        super(TestClassifier, self).__init__()
        self.test = test
        self.alpha = alpha
        self.statistic = statistic
        self._estimator_type = 'classifier'
        self.classes_ = [0, 1]

    def predict(self, samples):
        """Return one label (1 / 0 / 2) per sample."""
        labels = []
        for sample in samples:
            try:
                labels.append(1 if self.test(sample, self.alpha) else 0)
            except Exception as e:
                # A failing test is reported but does not abort the batch.
                print(e)
                traceback.print_exc()
                labels.append(2)
        return labels

    def calculate_statistic(self, samples):
        """Statistic of a single sample, or an array of per-sample statistics
        when given a list of sample lists."""
        if any(type(x) != list for x in samples):
            return self.statistic(samples)
        return np.array([self.statistic(sample) for sample in samples])
class TableTestClassifier(object):
    """Normality test driven by a table of critical values.

    `table` maps sample size -> threshold, or (when `alpha` is given)
    alpha -> {sample size -> threshold}. A sample is labelled normal when
    its statistic reaches the tabulated threshold.
    """

    def __init__(self, table, statistic, alpha=None, class_label=1, opposite_label=0):
        super(TableTestClassifier, self).__init__()
        self.table = table
        self.alpha = alpha
        self.statistic = statistic
        self._estimator_type = 'classifier'
        self.classes_ = [0, 1]

    def test(self, sample):
        """True iff the sample's statistic is >= the tabulated threshold
        for its size (and configured alpha)."""
        value = self.statistic(sample)
        lookup = self.table if self.alpha is None else self.table[self.alpha]
        return bool(value >= lookup[len(sample)])

    def predict(self, samples):
        """Return one label per sample: 1 normal, 0 non-normal, 2 test failed."""
        labels = []
        for sample in samples:
            try:
                labels.append(1 if self.test(sample) else 0)
            except Exception as e:
                print(e)
                traceback.print_exc()
                labels.append(2)
        return labels

    def calculate_statistic(self, samples):
        """Statistic of a single sample, or an array of per-sample statistics
        when given a list of sample lists."""
        if any(type(x) != list for x in samples):
            return self.statistic(samples)
        return np.array([self.statistic(sample) for sample in samples])
def get_standard_classifier(test_code, alpha):
    """Build a TestClassifier for the standard test identified by `test_code`."""
    decision_fn, statistic_fn = get_test(test_code)
    return TestClassifier(decision_fn, statistic_fn, alpha, 1, 0)
# Statistics of normality tests
# R (nortest) wrappers built via rpy2. Each resulting callable takes a
# numeric vector and returns the test's statistic, or the sentinel -10.0
# if the R call errors out.
rstring_lf_stat = """
function(sample){
library(nortest)
tryCatch({
res <- lillie.test(sample)
stat <- res[[1]]
return(stat)
}, error = function(e){
return(-10.0)
})
}
"""
lf_stat = robjects.r(rstring_lf_stat)  # Lilliefors statistic

rstring_sf_stat = """
function(sample){
library(nortest)
tryCatch({
res <- sf.test(sample)
stat <- res[[1]]
return(stat)
}, error = function(e){
return(-10.0)
})
}
"""
sf_stat = robjects.r(rstring_sf_stat)  # Shapiro-Francia statistic

rstring_cvm_stat = """
function(sample){
library(nortest)
tryCatch({
res <- cvm.test(sample)
stat <- res[[1]]
return(stat)
}, error = function(e){
return(-10.0)
})
}
"""
cvm_stat = robjects.r(rstring_cvm_stat)  # Cramer-von Mises statistic
def lilliefors_statistic(sample):
    """Lilliefors test statistic (R nortest), or -10.0 on R-side failure."""
    result = lf_stat(FloatVector(sample))
    return result[0]
def shapiro_francia_statistic(sample):
    """Shapiro-Francia test statistic (R nortest), or -10.0 on R-side failure."""
    result = sf_stat(FloatVector(sample))
    return result[0]
def shapiro_wilk_statistic(sample):
    """Shapiro-Wilk W statistic of the sample."""
    w_statistic, _ = scipy.stats.shapiro(sample)
    return w_statistic
def cramer_von_mises_statistic(sample):
    """Cramer-von Mises test statistic (R nortest), or -10.0 on R-side failure."""
    result = cvm_stat(FloatVector(sample))
    return result[0]
def anderson_darling_statistic(sample):
    """Anderson-Darling A^2 statistic against the normal distribution."""
    result = scipy.stats.anderson(sample, dist='norm')
    return result[0]
def jarque_bera_statistic(sample):
    """Jarque-Bera test statistic of the sample."""
    result = scipy.stats.jarque_bera(sample)
    return result[0]
def dp_test_statistic(sample):
    """D'Agostino-Pearson omnibus test statistic of the sample."""
    result = scipy.stats.normaltest(sample)
    return result[0]
def random_mean():
    """Uniform random mean in [-100, 100)."""
    return random.uniform(-100, 100)
def random_sigma():
    """Uniform random standard deviation in [1, 20)."""
    return random.uniform(1, 20)
def generate_normal_samples(ns, L):
    """Draw L normal samples (each with a random mean and sigma) for every
    sample size in `ns`; returns them as a flat list of Python lists."""
    raw_samples = []
    for size in ns:
        for _ in range(L):
            drawn = np.random.normal(random_mean(), random_sigma(), size)
            raw_samples.append(drawn.tolist())
    return raw_samples
# R (PearsonDS) wrapper: draw an n-element sample from the Pearson family
# with the given first four moments; returns FALSE if the moments are
# infeasible or the R call errors out.
pearsonstring = """
function(n, m1, m2, m3, m4){
library(gsl)
library(PearsonDS)
tryCatch({
sample <- rpearson(n,moments=c(mean=m1,variance=m2,skewness=m3,kurtosis=m4))
return(sample)
}, error = function(e){
return(FALSE)
})
}
"""
generate_pearson = robjects.r(pearsonstring)
def generate_pearson_nonnormal_samples(s_range, k_range, ns, L):
    """Generate non-normal Pearson-family samples over a grid of
    skewness (s_range) and kurtosis (k_range) values, L samples per valid
    (size, skewness, kurtosis) combination."""
    h = 0
    raw_samples = []
    for n in ns:
        for s in s_range:
            for k in k_range:
                # Pearson-family feasibility: kurtosis must exceed
                # skewness^2 + 1; (s=0, k=3) is the normal case and is
                # excluded because these samples must be non-normal.
                if k - s**2 - 1 >= 0 and not(s == 0 and k == 3):
                    h = h + 1
                    for l in range(L):
                        mean = random_mean()
                        sd = random_sigma()
                        response = generate_pearson(n, mean, sd, s, k)
                        # R returns FALSE (first element) when generation failed.
                        if not(response[0] == False):
                            sample = response
                            #confs[(n, mean, sd, skewness, kurtosis)] = True
                            raw_samples.append(list(sample))
        #print(n, h, len(raw_samples))
    return raw_samples
def label_samples(samples, label):
    """Append `label` to each sample, returning new lists."""
    return [[*sample, label] for sample in samples]
def split(samples, train_size, labels=None, random_state=0):
    """Stratified train/test split of labeled samples.

    When `labels` is None, each sample's last element is taken as its label.
    Returns (X_train, X_test, y_train, y_test) via sklearn's train_test_split.
    """
    if labels is None:
        labels = [sample[-1] for sample in samples]
    X_train, X_test, y_train, y_test = train_test_split(samples, labels,
        stratify=labels, train_size=train_size, random_state=random_state)
    return X_train, X_test, y_train, y_test
def preprocess1(raw_samples, how=None):
    """Optionally convert labeled raw samples into descriptor vectors.

    how: {'method': 'nothing'} (the default) returns the input unchanged;
         {'method': 'descriptors', 'q': q} replaces each sample with
         createDescriptor(sample-without-label, q) + [label], dropping any
         descriptor containing inf/NaN values.

    The original used a mutable dict as the default argument; None is used
    instead (backward compatible).
    """
    if how is None:
        how = {'method': 'nothing'}
    if how['method'] == 'nothing':
        return raw_samples
    elif how['method'] == 'descriptors':
        q = how['q']
        descriptors = [createDescriptor(sample[:-1], q) for sample in raw_samples]
        pruned = []
        for descriptor, sample in zip(descriptors, raw_samples):
            if onlyFinite(descriptor):
                pruned.append(descriptor + [sample[-1]])
        return pruned
# Format a (mean, sd) pair as a LaTeX "mean±sd" table cell.
# NOTE(review): redefined identically further below in this module.
mean_sd_merge = lambda x: '${:.3f}\pm{:.3f}$'.format(x[0], x[1])
def get_latex_table(df, sort_by=None, merge_instructions=None, renamer=None,
caption=None, label=None, float_format='$%.3f$', index=False):
if sort_by is not None:
df = df.sort_values(sort_by, axis='index')
if merge_instructions is not None:
for instruction in merge_instructions:
merge_function = instruction['merge_function']
new_column = instruction['new_column']
columns_to_merge = instruction['columns_to_merge']
df[new_column] = df[columns_to_merge].apply(merge_function, axis=1)
df = df.drop(columns_to_merge, axis=1)
if renamer is not None:
df = df.rename(columns=renamer)
latex = df.to_latex(index=index,
float_format=float_format, escape=False, caption=caption, label=label)
return latex
mean_sd_merge = lambda x: '${:.3f}\pm{:.3f}$'.format(x[0], x[1])


def now(format='%Y-%m-%d %H-%M-%S.%f'):
    """Current local time formatted with `format`.

    The original ignored its `format` argument and always used the
    hard-coded default pattern.
    """
    return datetime.datetime.now().strftime(format)
def calculate_metrics(true_labels, guessed_labels, metrics=['A', 'TPR', 'PPV', 'TNR', 'NPV', 'F1', 'UR']):
    """Compute the requested classification metrics.

    Label convention: 1 = normal (positive class), 0 = non-normal,
    2 = undecided (counted only by 'UR'). Scores are appended in the fixed
    order A, TPR, FNR, PPV, TNR, NPV, F1, UR — only metrics present in
    `metrics` are emitted, so the result order follows this sequence, not
    the order of the `metrics` argument.

    NOTE(review): divisions below fail with ZeroDivisionError when a class
    is absent or never predicted — confirm callers guarantee both classes.
    """
    # Determine the elements of the confusion matrix
    # (rows = true labels [0, 1], columns = guessed labels [0, 1];
    # guessed label 2 falls outside the matrix).
    matrix = sklearn.metrics.confusion_matrix(true_labels, guessed_labels, labels=[0, 1])
    #print(matrix)
    T_nonnormal = matrix[0, 0]
    F_normal = matrix[0, 1]
    F_nonnormal = matrix[1, 0]
    T_normal = matrix[1, 1]
    # Determine the set specifics
    N_normal = sum(matrix[1, :])  #T_normal + F_nonnormal
    N_nonnormal = sum(matrix[0, :])  #T_nonnormal + F_normal
    N = N_normal + N_nonnormal
    scores = []
    #print(matrix)
    # Calculate the chosen metrics
    if 'A' in metrics:
        score = (T_normal + T_nonnormal) / N
        scores.append(score)
    if 'TPR' in metrics:
        score = T_normal / N_normal
        scores.append(score)
    if 'FNR' in metrics:
        score = F_nonnormal / N_normal
        scores.append(score)
    if 'PPV' in metrics:
        score = T_normal / (T_normal + F_normal)
        scores.append(score)
    if 'TNR' in metrics:
        score = T_nonnormal / N_nonnormal
        scores.append(score)
    if 'NPV' in metrics:
        score = T_nonnormal / (T_nonnormal + F_nonnormal)
        scores.append(score)
    if 'F1' in metrics:
        TPR = T_normal / N_normal
        PPV = T_normal / (T_normal + F_normal)
        score = 2 * TPR * PPV / (TPR + PPV)
        scores.append(score)
    if 'UR' in metrics:
        # Undecided rate: fraction of samples labelled 2 (test failures).
        total = len(guessed_labels)
        undecided = len([x for x in guessed_labels if x == 2])
        score = undecided / total
        scores.append(score)
    return scores
def evaluate(samples, true_labels, classifier, metrics=['A', 'TPR', 'PPV', 'TNR', 'NPV', 'F1', 'AUROC', 'U'],
             n_range=None):
    """Evaluate a classifier on labeled samples.

    Without `n_range`, returns one list of scores for the whole set.
    With `n_range`, returns per-sample-size score lists (one per n in
    n_range) followed by the overall scores.

    NOTE(review): 'AUROC' requires the classifier to implement
    predict_proba — the statistical-test classifiers above do not.
    """
    # Guess the labels
    guessed_labels = classifier.predict(samples)
    # Calculate the performance metrics for the whole set
    total_scores = calculate_metrics(true_labels, guessed_labels, metrics=metrics)
    if 'AUROC' in metrics:
        prediction_scores = classifier.predict_proba(samples)[:, 1]
        # Drop NaN prediction scores before computing the ROC AUC.
        where = np.where(~np.isnan(prediction_scores))[0]
        t = [true_labels[j] for j in where]
        ps = [prediction_scores[j] for j in where]
        auroc = sklearn.metrics.roc_auc_score(t, ps)
        # Splice AUROC into the position it has in `metrics`.
        i = metrics.index('AUROC')
        total_scores = total_scores[:i] + [auroc] + total_scores[i:]
    if n_range is None:
        return total_scores
    else:
        # Re-bucket labels (and scores) by sample size for per-n metrics.
        guessed_by_n = {n: [] for n in n_range}
        true_by_n = {n: [] for n in n_range}
        prediction_scores_by_n = {n: [] for n in n_range}
        for i, sample in enumerate(samples):
            n = len(sample)
            if n not in n_range:
                continue
            true_label = int(true_labels[i])
            true_by_n[n].append(true_label)
            guessed_label = int(guessed_labels[i])
            guessed_by_n[n].append(guessed_label)
            if 'AUROC' in metrics:
                prediction_score = prediction_scores[i]
                prediction_scores_by_n[n].append(prediction_score)
        all_scores = []
        for n in n_range:
            scores_for_n = calculate_metrics(true_by_n[n], guessed_by_n[n], metrics=metrics)
            if 'AUROC' in metrics:
                where = np.where(~np.isnan(prediction_scores_by_n[n]))[0]
                t = [true_by_n[n][j] for j in where]
                ps = [prediction_scores_by_n[n][j] for j in where]
                auroc_for_n = sklearn.metrics.roc_auc_score(t, ps)
                i = metrics.index('AUROC')
                scores_for_n = scores_for_n[:i] + [auroc_for_n] + scores_for_n[i:]
            all_scores.append(scores_for_n)
        return all_scores + [total_scores]
def evaluate_pretty(samples, true_labels, classifier,
                    metrics=['A', 'TPR', 'PPV', 'TNR', 'NPV', 'F1', 'AUROC'], n_range=None, index=None
                    ):
    """Run `evaluate` and wrap the resulting scores in a DataFrame; with
    `n_range`, one row per sample size plus a final 'overall' row."""
    scores_lists = evaluate(samples, true_labels, classifier, metrics=metrics, n_range=n_range)
    if n_range is None:
        rows = [scores_lists]
        columns = metrics
    else:
        rows = [[n] + scores for n, scores in zip(n_range, scores_lists[:-1])]
        rows.append(['overall'] + scores_lists[-1])
        columns = ['n'] + metrics
    results_df = pd.DataFrame(rows, columns=columns)
    if index is not None:
        results_df = results_df.set_index(index, drop=True)
    return results_df
# https://stackoverflow.com/a/46730656/1518684
def get_activations(clf, X):
    """Return per-layer activations of a fitted sklearn MLP for input X.

    Relies on the private `_forward_pass` API of sklearn's MLP estimators;
    may break across sklearn versions.
    """
    hidden_layer_sizes = clf.hidden_layer_sizes
    # Normalize a scalar layer size to a one-element list.
    if not hasattr(hidden_layer_sizes, "__iter__"):
        hidden_layer_sizes = [hidden_layer_sizes]
    hidden_layer_sizes = list(hidden_layer_sizes)
    # Layer widths: input features, hidden layers, output units.
    layer_units = [X.shape[1]] + hidden_layer_sizes + \
        [clf.n_outputs_]
    activations = [X]
    for i in range(clf.n_layers_ - 1):
        # Pre-allocate one activation buffer per non-input layer;
        # _forward_pass fills them in place.
        activations.append(np.empty((X.shape[0],
                                     layer_units[i + 1])))
    clf._forward_pass(activations)
    return activations
# NOTE(review): unused editor-snippet boilerplate; nothing in this module
# references ClassName — candidate for removal.
class ClassName(object):
    """docstring for ClassName"""
    def __init__(self, arg):
        super(ClassName, self).__init__()
        self.arg = arg
def separate_by_label(samples, labels):
    """Group samples into a dict keyed by their label (coerced to int)."""
    separated = {}
    for sample, label in zip(samples, labels):
        separated.setdefault(int(label), []).append(sample)
    return separated
def separate_by_size(samples, labels=None):
    """Group samples (and optionally their labels) by sample length.

    Returns a dict size -> samples, or a (samples_dict, labels_dict) pair
    when `labels` is given.
    """
    by_size = {}
    labels_by_size = {}
    for position, sample in enumerate(samples):
        size = len(sample)
        by_size.setdefault(size, []).append(sample)
        if labels is not None:
            labels_by_size.setdefault(size, []).append(labels[position])
    if labels is None:
        return by_size
    return by_size, labels_by_size
def separate_by_label_and_size(samples):
    """Split label-suffixed samples into {label: {size: [samples]}}.

    Each input sample carries its label as the final element.
    """
    bodies = [sample[:-1] for sample in samples]
    tags = [sample[-1] for sample in samples]
    separated = separate_by_label(bodies, tags)
    return {label: separate_by_size(group) for label, group in separated.items()}
def filter_samples(samples, labels, target_label=None, n=None):
    """Return the samples matching the given label and/or sample size.

    Fixes two defects in the original: `filtered_samples` was never
    initialized (NameError on every filtering path), and when both
    `target_label` and `n` were supplied the size constraint was ignored.
    """
    if target_label is None and n is None:
        return samples
    filtered_samples = []
    for sample, label in zip(samples, labels):
        if target_label is not None and label != target_label:
            continue
        if n is not None and len(sample) != n:
            continue
        filtered_samples.append(sample)
    return filtered_samples
def traverse_and_save(dictionary, img_dir_path):
    """Recursively walk a nested dict of figures and save each one as .pdf
    and .eps under `img_dir_path` (keys containing 'fig' hold figures;
    other dict values are recursed into with the key appended to the path).

    Fixes the original's else-branch, which referenced an undefined `path`
    variable, and the mkdir call, which split on os.sep and thereby turned
    absolute paths relative.
    """
    if type(dictionary) is not dict:
        return
    for key in dictionary:
        value = dictionary[key]
        if 'fig' in key:
            pathlib.Path(img_dir_path).mkdir(parents=True, exist_ok=True)
            for extension in ['.pdf', '.eps']:
                path = os.path.join(img_dir_path, img_dir_path.split(os.sep)[-1] + '_' + key) + extension
                print('Saving', path)
                # Some entries are figures, others are axes carrying .figure.
                if 'savefig' in dir(value):
                    value.savefig(path, bbox_inches='tight')
                else:
                    value.figure.savefig(path, bbox_inches='tight')
        elif 'savefig' in dir(value):
            # A saveable object under a non-'fig' key: save it in place.
            pathlib.Path(img_dir_path).mkdir(parents=True, exist_ok=True)
            for extension in ['.pdf', '.eps']:
                path = os.path.join(img_dir_path, img_dir_path.split(os.sep)[-1] + '_' + key) + extension
                value.savefig(path, bbox_inches='tight')
        else:
            traverse_and_save(value, os.path.join(img_dir_path, key))
| StarcoderdataPython |
6511333 | <reponame>miswanting/python-spider-starter
import collections
import json
import os
import threading
import time
import urllib.request
from urllib.parse import urlencode
from bs4 import BeautifulSoup
import SpiderEngine
# CACHE_FILE_NAME = 'cache.json'
# cache = {
# 'current_page': 0,
# 'nodes': {},
# 'task': []
# }
# Shared scraper state (currently an unused placeholder).
status = {
}
# TOLI catalog endpoints: listing form (POST target), image base URL,
# download endpoint, and a sample product-detail URL.
url = 'https://www.toli.co.jp/catalog/hinban/hinban.php'
durl = 'https://www.toli.co.jp/catalog/hinban'
d = 'https://www.toli.co.jp/catalog/download.php'
u = 'https://www.toli.co.jp/catalog/hinban/hinban.php?hinban=20FL1001'
# end = False
# def read_page(num):
# global e, end
# count = num*18
# data = {'hb_next': '次を表示',
# 'count': count,
# 'hinban': '',
# 'hinsyu_code': '',
# 'gara': -1,
# 'color': -1,
# 'from': -1,
# 'to': -1,
# 'kinou1': -1,
# 'kinou2': -1,
# 'kinou3': -1
# }
# # request = urllib.request.Request(url, urlencode(data).encode())
# while True:
# try:
# raw_html = e.post(url, data, 'EUC-JP')
# break
# except OSError as e:
# print(123, e)
# soup = BeautifulSoup(raw_html, "lxml")
# data_list = soup.find(
# id='MAIN').div.table.contents[5].contents[3].form.table.find_all('tr', recursive=False)[1:]
# data = {
# 'name': [],
# 'img_path': [],
# 'link_path': []
# }
# for line in range(3):
# im = data_list[line*2]
# for each in im.find_all('td', recursive=False):
# if each.a != None:
# data['img_path'].append(each.a.img['src'])
# data['link_path'].append(each.a['href'])
# else:
# end = True
# te = data_list[line*2+1]
# for each in te.find_all('td', recursive=False):
# if each.table != None:
# data['name'].append(each.table.tr.contents[3].get_text())
# else:
# end = True
# out = {}
# for i in range(len(data['name'])):
# out.update({
# data['name'][i]: {
# 'img': data['img_path'][i],
# 'link': data['link_path'][i]
# }
# })
# time.sleep(1)
# return out
# def mission_probe():
# '''获取基本信息'''
# def task():
# pass
# complete = False
# while not complete:
# # 产生任务
# # 运行任务(多线程)
# cell = read_page(e.data.data['probe_index'])
# for name in cell:
# e.new_task({
# 'name': name,
# 'img': cell[name]['img'],
# 'link': cell[name]['link']
# })
# # e.data.data['task'].append({
# # 'name': name,
# # 'img': cell[name]['img'],
# # 'link': cell[name]['link']
# # })
# e.data.data['nodes'].update(cell)
# print(e.data.data['probe_index'], len(e.data.data['nodes']))
# e.data.data['probe_index'] += 1
# e.save_data()
# print('probe下载完成!共{}项'.format(len(e.data.data['nodes'])))
# def mission_thumb():
# '''索引图片下载'''
# def task():
# pass
# complete = False
# while not complete:
# if not e.task_is_empty():
# # if e.data.data['task']:
# # task = e.data.data['task'].pop()
# task = e.get_task()
# print('{}'.format(task['name']))
# while True:
# try:
# e.save('t/{}.jpg'.format(task['name']),
# e.get(durl+task['img'][1:], encoding='raw'), 'wb')
# break
# except OSError as er:
# print(er)
# e.finish_task()
# time.sleep(1)
# def mission_high():
# '''高清图片下载'''
# def task():
# pass
# complete = False
# def mission_info():
# '''获取详细信息'''
# def task():
# pass
# complete = False
# def monitor():
# '''运行状况监控'''
# print('')
class Project:
    """TOLI catalog spider.

    Probes the paginated product listing (mission_probe), queues one
    download task per product, and downloads each product's thumbnail
    (mission_thumb) via SpiderEngine. start() runs both missions in
    threads and blocks in the monitor loop.
    """

    def __init__(self):
        # Persisted crawl state: completion flags, next listing page to
        # probe, discovered product nodes, and the pending task queue.
        self.data = {
            'done': {
                'probe': False
            },
            'probe_index': 0,
            'nodes': {},
            'task': []
        }
        self.engine = SpiderEngine.Engine()

    def init(self):
        """Configure the engine, restore saved state, prepare thumbnail dir 't'."""
        self.engine.config()
        self.engine.init()
        self.data = self.engine.load_data(self.data)
        self.engine.mkdir('t')

    def update(self):
        """Persist the current crawl state."""
        self.engine.save_data(self.data)

    def start(self):
        """Launch the probe and thumbnail worker threads, then monitor forever."""
        p = threading.Thread(target=self.mission_probe)
        p.start()
        t = threading.Thread(target=self.mission_thumb)
        t.start()
        self.monitor()

    def mission_probe(self):
        '''Fetch basic product info, page by page (worker thread).'''
        def task():
            pass
        while not self.data['done']['probe']:
            # Generate tasks from the next listing page,
            # then hand them to the engine's queue.
            cell = self.read_page(self.data['probe_index'])
            for name in cell:
                self.engine.new_task({
                    'name': name,
                    'img': cell[name]['img'],
                    'link': cell[name]['link']
                })
                # e.data.data['task'].append({
                #     'name': name,
                #     'img': cell[name]['img'],
                #     'link': cell[name]['link']
                # })
            self.data['nodes'].update(cell)
            # print(self.data['probe_index'], len(
            #     self.data['nodes']))
            self.data['probe_index'] += 1
            self.update()
        # "probe download complete! {} items in total"
        print('probe下载完成!共{}项'.format(len(self.data['nodes'])))

    def mission_thumb(self):
        '''Download thumbnail images from the task queue (worker thread).'''
        def task():
            pass
        complete = False
        while not complete:
            if not self.engine.task_is_empty():
                # if e.data.data['task']:
                #     task = e.data.data['task'].pop()
                task = self.engine.get_task()
                # print('{}'.format(task['name']))
                # Retry forever on transient network errors.
                while True:
                    try:
                        self.engine.save('t/{}.jpg'.format(task['name']),
                                         self.engine.get(durl+task['img'][1:], encoding='raw'), 'wb')
                        break
                    except OSError as er:
                        print(er)
                self.engine.finish_task()
            time.sleep(1)

    def mission_high(self):
        '''Download high-resolution images (not implemented).'''
        def task():
            pass
        complete = False

    def mission_info(self):
        '''Fetch detailed product info (not implemented).'''
        def task():
            pass
        complete = False

    def monitor(self):
        '''Print crawl progress every 3 seconds (blocks forever).'''
        while True:
            print()
            temp = '{}:{}'
            print(temp.format('PROB', len(self.data['nodes'])))
            print(temp.format('THUM', len(self.data['nodes'])))
            time.sleep(3)

    def read_page(self, num):
        """POST the listing form for page `num` (18 items per page) and return
        {product_name: {'img': thumbnail_path, 'link': detail_path}}.

        Sets self.data['done']['probe'] when an incomplete (final) page is seen.
        """
        count = num*18
        data = {'hb_next': '次を表示',
                'count': count,
                'hinban': '',
                'hinsyu_code': '',
                'gara': -1,
                'color': -1,
                'from': -1,
                'to': -1,
                'kinou1': -1,
                'kinou2': -1,
                'kinou3': -1
                }
        # request = urllib.request.Request(url, urlencode(data).encode())
        # Retry forever on transient network errors.
        while True:
            try:
                raw_html = self.engine.post(url, data, 'EUC-JP')
                break
            except OSError as er:
                print(123, er)
        soup = BeautifulSoup(raw_html, "lxml")
        data_list = soup.find(
            id='MAIN').div.table.contents[5].contents[3].form.table.find_all('tr', recursive=False)[1:]
        data = {
            'name': [],
            'img_path': [],
            'link_path': []
        }
        # The listing is a grid of three image-row / text-row pairs.
        for line in range(3):
            im = data_list[line*2]
            for each in im.find_all('td', recursive=False):
                if each.a != None:
                    data['img_path'].append(each.a.img['src'])
                    data['link_path'].append(each.a['href'])
                else:
                    # An empty cell means the last page has been reached.
                    self.data['done']['probe'] = True
            te = data_list[line*2+1]
            for each in te.find_all('td', recursive=False):
                if each.table != None:
                    data['name'].append(each.table.tr.contents[3].get_text())
                else:
                    self.data['done']['probe'] = True
        out = {}
        for i in range(len(data['name'])):
            out.update({
                data['name'][i]: {
                    'img': data['img_path'][i],
                    'link': data['link_path'][i]
                }
            })
        # Be polite to the server between page fetches.
        time.sleep(1)
        return out
if __name__ == "__main__":
pro = Project()
pro.init()
pro.start()
# e = SpiderEngine.Engine()
# e.config()
# e.init()
# e.load_data({
# 'probe_index': 0,
# 'nodes': {},
# 'task': []
# })
# e.mkdir('t')
# p = threading.Thread(target=mission_probe)
# p.start()
# t = threading.Thread(target=mission_thumb)
# t.start()
# monitor()
| StarcoderdataPython |
3322531 | <reponame>lonePatient/TorchBlocks
from torchblocks.metrics.classification import *
from torchblocks.metrics.regression import *
from torchblocks.metrics.utils_ner import *
from torchblocks.metrics.sequence_labeling import *
| StarcoderdataPython |
3436112 | <reponame>AtmegaBuzz/minirobosim-main
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: <NAME>
@organization: CHArt - Université Paris 8
"""
from Box2D import b2
from Box2D import (b2CircleShape, b2FixtureDef, b2Vec2)
import numpy as np
class World():
    """Thin wrapper around a Box2D world plus the bodies it simulates."""
    # Box2D solver iteration counts per step (velocity, position).
    VEL_ITERS, POS_ITERS = 10, 10

    def __init__(self, gravity=(0, 0)):
        self.b2world = b2.world()
        self.bodies = []
        self.b2world.gravity = gravity

    def step(self, fw):
        """Advance every body, then the physics world, by one frame of fw.TIMESTEP."""
        for body in self.bodies:
            body.step(fw)
        self.b2world.Step(fw.TIMESTEP, self.VEL_ITERS, self.POS_ITERS)
        self.b2world.ClearForces()

    def draw(self, fw):
        """Render all bodies with the framework's drawing helpers."""
        for body in self.bodies:
            body.draw(fw)
class Wall():
    """Static rectangular boundary enclosing the arena."""
    # Default arena extents in Box2D world units; the chain is closed by
    # repeating the first vertex.
    __default_boundary = [(-70, -50),
                          (-70, +50),
                          (+70, +50),
                          (+70, -50),
                          (-70, -50)]

    def __init__(self, world, boundary=None):
        self.world = world
        self.boundary = boundary
        if self.boundary is None:
            self.boundary = Wall.__default_boundary
        # Static body shifted 20 units up; the edge chain forms the wall.
        self.b2body = world.b2world.CreateStaticBody(position=(0, 20))
        self.b2body.CreateEdgeChain(self.boundary)

    def step(self, fw):
        # Walls are static: nothing to update per frame.
        pass

    def draw(self, fw):
        # Transform local vertices to world space before drawing.
        vertices = [self.b2body.transform * v for v in self.boundary]
        fw.DrawPolygon(vertices, (255, 255, 255, 255))
class Ball():
    """Dynamic circular Box2D body drawn as a green circle."""

    def __init__(self, world, position=(0, 0), radius=1, density=0.01, friction=0.1):
        # NOTE(review): `friction` is accepted but never passed to the
        # fixture definition — confirm whether it should be.
        self.world = world
        self.radius = radius
        self.color = "green"
        self.position = position
        self.b2body = world.b2world.CreateDynamicBody(
            fixtures=b2FixtureDef(
                shape=b2CircleShape(radius=self.radius),
                density=density),
            bullet=False,
            position=position)

    def step(self, fw):
        # Movement is handled entirely by the physics engine.
        pass

    def draw(self, fw):
        fw.DrawCircle(self.b2body.position, self.radius, (0, 255, 0, 255))
class Maze():
    """Static maze built from edge chains, with start and end lines."""
    # Wall polylines in unscaled local coordinates; the last entry is the
    # outer arena boundary.
    __default_boundaries = [
        [(51,-50),(51,-9),(49,-9),(49,-50),(51,-50)],
        [(-11,-50),(-11,-29),(29,-29),(29,-9),(31,-9),(31,-31),(-9,-31),(-9,-50),(-11,-50)],
        [(-11,50),(-11,29),(11,29),(11,31),(-9,31),(-9,50),(-11,50)],
        [(-31,50),(-31,31),(-51,31),(-51,29),(-29,29),(-29,50),(-31,50)],
        [(-31,-50),(-31,-31),(-51,-31),(-51,-29),(-31,-29),(-31,-9),(-11,-9),(-11,9),(-49,9),(-49,-11),(-51,-11),(-51,11),(49,11),(49,29),(29,29),(29,31),(51,31),(51,9),(-9,9),(-9,-9),(11,-9),(11,-11),(-29,-11),(-29,-50) ],
        [(-70,-50),(-70,+50),(+70,+50),(+70,-50),(-70,-50)]
    ]

    def __init__(self, world, boundaries=None):
        self.world = world
        self.scale_ratio = 1.5
        self.color = "black"
        self.boundaries = boundaries
        if self.boundaries is None:
            self.boundaries = Maze.__default_boundaries
        # Scale every wall polyline by scale_ratio (coordinates become ints).
        for w_i, wall in enumerate(self.boundaries):
            self.boundaries[w_i] = (np.array(wall)*self.scale_ratio).astype(int).tolist()
        # Start/end line segments are placed relative to boundary vertices.
        self.start_line = [b2Vec2(self.boundaries[5][3])+b2Vec2([-19,+25])*self.scale_ratio,
                           b2Vec2(self.boundaries[5][3])+b2Vec2([0,+25])*self.scale_ratio]
        self.end_line = [self.boundaries[4][5],
                         b2Vec2(self.boundaries[4][5])+b2Vec2([0,17])*self.scale_ratio ]
        self.b2body = world.b2world.CreateStaticBody(position=(0, 20))
        for boundary in self.boundaries:
            self.b2body.CreateEdgeChain(boundary)

    def step(self, fw):
        # The maze is static: nothing to update per frame.
        pass

    def draw(self, fw):
        transform = self.b2body.transform
        for boundary in self.boundaries:
            vertices = [transform * v for v in boundary]
            fw.DrawPolygon(vertices, (255, 255, 255, 255))
        # Start line (teal) and end line (green) markers.
        vertices = [transform * v for v in self.start_line]
        fw.DrawPolygon(vertices, (0, 255, 128, 255))
        vertices = [transform * v for v in self.end_line]
        fw.DrawPolygon(vertices, (0, 255, 0, 255))
111851 | <gh_stars>0
from vs_currency import get_supported_vs_currencies_api
import unittest
class TestVsCurrency(unittest.TestCase):
    """Checks the CoinGecko supported-vs-currencies endpoint wrapper
    against a snapshot of the expected currency list.

    NOTE(review): this performs a live network call, and the expected
    list will drift as CoinGecko adds currencies — consider mocking.
    """

    def test_api_status_without_status_code(self):
        # `url = url = ...` is a redundant double assignment in the original.
        url = url = "https://api.coingecko.com/api/v3/"
        actual = get_supported_vs_currencies_api(url=url)
        # Snapshot of the supported vs-currencies at the time of writing.
        expected = [
            "btc",
            "eth",
            "ltc",
            "bch",
            "bnb",
            "eos",
            "xrp",
            "xlm",
            "link",
            "dot",
            "yfi",
            "usd",
            "aed",
            "ars",
            "aud",
            "bdt",
            "bhd",
            "bmd",
            "brl",
            "cad",
            "chf",
            "clp",
            "cny",
            "czk",
            "dkk",
            "eur",
            "gbp",
            "hkd",
            "huf",
            "idr",
            "ils",
            "inr",
            "jpy",
            "krw",
            "kwd",
            "lkr",
            "mmk",
            "mxn",
            "myr",
            "ngn",
            "nok",
            "nzd",
            "php",
            "pkr",
            "pln",
            "rub",
            "sar",
            "sek",
            "sgd",
            "thb",
            "try",
            "twd",
            "uah",
            "vef",
            "vnd",
            "zar",
            "xdr",
            "xag",
            "xau",
            "bits",
            "sats",
        ]
        print(actual)
        self.assertEqual(actual, expected)
| StarcoderdataPython |
1724676 | <reponame>aimof/rain<filename>python/rain/client/pycode.py
import inspect
import contextlib
import time
import base64
import cloudpickle
from collections import OrderedDict
from .task import Task
from .data import blob
from .session import get_active_session
from ..common import RainException, RainWarning
from .input import Input
from .output import OutputSpec
PICKLE_ARG_SIZE_LIMIT = 256 * 1024  # bytes: warn when a pickled argument exceeds this
PICKLE_ARG_TIME_LIMIT = 1.0         # seconds: warn when pickling takes longer

# Base name of current argument and growing list of input data objects
# while Py task arguments are pickled.
# `[arg_base_name, counter, inputs_list, input_prototype]`
_global_pickle_inputs = None
@contextlib.contextmanager
def _pickle_inputs_context(name, inputs, input_prototype):
    """Context manager to store current argument name and growing input
    objects list while Py task arguments are unpickled. Internal, not
    thread safe, not reentrant."""
    global _global_pickle_inputs
    assert _global_pickle_inputs is None
    # Layout: [arg_base_name, counter, inputs_list, input_prototype]
    _global_pickle_inputs = [name, 0, inputs, input_prototype]
    try:
        yield
    finally:
        # Always clear, so a failed pickle cannot leak state into the next call.
        _global_pickle_inputs = None
def _checked_cloudpickle(d, name=None):
    """Perform cloudpickle.dumps and raise RainWarning if the result is
    unexpectedly big (PICKLE_ARG_SIZE_LIMIT) or pickling takes too
    long (PICKLE_ARG_TIME_LIMIT).

    Fixes in this version: time.clock() was removed in Python 3.8, so
    time.perf_counter() is used; the size message reported len(d)
    (which fails or misleads for objects without a meaningful len)
    instead of the pickled size; the time message reported len(d)
    instead of the elapsed time.
    """
    t0 = time.perf_counter()
    p = cloudpickle.dumps(d)
    elapsed = time.perf_counter() - t0
    if len(p) > PICKLE_ARG_SIZE_LIMIT:
        raise RainWarning("Pickled object {} length {} > PICKLE_ARG_SIZE_LIMIT={}. "
                          "Consider using a blob() for the data."
                          .format(name or '<unknown>', len(p), PICKLE_ARG_SIZE_LIMIT))
    if elapsed > PICKLE_ARG_TIME_LIMIT:
        raise RainWarning("Pickling object {} took {} s > PICKLE_ARG_TIME_LIMIT={}. "
                          "Consider using a blob() for the data."
                          .format(name or '<unknown>', elapsed, PICKLE_ARG_TIME_LIMIT))
    return p
def _checked_cloudpickle_to_string(d, name=None):
    """Same as _checked_cloudpickle but encodes the result to a base64 ASCII string."""
    return base64.b64encode(_checked_cloudpickle(d, name)).decode("ascii")
def remote(*,
           outputs=None,
           inputs=(),
           auto_load=None,
           auto_encode=None,
           cpus=1):
    "Decorator for :py:class:`Remote`, see the documentation there."
    def make_remote(fn):
        # Only plain functions can be wrapped (not classes or callables).
        if not inspect.isfunction(fn):
            raise RainException(
                "remote() arg {!r} is not a function".format(fn))
        return Remote(fn,
                      outputs=outputs,
                      inputs=inputs,
                      auto_load=auto_load,
                      auto_encode=auto_encode,
                      cpus=cpus)
    return make_remote
class Remote:
    """Wraps a Python function so that calling it builds a Rain 'py' Task.

    Created by the @remote decorator. Holds input/output specs derived
    from the function's annotations and the decorator arguments.
    """
    # The function to run remotely
    fn = None
    # OutputSpec for output data objects
    outputs = None
    # Dict of named argument Input specs, including args and kwargs
    inputs = None

    def __init__(self,
                 fn, *,
                 inputs=None,
                 outputs=None,
                 auto_load=False,
                 auto_encode=None,
                 cpus=1):
        self.fn = fn
        code = self.fn.__code__
        self.cpus = cpus

        # A 'return' annotation, if present, overrides the outputs argument.
        if 'return' in fn.__annotations__:
            assert outputs is None
            outputs = fn.__annotations__['return']
        elif outputs is None:
            outputs = 1
        self.outputs = OutputSpec(outputs=outputs)
        # Outputs without an explicit encoding inherit auto_encode.
        for o in self.outputs.outputs:
            if o.encode is None:
                o.encode = auto_encode

        # Build one Input spec per parameter: explicit `inputs` entries win,
        # then annotations, then a default Input named after the parameter.
        self.inputs = {}
        for name in code.co_varnames:
            if name in inputs:
                assert name not in self.fn.__annotations__
                inp = inputs[name]
            elif name in self.fn.__annotations__:
                inp = self.fn.__annotations__[name]
            else:
                inp = Input(label=name)
            assert isinstance(inp, Input)
            if inp.load is None:
                inp.load = auto_load
            self.inputs[name] = inp

    def __call__(self, *args, output=None, outputs=None, session=None, **kwargs):
        """Build and return a 'py' Task that runs self.fn remotely."""
        # TODO(gavento): Use Input()s arguments
        if session is None:
            session = get_active_session()

        # cache the code in a static blob (one blob per function per session)
        fn_blob = session._static_data.get(self.fn)
        if fn_blob is None:
            d = _checked_cloudpickle(self.fn, self.fn.__name__)
            fn_blob = blob(d, self.fn.__name__, content_type="cloudpickle")
            fn_blob.keep()
            session._static_data[self.fn] = fn_blob
        input_objs = [fn_blob]

        # Check the parameter compatibility for fn
        # Note that the first arg is the context
        sig = inspect.signature(self.fn)
        sig.bind(None, *args, **kwargs)
        code = self.fn.__code__

        # Pickle positional args
        pickled_args = []
        for i, argval in enumerate(args):
            if i < code.co_argcount - 1:
                name = code.co_varnames[i + 1]
                input_proto = self.inputs[name]
            else:
                # Overflow positional args map onto the *args parameter.
                args_name = code.co_varnames[code.co_argcount +
                                             code.co_kwonlyargcount]
                name = "{}[{}]".format(args_name, i + 1 - code.co_argcount)
                input_proto = self.inputs[args_name]
            # Within this session state, the DataObjects are serialized as
            # subworker.unpickle_input_object call
            assert isinstance(input_proto, Input)
            with _pickle_inputs_context(name, input_objs, input_proto):
                d = _checked_cloudpickle_to_string(argval, name=name)
                pickled_args.append(d)

        # Pickle keyword args
        pickled_kwargs = OrderedDict()
        for name, argval in kwargs.items():
            input_proto = self.inputs[code.co_varnames[-1]]
            # Within this session state, the DataObjects are serialized as
            # subworker.unpickle_input_object call
            with _pickle_inputs_context(name, input_objs, input_proto):
                d = _checked_cloudpickle_to_string(argval, name=name)
                pickled_kwargs[name] = d

        # create list of Output objects and DO instances
        output_objs = self.outputs.instantiate(
            output=output, outputs=outputs, session=session)

        task_config = {
            'args': pickled_args,
            'kwargs': pickled_kwargs,
            'encode_outputs': [o.attributes['spec'].get('encode') for o in output_objs]
        }
        return Task("py", task_config, input_objs, output_objs, cpus=self.cpus)
| StarcoderdataPython |
6591318 | import FWCore.ParameterSet.Config as cms
# Harvesting configuration for the ECAL pedestal PCL workflow.
from Calibration.EcalCalibAlgos.ecalPedestalPCLHarvester_cfi import ECALpedestalPCLHarvester
from DQMServices.Components.EDMtoMEConverter_cfi import *

# Clone the generic EDM->ME converter and point it at the pedestal products.
EDMtoMEConvertEcalPedestals = EDMtoMEConverter.clone()
EDMtoMEConvertEcalPedestals.lumiInputTag = cms.InputTag("MEtoEDMConvertEcalPedestals", "MEtoEDMConverterLumi")
EDMtoMEConvertEcalPedestals.runInputTag = cms.InputTag("MEtoEDMConvertEcalPedestals", "MEtoEDMConverterRun")

# DQM bookkeeping: the store service plus event-info in the AlCaReco folder.
DQMStore = cms.Service("DQMStore")
DQMInfoEcalPedestals = cms.EDAnalyzer("DQMEventInfo",
    subSystemFolder=cms.untracked.string('AlCaReco'),
)

# Full harvesting sequence: convert MEs, record DQM info, run the harvester.
ALCAHARVESTEcalPedestals = cms.Sequence(EDMtoMEConvertEcalPedestals + DQMInfoEcalPedestals + ECALpedestalPCLHarvester)
| StarcoderdataPython |
1694047 | <gh_stars>0
# Copyright (c) Facebook, Inc. and its affiliates.
"""
Example command
python fairmotion/tasks/clustering/clustering.py \
--features $FEATURES_FILE # see generate_features.py \
--type kmeans \
--num-clusters $NUM_CLUSTERS \
--normalize-features \
--clip-features 90 \
--output-file $OUTPUT_CSV_FILE \
--linkage average
"""
import argparse
import numpy as np
from sklearn.cluster import AgglomerativeClustering, DBSCAN, KMeans, OPTICS
from collections import defaultdict
def calculate_score(centroid, features):
    """Return the Euclidean distance between a feature vector and a centroid."""
    displacement = features - centroid
    return np.linalg.norm(displacement)
def get_ranked_clusters(clusters):
    """
    Rank the members of every cluster by ascending score.

    Input:
        clusters: defaultdict where items are in the format
        cluster_idx: [(name, score)]
    Output:
        ranked_clusters: defaultdict where key is cluster index, and entry is
        ordered list of (name, rank, score) tuples
    """
    ranked_clusters = defaultdict(list)
    for cluster_idx, members in clusters.items():
        by_score = sorted(members, key=lambda member: member[1])
        ranked_clusters[cluster_idx] = [
            (name, rank, score) for rank, (name, score) in enumerate(by_score)
        ]
    return ranked_clusters
def calculate_cluster_centroids(features, labels):
    """
    Compute the centroid (mean feature vector) of every cluster.

    Args:
        features: sequence of equal-length feature vectors.
        labels: cluster label of each feature vector, aligned with features.

    Returns:
        defaultdict mapping cluster label -> centroid as a numpy array.
    """
    cluster_centroids = defaultdict(lambda: np.zeros(len(features[0])))
    cluster_sizes = defaultdict(int)
    for num, label in enumerate(labels):
        cluster_centroids[label] += np.array(features[num])
        cluster_sizes[label] += 1
    # Average the summed points by the number of members in each cluster.
    # (The previous code divided by len(centroid) -- the feature dimension --
    # which was only coincidentally correct when cluster size == dimension.)
    for cluster in cluster_centroids:
        cluster_centroids[cluster] = cluster_centroids[cluster] / cluster_sizes[cluster]
    return cluster_centroids
def run_dbscan_clustering(features, names, args):
    """Cluster features with DBSCAN; return {label: [(name, score)]}."""
    labels = DBSCAN(eps=3, min_samples=2).fit(features).labels_
    centroids = calculate_cluster_centroids(features, labels)
    clusters = defaultdict(list)
    for idx, label in enumerate(labels):
        distance = calculate_score(centroids[label], features[idx])
        clusters[label].append((names[idx], distance))
    return clusters
def run_optics_clustering(features, names, args):
    """Cluster features with OPTICS; return {label: [(name, score)]}."""
    labels = OPTICS(min_samples=10).fit(features).labels_
    centroids = calculate_cluster_centroids(features, labels)
    clusters = defaultdict(list)
    for idx, label in enumerate(labels):
        distance = calculate_score(centroids[label], features[idx])
        clusters[label].append((names[idx], distance))
    return clusters
def run_kmeans_clustering(features, names, args):
    """Cluster features with k-means; return {label: [(name, score)]}."""
    model = KMeans(args.num_clusters).fit(features)
    clusters = defaultdict(list)
    for idx, label in enumerate(model.labels_):
        # k-means exposes its own centroids, so no recomputation is needed.
        distance = calculate_score(model.cluster_centers_[label], features[idx])
        clusters[label].append((names[idx], distance))
    return clusters
def run_hierarchical_clustering(features, names, args):
    """Cluster features agglomeratively; return {label: [(name, score)]}."""
    model = AgglomerativeClustering(args.num_clusters, linkage=args.linkage).fit(features)
    centroids = calculate_cluster_centroids(features, model.labels_)
    clusters = defaultdict(list)
    for idx, label in enumerate(model.labels_):
        distance = calculate_score(centroids[label], features[idx])
        clusters[label].append((names[idx], distance))
    return clusters
def normalize_features(features):
    """Standardize features to zero mean and unit variance per dimension."""
    data = np.asarray(features)
    mean = data.mean(axis=0)
    std = data.std(axis=0)
    # Small epsilon guards against division by zero on constant dimensions.
    return (data - mean) / (std + 1.0e-8)
def main(args):
    """Load features, optionally clip/normalize them, cluster, write results.

    Each input line is "name:v1\tv2\t...".  Output lines are written to
    args.output_file as "cluster,rank,score:name".
    """
    features = []
    names = []
    with open(args.features) as f:
        for line in f:
            line = line.strip()
            names.append(line.split(":")[0])
            features.append([float(x) for x in line.split(":")[-1].split("\t")])
    if 0.0 < args.clip_features < 100.0:
        # Cap every feature dimension at its given percentile.  The previous
        # code computed np.percentile(..., overwrite_input=True) and discarded
        # the result, so no clipping actually happened.
        caps = np.percentile(features, args.clip_features, axis=0)
        features = np.minimum(np.asarray(features), caps)
    if args.normalize_features:
        features = normalize_features(features)
    if args.type == "kmeans":
        clusters = run_kmeans_clustering(features, names, args)
    elif args.type == "hierarchical":
        clusters = run_hierarchical_clustering(features, names, args)
    elif args.type == "optics":
        clusters = run_optics_clustering(features, names, args)
    elif args.type == "dbscan":
        clusters = run_dbscan_clustering(features, names, args)
    ranked_clusters = get_ranked_clusters(clusters)
    with open(args.output_file, "w") as f:
        for cluster in ranked_clusters:
            for (name, rank, score) in ranked_clusters[cluster]:
                f.write(
                    ",".join([str(cluster), str(rank), str(score)])
                    + ":"
                    + str(name)
                    + "\n"
                )
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Cluster features with kMeans")
    parser.add_argument("--features", type=str, help="Features tsv file")
    # Renamed from "--output-csv": main() reads args.output_file and the
    # module docstring documents --output-file, so the old flag name made
    # every run crash with AttributeError.
    parser.add_argument(
        "--output-file",
        type=str,
        help="File to store information about clusters",
        required=True,
    )
    parser.add_argument(
        "--type",
        type=str,
        required=True,
        choices=["kmeans", "hierarchical", "optics", "dbscan"],
        help="Clustering technique to be used, one of kmeans and hierarchical",
    )
    parser.add_argument(
        "--num-clusters",
        type=int,
        help="Number of clusters",
        required=True,
    )
    parser.add_argument(
        "--linkage",
        type=str,
        help="Type of linkage in agglomerative clustering. See documentation in"
        " scikit-learn https://scikit-learn.org/stable/modules/generated/"
        " sklearn.cluster.AgglomerativeClustering.html",
        choices=["ward", "complete", "average", "single"],
        default="ward",
    )
    parser.add_argument(
        "--normalize-features",
        action="store_true",
        help="Perform feature normalization",
    )
    parser.add_argument(
        "--clip-features",
        type=float,
        help="Clip feature by percentile",
        default=95,
    )
    args = parser.parse_args()
    main(args)
| StarcoderdataPython |
3353737 | import select, socket, queue, json
from arduino import MIS_Arduino
from threading import Thread, Lock
from time import time
def socket_data_process(from_socket):
    """Decode one framed message from the wire.

    Frame layout: 5-character right-aligned decimal length, ':', then a JSON
    payload (possibly right-padded with spaces).  Accepts str or bytes, since
    int() and json.loads() handle both.  Returns the decoded JSON object;
    raises ValueError on a malformed frame or payload.
    """
    # The original called from_socket.strip() and discarded the result (str
    # and bytes are immutable).  Stripping must NOT be applied here: the
    # length header is right-aligned with leading spaces, so removing them
    # would shift the fixed [:5]/[6:] offsets and corrupt the frame.
    head, message = from_socket[:5], from_socket[6:]
    payload_len = int(head)
    return json.loads(message[:payload_len])
def socket_loop(arduino, lockduino):
    """Serve arduino commands and state over TCP with a select() event loop.

    Protocol: inbound commands are framed as a 5-char right-aligned length
    header, ':', then up to COMMAND_LEN bytes of JSON; outbound state uses
    the same header followed by JSON padded to SENSOR_LEN bytes.  Every
    arduino access is serialized through `lockduino`.
    """
    t0 = time()
    i = 0  # count of state messages sent (used only for throughput logging)
    HEADER_LEN = 5
    SENSOR_LEN = 506
    COMMAND_LEN = 250
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client.setblocking(False)
    # NOTE(review): getsockname() before bind() returns the wildcard address
    # ('0.0.0.0'), so this listens on all interfaces, port 50001 -- confirm
    # that is intended.
    client.bind((client.getsockname()[0], 50001))
    client.listen(5)
    inputs = [client]
    outputs = []
    message_queues = {}
    while inputs:
        # Block until at least one socket is ready.
        readable, writable, exceptional = select.select(inputs, outputs, inputs)
        # Process readable sockets.
        for s in readable:
            # The listening socket becoming readable means a new connection
            # has arrived.
            if s is client:
                # Accept the connection and register it for reading.
                connection, client_address = s.accept()
                print(f"SOCKET ACCEPTED: {client_address}")
                connection.setblocking(False)
                inputs.append(connection)
                # Per-connection queue for the new connection (currently
                # unused -- the put() below is commented out).
                message_queues[connection] = queue.Queue()
            # Otherwise it is an established connection with data (or EOF).
            else:
                # Read one framed command: header + ':' + payload.
                data = s.recv(COMMAND_LEN + HEADER_LEN + 1)
                if data:
                    # Decode the JSON command; skip the frame on parse errors.
                    try:
                        data = socket_data_process(data)
                    except ValueError as v:
                        print("SOCKET SEND/REC ERROR: ", data)
                        continue
                    # Forward the decoded command to the arduino, serialized.
                    with lockduino:
                        arduino.command(data)
                    # Start pushing state updates back to this peer.
                    if s not in outputs:
                        outputs.append(s)
                else:
                    # Empty read == peer closed the connection; unregister it.
                    if s in outputs:
                        outputs.remove(s)
                    inputs.remove(s)
                    s.close()
                    del message_queues[s]
        for s in writable:
            # Snapshot the arduino state under the lock, then send it framed.
            with lockduino:
                MESSAGE = arduino.state_dict()
            i += 1
            try:
                MESSAGE = json.dumps(MESSAGE)
                MESSAGE = f"{len(MESSAGE):>{HEADER_LEN}}:" + f"{MESSAGE:<{SENSOR_LEN}}"
                s.send(bytes(MESSAGE, 'utf8'))
            except Exception as e:
                print("SENDING ERROR: ", MESSAGE)
        for s in exceptional:
            # Error condition reported by select(): drop the socket entirely.
            inputs.remove(s)
            if s in outputs:
                outputs.remove(s)
            s.close()
            del message_queues[s]
if __name__ == "__main__":
    # NOTE(review): 11520 looks like a typo for the standard 115200 baud
    # rate -- confirm against the Arduino sketch before changing it.
    arduino = MIS_Arduino("/dev/ttyACM0", 11520)
    lockduino = Lock()
    # socket_loop requires (arduino, lockduino); the lock was previously
    # missing from args, so the thread died immediately with a TypeError.
    t_socket = Thread(target=socket_loop, args=(arduino, lockduino))
    t_socket.start()
    t_socket.join()
| StarcoderdataPython |
3332702 | """ Email """
import re
from dataclasses import dataclass
from src.domain.domainmodel.exceptions.invalid_email import InvalidEmail
@dataclass
class Email:
""" This class represents the Email datatype """
value: str
""" Email value object """
def __init__(self, value: str):
if self._validate_email(value) is False:
raise InvalidEmail()
self.value = value
@classmethod
def _validate_email(cls, value: str):
pattern = r"^[A-Za-z](\w|-|\.|_)+@[A-Za-z]+\.[A-Za-z]{1,3}$"
return bool(re.match(pattern, value))
| StarcoderdataPython |
6610894 | <gh_stars>10-100
##################################################################
# Views User Authentication and login / logout
# Author : <NAME> , All Rights reserved with Dr.E<NAME>.
# License : GNU-GPL Version 3
# Date : 01-01-2013
##################################################################
# Import Stdlib
import os
import sys
import urlparse
from datetime import datetime, date, time
import json
# General Django Imports
from django.shortcuts import render_to_response
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.template import RequestContext
#from django.core.context_processors import csrf
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.core.paginator import Paginator
#import json
from django.core import serializers
##from django.core.serializers import json
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.auth.views import login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.forms import AuthenticationForm
from django.template.response import TemplateResponse
from django.contrib.sites.models import get_current_site
# AuShadha Imports
import AuShadha.settings as settings
from AuShadha.settings import APP_ROOT_URL
from AuShadha.apps.aushadha_users.models import AuShadhaUser, AuShadhaUserForm
@sensitive_post_parameters()
@csrf_protect
@never_cache
def login_view(request, template_name='registration/login.html',
               redirect_field_name=REDIRECT_FIELD_NAME,
               # authentication_form=AuthenticationForm,
               authentication_form=AuShadhaUserForm,
               current_app=None, extra_context=None):
    """Display the login form (GET) or handle a login attempt (POST).

    POST returns a JSON payload {'success': bool, 'error_message': str, ...}
    rather than a redirect, for consumption by the AJAX front end.
    Python 2 / old-Django code: uses request.REQUEST and the urlparse module.
    """
    redirect_to = request.REQUEST.get(redirect_field_name, '')
    if request.method == "POST":
        form = authentication_form(data=request.POST)
        if form.is_valid():
            # Network location of the requested redirect target.
            netloc = urlparse.urlparse(redirect_to)[1]
            # Use the default setting if redirect_to is empty.
            if not redirect_to:
                redirect_to = settings.LOGIN_REDIRECT_URL
            # Security check -- don't allow redirection to a different host.
            elif netloc and netloc != request.get_host():
                redirect_to = settings.LOGIN_REDIRECT_URL
            # Security checks complete; log the user in.
            login(request, form.get_user())
            if request.session.test_cookie_worked():
                request.session.delete_test_cookie()
            data = {'success': True,
                    'error_message': "Successfully Loggged In !",
                    'redirect_to': redirect_to
                    }
        else:
            # Authentication failed: return a ready-to-render HTML snippet.
            data = {'success': False,
                    'error_message' : '''<em class='error_text'>ERROR! Could not login</em>
<p class='suggestion_text'>Please Check your Username & Password.</p>
<i class='help_text'>If you are sure they are correct,
Please contact Administrator to find
out whether you need to activate your account.
</i>
''',
                    }
        jsondata = json.dumps(data)
        return HttpResponse(jsondata, content_type='application/json')
    else:
        form = authentication_form(request)
    # GET path: render the login template with a fresh test cookie.
    request.session.set_test_cookie()
    current_site = get_current_site(request)
    context = {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context,
                            current_app=current_app)
@login_required
def logout_view(request):
    """Terminate the current session and send the user to the login page."""
    login_url = '/AuShadha/login/'
    logout(request)
    return HttpResponseRedirect(login_url)
# Create your views here.
| StarcoderdataPython |
11260367 | <reponame>zubairfarahi/Data-Science--Machine-Learning-
"""
Ex. 13: Write a decorator that changes how function f works; you may choose
how.  Currently f returns 'cmi'; one example would be for it to return
'CmI' after the decorator is applied.
"""
def dec(func):
    """Decorator that uppercases the first and last character of func()'s
    string result (e.g. 'cmi' -> 'CmI').

    Fixes over the original: func() is evaluated once instead of three
    times; strings shorter than two characters no longer duplicate their
    only character; and the transformed value is *returned* (the exercise
    asks the decorated function to return it) as well as printed.
    """
    def wrapper():
        text = func()
        if len(text) < 2:
            transformed = text.upper()
        else:
            transformed = text[0].upper() + text[1:-1] + text[-1].upper()
        print(transformed)
        return transformed
    return wrapper
# decorate me
@dec
def f():
    # Returns the raw lowercase string; @dec transforms first/last character.
    return 'zubair'

# Demonstration call (result is printed by the decorator's wrapper).
f()
107686 | <filename>Files/read_sql_fn4.py
'''
Convert an Oracle SQL script into a BODS sql(...) call script.

Handles block comments and rewrites "drop table" / "drop index" statements
into the idempotent form:
begin execute immediate 'drop table ...'; exception when others then null; end;
'''
def convertToBODSScript ( path):
    """Translate the Oracle SQL script at *path* into BODS sql('...') calls.

    Each SQL statement is wrapped as sql('NATLDWH_UTLMGT_DASHBOARD', '...')
    with continuation lines joined via || '...'.  Comment lines ('--' and
    '/* */' blocks, except hint lines containing 'parallel') are emitted as
    '#' comments, and DROP TABLE / DROP INDEX statements are wrapped in an
    ignore-errors PL/SQL block so re-runs do not fail.  Output goes to stdout.

    NOTE(review): indentation reconstructed from a whitespace-stripped dump;
    verify the nesting against the original before relying on it.
    """
    f0 = open(path,'r')
    f1 = []
    # Keep only non-blank lines.
    for line in f0:
        if line.strip():
            f1.append(line)
    f0.close()
    isBegin = True          # True when the next line starts a new statement
    isCommentBegin = False  # True while inside a /* ... */ block
    for line in f1:
        if isCommentBegin == False:
            # Start of a block comment (hints containing 'parallel' fall through).
            if line.strip(' \t\n\r').startswith('/*') and 'parallel' not in line.lower():
                isCommentBegin = True
                line = '# ' + line
                # A one-line /* ... */ comment closes immediately.
                if line.strip(' \t\n\r').endswith('*/') and 'parallel' not in line.lower():
                    isCommentBegin = False
                print (line)
                continue
            if line.lstrip(' \t\n\r').startswith('--'):
                line = '# ' + line
            else:
                if line.lstrip(' \t\n\r').find('--') > 0:  # this handles an inline comment
                    line = line[0 :line.find('--')] + '\n'
                # Escape single quotes for embedding inside the sql('...') literal.
                line = line.replace("'", "\\'")
                if isBegin==True:
                    line ="sql('NATLDWH_UTLMGT_DASHBOARD','" + line
                    isBegin =False
                else:
                    line ="|| '" + line
                    isBegin =False
                # ';' terminates the statement; otherwise continue the literal.
                if line.rstrip(' \t\n\r').endswith(';'):
                    line =line.replace(';', " ');\n")
                    isBegin =True
                else:
                    line =line.rstrip(' \t\n\r') + " '\n"
                # Make DROP TABLE / DROP INDEX idempotent via an ignore-errors block.
                if line.lower().find("'drop table ") >= 0:
                    line = line[0:line.lower().index("\'drop table ")] + "\'begin execute immediate \\" + line[line.lower().index("\'drop table "):line.index("\');")] + "\\\'; exception when others then null; end;\');"
                if line.lower().find("'drop index ") >= 0:
                    line = line[0:line.lower().index("\'drop index ")] + "\'begin execute immediate \\" + line[line.lower().index("\'drop index "):line.index("\');")] + "\\\'; exception when others then null; end;\');"
            print (line)
        else:
            # Inside a block comment: emit as '#' and watch for the closer.
            line = '# ' + line
            print(line)
            if line.strip(' \t\n\r').endswith('*/') and 'parallel' not in line.lower() :
                isCommentBegin = False
# Example invocations kept for reference:
#convertToBODSScript (r'C:\Users\Wenlei\Desktop\sample.sql')
#convertToBODSScript (r'C:\Users\Wenlei\Desktop\sample2.sql')
# NOTE(review): hard-coded developer path; this runs on import of the module.
convertToBODSScript ( r'C:\Users\Wenlei\Desktop\sample15.sql')
| StarcoderdataPython |
3474867 | from __future__ import annotations
from typing import Any, TypeVar, List, Set, Dict, Tuple, Optional, Union
from grapl_analyzerlib.node_types import (
EdgeT,
PropType,
PropPrimitive,
EdgeRelationship,
)
from grapl_analyzerlib.queryable import Queryable
from grapl_analyzerlib.schema import Schema
from grapl_analyzerlib.nodes.base import BaseView
from grapl_analyzerlib.nodes.entity import EntitySchema, EntityQuery, EntityView
AQ = TypeVar("AQ", bound="AssetQuery")
AV = TypeVar("AV", bound="AssetView")
T = TypeVar("T")
OneOrMany = Union[List[T], T]
def default_asset_properties() -> Dict[str, PropType]:
    """Return the property map shared by every Asset node."""
    properties: Dict[str, PropType] = {}
    # hostname: a plain, non-mandatory string property.
    properties["hostname"] = PropType(PropPrimitive.Str, False)
    return properties
def default_asset_edges() -> Dict[str, Tuple[EdgeT, str]]:
    """Return the forward-edge map for Asset nodes.

    Each entry maps a forward edge name to (edge type, reverse edge name).
    """
    edges: Dict[str, Tuple[EdgeT, str]] = {}
    edges["asset_ip"] = (
        EdgeT(AssetSchema, IpAddressSchema, EdgeRelationship.ManyToMany),
        "ip_assigned_to",
    )
    edges["asset_processes"] = (
        EdgeT(AssetSchema, ProcessSchema, EdgeRelationship.ManyToOne),
        "process_asset",
    )
    edges["files_on_asset"] = (
        EdgeT(AssetSchema, FileSchema, EdgeRelationship.ManyToOne),
        "file_asset",
    )
    return edges
class AssetSchema(EntitySchema):
    """Schema describing the properties and edges of Asset nodes."""

    def __init__(self):
        properties = default_asset_properties()
        edges = default_asset_edges()
        super(AssetSchema, self).__init__(properties, edges, view=lambda: AssetView)

    @staticmethod
    def self_type() -> str:
        """Node type name as stored in the graph."""
        return "Asset"
class AssetQuery(EntityQuery[AV, AQ]):
    """Query builder for Asset nodes: hostname property plus ip/process/file edges."""

    @classmethod
    def node_schema(cls) -> Schema:
        # Schema instance describing this node type's properties and edges.
        return AssetSchema()

    def with_hostname(
        self,
        *,
        eq: Optional["StrOrNot"] = None,
        contains: Optional["OneOrMany[StrOrNot]"] = None,
        starts_with: Optional["StrOrNot"] = None,
        ends_with: Optional["StrOrNot"] = None,
        regexp: Optional["OneOrMany[StrOrNot]"] = None,
        distance_lt: Optional[Tuple[str, int]] = None,
    ) -> AssetQuery:
        """Filter assets by hostname using keyword-only string comparators."""
        self._property_filters["hostname"].extend(
            _str_cmps(
                predicate="hostname",
                eq=eq,
                contains=contains,
                ends_with=ends_with,
                starts_with=starts_with,
                regexp=regexp,
                distance_lt=distance_lt,
            )
        )
        return self

    def with_asset_ip(self, *asset_ips: "IpAddressQuery"):
        """Constrain by attached IP addresses; also sets the reverse filter."""
        # NOTE(review): the default constructs IpAddressSchema(), though the
        # annotation says IpAddressQuery -- confirm this is intentional.
        asset_ips = asset_ips or [IpAddressSchema()]
        self.set_neighbor_filters("asset_ip", [asset_ips])
        for asset_ip in asset_ips:
            asset_ip.set_neighbor_filters("ip_assigned_to", [self])
        return self

    def with_asset_processes(self, *asset_processes: "ProcessQuery"):
        """Constrain by processes on this asset; also sets the reverse filter."""
        # NOTE(review): default is ProcessSchema(), annotation says ProcessQuery.
        asset_processes = asset_processes or [ProcessSchema()]
        self.set_neighbor_filters("asset_processes", [asset_processes])
        for asset_process in asset_processes:
            asset_process.set_neighbor_filters("process_asset", [self])
        return self

    def with_files_on_asset(self, *files_on_asset: "FileQuery"):
        """Constrain by files on this asset; also sets the reverse filter."""
        # NOTE(review): default is FileSchema(), annotation says FileQuery.
        files_on_asset = files_on_asset or [FileSchema()]
        self.set_neighbor_filters("files_on_asset", [files_on_asset])
        for file_on_asset in files_on_asset:
            file_on_asset.set_neighbor_filters("file_asset", [self])
        return self
class AssetView(EntityView[AV, AQ]):
    """
    .. list-table::
        :header-rows: 1

        * - Predicate
          - Type
          - Description
        * - node_key
          - string
          - A unique identifier for this node.
        * - hostname
          - string
          - The hostname of this asset.
        * - asset_processes
          - List[:doc:`/nodes/process`]
          - Processes associated with this asset.
    """

    queryable = AssetQuery

    @classmethod
    def node_schema(cls) -> Schema:
        return AssetSchema()

    def __init__(
        self,
        uid: str,
        node_key: str,
        graph_client: Any,
        node_types: Set[str],
        hostname: Optional[str] = None,
        asset_ip: Optional[List["IpAddressView"]] = None,
        asset_processes: Optional[List["ProcessView"]] = None,
        files_on_asset: Optional[List["FileView"]] = None,
        **kwargs,
    ):
        super().__init__(uid, node_key, graph_client, node_types=node_types, **kwargs)
        # Register every known predicate; missing ones stay None until fetched.
        self.set_predicate("node_types", node_types)
        self.set_predicate("hostname", hostname)
        self.set_predicate("asset_ip", asset_ip)
        self.set_predicate("asset_processes", asset_processes)
        self.set_predicate("files_on_asset", files_on_asset)

    def get_hostname(self, cached=True) -> Optional[str]:
        # cached=True returns the locally stored value when already fetched.
        return self.get_str("hostname", cached=cached)

    def with_asset_ip(self, *asset_ips, cached=True):
        """Fetch (or return cached) IP addresses attached to this asset."""
        if cached and self.asset_ip:
            return self.asset_ip
        # NOTE(review): the whole *asset_ips tuple is forwarded as ONE
        # positional argument into the query's *args -- confirm intended.
        self_node = (
            self.queryable()
            .with_node_key(eq=self.node_key)
            .with_asset_ip(asset_ips)
            .query_first(self.graph_client)
        )
        if self_node:
            self.asset_ip = self_node.asset_ip
        return self.asset_ip

    def with_asset_processes(self, *processes, cached=True):
        """Fetch (or return cached) processes running on this asset."""
        if cached and self.asset_processes:
            return self.asset_processes
        self_node = (
            self.queryable()
            .with_node_key(eq=self.node_key)
            .with_asset_processes(processes)
            .query_first(self.graph_client)
        )
        if self_node:
            self.asset_processes = self_node.asset_processes
        return self.asset_processes

    def with_files_on_asset(self, *files, cached=True):
        """Fetch (or return cached) files residing on this asset."""
        if cached and self.files_on_asset:
            return self.files_on_asset
        self_node = (
            self.queryable()
            .with_node_key(eq=self.node_key)
            .with_files_on_asset(files)
            .query_first(self.graph_client)
        )
        if self_node:
            self.files_on_asset = self_node.files_on_asset
        return self.files_on_asset
from grapl_analyzerlib.comparators import StrOrNot, _str_cmps
from grapl_analyzerlib.nodes.ip_address import (
IpAddressSchema,
IpAddressView,
IpAddressQuery,
)
from grapl_analyzerlib.nodes.file import FileSchema, FileView, FileQuery
from grapl_analyzerlib.nodes.process import ProcessSchema, ProcessView, ProcessQuery
# Register the reverse edges declared by AssetSchema with its neighbor schemas.
AssetSchema().init_reverse()
class AssetExtendsProcessQuery(ProcessQuery):
    """Mixin that adds Asset traversal to ProcessQuery."""

    def with_asset(self, *filters):
        """Constrain this process query by its owning Asset."""
        return self.with_to_neighbor(
            AssetQuery, "process_asset", "asset_processes", filters
        )
class AssetExtendsProcessView(ProcessView):
    """Mixin that adds Asset traversal to ProcessView."""

    def get_asset(self, *filters, cached=True):
        """Fetch the Asset this process runs on (cached when available)."""
        return self.get_neighbor(
            AssetQuery, "process_asset", "asset_processes", filters, cached=cached
        )
# Splice the Asset-aware mixins into the canonical Process query/view types.
ProcessQuery = ProcessQuery.extend_self(AssetExtendsProcessQuery)
ProcessView = ProcessView.extend_self(AssetExtendsProcessView)
| StarcoderdataPython |
5112109 | import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
#from torchvision import datasets, transforms
from torch.optim.lr_scheduler import StepLR
# initial network was from here: https://github.com/pytorch/examples/blob/master/mnist/main.py
def custom_init_weights(m):
if type(m) == nn.Linear:
#torch.nn.init.xavier_uniform_(m.weight)
print('normal init..')
nn.init.normal_(m.weight)
nn.init.normal_(m.bias)
elif type(m) == nn.Conv2d:
nn.init.normal_(m.bias)
class ImgNet(nn.Module):
def __init__(self,Ctg):
super(ImgNet, self).__init__()
'''
cnv2 = nn.Conv2d(1,32,2,1)
cnv3 = nn.Conv2d(32,16,2,1,padding=1)
'''
self.conv1 = nn.Conv2d(1, 32, 2, 1)
self.conv2 = nn.Conv2d(32, 16, 2, 1,padding=1)
self.dropout1 = nn.Dropout2d(0.25)
self.dropout2 = nn.Dropout2d(0.25)
self.fc1 = nn.Linear(2048, 64)
self.fc2 = nn.Linear(64, Ctg)
def forward(self, x):
x = self.conv1(x)
x = F.relu(x)
x = self.conv2(x)
x = F.max_pool2d(x, 2)
x = self.dropout1(x)
#pt_info(x)
x = torch.flatten(x, 1)
#pt_info(x)
x = self.fc1(x)
x = F.relu(x)
x = self.dropout2(x)
x = self.fc2(x)
output = F.log_softmax(x, dim=1)
return output | StarcoderdataPython |
33159 | <gh_stars>1-10
# 设置类
class Settings():
'''保存设置信息'''
def __init__(self):
'''初始化游戏的静态设置'''
self.screen_width = 850
self.screen_heght = 600
self.bg_color = (230, 230, 230)
# 玩家飞船数量设置
self.ship_limit = 3
# 子弹设置
self.bullet_width = 3
self.bullet_height = 15
self.bullet_color = 60, 60, 60
self.bullets_allowed = 10
# 外星人设置
## 外星人移动速度
self.fleet_drop_speed = 5
self.speedup_scale = 1.1
self.initialize_dynamic_settings()
def initialize_dynamic_settings(self):
self.fleet_direction = 1 # 1表示向右移,为-1表示向左移
self.ship_speed_factor = 5.3 # 移动步长
self.bullet_speed_factor = 30
self.alien_speed_factor = 1
def increase_speedO(self):
'''提高速度设置'''
self.alien_speed_factor *= self.speedup_scale
self.ship_speed_factor *= self.speedup_scale
self.alien_speed_factor *= self.speedup_scale | StarcoderdataPython |
5104860 | <filename>sdc/hiframes/pd_timestamp_ext.py
# *****************************************************************************
# Copyright (c) 2020, Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
from numba import types
from numba.extending import (typeof_impl, type_callable, models, register_model, NativeValue,
make_attribute_wrapper, lower_builtin, box, unbox, lower_cast,
lower_getattr, infer_getattr, overload_method, overload, intrinsic)
from numba.core import cgutils
from llvmlite import ir as lir
import pandas as pd
import datetime
from .. import hdatetime_ext
import llvmlite.binding as ll
# Expose the C datetime helpers from hdatetime_ext to LLVM by symbol name so
# lowered code can call them.
ll.add_symbol('parse_iso_8601_datetime', hdatetime_ext.parse_iso_8601_datetime)
ll.add_symbol('convert_datetimestruct_to_datetime', hdatetime_ext.convert_datetimestruct_to_datetime)
ll.add_symbol('np_datetime_date_array_from_packed_ints', hdatetime_ext.np_datetime_date_array_from_packed_ints)
# ---------------------------------------------------------------
# datetime.date implementation that uses a single int to store year/month/day
class DatetimeDateType(types.Type):
    """Numba type for datetime.date packed into a single 64-bit integer
    laid out as (year << 32) | (month << 16) | day."""

    def __init__(self):
        super(DatetimeDateType, self).__init__(name='DatetimeDateType()')
        self.bitwidth = 64
# Singleton type instance; dates are modeled as a plain int64 in nopython mode.
datetime_date_type = DatetimeDateType()
register_model(DatetimeDateType)(models.IntegerModel)
@box(DatetimeDateType)
def box_datetime_date(typ, val, c):
    """Box the packed int64 (year<<32 | month<<16 | day) into datetime.date."""
    # Unpack the three bit fields into Python int objects.
    year_obj = c.pyapi.long_from_longlong(c.builder.lshr(val, lir.Constant(lir.IntType(64), 32)))
    month_obj = c.pyapi.long_from_longlong(
        c.builder.and_(
            c.builder.lshr(val, lir.Constant(lir.IntType(64), 16)),
            lir.Constant(lir.IntType(64), 0xFFFF)))
    day_obj = c.pyapi.long_from_longlong(c.builder.and_(val, lir.Constant(lir.IntType(64), 0xFFFF)))
    # Call datetime.date(year, month, day) through the C-API.
    dt_obj = c.pyapi.unserialize(c.pyapi.serialize_object(datetime.date))
    res = c.pyapi.call_function_objargs(dt_obj, (year_obj, month_obj, day_obj))
    # Drop the temporary int objects created above.
    c.pyapi.decref(year_obj)
    c.pyapi.decref(month_obj)
    c.pyapi.decref(day_obj)
    return res
@type_callable(datetime.date)
def type_timestamp(context):
    # NOTE(review): three functions in this module share the name
    # `type_timestamp`; the decorator registers each typer with numba before
    # the module-level name is rebound, so typing still works.
    def typer(year, month, day):
        # TODO: check types
        return datetime_date_type
    return typer
@lower_builtin(datetime.date, types.int64, types.int64, types.int64)
def impl_ctor_timestamp(context, builder, sig, args):
    """Lower datetime.date(y, m, d) to the packed int64 (y<<32 | m<<16 | d)."""
    typ = sig.return_type  # unused, kept for signature clarity
    year, month, day = args
    nopython_date = builder.add(day,
                                builder.add(builder.shl(year, lir.Constant(lir.IntType(64), 32)),
                                            builder.shl(month, lir.Constant(lir.IntType(64), 16))))
    return nopython_date
# ------------------------------------------------------------------------
class PandasTimestampType(types.Type):
    """Numba type corresponding to pandas.Timestamp."""

    def __init__(self):
        super(PandasTimestampType, self).__init__(name='PandasTimestampType()')
# Singleton type instance used by the typing layer.
pandas_timestamp_type = PandasTimestampType()

@typeof_impl.register(pd.Timestamp)
def typeof_pd_timestamp(val, c):
    # Every runtime pd.Timestamp maps to the same numba type instance.
    return pandas_timestamp_type

# All Timestamp fields are modeled as int64.
ts_field_typ = types.int64
@register_model(PandasTimestampType)
class PandasTimestampModel(models.StructModel):
    """Native data model: one int64 slot per timestamp component."""

    def __init__(self, dmm, fe_type):
        members = [
            ('year', ts_field_typ),
            ('month', ts_field_typ),
            ('day', ts_field_typ),
            ('hour', ts_field_typ),
            ('minute', ts_field_typ),
            ('second', ts_field_typ),
            ('microsecond', ts_field_typ),
            ('nanosecond', ts_field_typ),
        ]
        models.StructModel.__init__(self, dmm, fe_type, members)
# Expose each struct member as a read-only attribute in nopython mode.
make_attribute_wrapper(PandasTimestampType, 'year', 'year')
make_attribute_wrapper(PandasTimestampType, 'month', 'month')
make_attribute_wrapper(PandasTimestampType, 'day', 'day')
make_attribute_wrapper(PandasTimestampType, 'hour', 'hour')
make_attribute_wrapper(PandasTimestampType, 'minute', 'minute')
make_attribute_wrapper(PandasTimestampType, 'second', 'second')
make_attribute_wrapper(PandasTimestampType, 'microsecond', 'microsecond')
make_attribute_wrapper(PandasTimestampType, 'nanosecond', 'nanosecond')
@overload_method(PandasTimestampType, 'date')
def overload_pd_timestamp_date(ptt):
    """Implement Timestamp.date() in nopython mode via datetime.date(y, m, d)."""
    def pd_timestamp_date_impl(ptt):
        return datetime.date(ptt.year, ptt.month, ptt.day)
    return pd_timestamp_date_impl
@unbox(PandasTimestampType)
def unbox_pandas_timestamp(typ, val, c):
    """Unbox a Python pd.Timestamp into the native 8-field int64 struct."""
    # Pull each component attribute off the Python object.
    year_obj = c.pyapi.object_getattr_string(val, "year")
    month_obj = c.pyapi.object_getattr_string(val, "month")
    day_obj = c.pyapi.object_getattr_string(val, "day")
    hour_obj = c.pyapi.object_getattr_string(val, "hour")
    minute_obj = c.pyapi.object_getattr_string(val, "minute")
    second_obj = c.pyapi.object_getattr_string(val, "second")
    microsecond_obj = c.pyapi.object_getattr_string(val, "microsecond")
    nanosecond_obj = c.pyapi.object_getattr_string(val, "nanosecond")
    # Populate the native struct with int64 conversions of each component.
    pd_timestamp = cgutils.create_struct_proxy(typ)(c.context, c.builder)
    pd_timestamp.year = c.pyapi.long_as_longlong(year_obj)
    pd_timestamp.month = c.pyapi.long_as_longlong(month_obj)
    pd_timestamp.day = c.pyapi.long_as_longlong(day_obj)
    pd_timestamp.hour = c.pyapi.long_as_longlong(hour_obj)
    pd_timestamp.minute = c.pyapi.long_as_longlong(minute_obj)
    pd_timestamp.second = c.pyapi.long_as_longlong(second_obj)
    pd_timestamp.microsecond = c.pyapi.long_as_longlong(microsecond_obj)
    pd_timestamp.nanosecond = c.pyapi.long_as_longlong(nanosecond_obj)
    # Release the attribute objects obtained above.
    c.pyapi.decref(year_obj)
    c.pyapi.decref(month_obj)
    c.pyapi.decref(day_obj)
    c.pyapi.decref(hour_obj)
    c.pyapi.decref(minute_obj)
    c.pyapi.decref(second_obj)
    c.pyapi.decref(microsecond_obj)
    c.pyapi.decref(nanosecond_obj)
    # Propagate any pending Python exception raised during conversion.
    is_error = cgutils.is_not_null(c.builder, c.pyapi.err_occurred())
    return NativeValue(pd_timestamp._getvalue(), is_error=is_error)
@type_callable(pd.Timestamp)
def type_timestamp(context):
    # NOTE(review): rebinds the module-level name `type_timestamp`; harmless
    # because @type_callable registers the typer before the rebind.
    def typer(datetime_type):
        # TODO: check types
        return pandas_timestamp_type
    return typer
@type_callable(datetime.datetime)
def type_timestamp(context):
    # NOTE(review): rebinds `type_timestamp` a third time; registration has
    # already happened so each typer remains active.
    def typer(year, month, day):  # how to handle optional hour, minute, second, us, ns?
        # TODO: check types
        return pandas_timestamp_type
    return typer
@lower_builtin(pd.Timestamp, pandas_timestamp_type)
def impl_ctor_ts_ts(context, builder, sig, args):
    """Lower pd.Timestamp(ts) as a field-for-field struct copy."""
    typ = sig.return_type
    rhs = args[0]
    ts = cgutils.create_struct_proxy(typ)(context, builder)
    rhsproxy = cgutils.create_struct_proxy(typ)(context, builder)
    rhsproxy._setvalue(rhs)
    cgutils.copy_struct(ts, rhsproxy)
    return ts._getvalue()
# , types.int64, types.int64, types.int64, types.int64, types.int64)
@lower_builtin(datetime.datetime, types.int64, types.int64, types.int64)
@lower_builtin(datetime.datetime, types.IntegerLiteral, types.IntegerLiteral, types.IntegerLiteral)
def impl_ctor_datetime(context, builder, sig, args):
    """Lower datetime.datetime(y, m, d); time-of-day fields are zeroed."""
    typ = sig.return_type
    year, month, day = args
    ts = cgutils.create_struct_proxy(typ)(context, builder)
    ts.year = year
    ts.month = month
    ts.day = day
    ts.hour = lir.Constant(lir.IntType(64), 0)
    ts.minute = lir.Constant(lir.IntType(64), 0)
    ts.second = lir.Constant(lir.IntType(64), 0)
    ts.microsecond = lir.Constant(lir.IntType(64), 0)
    ts.nanosecond = lir.Constant(lir.IntType(64), 0)
    return ts._getvalue()
| StarcoderdataPython |
5064653 | """
Notes:
1) Known numerical errors. Choosing the initial condition [-9.44500792, 2.92172601] for the trajectory generator and
the initial condition [-9.06892831, -9.7230096 ] for the closed-loop system leads to a trajectory error, when the
odeint is executed with the option mxstep<1500
"""
import logging
import inspect
import numpy as np
from pylab import *
from scipy.integrate import odeint
from scipy.integrate import quad
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import random
import itertools
def trajectory_plot(t, trajectory, case, title, linestyle, ax1=None, ax2=None):
    """Plot interleaved x/y trajectory components on a 4x2 subplot grid.

    `trajectory` has shape (len(t), 2*n_agents) with columns ordered
    x1, y1, x2, y2, ...; `case` selects which grid cell hosts the axis pair.
    Returns (ax1, ax2) so repeated calls can overlay trajectories.

    Fixes over the original: string comparisons used `is` (relies on CPython
    interning), `&` between booleans replaced with `and`, py2-only
    iterator.next() replaced with the next() builtin, and `/` replaced with
    `//` so agent labels stay integers on Python 3.
    """
    fig = plt.figure(title, figsize=(6, 8), dpi=80)
    fig.subplots_adjust(left=0.2, wspace=0.4, hspace=0.5)
    components = itertools.cycle([r'$x(t)$', r'$y(t)$'])
    color_vector = itertools.cycle(['red', 'blue', 'green', 'black'])
    for counter in range(trajectory.shape[1]):
        if (counter % 2) == 0:
            # Even columns are x components and go to ax1; create the axis
            # once, in the grid cell chosen by `case`.
            if (case == 'diagonal') and (counter == 0) and (ax1 is None):
                ax1 = plt.subplot2grid((4, 2), ((counter % 2), 0))
            elif (case == 'neighbour') and (counter == 0) and (ax1 is None):
                ax1 = plt.subplot2grid((4, 2), ((counter % 2) + 2, 0))
            elif (case == 'unconstrained') and (counter == 0) and (ax1 is None):
                ax1 = plt.subplot2grid((4, 2), ((counter % 2), 1))
            elif (counter == 0) and (ax1 is None):
                ax1 = plt.subplot2grid((4, 2), ((counter % 2) + 2, 1))
            # One color per agent; y reuses the color picked for its x.
            color = next(color_vector)
            ax1.plot(t, trajectory[:, counter], label=r'x' + str(counter // 2 + 1), linestyle=linestyle, c=color)
            ax1.set_xticklabels([])
            ax1.set_title(case)
        else:
            # Odd columns are y components and go to ax2.
            if (case == 'diagonal') and (counter == 1) and (ax2 is None):
                ax2 = plt.subplot2grid((4, 2), ((counter % 2), 0))
                ax2.set_xticklabels([])
            elif (case == 'neighbour') and (counter == 1) and (ax2 is None):
                ax2 = plt.subplot2grid((4, 2), ((counter % 2) + 2, 0))
                ax2.set_xlabel(r'time $t$')
            elif (case == 'unconstrained') and (counter == 1) and (ax2 is None):
                ax2 = plt.subplot2grid((4, 2), ((counter % 2), 1))
                ax2.set_xticklabels([])
            elif (counter == 1) and (ax2 is None):
                ax2 = plt.subplot2grid((4, 2), ((counter % 2) + 2, 1))
                ax2.set_xlabel(r'time $t$')
            plt.plot(t, trajectory[:, counter], label=r'y' + str(counter // 2 + 1), linestyle=linestyle, c=color)
        # Alternate the y-axis label between $x(t)$ and $y(t)$.
        plt.ylabel(next(components))
        plt.grid(True)
    plt.tight_layout()
    return ax1, ax2
def initial_condition_generator(n_agents, zero=False):
    """Return a (2*n_agents, 1) column of initial conditions in [-10, 10].

    Args:
        n_agents: number of agents; two states (x, y) per agent.
        zero: when True return the zero vector instead of random values.

    Fix vs. original: `zero is False` relied on boolean identity and
    mis-handled truthy non-True values (e.g. zero=1); use plain truthiness.
    """
    logging.info('Starting')
    logging.debug('input = %s' % n_agents)
    if zero:
        ic = np.zeros(2 * n_agents)
    else:
        # random magnitude in (0, 1), random sign via (-1)**randrange(1, 10)
        ic = [random.random() * (-1) ** random.randrange(1, 10)
              for _ in range(2 * n_agents)]
    ic = np.array(ic).reshape(2 * n_agents, 1) * 10
    logging.debug('output = %s' % ic)
    logging.info('End\n\n')
    return ic
def load_system(states, t, open_loop, M=np.eye(2), target_trajectory=None, case='zero'):
    """odeint right-hand side for the coupled multi-agent system.

    Args:
        states: flat state vector [x1, y1, x2, y2, ...].
        t: current time (required by odeint, unused here).
        open_loop: when True run with zero input; otherwise apply feedback_law
            towards the next sample of `target_trajectory`.
        M: metric matrix used by the feedback law.
        target_trajectory: iterator over reference states (e.g. itertools.cycle).
        case: coupling case forwarded to the feedback law / Y_matrix.

    Fixes vs. original: `//` for the agent count (Py2 `/` would yield a float
    in Py3) and builtin `next()` instead of Py2 `.next()`.
    """
    logging.info('\n Starting')
    logging.debug('')
    n_agents = len(states) // 2
    x = np.array([states[2 * item] for item in range(n_agents)])
    y = np.array([states[2 * item + 1] for item in range(n_agents)])
    if open_loop is True:
        u = np.zeros(n_agents)
    else:
        q_tt = next(target_trajectory)
        u = feedback_law(x, y, q_tt, M, case)
    dq = np.squeeze(system(x, y, u)) + 0.0
    logging.debug('output = %s' % dq)
    logging.info('End\n')
    return dq
def feedback_law(x, y, q_tt, M, case):
    """Integral feedback along the straight line from q to the target q_tt.

    k_i = -Integral_0^1 [Y(gamma(s)) M (q_tt - q)]_i ds with
    gamma(s) the line segment between the current and target states.

    Fixes vs. original: the four copy-pasted K1..K4 lambdas and quad calls are
    collapsed into one loop, and the loop-invariant M.dot(q_tt - q) is hoisted.

    Returns:
        (n_agents, 1) column vector of control inputs.
    """
    logging.info('\n Starting')
    n_agents = len(x)
    x_tt = np.array([q_tt[2 * item] for item in range(n_agents)])
    y_tt = np.array([q_tt[2 * item + 1] for item in range(n_agents)])
    # interleaved state vector [x1, y1, x2, y2, ...]
    q = np.ravel(np.column_stack((x, y)))
    weighted_error = M.dot(q_tt - q)

    def integrand(s, column):
        # column `column` of Y(gamma(s)) * M * (q_tt - q)
        return np.dot(Y_matrix(x + s * (x_tt - x), y + s * (y_tt - y), case),
                      weighted_error)[0, column]

    k = -np.array([[quad(integrand, 0.0, 1.0, args=(column,))[0]]
                   for column in range(n_agents)])
    logging.debug('output = %s' % k)
    logging.info('\n End')
    return k
def Y_matrix(x, y, case):
    """Differential gain matrix Y(x, y) for the chosen metric/coupling case.

    Args:
        x, y: length-4 sequences of agent positions/velocities.
        case: one of 'diagonal', 'neighbour', 'unconstrained'; anything else
            yields the zero matrix.

    Fix vs. original: string literals are compared with `==` instead of the
    identity operator `is` (interning-dependent, SyntaxWarning in Py3.8+).

    Returns:
        np.matrix of shape (4, 8).
    """
    x1 = x[0]
    x2 = x[1]
    x3 = x[2]
    x4 = x[3]
    y1 = y[0]
    y2 = y[1]
    y3 = y[2]
    y4 = y[3]
    if case == 'diagonal':
        Y = np.mat([
            [-2 * y1, - 3.0 - 2.0 * (x1 ** 2 + y1 ** 2), 0, 0, 0, 0, 0, 0],
            [0, 0, -2 * y2, - 3.0 - 2.0 * (x2 ** 2 + y2 ** 2), 0, 0, 0, 0],
            [0, 0, 0, 0, -2 * y3, - 3.0 - 2.0 * (x3 ** 2 + y3 ** 2), 0, 0],
            [0, 0, 0, 0, 0, 0, -2 * y4, - 3.0 - 2.0 * (x4 ** 2 + y4 ** 2)]
        ])
    elif case == 'neighbour':
        Y = np.mat([
            [-2.0 * y1, -2.0 * (1.0 + x2 ** 2 + y2 ** 2), 0, 0, 0, 0, 0, 0],
            [0, 0, -2.0 * y2, -2.0 * (1.0 + x1 ** 2 + y1 ** 2 + x3 ** 2 + y3 ** 2), 0, 0, 0, 0],
            [0, 0, 0, 0, -2.0 * y3, -2.0 * (1.0 + x2 ** 2 + y2 ** 2 + x4 ** 2 + y4 ** 2), 0, 0],
            [0, 0, 0, 0, 0, 0, -2.0 * y4, -2.0 * (1.0 + x3 ** 2 + y3 ** 2)],
        ])
    elif case == 'unconstrained':
        Y = np.mat([
            [-2 * y1, -2 * (1 + x1 ** 2 + y1 ** 2 + x2 ** 2 + y2 ** 2 + x3 ** 2 + y3 ** 2 + x4 ** 2 + y4 ** 2), 0, 0,
             0.03 * y1, 0, 0, 0],
            [0, 0, -2 * y2, -2 * (1 + x1 ** 2 + y1 ** 2 + x2 ** 2 + y2 ** 2 + x3 ** 2 + y3 ** 2 + x4 ** 2 + y4 ** 2), 0,
             0, 0.04 * y2, 0],
            [0.04 * y2, 0, 0, 0, -2 * y3,
             -2 * (1 + x1 ** 2 + y1 ** 2 + x2 ** 2 + y2 ** 2 + x3 ** 2 + y3 ** 2 + x4 ** 2 + y4 ** 2), 0, 0],
            [0, 0, 0.03 * y4, 0, 0, 0, -2 * y4,
             -2 * (1 + x1 ** 2 + y1 ** 2 + x2 ** 2 + y2 ** 2 + x3 ** 2 + y3 ** 2 + x4 ** 2 + y4 ** 2)]
        ])
    else:
        Y = np.mat(np.zeros((4, 8)))
    return Y
def system(x, y, u):
    """Dynamics of n diffusively coupled agents (originally unrolled for 4).

    Per agent i:
        dx_i = -x_i - x_i^3 + y_i^2 + eps * (x_{i-1}^3 - 2 x_i^3 + x_{i+1}^3)
        dy_i = u_i
    with mirrored boundary conditions (the first/last agent uses itself as
    its missing neighbour), exactly as in the original unrolled formulas.

    Improvement vs. original: the four hand-written copies are replaced by a
    loop, which also generalizes to any number of agents (backward compatible).

    Returns:
        np.array of shape (2n, 1) with interleaved [dx1, dy1, dx2, dy2, ...].
    """
    logging.info('\n Starting')
    logging.debug('input = %s' % list((x, y, u)))
    InterconnectionParameter = 0.01
    n = len(x)
    rows = []
    for i in range(n):
        left = x[max(i - 1, 0)]       # mirrored at the left boundary
        right = x[min(i + 1, n - 1)]  # mirrored at the right boundary
        coupling = InterconnectionParameter * (left ** 3 - 2 * x[i] ** 3 + right ** 3)
        rows.append([-x[i] - x[i] ** 3 + y[i] ** 2 + coupling])
        rows.append([u[i]])
    output = np.array(rows)
    # return the two state derivatives per agent
    logging.debug('output = %s' % output)
    logging.info('End\n')
    return output
def trajectory_generator(t, ic, open_loop, M=np.eye(4), target_trajectory=None, case='zero'):
    """Integrate `load_system` over the time grid `t` from initial state `ic`.

    Returns the odeint solution array (one row per time sample).
    """
    logging.info('\n Starting')
    logging.debug('input = %s' % list((t, ic, open_loop)))
    solver_args = (open_loop, M, target_trajectory, case)
    solution, ode_info = odeint(
        load_system, ic, t,
        args=solver_args,
        Dfun=None,
        col_deriv=0,
        full_output=1,
        mxstep=2000,
    )
    logging.debug('ode info = %s' % ode_info)
    logging.debug('output = %s' % solution)
    logging.info('End\n')
    return solution
def main():
    """Generate an open-loop reference for 4 agents, then track it in closed
    loop for each metric case, plotting trajectories and Riemannian energy.

    Fixes vs. original: Py2 `print` statements (syntax errors in Py3) replaced
    by the print() function, `.next()` replaced by builtin `next()`, string
    comparisons use `==` instead of `is`, unused `labelx` removed.
    """
    logging.basicConfig(filename='4agents.log', filemode='w', level=logging.DEBUG,
                        format='%(asctime)s:%(levelname)s:%(funcName)s:%(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
    logging.info('\n Starting')
    n_agents = 4
    open_loop = True
    t = np.arange(0.0, 1.0, 0.001)
    ic = np.squeeze(initial_condition_generator(n_agents, True))
    print('Initial condition for trajectory generation:', ic)
    target_trajectory = trajectory_generator(t, ic, open_loop)
    tt = itertools.cycle(target_trajectory)
    styles_vector = itertools.cycle(['-', '--', '-.', ':'])
    for case in ['diagonal', 'neighbour', 'unconstrained', 'open loop']:
        line_style = next(styles_vector)
        if case == 'diagonal':
            W = np.diag([2, 1, 1, 1, 1, 1, 1, 1])
            ic = np.squeeze(initial_condition_generator(n_agents))
            # Fixed initial condition chosen to generate a readable figure.
            ic = np.array([-1.44395373, -5.17801414, 6.1762607, -8.0185518, -7.05370655, 9.64592077, 7.94661802,
                           3.66124175])
        elif case == 'neighbour':
            W = np.diag([2, 1, 1, 1, 1, 1, 2, 1])
        elif case == 'unconstrained':
            W = np.diag([1, 1, 2, 1, 2, 1, 1, 1])
        else:
            W = np.eye(2 * n_agents)
        M = np.linalg.inv(W)
        print('Initial condition for closed-loop system:', ic)
        open_loop = False
        simulation = trajectory_generator(t, ic, open_loop, M, tt, case)
        trajectory_plot(t, simulation, case, 'Simulation', line_style)
        # Riemannian energy e(t) = (q*(t) - q(t))^T M (q*(t) - q(t)).
        V_temp = np.dot((target_trajectory - simulation), M.dot((target_trajectory - simulation).transpose()))
        V = []
        for counter in range(len(V_temp)):
            if case is None:  # dead branch kept from the original (was '#open loop')
                V.append(np.exp(-7.5 * t[counter]) * V_temp.max())
            else:
                V.append(V_temp[counter, counter])
        V = np.array(V)
        fig = plt.figure('Riemannian Energy')
        fig.subplots_adjust(left=0.2, wspace=0.4, hspace=0.0)
        if case == 'diagonal':
            ax3 = plt.subplot2grid((2, 1), (0, 0), colspan=2)
            ax4 = plt.subplot2grid((2, 1), (1, 0), colspan=2)
        ax3.plot(t, V, label=case, linestyle=line_style)
        ax3.grid(True, linewidth=0.5)
        ax3.set_ylabel(r'$e(t)$')
        ax3.legend(loc='best')
        ax3.set_title('Riemannian energy')
        ax3.set_xticklabels([])
        ax4.plot(t, V, label=case, linestyle=line_style)
        ax4.grid(True, which="both", linewidth=0.5)
        ax4.set_yscale('log')
        ax4.set_xlabel(r'time $t$')
        ax4.set_ylabel(r'$\log(e(t))$')
        ax4.legend(loc='best')
        plt.tight_layout()
    plt.savefig('Riemannian_energy.eps', format='eps', dpi=1200, bbox_inches='tight')
    fig = plt.figure('Simulation')
    plt.savefig('simulation.eps', format='eps', dpi=1200, bbox_inches='tight')
    plt.show()
    logging.info('End\n')
    return


if __name__ == '__main__':
    main()
9732857 | <reponame>dawar-s/algo-ds-sol-python
def selectionSort(array):
    """Sort `array` in place (ascending) by selection sort and return it.

    Fix vs. original: the redundant `smallest` value tracking is removed
    (only the index of the minimum is needed) and the manual `while` loops
    are replaced by idiomatic `for`/`range`.
    """
    for i in range(len(array)):
        min_idx = i
        for j in range(i + 1, len(array)):
            if array[j] < array[min_idx]:
                min_idx = j
        array[i], array[min_idx] = array[min_idx], array[i]
    return array
| StarcoderdataPython |
125748 | import numpy
import cv2
def make2Dcolormap(
        colors=(
            (1, 1, 0),
            (0, 0, 1),
            (0, 1, 0),
            (1, 0, 0),
        ), size=20):
    """Build a (size+1) x (size+1) RGB colormap by interpolating the four
    corner colors (array corners [1,1], [0,1], [0,0], [1,0] in that order),
    then clipping the result to [0, 1]."""
    corner_grid = numpy.zeros((2, 2, 3))
    for (row, col), rgb in zip(((1, 1), (0, 1), (0, 0), (1, 0)), colors):
        corner_grid[row, col] = rgb
    side = size + 1  # output is one pixel larger than `size`, as in the original
    blended = cv2.resize(corner_grid, (side, side))
    return numpy.clip(blended, 0, 1)
def flat_combine(lst):
    """Flatten every array in `lst` to 2-D (keeping its last axis) and stack
    them all into one array along axis 0."""
    flattened = []
    for arr in lst:
        flattened.append(arr.reshape(-1, arr.shape[-1]))
    return numpy.concatenate(flattened)
| StarcoderdataPython |
162944 | <reponame>anmolmalik01/mediapipe<filename>holistic.py<gh_stars>1-10
import cv2
import mediapipe as mp
import time
class mediapipe:
    """Webcam demos for several MediaPipe solutions (holistic, face mesh,
    hands, pose).

    Each method opens webcam 0, runs the corresponding MediaPipe model
    frame-by-frame, draws the detected landmarks on a horizontally flipped
    preview window, and exits when 'q' is pressed.

    NOTE(review): the class name shadows the `mediapipe` package name —
    consider renaming to avoid confusion at call sites.
    """
    # ============================================ init =================================================
    def __init__(self):
        # Cache the MediaPipe solution modules and drawing helpers on the
        # instance. pTime stores the previous frame's timestamp for the
        # FPS overlay.
        # mediapipe solutions variable
        mp_drawing = mp.solutions.drawing_utils
        mp_holistic = mp.solutions.holistic
        mp_drawing_styles = mp.solutions.drawing_styles
        mp_face_detection = mp.solutions.face_detection
        mp_face_mesh = mp.solutions.face_mesh
        mp_hands = mp.solutions.hands
        mp_pose = mp.solutions.pose
        pTime = 0
        self.mp_drawing = mp_drawing
        self.mp_holistic = mp_holistic
        self.mp_drawing_styles = mp_drawing_styles
        self.mp_face_detection = mp_face_detection
        self.mp_hands = mp_hands
        self.mp_pose = mp_pose
        self.mp_face_mesh = mp_face_mesh
        self.pTime = pTime
    # ===================================================================================================
    def simple_holistic(self, show_fps=False):
        """Run the holistic model, drawing only face contours and pose
        landmarks. Set show_fps=True to overlay frames per second."""
        # capturing webcam 0
        cap = cv2.VideoCapture(0)
        # Initiate holistic model
        with self.mp_holistic.Holistic( min_detection_confidence=0.5, min_tracking_confidence=0.5 ) as holistic:
            while cap.isOpened():
                success, image = cap.read()
                if not success:
                    print("Ignoring empty camera frame.")
                    continue
                # Recolor feed: MediaPipe expects RGB; mark read-only for the model
                image.flags.writeable = False
                image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
                # Make Detections
                results = holistic.process(image)
                # Recolor image back to BGR for rendering
                image.flags.writeable = True
                image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
                # face landmarks
                self.mp_drawing.draw_landmarks(
                    image,
                    results.face_landmarks,
                    self.mp_holistic.FACEMESH_CONTOURS,
                    landmark_drawing_spec=None,
                    connection_drawing_spec=self.mp_drawing_styles.DrawingSpec(color=(0, 167, 196), thickness=2, circle_radius=1)
                )
                # pose landmarks
                self.mp_drawing.draw_landmarks(
                    image,
                    results.pose_landmarks,
                    self.mp_holistic.POSE_CONNECTIONS,
                    landmark_drawing_spec=self.mp_drawing_styles.DrawingSpec(color=(0,0,0), thickness=2, circle_radius=2),
                    connection_drawing_spec=self.mp_drawing_styles.DrawingSpec(color=(255, 255, 255), thickness=3, circle_radius=2)
                )
                # flip image horizontally for a selfie-view display
                flip_image = cv2.flip(image, 1)
                # fps overlay (frame delta based)
                if show_fps==True:
                    cTime = time.time()
                    fps = 1 / (cTime - self.pTime)
                    self.pTime = cTime
                    cv2.putText(flip_image, str(int(fps)), (70, 50), cv2.FONT_HERSHEY_PLAIN, 3, (255, 0, 0), 3)
                # Showing image
                cv2.imshow('MediaPipe Holistic', flip_image)
                # quit on 'q'
                if cv2.waitKey(10) & 0xFF == ord('q'):
                    break
        cap.release()
        cv2.destroyAllWindows()
    # ===================================================================================================
    def complex_holistic(self, show_fps=True):
        """Run the holistic model, drawing face tesselation, both hands and
        pose landmarks. Set show_fps=False to hide the FPS overlay."""
        # capturing webcam 0
        cap = cv2.VideoCapture(0)
        # Initiate holistic model
        with self.mp_holistic.Holistic( min_detection_confidence=0.5, min_tracking_confidence=0.5 ) as holistic:
            while cap.isOpened():
                success, image = cap.read()
                if not success:
                    print("Ignoring empty camera frame.")
                    continue
                # Recolor feed to RGB and mark read-only for the model
                image.flags.writeable = False
                image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
                # Make Detections
                results = holistic.process(image)
                # Recolor image back to BGR for rendering
                image.flags.writeable = True
                image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
                # face landmarks
                self.mp_drawing.draw_landmarks(
                    image,
                    results.face_landmarks,
                    self.mp_holistic.FACEMESH_TESSELATION,
                    landmark_drawing_spec=self.mp_drawing.DrawingSpec(color=(255, 255, 255), thickness=1, circle_radius=1),
                    connection_drawing_spec=self.mp_drawing.DrawingSpec(color=(255, 255, 255), thickness=1, circle_radius=1)
                )
                # Right hand
                self.mp_drawing.draw_landmarks(
                    image,
                    results.right_hand_landmarks,
                    self.mp_holistic.HAND_CONNECTIONS,
                    landmark_drawing_spec=self.mp_drawing.DrawingSpec(color=(0, 0, 0), thickness=3, circle_radius=3),
                    connection_drawing_spec=self.mp_drawing.DrawingSpec(color=(255, 255, 255), thickness=2, circle_radius=2)
                )
                # Left Hand
                self.mp_drawing.draw_landmarks(
                    image,
                    results.left_hand_landmarks,
                    self.mp_holistic.HAND_CONNECTIONS,
                    landmark_drawing_spec=self.mp_drawing.DrawingSpec(color=(0, 0, 0), thickness=3, circle_radius=3),
                    connection_drawing_spec=self.mp_drawing.DrawingSpec(color=(255, 255, 255), thickness=2, circle_radius=2)
                )
                # Pose landmarks
                self.mp_drawing.draw_landmarks(
                    image,
                    results.pose_landmarks,
                    self.mp_holistic.POSE_CONNECTIONS,
                    landmark_drawing_spec=self.mp_drawing_styles.DrawingSpec(color=(0,0,0), thickness=2, circle_radius=2),
                    connection_drawing_spec=self.mp_drawing_styles.DrawingSpec(color=(255, 255, 255), thickness=3, circle_radius=2)
                )
                # flip image horizontally for a selfie-view display
                flip_image = cv2.flip(image, 1)
                # fps overlay
                if show_fps==True:
                    cTime = time.time()
                    fps = 1 / (cTime - self.pTime)
                    self.pTime = cTime
                    cv2.putText(flip_image, str(int(fps)), (70, 50), cv2.FONT_HERSHEY_PLAIN, 3, (255, 0, 0), 3)
                # showing flipped image
                cv2.imshow('MediaPipe Holistic', flip_image)
                # quit on 'q'
                if cv2.waitKey(10) & 0xFF == ord('q'):
                    break
        cap.release()
        cv2.destroyAllWindows()
    # =========================================================================================================================
    def face_mesh(self, show_fps=True, contours=True ):
        """Run the face-mesh model, drawing the tesselation and (optionally)
        the face contours.

        NOTE(review): unlike the other methods, the frame is NOT converted
        BGR->RGB before process() and not converted back afterwards —
        confirm whether this is intentional (MediaPipe models expect RGB).
        """
        # capturing webcam 0
        cap = cv2.VideoCapture(0)
        # Initiate face mesh
        with self.mp_face_mesh.FaceMesh( min_detection_confidence=0.5, min_tracking_confidence=0.5 ) as face_mesh:
            while cap.isOpened():
                success, image = cap.read()
                if not success:
                    print("Ignoring empty camera frame.")
                    # If loading a video, use 'break' instead of 'continue'.
                    continue
                # Mark frame read-only for the model
                image.flags.writeable = False
                # Make Detections
                results = face_mesh.process(image)
                # Allow drawing on the frame again
                image.flags.writeable = True
                # face mesh results
                if results.multi_face_landmarks:
                    for face_landmarks in results.multi_face_landmarks:
                        self.mp_drawing.draw_landmarks(
                            image=image,
                            landmark_list=face_landmarks,
                            connections=self.mp_face_mesh.FACEMESH_TESSELATION,
                            landmark_drawing_spec=None,
                            connection_drawing_spec=self.mp_drawing.DrawingSpec(color=(255, 255, 255), thickness=1, circle_radius=1 )
                        )
                        if contours==True:
                            self.mp_drawing.draw_landmarks(
                                image=image,
                                landmark_list=face_landmarks,
                                connections=self.mp_face_mesh.FACEMESH_CONTOURS,
                                landmark_drawing_spec=None,
                                connection_drawing_spec=self.mp_drawing_styles.get_default_face_mesh_contours_style()
                            )
                # flip image horizontally for a selfie-view display
                flip_image = cv2.flip(image, 1)
                # fps overlay
                if show_fps==True:
                    cTime = time.time()
                    fps = 1 / (cTime - self.pTime)
                    self.pTime = cTime
                    cv2.putText(flip_image, str(int(fps)), (70, 50), cv2.FONT_HERSHEY_PLAIN, 3, (255, 0, 0), 3)
                # showing image
                cv2.imshow('MediaPipe Holistic', flip_image)
                # quit on 'q'
                if cv2.waitKey(10) & 0xFF == ord('q'):
                    break
        cap.release()
        cv2.destroyAllWindows()
    # ==================================================================================================
    def hand_detector(self, show_fps=True):
        """Run the hands model and draw landmarks for every detected hand."""
        # Capturing webcam 0
        cap = cv2.VideoCapture(0)
        with self.mp_hands.Hands( min_detection_confidence=0.5, min_tracking_confidence=0.5 ) as hands:
            while cap.isOpened():
                success, image = cap.read()
                if not success:
                    print("Ignoring empty camera frame.")
                    # If loading a video, use 'break' instead of 'continue'.
                    continue
                # Recolor feed to RGB and mark read-only for the model
                image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
                image.flags.writeable = False
                # Make Detections
                results = hands.process(image)
                # Recolor image back to BGR for rendering
                image.flags.writeable = True
                image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
                # hand detector results
                if results.multi_hand_landmarks:
                    for hand_landmarks in results.multi_hand_landmarks:
                        self.mp_drawing.draw_landmarks(
                            image,
                            hand_landmarks,
                            self.mp_hands.HAND_CONNECTIONS,
                            landmark_drawing_spec=self.mp_drawing.DrawingSpec(color=(0, 0, 0), thickness=3, circle_radius=3),
                            connection_drawing_spec=self.mp_drawing.DrawingSpec(color=(255, 255, 255), thickness=2, circle_radius=2)
                        )
                # flipping image for a selfie-view display
                flip_image = cv2.flip(image, 1)
                # fps overlay
                if show_fps==True:
                    cTime = time.time()
                    fps = 1 / (cTime - self.pTime)
                    self.pTime = cTime
                    cv2.putText(flip_image, str(int(fps)), (70, 50), cv2.FONT_HERSHEY_PLAIN, 3, (255, 0, 0), 3)
                # showing flipped image
                cv2.imshow('MediaPipe Holistic', flip_image)
                # quitting on 'q'
                if cv2.waitKey(10) & 0xFF == ord('q'):
                    break
        cap.release()
        cv2.destroyAllWindows()
    # ==================================================================================================
    def pose(self, show_fps=True):
        """Run the pose model and draw the body pose landmarks."""
        # capturing webcam 0
        cap = cv2.VideoCapture(0)
        with self.mp_pose.Pose( min_detection_confidence=0.5, min_tracking_confidence=0.5 ) as pose:
            while cap.isOpened():
                success, image = cap.read()
                if not success:
                    print("Ignoring empty camera frame.")
                    # If loading a video, use 'break' instead of 'continue'.
                    continue
                # Recolor feed to RGB and mark read-only for the model
                image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
                image.flags.writeable = False
                # Make Detections
                results = pose.process(image)
                # Recolor image back to BGR for rendering
                image.flags.writeable = True
                image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
                self.mp_drawing.draw_landmarks(
                    image,
                    results.pose_landmarks,
                    self.mp_pose.POSE_CONNECTIONS,
                    landmark_drawing_spec=self.mp_drawing_styles.DrawingSpec(color=(0,0,0), thickness=2, circle_radius=2),
                    connection_drawing_spec=self.mp_drawing_styles.DrawingSpec(color=(255, 255, 255), thickness=3, circle_radius=2)
                )
                # flipping image for a selfie-view display
                flip_image = cv2.flip(image, 1)
                # fps overlay
                if show_fps==True:
                    cTime = time.time()
                    fps = 1 / (cTime - self.pTime)
                    self.pTime = cTime
                    cv2.putText(flip_image, str(int(fps)), (70, 50), cv2.FONT_HERSHEY_PLAIN, 3, (255, 0, 0), 3)
                # showing flipped image
                cv2.imshow('MediaPipe Holistic', flip_image)
                # quitting on 'q'
                if cv2.waitKey(10) & 0xFF == ord('q'):
                    break
        cap.release()
        cv2.destroyAllWindows()
# ================================================ class end ===============================================================
if __name__ == '__main__':
    # Demo entry point: run the simple holistic webcam loop.
    # Fix vs. original: stray extraction junk fused onto the last line removed.
    pipe = mediapipe()
    pipe.simple_holistic()
6602925 | from PIL import Image
import torch
from torch.utils.data import Dataset
from data.transforms import simple_image_preprocess
class CassavaDataset(Dataset):
    """Map-style torch Dataset over a dataframe of cassava leaf images.

    Args:
        df: pandas DataFrame with at least 'path' (image file path) and
            'label' (integer class) columns.
        augmentations: optional callable taking (image=..., img_size=...)
            and returning an HWC array; when None, `simple_image_preprocess`
            is used instead.
        config: dict of options; must provide 'img_size' when items are read.
        train: flag kept for interface compatibility (currently unused).

    Fixes vs. original: the TODO'd debug print of the whole config in
    __init__ is removed, and the mutable default `config={}` is replaced by
    the None-sentinel idiom (backward compatible).
    """

    def __init__(self, df, augmentations=None,
                 config=None,
                 train=True):
        self.df = df
        self.train = train
        self.config = {} if config is None else config
        self.augmentations = augmentations
        self.y = self.df['label']
        self.labels = self.df['label'].values

    def __len__(self):
        return len(self.labels)

    def __getitem__(self, idx):
        # NOTE(review): .loc assumes the frame index equals the positional
        # idx — confirm the dataframe is reset_index'ed upstream.
        image_path = self.df.loc[idx]['path']
        input_image = Image.open(image_path)
        if self.augmentations is not None:
            input_tensor = self.augmentations(
                image=input_image, img_size=self.config['img_size'])
            input_tensor = torch.tensor(input_tensor).permute(2, 0, 1)
        else:
            input_tensor = simple_image_preprocess(
                input_image, img_size=self.config['img_size'])
        return input_tensor, self.y[idx]
| StarcoderdataPython |
class Solution:
    """Conway's Game of Life: advance `board` one generation in place."""

    def gameOfLife(self, board):
        """Mutate `board` to the next generation using a snapshot copy."""
        self.m = len(board)
        self.n = len(board[0])
        # Work from a snapshot so in-place writes don't affect neighbour counts.
        self.board = [row[:] for row in board]
        for r in range(self.m):
            for c in range(self.n):
                neighbour_counts = self.CheckNeighbors(r, c)
                board[r][c] = self.NextGenState(self.board[r][c], neighbour_counts)

    def CheckNeighbors(self, row, column):
        """Count live/dead neighbours of (row, column) in the snapshot."""
        counts = {"live": 0, "dead": 0}
        for r in range(max(row - 1, 0), min(row + 2, self.m)):
            for c in range(max(column - 1, 0), min(column + 2, self.n)):
                if (r, c) == (row, column):
                    continue
                counts["live" if self.board[r][c] == 1 else "dead"] += 1
        return counts

    def NextGenState(self, state, dataDict):
        """Apply the Life rules given a cell state and its neighbour counts."""
        live = dataDict["live"]
        if state == 1:
            return 1 if live in (2, 3) else 0
        return 1 if live == 3 else 0
| StarcoderdataPython |
3201426 | #!/usr/bin/python
# This script computes the round key of the 10th round from the output of the 9th round (faulty ciphertext) and output of the 10th round (non-faulty ciphertext).
#
# The output of the 9th round is acquired by skipping the last round of the algorithm via LFI (see paper section VII.A).
# When provided with the faulty and the non-faulty ciphertext, this script computes the round key of the 10th round.
# The main key of the AES-128 encryption can be derived from any round key by inverting the key schedule, however, this is not shown here.
import numpy as np
# the plaintext as used in NIST test vectors
plaintext = [
0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a
]
# state matrix after the first encryption round
state = [
0xf2, 0x65, 0xe8, 0xd5, 0x1f, 0xd2, 0x39, 0x7b, 0xc3, 0xb9, 0x97, 0x6d, 0x90, 0x76, 0x50, 0x5c
]
# state matrix after ninth round
stateNinthRound = (
0xbb, 0x36, 0xc7, 0xeb, 0x88, 0x33, 0x4d, 0x49, 0xa4, 0xe7, 0x11, 0x2e, 0x74, 0xf1, 0x82, 0xc4
)
# cipher (tenth round)
cipherTenthRound = (
0x3a, 0xd7, 0x7b, 0xb4, 0x0d, 0x7a, 0x36, 0x60, 0xa8, 0x9e, 0xca, 0xf3, 0x24, 0x66, 0xef, 0x97
)
# s-box
sBox = (
0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16
)
# inverse s-box
invSBox = (
0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,
0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb,
0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e,
0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25,
0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92,
0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84,
0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06,
0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b,
0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73,
0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e,
0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,
0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4,
0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f,
0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef,
0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61,
0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d
)
# exponential table: lookup table for l table addition result
e = (
0x01, 0x03, 0x05, 0x0f, 0x11, 0x33, 0x55, 0xff, 0x1a, 0x2e, 0x72, 0x96, 0xa1, 0xf8, 0x13, 0x35,
0x5f, 0xe1, 0x38, 0x48, 0xd8, 0x73, 0x95, 0xa4, 0xf7, 0x02, 0x06, 0x0a, 0x1e, 0x22, 0x66, 0xaa,
0xe5, 0x34, 0x5c, 0xe4, 0x37, 0x59, 0xeb, 0x26, 0x6a, 0xbe, 0xd9, 0x70, 0x90, 0xab, 0xe6, 0x31,
0x53, 0xf5, 0x04, 0x0c, 0x14, 0x3c, 0x44, 0xcc, 0x4f, 0xd1, 0x68, 0xb8, 0xd3, 0x6e, 0xb2, 0xcd,
0x4c, 0xd4, 0x67, 0xa9, 0xe0, 0x3b, 0x4d, 0xd7, 0x62, 0xa6, 0xf1, 0x08, 0x18, 0x28, 0x78, 0x88,
0x83, 0x9e, 0xb9, 0xd0, 0x6b, 0xbd, 0xdc, 0x7f, 0x81, 0x98, 0xb3, 0xce, 0x49, 0xdb, 0x76, 0x9a,
0xb5, 0xc4, 0x57, 0xf9, 0x10, 0x30, 0x50, 0xf0, 0x0b, 0x1d, 0x27, 0x69, 0xbb, 0xd6, 0x61, 0xa3,
0xfe, 0x19, 0x2b, 0x7d, 0x87, 0x92, 0xad, 0xec, 0x2f, 0x71, 0x93, 0xae, 0xe9, 0x20, 0x60, 0xa0,
0xfb, 0x16, 0x3a, 0x4e, 0xd2, 0x6d, 0xb7, 0xc2, 0x5d, 0xe7, 0x32, 0x56, 0xfa, 0x15, 0x3f, 0x41,
0xc3, 0x5e, 0xe2, 0x3d, 0x47, 0xc9, 0x40, 0xc0, 0x5b, 0xed, 0x2c, 0x74, 0x9c, 0xbf, 0xda, 0x75,
0x9f, 0xba, 0xd5, 0x64, 0xac, 0xef, 0x2a, 0x7e, 0x82, 0x9d, 0xbc, 0xdf, 0x7a, 0x8e, 0x89, 0x80,
0x9b, 0xb6, 0xc1, 0x58, 0xe8, 0x23, 0x65, 0xaf, 0xea, 0x25, 0x6f, 0xb1, 0xc8, 0x43, 0xc5, 0x54,
0xfc, 0x1f, 0x21, 0x63, 0xa5, 0xf4, 0x07, 0x09, 0x1b, 0x2d, 0x77, 0x99, 0xb0, 0xcb, 0x46, 0xca,
0x45, 0xcf, 0x4a, 0xde, 0x79, 0x8b, 0x86, 0x91, 0xa8, 0xe3, 0x3e, 0x42, 0xc6, 0x51, 0xf3, 0x0e,
0x12, 0x36, 0x5a, 0xee, 0x29, 0x7b, 0x8d, 0x8c, 0x8f, 0x8a, 0x85, 0x94, 0xa7, 0xf2, 0x0d, 0x17,
0x39, 0x4b, 0xdd, 0x7c, 0x84, 0x97, 0xa2, 0xfd, 0x1c, 0x24, 0x6c, 0xb4, 0xc7, 0x52, 0xf6, 0x01
)
# logarithmic table: lookup table for multiplication
# initial value -1 is a dummy value
l = (
-1, 0x00, 0x19, 0x01, 0x32, 0x02, 0x1a, 0xc6, 0x4b, 0xc7, 0x1b, 0x68, 0x33, 0xee, 0xdf, 0x03,
0x64, 0x04, 0xe0, 0x0e, 0x34, 0x8d, 0x81, 0xef, 0x4c, 0x71, 0x08, 0xc8, 0xf8, 0x69, 0x1c, 0xc1,
0x7d, 0xc2, 0x1d, 0xb5, 0xf9, 0xb9, 0x27, 0x6a, 0x4d, 0xe4, 0xa6, 0x72, 0x9a, 0xc9, 0x09, 0x78,
0x65, 0x2f, 0x8a, 0x05, 0x21, 0x0f, 0xe1, 0x24, 0x12, 0xf0, 0x82, 0x45, 0x35, 0x93, 0xda, 0x8e,
0x96, 0x8f, 0xdb, 0xbd, 0x36, 0xd0, 0xce, 0x94, 0x13, 0x5C, 0xd2, 0xf1, 0x40, 0x46, 0x83, 0x38,
0x66, 0xdd, 0xfd, 0x30, 0xbf, 0x06, 0x8b, 0x62, 0xb3, 0x25, 0xe2, 0x98, 0x22, 0x88, 0x91, 0x10,
0x7e, 0x6e, 0x48, 0xc3, 0xa3, 0xb6, 0x1e, 0x42, 0x3a, 0x6b, 0x28, 0x54, 0xfa, 0x85, 0x3d, 0xba,
0x2b, 0x79, 0x0a, 0x15, 0x9b, 0x9f, 0x5e, 0xca, 0x4e, 0xd4, 0xac, 0xe5, 0xf3, 0x73, 0xa7, 0x57,
0xaf, 0x58, 0xa8, 0x50, 0xf4, 0xea, 0xd6, 0x74, 0x4f, 0xae, 0xe9, 0xd5, 0xe7, 0xe6, 0xad, 0xe8,
0x2c, 0xd7, 0x75, 0x7a, 0xeb, 0x16, 0x0b, 0xf5, 0x59, 0xcb, 0x5f, 0xb0, 0x9c, 0xa9, 0x51, 0xa0,
0x7f, 0x0c, 0xf6, 0x6f, 0x17, 0xc4, 0x49, 0xec, 0xd8, 0x43, 0x1f, 0x2d, 0xa4, 0x76, 0x7b, 0xb7,
0xcc, 0xbb, 0x3e, 0x5a, 0xfb, 0x60, 0xb1, 0x86, 0x3b, 0x52, 0xa1, 0x6c, 0xaa, 0x55, 0x29, 0x9d,
0x97, 0xb2, 0x87, 0x90, 0x61, 0xbe, 0xdc, 0xfc, 0xbc, 0x95, 0xcf, 0xcd, 0x37, 0x3f, 0x5b, 0xd1,
0x53, 0x39, 0x84, 0x3c, 0x41, 0xa2, 0x6d, 0x47, 0x14, 0x2a, 0x9e, 0x5d, 0x56, 0xf2, 0xd3, 0xab,
0x44, 0x11, 0x92, 0xd9, 0x23, 0x20, 0x2e, 0x89, 0xb4, 0x7c, 0xb8, 0x26, 0x77, 0x99, 0xe3, 0xa5,
0x67, 0x4a, 0xed, 0xde, 0xc5, 0x31, 0xfe, 0x18, 0x0d, 0x63, 0x8c, 0x80, 0xc0, 0xf7, 0x70, 0x07
)
# l-table lookup of mix column decryption matrix (transposed)
invMixCol = [
[0x0E, 0x09, 0x0D, 0x0B],
[0x0B, 0x0E, 0x09, 0x0D],
[0x0D, 0x0B, 0x0E, 0x09],
[0x09, 0x0D, 0x0B, 0x0E],
]
# l-table lookup of mix column encryption matrix (transposed)
MixCol = [
[0x02, 0x01, 0x01, 0x03],
[0x03, 0x02, 0x01, 0x01],
[0x01, 0x03, 0x02, 0x01],
[0x01, 0x01, 0x03, 0x02],
]
def subBytes(s):
    """AES SubBytes: apply the S-box to each of the 16 state bytes.

    Improvement vs. original: the 16 hand-written lookups are replaced by a
    comprehension (identical output list).
    """
    return [sBox[v] for v in s]
def invSubBytes(s):
    """AES InvSubBytes: apply the inverse S-box to each of the 16 state bytes.

    Improvement vs. original: the 16 hand-written lookups are replaced by a
    comprehension (identical output list).
    """
    return [invSBox[v] for v in s]
# shift row operation
# Permutation realised by ShiftRows on the flat, column-major AES state:
# output index i takes input byte _SHIFT_ROWS_PERM[i].
_SHIFT_ROWS_PERM = (0, 5, 10, 15, 4, 9, 14, 3, 8, 13, 2, 7, 12, 1, 6, 11)


def shiftRows(s):
    """AES ShiftRows: rotate row r of the state left by r positions.

    The state is a flat 16-byte list in column-major order, so the whole
    operation is a fixed permutation of indices.
    """
    return [s[src] for src in _SHIFT_ROWS_PERM]
# inverse shift rows
# Permutation realised by InvShiftRows on the flat, column-major AES state:
# output index i takes input byte _INV_SHIFT_ROWS_PERM[i].
_INV_SHIFT_ROWS_PERM = (0, 13, 10, 7, 4, 1, 14, 11, 8, 5, 2, 15, 12, 9, 6, 3)


def invShiftRows(s):
    """AES InvShiftRows: rotate row r of the state right by r positions,
    undoing shiftRows. Implemented as a fixed index permutation of the flat
    16-byte column-major state."""
    return [s[src] for src in _INV_SHIFT_ROWS_PERM]
def mult(x, y):
    """GF(2^8) multiplication via the log (l) and antilog (e) tables.

    0 and 1 are handled explicitly because the log table has no entry for 0
    and multiplying by 1 is the identity; otherwise the logs are added modulo
    255 and mapped back through the antilog table.
    """
    if x == 0 or y == 0:
        return 0
    if x == 1:
        return y
    if y == 1:
        return x
    log_sum = l[x] + l[y]
    if log_sum > 0xFF:
        log_sum -= 0xFF
    return e[log_sum]
# mix columns
def mixColumns(s):
    """AES MixColumns on the flat 16-byte column-major state.

    Each 4-byte column is multiplied (in GF(2^8)) by the MixColumns matrix:
    out[4*col + row] = XOR_k mult(s[4*col + k], MixCol[k][row]).

    Improvement vs. original: the intermediate GalMat bookkeeping and the
    explicit 16-element flattening are replaced by a direct triple loop; the
    special-case for zero bytes is unnecessary because mult(0, y) == 0.

    Returns:
        flat 16-element list, same order as the input.
    """
    out = []
    for col in range(4):
        column = s[4 * col:4 * col + 4]
        for row in range(4):
            acc = 0
            for k in range(4):
                acc ^= mult(column[k], MixCol[k][row])
            out.append(acc)
    return out
# inverse mix columns
def invMixColumns(s):
    """AES InvMixColumns on the flat 16-byte column-major state.

    Each 4-byte column is multiplied (in GF(2^8)) by the inverse MixColumns
    matrix: out[4*col + row] = XOR_k mult(s[4*col + k], invMixCol[k][row]).

    Improvement vs. original: the intermediate GalMat bookkeeping and the
    explicit 16-element flattening are replaced by a direct triple loop; the
    special-case for zero bytes is unnecessary because mult(0, y) == 0.

    Returns:
        flat 16-element list, same order as the input.
    """
    out = []
    for col in range(4):
        column = s[4 * col:4 * col + 4]
        for row in range(4):
            acc = 0
            for k in range(4):
                acc ^= mult(column[k], invMixCol[k][row])
            out.append(acc)
    return out
# add round key
def addRoundKey(s, k):
    """XOR the state *s* with the round key *k* elementwise.

    Both arguments are equal-length sequences of byte values; the result is
    returned as a plain Python list.
    """
    return (np.asarray(s) ^ np.asarray(k)).tolist()
# get 10th round key from 10th round state before subBytes
def getTenthRoundKey(state10, cipher):
    """Recover the 10th AES round key from a final-round state.

    Replays the last round's SubBytes and ShiftRows on *state10* and XORs
    the result with *cipher* (addRoundKey is a plain XOR, so it is its own
    inverse).

    NOTE(review): the caller passes a variable named ``stateNinthRound`` —
    confirm which round's state this parameter actually holds.
    """
    return addRoundKey(subBytes(shiftRows(state10)), cipher)
if __name__ == "__main__":
    # get 10th round key
    # stateNinthRound and cipherTenthRound are expected to be 16-byte blocks
    # defined elsewhere at module level (not visible in this section).
    tenthRndKey = getTenthRoundKey(stateNinthRound, cipherTenthRound)
    # Print the recovered round key both as decimal and hex byte lists.
    print("10th round key (decimal)")
    print(tenthRndKey)
    print("10th round key (hex)")
    print([hex(i) for i in tenthRndKey])
| StarcoderdataPython |
1649826 | <filename>lidopt/model.py
# ## Running the simulation in SWMM
import numpy as np
from pyswmm import Simulation, LidGroups
from pyswmm.lidlayers import Soil
from pyswmm.lidcontrols import LidControls
from .parsers import parse_experiment, parse_report, merge_and_correct
from . import EXP, SIM, METRICS
def evaluate(inputfile=SIM['file'], experiment=None, reportfile='report.txt', params=None):
    """Run a SWMM LID simulation with the given parameters and score it.

    Args:
        inputfile: SWMM ``.inp`` input file to simulate.
        experiment: observed data table; parsed from ``EXP['file']`` when None.
        reportfile: path where the merged report/experiment table is written
            as CSV.
        params: dict of LID layer parameters keyed like ``'drain.coefficient'``.

    Returns:
        Dict mapping metric names to values.  Every metric is set to -1 when
        the simulation report could not be parsed or merged.
    """
    with Simulation(inputfile=inputfile) as simulation:
        lid = LidControls(simulation)[SIM['lid.name']]
        # Underdrain layer
        lid.drain.coefficient = params['drain.coefficient']
        lid.drain.exponent = params['drain.exponent']
        lid.drain.offset = params['drain.offset']
        lid.drain.delay = params['drain.delay']
        # Soil layer
        lid.soil.thickness = params['soil.thickness']
        lid.soil.porosity = params['soil.porosity']
        lid.soil.field_capacity = params['soil.field_capacity']
        lid.soil.wilting_point = params['soil.wilting_point']
        lid.soil.k_saturated = params['soil.k_saturated']
        lid.soil.k_slope = params['soil.k_slope']
        lid.soil.suction_head = params['soil.suction_head']
        # Surface layer
        lid.surface.thickness = params['surface.thickness']
        lid.surface.void_fraction = params['surface.void_fraction']
        lid.surface.roughness = params['surface.roughness']
        lid.surface.slope = params['surface.slope']
        # Storage layer
        lid.storage.thickness = params['storage.thickness']
        lid.storage.void_fraction = params['storage.void_fraction']
        lid.storage.k_saturated = params['storage.k_saturated']
        lid.storage.clog_factor = params['storage.clog_factor']
        # Step through the whole run; only the final report file matters.
        for _ in simulation:
            pass
        print("\n")
        print('Simulation executed')
    try:
        # Read report and compare with experiment.
        # NOTE(review): 'report.txt' is the fixed SWMM report name, while
        # `reportfile` names the merged CSV output — confirm this is intended.
        report = parse_report('report.txt')
        if experiment is None:
            experiment = parse_experiment(EXP['file'])
        out = merge_and_correct(experiment=experiment, report=report)
        out.to_csv(reportfile)
    except Exception:
        # Was a bare `except:` (which also swallowed SystemExit and
        # KeyboardInterrupt) and appeared to return after flagging only the
        # first metric; flag every metric as failed instead.
        return {metric: -1 for metric in METRICS}
    # Recover the simulated and observed series from the merged table.
    sim_inflow = out[SIM['inflow_mm_hr']]
    sim_outflow = out[SIM['outflow_mm_hr']]
    exp_inflow = out[EXP['inflow']]
    exp_outflow = out[EXP['outflow']]
    return calculate_metrics(sim_inflow, sim_outflow, exp_inflow, exp_outflow)
#############################
# METRICS
#############################
def calculate_metrics(sim_inflow, sim_outflow, exp_inflow, exp_outflow):
    """Compute goodness-of-fit metrics between simulated and observed series.

    All four arguments are aligned, equal-length pandas-style series
    (objects exposing ``.mean()``, ``.abs()``, ``.max()`` and ``.values`` —
    assumed from usage; confirm with callers).

    Returns a dict with NSE scores, total volumes, percent bias, peak-flow
    magnitude/timing errors and deviation statistics.
    """
    def nse(simulated, observed):
        # Nash-Sutcliffe efficiency: 1 - SSE / variance of the observations.
        sse = np.sum((simulated - observed) ** 2)
        spread = np.sum((observed - observed.mean()) ** 2)
        return 1 - sse / spread

    volume_inflow_sim = np.sum(sim_inflow)
    volume_outflow_sim = np.sum(sim_outflow)
    volume_inflow_exp = np.sum(exp_inflow)
    volume_outflow_exp = np.sum(exp_outflow)

    return {
        'nse_inflow': nse(sim_inflow, exp_inflow),
        'nse_outflow': nse(sim_outflow, exp_outflow),
        'volume_inflow_sim': volume_inflow_sim,
        'volume_outflow_sim': volume_outflow_sim,
        'volume_inflow_exp': volume_inflow_exp,
        'volume_outflow_exp': volume_outflow_exp,
        # Percent bias of the outflow series.
        'pbias': 100 * (exp_outflow - sim_outflow).sum() / exp_outflow.sum(),
        # Absolute error in peak outflow magnitude.
        'peak_flow': np.abs(exp_outflow.max() - sim_outflow.max()),
        # Offset (in samples) between observed and simulated peak positions.
        'time_peak': np.argmax(exp_outflow.values) - np.argmax(sim_outflow.values),
        # Systematic (mean), absolute and quadratic (RMS) deviations.
        'sd': (exp_outflow - sim_outflow).mean(),
        'ad': (exp_outflow - sim_outflow).abs().mean(),
        'qd': np.sqrt(np.sum((exp_outflow.values - sim_outflow.values) ** 2) / len(exp_outflow)),
        # Relative inflow volume error.
        'deltaV': (volume_inflow_exp - volume_inflow_sim) / volume_inflow_exp,
    }
16822 | <reponame>gautams3/reacher-done
import gym
from gym import error, spaces, utils
from gym.utils import seeding
from gym.envs.mujoco.reacher import ReacherEnv
import numpy as np
class ReacherDoneEnv(ReacherEnv):
    """Reacher variant that ends the episode once the fingertip is within
    4 cm of the target, adding a per-step time penalty and a terminal bonus.
    """

    metadata = {'render.modes': ['human']}

    def step(self, action):
        """Advance the simulation one step and return (obs, reward, done, info)."""
        self.do_simulation(action, self.frame_skip)
        # Fingertip-to-target distance drives the shaping reward.
        offset = self.get_body_com("fingertip") - self.get_body_com("target")
        dist = np.linalg.norm(offset)
        reward_dist = -dist
        reward_ctrl = -0.3 * np.square(action).sum()
        reward_time = -0.2  # constant per-step penalty to encourage speed
        done = dist < 0.04  # terminate once close enough to the target
        bonus = 2  # one-off reward granted on the terminal step
        reward = reward_dist + reward_ctrl + reward_time + (bonus if done else 0)
        obs = self._get_obs()
        info = dict(reward_dist=reward_dist, reward_ctrl=reward_ctrl, dist=dist)
        return obs, reward, done, info
269636 | import json
from dataclasses import dataclass, is_dataclass, asdict
from typing import List, Dict
from collections import defaultdict
import sys
def custom_default(o):
    """``json.dumps`` default hook: serialize dataclass instances as dicts.

    Raises TypeError for anything that is not a dataclass, matching the
    contract json expects from a ``default=`` callable.
    """
    if not is_dataclass(o):
        raise TypeError(f"{o!r} is not JSON serializable")
    return asdict(o)
@dataclass
class AtomicCard:
    """One card entry from MTGJSON's AtomicCards.json "data" section."""
    converted_mana_cost: str
    foreign_data: List[Dict]
    identifiers: Dict
    mana_cost: str
    name: str
    subtypes: List[str]
    supertypes: List[str]
    power: str
    toughness: str
    text: str
    type: str
    types: List[str]

    # Maps each dataclass field to its camelCase MTGJSON key; ordered the
    # same way the fields were originally read.
    _JSON_KEYS = {
        "converted_mana_cost": "convertedManaCost",
        "foreign_data": "foreignData",
        "identifiers": "identifiers",
        "mana_cost": "manaCost",
        "name": "name",
        "subtypes": "subtypes",
        "supertypes": "supertypes",
        "text": "text",
        "type": "type",
        "types": "types",
        "power": "power",
        "toughness": "toughness",
    }

    @classmethod
    def create(cls, json_data):
        """Build an AtomicCard from one raw MTGJSON card dict."""
        return cls(**{field: json_data[key] for field, key in cls._JSON_KEYS.items()})
@dataclass
class FormattedCard:
    """Bilingual (English/Japanese) card summary derived from an AtomicCard."""
    convertedManaCost: str
    nameEnglish: str
    nameJapanese: str
    textEnglish: str
    textJapanese: str
    manaCost: str
    power: str
    toughness: str
    type: str
    types: List[str]

    @classmethod
    def create(cls, card: AtomicCard):
        """Build a FormattedCard, pulling the Japanese name/text if present.

        Surrounding double quotes are stripped from all text fields.
        """
        name_ja = ""
        text_ja = ""
        # Scan all translations; the last Japanese entry wins, matching the
        # original iteration order.
        for entry in card.foreign_data:
            if entry["language"] == "Japanese":
                name_ja = entry.get("name", "")
                text_ja = entry.get("text", "")
        return cls(
            convertedManaCost=card.converted_mana_cost,
            nameEnglish=card.name.strip('"'),
            nameJapanese=name_ja.strip('"'),
            textEnglish=card.text.strip('"'),
            textJapanese=text_ja.strip('"'),
            manaCost=card.mana_cost,
            power=card.power,
            toughness=card.toughness,
            type=card.type,
            types=card.types,
        )
def main():
    """Read AtomicCards.json, convert every card, and dump JSON to stdout."""
    with open("../../cardlist/json/AtomicCards.json") as f:
        data = json.load(f)
    # defaultdict(str) makes any missing MTGJSON key default to "".
    cards = [
        AtomicCard.create(defaultdict(str, **raw))
        for printings in data["data"].values()
        for raw in printings
    ]
    formatted = [FormattedCard.create(card) for card in cards]
    # First converted card goes to stderr as a quick sanity check.
    print(formatted[0], file=sys.stderr)
    print(json.dumps(formatted, default=custom_default))


if __name__ == "__main__":
    main()
| StarcoderdataPython |
1984183 | <filename>test.py
from subprocess import call
from sys import exit
# Discover and run the unit-test suite under ./tests, propagating its
# exit status to the caller (useful for CI).
exit(call(["python", "-m", "unittest", "discover", "-v", "tests"]))
| StarcoderdataPython |
5048899 | import lbann
from lbann.modules import Module
import math
class DenseGCNConv(Module):
    """Dense graph-convolution layer for LBANN: computes A @ (X @ W).

    W is a learnable (input_channels x output_channels) weight matrix with
    Glorot/Xavier-uniform initialization.
    """

    # Counts instances so auto-generated layer names stay unique.
    global_count = 0

    def __init__(self, input_channels, output_channels, name=None):
        super().__init__()
        DenseGCNConv.global_count += 1
        self.name = name if name else 'Dense_GCN_{}'.format(DenseGCNConv.global_count)
        # Glorot/Xavier uniform bound: sqrt(6 / (fan_in + fan_out)).
        limit = math.sqrt(6.0 / (input_channels + output_channels))
        self.weights = lbann.Weights(
            initializer=lbann.UniformInitializer(min=-limit, max=limit),
            name=self.name + '_Weights')
        self.W = lbann.WeightsLayer(
            dims=[input_channels, output_channels],
            name=self.name + '_layer',
            weights=self.weights)

    def forward(self, X, A):
        """Return A @ (X @ W) as an LBANN layer graph node."""
        features = lbann.MatMul(X, self.W, name=self.name + '_weight_mult')
        return lbann.MatMul(A, features, name=self.name + '_adj_mult')
| StarcoderdataPython |
8117929 | <filename>src/prism-fruit/Games-DQL/examples/games/car/networkx/algorithms/approximation/tests/test_matching.py<gh_stars>0
from nose.tools import *
import networkx as nx
import networkx.algorithms.approximation as a
def test_min_maximal_matching():
    """Smoke test: the empty graph has an empty minimum maximal matching."""
    graph = nx.Graph()
    matching = a.min_maximal_matching(graph)
    assert_equal(len(matching), 0)
| StarcoderdataPython |
def f(xs):
    """Call g on a fixed sample string once per element of *xs*.

    The return value of g is discarded; this function exists for its calls
    only (it returns None).
    """
    sample = 'string'
    for _ in xs:
        g(sample)
def g(x):
    """Return *x* lower-cased."""
    return x.lower()
| StarcoderdataPython |
9706248 | <gh_stars>1-10
def quick_sort(data):
    """Sort *data* in place via quicksort, yielding the list after each
    mutation step (intended for sort visualizations)."""
    yield from __quick_sort(data, 0, len(data) - 1)
def __quick_sort(data, start, end):
    """In-place quicksort of data[start:end+1]; average O(n log n).

    Yields the whole list once per partition comparison and once after the
    pivot is placed, so callers can animate every step.
    """
    if start >= end:
        return
    # Lomuto partition: data[end] is the pivot.
    pivot_value = data[end]
    boundary = start
    for idx in range(start, end):
        if data[idx] < pivot_value:
            data[idx], data[boundary] = data[boundary], data[idx]
            boundary += 1
        yield data
    # Move the pivot into its final position.
    data[end], data[boundary] = data[boundary], data[end]
    yield data
    # Recurse into both partitions.
    yield from __quick_sort(data, start, boundary - 1)
    yield from __quick_sort(data, boundary + 1, end)
6520385 | <reponame>okfde/odm-datenerfassung<gh_stars>1-10
# -*- coding: utf-8 -*-
import urllib2
import urllib
import json
import pprint
import os
import metautils
#This is a one time operation to create organisations based on the originating portal in ODM DB
#Run prior to importing data
# Target CKAN instance and API key are read from the environment.
url = os.environ['CKANURL']
apikey = os.environ['CKANAPIKEY']
# GovData category taxonomy: each entry is a group title plus an optional
# icon URL. "Noch nicht kategorisiert" ("not yet categorized") has no icon.
categories = [
    {"name": u"Noch nicht kategorisiert"},
    {"name": u"Wirtschaft und Arbeit",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/wirtschaft_arbeit.png"},
    {"name": u"Soziales",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/soziales.png"},
    {"name": u"Infrastruktur, Bauen und Wohnen",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/infrastruktur_bauen_wohnen.png"},
    {"name": u"Bildung und Wissenschaft",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/bildung_wissenschaft.png"},
    {"name": u"Gesundheit",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/gesundheit.png"},
    {"name": u"Öffentliche Verwaltung, Haushalt und Steuern",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/verwaltung.png"},
    {"name": u"Gesetze und Justiz",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/gesetze_justiz.png"},
    {"name": u"Transport und Verkehr",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/transport_verkehr.png"},
    {"name": u"Geographie, Geologie und Geobasisdaten",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/geo.png"},
    {"name": u"Verbraucherschutz",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/verbraucher.png"},
    {"name": u"Bevölkerung",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/bevoelkerung.png"},
    {"name": u"Umwelt und Klima",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/umwelt_klima.png"},
    {"name": u"Politik und Wahlen",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/politik_wahlen.png"},
    {"name": u"Kultur, Freizeit, Sport, Tourismus",
     "image": "https://raw.githubusercontent.com/fraunhoferfokus/govdata-theme/master/src/main/webapp/images/categories/kultur_freizeit_sport_tourismus.png"}
]
# One-time setup: create one CKAN group per category via the API.
# NOTE(review): this uses the Python 2 urllib/urllib2 API (urllib.quote,
# urllib2.Request) — it will not run unchanged under Python 3.
for cat in categories:
    # Build the CKAN group payload; the sanitized name doubles as the id.
    dataset_dict = {}
    dataset_dict['name'] = metautils.force_alphanumeric_short(cat['name'])
    dataset_dict['id'] = dataset_dict['name']
    dataset_dict['title'] = cat['name']
    dataset_dict['image_url'] = cat['image'] if 'image' in cat else None
    # POST the URL-quoted JSON body to CKAN's group_create action.
    data_string = urllib.quote(json.dumps(dataset_dict))
    request = urllib2.Request(url +'/api/3/action/group_create')
    request.add_header('Authorization', apikey)
    response = urllib2.urlopen(request, data_string)
    # Echo the created group for manual verification.
    response_dict = json.loads(response.read())
    created_grp = response_dict['result']
    pprint.pprint(created_grp)
| StarcoderdataPython |
6589872 | #!/usr/bin/python3
"This module is used to generate xml file readed by crete"
import subprocess
def json2xml(workload_str: str, setup_list: list,
             function_name: str, full_path: str) -> str:
    """Generate the crete XML configuration for a workload command.

    Args:
        workload_str: space-separated command line, e.g. "ifconfig eth0".
        setup_list: shell setup commands to run before the workload.
        function_name: kernel function name; the kprobe module is assumed to
            be ``<full_path>/<function_name>.ko``.
        full_path: directory containing the kprobe module.

    Returns:
        The XML document as a string.

    Note:
        The original signature used the *type objects* ``str``/``list`` as
        default argument values (``workload_str=str``), which crashed at the
        first ``.split``; they are proper annotations now, so all four
        arguments are required.
    """
    tokens = workload_str.split(" ")
    # Resolve the binary's absolute path with `whereis -b <cmd>`; the last
    # whitespace-separated field of its output is the path.
    raw = subprocess.check_output(["whereis", "-b", tokens[0]])
    cmd_path = raw.decode("utf-8").split(" ")[-1].strip("\n")

    # Assemble the document in a list and join once (avoids quadratic +=).
    parts = ['<?xml version="1.0" encoding="utf-8"?>\n', "<crete>\n"]
    parts.append("<exec>{}</exec>\n".format(cmd_path))
    parts.append("<args>\n")
    # Argument indices are 1-based; tokens[0] is the executable itself.
    for index, arg in enumerate(tokens[1:], start=1):
        parts.append('<arg index="%d" value="%s"/>\n' % (index, arg))
    parts.append("</args>\n")
    parts.append("<setup_commands>\n")
    for command in setup_list:
        parts.append("<setup_command>{}</setup_command>\n".format(command))
    parts.append("</setup_commands>\n")
    parts.append("<kprobe_module>{}/{}.ko</kprobe_module>\n".format(full_path, function_name))
    parts.append("</crete>\n")
    return "".join(parts)
if __name__ == "__main__":
    # Demo: emit a crete config for an `ifconfig` workload.
    print(json2xml("ifconfig eth0 eth1",
                   ["vcibfug 473hd ds", "cmd fudssdndcia"],
                   function_name="Hello", full_path="~/home/kernel"))
3423265 | import os
import sys
import re
if __name__ == "__main__":
    # Advent of Code 2020 day 7 (part 2): count how many individual bags a
    # "shiny gold bag" must ultimately contain, per rules in input.txt.
    data = None
    wdir = os.path.dirname(sys.argv[0])
    with open(os.path.join(wdir, "input.txt")) as f:
        data = f.readlines()
    needle = "shiny gold bag"
    bags = []
    # Parse each rule line into {"name": ..., "subbags": {name: count}}.
    for d in data:
        m = re.match("^(?P<bag>.*bag)s\s*contain (?P<contain>.*)$", d)
        if not m:
            continue
        bag = { "name": m.group("bag").strip(), "subbags": {}}
        if m.group("contain") == "no other bags.":
            bags.append(bag)
            continue
        # Content fragments look like "3 muted yellow bags".
        subbags = re.findall("(\d [^,\.]+)", m.group("contain"))
        for s in subbags:
            m = re.match("(?P<amount>\d) (?P<subbag>[a-z ]+bag)s?", s)
            if not m:
                continue
            bag["subbags"][m.group("subbag")] = int(m.group("amount"))
        bags.append(bag)
    def addBag(result, bag, amount):
        # Tally `amount` of `bag` into the result dict (create or add).
        if bag not in result.keys():
            result[bag] = amount
        else:
            result[bag] += amount
        return result
    def findBag(bags, needle):
        # Return the parsed bag dict with the given name, or None.
        for b in bags:
            if needle == b["name"]:
                return b
        return None
    def findSubbags(bags, parent):
        # Recursively tally every bag (with multiplicity) inside `parent`.
        # NOTE(review): this returns [] in the degenerate case but a dict
        # otherwise; callers immediately call .keys() on the result, which
        # would fail on []. Unreachable with parsed bags (every bag has a
        # "subbags" key), but worth confirming.
        if not parent or "subbags" not in parent:
            return []
        result = {}
        for s in parent["subbags"].keys():
            result = addBag(result, s, parent["subbags"][s])
            b = findBag(bags, s)
            if b:
                tmp = findSubbags(bags, b)
                for t in tmp.keys():
                    # Nested counts scale by how many of `s` the parent holds.
                    result = addBag(result, t, tmp[t] * parent["subbags"][s])
        return result
    count = 0
    b = findBag(bags, needle)
    if b:
        result = findSubbags(bags, b)
        for r in result.keys():
            print(r, result[r])
            count += result[r]
    print("Needed individual bags: %d"%count)
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.