id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
84068 | '''
Created on 1.12.2016
@author: Darren
''''''
Given n, generate all structurally unique BST s (binary search trees) that store values 1...n.
For example,
Given n = 3, your program should return all 5 unique BST s shown below.
1 3 3 2 1
\ / / / \ \
3 2 1 1 3 2
/ / \ \
2 1 2 3
confused what "{1,#,2,3}" means? > read more on how binary tree is serialized on OJ.
OJ s Binary Tree Serialization:
The serialization of a binary tree follows a level order traversal, where # signifies a path terminator where no node exists below.
Here s an example:
1
/ \
2 3
/
4
\
5
The above binary tree is serialized as "{1,2,3,#,#,4,#,#,5}".
"
'''
# Definition for a binary tree node.
class TreeNode(object):
    """Plain binary-tree node: an integer payload and two child links."""

    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution(object):
    """Enumerate every structurally unique BST holding the values 1..n."""

    def generateTrees(self, n):
        """
        :type n: int
        :rtype: List[TreeNode]
        """
        # No values means no trees at all (not even an empty placeholder).
        if n <= 0:
            return []
        return self.generateTreesUtil(1, n)

    def generateTreesUtil(self, left, right):
        """Return the roots of all BSTs built from exactly the values left..right."""
        # Single value: exactly one shape, a lone leaf.
        if left == right:
            return [TreeNode(left)]
        # Empty range: one "shape", the absent subtree (None child).
        if left > right:
            return [None]
        trees = []
        for rootVal in range(left, right + 1):
            # Every pairing of a smaller-value shape with a larger-value
            # shape under this root yields a distinct tree.
            for leftChild in self.generateTreesUtil(left, rootVal - 1):
                for rightChild in self.generateTreesUtil(rootVal + 1, right):
                    root = TreeNode(rootVal)
                    root.left = leftChild
                    root.right = rightChild
                    trees.append(root)
        return trees
1606557 | ''' 3. Faça um Programa que peça a temperatura em graus Farenheit, transforme e mostre a
temperatura em graus Celsius. Dica: C = ( 5 ∗ ( F − 32 ) /9 )''' | StarcoderdataPython |
74123 | <reponame>Poppins001/Projects
# -*- coding: utf-8 -*-
"""Order related definitions."""
# Order-related enum vocabularies: each top-level key names a definition,
# mapping enum member -> human-readable description.
definitions = {
    # How the order is priced/executed.
    "OrderType": {
        "MARKET": "A Market Order",
        "LIMIT": "A Limit Order",
        "STOP": "A Stop Order",
        "MARKET_IF_TOUCHED": "A Market-if-touched Order",
        "TAKE_PROFIT": "A Take Profit Order",
        "STOP_LOSS": "A Stop Loss Order",
        "TRAILING_STOP_LOSS": "A Trailing Stop Loss Order"
    },
    # Lifecycle states an order can be in.
    "OrderState": {
        "PENDING": "The Order is currently pending execution",
        "FILLED": "The Order has been filled",
        "TRIGGERED": "The Order has been triggered",
        "CANCELLED": "The Order has been cancelled",
    },
    # How long the order remains active before being cancelled.
    "TimeInForce": {
        "GTC": "The Order is “Good unTil Cancelled”",
        "GTD": "The Order is “Good unTil Date” and will be cancelled at "
               "the provided time",
        "GFD": "The Order is “Good for Day” and will be cancelled at "
               "5pm New York time",
        "FOK": "The Order must be immediately “Filled Or Killed”",
        "IOC": "The Order must be “Immediately partially filled Or Killed”",
    },
    # How a fill interacts with any existing position.
    "OrderPositionFill": {
        "OPEN_ONLY": "When the Order is filled, only allow Positions to be "
                     "opened or extended.",
        "REDUCE_FIRST": "When the Order is filled, always fully reduce an "
                        "existing Position before opening a new Position.",
        "REDUCE_ONLY": "When the Order is filled, only reduce an existing "
                       "Position.",
        "DEFAULT": "When the Order is filled, use REDUCE_FIRST behaviour "
                   "for non-client hedging Accounts, and OPEN_ONLY behaviour "
                   "for client hedging Accounts."
    },
}
| StarcoderdataPython |
3245469 | #!/usr/bin/env python3
import BrickPi as bp
import time
# Poll a colour sensor in red-LED mode on port 1 and report threshold
# crossings of the reflected-light reading.
bp.BrickPiSetup()  # setup the serial port for communication

color = bp.PORT_1
#col = [None , "Black","Blue","Green","Yellow","Red","White" ] #used for converting the color index to name
bp.BrickPi.SensorType[color] = bp.TYPE_SENSOR_COLOR_RED
bp.BrickPiSetupSensors()

# Hysteresis flag: set once the reading rises above 110, cleared only after
# it drops below 90 — the 90..110 dead band suppresses flicker.
inn = False
while True:
    result = bp.BrickPiUpdateValues()  # Ask BrickPi to update values for sensors/motors
    if not result:  # a falsy result is treated as a successful update
        val = bp.BrickPi.Sensor[color]  # BrickPi.Sensor[PORT] stores the value obtained from sensor
        print(val)
        if val > 110 and not inn:
            inn = True
            print("In!")
        if val < 90 and inn:
            inn = False
            print("Out!")
    time.sleep(.1)  # sleep for 100 ms between polls
| StarcoderdataPython |
1437 | import os
import numpy as np
import pandas as pd
from keras.utils import to_categorical
from sklearn.model_selection import KFold, train_test_split
def load_data(path):
    """Read the train/test JSON files under *path* and return them as DataFrames."""
    def read(name):
        return pd.read_json(os.path.join(path, name))

    return (read("./train.json"), read("./test.json"))
def preprocess(df,
               means=(-22.159262, -24.953745, 40.021883465782651),
               stds=(5.33146, 4.5463958, 4.0815391476694414)):
    """Convert the raw dataframe into a standardised (N, 75, 75, 3) float32 stack.

    Channels are band_1, band_2 and a constant per-image incidence-angle
    plane; each channel is standardised with the matching mean/std.

    Args:
        df: DataFrame with 'band_1'/'band_2' (flattened 75x75 pixel lists)
            and 'inc_angle' (degrees, or the string 'na' when missing).
        means: per-channel centring constants.
        stds: per-channel scaling constants.

    Returns:
        np.ndarray of shape (len(df), 75, 75, 3), dtype float32.
    """
    X_band_1 = np.array([np.array(band).astype(np.float32).reshape(75, 75)
                         for band in df["band_1"]])
    X_band_2 = np.array([np.array(band).astype(np.float32).reshape(75, 75)
                         for band in df["band_2"]])
    # Bug fix: the 'na' fallback previously read means[3], which is out of
    # range for the 3-element default tuple and raised IndexError on the
    # first missing angle. Falling back to the channel mean means[2] makes
    # missing angles map to exactly 0 after standardisation (mean imputation).
    # NOTE(review): means[2] (~40) looks like a mean angle in degrees while
    # valid entries are cosines in [-1, 1] — confirm the intended constants.
    angl = df['inc_angle'].map(lambda x: np.cos(x * np.pi / 180) if x != 'na' else means[2])
    angl = np.array([np.full(shape=(75, 75), fill_value=angle).astype(np.float32)
                     for angle in angl])
    X_band_1 = (X_band_1 - means[0]) / stds[0]
    X_band_2 = (X_band_2 - means[1]) / stds[1]
    angl = (angl - means[2]) / stds[2]
    # Stack the three normalised planes into the trailing channel axis.
    images = np.concatenate([X_band_1[:, :, :, np.newaxis],
                             X_band_2[:, :, :, np.newaxis],
                             angl[:, :, :, np.newaxis]],
                            axis=-1)
    return images
def prepare_data_cv(path):
    """Load the dataset and build 5-fold cross-validation splits.

    Returns:
        (kfold_data, X_test): kfold_data is a list of five
        (X_train, y_train, X_val, y_val) tuples; X_test holds the
        preprocessed test images.
    """
    train, test = load_data(path)
    X_train = preprocess(train)
    # Bug fix: DataFrame/Series.as_matrix() was removed in pandas 1.0;
    # to_numpy() is the supported replacement with identical output here.
    y_train = to_categorical(train['is_iceberg'].to_numpy().reshape(-1, 1))
    kfold_data = []
    # Fixed seed keeps the folds reproducible across runs.
    kf = KFold(n_splits=5, shuffle=True, random_state=0xCAFFE)
    for train_indices, val_indices in kf.split(y_train):
        X_train_cv = X_train[train_indices]
        y_train_cv = y_train[train_indices]
        X_val = X_train[val_indices]
        y_val = y_train[val_indices]
        kfold_data.append((X_train_cv, y_train_cv, X_val, y_val))
    X_test = preprocess(test)
    return (kfold_data, X_test)
def prepare_data(path):
    """Load the dataset and build a single 80/20 train/validation split.

    Returns:
        ([(X_train, y_train, X_valid, y_valid)], X_test) — a one-element
        list so the caller can treat it like the CV variant's fold list.
    """
    train, test = load_data(path)
    X_train = preprocess(train)
    # Bug fix: DataFrame/Series.as_matrix() was removed in pandas 1.0;
    # to_numpy() is the supported replacement with identical output here.
    y_train = to_categorical(train['is_iceberg'].to_numpy().reshape(-1, 1))
    X_train_cv, X_valid, y_train_cv, y_valid = train_test_split(X_train,
                                                                y_train,
                                                                random_state=0xCAFFE,
                                                                train_size=0.8)
    X_test = preprocess(test)
    return ([(X_train_cv, y_train_cv, X_valid, y_valid)], X_test)
| StarcoderdataPython |
147077 | <filename>yardstick/benchmark/scenarios/availability/result_checker/result_checker_general.py
##############################################################################
# Copyright (c) 2016 <NAME> and others
# juan_ <EMAIL>
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
from __future__ import absolute_import
import logging
from yardstick.benchmark.scenarios.availability.result_checker \
.baseresultchecker import \
BaseResultChecker
from yardstick.benchmark.scenarios.availability import Condition
import yardstick.ssh as ssh
from yardstick.benchmark.scenarios.availability.util \
import buildshellparams, execute_shell_command
LOG = logging.getLogger(__name__)
class GeneralResultChecker(BaseResultChecker):
    """Run a user-supplied verify script (locally or over SSH) and compare
    its stdout against an expected value using a configurable condition."""

    __result_checker__type__ = "general-result-checker"

    def setup(self):
        """Read the checker configuration and open SSH if a host is given."""
        LOG.debug("config:%s context:%s", self._config, self._context)
        host = self._context.get(self._config.get('host', None), None)
        self.connection = None
        if host:
            # Remote mode: the verify script is piped to the host as root.
            self.connection = ssh.SSH.from_node(
                host, defaults={"user": "root"})
            self.connection.wait(timeout=600)
            LOG.debug("ssh host success!")
        self.key = self._config['key']
        self.resultchecker_key = self._config['checker_key']
        self.type = self._config['checker_type']
        self.condition = self._config['condition']
        self.expectedResult = self._config['expectedValue']
        # Sentinel that can never equal a real script result.
        self.actualResult = object()
        # NOTE(review): 'key' was already assigned above; this is redundant.
        self.key = self._config['key']
        if "parameter" in self._config:
            parameter = self._config['parameter']
            # NOTE(review): 'str' and 'l' shadow builtins here.
            str = buildshellparams(
                parameter, True if self.connection else False)
            l = list(item for item in parameter.values())
            # Shell command with the parameter values substituted in order.
            self.shell_cmd = str.format(*l)
        self.resultchecker_cfgs = BaseResultChecker.resultchecker_cfgs.get(
            self.resultchecker_key)
        self.verify_script = self.get_script_fullpath(
            self.resultchecker_cfgs['verify_script'])

    def verify(self):
        """Execute the verify script and evaluate stdout against the
        expected value under the configured condition.

        Returns:
            bool: True when the condition holds for the script output.
        """
        if "parameter" in self._config:
            if self.connection:
                # Remote, parameterised: feed the script on stdin.
                with open(self.verify_script, "r") as stdin_file:
                    exit_status, stdout, stderr = self.connection.execute(
                        "sudo {}".format(self.shell_cmd),
                        stdin=stdin_file)
            else:
                # NOTE(review): self.rollback_param is never assigned in
                # setup(); this local+parameter branch would raise
                # AttributeError — confirm against upstream.
                exit_status, stdout = \
                    execute_shell_command(
                        "/bin/bash {0} {1}".format(
                            self.verify_script,
                            self.rollback_param))
            LOG.debug("action script of the operation is: %s",
                      self.verify_script)
            LOG.debug("action parameter the of operation is: %s",
                      self.shell_cmd)
        else:
            if self.connection:
                # Remote, no parameters: run the script verbatim via bash -s.
                with open(self.verify_script, "r") as stdin_file:
                    exit_status, stdout, stderr = self.connection.execute(
                        "sudo /bin/bash -s ",
                        stdin=stdin_file)
            else:
                exit_status, stdout = execute_shell_command(
                    "/bin/bash {0}".format(self.verify_script))
            LOG.debug("action script of the operation is: %s",
                      self.verify_script)
        LOG.debug("exit_status ,stdout : %s ,%s", exit_status, stdout)
        if exit_status == 0 and stdout:
            self.actualResult = stdout
            LOG.debug("verifying resultchecker: %s", self.key)
            LOG.debug("verifying resultchecker,expected: %s",
                      self.expectedResult)
            LOG.debug("verifying resultchecker,actual: %s", self.actualResult)
            LOG.debug("verifying resultchecker,condition: %s", self.condition)
            # Coerce stdout to int when an integer comparison is expected.
            if (type(self.expectedResult) is int):
                self.actualResult = int(self.actualResult)
            if self.condition == Condition.EQUAL:
                self.success = self.actualResult == self.expectedResult
            elif self.condition == Condition.GREATERTHAN:
                self.success = self.actualResult > self.expectedResult
            elif self.condition == Condition.GREATERTHANEQUAL:
                self.success = self.actualResult >= self.expectedResult
            elif self.condition == Condition.LESSTHANEQUAL:
                self.success = self.actualResult <= self.expectedResult
            elif self.condition == Condition.LESSTHAN:
                self.success = self.actualResult < self.expectedResult
            elif self.condition == Condition.IN:
                self.success = self.expectedResult in self.actualResult
            else:
                self.success = False
                LOG.debug(
                    "error happened when resultchecker: %s Invalid condition",
                    self.key)
        else:
            self.success = False
            LOG.debug(
                "error happened when resultchecker: %s verifying the result",
                self.key)
            # NOTE(review): on the local (execute_shell_command) paths no
            # 'stderr' variable is bound, so this line can raise NameError
            # — confirm against upstream.
            LOG.error(stderr)
        LOG.debug(
            "verifying resultchecker: %s,the result is : %s", self.key,
            self.success)
        return self.success
| StarcoderdataPython |
3367726 | import bisect
class Solution:
    """Search in a rotated sorted array (LeetCode 33-style, no duplicates)."""

    def recursive(self, nums):
        """Locate the rotation pivot: the index of the maximum element.

        Returns -1 when the array is not rotated (already ascending),
        and -2 for an empty array.
        """
        if not nums:
            return -2
        if len(nums) == 1:
            return -1
        if len(nums) == 2:
            if nums[0] > nums[1]:
                return 0
            return -1
        # First element smaller than last => no rotation at all.
        if nums[0] < nums[-1]:
            return -1
        rotate = int((len(nums)-1)/2)
        if nums[0] > nums[rotate]:
            # Pivot lies in the left half; this expression reduces to
            # rotate = self.recursive(nums[0:rotate+1]).
            rotate -= rotate - self.recursive(nums[0:rotate+1])
        elif nums[rotate+1] > nums[-1]:
            # Pivot lies in the right half; offset the sub-result.
            rotate += self.recursive(nums[rotate+1:]) + 1
        return rotate

    def search(self, nums, target):
        """
        :type nums: List[int]
        :type target: int
        :rtype: int

        Binary-search whichever sorted segment can contain the target;
        returns the index of target or -1 when absent.
        """
        rotate = self.recursive(nums)
        r = -1
        # rotate+1 > 0 means a real pivot was found (not -1/-2 sentinels).
        if rotate+1 > 0 and target >= nums[0]:
            # Search the left ascending run; bisect's hi=rotate excludes the
            # pivot slot itself, but a miss returns hi, which then checks
            # nums[rotate] below.
            r = bisect.bisect_left(nums, target, 0, rotate)
        else:
            # Search the right ascending run (or the whole array when the
            # input was unrotated/empty).
            r = bisect.bisect_left(nums, target, max(0, rotate + 1))
        return r if r < len(nums) and nums[r] == target else -1
if __name__ == "__main__":
    # Ad-hoc smoke tests: sorted, rotated, large, tiny and empty inputs.
    print(Solution().search([1,3,5,7,9,10,11], 5))
    print(Solution().search([1,3,5], 5))
    nums = [57,58,59,62,63,66,68,72,73,74,75,76,77,78,80,81,86,95,96,97,98,100,101,102,103,110,119,120,121,123,125,126,127,132,136,144,145,148,149,151,152,160,161,163,166,168,169,170,173,174,175,178,182,188,189,192,193,196,198,199,200,201,202,212,218,219,220,224,225,229,231,232,234,237,238,242,248,249,250,252,253,254,255,257,260,266,268,270,273,276,280,281,283,288,290,291,292,294,295,298,299,4,10,13,15,16,17,18,20,22,25,26,27,30,31,34,38,39,40,47,53,54]
    print(Solution().search(nums, 30))
    print(Solution().search([2,3,4,5,6,7,8,9,1], 3))
    nums = [284,287,289,293,295,298,0,3,8,9,10,11,12,15,17,19,20,22,26,29,30,31,35,36,37,38,42,43,45,50,51,54,56,58,59,60,62,63,68,70,73,74,81,83,84,87,92,95,99,101,102,105,108,109,112,114,115,116,122,125,126,127,129,132,134,136,137,138,139,147,149,152,153,154,155,159,160,161,163,164,165,166,168,169,171,172,174,176,177,180,187,188,190,191,192,198,200,203,204,206,207,209,210,212,214,216,221,224,227,228,229,230,233,235,237,241,242,243,244,246,248,252,253,255,257,259,260,261,262,265,266,268,269,270,271,272,273,277,279,281]
    print(Solution().search(nums, 235))
    print(Solution().search([5, 1, 3], 1))
    print(Solution().search([7, 8, 1, 2, 3, 4, 5, 6], 2))
    print(Solution().search([3, 4, 5, 6, 1, 2], 2))
    print(Solution().search([4, 5, 6, 7, 0, 1, 2], 0))
    # Absent targets should print -1.
    print(Solution().search([6, 7, 8, 1, 2, 3, 4, 5], 13))
    print(Solution().search([1, 3], 3))
    print(Solution().search([3, 1], 1))
    print(Solution().search([0], 1))
    print(Solution().search([], 1))
| StarcoderdataPython |
4832384 | #!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/CPPDEFINES/pkg-config.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Verify merging with MergeFlags to CPPPDEFINES with various data types.
"""
# Functional test: flags parsed from a pkg-config file must merge into
# CPPDEFINES correctly whether CPPDEFINES is a list or a dict.
# NOTE: the generated SConstruct uses Python 2 print statements, matching
# the SCons generation this test ships with.
import TestSCons

test = TestSCons.TestSCons()

pkg_config_path = test.where_is('pkg-config')
if not pkg_config_path:
    test.skip_test("Could not find 'pkg-config' in system PATH, skipping test.\n")

# Minimal .pc file whose Cflags carry one bare and one valued define.
test.write('bug.pc', """\
prefix=/usr
exec_prefix=${prefix}
libdir=${exec_prefix}/lib
includedir=${prefix}/include
Name: bug
Description: A test case .pc file
Version: 1.2
Cflags: -DSOMETHING -DVARIABLE=2
""")

test.write('main.c', """\
int main(int argc, char *argv[])
{
return 0;
}
""")

# Exercise ParseConfig and MergeFlags against list- and dict-shaped
# CPPDEFINES (see the two referenced tigris issues).
test.write('SConstruct', """\
# http://scons.tigris.org/issues/show_bug.cgi?id=2671
# Passing test cases
env_1 = Environment(CPPDEFINES=[('DEBUG','1'), 'TEST'])
env_1.ParseConfig('PKG_CONFIG_PATH=. %(pkg_config_path)s --cflags bug')
print env_1.subst('$_CPPDEFFLAGS')
env_2 = Environment(CPPDEFINES=[('DEBUG','1'), 'TEST'])
env_2.MergeFlags('-DSOMETHING -DVARIABLE=2')
print env_2.subst('$_CPPDEFFLAGS')
# Failing test cases
env_3 = Environment(CPPDEFINES={'DEBUG':1, 'TEST':None})
env_3.ParseConfig('PKG_CONFIG_PATH=. %(pkg_config_path)s --cflags bug')
print env_3.subst('$_CPPDEFFLAGS')
env_4 = Environment(CPPDEFINES={'DEBUG':1, 'TEST':None})
env_4.MergeFlags('-DSOMETHING -DVARIABLE=2')
print env_4.subst('$_CPPDEFFLAGS')
# http://scons.tigris.org/issues/show_bug.cgi?id=1738
env_1738_1 = Environment(tools=['default'])
env_1738_1.ParseConfig('PKG_CONFIG_PATH=. %(pkg_config_path)s --cflags --libs bug')
env_1738_1.Append(CPPDEFINES={'value' : '1'})
print env_1738_1.subst('$_CPPDEFFLAGS')
"""%locals() )

# Expected stdout from the five print statements above.
expect_print_output="""\
-DDEBUG=1 -DTEST -DSOMETHING -DVARIABLE=2
-DDEBUG=1 -DTEST -DSOMETHING -DVARIABLE=2
-DDEBUG=1 -DTEST -DSOMETHING -DVARIABLE=2
-DDEBUG=1 -DTEST -DSOMETHING -DVARIABLE=2
-DSOMETHING -DVARIABLE=2 -Dvalue=1
"""
build_output="scons: `.' is up to date.\n"
expect = test.wrap_stdout(build_str=build_output,
                          read_str = expect_print_output)
test.run(arguments = '.', stdout=expect)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| StarcoderdataPython |
3266929 | <gh_stars>0
from django.urls import path
from . import views
urlpatterns = [
    # Landing page.
    path('', views.home, name='demo-home'),
    # AJAX endpoints returning an article and its model predictions.
    path('ajax/getArticle', views.getArticle, name="getArticle"),
    path('ajax/getArticlePredictions', views.getArticlePredictions, name="getArticlePredictions"),
] | StarcoderdataPython |
3205573 | <filename>clustering/centroidspace.py
# clusterspace.py
'''
Clustering algorithm.
'''
# Things to fix
'''
Nothing yet :)
'''
# Importing dependencies
import numpy as np
# Code
class centroid:
    """A single cluster centre that accumulates assigned points between updates."""

    def __init__(self, pos):
        self.pos = pos
        self.oldpos = []
        self.reset()
        self.labels = []

    def reset(self):
        """Clear the per-iteration assignment bookkeeping."""
        self.assigned = []
        self.got_assigned = False

    def did_improve(self):
        # NOTE(review): despite the name, this returns True when the position
        # did NOT change in the last update (oldpos == pos); callers rely on
        # that polarity, so it is kept as-is.
        return np.array_equal(self.oldpos, self.pos)

    def add(self, position):
        """Assign one data point to this centroid for the current iteration."""
        self.assigned.append(position)
        self.got_assigned = True

    def distance(self, positions):
        """Euclidean distance from this centroid to each row of *positions*."""
        return np.sqrt(np.sum(np.square(np.subtract(self.pos, positions)), axis=1))

    def update(self):
        """Move to the mean of the assigned points (if any) and reset."""
        self.oldpos = self.pos.copy()
        if self.got_assigned:
            self.pos = np.mean(self.assigned, axis=0)
        self.reset()

    def addlabel(self, label):
        self.labels.append(label)

    def __str__(self):
        return 'Centroid at: %s' % self.pos


class centroidspace:
    """K-means-style clustering over a fixed (optionally shrinking) set of centroids."""

    def __init__(self, n_clusters=3, dims=2, init_positions=None):
        # Bug fix: `init_positions != None` breaks when callers pass a NumPy
        # array (elementwise comparison yields an array whose truth value is
        # ambiguous); an identity check is also the idiomatic None test.
        if init_positions is not None:
            self.centroids = [centroid(position) for position in init_positions]
            self.n_clusters = len(self.centroids)
            self.dims = len(self.centroids[0].pos)
        else:
            self.n_clusters = n_clusters
            self.dims = dims
            # Tiny random jitter avoids coincident centroids at the origin.
            self.centroids = [centroid([np.random.random()*0.00001 for dim in range(self.dims)])
                              for n in range(self.n_clusters)]

    def did_improve(self):
        """True when at least one centroid reports did_improve()."""
        for troid in self.centroids:
            if troid.did_improve():
                return True
        return False

    def singlefit(self, positions, update=True):
        """Assign every position to its nearest centroid; optionally update centres."""
        designations = np.argmin([troid.distance(positions) for troid in self.centroids], axis=0)
        for designation, position in zip(designations, positions):
            self.centroids[designation].add(position)
        if update:
            for troid in self.centroids:
                troid.update()

    def singlereduction(self, positions, epochs=50):
        """Run up to *epochs* fit passes, then drop the least-populated centroid."""
        for epoch in range(epochs):
            self.singlefit(positions)
            if not self.did_improve():
                break
        # Extra assignment pass (without moving) to measure populations.
        self.singlefit(positions, update=False)
        self.centroids.pop(np.argmin([len(troid.assigned) for troid in self.centroids]))
        for troid in self.centroids:
            troid.update()

    def fit(self, positions, epochs=50):
        """Iterate singlefit up to *epochs* times, stopping early on convergence."""
        for epoch in range(epochs):
            self.singlefit(positions)
            if not self.did_improve():
                print('Stopped improving after %s epochs' % epoch)
                break

    def reductionfit(self, positions, epochs=50, min_centroids=7):
        """Repeatedly fit-and-drop centroids until only *min_centroids* remain."""
        for epoch in range(epochs):
            self.singlereduction(positions, epochs)
            if len(self.centroids) == min_centroids:
                break

    def predict(self, position, label=None):
        """Return the centroid nearest to *position*, optionally tagging it with *label*."""
        dists = [troid.distance(np.atleast_2d(position)) for troid in self.centroids]
        tmp = self.centroids[np.argmin(dists)]
        # Same None-check fix as __init__: identity test instead of !=.
        if label is not None:
            tmp.addlabel(label)
        return tmp

    def disp(self):
        """Print every centroid's position."""
        for troid in self.centroids:
            print(troid)
| StarcoderdataPython |
3378763 | <gh_stars>0
from modeflip.valid_model import Object
from modeflip.valid_model.descriptors import String, List, EmbeddedObject
from modeflip.utils.valid_model_utils import Integer, Float
from modeflip.models.designer import Picture
class Garment(Object):
    # Garment, collection and designer identifiers (all required).
    gid = Integer(nullable=False)
    cid = Integer(nullable=False)
    did = Integer(nullable=False)
    # Retail price (required).
    price = Float(nullable=False)
    # Optional external shop URL.
    shop_link = String()
    # Primary picture plus any number of detail shots.
    pic = EmbeddedObject(Picture)
    details = List(value=EmbeddedObject(Picture))
details = List(value=EmbeddedObject(Picture))
class GarmentConfig(object):
    """MongoDB-backed persistence layer for Garment documents."""

    def __init__(self, database):
        """Bind to *database*, select the 'garments' collection, build indexes."""
        self.database = database
        self.collection = database['garments']
        self.ensure_indexes()

    def ensure_indexes(self):
        """Create the compound (did, cid, gid) lookup index."""
        index_spec = [('did', 1), ('cid', 1), ('gid', 1)]
        self.collection.ensure_index(index_spec)

    def get(self, did, cid, gid):
        """Fetch one garment, or None when no matching document exists."""
        query = {'did': did, 'cid': cid, 'gid': gid}
        doc = self.collection.find_one(query, {'_id': 0})
        if not doc:
            return None
        return Garment(**doc)

    def get_all_ids(self, did, cid):
        """Return the gids of a designer's collection, ascending."""
        cursor = self.collection.find({'did': did, 'cid': cid}, {'_id':0, 'gid':1}).sort('gid', 1)
        return [record['gid'] for record in cursor]

    def get_all_garments_by_designer_collection(self, did, cid):
        """Return all garments of a designer's collection, newest gid first."""
        cursor = self.collection.find({'did': did, 'cid': cid}, {'_id': 0}).sort('gid', -1)
        return [Garment(**doc) for doc in cursor]

    def set(self, garment):
        """Validate and upsert *garment* keyed by (did, cid, gid); return it."""
        garment.validate()
        selector = {'did': garment.did, 'cid': garment.cid, 'gid': garment.gid}
        self.collection.update(selector, garment.__json__(), safe=True, upsert=True)
        return garment

    def delete(self, did, cid, gid):
        """Remove the garment if present; True when something was deleted."""
        garment = self.get(did, cid, gid)
        if not garment:
            return False
        self.collection.remove({'did': did, 'cid': cid, 'gid': gid})
        return True
3308126 | import pytest
import raven
from raven.models.rv import RV, RVI, Ost, RVFile, isinstance_namedtuple
import datetime as dt
from collections import namedtuple
from .common import TESTDATA
from pathlib import Path
class TestRVFile:
    """RVFile parsing: extension/stem extraction and template detection."""

    def test_simple_rv(self):
        # Plain .rvp file: not a template.
        fn = list(TESTDATA['raven-hmets'].glob('*.rvp'))[0]
        rvf = RVFile(fn)
        assert rvf.ext == 'rvp'
        assert rvf.stem == 'raven-hmets-salmon'
        assert not rvf.is_tpl

    def test_simple_tpl(self):
        # .tpl suffix marks an Ostrich template; ext/stem come from the inner name.
        fn = list(TESTDATA['ostrich-gr4j-cemaneige'].glob('*.rvp.tpl'))[0]
        rvf = RVFile(fn)
        assert rvf.ext == 'rvp'
        assert rvf.stem == 'raven-gr4j-salmon'
        assert rvf.is_tpl

    def test_ostIn(self):
        # ostIn.txt is also treated as a template despite the .txt extension.
        fn = list(TESTDATA['ostrich-gr4j-cemaneige'].glob('ostIn.txt'))[0]
        rvf = RVFile(fn)
        assert rvf.ext == 'txt'
        assert rvf.stem == 'ostIn'
        assert rvf.is_tpl

    def test_tags(self):
        # Tags are the {placeholders} found in the packaged model template.
        rvp = list((Path(raven.__file__).parent / 'models' / 'raven-gr4j-cemaneige').glob("*.rvp"))[0]
        rvf = RVFile(rvp)
        assert isinstance(rvf.tags, list)
        assert 'params.GR4J_X3' in rvf.tags

    def test_fail(self):
        # A directory is not a valid RV file.
        fn = Path(raven.__file__).parent
        with pytest.raises(ValueError):
            RVFile(fn)


class TestRV:
    """Behaviour of the RV/RVI configuration containers."""

    def test_end_date(self):
        rvi = RVI(run_name='test',
                  start_date=dt.datetime(2000, 1, 1),
                  end_date=dt.datetime(2000, 1, 11),
                  )
        assert 10 == rvi.duration
        # Changing the duration shifts the end date accordingly.
        rvi.duration = 11
        assert dt.datetime(2000, 1, 12) == rvi.end_date

    def test_params(self):
        class RVP(RV):
            params = namedtuple('p', 'x, y')
        rvp = RVP()
        rvp.params = RVP.params(1, 2)
        assert rvp.params.x == 1

    def test_dict_interface(self):
        rv = RV(run_name='test')
        # Item access mirrors attribute access...
        assert rv['run_name'] == rv.run_name
        # ...but item assignment of unknown keys is rejected.
        with pytest.raises(AttributeError):
            rv['r'] = 6

    def test_evaluation_metrics(self):
        rvi = RVI()
        rvi.evaluation_metrics = 'LOG_NASH'
        # Unknown metric names are rejected.
        with pytest.raises(ValueError):
            rvi.evaluation_metrics = 'JIM'

    def test_update(self):
        rv = RV(a=None, b=None)
        rv.update({'a': 1, 'b': 2})
        assert rv.a == 1
        # New attributes become reachable via item access too.
        rv.c = 1
        assert rv['c'] == 1

    def test_namedtuple(self):
        class Mod(RV):
            params = namedtuple('params', 'x1, x2, x3')
        m = Mod(params=Mod.params(1, 2, 3))
        assert m.params.x1 == 1


class TestOst:
    """Ostrich-specific configuration rendering."""

    def test_random(self):
        o = Ost()
        # Unset seed renders as an empty string; a value renders the keyword.
        assert o.random_seed == ''
        o.random_seed = 0
        assert o.random_seed == 'RandomSeed 0'


def test_isinstance_namedtuple():
    # Detects namedtuple instances but not plain sequences.
    X = namedtuple('params', 'x1, x2, x3')
    x = X(1, 2, 3)
    assert isinstance_namedtuple(x)
    assert not isinstance_namedtuple([1, 2, 3])
| StarcoderdataPython |
1789406 | #!/usr/bin/env python
import glob
for name in glob.glob('grading/*.cl.out'):
with open(name, 'r+') as file:
text = file.read().replace('/usr/class/cs143/cool', '..')
file.seek(0)
file.write(text)
file.truncate()
| StarcoderdataPython |
1667902 | <reponame>machow/pins-python
# flake8: noqa
# Set version ----
from importlib_metadata import version as _v
__version__ = _v("pins")
del _v
# Imports ----
from .cache import cache_prune, cache_info
from .constructors import (
board_deparse,
board_folder,
board_temp,
board_local,
board_github,
board_urls,
board_rsconnect,
board_s3,
board,
)
| StarcoderdataPython |
3206275 | import os
tf_version = float(os.environ["TF_VERSION"][:3])
tf_keras = bool(os.environ["TF_KERAS"] == "True")
tf_python = bool(os.environ["TF_PYTHON"] == "True")
if tf_version >= 2:
if tf_keras:
from keras_adamw.optimizers_v2 import AdamW, NadamW, SGDW
elif tf_python:
from keras_adamw.optimizers_tfpy import AdamW, NadamW, SGDW
else:
from keras_adamw.optimizers import AdamW, NadamW, SGDW
else:
if tf_keras:
from keras_adamw.optimizers_225tf import AdamW, NadamW, SGDW
else:
from keras_adamw.optimizers_225 import AdamW, NadamW, SGDW
if tf_keras:
import tensorflow.keras.backend as K
from tensorflow.keras.layers import Input, Dense, GRU, Bidirectional, Embedding
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.regularizers import l2
from tensorflow.keras.constraints import MaxNorm as maxnorm
from tensorflow.keras.optimizers import Adam, Nadam, SGD
elif tf_python:
import tensorflow.keras.backend as K # tf.python.keras.backend is very buggy
from tensorflow.python.keras.layers import Input, Dense, GRU, Bidirectional
from tensorflow.python.keras.layers import Embedding
from tensorflow.python.keras.models import Model, load_model
from tensorflow.python.keras.regularizers import l2
from tensorflow.python.keras.constraints import MaxNorm as maxnorm
from tensorflow.python.keras.optimizers import Adam, Nadam, SGD
else:
import keras.backend as K
from keras.layers import Input, Dense, GRU, Bidirectional, Embedding
from keras.models import Model, load_model
from keras.regularizers import l2
from keras.constraints import MaxNorm as maxnorm
from keras.optimizers import Adam, Nadam, SGD
if tf_version < 2 and tf_keras:
from keras_adamw.utils_225tf import get_weight_decays, fill_dict_in_order
from keras_adamw.utils_225tf import reset_seeds, K_eval
else:
from keras_adamw.utils import get_weight_decays, fill_dict_in_order
from keras_adamw.utils import reset_seeds, K_eval
# ALL TESTS (7 total):
# - keras (TF 1.14.0, Keras 2.2.5) [test_optimizers.py]
# - tf.keras (TF 1.14.0, Keras 2.2.5) [test_optimizers_v2.py]
# - keras (TF 2.0.0, Keras 2.3.0) [test_optimizers.py --TF_EAGER=True]
# - keras (TF 2.0.0, Keras 2.3.0) [test_optimizers.py --TF_EAGER=False]
# - tf.keras (TF 2.0.0, Keras 2.3.0) [test_optimizers_v2.py, --TF_EAGER=True]
# - tf.keras (TF 2.0.0, Keras 2.3.0) [test_optimizers_v2.py, --TF_EAGER=False]
# - tf.python.keras (TF 2.0.0, Keras 2.3.0) [test_optimizers_tfpy.py]
| StarcoderdataPython |
3327363 | <reponame>jkent/pybot
# -*- coding: utf-8 -*-
# vim: set ts=4 et
import re
from datetime import datetime
from . import config
# Matches one raw IRC line: an optional ":prefix " (source[!user][@host]),
# a command, and an optional parameter string (RFC 1459/2812 grammar).
message_re = re.compile(
    '^(?:' +
        ':(?P<prefix>' +
            '(?P<source>[^ !@]+)' +
            '(?:' +
                '(?:!(?P<user>[^ @]+))?' +
                '@(?P<host>[^ ]+)' +
            ')?' +
        ') ' +
    ')?' +
    '(?P<cmd>[^ :]+)' +
    '(?: (?P<params>.+))?$'
)


def parse_params(params):
    """Split an IRC parameter string into a list.

    A leading ':' marks a trailing parameter that swallows the rest of the
    line; per RFC 2812 at most 14 middle parameters may precede it.
    """
    l = []
    while params:
        if params[0] == ':':
            l.append(params[1:])
            break
        if len(l) == 14:
            l.append(params)
            break
        param, _, params = params.partition(' ')
        l.append(param)
    return l


def parse_message(message):
    """Parse a raw IRC line into a dict with keys
    prefix/source/user/host/cmd/param.

    The command is upper-cased and 'params' is replaced by the parsed
    'param' list. Unparsable lines yield the same key set with empty
    values, so callers can always rely on 'cmd' and 'param'.
    """
    match = message_re.match(message)
    if match:
        d = match.groupdict()
        d['cmd'] = d['cmd'].upper()
        d['param'] = parse_params(d['params'])
        del d['params']
    else:
        # Bug fix: the fallback previously used the key 'command' instead of
        # 'cmd', so consumers reading d['cmd'] (e.g. Message.__init__) broke
        # with AttributeError on any unparsable line.
        d = {'prefix': None, 'source': None, 'user': None, 'host': None,
             'cmd': '', 'param': []}
    return d
class Message(object):
    """A parsed IRC line plus reply helpers.

    Attributes are populated from parse_message(); for PRIVMSG/NOTICE the
    channel and reply target are derived, and for PRIVMSG the bot trigger
    text (if any) is extracted.
    """

    def __init__(self, line, bot=None):
        self.bot = bot
        self.raw = line
        self.reply_to = None
        self.time = datetime.utcnow()
        self.channel = None
        self.trigger = None
        self.permissions = {}
        # Merge prefix/source/user/host/cmd/param straight into attributes.
        self.__dict__.update(parse_message(line))

        if self.cmd in ('PRIVMSG', 'NOTICE'):
            # Channel name prefixes (&, #, +, !); otherwise it's a query.
            if self.param[0].startswith(('&', '#', '+', '!')):
                self.channel = self.param[0].lower()
                self.reply_to = self.param[0]
            else:
                self.reply_to = self.source
            if self.cmd == 'PRIVMSG':
                self._detect_trigger()

    def _detect_trigger(self):
        # NOTE(review): this reads self.bot.network / self.bot.nick, so a
        # PRIVMSG parsed with bot=None would raise AttributeError — confirm
        # callers always supply a bot.
        text = self.param[-1]
        directed_triggers = config.config[self.bot.network] \
            .get('directed_triggers', False)
        if directed_triggers:
            if self.channel:
                # In-channel: only "<botnick>, cmd" or "<botnick>: cmd" counts.
                if text.lower().startswith(self.bot.nick.lower()):
                    nicklen = len(self.bot.nick)
                    if len(text) > nicklen and text[nicklen] in [',', ':']:
                        self.trigger = text[nicklen + 1:]
            else:
                # Private message: the whole text is the trigger.
                self.trigger = text
        else:
            # Classic bang-prefix triggers.
            if text.startswith('!'):
                self.trigger = text[1:]

    def reply(self, text, direct=False):
        """Send *text* back where the message came from, or straight to the
        sender when *direct* is True (or when no reply target exists)."""
        if not self.bot:
            raise Exception('No bot object bound')
        if not self.reply_to and not self.source:
            raise Exception('Nobody to reply to')
        # Fall back to direct reply when there is no channel/query target.
        direct |= not bool(self.reply_to)
        recipient = self.source if direct else self.reply_to
        self.bot.privmsg(recipient, text)
| StarcoderdataPython |
4841759 | <filename>src/pytest_alembic/plugin/fixtures.py<gh_stars>0
from typing import Any, Dict, Union
import alembic.config
import pytest
import sqlalchemy
import pytest_alembic
from pytest_alembic.config import Config
def create_alembic_fixture(raw_config=None):
    """Create a new fixture `alembic_runner`-like fixture.

    In many cases, this function should not be strictly necessary. You **can**
    generally rely solely on the :code:`--test-alembic` flag, automatic insertion
    of tests, and the :func:`alembic_runner` fixture.

    However this may be useful in some situations:

    - If you would generally prefer to avoid the :code:`--test-alembic` flag and
      automatic test insertion, this is the function for you!
    - If you have multiple alembic histories and therefore require more than one
      fixture, you will **minimally** need to use this for the 2nd history (if
      not both)

    Examples:
        >>> from pytest_alembic import tests
        >>>
        >>> alembic = create_alembic_fixture()
        >>>
        >>> def test_upgrade_head(alembic):
        ...     tests.test_upgrade_head(alembic)
        >>>
        >>> def test_specific_migration(alembic):
        ...     alembic_runner.migrate_up_to('xxxxxxx')
        ...     assert ...

        Config can also be supplied similarly to the :func:`alembic_config` fixture.

        >>> alembic = create_alembic_fixture({'file': 'migrations.ini'})
    """
    # The inner fixture requests `alembic_engine`, so user overrides of that
    # fixture still apply to fixtures produced here.
    @pytest.fixture
    def _(alembic_engine):
        config = Config.from_raw_config(raw_config)
        with pytest_alembic.runner(config=config, engine=alembic_engine) as runner:
            yield runner
    return _


@pytest.fixture
def alembic_runner(alembic_config, alembic_engine):
    """Produce the primary alembic migration context in which to execute alembic tests.

    This fixture allows authoring custom tests which are specific to your particular
    migration history.

    Examples:
        >>> def test_specific_migration(alembic_runner):
        ...     alembic_runner.migrate_up_to('xxxxxxx')
        ...     assert ...
    """
    # Normalise whatever shape alembic_config returned (dict/Config/etc.).
    config = Config.from_raw_config(alembic_config)
    with pytest_alembic.runner(config=config, engine=alembic_engine) as runner:
        yield runner


@pytest.fixture
def alembic_config() -> Union[Dict[str, Any], alembic.config.Config, Config]:
    """Override this fixture to configure the exact alembic context setup required.

    The return value of this fixture can be one of a few types.

    - If you're only configuring alembic-native configuration, a :class:`alembic.config.Config`
      object is accepted as configuration. This largely leaves pytest-alembic out
      of the setup, so depending on your settings, might be the way to go.
    - If you only have a couple of options to set, you might choose to return
      a ``Dict``.

      The following common alembic config options are accepted as keys.

      - file/config_file_name (commonly alembic.ini)
      - script_location
      - sqlalchemy.url
      - target_metadata
      - process_revision_directives
      - include_schemas

      Additionally you can send a `file` key (akin to `alembic -c`), should your
      `alembic.ini` be otherwise named.

      Note that values here, represent net-additive options on top of what you might
      already have configured in your `env.py`. You should generally prefer to
      configure your `env.py` however you like it and omit such options here.

      You may also use this dict to set pytest-alembic specific features:

      - before_revision_data
      - at_revision_data
      - minimum_downgrade_revision

    - You can also directly return a :ref:`Config` class instance.
      This is your only option if you want to use both pytest-alembic specific features
      **and** construct your own :class:`alembic.config.Config`.

    Examples:
        >>> @pytest.fixture
        ... def alembic_config():
        ...     return {'file': 'migrations.ini'}

        >>> @pytest.fixture
        ... def alembic_config():
        ...     alembic_config = alembic.config.Config()
        ...     alembic_config.set_main_option("script_location", ...)
        ...     return alembic_config
    """
    # Default: no extra configuration on top of the project's env.py.
    return {}


@pytest.fixture
def alembic_engine():
    """Override this fixture to provide pytest-alembic powered tests with a database handle."""
    # Default: a throwaway in-memory SQLite engine.
    return sqlalchemy.create_engine("sqlite:///")
| StarcoderdataPython |
4800109 | from byteio import byteio
import datetime
import io
import os
import platform
import time
cloud_name = os.environ.get("CLOUD_NAME", "unknown")
instance_type = os.environ.get("CLOUD_INSTANCE_TYPE", "unknown")
try:
with open('/etc/centos-release', 'r') as file:
distro = file.read().replace('\n', '')
except Exception:
distro = "unknown"
num_iters = 10000
def run_tests():
for buffer_size in (10000, 100000, 1000000):
run_buffer_size_tests(buffer_size)
def run_buffer_size_tests(buffer_size):
    """Benchmark byteio.copy() for several payload sizes at one buffer size.

    For each payload size, copies the payload ``num_iters`` times between two
    in-memory streams and prints one JSON-formatted result line to stdout.

    :param buffer_size: chunk size (bytes) passed to ``byteio.copy``.
    """
    for bytes_per_iter in (100, 2000, 10000, 100000, 1000000):
        epoch_secs = time.time()
        # NOTE(review): utcfromtimestamp() is deprecated since Python 3.12;
        # datetime.fromtimestamp(epoch_secs, tz=timezone.utc) is the modern form.
        timestamp = datetime.datetime.utcfromtimestamp(epoch_secs).isoformat()
        input_bytes = b"1" * bytes_per_iter
        input_byte_stream = io.BytesIO(input_bytes)
        output_byte_stream = io.BytesIO()
        start = time.perf_counter()
        for _ in range(1, num_iters + 1):
            # Rewind both streams so every iteration copies the same payload
            # without re-allocating the BytesIO objects.
            input_byte_stream.seek(0)
            output_byte_stream.seek(0)
            byteio.copy(input_byte_stream, output_byte_stream, buffer_size)
        stop = time.perf_counter()
        # rounded duration in microseconds (us)
        total_bytes_all_iters = bytes_per_iter * num_iters
        time_us = int((stop - start) * 1000000)
        MB_sec = int((total_bytes_all_iters / (stop - start)) / 1000000)
        # Emit one JSON object per configuration as a single stdout line.
        print('{"operation": "copy"', end='')
        print(', "interface": "lib"', end='')
        print(', "input_paradigm": "stream"', end='')
        print(', "input_type": "bytes"', end='')
        print(', "output_paradigm": "stream"', end='')
        print(', "output_type": "bytes"', end='')
        print(', "language": "python3"', end='')
        print(f', "python3_version": "{platform.python_version()}"', end='')
        print(f', "distro": "{distro}"', end='')
        print(f', "buffer_size": {buffer_size}', end='')
        print(f', "bytes_per_iter": {bytes_per_iter}', end='')
        print(f', "num_iters": {num_iters}', end='')
        print(f', "total_bytes": {total_bytes_all_iters}', end='')
        print(f', "time_us": {time_us}', end='')
        # MB = 1000000 bytes
        print(f', "MB_sec": {MB_sec}', end='')
        print(f', "epoch_secs": "{epoch_secs}"', end='')
        print(f', "timestamp": "{timestamp}"', end='')
        print(f', "cloud_name": "{cloud_name}"', end='')
        print(f', "instance_type": "{instance_type}"', end='')
        print('}')
if __name__ == "__main__":
    run_tests()
| StarcoderdataPython |
3223893 | <reponame>neewy/TinkoffInvestmentsAnalyser
import datetime
class Currency:
    """String constants for the currency codes used by the TCS API."""
    RUB = 'RUB'
    USD = 'USD'
    EUR = 'EUR'
class Operation:
    """A single account operation parsed from an API payload."""
    class Type:
        """String constants for the API's operation types."""
        PAY_IN = 'PayIn'
        PAY_OUT = 'PayOut'
        BUY = 'Buy'
        BUY_CARD = 'BuyCard' # direct buy from the debit card
        SELL = 'Sell'
        DIVIDEND = 'Dividend'
        SERVICE_COMMISION = 'ServiceCommission'
        BROKER_COMMISION = 'BrokerCommission'
        MARGIN_COMMISION = 'MarginCommission'
    def __init__(self, payload):
        # payload is the raw dict for one operation as returned by the API.
        self.type = payload['operationType']
        self.payment = payload['payment']
        self.currency = payload['currency']
        # Operation timestamp, parsed into an aware datetime.
        self.dtm = _create_dtm_from_tcs_iso_dtm(payload['date'])
class Portfolio:
    """A set of open positions parsed from an API payload."""
    def __init__(self, payload):
        self.positions = []
        for entry in payload['positions']:
            self.positions.append(Position(entry))
    def get_currency_to_value(self):
        """Return a mapping of currency code -> total value held in it."""
        totals = {}
        for position in self.positions:
            totals.setdefault(position.currency, 0)
            totals[position.currency] += position.value
        return totals
class Position:
    """A single portfolio position parsed from an API payload."""
    def __init__(self, payload):
        avg_price = payload['averagePositionPrice']
        self.ticker = payload['ticker']
        self.balance = payload['balance']
        self.currency = avg_price['currency']
        # Total position value: average entry price times lot balance.
        self.value = avg_price['value'] * self.balance
class Ticker:
    """An instrument description parsed from an API payload."""
    class Type:
        """String constants for the API's instrument types."""
        STOCK = 'Stock'
        CURRENCY = 'Currency'
    def __init__(self, payload):
        self.figi = payload['figi']
        self.ticker = payload['ticker']
        self.type = payload['type']
        self.name = payload['name']
class Candle:
    """One OHLC candle parsed from an API payload."""
    class Interval:
        """String constants for the candle resolutions accepted by the API."""
        MIN1 = '1min'
        MIN2 = '2min'
        MIN3 = '3min'
        MIN5 = '5min'
        MIN10 = '10min'
        MIN15 = '15min'
        MIN30 = '30min'
        HOUR = 'hour'
        DAY = 'day'
        WEEK = 'week'
        MONTH = 'month'
        @staticmethod
        def to_timedelta(interval):
            """Convert an interval constant to a ``datetime.timedelta``.

            Note: MONTH has no fixed length and is not supported -- it falls
            through to ``NotImplementedError``.
            """
            if 'min' in interval:
                # strip('min') removes the trailing 'min' characters, leaving
                # the numeric prefix (works for all the '*min' constants).
                val = int(interval.strip('min'))
                return datetime.timedelta(minutes=val)
            elif interval == 'hour':
                return datetime.timedelta(hours=1)
            elif interval == 'day':
                return datetime.timedelta(days=1)
            elif interval == 'week':
                return datetime.timedelta(weeks=1)
            else:
                raise NotImplementedError
    def __init__(self, payload):
        self.figi = payload['figi']
        self.interval = payload['interval']
        # h/l/o/c are the candle's high, low, open and close prices.
        self.max = payload['h']
        self.min = payload['l']
        self.open = payload['o']
        self.close = payload['c']
        self.dtm = _create_dtm_from_tcs_iso_dtm(payload['time'])
def _create_dtm_from_tcs_iso_dtm(dtm_str): # tcs jokes
try:
dtm = datetime.datetime.strptime(dtm_str, '%Y-%m-%dT%H:%M:%S.%f%z')
except:
dtm = datetime.datetime.strptime(dtm_str, '%Y-%m-%dT%H:%M:%S%z')
return dtm
| StarcoderdataPython |
3214302 | import os
import sys
import threading
import socket
import time
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, parentdir)
import shadowysocket
class echoserver():
    """Blocking echo server bound to 127.0.0.1:12300.

    NOTE(review): the constructor never returns -- it accept()s in an
    infinite loop and spawns one thread per connection.  It is intended to
    be used as a Thread target (see ``test_socket``).
    """
    def __init__(self):
        self.conn = socket.socket()
        self.conn.bind(("127.0.0.1", 12300))
        self.conn.listen(5)
        while True:
            conn, address = self.conn.accept()
            # Wrap the accepted socket; shadowysocket is presumably a
            # socket-like encryption/obfuscation wrapper -- confirm in module.
            conn = shadowysocket.shadowysocket(conn)
            threading.Thread(target=self.echo, args=(conn, address)).start()
    def echo(self, conn, address):
        """Echo received data back, suffixed with the peer's IP address."""
        while True:
            data = conn.recv(4096)
            if len(data) == 0:
                break # connection close
            conn.sendall(data + address[0].encode('utf-8'))
class echoclient():
    """Client that connects to the local echo server through the same wrapper."""
    def __init__(self):
        self.conn = socket.create_connection(("127.0.0.1", 12300))
        self.conn = shadowysocket.shadowysocket(self.conn)
    def send(self, data):
        # data: bytes to send in full.
        self.conn.sendall(data)
    def recv(self):
        # Returns up to 4096 bytes from the server.
        return self.conn.recv(4096)
    def close(self):
        self.conn.close()
def test_socket():
    """End-to-end test: two clients echo through one server, three rounds."""
    # echoserver's constructor runs the accept loop, so instantiating the
    # class as the thread target effectively starts the server.
    server = threading.Thread(target=echoserver)
    server.daemon = True
    server.start()
    for _ in range(3):
        client1 = echoclient()
        client2 = echoclient()
        client1.send(b"hello world")
        assert client1.recv() == b"hello world127.0.0.1"
        client2.send(b"another client")
        assert client2.recv() == b"another client127.0.0.1"
        client1.send(b"bye")
        assert client1.recv() == b"bye127.0.0.1"
        client1.close()
        client2.close()
    time.sleep(0.5) # wait for connection thread to die
    assert threading.active_count() == 2 # main+server
if __name__ == "__main__":
    test_socket()
| StarcoderdataPython |
# Program untuk menampilkan belah ketupat
# (prints an 11-wide ASCII diamond / rhombus of '#' characters)
print('\n==========Belah Ketupat==========\n')

# Top half, including the widest middle row: widths 1, 3, 5, 7, 9, 11.
# Bug fix: the original only printed a newline on the last top row, so the
# whole upper half of the diamond collapsed onto a single output line.
for row in range(6, 0, -1):
    print(' ' * row + '#' * (2 * (6 - row) + 1))

# Bottom half: widths 9, 7, 5, 3, 1, indented one more space per row.
for row in range(2, 7):
    print(' ' * row + '#' * (11 - 2 * (row - 1)))

print('')
1723858 | <reponame>NThakur20/DeepCT
import numpy as np
def subword_weight_to_word_weight(subword_weight_str, m, smoothing, keep_all_terms):
    """Merge BERT sub-word weights into per-word integer term frequencies.

    ``subword_weight_str`` is a tab-separated list of ``"token weight"``
    pairs, where WordPiece continuation tokens start with ``"##"``.
    Continuations are glued back onto the preceding token; only the first
    sub-word's weight is kept for the reassembled word.

    :param subword_weight_str: e.g. ``"play 0.5\\t##ing 0.2\\tchess 0.9"``.
    :param m: scaling factor applied to each weight.
    :param smoothing: ``"sqrt"`` scales ``m * sqrt(w)``; anything else
        scales ``m * w`` linearly.
    :param keep_all_terms: if True, terms whose scaled frequency rounds
        below 1 are kept with frequency 1 instead of being dropped.
    :return: dict mapping word -> integer term frequency (max over duplicates).
    """
    fulltokens = []
    weights = []
    for item in subword_weight_str.split('\t'):
        token, weight = item.split(' ')
        weight = float(weight)
        token = token.strip()
        if token.startswith('##'):
            # Continuation piece: append to the previous word; its weight
            # is intentionally discarded (first sub-word's weight wins).
            fulltokens[-1] += token[2:]
        else:
            fulltokens.append(token)
            weights.append(weight)
    assert len(fulltokens) == len(weights)
    selected_tokens = {}
    for token, w in zip(fulltokens, weights):
        if token == '[CLS]' or token == '[SEP]' or token == '[PAD]':
            continue  # BERT special tokens carry no term weight
        if w < 0:
            w = 0
        if smoothing == "sqrt":
            tf = int(np.round(m * np.sqrt(w)))
        else:
            tf = int(np.round(m * w))
        if tf < 1:
            if not keep_all_terms:
                continue
            tf = 1
        # A word may occur several times; keep its highest frequency.
        selected_tokens[token] = max(tf, selected_tokens.get(token, 0))
    return selected_tokens
3206124 | <reponame>bdytx5/tsm
import os
import glob
import sys
import cv2
import shutil
import argparse
out_path = ''
count = 0
def dump_frames(vid_path, num_of_videos):
    """Extract every frame of one video to JPEG files.

    Frames are written to ``<out_path>/<video name>/img_00001.jpg`` etc.
    If a frame cannot be read, the partially-extracted directory is removed
    and extraction stops.  ``out_path`` and ``count`` are module-level
    globals; ``num_of_videos`` is only used for the progress message.

    :param vid_path: path to the source video file.
    :param num_of_videos: total number of videos being processed.
    :return: list of frame paths relative to ``out_path``.
    """
    global count
    video = cv2.VideoCapture(vid_path)
    vid_name = vid_path.split('/')[-1].split('.')[0]
    out_full_path = os.path.join(out_path, vid_name)
    fcount = int(video.get(cv2.CAP_PROP_FRAME_COUNT))
    try:
        os.mkdir(out_full_path)
    except OSError:
        pass  # output directory already exists
    file_list = []
    print(fcount)
    for i in range(fcount - 1):
        ret, frame = video.read()
        if not ret:
            # Unreadable frame: discard everything extracted for this video.
            try:
                shutil.rmtree(out_full_path)
            except OSError:
                pass
            break
        cv2.imwrite('{}/img_{:05d}.jpg'.format(out_full_path, i + 1), frame)
        access_path = '{}/img_{:05d}.jpg'.format(vid_name, i + 1)
        file_list.append(access_path)
    # Release the capture handle (the original leaked it).
    video.release()
    count = count + 1
    print('--> {}/{} -> {} done'.format(count, num_of_videos, vid_name))
    sys.stdout.flush()
    return file_list
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="extract frames from videos")
    parser.add_argument("src_dir")
    parser.add_argument("out_dir")
    parser.add_argument("--ext", type=str, default='avi', choices=['avi', 'mp4'],
                        help='video file extensions')
    args = parser.parse_args()

    out_path = args.out_dir
    src_path = args.src_dir
    ext = args.ext

    if not os.path.isdir(out_path):
        print('creating folder: ' + out_path)
        os.makedirs(out_path)

    # Videos are expected one directory level below src_dir.
    vid_list = glob.glob(src_path + '/*/*.' + ext)
    print('total number of videos found: ', len(vid_list))
    try:
        os.mkdir(out_path)
    except OSError:
        pass  # already created above

    for vid in vid_list:
        dump_frames(vid, len(vid_list))
3232335 | <filename>pacote download/Exercicios/ex035-if_else_triangulo.py
''' CORRIGIDO
Desenvolva um program que leia o comprimento de três retas e diga ao usuário se elas podem ou não formar um triângulo.
Para construir um triângulo é necessário que a medida de qualquer um dos lados seja menor que a soma das medidas dos
outros dois e maior que o valor absoluto da diferença entre essas medidas.
'''
from time import sleep
n1 = float(input('Reta 1: '))
n2 = float(input('Reta 2: '))
n3 = float(input('Reta 3: '))
print('Verificando medidas para triângulo...')
sleep(1)
# Triangle inequality: a triangle exists iff every side is shorter than the
# sum of the other two (which also implies each side exceeds the absolute
# difference of the others, so one comparison per side is sufficient).
if n1 < n2 + n3 and n2 < n1 + n3 and n3 < n1 + n2:
    print('É possível formar um triângulo com esses valores!')
else:
    print('Valores inválidos para triângulo.')
1694365 | import numpy as np
import matplotlib.pylab as pl
##############################################################
# Minibatch related functions
##############################################################
def mini_batch(data, weights, batch_size):
    """Draw a uniform random minibatch without replacement.

    Parameters
    ----------
    data : np.array(n, d)
        sample matrix, one sample per row
    weights : np.array(n)
        measure over the samples
    batch_size : int
        number of samples to draw

    Returns
    -------
    tuple
        (selected samples, renormalised weights of the selection,
        indices of the selected samples)
    """
    n_samples = np.shape(data)[0]
    idx = np.random.choice(n_samples, batch_size, replace=False)
    picked = weights[idx]
    sub_weights = picked / np.sum(picked)
    return data[idx], sub_weights, idx
##############################################################
# Plot functions
##############################################################
def plot_perf(nlist, err, color, label, errbar=False, perc=20):
    """Plot mean error versus n on log-log axes, with an optional percentile band.

    :param nlist: x-axis values (e.g. sample sizes).
    :param err: 2-D array of errors, one row per run; averaged over axis 0.
    :param color: matplotlib color for the curve and the shaded band.
    :param label: legend label for the curve.
    :param errbar: if True, shade between the ``perc`` and ``100-perc`` percentiles.
    :param perc: lower percentile of the shaded band.
    """
    pl.loglog(nlist, err.mean(0), label=label, color=color)
    if errbar:
        pl.fill_between(nlist, np.percentile(err,perc,axis=0), np.percentile(err,100-perc,axis=0),
                        alpha=0.2, facecolor=color)
1645050 | <reponame>KingMikeXS/dl
# Python 2 script: for each crit value, compares base damage against
# weapon-skill damage and prints their ratio, for two coefficients.
for crit in range(2,100):
    dmg = crit*0.7+100
    # Crit is capped at 92 for the weapon-skill formula only; dmg above was
    # already computed with the uncapped value.
    if crit > 92:
        crit = 92
    wpdmg = (crit+7)*0.85+100
    print crit, dmg, wpdmg, wpdmg/dmg
print '------------'
# Same comparison with a 0.9 base coefficient and 1.05 weapon coefficient.
for crit in range(2,100):
    dmg = crit*0.9+100
    if crit > 92:
        crit = 92
    wpdmg = (crit+7)*1.05+100
    print crit, dmg, wpdmg, wpdmg/dmg
190829 | <gh_stars>0
import pygame
from random import randint
import os
class cactus:
    # Sprite image paths and pixel sizes, indexed by obstacle variant:
    # 0 = big cactus, 1 = small cactus, 2 = cluster of small cacti.
    models = [os.path.join('assets', 'cactusBig0000.png'), os.path.join(
        'assets', 'cactusSmall0000.png'), os.path.join('assets', 'cactusSmallMany0000.png')]
    size = [(30, 60), (20, 40), (60, 40)]
    def __init__(self, posX, posY): # Constructor; posY is the ground line, the sprite sits on top of it
        self.state = randint(0, 2)  # pick a random cactus variant
        self.posX = posX
        self.posY = posY - self.size[self.state][1]
        self.model = pygame.image.load(self.models[self.state])
        self.model = pygame.transform.scale(self.model, self.size[self.state])
    def move(self, velocity): # Update position, as obstacles are the things that move, not the dino
        self.posX = self.posX - velocity
3274050 | <reponame>jlopez0591/SIGIA
from import_export import resources
from import_export.admin import ImportExportModelAdmin
from django.contrib import admin
from ubicacion.models import *
# django-import-export resource classes: one per model, used by the
# ImportExportModelAdmin subclasses below to drive import/export.
class SedeResource(resources.ModelResource):
    class Meta:
        model = Sede
class FacultadResource(resources.ModelResource):
    class Meta:
        model = Facultad
class EscuelaResource(resources.ModelResource):
    class Meta:
        model = Escuela
class DepartamentoResource(resources.ModelResource):
    class Meta:
        model = Departamento
class CarreraResource(resources.ModelResource):
    class Meta:
        model = Carrera
class FacultadInstanciaResource(resources.ModelResource):
    class Meta:
        model = FacultadInstancia
class EscuelaInstanciaResource(resources.ModelResource):
    class Meta:
        model = EscuelaInstancia
class DepartamentoInstanciaResource(resources.ModelResource):
    class Meta:
        model = DepartamentoInstancia
class CarreraInstanciaResource(resources.ModelResource):
    class Meta:
        model = CarreraInstancia
class SedeModelAdmin(ImportExportModelAdmin):
    resource_class = SedeResource
class FacultadModelAdmin(ImportExportModelAdmin):
    resource_class = FacultadResource
class EscuelaModelAdmin(ImportExportModelAdmin):
    resource_class = EscuelaResource
class DepartamentoModelAdmin(ImportExportModelAdmin):
    resource_class = DepartamentoResource
class CarreraModelAdmin(ImportExportModelAdmin):
    resource_class = CarreraResource
class FacultadInstanciaModelAdmin(ImportExportModelAdmin):
    resource_class = FacultadInstanciaResource
class EscuelaInstanciaModelAdmin(ImportExportModelAdmin):
    resource_class = EscuelaInstanciaResource
class DepartamentoInstanciaModelAdmin(ImportExportModelAdmin):
    # Previously missing: DepartamentoInstancia was registered under
    # DepartamentoModelAdmin, exporting the wrong model's resource.
    resource_class = DepartamentoInstanciaResource
class CarreraInstanciaModelAdmin(ImportExportModelAdmin):
    resource_class = CarreraInstanciaResource
# Register each model with the admin that wraps its own resource class.
admin.site.register(Sede, SedeModelAdmin)
admin.site.register(Facultad, FacultadModelAdmin)  # was FacultadInstanciaModelAdmin (mismatch)
admin.site.register(Escuela, EscuelaModelAdmin)
admin.site.register(Departamento, DepartamentoModelAdmin)
admin.site.register(Carrera, CarreraModelAdmin)
admin.site.register(FacultadInstancia, FacultadInstanciaModelAdmin)
admin.site.register(EscuelaInstancia, EscuelaInstanciaModelAdmin)
admin.site.register(DepartamentoInstancia, DepartamentoInstanciaModelAdmin)  # was DepartamentoModelAdmin
admin.site.register(CarreraInstancia, CarreraInstanciaModelAdmin)
| StarcoderdataPython |
3307979 | #
import sys
import argparse
import os
from struct import *
parser = argparse.ArgumentParser(description='Pack yq6500 image.')
parser.add_argument('file', nargs='+')
parser.add_argument('-d', dest='debug', action='store_true', default=False)
parser.add_argument('-w', dest='binfile')
args = parser.parse_args()

# Flash layout: the directory table starts at BASE; all header fields are
# little-endian unsigned 32-bit values.
BASE = 0x40000
dircount = 1
header = bytearray()
header.extend(pack('<I', dircount))
diroffset = BASE + 4 + 4 * dircount
header.extend(pack('<I', diroffset))
count = len(args.file)
header.extend(pack('<I', count))
# First payload byte follows the file table (count + one <addr, len> pair
# per file).
addr = diroffset + 4 + count * 8
for i in range(count):
    file = args.file[i]
    length = os.stat(file).st_size
    header.extend(pack('<II', addr, length))
    print(file, "%x" % addr, length, i)
    addr += length
    if addr > 8192 * 0x100:
        print("Warning: MP3 area size exceeded")
    if addr > (8192 + 1024) * 0x100:
        print("Error: Total size exceeded")
        sys.exit(1)
if args.debug:
    # Hex dump of the assembled header.
    for b in header:
        print("%02x" % (b), end=' ')
    print()
if args.binfile is not None:
    print("Writing ", args.binfile)
    # 'with' replaces the original try/finally pairs and guarantees the
    # handles are closed even if a read or write raises.
    with open(args.binfile, "wb") as outf:
        outf.write(header)
        for filename in args.file:
            print(" Adding", filename)
            with open(filename, "rb") as f:
                outf.write(f.read())
1606872 | #!/usr/bin/env python3.6
# -*- coding=utf-8 -*-
from contextlib import redirect_stdout
import io
from pecan import program
from pecan.settings import settings
def run_file(filename, expected_output):
    """Evaluate a Pecan program and assert it succeeds with the given stdout.

    Temporarily enables quiet mode, captures everything the program prints,
    and compares it (whitespace-stripped at both ends) against
    ``expected_output``; the previous quiet setting is restored afterwards.
    """
    orig_quiet = settings.is_quiet()
    settings.set_quiet(True)
    f = io.StringIO()
    with redirect_stdout(f):
        prog = program.load(filename)
        assert prog.evaluate().result.succeeded()
    assert f.getvalue().strip() == expected_output.strip()
    settings.set_quiet(orig_quiet)
# Each test below evaluates one example Praline/Pecan program and checks
# its exact (stripped) stdout against the literal expected output.
def test_praline_simple():
    run_file('examples/test_praline_simple.pn', '1\n16\n')
def test_praline_list():
    run_file('examples/test_praline_list.pn', '[1,2,3,4]\n')
def test_praline_match():
    run_file('examples/test_praline_match.pn', '4\n[1,4,9,16]\n-49\n')
def test_praline_compose():
    run_file('examples/test_praline_compose.pn', '1\n0\n2\n')
def test_praline_builtins():
    run_file('examples/test_praline_builtins.pn', '7\n')
def test_praline_pecan_interop():
    run_file('examples/test_praline_pecan_interop.pn', 'false\ntrue\nfalse\n01101001100101101001011001101001100101100110100101101001100101101001011001101001011010011001011001101\n')
def test_praline_do():
    run_file('examples/test_praline_do.pn', '1\n2\n')
def test_praline_split():
    run_file('examples/test_praline_split.pn', '''
([1,2,3,4],[5,6,7,8,9,10])
[1,2,3,4]
[1,2,3,4,5,6,7,8,9,10]
''')
def test_praline_accepting_word():
    run_file('examples/test_praline_accepting_word.pn', '''
[(x,[([],[false])])]
[(x,[([false,false,true,true,true,false,true],[false])])]
''')
def test_praline_examples():
    run_file('examples/test_praline_examples.pn', '''
[(x,-2)]
''')
def test_praline_operators():
    run_file('examples/test_praline_operators.pn', '''
false
false
false
true
false
true
true
true
[true,false]
[true,true]
''')
def test_praline_graphing():
    run_file('examples/test_praline_graphing.pn', '''
[(-10,-20),(-9,-18),(-8,-16),(-7,-14),(-6,-12),(-5,-10),(-4,-8),(-3,-6),(-2,-4),(-1,-2),(0,0),(1,2),(2,4),(3,6),(4,8),(5,10),(6,12),(7,14),(8,16),(9,18),(10,20)]
''')
def test_praline_real_format():
    run_file('examples/test_praline_real_format.pn', '''
[(x,+1.0(0)^ω)]
[(y,+0.1(0)^ω)]
[(y,+11.10(10)^ω)]
''')
def test_praline_file_io():
    run_file('examples/test_praline_file_io.pn', '''
blah blah
''') # Note: the extra space is important. I want to test that we can write strings with newlines in them
def test_praline_split_on():
    run_file('examples/test_praline_split_on.pn', '''
[]
[[]]
[[],[1],[0,1],[0,0,0,1]]
''')
def test_praline_match_syntax():
    run_file('examples/test_praline_match_syntax.pn', '''
(8,10)
(88,83109)
''')
def test_praline_collatz():
    run_file('examples/collatz.pn', '''
[31041,93124,46562,23281,69844,34922,17461,52384,26192,13096,6548,3274,1637,4912,2456,1228,614,307,922,461,1384,692,346,173,520,260,130,65,196,98,49,148,74,37,112,56,28,14,7,22,11,34,17,52,26,13,40,20,10,5,16,8,4,2,1]
[416,69]
''')
| StarcoderdataPython |
39885 | """
Unit test script for pyeto.thornthwaite.py
"""
import unittest
import pyeto
class TestThornthwaite(unittest.TestCase):
    """Unit tests for pyeto's Thornthwaite potential-evapotranspiration code,
    checked against published worked examples and astronomical tables."""
    def test_monthly_mean_daylight_hours(self):
        # Test against values for latitude 20 deg N from Bautista et al (2009)
        # Calibration of the equations of Hargreaves and Thornthwaite to
        # estimate the potential evapotranspiration in semi-arid and subhumid
        # tropical climates for regional applications. Atmosfera 22(4), 331-
        # 348.
        test_mmdlh = [
            10.9,  # Jan
            11.3,  # Feb
            11.9,  # Mar
            12.5,  # Apr
            12.9,  # May
            13.2,  # Jun
            13.1,  # Jul
            12.7,  # Aug
            12.1,  # Sep
            11.5,  # Oct
            11.0,  # Nov
            10.8,  # Dec
        ]
        mmdlh = pyeto.monthly_mean_daylight_hours(pyeto.deg2rad(20.0))
        # Values were only quoted to 1 decimal place so check they are accurate
        # to within 12 minutes (0.2 hours)
        for m in range(12):
            self.assertAlmostEqual(mmdlh[m], test_mmdlh[m], delta=0.15)
        # Test against values for latitude 46 deg N from Mimikou M. and
        # Baltas E., Technical hydrology, Second edition, NTUA, 2002.
        # cited in PAPADOPOULOU E., VARANOU E., BALTAS E., DASSAKLIS A., and
        # MIMIKOU M. (2003) ESTIMATING POTENTIAL EVAPOTRANSPIRATION AND ITS
        # SPATIAL DISTRIBUTION IN GREECE USING EMPIRICAL METHODS.
        test_mmdlh = [
            8.9,   # Jan
            10.1,  # Feb
            11.6,  # Mar
            13.3,  # Apr
            14.7,  # May
            15.5,  # Jun
            15.2,  # Jul
            13.9,  # Aug
            12.3,  # Sep
            10.7,  # Oct
            9.2,   # Nov
            8.5,   # Dec
        ]
        mmdlh = pyeto.monthly_mean_daylight_hours(pyeto.deg2rad(46.0))
        # Values were only quoted to 1 decimal place so check they are accurate
        # to within 12 minutes (0.2 hours)
        for m in range(12):
            self.assertAlmostEqual(mmdlh[m], test_mmdlh[m], delta=0.15)
        # Test against values obtained for Los Angles, California,
        # latitude 34 deg 05' N, from
        # http://aa.usno.navy.mil/data/docs/Dur_OneYear.php
        latitude = pyeto.deg2rad(34.0833333)
        la_mmdlh = [
            10.182,  # Jan
            10.973,  # Feb
            11.985,  # Mar
            13.046,  # Apr
            13.940,  # May
            14.388,  # Jun
            14.163,  # Jul
            13.404,  # Aug
            12.374,  # Sep
            11.320,  # Oct
            10.401,  # Nov
            9.928,   # Dec
        ]
        mmdlh = pyeto.monthly_mean_daylight_hours(latitude)
        # Check that the 2 methods are almost the same (within 15 minutes)
        for m in range(12):
            self.assertAlmostEqual(mmdlh[m], la_mmdlh[m], delta=0.25)
        # Test with year set to a non-leap year
        non_leap = pyeto.monthly_mean_daylight_hours(latitude, 2015)
        for m in range(12):
            self.assertEqual(mmdlh[m], non_leap[m])
        # Test with year set to a leap year
        leap = pyeto.monthly_mean_daylight_hours(latitude, 2016)
        for m in range(12):
            if m == 0:
                self.assertEqual(leap[m], non_leap[m])
            elif m == 1:  # Feb
                # Because Feb extends further into year in a leap year it
                # should have a slightly longer mean day length in northern
                # hemisphere
                self.assertGreater(leap[m], non_leap[m])
            else:
                # All months after Feb in a lieap year will be composed of
                # diffent Julian days (days of the year) compared to a
                # non-leap year so will have different mean daylengths.
                self.assertNotEqual(leap[m], non_leap[m])
        # Test with bad latitude
        with self.assertRaises(ValueError):
            _ = pyeto.monthly_mean_daylight_hours(
                pyeto.deg2rad(90.01))
        with self.assertRaises(ValueError):
            _ = pyeto.monthly_mean_daylight_hours(
                pyeto.deg2rad(-90.01))
        # Test limits of latitude
        _ = pyeto.monthly_mean_daylight_hours(
            pyeto.deg2rad(90.0))
        _ = pyeto.monthly_mean_daylight_hours(
            pyeto.deg2rad(-90.0))
    def test_thornthwaite(self):
        # Test values obtained from a worked example in Hydrology: An
        # Environmental Approach, pp 435-436 by <NAME>.
        test_monthly_t = [
            2.1, 2.5, 4.8, 7.1, 8.3, 10.7, 13.4, 14.5, 11.1, 8.2, 5.4, 3.7]
        test_monthly_mean_dlh = [
            9.4, 10.6, 11.9, 13.4, 14.6, 15.2, 14.9, 13.9, 12.6, 11.1, 9.8, 9.1]
        test_pet = [
            10.67, 14.08, 28.49, 45.85, 57.47, 75.20, 89.91, 90.29, 64.26,
            43.34, 26.24, 17.31]
        # NOTE: The test PET was calculated using rounded coefficients, rounded
        # intermediate values and doesn't adjust for the number of days in
        # the month. This results in a small difference in estimated monthly
        # PET of up to +/- 4 mm.
        pet = pyeto.thornthwaite(test_monthly_t, test_monthly_mean_dlh)
        for m in range(12):
            diff = abs(pet[m] - test_pet[m])
            self.assertLess(diff, 4)
        # Test with non-leap year
        pet_non_leap = pyeto.thornthwaite(
            test_monthly_t, test_monthly_mean_dlh, year=2015)
        # Test results are same as above when year argument is set
        for m in range(12):
            self.assertEqual(pet[m], pet_non_leap[m])
        # Test with leap year
        pet_leap = pyeto.thornthwaite(
            test_monthly_t, test_monthly_mean_dlh, year=2016)
        for m in range(12):
            # 29 days in Feb so PET should be higher than in non-leap year
            # results
            if m == 1:  # Feb
                self.assertGreater(pet_leap[m], pet_non_leap[m])
            else:
                self.assertEqual(pet_leap[m], pet_non_leap[m])
        # Test with wrong length args
        with self.assertRaises(ValueError):
            _ = pyeto.thornthwaite(list(range(11)), test_monthly_mean_dlh)
        with self.assertRaises(ValueError):
            _ = pyeto.thornthwaite(list(range(13)), test_monthly_mean_dlh)
        with self.assertRaises(ValueError):
            _ = pyeto.thornthwaite(test_monthly_t, list(range(11)))
        with self.assertRaises(ValueError):
            _ = pyeto.thornthwaite(test_monthly_t, list(range(13)))
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
49834 | from math import ceil
from PySide2.QtCore import QRect, QSize, Qt, QAbstractTableModel, QMimeData, QByteArray
from PySide2.QtGui import QPainter, QStandardItemModel, QStandardItem, QPen
from PySide2.QtWidgets import *
from models.constants import PropType, MimeType
from views.draftbar_element_view_ui import Ui_DraftElement
# Layout constants for the draftbar element grid.
LIST_HEIGHT = 100
ELEMENT_COLUMN_NUMBER = 3  # icons per row in the element grid
ELEMENT_SIZE = 30          # icon thumbnail side length in pixels
class DraftbarElementView(QWidget):
    """view of draft element category in sidebar"""
    def __init__(self, parent, category_name, element_model, element_controller):
        super(DraftbarElementView, self).__init__(parent)
        self._category = category_name
        self._model = element_model
        self._ctrl = element_controller
        self._ui = Ui_DraftElement()
        self._ui.setupUi(self)
        """connect widgets to controller"""
        self._ui.dropdown_button.hide()
        #self._ui.element_label.clicked.connect(self.toggle_list)
        #self._ui.dropdown_button.clicked.connect(self.toggle_list)
        """listen for model event signals"""
        """initialize view"""
        self._ui.element_label.setText(str(self._category))
        list_model = ElementListModel(self)
        list_model.set_model(self._model)
        self._ui.element_list.setModel(list_model)
        self.toggle_list()
    def toggle_list(self):
        """show or hide element list"""
        # todo toggle list -- currently a stub; the clicked.connect hookups
        # above are also commented out pending this implementation.
class ElementListModel(QAbstractTableModel):
    """Grid model exposing the draft elements as a fixed-width table of icons."""
    def __init__(self, parent=None):
        super(ElementListModel, self).__init__(parent)
        self._thumbnail_size = QSize(ELEMENT_SIZE, ELEMENT_SIZE)
        self._model = None
    def set_model(self, model):
        """Attach the element sequence (items expose ``.icon`` and ``.name``)."""
        self._model = model
    def rowCount(self, parent=None, *args, **kwargs):
        # Enough rows to hold every element at ELEMENT_COLUMN_NUMBER per row.
        return ceil(len(self._model) / ELEMENT_COLUMN_NUMBER)
    def columnCount(self, parent=None, *args, **kwargs):
        return ELEMENT_COLUMN_NUMBER
    def flags(self, index):
        """disable items in last row without any elements"""
        element_index = index.row() * ELEMENT_COLUMN_NUMBER + index.column()
        if element_index >= len(self._model):
            return Qt.NoItemFlags
        else:
            return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsDragEnabled
    def data(self, index, role=None):
        element_index = index.row() * ELEMENT_COLUMN_NUMBER + index.column()
        # last row might contain empty cells
        if element_index >= len(self._model):
            return
        pix_map = self._model[element_index].icon.pixmap(self._thumbnail_size)
        # Bug fix: QPixmap.scaled() returns a new pixmap; the original
        # discarded the return value, so the scaling was a no-op.
        pix_map = pix_map.scaled(self._thumbnail_size, Qt.KeepAspectRatio)
        if role == Qt.DecorationRole:
            return pix_map
    def mimeData(self, indexes):
        """Encode the dragged element's process-core name for the drop target."""
        mime_data = QMimeData()
        element_index = indexes[0].row() * ELEMENT_COLUMN_NUMBER + indexes[0].column()
        # send name of process core to QGraphicsScene
        data_array = QByteArray(bytes(self._model[element_index].name, 'UTF-8'))
        mime_data.setData(MimeType.PROCESS_CORE.value, data_array)
        return mime_data
| StarcoderdataPython |
81257 | #!/usr/bin/python
from __future__ import print_function
from difflib import SequenceMatcher
from collections import OrderedDict
import dicom
import sys
import os
import io
try:
import cPickle as pkl
except ImportError:
import pickle as pkl
# Proprietary imports:
from thicom.components import find_dcm, find_dcmdir, is_dicom
if sys.version[0] == '2': input = raw_input
class Anonymizer:
    def __init__(self, paths=None, diction=None, verbose=False, only_dirs=True, similarity_check=True, run=False,
                 log=True, log_dir='./logs', yes_to_all=False):
        """
        Create (or load) a patient-name -> alias mapping and optionally anonymize images.

        :param paths: A list of paths, or a single path (string); defaults to '.'.
        :param diction: Dictionary of names:aliases. An OrderedDict or a path to a pickle
                        object containing an OrderedDict.  When omitted, a dictionary from a
                        previous run is searched for (interactively) and created if absent.
        :param verbose: True/False whether or not to output information to screen (bool).
        :param only_dirs: True if user doesn't want to anonymize images but only directories (bool).
        :param similarity_check: True/False whether or not to perform string similarity check (bool).
        :param run: True/False whether or not to anonymize files on instantiation (bool).
        :param log: True/False whether or not to keep a log (bool).
        :param log_dir: Path for the directory where the logs are to be stored (string).
        :param yes_to_all: Bypass all user confirmation (bool).
        """
        if not paths: paths = '.'
        self.paths = [os.path.abspath(x) for x in paths] if isinstance(paths, list) else os.path.abspath(paths)
        self.verbose = verbose
        self.similar = similarity_check
        self.only_dirs = only_dirs
        self.log = log
        self.dct = None
        self.failed = []
        self.attempted = 0
        self.processed = 0
        self.previous_entries = 0
        self.yes_to_all = yes_to_all
        self.threshold = 0.7 # string compare threshold
        # NOTE(review): self.log_dir is only assigned when log_dir is truthy,
        # but it is read unconditionally below -- a falsy log_dir would
        # raise AttributeError later; confirm intended.
        if log_dir: self.log_dir = os.path.abspath(log_dir)
        if self.log: self.log_dict = {}
        created = False
        # store current location
        curr = os.getcwd()
        # read dictionary
        if diction:
            if isinstance(diction, OrderedDict):
                self.dct = diction
            else:
                if not os.path.exists(diction):
                    raise OSError('Invalid path: {}'.format(diction))
                try:
                    self.dct = pkl.load(open(diction, 'rb'))
                except:
                    # NOTE(review): bare except also hides KeyboardInterrupt;
                    # consider catching (pkl.UnpicklingError, EOFError).
                    raise TypeError('Incompatible type for dictionary.')
        # if no dictionary was specified
        else:
            # try to find a dictionary if none is defined
            if os.path.exists('anonymizer_dictionary.pkl'):
                check = input('Found dictionary from previous procedure: ./anonymizer_dictionary.pkl. '
                              'Do you want to use this one? (Y/N) ')
                if check.lower() in ('', 'y', 'yes', 'ok', 'yy', 'yes_to_all', 'yes to all'):
                    self.dct = pkl.load(open('anonymizer_dictionary.pkl', 'rb'))
                if check.lower() in ('yy', 'yes_to_all', 'yes to all'):
                    self.yes_to_all = True
            if os.path.exists('logs/anonymizer_dictionary.pkl'):
                check = input('Found dictionary from previous procedure: ./logs/anonymizer_dictionary.pkl. '
                              'Do you want to use this one? (Y/N) ')
                if check.lower() in ('', 'y', 'yes', 'ok', 'yy', 'yes_to_all', 'yes to all'):
                    self.dct = pkl.load(open('logs/anonymizer_dictionary.pkl', 'rb'))
                if check.lower() in ('yy', 'yes_to_all', 'yes to all'):
                    self.yes_to_all = True
            # if no dictionary was specified or found try to create one
            if not self.dct:
                created = self.create_anon_dict(self.paths)
                if not created:
                    sys.exit()
        # if dictionary was loaded, try to update it
        if not created:
            if not isinstance(self.dct, OrderedDict):
                raise TypeError('File should be an OrderedDict')
            # store how many entries the dictionary had previously
            self.previous_entries = len(self.dct)
            # update dict with paths
            self.update_dict(self.paths)
        if run:
            if self.yes_to_all:
                run = True
            else:
                check = input("Proceeding will replace all DICOM images' \"Patient's Names\" to aliases:\n"
                              "e.g \"<NAME>\" --> \"Subject1\".\nDo you want to proceed? (Y/N) ")
                run = False if check.lower() not in ('', 'true', 't', 'y', 'yes', 'proceed') else True
                if check.lower().replace(' ', '') in ('yy', 'yes_to_all', 'yestoall'):
                    self.yes_to_all = run = True
        if run:
            # begin anonymization procedure
            proceed = self.anonymize()
            # if single dicom anonymization no logs are generated
            if proceed:
                # log
                if not self.log_dir:
                    file_dir = self.paths[0] if isinstance(self.paths, list) else self.paths
                else:
                    file_dir = self.log_dir
                if not os.path.isdir(self.log_dir):
                    os.mkdir(self.log_dir)
                os.chdir(file_dir)
                # report
                print('Total number of dicom images: {}'.format(self.attempted))
                print('Number of images successfully processed: {}'.format(self.processed))
                print('Number of images failed: {}'.format(len(self.failed)))
                # create the patient log even if we don't anonymize the images themselves
                if self.only_dirs:
                    self.log_dict = self.create_patient_log()
                # write to file
                with open('patient aliases.txt', 'wb') as f:
                    print('Writing name-alias mappings to {}/patient aliases.txt'.format(file_dir))
                    f.write('{:<40}{}\n'.format('Patient Name', 'Patient Alias'))
                    for key, val in self.dct.items():
                        f.write('{:<40}{}\n'.format(key, val))
                if self.failed:
                    with open('failed dicom.txt', 'wb') as f:
                        print('Writing failed dicom paths to {}/failed dicom.txt'.format(file_dir))
                        for x in self.failed:
                            f.write('{}\n'.format(x))
                if self.log:
                    with open('patient log.txt', 'wb') as f:
                        print('Writing mapping log to {}/patient log.txt'.format(file_dir))
                        f.write('{:<40}{}\n'.format('Patient Name', 'Patient Alias'))
                        for key, val in self.log_dict.items():
                            f.write('{:<40}{}\n'.format(key, val))
                # save as a pickle object
                pkl.dump(self.dct, open('anonymizer_dictionary.pkl', 'wb'))
                os.chdir(curr)
def anonymize_dicom(self, dcm, alias, original=None):
    """Replace a single DICOM file's "Patient's Name" with *alias*.

    The anonymized copy is written next to the original with an '_anon' suffix;
    the original file is left untouched (cleanup() removes it later).

    :param dcm: path of a dicom image (string).
    :param alias: name with which to replace the "Patient's Name" (string).
    :param original: original name used for the similarity check (string).
    :raises TypeError: when the similarity check is enabled and the on-disk
        name is not similar enough to *original*.
    :raises NotImplementedError: when the similarity check is enabled but no
        *original* name was supplied.
    """
    self.attempted += 1
    ds = dicom.read_file(dcm)
    if self.similar:
        if original:
            comp = SequenceMatcher(None, ds.PatientsName.lower(), original.lower()).ratio()
            # Bug fix: SequenceMatcher.ratio() is a similarity score where
            # 1.0 means identical, so the check must fail when the score is
            # BELOW the threshold -- which is what the error message already
            # said. The original raised on comp > threshold (inverted).
            if comp < self.threshold:
                raise TypeError('String compare failed, distance between {} and {}:{} < {} threshold)'
                                '.'.format(ds.PatientsName.lower(), original.lower(), comp, self.threshold))
        else:
            raise NotImplementedError("Need patient's name to compare strings.")
    old = ds.PatientsName
    ds.PatientsName = alias
    try:
        ds.save_as(dcm + '_anon')
        if self.verbose:
            print("Replaced patient's name from {} to {} for dicom file: {}".format(old, alias, dcm.split('/')[-1]))
        if self.log:
            if old not in self.log_dict:
                self.log_dict[old] = alias
            # Bug fix: two DIFFERENT aliases for the same patient is the error
            # condition; the original raised when the stored alias was EQUAL
            # to the new one (i.e. on the perfectly consistent case).
            elif self.log_dict[old] != alias:
                raise KeyError('Two aliases for the same patient:\nPatient: {}\n Old Alias: {}\n'
                               'New Alias: {}'.format(old, self.log_dict[old], alias))
        self.processed += 1
    except ValueError:
        print('ValueError when trying to save dicom image {}'.format(dcm))
        self.failed.append(dcm)
def anonymize_patient(self, patient_name):
    """Anonymize every DICOM image belonging to one patient.

    :param patient_name: directory holding the patient's images (string);
        the directory's basename is expected to be the patient's name.
    """
    # Reduce a path like 'a/b/John Doe' to just the directory name.
    name = patient_name.split('/')[-1] if '/' in patient_name else patient_name
    alias = self.dct[name]
    images = find_dcm(name)
    # Only pass the reference name along when similarity checking is on.
    reference = name if self.similar else None
    for image in images:
        self.anonymize_dicom(image, alias, reference)
def create_anon_dict(self, pts):
    """Build the OrderedDict mapping patient directory names to aliases.

    The input is a directory (or list of directories) whose sub-folders are
    named after patients, producing e.g. {'John Doe': 'Subject1', ...}.

    :param pts: A path or list of paths. (string/list of strings)
    :return: True when a dictionary was created, False for an unusable path.
    :raises OSError: when a supplied path does not exist.
    :raises TypeError: when *pts* is neither a string nor a list.
    """
    src_dir = os.getcwd()
    if isinstance(pts, list):
        p = pts[0]
        if not os.path.exists(p):
            raise OSError('Invalid path: {}'.format(p))
        os.chdir(p)
        patients = [x for x in os.listdir('.') if os.path.isdir(x)]
        ids = ['Subject' + str(x) for x in range(1, len(patients) + 1)]
        self.dct = OrderedDict(zip(patients, ids))
        os.chdir(src_dir)
        for i in range(1, len(pts)):
            # Silence update_dict's console output for the extra paths.
            # Bug fix: the replacement stream must be a TEXT buffer;
            # io.BytesIO breaks print() on Python 3 with a TypeError.
            stdout_snap = sys.stdout
            sys.stdout = io.StringIO()
            try:
                self.update_dict(pts[i], no_check=True)
            finally:
                # Always restore stdout, even if update_dict raises.
                sys.stdout = stdout_snap
    elif isinstance(pts, str):
        if os.path.exists(pts):
            if os.path.isdir(pts):
                os.chdir(pts)
            elif 'dicomdir' in pts:
                # NOTE(review): this match is case-sensitive while the files
                # are typically named 'DICOMDIR' -- confirm intended.
                pts = os.path.split(pts)[0]
                os.chdir(pts)
            else:
                return False
        else:
            raise OSError('Invalid path: {}'.format(pts))
        patients = [x for x in os.listdir(pts) if os.path.isdir(x)]
        ids = ['Subject' + str(x) for x in range(1, len(patients) + 1)]
        self.dct = OrderedDict(zip(patients, ids))
    else:
        raise TypeError('Enter either a path string or a list of path strings.')
    print('Dictionary with {} mappings created.'.format(len(self.dct)))
    if self.verbose:
        print(self.dct)
    os.chdir(src_dir)
    return True
def update_dict(self, pts, no_check=False):
    """Add new patient directories found under *pts* to the alias dictionary.

    Directories already present in self.dct are skipped. Unless *no_check*
    (or self.yes_to_all) is set, the user is prompted to select which of the
    new directories to add.

    :param pts: A path, or list of paths, containing patient-named
        directories. (str/list of str)
    :param no_check: Skip the interactive confirmation prompt. (bool)
    :raises OSError: when a supplied path does not exist.
    """
    print('Updating anonymizer dictionary...')  # fixed 'anonymzer' typo
    if self.yes_to_all:
        no_check = True
    if isinstance(pts, str):
        pts = [pts]
    patients = []
    for pt in pts:
        if not os.path.exists(pt):
            raise OSError('Invalid path: {}'.format(pt))
        # NOTE: intentionally leaves the cwd on the last path, matching the
        # original behaviour (callers chdir themselves afterwards).
        os.chdir(pt)
        patients += [x for x in os.listdir(pt) if os.path.isdir(x) and x not in self.dct]
    # Let the user pick which of the new directories become patients.
    if patients and not no_check:
        # Warn about candidates that look like near-duplicates of existing
        # entries (possible typos / re-scans of the same patient).
        similar = {}
        for p in patients:
            for a in self.dct.keys():
                comp = SequenceMatcher(None, p, a).ratio()
                if comp > self.threshold:
                    similar[p] = (a, comp)
        for pat in similar.keys():
            print('{:<30} is similar to previous entry {:<30} with a score of '
                  '{:.2f}.'.format(pat[:30], similar[pat][0][:30], similar[pat][1]))
        for i, p in enumerate(patients):
            print('{:>3}. {}'.format(i + 1, p))
        key = input('Which patients do you want to add to the dictionary?\n(1, 2, 3, ... / multiple '
                    'indice separated by a single space / 0 for none / anything else for all)\n')
        if key.isdigit():
            if int(key) == 0:
                return
            patients = [patients[int(key) - 1]]
        elif all([x.isdigit() for x in key.split()]) and key != '':
            patients = [patients[int(i) - 1] for i in key.split()]
        # Any other answer keeps the full candidate list (add all).
    last = len(self.dct)
    ids = ['Subject' + str(x) for x in range(last + 1, last + len(patients) + 1)]
    prev = len(self.dct)
    self.dct.update(zip(patients, ids))
    added = len(self.dct) - prev
    if added == 1:
        print('1 new mapping added.')
    else:
        print('{} new mappings added.'.format(added))
    if self.verbose:
        # Bug fix: printing the bare zip object on Python 3 shows
        # '<zip object ...>'; materialize the pairs instead.
        print(list(zip(patients, ids)))
def anonymize(self, pts=None):
    """
    This method traverses directories finding dicom images and replacing their "Patient's Name" with
    aliases according to the mapping from the OrderedDict.
    :param pts: A path or list of paths (string/list of strings).
    :return: False when a single dicom file was anonymized (the caller skips
        the logging phase), True otherwise.
    """
    single_dicom = False
    if not pts: pts = self.paths
    if isinstance(pts, list):
        pts = [os.path.abspath(x) for x in pts]
        for p in pts:
            if not os.path.exists(p):
                raise OSError('Invalid path: {}'.format(p))
            os.chdir(p)
            dirs = os.listdir('.')
            for d in dirs:
                # Only touch directories that were registered in the dictionary.
                if d in self.dct:
                    if not self.only_dirs:
                        self.anonymize_patient(d)
                    if self.verbose: print('Renaming {} to {}'.format(d, self.dct[d]))
                    # Rename the patient directory itself to its alias.
                    os.rename(d, self.dct[d])
    elif isinstance(pts, str):
        if not os.path.exists(pts):
            raise OSError('Invalid path: {}'.format(pts))
        if is_dicom(pts):
            # Single-file mode: no dictionary lookup, fixed alias.
            self.anonymize_dicom(dcm=pts, alias='anonymous')
            single_dicom = True
        elif 'dicomdir' in pts:
            # NOTE(review): case-sensitive match while the file is usually
            # named 'DICOMDIR' -- confirm intended.
            pts = os.path.split(pts)[0]
        if os.path.isdir(pts):
            os.chdir(pts)
            dirs = os.listdir('.')
            for d in dirs:
                if d in self.dct:
                    if not self.only_dirs:
                        self.anonymize_patient(d)
                    os.rename(d, self.dct[d])
    else:
        raise TypeError('Enter either a path string or a list of path strings.')
    print('Patients successfully anonymized.')
    # remove all non-anonymous images
    if not self.only_dirs:
        self.cleanup(pts=pts)
    # return value specifies if the script will continue with logging after anonymization
    return False if single_dicom else True
def cleanup(self, pts=None):
    """Delete every DICOM image under *pts* whose name does not end in '_anon'.

    :param pts: A path or list of paths (string/list of strings); defaults
        to self.paths.
    :raises OSError: when a supplied path does not exist.
    :raises TypeError: when *pts* is neither a string nor a list.
    """
    if self.verbose:
        print('Cleaning up...')
    if not pts:
        pts = self.paths
    cnt = 0
    if isinstance(pts, list):
        pts = [os.path.abspath(x) for x in pts]
        for p in pts:
            c = 0
            # Bug fix: validate each individual path; the original tested the
            # whole list object (os.path.exists(pts)), which raises a
            # TypeError on Python 3 instead of the intended OSError.
            if not os.path.exists(p):
                raise OSError('Invalid path: {}'.format(p))
            dcm_list = find_dcm(p)
            old_dcm = [x for x in dcm_list if not x.endswith('_anon')]
            for d in old_dcm:
                os.remove(d)
                c += 1
            cnt += c
            if self.verbose:
                print('{} images deleted from directory {}.'.format(c, p))
        if self.verbose:
            print('A total of {} images were deleted under directory/ies {}.'.format(cnt, pts))
    elif isinstance(pts, str):
        dcm_list = find_dcm(pts)
        old_dcm = [x for x in dcm_list if not x.endswith('_anon')]
        for d in old_dcm:
            os.remove(d)
            cnt += 1
        if self.verbose:
            print('A total of {} images were deleted under directory {}.'.format(cnt, os.path.abspath(pts)))
    else:
        raise TypeError('Enter either a path string or a list of path strings.')
def create_patient_log(self, pts=None):
    """
    Creates a patient log from DICOMDIRs in already anonymized directories
    :param pts: Path or a list of paths (str/list of str)
    :return: A dictionary containing mapping patient names to their aliases (dict)
    """
    print('Creating patient log:')
    if not pts: pts = self.paths
    dcmdir_list = find_dcmdir(pts=pts)
    log = {}
    for d in dcmdir_list:
        ds = dicom.read_dicomdir(os.path.join(d, 'DICOMDIR'))
        # find and isolate the patient's alias in the path string
        alias = d[d.find('Subject'):].split('/')[0]
        # unfortunately DICOMDIRs don't have a field named PatientsName, so we have to search through the
        # string to find the line where it is and isolate it from the rest of the string. Thankfully
        # it is in the end of the line so we can just throw away the first 57 characters to keep it
        # NOTE(review): the magic offset 57 depends on pydicom's str() layout;
        # confirm against the pinned pydicom version before upgrading it.
        patients_name = None
        for line in str(ds).split('\n'):
            if 'patient\'s name' in line.lower():
                patients_name = line[57:]
        if patients_name:
            log[patients_name] = alias
        else:
            print('No "Patient\'s Name" found in {}/DICOMDIR'.format(d))
    return log
@staticmethod
def anonymize_log(conv_log, patient_log):
    """
    Anonymize a 'conversion log.txt' (generated by the Converter class) by
    replacing patients' names with their aliases from a patient log.

    The scanner looks for a line containing PATIENT INFO, then searches the
    subsequent lines for a patient's real name; once one is replaced it waits
    for the next PATIENT INFO marker.

    The result is written next to the input as '<conv_log minus .txt>_anon.txt'.

    :param conv_log: A path to a conversion_log.txt (string)
    :param patient_log: A path to a patient log (string)
    :return: True
    :raises OSError: when either input path does not exist.
    """
    if not os.path.exists(conv_log):
        raise OSError('Invalid path: conversion log doesn\'t exist')
    if not os.path.exists(patient_log):
        raise OSError('Invalid path: patient log doesn\'t exist')
    pat_dct = {}
    print('Loading patient logs...')
    # Bug fix: the files are line-oriented text; opening them in binary mode
    # ('rb'/'wb') made every `'...' in line` test raise TypeError on Python 3.
    with open(patient_log, 'r') as f:
        for line in f:
            # Patient log format: name left-padded to 40 chars, then alias.
            pat_dct[line[:40].strip()] = line[40:].strip()
    print('Anonymizing conversion logs...')
    with open(conv_log, 'r') as rf:
        filename = conv_log[:-4] + '_anon.txt'
        with open(filename, 'w') as wf:
            expect_patient = False
            for line in rf:
                if 'PATIENT INFO' in line:
                    expect_patient = True
                if expect_patient:
                    for name in pat_dct.keys():
                        if name in line:
                            # Bug fix: replace the WHOLE name; the original
                            # replaced name[:-1], leaving the final character
                            # of the real name in the output.
                            line = line.replace(name, pat_dct[name])
                            print('Replaced {} with {}'.format(name, pat_dct[name]))
                            expect_patient = False
                wf.write(line)
    print('Anonymized log file stored in: {}'.format(filename))
    return True
def get_results(self):
    """Summarize the last run for logging purposes.

    :return: Tuple of (entries in the dictionary before the update,
        entries in the dictionary now). (int, int)
    """
    before = self.previous_entries
    after = len(self.dct)
    return before, after
@staticmethod
def read_diction(diction_path=None):
    """
    Read an anonymizer dictionary pickle and render it as a printable table.
    :param diction_path: A path to an anonymizer_dictionary.pkl (string).
    :return: A string containing a printable version of the contents of the dictionary (string).
    """
    # Bug fix: use a context manager so the file handle is closed
    # deterministically (the original leaked the handle opened inline).
    with open(diction_path, 'rb') as f:
        dct = pkl.load(f)
    response = 'printing contents of: {}:\n'.format(diction_path)
    response += ' '+'-'*71+' \n'
    response += '| {:^3} | {:^50} | {:^10} |\n'.format('No.', 'Patient\'s Name', 'Alias')
    response += '| {:<3} | {:<50} | {:<10} |\n'.format('-'*3, '-'*50, '-'*10)
    for i, d in enumerate(dct.keys()):
        response += '| {:<3} | {:<50} | {:<10} |\n'.format(str(i+1)+'.', d, dct[d])
    response += ' '+'-'*71+' '
    return response
if __name__ == '__main__':
    # Validate the CLI with explicit exits rather than assert, which is
    # silently stripped under `python -O`.
    if len(sys.argv) != 2:
        sys.exit("Too many arguments. Enter just one argument." if len(sys.argv) > 2
                 else "Too few arguments. DICOM root path required as an argument.")
    dcm_path = sys.argv[1]
    if not os.path.exists(dcm_path):
        sys.exit("The path you entered is not valid.")
    # Bug fix: isdir() must test the joined path. The original tested bare
    # names relative to the cwd, so every directory was silently dropped
    # whenever the script was launched from outside dcm_path.
    dicom_dirs = [os.path.join(dcm_path, dr) for dr in os.listdir(dcm_path)
                  if os.path.isdir(os.path.join(dcm_path, dr))]
    anon = Anonymizer(paths=dicom_dirs, verbose=True, run=True, similarity_check=False)
| StarcoderdataPython |
1799033 | <reponame>Scopetta197/chromium
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from email.MIMEText import MIMEText
import logging
import os
import re
import smtplib
import sys
import urllib
import pyauto_functional
import pyauto
sys.path.append(os.path.join(pyauto.PyUITest.DataDir(), 'pyauto_private',
'chromeos', 'network'))
from gsm_sim_info import SIM, PROVIDER_TXT_SERVER
class ChromeosTxtMsgSanity(pyauto.PyUITest):
    """Tests for ChromeOS text message handling"""

    def _SendText(self, mail_server, sender, phone_number,
                  mobile_provider, msg):
        """Sends a text message to a specific phone

        Args:
            mail_server: An SMTP instance.
            sender: Sender's email address.
            phone_number: The phone number the txt message is directed to.
            mobile_provider: A cellular provider defined in
                             gsm_sim_info.PROVIDER_TXT_SERVER
            msg: The message to be sent.
        """
        # Carriers expose an email-to-SMS gateway of the form
        # <number>@<gateway host>.
        recipient = ('%s@%s' % (phone_number,
                                PROVIDER_TXT_SERVER[mobile_provider]))
        self._SendMail(mail_server, sender, recipient, None, msg)

    def _SendMail(self, mail_server, sender, recipients,
                  msg_subject, msg_body):
        """Sends an email using the provided smtp connection

        Args:
            mail_server: An SMTP instance.
            sender: Senders email address.
            recipients: Recipients email address.
            msg_subject: The subject line of the email (may be None).
            msg_body: The body of the email.
        """
        msg = MIMEText(msg_body)
        msg['To'] = recipients
        msg['From'] = sender
        if msg_subject:
            msg['Subject'] = msg_subject
        mail_server.sendmail(sender, recipients, msg.as_string())

    def _GetGmailServerInstance(self, email, password):
        """Creates an SMTP connection with the gmail mail server

        Args:
            email: A gmail address.
            password: The password for the gmail address.

        Returns:
            An SMTP connection instance.
        """
        mail_server = smtplib.SMTP('smtp.gmail.com', 587)
        mail_server.starttls()
        mail_server.ehlo()
        mail_server.login(email, password)
        return mail_server

    def _GetIMSI(self):
        """Obtains the IMSI by running modem status

        Returns:
            IMSI of device

        Raises:
            Exception: if no GSM modem is detected.
        """
        modem_status = os.popen('modem status').read()
        # Raw string so \s and \d reach the regex engine untouched.
        imsi = re.search(r'IMSI:\s(\d+)', modem_status)
        if not imsi:
            raise Exception('GSM Modem not detected in device')
        return imsi.groups()[0]

    def _GetSIMInfo(self):
        """Returns information necessary to send messages

        Returns:
            A dictionary with the following format
            {
                'mdn' : <phone number>,
                'carrier': <carrier name>
            }

        Raises:
            Exception: if the SIM's IMSI is not in the config file.
        """
        imsi = self._GetIMSI()
        sim_info = SIM.get(imsi, {})
        if not sim_info:
            raise Exception('Phone number for sim with IMSI=%s is not '
                            'recognized within config file' % imsi)
        return sim_info

    def setUp(self):
        # Connect to cellular service if not already connected.
        pyauto.PyUITest.setUp(self)
        connected_cellular = self.NetworkScan().get('connected_cellular')
        if not connected_cellular:
            self.ConnectToCellularNetwork()
            if not self.NetworkScan().get('connected_cellular'):
                raise Exception('Could not connect to cellular service.')
        else:
            logging.debug('Already connected to cellular service %s' %
                          connected_cellular)
        # Obtain sender, recipient, and SMTP instance.
        self.credentials = self.GetPrivateInfo()['test_account_with_smtp']
        self.sim = self._GetSIMInfo()
        self.mail_server = self._GetGmailServerInstance(
            self.credentials['username'],
            self.credentials['password'])

    def tearDown(self):
        self.DisconnectFromCellularNetwork()
        self.mail_server.close()
        # Dismiss any notifications left over from the test.
        for window in range(len(self.GetActiveNotifications())):
            self.CloseNotification(window)
        pyauto.PyUITest.tearDown(self)

    def testTxtMsgNotification(self):
        """Notifications are displayed for text messages"""
        msg = 'This is the text message'
        self._SendText(self.mail_server, self.credentials['username'],
                       self.sim['mdn'], self.sim['carrier'], msg)
        self.WaitForNotificationCount(1)
        notification_result = self.GetActiveNotifications()[0]['content_url']
        self.assertTrue(re.search(urllib.pathname2url(msg),
                        notification_result), 'Invalid message was displayed. '
                        'Expected "%s" but did not find it"' % msg)

    def testLongTxtMsgNotification(self):
        """Notifications are displayed for long (>160 char) text messages."""
        long_msg = 'This is a really long message with spaces. Testing to '\
                   'make sure that chromeos is able to catch it and '\
                   'create a notifications for this message.'
        self._SendText(self.mail_server, self.credentials['username'],
                       self.sim['mdn'], self.sim['carrier'], long_msg)
        self.WaitForNotificationCount(1)
        # GetActiveNotifications throws an exception if the text message never
        # arrives.
        # Bug fix: the original dereferenced an undefined name 'txt_windows'
        # (NameError); read content_url straight off the fetched notification,
        # mirroring testTxtMsgNotification above.
        txt_msg = self.GetActiveNotifications()[0]['content_url']
        self.assertTrue(re.search(urllib.pathname2url(long_msg),
                        txt_msg), 'Invalid message was displayed. '
                        'Expected "%s" but did not find it"' % long_msg)
if __name__ == '__main__':
    # Delegate to the pyauto functional-test harness entry point.
    pyauto_functional.Main()
| StarcoderdataPython |
1750159 | <reponame>jrStaff/pixiedust
# -------------------------------------------------------------------------------
# Copyright IBM Corp. 2017
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------------
from pixiedust.display import display
from pixiedust.display.display import *
from pixiedust.utils.shellAccess import ShellAccess
from pixiedust.utils import Logger
from six import iteritems, string_types
from collections import OrderedDict, namedtuple
import base64
import inspect
import sys
from functools import partial
from functools import reduce  # Python 3: reduce is no longer a builtin (used by PixieApp.run)
from IPython.utils.io import capture_output
def route(**kw):
    """Decorator factory tagging a PixieApp method as a route.

    The keyword arguments become the route's matching constraints and are
    stored on the function as ``pixiedust_route``.
    """
    def decorate(target):
        # NOTE: target.pixiedust_route aliases the kw dict, so the pop below
        # also removes 'persist_args' from the stored route spec -- this
        # aliasing is intentional and relied upon by the dispatcher.
        target.pixiedust_route = kw
        if hasattr(target, "fn"):
            target.fn.persist_args = kw.pop("persist_args", None)
        return target
    return decorate
@Logger()
class captureOutput(object):
    """
    Decorator used for routes that allows using external libraries for generating
    the html fragment.
    When using this decorator the route doesn't need to return a string. If it does
    it will be ignored.
    Must be declared in after the route decorator.
    captureOutput and templateArgs should not be used together

    from pixiedust.display.app import *
    import matplotlib.pyplot as plt
    import numpy as np
    @PixieApp
    class Test():
        @route()
        @captureOutput
        def mainScreen(self):
            t = np.arange(0.0, 2.0, 0.01)
            s = 1 + np.sin(2*np.pi*t)
            plt.plot(t, s)
            plt.xlabel('time (s)')
            plt.ylabel('voltage (mV)')
            plt.title('About as simple as it gets, folks')
            plt.grid(True)
            plt.savefig("test.png")
            plt.show()
    Test().run()
    """
    def __init__(self, fn):
        # The wrapped route function.
        self.fn = fn
    def convert_html(self, output):
        """Convert one captured IPython rich-output object into an HTML fragment."""
        if "text/html" in output.data:
            return output._repr_html_()
        elif "image/png" in output.data:
            # Inline the PNG as a base64 data URI.
            # NOTE(review): the trailing "<img>" looks like it was meant to be
            # "</img>" -- confirm before changing rendered markup.
            return """<img alt="image" src="data:image/png;base64,{}"><img>""".format(
                base64.b64encode(output._repr_png_()).decode("ascii")
            )
        elif "application/javascript" in output.data:
            return """<script type="text/javascript">{}</script>""".format(output._repr_javascript_())
        elif "text/markdown" in output.data:
            import markdown
            return markdown.markdown(output._repr_mime_("text/markdown"))
        # Unknown mime bundle: log and contribute nothing to the fragment.
        self.debug("Unused output: {}".format(output.data.keys()))
        return ""
    def __get__(self, instance, instance_type):
        # Descriptor protocol: bind the wrapper to the instance; org_fn lets
        # the dispatcher (injectArgs) introspect the real signature.
        wrapper_fn = partial(self.wrapper, instance)
        wrapper_fn.org_fn = self.fn
        return wrapper_fn
    def wrapper(self, instance, *args, **kwargs):
        # Capture everything the route displays, then splice the captured
        # outputs into a single HTML string (the route's return value is
        # deliberately ignored).
        with capture_output() as buf:
            self.fn(instance, *args, **kwargs)
        return "\n".join([self.convert_html(output) for output in buf.outputs])
class templateArgs(object):
    """
    Decorator that enables using local variable in a Jinja template.
    Must be used in conjunction with route decorator and declared after

    from pixiedust.display.app import *
    @PixieApp
    class Test():
        @route()
        @templateArgs
        def mainScreen(self):
            var1 = 'something computed'
            return "<div>Accessing local variable {{var1}} from a jinja template"
    Test().run()
    """
    # Pairs a route's return value with the locals captured at return time;
    # the dispatcher merges .locals into the Jinja rendering context.
    TemplateRetValue = namedtuple('TemplateRetValue', ['ret_value', 'locals'])
    def __init__(self, fn):
        # The wrapped route function.
        self.fn = fn
    def __get__(self, instance, instance_type):
        # Descriptor protocol: bind the wrapper to the instance; org_fn lets
        # the dispatcher (injectArgs) introspect the real signature.
        wrapper_fn = partial(self.wrapper, instance)
        wrapper_fn.org_fn = self.fn
        return wrapper_fn
    def wrapper(self, instance, *args, **kwargs):
        locals = [{}]
        def tracer(frame, event, arg):
            # Profile hook: snapshot the returning frame's locals. Nested
            # calls return first; the wrapped route's own return fires last,
            # so its locals end up in locals[0].
            if event == "return":
                locals[0] = frame.f_locals.copy()
                if 'self' in locals[0]:
                    del locals[0]['self']
        sys.setprofile(tracer)
        try:
            ret_value = self.fn(instance, *args, **kwargs)
            return templateArgs.TemplateRetValue(ret_value, locals[0])
        finally:
            # Always detach the profiler, even if the route raises.
            sys.setprofile(None)
# Global hook enabling system-wide customization of PixieApp run options.
# When set, PixieApp.run() invokes its customizeOptions(options) method (if
# callable) on the assembled options dict just before calling display().
pixieAppRunCustomizer = None
def runPixieApp(app, parent_pixieapp=None, entity=None, **kwargs):
    """Run a PixieApp from an existing instance or from a class name.

    :param app: a PixieDustApp instance, or a string naming the class --
        either a bare name looked up in the notebook shell namespace, or a
        dotted 'module.Class' path that gets imported.
    :param parent_pixieapp: shell variable name of the parent app, if this
        app is being run as a child.
    :param entity: optional entity forwarded to run().
    :raises ValueError: when *app* is neither a PixieDustApp nor a string.
    """
    kwargs.get("options", {}).pop("prefix", None) #child pixieapp should have its own prefix
    if isinstance(app, PixieDustApp):
        app.run(entity, **kwargs)
    elif isinstance(app, string_types):
        parts = app.split('.')
        instance_app = None
        if len(parts) > 1:
            # Dotted name: import the module part, instantiate the class part.
            instance_app = getattr(__import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0), parts[-1])()
        else:
            # Bare name: resolve the class in the notebook shell namespace.
            instance_app = ShellAccess[parts[-1]]()
        if parent_pixieapp is not None:
            # Register the new app as a child of its parent.
            instance_app.parent_pixieapp = ShellAccess[parent_pixieapp]
            instance_app.parent_pixieapp.add_child(instance_app)
            kwargs["is_running_child_pixieapp"] = True
        instance_app.run(entity, **kwargs)
    else:
        raise ValueError("Invalid argument to runPixieApp. Only PixieApp or String allowed")
@Logger()
class PixieDustApp(Display):
    """Display subclass that powers @PixieApp classes: dispatches rendering to
    @route-decorated methods and attaches per-render metadata to the outgoing
    notebook display message."""

    # Class-level registry populated by the @PixieApp decorator:
    # {display class name: {"routes": [(route_spec, method_name), ...],
    #                       "widgets": [...]}}
    routesByClass = {}

    def __init__(self, options=None, entity=None, dataHandler=None):
        super(PixieDustApp, self).__init__(options, entity, dataHandler)
        self.parent_pixieapp = None
        if not hasattr(self, "metadata"):
            self.metadata = None
        # When True, the next append_metadata call starts from a fresh dict.
        self.empty_metadata = False

    def append_metadata(self, value):
        """Merge *value* into the metadata shipped with the next rendered message."""
        if self.empty_metadata:
            self.metadata = {}
            self.empty_metadata = False
        else:
            self.metadata = self.metadata or {}
        self.metadata.update(value)

    def getOptionValue(self, optionName):
        """Resolve an option by priority: metadata, then a (non-callable)
        attribute of the entity, then the display options dict."""
        option = None
        if self.metadata:
            option = self.metadata.get(optionName, None)
        if option is None:
            #check if the key is an field of the class
            option = getattr(self.entity, optionName) if self.entity is not None and hasattr(self.entity, optionName) else None
            #make sure we don't have a conflict with an existing function
            if callable(option):
                option = None
        if option is None:
            option = self.options.get(optionName, None)
        return option

    def matchRoute(self, route):
        """Return True when every key/value constraint in *route* is satisfied
        by the current options; '*' matches any present (non-None) value."""
        for key, value in iteritems(route):
            option = self.getOptionValue(key)
            if (option is None and value == "*") or (value != "*" and option != value):
                return False
        return True

    def has_persist_args(self, method):
        """True when the route method was declared with persist_args."""
        if isinstance(method, partial) and hasattr(method, "org_fn"):
            # Unwrap decorator wrappers (templateArgs / captureOutput).
            method = method.org_fn
        return getattr(method, "persist_args", None) is not None

    def injectArgs(self, method, route):
        """Build an OrderedDict {param name: resolved option value} matching
        *method*'s positional signature (minus self)."""
        if isinstance(method, partial) and hasattr(method, "org_fn"):
            method = method.org_fn
        argspec = inspect.getargspec(method)
        args = argspec.args
        if len(args) > 0:
            # Drop the bound/explicit self parameter.
            args = args[1:] if hasattr(method, "__self__") or args[0] == 'self' else args
        return OrderedDict(zip([a for a in args], [self.getOptionValue(arg) for arg in args]))

    def invoke_route(self, class_method, **kwargs):
        "Programmatically invoke a route from arguments"
        # NOTE(review): if class_method raises, the finally block reads
        # retValue before assignment (UnboundLocalError masking the original
        # error) -- confirm before relying on exception propagation here.
        try:
            injectedArgs = kwargs
            retValue = class_method(*list(injectedArgs.values()))
        finally:
            if isinstance(retValue, templateArgs.TemplateRetValue):
                # Fold the captured locals into the Jinja context.
                injectedArgs.update(retValue.locals)
                retValue = retValue.ret_value
        if isinstance(retValue, string_types):
            retValue = self.renderTemplateString(retValue, **injectedArgs)
        return retValue

    def __getattr__(self, name):
        """Fallback attribute lookup into the notebook shell namespace,
        optionally retried with the gateway namespace prefix."""
        if ShellAccess[name] is not None:
            return ShellAccess[name]
        if name != "__pd_gateway_namespace__" and hasattr(self, "__pd_gateway_namespace__"):
            name = self.__pd_gateway_namespace__ + name
            if ShellAccess[name] is not None:
                return ShellAccess[name]
        raise AttributeError("{} attribute not found".format(name))

    def hook_msg(self, msg):
        # Attach the pixieapp metadata to the outgoing display message, then
        # flag the metadata for reset on the next append_metadata call.
        msg['content']['metadata']['pixieapp_metadata'] = self.metadata
        self.empty_metadata = True
        return msg

    def render(self):
        from IPython.core.interactiveshell import InteractiveShell
        display_pub = InteractiveShell.instance().display_pub
        try:
            # Intercept published messages for the duration of this render.
            display_pub.register_hook(self.hook_msg)
            super(PixieDustApp, self).render()
        finally:
            display_pub.unregister_hook(self.hook_msg)

    def doRender(self, handlerId):
        """Dispatch to the first matching @route method and emit its HTML."""
        if self.__class__.__name__ in PixieDustApp.routesByClass:
            defRoute = None
            retValue = None
            injectedArgs = {}
            try:
                # A 'widget' option switches dispatch to the widget routes.
                dispatchKey = "widgets" if "widget" in self.options else "routes"
                for t in PixieDustApp.routesByClass[self.__class__.__name__][dispatchKey]:
                    if not t[0]:
                        # Empty constraint dict == default route; remembered
                        # for use when nothing else matches.
                        defRoute = t[1]
                    elif self.matchRoute(t[0]):
                        self.debug("match found: {}".format(t[0]))
                        meth = getattr(self, t[1])
                        injectedArgs = self.injectArgs(meth, t[0])
                        self.debug("Injected args: {}".format(injectedArgs))
                        if self.metadata is None and self.has_persist_args(meth):
                            # Persist the matched route's option values.
                            self.metadata = {key:self.getOptionValue(key) for key,_ in iteritems(t[0])}
                        retValue = meth(*list(injectedArgs.values()))
                        return
                if defRoute:
                    retValue = getattr(self, defRoute)()
                    return
            finally:
                # Post-processing lives in finally so both return paths above
                # share it.
                if isinstance(retValue, templateArgs.TemplateRetValue):
                    injectedArgs.update(retValue.locals)
                    retValue = retValue.ret_value
                if isinstance(retValue, string_types):
                    if self.getBooleanOption("nostore_isrunningchildpixieapp", False):
                        # Child apps get their fragment wrapped in the
                        # standard pixiedust wrapper div.
                        self.options.pop("nostore_isrunningchildpixieapp", None)
                        retValue = """<div id="wrapperHTML{{prefix}}" pixiedust="{{pd_controls|htmlAttribute}}">""" + retValue + """</div>"""
                    self._addHTMLTemplateString(retValue, **injectedArgs)
                elif isinstance(retValue, dict):
                    # Dialog-style return: body plus optional JS hooks.
                    body = self.renderTemplateString(retValue.get("body", ""))
                    jsOnLoad = self.renderTemplateString(retValue.get("jsOnLoad", ""))
                    jsOK = self.renderTemplateString(retValue.get("jsOK", ""))
                    dialogRoot = retValue.get("dialogRoot", None)
                    if dialogRoot is not None:
                        jsOnLoad = """pixiedust.dialogRoot="{}";\n{}""".format(self.renderTemplateString(dialogRoot), jsOnLoad)
                    if body is not None:
                        self._addHTMLTemplateString("""
{{body}}
<pd_dialog>
    <pd_onload>{{jsOnLoad|htmlAttribute}}</pd_onload>
    <pd_ok>{{jsOK|htmlAttribute}}</pd_ok>
</pd_dialog>
                        """, body=body, jsOnLoad=jsOnLoad, jsOK=jsOK)
        print("Didn't find any routes for {}. Did you forget to define a default route?".format(self))

    # Attribute-name prefix under which child apps are stored on the parent.
    pixieapp_child_prefix = "__pixieapp_child__"

    @property
    def pixieapp_children(self):
        """Return {attribute name: child app} for all registered children."""
        return {var:getattr(self, var) for var in dir(self) if var.startswith(PixieDustApp.pixieapp_child_prefix)}

    def add_child(self, instance_app):
        """Register *instance_app* as a child under a generated attribute name."""
        var_name = "{}{}".format(
            PixieDustApp.pixieapp_child_prefix,
            len([var for var in dir(self) if var.startswith(PixieDustApp.pixieapp_child_prefix)])
        )
        setattr(self, var_name, instance_app)

    def get_custom_options(self):
        """Hook for subclasses: extra options merged into every run()."""
        return {}

    def getDialogOptions(self):
        """Hook for subclasses: options applied when running inside a dialog."""
        return {}
@Logger()
def PixieApp(cls):
    """Class decorator that turns *cls* into a runnable PixieApp.

    Registers the class's @route methods in PixieDustApp.routesByClass, builds
    a display subclass combining *cls* with PixieDustApp (adding run()), and
    wires a DisplayHandlerMeta so display() can dispatch to it.

    :param cls: the user class to decorate.
    :return: the generated display class.
    """
    #reset the class routing in case the cell is being run multiple time
    clsName = "{}_{}_Display".format(inspect.getmodule(cls).__name__, cls.__name__)
    PixieDustApp.routesByClass[clsName] = {"routes":[], "widgets":[]}
    #put the routes that define a widget in a separate bucket
    def walk(cl):
        # Collect @route methods declared on cl and, recursively, its bases.
        for name, method in iteritems(cl.__dict__):
            if hasattr(method, "pixiedust_route"):
                if "widget" in method.pixiedust_route:
                    PixieDustApp.routesByClass[clsName]["widgets"].append( (method.pixiedust_route,name) )
                else:
                    PixieDustApp.routesByClass[clsName]["routes"].append( (method.pixiedust_route,name) )
        for c in [c for c in cl.__bases__]:
            walk(c)
    walk(cls)
    #re-order the routes according to the number of constraints e.g. from more to less specific
    p = PixieDustApp.routesByClass[clsName]["routes"]
    PixieDustApp.routesByClass[clsName]["routes"] = [p[a[1]] for a in sorted([(len(a[0]), i) for i,a in enumerate(p)], reverse=True)]

    def __init__(self, options=None, entity=None, dataHandler=None):
        PixieDustApp.__init__(self, options or {}, entity, dataHandler)

    def getPixieAppEntity(self):
        return self.pixieapp_entity if hasattr(self, "pixieapp_entity") else None

    # NOTE(review): formatOptions is defined but never attached to the
    # generated display class -- kept as-is pending confirmation of intent.
    def formatOptions(self,options):
        """Helper method that convert pd options from Json format to pixieApp html attribute compliant format"""
        return ';'.join(["{}={}".format(key,value) for (key, value) in iteritems(options)])

    def decoName(cls, suffix):
        # Unique, stable identifier for this class's display handler.
        return "{}_{}_{}".format(cls.__module__, cls.__name__, suffix)

    def run_method_with_super_classes(cls, instance, method_name):
        """Invoke *method_name* once per class in the MRO (base classes
        first), deduplicating shared implementations."""
        fctSet = set()
        for cl in reversed(inspect.getmro(cls)):
            # Bug fix: honor the method_name argument; the original hardcoded
            # 'setup', silently breaking any other use of this helper.
            if hasattr(cl, method_name):
                f = getattr(cl, method_name)
                if f not in fctSet and callable(f):
                    fctSet.add(f)
                    f(instance)

    def run(self, entity=None, **kwargs):
        """Entry point: bind kwargs as attributes, resolve (or create) the
        shell variable referencing this app, then delegate to display()."""
        is_running_child_pixieapp = kwargs.pop("is_running_child_pixieapp", False)
        for key, value in iteritems(kwargs):
            setattr(self, key, value)
        if entity is not None:
            self.pixieapp_entity = entity
        var = None
        if self.parent_pixieapp is not None:
            # Locate the notebook variable holding the parent, then find
            # ourselves among its children to build a dotted reference.
            parent_key = None
            for key in ShellAccess.keys():
                notebook_var = ShellAccess[key]
                if notebook_var is self.parent_pixieapp and key != "self":
                    parent_key = key
                    break
            # NOTE(review): assumes the parent was found in the shell;
            # otherwise notebook_var holds the last inspected variable.
            for child_key, child in iteritems(notebook_var.pixieapp_children):
                if child is self:
                    var = "{}.{}".format(parent_key, child_key)
                    break
        else:
            for key in ShellAccess.keys():
                notebook_var = ShellAccess[key]
                if notebook_var is self:
                    var = key
                    break
        if not hasattr(self, "pd_initialized"):
            # First run: invoke setup() across the whole MRO exactly once.
            run_method_with_super_classes(cls, self, "setup")
            self.nostore_params = True
            self.pd_initialized = True
        instance_namespace = ""
        if is_running_child_pixieapp:
            cell_id = kwargs.get("options", {}).get("cell_id", None)
            if cell_id:
                instance_namespace = "_" + cell_id
        if not var:
            #If we're here, the user must have created the instance inline, assign a variable dynamically
            var = cls.__name__ + "_instance" + instance_namespace
            ShellAccess[var] = self
        # Bug fix: compare string values with ==, not identity ('is'), which
        # only worked by accident for interned literals and is a
        # SyntaxWarning on modern Python.
        self.runInDialog = kwargs.get("runInDialog", "false") == "true"
        options = {
            "nostore_pixieapp": var,
            "nostore_ispix":"true",
            "runInDialog": "true" if self.runInDialog else "false"
        }
        if is_running_child_pixieapp:
            options["nostore_isrunningchildpixieapp"] = "true"
        #update with any custom options that the pixieapp may have
        options.update(self.get_custom_options())
        if self.runInDialog:
            options.update(self.getDialogOptions())
        options.update({'handlerId': decoName(cls, "id")})
        if "options" in kwargs and isinstance(kwargs['options'], dict):
            options.update(kwargs['options'])
        if pixieAppRunCustomizer is not None and callable(getattr(pixieAppRunCustomizer, "customizeOptions", None)):
            pixieAppRunCustomizer.customizeOptions(options)
        opts = [(k, str(v).lower() if isinstance(v, bool) else v) for (k,v) in iteritems(options) if v is not None]
        # Bug fix: reduce is not a builtin on Python 3; it is now imported
        # from functools at the top of the file.
        s = "display({}{})".format(var, reduce(lambda k,v: k + "," + v[0] + "='" + str(v[1]) + "'", opts, ""))
        try:
            sys.modules['pixiedust.display'].pixiedust_display_callerText = s
            self._app_starting = True #App lifecycle flag
            parts = var.split(".")
            # NOTE(review): mutating locals() to seed the eval namespace is
            # CPython-specific behaviour -- kept as-is.
            locals()[parts[0]] = ShellAccess[parts[0]]
            self.debug("Running with command: {} and var {}".format(s, var))
            return eval(s, globals(), locals())
        finally:
            self._app_starting = False
            del sys.modules['pixiedust.display'].pixiedust_display_callerText

    displayClass = type( decoName(cls, "Display"), (cls,PixieDustApp, ),{
        "__init__": __init__,
        "run": run,
        "getPixieAppEntity":getPixieAppEntity
        })

    ShellAccess["newDisplayClass"] = displayClass

    def prettyFormat(o):
        # Debug helper: object repr plus its identity.
        return "{} at {}".format(o, id(o))

    @addId
    def getMenuInfo(self, entity, dataHandler=None):
        # Advertise a menu entry only for instances of the generated class.
        if entity is displayClass or entity.__class__ is displayClass:
            return [{"id": decoName(cls, "id")}]
        return []

    def newDisplayHandler(self, options, entity):
        if entity is displayClass or entity.__class__ is displayClass:
            # Re-initialize the app with the fresh options and reuse it.
            entity.__init__(options, entity)
            return entity
        elif options.get("nostore_pixieapp") is not None:
            # Delegate to the running app's own handler factory, if any.
            from pixiedust.utils.shellAccess import ShellAccess
            papp = ShellAccess[options.get("nostore_pixieapp")]
            if papp is not None and hasattr(papp, "newDisplayHandler"):
                fn = papp.newDisplayHandler
                if callable(fn):
                    return fn(options, entity)
        return None

    displayHandlerMetaClass = type( decoName(cls, "Meta"), (DisplayHandlerMeta,), {
        "getMenuInfo": getMenuInfo,
        "newDisplayHandler": newDisplayHandler
    })

    displayHandlerMeta = displayHandlerMetaClass()
    ShellAccess["displayHandlerMeta"] = displayHandlerMeta
    registerDisplayHandler( displayHandlerMeta )
    return displayClass
| StarcoderdataPython |
60532 | <gh_stars>0
# Generated by Django 3.2.5 on 2022-02-05 12:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``root``, ``sub01`` and ``sub02`` char fields to the Tag model.

    Auto-generated by Django 3.2.5; applied after
    ``english.0011_auto_20220204_0123``.  All three fields are nullable so
    existing rows migrate without needing a default value.
    """

    dependencies = [
        ('english', '0011_auto_20220204_0123'),
    ]

    operations = [
        migrations.AddField(
            model_name='tag',
            name='root',
            field=models.CharField(max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='tag',
            name='sub01',
            field=models.CharField(max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='tag',
            name='sub02',
            field=models.CharField(max_length=100, null=True),
        ),
    ]
| StarcoderdataPython |
3342117 | <reponame>threeguys/skynet-python
# Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import fileinput
import tensorflow as tf
import numpy as np
import argparse
import json
from .common import create_vocabulary, write_config, write_corpus, write_vocab, write_charmap, build_gru_model
# Batches up the input into 100 character sequences for training
# Returns: encoded text, broken into sequences of length 100
def generate_sequences(text, text_as_int, idx2char, seq_length):
    """Slice the encoded corpus into fixed-length training chunks.

    Args:
        text: raw corpus string (unused; kept for interface compatibility).
        text_as_int: the corpus encoded as an integer array.
        idx2char: index-to-character lookup (unused; kept for compatibility).
        seq_length: length of each training sequence.

    Returns:
        A ``tf.data.Dataset`` of chunks of ``seq_length + 1`` encoded
        characters; the extra character lets the caller split each chunk
        into input/target pairs offset by one.
    """
    # Removed the unused ``examples_per_epoch`` local; the length of the
    # text is not needed here.
    char_dataset = tf.data.Dataset.from_tensor_slices(text_as_int)
    # drop_remainder so every example has a uniform length.
    return char_dataset.batch(seq_length + 1, drop_remainder=True)
# Duplicates a single chunk into two separate lists, offset by
# one character for training. We are trying to make the model predict what the
# next character will be, so our correct answer is the actual next char
def split_input_target(chunk):
    """Split *chunk* into an (input, target) pair offset by one element.

    The input is everything but the last element and the target is
    everything but the first, so a model learns to predict each next
    character from the characters seen so far.
    """
    return chunk[:-1], chunk[1:]
# Helper to package up the text into a tensor dataset
def prepare_dataset(raw_text, encoded_text, idx2char, buffer_size, batch_size, seq_length, drop_remainder=True):
    """Build a shuffled, batched (input, target) tf.data pipeline.

    Returns the intermediate ``sequences`` dataset alongside the final
    shuffled/batched one.

    NOTE(review): the ``drop_remainder`` parameter is accepted but not
    forwarded -- the ``batch`` call below hard-codes
    ``drop_remainder=True``; confirm whether that is intentional.
    """
    sequences = generate_sequences(raw_text, encoded_text, idx2char, seq_length)
    dataset = sequences.map(split_input_target)
    dataset = dataset.shuffle(buffer_size).batch(batch_size, drop_remainder=True)
    return sequences, dataset
# Compiles the model to be ready for fitting, this function prints out
# a sample of the model shape, a summary, sets the loss function and then calls .compile()
def compile_model(model, dataset):
    """Compile *model* for training and print diagnostic information.

    Pulls one batch from *dataset* to report the prediction shape and the
    model summary, draws a categorical sample from the untrained logits,
    prints the initial loss, then compiles the model with Adam and a
    from-logits sparse categorical cross-entropy loss.
    """
    # Print out some info about the model
    for input_example_batch, target_example_batch in dataset.take(1):
        example_batch_predictions = model(input_example_batch)
        print(example_batch_predictions.shape, "# (batch_size, sequence_length, vocab_size)")
    print(model.summary())
    # NOTE(review): the lines below rely on the loop variables
    # (example_batch_predictions / target_example_batch) leaking out of the
    # single-iteration loop above.
    sampled_indices = tf.random.categorical(example_batch_predictions[0], num_samples=1)
    sampled_indices = tf.squeeze(sampled_indices,axis=-1).numpy()
    def loss(labels, logits):
        # from_logits=True because the model's final Dense layer has no softmax.
        return tf.keras.losses.sparse_categorical_crossentropy(labels, logits, from_logits=True)
    example_batch_loss = loss(target_example_batch, example_batch_predictions)
    print("Prediction shape: ", example_batch_predictions.shape, " # (batch_size, sequence_length, vocab_size)")
    print("scalar_loss: ", example_batch_loss.numpy().mean())
    model.compile(optimizer='adam', loss=loss)
# Fits the model for a given number of epochs, checkpointing into the specified directory
def fit_model(model, dataset, checkpoint_dir, start_epoch=None, end_epoch=10):
    """Train *model*, checkpointing weights each epoch into *checkpoint_dir*.

    When *start_epoch* is given, training resumes from that epoch number
    (this only offsets the epoch counter -- the weights themselves must
    already have been loaded by the caller).  Returns the path of the
    latest checkpoint found in *checkpoint_dir* after training.
    """
    # Setup checkpointing
    checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
        filepath=os.path.join(checkpoint_dir, "epoch-{epoch}-loss-{loss:.2f}"), verbose=1,
        save_weights_only=True)
    # Train the model
    if start_epoch is None:
        print('Training %d epochs from scratch' % end_epoch)
        history = model.fit(dataset, epochs=end_epoch, callbacks=[ checkpoint_callback ])
    else:
        print('Training epochs %d to %d' % (start_epoch, end_epoch))
        history = model.fit(dataset, epochs=end_epoch, initial_epoch=start_epoch, callbacks=[ checkpoint_callback ])
    # history is captured but unused; the checkpoint path is the result.
    return tf.train.latest_checkpoint(checkpoint_dir)
def train_model(model_name, input_path, output_path, buffer_size, batch_size, seq_length, embedding_dim, rnn_units, epochs):
    """End-to-end training driver for the character-level GRU model.

    Reads the corpus at *input_path*, builds the vocabulary and dataset,
    constructs (or resumes) the model, trains for *epochs* more epochs,
    and writes the config/corpus/vocab/charmap artifacts under
    ``output_path/model_name``.

    Raises:
        ValueError: if the corpus is too small for even one step per epoch.
    """
    # Join all lines into one long space-separated string.
    with open(input_path, 'rb') as input_file:
        raw_text = ' '.join([line.decode('utf-8').rstrip() for line in input_file])
    model_dir = os.path.join(output_path, model_name)
    checkpoint_dir = os.path.join(model_dir, 'checkpoints')
    steps_per_epoch = int(len(raw_text) / (batch_size * seq_length)) # batch_size * sequence length
    if steps_per_epoch <= 0:
        raise ValueError('Invalid steps per epoch: %d somethin aint right!' % steps_per_epoch)
    # Create the vocab and encode the text
    vocab, char2idx, idx2char, encoded_text = create_vocabulary(raw_text)
    # Split dataset into batches for training
    sequences, dataset = prepare_dataset(raw_text, encoded_text, idx2char, buffer_size, batch_size, seq_length)
    # Setup the tf model
    model = build_gru_model(len(vocab), embedding_dim, rnn_units, batch_size)
    # Resume from the newest checkpoint when one exists.
    if os.path.isdir(checkpoint_dir):
        latest_chkpt = tf.train.latest_checkpoint(checkpoint_dir)
        if latest_chkpt is not None:
            print('Found checkpoint: %s loading weights...' % latest_chkpt)
            chkpt = model.load_weights(latest_chkpt)
            # chkpt.assert_consumed()
            model.build(tf.TensorShape([batch_size, seq_length]))
            # re.search('epoch-([0-9]+)-.*', latest_chkpt)
    compile_model(model, dataset)
    # Derive how many epochs were already trained from the restored
    # optimizer's iteration counter.
    num_iterations = int(model.optimizer.iterations.numpy())
    initial_epoch = round(num_iterations / steps_per_epoch)
    print('Optimizer epoch: %d iterations: %d steps-per: %d' % (initial_epoch, num_iterations, steps_per_epoch))
    # Training loop...
    start_epoch = initial_epoch if initial_epoch > 0 else None
    end_epoch = epochs + initial_epoch
    last_chkpt = fit_model(model, dataset, checkpoint_dir, start_epoch, end_epoch)
    # Persist everything needed to rebuild the model for generation.
    config = {
        'name': model_name,
        'batch': batch_size,
        'buffer': buffer_size,
        'embedding': embedding_dim,
        'seq': seq_length,
        'rnn': rnn_units,
        'epochs': epochs
    }
    write_config(model_dir, config)
    write_corpus(model_dir, raw_text)
    write_vocab(model_dir, vocab)
    write_charmap(model_dir, char2idx)
def isdir_arg(path):
    """argparse ``type=`` checker: pass *path* through if it is an existing
    directory, otherwise raise argparse.ArgumentTypeError."""
    if not os.path.isdir(path):
        raise argparse.ArgumentTypeError(f"readable_dir:{path} is not a valid directory")
    return path
def isfile_arg(path):
    """argparse ``type=`` checker: pass *path* through if it is an existing file.

    If *path* turns out to be a directory, its full recursive listing is
    printed (a debugging aid for locating the expected input file) before
    raising.  Any non-file path ends in argparse.ArgumentTypeError.
    """
    if os.path.isfile(path):
        return path
    elif os.path.isdir(path):
        print('Found path: %s' % path)
        for root, dirs, files in os.walk(path, topdown=False):
            for name in files:
                print(os.path.join(root, name))
            for name in dirs:
                print(os.path.join(root, name))
    raise argparse.ArgumentTypeError(f"readable_file:{path} is not a valid file")
def train_rnn(input_args=None):
    """CLI entry point: parse arguments and run :func:`train_model`.

    Args:
        input_args: optional argv-style list; when None the real command
            line is parsed (useful for calling this from tests or SageMaker
            wrappers with a synthetic argument list).
    """
    parser = argparse.ArgumentParser(description='train seq2seq RNN network based on text input')
    parser.add_argument('--name', required=True, action='store', help='name of the model')
    parser.add_argument('--batch', default=64, type=int, action='store', help='batch size')
    parser.add_argument('--buffer', default=10000, type=int, action='store', help='working shuffle buffer size')
    # NOTE(review): --type is parsed but never used below -- confirm intent.
    parser.add_argument('--type', default='rnn', action='store', help='type of model (rnn|?)')
    parser.add_argument('--dim', default=256, type=int, action='store', help='embedding dimension')
    parser.add_argument('--units', default=1024, type=int, action='store', help='rnn units')
    parser.add_argument('--seq', default=100, type=int, action='store', help='sequence length')
    parser.add_argument('--epochs', default=10, type=int, action='store', help='number of epochs to train')
    parser.add_argument('--input', required=True, type=isfile_arg, action='store', help='input corpus text (-) for stdin')
    parser.add_argument('--output', required=True, type=isdir_arg, default='/opt/ml/output/data', action='store', help='output path to write model files')
    if input_args is not None:
        args = parser.parse_args(input_args)
    else:
        args = parser.parse_args()
    train_model(args.name,
                args.input, args.output, args.buffer,
                args.batch, args.seq,
                args.dim, args.units, args.epochs)
# Script entry point: parse the command line and start training.
if __name__ == '__main__':
    train_rnn()
| StarcoderdataPython |
4800552 | <reponame>Nuwanda7O/404-Final-assignment<filename>nodeW/intelligent/prediction2.0.py<gh_stars>0
import pandas as pd
import os
import matplotlib.pyplot as plt
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split
from tensorflow.keras.callbacks import TensorBoard
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Activation,LSTM,Dropout,Dense,BatchNormalization,LeakyReLU
from tensorflow.keras.optimizers import SGD
from numpy import concatenate
from sklearn.metrics import mean_squared_error,mean_absolute_error,r2_score
from math import sqrt
# --- Hyper-parameters / globals -------------------------------------------
LR = 0.0006     # learning rate (only referenced by the commented-out SGD setup below)
look_back=5     # number of consecutive 'open' prices fed to the model per sample
ratio=0.8       # intended train/test ratio (unused; train_test_split below uses test_size=0.2)
scaler=MinMaxScaler(feature_range=(0,1))

# --- Load every CSV under ./data into a single frame -----------------------
df = pd.DataFrame(columns=['date', 'open', '5days-open'])
file='data'
for root, dirs, files in os.walk(file):
    for f in files:
        code = str(f).split('.')[0]  # stock code taken from the filename (unused below)
        path = './data/' + str(f)
        # gbk-encoded CSVs; the first three columns map to date / open / 5days-open.
        temp_df = pd.read_csv(path, encoding='gbk', usecols=[0,1, 2])
        #dropna()
        temp_df.columns = ['date', 'open', '5days-open']
        # NOTE(review): DataFrame.append was removed in pandas 2.x; fine on
        # the pandas version this was written against.
        df=df.append(temp_df)

# --- Build supervised samples: look_back opens -> 5-day-ahead open ---------
high=len(df)-look_back+1
y = np.empty((high, 1), dtype='float32')
x = np.empty((high,look_back), dtype='float32')
a=-1
for i in range(high):
    a=a+1
    # Label is taken at the start of the window; features are the next
    # look_back opening prices.
    y[a]=df['5days-open'].iat[i]
    x[a][0] = df['open'].iat[i]
    x[a][1] = df['open'].iat[i+1]
    x[a][2] = df['open'].iat[i+2]
    x[a][3] = df['open'].iat[i+3]
    x[a][4] = df['open'].iat[i+4]
x=scaler.fit_transform(x)
# print(y.shape)
# print(x.shape)
x_train,x_test,y_train,y_test=train_test_split(x,y,test_size=0.2)
# LSTM expects (samples, timesteps, features=1).
x_train=np.reshape(x_train,(x_train.shape[0],x_train.shape[1],1))
print(x_train.shape)
x_test=np.reshape(x_test,(x_test.shape[0],x_test.shape[1],1))

# --- Small single-layer LSTM regressor --------------------------------------
model = Sequential()
model.add(LSTM(2,input_shape=(None,1)))
model.add(Dense(1))
model.compile(loss='mse', optimizer='Adam')
model.summary()
history=model.fit(x_train,y_train,epochs=40,batch_size=5,
verbose=2)

# --- Loss curve -------------------------------------------------------------
# NOTE(review): fit() above passes no validation data, so history.history
# has no 'val_loss' key -- the second plot line below will raise KeyError.
# Confirm whether validation_data/validation_split was meant to be supplied.
plt.plot(history.history['loss'], label='train')
plt.plot(history.history['val_loss'], label='predict')
plt.title('2019', fontsize='12')
plt.ylabel('loss', fontsize='10')
plt.xlabel('epoch', fontsize='10')
plt.legend()
plt.show()
# model.add(LSTM( input_shape=(x_train.shape[1],x_train.shape[2]),
# units=50, return_sequences=True))
# model.add(LSTM(input_dim=50, units=100, return_sequences=False))
# model.add(Dense(input_dim=100, units=1))
# model.add(BatchNormalization())
# model.add(Dropout(0.2))
# model.add(LeakyReLU(alpha=0.02))
# model.compile(loss='mse',
# optimizer=SGD(lr=LR, decay=1e-6, momentum=0.9, nesterov=True))
# history=model.fit(x_train,y_train,epochs=40,batch_size=120,validation_data=(x_test, y_test), verbose=2,shuffle=False,callbacks=[TensorBoard(log_dir='MyBoard',histogram_freq=0)])
#
# plt.plot(history.history['loss'], label='train')
# plt.plot(history.history['val_loss'], label='predict')
# plt.title('2019', fontsize='12')
# plt.ylabel('loss', fontsize='10')
# plt.xlabel('epoch', fontsize='10')
# plt.legend()
# plt.show()
# x_test=np.reshape(x_test,(x_test.shape[0],5))
# y_test_predict=model.predict(x_test)
#
# print(history.history['loss'])
# plt.plot(y_test,color='red',label='real')
# plt.plot(y_test_predict,color='blue',label='predict')
# plt.xlabel('days')
# plt.ylabel('earning_rate')
# plt.title('2019')
# plt.legend()
# plt.show()
# # y_test_predict.shape 952*1 ndarray
# # 回归评价指标
# # calculate MSE 均方误差
# mse=mean_squared_error(y_test,y_test_predict)
# # calculate RMSE 均方根误差
# rmse = sqrt(mean_squared_error(y_test, y_test_predict))
# #calculate MAE 平均绝对误差
# mae=mean_absolute_error(y_test,y_test_predict)
# #calculate R square
# r_square=r2_score(y_test,y_test_predict)
# print('均方误差: %.6f' % mse)
# print('均方根误差: %.6f' % rmse)
# print('平均绝对误差: %.6f' % mae)
# print('R_square: %.6f' % r_square)
#
#
#
#
#
| StarcoderdataPython |
3252249 | import bson
from pymongo import ReturnDocument
from pymongo.cursor import Cursor
import numpy as np
import pandas as pd
from config import get_config
from logger import get_logger
from .db import configdb, metricdb
logger = get_logger(__name__, log_level=("ANALYZER", "LOGLEVEL"))
config = get_config()
app_collection = config.get("ANALYZER", "APP_COLLECTION")
def get_all_apps():
return configdb[app_collection].find()
def get_all_apps_by_state(state_filter):
return configdb[app_collection].find({"state": state_filter})
def create_app(app_json):
return configdb[app_collection].insert_one(app_json)
def update_and_get_app(app_id, update_doc, unset=False):
if unset:
updated_doc = configdb[app_collection].find_one_and_update(
{"app_id": app_id},
{"$unset": update_doc},
return_document=ReturnDocument.AFTER
)
else:
updated_doc = configdb[app_collection].find_one_and_update(
{"app_id": app_id},
{"$set": update_doc},
return_document=ReturnDocument.AFTER
)
updated_doc.pop("_id")
return updated_doc
def update_app(app_id, update_doc):
result = configdb[app_collection].update_one(
{"app_id": app_id},
{"$set": update_doc}
)
return result.modified_count > 0
def get_app_microservices(app_id):
application = configdb[app_collection].find_one({"app_id": app_id})
if application:
return application.get("microservices", [])
def get_app_by_id(app_id):
return configdb[app_collection].find_one({"app_id": app_id})
# get from configdb the application json
def get_app_by_name(app_name):
app_filter = {'name': app_name}
app = configdb[app_collection].find_one(app_filter)
return app
# get from configdb the type of an application ("long-running" or "batch")
def get_app_type(app_name):
return "long-running"
'''
app = get_app_by_name(app_name)
return app['type']
'''
# get from configdb the slo metric type of an application
def get_slo_type(app_name):
return 'latency'
'''
app = get_app_by_name(app_name)
return app['slo']['type']
'''
def get_slo_value(app_name):
return 500.
'''
app = get_app_by_name(app_name)
try:
slo_value = app['slo']['value']
except KeyError:
slo_value = 500. # TODO: put an nan
return slo_value
'''
def get_budget(app_name):
return 25000.
'''
app = get_app_by_name(app_name)
try:
budget = app['budget']['value']
except KeyError:
budget = 25000. # all types allowed
return budget
'''
# get from configdb the {cpu|mem} requests (i.e. min) for the app service container
def get_resource_requests(app_name):
app = get_app_by_name(app_name)
# TODO: handle apps with multiple services
service = app['serviceNames'][0]
min_resources = {'cpu': 0., 'mem': 0.}
for task in app['taskDefinitions']:
if task['nodeMapping']['task'] == service:
container_spec = \
task['taskDefinition']['deployment']['spec']['template']['spec']['containers'][0]
try:
resource_requests = container_spec['resources']['requests']
except KeyError:
logger.debug(
f"No resource requests for the container running {service}")
return min_resources
try:
cpu_request = resource_requests['cpu']
# conver cpu unit from millicores to number of vcpus
if cpu_request[-1] == 'm':
min_resources['cpu'] = float(
cpu_request[:len(cpu_request) - 1]) / 1000.
else:
min_resources['cpu'] = float(cpu_request)
except KeyError:
logger.debug(
f"No cpu request for the container running {service}")
try:
mem_request = resource_requests['memory']
# convert memory unit to GB
if mem_request[-1] == 'M':
min_resources['mem'] = float(
mem_request[:len(mem_request) - 1]) / 1000.
elif mem_request[-2:] == 'Mi':
min_resources['mem'] = float(
mem_request[:len(mem_request) - 2]) / 1024.
else:
min_resources['mem'] = float(mem_request) / 1024. / 1024.
except KeyError:
logger.debug(
f"No memory request for the container running {service}")
logger.info(
f"Found resource requests for app {app_name} service {service}: {min_resources}")
return min_resources
# TODO: probably need to change to appId.
def create_profiling_dataframe(app_name, collection='profiling'):
""" Create a dataframe of features.
Args:
app_name(str): Map to the 'appName' in Mongo database.
Returns:
df(pandas dataframe): Dataframe with rows of features, where each row is a service.
(i.e. if an app has N services, where each service has K dimensions, the dataframe would be NxK)
"""
filt = {'appName': app_name}
app = metricdb[collection].find_one(filt)
if app is None:
raise KeyError(
'Cannot find document: filter={}'.format(filt))
serviceNames = pd.Index(app['services'])
benchmarkNames = pd.Index(app['benchmarks'])
# make dataframe
ibenchScores = []
for service in serviceNames:
filt = {'appName': app_name, 'serviceInTest': service}
app = metricdb[collection].find_one(filt)
if app is None:
raise KeyError(
'Cannot find document: filter={}'.format(filt))
ibenchScores.append([i['toleratedInterference']
for i in app['testResult']])
df = pd.DataFrame(data=np.array(ibenchScores),
index=serviceNames, columns=benchmarkNames)
return df
| StarcoderdataPython |
1679390 | <reponame>popsonebz/aws-mlops-framework<gh_stars>0
# #####################################################################################################################
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
# #####################################################################################################################
from aws_cdk import (
aws_lambda as lambda_,
aws_s3 as s3,
aws_apigateway as apigw,
core,
)
from aws_solutions_constructs.aws_lambda_sagemakerendpoint import LambdaToSagemakerEndpoint
from aws_solutions_constructs import aws_apigateway_lambda
from lib.blueprints.byom.pipeline_definitions.sagemaker_role import create_sagemaker_role
from lib.blueprints.byom.pipeline_definitions.sagemaker_model import create_sagemaker_model
from lib.blueprints.byom.pipeline_definitions.sagemaker_endpoint_config import create_sagemaker_endpoint_config
from lib.blueprints.byom.pipeline_definitions.sagemaker_endpoint import create_sagemaker_endpoint
from lib.blueprints.byom.pipeline_definitions.helpers import suppress_lambda_policies
from lib.blueprints.byom.pipeline_definitions.templates_parameters import (
create_blueprint_bucket_name_parameter,
create_assets_bucket_name_parameter,
create_algorithm_image_uri_parameter,
create_custom_algorithms_ecr_repo_arn_parameter,
create_inference_instance_parameter,
create_kms_key_arn_parameter,
create_model_artifact_location_parameter,
create_model_name_parameter,
create_data_capture_location_parameter,
create_custom_algorithms_ecr_repo_arn_provided_condition,
create_kms_key_arn_provided_condition,
create_model_package_name_parameter,
create_model_registry_provided_condition,
create_model_package_group_name_parameter,
)
class BYOMRealtimePipelineStack(core.Stack):
    """CDK stack for a bring-your-own-model (BYOM) real-time inference pipeline.

    Provisions, in order: an API Gateway + Lambda inference front end, a
    SageMaker execution role, a SageMaker model (either from a registry
    model package or from a trained artifact + container image), an
    endpoint config with data capture, the endpoint itself, and the
    Lambda-to-endpoint wiring.  All deployment knobs arrive as
    CloudFormation parameters so one synthesized template serves many
    deployments.
    """

    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        # Parameteres #
        # CloudFormation parameters supplied at deploy time.
        assets_bucket_name = create_assets_bucket_name_parameter(self)
        blueprint_bucket_name = create_blueprint_bucket_name_parameter(self)
        custom_algorithms_ecr_repo_arn = create_custom_algorithms_ecr_repo_arn_parameter(self)
        kms_key_arn = create_kms_key_arn_parameter(self)
        algorithm_image_uri = create_algorithm_image_uri_parameter(self)
        model_name = create_model_name_parameter(self)
        model_artifact_location = create_model_artifact_location_parameter(self)
        data_capture_location = create_data_capture_location_parameter(self)
        inference_instance = create_inference_instance_parameter(self)
        model_package_group_name = create_model_package_group_name_parameter(self)
        model_package_name = create_model_package_name_parameter(self)

        # Conditions
        # Conditions gate the optional pieces (custom ECR repo, KMS key,
        # model-registry sourcing) on whether their parameter was provided.
        custom_algorithms_ecr_repo_arn_provided = create_custom_algorithms_ecr_repo_arn_provided_condition(
            self, custom_algorithms_ecr_repo_arn
        )
        kms_key_arn_provided = create_kms_key_arn_provided_condition(self, kms_key_arn)
        model_registry_provided = create_model_registry_provided_condition(self, model_package_name)

        # Resources #
        # getting blueprint bucket object from its name - will be used later in the stack
        blueprint_bucket = s3.Bucket.from_bucket_name(self, "BlueprintBucket", blueprint_bucket_name.value_as_string)

        # provision api gateway and lambda for inference using solution constructs
        inference_api_gateway = aws_apigateway_lambda.ApiGatewayToLambda(
            self,
            "BYOMInference",
            lambda_function_props={
                "runtime": lambda_.Runtime.PYTHON_3_8,
                "handler": "main.handler",
                "code": lambda_.Code.from_bucket(blueprint_bucket, "blueprints/byom/lambdas/inference.zip"),
            },
            api_gateway_props={
                "defaultMethodOptions": {
                    "authorizationType": apigw.AuthorizationType.IAM,
                },
                "restApiName": f"{core.Aws.STACK_NAME}-inference",
                "proxy": False,
            },
        )
        # add supressions
        inference_api_gateway.lambda_function.node.default_child.cfn_options.metadata = suppress_lambda_policies()
        provision_resource = inference_api_gateway.api_gateway.root.add_resource("inference")
        provision_resource.add_method("POST")

        # create Sagemaker role
        sagemaker_role = create_sagemaker_role(
            self,
            "MLOpsRealtimeSagemakerRole",
            custom_algorithms_ecr_arn=custom_algorithms_ecr_repo_arn.value_as_string,
            kms_key_arn=kms_key_arn.value_as_string,
            model_package_group_name=model_package_group_name.value_as_string,
            assets_bucket_name=assets_bucket_name.value_as_string,
            input_bucket_name=assets_bucket_name.value_as_string,
            input_s3_location=assets_bucket_name.value_as_string,
            output_s3_location=data_capture_location.value_as_string,
            ecr_repo_arn_provided_condition=custom_algorithms_ecr_repo_arn_provided,
            kms_key_arn_provided_condition=kms_key_arn_provided,
            model_registry_provided_condition=model_registry_provided,
        )

        # create sagemaker model
        sagemaker_model = create_sagemaker_model(
            self,
            "MLOpsSagemakerModel",
            execution_role=sagemaker_role,
            model_registry_provided=model_registry_provided,
            algorithm_image_uri=algorithm_image_uri.value_as_string,
            assets_bucket_name=assets_bucket_name.value_as_string,
            model_artifact_location=model_artifact_location.value_as_string,
            model_package_name=model_package_name.value_as_string,
            model_name=model_name.value_as_string,
        )

        # Create Sagemaker EndpointConfg
        # KMS key is attached only when one was provided.
        sagemaker_endpoint_config = create_sagemaker_endpoint_config(
            self,
            "MLOpsSagemakerEndpointConfig",
            sagemaker_model.attr_model_name,
            model_name.value_as_string,
            inference_instance.value_as_string,
            data_capture_location.value_as_string,
            core.Fn.condition_if(
                kms_key_arn_provided.logical_id, kms_key_arn.value_as_string, core.Aws.NO_VALUE
            ).to_string(),
        )
        # create a dependency on the model
        sagemaker_endpoint_config.add_depends_on(sagemaker_model)

        # create Sagemaker endpoint
        sagemaker_endpoint = create_sagemaker_endpoint(
            self,
            "MLOpsSagemakerEndpoint",
            sagemaker_endpoint_config.attr_endpoint_config_name,
            model_name.value_as_string,
        )
        # add dependency on endpoint config
        sagemaker_endpoint.add_depends_on(sagemaker_endpoint_config)

        # Create Lambda - sagemakerendpoint
        # Grants the inference Lambda invoke permissions on the endpoint.
        LambdaToSagemakerEndpoint(
            self,
            "LambdaSagmakerEndpoint",
            existing_sagemaker_endpoint_obj=sagemaker_endpoint,
            existing_lambda_obj=inference_api_gateway.lambda_function,
        )

        # Outputs #
        core.CfnOutput(
            self,
            id="SageMakerModelName",
            value=sagemaker_model.attr_model_name,
        )
        core.CfnOutput(
            self,
            id="SageMakerEndpointConfigName",
            value=sagemaker_endpoint_config.attr_endpoint_config_name,
        )
        core.CfnOutput(
            self,
            id="SageMakerEndpointName",
            value=sagemaker_endpoint.attr_endpoint_name,
        )
        core.CfnOutput(
            self,
            id="EndpointDataCaptureLocation",
            value=f"https://s3.console.aws.amazon.com/s3/buckets/{data_capture_location.value_as_string}/",
            description="Endpoint data capture location (to be used by Model Monitor)",
        )
| StarcoderdataPython |
1645231 | <reponame>haltiamreptar/ATCA-Secondary-finder
# A library to handle dealing with ATCA MoniCA points.
from requests import Session
import json
from . import errors
class monicaPoint:
    """A single MoniCA monitoring point: name, latest value, update time and status.

    All setters ignore ``None`` (the stored value is kept) and return
    ``self`` so calls can be chained.
    """

    def __init__(self, info=None):
        """Create a point, optionally seeding its fields from *info*.

        Recognised keys: ``value``, ``description``, ``pointName``,
        ``updateTime``, ``errorState``.

        Fix: the previous mutable default argument (``info={}``) is
        replaced with the None-sentinel idiom.
        """
        if info is None:
            info = {}
        self.value = None
        self.description = None
        self.pointName = None
        self.updateTime = None
        self.errorState = None
        if "value" in info:
            self.setValue(info['value'])
        if "description" in info:
            self.setDescription(info['description'])
        if "pointName" in info:
            self.pointName = info['pointName']
        if "updateTime" in info:
            self.setUpdateTime(info['updateTime'])
        if "errorState" in info:
            self.setErrorState(info['errorState'])

    def getPointName(self):
        """Return the MoniCA point name (e.g. "site.misc.array")."""
        return self.pointName

    def setValue(self, value=None):
        """Set the point's value; ignored when *value* is None.  Returns self."""
        if value is not None:
            self.value = value
        return self

    def getValue(self):
        """Return the last value set for this point (None if never set)."""
        return self.value

    def setDescription(self, description=None):
        """Set the description; ignored when None.  Returns self."""
        if description is not None:
            self.description = description
        return self

    def getDescription(self):
        """Return the point's description (None if never set)."""
        return self.description

    def setUpdateTime(self, updateTime=None):
        """Record when the point was last updated; ignored when None.  Returns self."""
        if updateTime is not None:
            self.updateTime = updateTime
        return self

    def getUpdateTime(self):
        """Return the last update time (None if never set)."""
        return self.updateTime

    def setErrorState(self, errorState=None):
        """Set the error state; ignored when None.  Returns self."""
        if errorState is not None:
            self.errorState = errorState
        return self

    def getErrorState(self):
        """Return the point's error state (None if never set)."""
        return self.errorState
class monicaServer:
    """Client for a MoniCA web-JSON gateway.

    Holds a list of monicaPoint objects and refreshes their values with a
    single POST to the MoniCA web interface.
    """

    def __init__(self, info={}):
        # NOTE(review): mutable default argument (info={}); it is only read
        # here, so it is harmless, but the None-sentinel idiom would be safer.
        # Defaults target the Narrabri (ATCA) MoniCA gateway.
        self.serverName = "monhost-nar"
        self.protocol = "http"
        self.webserverName = "www.narrabri.atnf.csiro.au"
        self.webserverPath = "cgi-bin/obstools/web_monica/monicainterface_json.pl"
        self.points = []
        if "serverName" in info:
            self.serverName = info['serverName']
        if "protocol" in info:
            self.protocol = info['protocol']
        if "webserverName" in info:
            self.webserverName = info['webserverName']
        if "webserverPath" in info:
            self.webserverPath = info['webserverPath']

    def addPoint(self, pointName=None):
        """Register a new point by name (no-op when None).  Returns self."""
        if pointName is not None:
            npoint = monicaPoint({ 'pointName': pointName })
            self.points.append(npoint)
        return self

    def addPoints(self, points=[]):
        """Register several point names at once.  Returns self.

        NOTE(review): mutable default argument (points=[]); never mutated
        here, but prefer ``points=None`` with a sentinel check.
        """
        if len(points) > 0:
            for i in range(0, len(points)):
                self.addPoint(points[i])
        return self

    def getPointByName(self, pointName=None):
        """Return the first registered monicaPoint named *pointName*, else None."""
        if pointName is not None:
            for i in range(0, len(self.points)):
                if (self.points[i].getPointName() == pointName):
                    return self.points[i]
        return None

    def __comms(self, data=None):
        """POST *data* to the MoniCA web gateway and return the decoded JSON.

        Returns None when no data is given.  Network call (requests).
        """
        if data is None:
            return None
        session = Session()
        url = self.protocol + "://" + self.webserverName + "/" + self.webserverPath
        postResponse = session.post( url=url, data=data )
        return json.loads(postResponse.text)

    def updatePoints(self):
        """Refresh every registered point from the server in one request.

        Returns True when the response contained point data, else False.
        """
        allPointNames = [ p.getPointName() for p in self.points ]
        data = { 'action': "points", 'server': self.serverName,
                 'points': ";".join(allPointNames) }
        response = self.__comms(data)
        if response is not None and "pointData" in response:
            for i in range(0, len(response['pointData'])):
                if response['pointData'][i]['pointName'] is not None:
                    point = self.getPointByName(response['pointData'][i]['pointName'])
                    point.setValue(response['pointData'][i]['value'])
                    point.setUpdateTime(response['pointData'][i]['time'])
                    # NOTE(review): the stored errorState is the logical
                    # inversion of the server's flag -- presumably True means
                    # "point is OK"; confirm against consumers.
                    point.setErrorState(not bool(response['pointData'][i]['errorState']))
            return True
        return False
serverInstance = None
def initialiseServerInstance(*args):
    """Return the module-wide monicaServer singleton, creating it on first use.

    *args* is accepted and ignored so this can be plugged in directly as a
    callback.
    """
    global serverInstance
    if serverInstance is None:
        serverInstance = monicaServer()
    return serverInstance
def getArray(*args):
    """Query MoniCA for the current ATCA array configuration.

    Registers and refreshes the "site.misc.array" point on the shared
    server instance (network call), then returns its latest value.
    """
    server = initialiseServerInstance()
    server.addPoint("site.misc.array").updatePoints()
    return server.getPointByName("site.misc.array").getValue()
def getFrequencies(*args):
    """Query MoniCA for the two current observing frequencies.

    Returns ``[freq1, freq2]`` as floats, read from the
    "site.misc.obs.freq1" / "site.misc.obs.freq2" points (network call).
    """
    server = initialiseServerInstance()
    server.addPoints([ "site.misc.obs.freq1", "site.misc.obs.freq2" ]).updatePoints()
    freqs = [ float(server.getPointByName("site.misc.obs.freq1").getValue()), float(server.getPointByName("site.misc.obs.freq2").getValue()) ]
    return freqs
| StarcoderdataPython |
1666719 | <filename>nn/util.py
"""
nn.util
Utility class for working with t2t
"""
from tensor2tensor.data_generators import problem
class SingleProcessProblem(problem.Problem):
    """Mixin marking a tensor2tensor Problem as single-process.

    Such problems do not use multiprocess generation, so the
    multiprocessing hooks (``num_generate_tasks`` /
    ``prepare_to_generate``) are inert stubs that subclasses need not
    override.
    """

    @property
    def num_generate_tasks(self):
        """Unused since multiprocess_generate is False."""

    def prepare_to_generate(self, data_dir, tmp_dir):
        """Unused since multiprocess_generate is False.

        Bug fix: this method was previously decorated with ``@property``
        while still taking ``data_dir``/``tmp_dir`` arguments, so any call
        of ``prepare_to_generate(data_dir, tmp_dir)`` raised TypeError.
        It is now a plain (no-op) method, matching the base-class hook.
        """

    def generate_data(self, data_dir, tmp_dir, task_id=-1):
        """Subclasses must implement the actual data generation."""
        raise NotImplementedError()

    @property
    def num_training_examples(self):
        """Subclasses must report their training-set size."""
        raise NotImplementedError()
| StarcoderdataPython |
89038 | import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
# note: ("httpd") is a plain string, not a 1-tuple -- each parametrize case
# passes one package name.
@pytest.mark.parametrize("installed_packages", [
    ("httpd"),
    ("mod_ssl"),
])
def test_packages_installed(host, installed_packages):
    """The Apache httpd and mod_ssl packages must be installed on the host."""
    rpackage = host.package(installed_packages)
    assert rpackage.is_installed
@pytest.mark.parametrize("services", [
    ("httpd"),
])
def test_services_running_and_enabled(host, services):
    """The httpd service must be enabled at boot and currently running."""
    service = host.service(services)
    assert service.is_enabled
    assert service.is_running
@pytest.mark.parametrize("files", [
    ("/etc/httpd/conf.d/welcome-vm.conf"),
])
def test_welcome(host, files):
    """The welcome-vm vhost config must be root-owned with mode 0644."""
    welcome = host.file(files)
    assert welcome.user == "root"
    assert welcome.group == "root"
    assert welcome.mode == 0o644
def test_http_ssl_conf(host):
    """ssl.conf must not contain its own 'Listen 443' directive."""
    http_ssl_conf = host.file("/etc/httpd/conf.d/ssl.conf")
    assert not http_ssl_conf.contains('Listen 443')
@pytest.mark.parametrize("files", [
    ("/etc/httpd/conf.d/welcome.conf"),
    ("/etc/httpd/conf.d/userdir.conf"),
    ("/etc/httpd/conf.d/autoindex.conf"),
])
def test_empty_config(host, files):
    """These stock Apache config snippets must be empty (size 0)."""
    test_empty_config = host.file(files)
    assert test_empty_config.size == 0
def test_subject_ssll_key(host):
    """The backend certificate's subject CN must be molecule.openconext.org.

    (The function name keeps its original 'ssll' typo so the collected
    pytest test id stays stable.)
    """
    cmd = host.run("openssl x509 -in /etc/pki/tls/certs/backend.molecule.openconext.org.pem -noout -subject")
    assert 'subject= /CN=molecule.openconext.org' in cmd.stdout
    assert cmd.rc == 0
| StarcoderdataPython |
3310129 | __version__ = '0.2.18+affirm.2'
from client import Client, require_access_token
| StarcoderdataPython |
3234177 | <filename>.ipynb_checkpoints/render-checkpoint.py<gh_stars>1-10
import torch
import os
import argparse
from im2scene import config
from im2scene.checkpoints import CheckpointIO
# Build the CLI: a required config path plus an optional --no-cuda switch.
parser = argparse.ArgumentParser(
    description='Render images of a GIRAFFE model.'
)
parser.add_argument('config', type=str, help='Path to config file.')
parser.add_argument('--no-cuda', action='store_true', help='Do not use cuda.')
args = parser.parse_args()

# Load the run configuration, falling back to configs/default.yaml defaults.
cfg = config.load_config(args.config, 'configs/default.yaml')
# Use CUDA only when it is available and was not explicitly disabled.
is_cuda = (torch.cuda.is_available() and not args.no_cuda)
# Select the compute device accordingly.
device = torch.device("cuda" if is_cuda else "cpu")
# Rendered images go to <out_dir>/<render_dir>.
out_dir = cfg['training']['out_dir']
render_dir = os.path.join(out_dir, cfg['rendering']['render_dir'])
# Create the render directory if it does not exist yet.
if not os.path.exists(render_dir):
    os.makedirs(render_dir)

# Build the model on the chosen device.
model = config.get_model(cfg, device=device)
# Restore the trained weights from the configured checkpoint.
checkpoint_io = CheckpointIO(out_dir, model=model)
checkpoint_io.load(cfg['test']['model_file'])

# Generator
# Construct the renderer around the restored model.
renderer = config.get_renderer(model, cfg, device=device)
# Switch to evaluation mode (disables dropout / batch-norm updates).
model.eval()
# Run the full visualization program and write images into render_dir.
out = renderer.render_full_visualization(
    render_dir,
    cfg['rendering']['render_program'])
| StarcoderdataPython |
1668679 | longest = ""
with open(r'Question 51 - 60/file.txt','r') as lines:
for line in lines:
l = line.split()
m = max(l,key=len)
if(len(longest)<len(m)): longest = m
print("the longest word in file is ",longest) | StarcoderdataPython |
3343373 | <reponame>mcopik/serverless-benchmarks<filename>sebs/gcp/function.py
from typing import cast, Optional
from sebs.faas.function import Function, FunctionConfig
from sebs.gcp.storage import GCPStorage
class GCPFunction(Function):
    """SeBS representation of a deployed Google Cloud Function.

    Extends the generic Function with the name of the GCP storage bucket
    that holds the function's code package.
    """

    def __init__(
        self,
        name: str,
        benchmark: str,
        code_package_hash: str,
        cfg: FunctionConfig,
        bucket: Optional[str] = None,
    ):
        super().__init__(benchmark, name, code_package_hash, cfg)
        # Bucket holding the code package; lazily allocated via code_bucket().
        self.bucket = bucket

    @staticmethod
    def typename() -> str:
        """Type tag used by the SeBS cache/serialization machinery."""
        return "GCP.GCPFunction"

    def serialize(self) -> dict:
        """Serialize to a cacheable dict: base Function fields plus the bucket."""
        return {
            **super().serialize(),
            "bucket": self.bucket,
        }

    @staticmethod
    def deserialize(cached_config: dict) -> "GCPFunction":
        """Rebuild a GCPFunction (and its triggers) from a cached dict.

        Raises AssertionError when a trigger entry has an unknown type.
        """
        # Deferred imports avoid a module-level cycle with the trigger classes.
        from sebs.faas.function import Trigger
        from sebs.gcp.triggers import LibraryTrigger, HTTPTrigger

        cfg = FunctionConfig.deserialize(cached_config["config"])
        ret = GCPFunction(
            cached_config["name"],
            cached_config["benchmark"],
            cached_config["hash"],
            cfg,
            cached_config["bucket"],
        )
        for trigger in cached_config["triggers"]:
            trigger_type = cast(
                Trigger,
                {"Library": LibraryTrigger, "HTTP": HTTPTrigger}.get(trigger["type"]),
            )
            assert trigger_type, "Unknown trigger type {}".format(trigger["type"])
            ret.add_trigger(trigger_type.deserialize(trigger))
        return ret

    def code_bucket(self, benchmark: str, storage_client: GCPStorage):
        """Return (allocating on first use) the bucket for *benchmark*'s code."""
        if not self.bucket:
            self.bucket, idx = storage_client.add_input_bucket(benchmark)
        return self.bucket
| StarcoderdataPython |
3349211 | <gh_stars>0
import jax.random as random
import jax.numpy as np
import numpy
import h5py
import itertools
from jax.api import jit, grad
from jax.config import config
from jax.experimental import optimizers
from jax.experimental.optimizers import Optimizer
# Generate Randomness
def random_layer_params(m, n, key, scale=1e-2):
    """Randomly initialize one dense layer.

    Returns a ``(weights, biases)`` pair with shapes ``(n, m)`` and ``(n,)``,
    drawn from a normal distribution and multiplied by *scale*.
    """
    w_key, b_key = random.split(key)
    weights = scale * random.normal(w_key, (n, m))
    biases = scale * random.normal(b_key, (n,))
    return weights, biases
def init_network_params(sizes, key):
    """Build per-layer ``(weights, biases)`` for a fully connected network.

    *sizes* lists consecutive layer widths (e.g. ``[784, 512, 10]``); one
    PRNG subkey is split off *key* per layer.
    """
    layer_keys = random.split(key, len(sizes))
    return [
        random_layer_params(fan_in, fan_out, layer_key)
        for fan_in, fan_out, layer_key in zip(sizes[:-1], sizes[1:], layer_keys)
    ]
## Prediction and Loss Functions
def predict(params, inputs):
    """Apply the linear model: matrix-multiply *params* with *inputs*."""
    return np.matmul(params, inputs)
def loss(params, batch):
    """L2-norm loss between targets and the model's predictions.

    *batch* is an ``(inputs, targets)`` pair.

    NOTE(review): for a 2-D residual, ``np.linalg.norm(..., ord=2)`` is the
    spectral norm (largest singular value), not the Frobenius norm — confirm
    this is the intended loss.
    """
    inputs, targets = batch
    Z = predict(params, inputs)
    return np.linalg.norm((targets - Z), ord=2)
| StarcoderdataPython |
3373107 | import cgi
# Kong (Gluu Gateway) admin API and proxy endpoints.
gg_admin_url = "http://gluu.local.org:8001"
gg_proxy_url = "http://gluu.local.org:8000"
# oxd server and Gluu CE installation URLs.
oxd_host = "https://gluu.local.org:8553"
ce_url = "https://gluu.local.org"
# Upstream API path used for the demo request.
api_path = "posts/1"
# Kong route register with below host
host_with_claims = "gathering.example.com"
host_without_claims = "non-gathering.example.com"
# Consumer client
# oxd client id plus the OAuth client id/secret registered in Gluu CE.
client_oxd_id = "91b14554-17ac-4cf4-917d-f1b27e95902a"
client_id = "@!FBA4.9EDD.24E7.909F!0001!64E0.493A!0008!BE4C.B4F6.E5CC.DB74"
client_secret = "<KEY>"
# You need to add this URL in your consumer client in CE
claims_redirect_url = "https://gluu.local.org/cgi-bin/index.py"
def is_ticket_in_url():
    """Return True when the CGI request carries a ``ticket`` parameter.

    NOTE: the ``cgi`` module is deprecated (PEP 594) and removed in
    Python 3.13; this script targets older interpreters.
    """
    form = cgi.FieldStorage()
    return 'ticket' in form
def is_claim_in_url():
    """Pick the Kong route host based on the CGI request parameters.

    Despite the ``is_`` prefix this returns a host string, not a bool:
    ``host_with_claims`` when the request carries a ``claim`` or ``ticket``
    parameter, ``host_without_claims`` otherwise.
    """
    form = cgi.FieldStorage()
    if 'claim' in form or 'ticket' in form:
        return host_with_claims
    return host_without_claims
3298354 | <gh_stars>1-10
"""A class to abstract the usage of all endpoints.
"""
import types
import builtins
import pandas as pd
import portiapy.specs as specs
import portiapy.utils as utils
import portiapy.axioms as axioms
import portiapy.events as events
import portiapy.phases as phases
import portiapy.select as select
import portiapy.profile as profile
import portiapy.summary as summary
import portiapy.describe as describe
class CustomDict(dict):
    """Custom dictionary with an humanization method.
    Extends:
        dict
    """
    def humanize(
        self,
        locale='en-us',
        custom_dimension: dict=None,
        custom_event: dict=None,
        custom_unity: dict=None
    ) -> dict:
        """Humanizes dictionary.
        Keyword Arguments:
            locale {str} -- which language to use when humanizing
                            (default {'en-us'})
            custom_dimension {dict} -- custom list of dimension codes
                                       (default {None})
            custom_event {dict} -- custom list of event codes (default {None})
            custom_unity {dict} -- custom list of unity codes (default {None})
        Returns:
            dict -- humanized dictionary
        """
        # NOTE(review): ``custom_event`` is accepted and documented above but
        # is never forwarded — only locale, custom_dimension and custom_unity
        # are passed to utils.humanize_json. Confirm against the
        # utils.humanize_json signature whether the event map should be
        # forwarded as well.
        return utils.humanize_json(
            self, locale, custom_dimension, custom_unity
        )
def add_humanize_method(obj: object) -> object:
    """Attach a ``humanize`` capability to *obj* and return it.

    Plain dicts are wrapped in :class:`CustomDict` (which already provides
    ``humanize``); DataFrames get a bound ``humanize`` method; anything else
    is returned untouched.
    """
    if isinstance(obj, dict):
        return CustomDict(obj)
    if isinstance(obj, pd.DataFrame):
        obj.humanize = types.MethodType(utils.humanize_dataframe, obj)
    return obj
class PortiaApi(object):
    """Entry point that builds pipeline/device handles bound to one config."""

    def __init__(self, portia_config: dict):
        """Keep Portia's configuration arguments for the child handles."""
        self.portia_config = portia_config

    def pipeline(self) -> 'EdgePipeline':
        """Return a pipeline handle sharing this API's configuration."""
        return EdgePipeline(self.portia_config)

    def device(self, edge_id: str) -> 'EdgeDevice':
        """Return a device handle for *edge_id* sharing this configuration."""
        return EdgeDevice(edge_id, self.portia_config)
class EdgePipeline(object):
    """Handle over the pipeline endpoints (phases, axioms, specifications)."""

    def __init__(self, portia_config: dict):
        """Keep Portia's configuration arguments for the child handles."""
        self.portia_config = portia_config

    def phase(self) -> 'EdgePipelinePhase':
        """Return a handle over the pipeline phase endpoints."""
        return EdgePipelinePhase(self.portia_config)

    def axiom(self) -> 'EdgePipelineAxiom':
        """Return a handle over the pipeline axiom endpoints."""
        return EdgePipelineAxiom(self.portia_config)

    def specification(self) -> 'EdgePipelineSpecification':
        """Return a handle over the pipeline specification endpoints."""
        return EdgePipelineSpecification(self.portia_config)
class EdgePipelinePhase(object):
    """CRUD-style wrapper around the pipeline phase endpoints."""

    def __init__(self, portia_config: dict):
        """Keep Portia's configuration arguments."""
        self.portia_config = portia_config

    def list(self) -> list:
        """List every pipeline phase."""
        return phases.index(self.portia_config)

    def create(self, payload):
        """Create a new pipeline phase from *payload*."""
        return phases.store(self.portia_config, payload)

    def display(self, phase_name):
        """Fetch a single pipeline phase by name."""
        return phases.show(self.portia_config, phase_name)

    def update(self, phase_name, payload):
        """Update the named pipeline phase with *payload*."""
        return phases.update(self.portia_config, phase_name, payload)
class EdgePipelineAxiom(object):
    """CRUD-style wrapper around the pipeline axiom endpoints."""

    def __init__(self, portia_config: dict):
        """Keep Portia's configuration arguments."""
        self.portia_config = portia_config

    def list(self):
        """List every pipeline axiom."""
        return axioms.index(self.portia_config)

    def create(self, payload):
        """Create a new pipeline axiom from *payload*."""
        return axioms.store(self.portia_config, payload)

    def display(self, axiom_name, show_params=False):
        """Fetch a single axiom by name, optionally with its parameters."""
        return axioms.show(self.portia_config, axiom_name, show_params)

    def update(self, axiom_name, payload):
        """Update the named axiom with *payload*."""
        return axioms.update(self.portia_config, axiom_name, payload)

    def delete(self, axiom_name):
        """Remove the named axiom."""
        return axioms.destroy(self.portia_config, axiom_name)
class EdgePipelineSpecification(object):
    """CRUD-style wrapper around the pipeline specification endpoints."""

    def __init__(self, portia_config: dict):
        """Keep Portia's configuration arguments."""
        self.portia_config = portia_config

    def list(self):
        """List every pipeline specification."""
        return specs.index(self.portia_config)

    def create(self, payload):
        """Create a new specification from *payload*."""
        return specs.store(self.portia_config, payload)

    def display(self, specName):
        """Fetch a single specification by name."""
        return specs.show(self.portia_config, specName)

    def update(self, specName, payload):
        """Update the named specification with *payload*."""
        return specs.update(self.portia_config, specName, payload)

    def delete(self, specName):
        """Remove the named specification."""
        return specs.destroy(self.portia_config, specName)
class EdgeDevice(object):
    """Handle over all data endpoints that only require an Edge ID."""

    def __init__(self, edge_id: str, portia_config: dict):
        """Bind this handle to *edge_id* and Portia's configuration."""
        self.edge_id = edge_id
        self.portia_config = portia_config

    def port(self, port: int) -> 'EdgeDevicePort':
        """Narrow this handle down to a single device port."""
        return EdgeDevicePort(self, port, self.portia_config)

    def dimension(self, dimension: int) -> 'EdgeDeviceDimensionFromDevice':
        """Narrow this handle down to a single dimension code."""
        return EdgeDeviceDimensionFromDevice(
            self, dimension, self.portia_config)

    def ports(self, last: bool=False, params: dict=None) -> object:
        """List the device's ports; *last* also fetches each port's last package."""
        listing = describe.device_ports(
            self.portia_config, self.edge_id, last, params)
        return add_humanize_method(listing)

    def profile(
        self,
        strategy: 'ProfileStrategies'=profile.ProfileStrategies.BY_ZERO_PORT,
        interval: int=30,
        params: dict=None
    ) -> dict:
        """Build the device's profile over *interval* minutes with *strategy*."""
        result = profile.device_profile(
            self.portia_config, self.edge_id, strategy, interval, params)
        return add_humanize_method(result)
class EdgeDevicePort(object):
    """Handle over the data endpoints that need an Edge ID plus a port."""

    def __init__(
        self, edge_device: EdgeDevice, port: int, portia_config: dict
    ):
        """Derive the Edge ID from *edge_device* and bind to *port*."""
        self.edge_id = edge_device.edge_id
        self.port = port
        self.portia_config = portia_config

    def sensor(self, sensor: int) -> 'EdgeDeviceSensor':
        """Narrow this handle down to a single sensor."""
        return EdgeDeviceSensor(self, sensor, self.portia_config)

    def dimension(self, dimension: int) -> 'EdgeDeviceDimensionFromPort':
        """Narrow this handle down to a single dimension code."""
        return EdgeDeviceDimensionFromPort(self, dimension, self.portia_config)

    def sensors(self, last: bool=False, params: dict=None) -> object:
        """List the port's sensors; *last* also fetches each sensor's last package."""
        listing = describe.device_port_sensors(
            self.portia_config, self.edge_id, self.port, last, params)
        return add_humanize_method(listing)

    def dimensions(self, last: bool=False, params: dict=None) -> object:
        """List the port's dimensions; *last* also fetches each one's last package."""
        listing = describe.device_port_dimensions(
            self.portia_config, self.edge_id, self.port, last, params)
        return add_humanize_method(listing)

    def profile(
        self,
        strategy: 'ProfileStrategies'=profile.ProfileStrategies.BY_ZERO_PORT,
        interval: int=30,
        params: dict=None
    ) -> dict:
        """Build the port's profile over *interval* minutes with *strategy*."""
        result = profile.port_profile(
            self.portia_config, self.edge_id, self.port, strategy, interval,
            params)
        return add_humanize_method(result)
class EdgeDeviceSensor(object):
    """Handle over the data endpoints that need an Edge ID, port and sensor."""

    def __init__(
        self,
        edge_device_port: EdgeDevicePort,
        sensor: int,
        portia_config: dict
    ):
        """Derive Edge ID and port from *edge_device_port*, bind to *sensor*."""
        self.edge_id = edge_device_port.edge_id
        self.port = edge_device_port.port
        self.sensor = sensor
        self.portia_config = portia_config

    def dimension(self, dimension: int) -> 'EdgeDeviceDimensionFromSensor':
        """Narrow this handle down to a single dimension code."""
        return EdgeDeviceDimensionFromSensor(
            self, dimension, self.portia_config)

    def event(self, event: int) -> 'EdgeDeviceEventFromSensor':
        """Narrow this handle down to a single event code."""
        return EdgeDeviceEventFromSensor(self, event, self.portia_config)

    def dimensions(self, last: bool=False, params: dict=None) -> object:
        """List the sensor's dimensions; *last* also fetches each one's last package."""
        listing = describe.device_port_sensor_dimensions(
            self.portia_config, self.edge_id, self.port, self.sensor, last,
            params)
        return add_humanize_method(listing)

    def profile(
        self,
        strategy: 'ProfileStrategies'=profile.ProfileStrategies.BY_ZERO_PORT,
        interval: int=30,
        params: dict=None
    ) -> dict:
        """Build the sensor's profile over *interval* minutes with *strategy*."""
        result = profile.sensor_profile(
            self.portia_config, self.edge_id, self.port, self.sensor,
            strategy, interval, params)
        return add_humanize_method(result)

    def select(self, last: bool=False, params: dict=None) -> object:
        """Fetch the series stored for this port/sensor pair."""
        series = select.query_by_port_sensor(
            self.portia_config, self.edge_id, self.port, self.sensor, last,
            params)
        return add_humanize_method(series)

    def summary(
        self,
        strategy: 'SummaryStrategies'=summary.SummaryStrategies.PER_HOUR,
        interval=1,
        params=None
    ) -> object:
        """Summarize this port/sensor pair per *strategy* over *interval*."""
        result = summary.query_by_port_sensor(
            self.portia_config, self.edge_id, self.port, self.sensor,
            strategy, interval, params)
        return add_humanize_method(result)

    def events(self, last: bool=False, params: dict=None) -> object:
        """Fetch the events stored for this port/sensor pair."""
        found = events.query_by_port_sensor(
            self.portia_config, self.edge_id, self.port, self.sensor, last,
            params)
        return add_humanize_method(found)
class EdgeDeviceDimensionFromDevice(object):
    """Handle over the data endpoints that need an Edge ID and a dimension."""

    def __init__(
        self,
        edge_device: EdgeDevice,
        dimension: int,
        portia_config: dict
    ):
        """Derive the Edge ID from *edge_device* and bind to *dimension*."""
        self.edge_id = edge_device.edge_id
        self.dimension = dimension
        self.portia_config = portia_config

    def summary(
        self,
        series: list=None,
        strategy: 'SummaryStrategies'=summary.SummaryStrategies.PER_HOUR,
        interval=1,
        params=None
    ) -> object:
        """Summarize the chosen *series* of this dimension per *strategy*."""
        result = summary.query_device_by_dimension(
            self.portia_config, self.edge_id, self.dimension, series,
            strategy, interval, params)
        return add_humanize_method(result)
class EdgeDeviceDimensionFromPort(object):
    """Handle over the endpoints that need an Edge ID, port and dimension."""

    def __init__(
        self,
        edge_device_port: EdgeDevicePort,
        dimension: int,
        portia_config: dict
    ):
        """Derive Edge ID and port from *edge_device_port*, bind to *dimension*."""
        self.edge_id = edge_device_port.edge_id
        self.port = edge_device_port.port
        self.dimension = dimension
        self.portia_config = portia_config

    def event(self, event: int) -> 'EdgeDeviceEventFromDimension':
        """Narrow this handle down to a single event code."""
        return EdgeDeviceEventFromDimension(self, event, self.portia_config)

    def select(self, last: bool=False, params: dict=None) -> object:
        """Fetch the series stored for this port/dimension pair."""
        series = select.query_by_port_dimension(
            self.portia_config, self.edge_id, self.port, self.dimension,
            last, params)
        return add_humanize_method(series)

    def events(self, last: bool=False, params: dict=None) -> object:
        """Fetch the events stored for this port/dimension pair."""
        found = events.query_by_port_dimension(
            self.portia_config, self.edge_id, self.port, self.dimension,
            last, params)
        return add_humanize_method(found)
class EdgeDeviceDimensionFromSensor(object):
    """Handle over the endpoints that need Edge ID, port, sensor and dimension."""

    def __init__(
        self,
        edge_device_sensor: EdgeDeviceSensor,
        dimension: int,
        portia_config: dict
    ):
        """Derive Edge ID, port and sensor from *edge_device_sensor*."""
        self.edge_id = edge_device_sensor.edge_id
        self.port = edge_device_sensor.port
        self.sensor = edge_device_sensor.sensor
        self.dimension = dimension
        self.portia_config = portia_config

    def event(self, event: int) -> 'EdgeDeviceEventFromSensorDimension':
        """Narrow this handle down to a single event code."""
        return EdgeDeviceEventFromSensorDimension(
            self, event, self.portia_config)

    def select(self, last: bool=False, params: dict=None) -> object:
        """Fetch the series stored for this port/sensor/dimension triple."""
        series = select.query_by_port_sensor_dimension(
            self.portia_config, self.edge_id, self.port, self.sensor,
            self.dimension, last, params)
        return add_humanize_method(series)

    def summary(
        self,
        strategy: 'SummaryStrategies'=summary.SummaryStrategies.PER_HOUR,
        interval=1,
        params=None
    ) -> object:
        """Summarize this port/sensor/dimension triple per *strategy*."""
        result = summary.query_by_port_sensor_dimension(
            self.portia_config, self.edge_id, self.port, self.sensor,
            self.dimension, strategy, interval, params)
        return add_humanize_method(result)

    def events(self, last: bool=False, params: dict=None) -> object:
        """Fetch the events stored for this port/sensor/dimension triple."""
        found = events.query_by_port_sensor_dimension(
            self.portia_config, self.edge_id, self.port, self.sensor,
            self.dimension, last, params)
        return add_humanize_method(found)
class EdgeDeviceEventFromSensor(object):
    """Handle over the endpoints that need Edge ID, port, sensor and event."""

    def __init__(
        self,
        edge_device_sensor: EdgeDeviceSensor,
        event: int,
        portia_config: dict
    ):
        """Derive Edge ID, port and sensor from *edge_device_sensor*."""
        self.edge_id = edge_device_sensor.edge_id
        self.port = edge_device_sensor.port
        self.sensor = edge_device_sensor.sensor
        self.event = event
        self.portia_config = portia_config

    def events(self, last: bool=False, params: dict=None) -> object:
        """Fetch the events matching this port/sensor/event combination."""
        found = events.query_by_port_sensor_event(
            self.portia_config, self.edge_id, self.port, self.sensor,
            self.event, last, params)
        return add_humanize_method(found)
class EdgeDeviceEventFromDimension(object):
    """Handle over the endpoints that need Edge ID, port, dimension and event."""

    def __init__(
        self,
        edge_device_dimension_from_port: EdgeDeviceDimensionFromPort,
        event: int,
        portia_config: dict
    ):
        """Derive Edge ID, port and dimension from the parent handle."""
        self.edge_id = edge_device_dimension_from_port.edge_id
        self.port = edge_device_dimension_from_port.port
        self.dimension = edge_device_dimension_from_port.dimension
        self.event = event
        self.portia_config = portia_config

    def events(self, last: bool=False, params: dict=None) -> object:
        """Fetch the events matching this port/dimension/event combination."""
        found = events.query_by_port_dimension_event(
            self.portia_config, self.edge_id, self.port, self.dimension,
            self.event, last, params)
        return add_humanize_method(found)
class EdgeDeviceEventFromSensorDimension(object):
    """Handle keyed by Edge ID, port, sensor, dimension and event code."""

    def __init__(
        self,
        edge_device_dimension_from_sensor: EdgeDeviceDimensionFromSensor,
        event: int,
        portia_config: dict
    ):
        """Derive Edge ID, port, sensor and dimension from the parent handle."""
        self.edge_id = edge_device_dimension_from_sensor.edge_id
        self.port = edge_device_dimension_from_sensor.port
        self.sensor = edge_device_dimension_from_sensor.sensor
        self.dimension = edge_device_dimension_from_sensor.dimension
        self.event = event
        self.portia_config = portia_config

    def events(self, last: bool=False, params: dict=None) -> object:
        """Fetch the events matching all five identifiers."""
        found = events.query_by_port_sensor_dimension_event(
            self.portia_config, self.edge_id, self.port, self.sensor,
            self.dimension, self.event, last, params)
        return add_humanize_method(found)
| StarcoderdataPython |
1716969 | <filename>tests/test_collections.py
import pytest
from mlconfig.collections import AttrDict
def test_attrdict_init():
    """Keyword arguments become attributes on construction."""
    attrs = AttrDict(a=1, b=2)
    assert attrs.a == 1
    assert attrs.b == 2
def test_attrdict_flat():
    """flat() collapses nested dicts into dot-separated keys."""
    nested = {'a': 0, 'b': {'c': 1, 'd': {'e': 2, 'f': 3}}}
    expected = {'a': 0, 'b.c': 1, 'b.d.e': 2, 'b.d.f': 3}
    assert AttrDict(nested).flat() == expected
def test_attrdict_to_dict():
    """to_dict() converts attribute assignments back into a plain dict."""
    attrs = AttrDict()
    attrs.a = 0
    attrs.b = 1
    assert attrs.to_dict() == {'a': 0, 'b': 1}
def test_attrdict_immutable():
    """Attribute assignment fails once the dict is made immutable."""
    attrs = AttrDict()
    attrs.set_immutable()
    with pytest.raises(AttributeError):
        attrs.a = 0
def test_attrdict_is_immutable():
    """is_immutable() flips from False to True after set_immutable()."""
    attrs = AttrDict(a=0, b=1)
    assert not attrs.is_immutable()
    attrs.set_immutable()
    assert attrs.is_immutable()
| StarcoderdataPython |
3324791 | <filename>server/ui_tabs/playlist_tab.py
import wx
class PlaylistEditFrame(wx.Panel):
    # wx panel for the "edit/format playlists" tab of the server UI.
    def __init__(self, parent):
        wx.Panel.__init__(self, parent)
        wx.StaticText(self, -1, "This page for editing/formatting playlists", (20, 20))
    def update(self, args):
        """Input to this method is the json form of the get_file_data from the music module"""
        # NOTE(review): ``self.output`` is never created in __init__, so
        # calling update() as written raises AttributeError. Presumably a
        # text control named ``output`` was meant to be added to the panel —
        # confirm the intended widget before fixing.
        self.output.SetValue(args)
1740595 | import sys
import py
from pypy.translator.test.snippet import try_raise_choose
from pypy.rlib.rarithmetic import r_uint, ovfcheck, ovfcheck_lshift
from pypy.rpython.test.test_exception import BaseTestException
from pypy.translator.llvm.test.runtest import *
class TestLLVMException(LLVMTest, BaseTestException):
    # Runs the shared exception test suite against the LLVM backend;
    # two inherited cases are disabled pending backend-specific handling.
    def test_raise_and_catch_other(self):
        py.test.skip('XXX special case me')
    def test_raise_prebuilt_and_catch_other(self):
        py.test.skip('XXX special case me')
# Plain marker exception raised and caught by the tests below.
class TestException(Exception):
    pass
class MyException(Exception):
    """Test exception carrying an integer payload ``n``."""
    def __init__(self, n):
        self.n = n
def getitem(l, i): #LookupError, KeyError
    """Indexing helper that raises TypeError/IndexError for the tests below.

    NOTE(review): for negative *i* this computes ``len(l) - i`` (not
    ``len(l) + i``), so e.g. i = -1 maps past the end and raises IndexError
    instead of indexing from the back. The reraise tests below assert
    ``f(-1) == 42`` (the exception path), so this behavior is load-bearing —
    do not "fix" it to real negative indexing.
    """
    if not isinstance(i, int):
        raise TypeError
    if i < 0:
        i = len(l) - i
    if i>= len(l):
        raise IndexError
    return l[i]
def test_simple1():
    """try/except/else translation: raise_ either raises or returns 3."""
    def raise_(i):
        if i:
            raise TestException()
        else:
            return 3
    def fn(i):
        try:
            a = raise_(i) + 11
            b = raise_(i) + 12
            c = raise_(i) + 13
            return a+b+c
        except TestException:
            return 7
        else:
            return 3
    f = compile_function(fn, [int])
    assert f(0) == fn(0)
    assert f(1) == fn(1)
def test_simple2():
    """Bare ``except`` around the getitem helper."""
    def fn(n):
        lst = range(10)
        try:
            getitem(lst,n)
        except:
            return 2
        return 4
    f = compile_function(fn, [int])
    assert f(-1) == fn(-1)
    assert f( 0) == fn( 0)
    assert f(10) == fn(10)
def test_simple3():
    """Multiple except clauses, including a bare catch-all."""
    def raise_(i):
        if i == 0:
            raise TestException()
        elif i == 1:
            raise MyException(42)
        else:
            return 3
    def fn(i):
        try:
            a = raise_(i) + 11
            b = raise_(i) + 12
            c = raise_(i) + 13
            return a+b+c
        except TestException:
            return 7
        except MyException:
            return 123
        except:
            return 22
        return 66
    f = compile_function(fn, [int])
    assert f(0) == fn(0)
    assert f(1) == fn(1)
    assert f(2) == fn(2)
def test_pass_exc():
    """Silently swallowing an exception with ``except: pass``."""
    def fn(n):
        lst = range(10)
        try:
            getitem(lst,n)
        except:
            pass
        return 4
    f = compile_function(fn, [int])
    assert f(-1) == fn(-1)
    assert f( 0) == fn( 0)
    assert f(10) == fn(10)
def test_reraise1():
    """Bare ``raise`` re-raises; the f(-1)/f(10) == 42 asserts rely on
    getitem raising for those inputs (see the note on getitem)."""
    def fnpart2(n):
        lst = range(10)
        try:
            getitem(lst,n)
        except:
            raise
        return 4
    def fn(n):
        try:
            return fnpart2(n)
        except:
            return 42
    f = compile_function(fn, [int])
    assert f(-1) == fn(-1)
    assert f(-1) == 42
    assert f( 0) == fn( 0)
    assert f( 0) != 42
    assert f(10) == fn(10)
    assert f(10) == 42
def test_reraise2():
    """Re-raising the caught instance via ``raise e`` (Python 2 syntax)."""
    def fnpart2(n):
        lst = range(10)
        try:
            getitem(lst,n)
        except Exception, e:
            raise e
        return 4
    def fn(n):
        try:
            return fnpart2(n)
        except:
            return 42
    f = compile_function(fn, [int])
    assert f(-1) == fn(-1)
    assert f(-1) == 42
    assert f( 0) == fn( 0)
    assert f( 0) != 42
    assert f(10) == fn(10)
    assert f(10) == 42
def test_simple_exception():
    """Catching a specific exception class (IndexError)."""
    def fn(n):
        lst = range(10)
        try:
            getitem(lst,n)
        except IndexError:
            return 2
        return 4
    f = compile_function(fn, [int])
    for i in range(10):
        assert f(i) == fn(i)
    for i in range(10, 20):
        assert f(i) == fn(i)
def test_two_exceptionsA():
    """Two specific except clauses; only IndexError can actually fire."""
    def fn(n):
        lst = range(10)
        try:
            getitem(lst,n)
        except IndexError:
            return 2
        except KeyError:
            return 3
        return 4
    f = compile_function(fn, [int])
    for i in range(10):
        assert f(i) == fn(i)
    for i in range(10, 20):
        assert f(i) == fn(i)
def test_catch_base_exception():
    """Catching via a base class (LookupError covers IndexError)."""
    def fn(n):
        lst = range(10)
        try:
            getitem(lst,n)
        except LookupError:
            return 2
        return 4
    f = compile_function(fn, [int])
    for i in range(10):
        assert f(i) == fn(i)
    for i in range(10, 20):
        assert f(i) == fn(i)
def test_catches():
    """Exception instance attributes (e.n) must survive translation."""
    def raises(i):
        if i == 3:
            raise MyException, 12
        if i == 4:
            raise IndexError
        if i > 5:
            raise MyException(i)
        return 1
    def fn(i):
        try:
            return raises(i)
        except MyException, e:
            return e.n
        except:
            return 123
    f = compile_function(fn, [int])
    assert f(1) == fn(1)
    assert f(2) == fn(2)
    assert f(3) == fn(3)
    #py.test.raises(RuntimeError, "f(4)") #XXX would like to test: py.test.not_raises(....)
    assert f(5) == fn(5)
    assert f(6) == fn(6)
    assert f(13) == fn(13)
def test_try_raise_choose():
    """Snippet exercising try/raise selection across int inputs."""
    f = compile_function(try_raise_choose, [int])
    for i in [-1, 0, 1, 2]:
        assert f(i) == i
def test_two_exceptionsB():
    """Two sequential try blocks: only the first one raises."""
    def fn1():
        raise Exception
    def fn2():
        return 10
    def two_exceptionsB():
        r = 50
        try:
            fn1()
            r += 1
        except:
            r += 100
        try:
            r += fn2()
        except:
            r += 300
        r += fn2()
        return r
    f = compile_function(two_exceptionsB, [])
    assert f() == two_exceptionsB()
def test_raise_outside_testfn():
    """Exceptions raised two call levels below the compiled entry point."""
    def raiser(n):
        if n < 0:
            raise ValueError("hello")
        else:
            raise MyException("world")
    def intermediate(n):
        raiser(n)
    def testfn(n):
        try:
            intermediate(n)
        except ValueError:
            return 1
        except Exception:
            return 2
        return 0
    # py.magic's AssertionError patch is reverted around compilation and
    # restored afterwards (see no_magic/restore_magic below).
    saved = no_magic()
    try:
        f = compile_function(testfn, [int])
        assert f(1) == testfn(1)
        assert f(-1) == testfn(-1)
    finally:
        restore_magic(saved)
def test_miss_base():
    """Catching a subclass (B) must not also catch its base (A)."""
    class A(Exception):
        pass
    class B(A):
        pass
    def raise_exception(n):
        if n == 1:
            raise A
        elif n == 0:
            raise B
        else:
            pass #i.e. don't raise
    def fnpart2(n):
        try:
            raise_exception(n)
        except B, exc:
            return 10
        return 20
    def fn(n):
        try:
            return fnpart2(n)
        except:
            return 765
    f = compile_function(fn, [int])
    assert f(0) == fn(0)
    assert f(1) == fn(1)
    assert f(2) == fn(2)
def no_magic():
    """Revert py.magic's AssertionError patch; True when it was active."""
    import __builtin__
    try:
        py.magic.revert(__builtin__, 'AssertionError')
        return True
    except ValueError:
        # Patch was not installed; nothing to restore later.
        return False
def restore_magic(saved):
    """Re-enable py.magic assertions when no_magic() had reverted them."""
    if saved:
        py.magic.invoke(assertion=True)
| StarcoderdataPython |
3303512 | <reponame>linshaoyong/leetcode<gh_stars>1-10
class Solution(object):
    def robotSim(self, commands, obstacles):
        """Simulate the walking robot (LeetCode 874).

        :type commands: List[int]  (-1 turn right, -2 turn left, k>0 move k)
        :type obstacles: List[List[int]]
        :rtype: int  -- maximum squared Euclidean distance from the origin
        """
        # Facing is a unit vector; start pointing north.
        dx, dy = 0, 1
        px, py, best = 0, 0, 0
        blocked = set(map(tuple, obstacles))
        for cmd in commands:
            if cmd == -1:
                # Rotate 90 degrees clockwise.
                dx, dy = dy, -dx
            elif cmd == -2:
                # Rotate 90 degrees counter-clockwise.
                dx, dy = -dy, dx
            else:
                # Advance one cell at a time, stopping before an obstacle.
                # Distance^2 is convex along a straight segment, so checking
                # every step yields the same maximum as endpoint checking.
                for _ in range(cmd):
                    if (px + dx, py + dy) in blocked:
                        break
                    px += dx
                    py += dy
                    best = max(best, px * px + py * py)
        return best
def test_robot_sim():
    """Both LeetCode examples for the walking-robot simulation."""
    solver = Solution()
    assert solver.robotSim([4, -1, 3], []) == 25
    assert solver.robotSim([4, -1, 4, -2, 4], [[2, 4]]) == 65
| StarcoderdataPython |
3228257 | #score_scraper.py
#i'm not trying to pay for jacked transcriptions of liscenced works
#Zoe, 2020
from bs4 import BeautifulSoup
from PyPDF2 import PdfFileMerger
import requests
import cairosvg
import img2pdf
import re
import os
import sys
# ---- fetch the score page -------------------------------------------------
# With no CLI argument, prompt for the URL; with one argument, use it.
if len(sys.argv) == 1:
    score = input("Enter the musecore url for the score you want to download: ")
    r = requests.get(score.rstrip())
elif len(sys.argv) == 2:
    r = requests.get(sys.argv[-1])
else:
    # Previously any extra argument fell through and crashed later with a
    # NameError on `r`; fail fast with a usage message instead.
    sys.exit("usage: score_scraper.py [score_url]")

soup = BeautifulSoup(r.text, 'html.parser')
page = str(soup)

# The per-page image URL is embedded JSON-escaped (http:\/\/...) in the page
# source, hence the literal backslash-slash (\\/) sequences in the pattern.
# NOTE(review): pattern is tied to musescore's current URL layout - verify
# against a live page if scraping starts failing.
q = re.search(r'(http|https):\\/\\/([\w]*)\.(([\w]*)\\/){5}(([0-9]*)\\/){4}([\w]*)\\/([\w]*).([\w]{3})', page)
source = q.group(0)
# Drop the JSON escaping backslashes to obtain a plain URL.
sourcefix = ''.join(filter(lambda x: x not in ['\\'], source))
start = sourcefix[:-5]  # everything up to the page number, e.g. "...score_"
end = sourcefix[-4:]    # the file extension, ".svg" or ".png"
print(end + ' type image')

# Pages are numbered score_0, score_1, ...; keep fetching until a non-200.
z = 0
resp = []
while True:
    url = start + str(z) + end
    res = requests.get(url)
    if res.status_code != 200:
        break
    resp.append(res)
    z += 1

# Derive the output name from the <title> tag; the site appends
# "Sheet music ..." after the actual piece name.
title = str(soup.find('title'))
trim = title.split('Sheet music')[0]
trim = trim.rstrip()
if trim[0] == '<':
    # No "Sheet music" marker found: strip the leading "<title>" tag instead.
    trim = trim.split('>')[1]

# One intermediate PDF per fetched page image.
titles = [trim + str(page_no) + '.pdf' for page_no in range(z)]

if end == '.svg':
    for i in range(z):
        cairosvg.svg2pdf(resp[i].text, write_to=titles[i])
elif end == '.png':
    for i in range(z):
        with open(titles[i], 'wb') as f:
            f.write(img2pdf.convert(resp[i].content))

# Merge the per-page PDFs into one document, then delete the intermediates.
merger = PdfFileMerger()
for pdf in titles:
    merger.append(pdf)
merger.write(trim + '.pdf')
merger.close()

for pdf in titles:
    try:
        os.remove(pdf)
    except OSError as e:
        print("Error: %s - %s." % (e.filename, e.strerror))
| StarcoderdataPython |
3218368 | <reponame>maximilianschaller/genforce
# python3.7
"""Contains the runner for StyleGAN."""
import os
import sys
from .base_gan_runner import BaseGANRunner
sys.path.append(os.getcwd())
from idinvert_pytorch.utils.inverter import StyleGANInverter
__all__ = ['FourierRegularizedStyleGANRunner']
class FourierRegularizedStyleGANRunner(BaseGANRunner):
    """Defines the runner for StyleGAN."""
    def __init__(self, config, logger):
        # Base runner sets up models, optimizers, losses and stats.
        super().__init__(config, logger)
        # Level-of-detail may already have been restored by load(); keep it,
        # otherwise default to None until a checkpoint supplies one.
        self.lod = getattr(self, 'lod', None)
        # GAN-inversion helper presumably used by the Fourier regularization
        # loss; hyper-parameters are hard-coded for the FFHQ-256 inverter.
        # NOTE(review): inverter is built eagerly even if the loss never
        # uses it - confirm against the loss implementation.
        self.inverter = StyleGANInverter(self.models['generator'], "styleganinv_ffhq256", learning_rate=0.01,
                                         iteration=500, reconstruction_loss_weight=1.0, perceptual_loss_weight=5e-5,
                                         regularization_loss_weight=2.0, logger=self.logger)
    def build_models(self):
        # Build generator/discriminator, then read the EMA horizon: the
        # number of images over which the smoothed generator would average.
        super().build_models()
        self.g_smooth_img = self.config.modules['generator'].get(
            'g_smooth_img', 10000)
        # Smoothed-generator copy is currently disabled (see train_step).
        #self.models['generator_smooth'] = deepcopy(self.models['generator'])
    def build_loss(self):
        # Register the EMA decay factor so it appears in the running stats.
        super().build_loss()
        self.running_stats.add(
            f'Gs_beta', log_format='.4f', log_strategy='CURRENT')
    def train_step(self, data, **train_kwargs):
        """One optimization step; only the generator is updated here."""
        # Set level-of-details.
        G = self.get_module(self.models['generator'])
        D = self.get_module(self.models['discriminator'])
        #Gs = self.get_module(self.models['generator_smooth'])
        D.lod.data.fill_(self.lod)
        #G.net.lod.data.fill_(self.lod)
        #Gs.synthesis.lod.data.fill_(self.lod)
        """
        # Update discriminator.
        self.set_model_requires_grad('discriminator', True)
        self.set_model_requires_grad('generator', False)
        d_loss = self.loss.d_loss(self, data)
        self.optimizers['discriminator'].zero_grad()
        d_loss.backward()
        self.optimizers['discriminator'].step()
        """
        # Life-long update for generator.
        # beta is the per-step EMA decay derived from the g_smooth_img horizon.
        beta = 0.5 ** (self.batch_size * self.world_size / self.g_smooth_img)
        self.running_stats.update({'Gs_beta': beta})
        """
        self.moving_average_model(model=self.models['generator'],
                                  avg_model=self.models['generator_smooth'],
                                  beta=beta)
        """
        # Update generator.
        # Only every D_repeats-th iteration (discriminator update disabled above).
        if self._iter % self.config.get('D_repeats', 1) == 0:
            self.set_model_requires_grad('discriminator', False)
            self.set_model_requires_grad('generator', True)
            g_loss = self.loss.g_loss(self, data)
            self.optimizers['generator'].zero_grad()
            g_loss.backward()
            self.optimizers['generator'].step()
    def load(self, **kwargs):
        """Load a checkpoint and recover the level-of-detail from it."""
        super().load(**kwargs)
        G = self.get_module(self.models['generator'])
        D = self.get_module(self.models['discriminator'])
        #Gs = self.get_module(self.models['generator_smooth'])
        if kwargs['running_metadata']:
            # Generator and discriminator must agree on the restored lod.
            lod = G.net.synthesis.lod.cpu().tolist()
            assert lod == D.lod.cpu().tolist()
            #assert lod == Gs.synthesis.lod.cpu().tolist()
            self.lod = lod
| StarcoderdataPython |
1616301 | from articleScraper import getElTiempoArticles
# Fetch the latest El Tiempo articles and show the first one.
articles = getElTiempoArticles()
first = articles[0]
print("Title:\n%s\n" % first['title'])
print("Text:\n%s\n" % first['text'])
print("Summary:\n%s\n " % first['summary'])
| StarcoderdataPython |
92411 | <filename>setup.py
import zcov
import os
from setuptools import setup, find_packages
# setuptools expects to be invoked from within the directory of setup.py, but
# it is nice to allow:
#   python path/to/setup.py install
# to work (for scripts, etc.), so change into this file's directory first.
os.chdir(os.path.dirname(os.path.abspath(__file__)))

LONG_DESCRIPTION = """\
*zcov*
++++++
zcov is wrapper around the basic facilities of gcov for generating pretty
summaries for entire code bases. It is similar to lcov with an emphasis on nicer
HTML output (including, for example, branch coverage), and a greatly simplified
command line interface which tries to work around deficiencies in the metadata
provided by gcov.
"""

setup(
    name="zcov",
    version=zcov.__version__,
    author=zcov.__author__,
    author_email=zcov.__email__,
    license='BSD',
    description="A Code Coverage Reporting Tool for C/C++",
    keywords='code coverage C++ testing',
    long_description=LONG_DESCRIPTION,
    packages=find_packages(),
    entry_points={
        'console_scripts': [
            'zcov = zcov.main:main',
        ],
    },
    install_requires=[],
)
| StarcoderdataPython |
95477 | from . import benchmark
from . import statistics | StarcoderdataPython |
3201011 | import logging
from . import mixin
from . import core
from . import Constructs
from .decorators import (
_display_or_return,
_inplace_enabled,
_inplace_enabled_define_and_cleanup,
_manage_log_level_via_verbosity,
)
logger = logging.getLogger(__name__)
class Domain(mixin.FieldDomain, mixin.Container, core.Domain):
    """A domain of the CF data model.

    The domain represents a set of discrete "locations" in what
    generally would be a multi-dimensional space, either in the real
    world or in a model's simulated world. These locations correspond
    to individual data array elements of a field construct

    The domain is defined collectively by the following constructs of
    the CF data model: domain axis, dimension coordinate, auxiliary
    coordinate, cell measure, coordinate reference and domain
    ancillary constructs.

    .. versionadded:: (cfdm) 1.7.0

    """

    def __new__(cls, *args, **kwargs):
        """This must be overridden in subclasses.

        .. versionadded:: (cfdm) 1.7.0

        """
        instance = super().__new__(cls)
        instance._Constructs = Constructs
        return instance

    def __repr__(self):
        """Called by the `repr` built-in function.

        x.__repr__() <==> repr(x)

        """
        shape = sorted(
            [
                domain_axis.get_size(None)
                for domain_axis in self.domain_axes(todict=True).values()
            ]
        )
        shape = str(shape)
        shape = shape[1:-1]
        return f"<{self.__class__.__name__}: {{{shape}}}>"

    def __str__(self):
        """Called by the `str` built-in function.

        x.__str__() <==> str(x)

        """

        def _print_item(self, cid, variable, axes):
            """Private function called by __str__."""
            x = [variable.identity(default=f"key%{cid}")]

            if variable.has_data():
                shape = [axis_names[axis] for axis in axes]
                shape = str(tuple(shape)).replace("'", "")
                shape = shape.replace(",)", ")")
                x.append(shape)
            elif (
                variable.construct_type
                in ("auxiliary_coordinate", "domain_ancillary")
                and variable.has_bounds()
                and variable.bounds.has_data()
            ):
                # Construct has no data but it does have bounds
                shape = [axis_names[axis] for axis in axes]
                shape.extend(
                    [str(n) for n in variable.bounds.data.shape[len(axes) :]]
                )
                shape = str(tuple(shape)).replace("'", "")
                shape = shape.replace(",)", ")")
                x.append(shape)
            elif (
                hasattr(variable, "nc_get_external")
                and variable.nc_get_external()
            ):
                ncvar = variable.nc_get_variable(None)
                if ncvar is not None:
                    # Fixed: the closing parenthesis was missing, producing
                    # an unbalanced "(external variable: ncvar%..." string.
                    x.append(f" (external variable: ncvar%{ncvar})")
                else:
                    x.append(" (external variable)")

            if variable.has_data():
                x.append(f" = {variable.data}")
            elif (
                variable.construct_type
                in ("auxiliary_coordinate", "domain_ancillary")
                and variable.has_bounds()
                and variable.bounds.has_data()
            ):
                # Construct has no data but it does have bounds data
                x.append(f" = {variable.bounds.data}")

            return "".join(x)

        string = []

        axis_names = self._unique_domain_axis_identities()

        construct_data_axes = self.constructs.data_axes()

        x = []
        dimension_coordinates = self.dimension_coordinates(todict=True)
        for axis_cid in sorted(self.domain_axes(todict=True)):
            for cid, dim in dimension_coordinates.items():
                if construct_data_axes[cid] == (axis_cid,):
                    # Fixed: the default was the literal f"key%{0}", which
                    # rendered as "key%0" for every construct; use the
                    # construct key, matching _print_item above.
                    name = dim.identity(default=f"key%{cid}")
                    y = "{0}({1})".format(name, dim.get_data().size)
                    if y != axis_names[axis_cid]:
                        y = "{0}({1})".format(name, axis_names[axis_cid])
                    if dim.has_data():
                        y += " = {0}".format(dim.get_data())
                    x.append(y)
        if x:
            x = "\n : ".join(x)
            string.append(f"Dimension coords: {x}")

        # Auxiliary coordinates
        x = [
            _print_item(self, cid, v, construct_data_axes[cid])
            for cid, v in sorted(
                self.auxiliary_coordinates(todict=True).items()
            )
        ]
        if x:
            x = "\n : ".join(x)
            string.append(f"Auxiliary coords: {x}")

        # Cell measures
        x = [
            _print_item(self, cid, v, construct_data_axes[cid])
            for cid, v in sorted(self.cell_measures(todict=True).items())
        ]
        if x:
            x = "\n : ".join(x)
            string.append(f"Cell measures : {x}")

        # Coordinate references
        x = sorted(
            [
                str(ref)
                for ref in list(
                    self.coordinate_references(todict=True).values()
                )
            ]
        )
        if x:
            x = "\n : ".join(x)
            string.append(f"Coord references: {x}")

        # Domain ancillary variables
        x = [
            _print_item(self, cid, anc, construct_data_axes[cid])
            for cid, anc in sorted(
                self.domain_ancillaries(todict=True).items()
            )
        ]
        if x:
            x = "\n : ".join(x)
            string.append(f"Domain ancils : {x}")

        return "\n".join(string)

    @_display_or_return
    def _dump_axes(self, axis_names, display=True, _level=0):
        """Returns a string description of the field's domain axes.

        :Parameters:

            display: `bool`, optional
                If False then return the description as a string. By
                default the description is printed.

            _level: `int`, optional

        :Returns:

            `str`
                A string containing the description.

        **Examples:**

        """
        indent1 = "    " * _level

        w = sorted(
            [
                f"{indent1}Domain Axis: {axis_names[axis]}"
                for axis in self.domain_axes(todict=True)
            ]
        )

        return "\n".join(w)

    def _one_line_description(self, axis_names_sizes=None):
        """Return a one-line description of the domain.

        :Returns:

            `str`
                The description.

        """
        if axis_names_sizes is None:
            axis_names_sizes = self._unique_domain_axis_identities()

        axis_names = ", ".join(sorted(axis_names_sizes.values()))

        return f"{self.identity('')}{{{axis_names}}}"

    @_inplace_enabled(default=False)
    def apply_masking(self, inplace=False):
        """Apply masking as defined by the CF conventions.

        Masking is applied to all metadata constructs with data.

        Masking is applied according to any of the following criteria
        that are applicable:

        * where data elements are equal to the value of the
          ``missing_value`` property;

        * where data elements are equal to the value of the
          ``_FillValue`` property;

        * where data elements are strictly less than the value of the
          ``valid_min`` property;

        * where data elements are strictly greater than the value of
          the ``valid_max`` property;

        * where data elements are within the inclusive range specified
          by the two values of ``valid_range`` property.

        If any of the above properties have not been set the no
        masking is applied for that method.

        Elements that are already masked remain so.

        .. note:: If using the `apply_masking` method on a construct
                  that has been read from a dataset with the
                  ``mask=False`` parameter to the `read` function,
                  then the mask defined in the dataset can only be
                  recreated if the ``missing_value``, ``_FillValue``,
                  ``valid_min``, ``valid_max``, and ``valid_range``
                  properties have not been updated.

        .. versionadded:: (cfdm) 1.8.9.0

        .. seealso:: `{{package}}.Data.apply_masking`, `read`, `write`

        :Parameters:

            {{inplace: `bool`, optional}}

        :Returns:

            `Domain` or `None`
                A new domain construct with masked values, or `None`
                if the operation was in-place.

        **Examples:**

        >>> d = cfdm.example_field(0).domain
        >>> x = d.construct('longitude')
        >>> x.data[[0, -1]] = cfdm.masked
        >>> print(x.data.array)
        [-- 67.5 112.5 157.5 202.5 247.5 292.5 --]
        >>> cfdm.write(d, 'masked.nc')
        >>> no_mask = {{package}}.read('masked.nc', domain=True, mask=False)[0]
        >>> no_mask_x = no_mask.construct('longitude')
        >>> print(no_mask_x.data.array)
        [9.96920997e+36 6.75000000e+01 1.12500000e+02 1.57500000e+02
         2.02500000e+02 2.47500000e+02 2.92500000e+02 9.96920997e+36]
        >>> masked = no_mask.apply_masking()
        >>> masked_x = masked.construct('longitude')
        >>> print(masked_x.data.array)
        [-- 67.5 112.5 157.5 202.5 247.5

        """
        d = _inplace_enabled_define_and_cleanup(self)

        # Apply masking to the metadata constructs
        d._apply_masking_constructs()

        return d

    def climatological_time_axes(self):
        """Return all axes which are climatological time axes.

        This is ascertained by inspecting the values returned by each
        coordinate construct's `is_climatology` method.

        .. versionadded:: (cfdm) 1.8.9.0

        :Returns:

            `set`
                The keys of the domain axis constructs that are
                climatological time axes.

        **Examples:**

        >>> d = cfdm.example_field(0)
        >>> d.climatological_time_axes()
        set()

        """
        data_axes = self.constructs.data_axes()

        out = []

        for ckey, c in self.coordinates(todict=True).items():
            if not c.is_climatology():
                continue

            out.extend(data_axes.get(ckey, ()))

        return set(out)

    @_display_or_return
    def dump(self, display=True, _level=0, _title=None):
        """A full description of the domain.

        The domain components are described without abbreviation with the
        exception of data arrays, which are abbreviated to their first and
        last values.

        .. versionadded:: (cfdm) 1.7.0

        :Parameters:

            display: `bool`, optional
                If False then return the description as a string. By
                default the description is printed.

                *Parameter example:*
                  ``f.dump()`` is equivalent to ``print
                  f.dump(display=False)``.

        :Returns:

            `str` or `None`
                If *display* is True then the description is printed and
                `None` is returned. Otherwise the description is returned
                as a string.

        """
        axis_to_name = self._unique_domain_axis_identities()

        construct_name = self._unique_construct_names()

        construct_data_axes = self.constructs.data_axes()

        string = []

        # Domain axes
        axes = self._dump_axes(axis_to_name, display=False, _level=_level)
        if axes:
            string.append(axes)

        # Dimension coordinates
        dimension_coordinates = self.dimension_coordinates(todict=True)
        for cid, value in sorted(dimension_coordinates.items()):
            string.append("")
            string.append(
                value.dump(
                    display=False,
                    _level=_level,
                    _title=f"Dimension coordinate: {construct_name[cid]}",
                    _axes=construct_data_axes[cid],
                    _axis_names=axis_to_name,
                )
            )

        # Auxiliary coordinates
        auxiliary_coordinates = self.auxiliary_coordinates(todict=True)
        for cid, value in sorted(auxiliary_coordinates.items()):
            string.append("")
            string.append(
                value.dump(
                    display=False,
                    _level=_level,
                    _title=f"Auxiliary coordinate: {construct_name[cid]}",
                    _axes=construct_data_axes[cid],
                    _axis_names=axis_to_name,
                )
            )

        # Domain ancillaries
        for cid, value in sorted(self.domain_ancillaries(todict=True).items()):
            string.append("")
            string.append(
                value.dump(
                    display=False,
                    _level=_level,
                    _title=f"Domain ancillary: {construct_name[cid]}",
                    _axes=construct_data_axes[cid],
                    _axis_names=axis_to_name,
                )
            )

        # Coordinate references
        for cid, value in sorted(
            self.coordinate_references(todict=True).items()
        ):
            string.append("")
            string.append(
                value.dump(
                    display=False,
                    _level=_level,
                    _title=f"Coordinate reference: {construct_name[cid]}",
                    _construct_names=construct_name,
                    _auxiliary_coordinates=tuple(auxiliary_coordinates),
                    _dimension_coordinates=tuple(dimension_coordinates),
                )
            )

        # Cell measures
        for cid, value in sorted(self.cell_measures(todict=True).items()):
            string.append("")
            string.append(
                value.dump(
                    display=False,
                    _key=cid,
                    _level=_level,
                    _title=f"Cell measure: {construct_name[cid]}",
                    _axes=construct_data_axes[cid],
                    _axis_names=axis_to_name,
                )
            )

        string.append("")

        return "\n".join(string)

    @_manage_log_level_via_verbosity
    def equals(
        self,
        other,
        rtol=None,
        atol=None,
        verbose=None,
        ignore_data_type=False,
        ignore_fill_value=False,
        ignore_compression=True,
        ignore_type=False,
    ):
        """Whether two domains are the same.

        .. versionadded:: (cfdm) 1.7.0

        :Returns:

            `bool`

        **Examples:**

        >>> d.equals(d)
        True
        >>> d.equals(d.copy())
        True
        >>> d.equals('not a domain')
        False

        """
        pp = super()._equals_preprocess(
            other, verbose=verbose, ignore_type=ignore_type
        )
        if pp is True or pp is False:
            return pp

        other = pp

        # ------------------------------------------------------------
        # Check the constructs
        # ------------------------------------------------------------
        if not self._equals(
            self.constructs,
            other.constructs,
            rtol=rtol,
            atol=atol,
            verbose=verbose,
            ignore_data_type=ignore_data_type,
            ignore_fill_value=ignore_fill_value,
            ignore_compression=ignore_compression,
        ):
            logger.info(
                f"{self.__class__.__name__}: Different metadata constructs"
            )
            return False

        return True

    def get_filenames(self):
        """Return the file names containing the metadata construct data.

        :Returns:

            `set`
                The file names in normalized, absolute form. If all of
                the data are in memory then an empty `set` is
                returned.

        **Examples:**

        >>> d = {{package}}.example_field(0).domain
        >>> {{package}}.write(d, 'temp_file.nc')
        >>> e = {{package}}.read('temp_file.nc', domain=True)[0]
        >>> e.get_filenames()
        {'temp_file.nc'}

        """
        out = set()

        for c in self.constructs.filter_by_data().values():
            out.update(c.get_filenames())

        return out
| StarcoderdataPython |
47242 | import os
import struct
def readFile(path):
    """Return the full text content of the file at *path*.

    Raises FileNotFoundError if the file does not exist.  The previous
    explicit ``os.path.isfile`` check raced with the open and raised a
    message-less FileNotFoundError; letting ``open`` raise is atomic and
    keeps the offending path in the error message.
    """
    with open(path, 'r') as file:
        return file.read()
def cleaner(source):
    """Strip ';' comments from each line of assembly *source*.

    A comment starts at the first whitespace-separated token beginning
    with ';' and runs to the end of the line.  Tokens are re-joined with
    single spaces, so surrounding whitespace is normalized.

    The previous implementation removed items from the token list while
    iterating over it and located the comment with ``list.index`` (which
    returns the first occurrence); slicing up to the comment token is
    simpler and has no aliasing pitfalls.
    """
    cleaned = []
    for line in source.split('\n'):
        tokens = line.split()
        for i, tok in enumerate(tokens):
            if tok.startswith(';'):
                tokens = tokens[:i]
                break
        cleaned.append(' '.join(tokens))
    return '\n'.join(cleaned)
def write_file(path, header, object_dict, mode):
    """Write the assembled words listed in *header* to *path*.

    mode 'b': write the raw bytes stored in *object_dict* per location.
    mode 't': unpack each 4-byte big-endian word and write it as a
    16-bit binary string, one per line.  Any other mode writes nothing.
    """
    if mode == 'b':
        with open(path, 'wb+') as out:
            for location in header:
                out.write(object_dict[location])
    elif mode == 't':
        with open(path, 'w+') as out:
            for location in header:
                (value,) = struct.unpack('>i', object_dict[location])
                out.write(dectobin(value, 16) + '\n')
def dectobin(decimal, bits):
    """Return *decimal* as a two's-complement binary string of *bits* digits."""
    mask = (1 << bits) - 1
    return format(decimal & mask, '0{}b'.format(bits))
3382185 | <filename>test/libcxx/test/target_info.py<gh_stars>1-10
import locale
import platform
import sys
class TargetInfo(object):
    """Abstract interface describing the platform the test suite targets.

    Concrete subclasses (e.g. LocalTI below) answer queries about the
    platform's identity, version and locale support.
    """
    def platform(self):
        # Short lower-case platform id, e.g. 'linux' or 'darwin'.
        raise NotImplementedError
    def system(self):
        # OS name as reported by the platform, e.g. 'Linux'.
        raise NotImplementedError
    def platform_ver(self):
        # Platform/distribution version string, or None if unknown.
        raise NotImplementedError
    def platform_name(self):
        # Platform/distribution name, or None if unknown.
        raise NotImplementedError
    def supports_locale(self, loc):
        # True if locale *loc* can be activated on the target.
        raise NotImplementedError
class LocalTI(TargetInfo):
    """TargetInfo implementation that inspects the machine running the tests."""

    def platform(self):
        """Return a short lower-case platform id ('linux', 'darwin', ...)."""
        platform_name = sys.platform.lower().strip()
        # Strip the '2' from linux2 (Python 2 reported 'linux2').
        if platform_name.startswith('linux'):
            platform_name = 'linux'
        return platform_name

    def system(self):
        """Return the OS name, e.g. 'Linux' or 'Darwin'."""
        return platform.system()

    def _linux_distribution(self):
        """Return (name, version) of the Linux distribution, or (None, None).

        ``platform.linux_distribution`` was removed in Python 3.8; degrade
        gracefully there instead of raising AttributeError.
        """
        distro = getattr(platform, 'linux_distribution', None)
        if distro is None:
            return None, None
        name, ver, _ = distro()
        return name, ver

    def platform_name(self):
        """Return the lower-cased distribution name on Linux, else None."""
        if self.platform() == 'linux':
            name, _ = self._linux_distribution()
            if name:
                name = name.lower().strip()
                if name:
                    return name
        return None

    def platform_ver(self):
        """Return the lower-cased distribution version on Linux, else None."""
        if self.platform() == 'linux':
            _, ver = self._linux_distribution()
            if ver:
                ver = ver.lower().strip()
                if ver:
                    return ver
        return None

    def supports_locale(self, loc):
        """Return True if locale *loc* can be set (side effect: sets LC_ALL)."""
        try:
            locale.setlocale(locale.LC_ALL, loc)
            return True
        except locale.Error:
            return False
| StarcoderdataPython |
1735035 | <gh_stars>1-10
from customuser.tests.custom_user import *
| StarcoderdataPython |
1780885 | <filename>rdtools/test/energy_from_power_test.py
import pandas as pd
import numpy as np
from rdtools import energy_from_power
import pytest
@pytest.fixture
def times():
    """Five 15-minute timestamps covering 2020-01-01 12:00-13:00."""
    return pd.date_range('20200101 12:00', '20200101 13:00', freq='15T')
@pytest.fixture
def power(times):
    """Triangular power profile (1, 2, 3, 2, 1) on the *times* index."""
    values = [1.0, 2.0, 3.0, 2.0, 1.0]
    return pd.Series(values, index=times)
def test_energy_from_power_single_arg(power):
    """Default call: right-labeled 15-minute energy drops the first stamp."""
    expected = 0.25 * power.iloc[1:]
    expected.name = 'energy_Wh'
    pd.testing.assert_series_equal(energy_from_power(power), expected)
def test_energy_from_power_instantaneous(power):
    """Instantaneous power is trapezoid-integrated between timestamps."""
    trapezoid = 0.25 * (power + power.shift()) / 2
    expected = trapezoid.dropna()
    expected.name = 'energy_Wh'
    result = energy_from_power(power, power_type='instantaneous')
    pd.testing.assert_series_equal(result, expected)
def test_energy_from_power_max_timedelta_inference(power):
    """A gap with no max_timedelta NaNs the affected values and warns."""
    gappy_power = power.drop(power.index[1])
    expected = 0.25 * power.iloc[1:]
    expected.name = 'energy_Wh'
    expected.iloc[:2] = np.nan
    match = 'Fraction of excluded data (.*) exceeded threshold'
    with pytest.warns(UserWarning, match=match):
        result = energy_from_power(gappy_power)
    pd.testing.assert_series_equal(result, expected)
def test_energy_from_power_max_timedelta(power):
    """An explicit max_timedelta bridges the dropped timestamp."""
    gappy_power = power.drop(power.index[1])
    expected = 0.25 * power.iloc[1:]
    expected.name = 'energy_Wh'
    result = energy_from_power(gappy_power,
                               max_timedelta=pd.to_timedelta('30 minutes'))
    pd.testing.assert_series_equal(result, expected)
def test_energy_from_power_upsample(power):
    """Upsampling to 10 minutes interpolates power before integrating."""
    upsampled = power.resample('10T').asfreq().interpolate()
    expected = (upsampled / 6).iloc[1:]
    expected.name = 'energy_Wh'
    result = energy_from_power(power, target_frequency='10T')
    pd.testing.assert_series_equal(result, expected)
def test_energy_from_power_downsample(power):
    """Downsampling to 20 minutes aggregates the interval energies."""
    index = power.resample('20T').asfreq().iloc[1:].index
    expected = pd.Series([0.75, 0.833333333, 0.416666667],
                         index=index, name='energy_Wh')
    result = energy_from_power(power, target_frequency='20T')
    pd.testing.assert_series_equal(result, expected)
def test_energy_from_power_max_timedelta_edge_case():
    """A gap just over max_timedelta yields an all-NaN result."""
    times = pd.date_range('2020-01-01 12:00', periods=4, freq='15T')
    power = pd.Series(1, index=times)
    power = power.drop(power.index[2])
    result = energy_from_power(power, '30T',
                               max_timedelta=pd.to_timedelta('20 minutes'))
    assert result.isnull().all()
def test_energy_from_power_single_value_input():
    """A single sample with a known frequency integrates over one period."""
    times = pd.date_range('2019-01-01', freq='15T', periods=1)
    power = pd.Series([100.], index=times)
    expected = pd.Series([25.], index=times, name='energy_Wh')
    pd.testing.assert_series_equal(energy_from_power(power), expected)
def test_energy_from_power_single_value_input_no_freq():
    """A single sample without frequency information is an error."""
    power = pd.Series([1], pd.date_range('2019-01-01', periods=1, freq='15T'))
    power.index.freq = None
    with pytest.raises(ValueError,
                       match="Could not determine period of input power"):
        energy_from_power(power)
def test_energy_from_power_single_value_instantaneous():
    """power_type='instantaneous' cannot be used with a single sample."""
    power = pd.Series([1], pd.date_range('2019-01-01', periods=1, freq='15T'))
    power.index.freq = None
    match = ("power_type='instantaneous' is incompatible with single element power. "
             "Use power_type='right-labeled'")
    with pytest.raises(ValueError, match=match):
        energy_from_power(power, power_type='instantaneous')
def test_energy_from_power_single_value_with_target():
    """With a coarser target frequency the single sample covers an hour."""
    times = pd.date_range('2019-01-01', freq='15T', periods=1)
    power = pd.Series([100.], index=times)
    expected = pd.Series([100.], index=times, name='energy_Wh')
    result = energy_from_power(power, target_frequency='H')
    pd.testing.assert_series_equal(result, expected)
def test_energy_from_power_leading_nans():
    """Leading NaNs propagate without shifting the output index (GH 244)."""
    power = pd.Series(1, pd.date_range('2019-01-01', freq='15min', periods=5))
    power.iloc[:2] = np.nan
    expected = pd.Series([np.nan, np.nan, 0.25, 0.25],
                         index=power.index[1:], name='energy_Wh')
    pd.testing.assert_series_equal(energy_from_power(power), expected)
| StarcoderdataPython |
84044 | <filename>ku/gnn_layer/core.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.layers.merge import _Merge
from tensorflow.python.keras.layers import Layer, InputSpec, Dense
from tensorflow.python.keras import activations
import tensorflow.keras.initializers as initializers
from ..backend_ext import tensorflow_backend as Ke
class GraphConvolutionNetwork(Layer):
    """Graph convolution network layer.

    Applies the symmetric-normalized propagation rule
    ``D^-1/2 (A + I) D^-1/2 X W`` with an optional activation, where X is
    the node-feature tensor and A the adjacency matrix.
    """

    def __init__(self, n_node, d_out, output_adjacency=False, activation=None, **kwargs):
        # Check exception.
        if isinstance(n_node, int) != True \
            or isinstance(d_out, int) != True \
            or (output_adjacency in [False, True]) != True \
            or n_node < 2 \
            or d_out < 1:
            raise ValueError(f'n_node:{n_node}, d_out:{d_out} or output_adjacency:{output_adjacency} is not valid.')

        self.n_node = n_node
        self.d_out = d_out
        self.output_adjacency = output_adjacency
        self.activation = activations.get(activation)
        super(GraphConvolutionNetwork, self).__init__(**kwargs)

    def build(self, input_shape):
        # input_shape[0] is the node-feature tensor's shape (..., d_in).
        self.d_in = input_shape[0][-1]
        self.I = tf.eye(self.n_node)
        self.W = self.add_weight(name='gcn_weight'
                                 , shape=(self.d_in, self.d_out)
                                 , initializer='truncated_normal' # Which initializer is optimal?
                                 , trainable=True)
        super(GraphConvolutionNetwork, self).build(input_shape)

    def call(self, inputs):
        X = inputs[0]
        A = inputs[1]

        # Add self-loops, then apply symmetric degree normalization.
        A_t = A + self.I
        D_t = tf.linalg.diag(tf.pow(K.sum(A_t, axis=2), -0.5))
        A_t = K.batch_dot(K.batch_dot(D_t, A_t), D_t)

        # Propagate features and project to d_out.
        X_p = tf.tensordot(K.batch_dot(A_t, X), self.W, axes=[[-1], [0]])

        if self.activation is not None:
            X_p = self.activation(X_p)

        if self.output_adjacency:
            outputs = [X_p, A]
        else:
            outputs = X_p

        return outputs

    def get_config(self):
        # Fix: previously only n_node/d_out were serialized, so layers
        # restored via from_config() silently lost output_adjacency and
        # activation.
        config = {'n_node': self.n_node
                  , 'd_out': self.d_out
                  , 'output_adjacency': self.output_adjacency
                  , 'activation': activations.serialize(self.activation)}
        base_config = super(GraphConvolutionNetwork, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
3390039 | import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from ._spider import spiderplot
def format_data(mode, x, y):
    """Return (x, y) unchanged, a wide DataFrame, or a melted long DataFrame.

    *mode* is one of "arrays", "wide-form" or "long-form"; *y* is a 2-D
    array with one column per case.
    """
    if mode=="arrays":
        return x, y
    n_cases = y.shape[1]
    frame = pd.DataFrame(y, index=x,
                         columns=["case %02d"%i for i in range(n_cases)])
    frame.index.name = "x"
    frame.columns.name = "dataset"
    if mode=="wide-form":
        return frame
    if mode=="long-form":
        return frame.reset_index().melt(id_vars="x",
                                        value_name="value")
    assert False
def generate_data_pair(mode, n=12):
    """Two deterministic series (seed 42) with alternating sign per hour."""
    rng = np.random.RandomState(42)
    x = ["%02dh"%i for i in range(n)]
    y = np.r_[[rng.uniform(0.1, 1, len(x)),
               rng.uniform(0.1, 0.2, len(x))]]
    # Flip the sign of every second hour.
    sign = (2*(np.arange(len(x))%2))-1
    return format_data(mode, x, (y*sign).T)
def generate_data(mode, n=12, d=2):
    """*d* deterministic random series of length *n* (seed 42)."""
    rng = np.random.RandomState(42)
    x = ["%02dh"%i for i in range(n)]
    columns = [rng.uniform(-1, 1, len(x)) for _ in range(d)]
    return format_data(mode, x, np.r_[columns].T)
def demo_single():
    """Spider plot of a single random series, no legend."""
    data = generate_data(mode="long-form", d=1)
    sns.set_style("whitegrid")
    ax = spiderplot(x="x", y="value", hue="dataset", legend=False,
                    data=data, palette="husl", rref=0)
    ax.set_rlim([-1.4, 1.4])
    plt.tight_layout()
    plt.show()
def demo_pair():
    """Spider plot of the alternating-sign pair with the legend outside."""
    data = generate_data_pair(mode="long-form")
    sns.set_style("whitegrid")
    ax = spiderplot(x="x", y="value", hue="dataset", style="dataset",
                    data=data, dashes=False, palette="husl", rref=0)
    ax.set_rlim([-1.4, 1.4])
    ax.legend(loc="upper right", bbox_to_anchor=(1.4, 1.),
              borderaxespad=0.)
    plt.tight_layout()
    plt.show()
def demo_multi():
    """Spider plot of five random series with the legend outside."""
    data = generate_data(mode="long-form", d=5)
    ax = spiderplot(x="x", y="value", hue="dataset",
                    data=data, palette="husl", rref=0)
    ax.set_rlim([-1.4, 1.4])
    ax.legend(loc="upper right", bbox_to_anchor=(1.4, 1.),
              borderaxespad=0.)
    plt.tight_layout()
    plt.show()
def demo_aggregate():
    """Spider plot of the per-hour mean with a +/- std extent band."""
    df = generate_data(mode="long-form", n=24, d=10)
    # Aggregate the 10 series per hour.
    means = df.groupby("x")["value"].mean()
    stds = df.groupby("x")["value"].std()
    sns.set_style("whitegrid")
    # Alternative: also draw the individual series in gray behind the mean.
    # ax = spiderplot(x="x", y="value", hue="dataset", style="dataset", data=df,
    #                 fill=False, markers=False, dashes=False, legend=False,
    #                 palette=["gray" for i in range(10)], alpha=0.3)
    ax = spiderplot(y=means, extent=stds, color="red", fillcolor="gray",
                    fill=False, rref=0, label="mean ± std")
    ax.set_rlim([-1.4,1.4])
    ax.legend(loc="upper right",
              bbox_to_anchor=(1.4, 1.),
              borderaxespad=0.)
    plt.tight_layout()
    plt.show()
| StarcoderdataPython |
# Double a running value each round until the user stops answering 'y'.
value = 1
reply = 'y'
while reply == 'y':
    reply = input('Keep going?')
    value = value*2
    print(value)
| StarcoderdataPython |
179754 | <gh_stars>0
#!/usr/bin/env python3
# coding: utf-8
import argparse
import os
import pickle
'''
Load specified pickled data object (produced by gp_baseline) and
Get all non-obsolete terms for the specified tax_id.
-n directory where the pickle files are
-d data set prefix (e.g., 'egid')
-t tax id; default = '9606' (human)
'''
# Command-line interface: data directory, dataset prefix, and taxonomy id.
parser = argparse.ArgumentParser(description="""Get species info from data object.""")
parser.add_argument("-n", required=True, nargs=1, metavar="DIRECTORY",
                    help="directory where data is stored")
parser.add_argument("-d", required=True, type=str,
                    help="dataset name")
parser.add_argument(
    "-t",
    default='9606',
    choices=[
        '9606',
        '10090',
        '10116'],
    help="species by taxid")
args = parser.parse_args()

dataset = args.d
tax_id = args.t

# NOTE(review): if the directory is missing we only print and keep going,
# so the pickle lookup below then runs in the current directory - confirm
# this best-effort behavior is intended rather than exiting.
if os.path.exists(args.n[0]):
    os.chdir(args.n[0])
else:
    print('data directory {0} not found!'.format(args.n[0]))

if not os.path.exists(dataset + '.parsed_data.pickle'):
    print(
        'WARNING !!! Required pickled data file %s not found.' %
        (dataset + '.parsed_data.pickle'))
else:
    # HACK: pickle.load executes arbitrary code from the file - only use
    # pickles produced by the trusted gp_baseline pipeline.
    with open(dataset + '.parsed_data.pickle', 'rb') as f:
        data = pickle.load(f)

    # Print every non-obsolete term id for the requested species.
    # NOTE(review): assumes the pickled object exposes get_values() /
    # get_species() as in gp_baseline's data model - verify against it.
    for term_id in data.get_values():
        if data.get_species(term_id) == tax_id:
            print(term_id)
| StarcoderdataPython |
1787545 | <reponame>ad4529/Printer_Detection<filename>Training/correct_final_anns.py
import os
os.chdir('/home/abhisek/Desktop/keras-yolo3/model_data')
with open('coco_reduced_v3.txt', 'r') as f:
lines = f.readlines()
f.close()
lines = [l.strip('\n') for l in lines]
cnt = 0
for i in lines:
vals = i.split()
vals = vals[1:]
for j in vals:
nums = j.split(',')
if len(nums) < 5 or len(nums) > 5:
print(i)
cnt += 1
print(cnt)
| StarcoderdataPython |
3294438 | <gh_stars>10-100
from os import mkdir
from bottle import route, get, request, static_file, run
from settings import PORT, DIR_CACHE, DIR_GRAPH
from crypkograph import render_graph
@route('/')
@route('/index.html')
def serve_html():
    """Serve the landing page from the working directory."""
    return static_file('index.html', root='.')
@route('/static/<filename:path>')
def serve_static(filename):
    """Serve bundled assets from the ./static directory."""
    return static_file(filename, root='static')
@route('/generated/<filename:re:.*\.gv\.(png|pdf)>')
def serve_generated(filename):
    """Serve a rendered graph: PNGs inline, PDFs as downloads."""
    # The route regex guarantees the name ends in .png or .pdf.
    if filename.endswith('.png'):
        return static_file(filename, DIR_GRAPH, mimetype='image/png')
    if filename.endswith('.pdf'):
        return static_file(filename, DIR_GRAPH, download=filename)
# /api/render?owner_addr={owner_addr}
@get('/api/render')
def render():
    """Render the Crypko ownership graph for the given owner address."""
    owner_addr = request.query['owner_addr']
    if not owner_addr:
        # Was a bare ``raise Exception()``; a ValueError with a message
        # keeps bottle's 500 behaviour but makes the failure diagnosable.
        raise ValueError('owner_addr query parameter is required')
    render_graph(owner_addr, subdir=DIR_GRAPH)
if __name__ == '__main__':
    try:
        mkdir(DIR_CACHE)
    except FileExistsError:
        # Cache directory already exists from a previous run - fine.
        pass
    # Serve on all interfaces using the configured port.
    run(host='0.0.0.0', port=PORT)
| StarcoderdataPython |
1612168 | <reponame>remicalixte/integrations-core
# (C) Datadog, Inc. 2010-present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import pytest
from datadog_test_libs.win.pdh_mocks import initialize_pdh_tests, pdh_mocks_fixture # noqa: F401
from datadog_checks.dotnetclr import DotnetclrCheck
from datadog_checks.dotnetclr.dotnetclr import DEFAULT_COUNTERS
from .common import CHECK_NAME, INSTANCES, MINIMAL_INSTANCE
@pytest.mark.usefixtures('pdh_mocks_fixture')
def test_basic_check(aggregator):
    """Every default counter is reported exactly once per CLR instance."""
    initialize_pdh_tests()
    instance = MINIMAL_INSTANCE
    check = DotnetclrCheck(CHECK_NAME, {}, [instance])
    check.check(instance)

    for counter in DEFAULT_COUNTERS:
        metric_name = counter[3]
        for clr_instance in INSTANCES:
            aggregator.assert_metric(metric_name,
                                     tags=["instance:%s" % clr_instance],
                                     count=1)

    assert aggregator.metrics_asserted_pct == 100.0
| StarcoderdataPython |
1770364 | from scipy import signal
from PIL import Image
import cv2
import numpy
import math
import imageio
# Edge-detection pipeline: grayscale load -> smoothing -> four directional
# Sobel/Kirsch convolutions -> combined gradient magnitude.
# NOTE(review): despite the comments, no non-maximum suppression or
# hysteresis is performed, so this is gradient-magnitude edge detection
# rather than full Canny.
# Locating the image. If the image is not same then change to relative address.
usedImage = '../../Images/test.jpg'
# Opening the image into an array
img = numpy.array(Image.open(usedImage).convert("L"))
imageio.imwrite('./Outputs/img.jpeg', img)
# Kernel to perform gaussian blur
# NOTE(review): this is an unnormalized 3x3 box kernel (all ones), not a
# Gaussian; it also scales intensities by ~9x.
kernel = [[1, 1, 1], [1, 1, 1], [1, 1, 1]]
# CONVOLUTION 1
# Performing gaussain blur by performing convolution with gaussian kernel.
# I could not code the convolution so I got irritated and used a function for
# convolution instead.
gaussian = signal.convolve(img, kernel, mode='same')
# Print array just to check the output. Can uncomment if you want.
# print ('Im: Convolution 1')
# print (gaussian)
# Saving the array with the blurred image as a JPG image
imageio.imwrite('./Outputs/smooth.jpeg', gaussian)
# cv2.imshow('smooth.jpeg', gaussian) # This statement does not work btw
# Kernel for Sobel X (using horizontal transformation)
kernelX = [[-1, 0, 1],
           [-2, 0, 2],
           [-1, 0, 1]]
# Kernel for Sobel Y (using vertical transformation)
kernelY = [[-1, -2, -1],
           [0, 0, 0],
           [1, 2, 1]]
# Kernel for diagonal Kirsch transformation 1
kernelXY = [[0, 1, 2],
            [-1, 0, 1],
            [-2, -1, 0]]
# Kernel for diagonal Kirsch transformation 2
kernelYX = [[-2, -1, 0],
            [-1, 0, 1],
            [0, 1, 2]]
# CONVOLUTION 2
# Performing convolution over the smoothed image with all the generated kernels.
# Generate output array imX of horizontal convolution
imX = signal.convolve(gaussian, kernelX, mode='same')
# Generate output array imY of vertical convolution
imY = signal.convolve(gaussian, kernelY, mode='same')
# Generate output array imX of horizontal convolution
imXY = signal.convolve(gaussian, kernelXY, mode='same')
# Generate output array imY of vertical convolution
imYX = signal.convolve(gaussian, kernelYX, mode='same')
# Printing arrays to console just to check
# print ('Im X: Convolution 2')
# print (imX)
# print ('Im Y: Convolution 2')
# print (imY)
# print ('Im XY: Convolution 2')
# print (imXY)
# print ('Im YX: Convolution 2')
# print (imYX)
# Saving the arrays created as JPG images
imageio.imwrite('./Outputs/imX.jpeg', imX)
imageio.imwrite('./Outputs/imY.jpeg', imY)
imageio.imwrite('./Outputs/imXY.jpeg', imXY)
imageio.imwrite('./Outputs/imYX.jpeg', imYX)
'''cv2.imshow('imX.jpeg', imX)
cv2.imshow('imY.jpeg', imY)
cv2.imshow('imXY.jpeg', imXY)
cv2.imshow('imYX.jpeg', imYX)'''
# Combining all the horizontal and vertical gradient approximations
# to create the final canny edge detected image
imFinal = numpy.sqrt(imX*imX + imY*imY + imXY*imXY + imYX*imYX)
# Printing the canny edge detected image array just to check
# print ('Im Final: Combining Gradient Approximations')
# print (imFinal)
# Saving the final canny edge detection image as a JPG image
imageio.imwrite('./Outputs/canny.jpeg', imFinal)
# cv2.imshow('canny.jpeg', imFinal)
print ('Finished Canny edge detection')
| StarcoderdataPython |
1776884 | <reponame>AngelOnFira/megagame-controller
from rest_framework import renderers
from .views import TeamViewSet
# team_list = TeamViewSet.as_view({
# 'get': 'list',
# 'post': 'create'
# })
# urlpatterns = format_suffix_patterns([
# path('', api_root),
# path('snippets/', snippet_list, name='snippet-list'),
# path('snippets/<int:pk>/', snippet_detail, name='snippet-detail'),
# path('snippets/<int:pk>/highlight/', snippet_highlight, name='snippet-highlight'),
# path('users/', user_list, name='user-list'),
# path('users/<int:pk>/', user_detail, name='user-detail')
# ])
| StarcoderdataPython |
3276533 | <filename>jsonclasses/modifiers/tocap_modifier.py
"""module for tocap modifier."""
from __future__ import annotations
from typing import Any, TYPE_CHECKING
from .modifier import Modifier
if TYPE_CHECKING:
from ..ctx import Ctx
class ToCapModifier(Modifier):
    """Modifier that capitalizes a string value (first character upper-cased,
    the remainder lower-cased); non-string values pass through unchanged."""

    def transform(self, ctx: Ctx) -> Any:
        value = ctx.val
        if isinstance(value, str):
            return value.capitalize()
        return value
| StarcoderdataPython |
170391 | <gh_stars>0
#!/usr/bin/env python
# addapted from gather_key_oauth2.py included with https://github.com/orcasgit/python-fitbit
import cherrypy
import os
import sys
import threading
import traceback
import webbrowser
from base64 import b64encode
from fitbit.api import FitbitOauth2Client
from oauthlib.oauth2.rfc6749.errors import MismatchingStateError, MissingTokenError
from requests_oauthlib import OAuth2Session
from iniHandler import ReadCredentials, WriteTokens
class OAuth2Server:
    """Tiny local CherryPy server that performs the Fitbit OAuth2 browser
    dance: opens the authorization URL, receives the redirect with the
    verification code, and exchanges it for an access token."""

    def __init__(self, client_id, client_secret,
                 redirect_uri='http://127.0.0.1:8080/'):
        """ Initialize the FitbitOauth2Client """
        self.redirect_uri = redirect_uri
        # Static HTML shown in the user's browser after the redirect lands.
        self.success_html = """
<style>
h1 {text-align:center;}
h3 {text-align:center;}
</style>
<h1>You are now authorised to access the Fitbit API!</h1>
<br/><h3>You can close this window</h3>"""
        # %-template: (message, optional traceback html).
        self.failure_html = """
<style> h1 {text-align:center;} </style>
<h1>ERROR: %s</h1><br/><h3>You can close this window</h3>%s"""
        self.oauth = FitbitOauth2Client(client_id, client_secret)

    def browser_authorize(self):
        """
        Open a browser to the authorization url and spool up a CherryPy
        server to accept the response
        """
        url, _ = self.oauth.authorize_token_url(redirect_uri=self.redirect_uri)
        # Open the web browser in a new thread for command-line browser support
        threading.Timer(1, webbrowser.open, args=(url,)).start()
        # Blocks until _shutdown_cherrypy() stops the engine.
        cherrypy.quickstart(self)

    @cherrypy.expose
    def index(self, state, code=None, error=None):
        """
        Receive a Fitbit response containing a verification code. Use the code
        to fetch the access_token.
        """
        # NOTE(review): this immediately discards the incoming `error`
        # parameter by rebinding it to None.
        error = None
        if code:
            try:
                self.oauth.fetch_access_token(code, self.redirect_uri)
            except MissingTokenError:
                error = self._fmt_failure(
                    'Missing access token parameter.</br>Please check that '
                    'you are using the correct client_secret')
            except MismatchingStateError:
                error = self._fmt_failure('CSRF Warning! Mismatching state')
        else:
            error = self._fmt_failure('Unknown error while authenticating')
        # Use a thread to shutdown cherrypy so we can return HTML first
        self._shutdown_cherrypy()
        return error if error else self.success_html

    def _fmt_failure(self, message):
        """Format *message* plus the current traceback (if any) as HTML."""
        tb = traceback.format_tb(sys.exc_info()[2])
        tb_html = '<pre>%s</pre>' % ('\n'.join(tb)) if tb else ''
        return self.failure_html % (message, tb_html)

    def _shutdown_cherrypy(self):
        """ Shutdown cherrypy in one second, if it's running """
        if cherrypy.engine.state == cherrypy.engine.states.STARTED:
            threading.Timer(1, cherrypy.engine.exit).start()
if __name__ == '__main__':
    # Python 2 compatibility: alias raw_input to input where it exists.
    try: input = raw_input
    except NameError: pass
    # Credentials come either from argv, credentials.ini, or interactive entry.
    # NOTE(review): `id` shadows the builtin; the final `else` branch is
    # unreachable (the elif condition is the exact negation of the if).
    if not (len(sys.argv) == 3):
        responce = input("Get credentials from credentials.ini? (Y/N)\n").upper()
        if responce == "Y":
            id, secret = ReadCredentials()
        elif responce == "N":
            responce = input("Would you like to enter them manually now? (Y/N)\n").upper()
            if responce == "Y":
                id = input("Enter client id:\n")
                secret = input("Enter client secret:\n")
            elif responce == "N":
                print("Try again giving arguments: client id and client secret.")
                sys.exit(1)
            else:
                print("Invalid input.")
                sys.exit(1)
        else:
            print("Invalid input.")
            sys.exit(1)
    elif (len(sys.argv) == 3):
        id, secret = sys.argv[1:]
    else:
        print("Try again giving arguments: client id and client secret.")
        sys.exit(1)
    # Run the browser OAuth2 flow, then persist the resulting tokens.
    server = OAuth2Server(id,secret)
    server.browser_authorize()
    acc_tok = server.oauth.token['access_token']
    ref_tok = server.oauth.token['refresh_token']
    print('FULL RESULTS = %s' % server.oauth.token)
    print('ACCESS_TOKEN = %s' % acc_tok)
    print('REFRESH_TOKEN = %s' % ref_tok)
    WriteTokens(acc_tok,ref_tok)
4834054 | <filename>university_system/users/views.py
from django.shortcuts import redirect, render
from django.contrib import messages
from .forms import RegisterForm, ChangePasswordForm, MyInfoForm
from .decorators import check_login
@check_login
def register(request):
    """Sign-up view: on valid POST create the account and redirect to login;
    otherwise re-render the registration form (bound with errors on an
    invalid POST)."""
    if request.method == "POST":
        form = RegisterForm(request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, "Account Created Now You Can Login")
            return redirect("login")
    else:
        form = RegisterForm()
    return render(request, "auth/register.html", context={"form": form})
def edit_profile(request):
    """Let the logged-in user edit their own profile (including uploaded
    files); re-renders the same page with a success message on save."""
    if request.method == "POST":
        form = MyInfoForm(request.POST, request.FILES, instance=request.user)
        if form.is_valid():
            form.save()
            messages.success(request, "InfoUpdated")
    else:
        form = MyInfoForm(instance=request.user)
    return render(request, "users/edit_profile.html", context={"form": form})
def change_password(request):
    """Change the logged-in user's password after verifying the old one and
    that the two new-password fields match.

    NOTE(review): the mismatch branch reports via messages.success -- it
    should use messages.error/warning.  Also note this reads form.data
    directly rather than cleaned_data.
    """
    if request.method == "POST":
        form = ChangePasswordForm(request.POST)
        if request.user.check_password(form.data["old_password"]) and form.is_valid():
            if form.data["new_password"] == form.data["confirm_password"]:
                request.user.set_password(form.data["new_password"])
                request.user.save()
                messages.success(request, "Password Changed Success")
            else:
                messages.success(request, "Password Does not Match")
        else:
            messages.warning(request, "Your Old Password Is Wrong")
    else:
        form = ChangePasswordForm()
    return render(request, "users/change_password.html", context={"form": form})
| StarcoderdataPython |
74659 | <gh_stars>1000+
import json
from typing import Dict, Optional
import logging
from rich.logging import RichHandler
from ciphey.iface import Checker, Config, ParamSpec, T, registry
@registry.register
class JsonChecker(Checker[str]):
    """
    This object is effectively a prebuilt quorum (with requirement 1) of common patterns
    """

    def check(self, text: T) -> Optional[str]:
        """Return "" (a non-None sentinel meaning "valid JSON") when *text*
        parses as JSON, else None.  Pure digit strings are rejected up front
        so bare numbers are not classified as JSON."""
        logging.debug("Trying json checker")
        # https://github.com/Ciphey/Ciphey/issues/389
        if text.isdigit():
            return None
        try:
            json.loads(text)
            return ""
        except ValueError:
            return None

    def getExpectedRuntime(self, text: T) -> float:
        """Estimated check cost in seconds, linear in input length."""
        # TODO: actually bench this
        return 1e-7 * len(text)  # From benchmarks I found online

    def __init__(self, config: Config):
        super().__init__(config)

    @staticmethod
    def getParams() -> Optional[Dict[str, ParamSpec]]:
        # This checker takes no configuration parameters.
        pass
| StarcoderdataPython |
1606276 | <gh_stars>0
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import image as mpimg
from PIL import Image
from PIL import ImageFilter
from scipy.ndimage.interpolation import rotate
import os
import sys
def dist(x1, y1, x2, y2):
    """Euclidean distance between points (x1, y1) and (x2, y2)."""
    dx = x1 - x2
    dy = y1 - y2
    return (dx * dx + dy * dy) ** 0.5
def add_dot(x, base_color=None):
    """Alpha-blend one randomly placed, randomly sized elliptical "bubble"
    highlight into the HxWx3 int array *x* (modified in place and returned).

    NOTE(review): *base_color* is accepted but never used; the bubble color
    is hard-coded to (250, 250, 250).
    """
    ab = 0.5          # background blend weight
    color = 250
    bubble = np.array([color, color, color])
    # Two random foci define the ellipse; r is its (randomized) major-axis sum.
    r1 = int(np.random.rand() * x.shape[0])
    c1 = int(np.random.rand() * x.shape[1])
    r2 = int((np.random.rand() - 0.5) * x.shape[0] / 5 + r1)
    c2 = int((np.random.rand() - 0.5) * x.shape[1] / 5 + c1)
    r = (np.random.rand() + 1) * dist(r1, c1, r2, c2)
    # Scan only the bounding box that can contain the ellipse.
    for i in range(int(max(min(r1, r2) - r, 0)), int(min(max(r1, r2) + r, x.shape[0]))):
        for j in range(int(max(min(c1, c2) - r, 0)), int(min(max(c1, c2) + r, x.shape[1]))):
            if dist(r1, c1, i, j) + dist(r2, c2, i, j) <= r:
                # aa fades from the ellipse center toward its boundary.
                aa = 1 - (dist(r1, c1, i, j) + dist(r2, c2, i, j)) / r / 1.1
                x[i][j] = ((bubble * aa + ab * x[i][j] * (1 - aa)) / (aa + ab * (1 - aa))).astype(np.int64)
    return x
def random_bg(r = 150, c = 150, base_color = np.array([140, 185, 237], dtype = np.uint64), filters = 1, prob = 0.499):
    """Generate a noisy r x c background: pixels copy their left/upper
    neighbour with probability ~prob (producing streaks), the oversized
    canvas is randomly rotated, center-cropped, and smoothed *filters* times.

    NOTE(review): mutable np.array default argument -- never mutated here,
    but fragile; confirm before reusing.
    """
    # Work on a 2x-sized canvas so the rotation crop has no empty corners.
    x = np.zeros((2*r, 2*c, 3), dtype = np.int64)
    x[0][0] = base_color
    for i in range(2*r):
        for j in range(2*c):
            if j > 0:
                if np.random.rand() < prob:
                    x[i][j] = x[i][j-1]
                    continue
            if i > 0:
                # Conditional probability so vertical copies stay ~prob overall.
                if np.random.rand() < prob / (1 - prob):
                    x[i][j] = x[i-1][j]
                    continue
            x[i][j] = base_color + (np.random.rand(3) - 0.5) * 200
    # Clamp to [0, 255] and convert to uint8.
    over = (x > 255)
    x = x - over * x + over * 255
    under = x < 0
    x = (x - under * x).astype(np.uint8)
    # Random rotation, then center-crop back to r x c.
    ys = rotate(x, angle = np.random.rand() * 360)
    ys = ys[(ys.shape[0] - r) // 2: (ys.shape[0] - r) // 2 + r, (ys.shape[1] - c) // 2: (ys.shape[1] - c) // 2 + c]
    ys = Image.fromarray(ys)
    for i in range(filters):
        ys = ys.filter(ImageFilter.SMOOTH)
    return np.array(ys)
def create_bg(r = 150, c = 150, base_color = np.array([140, 185, 237], dtype = np.uint64), dots_ratio = 0.1, filters = 1):
    """Generate a synthetic r x c "water" background PIL image: streaky noise
    around *base_color*, plus int(r * dots_ratio) bubble highlights, smoothed
    *filters* times.

    NOTE(review): the local `prob` and the computed `scale` gradient are
    never applied to the output -- apparently dead code.
    """
    prob = 0.5
    x = np.zeros((r, c, 3), dtype = np.int64)
    x[0][0] = base_color
    for i in range(r):
        for j in range(c):
            if j > 0:
                if np.random.rand() < 0.7:
                    x[i][j] = x[i][j-1]
                    continue
            if i > 0:
                if np.random.rand() < 0.5:
                    x[i][j] = x[i-1][j]
                    continue
            x[i][j] = base_color + (np.random.rand(3) - 0.5) * 20
    # Build a rotated brightness ramp (unused -- see NOTE above).
    scale1 = (np.arange(r * c * 4).reshape((r * 2, c * 2)) / r / c / 4 * 80).astype(np.int64)
    scale = np.zeros((r * 2, c * 2, 3)).astype(np.int64)
    for i in range(3):
        scale[:,:,i] = scale1
    scale = rotate(scale, angle = np.random.rand() * 360)
    scale = scale[(scale.shape[0] - r) // 2: (scale.shape[0] - r) // 2 + r, (scale.shape[1] - c) // 2: (scale.shape[1] - c) // 2 + c]
    # Sprinkle bubble highlights.
    dots = int(r * dots_ratio)
    for i in range(dots):
        x = add_dot(x)
    # Clamp to [0, 255], convert to uint8, smooth, and return a PIL image.
    over = (x > 255)
    x = x - over * x + over * 255
    under = x < 0
    x = (x - under * x).astype(np.uint8)
    y = Image.fromarray(x)
    ys = y
    for i in range(filters):
        ys = ys.filter(ImageFilter.SMOOTH)
    return ys
def add_bobber(img, bobber, scale = 1, theta = 0):
    """Composite a scaled/rotated bobber sprite at a random position onto a
    copy of *img* and return the result as a uint8 array.

    Pixels of the sprite with value <= 10 are treated as transparent.
    NOTE(review): PIL's resize takes (width, height) but is passed
    (shape[0], shape[1]) = (rows, cols); harmless only for square sprites
    -- confirm.
    """
    bobber1 = Image.fromarray(bobber)
    bobber1 = bobber1.resize(tuple(map(int, [scale * bobber.shape[0], scale * bobber.shape[1]])), Image.ANTIALIAS)
    bobber1 = bobber1.rotate(theta)
    bobber2 = np.array(bobber1)
    img2 = np.array(img)
    # Random top-left placement fully inside the image.
    r = np.random.randint(0, img2.shape[0] - bobber2.shape[0])
    c = np.random.randint(0, img2.shape[1] - bobber2.shape[1])
    # Jitter the sprite brightness, keep transparency mask, clamp to [0, 255].
    bobber3 = bobber2.astype(np.int64) + np.random.randint(-2, 2, bobber2.shape)
    bobber3 += np.random.randint(0, max(255 - bobber3.max(), 1))
    bobber3 *= bobber2 > 10
    over = (bobber3 > 255)
    bobber3 = bobber3 - over * bobber3 + over * 255
    under = bobber3 < 0
    bobber3 = (bobber3 - under * bobber3).astype(np.uint8)
    # Zero out the destination where the sprite is opaque, then add it.
    img2[r:r + bobber3.shape[0], c:c + bobber3.shape[1]] *= (bobber3 < 10)
    img2[r:r + bobber3.shape[0], c:c + bobber3.shape[1]] += bobber3
    return img2
def create_image_dataset(bobbers, num):
    """Build ~num labelled samples from real photos in dataset_images/: for
    each random 150x150 crop, save the plain crop and a bobber-composited
    copy, appending the label pair "1\\n0\\n" to dataset/target.txt
    (presumably 1 = no bobber, 0 = bobber present -- matches create_dataset;
    confirm against the training code).
    """
    bar_length = 20
    num //= 2  # each iteration emits two images
    num_per_image = num // len(os.listdir('dataset_images'))
    total = num_per_image * len(os.listdir('dataset_images'))
    print("Creating Dataset...")
    count = 0
    fhandle = open("dataset/target.txt", 'w')
    for i in os.listdir('dataset_images'):
        img = (mpimg.imread('dataset_images/{}'.format(i))[:,:,:3] * 255).astype(np.uint8)
        for j in range(num_per_image):
            # Text progress bar on one rewritten console line.
            percentage = 50 * count // total + 1
            sys.stdout.write('\r[{}{}] {}%'.format('#' * (bar_length * percentage // 100), ' ' * (bar_length - bar_length * percentage // 100), percentage))
            sys.stdout.flush()
            r = np.random.randint(0, img.shape[0] - 150)
            c = np.random.randint(0, img.shape[1] - 150)
            bg = img[r:r+150, c:c+150]
            plt.imsave(os.path.join('dataset', '{}.png'.format(count)), bg)
            test = add_bobber(bg, bobbers[np.random.randint(len(bobbers))], np.random.rand() + 0.5, (np.random.rand() - 0.5) * 20)
            count += 1
            plt.imsave(os.path.join('dataset', '{}.png'.format(count)), test)
            count += 1
            fhandle.write("1\n0\n")
    fhandle.close()
    print("\nDone")
def create_dataset(bobbers, num):
    """Build ~num synthetic samples in triples: one generated background, and
    two different backgrounds composited with a random bobber; labels
    "1\\n0\\n0\\n" are written per triple by the caller-facing loop below.
    Occasionally a random rectangle is blacked out for robustness.
    """
    bar_length = 20
    num //= 3  # each iteration emits three images
    print("Creating Dataset...")
    for i in range(num):
        percentage = 100 * i // num + 1
        sys.stdout.write('\r[{}{}] {}%'.format('#' * (bar_length * percentage // 100), ' ' * (bar_length - bar_length * percentage // 100), percentage))
        sys.stdout.flush()
        bg = create_bg(base_color = np.random.randint(0,255,3), dots_ratio = np.random.random() * 0.1, filters = np.random.randint(0, 4))
        if np.random.random() < 0.1:
            # Black out a random rectangle on the plain background.
            bg = np.array(bg)
            r = np.random.randint(0, bg.shape[0], 2)
            c = np.random.randint(0, bg.shape[1], 2)
            i1, i2 = min(r), max(r)
            j1, j2 = min(c), max(c)
            bg[i1:i2, j1:j2] = 0
        plt.imsave(os.path.join('dataset', '{}.png'.format(3*i)), np.array(bg))
        test = add_bobber(bg, bobbers[np.random.randint(len(bobbers))], np.random.rand() + 0.5, (np.random.rand() - 0.5) * 20)
        plt.imsave(os.path.join('dataset', '{}.png'.format(3*i+1)), test)
        bg2 = create_bg(base_color = np.random.randint(0,255,3))
        if np.random.random() < 0.1:
            bg2 = np.array(bg2)
            r = np.random.randint(0, bg2.shape[0], 2)
            c = np.random.randint(0, bg2.shape[1], 2)
            i1, i2 = min(r), max(r)
            j1, j2 = min(c), max(c)
            bg2[i1:i2, j1:j2] = 0
        test2 = add_bobber(bg2, bobbers[np.random.randint(len(bobbers))], np.random.rand() + 0.5, (np.random.rand() - 0.5) * 20)
        plt.imsave(os.path.join('dataset', '{}.png'.format(3*i+2)), test2)
        """randombg = add_bobber(randombg, bobbers[np.random.randint(len(bobbers))], np.random.rand() + 0.5, (np.random.rand() - 0.5) * 20)
        plt.imsave(os.path.join('dataset', '{}.png'.format(4*i+2)), randombg)"""
    print("\nCreating targets...")
    with open("dataset/target.txt", 'w') as fhandle:
        for i in range(num):
            fhandle.write("1\n0\n0\n")
    print("Done")
def create_extra(prev, num2):
    """Append ~num2 extra bobber-free crops from real photos in bgs/ to the
    dataset, continuing numbering after *prev* (rounded down to a multiple
    of 3 to match create_dataset's triples); label "1" per image is appended
    to dataset/target.txt.
    """
    num = num2 // len(os.listdir('bgs'))
    count = (prev // 3) * 3
    start = count
    bar_length = 20
    fhandle = open("dataset/target.txt", 'a')
    print("Creating extra bg's...")
    for i in os.listdir('bgs'):
        img = mpimg.imread('bgs/{}'.format(i))
        # Keep only the top third of the photo (presumably the water surface
        # region -- confirm), dropping any alpha channel.
        img = img[:img.shape[0]//3,:,:3]
        for j in range(num):
            percentage = 100 * (count - start) // num2 + 1
            sys.stdout.write('\r[{}{}] {}%'.format('#' * (bar_length * percentage // 100), ' ' * (bar_length - bar_length * percentage // 100), percentage))
            sys.stdout.flush()
            r = np.random.randint(0, img.shape[0] - 150)
            c = np.random.randint(0, img.shape[1] - 150)
            bg = img[r:r+150, c:c+150]
            # Add per-pixel and global brightness jitter (floats in [0, 1]).
            bg += (np.random.rand(150, 150, 3) - 0.5) * 2 * np.random.random() * 0.078 + (np.random.random() - 0.5) * 0.1
            over = (bg > 1)
            bg = bg - over * bg + over
            under = bg < 0
            bg = (bg - under * bg)
            plt.imsave(os.path.join('dataset', '{}.png'.format(count)), bg)
            count += 1
            fhandle.write("1\n")
    fhandle.close()
    print("\nDone")
if __name__ == "__main__":
bobbers = []
for i in os.listdir('bobbers'):
bobbers.append((mpimg.imread(os.path.join('bobbers', i)) * 255).astype(np.uint8)[:,:,:3])
bobbers[-1] = bobbers[-1] = bobbers[-1] * (bobbers[-1] < 250)
if len(sys.argv) < 2:
num = 9000
else:
try:
num = int(sys.argv[1])
except:
print("Usage:\npython3 create_dataset.py [<number of samples>] [<number of bg images>]")
if len(sys.argv) < 3:
num2 = 3000
else:
try:
num2 = int(sys.argv[2])
except:
print("Usage:\npython3 create_dataset.py [<number of samples>] [<number of bg images>]")
#create_dataset(bobbers, num)
#create_extra(num, num2)
create_image_dataset(bobbers, num)
| StarcoderdataPython |
40345 | import logging
from pyramid.httpexceptions import HTTPNotImplemented
from pyramid.renderers import render, render_to_response
log = logging.getLogger(__name__)
class RestView(object):
    """Base class for Pyramid REST views: caches common request attributes
    and declares the standard CRUD actions, each of which subclasses must
    override (defaults respond 501 Not Implemented)."""

    def __init__(self, request):
        self.request = request
        self.params = request.params        # query/POST parameters
        self.url = request.route_url        # route URL generator
        self.c = request.tmpl_context       # per-request template context
        self.routes = self.request.matchdict  # matched route placeholders

    def render_(self, *args, **kwargs):
        """Render a template to a string, injecting the current request."""
        kwargs['request'] = self.request
        return render(*args, **kwargs)

    def render(self, *args, **kwargs):
        """Render a template to a Response, injecting the current request."""
        kwargs['request'] = self.request
        return render_to_response(*args, **kwargs)

    def index(self):
        raise HTTPNotImplemented()

    def new(self):
        raise HTTPNotImplemented()

    def create(self):
        raise HTTPNotImplemented()

    def view(self):
        raise HTTPNotImplemented()

    def edit(self):
        raise HTTPNotImplemented()

    def update(self):
        raise HTTPNotImplemented()

    def delete(self):
        raise HTTPNotImplemented()
3353006 | # -*- coding: utf-8 -*-
""" Script to create user files (user-config.py, user-fixes.py) """
__version__ = '$Id$'
import os, sys, codecs, re
base_dir = ''
console_encoding = sys.stdout.encoding
if console_encoding is None or sys.platform == 'cygwin':
console_encoding = "iso-8859-1"
def listchoice(clist = [], message = None, default = None):
    """Prompt the user to pick one entry from *clist* by its 1-based number.

    Returns the chosen item, or *default* when the user just presses Enter
    and a default was supplied.  Re-prompts until the input is valid.
    """
    if not message:
        message = "Select"
    if default:
        message += " (default: %s)" % default
    message += ": "
    for n, i in enumerate(clist):
        print ("%d: %s" % (n + 1, i))
    while True:
        choice = raw_input(message)
        if choice == '' and default:
            return default
        try:
            index = int(choice)
            # Reject 0/negative numbers explicitly: Python would otherwise
            # index from the end (e.g. "0" silently returned the last item).
            if 1 <= index <= len(clist):
                return clist[index - 1]
        except ValueError:
            # Non-numeric input falls through to the retry message; the old
            # bare "except:" also swallowed KeyboardInterrupt.
            pass
        print("Invalid response")
    # (the old unreachable "return response" referenced an undefined name
    # and has been removed)
def file_exists(filename):
    """Return True (printing a warning) when *filename* already exists on
    disk, else False."""
    exists = os.path.exists(filename)
    if exists:
        print("'%s' already exists." % filename)
    return exists
def create_user_config():
    """Interactively create user-config.py in base_dir (skipped when it
    already exists): asks for family, language code, and bot username, then
    copies the relevant SETTINGS sections out of config.py.  Python 2 only
    (raw_input, unicode)."""
    _fnc = os.path.join(base_dir, "user-config.py")
    if not file_exists(_fnc):
        # Families are discovered from families/xxx_family.py filenames.
        know_families = re.findall(r'(.+)_family.py\b', '\n'.join(os.listdir(os.path.join(base_dir, "families"))))
        fam = listchoice(know_families, "Select family of sites we are working on", default = 'wikipedia')
        mylang = raw_input("The language code of the site we're working on (default: 'en'): ") or 'en'
        username = raw_input("Username (%s %s): " % (mylang, fam)) or 'UnnamedBot'
        username = unicode(username, console_encoding)
        #
        # I don't like this solution. Temporary for me.
        f = codecs.open("config.py", "r", "utf-8") ; cpy = f.read() ; f.close()
        # Extract every "############## XXX SETTINGS" block from config.py.
        res = re.findall("^(############## (?:LOGFILE|"
                         "INTERWIKI|"
                         "SOLVE_DISAMBIGUATION|"
                         "IMAGE RELATED|"
                         "TABLE CONVERSION BOT|"
                         "WEBLINK CHECKER|"
                         "DATABASE|"
                         "SEARCH ENGINE|"
                         "COPYRIGHT|"
                         "FURTHER) SETTINGS .*?)^(?=#####|# =====)", cpy, re.MULTILINE | re.DOTALL)
        config_text = '\n'.join(res)
        f = codecs.open(_fnc, "w", "utf-8")
        f.write("""# -*- coding: utf-8 -*-
# This is an automatically generated file. You can find more configuration parameters in 'config.py' file.
# The family of sites we are working on. wikipedia.py will import
# families/xxx_family.py so if you want to change this variable,
# you need to write such a file.
family = '%s'
# The language code of the site we're working on.
mylang = '%s'
# The dictionary usernames should contain a username for each site where you
# have a bot account.
usernames['%s']['%s'] = u'%s'
%s""" % (fam, mylang, fam, mylang, username, config_text))
        f.close()
        print("'%s' written." % _fnc)
def create_user_fixes():
    """Create a template user-fixes.py in base_dir containing one commented
    example fix; skipped when the file already exists."""
    _fnf = os.path.join(base_dir, "user-fixes.py")
    if not file_exists(_fnf):
        f = codecs.open(_fnf, "w", "utf-8")
        f.write(r"""# -*- coding: utf-8 -*-
#
# This is only an example. Don't use it.
#
fixes['example'] = {
    'regex': True,
    'msg': {
        '_default':u'no summary specified',
    },
    'replacements': [
        (ur'\bword\b', u'two words'),
    ]
}
""")
        f.close()
        print("'%s' written." % _fnf)
if __name__ == "__main__":
print("1: Create user_config.py file")
print("2: Create user_fixes.py file")
print("3: The two files")
choice = raw_input("What do you do? ")
if choice == "1":
create_user_config()
if choice == "2":
create_user_fixes()
if choice == "3":
create_user_config()
create_user_fixes()
if not choice in ["1", "2", "3"]:
print("Nothing to do")
| StarcoderdataPython |
1773164 | """Main module to process the GrandPy Bot application.
"""
from app import app
| StarcoderdataPython |
from sklearn.metrics import r2_score

# R^2 (coefficient of determination) demo: 1.0 is a perfect fit, 0.0 means
# the model is no better than predicting the mean, and the score can be
# arbitrarily negative for bad predictions.
y_true = [3, -0.5, 2, 7]
y_pred = [2.5, 0.0, 2, 8]
r2 = r2_score(y_true, y_pred)
print(r2)

# Wildly wrong predictions yield a large negative score.
y_true = [5, 6, 7, 8]
y_pred = [-100, 524, -1, 3]
r2 = r2_score(y_true, y_pred)
print(r2)
# (removed the stray trailing "r2_" token, which raised NameError at runtime)
4814929 | from bs4 import BeautifulSoup
import urllib2,re, requests, os
from prettytable import PrettyTable
x = PrettyTable()
html = urllib2.urlopen("https://app.wodify.com/Schedule/PublicCalendarListView.aspx?tenant=3920").read()
soup = BeautifulSoup(html,"lxml")
table = soup.find('table', attrs={'class': 'TableRecords'})
table_body = table.find('tbody')
FinalData = []
FinalData2 = []
for row in table_body.find_all("tr"):
for stat in row.find_all("td", attrs={'class':['TableRecords_EvenLine', 'TableRecords_OddLine']}):
dictContent ={}
dict2Content = {}
z={}
for spn in stat.find_all("span"):
if spn.has_attr("class"):
if 'h3' in spn["class"]:
x.field_names = [str(spn.contents[0]), str(spn.contents[2])]
print x.get_string()
dictContent["Day"] = str(spn.contents[0])
dictContent["Date"] = str(spn.contents[2])
elif spn.has_attr("title"):
if 'Olympic Weightlifting' in spn["title"]:
if re.match(r'^[0-9]', spn["title"][0]):
x.add_row([str(spn["title"]),""])
dict2Content["Title"] = str(spn["title"])
elif 'CrossFit' in spn["title"]:
if re.match(r'^[0-9]', spn["title"][0]):
x.add_row([str(spn["title"]),""])
dict2Content["Title"] = str(spn["title"])
elif 'Open Gym' in spn["title"]:
if re.match(r'^[0-9]', spn["title"][0]):
x.add_row([str(spn["title"]),""])
dict2Content["Title"] = str(spn["title"])
elif 'Athletic Conditioning' in spn["title"]:
x.add_row([str(spn["title"]),""])
if re.match(r'^[0-9]', spn["title"][0]):
dict2Content["Title"] = str(spn["title"])
if len(dictContent) >0:
FinalData.append(dictContent)
if len(dict2Content)>0:
FinalData2.append(dict2Content)
print("Final data val:", FinalData)
print("Final data 2val:", FinalData2)
if len(FinalData2) >0:
z = dict(FinalData + FinalData2)
print("the val of z: ",z)
print("THe final data is:", FinalData)
print("THe final data 2 is:", FinalData2)
'''
elif spn.has_attr("class"):
if 'h3' in spn["class"]:
print(spn.contents)
elif spn.has_attr("style"):
print(spn.text)
'''
| StarcoderdataPython |
3358624 | import torch
from torch import Tensor
# Small constant guarding the IoU and center-distance ratios against
# division by zero.
EPS = torch.tensor(1e-8)


@torch.jit.script
def dist_iou_ab(box_a: Tensor, box_b: Tensor, eps=EPS):
    """
    Args:
        box_a: tensor of shape [batch_size, boxes_a, 4]
        box_b: tensor of shape [batch_size, boxes_b, 4]
        gamma: float
        eps: float

    Original:
        https://github.com/Zzh-tju/CIoU/blob/8995056b1e93b86d03c384f042514391b70e58e0/layers/functions/detection.py#L162
        https://github.com/Zzh-tju/CIoU/blob/8995056b1e93b86d03c384f042514391b70e58e0/layers/box_utils.py#L82
    """
    # Boxes are (y0, x0, y1, x1).  Returns a pairwise Distance-IoU matrix
    # [batch, A, B]: IoU minus (center distance / enclosing-box diagonal)^0.9.
    assert box_a.dim() == 3
    assert box_b.dim() == 3
    assert box_a.size(0) == box_b.size(0)

    # NOTE(review): box_a is expanded to size A along dim 2 and box_b to B
    # along dim 1 -- dimensionally this only lines up when A == B (true for
    # the self-IoU use in cluster NMS); confirm before reusing with A != B.
    A, B = box_a.size(1), box_b.size(1)
    box_a = box_a.unsqueeze(2).expand(-1, -1, A, -1)
    box_b = box_b.unsqueeze(1).expand(-1, B, -1, -1)

    # Pairwise intersection area.
    inter_yx0 = torch.max(box_a[..., :2], box_b[..., :2])
    inter_yx1 = torch.min(box_a[..., 2:4], box_b[..., 2:4])
    inter_hw = torch.clamp_min_(inter_yx1 - inter_yx0, 0)
    inter_area = torch.prod(inter_hw, dim=-1)
    # del inter_hw, inter_yx0, inter_yx1

    # Union area and plain IoU.
    hw_a = box_a[..., 2:4] - box_a[..., :2]
    hw_b = box_b[..., 2:4] - box_b[..., :2]
    area_a = torch.prod(hw_a, dim=-1)
    area_b = torch.prod(hw_b, dim=-1)
    union_area = area_a + area_b - inter_area
    iou = inter_area / (union_area + eps)
    # del inter_area, union_area, area_a, area_b, hw_a, hw_b

    # Squared distance between box centers.
    center_a = (box_a[..., :2] + box_a[..., 2:4]) / 2
    center_b = (box_b[..., :2] + box_b[..., 2:4]) / 2
    inter_diag = torch.pow(center_b - center_a, 2).sum(dim=-1)

    # Squared diagonal of the smallest box enclosing both.
    clos_yx0 = torch.min(box_a[..., :2], box_b[..., :2])
    clos_yx1 = torch.max(box_a[..., 2:4], box_b[..., 2:4])
    clos_hw = torch.clamp_min_(clos_yx1 - clos_yx0, 0)
    clos_diag = torch.pow(clos_hw, 2).sum(dim=-1)
    # del clos_yx0, clos_yx1, clos_hw, center_a, center_b

    dist = inter_diag / (clos_diag + eps)
    # The ** 0.9 exponent softens the distance penalty (gamma-style variant
    # of the standard DIoU "iou - dist").
    return iou - dist ** 0.9
def cluster_nms_dist_iou(boxes: Tensor, scores: Tensor, iou_threshold=0.5, top_k=200):
assert boxes.dim() == 2
assert scores.dim() == 2
assert boxes.size(0) == scores.size(0)
scores, classes = torch.max(scores, dim=1)
# scores: [detections]
_, idx = scores.sort(descending=True)
idx = idx[:top_k]
# add batch dim
top_k_boxes = boxes[idx][None, ...]
# [1, top_k, top_k] -> [top_k, top_k]
iou = dist_iou_ab(top_k_boxes, top_k_boxes)[0]
iou = iou.triu_(diagonal=1)
best_iou = torch.zeros_like(idx)
iou_b = iou
for i in range(top_k):
iou_a = iou_b
best_iou, _ = torch.max(iou_b, dim=0)
# keep far away boxes
keep = (best_iou <= iou_threshold)[:, None].expand_as(iou_b)
iou_b = torch.where(keep, iou, torch.zeros_like(iou_b))
if iou_b.eq(iou_a).all():
break
idx = idx[best_iou <= iou_threshold]
return boxes[idx], scores[idx], classes[idx]
| StarcoderdataPython |
181428 | <filename>src/tree/leetcode_tree_solution.py
# -*- coding: utf-8 -*-
import operator
from collections import deque
from sys import maxsize
class TreeNode(object):
    """A node in a binary tree: a value plus left/right child links."""

    def __init__(self, x):
        self.val = x
        self.left = self.right = None
class Solution(object):
    """LeetCode binary-tree solutions.  Python 2 code: uses xrange and the
    builtin reduce."""

    def isValidBST(self, root):
        """
        :type root: TreeNode
        :rtype: bool
        """
        # Recursive range check; initial bounds sit just outside the machine
        # int range so any node value is admissible at the root.
        minint = - maxsize - 1
        return self._valid_bst(root, minint - 1, maxsize + 1)

    def _valid_bst(self, root, min, max):
        # Every node must lie strictly inside the (min, max) window inherited
        # from its ancestors.  NOTE: parameters shadow the min/max builtins.
        if not root:
            return True
        if root.val >= max or root.val <= min:
            return False
        return self._valid_bst(root.left, min, root.val) and self._valid_bst(root.right, root.val, max)

    def isValidBST_instinct(self, root):
        """
        :type root: TreeNode
        :rtype: bool
        """
        # Alternative: a valid BST's in-order traversal must be ascending.
        if not root:
            return True
        in_order_traversal = self._traversal_tree_in_order(root)
        return self._is_list_in_ascend_order(in_order_traversal)

    def _traversal_tree_in_order(self, root):
        # Collect the recursive in-order traversal into a list.
        traversal_list = []
        self._traversal_in_order_bst_helper(root, traversal_list)
        return traversal_list

    def _traversal_in_order_bst_helper(self, node, result):
        if not node:
            return
        self._traversal_in_order_bst_helper(node.left, result)
        result.append(node.val)
        self._traversal_in_order_bst_helper(node.right, result)

    @staticmethod
    def _is_list_in_ascend_order(order_list):
        # Non-strict check (<): adjacent duplicates pass, so equal-valued
        # neighbours would be accepted by isValidBST_instinct.
        for i in xrange(1, len(order_list)):
            if order_list[i] < order_list[i - 1]:
                return False
        return True

    def inorderTraversal(self, root):
        """
        :type root: TreeNode
        :rtype: List[int]
        """
        return self._traversal_tree_in_order(root)

    def inorderTraversal_iteration(self, root):
        # Iterative in-order: descend left pushing ancestors, then pop,
        # record, and step right.
        result_list = []
        stack = deque()
        current = root
        while current or stack:
            while current:
                stack.append(current)
                current = current.left
            current = stack.pop()
            result_list.append(current.val)
            current = current.right
        return result_list

    def preorderTraversal(self, root):
        """
        :type root: TreeNode
        :rtype: List[int]
        """
        result_list = []
        self._traversal_tree_pre_order(root, result_list)
        return result_list

    def _traversal_tree_pre_order(self, root, result):
        if not root:
            return
        result.append(root.val)
        self._traversal_tree_pre_order(root.left, result)
        self._traversal_tree_pre_order(root.right, result)

    def preorderTraversal_iteration(self, root):
        """
        :type root: TreeNode
        :rtype: List[int]
        """
        # Push right before left so the left subtree is popped first; None
        # children are pushed and skipped on pop.
        result_list = []
        stack = deque()
        stack.append(root)
        while stack:
            node = stack.pop()
            if node:
                result_list.append(node.val)
                stack.append(node.right)
                stack.append(node.left)
        return result_list

    def postorderTraversal(self, root):
        """
        :type root: TreeNode
        :rtype: List[int]
        """
        result_list = []
        self._traversal_tree_post_order(root, result_list)
        return result_list

    def _traversal_tree_post_order(self, root, result):
        if not root:
            return
        self._traversal_tree_post_order(root.left, result)
        self._traversal_tree_post_order(root.right, result)
        result.append(root.val)

    def postorderTraversal_iteration(self, root):
        """
        :type root: TreeNode
        :rtype: List[int]
        """
        # Produce root-right-left order, then reverse to get left-right-root.
        result_list = []
        stack = deque()
        stack.append(root)
        while stack:
            node = stack.pop()
            if node:
                result_list.append(node.val)
                stack.append(node.left)
                stack.append(node.right)
        return result_list[::-1]

    def lowestCommonAncestor(self, root, p, q):
        """
        :type root: TreeNode
        :type p: TreeNode
        :type q: TreeNode
        :rtype: TreeNode
        """
        # If each subtree yields one target, root is the LCA; otherwise
        # propagate whichever side found anything.
        if not root or root == p or root == q:
            return root
        left = self.lowestCommonAncestor(root.left, p, q)
        right = self.lowestCommonAncestor(root.right, p, q)
        # if not left:
        #     return right
        # else:
        #     if not right:
        #         return left
        #     else:
        #         return root
        return root if left and right else left or right

    def lowestCommonAncestorBST(self, root, p, q):
        """
        :type root: TreeNode
        :type p: TreeNode
        :type q: TreeNode
        :rtype: TreeNode
        """
        # BST shortcut: descend toward the side that contains both values;
        # the first split point is the LCA.
        if root.val > p.val and root.val > q.val:
            return self.lowestCommonAncestorBST(root.left, p, q)
        elif root.val < p.val and root.val < q.val:
            return self.lowestCommonAncestorBST(root.right, p, q)
        return root

    def rightSideView(self, root):
        """
        :type root: TreeNode
        :rtype: List[int]
        """
        # BFS level by level; record the last node dequeued at each level.
        if not root:
            return root
        result = []
        queue = deque()
        queue.append(root)
        while queue:
            batch_size = len(queue)
            for i in xrange(batch_size):
                v = queue.popleft()
                if v.left:
                    queue.append(v.left)
                if v.right:
                    queue.append(v.right)
                if i == batch_size - 1:
                    result.append(v.val)
        return result

    def rightSideView_dfs(self, root):
        """
        :type root: TreeNode
        :rtype: List[int]
        """
        # DFS with a LIFO stack: left is pushed last of the pair... actually
        # right children are popped first, so the first node recorded per
        # depth is the rightmost one; per-level lists are then concatenated.
        if not root:
            return root
        result = []
        stack = deque()
        stack.append((root, 0))
        while stack:
            node, level = stack.pop()
            if len(result) <= level:
                result.append([])
            if len(result[level]) == 0:
                result[level].append(node.val)
            if node.left:
                stack.append((node.left, level + 1))
            if node.right:
                stack.append((node.right, level + 1))
        return reduce(operator.concat, result)
| StarcoderdataPython |
# Maximum number of simultaneous client connections accepted.
MAX_CLIENTS = 500
# Timeouts/intervals below are presumably in seconds -- TODO confirm.
RESPONSE_TIMEOUT = 2
BROADCAST_INTERVAL = 2
# Text encoding for socket payloads.
ENCODING = 'UTF-8'
# Path of the catkin (ROS) workspace on the target machine.
CATKIN_WS = '/root/catkin_ws'
DISCOVERABLE_TIMEOUT = 0.2
# Port number prefixes/fixed ports for the listener/broadcaster sockets
# (presumably combined with a per-robot offset -- verify in the socket code).
LISTENER_PORT_PREFIX = 8222
BROADCASTER_PORT_PREFIX = 8111
STATIC_LISTENER_PORT = 8877
QUICK_WAIT_TIMER = 0.05
# ROS topic names used by the publisher/subscriber.
PUB_TOPIC = 'nearby_robots'
SUB_TOPIC = 'coms_listening'
| StarcoderdataPython |
112918 | <filename>at_tmp/model/FUNC/USERINFO/USER_OPT_INFO.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/10/31 10:41
# @Author : bxf
# @File : USER_OPT_INFO.py
# @Software: PyCharm
from model.util.TMP_DB_OPT import *
from model.FUNC.USERINFO.LOG_IN import *
from model.util.TMP_MODEL import *
'''
操作数据:
{
"opt_uri":"",
"opt_common":"",
"opt_user":"",
"opt_ip":"",
}
'''
class USER_OPT:
    """Persistence helper for user operation audit records."""

    def insertUserOpt(self, data):
        """Insert one user-operation record into ``t_user_opt_info``.

        Returns the DB helper's result on success, or an error string on
        failure (existing best-effort contract is kept).

        BUG FIX: the original assigned the insert result to ``return_data``
        but never returned it, so the success path always yielded ``None``.
        """
        try:
            return_data = insertToDatabase("t_user_opt_info", toDict(data))
            return return_data
        except Exception as e:
            return "用户操作信息插入失败,请检查" + str(e)
| StarcoderdataPython |
1689197 | <reponame>madeso/build<filename>windows.py
#!/usr/bin/env python3
"""build script for windows for ride"""
import os
import subprocess
import argparse
import typing
import json
from collections.abc import Callable
import buildtools.core as btcore
import buildtools.deps as btdeps
import buildtools.cmake as btcmake
import buildtools.args as btargs
import buildtools.visualstudio as btstudio
def default_or_required_string(arg, to_string):
    """Render *arg* for CLI help text.

    ``'<required>'`` when *arg* is None, otherwise ``to_string(arg)``
    wrapped in parentheses.
    """
    return '<required>' if arg is None else f'({to_string(arg)})'
class BuildEnviroment:
    """Compiler/platform pair driving generator selection and CLI defaults.

    Either field may be ``None`` until configured; ``validate`` /
    ``exit_if_invalid`` guard code paths that require both to be set.
    """

    def __init__(self, compiler : typing.Optional[btargs.Compiler], platform : typing.Optional[btargs.Platform]):
        self.compiler = compiler
        self.platform = platform
    def get_generator(self) -> btcmake.Generator:
        """Map (compiler, platform) to the matching Visual Studio CMake generator."""
        return btstudio.visual_studio_generator(self.compiler, self.platform)
    def save_to_file(self, path: str):
        """Save the build environment to a JSON file.

        Reads ``.name`` of both fields, so both must be set — call
        ``validate()`` first.
        """
        data = {}
        data['compiler'] = self.compiler.name
        data['platform'] = self.platform.name
        with open(path, 'w') as f:
            json.dump(data, f, indent=4)
    def add_options(self, parser: argparse.ArgumentParser):
        """Add --compiler/--platform/--force options to *parser*.

        An option is only *required* when the corresponding field is still
        unset (i.e. no saved settings existed).
        """
        parser.add_argument('--compiler', type=str.lower, default=None, help=f'compiler to use {default_or_required_string(self.compiler, btargs.compiler_to_string)}', choices=btargs.all_compiler_names(), required=self.compiler is None)
        parser.add_argument('--platform', type=str.lower, default=None, help=f'platform to use {default_or_required_string(self.platform, btargs.platform_to_string)}', choices=btargs.all_platform_names(), required=self.platform is None)
        parser.add_argument('--force', action='store_true', help='force the compiler and platform to be changed')
    def update_from_args(self, args: argparse.Namespace):
        """Update the environment from parsed CLI arguments.

        Changing an already-configured compiler/platform is refused (the
        program exits) unless ``--force`` was given.
        """
        failure = False
        if args.compiler is not None:
            new_compiler = btargs.compiler_from_name(args.compiler, True)
            if self.compiler is not None and self.compiler != new_compiler:
                if args.force:
                    print(f'WARNING: Compiler changed via argument from {btargs.compiler_to_string(self.compiler)} to {btargs.compiler_to_string(new_compiler)}')
                    self.compiler = new_compiler
                else:
                    print(f'ERROR: Compiler changed via argument from {btargs.compiler_to_string(self.compiler)} to {btargs.compiler_to_string(new_compiler)}')
                    failure = True
            else:
                self.compiler = new_compiler
        if args.platform is not None:
            new_platform = btargs.platform_from_name(args.platform, True)
            if self.platform is not None and self.platform != new_platform:
                if args.force:
                    print(f'WARNING: Platform changed via argument from {btargs.platform_to_string(self.platform)} to {btargs.platform_to_string(new_platform)}')
                    self.platform = new_platform
                else:
                    print(f'ERROR: Platform changed via argument from {btargs.platform_to_string(self.platform)} to {btargs.platform_to_string(new_platform)}')
                    failure = True
            else:
                self.platform = new_platform
        if failure:
            print('ERROR: Build environment is invalid')
            # abort: conflicting configuration without --force
            exit(-2)
    def validate(self):
        """Return True when both compiler and platform are set; print errors otherwise."""
        status = True
        if self.compiler is None:
            print('ERROR: Compiler not set')
            status = False
        if self.platform is None:
            print('ERROR: Platform not set')
            status = False
        return status
    def exit_if_invalid(self):
        """Terminate the program when the environment is incomplete."""
        if not self.validate():
            exit(-2)
def load_build_from_file(path: str, print_error: bool) -> BuildEnviroment:
    """Read the saved build environment from the JSON file at *path*.

    A missing file yields an empty (unconfigured) environment instead of
    failing.
    """
    if not btcore.file_exists(path):
        return BuildEnviroment(None, None)
    with open(path, 'r') as file:
        stored = json.load(file)
    return BuildEnviroment(
        btargs.compiler_from_name(stored['compiler'], print_error),
        btargs.platform_from_name(stored['platform'], print_error),
    )
class Dependency:
    """A named build dependency whose behaviour is supplied by three callables.

    Pure strategy object: every public method simply delegates to the
    callable provided at construction time.
    """

    def __init__(self, name: str, add_cmake_arguments_impl: Callable[[btcmake.CMake, BuildEnviroment], None], install_impl: Callable[[BuildEnviroment], None], status_impl: Callable[[], typing.List[str]]):
        self.name = name
        self.status_impl = status_impl
        self.install_impl = install_impl
        self.add_cmake_arguments_impl = add_cmake_arguments_impl

    def add_cmake_arguments(self, cmake: btcmake.CMake, env: BuildEnviroment):
        """Let this dependency contribute arguments to the CMake invocation."""
        self.add_cmake_arguments_impl(cmake, env)

    def install(self, env: BuildEnviroment):
        """Download/build/install this dependency for *env*."""
        self.install_impl(env)

    def status(self) -> typing.List[str]:
        """Human-readable status lines for the ``stat`` sub-command."""
        return self.status_impl()
class Data:
    """Filesystem layout and dependency registry for one build configuration."""

    def __init__(self, name: str, root_dir: str):
        self.dependencies = []
        self.name = name
        self.root_dir = root_dir
        # Everything generated lives under <root>/build.
        base = os.path.join(root_dir, "build")
        self.build_base_dir = base
        self.build_dir = os.path.join(base, name)
        self.dependency_dir = os.path.join(base, "deps")

    def get_path_to_settings(self) -> str:
        """Path of the JSON file holding the saved build environment."""
        return os.path.join(self.build_base_dir, "settings.json")

    def load_build(self, print_error: bool) -> BuildEnviroment:
        """Load the saved build environment (empty one when no settings exist)."""
        return load_build_from_file(self.get_path_to_settings(), print_error)

    def add_dependency(self, dep: Dependency):
        """Register *dep* so install/cmake/status steps will include it."""
        self.dependencies.append(dep)
def default_data(name: str) -> Data:
    """Build a Data instance rooted at the current working directory."""
    cwd = os.getcwd()
    return Data(name, cwd)
def save_build(build: BuildEnviroment, data: Data):
    """Persist *build* into the settings file, creating the build dir if needed."""
    os.makedirs(data.build_base_dir, exist_ok=True)
    target = data.get_path_to_settings()
    build.save_to_file(target)
###############################################################################
def add_dependency_sdl2(data: Data):
    """Register the SDL2 dependency (source root + cmake build dir) on *data*."""
    sdl_root = os.path.join(data.dependency_dir, 'sdl2')
    sdl_build = os.path.join(sdl_root, 'cmake-build')

    def configure(cmake: btcmake.CMake, env: BuildEnviroment):
        # Point the project's SDL2 find-logic at the local checkout/build.
        cmake.add_argument('SDL2_HINT_ROOT', sdl_root)
        cmake.add_argument('SDL2_HINT_BUILD', sdl_build)

    def install(build: BuildEnviroment):
        btdeps.install_dependency_sdl2(data.dependency_dir, sdl_root, sdl_build, build.get_generator())

    def status() -> typing.List[str]:
        return [f'Root: {sdl_root}', f'Build: {sdl_build}']

    data.add_dependency(Dependency('sdl2', configure, install, status))
def add_dependency_python(data: Data):
    """Register the python dependency on *data*.

    Nothing is installed; the dependency only injects PYTHON_EXECUTABLE
    into the CMake arguments when the ``PYTHON`` environment variable is
    set (Windows-style path join, as this is the windows build script).
    """
    def add_python_arguments(cmake: btcmake.CMake, env: BuildEnviroment):
        if 'PYTHON' in os.environ:
            python_exe = os.environ['PYTHON']+'\\python.exe'
            # BUG FIX: was `project.add_argument(...)` — `project` is not
            # defined in this scope, so any run with PYTHON set raised
            # NameError. The CMake object is the `cmake` parameter.
            cmake.add_argument('PYTHON_EXECUTABLE:FILEPATH', python_exe)
    def install_python_dependency(build: BuildEnviroment):
        # The host interpreter needs no download or build step.
        pass
    def status_python_dependency() -> typing.List[str]:
        return []
    d = Dependency('python', add_python_arguments, install_python_dependency, status_python_dependency)
    data.add_dependency(d)
def add_dependency_assimp(data: Data):
    """Register the assimp dependency (checkout + cmake install prefix) on *data*."""
    assimp_folder = os.path.join(data.dependency_dir, 'assimp')
    assimp_install_folder = os.path.join(assimp_folder, 'cmake-install')

    def configure(cmake: btcmake.CMake, env: BuildEnviroment):
        # Tell the project's find-logic where the local install lives.
        cmake.add_argument('ASSIMP_ROOT_DIR', assimp_install_folder)

    def install(build: BuildEnviroment):
        btdeps.install_dependency_assimp(data.dependency_dir, assimp_folder, assimp_install_folder, build.get_generator())

    def status() -> typing.List[str]:
        return [f'Root: {assimp_folder}', f'Install: {assimp_install_folder}']

    data.add_dependency(Dependency('assimp', configure, install, status))
###############################################################################
def generate_cmake_project(build: BuildEnviroment, data: Data) -> btcmake.CMake:
    """Create the CMake project, letting every dependency add its arguments."""
    cmake = btcmake.CMake(data.build_dir, data.root_dir, build.get_generator())
    for dependency in data.dependencies:
        dependency.add_cmake_arguments(cmake, build)
    return cmake
def run_install(build: BuildEnviroment, data: Data):
    """Install every registered dependency for the current build environment."""
    for dependency in data.dependencies:
        dependency.install(build)
def run_cmake(build: BuildEnviroment, data: Data, only_print: bool):
    """Configure the cmake project; *only_print* just echoes the command."""
    project = generate_cmake_project(build, data)
    project.config(only_print)
def run(args) -> str:
    """Run a command and return its combined stdout+stderr as bytes.

    On non-zero exit the failure is reported and the failed command's
    captured output is returned instead of raising.
    """
    try:
        return subprocess.check_output(args, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as error:
        print(f'Failed to run {error.cmd} {error.returncode}')
        return error.stdout
###############################################################################
def on_cmd_install(arg, data: Data):
    """Handle ``install``: resolve + persist the environment, install deps."""
    build = data.load_build(True)
    build.update_from_args(arg)
    build.exit_if_invalid()
    save_build(build, data)
    run_install(build, data)
def on_cmd_cmake(arg, data: Data):
    """Handle ``cmake``: resolve + persist the environment, configure cmake.

    ``--print`` (arg.print) only echoes the cmake command.
    """
    build = data.load_build(True)
    build.update_from_args(arg)
    build.exit_if_invalid()
    save_build(build, data)
    run_cmake(build, data, arg.print)
def on_cmd_dev(arg, data: Data):
    """Handle ``dev``: convenience command = install + cmake in one step."""
    build = data.load_build(True)
    build.update_from_args(arg)
    build.exit_if_invalid()
    save_build(build, data)
    run_install(build, data)
    run_cmake(build, data, False)
def on_cmd_build(arg, data: Data):
    """Handle ``build``: resolve + persist the environment, run the build."""
    build = data.load_build(True)
    build.update_from_args(arg)
    build.exit_if_invalid()
    save_build(build, data)
    generate_cmake_project(build, data).build()
def on_status_build(args, data: Data):
    """Handle ``stat``: print project paths, dependencies and environment."""
    build = data.load_build(True)
    build.update_from_args(args)
    build.exit_if_invalid()
    print(f'Project: {data.name}')
    print()
    print(f'Data: {data.get_path_to_settings()}')
    print(f'Root: {data.root_dir}')
    print(f'Build: {data.build_dir}')
    print(f'Dependencies: {data.dependency_dir}')
    indent = ' ' * 4
    for dep in data.dependencies:
        print(f'{indent}{dep.name}')
        lines = dep.status()
        for line in lines:
            print(f'{indent*2}{line}')
    print()
    print(f'Compiler: {btargs.compiler_to_string(build.compiler)}')
    print(f'Platform: {btargs.platform_to_string(build.platform)}')
    gen = build.get_generator()
    print(f'CMake generator: {gen.generator}')
    if gen.arch is not None:
        # BUG FIX: user-facing typo — was "archictecture".
        print(f'CMake architecture: {gen.arch}')
    print()
###############################################################################
def main(data: Data):
    """Entry point: build the CLI and dispatch to the chosen sub-command."""
    parser = argparse.ArgumentParser(description='Does the windows build')
    parser.set_defaults(func=None)
    subparsers = parser.add_subparsers()
    # Load any previously saved environment so its values become defaults
    # (and --compiler/--platform become optional) for every sub-command.
    build = data.load_build(False)
    install_parser = subparsers.add_parser('install', help='install dependencies')
    install_parser.set_defaults(func=on_cmd_install)
    build.add_options(install_parser)
    cmmake_parser = subparsers.add_parser('cmake', help='configure cmake project')
    cmmake_parser.add_argument('--print', action='store_true')
    cmmake_parser.set_defaults(func=on_cmd_cmake)
    build.add_options(cmmake_parser)
    dev_parser = subparsers.add_parser('dev', help='dev is install+cmake')
    dev_parser.set_defaults(func=on_cmd_dev)
    build.add_options(dev_parser)
    build_parser = subparsers.add_parser('build', help='build the project')
    build_parser.set_defaults(func=on_cmd_build)
    build.add_options(build_parser)
    status_parser = subparsers.add_parser('stat', help='print the status of the build')
    status_parser.set_defaults(func=on_status_build)
    build.add_options(status_parser)
    arg = parser.parse_args()
    if arg.func is None:
        # No sub-command given: show usage instead of failing.
        parser.print_help()
    else:
        arg.func(arg, data)
| StarcoderdataPython |
3217871 | <reponame>Camiloasc1/AlgorithmsUNAL
import sys
def makeDLinkW(G, n1, n2, W, add = False):
    """Insert the directed edge n1 -> n2 with weight W into adjacency dict *G*.

    With ``add=True`` the weight accumulates onto any existing n1 -> n2
    edge.  The target node is also materialised (with no outgoing edges)
    so every vertex appears as a key.  Mutates and returns *G*.
    """
    outgoing = G.setdefault(n1, {})
    if add:
        outgoing[n2] = outgoing.get(n2, 0) + W
    else:
        outgoing[n2] = W
    G.setdefault(n2, {})
    return G
def solve(G, cert, l):
    """Verify *cert* as a Hamiltonian cycle of G with path length <= l.

    Checks: no vertex revisited, every consecutive edge exists, a closing
    edge cert[-1] -> cert[0] exists, all vertices of G are covered, and the
    summed edge weight fits the budget.

    NOTE(review): the weight of the closing edge is only checked for
    existence, not added to the distance — confirm against the problem
    statement.
    """
    seen = set()
    dist = 0
    for cur, nxt in zip(cert[:-1], cert[1:]):
        if cur in seen:
            return False
        seen.add(cur)
        if nxt not in G[cur]:
            return False
        dist += G[cur][nxt]
    if cert[0] not in G[cert[-1]]:
        return False
    seen.add(cert[-1])
    if len(G) != len(seen):
        return False
    return dist <= l
def read():
    # Driver (Python 2 syntax: `print` statement, `xrange`). For each of T
    # test cases: read n (nodes), m (edges), l (length budget), then the m
    # weighted directed edges and the certificate path; print 'si' when the
    # certificate is a valid Hamiltonian cycle within the budget, else 'no'.
    T = int(sys.stdin.readline())
    for _ in xrange(T):
        G = {}
        n, m, l = map(int, sys.stdin.readline().split())
        # Pre-create every node so isolated vertices still count in solve().
        for i in xrange(n):
            G[i] = {}
        for _ in xrange(m):
            a, b, w = map(int, sys.stdin.readline().split())
            makeDLinkW(G, a, b, w)
        cert = map(int, sys.stdin.readline().split())
        if solve(G, cert, l):
            print 'si'
        else:
            print 'no'
read()
| StarcoderdataPython |
1726335 | #!/usr/bin/env python3
import os
from navicatGA.xyz_solver import XYZGenAlgSolver
from navicatGA.quantum_wrappers_xyz import geom2ehl
from navicatGA.chemistry_xyz import get_alphabet_from_path, get_default_alphabet
from chimera import Chimera
# For multiobjective optimization we use chimera to scalarize
chimera = Chimera(tolerances=[0.1, 0.4], absolutes=[False, False], goals=["max", "min"])
database = os.path.join(os.path.dirname(os.path.realpath(__file__)), "scaffolds/")
def my_fitness_function(lot=0):
    """Return a one-argument fitness callable evaluating a geometry at level *lot*."""
    def evaluate(geom):
        return geom2ehl(geom, lot=lot)
    return evaluate
# Per-gene alphabets: gene 0 is drawn from the scaffold files on disk,
# genes 1-2 from the package's default alphabet.
alphabet_list = [
    get_alphabet_from_path(database),
    get_default_alphabet(),
    get_default_alphabet(),
]
def test_scalarizer_26():
    """End-to-end GA run using chimera to scalarize the two fitness objectives."""
    print(alphabet_list[0])
    solver = XYZGenAlgSolver(
        n_genes=3,
        pop_size=5,
        max_gen=5,
        mutation_rate=0.25,
        selection_rate=0.15,
        fitness_function=my_fitness_function(lot=0),
        alphabet_list=alphabet_list,
        scalarizer=chimera,
        random_state=1337,  # fixed seed keeps the run reproducible
        starting_random=True,
        logger_level="TRACE",
        selection_strategy="boltzmann",
        prune_duplicates=True,
        n_crossover_points=1,
        verbose=True,
        to_stdout=True,
        to_file=False,
        show_stats=True,
    )
    solver.solve()
    print(solver.printable_fitness)
    solver.close_solver_logger()
if __name__ == "__main__":
test_scalarizer_26()
| StarcoderdataPython |
43711 | import datetime
import io
import json
import zipfile
from pathlib import Path
import pyrsistent
import pytest
import yaml
from aiohttp import web
from openapi_core.shortcuts import create_spec
from yarl import URL
from rororo import (
BaseSettings,
get_openapi_context,
get_openapi_schema,
get_openapi_spec,
openapi_context,
OperationTableDef,
setup_openapi,
setup_settings_from_environ,
)
from rororo.annotations import DictStrAny
from rororo.openapi import get_validated_data
from rororo.openapi.exceptions import (
ConfigurationError,
OperationError,
validation_error_context,
ValidationError,
)
ROOT_PATH = Path(__file__).parent
INVALID_OPENAPI_JSON_PATH = ROOT_PATH / "invalid-openapi.json"
INVALID_OPENAPI_YAML_PATH = ROOT_PATH / "invalid-openapi.yaml"
OPENAPI_JSON_PATH = ROOT_PATH / "openapi.json"
OPENAPI_YAML_PATH = ROOT_PATH / "openapi.yaml"
TEST_NESTED_OBJECT = {
"uid": "6fccda1b-0873-4c8a-bceb-a2acfe5851da",
"type": "nested-object",
"data": {
"data_item": {"key": "value1", "any_data": {}},
"data_items": [
{"key": "value2", "any_data": {"two": 2}},
{"key": "value3", "any_data": {"three": 3}},
],
"str_items": ["1", "2", "3"],
},
"any_data": {"key1": "value1", "key2": "value2", "list": [1, 2, 3]},
}
operations = OperationTableDef()
invalid_operations = OperationTableDef()
def custom_json_loader(content: bytes) -> DictStrAny:
    """Deserialize raw JSON bytes (custom schema_loader used by the tests)."""
    return json.loads(content)
def custom_yaml_loader(content: bytes) -> DictStrAny:
    """Deserialize raw YAML bytes with the safe loader (custom schema_loader)."""
    return yaml.safe_load(content)
@invalid_operations.register("does-not-exist")
async def does_not_exist(request: web.Request) -> web.Response:
    """Handler registered under an operationId absent from the schema.

    Used by test_setup_openapi_invalid_operation to verify that
    setup_openapi() raises OperationError.
    """
    return web.Response(text="Hello, world!")
@operations.register("create-post")
async def create_post(request: web.Request) -> web.Response:
    """Create a post; reject naive (timezone-less) ``published_at`` values."""
    data = get_validated_data(request)
    published_at: datetime.datetime = data["published_at"]
    # Extra check beyond the schema: the timestamp must be timezone-aware.
    with validation_error_context("body", "published_at"):
        if published_at.tzinfo is None:
            raise ValidationError(message="Invalid value")
    return web.json_response(
        {**data, "id": 1, "published_at": data["published_at"].isoformat()},
        status=201,
    )
@operations.register
async def hello_world(request: web.Request) -> web.Response:
    """Greet using optional ``name``/``email`` query parameters with defaults."""
    with openapi_context(request) as context:
        name = context.parameters.query.get("name") or "world"
        email = context.parameters.query.get("email") or "<EMAIL>"
        return web.json_response(
            {"message": f"Hello, {name}!", "email": email}
        )
@operations.register
async def retrieve_any_object_from_request_body(
    request: web.Request,
) -> web.Response:
    """Echo the validated request body back (thawed to plain Python types)."""
    return web.json_response(pyrsistent.thaw(get_validated_data(request)))
@operations.register
async def retrieve_array_from_request_body(
    request: web.Request,
) -> web.Response:
    """Echo the validated array request body back as JSON."""
    with openapi_context(request) as context:
        return web.json_response(pyrsistent.thaw(context.data))
@operations.register
async def retrieve_empty(request: web.Request) -> web.Response:
    """Return 204, surfacing the optional apiKey security value in a header."""
    context = get_openapi_context(request)
    return web.Response(
        status=204, headers={"X-API-Key": context.security.get("apiKey") or ""}
    )
@operations.register
async def retrieve_invalid_response(request: web.Request) -> web.Response:
    """Deliberately return a body violating the response schema.

    Exercised by test_validate_response* (expects 422 when response
    validation is enabled).
    """
    return web.json_response({})
@operations.register
async def retrieve_post(request: web.Request) -> web.Response:
    """Return a stub post for the validated ``post_id`` path parameter."""
    context = get_openapi_context(request)
    return web.json_response(
        {"id": context.parameters.path["post_id"], "title": "The Post"}
    )
@operations.register
async def retrieve_nested_object_from_request_body(
    request: web.Request,
) -> web.Response:
    """Echo a nested object body, exposing the deserialized types in headers.

    The X-Data-* headers let tests assert that openapi-core produced
    pyrsistent containers and a UUID for the ``uid`` field.
    """
    with openapi_context(request) as context:
        data = pyrsistent.thaw(context.data)
        # UUID is not JSON-serializable; echo it back as a string.
        data["uid"] = str(data["uid"])
        return web.json_response(
            data,
            headers={
                "X-Data-Type": str(type(context.data)),
                "X-Data-Data-Data-Items-Type": str(
                    type(context.data["data"]["data_items"])
                ),
                "X-Data-Data-Str-Items-Type": str(
                    type(context.data["data"]["str_items"])
                ),
                "X-Data-UID-Type": str(type(context.data["uid"])),
            },
        )
@operations.register
async def retrieve_zip(request: web.Request) -> web.Response:
    """Return a small in-memory ZIP attachment containing hello.txt."""
    output = io.BytesIO()
    with zipfile.ZipFile(output, "w") as handler:
        handler.writestr("hello.txt", "Hello, world!")
    output.seek(0)
    return web.Response(
        body=output,
        content_type="application/zip",
        headers={"Content-Disposition": "attachment; filename=hello.zip"},
    )
@operations.register
async def upload_image(request: web.Request) -> web.Response:
    """Echo an uploaded binary body back with the request's content type (201)."""
    return web.Response(
        body=get_openapi_context(request).data,
        content_type=request.content_type,
        status=201,
    )
@operations.register
async def upload_text(request: web.Request) -> web.Response:
    """Echo an uploaded text body back with the request's content type (201)."""
    return web.Response(
        text=get_openapi_context(request).data,
        content_type=request.content_type,
        status=201,
    )
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_any_object_request_body(aiohttp_client, schema_path):
app = setup_openapi(
web.Application(), schema_path, operations, server_url=URL("/api/")
)
client = await aiohttp_client(app)
response = await client.post("/api/any-object", json=TEST_NESTED_OBJECT)
assert response.status == 200
assert await response.json() == TEST_NESTED_OBJECT
@pytest.mark.parametrize(
"data, expected_status, expected_response",
(
(
{},
422,
{"detail": [{"loc": ["body"], "message": "[] is too short"}]},
),
(
[],
422,
{"detail": [{"loc": ["body"], "message": "[] is too short"}]},
),
(
[""],
422,
{"detail": [{"loc": ["body", 0], "message": "'' is too short"}]},
),
(["Hello", "world!"], 200, ["Hello", "world!"]),
),
)
async def test_array_request_body(
aiohttp_client, data, expected_status, expected_response
):
app = setup_openapi(
web.Application(),
OPENAPI_YAML_PATH,
operations,
server_url=URL("/api"),
)
client = await aiohttp_client(app)
response = await client.post("/api/array", json=data)
assert response.status == expected_status
assert await response.json() == expected_response
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_create_post_201(aiohttp_client, schema_path):
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api/"
)
published_at = "2020-04-01T12:00:00+02:00"
client = await aiohttp_client(app)
response = await client.post(
"/api/create-post",
json={
"title": "Post",
"slug": "post",
"content": "Post Content",
"published_at": published_at,
},
)
assert response.status == 201
assert await response.json() == {
"id": 1,
"title": "Post",
"slug": "post",
"content": "Post Content",
"published_at": published_at,
}
@pytest.mark.parametrize(
"schema_path, invalid_data, expected_detail",
(
(
OPENAPI_JSON_PATH,
{},
[
{"loc": ["body", "title"], "message": "Field required"},
{"loc": ["body", "slug"], "message": "Field required"},
{"loc": ["body", "content"], "message": "Field required"},
{"loc": ["body", "published_at"], "message": "Field required"},
],
),
(
OPENAPI_YAML_PATH,
{"title": "Title"},
[
{"loc": ["body", "slug"], "message": "Field required"},
{"loc": ["body", "content"], "message": "Field required"},
{"loc": ["body", "published_at"], "message": "Field required"},
],
),
(
OPENAPI_JSON_PATH,
{"title": "Title", "slug": "slug"},
[
{"loc": ["body", "content"], "message": "Field required"},
{"loc": ["body", "published_at"], "message": "Field required"},
],
),
(
OPENAPI_YAML_PATH,
{"title": "Title", "slug": "slug", "content": "Content"},
[{"loc": ["body", "published_at"], "message": "Field required"}],
),
),
)
async def test_create_post_422(
aiohttp_client, schema_path, invalid_data, expected_detail
):
app = setup_openapi(
web.Application(),
schema_path,
operations,
server_url=URL("/dev-api"),
)
client = await aiohttp_client(app)
response = await client.post("/dev-api/create-post", json=invalid_data)
assert response.status == 422
assert (await response.json())["detail"] == expected_detail
@pytest.mark.parametrize(
"schema_path, schema_loader",
(
(OPENAPI_JSON_PATH, custom_json_loader),
(OPENAPI_YAML_PATH, custom_yaml_loader),
),
)
def test_custom_schema_loader(schema_path, schema_loader):
app = setup_openapi(
web.Application(),
schema_path,
operations,
server_url="/api/",
schema_loader=schema_loader,
)
assert isinstance(get_openapi_schema(app), dict)
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_email_format(aiohttp_client, schema_path):
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api/"
)
client = await aiohttp_client(app)
response = await client.get(
"/api/hello", params={"email": "<EMAIL>"}
)
assert response.status == 200
assert (await response.json())["email"] == "<EMAIL>"
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_invalid_parameter_format(aiohttp_client, schema_path):
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api/"
)
client = await aiohttp_client(app)
response = await client.get("/api/posts/not-an-integer")
assert response.status == 422
assert await response.json() == {
"detail": [
{
"loc": ["parameters", "post_id"],
"message": "'not-an-integer' is not a type of 'integer'",
}
]
}
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_invalid_parameter_value(aiohttp_client, schema_path):
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api/"
)
client = await aiohttp_client(app)
response = await client.get("/api/posts/0")
assert response.status == 422
assert await response.json() == {
"detail": [
{
"loc": ["parameters", "post_id"],
"message": "0 is less than the minimum of 1",
}
]
}
def test_get_openapi_schema_no_schema():
    """get_openapi_schema raises when setup_openapi() was never called on the app."""
    with pytest.raises(ConfigurationError):
        get_openapi_schema(web.Application())
def test_get_openapi_spec_no_spec():
    """get_openapi_spec raises when setup_openapi() was never called on the app."""
    with pytest.raises(ConfigurationError):
        get_openapi_spec(web.Application())
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_multiple_request_errors(aiohttp_client, schema_path):
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api/"
)
client = await aiohttp_client(app)
response = await client.get("/api/hello?name=&email=")
assert response.status == 422
assert await response.json() == {
"detail": [
{
"loc": ["parameters", "name"],
"message": "Empty parameter value",
},
{
"loc": ["parameters", "email"],
"message": "Empty parameter value",
},
]
}
@pytest.mark.parametrize(
"schema_path, query_string, expected_message",
(
(OPENAPI_JSON_PATH, None, "Hello, world!"),
(OPENAPI_JSON_PATH, "?name=Name", "Hello, Name!"),
(str(OPENAPI_JSON_PATH), None, "Hello, world!"),
(str(OPENAPI_JSON_PATH), "?name=Name", "Hello, Name!"),
(OPENAPI_YAML_PATH, None, "Hello, world!"),
(OPENAPI_YAML_PATH, "?name=Name", "Hello, Name!"),
(str(OPENAPI_YAML_PATH), None, "Hello, world!"),
(str(OPENAPI_YAML_PATH), "?name=Name", "Hello, Name!"),
),
)
async def test_openapi(
aiohttp_client, schema_path, query_string, expected_message
):
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api"
)
client = await aiohttp_client(app)
url = "/api/hello"
response = await client.get(
f"{url}{query_string}" if query_string is not None else url
)
assert response.status == 200
assert (await response.json())["message"] == expected_message
@pytest.mark.parametrize("is_enabled", (False, True))
async def test_openapi_validate_response(aiohttp_client, is_enabled):
app = web.Application()
setup_openapi(
app,
OPENAPI_YAML_PATH,
operations,
server_url="/api",
is_validate_response=is_enabled,
)
client = await aiohttp_client(app)
response = await client.get("/api/hello")
assert response.status == 200
assert await response.json() == {
"message": "Hello, world!",
"email": "<EMAIL>",
}
@pytest.mark.parametrize(
"has_openapi_schema_handler, url, expected_status",
(
(True, "/api/openapi.json", 200),
(False, "/api/openapi.yaml", 404),
(True, "/api/openapi.yaml", 200),
(False, "/api/openapi.yaml", 404),
(True, "/api/openapi.txt", 500),
(False, "/api/openapi.txt", 404),
),
)
async def test_openapi_schema_handler(
aiohttp_client, has_openapi_schema_handler, url, expected_status
):
app = web.Application()
setup_openapi(
app,
OPENAPI_YAML_PATH,
operations,
server_url=URL("/api"),
has_openapi_schema_handler=has_openapi_schema_handler,
)
client = await aiohttp_client(app)
response = await client.get(url)
assert response.status == expected_status
@pytest.mark.parametrize(
"schema_path, headers, expected",
(
(OPENAPI_JSON_PATH, {}, ""),
(OPENAPI_JSON_PATH, {"X-API-Key": "apiKey"}, "apiKey"),
(OPENAPI_YAML_PATH, {}, ""),
(OPENAPI_YAML_PATH, {"X-API-Key": "apiKey"}, "apiKey"),
),
)
async def test_optional_security_scheme(
aiohttp_client, schema_path, headers, expected
):
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api/"
)
client = await aiohttp_client(app)
response = await client.get("/api/empty", headers=headers)
assert response.status == 204
assert response.headers["X-API-Key"] == expected
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_request_body_nested_object(aiohttp_client, schema_path):
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api/"
)
client = await aiohttp_client(app)
response = await client.post("/api/nested-object", json=TEST_NESTED_OBJECT)
assert response.status == 200
assert response.headers["X-Data-Type"] == "<class 'pyrsistent._pmap.PMap'>"
assert (
response.headers["X-Data-Data-Data-Items-Type"]
== "<class 'pvectorc.PVector'>"
)
assert (
response.headers["X-Data-Data-Str-Items-Type"]
== "<class 'pvectorc.PVector'>"
)
assert response.headers["X-Data-UID-Type"] == "<class 'uuid.UUID'>"
assert await response.json() == TEST_NESTED_OBJECT
@pytest.mark.parametrize(
"schema_path, loader",
(
(OPENAPI_JSON_PATH, custom_json_loader),
(OPENAPI_YAML_PATH, custom_yaml_loader),
),
)
async def test_setup_openapi_schema_and_spec(
aiohttp_client, schema_path, loader
):
schema = loader(schema_path.read_bytes())
spec = create_spec(schema)
app = setup_openapi(
web.Application(),
operations,
schema=schema,
spec=spec,
server_url="/api/",
)
client = await aiohttp_client(app)
response = await client.get("/api/hello")
assert response.status == 200
assert await response.json() == {
"message": "Hello, world!",
"email": "<EMAIL>",
}
@pytest.mark.parametrize(
"schema_path, loader",
(
(OPENAPI_JSON_PATH, custom_json_loader),
(OPENAPI_YAML_PATH, custom_yaml_loader),
),
)
async def test_setup_openapi_schema_and_path_ignore_invalid_schema_path(
aiohttp_client, schema_path, loader
):
schema = loader(schema_path.read_bytes())
spec = create_spec(schema)
setup_openapi(
web.Application(),
INVALID_OPENAPI_JSON_PATH,
operations,
schema=schema,
spec=spec,
server_url="/api/",
)
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
def test_setup_openapi_invalid_operation(schema_path):
with pytest.raises(OperationError):
setup_openapi(
web.Application(),
schema_path,
invalid_operations,
server_url="/api",
)
def test_setup_openapi_invalid_path():
    """A schema path that does not exist is a configuration error."""
    with pytest.raises(ConfigurationError):
        setup_openapi(
            web.Application(), ROOT_PATH / "does-not-exist.yaml", operations
        )
def test_setup_openapi_invalid_file():
    """A file that is not a parsable OpenAPI schema is a configuration error."""
    with pytest.raises(ConfigurationError):
        setup_openapi(web.Application(), ROOT_PATH / "settings.py", operations)
@pytest.mark.parametrize(
"schema_path", (INVALID_OPENAPI_JSON_PATH, INVALID_OPENAPI_YAML_PATH)
)
def test_setup_openapi_invalid_spec(schema_path):
with pytest.raises(ConfigurationError):
setup_openapi(web.Application(), schema_path, operations)
@pytest.mark.parametrize(
"schema_path, level, url, expected_status",
(
(OPENAPI_JSON_PATH, "test", "/api/hello", 200),
(OPENAPI_JSON_PATH, "test", "/dev-api/hello", 404),
(OPENAPI_YAML_PATH, "test", "/api/hello", 200),
(OPENAPI_YAML_PATH, "test", "/dev-api/hello", 404),
(OPENAPI_JSON_PATH, "dev", "/api/hello", 404),
(OPENAPI_JSON_PATH, "dev", "/dev-api/hello", 200),
(OPENAPI_YAML_PATH, "dev", "/api/hello", 404),
(OPENAPI_YAML_PATH, "dev", "/dev-api/hello", 200),
),
)
async def test_setup_openapi_server_url_from_settings(
monkeypatch, aiohttp_client, schema_path, level, url, expected_status
):
monkeypatch.setenv("LEVEL", level)
app = setup_openapi(
setup_settings_from_environ(web.Application(), BaseSettings),
schema_path,
operations,
)
client = await aiohttp_client(app)
response = await client.get(url)
assert response.status == expected_status
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
def test_setup_openapi_server_url_invalid_level(monkeypatch, schema_path):
monkeypatch.setenv("LEVEL", "prod")
with pytest.raises(ConfigurationError):
setup_openapi(
setup_settings_from_environ(web.Application(), BaseSettings),
schema_path,
operations,
)
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
def test_setup_openapi_server_url_does_not_set(schema_path):
with pytest.raises(ConfigurationError):
setup_openapi(web.Application(), schema_path, operations)
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_upload_image(aiohttp_client, schema_path):
blank_png = (Path(__file__).parent / "data" / "blank.png").read_bytes()
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api"
)
client = await aiohttp_client(app)
response = await client.post(
"/api/upload-image",
data=blank_png,
headers={"Content-Type": "image/png"},
)
assert response.status == 201
assert await response.read() == blank_png
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_upload_text(aiohttp_client, schema_path):
text = "Hello, world! And other things..."
app = setup_openapi(
web.Application(), schema_path, operations, server_url="/api"
)
client = await aiohttp_client(app)
response = await client.post(
"/api/upload-text",
data=text.encode("utf-8"),
headers={"Content-Type": "text/plain"},
)
assert response.status == 201
assert await response.text() == text
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_validate_binary_response(aiohttp_client, schema_path):
    """Binary (zip) responses must pass response validation untouched."""
    client = await aiohttp_client(
        setup_openapi(
            web.Application(),
            schema_path,
            operations,
            server_url="/api",
            is_validate_response=True,
        )
    )

    response = await client.get("/api/download.zip")
    assert response.status == 200
    assert response.content_type == "application/zip"

    buffer = io.BytesIO(await response.read())
    with zipfile.ZipFile(buffer) as archive:
        with archive.open("hello.txt") as member:
            assert member.read() == b"Hello, world!"
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_validate_empty_response(aiohttp_client, schema_path):
    """A 204 (no content) response must pass response validation."""
    client = await aiohttp_client(
        setup_openapi(
            web.Application(),
            schema_path,
            operations,
            server_url="/api",
            is_validate_response=True,
        )
    )

    response = await client.get("/api/empty")
    assert response.status == 204
@pytest.mark.parametrize(
    "schema_path, is_validate_response, expected_status",
    (
        (OPENAPI_JSON_PATH, False, 200),
        (OPENAPI_JSON_PATH, True, 422),
        (OPENAPI_YAML_PATH, False, 200),
        # Fix: this row previously duplicated (OPENAPI_JSON_PATH, True, 422),
        # so the YAML schema with response validation enabled was never tested.
        (OPENAPI_YAML_PATH, True, 422),
    ),
)
async def test_validate_response(
    aiohttp_client, schema_path, is_validate_response, expected_status
):
    """The is_validate_response flag decides whether a bad response is a 422."""
    app = setup_openapi(
        web.Application(),
        schema_path,
        operations,
        server_url="/api",
        is_validate_response=is_validate_response,
    )
    client = await aiohttp_client(app)

    response = await client.get("/api/invalid-response")
    assert response.status == expected_status
@pytest.mark.parametrize("schema_path", (OPENAPI_JSON_PATH, OPENAPI_YAML_PATH))
async def test_validate_response_error(aiohttp_client, schema_path):
    """Response validation failures should report every missing field."""
    client = await aiohttp_client(
        setup_openapi(
            web.Application(),
            schema_path,
            operations,
            server_url="/api",
            is_validate_response=True,
        )
    )

    response = await client.get("/api/invalid-response")
    assert response.status == 422

    expected = {
        "detail": [
            {"loc": ["response", field], "message": "Field required"}
            for field in ("uid", "type", "data", "any_data")
        ]
    }
    assert await response.json() == expected
| StarcoderdataPython |
1735761 | # -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import os
import re
from typing import Optional
from .core._imperative_rt.common import CompNode, DeviceType
from .core._imperative_rt.common import (
get_cuda_compute_capability as _get_cuda_compute_capability,
)
from .core._imperative_rt.common import set_prealloc_config as _set_prealloc_config
from .core._imperative_rt.common import what_is_xpu as _what_is_xpu
from .core._imperative_rt.utils import _try_coalesce_all_free_memory
# Public API of this module (controls ``from megengine.device import *``).
# NOTE(review): is_cambricon_available / is_atlas_available / is_rocm_available
# and what_is_xpu are defined below but not listed here — confirm whether the
# omission is intentional before adding them.
__all__ = [
    "is_cuda_available",
    "get_device_count",
    "get_default_device",
    "set_default_device",
    "get_mem_status_bytes",
    "get_cuda_compute_capability",
    "get_allocated_memory",
    "get_reserved_memory",
    "get_max_reserved_memory",
    "get_max_allocated_memory",
    "reset_max_memory_stats",
    "set_prealloc_config",
    "coalesce_free_memory",
    "DeviceType",
]
class _stream_helper:
def __init__(self):
self.stream = 1
def get_next(self):
out = self.stream
self.stream = self.stream + 1
return out
_sh = _stream_helper()
def _valid_device(inp):
if isinstance(inp, str) and re.match(
"^([cxg]pu|rocm|multithread)(\d+|\d+:\d+|x)$", inp
):
return True
return False
def _str2device_type(type_str: str, allow_unspec: bool = True):
    """Map a case-insensitive device-type name to a :class:`DeviceType`.

    Unknown names are only accepted as ``"xpu"`` (and only when
    *allow_unspec* is true), in which case ``DeviceType.UNSPEC`` is returned.
    """
    known = {
        "CPU": DeviceType.CPU,
        "GPU": DeviceType.CUDA,
        "CUDA": DeviceType.CUDA,
        "CAMBRICON": DeviceType.CAMBRICON,
        "ATLAS": DeviceType.ATLAS,
        "ROCM": DeviceType.ROCM,
        "AMDGPU": DeviceType.ROCM,
    }
    key = type_str.upper()
    if key in known:
        return known[key]
    assert (
        allow_unspec and key == "XPU"
    ), "device type can only be cpu, gpu or xpu"
    return DeviceType.UNSPEC
_device_type_set = {"cpu", "gpu", "xpu", "rocm"}
def get_device_count(device_type: str) -> int:
    r"""Gets number of devices installed on this system.

    Args:
        device_type: device type, one of 'cpu', 'gpu', 'xpu' or 'rocm'.
    """
    assert device_type in _device_type_set, "device must be one of {}".format(
        _device_type_set
    )
    return CompNode._get_device_count(_str2device_type(device_type), False)
def is_cuda_available() -> bool:
    r"""Returns whether cuda device is available on this system."""
    count = CompNode._get_device_count(_str2device_type("gpu"), False)
    return count > 0
def is_cambricon_available() -> bool:
    r"""Returns whether cambricon device is available on this system."""
    count = CompNode._get_device_count(_str2device_type("cambricon"), False)
    return count > 0
def is_atlas_available() -> bool:
    r"""Returns whether atlas device is available on this system."""
    count = CompNode._get_device_count(_str2device_type("atlas"), False)
    return count > 0
def is_rocm_available() -> bool:
    r"""Returns whether rocm device is available on this system."""
    count = CompNode._get_device_count(_str2device_type("rocm"), False)
    return count > 0
def set_default_device(device: str = "xpux"):
    r"""Sets default computing node.

    Args:
        device: default device type.

    Note:
        * The type can be 'cpu0', 'cpu1', etc., or 'gpu0', 'gpu1', etc.,
          to specify the particular CPU or GPU to use.
        * 'cpux' and 'gpux' can also be used to specify any number of CPU or GPU devices.
        * The default value is 'xpux' to specify any device available.
        * The priority of using GPU is higher when both GPU and CPU are available.
        * 'multithread' device type is avaliable when inference,
          which implements multi-threading parallelism at the operator level.
          For example, 'multithread4' will compute with 4 threads.
        * It can also be set by environment variable ``MGE_DEFAULT_DEVICE``.
    """
    # _valid_device only checks syntax; existence is checked lazily by CompNode.
    assert _valid_device(device), "Invalid device name {}".format(device)
    CompNode._set_default_device(device)
def get_default_device() -> str:
    r"""Gets default computing node.

    It returns the value set by :func:`~.set_default_device`
    (e.g. ``"xpux"`` if never changed).
    """
    return CompNode._get_default_device()
def get_mem_status_bytes(device: Optional[str] = None):
    r"""Get total and free memory on the computing device in bytes.

    Falls back to the default device when *device* is ``None``.
    """
    target = get_default_device() if device is None else device
    total_bytes, free_bytes = CompNode(target).get_mem_status_bytes
    return total_bytes, free_bytes
def get_cuda_compute_capability(device: int, device_type=DeviceType.CUDA) -> int:
    r"""Gets compute capability of the specified device.

    Args:
        device: device number.
        device_type: the device type, defaults to ``DeviceType.CUDA``.

    Returns:
        a version number, or `SM version`.
    """
    return _get_cuda_compute_capability(device, device_type)
def get_allocated_memory(device: Optional[str] = None):
    r"""Returns the current memory occupied by tensors on the computing device in bytes.

    Due to the asynchronous execution of MegEngine, please call megengine._full_sync
    before calling this function in order to get accurate value.
    """
    target = get_default_device() if device is None else device
    return CompNode(target).get_used_memory
def get_reserved_memory(device: Optional[str] = None):
    r"""Returns the current memory managed by the caching allocator on the computing device in bytes.

    Due to the asynchronous execution of MegEngine, please call megengine._full_sync
    before calling this function in order to get accurate value.
    """
    target = get_default_device() if device is None else device
    return CompNode(target).get_reserved_memory
def get_max_reserved_memory(device: Optional[str] = None):
    r"""Returns the maximum memory managed by the caching allocator on the computing device in bytes.

    Due to the asynchronous execution of MegEngine, please call megengine._full_sync
    before calling this function in order to get accurate value.
    """
    target = get_default_device() if device is None else device
    return CompNode(target).get_max_reserved_memory
def get_max_allocated_memory(device: Optional[str] = None):
    r"""Returns the maximum memory occupied by tensors on the computing device in bytes.

    Due to the asynchronous execution of MegEngine, please call megengine._full_sync
    before calling this function in order to get accurate value.
    """
    target = get_default_device() if device is None else device
    return CompNode(target).get_max_used_memory
def reset_max_memory_stats(device: Optional[str] = None):
    r"""Resets the maximum stats on the computing device.

    Due to the asynchronous execution of MegEngine, please call megengine._full_sync
    before calling this function in order to properly reset memory stats.
    """
    target = get_default_device() if device is None else device
    CompNode.reset_max_memory_stats(target)
set_default_device(os.getenv("MGE_DEFAULT_DEVICE", "xpux"))
def set_prealloc_config(
    alignment: int = 1,
    min_req: int = 32 * 1024 * 1024,
    max_overhead: int = 0,
    growth_factor=2.0,
    device_type=DeviceType.CUDA,
):
    r"""Specifies how to pre-allocate from raw device allocator.

    Args:
        alignment: specifies the alignment in bytes.
        min_req: min request size in bytes.
        max_overhead: max overhead above required size in bytes.
        growth_factor: the ratio of the request size to the currently
            allocated size (``request size / cur allocated``); must be >= 1.
        device_type: the device type whose allocator is configured.
    """
    assert alignment > 0
    assert min_req > 0
    assert max_overhead >= 0
    assert growth_factor >= 1
    _set_prealloc_config(alignment, min_req, max_overhead, growth_factor, device_type)
def what_is_xpu():
    """Return the lower-cased name of the device type that 'xpu' resolves to."""
    return _what_is_xpu().name.lower()
def coalesce_free_memory():
    r"""Try to free all consecutive free memory chunks back to the operating
    system; small pieces may not be returned.

    Because of MegEngine's asynchronous execution, the effect of this function
    may not be reflected immediately. If you want to see the effect
    immediately, call ``megengine._full_sync`` after this function.

    .. note::

       * This function will not move any memory in-use;
       * This function may do nothing if there are no chunks that can be freed.
    """
    return _try_coalesce_all_free_memory()
| StarcoderdataPython |
3239094 | from operator import itemgetter
from keras.models import load_model
from config import siamese_config
from input_handler import create_test_data
from input_handler import word_embed_meta_data
import pandas as pd
# Load the hand-labelled positive and negative governor-name pairs and shuffle
# them with a fixed seed so runs are reproducible.
path = "data\\combinations\\"
true_data = pd.read_csv(path+"governors_true_match.csv",sep=";")
false_data = pd.read_csv(path+"governors_false_match.csv",sep=";")
combined_data = pd.concat([true_data,false_data])
combined_data = combined_data.sample(frac=1,random_state=20210721)
print(f"Combined dataset shape: {combined_data.shape}")
# Columns: 'governor' (canonical name), 'combinations' (alternative spelling),
# 'match' (1/0 label).
original_names = list(combined_data.governor)
alternative_names = list(combined_data.combinations)
s_similar = list(combined_data.match)
embedding_dim = siamese_config['EMBEDDING_DIM']
# Build the tokenizer/embedding matrix over ALL names so the test pairs can be
# encoded consistently with training.
tokenizer, embedding_matrix = word_embed_meta_data(original_names + alternative_names, embedding_dim)
# Previously trained siamese LSTM checkpoint.
best_model_path = "checkpoints\\1627218633\\lstm_50_50_0.17_0.25.h5"
model = load_model(best_model_path)

class Configuration(object):
    """Simple attribute container for the siamese model hyper-parameters."""

CONFIG = Configuration()
CONFIG.embedding_dim = siamese_config['EMBEDDING_DIM']
CONFIG.max_sequence_length = siamese_config['MAX_SEQUENCE_LENGTH']
CONFIG.number_lstm_units = siamese_config['NUMBER_LSTM']
CONFIG.rate_drop_lstm = siamese_config['RATE_DROP_LSTM']
CONFIG.number_dense_units = siamese_config['NUMBER_DENSE_UNITS']
CONFIG.activation_function = siamese_config['ACTIVATION_FUNCTION']
CONFIG.rate_drop_dense = siamese_config['RATE_DROP_DENSE']
CONFIG.validation_split_ratio = siamese_config['VALIDATION_SPLIT']

# Ad-hoc pairs to sanity-check the model; anonymized in this copy.
test_name_pairs = [('<NAME>','<NAME>'),
                   ('<NAME>','<NAME>'),
                   ('<NAME>','<NAME>'),
                   ("<NAME>","<NAME>")]

test_data_x1, test_data_x2, leaks_test = create_test_data(tokenizer,test_name_pairs, siamese_config['MAX_SEQUENCE_LENGTH'])
# One similarity score per pair; sort descending by score for readability.
preds = list(model.predict([test_data_x1, test_data_x2, leaks_test], verbose=1).ravel())
results = [(x, y, z) for (x, y), z in zip(test_name_pairs, preds)]
results.sort(key=itemgetter(2), reverse=True)
print(results)
3265264 | from time import sleep
from EDlogger import logger
import json
from pyautogui import typewrite, keyUp, keyDown
from MousePt import MousePoint
from pathlib import Path
"""
File: EDWayPoint.py
Description:
Class will load file called waypoints.json which contains a list of System name to jump to.
Provides methods to select a waypoint passed into it.
Author: <EMAIL>
"""
class EDWayPoint:
    """Steps an Elite Dangerous autopilot through the systems listed in a
    waypoints JSON file, optionally docking and trading at each stop.

    The key-send sequences below are timing sensitive: each ``sleep`` matches
    the in-game UI animation delays, so the order and pauses must not change.
    """

    def __init__(self, is_odyssey=True):
        # Odyssey and Horizons have different Galaxy/System Map layouts.
        self.is_odyssey = is_odyssey
        self.filename = './waypoints.json'
        self.waypoints = {}
        # Expected per-entry shape, e.g.:
        # { "Ninabin": {"DockWithTarget": false, "TradeSeq": None, "Completed": false} }
        # for i, key in enumerate(self.waypoints):
        # self.waypoints[target]['DockWithTarget'] == True ... then go into SC Assist
        # self.waypoints[target]['Completed'] == True
        # if docked and self.waypoints[target]['Completed'] == False
        # execute_seq(self.waypoints[target]['TradeSeq'])

        ss = self.read_waypoints()

        # if we read it then point to it, otherwise use the default table above
        if ss is not None:
            self.waypoints = ss
            logger.debug("EDWayPoint: read json:"+str(ss))

        self.num_waypoints = len(self.waypoints)

        #print("waypoints: "+str(self.waypoints))
        # Index of the waypoint currently being processed.
        self.step = 0

        self.mouse = MousePoint()

    def load_waypoint_file(self, filename=None):
        """Replace the current waypoint list with the one in *filename*."""
        if filename == None:
            return
        ss = self.read_waypoints(filename)
        if ss is not None:
            self.waypoints = ss
            self.filename = filename
            logger.debug("EDWayPoint: read json:"+str(ss))

    def read_waypoints(self, fileName='./waypoints/waypoints.json'):
        """Load a waypoint dict from JSON; returns None on any I/O error."""
        s = None
        try:
            with open(fileName,"r") as fp:
                s = json.load(fp)
        except Exception as e:
            logger.warning("EDWayPoint.py read_config error :"+str(e))

        return s

    def write_waypoints(self, data, fileName='./waypoints/waypoints.json'):
        """Persist *data* (or the current waypoint dict when None) as JSON."""
        if data is None:
            data = self.waypoints
        try:
            with open(fileName,"w") as fp:
                json.dump(data,fp, indent=4)
        except Exception as e:
            logger.warning("EDWayPoint.py write_config error:"+str(e))

    def mark_waypoint_complete(self, key):
        self.waypoints[key]['Completed'] = True
        # Persist under ./waypoints/ using the basename of the loaded file.
        self.write_waypoints(data=None, fileName='./waypoints/' + Path(self.filename).name)

    def waypoint_next(self, ap, target_select_cb=None) -> str:
        """Advance to the next un-completed waypoint, plot a route to it and
        return its key; returns "" when the list is exhausted."""
        dest_key = "REPEAT"

        # loop back to beginning if last record is "REPEAT"
        while dest_key == "REPEAT":
            for i, key in enumerate(self.waypoints):
                # skip records we already processed
                if i < self.step:
                    continue

                # if this step is marked to skip.. i.e. completed, go to next step
                if self.waypoints[key]['Completed'] == True:
                    continue

                # if this entry is REPEAT, loop through all and mark them all as Completed = False
                if key == "REPEAT":
                    self.mark_all_waypoints_not_complete()
                else:
                    # Call sequence to select route
                    if self.set_waypoint_target(ap, key, target_select_cb) == False:
                        # Error setting target
                        logger.warning("Error setting waypoint, breaking")
                    self.step = i
                    dest_key = key
                # Breaking here with dest_key still "REPEAT" makes the outer
                # while restart from the (now reset) beginning of the list.
                break
            else:
                dest_key = ""  # End of list, return empty string

        print("test: " + dest_key)
        return dest_key

    def mark_all_waypoints_not_complete(self):
        """Reset every waypoint's Completed flag and rewind to step 0."""
        for j, tkey in enumerate(self.waypoints):
            self.waypoints[tkey]['Completed'] = False
        self.step = 0
        self.write_waypoints(data=None, fileName='./waypoints/' + Path(self.filename).name)

    def is_station_targeted(self, dest) -> bool:
        return self.waypoints[dest]['DockWithStation']

    def set_station_target(self, ap, dest):
        """Select the destination station on the System Map, either via a
        stored bookmark (Odyssey) or by clicking stored screen coordinates."""
        (x, y) = self.waypoints[dest]['StationCoord']
        # check if StationBookmark exists to get the transition compatibility with old waypoint lists
        if "StationBookmark" in self.waypoints[dest]:
            bookmark = self.waypoints[dest]['StationBookmark']
        else:
            bookmark = -1

        ap.keys.send('SystemMapOpen')
        sleep(3.5)
        if self.is_odyssey and bookmark != -1:
            # Navigate the bookmarks panel: Left -> Select -> Down x2 ->
            # Right -> Down to the stored bookmark index -> long Select.
            ap.keys.send('UI_Left')
            sleep(1)
            ap.keys.send('UI_Select')
            sleep(.5)
            ap.keys.send('UI_Down', repeat=2)
            sleep(.5)
            ap.keys.send('UI_Right')
            sleep(.5)
            ap.keys.send('UI_Down', repeat=bookmark)
            sleep(.5)
            ap.keys.send('UI_Select', hold=4.0)
        else:
            self.mouse.do_click(x, y)
            # for horizons we need to select it
            if self.is_odyssey == False:
                ap.keys.send('UI_Select')
        ap.keys.send('SystemMapOpen')
        sleep(0.5)

    # Call either the Odyssey or Horizons version of the Galatic Map sequence
    def set_waypoint_target(self, ap, target_name, target_select_cb=None) -> bool:
        # No waypoints defined, then return False
        if self.waypoints == None:
            return False

        if self.is_odyssey != True:
            return self.set_waypoint_target_horizons(ap, target_name, target_select_cb)
        else:
            return self.set_waypoint_target_odyssey(ap, target_name, target_select_cb)

    #
    # This sequence for the Horizons
    #
    def set_waypoint_target_horizons(self, ap, target_name, target_select_cb=None) -> bool:
        ap.keys.send('GalaxyMapOpen')
        sleep(2)
        ap.keys.send('CycleNextPanel')
        sleep(1)
        ap.keys.send('UI_Select')
        sleep(2)

        typewrite(target_name, interval=0.25)
        sleep(1)

        # send enter key (scancode 28)
        ap.keys.send_key('Down', 28)
        sleep(0.05)
        ap.keys.send_key('Up', 28)

        sleep(7)
        ap.keys.send('UI_Right')
        sleep(1)
        ap.keys.send('UI_Select')

        # if got passed through the ship() object, lets call it to see if a target has been
        # selected yet.. otherwise we wait. If long route, it may take a few seconds
        if target_select_cb != None:
            while not target_select_cb()['target']:
                sleep(1)

        ap.keys.send('GalaxyMapOpen')
        sleep(2)

        return True

    #
    # This sequence for the Odyssey
    def set_waypoint_target_odyssey(self, ap, target_name, target_select_cb=None) -> bool:
        ap.keys.send('GalaxyMapOpen')
        sleep(2)
        ap.keys.send('UI_Up')
        sleep(.5)
        ap.keys.send('UI_Select')
        sleep(.5)

        #print("Target:"+target_name)
        # type in the System name
        typewrite(target_name, interval=0.25)
        sleep(1)

        # send enter key (scancode 28)
        ap.keys.send_key('Down', 28)
        sleep(0.05)
        ap.keys.send_key('Up', 28)

        sleep(1)
        ap.keys.send('UI_Right', repeat=4)
        sleep(0.1)

        # go down 6x's to plot to target
        for i in range(6):
            ap.keys.send('UI_Down')
            sleep(0.05)
        sleep(0.1)
        # select Plot course
        ap.keys.send('UI_Select')

        # if got passed through the ship() object, lets call it to see if a target has been
        # selected yet.. otherwise we wait. If long route, it may take a few seconds
        if target_select_cb != None:
            while not target_select_cb()['target']:
                sleep(1)

        sleep(1)
        ap.keys.send('GalaxyMapOpen')
        sleep(1)

        return True

    def execute_trade(self, ap, dest):
        """Run the sell/buy menu sequence stored for *dest* while docked.

        SellNumDown / BuyNumDown are how many times to press Down in the
        commodities list to reach the commodity; -1 disables that half.
        """
        sell_down = self.waypoints[dest]['SellNumDown']
        buy_down = self.waypoints[dest]['BuyNumDown']

        if sell_down == -1 and buy_down == -1:
            return

        # We start off on the Main Menu in the Station
        ap.keys.send('UI_Up', repeat=3)  # make sure at the top
        ap.keys.send('UI_Down')
        ap.keys.send('UI_Select')  # Select StarPort Services
        sleep(8)  # wait for new menu to finish rendering
        ap.keys.send('UI_Down')
        ap.keys.send('UI_Select')  # Select Commodities
        sleep(2.5)

        # --------- SELL ----------
        if sell_down != -1:
            ap.keys.send('UI_Down')
            ap.keys.send('UI_Select')  # Select Sell
            sleep(1.5)  # give time to bring up, if needed
            ap.keys.send('UI_Right')  # Go to top of commodities list
            ap.keys.send('UI_Up', repeat=10)  # go up 10x in case were not on top of list
            ap.keys.send('UI_Down', repeat=sell_down)  # go down # of times user specified
            ap.keys.send('UI_Select')  # Select that commodity
            sleep(3)  # give time for popup
            ap.keys.send('UI_Up', repeat=3)  # make sure at top
            ap.keys.send('UI_Down')  # Down to the Sell button (already assume sell all)
            ap.keys.send('UI_Select')  # Select to Sell all

            # TODO: Note, if the waypoint plan has sell_down != -1, then we are assuming we have
            #   cargo to sell, if not we are in limbo here as the Sell button not selectable
            #   Could look at the ship_status['MarketSel'] == True (to be added), to see that we sold
            #   and if not, go down 1 and select cancel

        # --------- BUY ----------
        if buy_down != -1:
            sleep(3)  # give time to popdown
            ap.keys.send('UI_Left')  # back to left menu
            sleep(0.5)
            ap.keys.send('UI_Up', repeat=2)  # go up to Buy
            ap.keys.send('UI_Select')  # Select Buy
            sleep(1.5)  # give time to bring up list
            ap.keys.send('UI_Right')  # Go to top of commodities list
            ap.keys.send('UI_Up', repeat=sell_down+5)  # go up sell_down times in case were not on top of list (+5 for pad)
            ap.keys.send('UI_Down', repeat=buy_down)  # go down # of times user specified
            ap.keys.send('UI_Select')  # Select that commodity
            sleep(2)  # give time to popup
            ap.keys.send('UI_Up', repeat=3)  # go up to quantity to buy (may not default to this)
            ap.keys.send('UI_Right', hold=4.0)  # Hold down Right key to buy will fill cargo
            ap.keys.send('UI_Down')
            ap.keys.send('UI_Select')  # Select Buy
            sleep(1.5)  # give time to popdown

        # Common exit path back to the station main menu.
        ap.keys.send('UI_Left')  # back to left menu
        ap.keys.send('UI_Down', repeat=8)  # go down 4x to highlight Exit
        ap.keys.send('UI_Select')  # Select Exit, back to StartPort Menu
        sleep(1)  # give time to get back to menu
        if self.is_odyssey == True:
            ap.keys.send('UI_Down', repeat=4)  # go down 4x to highlight Exit
            ap.keys.send('UI_Select')  # Select Exit, back to top menu
            sleep(2)  # give time to popdown menu
# this import the temp class needed for unit testing
"""
from EDKeys import *
class temp:
    def __init__(self):
        self.keys = EDKeys()
"""

def main():
    """Manual smoke test: iterate the waypoint list without an autopilot."""
    #keys = temp()
    wp = EDWayPoint(True)  # False = Horizons
    wp.step = 0  #start at first waypoint
    sleep(3)

    #dest = 'Enayex'
    #print(dest)

    #print("In waypoint_assist, at:"+str(dest))

    # already in doc config, test the trade
    #wp.execute_trade(keys, dest)

    # Set the Route for the waypoint^#
    dest = wp.waypoint_next(ap=None)

    while dest != "":
        # print("Doing: "+str(dest))
        # print(wp.waypoints[dest])
        # print("Dock w/station: "+ str(wp.is_station_targeted(dest)))

        #wp.set_station_target(None, dest)

        # Mark this waypoint as complated
        #wp.mark_waypoint_complete(dest)

        # set target to next waypoint and loop)::@
        dest = wp.waypoint_next(ap=None)


if __name__ == "__main__":
    main()
| StarcoderdataPython |
3283499 | from typing import Callable, Sequence, Union, Tuple, List, Optional
import os
import time
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from qcodes.dataset.measurements import Measurement
from qcodes.instrument.base import _BaseParameter
from qcodes.dataset.plotting import plot_by_id
from qcodes import config
# Convenience aliases for the matplotlib objects returned by the doNd helpers.
AxesTuple = Tuple[matplotlib.axes.Axes, matplotlib.colorbar.Colorbar]
AxesTupleList = Tuple[List[matplotlib.axes.Axes],
                      List[Optional[matplotlib.colorbar.Colorbar]]]
AxesTupleListWithRunId = Tuple[int, List[matplotlib.axes.Axes],
                               List[Optional[matplotlib.colorbar.Colorbar]]]
# Scalar sweep values may be given as either floats or ints.
number = Union[float, int]
def do0d(*param_meas: Union[_BaseParameter, Callable[[], None]],
         do_plot: bool = True) -> AxesTupleListWithRunId:
    """
    Perform a measurement of a single parameter. This is probably most
    useful for an ArrayParamter that already returns an array of data points

    Args:
        *param_meas: Parameter(s) to measure at each step or functions that
            will be called at each step. The function should take no arguments.
            The parameters and functions are called in the order they are
            supplied.
        do_plot: should png and pdf versions of the images be saved after the
            run.

    Returns:
        The run_id of the DataSet created
    """
    meas = Measurement()

    # Fix: only register actual parameters, mirroring do1d/do2d — callables
    # were previously registered (and recorded with a None value), which
    # breaks as soon as a function is passed alongside parameters.
    for parameter in param_meas:
        if isinstance(parameter, _BaseParameter):
            meas.register_parameter(parameter)

    with meas.run() as datasaver:
        output = []
        for parameter in param_meas:
            if isinstance(parameter, _BaseParameter):
                output.append((parameter, parameter.get()))
            elif callable(parameter):
                parameter()
        datasaver.add_result(*output)
        dataid = datasaver.run_id

    if do_plot:
        ax, cbs = _save_image(datasaver)
    else:
        # Fix: was ``ax = None,`` — the trailing comma made ax the tuple
        # (None,) instead of None.
        ax = None
        cbs = None

    return dataid, ax, cbs
def do1d(param_set: _BaseParameter, start: number, stop: number,
         num_points: int, delay: number,
         *param_meas: Union[_BaseParameter, Callable[[], None]],
         enter_actions: Sequence[Callable[[], None]] = (),
         exit_actions: Sequence[Callable[[], None]] = (),
         do_plot: bool = True) \
        -> AxesTupleListWithRunId:
    """
    Perform a 1D scan of ``param_set`` from ``start`` to ``stop`` in
    ``num_points`` measuring param_meas at each step. In case param_meas is
    an ArrayParameter this is effectively a 2d scan.

    Args:
        param_set: The QCoDeS parameter to sweep over
        start: Starting point of sweep
        stop: End point of sweep
        num_points: Number of points in sweep
        delay: Delay after setting paramter before measurement is performed
        *param_meas: Parameter(s) to measure at each step or functions that
            will be called at each step. The function should take no arguments.
            The parameters and functions are called in the order they are
            supplied.
        enter_actions: A list of functions taking no arguments that will be
            called before the measurements start
        exit_actions: A list of functions taking no arguments that will be
            called after the measurements ends
        do_plot: should png and pdf versions of the images be saved after the
            run.

    Returns:
        The run_id of the DataSet created
    """
    meas = Measurement()
    meas.register_parameter(
        param_set)  # register the first independent parameter
    param_set.post_delay = delay
    interrupted = False

    for action in enter_actions:
        # this omits the possibility of passing
        # arguments to enter and exit actions.
        meas.add_before_run(action, ())
    for action in exit_actions:
        meas.add_after_run(action, ())

    # do1D enforces a simple relationship between measured parameters
    # and set parameters. For anything more complicated this should be
    # reimplemented from scratch
    for parameter in param_meas:
        if isinstance(parameter, _BaseParameter):
            meas.register_parameter(parameter, setpoints=(param_set,))

    try:
        with meas.run() as datasaver:
            for set_point in np.linspace(start, stop, num_points):
                param_set.set(set_point)
                output = []
                for parameter in param_meas:
                    if isinstance(parameter, _BaseParameter):
                        output.append((parameter, parameter.get()))
                    elif callable(parameter):
                        parameter()
                datasaver.add_result((param_set, set_point),
                                     *output)
    except KeyboardInterrupt:
        # Finish the run cleanly, then re-raise below so the caller still
        # sees the interruption.
        interrupted = True

    dataid = datasaver.run_id  # convenient to have for plotting

    if do_plot:
        ax, cbs = _save_image(datasaver)
    else:
        # Fix: was ``ax = None,`` — the trailing comma made ax the tuple
        # (None,) instead of None.
        ax = None
        cbs = None

    if interrupted:
        raise KeyboardInterrupt
    return dataid, ax, cbs
def do2d(param_set1: _BaseParameter, start1: number, stop1: number,
         num_points1: int, delay1: number,
         param_set2: _BaseParameter, start2: number, stop2: number,
         num_points2: int, delay2: number,
         *param_meas: Union[_BaseParameter, Callable[[], None]],
         set_before_sweep: Optional[bool] = False,
         enter_actions: Sequence[Callable[[], None]] = (),
         exit_actions: Sequence[Callable[[], None]] = (),
         before_inner_actions: Sequence[Callable[[], None]] = (),
         after_inner_actions: Sequence[Callable[[], None]] = (),
         do_plot: bool = True) -> AxesTupleListWithRunId:
    """
    Perform a 1D scan of ``param_set1`` from ``start1`` to ``stop1`` in
    ``num_points1`` and ``param_set2`` from ``start2`` to ``stop2`` in
    ``num_points2`` measuring param_meas at each step.

    Args:
        param_set1: The QCoDeS parameter to sweep over in the outer loop
        start1: Starting point of sweep in outer loop
        stop1: End point of sweep in the outer loop
        num_points1: Number of points to measure in the outer loop
        delay1: Delay after setting parameter in the outer loop
        param_set2: The QCoDeS parameter to sweep over in the inner loop
        start2: Starting point of sweep in inner loop
        stop2: End point of sweep in the inner loop
        num_points2: Number of points to measure in the inner loop
        delay2: Delay after setting paramter before measurement is performed
        *param_meas: Parameter(s) to measure at each step or functions that
            will be called at each step. The function should take no arguments.
            The parameters and functions are called in the order they are
            supplied.
        set_before_sweep: if True the outer parameter is set to its first value
            before the inner parameter is swept to its next value.
        enter_actions: A list of functions taking no arguments that will be
            called before the measurements start
        exit_actions: A list of functions taking no arguments that will be
            called after the measurements ends
        before_inner_actions: Actions executed before each run of the inner loop
        after_inner_actions: Actions executed after each run of the inner loop
        do_plot: should png and pdf versions of the images be saved after the
            run.

    Returns:
        The run_id of the DataSet created
    """
    meas = Measurement()
    meas.register_parameter(param_set1)
    param_set1.post_delay = delay1
    meas.register_parameter(param_set2)
    param_set2.post_delay = delay2
    interrupted = False

    for action in enter_actions:
        # this omits the possibility of passing
        # arguments to enter and exit actions.
        meas.add_before_run(action, ())
    for action in exit_actions:
        meas.add_after_run(action, ())

    for parameter in param_meas:
        if isinstance(parameter, _BaseParameter):
            meas.register_parameter(parameter,
                                    setpoints=(param_set1, param_set2))

    try:
        with meas.run() as datasaver:
            for set_point1 in np.linspace(start1, stop1, num_points1):
                if set_before_sweep:
                    # Park the inner parameter at its first value before
                    # stepping the outer one.
                    param_set2.set(start2)
                param_set1.set(set_point1)
                for action in before_inner_actions:
                    action()
                for set_point2 in np.linspace(start2, stop2, num_points2):
                    # skip first inner set point if `set_before_sweep`
                    # (it was already set above)
                    if set_point2 == start2 and set_before_sweep:
                        pass
                    else:
                        param_set2.set(set_point2)
                    output = []
                    for parameter in param_meas:
                        if isinstance(parameter, _BaseParameter):
                            output.append((parameter, parameter.get()))
                        elif callable(parameter):
                            parameter()
                    datasaver.add_result((param_set1, set_point1),
                                         (param_set2, set_point2),
                                         *output)
                for action in after_inner_actions:
                    action()
    except KeyboardInterrupt:
        interrupted = True

    dataid = datasaver.run_id

    if do_plot:
        ax, cbs = _save_image(datasaver)
    else:
        # Fix: was ``ax = None,`` — the trailing comma made ax the tuple
        # (None,) instead of None.
        ax = None
        cbs = None

    if interrupted:
        raise KeyboardInterrupt
    return dataid, ax, cbs
def _save_image(datasaver) -> AxesTupleList:
    """
    Save the plots created by datasaver as pdf and png

    Args:
        datasaver: a measurement datasaver that contains a dataset to be saved
            as plot.

    Returns:
        The axes and colorbars produced by ``plot_by_id``.
    """
    # Disable interactive mode while rendering so figures are not shown.
    plt.ioff()
    dataid = datasaver.run_id
    start = time.time()
    axes, cbs = plot_by_id(dataid)
    stop = time.time()
    print(f"plot by id took {stop-start}")

    # Images are stored under <mainfolder>/<experiment>/<sample>/{png,pdf}/.
    mainfolder = config.user.mainfolder
    experiment_name = datasaver._dataset.exp_name
    sample_name = datasaver._dataset.sample_name

    storage_dir = os.path.join(mainfolder, experiment_name, sample_name)
    os.makedirs(storage_dir, exist_ok=True)

    png_dir = os.path.join(storage_dir, 'png')
    pdf_dif = os.path.join(storage_dir, 'pdf')

    os.makedirs(png_dir, exist_ok=True)
    os.makedirs(pdf_dif, exist_ok=True)

    save_pdf = True
    save_png = True

    # One file per subplot: <run_id>_<axes index>.{pdf,png}
    for i, ax in enumerate(axes):
        if save_pdf:
            full_path = os.path.join(pdf_dif, f'{dataid}_{i}.pdf')
            ax.figure.savefig(full_path, dpi=500)
        if save_png:
            full_path = os.path.join(png_dir, f'{dataid}_{i}.png')
            ax.figure.savefig(full_path, dpi=500)
    # Restore interactive plotting for the caller.
    plt.ion()

    return axes, cbs
| StarcoderdataPython |
3277157 | <gh_stars>0
"""Holzworth HA7062D phase noise analyzer"""
import re
import sys
import asyncio
import itertools
from dataclasses import dataclass
from unyt import unyt_array
from ha7000d.common import HA7000DBase, Subsystem
# SI prefix characters, ordered from largest (yotta) to smallest (yocto);
# '_' marks the no-prefix slot.
# NOTE(review): FACTORS contains a "da" (deca) key that cannot appear in the
# single-character PREFIXES string — confirm whether "da" support is intended.
PREFIXES = "YZEPTGMkh_dcmµnpfazy"
# Multiplier for each SI prefix.
FACTORS = {
    "Y": 10 ** 24,
    "Z": 10 ** 21,
    "E": 10 ** 18,
    "P": 10 ** 15,
    "T": 10 ** 12,
    "G": 10 ** 9,
    "M": 10 ** 6,
    "k": 10 ** 3,
    "h": 10 ** 2,
    "da": 10 ** 1,
    "d": 10 ** -1,
    "c": 10 ** -2,
    "m": 10 ** -3,
    "µ": 10 ** -6,
    "n": 10 ** -9,
    "p": 10 ** -12,
    "f": 10 ** -15,
    "a": 10 ** -18,
    "z": 10 ** -21,
    "y": 10 ** -24,
}
@dataclass
class State:
    """Signal for controlling asyncio tasks"""

    # True while an acquisition is in progress; cleared to stop helper tasks.
    running: bool = False
class AcquisitionError(Exception):
    """Raised when the analyzer reports an acquisition error."""
class HA7062D(HA7000DBase):
"""HA7062D Phase Noise Analyzer
Parameters
----------
sock : `~socket.socket`
"""
    def __init__(self, sock):
        super().__init__(sock)
        # Measurement-setup subsystem — presumably a Subsystem defined
        # elsewhere in this module; TODO confirm.
        self.setup = MeasurementSetup(self)
        # Shared flag coordinating the spinner and acquisition coroutines.
        self._state = State()
    @property
    def model(self):
        """(str): the model number"""
        # _idn is presumably populated by HA7000DBase — confirm in base class.
        return self._idn.model
    @property
    def serial_number(self):
        """(str): the serial number"""
        return self._idn.serial_number
    @property
    def firmware_version(self):
        """(str): the firmware version"""
        return self._idn.firmware_version
    def __repr__(self):
        """Show model plus the instrument's address:port."""
        address, port = self._sock.getpeername()
        return f"<Holzworth {self.model} at {address}:{port}>"
    async def _show_spinner(self):
        """Show an in-progress spinner during phase noise measurement"""
        glyph = itertools.cycle(["-", "\\", "|", "/"])
        try:
            while self._state.running:
                sys.stdout.write(next(glyph))
                sys.stdout.flush()
                # Backspace so the next glyph overwrites this one in place.
                sys.stdout.write("\b")
                await asyncio.sleep(0.5)
            # Normal completion (acquisition cleared the running flag).
            return 0
        except asyncio.CancelledError:
            pass
        finally:
            # Erase the last glyph from the terminal.
            sys.stdout.write("\b \b")
async def _acquire(self):
self._write(":INIT:PN:IMM")
resp = self._read()
try:
while True:
resp = self._query(":SENS:PN:CORE:STATUS?")
if resp == "Data not ready":
# an error occurred
self._state.running = False
return 1
if resp.endswith("initialized"):
break
await asyncio.sleep(1)
while True:
resp = self._query(":STAT:OPER:COND?")
if resp == "Instrument Busy":
await asyncio.sleep(1)
else:
self._state.running = False
return 0
except asyncio.CancelledError:
pass
async def _start_task(self, timeout):
self._state.running = True
task = asyncio.gather(self._show_spinner(), self._acquire())
try:
ret_value = await asyncio.wait_for(task, timeout)
except asyncio.TimeoutError:
raise TimeoutError("Phase noise measurement timed out") from None
else:
return ret_value
def read(self, timeout=None, previous=True):
"""Read measurement data and return tuple (X, Y)
Parameters
----------
timeout : int
timeout in seconds or None
previous : bool
read existing trace data if True, else start a new acquisition
"""
if not previous:
ret_value = asyncio.run(self._start_task(timeout))
if ret_value is None:
return None
if ret_value[1] > 0:
err = self._query(":SENS:PN:CORE:ERROR?")
raise AcquisitionError(err)
# n_points = int(self._query(":SENS:PN:SWE:POIN?"))
resp = self._query(":CALC:PN:DATA:XDAT?")
x = list(map(float, resp.split(",")))
x = unyt_array(x, "Hz", name=r"$f_{\rm offset}$")
resp = self._query(":CALC:PN:DATA:FDAT?")
y = list(map(float, resp.split(",")))
y = unyt_array(y, "dBc/Hz", name=r"$\mathcal{L}$")
return (x, y)
def measure_input(self, channel):
"""Measure CHx input frequency and power level
Parameters
----------
channel : str {CH1, CH2}
Returns
-------
(frequency, power) : two-tuple of frequency (Hz) and power (dBm)
"""
resp = self._query(f":CALC:FP:DATA:CARR:{channel}?")
return tuple(map(lambda s: s.strip(), resp.split(",")))
def measure_lo(self, lo):
"""Measure LOx frequency and power level
Parameters
----------
lo : str {LO1, LO2}
Returns
-------
(frequency, power) : two-tuple of frequency (Hz) and power (dBm)
"""
resp = self._query(f":CALC:FP:DATA:{lo}?")
return tuple(map(lambda s: s.strip(), resp.split(",")))
class MeasurementSetup(Subsystem, kind="MeasurementSetup"):
    """Measurement subsystem

    Wraps the SCPI ``:SENSe:PN``/``:CALCulate:PN`` configuration tree as
    Python properties: setters issue the SCPI command, getters query the
    instrument and translate its verbose replies where needed.

    Parameters
    ----------
    instr : HA7000D
    """
    def _check_resp(self, expected_response):
        """Read one reply and raise if it differs from *expected_response*."""
        if (resp := self._read()) != expected_response:
            raise Exception(f"Unexpected response '{resp}'")
    @property
    def measurement_type(self):
        """value : str, {absolute, additive, AM noise, baseband}"""
        return self._query(":SENS:PN:MEAS:TYPE?")
    @measurement_type.setter
    def measurement_type(self, value):
        value = value.upper()
        self._set_parameter(f":SENS:PN:MEAS:TYPE:{value}")
        # Read back to verify the instrument accepted the type.
        set_value = self.measurement_type.upper()
        if set_value != value:
            raise ValueError(f"Invalid measurement type '{value}'")
    @property
    def correlations(self):
        """value : int, number of cross-correlations to perform"""
        return int(self._query(":SENS:PN:CORR:COUN?"))
    @correlations.setter
    def correlations(self, value):
        self._set_parameter(f":SENS:PN:CORR:COUN:{value}")
        if self.correlations != value:
            raise ValueError(f"Invalid number of cross-correlations '{value}'")
    @staticmethod
    def _parse_quantity(value):
        """Split a quantity string such as ``"10 MHz"`` into ``(magnitude, unit)``.

        Returns *value* unchanged when it does not look like a number
        followed by an (optionally SI-prefixed) unit.
        """
        pattern = r"([0-9]+\.?[0-9]*) (da|[{}])?([a-zA-Z]+)".format(PREFIXES)
        if (m := re.match(pattern, str(value))) is None:
            return value
        n, p, u = m.groups()
        n = float(n)
        # Bug fixes: the prefix group is optional, so p may be None --
        # FACTORS[p] used to raise KeyError for unprefixed units; and the
        # original returned ``({p * n}, u)``, a *set literal* instead of
        # the scaled magnitude as the first tuple element.
        factor = FACTORS.get(p, 1)
        return (factor * n, u)
    @property
    def offset_span(self):
        """value : 2-tuple e.g. ('10 Hz', '10 MHz')"""
        start = self._query(":SENS:PN:FREQ:STAR?")
        stop = self._query(":SENS:PN:FREQ:STOP?")
        return (float(start), float(stop))
    @offset_span.setter
    def offset_span(self, value):
        start, stop = value
        # The instrument expects no space between magnitude and unit.
        start = start.replace(" ", "")
        stop = stop.replace(" ", "")
        self._set_parameter(f":SENS:PN:FREQ:STAR:{start}")
        self._set_parameter(f":SENS:PN:FREQ:STOP:{stop}")
    @property
    def data_type(self):
        """value : str {Channel 1, Channel 2, Cross}"""
        return self._query(":SENS:PN:DATA:TYPE?")
    @data_type.setter
    def data_type(self, value):
        self._set_parameter(f":SENS:PN:DATA:TYPE:{value}")
    @property
    def trigger_type(self):
        """value : str {Single, Each, Continuous, Persist}"""
        return self._query(":SENS:PN:MODE?")
    @trigger_type.setter
    def trigger_type(self, value):
        self._set_parameter(f":SENS:PN:MODE:{value}")
    @property
    def samples(self):
        """value : int {64, 128, 256, 512, 1024}"""
        return int(self._query(":SENS:PN:SAMPLES:COUN?"))
    @samples.setter
    def samples(self, value):
        self._set_parameter(f":SENS:PN:SAMPLES:{value}")
    @property
    def mixer_conversion(self):
        """value : str {Automatic, Manual}"""
        return self._query(":SENS:PN:MCONV?")
    @mixer_conversion.setter
    def mixer_conversion(self, value):
        self._set_parameter(f":SENS:PN:MCONV:{value}")
    @property
    def pll_bandwidth(self):
        """value : str {Wide, Normal}"""
        # Translate the instrument's verbose VCO status into Wide/Normal.
        LU = {"VCO Measurement Enabled": "Wide", "VCO Measurement Disabled": "Normal"}
        resp = self._query(":SENS:PN:VCO?")
        return LU.get(resp, resp)
    @pll_bandwidth.setter
    def pll_bandwidth(self, value):
        LU = {"Wide": True, "Normal": False}
        value = LU[value]
        self._set_parameter(f":SENS:PN:VCO:{value}")
    @property
    def if_gain(self):
        """value : str or int {Auto, 0, 14, 28, 42} dB"""
        return self._query(":SENS:PN:GAIN?")
    @if_gain.setter
    def if_gain(self, value):
        self._set_parameter(f":SENS:PN:GAIN:{value}")
    @property
    def dut_frequency_range(self):
        """value : str or int {Auto, 1, 2, 4, 8} divisor
        1: 10 MHz to 6 GHz
        2: 6.1 GHz to 12 GHz
        4: 12.1 GHz to 24 GHz
        8: 24.1 GHz to 26.5 GHz
        """
        LU = {
            "Divisor set to auto detect": "Auto",
            "1": "10 MHz to 6 GHz",
            "2": "6.1 GHz to 12 GHz",
            "4": "12.1 GHz to 24 GHz",
            "8": "24.1 GHz to 26.5 GHz",
        }
        resp = self._query(":SENS:PN:DIV?")
        return LU.get(resp, resp)
    @dut_frequency_range.setter
    def dut_frequency_range(self, value):
        self._set_parameter(f":SENS:PN:DIV:{value}")
    @property
    def dut_splitter(self):
        """value : str {Internal, External}"""
        return self._query(":SENS:CORR:POW:STAT?")
    @dut_splitter.setter
    def dut_splitter(self, value):
        self._set_parameter(f":SENS:CORR:POW:STAT:{value}")
    @property
    def dut_mixer(self):
        """value : str {Internal, External}"""
        LU = {
            "External mixer Not in Use": "Internal",
            "External mixer in Use": "External",
        }
        resp = self._query(":SENS:PN:MEXT?")
        return LU.get(resp, resp)
    @dut_mixer.setter
    def dut_mixer(self, value):
        LU = {"Internal": "Off", "External": "On"}
        value = LU[value]
        self._set_parameter(f":SENS:PN:MEXT:{value}")
    @property
    def phase_shifters(self):
        """value : bool {True, False} HX5100 phase shifters in use"""
        LU = {"HX5100 Not in Use": False, "HX5100 in Use": True}
        resp = self._query(":SENS:PN:HX5100?")
        return LU.get(resp, resp)
    @phase_shifters.setter
    def phase_shifters(self, value):
        value = int(value)
        self._set_parameter(f":SENS:PN:HX5100:{value}")
    @property
    def lo_configuration(self):
        """value : str {Internal, External} (read-only)"""
        LU = {"Internal LO status": "Internal", "External LO status": "External"}
        resp = self._query(":SENS:PN:LO:STATUS?")
        return LU.get(resp, resp)
    @property
    def smoothing_points(self):
        """value : int, 0 or 3 to 99 points of smoothing"""
        resp = self._query(":CALC:PN:TRAC:SMO:STAT?")
        if resp == "OFF":
            return 0
        return int(self._query(":CALC:PN:TRAC:SMO:PNTS?"))
    @smoothing_points.setter
    def smoothing_points(self, value):
        # Setting 0 points disables smoothing entirely.
        if value > 0:
            self._set_parameter(f":CALC:PN:TRAC:SMO:PNTS:{value}")
            self._set_parameter(":CALC:PN:TRAC:SMO:STAT:ON")
        else:
            self._set_parameter(":CALC:PN:TRAC:SMO:STAT:OFF")
    @property
    def spur_removal(self):
        """value : str or int, {OFF, 0 to 99} dB"""
        resp = self._query(":CALC:PN:TRAC:SPUR:OMIS?")
        if resp == "OFF":
            return resp
        return int(self._query(":CALC:PN:TRAC:SPUR:THR?"))
    @spur_removal.setter
    def spur_removal(self, value):
        if value == "OFF":
            self._set_parameter(":CALC:PN:TRAC:SPUR:OMIS:OFF")
        else:
            self._set_parameter(f":CALC:PN:TRAC:SPUR:THR:{value}")
            self._set_parameter(":CALC:PN:TRAC:SPUR:OMIS:ON")
| StarcoderdataPython |
178405 | <reponame>welykPereira/pythonExerciciosFaculdade
def soma(x1, y1):
    """Print and return the sum of *x1* and *y1*.

    The original only printed the result; returning it as well is
    backward compatible and makes the function reusable and testable.
    """
    res = x1 + y1
    print('O resultado da soma e {}'.format(res))
    return res
# Prompt for two integers and show their sum only when run as a script;
# the original ran input() at import time, which blocks anyone importing
# this module to reuse soma().
if __name__ == "__main__":
    x = int(input('Digite um valor!'))
    y = int(input('Digite um outro valor!'))
    soma(x, y)
| StarcoderdataPython |
1638463 | import time
def sleeper():
    """Repeatedly prompt for a duration in seconds and sleep that long.

    Loops forever; non-numeric input is rejected with a message and the
    prompt is shown again.  Exit with Ctrl-C (KeyboardInterrupt).
    """
    while True:
        raw = input('How long to wait: ')
        # Reject anything that is not a valid float and re-prompt.
        try:
            seconds = float(raw)
        except ValueError:
            print('Please enter in a number.\n')
            continue
        # Show wall-clock time around the sleep so the delay is visible.
        print('Before: %s' % time.ctime())
        time.sleep(seconds)
        print('After: %s\n' % time.ctime())
# Run the interactive prompt/sleep loop until the user presses Ctrl-C.
try:
    sleeper()
except KeyboardInterrupt:
    print('\n\nKeyboard exception received. Exiting.')
    exit()
1622430 | # Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import logging
import mysql.connector
from opencensus.trace.ext.dbapi import trace
MODULE_NAME = 'mysql'  # name reported in the "Integrated module" log line
CONN_WRAP_METHOD = 'connect'  # mysql.connector attribute that gets wrapped
def trace_integration(tracer=None):
    """Wrap ``mysql.connector.connect`` so new connections are traced.

    Replaces the ``connect`` attribute on its defining module with a
    traced wrapper from ``opencensus``.

    Parameters
    ----------
    tracer : optional
        Unused here; accepted for signature compatibility with other
        ``trace_integration`` implementations.
    """
    # Lazy %-style args avoid building the string when INFO is disabled.
    logging.info('Integrated module: %s', MODULE_NAME)
    conn_func = getattr(mysql.connector, CONN_WRAP_METHOD)
    # Patch the module that actually defines connect(), which may differ
    # from the mysql.connector package facade.
    conn_module = inspect.getmodule(conn_func)
    wrapped = trace.wrap_conn(conn_func)
    setattr(conn_module, CONN_WRAP_METHOD, wrapped)
| StarcoderdataPython |
1549 | <reponame>bibinvasudev/EBI_Project
# SCH1101.sh --> JB_SALES_HIERARCHY_FLAG_N_SR.py
#**************************************************************************************************************
#
# Created by : bibin
# Version : 1.0
#
# Description :
# 1. This script will load the data into 'SALES_HIERARCHY' table based on stream lookups.
#
#
# Initial Creation:
#
# Date (YYYY-MM-DD) Change Description
# ----------------- ------------------
# 2018-11-02 Initial creation
#
#**************************************************************************************************************
# Importing required Lib
from dependencies.spark import start_spark
from dependencies.EbiReadWrite import EbiReadWrite
import logging
import sys
from time import gmtime, strftime
import cx_Oracle
import py4j
import pyspark
# Module-level logger for this Spark job.
logger = logging.getLogger(__name__)
# Job start timestamp (UTC), pre-quoted for the pipe-delimited audit line;
# log_date names the daily log file.
start_date = "'"+strftime("%Y-%m-%d %H:%M:%S", gmtime())+"'"
log_date =strftime("%Y%m%d", gmtime())
# Scheduler script and job identifiers used in audit logging.
script_name = "SCH1101.SH"
app_name = "JB_SALES_HIERARCHY_FLAG_N_SR"
log_filename = app_name + '_' + log_date + '.log'
# Query for loading the SALES_HIERARCHY table
def query_data(db_schema):
    """Build the INSERT ... SELECT statement that loads SALES_HIERARCHY.

    Parameters
    ----------
    db_schema : str
        Target schema name, interpolated directly into the SQL; it comes
        from trusted job configuration, not user input.

    Returns
    -------
    str
        The complete SQL statement.
    """
    return f"""INSERT INTO {db_schema}.SALES_HIERARCHY
    (SALES_GEOGRAPHY, SALES_MULTI_AREA, SALES_AREA, SALES_MULTI_REGION, SALES_REGION, SALES_DISTRICT, SALES_TEAM, EMPLOYEE_ID,
    SALES_REP_NUMBER, LOGIN_ID, SALES_REP_NAME, SALES_REP_ORG, COMP_PLAN_TYPE_CODE, COMP_PLAN_TITLE, COMP_PLAN_CATEGORY_CODE, COMP_PLAN_DESCRIPTION,
    GOAL_CURR_CODE, START_DATE, END_DATE, STATUS_CODE, PARTICIPANT_LEVEL_CODE, SALES_REP_TYPE_CODE, CURRENT_RECORD_FLAG, LAST_HIRE_DATE)
    SELECT
    B.WW_DIRECT_GEO_DESCRIPTION AS SALES_GEOGRAPHY,
    B.MULTI_AREA_DESCRIPTION AS SALES_MULTI_AREA,
    B.AREA_DESCRIPTION AS SALES_AREA,
    B.MULTI_REGION_DESCRIPTION AS SALES_MULTI_REGION,
    SUBSTR(B.REGION_DESCRIPTION,1,50) AS SALES_REGION,
    SUBSTR(B.DISTRICT_DESCRIPTION,1,50) AS SALES_DISTRICT,
    SUBSTR(B.TEAM_DESCRIPTION,1,50) AS SALES_TEAM,
    A.EMPLOYEE_ID,
    A.BK_SALES_REP_NUMBER AS SALES_REP_NUMBER,
    SUBSTR(A.EMP_SYS_LOGIN_ID,1,10) AS LOGIN_ID,
    SUBSTR(A.SALES_REP_NAME,1,50) AS SALES_REP_NAME,
    A.ORGANIZATION_NAME AS SALES_REP_ORG,
    A.COMP_PLAN_TYPE_CODE,
    A.COMP_PLAN_TITLE,
    A.COMP_PLAN_CATEGORY_CODE,
    A.COMP_PLAN_DESCRIPTION,
    NULL AS GOAL_CURR_CODE ,
    A.START_DATE,
    A.END_DATE,
    A.STATUS_CODE,
    A.PARTICIPANT_LEVEL_CODE,
    SUBSTR(A.SALES_REP_TYPE_CODE,1,5) AS SALES_REP_TYPE_CODE,
    A.CURRENT_RECORD_FLAG,
    C.RECENT_HIRE_DATE AS LAST_HIRE_DATE
    FROM
    (
    SELECT a.*,ROW_NUMBER() over (partition by BK_SALES_REP_NUMBER ORDER BY END_DATE desc) as RANK
    FROM DIMS.SALES_PARTICIPANT a
    WHERE
    BK_SALES_REP_NUMBER NOT IN (SELECT DISTINCT BK_SALES_REP_NUMBER FROM DIMS.SALES_PARTICIPANT WHERE CURRENT_RECORD_FLAG = 'Y')
    AND PARTICIPANT_LEVEL_CODE = 'SR'
    ORDER BY BK_SALES_REP_NUMBER,SALES_PARTICIPANT_KEY
    ) A
    INNER JOIN DIMS.SALES_TERR_HIERAR_AS_IS_MV B ON B.TERRITORY_KEY = A.TERRITORY_KEY
    LEFT OUTER JOIN
    (SELECT LTRIM(BK_EMPLOYEE_ID,'0') BK_EMPLOYEE_ID,RECENT_HIRE_DATE FROM DIMS.WORKER_DETAIL WHERE CURRENT_RECORD_IND = 1 ) C
    ON C.BK_EMPLOYEE_ID = A.EMPLOYEE_ID
    WHERE RANK = 1"""
# Main method
def main():
    """Run the JB_SALES_HIERARCHY_FLAG_N_SR load and write an audit log line.

    Starts a Spark session, executes the INSERT built by ``query_data`` on
    the load database, then appends a pipe-delimited status record to the
    job log.  Failures are logged the same way and re-raised so the
    scheduler sees a non-zero exit.
    """
    src_count = '0'   # source row count -- not computed by this job
    dest_count = '0'  # target row count -- not computed by this job

    # Session/config setup happens *before* the try block: the original
    # code referenced Ebi_read_write_obj and log_file inside the except
    # handler, which raised NameError (masking the real error) whenever
    # setup itself failed.
    spark, config = start_spark(app_name=app_name)
    Ebi_read_write_obj = EbiReadWrite(app_name, spark, config, logger)

    db_prop_key_load = config['DB_PROP_KEY_LOAD']
    db_schema = config['DB_SCHEMA']
    log_file = config['LOG_DIR_NAME'] + "/" + log_filename

    try:
        # Build and run the INSERT ... SELECT against the load database.
        query = query_data(db_schema)
        Ebi_read_write_obj.get_target_data_update(query, db_prop_key_load)

        end_date = "'" + strftime("%Y-%m-%d %H:%M:%S", gmtime()) + "'"
        data_format = ("JOB START DT : " + start_date
                       + " | SCRIPT NAME : " + script_name
                       + " | JOB : " + app_name
                       + " | SRC COUNT : " + src_count
                       + " | TGT COUNT : " + dest_count
                       + " | JOB END DT : " + end_date
                       + " | STATUS : %(message)s")
        Ebi_read_write_obj.create_log(data_format, log_file, logger)
        logger.info("Success")
        Ebi_read_write_obj.job_debugger_print(
            " \n __main__ " + app_name + " --> Job " + app_name + " Succeed \n")
    except Exception as err:
        # Write the same audit line with a failure status, then re-raise.
        end_date = "'" + strftime("%Y-%m-%d %H:%M:%S", gmtime()) + "'"
        data_format = ("JOB START DT : " + start_date
                       + " | SCRIPT NAME : " + script_name
                       + " | JOB : " + app_name
                       + " | SRC COUNT : " + src_count
                       + " | TGT COUNT : " + dest_count
                       + " | JOB END DT : " + end_date
                       + " | STATUS : %(message)s")
        Ebi_read_write_obj.create_log(data_format, log_file, logger)
        logger.info("[Error] Failed")
        Ebi_read_write_obj.job_debugger_print(" \n Job " + app_name + " Failed\n")
        logger.error("\n __main__ " + app_name + " --> Exception-Traceback :: " + str(err))
        raise
# Entry point: run the job only when executed as a script, not on import.
if __name__ == "__main__":
    # Delegate all work to main() so the module stays importable for tests.
    main()
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.