id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
357591 | # this module is the template of all environment in this project
# some basic and common features are listed in this class
class ENVError(Exception):
    """Exception raised by environment classes of this project.

    Carries a descriptive message that is rendered by str().
    """
    def __init__(self, value_):
        self.__value = value_
    def __str__(self):
        # BUGFIX: the original printed the message and returned None,
        # which makes str(exc) raise a TypeError; return the string instead.
        return 'Environment error occur: ' + self.__value
class ENV:
    """Template of all environments in this project.

    Concrete environments must override :meth:`reset` and :meth:`step`;
    the base implementations raise :class:`ENVError`.
    """
    def __init__(self):
        pass
    def reset(self):
        """Restore the environment to its initial state; must be overridden."""
        raise ENVError('You have not defined the reset function of environment')
    def step(self, action):
        """Advance the environment by one `action`; must be overridden."""
        raise ENVError('You have not defined the step function of environment')
| StarcoderdataPython |
3444647 | from sysbot_helper import bot_main
bot_main()
| StarcoderdataPython |
11309340 | import geomtwo.msg as gms
import matplotlib.pyplot as plt
import cmath as cm
class Vector:
    """Planar (2D) vector stored internally as a complex number
    (real part = x, imaginary part = y)."""

    def __init__(self, *args, **kwargs):
        """Construct a Vector from one of:
        - two scalars (x, y);
        - another Vector or a gms.Vector message;
        - a complex number;
        - keyword arguments x/y or magnitude/angle;
        - nothing (the zero vector).
        Raises ValueError for any other combination.
        """
        # Integer comparisons use == (the original `is` only worked because
        # CPython caches small integers).
        if len(args) == 2:
            self._data = complex(*args)
            return
        if len(args) == 1:
            if isinstance(args[0], (self.__class__, gms.Vector)):
                self._data = complex(args[0].x, args[0].y)
                return
            if isinstance(args[0], complex):
                self._data = complex(args[0])
                return
        if len(args) == 0:
            if set(kwargs.keys()) == set(("x", "y")):
                self.__class__.__init__(self, kwargs["x"], kwargs["y"])
                return
            if set(kwargs.keys()) == set(("magnitude", "angle")):
                # cm.cos/cm.sin return complex values; complex(a, b) combines
                # them as a + b*1j, which yields the intended x/y components.
                self.__class__.__init__(self, kwargs["magnitude"]*cm.cos(kwargs["angle"]), kwargs["magnitude"]*cm.sin(kwargs["angle"]))
                return
            if len(kwargs) == 0:
                self.__class__.__init__(self, 0, 0)
                return
        raise ValueError("Constructor arguments for {} not recognized".format(self.__class__.__name__))

    @property
    def message(self):
        """gms.Vector message equivalent of this vector."""
        return gms.Vector(self.x, self.y)

    def __str__(self):
        string = "{} instance".format(self.__class__.__name__)
        string += "\nx: " + str(self._data.real)
        string += "\ny: " + str(self._data.imag)
        return string

    def draw(self, x0=0., y0=0., **kwargs):
        """Draw the vector as an arrow starting at (x0, y0) on the current
        matplotlib axes; the arrow head scales with the vector norm."""
        head_size = 0.1*abs(self._data)
        artist = plt.gca().arrow(x0, y0, self.x, self.y, head_length=head_size, head_width=head_size, **kwargs)
        return artist,

    @property
    def x(self): return self._data.real

    @property
    def y(self): return self._data.imag

    @property
    def complex_number(self):
        return self._data

    @property
    def norm_squared(self):
        return self.x**2 + self.y**2

    @property
    def norm(self):
        return abs(self._data)

    @property
    def vector(self):
        """A copy of this object as a plain Vector (useful for subclasses)."""
        return Vector(self)

    def __add__(self, other):
        return self.__class__(self._data+other._data)

    def __neg__(self):
        return self.__class__(-self._data)

    def __sub__(self, other):
        return self + (-other)

    def __mul__(self, other):
        # Vector*Vector is complex multiplication; Vector*scalar scales.
        if isinstance(other, self.__class__): return self.__class__(self._data*other._data)
        if isinstance(other, (int,float)): return self.__class__(float(other)*self._data)
        raise TypeError()

    def __rmul__(self, other):
        return self.__class__(other*self._data)

    def __div__(self, other):
        return self*(1.0/other)

    # Python 3 uses __truediv__ for the / operator; keep both names bound
    # so division also works when the module runs under Python 3.
    __truediv__ = __div__

    def dot(self, other):
        return self.x*other.x + self.y*other.y

    def cross(self, other):
        """Scalar (z component of the) 2D cross product."""
        return self.x*other.y-self.y*other.x

    def angle_to(self, other, force_positive=False):
        """Signed angle from self to other in (-pi, pi], mapped to
        [0, 2*pi) when force_positive is True."""
        result = cm.phase(complex(self.dot(other),self.cross(other)))
        if result < 0 and force_positive: result += 2*cm.pi
        return result

    def rotate(self, angle):
        """Return a copy rotated counterclockwise by `angle` radians."""
        return self.__class__(self._data*cm.rect(1.0, angle))

    def saturate(self, threshold):
        """Return self, rescaled down to `threshold` if its norm exceeds it."""
        if self.norm > threshold: return self*threshold/self.norm
        return self
if __name__ == "__main__":
    # Quick visual/printed sanity check for the Vector class.
    # Single-argument print() calls work under both Python 2 and 3
    # (the original used Python-2-only print statements).
    vec = Vector(gms.Vector(x=2,y=3))
    print(vec)
    vec2 = Vector(x=1,y=2)
    print(vec + vec2)
    print(vec - vec2)
    print(vec.dot(vec2))
    print(2*vec)
    print(vec2.angle_to(vec, force_positive=True))
    plt.figure()
    plt.xlim([-5,5])
    plt.ylim([-5,5])
    vec.draw()
    #plt.show()
class Point(Vector):
    """A point in the plane. Shares storage with Vector but restricts the
    algebra: Point - Point = Vector, Point + Vector = Point, while scaling,
    dot/cross products etc. are explicitly forbidden."""

    def __init__(self, *args, **kwargs):
        # Copy-construct from a gms.Point / Point, otherwise defer to the
        # Vector constructor (int comparison fixed from `is` to ==).
        if len(args) == 1 and isinstance(args[0], (gms.Point, self.__class__)):
            self.__class__.__init__(self, args[0].x, args[0].y)
            return
        Vector.__init__(self, *args, **kwargs)

    def draw(self, **kwargs):
        """Draw the point as a scatter marker on the current axes."""
        return plt.scatter(self.x, self.y, **kwargs),

    def __add__(self, other):
        if isinstance(other, self.__class__):
            raise TypeError("You are trying to add {} and {}. One cannot add two {}".format(self, other, self.__class__))
        return Vector.__add__(self, other)

    def __sub__(self, other):
        # Point - Point yields the displacement Vector between them.
        if isinstance(other, self.__class__):
            return Vector(self._data - other._data)
        return self + (-other)

    @property
    def message(self):
        return gms.Point(self.x, self.y)

    # Operations that make no sense for points are explicitly disabled.
    def __mul__(self, other): raise TypeError
    def __rmul__(self, other): raise TypeError
    def __div__(self, other): raise TypeError
    __truediv__ = __div__  # keep / disabled under Python 3 as well
    def dot(self, other): raise TypeError
    def cross(self, other): raise TypeError
    def angle_to(self, other): raise TypeError
    def saturate(self, other): raise TypeError
if __name__ == "__main__":
    # Quick demo for the Point class (single-argument print() calls are
    # valid under both Python 2 and 3).
    pt = Point(gms.Point())
    pt.draw()
    pt2 = Point(2,3)
    (pt-pt2).draw(color="red")
    #plt.show()
    print(pt2+vec)
class Versor(Vector):
    """Unit vector: every constructor input is normalized to norm 1."""

    def __init__(self, *args, **kwargs):
        if len(args) == 2:
            # Normalize the (x, y) pair. cm.sqrt returns a complex with zero
            # imaginary part for the non-negative real argument used here.
            norm = cm.sqrt(args[0]**2+args[1]**2)
            Vector.__init__(self, args[0]/norm, args[1]/norm)
            return
        if len(args) == 1:
            if isinstance(args[0], (gms.Vector, gms.Versor, Vector, Versor)):
                self.__class__.__init__(self, args[0].x, args[0].y)
                return
            if isinstance(args[0], complex):
                self.__class__.__init__(self, args[0].real, args[0].imag)
                return
            if isinstance(args[0], (int, float)):
                # A single scalar is interpreted as an angle in radians
                # (generalized: ints are now accepted in addition to floats).
                self.__class__.__init__(self, cm.cos(args[0]), cm.sin(args[0]))
                return
        if len(args) == 0:
            if set(kwargs.keys()) == set(("x", "y")):
                self.__class__.__init__(self, kwargs["x"], kwargs["y"])
                return
            # BUGFIX: set(("angle")) iterated the *string* into
            # {'a','n','g','l','e'}, so the angle-keyword constructor could
            # never match; a one-element tuple is needed.
            if set(kwargs.keys()) == set(("angle",)):
                self.__class__.__init__(self, kwargs["angle"])
                return
            if len(kwargs) == 0:
                self.__class__.__init__(self, 1, 0)
                return
        raise TypeError()

    @property
    def message(self):
        # Consistency fix: Vector and Point expose a `message` property
        # returning the matching gms type; previously only serialize()
        # existed, so the inherited Vector.message produced a gms.Vector
        # for a Versor (e.g. through Pose.message).
        return gms.Versor(self.x, self.y)

    def serialize(self):
        """Kept for backward compatibility; same as the message property."""
        return gms.Versor(self.x, self.y)

    # A versor has unit norm by construction: adding, subtracting or
    # saturating it would break that invariant.
    def __add__(self, other): raise TypeError
    def __sub__(self, other): raise TypeError
    def saturate(self): raise TypeError
if __name__ == "__main__":
    # Quick demo for the Versor class. The format() call reproduces the
    # Python-2 `print a, b` output ("a b") on both Python 2 and 3.
    vs2 = Versor(2,4)
    print("{} {}".format(vs2.norm, vs2.norm_squared))
    vs2.draw()
class Transform:
    """Planar rigid displacement: a translation Vector plus a rotation
    angle in radians."""

    def __init__(self, *args, **kwargs):
        # Integer comparisons use == (the original `is` only worked because
        # CPython caches small integers).
        if len(args) == 2:
            translation, rotation = args
            if isinstance(translation, (Vector, gms.Vector)) and isinstance(rotation, (int,float)):
                self._translation = Vector(translation)
                self._rotation = float(rotation)
                return
        if len(args) == 1:
            if isinstance(args[0], (Transform, gms.Transform)):
                self.__class__.__init__(self, args[0].translation, args[0].rotation)
                return
        if len(args) == 0:
            if set(kwargs.keys()) == set("translation rotation".split()):
                self.__class__.__init__(self, kwargs["translation"], kwargs["rotation"])
                return
            if len(kwargs) == 0:
                # Identity transform.
                self.__class__.__init__(self, Vector(), 0.0)
                return
        raise TypeError()

    def __str__(self):
        string = "{} instance".format(self.__class__.__name__)
        string += "\ntranslation: " + str(self._translation)
        string += "\nrotation: " + str(self._rotation)
        return string

    def __add__(self, other):
        return self.__class__(self._translation+other._translation, self._rotation+other._rotation)

    def __neg__(self):
        return self.__class__(-self._translation, -self._rotation)

    def __sub__(self, other):
        return self + (-other)

    @property
    def translation(self): return self._translation

    @property
    def rotation(self): return self._rotation

    @property
    def message(self): return gms.Transform(self._translation.message, self._rotation)
class Twist:
    """Planar rigid-body velocity: a linear Vector plus an angular rate."""

    def __init__(self, *args, **kwargs):
        # Integer comparisons use == (the original `is` only worked because
        # CPython caches small integers).
        if len(args) == 2:
            linear, angular = args
            if isinstance(linear, (Vector, gms.Vector)) and isinstance(angular, (int,float)):
                self._linear = Vector(linear)
                self._angular = float(angular)
                return
        if len(args) == 1:
            if isinstance(args[0], (Twist, gms.Twist)):
                self.__class__.__init__(self, args[0].linear, args[0].angular)
                return
        if len(args) == 0:
            if set(kwargs.keys()) == set("linear angular".split()):
                self.__class__.__init__(self, kwargs["linear"], kwargs["angular"])
                return
            if len(kwargs) == 0:
                # Zero twist.
                self.__class__.__init__(self, Vector(), 0.0)
                return
        raise TypeError()

    def __str__(self):
        string = "{} instance".format(self.__class__.__name__)
        string += "\nlinear: " + str(self._linear)
        string += "\nangular: " + str(self._angular)
        return string

    def __add__(self, other):
        return self.__class__(self._linear+other._linear, self._angular+other._angular)

    def __neg__(self):
        return self.__class__(-self._linear, -self._angular)

    def __sub__(self, other):
        return self + (-other)

    @property
    def linear(self): return self._linear

    @property
    def angular(self): return self._angular

    @property
    def message(self): return gms.Twist(self._linear.message, self._angular)

    def integrate(self, time):
        """Displacement Transform obtained by applying this twist for
        `time` seconds (constant-velocity integration)."""
        return Transform( translation=self._linear*time, rotation=self._angular*time )
if __name__ == "__main__":
    # Quick demo for the Transform class (single-argument print() calls
    # are valid under both Python 2 and 3).
    tf = Transform()
    print(tf)
    tf2 = Transform(Vector(1,2), 3)
    print(tf - tf2)
class Pose:
    """Planar pose: a position (Point) plus an orientation (Versor)."""

    def __init__(self, *args, **kwargs):
        # Integer comparisons use == (the original `is` only worked because
        # CPython caches small integers).
        if len(args) == 2:
            pos, ori = args
            if isinstance(pos, (Point, gms.Point)) and isinstance(ori, (Versor, gms.Versor)):
                self._position = Point(pos)
                self._orientation = Versor(ori)
                return
        if len(args) == 1:
            if isinstance(args[0], (Pose, gms.Pose)):
                self.__class__.__init__(self, args[0].position, args[0].orientation)
                return
        if len(args) == 0:
            if set(kwargs.keys()) == set("position orientation".split()):
                self.__class__.__init__(self, kwargs["position"], kwargs["orientation"])
                return
            if len(kwargs) == 0:
                # Default: at the origin, oriented along +x.
                self.__class__.__init__(self, Point(), Versor())
                return
        raise TypeError()

    def __str__(self):
        string = "{} instance".format(self.__class__.__name__)
        string += "\nposition: " + str(self._position)
        string += "\norientation: " + str(self._orientation)
        return string

    def draw(self, **kwargs):
        """Draw the position marker plus the orientation arrow anchored at
        the position; returns the tuple of created artists."""
        return self._position.draw(**kwargs) + self._orientation.draw(x0=self._position.x, y0=self._position.y, **kwargs)

    @property
    def position(self):
        return self._position

    @property
    def orientation(self):
        return self._orientation

    @property
    def message(self):
        return gms.Pose(self._position.message, self._orientation.message)

    def __add__(self, transform):
        """Apply a Transform: translate the position and rotate the
        orientation by the transform's rotation."""
        pos = self._position + transform.translation
        ori = self._orientation.rotate( transform.rotation )
        return self.__class__(pos, ori)

    def __sub__(self, transform):
        return self + (-transform)
if __name__ == "__main__":
    # Quick demo for the Pose class.
    ps = Pose()
    ps.draw()
if __name__ == "__main__":
    # Show all figures created by the demo blocks above.
    plt.show()
| StarcoderdataPython |
6616831 | <reponame>niladell/DockStream
from dockstream.utils.enums.taut_enum_enums import TautEnumEnum
from dockstream.utils.execute_external.execute import ExecutorBase
EE = TautEnumEnum()
class TautEnumExecutor(ExecutorBase):
    """For the execution of the "TautEnum" binary."""

    def __init__(self, prefix_execution=None, binary_location=None):
        super().__init__(prefix_execution=prefix_execution, binary_location=binary_location)

    def execute(self, command: str, arguments: list, check=True, location=None):
        """Execute a TautEnum command through the base executor.

        :param command: name of the executable; must be one of the internal
                        TautEnum executables (currently only EE.TAUTENUM).
        :param arguments: list of command-line arguments.
        :param check: forwarded to the base executor.
        :param location: forwarded to the base executor.
        :raises ValueError: if `command` is not a known TautEnum executable.
        """
        # check, whether a proper executable is provided
        if command not in [EE.TAUTENUM]:
            # (message fixed: the original "must be an dictionary of" was garbled)
            raise ValueError("Parameter command must be a member of the internal TautEnum executable list.")
        return super().execute(command=command,
                               arguments=arguments,
                               check=check,
                               location=location)

    def is_available(self):
        """Return True if the TautEnum binary can be executed.

        Unfortunately, "TautEnum" does not seem to return a meaningful
        return value, so instead try to parse the "stdout" of the help
        message. Any failure is treated as "not available" (best effort).
        """
        try:
            result = self.execute(command=EE.TAUTENUM,
                                  arguments=[EE.TAUTENUM_HELP],
                                  check=False)
            return EE.TAUTENUM_HELP_IDENTIFICATION_STRING in result.stdout
        except Exception:
            return False
| StarcoderdataPython |
8099894 | from django.conf.urls import url
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.http import HttpResponse
from django.urls import path, include
from apps.finder import views
from apps.accounts import views as account_views
from apps.maintenance import views as maintenance_views
from settings.sitemaps import StaticViewSitemap
import django.contrib.auth.urls
# Sitemap registry consumed by the django sitemap view below.
sitemaps = {
    'static': StaticViewSitemap,
}
urlpatterns = [
    # Serve a permissive robots.txt directly from memory (no template needed).
    url(r'^robots.txt',
        lambda x: HttpResponse("User-Agent: *\nDisallow:", content_type='text/plain'),
        name='robots_file'),
    path('sitemap.xml', sitemap, {'sitemaps': sitemaps}, name='django.contrib.sitemaps.views.sitemap'),
    path('', views.get_home, name='main'),
    path('admin/', admin.site.urls),
    # Built-in auth views (login, logout, password reset, ...).
    path('accounts/', include('django.contrib.auth.urls')),
    path('signup/', account_views.SignUp.as_view(), name='signup'),
    path('about/', views.get_about, name='about'),
    path('contact/', views.get_contact, name='contact'),
    path('shops/', views.get_shops),
    path('tags/', views.get_tags),
    path('add_store/', views.add_store),
    path('maintenance/', include('apps.maintenance.urls'))
]
| StarcoderdataPython |
6647046 | #!/usr/bin/env python
# Simple script to convert molecular structures into Faunus AAM
# format. Uses OpenBabel's python interface to read a multitude
# of different input formats. Openbabel can be installed in
# anacona using:
# conda install --channel https://conda.anaconda.org/openbabel openbabel
# python 2/3 compatibility
from __future__ import print_function, division
import openbabel as ob
from math import pi
import sys, os, datetime
# see http://openbabel.org/docs/2.3.0/UseTheLibrary/PythonDoc.html
def MolecularWeight(residue):
    """Sum of the atomic masses of all atoms in `residue` (g/mol)."""
    return sum(atom.GetAtomicMass() for atom in ob.OBResidueAtomIter(residue))
def Radius(residue):
    """Effective radius of `residue`, modeled as a sphere of unit density."""
    density = 1.0  # placeholder density; volume = mass / density
    mass = MolecularWeight(residue)
    # Invert m = rho * (4/3) * pi * r^3 for r.
    return (3. / (4 * pi) * mass / density) ** (1 / 3.)
def MassCenter(residue):
    """Mass-weighted centroid [x, y, z] of the atoms in `residue`."""
    total_mass = 0
    cx, cy, cz = 0, 0, 0
    for atom in ob.OBResidueAtomIter(residue):
        mass = atom.GetAtomicMass()
        total_mass += mass
        cx += mass * atom.x()
        cy += mass * atom.y()
        cz += mass * atom.z()
    return [cx / total_mass, cy / total_mass, cz / total_mass]
if len(sys.argv) == 1:
    # No input file given: list the readable formats and quit.
    print("First argument must be a structure file. Supported formats:")
    for s in ob.OBConversion().GetSupportedInputFormat():
        print(s)
    sys.exit()
mol = ob.OBMol()
obconv = ob.OBConversion()
infile = sys.argv[1]
# Deduce the input format from the file extension.
informat = obconv.FormatFromExt(infile)
obconv.SetInFormat(informat)
obconv.ReadFile(mol, infile)
assert mol.NumResidues() > 0, infile + " not found or it is empty."
# AAM header: a provenance comment plus the number of residues.
print("# Infile:", infile, "on", datetime.datetime.now(), os.uname()[1])
print(mol.NumResidues())
for res in ob.OBResidueIter(mol):
    cm = MassCenter(res)
    resname = res.GetName()
    resnum = res.GetNum()
    charge = 0  # charge is not derived from the structure; always zero here
    radius = Radius(res)
    weight = MolecularWeight(res)
    # One AAM record per residue: name num x y z charge weight radius.
    print('{0:4} {1:5} {2:8.3f} {3:8.3f} {4:8.3f} {5:6.3f} {6:6.2f} {7:6.2f}'.format(
        resname, resnum, cm[0], cm[1], cm[2], charge, weight, radius))
241074 | <filename>possum/dev_possum_unsharp_mask.py
#!/usr/bin/python
import itk
from sys import argv
from optparse import OptionParser, OptionGroup
class unsharpMaskImageFilter():
    def __init__(self, InputImage, sigmaArray, ammount):
        """
        Simple workflow implementing unsharp masking:
        output = input + ammount * (input - gaussian_smooth(input)).

        :param InputImage: itk image to sharpen.
        :param sigmaArray: per-axis sigmas for the Gaussian smoothing.
        :param ammount: scale applied to the high-pass (detail) component.
        """
        im = itk.image(InputImage)
        InType = itk.class_(im)
        # Low-pass component of the input image.
        self.gaussianSmooth = itk.SmoothingRecursiveGaussianImageFilter[InType,InType].New(\
            InputImage,
            SigmaArray = sigmaArray)
        # High-pass component: input - smoothed.
        self.substract = itk.SubtractImageFilter[InType,InType,InType].New(\
            Input1 = InputImage,
            Input2 = self.gaussianSmooth.GetOutput())
        # Scale the high-pass component by `ammount`.
        self.shiftScale = itk.ShiftScaleImageFilter[InType,InType].New(\
            Input = self.substract.GetOutput(),
            Scale = ammount,
            Shift = 0)
        # Add the scaled detail back onto the original image.
        self.addFilter = itk.AddImageFilter[InType,InType,InType].New(\
            Input1 = self.shiftScale.GetOutput(),
            Input2 = InputImage)
    def GetOutput(self):
        # Run the (lazy) itk pipeline and return the sharpened image.
        self.addFilter.Update()
        return self.addFilter.GetOutput()
def launchFilterMultichannel(options, args):
    """
    Multichannel unsharp mask workflow. This is actually the grayscale
    workflow applied to each separate color channel. This function is
    limited to 3D RGB unsigned-char images.
    """
    # Define image dimensions, pixels and image type
    imageDim = options.imageDim
    MultichannelPixelType = itk.RGBPixel
    ScalarPixelType = itk.UC
    ScalarImageType = itk.Image[ScalarPixelType,imageDim]
    MCImageType = itk.Image.RGBUC3
    # Read image (define reader and writer)
    reader = itk.ImageFileReader.IRGBUC3.New(FileName = options.inputFile)
    writer = itk.ImageFileWriter[MCImageType].New()
    # Split multichannel image apart into channel
    extractR = itk.VectorIndexSelectionCastImageFilter[MCImageType,ScalarImageType].New(\
        Input = reader.GetOutput(),
        Index = 0)
    extractG = itk.VectorIndexSelectionCastImageFilter[MCImageType,ScalarImageType].New(\
        Input = reader.GetOutput(),
        Index = 1)
    extractB = itk.VectorIndexSelectionCastImageFilter[MCImageType,ScalarImageType].New(\
        Input = reader.GetOutput(),
        Index = 2)
    # Apply unsharp mask to each channel separately
    unsharpR = unsharpMaskImageFilter(extractR.GetOutput(),
            sigmaArray = options.sigmaArray,
            ammount = options.unsharpAmmount)
    unsharpG = unsharpMaskImageFilter(extractG.GetOutput(),
            sigmaArray = options.sigmaArray,
            ammount = options.unsharpAmmount)
    unsharpB = unsharpMaskImageFilter(extractB.GetOutput(),
            sigmaArray = options.sigmaArray,
            ammount = options.unsharpAmmount)
    # Merge image back into multichannel image.
    composeFilter = itk.ComposeImageFilter[ScalarImageType,MCImageType].New(\
        Input1 = unsharpR.GetOutput(),
        Input2 = unsharpG.GetOutput(),
        Input3 = unsharpB.GetOutput())
    # And then write it
    writer = itk.ImageFileWriter[MCImageType].New(composeFilter, FileName = options.outputFile)
    writer.Update();
def launchFilterGrayscale(options, args):
    """
    Grayscale unsharp mask workflow: read the image as float, apply the
    unsharp mask, rescale to unsigned char and write the result.
    """
    # Define image dimensions, pixels and image type
    imageDim = options.imageDim
    InputPixelType = itk.F
    OutputPixelType = itk.F
    InputImageType = itk.Image[InputPixelType, imageDim]
    OutputImageType = itk.Image[OutputPixelType, imageDim]
    WritePixelType = itk.UC
    WriteImageType = itk.Image[WritePixelType, imageDim]
    # Read the input image, process it and then save
    reader = itk.ImageFileReader[InputImageType].New(FileName = options.inputFile)
    unsharp = unsharpMaskImageFilter(reader.GetOutput(),
            sigmaArray = options.sigmaArray,
            ammount = options.unsharpAmmount)
    # Rescale image intensity to the 8-bit unsigned-char range (0-255), then save
    rescaler = itk.RescaleIntensityImageFilter[OutputImageType,WriteImageType].New(unsharp.GetOutput(), OutputMinimum=0, OutputMaximum=255)
    writer = itk.ImageFileWriter[WriteImageType].New(rescaler, FileName = options.outputFile)
    writer.Update();
def launchFilter(options, args):
    """
    Dispatch to the multichannel (RGB) or grayscale unsharp-mask workflow.
    Only 3D unsharp masking is supported here; for a 2D image use e.g.
    ImageMagick instead, as it would be easier and faster.
    """
    workflow = launchFilterMultichannel if options.multichannelWorkflow \
        else launchFilterGrayscale
    workflow(options, args)
def parseArgs():
    """
    Parse command line options for the unsharp-mask script.

    :return: the (options, args) tuple from optparse.

    Exits with status 1 (after printing the help message) when the
    mandatory input or output file name is missing.
    """
    usage = "python unsharpMaskFilter.py -i b.nii.gz -o c.nii.gz --sigmaArray 0.05 0.05 0.05 --unsharpAmmount 4"
    parser = OptionParser(usage = usage)
    parser.add_option('--imageDim', '-d', dest='imageDim', type='int',
            default=3, help='')
    parser.add_option('--outputFile', '-o', dest='outputFile', type='str',
            default=None, help='')
    parser.add_option('--inputFile', '-i', dest='inputFile', type='str',
            default=None, help='')
    parser.add_option('--multichannelWorkflow', default=False,
            dest='multichannelWorkflow', action='store_const', const=True,
            help='Indicate that provided image is a RGB image and the RGB workflow has to be used.')
    parser.add_option('--sigmaArray', default=[1,1,1],
            type='float', nargs=3, dest='sigmaArray',
            help='Sigma array used during gaussian smoothing')
    parser.add_option('--unsharpAmmount', default=0.5,
            type='float', dest='unsharpAmmount',
            # help text fixed: it was copy-pasted from --sigmaArray
            help='Ammount of unsharp masking applied to the image')
    (options, args) = parser.parse_args()
    # Input and output file names are mandatory.
    if (not options.outputFile) or (not options.inputFile):
        parser.print_help()
        # raise SystemExit instead of the site-injected exit() helper,
        # which is not guaranteed to exist in non-interactive contexts.
        raise SystemExit(1)
    return (options, args)
if __name__ == '__main__':
    # Parse CLI options and run the selected unsharp-mask workflow.
    options, args = parseArgs()
    launchFilter(options,args)
| StarcoderdataPython |
5171468 | <reponame>david-zwicker/py-utils
'''
Created on Nov 4, 2016
@author: <NAME> <<EMAIL>>
'''
from __future__ import division
import unittest
import networkx as nx
import six
from .. import graphs
class TestGraphs(unittest.TestCase):
    """Tests for graphs.connect_components."""
    _multiprocess_can_split_ = True  # let nose know that tests can run parallel

    def test_conncet_components(self):
        g = nx.Graph()
        g.add_node(0, pos=(0, 0))
        g.add_node(1, pos=(1, 1))
        g.add_edges_from([[0, 1]])
        # trivial test with connected graph
        gc = graphs.connect_components(g, 'pos', 'dist')
        self.assertTrue(nx.is_connected(gc))
        six.assertCountEqual(self, gc.nodes(), [0, 1])
        six.assertCountEqual(self, gc.edges(), [(0, 1)])
        # add another component
        g.add_node(10, pos=(10, 0))
        g.add_node(11, pos=(9, 1))
        g.add_edges_from([[10, 11]])
        gc = graphs.connect_components(g, 'pos', 'dist')
        self.assertTrue(nx.is_connected(gc))
        six.assertCountEqual(self, gc.nodes(), [0, 1, 10, 11])
        six.assertCountEqual(self, gc.edges(), [(0, 1), (10, 11), (1, 11)])
        # nodes 1=(1,1) and 11=(9,1) are 8 units apart along x
        self.assertEqual(nx.get_edge_attributes(gc, 'dist')[(1, 11)], 8)

    def test_conncet_components_error(self):
        g = nx.Graph()
        # an empty graph stays empty
        gc = graphs.connect_components(g, 'pos')
        self.assertTrue(nx.is_empty(gc))
        # a node missing the position attribute makes the call fail
        g.add_node(0)
        g.add_node(1, pos=(1, 1))
        g.add_edge(0, 1)
        self.assertRaises(ValueError,
                          lambda: graphs.connect_components(g, 'pos'))
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| StarcoderdataPython |
6494971 | from unittest import mock, TestCase
from taguri.minter import TagUriMinter
class TagUriMinterTestCase(TestCase):
    """Tests for TagUriMinter: validator wiring, derived properties and
    minting of tag URIs with an optional fragment."""

    @mock.patch('taguri.minter.authority_name_validator', return_value=True)
    def test_minter_validates_authority_name(self, validator):
        TagUriMinter('example.org', '2018')
        self.assertTrue(validator.called)
        self.assertTupleEqual(('example.org',), validator.call_args[0])

    @mock.patch('taguri.minter.date_validator', return_value=True)
    def test_minter_validates_date(self, validator):
        TagUriMinter('example.org', '2018-11-21')
        self.assertTrue(validator.called)
        self.assertTupleEqual(('2018-11-21',), validator.call_args[0])

    @mock.patch('taguri.minter.authority_name_validator', return_value=False)
    def test_minter_raises_if_authority_name_is_not_valid(self, validator):
        with self.assertRaises(AttributeError):
            TagUriMinter('whatever', '2018')

    @mock.patch('taguri.minter.date_validator', return_value=False)
    def test_minter_raises_if_date_is_not_valid(self, validator):
        with self.assertRaises(AttributeError):
            TagUriMinter('example.com', '2018-11-21')

    def test_minter_properties(self):
        minter = TagUriMinter('alice.example.org', '2018-11-21')
        self.assertEqual('alice.example.org', minter.authority_name)
        self.assertEqual('2018-11-21', minter.date)
        self.assertEqual('alice.example.org,2018-11-21', minter.tagging_entity)
        self.assertEqual('tag:alice.example.org,2018-11-21', minter.prefix)

    def test_minter_mints(self):
        minter = TagUriMinter('alice.example.org', '2018-11-21')
        expected = 'tag:alice.example.org,2018-11-21:Collections/Books'
        self.assertEqual(expected, minter.mint('Collections/Books'))

    def test_minter_mints_with_fragment(self):
        minter = TagUriMinter('alice.example.org', '2018-11-21')
        expected = 'tag:alice.example.org,2018-11-21:Collections/Books#Doe'
        self.assertEqual(expected, minter.mint('Collections/Books', 'Doe'))

    @mock.patch('taguri.minter.specific_validator', return_value=False)
    def test_minter_raises_if_specific_is_not_valid(self, validator):
        with self.assertRaises(AttributeError):
            minter = TagUriMinter('alice.example.org', '2018-11-21')
            minter.mint('Invalid/Item')
        self.assertEqual(1, validator.call_count)

    @mock.patch('taguri.minter.specific_validator', side_effect=(True, False))
    def test_minter_raises_if_fragment_is_not_valid(self, validator):
        # the validator is called once for the specific and once for the
        # fragment; the second (False) result triggers the error
        with self.assertRaises(AttributeError):
            minter = TagUriMinter('alice.example.org', '2018-11-21')
            minter.mint('Invalid/Item', 'DoeFragment')
        self.assertEqual(2, validator.call_count)
3315897 | <gh_stars>1-10
########################################################################################
## ##
## THIS LIBRARY IS PART OF THE SOFTWARE DEVELOPED BY THE JET PROPULSION LABORATORY ##
## IN THE CONTEXT OF THE GPU ACCELERATED FLEXIBLE RADIOFREQUENCY READOUT PROJECT ##
## ##
########################################################################################
import numpy as np
import scipy.signal as signal
import signal as Signal
import h5py
import sys
import struct
import json
import os
import socket
import Queue
from Queue import Empty
from threading import Thread, Condition
import multiprocessing
from joblib import Parallel, delayed
from subprocess import call
import time
import gc
import datetime
# plotly stuff
from plotly.graph_objs import Scatter, Layout
from plotly import tools
#import plotly.plotly as py
import plotly.graph_objs as go
import plotly
import colorlover as cl
# matplotlib stuff
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as pl
import matplotlib.patches as mpatches
# needed to print the data acquisition process
import progressbar
# import submodules
from USRP_low_level import *
from USRP_files import *
def reinit_data_socket():
    '''
    Reinitialize the data network socket.

    :return: None
    '''
    global USRP_data_socket
    fresh_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    fresh_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    USRP_data_socket = fresh_socket
def reinit_async_socket():
    '''
    Reinitialize the command network socket.

    :return: None
    '''
    global USRP_socket
    fresh_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    fresh_socket.settimeout(1)
    fresh_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    USRP_socket = fresh_socket
def clean_data_queue(USRP_data_queue=USRP_data_queue):
    '''
    Clean the USRP_data_queue from residual elements. returns the number of element found in the queue.

    Returns:
        - Integer number of packets removed from the queue.
    '''
    print_debug("Cleaning data queue... ")
    residual_packets = 0
    draining = True
    while draining:
        try:
            meta_data, data = USRP_data_queue.get(timeout=0.1)
        except Empty:
            draining = False
        else:
            residual_packets += 1
    print_debug("Queue cleaned of " + str(residual_packets) + " packets.")
    return residual_packets
def Packets_to_file(parameters, timeout=None, filename=None, dpc_expected=None, push_queue = None, trigger = None, **kwargs):
    '''
    Consume the USRP_data_queue and write an H5 file on disk.

    :param parameters: global_parameter object containing the information used to drive the GPU server.
    :param timeout: time (s) after which the function stops waiting for data and tries to stop the server.
    :param filename: optional output filename (without extension). Default is a timestamped name.
    :param dpc_expected: number of samples per channel expected; if given, a percentage progressbar is displayed.
    :param push_queue: external queue onto which (metadata, data) tuples are also pushed.
    :param trigger: trigger class instance (see the section on trigger functions for details).

    :return: the filename used, or an empty string if something went wrong.

    Note:
        - if the \"End of measurement\" async signal is received from the GPU server the timeout mode becomes active.
    '''
    global dynamic_alloc_warning
    push_queue_warning = False
    def write_ext_H5_packet(metadata, data, h5fp, index, trigger = None):
        '''
        Write a single packet inside an already opened and formatted H5 file as an ordered dataset.

        Arguments:
            - metadata: the metadata describing the packet directly coming from the GPU server.
            - data: the data to be written inside the dataset.
            - h5fp: file pointer to the h5 file; the extensible dataset has to be already created.
            - index: integer sample offset (per channel) at which this packet starts.
            - trigger: trigger class (see trigger section for more info).

        Notes:
            - The way this function writes the packets inside the h5 file is strictly related to the metadata type described in USRP_server_setting.hpp as the RX_wrapper struct.
        '''
        global dynamic_alloc_warning
        dev_name = "raw_data" + str(int(metadata['usrp_number']))
        group_name = metadata['front_end_code']
        samples_per_channel = metadata['length'] / metadata['channels']
        dataset = h5fp[dev_name][group_name]["data"]
        errors = h5fp[dev_name][group_name]["errors"]
        data_shape = np.shape(dataset)
        # This packet occupies [data_start, data_end) along the samples axis.
        data_start = index
        data_end = data_start + samples_per_channel
        if ((trigger is not None) and (metadata['length']>0)):
            if trigger.trigger_control == "AUTO":
                # In AUTO mode record the sample offset of every triggered
                # packet plus trigger-specific attributes.
                trigger_dataset = h5fp[dev_name][group_name]["trigger"]
                current_len_trigger = len(trigger_dataset)
                trigger_dataset.resize(current_len_trigger+1,0)
                trigger_dataset[current_len_trigger] = index
                trigger_name = str(trigger.__class__.__name__)
                if trigger_name == "amplitude_trigger":
                    trigger_dataset.attrs["bounds"] = trigger.bounds
                    trigger_dataset.attrs["nglitch"] = trigger.nglitch
                    trigger_dataset.attrs["glitch_indices"] = trigger.glitch_indices
                    trigger_dataset.attrs["samples_per_packet"] = trigger.samples_per_packet
        try:
            if data_shape[0] < metadata['channels']:
                print_warning("Main dataset in H5 file not initialized.")
                dataset.resize(metadata['channels'], 0)
            if data_end > data_shape[1]:
                # Dataset too short for this packet: grow it (warn only once).
                if dynamic_alloc_warning:
                    print_warning("Main dataset in H5 file not correctly sized. Dynamically extending dataset...")
                    # print_debug("File writing thread is dynamically extending datasets.")
                    dynamic_alloc_warning = False
                dataset.resize(data_end, 1)
            # Interleaved samples -> (samples, channels) -> transposed to (channels, samples).
            packet = np.reshape(data, (samples_per_channel,metadata['channels'])).T
            dataset[:, data_start:data_end] = packet
            dataset.attrs.__setitem__("samples", data_end)
            if data_start == 0:
                dataset.attrs.__setitem__("start_epoch", time.time())
            if metadata['errors'] != 0:
                # Record the sample range affected by a server-side error.
                print_warning("The server encounterd an error")
                err_shape = np.shape(errors)
                err_len = err_shape[1]
                if err_shape[0] == 0:
                    errors.resize(2, 0)
                errors.resize(err_len + 1, 1)
                errors[:, err_len] = [data_start, data_end]
        except RuntimeError as err:
            print_error("A packet has not been written because of a problem: " + str(err))
    def write_single_H5_packet(metadata, data, h5fp):
        '''
        Write a single packet inside an already opened and formatted H5 file as an ordered dataset.

        Arguments:
            - metadata: the metadata describing the packet directly coming from the GPU server.
            - data: the data to be written inside the dataset.
            - h5fp: already opened, with write permission and group created h5 file pointer.

        Returns:
            - Nothing

        Notes:
            - The way this function writes the packets inside the h5 file is strictly related to the metadata type described in USRP_server_setting.hpp as the RX_wrapper struct.
        '''
        dev_name = "raw_data" + str(int(metadata['usrp_number']))
        group_name = metadata['front_end_code']
        # One dataset per packet, named after the packet sequence number.
        dataset_name = "dataset_" + str(int(metadata['packet_number']))
        try:
            ds = h5fp[dev_name][group_name].create_dataset(
                dataset_name,
                data=np.reshape(data, (metadata['channels'], metadata['length'] / metadata['channels']))
                # compression = H5PY_compression
            )
            ds.attrs.create(name="errors", data=metadata['errors'])
            if metadata['errors'] != 0:
                print_warning("The server encounterd a transmission error: " + str(metadata['errors']))
        except RuntimeError as err:
            print_error("A packet has not been written because of a problem: " + str(err))
def create_h5_file(filename):
'''
Tries to open a h5 file without overwriting files with the same name. If the file already exists rename it and then create the file.
Arguments:
- String containing the name of the file.
Returns:
- Pointer to rhe opened file in write mode.
'''
filename = filename.split(".")[0]
try:
h5file = h5py.File(filename + ".h5", 'r')
h5file.close()
except IOError:
try:
h5file = h5py.File(filename + ".h5", 'w')
return h5file
except IOError as msg:
print_error("Cannot create the file " + filename + ".h5:")
print msg
return ""
else:
print_warning(
"Filename " + filename + ".h5 is already present in the folder, adding old(#)_ to the filename")
count = 0
while True:
new_filename = "old(" + str(int(count)) + ")_" + filename + ".h5"
try:
test = h5py.File(new_filename, 'r')
tets.close()
except IOError:
os.rename(filename + ".h5", new_filename)
return open_h5_file(filename)
else:
count += 1
    global USRP_data_queue, END_OF_MEASURE, EOM_cond, CLIENT_STATUS
    more_sample_than_expected_WARNING = True
    accumulated_timeout = 0
    sleep_time = 0.1
    acquisition_end_flag = False
    # this variable discriminate between a timeout condition generated
    # on purpose to wait the queue and one reached because of an error
    legit_off = False
    if filename == None:
        filename = "USRP_DATA_" + get_timestamp()
    print "Writing data on disk with filename: \"" + filename + ".h5\""
    H5_file_pointer = create_h5_file(str(filename))
    Param_to_H5(H5_file_pointer, parameters, trigger, **kwargs)
    # Per-front-end counters of samples-per-channel written so far.
    allowed_counters = ['A_RX2','B_RX2']
    spc_acc = {}
    for fr_counter in allowed_counters:
        if parameters.parameters[fr_counter] != 'OFF': spc_acc[fr_counter] = 0
    CLIENT_STATUS["measure_running_now"] = True
    # Progress bar: percentage bar when the expected sample count is known,
    # otherwise a plain counter with elapsed time.
    if dpc_expected is not None:
        widgets = [progressbar.Percentage(), progressbar.Bar()]
        bar = progressbar.ProgressBar(widgets=widgets, max_value=dpc_expected)
    else:
        widgets = ['', progressbar.Counter('Samples per channel received: %(value)05d'),
                   ' Client time elapsed: ', progressbar.Timer(), '']
        bar = progressbar.ProgressBar(widgets=widgets)
    data_warning = True
    bar.start()
    # Main consumer loop: pop packets from the queue and write them to disk.
    while (not acquisition_end_flag):
        try:
            meta_data, data = USRP_data_queue.get(timeout=0.1)
            # USRP_data_queue.task_done()
            accumulated_timeout = 0
            if meta_data == None:
                # A None metadata packet is the end-of-stream sentinel.
                acquisition_end_flag = True
            else:
                # write_single_H5_packet(meta_data, data, H5_file_pointer)
                if trigger is not None:
                    data, meta_data = trigger.trigger(data, meta_data)
                write_ext_H5_packet(meta_data, data, H5_file_pointer, spc_acc[meta_data['front_end_code']], trigger = trigger)
                if push_queue is not None:
                    # Forward packets to the external consumer; warn only once
                    # if the external queue misbehaves.
                    if not push_queue_warning:
                        try:
                            push_queue.put((meta_data, data))
                        except:
                            print_warning("Cannot push packets into external queue: %s"%str(sys.exc_info()[0]))
                            push_queue_warning = True
                spc_acc[meta_data['front_end_code']] += meta_data['length'] / meta_data['channels']
                try:
                    #print "max expected: %d total received %d"%(dpc_expected, spc_acc)
                    bar.update(spc_acc[meta_data['front_end_code']])
                except:
                    # More samples than expected: clamp the bar and warn once.
                    if data_warning:
                        if dpc_expected is not None:
                            bar.update(dpc_expected)
                        if (more_sample_than_expected_WARNING):
                            print_warning("Sync rx is receiving more data than expected...")
                            more_sample_than_expected_WARNING = False
                        data_warning = False
        except Empty:
            # No packet available: sleep and, if a timeout is armed,
            # accumulate toward the shutdown condition.
            time.sleep(sleep_time)
            if timeout:
                accumulated_timeout += sleep_time
                if accumulated_timeout > timeout:
                    if not legit_off: print_warning("Sync data receiver timeout condition reached. Closing file...")
                    acquisition_end_flag = True
                    break
        if CLIENT_STATUS["keyboard_disconnect"] == True:
            # User requested disconnection from the keyboard.
            Disconnect()
            acquisition_end_flag = True
            CLIENT_STATUS["keyboard_disconnect"] = False
    # Final progress-bar update; meta_data is unbound if no packet arrived.
    try:
        bar.update(spc_acc[meta_data['front_end_code']])
    except NameError:
        pass
    except:
        if (more_sample_than_expected_WARNING): print_debug("Sync RX received more data than expected.")
    EOM_cond.acquire()
    if END_OF_MEASURE:
        # Server signalled end of measurement: switch to a short-timeout
        # drain mode (legit_off suppresses the timeout warning).
        timeout = .5
        legit_off = True
    EOM_cond.release()
    bar.finish()
    EOM_cond.acquire()
    END_OF_MEASURE = False
    EOM_cond.release()
    if clean_data_queue() != 0:
        print_warning("Residual elements in the libUSRP data queue are being lost!")
    H5_file_pointer.close()
    print "\033[7;1;32mH5 file closed succesfully.\033[0m"
    CLIENT_STATUS["measure_running_now"] = False
    return filename
def USRP_socket_bind(USRP_socket, server_address, timeout):
    """
    Bind a socket object to a server address, retrying until ``timeout`` seconds have elapsed.

    Args:
        - USRP_socket: socket object to bind with the address tuple.
        - server_address: a tuple containing a string with the ip address and a int representing the port.
        - timeout: timeout in seconds to wait for connection.

    Known bugs:
        - On some linux distribution once on two attempts the connection is denied by software. On third attempt however it connects.

    Returns:
        - True: if connection was successful.
        - False if no connection was established.

    Examples:
        >>> if(USRP_socket_bind(USRP_data_socket, USRP_server_address_data, 5)):
        # do stuff with function in this library
        >>> else:
        >>>     print "No connection, check hardware and configs."

    Notes:
        - This method will only connect one socket to the USRP/GPU server, not data and async messages. This function is intended to be used in higher level functions contained in this library.
    """
    if timeout < 0:
        # Retry budget exhausted: give up.
        print_warning("No GPU server connection established after timeout.")
        return False
    else:
        try:
            USRP_socket.connect(server_address)
            return True
        except socket.error as msg:
            print(("Socket binding " + str(msg) + ", " + "Retrying..."))
            return False
    # NOTE(review): both branches above return, so the retry code below appears
    # unreachable as written — the callers (Async_thread / Sync_RX) implement
    # their own retry loops around this function. If the 'return False' above
    # were ever removed, this path would recurse with a *new* socket object
    # that the caller never sees; confirm intent before changing.
    USRP_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    USRP_socket.settimeout(1)
    USRP_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    time.sleep(1)
    timeout = timeout - 1
    return USRP_socket_bind(USRP_socket, server_address, timeout)
def Decode_Sync_Header(raw_header, CLIENT_STATUS=CLIENT_STATUS):
    '''
    Decode a sync-stream packet header containing the metadata of the packet.

    Arguments:
        - raw_header: the raw header as returned by the recv() method of socket.

    Return:
        - The metadata in dictionary form, or None if the header is corrupted.
    '''
    def decode_frontend(code):
        # Single-character wire code -> front-end name.
        return {
            'A': "A_TXRX",
            'B': "A_RX2",
            'C': "B_TXRX",
            'D': "B_RX2"
        }[code]

    try:
        # BUGFIX: np.fromstring is deprecated (removed in NumPy 2.x);
        # np.frombuffer decodes the same bytes without a copy.
        header = np.frombuffer(raw_header, dtype=header_type, count=1)
        metadata = {}
        metadata['usrp_number'] = header[0]['usrp_number']
        metadata['front_end_code'] = decode_frontend(header[0]['front_end_code'])
        metadata['packet_number'] = header[0]['packet_number']
        metadata['length'] = header[0]['length']
        metadata['errors'] = header[0]['errors']
        metadata['channels'] = header[0]['channels']
        return metadata
    except ValueError:
        # Buffer too short for one header record, or malformed content.
        if CLIENT_STATUS["keyboard_disconnect"] == False:
            print_error("Received corrupted header. No recover method has been implemented.")
        return None
def Print_Sync_Header(header):
print "usrp_number" + str(header['usrp_number'])
print "front_end_code" + str(header['front_end_code'])
print "packet_number" + str(header['packet_number'])
print "length" + str(header['length'])
print "errors" + str(header['errors'])
print "channels" + str(header['channels'])
def Decode_Async_header(header):
    '''Extract the length of an async message from the header of an async package incoming from the GPU server.

    The header is two native int32 words: a marker (must be 0) and the payload
    length. Returns the payload length, or 0 when the marker is invalid.
    '''
    # BUGFIX: np.fromstring is deprecated (removed in NumPy 2.x);
    # np.frombuffer reads the same two words without copying.
    words = np.frombuffer(header, dtype=np.int32, count=2)
    if words[0] == 0:
        return words[1]
    return 0
def Decode_Async_payload(message):
    '''
    Decode asynchronous payloads coming from the GPU server.

    Arguments:
        - message: JSON string received from the server.

    Side effects:
        Updates the module globals ERROR_STATUS, END_OF_MEASURE and
        REMOTE_FILENAME according to the message content.
    '''
    global ERROR_STATUS, END_OF_MEASURE, REMOTE_FILENAME, EOM_cond
    try:
        res = json.loads(message)
    except ValueError:
        print_warning("Cannot decode response from server.")
        return
    try:
        atype = res['type']
    except KeyError:
        print_warning("Unexpected json string from the server: type")
        # BUGFIX: bail out here; the original fell through and raised
        # NameError on 'atype' below when the 'type' key was missing.
        return
    # print "FROM SERVER: "+str(res['payload'])
    if atype == 'ack':
        if res['payload'].find("EOM") != -1:
            # End-of-measure acknowledgement: signal the receiver loops.
            print_debug("Async message from server: Measure finished")
            EOM_cond.acquire()
            END_OF_MEASURE = True
            EOM_cond.release()
        elif res['payload'].find("filename") != -1:
            # Server reports the remote filename inside double quotes.
            REMOTE_FILENAME = res['payload'].split("\"")[1]
        else:
            print_debug("Ack message received from the server: " + str(res['payload']))
    if atype == 'nack':
        # Server-side failure: flag the error and end the measure.
        print_warning("Server detected an error.")
        ERROR_STATUS = True
        EOM_cond.acquire()
        END_OF_MEASURE = True
        EOM_cond.release()
def Encode_async_message(payload):
    '''
    Format a JSON string so that the GPU server can read it.

    Arguments:
        - payload: A JSON string.

    Returns:
        - A formatted string ready to be sent via socket method

    Note:
        This function performs no check on the validity of the JSON string.
    '''
    # Async wire format: a zero marker word, the payload length, then the payload.
    return struct.pack('II', 0, len(payload)) + payload
def Async_send(payload):
    '''
    Send a JSON string to the GPU server. Typically the JSON string represents a command or a status request.

    Arguments:
        - payload: JSON formatted string.

    Returns:
        - Boolean value representing the success of the operation.

    Note:
        In order to use this function the Async_thread has to be up and running. See Start_Async_RX().
    '''
    global Async_condition
    global Async_status
    global USRP_socket
    if not Async_status:
        print_warning("The Async RX thread is not running, cannot send Async message.")
        return False
    try:
        USRP_socket.send(Encode_async_message(payload))
    except socket.error as err:
        print_warning("An async message could not be sent due to an error: " + str(err))
        if err.errno == 32:
            # Broken pipe: the server side is gone, mark the async link down.
            print_error("Async server disconnected")
            with Async_condition:
                Async_status = False
            return False
    return True
def Async_thread():
    '''Receiver thread for async messages from the GPU server. This function is meant to be run as a thread.'''
    global Async_condition
    global Async_status
    global USRP_socket
    global USRP_server_address
    # internal_status is this thread's private "keep running" flag;
    # Async_status is the shared flag other threads read/write under Async_condition.
    internal_status = True
    Async_status = False
    # the header is composed by two ints: one is always 0 and the other represents the length of the payload
    header_size = 2 * 4
    # just initialization of variables
    old_header_len = 0
    old_data_len = 0
    # try to connect, if it fails set internal status to False (close the thread)
    Async_condition.acquire()
    # if(not USRP_socket_bind(USRP_socket, USRP_server_address, 5)):
    time_elapsed = 0
    timeout = 10 # sys.maxint
    data_timeout_wait = 0.01
    connected = False
    # Connection loop: one bind attempt per second, up to `timeout` attempts.
    while time_elapsed < timeout and (not connected):
        try:
            print_debug("Async command thread:")
            connected = USRP_socket_bind(USRP_socket, USRP_server_address, 7)
            time.sleep(1)
            time_elapsed += 1
        except KeyboardInterrupt:
            print_warning("Keyboard interrupt aborting connection...")
            break
    if not connected:
        internal_status = False
        Async_status = False
        print_warning("Async data connection failed")
        Async_condition.release()
    else:
        Async_status = True
        print_debug("Async data connected")
        Async_condition.release()
    # acquisition loop: read a fixed-size header, then the variable-size payload
    while (internal_status):
        # counters used to prevent the API to get stuck on server shutdown
        data_timeout_counter = 0
        data_timeout_limit = 5
        header_timeout_limit = 5
        header_timeout_counter = 0
        header_timeout_wait = 0.1
        # lock the "mutex" for checking the state of the main API instance
        Async_condition.acquire()
        if Async_status == False:
            internal_status = False
        Async_condition.release()
        size = 0
        if (internal_status):
            header_data = ""
            try:
                # Accumulate bytes until a full 8-byte header has arrived.
                while (len(header_data) < header_size) and internal_status:
                    header_timeout_counter += 1
                    header_data += USRP_socket.recv(min(header_size, header_size - len(header_data)))
                    if old_header_len != len(header_data):
                        # Progress was made: reset the stall counter.
                        header_timeout_counter = 0
                    if (header_timeout_counter > header_timeout_limit):
                        # Stalled: back off and re-check the shared shutdown flag.
                        time.sleep(header_timeout_wait)
                        Async_condition.acquire()
                        if Async_status == False:
                            internal_status = False
                        # print internal_status
                        Async_condition.release()
                    old_header_len = len(header_data)
                    # general timer
                    time.sleep(.1)
                if (internal_status): size = Decode_Async_header(header_data)
            except socket.error as msg:
                if msg.errno != None:
                    print_error("Async header: " + str(msg))
                    Async_condition.acquire()
                    internal_status = False
                    Async_status = False
                    Async_condition.release()
        if (internal_status and size > 0):
            data = ""
            try:
                # Accumulate bytes until the whole `size`-byte payload arrived.
                while (len(data) < size) and internal_status:
                    data_timeout_counter += 1
                    data += USRP_socket.recv(min(size, size - len(data)))
                    if old_data_len != len(data):
                        data_timeout_counter = 0
                    if (data_timeout_counter > data_timeout_limit):
                        time.sleep(data_timeout_wait)
                        Async_condition.acquire()
                        if Async_status == False:
                            internal_status = False
                        Async_condition.release()
                    old_data_len = len(data)
                if (internal_status): Decode_Async_payload(data)
            except socket.error as msg:
                if msg.errno == 4:
                    pass  # the ctrl-c exception is handled elsewhere
                elif msg.errno != None:
                    print_error("Async thread: " + str(msg))
                    Async_condition.acquire()
                    internal_status = False
                    Async_status = False
                    Async_condition.release()
                    # NOTE(review): "..." + msg concatenates a string with an
                    # exception object and raises TypeError here — should be
                    # str(msg); confirm before relying on this warning.
                    print_warning("Async connection is down: " + msg)
    # Loop exited: tear the command socket down and release its resources.
    USRP_socket.shutdown(1)
    USRP_socket.close()
    del USRP_socket
    gc.collect()
# Module-level async receiver thread; daemonized so it never blocks interpreter exit.
Async_RX_loop = Thread(target=Async_thread, name="Async_RX", args=(), kwargs={})
Async_RX_loop.daemon = True
def Wait_for_async_connection(timeout=None):
    '''
    Block until the async thread has established a connection with the server or the thread has expired. In case a timeout value is given, returns after timeout if no connection is established before.

    Arguments:
        - timeout: Seconds to wait for connection. Default is infinite timeout.

    Return:
        - boolean representing the success of the operation.
    '''
    global Async_condition
    global Async_status
    time_elapsed = 0
    # BUGFIX: initialize the result before the loop. With timeout <= 0 the
    # loop body never runs and the original raised NameError on 'return x'.
    # (This also matches Wait_for_sync_connection, which pre-sets x = False.)
    x = False
    if timeout is None:
        timeout = sys.maxint
    try:
        while time_elapsed < timeout:
            # Sample the shared status under its condition lock.
            Async_condition.acquire()
            x = Async_status
            Async_condition.release()
            time.sleep(1)
            if x:
                break
            else:
                time_elapsed += 1
    except KeyboardInterrupt:
        print_warning("keyboard interrupt received. Closing connections.")
        return False
    return x
def Wait_for_sync_connection(timeout=None):
    '''
    Block until the sync receiver has established a connection with the server or the wait has expired. In case a timeout value is given, returns after timeout if no connection is established before.

    Arguments:
        - timeout: Seconds to wait for connection. Default is infinite timeout.

    Return:
        - boolean representing the success of the operation.
    '''
    global Sync_RX_condition
    global CLIENT_STATUS
    elapsed = 0
    status = False
    if timeout is None:
        timeout = sys.maxint
    try:
        while elapsed < timeout:
            # Sample the shared flag under its condition lock, then poll at 1 Hz.
            Sync_RX_condition.acquire()
            status = CLIENT_STATUS['Sync_RX_status']
            Sync_RX_condition.release()
            time.sleep(1)
            if status:
                break
            elapsed += 1
    except KeyboardInterrupt:
        print_warning("keyboard interrupt received. Closing connections.")
        return False
    return status
def Start_Async_RX():
    '''Start the Async thread. See Async_thread() function for a more detailed explanation.'''
    global Async_RX_loop
    # Rebuild the command socket before (re)starting the receiver.
    reinit_async_socket()
    try:
        Async_RX_loop.start()
    except RuntimeError:
        # A Thread object can only be started once: on restart, build a fresh
        # Thread around Async_thread and start that instead.
        Async_RX_loop = Thread(target=Async_thread, name="Async_RX", args=(), kwargs={})
        Async_RX_loop.daemon = True
        Async_RX_loop.start()
    # print "Async RX thread launched"
def Stop_Async_RX():
    '''Stop the Async thread. See Async_thread() function for a more detailed explanation.'''
    global Async_RX_loop, Async_condition, Async_status
    # Flag the receiver loop to exit (under its lock), then wait for it.
    with Async_condition:
        print_line("Closing Async RX thread...")
        Async_status = False
    Async_RX_loop.join()
    print_line("Async RX stopped")
def Connect(timeout=None):
    '''
    Connect both the Synchronous and Asynchronous communication services.

    Returns:
        - True if both services are connected, False otherwise.

    Arguments:
        - timeout: the timeout in seconds. Default is retry forever.
    '''
    success = True
    try:
        Start_Sync_RX()
        # success = success and Wait_for_sync_connection(timeout=10)
        Start_Async_RX()
        success = success and Wait_for_async_connection(timeout=10)
    except KeyboardInterrupt:
        print_warning("keyboard interrupt received. Closing connections.")
        exit()
    return success
def Disconnect(blocking=True):
    '''
    Disconnect both the Synchronous and Asynchronous communication services.

    Returns:
        - None. NOTE(review): the original docstring promised a boolean, but
          nothing is returned; callers must not rely on a return value.

    Arguments:
        - blocking: define if the call is blocking or not. Default is blocking.
          NOTE(review): currently unused — both stop calls below always block.
    '''
    Stop_Async_RX()
    Stop_Sync_RX()
def force_ternimate():
    # Hard-kill the receiver on forced quit (Ctrl+C spam / no measure running).
    # NOTE(review): the name keeps the historical typo ('ternimate') because it
    # is called by name elsewhere in this module; renaming would break callers.
    # Only the Sync process is terminated: Async_RX_loop is a Thread and has no
    # terminate() method, despite being listed in the global statement.
    global Sync_RX_loop, Async_RX_loop
    Sync_RX_loop.terminate()
def Sync_RX(CLIENT_STATUS, Sync_RX_condition, USRP_data_queue):
    '''
    Thread that receives data from the TCP data streamer of the GPU server and loads each packet in the data queue USRP_data_queue. The format of the data is specified in a subfunction fill_queue() and consists in a tuple containing (metadata, data).

    Note:
        This function is meant to be a standalone process handled via the functions Start_Sync_RX() and Stop_Sync_RX().
    '''
    # global Sync_RX_condition
    # global Sync_RX_status
    global USRP_data_socket
    global USRP_server_address_data
    # global USRP_data_queue
    # Wire header: 5 x int32 fields plus 1 front-end code byte (see Decode_Sync_Header).
    header_size = 5 * 4 + 1
    acc_recv_time = []
    cycle_time = []

    # use to pass stuff in the queue without reference
    def fill_queue(meta_data, dat, USRP_data_queue=USRP_data_queue):
        meta_data_tmp = meta_data
        dat_tmp = dat
        USRP_data_queue.put((meta_data_tmp, dat_tmp))

    # try to connect, if it fails set internal status to False (close the thread)
    # Sync_RX_condition.acquire()
    # if(not USRP_socket_bind(USRP_data_socket, USRP_server_address_data, 7)):
    time_elapsed = 0
    timeout = 10 # sys.maxint
    connected = False
    # try:
    # Connection loop: one bind attempt per second, up to `timeout` attempts.
    while time_elapsed < timeout and (not connected):
        print_debug("RX sync data thread:")
        connected = USRP_socket_bind(USRP_data_socket, USRP_server_address_data, 7)
        time.sleep(1)
        time_elapsed += 1
    if not connected:
        internal_status = False
        CLIENT_STATUS['Sync_RX_status'] = False
        print_warning("RX data sync connection failed.")
    else:
        print_debug("RX data sync connected.")
        internal_status = True
        CLIENT_STATUS['Sync_RX_status'] = True
    # Sync_RX_condition.release()
    # acquisition loop: header -> metadata -> payload -> decode -> queue
    start_total = time.time()
    while (internal_status):
        start_cycle = time.time()
        # counters used to prevent the API to get stuck on server shutdown
        data_timeout_counter = 0
        data_timeout_limit = 5  # (seconds)
        header_timeout_limit = 5
        header_timeout_counter = 0
        header_timeout_wait = 0.01
        # lock the "mutex" for checking the state of the main API instance
        # Sync_RX_condition.acquire()
        if CLIENT_STATUS['Sync_RX_status'] == False:
            # NOTE(review): this re-assigns the already-False shared flag; it
            # looks like it was meant to set internal_status = False so the
            # loop exits on an external stop request — confirm intent.
            CLIENT_STATUS['Sync_RX_status'] = False
            # print internal_status
        # Sync_RX_condition.release()
        if (internal_status):
            header_data = ""
            try:
                old_header_len = 0
                header_timeout_counter = 0
                # Accumulate bytes until a full header has arrived.
                while (len(header_data) < header_size) and internal_status:
                    header_timeout_counter += 1
                    header_data += USRP_data_socket.recv(min(header_size, header_size - len(header_data)))
                    if old_header_len != len(header_data):
                        # Progress made: reset the stall counter.
                        header_timeout_counter = 0
                    if (header_timeout_counter > header_timeout_limit):
                        # Stalled: back off and re-check the shared stop flag.
                        time.sleep(header_timeout_wait)
                        # Sync_RX_condition.acquire()
                        if CLIENT_STATUS['Sync_RX_status'] == False:
                            internal_status = False
                            # print internal_status
                        # Sync_RX_condition.release()
                    old_header_len = len(header_data)
                    if (len(header_data) == 0): time.sleep(0.001)
            except socket.error as msg:
                if msg.errno == 4:
                    pass  # message is handled elsewhere
                elif msg.errno == 107:
                    print_debug("Interface connected too soon. This bug has not been covere yet.")
                else:
                    print_error("Sync thread: " + str(msg) + " error number is " + str(msg.errno))
                    # Sync_RX_condition.acquire()
                    internal_status = False
                    # Sync_RX_condition.release()
        if (internal_status):
            metadata = Decode_Sync_Header(header_data)
            if (not metadata):
                # Corrupted header: no resync strategy, shut the loop down.
                # Sync_RX_condition.acquire()
                internal_status = False
                # Sync_RX_condition.release()
            # Print_Sync_Header(metadata)
        if (internal_status):
            data = ""
            try:
                old_len = 0
                # Payload is metadata['length'] complex samples, 8 bytes each.
                while ((old_len < 8 * metadata['length']) and internal_status):
                    data += USRP_data_socket.recv(min(8 * metadata['length'], 8 * metadata['length'] - old_len))
                    if (len(data) == old_len):
                        # No progress this pass: count toward the timeout.
                        data_timeout_counter += 1
                    old_len = len(data)
                    if data_timeout_counter > data_timeout_limit:
                        print_error("Tiemout condition reached for buffer acquisition")
                        internal_status = False
            except socket.error as msg:
                print_error(msg)
                internal_status = False
        if (internal_status):
            try:
                # NOTE(review): np.fromstring is deprecated (removed in
                # NumPy 2.x); np.frombuffer is the modern equivalent.
                formatted_data = np.fromstring(data[:], dtype=data_type, count=metadata['length'])
            except ValueError:
                print_error("Packet number " + str(metadata['packet_number']) + " has a length of " + str(
                    len(data) / float(8)) + "/" + str(metadata['length']))
                internal_status = False
            else:
                # USRP_data_queue.put((metadata,formatted_data))
                fill_queue(metadata, formatted_data)
    '''
    except KeyboardInterrupt:
        print_warning("Keyboard interrupt aborting connection...")
        internal_status = False
        CLIENT_STATUS['Sync_RX_status'] = False
    '''
    # Loop exited: tear down the data socket and release its resources.
    try:
        USRP_data_socket.shutdown(1)
        USRP_data_socket.close()
        del USRP_data_socket
        gc.collect()
    except socket.error:
        print_warning("Sounds like the server was down when the API tried to close the connection")
# print "Sync client thread id down"
# Module-level sync receiver: a separate *process* (not a thread) so packet
# reception does not contend with the main client for the GIL; daemonized so
# it never blocks interpreter exit.
Sync_RX_loop = multiprocessing.Process(target=Sync_RX, name="Sync_RX",
                                       args=(CLIENT_STATUS, Sync_RX_condition, USRP_data_queue), kwargs={})
Sync_RX_loop.daemon = True
def signal_handler(sig, frame):
    '''Handle SIGINT: request a graceful disconnect while a measure runs, force-quit otherwise.'''
    if not CLIENT_STATUS["measure_running_now"]:
        # No measurement in progress: nothing to save, quit immediately.
        force_ternimate()
        exit()
    if not CLIENT_STATUS["keyboard_disconnect"]:
        # First Ctrl+C: ask the measure loop to disconnect and flush data.
        print_warning('Got Ctrl+C, Disconnecting and saving last chunk of data.')
        CLIENT_STATUS["keyboard_disconnect"] = True
        CLIENT_STATUS["keyboard_disconnect_attemp"] = 0
    else:
        # Repeated Ctrl+C while already disconnecting: count attempts and
        # force-quit after the third one.
        print_debug("Already disconnecting...")
        CLIENT_STATUS["keyboard_disconnect_attemp"] += 1
        if CLIENT_STATUS["keyboard_disconnect_attemp"] > 2:
            print_warning("Forcing quit")
            force_ternimate()
            exit()


Signal.signal(Signal.SIGINT, signal_handler)
def Start_Sync_RX():
global Sync_RX_loop, USRP_data_socket, USRP_data_queue
try:
try:
del USRP_data_socket
reinit_data_socket()
USRP_data_queue = multiprocessing.Queue()
except socket.error as msg:
print msg
pass
Sync_RX_loop = multiprocessing.Process(target=Sync_RX, name="Sync_RX",
args=(CLIENT_STATUS, Sync_RX_condition, USRP_data_queue), kwargs={})
Sync_RX_loop.daemon = True
Sync_RX_loop.start()
except RuntimeError:
print_warning("Falling back to threading interface for Sync RX thread. Network could be slow")
Sync_RX_loop = Thread(target=Sync_RX, name="Sync_RX", args=(), kwargs={})
Sync_RX_loop.daemon = True
Sync_RX_loop.start()
def Stop_Sync_RX(CLIENT_STATUS=CLIENT_STATUS):
global Sync_RX_loop, Sync_RX_condition
# Sync_RX_condition.acquire()
print_line("Closing Sync RX thread...")
# print_line(" reading "+str(CLIENT_STATUS['Sync_RX_status'])+" from thread.. ")
CLIENT_STATUS['Sync_RX_status'] = False
time.sleep(.1)
# Sync_RX_condition.release()
# print "Process is alive? "+str(Sync_RX_loop.is_alive())
if Sync_RX_loop.is_alive():
Sync_RX_loop.terminate() # I do not know why it's alive even if it exited all the loops
# Sync_RX_loop.join(timeout = 5)
print "Sync RX stopped"
| StarcoderdataPython |
8163170 | from .problem import Pathpacking as Problem
| StarcoderdataPython |
6522099 | import argparse
import torch
from torch import nn
from torch import optim
from torch.autograd import Variable
from torchvision import datasets, transforms, models
from torchvision.datasets import ImageFolder
import torch.nn.functional as F
from PIL import Image
from collections import OrderedDict
import time
import numpy as np
import matplotlib.pyplot as plt
def set_data(path, type_transform):
    # Dataset over a folder of class subdirectories, with the given transforms.
    return datasets.ImageFolder(path, transform=type_transform)
def set_loader(data, batch_size):
    # Shuffling batch loader over the given dataset.
    return torch.utils.data.DataLoader(data, batch_size=batch_size, shuffle=True)
def save_checkpoint(path, model, optimizer, args, classifier, epochs):
    '''Serialize the model/optimizer state plus the metadata needed to rebuild it to ``path``.'''
    # The classifier input width depends on how the backbone exposes it.
    if args.arch == "vgg16":
        in_features = model.classifier[0].in_features
    elif args.arch == "densenet121":
        in_features = model.classifier.in_features
    else:
        print("generate default input size")
        in_features = model.classifier[0].in_features
    checkpoint = {
        'input_size': in_features,
        'output_size': 102,
        'arch': args.arch,
        'classifier': classifier,
        'learning_rate': args.learning_rate,
        'epochs': epochs,
        'class_to_idx': model.class_to_idx,
        'state_dict': model.state_dict(),
        'optimizer': optimizer.state_dict(),
    }
    torch.save(checkpoint, path)
def parse_args():
    """Parse command-line options for training.

    Returns:
        argparse.Namespace with typed fields: data_dir/arch/save_dir (str),
        learning_rate (float), hidden_units (int), epochs (int), gpu (bool).
    """
    parser = argparse.ArgumentParser(description="Training")
    parser.add_argument('--data_dir', action='store', default='flowers')
    parser.add_argument('--arch', dest='arch', default='vgg16')
    # BUGFIX: the numeric options were plain strings; hidden_units in
    # particular was passed straight to nn.Linear, which requires an int.
    parser.add_argument('--learning_rate', dest='learning_rate', type=float, default=0.001)
    parser.add_argument('--hidden_units', dest='hidden_units', type=int, default=500)
    parser.add_argument('--epochs', dest='epochs', type=int, default=3)
    # BUGFIX: a store_true flag must default to False; the old default, the
    # string "false", is truthy.
    parser.add_argument('--gpu', action="store_true", default=False)
    parser.add_argument('--save_dir', dest="save_dir", action="store", default="checkpoint2.pth")
    return parser.parse_args()
def train(model, criterion, optimizer, trainloader, vloader, epochs, gpu):
    """Train ``model`` and periodically report validation loss/accuracy.

    Args:
        model: Network exposing ``forward`` (moved to CUDA when gpu is True).
        criterion: Loss function (e.g. nn.NLLLoss for log-softmax outputs).
        optimizer: Optimizer over the trainable parameters.
        trainloader: DataLoader yielding (inputs, labels) training batches.
        vloader: DataLoader yielding validation batches.
        epochs (int): Number of passes over trainloader.
        gpu (bool): When True, move model and batches to CUDA.
    """
    steps = 0
    print_every = 10  # run a validation pass every N training batches
    for e in range(epochs):
        running_loss = 0
        for inputs, labels in trainloader:
            steps += 1
            # Compare with == True on purpose: legacy configs passed the
            # string "false" here, which is truthy but not equal to True.
            if gpu == True:
                model.cuda()
                inputs, labels = inputs.to('cuda'), labels.to('cuda')
            optimizer.zero_grad()
            # Forward and backward passes
            outputs = model.forward(inputs)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
            if steps % print_every == 0:
                model.eval()
                valloss = 0
                accuracy = 0
                for inputs2, labels2 in vloader:
                    optimizer.zero_grad()
                    if gpu == True:
                        inputs2, labels2 = inputs2.to('cuda'), labels2.to('cuda')
                        model.to('cuda:0')
                    with torch.no_grad():
                        outputs = model.forward(inputs2)
                        # BUGFIX: accumulate the loss; the original overwrote
                        # it each batch and then divided by len(vloader),
                        # reporting only last_batch_loss / n_batches.
                        valloss += criterion(outputs, labels2)
                        ps = torch.exp(outputs).data
                        equality = (labels2.data == ps.max(1)[1])
                        accuracy += equality.type_as(torch.FloatTensor()).mean()
                valloss = valloss / len(vloader)
                accuracy = accuracy / len(vloader)
                print("Epoch: {}/{}... ".format(e + 1, epochs),
                      "Training Loss: {:.4f}".format(running_loss / print_every),
                      "Validation Loss {:.4f}".format(valloss),
                      "Accuracy: {:.4f}".format(accuracy),
                      )
                running_loss = 0
                # BUGFIX: restore training mode (re-enable dropout) after the
                # validation pass; the original left the model in eval mode
                # for the rest of training.
                model.train()
def main():
    """Entry point: build datasets/loaders, assemble the classifier head, train, and checkpoint."""
    print("train.py start")
    args = parse_args()
    data_dir = args.data_dir
    train_dir = data_dir + '/train'
    valid_dir = data_dir + '/valid'
    test_dir = data_dir + '/test'
    # ImageNet normalization statistics — pretrained backbones expect these.
    means = [0.485, 0.456, 0.406]
    standard_deviations = [0.229, 0.224, 0.225]
    # Training pipeline adds augmentation (rotation, random crop, flip).
    train_transforms = transforms.Compose([
        transforms.RandomRotation(30),
        transforms.RandomResizedCrop(224),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(means,standard_deviations)
    ])
    # Validation/test pipeline: deterministic resize + center crop only.
    test_validation_struct = transforms.Compose([
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize(means,standard_deviations)
    ])
    validation_transform = test_validation_struct
    test_transform = test_validation_struct
    print("set dataset start")
    train_data = set_data(train_dir,train_transforms)
    validation_data = set_data(valid_dir,validation_transform)
    test_data = set_data(test_dir,test_transform)
    print("set dataset done")
    print("set loader start")
    trainloader = set_loader(train_data,64)
    vloader = set_loader(validation_data,32)
    testloader = set_loader(test_data,20)
    print("set loader done")
    # Look the architecture up by name on torchvision.models.
    model = getattr(models, args.arch)(pretrained=True)
    # Freeze the pretrained feature extractor; only the new classifier trains.
    for param in model.parameters():
        param.requires_grad = False
    # The classifier input width depends on how the backbone exposes it.
    if args.arch == "vgg16" :
        input_size = model.classifier[0].in_features
    elif args.arch == "densenet121" :
        input_size = model.classifier.in_features
    else :
        print("generate default input size")
        input_size = model.classifier[0].in_features
    print("set classifier start")
    # NOTE(review): nn.Linear requires hidden_units to be an int; with the
    # original argparse defaults this was a string — confirm parse_args types.
    classifier = nn.Sequential(OrderedDict([
        ('fc1', nn.Linear(input_size, args.hidden_units)),
        ('drop', nn.Dropout(p=0.6)),
        ('relu', nn.ReLU()),
        ('fc2', nn.Linear(args.hidden_units, 102)),
        ('output', nn.LogSoftmax(dim=1))]))
    model.classifier = classifier
    criterion = nn.NLLLoss() # using criterion and optimizer similar to pytorch lectures (densenet)
    optimizer = optim.Adam(model.classifier.parameters(), lr=float(args.learning_rate))
    epochs = int(args.epochs)
    print("SET epochs : {}".format(epochs))
    class_index = train_data.class_to_idx
    gpu = args.gpu # get the gpu settings
    print("SET GPU : {}".format(gpu))
    print("set train start")
    train(model, criterion, optimizer, trainloader,vloader, epochs, gpu)
    # Preserve the class->index mapping so inference can invert predictions.
    model.class_to_idx = class_index
    path = args.save_dir # get the new save location
    print("set save_checkpoint start")
    save_checkpoint(path, model, optimizer, args, classifier,epochs)
if __name__ == "__main__":
main() | StarcoderdataPython |
11293532 | import numpy as np
import matplotlib.pyplot as plt
import sys
sys.path.append('../src/')
from pysfa import SFA
# generate data
# -----------------------------------------------------------------------------
np.random.seed(123)
m = 2000        # number of observations
k_beta = 2      # number of frontier covariates
k_gama = 1      # number of inefficiency-variance covariates
k_deta = 1      # number of noise-variance covariates
x = np.random.randn(m,k_beta)
z = np.ones((m,k_gama))
d = np.ones((m,k_deta))
s = np.ones(m)*0.1
# ground-truth coefficients used to simulate the data
beta_t = np.random.randn(k_beta)
gama_t = np.random.rand(k_gama)
deta_t = np.random.rand(k_deta)
# create object
# -----------------------------------------------------------------------------
sfa = SFA(x, z, d, s, vtype='exponential')
sfa.simData(beta_t, gama_t, deta_t)
# apply solver and compare the recovered coefficients to the truth
# -----------------------------------------------------------------------------
sfa.optimizeSFA()
print('beta_t:', beta_t, ', beta_soln:', sfa.beta_soln)
print('gama_t:', gama_t, ', gama_soln:', sfa.gama_soln)
print('deta_t:', deta_t, ', deta_soln:', sfa.deta_soln) | StarcoderdataPython |
9667621 | <filename>SImple_Resturant/main.py
import getpass
class Person:
    """Base class for a user of the restaurant system."""

    def __init__(self, name):
        # Display name; login state starts as logged-out.
        self.name = name
        self.state = False

    def login(self):
        """Prompt for credentials until they match an entry in the global ``db``.

        NOTE(review): assumes ``db`` maps username -> password — confirm its
        actual structure; it is not defined in this module.
        """
        while True:
            self.username = input("Enter Your UserName: ")
            self.password = getpass.getpass(prompt="Password: ")
            if self.username in db:
                # BUGFIX: the original compared self.username.password — an
                # attribute lookup on a string, which raises AttributeError.
                # Compare against the stored credential for this user instead.
                if db[self.username] == self.password:
                    print("welcome")
                    break
                else:
                    print("wrong Password")
            else:
                print("wrong UserName")
        self.state = True
class Admin(Person):
    """Administrator account: may edit the menu.

    NOTE(review): both menu operations are unimplemented stubs.
    """

    def __init__(self, name):
        super().__init__(name)

    def add_to_menu(self, name, price, qty):
        # TODO: stub — menu storage is not implemented yet.
        pass

    def remove_from_menu(self, name):
        # TODO: stub — menu storage is not implemented yet.
        pass
class Customer(Person):
    """A restaurant customer with contact details and a purchase history."""

    def __init__(self, name, phone, addres):
        # BUGFIX: the parameter was spelled 'phpne' while the body read
        # 'phone', raising NameError on every construction.
        super().__init__(name)
        self.addres = addres
        self.phone = phone
        # Purchase history; keys/values to be defined by the ordering feature.
        self.past_buy = dict()

    # TODO: change_info(**kwargs) was sketched here but never implemented.
| StarcoderdataPython |
8030288 | <reponame>kdeltared/tcex
#!/usr/bin/env python
"""TcEx Framework LayoutJson."""
# standard library
import json
import logging
import os
from collections import OrderedDict
class LayoutJson:
    """Object for layout.json file.

    Args:
        filename (str, optional): The config filename. Defaults to layout.json.
        path (str, optional): The path to the file. Defaults to os.getcwd().
        logger (logging.Logger, optional): An instance of Logger. Defaults to None.
    """

    def __init__(self, filename=None, path=None, logger=None):
        """Initialize class properties."""
        self._filename = filename or 'layout.json'
        self._path = path or os.getcwd()
        self.log = logger or logging.getLogger('layout_json')

        # lazily populated cache of the parsed layout.json contents
        self._contents = None

    @staticmethod
    def _to_bool(value):
        """Convert string value to bool."""
        return str(value).lower() in ('1', 'true')

    @staticmethod
    def _step(sequence, title):
        """Return a new, empty input step for the layout."""
        step = OrderedDict()
        step['parameters'] = []
        step['sequence'] = sequence
        step['title'] = title
        return step

    @property
    def contents(self):
        """Return layout.json contents (parsed and cached on first access)."""
        if self._contents is None and self.has_layout:
            with open(self.filename) as fh:
                self._contents = json.load(fh, object_pairs_hook=OrderedDict)
        return self._contents

    def create(self, inputs, outputs):
        """Create a new layout.json file based on inputs and outputs."""
        lj = OrderedDict()

        # the four standard input steps, in display order
        lj['inputs'] = [
            self._step(1, 'Action'),
            self._step(2, 'Connection'),
            self._step(3, 'Configure'),
            self._step(4, 'Advanced'),
        ]
        lj['outputs'] = []

        # BUGFIX: sort by the 'name' key; sorting raw dicts raises TypeError
        # on Python 3.
        for i in sorted(inputs, key=lambda i: i.get('name')):
            if i.get('name') == 'tc_action':
                lj['inputs'][0]['parameters'].append({'name': 'tc_action'})
            elif i.get('hidden') is True:
                # hidden params live on the Configure step with a never-true
                # display clause so the UI never shows them
                lj['inputs'][2]['parameters'].append(
                    {'display': "'hidden' != 'hidden'", 'hidden': 'true', 'name': i.get('name')}
                )
            else:
                lj['inputs'][2]['parameters'].append({'display': '', 'name': i.get('name')})
        for o in sorted(outputs, key=lambda o: o.get('name')):
            lj['outputs'].append({'display': '', 'name': o.get('name')})

        # write layout file to disk
        self.write(lj)

    @property
    def filename(self):
        """Return the fqpn for the layout.json file."""
        return os.path.join(self._path, self._filename)

    @property
    def has_layout(self):
        """Return True if App has layout.json file."""
        return os.path.isfile(self.filename)

    @property
    def params_dict(self):
        """Return layout.json params in a flattened dict with name param as key."""
        parameters = {}
        for i in self.inputs:
            for p in i.get('parameters', []):
                parameters.setdefault(p.get('name'), p)
        return parameters

    @property
    def parameters_names(self):
        """Return the names of all layout.json params."""
        return self.params_dict.keys()

    @property
    def outputs_dict(self):
        """Return layout.json outputs in a flattened dict with name param as key."""
        outputs = {}
        for o in self.outputs:
            outputs.setdefault(o.get('name'), o)
        return outputs

    def update(self, features=None):
        """Update the layout.json file (currently: sort outputs by name)."""
        # features from the install.json; NOTE(review): accepted for interface
        # compatibility but not used by any current update step.
        features = features or []

        # get fresh copy of layout.json contents
        layout_data = self.contents

        # APP-86 - sort output data by name
        self.update_sort_outputs(layout_data)

        # update cached contents and persist to disk
        self._contents = layout_data
        self.write(layout_data)

    @staticmethod
    def update_sort_outputs(layout_data):
        """Sort output field by name (in place)."""
        # APP-86 - sort output data by name
        layout_data['outputs'] = sorted(layout_data.get('outputs', []), key=lambda i: i['name'])

    def write(self, json_data):
        """Write updated layout file.

        Args:
            json_data (dict): The layout data.
        """
        with open(self.filename, 'w') as fh:
            json.dump(json_data, fh, indent=2, sort_keys=False)
            fh.write('\n')

    #
    # properties
    #

    @property
    def inputs(self):
        """Return the 'inputs' array of layout.json."""
        return self.contents.get('inputs', [])

    @property
    def outputs(self):
        """Return the 'outputs' array of layout.json."""
        return self.contents.get('outputs', [])
| StarcoderdataPython |
12810668 | <gh_stars>0
import multiprocessing
import os
from typing import Union
import torchvision
from torch.utils.data import DataLoader, Subset
from torchvision import transforms
from root import from_root
from src.misc.utils import read_lines
# On-disk locations of the CIFAR-10 images and the train/val/test index files.
DATA_DIRPATH = from_root("data/cifar10")
SPLIT_DIRPATH = from_root("splits/cifar10")
# Per-channel statistics of the CIFAR-10 training split (RGB order).
# NOTE(review): not referenced in this chunk — presumably consumed by a
# Normalize transform elsewhere; confirm before removing.
CIFAR10_TRAIN_MEAN = [0.4913, 0.4820, 0.4464]
CIFAR10_TRAIN_STD = [0.2470, 0.2434, 0.2616]
def load_cifar10_train(batch_size: int, n_workers: Union[str, int]) -> DataLoader:
    """Return a shuffled DataLoader over the CIFAR-10 training split.

    Applies standard augmentation (random horizontal flip, random 32x32 crop
    with padding 4) and restricts the torchvision training set to the indices
    listed in splits/cifar10/train.txt.

    Args:
        batch_size: positive number of samples per batch.
        n_workers: non-negative worker count, or the string "n_cores" to use
            one worker per CPU core.

    Raises:
        ValueError: if batch_size or n_workers is invalid.
    """
    if batch_size <= 0:
        raise ValueError("Batch_size must be positive!")
    # isinstance() is the idiomatic (and subclass-safe) type check.
    if isinstance(n_workers, str) and n_workers != "n_cores":
        raise ValueError("If n_workers is a string, it must be 'n_cores'!")
    if isinstance(n_workers, int) and n_workers < 0:
        raise ValueError("If n_workers is an int, it must be non-negative!")
    transform = transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.RandomCrop(32, 4),
        transforms.ToTensor()
    ])
    dataset = torchvision.datasets.CIFAR10(DATA_DIRPATH, train=True, transform=transform, download=True)
    train_indices = read_lines(os.path.join(SPLIT_DIRPATH, "train.txt"), int)
    train_dataset = Subset(dataset, train_indices)
    if n_workers == "n_cores":
        n_workers = multiprocessing.cpu_count()
    return DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=n_workers)
def load_cifar10_infer(split: str, batch_size: int, n_workers: Union[str, int]) -> DataLoader:
    """Return a non-shuffled DataLoader over one CIFAR-10 split for inference.

    No augmentation is applied (ToTensor only). "train" and "val" are index
    subsets (from splits/cifar10/{split}.txt) of the torchvision training set;
    "test" is the full torchvision test set.

    Args:
        split: one of "train", "val", "test".
        batch_size: positive number of samples per batch.
        n_workers: non-negative worker count, or "n_cores" for one per CPU core.

    Raises:
        ValueError: if split, batch_size or n_workers is invalid.
    """
    if split not in {"train", "val", "test"}:
        raise ValueError("Split must be 'train', 'val', or 'test'!")
    if batch_size <= 0:
        raise ValueError("Batch_size must be positive!")
    if isinstance(n_workers, str) and n_workers != "n_cores":
        raise ValueError("If n_workers is a string, it must be 'n_cores'!")
    if isinstance(n_workers, int) and n_workers < 0:
        raise ValueError("If n_workers is an int, it must be non-negative!")
    transform = transforms.ToTensor()
    if split == "test":
        dataset = torchvision.datasets.CIFAR10(DATA_DIRPATH, train=False, transform=transform, download=True)
    else:
        # "train" and "val" share one code path: subset of the training data.
        dataset = torchvision.datasets.CIFAR10(DATA_DIRPATH, train=True, transform=transform, download=True)
        indices = read_lines(os.path.join(SPLIT_DIRPATH, f"{split}.txt"), int)
        dataset = Subset(dataset, indices)
    if n_workers == "n_cores":
        n_workers = multiprocessing.cpu_count()
    return DataLoader(dataset, batch_size=batch_size, shuffle=False, num_workers=n_workers)
| StarcoderdataPython |
6586071 | import os
import time
import concurrent.futures as cf
class ParallelExecutor:
    """Fans per-image row groups out to a process pool and persists results.

    Reads groups of rows from a coordinate file, submits each image to a
    ProcessPoolExecutor, then writes finished images either to a database or
    to an output directory. A progress file tracks completed images so an
    interrupted run can be resumed.
    """

    def __init__(self, cf_reader, img_parser, pf_handler, workers, num_to_do, db=None, output_dir=None):
        # Collaborators: coordinate-file reader, image parser and
        # progress-file handler. `db` and `output_dir` are alternative sinks:
        # results go to the DB when output_dir is None, to files otherwise.
        self.db = db
        self.cf_reader = cf_reader
        self.img_parser = img_parser
        self.pf_handler = pf_handler
        self.workers = workers
        self.num_to_do = num_to_do
        self.output_dir = output_dir
        # Run counters.
        self.num_reads = 0
        self.num_skipped = 0
        self.num_completed = 0
        # Images already finished in a previous run (resumability).
        self.completed_images = self.pf_handler.get_completed_images()
        self.futures = []
        # Optional allow-list of image basenames; None means "process all".
        self.img_list = self.cf_reader.create_img_list()
        if output_dir is not None:
            if not os.path.exists(self.output_dir):
                print("Creating new directory for output: " + self.output_dir)
                os.mkdir(self.output_dir)

    def run(self):
        """Main loop: read each image's rows, submit work, collect results."""
        start_time = time.time()
        with cf.ProcessPoolExecutor(max_workers=self.workers) as executor:
            while self.cf_reader.continue_reading():
                rows, img_filename = self.cf_reader.read_full_image_lines()
                if self.skip_image(img_filename):
                    continue
                if self.img_list is not None:
                    # Only process images present in the explicit allow-list.
                    if not self.submit_listed_img(executor, img_filename, rows):
                        break
                else:
                    if not self.submit_and_continue(executor, img_filename, rows):
                        break
            # Drain remaining futures before the executor shuts down.
            self.handle_all_submitted(start_time)

    def submit_listed_img(self, executor, img_filename, rows):
        """Submit the image only if its basename is in the allow-list.

        Returns False once the read quota (num_to_do) has been reached.
        """
        if img_filename.split(os.path.sep)[-1].split(".")[0] in self.img_list:
            if not self.submit_and_continue(executor, img_filename, rows):
                return False
        else:
            self.num_skipped += 1
        return True

    def handle_done(self, done, futures, num_reads):
        """Persist one finished future's result and update progress/counters.

        NOTE(review): when done.result() == [], the right operand of the `or`
        below indexes an empty list and raises IndexError, so the error branch
        for empty results looks unreachable; the condition was presumably
        meant to use `and`. Confirm before relying on the error path.
        """
        if done.result() != [] or done.result()[0] is not None:
            rows, fn = done.result()
            fn = fn.split(os.path.sep)[-1]
            # Record completion so a resumed run skips this image.
            self.pf_handler.update_file(fn + '\n')
            if self.output_dir is None:
                self.img_parser.upload_field_image(fn, rows, self.db)
            else:
                self.img_parser.write_field_image(fn, rows, self.output_dir)
            futures.remove(done)
            self.num_completed += 1
            if self.num_completed % 10 == 0:
                # Commit in batches and report progress every 10 images.
                if self.db is not None:
                    self.db.connection.commit()
                print(str(self.num_completed / num_reads * 100) + "%")
        else:
            futures.remove(done)
            self.handle_done_error(done)

    def handle_done_error(self, done):
        """Append a description of a failed/empty result to Done_errors.txt."""
        with open('Done_errors.txt', 'a+') as file:
            if done.result() == []:
                file.write('Empty list\n')
            elif done.result()[0] is None:
                file.write('Rows were None for ' + str(done.result()[1]) + '\n')

    def skip_image(self, img_filename):
        """Return True when the image should not be processed.

        Skips empty filenames (reader failure) and images already marked
        complete in the progress file.
        """
        if img_filename == "":
            print("Something went wrong with reading the image filename from the coordinate file")
            return True
        if self.completed_images.get(img_filename.split(os.path.sep)[-1]):
            print("Skipping " + img_filename)
            self.num_skipped += 1
            return True
        return False

    def submit_and_continue(self, executor, img_filename, rows):
        """Submit one image to the pool; return False once the quota is hit."""
        # =============================================================================
        #         if len(rows) > 21 or self.num_reads == 26 or self.num_reads == 63:
        #             stop = True
        # =============================================================================
        print('Reading image: {} - {}'.format(self.num_reads, img_filename))
        # test = self.img_parser.process_rows(img_filename, rows)
        self.futures.append(executor.submit(self.img_parser.process_rows, img_filename, rows))
        self.num_reads += 1
        # If self.num_reads is high enough, call handle_done with the images
        # that have been completed up to now (bounds the backlog of futures).
        if self.num_reads % 1000 == 0:
            for done in cf.as_completed(self.futures):
                self.handle_done(done, self.futures, self.num_reads)
        # This if check is how the code is meant to handle being finished reading all the images.
        # Revert back to this if the above did not work
        if self.num_reads == self.num_to_do:
            return False
        return True

    def handle_all_submitted(self, start_time):
        """Wait for every outstanding future, then report totals and runtime."""
        print("Skipped a total of " + str(self.num_skipped) + " images")
        for done in cf.as_completed(self.futures):
            self.handle_done(done, self.futures, self.num_reads)
        print("Completed a total of " + str(self.num_completed) + " images")
        print("--- " + str(time.time() - start_time) + " ---")
| StarcoderdataPython |
3323297 | from sympy import factorint
# Find the first run of x consecutive integers each having exactly x distinct
# prime factors (cf. Project Euler problem 47), for a user-supplied x.
x = int(input("how many: \n"))
i = 2
while True:
    # factorint returns {prime: multiplicity}; len() counts distinct primes.
    if all(len(factorint(a)) == x for a in range(i, i + x)):
        print(i)
        break
    i += 1
| StarcoderdataPython |
4816330 | # (c) 2022 <NAME> MOPFPPP
# This code is licensed under the MIT license (see LICENSE.txt for details)
"""
Add traversing links to the nodes in the tree
"""
def tree_add_links(tree):
    """Return a copy of *tree* whose nodes carry a "next" traversal link.

    First computes, for each (index, node) pair, an initial link: [] for the
    root (empty parent), index + 1 when that index is listed among the
    parent's branches (next sibling), or -1 otherwise. Then runs 4 passes of
    _tree_adjust_links, each replacing remaining -1 links with the parent's
    "next" link, so every node ends up pointing at its traversal successor.
    """
    return _tree_adjust_links(4, tuple(map(_tree_set_first_links(
        tuple(map(_tree_first_links(tree), tuple(enumerate(tree))))),
        enumerate(tree))))
def _tree_set_first_links(info):
def _inner1(node_data):
return node_data[1] | {"next": info[node_data[0]]}
return _inner1
def _tree_first_links(tree):
def _inner1(node_info):
if node_info[1]["parent"] == []:
return []
if node_info[0] + 1 in tree[tree[node_info[0]]['parent']]['branches']:
return node_info[0] + 1
return -1
return _inner1
def _tree_adjust_links(numb, tree):
if numb == 0:
return tree
return _tree_adjust_links(numb - 1, _tree_mod_node(tree))
def _tree_mod_node(tree):
    """Run one resolution pass: re-examine every node's "next" link."""
    fix_node = _tree_set_node(tree)
    return tuple(fix_node(node) for node in tree)
def _tree_set_node(tree):
def _inner1(node_info):
if node_info["next"] == -1:
return dict(node_info, next=tree[node_info['parent']]['next'])
return node_info
return _inner1
| StarcoderdataPython |
3407650 | <filename>pw_console/py/table_test.py
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests for pw_console.text_formatting"""
import logging
import unittest
from datetime import datetime
from parameterized import parameterized # type: ignore
from pw_console.log_line import LogLine
from pw_console.widgets.table import TableView
# Timestamp format shared by the formatter below and by the expected outputs.
_TIMESTAMP_FORMAT = '%Y%m%d %H:%M:%S'
_TIMESTAMP_SAMPLE = datetime(2021, 6, 30, 16, 10, 37, 818901)
_TIMESTAMP_SAMPLE_STRING = _TIMESTAMP_SAMPLE.strftime(_TIMESTAMP_FORMAT)
# Style tuple inserted between table columns.
_TABLE_PADDING = ('', TableView.COLUMN_PADDING)

# Formatter producing ANSI-colored "<asctime> <levelname> <message>" records.
formatter = logging.Formatter(
    '\x1b[30m\x1b[47m'
    '%(asctime)s'
    '\x1b[0m'
    ' '
    '\x1b[33m\x1b[1m'
    '%(levelname)s'
    '\x1b[0m'
    ' '
    '%(message)s', _TIMESTAMP_FORMAT)
def make_log(**kwargs):
    """Create a LogLine instance.

    Builds a LogRecord with representative defaults, lets *kwargs* override
    any attribute, formats it with the module-level ANSI formatter, and wraps
    the result in a pw_console LogLine with its metadata populated.
    """
    # Construct a LogRecord
    attributes = dict(name='testlogger',
                      levelno=logging.INFO,
                      levelname='INF',
                      msg='[%s] %.3f %s',
                      args=('MOD1', 3.14159, 'Real message here'),
                      created=_TIMESTAMP_SAMPLE.timestamp(),
                      filename='test.py',
                      lineno=42,
                      pathname='/home/user/test.py')
    # Override any above attrs that are passed in.
    attributes.update(kwargs)
    # Create the record
    record = logging.makeLogRecord(dict(attributes))
    # Format
    formatted_message = formatter.format(record)
    log_line = LogLine(record=record,
                       formatted_log=formatted_message,
                       ansi_stripped_log='')
    log_line.update_metadata()
    return log_line
class TestTableView(unittest.TestCase):
    """Tests for rendering log lines into tables."""
    def setUp(self):
        # Show large diffs
        self.maxDiff = None  # pylint: disable=invalid-name

    @parameterized.expand([
        (
            'Correct column widths with all fields set',
            [
                make_log(
                    args=('M1', 1.2345, 'Something happened'),
                    extra_metadata_fields=dict(module='M1', time=12)),
                make_log(
                    args=('MD2', 567.5, 'Another cool event'),
                    extra_metadata_fields=dict(module='MD2', time=123)),
            ],
            dict(module=len('MD2'), time=len('123')),
        ),
        (
            'Missing metadata fields on some rows',
            [
                make_log(
                    args=('M1', 54321.2, 'Something happened'),
                    extra_metadata_fields=dict(module='M1', time=54321.2)),
                make_log(
                    args=('MOD2', 567.5, 'Another cool event'),
                    extra_metadata_fields=dict(module='MOD2')),
            ],
            dict(module=len('MOD2'), time=len('54321.200')),
        ),
    ])  # yapf: disable
    def test_column_widths(self, _name, logs, expected_widths) -> None:
        """Test column widths calculation."""
        table = TableView()
        for log in logs:
            table.update_metadata_column_widths(log)
            # update_metadata_column_widths should populate self.metadata.fields
            self.assertEqual(log.metadata.fields,
                             log.record.extra_metadata_fields)
        # Check expected column widths
        self.assertEqual(dict(table.column_widths), expected_widths)

    @parameterized.expand([
        (
            'Build header adding fields incrementally',
            [
                make_log(
                    args=('MODULE2', 567.5, 'Another cool event'),
                    extra_metadata_fields=dict(
                        # timestamp missing
                        module='MODULE2')),
                make_log(
                    args=('MODULE1', 54321.2, 'Something happened'),
                    extra_metadata_fields=dict(
                        # timestamp added in
                        module='MODULE1', timestamp=54321.2)),
            ],
            [
                [('bold', 'Time '), _TABLE_PADDING,
                 ('bold', 'Lvl'), _TABLE_PADDING,
                 ('bold', 'Module '),
                 ('bold', 'Message')],
                [('bold', 'Time '), _TABLE_PADDING,
                 ('bold', 'Lvl'), _TABLE_PADDING,
                 # timestamp added in
                 ('bold', 'Timestamp '),
                 ('bold', 'Module '),
                 ('bold', 'Message')],
            ],
        ),
    ])  # yapf: disable
    def test_formatted_header(self, _name, logs, expected_headers) -> None:
        """Test incremental header formatting as metadata fields appear."""
        table = TableView()
        for log, header in zip(logs, expected_headers):
            table.update_metadata_column_widths(log)
            self.assertEqual(table.formatted_header(), header)

    @parameterized.expand([
        (
            'Build rows adding fields incrementally',
            [
                make_log(
                    args=('MODULE2', 567.5, 'Another cool event'),
                    extra_metadata_fields=dict(
                        # timestamp missing
                        module='MODULE2',
                        msg='Another cool event')),
                make_log(
                    args=('MODULE2', 567.5, 'Another cool event'),
                    extra_metadata_fields=dict(
                        # timestamp and msg missing
                        module='MODULE2')),
                make_log(
                    args=('MODULE1', 54321.2, 'Something happened'),
                    extra_metadata_fields=dict(
                        # timestamp added in
                        module='MODULE1', timestamp=54321.2,
                        msg='Something happened')),
            ],
            [
                [
                    ('class:log-time', _TIMESTAMP_SAMPLE_STRING),
                    _TABLE_PADDING,
                    ('class:log-level-20', 'INF'),
                    _TABLE_PADDING,
                    ('class:log-table-column-3', 'MODULE2 '),
                    ('', 'Another cool event'),
                    ('', '\n')
                ],
                [
                    ('class:log-time', _TIMESTAMP_SAMPLE_STRING),
                    _TABLE_PADDING,
                    ('class:log-level-20', 'INF'),
                    _TABLE_PADDING,
                    ('class:log-table-column-3', 'MODULE2 '),
                    ('', '[MODULE2] 567.500 Another cool event'),
                    ('', '\n')
                ],
                [
                    ('class:log-time', _TIMESTAMP_SAMPLE_STRING),
                    _TABLE_PADDING,
                    ('class:log-level-20', 'INF'),
                    _TABLE_PADDING,
                    ('class:log-table-column-3', '54321.200 '),
                    ('class:log-table-column-4', 'MODULE1 '),
                    ('', 'Something happened'),
                    ('', '\n')
                ],
            ],
        ),
    ])  # yapf: disable
    def test_formatted_rows(self, _name, logs, expected_log_format) -> None:
        """Test incremental row formatting as metadata fields appear."""
        table = TableView()
        # Check each row meets expected formats incrementally.
        for log, formatted_log in zip(logs, expected_log_format):
            table.update_metadata_column_widths(log)
            self.assertEqual(table.formatted_row(log), formatted_log)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
241474 | <filename>SM_openSMILE/openSMILE_preprocessing/convert_dir_to_flac.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
convert_dir_to_flac.py
Script to quickly convert all wav files in a directory to
flac files.
Author:
– <NAME>, 2016 (<EMAIL>)
© 2016, Child Mind Institute, Apache v2.0 License
@author: jon.clucas
"""
import argparse, wav_to_flac as wtf, os
def main():
    """Convert every .wav file found under the given directory to .flac.

    The directory is supplied as the single positional command-line argument
    and is walked recursively; conversion is delegated to wav_to_flac.
    """
    # script can be run from the command line
    parser = argparse.ArgumentParser(description='get directory')
    parser.add_argument('in_dir', metavar='in_dir', type=str)
    arg = parser.parse_args()
    for root, dirs, files in os.walk(arg.in_dir):
        for file in files:
            # casefold() makes the extension match case-insensitive
            # (".wav".casefold() is just ".wav", so the literal suffices).
            if file.casefold().endswith(".wav"):
                wtf.wav_to_flac(os.path.join(root, file))
# ============================================================================
# ============================================================================
if __name__ == '__main__':
    main()
| StarcoderdataPython |
3323681 | <reponame>tkinjo1985/lobe_fastapi<gh_stars>1-10
from base64 import b64encode
from io import BytesIO
from PIL import Image
def image2base64(image_file):
    """Return the base64-encoded JPEG bytes of *image_file* as a str.

    The image is converted to RGB and re-encoded as JPEG before encoding.
    Returns None (after printing the error) if the image cannot be opened —
    callers must handle the None case.
    """
    try:
        image = Image.open(image_file).convert('RGB')
    except Exception as err:
        # Best-effort contract preserved: report and return None rather
        # than raise, matching the original silent-failure behavior.
        print(err)
        return None
    buffered = BytesIO()
    image.save(buffered, format="JPEG")
    img_b64 = b64encode(buffered.getvalue())
    return img_b64.decode('utf8')
| StarcoderdataPython |
3248325 | <reponame>arunraja-hub/Preference_Extraction
#!/usr/bin/env python
import time
from vizdoom import *
# Run ViZDoom in spectator mode and, each tick, report whether the 'Demon'
# actor is dead, attacking the monster, or attacking the agent, based on its
# facing angle relative to the player.
game = DoomGame()
game.load_config("custom.cfg")
game.set_labels_buffer_enabled(True)
game.set_mode(Mode.SPECTATOR)
game.init()

episodes = 10
for i in range(episodes):
    game.new_episode()
    while not game.is_episode_finished():
        state = game.get_state()
        game.advance_action()
        labels = state.labels
        human_label = None
        for label in labels:
            if label.object_name == 'Demon':
                human_label = label
            if label.object_name == 'DoomPlayer':
                # NOTE(review): player_label is only used in the commented-out
                # debug prints below.
                player_label = label
        # if human_label is not None:
        #     print(human_label.object_angle)
        # else:
        #     print(None)
        # print("player_label", player_label.object_angle)
        if human_label is None:
            print("Dead")
        else:
            # Facing within +/-90 degrees of forward counts as "toward monster".
            if human_label.object_angle < 90 or human_label.object_angle > 270:
                print("Attacking monster")
            else:
                print("Attacking agent")
    # Pause between episodes. NOTE(review): original indentation was lost;
    # this sleep is placed at episode level — confirm against upstream.
    time.sleep(2)
4943975 | <reponame>geoblr008/covid-19-tracker
from bs4 import BeautifulSoup
import requests
# NOTE(review): data_check is never used in this module.
data_check = []
# Scrape the worldometers coronavirus table once, at import time.
worldmetersLink = "https://www.worldometers.info/coronavirus/"
html_page = requests.get(worldmetersLink)
bs = BeautifulSoup(html_page.content, 'html.parser')
# All table cells in document order; country blocks are located by name below.
search = bs.select("div tbody tr td")
def data_cleanup(array):
    """Normalize scraped cell strings: drop '+'/'-' signs, map '' to '0', strip."""
    cleaned = []
    for raw in array:
        value = raw.replace("+", "").replace("-", "")
        if value == "":
            value = "0"
        cleaned.append(value.strip())
    return cleaned
def get_data(country):
    """Return the scraped stats row for *country* as a dict of strings.

    Scans the module-level `search` cell list for the first cell whose text
    contains the title-cased country name, then reads the 7 following cells.
    NOTE(review): layout-dependent; the bare `except` maps any missing cell
    (including when the country is not found, start = -1) to "0".
    """
    country = country.title()
    start = -1
    for i in range(len(search)):
        if search[i].get_text().find(country) != -1:
            start = i
            break
    data = []
    for i in range(1, 8):
        try:
            data = data + [search[start+i].get_text()]
        except:
            data = data + ["0"]
    data = data_cleanup(data)
    keys = ["total_infected", "new_case", "total_deaths",
            "new_deaths", "recovred", "active_case", "serious_critical"]
    data = dict(zip(keys, data))
    return data
def world_data():
    """Return the aggregate worldwide stats row (the "World" table entry)."""
    return get_data("World")
def get_top_five():
    """Return (stats, names) for the five countries at fixed table offsets.

    The hard-coded cell offsets point at the top-five country-name cells of
    the scraped table. NOTE(review): offsets are layout-dependent.

    Returns:
        tuple: (list of per-country stat dicts from get_data, list of country
        names), with unreadable cells dropped.
    """
    index = [177, 199, 221, 243, 265]
    data = []
    # Iterate the offsets directly. The original looped range(8) over this
    # 5-element list and relied on IndexError (caught by a bare except) to
    # pad with "0" entries that were immediately filtered out again.
    for offset in index:
        try:
            data.append(search[offset].get_text())
        except Exception:
            data.append("0")
    data = list(filter(("0").__ne__, data))
    country_list = [get_data(name) for name in data]
    return country_list, data
| StarcoderdataPython |
5166786 | <reponame>splhack/loam
import magma as m
from mantle.xilinx.spartan3.RAMB import ROMB16
from loam.boards.papilioone import PapilioOne
from loam.shields.megawing import MegaWing
# Wire an 8-entry-wide ROM to the MegaWing: the 8 switches address the ROM
# and the high byte of each 16-bit word drives the 8 LEDs.
megawing = MegaWing(PapilioOne)
megawing.Clock.on()
megawing.Switch.on(8)
megawing.LED.on(8)
main = megawing.main()

# ROM contents: the low byte of each address, shifted into the high byte of
# the 16-bit word. (The original assigned into a `range` object, which
# raises TypeError on Python 3 — build a list instead.)
rom = [(i & 0xff) << 8 for i in range(1024)]
romb = ROMB16(rom, 16)

# Address lines: 8 switches for the low bits, padded with two grounds.
# NOTE(review): GND is not imported in this module — presumably provided by
# a magma/mantle wildcard import in the original environment; confirm.
I = m.bits([main.SWITCH[0], main.SWITCH[1], main.SWITCH[2], main.SWITCH[3],
            main.SWITCH[4], main.SWITCH[5], main.SWITCH[6], main.SWITCH[7],
            GND, GND])
O = main.LED
m.wire(I, romb.A)
m.wire(romb.O[8:16], O)
| StarcoderdataPython |
272674 | # MIT License
#
# Copyright (c) 2019 Red Hat, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
from http import HTTPStatus
from flask import request
try:
from flask_restx import Namespace, Resource, fields
except ModuleNotFoundError:
from flask_restplus import Namespace, Resource, fields
from packit_service.service.api.utils import response_maker
from packit_service.celerizer import celery_app
from packit_service.config import ServiceConfig
from packit_service.service.api.errors import ValidationFailed
from packit_service.models import TFTTestRunModel
from packit_service.service.api.parsers import indices, pagination_arguments
logger = logging.getLogger("packit_service")
config = ServiceConfig.get_service_config()
ns = Namespace("testing-farm", description="Testing Farm")

# Swagger model: the "artifact" section of a Testing Farm result payload.
payload_artifact = ns.model(
    "Testing Farm results artifact",
    {
        "commit-sha": fields.String(
            required=True, example="08bfc38f15082bdf9ba964c3bbd04878666d1d56"
        ),
        "copr-chroot": fields.String(required=True, example="fedora-30-x86_64"),
        "copr-repo-name": fields.String(
            required=True, example="packit/packit-service-hello-world-14"
        ),
        "git-ref": fields.String(
            required=True, example="08bfc38f15082bdf9ba964c3bbd04878666d1d56"
        ),
        "git-url": fields.Url(
            required=True, example="https://github.com/packit/hello-world"
        ),
        "repo-name": fields.String(required=True, example="hello-world"),
        "repo-namespace": fields.String(required=True, example="packit-service"),
    },
)
# Swagger model: the "pipeline" section (Testing Farm pipeline id).
payload_pipeline = ns.model(
    "Testing Farm results pipeline",
    {
        "id": fields.String(
            required=True, example="614d240a-1e27-4758-ad6a-ed3d34281924"
        )
    },
)
# Swagger model: the full result payload accepted by POST /testing-farm/results.
payload = ns.model(
    "Testing Farm results",
    {
        "artifact": fields.Nested(payload_artifact),
        "message": fields.String(required=True, example="Command 'git' not found"),
        "pipeline": fields.Nested(payload_pipeline),
        "result": fields.String(required=True, example="error"),
        "token": fields.String(required=True, example="HERE-IS-A-VALID-TOKEN"),
        "url": fields.Url(
            required=True,
            example="https://console-testing-farm.apps.ci.centos.org/pipeline/<ID>",
        ),
    },
)
@ns.route("/results")
class TestingFarmResults(Resource):
    @ns.response(HTTPStatus.ACCEPTED, "Test results accepted and being processed")
    @ns.response(HTTPStatus.BAD_REQUEST, "Bad request data")
    @ns.response(HTTPStatus.UNAUTHORIZED, "Testing farm secret validation failed")
    @ns.expect(payload)
    def post(self):
        """
        Submit Testing Farm results
        """
        msg = request.json
        if not msg:
            logger.debug("/testing-farm/results: we haven't received any JSON data.")
            return "We haven't received any JSON data.", HTTPStatus.BAD_REQUEST
        try:
            self.validate_testing_farm_request()
        except ValidationFailed as exc:
            logger.info(f"/testing-farm/results {exc}")
            return str(exc), HTTPStatus.UNAUTHORIZED
        # Hand the payload off to the worker via Celery and respond immediately.
        celery_app.send_task(
            name="task.steve_jobs.process_message", kwargs={"event": msg}
        )
        return "Test results accepted", HTTPStatus.ACCEPTED

    @staticmethod
    def validate_testing_farm_request():
        """
        Validate testing farm token received in request with the one in packit-service.yaml
        :raises ValidationFailed
        """
        if not config.testing_farm_secret:
            msg = "Testing farm secret not specified in config"
            logger.error(msg)
            raise ValidationFailed(msg)
        token = request.json.get("token")
        if not token:
            msg = "The request doesn't contain any token"
            logger.info(msg)
            raise ValidationFailed(msg)
        if token == config.testing_farm_secret:
            return
        msg = "Invalid testing farm secret provided"
        logger.warning(msg)
        raise ValidationFailed(msg)

    @ns.expect(pagination_arguments)
    @ns.response(HTTPStatus.PARTIAL_CONTENT.value, "Testing Farm Results follow")
    def get(self):
        """ List all Testing Farm results. """
        result = []
        first, last = indices()
        # results have nothing other than ref in common, so it doesnt make sense to
        # merge them like copr builds
        for tf_result in TFTTestRunModel.get_range(first, last):
            result_dict = {
                "pipeline_id": tf_result.pipeline_id,
                "ref": tf_result.commit_sha,
                "status": tf_result.status,
                "target": tf_result.target,
                "web_url": tf_result.web_url,
                "pr_id": tf_result.get_pr_id(),
            }
            project = tf_result.get_project()
            result_dict["repo_namespace"] = project.namespace
            result_dict["repo_name"] = project.repo_name
            result.append(result_dict)
        resp = response_maker(
            result,
            status=HTTPStatus.PARTIAL_CONTENT.value,
        )
        # Content-Range header carries the (1-based) pagination window.
        resp.headers["Content-Range"] = f"test-results {first + 1}-{last}/*"
        return resp
| StarcoderdataPython |
6547452 | from .base import *
from .gmail import *
from .twilio import *
| StarcoderdataPython |
3563677 | import re
from django.contrib.auth.models import User
from django.test import TestCase
from django.conf.urls import url
from django.http import HttpResponse
from django.template import Template, RequestContext
def simple_view(request):
    """Render the {% websocket_info %} template tag output for this request."""
    template = Template('{% load websockets %}{% websocket_info %}')
    context = RequestContext(request, {'request': request})
    return HttpResponse(template.render(context))
# Route used by ViewsTestCase below via the Django test client.
urlpatterns = [
    url(r'^simple_view/$', simple_view),
]
class ViewsTestCase(TestCase):
    """Checks the websocket_info tag output for anonymous and logged-in users."""
    def test_anon_simple_view(self):
        # Anonymous requests receive the literal "anon" websocket token.
        r = self.client.get('/simple_view/')
        self.assertEqual(r.status_code, 200)
        content = r.content.decode('utf-8')
        self.assertEqual(content, '<script>\n'
                         ' var djws = {"token": "anon", "ws_url": "ws://testserver/ws/"};\n'
                         '</script>\n')

    def test_auth_simple_view(self):
        User.objects.create_user('testing', email='<EMAIL>', password='<PASSWORD>')
        self.client.login(username='testing', password='<PASSWORD>')
        r = self.client.get('/simple_view/')
        self.assertEqual(r.status_code, 200)
        # print(r.content)
        # The token is per-user and unpredictable; mask it before comparing.
        content = re.sub('"token": ".*?"', '"token": "xyz"', r.content.decode('utf-8'))
        self.assertEqual(content, '<script>\n'
                         ' var djws = {"token": "xyz", "ws_url": "ws://testserver/ws/"};\n'
                         '</script>\n')
| StarcoderdataPython |
225590 | <gh_stars>0
#II.5 General Equilibrium
#II.5.1
import numpy as np
from numba import jit
class Household:
    """Aiyagari-style household: builds the R and Q arrays for a DiscreteDP.

    The flat state index is s = a_i * z_size + z_i (see populate_R/populate_Q).
    """
    # Baseline calibration, at class level so __init__ defaults can use it.
    r = 0.04
    ro = 0.06
    beta = 1/(1+ro)
    sigmay = 0.5
    Y = [1-sigmay, 1+sigmay]

    def __init__(self,
                 r=0.04,  # interest rate
                 w=1.0,  # wages
                 β=1/(1+ro),  # discount factor
                 a_min=0,
                 Π=[[0.6, 0.4], [0.4, 0.6]],  # Markov chain
                 z_vals=[1-sigmay, 1+sigmay],  # exogenous states
                 a_max=18,
                 a_size=200):
        # Store values, set up grids over a and z
        self.r, self.w, self.β = r, w, β
        self.a_min, self.a_max, self.a_size = a_min, a_max, a_size
        self.Π = np.asarray(Π)
        self.z_vals = np.asarray(z_vals)
        self.z_size = len(z_vals)
        self.a_vals = np.linspace(a_min, a_max, a_size)
        self.n = a_size * self.z_size
        # Build the array Q
        self.Q = np.zeros((self.n, a_size, self.n))
        self.build_Q()
        # Build the array R
        self.R = np.empty((self.n, a_size))
        self.build_R()

    def set_prices(self, r, w):
        """
        Use this method to reset prices. Calling the method will trigger a
        re-build of R.
        """
        self.r, self.w = r, w
        self.build_R()

    def build_Q(self):
        # Delegate to the module-level numba-jitted kernel.
        populate_Q(self.Q, self.a_size, self.z_size, self.Π)

    def build_R(self):
        # -inf marks infeasible choices (non-positive consumption).
        self.R.fill(-np.inf)
        populate_R(self.R, self.a_size, self.z_size, self.a_vals, self.z_vals, self.r, self.w)
# Do the hard work using JIT-ed functions
# CRRA risk-aversion coefficient used in the utility below.
sigma = 4

@jit(nopython=True)
def populate_R(R, a_size, z_size, a_vals, z_vals, r, w):
    """Fill R[s, a'] with the CRRA utility of implied consumption.

    Consumption is c = w*z + (1+r)*a - a'; entries with c <= 0 are left
    untouched (pre-filled with -inf by the caller).
    """
    n = a_size * z_size
    for s_i in range(n):
        # Decode the flat state index: s = a_i * z_size + z_i.
        a_i = s_i // z_size
        z_i = s_i % z_size
        a = a_vals[a_i]
        z = z_vals[z_i]
        for new_a_i in range(a_size):
            a_new = a_vals[new_a_i]
            c = w * z + (1 + r) * a - a_new
            if c > 0:
                R[s_i, new_a_i] = (c**(1-sigma)-1)/(1-sigma)  # Utility
@jit(nopython=True)
def populate_Q(Q, a_size, z_size, Π):
    """Fill Q[s, a', s'] with P(z'|z) for next state s' = a' * z_size + z'."""
    n = a_size * z_size
    for s_i in range(n):
        z_i = s_i % z_size
        for a_i in range(a_size):
            for next_z_i in range(z_size):
                Q[s_i, a_i, a_i * z_size + next_z_i] = Π[z_i, next_z_i]
@jit(nopython=True)
def asset_marginal(s_probs, a_size, z_size):
    """Marginalize probabilities over flat states s = a_i*z_size + z_i onto assets."""
    a_probs = np.zeros(a_size)
    for a_i in range(a_size):
        for z_i in range(z_size):
            a_probs[a_i] += s_probs[a_i * z_size + z_i]
    return a_probs
import quantecon as qe
import matplotlib.pyplot as plt
from matplotlib.pyplot import hist
from quantecon.markov import DiscreteDP
#%%
# Technology and preference parameters for the general-equilibrium block.
ro = 0.03  # time-preference rate implied by β below
A = 1.0    # total factor productivity
N = 1.0    # aggregate labor
α = 0.33   # capital share
β = 1/(1+ro)  # discount factor
δ = 0.05   # depreciation rate
def r_to_w(r):
    """
    Equilibrium wages associated with a given interest rate r.
    """
    capital_intensity_term = (A * α / (r + δ))**(α / (1 - α))
    return A * (1 - α) * capital_intensity_term
def rd(K):
    """
    Inverse demand curve for capital. The interest rate associated with a
    given demand for capital K.
    """
    marginal_product = A * α * (N / K)**(1 - α)
    return marginal_product - δ
def prices_to_capital_stock(am, r):
    """
    Map prices to the induced level of capital stock.

    Parameters:
    ----------
    am : Household
        An instance of an aiyagari_household.Household
    r : float
        The interest rate
    """
    w = r_to_w(r)
    am.set_prices(r, w)
    aiyagari_ddp = DiscreteDP(am.R, am.Q, β)
    # Compute the optimal policy
    results = aiyagari_ddp.solve(method='policy_iteration')
    # Compute the stationary distribution
    stationary_probs = results.mc.stationary_distributions[0]
    # Extract the marginal distribution for assets
    asset_probs = asset_marginal(stationary_probs, am.a_size, am.z_size)
    # Return K: expected asset holdings under the stationary distribution.
    return np.sum(asset_probs * am.a_vals)
# Create an instance of Household
am = Household(a_max=20)
# Use the instance to build a discrete dynamic program
am_ddp = DiscreteDP(am.R, am.Q, am.β)
# Create a grid of r values at which to compute demand and supply of capital
num_points = 30
r_vals = np.linspace(0.005, 0.04, num_points)
# Compute supply of capital
k_vals = np.empty(num_points)
for i, r in enumerate(r_vals):
    k_vals[i] = prices_to_capital_stock(am, r)
# Plot against demand for capital by firms
fig, ax = plt.subplots(figsize=(11, 8))
ax.plot(k_vals, r_vals, lw=2, alpha=0.6, label='supply of capital')
ax.plot(k_vals, rd(k_vals), lw=2, alpha=0.6, label='demand for capital')
ax.set_xlabel('capital')
ax.set_ylabel('interest rate')
ax.legend(loc='upper right')
plt.show()

# Report the endogenous distribution of wealth.
# STEP 1: Stationary distribution of wealth.
am_ddp = DiscreteDP(am.R, am.Q, am.β)
results = am_ddp.solve(method='policy_iteration')
# Compute the stationary distribution
stationary_probs = results.mc.stationary_distributions[0]
# Extract the marginal distribution for assets
asset_probs = asset_marginal(stationary_probs, am.a_size, am.z_size)
# PLOT
plt.hist(asset_probs)
plt.title('Stationary distribution of assets')
Amean = np.mean(asset_probs)
#%% Compare with the paper of Krueger, Mitman and Perri
#Gini's coefficient
def gini(array):
    """Gini coefficient of a numpy array (rank-based formula)."""
    values = array.flatten()
    minimum = np.amin(values)
    if minimum < 0:
        # Values cannot be negative: shift so the smallest becomes zero.
        values -= minimum
    # Values cannot be 0, and the formula requires ascending order.
    values += 0.0000001
    values = np.sort(values)
    n = values.shape[0]
    ranks = np.arange(1, n + 1)
    # Standard rank-based Gini formula.
    return np.sum((2 * ranks - n - 1) * values) / (n * np.sum(values))
Agini = gini(asset_probs)  # Gini coefficient of the stationary asset distribution
1684761 | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
import logging
import os
import pickle
import numpy as np
import pandas as pd
from sklearn.externals import joblib
import azureml.automl.core
from azureml.automl.core.shared import logging_utilities, log_server
from azureml.telemetry import INSTRUMENTATION_KEY
from inference_schema.schema_decorators import input_schema, output_schema
from inference_schema.parameter_types.numpy_parameter_type import NumpyParameterType
from inference_schema.parameter_types.pandas_parameter_type import PandasParameterType
# Sample request/response used by inference-schema to generate the swagger spec.
input_sample = pd.DataFrame({"age": pd.Series(["24"], dtype="int64"), "job": pd.Series(["technician"], dtype="object"), "marital": pd.Series(["single"], dtype="object"), "education": pd.Series(["university.degree"], dtype="object"), "default": pd.Series(["no"], dtype="object"), "housing": pd.Series(["no"], dtype="object"), "loan": pd.Series(["yes"], dtype="object"), "contact": pd.Series(["cellular"], dtype="object"), "month": pd.Series(["jul"], dtype="object"), "duration": pd.Series(["109"], dtype="int64"), "campaign": pd.Series(["3"], dtype="int64"), "pdays": pd.Series(["999"], dtype="int64"), "previous": pd.Series(["0"], dtype="int64"), "poutcome": pd.Series(["nonexistent"], dtype="object"), "emp.var.rate": pd.Series(["1.4"], dtype="float64"), "cons.price.idx": pd.Series(["93.918"], dtype="float64"), "cons.conf.idx": pd.Series(["-42.7"], dtype="float64"), "euribor3m": pd.Series(["4.963"], dtype="float64"), "nr.employed": pd.Series(["5228.1"], dtype="float64")})
output_sample = np.array([0])

# Enable AppInsights telemetry for the scoring script; never fail module
# import on telemetry setup problems.
try:
    log_server.enable_telemetry(INSTRUMENTATION_KEY)
    log_server.set_verbosity('INFO')
    logger = logging.getLogger('azureml.automl.core.scoring_script')
except:
    pass
def init():
    """Load the deployed sklearn model from AZUREML_MODEL_DIR into `model`."""
    global model
    # This name is the model.id of the model we want to deploy; deserialize
    # the model file back into a sklearn model.
    model_path = os.path.join(os.getenv('AZUREML_MODEL_DIR'), 'model.pkl')
    try:
        model = joblib.load(model_path)
    except Exception as e:
        # Derive model name/version from the path components for telemetry,
        # log the traceback, then re-raise so deployment fails loudly.
        path = os.path.normpath(model_path)
        path_split = path.split(os.sep)
        log_server.update_custom_dimensions({'model_name': path_split[1], 'model_version': path_split[2]})
        logging_utilities.log_traceback(e, logger)
        raise
@input_schema('data', PandasParameterType(input_sample))
@output_schema(NumpyParameterType(output_sample))
def run(data):
    """Score *data* with the loaded model; always return a JSON string."""
    try:
        result = model.predict(data)
        return json.dumps({"result": result.tolist()})
    except Exception as e:
        # AzureML convention: report errors in the response body, don't raise.
        result = str(e)
        return json.dumps({"error": result})
| StarcoderdataPython |
# Scoping demo: g1 is a module-level (global) name; l1 is local to display().
g1 = 1  # GL1
def display():
    l1 = 2  # GL2
    print(l1)  # GL3
    print(g1)  # GL4 - reads the global
display()
print(g1)  # GL5
print(l1)  # GL6 - raises NameError: l1 only exists inside display() (intentional demo)
| StarcoderdataPython |
9703892 | from torch.autograd import Variable
import torchvision.models as models
import torch.nn.functional as F
import torch.optim as optim
import torch.nn as nn
import numpy as np
import random
import torch
import copy
import pdb
from base.base_envs import BaseState
class State(BaseState):
    def loss_fn(self, predictions):
        """Element-wise squared relative error of *predictions* vs self.labs."""
        relative_error = (predictions - self.labs) / (self.labs + 1e-8)
        return relative_error ** 2
| StarcoderdataPython |
1779017 | <reponame>ml-research/MoRT_NMI
import matplotlib.pyplot as plt
from scipy.stats import pearsonr
import numpy as np
import csv
import os
from matplotlib import rc
from mort import dataMoral
import seaborn as sns
rc('font', **{'family':'sans-serif','sans-serif':['Arial']})
sns.set(style='ticks', palette='Set2')
rc('text', usetex=True)
# how to compute bias: execute file TODO
def read_bias_file(filename):
    """Read a bias CSV with rows (action, user_score, bert_score, mort_score)
    and return, per row, [user_score, bert_score, mort_score, action] with
    the scores converted to float.
    """
    with open(filename, 'r') as fh:
        rows = list(csv.reader(fh, delimiter=','))
    parsed = []
    for action, user_score, bert_score, mort_score in rows:
        parsed.append([float(user_score), float(bert_score),
                       float(mort_score), action])
    return parsed
def own_plot(x, y, a=None, b=None, suffix="", text_pos=(-0.3, 1.2)):
    """Scatter the Dos points (x, y) and Don'ts points (a, b), overlay a
    linear fit over the union, annotate the Pearson r with significance
    starlets, and save the figure as an SVG named after ``suffix``.

    x/a are MCM scores, y/b the corresponding WEAT values; text_pos places
    the r annotation in data coordinates.

    NOTE(review): despite the ``a=None, b=None`` defaults, ``x + a`` below
    requires list arguments -- calling with the defaults raises TypeError;
    confirm whether the defaults are ever used.
    """
    fontsize = 9
    x_all = x + a
    y_all = y + b
    fig = plt.figure(figsize=(4, 1.4))
    ax = plt.gca()
    # plt.axis([-80, 80, -0.8, 0.8])
    # plt.xticks([-80, -40, 0, 40, 80], size=8)
    # plt.yticks([-0.8, -0.4,0, 0.4, 0.8], size=8)
    # plt.axvline(x=0, c='#898989', linestyle=':')
    # plt.axhline(y=0, c='#898989', linestyle=':')
    # plt.plot(x_new,ffit(x_new))
    plt.scatter(x, y, s=5, color='#BE6F00', label='Do')
    plt.scatter(a, b, s=5, color='#00715E', label='Dont')
    # Least-squares line over the pooled Dos+Don'ts points.
    plt.plot(np.unique(x_all), np.poly1d(np.polyfit(x_all, y_all, 1))(np.unique(x_all)),
             label='Correlation', color='#004E8A', gid='r = ' + str(round(pearsonr(x_all, y_all)[0], 3)))
    plt.ylim((-0.2, 0.22))
    plt.yticks(np.arange(-0.2, 0.21, 0.1))
    ax.tick_params(axis='both', which='major', labelsize=fontsize, direction='in')
    ax.tick_params(axis='both', which='minor', labelsize=fontsize, direction='in')
    # r = (correlation coefficient, p-value); starlets encode significance.
    r = pearsonr(x_all, y_all)
    starlets = ''
    if r[1] < 0.05:
        if r[1] < 0.01:
            if r[1] < 0.001:
                starlets = '***'
            else:
                starlets = '**'
        else:
            starlets = '*'
    print(r)
    #input("Press key")
    plt.xlabel('MCM score', fontsize=fontsize-1)
    plt.ylabel('WEAT value', fontsize=fontsize-1)
    #plt.tight_layout()
    #plt.text(-0.8, 0.12, 'r = ' + str(round(r[0], 2)) + starlets, color='#004E8A', fontsize=10)
    print(suffix)
    # Title and annotation position depend on which model the suffix names.
    if "BERTcossim" in suffix:
        plt.title("\\textbf{BERT (Cosine Similarity)}", fontsize=fontsize)
        plt.text(text_pos[0], text_pos[1], 'r = ' + str(round(r[0], 2)) + starlets, color='#004E8A', fontsize=fontsize-1)
    elif "BERTstsbcossim" in suffix:
        plt.title("\\textbf{BERT$_{stsb}$ (Cosine Similarity)}", fontsize=fontsize)
        plt.text(text_pos[0], text_pos[1], 'r = ' + str(round(r[0], 2)) + starlets, color='#004E8A', fontsize=fontsize-1)
    elif "BERTsubspace_qa" in suffix:
        plt.title("\\textbf{BERT (Moral Compass QT)}", fontsize=fontsize)
        plt.text(text_pos[0], text_pos[1], 'r = ' + str(round(r[0], 2)) + starlets, color='#004E8A', fontsize=fontsize-1)
        plt.xticks(np.arange(-1, 1.1, 0.25))
    elif "BERTsubspace_raw" in suffix:
        plt.title("\\textbf{BERT (Moral Compass)}", fontsize=fontsize)
        plt.text(text_pos[0], text_pos[1], 'r = ' + str(round(r[0], 2)) + starlets, color='#004E8A', fontsize=fontsize-1)
        plt.xticks(np.arange(-1, 1.1, 0.25))
    elif "glove" in suffix.lower():
        plt.title("\\textbf{GloVe (Cosine Similarity)}", fontsize=fontsize)
        plt.text(text_pos[0], text_pos[1], 'r = ' + str(round(r[0], 2)) + starlets, color='#004E8A', fontsize=fontsize-1)
    else:
        plt.title("\\textbf{USE (Cosine Similarity)}", fontsize=fontsize)
        plt.text(text_pos[0], text_pos[1], 'r = ' + str(round(r[0], 2)) + starlets, color='#004E8A', fontsize=fontsize-1)
    # plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
    ax.xaxis.set_ticks_position('none')
    ax.yaxis.set_ticks_position('none')
    plt.grid(True, linestyle=':')
    os.makedirs('mort/plot_corr/weat_corr/plots/', exist_ok=True)
    plt.savefig('mort/plot_corr/weat_corr/plots/correlation_{}.svg'.format(suffix), bbox_inches='tight', dpi=600)
    #plt.show()
    plt.clf()
    plt.close()
    #exit()
    #input("Press key")
def use_corr():
    """Correlate USE-based MCM scores with WEAT values for Dos and Don'ts
    and plot the result."""
    def _load(csv_path, weat_rows):
        # Align one MCM csv with the WEAT reference list; yields
        # [action, weat_value, mcm_score] per action. Note: sorts
        # weat_rows in place, matching the original behavior.
        with open(csv_path, 'r') as fh:
            mcm_rows = list(csv.reader(fh, delimiter=','))
        weat_rows.sort(key=lambda row: row[0])
        mcm_rows.sort(key=lambda row: row[1])
        assert len(weat_rows) == len(mcm_rows)
        for mcm, weat in zip(mcm_rows, weat_rows):
            assert mcm[1] == weat[0]
        return [[mcm[1], float(weat[2]), float(mcm[0])]
                for mcm, weat in zip(mcm_rows, weat_rows)]
    dos = _load("data/correlation/pos_neg/pos50Verbs_use_hubEmbedding.csv",
                dataMoral.dos_50)
    donts = _load("data/correlation/pos_neg//neg50Verbs_use_hubEmbedding.csv",
                  dataMoral.donts_50)
    x = [row[2] for row in dos]      # MCM scores (Dos)
    y = [row[1] for row in dos]      # WEAT values (Dos)
    a = [row[2] for row in donts]    # MCM scores (Don'ts)
    b = [row[1] for row in donts]    # WEAT values (Don'ts)
    own_plot(x, y, a, b, suffix="weat_vs_USE", text_pos=(-0.025, 0.15,))
    print('Pearson ###', pearsonr(np.array(x + a), np.array(y + b)))
def bertcossim_corr(model_name, model_name2=''):
    """Correlate BERT-based MCM scores with WEAT values for Dos and Don'ts.

    model_name selects the scoring variant (e.g. 'cossim', 'subspace_qa');
    model_name2 is an optional model suffix such as '_stsb'.
    """
    def _load(csv_path, weat_rows):
        # Align one MCM csv with the WEAT reference list; yields
        # [action, weat_value, mcm_score] per action. Sorts weat_rows
        # in place, matching the original behavior.
        with open(csv_path, 'r') as fh:
            mcm_rows = list(csv.reader(fh, delimiter=','))
        weat_rows.sort(key=lambda row: row[0])
        mcm_rows.sort(key=lambda row: row[1])
        assert len(weat_rows) == len(mcm_rows)
        for mcm, weat in zip(mcm_rows, weat_rows):
            assert mcm[1] == weat[0]
        return [[mcm[1], float(weat[2]), float(mcm[0])]
                for mcm, weat in zip(mcm_rows, weat_rows)]
    dos = _load("data/correlation/pos_neg/BERT{}_dos_{}_bias.csv".format(model_name2, model_name),
                dataMoral.dos_50)
    donts = _load("data/correlation/pos_neg/BERT{}_donts_{}_bias.csv".format(model_name2, model_name),
                  dataMoral.donts_50)
    x = [row[2] for row in dos]      # MCM scores (Dos)
    y = [row[1] for row in dos]      # WEAT values (Dos)
    a = [row[2] for row in donts]    # MCM scores (Don'ts)
    b = [row[1] for row in donts]    # WEAT values (Don'ts)
    own_plot(x, y, a, b,
             suffix="weat_vs_BERT{}{}".format(model_name2.replace('_', ''), model_name),
             text_pos=(-0.09, .15,))
    print('Pearson ###', pearsonr(np.array(x + a), np.array(y + b)))
def glove_cossim_corr():
    """Correlate GloVe cosine-similarity MCM scores with WEAT values for
    Dos and Don'ts and plot the result."""
    def _load(csv_path, weat_rows):
        # Align one MCM csv with the WEAT reference list; yields
        # [action, weat_value, mcm_score] per action. Sorts weat_rows
        # in place, matching the original behavior.
        with open(csv_path, 'r') as fh:
            mcm_rows = list(csv.reader(fh, delimiter=','))
        weat_rows.sort(key=lambda row: row[0])
        mcm_rows.sort(key=lambda row: row[1])
        assert len(weat_rows) == len(mcm_rows)
        for mcm, weat in zip(mcm_rows, weat_rows):
            assert mcm[1] == weat[0]
        return [[mcm[1], float(weat[2]), float(mcm[0])]
                for mcm, weat in zip(mcm_rows, weat_rows)]
    dos = _load("data/correlation/pos_neg/glove_dos_cossim_bias.csv",
                dataMoral.dos_50)
    donts = _load("data/correlation/pos_neg/glove_donts_cossim_bias.csv",
                  dataMoral.donts_50)
    x = [row[2] for row in dos]      # MCM scores (Dos)
    y = [row[1] for row in dos]      # WEAT values (Dos)
    a = [row[2] for row in donts]    # MCM scores (Don'ts)
    b = [row[1] for row in donts]    # WEAT values (Don'ts)
    own_plot(x, y, a, b, suffix="weat_vs_GLOVE", text_pos=(0.09, .15,))
    print('Pearson ###', pearsonr(np.array(x + a), np.array(y + b)))
if __name__ == '__main__':
    # Compute and plot the MCM-vs-WEAT correlation for every embedding.
    print("GloVe")
    glove_cossim_corr()
    # USE
    print("USE")
    use_corr()
    # BERT cossim
    print("BERT cossim")
    bertcossim_corr('cossim')
    # BERT stsb cossim (variant currently disabled)
    #print("BERT stsb cossim")
    #bertcossim_corr('cossim', model_name2='_stsb')
    # BERT proj
    print("BERT proj")
    bertcossim_corr('subspace_qa')
    # BERT proj raw
    print("BERT proj raw")
    bertcossim_corr('subspace_raw')
9607813 | """Support for Wink water heaters."""
import logging
import pywink
from homeassistant.components.water_heater import (
ATTR_TEMPERATURE,
STATE_ECO,
STATE_ELECTRIC,
STATE_GAS,
STATE_HEAT_PUMP,
STATE_HIGH_DEMAND,
STATE_PERFORMANCE,
SUPPORT_AWAY_MODE,
SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE,
WaterHeaterEntity,
)
from homeassistant.const import STATE_OFF, STATE_UNKNOWN, TEMP_CELSIUS
from . import DOMAIN, WinkDevice
_LOGGER = logging.getLogger(__name__)
# Capabilities advertised by every Wink water heater entity.
SUPPORT_FLAGS_HEATER = (
    SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE | SUPPORT_AWAY_MODE
)
# Extra state-attribute keys exposed by WinkWaterHeater.
ATTR_RHEEM_TYPE = "rheem_type"
ATTR_VACATION_MODE = "vacation_mode"
# Home Assistant operation state -> Wink API mode string.
HA_STATE_TO_WINK = {
    STATE_ECO: "eco",
    STATE_ELECTRIC: "electric_only",
    STATE_GAS: "gas",
    STATE_HEAT_PUMP: "heat_pump",
    STATE_HIGH_DEMAND: "high_demand",
    STATE_OFF: "off",
    STATE_PERFORMANCE: "performance",
}
# Inverse mapping: Wink API mode string -> Home Assistant state.
WINK_STATE_TO_HA = {value: key for key, value in HA_STATE_TO_WINK.items()}
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Wink water heater devices."""
    known_ids = hass.data[DOMAIN]["unique_ids"]
    for heater in pywink.get_water_heaters():
        # Skip devices that were already registered by a previous setup.
        uid = heater.object_id() + heater.name()
        if uid not in known_ids:
            add_entities([WinkWaterHeater(heater, hass)])
class WinkWaterHeater(WinkDevice, WaterHeaterEntity):
    """Representation of a Wink water heater.

    Wraps a pywink water-heater object; all state is read from and
    written to ``self.wink``.
    """
    @property
    def supported_features(self):
        """Return the list of supported features."""
        return SUPPORT_FLAGS_HEATER
    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        # The Wink API always returns temp in Celsius
        return TEMP_CELSIUS
    @property
    def device_state_attributes(self):
        """Return the optional device state attributes."""
        data = {}
        data[ATTR_VACATION_MODE] = self.wink.vacation_mode_enabled()
        data[ATTR_RHEEM_TYPE] = self.wink.rheem_type()
        return data
    @property
    def current_operation(self):
        """
        Return current operation, one of the following:
        ["eco", "performance", "heat_pump",
        "high_demand", "electric_only", "gas"]
        """
        if not self.wink.is_on():
            current_op = STATE_OFF
        else:
            # Unknown Wink modes map to None -> report STATE_UNKNOWN.
            current_op = WINK_STATE_TO_HA.get(self.wink.current_mode())
            if current_op is None:
                current_op = STATE_UNKNOWN
        return current_op
    @property
    def operation_list(self):
        """List of available operation modes."""
        op_list = ["off"]
        modes = self.wink.modes()
        for mode in modes:
            # "aux" is deliberately not exposed to Home Assistant.
            if mode == "aux":
                continue
            ha_mode = WINK_STATE_TO_HA.get(mode)
            if ha_mode is not None:
                op_list.append(ha_mode)
            else:
                error = (
                    "Invalid operation mode mapping. "
                    f"{mode} doesn't map. Please report this."
                )
                _LOGGER.error(error)
        return op_list
    def set_temperature(self, **kwargs):
        """Set new target temperature."""
        target_temp = kwargs.get(ATTR_TEMPERATURE)
        self.wink.set_temperature(target_temp)
    def set_operation_mode(self, operation_mode):
        """Set operation mode."""
        op_mode_to_set = HA_STATE_TO_WINK.get(operation_mode)
        self.wink.set_operation_mode(op_mode_to_set)
    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self.wink.current_set_point()
    def turn_away_mode_on(self):
        """Turn away on."""
        # Away mode is implemented via Wink's vacation mode.
        self.wink.set_vacation_mode(True)
    def turn_away_mode_off(self):
        """Turn away off."""
        self.wink.set_vacation_mode(False)
    @property
    def min_temp(self):
        """Return the minimum temperature."""
        return self.wink.min_set_point()
    @property
    def max_temp(self):
        """Return the maximum temperature."""
        return self.wink.max_set_point()
| StarcoderdataPython |
import mock
import pytest
import requests
import suds.transport
import suds_requests
def test_no_errors():
    """The decorator is transparent when the wrapped call succeeds."""
    wrapped = mock.Mock(__name__='m')
    decorated = suds_requests.handle_errors(wrapped)
    assert decorated() == wrapped.return_value
def test_HTTPError():
    """An HTTPError becomes a suds TransportError carrying the original
    status code and response body."""
    response = mock.Mock(status_code=404,
                         content='File not found')
    wrapped = mock.Mock(
        side_effect=requests.HTTPError(response=response),
        __name__='m',
    )
    decorated = suds_requests.handle_errors(wrapped)
    with pytest.raises(suds.transport.TransportError) as excinfo:
        decorated()
    assert excinfo.value.httpcode == 404
    assert excinfo.value.fp.read() == 'File not found'
def test_RequestException():
    """Other request failures map to TransportError with code 000 and the
    traceback as the body."""
    wrapped = mock.Mock(
        side_effect=requests.RequestException(),
        __name__='m',
    )
    decorated = suds_requests.handle_errors(wrapped)
    with pytest.raises(suds.transport.TransportError) as excinfo:
        decorated()
    assert excinfo.value.httpcode == 000
    assert excinfo.value.fp.read().startswith('Traceback')
| StarcoderdataPython |
from JumpScale import j
def cb():
    # Factory for the btrfs SAL; the extension module is imported lazily
    # so it is only loaded when the SAL is first instantiated.
    from .BtrfsExtension import BtrfsExtension
    return BtrfsExtension()
# Make the factory available as j.sal.btrfs.
j.sal._register('btrfs', cb)
| StarcoderdataPython |
# -*- coding: utf-8 -*-
from kalasearch import Client
import json
# Demo: index two documents into a kalasearch index, then search.
myclient = Client("YOUR_APP_ID", "YOUR_API_KEY")
index = myclient.get_index("YOUR_INDEX_ID")
documents = [
    {"name": "周杰伦"},
    {"name": "陈冠希"}
]
print(index.add_objects(documents))
# NOTE(review): ``options`` is built but never passed to index.search()
# below -- presumably it should be supplied as a second argument; confirm
# against the kalasearch client API.
options = {
    "searchableFields": ["name", "story"],
    "highlightFields": ["name", "story"]
}
search_results = index.search("冠希")
print(search_results)
import numpy as np
def generate_complex_multiplication_tensor (dtype=float):
    """Return the (2, 2, 2) structure tensor T of complex multiplication:
    for z = (re, im) vectors u, v, the product w satisfies
    w[i] = sum_jk T[i, j, k] * u[j] * v[k].
    """
    # Nonzero entries of the bilinear form (re*re - im*im, re*im + im*re).
    entries = {
        (0, 0, 0): 1,
        (0, 1, 1): -1,
        (1, 0, 1): 1,
        (1, 1, 0): 1,
    }
    tensor = np.zeros((2, 2, 2), dtype=dtype)
    for index, value in entries.items():
        tensor[index] = value
    return tensor
if __name__ == '__main__':
    # Self-test: verify the tensor contraction reproduces symbolic
    # complex multiplication via sympy.
    import sympy as sp
    import tensor
    complex_multiplication_tensor = generate_complex_multiplication_tensor(dtype=object)
    a,b,c,d = sp.symbols('a,b,c,d')
    # Reference product computed symbolically: (a+bi)(c+di).
    product = ((a + sp.I*b) * (c + sp.I*d)).expand()
    fancy_product = tensor.contract('ijk,j,k', complex_multiplication_tensor, np.array([a,b]), np.array([c,d]), dtype=object)
    fancy_product_as_complex = fancy_product[0] + sp.I*fancy_product[1]
    # print product
    # print fancy_product
    # print fancy_product_as_complex
    # print 'difference:', (product-fancy_product_as_complex).simplify()
    assert (product-fancy_product_as_complex).simplify() == 0
    print('passed test')
    # Partial contraction: multiplication by z as a 2x2 real matrix.
    z = np.array([a,b])
    print(tensor.contract('ijk,k', complex_multiplication_tensor, z, dtype=object))
| StarcoderdataPython |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of CbM (https://github.com/ec-jrc/cbm).
# Author : <NAME>
# Credits : GTCAP Team
# Copyright : 2021 European Commission, Joint Research Centre
# License : 3-Clause BSD
"""
Project: Copernicus DIAS for CAP 'checks by monitoring'.
Functions for spatial management and processing.
Options:
swap_xy
centroid
trasform_geometry
-h, --help Show this screen.
--version Show version.
"""
def swap_xy(geom):
    """
    Swap (x, y) for the supported spatial geometry types, preserving a z
    coordinate when present.

    Accepts either a GeoJSON-like dict (``{'type': ..., 'coordinates': ...}``)
    or a bare (nested) coordinate list; returns a nested list of swapped
    coordinates. Raises ValueError for an unrecognized dict geometry type.

    Exaple code:
        from spatial.geometry import Point, Polygon, MultiPoint, MultiLineString
        # POINT Z (1 2 3) -> POINT Z (2 1 3)
        spatial.swap_xy(Point(1, 2, 3))
        # MULTILINESTRING ((1 2, 3 4)) -> MULTILINESTRING ((2 1, 4 3))
        spatial.swap_xy(MultiLineString([[(1, 2), (3, 4)]]))
        # Map the function to a geopandas geometry column
        table.geometry = table.geometry.map(spatial.swap_xy)
    """
    def swap_xy_coords(coords):
        # Swap one coordinate pair/triple; returns None for other lengths.
        if len(coords) == 2:
            x, y = coords
            return [y, x]
        elif len(coords) == 3:
            x, y, z = coords
            return [y, x, z]
    if type(geom) is dict:
        # Process coordinates from each supported geometry type.
        # NOTE(review): every branch indexes coordinates[0], i.e. it
        # expects one extra level of list nesting; a plain GeoJSON Point
        # ({'coordinates': [x, y]}) would make the comprehension iterate
        # over a number -- confirm the expected input layout with callers.
        if geom['type'] in ('Point', 'LineString', 'LinearRing', 'Polygon'):
            coords_list = geom['coordinates'][0]
            return [swap_xy_coords(coords) for coords in coords_list]
        elif geom['type'].startswith('Multi') or geom['type'] == 'GeometryCollection':
            geom_list = []
            for sub_geom in geom['coordinates'][0]:
                geom_list.append([swap_xy_coords(coords)
                                  for coords in sub_geom])
            return geom_list[0]
        else:
            # BUG FIX: the message was a plain (non-f) string, so the
            # literal text "{geom['type']}" was shown instead of the type.
            raise ValueError(f"Type {geom['type']} not recognized")
    elif type(geom) is list:
        if list_depth(geom) == 2:
            coords_list = geom
            return [swap_xy_coords(coords) for coords in coords_list]
        elif list_depth(geom) == 3:
            geom_list = []
            for sub_geom in geom:
                geom_list.append([swap_xy_coords(coords)
                                  for coords in sub_geom])
            return geom_list
        else:
            print("Unrecognized geometry type")
def list_depth(lsit_):
    """Return the maximum nesting depth of a (possibly nested) list.

    Non-list values have depth 0; a flat list has depth 1.
    """
    if isinstance(lsit_, list):
        # BUG FIX: ``default=0`` makes an empty list report depth 1
        # instead of crashing with "max() arg is an empty sequence".
        return 1 + max((list_depth(item) for item in lsit_), default=0)
    return 0
def centroid(geom):
    """
    Args:
        geom: A list of coordinates (depth 1: sequence of pairs/tuples,
            depth 2: list of [x, y] lists, depth 3: first ring is used).
    Returns:
        (_x, _y): Coordinates of the center point, rounded to 4 decimals.
    Raises:
        ValueError: if the coordinates format is not recognized.
    """
    def _xy_center(coords_list):
        # Arithmetic mean of the first two components of each coordinate.
        xs = [coords[0] for coords in coords_list]
        ys = [coords[1] for coords in coords_list]
        count = len(coords_list)
        return [sum(xs) / count, sum(ys) / count]
    depth = list_depth(geom)
    if depth == 1:
        # e.g. a list of tuples; coerce each coordinate to a list first.
        try:
            _x, _y = _xy_center([list(c) for c in geom])
        except Exception as err:
            # BUG FIX: previously this branch swallowed the error and then
            # crashed with UnboundLocalError at the return statement.
            raise ValueError("Not recognized coordinates format.") from err
    elif depth == 2:
        _x, _y = _xy_center(geom)
    elif depth == 3:
        _x, _y = _xy_center(geom[0])
    else:
        # BUG FIX: was print() followed by UnboundLocalError on return.
        raise ValueError("Not recognized coordinates format.")
    return(round(_x, 4), round(_y, 4))
def trasform_geometry(jsondata, target_epsg=4326):
    """
    Args:
        jsondata: Parcel information in json format with geometry.
            Must include 'srid' and 'geom' (each a one-element sequence).
        target_epsg: EPSG code to reproject into (default 4326 / WGS84).
    Returns:
        geom_wgs84: json formatted geometry in the target CRS, or None
        if the transformation fails (the error is printed, not raised).
    Example:
    """
    # NOTE: function name keeps its historical misspelling ("trasform")
    # because external callers reference it by this name.
    # Imported lazily so the module loads without GDAL installed.
    from osgeo import ogr, osr
    import json
    try:
        geom = jsondata['geom'][0]
        g = ogr.CreateGeometryFromJson(geom)
        source = osr.SpatialReference()
        source.ImportFromEPSG(jsondata['srid'][0])
        target = osr.SpatialReference()
        target.ImportFromEPSG(target_epsg)
        transform = osr.CoordinateTransformation(source, target)
        # Transform mutates the geometry in place.
        g.Transform(transform)
        geom_wgs84 = json.loads(g.ExportToJson())
        return geom_wgs84
    except Exception as err:
        print("could not transform geometry", err)
def bounds(geotiff):
    # Imported lazily so the module loads without rasterio installed.
    import rasterio
    import rasterio.features
    import rasterio.warp
    import numpy as np
    # NOTE(review): this string is not a real docstring (it follows the
    # imports, so it is just an expression statement) -- it would need to
    # be the first statement to appear in help()/__doc__.
    """
    Args:
        geotiff: A tiff type image with georeferencing information.
    Returns:
        img_bounds: The bounds of the geotiff.
    Example:
        (13, -130), (32, -100) # SW and NE corners of the image
    """
    with rasterio.open(geotiff) as dataset:
        # Read the dataset's valid data mask as a ndarray.
        mask = dataset.dataset_mask().astype(np.uint16)
        # Extract feature shapes and values from the array.
        for geom, val in rasterio.features.shapes(
                mask, transform=dataset.transform):
            # Transform shapes from the dataset's own coordinate
            # reference system to CRS84 (EPSG:4326).
            geom = rasterio.warp.transform_geom(
                dataset.crs, 'EPSG:4326', geom, precision=6)
            # Corners 1 and 3 of the first shape's outer ring give the
            # (lat, lon) SW/NE pair; only the first shape is used.
            img_bounds = ((geom['coordinates'][0][1][1],
                           geom['coordinates'][0][1][0]),
                          (geom['coordinates'][0][3][1],
                           geom['coordinates'][0][3][0]))
            return img_bounds
| StarcoderdataPython |
6636645 | <filename>lib/tools.py
import pygame
class Dragger(object):
    """Wrapper around pygame's global input-grab state for drag & drop.

    ``button`` remembers which mouse button initiated the current grab
    (None while not grabbing).
    """
    #TODO: implement auto-drop (after a timeout) so the system
    # doesn't freeze?
    def __init__(self):
        self.button = None
    def __del__(self):
        # Safety net: release the global grab when the dragger dies.
        # NOTE(review): __del__ may run during interpreter shutdown when
        # pygame is already torn down -- confirm this cannot raise there.
        self.drop()
    def grabbing(self):
        # True while pygame currently has exclusive input grab.
        return pygame.event.get_grab()
    def grab(self, button=None):
        # Start a grab unless one is already active.
        if not self.grabbing():
            self.button = button
            pygame.event.set_grab(True)
    def drop(self):
        # Release an active grab and forget the initiating button.
        if self.grabbing():
            self.button = None
            pygame.event.set_grab(False)
| StarcoderdataPython |
3303490 | <reponame>fotonauts/fwissr-python
# conf.py
import os
import json
import yaml
def merge_conf(to_hash, other_hash, path=[]):
    """Recursively merge other_hash into to_hash (in place) and return it.

    Nested dicts are merged key by key; any other value in other_hash
    overwrites the corresponding entry in to_hash. ``path`` tracks the key
    trail for recursive calls (kept for interface compatibility).
    """
    for key, value in other_hash.items():
        existing = to_hash.get(key)
        if isinstance(existing, dict) and isinstance(value, dict):
            merge_conf(existing, value, path + [str(key)])
        else:
            to_hash[key] = value
    return to_hash
def parse_conf_file(conf_file_path):
    """Parse a .json, .yaml or .yml conf file and return its contents.

    Raises Exception for any other file extension.
    """
    ext = os.path.splitext(conf_file_path)[1]
    if ext == ".json":
        # BUG FIX: use context managers -- the handles were never closed.
        with open(conf_file_path) as conf_file:
            return json.load(conf_file)
    if ext in (".yaml", ".yml"):
        with open(conf_file_path) as conf_file:
            # SECURITY FIX: safe_load instead of yaml.load -- plain load
            # can instantiate arbitrary Python objects from the file.
            return yaml.safe_load(conf_file)
    raise Exception("Unsupported conf file kind", conf_file_path)
| StarcoderdataPython |
# library imports
import requests
from bs4 import BeautifulSoup
import pandas as pd
import time
# criacao de funcao
def coletar_votos():
    """
    Visit every roll-call vote URL listed in data/urls_finais.csv and
    collect each deputy's name, party and vote; the combined rows are
    written to data/dados_finais.csv.
    """
    # CSV produced earlier by definir_link_nominal().
    urls_finais = pd.read_csv("data/urls_finais.csv")
    links = urls_finais['link_final'].tolist()
    link_all = []
    nome_all = []
    partido_all = []
    voto_all = []
    for idx in range(len(links)):
        # BUG FIX: the original fetched ``links[0]`` on every iteration,
        # so every page parsed was the FIRST roll call even though each
        # row was labelled with the idx-th link. Fetch links[idx] instead.
        link = links[idx]
        page = requests.get(link)
        soup = BeautifulSoup(page.text, 'html.parser')
        div = soup.find('div', {'class': 'titulares'})
        time.sleep(3)  # be polite to the server between requests
        for li in div.find_all('li'):
            link_all.append(link)
            print(link)
            nome = li.find("span", {"class": "nome"}).contents[0]
            nome_all.append(nome)
            partido = li.find("span", {"class": "nomePartido"}).contents[0]
            partido_all.append(partido)
            # The vote span carries class "sim" (yes) or "nao" (no);
            # when neither is present the deputy was absent.
            try:
                voto = li.find("span", {"class": ["voto", "sim"]}).contents[0]
            except AttributeError:
                try:
                    voto = li.find("span", {"class": ["voto", "nao"]}).contents[0]
                except AttributeError:
                    voto = 'Ausente'
            voto_all.append(voto)
    dados = {'nome': nome_all, 'partido': partido_all, 'voto': voto_all, 'link': link_all}
    print(dados)
    dados_finais = pd.DataFrame(dados)
    dados_finais.to_csv('data/dados_finais.csv', encoding='utf-8', index=False)
| StarcoderdataPython |
def get_data():
return [
(519432,525806),
(632382,518061),
(78864,613712),
(466580,530130),
(780495,510032),
(525895,525320),
(15991,714883),
(960290,502358),
(760018,511029),
(166800,575487),
(210884,564478),
(555151,523163),
(681146,515199),
(563395,522587),
(738250,512126),
(923525,503780),
(595148,520429),
(177108,572629),
(750923,511482),
(440902,532446),
(881418,505504),
(422489,534197),
(979858,501616),
(685893,514935),
(747477,511661),
(167214,575367),
(234140,559696),
(940238,503122),
(728969,512609),
(232083,560102),
(900971,504694),
(688801,514772),
(189664,569402),
(891022,505104),
(445689,531996),
(119570,591871),
(821453,508118),
(371084,539600),
(911745,504251),
(623655,518600),
(144361,582486),
(352442,541775),
(420726,534367),
(295298,549387),
(6530,787777),
(468397,529976),
(672336,515696),
(431861,533289),
(84228,610150),
(805376,508857),
(444409,532117),
(33833,663511),
(381850,538396),
(402931,536157),
(92901,604930),
(304825,548004),
(731917,512452),
(753734,511344),
(51894,637373),
(151578,580103),
(295075,549421),
(303590,548183),
(333594,544123),
(683952,515042),
(60090,628880),
(951420,502692),
(28335,674991),
(714940,513349),
(343858,542826),
(549279,523586),
(804571,508887),
(260653,554881),
(291399,549966),
(402342,536213),
(408889,535550),
(40328,652524),
(375856,539061),
(768907,510590),
(165993,575715),
(976327,501755),
(898500,504795),
(360404,540830),
(478714,529095),
(694144,514472),
(488726,528258),
(841380,507226),
(328012,544839),
(22389,690868),
(604053,519852),
(329514,544641),
(772965,510390),
(492798,527927),
(30125,670983),
(895603,504906),
(450785,531539),
(840237,507276),
(380711,538522),
(63577,625673),
(76801,615157),
(502694,527123),
(597706,520257),
(310484,547206),
(944468,502959),
(121283,591152),
(451131,531507),
(566499,522367),
(425373,533918),
(40240,652665),
(39130,654392),
(714926,513355),
(469219,529903),
(806929,508783),
(287970,550487),
(92189,605332),
(103841,599094),
(671839,515725),
(452048,531421),
(987837,501323),
(935192,503321),
(88585,607450),
(613883,519216),
(144551,582413),
(647359,517155),
(213902,563816),
(184120,570789),
(258126,555322),
(502546,527130),
(407655,535678),
(401528,536306),
(477490,529193),
(841085,507237),
(732831,512408),
(833000,507595),
(904694,504542),
(581435,521348),
(455545,531110),
(873558,505829),
(94916,603796),
(720176,513068),
(545034,523891),
(246348,557409),
(556452,523079),
(832015,507634),
(173663,573564),
(502634,527125),
(250732,556611),
(569786,522139),
(216919,563178),
(521815,525623),
(92304,605270),
(164446,576167),
(753413,511364),
(11410,740712),
(448845,531712),
(925072,503725),
(564888,522477),
(7062,780812),
(641155,517535),
(738878,512100),
(636204,517828),
(372540,539436),
(443162,532237),
(571192,522042),
(655350,516680),
(299741,548735),
(581914,521307),
(965471,502156),
(513441,526277),
(808682,508700),
(237589,559034),
(543300,524025),
(804712,508889),
(247511,557192),
(543486,524008),
(504383,526992),
(326529,545039),
(792493,509458),
(86033,609017),
(126554,589005),
(579379,521481),
(948026,502823),
(404777,535969),
(265767,554022),
(266876,553840),
(46631,643714),
(492397,527958),
(856106,506581),
(795757,509305),
(748946,511584),
(294694,549480),
(409781,535463),
(775887,510253),
(543747,523991),
(210592,564536),
(517119,525990),
(520253,525751),
(247926,557124),
(592141,520626),
(346580,542492),
(544969,523902),
(506501,526817),
(244520,557738),
(144745,582349),
(69274,620858),
(292620,549784),
(926027,503687),
(736320,512225),
(515528,526113),
(407549,535688),
(848089,506927),
(24141,685711),
(9224,757964),
(980684,501586),
(175259,573121),
(489160,528216),
(878970,505604),
(969546,502002),
(525207,525365),
(690461,514675),
(156510,578551),
(659778,516426),
(468739,529945),
(765252,510770),
(76703,615230),
(165151,575959),
(29779,671736),
(928865,503569),
(577538,521605),
(927555,503618),
(185377,570477),
(974756,501809),
(800130,509093),
(217016,563153),
(365709,540216),
(774508,510320),
(588716,520851),
(631673,518104),
(954076,502590),
(777828,510161),
(990659,501222),
(597799,520254),
(786905,509727),
(512547,526348),
(756449,511212),
(869787,505988),
(653747,516779),
(84623,609900),
(839698,507295),
(30159,670909),
(797275,509234),
(678136,515373),
(897144,504851),
(989554,501263),
(413292,535106),
(55297,633667),
(788650,509637),
(486748,528417),
(150724,580377),
(56434,632490),
(77207,614869),
(588631,520859),
(611619,519367),
(100006,601055),
(528924,525093),
(190225,569257),
(851155,506789),
(682593,515114),
(613043,519275),
(514673,526183),
(877634,505655),
(878905,505602),
(1926,914951),
(613245,519259),
(152481,579816),
(841774,507203),
(71060,619442),
(865335,506175),
(90244,606469),
(302156,548388),
(399059,536557),
(478465,529113),
(558601,522925),
(69132,620966),
(267663,553700),
(988276,501310),
(378354,538787),
(529909,525014),
(161733,576968),
(758541,511109),
(823425,508024),
(149821,580667),
(269258,553438),
(481152,528891),
(120871,591322),
(972322,501901),
(981350,501567),
(676129,515483),
(950860,502717),
(119000,592114),
(392252,537272),
(191618,568919),
(946699,502874),
(289555,550247),
(799322,509139),
(703886,513942),
(194812,568143),
(261823,554685),
(203052,566221),
(217330,563093),
(734748,512313),
(391759,537328),
(807052,508777),
(564467,522510),
(59186,629748),
(113447,594545),
(518063,525916),
(905944,504492),
(613922,519213),
(439093,532607),
(445946,531981),
(230530,560399),
(297887,549007),
(459029,530797),
(403692,536075),
(855118,506616),
(963127,502245),
(841711,507208),
(407411,535699),
(924729,503735),
(914823,504132),
(333725,544101),
(176345,572832),
(912507,504225),
(411273,535308),
(259774,555036),
(632853,518038),
(119723,591801),
(163902,576321),
(22691,689944),
(402427,536212),
(175769,572988),
(837260,507402),
(603432,519893),
(313679,546767),
(538165,524394),
(549026,523608),
(61083,627945),
(898345,504798),
(992556,501153),
(369999,539727),
(32847,665404),
(891292,505088),
(152715,579732),
(824104,507997),
(234057,559711),
(730507,512532),
(960529,502340),
(388395,537687),
(958170,502437),
(57105,631806),
(186025,570311),
(993043,501133),
(576770,521664),
(215319,563513),
(927342,503628),
(521353,525666),
(39563,653705),
(752516,511408),
(110755,595770),
(309749,547305),
(374379,539224),
(919184,503952),
(990652,501226),
(647780,517135),
(187177,570017),
(168938,574877),
(649558,517023),
(278126,552016),
(162039,576868),
(658512,516499),
(498115,527486),
(896583,504868),
(561170,522740),
(747772,511647),
(775093,510294),
(652081,516882),
(724905,512824),
(499707,527365),
(47388,642755),
(646668,517204),
(571700,522007),
(180430,571747),
(710015,513617),
(435522,532941),
(98137,602041),
(759176,511070),
(486124,528467),
(526942,525236),
(878921,505604),
(408313,535602),
(926980,503640),
(882353,505459),
(566887,522345),
(3326,853312),
(911981,504248),
(416309,534800),
(392991,537199),
(622829,518651),
(148647,581055),
(496483,527624),
(666314,516044),
(48562,641293),
(672618,515684),
(443676,532187),
(274065,552661),
(265386,554079),
(347668,542358),
(31816,667448),
(181575,571446),
(961289,502320),
(365689,540214),
(987950,501317),
(932299,503440),
(27388,677243),
(746701,511701),
(492258,527969),
(147823,581323),
(57918,630985),
(838849,507333),
(678038,515375),
(27852,676130),
(850241,506828),
(818403,508253),
(131717,587014),
(850216,506834),
(904848,504529),
(189758,569380),
(392845,537217),
(470876,529761),
(925353,503711),
(285431,550877),
(454098,531234),
(823910,508003),
(318493,546112),
(766067,510730),
(261277,554775),
(421530,534289),
(694130,514478),
(120439,591498),
(213308,563949),
(854063,506662),
(365255,540263),
(165437,575872),
(662240,516281),
(289970,550181),
(847977,506933),
(546083,523816),
(413252,535113),
(975829,501767),
(361540,540701),
(235522,559435),
(224643,561577),
(736350,512229),
(328303,544808),
(35022,661330),
(307838,547578),
(474366,529458),
(873755,505819),
(73978,617220),
(827387,507845),
(670830,515791),
(326511,545034),
(309909,547285),
(400970,536363),
(884827,505352),
(718307,513175),
(28462,674699),
(599384,520150),
(253565,556111),
(284009,551093),
(343403,542876),
(446557,531921),
(992372,501160),
(961601,502308),
(696629,514342),
(919537,503945),
(894709,504944),
(892201,505051),
(358160,541097),
(448503,531745),
(832156,507636),
(920045,503924),
(926137,503675),
(416754,534757),
(254422,555966),
(92498,605151),
(826833,507873),
(660716,516371),
(689335,514746),
(160045,577467),
(814642,508425),
(969939,501993),
(242856,558047),
(76302,615517),
(472083,529653),
(587101,520964),
(99066,601543),
(498005,527503),
(709800,513624),
(708000,513716),
(20171,698134),
(285020,550936),
(266564,553891),
(981563,501557),
(846502,506991),
(334,1190800),
(209268,564829),
(9844,752610),
(996519,501007),
(410059,535426),
(432931,533188),
(848012,506929),
(966803,502110),
(983434,501486),
(160700,577267),
(504374,526989),
(832061,507640),
(392825,537214),
(443842,532165),
(440352,532492),
(745125,511776),
(13718,726392),
(661753,516312),
(70500,619875),
(436952,532814),
(424724,533973),
(21954,692224),
(262490,554567),
(716622,513264),
(907584,504425),
(60086,628882),
(837123,507412),
(971345,501940),
(947162,502855),
(139920,584021),
(68330,621624),
(666452,516038),
(731446,512481),
(953350,502619),
(183157,571042),
(845400,507045),
(651548,516910),
(20399,697344),
(861779,506331),
(629771,518229),
(801706,509026),
(189207,569512),
(737501,512168),
(719272,513115),
(479285,529045),
(136046,585401),
(896746,504860),
(891735,505067),
(684771,514999),
(865309,506184),
(379066,538702),
(503117,527090),
(621780,518717),
(209518,564775),
(677135,515423),
(987500,501340),
(197049,567613),
(329315,544673),
(236756,559196),
(357092,541226),
(520440,525733),
(213471,563911),
(956852,502490),
(702223,514032),
(404943,535955),
(178880,572152),
(689477,514734),
(691351,514630),
(866669,506128),
(370561,539656),
(739805,512051),
(71060,619441),
(624861,518534),
(261660,554714),
(366137,540160),
(166054,575698),
(601878,519990),
(153445,579501),
(279899,551729),
(379166,538691),
(423209,534125),
(675310,515526),
(145641,582050),
(691353,514627),
(917468,504026),
(284778,550976),
(81040,612235),
(161699,576978),
(616394,519057),
(767490,510661),
(156896,578431),
(427408,533714),
(254849,555884),
(737217,512182),
(897133,504851),
(203815,566051),
(270822,553189),
(135854,585475),
(778805,510111),
(784373,509847),
(305426,547921),
(733418,512375),
(732087,512448),
(540668,524215),
(702898,513996),
(628057,518328),
(640280,517587),
(422405,534204),
(10604,746569),
(746038,511733),
(839808,507293),
(457417,530938),
(479030,529064),
(341758,543090),
(620223,518824),
(251661,556451),
(561790,522696),
(497733,527521),
(724201,512863),
(489217,528217),
(415623,534867),
(624610,518548),
(847541,506953),
(432295,533249),
(400391,536421),
(961158,502319),
(139173,584284),
(421225,534315),
(579083,521501),
(74274,617000),
(701142,514087),
(374465,539219),
(217814,562985),
(358972,540995),
(88629,607424),
(288597,550389),
(285819,550812),
(538400,524385),
(809930,508645),
(738326,512126),
(955461,502535),
(163829,576343),
(826475,507891),
(376488,538987),
(102234,599905),
(114650,594002),
(52815,636341),
(434037,533082),
(804744,508880),
(98385,601905),
(856620,506559),
(220057,562517),
(844734,507078),
(150677,580387),
(558697,522917),
(621751,518719),
(207067,565321),
(135297,585677),
(932968,503404),
(604456,519822),
(579728,521462),
(244138,557813),
(706487,513800),
(711627,513523),
(853833,506674),
(497220,527562),
(59428,629511),
(564845,522486),
(623621,518603),
(242689,558077),
(125091,589591),
(363819,540432),
(686453,514901),
(656813,516594),
(489901,528155),
(386380,537905),
(542819,524052),
(243987,557841),
(693412,514514),
(488484,528271),
(896331,504881),
(336730,543721),
(728298,512647),
(604215,519840),
(153729,579413),
(595687,520398),
(540360,524240),
(245779,557511),
(924873,503730),
(509628,526577),
(528523,525122),
(3509,847707),
(522756,525555),
(895447,504922),
(44840,646067),
(45860,644715),
(463487,530404),
(398164,536654),
(894483,504959),
(619415,518874),
(966306,502129),
(990922,501212),
(835756,507474),
(548881,523618),
(453578,531282),
(474993,529410),
(80085,612879),
(737091,512193),
(50789,638638),
(979768,501620),
(792018,509483),
(665001,516122),
(86552,608694),
(462772,530469),
(589233,520821),
(891694,505072),
(592605,520594),
(209645,564741),
(42531,649269),
(554376,523226),
(803814,508929),
(334157,544042),
(175836,572970),
(868379,506051),
(658166,516520),
(278203,551995),
(966198,502126),
(627162,518387),
(296774,549165),
(311803,547027),
(843797,507118),
(702304,514032),
(563875,522553),
(33103,664910),
(191932,568841),
(543514,524006),
(506835,526794),
(868368,506052),
(847025,506971),
(678623,515342),
(876139,505726),
(571997,521984),
(598632,520198),
(213590,563892),
(625404,518497),
(726508,512738),
(689426,514738),
(332495,544264),
(411366,535302),
(242546,558110),
(315209,546555),
(797544,509219),
(93889,604371),
(858879,506454),
(124906,589666),
(449072,531693),
(235960,559345),
(642403,517454),
(720567,513047),
(705534,513858),
(603692,519870),
(488137,528302),
(157370,578285),
(63515,625730),
(666326,516041),
(619226,518883),
(443613,532186),
(597717,520257),
(96225,603069),
(86940,608450),
(40725,651929),
(460976,530625),
(268875,553508),
(270671,553214),
(363254,540500),
(384248,538137),
(762889,510892),
(377941,538833),
(278878,551890),
(176615,572755),
(860008,506412),
(944392,502967),
(608395,519571),
(225283,561450),
(45095,645728),
(333798,544090),
(625733,518476),
(995584,501037),
(506135,526853),
(238050,558952),
(557943,522972),
(530978,524938),
(634244,517949),
(177168,572616),
(85200,609541),
(953043,502630),
(523661,525484),
(999295,500902),
(840803,507246),
(961490,502312),
(471747,529685),
(380705,538523),
(911180,504275),
(334149,544046),
(478992,529065),
(325789,545133),
(335884,543826),
(426976,533760),
(749007,511582),
(667067,516000),
(607586,519623),
(674054,515599),
(188534,569675),
(565185,522464),
(172090,573988),
(87592,608052),
(907432,504424),
(8912,760841),
(928318,503590),
(757917,511138),
(718693,513153),
(315141,546566),
(728326,512645),
(353492,541647),
(638429,517695),
(628892,518280),
(877286,505672),
(620895,518778),
(385878,537959),
(423311,534113),
(633501,517997),
(884833,505360),
(883402,505416),
(999665,500894),
(708395,513697),
(548142,523667),
(756491,511205),
(987352,501340),
(766520,510705),
(591775,520647),
(833758,507563),
(843890,507108),
(925551,503698),
(74816,616598),
(646942,517187),
(354923,541481),
(256291,555638),
(634470,517942),
(930904,503494),
(134221,586071),
(282663,551304),
(986070,501394),
(123636,590176),
(123678,590164),
(481717,528841),
(423076,534137),
(866246,506145),
(93313,604697),
(783632,509880),
(317066,546304),
(502977,527103),
(141272,583545),
(71708,618938),
(617748,518975),
(581190,521362),
(193824,568382),
(682368,515131),
(352956,541712),
(351375,541905),
(505362,526909),
(905165,504518),
(128645,588188),
(267143,553787),
(158409,577965),
(482776,528754),
(628896,518282),
(485233,528547),
(563606,522574),
(111001,595655),
(115920,593445),
(365510,540237),
(959724,502374),
(938763,503184),
(930044,503520),
(970959,501956),
(913658,504176),
(68117,621790),
(989729,501253),
(567697,522288),
(820427,508163),
(54236,634794),
(291557,549938),
(124961,589646),
(403177,536130),
(405421,535899),
(410233,535417),
(815111,508403),
(213176,563974),
(83099,610879),
(998588,500934),
(513640,526263),
(129817,587733),
(1820,921851),
(287584,550539),
(299160,548820),
(860621,506386),
(529258,525059),
(586297,521017),
(953406,502616),
(441234,532410),
(986217,501386),
(781938,509957),
(461247,530595),
(735424,512277),
(146623,581722),
(839838,507288),
(510667,526494),
(935085,503327),
(737523,512167),
(303455,548204),
(992779,501145),
(60240,628739),
(939095,503174),
(794368,509370),
(501825,527189),
(459028,530798),
(884641,505363),
(512287,526364),
(835165,507499),
(307723,547590),
(160587,577304),
(735043,512300),
(493289,527887),
(110717,595785),
(306480,547772),
(318593,546089),
(179810,571911),
(200531,566799),
(314999,546580),
(197020,567622),
(301465,548487),
(237808,559000),
(131944,586923),
(882527,505449),
(468117,530003),
(711319,513541),
(156240,578628),
(965452,502162),
(992756,501148),
(437959,532715),
(739938,512046),
(614249,519196),
(391496,537356),
(62746,626418),
(688215,514806),
(75501,616091),
(883573,505412),
(558824,522910),
(759371,511061),
(173913,573489),
(891351,505089),
(727464,512693),
(164833,576051),
(812317,508529),
(540320,524243),
(698061,514257),
(69149,620952),
(471673,529694),
(159092,577753),
(428134,533653),
(89997,606608),
(711061,513557),
(779403,510081),
(203327,566155),
(798176,509187),
(667688,515963),
(636120,517833),
(137410,584913),
(217615,563034),
(556887,523038),
(667229,515991),
(672276,515708),
(325361,545187),
(172115,573985),
(13846,725685)
] | StarcoderdataPython |
345148 | <reponame>pushpendradahiya/pegasus<gh_stars>0
# Copyright 2020 The PEGASUS Authors..
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for working with tf estimators."""
import collections
import re
from absl import logging
from pegasus.ops import public_parsing_ops
from tensor2tensor.utils import adafactor
import tensorflow as tf
from tensorflow import summary as contrib_summary
from tensorflow.compat.v1.estimator import tpu as contrib_tpu
from tensorflow.compat.v1.estimator.tpu import TPUConfig
from tensorflow.compat.v1.estimator.tpu import TPUEstimator
from tensorflow.compat.v1.train import Optimizer as tpu_optimizer
from tensorflow.compat.v1.estimator.tpu import RunConfig
def create_estimator(master,
                     model_dir,
                     use_tpu,
                     iterations_per_loop,
                     num_shards,
                     model_params,
                     include_features_in_predictions=True,
                     decode_keys=(),
                     train_init_checkpoint=None,
                     train_warmup_steps=10000,
                     save_checkpoints_steps=1000,
                     keep_checkpoint_max=5):
  """Build a ``TPUEstimator`` wired up with the PEGASUS model function.

  The same global batch size (per-core batch size times ``num_shards``) is
  used for train, eval and predict.
  """
  session_config = tf.compat.v1.ConfigProto(
      allow_soft_placement=True, log_device_placement=False)
  run_config = RunConfig(
      master=master,
      model_dir=model_dir,
      session_config=session_config,
      tpu_config=TPUConfig(iterations_per_loop),
      save_checkpoints_steps=save_checkpoints_steps,
      keep_checkpoint_max=keep_checkpoint_max)
  model_fn = _estimator_model_fn(use_tpu, model_params, model_dir,
                                 include_features_in_predictions, decode_keys,
                                 train_init_checkpoint, train_warmup_steps)
  global_batch_size = model_params.batch_size * num_shards
  return TPUEstimator(
      model_fn=model_fn,
      use_tpu=use_tpu,
      train_batch_size=global_batch_size,
      eval_batch_size=global_batch_size,
      predict_batch_size=global_batch_size,
      config=run_config)
def _estimator_model_fn(use_tpu, model_params, model_dir,
                        include_features_in_predictions, decode_keys,
                        train_init_checkpoint, train_warmup_steps):
  """Returns an estimator model function.

  Args:
    use_tpu: whether to run on TPU (wraps the optimizer and enables bfloat16).
    model_params: params object providing the model, batch size, vocab, etc.
    model_dir: directory where learning-rate summaries are written.
    include_features_in_predictions: if True, echo input features in the
      prediction dict.
    decode_keys: prediction keys whose raw ids are decoded into strings.
    train_init_checkpoint: optional checkpoint to warm-start training from.
    train_warmup_steps: linear learning-rate warmup steps when warm-starting.
  """

  def model_fn(features, labels, mode, config, params):
    """Estimator model function."""
    # The Estimator API requires these arguments even when they are unused.
    del labels
    del config
    del params

    tf.compat.v1.get_variable_scope().set_initializer(
        tf.compat.v1.variance_scaling_initializer(
            1.0, mode="fan_avg", distribution="uniform"))

    if mode == tf.estimator.ModeKeys.PREDICT:
      predictions = model_params.estimator_prediction_fn(features)

      if include_features_in_predictions:
        predictions.update(features)

      if decode_keys:
        # Decode the raw ids into strings in prediction.
        def decode_host_call(tensor_dict):
          for key in decode_keys:
            predictions[key] = public_parsing_ops.decode(
                tensor_dict[key], model_params.vocab_filename,
                model_params.encoder_type)
          return tensor_dict

        contrib_tpu.outside_compilation(decode_host_call, predictions)
      return contrib_tpu.TPUEstimatorSpec(mode=mode, predictions=predictions)

    training = mode == tf.estimator.ModeKeys.TRAIN
    if use_tpu and model_params.use_bfloat16:
      with contrib_tpu.bfloat16_scope():
        loss, outputs = model_params.model()(features, training)
    else:
      loss, outputs = model_params.model()(features, training)

    # TPU requires outputs all have batch dimension and doesn't handle scalar.
    # Tile all scalars to 1 dimension vector.
    outputs = _tile_scalar_to_batch_size(outputs, model_params.batch_size)

    if mode == tf.estimator.ModeKeys.TRAIN:
      init_lr = model_params.learning_rate
      global_step = tf.compat.v1.train.get_global_step()
      # Inverse-sqrt learning-rate decay, floored at step 10000.
      lr = init_lr / 0.01 * tf.math.rsqrt(
          tf.maximum(tf.cast(global_step, dtype=tf.float32), 10000))
      if train_init_checkpoint:
        # Linear warmup when fine-tuning from a checkpoint.
        lr = tf.minimum(
            tf.cast(global_step + 1, dtype=tf.float32) / train_warmup_steps *
            init_lr, lr)

      optimizer = adafactor.AdafactorOptimizer(
          learning_rate=lr,
          decay_rate=adafactor.adafactor_decay_rate_pow(0.8),
          beta1=0.0)
      if use_tpu:
        # BUG FIX: `tpu_optimizer` is an alias of tf.compat.v1.train.Optimizer,
        # which has no CrossShardOptimizer attribute; the cross-shard wrapper
        # lives in the TPU module.
        optimizer = tf.compat.v1.tpu.CrossShardOptimizer(optimizer)
      train_op = optimizer.minimize(loss, global_step=global_step)

      # BUG FIX: `tpu_estimator` was never imported (NameError at runtime);
      # TPUEstimatorSpec is available on the imported `contrib_tpu` module.
      return contrib_tpu.TPUEstimatorSpec(
          mode=mode,
          loss=loss,
          train_op=train_op,
          scaffold_fn=_load_vars_from_checkpoint(use_tpu,
                                                 train_init_checkpoint),
          host_call=add_scalars_to_summary(model_dir, {"learning_rate": lr}))
    if mode == tf.estimator.ModeKeys.EVAL:
      eval_metrics = model_params.estimator_eval_metrics_fn(features, outputs)
      return contrib_tpu.TPUEstimatorSpec(
          mode=mode, loss=loss, eval_metrics=eval_metrics)

  return model_fn
def _tile_scalar_to_batch_size(tensor_dict, batch_size):
  """Return a copy of tensor_dict where every scalar is tiled to a vector.

  TPU outfeed requires every tensor to carry a batch dimension, so rank-0
  tensors are reshaped to [1] and tiled to [batch_size]; all other tensors
  pass through unchanged.
  """
  # bool(tf.constant(1).shape) = True, which is inconsistent with Python's
  # default length test, so compare the rank explicitly.
  tiled = {}
  for key, tensor in tensor_dict.items():
    if len(tensor.shape) == 0:  # pylint: disable=g-explicit-length-test
      logging.info("Expand scalar to vector: %s", key)
      tensor = tf.tile(tf.reshape(tensor, [1]), [batch_size])
    tiled[key] = tensor
  return tiled
def add_scalars_to_summary(summary_dir, scalar_tensors_dict):
  """Create a TPU host_call that writes the given scalars as summaries."""
  # All tensors outfed from TPU must preserve a batch-size dimension, so each
  # scalar is promoted to a length-1 vector before being handed to the host.
  batched = {
      name: tf.reshape(tensor, [1])
      for name, tensor in scalar_tensors_dict.items()
  }

  def host_call_fn(**tensors):
    writer = contrib_summary.create_file_writer(summary_dir, max_queue=1000)
    with writer.as_default(), contrib_summary.always_record_summaries():
      for name, value in tensors.items():
        contrib_summary.scalar(name, tf.reduce_mean(input_tensor=value))
    return contrib_summary.all_summary_ops()

  return host_call_fn, batched
def _load_vars_from_checkpoint(use_tpu, init_checkpoint):
  """Load variables from an initial checkpoint.

  Args:
    use_tpu: bool whether to use tpu.
    init_checkpoint: path of checkpoint containing variables to be initialized.

  Returns:
    scaffold_fn: The scaffold_fn used by tpu estimator spec. If use_tpu=False,
      this is set to None and the variables are initialized eagerly here.

  Raises:
    ValueError: if no checkpoint variable matches a trainable variable.
  """
  # Nothing to warm-start from; the estimator will use its default init.
  if not init_checkpoint:
    return None
  tvars = tf.compat.v1.trainable_variables()
  (assignment_map,
   initialized_variable_names) = get_assignment_map_from_checkpoint(
       tvars, init_checkpoint)
  if not initialized_variable_names:
    raise ValueError("No matching variables in init_checkpoint. "
                     "Double check the naming in both models.")
  scaffold_fn = None
  if use_tpu:
    # On TPU the init must happen inside the scaffold so it runs on the host
    # at the right time; the closure captures checkpoint path and mapping.
    def tpu_scaffold():
      tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map)
      return tf.compat.v1.train.Scaffold()
    scaffold_fn = tpu_scaffold
  else:
    # Off TPU we can initialize immediately.
    tf.compat.v1.train.init_from_checkpoint(init_checkpoint, assignment_map)
  # Log every trainable variable, flagging those restored from the checkpoint.
  logging.info("**** Trainable Variables ****")
  for var in tvars:
    init_string = ""
    if var.name in initialized_variable_names:
      init_string = ", *INIT_FROM_CKPT*"
    logging.info(" name = %s, shape = %s%s", var.name, var.shape, init_string)
  return scaffold_fn
def get_assignment_map_from_checkpoint(tvars, init_checkpoint):
  """Compute the intersection of current variables and checkpoint variables.

  Args:
    tvars: list of trainable variables in the current graph.
    init_checkpoint: path of the checkpoint whose variables are matched by
      name against the graph variables.

  Returns:
    A pair (assignment_map, initialized_variable_names) where assignment_map
    maps checkpoint variable names to graph variable names (identity mapping
    for every matched name) and initialized_variable_names records matched
    names both with and without the ":0" suffix.
  """
  # Strip the ":<output index>" suffix so graph names line up with the
  # checkpoint's naming.
  name_to_variable = collections.OrderedDict()
  for var in tvars:
    name = var.name
    m = re.match("^(.*):\\d+$", name)
    if m is not None:
      name = m.group(1)
    name_to_variable[name] = var

  # NOTE: the original code assigned `assignment_map = {}` and immediately
  # overwrote it with an OrderedDict; the dead assignment has been removed.
  assignment_map = collections.OrderedDict()
  initialized_variable_names = {}
  for name, _ in tf.train.list_variables(init_checkpoint):
    if name not in name_to_variable:
      continue
    assignment_map[name] = name
    initialized_variable_names[name] = 1
    initialized_variable_names[name + ":0"] = 1
  return (assignment_map, initialized_variable_names)
| StarcoderdataPython |
12808379 | from base64 import b64decode, b64encode
from json import dumps
from aiohttp import web
from service.handler import LweHandler
from service.rlwe import RLWE
from service.db import DB
db = DB()
algo = RLWE(16, 929)
handler = LweHandler(algo, DB())
app = web.Application()
app.add_routes([
web.get('/get_pub_key', handler.get_pub_key),
web.post('/sign', handler.sign),
web.post('/verify', handler.verify)
]) | StarcoderdataPython |
306389 | # Palindrome Permutation
# Given a string, write a function to check if it is a permutation of a palindrome. A palindrome is a word or phrase that is the same forwards and backwards. A permutation is a rearrangement of letters. The palindrome does not need to be limited to just dictionary words. You can ignore casing and non-letter characters.
| StarcoderdataPython |
5114866 | from math import factorial
# Read n and k, then compute the binomial coefficient C(n, k) = n!/(k!(n-k)!).
n, k = (int(token) for token in input().split())
numerator = factorial(n)
denominator = factorial(k) * factorial(n - k)
coefficient = numerator // denominator
print(coefficient) | StarcoderdataPython |
def gene_position(gff_file, get_dict=True):
    """Parse gene locations out of a GFF annotation file.

    Input is a .gff file downloaded from
    https://www.ensembl.org/Saccharomyces_cerevisiae/Info/Index

    Every feature line whose third column is "gene" is collected. For each
    gene the chromosome, start position, end position (in basepairs) and
    reading orientation are extracted. The orientation is indicated with a
    '+' (forward reading) or '-' (reverse reading).

    Parameters
    ----------
    gff_file : str
        The file path of the .gff file.
    get_dict : bool, optional
        If True (default), return a dictionary keyed by gene name whose
        values are [chromosome, start, end, orientation]. If False, return
        five parallel lists instead.

    Returns
    -------
    dict or tuple
        With ``get_dict=True`` a dict mapping gene name to
        [chromosome, start, end, orientation]; otherwise the tuple
        (gene_name, gene_chr, gene_start, gene_end, gene_orien) of lists.
    """
    # Parse the file once; both output shapes are built from these records.
    # (The original implementation duplicated the whole parsing loop.)
    records = []
    with open(gff_file) as handle:
        for line in handle:
            fields = line.split("\t")
            # Gene feature lines are tab-separated; column 3 holds the type
            # and column 9 the attributes, e.g. "ID=gene:YAL001C;...".
            if len(fields) > 2 and fields[2] == "gene":
                name = fields[8].split(";")[0].split(":")[1]
                records.append(
                    (name, fields[0], int(fields[3]), int(fields[4]),
                     fields[6]))

    if get_dict:
        return {
            name: [chrom, start, end, orientation]
            for name, chrom, start, end, orientation in records
        }

    gene_name = []
    gene_chr = []
    gene_start = []
    gene_end = []
    gene_orien = []
    for name, chrom, start, end, orientation in records:
        gene_name.append(name)
        gene_chr.append(chrom)
        gene_start.append(start)
        gene_end.append(end)
        gene_orien.append(orientation)
    return (gene_name, gene_chr, gene_start, gene_end, gene_orien)
| StarcoderdataPython |
9795004 | <filename>biopen_smoothing_model_code.py
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 21 12:44:21 2020
@author: Vishnu
"""
import glob
from skimage import io
from scipy import ndimage
import numpy as np
# Imports all images (glucose and buffer sets for NAD(P)H and Fluo-4) into
# arrays. NOTE(review): the comment mentioned "rhodamine" but only glucose and
# buffer image sets are loaded below — confirm against the experiment design.
NADH_g_flist = glob.glob('E:/Rizzo Lab/Experiments/Biopen/INS1_Clusters_Carb_NADH_07_24_20/Unsmoothed/NADH_Images/*.tif')
im_NADH_g = np.array([np.array(io.imread(fname)) for fname in NADH_g_flist])
NADH_b_flist = glob.glob('E:/Rizzo Lab/Experiments/Biopen/INS1_Clusters_Carb_NADH_07_24_20/Unsmoothed/Buffer_Images/*.tif')
im_NADH_b = np.array([np.array(io.imread(fname)) for fname in NADH_b_flist])
Fluo4_g_flist = glob.glob('E:/Rizzo Lab/Experiments/Biopen/Biopen_Verap_NADH_Fluo4_08_05_20/Unsmoothed/Fluo4_Glucose_Images/*.tif')
im_Fluo4_g = np.array([np.array(io.imread(fname)) for fname in Fluo4_g_flist])
Fluo4_b_flist = glob.glob('E:/Rizzo Lab/Experiments/Biopen/Biopen_Verap_NADH_Fluo4_08_05_20/Unsmoothed/Fluo4_Buffer_Images/*.tif')
im_Fluo4_b = np.array([np.array(io.imread(fname)) for fname in Fluo4_b_flist])
# Array of islet identifiers for each image; index i of every image array
# corresponds to islet_id[i] in the output filenames.
islet_id = ['I1', 'I2', 'I3', 'I4', 'I5', 'I6', 'I7', 'I8', 'I9', 'I10', 'I11', 'I12', 'I13', 'I14', 'I15', 'I16']
# Sets the number of iterations to the number of NAD(P)H glucose images; the
# other arrays are indexed in lockstep, so they are assumed to be the same
# length — TODO confirm all four globs return 16 files.
iternum = len(im_NADH_g)
for i in range(0,iternum):
    # Smoothes all images using a 3x3 median filter, then saves each result.
    bilateral_ng = ndimage.median_filter(im_NADH_g[i],3)
    # NOTE(review): this save path points at a different experiment folder
    # ("New_analysis_GCK_islets_02_09_19") than the folder the images were
    # loaded from — verify this is intentional.
    io.imsave("E:/Rizzo Lab/Experiments/Biopen/New_analysis_GCK_islets_02_09_19/NADH_Images/"+islet_id[i]+"_glucose.tif", bilateral_ng)
    bilateral_nb = ndimage.median_filter(im_NADH_b[i],3)
    io.imsave("E:/Rizzo Lab/Experiments/Biopen/INS1_Clusters_Carb_NADH_07_24_20/Buffer_Images/"+islet_id[i]+"_buffer.tif", bilateral_nb)
    bilateral_fg = ndimage.median_filter(im_Fluo4_g[i],3)
    io.imsave("E:/Rizzo Lab/Experiments/Biopen/Biopen_Verap_NADH_Fluo4_08_05_20/Fluo4_Glucose_Images/"+islet_id[i]+"_glucose_Fluo4.tif", bilateral_fg)
    bilateral_fb = ndimage.median_filter(im_Fluo4_b[i],3)
    io.imsave("E:/Rizzo Lab/Experiments/Biopen/Biopen_Verap_NADH_Fluo4_08_05_20/Fluo4_Buffer_Images/"+islet_id[i]+"_buffer_Fluo4.tif", bilateral_fb)
6675454 | #
# Copyright 2021 Red Hat Inc.
# SPDX-License-Identifier: Apache-2.0
#
"""Test Cache of worker tasks currently running."""
import logging
from unittest.mock import patch
from django.core.cache import cache
from django.test.utils import override_settings
from masu.processor.worker_cache import WorkerCache
from masu.test import MasuTestCase
LOG = logging.getLogger(__name__)
class WorkerCacheTest(MasuTestCase):
    """Test class for the worker cache."""

    def setUp(self):
        """Set up the test with an empty shared cache."""
        super().setUp()
        cache.clear()

    def tearDown(self):
        """Tear down the test, clearing the shared cache."""
        super().tearDown()
        cache.clear()

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_worker_cache(self, mock_inspect):
        """Test the worker_cache property starts out empty."""
        _worker_cache = WorkerCache().worker_cache
        self.assertEqual(_worker_cache, [])

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_invalidate_host(self, mock_inspect):
        """Test that a host's cache is invalidated."""
        task_list = [1, 2, 3]
        _cache = WorkerCache()
        for task in task_list:
            _cache.add_task_to_cache(task)
        self.assertEqual(_cache.worker_cache, task_list)
        _cache.invalidate_host()
        self.assertEqual(_cache.worker_cache, [])

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_add_task_to_cache(self, mock_inspect):
        """Test that a single task is added."""
        task_key = "task_key"
        _cache = WorkerCache()
        self.assertEqual(_cache.worker_cache, [])
        _cache.add_task_to_cache(task_key)
        self.assertEqual(_cache.worker_cache, [task_key])

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_remove_task_from_cache(self, mock_inspect):
        """Test that a task is removed."""
        task_key = "task_key"
        _cache = WorkerCache()
        _cache.add_task_to_cache(task_key)
        self.assertEqual(_cache.worker_cache, [task_key])
        _cache.remove_task_from_cache(task_key)
        self.assertEqual(_cache.worker_cache, [])

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_remove_task_from_cache_value_not_in_cache(self, mock_inspect):
        """Test that removing an unknown task leaves the cache unchanged."""
        task_list = [1, 2, 3, 4]
        _cache = WorkerCache()
        for task in task_list:
            _cache.add_task_to_cache(task)
        self.assertEqual(_cache.worker_cache, task_list)
        _cache.remove_task_from_cache(5)
        self.assertEqual(_cache.worker_cache, task_list)

    @override_settings(HOSTNAME="kokuworker")
    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_get_all_running_tasks(self, mock_inspect):
        """Test that multiple hosts' task lists are combined."""
        second_host = "koku-worker-2-sdfsdff"
        first_host_list = [1, 2, 3]
        second_host_list = [4, 5, 6]
        expected = first_host_list + second_host_list
        mock_worker_list = {"celery@kokuworker": "", f"celery@{second_host}": ""}
        mock_inspect.reserved.return_value = mock_worker_list
        _cache = WorkerCache()
        for task in first_host_list:
            _cache.add_task_to_cache(task)
        with override_settings(HOSTNAME=second_host):
            _cache = WorkerCache()
            for task in second_host_list:
                _cache.add_task_to_cache(task)
        self.assertEqual(sorted(_cache.get_all_running_tasks()), sorted(expected))

    @override_settings(HOSTNAME="kokuworker")
    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_task_is_running_true(self, mock_inspect):
        """Test that a task is running."""
        mock_worker_list = {"celery@kokuworker": ""}
        mock_inspect.reserved.return_value = mock_worker_list
        task_list = [1, 2, 3]
        _cache = WorkerCache()
        for task in task_list:
            _cache.add_task_to_cache(task)
        self.assertTrue(_cache.task_is_running(1))

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_task_is_running_false(self, mock_inspect):
        """Test that a task is not running."""
        task_list = [1, 2, 3]
        _cache = WorkerCache()
        for task in task_list:
            _cache.add_task_to_cache(task)
        self.assertFalse(_cache.task_is_running(4))

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_active_worker_property(self, mock_inspect):
        """Test the active_workers property strips the "celery@" prefix."""
        test_matrix = [
            {"hostname": "celery@kokuworker", "expected_workers": ["kokuworker"]},
            {"hostname": "kokuworker", "expected_workers": ["kokuworker"]},
            {"hostname": "kokuworker&63)", "expected_workers": ["kokuworker&63)"]},
            {"hostname": "koku@worker&63)", "expected_workers": ["worker&63)"]},
            {"hostname": "", "expected_workers": [""]},
        ]
        for test in test_matrix:
            with self.subTest(test=test):
                mock_worker_list = {test.get("hostname"): ""}
                mock_inspect.reserved.return_value = mock_worker_list
                _cache = WorkerCache()
                self.assertEqual(_cache.active_workers, test.get("expected_workers"))

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_active_worker_property_instance_not_available(self, mock_inspect):
        """Test the active_workers property when celery inspect is not available."""
        mock_inspect.reserved.return_value = None
        _cache = WorkerCache()
        self.assertEqual(_cache.active_workers, [])

    @override_settings(HOSTNAME="kokuworker")
    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_remove_offline_worker_keys(self, mock_inspect):
        """Test that an offline worker's tasks are dropped from the cache."""
        second_host = "kokuworker2"
        first_host_list = [1, 2, 3]
        second_host_list = [4, 5, 6]
        all_work_list = first_host_list + second_host_list
        mock_worker_list = {"celery@kokuworker": "", f"celery@{second_host}": ""}
        mock_inspect.reserved.return_value = mock_worker_list
        _cache = WorkerCache()
        for task in first_host_list:
            _cache.add_task_to_cache(task)
        with override_settings(HOSTNAME=second_host):
            _cache = WorkerCache()
            for task in second_host_list:
                _cache.add_task_to_cache(task)
        self.assertEqual(sorted(_cache.get_all_running_tasks()), sorted(all_work_list))
        # kokuworker2 goes offline.
        # BUG FIX: the original called `mock_inspect.reset()`, which is not a
        # Mock API -- it silently created and invoked a child mock, doing
        # nothing. `reset_mock()` is the real reset call.
        mock_inspect.reset_mock()
        mock_worker_list = {"celery@kokuworker": ""}
        mock_inspect.reserved.return_value = mock_worker_list
        _cache.remove_offline_worker_keys()
        self.assertEqual(sorted(_cache.get_all_running_tasks()), sorted(first_host_list))

    @patch("masu.processor.worker_cache.CELERY_INSPECT")
    def test_single_task_caching(self, mock_inspect):
        """Test that single task cache creates and deletes a cache entry."""
        cache = WorkerCache()
        task_name = "test_task"
        task_args = ["schema1", "OCP"]
        self.assertFalse(cache.single_task_is_running(task_name, task_args))
        cache.lock_single_task(task_name, task_args)
        self.assertTrue(cache.single_task_is_running(task_name, task_args))
        cache.release_single_task(task_name, task_args)
        self.assertFalse(cache.single_task_is_running(task_name, task_args))
| StarcoderdataPython |
3511447 | '''
Helper functions for generating HTML visualizations.
Run this from the folder where you intend to save the HTML page, so that
the relative paths in the HTML file are correct and PIL can find the images on disk.
Requires local images, but the output can also be hosted from a web server
(e.g. Apache) as long as the relative paths remain the same on the server.
'''
from PIL import Image
LMSIZE = 8
def landmarks_on_crop(landmarks, x, y, width, height, imname, maxheight=None, color='green'):
    '''
    Generates an HTML string that shows landmarks within a given cropped image.

    landmarks: iterable of (x, y) or (x, y, color) tuples in ORIGINAL image
        coordinates; 2-tuples are filled in with the default `color`.
    x, y, width, height: bounding box of the crop within the original image.
    imname: path to the image file (must be readable by PIL).
    maxheight: output height in pixels; None keeps the crop's own height.

    When two landmarked crops are put next to each other, they will be inline.
    To make each crop its own line, wrap it in a div.
    '''
    html = _bbcrop(x, y, width, height, imname, maxheight)
    if not maxheight:
        maxheight = height
    # Landmarks are given in original-image coordinates, so they must be
    # shifted into the crop and scaled by the same resize ratio as the image.
    # (BUG FIX: removed a stray debug `print(ratio)` left in the original.)
    ratio = float(maxheight) / height
    # Normalize bare (x, y) pairs into (x, y, color) triples.
    if len(landmarks) > 0 and len(landmarks[0]) != 3:
        landmarks = [ lm + (color,) for lm in landmarks ]
    for lmx, lmy, col in landmarks:
        # Offset by half the marker size so the square is centered on the point.
        html += landmark((lmx-x)*ratio - (LMSIZE/2), (lmy-y)*ratio - (LMSIZE/2), col)
    html += '</div>'
    return html
def landmark_img(x, y, img, color='green'):
    '''Return HTML showing the full image `img` with one landmark at (x, y).'''
    pieces = [
        '<div style="position:relative;">',
        '<img src="%s" />' % img,
        landmark(x, y, color),
        '</div>',
    ]
    return ''.join(pieces)
def landmark(x, y, color='green'):
    '''Return an absolutely positioned LMSIZE-square div marking one landmark.'''
    style = ('position:absolute; top:{y}px; left:{x}px; color:{c}; '
             'background-color:{c}; width:{s}px; height:{s}px;').format(
                 y=y, x=x, c=color, s=LMSIZE)
    return '<div style="{0}"></div>'.format(style)
def crop_to_bb(x, y, width, height, imname, maxheight=None):
    '''
    Crop `imname` to the (x, y, width, height) bounding box and resize the
    crop to `maxheight` pixels tall. A maxheight of None keeps the original
    size (default). Adjacent crops render inline; wrap each in a div to give
    every crop its own line.
    '''
    return _bbcrop(x, y, width, height, imname, maxheight) + '</div>'
def _bbcrop(x, y, width, height, imname, maxheight):
    '''
    Build the (unclosed) HTML for a crop of `imname`: an outer div that clips
    to the bounding box and an inner img shifted so the box is visible.
    Callers must append the closing </div> themselves.
    '''
    # Image is opened only to learn its size; PIL does not resize anything.
    img = Image.open(imname)
    imwidth, imheight = img.size  # imwidth is unused but documents the unpack
    if not maxheight:
        maxheight = height
    # Scale factor applied uniformly to the crop box and the full image.
    ratio = float(maxheight) / height
    # note for future me:
    # image is cropped with div width/height + overflow:hidden,
    # resized with img height,
    # and positioned with img margin
    html = '<div style="overflow:hidden; display:inline-block; position:relative; width:%ipx; height:%ipx;">' % (width*ratio, maxheight)
    html += '<img src="%s" style="height:%ipx; margin:-%ipx 0 0 -%ipx;"/>' % (imname, imheight*ratio, y*ratio, x*ratio)
    return html
def bbs_for_image(imname, bbs, maxheight=None, colors=None):
    '''
    Generates an HTML string for an image with bounding boxes drawn on it.

    bbs: iterable of (x, y, width, height) bounding box tuples.
    colors: optional parallel list of CSS colors (defaults to all green).
    maxheight: rendered image height in pixels; None keeps the true height.
    '''
    img = Image.open(imname)
    imwidth, imheight = img.size
    if not maxheight:
        maxheight = imheight
    ratio = float(maxheight) / imheight
    if not colors:
        colors = ['green'] * len(bbs)
    pieces = [
        '<div style="position:relative">',
        '<img src="%s" style="height:%ipx" />' % (imname, maxheight),
    ]
    for color, box in zip(colors, bbs):
        pieces.append(bb(*box, ratio=ratio, color=color))
    pieces.append('</div>')
    return '\n'.join(pieces)
def bb(x, y, width, height, ratio=1.0, color='green'):
    '''
    Return an absolutely positioned HTML div outlining one bounding box,
    with every coordinate scaled by `ratio`.
    '''
    left = int(x * ratio)
    top = int(y * ratio)
    scaled_w = int(width * ratio)
    scaled_h = int(height * ratio)
    return ('<div style="position:absolute; border:2px solid {c}; color:{c}; '
            'left:{l}px; top:{t}px; width:{w}px; height:{h}px;"></div>').format(
                c=color, l=left, t=top, w=scaled_w, h=scaled_h)
| StarcoderdataPython |
1755199 | #!/usr/bin/env python
import argparse
import time
import textwrap
import pytricia
from _pybgpstream import BGPStream, BGPRecord, BGPElem
import yaml
def main():
    """Watch live BGP updates and print any announcement whose origin AS
    differs from the expected origin listed in the YAML file (all matching
    updates are printed when --debug is set)."""
    parser = argparse.ArgumentParser()
    parser.formatter_class = argparse.RawDescriptionHelpFormatter
    parser.description = textwrap.dedent('''\
        a proof-of-concept utility for watching updates from BGPstream
        and then printing out if an unexpected update is heard
        ''')
    parser.epilog = textwrap.dedent('''\
        Example: watch these route announcements
        %(prog)s -f routes.yaml ''')
    required = parser.add_argument_group('required arguments')
    required.add_argument("-f", "--file", required=True, help="yaml file of prefixes to origin asn")
    parser.add_argument("-d", "--debug", action='store_true', help="print out all updates containing these prefixes")
    args = parser.parse_args()
    # Patricia trie keyed by prefix; value is the expected origin ASN.
    routes = pytricia.PyTricia(48) # longest reasonable pfx in dfz
    with open(args.file, 'r') as f:
        routesfile = yaml.safe_load(f)
        for pfx in routesfile:
            routes[pfx] = routesfile[pfx]
    # Listen to BGP updates from now (interval end 0 = open-ended) in live mode.
    stream = BGPStream()
    rec = BGPRecord()
    stream.add_filter('record-type', 'updates')
    stream.add_interval_filter(int(time.time()),0)
    stream.set_live_mode()
    stream.start()
    # Each record may carry multiple elements; walk them all.
    while(stream.get_next_record(rec)):
        if rec.status == 'valid':
            elem = rec.get_next_elem()
            while(elem):
                if 'as-path' in elem.fields:
                    path = elem.fields['as-path'].split()
                    prefix = elem.fields['prefix']
                    # path[-1] is the origin ASN; report when it does not
                    # match the expected origin (or always under --debug).
                    if prefix in routes and (routes[prefix] != path[-1] or args.debug):
                        print('Heard prefix:', elem.fields['prefix'], 'AS-PATH:', elem.fields['as-path'], ' Found by project:', rec.project, 'collector:', rec.collector, 'type:', rec.type, 'at time:', rec.time, 'Type:', elem.type, 'Peer:', elem.peer_address, 'AS', elem.peer_asn)
                elem = rec.get_next_elem()

if __name__ == "__main__":
    main()
| StarcoderdataPython |
3558744 | <reponame>agustin380/book-rest-example<gh_stars>0
import click
from app.api import app
from app import settings
@click.group()
def cmds():
    """Root command group for the application's CLI."""
    pass


@cmds.command()
@click.option('--port', default=5000, type=int,
              help='Set server port')
@click.option('--host', default='0.0.0.0', type=str,
              help='Set server host')
@click.option('--debug', default=False,
              help='Set server debug')
def runserver(port, host, debug):
    """Start the development API server."""
    click.echo(f'Start server at: {host}:{port}')
    app.run(host=host, port=port, debug=debug)


@cmds.command()
def syncdb():
    """Create all database tables."""
    from app.models import db
    db.create_all()


@cmds.command()
def test():
    """Discover and run the unit-test suite under ``app.tests``."""
    import unittest
    suite = unittest.TestLoader().discover('app.tests')
    unittest.runner.TextTestRunner().run(suite)


if __name__ == "__main__":
    cmds()
| StarcoderdataPython |
11394193 | import datetime
import timestring
from flask import Blueprint, request, jsonify, abort, make_response
from ..models.meetup import Meetup
from ..models.user import User
from ..models.question import Question, Voting, Comment
from ..utils.base_vals import BaseValidation, token_required
from ..utils.question_vals import QuestionValid
q_blue = Blueprint("que_bl", __name__)
@q_blue.route('/meetups/<meet_id>/questions', methods=["POST"])
@token_required
def ask_question(current_user, meet_id):
    """Post a new question to an upcoming meetup.

    Validates the JSON payload, rejects questions on missing or already
    conducted meetups and duplicate questions by the same user, then
    stores the question and echoes it back with the asker's profile.
    """
    logged_user = User.query_username(current_user)
    user_image = logged_user[-2]
    user_name = logged_user[1]
    try:
        que_data = request.get_json()
    except Exception as error:
        return jsonify({"status": 400,
                        "message": "no data was found", "error": error}), 400
    # Payload must contain all required, non-blank question fields.
    valid_que = QuestionValid(que_data)
    valid_que.check_missing_fields(valid_que.question_required)
    valid_que.check_field_values_no_whitespace(valid_que.question_required)
    body = que_data["body"]
    meet_id = QuestionValid.confirm_ids(meet_id)
    meetup = Meetup.get_meetup(meet_id, "id")
    if not meetup:
        # Fix: error message typo ("Mettup" -> "Meetup").
        return jsonify({
            "status": 404,
            "error": "Meetup with id {} not found".format(meet_id)}), 404
    meetup_date = meetup[4]
    if timestring.Date(meetup_date) < datetime.datetime.now():
        # Fix: the HTTP status code was missing here, so this error body
        # was previously delivered with a misleading 200 status.
        return jsonify({
            "status": 400,
            "error": "this meetup has already been conducted"
        }), 400
    exists = Question.get_by_(logged_user[0], "user_id", body)
    if exists:
        return jsonify({
            "status": 403,
            "error": "A similar question already exists"}), 403
    meetup = meetup[0]
    user_id = logged_user[0]
    quest_title = que_data["title"]
    quest_body = que_data["body"]
    new_question = Question([user_id, meetup, quest_title, quest_body])
    new_question.post_a_question()
    # Fix: response message typo ("succssfully" -> "successfully").
    return jsonify({"status": 201,
                    "message": "question asked successfully",
                    "data": {
                        "user": user_id,
                        "meetup": meetup,
                        "title": quest_title,
                        "body": quest_body
                    },
                    "asker": {
                        "username": user_name,
                        "image": user_image
                    }}), 201
def voting_action(current_user, quiz_id, upvote, downvote):
    """Shared implementation of the up/down-vote endpoints.

    Exactly one of *upvote*/*downvote* is expected to be 1 and the other 0.
    Aborts with 403 when the voter owns the question or repeats an identical
    vote; otherwise inserts or flips the user's vote and returns
    ``[meetup_id, title, body, [upvotes, downvotes, vote_diff]]``.
    """
    logged_user = User.query_username(current_user)
    question_id = QuestionValid.confirm_ids(quiz_id)
    user_id = logged_user[0]
    question = Voting.get_from_questions(question_id)
    meetup = question[2]
    title = question[3]
    body = question[4]
    # NOTE(review): the next three self-assignments are no-ops.
    downvote = downvote
    upvote = upvote
    user_id = user_id
    # Authors may not vote on their own questions.
    if user_id == question[1]:
        abort(make_response(jsonify({
            "status": 403,
            "message": "you cannot vote on your question",
        }), 403))
    voted_user = Voting.get_votes_by_user(logged_user[0], question_id)
    current_vote = (upvote, downvote)
    # Reject an exact repeat of a vote this user already cast.
    if current_vote in voted_user:
        abort(make_response(
            jsonify(
                {"status": 403,
                 "message": "you have already voted"}), 403))
    if not voted_user:
        # First vote by this user on this question: insert a new vote row.
        vote_list = [user_id, meetup, question_id, upvote, downvote]
        new_vote = Voting(vote_list)
        new_vote.update_to_votes()
    else:
        # The user voted before in the opposite direction: flip the stored
        # vote. NOTE(review): the ``current_votes`` arithmetic below is never
        # used afterwards (totals are re-queried from the database) --
        # presumably legacy logic; confirm before removing.
        all_upvotes = Voting.get_all_up_down_votes(question_id, "upvotes", 1)
        all_downvotes = Voting.get_all_up_down_votes(
            question_id, "downvotes", 1)
        current_votes = len(all_upvotes) - len(all_downvotes)
        if len(all_upvotes) > 0:
            first_upvoter = all_upvotes[0][1]
        if len(all_downvotes) > 0:
            first_downvoter = all_downvotes[0][1]
        if current_votes == 1 and user_id == first_upvoter:
            current_votes = current_votes - downvote - len(all_upvotes)
        elif current_votes == -1 and user_id == first_downvoter:
            current_votes = current_votes + len(all_downvotes) + upvote
        else:
            if upvote:
                current_votes = current_votes + upvote - \
                    len(all_upvotes) + len(all_downvotes)
            if downvote:
                current_votes = current_votes - downvote
        Voting.update_user_vote(user_id, question_id,
                                upvote, downvote)
    # Re-query the authoritative totals after the write.
    all_upvotes = len(Voting.get_all_up_down_votes(question_id, "upvotes", 1))
    all_downvotes = len(Voting.get_all_up_down_votes(
        question_id, "downvotes", 1))
    votes = all_upvotes - all_downvotes
    votes_data = [all_upvotes, all_downvotes, votes]
    return [meetup, title, body, votes_data]
@q_blue.route('/questions/<quiz_id>/upvote', methods=["PATCH"])
@token_required
def upvote_question(current_user, quiz_id):
    """Register an upvote on a question and return updated vote stats."""
    meetup, title, body, votes_data = voting_action(current_user, quiz_id, 1, 0)
    response = {
        "status": 201,
        "data": {
            "meetup": meetup,
            "title": title,
            "body": body
        },
        "voting_stats": {
            "votes_data": {
                "upvotes": votes_data[0],
                "downvotes": votes_data[1],
                "voteDiff": votes_data[2]
            }
        }
    }
    return jsonify(response), 201
@q_blue.route('/questions/<quiz_id>/downvote', methods=["PATCH"])
@token_required
def downvote_question(current_user, quiz_id):
    """Register a downvote on a question and return updated vote stats."""
    meetup, title, body, votes_data = voting_action(current_user, quiz_id, 0, 1)
    response = {
        "status": 201,
        "data": {
            "meetup": meetup,
            "title": title,
            "body": body
        },
        "voting_stats": {
            "votes_data": {
                "upvotes": votes_data[0],
                "downvotes": votes_data[1],
                "voteDiff": votes_data[2]
            }
        }
    }
    return jsonify(response), 201
@q_blue.route('/questions/<quiz_id>/comments', methods=["POST"])
@token_required
def comment_on_question(current_user, quiz_id):
    """Post a comment on an existing question.

    Expects a JSON body with a non-blank ``comment`` field; responds 400 on
    a missing/invalid payload and 404 when the question does not exist.
    """
    logged_user = User.query_username(current_user)
    try:
        user_comment = request.get_json()
        if not user_comment:
            abort(make_response(
                jsonify({
                    "status": 400,
                    "error": "Missing comment data"}), 400))
        # Validation helpers abort with a 400 response on failure.
        validate = BaseValidation(user_comment)
        validate.check_missing_fields(["comment"])
        validate.check_field_values_no_whitespace(["comment"])
        comment = user_comment["comment"]
    except Exception:
        abort(make_response(
            jsonify({
                "status": 400, "error": "comment data is required"}), 400))
    question_id = QuestionValid.confirm_ids(quiz_id)
    question = Voting.get_from_questions(question_id)
    if not question:
        abort(make_response(jsonify({
            "status": 404,
            "error": "Question with id {} not found".format(quiz_id)}), 404))
    user_id = logged_user[0]
    current_user = User.query_by_id(user_id)
    # Commenting user's public profile, echoed back in the response.
    user_data = {}
    user_data["username"] = current_user[0]
    user_data["image"] = current_user[1]
    questionId = question[0]
    title = question[3]
    body = question[4]
    new_commment = Comment([user_id, questionId, title, body, comment])
    new_commment.post_a_comment()
    return jsonify({"status": 201, "data": {
        "question": questionId,
        "title": title,
        "body": body,
        "comment": comment,
        "user": user_data
    }}), 201
@q_blue.route('/meetups/<meet_id>/questions', methods=["GET"])
def get_questions_for_one_meetup(meet_id):
    """Return a meetup together with all questions asked on it.

    Each question is serialized with its asker's profile, vote statistics
    and comment count.  Responds 404 when the meetup does not exist, or
    when it exists but has no questions yet (original API behaviour,
    preserved).
    """
    meet_id = BaseValidation.confirm_ids(meet_id)
    meetups = Meetup.get_all_meetups()
    current_meetup = {}
    for meetup in meetups:
        if meetup["id"] == meet_id:
            current_meetup = meetup
    all_meetup_questions = Question.get_all_by_meetup_id(meet_id)
    serialized_questions = []
    if not current_meetup:
        abort(make_response(jsonify({
            "status": 404,
            "error": "Meetup with id {} not found".format(meet_id)}), 404))
    if not all_meetup_questions:
        serialized_questions = ["NO questions asked yet"]
        return jsonify({
            "status": 404,
            "meetup": current_meetup,
            "questions": serialized_questions
        }), 404
    # Idiom fix: the previous ``enumerate`` produced an index that was
    # never used; iterate directly over the questions.
    for question in all_meetup_questions:
        current_question = {}
        current_question["id"] = question[0]
        all_upvotes = len(Voting.get_all_up_down_votes(
            question[0], "upvotes", 1))
        all_downvotes = len(Voting.get_all_up_down_votes(
            question[0], "downvotes", 1))
        comments = Comment.get_all_question_comments_number(question[0])
        current_question["user id"] = question[1]
        current_user = User.query_by_id(question[1])
        user_data = {}
        user_data["username"] = current_user[0]
        user_data["image"] = current_user[1]
        current_question["meetup id"] = question[2]
        current_question["title"] = question[3]
        current_question["body"] = question[4]
        current_question["asker"] = user_data
        current_question["votes"] = {
            "upvotes": all_upvotes,
            "downvotes": all_downvotes,
            "voteDiff": all_upvotes - all_downvotes
        }
        current_question["comments"] = comments
        serialized_questions.append(current_question)
    return jsonify({
        "status": 200,
        "meetup": current_meetup,
        "questions": serialized_questions
    }), 200
@q_blue.route('/questions/<quiz_id>/comments', methods=["GET"])
def get_all_comments_on_question(quiz_id):
    """Return a question together with all comments posted on it.

    Each comment is annotated with the commenting user's id, username and
    image.
    """
    quiz_id = BaseValidation.confirm_ids(quiz_id)
    # NOTE(review): the question and its comments are fetched with two
    # identical queries; kept as-is in case the serializers consume the
    # returned rows -- confirm before de-duplicating.
    the_question = Question.fetch_all_if_exists(
        Question, 'comments', 'question_id', quiz_id)
    comments = Question.fetch_all_if_exists(
        Question, 'comments', 'question_id', quiz_id)
    the_question = Question.serialize_a_question(the_question)
    comments = Comment.serialize_a_comment(comments)
    # Idiom fix: removed the unused ``enumerate`` index and the dead
    # ``user_id`` local from the original loop.
    for comment in comments:
        comment_user = list(User.query_by_id(comment["User"]))
        comment["user"] = {
            "id": comment["User"],
            "username": comment_user[0],
            "image": comment_user[1]
        }
    return jsonify({
        "status": 200,
        "asked_question": the_question[0],
        "comments": comments
    }), 200
| StarcoderdataPython |
3263140 | <filename>model/ksdkt.py
"""
DKT with the following expansions:
- Pre-training Regularizaiton
- Knowledge State Vector Loss
~~~~~
Author: <NAME> (@qqhann)
"""
import logging
import math
import os
import pickle
import random
import sys
import time
import warnings
from math import ceil, log
from pathlib import Path
from typing import Dict, List, Set, Tuple
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from sklearn import metrics
from torch.autograd import Variable
from torch.nn.utils.rnn import (pack_padded_sequence, pack_sequence,
pad_packed_sequence, pad_sequence)
from torch.utils.data import DataLoader, Dataset, TensorDataset
from model._base import BaseKTModel
from src.data import SOURCE_ASSIST0910_ORIG, SOURCE_ASSIST0910_SELF
from src.utils import sAsMinutes, timeSince
class KSDKT(nn.Module, BaseKTModel):
    """Expansion of the original DKT model.

    Adds a knowledge-state-vector (KSVector) loss plus optional
    reconstruction and waviness regularization terms on top of a plain
    RNN/LSTM DKT architecture (see the module docstring).
    """

    def __init__(self, config, device, bidirectional=False):
        super().__init__()
        self.config = config
        self.device = device
        self.model_name = config.model_name
        # Embedding dimension. Note ``log`` is math.log (natural log) --
        # presumably a compressed input size; TODO confirm base-2 was not
        # intended.
        self.input_size = ceil(log(2 * config.n_skills))
        self.output_size = config.n_skills
        self.batch_size = config.batch_size
        self.hidden_size = config.dkt["hidden_size"]
        self.n_layers = config.dkt["n_layers"]
        self.bidirectional = config.dkt["bidirectional"]
        self.directions = 2 if self.bidirectional else 1
        self.dropout = self.config.dkt["dropout_rate"]
        # Geometric weights (base ** t) over the sequence axis, permuted to
        # (sequence_size, 1, 1) so they broadcast over (seq, batch, skill).
        self.cumsum_weights = torch.tensor(
            [[[config.ksvector_weight_base ** i for i in range(config.sequence_size)]]],
            dtype=torch.float,
            device=device,
        ).permute(2, 1, 0)
        # self.cs_basis = torch.randn(config.n_skills * 2 + 2, self.input_size).to(device)
        # One embedding row per (skill, correctness) pair plus reserved tokens.
        self.embedding = nn.Embedding(
            config.n_skills * 2 + config.dkt["preserved_tokens"], self.input_size
        ).to(device)
        # https://pytorch.org/docs/stable/nn.html#rnn
        if self.model_name == "dkt:rnn":
            self.rnn = nn.RNN(
                self.input_size,
                self.hidden_size,
                self.n_layers,
                nonlinearity="tanh",
                dropout=self.dropout,
                bidirectional=self.bidirectional,
            )
        elif self.model_name == "ksdkt":
            self.lstm = nn.LSTM(
                self.input_size,
                self.hidden_size,
                self.n_layers,
                dropout=self.dropout,
                bidirectional=self.bidirectional,
            )
        else:
            raise ValueError("Model name not supported")
        self.fc = self.init_fc()
        # self.sigmoid = nn.Sigmoid()
        self._loss = nn.BCELoss()

    def forward(self, xseq, yseq, mask, opt=None):
        """Run one forward (and optionally backward) pass.

        xseq/yseq: (batch, seq, 2) tensors of (skill_id, correctness) pairs
        for the current and next interaction; mask marks valid (non-pad)
        positions. When *opt* is given, also backpropagates and steps it.
        Returns a dict with the combined loss and intermediate tensors.
        """
        i_batch = self.config.batch_size
        if i_batch != xseq.shape[0]:
            # warnings.warn(f'batch size mismatch {i_batch} != {xseq.shape[0]}')
            i_batch = xseq.shape[0]
        i_skill = self.config.n_skills
        i_seqen = self.config.sequence_size
        assert xseq.shape == (i_batch, i_seqen, 2), "{} != {}".format(
            xseq.shape, (i_batch, i_seqen, 2)
        )
        assert yseq.shape == (i_batch, i_seqen, 2), "{} != {}".format(
            yseq.shape, (i_batch, i_seqen, 2)
        )
        # onehot_size = i_skill * 2 + 2
        device = self.device
        # Convert to onehot; (12, 1) -> (0, 0, ..., 1, 0, ...)
        # https://pytorch.org/docs/master/nn.functional.html#one-hot
        # Encode (skill, correctness) as a single token id: skill + correct*n_skills.
        inputs = (
            torch.matmul(
                xseq.float().to(device), torch.Tensor([[1], [i_skill]]).to(device)
            )
            .long()
            .to(device)
        )
        assert inputs.shape == (i_batch, i_seqen, 1)
        # inputs = inputs.squeeze()
        # inputs = F.one_hot(inputs, num_classes=onehot_size).float()
        # yqs: one-hot of the *queried* skill for the next interaction.
        yqs = (
            torch.matmul(yseq.float().to(device), torch.Tensor([[1], [0]]).to(device))
            .long()
            .to(device)
        )
        assert yqs.shape == (i_batch, i_seqen, 1)
        yqs = yqs.squeeze(2)
        assert (
            torch.max(yqs).item() < i_skill
        ), f"{torch.max(yqs)} < {i_skill} not fulfilled"
        yqs = F.one_hot(yqs, num_classes=i_skill).float()
        assert yqs.shape == (i_batch, i_seqen, i_skill)
        # target: correctness (0/1) of the next interaction.
        target = torch.matmul(
            yseq.float().to(device), torch.Tensor([[0], [1]]).to(device)
        ).to(device)
        assert target.shape == (i_batch, i_seqen, 1)
        mask = mask.to(device)
        # Switch to time-major layout (seq, batch, ...) for the recurrent net.
        inputs = inputs.permute(1, 0, 2)
        yqs = yqs.permute(1, 0, 2)
        target = target.permute(1, 0, 2)
        inputs = self.embedding(inputs).squeeze(2)
        # NOTE(review): self.lstm is referenced unconditionally, but the
        # "dkt:rnn" branch of __init__ only defines self.rnn -- that branch
        # would raise AttributeError here. Confirm whether "dkt:rnn" is
        # still supported.
        out, _Hn = self.lstm(inputs, self.init_Hidden0(i_batch))
        out = self.fc(out)
        pred_vect = torch.sigmoid(out)  # squash into the [0, 1] interval
        assert pred_vect.shape == (
            i_seqen,
            i_batch,
            i_skill,
        ), "Unexpected shape {}".format(pred_vect.shape)
        # Probability assigned to the queried skill at each step.
        pred_prob = torch.max(pred_vect * yqs, 2)[0]
        assert pred_prob.shape == (i_seqen, i_batch), "Unexpected shape {}".format(
            pred_prob.shape
        )
        if self.config.pad == True:
            # _pred_prob = pack_padded_sequence(
            #     pred_prob.unsqueeze(2), mask, enforce_sorted=False).data
            # _target = pack_padded_sequence(
            #     target, mask, enforce_sorted=False).data
            # Drop padded positions before computing the BCE loss.
            _pred_prob = pred_prob.masked_select(mask.permute(1, 0))
            _target = target.squeeze(2).masked_select(mask.permute(1, 0))
        else:
            _pred_prob = pred_prob
            _target = target.squeeze(2)
        loss = self._loss(_pred_prob, _target)
        # print(loss, loss.shape) #=> scalar, []
        out_dic = {
            "loss": loss,
            "pred_vect": pred_vect,  # (20, 100, 124)
            "pred_prob": pred_prob,  # (20, 100)
            "filtered_pred": _pred_prob,
            "filtered_target": _target,
        }
        # KSVector L1 term -- applied unconditionally (``if True``).
        if True:
            assert yqs.shape == (
                i_seqen,
                i_batch,
                i_skill,
            ), "Expected {}, got {}".format((i_seqen, i_batch, i_skill), yqs.shape)
            assert target.shape == (i_seqen, i_batch, 1), "Expected {}, got {}".format(
                (i_seqen, i_batch, 1), target.shape
            )
            # Weighted running counts of correct answers (Sdqa) and attempts
            # (Sdq) per skill; the L1 term pushes pred_vect toward their ratio.
            dqa = yqs * target
            Sdqa = self.cumsum_weights * torch.cumsum(dqa, dim=0)
            Sdq = self.cumsum_weights * torch.cumsum(yqs, dim=0)
            ksvector_l1 = torch.sum(torch.abs((Sdq * pred_vect) - (Sdqa))) / torch.sum(
                Sdq > 0
            )
            # / (Sdq.shape[0] * Sdq.shape[1] * Sdq.shape[2])
            out_dic["loss"] += self.config.ksvector_l1 * ksvector_l1
            out_dic["ksvector_l1"] = ksvector_l1.item()
            out_dic["Sdqa"] = Sdqa
            out_dic["Sdq"] = Sdq
        if self.config.reconstruction or self.config.reconstruction_and_waviness:
            # Reconstruction loss: also predict the correctness of the
            # *current* interaction (taken from xseq).
            reconstruction_target = torch.matmul(
                xseq.float().to(device), torch.Tensor([[0], [1]]).to(device)
            ).to(device)
            reconstruction_target = reconstruction_target.permute(1, 0, 2).squeeze(2)
            reconstruction_target = reconstruction_target.masked_select(
                mask.permute(1, 0)
            )
            reconstruction_loss = self._loss(_pred_prob.view(-1), reconstruction_target)
            out_dic["loss"] += self.config.lambda_rec * reconstruction_loss
            out_dic["reconstruction_loss"] = reconstruction_loss.item()
            out_dic["filtered_target_c"] = reconstruction_target
        if self.config.waviness == True or self.config.reconstruction_and_waviness:
            # Waviness L1: penalize step-to-step jumps in the prediction vector.
            waviness_norm_l1 = torch.abs(pred_vect[1:, :, :] - pred_vect[:-1, :, :])
            waviness_l1 = torch.sum(waviness_norm_l1) / (
                (pred_vect.shape[0] - 1) * pred_vect.shape[1] * pred_vect.shape[2]
            )
            lambda_l1 = self.config.lambda_l1
            out_dic["loss"] += lambda_l1 * waviness_l1
            out_dic["waviness_l1"] = waviness_l1.item()
        if self.config.waviness == True or self.config.reconstruction_and_waviness:
            # Waviness L2: squared version of the same penalty.
            waviness_norm_l2 = torch.pow(pred_vect[1:, :, :] - pred_vect[:-1, :, :], 2)
            waviness_l2 = torch.sum(waviness_norm_l2) / (
                (pred_vect.shape[0] - 1) * pred_vect.shape[1] * pred_vect.shape[2]
            )
            lambda_l2 = self.config.lambda_l2
            out_dic["loss"] += lambda_l2 * waviness_l2
            out_dic["waviness_l2"] = waviness_l2.item()
        if opt:
            # backpropagation
            opt.zero_grad()
            out_dic["loss"].backward()
            opt.step()
        return out_dic

    def init_h0(self, batch_size):
        # Zero initial hidden state: (layers * directions, batch, hidden).
        return torch.zeros(
            self.n_layers * self.directions, batch_size, self.hidden_size
        ).to(self.device)

    def init_c0(self, batch_size):
        # Zero initial LSTM cell state, same shape as h0.
        return torch.zeros(
            self.n_layers * self.directions, batch_size, self.hidden_size
        ).to(self.device)

    def init_Hidden0(self, i_batch: int):
        # nn.RNN expects only h0; nn.LSTM expects the (h0, c0) tuple.
        if self.model_name == "dkt:rnn":
            h0 = self.init_h0(i_batch)
            return h0
        elif self.model_name == "ksdkt":
            h0 = self.init_h0(i_batch)
            c0 = self.init_c0(i_batch)
            return (h0, c0)

    def init_fc(self):
        # Final projection from the recurrent output to per-skill scores.
        return nn.Linear(self.hidden_size * self.directions, self.output_size).to(
            self.device
        )
| StarcoderdataPython |
1917138 | import os
from pathlib import Path
from typing import Union
__all__ = ['PathType', 'path_lca', 'path_add_suffix']
PathType = Union[str, Path] # a union type for all possible paths
def path_lca(this: Path, other: "PathType") -> Path:
    r"""
    Return the `lowest common ancestor <https://en.wikipedia.org/wiki/Lowest_common_ancestor>`_ of two paths.

    For example::

        >>> path_lca(Path("/path/to/file/in/here"), "/path/to/another/file")
        PosixPath('/path/to')

    **Caveat:** This implementation simply takes the longest common prefix of two expanded paths using
    :func:`os.path.commonprefix`, and may not be robust enough for complex use cases.

    :param this: The first path. Has to be of type :class:`pathlib.Path`.
    :param other: The second path. Can be either a :class:`pathlib.Path` or a :class:`str`.
    :return: The path to the LCA of two paths.
    """
    # Fixes two defects in the previous implementation:
    # 1. ``os.path.commonprefix`` works character-wise on strings; passing
    #    ``Path`` objects (or a Path/str mix) raised TypeError, so both
    #    arguments are converted to ``str`` first.
    # 2. It returned ``this.relative_to(prefix)`` -- the path *relative to*
    #    the common ancestor -- instead of the documented ancestor itself.
    return Path(os.path.commonprefix([str(this), str(other)]))
def path_add_suffix(this: Path, suffix: str) -> Path:
    r"""
    Append a suffix to the given path. For example::

        >>> path_add_suffix(Path("/path/to/file.txt"), "bak")
        Path("/path/to/file.txt.bak")

    A blank *suffix* leaves the path unchanged; a missing leading dot is
    added automatically.

    :param this: The path to modify.
    :param suffix: The suffix to append.
    :return: The modified path.
    """
    cleaned = suffix.strip()
    if cleaned:
        dotted = cleaned if cleaned.startswith('.') else '.' + cleaned
        return this.with_suffix(this.suffix + dotted)
    # Nothing to append: return an equivalent copy of the path.
    return this.with_suffix(this.suffix)
| StarcoderdataPython |
5045213 | import qiniu
# Fill in your Qiniu Access Key and Secret Key here.
access_key = '<KEY>'
secret_key = '<KEY>'
# Name of the storage bucket ("content space").
bucket_name = "infonews"
# Upload a file.
def upload_img(data):
    """Upload raw image bytes to the Qiniu bucket and return the stored key.

    Raises ``Exception`` with the upload response info when the upload fails.
    """
    auth = qiniu.Auth(access_key, secret_key)
    # A key of None lets Qiniu generate a random object name.
    upload_token = auth.upload_token(bucket_name)
    ret, info = qiniu.put_data(upload_token, None, data)
    if ret is None:
        raise Exception(info)
    return ret.get("key")  # name of the uploaded file
| StarcoderdataPython |
149723 | import pytest
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from catboost import CatBoostClassifier
from pydrift import DataDriftChecker, ModelDriftChecker, DriftCheckerEstimator
from pydrift.exceptions import (ColumnsNotMatchException,
DriftEstimatorException)
from pydrift.models import cat_features_fillna
from pydrift.constants import PATH_DATA, RANDOM_STATE
# Shared fixtures for the pydrift tests: the Titanic dataset split into
# stratified train/test halves, plus gender-based subsets used to force drift.
TARGET = 'Survived'
df_titanic = pd.read_csv(PATH_DATA / 'titanic.csv')
X = df_titanic.drop(columns=['PassengerId', 'Name', 'Ticket', 'Cabin', TARGET])
y = df_titanic[TARGET]
# Deliberately drifted subsets: women vs men differ strongly on Titanic data.
X_women = X[X['Sex'] == 'female']
X_men = X[X['Sex'] == 'male']
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=.5, random_state=RANDOM_STATE, stratify=y
)
# Categorical feature names, needed by CatBoost and the fillna helper.
cat_features = list(
    X
    .select_dtypes(include=['category', 'object'])
    .columns
)
# Same split with categorical NaNs filled, for model-based checks.
X_filled = cat_features_fillna(X, cat_features)
X_filled_train, X_filled_test, y_filled_train, y_filled_test = (
    train_test_split(
        X_filled, y, test_size=.5, random_state=RANDOM_STATE, stratify=y
    )
)
df_left_data = pd.concat([X_filled_train, y_filled_train], axis=1)
df_right_data = pd.concat([X_filled_test, y_filled_test], axis=1)
def test_columns_not_match_exception():
    """Tests if correctly raised columns not match
    custom exception"""
    # Fix: in the original, both constructor calls shared a single
    # ``pytest.raises`` block, so the second call was dead code (never
    # reached once the first one raised). Each case now gets its own block.
    with pytest.raises(ColumnsNotMatchException):
        DataDriftChecker(
            X_train.drop(columns='Sex'), X_test, minimal=True, verbose=False
        )
    with pytest.raises(ColumnsNotMatchException):
        DataDriftChecker(
            X_train, X_test.drop(columns='Cabin'), minimal=True, verbose=False
        )
def test_estimator_drift_ko():
    """Tests if correctly check drifted data
    in a pipeline
    """
    with pytest.raises(DriftEstimatorException):
        # Train on one sub-population (Pclass > 1 and Fare > 10) ...
        df_train_filled = pd.concat([X_filled_train, y_train], axis=1)
        df_train_filled_drifted = df_train_filled[
            (df_train_filled['Pclass'] > 1) & (
                df_train_filled['Fare'] > 10)
        ].copy()
        X_train_filled_drifted = df_train_filled_drifted.drop(columns=TARGET)
        y_train_filled_drifted = df_train_filled_drifted[TARGET]
        # ... and predict on the complementary sub-population, so the
        # drift-checking estimator must raise.
        df_test_filled = pd.concat([X_filled_test, y_test], axis=1)
        df_test_filled_drifted = df_test_filled[
            ~(df_test_filled['Pclass'] > 1) & (
                df_test_filled['Fare'] > 10)
        ].copy()
        X_test_filled_drifted = df_test_filled_drifted.drop(columns=TARGET)
        ml_classifier_model = CatBoostClassifier(
            num_trees=5,
            max_depth=3,
            cat_features=cat_features,
            random_state=RANDOM_STATE,
            verbose=False
        )
        pipeline_catboost_drift_checker = make_pipeline(
            DriftCheckerEstimator(ml_classifier_model=ml_classifier_model,
                                  column_names=X.columns.tolist(),
                                  minimal=True)
        )
        pipeline_catboost_drift_checker.fit(X_train_filled_drifted,
                                            y_train_filled_drifted)
        pipeline_catboost_drift_checker.predict_proba(X_test_filled_drifted)
def test_data_drift_ok():
    """Tests if correctly check non-drifted data"""
    data_drift_checker_ok = DataDriftChecker(
        X_train, X_test, minimal=True, verbose=False
    )
    # Fix: removed a duplicate ``check_categorical_columns()`` call whose
    # result was discarded before the assertions below.
    assert not data_drift_checker_ok.ml_model_can_discriminate()
    assert not data_drift_checker_ok.check_numerical_columns()
    assert not data_drift_checker_ok.check_categorical_columns()
def test_data_drift_ko():
    """Tests if correctly check drifted data"""
    # Women-only vs men-only Titanic subsets are strongly drifted, so every
    # check must flag drift.
    drifted_checker = DataDriftChecker(
        X_women, X_men, minimal=True, verbose=False
    )
    assert drifted_checker.ml_model_can_discriminate()
    assert drifted_checker.check_numerical_columns()
    assert drifted_checker.check_categorical_columns()
def test_model_drift_ok():
    """Tests if correctly check non-drifted model"""
    # A small, shallow model trained on one half should not drift against
    # the other (identically distributed) half.
    classifier = CatBoostClassifier(
        num_trees=5,
        max_depth=3,
        cat_features=cat_features,
        random_state=RANDOM_STATE,
        verbose=False
    )
    classifier.fit(X_filled_train, y_filled_train)

    checker = ModelDriftChecker(
        df_left_data, df_right_data, classifier,
        target_column_name=TARGET, minimal=True, verbose=False
    )
    assert not checker.check_model()
def test_model_drift_ko():
    """Tests if correctly check drifted model"""
    # A larger, deeper model overfits the training half, so its performance
    # drifts on the held-out half and the checker must flag it.
    overfit_classifier = CatBoostClassifier(
        num_trees=10,
        max_depth=6,
        cat_features=cat_features,
        random_state=RANDOM_STATE,
        verbose=False
    )
    overfit_classifier.fit(X_filled_train, y_filled_train)

    checker = ModelDriftChecker(
        df_left_data, df_right_data, overfit_classifier,
        target_column_name=TARGET, minimal=True, verbose=False
    )
    assert checker.check_model()
def test_estimator_drift_ok():
    """Tests if correctly check non-drifted data
    in a pipeline
    """
    classifier = CatBoostClassifier(
        num_trees=5,
        max_depth=3,
        cat_features=cat_features,
        random_state=RANDOM_STATE,
        verbose=False
    )
    drift_estimator = DriftCheckerEstimator(
        ml_classifier_model=classifier,
        column_names=X.columns.tolist(),
        minimal=True
    )
    pipeline = make_pipeline(drift_estimator)
    pipeline.fit(X_filled_train, y_filled_train)
    # Train and test halves share a distribution, so this must not raise.
    pipeline.predict_proba(X_filled_test)
| StarcoderdataPython |
# Examples of Python's basic scalar types.
data1 = "10"    # str
data2 = 5       # int
data3 = 5.23    # float
data4 = False   # bool

for value in (data1, data2, data3, data4):
    print(value)
4993389 | <reponame>Naoyg/HomeCredit
"""
変数選択
"""
from pathlib import Path
import hydra
import pandas as pd
from sklearn.feature_selection import SequentialFeatureSelector
from sklearn.linear_model import LogisticRegression
REPO = Path(__file__).resolve().parents[2]
col_idx = "SK_ID_CURR"
col_target = "TARGET"
def load_dataset(cols):
    """Load the processed train/test frames and slice out *cols*.

    Returns ``(train_x, test_x, train_y)`` where ``train_y`` is the
    ``col_target`` column of the training frame.
    """
    processed_dir = REPO / "data" / "processed"
    train_df = pd.read_pickle(processed_dir / "train_df.pkl")
    test_df = pd.read_pickle(processed_dir / "test_df.pkl")
    return train_df[cols], test_df[cols], train_df[col_target]
@hydra.main(config_path="conf", config_name="config")
def main(cfg):
    """Run sequential feature selection over the pre-approved feature list
    and persist a selection flag column back to feature_list.csv."""
    seed = cfg.job.seeds[0]
    list_path = REPO / "data" / "processed" / "feature_list.csv"
    feature_list = pd.read_csv(list_path)
    # Candidate features: rows flagged as adopted ("採用" == 1).
    candidate_cols = feature_list.loc[feature_list["採用"] == 1, "変数"].values
    train_x, _, train_y = load_dataset(candidate_cols)

    estimator = LogisticRegression(
        random_state=seed,
        **cfg.model.params)
    selector = SequentialFeatureSelector(estimator,
                                         **cfg.selection)
    selector.fit(train_x, train_y)
    chosen_cols = train_x.columns.values[selector.get_support()]

    # Persist the selection result ("変数選択" flag) back to the CSV.
    feature_list["変数選択"] = 0
    feature_list.loc[feature_list["変数"].isin(chosen_cols), "変数選択"] = 1
    feature_list.to_csv(REPO / "data" / "processed" / "feature_list.csv")


if __name__ == "__main__":
    main()
| StarcoderdataPython |
390842 | <filename>test/integration/smoke/test_router_dhcphosts.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Import Local Modules
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.cloudstackAPI import (stopVirtualMachine,
stopRouter,
startRouter)
from marvin.lib.utils import (cleanup_resources,
get_process_status)
from marvin.lib.base import (ServiceOffering,
VirtualMachine,
Account,
ServiceOffering,
NATRule,
NetworkACL,
FireWallRule,
PublicIPAddress,
NetworkOffering,
Network,
Router)
from marvin.lib.common import (get_zone,
get_template,
get_domain,
list_virtual_machines,
list_networks,
list_configurations,
list_routers,
list_nat_rules,
list_publicIP,
list_firewall_rules,
list_hosts)
# Import System modules
import time
import logging
class TestRouterDHCPHosts(cloudstackTestCase):
    @classmethod
    def setUpClass(cls):
        """One-time fixture: create the account, offerings, isolated network
        and two VMs (10.1.1.50 / 10.1.1.51) used by the DHCP-hosts tests."""
        cls.logger = logging.getLogger('TestRouterDHCPHosts')
        cls.stream_handler = logging.StreamHandler()
        cls.logger.setLevel(logging.DEBUG)
        cls.logger.addHandler(cls.stream_handler)
        cls.testClient = super(TestRouterDHCPHosts, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = cls.testClient.getParsedTestDataConfig()
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.logger.debug("Creating Admin Account for domain %s on zone %s" % (cls.domain.id, cls.zone.id))
        # Create an account, network, VM and IP addresses
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        cls.logger.debug("Creating Service Offering on zone %s" % (cls.zone.id))
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        # Allow egress by default on the isolated network offering.
        cls.services["isolated_network_offering"]["egress_policy"] = "true"
        cls.logger.debug("Creating Network Offering on zone %s" % (cls.zone.id))
        cls.network_offering = NetworkOffering.create(cls.api_client,
                                                      cls.services["isolated_network_offering"],
                                                      conservemode=True)
        cls.network_offering.update(cls.api_client, state='Enabled')
        cls.logger.debug("Creating Network for Account %s using offering %s" % (cls.account.name, cls.network_offering.id))
        cls.network = Network.create(cls.api_client,
                                     cls.services["network"],
                                     accountid=cls.account.name,
                                     domainid=cls.account.domainid,
                                     networkofferingid=cls.network_offering.id,
                                     zoneid=cls.zone.id)
        cls.logger.debug("Creating VM1 for Account %s using offering %s with IP 10.1.1.50" % (cls.account.name, cls.service_offering.id))
        cls.vm_1 = VirtualMachine.create(cls.api_client,
                                         cls.services["virtual_machine"],
                                         templateid=cls.template.id,
                                         accountid=cls.account.name,
                                         domainid=cls.domain.id,
                                         serviceofferingid=cls.service_offering.id,
                                         networkids=[str(cls.network.id)],
                                         ipaddress="10.1.1.50")
        cls.logger.debug("Creating VM2 for Account %s using offering %s with IP 10.1.1.51" % (cls.account.name, cls.service_offering.id))
        cls.vm_2 = VirtualMachine.create(cls.api_client,
                                         cls.services["virtual_machine"],
                                         templateid=cls.template.id,
                                         accountid=cls.account.name,
                                         domainid=cls.domain.id,
                                         serviceofferingid=cls.service_offering.id,
                                         networkids=[str(cls.network.id)],
                                         ipaddress="10.1.1.51")
        # Port-forwarding rule templates used by the tests (SSH via 222/223).
        cls.services["natrule1"] = {
            "privateport": 22,
            "publicport": 222,
            "protocol": "TCP"
        }
        cls.services["natrule2"] = {
            "privateport": 22,
            "publicport": 223,
            "protocol": "TCP"
        }
        # SSH credentials for the hypervisor hosts, used by test_dhcphosts.
        cls.services["configurableData"] = {
            "host": {
                "password": "password",
                "username": "root",
                "port": 22
            },
            "input": "INPUT",
            "forward": "FORWARD"
        }
        # NOTE(review): vm_1 is not listed for cleanup -- presumably it is
        # destroyed together with the account; confirm this is intentional.
        cls._cleanup = [
            cls.vm_2,
            cls.network,
            cls.network_offering,
            cls.service_offering,
            cls.account
        ]
        return
@classmethod
def tearDownClass(cls):
try:
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.cleanup = []
return
def tearDown(self):
try:
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def test_ssh_command(self, vm, nat_rule, rule_label):
result = 'failed'
try:
ssh_command = "ping -c 3 8.8.8.8"
self.logger.debug("SSH into VM with IP: %s" % nat_rule.ipaddress)
ssh = vm.get_ssh_client(ipaddress=nat_rule.ipaddress, port=self.services[rule_label]["publicport"], retries=5)
result = str(ssh.execute(ssh_command))
self.logger.debug("SSH result: %s; COUNT is ==> %s" % (result, result.count("3 packets received")))
except:
self.fail("Failed to SSH into VM - %s" % (nat_rule.ipaddress))
self.assertEqual(
result.count("3 packets received"),
1,
"Ping to outside world from VM should be successful"
)
    def test_dhcphosts(self, vm, router):
        """Assert *vm*'s IP appears exactly once in the router's /etc/dhcphosts.txt."""
        hosts = list_hosts(
            self.apiclient,
            id=router.hostid)
        self.assertEqual(
            isinstance(hosts, list),
            True,
            "Check for list hosts response return valid data")
        host = hosts[0]
        # SSH credentials for the hypervisor host, taken from the test config.
        host.user = self.services["configurableData"]["host"]["username"]
        host.passwd = self.services["configurableData"]["host"]["password"]
        host.port = self.services["configurableData"]["host"]["port"]
        # dhcphosts.txt lines look like: mac1,10.7.32.101,infinite
        # The command below greps for the VM's IP and prints the second
        # comma-separated field (the IP) of each matching line.
        try:
            result = get_process_status(
                host.ipaddress,
                host.port,
                host.user,
                host.passwd,
                router.linklocalip,
                "cat /etc/dhcphosts.txt | grep %s | sed 's/\,/ /g' | awk '{print $2}'" % (vm.nic[0].ipaddress))
        except KeyError:
            self.skipTest(
                "Provide a marvin config file with host\
                credentials to run %s" %
                self._testMethodName)
        self.logger.debug("cat /etc/dhcphosts.txt | grep %s | sed 's/\,/ /g' | awk '{print $2}' RESULT IS ==> %s" % (vm.nic[0].ipaddress, result))
        res = str(result)
        # Exactly one occurrence means no duplicate DHCP host entries.
        self.assertEqual(
            res.count(vm.nic[0].ipaddress),
            1,
            "DHCP hosts file contains duplicate IPs ==> %s!" % res)
@attr(tags=["advanced", "advancedns", "ssh"], required_hardware="true")
def test_router_dhcphosts(self):
    """Check that the /etc/dhcphosts.txt doesn't contain duplicate IPs"""
    self.logger.debug("Starting test_router_dhcphosts...")
    # Locate the (running) virtual router for the test account.
    routers = list_routers(
        self.apiclient,
        account=self.account.name,
        domainid=self.account.domainid
    )
    self.assertEqual(
        isinstance(routers, list),
        True,
        "Check for list routers response return valid data"
    )
    self.assertNotEqual(
        len(routers),
        0,
        "Check list router response"
    )
    router = routers[0]
    self.assertEqual(
        router.state,
        'Running',
        "Check list router response for router state"
    )
    public_ips = list_publicIP(
        self.apiclient,
        account=self.account.name,
        domainid=self.account.domainid,
        zoneid=self.zone.id
    )
    self.assertEqual(
        isinstance(public_ips, list),
        True,
        "Check for list public IPs response return valid data"
    )
    public_ip = public_ips[0]
    # Open the firewall and port-forward a distinct public port to each VM.
    self.logger.debug("Creating Firewall rule for VM ID: %s" % self.vm_1.id)
    FireWallRule.create(
        self.apiclient,
        ipaddressid=public_ip.id,
        protocol=self.services["natrule1"]["protocol"],
        cidrlist=['0.0.0.0/0'],
        startport=self.services["natrule1"]["publicport"],
        endport=self.services["natrule1"]["publicport"]
    )
    self.logger.debug("Creating NAT rule for VM ID: %s" % self.vm_1.id)
    # Create NAT rule
    nat_rule1 = NATRule.create(
        self.apiclient,
        self.vm_1,
        self.services["natrule1"],
        public_ip.id
    )
    self.logger.debug("Creating Firewall rule for VM ID: %s" % self.vm_2.id)
    FireWallRule.create(
        self.apiclient,
        ipaddressid=public_ip.id,
        protocol=self.services["natrule2"]["protocol"],
        cidrlist=['0.0.0.0/0'],
        startport=self.services["natrule2"]["publicport"],
        endport=self.services["natrule2"]["publicport"]
    )
    self.logger.debug("Creating NAT rule for VM ID: %s" % self.vm_2.id)
    # Create NAT rule
    nat_rule2 = NATRule.create(
        self.apiclient,
        self.vm_2,
        self.services["natrule2"],
        public_ip.id
    )
    # Both forwarding rules must come up Active before we use them.
    nat_rules = list_nat_rules(
        self.apiclient,
        id=nat_rule1.id
    )
    self.assertEqual(
        isinstance(nat_rules, list),
        True,
        "Check for list NAT rules response return valid data"
    )
    self.assertEqual(
        nat_rules[0].state,
        'Active',
        "Check list port forwarding rules"
    )
    nat_rules = list_nat_rules(
        self.apiclient,
        id=nat_rule2.id
    )
    self.assertEqual(
        isinstance(nat_rules, list),
        True,
        "Check for list NAT rules response return valid data"
    )
    self.assertEqual(
        nat_rules[0].state,
        'Active',
        "Check list port forwarding rules"
    )
    self.logger.debug("Testing SSH to VMs %s and %s" % (self.vm_1.id, self.vm_2.id))
    self.test_ssh_command(self.vm_1, nat_rule1, "natrule1")
    self.test_ssh_command(self.vm_2, nat_rule2, "natrule2")
    self.logger.debug("Testing DHCP hosts for VMs %s and %s" % (self.vm_1.id, self.vm_2.id))
    self.test_dhcphosts(self.vm_1, router)
    self.test_dhcphosts(self.vm_2, router)
    # Recreate a VM on the IP that was just freed; afterwards the address
    # must still appear exactly once in dhcphosts.txt (no stale entry).
    self.logger.debug("Deleting and Expunging VM %s with ip %s" % (self.vm_1.id, self.vm_1.nic[0].ipaddress))
    self.vm_1.delete(self.apiclient)
    self.logger.debug("Creating new VM using the same IP as the one which was deleted => IP 10.1.1.50")
    self.vm_1 = VirtualMachine.create(self.apiclient,
                                      self.services["virtual_machine"],
                                      templateid=self.template.id,
                                      accountid=self.account.name,
                                      domainid=self.domain.id,
                                      serviceofferingid=self.service_offering.id,
                                      networkids=[str(self.network.id)],
                                      ipaddress="10.1.1.50")
    self.cleanup.append(self.vm_1)
    self.logger.debug("Testing DHCP hosts for VMs %s and %s" % (self.vm_1.id, self.vm_2.id))
    self.test_dhcphosts(self.vm_1, router)
    self.test_dhcphosts(self.vm_2, router)
    return
| StarcoderdataPython |
# For each input n (0 terminates), print an n-by-n table of right-aligned
# numbers. Row l starts at value l; within a row the value moves up or down
# one step per column, bouncing when it reaches 1 or n ("zigzag" pattern).
while True:
    n = int(input())
    if n == 0: break
    l = 1
    o = '+'  # current direction: '+' = increment, '-' = decrement
    while l <= n:
        v = l
        c = 1
        while c <= n:
            if c != n: print('{:>3}'.format(v), end=' ')
            else:
                # Last column: finish the row and reset the direction.
                print('{:>3}'.format(v))
                o = '-'
            # Bounce at the edges before stepping.
            if v == 1: o = '+'
            elif v == n: o = '-'
            if o == '+': v += 1
            else: v -= 1
            c += 1
        l += 1
    print()
| StarcoderdataPython |
5077600 | import logging
import math
from pandas import DataFrame
from ..features.types import get_type_map_raw
from ..features.feature_metadata import R_INT, R_FLOAT, R_CATEGORY
logger = logging.getLogger(__name__)
# TODO: Documentation
def get_approximate_df_mem_usage(df: DataFrame, sample_ratio=0.2):
    """Estimate per-column memory usage of *df* from a row sample.

    For ``sample_ratio >= 1`` the exact deep memory usage is returned.
    Otherwise int/float columns are measured exactly, categorical columns
    are estimated from a sample of their categories, and every remaining
    column is extrapolated from the first ``ceil(sample_ratio * len(df))``
    rows.
    """
    if sample_ratio >= 1:
        # Caller asked for an exact measurement: no sampling.
        return df.memory_usage(deep=True)

    row_count = len(df)
    sampled_rows = math.ceil(sample_ratio * row_count)
    effective_ratio = sampled_rows / row_count

    dtypes_raw = get_type_map_raw(df)
    category_cols = [col for col in df if dtypes_raw[col] == R_CATEGORY]
    inexact_cols = [col for col in df if dtypes_raw[col] not in [R_INT, R_FLOAT, R_CATEGORY]]

    # Shallow usage is exact for fixed-width (int/float) columns.
    memory_usage = df.memory_usage()
    for col in category_cols:
        categories = df[col].cat.categories
        sampled_cats = math.ceil(effective_ratio * len(categories))
        cat_ratio = sampled_cats / len(categories)
        # Codes are fixed-width per row; the category values themselves are
        # extrapolated from a sample of the category index.
        memory_usage[col] = (
            df[col].cat.codes.dtype.itemsize * row_count
            + categories[:sampled_cats].memory_usage(deep=True) / cat_ratio
        )
    if inexact_cols:
        sampled_usage = df[inexact_cols].head(sampled_rows).memory_usage(deep=True)[inexact_cols] / effective_ratio
        memory_usage = sampled_usage.combine_first(memory_usage)
    return memory_usage
| StarcoderdataPython |
3264671 | <gh_stars>0
import numpy as np
import itertools as it
class SymbolicArray(np.ndarray):
    """ndarray subclass that records a symbolic string of how it was computed.

    ``self.symbolic`` holds the expression; repeated subexpressions are
    factored out into placeholder variables stored in ``self.locals``.
    NOTE: ``dict |=`` below requires Python 3.9+.
    """

    def __new__(cls, arr, symbolic=None, **kwargs):
        if isinstance(arr, cls):
            # Already symbolic: reuse as-is (any explicit `symbolic` is ignored).
            return arr
        arr = np.array(arr, copy=False, **kwargs)
        obj = arr.view(cls)
        # Default symbol: the array's literal repr, printed losslessly.
        obj.symbolic = str(symbolic or np.array2string(arr, separator=',', threshold=np.inf, floatmode='unique'))
        return obj

    def expand(self):
        """Return the symbolic expression with placeholder vars substituted back.

        Single replacement pass per placeholder; placeholders nested inside
        other placeholders' expansions may survive — TODO confirm intended.
        """
        sym = self.symbolic
        for k, v in self.locals.items():
            sym = sym.replace(k, v)
        return sym

    def __array_finalize__(self, obj) -> None:
        if obj is None: return
        # This attribute should be maintained!
        # Propagate the symbolic state through views/slices/copies.
        self.symbolic = getattr(obj, 'symbolic', None)
        self.locals = getattr(obj, 'locals', {})

    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
        # Only plain calls are supported (no reduce/accumulate/outer...).
        assert method == '__call__'
        # Wrap the ufunc with the array-function dispatcher so the call is
        # routed through __array_function__ and gets symbolic handling too.
        return np.core.overrides.array_function_dispatch(
            lambda *args, **kwargs: args, verify=False, module='numpy'
        )(ufunc)(*inputs, **kwargs)

    def __array_function__(self, func, types, inputs, kwargs):
        assert func.__module__
        # Evaluate numerically on plain ndarrays, then attach the textual
        # call expression (e.g. "numpy.add(x, y)") as the new symbol.
        obj = SymbolicArray(
            func(
                *(x.view(np.ndarray) if isinstance(x, SymbolicArray) else x for x in inputs),
                **kwargs
            ),
            f"{func.__module__}.{func.__name__}({', '.join(self._symbolic_args(inputs, kwargs))})"
        )
        for x in filter(lambda x: isinstance(x, type(self)), inputs):
            obj.locals |= x.locals
            # Factor out a non-trivial subexpression that occurs repeatedly.
            if obj.symbolic.count(x.symbolic) > 1 and len(x.symbolic) > 5:
                var = f"var{abs(hash(x.symbolic))}"
                # add symbol to memory
                obj.locals[var] = x.symbolic
                # substitute
                obj.symbolic = obj.symbolic.replace(x.symbolic, var)
        return obj

    def _symbolic_args(self, inputs, kwargs):
        # Symbolic inputs contribute their expression; everything else its repr.
        return it.chain(
            (x.symbolic if isinstance(x, type(self)) else repr(x) for x in inputs),
            (f'{x}={repr(y)}' for x, y in kwargs.items()),
        )
1661213 | <reponame>IvanNardini/2-Engineering<filename>MachineLearningPlatforms/Kubeflow/applications/base/pipelines/conditionals/component.py
#!/usr/bin/env python3
# This is an example for testing conditions in Kubeflow
# Steps:
# 1 - Define functions
# 2 - Define lightweight python components
# 3 - Write the component to a file
# Goal is:
# - Testing conditions with dsl.Condition
import argparse
import kfp.components as cpt
# Functions ------------------------------------------------------------------------------------------------------------
def get_word(text: str, word: str) -> bool:
    """Return True if *word* occurs in *text*, case-insensitively."""
    # `in` already yields a bool; no need for `True if ... else False`.
    return word.lower() in text.lower()
# Component ------------------------------------------------------------------------------------------------------------
def run_component(args):
    """Serialize `get_word` as a reusable Kubeflow component YAML file.

    `create_component_from_func` writes the component spec to disk as a side
    effect; the returned factory was previously bound to an unused local.
    """
    out_components_dir = args.out_component_dir
    cpt.create_component_from_func(
        get_word,
        output_component_file=f'{out_components_dir}/get_word.component')
if __name__ == '__main__':
    # CLI entry point: choose where the generated component YAML is written.
    parser = argparse.ArgumentParser(description="Create the component yaml")
    parser.add_argument('--out-component-dir', default='../../out/components')
    args = parser.parse_args()
    run_component(args=args)
| StarcoderdataPython |
6444967 | <gh_stars>0
import os
from cli import main_script
if __name__ == "__main__":
    # Delegate straight to the CLI entry point.
    main_script()
| StarcoderdataPython |
5115947 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__docformat__ = 'restructuredtext en'
{# Salt/Jinja template rendering a Django settings module. `cfg` is injected
   by Salt; all values come from cfg.data. Jinja comments like this one are
   stripped at render time and do not appear in the generated file. #}
{% set cfg = salt['mc_utils.json_load'](cfg) %}
{% set data = cfg.data %}
{% set settings = cfg.data.settings %}
{# Helper: render "<OPT> = True/False" from a truthy data flag. #}
{% macro renderbool(opt)%}
{{opt}} = {%if data.get(opt, False)%}True{%else%}False{%endif%}
{% endmacro %}
import json
from django.utils.translation import gettext_lazy as _
from pymongo import Connection
SITE_ID={{data.SITE_ID}}
SERVER_EMAIL = '{{data.server_email}}'
DEFAULT_FROM_EMAIL = '{{data.default_from_email}}'
DATABASES = {
    'default': json.loads("""
{{salt['mc_utils.json_dump'](data.db)}}
""".strip()),
}
{% set admint = None %}
ADMINS = (
{# NOTE(review): the inner loop variable `data` shadows the outer `data`
   context inside this block; Jinja restores it after the loop, but verify
   `admint` is really meant to be loop-scoped (it never escapes). #}
{% for dadmins in data.admins %}
{% for admin, data in dadmins.items() %}
{% if data %}{% set admint = (admin, data) %}{%endif %}
    ('{{admin}}', '{{data.mail}}'),
{% endfor %}
{% endfor %}
)
{{renderbool('DEBUG') }}
{% for i in data.server_aliases %}
{% if i not in data.ALLOWED_HOSTS %}
{% do data.ALLOWED_HOSTS.append(i) %}
{% endif %}
{% endfor %}
CORS_ORIGIN_ALLOW_ALL = {{data.CORS_ORIGIN_ALLOW_ALL}}
ALLOWED_HOSTS = {{data.ALLOWED_HOSTS}}
MEDIA_ROOT = '{{data.media}}'
STATIC_ROOT = '{{data.static}}'
SECRET_KEY = '{{data.SECRET_KEY}}'
USE_X_FORWARDED_HOST={{data.USE_X_FORWARDED_HOST}}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
DATE_FORMAT = '{{data.DATE_FORMAT}}'
TIME_ZONE = '{{data.timezone}}'
LANGUAGE_CODE = '{{data.LANGUAGE_CODE}}'
LANGUAGES = (
    ('fr', _('Français')),
    ('it', _('Italia')),
    ('en', _('English'))
)
{# NOTE(review): "WEBISTE_URL" is misspelled but is a public settings name
   other code may import; kept as-is for compatibility. #}
WEBISTE_URL = 'http://{{data.domain}}'
TOUCHFORMS_URL = 'http://localhost:9000/'
DEBUG = '{{data.DEBUG}}'.lower().strip() == 'true'
MEDIA_URL = WEBISTE_URL+'/media/'
ENKETO_URL = '{{data.enketo_url}}/'
BROKER_URL = 'amqp://{{data.rabbitmq_user}}:{{data.rabbitmq_password}}@{{data.rabbitmq_host}}:{{data.rabbitmq_port}}/{{data.rabbitmq_vhost}}'
GOOGLE_STEP2_URI = WEBISTE_URL + '/gwelcome'
GOOGLE_CLIENT_ID = '{{data.google_client_id}}'
GOOGLE_CLIENT_SECRET = '{{data.google_client_secret}}'
MONGO_DATABASE = {
    'HOST': '{{data.mongodb_host}}',
    'PORT': int('{{data.mongodb_port}}'),
    'NAME': '{{data.mongodb_db}}',
    'USER': '{{data.mongodb_user}}',
    'PASSWORD': '{{data.mongodb_password}}'}
ENKETO_API_TOKEN = '{{data.enketo_token}}'
{% if data.get('ADDITIONAL_TEMPLATE_DIRS', None) %}
ADDITIONAL_TEMPLATE_DIRS = tuple({{data.ADDITIONAL_TEMPLATE_DIRS}})
{% endif %}
# vim:set et sts=4 ts=4 tw=80:
# Importing libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

# Binary trend-classification dataset; columns 1-23 of the CSV are used and
# the "output" column holds the 0/1 label.
KPN = pd.read_csv('/Data/Code_Data/new_data.csv', usecols=[1, 2, 3, 4, 5,
                                                           6, 7, 8, 9, 10,
                                                           11, 12, 13, 14, 15,
                                                           16, 17, 18, 19, 20,
                                                           21, 22, 23]
                  ).astype('float32')
# ==============================================================================
# ==============================================================================
'''Creating k-folds'''
sc = StandardScaler()
k = 5
# The data is cut into k+1 = 6 equal chunks; fold i trains on the first i
# chunks and tests on chunk i+1 (expanding-window / walk-forward splits,
# which preserve time order). The scaler is refit on each training window.
set_size = len(KPN) // (k + 1)
features = 15
# fold 1:
fold1_train = sc.fit_transform(KPN.iloc[:set_size, 0:features])
fold1_train_y = KPN["output"][:set_size]
fold1_test = sc.transform(KPN.iloc[set_size:2 * set_size, 0:features])
fold1_test_y = KPN["output"][set_size:2 * set_size]
# fold 2:
fold2_train = sc.fit_transform(KPN.iloc[:2 * set_size, 0:features])
fold2_train_y = KPN["output"][:2 * set_size]
fold2_test = sc.transform(KPN.iloc[2 * set_size:3 * set_size, 0:features])
fold2_test_y = KPN["output"][2 * set_size:3 * set_size]
# fold 3:
fold3_train = sc.fit_transform(KPN.iloc[:3 * set_size, 0:features])
fold3_train_y = KPN["output"][:3 * set_size]
fold3_test = sc.transform(KPN.iloc[3 * set_size:4 * set_size, 0:features])
fold3_test_y = KPN["output"][3 * set_size:4 * set_size]
# fold 4:
fold4_train = sc.fit_transform(KPN.iloc[:4 * set_size, 0:features])
fold4_train_y = KPN["output"][:4 * set_size]
fold4_test = sc.transform(KPN.iloc[4 * set_size:5 * set_size, 0:features])
fold4_test_y = KPN["output"][4 * set_size:5 * set_size]
# fold 5:
fold5_train = sc.fit_transform(KPN.iloc[:5 * set_size, 0:features])
fold5_train_y = KPN["output"][:5 * set_size]
fold5_test = sc.transform(KPN.iloc[5 * set_size:6 * set_size, 0:features])
fold5_test_y = KPN["output"][5 * set_size:6 * set_size]
# ==============================================================================
# ==============================================================================
EPOCHS = 200
reg = keras.regularizers.l2(0.01)
'''creating the model:'''
features = fold1_train.shape[1]
output_shape = 1
model = keras.Sequential()
'''layers:'''
model.add(layers.Dense(128, input_shape=(features,), activation="relu",
                       # kernel_initializer = 'uniform',
                       # kernel_regularizer = reg
                       ))
model.add(layers.Dense(128, activation="relu",
                       # kernel_initializer = 'uniform',
                       # kernel_regularizer = reg
                       ))
model.add(layers.Dense(1, activation='sigmoid', kernel_regularizer=reg
                       ))
# NOTE(review): this Adam optimiser is created but never used -- the model is
# compiled with 'sgd' below; confirm which optimiser was intended.
optimiser = tf.keras.optimizers.Adam()
'''compiler:'''
model.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])
# ==============================================================================
# ==============================================================================
# The same model instance keeps training across folds (weights carry over).
# fold 1:
model.fit(fold1_train, fold1_train_y, epochs=EPOCHS, verbose=1)
fold1_predict = model.predict(fold1_test)
# fold 2:
model.fit(fold2_train, fold2_train_y, epochs=EPOCHS, verbose=1)
fold2_predict = model.predict(fold2_test)
# fold 3:
model.fit(fold3_train, fold3_train_y, epochs=EPOCHS, verbose=1)
fold3_predict = model.predict(fold3_test)
# fold 4:
model.fit(fold4_train, fold4_train_y, epochs=EPOCHS, verbose=1)
fold4_predict = model.predict(fold4_test)
# fold 5:
model.fit(fold5_train, fold5_train_y, epochs=EPOCHS, verbose=1)
fold5_predict = model.predict(fold5_test)
# ==============================================================================
# ==============================================================================
def maxi(x):
    """Map classifier probabilities to 0/1 trend labels.

    Applies the 0.5 decision rule: outputs strictly greater than 0.5 are
    labelled 1 (uptrend), everything else 0 (downtrend).
    """
    return [1 if i > 0.5 else 0 for i in x]
# ==============================================================================
# ==============================================================================
# Convert each fold's predicted probabilities into 0/1 trend labels.
fold1_trend = maxi(fold1_predict)
fold2_trend = maxi(fold2_predict)
fold3_trend = maxi(fold3_predict)
fold4_trend = maxi(fold4_predict)
fold5_trend = maxi(fold5_predict)
# ==============================================================================
# ==============================================================================
def find_hits(act, pred):
    """Count positions where the predicted trend equals the actual trend.

    Indexes both sequences over ``len(act)``, so ``pred`` must be at least
    as long as ``act``.
    """
    return sum(1 for i in range(len(act)) if act[i] == pred[i])
# Directional accuracy (hit rate) per fold.
fold1_hits = find_hits(fold1_trend, np.array(fold1_test_y))
fold1_perc = fold1_hits / len(fold1_trend)
fold2_hits = find_hits(fold2_trend, np.array(fold2_test_y))
fold2_perc = fold2_hits / len(fold2_trend)
fold3_hits = find_hits(fold3_trend, np.array(fold3_test_y))
fold3_perc = fold3_hits / len(fold3_trend)
fold4_hits = find_hits(fold4_trend, np.array(fold4_test_y))
fold4_perc = fold4_hits / len(fold4_trend)
fold5_hits = find_hits(fold5_trend, np.array(fold5_test_y))
fold5_perc = fold5_hits / len(fold5_trend)
print("Fold1 hits percentage", fold1_perc,
      "Fold2 hits percentage", fold2_perc,
      "Fold3 hits percentage", fold3_perc,
      "Fold4 hits percentage", fold4_perc,
      "Fold5 hits percentage", fold5_perc,)
def mse(act, pred):
    """Mean squared error between two equal-length sequences.

    Raises ZeroDivisionError on empty input (matching the original behavior).
    """
    return sum((act[i] - pred[i]) ** 2 for i in range(len(act))) / len(act)
def mae(act, pred):
    """Mean absolute error between two equal-length sequences.

    Uses abs() instead of the manual sign branch; raises ZeroDivisionError
    on empty input (matching the original behavior).
    """
    return sum(abs(act[i] - pred[i]) for i in range(len(act))) / len(act)
# Mean squared / absolute error of the raw probabilities, per fold.
mse_5 = mse(np.array(fold5_test_y), fold5_predict)
mse_4 = mse(np.array(fold4_test_y), fold4_predict)
mse_3 = mse(np.array(fold3_test_y), fold3_predict)
mse_2 = mse(np.array(fold2_test_y), fold2_predict)
mse_1 = mse(np.array(fold1_test_y), fold1_predict)
mae_5 = mae(np.array(fold5_test_y), fold5_predict)
mae_4 = mae(np.array(fold4_test_y), fold4_predict)
mae_3 = mae(np.array(fold3_test_y), fold3_predict)
mae_2 = mae(np.array(fold2_test_y), fold2_predict)
mae_1 = mae(np.array(fold1_test_y), fold1_predict)
print(mse_1, mse_2, mse_3, mse_4, mse_5)
print(mae_1, mae_2, mae_3, mae_4, mae_5)
11353248 | <filename>depricated/km.py
from __future__ import division
import numpy as np
from collections import defaultdict
import json
import itertools
from sklearn import cluster, preprocessing, manifold
from datetime import datetime
import sys
class KeplerMapper(object):
    """Implementation of the Mapper algorithm (topological data analysis).

    Workflow: ``fit_transform`` optionally reduces/scales the data and
    pre-computes the hypercube grid geometry; ``map`` covers the projected
    data with overlapping hypercubes, clusters inside each cube and links
    clusters that share samples; ``visualize`` writes the resulting graph
    as a standalone d3.js HTML page.
    """

    def __init__(
        self,
        cluster_algorithm=cluster.DBSCAN(eps=0.5, min_samples=3),
        nr_cubes=10,
        overlap_perc=0.1,
        scaler=preprocessing.MinMaxScaler(),
        reducer=None,
        color_function="distance_origin",
        link_local=False,
        verbose=1,
    ):
        # NOTE(review): the estimator defaults above are created once at
        # import time and shared between instances -- confirm callers never
        # rely on independent default estimators.
        self.clf = cluster_algorithm
        self.nr_cubes = nr_cubes
        self.overlap_perc = overlap_perc
        self.scaler = scaler
        self.color_function = color_function
        self.verbose = verbose
        self.link_local = link_local
        self.reducer = reducer
        # Grid geometry, filled in by fit_transform().
        self.chunk_dist = []
        self.overlap_dist = []
        self.d = []
        if self.verbose > 0:
            print(
                "\nnr_cubes = %s \n\noverlap_perc = %s\n\nlink_local = %s\n\nClusterer = %s\n\nScaler = %s\n\n"
                % (
                    self.nr_cubes,
                    overlap_perc,
                    self.link_local,
                    str(self.clf),
                    str(self.scaler),
                )
            )

    def fit_transform(self, X):
        """Reduce, scale and grid the data; returns the transformed X.

        Side effects: sets self.chunk_dist, self.overlap_dist and self.d,
        which map() relies on.
        """
        # Dimensionality Reduction
        if self.reducer is not None:  # `is not None`: estimators may override ==
            if self.verbose > 0:
                try:
                    self.reducer.set_params(**{"verbose": self.verbose})
                except Exception:
                    # Not every reducer accepts `verbose`; the previous bare
                    # `except:` also swallowed SystemExit/KeyboardInterrupt.
                    pass
                print("\n..Reducing Dimensionality using: \n\t%s\n" % str(self.reducer))
            reducer = self.reducer
            X = reducer.fit_transform(X)
        # Scaling
        if self.scaler is not None:
            if self.verbose > 0:
                print("\n..Scaling\n")
            scaler = self.scaler
            X = scaler.fit_transform(X)
        # We chop up the min-max column ranges into 'nr_cubes' parts
        self.chunk_dist = (np.max(X, axis=0) - np.min(X, axis=0)) / self.nr_cubes
        # We calculate the overlapping windows distance
        self.overlap_dist = self.overlap_perc * self.chunk_dist
        # We find our starting point
        self.d = np.min(X, axis=0)
        return X

    def map(self, X, dimension_index=[0], dimension_name=""):
        """Map the data to a simplicial complex.

        Returns a dict with "nodes" (cluster id -> member sample ids),
        "links" (cluster id -> adjacent cluster ids) and "meta".
        """
        start = datetime.now()

        def cube_coordinates_all(nr_cubes, nr_dimensions):
            # if there are 4 cubes per dimension and 3 dimensions
            # return the bottom left (origin) coordinates of 64 hypercubes,
            # in a sorted list of Numpy arrays
            l = []
            for x in range(nr_cubes):
                l += [x] * nr_dimensions
            return [
                np.array(list(f))
                for f in sorted(set(itertools.permutations(l, nr_dimensions)))
            ]

        nodes = defaultdict(list)
        links = defaultdict(list)
        # Renamed from `complex` to avoid shadowing the builtin.
        simplicial_complex = {}
        if self.verbose > 0:
            print(
                "Mapping on data shaped %s using dimensions %s\n"
                % (str(X.shape), str(dimension_index))
            )
        # Scaling
        if self.scaler is not None:
            scaler = self.scaler
            X = scaler.fit_transform(X)
        # Initialize Cluster Algorithm
        clf = self.clf
        # Prefix'ing the data with ID's
        ids = np.array([x for x in range(X.shape[0])])
        X = np.c_[ids, X]
        # Subdivide the data X in intervals/hypercubes with overlap
        if self.verbose > 0:
            total_cubes = len(cube_coordinates_all(self.nr_cubes, len(dimension_index)))
            print("Creating %s hypercubes." % total_cubes)
        di = np.array(dimension_index)
        for i, coor in enumerate(cube_coordinates_all(self.nr_cubes, di.shape[0])):
            # Slice the hypercube: keep rows inside this cube's (overlapping)
            # window on every mapped dimension. Column 0 holds the sample id,
            # hence the `di + 1` offset.
            hypercube = X[
                np.invert(
                    np.any(
                        (X[:, di + 1] >= self.d[di] + (coor * self.chunk_dist[di]))
                        & (
                            X[:, di + 1]
                            < self.d[di]
                            + (coor * self.chunk_dist[di])
                            + self.chunk_dist[di]
                            + self.overlap_dist[di]
                        )
                        == False,
                        axis=1,
                    )
                )
            ]
            if self.verbose > 1:
                print(
                    "There are %s points in cube_%s / %s with starting range %s"
                    % (
                        hypercube.shape[0],
                        i,
                        total_cubes,
                        self.d[di] + (coor * self.chunk_dist[di]),
                    )
                )
            # If at least one sample inside the hypercube
            if hypercube.shape[0] > 0:
                # Cluster the data point(s) inside the cube, skipping the id-column
                clf.fit(hypercube[:, 1:])
                if self.verbose > 1:
                    print(
                        "Found %s clusters in cube_%s\n"
                        % (np.unique(clf.labels_[clf.labels_ > -1]).shape[0], i)
                    )
                # Now for every (sample id in cube, predicted cluster label)
                for a in np.c_[hypercube[:, 0], clf.labels_]:
                    if a[1] != -1:  # if not predicted as noise
                        cluster_id = (
                            str(coor[0])
                            + "_"
                            + str(i)
                            + "_"
                            + str(a[1])
                            + "_"
                            + str(coor)
                            + "_"
                            + str(self.d[di] + (coor * self.chunk_dist[di]))
                        )  # Rudimentary cluster id
                        nodes[cluster_id].append(
                            int(a[0])
                        )  # Append the member id's as integers
            else:
                if self.verbose > 1:
                    print("Cube_%s is empty.\n" % (i))
        # Create links when clusters from different hypercubes have members
        # with the same sample id.
        for k in nodes:
            for kn in nodes:
                if k != kn:
                    if len(nodes[k] + nodes[kn]) != len(
                        set(nodes[kn] + nodes[k])
                    ):  # there are non-unique id's in the union
                        links[k].append(kn)
                    # Create links between local hypercube clusters if setting link_local = True
                    # This is an experimental feature deviating too much from the original mapper algo.
                    # Creates a lot of spurious edges, and should only be used when mapping one or at most two dimensions.
                    if self.link_local:
                        if k.split("_")[0] == kn.split("_")[0]:
                            links[k].append(kn)
        # Reporting
        if self.verbose > 0:
            nr_links = 0
            for k in links:
                nr_links += len(links[k])
            print(
                "\ncreated %s edges and %s nodes in %s."
                % (nr_links, len(nodes), str(datetime.now() - start))
            )
        simplicial_complex["nodes"] = nodes
        simplicial_complex["links"] = links
        simplicial_complex["meta"] = dimension_name
        return simplicial_complex

    def visualize(
        self,
        complex,
        path_html="mapper_visualization_output.html",
        title="My Data",
        graph_link_distance=30,
        graph_gravity=0.1,
        graph_charge=-120,
        custom_tooltips=None,
        width_html=0,
        height_html=0,
        show_tooltips=True,
        show_title=True,
        show_meta=True,
    ):
        """Turn the dictionary `complex` into an HTML file with d3.js."""
        # Format JSON
        json_s = {}
        json_s["nodes"] = []
        json_s["links"] = []
        k2e = {}  # a key to incremental int dict, used for id's when linking
        for e, k in enumerate(complex["nodes"]):
            # Tooltip formatting.
            # `is not None` is required here: custom_tooltips is typically a
            # numpy array, for which `!= None` compares elementwise and makes
            # the `if` raise "truth value of an array is ambiguous".
            if custom_tooltips is not None:
                tooltip_s = "<h2>Cluster %s</h2>" % k + " ".join(
                    [str(f) for f in custom_tooltips[complex["nodes"][k]]]
                )
                if self.color_function == "average_signal_cluster":
                    # Color by the cluster's mean tooltip signal, scaled to 0-30.
                    tooltip_i = int(
                        (
                            (
                                sum([f for f in custom_tooltips[complex["nodes"][k]]])
                                / len(custom_tooltips[complex["nodes"][k]])
                            )
                            * 30
                        )
                    )
                    json_s["nodes"].append(
                        {
                            "name": str(k),
                            "tooltip": tooltip_s,
                            "group": 2 * int(np.log(len(complex["nodes"][k]))),
                            "color": str(tooltip_i),
                        }
                    )
                else:
                    json_s["nodes"].append(
                        {
                            "name": str(k),
                            "tooltip": tooltip_s,
                            "group": 2 * int(np.log(len(complex["nodes"][k]))),
                            "color": str(k.split("_")[0]),
                        }
                    )
            else:
                tooltip_s = "<h2>Cluster %s</h2>Contains %s members." % (
                    k,
                    len(complex["nodes"][k]),
                )
                json_s["nodes"].append(
                    {
                        "name": str(k),
                        "tooltip": tooltip_s,
                        "group": 2 * int(np.log(len(complex["nodes"][k]))),
                        "color": str(k.split("_")[0]),
                    }
                )
            k2e[k] = e
        for k in complex["links"]:
            for link in complex["links"][k]:
                json_s["links"].append(
                    {"source": k2e[k], "target": k2e[link], "value": 1}
                )
        # Width and height of graph in HTML output (0 means fill the holder).
        if width_html == 0:
            width_css = "100%"
            width_js = 'document.getElementById("holder").offsetWidth-20'
        else:
            width_css = "%spx" % width_html
            width_js = "%s" % width_html
        if height_html == 0:
            height_css = "100%"
            height_js = 'document.getElementById("holder").offsetHeight-20'
        else:
            height_css = "%spx" % height_html
            height_js = "%s" % height_html
        # Whether to show certain UI elements or not
        if show_tooltips == False:
            tooltips_display = "display: none;"
        else:
            tooltips_display = ""
        if show_meta == False:
            meta_display = "display: none;"
        else:
            meta_display = ""
        if show_title == False:
            title_display = "display: none;"
        else:
            title_display = ""
        with open(path_html, "wb") as outfile:
            html = """<!DOCTYPE html>
<meta charset="utf-8">
<meta name="generator" content="KeplerMapper">
<title>%s | KeplerMapper</title>
<link href='https://fonts.googleapis.com/css?family=Roboto:700,300' rel='stylesheet' type='text/css'>
<style>
* {margin: 0; padding: 0;}
html { height: 100%%;}
body {background: #111; height: 100%%; font: 100 16px Roboto, Sans-serif;}
.link { stroke: #999; stroke-opacity: .333; }
.divs div { border-radius: 50%%; background: red; position: absolute; }
.divs { position: absolute; top: 0; left: 0; }
#holder { position: relative; width: %s; height: %s; background: #111; display: block;}
h1 { %s padding: 20px; color: #fafafa; text-shadow: 0px 1px #000,0px -1px #000; position: absolute; font: 300 30px Roboto, Sans-serif;}
h2 { text-shadow: 0px 1px #000,0px -1px #000; font: 700 16px Roboto, Sans-serif;}
.meta { position: absolute; opacity: 0.9; width: 220px; top: 80px; left: 20px; display: block; %s background: #000; line-height: 25px; color: #fafafa; border: 20px solid #000; font: 100 16px Roboto, Sans-serif;}
div.tooltip { position: absolute; width: 380px; display: block; %s padding: 20px; background: #000; border: 0px; border-radius: 3px; pointer-events: none; z-index: 999; color: #FAFAFA;}
}
</style>
<body>
<div id="holder">
<h1>%s</h1>
<p class="meta">
<b>Lens</b><br>%s<br><br>
<b>Cubes per dimension</b><br>%s<br><br>
<b>Overlap percentage</b><br>%s%%<br><br>
<!-- <b>Linking locally</b><br>%s<br><br> -->
<b>Color Function</b><br>%s( %s )<br><br>
<b>Clusterer</b><br>%s<br><br>
<b>Scaler</b><br>%s
</p>
</div>
<script src="https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"></script>
<script>
var width = %s,
height = %s;
var color = d3.scale.ordinal()
.domain(["0","1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13","14","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29","30"])
.range(["#FF0000","#FF1400","#FF2800","#FF3c00","#FF5000","#FF6400","#FF7800","#FF8c00","#FFa000","#FFb400","#FFc800","#FFdc00","#FFf000","#fdff00","#b0ff00","#65ff00","#17ff00","#00ff36","#00ff83","#00ffd0","#00e4ff","#00c4ff","#00a4ff","#00a4ff","#0084ff","#0064ff","#0044ff","#0022ff","#0002ff","#0100ff","#0300ff","#0500ff"]);
var force = d3.layout.force()
.charge(%s)
.linkDistance(%s)
.gravity(%s)
.size([width, height]);
var svg = d3.select("#holder").append("svg")
.attr("width", width)
.attr("height", height);
var div = d3.select("#holder").append("div")
.attr("class", "tooltip")
.style("opacity", 0.0);
var divs = d3.select('#holder').append('div')
.attr('class', 'divs')
.attr('style', function(d) { return 'overflow: hidden; width: ' + width + 'px; height: ' + height + 'px;'; });
graph = %s;
force
.nodes(graph.nodes)
.links(graph.links)
.start();
var link = svg.selectAll(".link")
.data(graph.links)
.enter().append("line")
.attr("class", "link")
.style("stroke-width", function(d) { return Math.sqrt(d.value); });
var node = divs.selectAll('div')
.data(graph.nodes)
.enter().append('div')
.on("mouseover", function(d) {
div.transition()
.duration(200)
.style("opacity", .9);
div .html(d.tooltip + "<br/>")
.style("left", (d3.event.pageX + 100) + "px")
.style("top", (d3.event.pageY - 28) + "px");
})
.on("mouseout", function(d) {
div.transition()
.duration(500)
.style("opacity", 0);
})
.call(force.drag);
node.append("title")
.text(function(d) { return d.name; });
force.on("tick", function() {
link.attr("x1", function(d) { return d.source.x; })
.attr("y1", function(d) { return d.source.y; })
.attr("x2", function(d) { return d.target.x; })
.attr("y2", function(d) { return d.target.y; });
node.attr("cx", function(d) { return d.x; })
.attr("cy", function(d) { return d.y; })
.attr('style', function(d) { return 'width: ' + (d.group * 2) + 'px; height: ' + (d.group * 2) + 'px; ' + 'left: '+(d.x-(d.group))+'px; ' + 'top: '+(d.y-(d.group))+'px; background: '+color(d.color)+'; box-shadow: 0px 0px 3px #111; box-shadow: 0px 0px 33px '+color(d.color)+', inset 0px 0px 5px rgba(0, 0, 0, 0.2);'})
;
});
</script>""" % (
                title,
                width_css,
                height_css,
                title_display,
                meta_display,
                tooltips_display,
                title,
                complex["meta"],
                self.nr_cubes,
                self.overlap_perc * 100,
                self.link_local,
                self.color_function,
                complex["meta"],
                str(self.clf),
                str(self.scaler),
                width_js,
                height_js,
                graph_charge,
                graph_link_distance,
                graph_gravity,
                json.dumps(json_s),
            )
            outfile.write(html.encode("utf-8"))
        if self.verbose > 0:
            print("\nWrote d3.js graph to '%s'" % path_html)
| StarcoderdataPython |
11249132 | from .activity import Activity | StarcoderdataPython |
9611930 | import time
import json
import requests
from binance.client import Client
with open('keys.json', 'r') as file:
    keys = json.loads(file.read())['binance']

client = Client(keys['key'], keys['secret'])


# PEP 8 (E731): named functions instead of lambdas bound to names -- they get
# docstrings and readable tracebacks.
def rub():
    """Current price of 1 RUB expressed in BTC, via blockchain.info."""
    return float(requests.get('https://blockchain.info/tobtc?currency=RUB&value=1000').text) / 1000


def btc_to_rub(btc):
    """Convert a BTC amount to whole roubles at the current exchange rate."""
    return int(btc / rub())


# Market depth / order book snapshot
depth = client.get_order_book(symbol='BNBBTC')
print(depth)
# Trade websocket handler (realtime price stream)
def process_message(msg):
    """Print one aggregated-trade event: timestamp, price, volume, RUB value."""
    if msg['e'] != 'aggTrade':
        # Anything other than an aggregated trade is dumped raw for inspection.
        print(msg)
        return
    price, quantity = float(msg['p']), float(msg['q'])
    when = time.ctime(msg['T'] / 1000)
    rub_value = btc_to_rub(price * quantity)
    print(f'{when} {price} V{quantity} {rub_value}₽')
# Subscribe to the realtime aggregated-trade stream for BNB/BTC.
from binance.websockets import BinanceSocketManager
bm = BinanceSocketManager(client)
bm.start_aggtrade_socket('BNBBTC', process_message)
bm.start()
# Historical kline data / candlestick history
klines = client.get_historical_klines("ETHBTC", Client.KLINE_INTERVAL_1MINUTE, 1590345715000)
# Client.KLINE_INTERVAL_30MINUTE, "1 Dec, 2017", "1 Jan, 2018")
# Client.KLINE_INTERVAL_1MINUTE, "1 day ago UTC"
# Client.KLINE_INTERVAL_1WEEK, "1 Jan, 2017"
for line in klines:
    # Kline layout: [open time ms, open, high, low, close, volume, ...]
    price_open = float(line[1])
    price_high = float(line[2])
    price_low = float(line[3])
    price_close = float(line[4])
    volume = float(line[5])
    print('{} OPEN {} HIGH {} LOW {} CLOSE {} V{}'.format(time.ctime(line[0]/1000), price_open, price_high, price_low, price_close, volume))
# # place a test market buy order, to place an actual order use the create_order function
# order = client.create_test_order(
# symbol='BNBBTC',
# side=Client.SIDE_BUY,
# type=Client.ORDER_TYPE_MARKET,
# quantity=100)
# # get all symbol prices
# prices = client.get_all_tickers() | StarcoderdataPython |
1899462 | """
Core admin setup
"""
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import gettext_lazy as _
from core.forms import CustomUserCreationForm
from core.models import User, Post
@admin.register(User)
class UserCoreAdmin(UserAdmin):
    """Admin configuration for the custom User model."""
    fieldsets = (
        (_('Credentials'), {'fields': ('username', 'password')}),
        (_('Personal info'), {
            'fields': ('first_name', 'last_name', 'other_name', 'email', 'phone_number', 'gender')}),
        (_('Others'), {'fields': ('bio', 'profile_image_url', 'account_type', 'status')}),
        (_('Permissions'), {
            'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions'),
        }),
        (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
    )
    # add_fieldsets drives the "add user" form; the creation form expects both
    # password fields ('password1' and 'password2' -- the second had been
    # mangled into a '<PASSWORD>' placeholder).
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('username', 'account_type', 'status', 'password1', 'password2'),
        }),
    )
    add_form = CustomUserCreationForm
    list_filter = ('date_created', 'gender', 'account_type', 'status')
    date_hierarchy = 'date_created'
    list_display = (
        'username', 'full_name', 'gender', 'email', 'phone_number', 'account_type', 'status', 'date_modified',
        'date_created')
    search_fields = (
        'username', 'full_name', 'gender', 'email', 'phone_number', 'account_type__name', 'status__name')
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
    """
    Admin model for Post entity.
    """
    # Sidebar filters plus date drill-down by creation date.
    list_filter = ('date_created', 'category', 'featured')
    date_hierarchy = 'date_created'
    list_display = (
        'title', 'excerpt', 'user', 'featured', 'category', 'priority', 'comments_count', 'reactions_count', 'status',
        'date_modified', 'date_created')
    search_fields = (
        'title', 'content', 'user__first_name', 'user__last_name', 'user__other_name', 'category__name',
        'priority', 'comments_count', 'reactions_count', 'status__name')
1686930 | #! /usr/bin/env python
from enum import IntEnum
from aoc.intcode import IntCodeCPU, InterruptCode
from aoc.utils import load_input
class Color(IntEnum):
    """Hull panel color; values match the intcode program's paint output (0/1)."""
    BLACK = 0
    WHITE = 1
class Direction(IntEnum):
    """Facing direction of the painting robot on the grid."""
    UP = 0
    DOWN = 1
    LEFT = 2
    RIGHT = 3
class TurnDirection(IntEnum):
    """Turn command emitted by the intcode program: 0 = left, 1 = right."""
    LEFT = 0
    RIGHT = 1
class Panel:
    # A single hull cell: grid coordinates, current color, and whether the
    # robot has ever painted it (the part-1 answer counts painted panels).
    def __init__(self, x, y, color=Color.BLACK, painted=False):
        self.x = x
        self.y = y
        self.color = color
        self.painted = painted

    def paint(self, color: Color):
        """Set the cell color and mark the cell as having been painted."""
        self.color = color
        self.painted = True
class Robot:
    """Painting robot: a grid position plus a facing direction."""

    def __init__(self, x, y, direction=Direction.UP):
        self.x = x
        self.y = y
        self.direction = direction

    def turn(self, td: TurnDirection):
        """Rotate 90 degrees left or right; raise ValueError otherwise."""
        if td == TurnDirection.LEFT:
            cycle = (Direction.UP, Direction.LEFT, Direction.DOWN, Direction.RIGHT)
        elif td == TurnDirection.RIGHT:
            cycle = (Direction.UP, Direction.RIGHT, Direction.DOWN, Direction.LEFT)
        else:
            raise ValueError("Invalid Direction")
        # Each tuple lists the facings in rotation order, so one 90-degree
        # turn is a single step forward along the cycle.
        self.direction = cycle[(cycle.index(self.direction) + 1) % 4]

    def move_forward(self):
        """Advance one cell in the current facing direction (y grows upward)."""
        dx, dy = {
            Direction.UP: (0, 1),
            Direction.DOWN: (0, -1),
            Direction.LEFT: (-1, 0),
            Direction.RIGHT: (1, 0),
        }[self.direction]
        self.x += dx
        self.y += dy

    @property
    def position(self):
        """Current (x, y) coordinates as a tuple."""
        return self.x, self.y
def run_robot(program, initial_coords, initial_color):
    """Run the intcode paint program; return a {coords: Panel} map of every panel visited.

    The CPU is fed the color of the panel under the robot.  Each time the CPU
    blocks waiting for input it has produced a (color, turn) output pair,
    which is applied (paint, turn, step forward) before resuming.  The loop
    ends when the CPU returns any other interrupt code (program halted).
    """
    panels = {initial_coords: Panel(*initial_coords, initial_color)}
    robot = Robot(*initial_coords)
    cpu = IntCodeCPU(program)
    while True:
        p = panels.get(robot.position)
        if not p:
            # First visit: the panel starts black and unpainted.
            p = Panel(*robot.position)
            panels[robot.position] = p
        res = cpu.run((p.color.value,))
        if res == InterruptCode.WAITING_ON_INPUT:
            c, td = cpu.pop_output()
            p.paint(Color(c))
            robot.turn(TurnDirection(td))
            robot.move_forward()
        else:
            break
    return panels
def render_panels(panels):
    """Print the painted hull: '#' for white panels, space for everything else.

    Rows are printed top-to-bottom (highest y first).  This version does not
    mutate the caller's Panel objects (the original shifted every panel's
    coordinates in place) and is a no-op for an empty panel map.
    """
    if not panels:
        return
    ps = panels.values()
    min_x = min(p.x for p in ps)
    min_y = min(p.y for p in ps)
    width = max(p.x for p in ps) - min_x + 1
    height = max(p.y for p in ps) - min_y + 1
    grid = [[" "] * width for _ in range(height)]
    for p in ps:
        if p.color == Color.WHITE:
            # Shift into grid space without touching the panel itself.
            grid[p.y - min_y][p.x - min_x] = "#"
    for row in reversed(grid):
        print("".join(row))
def main():
    """Solve AoC 2019 day 11: count painted panels, then render the hull id."""
    program = load_input("d11.txt")
    # Part 1: start on a black panel; count panels painted at least once.
    panels = run_robot(program, (0, 0), Color.BLACK)
    print(f"Part 1: {len([p for p in panels.values() if p.painted])}")
    # Part 2: starting on a white panel yields the registration identifier.
    panels = run_robot(program, (0, 0), Color.WHITE)
    print("Part 2:")
    render_panels(panels)
if __name__ == "__main__":
    # Time the whole solve with wall-clock time.
    from time import time as ts
    _t = ts()
    main()
    _t = ts() - _t
    print(f"Runtime: {_t:.3f}s")
| StarcoderdataPython |
1667522 | <reponame>lorainemg/autogoal
from typing import List
from autogoal.experimental.metalearning.datasets import Dataset
from autogoal.experimental.metalearning.metalearner import MetaLearner
from autogoal.experimental.metalearning.distance_measures import cosine_measure, l2_distance, l1_distance
class NNMetaLearner(MetaLearner):
    """Nearest-neighbour meta-learner.

    Recommends pipelines taken from previously-seen datasets whose
    meta-features are closest (per ``distance_metric``) to the target
    dataset's meta-features.
    """

    def __init__(self, features_extractor=None, load=True, number_of_results: int = 15, strategy='aggregated',
                 distance_metric=l2_distance, *, learner_name='nn_metalearner'):
        super().__init__(features_extractor, load, learner_name=learner_name)
        self.n_results = number_of_results
        # 'aggregated' or 'simple'; validated in predict().
        self.strategy = strategy
        self.distance_metric = distance_metric

    def _try_to_load_model(self, load):
        # Best effort: reuse previously saved meta-feature vectors if present.
        # NOTE(review): presumably invoked by the MetaLearner base class — confirm.
        if load:
            try:
                self.load_vectors()
            except FileNotFoundError:
                pass

    def meta_train(self, datasets: List[Dataset], *, save=True):
        """Extract and store (features, target, label, file) samples from *datasets*."""
        features, labels, targets, files = self.get_training_samples(datasets)
        self.samples = list(zip(features, targets, labels, files))
        # features, _ = self.append_features_and_labels(features, labels)
        if save:
            self.save_vectors()

    def predict(self, dataset: Dataset):
        """Return (pipelines_info, pipeline_types, scores) for the best pipelines.

        Raises:
            ValueError: if ``self.strategy`` is neither 'aggregated' nor 'simple'.
        """
        data_features = self.preprocess_metafeatures(dataset)
        # get the pipelines to test
        if self.strategy == 'aggregated':
            datasets = self.get_similar_datasets(data_features, self.distance_metric, return_similarity=True)
            pipelines, files, scores = self.get_best_pipelines_aggregated_ranking(datasets, self.n_results)
        elif self.strategy == 'simple':
            datasets = self.get_similar_datasets(data_features, self.distance_metric, return_similarity=False)
            pipelines, files, scores = self.get_best_pipelines(datasets, self.n_results, self.n_results)
            pipelines, files, scores = self._sort_pipelines_by_score(pipelines, files, scores)
        else:
            # ValueError is a subclass of Exception, so existing broad handlers keep working.
            raise ValueError(f"Unknown strategy: {self.strategy!r} (expected 'aggregated' or 'simple')")
        pipelines, files, scores = pipelines[:self.n_results], files[:self.n_results], scores[:self.n_results]
        decode_pipeline = self.decode_pipelines(pipelines)
        pipelines_info, pipeline_types = self.get_all_pipeline_info(decode_pipeline, files)
        return pipelines_info, pipeline_types, scores
| StarcoderdataPython |
4878816 | load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def enkit_deps():
    """Declares enkit's external workspace dependencies.

    Every http_archive is guarded by a check against native.existing_rules(),
    so a repository already defined by the caller's WORKSPACE takes precedence
    over the pinned versions below.
    """
    excludes = native.existing_rules().keys()

    # Go build rules (rules_go v0.27.0).
    if "io_bazel_rules_go" not in excludes:
        http_archive(
            name = "io_bazel_rules_go",
            sha256 = "69de5c704a05ff37862f7e0f5534d4f479418afc21806c887db544a316f3cb6b",
            urls = [
                "https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.27.0/rules_go-v0.27.0.tar.gz",
                "https://github.com/bazelbuild/rules_go/releases/download/v0.27.0/rules_go-v0.27.0.tar.gz",
            ],
        )

    if "com_github_ccontavalli_bazel_rules" not in excludes:
        http_archive(
            name = "com_github_ccontavalli_bazel_rules",
            sha256 = "0d0d8e644fd616d0ee225444889295914405df77cc549e8fc87ad6fd8b9bbb25",
            strip_prefix = "bazel-rules-6",
            urls = ["https://github.com/ccontavalli/bazel-rules/archive/v6.tar.gz"],
        )

    # NodeJS/JS build rules (rules_nodejs 3.5.1).
    if "build_bazel_rules_nodejs" not in excludes:
        http_archive(
            name = "build_bazel_rules_nodejs",
            sha256 = "4a5d654a4ccd4a4c24eca5d319d85a88a650edf119601550c95bf400c8cc897e",
            urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/3.5.1/rules_nodejs-3.5.1.tar.gz"],
        )

    # BUILD file generator for Go (gazelle v0.23.0).
    if "bazel_gazelle" not in excludes:
        http_archive(
            name = "bazel_gazelle",
            sha256 = "62ca106be173579c0a167deb23358fdfe71ffa1e4cfdddf5582af26520f1c66f",
            urls = [
                "https://mirror.bazel.build/github.com/bazelbuild/bazel-gazelle/releases/download/v0.23.0/bazel-gazelle-v0.23.0.tar.gz",
                "https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.23.0/bazel-gazelle-v0.23.0.tar.gz",
            ],
        )

    # Protobuf rules, pinned to a specific commit.
    if "rules_proto" not in excludes:
        http_archive(
            name = "rules_proto",
            sha256 = "aa1ee19226f707d44bee44c720915199c20c84a23318bb0597ed4e5c873ccbd5",
            strip_prefix = "rules_proto-40298556293ae502c66579620a7ce867d5f57311",
            urls = [
                "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/40298556293ae502c66579620a7ce867d5f57311.tar.gz",
                "https://github.com/bazelbuild/rules_proto/archive/40298556293ae502c66579620a7ce867d5f57311.tar.gz",
            ],
        )

    # rules_docker 0.14.4 is incompatible with rules_pkg 0.3.0 as of Oct/2020.
    #
    # When you update this dependency, please make sure rules_docker has been updated as well,
    # and do run a docker build to ensure that there is no breakage.
    if "rules_pkg" not in excludes:
        http_archive(
            name = "rules_pkg",
            urls = [
                "https://github.com/bazelbuild/rules_pkg/releases/download/0.2.6-1/rules_pkg-0.2.6.tar.gz",
                "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.2.6/rules_pkg-0.2.6.tar.gz",
            ],
            sha256 = "aeca78988341a2ee1ba097641056d168320ecc51372ef7ff8e64b139516a4937",
        )

    if "com_github_atlassian_bazel_tools" not in excludes:
        http_archive(
            name = "com_github_atlassian_bazel_tools",
            strip_prefix = "bazel-tools-5c3b9306e703c6669a6ce064dd6dde69f69cba35",
            sha256 = "c8630527150f3a9594e557fdcf02694e73420c10811eb214b461e84cb74c3aa8",
            urls = [
                "https://github.com/atlassian/bazel-tools/archive/5c3b9306e703c6669a6ce064dd6dde69f69cba35.zip",
            ],
        )
| StarcoderdataPython |
1867457 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'browserUi.ui'
#
# Created by: PyQt5 UI code generator 5.15.6
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWebEngineWidgets import QWebEngineView
import sys
class Ui_Form(object):
    """pyuic5-generated widget tree for the frameless browser window.

    NOTE(review): this module is generated from browserUi.ui; the label_3
    stylesheet fix below ("border-radious7px;" -> "border-radius:7px;") must
    also be applied in the .ui source or it will be lost on regeneration.
    """

    def setupUi(self, Form):
        Form.setObjectName("Form")
        Form.resize(792, 550)
        # Frameless, translucent top-level window; the rounded chrome is drawn
        # by the stylesheets below.
        Form.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        Form.setAttribute(QtCore.Qt.WA_TranslucentBackground)
        self.verticalLayout = QtWidgets.QVBoxLayout(Form)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setSpacing(0)
        self.verticalLayout.setObjectName("verticalLayout")
        self.widget = QtWidgets.QWidget(Form)
        self.widget.setStyleSheet("QWidget#widget{\n"
"    border:4px solid rgb(45,45,45);\n"
"    border-radius:20px;\n"
"}")
        self.widget.setObjectName("widget")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.widget)
        self.verticalLayout_2.setContentsMargins(2, 2, 2, 2)
        self.verticalLayout_2.setSpacing(0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        # Top bar: window controls, title and navigation/URL row.
        self.widget_2 = QtWidgets.QWidget(self.widget)
        self.widget_2.setMinimumSize(QtCore.QSize(0, 80))
        self.widget_2.setMaximumSize(QtCore.QSize(16777215, 80))
        self.widget_2.setStyleSheet("QWidget#widget_2{\n"
"    background-color:rgb(20,20,20);\n"
"    border-top-left-radius:20px;\n"
"    border-top-right-radius:20px;\n"
"}\n"
"QPushButton{\n"
"    background-color:rgb(0,0,0);\n"
"    color:rgb(144,144,144);\n"
"    font:bold;\n"
"    font-size:15px;\n"
"    font-family:entypo;\n"
"}\n"
"QPushButton:hover{\n"
"    color:rgb(142,175,27);\n"
"}\n"
"QPushButton:pressed{\n"
"    padding-top:5px;\n"
"    padding-left:5px;\n"
"    color:rgb(91,88,53);\n"
"}\n"
"\n"
"")
        self.widget_2.setObjectName("widget_2")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.widget_2)
        self.verticalLayout_3.setContentsMargins(12, -1, 12, -1)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        # Decorative traffic-light dots on the left of the title bar.
        self.label_5 = QtWidgets.QLabel(self.widget_2)
        self.label_5.setMinimumSize(QtCore.QSize(15, 15))
        self.label_5.setMaximumSize(QtCore.QSize(15, 15))
        self.label_5.setStyleSheet("background-color:rgb(255,178,102);\n"
"border-radius:7px;")
        self.label_5.setText("")
        self.label_5.setObjectName("label_5")
        self.horizontalLayout.addWidget(self.label_5)
        self.label_4 = QtWidgets.QLabel(self.widget_2)
        self.label_4.setMinimumSize(QtCore.QSize(15, 15))
        self.label_4.setMaximumSize(QtCore.QSize(15, 15))
        self.label_4.setStyleSheet("background-color:rgb(255,255,102);\n"
"border-radius:7px")
        self.label_4.setText("")
        self.label_4.setObjectName("label_4")
        self.horizontalLayout.addWidget(self.label_4)
        self.label_3 = QtWidgets.QLabel(self.widget_2)
        self.label_3.setMinimumSize(QtCore.QSize(15, 15))
        self.label_3.setMaximumSize(QtCore.QSize(15, 15))
        # Fixed: original generated CSS was "border-radious7px;" (misspelled
        # property with no colon), which Qt silently ignores.
        self.label_3.setStyleSheet("background-color:rgb(255,255,102);\n"
"border:4px solid rgb(45,45,45);\n"
"border-radius:7px;")
        self.label_3.setText("")
        self.label_3.setObjectName("label_3")
        self.horizontalLayout.addWidget(self.label_3)
        self.label_6 = QtWidgets.QLabel(self.widget_2)
        self.label_6.setMinimumSize(QtCore.QSize(200, 0))
        self.label_6.setStyleSheet("color:rgb(144,144,144);")
        self.label_6.setAlignment(QtCore.Qt.AlignCenter)
        self.label_6.setObjectName("label_6")
        self.horizontalLayout.addWidget(self.label_6)
        # Window control buttons (minimize / maximize-toggle / close).
        self.pushButton = QtWidgets.QPushButton(self.widget_2)
        self.pushButton.setMinimumSize(QtCore.QSize(25, 25))
        self.pushButton.setMaximumSize(QtCore.QSize(25, 25))
        self.pushButton.setObjectName("pushButton")
        self.horizontalLayout.addWidget(self.pushButton)
        self.pushButton_3 = QtWidgets.QPushButton(self.widget_2)
        self.pushButton_3.setMinimumSize(QtCore.QSize(25, 25))
        self.pushButton_3.setMaximumSize(QtCore.QSize(25, 25))
        font = QtGui.QFont()
        font.setFamily("entypo")
        font.setPointSize(-1)
        font.setBold(True)
        font.setItalic(False)
        font.setUnderline(False)
        font.setWeight(75)
        font.setStyleStrategy(QtGui.QFont.PreferAntialias)
        self.pushButton_3.setFont(font)
        self.pushButton_3.setCheckable(True)
        self.pushButton_3.setObjectName("pushButton_3")
        self.horizontalLayout.addWidget(self.pushButton_3)
        self.pushButton_2 = QtWidgets.QPushButton(self.widget_2)
        self.pushButton_2.setEnabled(True)
        self.pushButton_2.setMinimumSize(QtCore.QSize(25, 25))
        self.pushButton_2.setMaximumSize(QtCore.QSize(25, 25))
        font = QtGui.QFont()
        font.setFamily("entypo")
        font.setPointSize(-1)
        font.setBold(True)
        font.setItalic(False)
        font.setWeight(75)
        self.pushButton_2.setFont(font)
        self.pushButton_2.setObjectName("pushButton_2")
        self.horizontalLayout.addWidget(self.pushButton_2)
        self.verticalLayout_3.addLayout(self.horizontalLayout)
        # Second row: back/forward/reload buttons plus the URL line edit.
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.pushButton_4 = QtWidgets.QPushButton(self.widget_2)
        self.pushButton_4.setMinimumSize(QtCore.QSize(25, 25))
        self.pushButton_4.setMaximumSize(QtCore.QSize(25, 25))
        self.pushButton_4.setObjectName("pushButton_4")
        self.horizontalLayout_2.addWidget(self.pushButton_4)
        self.pushButton_5 = QtWidgets.QPushButton(self.widget_2)
        self.pushButton_5.setMinimumSize(QtCore.QSize(25, 25))
        self.pushButton_5.setMaximumSize(QtCore.QSize(25, 25))
        self.pushButton_5.setObjectName("pushButton_5")
        self.horizontalLayout_2.addWidget(self.pushButton_5)
        self.pushButton_6 = QtWidgets.QPushButton(self.widget_2)
        self.pushButton_6.setMinimumSize(QtCore.QSize(25, 25))
        self.pushButton_6.setMaximumSize(QtCore.QSize(25, 25))
        self.pushButton_6.setObjectName("pushButton_6")
        self.horizontalLayout_2.addWidget(self.pushButton_6)
        self.lineEdit = QtWidgets.QLineEdit(self.widget_2)
        self.lineEdit.setMinimumSize(QtCore.QSize(0, 25))
        self.lineEdit.setMaximumSize(QtCore.QSize(16777215, 25))
        self.lineEdit.setStyleSheet("background-color:rgb(32,32,32);\n"
"border-radius:5px;\n"
"color:rgb(144,144,144);\n"
"padding-left:5px;")
        self.lineEdit.setObjectName("lineEdit")
        self.horizontalLayout_2.addWidget(self.lineEdit)
        self.verticalLayout_3.addLayout(self.horizontalLayout_2)
        self.verticalLayout_2.addWidget(self.widget_2)
        # Main browser view with a dark default page background.
        self.webEngineView = QWebEngineView(self.widget)
        self.webEngineView.page().setBackgroundColor(QtGui.QColor(45, 45, 45, 255))
        self.webEngineView.setObjectName("webEngineView")
        self.verticalLayout_2.addWidget(self.webEngineView)
        # Bottom status strip with rounded lower corners.
        self.label_2 = QtWidgets.QLabel(self.widget)
        self.label_2.setMinimumSize(QtCore.QSize(0, 20))
        self.label_2.setMaximumSize(QtCore.QSize(16777215, 20))
        self.label_2.setStyleSheet("background-color:rgb(45,45,45);\n"
"border-bottom-left-radius:20px;\n"
"border-bottom-right-radius:20px;\n"
"color:rgb(144,144,144);\n"
"")
        self.label_2.setAlignment(QtCore.Qt.AlignCenter)
        self.label_2.setObjectName("label_2")
        self.verticalLayout_2.addWidget(self.label_2)
        self.verticalLayout.addWidget(self.widget)

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Install all user-visible strings (generated translation hook)."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Form"))
        # NOTE(review): "<NAME>" looks like a placeholder left in the .ui
        # file (window title text) — confirm the intended title.
        self.label_6.setText(_translate("Form", "<NAME>"))
        self.pushButton.setText(_translate("Form", "\\"))
        self.pushButton_3.setText(_translate("Form", "o"))
        self.pushButton_2.setText(_translate("Form", "X"))
        self.pushButton_4.setText(_translate("Form", "â"))
        self.pushButton_5.setText(_translate("Form", "ê"))
        self.pushButton_6.setText(_translate("Form", "d"))
        self.label_2.setText(_translate("Form", "DEVELOP BY JEHANKANDY"))
| StarcoderdataPython |
3456367 | <reponame>CodeButt3rs/CodeButt3rsBot
import datetime
import discord
import asyncio
import json
import random
import threading
from random import randrange
from discord_components.component import ButtonStyle
from discord_components import Button
from DatabaseTools import Database
from discord.ext import commands
from discord.utils import get
from tzlocal import get_localzone
from DjangoORM import giveawayDelete, giveawayObject, giveawayWinnerSet
class Giveaways(commands.Cog):
    """Guild giveaway management: channel setup, creation, countdown and entries.

    Giveaway state is persisted per-message as Giveaways/<message_id>.json and
    mirrored to a Django ORM through the giveaway* helper threads.
    """

    def __init__(self, bot):
        self.bot = bot
        print(datetime.datetime.now(), "Giveaways module loaded!")

    @commands.has_any_role('🎉Giveaways')
    @commands.guild_only()
    @commands.group(name='giveaway')
    async def giveaway(self, ctx):
        # Group root: only replies when no subcommand was invoked.
        if ctx.invoked_subcommand is None:
            embed = discord.Embed(description='Choice correct giveaway command!')
            await ctx.send(embed=embed)

    @commands.has_permissions(administrator=True)
    @commands.guild_only()
    @giveaway.command(name='channel')
    async def giveawayChannel(self, ctx):
        """Create the read-only 🎉Giveaways channel for this guild (admins only)."""
        fetch = await Database.getGiveawaysChannel(self=Database, guild=ctx.guild)
        if get(ctx.guild.channels, id=fetch) is not None:
            return print(datetime.datetime.now(), "Can't create Giveaways Channel while another one exists")
        # Members may read but not post in the giveaways channel.
        overwrites={
            ctx.guild.default_role: discord.PermissionOverwrite(send_messages=False, read_messages=True)
        }
        channel = await ctx.guild.create_text_channel(name='🎉Giveaways', overwrites=overwrites)
        await Database.setGiveawaysChannel(self=Database, guild=ctx.guild, id=channel.id)
        print(datetime.datetime.now(), ctx.author, 'has created the Giveaways channel')

    @commands.has_any_role('🎉Giveaways')
    @commands.guild_only()
    @giveaway.command(name='create')
    async def giveawayCreate(self, ctx, time: int, item):
        """Start a giveaway for *item* lasting *time* minutes and run its countdown.

        This coroutine stays alive for the whole giveaway, updating the embed
        once per minute and drawing a winner when the timer reaches zero.
        """
        if time <= 0:
            return await ctx.reply(f":pushpin: {ctx.author.mention}, I can't create giveaway with less 10 mins in time!")
        fetch = await Database.getGiveawaysChannel(self=Database, guild=ctx.guild)
        channel = get(ctx.guild.channels, id=fetch)
        if channel is None:
            return print(datetime.datetime.now(), "Can't create Giveaway: Channel doesn't exist")
        emb = discord.Embed(
            title = f'🎉 Giveaway # by {ctx.author.name}!',
            color = ctx.author.color,
            timestamp = (datetime.datetime.now().astimezone(get_localzone())),
            colour=0xFFD966
        )
        end = datetime.datetime.now().astimezone(get_localzone()) + datetime.timedelta(seconds= time*60)
        emb.add_field(name='Prize', value=item, inline=False)
        emb.add_field(name='Ends at', value=end.strftime("%b %d %Y %H:%M:%S"), inline=False)
        # Placeholder fields (indices 2 and 3) overwritten every minute below.
        emb.add_field(name = 'Null', value = f'Null', inline=False )
        emb.add_field(name = 'Null', value = f'Null', inline=False )
        emb.set_footer(text=f'Created by {self.bot.user.name}')
        # NOTE(review): 'everyone' is sent as plain text, not an @everyone
        # mention — confirm whether a real mention was intended.
        msg = await channel.send('everyone',
            embed=emb,
            components =
            [Button(label= '🎉 Enter giveaway', style=ButtonStyle.green)])
        # The message id becomes the giveaway id once known.
        emb.title = f'🎉 Giveaway #{msg.id} by {ctx.author.name}!'
        await msg.edit(embed=emb)
        # JSON area: on-disk state keyed by message id.
        data = {
            'time': f'{datetime.datetime.now().astimezone(get_localzone()).strftime("%b %d %Y %H:%M:%S")}',
            'prize': item,
            'hostedBy': ctx.author.id,
            'status': True,
            'winner': None,
            'participants': [],
        }
        with open(f"Giveaways/{msg.id}.json", "w") as i:
            json.dump(data, i)
        print(datetime.datetime.now(), 'Giveaway #', msg.id, 'has created by', ctx.author, 'with item', item, 'and time', time)
        # NOTE(review): start() followed immediately by join() makes this
        # ORM call effectively synchronous and blocks the event loop while
        # it runs — confirm this is acceptable.
        t = threading.Thread(target=giveawayObject, args=(ctx, msg, end, item))
        t.start()
        t.join()
        # Countdown loop: one iteration per minute until expiry.
        while time > 0:
            with open(f"Giveaways/{msg.id}.json", "r") as i:
                data = json.load(i)
            if time <= 15:
                emb.title = f'🎉 Giveaway #{msg.id} by {ctx.author.name}! LAST CHANCE TO ENTER!'
                emb.colour = 0xFF0000
            if time < 60:
                emb.set_field_at(index= 2, name = 'Remaining time', value = f'**Ends in {time} mins**', inline=False )
            else:
                _timeHrs = time // 60
                _timeMins = time - (_timeHrs * 60)
                emb.set_field_at(index= 2, name = 'Remaining time', value = f'**Ends in {_timeHrs} hrs and {_timeMins} mins**', inline=False )
            emb.set_field_at(index = 3, name = 'Number of participants', value = f"`{len(data['participants'])}`", inline=False )
            try:
                await msg.edit(embed=emb)
            except:
                # Message gone (e.g. deleted by a moderator): drop the ORM row
                # and abandon the countdown.
                print(datetime.datetime.now(), "Can't find giveaway: maybe it was deleted")
                threading.Thread(target=giveawayDelete(msg)).start()
                break
            time += -1
            await asyncio.sleep(60)
        if time <= 0:
            # Timer expired normally: pick and announce a winner.
            emb.clear_fields()
            emb.title = f'🎉 Giveaway #{msg.id} by {ctx.author.name}'
            with open(f"Giveaways/{msg.id}.json", "r") as i:
                data = json.load(i)
            data['status'] = False
            if (len(data['participants'])) == 0:
                emb.add_field(name='Winner', value='No valid entrants, so a winner could not be determined!')
                emb.add_field(name='Prize', value=item, inline=False)
                data['winner'] = 'No valid entrants'
                with open(f"Giveaways/{msg.id}.json", "w") as i:
                    json.dump(data, i)
                print(datetime.datetime.now(), 'Giveaway #', msg.id, 'created by', ctx.author, 'has ended! No valid entrants, so a winner could not be determined.')
                threading.Thread(target=giveawayWinnerSet(msg, "No valid entrants")).start()
                return await msg.edit(embed=emb, components = [])
            else:
                # Uniform random draw over the recorded participant ids.
                random.seed(randrange(10000))
                winnerNumber = randrange(len(data['participants']))
                winnerId = data['participants'][winnerNumber]
                winner = get(ctx.guild.members, id=winnerId)
                emb.add_field(name='Winner', value=f'{winner.mention} won {item}!')
                emb.colour = 0xFFD966
                emb.add_field(name='Ended at', value=end.strftime("%b %d %Y %H:%M:%S"), inline=False)
                await msg.edit(embed=emb, components = [])
                data['winner'] = winner.id
                print(datetime.datetime.now(), 'Giveaway #', msg.id, 'created by', ctx.author, 'has ended! Random Number -', winnerNumber, ',', winner,'has won', item)
                threading.Thread(target=giveawayWinnerSet(msg, winner.id)).start()
                with open(f"Giveaways/{msg.id}.json", "w") as i:
                    json.dump(data, i)

    @commands.Cog.listener()
    async def on_button_click(self, interaction):
        """Register a button press as a giveaway entry (ignores non-giveaway buttons)."""
        guild = get(self.bot.guilds, id=int(interaction.raw_data['d']['guild_id']))
        # Ignore clicks on the welcome-message button handled elsewhere.
        if int(interaction.raw_data['d']['message']['id']) == await Database.getWelcomeMsg(Database, guild):
            return
        try:
            with open(f"Giveaways/{int(interaction.raw_data['d']['message']['id'])}.json", "r") as i:
                data = json.load(i)
            if interaction.user.id in data['participants']:
                return await interaction.respond(content = "You're already in giveaway list")
            if data['hostedBy'] == interaction.user.id:
                return await interaction.respond(content = "You can't participant in your own giveaway")
            else:
                data['participants'].append(interaction.user.id)
                with open(f"Giveaways/{int(interaction.raw_data['d']['message']['id'])}.json", "w") as i:
                    json.dump(data, i)
                return await interaction.respond(content = "You were added to the participants list")
        except:
            # NOTE(review): bare except silently swallows all errors (including
            # JSON/IO bugs), not just "no such giveaway file" — consider
            # narrowing to FileNotFoundError.
            pass
def setup(bot):
    # discord.py extension entry point: registers the Giveaways cog on load.
    bot.add_cog(Giveaways(bot))
11295972 | import pytest
import pandas as pd
from calculator import Calculate
@pytest.mark.parametrize(
    "test_input, expected",
    [("3+5", {"elevator":[1]}),
    ("2+4", {"elevator":[]}),
    ("6*9", {"elevator":[]})
    ],
)
def test_eval(test_input, expected):
    """find_in_listings should return the listing ids whose text matches.

    NOTE(review): the expectations imply only '3+5' matches listing 1 under
    the 'elevator' key — the matching rule lives in Calculate and is not
    visible here; confirm against calculator.py.
    """
    # Single-row frame: listing 1 with the parametrized description text.
    test_df = pd.DataFrame({"listing_id": [1], "desc_addinfo": [test_input]})
    result = Calculate().find_in_listings(test_df)
    assert result == expected
| StarcoderdataPython |
1634184 | import asyncio
from copy import deepcopy
from dataclasses import dataclass
from importlib.resources import path
from subprocess import Popen
from typing import List, Optional
import google.protobuf
from multiaddr import Multiaddr
import hivemind.hivemind_cli as cli
import hivemind.p2p.p2p_daemon_bindings.p2pclient as p2pclient
from hivemind.p2p.p2p_daemon_bindings.datastructures import PeerID, StreamInfo
from hivemind.proto import p2pd_pb2
from hivemind.utils import MSGPackSerializer
from hivemind.utils.logging import get_logger
from hivemind.utils.networking import find_open_port
logger = get_logger(__name__)
P2PD_FILENAME = 'p2pd'
NUM_RETRIES = 3
RETRY_DELAY = 0.4
class P2PInterruptedError(Exception):
    # Raised by the unary-handler watchdog when the remote peer closes the
    # stream before the handler finishes.
    pass
@dataclass(frozen=False)
class P2PContext(object):
    """Mutable per-handler context passed to unary handlers.

    peer_id/peer_addr are filled in per call from the incoming stream info.
    """
    id: str
    port: int
    handle_name: str
    peer_id: PeerID = None
    peer_addr: Multiaddr = None
class P2P:
"""
Forks a child process and executes p2pd command with given arguments.
Can be used for peer to peer communication and procedure calls.
Sends SIGKILL to the child in destructor.
"""
HEADER_LEN = 8
BYTEORDER = 'big'
PB_HEADER_LEN = 1
RESULT_MESSAGE = b'\x00'
ERROR_MESSAGE = b'\x01'
DHT_MODE_MAPPING = {
'dht': {'dht': 1},
'dht_server': {'dhtServer': 1},
'dht_client': {'dhtClient': 1},
}
FORCE_REACHABILITY_MAPPING = {
'public': {'forceReachabilityPublic': 1},
'private': {'forceReachabilityPrivate': 1},
}
def __init__(self):
self._child = None
self._alive = False
self._listen_task = None
self._server_stopped = asyncio.Event()
@classmethod
async def create(cls, *args, quic: bool = True, tls: bool = True, conn_manager: bool = True,
dht_mode: str = 'dht_server', force_reachability: Optional[str] = None,
nat_port_map: bool = True, auto_nat: bool = True, bootstrap: bool = False,
bootstrap_peers: Optional[List[str]] = None, use_global_ipfs: bool = False, host_port: int = None,
daemon_listen_port: int = None, **kwargs):
"""
Start a new p2pd process and connect to it.
:param args:
:param quic: Enables the QUIC transport
:param tls: Enables TLS1.3 channel security protocol
:param conn_manager: Enables the Connection Manager
:param dht_mode: DHT mode (dht_client/dht_server/dht)
:param force_reachability: Force reachability mode (public/private)
:param nat_port_map: Enables NAT port mapping
:param auto_nat: Enables the AutoNAT service
:param bootstrap: Connects to bootstrap peers and bootstraps the dht if enabled
:param bootstrap_peers: List of bootstrap peers; defaults to the IPFS DHT peers
:param use_global_ipfs: Bootstrap to global ipfs (works only if bootstrap=True and bootstrap_peers=None)
:param host_port: port for p2p network
:param daemon_listen_port: port for connection daemon and client binding
:param kwargs:
:return: new wrapper for p2p daemon
"""
assert not (bootstrap and bootstrap_peers is None and not use_global_ipfs), \
'Trying to create with bootstrap node without bootstrap nodes list. ' \
'It is very dangerous, because p2pd connects to global ipfs and it is very unstable. ' \
'If you really want this, pass use_global_ipfs=True'
assert not (bootstrap_peers is not None and use_global_ipfs), \
'Non empty bootstrap_nodes and use_global_ipfs=True are incompatible.' \
'Choose one option: your nodes list (preferable) or global ipfs (very unstable)'
self = cls()
with path(cli, P2PD_FILENAME) as p:
p2pd_path = p
bootstrap_peers = cls._make_bootstrap_peers(bootstrap_peers)
dht = cls.DHT_MODE_MAPPING.get(dht_mode, {'dht': 0})
force_reachability = cls.FORCE_REACHABILITY_MAPPING.get(force_reachability, {})
proc_args = self._make_process_args(
str(p2pd_path), *args,
quic=quic, tls=tls, connManager=conn_manager,
natPortMap=nat_port_map, autonat=auto_nat,
b=bootstrap, **{**bootstrap_peers, **dht, **force_reachability, **kwargs})
self._assign_daemon_ports(host_port, daemon_listen_port)
for try_count in range(NUM_RETRIES):
try:
self._initialize(proc_args)
await self._wait_for_client(RETRY_DELAY * (2 ** try_count))
break
except Exception as e:
logger.debug(f"Failed to initialize p2p daemon: {e}")
self._terminate()
if try_count == NUM_RETRIES - 1:
raise
self._assign_daemon_ports()
return self
@classmethod
async def replicate(cls, daemon_listen_port: int, host_port: int):
"""
Connect to existing p2p daemon
:param daemon_listen_port: port for connection daemon and client binding
:param host_port: port for p2p network
:return: new wrapper for existing p2p daemon
"""
self = cls()
# There is no child under control
# Use external already running p2pd
self._child = None
self._alive = True
self._assign_daemon_ports(host_port, daemon_listen_port)
self._client_listen_port = find_open_port()
self._client = p2pclient.Client(
Multiaddr(f'/ip4/127.0.0.1/tcp/{self._daemon_listen_port}'),
Multiaddr(f'/ip4/127.0.0.1/tcp/{self._client_listen_port}'))
await self._wait_for_client()
return self
async def wait_for_at_least_n_peers(self, n_peers, attempts=3, delay=1):
for _ in range(attempts):
peers = await self._client.list_peers()
if len(peers) >= n_peers:
return
await asyncio.sleep(delay)
raise RuntimeError('Not enough peers')
def _initialize(self, proc_args: List[str]) -> None:
proc_args = deepcopy(proc_args)
proc_args.extend(self._make_process_args(
hostAddrs=f'/ip4/0.0.0.0/tcp/{self._host_port},/ip4/0.0.0.0/udp/{self._host_port}/quic',
listen=f'/ip4/12172.16.17.32/tcp/{self._daemon_listen_port}'
))
self._child = Popen(args=proc_args, encoding="utf8")
self._alive = True
self._client_listen_port = find_open_port()
self._client = p2pclient.Client(
Multiaddr(f'/ip4/127.0.0.1/tcp/{self._daemon_listen_port}'),
Multiaddr(f'/ip4/127.0.0.1/tcp/{self._client_listen_port}'))
async def _wait_for_client(self, delay=0):
await asyncio.sleep(delay)
encoded = await self._client.identify()
self.id = encoded[0].to_base58()
def _assign_daemon_ports(self, host_port=None, daemon_listen_port=None):
if host_port is None:
host_port = find_open_port()
if daemon_listen_port is None:
daemon_listen_port = find_open_port()
while daemon_listen_port == host_port:
daemon_listen_port = find_open_port()
self._host_port, self._daemon_listen_port = host_port, daemon_listen_port
@staticmethod
async def send_raw_data(byte_str, writer):
request = len(byte_str).to_bytes(P2P.HEADER_LEN, P2P.BYTEORDER) + byte_str
writer.write(request)
@staticmethod
async def send_msgpack(data, writer):
raw_data = MSGPackSerializer.dumps(data)
await P2P.send_raw_data(raw_data, writer)
@staticmethod
async def send_protobuf(protobuf, out_proto_type, writer):
if type(protobuf) != out_proto_type:
raise TypeError('Unary handler returned protobuf of wrong type.')
if out_proto_type == p2pd_pb2.RPCError:
await P2P.send_raw_data(P2P.ERROR_MESSAGE, writer)
else:
await P2P.send_raw_data(P2P.RESULT_MESSAGE, writer)
await P2P.send_raw_data(protobuf.SerializeToString(), writer)
@staticmethod
async def receive_raw_data(reader: asyncio.StreamReader, header_len=HEADER_LEN):
header = await reader.readexactly(header_len)
content_length = int.from_bytes(header, P2P.BYTEORDER)
data = await reader.readexactly(content_length)
return data
@staticmethod
async def receive_msgpack(reader):
return MSGPackSerializer.loads(await P2P.receive_raw_data(reader))
@staticmethod
async def receive_protobuf(in_proto_type, reader):
msg_type = await P2P.receive_raw_data(reader)
if msg_type == P2P.RESULT_MESSAGE:
protobuf = in_proto_type()
protobuf.ParseFromString(await P2P.receive_raw_data(reader))
return protobuf, None
elif msg_type == P2P.ERROR_MESSAGE:
protobuf = p2pd_pb2.RPCError()
protobuf.ParseFromString(await P2P.receive_raw_data(reader))
return None, protobuf
else:
raise TypeError('Invalid Protobuf message type')
@staticmethod
def _handle_stream(handle):
async def do_handle_stream(stream_info, reader, writer):
try:
request = await P2P.receive_raw_data(reader)
except asyncio.IncompleteReadError:
logger.debug("Incomplete read while receiving request from peer")
writer.close()
return
try:
result = handle(request)
await P2P.send_raw_data(result, writer)
finally:
writer.close()
return do_handle_stream
@staticmethod
def _handle_unary_stream(handle, context, in_proto_type, out_proto_type):
async def watchdog(reader: asyncio.StreamReader):
await reader.read(n=1)
raise P2PInterruptedError()
async def do_handle_unary_stream(
stream_info: StreamInfo,
reader: asyncio.StreamReader,
writer: asyncio.StreamWriter) -> None:
try:
try:
request = await P2P.receive_protobuf(in_proto_type, reader)
except asyncio.IncompleteReadError:
logger.debug("Incomplete read while receiving request from peer")
return
except google.protobuf.message.DecodeError as error:
logger.exception(error)
return
context.peer_id, context.peer_addr = stream_info.peer_id, stream_info.addr
done, pending = await asyncio.wait([watchdog(reader), handle(request, context)],
return_when=asyncio.FIRST_COMPLETED)
try:
result = done.pop().result()
await P2P.send_protobuf(result, out_proto_type, writer)
except P2PInterruptedError:
pass
except Exception as exc:
error = p2pd_pb2.RPCError(message=str(exc))
await P2P.send_protobuf(error, p2pd_pb2.RPCError, writer)
finally:
pending_task = pending.pop()
pending_task.cancel()
try:
await pending_task
except asyncio.CancelledError:
pass
finally:
writer.close()
return do_handle_unary_stream
def start_listening(self):
async def listen():
async with self._client.listen():
await self._server_stopped.wait()
self._listen_task = asyncio.create_task(listen())
async def stop_listening(self):
if self._listen_task is not None:
self._server_stopped.set()
self._listen_task.cancel()
try:
await self._listen_task
except asyncio.CancelledError:
self._listen_task = None
self._server_stopped.clear()
async def add_stream_handler(self, name, handle):
if self._listen_task is None:
self.start_listening()
await self._client.stream_handler(name, self._handle_stream(handle))
async def add_unary_handler(self, name, handle, in_proto_type, out_proto_type):
if self._listen_task is None:
self.start_listening()
context = P2PContext(id=self.id, port=self._host_port, handle_name=name)
await self._client.stream_handler(
name, P2P._handle_unary_stream(handle, context, in_proto_type, out_proto_type))
async def call_peer_handler(self, peer_id, handler_name, input_data):
libp2p_peer_id = PeerID.from_base58(peer_id)
stream_info, reader, writer = await self._client.stream_open(libp2p_peer_id, (handler_name,))
try:
await P2P.send_raw_data(input_data, writer)
return await P2P.receive_raw_data(reader)
finally:
writer.close()
    def __del__(self):
        # Best-effort cleanup: make sure the daemon subprocess is killed
        # even if the owner never called shutdown().
        self._terminate()
    @property
    def is_alive(self):
        """Whether this instance has not been terminated yet (see _terminate)."""
        return self._alive
async def shutdown(self):
await asyncio.get_event_loop().run_in_executor(None, self._terminate)
def _terminate(self):
self._alive = False
if self._child is not None and self._child.poll() is None:
self._child.kill()
self._child.wait()
@staticmethod
def _make_process_args(*args, **kwargs) -> List[str]:
proc_args = []
proc_args.extend(
str(entry) for entry in args
)
proc_args.extend(
f'-{key}={P2P._convert_process_arg_type(value)}' if value is not None else f'-{key}'
for key, value in kwargs.items()
)
return proc_args
@staticmethod
def _convert_process_arg_type(val):
if isinstance(val, bool):
return 1 if val else 0
return val
@staticmethod
def _make_bootstrap_peers(nodes):
if nodes is None:
return {}
return {'bootstrapPeers': ','.join(nodes)}
| StarcoderdataPython |
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.reverse import reverse
from rest_framework_simplejwt.tokens import RefreshToken
@api_view(['GET'])
def root(request, fmt=None):
    """Top-level API index: points clients at the v1 root."""
    links = {'v1': reverse('api_v1:root', request=request, format=fmt)}
    return Response(links)
@api_view(['GET'])
def v1_root(request, fmt=None):
    """v1 API index: lists the redirect collection and the token endpoints."""
    return Response({
        'redirects': reverse('api_v1:redirects:redirect-list',
                             request=request, format=fmt),
        'token': reverse('api_v1:token_root', request=request, format=fmt),
    })
@api_view(['GET'])
def token_root(request, fmt=None):
    """Token API index: auth, refresh and verify endpoints."""
    endpoints = {}
    for name in ('auth', 'refresh', 'verify'):
        endpoints[name] = reverse('api_v1:token_' + name,
                                  request=request, format=fmt)
    return Response(endpoints)
@api_view(['POST'])
def token_refresh(request):
    """Issue a fresh access token from the ``burl_refresh_token`` cookie.

    Returns 200 with ``{"access": <token>}`` when the cookie holds a valid
    refresh token, otherwise 401.
    """
    token = request.COOKIES.get("burl_refresh_token")
    if not token:
        return Response("unauthorized", 401)
    refresh = RefreshToken(str(token))
    access = str(refresh.access_token)
    if access:
        return Response({"access": access}, 200)
    # BUG FIX: the original returned Response({"unauthorized"}, 401) -- a
    # *set* literal, which the DRF renderer cannot serialize.  Return a
    # plain string like the other failure path.
    return Response("unauthorized", 401)
@api_view(['POST'])
def token_refresh_revoke(_request):
    """Log the client out by deleting the refresh-token cookie."""
    response = Response("ok")
    response.delete_cookie("burl_refresh_token")
    return response
| StarcoderdataPython |
# test_cmalpha.py
# <NAME>, April 2014
# Reference: Aircraft Dynamics: from Modeling to Simulation, by <NAME>
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
import SUAVE
import numpy as np
from SUAVE.Methods.Flight_Dynamics.Static_Stability.Approximations.datcom import datcom
from SUAVE.Methods.Flight_Dynamics.Static_Stability.Approximations.Supporting_Functions.trapezoid_mac import trapezoid_mac
#from SUAVE.Methods.Flight_Dynamics.Static_Stability.Approximations.Supporting_Functions.convert_sweep import convert_sweep
from SUAVE.Methods.Flight_Dynamics.Static_Stability.Approximations.Supporting_Functions.trapezoid_ac_x import trapezoid_ac_x
#from SUAVE.Methods.Flight_Dynamics.Static_Stability.Approsimations.Supporting_Functions.extend_to_ref_area import extend_to_ref_area
from SUAVE.Methods.Flight_Dynamics.Static_Stability.Approximations.Tube_Wing.taw_cmalpha import taw_cmalpha
from SUAVE.Methods.Geometry.Three_Dimensional.compute_span_location_from_chord_length import compute_span_location_from_chord_length
from SUAVE.Core import Units
from SUAVE.Core import (
Data, Container,
)
def main():
    """Regression test for taw_cmalpha (pitching-moment slope Cm_alpha).

    Builds three tube-and-wing aircraft models -- a Boeing 747-200, a
    Beech 99 and a SIAI Marchetti S-211 -- computes Cm_alpha for each with
    taw_cmalpha() and asserts the result is within 1% of recorded values.
    All geometry is entered in English units and converted via Units.
    """
    #Parameters Required
    #Using values for a Boeing 747-200
    vehicle = SUAVE.Vehicle()
    #print vehicle
    vehicle.mass_properties.max_zero_fuel=238780*Units.kg
    vehicle.mass_properties.max_takeoff =785000.*Units.lbs
    wing = SUAVE.Components.Wings.Wing()
    wing.tag = 'main_wing'
    wing.areas.reference = 5500.0 * Units.feet**2
    wing.spans.projected = 196.0 * Units.feet
    wing.chords.mean_aerodynamic = 27.3 * Units.feet
    wing.chords.root = 42.9 * Units.feet #54.5ft
    wing.sweeps.quarter_chord = 42.0 * Units.deg # Leading edge
    wing.sweeps.leading_edge = 42.0 * Units.deg # Same as the quarter chord sweep (ignore why EMB)
    wing.taper = 14.7/42.9 #14.7/54.5
    wing.aspect_ratio = wing.spans.projected**2/wing.areas.reference
    wing.symmetric = True
    wing.vertical = False
    wing.origin = np.array([58.6,0,0]) * Units.feet
    wing.aerodynamic_center = np.array([112.2*Units.feet,0.,0.])-wing.origin#16.16 * Units.meters,0.,0,])
    wing.dynamic_pressure_ratio = 1.0
    wing.ep_alpha = 0.0
    span_location_mac =compute_span_location_from_chord_length(wing, wing.chords.mean_aerodynamic)
    mac_le_offset =.8*np.sin(wing.sweeps.leading_edge)*span_location_mac #assume that 80% of the chord difference is from leading edge sweep
    wing.mass_properties.center_of_gravity[0]=.3*wing.chords.mean_aerodynamic+mac_le_offset
    Mach = np.array([0.198])
    conditions = Data()
    conditions.weights = Data()
    # Wing lift-curve slope via the DATCOM approximation.
    conditions.lift_curve_slope = datcom(wing,Mach)
    conditions.weights.total_mass=np.array([[vehicle.mass_properties.max_takeoff]])
    wing.CL_alpha = conditions.lift_curve_slope
    vehicle.reference_area = wing.areas.reference
    vehicle.append_component(wing)
    main_wing_CLa = wing.CL_alpha
    main_wing_ar = wing.aspect_ratio
    wing = SUAVE.Components.Wings.Wing()
    wing.tag = 'horizontal_stabilizer'
    wing.areas.reference = 1490.55* Units.feet**2
    wing.spans.projected = 71.6 * Units.feet
    wing.sweeps.quarter_chord = 44.0 * Units.deg # leading edge
    wing.sweeps.leading_edge = 44.0 * Units.deg # Same as the quarter chord sweep (ignore why EMB)
    wing.taper = 7.5/32.6
    wing.aspect_ratio = wing.spans.projected**2/wing.areas.reference
    wing.origin = np.array([187.0,0,0]) * Units.feet
    wing.symmetric = True
    wing.vertical = False
    wing.dynamic_pressure_ratio = 0.95
    # Downwash derivative (d-epsilon/d-alpha) from main-wing lift slope.
    wing.ep_alpha = 2.0*main_wing_CLa/np.pi/main_wing_ar
    wing.aerodynamic_center = [trapezoid_ac_x(wing), 0.0, 0.0]
    wing.CL_alpha = datcom(wing,Mach)
    vehicle.append_component(wing)
    fuselage = SUAVE.Components.Fuselages.Fuselage()
    fuselage.tag = 'fuselage'
    fuselage.x_root_quarter_chord = 77.0 * Units.feet
    fuselage.lengths.total = 229.7 * Units.feet
    fuselage.width = 20.9 * Units.feet
    vehicle.append_component(fuselage)
    vehicle.mass_properties.center_of_gravity=np.array([112.2,0,0]) * Units.feet
    #configuration.mass_properties.zero_fuel_center_of_gravity=np.array([76.5,0,0])*Units.feet #just put a number here that got the expected value output; may want to change
    fuel =SUAVE.Components.Physical_Component()
    fuel.origin =wing.origin
    fuel.mass_properties.center_of_gravity =wing.mass_properties.center_of_gravity
    fuel.mass_properties.mass =vehicle.mass_properties.max_takeoff-vehicle.mass_properties.max_zero_fuel
    #find zero_fuel_center_of_gravity
    cg =vehicle.mass_properties.center_of_gravity
    MTOW =vehicle.mass_properties.max_takeoff
    fuel_cg =fuel.origin+fuel.mass_properties.center_of_gravity
    fuel_mass =fuel.mass_properties.mass
    sum_moments_less_fuel=(cg*MTOW-fuel_cg*fuel_mass)
    #now define configuration for calculation
    configuration = Data()
    configuration.mass_properties = Data()
    configuration.mass_properties.center_of_gravity = vehicle.mass_properties.center_of_gravity
    configuration.mass_properties.max_zero_fuel =vehicle.mass_properties.max_zero_fuel
    configuration.fuel =fuel
    configuration.mass_properties.zero_fuel_center_of_gravity=sum_moments_less_fuel/vehicle.mass_properties.max_zero_fuel
    #print configuration
    cm_a = taw_cmalpha(vehicle,Mach,conditions,configuration)
    expected =-1.56222373 #Should be -1.45
    error = Data()
    error.cm_a_747 = (cm_a - expected)/expected
    #Parameters Required
    #Using values for a Beech 99
    vehicle = SUAVE.Vehicle()
    vehicle.mass_properties.max_takeoff =4727*Units.kg #from Wikipedia
    vehicle.mass_properties.empty =2515*Units.kg
    vehicle.mass_properties.max_zero_fuel=vehicle.mass_properties.max_takeoff-vehicle.mass_properties.empty+15.*225*Units.lbs #15 passenger ac
    wing = SUAVE.Components.Wings.Wing()
    wing.tag = 'main_wing'
    wing.areas.reference = 280.0 * Units.feet**2
    wing.spans.projected = 46.0 * Units.feet
    wing.chords.mean_aerodynamic = 6.5 * Units.feet
    wing.chords.root = 7.9 * Units.feet
    wing.sweeps.leading_edge = 4.0 * Units.deg # Same as the quarter chord sweep (ignore why EMB)
    wing.sweeps.quarter_chord = 4.0 * Units.deg # Leading edge
    wing.taper = 0.47
    wing.aspect_ratio = wing.spans.projected**2/wing.areas.reference
    wing.symmetric = True
    wing.vertical = False
    wing.origin = np.array([15.,0,0]) * Units.feet
    wing.aerodynamic_center = np.array([trapezoid_ac_x(wing), 0. , 0. ])
    wing.dynamic_pressure_ratio = 1.0
    wing.ep_alpha = 0.0
    span_location_mac =compute_span_location_from_chord_length(wing, wing.chords.mean_aerodynamic)
    mac_le_offset =.8*np.sin(wing.sweeps.leading_edge)*span_location_mac #assume that 80% of the chord difference is from leading edge sweep
    wing.mass_properties.center_of_gravity[0]=.3*wing.chords.mean_aerodynamic+mac_le_offset
    Mach = np.array([0.152])
    reference = SUAVE.Core.Container()
    conditions = Data()
    conditions.lift_curve_slope = datcom(wing,Mach)
    conditions.weights=Data()
    conditions.weights.total_mass=np.array([[vehicle.mass_properties.max_takeoff]])
    wing.CL_alpha = conditions.lift_curve_slope
    vehicle.reference_area = wing.areas.reference
    vehicle.append_component(wing)
    main_wing_CLa = wing.CL_alpha
    main_wing_ar = wing.aspect_ratio
    wing = SUAVE.Components.Wings.Wing()
    wing.tag = 'horizontal_stabilizer'
    wing.areas.reference = 100.5 * Units.feet**2
    wing.spans.projected = 22.5 * Units.feet
    wing.sweeps.leading_edge = 21.0 * Units.deg # Same as the quarter chord sweep (ignore why EMB)
    wing.sweeps.quarter_chord = 21.0 * Units.deg # leading edge
    wing.taper = 3.1/6.17
    wing.aspect_ratio = wing.spans.projected**2/wing.areas.reference
    wing.origin = np.array([36.3,0,0]) * Units.feet
    wing.symmetric = True
    wing.vertical = False
    wing.dynamic_pressure_ratio = 0.95
    wing.ep_alpha = 2.0*main_wing_CLa/np.pi/main_wing_ar
    wing.aerodynamic_center = np.array([trapezoid_ac_x(wing), 0.0, 0.0])
    wing.CL_alpha = datcom(wing,Mach)
    vehicle.append_component(wing)
    fuselage = SUAVE.Components.Fuselages.Fuselage()
    fuselage.tag = 'fuselage'
    fuselage.x_root_quarter_chord = 5.4 * Units.feet
    fuselage.lengths.total = 44.0 * Units.feet
    fuselage.width = 5.4 * Units.feet
    vehicle.append_component(fuselage)
    vehicle.mass_properties.center_of_gravity = np.array([17.2,0,0]) * Units.feet
    # NOTE(review): `fuel` is reused from the 747 section above rather than
    # re-created; this works only because every field is overwritten here.
    fuel.origin =wing.origin
    fuel.mass_properties.center_of_gravity =wing.mass_properties.center_of_gravity
    fuel.mass_properties.mass =vehicle.mass_properties.max_takeoff-vehicle.mass_properties.max_zero_fuel
    #find zero_fuel_center_of_gravity
    cg =vehicle.mass_properties.center_of_gravity
    MTOW =vehicle.mass_properties.max_takeoff
    fuel_cg =fuel.origin+fuel.mass_properties.center_of_gravity
    fuel_mass =fuel.mass_properties.mass
    sum_moments_less_fuel=(cg*MTOW-fuel_cg*fuel_mass)
    #now define configuration for calculation
    configuration = Data()
    configuration.mass_properties = Data()
    configuration.mass_properties.center_of_gravity = vehicle.mass_properties.center_of_gravity
    configuration.mass_properties.max_zero_fuel = vehicle.mass_properties.max_zero_fuel
    configuration.fuel =fuel
    configuration.mass_properties.zero_fuel_center_of_gravity=sum_moments_less_fuel/vehicle.mass_properties.max_zero_fuel
    #Method Test
    #print configuration
    cm_a = taw_cmalpha(vehicle,Mach,conditions,configuration)
    expected = -2.48843437 #Should be -2.08
    error.cm_a_beech_99 = (cm_a - expected)/expected
    #Parameters Required
    #Using values for an SIAI Marchetti S-211
    vehicle = SUAVE.Vehicle()
    vehicle.mass_properties.max_takeoff =2750*Units.kg #from Wikipedia
    vehicle.mass_properties.empty =1850*Units.kg
    vehicle.mass_properties.max_zero_fuel=vehicle.mass_properties.max_takeoff-vehicle.mass_properties.empty+2.*225*Units.lbs #2 passenger ac
    wing = SUAVE.Components.Wings.Wing()
    wing.tag = 'main_wing'
    wing.areas.reference = 136.0 * Units.feet**2
    wing.spans.projected = 26.3 * Units.feet
    wing.chords.mean_aerodynamic = 5.4 * Units.feet
    wing.chords.root = 7.03 * Units.feet
    wing.chords.tip = 3.1 * Units.feet
    wing.sweeps.quarter_chord = 19.5 * Units.deg # Leading edge
    wing.sweeps.leading_edge = 19.5 * Units.deg # Same as the quarter chord sweep (ignore why EMB)
    wing.taper = 3.1/7.03
    wing.aspect_ratio = wing.spans.projected**2/wing.areas.reference
    wing.symmetric = True
    wing.vertical = False
    wing.origin = np.array([13.5,0,0]) * Units.feet
    wing.aerodynamic_center = np.array([trapezoid_ac_x(wing),0.,0.])#16.6, 0. , 0. ]) * Units.feet - wing.origin
    wing.dynamic_pressure_ratio = 1.0
    wing.ep_alpha = 0.0
    span_location_mac =compute_span_location_from_chord_length(wing, wing.chords.mean_aerodynamic)
    mac_le_offset =.8*np.sin(wing.sweeps.leading_edge)*span_location_mac #assume that 80% of the chord difference is from leading edge sweep
    wing.mass_properties.center_of_gravity[0]=.3*wing.chords.mean_aerodynamic+mac_le_offset
    Mach = np.array([0.111])
    conditions = Data()
    conditions.lift_curve_slope = datcom(wing,Mach)
    conditions.weights=Data()
    conditions.weights.total_mass=np.array([[vehicle.mass_properties.max_takeoff]])
    wing.CL_alpha = conditions.lift_curve_slope
    vehicle.reference_area = wing.areas.reference
    vehicle.append_component(wing)
    main_wing_CLa = wing.CL_alpha
    main_wing_ar = wing.aspect_ratio
    wing = SUAVE.Components.Wings.Wing()
    wing.tag = 'horizontal_stabilizer'
    wing.areas.reference = 36.46 * Units.feet**2
    wing.spans.projected = 13.3 * Units.feet
    wing.sweeps.quarter_chord= 18.5 * Units.deg # leading edge
    wing.sweeps.leading_edge = 18.5 * Units.deg # Same as the quarter chord sweep (ignore why EMB)
    wing.taper = 1.6/3.88
    wing.aspect_ratio = wing.spans.projected**2/wing.areas.reference
    wing.origin = np.array([26.07,0.,0.]) * Units.feet
    wing.symmetric = True
    wing.vertical = False
    wing.dynamic_pressure_ratio = 0.9
    wing.ep_alpha = 2.0*main_wing_CLa/np.pi/main_wing_ar
    wing.aerodynamic_center = np.array([trapezoid_ac_x(wing), 0.0, 0.0])
    wing.CL_alpha = datcom(wing,Mach)
    span_location_mac =compute_span_location_from_chord_length(wing, wing.chords.mean_aerodynamic)
    mac_le_offset =.8*np.sin(wing.sweeps.leading_edge)*span_location_mac #assume that 80% of the chord difference is from leading edge sweep
    wing.mass_properties.center_of_gravity[0]=.3*wing.chords.mean_aerodynamic+mac_le_offset
    vehicle.append_component(wing)
    fuselage = SUAVE.Components.Fuselages.Fuselage()
    fuselage.tag = 'fuselage'
    fuselage.x_root_quarter_chord = 12.67 * Units.feet
    fuselage.lengths.total = 30.9 * Units.feet
    fuselage.width = ((2.94+5.9)/2) * Units.feet
    vehicle.append_component(fuselage)
    vehicle.mass_properties.center_of_gravity = np.array([16.6,0,0]) * Units.feet
    fuel.origin =wing.origin
    fuel.mass_properties.center_of_gravity =wing.mass_properties.center_of_gravity
    fuel.mass_properties.mass =vehicle.mass_properties.max_takeoff-vehicle.mass_properties.max_zero_fuel
    #find zero_fuel_center_of_gravity
    cg =vehicle.mass_properties.center_of_gravity
    MTOW =vehicle.mass_properties.max_takeoff
    fuel_cg =fuel.origin+fuel.mass_properties.center_of_gravity
    fuel_mass =fuel.mass_properties.mass
    sum_moments_less_fuel=(cg*MTOW-fuel_cg*fuel_mass)
    #now define configuration for calculation
    configuration = Data()
    configuration.mass_properties = Data()
    configuration.mass_properties.center_of_gravity = vehicle.mass_properties.center_of_gravity
    configuration.mass_properties.max_zero_fuel = vehicle.mass_properties.max_zero_fuel
    configuration.fuel =fuel
    configuration.mass_properties.zero_fuel_center_of_gravity=sum_moments_less_fuel/vehicle.mass_properties.max_zero_fuel
    #Method Test
    #print configuration
    cm_a = taw_cmalpha(vehicle,Mach,conditions,configuration)
    expected = -0.54071741 #Should be -0.6
    error.cm_a_SIAI = (cm_a - expected)/expected
    print error
    # Each relative error must be below 1%.
    for k,v in error.items():
        assert(np.abs(v)<0.01)
    return
# ----------------------------------------------------------------------
#   Call Main
# ----------------------------------------------------------------------
# Run the regression checks when executed as a script.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
3422256 |
# -*- coding: utf-8 -*-
import pytest
import sys
from .test_base_class import TestBaseClass
from .as_status_codes import AerospikeStatus
from aerospike import exception as e
from aerospike import predicates as p
import time
aerospike = pytest.importorskip("aerospike")
# Redundant belt-and-braces guard: importorskip above already skips the
# module, but keep the explicit exit for direct (non-pytest) execution.
try:
    import aerospike
except ImportError:
    # Narrowed from a bare `except:` so unrelated errors (KeyboardInterrupt,
    # SystemExit) are not swallowed.
    print("Please install aerospike python client.")
    sys.exit(1)
def get_geo_object():
    """Return the reference test polygon as an aerospike.GeoJSON object."""
    polygon_coordinates = [[
        [-124.500000, 37.000000],
        [-125.000000, 37.000000],
        [-121.000000, 38.080000],
        [-122.500000, 38.080000],
        [-124.500000, 37.000000],
    ]]
    return aerospike.GeoJSON({"type": "Polygon",
                              "coordinates": polygon_coordinates})
def add_geo_indexes(connection):
    """Create every geo index used by the tests, ignoring already-existing ones.

    Replaces nine copy-pasted try/except blocks with two data-driven loops;
    the set of (bin, index) pairs created is identical to the original.
    """
    # Plain geo2dsphere indexes on scalar GeoJSON bins.
    scalar_indexes = [
        ("loc", "loc_index"),
        ("loc_polygon", "loc_polygon_index"),
        ("loc_circle", "loc_circle_index"),
    ]
    for bin_name, index_name in scalar_indexes:
        try:
            connection.index_geo2dsphere_create(
                "test", "demo", bin_name, index_name)
        except e.IndexFoundError:
            pass
    # Geo indexes on collection bins (list / map-keys / map-values).
    collection_indexes = [
        (connection.index_list_create, "geo_list", "geo_list_index"),
        (connection.index_map_keys_create, "geo_map_keys", "geo_map_key_index"),
        (connection.index_map_values_create, "geo_map_vals", "geo_map_val_index"),
        (connection.index_list_create, "geo_loc_list", "geo_loc_list_index"),
        (connection.index_map_keys_create, "geo_loc_mk", "geo_loc_map_key_index"),
        (connection.index_map_values_create, "geo_loc_mv", "geo_loc_map_val_index"),
    ]
    for create, bin_name, index_name in collection_indexes:
        try:
            create("test", "demo", bin_name, aerospike.INDEX_GEO2DSPHERE,
                   index_name)
        except e.IndexFoundError:
            pass
def add_geo_data(connection):
    """Seed the test namespace with geospatial records.

    Writes ten point records (keys 0..9) whose longitude decreases and
    latitude increases in 0.2-degree steps, each also carrying the point in
    a list bin, a map-key bin and a map-value bin; plus two polygon records
    ('polygon' and 'polygon2') with the same bin layout.
    """
    pre = '{"type": "Point", "coordinates"'
    suf = ']}'
    for i in range(10):
        # Coordinates in tenths of a degree: lng runs -122.0 .. -120.2,
        # lat runs 37.5 .. 39.3.
        lng = 1220 - (2 * i)
        lat = 375 + (2 * i)
        key = ('test', 'demo', i)
        # Assemble the raw GeoJSON point string digit-by-digit.
        s = "{0}: [-{1}.{2}, {3}.{4}{5}".format(
            pre, (lng // 10), (lng % 10), (lat // 10), (lat % 10), suf)
        geo_object = aerospike.geojson(s)
        geo_list = [geo_object]
        geo_map_key = {geo_object: i}
        geo_map_val = {i: geo_object}
        connection.put(
            key,
            {
                "loc": geo_object,
                'geo_list': geo_list,
                'geo_map_keys': geo_map_key,
                'geo_map_vals': geo_map_val
            }
        )
    # Polygon record overlapping the point data (used by region queries).
    key = ('test', 'demo', 'polygon')
    geo_object_polygon = aerospike.GeoJSON(
        {"type": "Polygon",
         "coordinates": [[[-122.500000, 37.000000],
                          [-121.000000, 37.000000],
                          [-121.000000, 38.080000],
                          [-122.500000, 38.080000],
                          [-122.500000, 37.000000]]]})
    geo_loc_list = [geo_object_polygon]
    geo_loc_mk = {geo_object_polygon: 1}
    geo_loc_mv = {2: geo_object_polygon}
    connection.put(
        key,
        {
            "loc_polygon": geo_object_polygon,
            'geo_loc_list': geo_loc_list,
            'geo_loc_mk': geo_loc_mk,
            'geo_loc_mv': geo_loc_mv
        }
    )
    # Second polygon far away from the point data (negative-match cases).
    key = ('test', 'demo', 'polygon2')
    geo_object_polygon = aerospike.GeoJSON(
        {"type": "Polygon",
         "coordinates": [[[-52.500000, 37.000000],
                          [-51.000000, 37.000000],
                          [-51.000000, 38.080000],
                          [-52.500000, 38.080000],
                          [-52.500000, 37.000000]]]})
    geo_loc_list = [geo_object_polygon]
    geo_loc_mk = {geo_object_polygon: 1}
    geo_loc_mv = {2: geo_object_polygon}
    connection.put(
        key,
        {
            "loc_polygon": geo_object_polygon,
            'geo_loc_list': geo_loc_list,
            'geo_loc_mk': geo_loc_mk,
            'geo_loc_mv': geo_loc_mv
        }
    )
def remove_geo_indexes(connection):
    """Drop every index created by add_geo_indexes (best-effort teardown).

    Replaces nine copy-pasted try/except blocks with one loop; removal order
    matches the original.
    """
    index_names = [
        'loc_index',
        'loc_polygon_index',
        'loc_circle_index',
        'geo_list_index',
        'geo_map_key_index',
        'geo_map_val_index',
        'geo_loc_list_index',
        'geo_loc_map_key_index',
        'geo_loc_map_val_index',
    ]
    for index_name in index_names:
        try:
            connection.index_remove('test', index_name)
        except Exception:
            # Narrowed from a bare `except:`; the index may simply not exist.
            pass
def remove_geo_data(connection):
    """Delete every record written by add_geo_data."""
    keys = [('test', 'demo', i) for i in range(10)]
    keys.append(('test', 'demo', 'polygon'))
    keys.append(('test', 'demo', 'polygon2'))
    for record_key in keys:
        connection.remove(record_key)
class TestGeospatial(object):
    """Integration tests for the aerospike client's geospatial (GeoJSON) support."""
    # Skip the whole class when the target server lacks geo support.
    pytestmark = pytest.mark.skipif(
        not TestBaseClass.has_geo_support(),
        reason="Server does not support geospatial data.")
    def setup_class(cls):
        """
        Register the connection setup/teardown hooks consumed by the
        connection_with_config_funcs fixture: create geo indexes and seed
        geo data before the tests, remove both afterwards.
        """
        cls.connection_setup_functions = (
            add_geo_indexes,
            add_geo_data
        )
        cls.connection_teardown_functions = (
            remove_geo_indexes,
            remove_geo_data
        )
    @pytest.fixture(autouse=True)
    def setup(self, request, connection_with_config_funcs):
        """Per-test fixture: skip when the server lacks geo support, write the
        'circle' record on servers that support AeroCircle, and remove every
        key collected in self.keys on teardown."""
        as_connection = connection_with_config_funcs
        self.keys = []
        if not as_connection.has_geo():
            # NOTE(review): pytest.skip takes its message positionally on
            # older pytest versions; `reason=` may not be accepted there.
            pytest.skip(
                reason="Server does not support geospatial data")
        if not self.skip_old_server:
            key = ('test', 'demo', 'circle')
            geo_circle = aerospike.GeoJSON(
                {"type": "AeroCircle", "coordinates": [[-122.0, 37.0], 250.2]})
            as_connection.put(key, {"loc_circle": geo_circle})
            self.keys.append(key)
        def teardown():
            # Remove the circle record plus anything tests appended.
            for key in self.keys:
                as_connection.remove(key)
        request.addfinalizer(teardown)
def test_geospatial_put_get_positive(self):
"""
Perform a get and put with multiple bins including geospatial bin
"""
key = ('test', 'demo', 'single_geo_put')
geo_object_single = aerospike.GeoJSON(
{"type": "Point", "coordinates": [42.34, 58.62]})
geo_object_dict = aerospike.GeoJSON(
{"type": "Point", "coordinates": [56.34, 69.62]})
self.as_connection.put(key, {"loc": geo_object_single, "int_bin": 2,
"string_bin": "str",
"dict_bin": {"a": 1, "b": 2,
"geo": geo_object_dict}})
key, _, bins = self.as_connection.get(key)
expected = {'loc': {'coordinates': [42.34, 58.62], 'type': 'Point'},
"int_bin": 2, "string_bin": "str",
"dict_bin": {"a": 1, "b": 2,
"geo": {'coordinates': [56.34, 69.62], 'type':
'Point'}}}
for b in bins:
assert b in expected
self.as_connection.remove(key)
def test_geospatial_positive_query(self):
"""
Perform a positive geospatial query for a polygon
"""
records = []
query = self.as_connection.query("test", "demo")
geo_object2 = aerospike.GeoJSON({"type": "Polygon",
"coordinates": [[
[-122.500000, 37.000000],
[-121.000000, 37.000000],
[-121.000000, 38.080000],
[-122.500000, 38.080000],
[-122.500000, 37.000000]]]})
query.where(p.geo_within_geojson_region("loc", geo_object2.dumps()))
def callback(input_tuple):
_, _, record = input_tuple
records.append(record)
query.foreach(callback)
assert len(records) == 3
expected = [{"coordinates": [-122.0, 37.5], "type": "Point"},
{"coordinates": [-121.8, 37.7], "type": "Point"},
{"coordinates": [-121.6, 37.9], "type": "Point"}]
for r in records:
assert r['loc'].unwrap() in expected
def test_geospatial_positive_query_outside_shape(self):
"""
Perform a positive geospatial query for polygon where all points
are outside polygon
"""
records = []
query = self.as_connection.query("test", "demo")
geo_object2 = aerospike.GeoJSON({"type": "Polygon",
"coordinates": [[
[-126.500000, 37.000000],
[-124.000000, 37.000000],
[-124.000000, 38.080000],
[-126.500000, 38.080000],
[-126.500000, 37.000000]]]})
query.where(p.geo_within_geojson_region("loc", geo_object2.dumps()))
def callback(input_tuple):
_, _, record = input_tuple
records.append(record)
query.foreach(callback)
assert len(records) == 0
    def test_geospatial_positive_query_without_set(self):
        """
        Perform a positive geospatial query for a polygon without a set

        Writes nine point records with a null set name, indexes them, runs a
        region query, then removes the index and records before asserting.
        """
        keys = []
        pre = '{"type": "Point", "coordinates"'
        suf = ']}'
        for i in range(1, 10):
            # Same digit-wise coordinate construction as add_geo_data, but
            # starting at i=1 and keyed with set=None.
            lng = 1220 - (2 * i)
            lat = 375 + (2 * i)
            key = ('test', None, i)
            s = "{0}: [-{1}.{2}, {3}.{4}{5}".format(
                pre, (lng // 10), (lng % 10), (lat // 10), (lat % 10), suf)
            geo_object = aerospike.geojson(s)
            self.as_connection.put(key, {"loc": geo_object})
            keys.append(key)
        try:
            self.as_connection.index_geo2dsphere_create(
                "test", None, "loc", "loc_index_no_set")
        except(e.IndexFoundError):
            pass
        records = []
        query = self.as_connection.query("test", None)
        geo_object2 = aerospike.GeoJSON({"type": "Polygon",
                                         "coordinates": [[
                                             [-122.500000, 37.000000],
                                             [-121.000000, 37.000000],
                                             [-121.000000, 38.080000],
                                             [-122.500000, 38.080000],
                                             [-122.500000, 37.000000]]]})
        query.where(p.geo_within_geojson_region("loc", geo_object2.dumps()))
        def callback(input_tuple):
            _, _, record = input_tuple
            records.append(record)
        query.foreach(callback)
        # Clean up the ad-hoc index and records before asserting so a
        # failure does not leak state into other tests.
        try:
            self.as_connection.index_remove('test', 'loc_index_no_set')
        except(Exception):
            pass
        for key in keys:
            self.as_connection.remove(key)
        assert len(records) == 2
        expected = [{'coordinates': [-121.8, 37.7], 'type': 'Point'},
                    {'coordinates': [-121.6, 37.9], 'type': 'Point'}]
        for r in records:
            assert r['loc'].unwrap() in expected
def test_geospatial_positive_query_for_circle(self):
"""
Perform a positive geospatial query for a circle
"""
if TestGeospatial.skip_old_server is True:
pytest.skip(
"Server does not support apply on AeroCircle for GeoJSON")
records = []
query = self.as_connection.query("test", "demo")
geo_object2 = aerospike.GeoJSON(
{"type": "AeroCircle", "coordinates": [[-122.0, 37.5], 250.2]})
query.where(p.geo_within_geojson_region("loc", geo_object2.dumps()))
def callback(input_tuple):
_, _, record = input_tuple
records.append(record)
query.foreach(callback)
assert len(records) == 1
expected = [{'coordinates': [-122.0, 37.5], 'type': 'Point'}]
for r in records:
assert r['loc'].unwrap() in expected
def test_geo_query_with_geo_within_radius_predicate(self):
"""
Perform a positive geospatial query for a circle with helper
"""
if TestGeospatial.skip_old_server is True:
pytest.skip(
"Server does not support apply on AeroCircle for GeoJSON")
records = []
query = self.as_connection.query("test", "demo")
query.where(p.geo_within_radius("loc", -122.0, 37.5, 250.2))
def callback(input_tuple):
_, _, record = input_tuple
records.append(record)
query.foreach(callback)
assert len(records) == 1
expected = [{'coordinates': [-122.0, 37.5], 'type': 'Point'}]
for r in records:
assert r['loc'].unwrap() in expected
def test_geospatial_operate_positive(self):
"""
Perform an operate operation with geospatial bin
"""
geo_object_operate = aerospike.GeoJSON(
{"type": "Point", "coordinates": [43.45, 56.75]})
key = ('test', 'demo', 'single_geo_operate')
llist = [{
"op": aerospike.OPERATOR_WRITE,
"bin": "write_bin",
"val": {"no": geo_object_operate}
}, {"op": aerospike.OPERATOR_READ,
"bin": "write_bin"}]
key, _, bins = self.as_connection.operate(key, llist)
self.keys.append(key)
assert type(bins['write_bin']['no']) == aerospike.GeoJSON
assert bins['write_bin'][
'no'].unwrap() == {'coordinates': [43.45, 56.75], 'type': 'Point'}
def test_geospatial_wrap_positive(self):
"""
Perform a positive wrap on geospatial data
"""
geo_object = aerospike.GeoJSON(
{
"type": "Polygon",
"coordinates": [[
[-124.500000, 37.000000],
[-125.000000, 37.000000],
[-121.000000, 38.080000],
[-122.500000, 38.080000],
[-124.500000, 37.000000]]]
})
geo_object.wrap({"type": "Polygon",
"coordinates": [[[-122.500000, 37.000000],
[-121.000000, 37.000000],
[-121.000000, 38.080000],
[-122.500000, 38.080000],
[-122.500000, 37.000000]]]})
assert geo_object.unwrap() == {
'coordinates': [[[-122.5, 37.0], [-121.0, 37.0],
[-121.0, 38.08], [-122.5, 38.08],
[-122.5, 37.0]]], 'type': 'Polygon'}
def test_geospatial_wrap_positive_with_query(self):
"""
Perform a positive wrap on geospatial data followed by a query
"""
geo_object_wrap = get_geo_object()
geo_object_wrap.wrap({"type": "Polygon",
"coordinates": [[[-122.500000, 37.000000],
[-121.000000, 37.000000],
[-121.000000, 38.080000],
[-122.500000, 38.080000],
[-122.500000, 37.000000]]]})
assert geo_object_wrap.unwrap() == {
'coordinates': [[[-122.5, 37.0], [-121.0, 37.0],
[-121.0, 38.08], [-122.5, 38.08],
[-122.5, 37.0]]], 'type': 'Polygon'}
records = []
query = self.as_connection.query("test", "demo")
query.where(
p.geo_within_geojson_region("loc", geo_object_wrap.dumps()))
def callback(input_tuple):
_, _, record = input_tuple
records.append(record)
query.foreach(callback)
assert len(records) == 3
expected = [{'coordinates': [-122.0, 37.5], 'type': 'Point'},
{'coordinates': [-121.8, 37.7], 'type':'Point'},
{'coordinates': [-121.6, 37.9], 'type': 'Point'}]
for r in records:
assert r['loc'].unwrap() in expected
    def test_geospatial_loads_positive(self):
        """
        Perform a positive loads on geoJSON raw string

        loads() must replace the object's geometry with the parsed string.
        """
        geo_object = get_geo_object()
        geo_object.loads(
            '{"type": "Polygon", "coordinates": [[[-122.500000, 37.000000],\
            [-121.000000, 37.000000], [-121.000000, 38.080000],\
            [-122.500000, 38.080000], [-122.500000, 37.000000]]]}')
        assert geo_object.unwrap() == {
            'coordinates': [[[-122.5, 37.0], [-121.0, 37.0],
                             [-121.0, 38.08], [-122.5, 38.08],
                             [-122.5, 37.0]]], 'type': 'Polygon'}
    def test_geospatial_loads_positive_with_query(self):
        """
        Perform a positive loads on geoJSON raw string followed by a query

        The loaded polygon is then used as the region of a query; three
        seeded points must match.
        """
        geo_object_loads = get_geo_object()
        geo_object_loads.loads(
            '{"type": "Polygon", "coordinates": [[[-122.500000, 37.000000],\
            [-121.000000, 37.000000], [-121.000000, 38.080000],\
            [-122.500000, 38.080000], [-122.500000, 37.000000]]]}')
        assert geo_object_loads.unwrap() == {
            'coordinates': [[[-122.5, 37.0], [-121.0, 37.0],
                             [-121.0, 38.08], [-122.5, 38.08],
                             [-122.5, 37.0]]], 'type': 'Polygon'}
        records = []
        query = self.as_connection.query("test", "demo")
        query.where(
            p.geo_within_geojson_region("loc", geo_object_loads.dumps()))
        def callback(input_tuple):
            _, _, record = input_tuple
            records.append(record)
        query.foreach(callback)
        assert len(records) == 3
        expected = [{'coordinates': [-122.0, 37.5], 'type': 'Point'},
                    {'coordinates': [-121.8, 37.7], 'type': 'Point'},
                    {'coordinates': [-121.6, 37.9], 'type': 'Point'}]
        for r in records:
            assert r['loc'].unwrap() in expected
def test_geospatial_dumps_positive(self):
"""
Perform a positive dumps. Verify using str
"""
geo_object = get_geo_object()
geojson_str = geo_object.dumps()
assert isinstance(geojson_str, str)
obj = aerospike.geojson(geojson_str)
assert obj.unwrap() == geo_object.unwrap()
def test_geojson_str(self):
"""
verify that str representation of geojson object is correct
"""
geo_object = get_geo_object()
geojson_str = str(geo_object)
assert isinstance(geojson_str, str)
obj = aerospike.geojson(geojson_str)
assert obj.unwrap() == geo_object.unwrap()
def test_geospatial_repr_positive(self):
"""
Perform a positive repr. Verify using eval()
"""
geo_object = get_geo_object()
geojson_str = eval(repr(geo_object))
assert isinstance(geojson_str, str)
obj = aerospike.geojson(geojson_str)
assert obj.unwrap() == geo_object.unwrap()
def test_geospatial_put_get_positive_with_geodata(self):
"""
Perform a get and put with multiple bins including geospatial bin
using geodata method
"""
key = ('test', 'demo', 'single_geo_put')
geo_object_single = aerospike.geodata(
{"type": "Point", "coordinates": [42.34, 58.62]})
geo_object_dict = aerospike.geodata(
{"type": "Point", "coordinates": [56.34, 69.62]})
self.as_connection.put(key, {
"loc": geo_object_single,
"int_bin": 2,
"string_bin": "str",
"dict_bin": {
"a": 1, "b": 2,
"geo": geo_object_dict
}
})
key, _, bins = self.as_connection.get(key)
expected = {'loc': {'coordinates': [42.34, 58.62], 'type': 'Point'},
"int_bin": 2, "string_bin": "str",
"dict_bin": {"a": 1, "b": 2,
"geo": {'coordinates': [56.34, 69.62], 'type':
'Point'}}}
for b in bins:
assert b in expected
self.as_connection.remove(key)
def test_geospatial_put_get_positive_with_geojson(self):
    """
    Put and get a record with several bins, including geo values built
    from raw JSON strings via aerospike.geojson().
    """
    key = ('test', 'demo', 'single_geo_put')
    geo_object_single = aerospike.geojson(
        '{"type": "Point", "coordinates": [42.34, 58.62] }')
    geo_object_dict = aerospike.geojson(
        '{"type": "Point", "coordinates": [56.34, 69.62] }')
    self.as_connection.put(key, {"loc": geo_object_single, "int_bin": 2,
                                 "string_bin": "str",
                                 "dict_bin": {"a": 1, "b": 2, "geo":
                                              geo_object_dict}})
    key, _, bins = self.as_connection.get(key)
    expected = {'loc': {'coordinates': [42.34, 58.62], 'type': 'Point'},
                "int_bin": 2, "string_bin": "str",
                "dict_bin": {"a": 1, "b": 2,
                             "geo": {'coordinates': [56.34, 69.62], 'type':
                                     'Point'}}}
    # NOTE(review): only bin names are checked here, not their values.
    for b in bins:
        assert b in expected
    self.as_connection.remove(key)
def test_geospatial_positive_query_with_geodata(self):
    """
    geo_within_geojson_region query built from aerospike.geodata()
    returns exactly the three seeded points inside the polygon.
    """
    records = []
    query = self.as_connection.query("test", "demo")
    geo_object2 = aerospike.geodata({"type": "Polygon",
                                     "coordinates": [[
                                         [-122.500000, 37.000000],
                                         [-121.000000, 37.000000],
                                         [-121.000000, 38.080000],
                                         [-122.500000, 38.080000],
                                         [-122.500000, 37.000000]]]})
    query.where(p.geo_within_geojson_region("loc", geo_object2.dumps()))

    def callback(input_tuple):
        # input_tuple is (key, meta, bins); only the bins are collected.
        _, _, record = input_tuple
        records.append(record)

    query.foreach(callback)
    assert len(records) == 3
    expected = [{'coordinates': [-122.0, 37.5], 'type': 'Point'},
                {'coordinates': [-121.8, 37.7], 'type': 'Point'},
                {'coordinates': [-121.6, 37.9], 'type': 'Point'}]
    for r in records:
        assert r['loc'].unwrap() in expected
def test_geospatial_positive_query_with_geojson(self):
    """
    geo_within_geojson_region query built from a raw JSON polygon string
    returns exactly the three seeded points inside the polygon.
    """
    records = []
    query = self.as_connection.query("test", "demo")
    geo_object2 = aerospike.geojson(
        '{"type": "Polygon", "coordinates": [[[-122.500000, 37.000000], \
        [-121.000000, 37.000000], [-121.000000, 38.080000],\
        [-122.500000, 38.080000], [-122.500000, 37.000000]]]}')
    query.where(p.geo_within_geojson_region("loc", geo_object2.dumps()))

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.foreach(callback)
    assert len(records) == 3
    expected = [{'coordinates': [-122.0, 37.5], 'type': 'Point'},
                {'coordinates': [-121.8, 37.7], 'type': 'Point'},
                {'coordinates': [-121.6, 37.9], 'type': 'Point'}]
    for r in records:
        assert r['loc'].unwrap() in expected
def test_geospatial_2dindex_positive(self):
    """
    index_geo2dsphere_create() on a geo bin returns status 0.
    """
    # Best-effort cleanup: drop a leftover index from a previous run and
    # give the server time to finish the removal before re-creating it.
    try:
        self.as_connection.index_remove('test', 'loc_index')
        time.sleep(2)
    except Exception:
        # The index may not exist yet; that is fine.  A bare `except:`
        # (as before) would also swallow SystemExit/KeyboardInterrupt.
        pass
    status = self.as_connection.index_geo2dsphere_create(
        "test", "demo", "loc", "loc_index")
    assert status == 0
def test_geospatial_2dindex_positive_with_policy(self):
    """
    index_geo2dsphere_create() with an explicit policy returns status 0.
    """
    # Best-effort cleanup of a leftover index from a previous run.
    try:
        self.as_connection.index_remove('test', 'loc_index')
        time.sleep(2)
    except Exception:
        # The index may not exist yet; ignore.  Narrowed from a bare
        # `except:` so SystemExit/KeyboardInterrupt are not swallowed.
        pass
    status = self.as_connection.index_geo2dsphere_create(
        "test", "demo", "loc", "loc_index", {"timeout": 2000})
    assert status == 0
def test_geospatial_positive_query_with_point(self):
    """
    geo_contains_geojson_point returns the single polygon record that
    contains the query point.
    """
    records = []
    query = self.as_connection.query("test", "demo")
    geo_object2 = aerospike.GeoJSON({"type": "Point", "coordinates":
                                     [-121.700000, 37.200000]})
    query.where(
        p.geo_contains_geojson_point("loc_polygon", geo_object2.dumps()))

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.foreach(callback)
    assert len(records) == 1
    expected = [{'coordinates': [[[-122.500000, 37.000000],
                                  [-121.000000, 37.000000],
                                  [-121.000000, 38.080000],
                                  [-122.500000, 38.080000],
                                  [-122.500000, 37.000000]]],
                 'type': 'Polygon'}]
    for r in records:
        assert r['loc_polygon'].unwrap() in expected
def test_geospatial_positive_query_with_point_outside_polygon(self):
    """
    A query point outside every stored polygon yields no records.
    """
    records = []
    query = self.as_connection.query("test", "demo")
    geo_object2 = aerospike.GeoJSON({"type": "Point", "coordinates":
                                     [-123.700000, 37.200000]})
    query.where(
        p.geo_contains_geojson_point("loc_polygon", geo_object2.dumps()))

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.foreach(callback)
    assert len(records) == 0
def test_geospatial_positive_query_with_point_in_aerocircle(self):
    """
    A point inside a stored AeroCircle matches that circle record.
    Skipped on servers without AeroCircle support.
    """
    if TestGeospatial.skip_old_server is True:
        pytest.skip(
            "Server does not support apply on AeroCircle for GeoJSON")
    records = []
    query = self.as_connection.query("test", "demo")
    geo_object2 = aerospike.GeoJSON({"type": "Point", "coordinates":
                                     [-122.000000, 37.000000]})
    query.where(
        p.geo_contains_geojson_point("loc_circle", geo_object2.dumps()))

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.foreach(callback)
    assert len(records) == 1
    expected = [
        {'coordinates': [[-122.0, 37.0], 250.2], 'type': 'AeroCircle'}]
    for r in records:
        assert r['loc_circle'].unwrap() in expected
def test_geospatial_positive_query_with_point_in_aerocircle_int(self):
    """
    Same as the AeroCircle test above but the point coordinates are
    given as ints rather than floats.
    """
    if TestGeospatial.skip_old_server is True:
        pytest.skip(
            "Server does not support apply on AeroCircle for GeoJSON")
    records = []
    query = self.as_connection.query("test", "demo")
    geo_object2 = aerospike.GeoJSON({"type": "Point", "coordinates":
                                     [-122, 37]})
    query.where(
        p.geo_contains_geojson_point("loc_circle", geo_object2.dumps()))

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.foreach(callback)
    assert len(records) == 1
    expected = [
        {'coordinates': [[-122.0, 37.0], 250.2], 'type': 'AeroCircle'}]
    for r in records:
        assert r['loc_circle'].unwrap() in expected
def test_geospatial_positive_query_with_point_outside_aerocircle(self):
    """
    A point outside the stored AeroCircle yields no records.
    """
    if TestGeospatial.skip_old_server is True:
        pytest.skip(
            "Server does not support apply on AeroCircle for GeoJSON")
    records = []
    query = self.as_connection.query("test", "demo")
    geo_object2 = aerospike.GeoJSON({"type": "Point", "coordinates":
                                     [-122.0, 48.0]})
    query.where(
        p.geo_contains_geojson_point("loc_circle", geo_object2.dumps()))

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.foreach(callback)
    assert len(records) == 0
def test_geospatial_positive_query_with_point_helper_method(self):
    """
    geo_contains_point (coordinate-pair helper, no GeoJSON object
    needed) finds the polygon containing the point.
    """
    records = []
    query = self.as_connection.query("test", "demo")
    query.where(p.geo_contains_point("loc_polygon", -121.7, 37.2))

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.foreach(callback)
    assert len(records) == 1
    expected = [{'coordinates': [[[-122.500000, 37.000000],
                                  [-121.000000, 37.000000],
                                  [-121.000000, 38.080000],
                                  [-122.500000, 38.080000],
                                  [-122.500000, 37.000000]]],
                 'type': 'Polygon'}]
    for r in records:
        assert r['loc_polygon'].unwrap() in expected
@pytest.mark.parametrize(
    "bin_name, idx_type",
    (
        ('geo_list', aerospike.INDEX_TYPE_LIST),
        ('geo_map_keys', aerospike.INDEX_TYPE_MAPKEYS),
        ('geo_map_vals', aerospike.INDEX_TYPE_MAPVALUES)
    )
)
def test_geospatial_within_radius_pred(self, bin_name, idx_type):
    """geo_within_radius matches the single record whose indexed
    list/map-key/map-value geo point lies inside the given circle."""
    records = []
    query = self.as_connection.query("test", "demo")

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    predicate = p.geo_within_radius(
        bin_name, -122.0, 37.5, 250.2, idx_type)
    query.where(predicate)
    query.foreach(callback)
    assert len(records) == 1
@pytest.mark.parametrize(
    "bin_name, idx_type",
    (
        ('geo_list', aerospike.INDEX_TYPE_LIST),
        ('geo_map_keys', aerospike.INDEX_TYPE_MAPKEYS),
        ('geo_map_vals', aerospike.INDEX_TYPE_MAPVALUES)
    )
)
def test_geospatial_within_geojson_region_pred(self, bin_name, idx_type):
    """geo_within_geojson_region on list/map-key/map-value indexes
    matches the three seeded points inside the polygon."""
    records = []
    query = self.as_connection.query("test", "demo")
    geo_object2 = aerospike.geodata({"type": "Polygon",
                                     "coordinates": [[
                                         [-122.500000, 37.000000],
                                         [-121.000000, 37.000000],
                                         [-121.000000, 38.080000],
                                         [-122.500000, 38.080000],
                                         [-122.500000, 37.000000]]]})
    predicate = p.geo_within_geojson_region(
        bin_name, geo_object2.dumps(), idx_type)
    query.where(predicate)

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.foreach(callback)
    assert len(records) == 3
def test_store_multipolygon(self):
    """
    A MultiPolygon GeoJSON value round-trips through put/get unchanged.
    """
    polygons = [
        [[
            [-124.500000, 37.000000],
            [-125.000000, 37.000000],
            [-121.000000, 38.080000],
            [-122.500000, 38.080000],
            [-124.500000, 37.000000]
        ]],
        [[
            [-24.500000, 37.000000],
            [-25.000000, 37.000000],
            [-21.000000, 38.080000],
            [-22.500000, 38.080000],
            [-24.500000, 37.000000]
        ]]
    ]
    geo_object = aerospike.GeoJSON(
        {
            "type": "MultiPolygon",
            "coordinates": polygons
        }
    )
    key = ('test', 'demo', 'multipoly')
    self.as_connection.put(key, {'multi': geo_object})
    _, _, bins = self.as_connection.get(key)
    geo_returned = bins['multi'].unwrap()
    assert geo_returned['type'] == 'MultiPolygon'
    assert geo_returned['coordinates'] == polygons
    self.as_connection.remove(key)
@pytest.mark.parametrize(
    "bin_name, idx_type",
    (
        ('geo_loc_list', aerospike.INDEX_TYPE_LIST),
        ('geo_loc_mk', aerospike.INDEX_TYPE_MAPKEYS),
        ('geo_loc_mv', aerospike.INDEX_TYPE_MAPVALUES)
    )
)
def test_geospatial_contains_point_pred(self, bin_name, idx_type):
    """geo_contains_point finds the one indexed region containing the point."""
    records = []
    query = self.as_connection.query("test", "demo")
    # NOTE(review): the variable names look swapped -- -122.45 looks like a
    # longitude; compare the (x, y) ordering used elsewhere in this file.
    lat = -122.45
    lon = 37.5
    predicate = p.geo_contains_point(
        bin_name, lat, lon, idx_type)

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.where(predicate)
    query.foreach(callback)
    assert len(records) == 1
@pytest.mark.parametrize(
    "bin_name, idx_type",
    (
        ('geo_loc_list', aerospike.INDEX_TYPE_LIST),
        ('geo_loc_mk', aerospike.INDEX_TYPE_MAPKEYS),
        ('geo_loc_mv', aerospike.INDEX_TYPE_MAPVALUES)
    )
)
def test_geospatial_contains_json_point_pred(self, bin_name, idx_type):
    """Same as the point-containment test above, but the point is passed
    as a dumped GeoJSON object instead of a raw coordinate pair."""
    records = []
    query = self.as_connection.query("test", "demo")
    # NOTE(review): same swapped lat/lon naming as in
    # test_geospatial_contains_point_pred.
    lat = -122.45
    lon = 37.5
    point_list = [lat, lon]
    point = aerospike.GeoJSON({'type': "Point",
                               'coordinates': point_list})
    predicate = p.geo_contains_geojson_point(
        bin_name, point.dumps(), idx_type)

    def callback(input_tuple):
        _, _, record = input_tuple
        records.append(record)

    query.where(predicate)
    query.foreach(callback)
    assert len(records) == 1
def test_geospatial_object_not_dict_or_string(self):
    """
    Constructing GeoJSON from a value that is neither dict nor str
    (here an int) raises ParamError.
    """
    with pytest.raises(e.ParamError) as err_info:
        aerospike.GeoJSON(1)
    err_code = err_info.value.code
    assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM
def test_geospatial_object_non_json_serializable_string(self):
    """
    Constructing GeoJSON from a string that is not valid JSON raises
    ClientError.
    """
    with pytest.raises(e.ClientError) as err_info:
        aerospike.GeoJSON("abc")
    err_code = err_info.value.code
    assert err_code == AerospikeStatus.AEROSPIKE_ERR_CLIENT
def test_geospatial_object_wrap_non_dict(self):
    """
    wrap() rejects a non-dict argument with ParamError.
    """
    geo_object = get_geo_object()
    with pytest.raises(e.ParamError) as err_info:
        geo_object.wrap("abc")
    err_code = err_info.value.code
    assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM
def test_geospatial_object_loads_non_dict(self):
    """
    loads() rejects a malformed JSON string with ClientError.
    """
    geo_object = get_geo_object()
    with pytest.raises(e.ClientError) as err_info:
        geo_object.loads('{"abc"}')
    err_code = err_info.value.code
    assert err_code == AerospikeStatus.AEROSPIKE_ERR_CLIENT
def test_geospatial_2dindex_set_length_extra(self):
    """
    Index creation with an over-long set name (100 chars) raises
    InvalidRequest.
    """
    set_name = 'a' * 100
    with pytest.raises(e.InvalidRequest) as err_info:
        self.as_connection.index_geo2dsphere_create(
            "test", set_name, "loc", "loc_index_creation_should_fail")
    err_code = err_info.value.code
    assert err_code == AerospikeStatus.AEROSPIKE_ERR_REQUEST_INVALID
@pytest.mark.skip(reason="These raise system errors")
@pytest.mark.parametrize(
    "method",
    [
        'geo_within_geojson_region',
        'geo_contains_geojson_point',
        'geo_within_radius',
        'geo_contains_point'
    ])
def test_call_geo_predicates_with_wrong_args(self, method):
    """Calling a geo predicate builder with no arguments should raise
    ParamError.  Currently skipped: it raises system errors instead."""
    query = self.as_connection.query("test", "demo")
    predicate = getattr(p, method)
    with pytest.raises(e.ParamError) as err_info:
        query.where(predicate())
    err_code = err_info.value.code
    assert err_code == AerospikeStatus.AEROSPIKE_ERR_PARAM
| StarcoderdataPython |
12850404 | """
Constraint functions for grasp sampling
Author: <NAME>
"""
from abc import ABCMeta, abstractmethod
import numpy as np
class GraspConstraintFn(object, metaclass=ABCMeta):
    """Abstract base class for grasp-sampling constraint functions.

    Fix: the original declared ``__metaclass__ = ABCMeta``, which is
    Python 2 syntax and has no effect under Python 3, so
    ``@abstractmethod`` was never enforced.  Declaring the metaclass in
    the class header restores the intended behavior: instantiating this
    class directly raises TypeError.
    """

    def __init__(self, config):
        # Configuration dictionary shared with subclasses via self._config.
        self._config = config

    def __call__(self, grasp):
        """
        Evaluates whether or not a grasp is valid.

        Parameters
        ----------
        grasp : :obj:`Grasp2D`
            grasp to evaluate

        Returns
        -------
        bool
            True if the grasp satisfies constraints, False otherwise
        """
        return self.satisfies_constraints(grasp)

    @abstractmethod
    def satisfies_constraints(self, grasp):
        """
        Evaluates whether or not a grasp is valid (implemented by
        subclasses).

        Parameters
        ----------
        grasp : :obj:`Grasp2D`
            grasp to evaluate

        Returns
        -------
        bool
            True if the grasp satisfies constraints, False otherwise
        """
        pass
class DiscreteApproachGraspConstraintFn(GraspConstraintFn):
    """Restricts the grasp approach direction to a discrete set of
    angles measured from the world z axis."""

    def __init__(self, config):
        GraspConstraintFn.__init__(self, config)
        # Cache the configuration entries used at evaluation time.
        self._max_approach_angle = self._config['max_approach_angle']
        self._angular_tolerance = self._config['angular_tolerance']
        self._angular_step = self._config['angular_step']
        self._T_camera_world = self._config['camera_pose']

    def satisfies_constraints(self, grasp):
        """Return True when the world-frame approach angle lies within
        the angular tolerance of one of the allowed discrete angles.

        Parameters
        ----------
        grasp : :obj:`Grasp2D`
            grasp to evaluate
        """
        # Approach axis expressed in world coordinates; the angle is
        # measured against the world -z direction.
        world_axis = self._T_camera_world.rotation.dot(grasp.approach_axis)
        approach_angle = np.arccos(-world_axis[2])
        # Candidate angles: just 0 unless a positive step size is configured.
        if self._angular_step > 0:
            candidates = np.arange(start=0.0,
                                   stop=self._max_approach_angle,
                                   step=self._angular_step)
        else:
            candidates = np.array([0.0])
        deviations = np.abs(candidates - approach_angle)
        return bool(deviations.min() < self._angular_tolerance)
class GraspConstraintFnFactory(object):
    """Factory mapping a constraint-function name to an instance."""

    @staticmethod
    def constraint_fn(fn_type, config):
        """Instantiate the constraint function named *fn_type*.

        Returns None for 'none'; raises ValueError for unknown names.
        """
        if fn_type == 'none':
            return None
        if fn_type == 'discrete_approach_angle':
            return DiscreteApproachGraspConstraintFn(config)
        raise ValueError('Grasp constraint function type %s not supported!' % (fn_type))
| StarcoderdataPython |
4803003 | <reponame>tychen5/Audio_Tagging_Challenge
import os
import sys
import time
import math
import random
import pickle
import numpy as np
import pandas as pd
from keras.utils import to_categorical
# Extract the *unverified* subset of the training data and save it as
# three .npy files (fnames, one-hot labels, features).
# map_path = path of 'map.pkl' (label-name -> class-index mapping)
map_path = sys.argv[1]
# base_path = directory of all the data (train_label.csv, X_train.npy)
base_path = sys.argv[2]

with open(map_path, 'rb') as f:
    map_dict = pickle.load(f)

# Split row indices by the 'manually_verified' flag.
# NOTE(review): `verified` is collected but never used below.
verified = []
unverified = []
train_label_path = os.path.join(base_path, 'train_label.csv')
Y_train = pd.read_csv(train_label_path)
for i in range(len(Y_train)):
    if Y_train['manually_verified'][i] == 1:
        verified.append(i)
    else:
        unverified.append(i)

# Select the unverified rows and save their file names.
Y_un = Y_train.loc[unverified,:]
fname_all = Y_un['fname']
fname_all = np.array(fname_all)

# Map label names to class indices and one-hot encode them (41 classes).
Y_dict = Y_un['label'].map(map_dict)
Y_dict = np.array(Y_dict)
Y_all = []
for i in Y_dict:
    Y_all.append(to_categorical(i, num_classes=41))
Y_all = np.array(Y_all)

filename = os.path.join(base_path, 'fname_unverified.npy')
np.save(filename, fname_all)
filename = os.path.join(base_path, 'y_unverified.npy')
np.save(filename, Y_all)

# Slice the feature matrix down to the unverified rows and save it.
X_train_path = os.path.join(base_path, 'X_train.npy')
X_all = np.load(X_train_path)
X_un = X_all[unverified, :]
filename = os.path.join(base_path, 'X_unverified.npy')
np.save(filename, X_un)
| StarcoderdataPython |
12804648 | <filename>trpg_bot/trpg_bot.py
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import re
import traceback
import discord
import redis
from logic import CommandInterpreterLogic, DropboxLogic, ModeSelectorLogic
if __name__ == '__main__':
    # Required runtime configuration comes from environment variables.
    TOKEN = os.environ['DISCORD_BOT_TOKEN']
    REDIS = os.environ['REDIS_URL']
    DROPBOX_TOKEN = os.environ['DROPBOX_TOKEN']
    # Heroku injects the slug commit hash; fall back to 'None' elsewhere.
    COMMIT_HASH = os.environ['HEROKU_SLUG_COMMIT'] if 'HEROKU_SLUG_COMMIT' in os.environ.keys() else 'None'

    client = discord.Client()
    r = redis.from_url(os.environ.get("REDIS_URL"), decode_responses=True)
    mode_selector = ModeSelectorLogic(r)
    dbx = DropboxLogic(DROPBOX_TOKEN)

    @client.event
    async def on_ready():
        # Sync the dice lists from Dropbox once at startup.
        dbx.sync()
        print('activated trpg-bot client.')

    @client.event
    async def on_message(message):
        """Dispatch chat commands; any error is reported back to the channel."""
        try:
            if message.author.bot:
                return
            guild = message.guild.name
            session = message.channel.name
            user = message.author.name
            command, params = CommandInterpreterLogic().interp_command(message.content)
            # Commands are checked with independent `if`s, so at most one
            # matches per message (interp_command returns a single command).
            if command == 'ping':
                await message.channel.send('pong')
            if command == 'debug':
                ls_dropbox = os.listdir('./trpg_bot/resources/mayokin')
                await message.channel.send(f"```\nrevision: {COMMIT_HASH}\nresources_dropbox: {ls_dropbox}```")
            if command == 'redis':
                reply = '\n'.join([key for key in r.scan_iter()])
                await message.channel.send(f"```\n{reply}```")
            if command == 'sync':
                await message.channel.send('Start syncing...')
                dbx.sync()
                await message.channel.send('Dice lists were synced with Dropbox.')
            if command == 'mode':
                mode_name = params[0]
                mode = mode_selector.select(guild, session, mode_name)
                await message.channel.send(f"{mode} モードになったよ")
            if command == 'help':
                # NOTE(review): local name shadows the builtin `help`.
                help = mode_selector.get(guild, session).help()
                await message.channel.send(help)
            if command == 'regist':
                url = params[0]
                mode_selector.get(guild, session).regist(guild, session, user, url)
                await message.channel.send(f"{message.author.mention} がキャラシートを登録したよ\n=> {url}")
            if command == 'players':
                table = mode_selector.get(guild, session).players(guild, session)
                await message.channel.send(f"{message.channel.mention} のキャラシート一覧だよ\n```{table}```")
            if command == 'dice':
                result = mode_selector.get(guild, session).dice(guild, session, user, params)
                await message.channel.send(f"{message.author.mention} がサイコロを振ったよ\n=> {result}")
            if command == 'status':
                status = mode_selector.get(guild, session).status(guild, session, user)
                await message.channel.send(f"{message.author.mention} のキャラシートだよ\n```{status}```")
            if command == 'extra':
                result = mode_selector.get(guild, session).extra(params)
                # NOTE(review): `is not None` would be the idiomatic check.
                if result != None:
                    await message.channel.send(result)
        except Exception as e:
            # Report the failure in-channel and keep the full trace in logs.
            await message.channel.send(f"何かエラーが起きたみたいだよ\n```{str(e)}```")
            traceback.print_exc()

    @client.event
    async def on_guild_channel_delete(channel):
        # Drop the per-channel state from redis when a channel is deleted;
        # failures are logged but never crash the bot.
        try:
            r.hdel(f"{channel.guild.name}.mode", channel.name)
            r.delete(f"{channel.guild.name}.{channel.name}")
        except Exception as e:
            traceback.print_exc()

    client.run(TOKEN)
| StarcoderdataPython |
9740360 | import math
from fwk.metrics import Metric
from fwk.config import Config
class EpochCounter(Metric):
    """Reports the current epoch number at the start of every epoch."""

    def __init__(self) -> None:
        super().__init__()
        # Populated by on_before_epoch; None until the first epoch starts.
        self.epoch = None
        self.total_epochs = None

    def on_before_epoch(self, local_variables):
        """Capture the epoch index and configured total, then print."""
        self.total_epochs = int(Config.config['ALGORITHM']['epochs'])
        self.epoch = local_variables['epoch']
        self.print_metric()

    def text_record(self):
        """Return the banner line announcing the current epoch."""
        return f'\n---- epoch {self.epoch + 1:03d} of {self.total_epochs:03d} --------------------\n'
class TrainBatchCounter(Metric):
    """Prints a 'batch i of n' banner before every training batch."""

    def __init__(self) -> None:
        super().__init__()
        self.number_of_subjects = None
        self.batch_idx = None
        self.subjects_per_batch = None
        self.number_of_batches = None
        # Key used to select both the data loader and the batch-size config entry.
        self.regime = 'train'

    def on_before_train_batch(self, local_variables):
        # `local_variables` holds the caller's locals; 'self' there is the
        # object owning the data loaders.
        self.number_of_subjects = len(local_variables['self'].data_loaders[self.regime].dataset.subjects)
        self.subjects_per_batch = int(Config.config['ALGORITHM'][f'{self.regime}_batch_size'])
        self.batch_idx = local_variables['batch_idx']
        # ceil: a final partial batch is counted as a full batch.
        self.number_of_batches = math.ceil(self.number_of_subjects / self.subjects_per_batch)
        self.print_metric()

    def text_record(self):
        text_record = f'\n batch {self.batch_idx + 1:03d} of {self.number_of_batches:03d} ({self.regime})\n'
        return text_record
class TestBatchCounter(Metric):
    """Prints a 'batch i of n' banner before every test batch."""

    def __init__(self) -> None:
        super().__init__()
        self.number_of_subjects = None
        self.batch_idx = None
        self.subjects_per_batch = None
        self.number_of_batches = None
        # Selects the 'test' data loader and the 'test_batch_size' config entry.
        self.regime = 'test'

    def on_before_test_batch(self, local_variables):
        self.number_of_subjects = len(local_variables['self'].data_loaders[self.regime].dataset.subjects)
        self.subjects_per_batch = int(Config.config['ALGORITHM'][f'{self.regime}_batch_size'])
        self.batch_idx = local_variables['batch_idx']
        # ceil: a final partial batch is counted as a full batch.
        self.number_of_batches = math.ceil(self.number_of_subjects / self.subjects_per_batch)
        self.print_metric()

    def text_record(self):
        text_record = f'\n batch {self.batch_idx + 1:03d} of {self.number_of_batches:03d} ({self.regime})\n'
        return text_record
class ImageTrainBatchCounter(Metric):
    """Batch banner for image datasets (counts dataset.images, not subjects)."""

    def __init__(self) -> None:
        super().__init__()
        self.number_of_images = None
        self.batch_idx = None
        self.images_per_batch = None
        self.number_of_batches = None
        self.regime = 'train'

    def on_before_train_batch(self, local_variables):
        self.number_of_images = len(local_variables['self'].data_loaders[self.regime].dataset.images)
        self.images_per_batch = int(Config.config['ALGORITHM'][f'{self.regime}_batch_size'])
        self.batch_idx = local_variables['batch_idx']
        # NOTE(review): int() truncates, so a trailing partial batch is not
        # counted -- unlike the math.ceil used by TrainBatchCounter.  Confirm
        # whether the image loaders drop the last incomplete batch.
        self.number_of_batches = int(self.number_of_images / self.images_per_batch)
        self.print_metric()

    def text_record(self):
        text_record = f'\n batch {self.batch_idx + 1:03d} of {self.number_of_batches:03d} ({self.regime})\n'
        return text_record
class ImageTestBatchCounter(Metric):
    """Test-regime batch banner for image datasets."""

    def __init__(self) -> None:
        super().__init__()
        self.number_of_images = None
        self.batch_idx = None
        self.images_per_batch = None
        self.number_of_batches = None
        self.regime = 'test'

    def on_before_test_batch(self, local_variables):
        self.number_of_images = len(local_variables['self'].data_loaders[self.regime].dataset.images)
        self.images_per_batch = int(Config.config['ALGORITHM'][f'{self.regime}_batch_size'])
        self.batch_idx = local_variables['batch_idx']
        # NOTE(review): int() truncates a trailing partial batch; see the
        # same remark on ImageTrainBatchCounter.
        self.number_of_batches = int(self.number_of_images / self.images_per_batch)
        self.print_metric()

    def text_record(self):
        text_record = f'\n batch {self.batch_idx + 1:03d} of {self.number_of_batches:03d} ({self.regime})\n'
        return text_record
class BatchLoss(Metric):
    """Prints the loss of every training batch as soon as it completes."""

    def __init__(self) -> None:
        super().__init__()
        # Loss of the most recent batch; None before the first batch.
        self.loss = None

    def on_after_train_batch(self, local_variables):
        """Record the scalar loss of the batch that just finished and print it."""
        batch_loss = local_variables['loss']
        self.loss = batch_loss.item()
        self.print_metric()

    def text_record(self):
        """Return the formatted per-batch loss line."""
        return f' batch loss: {self.loss:.3e}\n'
class EpochLoss(Metric):
    """Accumulates per-batch training loss over an epoch and records the total."""

    def __init__(self) -> None:
        super().__init__()
        # Running sum of batch losses; reset to 0 at the start of each epoch.
        self.loss = None

    def on_after_train_batch(self, local_variables):
        # Relies on on_before_epoch having initialized self.loss to 0.
        self.loss += local_variables['loss'].item()

    def on_before_epoch(self, local_variables):
        self.loss = 0

    def on_after_epoch(self, local_variables):
        self.print_metric()

    def text_record(self):
        text_record = f'\n epoch loss: {self.loss:.3e}\n'
        return text_record

    def numpy_record(self, records=None):
        """Append this epoch's total loss under the 'epoch_loss' key.

        Fix: the default ``records=None`` previously crashed with
        AttributeError on ``records.keys()``; an empty dict is now
        created in that case.  Passing a dict behaves as before.
        """
        if records is None:
            records = {}
        if 'epoch_loss' not in records:
            records['epoch_loss'] = list()
        records['epoch_loss'].append(self.loss)
        return records
class GradientMetrics(Metric):
    """Computes and records the global L2 norm of the model gradients
    after every training batch."""

    def __init__(self) -> None:
        super().__init__()
        self.gradient_norm = None
        self.batch_idx = None
        self.epoch = None

    def on_after_train_batch(self, local_variables):
        # 'self' in the caller's locals is the object owning `.model`.
        model = local_variables['self'].model
        self.batch_idx = local_variables['batch_idx']
        self.epoch = local_variables['epoch']
        self.gradient_norm = self._compute_norm(model)
        self.print_metric()

    def text_record(self):
        text_record = f' gradient norm: {self.gradient_norm:.3e}\n'
        return text_record

    def numpy_record(self, records=None):
        """Append the batch gradient norm into a per-epoch nested list.

        Fix: the default ``records=None`` previously crashed with
        AttributeError on ``records.keys()``; an empty dict is now
        created in that case.  Passing a dict behaves as before.
        """
        if records is None:
            records = {}
        if 'gradient_norm' not in records:
            records['gradient_norm'] = []
        if self.batch_idx == 0:
            # First batch of an epoch starts a new sub-list.
            records['gradient_norm'].append([self.gradient_norm])
        else:
            records['gradient_norm'][self.epoch].append(self.gradient_norm)
        return records

    @staticmethod
    def _compute_norm(model):
        """Global L2 norm over all parameter gradients (0 for params
        without a gradient)."""
        total_norm = 0
        for p in model.parameters():
            param_norm = p.grad.norm(2).item() if p.grad is not None else 0
            total_norm += param_norm ** 2
        total_norm = total_norm ** (1. / 2)
        return total_norm
class NumberOfParameters(Metric):
    """Logs total and trainable parameter counts once the model is set up."""

    def __init__(self) -> None:
        super().__init__()
        # Reference to the model, captured in on_after_setup.
        self.model = None

    def on_after_setup(self, local_variables):
        """Grab the freshly built model and print the parameter counts."""
        self.model = local_variables['self'].model
        self.print_metric()

    def text_record(self):
        """Two lines: total parameters and trainable parameters."""
        total_str = f'number of parameters {self._total_parameters(self.model):1.3e}\n'
        total_trainable_str = f'number of trainable parameters {self._total_trainable_parameters(self.model):1.3e}\n'
        return total_str + total_trainable_str

    @staticmethod
    def _total_parameters(model):
        count = 0
        for p in model.parameters():
            count += p.numel()
        return count

    @staticmethod
    def _total_trainable_parameters(model):
        count = 0
        for p in model.parameters():
            if p.requires_grad:
                count += p.numel()
        return count
| StarcoderdataPython |
8149729 | <gh_stars>100-1000
import time
import unittest
import torch
import numpy as np
import random
from functools import lru_cache
from models.diagonaled_mm_2d import diagonaled_mm2d, mask_invalid_locations
@lru_cache()
def get_2dmask(seq_len, nx, ny, w, d):
    """Build a (seq_len, seq_len) boolean mask for 2d local attention.

    Position i maps to grid cell (i // ny, i % ny).  Entry [i, j] is True
    (i.e. masked out) when j lies outside the (2w+1)x(2w+1) window around
    i, or when the row/column offsets are not multiples of the dilation d.
    Cached with lru_cache since the mask depends only on the five scalars
    (nx is kept in the signature/cache key even though it is unused here).

    Fix: replaces the legacy ``torch.BoolTensor(...)`` type constructor
    with the documented ``torch.tensor(..., dtype=torch.bool)`` factory.
    """
    def masked(i, j):
        dx = i // ny - j // ny
        dy = i % ny - j % ny
        return abs(dx) > w or abs(dy) > w or dx % d != 0 or dy % d != 0
    return torch.tensor([[masked(i, j) for j in range(seq_len)]
                         for i in range(seq_len)],
                        dtype=torch.bool, device='cpu')
def naive2d_matmul_qk(q, k, nx, ny, w, d, padding=0.0):
    """Dense reference attention scores: q @ k^T with every invalid
    (masked-out) location overwritten by `padding`."""
    bsz, num_heads, seq_len, head_dim = q.size()
    scores = torch.matmul(q, k.transpose(-2, -1))
    # Broadcast the (seq_len, seq_len) mask over batch and head dims.
    invalid = get_2dmask(seq_len, nx, ny, w, d).to(q.device)
    scores.masked_fill_(invalid[None, None, :, :], padding)
    return scores
def same_storage(x, y):
    """Return True when tensors *x* and *y* are views over one underlying
    storage (used to verify memory optimizations)."""
    x_ptr = x.storage().data_ptr()
    y_ptr = y.storage().data_ptr()
    return x_ptr == y_ptr
class TestSlidingChunksMM(unittest.TestCase):
    """Checks that the TVM-based diagonaled_mm2d attention (local window
    plus global tokens) matches a naive dense PyTorch implementation in
    both outputs and gradients, and times the two.
    NOTE: requires a CUDA device (`device = 'cuda'` below).
    """

    def test_tvm_equal_naiven2(self):
        # Fix all RNG seeds for reproducibility.
        np.random.seed(300)
        random.seed(300)
        torch.manual_seed(300)
        torch.cuda.manual_seed(300)
        torch.cuda.manual_seed_all(300)
        torch.set_printoptions(sci_mode=False)
        # Problem geometry: nx*ny local tokens plus Nglo global tokens.
        nx = 14
        ny = 14
        Nloc = nx * ny
        Nglo = 1
        N = Nloc + Nglo
        M = 64 # hidden size
        W = 13 # one sided. Actual window size = (2w+1)**2
        B = 2
        D = 1 # no dilation
        H = 6 # number of heads
        C = M * H
        autoregressive = False # not autoregressive
        scale = M ** -0.5
        device = 'cuda'
        dtype = torch.float32
        failed_tests = 0
        time1 = time2 = 0
        for i in range(50):
            if i < 5:
                time1 = time2 = 0 # don't include the first few iterations because of high variance
            query = torch.randn(B * H * N * M, requires_grad=True, device=device, dtype=dtype).view(B, H, N, M)
            query.retain_grad()
            key = torch.randn(B * H * N * M, requires_grad=True, device=device, dtype=dtype).flip(dims=(0,)).view(B, H, N, M)
            key.retain_grad()
            value = torch.randn(B * H * N * M, requires_grad=True, device=device, dtype=dtype).view(B, H, N, M)
            value.retain_grad()
            # TVM MM: windowed attention for the Nloc local tokens plus
            # dense rows/columns for the Nglo global tokens.
            torch.cuda.synchronize()
            start = time.time()
            q = query[:, :, Nglo:].float().contiguous() * scale
            k = key.float()
            v = value.float()
            attn11 = diagonaled_mm2d(q, k[:, :, Nglo:].contiguous(), nx, ny,
                                     W, D, False, 0, autoregressive)
            mask_invalid_locations(attn11, nx, ny, W, D, autoregressive)
            # Local queries attending to the global keys.
            attn10 = torch.bmm(q.view(B * H, Nloc, M), k[:, :, :Nglo].reshape(B * H, Nglo, M).transpose(-2, -1)).view(B, H, Nloc, Nglo)
            attn1 = torch.cat((attn10, attn11), dim=-1)
            # Numerically stable softmax (subtract the row max first).
            attn1 = (attn1 - torch.max(attn1, dim=-1, keepdim=True)[0]).softmax(dim=-1)
            x1 = diagonaled_mm2d(attn1[:,:,:,Nglo:].float().contiguous(), v[:,:,Nglo:].contiguous(), nx, ny, W, D, True, 0, autoregressive)
            x1 = x1 + torch.bmm(attn1[:, :, :, :Nglo].view(B * H, Nloc, Nglo), v[:, :, :Nglo].reshape(B * H, Nglo, M)).view(B, H, Nloc, M)
            x1 = x1.transpose(1, 2).reshape(B, Nloc, C)
            # Global queries attend densely to every key.
            q_global = query[:, :, :Nglo].float().contiguous() * scale
            k_global = k
            v_global = v
            attn0 = torch.bmm(q_global.view(B * H, Nglo, M), k_global.reshape(B * H, N, M).transpose(-2, -1))
            attn0 = (attn0 - torch.max(attn0, dim=-1, keepdim=True)[0]).softmax(dim=-1)
            x0 = torch.bmm(attn0, v_global.reshape(B * H, N, M)).view(B, H, Nglo, M).transpose(1, 2).reshape(B, Nglo, C)
            context1 = torch.cat((x0, x1), dim=1)
            context1.sum().backward()
            torch.cuda.synchronize()
            end = time.time()
            time1 += end - start
            # Snapshot gradients (copies via 1.0*grad), then zero them so
            # the naive pass below starts from clean buffers.
            query_grad1 = 1.0*query.grad
            query.grad.zero_()
            key_grad1 = 1.0*key.grad
            key.grad.zero_()
            value_grad1 = 1.0*value.grad
            value.grad.zero_()
            torch.cuda.empty_cache()
            # The naive path below only covers the W-covers-everything,
            # no-dilation, bidirectional configuration.
            assert D == 1
            assert not autoregressive
            torch.cuda.synchronize()
            start = time.time()
            attn = (query @ key.transpose(-2, -1)) * scale
            attn = (attn - torch.max(attn, dim=-1, keepdim=True)[0]).softmax(dim=-1)
            context2 = (attn @ value).transpose(1, 2).reshape(B, N, C)
            context2.sum().backward()
            torch.cuda.synchronize()
            end = time.time()
            time2 += end - start
            query_grad2 = 1.0*query.grad
            query.grad.zero_()
            key_grad2 = 1.0*key.grad
            key.grad.zero_()
            value_grad2 = 1.0*value.grad
            value.grad.zero_()
            torch.cuda.empty_cache()
            # Count mismatching iterations instead of failing immediately.
            try:
                # assert torch.allclose(attention1, attention2.float(), atol=1e-4, rtol=1e-5)
                assert torch.allclose(context1, context2.float(), atol=1e-4, rtol=1e-5), "context1"
                assert torch.allclose(query_grad1, query_grad2.float(), atol=1e-4, rtol=1e-3), "query_grad1"
                assert torch.allclose(key_grad1, key_grad2.float(), atol=1e-4, rtol=1e-3), "key_grad1"
                assert torch.allclose(value_grad1, value_grad2.float(), atol=1e-4, rtol=1e-3), "value_grad1"
            except AssertionError:
                failed_tests += 1
        print('Time tvm: {0:.5f} s'.format(time1))
        print('Time pytorch naive implementation: {0:.5f} s'.format(time2))
        print('TVM vs. Naive speedup: {0:.5f}x'.format(time1/time2))
        print(f'Failed tests: {failed_tests}/{i+1}')
        assert failed_tests == 0
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
1894446 | <reponame>gabriel-vitor/Python-projects
import sys
import random
import time
#Nesse primeiro projeto, o professor da disciplina não permitiu utilizar funçoes (modularizar o código), porque o assunto ainda não tinha sido dado.
#Sendo assim, esse projeto ficou muito extenso. Como a discplina era introdutória e eu era iniciante, há algumas más práticas como por exemplo, algumas
#variáveis iniciadas em letra maiuscula.
# Super Trunfo (Pokémon theme) — interactive two-player card game.
# Refactored version of the original no-functions school project.  Concrete
# fixes over the original flat script:
#   * ``sys``, ``random`` and ``time`` are now imported (they were used but
#     never imported, so the script crashed with NameError on first use);
#   * the opening-menu loop no longer compares the string ``opcao`` against
#     the integer 2 (a condition that could never become false);
#   * the ``escolhida5 = Carta5`` typo (player 2 picking card 2 in the "Vida"
#     branch) is fixed — the chosen card always lands in the right variable;
#   * an invalid attribute/card number now re-prompts instead of falling
#     through with the "chosen card" variable undefined (NameError before);
#   * scores are reset when a new match starts (they leaked across matches);
#   * the six nearly identical player/attribute branches are factored into
#     helpers, so the duel logic exists exactly once.
import random
import sys
import time

# Deck of card names; three cards are dealt to each player per round.
_CARTAS = ['Pikachu', 'Raichu', 'Squirtle', 'Bulbasaur', 'Charmander', 'Charizard',
           'Kakuna', 'Weedle', 'Pidgey', 'Pidgeotto', 'Rattata', 'Raticate',
           'Golem', 'Onix', 'Hitmonchan', 'Tangela', 'Dragonair', 'Dragonite',
           'Mew', 'Mewtwo']
# Menu option -> position of that stat inside a card list
# (a card is [name, 'Força:', f, 'Defesa:', d, 'Vida:', v], as in the original).
_INDICE_ATRIBUTO = {1: 2, 2: 4, 3: 6}
_NOME_ATRIBUTO = {1: 'Força', 2: 'Defesa', 3: 'Vida'}


def _nova_carta(nome):
    """Return a card list with random 0-100 stats, same layout as the original."""
    return [nome,
            'Força:', random.randint(0, 100),
            'Defesa:', random.randint(0, 100),
            'Vida:', random.randint(0, 100)]


def _limpar_tela():
    """Push the previous output off-screen so the opponent cannot peek."""
    print("\n" * 100)


def _menu_inicial():
    """Show the opening menu until the player starts the game (3) or quits (2)."""
    while True:
        print("Digite:")
        print("[1] para ler as informações de jogo")
        print('[2] para sair do jogo')
        print('[3] para iniciar o jogo')
        opcao = input('Digite o número da sua opção: ')
        if opcao == '1':
            print('=' * 100)
            print(" O Jogo Super Trunfo consiste em colocar dois jogadores para disputarem personagens de cartas.")
            print(" O tema escolhido é sobre Pokemons, e terá como atributos Força, Defesa e Vida. O jogo será ")
            print(" composto por 2 jogadores, que deverão informar seus nicknames e o número máximo de rodadas.")
            print(" Cada jogador receberá três cartas por rodada e cada carta terá um identificador.")
            print('=' * 100)
        elif opcao == '2':
            print('=' * 100)
            print("Saindo do jogo...")
            sys.exit(0)
        elif opcao == '3':
            return
        else:
            print('=' * 100)
            print('Informação Inválida. Você só pode digitar os números 1, 2 ou 3.')
            print('=' * 100)


def _escolher_atributo():
    """Ask the drawn player for the stat to duel on; return its card index."""
    while True:
        escolha = int(input('escolha seu atributo \n [1]Força \n [2]Defesa \n [3]Vida'))
        if escolha in _INDICE_ATRIBUTO:
            print('o atributo escolhido foi', _NOME_ATRIBUTO[escolha])
            return _INDICE_ATRIBUTO[escolha]
        print('Opção inválida. Digite 1, 2 ou 3.')


def _escolher_carta(jogador, mao):
    """Show the hand ``mao`` and return the card the player picks; re-asks on bad input."""
    print('{}, suas cartas são \n {} \n {} \n {}'.format(jogador, *mao))
    while True:
        escolha = int(input('Escolha sua carta [1] Primeira [2] Segunda [3] Terceira: '))
        if 1 <= escolha <= 3:
            carta = mao[escolha - 1]
            print('A carta de {} é:'.format(jogador), carta)
            time.sleep(5)      # give the player a moment to read it
            _limpar_tela()
            return carta
        print('Opção inválida. Digite 1, 2 ou 3.')


def _rodada(nomes):
    """Play one round between ``nomes`` (2-tuple of nicks); return points earned as [p1, p2]."""
    random.shuffle(_CARTAS)
    maos = ([_nova_carta(n) for n in _CARTAS[0:3]],
            [_nova_carta(n) for n in _CARTAS[3:6]])
    # Deal: show each hand for 10 seconds, then clear the screen.
    for nome, mao in zip(nomes, maos):
        print(nome)
        for carta in mao:
            print(carta)
        print('=' * 100)
        time.sleep(10)
        _limpar_tela()
    sorteado = random.choice((0, 1))   # who picks attribute and card first
    outro = 1 - sorteado
    print('o jogador sorteado é:', nomes[sorteado])
    indice = _escolher_atributo()
    carta_sorteado = _escolher_carta(nomes[sorteado], maos[sorteado])
    print('=' * 100)
    print('a carta do seu oponente é: ', carta_sorteado)
    print('-' * 100)
    carta_outro = _escolher_carta(nomes[outro], maos[outro])
    pontos = [0, 0]
    # The round winner banks the value of the disputed stat.
    if carta_sorteado[indice] > carta_outro[indice]:
        print('Vitória de', nomes[sorteado])
        pontos[sorteado] = carta_sorteado[indice]
    elif carta_sorteado[indice] < carta_outro[indice]:
        print('Vitória de', nomes[outro])
        pontos[outro] = carta_outro[indice]
    else:
        print('deu empate!')
    return pontos


def main():
    """Entry point: opening menu, player registration, then matches of N rounds."""
    print('=' * 100)
    print("Olá, bem vindo ao jogo Super Trunfo!")
    print('=' * 100)
    _menu_inicial()
    print('Iniciando o jogo...')
    nomes = (input('Jogador 1, digite o seu nick: '),
             input('Jogador 2, digite o seu nick: '))
    # A match must have at least three rounds.
    while True:
        rodadas = int(input('Digite o número de rodadas que deseja jogar:'))
        if rodadas >= 3:
            print('Vamos lá!')
            break
        print('Número inválido. Só é permitido jogar três rodadas ou mais. Digite outro número')
    print('Game Start!!')
    print('-' * 100)
    print('Suas cartas serão mostradas por 10 segundos e a selecionada por mais 5 segundos.')
    print('-' * 100)
    while True:
        placar = [0, 0]   # reset every match (the original leaked scores between matches)
        for rodada in range(rodadas):
            ganhos = _rodada(nomes)
            placar[0] += ganhos[0]
            placar[1] += ganhos[1]
            print('-' * 100)
            print('Próxima rodada!' if rodada + 1 < rodadas else 'Rodada encerrada')
            print('-' * 100)
        if placar[0] == placar[1]:
            print('ops, parece que temos um empate')
        else:
            vencedor = 0 if placar[0] > placar[1] else 1
            print('{} venceu a partida!! Pontuação: {}'.format(nomes[vencedor], placar[vencedor]))
            print('{} perdeu :( Pontuação: {}'.format(nomes[1 - vencedor], placar[1 - vencedor]))
        if int(input('deseja iniciar uma nova partida? [1] Sim / [2] Não: ')) == 1:
            print('*' * 100)
            print('iniciando nova partida')
            print('*' * 100)
        else:
            print('*' * 100)
            print('Encerrando o jogo')
            print('*' * 100)
            break


if __name__ == '__main__':
    main()
| StarcoderdataPython |
8076142 | __author__ = 'j'
import pandas as pd
FACTOR = 10  # module-level default scale; convertCSV itself takes an explicit ``factor``


def convertCSV(filelists, factor):
    """Collect the fifth column of every CSV into one frame, then cascade-scale.

    Each file is read with its second line as the header; its first column
    becomes the index and its fifth column the data, renamed to the file's
    position ("0", "1", ...).  The frames are outer-merged on the index, and
    afterwards every column beyond the first is *overwritten* with the
    previous column times ``factor`` (so the file data of columns 1..n-1 is
    replaced by a geometric cascade of column 0 — same as the original).
    """
    merged = pd.DataFrame()
    for position, path in enumerate(filelists):
        frame = pd.read_csv(path, header=1)
        frame.index = frame[frame.columns[0]]
        column = pd.DataFrame(frame[frame.columns[4]])
        column.columns = [str(position)]
        if merged.empty:
            merged = column
        else:
            merged = pd.merge(merged, column, how='outer',
                              left_index=True, right_index=True)
    for col in range(1, len(merged.columns)):
        merged[str(col)] = merged[str(col - 1)] * factor
    return merged
| StarcoderdataPython |
4983595 | <filename>tests/test_forms.py
import pytest
from squall import File, Form, Squall, UploadFile
from squall.testclient import TestClient
app = Squall()


@app.post("/files/")
async def create_file(
    file: UploadFile = File(...), fileb: UploadFile = File(...), token: str = Form(...)
):
    """Multipart endpoint: report the first upload's size, the form token,
    and the second upload's content type."""
    contents = await file.read()
    return {
        "file_size": len(contents),
        "token": token,
        "fileb_content_type": fileb.content_type,
    }


client = TestClient(app)
openapi_schema = {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/files/": {
"post": {
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"summary": "Create File",
"operationId": "create_file_files__post",
"requestBody": {
"content": {
"multipart/form-data": {
"schema": {
"$ref": "#/components/schemas/Body_create_file_files__post"
}
}
},
"required": True,
},
}
}
},
"components": {
"schemas": {
"Body_create_file_files__post": {
"title": "Body_create_file_files__post",
"required": ["file", "fileb", "token"],
"type": "object",
"properties": {
"file": {"title": "File", "type": "string", "format": "binary"},
"fileb": {"title": "Fileb", "type": "string", "format": "binary"},
"token": {"title": "Token", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {"type": "string"},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
# Skipped: the generated schema does not yet match the snapshot above.
@pytest.mark.skip("Not implemented")
def test_openapi_schema():
    # /openapi.json should equal the snapshot dict defined above.
    response = client.get("/openapi.json")
    assert response.status_code == 200, response.text
    assert response.json() == openapi_schema


# Expected 422 validation payloads.
# NOTE(review): ``file_required`` and ``file_and_token_required`` are never
# referenced by the tests below — every failing case asserts
# ``error_parsing_body`` instead.  Confirm whether they are dead fixtures.
file_required = {
    "details": [
        {"loc": ["form", "file"], "msg": "field required"},
        {"loc": ["form", "fileb"], "msg": "field required"},
    ]
}
error_parsing_body = {"detail": "There was an error parsing the body"}
file_and_token_required = {
    "details": [
        {"loc": ["form", "file"], "msg": "field required"},
        {"loc": ["form", "fileb"], "msg": "field required"},
        {"loc": ["form", "token"], "msg": "field required"},
    ]
}


def test_post_form_no_body():
    # No body at all -> 400 "error parsing the body".
    response = client.post("/files/")
    assert response.status_code == 400, response.text
    assert response.json() == error_parsing_body


def test_post_form_no_file():
    # Only the token field, no uploads -> still a body-parse error.
    response = client.post("/files/", data={"token": "foo"})
    assert response.status_code == 400, response.text
    assert response.json() == error_parsing_body


def test_post_body_json():
    # JSON instead of multipart/form-data is rejected the same way.
    response = client.post("/files/", json={"file": "Foo", "token": "Bar"})
    assert response.status_code == 400, response.text
    assert response.json() == error_parsing_body


def test_post_file_no_token(tmp_path):
    # One upload but no form token -> body-parse error.
    # NOTE: shadows the module-level ``client`` with a fresh local one.
    path = tmp_path / "test.txt"
    path.write_bytes(b"<file content>")
    client = TestClient(app)
    with path.open("rb") as file:
        response = client.post("/files/", files={"file": file})
    assert response.status_code == 400, response.text
    assert response.json() == error_parsing_body


def test_post_files_and_token(tmp_path):
    # Happy path: both uploads plus token -> 200 with size/token/content-type.
    patha = tmp_path / "test.txt"
    pathb = tmp_path / "testb.txt"
    patha.write_text("<file content>")
    pathb.write_text("<file b content>")
    client = TestClient(app)
    with patha.open("rb") as filea, pathb.open("rb") as fileb:
        response = client.post(
            "/files/",
            data={"token": "foo"},
            files={"file": filea, "fileb": ("testb.txt", fileb, "text/plain")},
        )
    assert response.status_code == 200, response.text
    assert response.json() == {
        "file_size": 14,
        "token": "foo",
        "fileb_content_type": "text/plain",
    }
| StarcoderdataPython |
5087725 | import os
import re
import yaml
from yaml.loader import Reader, Scanner, Parser, Composer, SafeConstructor, Resolver
from glob import glob
from urllib.request import urlopen
from selectolax.parser import HTMLParser
# import for pdf to text
import io
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import TextConverter
from pdfminer.layout import LAParams
from pdfminer.pdfpage import PDFPage
from PyPDF2 import PdfFileReader, PdfFileWriter
# epub converter
import ebooklib
from ebooklib import epub
__all__ = ['ingest_config', 'ProcessEpub', 'ProcessHtml', 'ProcessPDF']
def get_text_selectolax(html):
    """Return the visible body text of *html* (newline-separated nodes),
    with <script> and <style> subtrees removed; None for blank input."""
    stripped = html.strip()
    if not stripped:
        return None
    tree = HTMLParser(stripped)
    # Drop non-content subtrees before extracting text.
    for selector in ('script', 'style'):
        for node in tree.css(selector):
            node.decompose()
    return tree.body.text(separator='\n')
def ingest_config(configfile):
    """Load a YAML config (mappings come back as attribute-accessible MyDict)
    and prepare the output directory layout:
    <outputfile.dir>/<outputfile.name>/<outputfile.subdir_audio>.
    """
    with open(configfile, 'r') as s:
        config = yaml.load(s, Loader=MySafeLoader)
    # Derived paths are attached onto the config object itself.
    config.name = config.outputfile.name
    config.outputdir = os.path.join(config.outputfile.dir, config.name)
    config.outputdir_audio = os.path.join(config.outputdir, config.outputfile.subdir_audio)
    os.makedirs(config.outputdir_audio, exist_ok=True)  # creates parents too
    # Placeholder branches: no per-input-type preparation is implemented yet.
    if config.inputfile.type == 'pdf':
        pass
    elif config.inputfile.type == 'html':
        pass
    elif config.inputfile.type == 'epub':
        pass
    return config
class ProcessEpub:
    """Extract chapter texts from an EPUB and write one .txt file per chapter."""

    def __init__(self, epub_path, outputdir, param_epub):
        self.epub_path = epub_path
        self.book_name = os.path.basename(outputdir)
        self.param = param_epub
        # Normalise the optional trim counts: falsy -> None (keep everything);
        # cut_end is negated so both values can be used directly in a slice.
        self.param.cut_start = None if not self.param.get('cut_start') else param_epub.cut_start
        self.param.cut_end = None if not self.param.get('cut_end') else -param_epub.cut_end
        self.chapterstextprefix = os.path.join(outputdir, 'chaptertexts', self.book_name)
        os.makedirs(os.path.dirname(self.chapterstextprefix), exist_ok=True)

    def process(self):
        """EPUB -> per-chapter HTML -> plain text; save each chapter and
        return the (trimmed) list of chapter texts."""
        chapters_html = self.epub2html()
        chapters_text = self.html2text(chapters_html)
        chapters_text = chapters_text[self.param.cut_start: self.param.cut_end]
        for i, chapter_text in enumerate(chapters_text):
            # save the chapters text
            with open('{}_chapter{:03d}.txt'.format(self.chapterstextprefix, i + 1), 'w') as f:
                f.write(chapter_text)
        return chapters_text

    @ staticmethod
    def html2text(chapters_html):
        """Convert each chapter's HTML to cleaned plain text (form feeds and
        non-breaking spaces removed, blank-line runs collapsed)."""
        chapters_text = []
        for html_ in chapters_html:
            text_ = get_text_selectolax(html_).replace('\x0c', '').replace('\xa0', '')
            text_ = re.sub('\n{3,}', '\n', text_)
            chapters_text.append(text_)
        return chapters_text

    def epub2html(self):
        """
        extract html from epub book. Assumes the book is pre-split into chapters
        :return: chapters in html format
        """
        # NOTE(review): item.get_content() presumably returns bytes — confirm
        # that get_text_selectolax handles bytes input downstream.
        book = epub.read_epub(self.epub_path)
        chapters_html = []
        for item in book.get_items():
            if item.get_type() == ebooklib.ITEM_DOCUMENT:
                chapters_html.append(item.get_content())
        return chapters_html
class ProcessHtml:
    """Split one HTML page (URL or local file) into chapter texts and save them."""

    def __init__(self, webpage, outputdir, param_html, maxsplit=1):
        self.webpage = webpage
        self.param = param_html
        # Normalise optional trim counts: falsy -> None; cut_end negated for slicing.
        self.param.cut_start = None if not self.param.get('cut_start') else param_html.cut_start
        self.param.cut_end = None if not self.param.get('cut_end') else -param_html.cut_end
        self.maxsplit = maxsplit  # used when stripping HTTP headers from a raw response
        self.book_name = os.path.basename(outputdir)
        self.chapterstextprefix = os.path.join(outputdir, 'chaptertexts', self.book_name)
        os.makedirs(os.path.dirname(self.chapterstextprefix), exist_ok=True)

    def process(self):
        """Split the page on ``param.split_regex``, pair every heading piece
        with the following body piece, convert to text, save and return."""
        html = self.extract_html()
        chapters_html = re.split(self.param.split_regex, html)
        chapters_html = chapters_html[self.param.cut_start: self.param.cut_end]
        # Join even/odd pieces as "heading.\nbody" — assumes the split
        # alternates heading/body; an odd trailing piece is silently dropped.
        chapters_html = [i + '.\n' + j for i, j in zip(chapters_html[::2], chapters_html[1::2])]
        chapters_text = []
        for i, chapter_html in enumerate(chapters_html):
            chapter_text = get_text_selectolax(chapter_html).replace('\x0c', '').replace('\xa0', '')
            chapter_text = re.sub('\n{4,}', '\n', chapter_text)
            # chapters_text[i] = 'CHAPTER {}.\n'.format(i+1) + chapters_text[i]
            chapters_text.append(chapter_text)
            # save the chapters text
            with open('{}_chapter{:03d}.txt'.format(self.chapterstextprefix, i + 1), 'w') as f:
                f.write(chapter_text)
        return chapters_text

    def extract_html(self):
        """Fetch the page body from a URL, or read it from a local file.

        NOTE(review): the URL branch returns *bytes* (split on b'\\r\\n\\r\\n'),
        while the file branch returns *str*; downstream ``re.split`` with a
        str pattern would fail on bytes — confirm which branch is exercised.
        The ``except ValueError`` also catches a failed 2-way unpack, not
        only the "not a URL" case.
        """
        try:  # try to open as a url first
            with urlopen(self.webpage) as f:
                _, html = f.read().split(b'\r\n\r\n', maxsplit=self.maxsplit)
        except ValueError:  # if not then try as a html file
            with open(self.webpage, 'r') as f:
                html = f.read()
        return html
class ProcessPDF:
    """Split a PDF into per-chapter PDFs, extract each chapter's text, save both."""

    def __init__(self, pdf_path, outputdir, param_pdf):
        self.pdf_path = pdf_path
        # List of 1-based chapter start pages, with the book's last page appended.
        # NOTE(review): pdf_splitter pops from this list, mutating the caller's
        # param_pdf.ch_page_list — a second process() run would misbehave.
        self.ch_page_list = param_pdf.ch_page_list
        self.book_name = os.path.basename(outputdir)
        self.chapterstextprefix = os.path.join(outputdir, 'chaptertexts', self.book_name)
        self.chapterspdfprefix = os.path.join(outputdir, 'chapterpdfs', self.book_name)
        os.makedirs(os.path.dirname(self.chapterspdfprefix), exist_ok=True)
        os.makedirs(os.path.dirname(self.chapterstextprefix), exist_ok=True)

    def process(self):
        """Split the PDF, then convert every chapter PDF to cleaned text.

        Returns the list of chapter texts (also written as numbered .txt files).
        """
        self.pdf_splitter()
        # Relies on the zero-padded chapter numbers sorting lexicographically.
        chapterpdfs = sorted(glob(self.chapterspdfprefix + '*.pdf'), key=os.path.basename)
        chapters_text = []
        for i, chapterpdf in enumerate(chapterpdfs):
            print('----- Chapter %d -----' % (i + 1))
            chapter_text = self.convert_pdf_to_txt(chapterpdf).replace('\x0c', '').replace('\xa0', '')
            # save the chapters text
            with open('{}_chapter{:03d}.txt'.format(self.chapterstextprefix, i + 1), 'w') as f:
                f.write(chapter_text)
            chapters_text.append(chapter_text)
        return chapters_text

    @ staticmethod
    def convert_pdf_to_txt(filepath):
        """Extract all text from one PDF file via pdfminer."""
        rsrcmgr = PDFResourceManager()
        retstr = io.StringIO()
        codec = 'utf-8'  # NOTE(review): unused — TextConverter gets no codec arg
        laparams = LAParams()
        device = TextConverter(rsrcmgr, retstr, laparams=laparams)
        fp = open(filepath, 'rb')
        interpreter = PDFPageInterpreter(rsrcmgr, device)
        password = ""
        maxpages = 0  # 0 = no page limit
        caching = True
        pagenos = set()
        for page in PDFPage.get_pages(fp, pagenos, maxpages=maxpages,
                                      password=password,
                                      caching=caching,
                                      check_extractable=True):
            interpreter.process_page(page)
        fp.close()
        device.close()
        text = retstr.getvalue()
        retstr.close()
        return text

    def pdf_splitter(self):
        """Write one PDF per chapter, using ch_page_list's 1-based start pages.

        The last list entry is treated as the final page of the book and is
        popped off (destructively — see the note in __init__).
        """
        inputfile = open(self.pdf_path, 'rb')
        pdf = PdfFileReader(inputfile)
        last_page = self.ch_page_list.pop(-1)
        # loop through each chapter start page
        for i, ch_page_num in enumerate(self.ch_page_list):
            pdf_writer = PdfFileWriter()
            end_page_num = self.ch_page_list[i+1] - 1 if not len(self.ch_page_list) == i+1 else last_page
            # loop through pages in each chapter (getPage is 0-based)
            for page in range(ch_page_num, end_page_num+1):
                pdf_writer.addPage(pdf.getPage(page-1))
            output_filename = '{}_chapter{}.pdf'.format(self.chapterspdfprefix, i + 1)
            with open(output_filename, 'wb') as out:
                pdf_writer.write(out)
            print('Created: {}'.format(output_filename))
        inputfile.close()
class MyDict(dict):
    """Dict whose keys are also readable as attributes (cfg.name == cfg['name'])."""

    def __getattr__(self, name):
        # Raise AttributeError (not KeyError) for missing keys so that
        # getattr()/hasattr() and protocols that probe for optional
        # attributes (copy, pickle, introspection) behave correctly.
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name) from None
class MySafeConstructor(SafeConstructor):
    """SafeConstructor that materialises YAML mappings as MyDict instead of dict."""

    def construct_yaml_map(self, node):
        # Two-phase construction (yield the empty container first) mirrors
        # the base class and supports anchored/recursive mappings.
        data = MyDict()
        yield data
        value = self.construct_mapping(node)
        data.update(value)

# Route every plain YAML mapping tag through the MyDict-producing constructor.
MySafeConstructor.add_constructor(u'tag:yaml.org,2002:map', MySafeConstructor.construct_yaml_map)
class MySafeLoader(Reader, Scanner, Parser, Composer, MySafeConstructor, Resolver):
    """Safe YAML loader with MySafeConstructor swapped in, so every mapping
    in the loaded document is a MyDict (attribute-accessible)."""

    def __init__(self, stream):
        # Same component wiring as yaml.SafeLoader.
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        MySafeConstructor.__init__(self)
        Resolver.__init__(self)
def split_html_to_chapters(html, regex='<p><a id="chap[0-9]{2,2}"/></p>', cut=(0, 0), start_chapter=1):
    """Split the HTML file at every chapter marker and write each piece as
    chapterNN.html next to the source file.

    ``cut`` drops that many leading/trailing pieces (0 means "keep all");
    numbering starts at ``start_chapter``.
    """
    with open(html, 'r') as src:
        pieces = re.split(regex, src.read())
    head, tail = cut
    first = head or None
    last = -tail if tail else None
    pieces = pieces[first:last]
    print(first, last, len(pieces))
    out_dir = os.path.dirname(html)
    for offset, piece in enumerate(pieces):
        target = os.path.join(out_dir, 'chapter{:02d}.html'.format(offset + start_chapter))
        with open(target, 'w') as dst:
            dst.write(piece)
| StarcoderdataPython |
9756790 | <gh_stars>1-10
# Subscription price by plan length in months.
# NOTE(review): the currency, and whether multi-month values are totals or
# per-month rates, are not stated anywhere in this file — confirm at the caller.
PRICES = {
    1: 299,
    3: 800,
    6: 900,
    12: 999
}
| StarcoderdataPython |
11341561 | import logging
import sys
import time
import datetime
import signal
from multiprocessing.managers import SyncManager
class AspineClient:
    """Thin client for a remote key/value store exposed through a
    multiprocessing SyncManager (proxies: get_mem_data, get_manager_info)."""

    def __init__(self, host: str = "127.0.0.1", port: int = 5116, authkey: str = "123456", *args, **kwargs):
        logging.debug(f"Start initializing client at {datetime.datetime.now()}")
        # Connection is deferred to connect(); only the manager handle is built here.
        self.core = SyncManager(
            (host, port),
            authkey=authkey.encode()
        )

    def connect(self):
        """Connect to the manager and bind the remote-data accessors.

        NOTE(review): register() is called *after* connect(), and on the
        instance rather than the class — verify this ordering actually
        exposes the proxies with this multiprocessing version.
        Connection failures are logged and swallowed; later calls will then
        fail with AttributeError on the unbound accessors.
        """
        try:
            self.core.connect()
            self.core.register("get_mem_data")
            self.core.register("get_manager_info")
            self._get_mem_data = getattr(self.core, "get_mem_data")
            self._get_manager_info = getattr(self.core, "get_manager_info")
        except ConnectionRefusedError as cre:
            logging.error(f"Please check server/manager is up and running.")
            logging.exception(cre)

    def get(self, key_name):
        """Return the stored record for *key_name*, or None if absent/on error."""
        try:
            # self._get_mem_data()._close()
            res = self._get_mem_data().get(key_name)
            if res is not None:
                return res
        except Exception as e:
            # Broad catch: any proxy/transport failure is logged and yields None.
            logging.error(f"Error.")
            logging.exception(e)

    def set(self, key_name, value):
        """Store *value* under *key_name*, wrapped with its name and a set timestamp."""
        try:
            res = {key_name: {
                "name": key_name,
                "value": value,
                "set_ts": time.time()
            }}
            # self._get_mem_data()._close()
            self._get_mem_data().update(res)
        except Exception as e:
            logging.error(f"Error.")
            logging.exception(e)

    def is_exist(self, key_name):
        """True if *key_name* is present; False if absent; None on error."""
        try:
            r = self.get(key_name)
            if r is not None:
                return True
            else:
                return False
        except Exception as e:
            logging.error(f"Error.")
            logging.exception(e)

    def list(self):
        """Return the remote store's keys view, or None on error."""
        try:
            r = self._get_mem_data().keys()
            return r
        except Exception as e:
            logging.error(f"Error.")
            logging.exception(e)
| StarcoderdataPython |
5041218 | <reponame>Ascend/modelzoo
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#encoding=utf-8
import os
import sys
import argparse
# CLI definition kept at module level so main() can be called after patching
# sys.argv.  NOTE(review): the default for --to is the *int* 48, which can
# never equal the string values compared below ("60-48"/"60-39"/"48-39") —
# running without an explicit --to always hits the "not supported" exit.
parser = argparse.ArgumentParser(description="Normalize the phoneme on TIMIT")
parser.add_argument("--map", default="./decode_map_48-39/phones.60-48-39.map", help="The map file")
parser.add_argument("--to", default=48, help="Determine how many phonemes to map")
parser.add_argument("--src", default='./data_prepare/train/phn_text', help="The source file to mapping")
parser.add_argument("--tgt", default='./data_prepare/train/48_text', help="The target file after mapping")


def main():
    """Rewrite every utterance in --src through the phone map and save to --tgt.

    The map file has tab-separated columns: 60-phone, 48-phone, 39-phone;
    phones that map to the empty string are dropped from the output.
    """
    args = parser.parse_args()
    if not os.path.exists(args.map) or not os.path.exists(args.src):
        print("Map file or source file not exist !")
        sys.exit(1)
    phone_map = {}
    with open(args.map) as map_file:
        for raw in map_file.readlines():
            cols = raw.strip().split('\t')
            if args.to == "60-48":
                phone_map[cols[0]] = cols[1] if len(cols) > 1 else ""
            elif args.to == "60-39":
                phone_map[cols[0]] = cols[2] if len(cols) > 1 else ""
            elif args.to == "48-39":
                if len(cols) == 3:
                    phone_map[cols[1]] = cols[2]
            else:
                print("%s phonemes are not supported" % args.to)
                sys.exit(1)
    with open(args.src, 'r') as rf, open(args.tgt, 'w') as wf:
        for raw in rf.readlines():
            parts = raw.strip().split(' ')
            uttid, phones = parts[0], parts[1:]
            mapped = [phone_map[phone] for phone in phones if phone_map[phone] != ""]
            wf.writelines(uttid + ' ' + ' '.join(mapped) + '\n')


if __name__ == "__main__":
    main()
| StarcoderdataPython |
5138091 | <filename>epookman_gui/ui/widgets/mainWindow.py<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of epookman_gui.
# License: MIT, see the file "LICENCS" for details.
from PyQt5.QtCore import (QCoreApplication, QSize)
from PyQt5.QtWidgets import (QFrame, QHBoxLayout, QVBoxLayout, QWidget,
QMessageBox)
from epookman_gui.ui.widgets.leftMenu import LeftMenu
from epookman_gui.ui.widgets.pages import Pages
from epookman_gui.api.themer import themer
from epookman_gui.api.db import (fetch_option, commit_option, connect)
MAINWINDOW_WIDTH_MIN = 1000
MAINWINDOW_HEIGHT_MIN = 500
MAINWINDOW_WIDTH = 1400
MAINWINDOW_HEIGHT = 700
DEFAULT_PAGE = "ALL"
DEFAULT_THEME = "dark-purple"
class Ui_MainWindow(object):
    """Builder for the epookman main window.

    Assembles the central widget (a left navigation menu next to a stacked
    pages area), wires the navigation buttons and the theming menu, and
    applies the theme persisted in the options database.
    """

    def setupUi(self, MainWindow):
        """Entry point: build the complete UI inside *MainWindow*."""
        self.setupMainWindow(MainWindow)

    def setupMainWindow(self, MainWindow):
        """Create, style and lay out every widget of the main window."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(MAINWINDOW_WIDTH, MAINWINDOW_HEIGHT)
        MainWindow.setMinimumSize(
            QSize(MAINWINDOW_WIDTH_MIN, MAINWINDOW_HEIGHT_MIN))

        # Apply the theme stored in the database (or the fallback default).
        styleSheet = self.getDefaultStyle()
        MainWindow.setStyleSheet(styleSheet)

        self.centralwidget = QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")

        self.MainWindowLayout = QVBoxLayout(self.centralwidget)
        self.MainWindowLayout.setContentsMargins(0, 0, 0, 0)
        self.MainWindowLayout.setSpacing(0)
        self.MainWindowLayout.setObjectName("MainWindowLayout")

        self.MainWindowContent = QFrame(self.centralwidget)
        self.MainWindowContent.setFrameShape(QFrame.NoFrame)
        self.MainWindowContent.setFrameShadow(QFrame.Raised)
        self.MainWindowContent.setObjectName("MainWindowContent")

        self.MainWindowContentLayout = QHBoxLayout(self.MainWindowContent)
        self.MainWindowContentLayout.setContentsMargins(0, 0, 0, 0)
        self.MainWindowContentLayout.setSpacing(0)
        self.MainWindowContentLayout.setObjectName("MainWindowContentLayout")

        self.leftMenu = LeftMenu(MainWindow)
        self.pages = Pages(MainWindow)
        self.MainWindowContentLayout.addWidget(self.leftMenu)
        self.MainWindowContentLayout.addWidget(self.pages)
        self.MainWindowLayout.addWidget(self.MainWindowContent)
        MainWindow.setCentralWidget(self.centralwidget)

        self.setButtons()
        self.connectThemingMenu()

        # Until at least one ebook directory has been configured, land the
        # user on the settings page instead of an empty library view.
        if self.pages.settingsPage.content.dirs:
            self.setDefaultPage("ALL")
        else:
            self.setDefaultPage("SETTINGS")

    def setButtons(self):
        """Connect every left-menu button to its page-switching action."""
        # LEFT MENU BUTTONS
        leftMenufunc = self.pages.changePage
        button = self.leftMenu.reading
        self.connectButton(button, leftMenufunc, "READING")
        button = self.leftMenu.toread
        self.connectButton(button, leftMenufunc, "TO READ")
        button = self.leftMenu.all
        self.connectButton(button, leftMenufunc, "ALL")
        button = self.leftMenu.done
        self.connectButton(button, leftMenufunc, "DONE")
        button = self.leftMenu.fav
        self.connectButton(button, leftMenufunc, "FAV")
        button = self.leftMenu.settings
        self.connectButton(button, leftMenufunc, "SETTINGS")

    def connectButton(self, button, func, *args):
        """Make *button* invoke ``func(*args)`` on any mouse press."""
        button.setMouseTracking(True)
        button.mousePressEvent = lambda event: func(*args)

    def connectThemingMenu(self):
        """Re-theme the window whenever a new theme is picked."""
        self.pages.settingsPage.content.themingMenu.currentTextChanged.connect(
            self.changeTheme)

    def changeTheme(self, theme):
        """Persist *theme* as the new default and apply it immediately."""
        conn = connect()
        commit_option(conn, "DEFAULT_THEME", theme)
        conn.close()
        styleSheet = themer(theme)
        self.centralwidget.setStyleSheet(styleSheet)

    def getDefaultStyle(self):
        """Return the stylesheet for the persisted theme (or the default)."""
        conn = connect()
        theme = fetch_option(conn, "DEFAULT_THEME")
        if not theme:
            theme = DEFAULT_THEME
        self.theme = theme
        styleSheet = themer(theme)
        conn.close()
        return styleSheet

    def retranslateUi(self, MainWindow):
        """Apply translated window texts.

        BUG FIX: the original assigned ``_translate = translate`` where
        ``translate`` was never defined, raising NameError when called;
        use Qt's standard translation helper instead.
        """
        _translate = QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))

    def setDefaultPage(self, name=None):
        """Switch to page *name*, or to DEFAULT_PAGE when *name* is falsy."""
        if not name:
            name = DEFAULT_PAGE
        self.pages.changePage(name)
| StarcoderdataPython |
6513198 | #! /usr/bin/python
# coding=utf8
# gom.py -- Command line tool for managing a Github organization
# Originally from https://github.com/bertvv/github-org-mgmt
# conversion to python 3 by <NAME>
import sys
from .github_org import GithubOrganizationManager as OrgMgr
#
# Helper functions
#
def usage():
    """Print the command-line help text to stdout."""
    help_template = """
Usage %s ORGANIZATION ACTION [OPTION]...
ORGANIZATION is the name of the Github organization to be managed. You should
have a configuration file named 'ORGANIZATION-conf.yml' in the working
directory.
ACTIONS
c, create-teams CSV creates teams and members from the specified CSV file
p, purge-teams PREFIX delete all teams and associated repos that have a
name starting with PREFIX
delete-teams TXT deletes all teams enumerated in the specified TXT file
delete-repos TXT deletes all repos enumerated in the specified TXT file
l, list-repos prints all repositories in the organization
x, export-teams PREFIX export repositories starting with PREFIX and members
as a CSV file
"""
    # [1:-1] strips the newline right after the opening quotes and the
    # trailing newline before the closing quotes.
    print(help_template[1:-1] % sys.argv[0])
def list_repos(manager):
    """Print the name of every repository in the organization."""
    for repository in manager._organization.get_repos():
        print(repository.name)
def list_teams(manager):
    """Print the name of every team in the organization."""
    for team in manager._organization.get_teams():
        print(team.name)
def delete_repos(manager, options):
    """Delete every repository listed in the given text file."""
    if options:
        manager.delete_repos_in_file(options[0])
        return
    # Missing file argument: explain, show help, and abort.
    print("No file containing repo names specified!")
    usage()
    sys.exit(1)
def delete_teams(manager, options):
    """Delete every team listed in the given text file."""
    if options:
        manager.delete_teams_in_file(options[0])
        return
    # Missing file argument: explain, show help, and abort.
    print("No file containing team names specified!")
    usage()
    sys.exit(1)
def create_teams(manager, options):
    """Read teams and members from a CSV file and create them on Github."""
    if not options:
        print("No user file specified!")
        usage()
        sys.exit(1)
    csv_path = options[0]
    print("Fetching users and teams from %s. This may take a while."
          % csv_path)
    print("Failed users (if any):")
    new_teams = manager.read_teams_from_csv(csv_path)
    print("Adding teams to organization")
    manager.add_teams_to_org(new_teams)
def purge_teams(manager, options):
    """Delete every team (and its repos) whose name starts with a prefix."""
    if not options:
        print("No name prefix of teams to delete specified!")
        usage()
        sys.exit(1)
    manager.delete_teams_and_repos(options[0])
def export_repos(manager, options):
    """Export prefixed repos and their contributors as a CSV file."""
    if not options:
        print("No name prefix of teams to export specified!")
        usage()
        sys.exit(1)
    manager.export_repos_and_contributors(options[0])
def add_members_to_team(manager, options):
    """Add the members listed in a text file to an existing team.

    ``options[0]`` is the team name; ``options[1]`` is the path of a text
    file with one Github username per line.
    """
    if len(options) < 2:
        print("No team/member list specified!")
        usage()  # BUG FIX: 'usage' was referenced without being called
        sys.exit(1)
    team_name = options[0]
    user_file = options[1]
    print("Fetching users from %s." % user_file)
    users = manager.read_members_from_txt(user_file)
    # BUG FIX: the original passed the undefined name 'team' here, which
    # raised NameError; the parsed team name is 'team_name'.
    manager.add_members_to_team(team_name, users)
#
# Script proper
#
if len(sys.argv) < 3:
    print("Not enough arguments, expected at least 2")
    usage()
    sys.exit(2)

organization_name = sys.argv[1]
action = sys.argv[2]
options = sys.argv[3:]

manager = OrgMgr(organization_name)

# BUG FIX: the original started a *second* `if` chain at "list-teams", so a
# valid "list-repos" action also fell through to the final `else` branch and
# the script exited with "Unknown action" after listing.  Both list actions
# also shared the shortcut "l"; "list-teams" now uses "t".
if action == "list-repos" or action == "l":
    list_repos(manager)
elif action == "list-teams" or action == "t":
    list_teams(manager)
elif action == "export-teams" or action == "x":
    export_repos(manager, options)
elif action == "create-teams" or action == "c":
    create_teams(manager, options)
elif action == "delete-repos":
    delete_repos(manager, options)
elif action == "purge-teams" or action == "p":
    purge_teams(manager, options)
elif action == "delete-teams" or action == "d":
    delete_teams(manager, options)
elif action == "add-members" or action == "a":
    add_members_to_team(manager, options)
else:
    print("Unknown action: %s" % action)
    usage()
    sys.exit(1)
| StarcoderdataPython |
3537091 | # Databricks CLI
# Copyright 2017 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"), except
# that the use of services to which certain application programming
# interfaces (each, an "API") connect requires that the user first obtain
# a license for the use of the APIs from Databricks, Inc. ("Databricks"),
# by creating an account at www.databricks.com and agreeing to either (a)
# the Community Edition Terms of Service, (b) the Databricks Terms of
# Service, or (c) another written agreement between Licensee and Databricks
# for the use of the APIs.
#
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version = '0.8.6' # NOQA


def print_version_callback(ctx, param, value): # NOQA
    """Click option callback: print the CLI version and exit the context.

    Does nothing when the flag is absent or click is resilient-parsing.
    """
    import click
    if value and not ctx.resilient_parsing:
        click.echo('Version {}'.format(version))
        ctx.exit()
| StarcoderdataPython |
4896959 | <gh_stars>1-10
#!/usr/bin/env python3
#
# Copyright 2021 Graviti. Licensed under MIT License.
#
"""Utility classes."""
from .attr import AttrsMixin, attr, attr_base, camel, upper
from .common import EqMixin, MatrixType, common_loads, locked
from .deprecated import DefaultValueDeprecated, Deprecated, Disable, KwargsDeprecated
from .file import FileMixin, RemoteFileMixin
from .itertools import chunked
from .name import NameList, NameMixin, SortedNameList
from .repr import ReprMixin, ReprType, repr_config
from .type import TypeEnum, TypeMixin, TypeRegister
from .user import UserMapping, UserMutableMapping, UserMutableSequence, UserSequence
# Public names re-exported by this utility subpackage: the stable surface
# available via ``from <package>.utility import *`` (kept alphabetical,
# classes first, then lowercase helpers).
__all__ = [
    "AttrsMixin",
    "DefaultValueDeprecated",
    "Deprecated",
    "Disable",
    "EqMixin",
    "FileMixin",
    "KwargsDeprecated",
    "MatrixType",
    "NameList",
    "NameMixin",
    "RemoteFileMixin",
    "ReprMixin",
    "ReprType",
    "SortedNameList",
    "TypeEnum",
    "TypeMixin",
    "TypeRegister",
    "UserMapping",
    "UserMutableMapping",
    "UserMutableSequence",
    "UserSequence",
    "attr",
    "attr_base",
    "camel",
    "chunked",
    "common_loads",
    "locked",
    "repr_config",
    "upper",
]
| StarcoderdataPython |
9657121 | <gh_stars>10-100
"""Miniature script:
Takes a plaintext email and
* Truncates quotes nested past depth 2
* Removes Evan's signature (3 lines long)
* Runs it through Markdown
"""
import sys
from pathlib import Path
import markdown
# Count of upcoming input lines still to be skipped (the plaintext
# signature body that follows the '-- ' delimiter).
signature_lines = 0
# Set once the signature delimiter has been seen; gates the
# quote-truncation branch below.
signature_found = False
content = ""
for line in sys.stdin:
    # '-- \n' is the conventional email signature delimiter.
    # NOTE(review): a signature delimiter on the very last line without a
    # trailing newline would not match — confirm inputs always end in '\n'.
    if line == '-- \n':
        # Replace the plaintext signature with a Markdown-formatted one.
        content += '\n' * 2
        content += '**<NAME> (陳誼廷)**<br>' + '\n'
        content += '[https://web.evanchen.cc](https://web.evanchen.cc/)'
        content += '\n' * 2
        signature_lines = 3  # skip the next 3 lines (old signature body)
        signature_found = True
    elif signature_lines > 0:
        signature_lines -= 1
        continue
    elif line.startswith('>>>>') and signature_found:
        # Stop reading once deeply nested quotes appear after the signature.
        # NOTE(review): the module docstring says quotes past depth 2 are
        # truncated, but this tests four '>' markers (depth 4) — confirm
        # which depth is intended.
        break
    else:
        content += line.strip() + '\n'
output_path = Path('/tmp/neomutt-alternative.html')
# Remove any stale output from a previous run before rewriting it.
if output_path.exists():
    output_path.unlink()
with open(output_path, 'w') as f:
    print(markdown.markdown(content, extensions=['extra', 'sane_lists', 'smarty']), file=f)
| StarcoderdataPython |
6641159 | <reponame>kapot65/python-df-tcp
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 12 11:18:05 2016
@author: chernov
"""
import os
import sys
import asyncio
# Make the repository root importable so that 'dfparser' (imported below)
# resolves when this module is run directly from the source tree.
main_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), ".."))
if not main_dir in sys.path: sys.path.append(main_dir)
del main_dir  # keep the module namespace clean
import dfparser as env_parser
class DataforgeEnvelopeProtocol(asyncio.Protocol):
    """Base asyncio protocol speaking the dataforge envelope format.

    Subclasses override :meth:`process_message` to handle each decoded
    envelope; :meth:`send_message` serializes and writes an envelope back
    to the peer.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Bytes received so far that do not yet form a complete envelope.
        self.data = b''

    def connection_made(self, transport):
        self.transport = transport

    def data_received(self, data):
        # Append the new chunk, extract every complete envelope from the
        # accumulated stream, and keep the unparsed tail for next time.
        self.data += data
        complete, self.data = env_parser.get_messages_from_stream(self.data)
        for envelope in complete:
            self.process_message(envelope)

    def process_message(self, message):
        """Handle one decoded envelope (placeholder; override in subclasses).

        *message* is a container with three fields: ``header`` (raw binary
        header), ``meta`` (parsed metadata) and ``data`` (payload bytes).
        The placeholder echoes the metadata back to the peer.
        """
        print("this is process_message() placeholder\n"
              "received message: ", message)
        self.send_message(message['meta'])

    def send_message(self, meta, data=b''):
        """Serialize *meta*/*data* as an envelope and write it out."""
        self.transport.write(env_parser.create_message(meta, data))
def def_callback(message, client_obj):
    """Fallback callback for the echo client.

    Logs the received envelope and closes the client's transport so the
    connection (and, via connection_lost, the event loop) shuts down.
    """
    print("default callback triggered: received message:", message)
    client_obj.transport.close()
class DataforgeEnvelopeEchoClient(DataforgeEnvelopeProtocol):
    """Echo client for dataforge protocol.

    Sends a single envelope on connect, waits up to *timeout_sec* for a
    reply, and dispatches the reply to *callback* (which decides when to
    close the socket). The event loop is stopped when the connection is
    lost.
    """

    def __init__(self, loop, meta, data=b'', callback=def_callback,
                 timeout_sec=1):
        super(DataforgeEnvelopeEchoClient, self).__init__()
        self.meta = meta
        # NOTE(review): the base class reuses self.data as its *receive*
        # buffer, so a non-empty outgoing payload gets mixed into the
        # parsed reply stream — confirm whether that is intended.
        self.data = data
        self.loop = loop
        self.timeout_sec = timeout_sec
        self.callback = callback

    def connection_made(self, transport):
        super(DataforgeEnvelopeEchoClient, self).connection_made(transport)
        # Arm the timeout before sending so a silent peer cannot hang us.
        self.h_timeout = self.loop.call_later(self.timeout_sec, self.timeout)
        self.send_message(self.meta, self.data)

    # CLEANUP: the original also overrode data_received() with a pure
    # pass-through to the base class; the redundant override was removed.

    def process_message(self, message):
        # A reply arrived in time: cancel the timeout and hand the message
        # to the user callback on the loop's thread.
        self.h_timeout.cancel()
        self.loop.call_soon_threadsafe(self.callback, message, self)

    def connection_lost(self, exc):
        self.loop.stop()

    def timeout(self):
        # No reply within timeout_sec: drop the connection.
        self.transport.close()
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.